| // This file is generated by Tools/cases_generator/tier1_generator.py |
| // from: |
| // Python/bytecodes.c |
| // Do not edit! |
| |
| #ifdef TIER_TWO |
| #error "This file is for Tier 1 only" |
| #endif |
| #define TIER_ONE 1 |
| |
| |
// Adaptive BINARY_OP: apply the binary operator selected by oparg (an
// index into the _PyEval_BinaryOps table) to the top two stack items,
// specializing this instruction when the adaptive counter triggers.
TARGET(BINARY_OP) {
    frame->instr_ptr = next_instr;
    next_instr += 2;  // opcode word + 1 inline cache entry (counter)
    INSTRUCTION_STATS(BINARY_OP);
    PREDICTED(BINARY_OP);
    _Py_CODEUNIT* const this_instr = next_instr - 2;
    (void)this_instr;
    _PyStackRef lhs;
    _PyStackRef rhs;
    _PyStackRef res;
    // _SPECIALIZE_BINARY_OP
    {
        rhs = stack_pointer[-1];
        lhs = stack_pointer[-2];
        uint16_t counter = read_u16(&this_instr[1].cache);
        (void)counter;
        #if ENABLE_SPECIALIZATION_FT
        if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
            // Back up and re-execute this instruction with whatever
            // specialized opcode _Py_Specialize_BinaryOp installed.
            next_instr = this_instr;
            _PyFrame_SetStackPointer(frame, stack_pointer);
            _Py_Specialize_BinaryOp(lhs, rhs, next_instr, oparg, LOCALS_ARRAY);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            DISPATCH_SAME_OPARG();
        }
        OPCODE_DEFERRED_INC(BINARY_OP);
        ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
        #endif /* ENABLE_SPECIALIZATION_FT */
        // oparg must be a valid slot in _PyEval_BinaryOps.
        assert(NB_ADD <= oparg);
        assert(oparg <= NB_INPLACE_XOR);
    }
    // _BINARY_OP
    {
        PyObject *lhs_o = PyStackRef_AsPyObjectBorrow(lhs);
        PyObject *rhs_o = PyStackRef_AsPyObjectBorrow(rhs);
        assert(_PyEval_BinaryOps[oparg]);
        // The operator may call arbitrary Python code, so spill the
        // stack pointer around the call.
        _PyFrame_SetStackPointer(frame, stack_pointer);
        PyObject *res_o = _PyEval_BinaryOps[oparg](lhs_o, rhs_o);
        stack_pointer = _PyFrame_GetStackPointer(frame);
        PyStackRef_CLOSE(lhs);
        PyStackRef_CLOSE(rhs);
        if (res_o == NULL) goto pop_2_error;
        res = PyStackRef_FromPyObjectSteal(res_o);
    }
    stack_pointer[-2] = res;
    stack_pointer += -1;  // net effect: pop two operands, push one result
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
// Specialization of BINARY_OP for float + float; deopts back to the
// generic BINARY_OP unless both operands are exactly PyFloat.
TARGET(BINARY_OP_ADD_FLOAT) {
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(BINARY_OP_ADD_FLOAT);
    static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size");
    _PyStackRef left;
    _PyStackRef right;
    _PyStackRef res;
    // _GUARD_BOTH_FLOAT
    {
        right = stack_pointer[-1];
        left = stack_pointer[-2];
        PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
        PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
        // Exact type checks: subclasses must take the generic path.
        DEOPT_IF(!PyFloat_CheckExact(left_o), BINARY_OP);
        DEOPT_IF(!PyFloat_CheckExact(right_o), BINARY_OP);
    }
    /* Skip 1 cache entry */
    // _BINARY_OP_ADD_FLOAT
    {
        PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
        PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
        STAT_INC(BINARY_OP, hit);
        double dres =
            ((PyFloatObject *)left_o)->ob_fval +
            ((PyFloatObject *)right_o)->ob_fval;
        // Consumes the references held by `left` and `right`
        // (may reuse one of the operand objects for the result).
        PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
        if (res_o == NULL) goto pop_2_error;
        res = PyStackRef_FromPyObjectSteal(res_o);
    }
    stack_pointer[-2] = res;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
// Specialization of BINARY_OP for int + int; deopts back to the
// generic BINARY_OP unless both operands are exactly PyLong.
TARGET(BINARY_OP_ADD_INT) {
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(BINARY_OP_ADD_INT);
    static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size");
    _PyStackRef left;
    _PyStackRef right;
    _PyStackRef res;
    // _GUARD_BOTH_INT
    {
        right = stack_pointer[-1];
        left = stack_pointer[-2];
        PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
        PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
        // Exact type checks: subclasses must take the generic path.
        DEOPT_IF(!PyLong_CheckExact(left_o), BINARY_OP);
        DEOPT_IF(!PyLong_CheckExact(right_o), BINARY_OP);
    }
    /* Skip 1 cache entry */
    // _BINARY_OP_ADD_INT
    {
        PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
        PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
        STAT_INC(BINARY_OP, hit);
        PyObject *res_o = _PyLong_Add((PyLongObject *)left_o, (PyLongObject *)right_o);
        // Exact ints are freed directly with PyObject_Free,
        // skipping the generic destructor lookup.
        PyStackRef_CLOSE_SPECIALIZED(right, (destructor)PyObject_Free);
        PyStackRef_CLOSE_SPECIALIZED(left, (destructor)PyObject_Free);
        if (res_o == NULL) goto pop_2_error;
        res = PyStackRef_FromPyObjectSteal(res_o);
    }
    stack_pointer[-2] = res;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
// Specialization of BINARY_OP for str + str; deopts back to the
// generic BINARY_OP unless both operands are exactly PyUnicode.
TARGET(BINARY_OP_ADD_UNICODE) {
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(BINARY_OP_ADD_UNICODE);
    static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size");
    _PyStackRef left;
    _PyStackRef right;
    _PyStackRef res;
    // _GUARD_BOTH_UNICODE
    {
        right = stack_pointer[-1];
        left = stack_pointer[-2];
        PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
        PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
        // Exact type checks: subclasses must take the generic path.
        DEOPT_IF(!PyUnicode_CheckExact(left_o), BINARY_OP);
        DEOPT_IF(!PyUnicode_CheckExact(right_o), BINARY_OP);
    }
    /* Skip 1 cache entry */
    // _BINARY_OP_ADD_UNICODE
    {
        PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
        PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
        STAT_INC(BINARY_OP, hit);
        PyObject *res_o = PyUnicode_Concat(left_o, right_o);
        // Exact strings are freed with the unicode-specific dealloc.
        PyStackRef_CLOSE_SPECIALIZED(left, _PyUnicode_ExactDealloc);
        PyStackRef_CLOSE_SPECIALIZED(right, _PyUnicode_ExactDealloc);
        if (res_o == NULL) goto pop_2_error;
        res = PyStackRef_FromPyObjectSteal(res_o);
    }
    stack_pointer[-2] = res;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
// Specialization of BINARY_OP for the `x += y` / `x = x + y` string
// pattern where the result is immediately stored back into the same
// local (a STORE_FAST must follow).  Mutates the local in place via
// PyUnicode_Append when possible and skips the following STORE_FAST.
TARGET(BINARY_OP_INPLACE_ADD_UNICODE) {
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(BINARY_OP_INPLACE_ADD_UNICODE);
    static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size");
    _PyStackRef left;
    _PyStackRef right;
    // _GUARD_BOTH_UNICODE
    {
        right = stack_pointer[-1];
        left = stack_pointer[-2];
        PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
        PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
        DEOPT_IF(!PyUnicode_CheckExact(left_o), BINARY_OP);
        DEOPT_IF(!PyUnicode_CheckExact(right_o), BINARY_OP);
    }
    /* Skip 1 cache entry */
    // _BINARY_OP_INPLACE_ADD_UNICODE
    {
        #ifndef NDEBUG
        PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
        #endif
        PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
        int next_oparg;
        #if TIER_ONE
        // In tier one the target local index comes from the following
        // STORE_FAST instruction's oparg.
        assert(next_instr->op.code == STORE_FAST);
        next_oparg = next_instr->op.arg;
        #else
        next_oparg = CURRENT_OPERAND0();
        #endif
        _PyStackRef *target_local = &GETLOCAL(next_oparg);
        // The local must still hold the same object as the left
        // operand; otherwise the in-place trick is invalid.
        DEOPT_IF(!PyStackRef_Is(*target_local, left), BINARY_OP);
        STAT_INC(BINARY_OP, hit);
        /* Handle `left = left + right` or `left += right` for str.
         *
         * When possible, extend `left` in place rather than
         * allocating a new PyUnicodeObject. This attempts to avoid
         * quadratic behavior when one neglects to use str.join().
         *
         * If `left` has only two references remaining (one from
         * the stack, one in the locals), DECREFing `left` leaves
         * only the locals reference, so PyUnicode_Append knows
         * that the string is safe to mutate.
         */
        assert(Py_REFCNT(left_o) >= 2);
        PyStackRef_CLOSE(left);
        PyObject *temp = PyStackRef_AsPyObjectBorrow(*target_local);
        // PyUnicode_Append sets `temp` to NULL on failure.
        PyUnicode_Append(&temp, right_o);
        *target_local = PyStackRef_FromPyObjectSteal(temp);
        PyStackRef_CLOSE_SPECIALIZED(right, _PyUnicode_ExactDealloc);
        if (PyStackRef_IsNull(*target_local)) goto pop_2_error;
        #if TIER_ONE
        // The STORE_FAST is already done. This is done here in tier one,
        // and during trace projection in tier two:
        assert(next_instr->op.code == STORE_FAST);
        SKIP_OVER(1);
        #endif
    }
    stack_pointer += -2;  // both operands consumed; nothing pushed
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
// Specialization of BINARY_OP for float * float; deopts back to the
// generic BINARY_OP unless both operands are exactly PyFloat.
TARGET(BINARY_OP_MULTIPLY_FLOAT) {
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(BINARY_OP_MULTIPLY_FLOAT);
    static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size");
    _PyStackRef left;
    _PyStackRef right;
    _PyStackRef res;
    // _GUARD_BOTH_FLOAT
    {
        right = stack_pointer[-1];
        left = stack_pointer[-2];
        PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
        PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
        DEOPT_IF(!PyFloat_CheckExact(left_o), BINARY_OP);
        DEOPT_IF(!PyFloat_CheckExact(right_o), BINARY_OP);
    }
    /* Skip 1 cache entry */
    // _BINARY_OP_MULTIPLY_FLOAT
    {
        PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
        PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
        STAT_INC(BINARY_OP, hit);
        double dres =
            ((PyFloatObject *)left_o)->ob_fval *
            ((PyFloatObject *)right_o)->ob_fval;
        // Consumes the references held by `left` and `right`.
        PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
        if (res_o == NULL) goto pop_2_error;
        res = PyStackRef_FromPyObjectSteal(res_o);
    }
    stack_pointer[-2] = res;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
// Specialization of BINARY_OP for int * int; deopts back to the
// generic BINARY_OP unless both operands are exactly PyLong.
TARGET(BINARY_OP_MULTIPLY_INT) {
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(BINARY_OP_MULTIPLY_INT);
    static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size");
    _PyStackRef left;
    _PyStackRef right;
    _PyStackRef res;
    // _GUARD_BOTH_INT
    {
        right = stack_pointer[-1];
        left = stack_pointer[-2];
        PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
        PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
        DEOPT_IF(!PyLong_CheckExact(left_o), BINARY_OP);
        DEOPT_IF(!PyLong_CheckExact(right_o), BINARY_OP);
    }
    /* Skip 1 cache entry */
    // _BINARY_OP_MULTIPLY_INT
    {
        PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
        PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
        STAT_INC(BINARY_OP, hit);
        PyObject *res_o = _PyLong_Multiply((PyLongObject *)left_o, (PyLongObject *)right_o);
        // Exact ints are freed directly with PyObject_Free.
        PyStackRef_CLOSE_SPECIALIZED(right, (destructor)PyObject_Free);
        PyStackRef_CLOSE_SPECIALIZED(left, (destructor)PyObject_Free);
        if (res_o == NULL) goto pop_2_error;
        res = PyStackRef_FromPyObjectSteal(res_o);
    }
    stack_pointer[-2] = res;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
// Specialization of BINARY_OP for float - float; deopts back to the
// generic BINARY_OP unless both operands are exactly PyFloat.
TARGET(BINARY_OP_SUBTRACT_FLOAT) {
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(BINARY_OP_SUBTRACT_FLOAT);
    static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size");
    _PyStackRef left;
    _PyStackRef right;
    _PyStackRef res;
    // _GUARD_BOTH_FLOAT
    {
        right = stack_pointer[-1];
        left = stack_pointer[-2];
        PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
        PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
        DEOPT_IF(!PyFloat_CheckExact(left_o), BINARY_OP);
        DEOPT_IF(!PyFloat_CheckExact(right_o), BINARY_OP);
    }
    /* Skip 1 cache entry */
    // _BINARY_OP_SUBTRACT_FLOAT
    {
        PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
        PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
        STAT_INC(BINARY_OP, hit);
        double dres =
            ((PyFloatObject *)left_o)->ob_fval -
            ((PyFloatObject *)right_o)->ob_fval;
        // Consumes the references held by `left` and `right`.
        PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
        if (res_o == NULL) goto pop_2_error;
        res = PyStackRef_FromPyObjectSteal(res_o);
    }
    stack_pointer[-2] = res;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
// Specialization of BINARY_OP for int - int; deopts back to the
// generic BINARY_OP unless both operands are exactly PyLong.
TARGET(BINARY_OP_SUBTRACT_INT) {
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(BINARY_OP_SUBTRACT_INT);
    static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size");
    _PyStackRef left;
    _PyStackRef right;
    _PyStackRef res;
    // _GUARD_BOTH_INT
    {
        right = stack_pointer[-1];
        left = stack_pointer[-2];
        PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
        PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
        DEOPT_IF(!PyLong_CheckExact(left_o), BINARY_OP);
        DEOPT_IF(!PyLong_CheckExact(right_o), BINARY_OP);
    }
    /* Skip 1 cache entry */
    // _BINARY_OP_SUBTRACT_INT
    {
        PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
        PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
        STAT_INC(BINARY_OP, hit);
        PyObject *res_o = _PyLong_Subtract((PyLongObject *)left_o, (PyLongObject *)right_o);
        // Exact ints are freed directly with PyObject_Free.
        PyStackRef_CLOSE_SPECIALIZED(right, (destructor)PyObject_Free);
        PyStackRef_CLOSE_SPECIALIZED(left, (destructor)PyObject_Free);
        if (res_o == NULL) goto pop_2_error;
        res = PyStackRef_FromPyObjectSteal(res_o);
    }
    stack_pointer[-2] = res;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
// BINARY_SLICE: pop stop, start, container; push container[start:stop].
// Builds a temporary slice object, then calls PyObject_GetItem.
TARGET(BINARY_SLICE) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(BINARY_SLICE);
    _PyStackRef container;
    _PyStackRef start;
    _PyStackRef stop;
    _PyStackRef res;
    // _SPECIALIZE_BINARY_SLICE
    {
        // Placeholder until we implement BINARY_SLICE specialization
        #if ENABLE_SPECIALIZATION
        OPCODE_DEFERRED_INC(BINARY_SLICE);
        #endif /* ENABLE_SPECIALIZATION */
    }
    // _BINARY_SLICE
    {
        stop = stack_pointer[-1];
        start = stack_pointer[-2];
        container = stack_pointer[-3];
        // _PyBuildSlice_ConsumeRefs steals the start/stop references
        // regardless of success or failure.
        _PyFrame_SetStackPointer(frame, stack_pointer);
        PyObject *slice = _PyBuildSlice_ConsumeRefs(PyStackRef_AsPyObjectSteal(start),
            PyStackRef_AsPyObjectSteal(stop));
        stack_pointer = _PyFrame_GetStackPointer(frame);
        PyObject *res_o;
        // Can't use ERROR_IF() here, because we haven't
        // DECREF'ed container yet, and we still own slice.
        if (slice == NULL) {
            res_o = NULL;
        }
        else {
            // Temporarily shrink the visible stack so the (already
            // consumed) start/stop slots aren't treated as live
            // during the re-entrant PyObject_GetItem call.
            stack_pointer += -2;
            assert(WITHIN_STACK_BOUNDS());
            _PyFrame_SetStackPointer(frame, stack_pointer);
            res_o = PyObject_GetItem(PyStackRef_AsPyObjectBorrow(container), slice);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            Py_DECREF(slice);
            stack_pointer += 2;
            assert(WITHIN_STACK_BOUNDS());
        }
        PyStackRef_CLOSE(container);
        if (res_o == NULL) goto pop_3_error;
        res = PyStackRef_FromPyObjectSteal(res_o);
    }
    stack_pointer[-3] = res;
    stack_pointer += -2;  // three operands popped, one result pushed
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
// Adaptive BINARY_SUBSCR: pop sub and container, push container[sub],
// specializing this instruction when the adaptive counter triggers.
TARGET(BINARY_SUBSCR) {
    frame->instr_ptr = next_instr;
    next_instr += 2;  // opcode word + 1 inline cache entry (counter)
    INSTRUCTION_STATS(BINARY_SUBSCR);
    PREDICTED(BINARY_SUBSCR);
    _Py_CODEUNIT* const this_instr = next_instr - 2;
    (void)this_instr;
    _PyStackRef container;
    _PyStackRef sub;
    _PyStackRef res;
    // _SPECIALIZE_BINARY_SUBSCR
    {
        sub = stack_pointer[-1];
        container = stack_pointer[-2];
        uint16_t counter = read_u16(&this_instr[1].cache);
        (void)counter;
        #if ENABLE_SPECIALIZATION
        assert(frame->stackpointer == NULL);
        if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
            // Back up and re-execute with the specialized opcode.
            next_instr = this_instr;
            _PyFrame_SetStackPointer(frame, stack_pointer);
            _Py_Specialize_BinarySubscr(container, sub, next_instr);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            DISPATCH_SAME_OPARG();
        }
        OPCODE_DEFERRED_INC(BINARY_SUBSCR);
        ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
        #endif /* ENABLE_SPECIALIZATION */
    }
    // _BINARY_SUBSCR
    {
        PyObject *container_o = PyStackRef_AsPyObjectBorrow(container);
        PyObject *sub_o = PyStackRef_AsPyObjectBorrow(sub);
        // May call arbitrary __getitem__, so spill the stack pointer.
        _PyFrame_SetStackPointer(frame, stack_pointer);
        PyObject *res_o = PyObject_GetItem(container_o, sub_o);
        stack_pointer = _PyFrame_GetStackPointer(frame);
        PyStackRef_CLOSE(container);
        PyStackRef_CLOSE(sub);
        if (res_o == NULL) goto pop_2_error;
        res = PyStackRef_FromPyObjectSteal(res_o);
    }
    stack_pointer[-2] = res;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
// Specialization of BINARY_SUBSCR for exact dicts: dict[sub].
// Raises KeyError (via _PyErr_SetKeyError) when the key is missing.
TARGET(BINARY_SUBSCR_DICT) {
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(BINARY_SUBSCR_DICT);
    static_assert(INLINE_CACHE_ENTRIES_BINARY_SUBSCR == 1, "incorrect cache size");
    _PyStackRef dict_st;
    _PyStackRef sub_st;
    _PyStackRef res;
    /* Skip 1 cache entry */
    sub_st = stack_pointer[-1];
    dict_st = stack_pointer[-2];
    PyObject *sub = PyStackRef_AsPyObjectBorrow(sub_st);
    PyObject *dict = PyStackRef_AsPyObjectBorrow(dict_st);
    DEOPT_IF(!PyDict_CheckExact(dict), BINARY_SUBSCR);
    STAT_INC(BINARY_SUBSCR, hit);
    PyObject *res_o;
    // rc > 0: found (res_o holds a new reference);
    // rc == 0: not found; rc < 0: error during lookup.
    _PyFrame_SetStackPointer(frame, stack_pointer);
    int rc = PyDict_GetItemRef(dict, sub, &res_o);
    stack_pointer = _PyFrame_GetStackPointer(frame);
    if (rc == 0) {
        _PyFrame_SetStackPointer(frame, stack_pointer);
        _PyErr_SetKeyError(sub);
        stack_pointer = _PyFrame_GetStackPointer(frame);
    }
    PyStackRef_CLOSE(dict_st);
    PyStackRef_CLOSE(sub_st);
    if (rc <= 0) goto pop_2_error;  // not found or error
    res = PyStackRef_FromPyObjectSteal(res_o);
    stack_pointer[-2] = res;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
// Specialization of BINARY_SUBSCR for a heap type with a cached pure-
// Python __getitem__: instead of a C-level call, push a new interpreter
// frame for __getitem__(container, sub) and continue executing there.
TARGET(BINARY_SUBSCR_GETITEM) {
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(BINARY_SUBSCR_GETITEM);
    static_assert(INLINE_CACHE_ENTRIES_BINARY_SUBSCR == 1, "incorrect cache size");
    _PyStackRef container;
    _PyStackRef sub;
    _PyInterpreterFrame *new_frame;
    /* Skip 1 cache entry */
    // _CHECK_PEP_523
    {
        // A custom eval frame hook (PEP 523) disables frame inlining.
        DEOPT_IF(tstate->interp->eval_frame, BINARY_SUBSCR);
    }
    // _BINARY_SUBSCR_CHECK_FUNC
    {
        container = stack_pointer[-2];
        PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(container));
        DEOPT_IF(!PyType_HasFeature(tp, Py_TPFLAGS_HEAPTYPE), BINARY_SUBSCR);
        PyHeapTypeObject *ht = (PyHeapTypeObject *)tp;
        PyObject *getitem = ht->_spec_cache.getitem;
        DEOPT_IF(getitem == NULL, BINARY_SUBSCR);
        assert(PyFunction_Check(getitem));
        // The cached function version guards against __getitem__
        // having been redefined since specialization.
        uint32_t cached_version = ht->_spec_cache.getitem_version;
        DEOPT_IF(((PyFunctionObject *)getitem)->func_version != cached_version, BINARY_SUBSCR);
        PyCodeObject *code = (PyCodeObject *)PyFunction_GET_CODE(getitem);
        assert(code->co_argcount == 2);
        DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), BINARY_SUBSCR);
        STAT_INC(BINARY_SUBSCR, hit);
    }
    // _BINARY_SUBSCR_INIT_CALL
    {
        sub = stack_pointer[-1];
        PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(container));
        PyHeapTypeObject *ht = (PyHeapTypeObject *)tp;
        PyObject *getitem = ht->_spec_cache.getitem;
        // __getitem__(self, key): the two stack items become the
        // callee's first two locals (references are transferred).
        new_frame = _PyFrame_PushUnchecked(tstate, PyStackRef_FromPyObjectNew(getitem), 2, frame);
        new_frame->localsplus[0] = container;
        new_frame->localsplus[1] = sub;
        frame->return_offset = 2 ;
    }
    // _PUSH_FRAME
    {
        // Write it out explicitly because it's subtly different.
        // Eventually this should be the only occurrence of this code.
        assert(tstate->interp->eval_frame == NULL);
        _PyInterpreterFrame *temp = new_frame;
        stack_pointer += -2;
        assert(WITHIN_STACK_BOUNDS());
        _PyFrame_SetStackPointer(frame, stack_pointer);
        assert(new_frame->previous == frame || new_frame->previous->previous == frame);
        CALL_STAT_INC(inlined_py_calls);
        frame = tstate->current_frame = temp;
        tstate->py_recursion_remaining--;
        LOAD_SP();
        LOAD_IP(0);
        LLTRACE_RESUME_FRAME();
    }
    DISPATCH();
}
| |
// Specialization of BINARY_SUBSCR for list[int] with a small
// non-negative index; deopts on any type or range mismatch.
TARGET(BINARY_SUBSCR_LIST_INT) {
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(BINARY_SUBSCR_LIST_INT);
    static_assert(INLINE_CACHE_ENTRIES_BINARY_SUBSCR == 1, "incorrect cache size");
    _PyStackRef list_st;
    _PyStackRef sub_st;
    _PyStackRef res;
    /* Skip 1 cache entry */
    sub_st = stack_pointer[-1];
    list_st = stack_pointer[-2];
    PyObject *sub = PyStackRef_AsPyObjectBorrow(sub_st);
    PyObject *list = PyStackRef_AsPyObjectBorrow(list_st);
    DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR);
    DEOPT_IF(!PyList_CheckExact(list), BINARY_SUBSCR);
    // Deopt unless 0 <= sub < PyList_Size(list)
    DEOPT_IF(!_PyLong_IsNonNegativeCompact((PyLongObject *)sub), BINARY_SUBSCR);
    // Compact non-negative ints fit in a single digit.
    Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0];
    DEOPT_IF(index >= PyList_GET_SIZE(list), BINARY_SUBSCR);
    STAT_INC(BINARY_SUBSCR, hit);
    PyObject *res_o = PyList_GET_ITEM(list, index);
    assert(res_o != NULL);
    // GET_ITEM borrows; take our own reference before popping the list.
    Py_INCREF(res_o);
    PyStackRef_CLOSE_SPECIALIZED(sub_st, (destructor)PyObject_Free);
    PyStackRef_CLOSE(list_st);
    res = PyStackRef_FromPyObjectSteal(res_o);
    stack_pointer[-2] = res;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
// Specialization of BINARY_SUBSCR for str[int] producing a cached
// single-character ASCII string singleton; deopts otherwise.
TARGET(BINARY_SUBSCR_STR_INT) {
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(BINARY_SUBSCR_STR_INT);
    static_assert(INLINE_CACHE_ENTRIES_BINARY_SUBSCR == 1, "incorrect cache size");
    _PyStackRef str_st;
    _PyStackRef sub_st;
    _PyStackRef res;
    /* Skip 1 cache entry */
    sub_st = stack_pointer[-1];
    str_st = stack_pointer[-2];
    PyObject *sub = PyStackRef_AsPyObjectBorrow(sub_st);
    PyObject *str = PyStackRef_AsPyObjectBorrow(str_st);
    DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR);
    DEOPT_IF(!PyUnicode_CheckExact(str), BINARY_SUBSCR);
    DEOPT_IF(!_PyLong_IsNonNegativeCompact((PyLongObject *)sub), BINARY_SUBSCR);
    Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0];
    DEOPT_IF(PyUnicode_GET_LENGTH(str) <= index, BINARY_SUBSCR);
    // Specialize for reading an ASCII character from any string:
    Py_UCS4 c = PyUnicode_READ_CHAR(str, index);
    DEOPT_IF(Py_ARRAY_LENGTH(_Py_SINGLETON(strings).ascii) <= c, BINARY_SUBSCR);
    STAT_INC(BINARY_SUBSCR, hit);
    // Immortal interned 1-char string; no INCREF bookkeeping needed here.
    PyObject *res_o = (PyObject*)&_Py_SINGLETON(strings).ascii[c];
    PyStackRef_CLOSE_SPECIALIZED(sub_st, (destructor)PyObject_Free);
    PyStackRef_CLOSE(str_st);
    res = PyStackRef_FromPyObjectSteal(res_o);
    stack_pointer[-2] = res;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
// Specialization of BINARY_SUBSCR for tuple[int] with a small
// non-negative index; deopts on any type or range mismatch.
TARGET(BINARY_SUBSCR_TUPLE_INT) {
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(BINARY_SUBSCR_TUPLE_INT);
    static_assert(INLINE_CACHE_ENTRIES_BINARY_SUBSCR == 1, "incorrect cache size");
    _PyStackRef tuple_st;
    _PyStackRef sub_st;
    _PyStackRef res;
    /* Skip 1 cache entry */
    sub_st = stack_pointer[-1];
    tuple_st = stack_pointer[-2];
    PyObject *sub = PyStackRef_AsPyObjectBorrow(sub_st);
    PyObject *tuple = PyStackRef_AsPyObjectBorrow(tuple_st);
    DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR);
    DEOPT_IF(!PyTuple_CheckExact(tuple), BINARY_SUBSCR);
    // Deopt unless 0 <= sub < PyTuple_Size(tuple)
    DEOPT_IF(!_PyLong_IsNonNegativeCompact((PyLongObject *)sub), BINARY_SUBSCR);
    // Compact non-negative ints fit in a single digit.
    Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0];
    DEOPT_IF(index >= PyTuple_GET_SIZE(tuple), BINARY_SUBSCR);
    STAT_INC(BINARY_SUBSCR, hit);
    PyObject *res_o = PyTuple_GET_ITEM(tuple, index);
    assert(res_o != NULL);
    // GET_ITEM borrows; take our own reference before popping the tuple.
    Py_INCREF(res_o);
    PyStackRef_CLOSE_SPECIALIZED(sub_st, (destructor)PyObject_Free);
    PyStackRef_CLOSE(tuple_st);
    res = PyStackRef_FromPyObjectSteal(res_o);
    stack_pointer[-2] = res;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
// BUILD_LIST: pop the top oparg stack items and push a new list
// containing them (references are stolen by the list on success,
// and consumed by the helper even on failure).
TARGET(BUILD_LIST) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(BUILD_LIST);
    _PyStackRef *values;
    _PyStackRef list;
    values = &stack_pointer[-oparg];
    PyObject *list_o = _PyList_FromStackRefSteal(values, oparg);
    if (list_o == NULL) {
        stack_pointer += -oparg;
        assert(WITHIN_STACK_BOUNDS());
        goto error;
    }
    list = PyStackRef_FromPyObjectSteal(list_o);
    stack_pointer[-oparg] = list;
    stack_pointer += 1 - oparg;  // oparg items replaced by one list
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
// BUILD_MAP: pop oparg (key, value) pairs — 2*oparg stack items,
// interleaved key, value — and push a new dict built from them.
TARGET(BUILD_MAP) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(BUILD_MAP);
    _PyStackRef *values;
    _PyStackRef map;
    values = &stack_pointer[-oparg*2];
    // Materialize the stackrefs as a PyObject* array for the C API.
    STACKREFS_TO_PYOBJECTS(values, oparg*2, values_o);
    if (CONVERSION_FAILED(values_o)) {
        for (int _i = oparg*2; --_i >= 0;) {
            PyStackRef_CLOSE(values[_i]);
        }
        {
            stack_pointer += -oparg*2;
            assert(WITHIN_STACK_BOUNDS());
            goto error;
        }
    }
    _PyFrame_SetStackPointer(frame, stack_pointer);
    // Keys at even offsets, values at odd offsets (stride 2 each).
    PyObject *map_o = _PyDict_FromItems(
        values_o, 2,
        values_o+1, 2,
        oparg);
    stack_pointer = _PyFrame_GetStackPointer(frame);
    STACKREFS_TO_PYOBJECTS_CLEANUP(values_o);
    for (int _i = oparg*2; --_i >= 0;) {
        PyStackRef_CLOSE(values[_i]);
    }
    if (map_o == NULL) {
        stack_pointer += -oparg*2;
        assert(WITHIN_STACK_BOUNDS());
        goto error;
    }
    map = PyStackRef_FromPyObjectSteal(map_o);
    stack_pointer[-oparg*2] = map;
    stack_pointer += 1 - oparg*2;  // 2*oparg items replaced by one dict
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
// BUILD_SET: pop the top oparg stack items and push a new set
// containing them.  All item references are closed whether or not
// insertion succeeds.
TARGET(BUILD_SET) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(BUILD_SET);
    _PyStackRef *values;
    _PyStackRef set;
    values = &stack_pointer[-oparg];
    _PyFrame_SetStackPointer(frame, stack_pointer);
    PyObject *set_o = PySet_New(NULL);
    stack_pointer = _PyFrame_GetStackPointer(frame);
    if (set_o == NULL) {
        for (int _i = oparg; --_i >= 0;) {
            PyStackRef_CLOSE(values[_i]);
        }
        {
            stack_pointer += -oparg;
            assert(WITHIN_STACK_BOUNDS());
            goto error;
        }
    }
    int err = 0;
    for (int i = 0; i < oparg; i++) {
        // After the first failure, stop adding but keep closing the
        // remaining item references.
        if (err == 0) {
            _PyFrame_SetStackPointer(frame, stack_pointer);
            err = PySet_Add(set_o, PyStackRef_AsPyObjectBorrow(values[i]));
            stack_pointer = _PyFrame_GetStackPointer(frame);
        }
        PyStackRef_CLOSE(values[i]);
    }
    if (err != 0) {
        Py_DECREF(set_o);
        {
            stack_pointer += -oparg;
            assert(WITHIN_STACK_BOUNDS());
            goto error;
        }
    }
    set = PyStackRef_FromPyObjectSteal(set_o);
    stack_pointer[-oparg] = set;
    stack_pointer += 1 - oparg;  // oparg items replaced by one set
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
// BUILD_SLICE: build slice(start, stop[, step]) from the top 2 or 3
// stack items.  oparg is 2 or 3; step is only present when oparg == 3.
TARGET(BUILD_SLICE) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(BUILD_SLICE);
    _PyStackRef start;
    _PyStackRef stop;
    _PyStackRef step = PyStackRef_NULL;
    _PyStackRef slice;
    if (oparg == 3) { step = stack_pointer[-((oparg == 3) ? 1 : 0)]; }
    stop = stack_pointer[-1 - ((oparg == 3) ? 1 : 0)];
    start = stack_pointer[-2 - ((oparg == 3) ? 1 : 0)];
    PyObject *start_o = PyStackRef_AsPyObjectBorrow(start);
    PyObject *stop_o = PyStackRef_AsPyObjectBorrow(stop);
    // step_o is NULL when oparg == 2 (step stackref is PyStackRef_NULL).
    PyObject *step_o = PyStackRef_AsPyObjectBorrow(step);
    PyObject *slice_o = PySlice_New(start_o, stop_o, step_o);
    PyStackRef_CLOSE(start);
    PyStackRef_CLOSE(stop);
    PyStackRef_XCLOSE(step);  // XCLOSE: step may be null
    if (slice_o == NULL) {
        stack_pointer += -2 - ((oparg == 3) ? 1 : 0);
        assert(WITHIN_STACK_BOUNDS());
        goto error;
    }
    slice = PyStackRef_FromPyObjectSteal(slice_o);
    stack_pointer[-2 - ((oparg == 3) ? 1 : 0)] = slice;
    stack_pointer += -1 - ((oparg == 3) ? 1 : 0);
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
// BUILD_STRING: pop the top oparg string pieces and push their
// concatenation (joined with the empty-string singleton).
TARGET(BUILD_STRING) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(BUILD_STRING);
    _PyStackRef *pieces;
    _PyStackRef str;
    pieces = &stack_pointer[-oparg];
    // Materialize the stackrefs as a PyObject* array for the C API.
    STACKREFS_TO_PYOBJECTS(pieces, oparg, pieces_o);
    if (CONVERSION_FAILED(pieces_o)) {
        for (int _i = oparg; --_i >= 0;) {
            PyStackRef_CLOSE(pieces[_i]);
        }
        {
            stack_pointer += -oparg;
            assert(WITHIN_STACK_BOUNDS());
            goto error;
        }
    }
    PyObject *str_o = _PyUnicode_JoinArray(&_Py_STR(empty), pieces_o, oparg);
    STACKREFS_TO_PYOBJECTS_CLEANUP(pieces_o);
    for (int _i = oparg; --_i >= 0;) {
        PyStackRef_CLOSE(pieces[_i]);
    }
    if (str_o == NULL) {
        stack_pointer += -oparg;
        assert(WITHIN_STACK_BOUNDS());
        goto error;
    }
    str = PyStackRef_FromPyObjectSteal(str_o);
    stack_pointer[-oparg] = str;
    stack_pointer += 1 - oparg;  // oparg pieces replaced by one string
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
// BUILD_TUPLE: pop the top oparg stack items and push a new tuple
// containing them (references are stolen by the tuple on success,
// and consumed by the helper even on failure).
TARGET(BUILD_TUPLE) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(BUILD_TUPLE);
    _PyStackRef *values;
    _PyStackRef tup;
    values = &stack_pointer[-oparg];
    PyObject *tup_o = _PyTuple_FromStackRefSteal(values, oparg);
    if (tup_o == NULL) {
        stack_pointer += -oparg;
        assert(WITHIN_STACK_BOUNDS());
        goto error;
    }
    tup = PyStackRef_FromPyObjectSteal(tup_o);
    stack_pointer[-oparg] = tup;
    stack_pointer += 1 - oparg;  // oparg items replaced by one tuple
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
// CACHE entries are inline data, never valid instructions; reaching
// this handler means the instruction pointer is corrupt — abort.
TARGET(CACHE) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(CACHE);
    assert(0 && "Executing a cache.");
    Py_FatalError("Executing a cache.");
    DISPATCH();
}
| |
| TARGET(CALL) { |
| frame->instr_ptr = next_instr; |
| next_instr += 4; |
| INSTRUCTION_STATS(CALL); |
| PREDICTED(CALL); |
| _Py_CODEUNIT* const this_instr = next_instr - 4; |
| (void)this_instr; |
| _PyStackRef *callable; |
| _PyStackRef *self_or_null; |
| _PyStackRef *args; |
| _PyStackRef *func; |
| _PyStackRef *maybe_self; |
| _PyStackRef res; |
| // _SPECIALIZE_CALL |
| { |
| self_or_null = &stack_pointer[-1 - oparg]; |
| callable = &stack_pointer[-2 - oparg]; |
| uint16_t counter = read_u16(&this_instr[1].cache); |
| (void)counter; |
| #if ENABLE_SPECIALIZATION |
| if (ADAPTIVE_COUNTER_TRIGGERS(counter)) { |
| next_instr = this_instr; |
| _PyFrame_SetStackPointer(frame, stack_pointer); |
| _Py_Specialize_Call(callable[0], next_instr, oparg + !PyStackRef_IsNull(self_or_null[0])); |
| stack_pointer = _PyFrame_GetStackPointer(frame); |
| DISPATCH_SAME_OPARG(); |
| } |
| OPCODE_DEFERRED_INC(CALL); |
| ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); |
| #endif /* ENABLE_SPECIALIZATION */ |
| } |
| /* Skip 2 cache entries */ |
| // _MAYBE_EXPAND_METHOD |
| { |
| args = &stack_pointer[-oparg]; |
| func = &stack_pointer[-2 - oparg]; |
| maybe_self = &stack_pointer[-1 - oparg]; |
| if (PyStackRef_TYPE(callable[0]) == &PyMethod_Type && PyStackRef_IsNull(self_or_null[0])) { |
| PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); |
| PyObject *self = ((PyMethodObject *)callable_o)->im_self; |
| maybe_self[0] = PyStackRef_FromPyObjectNew(self); |
| PyObject *method = ((PyMethodObject *)callable_o)->im_func; |
| _PyStackRef temp = callable[0]; |
| func[0] = PyStackRef_FromPyObjectNew(method); |
| PyStackRef_CLOSE(temp); |
| } |
| } |
| // _DO_CALL |
| { |
| args = &stack_pointer[-oparg]; |
| self_or_null = &stack_pointer[-1 - oparg]; |
| callable = &stack_pointer[-2 - oparg]; |
| PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); |
| // oparg counts all of the args, but *not* self: |
| int total_args = oparg; |
| if (!PyStackRef_IsNull(self_or_null[0])) { |
| args--; |
| total_args++; |
| } |
| // Check if the call can be inlined or not |
| if (Py_TYPE(callable_o) == &PyFunction_Type && |
| tstate->interp->eval_frame == NULL && |
| ((PyFunctionObject *)callable_o)->vectorcall == _PyFunction_Vectorcall) |
| { |
| int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable_o))->co_flags; |
| PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o)); |
| _PyFrame_SetStackPointer(frame, stack_pointer); |
| _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit( |
| tstate, callable[0], locals, |
| args, total_args, NULL, frame |
| ); |
| stack_pointer = _PyFrame_GetStackPointer(frame); |
| // Manipulate stack directly since we leave using DISPATCH_INLINED(). |
| stack_pointer += -2 - oparg; |
| assert(WITHIN_STACK_BOUNDS()); |
| // The frame has stolen all the arguments from the stack, |
| // so there is no need to clean them up. |
| if (new_frame == NULL) { |
| goto error; |
| } |
| frame->return_offset = 4 ; |
| DISPATCH_INLINED(new_frame); |
| } |
| /* Callable is not a normal Python function */ |
| STACKREFS_TO_PYOBJECTS(args, total_args, args_o); |
| if (CONVERSION_FAILED(args_o)) { |
| PyStackRef_CLOSE(callable[0]); |
| for (int i = 0; i < total_args; i++) { |
| PyStackRef_CLOSE(args[i]); |
| } |
| { |
| stack_pointer += -2 - oparg; |
| assert(WITHIN_STACK_BOUNDS()); |
| goto error; |
| } |
| } |
| _PyFrame_SetStackPointer(frame, stack_pointer); |
| PyObject *res_o = PyObject_Vectorcall( |
| callable_o, args_o, |
| total_args | PY_VECTORCALL_ARGUMENTS_OFFSET, |
| NULL); |
| stack_pointer = _PyFrame_GetStackPointer(frame); |
| STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); |
| if (opcode == INSTRUMENTED_CALL) { |
| PyObject *arg = total_args == 0 ? |
| &_PyInstrumentation_MISSING : PyStackRef_AsPyObjectBorrow(args[0]); |
| if (res_o == NULL) { |
| _PyFrame_SetStackPointer(frame, stack_pointer); |
| _Py_call_instrumentation_exc2( |
| tstate, PY_MONITORING_EVENT_C_RAISE, |
| frame, this_instr, callable_o, arg); |
| stack_pointer = _PyFrame_GetStackPointer(frame); |
| } |
| else { |
| _PyFrame_SetStackPointer(frame, stack_pointer); |
| int err = _Py_call_instrumentation_2args( |
| tstate, PY_MONITORING_EVENT_C_RETURN, |
| frame, this_instr, callable_o, arg); |
| stack_pointer = _PyFrame_GetStackPointer(frame); |
| if (err < 0) { |
| Py_CLEAR(res_o); |
| } |
| } |
| } |
| assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); |
| PyStackRef_CLOSE(callable[0]); |
| for (int i = 0; i < total_args; i++) { |
| PyStackRef_CLOSE(args[i]); |
| } |
| if (res_o == NULL) { |
| stack_pointer += -2 - oparg; |
| assert(WITHIN_STACK_BOUNDS()); |
| goto error; |
| } |
| res = PyStackRef_FromPyObjectSteal(res_o); |
| } |
| // _CHECK_PERIODIC |
| { |
| _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY(); |
| QSBR_QUIESCENT_STATE(tstate); |
| if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) { |
| stack_pointer[-2 - oparg] = res; |
| stack_pointer += -1 - oparg; |
| assert(WITHIN_STACK_BOUNDS()); |
| _PyFrame_SetStackPointer(frame, stack_pointer); |
| int err = _Py_HandlePending(tstate); |
| stack_pointer = _PyFrame_GetStackPointer(frame); |
| if (err != 0) goto error; |
| stack_pointer += 1 + oparg; |
| assert(WITHIN_STACK_BOUNDS()); |
| } |
| } |
| stack_pointer[-2 - oparg] = res; |
| stack_pointer += -1 - oparg; |
| assert(WITHIN_STACK_BOUNDS()); |
| DISPATCH(); |
| } |
| |
        /* Specialized CALL for `cls(args...)` where `cls` is a heap type whose
         * __init__ is cached in its _spec_cache.  Allocates the instance, then
         * enters __init__ as an inlined Python frame, with a trampoline "shim"
         * frame (running EXIT_INIT_CHECK) inserted between this frame and the
         * __init__ frame to validate __init__'s result on return. */
        TARGET(CALL_ALLOC_AND_ENTER_INIT) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            next_instr += 4;
            INSTRUCTION_STATS(CALL_ALLOC_AND_ENTER_INIT);
            static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size");
            _PyStackRef *callable;
            _PyStackRef *null;
            _PyStackRef *args;
            _PyStackRef *init;
            _PyStackRef *self;
            _PyInterpreterFrame *init_frame;
            _PyInterpreterFrame *new_frame;
            /* Skip 1 cache entry */
            // _CHECK_PEP_523
            {
                // A PEP 523 frame-evaluation hook disables inlined frame pushes.
                DEOPT_IF(tstate->interp->eval_frame, CALL);
            }
            // _CHECK_AND_ALLOCATE_OBJECT
            {
                args = &stack_pointer[-oparg];
                null = &stack_pointer[-1 - oparg];
                callable = &stack_pointer[-2 - oparg];
                // `init` and `self` alias the callable/null slots: the stack is
                // rewritten in place to [__init__, new instance, args...].
                init = &stack_pointer[-2 - oparg];
                self = &stack_pointer[-1 - oparg];
                uint32_t type_version = read_u32(&this_instr[2].cache);
                PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
                DEOPT_IF(!PyStackRef_IsNull(null[0]), CALL);
                DEOPT_IF(!PyType_Check(callable_o), CALL);
                PyTypeObject *tp = (PyTypeObject *)callable_o;
                // Guard: type unchanged since this call site was specialized.
                DEOPT_IF(tp->tp_version_tag != type_version, CALL);
                assert(tp->tp_flags & Py_TPFLAGS_INLINE_VALUES);
                PyHeapTypeObject *cls = (PyHeapTypeObject *)callable_o;
                PyFunctionObject *init_func = (PyFunctionObject *)cls->_spec_cache.init;
                PyCodeObject *code = (PyCodeObject *)init_func->func_code;
                // Need room for both the __init__ frame and the shim frame.
                DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize + _Py_InitCleanup.co_framesize), CALL);
                STAT_INC(CALL, hit);
                PyObject *self_o = _PyType_NewManagedObject(tp);
                if (self_o == NULL) {
                    goto error;
                }
                self[0] = PyStackRef_FromPyObjectSteal(self_o);
                // Write the new reference to __init__ before releasing the
                // type, so the slot never holds a dead reference.
                _PyStackRef temp = callable[0];
                init[0] = PyStackRef_FromPyObjectNew(init_func);
                PyStackRef_CLOSE(temp);
            }
            // _CREATE_INIT_FRAME
            {
                args = &stack_pointer[-oparg];
                self = &stack_pointer[-1 - oparg];
                init = &stack_pointer[-2 - oparg];
                _PyFrame_SetStackPointer(frame, stack_pointer);
                _PyInterpreterFrame *shim = _PyFrame_PushTrampolineUnchecked(
                    tstate, (PyCodeObject *)&_Py_InitCleanup, 1, frame);
                assert(_PyFrame_GetBytecode(shim)[0].op.code == EXIT_INIT_CHECK);
                stack_pointer = _PyFrame_GetStackPointer(frame);
                /* Push self onto stack of shim */
                shim->localsplus[0] = PyStackRef_DUP(self[0]);
                _PyFrame_SetStackPointer(frame, stack_pointer);
                // __init__ receives self plus the oparg explicit arguments,
                // which sit contiguously on the stack starting at args-1.
                init_frame = _PyEvalFramePushAndInit(
                    tstate, init[0], NULL, args-1, oparg+1, NULL, shim);
                stack_pointer = _PyFrame_GetStackPointer(frame);
                stack_pointer[-2 - oparg].bits = (uintptr_t)init_frame;
                stack_pointer += -1 - oparg;
                assert(WITHIN_STACK_BOUNDS());
                if (init_frame == NULL) {
                    // The shim was already pushed; unwind it before raising.
                    _PyEval_FrameClearAndPop(tstate, shim);
                    goto error;
                }
                frame->return_offset = 1 + INLINE_CACHE_ENTRIES_CALL;
                /* Account for pushing the extra frame.
                 * We don't check recursion depth here,
                 * as it will be checked after start_frame */
                tstate->py_recursion_remaining--;
            }
            // _PUSH_FRAME
            {
                new_frame = init_frame;
                // Write it out explicitly because it's subtly different.
                // Eventually this should be the only occurrence of this code.
                assert(tstate->interp->eval_frame == NULL);
                _PyInterpreterFrame *temp = new_frame;
                stack_pointer += -1;
                assert(WITHIN_STACK_BOUNDS());
                _PyFrame_SetStackPointer(frame, stack_pointer);
                assert(new_frame->previous == frame || new_frame->previous->previous == frame);
                CALL_STAT_INC(inlined_py_calls);
                frame = tstate->current_frame = temp;
                tstate->py_recursion_remaining--;
                LOAD_SP();
                LOAD_IP(0);
                LLTRACE_RESUME_FRAME();
            }
            DISPATCH();
        }
| |
        /* Specialized CALL: a bound method whose underlying function is a
         * Python function taking exactly the supplied number of positional
         * arguments.  Unwraps the method into (im_func, im_self) on the stack,
         * re-validates the function guards, then pushes the callee's frame
         * inline instead of going through a C-level call. */
        TARGET(CALL_BOUND_METHOD_EXACT_ARGS) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            next_instr += 4;
            INSTRUCTION_STATS(CALL_BOUND_METHOD_EXACT_ARGS);
            static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size");
            _PyStackRef *callable;
            _PyStackRef *null;
            _PyStackRef *func;
            _PyStackRef *self;
            _PyStackRef *self_or_null;
            _PyStackRef *args;
            _PyInterpreterFrame *new_frame;
            /* Skip 1 cache entry */
            // _CHECK_PEP_523
            {
                // A PEP 523 frame-evaluation hook disables inlined frame pushes.
                DEOPT_IF(tstate->interp->eval_frame, CALL);
            }
            // _CHECK_CALL_BOUND_METHOD_EXACT_ARGS
            {
                null = &stack_pointer[-1 - oparg];
                callable = &stack_pointer[-2 - oparg];
                DEOPT_IF(!PyStackRef_IsNull(null[0]), CALL);
                DEOPT_IF(Py_TYPE(PyStackRef_AsPyObjectBorrow(callable[0])) != &PyMethod_Type, CALL);
            }
            // _INIT_CALL_BOUND_METHOD_EXACT_ARGS
            {
                // Rewrite the stack in place: method -> im_func, NULL -> im_self.
                func = &stack_pointer[-2 - oparg];
                self = &stack_pointer[-1 - oparg];
                PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
                STAT_INC(CALL, hit);
                self[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self);
                // New references are taken before the method object is
                // released, so im_func/im_self stay alive throughout.
                _PyStackRef temp = callable[0];
                func[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func);
                PyStackRef_CLOSE(temp);
            }
            // flush
            // _CHECK_FUNCTION_VERSION
            {
                callable = &stack_pointer[-2 - oparg];
                uint32_t func_version = read_u32(&this_instr[2].cache);
                PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
                DEOPT_IF(!PyFunction_Check(callable_o), CALL);
                PyFunctionObject *func = (PyFunctionObject *)callable_o;
                // Guard: function unchanged since specialization.
                DEOPT_IF(func->func_version != func_version, CALL);
            }
            // _CHECK_FUNCTION_EXACT_ARGS
            {
                self_or_null = &stack_pointer[-1 - oparg];
                PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
                assert(PyFunction_Check(callable_o));
                PyFunctionObject *func = (PyFunctionObject *)callable_o;
                PyCodeObject *code = (PyCodeObject *)func->func_code;
                // Argument count must match exactly; self counts when present.
                DEOPT_IF(code->co_argcount != oparg + (!PyStackRef_IsNull(self_or_null[0])), CALL);
            }
            // _CHECK_STACK_SPACE
            {
                PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
                PyFunctionObject *func = (PyFunctionObject *)callable_o;
                PyCodeObject *code = (PyCodeObject *)func->func_code;
                DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), CALL);
                DEOPT_IF(tstate->py_recursion_remaining <= 1, CALL);
            }
            // _INIT_CALL_PY_EXACT_ARGS
            {
                args = &stack_pointer[-oparg];
                int has_self = !PyStackRef_IsNull(self_or_null[0]);
                STAT_INC(CALL, hit);
                new_frame = _PyFrame_PushUnchecked(tstate, callable[0], oparg + has_self, frame);
                // Transfer self (if any) and the args into the callee's locals;
                // the stack references are moved, not duplicated.
                _PyStackRef *first_non_self_local = new_frame->localsplus + has_self;
                new_frame->localsplus[0] = self_or_null[0];
                for (int i = 0; i < oparg; i++) {
                    first_non_self_local[i] = args[i];
                }
            }
            // _SAVE_RETURN_OFFSET
            {
                #if TIER_ONE
                frame->return_offset = (uint16_t)(next_instr - this_instr);
                #endif
                #if TIER_TWO
                frame->return_offset = oparg;
                #endif
            }
            // _PUSH_FRAME
            {
                // Write it out explicitly because it's subtly different.
                // Eventually this should be the only occurrence of this code.
                assert(tstate->interp->eval_frame == NULL);
                _PyInterpreterFrame *temp = new_frame;
                stack_pointer += -2 - oparg;
                assert(WITHIN_STACK_BOUNDS());
                _PyFrame_SetStackPointer(frame, stack_pointer);
                assert(new_frame->previous == frame || new_frame->previous->previous == frame);
                CALL_STAT_INC(inlined_py_calls);
                frame = tstate->current_frame = temp;
                tstate->py_recursion_remaining--;
                LOAD_SP();
                LOAD_IP(0);
                LLTRACE_RESUME_FRAME();
            }
            DISPATCH();
        }
| |
        /* Specialized CALL: a bound method wrapping a Python function, without
         * the exact-argument-count restriction.  Unwraps the method on the
         * stack, then uses the general frame-push path (which handles
         * defaults, keyword-only args, etc. in _PyEvalFramePushAndInit). */
        TARGET(CALL_BOUND_METHOD_GENERAL) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            next_instr += 4;
            INSTRUCTION_STATS(CALL_BOUND_METHOD_GENERAL);
            static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size");
            _PyStackRef *callable;
            _PyStackRef *null;
            _PyStackRef *method;
            _PyStackRef *self;
            _PyStackRef *self_or_null;
            _PyStackRef *args;
            _PyInterpreterFrame *new_frame;
            /* Skip 1 cache entry */
            // _CHECK_PEP_523
            {
                // A PEP 523 frame-evaluation hook disables inlined frame pushes.
                DEOPT_IF(tstate->interp->eval_frame, CALL);
            }
            // _CHECK_METHOD_VERSION
            {
                null = &stack_pointer[-1 - oparg];
                callable = &stack_pointer[-2 - oparg];
                uint32_t func_version = read_u32(&this_instr[2].cache);
                PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
                DEOPT_IF(Py_TYPE(callable_o) != &PyMethod_Type, CALL);
                PyObject *func = ((PyMethodObject *)callable_o)->im_func;
                DEOPT_IF(!PyFunction_Check(func), CALL);
                // Guard: the wrapped function must match the cached version.
                DEOPT_IF(((PyFunctionObject *)func)->func_version != func_version, CALL);
                DEOPT_IF(!PyStackRef_IsNull(null[0]), CALL);
            }
            // _EXPAND_METHOD
            {
                // Rewrite the stack in place: method -> im_func, NULL -> im_self.
                method = &stack_pointer[-2 - oparg];
                self = &stack_pointer[-1 - oparg];
                PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
                assert(PyStackRef_IsNull(null[0]));
                assert(Py_TYPE(callable_o) == &PyMethod_Type);
                self[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self);
                // New references are taken before the method object is
                // released, so im_func/im_self stay alive throughout.
                _PyStackRef temp = callable[0];
                method[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func);
                assert(PyStackRef_FunctionCheck(method[0]));
                PyStackRef_CLOSE(temp);
            }
            // flush
            // _PY_FRAME_GENERAL
            {
                args = &stack_pointer[-oparg];
                self_or_null = &stack_pointer[-1 - oparg];
                callable = &stack_pointer[-2 - oparg];
                PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
                // oparg counts all of the args, but *not* self:
                int total_args = oparg;
                if (!PyStackRef_IsNull(self_or_null[0])) {
                    args--;
                    total_args++;
                }
                assert(Py_TYPE(callable_o) == &PyFunction_Type);
                int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable_o))->co_flags;
                PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o));
                _PyFrame_SetStackPointer(frame, stack_pointer);
                _PyInterpreterFrame *temp = _PyEvalFramePushAndInit(
                    tstate, callable[0], locals,
                    args, total_args, NULL, frame
                );
                stack_pointer = _PyFrame_GetStackPointer(frame);
                // The frame has stolen all the arguments from the stack.
                stack_pointer += -2 - oparg;
                assert(WITHIN_STACK_BOUNDS());
                if (temp == NULL) {
                    goto error;
                }
                new_frame = temp;
            }
            // _SAVE_RETURN_OFFSET
            {
                #if TIER_ONE
                frame->return_offset = (uint16_t)(next_instr - this_instr);
                #endif
                #if TIER_TWO
                frame->return_offset = oparg;
                #endif
            }
            // _PUSH_FRAME
            {
                // Write it out explicitly because it's subtly different.
                // Eventually this should be the only occurrence of this code.
                assert(tstate->interp->eval_frame == NULL);
                _PyInterpreterFrame *temp = new_frame;
                _PyFrame_SetStackPointer(frame, stack_pointer);
                assert(new_frame->previous == frame || new_frame->previous->previous == frame);
                CALL_STAT_INC(inlined_py_calls);
                frame = tstate->current_frame = temp;
                tstate->py_recursion_remaining--;
                LOAD_SP();
                LOAD_IP(0);
                LLTRACE_RESUME_FRAME();
            }
            DISPATCH();
        }
| |
        /* Specialized CALL: the callable is a type object with a non-NULL
         * tp_vectorcall slot (e.g. many builtin classes).  Converts the
         * stackref arguments to PyObject* and vectorcalls the type. */
        TARGET(CALL_BUILTIN_CLASS) {
            frame->instr_ptr = next_instr;
            next_instr += 4;
            INSTRUCTION_STATS(CALL_BUILTIN_CLASS);
            static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size");
            _PyStackRef *callable;
            _PyStackRef *self_or_null;
            _PyStackRef *args;
            _PyStackRef res;
            /* Skip 1 cache entry */
            /* Skip 2 cache entries */
            // _CALL_BUILTIN_CLASS
            {
                args = &stack_pointer[-oparg];
                self_or_null = &stack_pointer[-1 - oparg];
                callable = &stack_pointer[-2 - oparg];
                PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
                // oparg counts the explicit args; self (if present) is extra.
                int total_args = oparg;
                if (!PyStackRef_IsNull(self_or_null[0])) {
                    args--;
                    total_args++;
                }
                DEOPT_IF(!PyType_Check(callable_o), CALL);
                PyTypeObject *tp = (PyTypeObject *)callable_o;
                DEOPT_IF(tp->tp_vectorcall == NULL, CALL);
                STAT_INC(CALL, hit);
                STACKREFS_TO_PYOBJECTS(args, total_args, args_o);
                if (CONVERSION_FAILED(args_o)) {
                    // Conversion failed: drop all our stack references,
                    // shrink the stack past the call layout, and raise.
                    PyStackRef_CLOSE(callable[0]);
                    PyStackRef_CLOSE(self_or_null[0]);
                    for (int _i = oparg; --_i >= 0;) {
                        PyStackRef_CLOSE(args[_i]);
                    }
                    {
                        stack_pointer += -2 - oparg;
                        assert(WITHIN_STACK_BOUNDS());
                        goto error;
                    }
                }
                // Sync the stack pointer around the call: tp_vectorcall may
                // run arbitrary Python code.
                _PyFrame_SetStackPointer(frame, stack_pointer);
                PyObject *res_o = tp->tp_vectorcall((PyObject *)tp, args_o, total_args, NULL);
                stack_pointer = _PyFrame_GetStackPointer(frame);
                STACKREFS_TO_PYOBJECTS_CLEANUP(args_o);
                /* Free the arguments. */
                for (int i = 0; i < total_args; i++) {
                    PyStackRef_CLOSE(args[i]);
                }
                PyStackRef_CLOSE(callable[0]);
                if (res_o == NULL) {
                    stack_pointer += -2 - oparg;
                    assert(WITHIN_STACK_BOUNDS());
                    goto error;
                }
                res = PyStackRef_FromPyObjectSteal(res_o);
            }
            // _CHECK_PERIODIC
            {
                _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY();
                QSBR_QUIESCENT_STATE(tstate);
                if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) {
                    // Publish res to its final stack slot and sync the stack
                    // pointer before running pending calls, so res is reachable
                    // from the stack; restore the pointer afterwards so the
                    // common exit sequence below runs unconditionally.
                    stack_pointer[-2 - oparg] = res;
                    stack_pointer += -1 - oparg;
                    assert(WITHIN_STACK_BOUNDS());
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    int err = _Py_HandlePending(tstate);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    if (err != 0) goto error;
                    stack_pointer += 1 + oparg;
                    assert(WITHIN_STACK_BOUNDS());
                }
            }
            stack_pointer[-2 - oparg] = res;
            stack_pointer += -1 - oparg;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        /* Specialized CALL: a PyCFunction with exactly the METH_FASTCALL
         * flag (positional-only fast call, no keywords). */
        TARGET(CALL_BUILTIN_FAST) {
            frame->instr_ptr = next_instr;
            next_instr += 4;
            INSTRUCTION_STATS(CALL_BUILTIN_FAST);
            static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size");
            _PyStackRef *callable;
            _PyStackRef *self_or_null;
            _PyStackRef *args;
            _PyStackRef res;
            /* Skip 1 cache entry */
            /* Skip 2 cache entries */
            // _CALL_BUILTIN_FAST
            {
                args = &stack_pointer[-oparg];
                self_or_null = &stack_pointer[-1 - oparg];
                callable = &stack_pointer[-2 - oparg];
                /* Builtin METH_FASTCALL functions, without keywords */
                PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
                // oparg counts the explicit args; self (if present) is extra.
                int total_args = oparg;
                if (!PyStackRef_IsNull(self_or_null[0])) {
                    args--;
                    total_args++;
                }
                DEOPT_IF(!PyCFunction_CheckExact(callable_o), CALL);
                // Flags must be exactly METH_FASTCALL (no METH_KEYWORDS).
                DEOPT_IF(PyCFunction_GET_FLAGS(callable_o) != METH_FASTCALL, CALL);
                STAT_INC(CALL, hit);
                PyCFunction cfunc = PyCFunction_GET_FUNCTION(callable_o);
                /* res = func(self, args, nargs) */
                STACKREFS_TO_PYOBJECTS(args, total_args, args_o);
                if (CONVERSION_FAILED(args_o)) {
                    // Conversion failed: drop all our stack references,
                    // shrink the stack past the call layout, and raise.
                    PyStackRef_CLOSE(callable[0]);
                    PyStackRef_CLOSE(self_or_null[0]);
                    for (int _i = oparg; --_i >= 0;) {
                        PyStackRef_CLOSE(args[_i]);
                    }
                    {
                        stack_pointer += -2 - oparg;
                        assert(WITHIN_STACK_BOUNDS());
                        goto error;
                    }
                }
                // Sync the stack pointer around the call: the C function may
                // re-enter the interpreter.
                _PyFrame_SetStackPointer(frame, stack_pointer);
                PyObject *res_o = ((PyCFunctionFast)(void(*)(void))cfunc)(
                    PyCFunction_GET_SELF(callable_o),
                    args_o,
                    total_args);
                stack_pointer = _PyFrame_GetStackPointer(frame);
                STACKREFS_TO_PYOBJECTS_CLEANUP(args_o);
                assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
                /* Free the arguments. */
                for (int i = 0; i < total_args; i++) {
                    PyStackRef_CLOSE(args[i]);
                }
                PyStackRef_CLOSE(callable[0]);
                if (res_o == NULL) {
                    stack_pointer += -2 - oparg;
                    assert(WITHIN_STACK_BOUNDS());
                    goto error;
                }
                res = PyStackRef_FromPyObjectSteal(res_o);
            }
            // _CHECK_PERIODIC
            {
                _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY();
                QSBR_QUIESCENT_STATE(tstate);
                if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) {
                    // Publish res and sync the stack pointer before running
                    // pending calls; restore afterwards for the common exit.
                    stack_pointer[-2 - oparg] = res;
                    stack_pointer += -1 - oparg;
                    assert(WITHIN_STACK_BOUNDS());
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    int err = _Py_HandlePending(tstate);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    if (err != 0) goto error;
                    stack_pointer += 1 + oparg;
                    assert(WITHIN_STACK_BOUNDS());
                }
            }
            stack_pointer[-2 - oparg] = res;
            stack_pointer += -1 - oparg;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        /* Specialized CALL: a PyCFunction with METH_FASTCALL | METH_KEYWORDS.
         * This instruction itself passes no keyword names (kwnames == NULL);
         * the flag combination only identifies the function's C signature. */
        TARGET(CALL_BUILTIN_FAST_WITH_KEYWORDS) {
            frame->instr_ptr = next_instr;
            next_instr += 4;
            INSTRUCTION_STATS(CALL_BUILTIN_FAST_WITH_KEYWORDS);
            static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size");
            _PyStackRef *callable;
            _PyStackRef *self_or_null;
            _PyStackRef *args;
            _PyStackRef res;
            /* Skip 1 cache entry */
            /* Skip 2 cache entries */
            // _CALL_BUILTIN_FAST_WITH_KEYWORDS
            {
                args = &stack_pointer[-oparg];
                self_or_null = &stack_pointer[-1 - oparg];
                callable = &stack_pointer[-2 - oparg];
                /* Builtin METH_FASTCALL | METH_KEYWORDS functions */
                PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
                // oparg counts the explicit args; self (if present) is extra.
                int total_args = oparg;
                if (!PyStackRef_IsNull(self_or_null[0])) {
                    args--;
                    total_args++;
                }
                DEOPT_IF(!PyCFunction_CheckExact(callable_o), CALL);
                DEOPT_IF(PyCFunction_GET_FLAGS(callable_o) != (METH_FASTCALL | METH_KEYWORDS), CALL);
                STAT_INC(CALL, hit);
                /* res = func(self, args, nargs, kwnames) */
                _PyFrame_SetStackPointer(frame, stack_pointer);
                PyCFunctionFastWithKeywords cfunc =
                (PyCFunctionFastWithKeywords)(void(*)(void))
                PyCFunction_GET_FUNCTION(callable_o);
                stack_pointer = _PyFrame_GetStackPointer(frame);
                STACKREFS_TO_PYOBJECTS(args, total_args, args_o);
                if (CONVERSION_FAILED(args_o)) {
                    // Conversion failed: drop all our stack references,
                    // shrink the stack past the call layout, and raise.
                    PyStackRef_CLOSE(callable[0]);
                    PyStackRef_CLOSE(self_or_null[0]);
                    for (int _i = oparg; --_i >= 0;) {
                        PyStackRef_CLOSE(args[_i]);
                    }
                    {
                        stack_pointer += -2 - oparg;
                        assert(WITHIN_STACK_BOUNDS());
                        goto error;
                    }
                }
                // kwnames is NULL: this opcode only supplies positional args.
                _PyFrame_SetStackPointer(frame, stack_pointer);
                PyObject *res_o = cfunc(PyCFunction_GET_SELF(callable_o), args_o, total_args, NULL);
                stack_pointer = _PyFrame_GetStackPointer(frame);
                STACKREFS_TO_PYOBJECTS_CLEANUP(args_o);
                assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
                /* Free the arguments. */
                for (int i = 0; i < total_args; i++) {
                    PyStackRef_CLOSE(args[i]);
                }
                PyStackRef_CLOSE(callable[0]);
                if (res_o == NULL) {
                    stack_pointer += -2 - oparg;
                    assert(WITHIN_STACK_BOUNDS());
                    goto error;
                }
                res = PyStackRef_FromPyObjectSteal(res_o);
            }
            // _CHECK_PERIODIC
            {
                _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY();
                QSBR_QUIESCENT_STATE(tstate);
                if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) {
                    // Publish res and sync the stack pointer before running
                    // pending calls; restore afterwards for the common exit.
                    stack_pointer[-2 - oparg] = res;
                    stack_pointer += -1 - oparg;
                    assert(WITHIN_STACK_BOUNDS());
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    int err = _Py_HandlePending(tstate);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    if (err != 0) goto error;
                    stack_pointer += 1 + oparg;
                    assert(WITHIN_STACK_BOUNDS());
                }
            }
            stack_pointer[-2 - oparg] = res;
            stack_pointer += -1 - oparg;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        /* Specialized CALL: a PyCFunction with the METH_O calling convention
         * (exactly one argument).  Deopts unless exactly one argument
         * (counting self when present) is on the stack. */
        TARGET(CALL_BUILTIN_O) {
            frame->instr_ptr = next_instr;
            next_instr += 4;
            INSTRUCTION_STATS(CALL_BUILTIN_O);
            static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size");
            _PyStackRef *callable;
            _PyStackRef *self_or_null;
            _PyStackRef *args;
            _PyStackRef res;
            /* Skip 1 cache entry */
            /* Skip 2 cache entries */
            // _CALL_BUILTIN_O
            {
                args = &stack_pointer[-oparg];
                self_or_null = &stack_pointer[-1 - oparg];
                callable = &stack_pointer[-2 - oparg];
                /* Builtin METH_O functions */
                PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
                // oparg counts the explicit args; self (if present) is extra.
                int total_args = oparg;
                if (!PyStackRef_IsNull(self_or_null[0])) {
                    args--;
                    total_args++;
                }
                DEOPT_IF(total_args != 1, CALL);
                DEOPT_IF(!PyCFunction_CheckExact(callable_o), CALL);
                DEOPT_IF(PyCFunction_GET_FLAGS(callable_o) != METH_O, CALL);
                // CPython promises to check all non-vectorcall function calls.
                DEOPT_IF(tstate->c_recursion_remaining <= 0, CALL);
                STAT_INC(CALL, hit);
                PyCFunction cfunc = PyCFunction_GET_FUNCTION(callable_o);
                _PyStackRef arg = args[0];
                // Recursion depth was pre-checked by the DEOPT above, so the
                // unchecked enter is safe; matched by the Leave call below.
                _Py_EnterRecursiveCallTstateUnchecked(tstate);
                _PyFrame_SetStackPointer(frame, stack_pointer);
                PyObject *res_o = _PyCFunction_TrampolineCall(cfunc, PyCFunction_GET_SELF(callable_o), PyStackRef_AsPyObjectBorrow(arg));
                stack_pointer = _PyFrame_GetStackPointer(frame);
                _Py_LeaveRecursiveCallTstate(tstate);
                assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
                PyStackRef_CLOSE(arg);
                PyStackRef_CLOSE(callable[0]);
                if (res_o == NULL) {
                    stack_pointer += -2 - oparg;
                    assert(WITHIN_STACK_BOUNDS());
                    goto error;
                }
                res = PyStackRef_FromPyObjectSteal(res_o);
            }
            // _CHECK_PERIODIC
            {
                _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY();
                QSBR_QUIESCENT_STATE(tstate);
                if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) {
                    // Publish res and sync the stack pointer before running
                    // pending calls; restore afterwards for the common exit.
                    stack_pointer[-2 - oparg] = res;
                    stack_pointer += -1 - oparg;
                    assert(WITHIN_STACK_BOUNDS());
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    int err = _Py_HandlePending(tstate);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    if (err != 0) goto error;
                    stack_pointer += 1 + oparg;
                    assert(WITHIN_STACK_BOUNDS());
                }
            }
            stack_pointer[-2 - oparg] = res;
            stack_pointer += -1 - oparg;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        /* CALL_FUNCTION_EX: `f(*args)` or `f(*args, **kwargs)` (oparg bit 0
         * selects whether a kwargs dict is on the stack).  Normalizes the
         * positional arguments to a tuple, then either inlines the call
         * (plain Python function, no PEP 523 hook) or falls back to
         * PyObject_Call.  Also serves as the body of
         * INSTRUMENTED_CALL_FUNCTION_EX (selected via `opcode`). */
        TARGET(CALL_FUNCTION_EX) {
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(CALL_FUNCTION_EX);
            PREDICTED(CALL_FUNCTION_EX);
            _Py_CODEUNIT* const this_instr = next_instr - 1;
            (void)this_instr;
            _PyStackRef func;
            _PyStackRef callargs;
            _PyStackRef kwargs_in = PyStackRef_NULL;
            _PyStackRef tuple;
            _PyStackRef kwargs_out = PyStackRef_NULL;
            _PyStackRef func_st;
            _PyStackRef callargs_st;
            _PyStackRef kwargs_st = PyStackRef_NULL;
            _PyStackRef result;
            // _MAKE_CALLARGS_A_TUPLE
            {
                if (oparg & 1) { kwargs_in = stack_pointer[-(oparg & 1)]; }
                callargs = stack_pointer[-1 - (oparg & 1)];
                func = stack_pointer[-3 - (oparg & 1)];
                PyObject *callargs_o = PyStackRef_AsPyObjectBorrow(callargs);
                if (PyTuple_CheckExact(callargs_o)) {
                    // Already a tuple; reuse the existing reference.
                    tuple = callargs;
                }
                else {
                    // Validate that *args is iterable (raising the standard
                    // TypeError otherwise), then materialize it as a tuple.
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    int err = _Py_Check_ArgsIterable(tstate, PyStackRef_AsPyObjectBorrow(func), callargs_o);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    if (err < 0) {
                        goto error;
                    }
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    PyObject *tuple_o = PySequence_Tuple(callargs_o);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    if (tuple_o == NULL) {
                        goto error;
                    }
                    PyStackRef_CLOSE(callargs);
                    tuple = PyStackRef_FromPyObjectSteal(tuple_o);
                }
                kwargs_out = kwargs_in;
            }
            // _DO_CALL_FUNCTION_EX
            {
                kwargs_st = kwargs_out;
                callargs_st = tuple;
                func_st = func;
                PyObject *func = PyStackRef_AsPyObjectBorrow(func_st);
                PyObject *callargs = PyStackRef_AsPyObjectBorrow(callargs_st);
                PyObject *kwargs = PyStackRef_AsPyObjectBorrow(kwargs_st);
                // DICT_MERGE is called before this opcode if there are kwargs.
                // It converts all dict subtypes in kwargs into regular dicts.
                assert(kwargs == NULL || PyDict_CheckExact(kwargs));
                assert(PyTuple_CheckExact(callargs));
                EVAL_CALL_STAT_INC_IF_FUNCTION(EVAL_CALL_FUNCTION_EX, func);
                PyObject *result_o;
                assert(!_PyErr_Occurred(tstate));
                if (opcode == INSTRUMENTED_CALL_FUNCTION_EX) {
                    // Instrumented path: fire CALL before, and C_RAISE /
                    // C_RETURN after, for non-Python callables.
                    PyObject *arg = PyTuple_GET_SIZE(callargs) > 0 ?
                    PyTuple_GET_ITEM(callargs, 0) : &_PyInstrumentation_MISSING;
                    // Write the (possibly new) tuple/kwargs refs back to the
                    // stack before any call that might escape.
                    stack_pointer[-1 - (oparg & 1)] = callargs_st;
                    if (oparg & 1) stack_pointer[-(oparg & 1)] = kwargs_st;
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    int err = _Py_call_instrumentation_2args(
                        tstate, PY_MONITORING_EVENT_CALL,
                        frame, this_instr, func, arg);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    if (err) {
                        goto error;
                    }
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    result_o = PyObject_Call(func, callargs, kwargs);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    if (!PyFunction_Check(func) && !PyMethod_Check(func)) {
                        if (result_o == NULL) {
                            _PyFrame_SetStackPointer(frame, stack_pointer);
                            _Py_call_instrumentation_exc2(
                                tstate, PY_MONITORING_EVENT_C_RAISE,
                                frame, this_instr, func, arg);
                            stack_pointer = _PyFrame_GetStackPointer(frame);
                        }
                        else {
                            _PyFrame_SetStackPointer(frame, stack_pointer);
                            int err = _Py_call_instrumentation_2args(
                                tstate, PY_MONITORING_EVENT_C_RETURN,
                                frame, this_instr, func, arg);
                            stack_pointer = _PyFrame_GetStackPointer(frame);
                            if (err < 0) {
                                // Instrumentation failure overrides the result.
                                Py_CLEAR(result_o);
                            }
                        }
                    }
                }
                else {
                    // Fast path: a plain Python function with the default
                    // vectorcall and no PEP 523 hook is entered inline.
                    if (Py_TYPE(func) == &PyFunction_Type &&
                        tstate->interp->eval_frame == NULL &&
                        ((PyFunctionObject *)func)->vectorcall == _PyFunction_Vectorcall) {
                        assert(PyTuple_CheckExact(callargs));
                        Py_ssize_t nargs = PyTuple_GET_SIZE(callargs);
                        int code_flags = ((PyCodeObject *)PyFunction_GET_CODE(func))->co_flags;
                        PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(func));
                        stack_pointer[-1 - (oparg & 1)] = callargs_st;
                        if (oparg & 1) stack_pointer[-(oparg & 1)] = kwargs_st;
                        _PyFrame_SetStackPointer(frame, stack_pointer);
                        _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit_Ex(
                            tstate, func_st, locals,
                            nargs, callargs, kwargs, frame);
                        stack_pointer = _PyFrame_GetStackPointer(frame);
                        // Need to sync the stack since we exit with DISPATCH_INLINED.
                        stack_pointer += -3 - (oparg & 1);
                        assert(WITHIN_STACK_BOUNDS());
                        if (new_frame == NULL) {
                            goto error;
                        }
                        assert( 1 == 1);
                        frame->return_offset = 1;
                        DISPATCH_INLINED(new_frame);
                    }
                    // Generic fallback for every other callable.
                    stack_pointer[-1 - (oparg & 1)] = callargs_st;
                    if (oparg & 1) stack_pointer[-(oparg & 1)] = kwargs_st;
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    result_o = PyObject_Call(func, callargs, kwargs);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                }
                _PyFrame_SetStackPointer(frame, stack_pointer);
                PyStackRef_XCLOSE(kwargs_st);
                stack_pointer = _PyFrame_GetStackPointer(frame);
                PyStackRef_CLOSE(callargs_st);
                PyStackRef_CLOSE(func_st);
                if (result_o == NULL) {
                    stack_pointer += -3 - (oparg & 1);
                    assert(WITHIN_STACK_BOUNDS());
                    goto error;
                }
                result = PyStackRef_FromPyObjectSteal(result_o);
            }
            // _CHECK_PERIODIC
            {
                _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY();
                QSBR_QUIESCENT_STATE(tstate);
                if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) {
                    // Publish result and sync the stack pointer before running
                    // pending calls; restore afterwards for the common exit.
                    stack_pointer[-3 - (oparg & 1)] = result;
                    stack_pointer += -2 - (oparg & 1);
                    assert(WITHIN_STACK_BOUNDS());
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    int err = _Py_HandlePending(tstate);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    if (err != 0) goto error;
                    stack_pointer += 2 + (oparg & 1);
                    assert(WITHIN_STACK_BOUNDS());
                }
            }
            stack_pointer[-3 - (oparg & 1)] = result;
            stack_pointer += -2 - (oparg & 1);
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
| TARGET(CALL_INTRINSIC_1) { |
| frame->instr_ptr = next_instr; |
| next_instr += 1; |
| INSTRUCTION_STATS(CALL_INTRINSIC_1); |
| _PyStackRef value; |
| _PyStackRef res; |
| value = stack_pointer[-1]; |
| assert(oparg <= MAX_INTRINSIC_1); |
| _PyFrame_SetStackPointer(frame, stack_pointer); |
| PyObject *res_o = _PyIntrinsics_UnaryFunctions[oparg].func(tstate, PyStackRef_AsPyObjectBorrow(value)); |
| stack_pointer = _PyFrame_GetStackPointer(frame); |
| PyStackRef_CLOSE(value); |
| if (res_o == NULL) goto pop_1_error; |
| res = PyStackRef_FromPyObjectSteal(res_o); |
| stack_pointer[-1] = res; |
| DISPATCH(); |
| } |
| |
| TARGET(CALL_INTRINSIC_2) { |
| frame->instr_ptr = next_instr; |
| next_instr += 1; |
| INSTRUCTION_STATS(CALL_INTRINSIC_2); |
| _PyStackRef value2_st; |
| _PyStackRef value1_st; |
| _PyStackRef res; |
| value1_st = stack_pointer[-1]; |
| value2_st = stack_pointer[-2]; |
| assert(oparg <= MAX_INTRINSIC_2); |
| PyObject *value1 = PyStackRef_AsPyObjectBorrow(value1_st); |
| PyObject *value2 = PyStackRef_AsPyObjectBorrow(value2_st); |
| _PyFrame_SetStackPointer(frame, stack_pointer); |
| PyObject *res_o = _PyIntrinsics_BinaryFunctions[oparg].func(tstate, value2, value1); |
| stack_pointer = _PyFrame_GetStackPointer(frame); |
| PyStackRef_CLOSE(value2_st); |
| PyStackRef_CLOSE(value1_st); |
| if (res_o == NULL) goto pop_2_error; |
| res = PyStackRef_FromPyObjectSteal(res_o); |
| stack_pointer[-2] = res; |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| DISPATCH(); |
| } |
| |
        /* Specialized CALL: a two-argument call to the interpreter's cached
         * builtin `isinstance`.  Returns the canonical True/False stackrefs
         * via PyObject_IsInstance. */
        TARGET(CALL_ISINSTANCE) {
            frame->instr_ptr = next_instr;
            next_instr += 4;
            INSTRUCTION_STATS(CALL_ISINSTANCE);
            static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size");
            _PyStackRef *callable;
            _PyStackRef *self_or_null;
            _PyStackRef *args;
            _PyStackRef res;
            /* Skip 1 cache entry */
            /* Skip 2 cache entries */
            args = &stack_pointer[-oparg];
            self_or_null = &stack_pointer[-1 - oparg];
            callable = &stack_pointer[-2 - oparg];
            /* isinstance(o, o2) */
            PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
            // oparg counts the explicit args; self (if present) is extra.
            int total_args = oparg;
            if (!PyStackRef_IsNull(self_or_null[0])) {
                args--;
                total_args++;
            }
            DEOPT_IF(total_args != 2, CALL);
            // Guard: must be the interpreter's cached builtin isinstance, not
            // just any callable named "isinstance".
            PyInterpreterState *interp = tstate->interp;
            DEOPT_IF(callable_o != interp->callable_cache.isinstance, CALL);
            STAT_INC(CALL, hit);
            _PyStackRef cls_stackref = args[1];
            _PyStackRef inst_stackref = args[0];
            // PyObject_IsInstance can call __instancecheck__, so sync the
            // stack pointer around it.
            _PyFrame_SetStackPointer(frame, stack_pointer);
            int retval = PyObject_IsInstance(PyStackRef_AsPyObjectBorrow(inst_stackref), PyStackRef_AsPyObjectBorrow(cls_stackref));
            stack_pointer = _PyFrame_GetStackPointer(frame);
            if (retval < 0) {
                goto error;
            }
            res = retval ? PyStackRef_True : PyStackRef_False;
            assert((!PyStackRef_IsNull(res)) ^ (_PyErr_Occurred(tstate) != NULL));
            PyStackRef_CLOSE(inst_stackref);
            PyStackRef_CLOSE(cls_stackref);
            PyStackRef_CLOSE(callable[0]);
            stack_pointer[-2 - oparg] = res;
            stack_pointer += -1 - oparg;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
// CALL_KW: generic (adaptive) call with keyword arguments. The stack holds
// callable, self_or_null, oparg args, and a kwnames tuple on top. First the
// adaptive counter may trigger specialization; then a bound method may be
// expanded in place; finally the call is either inlined as a new Python
// frame or dispatched through PyObject_Vectorcall.
TARGET(CALL_KW) {
frame->instr_ptr = next_instr;
next_instr += 4;
INSTRUCTION_STATS(CALL_KW);
PREDICTED(CALL_KW);
_Py_CODEUNIT* const this_instr = next_instr - 4;
(void)this_instr;
_PyStackRef *callable;
_PyStackRef *self_or_null;
_PyStackRef *args;
_PyStackRef kwnames;
_PyStackRef kwnames_in;
_PyStackRef *func;
_PyStackRef *maybe_self;
_PyStackRef kwnames_out;
_PyStackRef res;
// _SPECIALIZE_CALL_KW
{
self_or_null = &stack_pointer[-2 - oparg];
callable = &stack_pointer[-3 - oparg];
uint16_t counter = read_u16(&this_instr[1].cache);
(void)counter;
#if ENABLE_SPECIALIZATION
// When the adaptive counter fires, try to rewrite this instruction
// into a specialized CALL_KW_* variant and re-dispatch it.
if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
next_instr = this_instr;
_PyFrame_SetStackPointer(frame, stack_pointer);
_Py_Specialize_CallKw(callable[0], next_instr, oparg + !PyStackRef_IsNull(self_or_null[0]));
stack_pointer = _PyFrame_GetStackPointer(frame);
DISPATCH_SAME_OPARG();
}
OPCODE_DEFERRED_INC(CALL_KW);
ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
#endif /* ENABLE_SPECIALIZATION */
}
/* Skip 2 cache entries */
// _MAYBE_EXPAND_METHOD_KW
{
kwnames_in = stack_pointer[-1];
args = &stack_pointer[-1 - oparg];
func = &stack_pointer[-3 - oparg];
maybe_self = &stack_pointer[-2 - oparg];
// Unpack a bound method in place: put im_self into the null slot and
// im_func into the callable slot, so the call below sees a plain function.
if (PyStackRef_TYPE(callable[0]) == &PyMethod_Type && PyStackRef_IsNull(self_or_null[0])) {
PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
PyObject *self = ((PyMethodObject *)callable_o)->im_self;
maybe_self[0] = PyStackRef_FromPyObjectNew(self);
PyObject *method = ((PyMethodObject *)callable_o)->im_func;
// Keep the old callable alive until the new func ref is in place.
_PyStackRef temp = callable[0];
func[0] = PyStackRef_FromPyObjectNew(method);
PyStackRef_CLOSE(temp);
}
kwnames_out = kwnames_in;
}
// _DO_CALL_KW
{
kwnames = kwnames_out;
args = &stack_pointer[-1 - oparg];
self_or_null = &stack_pointer[-2 - oparg];
callable = &stack_pointer[-3 - oparg];
PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
PyObject *kwnames_o = PyStackRef_AsPyObjectBorrow(kwnames);
// oparg counts all of the args, but *not* self:
int total_args = oparg;
if (!PyStackRef_IsNull(self_or_null[0])) {
args--;
total_args++;
}
int positional_args = total_args - (int)PyTuple_GET_SIZE(kwnames_o);
// Check if the call can be inlined or not
if (Py_TYPE(callable_o) == &PyFunction_Type &&
tstate->interp->eval_frame == NULL &&
((PyFunctionObject *)callable_o)->vectorcall == _PyFunction_Vectorcall)
{
int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable_o))->co_flags;
// Non-optimized code (e.g. class bodies) runs with globals as locals.
PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o));
stack_pointer[-1] = kwnames;
_PyFrame_SetStackPointer(frame, stack_pointer);
_PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit(
tstate, callable[0], locals,
args, positional_args, kwnames_o, frame
);
stack_pointer = _PyFrame_GetStackPointer(frame);
PyStackRef_CLOSE(kwnames);
// Sync stack explicitly since we leave using DISPATCH_INLINED().
stack_pointer += -3 - oparg;
assert(WITHIN_STACK_BOUNDS());
// The frame has stolen all the arguments from the stack,
// so there is no need to clean them up.
if (new_frame == NULL) {
goto error;
}
assert( 4 == 1 + INLINE_CACHE_ENTRIES_CALL_KW);
frame->return_offset = 4 ;
DISPATCH_INLINED(new_frame);
}
/* Callable is not a normal Python function */
STACKREFS_TO_PYOBJECTS(args, total_args, args_o);
// If converting stackrefs to a PyObject* array failed, release every
// stack reference we own and unwind.
if (CONVERSION_FAILED(args_o)) {
PyStackRef_CLOSE(callable[0]);
PyStackRef_CLOSE(self_or_null[0]);
for (int _i = oparg; --_i >= 0;) {
PyStackRef_CLOSE(args[_i]);
}
PyStackRef_CLOSE(kwnames);
{
stack_pointer += -3 - oparg;
assert(WITHIN_STACK_BOUNDS());
goto error;
}
}
stack_pointer[-1] = kwnames;
_PyFrame_SetStackPointer(frame, stack_pointer);
PyObject *res_o = PyObject_Vectorcall(
callable_o, args_o,
positional_args | PY_VECTORCALL_ARGUMENTS_OFFSET,
kwnames_o);
stack_pointer = _PyFrame_GetStackPointer(frame);
STACKREFS_TO_PYOBJECTS_CLEANUP(args_o);
// This body is shared with INSTRUMENTED_CALL_KW; fire the C_RAISE /
// C_RETURN monitoring events only in the instrumented variant.
if (opcode == INSTRUMENTED_CALL_KW) {
PyObject *arg = total_args == 0 ?
&_PyInstrumentation_MISSING : PyStackRef_AsPyObjectBorrow(args[0]);
if (res_o == NULL) {
_PyFrame_SetStackPointer(frame, stack_pointer);
_Py_call_instrumentation_exc2(
tstate, PY_MONITORING_EVENT_C_RAISE,
frame, this_instr, callable_o, arg);
stack_pointer = _PyFrame_GetStackPointer(frame);
}
else {
_PyFrame_SetStackPointer(frame, stack_pointer);
int err = _Py_call_instrumentation_2args(
tstate, PY_MONITORING_EVENT_C_RETURN,
frame, this_instr, callable_o, arg);
stack_pointer = _PyFrame_GetStackPointer(frame);
// An instrumentation failure turns a successful call into an error.
if (err < 0) {
Py_CLEAR(res_o);
}
}
}
PyStackRef_CLOSE(kwnames);
assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
PyStackRef_CLOSE(callable[0]);
for (int i = 0; i < total_args; i++) {
PyStackRef_CLOSE(args[i]);
}
if (res_o == NULL) {
stack_pointer += -3 - oparg;
assert(WITHIN_STACK_BOUNDS());
goto error;
}
res = PyStackRef_FromPyObjectSteal(res_o);
}
// Push the result in the callable slot and pop everything else.
stack_pointer[-3 - oparg] = res;
stack_pointer += -2 - oparg;
assert(WITHIN_STACK_BOUNDS());
DISPATCH();
}
| |
// CALL_KW_BOUND_METHOD: specialized CALL_KW for a bound method whose
// underlying function's version matches the inline cache. Expands the
// method into (func, self), pushes a new Python frame, and enters it.
TARGET(CALL_KW_BOUND_METHOD) {
_Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
next_instr += 4;
INSTRUCTION_STATS(CALL_KW_BOUND_METHOD);
static_assert(INLINE_CACHE_ENTRIES_CALL_KW == 3, "incorrect cache size");
_PyStackRef *callable;
_PyStackRef *null;
_PyStackRef kwnames;
_PyStackRef *method;
_PyStackRef *self;
_PyStackRef *self_or_null;
_PyStackRef *args;
_PyInterpreterFrame *new_frame;
/* Skip 1 cache entry */
// _CHECK_PEP_523
{
// A custom eval_frame hook (PEP 523) disables frame inlining.
DEOPT_IF(tstate->interp->eval_frame, CALL_KW);
}
// _CHECK_METHOD_VERSION_KW
{
null = &stack_pointer[-2 - oparg];
callable = &stack_pointer[-3 - oparg];
uint32_t func_version = read_u32(&this_instr[2].cache);
PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
// Guards: bound method wrapping a Python function whose version
// matches the cache, with no self already provided.
DEOPT_IF(Py_TYPE(callable_o) != &PyMethod_Type, CALL_KW);
PyObject *func = ((PyMethodObject *)callable_o)->im_func;
DEOPT_IF(!PyFunction_Check(func), CALL_KW);
DEOPT_IF(((PyFunctionObject *)func)->func_version != func_version, CALL_KW);
DEOPT_IF(!PyStackRef_IsNull(null[0]), CALL_KW);
}
// _EXPAND_METHOD_KW
{
method = &stack_pointer[-3 - oparg];
self = &stack_pointer[-2 - oparg];
// Keep the bound method alive until both new refs are on the stack.
_PyStackRef callable_s = callable[0];
PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable_s);
assert(PyStackRef_IsNull(null[0]));
assert(Py_TYPE(callable_o) == &PyMethod_Type);
self[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self);
method[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func);
assert(PyStackRef_FunctionCheck(method[0]));
PyStackRef_CLOSE(callable_s);
}
// flush
// _PY_FRAME_KW
{
kwnames = stack_pointer[-1];
args = &stack_pointer[-1 - oparg];
self_or_null = &stack_pointer[-2 - oparg];
callable = &stack_pointer[-3 - oparg];
PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
// oparg counts all of the args, but *not* self:
int total_args = oparg;
if (!PyStackRef_IsNull(self_or_null[0])) {
args--;
total_args++;
}
PyObject *kwnames_o = PyStackRef_AsPyObjectBorrow(kwnames);
int positional_args = total_args - (int)PyTuple_GET_SIZE(kwnames_o);
assert(Py_TYPE(callable_o) == &PyFunction_Type);
int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable_o))->co_flags;
// Non-optimized code runs with the function's globals as its locals.
PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o));
_PyFrame_SetStackPointer(frame, stack_pointer);
new_frame = _PyEvalFramePushAndInit(
tstate, callable[0], locals,
args, positional_args, kwnames_o, frame
);
stack_pointer = _PyFrame_GetStackPointer(frame);
PyStackRef_CLOSE(kwnames);
// The frame has stolen all the arguments from the stack,
// so there is no need to clean them up.
// Stash the new frame pointer in the bottom stack slot for _PUSH_FRAME.
stack_pointer[-3 - oparg].bits = (uintptr_t)new_frame;
stack_pointer += -2 - oparg;
assert(WITHIN_STACK_BOUNDS());
if (new_frame == NULL) {
goto error;
}
}
// _SAVE_RETURN_OFFSET
{
#if TIER_ONE
frame->return_offset = (uint16_t)(next_instr - this_instr);
#endif
#if TIER_TWO
frame->return_offset = oparg;
#endif
}
// _PUSH_FRAME
{
// Write it out explicitly because it's subtly different.
// Eventually this should be the only occurrence of this code.
assert(tstate->interp->eval_frame == NULL);
_PyInterpreterFrame *temp = new_frame;
// Pop the slot holding the new-frame pointer before switching frames.
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
_PyFrame_SetStackPointer(frame, stack_pointer);
assert(new_frame->previous == frame || new_frame->previous->previous == frame);
CALL_STAT_INC(inlined_py_calls);
frame = tstate->current_frame = temp;
tstate->py_recursion_remaining--;
LOAD_SP();
LOAD_IP(0);
LLTRACE_RESUME_FRAME();
}
DISPATCH();
}
| |
// CALL_KW_NON_PY: specialized CALL_KW for callables that are neither
// Python functions nor bound methods. Dispatches through
// PyObject_Vectorcall with a kwnames tuple, then runs the periodic
// eval-breaker check before pushing the result.
TARGET(CALL_KW_NON_PY) {
frame->instr_ptr = next_instr;
next_instr += 4;
INSTRUCTION_STATS(CALL_KW_NON_PY);
static_assert(INLINE_CACHE_ENTRIES_CALL_KW == 3, "incorrect cache size");
_PyStackRef *callable;
_PyStackRef kwnames;
_PyStackRef *self_or_null;
_PyStackRef *args;
_PyStackRef res;
/* Skip 1 cache entry */
/* Skip 2 cache entries */
// _CHECK_IS_NOT_PY_CALLABLE_KW
{
callable = &stack_pointer[-3 - oparg];
PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
// Python functions and bound methods have their own specializations.
DEOPT_IF(PyFunction_Check(callable_o), CALL_KW);
DEOPT_IF(Py_TYPE(callable_o) == &PyMethod_Type, CALL_KW);
}
// _CALL_KW_NON_PY
{
kwnames = stack_pointer[-1];
args = &stack_pointer[-1 - oparg];
self_or_null = &stack_pointer[-2 - oparg];
#if TIER_ONE
assert(opcode != INSTRUMENTED_CALL);
#endif
PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
int total_args = oparg;
// A bound self counts as an extra positional argument.
if (!PyStackRef_IsNull(self_or_null[0])) {
args--;
total_args++;
}
/* Callable is not a normal Python function */
STACKREFS_TO_PYOBJECTS(args, total_args, args_o);
// On conversion failure, release all owned references and unwind.
if (CONVERSION_FAILED(args_o)) {
PyStackRef_CLOSE(callable[0]);
PyStackRef_CLOSE(self_or_null[0]);
for (int _i = oparg; --_i >= 0;) {
PyStackRef_CLOSE(args[_i]);
}
PyStackRef_CLOSE(kwnames);
{
stack_pointer += -3 - oparg;
assert(WITHIN_STACK_BOUNDS());
goto error;
}
}
PyObject *kwnames_o = PyStackRef_AsPyObjectBorrow(kwnames);
int positional_args = total_args - (int)PyTuple_GET_SIZE(kwnames_o);
// The call may run arbitrary code, so spill the stack pointer.
_PyFrame_SetStackPointer(frame, stack_pointer);
PyObject *res_o = PyObject_Vectorcall(
callable_o, args_o,
positional_args | PY_VECTORCALL_ARGUMENTS_OFFSET,
kwnames_o);
stack_pointer = _PyFrame_GetStackPointer(frame);
PyStackRef_CLOSE(kwnames);
STACKREFS_TO_PYOBJECTS_CLEANUP(args_o);
assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
for (int i = 0; i < total_args; i++) {
PyStackRef_CLOSE(args[i]);
}
PyStackRef_CLOSE(callable[0]);
if (res_o == NULL) {
stack_pointer += -3 - oparg;
assert(WITHIN_STACK_BOUNDS());
goto error;
}
res = PyStackRef_FromPyObjectSteal(res_o);
}
// _CHECK_PERIODIC
{
_Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY();
QSBR_QUIESCENT_STATE(tstate);
if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) {
// Make the stack consistent (result pushed) before running pending
// calls, then restore the pre-push layout for the common exit path.
stack_pointer[-3 - oparg] = res;
stack_pointer += -2 - oparg;
assert(WITHIN_STACK_BOUNDS());
_PyFrame_SetStackPointer(frame, stack_pointer);
int err = _Py_HandlePending(tstate);
stack_pointer = _PyFrame_GetStackPointer(frame);
if (err != 0) goto error;
stack_pointer += 2 + oparg;
assert(WITHIN_STACK_BOUNDS());
}
}
stack_pointer[-3 - oparg] = res;
stack_pointer += -2 - oparg;
assert(WITHIN_STACK_BOUNDS());
DISPATCH();
}
| |
// CALL_KW_PY: specialized CALL_KW for a plain Python function whose
// version matches the inline cache. Pushes a new interpreter frame for
// the callee and enters it directly (no C-level recursion).
TARGET(CALL_KW_PY) {
_Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
next_instr += 4;
INSTRUCTION_STATS(CALL_KW_PY);
static_assert(INLINE_CACHE_ENTRIES_CALL_KW == 3, "incorrect cache size");
_PyStackRef *callable;
_PyStackRef *self_or_null;
_PyStackRef kwnames;
_PyStackRef *args;
_PyInterpreterFrame *new_frame;
/* Skip 1 cache entry */
// _CHECK_PEP_523
{
// A custom eval_frame hook (PEP 523) disables frame inlining.
DEOPT_IF(tstate->interp->eval_frame, CALL_KW);
}
// _CHECK_FUNCTION_VERSION_KW
{
callable = &stack_pointer[-3 - oparg];
uint32_t func_version = read_u32(&this_instr[2].cache);
PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
DEOPT_IF(!PyFunction_Check(callable_o), CALL_KW);
PyFunctionObject *func = (PyFunctionObject *)callable_o;
// The cached version guards against the function being mutated.
DEOPT_IF(func->func_version != func_version, CALL_KW);
}
// _PY_FRAME_KW
{
kwnames = stack_pointer[-1];
args = &stack_pointer[-1 - oparg];
self_or_null = &stack_pointer[-2 - oparg];
PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
// oparg counts all of the args, but *not* self:
int total_args = oparg;
if (!PyStackRef_IsNull(self_or_null[0])) {
args--;
total_args++;
}
PyObject *kwnames_o = PyStackRef_AsPyObjectBorrow(kwnames);
int positional_args = total_args - (int)PyTuple_GET_SIZE(kwnames_o);
assert(Py_TYPE(callable_o) == &PyFunction_Type);
int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable_o))->co_flags;
// Non-optimized code runs with the function's globals as its locals.
PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o));
_PyFrame_SetStackPointer(frame, stack_pointer);
new_frame = _PyEvalFramePushAndInit(
tstate, callable[0], locals,
args, positional_args, kwnames_o, frame
);
stack_pointer = _PyFrame_GetStackPointer(frame);
PyStackRef_CLOSE(kwnames);
// The frame has stolen all the arguments from the stack,
// so there is no need to clean them up.
// Stash the new frame pointer in the bottom stack slot for _PUSH_FRAME.
stack_pointer[-3 - oparg].bits = (uintptr_t)new_frame;
stack_pointer += -2 - oparg;
assert(WITHIN_STACK_BOUNDS());
if (new_frame == NULL) {
goto error;
}
}
// _SAVE_RETURN_OFFSET
{
#if TIER_ONE
frame->return_offset = (uint16_t)(next_instr - this_instr);
#endif
#if TIER_TWO
frame->return_offset = oparg;
#endif
}
// _PUSH_FRAME
{
// Write it out explicitly because it's subtly different.
// Eventually this should be the only occurrence of this code.
assert(tstate->interp->eval_frame == NULL);
_PyInterpreterFrame *temp = new_frame;
// Pop the slot holding the new-frame pointer before switching frames.
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
_PyFrame_SetStackPointer(frame, stack_pointer);
assert(new_frame->previous == frame || new_frame->previous->previous == frame);
CALL_STAT_INC(inlined_py_calls);
frame = tstate->current_frame = temp;
tstate->py_recursion_remaining--;
LOAD_SP();
LOAD_IP(0);
LLTRACE_RESUME_FRAME();
}
DISPATCH();
}
| |
// CALL_LEN: specialized CALL for the builtin len() with exactly one
// argument. Deopts if the callable is not the interpreter's cached len.
TARGET(CALL_LEN) {
frame->instr_ptr = next_instr;
next_instr += 4;
INSTRUCTION_STATS(CALL_LEN);
static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size");
_PyStackRef *callable;
_PyStackRef *self_or_null;
_PyStackRef *args;
_PyStackRef res;
/* Skip 1 cache entry */
/* Skip 2 cache entries */
args = &stack_pointer[-oparg];
self_or_null = &stack_pointer[-1 - oparg];
callable = &stack_pointer[-2 - oparg];
/* len(o) */
PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
int total_args = oparg;
// A bound self counts as an extra positional argument.
if (!PyStackRef_IsNull(self_or_null[0])) {
args--;
total_args++;
}
DEOPT_IF(total_args != 1, CALL);
PyInterpreterState *interp = tstate->interp;
DEOPT_IF(callable_o != interp->callable_cache.len, CALL);
STAT_INC(CALL, hit);
_PyStackRef arg_stackref = args[0];
PyObject *arg = PyStackRef_AsPyObjectBorrow(arg_stackref);
// __len__ can run arbitrary code, so spill the stack pointer.
_PyFrame_SetStackPointer(frame, stack_pointer);
Py_ssize_t len_i = PyObject_Length(arg);
stack_pointer = _PyFrame_GetStackPointer(frame);
// Error paths jump to the generic handler; the operands still on the
// stack are released by the frame's unwind machinery.
if (len_i < 0) {
goto error;
}
PyObject *res_o = PyLong_FromSsize_t(len_i);
assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
if (res_o == NULL) {
GOTO_ERROR(error);
}
// Only release references on the success path (see note above).
PyStackRef_CLOSE(callable[0]);
PyStackRef_CLOSE(arg_stackref);
res = PyStackRef_FromPyObjectSteal(res_o);
stack_pointer[-2 - oparg] = res;
stack_pointer += -1 - oparg;
assert(WITHIN_STACK_BOUNDS());
DISPATCH();
}
| |
// CALL_LIST_APPEND: specialized CALL for list.append(x) on an actual
// list. Appends in place and then skips the following POP_TOP, since no
// result object is pushed.
TARGET(CALL_LIST_APPEND) {
frame->instr_ptr = next_instr;
next_instr += 4;
INSTRUCTION_STATS(CALL_LIST_APPEND);
static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size");
_PyStackRef callable;
_PyStackRef self;
_PyStackRef arg;
/* Skip 1 cache entry */
/* Skip 2 cache entries */
arg = stack_pointer[-1];
self = stack_pointer[-2];
callable = stack_pointer[-3];
assert(oparg == 1);
PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable);
PyObject *self_o = PyStackRef_AsPyObjectBorrow(self);
PyInterpreterState *interp = tstate->interp;
// Guards: the exact cached list.append descriptor, applied to a list.
DEOPT_IF(callable_o != interp->callable_cache.list_append, CALL);
assert(self_o != NULL);
DEOPT_IF(!PyList_Check(self_o), CALL);
STAT_INC(CALL, hit);
// _PyList_AppendTakeRef steals the reference to arg; do not CLOSE it.
int err = _PyList_AppendTakeRef((PyListObject *)self_o, PyStackRef_AsPyObjectSteal(arg));
PyStackRef_CLOSE(self);
PyStackRef_CLOSE(callable);
if (err) goto pop_3_error;
#if TIER_ONE
// Skip the following POP_TOP. This is done here in tier one, and
// during trace projection in tier two:
assert(next_instr->op.code == POP_TOP);
SKIP_OVER(1);
#endif
stack_pointer += -3;
assert(WITHIN_STACK_BOUNDS());
DISPATCH();
}
| |
// CALL_METHOD_DESCRIPTOR_FAST: specialized CALL for a method descriptor
// whose underlying PyMethodDef uses the METH_FASTCALL convention (no
// keywords). args[0] is self; the remaining args go to the C function.
TARGET(CALL_METHOD_DESCRIPTOR_FAST) {
frame->instr_ptr = next_instr;
next_instr += 4;
INSTRUCTION_STATS(CALL_METHOD_DESCRIPTOR_FAST);
static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size");
_PyStackRef *callable;
_PyStackRef *self_or_null;
_PyStackRef *args;
_PyStackRef res;
/* Skip 1 cache entry */
/* Skip 2 cache entries */
// _CALL_METHOD_DESCRIPTOR_FAST
{
args = &stack_pointer[-oparg];
self_or_null = &stack_pointer[-1 - oparg];
callable = &stack_pointer[-2 - oparg];
PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
int total_args = oparg;
// A bound self counts as an extra positional argument.
if (!PyStackRef_IsNull(self_or_null[0])) {
args--;
total_args++;
}
PyMethodDescrObject *method = (PyMethodDescrObject *)callable_o;
/* Builtin METH_FASTCALL methods, without keywords */
DEOPT_IF(!Py_IS_TYPE(method, &PyMethodDescr_Type), CALL);
PyMethodDef *meth = method->d_method;
DEOPT_IF(meth->ml_flags != METH_FASTCALL, CALL);
PyObject *self = PyStackRef_AsPyObjectBorrow(args[0]);
// self must be an instance of the type defining the descriptor.
DEOPT_IF(!Py_IS_TYPE(self, method->d_common.d_type), CALL);
STAT_INC(CALL, hit);
int nargs = total_args - 1;
STACKREFS_TO_PYOBJECTS(args, total_args, args_o);
// On conversion failure, release all owned references and unwind.
if (CONVERSION_FAILED(args_o)) {
PyStackRef_CLOSE(callable[0]);
PyStackRef_CLOSE(self_or_null[0]);
for (int _i = oparg; --_i >= 0;) {
PyStackRef_CLOSE(args[_i]);
}
{
stack_pointer += -2 - oparg;
assert(WITHIN_STACK_BOUNDS());
goto error;
}
}
_PyFrame_SetStackPointer(frame, stack_pointer);
PyCFunctionFast cfunc =
(PyCFunctionFast)(void(*)(void))meth->ml_meth;
// args_o[0] is self, which is passed separately; skip it here.
PyObject *res_o = cfunc(self, (args_o + 1), nargs);
stack_pointer = _PyFrame_GetStackPointer(frame);
STACKREFS_TO_PYOBJECTS_CLEANUP(args_o);
assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
/* Clear the stack of the arguments. */
for (int i = 0; i < total_args; i++) {
PyStackRef_CLOSE(args[i]);
}
PyStackRef_CLOSE(callable[0]);
if (res_o == NULL) {
stack_pointer += -2 - oparg;
assert(WITHIN_STACK_BOUNDS());
goto error;
}
res = PyStackRef_FromPyObjectSteal(res_o);
}
// _CHECK_PERIODIC
{
_Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY();
QSBR_QUIESCENT_STATE(tstate);
if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) {
// Make the stack consistent before running pending calls, then
// restore the pre-push layout for the common exit path.
stack_pointer[-2 - oparg] = res;
stack_pointer += -1 - oparg;
assert(WITHIN_STACK_BOUNDS());
_PyFrame_SetStackPointer(frame, stack_pointer);
int err = _Py_HandlePending(tstate);
stack_pointer = _PyFrame_GetStackPointer(frame);
if (err != 0) goto error;
stack_pointer += 1 + oparg;
assert(WITHIN_STACK_BOUNDS());
}
}
stack_pointer[-2 - oparg] = res;
stack_pointer += -1 - oparg;
assert(WITHIN_STACK_BOUNDS());
DISPATCH();
}
| |
// CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS: specialized CALL for a
// method descriptor using METH_FASTCALL|METH_KEYWORDS. This opcode only
// handles calls with no keywords, so kwnames is passed as NULL.
TARGET(CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS) {
frame->instr_ptr = next_instr;
next_instr += 4;
INSTRUCTION_STATS(CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS);
static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size");
_PyStackRef *callable;
_PyStackRef *self_or_null;
_PyStackRef *args;
_PyStackRef res;
/* Skip 1 cache entry */
/* Skip 2 cache entries */
// _CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS
{
args = &stack_pointer[-oparg];
self_or_null = &stack_pointer[-1 - oparg];
callable = &stack_pointer[-2 - oparg];
PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
int total_args = oparg;
// A bound self counts as an extra positional argument.
if (!PyStackRef_IsNull(self_or_null[0])) {
args--;
total_args++;
}
PyMethodDescrObject *method = (PyMethodDescrObject *)callable_o;
DEOPT_IF(!Py_IS_TYPE(method, &PyMethodDescr_Type), CALL);
PyMethodDef *meth = method->d_method;
DEOPT_IF(meth->ml_flags != (METH_FASTCALL|METH_KEYWORDS), CALL);
PyTypeObject *d_type = method->d_common.d_type;
PyObject *self = PyStackRef_AsPyObjectBorrow(args[0]);
// self must be an instance of the type defining the descriptor.
DEOPT_IF(!Py_IS_TYPE(self, d_type), CALL);
STAT_INC(CALL, hit);
int nargs = total_args - 1;
STACKREFS_TO_PYOBJECTS(args, total_args, args_o);
// On conversion failure, release all owned references and unwind.
if (CONVERSION_FAILED(args_o)) {
PyStackRef_CLOSE(callable[0]);
PyStackRef_CLOSE(self_or_null[0]);
for (int _i = oparg; --_i >= 0;) {
PyStackRef_CLOSE(args[_i]);
}
{
stack_pointer += -2 - oparg;
assert(WITHIN_STACK_BOUNDS());
goto error;
}
}
_PyFrame_SetStackPointer(frame, stack_pointer);
PyCFunctionFastWithKeywords cfunc =
(PyCFunctionFastWithKeywords)(void(*)(void))meth->ml_meth;
// args_o[0] is self (passed separately); NULL kwnames = no keywords.
PyObject *res_o = cfunc(self, (args_o + 1), nargs, NULL);
stack_pointer = _PyFrame_GetStackPointer(frame);
STACKREFS_TO_PYOBJECTS_CLEANUP(args_o);
assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
/* Free the arguments. */
for (int i = 0; i < total_args; i++) {
PyStackRef_CLOSE(args[i]);
}
PyStackRef_CLOSE(callable[0]);
if (res_o == NULL) {
stack_pointer += -2 - oparg;
assert(WITHIN_STACK_BOUNDS());
goto error;
}
res = PyStackRef_FromPyObjectSteal(res_o);
}
// _CHECK_PERIODIC
{
_Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY();
QSBR_QUIESCENT_STATE(tstate);
if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) {
// Make the stack consistent before running pending calls, then
// restore the pre-push layout for the common exit path.
stack_pointer[-2 - oparg] = res;
stack_pointer += -1 - oparg;
assert(WITHIN_STACK_BOUNDS());
_PyFrame_SetStackPointer(frame, stack_pointer);
int err = _Py_HandlePending(tstate);
stack_pointer = _PyFrame_GetStackPointer(frame);
if (err != 0) goto error;
stack_pointer += 1 + oparg;
assert(WITHIN_STACK_BOUNDS());
}
}
stack_pointer[-2 - oparg] = res;
stack_pointer += -1 - oparg;
assert(WITHIN_STACK_BOUNDS());
DISPATCH();
}
| |
// CALL_METHOD_DESCRIPTOR_NOARGS: specialized CALL for a METH_NOARGS
// method descriptor called with only self (total_args == 1). Calls the
// C function through the trampoline with a NULL args argument.
TARGET(CALL_METHOD_DESCRIPTOR_NOARGS) {
frame->instr_ptr = next_instr;
next_instr += 4;
INSTRUCTION_STATS(CALL_METHOD_DESCRIPTOR_NOARGS);
static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size");
_PyStackRef *callable;
_PyStackRef *self_or_null;
_PyStackRef *args;
_PyStackRef res;
/* Skip 1 cache entry */
/* Skip 2 cache entries */
// _CALL_METHOD_DESCRIPTOR_NOARGS
{
args = &stack_pointer[-oparg];
self_or_null = &stack_pointer[-1 - oparg];
callable = &stack_pointer[-2 - oparg];
assert(oparg == 0 || oparg == 1);
PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
int total_args = oparg;
// A bound self counts as an extra positional argument.
if (!PyStackRef_IsNull(self_or_null[0])) {
args--;
total_args++;
}
DEOPT_IF(total_args != 1, CALL);
PyMethodDescrObject *method = (PyMethodDescrObject *)callable_o;
DEOPT_IF(!Py_IS_TYPE(method, &PyMethodDescr_Type), CALL);
PyMethodDef *meth = method->d_method;
_PyStackRef self_stackref = args[0];
PyObject *self = PyStackRef_AsPyObjectBorrow(self_stackref);
// self must be an instance of the type defining the descriptor.
DEOPT_IF(!Py_IS_TYPE(self, method->d_common.d_type), CALL);
DEOPT_IF(meth->ml_flags != METH_NOARGS, CALL);
// CPython promises to check all non-vectorcall function calls.
DEOPT_IF(tstate->c_recursion_remaining <= 0, CALL);
STAT_INC(CALL, hit);
PyCFunction cfunc = meth->ml_meth;
// Recursion limit was checked by the DEOPT above; enter unchecked.
_Py_EnterRecursiveCallTstateUnchecked(tstate);
_PyFrame_SetStackPointer(frame, stack_pointer);
PyObject *res_o = _PyCFunction_TrampolineCall(cfunc, self, NULL);
stack_pointer = _PyFrame_GetStackPointer(frame);
_Py_LeaveRecursiveCallTstate(tstate);
assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
PyStackRef_CLOSE(self_stackref);
PyStackRef_CLOSE(callable[0]);
if (res_o == NULL) {
stack_pointer += -2 - oparg;
assert(WITHIN_STACK_BOUNDS());
goto error;
}
res = PyStackRef_FromPyObjectSteal(res_o);
}
// _CHECK_PERIODIC
{
_Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY();
QSBR_QUIESCENT_STATE(tstate);
if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) {
// Make the stack consistent before running pending calls, then
// restore the pre-push layout for the common exit path.
stack_pointer[-2 - oparg] = res;
stack_pointer += -1 - oparg;
assert(WITHIN_STACK_BOUNDS());
_PyFrame_SetStackPointer(frame, stack_pointer);
int err = _Py_HandlePending(tstate);
stack_pointer = _PyFrame_GetStackPointer(frame);
if (err != 0) goto error;
stack_pointer += 1 + oparg;
assert(WITHIN_STACK_BOUNDS());
}
}
stack_pointer[-2 - oparg] = res;
stack_pointer += -1 - oparg;
assert(WITHIN_STACK_BOUNDS());
DISPATCH();
}
| |
// CALL_METHOD_DESCRIPTOR_O: specialized CALL for a METH_O method
// descriptor called with self plus exactly one argument
// (total_args == 2). Calls the C function through the trampoline.
TARGET(CALL_METHOD_DESCRIPTOR_O) {
frame->instr_ptr = next_instr;
next_instr += 4;
INSTRUCTION_STATS(CALL_METHOD_DESCRIPTOR_O);
static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size");
_PyStackRef *callable;
_PyStackRef *self_or_null;
_PyStackRef *args;
_PyStackRef res;
/* Skip 1 cache entry */
/* Skip 2 cache entries */
// _CALL_METHOD_DESCRIPTOR_O
{
args = &stack_pointer[-oparg];
self_or_null = &stack_pointer[-1 - oparg];
callable = &stack_pointer[-2 - oparg];
PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
int total_args = oparg;
// A bound self counts as an extra positional argument.
if (!PyStackRef_IsNull(self_or_null[0])) {
args--;
total_args++;
}
PyMethodDescrObject *method = (PyMethodDescrObject *)callable_o;
DEOPT_IF(total_args != 2, CALL);
DEOPT_IF(!Py_IS_TYPE(method, &PyMethodDescr_Type), CALL);
PyMethodDef *meth = method->d_method;
DEOPT_IF(meth->ml_flags != METH_O, CALL);
// CPython promises to check all non-vectorcall function calls.
DEOPT_IF(tstate->c_recursion_remaining <= 0, CALL);
// args[0] is self, args[1] is the single positional argument.
_PyStackRef arg_stackref = args[1];
_PyStackRef self_stackref = args[0];
// self must be an instance of the type defining the descriptor.
DEOPT_IF(!Py_IS_TYPE(PyStackRef_AsPyObjectBorrow(self_stackref),
method->d_common.d_type), CALL);
STAT_INC(CALL, hit);
PyCFunction cfunc = meth->ml_meth;
// Recursion limit was checked by the DEOPT above; enter unchecked.
_Py_EnterRecursiveCallTstateUnchecked(tstate);
_PyFrame_SetStackPointer(frame, stack_pointer);
PyObject *res_o = _PyCFunction_TrampolineCall(cfunc,
PyStackRef_AsPyObjectBorrow(self_stackref),
PyStackRef_AsPyObjectBorrow(arg_stackref));
stack_pointer = _PyFrame_GetStackPointer(frame);
_Py_LeaveRecursiveCallTstate(tstate);
assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
PyStackRef_CLOSE(self_stackref);
PyStackRef_CLOSE(arg_stackref);
PyStackRef_CLOSE(callable[0]);
if (res_o == NULL) {
stack_pointer += -2 - oparg;
assert(WITHIN_STACK_BOUNDS());
goto error;
}
res = PyStackRef_FromPyObjectSteal(res_o);
}
// _CHECK_PERIODIC
{
_Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY();
QSBR_QUIESCENT_STATE(tstate);
if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) {
// Make the stack consistent before running pending calls, then
// restore the pre-push layout for the common exit path.
stack_pointer[-2 - oparg] = res;
stack_pointer += -1 - oparg;
assert(WITHIN_STACK_BOUNDS());
_PyFrame_SetStackPointer(frame, stack_pointer);
int err = _Py_HandlePending(tstate);
stack_pointer = _PyFrame_GetStackPointer(frame);
if (err != 0) goto error;
stack_pointer += 1 + oparg;
assert(WITHIN_STACK_BOUNDS());
}
}
stack_pointer[-2 - oparg] = res;
stack_pointer += -1 - oparg;
assert(WITHIN_STACK_BOUNDS());
DISPATCH();
}
| |
// CALL_NON_PY_GENERAL: specialized CALL (no keywords) for callables that
// are neither Python functions nor bound methods. Dispatches through
// PyObject_Vectorcall, then runs the periodic eval-breaker check.
TARGET(CALL_NON_PY_GENERAL) {
frame->instr_ptr = next_instr;
next_instr += 4;
INSTRUCTION_STATS(CALL_NON_PY_GENERAL);
static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size");
_PyStackRef *callable;
_PyStackRef *self_or_null;
_PyStackRef *args;
_PyStackRef res;
/* Skip 1 cache entry */
/* Skip 2 cache entries */
// _CHECK_IS_NOT_PY_CALLABLE
{
callable = &stack_pointer[-2 - oparg];
PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
// Python functions and bound methods have their own specializations.
DEOPT_IF(PyFunction_Check(callable_o), CALL);
DEOPT_IF(Py_TYPE(callable_o) == &PyMethod_Type, CALL);
}
// _CALL_NON_PY_GENERAL
{
args = &stack_pointer[-oparg];
self_or_null = &stack_pointer[-1 - oparg];
#if TIER_ONE
assert(opcode != INSTRUMENTED_CALL);
#endif
PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
int total_args = oparg;
// A bound self counts as an extra positional argument.
if (!PyStackRef_IsNull(self_or_null[0])) {
args--;
total_args++;
}
/* Callable is not a normal Python function */
STACKREFS_TO_PYOBJECTS(args, total_args, args_o);
// On conversion failure, release all owned references and unwind.
if (CONVERSION_FAILED(args_o)) {
PyStackRef_CLOSE(callable[0]);
PyStackRef_CLOSE(self_or_null[0]);
for (int _i = oparg; --_i >= 0;) {
PyStackRef_CLOSE(args[_i]);
}
{
stack_pointer += -2 - oparg;
assert(WITHIN_STACK_BOUNDS());
goto error;
}
}
// The call may run arbitrary code, so spill the stack pointer.
_PyFrame_SetStackPointer(frame, stack_pointer);
PyObject *res_o = PyObject_Vectorcall(
callable_o, args_o,
total_args | PY_VECTORCALL_ARGUMENTS_OFFSET,
NULL);
stack_pointer = _PyFrame_GetStackPointer(frame);
STACKREFS_TO_PYOBJECTS_CLEANUP(args_o);
assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
PyStackRef_CLOSE(callable[0]);
for (int i = 0; i < total_args; i++) {
PyStackRef_CLOSE(args[i]);
}
if (res_o == NULL) {
stack_pointer += -2 - oparg;
assert(WITHIN_STACK_BOUNDS());
goto error;
}
res = PyStackRef_FromPyObjectSteal(res_o);
}
// _CHECK_PERIODIC
{
_Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY();
QSBR_QUIESCENT_STATE(tstate);
if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) {
// Make the stack consistent before running pending calls, then
// restore the pre-push layout for the common exit path.
stack_pointer[-2 - oparg] = res;
stack_pointer += -1 - oparg;
assert(WITHIN_STACK_BOUNDS());
_PyFrame_SetStackPointer(frame, stack_pointer);
int err = _Py_HandlePending(tstate);
stack_pointer = _PyFrame_GetStackPointer(frame);
if (err != 0) goto error;
stack_pointer += 1 + oparg;
assert(WITHIN_STACK_BOUNDS());
}
}
stack_pointer[-2 - oparg] = res;
stack_pointer += -1 - oparg;
assert(WITHIN_STACK_BOUNDS());
DISPATCH();
}
| |
TARGET(CALL_PY_EXACT_ARGS) {
    /* CALL specialization for a Python function called with exactly
     * co_argcount positional arguments: the call is inlined by pushing a
     * new interpreter frame directly, with no argument processing. */
    _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
    next_instr += 4;
    INSTRUCTION_STATS(CALL_PY_EXACT_ARGS);
    static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size");
    _PyStackRef *callable;
    _PyStackRef *self_or_null;
    _PyStackRef *args;
    _PyInterpreterFrame *new_frame;
    /* Skip 1 cache entry */
    // _CHECK_PEP_523
    {
        // A custom eval frame hook (PEP 523) disables this fast path.
        DEOPT_IF(tstate->interp->eval_frame, CALL);
    }
    // _CHECK_FUNCTION_VERSION
    {
        callable = &stack_pointer[-2 - oparg];
        uint32_t func_version = read_u32(&this_instr[2].cache);
        PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
        DEOPT_IF(!PyFunction_Check(callable_o), CALL);
        PyFunctionObject *func = (PyFunctionObject *)callable_o;
        // The cached version guards against the function being mutated
        // (e.g. new defaults or code object) since specialization.
        DEOPT_IF(func->func_version != func_version, CALL);
    }
    // _CHECK_FUNCTION_EXACT_ARGS
    {
        self_or_null = &stack_pointer[-1 - oparg];
        PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
        assert(PyFunction_Check(callable_o));
        PyFunctionObject *func = (PyFunctionObject *)callable_o;
        PyCodeObject *code = (PyCodeObject *)func->func_code;
        // oparg plus an implicit self (if present) must match co_argcount.
        DEOPT_IF(code->co_argcount != oparg + (!PyStackRef_IsNull(self_or_null[0])), CALL);
    }
    // _CHECK_STACK_SPACE
    {
        PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
        PyFunctionObject *func = (PyFunctionObject *)callable_o;
        PyCodeObject *code = (PyCodeObject *)func->func_code;
        DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), CALL);
        DEOPT_IF(tstate->py_recursion_remaining <= 1, CALL);
    }
    // _INIT_CALL_PY_EXACT_ARGS
    {
        args = &stack_pointer[-oparg];
        int has_self = !PyStackRef_IsNull(self_or_null[0]);
        STAT_INC(CALL, hit);
        new_frame = _PyFrame_PushUnchecked(tstate, callable[0], oparg + has_self, frame);
        // Arguments (and self, if any) move straight into the new frame's
        // localsplus; ownership transfers with them.
        _PyStackRef *first_non_self_local = new_frame->localsplus + has_self;
        new_frame->localsplus[0] = self_or_null[0];
        for (int i = 0; i < oparg; i++) {
            first_non_self_local[i] = args[i];
        }
    }
    // _SAVE_RETURN_OFFSET
    {
        #if TIER_ONE
        frame->return_offset = (uint16_t)(next_instr - this_instr);
        #endif
        #if TIER_TWO
        frame->return_offset = oparg;
        #endif
    }
    // _PUSH_FRAME
    {
        // Write it out explicitly because it's subtly different.
        // Eventually this should be the only occurrence of this code.
        assert(tstate->interp->eval_frame == NULL);
        _PyInterpreterFrame *temp = new_frame;
        stack_pointer += -2 - oparg;
        assert(WITHIN_STACK_BOUNDS());
        _PyFrame_SetStackPointer(frame, stack_pointer);
        assert(new_frame->previous == frame || new_frame->previous->previous == frame);
        CALL_STAT_INC(inlined_py_calls);
        frame = tstate->current_frame = temp;
        tstate->py_recursion_remaining--;
        LOAD_SP();
        LOAD_IP(0);
        LLTRACE_RESUME_FRAME();
    }
    DISPATCH();
}
| |
TARGET(CALL_PY_GENERAL) {
    /* CALL specialization for a Python function whose call needs general
     * argument processing (defaults, kw-only, etc.): a new frame is pushed
     * via _PyEvalFramePushAndInit rather than the exact-args fast path. */
    _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
    next_instr += 4;
    INSTRUCTION_STATS(CALL_PY_GENERAL);
    static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size");
    _PyStackRef *callable;
    _PyStackRef *self_or_null;
    _PyStackRef *args;
    _PyInterpreterFrame *new_frame;
    /* Skip 1 cache entry */
    // _CHECK_PEP_523
    {
        // A custom eval frame hook (PEP 523) disables frame inlining.
        DEOPT_IF(tstate->interp->eval_frame, CALL);
    }
    // _CHECK_FUNCTION_VERSION
    {
        callable = &stack_pointer[-2 - oparg];
        uint32_t func_version = read_u32(&this_instr[2].cache);
        PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
        DEOPT_IF(!PyFunction_Check(callable_o), CALL);
        PyFunctionObject *func = (PyFunctionObject *)callable_o;
        // Cached version guards against function mutation since specialization.
        DEOPT_IF(func->func_version != func_version, CALL);
    }
    // _PY_FRAME_GENERAL
    {
        args = &stack_pointer[-oparg];
        self_or_null = &stack_pointer[-1 - oparg];
        PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
        // oparg counts all of the args, but *not* self:
        int total_args = oparg;
        if (!PyStackRef_IsNull(self_or_null[0])) {
            args--;
            total_args++;
        }
        assert(Py_TYPE(callable_o) == &PyFunction_Type);
        int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable_o))->co_flags;
        PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o));
        _PyFrame_SetStackPointer(frame, stack_pointer);
        _PyInterpreterFrame *temp = _PyEvalFramePushAndInit(
            tstate, callable[0], locals,
            args, total_args, NULL, frame
        );
        stack_pointer = _PyFrame_GetStackPointer(frame);
        // The frame has stolen all the arguments from the stack.
        stack_pointer += -2 - oparg;
        assert(WITHIN_STACK_BOUNDS());
        if (temp == NULL) {
            goto error;
        }
        new_frame = temp;
    }
    // _SAVE_RETURN_OFFSET
    {
        #if TIER_ONE
        frame->return_offset = (uint16_t)(next_instr - this_instr);
        #endif
        #if TIER_TWO
        frame->return_offset = oparg;
        #endif
    }
    // _PUSH_FRAME
    {
        // Write it out explicitly because it's subtly different.
        // Eventually this should be the only occurrence of this code.
        assert(tstate->interp->eval_frame == NULL);
        _PyInterpreterFrame *temp = new_frame;
        _PyFrame_SetStackPointer(frame, stack_pointer);
        assert(new_frame->previous == frame || new_frame->previous->previous == frame);
        CALL_STAT_INC(inlined_py_calls);
        frame = tstate->current_frame = temp;
        tstate->py_recursion_remaining--;
        LOAD_SP();
        LOAD_IP(0);
        LLTRACE_RESUME_FRAME();
    }
    DISPATCH();
}
| |
TARGET(CALL_STR_1) {
    /* CALL specialization for str(x) with exactly one positional argument:
     * goes straight to PyObject_Str. */
    frame->instr_ptr = next_instr;
    next_instr += 4;
    INSTRUCTION_STATS(CALL_STR_1);
    static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size");
    _PyStackRef callable;
    _PyStackRef null;
    _PyStackRef arg;
    _PyStackRef res;
    /* Skip 1 cache entry */
    /* Skip 2 cache entries */
    // _CALL_STR_1
    {
        arg = stack_pointer[-1];
        null = stack_pointer[-2];
        callable = stack_pointer[-3];
        PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable);
        PyObject *arg_o = PyStackRef_AsPyObjectBorrow(arg);
        assert(oparg == 1);
        // Guard: no bound self, and the callable must be exactly the
        // built-in str type.
        DEOPT_IF(!PyStackRef_IsNull(null), CALL);
        DEOPT_IF(callable_o != (PyObject *)&PyUnicode_Type, CALL);
        STAT_INC(CALL, hit);
        _PyFrame_SetStackPointer(frame, stack_pointer);
        PyObject *res_o = PyObject_Str(arg_o);
        stack_pointer = _PyFrame_GetStackPointer(frame);
        PyStackRef_CLOSE(arg);
        if (res_o == NULL) goto pop_3_error;
        res = PyStackRef_FromPyObjectSteal(res_o);
    }
    // _CHECK_PERIODIC
    {
        _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY();
        QSBR_QUIESCENT_STATE(tstate);
        // Spill the result before running pending work, then restore depth.
        if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) {
            stack_pointer[-3] = res;
            stack_pointer += -2;
            assert(WITHIN_STACK_BOUNDS());
            _PyFrame_SetStackPointer(frame, stack_pointer);
            int err = _Py_HandlePending(tstate);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            if (err != 0) goto error;
            stack_pointer += 2;
            assert(WITHIN_STACK_BOUNDS());
        }
    }
    stack_pointer[-3] = res;
    stack_pointer += -2;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(CALL_TUPLE_1) {
    /* CALL specialization for tuple(x) with exactly one positional
     * argument: goes straight to PySequence_Tuple. */
    frame->instr_ptr = next_instr;
    next_instr += 4;
    INSTRUCTION_STATS(CALL_TUPLE_1);
    static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size");
    _PyStackRef callable;
    _PyStackRef null;
    _PyStackRef arg;
    _PyStackRef res;
    /* Skip 1 cache entry */
    /* Skip 2 cache entries */
    // _CALL_TUPLE_1
    {
        arg = stack_pointer[-1];
        null = stack_pointer[-2];
        callable = stack_pointer[-3];
        PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable);
        PyObject *arg_o = PyStackRef_AsPyObjectBorrow(arg);
        assert(oparg == 1);
        // Guard: no bound self, and the callable must be exactly the
        // built-in tuple type.
        DEOPT_IF(!PyStackRef_IsNull(null), CALL);
        DEOPT_IF(callable_o != (PyObject *)&PyTuple_Type, CALL);
        STAT_INC(CALL, hit);
        _PyFrame_SetStackPointer(frame, stack_pointer);
        PyObject *res_o = PySequence_Tuple(arg_o);
        stack_pointer = _PyFrame_GetStackPointer(frame);
        PyStackRef_CLOSE(arg);
        if (res_o == NULL) goto pop_3_error;
        res = PyStackRef_FromPyObjectSteal(res_o);
    }
    // _CHECK_PERIODIC
    {
        _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY();
        QSBR_QUIESCENT_STATE(tstate);
        // Spill the result before running pending work, then restore depth.
        if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) {
            stack_pointer[-3] = res;
            stack_pointer += -2;
            assert(WITHIN_STACK_BOUNDS());
            _PyFrame_SetStackPointer(frame, stack_pointer);
            int err = _Py_HandlePending(tstate);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            if (err != 0) goto error;
            stack_pointer += 2;
            assert(WITHIN_STACK_BOUNDS());
        }
    }
    stack_pointer[-3] = res;
    stack_pointer += -2;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(CALL_TYPE_1) {
    /* CALL specialization for type(x) with exactly one positional
     * argument: returns the argument's type without calling anything. */
    frame->instr_ptr = next_instr;
    next_instr += 4;
    INSTRUCTION_STATS(CALL_TYPE_1);
    static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size");
    _PyStackRef callable;
    _PyStackRef null;
    _PyStackRef arg;
    _PyStackRef res;
    /* Skip 1 cache entry */
    /* Skip 2 cache entries */
    arg = stack_pointer[-1];
    null = stack_pointer[-2];
    callable = stack_pointer[-3];
    PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable);
    PyObject *arg_o = PyStackRef_AsPyObjectBorrow(arg);
    assert(oparg == 1);
    // Guard: no bound self, and the callable must be exactly `type`.
    DEOPT_IF(!PyStackRef_IsNull(null), CALL);
    DEOPT_IF(callable_o != (PyObject *)&PyType_Type, CALL);
    STAT_INC(CALL, hit);
    res = PyStackRef_FromPyObjectSteal(Py_NewRef(Py_TYPE(arg_o)));
    PyStackRef_CLOSE(arg);
    stack_pointer[-3] = res;
    stack_pointer += -2;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(CHECK_EG_MATCH) {
    /* For `except*`: split the exception group on TOS-1 against the match
     * type on TOS. Pushes (rest, match); both are None-like when nothing
     * matched (per _PyEval_ExceptionGroupMatch). */
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(CHECK_EG_MATCH);
    _PyStackRef exc_value_st;
    _PyStackRef match_type_st;
    _PyStackRef rest;
    _PyStackRef match;
    match_type_st = stack_pointer[-1];
    exc_value_st = stack_pointer[-2];
    PyObject *exc_value = PyStackRef_AsPyObjectBorrow(exc_value_st);
    PyObject *match_type = PyStackRef_AsPyObjectBorrow(match_type_st);
    // Validate that the match type is legal for except* (e.g. not an
    // exception-group type) before attempting the split.
    _PyFrame_SetStackPointer(frame, stack_pointer);
    int err = _PyEval_CheckExceptStarTypeValid(tstate, match_type);
    stack_pointer = _PyFrame_GetStackPointer(frame);
    if (err < 0) {
        PyStackRef_CLOSE(exc_value_st);
        PyStackRef_CLOSE(match_type_st);
        goto pop_2_error;
    }
    PyObject *match_o = NULL;
    PyObject *rest_o = NULL;
    _PyFrame_SetStackPointer(frame, stack_pointer);
    int res = _PyEval_ExceptionGroupMatch(exc_value, match_type,
        &match_o, &rest_o);
    stack_pointer = _PyFrame_GetStackPointer(frame);
    PyStackRef_CLOSE(exc_value_st);
    PyStackRef_CLOSE(match_type_st);
    if (res < 0) goto pop_2_error;
    // match_o and rest_o are produced (or not) together.
    assert((match_o == NULL) == (rest_o == NULL));
    if (match_o == NULL) goto pop_2_error;
    if (!Py_IsNone(match_o)) {
        // Record the matched part as the currently-handled exception.
        stack_pointer += -2;
        assert(WITHIN_STACK_BOUNDS());
        _PyFrame_SetStackPointer(frame, stack_pointer);
        PyErr_SetHandledException(match_o);
        stack_pointer = _PyFrame_GetStackPointer(frame);
        stack_pointer += 2;
        assert(WITHIN_STACK_BOUNDS());
    }
    rest = PyStackRef_FromPyObjectSteal(rest_o);
    match = PyStackRef_FromPyObjectSteal(match_o);
    stack_pointer[-2] = rest;
    stack_pointer[-1] = match;
    DISPATCH();
}
| |
TARGET(CHECK_EXC_MATCH) {
    /* For `except`: test whether the exception on TOS-1 matches the type
     * (or tuple of types) on TOS. Pops the type and pushes a bool. */
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(CHECK_EXC_MATCH);
    _PyStackRef left;
    _PyStackRef right;
    _PyStackRef b;
    right = stack_pointer[-1];
    left = stack_pointer[-2];
    PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
    PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
    assert(PyExceptionInstance_Check(left_o));
    // Reject non-exception match targets (e.g. `except 42:`) with TypeError.
    _PyFrame_SetStackPointer(frame, stack_pointer);
    int err = _PyEval_CheckExceptTypeValid(tstate, right_o);
    stack_pointer = _PyFrame_GetStackPointer(frame);
    if (err < 0) {
        PyStackRef_CLOSE(right);
        goto pop_1_error;
    }
    _PyFrame_SetStackPointer(frame, stack_pointer);
    int res = PyErr_GivenExceptionMatches(left_o, right_o);
    stack_pointer = _PyFrame_GetStackPointer(frame);
    PyStackRef_CLOSE(right);
    b = res ? PyStackRef_True : PyStackRef_False;
    stack_pointer[-1] = b;
    DISPATCH();
}
| |
TARGET(CLEANUP_THROW) {
    /* Handles an exception thrown into a `yield from` / `await` delegation:
     * if it is StopIteration, convert it to (None, stop_value) on the stack
     * so execution continues; otherwise re-raise and unwind. */
    _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
    (void)this_instr;
    next_instr += 1;
    INSTRUCTION_STATS(CLEANUP_THROW);
    _PyStackRef sub_iter_st;
    _PyStackRef last_sent_val_st;
    _PyStackRef exc_value_st;
    _PyStackRef none;
    _PyStackRef value;
    exc_value_st = stack_pointer[-1];
    last_sent_val_st = stack_pointer[-2];
    sub_iter_st = stack_pointer[-3];
    PyObject *exc_value = PyStackRef_AsPyObjectBorrow(exc_value_st);
    // Only reachable when an exception was thrown into this frame.
    assert(throwflag);
    assert(exc_value && PyExceptionInstance_Check(exc_value));
    _PyFrame_SetStackPointer(frame, stack_pointer);
    int matches = PyErr_GivenExceptionMatches(exc_value, PyExc_StopIteration);
    stack_pointer = _PyFrame_GetStackPointer(frame);
    if (matches) {
        // Sub-iterator finished: replace the three inputs with
        // (None, StopIteration.value).
        none = PyStackRef_None;
        value = PyStackRef_FromPyObjectNew(((PyStopIterationObject *)exc_value)->value);
        PyStackRef_CLOSE(sub_iter_st);
        PyStackRef_CLOSE(last_sent_val_st);
        PyStackRef_CLOSE(exc_value_st);
    }
    else {
        // Any other exception is re-raised and unwound from here.
        _PyFrame_SetStackPointer(frame, stack_pointer);
        _PyErr_SetRaisedException(tstate, Py_NewRef(exc_value));
        monitor_reraise(tstate, frame, this_instr);
        stack_pointer = _PyFrame_GetStackPointer(frame);
        goto exception_unwind;
    }
    stack_pointer[-3] = none;
    stack_pointer[-2] = value;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(COMPARE_OP) {
    /* Generic rich comparison (adaptive). oparg layout: bits 5+ hold the
     * Py_LT..Py_GE comparison op, bit 4 requests coercion to bool. */
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(COMPARE_OP);
    PREDICTED(COMPARE_OP);
    _Py_CODEUNIT* const this_instr = next_instr - 2;
    (void)this_instr;
    _PyStackRef left;
    _PyStackRef right;
    _PyStackRef res;
    // _SPECIALIZE_COMPARE_OP
    {
        right = stack_pointer[-1];
        left = stack_pointer[-2];
        uint16_t counter = read_u16(&this_instr[1].cache);
        (void)counter;
        #if ENABLE_SPECIALIZATION
        // When the adaptive counter fires, rewrite this instruction to a
        // specialized variant and re-execute it.
        if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
            next_instr = this_instr;
            _PyFrame_SetStackPointer(frame, stack_pointer);
            _Py_Specialize_CompareOp(left, right, next_instr, oparg);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            DISPATCH_SAME_OPARG();
        }
        OPCODE_DEFERRED_INC(COMPARE_OP);
        ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
        #endif /* ENABLE_SPECIALIZATION */
    }
    // _COMPARE_OP
    {
        PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
        PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
        assert((oparg >> 5) <= Py_GE);
        _PyFrame_SetStackPointer(frame, stack_pointer);
        PyObject *res_o = PyObject_RichCompare(left_o, right_o, oparg >> 5);
        stack_pointer = _PyFrame_GetStackPointer(frame);
        PyStackRef_CLOSE(left);
        PyStackRef_CLOSE(right);
        if (res_o == NULL) goto pop_2_error;
        if (oparg & 16) {
            // Bool coercion requested (e.g. for a following branch).
            stack_pointer += -2;
            assert(WITHIN_STACK_BOUNDS());
            _PyFrame_SetStackPointer(frame, stack_pointer);
            int res_bool = PyObject_IsTrue(res_o);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            Py_DECREF(res_o);
            if (res_bool < 0) goto error;
            res = res_bool ? PyStackRef_True : PyStackRef_False;
        }
        else {
            res = PyStackRef_FromPyObjectSteal(res_o);
            stack_pointer += -2;
            assert(WITHIN_STACK_BOUNDS());
        }
    }
    stack_pointer[0] = res;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(COMPARE_OP_FLOAT) {
    /* COMPARE_OP specialization for two exact floats: compare the raw
     * doubles and map the outcome through the oparg's low-bit mask. */
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(COMPARE_OP_FLOAT);
    static_assert(INLINE_CACHE_ENTRIES_COMPARE_OP == 1, "incorrect cache size");
    _PyStackRef left;
    _PyStackRef right;
    _PyStackRef res;
    // _GUARD_BOTH_FLOAT
    {
        right = stack_pointer[-1];
        left = stack_pointer[-2];
        PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
        PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
        DEOPT_IF(!PyFloat_CheckExact(left_o), COMPARE_OP);
        DEOPT_IF(!PyFloat_CheckExact(right_o), COMPARE_OP);
    }
    /* Skip 1 cache entry */
    // _COMPARE_OP_FLOAT
    {
        PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
        PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
        STAT_INC(COMPARE_OP, hit);
        double dleft = PyFloat_AS_DOUBLE(left_o);
        double dright = PyFloat_AS_DOUBLE(right_o);
        // 1 if NaN, 2 if <, 4 if >, 8 if ==; this matches low four bits of the oparg
        int sign_ish = COMPARISON_BIT(dleft, dright);
        PyStackRef_CLOSE_SPECIALIZED(left, _PyFloat_ExactDealloc);
        PyStackRef_CLOSE_SPECIALIZED(right, _PyFloat_ExactDealloc);
        res = (sign_ish & oparg) ? PyStackRef_True : PyStackRef_False;
        // It's always a bool, so we don't care about oparg & 16.
    }
    stack_pointer[-2] = res;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(COMPARE_OP_INT) {
    /* COMPARE_OP specialization for two exact, single-digit ("compact")
     * ints: compare the machine values directly. */
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(COMPARE_OP_INT);
    static_assert(INLINE_CACHE_ENTRIES_COMPARE_OP == 1, "incorrect cache size");
    _PyStackRef left;
    _PyStackRef right;
    _PyStackRef res;
    // _GUARD_BOTH_INT
    {
        right = stack_pointer[-1];
        left = stack_pointer[-2];
        PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
        PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
        DEOPT_IF(!PyLong_CheckExact(left_o), COMPARE_OP);
        DEOPT_IF(!PyLong_CheckExact(right_o), COMPARE_OP);
    }
    /* Skip 1 cache entry */
    // _COMPARE_OP_INT
    {
        PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
        PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
        // Multi-digit ints fall back to the generic comparison.
        DEOPT_IF(!_PyLong_IsCompact((PyLongObject *)left_o), COMPARE_OP);
        DEOPT_IF(!_PyLong_IsCompact((PyLongObject *)right_o), COMPARE_OP);
        STAT_INC(COMPARE_OP, hit);
        assert(_PyLong_DigitCount((PyLongObject *)left_o) <= 1 &&
               _PyLong_DigitCount((PyLongObject *)right_o) <= 1);
        Py_ssize_t ileft = _PyLong_CompactValue((PyLongObject *)left_o);
        Py_ssize_t iright = _PyLong_CompactValue((PyLongObject *)right_o);
        // 2 if <, 4 if >, 8 if ==; this matches the low 4 bits of the oparg
        int sign_ish = COMPARISON_BIT(ileft, iright);
        PyStackRef_CLOSE_SPECIALIZED(left, (destructor)PyObject_Free);
        PyStackRef_CLOSE_SPECIALIZED(right, (destructor)PyObject_Free);
        res = (sign_ish & oparg) ? PyStackRef_True : PyStackRef_False;
        // It's always a bool, so we don't care about oparg & 16.
    }
    stack_pointer[-2] = res;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(COMPARE_OP_STR) {
    /* COMPARE_OP specialization for two exact unicode strings, limited to
     * == and != (see the oparg asserts below). */
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(COMPARE_OP_STR);
    static_assert(INLINE_CACHE_ENTRIES_COMPARE_OP == 1, "incorrect cache size");
    _PyStackRef left;
    _PyStackRef right;
    _PyStackRef res;
    // _GUARD_BOTH_UNICODE
    {
        right = stack_pointer[-1];
        left = stack_pointer[-2];
        PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
        PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
        DEOPT_IF(!PyUnicode_CheckExact(left_o), COMPARE_OP);
        DEOPT_IF(!PyUnicode_CheckExact(right_o), COMPARE_OP);
    }
    /* Skip 1 cache entry */
    // _COMPARE_OP_STR
    {
        PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
        PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
        STAT_INC(COMPARE_OP, hit);
        int eq = _PyUnicode_Equal(left_o, right_o);
        assert((oparg >> 5) == Py_EQ || (oparg >> 5) == Py_NE);
        PyStackRef_CLOSE_SPECIALIZED(left, _PyUnicode_ExactDealloc);
        PyStackRef_CLOSE_SPECIALIZED(right, _PyUnicode_ExactDealloc);
        assert(eq == 0 || eq == 1);
        assert((oparg & 0xf) == COMPARISON_NOT_EQUALS || (oparg & 0xf) == COMPARISON_EQUALS);
        assert(COMPARISON_NOT_EQUALS + 1 == COMPARISON_EQUALS);
        // Adding eq (0 or 1) selects the EQUALS/NOT_EQUALS bit; masking
        // against oparg yields the requested truth value.
        res = ((COMPARISON_NOT_EQUALS + eq) & oparg) ? PyStackRef_True : PyStackRef_False;
        // It's always a bool, so we don't care about oparg & 16.
    }
    stack_pointer[-2] = res;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(CONTAINS_OP) {
    /* Generic `in` / `not in` (adaptive). oparg's low bit inverts the
     * result for `not in` (see the XOR below). */
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(CONTAINS_OP);
    PREDICTED(CONTAINS_OP);
    _Py_CODEUNIT* const this_instr = next_instr - 2;
    (void)this_instr;
    _PyStackRef left;
    _PyStackRef right;
    _PyStackRef b;
    // _SPECIALIZE_CONTAINS_OP
    {
        right = stack_pointer[-1];
        uint16_t counter = read_u16(&this_instr[1].cache);
        (void)counter;
        #if ENABLE_SPECIALIZATION_FT
        // When the adaptive counter fires, specialize on the container type
        // (dict/set variants) and re-execute.
        if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
            next_instr = this_instr;
            _PyFrame_SetStackPointer(frame, stack_pointer);
            _Py_Specialize_ContainsOp(right, next_instr);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            DISPATCH_SAME_OPARG();
        }
        OPCODE_DEFERRED_INC(CONTAINS_OP);
        ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
        #endif /* ENABLE_SPECIALIZATION_FT */
    }
    // _CONTAINS_OP
    {
        left = stack_pointer[-2];
        PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
        PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
        _PyFrame_SetStackPointer(frame, stack_pointer);
        int res = PySequence_Contains(right_o, left_o);
        stack_pointer = _PyFrame_GetStackPointer(frame);
        PyStackRef_CLOSE(left);
        PyStackRef_CLOSE(right);
        if (res < 0) goto pop_2_error;
        // XOR with oparg flips the result for `not in` (oparg == 1).
        b = (res ^ oparg) ? PyStackRef_True : PyStackRef_False;
    }
    stack_pointer[-2] = b;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(CONTAINS_OP_DICT) {
    /* CONTAINS_OP specialization when the container is exactly a dict:
     * uses PyDict_Contains directly. */
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(CONTAINS_OP_DICT);
    static_assert(INLINE_CACHE_ENTRIES_CONTAINS_OP == 1, "incorrect cache size");
    _PyStackRef left;
    _PyStackRef right;
    _PyStackRef b;
    /* Skip 1 cache entry */
    right = stack_pointer[-1];
    left = stack_pointer[-2];
    PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
    PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
    DEOPT_IF(!PyDict_CheckExact(right_o), CONTAINS_OP);
    STAT_INC(CONTAINS_OP, hit);
    _PyFrame_SetStackPointer(frame, stack_pointer);
    int res = PyDict_Contains(right_o, left_o);
    stack_pointer = _PyFrame_GetStackPointer(frame);
    PyStackRef_CLOSE(left);
    PyStackRef_CLOSE(right);
    if (res < 0) goto pop_2_error;
    // XOR with oparg flips the result for `not in` (oparg == 1).
    b = (res ^ oparg) ? PyStackRef_True : PyStackRef_False;
    stack_pointer[-2] = b;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(CONTAINS_OP_SET) {
    /* CONTAINS_OP specialization when the container is exactly a set or
     * frozenset: uses _PySet_Contains directly. */
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(CONTAINS_OP_SET);
    static_assert(INLINE_CACHE_ENTRIES_CONTAINS_OP == 1, "incorrect cache size");
    _PyStackRef left;
    _PyStackRef right;
    _PyStackRef b;
    /* Skip 1 cache entry */
    right = stack_pointer[-1];
    left = stack_pointer[-2];
    PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
    PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
    DEOPT_IF(!(PySet_CheckExact(right_o) || PyFrozenSet_CheckExact(right_o)), CONTAINS_OP);
    STAT_INC(CONTAINS_OP, hit);
    // Note: both set and frozenset use the same seq_contains method!
    _PyFrame_SetStackPointer(frame, stack_pointer);
    int res = _PySet_Contains((PySetObject *)right_o, left_o);
    stack_pointer = _PyFrame_GetStackPointer(frame);
    PyStackRef_CLOSE(left);
    PyStackRef_CLOSE(right);
    if (res < 0) goto pop_2_error;
    // XOR with oparg flips the result for `not in` (oparg == 1).
    b = (res ^ oparg) ? PyStackRef_True : PyStackRef_False;
    stack_pointer[-2] = b;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(CONVERT_VALUE) {
    /* f-string conversion: apply str()/repr()/ascii() to TOS, selected by
     * oparg (FVC_STR..FVC_ASCII) via the _PyEval_ConversionFuncs table. */
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(CONVERT_VALUE);
    _PyStackRef value;
    _PyStackRef result;
    value = stack_pointer[-1];
    conversion_func conv_fn;
    assert(oparg >= FVC_STR && oparg <= FVC_ASCII);
    conv_fn = _PyEval_ConversionFuncs[oparg];
    _PyFrame_SetStackPointer(frame, stack_pointer);
    PyObject *result_o = conv_fn(PyStackRef_AsPyObjectBorrow(value));
    stack_pointer = _PyFrame_GetStackPointer(frame);
    PyStackRef_CLOSE(value);
    if (result_o == NULL) goto pop_1_error;
    result = PyStackRef_FromPyObjectSteal(result_o);
    stack_pointer[-1] = result;
    DISPATCH();
}
| |
TARGET(COPY) {
    /* Push a duplicate of the stack item oparg entries below TOS
     * (oparg == 1 duplicates TOS itself). */
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(COPY);
    _PyStackRef bottom;
    _PyStackRef top;
    bottom = stack_pointer[-1 - (oparg-1)];
    assert(oparg > 0);
    top = PyStackRef_DUP(bottom);
    stack_pointer[0] = top;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(COPY_FREE_VARS) {
    /* Copy the current function's closure cells into the free-variable
     * slots at the end of the frame's localsplus array. */
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(COPY_FREE_VARS);
    /* Copy closure variables to free variables */
    PyCodeObject *co = _PyFrame_GetCode(frame);
    assert(PyStackRef_FunctionCheck(frame->f_funcobj));
    PyFunctionObject *func = (PyFunctionObject *)PyStackRef_AsPyObjectBorrow(frame->f_funcobj);
    PyObject *closure = func->func_closure;
    assert(oparg == co->co_nfreevars);
    // Free vars occupy the last oparg slots of localsplus.
    int offset = co->co_nlocalsplus - oparg;
    for (int i = 0; i < oparg; ++i) {
        PyObject *o = PyTuple_GET_ITEM(closure, i);
        frame->localsplus[offset + i] = PyStackRef_FromPyObjectNew(o);
    }
    DISPATCH();
}
| |
TARGET(DELETE_ATTR) {
    /* `del obj.name`: name is looked up in co_names by oparg. */
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(DELETE_ATTR);
    _PyStackRef owner;
    owner = stack_pointer[-1];
    PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
    _PyFrame_SetStackPointer(frame, stack_pointer);
    int err = PyObject_DelAttr(PyStackRef_AsPyObjectBorrow(owner), name);
    stack_pointer = _PyFrame_GetStackPointer(frame);
    PyStackRef_CLOSE(owner);
    if (err) goto pop_1_error;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(DELETE_DEREF) {
    /* `del` on a cell (closure) variable: empty the cell at local slot
     * oparg, raising UnboundLocalError/NameError if it was already empty. */
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(DELETE_DEREF);
    PyObject *cell = PyStackRef_AsPyObjectBorrow(GETLOCAL(oparg));
    // Can't use ERROR_IF here.
    // Fortunately we don't need its superpower.
    // Atomically take the cell's current contents, leaving it empty.
    PyObject *oldobj = PyCell_SwapTakeRef((PyCellObject *)cell, NULL);
    if (oldobj == NULL) {
        _PyFrame_SetStackPointer(frame, stack_pointer);
        _PyEval_FormatExcUnbound(tstate, _PyFrame_GetCode(frame), oparg);
        stack_pointer = _PyFrame_GetStackPointer(frame);
        goto error;
    }
    Py_DECREF(oldobj);
    DISPATCH();
}
| |
TARGET(DELETE_FAST) {
    /* `del` on a local variable: clear local slot oparg, raising
     * UnboundLocalError if it is already unbound. */
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(DELETE_FAST);
    _PyStackRef v = GETLOCAL(oparg);
    if (PyStackRef_IsNull(v)) {
        _PyFrame_SetStackPointer(frame, stack_pointer);
        _PyEval_FormatExcCheckArg(tstate, PyExc_UnboundLocalError,
            UNBOUNDLOCAL_ERROR_MSG,
            PyTuple_GetItem(_PyFrame_GetCode(frame)->co_localsplusnames, oparg)
        );
        stack_pointer = _PyFrame_GetStackPointer(frame);
        goto error;
    }
    // SETLOCAL also releases the previous value in the slot.
    SETLOCAL(oparg, PyStackRef_NULL);
    DISPATCH();
}
| |
TARGET(DELETE_GLOBAL) {
    /* `del` on a global: remove the name (from co_names[oparg]) from the
     * globals dict, raising NameError if it is not present. */
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(DELETE_GLOBAL);
    PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
    _PyFrame_SetStackPointer(frame, stack_pointer);
    int err = PyDict_Pop(GLOBALS(), name, NULL);
    stack_pointer = _PyFrame_GetStackPointer(frame);
    // Can't use ERROR_IF here.
    if (err < 0) {
        goto error;
    }
    // PyDict_Pop returns 0 when the key was absent: report NameError.
    if (err == 0) {
        _PyFrame_SetStackPointer(frame, stack_pointer);
        _PyEval_FormatExcCheckArg(tstate, PyExc_NameError,
            NAME_ERROR_MSG, name);
        stack_pointer = _PyFrame_GetStackPointer(frame);
        goto error;
    }
    DISPATCH();
}
| |
TARGET(DELETE_NAME) {
    /* `del` on a name in the local namespace mapping (class/module body):
     * delete co_names[oparg] from LOCALS(), raising NameError if absent. */
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(DELETE_NAME);
    PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
    PyObject *ns = LOCALS();
    int err;
    // LOCALS() can be NULL (e.g. frames executed without a namespace).
    if (ns == NULL) {
        _PyFrame_SetStackPointer(frame, stack_pointer);
        _PyErr_Format(tstate, PyExc_SystemError,
            "no locals when deleting %R", name);
        stack_pointer = _PyFrame_GetStackPointer(frame);
        goto error;
    }
    _PyFrame_SetStackPointer(frame, stack_pointer);
    err = PyObject_DelItem(ns, name);
    stack_pointer = _PyFrame_GetStackPointer(frame);
    // Can't use ERROR_IF here.
    if (err != 0) {
        // Any deletion failure is reported as NameError for this opcode.
        _PyFrame_SetStackPointer(frame, stack_pointer);
        _PyEval_FormatExcCheckArg(tstate, PyExc_NameError,
            NAME_ERROR_MSG,
            name);
        stack_pointer = _PyFrame_GetStackPointer(frame);
        goto error;
    }
    DISPATCH();
}
| |
TARGET(DELETE_SUBSCR) {
    /* `del container[sub]`: pops sub and container, errors propagate. */
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(DELETE_SUBSCR);
    _PyStackRef container;
    _PyStackRef sub;
    sub = stack_pointer[-1];
    container = stack_pointer[-2];
    /* del container[sub] */
    _PyFrame_SetStackPointer(frame, stack_pointer);
    int err = PyObject_DelItem(PyStackRef_AsPyObjectBorrow(container),
        PyStackRef_AsPyObjectBorrow(sub));
    stack_pointer = _PyFrame_GetStackPointer(frame);
    PyStackRef_CLOSE(container);
    PyStackRef_CLOSE(sub);
    if (err) goto pop_2_error;
    stack_pointer += -2;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(DICT_MERGE) {
    /* Merge TOS into the dict oparg entries down (used when building the
     * **kwargs dict for a call); duplicate keys raise a TypeError formatted
     * against the callable found further down the stack. */
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(DICT_MERGE);
    _PyStackRef callable;
    _PyStackRef dict;
    _PyStackRef update;
    update = stack_pointer[-1];
    dict = stack_pointer[-2 - (oparg - 1)];
    callable = stack_pointer[-5 - (oparg - 1)];
    PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable);
    PyObject *dict_o = PyStackRef_AsPyObjectBorrow(dict);
    PyObject *update_o = PyStackRef_AsPyObjectBorrow(update);
    // Mode 2: raise on duplicate keys (kwargs semantics).
    _PyFrame_SetStackPointer(frame, stack_pointer);
    int err = _PyDict_MergeEx(dict_o, update_o, 2);
    stack_pointer = _PyFrame_GetStackPointer(frame);
    if (err < 0) {
        // Re-format the error to mention the callable being invoked.
        _PyFrame_SetStackPointer(frame, stack_pointer);
        _PyEval_FormatKwargsError(tstate, callable_o, update_o);
        stack_pointer = _PyFrame_GetStackPointer(frame);
        PyStackRef_CLOSE(update);
        goto pop_1_error;
    }
    PyStackRef_CLOSE(update);
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(DICT_UPDATE) {
    /* Update the dict oparg entries down with TOS (used when building a
     * dict display with ** unpacking); a non-mapping raises TypeError. */
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(DICT_UPDATE);
    _PyStackRef dict;
    _PyStackRef update;
    update = stack_pointer[-1];
    dict = stack_pointer[-2 - (oparg - 1)];
    PyObject *dict_o = PyStackRef_AsPyObjectBorrow(dict);
    PyObject *update_o = PyStackRef_AsPyObjectBorrow(update);
    _PyFrame_SetStackPointer(frame, stack_pointer);
    int err = PyDict_Update(dict_o, update_o);
    stack_pointer = _PyFrame_GetStackPointer(frame);
    if (err < 0) {
        // PyDict_Update raises AttributeError (missing "keys") for
        // non-mappings; rewrite it as the user-facing TypeError.
        _PyFrame_SetStackPointer(frame, stack_pointer);
        int matches = _PyErr_ExceptionMatches(tstate, PyExc_AttributeError);
        stack_pointer = _PyFrame_GetStackPointer(frame);
        if (matches) {
            _PyFrame_SetStackPointer(frame, stack_pointer);
            _PyErr_Format(tstate, PyExc_TypeError,
                "'%.200s' object is not a mapping",
                Py_TYPE(update_o)->tp_name);
            stack_pointer = _PyFrame_GetStackPointer(frame);
        }
        PyStackRef_CLOSE(update);
        goto pop_1_error;
    }
    PyStackRef_CLOSE(update);
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
        TARGET(END_ASYNC_FOR) {
            /* Terminate an `async for` loop: stack holds [awaitable, exc].
             * If exc is a StopAsyncIteration the loop ended normally — pop
             * both and continue; otherwise re-raise exc and unwind. */
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            (void)this_instr;
            next_instr += 1;
            INSTRUCTION_STATS(END_ASYNC_FOR);
            _PyStackRef awaitable_st;
            _PyStackRef exc_st;
            exc_st = stack_pointer[-1];
            awaitable_st = stack_pointer[-2];
            PyObject *exc = PyStackRef_AsPyObjectBorrow(exc_st);
            assert(exc && PyExceptionInstance_Check(exc));
            _PyFrame_SetStackPointer(frame, stack_pointer);
            int matches = PyErr_GivenExceptionMatches(exc, PyExc_StopAsyncIteration);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            if (matches) {
                // Normal exhaustion: discard both stack items.
                PyStackRef_CLOSE(awaitable_st);
                PyStackRef_CLOSE(exc_st);
            }
            else {
                // Re-raise: _PyErr_SetRaisedException steals a reference,
                // so take one first (exc is only borrowed here).
                Py_INCREF(exc);
                _PyFrame_SetStackPointer(frame, stack_pointer);
                _PyErr_SetRaisedException(tstate, exc);
                monitor_reraise(tstate, frame, this_instr);
                stack_pointer = _PyFrame_GetStackPointer(frame);
                goto exception_unwind;
            }
            stack_pointer += -2;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(END_FOR) {
            /* Pop and release the single value at TOS (emitted after a
             * for-loop body to discard the leftover iteration value). */
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(END_FOR);
            _PyStackRef value;
            value = stack_pointer[-1];
            PyStackRef_CLOSE(value);
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(END_SEND) {
            /* Collapse [receiver, value] -> [value]: drop the receiver
             * (the sub-generator/awaitable) and keep the sent-back value. */
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(END_SEND);
            _PyStackRef receiver;
            _PyStackRef value;
            _PyStackRef val;
            value = stack_pointer[-1];
            receiver = stack_pointer[-2];
            (void)receiver;
            val = value;
            PyStackRef_CLOSE(receiver);
            stack_pointer[-2] = val;
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(ENTER_EXECUTOR) {
            /* Transfer control to a tier-2 executor attached to this code
             * object (low byte of oparg indexes co_executors). If the eval
             * breaker is set, fall back to the original tier-1 instruction
             * instead, to avoid re-entering tier 2 in a loop. */
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            (void)this_instr;
            next_instr += 1;
            INSTRUCTION_STATS(ENTER_EXECUTOR);
            #ifdef _Py_TIER2
            PyCodeObject *code = _PyFrame_GetCode(frame);
            _PyExecutorObject *executor = code->co_executors->executors[oparg & 255];
            assert(executor->vm_data.index == INSTR_OFFSET() - 1);
            assert(executor->vm_data.code == code);
            assert(executor->vm_data.valid);
            assert(tstate->previous_executor == NULL);
            /* If the eval breaker is set then stay in tier 1.
             * This avoids any potentially infinite loops
             * involving _RESUME_CHECK */
            if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) {
                // Restore the opcode/oparg this executor replaced and
                // re-dispatch the original instruction in tier 1.
                opcode = executor->vm_data.opcode;
                oparg = (oparg & ~255) | executor->vm_data.oparg;
                next_instr = this_instr;
                if (_PyOpcode_Caches[_PyOpcode_Deopt[opcode]]) {
                    // Keep the adaptive counter paused so the instruction
                    // does not immediately respecialize.
                    PAUSE_ADAPTIVE_COUNTER(this_instr[1].counter);
                }
                DISPATCH_GOTO();
            }
            tstate->previous_executor = Py_None;
            Py_INCREF(executor);
            GOTO_TIER_TWO(executor);
            #else
            Py_FatalError("ENTER_EXECUTOR is not supported in this build");
            #endif /* _Py_TIER2 */
            DISPATCH();
        }
| |
        TARGET(EXIT_INIT_CHECK) {
            /* Verify that an inlined __init__ call returned None; raise
             * TypeError otherwise, matching the normal call-path check. */
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(EXIT_INIT_CHECK);
            _PyStackRef should_be_none;
            should_be_none = stack_pointer[-1];
            assert(STACK_LEVEL() == 2);
            if (!PyStackRef_Is(should_be_none, PyStackRef_None)) {
                _PyFrame_SetStackPointer(frame, stack_pointer);
                PyErr_Format(PyExc_TypeError,
                "__init__() should return None, not '%.200s'",
                Py_TYPE(PyStackRef_AsPyObjectBorrow(should_be_none))->tp_name);
                stack_pointer = _PyFrame_GetStackPointer(frame);
                goto error;
            }
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(EXTENDED_ARG) {
            /* Prefix instruction: shift the accumulated oparg left 8 bits,
             * OR in the next instruction's arg, and dispatch directly to
             * that instruction with the widened oparg. */
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(EXTENDED_ARG);
            assert(oparg);
            opcode = next_instr->op.code;
            oparg = oparg << 8 | next_instr->op.arg;
            PRE_DISPATCH_GOTO();
            DISPATCH_GOTO();
        }
| |
        TARGET(FORMAT_SIMPLE) {
            /* Replace TOS with format(TOS, None). Exact str instances are
             * passed through untouched since format() is the identity. */
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(FORMAT_SIMPLE);
            _PyStackRef value;
            _PyStackRef res;
            value = stack_pointer[-1];
            PyObject *value_o = PyStackRef_AsPyObjectBorrow(value);
            /* If value is a unicode object, then we know the result
             * of format(value) is value itself. */
            if (!PyUnicode_CheckExact(value_o)) {
                _PyFrame_SetStackPointer(frame, stack_pointer);
                PyObject *res_o = PyObject_Format(value_o, NULL);
                stack_pointer = _PyFrame_GetStackPointer(frame);
                PyStackRef_CLOSE(value);
                if (res_o == NULL) goto pop_1_error;
                res = PyStackRef_FromPyObjectSteal(res_o);
            }
            else {
                // Identity case: reuse the existing reference.
                res = value;
            }
            stack_pointer[-1] = res;
            DISPATCH();
        }
| |
        TARGET(FORMAT_WITH_SPEC) {
            /* Replace [value, fmt_spec] with format(value, fmt_spec). */
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(FORMAT_WITH_SPEC);
            _PyStackRef value;
            _PyStackRef fmt_spec;
            _PyStackRef res;
            fmt_spec = stack_pointer[-1];
            value = stack_pointer[-2];
            _PyFrame_SetStackPointer(frame, stack_pointer);
            PyObject *res_o = PyObject_Format(PyStackRef_AsPyObjectBorrow(value), PyStackRef_AsPyObjectBorrow(fmt_spec));
            stack_pointer = _PyFrame_GetStackPointer(frame);
            PyStackRef_CLOSE(value);
            PyStackRef_CLOSE(fmt_spec);
            if (res_o == NULL) goto pop_2_error;
            res = PyStackRef_FromPyObjectSteal(res_o);
            stack_pointer[-2] = res;
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(FOR_ITER) {
            /* Generic (adaptive) for-loop step: try to specialize first,
             * then call tp_iternext on the iterator at TOS. On a yielded
             * value, push it; on exhaustion, pop the iterator and jump
             * past the loop's END_FOR/POP_TOP epilogue. */
            frame->instr_ptr = next_instr;
            next_instr += 2;
            INSTRUCTION_STATS(FOR_ITER);
            PREDICTED(FOR_ITER);
            _Py_CODEUNIT* const this_instr = next_instr - 2;
            (void)this_instr;
            _PyStackRef iter;
            _PyStackRef next;
            // _SPECIALIZE_FOR_ITER
            {
                iter = stack_pointer[-1];
                uint16_t counter = read_u16(&this_instr[1].cache);
                (void)counter;
                #if ENABLE_SPECIALIZATION
                if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                    // Counter hit zero: attempt to rewrite this instruction
                    // into a specialized variant, then re-execute it.
                    next_instr = this_instr;
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    _Py_Specialize_ForIter(iter, next_instr, oparg);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    DISPATCH_SAME_OPARG();
                }
                OPCODE_DEFERRED_INC(FOR_ITER);
                ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
                #endif /* ENABLE_SPECIALIZATION */
            }
            // _FOR_ITER
            {
                /* before: [iter]; after: [iter, iter()] *or* [] (and jump over END_FOR.) */
                PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
                _PyFrame_SetStackPointer(frame, stack_pointer);
                PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
                stack_pointer = _PyFrame_GetStackPointer(frame);
                if (next_o == NULL) {
                    if (_PyErr_Occurred(tstate)) {
                        // Only StopIteration means "done"; anything else
                        // propagates. StopIteration is reported to
                        // monitoring, then cleared.
                        _PyFrame_SetStackPointer(frame, stack_pointer);
                        int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration);
                        stack_pointer = _PyFrame_GetStackPointer(frame);
                        if (!matches) {
                            goto error;
                        }
                        _PyFrame_SetStackPointer(frame, stack_pointer);
                        _PyEval_MonitorRaise(tstate, frame, this_instr);
                        _PyErr_Clear(tstate);
                        stack_pointer = _PyFrame_GetStackPointer(frame);
                    }
                    /* iterator ended normally */
                    assert(next_instr[oparg].op.code == END_FOR ||
                    next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
                    PyStackRef_CLOSE(iter);
                    STACK_SHRINK(1);
                    /* Jump forward oparg, then skip following END_FOR and POP_TOP instruction */
                    JUMPBY(oparg + 2);
                    DISPATCH();
                }
                next = PyStackRef_FromPyObjectSteal(next_o);
                // Common case: no jump, leave it to the code generator
            }
            stack_pointer[0] = next;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(FOR_ITER_GEN) {
            /* Specialized FOR_ITER for an exact generator: instead of
             * calling tp_iternext, push the generator's frame and run it
             * inline. Deopts if PEP 523 frame eval is active, the iterator
             * is not an exact PyGen, or the generator is already running. */
            frame->instr_ptr = next_instr;
            next_instr += 2;
            INSTRUCTION_STATS(FOR_ITER_GEN);
            static_assert(INLINE_CACHE_ENTRIES_FOR_ITER == 1, "incorrect cache size");
            _PyStackRef iter;
            _PyInterpreterFrame *gen_frame;
            _PyInterpreterFrame *new_frame;
            /* Skip 1 cache entry */
            // _CHECK_PEP_523
            {
                DEOPT_IF(tstate->interp->eval_frame, FOR_ITER);
            }
            // _FOR_ITER_GEN_FRAME
            {
                iter = stack_pointer[-1];
                PyGenObject *gen = (PyGenObject *)PyStackRef_AsPyObjectBorrow(iter);
                DEOPT_IF(Py_TYPE(gen) != &PyGen_Type, FOR_ITER);
                DEOPT_IF(gen->gi_frame_state >= FRAME_EXECUTING, FOR_ITER);
                STAT_INC(FOR_ITER, hit);
                gen_frame = &gen->gi_iframe;
                // A for-loop "send"s None into the generator.
                _PyFrame_StackPush(gen_frame, PyStackRef_None);
                gen->gi_frame_state = FRAME_EXECUTING;
                // Link the generator's exception state into the thread's
                // exc_info chain for the duration of its execution.
                gen->gi_exc_state.previous_item = tstate->exc_info;
                tstate->exc_info = &gen->gi_exc_state;
                gen_frame->previous = frame;
                // oparg is the return offset from the next instruction.
                frame->return_offset = (uint16_t)( 2 + oparg);
            }
            // _PUSH_FRAME
            {
                new_frame = gen_frame;
                // Write it out explicitly because it's subtly different.
                // Eventually this should be the only occurrence of this code.
                assert(tstate->interp->eval_frame == NULL);
                _PyInterpreterFrame *temp = new_frame;
                _PyFrame_SetStackPointer(frame, stack_pointer);
                assert(new_frame->previous == frame || new_frame->previous->previous == frame);
                CALL_STAT_INC(inlined_py_calls);
                frame = tstate->current_frame = temp;
                tstate->py_recursion_remaining--;
                LOAD_SP();
                LOAD_IP(0);
                LLTRACE_RESUME_FRAME();
            }
            DISPATCH();
        }
| |
        TARGET(FOR_ITER_LIST) {
            /* Specialized FOR_ITER for a list iterator: fetch the next
             * list element directly by index, or jump past the loop when
             * exhausted. Deopts when TOS is not an exact list iterator. */
            frame->instr_ptr = next_instr;
            next_instr += 2;
            INSTRUCTION_STATS(FOR_ITER_LIST);
            static_assert(INLINE_CACHE_ENTRIES_FOR_ITER == 1, "incorrect cache size");
            _PyStackRef iter;
            _PyStackRef next;
            /* Skip 1 cache entry */
            // _ITER_CHECK_LIST
            {
                iter = stack_pointer[-1];
                DEOPT_IF(Py_TYPE(PyStackRef_AsPyObjectBorrow(iter)) != &PyListIter_Type, FOR_ITER);
            }
            // _ITER_JUMP_LIST
            {
                PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
                _PyListIterObject *it = (_PyListIterObject *)iter_o;
                assert(Py_TYPE(iter_o) == &PyListIter_Type);
                STAT_INC(FOR_ITER, hit);
                PyListObject *seq = it->it_seq;
                // size_t cast also catches a negative (already-exhausted)
                // index in a single comparison.
                if (seq == NULL || (size_t)it->it_index >= (size_t)PyList_GET_SIZE(seq)) {
                    it->it_index = -1;
                    #ifndef Py_GIL_DISABLED
                    if (seq != NULL) {
                        // Detach the sequence eagerly (single-threaded
                        // builds only).
                        it->it_seq = NULL;
                        Py_DECREF(seq);
                    }
                    #endif
                    PyStackRef_CLOSE(iter);
                    STACK_SHRINK(1);
                    /* Jump forward oparg, then skip following END_FOR and POP_TOP instructions */
                    JUMPBY(oparg + 2);
                    DISPATCH();
                }
            }
            // _ITER_NEXT_LIST
            {
                PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
                _PyListIterObject *it = (_PyListIterObject *)iter_o;
                assert(Py_TYPE(iter_o) == &PyListIter_Type);
                PyListObject *seq = it->it_seq;
                assert(seq);
                assert(it->it_index < PyList_GET_SIZE(seq));
                // New reference to the element; index advances as a side
                // effect.
                next = PyStackRef_FromPyObjectNew(PyList_GET_ITEM(seq, it->it_index++));
            }
            stack_pointer[0] = next;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(FOR_ITER_RANGE) {
            /* Specialized FOR_ITER for a range iterator: compute the next
             * value arithmetically from (start, step, len) and box it as a
             * PyLong. Deopts when TOS is not an exact range iterator. */
            frame->instr_ptr = next_instr;
            next_instr += 2;
            INSTRUCTION_STATS(FOR_ITER_RANGE);
            static_assert(INLINE_CACHE_ENTRIES_FOR_ITER == 1, "incorrect cache size");
            _PyStackRef iter;
            _PyStackRef next;
            /* Skip 1 cache entry */
            // _ITER_CHECK_RANGE
            {
                iter = stack_pointer[-1];
                _PyRangeIterObject *r = (_PyRangeIterObject *)PyStackRef_AsPyObjectBorrow(iter);
                DEOPT_IF(Py_TYPE(r) != &PyRangeIter_Type, FOR_ITER);
            }
            // _ITER_JUMP_RANGE
            {
                _PyRangeIterObject *r = (_PyRangeIterObject *)PyStackRef_AsPyObjectBorrow(iter);
                assert(Py_TYPE(r) == &PyRangeIter_Type);
                STAT_INC(FOR_ITER, hit);
                if (r->len <= 0) {
                    // Exhausted: pop the iterator and leave the loop.
                    STACK_SHRINK(1);
                    PyStackRef_CLOSE(iter);
                    // Jump over END_FOR and POP_TOP instructions.
                    JUMPBY(oparg + 2);
                    DISPATCH();
                }
            }
            // _ITER_NEXT_RANGE
            {
                _PyRangeIterObject *r = (_PyRangeIterObject *)PyStackRef_AsPyObjectBorrow(iter);
                assert(Py_TYPE(r) == &PyRangeIter_Type);
                assert(r->len > 0);
                long value = r->start;
                // Advance the iterator state before the allocation below.
                r->start = value + r->step;
                r->len--;
                PyObject *res = PyLong_FromLong(value);
                if (res == NULL) goto error;
                next = PyStackRef_FromPyObjectSteal(res);
            }
            stack_pointer[0] = next;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(FOR_ITER_TUPLE) {
            /* Specialized FOR_ITER for a tuple iterator: fetch the next
             * tuple element directly by index, or jump past the loop when
             * exhausted. Deopts when TOS is not an exact tuple iterator. */
            frame->instr_ptr = next_instr;
            next_instr += 2;
            INSTRUCTION_STATS(FOR_ITER_TUPLE);
            static_assert(INLINE_CACHE_ENTRIES_FOR_ITER == 1, "incorrect cache size");
            _PyStackRef iter;
            _PyStackRef next;
            /* Skip 1 cache entry */
            // _ITER_CHECK_TUPLE
            {
                iter = stack_pointer[-1];
                DEOPT_IF(Py_TYPE(PyStackRef_AsPyObjectBorrow(iter)) != &PyTupleIter_Type, FOR_ITER);
            }
            // _ITER_JUMP_TUPLE
            {
                PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
                _PyTupleIterObject *it = (_PyTupleIterObject *)iter_o;
                assert(Py_TYPE(iter_o) == &PyTupleIter_Type);
                STAT_INC(FOR_ITER, hit);
                PyTupleObject *seq = it->it_seq;
                if (seq == NULL || it->it_index >= PyTuple_GET_SIZE(seq)) {
                    if (seq != NULL) {
                        // Detach the sequence so the iterator stays
                        // exhausted.
                        it->it_seq = NULL;
                        Py_DECREF(seq);
                    }
                    PyStackRef_CLOSE(iter);
                    STACK_SHRINK(1);
                    /* Jump forward oparg, then skip following END_FOR and POP_TOP instructions */
                    JUMPBY(oparg + 2);
                    DISPATCH();
                }
            }
            // _ITER_NEXT_TUPLE
            {
                PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
                _PyTupleIterObject *it = (_PyTupleIterObject *)iter_o;
                assert(Py_TYPE(iter_o) == &PyTupleIter_Type);
                PyTupleObject *seq = it->it_seq;
                assert(seq);
                assert(it->it_index < PyTuple_GET_SIZE(seq));
                // New reference to the element; index advances as a side
                // effect.
                next = PyStackRef_FromPyObjectNew(PyTuple_GET_ITEM(seq, it->it_index++));
            }
            stack_pointer[0] = next;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(GET_AITER) {
            /* `async for` setup: replace TOS with obj.__aiter__(), raising
             * TypeError if the object has no am_aiter slot or the result
             * lacks an am_anext slot. */
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(GET_AITER);
            _PyStackRef obj;
            _PyStackRef iter;
            obj = stack_pointer[-1];
            unaryfunc getter = NULL;
            PyObject *obj_o = PyStackRef_AsPyObjectBorrow(obj);
            PyObject *iter_o;
            PyTypeObject *type = Py_TYPE(obj_o);
            if (type->tp_as_async != NULL) {
                getter = type->tp_as_async->am_aiter;
            }
            if (getter == NULL) {
                // No __aiter__: not an async iterable.
                _PyFrame_SetStackPointer(frame, stack_pointer);
                _PyErr_Format(tstate, PyExc_TypeError,
                "'async for' requires an object with "
                "__aiter__ method, got %.100s",
                type->tp_name);
                stack_pointer = _PyFrame_GetStackPointer(frame);
                PyStackRef_CLOSE(obj);
                goto pop_1_error;
            }
            _PyFrame_SetStackPointer(frame, stack_pointer);
            iter_o = (*getter)(obj_o);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            PyStackRef_CLOSE(obj);
            if (iter_o == NULL) goto pop_1_error;
            if (Py_TYPE(iter_o)->tp_as_async == NULL ||
                Py_TYPE(iter_o)->tp_as_async->am_anext == NULL) {
                // __aiter__ returned something without __anext__.
                // The slot has already been popped conceptually, so adjust
                // the stack before raising.
                stack_pointer += -1;
                assert(WITHIN_STACK_BOUNDS());
                _PyFrame_SetStackPointer(frame, stack_pointer);
                _PyErr_Format(tstate, PyExc_TypeError,
                "'async for' received an object from __aiter__ "
                "that does not implement __anext__: %.100s",
                Py_TYPE(iter_o)->tp_name);
                stack_pointer = _PyFrame_GetStackPointer(frame);
                Py_DECREF(iter_o);
                goto error;
            }
            iter = PyStackRef_FromPyObjectSteal(iter_o);
            stack_pointer[-1] = iter;
            DISPATCH();
        }
| |
        TARGET(GET_ANEXT) {
            /* Push the awaitable produced by calling __anext__ on the
             * async iterator at TOS (via the _PyEval_GetANext helper). */
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(GET_ANEXT);
            _PyStackRef aiter;
            _PyStackRef awaitable;
            aiter = stack_pointer[-1];
            _PyFrame_SetStackPointer(frame, stack_pointer);
            PyObject *awaitable_o = _PyEval_GetANext(PyStackRef_AsPyObjectBorrow(aiter));
            stack_pointer = _PyFrame_GetStackPointer(frame);
            if (awaitable_o == NULL) {
                goto error;
            }
            awaitable = PyStackRef_FromPyObjectSteal(awaitable_o);
            stack_pointer[0] = awaitable;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(GET_AWAITABLE) {
            /* Replace TOS with the awaitable derived from it; oparg is
             * forwarded to _PyEval_GetAwaitable, which uses it to phrase
             * context-specific errors. */
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(GET_AWAITABLE);
            _PyStackRef iterable;
            _PyStackRef iter;
            iterable = stack_pointer[-1];
            _PyFrame_SetStackPointer(frame, stack_pointer);
            PyObject *iter_o = _PyEval_GetAwaitable(PyStackRef_AsPyObjectBorrow(iterable), oparg);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            PyStackRef_CLOSE(iterable);
            if (iter_o == NULL) goto pop_1_error;
            iter = PyStackRef_FromPyObjectSteal(iter_o);
            stack_pointer[-1] = iter;
            DISPATCH();
        }
| |
        TARGET(GET_ITER) {
            /* Replace TOS with iter(TOS). */
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(GET_ITER);
            _PyStackRef iterable;
            _PyStackRef iter;
            iterable = stack_pointer[-1];
            /* before: [obj]; after [getiter(obj)] */
            _PyFrame_SetStackPointer(frame, stack_pointer);
            PyObject *iter_o = PyObject_GetIter(PyStackRef_AsPyObjectBorrow(iterable));
            stack_pointer = _PyFrame_GetStackPointer(frame);
            PyStackRef_CLOSE(iterable);
            if (iter_o == NULL) goto pop_1_error;
            iter = PyStackRef_FromPyObjectSteal(iter_o);
            stack_pointer[-1] = iter;
            DISPATCH();
        }
| |
        TARGET(GET_LEN) {
            /* Push len(TOS) as a PyLong without popping TOS. */
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(GET_LEN);
            _PyStackRef obj;
            _PyStackRef len;
            obj = stack_pointer[-1];
            // PUSH(len(TOS))
            _PyFrame_SetStackPointer(frame, stack_pointer);
            Py_ssize_t len_i = PyObject_Length(PyStackRef_AsPyObjectBorrow(obj));
            stack_pointer = _PyFrame_GetStackPointer(frame);
            if (len_i < 0) goto error;
            PyObject *len_o = PyLong_FromSsize_t(len_i);
            if (len_o == NULL) goto error;
            len = PyStackRef_FromPyObjectSteal(len_o);
            stack_pointer[0] = len;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(GET_YIELD_FROM_ITER) {
            /* `yield from` / await setup: coroutines and generators are
             * passed through as-is; anything else is replaced by iter(obj).
             * Yielding from a coroutine is only allowed inside a coroutine
             * (or iterable-coroutine) code object. */
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(GET_YIELD_FROM_ITER);
            _PyStackRef iterable;
            _PyStackRef iter;
            iterable = stack_pointer[-1];
            /* before: [obj]; after [getiter(obj)] */
            PyObject *iterable_o = PyStackRef_AsPyObjectBorrow(iterable);
            if (PyCoro_CheckExact(iterable_o)) {
                /* `iterable` is a coroutine */
                if (!(_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_ITERABLE_COROUTINE))) {
                    /* and it is used in a 'yield from' expression of a
                    regular generator. */
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    _PyErr_SetString(tstate, PyExc_TypeError,
                    "cannot 'yield from' a coroutine object "
                    "in a non-coroutine generator");
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    goto error;
                }
                // Coroutine in a coroutine context: pass through unchanged.
                iter = iterable;
            }
            else {
                if (PyGen_CheckExact(iterable_o)) {
                    // Generators are their own iterators.
                    iter = iterable;
                }
                else {
                    /* `iterable` is not a generator. */
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    PyObject *iter_o = PyObject_GetIter(iterable_o);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    if (iter_o == NULL) {
                        goto error;
                    }
                    iter = PyStackRef_FromPyObjectSteal(iter_o);
                    PyStackRef_CLOSE(iterable);
                }
            }
            stack_pointer[-1] = iter;
            DISPATCH();
        }
| |
        TARGET(IMPORT_FROM) {
            /* `from module import name`: push the attribute named by
             * co_names[oparg] looked up on the module at TOS; TOS stays
             * on the stack for further IMPORT_FROMs. */
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(IMPORT_FROM);
            _PyStackRef from;
            _PyStackRef res;
            from = stack_pointer[-1];
            PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
            _PyFrame_SetStackPointer(frame, stack_pointer);
            PyObject *res_o = _PyEval_ImportFrom(tstate, PyStackRef_AsPyObjectBorrow(from), name);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            if (res_o == NULL) goto error;
            res = PyStackRef_FromPyObjectSteal(res_o);
            stack_pointer[0] = res;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(IMPORT_NAME) {
            /* `import name`: replace [level, fromlist] with the module
             * returned by the import machinery for co_names[oparg]. */
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(IMPORT_NAME);
            _PyStackRef level;
            _PyStackRef fromlist;
            _PyStackRef res;
            fromlist = stack_pointer[-1];
            level = stack_pointer[-2];
            PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
            _PyFrame_SetStackPointer(frame, stack_pointer);
            PyObject *res_o = _PyEval_ImportName(tstate, frame, name,
                              PyStackRef_AsPyObjectBorrow(fromlist),
                              PyStackRef_AsPyObjectBorrow(level));
            stack_pointer = _PyFrame_GetStackPointer(frame);
            PyStackRef_CLOSE(level);
            PyStackRef_CLOSE(fromlist);
            if (res_o == NULL) goto pop_2_error;
            res = PyStackRef_FromPyObjectSteal(res_o);
            stack_pointer[-2] = res;
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(INSTRUMENTED_CALL) {
            /* Instrumented variant of CALL: expand a bound method if
             * needed, fire the PY_MONITORING_EVENT_CALL event, then perform
             * the call — either by pushing an inlined Python frame or by
             * vectorcalling — and fire C_RAISE/C_RETURN events around the
             * non-inlined path. */
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            (void)this_instr;
            next_instr += 4;
            INSTRUCTION_STATS(INSTRUMENTED_CALL);
            _PyStackRef *callable;
            _PyStackRef *self_or_null;
            _PyStackRef *args;
            _PyStackRef *func;
            _PyStackRef *maybe_self;
            _PyStackRef res;
            /* Skip 3 cache entries */
            // _MAYBE_EXPAND_METHOD
            {
                args = &stack_pointer[-oparg];
                self_or_null = &stack_pointer[-1 - oparg];
                callable = &stack_pointer[-2 - oparg];
                func = &stack_pointer[-2 - oparg];
                maybe_self = &stack_pointer[-1 - oparg];
                // Unpack a bound method in place: [meth, NULL, ...] ->
                // [meth.__func__, meth.__self__, ...].
                if (PyStackRef_TYPE(callable[0]) == &PyMethod_Type && PyStackRef_IsNull(self_or_null[0])) {
                    PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
                    PyObject *self = ((PyMethodObject *)callable_o)->im_self;
                    maybe_self[0] = PyStackRef_FromPyObjectNew(self);
                    PyObject *method = ((PyMethodObject *)callable_o)->im_func;
                    // Keep the method alive until the new func ref is in
                    // place, then drop it.
                    _PyStackRef temp = callable[0];
                    func[0] = PyStackRef_FromPyObjectNew(method);
                    PyStackRef_CLOSE(temp);
                }
            }
            // _MONITOR_CALL
            {
                args = &stack_pointer[-oparg];
                maybe_self = &stack_pointer[-1 - oparg];
                func = &stack_pointer[-2 - oparg];
                int is_meth = !PyStackRef_IsNull(maybe_self[0]);
                PyObject *function = PyStackRef_AsPyObjectBorrow(func[0]);
                // arg0 for the CALL event: self if present, else the first
                // positional argument, else the MISSING sentinel.
                PyObject *arg0;
                if (is_meth) {
                    arg0 = PyStackRef_AsPyObjectBorrow(maybe_self[0]);
                }
                else {
                    if (oparg) {
                        arg0 = PyStackRef_AsPyObjectBorrow(args[0]);
                    }
                    else {
                        arg0 = &_PyInstrumentation_MISSING;
                    }
                }
                _PyFrame_SetStackPointer(frame, stack_pointer);
                int err = _Py_call_instrumentation_2args(
                    tstate, PY_MONITORING_EVENT_CALL,
                    frame, this_instr, function, arg0
                );
                stack_pointer = _PyFrame_GetStackPointer(frame);
                if (err) goto error;
            }
            // _DO_CALL
            {
                self_or_null = maybe_self;
                callable = func;
                PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]);
                // oparg counts all of the args, but *not* self:
                int total_args = oparg;
                if (!PyStackRef_IsNull(self_or_null[0])) {
                    args--;
                    total_args++;
                }
                // Check if the call can be inlined or not
                if (Py_TYPE(callable_o) == &PyFunction_Type &&
                    tstate->interp->eval_frame == NULL &&
                    ((PyFunctionObject *)callable_o)->vectorcall == _PyFunction_Vectorcall)
                {
                    int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable_o))->co_flags;
                    PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o));
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit(
                        tstate, callable[0], locals,
                        args, total_args, NULL, frame
                    );
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    // Manipulate stack directly since we leave using DISPATCH_INLINED().
                    stack_pointer += -2 - oparg;
                    assert(WITHIN_STACK_BOUNDS());
                    // The frame has stolen all the arguments from the stack,
                    // so there is no need to clean them up.
                    if (new_frame == NULL) {
                        goto error;
                    }
                    // Return lands after this 4-codeunit instruction.
                    frame->return_offset = 4 ;
                    DISPATCH_INLINED(new_frame);
                }
                /* Callable is not a normal Python function */
                STACKREFS_TO_PYOBJECTS(args, total_args, args_o);
                if (CONVERSION_FAILED(args_o)) {
                    // Conversion failure: release everything and unwind.
                    PyStackRef_CLOSE(callable[0]);
                    for (int i = 0; i < total_args; i++) {
                        PyStackRef_CLOSE(args[i]);
                    }
                    {
                        stack_pointer += -2 - oparg;
                        assert(WITHIN_STACK_BOUNDS());
                        goto error;
                    }
                }
                _PyFrame_SetStackPointer(frame, stack_pointer);
                PyObject *res_o = PyObject_Vectorcall(
                    callable_o, args_o,
                    total_args | PY_VECTORCALL_ARGUMENTS_OFFSET,
                    NULL);
                stack_pointer = _PyFrame_GetStackPointer(frame);
                STACKREFS_TO_PYOBJECTS_CLEANUP(args_o);
                if (opcode == INSTRUMENTED_CALL) {
                    // Report the C call's outcome to monitoring: C_RAISE on
                    // failure, C_RETURN on success (which may itself fail,
                    // in which case the result is discarded).
                    PyObject *arg = total_args == 0 ?
                    &_PyInstrumentation_MISSING : PyStackRef_AsPyObjectBorrow(args[0]);
                    if (res_o == NULL) {
                        _PyFrame_SetStackPointer(frame, stack_pointer);
                        _Py_call_instrumentation_exc2(
                            tstate, PY_MONITORING_EVENT_C_RAISE,
                            frame, this_instr, callable_o, arg);
                        stack_pointer = _PyFrame_GetStackPointer(frame);
                    }
                    else {
                        _PyFrame_SetStackPointer(frame, stack_pointer);
                        int err = _Py_call_instrumentation_2args(
                            tstate, PY_MONITORING_EVENT_C_RETURN,
                            frame, this_instr, callable_o, arg);
                        stack_pointer = _PyFrame_GetStackPointer(frame);
                        if (err < 0) {
                            Py_CLEAR(res_o);
                        }
                    }
                }
                assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
                PyStackRef_CLOSE(callable[0]);
                for (int i = 0; i < total_args; i++) {
                    PyStackRef_CLOSE(args[i]);
                }
                if (res_o == NULL) {
                    stack_pointer += -2 - oparg;
                    assert(WITHIN_STACK_BOUNDS());
                    goto error;
                }
                res = PyStackRef_FromPyObjectSteal(res_o);
            }
            // _CHECK_PERIODIC
            {
                _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY();
                QSBR_QUIESCENT_STATE(tstate);
                if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) {
                    // Store the result before handling pending events (which
                    // can run arbitrary code), then restore the pre-store
                    // stack depth for the common-path epilogue below.
                    stack_pointer[-2 - oparg] = res;
                    stack_pointer += -1 - oparg;
                    assert(WITHIN_STACK_BOUNDS());
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    int err = _Py_HandlePending(tstate);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    if (err != 0) goto error;
                    stack_pointer += 1 + oparg;
                    assert(WITHIN_STACK_BOUNDS());
                }
            }
            stack_pointer[-2 - oparg] = res;
            stack_pointer += -1 - oparg;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(INSTRUMENTED_CALL_FUNCTION_EX) {
            /* Instrumented CALL_FUNCTION_EX: record stats, then forward to
             * the uninstrumented implementation (which handles monitoring
             * internally — TODO confirm in bytecodes.c). */
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(INSTRUMENTED_CALL_FUNCTION_EX);
            GO_TO_INSTRUCTION(CALL_FUNCTION_EX);
        }
| |
        TARGET(INSTRUMENTED_CALL_KW) {
            /* Instrumented CALL_KW: fire the CALL monitoring event for the
             * function and its first argument (or self), pause the adaptive
             * counter so the instruction does not respecialize over the
             * instrumentation, then forward to CALL_KW. */
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            (void)this_instr;
            next_instr += 4;
            INSTRUCTION_STATS(INSTRUMENTED_CALL_KW);
            uint16_t counter = read_u16(&this_instr[1].cache);
            (void)counter;
            uint32_t version = read_u32(&this_instr[2].cache);
            (void)version;
            // Stack layout: [func, self_or_null, args..., kwnames]; the
            // PEEK offsets below account for the kwnames tuple at TOS.
            int is_meth = !PyStackRef_IsNull(PEEK(oparg + 2));
            int total_args = oparg + is_meth;
            PyObject *function = PyStackRef_AsPyObjectBorrow(PEEK(oparg + 3));
            PyObject *arg = total_args == 0 ? &_PyInstrumentation_MISSING
                : PyStackRef_AsPyObjectBorrow(PEEK(total_args + 1));
            _PyFrame_SetStackPointer(frame, stack_pointer);
            int err = _Py_call_instrumentation_2args(
                tstate, PY_MONITORING_EVENT_CALL,
                frame, this_instr, function, arg);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            if (err) goto error;
            PAUSE_ADAPTIVE_COUNTER(this_instr[1].counter);
            GO_TO_INSTRUCTION(CALL_KW);
        }
| |
        TARGET(INSTRUMENTED_END_FOR) {
            /* Instrumented END_FOR: if the receiver is a generator, report
             * a synthetic StopIteration to monitoring (PEP 380 semantics),
             * then pop and release the value at TOS. */
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            (void)this_instr;
            next_instr += 1;
            INSTRUCTION_STATS(INSTRUMENTED_END_FOR);
            _PyStackRef receiver;
            _PyStackRef value;
            value = stack_pointer[-1];
            receiver = stack_pointer[-2];
            /* Need to create a fake StopIteration error here,
             * to conform to PEP 380 */
            if (PyStackRef_GenCheck(receiver)) {
                _PyFrame_SetStackPointer(frame, stack_pointer);
                int err = monitor_stop_iteration(tstate, frame, this_instr, PyStackRef_AsPyObjectBorrow(value));
                stack_pointer = _PyFrame_GetStackPointer(frame);
                if (err) {
                    goto error;
                }
            }
            PyStackRef_CLOSE(value);
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(INSTRUMENTED_END_SEND) {
            /* Instrumented END_SEND: if the receiver is a generator or
             * coroutine, report a synthetic StopIteration to monitoring,
             * then collapse [receiver, value] -> [value]. */
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            (void)this_instr;
            next_instr += 1;
            INSTRUCTION_STATS(INSTRUMENTED_END_SEND);
            _PyStackRef receiver;
            _PyStackRef value;
            _PyStackRef val;
            value = stack_pointer[-1];
            receiver = stack_pointer[-2];
            PyObject *receiver_o = PyStackRef_AsPyObjectBorrow(receiver);
            if (PyGen_Check(receiver_o) || PyCoro_CheckExact(receiver_o)) {
                _PyFrame_SetStackPointer(frame, stack_pointer);
                int err = monitor_stop_iteration(tstate, frame, this_instr, PyStackRef_AsPyObjectBorrow(value));
                stack_pointer = _PyFrame_GetStackPointer(frame);
                if (err) {
                    goto error;
                }
            }
            val = value;
            PyStackRef_CLOSE(receiver);
            stack_pointer[-2] = val;
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(INSTRUMENTED_FOR_ITER) {
            /* Instrumented FOR_ITER: perform the same tp_iternext step as
             * FOR_ITER, but route the resulting control transfer through
             * INSTRUMENTED_JUMP so a BRANCH monitoring event fires whether
             * the loop continues or exits. */
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            (void)this_instr;
            next_instr += 2;
            INSTRUCTION_STATS(INSTRUMENTED_FOR_ITER);
            /* Skip 1 cache entry */
            _Py_CODEUNIT *target;
            _PyStackRef iter_stackref = TOP();
            PyObject *iter = PyStackRef_AsPyObjectBorrow(iter_stackref);
            _PyFrame_SetStackPointer(frame, stack_pointer);
            PyObject *next = (*Py_TYPE(iter)->tp_iternext)(iter);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            if (next != NULL) {
                // Loop continues: push the value and "branch" to the next
                // instruction.
                PUSH(PyStackRef_FromPyObjectSteal(next));
                target = next_instr;
            }
            else {
                if (_PyErr_Occurred(tstate)) {
                    // Only StopIteration means exhaustion; report it to
                    // monitoring and clear it. Other exceptions propagate.
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    if (!matches) {
                        goto error;
                    }
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    _PyEval_MonitorRaise(tstate, frame, this_instr);
                    _PyErr_Clear(tstate);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                }
                /* iterator ended normally */
                assert(next_instr[oparg].op.code == END_FOR ||
                next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
                STACK_SHRINK(1);
                PyStackRef_CLOSE(iter_stackref);
                /* Skip END_FOR and POP_TOP */
                target = next_instr + oparg + 2;
            }
            INSTRUMENTED_JUMP(this_instr, target, PY_MONITORING_EVENT_BRANCH);
            DISPATCH();
        }
| |
        TARGET(INSTRUMENTED_INSTRUCTION) {
            /* Per-instruction instrumentation: fire the INSTRUCTION event,
             * then dispatch to the original opcode this one replaced (as
             * returned by the instrumentation call). */
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            (void)this_instr;
            next_instr += 1;
            INSTRUCTION_STATS(INSTRUMENTED_INSTRUCTION);
            _PyFrame_SetStackPointer(frame, stack_pointer);
            int next_opcode = _Py_call_instrumentation_instruction(
                tstate, frame, this_instr);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            if (next_opcode < 0) goto error;
            // Re-execute this code unit under the original opcode.
            next_instr = this_instr;
            if (_PyOpcode_Caches[next_opcode]) {
                // Keep the adaptive counter paused so the original opcode
                // does not specialize over the instrumentation.
                PAUSE_ADAPTIVE_COUNTER(next_instr[1].counter);
            }
            assert(next_opcode > 0 && next_opcode < 256);
            opcode = next_opcode;
            DISPATCH_GOTO();
        }
| |
        TARGET(INSTRUMENTED_JUMP_BACKWARD) {
            /* Instrumented backward jump: handle pending eval-breaker work
             * first (backward edges are the periodic check points), then
             * jump back oparg code units, firing a JUMP monitoring event. */
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            (void)this_instr;
            next_instr += 2;
            INSTRUCTION_STATS(INSTRUMENTED_JUMP_BACKWARD);
            /* Skip 1 cache entry */
            // _CHECK_PERIODIC
            {
                _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY();
                QSBR_QUIESCENT_STATE(tstate);
                if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) {
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    int err = _Py_HandlePending(tstate);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    if (err != 0) goto error;
                }
            }
            // _MONITOR_JUMP_BACKWARD
            {
                INSTRUMENTED_JUMP(this_instr, next_instr - oparg, PY_MONITORING_EVENT_JUMP);
            }
            DISPATCH();
        }
| |
        TARGET(INSTRUMENTED_JUMP_FORWARD) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            (void)this_instr;
            next_instr += 1;
            INSTRUCTION_STATS(INSTRUMENTED_JUMP_FORWARD);
            /* Unconditional forward jump that also raises the JUMP
             * monitoring event; no eval-breaker check on forward edges. */
            INSTRUMENTED_JUMP(this_instr, next_instr + oparg, PY_MONITORING_EVENT_JUMP);
            DISPATCH();
        }
| |
        TARGET(INSTRUMENTED_LINE) {
            _Py_CODEUNIT* const prev_instr = frame->instr_ptr;
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            (void)this_instr;
            next_instr += 1;
            INSTRUCTION_STATS(INSTRUMENTED_LINE);
            int original_opcode = 0;
            if (tstate->tracing) {
                /* Already inside a tracing callback: don't fire another LINE
                 * event, just recover the original opcode from the
                 * monitoring table and re-execute it in place. */
                PyCodeObject *code = _PyFrame_GetCode(frame);
                _PyFrame_SetStackPointer(frame, stack_pointer);
                original_opcode = code->_co_monitoring->lines[(int)(this_instr - _PyFrame_GetBytecode(frame))].original_opcode;
                stack_pointer = _PyFrame_GetStackPointer(frame);
                next_instr = this_instr;
            } else {
                _PyFrame_SetStackPointer(frame, stack_pointer);
                original_opcode = _Py_call_instrumentation_line(
                    tstate, frame, this_instr, prev_instr);
                stack_pointer = _PyFrame_GetStackPointer(frame);
                if (original_opcode < 0) {
                    next_instr = this_instr+1;
                    goto error;
                }
                /* The callback may have jumped the frame elsewhere. */
                next_instr = frame->instr_ptr;
                if (next_instr != this_instr) {
                    DISPATCH();
                }
            }
            if (_PyOpcode_Caches[original_opcode]) {
                /* Any cache layout works here: only the leading counter
                 * field is touched. */
                _PyBinaryOpCache *cache = (_PyBinaryOpCache *)(next_instr+1);
                /* Prevent the underlying instruction from specializing
                 * and overwriting the instrumentation. */
                PAUSE_ADAPTIVE_COUNTER(cache->counter);
            }
            opcode = original_opcode;
            DISPATCH_GOTO();
        }
| |
        TARGET(INSTRUMENTED_LOAD_SUPER_ATTR) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            (void)this_instr;
            next_instr += 2;
            INSTRUCTION_STATS(INSTRUMENTED_LOAD_SUPER_ATTR);
            /* Skip 1 cache entry */
            /* Thin wrapper: pin the adaptive counter, then run the plain
             * LOAD_SUPER_ATTR body. */
            // cancel out the decrement that will happen in LOAD_SUPER_ATTR; we
            // don't want to specialize instrumented instructions
            PAUSE_ADAPTIVE_COUNTER(this_instr[1].counter);
            GO_TO_INSTRUCTION(LOAD_SUPER_ATTR);
        }
| |
        TARGET(INSTRUMENTED_POP_JUMP_IF_FALSE) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            (void)this_instr;
            next_instr += 2;
            INSTRUCTION_STATS(INSTRUMENTED_POP_JUMP_IF_FALSE);
            /* Skip 1 cache entry */
            /* Pop a bool; jump by oparg when it is False and record the
             * branch direction, raising the BRANCH monitoring event. */
            _PyStackRef cond = POP();
            assert(PyStackRef_BoolCheck(cond));
            int flag = PyStackRef_Is(cond, PyStackRef_False);
            int offset = flag * oparg;
            RECORD_BRANCH_TAKEN(this_instr[1].cache, flag);
            INSTRUMENTED_JUMP(this_instr, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
            DISPATCH();
        }
| |
        TARGET(INSTRUMENTED_POP_JUMP_IF_NONE) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            (void)this_instr;
            next_instr += 2;
            INSTRUCTION_STATS(INSTRUMENTED_POP_JUMP_IF_NONE);
            /* Skip 1 cache entry */
            /* Pop a value; jump by oparg when it is None, recording the
             * branch and raising the BRANCH monitoring event. */
            _PyStackRef value_stackref = POP();
            int flag = PyStackRef_Is(value_stackref, PyStackRef_None);
            int offset;
            if (flag) {
                /* None path: the ref is not closed here — presumably safe
                 * because None needs no decref; confirm against bytecodes.c. */
                offset = oparg;
            }
            else {
                PyStackRef_CLOSE(value_stackref);
                offset = 0;
            }
            RECORD_BRANCH_TAKEN(this_instr[1].cache, flag);
            INSTRUMENTED_JUMP(this_instr, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
            DISPATCH();
        }
| |
| TARGET(INSTRUMENTED_POP_JUMP_IF_NOT_NONE) { |
| _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; |
| (void)this_instr; |
| next_instr += 2; |
| INSTRUCTION_STATS(INSTRUMENTED_POP_JUMP_IF_NOT_NONE); |
| /* Skip 1 cache entry */ |
| _PyStackRef value_stackref = POP(); |
| int offset; |
| int nflag = PyStackRef_Is(value_stackref, PyStackRef_None); |
| if (nflag) { |
| offset = 0; |
| } |
| else { |
| PyStackRef_CLOSE(value_stackref); |
| offset = oparg; |
| } |
| #if ENABLE_SPECIALIZATION |
| this_instr[1].cache = (this_instr[1].cache << 1) | !nflag; |
| #endif |
| INSTRUMENTED_JUMP(this_instr, next_instr + offset, PY_MONITORING_EVENT_BRANCH); |
| DISPATCH(); |
| } |
| |
        TARGET(INSTRUMENTED_POP_JUMP_IF_TRUE) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            (void)this_instr;
            next_instr += 2;
            INSTRUCTION_STATS(INSTRUMENTED_POP_JUMP_IF_TRUE);
            /* Skip 1 cache entry */
            /* Pop a bool; jump by oparg when it is True and record the
             * branch direction, raising the BRANCH monitoring event. */
            _PyStackRef cond = POP();
            assert(PyStackRef_BoolCheck(cond));
            int flag = PyStackRef_Is(cond, PyStackRef_True);
            int offset = flag * oparg;
            RECORD_BRANCH_TAKEN(this_instr[1].cache, flag);
            INSTRUMENTED_JUMP(this_instr, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
            DISPATCH();
        }
| |
| TARGET(INSTRUMENTED_RESUME) { |
| _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; |
| (void)this_instr; |
| next_instr += 1; |
| INSTRUCTION_STATS(INSTRUMENTED_RESUME); |
| // _LOAD_BYTECODE |
| { |
| #ifdef Py_GIL_DISABLED |
| if (frame->tlbc_index != |
| ((_PyThreadStateImpl *)tstate)->tlbc_index) { |
| _PyFrame_SetStackPointer(frame, stack_pointer); |
| _Py_CODEUNIT *bytecode = |
| _PyEval_GetExecutableCode(tstate, _PyFrame_GetCode(frame)); |
| stack_pointer = _PyFrame_GetStackPointer(frame); |
| if (bytecode == NULL) goto error; |
| _PyFrame_SetStackPointer(frame, stack_pointer); |
| ptrdiff_t off = this_instr - _PyFrame_GetBytecode(frame); |
| stack_pointer = _PyFrame_GetStackPointer(frame); |
| frame->tlbc_index = ((_PyThreadStateImpl *)tstate)->tlbc_index; |
| frame->instr_ptr = bytecode + off; |
| // Make sure this_instr gets reset correctley for any uops that |
| // follow |
| next_instr = frame->instr_ptr; |
| DISPATCH(); |
| } |
| #endif |
| } |
| // _MAYBE_INSTRUMENT |
| { |
| if (tstate->tracing == 0) { |
| uintptr_t global_version = _Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & ~_PY_EVAL_EVENTS_MASK; |
| uintptr_t code_version = FT_ATOMIC_LOAD_UINTPTR_ACQUIRE(_PyFrame_GetCode(frame)->_co_instrumentation_version); |
| if (code_version != global_version) { |
| _PyFrame_SetStackPointer(frame, stack_pointer); |
| int err = _Py_Instrument(_PyFrame_GetCode(frame), tstate->interp); |
| stack_pointer = _PyFrame_GetStackPointer(frame); |
| if (err) { |
| goto error; |
| } |
| next_instr = this_instr; |
| DISPATCH(); |
| } |
| } |
| } |
| // _CHECK_PERIODIC_IF_NOT_YIELD_FROM |
| { |
| if ((oparg & RESUME_OPARG_LOCATION_MASK) < RESUME_AFTER_YIELD_FROM) { |
| _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY(); |
| QSBR_QUIESCENT_STATE(tstate); \ |
| if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) { |
| _PyFrame_SetStackPointer(frame, stack_pointer); |
| int err = _Py_HandlePending(tstate); |
| stack_pointer = _PyFrame_GetStackPointer(frame); |
| if (err != 0) goto error; |
| } |
| } |
| } |
| // _MONITOR_RESUME |
| { |
| _PyFrame_SetStackPointer(frame, stack_pointer); |
| int err = _Py_call_instrumentation( |
| tstate, oparg > 0, frame, this_instr); |
| stack_pointer = _PyFrame_GetStackPointer(frame); |
| if (err) goto error; |
| if (frame->instr_ptr != this_instr) { |
| /* Instrumentation has jumped */ |
| next_instr = frame->instr_ptr; |
| } |
| } |
| DISPATCH(); |
| } |
| |
        TARGET(INSTRUMENTED_RETURN_VALUE) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            (void)this_instr;
            next_instr += 1;
            INSTRUCTION_STATS(INSTRUMENTED_RETURN_VALUE);
            _PyStackRef val;
            _PyStackRef retval;
            _PyStackRef res;
            // _RETURN_VALUE_EVENT
            {
                /* Fire PY_RETURN with the value about to be returned. */
                val = stack_pointer[-1];
                _PyFrame_SetStackPointer(frame, stack_pointer);
                int err = _Py_call_instrumentation_arg(
                    tstate, PY_MONITORING_EVENT_PY_RETURN,
                    frame, this_instr, PyStackRef_AsPyObjectBorrow(val));
                stack_pointer = _PyFrame_GetStackPointer(frame);
                if (err) goto error;
            }
            // _RETURN_VALUE
            {
                /* Pop the current frame and push the return value onto the
                 * caller's stack. */
                retval = val;
                #if TIER_ONE
                assert(frame != &entry_frame);
                #endif
                _PyStackRef temp = retval;
                stack_pointer += -1;
                assert(WITHIN_STACK_BOUNDS());
                _PyFrame_SetStackPointer(frame, stack_pointer);
                assert(EMPTY());
                _Py_LeaveRecursiveCallPy(tstate);
                // GH-99729: We need to unlink the frame *before* clearing it:
                _PyInterpreterFrame *dying = frame;
                frame = tstate->current_frame = dying->previous;
                _PyEval_FrameClearAndPop(tstate, dying);
                stack_pointer = _PyFrame_GetStackPointer(frame);
                LOAD_IP(frame->return_offset);
                res = temp;
                LLTRACE_RESUME_FRAME();
            }
            stack_pointer[0] = res;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(INSTRUMENTED_YIELD_VALUE) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            (void)this_instr;
            next_instr += 1;
            INSTRUCTION_STATS(INSTRUMENTED_YIELD_VALUE);
            _PyStackRef val;
            _PyStackRef retval;
            _PyStackRef value;
            // _YIELD_VALUE_EVENT
            {
                /* Fire PY_YIELD with the value about to be yielded; the
                 * callback may jump the frame, in which case just dispatch. */
                val = stack_pointer[-1];
                _PyFrame_SetStackPointer(frame, stack_pointer);
                int err = _Py_call_instrumentation_arg(
                    tstate, PY_MONITORING_EVENT_PY_YIELD,
                    frame, this_instr, PyStackRef_AsPyObjectBorrow(val));
                stack_pointer = _PyFrame_GetStackPointer(frame);
                if (err) {
                    goto error;
                }
                if (frame->instr_ptr != this_instr) {
                    next_instr = frame->instr_ptr;
                    DISPATCH();
                }
            }
            // _YIELD_VALUE
            {
                /* Suspend the generator frame and resume the caller with the
                 * yielded value on its stack. */
                retval = val;
                // NOTE: It's important that YIELD_VALUE never raises an exception!
                // The compiler treats any exception raised here as a failed close()
                // or throw() call.
                #if TIER_ONE
                assert(frame != &entry_frame);
                #endif
                frame->instr_ptr++;
                PyGenObject *gen = _PyGen_GetGeneratorFromFrame(frame);
                assert(FRAME_SUSPENDED_YIELD_FROM == FRAME_SUSPENDED + 1);
                assert(oparg == 0 || oparg == 1);
                gen->gi_frame_state = FRAME_SUSPENDED + oparg;
                _PyStackRef temp = retval;
                stack_pointer += -1;
                assert(WITHIN_STACK_BOUNDS());
                _PyFrame_SetStackPointer(frame, stack_pointer);
                tstate->exc_info = gen->gi_exc_state.previous_item;
                gen->gi_exc_state.previous_item = NULL;
                _Py_LeaveRecursiveCallPy(tstate);
                _PyInterpreterFrame *gen_frame = frame;
                frame = tstate->current_frame = frame->previous;
                gen_frame->previous = NULL;
                /* We don't know which of these is relevant here, so keep them equal */
                assert(INLINE_CACHE_ENTRIES_SEND == INLINE_CACHE_ENTRIES_FOR_ITER);
                #if TIER_ONE
                assert(frame->instr_ptr->op.code == INSTRUMENTED_LINE ||
                    frame->instr_ptr->op.code == INSTRUMENTED_INSTRUCTION ||
                    _PyOpcode_Deopt[frame->instr_ptr->op.code] == SEND ||
                    _PyOpcode_Deopt[frame->instr_ptr->op.code] == FOR_ITER ||
                    _PyOpcode_Deopt[frame->instr_ptr->op.code] == INTERPRETER_EXIT ||
                    _PyOpcode_Deopt[frame->instr_ptr->op.code] == ENTER_EXECUTOR);
                #endif
                stack_pointer = _PyFrame_GetStackPointer(frame);
                LOAD_IP(1 + INLINE_CACHE_ENTRIES_SEND);
                value = temp;
                LLTRACE_RESUME_FRAME();
            }
            stack_pointer[0] = value;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(INTERPRETER_EXIT) {
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(INTERPRETER_EXIT);
            /* Leave the interpreter loop entirely, returning TOS as a strong
             * reference; only ever executed in the entry frame. */
            _PyStackRef retval;
            retval = stack_pointer[-1];
            assert(frame == &entry_frame);
            assert(_PyFrame_IsIncomplete(frame));
            /* Restore previous frame and return. */
            tstate->current_frame = frame->previous;
            assert(!_PyErr_Occurred(tstate));
            /* Give back the C stack budget reserved on entry. */
            tstate->c_recursion_remaining += PY_EVAL_C_STACK_UNITS;
            PyObject *result = PyStackRef_AsPyObjectSteal(retval);
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            /* Not strictly necessary, but prevents warnings */
            return result;
        }
| |
        TARGET(IS_OP) {
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(IS_OP);
            /* Identity test: push (left is right), inverted when oparg is 1
             * (i.e. `is not`). */
            _PyStackRef left;
            _PyStackRef right;
            _PyStackRef b;
            right = stack_pointer[-1];
            left = stack_pointer[-2];
            #ifdef Py_GIL_DISABLED
            // On free-threaded builds, objects are conditionally immortalized.
            // So their bits don't always compare equally.
            int res = Py_Is(PyStackRef_AsPyObjectBorrow(left), PyStackRef_AsPyObjectBorrow(right)) ^ oparg;
            #else
            int res = PyStackRef_Is(left, right) ^ oparg;
            #endif
            PyStackRef_CLOSE(left);
            PyStackRef_CLOSE(right);
            b = res ? PyStackRef_True : PyStackRef_False;
            stack_pointer[-2] = b;
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(JUMP_BACKWARD) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            (void)this_instr;
            next_instr += 2;
            INSTRUCTION_STATS(JUMP_BACKWARD);
            // _CHECK_PERIODIC
            {
                _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY();
                QSBR_QUIESCENT_STATE(tstate);
                /* Backward edges are where pending events (signals, async
                 * callbacks) are serviced. */
                if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) {
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    int err = _Py_HandlePending(tstate);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    if (err != 0) goto error;
                }
            }
            // _JUMP_BACKWARD
            {
                uint16_t the_counter = read_u16(&this_instr[1].cache);
                (void)the_counter;
                assert(oparg <= INSTR_OFFSET());
                JUMPBY(-oparg);
                #ifdef _Py_TIER2
                #if ENABLE_SPECIALIZATION
                /* Hot backward edge: when the backoff counter fires, try to
                 * compile a tier-2 trace starting at this loop head. */
                _Py_BackoffCounter counter = this_instr[1].counter;
                if (backoff_counter_triggers(counter) && this_instr->op.code == JUMP_BACKWARD) {
                    _Py_CODEUNIT *start = this_instr;
                    /* Back up over EXTENDED_ARGs so optimizer sees the whole instruction */
                    while (oparg > 255) {
                        oparg >>= 8;
                        start--;
                    }
                    _PyExecutorObject *executor;
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    int optimized = _PyOptimizer_Optimize(frame, start, stack_pointer, &executor, 0);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    if (optimized < 0) goto error;
                    if (optimized) {
                        assert(tstate->previous_executor == NULL);
                        tstate->previous_executor = Py_None;
                        GOTO_TIER_TWO(executor);
                    }
                    else {
                        /* Optimization declined: back off before retrying. */
                        this_instr[1].counter = restart_backoff_counter(counter);
                    }
                }
                else {
                    ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
                }
                #endif /* ENABLE_SPECIALIZATION */
                #endif /* _Py_TIER2 */
            }
            DISPATCH();
        }
| |
        TARGET(JUMP_BACKWARD_NO_INTERRUPT) {
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(JUMP_BACKWARD_NO_INTERRUPT);
            /* This bytecode is used in the `yield from` or `await` loop.
             * If there is an interrupt, we want it handled in the innermost
             * generator or coroutine, so we deliberately do not check it here.
             * (see bpo-30039).
             */
            JUMPBY(-oparg);
            DISPATCH();
        }
| |
        TARGET(JUMP_FORWARD) {
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(JUMP_FORWARD);
            /* Unconditional relative jump forward by oparg code units. */
            JUMPBY(oparg);
            DISPATCH();
        }
| |
        TARGET(LIST_APPEND) {
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(LIST_APPEND);
            /* Pop TOS and append it to the list located oparg entries below
             * it on the stack (used by list comprehensions). */
            _PyStackRef list;
            _PyStackRef v;
            v = stack_pointer[-1];
            list = stack_pointer[-2 - (oparg-1)];
            /* _PyList_AppendTakeRef consumes the reference to v. */
            int err = _PyList_AppendTakeRef((PyListObject *)PyStackRef_AsPyObjectBorrow(list),
                PyStackRef_AsPyObjectSteal(v));
            if (err < 0) goto pop_1_error;
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(LIST_EXTEND) {
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(LIST_EXTEND);
            /* Pop an iterable and extend the list oparg entries below it
             * (used for unpacking, e.g. [*a, *b]). */
            _PyStackRef list_st;
            _PyStackRef iterable_st;
            iterable_st = stack_pointer[-1];
            list_st = stack_pointer[-2 - (oparg-1)];
            PyObject *list = PyStackRef_AsPyObjectBorrow(list_st);
            PyObject *iterable = PyStackRef_AsPyObjectBorrow(iterable_st);
            _PyFrame_SetStackPointer(frame, stack_pointer);
            PyObject *none_val = _PyList_Extend((PyListObject *)list, iterable);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            if (none_val == NULL) {
                /* Rewrite the generic TypeError from iteration into a more
                 * helpful "Value after * must be an iterable" message. */
                _PyFrame_SetStackPointer(frame, stack_pointer);
                int matches = _PyErr_ExceptionMatches(tstate, PyExc_TypeError);
                stack_pointer = _PyFrame_GetStackPointer(frame);
                if (matches &&
                    (Py_TYPE(iterable)->tp_iter == NULL && !PySequence_Check(iterable)))
                {
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    _PyErr_Clear(tstate);
                    _PyErr_Format(tstate, PyExc_TypeError,
                        "Value after * must be an iterable, not %.200s",
                        Py_TYPE(iterable)->tp_name);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                }
                PyStackRef_CLOSE(iterable_st);
                goto pop_1_error;
            }
            assert(Py_IsNone(none_val));
            PyStackRef_CLOSE(iterable_st);
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(LOAD_ATTR) {
            frame->instr_ptr = next_instr;
            next_instr += 10;
            INSTRUCTION_STATS(LOAD_ATTR);
            PREDICTED(LOAD_ATTR);
            _Py_CODEUNIT* const this_instr = next_instr - 10;
            (void)this_instr;
            /* Generic attribute load. oparg >> 1 indexes the name table;
             * the low bit selects the two-value "method call" form. */
            _PyStackRef owner;
            _PyStackRef attr;
            _PyStackRef self_or_null = PyStackRef_NULL;
            // _SPECIALIZE_LOAD_ATTR
            {
                owner = stack_pointer[-1];
                uint16_t counter = read_u16(&this_instr[1].cache);
                (void)counter;
                #if ENABLE_SPECIALIZATION
                /* When the adaptive counter fires, try to rewrite this
                 * instruction into one of the LOAD_ATTR_* specializations
                 * and re-dispatch at the same location. */
                if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                    PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
                    next_instr = this_instr;
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    _Py_Specialize_LoadAttr(owner, next_instr, name);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    DISPATCH_SAME_OPARG();
                }
                OPCODE_DEFERRED_INC(LOAD_ATTR);
                ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
                #endif /* ENABLE_SPECIALIZATION */
            }
            /* Skip 8 cache entries */
            // _LOAD_ATTR
            {
                PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 1);
                PyObject *attr_o;
                if (oparg & 1) {
                    /* Designed to work in tandem with CALL, pushes two values. */
                    attr_o = NULL;
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    int is_meth = _PyObject_GetMethod(PyStackRef_AsPyObjectBorrow(owner), name, &attr_o);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    if (is_meth) {
                        /* We can bypass temporary bound method object.
                           meth is unbound method and obj is self.
                           meth | self | arg1 | ... | argN
                         */
                        assert(attr_o != NULL);  // No errors on this branch
                        self_or_null = owner;  // Transfer ownership
                    }
                    else {
                        /* meth is not an unbound method (but a regular attr, or
                           something was returned by a descriptor protocol).  Set
                           the second element of the stack to NULL, to signal
                           CALL that it's not a method call.
                           meth | NULL | arg1 | ... | argN
                        */
                        PyStackRef_CLOSE(owner);
                        if (attr_o == NULL) goto pop_1_error;
                        self_or_null = PyStackRef_NULL;
                    }
                }
                else {
                    /* Classic, pushes one value. */
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    attr_o = PyObject_GetAttr(PyStackRef_AsPyObjectBorrow(owner), name);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    PyStackRef_CLOSE(owner);
                    if (attr_o == NULL) goto pop_1_error;
                    /* We need to define self_or_null on all paths */
                    self_or_null = PyStackRef_NULL;
                }
                attr = PyStackRef_FromPyObjectSteal(attr_o);
            }
            stack_pointer[-1] = attr;
            if (oparg & 1) stack_pointer[0] = self_or_null;
            stack_pointer += (oparg & 1);
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(LOAD_ATTR_CLASS) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            next_instr += 10;
            INSTRUCTION_STATS(LOAD_ATTR_CLASS);
            static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size");
            /* Specialization: attribute of a class object, cached as a
             * descriptor in the inline cache; guarded by the type version. */
            _PyStackRef owner;
            _PyStackRef attr;
            _PyStackRef null = PyStackRef_NULL;
            /* Skip 1 cache entry */
            // _CHECK_ATTR_CLASS
            {
                owner = stack_pointer[-1];
                uint32_t type_version = read_u32(&this_instr[2].cache);
                PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
                DEOPT_IF(!PyType_Check(owner_o), LOAD_ATTR);
                assert(type_version != 0);
                /* Deopt if the class was mutated since specialization. */
                DEOPT_IF(((PyTypeObject *)owner_o)->tp_version_tag != type_version, LOAD_ATTR);
            }
            /* Skip 2 cache entries */
            // _LOAD_ATTR_CLASS
            {
                PyObject *descr = read_obj(&this_instr[6].cache);
                STAT_INC(LOAD_ATTR, hit);
                assert(descr != NULL);
                attr = PyStackRef_FromPyObjectNew(descr);
                null = PyStackRef_NULL;
                PyStackRef_CLOSE(owner);
            }
            stack_pointer[-1] = attr;
            if (oparg & 1) stack_pointer[0] = null;
            stack_pointer += (oparg & 1);
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(LOAD_ATTR_CLASS_WITH_METACLASS_CHECK) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            next_instr += 10;
            INSTRUCTION_STATS(LOAD_ATTR_CLASS_WITH_METACLASS_CHECK);
            static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size");
            /* Like LOAD_ATTR_CLASS, but additionally guards the metaclass
             * version (the type of the class being accessed). */
            _PyStackRef owner;
            _PyStackRef attr;
            _PyStackRef null = PyStackRef_NULL;
            /* Skip 1 cache entry */
            // _CHECK_ATTR_CLASS
            {
                owner = stack_pointer[-1];
                uint32_t type_version = read_u32(&this_instr[2].cache);
                PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
                DEOPT_IF(!PyType_Check(owner_o), LOAD_ATTR);
                assert(type_version != 0);
                DEOPT_IF(((PyTypeObject *)owner_o)->tp_version_tag != type_version, LOAD_ATTR);
            }
            // _GUARD_TYPE_VERSION
            {
                /* Guard on the metaclass: Py_TYPE(owner) here is the type of
                 * the class object on the stack. */
                uint32_t type_version = read_u32(&this_instr[4].cache);
                PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner));
                assert(type_version != 0);
                DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR);
            }
            // _LOAD_ATTR_CLASS
            {
                PyObject *descr = read_obj(&this_instr[6].cache);
                STAT_INC(LOAD_ATTR, hit);
                assert(descr != NULL);
                attr = PyStackRef_FromPyObjectNew(descr);
                null = PyStackRef_NULL;
                PyStackRef_CLOSE(owner);
            }
            stack_pointer[-1] = attr;
            if (oparg & 1) stack_pointer[0] = null;
            stack_pointer += (oparg & 1);
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            next_instr += 10;
            INSTRUCTION_STATS(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN);
            static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size");
            /* Specialization: the type defines a pure-Python __getattribute__.
             * Inline-call the cached function by pushing a new interpreter
             * frame with (owner, name) as its two arguments. */
            _PyStackRef owner;
            /* Skip 1 cache entry */
            owner = stack_pointer[-1];
            uint32_t type_version = read_u32(&this_instr[2].cache);
            uint32_t func_version = read_u32(&this_instr[4].cache);
            PyObject *getattribute = read_obj(&this_instr[6].cache);
            PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
            assert((oparg & 1) == 0);
            /* A custom eval frame hook must see the call; deopt. */
            DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR);
            PyTypeObject *cls = Py_TYPE(owner_o);
            assert(type_version != 0);
            DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR);
            assert(Py_IS_TYPE(getattribute, &PyFunction_Type));
            PyFunctionObject *f = (PyFunctionObject *)getattribute;
            assert(func_version != 0);
            DEOPT_IF(f->func_version != func_version, LOAD_ATTR);
            PyCodeObject *code = (PyCodeObject *)f->func_code;
            assert(code->co_argcount == 2);
            DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), LOAD_ATTR);
            STAT_INC(LOAD_ATTR, hit);
            PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 1);
            _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(
                tstate, PyStackRef_FromPyObjectNew(f), 2, frame);
            // Manipulate stack directly because we exit with DISPATCH_INLINED().
            STACK_SHRINK(1);
            new_frame->localsplus[0] = owner;
            new_frame->localsplus[1] = PyStackRef_FromPyObjectNew(name);
            /* Resume after this instruction's full 10-unit footprint. */
            frame->return_offset = 10 ;
            DISPATCH_INLINED(new_frame);
        }
| |
        TARGET(LOAD_ATTR_INSTANCE_VALUE) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            next_instr += 10;
            INSTRUCTION_STATS(LOAD_ATTR_INSTANCE_VALUE);
            static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size");
            /* Specialization: plain instance attribute stored in the
             * object's inline values at a cached byte offset. */
            _PyStackRef owner;
            _PyStackRef attr;
            _PyStackRef null = PyStackRef_NULL;
            /* Skip 1 cache entry */
            // _GUARD_TYPE_VERSION
            {
                owner = stack_pointer[-1];
                uint32_t type_version = read_u32(&this_instr[2].cache);
                PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner));
                assert(type_version != 0);
                DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR);
            }
            // _CHECK_MANAGED_OBJECT_HAS_VALUES
            {
                PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
                assert(Py_TYPE(owner_o)->tp_dictoffset < 0);
                assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_INLINE_VALUES);
                /* Deopt if the inline values were materialized into a dict. */
                DEOPT_IF(!_PyObject_InlineValues(owner_o)->valid, LOAD_ATTR);
            }
            // _LOAD_ATTR_INSTANCE_VALUE
            {
                uint16_t offset = read_u16(&this_instr[4].cache);
                PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
                PyObject **value_ptr = (PyObject**)(((char *)owner_o) + offset);
                PyObject *attr_o = *value_ptr;
                /* NULL slot means the attribute was deleted/never set. */
                DEOPT_IF(attr_o == NULL, LOAD_ATTR);
                STAT_INC(LOAD_ATTR, hit);
                Py_INCREF(attr_o);
                null = PyStackRef_NULL;
                attr = PyStackRef_FromPyObjectSteal(attr_o);
                PyStackRef_CLOSE(owner);
            }
            /* Skip 5 cache entries */
            stack_pointer[-1] = attr;
            if (oparg & 1) stack_pointer[0] = null;
            stack_pointer += (oparg & 1);
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(LOAD_ATTR_METHOD_LAZY_DICT) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            next_instr += 10;
            INSTRUCTION_STATS(LOAD_ATTR_METHOD_LAZY_DICT);
            static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size");
            /* Specialization: method lookup on an instance whose __dict__
             * exists but has not been created yet (lazy dict slot is NULL),
             * so the cached descriptor cannot be shadowed. Pushes
             * (method, self). */
            _PyStackRef owner;
            _PyStackRef attr;
            _PyStackRef self = PyStackRef_NULL;
            /* Skip 1 cache entry */
            // _GUARD_TYPE_VERSION
            {
                owner = stack_pointer[-1];
                uint32_t type_version = read_u32(&this_instr[2].cache);
                PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner));
                assert(type_version != 0);
                DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR);
            }
            // _CHECK_ATTR_METHOD_LAZY_DICT
            {
                uint16_t dictoffset = read_u16(&this_instr[4].cache);
                char *ptr = ((char *)PyStackRef_AsPyObjectBorrow(owner)) + MANAGED_DICT_OFFSET + dictoffset;
                PyObject *dict = *(PyObject **)ptr;
                /* This object has a __dict__, just not yet created */
                DEOPT_IF(dict != NULL, LOAD_ATTR);
            }
            /* Skip 1 cache entry */
            // _LOAD_ATTR_METHOD_LAZY_DICT
            {
                PyObject *descr = read_obj(&this_instr[6].cache);
                assert(oparg & 1);
                STAT_INC(LOAD_ATTR, hit);
                assert(descr != NULL);
                assert(_PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_METHOD_DESCRIPTOR));
                attr = PyStackRef_FromPyObjectNew(descr);
                /* owner's reference is transferred to the self slot. */
                self = owner;
            }
            stack_pointer[-1] = attr;
            stack_pointer[0] = self;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(LOAD_ATTR_METHOD_NO_DICT) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            next_instr += 10;
            INSTRUCTION_STATS(LOAD_ATTR_METHOD_NO_DICT);
            static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size");
            /* Specialization: method lookup on an instance of a type with no
             * __dict__ (tp_dictoffset == 0), so nothing can shadow the
             * cached descriptor. Pushes (method, self). */
            _PyStackRef owner;
            _PyStackRef attr;
            _PyStackRef self = PyStackRef_NULL;
            /* Skip 1 cache entry */
            // _GUARD_TYPE_VERSION
            {
                owner = stack_pointer[-1];
                uint32_t type_version = read_u32(&this_instr[2].cache);
                PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner));
                assert(type_version != 0);
                DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR);
            }
            /* Skip 2 cache entries */
            // _LOAD_ATTR_METHOD_NO_DICT
            {
                PyObject *descr = read_obj(&this_instr[6].cache);
                assert(oparg & 1);
                assert(Py_TYPE(PyStackRef_AsPyObjectBorrow(owner))->tp_dictoffset == 0);
                STAT_INC(LOAD_ATTR, hit);
                assert(descr != NULL);
                assert(_PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_METHOD_DESCRIPTOR));
                attr = PyStackRef_FromPyObjectNew(descr);
                /* owner's reference is transferred to the self slot. */
                self = owner;
            }
            stack_pointer[-1] = attr;
            stack_pointer[0] = self;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(LOAD_ATTR_METHOD_WITH_VALUES) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            next_instr += 10;
            INSTRUCTION_STATS(LOAD_ATTR_METHOD_WITH_VALUES);
            static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size");
            /* Specialization: method lookup on an instance using inline
             * values; guards type version, valid inline values, and the
             * shared-keys version so the cached descriptor cannot be
             * shadowed by an instance attribute. Pushes (method, self). */
            _PyStackRef owner;
            _PyStackRef attr;
            _PyStackRef self = PyStackRef_NULL;
            /* Skip 1 cache entry */
            // _GUARD_TYPE_VERSION
            {
                owner = stack_pointer[-1];
                uint32_t type_version = read_u32(&this_instr[2].cache);
                PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner));
                assert(type_version != 0);
                DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR);
            }
            // _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT
            {
                PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
                assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_INLINE_VALUES);
                DEOPT_IF(!_PyObject_InlineValues(owner_o)->valid, LOAD_ATTR);
            }
            // _GUARD_KEYS_VERSION
            {
                uint32_t keys_version = read_u32(&this_instr[4].cache);
                PyTypeObject *owner_cls = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner));
                PyHeapTypeObject *owner_heap_type = (PyHeapTypeObject *)owner_cls;
                DEOPT_IF(owner_heap_type->ht_cached_keys->dk_version != keys_version, LOAD_ATTR);
            }
            // _LOAD_ATTR_METHOD_WITH_VALUES
            {
                PyObject *descr = read_obj(&this_instr[6].cache);
                assert(oparg & 1);
                /* Cached method object */
                STAT_INC(LOAD_ATTR, hit);
                assert(descr != NULL);
                assert(_PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_METHOD_DESCRIPTOR));
                attr = PyStackRef_FromPyObjectNew(descr);
                /* owner's reference is transferred to the self slot. */
                self = owner;
            }
            stack_pointer[-1] = attr;
            stack_pointer[0] = self;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(LOAD_ATTR_MODULE) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            next_instr += 10;
            INSTRUCTION_STATS(LOAD_ATTR_MODULE);
            static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size");
            /* Specialization: attribute of a module, fetched directly from
             * the module dict entry at a cached index; guarded by the dict
             * keys version. */
            _PyStackRef owner;
            _PyStackRef attr;
            _PyStackRef null = PyStackRef_NULL;
            /* Skip 1 cache entry */
            // _CHECK_ATTR_MODULE
            {
                owner = stack_pointer[-1];
                uint32_t dict_version = read_u32(&this_instr[2].cache);
                PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
                DEOPT_IF(!PyModule_CheckExact(owner_o), LOAD_ATTR);
                PyDictObject *dict = (PyDictObject *)((PyModuleObject *)owner_o)->md_dict;
                assert(dict != NULL);
                /* Deopt if the module dict's keys changed since specialization. */
                DEOPT_IF(dict->ma_keys->dk_version != dict_version, LOAD_ATTR);
            }
            // _LOAD_ATTR_MODULE
            {
                uint16_t index = read_u16(&this_instr[4].cache);
                PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
                PyDictObject *dict = (PyDictObject *)((PyModuleObject *)owner_o)->md_dict;
                assert(dict->ma_keys->dk_kind == DICT_KEYS_UNICODE);
                assert(index < dict->ma_keys->dk_nentries);
                PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + index;
                PyObject *attr_o = ep->me_value;
                /* NULL value means the entry was deleted. */
                DEOPT_IF(attr_o == NULL, LOAD_ATTR);
                STAT_INC(LOAD_ATTR, hit);
                Py_INCREF(attr_o);
                attr = PyStackRef_FromPyObjectSteal(attr_o);
                null = PyStackRef_NULL;
                PyStackRef_CLOSE(owner);
            }
            /* Skip 5 cache entries */
            stack_pointer[-1] = attr;
            if (oparg & 1) stack_pointer[0] = null;
            stack_pointer += (oparg & 1);
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        TARGET(LOAD_ATTR_NONDESCRIPTOR_NO_DICT) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            next_instr += 10;
            INSTRUCTION_STATS(LOAD_ATTR_NONDESCRIPTOR_NO_DICT);
            static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size");
            /* Specialization: class attribute that is not a method
             * descriptor, on an instance whose type has no __dict__;
             * replaces the owner with the cached object (one-value form). */
            _PyStackRef owner;
            _PyStackRef attr;
            /* Skip 1 cache entry */
            // _GUARD_TYPE_VERSION
            {
                owner = stack_pointer[-1];
                uint32_t type_version = read_u32(&this_instr[2].cache);
                PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner));
                assert(type_version != 0);
                DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR);
            }
            /* Skip 2 cache entries */
            // _LOAD_ATTR_NONDESCRIPTOR_NO_DICT
            {
                PyObject *descr = read_obj(&this_instr[6].cache);
                assert((oparg & 1) == 0);
                assert(Py_TYPE(PyStackRef_AsPyObjectBorrow(owner))->tp_dictoffset == 0);
                STAT_INC(LOAD_ATTR, hit);
                assert(descr != NULL);
                PyStackRef_CLOSE(owner);
                attr = PyStackRef_FromPyObjectNew(descr);
            }
            stack_pointer[-1] = attr;
            DISPATCH();
        }
| |
        /* Specialization of LOAD_ATTR: cached non-descriptor class attribute
         * on an instance using inline values.  Guards the type version, the
         * validity of the inline values, and the shared-keys version so the
         * cached class attribute cannot be shadowed by an instance entry. */
        TARGET(LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            next_instr += 10;
            INSTRUCTION_STATS(LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES);
            static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size");
            _PyStackRef owner;
            _PyStackRef attr;
            /* Skip 1 cache entry */
            // _GUARD_TYPE_VERSION
            {
                owner = stack_pointer[-1];
                uint32_t type_version = read_u32(&this_instr[2].cache);
                PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner));
                assert(type_version != 0);
                DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR);
            }
            // _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT
            {
                PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
                assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_INLINE_VALUES);
                // Inline values may have been materialized into a dict;
                // if so this fast path is no longer valid.
                DEOPT_IF(!_PyObject_InlineValues(owner_o)->valid, LOAD_ATTR);
            }
            // _GUARD_KEYS_VERSION
            {
                uint32_t keys_version = read_u32(&this_instr[4].cache);
                PyTypeObject *owner_cls = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner));
                PyHeapTypeObject *owner_heap_type = (PyHeapTypeObject *)owner_cls;
                DEOPT_IF(owner_heap_type->ht_cached_keys->dk_version != keys_version, LOAD_ATTR);
            }
            // _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES
            {
                // The attribute value itself is stored in cache entry 6.
                PyObject *descr = read_obj(&this_instr[6].cache);
                assert((oparg & 1) == 0);
                STAT_INC(LOAD_ATTR, hit);
                assert(descr != NULL);
                PyStackRef_CLOSE(owner);
                attr = PyStackRef_FromPyObjectNew(descr);
            }
            stack_pointer[-1] = attr;
            DISPATCH();
        }
| |
        /* Specialization of LOAD_ATTR: the attribute is a property whose
         * fget is a plain Python function.  Instead of calling the getter
         * through the C API, this pushes a new interpreter frame for fget
         * (with owner as its single argument) and continues execution in it. */
        TARGET(LOAD_ATTR_PROPERTY) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            next_instr += 10;
            INSTRUCTION_STATS(LOAD_ATTR_PROPERTY);
            static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size");
            _PyStackRef owner;
            _PyInterpreterFrame *new_frame;
            /* Skip 1 cache entry */
            // _CHECK_PEP_523
            {
                // A custom eval frame hook (PEP 523) means we cannot inline
                // the call; deopt to the generic instruction.
                DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR);
            }
            // _GUARD_TYPE_VERSION
            {
                owner = stack_pointer[-1];
                uint32_t type_version = read_u32(&this_instr[2].cache);
                PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner));
                assert(type_version != 0);
                DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR);
            }
            /* Skip 2 cache entries */
            // _LOAD_ATTR_PROPERTY_FRAME
            {
                // The property's fget function is stored in cache entry 6.
                PyObject *fget = read_obj(&this_instr[6].cache);
                assert((oparg & 1) == 0);
                assert(Py_IS_TYPE(fget, &PyFunction_Type));
                PyFunctionObject *f = (PyFunctionObject *)fget;
                PyCodeObject *code = (PyCodeObject *)f->func_code;
                // Only simple getters qualify: exactly one positional arg,
                // no *args/**kwargs, and enough C/Python stack space.
                DEOPT_IF((code->co_flags & (CO_VARKEYWORDS | CO_VARARGS | CO_OPTIMIZED)) != CO_OPTIMIZED, LOAD_ATTR);
                DEOPT_IF(code->co_kwonlyargcount, LOAD_ATTR);
                DEOPT_IF(code->co_argcount != 1, LOAD_ATTR);
                DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), LOAD_ATTR);
                STAT_INC(LOAD_ATTR, hit);
                new_frame = _PyFrame_PushUnchecked(tstate, PyStackRef_FromPyObjectNew(fget), 1, frame);
                // owner becomes the getter's "self" argument; its stack
                // reference is transferred into the new frame's locals.
                new_frame->localsplus[0] = owner;
            }
            // _SAVE_RETURN_OFFSET
            {
                #if TIER_ONE
                frame->return_offset = (uint16_t)(next_instr - this_instr);
                #endif
                #if TIER_TWO
                frame->return_offset = oparg;
                #endif
            }
            // _PUSH_FRAME
            {
                // Write it out explicitly because it's subtly different.
                // Eventually this should be the only occurrence of this code.
                assert(tstate->interp->eval_frame == NULL);
                _PyInterpreterFrame *temp = new_frame;
                // Pop owner from the caller's stack before switching frames.
                stack_pointer += -1;
                assert(WITHIN_STACK_BOUNDS());
                _PyFrame_SetStackPointer(frame, stack_pointer);
                assert(new_frame->previous == frame || new_frame->previous->previous == frame);
                CALL_STAT_INC(inlined_py_calls);
                frame = tstate->current_frame = temp;
                tstate->py_recursion_remaining--;
                LOAD_SP();
                LOAD_IP(0);
                LLTRACE_RESUME_FRAME();
            }
            DISPATCH();
        }
| |
        /* Specialization of LOAD_ATTR: the attribute lives in a slot, i.e.
         * a PyObject* stored at a fixed byte offset (cached as `index`)
         * inside the object itself.  Pushes an extra NULL when the low
         * oparg bit is set. */
        TARGET(LOAD_ATTR_SLOT) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            next_instr += 10;
            INSTRUCTION_STATS(LOAD_ATTR_SLOT);
            static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size");
            _PyStackRef owner;
            _PyStackRef attr;
            _PyStackRef null = PyStackRef_NULL;
            /* Skip 1 cache entry */
            // _GUARD_TYPE_VERSION
            {
                owner = stack_pointer[-1];
                uint32_t type_version = read_u32(&this_instr[2].cache);
                PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner));
                assert(type_version != 0);
                DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR);
            }
            // _LOAD_ATTR_SLOT
            {
                // Cached byte offset of the slot within the object.
                uint16_t index = read_u16(&this_instr[4].cache);
                PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
                char *addr = (char *)owner_o + index;
                PyObject *attr_o = *(PyObject **)addr;
                // An empty slot means AttributeError; let generic LOAD_ATTR
                // raise it.
                DEOPT_IF(attr_o == NULL, LOAD_ATTR);
                STAT_INC(LOAD_ATTR, hit);
                null = PyStackRef_NULL;
                // Take a new reference before releasing owner, which may be
                // the last reference keeping the slot's value alive.
                attr = PyStackRef_FromPyObjectNew(attr_o);
                PyStackRef_CLOSE(owner);
            }
            /* Skip 5 cache entries */
            stack_pointer[-1] = attr;
            if (oparg & 1) stack_pointer[0] = null;
            stack_pointer += (oparg & 1);
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        /* Specialization of LOAD_ATTR: the instance has a materialized
         * managed dict; `hint` is a cached index into the dict's unicode
         * key entries where the attribute was found at specialization time.
         * The key at that index is re-checked against the real name before
         * the cached value is used. */
        TARGET(LOAD_ATTR_WITH_HINT) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            next_instr += 10;
            INSTRUCTION_STATS(LOAD_ATTR_WITH_HINT);
            static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size");
            _PyStackRef owner;
            _PyStackRef attr;
            _PyStackRef null = PyStackRef_NULL;
            /* Skip 1 cache entry */
            // _GUARD_TYPE_VERSION
            {
                owner = stack_pointer[-1];
                uint32_t type_version = read_u32(&this_instr[2].cache);
                PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner));
                assert(type_version != 0);
                DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR);
            }
            // _CHECK_ATTR_WITH_HINT
            {
                PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
                assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_MANAGED_DICT);
                PyDictObject *dict = _PyObject_GetManagedDict(owner_o);
                // No materialized dict: this specialization doesn't apply.
                DEOPT_IF(dict == NULL, LOAD_ATTR);
                assert(PyDict_CheckExact((PyObject *)dict));
            }
            // _LOAD_ATTR_WITH_HINT
            {
                uint16_t hint = read_u16(&this_instr[4].cache);
                PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
                PyObject *attr_o;
                PyDictObject *dict = _PyObject_GetManagedDict(owner_o);
                // The dict may have shrunk or been rebuilt since
                // specialization; validate the hint before indexing.
                DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries, LOAD_ATTR);
                PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
                DEOPT_IF(!DK_IS_UNICODE(dict->ma_keys), LOAD_ATTR);
                PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint;
                // The entry at the hinted index must still hold this name.
                DEOPT_IF(ep->me_key != name, LOAD_ATTR);
                attr_o = ep->me_value;
                DEOPT_IF(attr_o == NULL, LOAD_ATTR);
                STAT_INC(LOAD_ATTR, hit);
                // New reference for the borrowed dict value, then its
                // ownership is transferred to the stack reference.
                Py_INCREF(attr_o);
                attr = PyStackRef_FromPyObjectSteal(attr_o);
                null = PyStackRef_NULL;
                PyStackRef_CLOSE(owner);
            }
            /* Skip 5 cache entries */
            stack_pointer[-1] = attr;
            if (oparg & 1) stack_pointer[0] = null;
            stack_pointer += (oparg & 1);
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        /* Push __build_class__ from the builtins namespace; raises
         * NameError if it is missing. */
        TARGET(LOAD_BUILD_CLASS) {
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(LOAD_BUILD_CLASS);
            _PyStackRef bc;
            PyObject *bc_o;
            _PyFrame_SetStackPointer(frame, stack_pointer);
            int err = PyMapping_GetOptionalItem(BUILTINS(), &_Py_ID(__build_class__), &bc_o);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            if (err < 0) goto error;
            // GetOptionalItem distinguishes "missing" (bc_o == NULL, no
            // exception) from a lookup error (err < 0, handled above).
            if (bc_o == NULL) {
                _PyFrame_SetStackPointer(frame, stack_pointer);
                _PyErr_SetString(tstate, PyExc_NameError,
                                 "__build_class__ not found");
                stack_pointer = _PyFrame_GetStackPointer(frame);
                goto error;
            }
            bc = PyStackRef_FromPyObjectSteal(bc_o);
            stack_pointer[0] = bc;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        /* Push one of a small fixed set of well-known constants selected by
         * oparg (currently AssertionError or NotImplementedError). */
        TARGET(LOAD_COMMON_CONSTANT) {
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(LOAD_COMMON_CONSTANT);
            _PyStackRef value;
            // Keep in sync with _common_constants in opcode.py
            // If we ever have more than two constants, use a lookup table
            PyObject *val;
            if (oparg == CONSTANT_ASSERTIONERROR) {
                val = PyExc_AssertionError;
            }
            else {
                assert(oparg == CONSTANT_NOTIMPLEMENTEDERROR);
                val = PyExc_NotImplementedError;
            }
            // Both exception types are immortal, so no refcount traffic is
            // needed.
            value = PyStackRef_FromPyObjectImmortal(val);
            stack_pointer[0] = value;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        /* Push co_consts[oparg] as a new reference. */
        TARGET(LOAD_CONST) {
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(LOAD_CONST);
            PREDICTED(LOAD_CONST);
            _PyStackRef value;
            value = PyStackRef_FromPyObjectNew(GETITEM(FRAME_CO_CONSTS, oparg));
            stack_pointer[0] = value;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        /* Variant of LOAD_CONST for constants known to be immortal:
         * skips reference counting entirely (asserted in debug builds). */
        TARGET(LOAD_CONST_IMMORTAL) {
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(LOAD_CONST_IMMORTAL);
            static_assert(0 == 0, "incorrect cache size");
            _PyStackRef value;
            PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg);
            assert(_Py_IsImmortal(obj));
            value = PyStackRef_FromPyObjectImmortal(obj);
            stack_pointer[0] = value;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        /* Load the contents of the cell stored in localsplus[oparg] and push
         * it; raises an unbound-variable error if the cell is empty. */
        TARGET(LOAD_DEREF) {
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(LOAD_DEREF);
            _PyStackRef value;
            PyCellObject *cell = (PyCellObject *)PyStackRef_AsPyObjectBorrow(GETLOCAL(oparg));
            // PyCell_GetRef returns a new reference, or NULL if the cell is
            // empty (unbound).
            PyObject *value_o = PyCell_GetRef(cell);
            if (value_o == NULL) {
                _PyFrame_SetStackPointer(frame, stack_pointer);
                _PyEval_FormatExcUnbound(tstate, _PyFrame_GetCode(frame), oparg);
                stack_pointer = _PyFrame_GetStackPointer(frame);
                goto error;
            }
            value = PyStackRef_FromPyObjectSteal(value_o);
            stack_pointer[0] = value;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        /* Push a duplicate reference to local variable oparg; the local is
         * known (asserted) to be bound, so no NULL check is needed. */
        TARGET(LOAD_FAST) {
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(LOAD_FAST);
            _PyStackRef value;
            assert(!PyStackRef_IsNull(GETLOCAL(oparg)));
            value = PyStackRef_DUP(GETLOCAL(oparg));
            stack_pointer[0] = value;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        /* Push local variable oparg (which may be NULL) and clear the local
         * slot, transferring ownership of the reference to the stack. */
        TARGET(LOAD_FAST_AND_CLEAR) {
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(LOAD_FAST_AND_CLEAR);
            _PyStackRef value;
            value = GETLOCAL(oparg);
            // do not use SETLOCAL here, it decrefs the old value
            GETLOCAL(oparg) = PyStackRef_NULL;
            stack_pointer[0] = value;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        /* Like LOAD_FAST, but the local may be unbound: raises
         * UnboundLocalError with the variable's name if the slot is NULL. */
        TARGET(LOAD_FAST_CHECK) {
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(LOAD_FAST_CHECK);
            _PyStackRef value;
            _PyStackRef value_s = GETLOCAL(oparg);
            if (PyStackRef_IsNull(value_s)) {
                _PyFrame_SetStackPointer(frame, stack_pointer);
                _PyEval_FormatExcCheckArg(tstate, PyExc_UnboundLocalError,
                                          UNBOUNDLOCAL_ERROR_MSG,
                                          PyTuple_GetItem(_PyFrame_GetCode(frame)->co_localsplusnames, oparg)
                );
                stack_pointer = _PyFrame_GetStackPointer(frame);
                goto error;
            }
            value = PyStackRef_DUP(value_s);
            stack_pointer[0] = value;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        /* Superinstruction: push two locals at once.  The two indices are
         * packed into oparg as (oparg1 << 4) | oparg2. */
        TARGET(LOAD_FAST_LOAD_FAST) {
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(LOAD_FAST_LOAD_FAST);
            _PyStackRef value1;
            _PyStackRef value2;
            // High nibble selects the first local, low nibble the second.
            uint32_t oparg1 = oparg >> 4;
            uint32_t oparg2 = oparg & 15;
            value1 = PyStackRef_DUP(GETLOCAL(oparg1));
            value2 = PyStackRef_DUP(GETLOCAL(oparg2));
            stack_pointer[0] = value1;
            stack_pointer[1] = value2;
            stack_pointer += 2;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        /* Pop a class-body namespace mapping and look up the name of local
         * slot oparg in it; if absent, fall back to the cell in
         * localsplus[oparg] (raising the unbound error if the cell is
         * empty).  Replaces the mapping on the stack with the value. */
        TARGET(LOAD_FROM_DICT_OR_DEREF) {
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(LOAD_FROM_DICT_OR_DEREF);
            _PyStackRef class_dict_st;
            _PyStackRef value;
            class_dict_st = stack_pointer[-1];
            PyObject *value_o;
            PyObject *name;
            PyObject *class_dict = PyStackRef_AsPyObjectBorrow(class_dict_st);
            assert(class_dict);
            assert(oparg >= 0 && oparg < _PyFrame_GetCode(frame)->co_nlocalsplus);
            name = PyTuple_GET_ITEM(_PyFrame_GetCode(frame)->co_localsplusnames, oparg);
            _PyFrame_SetStackPointer(frame, stack_pointer);
            int err = PyMapping_GetOptionalItem(class_dict, name, &value_o);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            if (err < 0) {
                goto error;
            }
            // Not found in the mapping (no exception set): read the cell.
            if (!value_o) {
                PyCellObject *cell = (PyCellObject *)PyStackRef_AsPyObjectBorrow(GETLOCAL(oparg));
                value_o = PyCell_GetRef(cell);
                if (value_o == NULL) {
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    _PyEval_FormatExcUnbound(tstate, _PyFrame_GetCode(frame), oparg);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    goto error;
                }
            }
            PyStackRef_CLOSE(class_dict_st);
            value = PyStackRef_FromPyObjectSteal(value_o);
            stack_pointer[-1] = value;
            DISPATCH();
        }
| |
        /* Pop a mapping and look up co_names[oparg] in it; if not found,
         * fall back to globals then builtins (with a fast combined path
         * when both are exact dicts), raising NameError when the name is
         * nowhere to be found.  Replaces the mapping with the value. */
        TARGET(LOAD_FROM_DICT_OR_GLOBALS) {
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(LOAD_FROM_DICT_OR_GLOBALS);
            _PyStackRef mod_or_class_dict;
            _PyStackRef v;
            mod_or_class_dict = stack_pointer[-1];
            PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
            PyObject *v_o;
            _PyFrame_SetStackPointer(frame, stack_pointer);
            int err = PyMapping_GetOptionalItem(PyStackRef_AsPyObjectBorrow(mod_or_class_dict), name, &v_o);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            PyStackRef_CLOSE(mod_or_class_dict);
            if (err < 0) goto pop_1_error;
            if (v_o == NULL) {
                if (PyDict_CheckExact(GLOBALS())
                    && PyDict_CheckExact(BUILTINS()))
                {
                    // Logically pop the (already closed) mapping before
                    // calling out, so the stack is consistent if a GC or
                    // callback inspects the frame.
                    stack_pointer += -1;
                    assert(WITHIN_STACK_BOUNDS());
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    v_o = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(),
                                             (PyDictObject *)BUILTINS(),
                                             name);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    if (v_o == NULL) {
                        if (!_PyErr_Occurred(tstate)) {
                            /* _PyDict_LoadGlobal() returns NULL without raising
                             * an exception if the key doesn't exist */
                            _PyFrame_SetStackPointer(frame, stack_pointer);
                            _PyEval_FormatExcCheckArg(tstate, PyExc_NameError,
                                                      NAME_ERROR_MSG, name);
                            stack_pointer = _PyFrame_GetStackPointer(frame);
                        }
                        goto error;
                    }
                }
                else {
                    /* Slow-path if globals or builtins is not a dict */
                    /* namespace 1: globals */
                    stack_pointer += -1;
                    assert(WITHIN_STACK_BOUNDS());
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    int err = PyMapping_GetOptionalItem(GLOBALS(), name, &v_o);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    if (err < 0) goto error;
                    if (v_o == NULL) {
                        /* namespace 2: builtins */
                        _PyFrame_SetStackPointer(frame, stack_pointer);
                        int err = PyMapping_GetOptionalItem(BUILTINS(), name, &v_o);
                        stack_pointer = _PyFrame_GetStackPointer(frame);
                        if (err < 0) goto error;
                        if (v_o == NULL) {
                            _PyFrame_SetStackPointer(frame, stack_pointer);
                            _PyEval_FormatExcCheckArg(
                                tstate, PyExc_NameError,
                                NAME_ERROR_MSG, name);
                            stack_pointer = _PyFrame_GetStackPointer(frame);
                            goto error;
                        }
                    }
                }
                // Re-grow the stack for the result slot popped above.
                stack_pointer += 1;
                assert(WITHIN_STACK_BOUNDS());
            }
            v = PyStackRef_FromPyObjectSteal(v_o);
            stack_pointer[-1] = v;
            DISPATCH();
        }
| |
        /* Adaptive LOAD_GLOBAL: the first uop may re-specialize this
         * instruction site; the generic path loads co_names[oparg >> 1]
         * from globals/builtins directly into the next stack slot, pushing
         * an extra NULL when the low oparg bit is set. */
        TARGET(LOAD_GLOBAL) {
            frame->instr_ptr = next_instr;
            next_instr += 5;
            INSTRUCTION_STATS(LOAD_GLOBAL);
            PREDICTED(LOAD_GLOBAL);
            _Py_CODEUNIT* const this_instr = next_instr - 5;
            (void)this_instr;
            _PyStackRef *res;
            _PyStackRef null = PyStackRef_NULL;
            // _SPECIALIZE_LOAD_GLOBAL
            {
                uint16_t counter = read_u16(&this_instr[1].cache);
                (void)counter;
                #if ENABLE_SPECIALIZATION
                if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                    PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
                    // Rewind so the (possibly rewritten) instruction is
                    // re-executed after specialization.
                    next_instr = this_instr;
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    _Py_Specialize_LoadGlobal(GLOBALS(), BUILTINS(), next_instr, name);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    DISPATCH_SAME_OPARG();
                }
                OPCODE_DEFERRED_INC(LOAD_GLOBAL);
                ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
                #endif /* ENABLE_SPECIALIZATION */
            }
            /* Skip 1 cache entry */
            /* Skip 1 cache entry */
            /* Skip 1 cache entry */
            // _LOAD_GLOBAL
            {
                // The helper writes the result straight into the stack slot.
                res = &stack_pointer[0];
                PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
                _PyFrame_SetStackPointer(frame, stack_pointer);
                _PyEval_LoadGlobalStackRef(GLOBALS(), BUILTINS(), name, res);
                stack_pointer = _PyFrame_GetStackPointer(frame);
                if (PyStackRef_IsNull(*res)) goto error;
                null = PyStackRef_NULL;
            }
            if (oparg & 1) stack_pointer[1] = null;
            stack_pointer += 1 + (oparg & 1);
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        /* Specialization of LOAD_GLOBAL: the name resolved to the builtins
         * dict.  Guards both the globals and builtins keys versions (the
         * globals guard proves the name has not since been shadowed), then
         * reads the value at a cached entry index. */
        TARGET(LOAD_GLOBAL_BUILTIN) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            next_instr += 5;
            INSTRUCTION_STATS(LOAD_GLOBAL_BUILTIN);
            static_assert(INLINE_CACHE_ENTRIES_LOAD_GLOBAL == 4, "incorrect cache size");
            PyDictKeysObject *builtins_keys;
            _PyStackRef res;
            _PyStackRef null = PyStackRef_NULL;
            /* Skip 1 cache entry */
            // _GUARD_GLOBALS_VERSION
            {
                uint16_t version = read_u16(&this_instr[2].cache);
                PyDictObject *dict = (PyDictObject *)GLOBALS();
                DEOPT_IF(!PyDict_CheckExact(dict), LOAD_GLOBAL);
                DEOPT_IF(dict->ma_keys->dk_version != version, LOAD_GLOBAL);
                assert(DK_IS_UNICODE(dict->ma_keys));
            }
            // _GUARD_BUILTINS_VERSION_PUSH_KEYS
            {
                uint16_t version = read_u16(&this_instr[3].cache);
                PyDictObject *dict = (PyDictObject *)BUILTINS();
                DEOPT_IF(!PyDict_CheckExact(dict), LOAD_GLOBAL);
                DEOPT_IF(dict->ma_keys->dk_version != version, LOAD_GLOBAL);
                builtins_keys = dict->ma_keys;
                assert(DK_IS_UNICODE(builtins_keys));
            }
            // _LOAD_GLOBAL_BUILTINS_FROM_KEYS
            {
                uint16_t index = read_u16(&this_instr[4].cache);
                PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(builtins_keys);
                PyObject *res_o = entries[index].me_value;
                DEOPT_IF(res_o == NULL, LOAD_GLOBAL);
                // New reference for the borrowed entry value; ownership then
                // moves into the stack reference below.
                Py_INCREF(res_o);
                STAT_INC(LOAD_GLOBAL, hit);
                null = PyStackRef_NULL;
                res = PyStackRef_FromPyObjectSteal(res_o);
            }
            stack_pointer[0] = res;
            if (oparg & 1) stack_pointer[1] = null;
            stack_pointer += 1 + (oparg & 1);
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        /* Specialization of LOAD_GLOBAL: the name resolved to the module's
         * globals dict.  Guards the globals keys version, then reads the
         * value at a cached entry index. */
        TARGET(LOAD_GLOBAL_MODULE) {
            _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
            next_instr += 5;
            INSTRUCTION_STATS(LOAD_GLOBAL_MODULE);
            static_assert(INLINE_CACHE_ENTRIES_LOAD_GLOBAL == 4, "incorrect cache size");
            PyDictKeysObject *globals_keys;
            _PyStackRef res;
            _PyStackRef null = PyStackRef_NULL;
            /* Skip 1 cache entry */
            // _GUARD_GLOBALS_VERSION_PUSH_KEYS
            {
                uint16_t version = read_u16(&this_instr[2].cache);
                PyDictObject *dict = (PyDictObject *)GLOBALS();
                DEOPT_IF(!PyDict_CheckExact(dict), LOAD_GLOBAL);
                DEOPT_IF(dict->ma_keys->dk_version != version, LOAD_GLOBAL);
                globals_keys = dict->ma_keys;
                assert(DK_IS_UNICODE(globals_keys));
            }
            /* Skip 1 cache entry */
            // _LOAD_GLOBAL_MODULE_FROM_KEYS
            {
                uint16_t index = read_u16(&this_instr[4].cache);
                PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(globals_keys);
                PyObject *res_o = entries[index].me_value;
                DEOPT_IF(res_o == NULL, LOAD_GLOBAL);
                // New reference for the borrowed entry value; ownership then
                // moves into the stack reference below.
                Py_INCREF(res_o);
                STAT_INC(LOAD_GLOBAL, hit);
                null = PyStackRef_NULL;
                res = PyStackRef_FromPyObjectSteal(res_o);
            }
            stack_pointer[0] = res;
            if (oparg & 1) stack_pointer[1] = null;
            stack_pointer += 1 + (oparg & 1);
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        /* Push the frame's locals mapping; raises SystemError if the frame
         * has none. */
        TARGET(LOAD_LOCALS) {
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(LOAD_LOCALS);
            _PyStackRef locals;
            PyObject *l = LOCALS();
            if (l == NULL) {
                _PyFrame_SetStackPointer(frame, stack_pointer);
                _PyErr_SetString(tstate, PyExc_SystemError,
                                 "no locals found");
                stack_pointer = _PyFrame_GetStackPointer(frame);
                goto error;
            }
            locals = PyStackRef_FromPyObjectNew(l);
            stack_pointer[0] = locals;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        /* Look up co_names[oparg] via the generic name-resolution helper
         * (locals, then globals, then builtins) and push the result. */
        TARGET(LOAD_NAME) {
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(LOAD_NAME);
            _PyStackRef v;
            PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
            _PyFrame_SetStackPointer(frame, stack_pointer);
            // _PyEval_LoadName raises (NameError or otherwise) on failure.
            PyObject *v_o = _PyEval_LoadName(tstate, frame, name);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            if (v_o == NULL) goto error;
            v = PyStackRef_FromPyObjectSteal(v_o);
            stack_pointer[0] = v;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        /* Push the preallocated (immortal) small-int object for the value
         * oparg, taken from the interpreter's small-int table. */
        TARGET(LOAD_SMALL_INT) {
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(LOAD_SMALL_INT);
            _PyStackRef value;
            assert(oparg < _PY_NSMALLPOSINTS);
            // The table stores negatives first, so offset by _PY_NSMALLNEGINTS.
            PyObject *obj = (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS + oparg];
            value = PyStackRef_FromPyObjectImmortal(obj);
            stack_pointer[0] = value;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        /* Pop owner and look up the special method selected by oparg (from
         * the _Py_SpecialMethods table); pushes the bound/unbound method and
         * a self-or-NULL slot.  Raises TypeError with the table's message
         * if the method is missing and no other error is pending. */
        TARGET(LOAD_SPECIAL) {
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(LOAD_SPECIAL);
            _PyStackRef owner;
            _PyStackRef attr;
            _PyStackRef self_or_null;
            owner = stack_pointer[-1];
            assert(oparg <= SPECIAL_MAX);
            // Ownership of owner transfers to the helper call below.
            PyObject *owner_o = PyStackRef_AsPyObjectSteal(owner);
            PyObject *name = _Py_SpecialMethods[oparg].name;
            PyObject *self_or_null_o;
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            _PyFrame_SetStackPointer(frame, stack_pointer);
            PyObject *attr_o = _PyObject_LookupSpecialMethod(owner_o, name, &self_or_null_o);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            if (attr_o == NULL) {
                // NULL without a pending exception means "not found":
                // raise the per-method TypeError from the table.
                if (!_PyErr_Occurred(tstate)) {
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    _PyErr_Format(tstate, PyExc_TypeError,
                                  _Py_SpecialMethods[oparg].error,
                                  Py_TYPE(owner_o)->tp_name);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                }
                goto error;
            }
            attr = PyStackRef_FromPyObjectSteal(attr_o);
            self_or_null = self_or_null_o == NULL ?
                PyStackRef_NULL : PyStackRef_FromPyObjectSteal(self_or_null_o);
            stack_pointer[0] = attr;
            stack_pointer[1] = self_or_null;
            stack_pointer += 2;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        /* Adaptive LOAD_SUPER_ATTR: pops (global_super, class, self), calls
         * super() (with instrumentation call/return events when executed as
         * INSTRUMENTED_LOAD_SUPER_ATTR), then gets co_names[oparg >> 2] from
         * the resulting super object.  oparg bit 0 requests an extra NULL
         * push; bit 1 selects the two-argument super() call. */
        TARGET(LOAD_SUPER_ATTR) {
            frame->instr_ptr = next_instr;
            next_instr += 2;
            INSTRUCTION_STATS(LOAD_SUPER_ATTR);
            PREDICTED(LOAD_SUPER_ATTR);
            _Py_CODEUNIT* const this_instr = next_instr - 2;
            (void)this_instr;
            _PyStackRef global_super_st;
            _PyStackRef class_st;
            _PyStackRef self_st;
            _PyStackRef attr;
            _PyStackRef null = PyStackRef_NULL;
            // _SPECIALIZE_LOAD_SUPER_ATTR
            {
                class_st = stack_pointer[-2];
                global_super_st = stack_pointer[-3];
                uint16_t counter = read_u16(&this_instr[1].cache);
                (void)counter;
                #if ENABLE_SPECIALIZATION
                int load_method = oparg & 1;
                if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                    // Rewind so the (possibly rewritten) instruction is
                    // re-executed after specialization.
                    next_instr = this_instr;
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    _Py_Specialize_LoadSuperAttr(global_super_st, class_st, next_instr, load_method);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    DISPATCH_SAME_OPARG();
                }
                OPCODE_DEFERRED_INC(LOAD_SUPER_ATTR);
                ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
                #endif /* ENABLE_SPECIALIZATION */
            }
            // _LOAD_SUPER_ATTR
            {
                self_st = stack_pointer[-1];
                PyObject *global_super = PyStackRef_AsPyObjectBorrow(global_super_st);
                PyObject *class = PyStackRef_AsPyObjectBorrow(class_st);
                PyObject *self = PyStackRef_AsPyObjectBorrow(self_st);
                // This body is shared with INSTRUMENTED_LOAD_SUPER_ATTR;
                // the instrumentation branches only fire for that opcode.
                if (opcode == INSTRUMENTED_LOAD_SUPER_ATTR) {
                    PyObject *arg = oparg & 2 ? class : &_PyInstrumentation_MISSING;
                    _PyFrame_SetStackPointer(frame, stack_pointer);
                    int err = _Py_call_instrumentation_2args(
                        tstate, PY_MONITORING_EVENT_CALL,
                        frame, this_instr, global_super, arg);
                    stack_pointer = _PyFrame_GetStackPointer(frame);
                    if (err) {
                        PyStackRef_CLOSE(global_super_st);
                        PyStackRef_CLOSE(class_st);
                        PyStackRef_CLOSE(self_st);
                        goto pop_3_error;
                    }
                }
                // we make no attempt to optimize here; specializations should
                // handle any case whose performance we care about
                PyObject *stack[] = {class, self};
                _PyFrame_SetStackPointer(frame, stack_pointer);
                // oparg & 2 doubles as the positional-arg count: 2 for
                // super(class, self), 0 for zero-argument super().
                PyObject *super = PyObject_Vectorcall(global_super, stack, oparg & 2, NULL);
                stack_pointer = _PyFrame_GetStackPointer(frame);
                if (opcode == INSTRUMENTED_LOAD_SUPER_ATTR) {
                    PyObject *arg = oparg & 2 ? class : &_PyInstrumentation_MISSING;
                    if (super == NULL) {
                        _PyFrame_SetStackPointer(frame, stack_pointer);
                        _Py_call_instrumentation_exc2(
                            tstate, PY_MONITORING_EVENT_C_RAISE,
                            frame, this_instr, global_super, arg);
                        stack_pointer = _PyFrame_GetStackPointer(frame);
                    }
                    else {
                        _PyFrame_SetStackPointer(frame, stack_pointer);
                        int err = _Py_call_instrumentation_2args(
                            tstate, PY_MONITORING_EVENT_C_RETURN,
                            frame, this_instr, global_super, arg);
                        stack_pointer = _PyFrame_GetStackPointer(frame);
                        if (err < 0) {
                            // A failing C_RETURN callback turns the call
                            // into an error; drop the result.
                            Py_CLEAR(super);
                        }
                    }
                }
                PyStackRef_CLOSE(global_super_st);
                PyStackRef_CLOSE(class_st);
                PyStackRef_CLOSE(self_st);
                if (super == NULL) goto pop_3_error;
                PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2);
                stack_pointer += -3;
                assert(WITHIN_STACK_BOUNDS());
                _PyFrame_SetStackPointer(frame, stack_pointer);
                PyObject *attr_o = PyObject_GetAttr(super, name);
                stack_pointer = _PyFrame_GetStackPointer(frame);
                Py_DECREF(super);
                if (attr_o == NULL) goto error;
                attr = PyStackRef_FromPyObjectSteal(attr_o);
                null = PyStackRef_NULL;
            }
            stack_pointer[0] = attr;
            if (oparg & 1) stack_pointer[1] = null;
            stack_pointer += 1 + (oparg & 1);
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        /* Specialization of LOAD_SUPER_ATTR for plain attribute access
         * (oparg bit 0 clear): skips creating a super object and calls
         * _PySuper_Lookup directly on (class, self). */
        TARGET(LOAD_SUPER_ATTR_ATTR) {
            frame->instr_ptr = next_instr;
            next_instr += 2;
            INSTRUCTION_STATS(LOAD_SUPER_ATTR_ATTR);
            static_assert(INLINE_CACHE_ENTRIES_LOAD_SUPER_ATTR == 1, "incorrect cache size");
            _PyStackRef global_super_st;
            _PyStackRef class_st;
            _PyStackRef self_st;
            _PyStackRef attr_st;
            /* Skip 1 cache entry */
            self_st = stack_pointer[-1];
            class_st = stack_pointer[-2];
            global_super_st = stack_pointer[-3];
            PyObject *global_super = PyStackRef_AsPyObjectBorrow(global_super_st);
            PyObject *class = PyStackRef_AsPyObjectBorrow(class_st);
            PyObject *self = PyStackRef_AsPyObjectBorrow(self_st);
            assert(!(oparg & 1));
            // Only the real builtin super with a real type qualifies.
            DEOPT_IF(global_super != (PyObject *)&PySuper_Type, LOAD_SUPER_ATTR);
            DEOPT_IF(!PyType_Check(class), LOAD_SUPER_ATTR);
            STAT_INC(LOAD_SUPER_ATTR, hit);
            PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2);
            _PyFrame_SetStackPointer(frame, stack_pointer);
            PyObject *attr = _PySuper_Lookup((PyTypeObject *)class, self, name, NULL);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            PyStackRef_CLOSE(global_super_st);
            PyStackRef_CLOSE(class_st);
            PyStackRef_CLOSE(self_st);
            if (attr == NULL) goto pop_3_error;
            attr_st = PyStackRef_FromPyObjectSteal(attr);
            // Replace the three inputs with the single result.
            stack_pointer[-3] = attr_st;
            stack_pointer += -2;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        /* Specialization of LOAD_SUPER_ATTR for method loads (oparg bit 0
         * set): looks the name up via _PySuper_Lookup and, when an unbound
         * method was found, pushes self alongside it so the subsequent call
         * can bind it; otherwise pushes NULL in the self slot. */
        TARGET(LOAD_SUPER_ATTR_METHOD) {
            frame->instr_ptr = next_instr;
            next_instr += 2;
            INSTRUCTION_STATS(LOAD_SUPER_ATTR_METHOD);
            static_assert(INLINE_CACHE_ENTRIES_LOAD_SUPER_ATTR == 1, "incorrect cache size");
            _PyStackRef global_super_st;
            _PyStackRef class_st;
            _PyStackRef self_st;
            _PyStackRef attr;
            _PyStackRef self_or_null;
            /* Skip 1 cache entry */
            self_st = stack_pointer[-1];
            class_st = stack_pointer[-2];
            global_super_st = stack_pointer[-3];
            PyObject *global_super = PyStackRef_AsPyObjectBorrow(global_super_st);
            PyObject *class = PyStackRef_AsPyObjectBorrow(class_st);
            PyObject *self = PyStackRef_AsPyObjectBorrow(self_st);
            assert(oparg & 1);
            DEOPT_IF(global_super != (PyObject *)&PySuper_Type, LOAD_SUPER_ATTR);
            DEOPT_IF(!PyType_Check(class), LOAD_SUPER_ATTR);
            STAT_INC(LOAD_SUPER_ATTR, hit);
            PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2);
            PyTypeObject *cls = (PyTypeObject *)class;
            int method_found = 0;
            _PyFrame_SetStackPointer(frame, stack_pointer);
            // method_found is only reported when self uses the generic
            // getattr, i.e. when the lookup result is a plain function.
            PyObject *attr_o = _PySuper_Lookup(cls, self, name,
                                               Py_TYPE(self)->tp_getattro == PyObject_GenericGetAttr ? &method_found : NULL);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            PyStackRef_CLOSE(global_super_st);
            PyStackRef_CLOSE(class_st);
            if (attr_o == NULL) {
                PyStackRef_CLOSE(self_st);
                goto pop_3_error;
            }
            if (method_found) {
                self_or_null = self_st; // transfer ownership
            } else {
                PyStackRef_CLOSE(self_st);
                self_or_null = PyStackRef_NULL;
            }
            attr = PyStackRef_FromPyObjectSteal(attr_o);
            stack_pointer[-3] = attr;
            stack_pointer[-2] = self_or_null;
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
        /* Wrap local slot oparg in a new cell object, replacing the slot's
         * current contents (which become the cell's initial value). */
        TARGET(MAKE_CELL) {
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(MAKE_CELL);
            // "initial" is probably NULL but not if it's an arg (or set
            // via the f_locals proxy before MAKE_CELL has run).
            PyObject *initial = PyStackRef_AsPyObjectBorrow(GETLOCAL(oparg));
            PyObject *cell = PyCell_New(initial);
            if (cell == NULL) {
                goto error;
            }
            // SETLOCAL releases the old reference held by the slot.
            SETLOCAL(oparg, PyStackRef_FromPyObjectSteal(cell));
            DISPATCH();
        }
| |
        /* Pop a code object and push a new function object built from it
         * and the current globals. */
        TARGET(MAKE_FUNCTION) {
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(MAKE_FUNCTION);
            _PyStackRef codeobj_st;
            _PyStackRef func;
            codeobj_st = stack_pointer[-1];
            PyObject *codeobj = PyStackRef_AsPyObjectBorrow(codeobj_st);
            _PyFrame_SetStackPointer(frame, stack_pointer);
            PyFunctionObject *func_obj = (PyFunctionObject *)
                PyFunction_New(codeobj, GLOBALS());
            stack_pointer = _PyFrame_GetStackPointer(frame);
            PyStackRef_CLOSE(codeobj_st);
            if (func_obj == NULL) goto pop_1_error;
            // Stamp the function with the code object's version so it can
            // participate in specialization caches.
            _PyFunction_SetVersion(
                func_obj, ((PyCodeObject *)codeobj)->co_version);
            func = PyStackRef_FromPyObjectSteal((PyObject *)func_obj);
            stack_pointer[-1] = func;
            DISPATCH();
        }
| |
        /* Pop key and value and store them into the dict that sits oparg-1
         * entries below them on the stack (used by dict comprehensions). */
        TARGET(MAP_ADD) {
            frame->instr_ptr = next_instr;
            next_instr += 1;
            INSTRUCTION_STATS(MAP_ADD);
            _PyStackRef dict_st;
            _PyStackRef key;
            _PyStackRef value;
            value = stack_pointer[-1];
            key = stack_pointer[-2];
            dict_st = stack_pointer[-3 - (oparg - 1)];
            PyObject *dict = PyStackRef_AsPyObjectBorrow(dict_st);
            assert(PyDict_CheckExact(dict));
            /* dict[key] = value */
            // Do not DECREF INPUTS because the function steals the references
            _PyFrame_SetStackPointer(frame, stack_pointer);
            int err = _PyDict_SetItem_Take2(
                (PyDictObject *)dict,
                PyStackRef_AsPyObjectSteal(key),
                PyStackRef_AsPyObjectSteal(value)
            );
            stack_pointer = _PyFrame_GetStackPointer(frame);
            if (err != 0) goto pop_2_error;
            stack_pointer += -2;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }
| |
TARGET(MATCH_CLASS) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(MATCH_CLASS);
    _PyStackRef subject;
    _PyStackRef type;
    _PyStackRef names;
    _PyStackRef attrs;
    names = stack_pointer[-1];
    type = stack_pointer[-2];
    subject = stack_pointer[-3];
    // Pop TOS and TOS1. Set TOS to a tuple of attributes on success, or
    // None on failure.
    assert(PyTuple_CheckExact(PyStackRef_AsPyObjectBorrow(names)));
    _PyFrame_SetStackPointer(frame, stack_pointer);
    PyObject *attrs_o = _PyEval_MatchClass(tstate,
        PyStackRef_AsPyObjectBorrow(subject),
        PyStackRef_AsPyObjectBorrow(type), oparg,
        PyStackRef_AsPyObjectBorrow(names));
    stack_pointer = _PyFrame_GetStackPointer(frame);
    PyStackRef_CLOSE(subject);
    PyStackRef_CLOSE(type);
    PyStackRef_CLOSE(names);
    if (attrs_o) {
        assert(PyTuple_CheckExact(attrs_o));  // Success!
        attrs = PyStackRef_FromPyObjectSteal(attrs_o);
    }
    else {
        // NULL with an exception set is an error; NULL without one
        // just means the pattern did not match.
        if (_PyErr_Occurred(tstate)) goto pop_3_error;
        attrs = PyStackRef_None;  // Failure!
    }
    stack_pointer[-3] = attrs;
    stack_pointer += -2;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(MATCH_KEYS) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(MATCH_KEYS);
    // Mapping-pattern key match: subject (TOS1) and keys tuple (TOS)
    // stay on the stack; the matched values (or None) are pushed.
    _PyStackRef subject;
    _PyStackRef keys;
    _PyStackRef values_or_none;
    keys = stack_pointer[-1];
    subject = stack_pointer[-2];
    // On successful match, PUSH(values). Otherwise, PUSH(None).
    _PyFrame_SetStackPointer(frame, stack_pointer);
    PyObject *values_or_none_o = _PyEval_MatchKeys(tstate,
        PyStackRef_AsPyObjectBorrow(subject), PyStackRef_AsPyObjectBorrow(keys));
    stack_pointer = _PyFrame_GetStackPointer(frame);
    if (values_or_none_o == NULL) goto error;
    values_or_none = PyStackRef_FromPyObjectSteal(values_or_none_o);
    stack_pointer[0] = values_or_none;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(MATCH_MAPPING) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(MATCH_MAPPING);
    // Push True if the subject's type has the Py_TPFLAGS_MAPPING flag,
    // False otherwise; the subject itself stays on the stack.
    _PyStackRef subject;
    _PyStackRef res;
    subject = stack_pointer[-1];
    int match = PyStackRef_TYPE(subject)->tp_flags & Py_TPFLAGS_MAPPING;
    res = match ? PyStackRef_True : PyStackRef_False;
    stack_pointer[0] = res;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(MATCH_SEQUENCE) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(MATCH_SEQUENCE);
    // Push True if the subject's type has the Py_TPFLAGS_SEQUENCE flag,
    // False otherwise; the subject itself stays on the stack.
    _PyStackRef subject;
    _PyStackRef res;
    subject = stack_pointer[-1];
    int match = PyStackRef_TYPE(subject)->tp_flags & Py_TPFLAGS_SEQUENCE;
    res = match ? PyStackRef_True : PyStackRef_False;
    stack_pointer[0] = res;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(NOP) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(NOP);
    // No operation: advance past the instruction and dispatch.
    DISPATCH();
}
| |
TARGET(POP_EXCEPT) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(POP_EXCEPT);
    // Pop the saved exception state and restore it as the thread's
    // current "exception being handled"; None on the stack means there
    // was no active exception, and maps to NULL in exc_info.
    _PyStackRef exc_value;
    exc_value = stack_pointer[-1];
    _PyErr_StackItem *exc_info = tstate->exc_info;
    _PyFrame_SetStackPointer(frame, stack_pointer);
    // Py_XSETREF releases the previously stored exception value.
    Py_XSETREF(exc_info->exc_value,
        PyStackRef_Is(exc_value, PyStackRef_None)
        ? NULL : PyStackRef_AsPyObjectSteal(exc_value));
    stack_pointer = _PyFrame_GetStackPointer(frame);
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(POP_JUMP_IF_FALSE) {
    _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
    (void)this_instr;
    next_instr += 2;
    INSTRUCTION_STATS(POP_JUMP_IF_FALSE);
    // Pop a (guaranteed bool) condition; jump forward by oparg code
    // units when it is False. The cache entry records branch direction.
    _PyStackRef cond;
    /* Skip 1 cache entry */
    cond = stack_pointer[-1];
    assert(PyStackRef_BoolCheck(cond));
    int flag = PyStackRef_Is(cond, PyStackRef_False);
    RECORD_BRANCH_TAKEN(this_instr[1].cache, flag);
    // Branchless jump: offset is oparg * 0-or-1.
    JUMPBY(oparg * flag);
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(POP_JUMP_IF_NONE) {
    _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
    (void)this_instr;
    next_instr += 2;
    INSTRUCTION_STATS(POP_JUMP_IF_NONE);
    // Pop TOS; jump forward by oparg if it is None.
    // Implemented as the fused uops _IS_NONE + _POP_JUMP_IF_TRUE.
    _PyStackRef value;
    _PyStackRef b;
    _PyStackRef cond;
    /* Skip 1 cache entry */
    // _IS_NONE
    {
        value = stack_pointer[-1];
        if (PyStackRef_Is(value, PyStackRef_None)) {
            // value is None; no CLOSE emitted for it here (presumably
            // because None needs no reference management — generator
            // invariant, confirm against bytecodes.c).
            b = PyStackRef_True;
        }
        else {
            b = PyStackRef_False;
            PyStackRef_CLOSE(value);
        }
    }
    // _POP_JUMP_IF_TRUE
    {
        cond = b;
        assert(PyStackRef_BoolCheck(cond));
        int flag = PyStackRef_Is(cond, PyStackRef_True);
        RECORD_BRANCH_TAKEN(this_instr[1].cache, flag);
        JUMPBY(oparg * flag);
    }
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(POP_JUMP_IF_NOT_NONE) {
    _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
    (void)this_instr;
    next_instr += 2;
    INSTRUCTION_STATS(POP_JUMP_IF_NOT_NONE);
    // Pop TOS; jump forward by oparg if it is NOT None.
    // Implemented as the fused uops _IS_NONE + _POP_JUMP_IF_FALSE.
    _PyStackRef value;
    _PyStackRef b;
    _PyStackRef cond;
    /* Skip 1 cache entry */
    // _IS_NONE
    {
        value = stack_pointer[-1];
        if (PyStackRef_Is(value, PyStackRef_None)) {
            // value is None; no CLOSE emitted for it here (presumably
            // because None needs no reference management — generator
            // invariant, confirm against bytecodes.c).
            b = PyStackRef_True;
        }
        else {
            b = PyStackRef_False;
            PyStackRef_CLOSE(value);
        }
    }
    // _POP_JUMP_IF_FALSE
    {
        cond = b;
        assert(PyStackRef_BoolCheck(cond));
        int flag = PyStackRef_Is(cond, PyStackRef_False);
        RECORD_BRANCH_TAKEN(this_instr[1].cache, flag);
        JUMPBY(oparg * flag);
    }
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(POP_JUMP_IF_TRUE) {
    _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
    (void)this_instr;
    next_instr += 2;
    INSTRUCTION_STATS(POP_JUMP_IF_TRUE);
    // Pop a (guaranteed bool) condition; jump forward by oparg code
    // units when it is True. The cache entry records branch direction.
    _PyStackRef cond;
    /* Skip 1 cache entry */
    cond = stack_pointer[-1];
    assert(PyStackRef_BoolCheck(cond));
    int flag = PyStackRef_Is(cond, PyStackRef_True);
    RECORD_BRANCH_TAKEN(this_instr[1].cache, flag);
    // Branchless jump: offset is oparg * 0-or-1.
    JUMPBY(oparg * flag);
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(POP_TOP) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(POP_TOP);
    // Discard TOS, releasing its stack reference.
    _PyStackRef value;
    value = stack_pointer[-1];
    PyStackRef_CLOSE(value);
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(PUSH_EXC_INFO) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(PUSH_EXC_INFO);
    // On entry to an exception handler: save the previously-handled
    // exception (or None) under the new one, and make the new exception
    // the thread's current exc_info. Net stack effect: exc -> prev, exc.
    _PyStackRef exc;
    _PyStackRef prev_exc;
    _PyStackRef new_exc;
    exc = stack_pointer[-1];
    _PyErr_StackItem *exc_info = tstate->exc_info;
    if (exc_info->exc_value != NULL) {
        // Steal exc_info's reference; it is replaced below.
        prev_exc = PyStackRef_FromPyObjectSteal(exc_info->exc_value);
    }
    else {
        prev_exc = PyStackRef_None;
    }
    assert(PyStackRef_ExceptionInstanceCheck(exc));
    exc_info->exc_value = PyStackRef_AsPyObjectNew(exc);
    new_exc = exc;
    stack_pointer[-1] = prev_exc;
    stack_pointer[0] = new_exc;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(PUSH_NULL) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(PUSH_NULL);
    // Push a NULL stackref placeholder (used by the calling convention).
    _PyStackRef res;
    res = PyStackRef_NULL;
    stack_pointer[0] = res;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(RAISE_VARARGS) {
    _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
    (void)this_instr;
    next_instr += 1;
    INSTRUCTION_STATS(RAISE_VARARGS);
    // Raise an exception. oparg counts the stack arguments:
    //   0: bare `raise` (re-raise current exception)
    //   1: `raise exc`
    //   2: `raise exc from cause`
    _PyStackRef *args;
    args = &stack_pointer[-oparg];
    assert(oparg < 3);
    PyObject *cause = oparg == 2 ? PyStackRef_AsPyObjectSteal(args[1]) : NULL;
    PyObject *exc = oparg > 0 ? PyStackRef_AsPyObjectSteal(args[0]) : NULL;
    stack_pointer += -oparg;
    assert(WITHIN_STACK_BOUNDS());
    _PyFrame_SetStackPointer(frame, stack_pointer);
    int err = do_raise(tstate, exc, cause);
    stack_pointer = _PyFrame_GetStackPointer(frame);
    if (err) {
        // Non-zero return only happens for a successful bare re-raise
        // (see the assert); unwind with the restored exception.
        assert(oparg == 0);
        _PyFrame_SetStackPointer(frame, stack_pointer);
        monitor_reraise(tstate, frame, this_instr);
        stack_pointer = _PyFrame_GetStackPointer(frame);
        goto exception_unwind;
    }
    // do_raise set the exception; propagate through the error path.
    goto error;
}
| |
TARGET(RERAISE) {
    _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
    (void)this_instr;
    next_instr += 1;
    INSTRUCTION_STATS(RERAISE);
    // Re-raise the exception at TOS and unwind. When oparg > 0 the
    // stack also holds a saved `lasti` below the values; it is used to
    // restore frame->instr_ptr so the traceback points at the original
    // instruction.
    _PyStackRef *values;
    _PyStackRef exc_st;
    exc_st = stack_pointer[-1];
    values = &stack_pointer[-1 - oparg];
    PyObject *exc = PyStackRef_AsPyObjectSteal(exc_st);
    assert(oparg >= 0 && oparg <= 2);
    if (oparg) {
        PyObject *lasti = PyStackRef_AsPyObjectBorrow(values[0]);
        if (PyLong_Check(lasti)) {
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            _PyFrame_SetStackPointer(frame, stack_pointer);
            frame->instr_ptr = _PyFrame_GetBytecode(frame) + PyLong_AsLong(lasti);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            assert(!_PyErr_Occurred(tstate));
        }
        else {
            // A non-int lasti indicates corrupted unwind state.
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            _PyFrame_SetStackPointer(frame, stack_pointer);
            _PyErr_SetString(tstate, PyExc_SystemError, "lasti is not an int");
            stack_pointer = _PyFrame_GetStackPointer(frame);
            Py_DECREF(exc);
            goto error;
        }
        // Undo the temporary pop used while reading lasti.
        stack_pointer += 1;
        assert(WITHIN_STACK_BOUNDS());
    }
    assert(exc && PyExceptionInstance_Check(exc));
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    _PyFrame_SetStackPointer(frame, stack_pointer);
    // _PyErr_SetRaisedException steals our reference to exc.
    _PyErr_SetRaisedException(tstate, exc);
    monitor_reraise(tstate, frame, this_instr);
    stack_pointer = _PyFrame_GetStackPointer(frame);
    goto exception_unwind;
}
| |
TARGET(RESERVED) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(RESERVED);
    // This opcode must never be executed; abort hard if it is.
    assert(0 && "Executing RESERVED instruction.");
    Py_FatalError("Executing RESERVED instruction.");
    DISPATCH();
}
| |
| TARGET(RESUME) { |
| frame->instr_ptr = next_instr; |
| next_instr += 1; |
| INSTRUCTION_STATS(RESUME); |
| PREDICTED(RESUME); |
| _Py_CODEUNIT* const this_instr = next_instr - 1; |
| (void)this_instr; |
| // _LOAD_BYTECODE |
| { |
| #ifdef Py_GIL_DISABLED |
| if (frame->tlbc_index != |
| ((_PyThreadStateImpl *)tstate)->tlbc_index) { |
| _PyFrame_SetStackPointer(frame, stack_pointer); |
| _Py_CODEUNIT *bytecode = |
| _PyEval_GetExecutableCode(tstate, _PyFrame_GetCode(frame)); |
| stack_pointer = _PyFrame_GetStackPointer(frame); |
| if (bytecode == NULL) goto error; |
| _PyFrame_SetStackPointer(frame, stack_pointer); |
| ptrdiff_t off = this_instr - _PyFrame_GetBytecode(frame); |
| stack_pointer = _PyFrame_GetStackPointer(frame); |
| frame->tlbc_index = ((_PyThreadStateImpl *)tstate)->tlbc_index; |
| frame->instr_ptr = bytecode + off; |
| // Make sure this_instr gets reset correctley for any uops that |
| // follow |
| next_instr = frame->instr_ptr; |
| DISPATCH(); |
| } |
| #endif |
| } |
| // _MAYBE_INSTRUMENT |
| { |
| if (tstate->tracing == 0) { |
| uintptr_t global_version = _Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & ~_PY_EVAL_EVENTS_MASK; |
| uintptr_t code_version = FT_ATOMIC_LOAD_UINTPTR_ACQUIRE(_PyFrame_GetCode(frame)->_co_instrumentation_version); |
| if (code_version != global_version) { |
| _PyFrame_SetStackPointer(frame, stack_pointer); |
| int err = _Py_Instrument(_PyFrame_GetCode(frame), tstate->interp); |
| stack_pointer = _PyFrame_GetStackPointer(frame); |
| if (err) { |
| goto error; |
| } |
| next_instr = this_instr; |
| DISPATCH(); |
| } |
| } |
| } |
| // _QUICKEN_RESUME |
| { |
| #if ENABLE_SPECIALIZATION_FT |
| if (tstate->tracing == 0 && this_instr->op.code == RESUME) { |
| FT_ATOMIC_STORE_UINT8_RELAXED(this_instr->op.code, RESUME_CHECK); |
| } |
| #endif /* ENABLE_SPECIALIZATION_FT */ |
| } |
| // _CHECK_PERIODIC_IF_NOT_YIELD_FROM |
| { |
| if ((oparg & RESUME_OPARG_LOCATION_MASK) < RESUME_AFTER_YIELD_FROM) { |
| _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY(); |
| QSBR_QUIESCENT_STATE(tstate); \ |
| if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) { |
| _PyFrame_SetStackPointer(frame, stack_pointer); |
| int err = _Py_HandlePending(tstate); |
| stack_pointer = _PyFrame_GetStackPointer(frame); |
| if (err != 0) goto error; |
| } |
| } |
| } |
| DISPATCH(); |
| } |
| |
TARGET(RESUME_CHECK) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(RESUME_CHECK);
    static_assert(0 == 0, "incorrect cache size");
    // Specialized RESUME: cheap guards only; deoptimize back to the
    // full RESUME when anything needs handling.
    #if defined(__EMSCRIPTEN__)
    DEOPT_IF(_Py_emscripten_signal_clock == 0, RESUME);
    _Py_emscripten_signal_clock -= Py_EMSCRIPTEN_SIGNAL_HANDLING;
    #endif
    uintptr_t eval_breaker = _Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker);
    uintptr_t version = FT_ATOMIC_LOAD_UINTPTR_ACQUIRE(_PyFrame_GetCode(frame)->_co_instrumentation_version);
    assert((version & _PY_EVAL_EVENTS_MASK) == 0);
    // Any pending event bit or instrumentation-version mismatch makes
    // these differ, forcing the slow path.
    DEOPT_IF(eval_breaker != version, RESUME);
    #ifdef Py_GIL_DISABLED
    DEOPT_IF(frame->tlbc_index !=
        ((_PyThreadStateImpl *)tstate)->tlbc_index, RESUME);
    #endif
    DISPATCH();
}
| |
TARGET(RETURN_GENERATOR) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(RETURN_GENERATOR);
    // First instruction of a generator/coroutine function body:
    // create the generator object, move this frame into it, pop the
    // frame, and return the generator to the caller.
    _PyStackRef res;
    assert(PyStackRef_FunctionCheck(frame->f_funcobj));
    PyFunctionObject *func = (PyFunctionObject *)PyStackRef_AsPyObjectBorrow(frame->f_funcobj);
    _PyFrame_SetStackPointer(frame, stack_pointer);
    PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(func);
    stack_pointer = _PyFrame_GetStackPointer(frame);
    if (gen == NULL) goto error;
    assert(EMPTY());
    _PyFrame_SetStackPointer(frame, stack_pointer);
    _PyInterpreterFrame *gen_frame = &gen->gi_iframe;
    // Skip this instruction so resuming starts after RETURN_GENERATOR.
    frame->instr_ptr++;
    _PyFrame_Copy(frame, gen_frame);
    assert(frame->frame_obj == NULL);
    gen->gi_frame_state = FRAME_CREATED;
    gen_frame->owner = FRAME_OWNED_BY_GENERATOR;
    _Py_LeaveRecursiveCallPy(tstate);
    // Unlink and pop the current (now copied) frame, then resume the
    // caller with the generator as the call's result.
    _PyInterpreterFrame *prev = frame->previous;
    _PyThreadState_PopFrame(tstate, frame);
    frame = tstate->current_frame = prev;
    LOAD_IP(frame->return_offset);
    stack_pointer = _PyFrame_GetStackPointer(frame);
    res = PyStackRef_FromPyObjectSteal((PyObject *)gen);
    LLTRACE_RESUME_FRAME();
    stack_pointer[0] = res;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(RETURN_VALUE) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(RETURN_VALUE);
    // Pop the return value, tear down the current frame, and push the
    // value onto the caller's stack.
    _PyStackRef retval;
    _PyStackRef res;
    retval = stack_pointer[-1];
    #if TIER_ONE
    assert(frame != &entry_frame);
    #endif
    // Hold the return value in a local across the frame switch.
    _PyStackRef temp = retval;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    _PyFrame_SetStackPointer(frame, stack_pointer);
    assert(EMPTY());
    _Py_LeaveRecursiveCallPy(tstate);
    // GH-99729: We need to unlink the frame *before* clearing it:
    _PyInterpreterFrame *dying = frame;
    frame = tstate->current_frame = dying->previous;
    _PyEval_FrameClearAndPop(tstate, dying);
    // From here on, stack_pointer/IP belong to the caller's frame.
    stack_pointer = _PyFrame_GetStackPointer(frame);
    LOAD_IP(frame->return_offset);
    res = temp;
    LLTRACE_RESUME_FRAME();
    stack_pointer[0] = res;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(SEND) {
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(SEND);
    PREDICTED(SEND);
    _Py_CODEUNIT* const this_instr = next_instr - 2;
    (void)this_instr;
    // Send v (TOS) into receiver (TOS1). Known gen/coro frames are
    // entered inline; otherwise fall back to tp_iternext or .send().
    // StopIteration means the sub-iterator finished: its value replaces
    // TOS and execution jumps forward by oparg past the send loop.
    _PyStackRef receiver;
    _PyStackRef v;
    _PyStackRef retval;
    // _SPECIALIZE_SEND
    {
        receiver = stack_pointer[-2];
        uint16_t counter = read_u16(&this_instr[1].cache);
        (void)counter;
        #if ENABLE_SPECIALIZATION
        if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
            // Back up and retry the (possibly specialized) instruction.
            next_instr = this_instr;
            _PyFrame_SetStackPointer(frame, stack_pointer);
            _Py_Specialize_Send(receiver, next_instr);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            DISPATCH_SAME_OPARG();
        }
        OPCODE_DEFERRED_INC(SEND);
        ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
        #endif /* ENABLE_SPECIALIZATION */
    }
    // _SEND
    {
        v = stack_pointer[-1];
        PyObject *receiver_o = PyStackRef_AsPyObjectBorrow(receiver);
        PyObject *retval_o;
        assert(frame != &entry_frame);
        if ((tstate->interp->eval_frame == NULL) &&
            (Py_TYPE(receiver_o) == &PyGen_Type || Py_TYPE(receiver_o) == &PyCoro_Type) &&
            ((PyGenObject *)receiver_o)->gi_frame_state < FRAME_EXECUTING)
        {
            // Fast path: push v into the generator's frame and execute
            // it inline without a C-level call.
            PyGenObject *gen = (PyGenObject *)receiver_o;
            _PyInterpreterFrame *gen_frame = &gen->gi_iframe;
            STACK_SHRINK(1);
            _PyFrame_StackPush(gen_frame, v);
            gen->gi_frame_state = FRAME_EXECUTING;
            gen->gi_exc_state.previous_item = tstate->exc_info;
            tstate->exc_info = &gen->gi_exc_state;
            // On return, resume after this instruction's oparg jump.
            assert( 2 + oparg <= UINT16_MAX);
            frame->return_offset = (uint16_t)( 2 + oparg);
            assert(gen_frame->previous == NULL);
            gen_frame->previous = frame;
            DISPATCH_INLINED(gen_frame);
        }
        if (PyStackRef_Is(v, PyStackRef_None) && PyIter_Check(receiver_o)) {
            // send(None) on a plain iterator is just next().
            _PyFrame_SetStackPointer(frame, stack_pointer);
            retval_o = Py_TYPE(receiver_o)->tp_iternext(receiver_o);
            stack_pointer = _PyFrame_GetStackPointer(frame);
        }
        else {
            _PyFrame_SetStackPointer(frame, stack_pointer);
            retval_o = PyObject_CallMethodOneArg(receiver_o,
                &_Py_ID(send),
                PyStackRef_AsPyObjectBorrow(v));
            stack_pointer = _PyFrame_GetStackPointer(frame);
        }
        if (retval_o == NULL) {
            _PyFrame_SetStackPointer(frame, stack_pointer);
            int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            if (matches) {
                _PyFrame_SetStackPointer(frame, stack_pointer);
                _PyEval_MonitorRaise(tstate, frame, this_instr);
                stack_pointer = _PyFrame_GetStackPointer(frame);
            }
            // Extract StopIteration.value if that's what was raised.
            _PyFrame_SetStackPointer(frame, stack_pointer);
            int err = _PyGen_FetchStopIterationValue(&retval_o);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            if (err == 0) {
                assert(retval_o != NULL);
                JUMPBY(oparg);
            }
            else {
                PyStackRef_CLOSE(v);
                goto pop_1_error;
            }
        }
        PyStackRef_CLOSE(v);
        retval = PyStackRef_FromPyObjectSteal(retval_o);
    }
    stack_pointer[-1] = retval;
    DISPATCH();
}
| |
TARGET(SEND_GEN) {
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(SEND_GEN);
    static_assert(INLINE_CACHE_ENTRIES_SEND == 1, "incorrect cache size");
    // Specialized SEND for generators/coroutines: push v into the
    // generator's frame and enter it directly, bypassing the generic
    // send machinery. Deoptimizes to SEND if the guards fail.
    _PyStackRef receiver;
    _PyStackRef v;
    _PyInterpreterFrame *gen_frame;
    _PyInterpreterFrame *new_frame;
    /* Skip 1 cache entry */
    // _CHECK_PEP_523
    {
        // A custom eval_frame hook (PEP 523) disables frame inlining.
        DEOPT_IF(tstate->interp->eval_frame, SEND);
    }
    // _SEND_GEN_FRAME
    {
        v = stack_pointer[-1];
        receiver = stack_pointer[-2];
        PyGenObject *gen = (PyGenObject *)PyStackRef_AsPyObjectBorrow(receiver);
        DEOPT_IF(Py_TYPE(gen) != &PyGen_Type && Py_TYPE(gen) != &PyCoro_Type, SEND);
        DEOPT_IF(gen->gi_frame_state >= FRAME_EXECUTING, SEND);
        STAT_INC(SEND, hit);
        gen_frame = &gen->gi_iframe;
        _PyFrame_StackPush(gen_frame, v);
        gen->gi_frame_state = FRAME_EXECUTING;
        gen->gi_exc_state.previous_item = tstate->exc_info;
        tstate->exc_info = &gen->gi_exc_state;
        // On return, resume after this instruction's oparg jump.
        assert( 2 + oparg <= UINT16_MAX);
        frame->return_offset = (uint16_t)( 2 + oparg);
        gen_frame->previous = frame;
    }
    // _PUSH_FRAME
    {
        new_frame = gen_frame;
        // Write it out explicitly because it's subtly different.
        // Eventually this should be the only occurrence of this code.
        assert(tstate->interp->eval_frame == NULL);
        _PyInterpreterFrame *temp = new_frame;
        stack_pointer += -1;
        assert(WITHIN_STACK_BOUNDS());
        _PyFrame_SetStackPointer(frame, stack_pointer);
        assert(new_frame->previous == frame || new_frame->previous->previous == frame);
        CALL_STAT_INC(inlined_py_calls);
        frame = tstate->current_frame = temp;
        tstate->py_recursion_remaining--;
        LOAD_SP();
        LOAD_IP(0);
        LLTRACE_RESUME_FRAME();
    }
    DISPATCH();
}
| |
TARGET(SETUP_ANNOTATIONS) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(SETUP_ANNOTATIONS);
    // Ensure LOCALS() has an "__annotations__" mapping, creating an
    // empty dict for it if absent. No stack effect.
    PyObject *ann_dict;
    if (LOCALS() == NULL) {
        _PyFrame_SetStackPointer(frame, stack_pointer);
        _PyErr_Format(tstate, PyExc_SystemError,
            "no locals found when setting up annotations");
        stack_pointer = _PyFrame_GetStackPointer(frame);
        goto error;
    }
    /* check if __annotations__ in locals()... */
    _PyFrame_SetStackPointer(frame, stack_pointer);
    int err = PyMapping_GetOptionalItem(LOCALS(), &_Py_ID(__annotations__), &ann_dict);
    stack_pointer = _PyFrame_GetStackPointer(frame);
    if (err < 0) goto error;
    if (ann_dict == NULL) {
        _PyFrame_SetStackPointer(frame, stack_pointer);
        ann_dict = PyDict_New();
        stack_pointer = _PyFrame_GetStackPointer(frame);
        if (ann_dict == NULL) goto error;
        _PyFrame_SetStackPointer(frame, stack_pointer);
        err = PyObject_SetItem(LOCALS(), &_Py_ID(__annotations__),
            ann_dict);
        stack_pointer = _PyFrame_GetStackPointer(frame);
        // LOCALS() now holds its own reference (or the set failed).
        Py_DECREF(ann_dict);
        if (err) goto error;
    }
    else {
        // Already present; drop the reference returned by the lookup.
        Py_DECREF(ann_dict);
    }
    DISPATCH();
}
| |
TARGET(SET_ADD) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(SET_ADD);
    // Add v (TOS) to the set that sits `oparg - 1` entries below it,
    // then pop v. The set stays on the stack.
    _PyStackRef set;
    _PyStackRef v;
    v = stack_pointer[-1];
    set = stack_pointer[-2 - (oparg-1)];
    _PyFrame_SetStackPointer(frame, stack_pointer);
    int err = PySet_Add(PyStackRef_AsPyObjectBorrow(set),
        PyStackRef_AsPyObjectBorrow(v));
    stack_pointer = _PyFrame_GetStackPointer(frame);
    PyStackRef_CLOSE(v);
    if (err) goto pop_1_error;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(SET_FUNCTION_ATTRIBUTE) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(SET_FUNCTION_ATTRIBUTE);
    // Attach an attribute (defaults, kwdefaults, annotations, closure,
    // ...) to a freshly-made function. The attribute to set is selected
    // by oparg via _Py_FunctionAttributeOffsets. Stack: attr, func ->
    // func. The attribute reference is stolen into the function struct.
    _PyStackRef attr_st;
    _PyStackRef func_in;
    _PyStackRef func_out;
    func_in = stack_pointer[-1];
    attr_st = stack_pointer[-2];
    PyObject *func = PyStackRef_AsPyObjectBorrow(func_in);
    PyObject *attr = PyStackRef_AsPyObjectSteal(attr_st);
    func_out = func_in;
    assert(PyFunction_Check(func));
    size_t offset = _Py_FunctionAttributeOffsets[oparg];
    assert(offset != 0);
    // Write directly into the function object's field at `offset`;
    // the slot must be unset (function is newly created).
    PyObject **ptr = (PyObject **)(((char *)func) + offset);
    assert(*ptr == NULL);
    *ptr = attr;
    stack_pointer[-2] = func_out;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(SET_UPDATE) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(SET_UPDATE);
    // Update the set `oparg - 1` entries below TOS with the iterable at
    // TOS, then pop the iterable. The set stays on the stack.
    _PyStackRef set;
    _PyStackRef iterable;
    iterable = stack_pointer[-1];
    set = stack_pointer[-2 - (oparg-1)];
    _PyFrame_SetStackPointer(frame, stack_pointer);
    int err = _PySet_Update(PyStackRef_AsPyObjectBorrow(set),
        PyStackRef_AsPyObjectBorrow(iterable));
    stack_pointer = _PyFrame_GetStackPointer(frame);
    PyStackRef_CLOSE(iterable);
    if (err < 0) goto pop_1_error;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(STORE_ATTR) {
    frame->instr_ptr = next_instr;
    next_instr += 5;
    INSTRUCTION_STATS(STORE_ATTR);
    PREDICTED(STORE_ATTR);
    _Py_CODEUNIT* const this_instr = next_instr - 5;
    (void)this_instr;
    // owner.name = v, where v is TOS1, owner is TOS and name comes from
    // the code object's names tuple at index oparg. Adaptive: may
    // specialize itself to a STORE_ATTR_* variant.
    _PyStackRef owner;
    _PyStackRef v;
    // _SPECIALIZE_STORE_ATTR
    {
        owner = stack_pointer[-1];
        uint16_t counter = read_u16(&this_instr[1].cache);
        (void)counter;
        #if ENABLE_SPECIALIZATION
        if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
            PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
            // Back up and retry the (possibly specialized) instruction.
            next_instr = this_instr;
            _PyFrame_SetStackPointer(frame, stack_pointer);
            _Py_Specialize_StoreAttr(owner, next_instr, name);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            DISPATCH_SAME_OPARG();
        }
        OPCODE_DEFERRED_INC(STORE_ATTR);
        ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
        #endif /* ENABLE_SPECIALIZATION */
    }
    /* Skip 3 cache entries */
    // _STORE_ATTR
    {
        v = stack_pointer[-2];
        PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
        _PyFrame_SetStackPointer(frame, stack_pointer);
        int err = PyObject_SetAttr(PyStackRef_AsPyObjectBorrow(owner),
            name, PyStackRef_AsPyObjectBorrow(v));
        stack_pointer = _PyFrame_GetStackPointer(frame);
        PyStackRef_CLOSE(v);
        PyStackRef_CLOSE(owner);
        if (err) goto pop_2_error;
    }
    stack_pointer += -2;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(STORE_ATTR_INSTANCE_VALUE) {
    _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
    next_instr += 5;
    INSTRUCTION_STATS(STORE_ATTR_INSTANCE_VALUE);
    static_assert(INLINE_CACHE_ENTRIES_STORE_ATTR == 4, "incorrect cache size");
    // Specialized STORE_ATTR: write directly into an object's inline
    // values area at a cached byte offset. Guards: exact type version,
    // no materialized dict, inline values still valid.
    _PyStackRef owner;
    _PyStackRef value;
    /* Skip 1 cache entry */
    // _GUARD_TYPE_VERSION
    {
        owner = stack_pointer[-1];
        uint32_t type_version = read_u32(&this_instr[2].cache);
        PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner));
        assert(type_version != 0);
        DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
    }
    // _GUARD_DORV_NO_DICT
    {
        PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
        assert(Py_TYPE(owner_o)->tp_dictoffset < 0);
        assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_INLINE_VALUES);
        DEOPT_IF(_PyObject_GetManagedDict(owner_o), STORE_ATTR);
        DEOPT_IF(_PyObject_InlineValues(owner_o)->valid == 0, STORE_ATTR);
    }
    // _STORE_ATTR_INSTANCE_VALUE
    {
        value = stack_pointer[-2];
        uint16_t offset = read_u16(&this_instr[4].cache);
        PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
        STAT_INC(STORE_ATTR, hit);
        assert(_PyObject_GetManagedDict(owner_o) == NULL);
        PyObject **value_ptr = (PyObject**)(((char *)owner_o) + offset);
        PyObject *old_value = *value_ptr;
        *value_ptr = PyStackRef_AsPyObjectSteal(value);
        if (old_value == NULL) {
            // First write to this slot: record it in insertion order so
            // dict views/repr see attributes in assignment order.
            PyDictValues *values = _PyObject_InlineValues(owner_o);
            Py_ssize_t index = value_ptr - values->values;
            _PyDictValues_AddToInsertionOrder(values, index);
        }
        else {
            Py_DECREF(old_value);
        }
        PyStackRef_CLOSE(owner);
    }
    stack_pointer += -2;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(STORE_ATTR_SLOT) {
    _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
    next_instr += 5;
    INSTRUCTION_STATS(STORE_ATTR_SLOT);
    static_assert(INLINE_CACHE_ENTRIES_STORE_ATTR == 4, "incorrect cache size");
    // Specialized STORE_ATTR for __slots__-style descriptors: write the
    // value into the object at a cached byte offset, guarded only by
    // the type version.
    _PyStackRef owner;
    _PyStackRef value;
    /* Skip 1 cache entry */
    // _GUARD_TYPE_VERSION
    {
        owner = stack_pointer[-1];
        uint32_t type_version = read_u32(&this_instr[2].cache);
        PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner));
        assert(type_version != 0);
        DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
    }
    // _STORE_ATTR_SLOT
    {
        value = stack_pointer[-2];
        uint16_t index = read_u16(&this_instr[4].cache);
        PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
        char *addr = (char *)owner_o + index;
        STAT_INC(STORE_ATTR, hit);
        PyObject *old_value = *(PyObject **)addr;
        *(PyObject **)addr = PyStackRef_AsPyObjectSteal(value);
        // Slot may have been unset, hence XDECREF.
        Py_XDECREF(old_value);
        PyStackRef_CLOSE(owner);
    }
    stack_pointer += -2;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(STORE_ATTR_WITH_HINT) {
    _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
    next_instr += 5;
    INSTRUCTION_STATS(STORE_ATTR_WITH_HINT);
    static_assert(INLINE_CACHE_ENTRIES_STORE_ATTR == 4, "incorrect cache size");
    // Specialized STORE_ATTR for objects with a materialized managed
    // dict: a cached entry index ("hint") lets us overwrite the dict
    // entry directly, skipping the hash lookup, as long as the entry
    // still holds the expected key.
    _PyStackRef owner;
    _PyStackRef value;
    /* Skip 1 cache entry */
    // _GUARD_TYPE_VERSION
    {
        owner = stack_pointer[-1];
        uint32_t type_version = read_u32(&this_instr[2].cache);
        PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner));
        assert(type_version != 0);
        DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
    }
    // _STORE_ATTR_WITH_HINT
    {
        value = stack_pointer[-2];
        uint16_t hint = read_u16(&this_instr[4].cache);
        PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
        assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_MANAGED_DICT);
        PyDictObject *dict = _PyObject_GetManagedDict(owner_o);
        DEOPT_IF(dict == NULL, STORE_ATTR);
        assert(PyDict_CheckExact((PyObject *)dict));
        PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
        // Validate the cached hint: in range, unicode keys, same key,
        // and the entry currently holds a value.
        DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries, STORE_ATTR);
        DEOPT_IF(!DK_IS_UNICODE(dict->ma_keys), STORE_ATTR);
        PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint;
        DEOPT_IF(ep->me_key != name, STORE_ATTR);
        PyObject *old_value = ep->me_value;
        DEOPT_IF(old_value == NULL, STORE_ATTR);
        /* Ensure dict is GC tracked if it needs to be */
        if (!_PyObject_GC_IS_TRACKED(dict) && _PyObject_GC_MAY_BE_TRACKED(PyStackRef_AsPyObjectBorrow(value))) {
            _PyObject_GC_TRACK(dict);
        }
        _PyFrame_SetStackPointer(frame, stack_pointer);
        _PyDict_NotifyEvent(tstate->interp, PyDict_EVENT_MODIFIED, dict, name, PyStackRef_AsPyObjectBorrow(value));
        stack_pointer = _PyFrame_GetStackPointer(frame);
        ep->me_value = PyStackRef_AsPyObjectSteal(value);
        // old_value should be DECREFed after GC track checking is done, if not, it could raise a segmentation fault,
        // when dict only holds the strong reference to value in ep->me_value.
        Py_XDECREF(old_value);
        STAT_INC(STORE_ATTR, hit);
        PyStackRef_CLOSE(owner);
    }
    stack_pointer += -2;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(STORE_DEREF) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(STORE_DEREF);
    // Store TOS into the cell held in local slot `oparg`.
    // PyCell_SetTakeRef consumes our reference to the value.
    _PyStackRef v;
    v = stack_pointer[-1];
    PyCellObject *cell = (PyCellObject *)PyStackRef_AsPyObjectBorrow(GETLOCAL(oparg));
    _PyFrame_SetStackPointer(frame, stack_pointer);
    PyCell_SetTakeRef(cell, PyStackRef_AsPyObjectSteal(v));
    stack_pointer = _PyFrame_GetStackPointer(frame);
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(STORE_FAST) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(STORE_FAST);
    // Pop TOS into local slot `oparg`. SETLOCAL releases the slot's
    // previous value.
    _PyStackRef value;
    value = stack_pointer[-1];
    SETLOCAL(oparg, value);
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(STORE_FAST_LOAD_FAST) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(STORE_FAST_LOAD_FAST);
    // Fused STORE_FAST + LOAD_FAST: the two 4-bit local indices are
    // packed into oparg (store target in the high nibble, load source
    // in the low nibble).
    _PyStackRef value1;
    _PyStackRef value2;
    value1 = stack_pointer[-1];
    uint32_t oparg1 = oparg >> 4;
    uint32_t oparg2 = oparg & 15;
    SETLOCAL(oparg1, value1);
    value2 = PyStackRef_DUP(GETLOCAL(oparg2));
    stack_pointer[-1] = value2;
    DISPATCH();
}
| |
TARGET(STORE_FAST_STORE_FAST) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(STORE_FAST_STORE_FAST);
    // Fused pair of STORE_FASTs: TOS goes to the local indexed by the
    // high nibble of oparg, TOS1 to the local indexed by the low nibble.
    _PyStackRef value2;
    _PyStackRef value1;
    value1 = stack_pointer[-1];
    value2 = stack_pointer[-2];
    uint32_t oparg1 = oparg >> 4;
    uint32_t oparg2 = oparg & 15;
    SETLOCAL(oparg1, value1);
    SETLOCAL(oparg2, value2);
    stack_pointer += -2;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(STORE_GLOBAL) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(STORE_GLOBAL);
    // GLOBALS()[names[oparg]] = TOS, then pop.
    _PyStackRef v;
    v = stack_pointer[-1];
    PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
    _PyFrame_SetStackPointer(frame, stack_pointer);
    int err = PyDict_SetItem(GLOBALS(), name, PyStackRef_AsPyObjectBorrow(v));
    stack_pointer = _PyFrame_GetStackPointer(frame);
    PyStackRef_CLOSE(v);
    if (err) goto pop_1_error;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(STORE_NAME) {
    // STORE_NAME: pop TOS and bind it to co_names[oparg] in the frame's
    // locals namespace (class/module body scope). Raises SystemError if the
    // frame has no locals mapping. Uses the dict fast path when the
    // namespace is exactly a dict, falling back to the generic mapping
    // protocol otherwise.
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(STORE_NAME);
    _PyStackRef v;
    v = stack_pointer[-1];
    PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
    PyObject *ns = LOCALS();
    int err;
    if (ns == NULL) {
        // No locals mapping available: report and discard the value.
        _PyFrame_SetStackPointer(frame, stack_pointer);
        _PyErr_Format(tstate, PyExc_SystemError,
                      "no locals found when storing %R", name);
        stack_pointer = _PyFrame_GetStackPointer(frame);
        PyStackRef_CLOSE(v);
        goto pop_1_error;
    }
    if (PyDict_CheckExact(ns)) {
        // Exact-dict fast path.
        _PyFrame_SetStackPointer(frame, stack_pointer);
        err = PyDict_SetItem(ns, name, PyStackRef_AsPyObjectBorrow(v));
        stack_pointer = _PyFrame_GetStackPointer(frame);
    }
    else {
        // Arbitrary mapping: go through the full item-assignment protocol.
        _PyFrame_SetStackPointer(frame, stack_pointer);
        err = PyObject_SetItem(ns, name, PyStackRef_AsPyObjectBorrow(v));
        stack_pointer = _PyFrame_GetStackPointer(frame);
    }
    PyStackRef_CLOSE(v);
    if (err) goto pop_1_error;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(STORE_SLICE) {
    // STORE_SLICE: implements `container[start:stop] = v`.
    // Stack layout (top first): stop, start, container, v. Pops all four.
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(STORE_SLICE);
    _PyStackRef v;
    _PyStackRef container;
    _PyStackRef start;
    _PyStackRef stop;
    // _SPECIALIZE_STORE_SLICE
    {
        // Placeholder until we implement STORE_SLICE specialization
        #if ENABLE_SPECIALIZATION
        OPCODE_DEFERRED_INC(STORE_SLICE);
        #endif /* ENABLE_SPECIALIZATION */
    }
    // _STORE_SLICE
    {
        stop = stack_pointer[-1];
        start = stack_pointer[-2];
        container = stack_pointer[-3];
        v = stack_pointer[-4];
        _PyFrame_SetStackPointer(frame, stack_pointer);
        // Consumes (steals) the start/stop references even on failure.
        PyObject *slice = _PyBuildSlice_ConsumeRefs(PyStackRef_AsPyObjectSteal(start),
                                                    PyStackRef_AsPyObjectSteal(stop));
        stack_pointer = _PyFrame_GetStackPointer(frame);
        int err;
        if (slice == NULL) {
            err = 1;
        }
        else {
            // start/stop are already dead: temporarily shrink the visible
            // stack by 2 so any GC/tracing triggered by SetItem does not see
            // the stale slots, then restore it for the pop_4 bookkeeping.
            stack_pointer += -2;
            assert(WITHIN_STACK_BOUNDS());
            _PyFrame_SetStackPointer(frame, stack_pointer);
            err = PyObject_SetItem(PyStackRef_AsPyObjectBorrow(container), slice, PyStackRef_AsPyObjectBorrow(v));
            stack_pointer = _PyFrame_GetStackPointer(frame);
            Py_DECREF(slice);
            stack_pointer += 2;
            assert(WITHIN_STACK_BOUNDS());
        }
        PyStackRef_CLOSE(v);
        PyStackRef_CLOSE(container);
        if (err) goto pop_4_error;
    }
    stack_pointer += -4;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(STORE_SUBSCR) {
    // STORE_SUBSCR: implements `container[sub] = v`.
    // Adaptive instruction: the first uop attempts to specialize (e.g. to
    // STORE_SUBSCR_DICT / STORE_SUBSCR_LIST_INT); the second is the generic
    // implementation. Stack (top first): sub, container, v — pops all three.
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(STORE_SUBSCR);
    PREDICTED(STORE_SUBSCR);
    _Py_CODEUNIT* const this_instr = next_instr - 2;
    (void)this_instr;
    _PyStackRef container;
    _PyStackRef sub;
    _PyStackRef v;
    // _SPECIALIZE_STORE_SUBSCR
    {
        sub = stack_pointer[-1];
        container = stack_pointer[-2];
        uint16_t counter = read_u16(&this_instr[1].cache);
        (void)counter;
        #if ENABLE_SPECIALIZATION
        if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
            // Counter hit zero: try to rewrite this instruction into a
            // specialized variant, then re-dispatch the (possibly new) opcode.
            next_instr = this_instr;
            _PyFrame_SetStackPointer(frame, stack_pointer);
            _Py_Specialize_StoreSubscr(container, sub, next_instr);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            DISPATCH_SAME_OPARG();
        }
        OPCODE_DEFERRED_INC(STORE_SUBSCR);
        ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
        #endif /* ENABLE_SPECIALIZATION */
    }
    // _STORE_SUBSCR
    {
        v = stack_pointer[-3];
        /* container[sub] = v */
        _PyFrame_SetStackPointer(frame, stack_pointer);
        int err = PyObject_SetItem(PyStackRef_AsPyObjectBorrow(container), PyStackRef_AsPyObjectBorrow(sub), PyStackRef_AsPyObjectBorrow(v));
        stack_pointer = _PyFrame_GetStackPointer(frame);
        PyStackRef_CLOSE(v);
        PyStackRef_CLOSE(container);
        PyStackRef_CLOSE(sub);
        if (err) goto pop_3_error;
    }
    stack_pointer += -3;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(STORE_SUBSCR_DICT) {
    // Specialization of STORE_SUBSCR for exact dicts: `dict[sub] = value`.
    // Deoptimizes back to STORE_SUBSCR if the container is not exactly a dict.
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(STORE_SUBSCR_DICT);
    static_assert(INLINE_CACHE_ENTRIES_STORE_SUBSCR == 1, "incorrect cache size");
    _PyStackRef value;
    _PyStackRef dict_st;
    _PyStackRef sub;
    /* Skip 1 cache entry */
    sub = stack_pointer[-1];
    dict_st = stack_pointer[-2];
    value = stack_pointer[-3];
    PyObject *dict = PyStackRef_AsPyObjectBorrow(dict_st);
    DEOPT_IF(!PyDict_CheckExact(dict), STORE_SUBSCR);
    STAT_INC(STORE_SUBSCR, hit);
    _PyFrame_SetStackPointer(frame, stack_pointer);
    // _Take2 steals both the key and value references, so only the dict
    // reference remains to be closed afterwards.
    int err = _PyDict_SetItem_Take2((PyDictObject *)dict,
                                    PyStackRef_AsPyObjectSteal(sub),
                                    PyStackRef_AsPyObjectSteal(value));
    stack_pointer = _PyFrame_GetStackPointer(frame);
    PyStackRef_CLOSE(dict_st);
    if (err) goto pop_3_error;
    stack_pointer += -3;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(STORE_SUBSCR_LIST_INT) {
    // Specialization of STORE_SUBSCR for `list[small_nonneg_int] = value`.
    // Deoptimizes unless: sub is an exact int, the container is an exact
    // list, the int is a nonnegative compact (single-digit) value, and the
    // index is in bounds.
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(STORE_SUBSCR_LIST_INT);
    static_assert(INLINE_CACHE_ENTRIES_STORE_SUBSCR == 1, "incorrect cache size");
    _PyStackRef value;
    _PyStackRef list_st;
    _PyStackRef sub_st;
    /* Skip 1 cache entry */
    sub_st = stack_pointer[-1];
    list_st = stack_pointer[-2];
    value = stack_pointer[-3];
    PyObject *sub = PyStackRef_AsPyObjectBorrow(sub_st);
    PyObject *list = PyStackRef_AsPyObjectBorrow(list_st);
    DEOPT_IF(!PyLong_CheckExact(sub), STORE_SUBSCR);
    DEOPT_IF(!PyList_CheckExact(list), STORE_SUBSCR);
    // Ensure nonnegative, zero-or-one-digit ints.
    DEOPT_IF(!_PyLong_IsNonNegativeCompact((PyLongObject *)sub), STORE_SUBSCR);
    Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0];
    // Ensure index < len(list)
    DEOPT_IF(index >= PyList_GET_SIZE(list), STORE_SUBSCR);
    STAT_INC(STORE_SUBSCR, hit);
    // Swap in the new item (stealing the value ref), then release the old one.
    PyObject *old_value = PyList_GET_ITEM(list, index);
    PyList_SET_ITEM(list, index, PyStackRef_AsPyObjectSteal(value));
    assert(old_value != NULL);
    Py_DECREF(old_value);
    // The index is known to be an exact compact int, so use the specialized
    // close that frees via PyObject_Free directly.
    PyStackRef_CLOSE_SPECIALIZED(sub_st, (destructor)PyObject_Free);
    PyStackRef_CLOSE(list_st);
    stack_pointer += -3;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(SWAP) {
    // SWAP: exchange the top of stack with the oparg-th item from the top
    // (oparg >= 2; oparg == 2 swaps the top two entries). Pure stack
    // shuffle — no references are created or released.
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(SWAP);
    _PyStackRef bottom_in;
    _PyStackRef top_in;
    _PyStackRef top_out;
    _PyStackRef bottom_out;
    top_in = stack_pointer[-1];
    bottom_in = stack_pointer[-2 - (oparg-2)];   // i.e. stack_pointer[-oparg]
    bottom_out = bottom_in;
    top_out = top_in;
    assert(oparg >= 2);
    stack_pointer[-2 - (oparg-2)] = top_out;
    stack_pointer[-1] = bottom_out;
    DISPATCH();
}
| |
TARGET(TO_BOOL) {
    // TO_BOOL: replace TOS with its truth value (exact bool object).
    // Adaptive instruction: first tries to specialize on the operand's type
    // (TO_BOOL_BOOL, TO_BOOL_INT, ...), then falls back to the generic
    // PyObject_IsTrue.
    frame->instr_ptr = next_instr;
    next_instr += 4;
    INSTRUCTION_STATS(TO_BOOL);
    PREDICTED(TO_BOOL);
    _Py_CODEUNIT* const this_instr = next_instr - 4;
    (void)this_instr;
    _PyStackRef value;
    _PyStackRef res;
    // _SPECIALIZE_TO_BOOL
    {
        value = stack_pointer[-1];
        uint16_t counter = read_u16(&this_instr[1].cache);
        (void)counter;
        #if ENABLE_SPECIALIZATION
        if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
            // Counter exhausted: attempt to rewrite into a specialized
            // variant and re-dispatch.
            next_instr = this_instr;
            _PyFrame_SetStackPointer(frame, stack_pointer);
            _Py_Specialize_ToBool(value, next_instr);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            DISPATCH_SAME_OPARG();
        }
        OPCODE_DEFERRED_INC(TO_BOOL);
        ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
        #endif /* ENABLE_SPECIALIZATION */
    }
    /* Skip 2 cache entries */
    // _TO_BOOL
    {
        // PyObject_IsTrue may call __bool__/__len__, hence the stack-pointer
        // publish around the call.
        _PyFrame_SetStackPointer(frame, stack_pointer);
        int err = PyObject_IsTrue(PyStackRef_AsPyObjectBorrow(value));
        stack_pointer = _PyFrame_GetStackPointer(frame);
        PyStackRef_CLOSE(value);
        if (err < 0) goto pop_1_error;
        res = err ? PyStackRef_True : PyStackRef_False;
    }
    stack_pointer[-1] = res;
    DISPATCH();
}
| |
TARGET(TO_BOOL_ALWAYS_TRUE) {
    // Specialization of TO_BOOL for types known (at specialization time) to
    // always be truthy. Guards on the cached type version; if the type still
    // matches, the operand is simply replaced with True.
    _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
    next_instr += 4;
    INSTRUCTION_STATS(TO_BOOL_ALWAYS_TRUE);
    static_assert(INLINE_CACHE_ENTRIES_TO_BOOL == 3, "incorrect cache size");
    _PyStackRef owner;
    _PyStackRef value;
    _PyStackRef res;
    /* Skip 1 cache entry */
    // _GUARD_TYPE_VERSION
    {
        owner = stack_pointer[-1];
        uint32_t type_version = read_u32(&this_instr[2].cache);
        PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner));
        assert(type_version != 0);
        // Type changed since specialization (e.g. __bool__ added): deopt.
        DEOPT_IF(tp->tp_version_tag != type_version, TO_BOOL);
    }
    // _REPLACE_WITH_TRUE
    {
        value = owner;
        PyStackRef_CLOSE(value);
        res = PyStackRef_True;
    }
    stack_pointer[-1] = res;
    DISPATCH();
}
| |
TARGET(TO_BOOL_BOOL) {
    // Specialization of TO_BOOL for operands that are already exact bools:
    // nothing to do — the value is left on the stack untouched.
    frame->instr_ptr = next_instr;
    next_instr += 4;
    INSTRUCTION_STATS(TO_BOOL_BOOL);
    static_assert(INLINE_CACHE_ENTRIES_TO_BOOL == 3, "incorrect cache size");
    _PyStackRef value;
    /* Skip 1 cache entry */
    /* Skip 2 cache entries */
    value = stack_pointer[-1];
    DEOPT_IF(!PyStackRef_BoolCheck(value), TO_BOOL);
    STAT_INC(TO_BOOL, hit);
    DISPATCH();
}
| |
TARGET(TO_BOOL_INT) {
    // Specialization of TO_BOOL for exact ints: zero -> False, else True.
    frame->instr_ptr = next_instr;
    next_instr += 4;
    INSTRUCTION_STATS(TO_BOOL_INT);
    static_assert(INLINE_CACHE_ENTRIES_TO_BOOL == 3, "incorrect cache size");
    _PyStackRef value;
    _PyStackRef res;
    /* Skip 1 cache entry */
    /* Skip 2 cache entries */
    value = stack_pointer[-1];
    PyObject *value_o = PyStackRef_AsPyObjectBorrow(value);
    DEOPT_IF(!PyLong_CheckExact(value_o), TO_BOOL);
    STAT_INC(TO_BOOL, hit);
    if (_PyLong_IsZero((PyLongObject *)value_o)) {
        // int 0 is an immortal small-int singleton, so its reference need
        // not be closed (the assert documents that invariant).
        assert(_Py_IsImmortal(value_o));
        res = PyStackRef_False;
    }
    else {
        PyStackRef_CLOSE(value);
        res = PyStackRef_True;
    }
    stack_pointer[-1] = res;
    DISPATCH();
}
| |
TARGET(TO_BOOL_LIST) {
    // Specialization of TO_BOOL for exact lists: truthiness is just the size.
    frame->instr_ptr = next_instr;
    next_instr += 4;
    INSTRUCTION_STATS(TO_BOOL_LIST);
    static_assert(INLINE_CACHE_ENTRIES_TO_BOOL == 3, "incorrect cache size");
    _PyStackRef value;
    _PyStackRef res;
    /* Skip 1 cache entry */
    /* Skip 2 cache entries */
    value = stack_pointer[-1];
    PyObject *value_o = PyStackRef_AsPyObjectBorrow(value);
    DEOPT_IF(!PyList_CheckExact(value_o), TO_BOOL);
    STAT_INC(TO_BOOL, hit);
    res = Py_SIZE(value_o) ? PyStackRef_True : PyStackRef_False;
    PyStackRef_CLOSE(value);
    stack_pointer[-1] = res;
    DISPATCH();
}
| |
TARGET(TO_BOOL_NONE) {
    // Specialization of TO_BOOL for None: always False.
    frame->instr_ptr = next_instr;
    next_instr += 4;
    INSTRUCTION_STATS(TO_BOOL_NONE);
    static_assert(INLINE_CACHE_ENTRIES_TO_BOOL == 3, "incorrect cache size");
    _PyStackRef value;
    _PyStackRef res;
    /* Skip 1 cache entry */
    /* Skip 2 cache entries */
    value = stack_pointer[-1];
    // This one is a bit weird, because we expect *some* failures:
    DEOPT_IF(!PyStackRef_Is(value, PyStackRef_None), TO_BOOL);
    STAT_INC(TO_BOOL, hit);
    // No CLOSE needed: the operand is None, presumably immortal, so the
    // reference can simply be overwritten.
    res = PyStackRef_False;
    stack_pointer[-1] = res;
    DISPATCH();
}
| |
TARGET(TO_BOOL_STR) {
    // Specialization of TO_BOOL for exact strings: empty string -> False,
    // anything else -> True.
    frame->instr_ptr = next_instr;
    next_instr += 4;
    INSTRUCTION_STATS(TO_BOOL_STR);
    static_assert(INLINE_CACHE_ENTRIES_TO_BOOL == 3, "incorrect cache size");
    _PyStackRef value;
    _PyStackRef res;
    /* Skip 1 cache entry */
    /* Skip 2 cache entries */
    value = stack_pointer[-1];
    PyObject *value_o = PyStackRef_AsPyObjectBorrow(value);
    DEOPT_IF(!PyUnicode_CheckExact(value_o), TO_BOOL);
    STAT_INC(TO_BOOL, hit);
    if (value_o == &_Py_STR(empty)) {
        // The empty-string singleton is immortal: no CLOSE required.
        assert(_Py_IsImmortal(value_o));
        res = PyStackRef_False;
    }
    else {
        // Any exact str that is not the empty singleton must be non-empty.
        assert(Py_SIZE(value_o));
        PyStackRef_CLOSE(value);
        res = PyStackRef_True;
    }
    stack_pointer[-1] = res;
    DISPATCH();
}
| |
TARGET(UNARY_INVERT) {
    // UNARY_INVERT: replace TOS with ~TOS via PyNumber_Invert.
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(UNARY_INVERT);
    _PyStackRef value;
    _PyStackRef res;
    value = stack_pointer[-1];
    // PyNumber_Invert may call __invert__, so publish the stack pointer.
    _PyFrame_SetStackPointer(frame, stack_pointer);
    PyObject *res_o = PyNumber_Invert(PyStackRef_AsPyObjectBorrow(value));
    stack_pointer = _PyFrame_GetStackPointer(frame);
    PyStackRef_CLOSE(value);
    if (res_o == NULL) goto pop_1_error;
    res = PyStackRef_FromPyObjectSteal(res_o);
    stack_pointer[-1] = res;
    DISPATCH();
}
| |
TARGET(UNARY_NEGATIVE) {
    // UNARY_NEGATIVE: replace TOS with -TOS via PyNumber_Negative.
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(UNARY_NEGATIVE);
    _PyStackRef value;
    _PyStackRef res;
    value = stack_pointer[-1];
    // PyNumber_Negative may call __neg__, so publish the stack pointer.
    _PyFrame_SetStackPointer(frame, stack_pointer);
    PyObject *res_o = PyNumber_Negative(PyStackRef_AsPyObjectBorrow(value));
    stack_pointer = _PyFrame_GetStackPointer(frame);
    PyStackRef_CLOSE(value);
    if (res_o == NULL) goto pop_1_error;
    res = PyStackRef_FromPyObjectSteal(res_o);
    stack_pointer[-1] = res;
    DISPATCH();
}
| |
TARGET(UNARY_NOT) {
    // UNARY_NOT: logical negation of a value the compiler guarantees is
    // already an exact bool (see the assert). Cannot fail, no refcounting
    // needed since True/False are the only possible inputs/outputs.
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(UNARY_NOT);
    _PyStackRef value;
    _PyStackRef res;
    value = stack_pointer[-1];
    assert(PyStackRef_BoolCheck(value));
    res = PyStackRef_Is(value, PyStackRef_False)
        ? PyStackRef_True : PyStackRef_False;
    stack_pointer[-1] = res;
    DISPATCH();
}
| |
TARGET(UNPACK_EX) {
    // UNPACK_EX: starred unpacking, e.g. `a, *b, c = seq`.
    // oparg low byte = number of targets before the star,
    // oparg high byte = number of targets after it. Pops the sequence and
    // pushes (low + high + 1) values; the star target (a list) sits between.
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(UNPACK_EX);
    _PyStackRef seq;
    _PyStackRef *right;
    seq = stack_pointer[-1];
    right = &stack_pointer[(oparg & 0xFF)];
    _PyStackRef *top = right + (oparg >> 8);
    _PyFrame_SetStackPointer(frame, stack_pointer);
    // Writes the unpacked values downward from `top`; returns 0 on failure.
    int res = _PyEval_UnpackIterableStackRef(tstate, seq, oparg & 0xFF, oparg >> 8, top);
    stack_pointer = _PyFrame_GetStackPointer(frame);
    PyStackRef_CLOSE(seq);
    if (res == 0) goto pop_1_error;
    stack_pointer += (oparg & 0xFF) + (oparg >> 8);
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(UNPACK_SEQUENCE) {
    // UNPACK_SEQUENCE: unpack an iterable of exactly oparg items onto the
    // stack (top of stack becomes the first item). Adaptive instruction:
    // first tries to specialize (LIST / TUPLE / TWO_TUPLE variants), then
    // runs the generic unpack.
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(UNPACK_SEQUENCE);
    PREDICTED(UNPACK_SEQUENCE);
    _Py_CODEUNIT* const this_instr = next_instr - 2;
    (void)this_instr;
    _PyStackRef seq;
    _PyStackRef *output;
    // _SPECIALIZE_UNPACK_SEQUENCE
    {
        seq = stack_pointer[-1];
        uint16_t counter = read_u16(&this_instr[1].cache);
        (void)counter;
        #if ENABLE_SPECIALIZATION
        if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
            // Counter exhausted: attempt specialization and re-dispatch.
            next_instr = this_instr;
            _PyFrame_SetStackPointer(frame, stack_pointer);
            _Py_Specialize_UnpackSequence(seq, next_instr, oparg);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            DISPATCH_SAME_OPARG();
        }
        OPCODE_DEFERRED_INC(UNPACK_SEQUENCE);
        ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
        #endif /* ENABLE_SPECIALIZATION */
        // Generator-emitted suppressions (the second (void)counter is
        // redundant but harmless generated residue).
        (void)seq;
        (void)counter;
    }
    // _UNPACK_SEQUENCE
    {
        output = &stack_pointer[-1];
        _PyStackRef *top = output + oparg;
        _PyFrame_SetStackPointer(frame, stack_pointer);
        // -1 for the "after star" count signals plain (non-starred) unpacking.
        int res = _PyEval_UnpackIterableStackRef(tstate, seq, oparg, -1, top);
        stack_pointer = _PyFrame_GetStackPointer(frame);
        PyStackRef_CLOSE(seq);
        if (res == 0) goto pop_1_error;
    }
    stack_pointer += -1 + oparg;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(UNPACK_SEQUENCE_LIST) {
    // Specialization of UNPACK_SEQUENCE for an exact list of exactly oparg
    // items. Deoptimizes on type or length mismatch.
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(UNPACK_SEQUENCE_LIST);
    static_assert(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE == 1, "incorrect cache size");
    _PyStackRef seq;
    _PyStackRef *values;
    /* Skip 1 cache entry */
    seq = stack_pointer[-1];
    values = &stack_pointer[-1];
    PyObject *seq_o = PyStackRef_AsPyObjectBorrow(seq);
    DEOPT_IF(!PyList_CheckExact(seq_o), UNPACK_SEQUENCE);
    DEOPT_IF(PyList_GET_SIZE(seq_o) != oparg, UNPACK_SEQUENCE);
    STAT_INC(UNPACK_SEQUENCE, hit);
    PyObject **items = _PyList_ITEMS(seq_o);
    // Push items in reverse so the list's first element ends up on top.
    for (int i = oparg; --i >= 0; ) {
        *values++ = PyStackRef_FromPyObjectNew(items[i]);
    }
    PyStackRef_CLOSE(seq);
    stack_pointer += -1 + oparg;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(UNPACK_SEQUENCE_TUPLE) {
    // Specialization of UNPACK_SEQUENCE for an exact tuple of exactly oparg
    // items. Deoptimizes on type or length mismatch.
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(UNPACK_SEQUENCE_TUPLE);
    static_assert(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE == 1, "incorrect cache size");
    _PyStackRef seq;
    _PyStackRef *values;
    /* Skip 1 cache entry */
    seq = stack_pointer[-1];
    values = &stack_pointer[-1];
    PyObject *seq_o = PyStackRef_AsPyObjectBorrow(seq);
    DEOPT_IF(!PyTuple_CheckExact(seq_o), UNPACK_SEQUENCE);
    DEOPT_IF(PyTuple_GET_SIZE(seq_o) != oparg, UNPACK_SEQUENCE);
    STAT_INC(UNPACK_SEQUENCE, hit);
    PyObject **items = _PyTuple_ITEMS(seq_o);
    // Push items in reverse so the tuple's first element ends up on top.
    for (int i = oparg; --i >= 0; ) {
        *values++ = PyStackRef_FromPyObjectNew(items[i]);
    }
    PyStackRef_CLOSE(seq);
    stack_pointer += -1 + oparg;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(UNPACK_SEQUENCE_TWO_TUPLE) {
    // Specialization of UNPACK_SEQUENCE for the very common two-element
    // exact tuple (e.g. `a, b = pair`). Unrolls the loop entirely.
    frame->instr_ptr = next_instr;
    next_instr += 2;
    INSTRUCTION_STATS(UNPACK_SEQUENCE_TWO_TUPLE);
    static_assert(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE == 1, "incorrect cache size");
    _PyStackRef seq;
    _PyStackRef val1;
    _PyStackRef val0;
    /* Skip 1 cache entry */
    seq = stack_pointer[-1];
    assert(oparg == 2);
    PyObject *seq_o = PyStackRef_AsPyObjectBorrow(seq);
    DEOPT_IF(!PyTuple_CheckExact(seq_o), UNPACK_SEQUENCE);
    DEOPT_IF(PyTuple_GET_SIZE(seq_o) != 2, UNPACK_SEQUENCE);
    STAT_INC(UNPACK_SEQUENCE, hit);
    val0 = PyStackRef_FromPyObjectNew(PyTuple_GET_ITEM(seq_o, 0));
    val1 = PyStackRef_FromPyObjectNew(PyTuple_GET_ITEM(seq_o, 1));
    PyStackRef_CLOSE(seq);
    // First element ends up on top of the stack (val0 above val1).
    stack_pointer[-1] = val1;
    stack_pointer[0] = val0;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(WITH_EXCEPT_START) {
    // WITH_EXCEPT_START: an exception was raised inside a `with` block;
    // call the saved __exit__ with (exc_type, exc_value, traceback) and push
    // its return value (which a later instruction tests for suppression).
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(WITH_EXCEPT_START);
    _PyStackRef exit_func;
    _PyStackRef exit_self;
    _PyStackRef lasti;
    _PyStackRef val;
    _PyStackRef res;
    val = stack_pointer[-1];
    lasti = stack_pointer[-3];
    exit_self = stack_pointer[-4];
    exit_func = stack_pointer[-5];
    /* At the top of the stack are 4 values:
       - val: TOP = exc_info()
       - unused: SECOND = previous exception
       - lasti: THIRD = lasti of exception in exc_info()
       - exit_self: FOURTH = the context or NULL
       - exit_func: FIFTH = the context.__exit__ function or context.__exit__ bound method
       We call FOURTH(type(TOP), TOP, GetTraceback(TOP)).
       Then we push the __exit__ return value.
    */
    PyObject *exc, *tb;
    PyObject *val_o = PyStackRef_AsPyObjectBorrow(val);
    PyObject *exit_func_o = PyStackRef_AsPyObjectBorrow(exit_func);
    assert(val_o && PyExceptionInstance_Check(val_o));
    exc = PyExceptionInstance_Class(val_o);
    tb = PyException_GetTraceback(val_o);
    if (tb == NULL) {
        tb = Py_None;
    }
    else {
        // GetTraceback returned a new reference; drop it and keep using the
        // borrowed pointer (the exception itself keeps the traceback alive).
        Py_DECREF(tb);
    }
    assert(PyStackRef_LongCheck(lasti));
    (void)lasti; // Shut up compiler warning if asserts are off
    // Leading NULL slot + optional self enable PY_VECTORCALL_ARGUMENTS_OFFSET:
    // bound methods get (exc, val, tb), plain functions get (self, exc, val, tb).
    PyObject *stack[5] = {NULL, PyStackRef_AsPyObjectBorrow(exit_self), exc, val_o, tb};
    int has_self = !PyStackRef_IsNull(exit_self);
    _PyFrame_SetStackPointer(frame, stack_pointer);
    PyObject *res_o = PyObject_Vectorcall(exit_func_o, stack + 2 - has_self,
                                          (3 + has_self) | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL);
    stack_pointer = _PyFrame_GetStackPointer(frame);
    if (res_o == NULL) goto error;
    res = PyStackRef_FromPyObjectSteal(res_o);
    stack_pointer[0] = res;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| |
TARGET(YIELD_VALUE) {
    // YIELD_VALUE: suspend the current generator/coroutine frame, returning
    // control (and the popped value) to the frame that sent into it.
    // oparg selects the suspended state: 0 = FRAME_SUSPENDED,
    // 1 = FRAME_SUSPENDED_YIELD_FROM (per the adjacency assert below).
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(YIELD_VALUE);
    _PyStackRef retval;
    _PyStackRef value;
    retval = stack_pointer[-1];
    // NOTE: It's important that YIELD_VALUE never raises an exception!
    // The compiler treats any exception raised here as a failed close()
    // or throw() call.
    #if TIER_ONE
    assert(frame != &entry_frame);
    #endif
    // Leave instr_ptr pointing past this instruction so resumption
    // continues at the next one.
    frame->instr_ptr++;
    PyGenObject *gen = _PyGen_GetGeneratorFromFrame(frame);
    assert(FRAME_SUSPENDED_YIELD_FROM == FRAME_SUSPENDED + 1);
    assert(oparg == 0 || oparg == 1);
    gen->gi_frame_state = FRAME_SUSPENDED + oparg;
    _PyStackRef temp = retval;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    _PyFrame_SetStackPointer(frame, stack_pointer);
    // Pop this generator's exception state off the thread's exc_info chain.
    tstate->exc_info = gen->gi_exc_state.previous_item;
    gen->gi_exc_state.previous_item = NULL;
    _Py_LeaveRecursiveCallPy(tstate);
    // Switch back to the caller's frame; sever the back-link so the
    // suspended generator frame doesn't keep the caller alive.
    _PyInterpreterFrame *gen_frame = frame;
    frame = tstate->current_frame = frame->previous;
    gen_frame->previous = NULL;
    /* We don't know which of these is relevant here, so keep them equal */
    assert(INLINE_CACHE_ENTRIES_SEND == INLINE_CACHE_ENTRIES_FOR_ITER);
    #if TIER_ONE
    // The caller must be suspended at one of the instructions that can
    // receive a yielded value (or an instrumented/executor variant).
    assert(frame->instr_ptr->op.code == INSTRUMENTED_LINE ||
           frame->instr_ptr->op.code == INSTRUMENTED_INSTRUCTION ||
           _PyOpcode_Deopt[frame->instr_ptr->op.code] == SEND ||
           _PyOpcode_Deopt[frame->instr_ptr->op.code] == FOR_ITER ||
           _PyOpcode_Deopt[frame->instr_ptr->op.code] == INTERPRETER_EXIT ||
           _PyOpcode_Deopt[frame->instr_ptr->op.code] == ENTER_EXECUTOR);
    #endif
    stack_pointer = _PyFrame_GetStackPointer(frame);
    // Skip past the caller's SEND/FOR_ITER instruction and its cache.
    LOAD_IP(1 + INLINE_CACHE_ENTRIES_SEND);
    value = temp;
    LLTRACE_RESUME_FRAME();
    // Push the yielded value onto the *caller's* stack.
    stack_pointer[0] = value;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}
| #undef TIER_ONE |