| // This file is generated by Tools/cases_generator/optimizer_generator.py |
| // from: |
| // Python/optimizer_bytecodes.c |
| // Do not edit! |
| |
| case _NOP: { |
| break; |
| } |
| |
| case _CHECK_PERIODIC: { |
| break; |
| } |
| |
| case _CHECK_PERIODIC_IF_NOT_YIELD_FROM: { |
| break; |
| } |
| |
| /* _QUICKEN_RESUME is not a viable micro-op for tier 2 */ |
| |
| /* _LOAD_BYTECODE is not a viable micro-op for tier 2 */ |
| |
| case _RESUME_CHECK: { |
| break; |
| } |
| |
| /* _MONITOR_RESUME is not a viable micro-op for tier 2 */ |
| |
case _LOAD_FAST_CHECK: {
    // Push the symbolic value of local variable `oparg`.
    _Py_UopsSymbol *value;
    value = GETLOCAL(oparg);
    // We guarantee this will error - just bail and don't optimize it.
    // (A NULL symbol means the local is known to be unbound, so the real
    // instruction would raise UnboundLocalError; stop projecting the trace.)
    if (sym_is_null(value)) {
        ctx->done = true;
    }
    stack_pointer[0] = value;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}
| |
| case _LOAD_FAST: { |
| _Py_UopsSymbol *value; |
| value = GETLOCAL(oparg); |
| stack_pointer[0] = value; |
| stack_pointer += 1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _LOAD_FAST_AND_CLEAR: { |
| _Py_UopsSymbol *value; |
| value = GETLOCAL(oparg); |
| _Py_UopsSymbol *temp = sym_new_null(ctx); |
| GETLOCAL(oparg) = temp; |
| stack_pointer[0] = value; |
| stack_pointer += 1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
case _LOAD_CONST: {
    // The constant is known at trace-projection time, so specialize the
    // generic load into an inline load of the concrete object. Immortal
    // objects can be borrowed (no refcount traffic at runtime).
    _Py_UopsSymbol *value;
    PyObject *val = PyTuple_GET_ITEM(co->co_consts, this_instr->oparg);
    int opcode = _Py_IsImmortal(val) ? _LOAD_CONST_INLINE_BORROW : _LOAD_CONST_INLINE;
    REPLACE_OP(this_instr, opcode, 0, (uintptr_t)val);
    value = sym_new_const(ctx, val);
    stack_pointer[0] = value;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}
| |
| case _LOAD_CONST_IMMORTAL: { |
| _Py_UopsSymbol *value; |
| PyObject *val = PyTuple_GET_ITEM(co->co_consts, this_instr->oparg); |
| REPLACE_OP(this_instr, _LOAD_CONST_INLINE_BORROW, 0, (uintptr_t)val); |
| value = sym_new_const(ctx, val); |
| stack_pointer[0] = value; |
| stack_pointer += 1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _LOAD_SMALL_INT: { |
| _Py_UopsSymbol *value; |
| PyObject *val = PyLong_FromLong(this_instr->oparg); |
| value = sym_new_const(ctx, val); |
| stack_pointer[0] = value; |
| stack_pointer += 1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _STORE_FAST: { |
| _Py_UopsSymbol *value; |
| value = stack_pointer[-1]; |
| GETLOCAL(oparg) = value; |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _POP_TOP: { |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _PUSH_NULL: { |
| _Py_UopsSymbol *res; |
| res = sym_new_null(ctx); |
| stack_pointer[0] = res; |
| stack_pointer += 1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _END_SEND: { |
| _Py_UopsSymbol *val; |
| val = sym_new_not_null(ctx); |
| stack_pointer[-2] = val; |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _UNARY_NEGATIVE: { |
| _Py_UopsSymbol *res; |
| res = sym_new_not_null(ctx); |
| stack_pointer[-1] = res; |
| break; |
| } |
| |
| case _UNARY_NOT: { |
| _Py_UopsSymbol *res; |
| res = sym_new_not_null(ctx); |
| stack_pointer[-1] = res; |
| break; |
| } |
| |
case _TO_BOOL: {
    // Try to fold the truthiness test at trace time (optimize_to_bool
    // rewrites the instruction and produces `res` on success); otherwise
    // all we know is that the result is a bool.
    _Py_UopsSymbol *value;
    _Py_UopsSymbol *res;
    value = stack_pointer[-1];
    if (!optimize_to_bool(this_instr, ctx, value, &res)) {
        res = sym_new_type(ctx, &PyBool_Type);
    }
    stack_pointer[-1] = res;
    break;
}
| |
| case _TO_BOOL_BOOL: { |
| _Py_UopsSymbol *value; |
| _Py_UopsSymbol *res; |
| value = stack_pointer[-1]; |
| if (!optimize_to_bool(this_instr, ctx, value, &res)) { |
| sym_set_type(value, &PyBool_Type); |
| res = value; |
| } |
| stack_pointer[-1] = res; |
| break; |
| } |
| |
| case _TO_BOOL_INT: { |
| _Py_UopsSymbol *value; |
| _Py_UopsSymbol *res; |
| value = stack_pointer[-1]; |
| if (!optimize_to_bool(this_instr, ctx, value, &res)) { |
| sym_set_type(value, &PyLong_Type); |
| res = sym_new_type(ctx, &PyBool_Type); |
| } |
| stack_pointer[-1] = res; |
| break; |
| } |
| |
| case _TO_BOOL_LIST: { |
| _Py_UopsSymbol *value; |
| _Py_UopsSymbol *res; |
| value = stack_pointer[-1]; |
| if (!optimize_to_bool(this_instr, ctx, value, &res)) { |
| sym_set_type(value, &PyList_Type); |
| res = sym_new_type(ctx, &PyBool_Type); |
| } |
| stack_pointer[-1] = res; |
| break; |
| } |
| |
| case _TO_BOOL_NONE: { |
| _Py_UopsSymbol *value; |
| _Py_UopsSymbol *res; |
| value = stack_pointer[-1]; |
| if (!optimize_to_bool(this_instr, ctx, value, &res)) { |
| sym_set_const(value, Py_None); |
| res = sym_new_const(ctx, Py_False); |
| } |
| stack_pointer[-1] = res; |
| break; |
| } |
| |
| case _TO_BOOL_STR: { |
| _Py_UopsSymbol *value; |
| _Py_UopsSymbol *res; |
| value = stack_pointer[-1]; |
| if (!optimize_to_bool(this_instr, ctx, value, &res)) { |
| res = sym_new_type(ctx, &PyBool_Type); |
| sym_set_type(value, &PyUnicode_Type); |
| } |
| stack_pointer[-1] = res; |
| break; |
| } |
| |
| case _REPLACE_WITH_TRUE: { |
| _Py_UopsSymbol *res; |
| res = sym_new_not_null(ctx); |
| stack_pointer[-1] = res; |
| break; |
| } |
| |
| case _UNARY_INVERT: { |
| _Py_UopsSymbol *res; |
| res = sym_new_not_null(ctx); |
| stack_pointer[-1] = res; |
| break; |
| } |
| |
case _GUARD_BOTH_INT: {
    _Py_UopsSymbol *right;
    _Py_UopsSymbol *left;
    right = stack_pointer[-1];
    left = stack_pointer[-2];
    // Narrow the guard: remove it entirely when both operands are already
    // known to be ints, or downgrade it to check only the operand whose
    // type is still unknown (TOS = top of stack, NOS = next on stack).
    if (sym_matches_type(left, &PyLong_Type)) {
        if (sym_matches_type(right, &PyLong_Type)) {
            REPLACE_OP(this_instr, _NOP, 0, 0);
        }
        else {
            REPLACE_OP(this_instr, _GUARD_TOS_INT, 0, 0);
        }
    }
    else {
        if (sym_matches_type(right, &PyLong_Type)) {
            REPLACE_OP(this_instr, _GUARD_NOS_INT, 0, 0);
        }
    }
    // Whatever guard remains, after it executes both operands are ints.
    sym_set_type(left, &PyLong_Type);
    sym_set_type(right, &PyLong_Type);
    break;
}
| |
| case _GUARD_NOS_INT: { |
| break; |
| } |
| |
| case _GUARD_TOS_INT: { |
| break; |
| } |
| |
case _BINARY_OP_MULTIPLY_INT: {
    _Py_UopsSymbol *right;
    _Py_UopsSymbol *left;
    _Py_UopsSymbol *res;
    right = stack_pointer[-1];
    left = stack_pointer[-2];
    // Constant-fold when both operands are compile-time int constants;
    // otherwise only the result type (int) is known.
    if (sym_is_const(left) && sym_is_const(right) &&
        sym_matches_type(left, &PyLong_Type) && sym_matches_type(right, &PyLong_Type))
    {
        assert(PyLong_CheckExact(sym_get_const(left)));
        assert(PyLong_CheckExact(sym_get_const(right)));
        PyObject *temp = _PyLong_Multiply((PyLongObject *)sym_get_const(left),
            (PyLongObject *)sym_get_const(right));
        if (temp == NULL) {
            goto error;
        }
        // sym_new_const keeps its own reference, so release ours.
        res = sym_new_const(ctx, temp);
        Py_DECREF(temp);
        // TODO gh-115506:
        // replace opcode with constant propagated one and add tests!
    }
    else {
        res = sym_new_type(ctx, &PyLong_Type);
    }
    stack_pointer[-2] = res;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}
| |
| case _BINARY_OP_ADD_INT: { |
| _Py_UopsSymbol *right; |
| _Py_UopsSymbol *left; |
| _Py_UopsSymbol *res; |
| right = stack_pointer[-1]; |
| left = stack_pointer[-2]; |
| if (sym_is_const(left) && sym_is_const(right) && |
| sym_matches_type(left, &PyLong_Type) && sym_matches_type(right, &PyLong_Type)) |
| { |
| assert(PyLong_CheckExact(sym_get_const(left))); |
| assert(PyLong_CheckExact(sym_get_const(right))); |
| PyObject *temp = _PyLong_Add((PyLongObject *)sym_get_const(left), |
| (PyLongObject *)sym_get_const(right)); |
| if (temp == NULL) { |
| goto error; |
| } |
| res = sym_new_const(ctx, temp); |
| Py_DECREF(temp); |
| // TODO gh-115506: |
| // replace opcode with constant propagated one and add tests! |
| } |
| else { |
| res = sym_new_type(ctx, &PyLong_Type); |
| } |
| stack_pointer[-2] = res; |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _BINARY_OP_SUBTRACT_INT: { |
| _Py_UopsSymbol *right; |
| _Py_UopsSymbol *left; |
| _Py_UopsSymbol *res; |
| right = stack_pointer[-1]; |
| left = stack_pointer[-2]; |
| if (sym_is_const(left) && sym_is_const(right) && |
| sym_matches_type(left, &PyLong_Type) && sym_matches_type(right, &PyLong_Type)) |
| { |
| assert(PyLong_CheckExact(sym_get_const(left))); |
| assert(PyLong_CheckExact(sym_get_const(right))); |
| PyObject *temp = _PyLong_Subtract((PyLongObject *)sym_get_const(left), |
| (PyLongObject *)sym_get_const(right)); |
| if (temp == NULL) { |
| goto error; |
| } |
| res = sym_new_const(ctx, temp); |
| Py_DECREF(temp); |
| // TODO gh-115506: |
| // replace opcode with constant propagated one and add tests! |
| } |
| else { |
| res = sym_new_type(ctx, &PyLong_Type); |
| } |
| stack_pointer[-2] = res; |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
case _GUARD_BOTH_FLOAT: {
    _Py_UopsSymbol *right;
    _Py_UopsSymbol *left;
    right = stack_pointer[-1];
    left = stack_pointer[-2];
    // Narrow the guard, mirroring _GUARD_BOTH_INT: drop it when both
    // operands are already known floats, or check only the unknown one.
    if (sym_matches_type(left, &PyFloat_Type)) {
        if (sym_matches_type(right, &PyFloat_Type)) {
            REPLACE_OP(this_instr, _NOP, 0, 0);
        }
        else {
            REPLACE_OP(this_instr, _GUARD_TOS_FLOAT, 0, 0);
        }
    }
    else {
        if (sym_matches_type(right, &PyFloat_Type)) {
            REPLACE_OP(this_instr, _GUARD_NOS_FLOAT, 0, 0);
        }
    }
    // After the (possibly narrowed) guard runs, both operands are floats.
    sym_set_type(left, &PyFloat_Type);
    sym_set_type(right, &PyFloat_Type);
    break;
}
| |
| case _GUARD_NOS_FLOAT: { |
| break; |
| } |
| |
| case _GUARD_TOS_FLOAT: { |
| break; |
| } |
| |
| case _BINARY_OP_MULTIPLY_FLOAT: { |
| _Py_UopsSymbol *right; |
| _Py_UopsSymbol *left; |
| _Py_UopsSymbol *res; |
| right = stack_pointer[-1]; |
| left = stack_pointer[-2]; |
| if (sym_is_const(left) && sym_is_const(right) && |
| sym_matches_type(left, &PyFloat_Type) && sym_matches_type(right, &PyFloat_Type)) |
| { |
| assert(PyFloat_CheckExact(sym_get_const(left))); |
| assert(PyFloat_CheckExact(sym_get_const(right))); |
| PyObject *temp = PyFloat_FromDouble( |
| PyFloat_AS_DOUBLE(sym_get_const(left)) * |
| PyFloat_AS_DOUBLE(sym_get_const(right))); |
| if (temp == NULL) { |
| goto error; |
| } |
| res = sym_new_const(ctx, temp); |
| Py_DECREF(temp); |
| // TODO gh-115506: |
| // replace opcode with constant propagated one and update tests! |
| } |
| else { |
| res = sym_new_type(ctx, &PyFloat_Type); |
| } |
| stack_pointer[-2] = res; |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _BINARY_OP_ADD_FLOAT: { |
| _Py_UopsSymbol *right; |
| _Py_UopsSymbol *left; |
| _Py_UopsSymbol *res; |
| right = stack_pointer[-1]; |
| left = stack_pointer[-2]; |
| if (sym_is_const(left) && sym_is_const(right) && |
| sym_matches_type(left, &PyFloat_Type) && sym_matches_type(right, &PyFloat_Type)) |
| { |
| assert(PyFloat_CheckExact(sym_get_const(left))); |
| assert(PyFloat_CheckExact(sym_get_const(right))); |
| PyObject *temp = PyFloat_FromDouble( |
| PyFloat_AS_DOUBLE(sym_get_const(left)) + |
| PyFloat_AS_DOUBLE(sym_get_const(right))); |
| if (temp == NULL) { |
| goto error; |
| } |
| res = sym_new_const(ctx, temp); |
| Py_DECREF(temp); |
| // TODO gh-115506: |
| // replace opcode with constant propagated one and update tests! |
| } |
| else { |
| res = sym_new_type(ctx, &PyFloat_Type); |
| } |
| stack_pointer[-2] = res; |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _BINARY_OP_SUBTRACT_FLOAT: { |
| _Py_UopsSymbol *right; |
| _Py_UopsSymbol *left; |
| _Py_UopsSymbol *res; |
| right = stack_pointer[-1]; |
| left = stack_pointer[-2]; |
| if (sym_is_const(left) && sym_is_const(right) && |
| sym_matches_type(left, &PyFloat_Type) && sym_matches_type(right, &PyFloat_Type)) |
| { |
| assert(PyFloat_CheckExact(sym_get_const(left))); |
| assert(PyFloat_CheckExact(sym_get_const(right))); |
| PyObject *temp = PyFloat_FromDouble( |
| PyFloat_AS_DOUBLE(sym_get_const(left)) - |
| PyFloat_AS_DOUBLE(sym_get_const(right))); |
| if (temp == NULL) { |
| goto error; |
| } |
| res = sym_new_const(ctx, temp); |
| Py_DECREF(temp); |
| // TODO gh-115506: |
| // replace opcode with constant propagated one and update tests! |
| } |
| else { |
| res = sym_new_type(ctx, &PyFloat_Type); |
| } |
| stack_pointer[-2] = res; |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _GUARD_BOTH_UNICODE: { |
| _Py_UopsSymbol *right; |
| _Py_UopsSymbol *left; |
| right = stack_pointer[-1]; |
| left = stack_pointer[-2]; |
| if (sym_matches_type(left, &PyUnicode_Type) && |
| sym_matches_type(right, &PyUnicode_Type)) { |
| REPLACE_OP(this_instr, _NOP, 0 ,0); |
| } |
| sym_set_type(left, &PyUnicode_Type); |
| sym_set_type(left, &PyUnicode_Type); |
| break; |
| } |
| |
| case _BINARY_OP_ADD_UNICODE: { |
| _Py_UopsSymbol *right; |
| _Py_UopsSymbol *left; |
| _Py_UopsSymbol *res; |
| right = stack_pointer[-1]; |
| left = stack_pointer[-2]; |
| if (sym_is_const(left) && sym_is_const(right) && |
| sym_matches_type(left, &PyUnicode_Type) && sym_matches_type(right, &PyUnicode_Type)) { |
| PyObject *temp = PyUnicode_Concat(sym_get_const(left), sym_get_const(right)); |
| if (temp == NULL) { |
| goto error; |
| } |
| res = sym_new_const(ctx, temp); |
| Py_DECREF(temp); |
| } |
| else { |
| res = sym_new_type(ctx, &PyUnicode_Type); |
| } |
| stack_pointer[-2] = res; |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
case _BINARY_OP_INPLACE_ADD_UNICODE: {
    _Py_UopsSymbol *right;
    _Py_UopsSymbol *left;
    right = stack_pointer[-1];
    left = stack_pointer[-2];
    _Py_UopsSymbol *res;
    // Constant-fold the concatenation when both operands are known
    // string constants; otherwise only the result type (str) is known.
    if (sym_is_const(left) && sym_is_const(right) &&
        sym_matches_type(left, &PyUnicode_Type) && sym_matches_type(right, &PyUnicode_Type)) {
        PyObject *temp = PyUnicode_Concat(sym_get_const(left), sym_get_const(right));
        if (temp == NULL) {
            goto error;
        }
        res = sym_new_const(ctx, temp);
        Py_DECREF(temp);
    }
    else {
        res = sym_new_type(ctx, &PyUnicode_Type);
    }
    // _STORE_FAST:
    // This uop fuses the following store: the result goes straight into
    // the local whose index is carried in operand0, not onto the stack.
    GETLOCAL(this_instr->operand0) = res;
    stack_pointer += -2;
    assert(WITHIN_STACK_BOUNDS());
    break;
}
| |
| case _BINARY_SUBSCR: { |
| _Py_UopsSymbol *res; |
| res = sym_new_not_null(ctx); |
| stack_pointer[-2] = res; |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _BINARY_SLICE: { |
| _Py_UopsSymbol *res; |
| res = sym_new_not_null(ctx); |
| stack_pointer[-3] = res; |
| stack_pointer += -2; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _STORE_SLICE: { |
| stack_pointer += -4; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _BINARY_SUBSCR_LIST_INT: { |
| _Py_UopsSymbol *res; |
| res = sym_new_not_null(ctx); |
| stack_pointer[-2] = res; |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _BINARY_SUBSCR_STR_INT: { |
| _Py_UopsSymbol *res; |
| res = sym_new_not_null(ctx); |
| stack_pointer[-2] = res; |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _BINARY_SUBSCR_TUPLE_INT: { |
| _Py_UopsSymbol *res; |
| res = sym_new_not_null(ctx); |
| stack_pointer[-2] = res; |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _BINARY_SUBSCR_DICT: { |
| _Py_UopsSymbol *res; |
| res = sym_new_not_null(ctx); |
| stack_pointer[-2] = res; |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _BINARY_SUBSCR_CHECK_FUNC: { |
| break; |
| } |
| |
| case _BINARY_SUBSCR_INIT_CALL: { |
| _Py_UopsSymbol *sub; |
| _Py_UopsSymbol *container; |
| _Py_UOpsAbstractFrame *new_frame; |
| sub = stack_pointer[-1]; |
| container = stack_pointer[-2]; |
| (void)container; |
| (void)sub; |
| new_frame = NULL; |
| ctx->done = true; |
| stack_pointer[-2] = (_Py_UopsSymbol *)new_frame; |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _LIST_APPEND: { |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _SET_ADD: { |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _STORE_SUBSCR: { |
| stack_pointer += -3; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _STORE_SUBSCR_LIST_INT: { |
| stack_pointer += -3; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _STORE_SUBSCR_DICT: { |
| stack_pointer += -3; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _DELETE_SUBSCR: { |
| stack_pointer += -2; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _CALL_INTRINSIC_1: { |
| _Py_UopsSymbol *res; |
| res = sym_new_not_null(ctx); |
| stack_pointer[-1] = res; |
| break; |
| } |
| |
| case _CALL_INTRINSIC_2: { |
| _Py_UopsSymbol *res; |
| res = sym_new_not_null(ctx); |
| stack_pointer[-2] = res; |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
case _RETURN_VALUE: {
    // Pop the abstract frame and push the return value onto the
    // caller's (symbolic) stack.
    _Py_UopsSymbol *retval;
    _Py_UopsSymbol *res;
    retval = stack_pointer[-1];
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    ctx->frame->stack_pointer = stack_pointer;
    frame_pop(ctx);
    stack_pointer = ctx->frame->stack_pointer;
    /* Stack space handling */
    assert(corresponding_check_stack == NULL);
    assert(co != NULL);
    // Return the callee's frame slots to the running stack-space budget.
    int framesize = co->co_framesize;
    assert(framesize > 0);
    assert(framesize <= curr_space);
    curr_space -= framesize;
    // Switch back to the caller's code object for subsequent uops.
    co = get_code(this_instr);
    if (co == NULL) {
        // might be impossible, but bailing is still safe
        ctx->done = true;
    }
    res = retval;
    stack_pointer[0] = res;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}
| |
| case _GET_AITER: { |
| _Py_UopsSymbol *iter; |
| iter = sym_new_not_null(ctx); |
| stack_pointer[-1] = iter; |
| break; |
| } |
| |
| case _GET_ANEXT: { |
| _Py_UopsSymbol *awaitable; |
| awaitable = sym_new_not_null(ctx); |
| stack_pointer[0] = awaitable; |
| stack_pointer += 1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _GET_AWAITABLE: { |
| _Py_UopsSymbol *iter; |
| iter = sym_new_not_null(ctx); |
| stack_pointer[-1] = iter; |
| break; |
| } |
| |
| /* _SEND is not a viable micro-op for tier 2 */ |
| |
| case _SEND_GEN_FRAME: { |
| // We are about to hit the end of the trace: |
| ctx->done = true; |
| break; |
| } |
| |
| case _YIELD_VALUE: { |
| _Py_UopsSymbol *res; |
| res = sym_new_unknown(ctx); |
| stack_pointer[-1] = res; |
| break; |
| } |
| |
| case _POP_EXCEPT: { |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _LOAD_COMMON_CONSTANT: { |
| _Py_UopsSymbol *value; |
| value = sym_new_not_null(ctx); |
| stack_pointer[0] = value; |
| stack_pointer += 1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _LOAD_BUILD_CLASS: { |
| _Py_UopsSymbol *bc; |
| bc = sym_new_not_null(ctx); |
| stack_pointer[0] = bc; |
| stack_pointer += 1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _STORE_NAME: { |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _DELETE_NAME: { |
| break; |
| } |
| |
case _UNPACK_SEQUENCE: {
    _Py_UopsSymbol *seq;
    _Py_UopsSymbol **values;
    seq = stack_pointer[-1];
    values = &stack_pointer[-1];
    /* This has to be done manually */
    // Nothing is known about the unpacked elements, so the sequence slot
    // plus (oparg - 1) new slots are filled with unknown symbols.
    (void)seq;
    for (int i = 0; i < oparg; i++) {
        values[i] = sym_new_unknown(ctx);
    }
    stack_pointer += -1 + oparg;
    assert(WITHIN_STACK_BOUNDS());
    break;
}
| |
| case _UNPACK_SEQUENCE_TWO_TUPLE: { |
| _Py_UopsSymbol *val1; |
| _Py_UopsSymbol *val0; |
| val1 = sym_new_not_null(ctx); |
| val0 = sym_new_not_null(ctx); |
| stack_pointer[-1] = val1; |
| stack_pointer[0] = val0; |
| stack_pointer += 1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _UNPACK_SEQUENCE_TUPLE: { |
| _Py_UopsSymbol **values; |
| values = &stack_pointer[-1]; |
| for (int _i = oparg; --_i >= 0;) { |
| values[_i] = sym_new_not_null(ctx); |
| } |
| stack_pointer += -1 + oparg; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _UNPACK_SEQUENCE_LIST: { |
| _Py_UopsSymbol **values; |
| values = &stack_pointer[-1]; |
| for (int _i = oparg; --_i >= 0;) { |
| values[_i] = sym_new_not_null(ctx); |
| } |
| stack_pointer += -1 + oparg; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _UNPACK_EX: { |
| _Py_UopsSymbol *seq; |
| _Py_UopsSymbol **values; |
| seq = stack_pointer[-1]; |
| values = &stack_pointer[-1]; |
| /* This has to be done manually */ |
| (void)seq; |
| int totalargs = (oparg & 0xFF) + (oparg >> 8) + 1; |
| for (int i = 0; i < totalargs; i++) { |
| values[i] = sym_new_unknown(ctx); |
| } |
| stack_pointer += (oparg & 0xFF) + (oparg >> 8); |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _STORE_ATTR: { |
| stack_pointer += -2; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _DELETE_ATTR: { |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _STORE_GLOBAL: { |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _DELETE_GLOBAL: { |
| break; |
| } |
| |
| case _LOAD_LOCALS: { |
| _Py_UopsSymbol *locals; |
| locals = sym_new_not_null(ctx); |
| stack_pointer[0] = locals; |
| stack_pointer += 1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| /* _LOAD_FROM_DICT_OR_GLOBALS is not a viable micro-op for tier 2 */ |
| |
| case _LOAD_NAME: { |
| _Py_UopsSymbol *v; |
| v = sym_new_not_null(ctx); |
| stack_pointer[0] = v; |
| stack_pointer += 1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _LOAD_GLOBAL: { |
| _Py_UopsSymbol **res; |
| _Py_UopsSymbol *null = NULL; |
| res = &stack_pointer[0]; |
| res[0] = sym_new_not_null(ctx); |
| null = sym_new_null(ctx); |
| if (oparg & 1) stack_pointer[1] = null; |
| stack_pointer += 1 + (oparg & 1); |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _GUARD_GLOBALS_VERSION: { |
| break; |
| } |
| |
| case _GUARD_GLOBALS_VERSION_PUSH_KEYS: { |
| _Py_UopsSymbol *globals_keys; |
| uint16_t version = (uint16_t)this_instr->operand0; |
| globals_keys = sym_new_unknown(ctx); |
| (void)version; |
| stack_pointer[0] = globals_keys; |
| stack_pointer += 1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _GUARD_BUILTINS_VERSION_PUSH_KEYS: { |
| _Py_UopsSymbol *builtins_keys; |
| uint16_t version = (uint16_t)this_instr->operand0; |
| builtins_keys = sym_new_unknown(ctx); |
| (void)version; |
| stack_pointer[0] = builtins_keys; |
| stack_pointer += 1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _LOAD_GLOBAL_MODULE_FROM_KEYS: { |
| _Py_UopsSymbol *res; |
| _Py_UopsSymbol *null = NULL; |
| res = sym_new_not_null(ctx); |
| null = sym_new_null(ctx); |
| stack_pointer[-1] = res; |
| if (oparg & 1) stack_pointer[0] = null; |
| stack_pointer += (oparg & 1); |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _LOAD_GLOBAL_BUILTINS_FROM_KEYS: { |
| _Py_UopsSymbol *res; |
| _Py_UopsSymbol *null = NULL; |
| res = sym_new_not_null(ctx); |
| null = sym_new_null(ctx); |
| stack_pointer[-1] = res; |
| if (oparg & 1) stack_pointer[0] = null; |
| stack_pointer += (oparg & 1); |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _DELETE_FAST: { |
| break; |
| } |
| |
| case _MAKE_CELL: { |
| break; |
| } |
| |
| case _DELETE_DEREF: { |
| break; |
| } |
| |
| case _LOAD_FROM_DICT_OR_DEREF: { |
| _Py_UopsSymbol *value; |
| value = sym_new_not_null(ctx); |
| stack_pointer[-1] = value; |
| break; |
| } |
| |
| case _LOAD_DEREF: { |
| _Py_UopsSymbol *value; |
| value = sym_new_not_null(ctx); |
| stack_pointer[0] = value; |
| stack_pointer += 1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _STORE_DEREF: { |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _COPY_FREE_VARS: { |
| break; |
| } |
| |
| case _BUILD_STRING: { |
| _Py_UopsSymbol *str; |
| str = sym_new_not_null(ctx); |
| stack_pointer[-oparg] = str; |
| stack_pointer += 1 - oparg; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _BUILD_TUPLE: { |
| _Py_UopsSymbol *tup; |
| tup = sym_new_not_null(ctx); |
| stack_pointer[-oparg] = tup; |
| stack_pointer += 1 - oparg; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _BUILD_LIST: { |
| _Py_UopsSymbol *list; |
| list = sym_new_not_null(ctx); |
| stack_pointer[-oparg] = list; |
| stack_pointer += 1 - oparg; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _LIST_EXTEND: { |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _SET_UPDATE: { |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _BUILD_SET: { |
| _Py_UopsSymbol *set; |
| set = sym_new_not_null(ctx); |
| stack_pointer[-oparg] = set; |
| stack_pointer += 1 - oparg; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _BUILD_MAP: { |
| _Py_UopsSymbol *map; |
| map = sym_new_not_null(ctx); |
| stack_pointer[-oparg*2] = map; |
| stack_pointer += 1 - oparg*2; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _SETUP_ANNOTATIONS: { |
| break; |
| } |
| |
| case _DICT_UPDATE: { |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _DICT_MERGE: { |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _MAP_ADD: { |
| stack_pointer += -2; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| /* _INSTRUMENTED_LOAD_SUPER_ATTR is not a viable micro-op for tier 2 */ |
| |
| case _LOAD_SUPER_ATTR_ATTR: { |
| _Py_UopsSymbol *attr_st; |
| attr_st = sym_new_not_null(ctx); |
| stack_pointer[-3] = attr_st; |
| stack_pointer += -2; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _LOAD_SUPER_ATTR_METHOD: { |
| _Py_UopsSymbol *attr; |
| _Py_UopsSymbol *self_or_null; |
| attr = sym_new_not_null(ctx); |
| self_or_null = sym_new_not_null(ctx); |
| stack_pointer[-3] = attr; |
| stack_pointer[-2] = self_or_null; |
| stack_pointer += -1; |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _LOAD_ATTR: { |
| _Py_UopsSymbol *owner; |
| _Py_UopsSymbol *attr; |
| _Py_UopsSymbol *self_or_null = NULL; |
| owner = stack_pointer[-1]; |
| (void)owner; |
| attr = sym_new_not_null(ctx); |
| self_or_null = sym_new_unknown(ctx); |
| stack_pointer[-1] = attr; |
| if (oparg & 1) stack_pointer[0] = self_or_null; |
| stack_pointer += (oparg & 1); |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
case _GUARD_TYPE_VERSION: {
    // Remove the guard when the owner's type version is already known to
    // match; otherwise record the version on the symbol and register a
    // type watcher so trace invalidation fires if the type mutates.
    _Py_UopsSymbol *owner;
    owner = stack_pointer[-1];
    uint32_t type_version = (uint32_t)this_instr->operand0;
    assert(type_version);
    if (sym_matches_type_version(owner, type_version)) {
        REPLACE_OP(this_instr, _NOP, 0, 0);
    } else {
        // add watcher so that whenever the type changes we invalidate this
        PyTypeObject *type = _PyType_LookupByVersion(type_version);
        // if the type is null, it was not found in the cache (there was a conflict)
        // with the key, in which case we can't trust the version
        if (type) {
            // if the type version was set properly, then add a watcher
            // if it wasn't this means that the type version was previously set to something else
            // and we set the owner to bottom, so we don't need to add a watcher because we must have
            // already added one earlier.
            if (sym_set_type_version(owner, type_version)) {
                PyType_Watch(TYPE_WATCHER_ID, (PyObject *)type);
                _Py_BloomFilter_Add(dependencies, type);
            }
        }
    }
    break;
}
| |
| case _CHECK_MANAGED_OBJECT_HAS_VALUES: { |
| break; |
| } |
| |
| case _LOAD_ATTR_INSTANCE_VALUE: { |
| _Py_UopsSymbol *owner; |
| _Py_UopsSymbol *attr; |
| _Py_UopsSymbol *null = NULL; |
| owner = stack_pointer[-1]; |
| uint16_t offset = (uint16_t)this_instr->operand0; |
| attr = sym_new_not_null(ctx); |
| null = sym_new_null(ctx); |
| (void)offset; |
| (void)owner; |
| stack_pointer[-1] = attr; |
| if (oparg & 1) stack_pointer[0] = null; |
| stack_pointer += (oparg & 1); |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
case _CHECK_ATTR_MODULE: {
    // If the owner is a known module constant whose dict has seen few
    // enough mutations, watch the dict (so mutation invalidates the
    // trace) and remove the runtime check.
    _Py_UopsSymbol *owner;
    owner = stack_pointer[-1];
    uint32_t dict_version = (uint32_t)this_instr->operand0;
    (void)dict_version;
    if (sym_is_const(owner)) {
        PyObject *cnst = sym_get_const(owner);
        if (PyModule_CheckExact(cnst)) {
            PyModuleObject *mod = (PyModuleObject *)cnst;
            PyObject *dict = mod->md_dict;
            uint64_t watched_mutations = get_mutations(dict);
            if (watched_mutations < _Py_MAX_ALLOWED_GLOBALS_MODIFICATIONS) {
                PyDict_Watch(GLOBALS_WATCHER_ID, dict);
                _Py_BloomFilter_Add(dependencies, dict);
                // _LOAD_ATTR_MODULE (the next uop) keys off this _NOP to
                // know the module is const and its dict is watched.
                this_instr->opcode = _NOP;
            }
        }
    }
    break;
}
| |
case _LOAD_ATTR_MODULE: {
    // Try to turn the module-attribute load into a constant load when the
    // preceding _CHECK_ATTR_MODULE proved the module const and watched
    // its dict (signalled by that uop having been replaced with _NOP).
    _Py_UopsSymbol *owner;
    _Py_UopsSymbol *attr;
    _Py_UopsSymbol *null = NULL;
    owner = stack_pointer[-1];
    uint16_t index = (uint16_t)this_instr->operand0;
    (void)index;
    null = sym_new_null(ctx);
    attr = NULL;
    if (this_instr[-1].opcode == _NOP) {
        // Preceding _CHECK_ATTR_MODULE was removed: mod is const and dict is watched.
        assert(sym_is_const(owner));
        PyModuleObject *mod = (PyModuleObject *)sym_get_const(owner);
        assert(PyModule_CheckExact(mod));
        PyObject *dict = mod->md_dict;
        // Temporarily publish the stack effect around the call below,
        // then undo it; the real result is written after the if-block.
        stack_pointer[-1] = attr;
        if (oparg & 1) stack_pointer[0] = null;
        stack_pointer += (oparg & 1);
        assert(WITHIN_STACK_BOUNDS());
        PyObject *res = convert_global_to_const(this_instr, dict);
        if (res != NULL) {
            // The owner is no longer consumed at runtime: turn the
            // (now-_NOP) predecessor into a _POP_TOP to drop it.
            this_instr[-1].opcode = _POP_TOP;
            attr = sym_new_const(ctx, res);
        }
        stack_pointer += -(oparg & 1);
        assert(WITHIN_STACK_BOUNDS());
    }
    if (attr == NULL) {
        /* No conversion made. We don't know what `attr` is. */
        attr = sym_new_not_null(ctx);
    }
    stack_pointer[-1] = attr;
    if (oparg & 1) stack_pointer[0] = null;
    stack_pointer += (oparg & 1);
    assert(WITHIN_STACK_BOUNDS());
    break;
}
| |
| case _CHECK_ATTR_WITH_HINT: { |
| break; |
| } |
| |
| case _LOAD_ATTR_WITH_HINT: { |
| _Py_UopsSymbol *owner; |
| _Py_UopsSymbol *attr; |
| _Py_UopsSymbol *null = NULL; |
| owner = stack_pointer[-1]; |
| uint16_t hint = (uint16_t)this_instr->operand0; |
| attr = sym_new_not_null(ctx); |
| null = sym_new_null(ctx); |
| (void)hint; |
| (void)owner; |
| stack_pointer[-1] = attr; |
| if (oparg & 1) stack_pointer[0] = null; |
| stack_pointer += (oparg & 1); |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _LOAD_ATTR_SLOT: { |
| _Py_UopsSymbol *owner; |
| _Py_UopsSymbol *attr; |
| _Py_UopsSymbol *null = NULL; |
| owner = stack_pointer[-1]; |
| uint16_t index = (uint16_t)this_instr->operand0; |
| attr = sym_new_not_null(ctx); |
| null = sym_new_null(ctx); |
| (void)index; |
| (void)owner; |
| stack_pointer[-1] = attr; |
| if (oparg & 1) stack_pointer[0] = null; |
| stack_pointer += (oparg & 1); |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
| case _CHECK_ATTR_CLASS: { |
| break; |
| } |
| |
| case _LOAD_ATTR_CLASS: { |
| _Py_UopsSymbol *owner; |
| _Py_UopsSymbol *attr; |
| _Py_UopsSymbol *null = NULL; |
| owner = stack_pointer[-1]; |
| PyObject *descr = (PyObject *)this_instr->operand0; |
| attr = sym_new_not_null(ctx); |
| null = sym_new_null(ctx); |
| (void)descr; |
| (void)owner; |
| stack_pointer[-1] = attr; |
| if (oparg & 1) stack_pointer[0] = null; |
| stack_pointer += (oparg & 1); |
| assert(WITHIN_STACK_BOUNDS()); |
| break; |
| } |
| |
/* _LOAD_ATTR_PROPERTY_FRAME would push a new frame for the property's
 * fget; this pass does not model that frame (new_frame = NULL) and ends
 * the abstract interpretation of the trace here (ctx->done = true). */
case _LOAD_ATTR_PROPERTY_FRAME: {
    _Py_UopsSymbol *owner;
    _Py_UOpsAbstractFrame *new_frame;
    owner = stack_pointer[-1];
    PyObject *fget = (PyObject *)this_instr->operand0;
    (void)fget;
    (void)owner;
    new_frame = NULL;
    ctx->done = true;
    stack_pointer[-1] = (_Py_UopsSymbol *)new_frame;
    break;
}

/* _LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN is not a viable micro-op for tier 2 */

/* Guard with no stack effect; nothing to model. */
case _GUARD_DORV_NO_DICT: {
    break;
}

/* The three STORE_ATTR specializations below only consume two stack
 * items (value and owner); no symbolic information is produced. */
case _STORE_ATTR_INSTANCE_VALUE: {
    stack_pointer += -2;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _STORE_ATTR_WITH_HINT: {
    stack_pointer += -2;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _STORE_ATTR_SLOT: {
    stack_pointer += -2;
    assert(WITHIN_STACK_BOUNDS());
    break;
}
| |
/* _COMPARE_OP: if bit 4 of oparg is set the result is coerced to bool,
 * so the symbol gets an exact PyBool_Type; otherwise a rich comparison
 * may return any non-NULL object.  NOTE(review): the -2/+2 stack_pointer
 * adjustment around the sym allocation appears to be the generator's
 * bookkeeping for calls that may escape — confirm against the cases
 * generator before relying on it. */
case _COMPARE_OP: {
    _Py_UopsSymbol *right;
    _Py_UopsSymbol *left;
    _Py_UopsSymbol *res;
    right = stack_pointer[-1];
    left = stack_pointer[-2];
    (void)left;
    (void)right;
    if (oparg & 16) {
        res = sym_new_type(ctx, &PyBool_Type);
    }
    else {
        stack_pointer += -2;
        assert(WITHIN_STACK_BOUNDS());
        res = _Py_uop_sym_new_not_null(ctx);
        stack_pointer += 2;
        assert(WITHIN_STACK_BOUNDS());
    }
    stack_pointer[-2] = res;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

/* The specialized comparisons always produce an exact bool: pop two,
 * push a PyBool_Type symbol. */
case _COMPARE_OP_FLOAT: {
    _Py_UopsSymbol *right;
    _Py_UopsSymbol *left;
    _Py_UopsSymbol *res;
    right = stack_pointer[-1];
    left = stack_pointer[-2];
    (void)left;
    (void)right;
    res = sym_new_type(ctx, &PyBool_Type);
    stack_pointer[-2] = res;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _COMPARE_OP_INT: {
    _Py_UopsSymbol *right;
    _Py_UopsSymbol *left;
    _Py_UopsSymbol *res;
    right = stack_pointer[-1];
    left = stack_pointer[-2];
    (void)left;
    (void)right;
    res = sym_new_type(ctx, &PyBool_Type);
    stack_pointer[-2] = res;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _COMPARE_OP_STR: {
    _Py_UopsSymbol *right;
    _Py_UopsSymbol *left;
    _Py_UopsSymbol *res;
    right = stack_pointer[-1];
    left = stack_pointer[-2];
    (void)left;
    (void)right;
    res = sym_new_type(ctx, &PyBool_Type);
    stack_pointer[-2] = res;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}
| |
/* Identity and containment tests: pop two operands, push a symbol with
 * exact type PyBool_Type (both ops are defined to yield a bool). */
case _IS_OP: {
    _Py_UopsSymbol *right;
    _Py_UopsSymbol *left;
    _Py_UopsSymbol *res;
    right = stack_pointer[-1];
    left = stack_pointer[-2];
    (void)left;
    (void)right;
    res = sym_new_type(ctx, &PyBool_Type);
    stack_pointer[-2] = res;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _CONTAINS_OP: {
    _Py_UopsSymbol *right;
    _Py_UopsSymbol *left;
    _Py_UopsSymbol *res;
    right = stack_pointer[-1];
    left = stack_pointer[-2];
    (void)left;
    (void)right;
    res = sym_new_type(ctx, &PyBool_Type);
    stack_pointer[-2] = res;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

/* Set/dict specializations only claim "non-NULL", not PyBool_Type. */
case _CONTAINS_OP_SET: {
    _Py_UopsSymbol *b;
    b = sym_new_not_null(ctx);
    stack_pointer[-2] = b;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _CONTAINS_OP_DICT: {
    _Py_UopsSymbol *b;
    b = sym_new_not_null(ctx);
    stack_pointer[-2] = b;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}
| |
/* Exception-matching and import micro-ops: results are opaque to the
 * optimizer, so each output is just a fresh non-NULL symbol with the
 * appropriate net stack effect. */
case _CHECK_EG_MATCH: {
    _Py_UopsSymbol *rest;
    _Py_UopsSymbol *match;
    rest = sym_new_not_null(ctx);
    match = sym_new_not_null(ctx);
    stack_pointer[-2] = rest;
    stack_pointer[-1] = match;
    break;
}

case _CHECK_EXC_MATCH: {
    _Py_UopsSymbol *b;
    b = sym_new_not_null(ctx);
    stack_pointer[-1] = b;
    break;
}

/* Pops two (level, fromlist), pushes the imported module object. */
case _IMPORT_NAME: {
    _Py_UopsSymbol *res;
    res = sym_new_not_null(ctx);
    stack_pointer[-2] = res;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

/* Keeps the module on the stack and pushes the looked-up attribute. */
case _IMPORT_FROM: {
    _Py_UopsSymbol *res;
    res = sym_new_not_null(ctx);
    stack_pointer[0] = res;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}
| |
/* _POP_JUMP_IF_FALSE is not a viable micro-op for tier 2 */

/* _POP_JUMP_IF_TRUE is not a viable micro-op for tier 2 */

/* Pattern-matching support ops: all results are opaque; model each as a
 * fresh non-NULL symbol with the correct stack delta. */
case _IS_NONE: {
    _Py_UopsSymbol *b;
    b = sym_new_not_null(ctx);
    stack_pointer[-1] = b;
    break;
}

case _GET_LEN: {
    _Py_UopsSymbol *len;
    len = sym_new_not_null(ctx);
    stack_pointer[0] = len;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

/* Pops three (subject, type, names), pushes attrs (or None at runtime —
 * the analysis only records "non-NULL"). */
case _MATCH_CLASS: {
    _Py_UopsSymbol *attrs;
    attrs = sym_new_not_null(ctx);
    stack_pointer[-3] = attrs;
    stack_pointer += -2;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _MATCH_MAPPING: {
    _Py_UopsSymbol *res;
    res = sym_new_not_null(ctx);
    stack_pointer[0] = res;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _MATCH_SEQUENCE: {
    _Py_UopsSymbol *res;
    res = sym_new_not_null(ctx);
    stack_pointer[0] = res;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _MATCH_KEYS: {
    _Py_UopsSymbol *values_or_none;
    values_or_none = sym_new_not_null(ctx);
    stack_pointer[0] = values_or_none;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}
| |
/* Iteration micro-ops.  Iterators and their items are generally opaque
 * (non-NULL); the one exception is _ITER_NEXT_RANGE, where the item is
 * known to be an int. */
case _GET_ITER: {
    _Py_UopsSymbol *iter;
    iter = sym_new_not_null(ctx);
    stack_pointer[-1] = iter;
    break;
}

case _GET_YIELD_FROM_ITER: {
    _Py_UopsSymbol *iter;
    iter = sym_new_not_null(ctx);
    stack_pointer[-1] = iter;
    break;
}

/* _FOR_ITER is not a viable micro-op for tier 2 */

case _FOR_ITER_TIER_TWO: {
    _Py_UopsSymbol *next;
    next = sym_new_not_null(ctx);
    stack_pointer[0] = next;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

/* _INSTRUMENTED_FOR_ITER is not a viable micro-op for tier 2 */

/* Guards below have no stack effect and record no new facts. */
case _ITER_CHECK_LIST: {
    break;
}

/* _ITER_JUMP_LIST is not a viable micro-op for tier 2 */

case _GUARD_NOT_EXHAUSTED_LIST: {
    break;
}

case _ITER_NEXT_LIST: {
    _Py_UopsSymbol *next;
    next = sym_new_not_null(ctx);
    stack_pointer[0] = next;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _ITER_CHECK_TUPLE: {
    break;
}

/* _ITER_JUMP_TUPLE is not a viable micro-op for tier 2 */

case _GUARD_NOT_EXHAUSTED_TUPLE: {
    break;
}

case _ITER_NEXT_TUPLE: {
    _Py_UopsSymbol *next;
    next = sym_new_not_null(ctx);
    stack_pointer[0] = next;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _ITER_CHECK_RANGE: {
    break;
}

/* _ITER_JUMP_RANGE is not a viable micro-op for tier 2 */

case _GUARD_NOT_EXHAUSTED_RANGE: {
    break;
}

/* A range iterator yields ints, so the pushed symbol gets PyLong_Type. */
case _ITER_NEXT_RANGE: {
    _Py_UopsSymbol *iter;
    _Py_UopsSymbol *next;
    iter = stack_pointer[-1];
    next = sym_new_type(ctx, &PyLong_Type);
    (void)iter;
    stack_pointer[0] = next;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _FOR_ITER_GEN_FRAME: {
    /* We are about to hit the end of the trace */
    ctx->done = true;
    break;
}
| |
/* _LOAD_SPECIAL: attribute is non-NULL, but whether a "self" is pushed
 * under it is unknown, so self_or_null is a fully unknown symbol. */
case _LOAD_SPECIAL: {
    _Py_UopsSymbol *owner;
    _Py_UopsSymbol *attr;
    _Py_UopsSymbol *self_or_null;
    owner = stack_pointer[-1];
    (void)owner;
    attr = sym_new_not_null(ctx);
    self_or_null = sym_new_unknown(ctx);
    stack_pointer[-1] = attr;
    stack_pointer[0] = self_or_null;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _WITH_EXCEPT_START: {
    _Py_UopsSymbol *res;
    res = sym_new_not_null(ctx);
    stack_pointer[0] = res;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _PUSH_EXC_INFO: {
    _Py_UopsSymbol *prev_exc;
    _Py_UopsSymbol *new_exc;
    prev_exc = sym_new_not_null(ctx);
    new_exc = sym_new_not_null(ctx);
    stack_pointer[-1] = prev_exc;
    stack_pointer[0] = new_exc;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

/* Guards with no stack effect. */
case _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT: {
    break;
}

case _GUARD_KEYS_VERSION: {
    break;
}

/* The three _LOAD_ATTR_METHOD_* specializations share one shape:
 * the bound-method descriptor becomes an opaque non-NULL symbol and the
 * original owner is re-pushed as "self", preserving whatever facts the
 * analysis already knew about it. */
case _LOAD_ATTR_METHOD_WITH_VALUES: {
    _Py_UopsSymbol *owner;
    _Py_UopsSymbol *attr;
    _Py_UopsSymbol *self = NULL;
    owner = stack_pointer[-1];
    PyObject *descr = (PyObject *)this_instr->operand0;
    (void)descr;
    attr = sym_new_not_null(ctx);
    self = owner;
    stack_pointer[-1] = attr;
    stack_pointer[0] = self;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _LOAD_ATTR_METHOD_NO_DICT: {
    _Py_UopsSymbol *owner;
    _Py_UopsSymbol *attr;
    _Py_UopsSymbol *self = NULL;
    owner = stack_pointer[-1];
    PyObject *descr = (PyObject *)this_instr->operand0;
    (void)descr;
    attr = sym_new_not_null(ctx);
    self = owner;
    stack_pointer[-1] = attr;
    stack_pointer[0] = self;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

/* Non-descriptor loads replace the owner with an opaque attribute. */
case _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES: {
    _Py_UopsSymbol *attr;
    attr = sym_new_not_null(ctx);
    stack_pointer[-1] = attr;
    break;
}

case _LOAD_ATTR_NONDESCRIPTOR_NO_DICT: {
    _Py_UopsSymbol *attr;
    attr = sym_new_not_null(ctx);
    stack_pointer[-1] = attr;
    break;
}

case _CHECK_ATTR_METHOD_LAZY_DICT: {
    break;
}

case _LOAD_ATTR_METHOD_LAZY_DICT: {
    _Py_UopsSymbol *owner;
    _Py_UopsSymbol *attr;
    _Py_UopsSymbol *self = NULL;
    owner = stack_pointer[-1];
    PyObject *descr = (PyObject *)this_instr->operand0;
    (void)descr;
    attr = sym_new_not_null(ctx);
    self = owner;
    stack_pointer[-1] = attr;
    stack_pointer[0] = self;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}
| |
/* _MAYBE_EXPAND_METHOD: may rewrite (callable, self_or_null) into
 * (func, self); both outputs are opaque non-NULL symbols.  NOTE(review):
 * `args` is assigned twice — that is how the generator emits it (input
 * binding plus the DSL body's own binding); harmless duplication. */
case _MAYBE_EXPAND_METHOD: {
    _Py_UopsSymbol **args;
    _Py_UopsSymbol *self_or_null;
    _Py_UopsSymbol *callable;
    _Py_UopsSymbol *func;
    _Py_UopsSymbol *maybe_self;
    args = &stack_pointer[-oparg];
    self_or_null = stack_pointer[-1 - oparg];
    callable = stack_pointer[-2 - oparg];
    args = &stack_pointer[-oparg];
    (void)callable;
    (void)self_or_null;
    (void)args;
    func = sym_new_not_null(ctx);
    maybe_self = sym_new_not_null(ctx);
    stack_pointer[-2 - oparg] = func;
    stack_pointer[-1 - oparg] = maybe_self;
    break;
}

/* _DO_CALL is not a viable micro-op for tier 2 */

/* _MONITOR_CALL is not a viable micro-op for tier 2 */

/* _PY_FRAME_GENERAL: the code object of the callee is recovered from the
 * _PUSH_FRAME uop two instructions ahead; if that fails, abandon the
 * trace.  The new abstract frame is pushed in place of callable+args
 * (no argument symbols are propagated: frame_new gets NULL/0). */
case _PY_FRAME_GENERAL: {
    _Py_UopsSymbol *self_or_null;
    _Py_UopsSymbol *callable;
    _Py_UOpsAbstractFrame *new_frame;
    self_or_null = stack_pointer[-1 - oparg];
    callable = stack_pointer[-2 - oparg];
    stack_pointer += -2 - oparg;
    assert(WITHIN_STACK_BOUNDS());
    (void)(self_or_null);
    (void)(callable);
    PyCodeObject *co = NULL;
    assert((this_instr + 2)->opcode == _PUSH_FRAME);
    co = get_code_with_logging((this_instr + 2));
    if (co == NULL) {
        ctx->done = true;
        break;
    }
    new_frame = frame_new(ctx, co, 0, NULL, 0);
    stack_pointer[0] = (_Py_UopsSymbol *)new_frame;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

/* _CHECK_FUNCTION_VERSION: when the callable is a known constant
 * function, the guard can be strength-reduced to the cheaper
 * _CHECK_FUNCTION_VERSION_INLINE form (the function pointer is stashed
 * in operand1).  Either way, we learn the callable is a PyFunction. */
case _CHECK_FUNCTION_VERSION: {
    _Py_UopsSymbol *self_or_null;
    _Py_UopsSymbol *callable;
    self_or_null = stack_pointer[-1 - oparg];
    callable = stack_pointer[-2 - oparg];
    uint32_t func_version = (uint32_t)this_instr->operand0;
    (void)self_or_null;
    if (sym_is_const(callable) && sym_matches_type(callable, &PyFunction_Type)) {
        assert(PyFunction_Check(sym_get_const(callable)));
        REPLACE_OP(this_instr, _CHECK_FUNCTION_VERSION_INLINE, 0, func_version);
        this_instr->operand1 = (uintptr_t)sym_get_const(callable);
    }
    sym_set_type(callable, &PyFunction_Type);
    break;
}

case _CHECK_FUNCTION_VERSION_INLINE: {
    break;
}

case _CHECK_METHOD_VERSION: {
    break;
}

/* Overwrites the two slots below the args with opaque symbols. */
case _EXPAND_METHOD: {
    _Py_UopsSymbol **method;
    _Py_UopsSymbol **self;
    method = &stack_pointer[-2 - oparg];
    self = &stack_pointer[-1 - oparg];
    method[0] = sym_new_not_null(ctx);
    self[0] = sym_new_not_null(ctx);
    break;
}

case _CHECK_IS_NOT_PY_CALLABLE: {
    break;
}

/* Generic call: pop callable+self+args, push opaque non-NULL result. */
case _CALL_NON_PY_GENERAL: {
    _Py_UopsSymbol *res;
    res = sym_new_not_null(ctx);
    stack_pointer[-2 - oparg] = res;
    stack_pointer += -1 - oparg;
    assert(WITHIN_STACK_BOUNDS());
    break;
}
| |
/* After this guard passes we know the null slot really is NULL and the
 * callable really is a bound method; record both facts. */
case _CHECK_CALL_BOUND_METHOD_EXACT_ARGS: {
    _Py_UopsSymbol *null;
    _Py_UopsSymbol *callable;
    null = stack_pointer[-1 - oparg];
    callable = stack_pointer[-2 - oparg];
    sym_set_null(null);
    sym_set_type(callable, &PyMethod_Type);
    break;
}

case _INIT_CALL_BOUND_METHOD_EXACT_ARGS: {
    _Py_UopsSymbol *callable;
    _Py_UopsSymbol *func;
    _Py_UopsSymbol *self;
    callable = stack_pointer[-2 - oparg];
    (void)callable;
    func = sym_new_not_null(ctx);
    self = sym_new_not_null(ctx);
    stack_pointer[-2 - oparg] = func;
    stack_pointer[-1 - oparg] = self;
    break;
}

case _CHECK_PEP_523: {
    /* Setting the eval frame function invalidates
     * all executors, so no need to check dynamically */
    if (_PyInterpreterState_GET()->eval_frame == NULL) {
        REPLACE_OP(this_instr, _NOP, 0 ,0);
    }
    break;
}

/* If the callable is a known constant function whose co_argcount matches
 * the (possibly self-adjusted) argument count, the guard can never fail
 * and is removed. */
case _CHECK_FUNCTION_EXACT_ARGS: {
    _Py_UopsSymbol *self_or_null;
    _Py_UopsSymbol *callable;
    self_or_null = stack_pointer[-1 - oparg];
    callable = stack_pointer[-2 - oparg];
    assert(sym_matches_type(callable, &PyFunction_Type));
    if (sym_is_const(callable)) {
        if (sym_is_null(self_or_null) || sym_is_not_null(self_or_null)) {
            PyFunctionObject *func = (PyFunctionObject *)sym_get_const(callable);
            PyCodeObject *co = (PyCodeObject *)func->func_code;
            if (co->co_argcount == oparg + !sym_is_null(self_or_null)) {
                REPLACE_OP(this_instr, _NOP, 0 ,0);
            }
        }
    }
    (void)self_or_null;
    break;
}

/* Remember this instruction so _PUSH_FRAME can later decide whether the
 * stack check is redundant (only the first one is kept). */
case _CHECK_STACK_SPACE: {
    assert(corresponding_check_stack == NULL);
    corresponding_check_stack = this_instr;
    break;
}
| |
/* _INIT_CALL_PY_EXACT_ARGS: build the callee's abstract frame.  The
 * callee's code object comes from the _PUSH_FRAME two uops ahead.  If
 * self_or_null is known non-NULL, "self" becomes argument 0 (args--,
 * argcount++), mirroring the runtime bound-method fiddling.  Arguments
 * are only propagated into the new frame when the null-ness of
 * self_or_null is definitely known either way. */
case _INIT_CALL_PY_EXACT_ARGS: {
    _Py_UopsSymbol **args;
    _Py_UopsSymbol *self_or_null;
    _Py_UopsSymbol *callable;
    _Py_UOpsAbstractFrame *new_frame;
    args = &stack_pointer[-oparg];
    self_or_null = stack_pointer[-1 - oparg];
    callable = stack_pointer[-2 - oparg];
    int argcount = oparg;
    (void)callable;
    PyCodeObject *co = NULL;
    assert((this_instr + 2)->opcode == _PUSH_FRAME);
    stack_pointer += -2 - oparg;
    assert(WITHIN_STACK_BOUNDS());
    co = get_code_with_logging((this_instr + 2));
    if (co == NULL) {
        ctx->done = true;
        break;
    }
    assert(self_or_null != NULL);
    assert(args != NULL);
    if (sym_is_not_null(self_or_null)) {
        // Bound method fiddling, same as _INIT_CALL_PY_EXACT_ARGS in VM
        args--;
        argcount++;
    }
    if (sym_is_null(self_or_null) || sym_is_not_null(self_or_null)) {
        new_frame = frame_new(ctx, co, 0, args, argcount);
    } else {
        new_frame = frame_new(ctx, co, 0, NULL, 0);
    }
    stack_pointer[0] = (_Py_UopsSymbol *)new_frame;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

/* _PUSH_FRAME: switch the analysis context to the callee frame and do
 * the trace-wide stack-space accounting: accumulate co_framesize into
 * curr_space (bailing on overflow past INT32_MAX), track the maximum,
 * and NOP out every _CHECK_STACK_SPACE except the first so it can later
 * be widened into a single _CHECK_STACK_SPACE_OPERAND. */
case _PUSH_FRAME: {
    _Py_UOpsAbstractFrame *new_frame;
    new_frame = (_Py_UOpsAbstractFrame *)stack_pointer[-1];
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    ctx->frame->stack_pointer = stack_pointer;
    ctx->frame = new_frame;
    ctx->curr_frame_depth++;
    stack_pointer = new_frame->stack_pointer;
    co = get_code(this_instr);
    if (co == NULL) {
        // should be about to _EXIT_TRACE anyway
        ctx->done = true;
        break;
    }
    /* Stack space handling */
    int framesize = co->co_framesize;
    assert(framesize > 0);
    curr_space += framesize;
    if (curr_space < 0 || curr_space > INT32_MAX) {
        // won't fit in signed 32-bit int
        ctx->done = true;
        break;
    }
    max_space = curr_space > max_space ? curr_space : max_space;
    if (first_valid_check_stack == NULL) {
        first_valid_check_stack = corresponding_check_stack;
    }
    else {
        if (corresponding_check_stack) {
            // delete all but the first valid _CHECK_STACK_SPACE
            corresponding_check_stack->opcode = _NOP;
        }
    }
    corresponding_check_stack = NULL;
    break;
}
| |
/* One-argument call specializations: pop callable+null+arg (3 items),
 * push an opaque non-NULL result. */
case _CALL_TYPE_1: {
    _Py_UopsSymbol *res;
    res = sym_new_not_null(ctx);
    stack_pointer[-3] = res;
    stack_pointer += -2;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _CALL_STR_1: {
    _Py_UopsSymbol *res;
    res = sym_new_not_null(ctx);
    stack_pointer[-3] = res;
    stack_pointer += -2;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _CALL_TUPLE_1: {
    _Py_UopsSymbol *res;
    res = sym_new_not_null(ctx);
    stack_pointer[-3] = res;
    stack_pointer += -2;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

/* __init__ path: callable+null become (self, init); both opaque.  The
 * type_version operand is execution-time data only.  NOTE(review): the
 * doubled `args` assignment is generator output, as in
 * _MAYBE_EXPAND_METHOD. */
case _CHECK_AND_ALLOCATE_OBJECT: {
    _Py_UopsSymbol **args;
    _Py_UopsSymbol *null;
    _Py_UopsSymbol *callable;
    _Py_UopsSymbol *self;
    _Py_UopsSymbol *init;
    args = &stack_pointer[-oparg];
    null = stack_pointer[-1 - oparg];
    callable = stack_pointer[-2 - oparg];
    args = &stack_pointer[-oparg];
    uint32_t type_version = (uint32_t)this_instr->operand0;
    (void)type_version;
    (void)callable;
    (void)null;
    (void)args;
    self = sym_new_not_null(ctx);
    init = sym_new_not_null(ctx);
    stack_pointer[-2 - oparg] = self;
    stack_pointer[-1 - oparg] = init;
    break;
}

/* The __init__ frame is not modeled; end the trace analysis here. */
case _CREATE_INIT_FRAME: {
    _Py_UopsSymbol **args;
    _Py_UopsSymbol *init;
    _Py_UopsSymbol *self;
    _Py_UOpsAbstractFrame *init_frame;
    args = &stack_pointer[-oparg];
    init = stack_pointer[-1 - oparg];
    self = stack_pointer[-2 - oparg];
    (void)self;
    (void)init;
    (void)args;
    init_frame = NULL;
    ctx->done = true;
    stack_pointer[-2 - oparg] = (_Py_UopsSymbol *)init_frame;
    stack_pointer += -1 - oparg;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _EXIT_INIT_CHECK: {
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}
| |
/* Builtin/method-descriptor call specializations all share one shape:
 * pop callable + self/null + oparg args, push an opaque non-NULL result.
 * No type information is inferred for any of these results. */
case _CALL_BUILTIN_CLASS: {
    _Py_UopsSymbol *res;
    res = sym_new_not_null(ctx);
    stack_pointer[-2 - oparg] = res;
    stack_pointer += -1 - oparg;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _CALL_BUILTIN_O: {
    _Py_UopsSymbol *res;
    res = sym_new_not_null(ctx);
    stack_pointer[-2 - oparg] = res;
    stack_pointer += -1 - oparg;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _CALL_BUILTIN_FAST: {
    _Py_UopsSymbol *res;
    res = sym_new_not_null(ctx);
    stack_pointer[-2 - oparg] = res;
    stack_pointer += -1 - oparg;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _CALL_BUILTIN_FAST_WITH_KEYWORDS: {
    _Py_UopsSymbol *res;
    res = sym_new_not_null(ctx);
    stack_pointer[-2 - oparg] = res;
    stack_pointer += -1 - oparg;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _CALL_LEN: {
    _Py_UopsSymbol *res;
    res = sym_new_not_null(ctx);
    stack_pointer[-2 - oparg] = res;
    stack_pointer += -1 - oparg;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _CALL_ISINSTANCE: {
    _Py_UopsSymbol *res;
    res = sym_new_not_null(ctx);
    stack_pointer[-2 - oparg] = res;
    stack_pointer += -1 - oparg;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

/* list.append returns nothing useful; just drop the three inputs. */
case _CALL_LIST_APPEND: {
    stack_pointer += -3;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _CALL_METHOD_DESCRIPTOR_O: {
    _Py_UopsSymbol *res;
    res = sym_new_not_null(ctx);
    stack_pointer[-2 - oparg] = res;
    stack_pointer += -1 - oparg;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS: {
    _Py_UopsSymbol *res;
    res = sym_new_not_null(ctx);
    stack_pointer[-2 - oparg] = res;
    stack_pointer += -1 - oparg;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _CALL_METHOD_DESCRIPTOR_NOARGS: {
    _Py_UopsSymbol *res;
    res = sym_new_not_null(ctx);
    stack_pointer[-2 - oparg] = res;
    stack_pointer += -1 - oparg;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _CALL_METHOD_DESCRIPTOR_FAST: {
    _Py_UopsSymbol *res;
    res = sym_new_not_null(ctx);
    stack_pointer[-2 - oparg] = res;
    stack_pointer += -1 - oparg;
    assert(WITHIN_STACK_BOUNDS());
    break;
}
| |
/* _INSTRUMENTED_CALL_KW is not a viable micro-op for tier 2 */

/* Keyword-call variant: invalidates func, self and every argument slot
 * (all become fresh opaque symbols) and replaces kwnames on top. */
case _MAYBE_EXPAND_METHOD_KW: {
    _Py_UopsSymbol **func;
    _Py_UopsSymbol **maybe_self;
    _Py_UopsSymbol **args;
    _Py_UopsSymbol *kwnames_out;
    func = &stack_pointer[-3 - oparg];
    maybe_self = &stack_pointer[-2 - oparg];
    args = &stack_pointer[-1 - oparg];
    func[0] = sym_new_not_null(ctx);
    maybe_self[0] = sym_new_not_null(ctx);
    for (int _i = oparg; --_i >= 0;) {
        args[_i] = sym_new_not_null(ctx);
    }
    kwnames_out = sym_new_not_null(ctx);
    stack_pointer[-1] = kwnames_out;
    break;
}

/* _DO_CALL_KW is not a viable micro-op for tier 2 */

/* Keyword-call frames are not modeled; stop analyzing the trace. */
case _PY_FRAME_KW: {
    _Py_UopsSymbol *kwnames;
    _Py_UopsSymbol **args;
    _Py_UopsSymbol *self_or_null;
    _Py_UopsSymbol *callable;
    _Py_UOpsAbstractFrame *new_frame;
    kwnames = stack_pointer[-1];
    args = &stack_pointer[-1 - oparg];
    self_or_null = stack_pointer[-2 - oparg];
    callable = stack_pointer[-3 - oparg];
    (void)callable;
    (void)self_or_null;
    (void)args;
    (void)kwnames;
    new_frame = NULL;
    ctx->done = true;
    stack_pointer[-3 - oparg] = (_Py_UopsSymbol *)new_frame;
    stack_pointer += -2 - oparg;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _CHECK_FUNCTION_VERSION_KW: {
    break;
}

case _CHECK_METHOD_VERSION_KW: {
    break;
}

case _EXPAND_METHOD_KW: {
    _Py_UopsSymbol **method;
    _Py_UopsSymbol **self;
    method = &stack_pointer[-3 - oparg];
    self = &stack_pointer[-2 - oparg];
    method[0] = sym_new_not_null(ctx);
    self[0] = sym_new_not_null(ctx);
    break;
}

case _CHECK_IS_NOT_PY_CALLABLE_KW: {
    break;
}

/* Pop callable + self/null + args + kwnames, push opaque result. */
case _CALL_KW_NON_PY: {
    _Py_UopsSymbol *res;
    res = sym_new_not_null(ctx);
    stack_pointer[-3 - oparg] = res;
    stack_pointer += -2 - oparg;
    assert(WITHIN_STACK_BOUNDS());
    break;
}
| |
/* _INSTRUMENTED_CALL_FUNCTION_EX is not a viable micro-op for tier 2 */

/* CALL_FUNCTION_EX setup: callargs is replaced by an opaque tuple
 * symbol; when (oparg & 1) the kwargs slot above it is replaced too. */
case _MAKE_CALLARGS_A_TUPLE: {
    _Py_UopsSymbol *tuple;
    _Py_UopsSymbol *kwargs_out = NULL;
    tuple = sym_new_not_null(ctx);
    kwargs_out = sym_new_not_null(ctx);
    stack_pointer[-1 - (oparg & 1)] = tuple;
    if (oparg & 1) stack_pointer[-(oparg & 1)] = kwargs_out;
    break;
}

/* _DO_CALL_FUNCTION_EX is not a viable micro-op for tier 2 */

case _MAKE_FUNCTION: {
    _Py_UopsSymbol *func;
    func = sym_new_not_null(ctx);
    stack_pointer[-1] = func;
    break;
}

/* Consumes the attribute value, replaces the function with a fresh
 * opaque symbol. */
case _SET_FUNCTION_ATTRIBUTE: {
    _Py_UopsSymbol *func_out;
    func_out = sym_new_not_null(ctx);
    stack_pointer[-2] = func_out;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

/* _RETURN_GENERATOR: pop the abstract frame and return an unknown value
 * to the caller.  Also undoes this frame's contribution to the stack-
 * space accounting (see _PUSH_FRAME), then re-reads the caller's code
 * object; failing that, bail out of the analysis. */
case _RETURN_GENERATOR: {
    _Py_UopsSymbol *res;
    ctx->frame->stack_pointer = stack_pointer;
    frame_pop(ctx);
    stack_pointer = ctx->frame->stack_pointer;
    res = sym_new_unknown(ctx);
    /* Stack space handling */
    assert(corresponding_check_stack == NULL);
    assert(co != NULL);
    int framesize = co->co_framesize;
    assert(framesize > 0);
    assert(framesize <= curr_space);
    curr_space -= framesize;
    stack_pointer[0] = res;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    co = get_code(this_instr);
    if (co == NULL) {
        // might be impossible, but bailing is still safe
        ctx->done = true;
    }
    break;
}

/* Pops 2 or 3 items (3 iff oparg == 3, i.e. a step was given), pushes
 * an opaque slice symbol. */
case _BUILD_SLICE: {
    _Py_UopsSymbol *slice;
    slice = sym_new_not_null(ctx);
    stack_pointer[-2 - ((oparg == 3) ? 1 : 0)] = slice;
    stack_pointer += -1 - ((oparg == 3) ? 1 : 0);
    assert(WITHIN_STACK_BOUNDS());
    break;
}
| |
/* f-string formatting ops: results are opaque non-NULL symbols. */
case _CONVERT_VALUE: {
    _Py_UopsSymbol *result;
    result = sym_new_not_null(ctx);
    stack_pointer[-1] = result;
    break;
}

case _FORMAT_SIMPLE: {
    _Py_UopsSymbol *res;
    res = sym_new_not_null(ctx);
    stack_pointer[-1] = res;
    break;
}

case _FORMAT_WITH_SPEC: {
    _Py_UopsSymbol *res;
    res = sym_new_not_null(ctx);
    stack_pointer[-2] = res;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

/* _COPY duplicates the symbol oparg-deep, so all known facts about the
 * copied value are preserved on the new top of stack. */
case _COPY: {
    _Py_UopsSymbol *bottom;
    _Py_UopsSymbol *top;
    bottom = stack_pointer[-1 - (oparg-1)];
    assert(oparg > 0);
    top = bottom;
    stack_pointer[0] = top;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

/* _BINARY_OP type inference: if both operands are known ints/floats,
 * the result is an int when both are ints and the op is not (inplace)
 * true division, otherwise a float; with any other operand types the
 * result is fully unknown. */
case _BINARY_OP: {
    _Py_UopsSymbol *right;
    _Py_UopsSymbol *left;
    _Py_UopsSymbol *res;
    right = stack_pointer[-1];
    left = stack_pointer[-2];
    PyTypeObject *ltype = sym_get_type(left);
    PyTypeObject *rtype = sym_get_type(right);
    if (ltype != NULL && (ltype == &PyLong_Type || ltype == &PyFloat_Type) &&
        rtype != NULL && (rtype == &PyLong_Type || rtype == &PyFloat_Type))
    {
        if (oparg != NB_TRUE_DIVIDE && oparg != NB_INPLACE_TRUE_DIVIDE &&
            ltype == &PyLong_Type && rtype == &PyLong_Type) {
            /* If both inputs are ints and the op is not division the result is an int */
            res = sym_new_type(ctx, &PyLong_Type);
        }
        else {
            /* For any other op combining ints/floats the result is a float */
            res = sym_new_type(ctx, &PyFloat_Type);
        }
    }
    else {
        res = sym_new_unknown(ctx);
    }
    stack_pointer[-2] = res;
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

/* _SWAP exchanges the symbols at TOS and oparg-deep; no new facts. */
case _SWAP: {
    _Py_UopsSymbol *top_in;
    _Py_UopsSymbol *bottom_in;
    _Py_UopsSymbol *top_out;
    _Py_UopsSymbol *bottom_out;
    top_in = stack_pointer[-1];
    bottom_in = stack_pointer[-2 - (oparg-2)];
    bottom_out = bottom_in;
    top_out = top_in;
    stack_pointer[-2 - (oparg-2)] = top_out;
    stack_pointer[-1] = bottom_out;
    break;
}
| |
/* _INSTRUMENTED_LINE is not a viable micro-op for tier 2 */

/* _INSTRUMENTED_INSTRUCTION is not a viable micro-op for tier 2 */

/* _INSTRUMENTED_JUMP_FORWARD is not a viable micro-op for tier 2 */

/* _MONITOR_JUMP_BACKWARD is not a viable micro-op for tier 2 */

/* _INSTRUMENTED_POP_JUMP_IF_TRUE is not a viable micro-op for tier 2 */

/* _INSTRUMENTED_POP_JUMP_IF_FALSE is not a viable micro-op for tier 2 */

/* _INSTRUMENTED_POP_JUMP_IF_NONE is not a viable micro-op for tier 2 */

/* _INSTRUMENTED_POP_JUMP_IF_NOT_NONE is not a viable micro-op for tier 2 */

/* The four _GUARD_IS_*_POP cases constant-fold the guard when the
 * popped flag is a known constant: eliminate_pop_guard's second argument
 * is whether the guard is guaranteed to FAIL (true → the branch always
 * deopts, false → the guard is removable).  The transient -1/+1
 * stack_pointer adjustments bracket the escaping call; the net effect of
 * each case is a single pop. */
case _GUARD_IS_TRUE_POP: {
    _Py_UopsSymbol *flag;
    flag = stack_pointer[-1];
    if (sym_is_const(flag)) {
        PyObject *value = sym_get_const(flag);
        assert(value != NULL);
        stack_pointer += -1;
        assert(WITHIN_STACK_BOUNDS());
        eliminate_pop_guard(this_instr, value != Py_True);
        stack_pointer += 1;
        assert(WITHIN_STACK_BOUNDS());
    }
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _GUARD_IS_FALSE_POP: {
    _Py_UopsSymbol *flag;
    flag = stack_pointer[-1];
    if (sym_is_const(flag)) {
        PyObject *value = sym_get_const(flag);
        assert(value != NULL);
        stack_pointer += -1;
        assert(WITHIN_STACK_BOUNDS());
        eliminate_pop_guard(this_instr, value != Py_False);
        stack_pointer += 1;
        assert(WITHIN_STACK_BOUNDS());
    }
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

/* For the None guards, a non-constant flag whose type is known cannot
 * be None (asserted), so the guard outcome is still decidable. */
case _GUARD_IS_NONE_POP: {
    _Py_UopsSymbol *flag;
    flag = stack_pointer[-1];
    if (sym_is_const(flag)) {
        PyObject *value = sym_get_const(flag);
        assert(value != NULL);
        stack_pointer += -1;
        assert(WITHIN_STACK_BOUNDS());
        eliminate_pop_guard(this_instr, !Py_IsNone(value));
    }
    else {
        if (sym_has_type(flag)) {
            assert(!sym_matches_type(flag, &_PyNone_Type));
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            eliminate_pop_guard(this_instr, true);
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
        }
        stack_pointer += -1;
        assert(WITHIN_STACK_BOUNDS());
    }
    break;
}

case _GUARD_IS_NOT_NONE_POP: {
    _Py_UopsSymbol *flag;
    flag = stack_pointer[-1];
    if (sym_is_const(flag)) {
        PyObject *value = sym_get_const(flag);
        assert(value != NULL);
        stack_pointer += -1;
        assert(WITHIN_STACK_BOUNDS());
        eliminate_pop_guard(this_instr, Py_IsNone(value));
    }
    else {
        if (sym_has_type(flag)) {
            assert(!sym_matches_type(flag, &_PyNone_Type));
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            eliminate_pop_guard(this_instr, false);
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
        }
        stack_pointer += -1;
        assert(WITHIN_STACK_BOUNDS());
    }
    break;
}
| |
/* End-of-trace and bookkeeping micro-ops. */
case _JUMP_TO_TOP: {
    ctx->done = true;
    break;
}

case _SET_IP: {
    break;
}

case _CHECK_STACK_SPACE_OPERAND: {
    uint32_t framesize = (uint32_t)this_instr->operand0;
    (void)framesize;
    /* We should never see _CHECK_STACK_SPACE_OPERANDs.
     * They are only created at the end of this pass. */
    Py_UNREACHABLE();
    break;
}

case _SAVE_RETURN_OFFSET: {
    break;
}

case _EXIT_TRACE: {
    PyObject *exit_p = (PyObject *)this_instr->operand0;
    (void)exit_p;
    ctx->done = true;
    break;
}

case _CHECK_VALIDITY: {
    break;
}
| |
/* Inline-constant loads: the loaded object is known exactly, so a
 * constant symbol is created (enabling further constant folding).  The
 * _WITH_NULL variants also push a known-NULL symbol above it. */
case _LOAD_CONST_INLINE: {
    _Py_UopsSymbol *value;
    PyObject *ptr = (PyObject *)this_instr->operand0;
    value = sym_new_const(ctx, ptr);
    stack_pointer[0] = value;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _LOAD_CONST_INLINE_BORROW: {
    _Py_UopsSymbol *value;
    PyObject *ptr = (PyObject *)this_instr->operand0;
    value = sym_new_const(ctx, ptr);
    stack_pointer[0] = value;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

/* NOTE(review): unlike the cases above, this one models the replacement
 * value only as non-NULL, not as a constant — presumably deliberate in
 * the DSL source; confirm against optimizer_bytecodes.c. */
case _POP_TOP_LOAD_CONST_INLINE_BORROW: {
    _Py_UopsSymbol *value;
    value = sym_new_not_null(ctx);
    stack_pointer[-1] = value;
    break;
}

case _LOAD_CONST_INLINE_WITH_NULL: {
    _Py_UopsSymbol *value;
    _Py_UopsSymbol *null;
    PyObject *ptr = (PyObject *)this_instr->operand0;
    value = sym_new_const(ctx, ptr);
    null = sym_new_null(ctx);
    stack_pointer[0] = value;
    stack_pointer[1] = null;
    stack_pointer += 2;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _LOAD_CONST_INLINE_BORROW_WITH_NULL: {
    _Py_UopsSymbol *value;
    _Py_UopsSymbol *null;
    PyObject *ptr = (PyObject *)this_instr->operand0;
    value = sym_new_const(ctx, ptr);
    null = sym_new_null(ctx);
    stack_pointer[0] = value;
    stack_pointer[1] = null;
    stack_pointer += 2;
    assert(WITHIN_STACK_BOUNDS());
    break;
}
| |
case _CHECK_FUNCTION: {
    break;
}

/* Specialized global loads: result is non-NULL, and when (oparg & 1) a
 * known-NULL symbol is pushed above it. */
case _LOAD_GLOBAL_MODULE: {
    _Py_UopsSymbol *res;
    _Py_UopsSymbol *null = NULL;
    res = sym_new_not_null(ctx);
    null = sym_new_null(ctx);
    stack_pointer[0] = res;
    if (oparg & 1) stack_pointer[1] = null;
    stack_pointer += 1 + (oparg & 1);
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _LOAD_GLOBAL_BUILTINS: {
    _Py_UopsSymbol *res;
    _Py_UopsSymbol *null = NULL;
    res = sym_new_not_null(ctx);
    null = sym_new_null(ctx);
    stack_pointer[0] = res;
    if (oparg & 1) stack_pointer[1] = null;
    stack_pointer += 1 + (oparg & 1);
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _INTERNAL_INCREMENT_OPT_COUNTER: {
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

/* Remaining housekeeping micro-ops have no modeled effect. */
case _DYNAMIC_EXIT: {
    break;
}

case _START_EXECUTOR: {
    break;
}

case _MAKE_WARM: {
    break;
}

case _FATAL_ERROR: {
    break;
}

case _CHECK_VALIDITY_AND_SET_IP: {
    break;
}

case _DEOPT: {
    break;
}

/* Error path: discard oparg items. */
case _ERROR_POP_N: {
    stack_pointer += -oparg;
    assert(WITHIN_STACK_BOUNDS());
    break;
}

case _TIER2_RESUME_CHECK: {
    break;
}
| |