
Commit

Merge branch 'underflow' into underflow-known-dynamic
brandtbucher committed Sep 6, 2024
2 parents 175d922 + b523cd2 commit 7ad3776
Showing 9 changed files with 147 additions and 43 deletions.
18 changes: 16 additions & 2 deletions Python/bytecodes.c
@@ -980,6 +980,9 @@ dummy_func(
         _PyEval_FrameClearAndPop(tstate, dying);
         LOAD_SP();
         LOAD_IP(frame->return_offset);
+        #if TIER_TWO
+        frame->instr_ptr += frame->return_offset;
+        #endif
         res = retval;
         LLTRACE_RESUME_FRAME();
     }
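The same pattern recurs in the hunks below: wherever the interpreter does LOAD_IP(offset), the tier-two build now also advances frame->instr_ptr by the same offset, so both tiers agree on where the frame resumes. A minimal, self-contained sketch of guarding that extra bookkeeping behind a build flag (toy names; not CPython's actual LOAD_IP macro):

/* Illustrative only: one resume offset applied to two views of the current
 * instruction, with the second update compiled in only for the tier-two
 * build. Toy names; not CPython's real macros. */
#include <stdio.h>

#define TIER_TWO 1          /* pretend this is the tier-two build */

typedef struct {
    int next_instr;         /* where execution will continue */
    int instr_ptr;          /* bookkeeping pointer kept in the frame */
} toy_frame;

static void load_ip(toy_frame *f, int offset)
{
    f->next_instr = f->instr_ptr + offset;
#if TIER_TWO
    f->instr_ptr += offset; /* keep the frame's own pointer in sync too */
#endif
}

int main(void)
{
    toy_frame f = { 0, 10 };
    load_ip(&f, 3);
    printf("next_instr=%d instr_ptr=%d\n", f.next_instr, f.instr_ptr);
    return 0;
}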
@@ -1173,6 +1176,9 @@ dummy_func(
                _PyOpcode_Deopt[frame->instr_ptr->op.code] == ENTER_EXECUTOR);
         #endif
         LOAD_IP(1 + INLINE_CACHE_ENTRIES_SEND);
+        #if TIER_TWO
+        frame->instr_ptr += 1 + INLINE_CACHE_ENTRIES_SEND;
+        #endif
         LOAD_SP();
         value = retval;
         LLTRACE_RESUME_FRAME();
@@ -2766,7 +2772,8 @@ dummy_func(
         }
         /* iterator ended normally */
         assert(next_instr[oparg].op.code == END_FOR ||
-               next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
+               next_instr[oparg].op.code == INSTRUMENTED_END_FOR ||
+               next_instr[oparg].op.code == ENTER_EXECUTOR);
         PyStackRef_CLOSE(iter);
         STACK_SHRINK(1);
         /* Jump forward oparg, then skip following END_FOR and POP_TOP instruction */
@@ -2820,7 +2827,8 @@ dummy_func(
         }
         /* iterator ended normally */
         assert(next_instr[oparg].op.code == END_FOR ||
-               next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
+               next_instr[oparg].op.code == INSTRUMENTED_END_FOR ||
+               next_instr[oparg].op.code == ENTER_EXECUTOR);
         STACK_SHRINK(1);
         PyStackRef_CLOSE(iter_stackref);
         /* Skip END_FOR and POP_TOP */
@@ -3505,6 +3513,9 @@ dummy_func(
         tstate->py_recursion_remaining--;
         LOAD_SP();
         LOAD_IP(0);
+        #if TIER_TWO
+        frame->instr_ptr += 0;
+        #endif
         LLTRACE_RESUME_FRAME();
     }

@@ -4437,6 +4448,9 @@ dummy_func(
         _PyThreadState_PopFrame(tstate, frame);
         frame = tstate->current_frame = prev;
         LOAD_IP(frame->return_offset);
+        #if TIER_TWO
+        frame->instr_ptr += frame->return_offset;
+        #endif
         LOAD_SP();
         LLTRACE_RESUME_FRAME();
     }
12 changes: 12 additions & 0 deletions Python/executor_cases.c.h

Some generated files are not rendered by default.

60 changes: 58 additions & 2 deletions Python/generated_cases.c.h

Some generated files are not rendered by default.

44 changes: 22 additions & 22 deletions Python/optimizer.c
@@ -167,6 +167,11 @@ _PyOptimizer_Optimize(
     _PyStackRef *stack_pointer, _PyExecutorObject **executor_ptr, int chain_depth,
     int frames_pushed)
 {
+    if (!PyCode_Check(frame->f_executable) ||
+        !PyFunction_Check(frame->f_funcobj))
+    {
+        return 0;
+    }
     // The first executor in a chain and the MAX_CHAIN_DEPTH'th executor *must*
     // make progress in order to avoid infinite loops or excessively-long
     // side-exit chains. We can only insert the executor into the bytecode if
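In words: before trying to build an executor, _PyOptimizer_Optimize now bails out early (returning 0 rather than an error) unless the frame's executable is a real code object and its function object is a real function. A stand-alone sketch of that kind of precondition guard (toy_optimize and the toy_* types are hypothetical, not CPython API):

/* Illustrative guard in the same spirit: refuse to do anything clever for
 * objects that aren't of the exact kinds the optimizer understands. */
#include <stdio.h>

typedef enum { KIND_CODE, KIND_FUNCTION, KIND_OTHER } toy_kind;

typedef struct {
    toy_kind executable_kind;
    toy_kind funcobj_kind;
} toy_frame;

static int toy_optimize(const toy_frame *frame)
{
    if (frame->executable_kind != KIND_CODE ||
        frame->funcobj_kind != KIND_FUNCTION)
    {
        return 0;   /* no executor produced; caller stays on tier one */
    }
    /* ... build and attach an executor here ... */
    return 1;
}

int main(void)
{
    toy_frame plain = { KIND_CODE, KIND_FUNCTION };
    toy_frame odd = { KIND_OTHER, KIND_FUNCTION };
    printf("%d %d\n", toy_optimize(&plain), toy_optimize(&odd));  /* 1 0 */
    return 0;
}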
@@ -513,19 +518,25 @@ add_to_trace(
         return 0; \
     } \
     assert(func == NULL || func->func_code == (PyObject *)code); \
+    assert(code != NULL); \
     trace_stack[trace_stack_depth].func = func; \
     trace_stack[trace_stack_depth].code = code; \
     trace_stack[trace_stack_depth].instr = instr; \
     trace_stack_depth++;
 #define TRACE_STACK_POP() \
     if (trace_stack_depth <= 0) { \
-        Py_FatalError("Trace stack underflow\n"); \
+        func = NULL; \
+        code = NULL; \
+        instr = NULL; \
     } \
-    trace_stack_depth--; \
-    func = trace_stack[trace_stack_depth].func; \
-    code = trace_stack[trace_stack_depth].code; \
-    assert(func == NULL || func->func_code == (PyObject *)code); \
-    instr = trace_stack[trace_stack_depth].instr;
+    else { \
+        trace_stack_depth--; \
+        func = trace_stack[trace_stack_depth].func; \
+        code = trace_stack[trace_stack_depth].code; \
+        assert(func == NULL || func->func_code == (PyObject *)code); \
+        assert(code != NULL); \
+        instr = trace_stack[trace_stack_depth].instr; \
+    }

 /* Returns the length of the trace on success,
  * 0 if it failed to produce a worthwhile trace,
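The effect of the macro change above: TRACE_STACK_POP no longer aborts the process on underflow; it hands the caller NULL sentinels for func, code, and instr, and both push and pop now assert that a known code object is recorded otherwise. A stand-alone sketch of the sentinel-on-underflow pattern (toy names, not the CPython macros):

/* Toy stack of "frames"; on underflow the pop yields NULL instead of
 * terminating, so the caller can decide how to recover. */
#include <stdio.h>
#include <stddef.h>

typedef struct { const char *code; } toy_entry;

#define STACK_SIZE 4
static toy_entry stack[STACK_SIZE];
static int depth = 0;

#define TOY_PUSH(c)                                  \
    do {                                             \
        if (depth < STACK_SIZE) {                    \
            stack[depth++].code = (c);               \
        }                                            \
    } while (0)

#define TOY_POP(out)                                 \
    do {                                             \
        if (depth <= 0) {                            \
            (out) = NULL;   /* underflow sentinel */ \
        }                                            \
        else {                                       \
            (out) = stack[--depth].code;             \
        }                                            \
    } while (0)

int main(void)
{
    const char *code;
    TOY_PUSH("caller.py");
    TOY_POP(code);
    printf("%s\n", code);                            /* caller.py */
    TOY_POP(code);
    printf("%s\n", code ? code : "(underflow: caller must bail)");
    return 0;
}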
@@ -743,17 +754,6 @@
                 // Reserve space for nuops (+ _SET_IP + _EXIT_TRACE)
                 int nuops = expansion->nuops;
                 RESERVE(nuops + 1); /* One extra for exit */
-                int16_t last_op = expansion->uops[nuops-1].uop;
-                if (last_op == _RETURN_VALUE || last_op == _RETURN_GENERATOR || last_op == _YIELD_VALUE) {
-                    // Check for trace stack underflow now:
-                    // We can't bail e.g. in the middle of
-                    // LOAD_CONST + _RETURN_VALUE.
-                    if (trace_stack_depth == 0) {
-                        DPRINTF(2, "Trace stack underflow\n");
-                        OPT_STAT_INC(trace_stack_underflow);
-                        goto done;
-                    }
-                }
                 uint32_t orig_oparg = oparg;  // For OPARG_TOP/BOTTOM
                 for (int i = 0; i < nuops; i++) {
                     oparg = orig_oparg;
@@ -817,7 +817,9 @@ translate_bytecode_to_trace(
                         operand = (uintptr_t)code | 1;
                     }
                     else {
-                        operand = 0;
+                        ADD_TO_TRACE(uop, oparg, 0, target);
+                        ADD_TO_TRACE(_DYNAMIC_EXIT, 0, 0, 0);
+                        goto done;
                     }
                     trace_stack_depth++;
                     ADD_TO_TRACE(uop, oparg, operand, target);
@@ -888,11 +890,9 @@ translate_bytecode_to_trace(
                     if (new_func != NULL) {
                         operand = (uintptr_t)new_func;
                     }
-                    else if (new_code != NULL) {
-                        operand = (uintptr_t)new_code | 1;
-                    }
                     else {
-                        operand = 0;
+                        assert(new_code != NULL);
+                        operand = (uintptr_t)new_code | 1;
                     }
                     trace_stack_depth--;
                     ADD_TO_TRACE(uop, oparg, operand, target);
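Both call/return hunks above lean on the same operand encoding: a known function object is stored as a bare pointer, while a bare code object is stored with its low bit set so the two cases can be distinguished later, and on the call side the unknown-callee case now ends the trace with a dynamic exit instead of storing 0. A small sketch of that low-bit pointer-tagging idea (toy types; the real encoding lives in CPython's optimizer):

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

typedef struct { const char *name; } toy_func;
typedef struct { const char *filename; } toy_code;

/* Tag bit 0 to distinguish "code object" from "function object".
 * This relies on both structs being at least 2-byte aligned, so bit 0 is free. */
static uintptr_t encode(toy_func *func, toy_code *code)
{
    if (func != NULL) {
        return (uintptr_t)func;            /* untagged: a function */
    }
    assert(code != NULL);
    return (uintptr_t)code | 1;            /* tagged: just a code object */
}

int main(void)
{
    static toy_func f = { "f" };
    static toy_code c = { "module.py" };
    uintptr_t a = encode(&f, NULL);
    uintptr_t b = encode(NULL, &c);
    printf("%s\n", (a & 1) ? "tagged code" : "function");   /* function */
    printf("%s\n", (b & 1) ? "tagged code" : "function");   /* tagged code */
    return 0;
}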
