/* Frame object implementation */

#include "Python.h"
#include "pycore_ceval.h"         // _PyEval_BuiltinsFromGlobals()
#include "pycore_code.h"          // CO_FAST_LOCAL, etc.
#include "pycore_function.h"      // _PyFunction_FromConstructor()
#include "pycore_moduleobject.h"  // _PyModule_GetDict()
#include "pycore_object.h"        // _PyObject_GC_UNTRACK()
#include "pycore_opcode.h"        // _PyOpcode_Caches

#include "frameobject.h"          // PyFrameObject
#include "pycore_frame.h"
#include "opcode.h"               // EXTENDED_ARG
#include "structmember.h"         // PyMemberDef

#define OFF(x) offsetof(PyFrameObject, x)

static PyMemberDef frame_memberlist[] = {
    {"f_trace_lines",   T_BOOL,  OFF(f_trace_lines), 0},
    {"f_trace_opcodes", T_BOOL,  OFF(f_trace_opcodes), 0},
    {NULL}      /* Sentinel */
};


static PyObject *
frame_getlocals(PyFrameObject *f, void *closure)
{
    if (PyFrame_FastToLocalsWithError(f) < 0)
        return NULL;
    PyObject *locals = f->f_frame->f_locals;
    Py_INCREF(locals);
    return locals;
}

int
PyFrame_GetLineNumber(PyFrameObject *f)
{
    assert(f != NULL);
    if (f->f_lineno != 0) {
        return f->f_lineno;
    }
    else {
        return _PyInterpreterFrame_GetLine(f->f_frame);
    }
}

static PyObject *
frame_getlineno(PyFrameObject *f, void *closure)
{
    int lineno = PyFrame_GetLineNumber(f);
    if (lineno < 0) {
        Py_RETURN_NONE;
    }
    else {
        return PyLong_FromLong(lineno);
    }
}

static PyObject *
frame_getlasti(PyFrameObject *f, void *closure)
{
    int lasti = _PyInterpreterFrame_LASTI(f->f_frame);
    if (lasti < 0) {
        return PyLong_FromLong(-1);
    }
    return PyLong_FromLong(lasti * sizeof(_Py_CODEUNIT));
}

static PyObject *
frame_getglobals(PyFrameObject *f, void *closure)
{
    PyObject *globals = f->f_frame->f_globals;
    if (globals == NULL) {
        globals = Py_None;
    }
    Py_INCREF(globals);
    return globals;
}

static PyObject *
frame_getbuiltins(PyFrameObject *f, void *closure)
{
    PyObject *builtins = f->f_frame->f_builtins;
    if (builtins == NULL) {
        builtins = Py_None;
    }
    Py_INCREF(builtins);
    return builtins;
}

static PyObject *
frame_getcode(PyFrameObject *f, void *closure)
{
    if (PySys_Audit("object.__getattr__", "Os", f, "f_code") < 0) {
        return NULL;
    }
    return (PyObject *)PyFrame_GetCode(f);
}

static PyObject *
frame_getback(PyFrameObject *f, void *closure)
{
    PyObject *res = (PyObject *)PyFrame_GetBack(f);
    if (res == NULL) {
        Py_RETURN_NONE;
    }
    return res;
}

// Given the index of the effective opcode, scan back to construct the oparg
// with EXTENDED_ARG. This only works correctly with *unquickened* code,
// obtained via a call to _PyCode_GetCode!
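// For example, if codestr[i-2..i] is EXTENDED_ARG 1, EXTENDED_ARG 2, opcode 3,
// the reconstructed oparg is (1 << 16) | (2 << 8) | 3.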
static unsigned int
get_arg(const _Py_CODEUNIT *codestr, Py_ssize_t i)
{
    _Py_CODEUNIT word;
    unsigned int oparg = _Py_OPARG(codestr[i]);
    if (i >= 1 && _Py_OPCODE(word = codestr[i-1]) == EXTENDED_ARG) {
        oparg |= _Py_OPARG(word) << 8;
        if (i >= 2 && _Py_OPCODE(word = codestr[i-2]) == EXTENDED_ARG) {
            oparg |= _Py_OPARG(word) << 16;
            if (i >= 3 && _Py_OPCODE(word = codestr[i-3]) == EXTENDED_ARG) {
                oparg |= _Py_OPARG(word) << 24;
            }
        }
    }
    return oparg;
}

/* Model the evaluation stack, to determine which jumps
 * are safe and how many values need to be popped.
 * The stack is modelled by a 64-bit integer, treating any
 * stack that can't fit into 64 bits as "overflowed".
 */

typedef enum kind {
    Iterator = 1,
    Except = 2,
    Object = 3,
    Null = 4,
    Lasti = 5,
} Kind;

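/* Can a value of kind `from` stand in where a value of kind `to` is
 * expected?  Any non-NULL value can stand in for Object, anything can stand
 * in for Null, and otherwise the kinds must match exactly.  A `to` of 0
 * marks a slot beyond the end of the target stack and never matches. */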
static int
compatible_kind(Kind from, Kind to) {
    if (to == 0) {
        return 0;
    }
    if (to == Object) {
        return from != Null;
    }
    if (to == Null) {
        return 1;
    }
    return from == to;
}

#define BITS_PER_BLOCK 3

#define UNINITIALIZED -2
#define OVERFLOWED -1

#define MAX_STACK_ENTRIES (63/BITS_PER_BLOCK)
#define WILL_OVERFLOW (1ULL<<((MAX_STACK_ENTRIES-1)*BITS_PER_BLOCK))

#define EMPTY_STACK 0
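/* Each entry occupies BITS_PER_BLOCK bits, with the top of the stack in the
 * low bits.  For example, pushing Object and then Null onto EMPTY_STACK
 * gives (((0 << 3) | Object) << 3) | Null == 0x1C, whose top_of_stack() is
 * Null. */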

static inline int64_t
push_value(int64_t stack, Kind kind)
{
    if (((uint64_t)stack) >= WILL_OVERFLOW) {
        return OVERFLOWED;
    }
    else {
        return (stack << BITS_PER_BLOCK) | kind;
    }
}

static inline int64_t
pop_value(int64_t stack)
{
    return Py_ARITHMETIC_RIGHT_SHIFT(int64_t, stack, BITS_PER_BLOCK);
}

#define MASK ((1<<BITS_PER_BLOCK)-1)

static inline Kind
top_of_stack(int64_t stack)
{
    return stack & MASK;
}

static inline Kind
peek(int64_t stack, int n)
{
    assert(n >= 1);
    return (stack>>(BITS_PER_BLOCK*(n-1))) & MASK;
}

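/* Exchange the top of the modelled stack with its n-th entry (1-based from
 * the top); used to model the SWAP instruction. */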
static int64_t
stack_swap(int64_t stack, int n)
{
    assert(n >= 1);
    Kind to_swap = peek(stack, n);
    Kind top = top_of_stack(stack);
    int shift = BITS_PER_BLOCK*(n-1);
    int64_t replaced_low = (stack & ~(MASK << shift)) | (top << shift);
    int64_t replaced_top = (replaced_low & ~MASK) | to_swap;
    return replaced_top;
}

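/* Discard entries from the modelled stack until at most `level` entries
 * remain; used when computing the stack at an exception handler whose
 * recorded depth is `level`. */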
static int64_t
pop_to_level(int64_t stack, int level) {
    if (level == 0) {
        return EMPTY_STACK;
    }
    int64_t max_item = (1<<BITS_PER_BLOCK) - 1;
    int64_t level_max_stack = max_item << ((level-1) * BITS_PER_BLOCK);
    while (stack > level_max_stack) {
        stack = pop_value(stack);
    }
    return stack;
}

#if 0
/* These functions are useful for debugging the stack marking code */

static char
tos_char(int64_t stack) {
    switch(top_of_stack(stack)) {
        case Iterator:
            return 'I';
        case Except:
            return 'E';
        case Object:
            return 'O';
        case Lasti:
            return 'L';
        case Null:
            return 'N';
    }
    return '?';
}

static void
print_stack(int64_t stack) {
    if (stack < 0) {
        if (stack == UNINITIALIZED) {
            printf("---");
        }
        else if (stack == OVERFLOWED) {
            printf("OVERFLOWED");
        }
        else {
            printf("??");
        }
        return;
    }
    while (stack) {
        printf("%c", tos_char(stack));
        stack = pop_value(stack);
    }
}

static void
print_stacks(int64_t *stacks, int n) {
    for (int i = 0; i < n; i++) {
        printf("%d: ", i);
        print_stack(stacks[i]);
        printf("\n");
    }
}

#endif

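/* For every instruction offset, compute a model of the evaluation stack at
 * that point.  The forward scan is repeated (driven by `todo`) until it
 * reaches a fixed point, because a backward jump or an exception handler
 * may give an earlier offset its stack only after later instructions have
 * been analyzed.  Offsets that are never reached stay UNINITIALIZED. */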
static int64_t *
mark_stacks(PyCodeObject *code_obj, int len)
{
    PyObject *co_code = _PyCode_GetCode(code_obj);
    if (co_code == NULL) {
        return NULL;
    }
    _Py_CODEUNIT *code = (_Py_CODEUNIT *)PyBytes_AS_STRING(co_code);
    int64_t *stacks = PyMem_New(int64_t, len+1);
    int i, j, opcode;

    if (stacks == NULL) {
        PyErr_NoMemory();
        Py_DECREF(co_code);
        return NULL;
    }
    for (int i = 1; i <= len; i++) {
        stacks[i] = UNINITIALIZED;
    }
    stacks[0] = EMPTY_STACK;
    if (code_obj->co_flags & (CO_GENERATOR | CO_COROUTINE | CO_ASYNC_GENERATOR))
    {
        // Generators get sent None while starting:
        stacks[0] = push_value(stacks[0], Object);
    }
    int todo = 1;
    while (todo) {
        todo = 0;
        /* Scan instructions */
        for (i = 0; i < len; i++) {
            int64_t next_stack = stacks[i];
            if (next_stack == UNINITIALIZED) {
                continue;
            }
            opcode = _Py_OPCODE(code[i]);
            switch (opcode) {
                case JUMP_IF_FALSE_OR_POP:
                case JUMP_IF_TRUE_OR_POP:
                case POP_JUMP_FORWARD_IF_FALSE:
                case POP_JUMP_BACKWARD_IF_FALSE:
                case POP_JUMP_FORWARD_IF_TRUE:
                case POP_JUMP_BACKWARD_IF_TRUE:
                {
                    int64_t target_stack;
                    int j = get_arg(code, i);
                    if (opcode == POP_JUMP_FORWARD_IF_FALSE ||
                        opcode == POP_JUMP_FORWARD_IF_TRUE ||
                        opcode == JUMP_IF_FALSE_OR_POP ||
                        opcode == JUMP_IF_TRUE_OR_POP)
                    {
                        j += i + 1;
                    }
                    else {
                        assert(opcode == POP_JUMP_BACKWARD_IF_FALSE ||
                               opcode == POP_JUMP_BACKWARD_IF_TRUE);
                        j = i + 1 - j;
                    }
                    assert(j < len);
                    if (stacks[j] == UNINITIALIZED && j < i) {
                        todo = 1;
                    }
                    if (opcode == JUMP_IF_FALSE_OR_POP ||
                        opcode == JUMP_IF_TRUE_OR_POP)
                    {
                        target_stack = next_stack;
                        next_stack = pop_value(next_stack);
                    }
                    else {
                        next_stack = pop_value(next_stack);
                        target_stack = next_stack;
                    }
                    assert(stacks[j] == UNINITIALIZED || stacks[j] == target_stack);
                    stacks[j] = target_stack;
                    stacks[i+1] = next_stack;
                    break;
                }
                case SEND:
                    j = get_arg(code, i) + i + 1;
                    assert(j < len);
                    assert(stacks[j] == UNINITIALIZED || stacks[j] == pop_value(next_stack));
                    stacks[j] = pop_value(next_stack);
                    stacks[i+1] = next_stack;
                    break;
                case JUMP_FORWARD:
                    j = get_arg(code, i) + i + 1;
                    assert(j < len);
                    assert(stacks[j] == UNINITIALIZED || stacks[j] == next_stack);
                    stacks[j] = next_stack;
                    break;
                case JUMP_BACKWARD:
                case JUMP_BACKWARD_NO_INTERRUPT:
                    j = i + 1 - get_arg(code, i);
                    assert(j >= 0);
                    assert(j < len);
                    if (stacks[j] == UNINITIALIZED && j < i) {
                        todo = 1;
                    }
                    assert(stacks[j] == UNINITIALIZED || stacks[j] == next_stack);
                    stacks[j] = next_stack;
                    break;
                case GET_ITER:
                case GET_AITER:
                    next_stack = push_value(pop_value(next_stack), Iterator);
                    stacks[i+1] = next_stack;
                    break;
                case FOR_ITER:
                {
                    int64_t target_stack = pop_value(next_stack);
                    stacks[i+1] = push_value(next_stack, Object);
                    j = get_arg(code, i) + i + 1;
                    assert(j < len);
                    assert(stacks[j] == UNINITIALIZED || stacks[j] == target_stack);
                    stacks[j] = target_stack;
                    break;
                }
                case END_ASYNC_FOR:
                    next_stack = pop_value(pop_value(next_stack));
                    stacks[i+1] = next_stack;
                    break;
                case PUSH_EXC_INFO:
                    next_stack = push_value(next_stack, Except);
                    stacks[i+1] = next_stack;
                    break;
                case POP_EXCEPT:
                    assert(top_of_stack(next_stack) == Except);
                    next_stack = pop_value(next_stack);
                    stacks[i+1] = next_stack;
                    break;
                case RETURN_VALUE:
                    assert(pop_value(next_stack) == EMPTY_STACK);
                    assert(top_of_stack(next_stack) == Object);
                    break;
                case RAISE_VARARGS:
                    break;
                case RERAISE:
                    assert(top_of_stack(next_stack) == Except);
                    /* End of block */
                    break;
                case PUSH_NULL:
                    next_stack = push_value(next_stack, Null);
                    stacks[i+1] = next_stack;
                    break;
                case LOAD_GLOBAL:
                    if (_Py_OPARG(code[i]) & 1) {
                        next_stack = push_value(next_stack, Null);
                    }
                    next_stack = push_value(next_stack, Object);
                    stacks[i+1] = next_stack;
                    break;
                case LOAD_METHOD:
                    assert(top_of_stack(next_stack) == Object);
                    next_stack = pop_value(next_stack);
                    next_stack = push_value(next_stack, Null);
                    next_stack = push_value(next_stack, Object);
                    stacks[i+1] = next_stack;
                    break;
                case CALL:
                {
                    next_stack = pop_value(pop_value(next_stack));
                    next_stack = push_value(next_stack, Object);
                    stacks[i+1] = next_stack;
                    break;
                }
                case SWAP:
                {
                    int n = get_arg(code, i);
                    next_stack = stack_swap(next_stack, n);
                    stacks[i+1] = next_stack;
                    break;
                }
                case COPY:
                {
                    int n = get_arg(code, i);
                    next_stack = push_value(next_stack, peek(next_stack, n));
                    stacks[i+1] = next_stack;
                    break;
                }
                default:
                {
                    int delta = PyCompile_OpcodeStackEffect(opcode, get_arg(code, i));
                    assert(delta != PY_INVALID_STACK_EFFECT);
                    while (delta < 0) {
                        next_stack = pop_value(next_stack);
                        delta++;
                    }
                    while (delta > 0) {
                        next_stack = push_value(next_stack, Object);
                        delta--;
                    }
                    stacks[i+1] = next_stack;
                }
            }
        }
        /* Scan exception table */
        unsigned char *start = (unsigned char *)PyBytes_AS_STRING(code_obj->co_exceptiontable);
        unsigned char *end = start + PyBytes_GET_SIZE(code_obj->co_exceptiontable);
        unsigned char *scan = start;
        while (scan < end) {
            int start_offset, size, handler;
            scan = parse_varint(scan, &start_offset);
            assert(start_offset >= 0 && start_offset < len);
            scan = parse_varint(scan, &size);
            assert(size >= 0 && start_offset+size <= len);
            scan = parse_varint(scan, &handler);
            assert(handler >= 0 && handler < len);
            int depth_and_lasti;
            scan = parse_varint(scan, &depth_and_lasti);
            int level = depth_and_lasti >> 1;
            int lasti = depth_and_lasti & 1;
            if (stacks[start_offset] != UNINITIALIZED) {
                if (stacks[handler] == UNINITIALIZED) {
                    todo = 1;
                    uint64_t target_stack = pop_to_level(stacks[start_offset], level);
                    if (lasti) {
                        target_stack = push_value(target_stack, Lasti);
                    }
                    target_stack = push_value(target_stack, Except);
                    stacks[handler] = target_stack;
                }
            }
        }
    }
    Py_DECREF(co_code);
    return stacks;
}

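/* A jump from a state with stack `from_stack` to one expecting `to_stack`
 * is allowed only if, after popping any surplus entries from `from_stack`,
 * every remaining entry is kind-compatible with the corresponding entry
 * expected at the target. */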
static int
compatible_stack(int64_t from_stack, int64_t to_stack)
{
    if (from_stack < 0 || to_stack < 0) {
        return 0;
    }
    while(from_stack > to_stack) {
        from_stack = pop_value(from_stack);
    }
    while(from_stack) {
        Kind from_top = top_of_stack(from_stack);
        Kind to_top = top_of_stack(to_stack);
        if (!compatible_kind(from_top, to_top)) {
            return 0;
        }
        from_stack = pop_value(from_stack);
        to_stack = pop_value(to_stack);
    }
    return to_stack == 0;
}

static const char *
explain_incompatible_stack(int64_t to_stack)
{
    assert(to_stack != 0);
    if (to_stack == OVERFLOWED) {
        return "stack is too deep to analyze";
    }
    if (to_stack == UNINITIALIZED) {
        return "can't jump into an exception handler, or code may be unreachable";
    }
    Kind target_kind = top_of_stack(to_stack);
    switch(target_kind) {
        case Except:
            return "can't jump into an 'except' block as there's no exception";
        case Lasti:
            return "can't jump into a re-raising block as there's no location";
        case Object:
        case Null:
            return "incompatible stacks";
        case Iterator:
            return "can't jump into the body of a for loop";
        default:
            Py_UNREACHABLE();
    }
}

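/* Return an array of length `len` mapping each instruction offset to the
 * line number that starts there, or -1 if no new line starts at that
 * offset.  The caller owns the array. */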
static int *
marklines(PyCodeObject *code, int len)
{
    PyCodeAddressRange bounds;
    _PyCode_InitAddressRange(code, &bounds);
    assert (bounds.ar_end == 0);
    int last_line = -1;

    int *linestarts = PyMem_New(int, len);
    if (linestarts == NULL) {
        return NULL;
    }
    for (int i = 0; i < len; i++) {
        linestarts[i] = -1;
    }

    while (_PyLineTable_NextAddressRange(&bounds)) {
        assert(bounds.ar_start / (int)sizeof(_Py_CODEUNIT) < len);
        if (bounds.ar_line != last_line && bounds.ar_line != -1) {
            linestarts[bounds.ar_start / sizeof(_Py_CODEUNIT)] = bounds.ar_line;
            last_line = bounds.ar_line;
        }
    }
    return linestarts;
}

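/* Return the smallest line number in `lines` that is >= `line`, or -1 if
 * there is no such line. */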
static int
first_line_not_before(int *lines, int len, int line)
{
    int result = INT_MAX;
    for (int i = 0; i < len; i++) {
        if (lines[i] < result && lines[i] >= line) {
            result = lines[i];
        }
    }
    if (result == INT_MAX) {
        return -1;
    }
    return result;
}

static PyFrameState
_PyFrame_GetState(PyFrameObject *frame)
{
    assert(!_PyFrame_IsIncomplete(frame->f_frame));
    if (frame->f_frame->stacktop == 0) {
        return FRAME_CLEARED;
    }
    switch(frame->f_frame->owner) {
        case FRAME_OWNED_BY_GENERATOR:
        {
            PyGenObject *gen = _PyFrame_GetGenerator(frame->f_frame);
            return gen->gi_frame_state;
        }
        case FRAME_OWNED_BY_THREAD:
        {
            if (_PyInterpreterFrame_LASTI(frame->f_frame) < 0) {
                return FRAME_CREATED;
            }
            switch (_PyOpcode_Deopt[_Py_OPCODE(*frame->f_frame->prev_instr)])
            {
                case COPY_FREE_VARS:
                case MAKE_CELL:
                case RETURN_GENERATOR:
                    /* Frame not fully initialized */
                    return FRAME_CREATED;
                default:
                    return FRAME_EXECUTING;
            }
        }
        case FRAME_OWNED_BY_FRAME_OBJECT:
            return FRAME_COMPLETED;
    }
    Py_UNREACHABLE();
}


/* Setter for f_lineno - you can set f_lineno from within a trace function in
 * order to jump to a given line of code, subject to some restrictions. Most
 * lines are OK to jump to because they don't make any assumptions about the
 * state of the stack (obvious because you could remove the line and the code
 * would still work without any stack errors), but there are some constructs
 * that limit jumping:
 *
 *  o Any exception handlers.
 *  o 'for' and 'async for' loops can't be jumped into because the
 *    iterator needs to be on the stack.
 *  o Jumps cannot be made from within a trace function invoked with a
 *    'return' or 'exception' event since the eval loop has been exited at
 *    that time.
 */
static int
frame_setlineno(PyFrameObject *f, PyObject* p_new_lineno, void *Py_UNUSED(ignored))
{
    if (p_new_lineno == NULL) {
        PyErr_SetString(PyExc_AttributeError, "cannot delete attribute");
        return -1;
    }
    /* f_lineno must be an integer. */
    if (!PyLong_CheckExact(p_new_lineno)) {
        PyErr_SetString(PyExc_ValueError,
                        "lineno must be an integer");
        return -1;
    }

    PyFrameState state = _PyFrame_GetState(f);
    /*
     * This code preserves the historical restrictions on
     * setting the line number of a frame.
     * Jumps are forbidden on a 'return' trace event (except after a yield).
     * Jumps from 'call' trace events are also forbidden.
     * In addition, jumps are forbidden when not tracing,
     * as this is a debugging feature.
     */
    switch(PyThreadState_GET()->tracing_what) {
        case PyTrace_EXCEPTION:
            PyErr_SetString(PyExc_ValueError,
                "can only jump from a 'line' trace event");
            return -1;
        case PyTrace_CALL:
            PyErr_Format(PyExc_ValueError,
                "can't jump from the 'call' trace event of a new frame");
            return -1;
        case PyTrace_LINE:
            break;
        case PyTrace_RETURN:
            if (state == FRAME_SUSPENDED) {
                break;
            }
            /* fall through */
        default:
            PyErr_SetString(PyExc_ValueError,
                "can only jump from a 'line' trace event");
            return -1;
    }
    if (!f->f_trace) {
        PyErr_Format(PyExc_ValueError,
                     "f_lineno can only be set by a trace function");
        return -1;
    }

    int new_lineno;

    /* Fail if the line falls outside the code block and
       select first line with actual code. */
    int overflow;
    long l_new_lineno = PyLong_AsLongAndOverflow(p_new_lineno, &overflow);
    if (overflow
#if SIZEOF_LONG > SIZEOF_INT
        || l_new_lineno > INT_MAX
        || l_new_lineno < INT_MIN
#endif
    ) {
        PyErr_SetString(PyExc_ValueError,
                        "lineno out of range");
        return -1;
    }
    new_lineno = (int)l_new_lineno;

    if (new_lineno < f->f_frame->f_code->co_firstlineno) {
        PyErr_Format(PyExc_ValueError,
                     "line %d comes before the current code block",
                     new_lineno);
        return -1;
    }

    /* PyCode_NewWithPosOnlyArgs limits co_code to be under INT_MAX so this
     * should never overflow. */
    int len = (int)Py_SIZE(f->f_frame->f_code);
    int *lines = marklines(f->f_frame->f_code, len);
    if (lines == NULL) {
        return -1;
    }

    new_lineno = first_line_not_before(lines, len, new_lineno);
    if (new_lineno < 0) {
        PyErr_Format(PyExc_ValueError,
                     "line %d comes after the current code block",
                     (int)l_new_lineno);
        PyMem_Free(lines);
        return -1;
    }

    int64_t *stacks = mark_stacks(f->f_frame->f_code, len);
    if (stacks == NULL) {
        PyMem_Free(lines);
        return -1;
    }

    int64_t best_stack = OVERFLOWED;
    int best_addr = -1;
    int64_t start_stack = stacks[_PyInterpreterFrame_LASTI(f->f_frame)];
    int err = -1;
    const char *msg = "cannot find bytecode for specified line";
    for (int i = 0; i < len; i++) {
        if (lines[i] == new_lineno) {
            int64_t target_stack = stacks[i];
            if (compatible_stack(start_stack, target_stack)) {
                err = 0;
                if (target_stack > best_stack) {
                    best_stack = target_stack;
                    best_addr = i;
                }
            }
            else if (err < 0) {
                if (start_stack == OVERFLOWED) {
                    msg = "stack is too deep to analyze";
                }
                else if (start_stack == UNINITIALIZED) {
                    msg = "can't jump from unreachable code";
                }
                else {
                    msg = explain_incompatible_stack(target_stack);
                    err = 1;
                }
            }
        }
    }
    PyMem_Free(stacks);
    PyMem_Free(lines);
    if (err) {
        PyErr_SetString(PyExc_ValueError, msg);
        return -1;
    }
    if (state == FRAME_SUSPENDED) {
        /* Account for value popped by yield */
        start_stack = pop_value(start_stack);
    }
    while (start_stack > best_stack) {
        if (top_of_stack(start_stack) == Except) {
            /* Pop exception stack as well as the evaluation stack */
            PyThreadState *tstate = _PyThreadState_GET();
            _PyErr_StackItem *exc_info = tstate->exc_info;
            PyObject *value = exc_info->exc_value;
            PyObject *exc = _PyFrame_StackPop(f->f_frame);
            assert(PyExceptionInstance_Check(exc) || exc == Py_None);
            exc_info->exc_value = exc;
            Py_XDECREF(value);
        }
        else {
            PyObject *v = _PyFrame_StackPop(f->f_frame);
            Py_XDECREF(v);
        }
        start_stack = pop_value(start_stack);
    }
    /* Finally set the new lasti and return OK. */
    f->f_lineno = 0;
    f->f_frame->prev_instr = _PyCode_CODE(f->f_frame->f_code) + best_addr;
    return 0;
}

static PyObject *
frame_gettrace(PyFrameObject *f, void *closure)
{
    PyObject* trace = f->f_trace;

    if (trace == NULL)
        trace = Py_None;

    Py_INCREF(trace);

    return trace;
}

static int
frame_settrace(PyFrameObject *f, PyObject* v, void *closure)
{
    if (v == Py_None) {
        v = NULL;
    }
    Py_XINCREF(v);
    Py_XSETREF(f->f_trace, v);

    return 0;
}


static PyGetSetDef frame_getsetlist[] = {
    {"f_back",      (getter)frame_getback, NULL, NULL},
    {"f_locals",    (getter)frame_getlocals, NULL, NULL},
    {"f_lineno",    (getter)frame_getlineno,
                    (setter)frame_setlineno, NULL},
    {"f_trace",     (getter)frame_gettrace, (setter)frame_settrace, NULL},
    {"f_lasti",     (getter)frame_getlasti, NULL, NULL},
    {"f_globals",   (getter)frame_getglobals, NULL, NULL},
    {"f_builtins",  (getter)frame_getbuiltins, NULL, NULL},
    {"f_code",      (getter)frame_getcode, NULL, NULL},
    {0}
};

/* Stack frames are allocated and deallocated at a considerable rate.
   In an attempt to improve the speed of function calls, we maintain
   a separate free list of stack frames (just like floats are
   allocated in a special way -- see floatobject.c).  When a stack
   frame is on the free list, only the following members have a meaning:
    ob_type             == &Frametype
    f_back              next item on free list, or NULL
*/

static void
frame_dealloc(PyFrameObject *f)
{
    /* It is the responsibility of the owning generator/coroutine
     * to have cleared the generator pointer */

    assert(f->f_frame->owner != FRAME_OWNED_BY_GENERATOR ||
           _PyFrame_GetGenerator(f->f_frame)->gi_frame_state == FRAME_CLEARED);

    if (_PyObject_GC_IS_TRACKED(f)) {
        _PyObject_GC_UNTRACK(f);
    }

    Py_TRASHCAN_BEGIN(f, frame_dealloc);
    PyCodeObject *co = NULL;

    /* Kill all local variables including specials, if we own them */
    if (f->f_frame->owner == FRAME_OWNED_BY_FRAME_OBJECT) {
        assert(f->f_frame == (_PyInterpreterFrame *)f->_f_frame_data);
        _PyInterpreterFrame *frame = (_PyInterpreterFrame *)f->_f_frame_data;
        /* Don't clear code object until the end */
        co = frame->f_code;
        frame->f_code = NULL;
        Py_CLEAR(frame->f_func);
        Py_CLEAR(frame->f_locals);
        PyObject **locals = _PyFrame_GetLocalsArray(frame);
        for (int i = 0; i < frame->stacktop; i++) {
            Py_CLEAR(locals[i]);
        }
    }
    Py_CLEAR(f->f_back);
    Py_CLEAR(f->f_trace);
    PyObject_GC_Del(f);
    Py_XDECREF(co);
    Py_TRASHCAN_END;
}

static int
frame_traverse(PyFrameObject *f, visitproc visit, void *arg)
{
    Py_VISIT(f->f_back);
    Py_VISIT(f->f_trace);
    if (f->f_frame->owner != FRAME_OWNED_BY_FRAME_OBJECT) {
        return 0;
    }
    assert(f->f_frame->frame_obj == NULL);
    return _PyFrame_Traverse(f->f_frame, visit, arg);
}

static int
frame_tp_clear(PyFrameObject *f)
{
    Py_CLEAR(f->f_trace);

    /* locals and stack */
    PyObject **locals = _PyFrame_GetLocalsArray(f->f_frame);
    assert(f->f_frame->stacktop >= 0);
    for (int i = 0; i < f->f_frame->stacktop; i++) {
        Py_CLEAR(locals[i]);
    }
    f->f_frame->stacktop = 0;
    return 0;
}

static PyObject *
frame_clear(PyFrameObject *f, PyObject *Py_UNUSED(ignored))
{
    if (f->f_frame->owner == FRAME_OWNED_BY_GENERATOR) {
        PyGenObject *gen = _PyFrame_GetGenerator(f->f_frame);
        if (gen->gi_frame_state == FRAME_EXECUTING) {
            goto running;
        }
        _PyGen_Finalize((PyObject *)gen);
    }
    else if (f->f_frame->owner == FRAME_OWNED_BY_THREAD) {
        goto running;
    }
    else {
        assert(f->f_frame->owner == FRAME_OWNED_BY_FRAME_OBJECT);
        (void)frame_tp_clear(f);
    }
    Py_RETURN_NONE;
running:
    PyErr_SetString(PyExc_RuntimeError,
                    "cannot clear an executing frame");
    return NULL;
}

PyDoc_STRVAR(clear__doc__,
"F.clear(): clear most references held by the frame");

static PyObject *
frame_sizeof(PyFrameObject *f, PyObject *Py_UNUSED(ignored))
{
    Py_ssize_t res;
    res = offsetof(PyFrameObject, _f_frame_data) + offsetof(_PyInterpreterFrame, localsplus);
    PyCodeObject *code = f->f_frame->f_code;
    res += (code->co_nlocalsplus+code->co_stacksize) * sizeof(PyObject *);
    return PyLong_FromSsize_t(res);
}

PyDoc_STRVAR(sizeof__doc__,
"F.__sizeof__() -> size of F in memory, in bytes");

static PyObject *
frame_repr(PyFrameObject *f)
{
    int lineno = PyFrame_GetLineNumber(f);
    PyCodeObject *code = f->f_frame->f_code;
    return PyUnicode_FromFormat(
        "<frame at %p, file %R, line %d, code %S>",
        f, code->co_filename, lineno, code->co_name);
}

static PyMethodDef frame_methods[] = {
    {"clear",       (PyCFunction)frame_clear,   METH_NOARGS,
     clear__doc__},
    {"__sizeof__",  (PyCFunction)frame_sizeof,  METH_NOARGS,
     sizeof__doc__},
    {NULL, NULL}    /* sentinel */
};

PyTypeObject PyFrame_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "frame",
    offsetof(PyFrameObject, _f_frame_data) +
    offsetof(_PyInterpreterFrame, localsplus),
    sizeof(PyObject *),
    (destructor)frame_dealloc,                  /* tp_dealloc */
    0,                                          /* tp_vectorcall_offset */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_as_async */
    (reprfunc)frame_repr,                       /* tp_repr */
    0,                                          /* tp_as_number */
    0,                                          /* tp_as_sequence */
    0,                                          /* tp_as_mapping */
    0,                                          /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    PyObject_GenericGetAttr,                    /* tp_getattro */
    PyObject_GenericSetAttr,                    /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,    /* tp_flags */
    0,                                          /* tp_doc */
    (traverseproc)frame_traverse,               /* tp_traverse */
    (inquiry)frame_tp_clear,                    /* tp_clear */
    0,                                          /* tp_richcompare */
    0,                                          /* tp_weaklistoffset */
    0,                                          /* tp_iter */
    0,                                          /* tp_iternext */
    frame_methods,                              /* tp_methods */
    frame_memberlist,                           /* tp_members */
    frame_getsetlist,                           /* tp_getset */
    0,                                          /* tp_base */
    0,                                          /* tp_dict */
};

static void
init_frame(_PyInterpreterFrame *frame, PyFunctionObject *func, PyObject *locals)
{
    /* _PyFrame_InitializeSpecials consumes reference to func */
    Py_INCREF(func);
    PyCodeObject *code = (PyCodeObject *)func->func_code;
    _PyFrame_InitializeSpecials(frame, func, locals, code->co_nlocalsplus);
    frame->previous = NULL;
    for (Py_ssize_t i = 0; i < code->co_nlocalsplus; i++) {
        frame->localsplus[i] = NULL;
    }
}

PyFrameObject*
_PyFrame_New_NoTrack(PyCodeObject *code)
{
    CALL_STAT_INC(frame_objects_created);
    int slots = code->co_nlocalsplus + code->co_stacksize;
    PyFrameObject *f = PyObject_GC_NewVar(PyFrameObject, &PyFrame_Type, slots);
    if (f == NULL) {
        return NULL;
    }
    f->f_back = NULL;
    f->f_trace = NULL;
    f->f_trace_lines = 1;
    f->f_trace_opcodes = 0;
    f->f_fast_as_locals = 0;
    f->f_lineno = 0;
    return f;
}

/* Legacy API */
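/* PyFrame_New() has no function object to supply globals and builtins, so it
 * builds a temporary PyFunctionObject from `code` and `globals` just to
 * initialize the frame; the frame keeps that function alive via f_func. */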
PyFrameObject*
PyFrame_New(PyThreadState *tstate, PyCodeObject *code,
            PyObject *globals, PyObject *locals)
{
    PyObject *builtins = _PyEval_BuiltinsFromGlobals(tstate, globals); // borrowed ref
    if (builtins == NULL) {
        return NULL;
    }
    PyFrameConstructor desc = {
        .fc_globals = globals,
        .fc_builtins = builtins,
        .fc_name = code->co_name,
        .fc_qualname = code->co_name,
        .fc_code = (PyObject *)code,
        .fc_defaults = NULL,
        .fc_kwdefaults = NULL,
        .fc_closure = NULL
    };
    PyFunctionObject *func = _PyFunction_FromConstructor(&desc);
    if (func == NULL) {
        return NULL;
    }
    PyFrameObject *f = _PyFrame_New_NoTrack(code);
    if (f == NULL) {
        Py_DECREF(func);
        return NULL;
    }
    init_frame((_PyInterpreterFrame *)f->_f_frame_data, func, locals);
    f->f_frame = (_PyInterpreterFrame *)f->_f_frame_data;
    f->f_frame->owner = FRAME_OWNED_BY_FRAME_OBJECT;
    // This frame needs to be "complete", so pretend that the first RESUME ran:
    f->f_frame->prev_instr = _PyCode_CODE(code) + code->_co_firsttraceable;
    assert(!_PyFrame_IsIncomplete(f->f_frame));
    Py_DECREF(func);
    _PyObject_GC_TRACK(f);
    return f;
}

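// Return 1 if an instruction with the given (deoptimized) opcode and oparg
// appears among the instructions already executed in this frame, i.e. those
// before frame->prev_instr; EXTENDED_ARG prefixes and inline caches are
// taken into account.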
static int
_PyFrame_OpAlreadyRan(_PyInterpreterFrame *frame, int opcode, int oparg)
{
    // This only works when opcode is a non-quickened form:
    assert(_PyOpcode_Deopt[opcode] == opcode);
    int check_oparg = 0;
    for (_Py_CODEUNIT *instruction = _PyCode_CODE(frame->f_code);
         instruction < frame->prev_instr; instruction++)
    {
        int check_opcode = _PyOpcode_Deopt[_Py_OPCODE(*instruction)];
        check_oparg |= _Py_OPARG(*instruction);
        if (check_opcode == opcode && check_oparg == oparg) {
            return 1;
        }
        if (check_opcode == EXTENDED_ARG) {
            check_oparg <<= 8;
        }
        else {
            check_oparg = 0;
        }
        instruction += _PyOpcode_Caches[check_opcode];
    }
    return 0;
}

int
_PyFrame_FastToLocalsWithError(_PyInterpreterFrame *frame) {
    /* Merge fast locals into f->f_locals */
    PyObject *locals;
    PyObject **fast;
    PyCodeObject *co;
    locals = frame->f_locals;
    if (locals == NULL) {
        locals = frame->f_locals = PyDict_New();
        if (locals == NULL)
            return -1;
    }
    co = frame->f_code;
    fast = _PyFrame_GetLocalsArray(frame);
    // COPY_FREE_VARS has no quickened forms, so no need to use _PyOpcode_Deopt
    // here:
    int lasti = _PyInterpreterFrame_LASTI(frame);
    if (lasti < 0 && _Py_OPCODE(_PyCode_CODE(co)[0]) == COPY_FREE_VARS) {
        /* Free vars have not been initialized -- Do that */
        PyCodeObject *co = frame->f_code;
        PyObject *closure = frame->f_func->func_closure;
        int offset = co->co_nlocals + co->co_nplaincellvars;
        for (int i = 0; i < co->co_nfreevars; ++i) {
            PyObject *o = PyTuple_GET_ITEM(closure, i);
            Py_INCREF(o);
            frame->localsplus[offset + i] = o;
        }
        // COPY_FREE_VARS doesn't have inline CACHEs, either:
        frame->prev_instr = _PyCode_CODE(frame->f_code);
    }
    for (int i = 0; i < co->co_nlocalsplus; i++) {
        _PyLocals_Kind kind = _PyLocals_GetKind(co->co_localspluskinds, i);

        /* If the namespace is unoptimized, then one of the
           following cases applies:
           1. It does not contain free variables, because it
              uses import * or is a top-level namespace.
           2. It is a class namespace.
           We don't want to accidentally copy free variables
           into the locals dict used by the class.
        */
        if (kind & CO_FAST_FREE && !(co->co_flags & CO_OPTIMIZED)) {
            continue;
        }

        PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, i);
        PyObject *value = fast[i];
        if (frame->stacktop) {
            if (kind & CO_FAST_FREE) {
                // The cell was set by COPY_FREE_VARS.
                assert(value != NULL && PyCell_Check(value));
                value = PyCell_GET(value);
            }
            else if (kind & CO_FAST_CELL) {
                // Note that no *_DEREF ops can happen before MAKE_CELL
                // executes.  So there's no need to duplicate the work
                // that MAKE_CELL would otherwise do later, if it hasn't
                // run yet.
                if (value != NULL) {
                    if (PyCell_Check(value) &&
                            _PyFrame_OpAlreadyRan(frame, MAKE_CELL, i)) {
                        // (likely) MAKE_CELL must have executed already.
                        value = PyCell_GET(value);
                    }
                    // (likely) Otherwise it is an arg (kind & CO_FAST_LOCAL),
                    // with the initial value set when the frame was created...
                    // (unlikely) ...or it was set to some initial value by
                    // an earlier call to PyFrame_LocalsToFast().
                }
            }
        }
        else {
            assert(value == NULL);
        }
        if (value == NULL) {
            if (PyObject_DelItem(locals, name) != 0) {
                if (PyErr_ExceptionMatches(PyExc_KeyError)) {
                    PyErr_Clear();
                }
                else {
                    return -1;
                }
            }
        }
        else {
            if (PyObject_SetItem(locals, name, value) != 0) {
                return -1;
            }
        }
    }
    return 0;
}

int
PyFrame_FastToLocalsWithError(PyFrameObject *f)
{
    if (f == NULL) {
        PyErr_BadInternalCall();
        return -1;
    }
    assert(!_PyFrame_IsIncomplete(f->f_frame));
    int err = _PyFrame_FastToLocalsWithError(f->f_frame);
    if (err == 0) {
        f->f_fast_as_locals = 1;
    }
    return err;
}

void
PyFrame_FastToLocals(PyFrameObject *f)
{
    int res;
    assert(!_PyFrame_IsIncomplete(f->f_frame));
    assert(!PyErr_Occurred());

    res = PyFrame_FastToLocalsWithError(f);
    if (res < 0)
        PyErr_Clear();
}

void
_PyFrame_LocalsToFast(_PyInterpreterFrame *frame, int clear)
{
    /* Merge locals into fast locals */
    PyObject *locals;
    PyObject **fast;
    PyObject *error_type, *error_value, *error_traceback;
    PyCodeObject *co;
    locals = frame->f_locals;
    if (locals == NULL) {
        return;
    }
    fast = _PyFrame_GetLocalsArray(frame);
    co = frame->f_code;

    PyErr_Fetch(&error_type, &error_value, &error_traceback);
    for (int i = 0; i < co->co_nlocalsplus; i++) {
        _PyLocals_Kind kind = _PyLocals_GetKind(co->co_localspluskinds, i);

        /* Same test as in PyFrame_FastToLocals() above. */
        if (kind & CO_FAST_FREE && !(co->co_flags & CO_OPTIMIZED)) {
            continue;
        }
        PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, i);
        PyObject *value = PyObject_GetItem(locals, name);
        /* We only care about NULLs if clear is true. */
        if (value == NULL) {
            PyErr_Clear();
            if (!clear) {
                continue;
            }
        }
        PyObject *oldvalue = fast[i];
        PyObject *cell = NULL;
        if (kind == CO_FAST_FREE) {
            // The cell was set when the frame was created from
            // the function's closure.
            assert(oldvalue != NULL && PyCell_Check(oldvalue));
            cell = oldvalue;
        }
        else if (kind & CO_FAST_CELL && oldvalue != NULL) {
            /* Same test as in PyFrame_FastToLocals() above. */
            if (PyCell_Check(oldvalue) &&
                    _PyFrame_OpAlreadyRan(frame, MAKE_CELL, i)) {
                // (likely) MAKE_CELL must have executed already.
                cell = oldvalue;
            }
            // (unlikely) Otherwise, it must have been set to some
            // initial value by an earlier call to PyFrame_LocalsToFast().
        }
        if (cell != NULL) {
            oldvalue = PyCell_GET(cell);
            if (value != oldvalue) {
                Py_XDECREF(oldvalue);
                Py_XINCREF(value);
                PyCell_SET(cell, value);
            }
        }
        else if (value != oldvalue) {
            Py_XINCREF(value);
            Py_XSETREF(fast[i], value);
        }
        Py_XDECREF(value);
    }
    PyErr_Restore(error_type, error_value, error_traceback);
}

void
PyFrame_LocalsToFast(PyFrameObject *f, int clear)
{
    if (f && f->f_fast_as_locals && _PyFrame_GetState(f) != FRAME_CLEARED) {
        assert(!_PyFrame_IsIncomplete(f->f_frame));
        _PyFrame_LocalsToFast(f->f_frame, clear);
        f->f_fast_as_locals = 0;
    }
}


int _PyFrame_IsEntryFrame(PyFrameObject *frame)
{
    assert(frame != NULL);
    assert(!_PyFrame_IsIncomplete(frame->f_frame));
    return frame->f_frame->is_entry;
}


PyCodeObject *
PyFrame_GetCode(PyFrameObject *frame)
{
    assert(frame != NULL);
    assert(!_PyFrame_IsIncomplete(frame->f_frame));
    PyCodeObject *code = frame->f_frame->f_code;
    assert(code != NULL);
    Py_INCREF(code);
    return code;
}


PyFrameObject*
PyFrame_GetBack(PyFrameObject *frame)
{
    assert(frame != NULL);
    assert(!_PyFrame_IsIncomplete(frame->f_frame));
    PyFrameObject *back = frame->f_back;
    if (back == NULL) {
        _PyInterpreterFrame *prev = frame->f_frame->previous;
        while (prev && _PyFrame_IsIncomplete(prev)) {
            prev = prev->previous;
        }
        if (prev) {
            back = _PyFrame_GetFrameObject(prev);
        }
    }
    Py_XINCREF(back);
    return back;
}

PyObject*
PyFrame_GetLocals(PyFrameObject *frame)
{
    assert(!_PyFrame_IsIncomplete(frame->f_frame));
    return frame_getlocals(frame, NULL);
}

PyObject*
PyFrame_GetGlobals(PyFrameObject *frame)
{
    assert(!_PyFrame_IsIncomplete(frame->f_frame));
    return frame_getglobals(frame, NULL);
}

PyObject*
PyFrame_GetBuiltins(PyFrameObject *frame)
{
    assert(!_PyFrame_IsIncomplete(frame->f_frame));
    return frame_getbuiltins(frame, NULL);
}

int
PyFrame_GetLasti(PyFrameObject *frame)
{
    assert(!_PyFrame_IsIncomplete(frame->f_frame));
    int lasti = _PyInterpreterFrame_LASTI(frame->f_frame);
    if (lasti < 0) {
        return -1;
    }
    return lasti * sizeof(_Py_CODEUNIT);
}

PyObject *
PyFrame_GetGenerator(PyFrameObject *frame)
{
    assert(!_PyFrame_IsIncomplete(frame->f_frame));
    if (frame->f_frame->owner != FRAME_OWNED_BY_GENERATOR) {
        return NULL;
    }
    PyGenObject *gen = _PyFrame_GetGenerator(frame->f_frame);
    return Py_NewRef(gen);
}

PyObject*
_PyEval_BuiltinsFromGlobals(PyThreadState *tstate, PyObject *globals)
{
    PyObject *builtins = PyDict_GetItemWithError(globals, &_Py_ID(__builtins__));
    if (builtins) {
        if (PyModule_Check(builtins)) {
            builtins = _PyModule_GetDict(builtins);
            assert(builtins != NULL);
        }
        return builtins;
    }
    if (PyErr_Occurred()) {
        return NULL;
    }

    return _PyEval_GetBuiltins(tstate);
}