this repo has no description
1// Copyright (c) Facebook, Inc. and its affiliates. (http://www.facebook.com)
2#include "interpreter.h"
3
4#include <cstdio>
5#include <cstdlib>
6#include <sstream>
7
8#include "attributedict.h"
9#include "builtins-module.h"
10#include "bytes-builtins.h"
11#include "complex-builtins.h"
12#include "dict-builtins.h"
13#include "event.h"
14#include "exception-builtins.h"
15#include "float-builtins.h"
16#include "frame.h"
17#include "generator-builtins.h"
18#include "ic.h"
19#include "int-builtins.h"
20#include "interpreter-gen.h"
21#include "list-builtins.h"
22#include "module-builtins.h"
23#include "object-builtins.h"
24#include "objects.h"
25#include "profiling.h"
26#include "runtime.h"
27#include "set-builtins.h"
28#include "str-builtins.h"
29#include "thread.h"
30#include "trampolines.h"
31#include "tuple-builtins.h"
32#include "type-builtins.h"
33#include "utils.h"
34
// TODO(emacs): Figure out why this produces different (more) results than
// using EVENT_ID with the opcode as arg0 and remove EVENT_CACHE.
// Records an inline-cache invalidation event tagged with the opcode name.
#define EVENT_CACHE(op) EVENT(InvalidateInlineCache_##op)

namespace py {

// Shorthand for the continuation value returned by opcode handlers.
using Continue = Interpreter::Continue;

// We want opcode handlers inlined into the interpreter in optimized builds.
// Keep them outlined for nicer debugging in debug builds.
#ifdef NDEBUG
#define HANDLER_INLINE ALWAYS_INLINE __attribute__((used))
#else
#define HANDLER_INLINE __attribute__((noinline))
#endif
50
// Dunder selectors for the normal (left-operand) binary operations, indexed
// by Interpreter::BinaryOp.
static const SymbolId kBinaryOperationSelector[] = {
    ID(__add__),     ID(__sub__),      ID(__mul__),    ID(__matmul__),
    ID(__truediv__), ID(__floordiv__), ID(__mod__),    ID(__divmod__),
    ID(__pow__),     ID(__lshift__),   ID(__rshift__), ID(__and__),
    ID(__xor__),     ID(__or__)};

// Reflected (right-operand) counterparts, indexed by Interpreter::BinaryOp.
static const SymbolId kSwappedBinaryOperationSelector[] = {
    ID(__radd__),     ID(__rsub__),      ID(__rmul__),    ID(__rmatmul__),
    ID(__rtruediv__), ID(__rfloordiv__), ID(__rmod__),    ID(__rdivmod__),
    ID(__rpow__),     ID(__rlshift__),   ID(__rrshift__), ID(__rand__),
    ID(__rxor__),     ID(__ror__)};

// In-place counterparts, indexed by Interpreter::BinaryOp. There is no
// `__idivmod__`, so the DIVMOD slot holds the kMaxId sentinel; callers are
// expected to never index it (see the DCHECK in inplaceOperationSelector).
static const SymbolId kInplaceOperationSelector[] = {
    ID(__iadd__),     ID(__isub__),      ID(__imul__),    ID(__imatmul__),
    ID(__itruediv__), ID(__ifloordiv__), ID(__imod__),    SymbolId::kMaxId,
    ID(__ipow__),     ID(__ilshift__),   ID(__irshift__), ID(__iand__),
    ID(__ixor__),     ID(__ior__)};

// Rich-comparison selectors, indexed by CompareOp (LT..GE).
static const SymbolId kComparisonSelector[] = {
    ID(__lt__), ID(__le__), ID(__eq__), ID(__ne__), ID(__gt__), ID(__ge__)};

// Maps each CompareOp to the comparison used when operands are swapped,
// e.g. a < b  <=>  b > a.
static const CompareOp kSwappedCompareOp[] = {GT, GE, EQ, NE, LT, LE};
73
74SymbolId Interpreter::binaryOperationSelector(Interpreter::BinaryOp op) {
75 return kBinaryOperationSelector[static_cast<int>(op)];
76}
77
78SymbolId Interpreter::swappedBinaryOperationSelector(Interpreter::BinaryOp op) {
79 return kSwappedBinaryOperationSelector[static_cast<int>(op)];
80}
81
82SymbolId Interpreter::inplaceOperationSelector(Interpreter::BinaryOp op) {
83 DCHECK(op != Interpreter::BinaryOp::DIVMOD,
84 "DIVMOD is not a valid inplace op");
85 return kInplaceOperationSelector[static_cast<int>(op)];
86}
87
88SymbolId Interpreter::comparisonSelector(CompareOp op) {
89 DCHECK(op >= CompareOp::LT, "invalid compare op");
90 DCHECK(op <= CompareOp::GE, "invalid compare op");
91 return kComparisonSelector[op];
92}
93
94SymbolId Interpreter::swappedComparisonSelector(CompareOp op) {
95 DCHECK(op >= CompareOp::LT, "invalid compare op");
96 DCHECK(op <= CompareOp::GE, "invalid compare op");
97 CompareOp swapped_op = kSwappedCompareOp[op];
98 return comparisonSelector(swapped_op);
99}
100
101Interpreter::~Interpreter() {}
102
// Resolves a non-function `callable` (in/out parameter) into something
// directly callable. Returns Bool::trueObj() when an implicit first argument
// was unpacked into `*self` (unwrapped BoundMethod, a type's ctor, or an
// unbound `__call__` function) -- the caller must insert `*self` before the
// positional arguments. Returns Bool::falseObj() when `*callable` was
// replaced by a function that needs no extra argument. Returns Error with a
// TypeError raised when the object is not callable.
RawObject Interpreter::prepareCallable(Thread* thread, Object* callable,
                                       Object* self) {
  DCHECK(!callable->isFunction(),
         "prepareCallable should only be called on non-function types");
  HandleScope scope(thread);
  Runtime* runtime = thread->runtime();

  // Loop: a non-data descriptor's __get__ may hand back another non-function
  // callable, in which case we retry the resolution on the new object.
  for (;;) {
    if (callable->isBoundMethod()) {
      BoundMethod method(&scope, **callable);
      Object maybe_function(&scope, method.function());
      if (maybe_function.isFunction()) {
        // If we have an exact function, unwrap as a fast-path. Otherwise, fall
        // back to __call__.
        *callable = *maybe_function;
        *self = method.self();
        return Bool::trueObj();
      }
    }

    if (callable->isType()) {
      // In case `callable` is a type (e.g., str("value")), this call is
      // resolved via type.__call__(callable, ...). The most common operation
      // performed by such a path is object creation through __init__ and
      // __new__. In case callable.underCtor is explicitly defined, it can
      // perform such instance creation of the exact type `callable` directly
      // without dispatching to `type.__call__` if it exists. Otherwise,
      // callable.underCtor is guaranteed to be same as type.__call__.
      RawType type = Type::cast(**callable);
      RawObject ctor = type.ctor();
      DCHECK(ctor.isFunction(), "ctor is expected to be a function");
      *self = type;
      *callable = ctor;
      return Bool::trueObj();
    }
    // TODO(T44238481): Look into using lookupMethod() once it's fixed.
    Type type(&scope, runtime->typeOf(**callable));
    Object dunder_call(&scope,
                       typeLookupInMroById(thread, *type, ID(__call__)));
    if (!dunder_call.isErrorNotFound()) {
      if (dunder_call.isFunction()) {
        // Avoid calling function.__get__ and creating a short-lived BoundMethod
        // object. Instead, return the unpacked values directly.
        *self = **callable;
        *callable = *dunder_call;
        return Bool::trueObj();
      }
      Type call_type(&scope, runtime->typeOf(*dunder_call));
      if (typeIsNonDataDescriptor(*call_type)) {
        *callable = callDescriptorGet(thread, dunder_call, *callable, type);
        if (callable->isErrorException()) return **callable;
        if (callable->isFunction()) return Bool::falseObj();

        // Retry the lookup using the object returned by the descriptor.
        continue;
      }
      // Update callable for the exception message below.
      *callable = *dunder_call;
    }
    return thread->raiseWithFmt(LayoutId::kTypeError,
                                "'%T' object is not callable", callable);
  }
}
166
// Prepares the callable at stack depth `callable_idx` for a call with `nargs`
// arguments. Fast paths: a plain Function is returned unchanged; a
// BoundMethod wrapping a Function is unpacked in place on the stack (the
// function replaces the method and `self` is inserted below it, so the
// returned nargs grows by one). Everything else takes the slow
// prepareCallableCallDunderCall path.
HANDLER_INLINE USED Interpreter::PrepareCallableResult
Interpreter::prepareCallableCall(Thread* thread, word nargs,
                                 word callable_idx) {
  RawObject callable = thread->stackPeek(callable_idx);
  if (callable.isFunction()) {
    return {callable, nargs};
  }

  if (callable.isBoundMethod()) {
    RawBoundMethod method = BoundMethod::cast(callable);
    RawObject method_function = method.function();
    if (method_function.isFunction()) {
      thread->stackSetAt(callable_idx, method_function);
      thread->stackInsertAt(callable_idx, method.self());
      return {method_function, nargs + 1};
    }
  }
  return prepareCallableCallDunderCall(thread, nargs, callable_idx);
}
186
// Slow path of prepareCallableCall: runs the full prepareCallable()
// resolution on the object at stack depth `callable_idx`, rewriting the stack
// as needed. Returns the resolved function (or Error) together with the
// possibly-incremented argument count.
NEVER_INLINE
Interpreter::PrepareCallableResult Interpreter::prepareCallableCallDunderCall(
    Thread* thread, word nargs, word callable_idx) {
  HandleScope scope(thread);
  Object callable(&scope, thread->stackPeek(callable_idx));
  Object self(&scope, NoneType::object());
  RawObject prepare_result = prepareCallable(thread, &callable, &self);
  if (prepare_result.isErrorException()) {
    return {prepare_result, nargs};
  }
  thread->stackSetAt(callable_idx, *callable);
  if (prepare_result == Bool::trueObj()) {
    // Shift all arguments on the stack down by 1 and use the unpacked
    // BoundMethod.
    //
    // We don't need to worry too much about the performance overhead for method
    // calls here.
    //
    // Python 3.7 introduces two new opcodes, LOAD_METHOD and CALL_METHOD, that
    // eliminate the need to create a temporary BoundMethod object when
    // performing a method call.
    //
    // The other pattern of bound method usage occurs when someone passes around
    // a reference to a method e.g.:
    //
    //   m = foo.method
    //   m()
    //
    // Our contention is that uses of this pattern are not performance
    // sensitive.
    thread->stackInsertAt(callable_idx, *self);
    return {*callable, nargs + 1};
  }
  return {*callable, nargs};
}
222
// Calls the callable at stack depth `nargs` with the `nargs` arguments above
// it. On error the stack is dropped back to its pre-call state and the Error
// is returned; otherwise dispatches through the function's entry point.
RawObject Interpreter::call(Thread* thread, word nargs) {
  DCHECK(!thread->hasPendingException(), "unhandled exception lingering");
  // Expected stack pointer after the call has consumed callable + arguments.
  RawObject* post_call_sp = thread->stackPointer() + nargs + 1;
  PrepareCallableResult prepare_result =
      prepareCallableCall(thread, nargs, nargs);
  RawObject function = prepare_result.function;
  nargs = prepare_result.nargs;
  if (function.isErrorException()) {
    thread->stackDrop(nargs + 1);
    DCHECK(thread->stackPointer() == post_call_sp, "stack not cleaned");
    return function;
  }
  return callFunction(thread, nargs, function);
}
237
// Invokes `function` (already verified to sit at stack depth `nargs`) through
// its entry trampoline. The entry point is responsible for popping the
// callable and arguments; the DCHECKs verify the stack discipline.
ALWAYS_INLINE RawObject Interpreter::callFunction(Thread* thread, word nargs,
                                                  RawObject function) {
  DCHECK(!thread->hasPendingException(), "unhandled exception lingering");
  RawObject* post_call_sp = thread->stackPointer() + nargs + 1;
  DCHECK(function == thread->stackPeek(nargs),
         "thread->stackPeek(nargs) is expected to be the given function");
  RawObject result = Function::cast(function).entry()(thread, nargs);
  DCHECK(thread->stackPointer() == post_call_sp, "stack not cleaned");
  return result;
}
248
// Keyword-call variant of call(): the callable sits one slot deeper because
// the tuple of keyword names occupies the top of the stack. Dispatches
// through the function's entryKw trampoline.
RawObject Interpreter::callKw(Thread* thread, word nargs) {
  // Top of stack is a tuple of keyword argument names in the order they
  // appear on the stack.
  RawObject* post_call_sp = thread->stackPointer() + nargs + 2;
  PrepareCallableResult prepare_result =
      prepareCallableCall(thread, nargs, nargs + 1);
  RawObject function = prepare_result.function;
  nargs = prepare_result.nargs;
  if (function.isErrorException()) {
    thread->stackDrop(nargs + 2);
    DCHECK(thread->stackPointer() == post_call_sp, "stack not cleaned");
    return function;
  }
  RawObject result = Function::cast(function).entryKw()(thread, nargs);
  DCHECK(thread->stackPointer() == post_call_sp, "stack not cleaned");
  return result;
}
266
// CALL_FUNCTION_EX-style call: stack holds [callable, args-tuple] or
// [callable, args-tuple, kwargs] depending on `flags`. Normalizes the
// callable via prepareCallableEx, then dispatches through entryEx.
RawObject Interpreter::callEx(Thread* thread, word flags) {
  // Low bit of flags indicates whether var-keyword argument is on TOS.
  // In all cases, var-positional tuple is next, followed by the function
  // pointer.
  word callable_idx = (flags & CallFunctionExFlag::VAR_KEYWORDS) ? 2 : 1;
  RawObject* post_call_sp = thread->stackPointer() + callable_idx + 1;
  HandleScope scope(thread);
  Object callable(&scope, prepareCallableEx(thread, callable_idx));
  if (callable.isErrorException()) {
    thread->stackDrop(callable_idx + 1);
    DCHECK(thread->stackPointer() == post_call_sp, "stack not cleaned");
    return *callable;
  }
  RawObject result = RawFunction::cast(*callable).entryEx()(thread, flags);
  DCHECK(thread->stackPointer() == post_call_sp, "stack not cleaned");
  return result;
}
284
// Prepares a CALL_FUNCTION_EX callable: coerces the var-positional argument
// sequence (one slot below the callable) into a tuple and resolves a
// non-function callable via prepareCallable, rebuilding the argument tuple
// with the unpacked `self` prepended when needed. Returns the resolved
// callable or Error.
RawObject Interpreter::prepareCallableEx(Thread* thread, word callable_idx) {
  HandleScope scope(thread);
  Object callable(&scope, thread->stackPeek(callable_idx));
  word args_idx = callable_idx - 1;
  Object args_obj(&scope, thread->stackPeek(args_idx));
  if (!args_obj.isTuple()) {
    // Make sure the argument sequence is a tuple.
    if (args_obj.isList()) {
      List list(&scope, *args_obj);
      Tuple list_items(&scope, list.items());
      args_obj = thread->runtime()->tupleSubseq(thread, list_items, 0,
                                                list.numItems());
    }
    // NOTE(review): the list fast path above still falls through to
    // builtins.tuple() on the freshly built tuple; this looks redundant but
    // harmless -- confirm before short-circuiting it.
    args_obj = thread->invokeFunction1(ID(builtins), ID(tuple), args_obj);
    if (args_obj.isErrorException()) return *args_obj;
    thread->stackSetAt(args_idx, *args_obj);
  }
  if (!callable.isFunction()) {
    Object self(&scope, NoneType::object());
    Object result(&scope, prepareCallable(thread, &callable, &self));
    if (result.isErrorException()) return *result;
    thread->stackSetAt(callable_idx, *callable);

    if (result == Bool::trueObj()) {
      // Create a new argument tuple with self as the first argument
      Tuple args(&scope, *args_obj);
      MutableTuple new_args(
          &scope, thread->runtime()->newMutableTuple(args.length() + 1));
      new_args.atPut(0, *self);
      new_args.replaceFromWith(1, *args, args.length());
      thread->stackSetAt(args_idx, new_args.becomeImmutable());
    }
  }
  return *callable;
}
320
// Generic hashing fallback: looks up and calls `value.__hash__()`. Raises
// TypeError for unhashable objects (missing or None __hash__; a pending
// exception from the lookup is replaced by the TypeError). Validates that the
// result is an int and normalizes it to a SmallInt like CPython does.
static RawObject callDunderHash(Thread* thread, const Object& value) {
  HandleScope scope(thread);
  // TODO(T52406106): This lookup is unfortunately not inline-cached but should
  // eventually be called less and less as code moves to managed.
  Object dunder_hash(&scope,
                     Interpreter::lookupMethod(thread, value, ID(__hash__)));
  if (dunder_hash.isNoneType() || dunder_hash.isError()) {
    if (dunder_hash.isErrorException()) {
      // Discard the lookup failure; the TypeError below supersedes it.
      thread->clearPendingException();
    } else {
      DCHECK(dunder_hash.isErrorNotFound() || dunder_hash.isNoneType(),
             "expected Error::notFound() or None");
    }
    return thread->raiseWithFmt(LayoutId::kTypeError, "unhashable type: '%T'",
                                &value);
  }
  Object result(&scope, Interpreter::callMethod1(thread, dunder_hash, value));
  if (result.isErrorException()) return *result;
  if (!thread->runtime()->isInstanceOfInt(*result)) {
    return thread->raiseWithFmt(LayoutId::kTypeError,
                                "__hash__ method should return an integer");
  }
  Int hash_int(&scope, intUnderlying(*result));
  if (hash_int.isSmallInt()) {
    // cpython always replaces -1 hash values with -2.
    if (hash_int == SmallInt::fromWord(-1)) {
      return SmallInt::fromWord(-2);
    }
    return *hash_int;
  }
  // A bool result hashes to 0 or 1.
  if (hash_int.isBool()) {
    return SmallInt::fromWord(Bool::cast(*hash_int).value() ? 1 : 0);
  }
  // Note that cpython keeps the hash values unaltered as long as they fit into
  // `Py_hash_t` (aka `Py_ssize_t`) while we must return a `SmallInt` here so
  // we have to invoke the large int hashing for 1 bit smaller numbers than
  // cpython.
  return SmallInt::fromWord(largeIntHash(LargeInt::cast(*hash_int)));
}
360
// Hashes `value`, returning a SmallInt (or Error). Known layouts are hashed
// directly; everything else consults type flags to pick identity hashing or
// str hashing, and finally falls back to calling __hash__.
RawObject Interpreter::hash(Thread* thread, const Object& value) {
  // Directly call into hash functions for all types supported by the marshal
  // code to avoid bootstrapping problems. It also helps performance.
  LayoutId layout_id = value.layoutId();
  word result;
  switch (layout_id) {
    case LayoutId::kBool:
      result = Bool::cast(*value).hash();
      break;
    case LayoutId::kComplex:
      result = complexHash(*value);
      break;
    case LayoutId::kFloat:
      result = floatHash(*value);
      break;
    case LayoutId::kFrozenSet:
      // May raise (hashing the elements), so return directly.
      return frozensetHash(thread, value);
    case LayoutId::kSmallInt:
      result = SmallInt::cast(*value).hash();
      break;
    case LayoutId::kLargeBytes:
    case LayoutId::kSmallBytes:
      result = bytesHash(thread, *value);
      break;
    case LayoutId::kLargeInt:
      result = largeIntHash(LargeInt::cast(*value));
      break;
    case LayoutId::kLargeStr:
    case LayoutId::kSmallStr:
      result = strHash(thread, *value);
      break;
    case LayoutId::kTuple: {
      HandleScope scope(thread);
      Tuple value_tuple(&scope, *value);
      // May raise (hashing the elements), so return directly.
      return tupleHash(thread, value_tuple);
    }
    case LayoutId::kNoneType:
    case LayoutId::kEllipsis:
    case LayoutId::kStopIteration:
      result = thread->runtime()->hash(*value);
      break;
    default: {
      Runtime* runtime = thread->runtime();
      RawType value_type = runtime->typeOf(*value);
      if (value_type.hasFlag(Type::Flag::kHasObjectDunderHash)) {
        // At this point we already handled all immediate value types, as well
        // as LargeStr and LargeBytes, so we can directly call
        // `Runtime::identityHash` instead of `Runtime::hash`.
        result = runtime->identityHash(*value);
      } else if (value_type.hasFlag(Type::Flag::kHasStrDunderHash) &&
                 runtime->isInstanceOfStr(*value)) {
        result = strHash(thread, strUnderlying(*value));
      } else {
        // Generic path: call the type's __hash__.
        return callDunderHash(thread, value);
      }
      break;
    }
  }
  return SmallInt::fromWordTruncated(result);
}
421
// Concatenates the `num` strings at sp[0..num-1], consuming them from
// sp[num-1] down to sp[0] (i.e. the deepest slot contributes the first
// characters). All values must already be Str; conversion of other types is
// unimplemented. Small results are built on the C stack, larger ones in a
// MutableBytes that is then turned into a Str.
RawObject Interpreter::stringJoin(Thread* thread, RawObject* sp, word num) {
  word new_len = 0;
  for (word i = num - 1; i >= 0; i--) {
    if (!sp[i].isStr()) {
      UNIMPLEMENTED("Conversion of non-string values not supported.");
    }
    new_len += Str::cast(sp[i]).length();
  }

  // Fast path: result fits in a SmallStr, so assemble it in a stack buffer.
  if (new_len <= RawSmallStr::kMaxLength) {
    byte buffer[RawSmallStr::kMaxLength];
    byte* ptr = buffer;
    for (word i = num - 1; i >= 0; i--) {
      RawStr str = Str::cast(sp[i]);
      word len = str.length();
      str.copyTo(ptr, len);
      ptr += len;
    }
    return SmallStr::fromBytes(View<byte>(buffer, new_len));
  }

  HandleScope scope(thread);
  MutableBytes result(&scope,
                      thread->runtime()->newMutableBytesUninitialized(new_len));
  word offset = 0;
  for (word i = num - 1; i >= 0; i--) {
    RawStr str = Str::cast(sp[i]);
    word len = str.length();
    result.replaceFromWithStr(offset, str, len);
    offset += len;
  }
  return result.becomeStr();
}
455
// Invokes the descriptor protocol: descriptor.__get__(receiver,
// receiver_type). Common descriptor layouts (classmethod, function, property,
// staticmethod) are handled inline without a method lookup; anything else
// dispatches to the type's __get__.
RawObject Interpreter::callDescriptorGet(Thread* thread,
                                         const Object& descriptor,
                                         const Object& receiver,
                                         const Object& receiver_type) {
  HandleScope scope(thread);
  Runtime* runtime = thread->runtime();
  switch (descriptor.layoutId()) {
    case LayoutId::kClassMethod: {
      // classmethod binds the wrapped function to the type itself.
      Object method(&scope, ClassMethod::cast(*descriptor).function());
      return runtime->newBoundMethod(method, receiver_type);
    }
    case LayoutId::kFunction: {
      if (receiver.isNoneType()) {
        if (receiver_type.rawCast<RawType>().builtinBase() !=
            LayoutId::kNoneType) {
          // Type lookup.
          return *descriptor;
        }
      }
      return runtime->newBoundMethod(descriptor, receiver);
    }
    case LayoutId::kProperty: {
      Object getter(&scope, Property::cast(*descriptor).getter());
      // A property without a getter falls through to the generic __get__.
      if (getter.isNoneType()) break;
      if (receiver.isNoneType()) {
        return *descriptor;
      }
      return Interpreter::call1(thread, getter, receiver);
    }
    case LayoutId::kStaticMethod:
      // staticmethod ignores receiver and type entirely.
      return StaticMethod::cast(*descriptor).function();
    default:
      break;
  }
  Object method(
      &scope, typeLookupInMroById(
                  thread, thread->runtime()->typeOf(*descriptor), ID(__get__)));
  DCHECK(!method.isErrorNotFound(), "no __get__ method found");
  return call3(thread, method, descriptor, receiver, receiver_type);
}
496
// Invokes descriptor.__set__(receiver, value).
RawObject Interpreter::callDescriptorSet(Thread* thread,
                                         const Object& descriptor,
                                         const Object& receiver,
                                         const Object& value) {
  return thread->invokeMethod3(descriptor, ID(__set__), receiver, value);
}

// Invokes descriptor.__delete__(receiver).
RawObject Interpreter::callDescriptorDelete(Thread* thread,
                                            const Object& descriptor,
                                            const Object& receiver) {
  return thread->invokeMethod2(descriptor, ID(__delete__), receiver);
}
509
// Looks up `selector` on the type of `receiver` (not on the instance).
// Returns the raw function (unbound) as a fast path, Error::notFound() when
// the attribute does not exist, or the result of running the descriptor
// protocol for non-function attributes.
RawObject Interpreter::lookupMethod(Thread* thread, const Object& receiver,
                                    SymbolId selector) {
  Runtime* runtime = thread->runtime();
  RawType raw_type = runtime->typeOf(*receiver).rawCast<RawType>();
  RawObject raw_method = typeLookupInMroById(thread, raw_type, selector);
  if (raw_method.isFunction() || raw_method.isErrorNotFound()) {
    // Do not create a short-lived bound method object, and propagate
    // exceptions.
    return raw_method;
  }
  HandleScope scope(thread);
  Type type(&scope, raw_type);
  Object method(&scope, raw_method);
  return resolveDescriptorGet(thread, method, receiver, type);
}
525
// call0..call6: convenience wrappers that push the callable and its N
// arguments onto the value stack and dispatch through call(), which handles
// non-function callables and stack cleanup.

RawObject Interpreter::call0(Thread* thread, const Object& callable) {
  thread->stackPush(*callable);
  return call(thread, 0);
}

RawObject Interpreter::call1(Thread* thread, const Object& callable,
                             const Object& arg1) {
  thread->stackPush(*callable);
  thread->stackPush(*arg1);
  return call(thread, 1);
}

RawObject Interpreter::call2(Thread* thread, const Object& callable,
                             const Object& arg1, const Object& arg2) {
  thread->stackPush(*callable);
  thread->stackPush(*arg1);
  thread->stackPush(*arg2);
  return call(thread, 2);
}

RawObject Interpreter::call3(Thread* thread, const Object& callable,
                             const Object& arg1, const Object& arg2,
                             const Object& arg3) {
  thread->stackPush(*callable);
  thread->stackPush(*arg1);
  thread->stackPush(*arg2);
  thread->stackPush(*arg3);
  return call(thread, 3);
}

RawObject Interpreter::call4(Thread* thread, const Object& callable,
                             const Object& arg1, const Object& arg2,
                             const Object& arg3, const Object& arg4) {
  thread->stackPush(*callable);
  thread->stackPush(*arg1);
  thread->stackPush(*arg2);
  thread->stackPush(*arg3);
  thread->stackPush(*arg4);
  return call(thread, 4);
}

RawObject Interpreter::call5(Thread* thread, const Object& callable,
                             const Object& arg1, const Object& arg2,
                             const Object& arg3, const Object& arg4,
                             const Object& arg5) {
  thread->stackPush(*callable);
  thread->stackPush(*arg1);
  thread->stackPush(*arg2);
  thread->stackPush(*arg3);
  thread->stackPush(*arg4);
  thread->stackPush(*arg5);
  return call(thread, 5);
}

RawObject Interpreter::call6(Thread* thread, const Object& callable,
                             const Object& arg1, const Object& arg2,
                             const Object& arg3, const Object& arg4,
                             const Object& arg5, const Object& arg6) {
  thread->stackPush(*callable);
  thread->stackPush(*arg1);
  thread->stackPush(*arg2);
  thread->stackPush(*arg3);
  thread->stackPush(*arg4);
  thread->stackPush(*arg5);
  thread->stackPush(*arg6);
  return call(thread, 6);
}
593
// callMethod1..callMethod4: call `method` with `self` plus N-1 extra
// arguments. When `method` is a plain Function, `self` is pushed as the first
// positional argument (fast path); otherwise `self` is assumed to be already
// bound inside `method` (e.g. a BoundMethod) and is not pushed.

RawObject Interpreter::callMethod1(Thread* thread, const Object& method,
                                   const Object& self) {
  word nargs = 0;
  thread->stackPush(*method);
  if (method.isFunction()) {
    thread->stackPush(*self);
    return callFunction(thread, nargs + 1, *method);
  }
  return call(thread, nargs);
}

RawObject Interpreter::callMethod2(Thread* thread, const Object& method,
                                   const Object& self, const Object& other) {
  word nargs = 1;
  thread->stackPush(*method);
  if (method.isFunction()) {
    thread->stackPush(*self);
    thread->stackPush(*other);
    return callFunction(thread, nargs + 1, *method);
  }
  thread->stackPush(*other);
  return call(thread, nargs);
}

RawObject Interpreter::callMethod3(Thread* thread, const Object& method,
                                   const Object& self, const Object& arg1,
                                   const Object& arg2) {
  word nargs = 2;
  thread->stackPush(*method);
  if (method.isFunction()) {
    thread->stackPush(*self);
    thread->stackPush(*arg1);
    thread->stackPush(*arg2);
    return callFunction(thread, nargs + 1, *method);
  }
  thread->stackPush(*arg1);
  thread->stackPush(*arg2);
  return call(thread, nargs);
}

RawObject Interpreter::callMethod4(Thread* thread, const Object& method,
                                   const Object& self, const Object& arg1,
                                   const Object& arg2, const Object& arg3) {
  word nargs = 3;
  thread->stackPush(*method);
  if (method.isFunction()) {
    thread->stackPush(*self);
    thread->stackPush(*arg1);
    thread->stackPush(*arg2);
    thread->stackPush(*arg3);
    return callFunction(thread, nargs + 1, *method);
  }
  thread->stackPush(*arg1);
  thread->stackPush(*arg2);
  thread->stackPush(*arg3);
  return call(thread, nargs);
}
651
// Tail-call variant of callMethod1: pushes `method` (+ `self` for plain
// functions) and re-enters the dispatch loop instead of calling out.
HANDLER_INLINE
Continue Interpreter::tailcallMethod1(Thread* thread, RawObject method,
                                      RawObject self) {
  word nargs = 0;
  thread->stackPush(method);
  if (method.isFunction()) {
    thread->stackPush(self);
    nargs++;
    return tailcallFunction(thread, nargs, method);
  }
  return tailcall(thread, nargs);
}

// Tail-calls the callable at stack depth `arg` with `arg` arguments via the
// generic positional-call path.
HANDLER_INLINE Continue Interpreter::tailcall(Thread* thread, word arg) {
  return handleCall(thread, arg, arg, preparePositionalCall, &Function::entry);
}
668
669static RawObject raiseUnaryOpTypeError(Thread* thread, const Object& object,
670 SymbolId selector) {
671 HandleScope scope(thread);
672 Runtime* runtime = thread->runtime();
673 Type type(&scope, runtime->typeOf(*object));
674 Object type_name(&scope, type.name());
675 Object op_name(&scope, runtime->symbols()->at(selector));
676 return thread->raiseWithFmt(LayoutId::kTypeError,
677 "bad operand type for unary '%S': '%S'", &op_name,
678 &type_name);
679}
680
681RawObject Interpreter::unaryOperation(Thread* thread, const Object& self,
682 SymbolId selector) {
683 RawObject result = thread->invokeMethod1(self, selector);
684 if (result.isErrorNotFound()) {
685 return raiseUnaryOpTypeError(thread, self, selector);
686 }
687 return result;
688}
689
// Opcode helper: applies the unary operation `selector` to TOS, replacing it
// with the result. Unwinds on exception.
HANDLER_INLINE Continue Interpreter::doUnaryOperation(SymbolId selector,
                                                      Thread* thread) {
  HandleScope scope(thread);
  Object receiver(&scope, thread->stackTop());
  RawObject result = unaryOperation(thread, receiver, selector);
  if (result.isErrorException()) return Continue::UNWIND;
  thread->stackSetTop(result);
  return Continue::NEXT;
}
699
// Looks up the reflected method (e.g. __radd__) on the type of `right`.
// Returns Error::notFound() when it does not exist or when `left`'s type
// provides the very same method (in which case calling it again is pointless).
static RawObject binaryOperationLookupReflected(Thread* thread,
                                                Interpreter::BinaryOp op,
                                                const Object& left,
                                                const Object& right) {
  HandleScope scope(thread);
  Runtime* runtime = thread->runtime();
  SymbolId swapped_selector = Interpreter::swappedBinaryOperationSelector(op);
  Object right_reversed_method(
      &scope,
      typeLookupInMroById(thread, runtime->typeOf(*right), swapped_selector));
  if (right_reversed_method.isErrorNotFound()) return *right_reversed_method;

  // Python doesn't bother calling the reverse method when the slot on left and
  // right points to the same method. We compare the reverse methods to get
  // close to this behavior.
  Object left_reversed_method(
      &scope,
      typeLookupInMroById(thread, runtime->typeOf(*left), swapped_selector));
  if (left_reversed_method == right_reversed_method) {
    return Error::notFound();
  }

  return *right_reversed_method;
}
724
// Runs the previously-selected binary-op `method` on (left, right), honoring
// `flags` (kBinaryOpReflected swaps the argument order). When `method_out` is
// non-null the method must be a plain function; it and `flags` are stored for
// the caller's inline cache. A not-found method yields NotImplemented so the
// caller can retry with the other operand.
static RawObject executeAndCacheBinaryOp(Thread* thread, const Object& method,
                                         BinaryOpFlags flags,
                                         const Object& left,
                                         const Object& right,
                                         Object* method_out,
                                         BinaryOpFlags* flags_out) {
  if (method.isErrorNotFound()) {
    return NotImplementedType::object();
  }

  if (method_out != nullptr) {
    DCHECK(method.isFunction(), "must be a plain function");
    *method_out = *method;
    *flags_out = flags;
    return Interpreter::binaryOperationWithMethod(thread, *method, flags, *left,
                                                  *right);
  }
  if (flags & kBinaryOpReflected) {
    return Interpreter::callMethod2(thread, method, right, left);
  }
  return Interpreter::callMethod2(thread, method, left, right);
}
747
// Performs the binary operation `op` on (left, right) following Python
// semantics: the reflected method runs first when the right type is a
// subclass of the left (or the left has no method); NotImplemented triggers a
// retry with the other operand. When `method_out`/`flags_out` are non-null
// and a plain function method is selected, they receive the method and flags
// for inline caching; descriptor-resolved methods are never cached.
RawObject Interpreter::binaryOperationSetMethod(Thread* thread, BinaryOp op,
                                                const Object& left,
                                                const Object& right,
                                                Object* method_out,
                                                BinaryOpFlags* flags_out) {
  HandleScope scope(thread);
  Runtime* runtime = thread->runtime();
  SymbolId selector = binaryOperationSelector(op);
  Type left_type(&scope, runtime->typeOf(*left));
  Type right_type(&scope, runtime->typeOf(*right));
  Object left_method(&scope, typeLookupInMroById(thread, *left_type, selector));

  // Figure out whether we want to run the normal or the reverse operation
  // first and set `flags` accordingly.
  Object method(&scope, NoneType::object());
  BinaryOpFlags flags = kBinaryOpNone;
  if (left_type != right_type && (left_method.isErrorNotFound() ||
                                  typeIsSubclass(*right_type, *left_type))) {
    method = binaryOperationLookupReflected(thread, op, left, right);
    if (!method.isErrorNotFound()) {
      flags = kBinaryOpReflected;
      if (!left_method.isErrorNotFound()) {
        // The normal method still exists, so a NotImplemented result should
        // retry it.
        flags =
            static_cast<BinaryOpFlags>(flags | kBinaryOpNotImplementedRetry);
      }
      if (!method.isFunction()) {
        // Descriptor-resolved methods must not be cached.
        method_out = nullptr;
        method = resolveDescriptorGet(thread, method, right, right_type);
        if (method.isErrorException()) return *method;
      }
    }
  }
  if (flags == kBinaryOpNone) {
    flags = kBinaryOpNotImplementedRetry;
    method = *left_method;
    if (!method.isFunction() && !method.isErrorNotFound()) {
      // Descriptor-resolved methods must not be cached.
      method_out = nullptr;
      method = resolveDescriptorGet(thread, method, left, left_type);
      if (method.isErrorException()) return *method;
    }
  }

  Object result(&scope, executeAndCacheBinaryOp(thread, method, flags, left,
                                                right, method_out, flags_out));
  if (!result.isNotImplementedType()) return *result;

  // Invoke a 2nd method (normal or reverse depends on what we did the first
  // time) or report an error.
  return binaryOperationRetry(thread, op, flags, left, right);
}
798
// Uncached variant of binaryOperationSetMethod.
RawObject Interpreter::binaryOperation(Thread* thread, BinaryOp op,
                                       const Object& left,
                                       const Object& right) {
  return binaryOperationSetMethod(thread, op, left, right, nullptr, nullptr);
}
804
805HANDLER_INLINE Continue Interpreter::doBinaryOperation(BinaryOp op,
806 Thread* thread) {
807 HandleScope scope(thread);
808 Object other(&scope, thread->stackPop());
809 Object self(&scope, thread->stackPop());
810 RawObject result = binaryOperation(thread, op, self, other);
811 if (result.isErrorException()) return Continue::UNWIND;
812 thread->stackPush(result);
813 return Continue::NEXT;
814}
815
// Performs the in-place operation `op` on (left, right): tries the __i*__
// method on left's type first and, if missing or NotImplemented, falls back
// to the regular binary operation. Only the first (in-place, plain-function)
// method is ever written to `method_out` -- the fallback's method must not
// overwrite the cache entry.
RawObject Interpreter::inplaceOperationSetMethod(Thread* thread, BinaryOp op,
                                                 const Object& left,
                                                 const Object& right,
                                                 Object* method_out,
                                                 BinaryOpFlags* flags_out) {
  HandleScope scope(thread);
  Runtime* runtime = thread->runtime();
  SymbolId selector = inplaceOperationSelector(op);
  Type left_type(&scope, runtime->typeOf(*left));
  Object method(&scope, typeLookupInMroById(thread, *left_type, selector));
  if (!method.isErrorNotFound()) {
    if (method.isFunction()) {
      if (method_out != nullptr) {
        *method_out = *method;
        *flags_out = kInplaceBinaryOpRetry;
      }
    } else {
      method = resolveDescriptorGet(thread, method, left, left_type);
      if (method.isErrorException()) return *method;
    }

    // Make sure we do not put a possible 2nd method call (from
    // binaryOperationSetMethod() down below) into the cache.
    method_out = nullptr;
    Object result(&scope, callMethod2(thread, method, left, right));
    if (result != NotImplementedType::object()) {
      return *result;
    }
  }
  return binaryOperationSetMethod(thread, op, left, right, method_out,
                                  flags_out);
}
848
// Uncached variant of inplaceOperationSetMethod.
RawObject Interpreter::inplaceOperation(Thread* thread, BinaryOp op,
                                        const Object& left,
                                        const Object& right) {
  return inplaceOperationSetMethod(thread, op, left, right, nullptr, nullptr);
}

// Opcode helper: pops right then left operand, performs the in-place `op`,
// and pushes the result. Unwinds on exception.
HANDLER_INLINE Continue Interpreter::doInplaceOperation(BinaryOp op,
                                                        Thread* thread) {
  HandleScope scope(thread);
  Object right(&scope, thread->stackPop());
  Object left(&scope, thread->stackPop());
  RawObject result = inplaceOperation(thread, op, left, right);
  if (result.isErrorException()) return Continue::UNWIND;
  thread->stackPush(result);
  return Continue::NEXT;
}
865
// Performs the rich comparison `op` on (left, right), mirroring
// binaryOperationSetMethod: the swapped comparison on right's type runs first
// when right's type is a subclass of left's (or left lacks the method), and a
// NotImplemented result falls back to compareOperationRetry. Plain-function
// methods may be cached via `method_out`/`flags_out`; descriptor-resolved
// ones never are.
RawObject Interpreter::compareOperationSetMethod(Thread* thread, CompareOp op,
                                                 const Object& left,
                                                 const Object& right,
                                                 Object* method_out,
                                                 BinaryOpFlags* flags_out) {
  HandleScope scope(thread);
  Runtime* runtime = thread->runtime();
  SymbolId selector = comparisonSelector(op);
  Type left_type(&scope, runtime->typeOf(*left));
  Type right_type(&scope, runtime->typeOf(*right));
  Object left_method(&scope, typeLookupInMroById(thread, *left_type, selector));

  // Figure out whether we want to run the normal or the reverse operation
  // first and set `flags` accordingly.
  Object method(&scope, *left_method);
  BinaryOpFlags flags = kBinaryOpNone;
  if (left_type != right_type && (left_method.isErrorNotFound() ||
                                  typeIsSubclass(*right_type, *left_type))) {
    SymbolId reverse_selector = swappedComparisonSelector(op);
    method = typeLookupInMroById(thread, *right_type, reverse_selector);
    if (!method.isErrorNotFound()) {
      flags = kBinaryOpReflected;
      if (!left_method.isErrorNotFound()) {
        // The normal method still exists, so NotImplemented should retry it.
        flags =
            static_cast<BinaryOpFlags>(flags | kBinaryOpNotImplementedRetry);
      }
      if (!method.isFunction()) {
        // Descriptor-resolved methods must not be cached.
        method_out = nullptr;
        method = resolveDescriptorGet(thread, method, right, right_type);
        if (method.isErrorException()) return *method;
      }
    }
  }
  if (flags == kBinaryOpNone) {
    flags = kBinaryOpNotImplementedRetry;
    method = *left_method;
    if (!method.isFunction() && !method.isErrorNotFound()) {
      // Descriptor-resolved methods must not be cached.
      method_out = nullptr;
      method = resolveDescriptorGet(thread, method, left, left_type);
      if (method.isErrorException()) return *method;
    }
  }

  Object result(&scope, executeAndCacheBinaryOp(thread, method, flags, left,
                                                right, method_out, flags_out));
  if (!result.isNotImplementedType()) return *result;

  return compareOperationRetry(thread, op, flags, left, right);
}
915
// Second half of the rich-comparison protocol: called after the first method
// returned NotImplemented. Tries the other direction (normal after reflected,
// or reflected after normal) when `flags` says a retry is warranted, then
// falls back to identity comparison for ==/!=, and finally raises TypeError.
RawObject Interpreter::compareOperationRetry(Thread* thread, CompareOp op,
                                             BinaryOpFlags flags,
                                             const Object& left,
                                             const Object& right) {
  HandleScope scope(thread);
  Runtime* runtime = thread->runtime();

  if (flags & kBinaryOpNotImplementedRetry) {
    // If we tried reflected first, try normal now.
    if (flags & kBinaryOpReflected) {
      SymbolId selector = comparisonSelector(op);
      Object method(&scope, lookupMethod(thread, left, selector));
      if (method.isError()) {
        if (method.isErrorException()) return *method;
        DCHECK(method.isErrorNotFound(), "expected not found");
      } else {
        Object result(&scope, callMethod2(thread, method, left, right));
        if (!result.isNotImplementedType()) return *result;
      }
    } else {
      // If we tried normal first, try to find a reflected method and call it.
      SymbolId selector = swappedComparisonSelector(op);
      Object method(&scope, lookupMethod(thread, right, selector));
      if (!method.isErrorNotFound()) {
        if (!method.isFunction()) {
          Type right_type(&scope, runtime->typeOf(*right));
          method = resolveDescriptorGet(thread, method, right, right_type);
          if (method.isErrorException()) return *method;
        }
        // Note the swapped argument order for the reflected call.
        Object result(&scope, callMethod2(thread, method, right, left));
        if (!result.isNotImplementedType()) return *result;
      }
    }
  }

  // ==/!= default to identity when no method produced an answer.
  if (op == CompareOp::EQ) {
    return Bool::fromBool(*left == *right);
  }
  if (op == CompareOp::NE) {
    return Bool::fromBool(*left != *right);
  }

  SymbolId op_symbol = comparisonSelector(op);
  return thread->raiseUnsupportedBinaryOperation(left, right, op_symbol);
}
961
962HANDLER_INLINE USED RawObject Interpreter::binaryOperationWithMethod(
963 Thread* thread, RawObject method, BinaryOpFlags flags, RawObject left,
964 RawObject right) {
965 DCHECK(method.isFunction(), "function is expected");
966 thread->stackPush(method);
967 if (flags & kBinaryOpReflected) {
968 thread->stackPush(right);
969 thread->stackPush(left);
970 } else {
971 thread->stackPush(left);
972 thread->stackPush(right);
973 }
974 return callFunction(thread, /*nargs=*/2, method);
975}
976
// Second half of the binary-operation protocol: called after the first method
// returned NotImplemented. Tries the opposite direction when `flags` allows a
// retry, otherwise (or if that also fails) raises TypeError.
RawObject Interpreter::binaryOperationRetry(Thread* thread, BinaryOp op,
                                            BinaryOpFlags flags,
                                            const Object& left,
                                            const Object& right) {
  HandleScope scope(thread);
  Runtime* runtime = thread->runtime();

  if (flags & kBinaryOpNotImplementedRetry) {
    // If we tried reflected first, try normal now.
    if (flags & kBinaryOpReflected) {
      SymbolId selector = binaryOperationSelector(op);
      Object method(&scope, lookupMethod(thread, left, selector));
      if (method.isError()) {
        if (method.isErrorException()) return *method;
        DCHECK(method.isErrorNotFound(), "expected not found");
      } else {
        Object result(&scope, callMethod2(thread, method, left, right));
        if (!result.isNotImplementedType()) return *result;
      }
    } else {
      // If we tried normal first, try to find a reflected method and call it.
      Object method(&scope,
                    binaryOperationLookupReflected(thread, op, left, right));
      if (!method.isErrorNotFound()) {
        if (!method.isFunction()) {
          Type right_type(&scope, runtime->typeOf(*right));
          method = resolveDescriptorGet(thread, method, right, right_type);
          if (method.isErrorException()) return *method;
        }
        // Reflected call: arguments are swapped.
        Object result(&scope, callMethod2(thread, method, right, left));
        if (!result.isNotImplementedType()) return *result;
      }
    }
  }

  SymbolId op_symbol = binaryOperationSelector(op);
  return thread->raiseUnsupportedBinaryOperation(left, right, op_symbol);
}
1015
// Perform a rich comparison (==, <, is, in, ...) on `left` and `right`.
// Uncached entry point: delegates with null out-parameters so the resolved
// method is not reported back for inline caching.
RawObject Interpreter::compareOperation(Thread* thread, CompareOp op,
                                        const Object& left,
                                        const Object& right) {
  return compareOperationSetMethod(thread, op, left, right, nullptr, nullptr);
}
1021
// Produce an iterator for `iterable`, mirroring the builtin iter() protocol:
// call __iter__ if present, fall back to a sequence iterator for objects
// implementing the sequence protocol, and raise TypeError otherwise. Also
// raises TypeError when __iter__ returns a non-iterator.
RawObject Interpreter::createIterator(Thread* thread, const Object& iterable) {
  Runtime* runtime = thread->runtime();
  HandleScope scope(thread);
  Object dunder_iter(&scope, lookupMethod(thread, iterable, ID(__iter__)));
  if (dunder_iter.isError() || dunder_iter.isNoneType()) {
    // No usable __iter__: try the sequence fallback before raising.
    if (dunder_iter.isErrorNotFound() &&
        runtime->isSequence(thread, iterable)) {
      return runtime->newSeqIterator(iterable);
    }
    // Drop any exception from the failed lookup; the TypeError below is the
    // one callers should see.
    thread->clearPendingException();
    return thread->raiseWithFmt(LayoutId::kTypeError,
                                "'%T' object is not iterable", &iterable);
  }
  Object iterator(&scope, callMethod1(thread, dunder_iter, iterable));
  if (iterator.isErrorException()) return *iterator;
  if (!runtime->isIterator(thread, iterator)) {
    return thread->raiseWithFmt(LayoutId::kTypeError,
                                "iter() returned non-iterator of type '%T'",
                                &iterator);
  }
  return *iterator;
}
1044
// Membership-test fallback: iterate `container` and compare each element to
// `value` with ==, returning Bool::trueObj() on the first match (or an Error
// if iteration/comparison raised). Used when the container has no
// __contains__ method.
RawObject Interpreter::sequenceIterSearch(Thread* thread, const Object& value,
                                          const Object& container) {
  HandleScope scope(thread);
  Object iter(&scope, createIterator(thread, container));
  if (iter.isErrorException()) {
    return *iter;
  }
  Object dunder_next(&scope, lookupMethod(thread, iter, ID(__next__)));
  if (dunder_next.isError()) {
    if (dunder_next.isErrorException()) {
      // Replace whatever the lookup raised with the TypeError below.
      thread->clearPendingException();
    } else {
      DCHECK(dunder_next.isErrorNotFound(),
             "expected Error::exception() or Error::notFound()");
    }
    return thread->raiseWithFmt(LayoutId::kTypeError,
                                "__next__ not defined on iterator");
  }
  Object current(&scope, NoneType::object());
  Object compare_result(&scope, NoneType::object());
  Object result(&scope, NoneType::object());
  for (;;) {
    current = callMethod1(thread, dunder_next, iter);
    if (current.isErrorException()) {
      // StopIteration means exhaustion, not failure.
      if (thread->hasPendingStopIteration()) {
        thread->clearPendingStopIteration();
        break;
      }
      return *current;
    }
    compare_result = compareOperation(thread, EQ, value, current);
    if (compare_result.isErrorException()) {
      return *compare_result;
    }
    result = isTrue(thread, *compare_result);
    // isTrue can return Error or Bool, and we would want to return on Error or
    // True.
    if (result != Bool::falseObj()) {
      return *result;
    }
  }
  return Bool::falseObj();
}
1088
// Membership test (`value in container`). Uncached entry point: delegates
// with a null out-parameter so the resolved __contains__ is not cached.
RawObject Interpreter::sequenceContains(Thread* thread, const Object& value,
                                        const Object& container) {
  return sequenceContainsSetMethod(thread, value, container, nullptr);
}
1093
// Membership test (`value in container`) that reports the resolved
// __contains__ function through `method_out` (when it is a plain function) so
// callers can cache it. Falls back to iterating the container when no
// __contains__ can be looked up.
RawObject Interpreter::sequenceContainsSetMethod(Thread* thread,
                                                 const Object& value,
                                                 const Object& container,
                                                 Object* method_out) {
  HandleScope scope(thread);
  Object method(&scope, lookupMethod(thread, container, ID(__contains__)));
  if (!method.isError()) {
    // Only plain functions are cacheable.
    if (method_out != nullptr && method.isFunction()) *method_out = *method;
    Object result(&scope, callMethod2(thread, method, container, value));
    if (result.isErrorException()) {
      return *result;
    }
    // The protocol coerces __contains__'s result to a bool.
    return isTrue(thread, *result);
  }
  if (method.isErrorException()) {
    // Lookup raised: swallow it and use the iteration fallback.
    thread->clearPendingException();
  } else {
    DCHECK(method.isErrorNotFound(),
           "expected Error::exception() or Error::notFound()");
  }
  return sequenceIterSearch(thread, value, container);
}
1116
1117HANDLER_INLINE USED RawObject Interpreter::isTrue(Thread* thread,
1118 RawObject value_obj) {
1119 if (value_obj == Bool::trueObj()) return Bool::trueObj();
1120 if (value_obj == Bool::falseObj()) return Bool::falseObj();
1121 return isTrueSlowPath(thread, value_obj);
1122}
1123
// Truthiness for non-Bool objects. Handles the common built-in layouts
// directly, then consults the type's __bool__ / __len__ (in that order) via
// type flags, and defaults to true when neither is defined.
RawObject Interpreter::isTrueSlowPath(Thread* thread, RawObject value_obj) {
  switch (value_obj.layoutId()) {
    case LayoutId::kNoneType:
      return Bool::falseObj();
    // Layouts that are unconditionally truthy.
    case LayoutId::kEllipsis:
    case LayoutId::kFunction:
    case LayoutId::kLargeBytes:
    case LayoutId::kLargeInt:
    case LayoutId::kLargeStr:
    case LayoutId::kModule:
    case LayoutId::kNotImplementedType:
    case LayoutId::kType:
      return Bool::trueObj();
    // Containers: truthy iff non-empty.
    case LayoutId::kDict:
      return Bool::fromBool(Dict::cast(value_obj).numItems() > 0);
    case LayoutId::kList:
      return Bool::fromBool(List::cast(value_obj).numItems() > 0);
    case LayoutId::kSet:
    case LayoutId::kFrozenSet:
      return Bool::fromBool(RawSetBase::cast(value_obj).numItems() > 0);
    // Immediate objects: compare against the canonical empty/zero instance.
    case LayoutId::kSmallBytes:
      return Bool::fromBool(value_obj != Bytes::empty());
    case LayoutId::kSmallInt:
      return Bool::fromBool(value_obj != SmallInt::fromWord(0));
    case LayoutId::kSmallStr:
      return Bool::fromBool(value_obj != Str::empty());
    case LayoutId::kTuple:
      return Bool::fromBool(Tuple::cast(value_obj).length() > 0);
    default:
      break;
  }
  word type_flags =
      thread->runtime()->typeOf(value_obj).rawCast<RawType>().flags();
  if (type_flags & Type::Flag::kHasDunderBool) {
    HandleScope scope(thread);
    Object value(&scope, value_obj);
    Object result(&scope, thread->invokeMethod1(value, ID(__bool__)));
    // The type flag guarantees the method exists.
    DCHECK(!result.isErrorNotFound(), "__bool__ is expected to be found");
    if (result.isErrorException()) {
      return *result;
    }
    if (result.isBool()) return *result;
    return thread->raiseWithFmt(LayoutId::kTypeError,
                                "__bool__ should return bool");
  }
  if (type_flags & Type::Flag::kHasDunderLen) {
    HandleScope scope(thread);
    Object value(&scope, value_obj);
    Object result(&scope, thread->invokeMethod1(value, ID(__len__)));
    DCHECK(!result.isErrorNotFound(), "__len__ is expected to be found");
    if (result.isErrorException()) {
      return *result;
    }
    if (thread->runtime()->isInstanceOfInt(*result)) {
      Int integer(&scope, intUnderlying(*result));
      if (integer.isPositive()) return Bool::trueObj();
      if (integer.isZero()) return Bool::falseObj();
      return thread->raiseWithFmt(LayoutId::kValueError,
                                  "__len__() should return >= 0");
    }
    return thread->raiseWithFmt(LayoutId::kTypeError,
                                "object cannot be interpreted as an integer");
  }
  // No __bool__ and no __len__: instances are truthy by default.
  return Bool::trueObj();
}
1189
// Implement the RAISE_VARARGS protocol: turn `exc_obj` (an exception type or
// instance) into a (type, value) pair, optionally attach `cause_obj` for
// `raise x from y`, and set the pending exception on `thread`. On any
// protocol violation a TypeError becomes the pending exception instead.
HANDLER_INLINE void Interpreter::raise(Thread* thread, RawObject exc_obj,
                                       RawObject cause_obj) {
  Runtime* runtime = thread->runtime();
  HandleScope scope(thread);
  Object exc(&scope, exc_obj);
  Object cause(&scope, cause_obj);
  Object type(&scope, NoneType::object());
  Object value(&scope, NoneType::object());

  if (runtime->isInstanceOfType(*exc) &&
      Type(&scope, *exc).isBaseExceptionSubclass()) {
    // raise was given a BaseException subtype. Use it as the type, and call
    // the type object to create the value.
    type = *exc;
    value = Interpreter::call0(thread, type);
    if (value.isErrorException()) return;
    if (!runtime->isInstanceOfBaseException(*value)) {
      thread->raiseWithFmt(
          LayoutId::kTypeError,
          "calling exception type did not return an instance of BaseException, "
          "but '%T' object",
          &value);
      return;
    }
  } else if (runtime->isInstanceOfBaseException(*exc)) {
    // raise was given an instance of a BaseException subtype. Use it as the
    // value and pull out its type.
    value = *exc;
    type = runtime->typeOf(*value);
  } else {
    // raise was given some other, unexpected value.
    thread->raiseWithFmt(LayoutId::kTypeError,
                         "exceptions must derive from BaseException");
    return;
  }

  // Handle the two-arg form of RAISE_VARARGS, corresponding to "raise x from
  // y". If the cause is a type, call it to create an instance. Either way,
  // attach the cause to the primary exception.
  if (!cause.isErrorNotFound()) {  // TODO(T25860930) use Unbound rather than
                                   // Error.
    if (runtime->isInstanceOfType(*cause) &&
        Type(&scope, *cause).isBaseExceptionSubclass()) {
      cause = Interpreter::call0(thread, cause);
      if (cause.isErrorException()) return;
    } else if (!runtime->isInstanceOfBaseException(*cause) &&
               !cause.isNoneType()) {
      // A cause of None is allowed (it suppresses context display).
      thread->raiseWithFmt(LayoutId::kTypeError,
                           "exception causes must derive from BaseException");
      return;
    }
    BaseException(&scope, *value).setCause(*cause);
  }

  // If we made it here, the process didn't fail with a different exception.
  // Set the pending exception, which is now ready for unwinding. This leaves
  // the VM in a state similar to API functions like PyErr_SetObject(). The
  // main difference is that pendingExceptionValue() will always be an
  // exception instance here, but in the API call case it may be any object
  // (most commonly a str). This discrepancy is cleaned up by
  // normalizeException() in unwind().
  thread->raiseWithType(*type, *value);
}
1253
// Pop an except-handler block: discard the values pushed above it and restore
// the previously-caught exception state that the handler block saved.
HANDLER_INLINE void Interpreter::unwindExceptHandler(Thread* thread,
                                                     TryBlock block) {
  // Drop all dead values except for the 3 that are popped into the caught
  // exception state.
  DCHECK(block.kind() == TryBlock::kExceptHandler, "Invalid TryBlock Kind");
  thread->stackDrop(thread->valueStackSize() - block.level() - 3);
  // Pop order mirrors the push order in unwind(): type, value, traceback.
  thread->setCaughtExceptionType(thread->stackPop());
  thread->setCaughtExceptionValue(thread->stackPop());
  thread->setCaughtExceptionTraceback(thread->stackPop());
}
1264
// Handle the uncommon frame return modes (JIT return, profiler hook,
// recursive-interpreter exit). Pops the current frame, writes the value to
// return through `retval_ptr`, and returns true when the caller should leave
// the interpreter loop.
NEVER_INLINE static bool handleReturnModes(Thread* thread, word return_mode,
                                           RawObject* retval_ptr) {
  HandleScope scope(thread);
  Object retval(&scope, *retval_ptr);

  if (return_mode == Frame::ReturnMode::kJitReturn) {
    thread->popFrame();
    thread->stackPush(*retval);
    // Signal to do emulated ret to JIT code.
    *retval_ptr = Error::notFound();
    return true;
  }
  if (return_mode & Frame::kProfilerReturn) {
    profiling_return(thread);
  }

  thread->popFrame();
  *retval_ptr = *retval;
  return (return_mode & Frame::kExitRecursiveInterpreter) != 0;
}
1285
// Finish a RETURN_VALUE: pop the return value and the current frame. Returns
// the value when the interpreter loop should exit, or Error::error() when the
// loop should continue in the caller's frame (with the value pushed onto the
// caller's stack).
RawObject Interpreter::handleReturn(Thread* thread) {
  Frame* frame = thread->currentFrame();
  RawObject retval = thread->stackPop();
  DCHECK(frame->blockStackEmpty(), "block stack should be empty");
  DCHECK(!retval.isError(), "should not return error");

  // Check whether we should exit the interpreter loop.
  word return_mode = frame->returnMode();
  if (return_mode == 0) {
    // Common case: plain return into the calling Python frame.
    thread->popFrame();
  } else if (return_mode == Frame::kExitRecursiveInterpreter) {
    thread->popFrame();
    return retval;
  } else if (handleReturnModes(thread, return_mode, &retval)) {
    return retval;
  }
  thread->stackPush(retval);
  return Error::error();  // continue interpreter loop.
}
1305
// Unwind frames looking for a handler for the pending exception. For each
// frame a traceback entry is prepended; finally/except-handler blocks are
// processed as they are popped. Returns Error::error() when a handler was
// found (the loop continues there) or Error::exception()/a JIT signal when
// the interpreter loop should exit with the exception still pending.
RawObject Interpreter::unwind(Thread* thread) {
  DCHECK(thread->hasPendingException(),
         "unwind() called without a pending exception");
  HandleScope scope(thread);

  Runtime* runtime = thread->runtime();
  Frame* frame = thread->currentFrame();
  Object new_traceback(&scope, NoneType::object());
  Object caught_exc_state(&scope, NoneType::object());
  Object type(&scope, NoneType::object());
  Object value(&scope, NoneType::object());
  Object traceback(&scope, NoneType::object());
  for (;;) {
    // Prepend a traceback entry for this frame to the pending traceback.
    new_traceback = runtime->newTraceback();
    Traceback::cast(*new_traceback).setFunction(frame->function());
    if (!frame->isNative()) {
      word lasti = frame->virtualPC() - kCodeUnitSize;
      Traceback::cast(*new_traceback).setLasti(SmallInt::fromWord(lasti));
    }
    Traceback::cast(*new_traceback)
        .setNext(thread->pendingExceptionTraceback());
    thread->setPendingExceptionTraceback(*new_traceback);

    while (!frame->blockStackEmpty()) {
      TryBlock block = frame->blockStackPop();
      if (block.kind() == TryBlock::kExceptHandler) {
        unwindExceptHandler(thread, block);
        continue;
      }
      DCHECK(block.kind() == TryBlock::kFinally, "expected finally block");
      thread->stackDrop(thread->valueStackSize() - block.level());

      // Push a handler block and save the current caught exception, if any.
      frame->blockStackPush(
          TryBlock{TryBlock::kExceptHandler, 0, thread->valueStackSize()});
      caught_exc_state = thread->topmostCaughtExceptionState();
      if (caught_exc_state.isNoneType()) {
        thread->stackPush(NoneType::object());
        thread->stackPush(NoneType::object());
        thread->stackPush(NoneType::object());
      } else {
        thread->stackPush(ExceptionState::cast(*caught_exc_state).traceback());
        thread->stackPush(ExceptionState::cast(*caught_exc_state).value());
        thread->stackPush(ExceptionState::cast(*caught_exc_state).type());
      }

      // Load and normalize the pending exception.
      type = thread->pendingExceptionType();
      value = thread->pendingExceptionValue();
      traceback = thread->pendingExceptionTraceback();
      thread->clearPendingException();
      normalizeException(thread, &type, &value, &traceback);
      BaseException(&scope, *value).setTraceback(*traceback);

      // Promote the normalized exception to caught, push it for the bytecode
      // handler, and jump to the handler.
      thread->setCaughtExceptionType(*type);
      thread->setCaughtExceptionValue(*value);
      thread->setCaughtExceptionTraceback(*traceback);
      thread->stackPush(*traceback);
      thread->stackPush(*value);
      thread->stackPush(*type);
      frame->setVirtualPC(block.handler());
      return Error::error();  // continue interpreter loop.
    }

    // No handler in this frame: pop it, honoring the frame's return mode.
    word return_mode = frame->returnMode();
    RawObject retval = Error::exception();
    if (return_mode == 0) {
      frame = thread->popFrame();
    } else if (return_mode == Frame::kExitRecursiveInterpreter) {
      thread->popFrame();
      return Error::exception();
    } else if (handleReturnModes(thread, return_mode, &retval)) {
      return retval;
    } else {
      frame = thread->currentFrame();
    }
  }
}
1386
1387static Bytecode currentBytecode(Thread* thread) {
1388 Frame* frame = thread->currentFrame();
1389 word pc = frame->virtualPC() - kCodeUnitSize;
1390 return static_cast<Bytecode>(frame->bytecode().byteAt(pc));
1391}
1392
1393static inline word currentCacheIndex(Frame* frame) {
1394 word pc = frame->virtualPC() - kCodeUnitSize;
1395 return frame->bytecode().uint16At(pc + 2);
1396}
1397
1398static void rewriteCurrentBytecode(Frame* frame, Bytecode bytecode) {
1399 word pc = frame->virtualPC() - kCodeUnitSize;
1400 MutableBytes::cast(frame->bytecode()).byteAtPut(pc, bytecode);
1401}
1402
// Handler for opcode slots that must never be dispatched; aborts with the
// offending opcode's name.
HANDLER_INLINE Continue Interpreter::doInvalidBytecode(Thread* thread, word) {
  Bytecode bc = currentBytecode(thread);
  UNREACHABLE("bytecode '%s'", kBytecodeNames[bc]);
}
1407
// POP_TOP: discard the top-of-stack value.
HANDLER_INLINE Continue Interpreter::doPopTop(Thread* thread, word) {
  thread->stackPop();
  return Continue::NEXT;
}
1412
1413HANDLER_INLINE Continue Interpreter::doRotTwo(Thread* thread, word) {
1414 RawObject peek0 = thread->stackPeek(0);
1415 RawObject peek1 = thread->stackPeek(1);
1416 thread->stackSetAt(1, peek0);
1417 thread->stackSetAt(0, peek1);
1418 return Continue::NEXT;
1419}
1420
1421HANDLER_INLINE Continue Interpreter::doRotThree(Thread* thread, word) {
1422 RawObject top = thread->stackTop();
1423 thread->stackSetAt(0, thread->stackPeek(1));
1424 thread->stackSetAt(1, thread->stackPeek(2));
1425 thread->stackSetAt(2, top);
1426 return Continue::NEXT;
1427}
1428
1429HANDLER_INLINE Continue Interpreter::doRotFour(Thread* thread, word) {
1430 RawObject top = thread->stackTop();
1431 thread->stackSetAt(0, thread->stackPeek(1));
1432 thread->stackSetAt(1, thread->stackPeek(2));
1433 thread->stackSetAt(2, thread->stackPeek(3));
1434 thread->stackSetAt(3, top);
1435 return Continue::NEXT;
1436}
1437
// DUP_TOP: duplicate the top-of-stack value.
HANDLER_INLINE Continue Interpreter::doDupTop(Thread* thread, word) {
  thread->stackPush(thread->stackTop());
  return Continue::NEXT;
}
1442
1443HANDLER_INLINE Continue Interpreter::doDupTopTwo(Thread* thread, word) {
1444 RawObject first = thread->stackTop();
1445 RawObject second = thread->stackPeek(1);
1446 thread->stackPush(second);
1447 thread->stackPush(first);
1448 return Continue::NEXT;
1449}
1450
// NOP: do nothing.
HANDLER_INLINE Continue Interpreter::doNop(Thread*, word) {
  return Continue::NEXT;
}
1454
// UNARY_POSITIVE: call __pos__ on the top-of-stack value.
HANDLER_INLINE Continue Interpreter::doUnaryPositive(Thread* thread, word) {
  return doUnaryOperation(ID(__pos__), thread);
}
1458
// Specialized UNARY_NEGATIVE for SmallInt receivers: negate in place without
// a method call. Falls back (and de-specializes the opcode) when the value is
// not a SmallInt or the negation does not fit a SmallInt (e.g. negating the
// minimum value).
HANDLER_INLINE
Continue Interpreter::doUnaryNegativeSmallInt(Thread* thread, word) {
  RawObject obj = thread->stackPeek(0);
  if (obj.isSmallInt()) {
    word value = SmallInt::cast(obj).value();
    word result_value = -value;
    if (SmallInt::isValid(result_value)) {
      thread->stackSetTop(SmallInt::fromWord(result_value));
      return Continue::NEXT;
    }
  }
  // Specialization no longer matches: rewrite back to the generic opcode.
  EVENT_CACHE(UNARY_NEGATIVE_SMALLINT);
  Frame* frame = thread->currentFrame();
  rewriteCurrentBytecode(frame, UNARY_NEGATIVE);
  return doUnaryNegative(thread, /*arg=*/0);
}
1475
// First execution of a cached unary op: pick a specialization based on the
// receiver (currently only NEGATIVE on SmallInt is specialized), rewrite the
// opcode accordingly, and dispatch to the chosen handler.
HANDLER_INLINE
Continue Interpreter::doUnaryOpAnamorphic(Thread* thread, word arg) {
  Frame* frame = thread->currentFrame();
  if (thread->stackPeek(0).isSmallInt()) {
    switch (static_cast<UnaryOp>(arg)) {
      case UnaryOp::NEGATIVE:
        rewriteCurrentBytecode(frame, UNARY_NEGATIVE_SMALLINT);
        return doUnaryNegativeSmallInt(thread, arg);
      default:
        UNIMPLEMENTED("cached unary operations other than NEGATIVE");
        break;
    }
  }
  // TODO(emacs): Add caching for methods on non-smallints
  switch (static_cast<UnaryOp>(arg)) {
    case UnaryOp::NEGATIVE:
      // Non-SmallInt receiver: fall back to the generic opcode permanently.
      rewriteCurrentBytecode(frame, UNARY_NEGATIVE);
      return doUnaryNegative(thread, /*arg=*/0);
    default:
      UNIMPLEMENTED("cached unary operations other than NEGATIVE");
      break;
  }
  UNREACHABLE("all UnaryOp cases should be handled in the switch");
}
1500
// UNARY_NEGATIVE: call __neg__ on the top-of-stack value.
HANDLER_INLINE Continue Interpreter::doUnaryNegative(Thread* thread, word) {
  return doUnaryOperation(ID(__neg__), thread);
}
1504
1505HANDLER_INLINE Continue Interpreter::doUnaryNot(Thread* thread, word) {
1506 RawObject value = thread->stackTop();
1507 if (!value.isBool()) {
1508 value = isTrue(thread, value);
1509 if (value.isErrorException()) return Continue::UNWIND;
1510 }
1511 thread->stackSetTop(RawBool::negate(value));
1512 return Continue::NEXT;
1513}
1514
// UNARY_INVERT: call __invert__ on the top-of-stack value.
HANDLER_INLINE Continue Interpreter::doUnaryInvert(Thread* thread, word) {
  return doUnaryOperation(ID(__invert__), thread);
}
1518
// Trivial opcode handlers: each delegates to the generic (in-place) binary
// operation machinery with the matching BinaryOp selector.
HANDLER_INLINE Continue Interpreter::doBinaryMatrixMultiply(Thread* thread,
                                                            word) {
  return doBinaryOperation(BinaryOp::MATMUL, thread);
}

HANDLER_INLINE Continue Interpreter::doInplaceMatrixMultiply(Thread* thread,
                                                             word) {
  return doInplaceOperation(BinaryOp::MATMUL, thread);
}

HANDLER_INLINE Continue Interpreter::doBinaryPower(Thread* thread, word) {
  return doBinaryOperation(BinaryOp::POW, thread);
}

HANDLER_INLINE Continue Interpreter::doBinaryMultiply(Thread* thread, word) {
  return doBinaryOperation(BinaryOp::MUL, thread);
}

HANDLER_INLINE Continue Interpreter::doBinaryModulo(Thread* thread, word) {
  return doBinaryOperation(BinaryOp::MOD, thread);
}

HANDLER_INLINE Continue Interpreter::doBinaryAdd(Thread* thread, word) {
  return doBinaryOperation(BinaryOp::ADD, thread);
}

HANDLER_INLINE Continue Interpreter::doBinarySubtract(Thread* thread, word) {
  return doBinaryOperation(BinaryOp::SUB, thread);
}
1548
// Slow path for BINARY_SUBSCR: resolve __getitem__ (or __class_getitem__ for
// type receivers), optionally record it in the inline cache slot `cache` and
// specialize the opcode, then tail-call it with (container, key). A negative
// `cache` means "do not cache" (used by the uncached BINARY_SUBSCR opcode).
Continue Interpreter::binarySubscrUpdateCache(Thread* thread, word cache) {
  Frame* frame = thread->currentFrame();
  HandleScope scope(thread);
  Function dependent(&scope, frame->function());
  if (dependent.isCompiled()) {
    // Compiled code must not mutate caches under the interpreter's feet.
    return Continue::DEOPT;
  }
  Object container(&scope, thread->stackPeek(1));
  Runtime* runtime = thread->runtime();
  Type type(&scope, runtime->typeOf(*container));
  Object getitem(&scope, typeLookupInMroById(thread, *type, ID(__getitem__)));
  if (getitem.isErrorNotFound()) {
    // Type objects support subscription via __class_getitem__ (PEP 560).
    if (runtime->isInstanceOfType(*container)) {
      Type container_as_type(&scope, *container);
      Str dunder_class_getitem_name(
          &scope, runtime->symbols()->at(ID(__class_getitem__)));
      getitem = typeGetAttribute(thread, container_as_type,
                                 dunder_class_getitem_name);
    }
    if (getitem.isErrorNotFound()) {
      thread->raiseWithFmt(LayoutId::kTypeError,
                           "'%T' object is not subscriptable", &container);
      return Continue::UNWIND;
    }
  }
  if (!getitem.isFunction()) {
    // Bind the descriptor now; bound results are not cacheable.
    getitem = resolveDescriptorGet(thread, getitem, container, type);
    if (getitem.isErrorException()) return Continue::UNWIND;
    thread->stackSetAt(1, *getitem);
    return tailcall(thread, 1);
  }
  if (cache >= 0) {
    // TODO(T55274956): Make this into a separate function to be shared.
    MutableTuple caches(&scope, frame->caches());
    Str get_item_name(&scope, runtime->symbols()->at(ID(__getitem__)));
    ICState next_cache_state =
        icUpdateAttr(thread, caches, cache, container.layoutId(), getitem,
                     get_item_name, dependent);
    rewriteCurrentBytecode(frame, next_cache_state == ICState::kMonomorphic
                                      ? BINARY_SUBSCR_MONOMORPHIC
                                      : BINARY_SUBSCR_POLYMORPHIC);
  }
  thread->stackSetAt(1, *getitem);
  thread->stackInsertAt(1, *container);
  return tailcallFunction(thread, 2, *getitem);
}
1595
// Uncached BINARY_SUBSCR: a cache index of -1 disables inline-cache updates.
HANDLER_INLINE Continue Interpreter::doBinarySubscr(Thread* thread, word) {
  return binarySubscrUpdateCache(thread, -1);
}
1599
// Specialized BINARY_SUBSCR for exact dict receivers: hash the key and look
// it up directly, raising KeyError on a miss. De-specializes via the cache
// update path when the receiver is not a dict.
HANDLER_INLINE Continue Interpreter::doBinarySubscrDict(Thread* thread, word) {
  RawObject container = thread->stackPeek(1);
  if (!container.isDict()) {
    EVENT_CACHE(BINARY_SUBSCR_DICT);
    word cache = currentCacheIndex(thread->currentFrame());
    return binarySubscrUpdateCache(thread, cache);
  }
  HandleScope scope(thread);
  Dict dict(&scope, container);
  Object key(&scope, thread->stackPeek(0));
  Object hash_obj(&scope, Interpreter::hash(thread, key));
  if (hash_obj.isErrorException()) {
    // Unhashable key (or __hash__ raised).
    return Continue::UNWIND;
  }
  word hash = SmallInt::cast(*hash_obj).value();
  Object result(&scope, dictAt(thread, dict, key, hash));
  if (result.isError()) {
    if (result.isErrorNotFound()) {
      thread->raise(LayoutId::kKeyError, *key);
      return Continue::UNWIND;
    }
    if (result.isErrorException()) {
      return Continue::UNWIND;
    }
    UNREACHABLE("error should be either notFound or errorException");
  }
  // Replace (key, container) on the stack with the result.
  thread->stackPop();
  thread->stackSetTop(*result);
  return Continue::NEXT;
}
1630
1631HANDLER_INLINE Continue Interpreter::doBinarySubscrList(Thread* thread, word) {
1632 RawObject container = thread->stackPeek(1);
1633 RawObject key = thread->stackPeek(0);
1634 if (container.isList() && key.isSmallInt()) {
1635 word index = SmallInt::cast(key).value();
1636 RawList list = List::cast(container);
1637 word length = list.numItems();
1638 if (0 <= index && index < length) {
1639 thread->stackPop();
1640 thread->stackSetTop(list.at(index));
1641 return Continue::NEXT;
1642 }
1643 }
1644 EVENT_CACHE(BINARY_SUBSCR_LIST);
1645 word cache = currentCacheIndex(thread->currentFrame());
1646 return binarySubscrUpdateCache(thread, cache);
1647}
1648
1649HANDLER_INLINE Continue Interpreter::doBinarySubscrTuple(Thread* thread, word) {
1650 RawObject container = thread->stackPeek(1);
1651 RawObject key = thread->stackPeek(0);
1652 if (container.isTuple() && key.isSmallInt()) {
1653 word index = SmallInt::cast(key).value();
1654 RawTuple tuple = Tuple::cast(container);
1655 word length = tuple.length();
1656 if (0 <= index && index < length) {
1657 thread->stackPop();
1658 thread->stackSetTop(tuple.at(index));
1659 return Continue::NEXT;
1660 }
1661 }
1662 EVENT_CACHE(BINARY_SUBSCR_TUPLE);
1663 word cache = currentCacheIndex(thread->currentFrame());
1664 return binarySubscrUpdateCache(thread, cache);
1665}
1666
// BINARY_SUBSCR with a monomorphic inline cache: look up the cached
// __getitem__ for the receiver's layout and tail-call it; fall back to the
// cache update path on a layout mismatch.
HANDLER_INLINE Continue Interpreter::doBinarySubscrMonomorphic(Thread* thread,
                                                               word) {
  Frame* frame = thread->currentFrame();
  word cache = currentCacheIndex(frame);
  RawMutableTuple caches = MutableTuple::cast(frame->caches());
  LayoutId receiver_layout_id = thread->stackPeek(1).layoutId();
  bool is_found;
  RawObject cached =
      icLookupMonomorphic(caches, cache, receiver_layout_id, &is_found);
  if (!is_found) {
    EVENT_CACHE(BINARY_SUBSCR_MONOMORPHIC);
    return binarySubscrUpdateCache(thread, cache);
  }
  // Stack becomes (key, container, cached) so the call sees two arguments.
  thread->stackInsertAt(2, cached);
  return tailcallFunction(thread, 2, cached);
}
1683
// BINARY_SUBSCR with a polymorphic inline cache: like the monomorphic
// handler, but the cache slot holds entries for several receiver layouts.
HANDLER_INLINE Continue Interpreter::doBinarySubscrPolymorphic(Thread* thread,
                                                               word) {
  Frame* frame = thread->currentFrame();
  LayoutId container_layout_id = thread->stackPeek(1).layoutId();
  bool is_found;
  word cache = currentCacheIndex(frame);
  RawObject cached = icLookupPolymorphic(MutableTuple::cast(frame->caches()),
                                         cache, container_layout_id, &is_found);
  if (!is_found) {
    EVENT_CACHE(BINARY_SUBSCR_POLYMORPHIC);
    return binarySubscrUpdateCache(thread, cache);
  }
  thread->stackInsertAt(2, cached);
  return tailcallFunction(thread, 2, cached);
}
1699
// First execution of a cached BINARY_SUBSCR: specialize the opcode for dict,
// list or tuple receivers (the latter two only with SmallInt keys) and
// dispatch to the chosen handler; otherwise populate the generic cache.
HANDLER_INLINE Continue Interpreter::doBinarySubscrAnamorphic(Thread* thread,
                                                              word arg) {
  Frame* frame = thread->currentFrame();
  RawObject container = thread->stackPeek(1);
  RawObject key = thread->stackPeek(0);
  switch (container.layoutId()) {
    case LayoutId::kDict:
      rewriteCurrentBytecode(frame, BINARY_SUBSCR_DICT);
      return doBinarySubscrDict(thread, arg);
    case LayoutId::kList:
      if (key.isSmallInt()) {
        rewriteCurrentBytecode(frame, BINARY_SUBSCR_LIST);
        return doBinarySubscrList(thread, arg);
      }
      break;
    case LayoutId::kTuple:
      if (key.isSmallInt()) {
        rewriteCurrentBytecode(frame, BINARY_SUBSCR_TUPLE);
        return doBinarySubscrTuple(thread, arg);
      }
      break;
    default:
      break;
  }
  word cache = currentCacheIndex(frame);
  return binarySubscrUpdateCache(thread, cache);
}
1727
// Trivial opcode handlers: each delegates to the generic (in-place) binary
// operation machinery with the matching division selector.
HANDLER_INLINE Continue Interpreter::doBinaryFloorDivide(Thread* thread, word) {
  return doBinaryOperation(BinaryOp::FLOORDIV, thread);
}

HANDLER_INLINE Continue Interpreter::doBinaryTrueDivide(Thread* thread, word) {
  return doBinaryOperation(BinaryOp::TRUEDIV, thread);
}

HANDLER_INLINE Continue Interpreter::doInplaceFloorDivide(Thread* thread,
                                                          word) {
  return doInplaceOperation(BinaryOp::FLOORDIV, thread);
}

HANDLER_INLINE Continue Interpreter::doInplaceTrueDivide(Thread* thread, word) {
  return doInplaceOperation(BinaryOp::TRUEDIV, thread);
}
1744
// GET_AITER: pop the object after `async for` and tail-call its __aiter__,
// raising TypeError when the method is missing.
HANDLER_INLINE Continue Interpreter::doGetAiter(Thread* thread, word) {
  HandleScope scope(thread);
  Object obj(&scope, thread->stackPop());
  Object method(&scope, lookupMethod(thread, obj, ID(__aiter__)));
  if (method.isError()) {
    if (method.isErrorException()) {
      // Replace whatever the lookup raised with the TypeError below.
      thread->clearPendingException();
    } else {
      DCHECK(method.isErrorNotFound(),
             "expected Error::exception() or Error::notFound()");
    }
    thread->raiseWithFmt(
        LayoutId::kTypeError,
        "'async for' requires an object with __aiter__ method");
    return Continue::UNWIND;
  }
  return tailcallMethod1(thread, *method, *obj);
}
1763
// GET_ANEXT: with the async iterator on TOS, call its __anext__ method and
// push the resulting awaitable (normalized via awaitableIter) above the
// iterator. Raises TypeError when __anext__ is missing or when its result is
// not a valid awaitable.
HANDLER_INLINE Continue Interpreter::doGetAnext(Thread* thread, word) {
  HandleScope scope(thread);
  Object obj(&scope, thread->stackTop());
  // TODO(T67736679) Add inline caching for this method lookup.
  Object anext(&scope, lookupMethod(thread, obj, ID(__anext__)));
  if (anext.isError()) {
    if (anext.isErrorException()) {
      // Discard the lookup exception; the TypeError below supersedes it.
      thread->clearPendingException();
    } else {
      DCHECK(anext.isErrorNotFound(),
             "expected Error::exception() or Error::notFound()");
    }
    thread->raiseWithFmt(
        LayoutId::kTypeError,
        "'async for' requires an iterator with __anext__ method");
    return Continue::UNWIND;
  }
  Object awaitable(&scope, callMethod1(thread, anext, obj));
  if (awaitable.isErrorException()) return Continue::UNWIND;
  thread->stackPush(*awaitable);
  // TODO(T67736679) Add inline caching for the lookupMethod() in
  // awaitableIter.
  Object result(
      &scope,
      awaitableIter(thread,
                    "'async for' received an invalid object from __anext__"));
  if (!result.isError()) return Continue::NEXT;
  // Chain the awaitableIter failure as the cause of the TypeError.
  thread->raiseWithFmtChainingPendingAsCause(
      LayoutId::kTypeError,
      "'async for' received an invalid object from __anext__");
  return Continue::UNWIND;
}
1796
1797HANDLER_INLINE Continue Interpreter::doBeginFinally(Thread* thread, word) {
1798 thread->stackPush(NoneType::object());
1799 return Continue::NEXT;
1800}
1801
// BEFORE_ASYNC_WITH: set up an `async with` block. Pops the context manager,
// pushes its __aexit__ attribute for the cleanup opcodes to use later, then
// tail-calls __aenter__() so its awaitable lands on the stack.
HANDLER_INLINE Continue Interpreter::doBeforeAsyncWith(Thread* thread, word) {
  HandleScope scope(thread);
  Object manager(&scope, thread->stackPop());

  // resolve __aexit__ and push it
  Runtime* runtime = thread->runtime();
  Object exit(&scope, runtime->attributeAtById(thread, manager, ID(__aexit__)));
  if (exit.isErrorException()) {
    return Continue::UNWIND;
  }
  thread->stackPush(*exit);

  // resolve __aenter__ call it and push the return value
  Object enter(&scope, lookupMethod(thread, manager, ID(__aenter__)));
  if (enter.isError()) {
    if (enter.isErrorNotFound()) {
      // Missing __aenter__ is reported as an AttributeError on the manager.
      Object aenter_str(&scope, runtime->newStrFromFmt("__aenter__"));
      objectRaiseAttributeError(thread, manager, aenter_str);
      return Continue::UNWIND;
    }
    if (enter.isErrorException()) {
      return Continue::UNWIND;
    }
  }
  return tailcallMethod1(thread, *enter, *manager);
}
1828
// INPLACE_ADD: delegate `+=` to the generic in-place dispatcher.
HANDLER_INLINE Continue Interpreter::doInplaceAdd(Thread* thread, word) {
  return doInplaceOperation(BinaryOp::ADD, thread);
}

// INPLACE_SUBTRACT: delegate `-=` to the generic in-place dispatcher.
HANDLER_INLINE Continue Interpreter::doInplaceSubtract(Thread* thread, word) {
  return doInplaceOperation(BinaryOp::SUB, thread);
}

// INPLACE_MULTIPLY: delegate `*=` to the generic in-place dispatcher.
HANDLER_INLINE Continue Interpreter::doInplaceMultiply(Thread* thread, word) {
  return doInplaceOperation(BinaryOp::MUL, thread);
}

// INPLACE_MODULO: delegate `%=` to the generic in-place dispatcher.
HANDLER_INLINE Continue Interpreter::doInplaceModulo(Thread* thread, word) {
  return doInplaceOperation(BinaryOp::MOD, thread);
}
1844
// STORE_SUBSCR: generic `container[key] = value` via the container's
// __setitem__ method. Raises TypeError when the container does not support
// item assignment.
HANDLER_INLINE Continue Interpreter::doStoreSubscr(Thread* thread, word) {
  HandleScope scope(thread);
  Object key(&scope, thread->stackPop());
  Object container(&scope, thread->stackPop());
  Object setitem(&scope, lookupMethod(thread, container, ID(__setitem__)));
  if (setitem.isError()) {
    if (setitem.isErrorNotFound()) {
      thread->raiseWithFmt(LayoutId::kTypeError,
                           "'%T' object does not support item assignment",
                           &container);
    } else {
      DCHECK(setitem.isErrorException(),
             "expected Error::exception() or Error::notFound()");
    }
    return Continue::UNWIND;
  }
  Object value(&scope, thread->stackPop());
  if (callMethod3(thread, setitem, container, key, value).isErrorException()) {
    return Continue::UNWIND;
  }
  return Continue::NEXT;
}
1867
1868HANDLER_INLINE Continue Interpreter::doStoreSubscrList(Thread* thread, word) {
1869 RawObject container = thread->stackPeek(1);
1870 RawObject key = thread->stackPeek(0);
1871 if (container.isList() && key.isSmallInt()) {
1872 word index = SmallInt::cast(key).value();
1873 RawList list = List::cast(container);
1874 word length = list.numItems();
1875 if (0 <= index && index < length) {
1876 RawObject value = thread->stackPeek(2);
1877 list.atPut(index, value);
1878 thread->stackDrop(3);
1879 return Continue::NEXT;
1880 }
1881 }
1882 EVENT_CACHE(STORE_SUBSCR_LIST);
1883 word cache = currentCacheIndex(thread->currentFrame());
1884 return storeSubscrUpdateCache(thread, cache);
1885}
1886
// STORE_SUBSCR specialized for dict receivers: hash the key and insert
// directly, bypassing the __setitem__ lookup. Falls back to the generic
// cache-update path when the receiver is no longer a dict.
HANDLER_INLINE Continue Interpreter::doStoreSubscrDict(Thread* thread, word) {
  RawObject container = thread->stackPeek(1);
  if (!container.isDict()) {
    EVENT_CACHE(STORE_SUBSCR_DICT);
    word cache = currentCacheIndex(thread->currentFrame());
    return storeSubscrUpdateCache(thread, cache);
  }
  HandleScope scope(thread);
  Dict dict(&scope, container);
  Object key(&scope, thread->stackPeek(0));
  Object value(&scope, thread->stackPeek(2));
  // Hashing may call a user-defined __hash__ and raise.
  Object hash_obj(&scope, Interpreter::hash(thread, key));
  if (hash_obj.isErrorException()) {
    return Continue::UNWIND;
  }
  word hash = SmallInt::cast(*hash_obj).value();
  if (dictAtPut(thread, dict, key, hash, value).isErrorException()) {
    return Continue::UNWIND;
  }
  // Pop value, container, and key only after the insert succeeded.
  thread->stackDrop(3);
  return Continue::NEXT;
}
1909
// Slow path for STORE_SUBSCR: look up __setitem__, record it in the inline
// cache (rewriting the opcode to the mono-/polymorphic form) when it is a
// plain function, then perform the store. Compiled functions deopt instead
// of updating caches.
NEVER_INLINE Continue Interpreter::storeSubscrUpdateCache(Thread* thread,
                                                          word cache) {
  HandleScope scope(thread);
  Frame* frame = thread->currentFrame();
  Function dependent(&scope, frame->function());
  if (dependent.isCompiled()) {
    return Continue::DEOPT;
  }
  Object key(&scope, thread->stackPop());
  Object container(&scope, thread->stackPop());
  Object setitem(&scope, lookupMethod(thread, container, ID(__setitem__)));
  if (setitem.isError()) {
    if (setitem.isErrorNotFound()) {
      thread->raiseWithFmt(LayoutId::kTypeError,
                           "'%T' object does not support item assignment",
                           &container);
    } else {
      DCHECK(setitem.isErrorException(),
             "expected Error::exception() or Error::notFound()");
    }
    return Continue::UNWIND;
  }
  if (setitem.isFunction()) {
    // Only plain functions are cacheable; the cache keys on the container's
    // layout and registers `dependent` for invalidation.
    MutableTuple caches(&scope, frame->caches());
    Str set_item_name(&scope,
                      thread->runtime()->symbols()->at(ID(__setitem__)));
    ICState next_cache_state =
        icUpdateAttr(thread, caches, cache, container.layoutId(), setitem,
                     set_item_name, dependent);
    rewriteCurrentBytecode(frame, next_cache_state == ICState::kMonomorphic
                                      ? STORE_SUBSCR_MONOMORPHIC
                                      : STORE_SUBSCR_POLYMORPHIC);
  }
  Object value(&scope, thread->stackPop());
  if (callMethod3(thread, setitem, container, key, value).isErrorException()) {
    return Continue::UNWIND;
  }
  return Continue::NEXT;
}
1949
// Invoke a cached __setitem__ for STORE_SUBSCR: rearrange the operand stack
// into calling-convention order and perform the call.
ALWAYS_INLINE Continue Interpreter::storeSubscr(Thread* thread,
                                                RawObject set_item_method) {
  DCHECK(set_item_method.isFunction(), "cached should be a function");
  // The shape of the frame before STORE_SUBSCR:
  //   2: value
  //   1: container
  //   0: key
  //
  // The shape of the frame is modified to call __setitem__ as follows:
  //   3: function (__setitem__)
  //   2: container
  //   1: key
  //   0: value
  RawObject value_raw = thread->stackPeek(2);
  thread->stackSetAt(2, set_item_method);
  thread->stackPush(value_raw);

  RawObject result = callFunction(thread, /*nargs=*/3, set_item_method);
  if (result.isErrorException()) {
    return Continue::UNWIND;
  }
  return Continue::NEXT;
}
1973
1974HANDLER_INLINE Continue Interpreter::doStoreSubscrMonomorphic(Thread* thread,
1975 word) {
1976 Frame* frame = thread->currentFrame();
1977 RawMutableTuple caches = MutableTuple::cast(frame->caches());
1978 LayoutId container_layout_id = thread->stackPeek(1).layoutId();
1979 word cache = currentCacheIndex(frame);
1980 bool is_found;
1981 RawObject cached =
1982 icLookupMonomorphic(caches, cache, container_layout_id, &is_found);
1983 if (!is_found) {
1984 EVENT_CACHE(STORE_SUBSCR_MONOMORPHIC);
1985 return storeSubscrUpdateCache(thread, cache);
1986 }
1987 return storeSubscr(thread, cached);
1988}
1989
1990HANDLER_INLINE Continue Interpreter::doStoreSubscrPolymorphic(Thread* thread,
1991 word) {
1992 Frame* frame = thread->currentFrame();
1993 RawObject container_raw = thread->stackPeek(1);
1994 LayoutId container_layout_id = container_raw.layoutId();
1995 word cache = currentCacheIndex(frame);
1996 bool is_found;
1997 RawObject cached = icLookupPolymorphic(MutableTuple::cast(frame->caches()),
1998 cache, container_layout_id, &is_found);
1999 if (!is_found) {
2000 EVENT_CACHE(STORE_SUBSCR_POLYMORPHIC);
2001 return storeSubscrUpdateCache(thread, cache);
2002 }
2003 return storeSubscr(thread, cached);
2004}
2005
2006HANDLER_INLINE Continue Interpreter::doStoreSubscrAnamorphic(Thread* thread,
2007 word arg) {
2008 RawObject container = thread->stackPeek(1);
2009 RawObject key = thread->stackPeek(0);
2010 switch (container.layoutId()) {
2011 case LayoutId::kDict:
2012 rewriteCurrentBytecode(thread->currentFrame(), STORE_SUBSCR_DICT);
2013 return doStoreSubscrDict(thread, arg);
2014 case LayoutId::kList:
2015 if (key.isSmallInt()) {
2016 rewriteCurrentBytecode(thread->currentFrame(), STORE_SUBSCR_LIST);
2017 return doStoreSubscrList(thread, arg);
2018 }
2019 break;
2020 default:
2021 break;
2022 }
2023 word cache = currentCacheIndex(thread->currentFrame());
2024 return storeSubscrUpdateCache(thread, cache);
2025}
2026
// DELETE_SUBSCR: `del container[key]` via the container's __delitem__
// method. Raises TypeError when the container does not support item
// deletion.
HANDLER_INLINE Continue Interpreter::doDeleteSubscr(Thread* thread, word) {
  HandleScope scope(thread);
  Object key(&scope, thread->stackPop());
  Object container(&scope, thread->stackPop());
  Object delitem(&scope, lookupMethod(thread, container, ID(__delitem__)));
  if (delitem.isError()) {
    if (delitem.isErrorNotFound()) {
      thread->raiseWithFmt(LayoutId::kTypeError,
                           "'%T' object does not support item deletion",
                           &container);
    } else {
      DCHECK(delitem.isErrorException(),
             "expected Error::exception() or Error::notFound()");
    }
    return Continue::UNWIND;
  }
  if (callMethod2(thread, delitem, container, key).isErrorException()) {
    return Continue::UNWIND;
  }
  return Continue::NEXT;
}
2048
// BINARY_LSHIFT: delegate `<<` to the generic binary-op dispatcher.
HANDLER_INLINE Continue Interpreter::doBinaryLshift(Thread* thread, word) {
  return doBinaryOperation(BinaryOp::LSHIFT, thread);
}

// BINARY_RSHIFT: delegate `>>` to the generic binary-op dispatcher.
HANDLER_INLINE Continue Interpreter::doBinaryRshift(Thread* thread, word) {
  return doBinaryOperation(BinaryOp::RSHIFT, thread);
}

// BINARY_AND: delegate `&` to the generic binary-op dispatcher.
HANDLER_INLINE Continue Interpreter::doBinaryAnd(Thread* thread, word) {
  return doBinaryOperation(BinaryOp::AND, thread);
}

// BINARY_XOR: delegate `^` to the generic binary-op dispatcher.
HANDLER_INLINE Continue Interpreter::doBinaryXor(Thread* thread, word) {
  return doBinaryOperation(BinaryOp::XOR, thread);
}

// BINARY_OR: delegate `|` to the generic binary-op dispatcher.
HANDLER_INLINE Continue Interpreter::doBinaryOr(Thread* thread, word) {
  return doBinaryOperation(BinaryOp::OR, thread);
}

// INPLACE_POWER: delegate `**=` to the generic in-place dispatcher.
HANDLER_INLINE Continue Interpreter::doInplacePower(Thread* thread, word) {
  return doInplaceOperation(BinaryOp::POW, thread);
}
2072
// GET_ITER: replace TOS with an iterator over it. Common builtin containers
// are special-cased by layout here so no __iter__ lookup or call is needed;
// every other type goes through the generic createIterator() slow path.
HANDLER_INLINE Continue Interpreter::doGetIter(Thread* thread, word) {
  HandleScope scope(thread);
  Runtime* runtime = thread->runtime();
  Object iterable(&scope, thread->stackPop());
  Object iterator(&scope, NoneType::object());
  switch (iterable.layoutId()) {
    case LayoutId::kList:
      iterator = runtime->newListIterator(iterable);
      break;
    case LayoutId::kDict: {
      // Iterating a dict yields its keys.
      Dict dict(&scope, *iterable);
      iterator = runtime->newDictKeyIterator(thread, dict);
      break;
    }
    case LayoutId::kGenerator:
      // Generators are their own iterators.
      iterator = *iterable;
      break;
    case LayoutId::kTuple: {
      Tuple tuple(&scope, *iterable);
      iterator = runtime->newTupleIterator(tuple, tuple.length());
      break;
    }
    case LayoutId::kRange: {
      Range range(&scope, *iterable);
      Int start_int(&scope, intUnderlying(range.start()));
      Int stop_int(&scope, intUnderlying(range.stop()));
      Int step_int(&scope, intUnderlying(range.step()));
      if (start_int.isLargeInt() || stop_int.isLargeInt() ||
          step_int.isLargeInt()) {
        // Bounds exceed word range: use the arbitrary-precision iterator.
        iterator = runtime->newLongRangeIterator(start_int, stop_int, step_int);
        break;
      }
      word start = start_int.asWord();
      word stop = stop_int.asWord();
      word step = step_int.asWord();
      word length = Slice::length(start, stop, step);
      if (SmallInt::isValid(length)) {
        // Fast path: bounds and length all fit in small ints.
        iterator = runtime->newRangeIterator(start, step, length);
        break;
      }
      iterator = runtime->newLongRangeIterator(start_int, stop_int, step_int);
      break;
    }
    case LayoutId::kStr: {
      Str str(&scope, *iterable);
      iterator = runtime->newStrIterator(str);
      break;
    }
    case LayoutId::kBytearray: {
      Bytearray byte_array(&scope, *iterable);
      iterator = runtime->newBytearrayIterator(thread, byte_array);
      break;
    }
    case LayoutId::kBytes: {
      Bytes bytes(&scope, *iterable);
      iterator = runtime->newBytesIterator(thread, bytes);
      break;
    }
    case LayoutId::kSet: {
      Set set(&scope, *iterable);
      iterator = thread->runtime()->newSetIterator(set);
      break;
    }
    default:
      break;
  }
  if (!iterator.isNoneType()) {
    thread->stackPush(*iterator);
    return Continue::NEXT;
  }
  // TODO(T44729606): Add caching, and turn into a simpler call for builtin
  // types with known iterator creating functions
  iterator = createIterator(thread, iterable);
  if (iterator.isErrorException()) return Continue::UNWIND;
  thread->stackPush(*iterator);
  return Continue::NEXT;
}
2150
// GET_YIELD_FROM_ITER: prepare TOS for YIELD_FROM. Generators pass through
// unchanged; coroutines are only allowed when the current function is itself
// a coroutine (or an iterable coroutine); everything else is replaced with
// an iterator over it.
HANDLER_INLINE Continue Interpreter::doGetYieldFromIter(Thread* thread, word) {
  HandleScope scope(thread);
  Object iterable(&scope, thread->stackTop());

  if (iterable.isGenerator()) return Continue::NEXT;

  if (iterable.isCoroutine()) {
    Function function(&scope, thread->currentFrame()->function());
    if (!(function.isCoroutine() || function.isIterableCoroutine())) {
      thread->raiseWithFmt(
          LayoutId::kTypeError,
          "cannot 'yield from' a coroutine object in a non-coroutine "
          "generator");
      return Continue::UNWIND;
    }
    return Continue::NEXT;
  }

  thread->stackDrop(1);
  // TODO(T44729661): Add caching, and turn into a simpler call for builtin
  // types with known iterator creating functions
  Object iterator(&scope, createIterator(thread, iterable));
  if (iterator.isErrorException()) return Continue::UNWIND;
  thread->stackPush(*iterator);
  return Continue::NEXT;
}
2177
2178HANDLER_INLINE Continue Interpreter::doPrintExpr(Thread* thread, word) {
2179 HandleScope scope(thread);
2180 Object value(&scope, thread->stackPop());
2181 ValueCell value_cell(&scope, thread->runtime()->displayHook());
2182 if (value_cell.isUnbound()) {
2183 UNIMPLEMENTED("RuntimeError: lost sys.displayhook");
2184 }
2185 // TODO(T55021263): Replace with non-recursive call
2186 Object display_hook(&scope, value_cell.value());
2187 return callMethod1(thread, display_hook, value).isErrorException()
2188 ? Continue::UNWIND
2189 : Continue::NEXT;
2190}
2191
2192HANDLER_INLINE Continue Interpreter::doLoadBuildClass(Thread* thread, word) {
2193 RawValueCell value_cell = ValueCell::cast(thread->runtime()->buildClass());
2194 thread->stackPush(value_cell.value());
2195 return Continue::NEXT;
2196}
2197
// YIELD_FROM: send `value` (TOS) into the sub-iterator below it. If the
// sub-iterator yields, re-push the yielded value and rewind the PC so this
// opcode runs again on resume; if it raised StopIteration, replace the
// sub-iterator on the stack with the StopIteration value and continue.
HANDLER_INLINE Continue Interpreter::doYieldFrom(Thread* thread, word) {
  HandleScope scope(thread);

  Object value(&scope, thread->stackPop());
  Object iterator(&scope, thread->stackTop());
  Object result(&scope, NoneType::object());
  if (iterator.isGenerator()) {
    result = generatorSend(thread, iterator, value);
  } else if (iterator.isCoroutine()) {
    result = coroutineSend(thread, iterator, value);
  } else if (!value.isNoneType()) {
    // A non-None value must be delivered via the iterator's send() method.
    Object send_method(&scope, lookupMethod(thread, iterator, ID(send)));
    if (send_method.isError()) {
      if (send_method.isErrorException()) {
        // Discard the lookup exception; the TypeError below supersedes it.
        thread->clearPendingException();
      } else {
        DCHECK(send_method.isErrorNotFound(),
               "expected Error::exception() or Error::notFound()");
      }
      thread->raiseWithFmt(LayoutId::kTypeError,
                           "iter() returned non-iterator");
      return Continue::UNWIND;
    }
    result = callMethod2(thread, send_method, iterator, value);
  } else {
    // Sending None is equivalent to advancing with __next__.
    Object next_method(&scope, lookupMethod(thread, iterator, ID(__next__)));
    if (next_method.isError()) {
      if (next_method.isErrorException()) {
        thread->clearPendingException();
      } else {
        DCHECK(next_method.isErrorNotFound(),
               "expected Error::exception() or Error::notFound()");
      }
      thread->raiseWithFmt(LayoutId::kTypeError,
                           "iter() returned non-iterator");
      return Continue::UNWIND;
    }
    result = callMethod1(thread, next_method, iterator);
  }
  if (result.isErrorException()) {
    if (!thread->hasPendingStopIteration()) return Continue::UNWIND;

    // Sub-iterator is exhausted: its StopIteration value becomes the result
    // of the `yield from` expression.
    thread->stackSetTop(thread->pendingStopIterationValue());
    thread->clearPendingException();
    return Continue::NEXT;
  }

  // Decrement PC: We want this to re-execute until the subiterator is
  // exhausted.
  Frame* frame = thread->currentFrame();
  frame->setVirtualPC(frame->virtualPC() - kCodeUnitSize);
  thread->stackPush(*result);
  return Continue::YIELD;
}
2252
// Normalize the object on TOS into something YIELD_FROM can drive for an
// await-style construct. Coroutines, async generators, and iterable-
// coroutine generators are accepted as-is (TOS unchanged); otherwise TOS is
// replaced with the result of calling __await__(), which must be an
// iterator and must not be a coroutine. Returns a non-error object on
// success or raises TypeError with `invalid_type_message`.
RawObject Interpreter::awaitableIter(Thread* thread,
                                     const char* invalid_type_message) {
  HandleScope scope(thread);
  Object obj(&scope, thread->stackTop());
  if (obj.isCoroutine() || obj.isAsyncGenerator()) {
    return *obj;
  }
  if (obj.isGenerator()) {
    // A generator is only awaitable when decorated as an iterable coroutine
    // (e.g. via @types.coroutine); check its underlying function's flag.
    Generator generator(&scope, *obj);
    GeneratorFrame generator_frame(&scope, generator.generatorFrame());
    Function func(&scope, generator_frame.function());
    if (func.isIterableCoroutine()) {
      return *obj;
    }
    return thread->raiseWithFmt(LayoutId::kTypeError, invalid_type_message);
  }
  // Generic object: pop it and substitute the result of __await__() below.
  thread->stackPop();
  Object await(&scope, lookupMethod(thread, obj, ID(__await__)));
  if (await.isError()) {
    if (await.isErrorException()) {
      // Discard the lookup exception; the TypeError below supersedes it.
      thread->clearPendingException();
    } else {
      DCHECK(await.isErrorNotFound(),
             "expected Error::exception() or Error::notFound()");
    }
    return thread->raiseWithFmt(LayoutId::kTypeError, invalid_type_message);
  }
  Object result(&scope, callMethod1(thread, await, obj));
  if (result.isError()) return *result;
  if (result.isGenerator()) {
    // An iterable-coroutine generator from __await__ is rejected just like a
    // real coroutine would be.
    Generator gen(&scope, *result);
    GeneratorFrame gen_frame(&scope, gen.generatorFrame());
    Function gen_func(&scope, gen_frame.function());
    if (gen_func.isIterableCoroutine()) {
      return thread->raiseWithFmt(LayoutId::kTypeError,
                                  "__await__() returned a coroutine");
    }
  }
  if (result.isCoroutine()) {
    return thread->raiseWithFmt(LayoutId::kTypeError,
                                "__await__() returned a coroutine");
  }
  // This check is lower priority than for coroutine above which will also fail
  // isIterator() and raise TypeError but with a different string.
  if (!thread->runtime()->isIterator(thread, result)) {
    return thread->raiseWithFmt(
        LayoutId::kTypeError, "__await__() returned non-iterator of type '%T'",
        &result);
  }
  thread->stackPush(*result);
  return *obj;
}
2305
// GET_AWAITABLE: normalize TOS into an awaitable iterator via awaitableIter,
// additionally rejecting a coroutine that is already being awaited by
// another consumer.
HANDLER_INLINE Continue Interpreter::doGetAwaitable(Thread* thread, word) {
  // TODO(T67736679) Add inline caching for the lookupMethod() in awaitableIter.
  RawObject iter =
      awaitableIter(thread, "object can't be used in 'await' expression");
  if (iter.isError()) {
    return Continue::UNWIND;
  }
  if (iter.isCoroutine()) {
    // A coroutine with a non-None yield-from link is already being driven.
    if (!findYieldFrom(GeneratorBase::cast(iter)).isNoneType()) {
      thread->raiseWithFmt(LayoutId::kRuntimeError,
                           "coroutine is being awaited already");
      return Continue::UNWIND;
    }
  }
  return Continue::NEXT;
}
2322
// INPLACE_LSHIFT: delegate `<<=` to the generic in-place dispatcher.
HANDLER_INLINE Continue Interpreter::doInplaceLshift(Thread* thread, word) {
  return doInplaceOperation(BinaryOp::LSHIFT, thread);
}

// INPLACE_RSHIFT: delegate `>>=` to the generic in-place dispatcher.
HANDLER_INLINE Continue Interpreter::doInplaceRshift(Thread* thread, word) {
  return doInplaceOperation(BinaryOp::RSHIFT, thread);
}

// INPLACE_AND: delegate `&=` to the generic in-place dispatcher.
HANDLER_INLINE Continue Interpreter::doInplaceAnd(Thread* thread, word) {
  return doInplaceOperation(BinaryOp::AND, thread);
}

// INPLACE_XOR: delegate `^=` to the generic in-place dispatcher.
HANDLER_INLINE Continue Interpreter::doInplaceXor(Thread* thread, word) {
  return doInplaceOperation(BinaryOp::XOR, thread);
}

// INPLACE_OR: delegate `|=` to the generic in-place dispatcher.
HANDLER_INLINE Continue Interpreter::doInplaceOr(Thread* thread, word) {
  return doInplaceOperation(BinaryOp::OR, thread);
}
2342
// WITH_CLEANUP_START: begin running __exit__ when leaving a `with` block,
// either on normal exit (TOS is None) or while handling an exception. Calls
// exit(exc, value, traceback) and leaves exc plus the call result on the
// stack for WITH_CLEANUP_FINISH.
HANDLER_INLINE Continue Interpreter::doWithCleanupStart(Thread* thread, word) {
  HandleScope scope(thread);
  Frame* frame = thread->currentFrame();
  Object exc(&scope, thread->stackPop());
  Object value(&scope, NoneType::object());
  Object traceback(&scope, NoneType::object());
  Object exit(&scope, NoneType::object());

  // The stack currently contains a sequence of values understood by
  // END_FINALLY, followed by __exit__ from the context manager. We need to
  // determine the location of __exit__ and remove it from the stack, shifting
  // everything above it down to compensate.
  if (exc.isNoneType()) {
    // The with block exited normally. __exit__ is just below the None.
    exit = thread->stackTop();
    thread->stackSetTop(NoneType::object());
  } else {
    DCHECK(thread->runtime()->isInstanceOfType(*exc) &&
               exc.rawCast<RawType>().isBaseExceptionSubclass(),
           "expected BaseException subclass");
    // The stack contains the caught exception, the previous exception state,
    // then __exit__. Grab __exit__ then shift everything else down.
    exit = thread->stackPeek(5);
    for (word i = 5; i > 0; i--) {
      thread->stackSetAt(i, thread->stackPeek(i - 1));
    }

    // Put exc at the top of the stack and grab value/traceback from below it.
    thread->stackSetTop(*exc);
    value = thread->stackPeek(1);
    traceback = thread->stackPeek(2);

    // We popped __exit__ out from under the depth recorded by the top
    // ExceptHandler block, so adjust it.
    TryBlock block = frame->blockStackPop();
    DCHECK(block.kind() == TryBlock::kExceptHandler,
           "Unexpected TryBlock Kind");
    frame->blockStackPush(
        TryBlock(block.kind(), block.handler(), block.level() - 1));
  }

  // Push exc, to be consumed by WITH_CLEANUP_FINISH.
  thread->stackPush(*exc);

  // Call exit(exc, value, traceback), leaving the result on the stack for
  // WITH_CLEANUP_FINISH.
  thread->stackPush(*exit);
  thread->stackPush(*exc);
  thread->stackPush(*value);
  thread->stackPush(*traceback);
  return tailcall(thread, 3);
}
2395
// WITH_CLEANUP_FINISH: consume the __exit__ result and the exception marker
// pushed by WITH_CLEANUP_START. A truthy result while an exception was in
// flight suppresses the exception: the handler block is unwound and None is
// pushed for END_FINALLY.
HANDLER_INLINE Continue Interpreter::doWithCleanupFinish(Thread* thread, word) {
  HandleScope scope(thread);
  Object result(&scope, thread->stackPop());
  Object exc(&scope, thread->stackPop());
  if (exc.isNoneType()) return Continue::NEXT;

  // Truthiness may invoke a user-defined __bool__ and raise.
  Object is_true(&scope, isTrue(thread, *result));
  if (is_true.isErrorException()) return Continue::UNWIND;
  if (*is_true == Bool::trueObj()) {
    Frame* frame = thread->currentFrame();
    TryBlock block = frame->blockStackPop();
    DCHECK(block.kind() == TryBlock::kExceptHandler, "expected kExceptHandler");
    unwindExceptHandler(thread, block);
    thread->stackPush(NoneType::object());
  }
  return Continue::NEXT;
}
2413
// RETURN_VALUE: signal the dispatch loop to pop the frame; the return value
// is already on top of the stack.
HANDLER_INLINE Continue Interpreter::doReturnValue(Thread*, word) {
  return Continue::RETURN;
}
2417
// SETUP_ANNOTATIONS: ensure an `__annotations__` dict exists in the current
// namespace — the module for module bodies, or the implicit globals mapping
// for class bodies. Creates an empty dict only when not already present.
HANDLER_INLINE Continue Interpreter::doSetupAnnotations(Thread* thread, word) {
  HandleScope scope(thread);
  Runtime* runtime = thread->runtime();
  Frame* frame = thread->currentFrame();
  Str dunder_annotations(&scope, runtime->symbols()->at(ID(__annotations__)));
  if (frame->implicitGlobals().isNoneType()) {
    // Module body
    Module module(&scope, frame->function().moduleObject());
    if (moduleAt(module, dunder_annotations).isErrorNotFound()) {
      Object annotations(&scope, runtime->newDict());
      moduleAtPut(thread, module, dunder_annotations, annotations);
    }
  } else {
    // Class body
    Object implicit_globals(&scope, frame->implicitGlobals());
    if (implicit_globals.isDict()) {
      // Plain dict namespace: probe and insert directly.
      Dict implicit_globals_dict(&scope, frame->implicitGlobals());
      word hash = strHash(thread, *dunder_annotations);
      Object include_result(&scope, dictIncludes(thread, implicit_globals_dict,
                                                 dunder_annotations, hash));
      if (include_result.isErrorException()) {
        return Continue::UNWIND;
      }
      if (include_result == Bool::falseObj()) {
        Object annotations(&scope, runtime->newDict());
        if (dictAtPut(thread, implicit_globals_dict, dunder_annotations, hash,
                      annotations)
                .isErrorException()) {
          return Continue::UNWIND;
        }
      }
    } else {
      // Arbitrary mapping (custom metaclass __prepare__): use __getitem__ /
      // __setitem__, treating KeyError as "not present".
      if (objectGetItem(thread, implicit_globals, dunder_annotations)
              .isErrorException()) {
        if (!thread->pendingExceptionMatches(LayoutId::kKeyError)) {
          return Continue::UNWIND;
        }
        thread->clearPendingException();
        Object annotations(&scope, runtime->newDict());
        if (objectSetItem(thread, implicit_globals, dunder_annotations,
                          annotations)
                .isErrorException()) {
          return Continue::UNWIND;
        }
      }
    }
  }
  return Continue::NEXT;
}
2467
// YIELD_VALUE: suspend the current generator, yielding TOS to the caller.
HANDLER_INLINE Continue Interpreter::doYieldValue(Thread* thread, word) {
  Frame* frame = thread->currentFrame();
  // Wrap values directly yielded from asynchronous generator. This
  // distinguishes generator-like yields from async-like yields which propagate
  // from awaitables via `YIELD_FROM`.
  if (Code::cast(frame->code()).isAsyncGenerator()) {
    HandleScope scope(thread);
    Object value(&scope, thread->stackPop());
    Runtime* runtime = thread->runtime();
    Layout async_gen_wrapped_value_layout(
        &scope, runtime->layoutAt(LayoutId::kAsyncGeneratorWrappedValue));
    AsyncGeneratorWrappedValue wrapped_value(
        &scope, runtime->newInstance(async_gen_wrapped_value_layout));
    wrapped_value.setValue(*value);
    thread->stackPush(*wrapped_value);
  }
  return Continue::YIELD;
}
2486
2487static RawObject implicitGlobalsAtPut(Thread* thread, Frame* frame,
2488 const Object& implicit_globals_obj,
2489 const Str& name, const Object& value) {
2490 HandleScope scope(thread);
2491 if (implicit_globals_obj.isNoneType()) {
2492 Module module(&scope, frame->function().moduleObject());
2493 moduleAtPut(thread, module, name, value);
2494 return NoneType::object();
2495 }
2496 if (implicit_globals_obj.isDict()) {
2497 Dict implicit_globals(&scope, *implicit_globals_obj);
2498 dictAtPutByStr(thread, implicit_globals, name, value);
2499 } else {
2500 Object result(&scope,
2501 objectSetItem(thread, implicit_globals_obj, name, value));
2502 if (result.isErrorException()) return *result;
2503 }
2504 return NoneType::object();
2505}
2506
2507static RawObject callImportAllFrom(Thread* thread, Frame* frame,
2508 const Object& object) {
2509 HandleScope scope(thread);
2510 Object implicit_globals(&scope, frame->implicitGlobals());
2511 if (implicit_globals.isNoneType()) {
2512 Module module(&scope, frame->function().moduleObject());
2513 implicit_globals = module.moduleProxy();
2514 }
2515 return thread->invokeFunction2(ID(builtins), ID(_import_all_from),
2516 implicit_globals, object);
2517}
2518
// Implements `from <object> import *`: copy the names listed by __all__ (or
// all non-underscore module attributes when __all__ is absent) into the
// current namespace. Returns None on success or the raised error.
RawObject Interpreter::importAllFrom(Thread* thread, Frame* frame,
                                     const Object& object) {
  // We have a short-cut if `object` is a module and `__all__` does not exist
  // or is a tuple or list; otherwise call `builtins._import_all_from`.
  if (!object.isModule()) {
    return callImportAllFrom(thread, frame, object);
  }

  HandleScope scope(thread);
  Runtime* runtime = thread->runtime();
  bool skip_names_with_underscore_prefix = false;
  Module module(&scope, *object);
  Object dunder_all(&scope, runtime->symbols()->at(ID(__all__)));
  Object all_obj(&scope, moduleGetAttribute(thread, module, dunder_all));
  if (all_obj.isErrorException()) return *all_obj;
  if (all_obj.isErrorNotFound()) {
    // No __all__: import every key, but skip "_"-prefixed names below.
    all_obj = moduleKeys(thread, module);
    skip_names_with_underscore_prefix = true;
  }
  Tuple all(&scope, runtime->emptyTuple());
  word all_len;
  if (all_obj.isList()) {
    all = List::cast(*all_obj).items();
    all_len = List::cast(*all_obj).numItems();
  } else if (all_obj.isTuple()) {
    all = Tuple::cast(*all_obj);
    all_len = all.length();
  } else {
    // __all__ of another type: defer to the generic Python-level helper.
    return callImportAllFrom(thread, frame, object);
  }

  Object implicit_globals(&scope, frame->implicitGlobals());
  Object name(&scope, NoneType::object());
  Str interned(&scope, Str::empty());
  Object value(&scope, NoneType::object());
  for (word i = 0; i < all_len; i++) {
    name = all.at(i);
    interned = attributeName(thread, name);
    if (interned.isErrorException()) return *interned;
    if (skip_names_with_underscore_prefix && interned.length() > 0 &&
        interned.byteAt(0) == '_') {
      continue;
    }
    value = moduleGetAttribute(thread, module, interned);
    if (value.isErrorNotFound()) {
      // A name listed in __all__ must actually exist on the module.
      return moduleRaiseAttributeError(thread, module, interned);
    }
    if (value.isErrorException()) return *value;
    value =
        implicitGlobalsAtPut(thread, frame, implicit_globals, interned, value);
    if (value.isErrorException()) return *value;
  }
  return NoneType::object();
}
2573
2574HANDLER_INLINE Continue Interpreter::doImportStar(Thread* thread, word) {
2575 HandleScope scope(thread);
2576 Frame* frame = thread->currentFrame();
2577
2578 // Pre-python3 this used to merge the locals with the locals dict. However,
2579 // that's not necessary anymore. You can't import * inside a function
2580 // body anymore.
2581
2582 Object object(&scope, thread->stackPop());
2583 if (importAllFrom(thread, frame, object).isErrorException()) {
2584 return Continue::UNWIND;
2585 }
2586 return Continue::NEXT;
2587}
2588
2589HANDLER_INLINE Continue Interpreter::doPopBlock(Thread* thread, word) {
2590 Frame* frame = thread->currentFrame();
2591 frame->blockStackPop();
2592 return Continue::NEXT;
2593}
2594
// END_ASYNC_FOR: terminate an `async for` loop. If the exception type on TOS
// is StopAsyncIteration (or a subclass), the loop is simply exhausted:
// discard the exception state and jump past the loop body. Otherwise
// re-raise the exception.
HANDLER_INLINE Continue Interpreter::doEndAsyncFor(Thread* thread, word arg) {
  Frame* frame = thread->currentFrame();
  Runtime* runtime = thread->runtime();
  RawObject exc = thread->stackPop();
  DCHECK(runtime->isInstanceOfType(exc) &&
             exc.rawCast<RawType>().isBaseExceptionSubclass(),
         "Expected BaseException subclass");
  // Check if TOS is StopAsyncIteration type or a subclass of it.
  if (typeIsSubclass(exc, runtime->typeAt(LayoutId::kStopAsyncIteration))) {
    TryBlock block = frame->blockStackPop();
    unwindExceptHandler(thread, block);
    // Discard the iterator and skip forward by `arg` code units.
    thread->stackPop();
    frame->setVirtualPC(frame->virtualPC() + arg * kCodeUnitScale);
    return Continue::NEXT;
  }

  // Not StopAsyncIteration: restore the exception (value and traceback are
  // still on the stack) and unwind.
  thread->setPendingExceptionType(exc);
  thread->setPendingExceptionValue(thread->stackPop());
  thread->setPendingExceptionTraceback(thread->stackPop());
  return Continue::UNWIND;
}
2616
// Implements the END_FINALLY opcode. TOS describes why the finally block was
// entered:
//   - None: normal fall-through; just continue.
//   - SmallInt: -1 re-raises the pending exception; any other value is a
//     bytecode offset saved by CALL_FINALLY to resume at.
//   - an exception type: re-raise it with the value/traceback beneath it.
HANDLER_INLINE Continue Interpreter::doEndFinally(Thread* thread, word) {
  RawObject top = thread->stackPop();
  if (top.isNoneType()) {
    return Continue::NEXT;
  }
  if (top.isSmallInt()) {
    word value = SmallInt::cast(top).value();
    if (value == -1 && thread->hasPendingException()) {
      return Continue::UNWIND;
    }
    // Jump back to the instruction following the CALL_FINALLY that entered
    // this block.
    Frame* frame = thread->currentFrame();
    frame->setVirtualPC(value);
    return Continue::NEXT;
  }
  DCHECK(thread->runtime()->isInstanceOfType(top) &&
             top.rawCast<RawType>().isBaseExceptionSubclass(),
         "expected None, SmallInt or BaseException subclass");
  // Re-raise: type was popped above; value and traceback follow on the stack.
  thread->setPendingExceptionType(top);
  thread->setPendingExceptionValue(thread->stackPop());
  thread->setPendingExceptionTraceback(thread->stackPop());
  return Continue::UNWIND;
}
2639
// Implements the POP_EXCEPT opcode: pops the except-handler block and
// restores the previously caught exception state (type, value, traceback)
// that the handler saved on the value stack.
HANDLER_INLINE Continue Interpreter::doPopExcept(Thread* thread, word) {
  Frame* frame = thread->currentFrame();

  TryBlock block = frame->blockStackPop();
  DCHECK(block.kind() == TryBlock::kExceptHandler,
         "popped block is not an except handler");
  word level = block.level();
  word current_level = thread->valueStackSize();
  // The only things left on the stack at this point should be the exc_type,
  // exc_value, exc_traceback values and potentially a result value.
  DCHECK(current_level == level + 3 || current_level == level + 4,
         "unexpected level");
  thread->setCaughtExceptionType(thread->stackPop());
  thread->setCaughtExceptionValue(thread->stackPop());
  thread->setCaughtExceptionTraceback(thread->stackPop());

  return Continue::NEXT;
}
2658
2659HANDLER_INLINE Continue Interpreter::doPopFinally(Thread* thread, word arg) {
2660 HandleScope scope(thread);
2661 Object res(&scope, NoneType::object());
2662 if (arg != 0) {
2663 res = thread->stackPop();
2664 }
2665 Object exc(&scope, thread->stackPop());
2666 if (exc.isNoneType() || exc.isInt()) {
2667 } else {
2668 thread->stackPop();
2669 thread->stackPop();
2670 Frame* frame = thread->currentFrame();
2671 TryBlock block = frame->blockStackPop();
2672 if (block.kind() != TryBlock::Kind::kExceptHandler) {
2673 thread->raiseWithFmt(LayoutId::kSystemError,
2674 "popped block is not an except handler");
2675 return Continue::UNWIND;
2676 }
2677 thread->setCaughtExceptionType(thread->stackPop());
2678 thread->setCaughtExceptionValue(thread->stackPop());
2679 thread->setCaughtExceptionTraceback(thread->stackPop());
2680 }
2681 if (arg != 0) {
2682 thread->stackPush(*res);
2683 }
2684 return Continue::NEXT;
2685}
2686
2687HANDLER_INLINE Continue Interpreter::doCallFinally(Thread* thread, word arg) {
2688 Frame* frame = thread->currentFrame();
2689 word next_pc = frame->virtualPC();
2690 thread->stackPush(SmallInt::fromWord(next_pc));
2691 frame->setVirtualPC(next_pc + arg * kCodeUnitScale);
2692 return Continue::NEXT;
2693}
2694
2695HANDLER_INLINE Continue Interpreter::doStoreName(Thread* thread, word arg) {
2696 Frame* frame = thread->currentFrame();
2697 HandleScope scope(thread);
2698 RawObject names = Code::cast(frame->code()).names();
2699 Str name(&scope, Tuple::cast(names).at(arg));
2700 Object value(&scope, thread->stackPop());
2701 Object implicit_globals(&scope, frame->implicitGlobals());
2702 if (implicitGlobalsAtPut(thread, frame, implicit_globals, name, value)
2703 .isErrorException()) {
2704 return Continue::UNWIND;
2705 }
2706 return Continue::NEXT;
2707}
2708
// Raises a NameError reporting that `name` is unbound and signals the
// interpreter loop to unwind.
static Continue raiseUndefinedName(Thread* thread, const Object& name) {
  thread->raiseWithFmt(LayoutId::kNameError, "name '%S' is not defined", &name);
  return Continue::UNWIND;
}
2713
// Implements the DELETE_NAME opcode: removes the binding for the name at
// index `arg` of the code object's names tuple from the implicit globals,
// raising NameError if the name is not bound.
HANDLER_INLINE Continue Interpreter::doDeleteName(Thread* thread, word arg) {
  Frame* frame = thread->currentFrame();
  HandleScope scope(thread);
  Object implicit_globals_obj(&scope, frame->implicitGlobals());
  // Forward to doDeleteGlobal() when implicit globals and globals are the same.
  // This avoids duplicating all the cache invalidation logic here.
  // TODO(T47581831) This should be removed and invalidation should happen when
  // changing the globals dictionary.
  if (implicit_globals_obj.isNoneType()) {
    return doDeleteGlobal(thread, arg);
  }
  RawObject names = Code::cast(frame->code()).names();
  Str name(&scope, Tuple::cast(names).at(arg));
  if (implicit_globals_obj.isDict()) {
    // Fast path: implicit globals are a plain dict.
    Dict implicit_globals(&scope, *implicit_globals_obj);
    if (dictRemoveByStr(thread, implicit_globals, name).isErrorNotFound()) {
      return raiseUndefinedName(thread, name);
    }
  } else {
    // Arbitrary mapping (e.g. a custom namespace in a class body): delete via
    // __delitem__; any failure is reported as an undefined name.
    if (objectDelItem(thread, implicit_globals_obj, name).isErrorException()) {
      thread->clearPendingException();
      return raiseUndefinedName(thread, name);
    }
  }
  return Continue::NEXT;
}
2740
// Slow path for Interpreter::unpackSequence(): unpacks an arbitrary iterable
// via the iterator protocol. Pushes exactly `length` elements onto the value
// stack with the first element ending on top. Returns None on success, or an
// Error with a pending TypeError/ValueError on failure.
static NEVER_INLINE RawObject unpackSequenceIterable(Thread* thread,
                                                     word length,
                                                     RawObject iterable_raw) {
  HandleScope scope(thread);
  Object iterable(&scope, iterable_raw);
  Object iterator(&scope, Interpreter::createIterator(thread, iterable));
  if (iterator.isErrorException()) {
    Runtime* runtime = thread->runtime();
    // Replace the generic TypeError with a more descriptive "cannot unpack"
    // message when the object genuinely supports no iteration protocol.
    if (thread->pendingExceptionMatches(LayoutId::kTypeError) &&
        typeLookupInMroById(thread, runtime->typeOf(*iterable), ID(__iter__))
            .isErrorNotFound() &&
        !runtime->isSequence(thread, iterable)) {
      thread->clearPendingException();
      return thread->raiseWithFmt(LayoutId::kTypeError,
                                  "cannot unpack non-iterable %T object",
                                  &iterable);
    }
    return *iterator;
  }

  Object next_method(&scope,
                     Interpreter::lookupMethod(thread, iterator, ID(__next__)));
  if (next_method.isError()) {
    if (next_method.isErrorException()) {
      thread->clearPendingException();
    } else {
      DCHECK(next_method.isErrorNotFound(),
             "expected Error::exception() or Error::notFound()");
    }
    return thread->raiseWithFmt(LayoutId::kTypeError,
                                "iter() returned non-iterator");
  }
  // Drain the iterator onto the stack, verifying it yields exactly `length`
  // values.
  word num_pushed = 0;
  Object value(&scope, RawNoneType::object());
  for (;;) {
    value = Interpreter::callMethod1(thread, next_method, iterator);
    if (value.isErrorException()) {
      if (thread->clearPendingStopIteration()) {
        if (num_pushed == length) break;
        return thread->raiseWithFmt(LayoutId::kValueError,
                                    "not enough values to unpack");
      }
      return *value;
    }
    if (num_pushed == length) {
      return thread->raiseWithFmt(LayoutId::kValueError,
                                  "too many values to unpack");
    }
    thread->stackPush(*value);
    ++num_pushed;
  }

  // swap values on the stack: they were pushed in iteration order, but the
  // opcode contract puts the first element on top.
  Object tmp(&scope, NoneType::object());
  for (word i = 0, j = num_pushed - 1, half = num_pushed / 2; i < half;
       ++i, --j) {
    tmp = thread->stackPeek(i);
    thread->stackSetAt(i, thread->stackPeek(j));
    thread->stackSetAt(j, *tmp);
  }
  return NoneType::object();
}
2803
// Pushes the `length` elements of `iterable` onto the value stack so the
// first element ends on top. Tuples, lists and struct sequences are unpacked
// directly from their backing tuple; anything else goes through the generic
// iterator protocol in unpackSequenceIterable(). Returns None on success, or
// an Error with a pending ValueError when the element count differs.
HANDLER_INLINE USED RawObject Interpreter::unpackSequence(Thread* thread,
                                                          word length,
                                                          RawObject iterable) {
  word count;
  if (iterable.isTuple()) {
    count = Tuple::cast(iterable).length();
  } else if (iterable.isList()) {
    // Unpack straight from the list's backing items tuple.
    count = List::cast(iterable).numItems();
    iterable = List::cast(iterable).items();
  } else if (thread->runtime()->typeOf(iterable).hasFlag(
                 Type::Flag::kIsStructseq)) {
    // Struct sequences wrap a tuple; unwrap and unpack that.
    iterable = Tuple::cast(iterable.rawCast<RawUserTupleBase>().value());
    count = Tuple::cast(iterable).length();
  } else {
    return unpackSequenceIterable(thread, length, iterable);
  }
  if (count != length) {
    return thread->raiseWithFmt(LayoutId::kValueError,
                                count < length ? "not enough values to unpack"
                                               : "too many values to unpack");
  }
  // Push in reverse order so element 0 lands on top of the stack.
  for (word i = count - 1; i >= 0; i--) {
    thread->stackPush(Tuple::cast(iterable).at(i));
  }
  return NoneType::object();
}
2830
2831HANDLER_INLINE Continue Interpreter::doUnpackSequence(Thread* thread,
2832 word arg) {
2833 RawObject iterable = thread->stackPop();
2834 if (unpackSequence(thread, arg, iterable).isErrorException()) {
2835 return Continue::UNWIND;
2836 }
2837 return Continue::NEXT;
2838}
2839
// Implements the uncached FOR_ITER opcode by going through the cache-update
// path with an invalid cache index (-1), which performs the __next__ lookup
// without writing an inline cache or rewriting the opcode.
HANDLER_INLINE Continue Interpreter::doForIter(Thread* thread, word arg) {
  return forIterUpdateCache(thread, arg, -1);
}
2843
// Slow path for FOR_ITER: looks up __next__ on the iterator at TOS, calls it
// and pushes the result. When `cache` is a valid index (>= 0) and __next__ is
// a plain function, records the lookup in the inline cache and rewrites the
// current opcode to the monomorphic/polymorphic cached variant. On
// StopIteration the iterator is popped and execution jumps forward by `arg`
// code units.
Continue Interpreter::forIterUpdateCache(Thread* thread, word arg, word cache) {
  Frame* frame = thread->currentFrame();
  HandleScope scope(thread);
  Function function(&scope, frame->function());
  if (function.isCompiled()) {
    // Compiled code cannot have its bytecode rewritten; deoptimize instead.
    return Continue::DEOPT;
  }
  Object iter(&scope, thread->stackTop());
  Type type(&scope, thread->runtime()->typeOf(*iter));
  Object next(&scope, typeLookupInMroById(thread, *type, ID(__next__)));
  if (next.isErrorNotFound()) {
    thread->raiseWithFmt(LayoutId::kTypeError, "iter() returned non-iterator");
    return Continue::UNWIND;
  }

  Object result(&scope, NoneType::object());
  if (next.isFunction()) {
    if (cache >= 0) {
      // Cache the __next__ function keyed by the iterator's layout and pick
      // the specialized opcode matching the cache's new state.
      MutableTuple caches(&scope, frame->caches());
      Str next_name(&scope, thread->runtime()->symbols()->at(ID(__next__)));
      Function dependent(&scope, frame->function());
      ICState next_cache_state = icUpdateAttr(
          thread, caches, cache, iter.layoutId(), next, next_name, dependent);
      rewriteCurrentBytecode(frame, next_cache_state == ICState::kMonomorphic
                                        ? FOR_ITER_MONOMORPHIC
                                        : FOR_ITER_POLYMORPHIC);
    }
    result = Interpreter::callMethod1(thread, next, iter);
  } else {
    // __next__ is a non-function descriptor: bind it first, then call it.
    // Such lookups are not cached.
    next = resolveDescriptorGet(thread, next, iter, type);
    if (next.isErrorException()) return Continue::UNWIND;
    result = call0(thread, next);
  }

  if (result.isErrorException()) {
    if (thread->clearPendingStopIteration()) {
      // Iterator exhausted: drop it and jump past the loop body.
      thread->stackPop();
      frame->setVirtualPC(frame->virtualPC() + arg * kCodeUnitScale);
      return Continue::NEXT;
    }
    return Continue::UNWIND;
  }
  thread->stackPush(*result);
  return Continue::NEXT;
}
2889
2890static RawObject builtinsAt(Thread* thread, const Module& module,
2891 const Object& name) {
2892 HandleScope scope(thread);
2893 Object builtins(&scope, moduleAtById(thread, module, ID(__builtins__)));
2894 Module builtins_module(&scope, *module);
2895 if (builtins.isModuleProxy()) {
2896 builtins_module = ModuleProxy::cast(*builtins).module();
2897 } else if (builtins.isModule()) {
2898 builtins_module = *builtins;
2899 } else if (builtins.isErrorNotFound()) {
2900 return Error::notFound();
2901 } else {
2902 return objectGetItem(thread, builtins, name);
2903 }
2904 return moduleAt(builtins_module, name);
2905}
2906
2907static RawObject globalsAt(Thread* thread, const Module& module,
2908 const Object& name) {
2909 RawObject result = moduleValueCellAt(thread, module, name);
2910 if (!result.isErrorNotFound() && !ValueCell::cast(result).isPlaceholder()) {
2911 return ValueCell::cast(result).value();
2912 }
2913 return builtinsAt(thread, module, name);
2914}
2915
// Common tail for the cached FOR_ITER variants: calls the cached
// `next_method` (which must be a plain function) on the iterator at TOS and
// pushes the yielded value. On StopIteration, pops the iterator and jumps
// forward by `arg` code units instead.
ALWAYS_INLINE Continue Interpreter::forIter(Thread* thread,
                                            RawObject next_method, word arg) {
  DCHECK(next_method.isFunction(), "Unexpected next_method value");
  Frame* frame = thread->currentFrame();
  RawObject iter = thread->stackTop();
  // Set up the callee frame: callable followed by its single argument.
  thread->stackPush(next_method);
  thread->stackPush(iter);
  RawObject result = callFunction(thread, /*nargs=*/1, next_method);
  if (result.isErrorException()) {
    if (thread->clearPendingStopIteration()) {
      // Iterator exhausted: drop it and jump past the loop body.
      thread->stackPop();
      frame->setVirtualPC(frame->virtualPC() + arg * kCodeUnitScale);
      return Continue::NEXT;
    }
    return Continue::UNWIND;
  }
  thread->stackPush(result);
  return Continue::NEXT;
}
2935
2936static Continue retryForIterAnamorphic(Thread* thread, word arg) {
2937 // Revert the opcode, and retry FOR_ITER_CACHED.
2938 Frame* frame = thread->currentFrame();
2939 if (frame->function().isCompiled()) {
2940 return Continue::DEOPT;
2941 }
2942 rewriteCurrentBytecode(frame, FOR_ITER_ANAMORPHIC);
2943 return Interpreter::doForIterAnamorphic(thread, arg);
2944}
2945
// Specialized FOR_ITER for list iterators: advances the iterator inline
// without calling __next__. Falls back to the anamorphic handler when TOS is
// not a list iterator.
HANDLER_INLINE Continue Interpreter::doForIterList(Thread* thread, word arg) {
  Frame* frame = thread->currentFrame();
  RawObject iter_obj = thread->stackTop();
  if (!iter_obj.isListIterator()) {
    EVENT_CACHE(FOR_ITER_LIST);
    return retryForIterAnamorphic(thread, arg);
  }
  // NOTE: This should be synced with listIteratorNext in list-builtins.cpp.
  RawListIterator iter = ListIterator::cast(iter_obj);
  word idx = iter.index();
  RawList underlying = iter.iterable().rawCast<RawList>();
  if (idx >= underlying.numItems()) {
    // Exhausted: drop the iterator and jump past the loop body.
    thread->stackPop();
    frame->setVirtualPC(frame->virtualPC() + arg * kCodeUnitScale);
  } else {
    thread->stackPush(underlying.at(idx));
    iter.setIndex(idx + 1);
  }
  return Continue::NEXT;
}
2966
// Specialized FOR_ITER for dict key iterators: walks the dict's buckets
// inline. Falls back to the anamorphic handler when TOS is not a dict key
// iterator.
HANDLER_INLINE Continue Interpreter::doForIterDict(Thread* thread, word arg) {
  Frame* frame = thread->currentFrame();
  RawObject iter_obj = thread->stackTop();
  if (!iter_obj.isDictKeyIterator()) {
    EVENT_CACHE(FOR_ITER_DICT);
    return retryForIterAnamorphic(thread, arg);
  }
  // NOTE: This should be synced with dictKeyIteratorNext in dict-builtins.cpp.
  HandleScope scope(thread);
  DictKeyIterator iter(&scope, DictKeyIterator::cast(iter_obj));
  Dict dict(&scope, iter.iterable());
  word i = iter.index();
  Object key(&scope, NoneType::object());
  if (dictNextKey(dict, &i, &key)) {
    // At this point, we have found a valid index in the buckets.
    iter.setIndex(i);
    iter.setNumFound(iter.numFound() + 1);
    thread->stackPush(*key);
  } else {
    // We hit the end: drop the iterator and jump past the loop body.
    iter.setIndex(i);
    thread->stackPop();
    frame->setVirtualPC(frame->virtualPC() + arg * kCodeUnitScale);
  }
  return Continue::NEXT;
}
2993
// Specialized FOR_ITER for generators: resumes the generator with None
// directly instead of going through __next__. Falls back to the anamorphic
// handler when TOS is not a generator.
HANDLER_INLINE Continue Interpreter::doForIterGenerator(Thread* thread,
                                                        word arg) {
  Frame* frame = thread->currentFrame();
  RawObject iter_obj = thread->stackTop();
  if (!iter_obj.isGenerator()) {
    EVENT_CACHE(FOR_ITER_GENERATOR);
    return retryForIterAnamorphic(thread, arg);
  }
  HandleScope scope(thread);
  Generator gen(&scope, iter_obj);
  Object value(&scope, NoneType::object());
  Object result(&scope, resumeGenerator(thread, gen, value));
  if (result.isErrorException()) {
    if (thread->clearPendingStopIteration()) {
      // Generator finished: drop it and jump past the loop body.
      thread->stackPop();
      frame->setVirtualPC(frame->virtualPC() + arg * kCodeUnitScale);
      return Continue::NEXT;
    }
    return Continue::UNWIND;
  }
  thread->stackPush(*result);
  return Continue::NEXT;
}
3017
// Specialized FOR_ITER for tuple iterators: advances the iterator inline.
// Falls back to the anamorphic handler when TOS is not a tuple iterator.
HANDLER_INLINE Continue Interpreter::doForIterTuple(Thread* thread, word arg) {
  Frame* frame = thread->currentFrame();
  RawObject iter_obj = thread->stackTop();
  if (!iter_obj.isTupleIterator()) {
    EVENT_CACHE(FOR_ITER_TUPLE);
    return retryForIterAnamorphic(thread, arg);
  }
  // NOTE: This should be synced with tupleIteratorNext in tuple-builtins.cpp.
  RawTupleIterator iter = TupleIterator::cast(iter_obj);
  word idx = iter.index();
  if (idx == iter.length()) {
    // Exhausted: drop the iterator and jump past the loop body.
    thread->stackPop();
    frame->setVirtualPC(frame->virtualPC() + arg * kCodeUnitScale);
  } else {
    RawTuple underlying = iter.iterable().rawCast<RawTuple>();
    RawObject item = underlying.at(idx);
    iter.setIndex(idx + 1);
    thread->stackPush(item);
  }
  return Continue::NEXT;
}
3039
// Specialized FOR_ITER for range iterators: computes the next value inline
// and pushes it as a SmallInt. Falls back to the anamorphic handler when TOS
// is not a range iterator.
HANDLER_INLINE Continue Interpreter::doForIterRange(Thread* thread, word arg) {
  Frame* frame = thread->currentFrame();
  RawObject iter_obj = thread->stackTop();
  if (!iter_obj.isRangeIterator()) {
    EVENT_CACHE(FOR_ITER_RANGE);
    return retryForIterAnamorphic(thread, arg);
  }
  // NOTE: This should be synced with rangeIteratorNext in range-builtins.cpp.
  RawRangeIterator iter = RangeIterator::cast(iter_obj);
  word length = iter.length();
  if (length == 0) {
    // Exhausted: drop the iterator and jump past the loop body.
    thread->stackPop();
    frame->setVirtualPC(frame->virtualPC() + arg * kCodeUnitScale);
  } else {
    iter.setLength(length - 1);
    word next = iter.next();
    // Only advance the cursor when another element remains after this one.
    if (length > 1) {
      word step = iter.step();
      iter.setNext(next + step);
    }
    thread->stackPush(SmallInt::fromWord(next));
  }
  return Continue::NEXT;
}
3064
// Specialized FOR_ITER for str iterators: decodes the next code point inline
// and pushes it as a single-character string. Falls back to the anamorphic
// handler when TOS is not a str iterator.
HANDLER_INLINE Continue Interpreter::doForIterStr(Thread* thread, word arg) {
  Frame* frame = thread->currentFrame();
  RawObject iter_obj = thread->stackTop();
  if (!iter_obj.isStrIterator()) {
    EVENT_CACHE(FOR_ITER_STR);
    return retryForIterAnamorphic(thread, arg);
  }
  // NOTE: This should be synced with strIteratorNext in str-builtins.cpp.
  RawStrIterator iter = StrIterator::cast(iter_obj);
  word byte_offset = iter.index();
  RawStr underlying = iter.iterable().rawCast<RawStr>();
  if (byte_offset == underlying.length()) {
    // Exhausted: drop the iterator and jump past the loop body.
    thread->stackPop();
    frame->setVirtualPC(frame->virtualPC() + arg * kCodeUnitScale);
  } else {
    // Advance by the UTF-8 length of the code point just produced.
    word num_bytes = 0;
    word code_point = underlying.codePointAt(byte_offset, &num_bytes);
    iter.setIndex(byte_offset + num_bytes);
    thread->stackPush(RawSmallStr::fromCodePoint(code_point));
  }
  return Continue::NEXT;
}
3087
3088HANDLER_INLINE Continue Interpreter::doForIterMonomorphic(Thread* thread,
3089 word arg) {
3090 Frame* frame = thread->currentFrame();
3091 word cache = currentCacheIndex(frame);
3092 RawMutableTuple caches = MutableTuple::cast(frame->caches());
3093 LayoutId iter_layout_id = thread->stackTop().layoutId();
3094 bool is_found;
3095 RawObject cached =
3096 icLookupMonomorphic(caches, cache, iter_layout_id, &is_found);
3097 if (!is_found) {
3098 EVENT_CACHE(FOR_ITER_MONOMORPHIC);
3099 return forIterUpdateCache(thread, arg, cache);
3100 }
3101 return forIter(thread, cached, arg);
3102}
3103
3104HANDLER_INLINE Continue Interpreter::doForIterPolymorphic(Thread* thread,
3105 word arg) {
3106 Frame* frame = thread->currentFrame();
3107 word cache = currentCacheIndex(frame);
3108 RawObject iter = thread->stackTop();
3109 LayoutId iter_layout_id = iter.layoutId();
3110 bool is_found;
3111 RawObject cached = icLookupPolymorphic(MutableTuple::cast(frame->caches()),
3112 cache, iter_layout_id, &is_found);
3113 if (!is_found) {
3114 EVENT_CACHE(FOR_ITER_POLYMORPHIC);
3115 return forIterUpdateCache(thread, arg, cache);
3116 }
3117 return forIter(thread, cached, arg);
3118}
3119
// Unspecialized FOR_ITER: rewrites the opcode to a type-specialized variant
// based on the iterator's layout and tail-calls it. Iterator types without a
// specialization go through the generic inline-cache update path instead.
HANDLER_INLINE Continue Interpreter::doForIterAnamorphic(Thread* thread,
                                                         word arg) {
  Frame* frame = thread->currentFrame();
  RawObject iter = thread->stackTop();
  LayoutId iter_layout_id = iter.layoutId();
  switch (iter_layout_id) {
    case LayoutId::kListIterator:
      rewriteCurrentBytecode(frame, FOR_ITER_LIST);
      return doForIterList(thread, arg);
    case LayoutId::kDictKeyIterator:
      rewriteCurrentBytecode(frame, FOR_ITER_DICT);
      return doForIterDict(thread, arg);
    case LayoutId::kTupleIterator:
      rewriteCurrentBytecode(frame, FOR_ITER_TUPLE);
      return doForIterTuple(thread, arg);
    case LayoutId::kRangeIterator:
      rewriteCurrentBytecode(frame, FOR_ITER_RANGE);
      return doForIterRange(thread, arg);
    case LayoutId::kStrIterator:
      rewriteCurrentBytecode(frame, FOR_ITER_STR);
      return doForIterStr(thread, arg);
    case LayoutId::kGenerator:
      rewriteCurrentBytecode(frame, FOR_ITER_GENERATOR);
      return doForIterGenerator(thread, arg);
    default:
      break;
  }
  word cache = currentCacheIndex(frame);
  return forIterUpdateCache(thread, arg, cache);
}
3150
// Implements the UNPACK_EX opcode (starred assignment, e.g. `a, *b, c = x`).
// The low byte of `arg` counts the targets before the starred one and the
// next byte the targets after it. Pushes the leading values, a list for the
// starred target, then the trailing values, and finally reverses them so the
// first target ends up on top of the stack.
HANDLER_INLINE Continue Interpreter::doUnpackEx(Thread* thread, word arg) {
  Runtime* runtime = thread->runtime();
  HandleScope scope(thread);
  Object iterable(&scope, thread->stackPop());
  Object iterator(&scope, createIterator(thread, iterable));
  if (iterator.isErrorException()) return Continue::UNWIND;

  Object next_method(&scope, lookupMethod(thread, iterator, ID(__next__)));
  if (next_method.isError()) {
    if (next_method.isErrorException()) {
      thread->clearPendingException();
    } else {
      DCHECK(next_method.isErrorNotFound(),
             "expected Error::exception() or Error::notFound()");
    }
    thread->raiseWithFmt(LayoutId::kTypeError, "iter() returned non-iterator");
    return Continue::UNWIND;
  }

  word before = arg & kMaxByte;
  word after = (arg >> kBitsPerByte) & kMaxByte;
  word num_pushed = 0;
  Object value(&scope, RawNoneType::object());
  // Push the values preceding the starred target directly onto the stack.
  for (; num_pushed < before; ++num_pushed) {
    value = callMethod1(thread, next_method, iterator);
    if (value.isErrorException()) {
      if (thread->clearPendingStopIteration()) break;
      return Continue::UNWIND;
    }
    thread->stackPush(*value);
  }

  if (num_pushed < before) {
    thread->raiseWithFmt(LayoutId::kValueError, "not enough values to unpack");
    return Continue::UNWIND;
  }

  // Collect everything remaining into the list for the starred target.
  List list(&scope, runtime->newList());
  for (;;) {
    value = callMethod1(thread, next_method, iterator);
    if (value.isErrorException()) {
      if (thread->clearPendingStopIteration()) break;
      return Continue::UNWIND;
    }
    runtime->listAdd(thread, list, value);
  }

  thread->stackPush(*list);
  num_pushed++;

  if (list.numItems() < after) {
    thread->raiseWithFmt(LayoutId::kValueError, "not enough values to unpack");
    return Continue::UNWIND;
  }

  if (after > 0) {
    // Pop elements off the list and set them on the stack
    for (word i = list.numItems() - after, j = list.numItems(); i < j;
         ++i, ++num_pushed) {
      thread->stackPush(list.at(i));
      list.atPut(i, NoneType::object());
    }
    list.setNumItems(list.numItems() - after);
  }

  // swap values on the stack so the first target ends up on top
  Object tmp(&scope, NoneType::object());
  for (word i = 0, j = num_pushed - 1, half = num_pushed / 2; i < half;
       ++i, --j) {
    tmp = thread->stackPeek(i);
    thread->stackSetAt(i, thread->stackPeek(j));
    thread->stackSetAt(j, *tmp);
  }
  return Continue::NEXT;
}
3226
3227void Interpreter::storeAttrWithLocation(Thread* thread, RawObject receiver,
3228 RawObject location, RawObject value) {
3229 word offset = SmallInt::cast(location).value();
3230 RawInstance instance = Instance::cast(receiver);
3231 if (offset >= 0) {
3232 instance.instanceVariableAtPut(offset, value);
3233 return;
3234 }
3235
3236 RawLayout layout = Layout::cast(thread->runtime()->layoutOf(receiver));
3237 RawTuple overflow =
3238 Tuple::cast(instance.instanceVariableAt(layout.overflowOffset()));
3239 overflow.atPut(-offset - 1, value);
3240}
3241
3242RawObject Interpreter::storeAttrSetLocation(Thread* thread,
3243 const Object& object,
3244 const Object& name,
3245 const Object& value,
3246 Object* location_out) {
3247 Runtime* runtime = thread->runtime();
3248 HandleScope scope(thread);
3249 Type type(&scope, runtime->typeOf(*object));
3250 Object dunder_setattr(&scope,
3251 typeLookupInMroById(thread, *type, ID(__setattr__)));
3252 if (dunder_setattr == runtime->objectDunderSetattr()) {
3253 return objectSetAttrSetLocation(thread, object, name, value, location_out);
3254 }
3255 Object result(&scope,
3256 thread->invokeMethod3(object, ID(__setattr__), name, value));
3257 return *result;
3258}
3259
// Slow path for cached STORE_ATTR: performs the store, then records the
// resulting location in the inline cache and rewrites the current opcode to
// a specialized variant. Four specializations exist, covering in-object vs.
// overflow-tuple slots, each with and without a layout transition (the
// "_UPDATE" variants).
Continue Interpreter::storeAttrUpdateCache(Thread* thread, word arg,
                                           word cache) {
  Frame* frame = thread->currentFrame();
  HandleScope scope(thread);
  Function function(&scope, frame->function());
  if (function.isCompiled()) {
    // Compiled code cannot have its bytecode rewritten; deoptimize instead.
    return Continue::DEOPT;
  }
  Object receiver(&scope, thread->stackPop());
  Str name(&scope, Tuple::cast(Code::cast(frame->code()).names()).at(arg));
  Object value(&scope, thread->stackPop());

  Object location(&scope, NoneType::object());
  // The store may transition the receiver to a new layout; remember the
  // pre-store layout id, which is the key future cache lookups will use.
  LayoutId saved_layout_id = receiver.layoutId();
  Object result(&scope,
                storeAttrSetLocation(thread, receiver, name, value, &location));
  if (result.isErrorException()) return Continue::UNWIND;
  // No location means a custom __setattr__ handled the store; not cacheable.
  if (location.isNoneType()) return Continue::NEXT;
  DCHECK(location.isSmallInt(), "unexpected location");
  // Non-negative offsets address in-object slots; negative offsets encode an
  // index into the overflow tuple.
  bool is_in_object = SmallInt::cast(*location).value() >= 0;

  MutableTuple caches(&scope, frame->caches());
  ICState ic_state = icCurrentState(*caches, cache);
  Function dependent(&scope, frame->function());
  LayoutId receiver_layout_id = receiver.layoutId();
  // TODO(T59400994): Clean up when storeAttrSetLocation can return a
  // StoreAttrKind.
  if (ic_state == ICState::kAnamorphic) {
    if (saved_layout_id == receiver_layout_id) {
      // No layout transition.
      if (is_in_object) {
        rewriteCurrentBytecode(frame, STORE_ATTR_INSTANCE);
        icUpdateAttr(thread, caches, cache, saved_layout_id, location, name,
                     dependent);
      } else {
        rewriteCurrentBytecode(frame, STORE_ATTR_INSTANCE_OVERFLOW);
        icUpdateAttr(thread, caches, cache, saved_layout_id, location, name,
                     dependent);
      }
    } else {
      // Layout transition: pack the slot offset and the post-transition
      // layout id into a single SmallInt cache value.
      word offset = SmallInt::cast(*location).value();
      if (offset < 0) offset = -offset - 1;
      DCHECK(offset < (1 << Header::kLayoutIdBits), "offset doesn't fit");
      word new_layout_id = static_cast<word>(receiver_layout_id);
      SmallInt layout_offset(
          &scope,
          SmallInt::fromWord(offset << Header::kLayoutIdBits | new_layout_id));
      if (is_in_object) {
        rewriteCurrentBytecode(frame, STORE_ATTR_INSTANCE_UPDATE);
        icUpdateAttr(thread, caches, cache, saved_layout_id, layout_offset,
                     name, dependent);
      } else {
        rewriteCurrentBytecode(frame, STORE_ATTR_INSTANCE_OVERFLOW_UPDATE);
        icUpdateAttr(thread, caches, cache, saved_layout_id, layout_offset,
                     name, dependent);
      }
    }
  } else {
    DCHECK(currentBytecode(thread) == STORE_ATTR_INSTANCE ||
               currentBytecode(thread) == STORE_ATTR_INSTANCE_OVERFLOW ||
               currentBytecode(thread) == STORE_ATTR_POLYMORPHIC,
           "unexpected opcode");
    // Upgrade to a polymorphic cache, but only for stores that did not
    // transition the layout.
    if (saved_layout_id == receiver_layout_id) {
      rewriteCurrentBytecode(frame, STORE_ATTR_POLYMORPHIC);
      icUpdateAttr(thread, caches, cache, saved_layout_id, location, name,
                   dependent);
    }
  }
  return Continue::NEXT;
}
3331
3332HANDLER_INLINE Continue Interpreter::doStoreAttrAnamorphic(Thread* thread,
3333 word arg) {
3334 word cache = currentCacheIndex(thread->currentFrame());
3335 return storeAttrUpdateCache(thread, arg, cache);
3336}
3337
// This code cleans up a monomorphic cache and prepares it for its potential
// use as a polymorphic cache. This code should be removed when we change the
// structure of our caches directly accessible from a function to be
// monomorphic and to allocate the relatively uncommon polymorphic caches in a
// separate object.
3343static Continue retryStoreAttrCached(Thread* thread, word arg, word cache) {
3344 // Revert the opcode, clear the cache, and retry the attribute lookup.
3345 Frame* frame = thread->currentFrame();
3346 if (frame->function().isCompiled()) {
3347 return Continue::DEOPT;
3348 }
3349 rewriteCurrentBytecode(frame, STORE_ATTR_ANAMORPHIC);
3350 RawMutableTuple caches = MutableTuple::cast(frame->caches());
3351 word index = cache * kIcPointersPerEntry;
3352 caches.atPut(index + kIcEntryKeyOffset, NoneType::object());
3353 caches.atPut(index + kIcEntryValueOffset, NoneType::object());
3354 return Interpreter::doStoreAttrAnamorphic(thread, arg);
3355}
3356
// Specialized STORE_ATTR for a monomorphic in-object store with no layout
// transition: TOS is the receiver, TOS1 the value. On a cache hit, writes the
// value directly into the instance slot at the cached offset; on a miss,
// re-enters the slow path.
HANDLER_INLINE Continue Interpreter::doStoreAttrInstance(Thread* thread,
                                                         word arg) {
  Frame* frame = thread->currentFrame();
  RawMutableTuple caches = MutableTuple::cast(frame->caches());
  RawObject receiver = thread->stackTop();
  word cache = currentCacheIndex(frame);
  bool is_found;
  RawObject cached =
      icLookupMonomorphic(caches, cache, receiver.layoutId(), &is_found);
  if (!is_found) {
    EVENT_CACHE(STORE_ATTR_INSTANCE);
    return storeAttrUpdateCache(thread, arg, cache);
  }
  word offset = SmallInt::cast(cached).value();
  DCHECK(offset >= 0, "unexpected offset");
  RawInstance instance = Instance::cast(receiver);
  instance.instanceVariableAtPut(offset, thread->stackPeek(1));
  // Drop the receiver and the value.
  thread->stackDrop(2);
  return Continue::NEXT;
}
3377
// Specialized STORE_ATTR for a monomorphic store into the overflow tuple
// with no layout transition: TOS is the receiver, TOS1 the value. The cached
// negative offset encodes index (-offset - 1) into the overflow tuple.
HANDLER_INLINE Continue Interpreter::doStoreAttrInstanceOverflow(Thread* thread,
                                                                 word arg) {
  Frame* frame = thread->currentFrame();
  RawMutableTuple caches = MutableTuple::cast(frame->caches());
  RawObject receiver = thread->stackTop();
  word cache = currentCacheIndex(frame);
  bool is_found;
  RawObject cached =
      icLookupMonomorphic(caches, cache, receiver.layoutId(), &is_found);
  if (!is_found) {
    EVENT_CACHE(STORE_ATTR_INSTANCE_OVERFLOW);
    return storeAttrUpdateCache(thread, arg, cache);
  }
  word offset = SmallInt::cast(cached).value();
  DCHECK(offset < 0, "unexpected offset");
  RawInstance instance = Instance::cast(receiver);
  RawLayout layout = Layout::cast(thread->runtime()->layoutOf(receiver));
  RawTuple overflow =
      Tuple::cast(instance.instanceVariableAt(layout.overflowOffset()));
  overflow.atPut(-offset - 1, thread->stackPeek(1));
  // Drop the receiver and the value.
  thread->stackDrop(2);
  return Continue::NEXT;
}
3401
// Specialized STORE_ATTR for a monomorphic overflow-tuple store that also
// transitions the receiver to a new layout, growing the overflow tuple when
// needed. The cached SmallInt packs the tuple index and the new layout id.
// Cache misses revert the opcode via retryStoreAttrCached(), since the packed
// transition encoding cannot simply be upgraded in place.
HANDLER_INLINE Continue
Interpreter::doStoreAttrInstanceOverflowUpdate(Thread* thread, word arg) {
  Frame* frame = thread->currentFrame();
  RawMutableTuple caches = MutableTuple::cast(frame->caches());
  RawObject receiver = thread->stackTop();
  word cache = currentCacheIndex(frame);
  bool is_found;
  RawObject cached =
      icLookupMonomorphic(caches, cache, receiver.layoutId(), &is_found);
  if (!is_found) {
    EVENT_CACHE(STORE_ATTR_INSTANCE_OVERFLOW_UPDATE);
    return retryStoreAttrCached(thread, arg, cache);
  }
  // Set the value in an overflow tuple that needs expansion.
  word offset_and_new_offset_id = SmallInt::cast(cached).value();
  LayoutId new_layout_id =
      static_cast<LayoutId>(offset_and_new_offset_id & Header::kLayoutIdMask);
  word offset = offset_and_new_offset_id >> Header::kLayoutIdBits;

  HandleScope scope(thread);
  Instance instance(&scope, receiver);
  Layout layout(&scope, thread->runtime()->layoutOf(receiver));
  Tuple overflow(&scope, instance.instanceVariableAt(layout.overflowOffset()));
  Object value(&scope, thread->stackPeek(1));
  if (offset >= overflow.length()) {
    // The current overflow tuple is too small; grow it (may allocate).
    instanceGrowOverflow(thread, instance, offset + 1);
    overflow = instance.instanceVariableAt(layout.overflowOffset());
  }
  instance.setLayoutId(new_layout_id);
  overflow.atPut(offset, *value);
  // Drop the receiver and the value.
  thread->stackDrop(2);
  return Continue::NEXT;
}
3435
// Specialized STORE_ATTR for a monomorphic in-object store that also
// transitions the receiver to a new layout. The cached SmallInt packs the
// slot offset and the new layout id. Cache misses revert the opcode via
// retryStoreAttrCached(), since the packed transition encoding cannot simply
// be upgraded in place.
HANDLER_INLINE Continue Interpreter::doStoreAttrInstanceUpdate(Thread* thread,
                                                               word arg) {
  Frame* frame = thread->currentFrame();
  RawMutableTuple caches = MutableTuple::cast(frame->caches());
  RawObject receiver = thread->stackTop();
  word cache = currentCacheIndex(frame);
  bool is_found;
  RawObject cached =
      icLookupMonomorphic(caches, cache, receiver.layoutId(), &is_found);
  if (!is_found) {
    EVENT_CACHE(STORE_ATTR_INSTANCE_UPDATE);
    return retryStoreAttrCached(thread, arg, cache);
  }
  // Set the value in object at offset.
  // TODO(T59462341): Encapsulate this in a function.
  word offset_and_new_offset_id = SmallInt::cast(cached).value();
  LayoutId new_layout_id =
      static_cast<LayoutId>(offset_and_new_offset_id & Header::kLayoutIdMask);
  word offset = offset_and_new_offset_id >> Header::kLayoutIdBits;
  DCHECK(offset >= 0, "unexpected offset");
  RawInstance instance = Instance::cast(receiver);
  instance.instanceVariableAtPut(offset, thread->stackPeek(1));
  instance.setLayoutId(new_layout_id);
  // Drop the receiver and the value.
  thread->stackDrop(2);
  return Continue::NEXT;
}
3462
// STORE_ATTR specialized for receivers observed with multiple layouts: the
// polymorphic cache maps layout id -> store location.
HANDLER_INLINE Continue Interpreter::doStoreAttrPolymorphic(Thread* thread,
                                                            word arg) {
  Frame* frame = thread->currentFrame();
  RawObject receiver = thread->stackTop();
  LayoutId layout_id = receiver.layoutId();
  word cache = currentCacheIndex(frame);
  bool is_found;
  RawObject cached = icLookupPolymorphic(MutableTuple::cast(frame->caches()),
                                         cache, layout_id, &is_found);
  if (!is_found) {
    // Unseen layout: slow path may add another entry to this cache.
    EVENT_CACHE(STORE_ATTR_POLYMORPHIC);
    return storeAttrUpdateCache(thread, arg, cache);
  }
  RawObject value = thread->stackPeek(1);
  thread->stackDrop(2);  // pop receiver and value before performing the store
  storeAttrWithLocation(thread, receiver, cached, value);
  return Continue::NEXT;
}
3481
3482HANDLER_INLINE Continue Interpreter::doStoreAttr(Thread* thread, word arg) {
3483 HandleScope scope(thread);
3484 Frame* frame = thread->currentFrame();
3485 Object receiver(&scope, thread->stackPop());
3486 Tuple names(&scope, Code::cast(frame->code()).names());
3487 Str name(&scope, names.at(arg));
3488 Object value(&scope, thread->stackPop());
3489 if (thread->invokeMethod3(receiver, ID(__setattr__), name, value)
3490 .isErrorException()) {
3491 return Continue::UNWIND;
3492 }
3493 return Continue::NEXT;
3494}
3495
3496HANDLER_INLINE Continue Interpreter::doDeleteAttr(Thread* thread, word arg) {
3497 HandleScope scope(thread);
3498 Frame* frame = thread->currentFrame();
3499 Object receiver(&scope, thread->stackPop());
3500 Tuple names(&scope, Code::cast(frame->code()).names());
3501 Str name(&scope, names.at(arg));
3502 if (delAttribute(thread, receiver, name).isErrorException()) {
3503 return Continue::UNWIND;
3504 }
3505 return Continue::NEXT;
3506}
3507
3508HANDLER_INLINE Continue Interpreter::doStoreGlobal(Thread* thread, word arg) {
3509 Frame* frame = thread->currentFrame();
3510 HandleScope scope(thread);
3511 Tuple names(&scope, Code::cast(frame->code()).names());
3512 Str name(&scope, names.at(arg));
3513 Object value(&scope, thread->stackPop());
3514 Module module(&scope, frame->function().moduleObject());
3515 Function function(&scope, frame->function());
3516 ValueCell module_result(&scope, moduleAtPut(thread, module, name, value));
3517 icUpdateGlobalVar(thread, function, arg, module_result);
3518 return Continue::NEXT;
3519}
3520
3521HANDLER_INLINE Continue Interpreter::doStoreGlobalCached(Thread* thread,
3522 word arg) {
3523 Frame* frame = thread->currentFrame();
3524 RawObject cached =
3525 icLookupGlobalVar(MutableTuple::cast(frame->caches()), arg);
3526 ValueCell::cast(cached).setValue(thread->stackPop());
3527 return Continue::NEXT;
3528}
3529
3530HANDLER_INLINE Continue Interpreter::doDeleteGlobal(Thread* thread, word arg) {
3531 Frame* frame = thread->currentFrame();
3532 HandleScope scope(thread);
3533 Module module(&scope, frame->function().moduleObject());
3534 Tuple names(&scope, Code::cast(frame->code()).names());
3535 Str name(&scope, names.at(arg));
3536 if (moduleRemove(thread, module, name).isErrorNotFound()) {
3537 return raiseUndefinedName(thread, name);
3538 }
3539 return Continue::NEXT;
3540}
3541
3542HANDLER_INLINE Continue Interpreter::doLoadConst(Thread* thread, word arg) {
3543 Frame* frame = thread->currentFrame();
3544 RawObject consts = Code::cast(frame->code()).consts();
3545 thread->stackPush(Tuple::cast(consts).at(arg));
3546 return Continue::NEXT;
3547}
3548
3549HANDLER_INLINE Continue Interpreter::doLoadImmediate(Thread* thread, word arg) {
3550 thread->stackPush(objectFromOparg(arg));
3551 return Continue::NEXT;
3552}
3553
// LOAD_NAME: looks the name up in the frame's implicit globals (class body or
// exec namespace) first, then falls back to the module globals chain.
HANDLER_INLINE Continue Interpreter::doLoadName(Thread* thread, word arg) {
  Frame* frame = thread->currentFrame();
  HandleScope scope(thread);
  Object names(&scope, Code::cast(frame->code()).names());
  Str name(&scope, Tuple::cast(*names).at(arg));
  Object implicit_globals_obj(&scope, frame->implicitGlobals());
  if (!implicit_globals_obj.isNoneType()) {
    // Give implicit_globals_obj a higher priority than globals.
    if (implicit_globals_obj.isDict()) {
      // Shortcut for the common case of implicit_globals being a dict.
      Dict implicit_globals(&scope, *implicit_globals_obj);
      Object result(&scope, dictAtByStr(thread, implicit_globals, name));
      DCHECK(!result.isError() || result.isErrorNotFound(),
             "expected value or not found");
      if (!result.isErrorNotFound()) {
        thread->stackPush(*result);
        return Continue::NEXT;
      }
    } else {
      // Arbitrary mapping: a missing key surfaces as KeyError, which is
      // swallowed so the lookup can continue in the module globals below.
      Object result(&scope, objectGetItem(thread, implicit_globals_obj, name));
      if (!result.isErrorException()) {
        thread->stackPush(*result);
        return Continue::NEXT;
      }
      if (!thread->pendingExceptionMatches(LayoutId::kKeyError)) {
        return Continue::UNWIND;
      }
      thread->clearPendingException();
    }
  }
  // Fall back to module globals (and, inside globalsAt, builtins).
  Module module(&scope, frame->function().moduleObject());
  Object result(&scope, globalsAt(thread, module, name));
  if (result.isError()) {
    if (result.isErrorNotFound()) return raiseUndefinedName(thread, name);
    DCHECK(result.isErrorException(), "Expected ErrorException");
    return Continue::UNWIND;
  }
  thread->stackPush(*result);
  return Continue::NEXT;
}
3594
// LOAD_TYPE: specialized load of `obj.__class__` for receivers whose type is
// known to use object.__class__ (flagged kHasObjectDunderClass).
HANDLER_INLINE Continue Interpreter::doLoadType(Thread* thread, word arg) {
  HandleScope scope(thread);
  Object receiver(&scope, thread->stackTop());
  Type type(&scope, thread->runtime()->typeOf(*receiver));
  if (!type.isType() || !type.hasFlag(Type::Flag::kHasObjectDunderClass)) {
    // __class__ may be overridden on this type; invalidate and redo the
    // generic attribute lookup.
    EVENT_CACHE(LOAD_TYPE);
    word cache = currentCacheIndex(thread->currentFrame());
    return retryLoadAttrCached(thread, arg, cache);
  }
  thread->stackSetTop(*type);
  return Continue::NEXT;
}
3607
3608HANDLER_INLINE Continue Interpreter::doBuildTuple(Thread* thread, word arg) {
3609 if (arg == 0) {
3610 thread->stackPush(thread->runtime()->emptyTuple());
3611 return Continue::NEXT;
3612 }
3613 HandleScope scope(thread);
3614 MutableTuple tuple(&scope, thread->runtime()->newMutableTuple(arg));
3615 for (word i = arg - 1; i >= 0; i--) {
3616 tuple.atPut(i, thread->stackPop());
3617 }
3618 thread->stackPush(tuple.becomeImmutable());
3619 return Continue::NEXT;
3620}
3621
3622HANDLER_INLINE Continue Interpreter::doBuildList(Thread* thread, word arg) {
3623 Runtime* runtime = thread->runtime();
3624 if (arg == 0) {
3625 thread->stackPush(runtime->newList());
3626 return Continue::NEXT;
3627 }
3628 HandleScope scope(thread);
3629 MutableTuple array(&scope, runtime->newMutableTuple(arg));
3630 for (word i = arg - 1; i >= 0; i--) {
3631 array.atPut(i, thread->stackPop());
3632 }
3633 RawList list = List::cast(runtime->newList());
3634 list.setItems(*array);
3635 list.setNumItems(array.length());
3636 thread->stackPush(list);
3637 return Continue::NEXT;
3638}
3639
// BUILD_SET: pops `arg` values, hashes each, and pushes a set of them.
HANDLER_INLINE Continue Interpreter::doBuildSet(Thread* thread, word arg) {
  HandleScope scope(thread);
  Runtime* runtime = thread->runtime();
  Set set(&scope, runtime->newSet());
  Object value(&scope, NoneType::object());
  Object hash_obj(&scope, NoneType::object());
  for (word i = arg - 1; i >= 0; i--) {
    value = thread->stackPop();
    // Hashing may run arbitrary __hash__ code and raise.
    hash_obj = hash(thread, value);
    if (hash_obj.isErrorException()) return Continue::UNWIND;
    word hash = SmallInt::cast(*hash_obj).value();
    // NOTE(review): setAdd's return value is ignored here, unlike dictAtPut
    // in doBuildMap — confirm setAdd cannot fail once the hash is computed.
    setAdd(thread, set, value, hash);
  }
  thread->stackPush(*set);
  return Continue::NEXT;
}
3656
// BUILD_MAP: the stack holds `arg` (key, value) pairs, key pushed before its
// value. Pairs are read in place with stackPeek (deepest pair first) and the
// whole run is dropped at once after the dict is populated.
HANDLER_INLINE Continue Interpreter::doBuildMap(Thread* thread, word arg) {
  Runtime* runtime = thread->runtime();
  HandleScope scope(thread);
  Dict dict(&scope, runtime->newDictWithSize(arg));
  Object value(&scope, NoneType::object());
  Object key(&scope, NoneType::object());
  Object hash_obj(&scope, NoneType::object());
  for (word i = ((arg - 1) * 2); i >= 0; i -= 2) {
    // For each pair, the value sits above the key on the stack.
    value = thread->stackPeek(i);
    key = thread->stackPeek(i + 1);
    hash_obj = hash(thread, key);
    if (hash_obj.isErrorException()) return Continue::UNWIND;
    word hash = SmallInt::cast(*hash_obj).value();
    if (dictAtPut(thread, dict, key, hash, value).isErrorException()) {
      return Continue::UNWIND;
    }
  }
  thread->stackDrop(arg * 2);
  thread->stackPush(*dict);
  return Continue::NEXT;
}
3678
3679HANDLER_INLINE Continue Interpreter::doLoadAttr(Thread* thread, word arg) {
3680 Frame* frame = thread->currentFrame();
3681 HandleScope scope(thread);
3682 Object receiver(&scope, thread->stackTop());
3683 Tuple names(&scope, Code::cast(frame->code()).names());
3684 Str name(&scope, names.at(arg));
3685 RawObject result = thread->runtime()->attributeAt(thread, receiver, name);
3686 if (result.isErrorException()) return Continue::UNWIND;
3687 thread->stackSetTop(result);
3688 return Continue::NEXT;
3689}
3690
3691Continue Interpreter::loadAttrUpdateCache(Thread* thread, word arg,
3692 word cache) {
3693 HandleScope scope(thread);
3694 Frame* frame = thread->currentFrame();
3695 Function function(&scope, frame->function());
3696 if (function.isCompiled()) {
3697 return Continue::DEOPT;
3698 }
3699 Object receiver(&scope, thread->stackTop());
3700 Str name(&scope, Tuple::cast(Code::cast(frame->code()).names()).at(arg));
3701
3702 Object location(&scope, NoneType::object());
3703 LoadAttrKind kind;
3704 Object result(&scope, thread->runtime()->attributeAtSetLocation(
3705 thread, receiver, name, &kind, &location));
3706 if (result.isErrorException()) return Continue::UNWIND;
3707 if (location.isNoneType()) {
3708 thread->stackSetTop(*result);
3709 return Continue::NEXT;
3710 }
3711
3712 // Cache the attribute load
3713 MutableTuple caches(&scope, frame->caches());
3714 ICState ic_state = icCurrentState(*caches, cache);
3715 Function dependent(&scope, frame->function());
3716 LayoutId receiver_layout_id = receiver.layoutId();
3717 if (ic_state == ICState::kAnamorphic) {
3718 switch (kind) {
3719 case LoadAttrKind::kInstanceOffset:
3720 rewriteCurrentBytecode(frame, LOAD_ATTR_INSTANCE);
3721 icUpdateAttr(thread, caches, cache, receiver_layout_id, location, name,
3722 dependent);
3723 break;
3724 case LoadAttrKind::kInstanceFunction:
3725 rewriteCurrentBytecode(frame, LOAD_ATTR_INSTANCE_TYPE_BOUND_METHOD);
3726 icUpdateAttr(thread, caches, cache, receiver_layout_id, location, name,
3727 dependent);
3728 break;
3729 case LoadAttrKind::kInstanceProperty:
3730 rewriteCurrentBytecode(frame, LOAD_ATTR_INSTANCE_PROPERTY);
3731 icUpdateAttr(thread, caches, cache, receiver_layout_id, location, name,
3732 dependent);
3733 break;
3734 case LoadAttrKind::kInstanceSlotDescr:
3735 rewriteCurrentBytecode(frame, LOAD_ATTR_INSTANCE_SLOT_DESCR);
3736 icUpdateAttr(thread, caches, cache, receiver_layout_id, location, name,
3737 dependent);
3738 break;
3739 case LoadAttrKind::kInstanceType:
3740 rewriteCurrentBytecode(frame, LOAD_ATTR_INSTANCE_TYPE);
3741 icUpdateAttr(thread, caches, cache, receiver_layout_id, location, name,
3742 dependent);
3743 break;
3744 case LoadAttrKind::kInstanceTypeDescr:
3745 rewriteCurrentBytecode(frame, LOAD_ATTR_INSTANCE_TYPE_DESCR);
3746 icUpdateAttr(thread, caches, cache, receiver_layout_id, location, name,
3747 dependent);
3748 break;
3749 case LoadAttrKind::kModule: {
3750 ValueCell value_cell(&scope, *location);
3751 DCHECK(location.isValueCell(), "location must be ValueCell");
3752 icUpdateAttrModule(thread, caches, cache, receiver, value_cell,
3753 dependent);
3754 } break;
3755 case LoadAttrKind::kType:
3756 icUpdateAttrType(thread, caches, cache, receiver, name, location,
3757 dependent);
3758 break;
3759 case LoadAttrKind::kDunderClass:
3760 rewriteCurrentBytecode(frame, LOAD_TYPE);
3761 icUpdateDunderClass(thread, receiver_layout_id, name, dependent);
3762 break;
3763 default:
3764 UNREACHABLE("kinds should have been handled before");
3765 }
3766 } else {
3767 DCHECK(
3768 currentBytecode(thread) == LOAD_ATTR_INSTANCE ||
3769 currentBytecode(thread) == LOAD_ATTR_INSTANCE_TYPE_BOUND_METHOD ||
3770 currentBytecode(thread) == LOAD_ATTR_POLYMORPHIC,
3771 "unexpected opcode");
3772 switch (kind) {
3773 case LoadAttrKind::kInstanceOffset:
3774 case LoadAttrKind::kInstanceFunction:
3775 rewriteCurrentBytecode(frame, LOAD_ATTR_POLYMORPHIC);
3776 icUpdateAttr(thread, caches, cache, receiver_layout_id, location, name,
3777 dependent);
3778 break;
3779 default:
3780 break;
3781 }
3782 }
3783 thread->stackSetTop(*result);
3784 return Continue::NEXT;
3785}
3786
// Loads an attribute given a cached location. A Function location means the
// attribute is a method and is bound to the receiver. Otherwise the location
// is a SmallInt: a non-negative value is an in-object offset, and a negative
// value encodes an index into the overflow tuple as (-offset - 1).
HANDLER_INLINE USED RawObject Interpreter::loadAttrWithLocation(
    Thread* thread, RawObject receiver, RawObject location) {
  if (location.isFunction()) {
    HandleScope scope(thread);
    Object self(&scope, receiver);
    Object function(&scope, location);
    return thread->runtime()->newBoundMethod(function, self);
  }

  word offset = SmallInt::cast(location).value();

  DCHECK(receiver.isHeapObject(), "expected heap object");
  RawInstance instance = Instance::cast(receiver);
  if (offset >= 0) {
    return instance.instanceVariableAt(offset);
  }

  // Negative offset: read out of the overflow tuple.
  RawLayout layout = Layout::cast(thread->runtime()->layoutOf(receiver));
  RawTuple overflow =
      Tuple::cast(instance.instanceVariableAt(layout.overflowOffset()));
  return overflow.at(-offset - 1);
}
3809
3810HANDLER_INLINE Continue Interpreter::doLoadAttrAnamorphic(Thread* thread,
3811 word arg) {
3812 word cache = currentCacheIndex(thread->currentFrame());
3813 return loadAttrUpdateCache(thread, arg, cache);
3814}
3815
// Monomorphic LOAD_ATTR for a plain instance attribute: the cache maps the
// receiver's layout id to an attribute location (see loadAttrWithLocation).
HANDLER_INLINE Continue Interpreter::doLoadAttrInstance(Thread* thread,
                                                        word arg) {
  Frame* frame = thread->currentFrame();
  word cache = currentCacheIndex(frame);
  RawMutableTuple caches = MutableTuple::cast(frame->caches());
  RawObject receiver = thread->stackTop();
  bool is_found;
  RawObject cached =
      icLookupMonomorphic(caches, cache, receiver.layoutId(), &is_found);
  if (!is_found) {
    // Layout changed since caching: redo the lookup and re-specialize.
    EVENT_CACHE(LOAD_ATTR_INSTANCE);
    return Interpreter::loadAttrUpdateCache(thread, arg, cache);
  }
  RawObject result = loadAttrWithLocation(thread, receiver, cached);
  thread->stackSetTop(result);
  return Continue::NEXT;
}
3833
// Monomorphic LOAD_ATTR where the cached value is a function found on the
// type: the load produces a bound method of receiver and the cached function.
HANDLER_INLINE Continue
Interpreter::doLoadAttrInstanceTypeBoundMethod(Thread* thread, word arg) {
  Frame* frame = thread->currentFrame();
  word cache = currentCacheIndex(frame);
  RawMutableTuple caches = MutableTuple::cast(frame->caches());
  RawObject receiver = thread->stackTop();
  bool is_found;
  RawObject cached =
      icLookupMonomorphic(caches, cache, receiver.layoutId(), &is_found);
  if (!is_found) {
    // Layout changed since caching: redo the lookup and re-specialize.
    EVENT_CACHE(LOAD_ATTR_INSTANCE_TYPE_BOUND_METHOD);
    return Interpreter::loadAttrUpdateCache(thread, arg, cache);
  }
  HandleScope scope(thread);
  Object self(&scope, receiver);
  Object function(&scope, cached);
  thread->stackSetTop(thread->runtime()->newBoundMethod(function, self));
  return Continue::NEXT;
}
3853
// This code cleans-up a monomorphic cache and prepares it for its potential
// use as a polymorphic cache. This code should be removed when we change the
// structure of our caches directly accessible from a function to be
// monomorphic and to allocate the relatively uncommon polymorphic caches in a
// separate object.
NEVER_INLINE Continue Interpreter::retryLoadAttrCached(Thread* thread, word arg,
                                                       word cache) {
  // Revert the opcode, clear the cache, and retry the attribute lookup.
  Frame* frame = thread->currentFrame();
  // Compiled functions must not have their bytecode rewritten; deoptimize.
  if (frame->function().isCompiled()) {
    return Continue::DEOPT;
  }
  rewriteCurrentBytecode(frame, LOAD_ATTR_ANAMORPHIC);
  RawMutableTuple caches = MutableTuple::cast(frame->caches());
  // Clear both words of the cache entry so the anamorphic handler starts
  // from a clean slate.
  word index = cache * kIcPointersPerEntry;
  caches.atPut(index + kIcEntryKeyOffset, NoneType::object());
  caches.atPut(index + kIcEntryValueOffset, NoneType::object());
  return Interpreter::loadAttrUpdateCache(thread, arg, cache);
}
3873
// Monomorphic LOAD_ATTR where the cached value is a property getter: the
// getter is tail-called with the receiver as its single argument.
HANDLER_INLINE Continue Interpreter::doLoadAttrInstanceProperty(Thread* thread,
                                                                word arg) {
  Frame* frame = thread->currentFrame();
  word cache = currentCacheIndex(frame);
  RawMutableTuple caches = MutableTuple::cast(frame->caches());
  RawObject receiver = thread->stackTop();
  bool is_found;
  RawObject cached =
      icLookupMonomorphic(caches, cache, receiver.layoutId(), &is_found);
  if (!is_found) {
    EVENT_CACHE(LOAD_ATTR_INSTANCE_PROPERTY);
    return retryLoadAttrCached(thread, arg, cache);
  }
  // Rearrange the stack from [receiver] to [getter, receiver] so the call
  // sees the getter as the callable and the receiver as argument 0.
  thread->stackPush(receiver);
  thread->stackSetAt(1, cached);
  return tailcallFunction(thread, 1, cached);
}
3891
// Monomorphic LOAD_ATTR through a __slots__ descriptor: the cached SmallInt is
// the in-object offset of the slot's storage.
HANDLER_INLINE Continue Interpreter::doLoadAttrInstanceSlotDescr(Thread* thread,
                                                                 word arg) {
  Frame* frame = thread->currentFrame();
  word cache = currentCacheIndex(frame);
  RawMutableTuple caches = MutableTuple::cast(frame->caches());
  RawObject receiver = thread->stackTop();
  bool is_found;
  RawObject cached =
      icLookupMonomorphic(caches, cache, receiver.layoutId(), &is_found);
  if (!is_found) {
    EVENT_CACHE(LOAD_ATTR_INSTANCE_SLOT_DESCR);
    return retryLoadAttrCached(thread, arg, cache);
  }
  word offset = SmallInt::cast(cached).value();
  RawObject value = Instance::cast(receiver).instanceVariableAt(offset);
  if (!value.isUnbound()) {
    thread->stackSetTop(value);
    return Continue::NEXT;
  }
  // Unbound means the slot was never assigned (or was deleted); the slow path
  // will raise the proper AttributeError.
  // If the value is unbound, we remove the cached slot descriptor.
  EVENT_CACHE(LOAD_ATTR_INSTANCE_SLOT_DESCR);
  return retryLoadAttrCached(thread, arg, cache);
}
3915
// Monomorphic LOAD_ATTR where the cached value is a data descriptor found on
// the type: invoke its __get__ with the receiver and the receiver's type.
HANDLER_INLINE Continue Interpreter::doLoadAttrInstanceTypeDescr(Thread* thread,
                                                                 word arg) {
  Frame* frame = thread->currentFrame();
  word cache = currentCacheIndex(frame);
  RawMutableTuple caches = MutableTuple::cast(frame->caches());
  RawObject receiver = thread->stackTop();
  bool is_found;
  RawObject cached =
      icLookupMonomorphic(caches, cache, receiver.layoutId(), &is_found);
  if (!is_found) {
    EVENT_CACHE(LOAD_ATTR_INSTANCE_TYPE_DESCR);
    return retryLoadAttrCached(thread, arg, cache);
  }
  HandleScope scope(thread);
  Object descr(&scope, cached);
  Object self(&scope, receiver);
  Type self_type(&scope, thread->runtime()->typeAt(self.layoutId()));
  Object result(&scope,
                Interpreter::callDescriptorGet(thread, descr, self, self_type));
  if (result.isError()) return Continue::UNWIND;
  thread->stackSetTop(*result);
  return Continue::NEXT;
}
3939
// Monomorphic LOAD_ATTR where the attribute value was found on the type and is
// not a descriptor: the cached object itself is the result.
HANDLER_INLINE Continue Interpreter::doLoadAttrInstanceType(Thread* thread,
                                                            word arg) {
  Frame* frame = thread->currentFrame();
  word cache = currentCacheIndex(frame);
  RawMutableTuple caches = MutableTuple::cast(frame->caches());
  RawObject receiver = thread->stackTop();
  bool is_found;
  RawObject cached =
      icLookupMonomorphic(caches, cache, receiver.layoutId(), &is_found);
  if (!is_found) {
    EVENT_CACHE(LOAD_ATTR_INSTANCE_TYPE);
    return retryLoadAttrCached(thread, arg, cache);
  }
  thread->stackSetTop(cached);
  return Continue::NEXT;
}
3956
// LOAD_ATTR specialized for module attributes: the cache key is the module's
// id and the cache value is the module dict's ValueCell for the attribute.
HANDLER_INLINE Continue Interpreter::doLoadAttrModule(Thread* thread,
                                                      word arg) {
  Frame* frame = thread->currentFrame();
  RawObject receiver = thread->stackTop();
  RawMutableTuple caches = MutableTuple::cast(frame->caches());
  word cache = currentCacheIndex(frame);
  word index = cache * kIcPointersPerEntry;
  RawObject cache_key = caches.at(index + kIcEntryKeyOffset);
  // isInstanceOfModule() should be just as fast as isModule() in the common
  // case. If code size or quality is an issue we can adjust this as needed
  // based on the types that actually flow through here.
  if (thread->runtime()->isInstanceOfModule(receiver) &&
      // Use rawCast() to support subclasses without the overhead of a
      // handle.
      SmallInt::fromWord(receiver.rawCast<RawModule>().id()) == cache_key) {
    RawObject result = caches.at(index + kIcEntryValueOffset);
    DCHECK(result.isValueCell(), "cached value is not a value cell");
    DCHECK(!ValueCell::cast(result).isPlaceholder(),
           "attribute has been deleted");
    thread->stackSetTop(ValueCell::cast(result).value());
    return Continue::NEXT;
  }
  // Different module (or not a module) than the one cached: re-specialize.
  EVENT_CACHE(LOAD_ATTR_MODULE);
  return retryLoadAttrCached(thread, arg, cache);
}
3982
// Polymorphic LOAD_ATTR: the cache holds several (layout id -> location)
// entries for receivers seen with different layouts.
HANDLER_INLINE Continue Interpreter::doLoadAttrPolymorphic(Thread* thread,
                                                           word arg) {
  Frame* frame = thread->currentFrame();
  RawObject receiver = thread->stackTop();
  LayoutId layout_id = receiver.layoutId();
  word cache = currentCacheIndex(frame);
  bool is_found;
  RawObject cached = icLookupPolymorphic(MutableTuple::cast(frame->caches()),
                                         cache, layout_id, &is_found);
  if (!is_found) {
    // Unseen layout: the slow path may add another entry to this cache.
    EVENT_CACHE(LOAD_ATTR_POLYMORPHIC);
    return loadAttrUpdateCache(thread, arg, cache);
  }
  RawObject result = loadAttrWithLocation(thread, receiver, cached);
  thread->stackSetTop(result);
  return Continue::NEXT;
}
4000
// LOAD_ATTR specialized for attributes of type objects: the cache key is the
// type's instance layout id and the value is the attribute's ValueCell.
HANDLER_INLINE Continue Interpreter::doLoadAttrType(Thread* thread, word arg) {
  Frame* frame = thread->currentFrame();
  RawObject receiver = thread->stackTop();
  RawMutableTuple caches = MutableTuple::cast(frame->caches());
  word cache = currentCacheIndex(frame);
  word index = cache * kIcPointersPerEntry;
  RawObject layout_id = caches.at(index + kIcEntryKeyOffset);
  Runtime* runtime = thread->runtime();
  if (runtime->isInstanceOfType(receiver)) {
    word id = static_cast<word>(receiver.rawCast<RawType>().instanceLayoutId());
    if (SmallInt::fromWord(id) == layout_id) {
      RawObject result = caches.at(index + kIcEntryValueOffset);
      DCHECK(result.isValueCell(), "cached value is not a value cell");
      thread->stackSetTop(ValueCell::cast(result).value());
      return Continue::NEXT;
    }
  }
  // Different type than the one cached (or not a type): re-specialize.
  EVENT_CACHE(LOAD_ATTR_TYPE);
  return retryLoadAttrCached(thread, arg, cache);
}
4021
4022HANDLER_INLINE Continue Interpreter::doLoadBool(Thread* thread, word arg) {
4023 DCHECK(arg == 0x80 || arg == 0, "unexpected arg");
4024 thread->stackPush(Bool::fromBool(arg));
4025 return Continue::NEXT;
4026}
4027
4028static RawObject excMatch(Thread* thread, const Object& left,
4029 const Object& right) {
4030 Runtime* runtime = thread->runtime();
4031 HandleScope scope(thread);
4032
4033 static const char* cannot_catch_msg =
4034 "catching classes that do not inherit from BaseException is not allowed";
4035 if (runtime->isInstanceOfTuple(*right)) {
4036 Tuple tuple(&scope, tupleUnderlying(*right));
4037 for (word i = 0, length = tuple.length(); i < length; i++) {
4038 Object obj(&scope, tuple.at(i));
4039 if (!(runtime->isInstanceOfType(*obj) &&
4040 Type(&scope, *obj).isBaseExceptionSubclass())) {
4041 return thread->raiseWithFmt(LayoutId::kTypeError, cannot_catch_msg);
4042 }
4043 }
4044 } else if (!(runtime->isInstanceOfType(*right) &&
4045 Type(&scope, *right).isBaseExceptionSubclass())) {
4046 return thread->raiseWithFmt(LayoutId::kTypeError, cannot_catch_msg);
4047 }
4048
4049 return Bool::fromBool(givenExceptionMatches(thread, left, right));
4050}
4051
4052HANDLER_INLINE Continue Interpreter::doCompareIs(Thread* thread, word) {
4053 RawObject right = thread->stackPop();
4054 RawObject left = thread->stackPop();
4055 thread->stackPush(Bool::fromBool(left == right));
4056 return Continue::NEXT;
4057}
4058
4059HANDLER_INLINE Continue Interpreter::doCompareIsNot(Thread* thread, word) {
4060 RawObject right = thread->stackPop();
4061 RawObject left = thread->stackPop();
4062 thread->stackPush(Bool::fromBool(left != right));
4063 return Continue::NEXT;
4064}
4065
4066HANDLER_INLINE Continue Interpreter::doCompareOp(Thread* thread, word arg) {
4067 HandleScope scope(thread);
4068 Object right(&scope, thread->stackPop());
4069 Object left(&scope, thread->stackPop());
4070 CompareOp op = static_cast<CompareOp>(arg);
4071 RawObject result = NoneType::object();
4072 if (op == IS) {
4073 result = Bool::fromBool(*left == *right);
4074 } else if (op == IS_NOT) {
4075 result = Bool::fromBool(*left != *right);
4076 } else if (op == IN) {
4077 result = sequenceContains(thread, left, right);
4078 } else if (op == NOT_IN) {
4079 result = sequenceContains(thread, left, right);
4080 if (result.isBool()) result = Bool::negate(result);
4081 } else if (op == EXC_MATCH) {
4082 result = excMatch(thread, left, right);
4083 } else {
4084 result = compareOperation(thread, op, left, right);
4085 }
4086
4087 if (result.isErrorException()) return Continue::UNWIND;
4088 thread->stackPush(result);
4089 return Continue::NEXT;
4090}
4091
// IMPORT_NAME: pops fromlist and level off the stack and tail-calls
// __builtins__.__import__(name, globals, locals, fromlist, level).
HANDLER_INLINE Continue Interpreter::doImportName(Thread* thread, word arg) {
  HandleScope scope(thread);
  Frame* frame = thread->currentFrame();
  Code code(&scope, frame->code());
  Object name(&scope, Tuple::cast(code.names()).at(arg));
  Object fromlist(&scope, thread->stackPop());
  Object level(&scope, thread->stackPop());
  Module module(&scope, frame->function().moduleObject());
  Object globals(&scope, module.moduleProxy());
  // TODO(T41634372) Pass in a dict that is similar to what `builtins.locals`
  // returns. Use `None` for now since the default importlib behavior is to
  // ignore the value and this only matters if `__import__` is replaced.
  Object locals(&scope, NoneType::object());

  // Call __builtins__.__import__(name, globals, locals, fromlist, level).
  Runtime* runtime = thread->runtime();
  Object dunder_import_name(&scope, runtime->symbols()->at(ID(__import__)));
  Object dunder_import(&scope, builtinsAt(thread, module, dunder_import_name));
  if (dunder_import.isErrorNotFound()) {
    thread->raiseWithFmt(LayoutId::kImportError, "__import__ not found");
    return Continue::UNWIND;
  }

  // Push callee followed by its five arguments, then tail-call.
  thread->stackPush(*dunder_import);
  thread->stackPush(*name);
  thread->stackPush(*globals);
  thread->stackPush(*locals);
  thread->stackPush(*fromlist);
  thread->stackPush(*level);
  return tailcall(thread, 5);
}
4123
// Fallback for `from X import name` after the attribute lookup failed: try to
// resolve "<X.__name__>.<name>" as an already-registered module. Returns
// Error::notFound() when `from` has no usable __name__ or no such module is
// registered; swallows any exception raised by the __name__ lookup.
static RawObject tryImportFromSysModules(Thread* thread, const Object& from,
                                         const Object& name) {
  HandleScope scope(thread);
  Runtime* runtime = thread->runtime();
  Object fully_qualified_name(
      &scope, runtime->attributeAtById(thread, from, ID(__name__)));
  if (fully_qualified_name.isErrorException() ||
      !runtime->isInstanceOfStr(*fully_qualified_name)) {
    thread->clearPendingException();
    return Error::notFound();
  }
  Object module_name(
      &scope, runtime->newStrFromFmt("%S.%S", &fully_qualified_name, &name));
  Object result(&scope, runtime->findModule(module_name));
  if (result.isNoneType()) {
    return Error::notFound();
  }
  return *result;
}
4143
// IMPORT_FROM: with the imported module (or object) at TOS, pushes its `name`
// attribute. Falls back to sys.modules for circular relative imports and
// raises ImportError when the name cannot be resolved at all.
HANDLER_INLINE Continue Interpreter::doImportFrom(Thread* thread, word arg) {
  HandleScope scope(thread);
  Frame* frame = thread->currentFrame();
  Code code(&scope, frame->code());
  Str name(&scope, Tuple::cast(code.names()).at(arg));
  Object from(&scope, thread->stackTop());

  Object value(&scope, NoneType::object());
  if (from.isModule()) {
    // Common case of a lookup done on the built-in module type.
    Module from_module(&scope, *from);
    value = moduleGetAttribute(thread, from_module, name);
  } else {
    // Do a generic attribute lookup.
    value = thread->runtime()->attributeAt(thread, from, name);
  }

  if (value.isErrorException()) {
    // Only AttributeError is converted into the not-found path; any other
    // exception propagates.
    if (!thread->pendingExceptionMatches(LayoutId::kAttributeError)) {
      return Continue::UNWIND;
    }
    thread->clearPendingException();
    value = Error::notFound();
  }

  if (value.isErrorNotFound()) {
    // in case this failed because of a circular relative import, try to
    // fallback on reading the module directly from sys.modules.
    // See cpython bpo-17636.
    value = tryImportFromSysModules(thread, from, name);
    if (value.isErrorNotFound()) {
      Runtime* runtime = thread->runtime();
      if (runtime->isInstanceOfModule(*from)) {
        Module from_module(&scope, *from);
        Object module_name(&scope, from_module.name());
        if (runtime->isInstanceOfStr(*module_name)) {
          thread->raiseWithFmt(LayoutId::kImportError,
                               "cannot import name '%S' from '%S'", &name,
                               &module_name);
          return Continue::UNWIND;
        }
      }
      thread->raiseWithFmt(LayoutId::kImportError, "cannot import name '%S'",
                           &name);
      return Continue::UNWIND;
    }
  }
  thread->stackPush(*value);
  return Continue::NEXT;
}
4194
4195HANDLER_INLINE Continue Interpreter::doJumpForward(Thread* thread, word arg) {
4196 Frame* frame = thread->currentFrame();
4197 frame->setVirtualPC(frame->virtualPC() + arg * kCodeUnitScale);
4198 return Continue::NEXT;
4199}
4200
4201HANDLER_INLINE Continue Interpreter::doJumpIfFalseOrPop(Thread* thread,
4202 word arg) {
4203 Frame* frame = thread->currentFrame();
4204 RawObject value = thread->stackTop();
4205 value = isTrue(thread, value);
4206 if (LIKELY(value == Bool::falseObj())) {
4207 frame->setVirtualPC(arg * kCodeUnitScale);
4208 return Continue::NEXT;
4209 }
4210 if (value == Bool::trueObj()) {
4211 thread->stackPop();
4212 return Continue::NEXT;
4213 }
4214 DCHECK(value.isErrorException(), "value must be error");
4215 return Continue::UNWIND;
4216}
4217
4218HANDLER_INLINE Continue Interpreter::doJumpIfTrueOrPop(Thread* thread,
4219 word arg) {
4220 Frame* frame = thread->currentFrame();
4221 RawObject value = thread->stackTop();
4222 value = isTrue(thread, value);
4223 if (LIKELY(value == Bool::trueObj())) {
4224 frame->setVirtualPC(arg * kCodeUnitScale);
4225 return Continue::NEXT;
4226 }
4227 if (value == Bool::falseObj()) {
4228 thread->stackPop();
4229 return Continue::NEXT;
4230 }
4231 DCHECK(value.isErrorException(), "value must be error");
4232 return Continue::UNWIND;
4233}
4234
4235HANDLER_INLINE Continue Interpreter::doJumpAbsolute(Thread* thread, word arg) {
4236 Frame* frame = thread->currentFrame();
4237 frame->setVirtualPC(arg * kCodeUnitScale);
4238 return Continue::NEXT;
4239}
4240
4241HANDLER_INLINE Continue Interpreter::doPopJumpIfFalse(Thread* thread,
4242 word arg) {
4243 Frame* frame = thread->currentFrame();
4244 RawObject value = thread->stackPop();
4245 value = isTrue(thread, value);
4246 if (LIKELY(value == Bool::falseObj())) {
4247 frame->setVirtualPC(arg * kCodeUnitScale);
4248 return Continue::NEXT;
4249 }
4250 if (value == Bool::trueObj()) {
4251 return Continue::NEXT;
4252 }
4253 DCHECK(value.isErrorException(), "value must be error");
4254 return Continue::UNWIND;
4255}
4256
4257HANDLER_INLINE Continue Interpreter::doPopJumpIfTrue(Thread* thread, word arg) {
4258 Frame* frame = thread->currentFrame();
4259 RawObject value = thread->stackPop();
4260 value = isTrue(thread, value);
4261 if (LIKELY(value == Bool::trueObj())) {
4262 frame->setVirtualPC(arg * kCodeUnitScale);
4263 return Continue::NEXT;
4264 }
4265 if (value == Bool::falseObj()) {
4266 return Continue::NEXT;
4267 }
4268 DCHECK(value.isErrorException(), "value must be error");
4269 return Continue::UNWIND;
4270}
4271
// LOAD_GLOBAL: looks the name up in the module's globals, then in
// __builtins__ (which may be a ModuleProxy, a Module, or an arbitrary
// mapping). Successful ValueCell lookups are recorded in the
// global-variable cache.
HANDLER_INLINE Continue Interpreter::doLoadGlobal(Thread* thread, word arg) {
  Frame* frame = thread->currentFrame();
  HandleScope scope(thread);
  Tuple names(&scope, Code::cast(frame->code()).names());
  Str name(&scope, names.at(arg));
  Function function(&scope, frame->function());
  Module module(&scope, function.moduleObject());

  // Fast path: the name is defined in the module itself.
  Object module_result(&scope, moduleValueCellAt(thread, module, name));
  if (!module_result.isErrorNotFound() &&
      !ValueCell::cast(*module_result).isPlaceholder()) {
    ValueCell value_cell(&scope, *module_result);
    icUpdateGlobalVar(thread, function, arg, value_cell);
    thread->stackPush(value_cell.value());
    return Continue::NEXT;
  }
  // Resolve __builtins__ to a Module where possible; a generic mapping is
  // handled with an uncached objectGetItem lookup instead.
  Object builtins(&scope, moduleAtById(thread, module, ID(__builtins__)));
  Module builtins_module(&scope, *module);
  if (builtins.isModuleProxy()) {
    builtins_module = ModuleProxy::cast(*builtins).module();
  } else if (builtins.isModule()) {
    builtins_module = *builtins;
  } else if (builtins.isErrorNotFound()) {
    return raiseUndefinedName(thread, name);
  } else {
    Object result(&scope, objectGetItem(thread, builtins, name));
    if (result.isErrorException()) return Continue::UNWIND;
    thread->stackPush(*result);
    return Continue::NEXT;
  }
  Object builtins_result(&scope,
                         moduleValueCellAt(thread, builtins_module, name));
  if (builtins_result.isErrorNotFound()) {
    return raiseUndefinedName(thread, name);
  }
  ValueCell value_cell(&scope, *builtins_result);
  if (value_cell.isPlaceholder()) {
    return raiseUndefinedName(thread, name);
  }
  icUpdateGlobalVar(thread, function, arg, value_cell);
  // Set up a placeholder in module to signify that a builtin entry under
  // the same name is cached.
  attributeValueCellAtPut(thread, module, name);
  thread->stackPush(value_cell.value());
  return Continue::NEXT;
}
4318
4319HANDLER_INLINE Continue Interpreter::doLoadGlobalCached(Thread* thread,
4320 word arg) {
4321 Frame* frame = thread->currentFrame();
4322 RawObject cached =
4323 icLookupGlobalVar(MutableTuple::cast(frame->caches()), arg);
4324 DCHECK(cached.isValueCell(), "cached value must be a ValueCell");
4325 DCHECK(!ValueCell::cast(cached).isPlaceholder(),
4326 "cached ValueCell must not be a placeholder");
4327 thread->stackPush(ValueCell::cast(cached).value());
4328 return Continue::NEXT;
4329}
4330
4331HANDLER_INLINE Continue Interpreter::doSetupFinally(Thread* thread, word arg) {
4332 Frame* frame = thread->currentFrame();
4333 word stack_depth = thread->valueStackSize();
4334 word handler_pc = frame->virtualPC() + arg * kCodeUnitScale;
4335 frame->blockStackPush(TryBlock(TryBlock::kFinally, handler_pc, stack_depth));
4336 return Continue::NEXT;
4337}
4338
// LOAD_FAST: push local variable `arg`. A slot still holding the
// Error::notFound() sentinel was never assigned (or was deleted), which
// raises UnboundLocalError.
HANDLER_INLINE Continue Interpreter::doLoadFast(Thread* thread, word arg) {
  Frame* frame = thread->currentFrame();
  RawObject value = frame->local(arg);
  if (UNLIKELY(value.isErrorNotFound())) {
    HandleScope scope(thread);
    Str name(&scope, Tuple::cast(Code::cast(frame->code()).varnames()).at(arg));
    thread->raiseWithFmt(LayoutId::kUnboundLocalError,
                         "local variable '%S' referenced before assignment",
                         &name);
    return Continue::UNWIND;
  }
  thread->stackPush(value);
  return Continue::NEXT;
}
4353
// LOAD_FAST_REVERSE: like LOAD_FAST but `arg` indexes locals from the end of
// the locals area (the layout the rewritten bytecode uses).
HANDLER_INLINE Continue Interpreter::doLoadFastReverse(Thread* thread,
                                                       word arg) {
  Frame* frame = thread->currentFrame();
  RawObject value = frame->localWithReverseIndex(arg);
  if (UNLIKELY(value.isErrorNotFound())) {
    HandleScope scope(thread);
    Code code(&scope, frame->code());
    // Map the reverse index back to a varnames index for the error message.
    word name_idx = code.nlocals() - arg - 1;
    Str name(&scope, Tuple::cast(code.varnames()).at(name_idx));
    thread->raiseWithFmt(LayoutId::kUnboundLocalError,
                         "local variable '%S' referenced before assignment",
                         &name);
    return Continue::UNWIND;
  }
  thread->stackPush(value);
  return Continue::NEXT;
}
4371
4372HANDLER_INLINE Continue Interpreter::doLoadFastReverseUnchecked(Thread* thread,
4373 word arg) {
4374 RawObject value = thread->currentFrame()->localWithReverseIndex(arg);
4375 DCHECK(!value.isErrorNotFound(), "no value assigned yet");
4376 thread->stackPush(value);
4377 return Continue::NEXT;
4378}
4379
4380HANDLER_INLINE Continue Interpreter::doStoreFast(Thread* thread, word arg) {
4381 Frame* frame = thread->currentFrame();
4382 RawObject value = thread->stackPop();
4383 frame->setLocal(arg, value);
4384 return Continue::NEXT;
4385}
4386
4387HANDLER_INLINE Continue Interpreter::doStoreFastReverse(Thread* thread,
4388 word arg) {
4389 Frame* frame = thread->currentFrame();
4390 RawObject value = thread->stackPop();
4391 frame->setLocalWithReverseIndex(arg, value);
4392 return Continue::NEXT;
4393}
4394
4395HANDLER_INLINE Continue
4396Interpreter::doDeleteFastReverseUnchecked(Thread* thread, word arg) {
4397 Frame* frame = thread->currentFrame();
4398 frame->setLocalWithReverseIndex(arg, Error::notFound());
4399 return Continue::NEXT;
4400}
4401
// DELETE_FAST: unbind local `arg`; raises UnboundLocalError if the slot is
// already unbound (still holds the Error::notFound() sentinel).
HANDLER_INLINE Continue Interpreter::doDeleteFast(Thread* thread, word arg) {
  Frame* frame = thread->currentFrame();
  if (UNLIKELY(frame->local(arg).isErrorNotFound())) {
    HandleScope scope(thread);
    Object name(&scope,
                Tuple::cast(Code::cast(frame->code()).varnames()).at(arg));
    thread->raiseWithFmt(LayoutId::kUnboundLocalError,
                         "local variable '%S' referenced before assignment",
                         &name);
    return Continue::UNWIND;
  }
  frame->setLocal(arg, Error::notFound());
  return Continue::NEXT;
}
4416
// RAISE_VARARGS: arg == 0 re-raises the innermost caught exception; arg == 1
// raises TOS; arg == 2 raises TOS1 with TOS as its cause. Always unwinds.
HANDLER_INLINE Continue Interpreter::doRaiseVarargs(Thread* thread, word arg) {
  DCHECK(arg >= 0, "Negative argument to RAISE_VARARGS");
  DCHECK(arg <= 2, "Argument to RAISE_VARARGS too large");

  if (arg == 0) {
    // Re-raise the caught exception.
    HandleScope scope(thread);
    Object caught_exc_state_obj(&scope, thread->topmostCaughtExceptionState());
    if (caught_exc_state_obj.isNoneType()) {
      thread->raiseWithFmt(LayoutId::kRuntimeError,
                           "No active exception to reraise");
    } else {
      ExceptionState caught_exc_state(&scope, *caught_exc_state_obj);
      thread->setPendingExceptionType(caught_exc_state.type());
      thread->setPendingExceptionValue(caught_exc_state.value());
      thread->setPendingExceptionTraceback(caught_exc_state.traceback());
    }
  } else {
    // Error::notFound() tells raise() that no explicit cause was supplied.
    RawObject cause = (arg >= 2) ? thread->stackPop() : Error::notFound();
    RawObject exn = (arg >= 1) ? thread->stackPop() : NoneType::object();
    raise(thread, exn, cause);
  }

  return Continue::UNWIND;
}
4442
4443HANDLER_INLINE
4444Continue Interpreter::callTrampoline(Thread* thread, Function::Entry entry,
4445 word nargs, RawObject* post_call_sp) {
4446 RawObject result = entry(thread, nargs);
4447 DCHECK(thread->stackPointer() == post_call_sp, "stack not cleaned");
4448 if (result.isErrorException()) return Continue::UNWIND;
4449 thread->stackPush(result);
4450 return Continue::NEXT;
4451}
4452
// Shared tail of all interpreted-function calls: binds arguments via
// `prepare_args`, pushes a new call frame, and initializes cell/free
// variables when the function has any.
static HANDLER_INLINE Continue
callInterpretedImpl(Thread* thread, word nargs, RawFunction function,
                    RawObject* post_call_sp, PrepareCallFunc prepare_args) {
  // Warning: This code is using `RawXXX` variables for performance! This is
  // despite the fact that we call functions that do potentially perform memory
  // allocations. This is legal here because we always rely on the functions
  // returning an up-to-date address and we make sure to never access any value
  // produced before a call after that call. Be careful not to break this
  // invariant if you change the code!

  RawObject result = prepare_args(thread, nargs, function);
  if (result.isErrorException()) {
    DCHECK(thread->stackPointer() == post_call_sp, "stack not cleaned");
    return Continue::UNWIND;
  }
  function = RawFunction::cast(result);

  // Read the flag before pushCallFrame: `function` is a raw reference and
  // must not be used after the frame push (see warning above).
  bool has_freevars_or_cellvars = function.hasFreevarsOrCellvars();
  Frame* callee_frame = thread->pushCallFrame(function);
  if (UNLIKELY(callee_frame == nullptr)) {
    return Continue::UNWIND;
  }
  if (has_freevars_or_cellvars) {
    processFreevarsAndCellvars(thread, callee_frame);
  }
  return Continue::NEXT;
}
4480
4481Continue Interpreter::callInterpreted(Thread* thread, word nargs,
4482 RawFunction function) {
4483 RawObject* post_call_sp = thread->stackPointer() + nargs + 1;
4484 return callInterpretedImpl(thread, nargs, function, post_call_sp,
4485 preparePositionalCall);
4486}
4487
// Common implementation of CALL_FUNCTION-style opcodes: resolves the callable
// at depth `callable_idx`, tries an intrinsic shortcut, dispatches native
// functions through their trampoline, and otherwise pushes an interpreted
// call frame.
HANDLER_INLINE Continue Interpreter::handleCall(
    Thread* thread, word nargs, word callable_idx, PrepareCallFunc prepare_args,
    Function::Entry (RawFunction::*get_entry)() const) {
  // Warning: This code is using `RawXXX` variables for performance! This is
  // despite the fact that we call functions that do potentially perform memory
  // allocations. This is legal here because we always rely on the functions
  // returning an up-to-date address and we make sure to never access any value
  // produced before a call after that call. Be careful not to break this
  // invariant if you change the code!

  RawObject* post_call_sp = thread->stackPointer() + callable_idx + 1;
  PrepareCallableResult prepare_result =
      prepareCallableCall(thread, nargs, callable_idx);
  // prepareCallableCall may unwrap e.g. bound methods and adjust nargs.
  nargs = prepare_result.nargs;
  if (prepare_result.function.isErrorException()) {
    thread->stackDrop(nargs + 1);
    DCHECK(thread->stackPointer() == post_call_sp, "stack not cleaned");
    return Continue::UNWIND;
  }
  RawFunction function = RawFunction::cast(prepare_result.function);

  IntrinsicFunction intrinsic =
      reinterpret_cast<IntrinsicFunction>(function.intrinsic());
  if (intrinsic != nullptr) {
    // Executes the function at the given symbol without pushing a new frame.
    // If the call succeeds, pops the arguments off of the caller's frame, sets
    // the top value to the result, and returns true. If the call fails, leaves
    // the stack unchanged and returns false.
    if ((*intrinsic)(thread)) {
      DCHECK(thread->stackPointer() == post_call_sp - 1, "stack not cleaned");
      return Continue::NEXT;
    }
  }

  if (!function.isInterpreted()) {
    return callTrampoline(thread, (function.*get_entry)(), nargs, post_call_sp);
  }

  return callInterpretedImpl(thread, nargs, function, post_call_sp,
                             prepare_args);
}
4529
// Calls `function_obj` — already on the stack beneath its `nargs` arguments —
// without re-resolving the callable: intrinsic shortcut, then native
// trampoline, then interpreted frame push.
ALWAYS_INLINE Continue Interpreter::tailcallFunction(Thread* thread, word nargs,
                                                     RawObject function_obj) {
  RawObject* post_call_sp = thread->stackPointer() + nargs + 1;
  DCHECK(function_obj == thread->stackPeek(nargs),
         "thread->stackPeek(nargs) is expected to be the given function");
  RawFunction function = Function::cast(function_obj);
  IntrinsicFunction intrinsic =
      reinterpret_cast<IntrinsicFunction>(function.intrinsic());
  if (intrinsic != nullptr) {
    // Executes the function at the given symbol without pushing a new frame.
    // If the call succeeds, pops the arguments off of the caller's frame, sets
    // the top value to the result, and returns true. If the call fails, leaves
    // the stack unchanged and returns false.
    if ((*intrinsic)(thread)) {
      DCHECK(thread->stackPointer() == post_call_sp - 1, "stack not cleaned");
      return Continue::NEXT;
    }
  }
  if (!function.isInterpreted()) {
    return callTrampoline(thread, function.entry(), nargs, post_call_sp);
  }
  return callInterpretedImpl(thread, nargs, function, post_call_sp,
                             preparePositionalCall);
}
4554
4555HANDLER_INLINE Continue Interpreter::doCallFunction(Thread* thread, word arg) {
4556 return handleCall(thread, arg, arg, preparePositionalCall, &Function::entry);
4557}
4558
// CALL_FUNCTION_ANAMORPHIC: first execution of a CALL_FUNCTION site. If the
// callable is a type, specialize the site toward a type-constructor opcode;
// otherwise rewrite back to plain CALL_FUNCTION.
HANDLER_INLINE Continue Interpreter::doCallFunctionAnamorphic(Thread* thread,
                                                              word arg) {
  Frame* frame = thread->currentFrame();
  RawObject callable = thread->stackPeek(arg);
  // TODO(T87427456): Also rewrite if callable is a metaclass that does not
  // override __call__
  if (callable.isType()) {
    word cache = currentCacheIndex(frame);
    return callFunctionTypeNewUpdateCache(thread, arg, cache);
  }
  rewriteCurrentBytecode(frame, CALL_FUNCTION);
  return doCallFunction(thread, arg);
}
4572
// Specializes a type-call site. Picks a constructor to cache: a special
// builtin ctor for known layouts (str/type/int/range, keyed on arity), the
// type's generic ctor, or — when __new__ is object.__new__ — the type's
// __init__ (rewriting to CALL_FUNCTION_TYPE_INIT so object allocation is
// inlined). Then rewrites the bytecode and dispatches the call.
Continue Interpreter::callFunctionTypeNewUpdateCache(Thread* thread, word arg,
                                                     word cache) {
  HandleScope scope(thread);
  Frame* frame = thread->currentFrame();
  word callable_idx = arg;
  Type receiver(&scope, thread->stackPeek(callable_idx));
  Function dependent(&scope, frame->function());
  // Compiled (assembly) functions cannot have their bytecode rewritten here.
  if (dependent.isCompiled()) {
    return Continue::DEOPT;
  }
  MutableTuple caches(&scope, frame->caches());
  Object ctor(&scope, receiver.ctor());
  bool set_ctor = false;
  Runtime* runtime = thread->runtime();
  if (arg == 1) {
    // One-argument constructors with specialized single-arg entry points.
    switch (receiver.instanceLayoutId()) {
      case LayoutId::kStr:
        ctor = runtime->lookupNameInModule(thread, ID(_builtins),
                                           ID(_str_ctor_obj));
        DCHECK(!ctor.isError(), "cannot find _str_ctor_obj");
        set_ctor = true;
        break;
      case LayoutId::kType:
        ctor =
            runtime->lookupNameInModule(thread, ID(_builtins), ID(_type_ctor));
        DCHECK(!ctor.isError(), "cannot find _type_ctor");
        set_ctor = true;
        break;
      case LayoutId::kInt:
        ctor = runtime->lookupNameInModule(thread, ID(_builtins),
                                           ID(_int_ctor_obj));
        DCHECK(!ctor.isError(), "cannot find _int_ctor_obj");
        set_ctor = true;
        break;
      case LayoutId::kRange:
        ctor = runtime->lookupNameInModule(thread, ID(_builtins),
                                           ID(_range_ctor_stop));
        DCHECK(!ctor.isError(), "cannot find _range_ctor_stop");
        set_ctor = true;
        break;
      default:
        break;
    }
  } else if (arg == 2) {
    switch (receiver.instanceLayoutId()) {
      case LayoutId::kRange:
        ctor = runtime->lookupNameInModule(thread, ID(_builtins),
                                           ID(_range_ctor_start_stop));
        DCHECK(!ctor.isError(), "cannot find _range_ctor_start_stop");
        set_ctor = true;
        break;
      default:
        break;
    }
  } else if (arg == 3) {
    switch (receiver.instanceLayoutId()) {
      case LayoutId::kRange:
        ctor = runtime->lookupNameInModule(thread, ID(_builtins),
                                           ID(_range_ctor_start_stop_step));
        DCHECK(!ctor.isError(), "cannot find _range_ctor_start_stop_step");
        set_ctor = true;
        break;
      default:
        break;
    }
  }
  if (!set_ctor) {
    // TODO(emacs): Split out objectInit from objectNew and split opcode into
    // slots vs no slots
    // TODO(emacs): Only cache if not abstract (and do not include that in
    // objectInit)
    // TODO(emacs): Also split by tuple overflow/no tuple overflow
    // TODO(emacs): Also cache instanceSize so we can allocate without the
    // layout object
    bool use_object_dunder_new =
        receiver.isType() && receiver.hasFlag(Type::Flag::kHasObjectDunderNew);
    if (use_object_dunder_new) {
      // Metaclass is "type" so we do not need to check for __init__ being a
      // datadescriptor and we can look it up directly on the type.
      ctor = typeLookupInMroById(thread, *receiver, ID(__init__));
      DCHECK(!ctor.isError(), "self must have __init__");
      rewriteCurrentBytecode(frame, CALL_FUNCTION_TYPE_INIT);
      icUpdateCallFunctionTypeNew(thread, caches, cache, receiver, ctor,
                                  dependent);
      return doCallFunctionTypeInit(thread, arg);
    }
  }
  rewriteCurrentBytecode(frame, CALL_FUNCTION_TYPE_NEW);
  icUpdateCallFunctionTypeNew(thread, caches, cache, receiver, ctor, dependent);
  return doCallFunctionTypeNew(thread, arg);
}
4664
// CALL_FUNCTION_TYPE_NEW: call a cached constructor for a type callable. On a
// cache miss (callable no longer a type, or layout changed) the site is
// rewritten back to CALL_FUNCTION.
HANDLER_INLINE Continue Interpreter::doCallFunctionTypeNew(Thread* thread,
                                                           word arg) {
  HandleScope scope(thread);
  Frame* frame = thread->currentFrame();
  word callable_idx = arg;
  Object receiver(&scope, thread->stackPeek(callable_idx));
  if (!receiver.isType()) {
    EVENT_CACHE(CALL_FUNCTION_TYPE_NEW);
    rewriteCurrentBytecode(frame, CALL_FUNCTION);
    return doCallFunction(thread, arg);
  }
  MutableTuple caches(&scope, frame->caches());
  word cache = currentCacheIndex(frame);
  bool is_found;
  Object ctor(&scope, icLookupMonomorphic(
                          *caches, cache,
                          Type::cast(*receiver).instanceLayoutId(), &is_found));
  if (!is_found) {
    EVENT_CACHE(CALL_FUNCTION_TYPE_NEW);
    rewriteCurrentBytecode(frame, CALL_FUNCTION);
    return doCallFunction(thread, arg);
  }
  // TODO(Txxx): Separate into two opcodes. Normal type.ctor() functions take
  // cls as the first parameter, but specialized cached ctors such as
  // _str_ctor_obj need only take one argument: the arg to be converted. Avoid
  // the stack shuffle in the fast case.
  DCHECK(ctor.isFunction(), "cached is expected to be a function");
  // Replace the type with the ctor and re-insert the type as the first
  // (cls) argument, then tail-call ctor(cls, args...).
  thread->stackSetAt(callable_idx, *ctor);
  thread->stackInsertAt(callable_idx, *receiver);
  return tailcallFunction(thread, arg + 1, *ctor);
}
4696
// CALL_FUNCTION_TYPE_INIT: fast path for types whose __new__ is
// object.__new__ — allocate the instance directly, call the cached __init__,
// and enforce that __init__ returns None. Cache misses fall back to
// CALL_FUNCTION.
HANDLER_INLINE Continue Interpreter::doCallFunctionTypeInit(Thread* thread,
                                                            word arg) {
  HandleScope scope(thread);
  Frame* frame = thread->currentFrame();
  word callable_idx = arg;
  Object receiver(&scope, thread->stackPeek(callable_idx));
  if (!receiver.isType()) {
    EVENT_CACHE(CALL_FUNCTION_TYPE_INIT);
    rewriteCurrentBytecode(frame, CALL_FUNCTION);
    return doCallFunction(thread, arg);
  }
  MutableTuple caches(&scope, frame->caches());
  word cache = currentCacheIndex(frame);
  bool is_found;
  Object init(&scope, icLookupMonomorphic(
                          *caches, cache,
                          Type::cast(*receiver).instanceLayoutId(), &is_found));
  if (!is_found) {
    EVENT_CACHE(CALL_FUNCTION_TYPE_INIT);
    rewriteCurrentBytecode(frame, CALL_FUNCTION);
    return doCallFunction(thread, arg);
  }
  Type type(&scope, *receiver);
  Object instance(&scope, objectNew(thread, type));
  DCHECK(init.isFunction(), "cached is expected to be a function");
  // Replace the type with __init__ and insert the fresh instance as self.
  thread->stackSetAt(callable_idx, *init);
  thread->stackInsertAt(callable_idx, *instance);
  Object result(&scope, callFunction(thread, arg + 1, *init));
  // TODO(emacs): When we have real call/ret working in the assembly
  // interpreter, add an asm implementation of this. Right now it does not make
  // much sense to do that because the only way to call an entryAsm is to
  // tailcall it... but we need to do the None check and return the instance.
  if (!result.isNoneType()) {
    if (!result.isErrorException()) {
      Object type_name(&scope, type.name());
      thread->raiseWithFmt(LayoutId::kTypeError,
                           "%S.__init__ returned non None", &type_name);
    }
    return Continue::UNWIND;
  }
  thread->stackPush(*instance);
  return Continue::NEXT;
}
4740
// MAKE_FUNCTION: build a function from TOS (qualname) and TOS1 (code). Flag
// bits in `arg` select extra popped operands; the pop order below must mirror
// the compiler's push order (closure, annotations, kwdefaults, defaults).
HANDLER_INLINE Continue Interpreter::doMakeFunction(Thread* thread, word arg) {
  HandleScope scope(thread);
  Frame* frame = thread->currentFrame();
  Object qualname(&scope, thread->stackPop());
  Code code(&scope, thread->stackPop());
  Module module(&scope, frame->function().moduleObject());
  Runtime* runtime = thread->runtime();
  Function function(
      &scope, runtime->newFunctionWithCode(thread, qualname, code, module));
  if (arg & MakeFunctionFlag::CLOSURE) {
    function.setClosure(thread->stackPop());
    DCHECK(runtime->isInstanceOfTuple(function.closure()), "expected tuple");
  }
  if (arg & MakeFunctionFlag::ANNOTATION_DICT) {
    function.setAnnotations(thread->stackPop());
    DCHECK(runtime->isInstanceOfDict(function.annotations()), "expected dict");
  }
  if (arg & MakeFunctionFlag::DEFAULT_KW) {
    function.setKwDefaults(thread->stackPop());
    DCHECK(runtime->isInstanceOfDict(function.kwDefaults()), "expected dict");
  }
  if (arg & MakeFunctionFlag::DEFAULT) {
    function.setDefaults(thread->stackPop());
    DCHECK(runtime->isInstanceOfTuple(function.defaults()), "expected tuple");
  }
  thread->stackPush(*function);
  return Continue::NEXT;
}
4769
// BUILD_SLICE: build a slice from 2 or 3 stack operands (step only when
// arg == 3). `start` stays on the stack and is overwritten with the result;
// the all-None slice reuses the runtime's interned empty slice.
HANDLER_INLINE Continue Interpreter::doBuildSlice(Thread* thread, word arg) {
  RawObject step = arg == 3 ? thread->stackPop() : NoneType::object();
  RawObject stop = thread->stackPop();
  RawObject start = thread->stackTop();
  Runtime* runtime = thread->runtime();
  if (start.isNoneType() && stop.isNoneType() && step.isNoneType()) {
    thread->stackSetTop(runtime->emptySlice());
  } else {
    // newSlice may allocate, so the raw values go into handles first.
    HandleScope scope(thread);
    Object start_obj(&scope, start);
    Object stop_obj(&scope, stop);
    Object step_obj(&scope, step);
    thread->stackSetTop(runtime->newSlice(start_obj, stop_obj, step_obj));
  }
  return Continue::NEXT;
}
4786
4787HANDLER_INLINE Continue Interpreter::doLoadClosure(Thread* thread, word arg) {
4788 Frame* frame = thread->currentFrame();
4789 RawCode code = Code::cast(frame->code());
4790 thread->stackPush(frame->local(code.nlocals() + arg));
4791 return Continue::NEXT;
4792}
4793
// Raises UnboundLocalError for an unbound cell or free variable. `idx`
// indexes the combined cellvars+freevars space; indices below numCellvars are
// cell variables, the rest are free variables (different message).
static RawObject raiseUnboundCellFreeVar(Thread* thread, const Code& code,
                                         word idx) {
  HandleScope scope(thread);
  Object names_obj(&scope, NoneType::object());
  const char* fmt;
  if (idx < code.numCellvars()) {
    names_obj = code.cellvars();
    fmt = "local variable '%S' referenced before assignment";
  } else {
    idx -= code.numCellvars();
    names_obj = code.freevars();
    fmt =
        "free variable '%S' referenced before assignment in enclosing "
        "scope";
  }
  Tuple names(&scope, *names_obj);
  Str name(&scope, names.at(idx));
  return thread->raiseWithFmt(LayoutId::kUnboundLocalError, fmt, &name);
}
4813
// LOAD_DEREF: push the value held by cell/free variable `arg` (stored after
// the regular locals). An Unbound cell raises UnboundLocalError.
HANDLER_INLINE Continue Interpreter::doLoadDeref(Thread* thread, word arg) {
  Frame* frame = thread->currentFrame();
  HandleScope scope(thread);
  Code code(&scope, frame->code());
  Cell cell(&scope, frame->local(code.nlocals() + arg));
  Object value(&scope, cell.value());
  if (value.isUnbound()) {
    raiseUnboundCellFreeVar(thread, code, arg);
    return Continue::UNWIND;
  }
  thread->stackPush(*value);
  return Continue::NEXT;
}
4827
4828HANDLER_INLINE Continue Interpreter::doStoreDeref(Thread* thread, word arg) {
4829 Frame* frame = thread->currentFrame();
4830 RawCode code = Code::cast(frame->code());
4831 Cell::cast(frame->local(code.nlocals() + arg)).setValue(thread->stackPop());
4832 return Continue::NEXT;
4833}
4834
4835HANDLER_INLINE Continue Interpreter::doDeleteDeref(Thread* thread, word arg) {
4836 Frame* frame = thread->currentFrame();
4837 RawCode code = Code::cast(frame->code());
4838 Cell::cast(frame->local(code.nlocals() + arg)).setValue(Unbound::object());
4839 return Continue::NEXT;
4840}
4841
4842HANDLER_INLINE Continue Interpreter::doCallFunctionKw(Thread* thread,
4843 word arg) {
4844 return handleCall(thread, arg, arg + 1, prepareKeywordCall,
4845 &Function::entryKw);
4846}
4847
// CALL_FUNCTION_EX: call with *args (and **kwargs when the VAR_KEYWORDS flag
// is set). The callable sits under the args tuple (and the kwargs mapping
// when present).
HANDLER_INLINE Continue Interpreter::doCallFunctionEx(Thread* thread,
                                                      word arg) {
  word callable_idx = (arg & CallFunctionExFlag::VAR_KEYWORDS) ? 2 : 1;
  RawObject* post_call_sp = thread->stackPointer() + callable_idx + 1;
  HandleScope scope(thread);
  Object callable(&scope, prepareCallableEx(thread, callable_idx));
  if (callable.isErrorException()) {
    thread->stackDrop(callable_idx + 1);
    DCHECK(thread->stackPointer() == post_call_sp, "stack not cleaned");
    return Continue::UNWIND;
  }

  // Native functions keep the packed args and go through the Ex trampoline.
  Function function(&scope, *callable);
  if (!function.isInterpreted()) {
    return callTrampoline(thread, function.entryEx(), arg, post_call_sp);
  }

  // Interpreted functions get the packed args exploded into stack slots.
  if (prepareExplodeCall(thread, arg, *function).isErrorException()) {
    DCHECK(thread->stackPointer() == post_call_sp, "stack not cleaned");
    return Continue::UNWIND;
  }

  // Same tail as callInterpretedImpl: read the flag before pushCallFrame,
  // then initialize cells after the frame exists.
  bool has_freevars_or_cellvars = function.hasFreevarsOrCellvars();
  Frame* callee_frame = thread->pushCallFrame(*function);
  if (UNLIKELY(callee_frame == nullptr)) {
    return Continue::UNWIND;
  }
  if (has_freevars_or_cellvars) {
    processFreevarsAndCellvars(thread, callee_frame);
  }
  return Continue::NEXT;
}
4880
// SETUP_WITH: enter a `with` block. Looks up __enter__/__exit__ on the
// context manager's type, replaces TOS with the bound __exit__, pushes a
// finally block whose handler is `arg` code units ahead, and pushes the
// __enter__ result.
HANDLER_INLINE Continue Interpreter::doSetupWith(Thread* thread, word arg) {
  HandleScope scope(thread);
  Runtime* runtime = thread->runtime();
  Object mgr(&scope, thread->stackTop());
  Type mgr_type(&scope, runtime->typeOf(*mgr));
  Object enter(&scope, typeLookupInMroById(thread, *mgr_type, ID(__enter__)));
  if (enter.isError()) {
    if (enter.isErrorNotFound()) {
      thread->raise(LayoutId::kAttributeError,
                    runtime->symbols()->at(ID(__enter__)));
    } else {
      DCHECK(enter.isErrorException(),
             "expected Error::exception() or Error::notFound()");
    }
    return Continue::UNWIND;
  }

  Object exit(&scope, typeLookupInMroById(thread, *mgr_type, ID(__exit__)));
  if (exit.isError()) {
    if (exit.isErrorNotFound()) {
      thread->raise(LayoutId::kAttributeError,
                    runtime->symbols()->at(ID(__exit__)));
    } else {
      DCHECK(exit.isErrorException(),
             "expected Error::exception() or Error::notFound()");
    }
    return Continue::UNWIND;
  }
  // Plain functions are bound directly; other descriptors go through __get__.
  Object exit_bound(&scope,
                    exit.isFunction()
                        ? runtime->newBoundMethod(exit, mgr)
                        : resolveDescriptorGet(thread, exit, mgr, mgr_type));
  thread->stackSetTop(*exit_bound);

  Object result(&scope, NoneType::object());
  if (enter.isFunction()) {
    result = callMethod1(thread, enter, mgr);
  } else {
    thread->stackPush(resolveDescriptorGet(thread, enter, mgr, mgr_type));
    result = call(thread, 0);
  }
  if (result.isErrorException()) return Continue::UNWIND;

  // Re-fetch the frame: the calls above may have changed the current frame.
  word stack_depth = thread->valueStackSize();
  Frame* frame = thread->currentFrame();
  word handler_pc = frame->virtualPC() + arg * kCodeUnitScale;
  frame->blockStackPush(TryBlock(TryBlock::kFinally, handler_pc, stack_depth));
  thread->stackPush(*result);
  return Continue::NEXT;
}
4931
4932HANDLER_INLINE Continue Interpreter::doListAppend(Thread* thread, word arg) {
4933 HandleScope scope(thread);
4934 Object value(&scope, thread->stackPop());
4935 List list(&scope, thread->stackPeek(arg - 1));
4936 thread->runtime()->listAdd(thread, list, value);
4937 return Continue::NEXT;
4938}
4939
4940HANDLER_INLINE Continue Interpreter::doSetAdd(Thread* thread, word arg) {
4941 HandleScope scope(thread);
4942 Object value(&scope, thread->stackPop());
4943 Object hash_obj(&scope, hash(thread, value));
4944 if (hash_obj.isErrorException()) {
4945 return Continue::UNWIND;
4946 }
4947 word hash = SmallInt::cast(*hash_obj).value();
4948 Set set(&scope, Set::cast(thread->stackPeek(arg - 1)));
4949 setAdd(thread, set, value, hash);
4950 return Continue::NEXT;
4951}
4952
4953HANDLER_INLINE Continue Interpreter::doMapAdd(Thread* thread, word arg) {
4954 HandleScope scope(thread);
4955 Object value(&scope, thread->stackPop());
4956 Object key(&scope, thread->stackPop());
4957 Dict dict(&scope, Dict::cast(thread->stackPeek(arg - 1)));
4958 Object hash_obj(&scope, Interpreter::hash(thread, key));
4959 if (hash_obj.isErrorException()) return Continue::UNWIND;
4960 word hash = SmallInt::cast(*hash_obj).value();
4961 Object result(&scope, dictAtPut(thread, dict, key, hash, value));
4962 if (result.isErrorException()) return Continue::UNWIND;
4963 return Continue::NEXT;
4964}
4965
// LOAD_CLASSDEREF: used in class bodies for a free variable — first try the
// class namespace (implicit globals, or the module for a module body), then
// fall back to the enclosing cell.
HANDLER_INLINE Continue Interpreter::doLoadClassDeref(Thread* thread,
                                                      word arg) {
  Frame* frame = thread->currentFrame();
  HandleScope scope(thread);
  Code code(&scope, frame->code());
  // LOAD_CLASSDEREF args index past the cellvars into the freevars.
  word idx = arg - code.numCellvars();
  Str name(&scope, Tuple::cast(code.freevars()).at(idx));
  Object result(&scope, NoneType::object());
  if (frame->implicitGlobals().isNoneType()) {
    // Module body
    Module module(&scope, frame->function().moduleObject());
    result = moduleAt(module, name);
  } else {
    // Class body
    Object implicit_globals(&scope, frame->implicitGlobals());
    if (implicit_globals.isDict()) {
      Dict implicit_globals_dict(&scope, *implicit_globals);
      result = dictAtByStr(thread, implicit_globals_dict, name);
    } else {
      // Arbitrary mapping: a KeyError here just means "not found".
      result = objectGetItem(thread, implicit_globals, name);
      if (result.isErrorException()) {
        if (!thread->pendingExceptionMatches(LayoutId::kKeyError)) {
          return Continue::UNWIND;
        }
        thread->clearPendingException();
      }
    }
  }

  if (result.isErrorNotFound()) {
    // Not in the class namespace: read the enclosing scope's cell.
    Cell cell(&scope, frame->local(code.nlocals() + arg));
    if (cell.isUnbound()) {
      UNIMPLEMENTED("unbound free var %s", Str::cast(*name).toCStr());
    }
    thread->stackPush(cell.value());
  } else {
    thread->stackPush(*result);
  }

  return Continue::NEXT;
}
5007
// Extends `list` with the elements of `iterable`. Lists and tuples are
// fast-pathed via listExtend; anything else dispatches to list.extend (which
// may raise). `src_handle` is caller-provided scratch so this helper needs no
// HandleScope of its own.
static RawObject listUnpack(Thread* thread, const List& list,
                            const Object& iterable, Tuple* src_handle) {
  word src_length;
  if (iterable.isList()) {
    *src_handle = List::cast(*iterable).items();
    src_length = List::cast(*iterable).numItems();
  } else if (iterable.isTuple()) {
    *src_handle = *iterable;
    src_length = src_handle->length();
  } else {
    return thread->invokeMethodStatic2(LayoutId::kList, ID(extend), list,
                                       iterable);
  }
  listExtend(thread, list, *src_handle, src_length);
  return NoneType::object();
}
5024
// BUILD_LIST_UNPACK: concatenate `arg` iterables from the stack into a new
// list. Iterates from the deepest operand so element order is preserved.
HANDLER_INLINE Continue Interpreter::doBuildListUnpack(Thread* thread,
                                                       word arg) {
  Runtime* runtime = thread->runtime();
  HandleScope scope(thread);
  List list(&scope, runtime->newList());
  Object iterable(&scope, NoneType::object());
  Tuple src_handle(&scope, runtime->emptyTuple());
  for (word i = arg - 1; i >= 0; i--) {
    iterable = thread->stackPeek(i);
    if (listUnpack(thread, list, iterable, &src_handle).isErrorException()) {
      return Continue::UNWIND;
    }
  }
  // Drop all operands but one and overwrite the last with the result.
  thread->stackDrop(arg - 1);
  thread->stackSetTop(*list);
  return Continue::NEXT;
}
5042
// BUILD_MAP_UNPACK: merge `arg` mappings from the stack into a new dict,
// later operands overriding earlier ones. A non-mapping operand surfaces as
// TypeError (translated from the AttributeError dictMergeOverride raises).
HANDLER_INLINE Continue Interpreter::doBuildMapUnpack(Thread* thread,
                                                      word arg) {
  Runtime* runtime = thread->runtime();
  HandleScope scope(thread);
  Dict dict(&scope, runtime->newDict());
  Object obj(&scope, NoneType::object());
  for (word i = arg - 1; i >= 0; i--) {
    obj = thread->stackPeek(i);
    if (dictMergeOverride(thread, dict, obj).isErrorException()) {
      if (thread->pendingExceptionType() ==
          runtime->typeAt(LayoutId::kAttributeError)) {
        thread->clearPendingException();
        thread->raiseWithFmt(LayoutId::kTypeError,
                             "'%T' object is not a mapping", &obj);
      }
      return Continue::UNWIND;
    }
  }
  thread->stackDrop(arg - 1);
  thread->stackSetTop(*dict);
  return Continue::NEXT;
}
5065
// BUILD_MAP_UNPACK_WITH_CALL: like BUILD_MAP_UNPACK but for f(**a, **b) call
// sites — duplicate keys are an error (dictMergeError raises KeyError, which
// is translated to the call-site TypeError messages below).
HANDLER_INLINE Continue Interpreter::doBuildMapUnpackWithCall(Thread* thread,
                                                              word arg) {
  Runtime* runtime = thread->runtime();
  HandleScope scope(thread);
  Dict dict(&scope, runtime->newDict());
  Object obj(&scope, NoneType::object());
  for (word i = arg - 1; i >= 0; i--) {
    obj = thread->stackPeek(i);
    if (dictMergeError(thread, dict, obj).isErrorException()) {
      if (thread->pendingExceptionType() ==
          runtime->typeAt(LayoutId::kAttributeError)) {
        // Operand was not a mapping at all.
        thread->clearPendingException();
        thread->raiseWithFmt(LayoutId::kTypeError,
                             "'%T' object is not a mapping", &obj);
      } else if (thread->pendingExceptionType() ==
                 runtime->typeAt(LayoutId::kKeyError)) {
        // Duplicate key: a str key means a repeated keyword argument.
        Object value(&scope, thread->pendingExceptionValue());
        thread->clearPendingException();
        if (runtime->isInstanceOfStr(*value)) {
          thread->raiseWithFmt(LayoutId::kTypeError,
                               "got multiple values for keyword argument '%S'",
                               &value);
        } else {
          thread->raiseWithFmt(LayoutId::kTypeError,
                               "keywords must be strings");
        }
      }
      return Continue::UNWIND;
    }
  }
  thread->stackDrop(arg - 1);
  thread->stackSetTop(*dict);
  return Continue::NEXT;
}
5100
// BUILD_TUPLE_UNPACK: concatenate `arg` iterables into a tuple by collecting
// into a temporary list, then copying the exact-length prefix out.
HANDLER_INLINE Continue Interpreter::doBuildTupleUnpack(Thread* thread,
                                                        word arg) {
  Runtime* runtime = thread->runtime();
  HandleScope scope(thread);
  List list(&scope, runtime->newList());
  Object iterable(&scope, NoneType::object());
  Tuple src_handle(&scope, runtime->emptyTuple());
  for (word i = arg - 1; i >= 0; i--) {
    iterable = thread->stackPeek(i);
    if (listUnpack(thread, list, iterable, &src_handle).isErrorException()) {
      return Continue::UNWIND;
    }
  }
  // The list's backing tuple may be over-allocated; copy out numItems.
  Tuple items(&scope, list.items());
  Tuple tuple(&scope, runtime->tupleSubseq(thread, items, 0, list.numItems()));
  thread->stackDrop(arg - 1);
  thread->stackSetTop(*tuple);
  return Continue::NEXT;
}
5120
// BUILD_SET_UNPACK: pops `arg` iterables and pushes a set containing the
// union of their elements.
HANDLER_INLINE Continue Interpreter::doBuildSetUnpack(Thread* thread,
                                                      word arg) {
  Runtime* runtime = thread->runtime();
  HandleScope scope(thread);
  Set set(&scope, runtime->newSet());
  Object obj(&scope, NoneType::object());
  // NOTE(review): iterates from the top of the stack (rightmost operand)
  // down, unlike the other *_UNPACK handlers which go left-to-right. The
  // resulting set is the same, but which operand's error is raised first
  // differs -- confirm this ordering is intended.
  for (word i = 0; i < arg; i++) {
    obj = thread->stackPeek(i);
    if (setUpdate(thread, set, obj).isErrorException()) return Continue::UNWIND;
  }
  // Pop the `arg` operands and leave the set in their place.
  thread->stackDrop(arg - 1);
  thread->stackSetTop(*set);
  return Continue::NEXT;
}
5135
5136HANDLER_INLINE Continue Interpreter::doSetupAsyncWith(Thread* thread,
5137 word arg) {
5138 Frame* frame = thread->currentFrame();
5139 HandleScope scope(thread);
5140 Object result(&scope, thread->stackPop());
5141 word stack_depth = thread->valueStackSize();
5142 word handler_pc = frame->virtualPC() + arg * kCodeUnitScale;
5143 frame->blockStackPush(TryBlock(TryBlock::kFinally, handler_pc, stack_depth));
5144 thread->stackPush(*result);
5145 return Continue::NEXT;
5146}
5147
// FORMAT_VALUE: implements f-string interpolation. Bits of `arg` encode
// whether a format spec was pushed (kFormatValueHasSpecBit) and which
// conversion to apply first (!s / !r / !a via kFormatValueConvMask).
HANDLER_INLINE Continue Interpreter::doFormatValue(Thread* thread, word arg) {
  HandleScope scope(thread);
  Runtime* runtime = thread->runtime();
  Object fmt_spec(&scope, Str::empty());
  if (arg & kFormatValueHasSpecBit) {
    // The format spec, when present, sits above the value on the stack.
    fmt_spec = thread->stackPop();
  }
  Object value(&scope, thread->stackPop());
  switch (static_cast<FormatValueConv>(arg & kFormatValueConvMask)) {
    case FormatValueConv::kStr: {
      // `!s`: call __str__ unless the value is already an exact str.
      if (!value.isStr()) {
        value = thread->invokeMethod1(value, ID(__str__));
        DCHECK(!value.isErrorNotFound(), "`__str__` should always exist");
        if (value.isErrorException()) return Continue::UNWIND;
        if (!runtime->isInstanceOfStr(*value)) {
          thread->raiseWithFmt(LayoutId::kTypeError,
                               "__str__ returned non-string (type %T)", &value);
          return Continue::UNWIND;
        }
      }
      break;
    }
    case FormatValueConv::kRepr: {
      // `!r`: always call __repr__.
      value = thread->invokeMethod1(value, ID(__repr__));
      DCHECK(!value.isErrorNotFound(), "`__repr__` should always exist");
      if (value.isErrorException()) return Continue::UNWIND;
      if (!runtime->isInstanceOfStr(*value)) {
        thread->raiseWithFmt(LayoutId::kTypeError,
                             "__repr__ returned non-string (type %T)", &value);
        return Continue::UNWIND;
      }
      break;
    }
    case FormatValueConv::kAscii: {
      // `!a`: __repr__ followed by escaping all non-ASCII characters.
      value = thread->invokeMethod1(value, ID(__repr__));
      DCHECK(!value.isErrorNotFound(), "`__repr__` should always exist");
      if (value.isErrorException()) return Continue::UNWIND;
      if (!runtime->isInstanceOfStr(*value)) {
        thread->raiseWithFmt(LayoutId::kTypeError,
                             "__repr__ returned non-string (type %T)", &value);
        return Continue::UNWIND;
      }
      Str value_str(&scope, strUnderlying(*value));
      value = strEscapeNonASCII(thread, value_str);
      break;
    }
    case FormatValueConv::kNone:
      break;
  }

  // __format__ can be skipped only when there is no spec and the value is
  // already an exact str (identity compare against the canonical empty str).
  if (fmt_spec != Str::empty() || !value.isStr()) {
    value = thread->invokeMethod2(value, ID(__format__), fmt_spec);
    if (value.isErrorException()) return Continue::UNWIND;
    if (!runtime->isInstanceOfStr(*value)) {
      thread->raiseWithFmt(LayoutId::kTypeError,
                           "__format__ must return a str, not %T", &value);
      return Continue::UNWIND;
    }
  }
  thread->stackPush(*value);
  return Continue::NEXT;
}
5210
// BUILD_CONST_KEY_MAP: the stack holds `arg` values with a tuple of `arg`
// constant keys on top; pops all of them and pushes a dict mapping keys[i]
// to the i-th value.
HANDLER_INLINE Continue Interpreter::doBuildConstKeyMap(Thread* thread,
                                                        word arg) {
  HandleScope scope(thread);
  Tuple keys(&scope, thread->stackTop());
  Dict dict(&scope, thread->runtime()->newDictWithSize(keys.length()));
  Object key(&scope, NoneType::object());
  Object hash_obj(&scope, NoneType::object());
  for (word i = 0; i < arg; i++) {
    key = keys.at(i);
    // Hashing may invoke a user-defined __hash__ and raise.
    hash_obj = Interpreter::hash(thread, key);
    if (hash_obj.isErrorException()) return Continue::UNWIND;
    word hash = SmallInt::cast(*hash_obj).value();
    // Values sit below the keys tuple: peek(arg - i) is the value for keys[i].
    Object value(&scope, thread->stackPeek(arg - i));
    if (dictAtPut(thread, dict, key, hash, value).isErrorException()) {
      return Continue::UNWIND;
    }
  }
  // Drop the keys tuple plus the `arg` values, then push the result.
  thread->stackDrop(arg + 1);
  thread->stackPush(*dict);
  return Continue::NEXT;
}
5232
5233HANDLER_INLINE Continue Interpreter::doBuildString(Thread* thread, word arg) {
5234 switch (arg) {
5235 case 0: // empty
5236 thread->stackPush(Str::empty());
5237 break;
5238 case 1: // no-op
5239 break;
5240 default: { // concat
5241 RawObject res = stringJoin(thread, thread->stackPointer(), arg);
5242 thread->stackDrop(arg - 1);
5243 thread->stackSetTop(res);
5244 break;
5245 }
5246 }
5247 return Continue::NEXT;
5248}
5249
5250// LOAD_METHOD shapes the stack as follows:
5251//
5252// receiver or unbound
5253// callable <- Top of stack / lower memory addresses
5254//
5255// LOAD_METHOD is paired with a CALL_METHOD, and the matching CALL_METHOD
5256// falls back to the behavior of CALL_FUNCTION in this shape of the stack.
5257HANDLER_INLINE Continue Interpreter::doLoadMethod(Thread* thread, word arg) {
5258 thread->stackInsertAt(1, Unbound::object());
5259 return doLoadAttr(thread, arg);
5260}
5261
// Slow path for LOAD_METHOD: performs the attribute lookup, pushes the
// result, and -- when the lookup kind is cacheable -- specializes the
// bytecode and fills the inline cache for subsequent executions.
Continue Interpreter::loadMethodUpdateCache(Thread* thread, word arg,
                                            word cache) {
  HandleScope scope(thread);
  Frame* frame = thread->currentFrame();
  Function dependent(&scope, frame->function());
  if (dependent.isCompiled()) {
    // Compiled functions must not rewrite bytecode/caches; deoptimize.
    return Continue::DEOPT;
  }
  Object receiver(&scope, thread->stackTop());
  Str name(&scope, Tuple::cast(Code::cast(frame->code()).names()).at(arg));

  Object location(&scope, NoneType::object());
  LoadAttrKind kind;
  Object result(&scope, thread->runtime()->attributeAtSetLocation(
                            thread, receiver, name, &kind, &location));
  if (result.isErrorException()) return Continue::UNWIND;
  if (kind != LoadAttrKind::kInstanceFunction &&
      kind != LoadAttrKind::kModule) {
    // Not a cacheable kind: push the resolved callable and mark the
    // receiver slot Unbound so CALL_METHOD uses the CALL_FUNCTION path.
    thread->stackPush(*result);
    thread->stackSetAt(1, Unbound::object());
    return Continue::NEXT;
  }

  // Cache the attribute load.
  MutableTuple caches(&scope, frame->caches());
  ICState ic_state = icCurrentState(*caches, cache);

  if (ic_state == ICState::kAnamorphic) {
    // First specialization of this site.
    switch (kind) {
      case LoadAttrKind::kInstanceFunction:
        rewriteCurrentBytecode(frame, LOAD_METHOD_INSTANCE_FUNCTION);
        icUpdateAttr(thread, caches, cache, receiver.layoutId(), location, name,
                     dependent);
        break;
      case LoadAttrKind::kModule: {
        DCHECK(location.isValueCell(), "location must be ValueCell");
        ValueCell value_cell(&scope, *location);
        icUpdateMethodModule(thread, caches, cache, receiver, value_cell,
                             dependent);
        // Module attributes are not bound to the receiver: keep Unbound in
        // the receiver slot and return early.
        thread->stackPush(*result);
        thread->stackSetAt(1, Unbound::object());
        return Continue::NEXT;
      }
      default:
        break;
    }
  } else {
    DCHECK(currentBytecode(thread) == LOAD_METHOD_INSTANCE_FUNCTION ||
               currentBytecode(thread) == LOAD_METHOD_MODULE ||
               currentBytecode(thread) == LOAD_METHOD_POLYMORPHIC,
           "unexpected opcode %s", kBytecodeNames[currentBytecode(thread)]);
    switch (kind) {
      case LoadAttrKind::kInstanceFunction:
        // A second receiver layout was seen: grow into a polymorphic cache.
        rewriteCurrentBytecode(frame, LOAD_METHOD_POLYMORPHIC);
        icUpdateAttr(thread, caches, cache, receiver.layoutId(), location, name,
                     dependent);
        break;
      default:
        break;
    }
  }
  thread->stackPush(*result);
  thread->stackSetAt(1, Unbound::object());
  return Continue::NEXT;
}
5327
5328HANDLER_INLINE Continue Interpreter::doLoadMethodAnamorphic(Thread* thread,
5329 word arg) {
5330 word cache = currentCacheIndex(thread->currentFrame());
5331 return loadMethodUpdateCache(thread, arg, cache);
5332}
5333
// This code cleans-up a monomorphic cache and prepares it for its potential
// use as a polymorphic cache. This code should be removed when we change the
// structure of our caches directly accessible from a function to be
// monomorphic and to allocate the relatively uncommon polymorphic caches in a
// separate object.
NEVER_INLINE Continue Interpreter::retryLoadMethodCached(Thread* thread,
                                                         word arg, word cache) {
  // Revert the opcode, clear the cache, and retry the attribute lookup.
  Frame* frame = thread->currentFrame();
  if (frame->function().isCompiled()) {
    // Compiled functions must not rewrite bytecode/caches; deoptimize.
    return Continue::DEOPT;
  }
  rewriteCurrentBytecode(frame, LOAD_METHOD_ANAMORPHIC);
  RawMutableTuple caches = MutableTuple::cast(frame->caches());
  word index = cache * kIcPointersPerEntry;
  // Empty both cache slots so the retry starts from a pristine state.
  caches.atPut(index + kIcEntryKeyOffset, NoneType::object());
  caches.atPut(index + kIcEntryValueOffset, NoneType::object());
  return Interpreter::loadMethodUpdateCache(thread, arg, cache);
}
5353
// LOAD_METHOD specialized for module attributes: the cache key is the
// module's id and the cached value is the attribute's ValueCell.
HANDLER_INLINE Continue Interpreter::doLoadMethodModule(Thread* thread,
                                                        word arg) {
  Frame* frame = thread->currentFrame();
  RawObject receiver = thread->stackTop();
  RawMutableTuple caches = MutableTuple::cast(frame->caches());
  word cache = currentCacheIndex(frame);
  word index = cache * kIcPointersPerEntry;
  RawObject cache_key = caches.at(index + kIcEntryKeyOffset);
  // isInstanceOfModule() should be just as fast as isModule() in the common
  // case. If code size or quality is an issue we can adjust this as needed
  // based on the types that actually flow through here.
  if (thread->runtime()->isInstanceOfModule(receiver) &&
      // Use rawCast() to support subclasses without the overhead of a
      // handle.
      SmallInt::fromWord(receiver.rawCast<RawModule>().id()) == cache_key) {
    RawObject result = caches.at(index + kIcEntryValueOffset);
    DCHECK(result.isValueCell(), "cached value is not a value cell");
    DCHECK(!ValueCell::cast(result).isPlaceholder(),
           "attribute has been deleted");
    // Module attributes are never bound methods: push the value and mark
    // the receiver slot Unbound (CALL_FUNCTION-style call).
    thread->stackPush(ValueCell::cast(result).value());
    thread->stackSetAt(1, Unbound::object());
    return Continue::NEXT;
  }
  // Cache miss (different module or a non-module receiver): reset the cache
  // to anamorphic and redo the lookup.
  EVENT_CACHE(LOAD_METHOD_MODULE);
  return retryLoadMethodCached(thread, arg, cache);
}
5380
5381HANDLER_INLINE Continue
5382Interpreter::doLoadMethodInstanceFunction(Thread* thread, word arg) {
5383 Frame* frame = thread->currentFrame();
5384 RawMutableTuple caches = MutableTuple::cast(frame->caches());
5385 RawObject receiver = thread->stackTop();
5386 word cache = currentCacheIndex(frame);
5387 bool is_found;
5388 RawObject cached =
5389 icLookupMonomorphic(caches, cache, receiver.layoutId(), &is_found);
5390 if (!is_found) {
5391 EVENT_CACHE(LOAD_METHOD_INSTANCE_FUNCTION);
5392 return loadMethodUpdateCache(thread, arg, cache);
5393 }
5394 DCHECK(cached.isFunction(), "cached is expected to be a function");
5395 thread->stackInsertAt(1, cached);
5396 return Continue::NEXT;
5397}
5398
5399HANDLER_INLINE Continue Interpreter::doLoadMethodPolymorphic(Thread* thread,
5400 word arg) {
5401 Frame* frame = thread->currentFrame();
5402 RawObject receiver = thread->stackTop();
5403 word cache = currentCacheIndex(frame);
5404 bool is_found;
5405 RawObject cached = icLookupPolymorphic(MutableTuple::cast(frame->caches()),
5406 cache, receiver.layoutId(), &is_found);
5407 if (!is_found) {
5408 EVENT_CACHE(LOAD_METHOD_POLYMORPHIC);
5409 return loadMethodUpdateCache(thread, arg, cache);
5410 }
5411 DCHECK(cached.isFunction(), "cached is expected to be a function");
5412 thread->stackInsertAt(1, cached);
5413 return Continue::NEXT;
5414}
5415
// CALL_METHOD: companion of LOAD_METHOD (see doLoadMethod() for the stack
// shape). Dispatches either as a plain function call or as a method call
// with the receiver bound as the implicit first argument.
HANDLER_INLINE Continue Interpreter::doCallMethod(Thread* thread, word arg) {
  RawObject maybe_method = thread->stackPeek(arg + 1);
  if (maybe_method.isUnbound()) {
    // LOAD_METHOD left an already-bound callable; drop the Unbound
    // placeholder and behave exactly like CALL_FUNCTION.
    thread->stackRemoveAt(arg + 1);
    return handleCall(thread, arg, arg, preparePositionalCall,
                      &Function::entry);
  }
  // Add one to bind receiver to the self argument. See doLoadMethod()
  // for details on the stack's shape.
  return tailcallFunction(thread, arg + 1, maybe_method);
}
5427
// Slow path for COMPARE_IN: performs the `in` test and, when a plain
// __contains__ function was resolved for the container, caches it keyed on
// the container's layout and specializes the bytecode.
NEVER_INLINE
Continue Interpreter::compareInUpdateCache(Thread* thread, word cache) {
  HandleScope scope(thread);
  Frame* frame = thread->currentFrame();
  Function dependent(&scope, frame->function());
  if (dependent.isCompiled()) {
    // Compiled functions must not rewrite bytecode/caches; deoptimize.
    return Continue::DEOPT;
  }
  Object container(&scope, thread->stackPop());
  Object value(&scope, thread->stackPop());
  Object method(&scope, NoneType::object());
  Object result(&scope,
                sequenceContainsSetMethod(thread, value, container, &method));
  if (method.isFunction()) {
    MutableTuple caches(&scope, frame->caches());
    Str dunder_contains_name(
        &scope, thread->runtime()->symbols()->at(ID(__contains__)));
    ICState next_cache_state =
        icUpdateAttr(thread, caches, cache, container.layoutId(), method,
                     dunder_contains_name, dependent);
    // Specialize according to how many layouts this cache has now seen.
    rewriteCurrentBytecode(frame, next_cache_state == ICState::kMonomorphic
                                      ? COMPARE_IN_MONOMORPHIC
                                      : COMPARE_IN_POLYMORPHIC);
  }
  if (result.isErrorException()) return Continue::UNWIND;
  thread->stackPush(*result);
  return Continue::NEXT;
}
5456
// First execution of a COMPARE_IN site: rewrite to a type-specialized
// opcode when a fast path exists for the container's layout, otherwise
// populate the generic inline cache.
HANDLER_INLINE Continue Interpreter::doCompareInAnamorphic(Thread* thread,
                                                           word arg) {
  Frame* frame = thread->currentFrame();
  RawObject container = thread->stackPeek(0);
  switch (container.layoutId()) {
    case LayoutId::kSmallStr:
    case LayoutId::kLargeStr:
      // `x in str` only has a fast path when the needle is also a str.
      if (thread->stackPeek(1).isStr()) {
        rewriteCurrentBytecode(frame, COMPARE_IN_STR);
        return doCompareInStr(thread, arg);
      }
      {
        word cache = currentCacheIndex(frame);
        return compareInUpdateCache(thread, cache);
      }
    case LayoutId::kTuple:
      rewriteCurrentBytecode(frame, COMPARE_IN_TUPLE);
      return doCompareInTuple(thread, arg);
    case LayoutId::kDict:
      rewriteCurrentBytecode(frame, COMPARE_IN_DICT);
      return doCompareInDict(thread, arg);
    case LayoutId::kList:
      rewriteCurrentBytecode(frame, COMPARE_IN_LIST);
      return doCompareInList(thread, arg);
    default: {
      word cache = currentCacheIndex(frame);
      return compareInUpdateCache(thread, cache);
    }
  }
}
5487
5488HANDLER_INLINE Continue Interpreter::doCompareInDict(Thread* thread, word) {
5489 RawObject container = thread->stackPeek(0);
5490 if (!container.isDict()) {
5491 EVENT_CACHE(COMPARE_IN_DICT);
5492 word cache = currentCacheIndex(thread->currentFrame());
5493 return compareInUpdateCache(thread, cache);
5494 }
5495 HandleScope scope(thread);
5496 Dict dict(&scope, container);
5497 Object key(&scope, thread->stackPeek(1));
5498 Object hash_obj(&scope, Interpreter::hash(thread, key));
5499 if (hash_obj.isErrorException()) return Continue::UNWIND;
5500 word hash = SmallInt::cast(*hash_obj).value();
5501 RawObject result = dictAt(thread, dict, key, hash);
5502 DCHECK(!result.isErrorException(), "dictAt raised an exception");
5503 thread->stackDrop(2);
5504 if (result.isErrorNotFound()) {
5505 thread->stackPush(Bool::falseObj());
5506 } else {
5507 thread->stackPush(Bool::trueObj());
5508 }
5509 return Continue::NEXT;
5510}
5511
5512HANDLER_INLINE Continue Interpreter::doCompareInList(Thread* thread, word) {
5513 RawObject container = thread->stackPeek(0);
5514 if (!container.isList()) {
5515 EVENT_CACHE(COMPARE_IN_LIST);
5516 word cache = currentCacheIndex(thread->currentFrame());
5517 return compareInUpdateCache(thread, cache);
5518 }
5519 HandleScope scope(thread);
5520 List list(&scope, container);
5521 Object key(&scope, thread->stackPeek(1));
5522 Object result(&scope, listContains(thread, list, key));
5523 if (result.isErrorException()) return Continue::UNWIND;
5524 DCHECK(result.isBool(), "bool is unexpected");
5525 thread->stackDrop(2);
5526 thread->stackPush(*result);
5527 return Continue::NEXT;
5528}
5529
5530HANDLER_INLINE Continue Interpreter::doCompareInStr(Thread* thread, word) {
5531 RawObject container = thread->stackPeek(0);
5532 RawObject value = thread->stackPeek(1);
5533 if (!(container.isStr() && value.isStr())) {
5534 EVENT_CACHE(COMPARE_IN_STR);
5535 word cache = currentCacheIndex(thread->currentFrame());
5536 return compareInUpdateCache(thread, cache);
5537 }
5538 HandleScope scope(thread);
5539 Str haystack(&scope, container);
5540 Str needle(&scope, value);
5541 thread->stackDrop(2);
5542 thread->stackPush(Bool::fromBool(strFind(haystack, needle) != -1));
5543 return Continue::NEXT;
5544}
5545
5546HANDLER_INLINE Continue Interpreter::doCompareInTuple(Thread* thread, word) {
5547 RawObject container = thread->stackPeek(0);
5548 if (!container.isTuple()) {
5549 EVENT_CACHE(COMPARE_IN_TUPLE);
5550 word cache = currentCacheIndex(thread->currentFrame());
5551 return compareInUpdateCache(thread, cache);
5552 }
5553 HandleScope scope(thread);
5554 Tuple tuple(&scope, container);
5555 Object value(&scope, thread->stackPeek(1));
5556 RawObject result = tupleContains(thread, tuple, value);
5557 if (result.isErrorException()) {
5558 return Continue::UNWIND;
5559 }
5560 thread->stackDrop(2);
5561 thread->stackPush(result);
5562 return Continue::NEXT;
5563}
5564
// Monomorphic fast path for COMPARE_IN: the cache holds the __contains__
// implementation for the container's layout.
HANDLER_INLINE Continue Interpreter::doCompareInMonomorphic(Thread* thread,
                                                            word) {
  Frame* frame = thread->currentFrame();
  RawObject container = thread->stackPeek(0);
  RawObject value = thread->stackPeek(1);
  LayoutId container_layout_id = container.layoutId();
  word cache = currentCacheIndex(frame);
  bool is_found;
  RawObject cached = icLookupMonomorphic(MutableTuple::cast(frame->caches()),
                                         cache, container_layout_id, &is_found);
  if (!is_found) {
    // Container layout changed; fall back to the generic path + re-cache.
    EVENT_CACHE(COMPARE_IN_MONOMORPHIC);
    return compareInUpdateCache(thread, cache);
  }
  // Rearrange to call cached(container, value).
  thread->stackDrop(2);
  thread->stackPush(cached);
  thread->stackPush(container);
  thread->stackPush(value);
  // A recursive call is needed to coerce the return value to bool.
  RawObject result = call(thread, 2);
  if (result.isErrorException()) return Continue::UNWIND;
  thread->stackPush(isTrue(thread, result));
  return Continue::NEXT;
}
5589
5590HANDLER_INLINE Continue Interpreter::doCompareInPolymorphic(Thread* thread,
5591 word) {
5592 Frame* frame = thread->currentFrame();
5593 RawObject container = thread->stackPeek(0);
5594 RawObject value = thread->stackPeek(1);
5595 LayoutId container_layout_id = container.layoutId();
5596 word cache = currentCacheIndex(frame);
5597 bool is_found;
5598 RawObject cached = icLookupPolymorphic(MutableTuple::cast(frame->caches()),
5599 cache, container_layout_id, &is_found);
5600 if (!is_found) {
5601 EVENT_CACHE(COMPARE_IN_POLYMORPHIC);
5602 return compareInUpdateCache(thread, cache);
5603 }
5604 thread->stackDrop(2);
5605 thread->stackPush(cached);
5606 thread->stackPush(container);
5607 thread->stackPush(value);
5608 // Should use a recursive call to convert it return type to bool.
5609 RawObject result = call(thread, 2);
5610 if (result.isError()) return Continue::UNWIND;
5611 thread->stackPush(isTrue(thread, result));
5612 return Continue::NEXT;
5613}
5614
// Slow path for COMPARE_OP: resolves and invokes the comparison method,
// then caches it keyed on both operand layouts and specializes the bytecode
// when a method was found.
Continue Interpreter::compareOpUpdateCache(Thread* thread, word arg,
                                           word cache) {
  HandleScope scope(thread);
  Frame* frame = thread->currentFrame();
  Function dependent(&scope, frame->function());
  if (dependent.isCompiled()) {
    // Compiled functions must not rewrite bytecode/caches; deoptimize.
    return Continue::DEOPT;
  }
  Object right(&scope, thread->stackPop());
  Object left(&scope, thread->stackPop());
  CompareOp op = static_cast<CompareOp>(arg);
  Object method(&scope, NoneType::object());
  BinaryOpFlags flags;
  RawObject result =
      compareOperationSetMethod(thread, op, left, right, &method, &flags);
  if (result.isErrorException()) return Continue::UNWIND;
  if (!method.isNoneType()) {
    MutableTuple caches(&scope, frame->caches());
    LayoutId left_layout_id = left.layoutId();
    LayoutId right_layout_id = right.layoutId();
    ICState next_cache_state = icUpdateBinOp(
        thread, caches, cache, left_layout_id, right_layout_id, method, flags);
    // Register dependencies so type mutation invalidates this cache entry.
    icInsertCompareOpDependencies(thread, dependent, left_layout_id,
                                  right_layout_id, op);
    rewriteCurrentBytecode(frame, next_cache_state == ICState::kMonomorphic
                                      ? COMPARE_OP_MONOMORPHIC
                                      : COMPARE_OP_POLYMORPHIC);
  }
  thread->stackPush(result);
  return Continue::NEXT;
}
5646
5647Continue Interpreter::compareOpFallback(Thread* thread, word arg,
5648 BinaryOpFlags flags) {
5649 // Slow-path: We may need to call the reversed op when the first method
5650 // returned `NotImplemented`.
5651 HandleScope scope(thread);
5652 CompareOp op = static_cast<CompareOp>(arg);
5653 Object right(&scope, thread->stackPop());
5654 Object left(&scope, thread->stackPop());
5655 Object result(&scope, compareOperationRetry(thread, op, flags, left, right));
5656 if (result.isErrorException()) return Continue::UNWIND;
5657 thread->stackPush(*result);
5658 return Continue::NEXT;
5659}
5660
5661HANDLER_INLINE
5662Continue Interpreter::doCompareEqSmallInt(Thread* thread, word arg) {
5663 RawObject left = thread->stackPeek(1);
5664 RawObject right = thread->stackPeek(0);
5665 if (left.isSmallInt() && right.isSmallInt()) {
5666 word left_value = SmallInt::cast(left).value();
5667 word right_value = SmallInt::cast(right).value();
5668 thread->stackDrop(1);
5669 thread->stackSetTop(Bool::fromBool(left_value == right_value));
5670 return Continue::NEXT;
5671 }
5672 EVENT_CACHE(COMPARE_EQ_SMALLINT);
5673 word cache = currentCacheIndex(thread->currentFrame());
5674 return compareOpUpdateCache(thread, arg, cache);
5675}
5676
5677HANDLER_INLINE
5678Continue Interpreter::doCompareGtSmallInt(Thread* thread, word arg) {
5679 RawObject left = thread->stackPeek(1);
5680 RawObject right = thread->stackPeek(0);
5681 if (left.isSmallInt() && right.isSmallInt()) {
5682 word left_value = SmallInt::cast(left).value();
5683 word right_value = SmallInt::cast(right).value();
5684 thread->stackDrop(1);
5685 thread->stackSetTop(Bool::fromBool(left_value > right_value));
5686 return Continue::NEXT;
5687 }
5688 EVENT_CACHE(COMPARE_GT_SMALLINT);
5689 word cache = currentCacheIndex(thread->currentFrame());
5690 return compareOpUpdateCache(thread, arg, cache);
5691}
5692
5693HANDLER_INLINE
5694Continue Interpreter::doCompareLtSmallInt(Thread* thread, word arg) {
5695 RawObject left = thread->stackPeek(1);
5696 RawObject right = thread->stackPeek(0);
5697 if (left.isSmallInt() && right.isSmallInt()) {
5698 word left_value = SmallInt::cast(left).value();
5699 word right_value = SmallInt::cast(right).value();
5700 thread->stackDrop(1);
5701 thread->stackSetTop(Bool::fromBool(left_value < right_value));
5702 return Continue::NEXT;
5703 }
5704 EVENT_CACHE(COMPARE_LT_SMALLINT);
5705 word cache = currentCacheIndex(thread->currentFrame());
5706 return compareOpUpdateCache(thread, arg, cache);
5707}
5708
5709HANDLER_INLINE
5710Continue Interpreter::doCompareGeSmallInt(Thread* thread, word arg) {
5711 RawObject left = thread->stackPeek(1);
5712 RawObject right = thread->stackPeek(0);
5713 if (left.isSmallInt() && right.isSmallInt()) {
5714 word left_value = SmallInt::cast(left).value();
5715 word right_value = SmallInt::cast(right).value();
5716 thread->stackDrop(1);
5717 thread->stackSetTop(Bool::fromBool(left_value >= right_value));
5718 return Continue::NEXT;
5719 }
5720 EVENT_CACHE(COMPARE_GE_SMALLINT);
5721 word cache = currentCacheIndex(thread->currentFrame());
5722 return compareOpUpdateCache(thread, arg, cache);
5723}
5724
5725HANDLER_INLINE
5726Continue Interpreter::doCompareNeSmallInt(Thread* thread, word arg) {
5727 RawObject left = thread->stackPeek(1);
5728 RawObject right = thread->stackPeek(0);
5729 if (left.isSmallInt() && right.isSmallInt()) {
5730 word left_value = SmallInt::cast(left).value();
5731 word right_value = SmallInt::cast(right).value();
5732 thread->stackDrop(1);
5733 thread->stackSetTop(Bool::fromBool(left_value != right_value));
5734 return Continue::NEXT;
5735 }
5736 EVENT_CACHE(COMPARE_NE_SMALLINT);
5737 word cache = currentCacheIndex(thread->currentFrame());
5738 return compareOpUpdateCache(thread, arg, cache);
5739}
5740
5741HANDLER_INLINE
5742Continue Interpreter::doCompareLeSmallInt(Thread* thread, word arg) {
5743 RawObject left = thread->stackPeek(1);
5744 RawObject right = thread->stackPeek(0);
5745 if (left.isSmallInt() && right.isSmallInt()) {
5746 word left_value = SmallInt::cast(left).value();
5747 word right_value = SmallInt::cast(right).value();
5748 thread->stackDrop(1);
5749 thread->stackSetTop(Bool::fromBool(left_value <= right_value));
5750 return Continue::NEXT;
5751 }
5752 EVENT_CACHE(COMPARE_LE_SMALLINT);
5753 word cache = currentCacheIndex(thread->currentFrame());
5754 return compareOpUpdateCache(thread, arg, cache);
5755}
5756
5757HANDLER_INLINE
5758Continue Interpreter::doCompareEqStr(Thread* thread, word arg) {
5759 RawObject left = thread->stackPeek(1);
5760 RawObject right = thread->stackPeek(0);
5761 if (left.isStr() && right.isStr()) {
5762 thread->stackDrop(1);
5763 thread->stackSetTop(
5764 Bool::fromBool(Str::cast(left).equals(Str::cast(right))));
5765 return Continue::NEXT;
5766 }
5767 EVENT_CACHE(COMPARE_EQ_STR);
5768 word cache = currentCacheIndex(thread->currentFrame());
5769 return compareOpUpdateCache(thread, arg, cache);
5770}
5771
5772HANDLER_INLINE
5773Continue Interpreter::doCompareNeStr(Thread* thread, word arg) {
5774 RawObject left = thread->stackPeek(1);
5775 RawObject right = thread->stackPeek(0);
5776 if (left.isStr() && right.isStr()) {
5777 thread->stackDrop(1);
5778 thread->stackSetTop(
5779 Bool::fromBool(!Str::cast(left).equals(Str::cast(right))));
5780 return Continue::NEXT;
5781 }
5782 EVENT_CACHE(COMPARE_NE_STR);
5783 word cache = currentCacheIndex(thread->currentFrame());
5784 return compareOpUpdateCache(thread, arg, cache);
5785}
5786
// Monomorphic fast path for COMPARE_OP: the cache maps the (left, right)
// layout pair to the resolved comparison method plus its flags.
HANDLER_INLINE
Continue Interpreter::doCompareOpMonomorphic(Thread* thread, word arg) {
  Frame* frame = thread->currentFrame();
  RawObject left_raw = thread->stackPeek(1);
  RawObject right_raw = thread->stackPeek(0);
  LayoutId left_layout_id = left_raw.layoutId();
  LayoutId right_layout_id = right_raw.layoutId();
  BinaryOpFlags flags = kBinaryOpNone;
  word cache = currentCacheIndex(frame);
  RawObject method =
      icLookupBinOpMonomorphic(MutableTuple::cast(frame->caches()), cache,
                               left_layout_id, right_layout_id, &flags);
  if (method.isErrorNotFound()) {
    // Operand layouts changed; fall back to the generic path + re-cache.
    EVENT_CACHE(COMPARE_OP_MONOMORPHIC);
    return compareOpUpdateCache(thread, arg, cache);
  }
  // compareOpFallback handles a NotImplemented result from the method.
  return binaryOp(thread, arg, method, flags, left_raw, right_raw,
                  compareOpFallback);
}
5806
// Polymorphic fast path for COMPARE_OP: like the monomorphic variant but
// the cache holds several (layout pair, method) entries.
HANDLER_INLINE
Continue Interpreter::doCompareOpPolymorphic(Thread* thread, word arg) {
  Frame* frame = thread->currentFrame();
  RawObject left_raw = thread->stackPeek(1);
  RawObject right_raw = thread->stackPeek(0);
  LayoutId left_layout_id = left_raw.layoutId();
  LayoutId right_layout_id = right_raw.layoutId();
  BinaryOpFlags flags = kBinaryOpNone;
  word cache = currentCacheIndex(frame);
  RawObject method =
      icLookupBinOpPolymorphic(MutableTuple::cast(frame->caches()), cache,
                               left_layout_id, right_layout_id, &flags);
  if (method.isErrorNotFound()) {
    // No entry for this layout pair; fall back to the generic path.
    EVENT_CACHE(COMPARE_OP_POLYMORPHIC);
    return compareOpUpdateCache(thread, arg, cache);
  }
  // compareOpFallback handles a NotImplemented result from the method.
  return binaryOp(thread, arg, method, flags, left_raw, right_raw,
                  compareOpFallback);
}
5826
// First execution of a COMPARE_OP site: rewrite to a type-specialized
// opcode for common SmallInt/str comparisons, otherwise populate the
// generic inline cache.
HANDLER_INLINE
Continue Interpreter::doCompareOpAnamorphic(Thread* thread, word arg) {
  Frame* frame = thread->currentFrame();
  RawObject left = thread->stackPeek(1);
  RawObject right = thread->stackPeek(0);
  if (left.isSmallInt() && right.isSmallInt()) {
    switch (static_cast<CompareOp>(arg)) {
      case CompareOp::EQ:
        rewriteCurrentBytecode(frame, COMPARE_EQ_SMALLINT);
        return doCompareEqSmallInt(thread, arg);
      case CompareOp::GT:
        rewriteCurrentBytecode(frame, COMPARE_GT_SMALLINT);
        return doCompareGtSmallInt(thread, arg);
      case CompareOp::LT:
        rewriteCurrentBytecode(frame, COMPARE_LT_SMALLINT);
        return doCompareLtSmallInt(thread, arg);
      case CompareOp::GE:
        rewriteCurrentBytecode(frame, COMPARE_GE_SMALLINT);
        return doCompareGeSmallInt(thread, arg);
      case CompareOp::NE:
        rewriteCurrentBytecode(frame, COMPARE_NE_SMALLINT);
        return doCompareNeSmallInt(thread, arg);
      case CompareOp::LE:
        rewriteCurrentBytecode(frame, COMPARE_LE_SMALLINT);
        return doCompareLeSmallInt(thread, arg);
      default: {
        // is / is-not / in / not-in / exception-match have no SmallInt
        // specialization; use the generic cache.
        word cache = currentCacheIndex(frame);
        return compareOpUpdateCache(thread, arg, cache);
      }
    }
  }
  if (left.isStr() && right.isStr()) {
    switch (static_cast<CompareOp>(arg)) {
      case CompareOp::EQ:
        rewriteCurrentBytecode(frame, COMPARE_EQ_STR);
        return doCompareEqStr(thread, arg);
      case CompareOp::NE:
        rewriteCurrentBytecode(frame, COMPARE_NE_STR);
        return doCompareNeStr(thread, arg);
      default: {
        word cache = currentCacheIndex(frame);
        return compareOpUpdateCache(thread, arg, cache);
      }
    }
  }
  word cache = currentCacheIndex(frame);
  return compareOpUpdateCache(thread, arg, cache);
}
5875
// Slow path for INPLACE_OP: resolves and invokes the in-place (or fallback
// binary) method, then caches it keyed on both operand layouts and
// specializes the bytecode when a method was found.
Continue Interpreter::inplaceOpUpdateCache(Thread* thread, word arg,
                                           word cache) {
  HandleScope scope(thread);
  Frame* frame = thread->currentFrame();
  Function dependent(&scope, frame->function());
  if (dependent.isCompiled()) {
    // Compiled functions must not rewrite bytecode/caches; deoptimize.
    return Continue::DEOPT;
  }
  Object right(&scope, thread->stackPop());
  Object left(&scope, thread->stackPop());
  BinaryOp op = static_cast<BinaryOp>(arg);
  Object method(&scope, NoneType::object());
  BinaryOpFlags flags;
  RawObject result =
      inplaceOperationSetMethod(thread, op, left, right, &method, &flags);
  // NOTE(review): unlike compareOpUpdateCache(), the cache is updated even
  // when `result` is an exception -- presumably fine since the method itself
  // resolved, but confirm the asymmetry is intended.
  if (!method.isNoneType()) {
    MutableTuple caches(&scope, frame->caches());
    LayoutId left_layout_id = left.layoutId();
    LayoutId right_layout_id = right.layoutId();
    ICState next_cache_state = icUpdateBinOp(
        thread, caches, cache, left_layout_id, right_layout_id, method, flags);
    // Register dependencies so type mutation invalidates this cache entry.
    icInsertInplaceOpDependencies(thread, dependent, left_layout_id,
                                  right_layout_id, op);
    rewriteCurrentBytecode(frame, next_cache_state == ICState::kMonomorphic
                                      ? INPLACE_OP_MONOMORPHIC
                                      : INPLACE_OP_POLYMORPHIC);
  }
  if (result.isErrorException()) return Continue::UNWIND;
  thread->stackPush(result);
  return Continue::NEXT;
}
5907
5908Continue Interpreter::inplaceOpFallback(Thread* thread, word arg,
5909 BinaryOpFlags flags) {
5910 // Slow-path: We may need to try other ways to resolve things when the first
5911 // call returned `NotImplemented`.
5912 HandleScope scope(thread);
5913 BinaryOp op = static_cast<BinaryOp>(arg);
5914 Object right(&scope, thread->stackPop());
5915 Object left(&scope, thread->stackPop());
5916 Object result(&scope, NoneType::object());
5917 if (flags & kInplaceBinaryOpRetry) {
5918 // The cached operation was an in-place operation we have to try to the
5919 // usual binary operation mechanics now.
5920 result = binaryOperation(thread, op, left, right);
5921 } else {
5922 // The cached operation was already a binary operation (e.g. __add__ or
5923 // __radd__) so we have to invoke `binaryOperationRetry`.
5924 result = binaryOperationRetry(thread, op, flags, left, right);
5925 }
5926 if (result.isErrorException()) return Continue::UNWIND;
5927 thread->stackPush(*result);
5928 return Continue::NEXT;
5929}
5930
5931HANDLER_INLINE
5932Continue Interpreter::doInplaceOpMonomorphic(Thread* thread, word arg) {
5933 Frame* frame = thread->currentFrame();
5934 RawObject left_raw = thread->stackPeek(1);
5935 RawObject right_raw = thread->stackPeek(0);
5936 LayoutId left_layout_id = left_raw.layoutId();
5937 LayoutId right_layout_id = right_raw.layoutId();
5938 BinaryOpFlags flags = kBinaryOpNone;
5939 word cache = currentCacheIndex(frame);
5940 RawObject method =
5941 icLookupBinOpMonomorphic(MutableTuple::cast(frame->caches()), cache,
5942 left_layout_id, right_layout_id, &flags);
5943 if (method.isErrorNotFound()) {
5944 EVENT_CACHE(INPLACE_OP_MONOMORPHIC);
5945 return inplaceOpUpdateCache(thread, arg, cache);
5946 }
5947 return binaryOp(thread, arg, method, flags, left_raw, right_raw,
5948 inplaceOpFallback);
5949}
5950
5951HANDLER_INLINE
5952Continue Interpreter::doInplaceOpPolymorphic(Thread* thread, word arg) {
5953 Frame* frame = thread->currentFrame();
5954 RawObject left_raw = thread->stackPeek(1);
5955 RawObject right_raw = thread->stackPeek(0);
5956 LayoutId left_layout_id = left_raw.layoutId();
5957 LayoutId right_layout_id = right_raw.layoutId();
5958 BinaryOpFlags flags = kBinaryOpNone;
5959 word cache = currentCacheIndex(frame);
5960 RawObject method =
5961 icLookupBinOpPolymorphic(MutableTuple::cast(frame->caches()), cache,
5962 left_layout_id, right_layout_id, &flags);
5963 if (method.isErrorNotFound()) {
5964 EVENT_CACHE(INPLACE_OP_POLYMORPHIC);
5965 return inplaceOpUpdateCache(thread, arg, cache);
5966 }
5967 return binaryOp(thread, arg, method, flags, left_raw, right_raw,
5968 inplaceOpFallback);
5969}
5970
5971HANDLER_INLINE
5972Continue Interpreter::doInplaceAddSmallInt(Thread* thread, word arg) {
5973 RawObject left = thread->stackPeek(1);
5974 RawObject right = thread->stackPeek(0);
5975 if (left.isSmallInt() && right.isSmallInt()) {
5976 word left_value = SmallInt::cast(left).value();
5977 word right_value = SmallInt::cast(right).value();
5978 word result_value = left_value + right_value;
5979 if (SmallInt::isValid(result_value)) {
5980 thread->stackDrop(1);
5981 thread->stackSetTop(SmallInt::fromWord(result_value));
5982 return Continue::NEXT;
5983 }
5984 }
5985 EVENT_CACHE(INPLACE_ADD_SMALLINT);
5986 word cache = currentCacheIndex(thread->currentFrame());
5987 return inplaceOpUpdateCache(thread, arg, cache);
5988}
5989
5990HANDLER_INLINE
5991Continue Interpreter::doInplaceSubSmallInt(Thread* thread, word arg) {
5992 RawObject left = thread->stackPeek(1);
5993 RawObject right = thread->stackPeek(0);
5994 if (left.isSmallInt() && right.isSmallInt()) {
5995 word left_value = SmallInt::cast(left).value();
5996 word right_value = SmallInt::cast(right).value();
5997 word result_value = left_value - right_value;
5998 if (SmallInt::isValid(result_value)) {
5999 thread->stackDrop(1);
6000 thread->stackSetTop(SmallInt::fromWord(result_value));
6001 return Continue::NEXT;
6002 }
6003 }
6004 EVENT_CACHE(INPLACE_SUB_SMALLINT);
6005 word cache = currentCacheIndex(thread->currentFrame());
6006 return inplaceOpUpdateCache(thread, arg, cache);
6007}
6008
6009HANDLER_INLINE
6010Continue Interpreter::doInplaceOpAnamorphic(Thread* thread, word arg) {
6011 Frame* frame = thread->currentFrame();
6012 if (thread->stackPeek(0).isSmallInt() && thread->stackPeek(1).isSmallInt()) {
6013 switch (static_cast<BinaryOp>(arg)) {
6014 case BinaryOp::ADD:
6015 rewriteCurrentBytecode(frame, INPLACE_ADD_SMALLINT);
6016 return doInplaceAddSmallInt(thread, arg);
6017 case BinaryOp::SUB:
6018 rewriteCurrentBytecode(frame, INPLACE_SUB_SMALLINT);
6019 return doInplaceSubSmallInt(thread, arg);
6020 default: {
6021 word cache = currentCacheIndex(frame);
6022 return inplaceOpUpdateCache(thread, arg, cache);
6023 }
6024 }
6025 }
6026 word cache = currentCacheIndex(frame);
6027 return inplaceOpUpdateCache(thread, arg, cache);
6028}
6029
// Slow path for BINARY_OP_* opcodes: resolves the operation with
// `binaryOperationSetMethod`, stores the resolved method in the inline cache,
// and rewrites the current bytecode to the monomorphic or polymorphic
// specialization. Pops both operands and pushes the result.
Continue Interpreter::binaryOpUpdateCache(Thread* thread, word arg,
                                          word cache) {
  HandleScope scope(thread);
  Frame* frame = thread->currentFrame();
  Function dependent(&scope, frame->function());
  // Compiled functions do not take the bytecode-rewriting path; request
  // deoptimization instead.
  if (dependent.isCompiled()) {
    return Continue::DEOPT;
  }
  Object right(&scope, thread->stackPop());
  Object left(&scope, thread->stackPop());
  BinaryOp op = static_cast<BinaryOp>(arg);
  Object method(&scope, NoneType::object());
  BinaryOpFlags flags;
  Object result(&scope, binaryOperationSetMethod(thread, op, left, right,
                                                 &method, &flags));
  // `method` is only populated when a cacheable method was found. Record it,
  // register dependencies so layout changes invalidate the cache, and
  // specialize the opcode for the observed cache state.
  if (!method.isNoneType()) {
    MutableTuple caches(&scope, frame->caches());
    LayoutId left_layout_id = left.layoutId();
    LayoutId right_layout_id = right.layoutId();
    ICState next_cache_state = icUpdateBinOp(
        thread, caches, cache, left_layout_id, right_layout_id, method, flags);
    icInsertBinaryOpDependencies(thread, dependent, left_layout_id,
                                 right_layout_id, op);
    rewriteCurrentBytecode(frame, next_cache_state == ICState::kMonomorphic
                                      ? BINARY_OP_MONOMORPHIC
                                      : BINARY_OP_POLYMORPHIC);
  }
  if (result.isErrorException()) return Continue::UNWIND;
  thread->stackPush(*result);
  return Continue::NEXT;
}
6061
6062Continue Interpreter::binaryOpFallback(Thread* thread, word arg,
6063 BinaryOpFlags flags) {
6064 // Slow-path: We may need to call the reversed op when the first method
6065 // returned `NotImplemented`.
6066 HandleScope scope(thread);
6067 BinaryOp op = static_cast<BinaryOp>(arg);
6068 Object right(&scope, thread->stackPop());
6069 Object left(&scope, thread->stackPop());
6070 Object result(&scope, binaryOperationRetry(thread, op, flags, left, right));
6071 if (result.isErrorException()) return Continue::UNWIND;
6072 thread->stackPush(*result);
6073 return Continue::NEXT;
6074}
6075
6076ALWAYS_INLINE Continue Interpreter::binaryOp(Thread* thread, word arg,
6077 RawObject method,
6078 BinaryOpFlags flags,
6079 RawObject left, RawObject right,
6080 BinaryOpFallbackHandler fallback) {
6081 DCHECK(method.isFunction(), "method is expected to be a function");
6082 RawObject result =
6083 binaryOperationWithMethod(thread, method, flags, left, right);
6084 if (result.isErrorException()) return Continue::UNWIND;
6085 if (!result.isNotImplementedType()) {
6086 thread->stackDrop(1);
6087 thread->stackSetTop(result);
6088 return Continue::NEXT;
6089 }
6090 return fallback(thread, arg, flags);
6091}
6092
6093HANDLER_INLINE
6094Continue Interpreter::doBinaryOpMonomorphic(Thread* thread, word arg) {
6095 Frame* frame = thread->currentFrame();
6096 RawObject left_raw = thread->stackPeek(1);
6097 RawObject right_raw = thread->stackPeek(0);
6098 LayoutId left_layout_id = left_raw.layoutId();
6099 LayoutId right_layout_id = right_raw.layoutId();
6100 BinaryOpFlags flags = kBinaryOpNone;
6101 word cache = currentCacheIndex(frame);
6102 RawObject method =
6103 icLookupBinOpMonomorphic(MutableTuple::cast(frame->caches()), cache,
6104 left_layout_id, right_layout_id, &flags);
6105 if (method.isErrorNotFound()) {
6106 EVENT_CACHE(BINARY_OP_MONOMORPHIC);
6107 return binaryOpUpdateCache(thread, arg, cache);
6108 }
6109 return binaryOp(thread, arg, method, flags, left_raw, right_raw,
6110 binaryOpFallback);
6111}
6112
6113HANDLER_INLINE
6114Continue Interpreter::doBinaryOpPolymorphic(Thread* thread, word arg) {
6115 Frame* frame = thread->currentFrame();
6116 RawObject left_raw = thread->stackPeek(1);
6117 RawObject right_raw = thread->stackPeek(0);
6118 LayoutId left_layout_id = left_raw.layoutId();
6119 LayoutId right_layout_id = right_raw.layoutId();
6120 BinaryOpFlags flags = kBinaryOpNone;
6121 word cache = currentCacheIndex(frame);
6122 RawObject method =
6123 icLookupBinOpPolymorphic(MutableTuple::cast(frame->caches()), cache,
6124 left_layout_id, right_layout_id, &flags);
6125 if (method.isErrorNotFound()) {
6126 EVENT_CACHE(BINARY_OP_POLYMORPHIC);
6127 return binaryOpUpdateCache(thread, arg, cache);
6128 }
6129 return binaryOp(thread, arg, method, flags, left_raw, right_raw,
6130 binaryOpFallback);
6131}
6132
6133HANDLER_INLINE
6134Continue Interpreter::doBinaryAddSmallInt(Thread* thread, word arg) {
6135 RawObject left = thread->stackPeek(1);
6136 RawObject right = thread->stackPeek(0);
6137 if (left.isSmallInt() && right.isSmallInt()) {
6138 word left_value = SmallInt::cast(left).value();
6139 word right_value = SmallInt::cast(right).value();
6140 word result_value = left_value + right_value;
6141 if (SmallInt::isValid(result_value)) {
6142 thread->stackDrop(1);
6143 thread->stackSetTop(SmallInt::fromWord(result_value));
6144 return Continue::NEXT;
6145 }
6146 }
6147 EVENT_CACHE(BINARY_ADD_SMALLINT);
6148 word cache = currentCacheIndex(thread->currentFrame());
6149 return binaryOpUpdateCache(thread, arg, cache);
6150}
6151
6152HANDLER_INLINE
6153Continue Interpreter::doBinaryAndSmallInt(Thread* thread, word arg) {
6154 RawObject left = thread->stackPeek(1);
6155 RawObject right = thread->stackPeek(0);
6156 if (left.isSmallInt() && right.isSmallInt()) {
6157 word left_value = SmallInt::cast(left).value();
6158 word right_value = SmallInt::cast(right).value();
6159 word result_value = left_value & right_value;
6160 DCHECK(SmallInt::isValid(result_value), "result should be a SmallInt");
6161 thread->stackDrop(1);
6162 thread->stackSetTop(SmallInt::fromWord(result_value));
6163 return Continue::NEXT;
6164 }
6165 EVENT_CACHE(BINARY_AND_SMALLINT);
6166 word cache = currentCacheIndex(thread->currentFrame());
6167 return binaryOpUpdateCache(thread, arg, cache);
6168}
6169
6170HANDLER_INLINE
6171Continue Interpreter::doBinaryMulSmallInt(Thread* thread, word arg) {
6172 RawObject left = thread->stackPeek(1);
6173 RawObject right = thread->stackPeek(0);
6174 if (left.isSmallInt() && right.isSmallInt()) {
6175 word result;
6176 if (!__builtin_mul_overflow(SmallInt::cast(left).value(),
6177 SmallInt::cast(right).value(), &result) &&
6178 SmallInt::isValid(result)) {
6179 thread->stackDrop(1);
6180 thread->stackSetTop(SmallInt::fromWord(result));
6181 return Continue::NEXT;
6182 }
6183 }
6184 EVENT_CACHE(BINARY_MUL_SMALLINT);
6185 word cache = currentCacheIndex(thread->currentFrame());
6186 return binaryOpUpdateCache(thread, arg, cache);
6187}
6188
6189HANDLER_INLINE
6190Continue Interpreter::doBinaryFloordivSmallInt(Thread* thread, word arg) {
6191 RawObject left = thread->stackPeek(1);
6192 RawObject right = thread->stackPeek(0);
6193 if (left.isSmallInt() && right.isSmallInt()) {
6194 word left_value = SmallInt::cast(left).value();
6195 word right_value = SmallInt::cast(right).value();
6196 if (right_value == 0) {
6197 thread->raiseWithFmt(LayoutId::kZeroDivisionError,
6198 "integer division or modulo by zero");
6199 return Continue::UNWIND;
6200 }
6201 word result_value = left_value / right_value;
6202 DCHECK(SmallInt::isValid(result_value), "result should be a SmallInt");
6203 thread->stackDrop(1);
6204 thread->stackSetTop(SmallInt::fromWord(result_value));
6205 return Continue::NEXT;
6206 }
6207 EVENT_CACHE(BINARY_FLOORDIV_SMALLINT);
6208 word cache = currentCacheIndex(thread->currentFrame());
6209 return binaryOpUpdateCache(thread, arg, cache);
6210}
6211
6212HANDLER_INLINE
6213Continue Interpreter::doBinarySubSmallInt(Thread* thread, word arg) {
6214 RawObject left = thread->stackPeek(1);
6215 RawObject right = thread->stackPeek(0);
6216 if (left.isSmallInt() && right.isSmallInt()) {
6217 word left_value = SmallInt::cast(left).value();
6218 word right_value = SmallInt::cast(right).value();
6219 word result_value = left_value - right_value;
6220 if (SmallInt::isValid(result_value)) {
6221 thread->stackDrop(1);
6222 thread->stackSetTop(SmallInt::fromWord(result_value));
6223 return Continue::NEXT;
6224 }
6225 }
6226 EVENT_CACHE(BINARY_SUB_SMALLINT);
6227 word cache = currentCacheIndex(thread->currentFrame());
6228 return binaryOpUpdateCache(thread, arg, cache);
6229}
6230
6231HANDLER_INLINE
6232Continue Interpreter::doBinaryOrSmallInt(Thread* thread, word arg) {
6233 RawObject left = thread->stackPeek(1);
6234 RawObject right = thread->stackPeek(0);
6235 if (left.isSmallInt() && right.isSmallInt()) {
6236 word left_value = SmallInt::cast(left).value();
6237 word right_value = SmallInt::cast(right).value();
6238 word result_value = left_value | right_value;
6239 DCHECK(SmallInt::isValid(result_value), "result should be a SmallInt");
6240 thread->stackDrop(1);
6241 thread->stackSetTop(SmallInt::fromWord(result_value));
6242 return Continue::NEXT;
6243 }
6244 EVENT_CACHE(BINARY_OR_SMALLINT);
6245 word cache = currentCacheIndex(thread->currentFrame());
6246 return binaryOpUpdateCache(thread, arg, cache);
6247}
6248
6249HANDLER_INLINE
6250Continue Interpreter::doBinaryOpAnamorphic(Thread* thread, word arg) {
6251 Frame* frame = thread->currentFrame();
6252 if (thread->stackPeek(0).isSmallInt() && thread->stackPeek(1).isSmallInt()) {
6253 switch (static_cast<BinaryOp>(arg)) {
6254 case BinaryOp::ADD:
6255 rewriteCurrentBytecode(frame, BINARY_ADD_SMALLINT);
6256 return doBinaryAddSmallInt(thread, arg);
6257 case BinaryOp::AND:
6258 rewriteCurrentBytecode(frame, BINARY_AND_SMALLINT);
6259 return doBinaryAndSmallInt(thread, arg);
6260 case BinaryOp::MUL:
6261 rewriteCurrentBytecode(frame, BINARY_MUL_SMALLINT);
6262 return doBinaryMulSmallInt(thread, arg);
6263 case BinaryOp::FLOORDIV:
6264 rewriteCurrentBytecode(frame, BINARY_FLOORDIV_SMALLINT);
6265 return doBinaryFloordivSmallInt(thread, arg);
6266 case BinaryOp::SUB:
6267 rewriteCurrentBytecode(frame, BINARY_SUB_SMALLINT);
6268 return doBinarySubSmallInt(thread, arg);
6269 case BinaryOp::OR:
6270 rewriteCurrentBytecode(frame, BINARY_OR_SMALLINT);
6271 return doBinaryOrSmallInt(thread, arg);
6272 default: {
6273 word cache = currentCacheIndex(frame);
6274 return binaryOpUpdateCache(thread, arg, cache);
6275 }
6276 }
6277 }
6278 word cache = currentCacheIndex(frame);
6279 return binaryOpUpdateCache(thread, arg, cache);
6280}
6281
// Runs the current frame through the thread's installed interpreter entry
// point and returns the frame's result.
RawObject Interpreter::execute(Thread* thread) {
  DCHECK(!thread->hasPendingException(), "unhandled exception lingering");
  return thread->interpreterFunc()(thread);
}
6286
// Runs a re-entered generator frame to its next suspension point and maps the
// outcome onto the generator protocol: returns the yielded value, converts a
// `return` into StopIteration(value) (StopAsyncIteration for async
// generators), and turns an escaping StopIteration into RuntimeError (PEP 479
// behavior).
static RawObject resumeGeneratorImpl(Thread* thread,
                                     const GeneratorBase& generator,
                                     const GeneratorFrame& generator_frame,
                                     const ExceptionState& exc_state) {
  HandleScope scope(thread);
  Frame* frame = thread->currentFrame();
  DCHECK((frame->returnMode() & Frame::kExitRecursiveInterpreter) != 0,
         "expected kExitRecursiveInterpreter return mode");
  generator.setRunning(Bool::trueObj());
  Object result(&scope, Interpreter::execute(thread));
  generator.setRunning(Bool::falseObj());
  // Restore the caller's caught-exception state that the resumeGenerator*
  // entry points stashed before entering the generator.
  thread->setCaughtExceptionState(exc_state.previous());
  exc_state.setPrevious(NoneType::object());

  // Did generator end with yield? (The generator's frame is still the
  // current frame only when it suspended at a yield.)
  if (thread->currentFrame() == frame) {
    thread->popFrameToGeneratorFrame(generator_frame);
    return *result;
  }
  // The generator finished (returned or raised); mark it exhausted.
  generator_frame.setVirtualPC(Frame::kFinishedGeneratorPC);

  // Return now if generator ended with exception.
  if (result.isErrorException()) {
    if (thread->pendingExceptionMatches(LayoutId::kStopIteration)) {
      // NOTE(review): a plain (non-async) generator also takes the
      // "coroutine raised StopIteration" branch here; CPython's message for
      // that case is "generator raised StopIteration" -- confirm the wording
      // is intended.
      return thread->raiseWithFmtChainingPendingAsCause(
          LayoutId::kRuntimeError, generator.isAsyncGenerator()
                                       ? "async generator raised StopIteration"
                                       : "coroutine raised StopIteration");
    }
    if (generator.isAsyncGenerator() &&
        thread->pendingExceptionMatches(LayoutId::kStopAsyncIteration)) {
      return thread->raiseWithFmtChainingPendingAsCause(
          LayoutId::kRuntimeError, "async generator raised StopAsyncIteration");
    }
    return *result;
  }
  // Process generator return value.
  if (generator.isAsyncGenerator()) {
    // The Python compiler should disallow non-None return from asynchronous
    // generators.
    CHECK(result.isNoneType(), "Asynchronous generators cannot return values");
    return thread->raiseStopAsyncIteration();
  }
  return thread->raiseStopIterationWithValue(result);
}
6332
// Resumes `generator` by sending `send_value` into it, enforcing the Python
// send() protocol preconditions before entering the interpreter.
RawObject Interpreter::resumeGenerator(Thread* thread,
                                       const GeneratorBase& generator,
                                       const Object& send_value) {
  // A generator may not be re-entered while it is already running.
  if (generator.running() == Bool::trueObj()) {
    return thread->raiseWithFmt(LayoutId::kValueError, "%T already executing",
                                &generator);
  }
  HandleScope scope(thread);
  GeneratorFrame generator_frame(&scope, generator.generatorFrame());
  word pc = generator_frame.virtualPC();
  if (pc == Frame::kFinishedGeneratorPC) {
    // Exhausted: coroutines raise RuntimeError; async generators raise
    // StopAsyncIteration; plain generators raise StopIteration.
    if (generator.isCoroutine()) {
      return thread->raiseWithFmt(LayoutId::kRuntimeError,
                                  "cannot reuse already awaited coroutine");
    }
    return thread->raise(generator.isAsyncGenerator()
                             ? LayoutId::kStopAsyncIteration
                             : LayoutId::kStopIteration,
                         NoneType::object());
  }
  Frame* frame = thread->pushGeneratorFrame(generator_frame);
  if (frame == nullptr) {
    return Error::exception();
  }
  if (pc != 0) {
    // Suspended at a yield: the sent value becomes the result of the yield
    // expression.
    thread->stackPush(*send_value);
  } else if (!send_value.isNoneType()) {
    // A just-started generator may only receive None.
    thread->popFrame();
    return thread->raiseWithFmt(
        LayoutId::kTypeError, "can't send non-None value to a just-started %T",
        &generator);
  }

  // TODO(T38009294): Improve the compiler to avoid this exception state
  // overhead on every generator entry.
  ExceptionState exc_state(&scope, generator.exceptionState());
  exc_state.setPrevious(thread->caughtExceptionState());
  thread->setCaughtExceptionState(*exc_state);
  return resumeGeneratorImpl(thread, generator, generator_frame, exc_state);
}
6373
// Resumes `generator` by raising the given exception (`type`, `value`,
// `traceback`) at its current suspension point -- the throw() protocol.
// Returns the next yielded value, or an Error if the exception propagated.
RawObject Interpreter::resumeGeneratorWithRaise(Thread* thread,
                                                const GeneratorBase& generator,
                                                const Object& type,
                                                const Object& value,
                                                const Object& traceback) {
  // A generator may not be re-entered while it is already running.
  if (generator.running() == Bool::trueObj()) {
    return thread->raiseWithFmt(LayoutId::kValueError, "%T already executing",
                                &generator);
  }
  HandleScope scope(thread);
  GeneratorFrame generator_frame(&scope, generator.generatorFrame());
  Frame* frame = thread->pushGeneratorFrame(generator_frame);
  if (frame == nullptr) {
    return Error::exception();
  }
  if (generator.isCoroutine() &&
      frame->virtualPC() == Frame::kFinishedGeneratorPC) {
    thread->popFrame();
    return thread->raiseWithFmt(LayoutId::kRuntimeError,
                                "cannot reuse already awaited coroutine");
  }

  // TODO(T38009294): Improve the compiler to avoid this exception state
  // overhead on every generator entry.
  ExceptionState exc_state(&scope, generator.exceptionState());
  exc_state.setPrevious(thread->caughtExceptionState());
  thread->setCaughtExceptionState(*exc_state);
  // Install the exception as pending and unwind as if it had been raised at
  // the generator's current PC.
  thread->setPendingExceptionType(*type);
  thread->setPendingExceptionValue(*value);
  thread->setPendingExceptionTraceback(*traceback);
  DCHECK((frame->returnMode() & Frame::kExitRecursiveInterpreter) != 0,
         "expected kExitRecursiveInterpreter return mode");
  RawObject result = Interpreter::unwind(thread);
  if (!result.isErrorError()) {
    // Exception was not caught; stop generator.
    thread->setCaughtExceptionState(exc_state.previous());
    exc_state.setPrevious(NoneType::object());
    if (thread->currentFrame() != frame) {
      generator_frame.setVirtualPC(Frame::kFinishedGeneratorPC);
    }
    return Error::exception();
  }
  // A handler inside the generator caught the exception; if unwinding already
  // finished the frame, report exhaustion, else keep running the generator.
  if (frame->virtualPC() == Frame::kFinishedGeneratorPC) {
    thread->popFrame();
    return thread->raise(LayoutId::kStopIteration, NoneType::object());
  }
  return resumeGeneratorImpl(thread, generator, generator_frame, exc_state);
}
6422
// TODO(T69575746): Reduce the number of lookups by storing current generator
// state as it changes.
// Returns the object `gen` is currently delegating to: when `gen` is
// suspended exactly on a YIELD_FROM opcode, the top of its saved value stack
// (presumably the sub-iterator -- established by the YIELD_FROM contract
// elsewhere); otherwise None. Running or finished generators produce None.
RawObject Interpreter::findYieldFrom(RawGeneratorBase gen) {
  if (gen.running() == Bool::trueObj()) return NoneType::object();
  RawGeneratorFrame gf = GeneratorFrame::cast(gen.generatorFrame());
  word pc = gf.virtualPC();
  if (pc == Frame::kFinishedGeneratorPC) return NoneType::object();
  RawFunction function = Function::cast(gf.function());
  RawMutableBytes bytecode = MutableBytes::cast(function.rewrittenBytecode());
  if (bytecode.byteAt(pc) != Bytecode::YIELD_FROM) return NoneType::object();
  return gf.valueStackTop()[0];
}
6435
6436namespace {
6437
// Interpreter implementation backed by the portable C++ dispatch loop in
// `interpreterLoop`; it exposes no assembly entry points.
class CppInterpreter : public Interpreter {
 public:
  ~CppInterpreter() override;
  // Installs `interpreterLoop` as `thread`'s interpreter entry point.
  void setupThread(Thread* thread) override;
  // Always returns nullptr: there is no assembly entry for this interpreter.
  void* entryAsm(const Function& function) override;
  // Unsupported here; aborts via UNIMPLEMENTED.
  void setOpcodeCounting(bool) override;

 private:
  static RawObject interpreterLoop(Thread* thread);
};
6448
// Out-of-line destructor; there is no state to release beyond the base class.
CppInterpreter::~CppInterpreter() {}
6450
// Makes `thread` execute bytecode through the C++ dispatch loop.
void CppInterpreter::setupThread(Thread* thread) {
  thread->setInterpreterFunc(interpreterLoop);
}
6454
// No assembly entry point exists for the C++ interpreter.
void* CppInterpreter::entryAsm(const Function&) { return nullptr; }
6456
// Opcode counting is not implemented for the C++ interpreter; aborts.
void CppInterpreter::setOpcodeCounting(bool) {
  UNIMPLEMENTED("opcode counting not supported by C++ interpreter");
}
6460
// Portable bytecode dispatch loop using GCC/Clang computed gotos. Each opcode
// handler returns a `Continue` value that steers the loop: NEXT re-enters the
// fetch/dispatch sequence, everything else funnels into the shared
// return/unwind handling at the bottom.
RawObject CppInterpreter::interpreterLoop(Thread* thread) {
  // Silence warnings about computed goto
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpedantic"

  // One label per opcode. EXTENDED_ARG gets a dedicated label so the
  // argument-accumulation loop below can widen `arg` before re-dispatching.
  static const void* const dispatch_table[] = {
#define OP(name, id, handler) \
  name == EXTENDED_ARG ? &&extendedArg : &&handle##name,
      FOREACH_BYTECODE(OP)
#undef OP
  };

  Frame* frame = thread->currentFrame();
  // Ensure a return/yield from this frame exits this loop invocation instead
  // of continuing in it.
  frame->addReturnMode(Frame::kExitRecursiveInterpreter);

  Bytecode bc;
  int32_t arg;
  Continue cont;
  // Fetches the next code unit (opcode byte + argument byte), advances the
  // virtual PC past the whole unit, and returns the handler label to jump to.
  auto next_label = [&]() __attribute__((always_inline)) {
    Frame* current_frame = thread->currentFrame();
    word pc = current_frame->virtualPC();
    static_assert(endian::native == endian::little, "big endian unsupported");
    static_assert(kCodeUnitSize == sizeof(uint32_t), "matching type");
    bc = static_cast<Bytecode>(current_frame->bytecode().byteAt(pc));
    arg = current_frame->bytecode().byteAt(pc + 1);
    current_frame->setVirtualPC(pc + kCodeUnitSize);
    return dispatch_table[bc];
  };

  goto* next_label();

extendedArg:
  // Accumulate argument bytes across consecutive EXTENDED_ARG prefixes, then
  // dispatch the real opcode with the widened argument.
  do {
    Frame* current_frame = thread->currentFrame();
    word pc = current_frame->virtualPC();
    static_assert(endian::native == endian::little, "big endian unsupported");
    static_assert(kCodeUnitSize == sizeof(uint32_t), "matching type");
    uint16_t bytes_at = current_frame->bytecode().uint16At(pc);
    current_frame->setVirtualPC(pc + kCodeUnitSize);
    bc = static_cast<Bytecode>(bytes_at & 0xFF);
    arg = (arg << 8) | (bytes_at >> 8);
  } while (bc == EXTENDED_ARG);
  goto* dispatch_table[bc];

  // One handler stub per opcode: run the handler, loop on NEXT, otherwise
  // fall through to the shared return/unwind logic below.
#define OP(name, id, handler) \
  handle##name : cont = handler(thread, arg); \
  if (LIKELY(cont == Continue::NEXT)) goto* next_label(); \
  goto handle_return_or_unwind;
  FOREACH_BYTECODE(OP)
#undef OP

handle_return_or_unwind:
  // `unwind`/`handleReturn` signal "keep interpreting in this loop" by
  // returning Error::error(); any other value is the loop's final result.
  if (cont == Continue::UNWIND) {
    RawObject result = unwind(thread);
    if (!result.isErrorError()) {
      return result;
    }
  } else if (cont == Continue::RETURN) {
    RawObject result = handleReturn(thread);
    if (!result.isErrorError()) {
      return result;
    }
  } else {
    DCHECK(cont == Continue::YIELD, "expected RETURN, UNWIND or YIELD");
    return thread->stackPop();
  }
  goto* next_label();
#pragma GCC diagnostic pop
}
6530
6531} // namespace
6532
// Creates a C++-loop interpreter instance; ownership passes to the caller.
Interpreter* createCppInterpreter() { return new CppInterpreter; }
6534
6535} // namespace py