this repo has no description
1// Copyright (c) Facebook, Inc. and its affiliates. (http://www.facebook.com)
2#include "ic.h"
3
4#include "attributedict.h"
5#include "bytecode.h"
6#include "dict-builtins.h"
7#include "interpreter.h"
8#include "runtime.h"
9#include "str-builtins.h"
10#include "type-builtins.h"
11#include "utils.h"
12
13namespace py {
14
15ICState icCurrentState(RawTuple caches, word cache) {
16 word index = cache * kIcPointersPerEntry;
17 RawObject key = caches.at(index + kIcEntryKeyOffset);
18 if (key.isNoneType()) {
19 return ICState::kAnamorphic;
20 }
21 if (key.isSmallInt()) {
22 return ICState::kMonomorphic;
23 }
24 DCHECK(key.isUnbound(),
25 "unbound is the expected key for a polymorphic cache");
26 return ICState::kPolymorphic;
27}
28
// Perform the same lookup operation as typeLookupNameInMro as we're inserting
// dependent into the ValueCell in each visited type dictionary.
static void insertDependencyForTypeLookupInMro(Thread* thread,
                                               LayoutId layout_id,
                                               const Object& name,
                                               const Object& dependent) {
  HandleScope scope(thread);
  Type type(&scope, thread->runtime()->typeAt(layout_id));
  // Types without a mutable dict cannot be modified, so no invalidation
  // dependency is needed.
  if (!type.hasMutableDict()) return;
  Tuple mro(&scope, type.mro());
  Type mro_type(&scope, *type);
  for (word i = 0; i < mro.length(); i++) {
    mro_type = mro.at(i);
    if (!mro_type.hasMutableDict()) return;
    // attributeValueCellAtPut creates a placeholder ValueCell when `name` is
    // not present, so dependencies can be tracked even for attributes that
    // might only be defined later.
    ValueCell value_cell(&scope,
                         attributeValueCellAtPut(thread, mro_type, name));
    icInsertDependentToValueCellDependencyLink(thread, dependent, value_cell);
    if (!value_cell.isPlaceholder()) {
      // Attribute lookup terminates here. Therefore, no dependency tracking is
      // needed afterwards.
      return;
    }
  }
}
53
// Register `dependent` on `name`'s ValueCells along `layout_id`'s MRO so the
// cached `__class__`-style lookup is invalidated when the attribute changes.
void icUpdateDunderClass(Thread* thread, LayoutId layout_id, const Object& name,
                         const Object& dependent) {
  insertDependencyForTypeLookupInMro(thread, layout_id, name, dependent);
}
58
// Cache `value` under `layout_id` in the attribute cache entry at `cache`,
// upgrading a monomorphic entry to a polymorphic cache on a key miss, and
// register `dependent` for invalidation. Returns the resulting cache state.
ICState icUpdateAttr(Thread* thread, const MutableTuple& caches, word cache,
                     LayoutId layout_id, const Object& value,
                     const Object& name, const Function& dependent) {
  Runtime* runtime = thread->runtime();
  RawSmallInt key = SmallInt::fromWord(static_cast<word>(layout_id));
  word index = cache * kIcPointersPerEntry;
  RawObject entry_key = caches.at(index + kIcEntryKeyOffset);
  // Empty entry or same layout id: (re)fill the monomorphic entry in place.
  if (entry_key.isNoneType() || entry_key == key) {
    caches.atPut(index + kIcEntryKeyOffset, key);
    caches.atPut(index + kIcEntryValueOffset, *value);
    insertDependencyForTypeLookupInMro(thread, layout_id, name, dependent);
    return ICState::kMonomorphic;
  }
  if (!caches.at(index + kIcEntryKeyOffset).isUnbound()) {
    // This is currently not a polymorphic cache.
    // Migrate the existing monomorphic entry into a freshly allocated
    // polymorphic cache tuple.
    HandleScope scope(thread);
    MutableTuple polymorphic_cache(
        &scope, runtime->newMutableTuple(kIcPointersPerPolyCache));
    polymorphic_cache.fill(NoneType::object());
    polymorphic_cache.atPut(kIcEntryKeyOffset,
                            caches.at(index + kIcEntryKeyOffset));
    polymorphic_cache.atPut(kIcEntryValueOffset,
                            caches.at(index + kIcEntryValueOffset));
    // Mark this entry as a polymorphic cache.
    caches.atPut(index + kIcEntryKeyOffset, Unbound::object());
    caches.atPut(index + kIcEntryValueOffset, *polymorphic_cache);
  }
  RawMutableTuple polymorphic_cache =
      MutableTuple::cast(caches.at(index + kIcEntryValueOffset));
  // Fill the first free (or matching) slot. If the polymorphic cache is
  // already full, the update is silently dropped.
  for (word j = 0; j < kIcPointersPerPolyCache; j += kIcPointersPerEntry) {
    entry_key = polymorphic_cache.at(j + kIcEntryKeyOffset);
    if (entry_key.isNoneType() || entry_key == key) {
      polymorphic_cache.atPut(j + kIcEntryKeyOffset, key);
      polymorphic_cache.atPut(j + kIcEntryValueOffset, *value);
      insertDependencyForTypeLookupInMro(thread, layout_id, name, dependent);
      break;
    }
  }
  return ICState::kPolymorphic;
}
99
100bool icIsCacheEmpty(const MutableTuple& caches, word cache) {
101 word index = cache * kIcPointersPerEntry;
102 return caches.at(index + kIcEntryKeyOffset).isNoneType();
103}
104
105void icUpdateAttrModule(Thread* thread, const MutableTuple& caches, word cache,
106 const Object& receiver, const ValueCell& value_cell,
107 const Function& dependent) {
108 DCHECK(icIsCacheEmpty(caches, cache), "cache must be empty\n");
109 HandleScope scope(thread);
110 word index = cache * kIcPointersPerEntry;
111 Module module(&scope, *receiver);
112 caches.atPut(index + kIcEntryKeyOffset, SmallInt::fromWord(module.id()));
113 caches.atPut(index + kIcEntryValueOffset, *value_cell);
114 RawMutableBytes bytecode =
115 RawMutableBytes::cast(dependent.rewrittenBytecode());
116 word pc = thread->currentFrame()->virtualPC() - kCodeUnitSize;
117 DCHECK(bytecode.byteAt(pc) == LOAD_ATTR_ANAMORPHIC,
118 "current opcode must be LOAD_ATTR_ANAMORPHIC");
119 bytecode.byteAtPut(pc, LOAD_ATTR_MODULE);
120 icInsertDependentToValueCellDependencyLink(thread, dependent, value_cell);
121}
122
123void icUpdateMethodModule(Thread* thread, const MutableTuple& caches,
124 word cache, const Object& receiver,
125 const ValueCell& value_cell,
126 const Function& dependent) {
127 DCHECK(icIsCacheEmpty(caches, cache), "cache must be empty\n");
128 HandleScope scope(thread);
129 word index = cache * kIcPointersPerEntry;
130 Module module(&scope, *receiver);
131 caches.atPut(index + kIcEntryKeyOffset, SmallInt::fromWord(module.id()));
132 caches.atPut(index + kIcEntryValueOffset, *value_cell);
133 RawMutableBytes bytecode =
134 RawMutableBytes::cast(dependent.rewrittenBytecode());
135 word pc = thread->currentFrame()->virtualPC() - kCodeUnitSize;
136 DCHECK(bytecode.byteAt(pc) == LOAD_METHOD_ANAMORPHIC,
137 "current opcode must be LOAD_METHOD_ANAMORPHIC");
138 bytecode.byteAtPut(pc, LOAD_METHOD_MODULE);
139 icInsertDependentToValueCellDependencyLink(thread, dependent, value_cell);
140}
141
142void icUpdateAttrType(Thread* thread, const MutableTuple& caches, word cache,
143 const Object& receiver, const Object& selector,
144 const Object& value, const Function& dependent) {
145 DCHECK(icIsCacheEmpty(caches, cache), "cache must be empty\n");
146 word index = cache * kIcPointersPerEntry;
147 HandleScope scope(thread);
148 Type type(&scope, *receiver);
149 word id = static_cast<word>(type.instanceLayoutId());
150 caches.atPut(index + kIcEntryKeyOffset, SmallInt::fromWord(id));
151 caches.atPut(index + kIcEntryValueOffset, *value);
152 RawMutableBytes bytecode =
153 RawMutableBytes::cast(dependent.rewrittenBytecode());
154 word pc = thread->currentFrame()->virtualPC() - kCodeUnitSize;
155 DCHECK(bytecode.byteAt(pc) == LOAD_ATTR_ANAMORPHIC,
156 "current opcode must be LOAD_ATTR_ANAMORPHIC");
157 bytecode.byteAtPut(pc, LOAD_ATTR_TYPE);
158 LayoutId layout_id = receiver.rawCast<RawType>().instanceLayoutId();
159 insertDependencyForTypeLookupInMro(thread, layout_id, selector, dependent);
160}
161
162static void icInsertConstructorDependencies(Thread* thread, LayoutId layout_id,
163 const Function& dependent) {
164 HandleScope scope(thread);
165 Runtime* runtime = thread->runtime();
166 Object dunder_new(&scope, runtime->symbols()->at(ID(__new__)));
167 insertDependencyForTypeLookupInMro(thread, layout_id, dunder_new, dependent);
168 Object dunder_init(&scope, runtime->symbols()->at(ID(__init__)));
169 insertDependencyForTypeLookupInMro(thread, layout_id, dunder_init, dependent);
170}
171
172void icUpdateCallFunctionTypeNew(Thread* thread, const MutableTuple& caches,
173 word cache, const Object& receiver,
174 const Object& constructor,
175 const Function& dependent) {
176 DCHECK(icIsCacheEmpty(caches, cache), "cache must be empty\n");
177 word index = cache * kIcPointersPerEntry;
178 HandleScope scope(thread);
179 Type type(&scope, *receiver);
180 word id = static_cast<word>(type.instanceLayoutId());
181 caches.atPut(index + kIcEntryKeyOffset, SmallInt::fromWord(id));
182 caches.atPut(index + kIcEntryValueOffset, *constructor);
183 if (!type.isBuiltin()) {
184 icInsertConstructorDependencies(thread, static_cast<LayoutId>(id),
185 dependent);
186 }
187}
188
// Remove every WeakLink whose referent has been collected (is None) from
// `cell`'s dependency list, splicing prev/next pointers around each dead
// node.
void icRemoveDeadWeakLinks(RawValueCell cell) {
  DCHECK(!cell.dependencyLink().isNoneType(),
         "unlink should not be called with an empty list");
  for (RawObject curr = cell.dependencyLink(); !curr.isNoneType();
       curr = WeakLink::cast(curr).next()) {
    if (!WeakLink::cast(curr).referent().isNoneType()) {
      // Link is still alive; keep it.
      continue;
    }
    RawObject prev = WeakLink::cast(curr).prev();
    RawObject next = WeakLink::cast(curr).next();
    if (prev.isNoneType()) {
      // Special case: unlinking the head.
      cell.setDependencyLink(next);
    } else {
      WeakLink::cast(prev).setNext(next);
    }
    if (!next.isNoneType()) {
      WeakLink::cast(next).setPrev(prev);
    }
  }
}
210
// Insert `dependent` into `value_cell`'s weak dependency list unless it is
// already present. A dead link found during the scan is reused as the
// insertion slot; any additional dead links are cleaned up. Returns true if
// the dependent was newly inserted.
bool icInsertDependentToValueCellDependencyLink(Thread* thread,
                                                const Object& dependent,
                                                const ValueCell& value_cell) {
  RawObject empty_link = NoneType::object();
  bool has_dead_links = false;
  for (RawObject curr = value_cell.dependencyLink(); !curr.isNoneType();
       curr = WeakLink::cast(curr).next()) {
    RawObject referent = WeakLink::cast(curr).referent();
    if (referent == *dependent) {
      // The dependent is already in the list. Don't add it again.
      if (has_dead_links) icRemoveDeadWeakLinks(*value_cell);
      return false;
    }
    if (referent.isNoneType()) {
      if (empty_link.isNoneType()) {
        // Save the current WeakLink as a potential space for the new
        // dependent.
        empty_link = curr;
      } else {
        // We need to clean up the dead WeakLinks later.
        has_dead_links = true;
      }
    }
  }
  if (!empty_link.isNoneType()) {
    // We did not find dependent and we have a space for it, so fill the space
    WeakLink::cast(empty_link).setReferent(*dependent);
    if (has_dead_links) icRemoveDeadWeakLinks(*value_cell);
    return true;
  }
  // We did not find the dependent and we do not have space for it, so allocate
  // space and prepend it to the list.
  // Note that this implies that there were no dead WeakLinks.
  HandleScope scope(thread);
  Object old_head(&scope, value_cell.dependencyLink());
  Object none(&scope, NoneType::object());
  WeakLink new_head(&scope, thread->runtime()->newWeakLink(thread, dependent,
                                                           none, old_head));
  if (old_head.isWeakLink()) {
    WeakLink::cast(*old_head).setPrev(*new_head);
  }
  value_cell.setDependencyLink(*new_head);
  return true;
}
255
256static void insertBinaryOpDependencies(Thread* thread,
257 const Function& dependent,
258 LayoutId left_layout_id,
259 const SymbolId left_operator_id,
260 LayoutId right_layout_id,
261 const SymbolId right_operator_id) {
262 HandleScope scope(thread);
263 Runtime* runtime = thread->runtime();
264 Object left_op_name(&scope, runtime->symbols()->at(left_operator_id));
265 insertDependencyForTypeLookupInMro(thread, left_layout_id, left_op_name,
266 dependent);
267 Object right_op_name(&scope, runtime->symbols()->at(right_operator_id));
268 insertDependencyForTypeLookupInMro(thread, right_layout_id, right_op_name,
269 dependent);
270}
271
272void icInsertBinaryOpDependencies(Thread* thread, const Function& dependent,
273 LayoutId left_layout_id,
274 LayoutId right_layout_id,
275 Interpreter::BinaryOp op) {
276 SymbolId left_operator_id = Interpreter::binaryOperationSelector(op);
277 SymbolId right_operator_id = Interpreter::swappedBinaryOperationSelector(op);
278 insertBinaryOpDependencies(thread, dependent, left_layout_id,
279 left_operator_id, right_layout_id,
280 right_operator_id);
281}
282
283void icInsertCompareOpDependencies(Thread* thread, const Function& dependent,
284 LayoutId left_layout_id,
285 LayoutId right_layout_id, CompareOp op) {
286 SymbolId left_operator_id = Interpreter::comparisonSelector(op);
287 SymbolId right_operator_id = Interpreter::swappedComparisonSelector(op);
288 insertBinaryOpDependencies(thread, dependent, left_layout_id,
289 left_operator_id, right_layout_id,
290 right_operator_id);
291}
292
293void icInsertInplaceOpDependencies(Thread* thread, const Function& dependent,
294 LayoutId left_layout_id,
295 LayoutId right_layout_id,
296 Interpreter::BinaryOp op) {
297 Runtime* runtime = thread->runtime();
298 HandleScope scope(thread);
299 Object inplace_op_name(
300 &scope,
301 runtime->symbols()->at(Interpreter::inplaceOperationSelector(op)));
302 insertDependencyForTypeLookupInMro(thread, left_layout_id, inplace_op_name,
303 dependent);
304 SymbolId left_operator_id = Interpreter::binaryOperationSelector(op);
305 SymbolId right_operator_id = Interpreter::swappedBinaryOperationSelector(op);
306 insertBinaryOpDependencies(thread, dependent, left_layout_id,
307 left_operator_id, right_layout_id,
308 right_operator_id);
309}
310
// Unlink the first WeakLink in `value_cell`'s dependency list whose referent
// is `dependent`, if any.
void icDeleteDependentInValueCell(Thread* thread, const ValueCell& value_cell,
                                  const Object& dependent) {
  HandleScope scope(thread);
  Object link(&scope, value_cell.dependencyLink());
  Object prev(&scope, NoneType::object());
  while (!link.isNoneType()) {
    WeakLink weak_link(&scope, *link);
    if (weak_link.referent() == *dependent) {
      // Splice the node out of the doubly-linked list.
      if (weak_link.next().isWeakLink()) {
        WeakLink::cast(weak_link.next()).setPrev(*prev);
      }
      if (prev.isWeakLink()) {
        WeakLink::cast(*prev).setNext(weak_link.next());
      } else {
        // Deleting the head: point the cell at the successor.
        value_cell.setDependencyLink(weak_link.next());
      }
      break;
    }
    prev = *link;
    link = weak_link.next();
  }
}
333
// Remove `dependent` from `attr_name`'s ValueCell dependency list in every
// type along `cached_layout_id`'s MRO, from the cached type down to (and
// including) `new_defining_type`, where the lookup now terminates.
void icDeleteDependentFromInheritingTypes(Thread* thread,
                                          LayoutId cached_layout_id,
                                          const Object& attr_name,
                                          const Type& new_defining_type,
                                          const Object& dependent) {
  DCHECK(icIsCachedAttributeAffectedByUpdatedType(thread, cached_layout_id,
                                                  attr_name, new_defining_type),
         "icIsCachedAttributeAffectedByUpdatedType must return true");
  HandleScope scope(thread);
  Runtime* runtime = thread->runtime();
  Type cached_type(&scope, runtime->typeAt(cached_layout_id));
  Tuple mro(&scope, cached_type.mro());
  Type mro_type(&scope, *cached_type);
  word hash = strHash(thread, *attr_name);
  for (word i = 0; i < mro.length(); ++i) {
    mro_type = mro.at(i);
    // If a mro_type's dict is not mutable, its parents' dicts are not
    // mutable either. Therefore, we can stop the MRO search here.
    if (!mro_type.hasMutableDict()) break;
    RawObject value_cell_raw = NoneType::object();
    bool found = attributeValueCellAtWithHash(*mro_type, *attr_name, hash,
                                              &value_cell_raw);
    DCHECK(found, "value cell not found");
    ValueCell value_cell(&scope, value_cell_raw);
    icDeleteDependentInValueCell(thread, value_cell, dependent);
    if (mro_type == new_defining_type) {
      // This can be a placeholder for some caching opcodes that depend on not
      // found attributes. For example, a >= b depends on type(b).__le__ even
      // when it is not found in case it's defined afterwards.
      return;
    }
    DCHECK(value_cell.isPlaceholder(),
           "value_cell below updated_type must be Placeholder");
  }
}
369
// Walk `cached_layout_id`'s MRO and return the furthest supertype that has a
// ValueCell for `attr_name` and is not also referenced by another cache in
// `dependent`. Returns Error::notFound() when no such supertype exists (e.g.
// even the cached type itself is still referenced by another cache).
RawObject icHighestSuperTypeNotInMroOfOtherCachedTypes(
    Thread* thread, LayoutId cached_layout_id, const Object& attr_name,
    const Function& dependent) {
  HandleScope scope(thread);
  Object supertype_obj(&scope, NoneType::object());
  Runtime* runtime = thread->runtime();
  Type type(&scope, runtime->typeAt(cached_layout_id));
  Tuple mro(&scope, type.mro());
  word hash = strHash(thread, *attr_name);
  Type mro_type(&scope, *type);
  for (word i = 0; i < mro.length(); ++i) {
    mro_type = mro.at(i);
    // Immutable dicts never carry dependency links; stop the search here.
    if (!mro_type.hasMutableDict()) {
      break;
    }
    RawObject unused = NoneType::object();
    // Stop when attr_name has no ValueCell in this type (hence no
    // dependencies here or above), or when another cache in `dependent`
    // still relies on this type's entry.
    if (!attributeValueCellAtWithHash(*mro_type, *attr_name, hash, &unused) ||
        icIsAttrCachedInDependent(thread, mro_type, attr_name, dependent)) {
      break;
    }
    supertype_obj = *mro_type;
  }
  if (supertype_obj.isNoneType()) {
    return Error::notFound();
  }
  return *supertype_obj;
}
397
// Return true if an update to `attribute_name` on `updated_type` would
// shadow the value a cache keyed on `cached_layout_id` observed: i.e.
// `updated_type` is on the cached type's MRO and no type before it provides
// a non-placeholder value for the attribute.
bool icIsCachedAttributeAffectedByUpdatedType(Thread* thread,
                                              LayoutId cached_layout_id,
                                              const Object& attribute_name,
                                              const Type& updated_type) {
  HandleScope scope(thread);
  Runtime* runtime = thread->runtime();
  Type cached_type(&scope, runtime->typeAt(cached_layout_id));
  // Only subclasses of updated_type can be affected by the update.
  if (!typeIsSubclass(*cached_type, *updated_type)) {
    return false;
  }
  Tuple mro(&scope, cached_type.mro());
  Type mro_type(&scope, *cached_type);
  Object result(&scope, NoneType::object());
  word hash = strHash(thread, *attribute_name);
  for (word i = 0; i < mro.length(); ++i) {
    mro_type = mro.at(i);
    // If a mro_type's dict is not mutable, its parents' dicts are not
    // mutable either. Therefore, we can stop the MRO search here.
    if (!mro_type.hasMutableDict()) break;
    if (!attributeValueCellAtWithHash(*mro_type, *attribute_name, hash,
                                      &result)) {
      // No ValueCell found, implying that no dependencies in this type dict and
      // above.
      return false;
    }
    DCHECK(result.isValueCell(), "result must be ValueCell");
    if (mro_type == updated_type) {
      // The current type in MRO is the searched type, and the searched
      // attribute is not found in the MRO so far, so type[attribute_name] is
      // the one retrieved from this mro.
      return true;
    }
    if (!ValueCell::cast(*result).isPlaceholder()) {
      // A non-placeholder is found for the attribute, this is retrieved as the
      // value for the attribute, so no shadowing happens.
      return false;
    }
  }
  return false;
}
438
// Return true if `dependent` contains at least one inline cache whose cached
// lookup of `attr_name` would be affected by an update of `type`. Attribute
// caches are checked directly; binop/inplace caches check both operand sides
// (and the in-place selector for inplace caches).
bool icIsAttrCachedInDependent(Thread* thread, const Type& type,
                               const Object& attr_name,
                               const Function& dependent) {
  HandleScope scope(thread);
  Runtime* runtime = thread->runtime();
  for (IcIterator it(&scope, runtime, *dependent); it.hasNext(); it.next()) {
    if (it.isAttrCache()) {
      if (!it.isAttrNameEqualTo(attr_name)) {
        continue;
      }
      if (icIsCachedAttributeAffectedByUpdatedType(thread, it.layoutId(),
                                                   attr_name, type)) {
        return true;
      }
    } else {
      DCHECK(it.isBinaryOpCache() || it.isInplaceOpCache(),
             "a cache must be for binops or inplace-ops");
      if (attr_name == it.leftMethodName() &&
          icIsCachedAttributeAffectedByUpdatedType(thread, it.leftLayoutId(),
                                                   attr_name, type)) {
        return true;
      }
      if (attr_name == it.rightMethodName() &&
          icIsCachedAttributeAffectedByUpdatedType(thread, it.rightLayoutId(),
                                                   attr_name, type)) {
        return true;
      }
      // In-place caches additionally depend on the in-place selector looked
      // up on the left operand's type.
      if (it.isInplaceOpCache() && attr_name == it.inplaceMethodName() &&
          icIsCachedAttributeAffectedByUpdatedType(thread, it.leftLayoutId(),
                                                   attr_name, type)) {
        return true;
      }
    }
  }
  return false;
}
475
// Evict the attribute cache pointed to by `it` if it would be shadowed by
// the assignment of `updated_attr` on `updated_type`, and drop the
// dependency links that existed only for that cache.
void icEvictAttr(Thread* thread, const IcIterator& it, const Type& updated_type,
                 const Object& updated_attr, AttributeKind attribute_kind,
                 const Function& dependent) {
  DCHECK(it.isAttrCache(), "ic should point to an attribute cache");
  if (!it.isAttrNameEqualTo(updated_attr)) {
    return;
  }
  // We don't invalidate instance offset caches when non-data descriptor is
  // assigned to the cached type.
  if (it.isInstanceAttr() &&
      attribute_kind == AttributeKind::kNotADataDescriptor) {
    return;
  }
  // The updated type doesn't shadow the cached type.
  if (!icIsCachedAttributeAffectedByUpdatedType(thread, it.layoutId(),
                                                updated_attr, updated_type)) {
    return;
  }

  // Now that we know that the updated type attribute shadows the cached type
  // attribute, clear the cache.
  LayoutId cached_layout_id = it.layoutId();
  it.evict();

  // Delete all direct/indirect dependencies from the deleted cache to
  // dependent since such dependencies are gone now.
  // TODO(T54202245): Remove dependency links in parent classes of updated_type.
  icDeleteDependentFromInheritingTypes(thread, cached_layout_id, updated_attr,
                                       updated_type, dependent);
}
506
// Remove `dependent` from the dependency lists along `cached_layout_id`'s
// MRO for `attr_name`, but only up to the highest supertype that no other
// cache in this function still relies upon.
void icDeleteDependentToDefiningType(Thread* thread, const Function& dependent,
                                     LayoutId cached_layout_id,
                                     const Object& attr_name) {
  HandleScope scope(thread);
  // Walk up the MRO from the updated class looking for a super type that is not
  // referenced by another cache in this same function.
  Object supertype_obj(&scope,
                       icHighestSuperTypeNotInMroOfOtherCachedTypes(
                           thread, cached_layout_id, attr_name, dependent));
  if (supertype_obj.isErrorNotFound()) {
    // typeAt(other_cached_layout_id).other_method_name_id is still cached so no
    // more dependencies need to be deleted.
    return;
  }
  Type supertype(&scope, *supertype_obj);
  // Remove this function from all of the dependency links in the dictionaries
  // of supertypes from the updated type up to the last supertype that is
  // exclusively referenced by the type in this cache (and no other caches in
  // this function.)
  icDeleteDependentFromInheritingTypes(thread, cached_layout_id, attr_name,
                                       supertype, dependent);

  // TODO(T54202245): Remove dependency links in the parent classes of
  // other_cached_layout_id.
}
532
// TODO(T54277418): Pass SymbolId for updated_attr.
// Evict the binary-op cache pointed to by `it` when the assignment of
// `updated_attr` on `updated_type` shadows the method it cached for either
// operand side, then unlink the dependency chains that only served the
// evicted entry.
void icEvictBinaryOp(Thread* thread, const IcIterator& it,
                     const Type& updated_type, const Object& updated_attr,
                     const Function& dependent) {
  if (it.leftMethodName() != updated_attr &&
      it.rightMethodName() != updated_attr) {
    // This cache cannot be affected since it references a different attribute
    // than the one we are looking for.
    return;
  }
  bool evict_lhs = false;
  if (it.leftMethodName() == updated_attr) {
    evict_lhs = icIsCachedAttributeAffectedByUpdatedType(
        thread, it.leftLayoutId(), updated_attr, updated_type);
  }
  bool evict_rhs = false;
  if (!evict_lhs && it.rightMethodName() == updated_attr) {
    evict_rhs = icIsCachedAttributeAffectedByUpdatedType(
        thread, it.rightLayoutId(), updated_attr, updated_type);
  }

  if (!evict_lhs && !evict_rhs) {
    // This cache does not reference attributes that are implemented by the
    // affected type.
    return;
  }

  LayoutId cached_layout_id = it.leftLayoutId();
  LayoutId other_cached_layout_id = it.rightLayoutId();
  HandleScope scope(thread);
  Object other_method_name(&scope, it.rightMethodName());
  if (evict_rhs) {
    // The RHS type is the one that is being affected. This is either because
    // the RHS type is a supertype of the LHS type or because the LHS type did
    // not implement the binary operation.
    cached_layout_id = it.rightLayoutId();
    other_cached_layout_id = it.leftLayoutId();
    other_method_name = it.leftMethodName();
  }
  it.evict();

  // Remove this function from the dependency links in the dictionaries of
  // subtypes, starting at cached type, of the updated type that looked up the
  // attribute through the updated type.
  icDeleteDependentFromInheritingTypes(thread, cached_layout_id, updated_attr,
                                       updated_type, dependent);

  // TODO(T54202245): Remove dependency links in parent classes of update_type.

  icDeleteDependentToDefiningType(thread, dependent, other_cached_layout_id,
                                  other_method_name);
}
585
// Evict the inplace-op cache pointed to by `it` when the assignment of
// `updated_attr` on `updated_type` shadows any of the three selectors it may
// have cached (in-place, binary, or reflected), then unlink the dependency
// chains that only served the evicted entry.
void icEvictInplaceOp(Thread* thread, const IcIterator& it,
                      const Type& updated_type, const Object& updated_attr,
                      const Function& dependent) {
  if (it.inplaceMethodName() != updated_attr &&
      it.leftMethodName() != updated_attr &&
      it.rightMethodName() != updated_attr) {
    // This cache cannot be affected since it references a different attribute
    // than the one we are looking for.
    return;
  }
  // Check the three selectors in dispatch order; only the first affected one
  // drives the eviction below.
  bool evict_inplace = false;
  if (it.inplaceMethodName() == updated_attr) {
    evict_inplace = icIsCachedAttributeAffectedByUpdatedType(
        thread, it.leftLayoutId(), updated_attr, updated_type);
  }
  bool evict_lhs = false;
  if (!evict_inplace && it.leftMethodName() == updated_attr) {
    evict_lhs = icIsCachedAttributeAffectedByUpdatedType(
        thread, it.leftLayoutId(), updated_attr, updated_type);
  }
  bool evict_rhs = false;
  if (!evict_inplace && !evict_lhs && it.rightMethodName() == updated_attr) {
    evict_rhs = icIsCachedAttributeAffectedByUpdatedType(
        thread, it.rightLayoutId(), updated_attr, updated_type);
  }

  if (!evict_inplace && !evict_lhs && !evict_rhs) {
    // This cache does not reference attributes that are implemented by the
    // affected type.
    return;
  }

  LayoutId left_layout_id = it.leftLayoutId();
  LayoutId right_layout_id = it.rightLayoutId();
  HandleScope scope(thread);
  Object inplace_method_name(&scope, it.inplaceMethodName());
  Object left_method_name(&scope, it.leftMethodName());
  Object right_method_name(&scope, it.rightMethodName());
  it.evict();

  // Remove this function from the dependency links in the dictionaries of
  // subtypes of the directly affected type.
  // There are two other types that this function may not depend on anymore due
  // to this eviction. We remove this function from the dependency links of
  // these types up to their defining types to get rid of the dependencies being
  // tracked for the evicted cache.
  // TODO(T54202245): Remove dependency links in parent classes of the directly
  // affected type.
  if (evict_inplace) {
    icDeleteDependentFromInheritingTypes(
        thread, left_layout_id, inplace_method_name, updated_type, dependent);
    icDeleteDependentToDefiningType(thread, dependent, left_layout_id,
                                    left_method_name);
    icDeleteDependentToDefiningType(thread, dependent, right_layout_id,
                                    right_method_name);
    return;
  }
  if (evict_lhs) {
    icDeleteDependentFromInheritingTypes(
        thread, left_layout_id, left_method_name, updated_type, dependent);
    icDeleteDependentToDefiningType(thread, dependent, left_layout_id,
                                    inplace_method_name);
    icDeleteDependentToDefiningType(thread, dependent, right_layout_id,
                                    right_method_name);
    return;
  }
  DCHECK(evict_rhs, "evict_rhs must be true");
  icDeleteDependentFromInheritingTypes(
      thread, right_layout_id, right_method_name, updated_type, dependent);
  icDeleteDependentToDefiningType(thread, dependent, left_layout_id,
                                  inplace_method_name);
  icDeleteDependentToDefiningType(thread, dependent, left_layout_id,
                                  left_method_name);
}
660
661void icEvictCache(Thread* thread, const Function& dependent, const Type& type,
662 const Object& attr_name, AttributeKind attribute_kind) {
663 HandleScope scope(thread);
664 // Scan through all caches and delete caches shadowed by type.attr_name.
665 // TODO(T54277418): Filter out attr that cannot be converted to SymbolId.
666 for (IcIterator it(&scope, thread->runtime(), *dependent); it.hasNext();
667 it.next()) {
668 if (it.isAttrCache()) {
669 icEvictAttr(thread, it, type, attr_name, attribute_kind, dependent);
670 } else if (it.isBinaryOpCache()) {
671 icEvictBinaryOp(thread, it, type, attr_name, dependent);
672 } else if (it.isInplaceOpCache()) {
673 icEvictInplaceOp(thread, it, type, attr_name, dependent);
674 } else {
675 CHECK(it.isModuleAttrCache(),
676 "a cache must be for attributes, binops, or inplace-ops");
677 }
678 }
679}
680
681void icInvalidateAttr(Thread* thread, const Type& type, const Object& attr_name,
682 const ValueCell& value_cell) {
683 HandleScope scope(thread);
684 // Delete caches for attr_name to be shadowed by the type[attr_name]
685 // change in all dependents that depend on the attribute being updated.
686 Type value_type(&scope, thread->runtime()->typeOf(value_cell.value()));
687 AttributeKind attribute_kind = typeIsDataDescriptor(*value_type)
688 ? AttributeKind::kDataDescriptor
689 : AttributeKind::kNotADataDescriptor;
690 Object link(&scope, value_cell.dependencyLink());
691 while (!link.isNoneType()) {
692 Function dependent(&scope, WeakLink::cast(*link).referent());
693 // Capturing the next node in case the current node is deleted by
694 // icEvictCacheForTypeAttrInDependent
695 link = WeakLink::cast(*link).next();
696 icEvictCache(thread, dependent, type, attr_name, attribute_kind);
697 }
698}
699
700RawSmallInt encodeBinaryOpKey(LayoutId left_layout_id, LayoutId right_layout_id,
701 BinaryOpFlags flags) {
702 word key_high_bits = static_cast<word>(left_layout_id)
703 << Header::kLayoutIdBits |
704 static_cast<word>(right_layout_id);
705 return SmallInt::fromWord(key_high_bits << kBitsPerByte |
706 static_cast<word>(flags));
707}
708
// Cache `value` for a binary operation on operands with `left_layout_id` and
// `right_layout_id`. The key packs both layout ids plus `flags`; entries are
// matched on the layout-id bits only, so a hit with different flags
// overwrites in place. Upgrades a monomorphic entry to a polymorphic cache
// on a key miss. Returns the resulting cache state.
ICState icUpdateBinOp(Thread* thread, const MutableTuple& caches, word cache,
                      LayoutId left_layout_id, LayoutId right_layout_id,
                      const Object& value, BinaryOpFlags flags) {
  static_assert(Header::kLayoutIdBits * 2 + kBitsPerByte <= SmallInt::kBits,
                "Two layout ids and flags overflow a SmallInt");
  // The high bits identify the operand layouts; the low byte (flags) is
  // ignored when matching an existing entry.
  word key_high_bits = static_cast<word>(left_layout_id)
                           << Header::kLayoutIdBits |
                       static_cast<word>(right_layout_id);
  word index = cache * kIcPointersPerEntry;
  RawObject new_key = encodeBinaryOpKey(left_layout_id, right_layout_id, flags);
  RawObject entry_key = caches.at(index + kIcEntryKeyOffset);
  if (entry_key.isNoneType() ||
      (entry_key.isSmallInt() &&
       SmallInt::cast(entry_key).value() >> kBitsPerByte == key_high_bits)) {
    caches.atPut(index + kIcEntryKeyOffset, new_key);
    caches.atPut(index + kIcEntryValueOffset, *value);
    return ICState::kMonomorphic;
  }

  if (!caches.at(index + kIcEntryKeyOffset).isUnbound()) {
    // Upgrade this cache to a polymorphic cache.
    HandleScope scope(thread);
    MutableTuple polymorphic_cache(
        &scope, thread->runtime()->newMutableTuple(kIcPointersPerPolyCache));
    polymorphic_cache.fill(NoneType::object());
    polymorphic_cache.atPut(kIcEntryKeyOffset,
                            caches.at(index + kIcEntryKeyOffset));
    polymorphic_cache.atPut(kIcEntryValueOffset,
                            caches.at(index + kIcEntryValueOffset));
    // Mark this entry as a polymorphic cache.
    caches.atPut(index + kIcEntryKeyOffset, Unbound::object());
    caches.atPut(index + kIcEntryValueOffset, *polymorphic_cache);
  }
  RawMutableTuple polymorphic_cache =
      MutableTuple::cast(caches.at(index + kIcEntryValueOffset));
  // Fill the first free (or matching) slot. If the polymorphic cache is
  // already full, the update is silently dropped.
  for (word j = 0; j < kIcPointersPerPolyCache; j += kIcPointersPerEntry) {
    entry_key = polymorphic_cache.at(j + kIcEntryKeyOffset);
    if (entry_key.isNoneType() ||
        SmallInt::cast(entry_key).value() >> kBitsPerByte == key_high_bits) {
      polymorphic_cache.atPut(j + kIcEntryKeyOffset, new_key);
      polymorphic_cache.atPut(j + kIcEntryValueOffset, *value);
      break;
    }
  }
  return ICState::kPolymorphic;
}
755
// Cache `value_cell` at `index` in `function`'s caches, register `function`
// as a dependent of the cell, and rewrite every LOAD_GLOBAL/STORE_GLOBAL
// opcode referencing that name to its cached variant.
void icUpdateGlobalVar(Thread* thread, const Function& function, word index,
                       const ValueCell& value_cell) {
  HandleScope scope(thread);
  MutableTuple caches(&scope, function.caches());
  // TODO(T46426927): Remove this once an invariant of updating cache only once
  // holds.
  if (!caches.at(index).isNoneType()) {
    // An attempt to update the same cache entry with the same value can happen
    // by LOAD_NAME and STORE_NAME which don't get modified to a cached opcode.
    DCHECK(caches.at(index) == *value_cell, "caches.at(index) == *value_cell");
    return;
  }
  // Insert function as the first dependent on value_cell.
  Object old_head(&scope, value_cell.dependencyLink());
  Object none(&scope, NoneType::object());
  WeakLink new_head(
      &scope, thread->runtime()->newWeakLink(thread, function, none, old_head));
  if (old_head.isWeakLink()) {
    WeakLink::cast(*old_head).setPrev(*new_head);
  }
  value_cell.setDependencyLink(*new_head);

  caches.atPut(index, *value_cell);

  // Update all global variable access to the cached value in the function.
  MutableBytes bytecode(&scope, function.rewrittenBytecode());
  word num_opcodes = rewrittenBytecodeLength(bytecode);
  // NOTE(review): `index` is truncated to a byte for comparison with op.arg;
  // assumes global-var cache indices always fit in one byte — confirm.
  byte target_arg = static_cast<byte>(index);
  for (word i = 0; i < num_opcodes;) {
    // nextBytecodeOp advances i past the opcode it decodes, so i - 1 below
    // addresses the opcode just read.
    BytecodeOp op = nextBytecodeOp(bytecode, &i);
    if (op.arg != target_arg) {
      continue;
    }
    if (op.bc == LOAD_GLOBAL) {
      rewrittenBytecodeOpAtPut(bytecode, i - 1, LOAD_GLOBAL_CACHED);
    } else if (op.bc == STORE_GLOBAL) {
      rewrittenBytecodeOpAtPut(bytecode, i - 1, STORE_GLOBAL_CACHED);
    }
  }
}
796
797void icInvalidateGlobalVar(Thread* thread, const ValueCell& value_cell) {
798 Runtime* runtime = thread->runtime();
799 HandleScope scope(thread);
800 Object referent(&scope, NoneType::object());
801 Object function(&scope, NoneType::object());
802 Object link(&scope, value_cell.dependencyLink());
803 MutableBytes bytecode(&scope, runtime->newMutableBytesUninitialized(0));
804 while (!link.isNoneType()) {
805 DCHECK(link.isWeakLink(), "ValuCell.dependenyLink must be a WeakLink");
806 referent = WeakLink::cast(*link).referent();
807 if (referent.isNoneType()) {
808 // The function got deallocated.
809 link = WeakLink::cast(*link).next();
810 continue;
811 }
812 DCHECK(referent.isFunction(),
813 "dependencyLink's payload must be a function");
814 function = *referent;
815 word names_length =
816 Tuple::cast(Code::cast(Function::cast(*function).code()).names())
817 .length();
818 // Empty the cache.
819 MutableTuple caches(&scope, Function::cast(*function).caches());
820 DCHECK_BOUND(names_length, caches.length());
821 word name_index_found = -1;
822 for (word i = 0; i < names_length; i++) {
823 if (caches.at(i) == *value_cell) {
824 caches.atPut(i, NoneType::object());
825 name_index_found = i;
826 break;
827 }
828 }
829 bytecode = Function::cast(*function).rewrittenBytecode();
830 word num_opcodes = rewrittenBytecodeLength(bytecode);
831 for (word i = 0; i < num_opcodes;) {
832 BytecodeOp op = nextBytecodeOp(bytecode, &i);
833 Bytecode original_bc = op.bc;
834 switch (op.bc) {
835 case LOAD_ATTR_MODULE: {
836 original_bc = LOAD_ATTR_ANAMORPHIC;
837 word index = op.cache * kIcPointersPerEntry;
838 if (caches.at(index + kIcEntryValueOffset) == *value_cell) {
839 caches.atPut(index + kIcEntryKeyOffset, NoneType::object());
840 caches.atPut(index + kIcEntryValueOffset, NoneType::object());
841 }
842 break;
843 }
844 case LOAD_METHOD_MODULE: {
845 original_bc = LOAD_METHOD_ANAMORPHIC;
846 word index = op.cache * kIcPointersPerEntry;
847 if (caches.at(index + kIcEntryValueOffset) == *value_cell) {
848 caches.atPut(index + kIcEntryKeyOffset, NoneType::object());
849 caches.atPut(index + kIcEntryValueOffset, NoneType::object());
850 }
851 break;
852 }
853 case LOAD_GLOBAL_CACHED:
854 original_bc = LOAD_GLOBAL;
855 if (op.bc != original_bc && op.arg == name_index_found) {
856 rewrittenBytecodeOpAtPut(bytecode, i - 1, original_bc);
857 }
858 break;
859 case STORE_GLOBAL_CACHED:
860 original_bc = STORE_GLOBAL;
861 if (op.bc != original_bc && op.arg == name_index_found) {
862 rewrittenBytecodeOpAtPut(bytecode, i - 1, original_bc);
863 }
864 break;
865 default:
866 break;
867 }
868 }
869 link = WeakLink::cast(*link).next();
870 }
871 value_cell.setDependencyLink(NoneType::object());
872}
873
874bool IcIterator::isAttrNameEqualTo(const Object& attr_name) const {
875 DCHECK(isAttrCache(), "should be only called for attribute caches");
876 switch (bytecode_op_.bc) {
877 case FOR_ITER_MONOMORPHIC:
878 case FOR_ITER_POLYMORPHIC:
879 case FOR_ITER_ANAMORPHIC:
880 return attr_name == runtime_->symbols()->at(ID(__next__));
881 case BINARY_SUBSCR_ANAMORPHIC:
882 case BINARY_SUBSCR_MONOMORPHIC:
883 case BINARY_SUBSCR_POLYMORPHIC:
884 return attr_name == runtime_->symbols()->at(ID(__getitem__));
885 case CALL_FUNCTION_TYPE_INIT:
886 return attr_name == runtime_->symbols()->at(ID(__new__)) ||
887 attr_name == runtime_->symbols()->at(ID(__init__));
888 case CALL_FUNCTION_TYPE_NEW:
889 return attr_name == runtime_->symbols()->at(ID(__new__)) ||
890 attr_name == runtime_->symbols()->at(ID(__init__));
891 case STORE_SUBSCR_ANAMORPHIC:
892 return attr_name == runtime_->symbols()->at(ID(__setitem__));
893 default:
894 return attr_name == names_.at(bytecode_op_.arg);
895 }
896}
897
898static bool isBinaryOpOrInplaceOp(Bytecode bc) {
899 switch (bc) {
900 case BINARY_OP_MONOMORPHIC:
901 case BINARY_OP_POLYMORPHIC:
902 case BINARY_OP_ANAMORPHIC:
903 case INPLACE_OP_MONOMORPHIC:
904 case INPLACE_OP_POLYMORPHIC:
905 case INPLACE_OP_ANAMORPHIC:
906 return true;
907 default:
908 return false;
909 }
910}
911
912RawObject IcIterator::leftMethodName() const {
913 DCHECK(isBinaryOpCache() || isInplaceOpCache(),
914 "should be only called for binop or inplace-ops");
915 int32_t arg = bytecode_op_.arg;
916 SymbolId method;
917 if (isBinaryOpOrInplaceOp(bytecode_op_.bc)) {
918 Interpreter::BinaryOp binary_op = static_cast<Interpreter::BinaryOp>(arg);
919 method = Interpreter::binaryOperationSelector(binary_op);
920 } else {
921 DCHECK(bytecode_op_.bc == COMPARE_OP_MONOMORPHIC ||
922 bytecode_op_.bc == COMPARE_OP_POLYMORPHIC ||
923 bytecode_op_.bc == COMPARE_OP_ANAMORPHIC,
924 "binop cache must be for BINARY_OP_*, INPLACE_OP_*, or "
925 "COMPARE_OP_*");
926 CompareOp compare_op = static_cast<CompareOp>(arg);
927 method = Interpreter::comparisonSelector(compare_op);
928 }
929 return runtime_->symbols()->at(method);
930}
931
932RawObject IcIterator::rightMethodName() const {
933 DCHECK(isBinaryOpCache() || isInplaceOpCache(),
934 "should be only called for binop or inplace-ops");
935 int32_t arg = bytecode_op_.arg;
936 SymbolId method;
937 if (isBinaryOpOrInplaceOp(bytecode_op_.bc)) {
938 Interpreter::BinaryOp binary_op = static_cast<Interpreter::BinaryOp>(arg);
939 method = Interpreter::swappedBinaryOperationSelector(binary_op);
940 } else {
941 DCHECK(bytecode_op_.bc == COMPARE_OP_MONOMORPHIC ||
942 bytecode_op_.bc == COMPARE_OP_POLYMORPHIC ||
943 bytecode_op_.bc == COMPARE_OP_ANAMORPHIC,
944 "binop cache must be for BINARY_OP_*, INPLACE_OP_*, or "
945 "COMPARE_OP_*");
946 CompareOp compare_op = static_cast<CompareOp>(arg);
947 method = Interpreter::swappedComparisonSelector(compare_op);
948 }
949 return runtime_->symbols()->at(method);
950}
951
952RawObject IcIterator::inplaceMethodName() const {
953 DCHECK(bytecode_op_.bc == INPLACE_OP_MONOMORPHIC ||
954 bytecode_op_.bc == INPLACE_OP_POLYMORPHIC ||
955 bytecode_op_.bc == INPLACE_OP_ANAMORPHIC,
956 "should only be called for INPLACE_OP_*");
957 int32_t arg = bytecode_op_.arg;
958 Interpreter::BinaryOp binary_op = static_cast<Interpreter::BinaryOp>(arg);
959 SymbolId method = Interpreter::inplaceOperationSelector(binary_op);
960 return runtime_->symbols()->at(method);
961}
962
963} // namespace py