This repo has no description.

Doesn't matter if size is aligned; heap pointer must be

Add tests.

authored by bernsteinbear.com and committed by

Max Bernstein 59031b63 a3392df4

+19 -11
+7 -1
compiler_tests.py
··· 48 48 def test_small_string_concat(self) -> None: 49 49 self.assertEqual(self._run('"abc" ++ "def"'), '"abcdef"\n') 50 50 51 - def test_heap_string(self) -> None: 51 + def test_const_large_string(self) -> None: 52 52 self.assertEqual(self._run('"hello world"'), '"hello world"\n') 53 + 54 + def test_heap_string_concat(self) -> None: 55 + self.assertEqual(self._run('"hello world" ++ " and goodbye"'), '"hello world and goodbye"\n') 53 56 54 57 def test_const_list(self) -> None: 55 58 self.assertEqual( ··· 68 71 69 72 def test_list(self) -> None: 70 73 self.assertEqual(self._run("[1, 2, 3]"), "[1, 2, 3]\n") 74 + 75 + def test_list_concat(self) -> None: 76 + self.assertEqual(self._run("0 >+ [1, 2, 3]"), "[0, 1, 2, 3]\n") 71 77 72 78 def test_var(self) -> None: 73 79 self.assertEqual(self._run("a . a = 1"), "1\n")
+12 -10
runtime.c
··· 124 124 struct space space; 125 125 }; 126 126 127 - static uintptr_t align(uintptr_t val, uintptr_t alignment) { 127 + static ALWAYS_INLINE uintptr_t align(uintptr_t val, uintptr_t alignment) { 128 128 return (val + alignment - 1) & ~(alignment - 1); 129 129 } 130 - static uintptr_t align_size(uintptr_t size) { 130 + static ALWAYS_INLINE uintptr_t align_size(uintptr_t size) { 131 131 return align(size, sizeof(uintptr_t)); 132 132 } 133 133 ··· 163 163 heap->from_space = heap->limit = heap->hp + space.size / 2; 164 164 } 165 165 166 + static ALWAYS_INLINE bool is_power_of_two(uword x) { return (x & (x - 1)) == 0; } 167 + 168 + static ALWAYS_INLINE bool is_aligned(uword value, uword alignment) { 169 + assert(is_power_of_two(alignment)); 170 + return (value & (alignment - 1)) == 0; 171 + } 172 + 166 173 struct gc_obj* copy(struct gc_heap* heap, struct gc_obj* obj) { 167 174 size_t size = heap_object_size(obj); 168 175 struct gc_obj* new_obj = (struct gc_obj*)heap->hp; 169 176 memcpy(new_obj, obj, size); 170 177 forward(obj, new_obj); 171 178 heap->hp += align_size(size); 179 + assert(is_aligned(heap->hp, 1 << kPrimaryTagBits) && "need 3 bits for tagging"); 172 180 return new_obj; 173 181 } 174 182 ··· 292 300 } 293 301 #endif 294 302 295 - bool is_power_of_two(uword x) { return (x & (x - 1)) == 0; } 296 - 297 - bool is_aligned(uword value, uword alignment) { 298 - assert(is_power_of_two(alignment)); 299 - return (value & (alignment - 1)) == 0; 300 - } 301 - 302 303 uword make_tag(uword tag, uword size_bytes) { 303 304 assert(size_bytes <= 0xffffffff); 304 305 return (size_bytes << kBitsPerByte) | tag; ··· 321 322 322 323 static ALWAYS_INLINE ALLOCATOR struct object* allocate(struct gc_heap* heap, 323 324 uword tag, uword size) { 324 - assert(is_aligned(size, 1 << kPrimaryTagBits) && "need 3 bits for tagging"); 325 325 uintptr_t addr = heap->hp; 326 326 uintptr_t new_hp = align_size(addr + size); 327 + assert(is_aligned(new_hp, 1 << kPrimaryTagBits) && "need 3 bits for tagging"); 327 328 if (UNLIKELY(heap->limit < new_hp)) { 328 329 allocate_slow_path(heap, size); 329 330 addr = heap->hp; 330 331 new_hp = align_size(addr + size); 332 + assert(is_aligned(new_hp, 1 << kPrimaryTagBits) && "need 3 bits for tagging"); 331 333 } 332 334 heap->hp = new_hp; 333 335 ((struct gc_obj*)addr)->tag = make_tag(tag, size);