1using System;
2using System.Collections;
3using System.Collections.Generic;
4using System.Diagnostics;
5using System.Runtime.InteropServices;
6using System.Threading;
7using Unity.Burst;
8using Unity.Mathematics;
9using Unity.Jobs;
10using Unity.Jobs.LowLevel.Unsafe;
11using UnityEngine.Assertions;
12using System.Runtime.CompilerServices;
13
14namespace Unity.Collections.LowLevel.Unsafe
15{
16 /// <summary>
17 /// A bucket of key-value pairs. Used as the internal storage for hash maps.
18 /// </summary>
19 /// <remarks>Exposed publicly only for advanced use cases.</remarks>
20 [GenerateTestsForBurstCompatibility]
21 public unsafe struct UnsafeParallelHashMapBucketData
22 {
23 internal UnsafeParallelHashMapBucketData(byte* v, byte* k, byte* n, byte* b, int bcm)
24 {
25 values = v;
26 keys = k;
27 next = n;
28 buckets = b;
29 bucketCapacityMask = bcm;
30 }
31
32 /// <summary>
33 /// The buffer of values.
34 /// </summary>
35 /// <value>The buffer of values.</value>
36 public readonly byte* values;
37
38 /// <summary>
39 /// The buffer of keys.
40 /// </summary>
41 /// <value>The buffer of keys.</value>
42 public readonly byte* keys;
43
44 /// <summary>
45 /// The next bucket in the chain.
46 /// </summary>
47 /// <value>The next bucket in the chain.</value>
48 public readonly byte* next;
49
50 /// <summary>
51 /// The first bucket in the chain.
52 /// </summary>
53 /// <value>The first bucket in the chain.</value>
54 public readonly byte* buckets;
55
56 /// <summary>
57 /// One less than the bucket capacity.
58 /// </summary>
59 /// <value>One less than the bucket capacity.</value>
60 public readonly int bucketCapacityMask;
61 }
62
63 [StructLayout(LayoutKind.Explicit)]
64 [GenerateTestsForBurstCompatibility]
65 internal unsafe struct UnsafeParallelHashMapData
66 {
67 [FieldOffset(0)]
68 internal byte* values;
69 // 4-byte padding on 32-bit architectures here
70
71 [FieldOffset(8)]
72 internal byte* keys;
73 // 4-byte padding on 32-bit architectures here
74
75 [FieldOffset(16)]
76 internal byte* next;
77 // 4-byte padding on 32-bit architectures here
78
79 [FieldOffset(24)]
80 internal byte* buckets;
81 // 4-byte padding on 32-bit architectures here
82
83 [FieldOffset(32)]
84 internal int keyCapacity;
85
86 [FieldOffset(36)]
87 internal int bucketCapacityMask; // = bucket capacity - 1
88
89 [FieldOffset(40)]
90 internal int allocatedIndexLength;
91
92#if UNITY_2022_2_14F1_OR_NEWER
93 const int kFirstFreeTLSOffset = JobsUtility.CacheLineSize < 64 ? 64 : JobsUtility.CacheLineSize;
94 internal int* firstFreeTLS => (int*)((byte*)UnsafeUtility.AddressOf(ref this) + kFirstFreeTLSOffset);
95#else
96 [FieldOffset(JobsUtility.CacheLineSize < 64 ? 64 : JobsUtility.CacheLineSize)]
97 internal fixed int firstFreeTLS[JobsUtility.MaxJobThreadCount * IntsPerCacheLine];
98#endif
99
100 // 64 is the cache line size on x86, arm usually has 32 - so it is possible to save some memory there
101 internal const int IntsPerCacheLine = JobsUtility.CacheLineSize / sizeof(int);
102
103 internal static int GetBucketSize(int capacity)
104 {
105 return capacity * 2;
106 }
107
108 internal static int GrowCapacity(int capacity)
109 {
110 if (capacity == 0)
111 {
112 return 1;
113 }
114
115 return capacity * 2;
116 }
117
118 [GenerateTestsForBurstCompatibility(GenericTypeArguments = new [] { typeof(int), typeof(int) })]
119 internal static void AllocateHashMap<TKey, TValue>(int length, int bucketLength, AllocatorManager.AllocatorHandle label,
120 out UnsafeParallelHashMapData* outBuf)
121 where TKey : unmanaged
122 where TValue : unmanaged
123 {
124#if UNITY_2022_2_14F1_OR_NEWER
125 int maxThreadCount = JobsUtility.ThreadIndexCount;
126 // Calculate the size of UnsafeParallelHashMapData since we need to account for how many
127 // job worker threads the runtime has available; the firstFreeTLS free-list storage gets one cache line per thread.
128 Assert.IsTrue(sizeof(UnsafeParallelHashMapData) <= kFirstFreeTLSOffset);
129 int hashMapDataSize = kFirstFreeTLSOffset + (sizeof(int) * IntsPerCacheLine * maxThreadCount);
130#else
131 int hashMapDataSize = sizeof(UnsafeParallelHashMapData);
132#endif
133 UnsafeParallelHashMapData* data = (UnsafeParallelHashMapData*)Memory.Unmanaged.Allocate(hashMapDataSize, JobsUtility.CacheLineSize, label);
134
135 bucketLength = math.ceilpow2(bucketLength);
136
137 data->keyCapacity = length;
138 data->bucketCapacityMask = bucketLength - 1;
139
140 int keyOffset, nextOffset, bucketOffset;
141 int totalSize = CalculateDataSize<TKey, TValue>(length, bucketLength, out keyOffset, out nextOffset, out bucketOffset);
142
143 data->values = (byte*)Memory.Unmanaged.Allocate(totalSize, JobsUtility.CacheLineSize, label);
144 data->keys = data->values + keyOffset;
145 data->next = data->values + nextOffset;
146 data->buckets = data->values + bucketOffset;
147
148 outBuf = data;
149 }
150
151 [GenerateTestsForBurstCompatibility(GenericTypeArguments = new [] { typeof(int), typeof(int) })]
152 internal static void ReallocateHashMap<TKey, TValue>(UnsafeParallelHashMapData* data, int newCapacity, int newBucketCapacity, AllocatorManager.AllocatorHandle label)
153 where TKey : unmanaged
154 where TValue : unmanaged
155 {
156 newBucketCapacity = math.ceilpow2(newBucketCapacity);
157
158 if (data->keyCapacity == newCapacity && (data->bucketCapacityMask + 1) == newBucketCapacity)
159 {
160 return;
161 }
162
163 CheckHashMapReallocateDoesNotShrink(data, newCapacity);
164
165 int keyOffset, nextOffset, bucketOffset;
166 int totalSize = CalculateDataSize<TKey, TValue>(newCapacity, newBucketCapacity, out keyOffset, out nextOffset, out bucketOffset);
167
168 byte* newData = (byte*)Memory.Unmanaged.Allocate(totalSize, JobsUtility.CacheLineSize, label);
169 byte* newKeys = newData + keyOffset;
170 byte* newNext = newData + nextOffset;
171 byte* newBuckets = newData + bucketOffset;
172
173 // The items are taken from a free-list and might not be tightly packed, so copy all of the old capacity
174 UnsafeUtility.MemCpy(newData, data->values, data->keyCapacity * UnsafeUtility.SizeOf<TValue>());
175 UnsafeUtility.MemCpy(newKeys, data->keys, data->keyCapacity * UnsafeUtility.SizeOf<TKey>());
176 UnsafeUtility.MemCpy(newNext, data->next, data->keyCapacity * UnsafeUtility.SizeOf<int>());
177
178 for (int emptyNext = data->keyCapacity; emptyNext < newCapacity; ++emptyNext)
179 {
180 ((int*)newNext)[emptyNext] = -1;
181 }
182
183 // re-hash the buckets, first clear the new bucket list, then insert all values from the old list
184 for (int bucket = 0; bucket < newBucketCapacity; ++bucket)
185 {
186 ((int*)newBuckets)[bucket] = -1;
187 }
188
189 for (int bucket = 0; bucket <= data->bucketCapacityMask; ++bucket)
190 {
191 int* buckets = (int*)data->buckets;
192 int* nextPtrs = (int*)newNext;
193 while (buckets[bucket] >= 0)
194 {
195 int curEntry = buckets[bucket];
196 buckets[bucket] = nextPtrs[curEntry];
197 int newBucket = UnsafeUtility.ReadArrayElement<TKey>(data->keys, curEntry).GetHashCode() & (newBucketCapacity - 1);
198 nextPtrs[curEntry] = ((int*)newBuckets)[newBucket];
199 ((int*)newBuckets)[newBucket] = curEntry;
200 }
201 }
202
203 Memory.Unmanaged.Free(data->values, label);
204 if (data->allocatedIndexLength > data->keyCapacity)
205 {
206 data->allocatedIndexLength = data->keyCapacity;
207 }
208
209 data->values = newData;
210 data->keys = newKeys;
211 data->next = newNext;
212 data->buckets = newBuckets;
213 data->keyCapacity = newCapacity;
214 data->bucketCapacityMask = newBucketCapacity - 1;
215 }
216
217 internal static void DeallocateHashMap(UnsafeParallelHashMapData* data, AllocatorManager.AllocatorHandle allocator)
218 {
219 Memory.Unmanaged.Free(data->values, allocator);
220 Memory.Unmanaged.Free(data, allocator);
221 }
222
223 [GenerateTestsForBurstCompatibility(GenericTypeArguments = new [] { typeof(int), typeof(int) })]
224 internal static int CalculateDataSize<TKey, TValue>(int length, int bucketLength, out int keyOffset, out int nextOffset, out int bucketOffset)
225 where TKey : unmanaged
226 where TValue : unmanaged
227 {
228 var sizeOfTValue = UnsafeUtility.SizeOf<TValue>();
229 var sizeOfTKey = UnsafeUtility.SizeOf<TKey>();
230 var sizeOfInt = UnsafeUtility.SizeOf<int>();
231
232 var valuesSize = CollectionHelper.Align(sizeOfTValue * length, JobsUtility.CacheLineSize);
233 var keysSize = CollectionHelper.Align(sizeOfTKey * length, JobsUtility.CacheLineSize);
234 var nextSize = CollectionHelper.Align(sizeOfInt * length, JobsUtility.CacheLineSize);
235 var bucketSize = CollectionHelper.Align(sizeOfInt * bucketLength, JobsUtility.CacheLineSize);
236 var totalSize = valuesSize + keysSize + nextSize + bucketSize;
237
238 keyOffset = 0 + valuesSize;
239 nextOffset = keyOffset + keysSize;
240 bucketOffset = nextOffset + nextSize;
241
242 return totalSize;
243 }
244
245 internal static bool IsEmpty(UnsafeParallelHashMapData* data)
246 {
247 if (data->allocatedIndexLength <= 0)
248 {
249 return true;
250 }
251
252 var bucketArray = (int*)data->buckets;
253 var bucketNext = (int*)data->next;
254 var capacityMask = data->bucketCapacityMask;
255
256 for (int i = 0; i <= capacityMask; ++i)
257 {
258 int bucket = bucketArray[i];
259
260 if (bucket != -1)
261 {
262 return false;
263 }
264 }
265
266 return true;
267 }
268
269 internal static int GetCount(UnsafeParallelHashMapData* data)
270 {
271 if (data->allocatedIndexLength <= 0)
272 {
273 return 0;
274 }
275
276 var bucketNext = (int*)data->next;
277 var freeListSize = 0;
278
279#if UNITY_2022_2_14F1_OR_NEWER
280 int maxThreadCount = JobsUtility.ThreadIndexCount;
281#else
282 int maxThreadCount = JobsUtility.MaxJobThreadCount;
283#endif
284 for (int tls = 0; tls < maxThreadCount; ++tls)
285 {
286 for (var freeIdx = data->firstFreeTLS[tls * IntsPerCacheLine]
287 ; freeIdx >= 0
288 ; freeIdx = bucketNext[freeIdx]
289 )
290 {
291 ++freeListSize;
292 }
293 }
294
295 return math.min(data->keyCapacity, data->allocatedIndexLength) - freeListSize;
296 }
297
298 internal static bool MoveNextSearch(UnsafeParallelHashMapData* data, ref int bucketIndex, ref int nextIndex, out int index)
299 {
300 var bucketArray = (int*)data->buckets;
301 var capacityMask = data->bucketCapacityMask;
302 for (int i = bucketIndex; i <= capacityMask; ++i)
303 {
304 var idx = bucketArray[i];
305
306 if (idx != -1)
307 {
308 var bucketNext = (int*)data->next;
309 index = idx;
310 bucketIndex = i + 1;
311 nextIndex = bucketNext[idx];
312
313 return true;
314 }
315 }
316
317 index = -1;
318 bucketIndex = capacityMask + 1;
319 nextIndex = -1;
320 return false;
321 }
322
323 [MethodImpl(MethodImplOptions.AggressiveInlining)]
324 internal static bool MoveNext(UnsafeParallelHashMapData* data, ref int bucketIndex, ref int nextIndex, out int index)
325 {
326 if (nextIndex != -1)
327 {
328 var bucketNext = (int*)data->next;
329 index = nextIndex;
330 nextIndex = bucketNext[nextIndex];
331 return true;
332 }
333
334 return MoveNextSearch(data, ref bucketIndex, ref nextIndex, out index);
335 }
336
337 [GenerateTestsForBurstCompatibility(GenericTypeArguments = new [] { typeof(int) })]
338 internal static void GetKeyArray<TKey>(UnsafeParallelHashMapData* data, NativeArray<TKey> result)
339 where TKey : unmanaged
340 {
341 var bucketArray = (int*)data->buckets;
342 var bucketNext = (int*)data->next;
343
344 for (int i = 0, count = 0, max = result.Length; i <= data->bucketCapacityMask && count < max; ++i)
345 {
346 int bucket = bucketArray[i];
347
348 while (bucket != -1)
349 {
350 result[count++] = UnsafeUtility.ReadArrayElement<TKey>(data->keys, bucket);
351 bucket = bucketNext[bucket];
352 }
353 }
354 }
355
356 [GenerateTestsForBurstCompatibility(GenericTypeArguments = new [] { typeof(int) })]
357 internal static void GetValueArray<TValue>(UnsafeParallelHashMapData* data, NativeArray<TValue> result)
358 where TValue : unmanaged
359 {
360 var bucketArray = (int*)data->buckets;
361 var bucketNext = (int*)data->next;
362
363 for (int i = 0, count = 0, max = result.Length, capacityMask = data->bucketCapacityMask
364 ; i <= capacityMask && count < max
365 ; ++i
366 )
367 {
368 int bucket = bucketArray[i];
369
370 while (bucket != -1)
371 {
372 result[count++] = UnsafeUtility.ReadArrayElement<TValue>(data->values, bucket);
373 bucket = bucketNext[bucket];
374 }
375 }
376 }
377
378 [GenerateTestsForBurstCompatibility(GenericTypeArguments = new [] { typeof(int), typeof(int) })]
379 internal static void GetKeyValueArrays<TKey, TValue>(UnsafeParallelHashMapData* data, NativeKeyValueArrays<TKey, TValue> result)
380 where TKey : unmanaged
381 where TValue : unmanaged
382 {
383 var bucketArray = (int*)data->buckets;
384 var bucketNext = (int*)data->next;
385
386 for (int i = 0, count = 0, max = result.Length, capacityMask = data->bucketCapacityMask
387 ; i <= capacityMask && count < max
388 ; ++i
389 )
390 {
391 int bucket = bucketArray[i];
392
393 while (bucket != -1)
394 {
395 result.Keys[count] = UnsafeUtility.ReadArrayElement<TKey>(data->keys, bucket);
396 result.Values[count] = UnsafeUtility.ReadArrayElement<TValue>(data->values, bucket);
397 count++;
398 bucket = bucketNext[bucket];
399 }
400 }
401 }
402
403 internal UnsafeParallelHashMapBucketData GetBucketData()
404 {
405 return new UnsafeParallelHashMapBucketData(values, keys, next, buckets, bucketCapacityMask);
406 }
407
408 [Conditional("ENABLE_UNITY_COLLECTIONS_CHECKS"), Conditional("UNITY_DOTS_DEBUG")]
409 static void CheckHashMapReallocateDoesNotShrink(UnsafeParallelHashMapData* data, int newCapacity)
410 {
411 if (data->keyCapacity > newCapacity)
412 throw new InvalidOperationException("Shrinking a hash map is not supported");
413 }
414 }
415
416 [NativeContainer]
417 [GenerateTestsForBurstCompatibility]
418 internal unsafe struct UnsafeParallelHashMapDataDispose
419 {
420 [NativeDisableUnsafePtrRestriction]
421 internal UnsafeParallelHashMapData* m_Buffer;
422 internal AllocatorManager.AllocatorHandle m_AllocatorLabel;
423
424#if ENABLE_UNITY_COLLECTIONS_CHECKS
425 internal AtomicSafetyHandle m_Safety;
426#endif
427
428 public void Dispose()
429 {
430 UnsafeParallelHashMapData.DeallocateHashMap(m_Buffer, m_AllocatorLabel);
431 }
432 }
433
434 [BurstCompile]
435 internal unsafe struct UnsafeParallelHashMapDataDisposeJob : IJob
436 {
437 internal UnsafeParallelHashMapDataDispose Data;
438
439 public void Execute()
440 {
441 Data.Dispose();
442 }
443 }
444
445 [StructLayout(LayoutKind.Sequential)]
446 [GenerateTestsForBurstCompatibility(GenericTypeArguments = new [] { typeof(int), typeof(int) })]
447 internal struct UnsafeParallelHashMapBase<TKey, TValue>
448 where TKey : unmanaged, IEquatable<TKey>
449 where TValue : unmanaged
450 {
451 internal static unsafe void Clear(UnsafeParallelHashMapData* data)
452 {
453 UnsafeUtility.MemSet(data->buckets, 0xff, (data->bucketCapacityMask + 1) * 4);
454 UnsafeUtility.MemSet(data->next, 0xff, (data->keyCapacity) * 4);
455
456#if UNITY_2022_2_14F1_OR_NEWER
457 int maxThreadCount = JobsUtility.ThreadIndexCount;
458#else
459 int maxThreadCount = JobsUtility.MaxJobThreadCount;
460#endif
461 for (int tls = 0; tls < maxThreadCount; ++tls)
462 {
463 data->firstFreeTLS[tls * UnsafeParallelHashMapData.IntsPerCacheLine] = -1;
464 }
465
466 data->allocatedIndexLength = 0;
467 }
468
469 private const int SentinelRefilling = -2;
470 private const int SentinelSwapInProgress = -3;
471 internal static unsafe int AllocEntry(UnsafeParallelHashMapData* data, int threadIndex)
472 {
473 int idx;
474 int* nextPtrs = (int*)data->next;
475
476 do
477 {
478 do
479 {
480 idx = Volatile.Read(ref data->firstFreeTLS[threadIndex * UnsafeParallelHashMapData.IntsPerCacheLine]);
481 } while (idx == SentinelSwapInProgress);
482
483 // Check if this thread has a free entry. Negative value means there is nothing free.
484 if (idx < 0)
485 {
486 // Try to refill local cache. The local cache is a linked list of 16 free entries.
487
488 // Indicate to other threads that we are refilling the cache.
489 // -2 means refilling cache.
490 // -1 means nothing free on this thread.
491 Interlocked.Exchange(ref data->firstFreeTLS[threadIndex * UnsafeParallelHashMapData.IntsPerCacheLine], SentinelRefilling);
492
493 // The local cache was empty, so try to grab a new batch from the never-allocated range
494 if (data->allocatedIndexLength < data->keyCapacity)
495 {
496 idx = Interlocked.Add(ref data->allocatedIndexLength, 16) - 16;
497
498 if (idx < data->keyCapacity - 1)
499 {
500 int count = math.min(16, data->keyCapacity - idx);
501
502 // Set up a linked list of free entries.
503 for (int i = 1; i < count; ++i)
504 {
505 nextPtrs[idx + i] = idx + i + 1;
506 }
507
508 // Last entry points to null.
509 nextPtrs[idx + count - 1] = -1;
510
511 // The first entry is going to be allocated to someone so it also points to null.
512 nextPtrs[idx] = -1;
513
514 // Set the TLS first free to the head of the list, which is the one after the entry we are returning.
515 Interlocked.Exchange(ref data->firstFreeTLS[threadIndex * UnsafeParallelHashMapData.IntsPerCacheLine], idx + 1);
516
517 return idx;
518 }
519
520 if (idx == data->keyCapacity - 1)
521 {
522 // We tried to allocate more entries for this thread but we've already hit the key capacity,
523 // so we are in fact out of space. Record that this thread has no more entries.
524 Interlocked.Exchange(ref data->firstFreeTLS[threadIndex * UnsafeParallelHashMapData.IntsPerCacheLine], -1);
525
526 return idx;
527 }
528 }
529
530 // If we reach here, then we couldn't allocate more entries for this thread, so it's completely empty.
531 Interlocked.Exchange(ref data->firstFreeTLS[threadIndex * UnsafeParallelHashMapData.IntsPerCacheLine], -1);
532
533#if UNITY_2022_2_14F1_OR_NEWER
534 int maxThreadCount = JobsUtility.ThreadIndexCount;
535#else
536 int maxThreadCount = JobsUtility.MaxJobThreadCount;
537#endif
538 // Failed to get any, try to get one from another free list
539 bool again = true;
540 while (again)
541 {
542 again = false;
543 for (int other = (threadIndex + 1) % maxThreadCount
544 ; other != threadIndex
545 ; other = (other + 1) % maxThreadCount
546 )
547 {
548 // Attempt to grab a free entry from another thread and switch the other thread's free head
549 // atomically.
550 do
551 {
552 do
553 {
554 idx = Volatile.Read(ref data->firstFreeTLS[other * UnsafeParallelHashMapData.IntsPerCacheLine]);
555 } while (idx == SentinelSwapInProgress);
556
557 if (idx < 0)
558 {
559 break;
560 }
561 }
562 while (Interlocked.CompareExchange(
563 ref data->firstFreeTLS[other * UnsafeParallelHashMapData.IntsPerCacheLine]
564 , SentinelSwapInProgress
565 , idx
566 ) != idx
567 );
568
569 if (idx == SentinelRefilling)
570 {
571 // If the thread was refilling the cache, then try again.
572 again = true;
573 }
574 else if (idx >= 0)
575 {
576 // We succeeded in getting an entry from another thread so remove this entry from the
577 // linked list.
578 Interlocked.Exchange(ref data->firstFreeTLS[other * UnsafeParallelHashMapData.IntsPerCacheLine], nextPtrs[idx]);
579 nextPtrs[idx] = -1;
580 return idx;
581 }
582 }
583 }
584 ThrowFull();
585 }
586
587 CheckOutOfCapacity(idx, data->keyCapacity);
588 }
589 while (Interlocked.CompareExchange(
590 ref data->firstFreeTLS[threadIndex * UnsafeParallelHashMapData.IntsPerCacheLine]
591 , SentinelSwapInProgress
592 , idx
593 ) != idx
594 );
595
596 Interlocked.Exchange(ref data->firstFreeTLS[threadIndex * UnsafeParallelHashMapData.IntsPerCacheLine], nextPtrs[idx]);
597 nextPtrs[idx] = -1;
598 return idx;
599 }
600
601 internal static unsafe void FreeEntry(UnsafeParallelHashMapData* data, int idx, int threadIndex)
602 {
603 int* nextPtrs = (int*)data->next;
604 int next = -1;
605
606 do
607 {
608 do
609 {
610 next = Volatile.Read(ref data->firstFreeTLS[threadIndex * UnsafeParallelHashMapData.IntsPerCacheLine]);
611 } while (next == SentinelSwapInProgress);
612 nextPtrs[idx] = next;
613 }
614 while (Interlocked.CompareExchange(
615 ref data->firstFreeTLS[threadIndex * UnsafeParallelHashMapData.IntsPerCacheLine]
616 , idx
617 , next
618 ) != next
619 );
620 }
621
622 internal static unsafe bool TryAddAtomic(UnsafeParallelHashMapData* data, TKey key, TValue item, int threadIndex)
623 {
624 TValue tempItem;
625 NativeParallelMultiHashMapIterator<TKey> tempIt;
626 if (TryGetFirstValueAtomic(data, key, out tempItem, out tempIt))
627 {
628 return false;
629 }
630
631 // Allocate an entry from the free list
632 int idx = AllocEntry(data, threadIndex);
633
634 // Write the new value to the entry
635 UnsafeUtility.WriteArrayElement(data->keys, idx, key);
636 UnsafeUtility.WriteArrayElement(data->values, idx, item);
637
638 int bucket = key.GetHashCode() & data->bucketCapacityMask;
639 // Add the index to the hash-map
640 int* buckets = (int*)data->buckets;
641
642 // Make the bucket's head idx. If the exchange returns something other than -1, then the bucket had
643 // a non-null head which means we need to do more checks...
644 if (Interlocked.CompareExchange(ref buckets[bucket], idx, -1) != -1)
645 {
646 int* nextPtrs = (int*)data->next;
647 int next = -1;
648
649 do
650 {
651 // Link up this entry with the rest of the bucket under the assumption that this key
652 // doesn't already exist in the bucket. This assumption could be wrong, which will be
653 // checked later.
654 next = buckets[bucket];
655 nextPtrs[idx] = next;
656
657 // If the key already exists then we should free the entry we took earlier.
658 if (TryGetFirstValueAtomic(data, key, out tempItem, out tempIt))
659 {
660 // Put the entry back in the free list because someone else added this key while we were trying to add it
661 FreeEntry(data, idx, threadIndex);
662
663 return false;
664 }
665 }
666 while (Interlocked.CompareExchange(ref buckets[bucket], idx, next) != next);
667 }
668
669 return true;
670 }
671
672 internal static unsafe void AddAtomicMulti(UnsafeParallelHashMapData* data, TKey key, TValue item, int threadIndex)
673 {
674 // Allocate an entry from the free list
675 int idx = AllocEntry(data, threadIndex);
676
677 // Write the new value to the entry
678 UnsafeUtility.WriteArrayElement(data->keys, idx, key);
679 UnsafeUtility.WriteArrayElement(data->values, idx, item);
680
681 int bucket = key.GetHashCode() & data->bucketCapacityMask;
682 // Add the index to the hash-map
683 int* buckets = (int*)data->buckets;
684
685 int nextPtr;
686 int* nextPtrs = (int*)data->next;
687 do
688 {
689 nextPtr = buckets[bucket];
690 nextPtrs[idx] = nextPtr;
691 }
692 while (Interlocked.CompareExchange(ref buckets[bucket], idx, nextPtr) != nextPtr);
693 }
694
695 internal static unsafe bool TryAdd(UnsafeParallelHashMapData* data, TKey key, TValue item, bool isMultiHashMap, AllocatorManager.AllocatorHandle allocation)
696 {
697 TValue tempItem;
698 NativeParallelMultiHashMapIterator<TKey> tempIt;
699 if (isMultiHashMap || !TryGetFirstValueAtomic(data, key, out tempItem, out tempIt))
700 {
701 // Allocate an entry from the free list
702 int idx;
703 int* nextPtrs;
704
705 if (data->allocatedIndexLength >= data->keyCapacity && data->firstFreeTLS[0] < 0)
706 {
707#if UNITY_2022_2_14F1_OR_NEWER
708 int maxThreadCount = JobsUtility.ThreadIndexCount;
709#else
710 int maxThreadCount = JobsUtility.MaxJobThreadCount;
711#endif
712 for (int tls = 1; tls < maxThreadCount; ++tls)
713 {
714 if (data->firstFreeTLS[tls * UnsafeParallelHashMapData.IntsPerCacheLine] >= 0)
715 {
716 idx = data->firstFreeTLS[tls * UnsafeParallelHashMapData.IntsPerCacheLine];
717 nextPtrs = (int*)data->next;
718 data->firstFreeTLS[tls * UnsafeParallelHashMapData.IntsPerCacheLine] = nextPtrs[idx];
719 nextPtrs[idx] = -1;
720 data->firstFreeTLS[0] = idx;
721 break;
722 }
723 }
724
725 if (data->firstFreeTLS[0] < 0)
726 {
727 int newCap = UnsafeParallelHashMapData.GrowCapacity(data->keyCapacity);
728 UnsafeParallelHashMapData.ReallocateHashMap<TKey, TValue>(data, newCap, UnsafeParallelHashMapData.GetBucketSize(newCap), allocation);
729 }
730 }
731
732 idx = data->firstFreeTLS[0];
733
734 if (idx >= 0)
735 {
736 data->firstFreeTLS[0] = ((int*)data->next)[idx];
737 }
738 else
739 {
740 idx = data->allocatedIndexLength++;
741 }
742
743 CheckIndexOutOfBounds(data, idx);
744
745 // Write the new value to the entry
746 UnsafeUtility.WriteArrayElement(data->keys, idx, key);
747 UnsafeUtility.WriteArrayElement(data->values, idx, item);
748
749 int bucket = key.GetHashCode() & data->bucketCapacityMask;
750 // Add the index to the hash-map
751 int* buckets = (int*)data->buckets;
752 nextPtrs = (int*)data->next;
753 nextPtrs[idx] = buckets[bucket];
754 buckets[bucket] = idx;
755
756 return true;
757 }
758 return false;
759 }
760
761 internal static unsafe int Remove(UnsafeParallelHashMapData* data, TKey key, bool isMultiHashMap)
762 {
763 if (data->keyCapacity == 0)
764 {
765 return 0;
766 }
767
768 var removed = 0;
769
770 // First find the slot based on the hash
771 var buckets = (int*)data->buckets;
772 var nextPtrs = (int*)data->next;
773 var bucket = key.GetHashCode() & data->bucketCapacityMask;
774 var prevEntry = -1;
775 var entryIdx = buckets[bucket];
776
777 while (entryIdx >= 0 && entryIdx < data->keyCapacity)
778 {
779 if (UnsafeUtility.ReadArrayElement<TKey>(data->keys, entryIdx).Equals(key))
780 {
781 ++removed;
782
783 // Found matching element, remove it
784 if (prevEntry < 0)
785 {
786 buckets[bucket] = nextPtrs[entryIdx];
787 }
788 else
789 {
790 nextPtrs[prevEntry] = nextPtrs[entryIdx];
791 }
792
793 // And free the index
794 int nextIdx = nextPtrs[entryIdx];
795 nextPtrs[entryIdx] = data->firstFreeTLS[0];
796 data->firstFreeTLS[0] = entryIdx;
797 entryIdx = nextIdx;
798
799 // There can only be one match in a regular hash map, so stop here
800 if (!isMultiHashMap)
801 {
802 break;
803 }
804 }
805 else
806 {
807 prevEntry = entryIdx;
808 entryIdx = nextPtrs[entryIdx];
809 }
810 }
811
812 return removed;
813 }
814
815 internal static unsafe void Remove(UnsafeParallelHashMapData* data, NativeParallelMultiHashMapIterator<TKey> it)
816 {
817 // First find the slot based on the hash
818 int* buckets = (int*)data->buckets;
819 int* nextPtrs = (int*)data->next;
820 int bucket = it.key.GetHashCode() & data->bucketCapacityMask;
821
822 int entryIdx = buckets[bucket];
823
824 if (entryIdx == it.EntryIndex)
825 {
826 buckets[bucket] = nextPtrs[entryIdx];
827 }
828 else
829 {
830 while (entryIdx >= 0 && nextPtrs[entryIdx] != it.EntryIndex)
831 {
832 entryIdx = nextPtrs[entryIdx];
833 }
834
835 if (entryIdx < 0)
836 {
837 ThrowInvalidIterator();
838 }
839
840 nextPtrs[entryIdx] = nextPtrs[it.EntryIndex];
841 }
842
843 // And free the index
844 nextPtrs[it.EntryIndex] = data->firstFreeTLS[0];
845 data->firstFreeTLS[0] = it.EntryIndex;
846 }
847
848 [GenerateTestsForBurstCompatibility(GenericTypeArguments = new [] { typeof(int) })]
849 internal static unsafe void RemoveKeyValue<TValueEQ>(UnsafeParallelHashMapData* data, TKey key, TValueEQ value)
850 where TValueEQ : unmanaged, IEquatable<TValueEQ>
851 {
852 if (data->keyCapacity == 0)
853 {
854 return;
855 }
856
857 var buckets = (int*)data->buckets;
858 var keyCapacity = (uint)data->keyCapacity;
859 var prevNextPtr = buckets + (key.GetHashCode() & data->bucketCapacityMask);
860 var entryIdx = *prevNextPtr;
861
862 if ((uint)entryIdx >= keyCapacity)
863 {
864 return;
865 }
866
867 var nextPtrs = (int*)data->next;
868 var keys = data->keys;
869 var values = data->values;
870 var firstFreeTLS = data->firstFreeTLS;
871
872 do
873 {
874 if (UnsafeUtility.ReadArrayElement<TKey>(keys, entryIdx).Equals(key)
875 && UnsafeUtility.ReadArrayElement<TValueEQ>(values, entryIdx).Equals(value))
876 {
877 int nextIdx = nextPtrs[entryIdx];
878 nextPtrs[entryIdx] = firstFreeTLS[0];
879 firstFreeTLS[0] = entryIdx;
880 *prevNextPtr = entryIdx = nextIdx;
881 }
882 else
883 {
884 prevNextPtr = nextPtrs + entryIdx;
885 entryIdx = *prevNextPtr;
886 }
887 }
888 while ((uint)entryIdx < keyCapacity);
889 }
890
891 internal static unsafe bool TryGetFirstValueAtomic(UnsafeParallelHashMapData* data, TKey key, out TValue item, out NativeParallelMultiHashMapIterator<TKey> it)
892 {
893 it.key = key;
894
895 if (data->allocatedIndexLength <= 0)
896 {
897 it.EntryIndex = it.NextEntryIndex = -1;
898 item = default;
899 return false;
900 }
901
902 // First find the slot based on the hash
903 int* buckets = (int*)data->buckets;
904 int bucket = key.GetHashCode() & data->bucketCapacityMask;
905 it.EntryIndex = it.NextEntryIndex = buckets[bucket];
906 return TryGetNextValueAtomic(data, out item, ref it);
907 }
908
909 internal static unsafe bool TryGetNextValueAtomic(UnsafeParallelHashMapData* data, out TValue item, ref NativeParallelMultiHashMapIterator<TKey> it)
910 {
911 int entryIdx = it.NextEntryIndex;
912 it.NextEntryIndex = -1;
913 it.EntryIndex = -1;
914 item = default;
915 if (entryIdx < 0 || entryIdx >= data->keyCapacity)
916 {
917 return false;
918 }
919
920 int* nextPtrs = (int*)data->next;
921 while (!UnsafeUtility.ReadArrayElement<TKey>(data->keys, entryIdx).Equals(it.key))
922 {
923 entryIdx = nextPtrs[entryIdx];
924 if (entryIdx < 0 || entryIdx >= data->keyCapacity)
925 {
926 return false;
927 }
928 }
929
930 it.NextEntryIndex = nextPtrs[entryIdx];
931 it.EntryIndex = entryIdx;
932
933 // Read the value
934 item = UnsafeUtility.ReadArrayElement<TValue>(data->values, entryIdx);
935
936 return true;
937 }
938
939 internal static unsafe bool SetValue(UnsafeParallelHashMapData* data, ref NativeParallelMultiHashMapIterator<TKey> it, ref TValue item)
940 {
941 int entryIdx = it.EntryIndex;
942 if (entryIdx < 0 || entryIdx >= data->keyCapacity)
943 {
944 return false;
945 }
946
947 UnsafeUtility.WriteArrayElement(data->values, entryIdx, item);
948 return true;
949 }
950
951 [Conditional("ENABLE_UNITY_COLLECTIONS_CHECKS"), Conditional("UNITY_DOTS_DEBUG")]
952 static void CheckOutOfCapacity(int idx, int keyCapacity)
953 {
954 if (idx >= keyCapacity)
955 {
956 throw new InvalidOperationException(string.Format("nextPtr idx {0} beyond capacity {1}", idx, keyCapacity));
957 }
958 }
959
960 [Conditional("ENABLE_UNITY_COLLECTIONS_CHECKS"), Conditional("UNITY_DOTS_DEBUG")]
961 static unsafe void CheckIndexOutOfBounds(UnsafeParallelHashMapData* data, int idx)
962 {
963 if (idx < 0 || idx >= data->keyCapacity)
964 throw new InvalidOperationException("Internal HashMap error");
965 }
966
967 [Conditional("ENABLE_UNITY_COLLECTIONS_CHECKS"), Conditional("UNITY_DOTS_DEBUG")]
968 static void ThrowFull()
969 {
970 throw new InvalidOperationException("HashMap is full");
971 }
972
973 [Conditional("ENABLE_UNITY_COLLECTIONS_CHECKS"), Conditional("UNITY_DOTS_DEBUG")]
974 static void ThrowInvalidIterator()
975 {
976 throw new InvalidOperationException("Invalid iterator passed to HashMap remove");
977 }
978 }
979
980 /// <summary>
981 /// A key-value pair.
982 /// </summary>
983 /// <remarks>Used for enumerators.</remarks>
984 /// <typeparam name="TKey">The type of the keys.</typeparam>
985 /// <typeparam name="TValue">The type of the values.</typeparam>
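/// <example>
/// A minimal sketch of consuming pairs from an enumerator (illustrative only; <c>map</c> stands in for an existing UnsafeParallelHashMap&lt;int, int&gt;):
/// <code><![CDATA[
/// foreach (KeyValue<int, int> pair in map)
/// {
///     if (pair.GetKeyValue(out int key, out int value))
///     {
///         // key and value are copies of the stored pair.
///     }
/// }
/// ]]></code>
/// </example>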
986 [DebuggerDisplay("Key = {Key}, Value = {Value}")]
987 [GenerateTestsForBurstCompatibility(GenericTypeArguments = new[] {typeof(int), typeof(int)})]
988 public unsafe struct KeyValue<TKey, TValue>
989 where TKey : unmanaged, IEquatable<TKey>
990 where TValue : unmanaged
991 {
992 internal UnsafeParallelHashMapData* m_Buffer;
993 internal int m_Index;
994 internal int m_Next;
995
996 /// <summary>
997 /// An invalid KeyValue.
998 /// </summary>
999 /// <value>In a hash map enumerator's initial state, its <see cref="UnsafeParallelHashMap{TKey,TValue}.Enumerator.Current"/> value is Null.</value>
1000 public static KeyValue<TKey, TValue> Null => new KeyValue<TKey, TValue>{m_Index = -1};
1001
1002 /// <summary>
1003 /// The key.
1004 /// </summary>
1005 /// <value>The key. If this KeyValue is Null, returns the default of TKey.</value>
1006 public TKey Key
1007 {
1008 get
1009 {
1010 if (m_Index != -1)
1011 {
1012 return UnsafeUtility.ReadArrayElement<TKey>(m_Buffer->keys, m_Index);
1013 }
1014
1015 return default;
1016 }
1017 }
1018
1019 /// <summary>
1020 /// Value of key/value pair.
1021 /// </summary>
1022 public ref TValue Value
1023 {
1024 get
1025 {
1026#if ENABLE_UNITY_COLLECTIONS_CHECKS
1027 if (m_Index == -1)
1028 throw new ArgumentException("must be valid");
1029#endif
1030
1031 return ref UnsafeUtility.AsRef<TValue>(m_Buffer->values + UnsafeUtility.SizeOf<TValue>() * m_Index);
1032 }
1033 }
1034
1035 /// <summary>
1036 /// Gets the key and the value.
1037 /// </summary>
1038 /// <param name="key">Outputs the key. If this KeyValue is Null, outputs the default of TKey.</param>
1039 /// <param name="value">Outputs the value. If this KeyValue is Null, outputs the default of TValue.</param>
1040 /// <returns>True if the key-value pair is valid.</returns>
1041 public bool GetKeyValue(out TKey key, out TValue value)
1042 {
1043 if (m_Index != -1)
1044 {
1045 key = UnsafeUtility.ReadArrayElement<TKey>(m_Buffer->keys, m_Index);
1046 value = UnsafeUtility.ReadArrayElement<TValue>(m_Buffer->values, m_Index);
1047 return true;
1048 }
1049
1050 key = default;
1051 value = default;
1052 return false;
1053 }
1054 }
1055
1056 internal unsafe struct UnsafeParallelHashMapDataEnumerator
1057 {
1058 [NativeDisableUnsafePtrRestriction]
1059 internal UnsafeParallelHashMapData* m_Buffer;
1060 internal int m_Index;
1061 internal int m_BucketIndex;
1062 internal int m_NextIndex;
1063
1064 internal unsafe UnsafeParallelHashMapDataEnumerator(UnsafeParallelHashMapData* data)
1065 {
1066 m_Buffer = data;
1067 m_Index = -1;
1068 m_BucketIndex = 0;
1069 m_NextIndex = -1;
1070 }
1071
1072 [MethodImpl(MethodImplOptions.AggressiveInlining)]
1073 internal bool MoveNext()
1074 {
1075 return UnsafeParallelHashMapData.MoveNext(m_Buffer, ref m_BucketIndex, ref m_NextIndex, out m_Index);
1076 }
1077
1078 internal void Reset()
1079 {
1080 m_Index = -1;
1081 m_BucketIndex = 0;
1082 m_NextIndex = -1;
1083 }
1084
1085 [MethodImpl(MethodImplOptions.AggressiveInlining)]
1086 internal KeyValue<TKey, TValue> GetCurrent<TKey, TValue>()
1087 where TKey : unmanaged, IEquatable<TKey>
1088 where TValue : unmanaged
1089 {
1090 return new KeyValue<TKey, TValue> { m_Buffer = m_Buffer, m_Index = m_Index };
1091 }
1092
1093 [MethodImpl(MethodImplOptions.AggressiveInlining)]
1094 internal TKey GetCurrentKey<TKey>()
1095 where TKey : unmanaged, IEquatable<TKey>
1096 {
1097 if (m_Index != -1)
1098 {
1099 return UnsafeUtility.ReadArrayElement<TKey>(m_Buffer->keys, m_Index);
1100 }
1101
1102 return default;
1103 }
1104 }
1105
1106 /// <summary>
1107 /// An unordered, expandable associative array.
1108 /// </summary>
1109 /// <typeparam name="TKey">The type of the keys.</typeparam>
1110 /// <typeparam name="TValue">The type of the values.</typeparam>
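/// <example>
/// A minimal usage sketch (illustrative only; assumes <c>Allocator.Temp</c> from Unity.Collections is available):
/// <code><![CDATA[
/// var map = new UnsafeParallelHashMap<int, float>(16, Allocator.Temp);
///
/// map.TryAdd(1, 2.5f);   // true: key 1 added
/// map.TryAdd(1, 9.0f);   // false: key 1 already present
///
/// if (map.TryGetValue(1, out float value))
/// {
///     // value == 2.5f
/// }
///
/// map.Remove(1);
/// map.Dispose();
/// ]]></code>
/// </example>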
1111 [StructLayout(LayoutKind.Sequential)]
1112 [DebuggerDisplay("Count = {Count()}, Capacity = {Capacity}, IsCreated = {IsCreated}, IsEmpty = {IsEmpty}")]
1113 [DebuggerTypeProxy(typeof(UnsafeParallelHashMapDebuggerTypeProxy<,>))]
1114 [GenerateTestsForBurstCompatibility(GenericTypeArguments = new [] { typeof(int), typeof(int) })]
1115 public unsafe struct UnsafeParallelHashMap<TKey, TValue>
1116 : INativeDisposable
1117 , IEnumerable<KeyValue<TKey, TValue>> // Used by collection initializers.
1118 where TKey : unmanaged, IEquatable<TKey>
1119 where TValue : unmanaged
1120 {
1121 [NativeDisableUnsafePtrRestriction]
1122 internal UnsafeParallelHashMapData* m_Buffer;
1123 internal AllocatorManager.AllocatorHandle m_AllocatorLabel;
1124
1125 /// <summary>
1126 /// Initializes and returns an instance of UnsafeParallelHashMap.
1127 /// </summary>
1128 /// <param name="capacity">The number of key-value pairs that should fit in the initial allocation.</param>
1129 /// <param name="allocator">The allocator to use.</param>
1130 public UnsafeParallelHashMap(int capacity, AllocatorManager.AllocatorHandle allocator)
1131 {
1132 m_AllocatorLabel = allocator;
1133 // Bucket size is bigger to reduce collisions
1134 UnsafeParallelHashMapData.AllocateHashMap<TKey, TValue>(capacity, capacity * 2, allocator, out m_Buffer);
1135
1136 Clear();
1137 }
1138
1139 /// <summary>
1140 /// Whether this hash map has been allocated (and not yet deallocated).
1141 /// </summary>
1142 /// <value>True if this hash map has been allocated (and not yet deallocated).</value>
1143 public readonly bool IsCreated
1144 {
1145 [MethodImpl(MethodImplOptions.AggressiveInlining)]
1146 get => m_Buffer != null;
1147 }
1148
1149 /// <summary>
1150 /// Whether this hash map is empty.
1151 /// </summary>
1152 /// <value>True if this hash map is empty or the hash map has not been constructed.</value>
1153 public readonly bool IsEmpty
1154 {
1155 [MethodImpl(MethodImplOptions.AggressiveInlining)]
1156 get => !IsCreated || UnsafeParallelHashMapData.IsEmpty(m_Buffer);
1157 }
1158
1159 /// <summary>
1160 /// The current number of key-value pairs in this hash map.
1161 /// </summary>
1162 /// <returns>The current number of key-value pairs in this hash map.</returns>
1163 public readonly int Count() => UnsafeParallelHashMapData.GetCount(m_Buffer);
1164
1165 /// <summary>
1166 /// The number of key-value pairs that fit in the current allocation.
1167 /// </summary>
1168 /// <value>The number of key-value pairs that fit in the current allocation.</value>
1169 /// <param name="value">A new capacity. Must be larger than the current capacity.</param>
1170 /// <exception cref="InvalidOperationException">Thrown if `value` is less than the current capacity.</exception>
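/// <example>
/// A sketch of reserving space up front (illustrative only; <c>map</c> and <c>expectedCount</c> are placeholders). A <see cref="ParallelWriter"/> cannot grow the map, so reserve capacity before scheduling parallel writes:
/// <code><![CDATA[
/// if (map.Capacity < expectedCount)
/// {
///     map.Capacity = expectedCount; // shrinking is not supported (throws when collection checks are enabled)
/// }
/// ]]></code>
/// </example>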
1171 public int Capacity
1172 {
1173 [MethodImpl(MethodImplOptions.AggressiveInlining)]
1174 readonly get
1175 {
1176 UnsafeParallelHashMapData* data = m_Buffer;
1177 return data->keyCapacity;
1178 }
1179
1180 set
1181 {
1182 UnsafeParallelHashMapData* data = m_Buffer;
1183 UnsafeParallelHashMapData.ReallocateHashMap<TKey, TValue>(data, value, UnsafeParallelHashMapData.GetBucketSize(value), m_AllocatorLabel);
1184 }
1185 }
1186
1187 /// <summary>
1188 /// Removes all key-value pairs.
1189 /// </summary>
1190 /// <remarks>Does not change the capacity.</remarks>
1191 public void Clear()
1192 {
1193 UnsafeParallelHashMapBase<TKey, TValue>.Clear(m_Buffer);
1194 }
1195
1196 /// <summary>
1197 /// Adds a new key-value pair.
1198 /// </summary>
1199 /// <remarks>If the key is already present, this method returns false without modifying the hash map.</remarks>
1200 /// <param name="key">The key to add.</param>
1201 /// <param name="item">The value to add.</param>
1202 /// <returns>True if the key-value pair was added.</returns>
1203 public bool TryAdd(TKey key, TValue item)
1204 {
1205 return UnsafeParallelHashMapBase<TKey, TValue>.TryAdd(m_Buffer, key, item, false, m_AllocatorLabel);
1206 }
1207
1208 /// <summary>
1209 /// Adds a new key-value pair.
1210 /// </summary>
1211 /// <remarks>If the key is already present, this method does nothing; use <see cref="TryAdd"/> to detect whether the pair was added.</remarks>
1212 /// <param name="key">The key to add.</param>
1213 /// <param name="item">The value to add.</param>
1215 public void Add(TKey key, TValue item)
1216 {
1217 UnsafeParallelHashMapBase<TKey, TValue>.TryAdd(m_Buffer, key, item, false, m_AllocatorLabel);
1218 }
1219
1220 /// <summary>
1221 /// Removes a key-value pair.
1222 /// </summary>
1223 /// <param name="key">The key to remove.</param>
1224 /// <returns>True if a key-value pair was removed.</returns>
1225 public bool Remove(TKey key)
1226 {
1227 return UnsafeParallelHashMapBase<TKey, TValue>.Remove(m_Buffer, key, false) != 0;
1228 }
1229
1230 /// <summary>
1231 /// Returns the value associated with a key.
1232 /// </summary>
1233 /// <param name="key">The key to look up.</param>
1234 /// <param name="item">Outputs the value associated with the key. Outputs default if the key was not present.</param>
1235 /// <returns>True if the key was present.</returns>
1236 public bool TryGetValue(TKey key, out TValue item)
1237 {
1238 NativeParallelMultiHashMapIterator<TKey> tempIt;
1239 return UnsafeParallelHashMapBase<TKey, TValue>.TryGetFirstValueAtomic(m_Buffer, key, out item, out tempIt);
1240 }
1241
1242 /// <summary>
1243 /// Returns true if a given key is present in this hash map.
1244 /// </summary>
1245 /// <param name="key">The key to look up.</param>
1246 /// <returns>True if the key was present.</returns>
1247 public bool ContainsKey(TKey key)
1248 {
1249 return UnsafeParallelHashMapBase<TKey, TValue>.TryGetFirstValueAtomic(m_Buffer, key, out var tempValue, out var tempIt);
1250 }
1251
1252 /// <summary>
1253 /// Gets and sets values by key.
1254 /// </summary>
1255 /// <remarks>Getting a key that is not present returns the default value of TValue. Setting a key that is not already present will add the key.</remarks>
1256 /// <param name="key">The key to look up.</param>
1257 /// <value>The value associated with the key.</value>
1259 public TValue this[TKey key]
1260 {
1261 get
1262 {
1263 TValue res;
1264 TryGetValue(key, out res);
1265 return res;
1266 }
1267
1268 set
1269 {
1270 if (UnsafeParallelHashMapBase<TKey, TValue>.TryGetFirstValueAtomic(m_Buffer, key, out var item, out var iterator))
1271 {
1272 UnsafeParallelHashMapBase<TKey, TValue>.SetValue(m_Buffer, ref iterator, ref value);
1273 }
1274 else
1275 {
1276 UnsafeParallelHashMapBase<TKey, TValue>.TryAdd(m_Buffer, key, value, false, m_AllocatorLabel);
1277 }
1278 }
1279 }
1280
1281 /// <summary>
1282 /// Releases all resources (memory).
1283 /// </summary>
1284 public void Dispose()
1285 {
1286 if (!IsCreated)
1287 {
1288 return;
1289 }
1290
1291 UnsafeParallelHashMapData.DeallocateHashMap(m_Buffer, m_AllocatorLabel);
1292 m_Buffer = null;
1293 }
1294
1295 /// <summary>
1296 /// Creates and schedules a job that will dispose this hash map.
1297 /// </summary>
1298 /// <param name="inputDeps">A job handle. The newly scheduled job will depend upon this handle.</param>
1299 /// <returns>The handle of a new job that will dispose this hash map.</returns>
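/// <example>
/// A sketch of releasing the map only after dependent jobs finish (illustrative only; <c>map</c> and <c>writerHandle</c> are placeholders for an existing map and the handle of the last job that uses it):
/// <code><![CDATA[
/// JobHandle disposeHandle = map.Dispose(writerHandle);
/// // The map must not be used after this point; the memory is freed when disposeHandle completes.
/// disposeHandle.Complete();
/// ]]></code>
/// </example>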
1300 public JobHandle Dispose(JobHandle inputDeps)
1301 {
1302 if (!IsCreated)
1303 {
1304 return inputDeps;
1305 }
1306
1307 var jobHandle = new UnsafeParallelHashMapDisposeJob { Data = m_Buffer, Allocator = m_AllocatorLabel }.Schedule(inputDeps);
1308 m_Buffer = null;
1309 return jobHandle;
1310 }
1311
1312 /// <summary>
1313 /// Returns an array with a copy of all this hash map's keys (in no particular order).
1314 /// </summary>
1315 /// <param name="allocator">The allocator to use.</param>
1316 /// <returns>An array with a copy of all this hash map's keys (in no particular order).</returns>
1317 public NativeArray<TKey> GetKeyArray(AllocatorManager.AllocatorHandle allocator)
1318 {
1319 var result = CollectionHelper.CreateNativeArray<TKey>(UnsafeParallelHashMapData.GetCount(m_Buffer), allocator, NativeArrayOptions.UninitializedMemory);
1320 UnsafeParallelHashMapData.GetKeyArray(m_Buffer, result);
1321 return result;
1322 }
1323
1324 /// <summary>
1325 /// Returns an array with a copy of all this hash map's values (in no particular order).
1326 /// </summary>
1327 /// <param name="allocator">The allocator to use.</param>
1328 /// <returns>An array with a copy of all this hash map's values (in no particular order).</returns>
1329 public NativeArray<TValue> GetValueArray(AllocatorManager.AllocatorHandle allocator)
1330 {
1331 var result = CollectionHelper.CreateNativeArray<TValue>(UnsafeParallelHashMapData.GetCount(m_Buffer), allocator, NativeArrayOptions.UninitializedMemory);
1332 UnsafeParallelHashMapData.GetValueArray(m_Buffer, result);
1333 return result;
1334 }
1335
1336 /// <summary>
1337 /// Returns a NativeKeyValueArrays with a copy of all this hash map's keys and values.
1338 /// </summary>
1339 /// <remarks>The key-value pairs are copied in no particular order. For all `i`, `Values[i]` will be the value associated with `Keys[i]`.</remarks>
1340 /// <param name="allocator">The allocator to use.</param>
1341 /// <returns>A NativeKeyValueArrays with a copy of all this hash map's keys and values.</returns>
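/// <example>
/// A sketch of copying the contents out for inspection (illustrative only; <c>map</c> is a placeholder for an existing UnsafeParallelHashMap&lt;int, int&gt;):
/// <code><![CDATA[
/// using (var pairs = map.GetKeyValueArrays(Allocator.Temp))
/// {
///     for (int i = 0; i < pairs.Length; i++)
///     {
///         // pairs.Keys[i] is the key associated with pairs.Values[i].
///         UnityEngine.Debug.Log($"{pairs.Keys[i]} -> {pairs.Values[i]}");
///     }
/// }
/// ]]></code>
/// </example>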
1342 public NativeKeyValueArrays<TKey, TValue> GetKeyValueArrays(AllocatorManager.AllocatorHandle allocator)
1343 {
1344 var result = new NativeKeyValueArrays<TKey, TValue>(UnsafeParallelHashMapData.GetCount(m_Buffer), allocator, NativeArrayOptions.UninitializedMemory);
1345 UnsafeParallelHashMapData.GetKeyValueArrays(m_Buffer, result);
1346 return result;
1347 }
1348
1349 /// <summary>
1350 /// Returns a parallel writer for this hash map.
1351 /// </summary>
1352 /// <returns>A parallel writer for this hash map.</returns>
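/// <example>
/// A minimal sketch of concurrent insertion from a job (illustrative only; the job type, field names and counts are placeholders, and Unity.Jobs is assumed to be referenced):
/// <code><![CDATA[
/// struct FillMapJob : IJobParallelFor
/// {
///     public UnsafeParallelHashMap<int, int>.ParallelWriter Writer;
///
///     public void Execute(int index)
///     {
///         // Safe to call concurrently; returns false if the key is already present.
///         Writer.TryAdd(index, index * index);
///     }
/// }
///
/// // var map = new UnsafeParallelHashMap<int, int>(count, Allocator.TempJob);
/// // new FillMapJob { Writer = map.AsParallelWriter() }.Schedule(count, 64).Complete();
/// ]]></code>
/// </example>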
1353 public ParallelWriter AsParallelWriter()
1354 {
1355 ParallelWriter writer;
1356 writer.m_ThreadIndex = 0;
1357 writer.m_Buffer = m_Buffer;
1358 return writer;
1359 }
1360
1361 /// <summary>
1362 /// Returns a readonly version of this UnsafeParallelHashMap instance.
1363 /// </summary>
1364 /// <remarks>A ReadOnly instance points to the same underlying data as the UnsafeParallelHashMap it was created from.</remarks>
1365 /// <returns>ReadOnly instance for this.</returns>
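/// <example>
/// A sketch of handing out read-only access (illustrative only; <c>map</c> is a placeholder for an existing UnsafeParallelHashMap&lt;int, int&gt;):
/// <code><![CDATA[
/// UnsafeParallelHashMap<int, int>.ReadOnly lookup = map.AsReadOnly();
///
/// if (lookup.TryGetValue(42, out int found))
/// {
///     // Reads go through the same underlying buffer as the source map.
/// }
/// ]]></code>
/// </example>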
1366 public ReadOnly AsReadOnly()
1367 {
1368 return new ReadOnly(this);
1369 }
1370
1371 /// <summary>
1372 /// A read-only alias for the value of a UnsafeParallelHashMap. Does not have its own allocated storage.
1373 /// </summary>
1374 [DebuggerDisplay("Count = {m_HashMapData.Count()}, Capacity = {m_HashMapData.Capacity}, IsCreated = {m_HashMapData.IsCreated}, IsEmpty = {IsEmpty}")]
1375 [GenerateTestsForBurstCompatibility(GenericTypeArguments = new[] { typeof(int), typeof(int) })]
1376 public struct ReadOnly
1377 : IEnumerable<KeyValue<TKey, TValue>>
1378 {
1379 internal UnsafeParallelHashMap<TKey, TValue> m_HashMapData;
1380
1381 internal ReadOnly(UnsafeParallelHashMap<TKey, TValue> hashMapData)
1382 {
1383 m_HashMapData = hashMapData;
1384 }
1385
1386 /// <summary>
1387 /// Whether this hash map has been allocated (and not yet deallocated).
1388 /// </summary>
1389 /// <value>True if this hash map has been allocated (and not yet deallocated).</value>
1390 public readonly bool IsCreated
1391 {
1392 [MethodImpl(MethodImplOptions.AggressiveInlining)]
1393 get => m_HashMapData.IsCreated;
1394 }
1395
1396 /// <summary>
1397 /// Whether this hash map is empty.
1398 /// </summary>
1399 /// <value>True if this hash map is empty or if the map has not been constructed.</value>
1400 public readonly bool IsEmpty
1401 {
1402 [MethodImpl(MethodImplOptions.AggressiveInlining)]
1403 get
1404 {
1405 if (!IsCreated)
1406 {
1407 return true;
1408 }
1409
1410 return m_HashMapData.IsEmpty;
1411 }
1412 }
1413
1414 /// <summary>
1415 /// The current number of key-value pairs in this hash map.
1416 /// </summary>
1417 /// <returns>The current number of key-value pairs in this hash map.</returns>
1418 public readonly int Count()
1419 {
1420 return m_HashMapData.Count();
1421 }
1422
1423 /// <summary>
1424 /// The number of key-value pairs that fit in the current allocation.
1425 /// </summary>
1426 /// <value>The number of key-value pairs that fit in the current allocation.</value>
1427 public readonly int Capacity
1428 {
1429 [MethodImpl(MethodImplOptions.AggressiveInlining)]
1430 get
1431 {
1432 return m_HashMapData.Capacity;
1433 }
1434 }
1435
1436 /// <summary>
1437 /// Returns the value associated with a key.
1438 /// </summary>
1439 /// <param name="key">The key to look up.</param>
1440 /// <param name="item">Outputs the value associated with the key. Outputs default if the key was not present.</param>
1441 /// <returns>True if the key was present.</returns>
1442 public readonly bool TryGetValue(TKey key, out TValue item)
1443 {
1444 return m_HashMapData.TryGetValue(key, out item);
1445 }
1446
1447 /// <summary>
1448 /// Returns true if a given key is present in this hash map.
1449 /// </summary>
1450 /// <param name="key">The key to look up.</param>
1451 /// <returns>True if the key was present.</returns>
1452 public readonly bool ContainsKey(TKey key)
1453 {
1454 return m_HashMapData.ContainsKey(key);
1455 }
1456
1457 /// <summary>
1458 /// Gets values by key.
1459 /// </summary>
1460 /// <remarks>Getting a key that is not present will throw.</remarks>
1461 /// <param name="key">The key to look up.</param>
1462 /// <value>The value associated with the key.</value>
1463 /// <exception cref="ArgumentException">For getting, thrown if the key was not present.</exception>
1464 public readonly TValue this[TKey key]
1465 {
1466 get
1467 {
1468 TValue res;
1469
1470 if (m_HashMapData.TryGetValue(key, out res))
1471 {
1472 return res;
1473 }
1474
1475 ThrowKeyNotPresent(key);
1476
1477 return default;
1478 }
1479 }
1480
1481 /// <summary>
1482 /// Returns an array with a copy of all this hash map's keys (in no particular order).
1483 /// </summary>
1484 /// <param name="allocator">The allocator to use.</param>
1485 /// <returns>An array with a copy of all this hash map's keys (in no particular order).</returns>
1486 public readonly NativeArray<TKey> GetKeyArray(AllocatorManager.AllocatorHandle allocator)
1487 {
1488 return m_HashMapData.GetKeyArray(allocator);
1489 }
1490
1491 /// <summary>
1492 /// Returns an array with a copy of all this hash map's values (in no particular order).
1493 /// </summary>
1494 /// <param name="allocator">The allocator to use.</param>
1495 /// <returns>An array with a copy of all this hash map's values (in no particular order).</returns>
1496 public readonly NativeArray<TValue> GetValueArray(AllocatorManager.AllocatorHandle allocator)
1497 {
1498 return m_HashMapData.GetValueArray(allocator);
1499 }
1500
1501 /// <summary>
1502 /// Returns a NativeKeyValueArrays with a copy of all this hash map's keys and values.
1503 /// </summary>
1504 /// <remarks>The key-value pairs are copied in no particular order. For all `i`, `Values[i]` will be the value associated with `Keys[i]`.</remarks>
1505 /// <param name="allocator">The allocator to use.</param>
1506 /// <returns>A NativeKeyValueArrays with a copy of all this hash map's keys and values.</returns>
1507 public readonly NativeKeyValueArrays<TKey, TValue> GetKeyValueArrays(AllocatorManager.AllocatorHandle allocator)
1508 {
1509 return m_HashMapData.GetKeyValueArrays(allocator);
1510 }
1511
1512 [Conditional("ENABLE_UNITY_COLLECTIONS_CHECKS"), Conditional("UNITY_DOTS_DEBUG")]
1513 readonly void ThrowKeyNotPresent(TKey key)
1514 {
1515 throw new ArgumentException($"Key: {key} is not present in the UnsafeParallelHashMap.");
1516 }
1517
1518 /// <summary>
1519 /// Returns an enumerator over the key-value pairs of this hash map.
1520 /// </summary>
1521 /// <returns>An enumerator over the key-value pairs of this hash map.</returns>
1522 public readonly Enumerator GetEnumerator()
1523 {
1524 return new Enumerator
1525 {
1526 m_Enumerator = new UnsafeParallelHashMapDataEnumerator(m_HashMapData.m_Buffer),
1527 };
1528 }
1529
1530 /// <summary>
1531 /// This method is not implemented. Use <see cref="GetEnumerator"/> instead.
1532 /// </summary>
1533 /// <returns>Throws NotImplementedException.</returns>
1534 /// <exception cref="NotImplementedException">Method is not implemented.</exception>
1535 IEnumerator<KeyValue<TKey, TValue>> IEnumerable<KeyValue<TKey, TValue>>.GetEnumerator()
1536 {
1537 throw new NotImplementedException();
1538 }
1539
1540 /// <summary>
1541 /// This method is not implemented. Use <see cref="GetEnumerator"/> instead.
1542 /// </summary>
1543 /// <returns>Throws NotImplementedException.</returns>
1544 /// <exception cref="NotImplementedException">Method is not implemented.</exception>
1545 IEnumerator IEnumerable.GetEnumerator()
1546 {
1547 throw new NotImplementedException();
1548 }
1549 }
1550
1551 /// <summary>
1552 /// A parallel writer for an UnsafeParallelHashMap.
1553 /// </summary>
1554 /// <remarks>
1555 /// Use <see cref="AsParallelWriter"/> to create a parallel writer for an UnsafeParallelHashMap.
1556 /// </remarks>
1557 [GenerateTestsForBurstCompatibility(GenericTypeArguments = new [] { typeof(int), typeof(int) })]
1558 public unsafe struct ParallelWriter
1559 {
1560 [NativeDisableUnsafePtrRestriction]
1561 internal UnsafeParallelHashMapData* m_Buffer;
1562
1563 [NativeSetThreadIndex]
1564 internal int m_ThreadIndex;
1565
1566 /// <summary>
1567 /// Returns the index of the current thread.
1568 /// </summary>
1569 /// <remarks>In a job, each thread gets its own copy of the ParallelWriter struct, and the job system assigns
1570 /// each copy the index of its thread.</remarks>
1571 /// <value>The index of the current thread.</value>
1572 public int ThreadIndex => m_ThreadIndex;
1573
1574 /// <summary>
1575 /// The number of key-value pairs that fit in the current allocation.
1576 /// </summary>
1577 /// <value>The number of key-value pairs that fit in the current allocation.</value>
1578 public readonly int Capacity
1579 {
1580 [MethodImpl(MethodImplOptions.AggressiveInlining)]
1581 get
1582 {
1583 UnsafeParallelHashMapData* data = m_Buffer;
1584 return data->keyCapacity;
1585 }
1586 }
1587
1588 /// <summary>
1589 /// Adds a new key-value pair.
1590 /// </summary>
1591 /// <remarks>If the key is already present, this method returns false without modifying the hash map.</remarks>
1592 /// <param name="key">The key to add.</param>
1593 /// <param name="item">The value to add.</param>
1594 /// <returns>True if the key-value pair was added.</returns>
1595 public bool TryAdd(TKey key, TValue item)
1596 {
1597 Assert.IsTrue(m_ThreadIndex >= 0);
1598 return UnsafeParallelHashMapBase<TKey, TValue>.TryAddAtomic(m_Buffer, key, item, m_ThreadIndex);
1599 }
1600
1601 /// <summary>
1602 /// Adds a new key-value pair.
1603 /// </summary>
1604 /// <remarks>If the key is already present, this method returns false without modifying the hash map.</remarks>
1605 /// <param name="key">The key to add.</param>
1606 /// <param name="item">The value to add.</param>
1607 /// <param name="threadIndexOverride">The thread index which must be set by a field from a job struct with the <see cref="NativeSetThreadIndexAttribute"/> attribute.</param>
1608 /// <returns>True if the key-value pair was added.</returns>
1609 internal bool TryAdd(TKey key, TValue item, int threadIndexOverride)
1610 {
1611 Assert.IsTrue(threadIndexOverride >= 0);
1612 return UnsafeParallelHashMapBase<TKey, TValue>.TryAddAtomic(m_Buffer, key, item, threadIndexOverride);
1613 }
1614 }
1615
1616 /// <summary>
1617 /// Returns an enumerator over the key-value pairs of this hash map.
1618 /// </summary>
1619 /// <returns>An enumerator over the key-value pairs of this hash map.</returns>
1620 public Enumerator GetEnumerator()
1621 {
1622 return new Enumerator { m_Enumerator = new UnsafeParallelHashMapDataEnumerator(m_Buffer) };
1623 }
1624
1625 /// <summary>
1626 /// This method is not implemented. Use <see cref="GetEnumerator"/> instead.
1627 /// </summary>
1628 /// <returns>Throws NotImplementedException.</returns>
1629 /// <exception cref="NotImplementedException">Method is not implemented.</exception>
1630 IEnumerator<KeyValue<TKey, TValue>> IEnumerable<KeyValue<TKey, TValue>>.GetEnumerator()
1631 {
1632 throw new NotImplementedException();
1633 }
1634
1635 /// <summary>
1636 /// This method is not implemented. Use <see cref="GetEnumerator"/> instead.
1637 /// </summary>
1638 /// <returns>Throws NotImplementedException.</returns>
1639 /// <exception cref="NotImplementedException">Method is not implemented.</exception>
1640 IEnumerator IEnumerable.GetEnumerator()
1641 {
1642 throw new NotImplementedException();
1643 }
1644
1645 /// <summary>
1646 /// An enumerator over the key-value pairs of a hash map.
1647 /// </summary>
1648 /// <remarks>
1649 /// In an enumerator's initial state, <see cref="Current"/> is not valid to read.
1650 /// From this state, the first <see cref="MoveNext"/> call advances the enumerator to the first key-value pair.
1651 /// </remarks>
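/// <example>
/// Typical use (illustrative only; <c>map</c> and <c>Process</c> are placeholders):
/// <code><![CDATA[
/// foreach (var pair in map)
/// {
///     Process(pair.Key, pair.Value);
/// }
/// ]]></code>
/// </example>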
1652 public struct Enumerator : IEnumerator<KeyValue<TKey, TValue>>
1653 {
1654 internal UnsafeParallelHashMapDataEnumerator m_Enumerator;
1655
1656 /// <summary>
1657 /// Does nothing.
1658 /// </summary>
1659 public void Dispose() { }
1660
1661 /// <summary>
1662 /// Advances the enumerator to the next key-value pair.
1663 /// </summary>
1664 /// <returns>True if <see cref="Current"/> is valid to read after the call.</returns>
1665 [MethodImpl(MethodImplOptions.AggressiveInlining)]
1666 public bool MoveNext() => m_Enumerator.MoveNext();
1667
1668 /// <summary>
1669 /// Resets the enumerator to its initial state.
1670 /// </summary>
1671 public void Reset() => m_Enumerator.Reset();
1672
1673 /// <summary>
1674 /// The current key-value pair.
1675 /// </summary>
1676 /// <value>The current key-value pair.</value>
1677 public KeyValue<TKey, TValue> Current
1678 {
1679 [MethodImpl(MethodImplOptions.AggressiveInlining)]
1680 get => m_Enumerator.GetCurrent<TKey, TValue>();
1681 }
1682
1683 object IEnumerator.Current => Current;
1684 }
1685 }
1686
1687 [BurstCompile]
1688 internal unsafe struct UnsafeParallelHashMapDisposeJob : IJob
1689 {
1690 [NativeDisableUnsafePtrRestriction]
1691 public UnsafeParallelHashMapData* Data;
1692 public AllocatorManager.AllocatorHandle Allocator;
1693
1694 public void Execute()
1695 {
1696 UnsafeParallelHashMapData.DeallocateHashMap(Data, Allocator);
1697 }
1698 }
1699
1700 sealed internal class UnsafeParallelHashMapDebuggerTypeProxy<TKey, TValue>
1701 where TKey : unmanaged, IEquatable<TKey>
1702 where TValue : unmanaged
1703 {
1704 UnsafeParallelHashMap<TKey, TValue> m_Target;
1705
1706 public UnsafeParallelHashMapDebuggerTypeProxy(UnsafeParallelHashMap<TKey, TValue> target)
1707 {
1708 m_Target = target;
1709 }
1710
1711 public List<Pair<TKey, TValue>> Items
1712 {
1713 get
1714 {
1715 var result = new List<Pair<TKey, TValue>>();
1716 using (var kva = m_Target.GetKeyValueArrays(Allocator.Temp))
1717 {
1718 for (var i = 0; i < kva.Length; ++i)
1719 {
1720 result.Add(new Pair<TKey, TValue>(kva.Keys[i], kva.Values[i]));
1721 }
1722 }
1723 return result;
1724 }
1725 }
1726 }
1727
1728 /// <summary>
1729 /// For internal use only.
1730 /// </summary>
1731 public unsafe struct UntypedUnsafeParallelHashMap
1732 {
1733#pragma warning disable 169
1734 [NativeDisableUnsafePtrRestriction]
1735 UnsafeParallelHashMapData* m_Buffer;
1736 AllocatorManager.AllocatorHandle m_AllocatorLabel;
1737#pragma warning restore 169
1738 }
1739}