using NUnit.Framework;
using UnityEngine;
using Unity.Collections.LowLevel.Unsafe;
using Unity.PerformanceTesting;
using Unity.PerformanceTesting.Benchmark;
using System.Runtime.CompilerServices;
using System.Threading;

namespace Unity.Collections.PerformanceTests
{
    // Shared helpers for allocating containers, generating unique random keys,
    // and splitting work ranges across benchmark workers.
    static class ParallelHashMapUtil
    {
        static public void AllocInt(ref NativeParallelHashMap<int, int> container, int capacity, bool addValues)
        {
            if (capacity >= 0)
            {
                Unity.Mathematics.Random random = new Unity.Mathematics.Random(HashMapUtil.K_RANDOM_SEED_1);
                container = new NativeParallelHashMap<int, int>(capacity, Allocator.Persistent);
                if (addValues)
                {
                    int keysAdded = 0;

                    while (keysAdded < capacity)
                    {
                        int randKey = random.NextInt();
                        if (container.TryAdd(randKey, keysAdded))
                        {
                            ++keysAdded;
                        }
                    }
                }
            }
            else
                container.Dispose();
        }

        static public void AllocInt(ref UnsafeParallelHashMap<int, int> container, int capacity, bool addValues)
        {
            if (capacity >= 0)
            {
                Unity.Mathematics.Random random = new Unity.Mathematics.Random(HashMapUtil.K_RANDOM_SEED_1);
                container = new UnsafeParallelHashMap<int, int>(capacity, Allocator.Persistent);
                if (addValues)
                {
                    int keysAdded = 0;

                    while (keysAdded < capacity)
                    {
                        int randKey = random.NextInt();
                        if (container.TryAdd(randKey, keysAdded))
                        {
                            ++keysAdded;
                        }
                    }
                }
            }
            else
                container.Dispose();
        }

        static public object AllocBclContainer(int capacity, bool addValues)
        {
            if (capacity < 0)
                return null;

            Unity.Mathematics.Random random = new Unity.Mathematics.Random(HashMapUtil.K_RANDOM_SEED_1);

            // FROM MICROSOFT DOCUMENTATION
            // The higher the concurrencyLevel, the higher the theoretical number of operations
            // that could be performed concurrently on the ConcurrentDictionary. However, global
            // operations like resizing the dictionary take longer as the concurrencyLevel rises.
            // For the purposes of this example, we'll compromise at numCores * 2.
            var bclContainer = new System.Collections.Concurrent.ConcurrentDictionary<int, int>(System.Environment.ProcessorCount * 2, capacity);

            if (addValues)
            {
                int keysAdded = 0;

                while (keysAdded < capacity)
                {
                    int randKey = random.NextInt();
                    if (bclContainer.TryAdd(randKey, keysAdded))
                    {
                        ++keysAdded;
                    }
                }
            }
            return bclContainer;
        }

        static public void CreateRandomKeys(int capacity, ref UnsafeList<int> keys)
        {
            if (capacity >= 0)
            {
                keys = new UnsafeList<int>(capacity, Allocator.Persistent);
                using (UnsafeHashSet<int> randomFilter = new UnsafeHashSet<int>(capacity, Allocator.Persistent))
                {
                    Unity.Mathematics.Random random = new Unity.Mathematics.Random(HashMapUtil.K_RANDOM_SEED_2);
                    int keysAdded = 0;

                    while (keysAdded < capacity)
                    {
                        int randKey = random.NextInt();
                        if (randomFilter.Add(randKey))
                        {
                            keys.Add(randKey);
                            ++keysAdded;
                        }
                    }
                }
            }
            else
                keys.Dispose();
        }

        static public void CreateRandomKeys(int capacity, ref UnsafeList<int> keys, ref UnsafeParallelHashMap<int, int> hashMap)
        {
            if (capacity >= 0)
            {
                keys = new UnsafeList<int>(capacity, Allocator.Persistent);
                using (UnsafeHashSet<int> randomFilter = new UnsafeHashSet<int>(capacity, Allocator.Persistent))
                {
                    Unity.Mathematics.Random random = new Unity.Mathematics.Random(HashMapUtil.K_RANDOM_SEED_2);
                    int keysAdded = 0;

                    while (keysAdded < capacity)
                    {
                        int randKey = random.NextInt();
                        if (randomFilter.Add(randKey))
                        {
                            keys.Add(randKey);
                            ++keysAdded;
                        }
                    }
                }
            }
            else
                keys.Dispose();
        }

        static public void CreateRandomKeys(int capacity, ref UnsafeList<int> keys, ref System.Collections.Concurrent.ConcurrentDictionary<int, int> hashMap)
        {
            if (capacity >= 0)
            {
                keys = new UnsafeList<int>(capacity, Allocator.Persistent);
                using (UnsafeHashSet<int> randomFilter = new UnsafeHashSet<int>(capacity, Allocator.Persistent))
                {
                    Unity.Mathematics.Random random = new Unity.Mathematics.Random(HashMapUtil.K_RANDOM_SEED_2);
                    int keysAdded = 0;

                    while (keysAdded < capacity)
                    {
                        int randKey = random.NextInt();
                        if (randomFilter.Add(randKey))
                        {
                            keys.Add(randKey);
                            ++keysAdded;
                        }
                    }
                }
            }
            else
                keys.Dispose();
        }

        static public void CreateRandomKeys(int capacity, ref UnsafeList<int> keys, ref NativeParallelHashMap<int, int> hashMap)
        {
            if (capacity >= 0)
            {
                keys = new UnsafeList<int>(capacity, Allocator.Persistent);
                using (UnsafeHashSet<int> randomFilter = new UnsafeHashSet<int>(capacity, Allocator.Persistent))
                {
                    Unity.Mathematics.Random random = new Unity.Mathematics.Random(HashMapUtil.K_RANDOM_SEED_2);
                    int keysAdded = 0;

                    while (keysAdded < capacity)
                    {
                        int randKey = random.NextInt();
                        if (randomFilter.Add(randKey))
                        {
                            keys.Add(randKey);
                            ++keysAdded;
                        }
                    }
                }
            }
            else
                keys.Dispose();
        }

        static public void RandomlyShuffleKeys(int capacity, ref UnsafeList<int> keys)
        {
            if (capacity >= 0)
            {
                Unity.Mathematics.Random random = new Unity.Mathematics.Random(HashMapUtil.K_RANDOM_SEED_3);
                for (int i = 0; i < capacity; i++)
                {
                    int keyAt = keys[i];
                    int randomIndex = random.NextInt(0, capacity - 1);
                    keys[i] = keys[randomIndex];
                    keys[randomIndex] = keyAt;
                }
            }
        }

        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        static public void SplitForWorkers(int count, int worker, int workers, out int startInclusive, out int endExclusive)
        {
            startInclusive = count * worker / workers;
            endExclusive = count * (worker + 1) / workers;
        }
    }

    // Benchmark: query IsEmpty 100,000 times, split across workers.
    struct ParallelHashMapIsEmpty100k : IBenchmarkContainerParallel
    {
        const int kIterations = 100_000;
        int workers;
        NativeParallelHashMap<int, int> nativeContainer;
        UnsafeParallelHashMap<int, int> unsafeContainer;

        void IBenchmarkContainerParallel.SetParams(int capacity, params int[] args) => workers = args[0];
        public void AllocNativeContainer(int capacity) => ParallelHashMapUtil.AllocInt(ref nativeContainer, capacity, true);
        public void AllocUnsafeContainer(int capacity) => ParallelHashMapUtil.AllocInt(ref unsafeContainer, capacity, true);
        public object AllocBclContainer(int capacity) => ParallelHashMapUtil.AllocBclContainer(capacity, true);

        [MethodImpl(MethodImplOptions.NoOptimization)]
        public void MeasureNativeContainer(int worker, int threadIndex)
        {
            var reader = nativeContainer.AsReadOnly();
            ParallelHashMapUtil.SplitForWorkers(kIterations, worker, workers, out int start, out int end);
            for (int i = start; i < end; i++)
                _ = reader.IsEmpty;
        }
        [MethodImpl(MethodImplOptions.NoOptimization)]
        public void MeasureUnsafeContainer(int worker, int threadIndex)
        {
            ParallelHashMapUtil.SplitForWorkers(kIterations, worker, workers, out int start, out int end);
            for (int i = start; i < end; i++)
                _ = unsafeContainer.IsEmpty;
        }
        [MethodImpl(MethodImplOptions.NoOptimization)]
        public void MeasureBclContainer(object container, int worker)
        {
            var bclContainer = (System.Collections.Concurrent.ConcurrentDictionary<int, int>)container;
            ParallelHashMapUtil.SplitForWorkers(kIterations, worker, workers, out int start, out int end);
            for (int i = start; i < end; i++)
                _ = bclContainer.IsEmpty;
        }
    }

    // Benchmark: query the element count 100,000 times, split across workers.
    struct ParallelHashMapCount100k : IBenchmarkContainerParallel
    {
        const int kIterations = 100_000;
        int workers;
        NativeParallelHashMap<int, int> nativeContainer;
        UnsafeParallelHashMap<int, int> unsafeContainer;

        void IBenchmarkContainerParallel.SetParams(int capacity, params int[] args) => workers = args[0];
        public void AllocNativeContainer(int capacity) => ParallelHashMapUtil.AllocInt(ref nativeContainer, capacity, true);
        public void AllocUnsafeContainer(int capacity) => ParallelHashMapUtil.AllocInt(ref unsafeContainer, capacity, true);
        public object AllocBclContainer(int capacity) => ParallelHashMapUtil.AllocBclContainer(capacity, true);

        [MethodImpl(MethodImplOptions.NoOptimization)]
        public void MeasureNativeContainer(int worker, int threadIndex)
        {
            var reader = nativeContainer.AsReadOnly();
            ParallelHashMapUtil.SplitForWorkers(kIterations, worker, workers, out int start, out int end);
            for (int i = start; i < end; i++)
                _ = reader.Count();
        }
        [MethodImpl(MethodImplOptions.NoOptimization)]
        public void MeasureUnsafeContainer(int worker, int threadIndex)
        {
            ParallelHashMapUtil.SplitForWorkers(kIterations, worker, workers, out int start, out int end);
            for (int i = start; i < end; i++)
                _ = unsafeContainer.Count();
        }
        [MethodImpl(MethodImplOptions.NoOptimization)]
        public void MeasureBclContainer(object container, int worker)
        {
            var bclContainer = (System.Collections.Concurrent.ConcurrentDictionary<int, int>)container;
            ParallelHashMapUtil.SplitForWorkers(kIterations, worker, workers, out int start, out int end);
            for (int i = start; i < end; i++)
                _ = bclContainer.Count;
        }
    }

    // Benchmark: copy all keys out to an array.
    struct ParallelHashMapToNativeArrayKeys : IBenchmarkContainerParallel
    {
        NativeParallelHashMap<int, int> nativeContainer;
        UnsafeParallelHashMap<int, int> unsafeContainer;

        public void AllocNativeContainer(int capacity) => ParallelHashMapUtil.AllocInt(ref nativeContainer, capacity, true);
        public void AllocUnsafeContainer(int capacity) => ParallelHashMapUtil.AllocInt(ref unsafeContainer, capacity, true);
        public object AllocBclContainer(int capacity) => ParallelHashMapUtil.AllocBclContainer(capacity, true);

        public void MeasureNativeContainer(int worker, int threadIndex)
        {
            var asArray = nativeContainer.GetKeyArray(Allocator.Temp);
            asArray.Dispose();
        }
        public void MeasureUnsafeContainer(int worker, int threadIndex)
        {
            var asArray = unsafeContainer.GetKeyArray(Allocator.Temp);
            asArray.Dispose();
        }
        public void MeasureBclContainer(object container, int worker)
        {
            var bclContainer = (System.Collections.Concurrent.ConcurrentDictionary<int, int>)container;
            int[] asArray = new int[bclContainer.Count];
            bclContainer.Keys.CopyTo(asArray, 0);
        }
    }

    // Benchmark: copy all values out to an array.
    struct ParallelHashMapToNativeArrayValues : IBenchmarkContainerParallel
    {
        NativeParallelHashMap<int, int> nativeContainer;
        UnsafeParallelHashMap<int, int> unsafeContainer;

        public void AllocNativeContainer(int capacity) => ParallelHashMapUtil.AllocInt(ref nativeContainer, capacity, true);
        public void AllocUnsafeContainer(int capacity) => ParallelHashMapUtil.AllocInt(ref unsafeContainer, capacity, true);
        public object AllocBclContainer(int capacity) => ParallelHashMapUtil.AllocBclContainer(capacity, true);

        public void MeasureNativeContainer(int worker, int threadIndex)
        {
            var asArray = nativeContainer.GetValueArray(Allocator.Temp);
            asArray.Dispose();
        }
        public void MeasureUnsafeContainer(int worker, int threadIndex)
        {
            var asArray = unsafeContainer.GetValueArray(Allocator.Temp);
            asArray.Dispose();
        }
        public void MeasureBclContainer(object container, int worker)
        {
            var bclContainer = (System.Collections.Concurrent.ConcurrentDictionary<int, int>)container;
            int[] asArray = new int[bclContainer.Count];
            bclContainer.Values.CopyTo(asArray, 0);
        }
    }

    // Benchmark: insert pre-generated random keys, split across parallel writers.
    struct ParallelHashMapInsert : IBenchmarkContainerParallel
    {
        int capacity;
        int workers;
        NativeParallelHashMap<int, int> nativeContainer;
        UnsafeParallelHashMap<int, int> unsafeContainer;
        UnsafeList<int> keys;

        void IBenchmarkContainerParallel.SetParams(int capacity, params int[] args)
        {
            this.capacity = capacity;
            workers = args[0];
        }

        public void AllocNativeContainer(int capacity)
        {
            ParallelHashMapUtil.AllocInt(ref nativeContainer, capacity, false);
            ParallelHashMapUtil.CreateRandomKeys(capacity, ref keys);
        }

        public void AllocUnsafeContainer(int capacity)
        {
            ParallelHashMapUtil.AllocInt(ref unsafeContainer, capacity, false);
            ParallelHashMapUtil.CreateRandomKeys(capacity, ref keys);
        }

        public object AllocBclContainer(int capacity)
        {
            object container = ParallelHashMapUtil.AllocBclContainer(capacity, false);
            ParallelHashMapUtil.CreateRandomKeys(capacity, ref keys);
            return container;
        }

        public void MeasureNativeContainer(int worker, int threadIndex)
        {
            var writer = nativeContainer.AsParallelWriter();
            ParallelHashMapUtil.SplitForWorkers(capacity, worker, workers, out int start, out int end);
            for (int i = start; i < end; i++)
                writer.TryAdd(keys[i], i, threadIndex);
        }
        public void MeasureUnsafeContainer(int worker, int threadIndex)
        {
            var writer = unsafeContainer.AsParallelWriter();
            ParallelHashMapUtil.SplitForWorkers(capacity, worker, workers, out int start, out int end);
            for (int i = start; i < end; i++)
                writer.TryAdd(keys[i], i, threadIndex);
        }
        public void MeasureBclContainer(object container, int worker)
        {
            var bclContainer = (System.Collections.Concurrent.ConcurrentDictionary<int, int>)container;
            ParallelHashMapUtil.SplitForWorkers(capacity, worker, workers, out int start, out int end);
            for (int i = start; i < end; i++)
                bclContainer.TryAdd(keys[i], i);
        }
    }

    // Benchmark: add elements past the initial capacity, forcing the container to grow.
    struct ParallelHashMapAddGrow : IBenchmarkContainerParallel
    {
        int capacity;
        int toAdd;
        NativeParallelHashMap<int, int> nativeContainer;
        UnsafeParallelHashMap<int, int> unsafeContainer;
        UnsafeList<int> keys;

        void IBenchmarkContainerParallel.SetParams(int capacity, params int[] args)
        {
            this.capacity = capacity;
            toAdd = args[0] - capacity;
        }

        public void AllocNativeContainer(int capacity)
        {
            ParallelHashMapUtil.AllocInt(ref nativeContainer, capacity, true);
            int toAddCount = capacity < 0 ? -1 : toAdd;
            ParallelHashMapUtil.CreateRandomKeys(toAddCount, ref keys, ref nativeContainer);
        }

        public void AllocUnsafeContainer(int capacity)
        {
            ParallelHashMapUtil.AllocInt(ref unsafeContainer, capacity, true);
            int toAddCount = capacity < 0 ? -1 : toAdd;
            ParallelHashMapUtil.CreateRandomKeys(toAddCount, ref keys, ref unsafeContainer);
        }

        public object AllocBclContainer(int capacity)
        {
            object container = ParallelHashMapUtil.AllocBclContainer(capacity, true);
            var bclContainer = (System.Collections.Concurrent.ConcurrentDictionary<int, int>)container;
            int toAddCount = capacity < 0 ? -1 : toAdd;
            ParallelHashMapUtil.CreateRandomKeys(toAddCount, ref keys, ref bclContainer);
            return container;
        }

        public void MeasureNativeContainer(int _, int __)
        {
            // Intentionally setting capacity small and growing by adding more items
            for (int i = 0; i < toAdd; i++)
                nativeContainer.Add(keys[i], i);
        }
        public void MeasureUnsafeContainer(int _, int __)
        {
            // Intentionally setting capacity small and growing by adding more items
            for (int i = 0; i < toAdd; i++)
                unsafeContainer.Add(keys[i], i);
        }
        public void MeasureBclContainer(object container, int _)
        {
            var bclContainer = (System.Collections.Concurrent.ConcurrentDictionary<int, int>)container;
            // Intentionally setting capacity small and growing by adding more items
            for (int i = 0; i < toAdd; i++)
                bclContainer.TryAdd(keys[i], i);
        }
    }

    // Benchmark: ContainsKey lookups over shuffled keys, split across workers.
    struct ParallelHashMapContains : IBenchmarkContainerParallel
    {
        int capacity;
        int workers;
        NativeParallelHashMap<int, int> nativeContainer;
        UnsafeParallelHashMap<int, int> unsafeContainer;
        UnsafeList<int> keys;

        void IBenchmarkContainerParallel.SetParams(int capacity, params int[] args)
        {
            this.capacity = capacity;
            workers = args[0];
        }

        public void AllocNativeContainer(int capacity)
        {
            ParallelHashMapUtil.AllocInt(ref nativeContainer, capacity, false);
            ParallelHashMapUtil.CreateRandomKeys(capacity, ref keys);
            for (int i = 0; i < capacity; i++)
                nativeContainer.TryAdd(keys[i], i);
            ParallelHashMapUtil.RandomlyShuffleKeys(capacity, ref keys);
        }
        public void AllocUnsafeContainer(int capacity)
        {
            ParallelHashMapUtil.AllocInt(ref unsafeContainer, capacity, false);
            ParallelHashMapUtil.CreateRandomKeys(capacity, ref keys);
            for (int i = 0; i < capacity; i++)
                unsafeContainer.TryAdd(keys[i], i);
            ParallelHashMapUtil.RandomlyShuffleKeys(capacity, ref keys);
        }
        public object AllocBclContainer(int capacity)
        {
            object container = ParallelHashMapUtil.AllocBclContainer(capacity, false);
            var bclContainer = (System.Collections.Concurrent.ConcurrentDictionary<int, int>)container;
            ParallelHashMapUtil.CreateRandomKeys(capacity, ref keys);
            for (int i = 0; i < capacity; i++)
                bclContainer.TryAdd(keys[i], i);
            ParallelHashMapUtil.RandomlyShuffleKeys(capacity, ref keys);
            return container;
        }

        public void MeasureNativeContainer(int worker, int threadIndex)
        {
            var reader = nativeContainer.AsReadOnly();
            ParallelHashMapUtil.SplitForWorkers(capacity, worker, workers, out int start, out int end);
            bool data = false;
            for (int i = start; i < end; i++)
                Volatile.Write(ref data, reader.ContainsKey(keys[i]));
        }
        public void MeasureUnsafeContainer(int worker, int threadIndex)
        {
            ParallelHashMapUtil.SplitForWorkers(capacity, worker, workers, out int start, out int end);
            bool data = false;
            for (int i = start; i < end; i++)
                Volatile.Write(ref data, unsafeContainer.ContainsKey(keys[i]));
        }
        public void MeasureBclContainer(object container, int worker)
        {
            var bclContainer = (System.Collections.Concurrent.ConcurrentDictionary<int, int>)container;
            ParallelHashMapUtil.SplitForWorkers(capacity, worker, workers, out int start, out int end);
            bool data = false;
            for (int i = start; i < end; i++)
                Volatile.Write(ref data, bclContainer.ContainsKey(keys[i]));
        }
    }

    // Benchmark: read values through the indexer using shuffled keys.
    struct ParallelHashMapIndexedRead : IBenchmarkContainerParallel
    {
        NativeParallelHashMap<int, int> nativeContainer;
        UnsafeParallelHashMap<int, int> unsafeContainer;
        UnsafeList<int> keys;

        public void AllocNativeContainer(int capacity)
        {
            ParallelHashMapUtil.AllocInt(ref nativeContainer, capacity, false);
            ParallelHashMapUtil.CreateRandomKeys(capacity, ref keys);
            for (int i = 0; i < capacity; i++)
                nativeContainer.TryAdd(keys[i], i);
            ParallelHashMapUtil.RandomlyShuffleKeys(capacity, ref keys);
        }
        public void AllocUnsafeContainer(int capacity)
        {
            ParallelHashMapUtil.AllocInt(ref unsafeContainer, capacity, false);
            ParallelHashMapUtil.CreateRandomKeys(capacity, ref keys);
            for (int i = 0; i < capacity; i++)
                unsafeContainer.TryAdd(keys[i], i);
            ParallelHashMapUtil.RandomlyShuffleKeys(capacity, ref keys);
        }
        public object AllocBclContainer(int capacity)
        {
            object container = ParallelHashMapUtil.AllocBclContainer(capacity, false);
            var bclContainer = (System.Collections.Concurrent.ConcurrentDictionary<int, int>)container;
            ParallelHashMapUtil.CreateRandomKeys(capacity, ref keys);
            for (int i = 0; i < capacity; i++)
                bclContainer.TryAdd(keys[i], i);
            ParallelHashMapUtil.RandomlyShuffleKeys(capacity, ref keys);
            return container;
        }

        public void MeasureNativeContainer(int worker, int threadIndex)
        {
            var reader = nativeContainer.AsReadOnly();
            int insertions = keys.Length;
            int value = 0;
            for (int i = 0; i < insertions; i++)
                Volatile.Write(ref value, reader[keys[i]]);
        }
        public void MeasureUnsafeContainer(int worker, int threadIndex)
        {
            int insertions = keys.Length;
            int value = 0;
            for (int i = 0; i < insertions; i++)
                Volatile.Write(ref value, unsafeContainer[keys[i]]);
        }
        public void MeasureBclContainer(object container, int worker)
        {
            var bclContainer = (System.Collections.Concurrent.ConcurrentDictionary<int, int>)container;
            int insertions = keys.Length;
            int value = 0;
            for (int i = 0; i < insertions; i++)
                Volatile.Write(ref value, bclContainer[keys[i]]);
        }
    }

    // Benchmark: write values through the indexer using shuffled keys.
    struct ParallelHashMapIndexedWrite : IBenchmarkContainerParallel
    {
        NativeParallelHashMap<int, int> nativeContainer;
        UnsafeParallelHashMap<int, int> unsafeContainer;
        UnsafeList<int> keys;

        public void AllocNativeContainer(int capacity)
        {
            ParallelHashMapUtil.AllocInt(ref nativeContainer, capacity, false);
            ParallelHashMapUtil.CreateRandomKeys(capacity, ref keys);
            for (int i = 0; i < capacity; i++)
                nativeContainer.TryAdd(keys[i], i);
            ParallelHashMapUtil.RandomlyShuffleKeys(capacity, ref keys);
        }
        public void AllocUnsafeContainer(int capacity)
        {
            ParallelHashMapUtil.AllocInt(ref unsafeContainer, capacity, false);
            ParallelHashMapUtil.CreateRandomKeys(capacity, ref keys);
            for (int i = 0; i < capacity; i++)
                unsafeContainer.TryAdd(keys[i], i);
            ParallelHashMapUtil.RandomlyShuffleKeys(capacity, ref keys);
        }
        public object AllocBclContainer(int capacity)
        {
            object container = ParallelHashMapUtil.AllocBclContainer(capacity, false);
            ParallelHashMapUtil.CreateRandomKeys(capacity, ref keys);
            var bclContainer = (System.Collections.Concurrent.ConcurrentDictionary<int, int>)container;
            for (int i = 0; i < capacity; i++)
                bclContainer.TryAdd(keys[i], i);
            ParallelHashMapUtil.RandomlyShuffleKeys(capacity, ref keys);
            return container;
        }

        public void MeasureNativeContainer(int worker, int threadIndex)
        {
            int insertions = keys.Length;
            for (int i = 0; i < insertions; i++)
                nativeContainer[keys[i]] = i;
        }
        public void MeasureUnsafeContainer(int worker, int threadIndex)
        {
            int insertions = keys.Length;
            for (int i = 0; i < insertions; i++)
                unsafeContainer[keys[i]] = i;
        }
        public void MeasureBclContainer(object container, int worker)
        {
            var bclContainer = (System.Collections.Concurrent.ConcurrentDictionary<int, int>)container;
            int insertions = keys.Length;
            for (int i = 0; i < insertions; i++)
                bclContainer[keys[i]] = i;
        }
    }

    // Benchmark: TryGetValue lookups over shuffled keys, split across workers.
    struct ParallelHashMapTryGetValue : IBenchmarkContainerParallel
    {
        int workers;
        NativeParallelHashMap<int, int> nativeContainer;
        UnsafeParallelHashMap<int, int> unsafeContainer;
        UnsafeList<int> keys;

        void IBenchmarkContainerParallel.SetParams(int capacity, params int[] args) => workers = args[0];

        public void AllocNativeContainer(int capacity)
        {
            ParallelHashMapUtil.AllocInt(ref nativeContainer, capacity, false);
            ParallelHashMapUtil.CreateRandomKeys(capacity, ref keys);
            for (int i = 0; i < capacity; i++)
                nativeContainer.TryAdd(keys[i], i);
            ParallelHashMapUtil.RandomlyShuffleKeys(capacity, ref keys);
        }
        public void AllocUnsafeContainer(int capacity)
        {
            ParallelHashMapUtil.AllocInt(ref unsafeContainer, capacity, false);
            ParallelHashMapUtil.CreateRandomKeys(capacity, ref keys);
            for (int i = 0; i < capacity; i++)
                unsafeContainer.TryAdd(keys[i], i);
            ParallelHashMapUtil.RandomlyShuffleKeys(capacity, ref keys);
        }
        public object AllocBclContainer(int capacity)
        {
            object container = ParallelHashMapUtil.AllocBclContainer(capacity, false);
            var bclContainer = (System.Collections.Concurrent.ConcurrentDictionary<int, int>)container;
            ParallelHashMapUtil.CreateRandomKeys(capacity, ref keys);
            for (int i = 0; i < capacity; i++)
                bclContainer.TryAdd(keys[i], i);
            ParallelHashMapUtil.RandomlyShuffleKeys(capacity, ref keys);
            return container;
        }

        public void MeasureNativeContainer(int worker, int threadIndex)
        {
            var reader = nativeContainer.AsReadOnly();
            ParallelHashMapUtil.SplitForWorkers(keys.Length, worker, workers, out int start, out int end);
            for (int i = start; i < end; i++)
            {
                reader.TryGetValue(keys[i], out var value);
                Volatile.Read(ref value);
            }
        }
        public void MeasureUnsafeContainer(int worker, int threadIndex)
        {
            ParallelHashMapUtil.SplitForWorkers(keys.Length, worker, workers, out int start, out int end);
            for (int i = start; i < end; i++)
            {
                unsafeContainer.TryGetValue(keys[i], out var value);
                Volatile.Read(ref value);
            }
        }
        public void MeasureBclContainer(object container, int worker)
        {
            var bclContainer = (System.Collections.Concurrent.ConcurrentDictionary<int, int>)container;
            ParallelHashMapUtil.SplitForWorkers(keys.Length, worker, workers, out int start, out int end);
            for (int i = start; i < end; i++)
            {
                bclContainer.TryGetValue(keys[i], out var value);
                Volatile.Read(ref value);
            }
        }
    }

    // Benchmark: remove all elements using shuffled keys.
    struct ParallelHashMapRemove : IBenchmarkContainerParallel
    {
        NativeParallelHashMap<int, int> nativeContainer;
        UnsafeParallelHashMap<int, int> unsafeContainer;
        UnsafeList<int> keys;

        public void AllocNativeContainer(int capacity)
        {
            ParallelHashMapUtil.AllocInt(ref nativeContainer, capacity, false);
            ParallelHashMapUtil.CreateRandomKeys(capacity, ref keys);
            for (int i = 0; i < capacity; i++)
                nativeContainer.TryAdd(keys[i], i);
            ParallelHashMapUtil.RandomlyShuffleKeys(capacity, ref keys);
        }
        public void AllocUnsafeContainer(int capacity)
        {
            ParallelHashMapUtil.AllocInt(ref unsafeContainer, capacity, false);
            ParallelHashMapUtil.CreateRandomKeys(capacity, ref keys);
            for (int i = 0; i < capacity; i++)
                unsafeContainer.TryAdd(keys[i], i);
            ParallelHashMapUtil.RandomlyShuffleKeys(capacity, ref keys);
        }
        public object AllocBclContainer(int capacity)
        {
            object container = ParallelHashMapUtil.AllocBclContainer(capacity, false);
            var bclContainer = (System.Collections.Concurrent.ConcurrentDictionary<int, int>)container;
            ParallelHashMapUtil.CreateRandomKeys(capacity, ref keys);
            for (int i = 0; i < capacity; i++)
                bclContainer.TryAdd(keys[i], i);
            ParallelHashMapUtil.RandomlyShuffleKeys(capacity, ref keys);
            return container;
        }

        public void MeasureNativeContainer(int worker, int threadIndex)
        {
            int insertions = keys.Length;
            for (int i = 0; i < insertions; i++)
                nativeContainer.Remove(keys[i]);
        }
        public void MeasureUnsafeContainer(int worker, int threadIndex)
        {
            int insertions = keys.Length;
            for (int i = 0; i < insertions; i++)
                unsafeContainer.Remove(keys[i]);
        }
        public void MeasureBclContainer(object container, int worker)
        {
            var bclContainer = (System.Collections.Concurrent.ConcurrentDictionary<int, int>)container;
            int insertions = keys.Length;
            for (int i = 0; i < insertions; i++)
                bclContainer.TryRemove(keys[i], out _);
        }
    }

    // Benchmark: enumerate all key-value pairs.
    struct ParallelHashMapForEach : IBenchmarkContainerParallel
    {
        NativeParallelHashMap<int, int> nativeContainer;
        UnsafeParallelHashMap<int, int> unsafeContainer;

        public void AllocNativeContainer(int capacity) => ParallelHashMapUtil.AllocInt(ref nativeContainer, capacity, true);
        public void AllocUnsafeContainer(int capacity) => ParallelHashMapUtil.AllocInt(ref unsafeContainer, capacity, true);
        public object AllocBclContainer(int capacity) => ParallelHashMapUtil.AllocBclContainer(capacity, true);

        public void MeasureNativeContainer(int _, int __)
        {
            foreach (var pair in nativeContainer)
                Volatile.Read(ref pair.Value);
        }
        public void MeasureUnsafeContainer(int _, int __)
        {
            foreach (var pair in unsafeContainer)
                Volatile.Read(ref pair.Value);
        }
        public void MeasureBclContainer(object container, int _)
        {
            int value = 0;
            var bclContainer = (System.Collections.Concurrent.ConcurrentDictionary<int, int>)container;
            foreach (var pair in bclContainer)
                Volatile.Write(ref value, pair.Value);
        }
    }

    // Performance test entry points comparing NativeParallelHashMap, UnsafeParallelHashMap,
    // and the BCL ConcurrentDictionary across the benchmark cases above.
    [Benchmark(typeof(BenchmarkContainerType))]
    [BenchmarkNameOverride(BenchmarkContainerConfig.BCL, "ConcurrentDictionary")]
    class ParallelHashMap
    {
#if UNITY_EDITOR
        [UnityEditor.MenuItem(BenchmarkContainerConfig.kMenuItemIndividual + nameof(ParallelHashMap))]
        static void RunIndividual()
            => BenchmarkContainerConfig.RunBenchmark(typeof(ParallelHashMap));
#endif

        [Test, Performance]
        [Category("Performance")]
        public unsafe void IsEmpty_x_100k(
            [Values(1, 2, 4)] int workers,
            [Values(0, 100)] int capacity,
            [Values] BenchmarkContainerType type)
        {
            BenchmarkContainerRunnerParallel<ParallelHashMapIsEmpty100k>.Run(workers, capacity, type, workers);
        }

        [Test, Performance]
        [Category("Performance")]
        public unsafe void Count_x_100k(
            [Values(1, 2, 4)] int workers,
            [Values(0, 100)] int capacity,
            [Values] BenchmarkContainerType type)
        {
            BenchmarkContainerRunnerParallel<ParallelHashMapCount100k>.Run(workers, capacity, type, workers);
        }

        [Test, Performance]
        [Category("Performance")]
        public unsafe void ToNativeArrayKeys(
            [Values(1)] int workers,
            [Values(10000, 100000, 1000000)] int capacity,
            [Values] BenchmarkContainerType type)
        {
            BenchmarkContainerRunnerParallel<ParallelHashMapToNativeArrayKeys>.Run(workers, capacity, type);
        }

        [Test, Performance]
        [Category("Performance")]
        public unsafe void ToNativeArrayValues(
            [Values(1)] int workers,
            [Values(10000, 100000, 1000000)] int capacity,
            [Values] BenchmarkContainerType type)
        {
            BenchmarkContainerRunnerParallel<ParallelHashMapToNativeArrayValues>.Run(workers, capacity, type);
        }

        [Test, Performance]
        [Category("Performance")]
        public unsafe void Insert(
            [Values(1, 2, 4)] int workers,
#if UNITY_STANDALONE || UNITY_EDITOR
            [Values(10000, 100000, 1000000)] int insertions,
#else
            [Values(10000, 100000)] int insertions,  // Observe potential lower memory requirement on non-desktop platforms
#endif
            [Values] BenchmarkContainerType type)
        {
            BenchmarkContainerRunnerParallel<ParallelHashMapInsert>.Run(workers, insertions, type, workers);
        }

        [Test, Performance]
        [Category("Performance")]
        [BenchmarkTestFootnote("Incrementally grows from `capacity` until reaching size of `growTo`")]
        public unsafe void AddGrow(
            [Values(1)] int workers,  // Can't grow capacity in parallel
            [Values(4, 65536)] int capacity,
            [Values(1024 * 1024)] int growTo,
            [Values] BenchmarkContainerType type)
        {
            BenchmarkContainerRunnerParallel<ParallelHashMapAddGrow>.Run(workers, capacity, type, growTo);
        }

        [Test, Performance]
        [Category("Performance")]
        public unsafe void Contains(
            [Values(1, 2, 4)] int workers,
#if UNITY_STANDALONE || UNITY_EDITOR
            [Values(10000, 100000, 1000000)] int insertions,
#else
            [Values(10000, 100000)] int insertions,  // Observe potential lower memory requirement on non-desktop platforms
#endif
            [Values] BenchmarkContainerType type)
        {
            BenchmarkContainerRunnerParallel<ParallelHashMapContains>.Run(workers, insertions, type, workers);
        }

        [Test, Performance]
        [Category("Performance")]
        public unsafe void IndexedRead(
            [Values(1, 2, 4)] int workers,
#if UNITY_STANDALONE || UNITY_EDITOR
            [Values(10000, 100000, 1000000)] int insertions,
#else
            [Values(10000, 100000)] int insertions,  // Observe potential lower memory requirement on non-desktop platforms
#endif
            [Values] BenchmarkContainerType type)
        {
            BenchmarkContainerRunnerParallel<ParallelHashMapIndexedRead>.Run(workers, insertions, type, workers);
        }

        [Test, Performance]
        [Category("Performance")]
        public unsafe void IndexedWrite(
            [Values(1)] int workers,  // Indexed write only available in single thread
            [Values(10000, 100000, 1000000)] int insertions,
            [Values] BenchmarkContainerType type)
        {
            BenchmarkContainerRunnerParallel<ParallelHashMapIndexedWrite>.Run(workers, insertions, type, workers);
        }

        [Test, Performance]
        [Category("Performance")]
        public unsafe void TryGetValue(
            [Values(1, 2, 4)] int workers,
#if UNITY_STANDALONE || UNITY_EDITOR
            [Values(10000, 100000, 1000000)] int insertions,
#else
            [Values(10000, 100000)] int insertions,  // Observe potential lower memory requirement on non-desktop platforms
#endif
            [Values] BenchmarkContainerType type)
        {
            BenchmarkContainerRunnerParallel<ParallelHashMapTryGetValue>.Run(workers, insertions, type, workers);
        }

        [Test, Performance]
        [Category("Performance")]
        public unsafe void Remove(
            [Values(1)] int workers,  // No API for ParallelWriter.TryRemove currently
            [Values(10000, 100000, 1000000)] int insertions,
            [Values] BenchmarkContainerType type)
        {
            BenchmarkContainerRunnerParallel<ParallelHashMapRemove>.Run(workers, insertions, type, workers);
        }

        [Test, Performance]
        [Category("Performance")]
        public unsafe void Foreach(
            [Values(1)] int workers,  // This work can't be split
            [Values(10000, 100000, 1000000)] int insertions,
            [Values] BenchmarkContainerType type)
        {
            BenchmarkContainerRunnerParallel<ParallelHashMapForEach>.Run(workers, insertions, type, workers);
        }
    }
}