using NUnit.Framework;
using System;
using System.Collections.Generic;
using Unity.Jobs;
using Unity.Burst;
using Unity.Collections;
using Assert = FastAssert;
using Unity.Collections.Tests;

internal class NativeParallelHashMapTests_InJobs : NativeParallelHashMapTestsFixture
{
    struct NestedMapJob : IJob
    {
        public NativeParallelHashMap<int, NativeParallelHashMap<int, int>> nestedMap;

        public void Execute()
        {
            nestedMap.Clear();
        }
    }

    // Scheduling a job that captures a container nested inside another container must throw.
    [Test]
    [TestRequiresCollectionChecks]
    public void NativeParallelHashMap_NestedJob_Error()
    {
        var map = new NativeParallelHashMap<int, NativeParallelHashMap<int, int>>(hashMapSize, CommonRwdAllocator.Handle);

        var nestedJob = new NestedMapJob
        {
            nestedMap = map
        };

        JobHandle job = default;
        Assert.Throws<InvalidOperationException>(() => { job = nestedJob.Schedule(); });
        job.Complete();

        map.Dispose();
    }

    // Writes every key from a single job, then reads all keys back from a parallel-for job.
    [Test]
    public void NativeParallelHashMap_Read_And_Write()
    {
        var hashMap = new NativeParallelHashMap<int, int>(hashMapSize, CommonRwdAllocator.Handle);
        var writeStatus = CollectionHelper.CreateNativeArray<int>(hashMapSize, CommonRwdAllocator.Handle);
        var readValues = CollectionHelper.CreateNativeArray<int>(hashMapSize, CommonRwdAllocator.Handle);

        var writeData = new HashMapWriteJob()
        {
            hashMap = hashMap.AsParallelWriter(),
            status = writeStatus,
            keyMod = hashMapSize,
        };

        var readData = new HashMapReadParallelForJob()
        {
            hashMap = hashMap,
            values = readValues,
            keyMod = writeData.keyMod,
        };

        var writeJob = writeData.Schedule();
        var readJob = readData.Schedule(hashMapSize, 1, writeJob);
        readJob.Complete();

        for (int i = 0; i < hashMapSize; ++i)
        {
            Assert.AreEqual(0, writeStatus[i], "Job failed to write value to hash map");
            Assert.AreEqual(i, readValues[i], "Job failed to read from hash map");
        }

        hashMap.Dispose();
        writeStatus.Dispose();
        readValues.Dispose();
    }

    // The map only has capacity for half the keys; writes past capacity must be reported
    // as failed and the corresponding reads must return the -1 sentinel.
    [Test]
    [TestRequiresCollectionChecks]
    public void NativeParallelHashMap_Read_And_Write_Full()
    {
        var hashMap = new NativeParallelHashMap<int, int>(hashMapSize / 2, CommonRwdAllocator.Handle);
        var writeStatus = CollectionHelper.CreateNativeArray<int>(hashMapSize, CommonRwdAllocator.Handle);
        var readValues = CollectionHelper.CreateNativeArray<int>(hashMapSize, CommonRwdAllocator.Handle);

        var writeData = new HashMapWriteJob()
        {
            hashMap = hashMap.AsParallelWriter(),
            status = writeStatus,
            keyMod = hashMapSize,
        };

        var readData = new HashMapReadParallelForJob()
        {
            hashMap = hashMap,
            values = readValues,
            keyMod = writeData.keyMod,
        };

        var writeJob = writeData.Schedule();
        var readJob = readData.Schedule(hashMapSize, 1, writeJob);
        readJob.Complete();

        var missing = new Dictionary<int, bool>();
        for (int i = 0; i < hashMapSize; ++i)
        {
            if (writeStatus[i] == -2)
            {
                missing[i] = true;
                Assert.AreEqual(-1, readValues[i], "Job read a value from hash map which should not be there");
            }
            else
            {
                Assert.AreEqual(0, writeStatus[i], "Job failed to write value to hash map");
                Assert.AreEqual(i, readValues[i], "Job failed to read from hash map");
            }
        }
        Assert.AreEqual(hashMapSize - hashMapSize / 2, missing.Count, "Wrong indices written to hash map");

        hashMap.Dispose();
        writeStatus.Dispose();
        readValues.Dispose();
    }

    // All writes are folded onto 16 distinct keys via keyMod = 16, so every write to an
    // already-occupied key must report a duplicate-key failure.
    [Test]
    public void NativeParallelHashMap_Key_Collisions()
    {
        var hashMap = new NativeParallelHashMap<int, int>(hashMapSize, CommonRwdAllocator.Handle);
        var writeStatus = CollectionHelper.CreateNativeArray<int>(hashMapSize, CommonRwdAllocator.Handle);
        var readValues = CollectionHelper.CreateNativeArray<int>(hashMapSize, CommonRwdAllocator.Handle);

        var writeData = new HashMapWriteJob()
        {
            hashMap = hashMap.AsParallelWriter(),
            status = writeStatus,
            keyMod = 16,
        };

        var readData = new HashMapReadParallelForJob()
        {
            hashMap = hashMap,
            values = readValues,
            keyMod = writeData.keyMod,
        };

        var writeJob = writeData.Schedule();
        var readJob = readData.Schedule(hashMapSize, 1, writeJob);
        readJob.Complete();

        var missing = new Dictionary<int, bool>();
        for (int i = 0; i < hashMapSize; ++i)
        {
            if (writeStatus[i] == -1)
            {
                missing[i] = true;
                Assert.AreNotEqual(i, readValues[i], "Job read a value from hash map which should not be there");
            }
            else
            {
                Assert.AreEqual(0, writeStatus[i], "Job failed to write value to hash map");
                Assert.AreEqual(i, readValues[i], "Job failed to read from hash map");
            }
        }
        Assert.AreEqual(hashMapSize - writeData.keyMod, missing.Count, "Wrong indices written to hash map");

        hashMap.Dispose();
        writeStatus.Dispose();
        readValues.Dispose();
    }

    [BurstCompile(CompileSynchronously = true)]
    struct Clear : IJob
    {
        public NativeParallelHashMap<int, int> hashMap;

        public void Execute()
        {
            hashMap.Clear();
        }
    }

    // A write job may be scheduled against the map while a Clear job is in flight,
    // as long as the Clear job's handle is declared as a dependency.
    [Test]
    [TestRequiresCollectionChecks]
    public void NativeParallelHashMap_Clear_And_Write()
    {
        var hashMap = new NativeParallelHashMap<int, int>(hashMapSize / 2, CommonRwdAllocator.Handle);
        var writeStatus = CollectionHelper.CreateNativeArray<int>(hashMapSize, CommonRwdAllocator.Handle);

        var clearJob = new Clear
        {
            hashMap = hashMap
        };

        var clearJobHandle = clearJob.Schedule();

        var writeJob = new HashMapWriteJob
        {
            hashMap = hashMap.AsParallelWriter(),
            status = writeStatus,
            keyMod = hashMapSize,
        };

        var writeJobHandle = writeJob.Schedule(clearJobHandle);
        writeJobHandle.Complete();

        writeStatus.Dispose();
        hashMap.Dispose();
    }

    // Dispose(JobHandle) invalidates the container on the main thread immediately,
    // even before the scheduled dispose job has completed.
    [Test]
    public void NativeParallelHashMap_DisposeJob()
    {
        var container0 = new NativeParallelHashMap<int, int>(1, Allocator.Persistent);
        Assert.True(container0.IsCreated);
        Assert.DoesNotThrow(() => { container0.Add(0, 1); });
        Assert.True(container0.ContainsKey(0));

        var container1 = new NativeParallelMultiHashMap<int, int>(1, Allocator.Persistent);
        Assert.True(container1.IsCreated);
        Assert.DoesNotThrow(() => { container1.Add(1, 2); });
        Assert.True(container1.ContainsKey(1));

        var disposeJob0 = container0.Dispose(default);
        Assert.False(container0.IsCreated);
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        Assert.Throws<ObjectDisposedException>(
            () => { container0.ContainsKey(0); });
#endif

        var disposeJob = container1.Dispose(disposeJob0);
        Assert.False(container1.IsCreated);
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        Assert.Throws<ObjectDisposedException>(
            () => { container1.ContainsKey(1); });
#endif

        disposeJob.Complete();
    }
}
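HashMapWriteJob, HashMapReadParallelForJob, and hashMapSize come from the NativeParallelHashMapTestsFixture base class and are not defined in this file. For readers without the fixture at hand, the sketch below is a hedged reconstruction inferred only from how the tests consume those members (the 0/-1/-2 status codes, the keyMod wrap-around, and the -1 read sentinel); the struct and field names match the tests, but the bodies and the hashMapSize value are assumptions, not the package's actual implementation.

// Hypothetical sketch of the fixture these tests derive from, inferred from usage above.
using Unity.Collections;
using Unity.Jobs;

internal class NativeParallelHashMapTestsFixtureSketch
{
    // Assumed size; the tests only require it to be larger than the keyMod of 16.
    protected const int hashMapSize = 10 * 1024;

    // Deliberately not Burst-compiled in this sketch, since Burst does not support try/catch.
    public struct HashMapWriteJob : IJob
    {
        public NativeParallelHashMap<int, int>.ParallelWriter hashMap;
        public NativeArray<int> status;
        public int keyMod;

        public void Execute()
        {
            for (int i = 0; i < status.Length; i++)
            {
                status[i] = 0;
                try
                {
                    // -1: key i % keyMod is already present (Key_Collisions test).
                    if (!hashMap.TryAdd(i % keyMod, i))
                    {
                        status[i] = -1;
                    }
                }
                catch (System.InvalidOperationException)
                {
                    // -2: assumed capacity-exceeded exception when collection checks are
                    // enabled (Read_And_Write_Full test).
                    status[i] = -2;
                }
            }
        }
    }

    public struct HashMapReadParallelForJob : IJobParallelFor
    {
        [ReadOnly] public NativeParallelHashMap<int, int> hashMap;
        public NativeArray<int> values;
        public int keyMod;

        public void Execute(int i)
        {
            // -1 marks keys the write job never inserted.
            values[i] = -1;
            if (hashMap.TryGetValue(i % keyMod, out var value))
            {
                values[i] = value;
            }
        }
    }
}

Treat the sketch only as documentation of the contract the tests rely on: status 0 means the key was written, -1 means a duplicate key, -2 means the map ran out of capacity, and a -1 entry in the read values marks a key that never made it into the map.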