A game about forced loneliness, made by TACStudios
using Unity.PerformanceTesting.Runtime;
using System;
using System.Diagnostics;
using System.Collections.Generic;
using System.Reflection;

namespace Unity.PerformanceTesting.Benchmark
{
    /// <summary>
    /// An interface for performing measurements which works from the Performance Test Framework or from the Benchmark Framework.<para />
    /// This functionality is intended to be wrapped in an implementation specific to a type of benchmark comparison. See some of the included
    /// benchmarking implementations in <see cref="Unity.Collections.PerformanceTests"/> such as BenchmarkContainerRunner or BenchmarkAllocatorRunner, as well as
    /// the documentation in the Benchmark Framework repository for examples.
    /// </summary>
    public static class BenchmarkMeasure
    {
        // True while the Benchmark Framework drives measurement; false when the
        // Performance Test Framework (Unity Test Runner) is in control.
        internal static bool ForBenchmarks = false;

        // Results of the most recent benchmark-mode Measure() call.
        // Samples are recorded in seconds (SampleUnit.Second) at capture time.
        private static SampleGroup LastResultsInternal;

        // Bit flags (BenchmarkResults.kFlag*) noting irregularities of the last run,
        // e.g. parallel-job work stealing or disabled optimization.
        private static uint LastResultsFootnotes;

        /// <summary>
        /// Converts the most recently captured samples to <paramref name="unit"/>, refreshes the
        /// group's statistics, and packages them with the recorded footnote flags.
        /// </summary>
        /// <param name="unit">The unit the samples should be reported in</param>
        /// <param name="statistic">The statistic used for ranking in the returned results</param>
        /// <returns>The converted results of the last measurement</returns>
        internal static BenchmarkResults CalculateLastResults(SampleUnit unit, BenchmarkRankingStatistic statistic)
        {
            // Convert from the group's *current* unit rather than a hard-coded SampleUnit.Second.
            // Samples are mutated in place, so the hard-coded source unit would double-convert
            // already-converted samples if this is called more than once. On the first call
            // the group's unit is SampleUnit.Second, so behavior there is unchanged.
            for (int i = 0; i < LastResultsInternal.Samples.Count; i++)
                LastResultsInternal.Samples[i] = Utils.ConvertSample(LastResultsInternal.Unit, unit, LastResultsInternal.Samples[i]);
            LastResultsInternal.Unit = unit;
            Utils.UpdateStatistics(LastResultsInternal);

            return new BenchmarkResults(LastResultsInternal, statistic, LastResultsFootnotes);
        }

        /// <summary>
        /// Measure a set of samples for a given performance test. This functions correctly whether called through the Performance Test Framework
        /// by the Unity Test Runner, or if it is called through the Benchmark framework.<para />
        /// This must be called when a test will be run in a parallel job, as it marks the results with a note specifying the irregularity
        /// of parallel jobs due to work stealing.<para />
        /// When running a single threaded test (job or otherwise), use <see cref="Measure(Type, int, int, Action, Action, Action)"/>
        /// </summary>
        /// <param name="perfMeasureType">A type which contains a single performance test's implementation</param>
        /// <param name="warmup">The number of warm up runs prior to collecting sample data</param>
        /// <param name="measurements">The number of runs to collect sample data from</param>
        /// <param name="action">The specific per-sample method to run for measurement</param>
        /// <param name="setup">A per-sample setup method that will not be part of measurement</param>
        /// <param name="teardown">A per-sample teardown method that will not be part of measurement</param>
        public static void MeasureParallel(Type perfMeasureType, int warmup, int measurements, Action action, Action setup = null, Action teardown = null)
        {
            Measure(perfMeasureType, warmup, measurements, action, setup, teardown);
            // Measure() resets the footnotes, so the parallel-jobs flag must be added afterward.
            if (ForBenchmarks)
                LastResultsFootnotes |= BenchmarkResults.kFlagParallelJobs;
        }

        /// <summary>
        /// Measure a set of samples for a given performance test. This functions correctly whether called through the Performance Test Framework
        /// by the Unity Test Runner, or if it is called through the Benchmark framework.<para />
        /// This must not be called when a test will be run in a parallel job, as this does not mark the results with a note specifying the irregularity
        /// of parallel jobs due to work stealing.<para />
        /// When running a multithreaded test, use <see cref="MeasureParallel(Type, int, int, Action, Action, Action)"/>
        /// </summary>
        /// <param name="perfMeasureType">A type which contains a single performance test's implementation</param>
        /// <param name="warmup">The number of warm up runs prior to collecting sample data</param>
        /// <param name="measurements">The number of runs to collect sample data from</param>
        /// <param name="action">The specific per-sample method to run for measurement</param>
        /// <param name="setup">A per-sample setup method that will not be part of measurement</param>
        /// <param name="teardown">A per-sample teardown method that will not be part of measurement</param>
        public static void Measure(Type perfMeasureType, int warmup, int measurements, Action action, Action setup = null, Action teardown = null)
        {
            if (ForBenchmarks)
            {
                // Benchmark mode: time the action manually with a Stopwatch, capturing
                // raw per-run samples in seconds. Setup/teardown run outside the timed span.
                SampleGroup results = new SampleGroup(perfMeasureType.Name, SampleUnit.Second, false);
                results.Samples = new List<double>(measurements);

                Stopwatch stopwatch = Stopwatch.StartNew();

                for (int i = 0; i < warmup; i++)
                {
                    setup?.Invoke();
                    action();
                    teardown?.Invoke();
                }
                for (int i = 0; i < measurements; i++)
                {
                    setup?.Invoke();

                    stopwatch.Restart();
                    action();
                    results.Samples.Add(stopwatch.Elapsed.TotalSeconds);

                    teardown?.Invoke();
                }

                LastResultsInternal = results;
                LastResultsFootnotes = 0;

                // Check if NoOptimization is part of this measurement. MethodImplAttribute is not found in
                // CustomAttributes, and instead is a special-case found in MethodImplementationFlags.
                var methods = perfMeasureType.GetMethods(BindingFlags.Public | BindingFlags.Instance);
                foreach (var m in methods)
                {
                    if (m.MethodImplementationFlags.HasFlag(MethodImplAttributes.NoOptimization))
                    {
                        LastResultsFootnotes |= BenchmarkResults.kFlagNoOptimization;
                        break;
                    }
                }
            }
            else
            {
                // Test-runner mode: defer to the Performance Test Framework's measurement API.
                PerformanceTesting.Measure.Method(action)
                    .SampleGroup(perfMeasureType.Name)
                    .SetUp(setup)
                    .CleanUp(teardown)
                    .WarmupCount(warmup)
                    .MeasurementCount(measurements)
                    .Run();
            }
        }
    }
}