using System;
using System.Collections.Generic;
using System.Data;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using BitFaster.Caching.Lru;
using CsvHelper;
using MathNet.Numerics.Distributions;

namespace BitFaster.Caching.ThroughputAnalysis
{
    class Program
    {
        const double s = 0.86;          // Zipf distribution skew
        const int n = 500;              // number of distinct keys in the distribution
        const int capacity = 50;        // cache capacity (10% of the key space)
        const int maxThreads = 52;
        const int sampleCount = 2000;   // number of Zipf samples generated
        const int repeatCount = 200;    // times each thread replays the sample set

        private static int[] samples = new int[sampleCount];

        static void Main(string[] args)
        {
            ThreadPool.SetMaxThreads(maxThreads, maxThreads);

            Console.WriteLine("Generating input distribution...");
            samples = new int[sampleCount];
            Zipf.Samples(samples, s, n);

            int[] threadCount = Enumerable.Range(1, maxThreads).ToArray();

            // Desired output:
            // Class        1    2    3    4    5
            // Classic      5    6    7    7    8
            // Concurrent   5    6    7    7    8
            DataTable resultTable = new DataTable();
            resultTable.Clear();
            resultTable.Columns.Add("Class");
            foreach (var tc in threadCount)
            {
                resultTable.Columns.Add(tc.ToString());
            }

            DataRow concurrentLru = resultTable.NewRow();
            DataRow classicLru = resultTable.NewRow();
            concurrentLru["Class"] = "concurrentLru";
            classicLru["Class"] = "classicLru";

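            // For each thread count, run warmup + measured iterations against each cache and record the mean throughput.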
            foreach (int tc in threadCount)
            {
                const int warmup = 3;
                const int runs = 6;
                double[] results = new double[warmup + runs];

                for (int i = 0; i < warmup + runs; i++)
                {
                    results[i] = MeasureThroughput(new ConcurrentLru<int, int>(tc, capacity, EqualityComparer<int>.Default), tc);
                }
                double avg = AverageLast(results, runs) / 1000000;
                Console.WriteLine($"ConcurrLru ({tc}) {avg} million ops/sec");
                concurrentLru[tc.ToString()] = avg.ToString();

                for (int i = 0; i < warmup + runs; i++)
                {
                    results[i] = MeasureThroughput(new ClassicLru<int, int>(tc, capacity, EqualityComparer<int>.Default), tc);
                }
                avg = AverageLast(results, runs) / 1000000;
                Console.WriteLine($"ClassicLru ({tc}) {avg} million ops/sec");
                classicLru[tc.ToString()] = avg.ToString();
            }

            resultTable.Rows.Add(concurrentLru);
            resultTable.Rows.Add(classicLru);

            ExportCsv(resultTable);

            Console.WriteLine("Done.");
        }

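        // Returns the mean of the last 'count' entries, discarding the warmup results at the start of the array.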
        private static double AverageLast(double[] results, int count)
        {
            double result = 0;
            for (int i = results.Length - count; i < results.Length; i++)
            {
                result += results[i];
            }

            return result / count;
        }

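        // Runs the Test workload concurrently on 'threadCount' tasks and returns the aggregate throughput in ops/sec.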
        private static double MeasureThroughput(ICache<int, int> cache, int threadCount)
        {
            var tasks = new Task[threadCount];
            var sw = Stopwatch.StartNew();

            for (int i = 0; i < threadCount; i++)
            {
                tasks[i] = Task.Run(() => Test(cache));
            }

            Task.WaitAll(tasks);

            sw.Stop();

            // throughput = ops/sec
            return (threadCount * sampleCount * repeatCount) / sw.Elapsed.TotalSeconds;
        }

        private static void Test(ICache<int, int> cache)
        {
            // The cache has capacity 50, while the Zipf distribution covers 500 distinct keys (2000 samples),
            // so the cache can hold only a fraction of the key space.
            // Each thread looks up every sample repeatCount (200) times, for 400,000 GetOrAdds per thread.
            Func<int, int> func = x => x;

            for (int j = 0; j < repeatCount; j++)
            {
                for (int i = 0; i < sampleCount; i++)
                {
                    cache.GetOrAdd(samples[i], func);
                }
            }
        }

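        // Writes the result table (one row per cache class, one column per thread count) to Results.csv.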
        public static void ExportCsv(DataTable results)
        {
            using (var textWriter = File.CreateText(@"Results.csv"))
            using (var csv = new CsvWriter(textWriter, CultureInfo.InvariantCulture))
            {
                foreach (DataColumn column in results.Columns)
                {
                    csv.WriteField(column.ColumnName);
                }
                csv.NextRecord();

                foreach (DataRow row in results.Rows)
                {
                    for (var i = 0; i < results.Columns.Count; i++)
                    {
                        csv.WriteField(row[i]);
                    }
                    csv.NextRecord();
                }
            }
        }
    }
}