From bfdd33948f06821e916353b361d3fa637bcebc56 Mon Sep 17 00:00:00 2001
From: Ikiru Yoshizaki <3856350+guitarrapc@users.noreply.github.com>
Date: Tue, 13 Aug 2024 14:17:10 +0900
Subject: [PATCH] chore: measure client latency on PerformanceTest

---
 .../PerformanceTest.Client/IScenario.cs       |  4 +-
 .../PerformanceTestRunningContext.cs          | 62 ++++++++++++++++++-
 .../PerformanceTest.Client/Program.cs         | 45 +++++++++++++-
 .../Properties/launchSettings.json            | 34 +++++++++-
 .../StreamingHubLargePayloadScenario.cs       |  5 +-
 .../StreamingHubScenario.cs                   | 31 ++++++++--
 .../UnaryLargePayloadScenario.cs              |  5 +-
 .../PerformanceTest.Client/UnaryScenario.cs   | 10 ++-
 .../Reporting/DatadogMetricsRecorder.cs       | 42 +++----
 9 files changed, 185 insertions(+), 53 deletions(-)

diff --git a/perf/BenchmarkApp/PerformanceTest.Client/IScenario.cs b/perf/BenchmarkApp/PerformanceTest.Client/IScenario.cs
index 61725218e..83d46e17f 100644
--- a/perf/BenchmarkApp/PerformanceTest.Client/IScenario.cs
+++ b/perf/BenchmarkApp/PerformanceTest.Client/IScenario.cs
@@ -3,5 +3,5 @@
 public interface IScenario
 {
     ValueTask PrepareAsync(GrpcChannel channel);
-    ValueTask RunAsync(PerformanceTestRunningContext ctx, CancellationToken cancellationToken);
-}
\ No newline at end of file
+    ValueTask RunAsync(int connectionId, PerformanceTestRunningContext ctx, CancellationToken cancellationToken);
+}
diff --git a/perf/BenchmarkApp/PerformanceTest.Client/PerformanceTestRunningContext.cs b/perf/BenchmarkApp/PerformanceTest.Client/PerformanceTestRunningContext.cs
index 60dfa50e8..2ee7ac55e 100644
--- a/perf/BenchmarkApp/PerformanceTest.Client/PerformanceTestRunningContext.cs
+++ b/perf/BenchmarkApp/PerformanceTest.Client/PerformanceTestRunningContext.cs
@@ -5,10 +5,17 @@ public class PerformanceTestRunningContext
     int count;
     bool isRunning;
     Stopwatch stopwatch;
+    List<List<double>> latencyPerConnection = new();
+    List<object> locks = new();
 
-    public PerformanceTestRunningContext()
+    public PerformanceTestRunningContext(int connectionCount)
     {
         stopwatch = new Stopwatch();
+        for (var i = 0; i < connectionCount; i++)
+        {
+            latencyPerConnection.Add(new ());
+            locks.Add(new ());
+        }
     }
 
     public void Ready()
@@ -25,6 +32,14 @@ public void Increment()
         }
     }
 
+    public void Latency(int connectionId, TimeSpan duration)
+    {
+        lock (locks[connectionId])
+        {
+            latencyPerConnection[connectionId].Add(duration.TotalMilliseconds);
+        }
+    }
+
     public void Complete()
     {
         isRunning = false;
@@ -33,8 +48,49 @@ public void Complete()
 
     public PerformanceResult GetResult()
     {
-        return new PerformanceResult(count, count / (double)stopwatch.Elapsed.TotalSeconds, stopwatch.Elapsed);
+        var latency = MeasureLatency();
+        return new PerformanceResult(count, count / (double)stopwatch.Elapsed.TotalSeconds, stopwatch.Elapsed, latency);
+
+        Latency MeasureLatency()
+        {
+            var totalCount = 0;
+            var totalSum = 0.0;
+            for (var i = 0; i < latencyPerConnection.Count; i++)
+            {
+                for (var j = 0; j < latencyPerConnection[i].Count; j++)
+                {
+                    totalSum += latencyPerConnection[i][j];
+                    totalCount++;
+                }
+
+                latencyPerConnection[i].Sort();
+            }
+            var latencyMean = (totalCount != 0) ? totalSum / totalCount : totalSum;
+            var latencyAllConnection = new List<double>();
+            foreach (var connections in latencyPerConnection) latencyAllConnection.AddRange(connections);
+            var latency50p = GetPercentile(50, latencyAllConnection);
+            var latency75p = GetPercentile(75, latencyAllConnection);
+            var latency90p = GetPercentile(90, latencyAllConnection);
+            var latency99p = GetPercentile(99, latencyAllConnection);
+            var latencyMax = GetPercentile(100, latencyAllConnection);
+            var latency = new Latency(latencyMean, latency50p, latency75p, latency90p, latency99p, latencyMax);
+
+            return latency;
+        }
+        static double GetPercentile(int percent, IReadOnlyList<double> sortedData)
+        {
+            if (percent == 100)
+            {
+                return sortedData[^1];
+            }
+
+            var i = ((long)percent * sortedData.Count) / 100.0 + 0.5;
+            var fractionPart = i - Math.Truncate(i);
+
+            return (1.0 - fractionPart) * sortedData[(int)Math.Truncate(i) - 1] + fractionPart * sortedData[(int)Math.Ceiling(i) - 1];
+        }
     }
 }
 
-public record PerformanceResult(int TotalRequests, double RequestsPerSecond, TimeSpan Duration);
+public record PerformanceResult(int TotalRequests, double RequestsPerSecond, TimeSpan Duration, Latency Latency);
+public record Latency(double Mean, double P50, double P75, double P90, double P99, double Max);
diff --git a/perf/BenchmarkApp/PerformanceTest.Client/Program.cs b/perf/BenchmarkApp/PerformanceTest.Client/Program.cs
index 7bde4fccc..41637beb0 100644
--- a/perf/BenchmarkApp/PerformanceTest.Client/Program.cs
+++ b/perf/BenchmarkApp/PerformanceTest.Client/Program.cs
@@ -79,7 +79,7 @@ async Task Main(
         }
         var result = await RunScenarioAsync(scenario2, config, controlServiceClient);
         results.Add(result);
-        datadog.PutClientBenchmarkMetrics(scenario.ToString(), ApplicationInformation.Current, serialization.ToString(), result.RequestsPerSecond, result.Duration, result.TotalRequests);
+        datadog.PutClientBenchmarkMetrics(scenario.ToString(), ApplicationInformation.Current, serialization.ToString(), result);
     }
 }
 
@@ -121,6 +121,11 @@ async Task Main(
             writer.WriteLine($"Requests per Second: {result.RequestsPerSecond:0.000} rps");
             writer.WriteLine($"Duration : {result.Duration.TotalSeconds} s");
             writer.WriteLine($"Total Requests : {result.TotalRequests} requests");
+            writer.WriteLine($"Mean latency : {result.Latency.Mean:0.###} ms");
+            writer.WriteLine($"Max latency : {result.Latency.Max:0.###} ms");
+            writer.WriteLine($"p50 latency : {result.Latency.P50:0.###} ms");
+            writer.WriteLine($"p90 latency : {result.Latency.P90:0.###} ms");
+            writer.WriteLine($"p99 latency : {result.Latency.P99:0.###} ms");
             writer.WriteLine($"========================================");
         }
     }
@@ -164,7 +169,7 @@ async Task<PerformanceResult> RunScenarioAsync(ScenarioType scenario, ScenarioCo
         _ => throw new Exception($"Unknown Scenario: {scenario}"),
     };
 
-    var ctx = new PerformanceTestRunningContext();
+    var ctx = new PerformanceTestRunningContext(connectionCount: config.Channels);
    var cts = new CancellationTokenSource();
 
     WriteLog($"Starting scenario '{scenario}'...");
@@ -175,11 +180,12 @@ async Task<PerformanceResult> RunScenarioAsync(ScenarioType scenario, ScenarioCo
         for (var j = 0; j < config.Streams; j++)
         {
             if (config.Verbose) WriteLog($"Channel[{i}] - Stream[{j}]: Run");
+            var connectionId = i;
             tasks.Add(Task.Run(async () =>
             {
                 var scenarioRunner = scenarioFactory();
                 await scenarioRunner.PrepareAsync(channel);
-                await scenarioRunner.RunAsync(ctx, cts.Token);
+                await scenarioRunner.RunAsync(connectionId, ctx, cts.Token);
             }));
         }
     }
@@ -202,6 +208,12 @@ async Task<PerformanceResult> RunScenarioAsync(ScenarioType scenario, ScenarioCo
     WriteLog($"Requests per Second: {result.RequestsPerSecond:0.000} rps");
     WriteLog($"Duration: {result.Duration.TotalSeconds} s");
     WriteLog($"Total Requests: {result.TotalRequests} requests");
+    WriteLog($"Mean latency: {result.Latency.Mean:0.###} ms");
+    WriteLog($"Max latency: {result.Latency.Max:0.###} ms");
+    WriteLog($"p50 latency: {result.Latency.P50:0.###} ms");
+    WriteLog($"p75 latency: {result.Latency.P75:0.###} ms");
+    WriteLog($"p90 latency: {result.Latency.P90:0.###} ms");
+    WriteLog($"p99 latency: {result.Latency.P99:0.###} ms");
 
     return result;
 }
@@ -242,4 +254,31 @@ IEnumerable<ScenarioType> GetRunScenarios(ScenarioType scenario)
     };
 }
 
+public static class DatadogMetricsRecorderExtensions
+{
+    /// <summary>
+    /// Put Client Benchmark metrics to background.
+    /// </summary>
+    /// <param name="recorder"></param>
+    /// <param name="scenario"></param>
+    /// <param name="applicationInfo"></param>
+    /// <param name="serialization"></param>
+    /// <param name="result"></param>
+    public static void PutClientBenchmarkMetrics(this DatadogMetricsRecorder recorder, string scenario, ApplicationInformation applicationInfo, string serialization, PerformanceResult result)
+    {
+        var tags = MetricsTagCache.Get((scenario, applicationInfo, serialization), static x => [$"app:MagicOnion", $"magiconion_version:{x.applicationInfo.MagicOnionVersion}", $"grpcdotnet_version:{x.applicationInfo.GrpcNetVersion}", $"messagepack_version:{x.applicationInfo.MessagePackVersion}", $"memorypack_version:{x.applicationInfo.MemoryPackVersion}", $"process_arch:{x.applicationInfo.ProcessArchitecture}", $"process_count:{x.applicationInfo.ProcessorCount}", $"scenario:{x.scenario}", $"serialization:{x.serialization}"]);
+
+        // Don't want to await each put. Let's send it to queue and await when benchmark ends.
+        recorder.Record(recorder.SendAsync("benchmark.client.rps", result.RequestsPerSecond, DatadogMetricsType.Rate, tags, "request"));
+        recorder.Record(recorder.SendAsync("benchmark.client.duration", result.Duration.TotalSeconds, DatadogMetricsType.Gauge, tags, "second"));
+        recorder.Record(recorder.SendAsync("benchmark.client.total_requests", result.TotalRequests, DatadogMetricsType.Gauge, tags, "request"));
+        recorder.Record(recorder.SendAsync("benchmark.client.latency_mean", result.Latency.Mean, DatadogMetricsType.Gauge, tags, "millisecond"));
+        recorder.Record(recorder.SendAsync("benchmark.client.latency_max", result.Latency.Max, DatadogMetricsType.Gauge, tags, "millisecond"));
+        recorder.Record(recorder.SendAsync("benchmark.client.latency_p50", result.Latency.P50, DatadogMetricsType.Gauge, tags, "millisecond"));
+        recorder.Record(recorder.SendAsync("benchmark.client.latency_p75", result.Latency.P75, DatadogMetricsType.Gauge, tags, "millisecond"));
+        recorder.Record(recorder.SendAsync("benchmark.client.latency_p90", result.Latency.P90, DatadogMetricsType.Gauge, tags, "millisecond"));
+        recorder.Record(recorder.SendAsync("benchmark.client.latency_p99", result.Latency.P99, DatadogMetricsType.Gauge, tags, "millisecond"));
+    }
+}
+
 public record ScenarioConfiguration(string Url, int Warmup, int Duration, int Streams, int Channels, bool Verbose);
diff --git a/perf/BenchmarkApp/PerformanceTest.Client/Properties/launchSettings.json b/perf/BenchmarkApp/PerformanceTest.Client/Properties/launchSettings.json
index bce7788ff..837b5ceda 100644
--- a/perf/BenchmarkApp/PerformanceTest.Client/Properties/launchSettings.json
+++ b/perf/BenchmarkApp/PerformanceTest.Client/Properties/launchSettings.json
@@ -1,8 +1,40 @@
 {
   "profiles": {
-    "PerformanceTest.Client": {
+    "PerformanceTest.Client (PingpongStreamingHub 1x1)": {
+      "commandName": "Project",
+      "commandLineArgs": "-u http://localhost:5000 -s PingpongStreamingHub --channels 1 --streams 1"
+    },
+    "PerformanceTest.Client (PingpongStreamingHub 70)": {
+      "commandName": "Project",
+      "commandLineArgs": "-u http://localhost:5000 -s PingpongStreamingHub --channels 70 --streams 1"
+    },
+    "PerformanceTest.Client (PingpongStreamingHub 70x1)": {
+      "commandName": "Project",
+      "commandLineArgs": "-u http://localhost:5000 -s PingpongStreamingHub --channels 1 --streams 70"
+    },
+    "PerformanceTest.Client (StreamingHubValueTask 1x1)": {
       "commandName": "Project",
       "commandLineArgs": "-u http://localhost:5000 -s StreamingHubValueTask --channels 1 --streams 1"
+    },
+    "PerformanceTest.Client (StreamingHubValueTask 70)": {
+      "commandName": "Project",
+      "commandLineArgs": "-u http://localhost:5000 -s StreamingHubValueTask --channels 70 --streams 1"
+    },
+    "PerformanceTest.Client (StreamingHubValueTask 70x1)": {
+      "commandName": "Project",
+      "commandLineArgs": "-u http://localhost:5000 -s StreamingHubValueTask --channels 1 --streams 70"
+    },
+    "PerformanceTest.Client (Unary 1x1)": {
+      "commandName": "Project",
+      "commandLineArgs": "-u http://localhost:5000 -s Unary --channels 1 --streams 1"
+    },
+    "PerformanceTest.Client (Unary 70)": {
+      "commandName": "Project",
+      "commandLineArgs": "-u http://localhost:5000 -s Unary --channels 70 --streams 1"
+    },
+    "PerformanceTest.Client (Unary 70x1)": {
+      "commandName": "Project",
+      "commandLineArgs": "-u http://localhost:5000 -s Unary --channels 1 --streams 70"
     }
   }
 }
diff --git a/perf/BenchmarkApp/PerformanceTest.Client/StreamingHubLargePayloadScenario.cs b/perf/BenchmarkApp/PerformanceTest.Client/StreamingHubLargePayloadScenario.cs
index c0faaa0dc..8091c1e80 100644
--- a/perf/BenchmarkApp/PerformanceTest.Client/StreamingHubLargePayloadScenario.cs
+++ b/perf/BenchmarkApp/PerformanceTest.Client/StreamingHubLargePayloadScenario.cs
@@ -6,6 +6,7 @@ public abstract class StreamingHubLargePayloadScenarioBase : IScenario, IPerfTes
 {
     IPerfTestHub client = default!;
     readonly int payloadSize;
+    readonly TimeProvider timeProvider = TimeProvider.System;
 
     public StreamingHubLargePayloadScenarioBase(int payloadSize)
     {
@@ -17,14 +18,16 @@ public async ValueTask PrepareAsync(GrpcChannel channel)
         this.client = await StreamingHubClient.ConnectAsync<IPerfTestHub, IPerfTestHubReceiver>(channel, this);
     }
 
-    public async ValueTask RunAsync(PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
+    public async ValueTask RunAsync(int connectionId, PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
     {
         var data = new byte[payloadSize];
 
         while (!cancellationToken.IsCancellationRequested)
         {
+            var begin = timeProvider.GetTimestamp();
             _ = await client.CallMethodLargePayloadAsync("FooBarBaz🚀こんにちは世界", 123, 4567, 891011, data);
             ctx.Increment();
+            ctx.Latency(connectionId, timeProvider.GetElapsedTime(begin));
         }
     }
 }
diff --git a/perf/BenchmarkApp/PerformanceTest.Client/StreamingHubScenario.cs b/perf/BenchmarkApp/PerformanceTest.Client/StreamingHubScenario.cs
index 536629031..7b3670a18 100644
--- a/perf/BenchmarkApp/PerformanceTest.Client/StreamingHubScenario.cs
+++ b/perf/BenchmarkApp/PerformanceTest.Client/StreamingHubScenario.cs
@@ -5,18 +5,22 @@
 public class StreamingHubScenario : IScenario, IPerfTestHubReceiver
 {
     IPerfTestHub client = default!;
+    readonly TimeProvider timeProvider = TimeProvider.System;
+    long beginTimeStamp = default;
 
     public async ValueTask PrepareAsync(GrpcChannel channel)
     {
         this.client = await StreamingHubClient.ConnectAsync<IPerfTestHub, IPerfTestHubReceiver>(channel, this);
+        this.beginTimeStamp = timeProvider.GetTimestamp();
     }
 
-    public async ValueTask RunAsync(PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
+    public async ValueTask RunAsync(int connectionId, PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
     {
         while (!cancellationToken.IsCancellationRequested)
         {
             await client.CallMethodAsync("FooBarBaz🚀こんにちは世界", 123, 4567, 891011);
             ctx.Increment();
+            ctx.Latency(connectionId, timeProvider.GetElapsedTime(this.beginTimeStamp));
         }
     }
 }
@@ -24,18 +28,21 @@ public async ValueTask RunAsync(PerformanceTestRunningContext ctx, CancellationT
 public class StreamingHubValueTaskScenario : IScenario, IPerfTestHubReceiver
 {
     IPerfTestHub client = default!;
+    readonly TimeProvider timeProvider = TimeProvider.System;
 
     public async ValueTask PrepareAsync(GrpcChannel channel)
     {
         this.client = await StreamingHubClient.ConnectAsync<IPerfTestHub, IPerfTestHubReceiver>(channel, this);
     }
 
-    public async ValueTask RunAsync(PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
+    public async ValueTask RunAsync(int connectionId, PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
     {
         while (!cancellationToken.IsCancellationRequested)
         {
+            var begin = timeProvider.GetTimestamp();
             await client.CallMethodValueTaskAsync("FooBarBaz🚀こんにちは世界", 123, 4567, 891011);
             ctx.Increment();
+            ctx.Latency(connectionId, timeProvider.GetElapsedTime(begin));
         }
     }
 }
@@ -43,18 +50,21 @@ public async ValueTask RunAsync(PerformanceTestRunningContext ctx, CancellationT
 public class StreamingHubComplexScenario : IScenario, IPerfTestHubReceiver
 {
     IPerfTestHub client = default!;
+    readonly TimeProvider timeProvider = TimeProvider.System;
 
     public async ValueTask PrepareAsync(GrpcChannel channel)
     {
         this.client = await StreamingHubClient.ConnectAsync<IPerfTestHub, IPerfTestHubReceiver>(channel, this);
     }
 
-    public async ValueTask RunAsync(PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
+    public async ValueTask RunAsync(int connectionId, PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
     {
         while (!cancellationToken.IsCancellationRequested)
         {
+            var begin = timeProvider.GetTimestamp();
             await client.CallMethodComplexAsync("FooBarBaz🚀こんにちは世界", 123, 4567, 891011);
             ctx.Increment();
+            ctx.Latency(connectionId, timeProvider.GetElapsedTime(begin));
         }
     }
 }
@@ -62,18 +72,21 @@ public async ValueTask RunAsync(PerformanceTestRunningContext ctx, CancellationT
 public class StreamingHubComplexValueTaskScenario : IScenario, IPerfTestHubReceiver
 {
     IPerfTestHub client = default!;
+    readonly TimeProvider timeProvider = TimeProvider.System;
 
     public async ValueTask PrepareAsync(GrpcChannel channel)
     {
         this.client = await StreamingHubClient.ConnectAsync<IPerfTestHub, IPerfTestHubReceiver>(channel, this);
     }
 
-    public async ValueTask RunAsync(PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
+    public async ValueTask RunAsync(int connectionId, PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
     {
         while (!cancellationToken.IsCancellationRequested)
         {
+            var begin = timeProvider.GetTimestamp();
             await client.CallMethodComplexValueTaskAsync("FooBarBaz🚀こんにちは世界", 123, 4567, 891011);
             ctx.Increment();
+            ctx.Latency(connectionId, timeProvider.GetElapsedTime(begin));
         }
     }
 }
@@ -81,16 +94,18 @@ public async ValueTask RunAsync(PerformanceTestRunningContext ctx, CancellationT
 public class PingpongStreamingHubScenario : IScenario, IPerfTestHubReceiver
 {
     IPerfTestHub client = default!;
+    readonly TimeProvider timeProvider = TimeProvider.System;
 
     public async ValueTask PrepareAsync(GrpcChannel channel)
     {
         this.client = await StreamingHubClient.ConnectAsync<IPerfTestHub, IPerfTestHubReceiver>(channel, this);
     }
 
-    public async ValueTask RunAsync(PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
+    public async ValueTask RunAsync(int connectionId, PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
     {
         while (!cancellationToken.IsCancellationRequested)
         {
+            var begin = timeProvider.GetTimestamp();
             await client.PingpongAsync(new SimpleRequest
             {
                 Payload = new byte[100],
@@ -98,6 +113,7 @@ await client.PingpongAsync(new SimpleRequest
                 UseCache = false,
             });
             ctx.Increment();
+            ctx.Latency(connectionId, timeProvider.GetElapsedTime(begin));
         }
     }
 }
@@ -105,18 +121,21 @@ await client.PingpongAsync(new SimpleRequest
 public class PingpongCachedStreamingHubScenario : IScenario, IPerfTestHubReceiver
 {
     IPerfTestHub client = default!;
+    readonly TimeProvider timeProvider = TimeProvider.System;
 
     public async ValueTask PrepareAsync(GrpcChannel channel)
     {
         this.client = await StreamingHubClient.ConnectAsync<IPerfTestHub, IPerfTestHubReceiver>(channel, this);
     }
 
-    public async ValueTask RunAsync(PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
+    public async ValueTask RunAsync(int connectionId, PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
     {
         while (!cancellationToken.IsCancellationRequested)
         {
+            var begin = timeProvider.GetTimestamp();
             await client.PingpongAsync(SimpleRequest.Cached);
             ctx.Increment();
+            ctx.Latency(connectionId, timeProvider.GetElapsedTime(begin));
         }
     }
 }
diff --git a/perf/BenchmarkApp/PerformanceTest.Client/UnaryLargePayloadScenario.cs b/perf/BenchmarkApp/PerformanceTest.Client/UnaryLargePayloadScenario.cs
index 8a5a47e6c..da63c7663 100644
--- a/perf/BenchmarkApp/PerformanceTest.Client/UnaryLargePayloadScenario.cs
+++ b/perf/BenchmarkApp/PerformanceTest.Client/UnaryLargePayloadScenario.cs
@@ -6,6 +6,7 @@ public abstract class UnaryLargePayloadScenarioBase : IScenario
 {
     IPerfTestService client = default!;
     readonly int payloadSize;
+    readonly TimeProvider timeProvider = TimeProvider.System;
 
     public UnaryLargePayloadScenarioBase(int payloadSize)
     {
@@ -18,14 +19,16 @@ public ValueTask PrepareAsync(GrpcChannel channel)
         return ValueTask.CompletedTask;
     }
 
-    public async ValueTask RunAsync(PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
+    public async ValueTask RunAsync(int connectionId, PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
     {
         var data = new byte[payloadSize];
 
         while (!cancellationToken.IsCancellationRequested)
         {
+            var begin = timeProvider.GetTimestamp();
             _ = await client.UnaryLargePayloadAsync("FooBarBaz🚀こんにちは世界", 123, 4567, 891011, data);
             ctx.Increment();
+            ctx.Latency(connectionId, timeProvider.GetElapsedTime(begin));
         }
     }
 }
diff --git a/perf/BenchmarkApp/PerformanceTest.Client/UnaryScenario.cs b/perf/BenchmarkApp/PerformanceTest.Client/UnaryScenario.cs
index 9deb97cbb..09654d494 100644
--- a/perf/BenchmarkApp/PerformanceTest.Client/UnaryScenario.cs
+++ b/perf/BenchmarkApp/PerformanceTest.Client/UnaryScenario.cs
@@ -5,6 +5,7 @@
 public class UnaryScenario : IScenario
 {
     IPerfTestService client = default!;
+    readonly TimeProvider timeProvider = TimeProvider.System;
 
     public ValueTask PrepareAsync(GrpcChannel channel)
     {
@@ -12,12 +13,14 @@ public ValueTask PrepareAsync(GrpcChannel channel)
         return ValueTask.CompletedTask;
     }
 
-    public async ValueTask RunAsync(PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
+    public async ValueTask RunAsync(int connectionId, PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
     {
         while (!cancellationToken.IsCancellationRequested)
         {
+            var begin = timeProvider.GetTimestamp();
             await client.UnaryArgDynamicArgumentTupleReturnValue("FooBarBaz🚀こんにちは世界", 123, 4567, 891011);
             ctx.Increment();
+            ctx.Latency(connectionId, timeProvider.GetElapsedTime(begin));
         }
     }
 }
@@ -25,6 +28,7 @@ public async ValueTask RunAsync(PerformanceTestRunningContext ctx, CancellationT
 public class UnaryComplexScenario : IScenario
 {
     IPerfTestService client = default!;
+    readonly TimeProvider timeProvider = TimeProvider.System;
 
     public ValueTask PrepareAsync(GrpcChannel channel)
     {
@@ -32,12 +36,14 @@ public ValueTask PrepareAsync(GrpcChannel channel)
         return ValueTask.CompletedTask;
     }
 
-    public async ValueTask RunAsync(PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
+    public async ValueTask RunAsync(int connectionId, PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
     {
         while (!cancellationToken.IsCancellationRequested)
         {
+            var begin = timeProvider.GetTimestamp();
             await client.UnaryComplexAsync("FooBarBaz🚀こんにちは世界", 123, 4567, 891011);
             ctx.Increment();
+            ctx.Latency(connectionId, timeProvider.GetElapsedTime(begin));
         }
     }
 }
diff --git a/perf/BenchmarkApp/PerformanceTest.Shared/Reporting/DatadogMetricsRecorder.cs b/perf/BenchmarkApp/PerformanceTest.Shared/Reporting/DatadogMetricsRecorder.cs
index 7f8efe971..89840b99a 100644
--- a/perf/BenchmarkApp/PerformanceTest.Shared/Reporting/DatadogMetricsRecorder.cs
+++ b/perf/BenchmarkApp/PerformanceTest.Shared/Reporting/DatadogMetricsRecorder.cs
@@ -17,7 +17,7 @@ public class DatadogMetricsRecorder
     private readonly JsonSerializerOptions jsonSerializerOptions;
     private readonly TimeProvider timeProvider = TimeProvider.System;
     private readonly HttpClient client;
-    private readonly ConcurrentQueue<Task> reservations;
+    private readonly ConcurrentQueue<Task> backgroundQueue;
 
     private DatadogMetricsRecorder(string apiKey)
     {
@@ -32,7 +32,7 @@ private DatadogMetricsRecorder(string apiKey)
         client.DefaultRequestHeaders.Add("Accept", "application/json");
         client.DefaultRequestHeaders.Add("DD-API-KEY", apiKey);
 
-        reservations = new ConcurrentQueue<Task>();
+        backgroundQueue = new ConcurrentQueue<Task>();
     }
 
     public static DatadogMetricsRecorder Create(bool validate = false)
@@ -46,12 +46,12 @@ public static DatadogMetricsRecorder Create(bool validate = false)
     }
 
     /// <summary>
-    /// Pass to background
+    /// Pass task to background
     /// </summary>
-    /// <param name="reserve"></param>
-    public void Record(Task reserve)
+    /// <param name="task"></param>
+    public void Record(Task task)
     {
-        reservations.Enqueue(reserve);
+        backgroundQueue.Enqueue(task);
     }
 
     /// <summary>
@@ -61,10 +61,10 @@ public void Record(Task reserve)
     public async Task WaitSaveAsync()
     {
         // sequential handling to avoid Datadog API quota
-        while (reservations.TryDequeue(out var task))
+        while (backgroundQueue.TryDequeue(out var task))
         {
             await task;
-            if (reservations.Count == 0)
+            if (backgroundQueue.Count == 0)
             {
                 break;
             }
@@ -116,32 +116,6 @@ public async Task SendAsync(string metricsName, double value, DatadogMetricsType
     }
 }
 
-/// <summary>
-/// see: https://docs.datadoghq.com/api/latest/metrics/?code-lang=go#submit-metrics
-/// </summary>
-public static class DatadogMetricsRecorderExtensions
-{
-    /// <summary>
-    /// Put Client Benchmark metrics to background.
-    /// </summary>
-    /// <param name="recorder"></param>
-    /// <param name="scenario"></param>
-    /// <param name="applicationInfo"></param>
-    /// <param name="serialization"></param>
-    /// <param name="requestsPerSecond"></param>
-    /// <param name="duration"></param>
-    /// <param name="totalRequests"></param>
-    public static void PutClientBenchmarkMetrics(this DatadogMetricsRecorder recorder, string scenario, ApplicationInformation applicationInfo, string serialization, double requestsPerSecond, TimeSpan duration, int totalRequests)
-    {
-        var tags = MetricsTagCache.Get((scenario, applicationInfo, serialization), static x => [$"app:MagicOnion", $"magiconion_version:{x.applicationInfo.MagicOnionVersion}", $"grpcdotnet_version:{x.applicationInfo.GrpcNetVersion}", $"messagepack_version:{x.applicationInfo.MessagePackVersion}", $"memorypack_version:{x.applicationInfo.MemoryPackVersion}", $"process_arch:{x.applicationInfo.ProcessArchitecture}", $"process_count:{x.applicationInfo.ProcessorCount}", $"scenario:{x.scenario}", $"serialization:{x.serialization}"]);
-
-        // Don't want to await each put. Let's send it to queue and await when benchmark ends.
-        recorder.Record(recorder.SendAsync("benchmark.client.rps", requestsPerSecond, DatadogMetricsType.Rate, tags, "request"));
-        recorder.Record(recorder.SendAsync("benchmark.client.duration", duration.TotalSeconds, DatadogMetricsType.Gauge, tags, "second"));
-        recorder.Record(recorder.SendAsync("benchmark.client.total_requests", totalRequests, DatadogMetricsType.Gauge, tags, "request"));
-    }
-}
-
 public class DatadogMetricsRecord
 {
     [JsonPropertyName("series")]