Commit bfdd339

chore: measure client latency on PerformanceTest

guitarrapc committed Aug 13, 2024
1 parent da27283 commit bfdd339
Showing 9 changed files with 185 additions and 53 deletions.
4 changes: 2 additions & 2 deletions perf/BenchmarkApp/PerformanceTest.Client/IScenario.cs
@@ -3,5 +3,5 @@
public interface IScenario
{
ValueTask PrepareAsync(GrpcChannel channel);
ValueTask RunAsync(PerformanceTestRunningContext ctx, CancellationToken cancellationToken);
}
ValueTask RunAsync(int connectionId, PerformanceTestRunningContext ctx, CancellationToken cancellationToken);
}
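
For orientation, here is a minimal sketch of a scenario written against the updated interface, assuming the project's usual usings (for example Grpc.Net.Client). The DoOneRequestAsync call is a hypothetical placeholder rather than a real MagicOnion API; only the connectionId and context plumbing mirrors the real scenarios further down in this commit.

    // Hypothetical scenario shape for the new IScenario signature (illustration only).
    public class ExampleScenario : IScenario
    {
        public ValueTask PrepareAsync(GrpcChannel channel)
        {
            // A real scenario would create its MagicOnion client or connect its hub here.
            return ValueTask.CompletedTask;
        }

        public async ValueTask RunAsync(int connectionId, PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
        {
            var timeProvider = TimeProvider.System;
            while (!cancellationToken.IsCancellationRequested)
            {
                var begin = timeProvider.GetTimestamp();
                await DoOneRequestAsync();                                      // placeholder for the actual RPC
                ctx.Increment();                                                // count the completed request
                ctx.Latency(connectionId, timeProvider.GetElapsedTime(begin));  // record its latency in this connection's bucket
            }
        }

        static ValueTask DoOneRequestAsync() => ValueTask.CompletedTask;
    }
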
@@ -5,10 +5,17 @@ public class PerformanceTestRunningContext
int count;
bool isRunning;
Stopwatch stopwatch;
List<List<double>> latencyPerConnection = new();
List<object> locks = new();

public PerformanceTestRunningContext()
public PerformanceTestRunningContext(int connectionCount)
{
stopwatch = new Stopwatch();
for (var i = 0; i < connectionCount; i++)
{
latencyPerConnection.Add(new ());
locks.Add(new ());
}
}

public void Ready()
@@ -25,6 +32,14 @@ public void Increment()
}
}

public void Latency(int connectionId, TimeSpan duration)
{
lock (locks[connectionId])
{
latencyPerConnection[connectionId].Add(duration.TotalMilliseconds);
}
}

public void Complete()
{
isRunning = false;
@@ -33,8 +48,49 @@ public void Complete()

public PerformanceResult GetResult()
{
return new PerformanceResult(count, count / (double)stopwatch.Elapsed.TotalSeconds, stopwatch.Elapsed);
var latency = MeasureLatency();
return new PerformanceResult(count, count / (double)stopwatch.Elapsed.TotalSeconds, stopwatch.Elapsed, latency);

Latency MeasureLatency()
{
var totalCount = 0;
var totalSum = 0.0;
for (var i = 0; i < latencyPerConnection.Count; i++)
{
for (var j = 0; j < latencyPerConnection[i].Count; j++)
{
totalSum += latencyPerConnection[i][j];
totalCount++;
}

latencyPerConnection[i].Sort();
}
var latencyMean = (totalCount != 0) ? totalSum / totalCount : totalSum;
var latencyAllConnection = new List<double>();
foreach (var connections in latencyPerConnection) latencyAllConnection.AddRange(connections);
latencyAllConnection.Sort(); // the percentile lookup below indexes into a globally sorted list, not just per-connection sorted sublists
var latency50p = GetPercentile(50, latencyAllConnection);
var latency75p = GetPercentile(75, latencyAllConnection);
var latency90p = GetPercentile(90, latencyAllConnection);
var latency99p = GetPercentile(99, latencyAllConnection);
var latencyMax = GetPercentile(100, latencyAllConnection);
var latency = new Latency(latencyMean, latency50p, latency75p, latency90p, latency99p, latencyMax);

return latency;
}
static double GetPercentile(int percent, IReadOnlyList<double> sortedData)
{
if (percent == 100)
{
return sortedData[^1];
}

var i = ((long)percent * sortedData.Count) / 100.0 + 0.5;
var fractionPart = i - Math.Truncate(i);

return (1.0 - fractionPart) * sortedData[(int)Math.Truncate(i) - 1] + fractionPart * sortedData[(int)Math.Ceiling(i) - 1];
}
}
}

public record PerformanceResult(int TotalRequests, double RequestsPerSecond, TimeSpan Duration);
public record PerformanceResult(int TotalRequests, double RequestsPerSecond, TimeSpan Duration, Latency Latency);
public record Latency(double Mean, double P50, double P75, double P90, double P99, double Max);
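
To make the percentile interpolation concrete: GetPercentile computes a fractional, one-based rank and linearly interpolates between the two surrounding samples. Since it is a local function inside GetResult, the snippet below restates the same formula as a standalone helper purely for illustration, evaluated against a hypothetical sorted sample of 1 ms to 100 ms; like the real code path, it assumes a non-empty, reasonably large, ascending-sorted list.

    // Standalone restatement of the interpolation used by GetPercentile (illustration only).
    static double Percentile(int percent, IReadOnlyList<double> sortedData)
    {
        if (percent == 100) return sortedData[^1];

        var i = ((long)percent * sortedData.Count) / 100.0 + 0.5; // fractional, 1-based rank
        var fraction = i - Math.Truncate(i);
        return (1.0 - fraction) * sortedData[(int)Math.Truncate(i) - 1]
             + fraction * sortedData[(int)Math.Ceiling(i) - 1];
    }

    // For sortedData = 1.0, 2.0, ..., 100.0 (milliseconds):
    //   Percentile(50, data)  -> rank 50.5 -> 0.5 * 50 + 0.5 * 51  = 50.5 ms
    //   Percentile(99, data)  -> rank 99.5 -> 0.5 * 99 + 0.5 * 100 = 99.5 ms
    //   Percentile(100, data) -> last element                      = 100 ms

Samples are collected into one list per connection, each guarded by its own lock, so streams on different channels never contend on a shared lock; GetResult then merges and sorts them before taking percentiles.
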
45 changes: 42 additions & 3 deletions perf/BenchmarkApp/PerformanceTest.Client/Program.cs
@@ -79,7 +79,7 @@ async Task Main(
}
var result = await RunScenarioAsync(scenario2, config, controlServiceClient);
results.Add(result);
datadog.PutClientBenchmarkMetrics(scenario.ToString(), ApplicationInformation.Current, serialization.ToString(), result.RequestsPerSecond, result.Duration, result.TotalRequests);
datadog.PutClientBenchmarkMetrics(scenario.ToString(), ApplicationInformation.Current, serialization.ToString(), result);
}
}

@@ -121,6 +121,11 @@ async Task Main(
writer.WriteLine($"Requests per Second: {result.RequestsPerSecond:0.000} rps");
writer.WriteLine($"Duration : {result.Duration.TotalSeconds} s");
writer.WriteLine($"Total Requests : {result.TotalRequests} requests");
writer.WriteLine($"Mean latency : {result.Latency.Mean:0.###} ms");
writer.WriteLine($"Max latency : {result.Latency.Max:0.###} ms");
writer.WriteLine($"p50 latency : {result.Latency.P50:0.###} ms");
writer.WriteLine($"p90 latency : {result.Latency.P90:0.###} ms");
writer.WriteLine($"p99 latency : {result.Latency.P99:0.###} ms");
writer.WriteLine($"========================================");
}
}
@@ -164,7 +169,7 @@ async Task<PerformanceResult> RunScenarioAsync(ScenarioType scenario, ScenarioCo
_ => throw new Exception($"Unknown Scenario: {scenario}"),
};

var ctx = new PerformanceTestRunningContext();
var ctx = new PerformanceTestRunningContext(connectionCount: config.Channels);
var cts = new CancellationTokenSource();

WriteLog($"Starting scenario '{scenario}'...");
@@ -175,11 +180,12 @@ async Task<PerformanceResult> RunScenarioAsync(ScenarioType scenario, ScenarioCo
for (var j = 0; j < config.Streams; j++)
{
if (config.Verbose) WriteLog($"Channel[{i}] - Stream[{j}]: Run");
var connectionId = i;
tasks.Add(Task.Run(async () =>
{
var scenarioRunner = scenarioFactory();
await scenarioRunner.PrepareAsync(channel);
await scenarioRunner.RunAsync(ctx, cts.Token);
await scenarioRunner.RunAsync(connectionId, ctx, cts.Token);
}));
}
}
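
One detail worth calling out in the loop above: the channel index is copied into a local (var connectionId = i;) before being captured by the Task.Run lambda. The lambda runs later, and a C# for-loop variable is a single variable shared across iterations, so capturing i directly would let every deferred task observe whatever value i holds when it finally runs. A tiny standalone illustration of the difference, not code from this commit:

    // Why the loop counter is copied before being captured by a deferred lambda.
    var tasks = new List<Task>();
    for (var i = 0; i < 3; i++)
    {
        var copy = i; // a fresh variable per iteration; each closure gets its own value
        tasks.Add(Task.Run(() => Console.WriteLine($"copy = {copy}"))); // prints 0, 1, 2 (in some order)
        // Capturing i directly would print whatever i is when each task actually runs,
        // typically 3 for all three tasks once the loop has finished.
    }
    await Task.WhenAll(tasks);
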
@@ -202,6 +208,12 @@ async Task<PerformanceResult> RunScenarioAsync(ScenarioType scenario, ScenarioCo
WriteLog($"Requests per Second: {result.RequestsPerSecond:0.000} rps");
WriteLog($"Duration: {result.Duration.TotalSeconds} s");
WriteLog($"Total Requests: {result.TotalRequests} requests");
WriteLog($"Mean latency: {result.Latency.Mean:0.###} ms");
WriteLog($"Max latency: {result.Latency.Max:0.###} ms");
WriteLog($"p50 latency: {result.Latency.P50:0.###} ms");
WriteLog($"p75 latency: {result.Latency.P75:0.###} ms");
WriteLog($"p90 latency: {result.Latency.P90:0.###} ms");
WriteLog($"p99 latency: {result.Latency.P99:0.###} ms");

return result;
}
@@ -242,4 +254,31 @@ IEnumerable<ScenarioType> GetRunScenarios(ScenarioType scenario)
};
}

public static class DatadogMetricsRecorderExtensions
{
/// <summary>
/// Queues client benchmark metrics for background submission to Datadog.
/// </summary>
/// <param name="recorder">Recorder that queues the metric submissions.</param>
/// <param name="scenario">Name of the benchmark scenario, used as a metric tag.</param>
/// <param name="applicationInfo">Runtime and package version information, used as metric tags.</param>
/// <param name="serialization">Serialization format name, used as a metric tag.</param>
/// <param name="result">Benchmark result containing throughput, duration, and latency figures.</param>
public static void PutClientBenchmarkMetrics(this DatadogMetricsRecorder recorder, string scenario, ApplicationInformation applicationInfo, string serialization, PerformanceResult result)
{
var tags = MetricsTagCache.Get((scenario, applicationInfo, serialization), static x => [$"app:MagicOnion", $"magiconion_version:{x.applicationInfo.MagicOnionVersion}", $"grpcdotnet_version:{x.applicationInfo.GrpcNetVersion}", $"messagepack_version:{x.applicationInfo.MessagePackVersion}", $"memorypack_version:{x.applicationInfo.MemoryPackVersion}", $"process_arch:{x.applicationInfo.ProcessArchitecture}", $"process_count:{x.applicationInfo.ProcessorCount}", $"scenario:{x.scenario}", $"serialization:{x.serialization}"]);

// Don't await each put individually; enqueue the sends and await them when the benchmark ends.
recorder.Record(recorder.SendAsync("benchmark.client.rps", result.RequestsPerSecond, DatadogMetricsType.Rate, tags, "request"));
recorder.Record(recorder.SendAsync("benchmark.client.duration", result.Duration.TotalSeconds, DatadogMetricsType.Gauge, tags, "second"));
recorder.Record(recorder.SendAsync("benchmark.client.total_requests", result.TotalRequests, DatadogMetricsType.Gauge, tags, "request"));
recorder.Record(recorder.SendAsync("benchmark.client.latency_mean", result.Latency.Mean, DatadogMetricsType.Gauge, tags, "millisecond"));
recorder.Record(recorder.SendAsync("benchmark.client.latency_max", result.Latency.Max, DatadogMetricsType.Gauge, tags, "millisecond"));
recorder.Record(recorder.SendAsync("benchmark.client.latency_p50", result.Latency.P50, DatadogMetricsType.Gauge, tags, "millisecond"));
recorder.Record(recorder.SendAsync("benchmark.client.latency_p75", result.Latency.P75, DatadogMetricsType.Gauge, tags, "millisecond"));
recorder.Record(recorder.SendAsync("benchmark.client.latency_p90", result.Latency.P90, DatadogMetricsType.Gauge, tags, "millisecond"));
recorder.Record(recorder.SendAsync("benchmark.client.latency_p99", result.Latency.P99, DatadogMetricsType.Gauge, tags, "millisecond"));
}
}

public record ScenarioConfiguration(string Url, int Warmup, int Duration, int Streams, int Channels, bool Verbose);
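
Reading the fan-out parameters together: the context is created with one latency bucket per channel (connectionCount: config.Channels), every stream opened on a channel records into that channel's bucket via the shared connectionId, and the total number of concurrent RunAsync loops is Channels multiplied by Streams. A small sketch under those assumptions; the Url, Warmup, and Duration values are made up for illustration, only the record shape comes from this file:

    // Hypothetical configuration mirroring the "--channels 70 --streams 1" launch profile below.
    var config = new ScenarioConfiguration(
        Url: "http://localhost:5000",
        Warmup: 10,      // value made up for illustration
        Duration: 30,    // value made up for illustration
        Streams: 1,
        Channels: 70,
        Verbose: false);

    // One latency bucket per channel/connection...
    var ctx = new PerformanceTestRunningContext(connectionCount: config.Channels);

    // ...and Channels * Streams concurrent request loops in total.
    var totalLoops = config.Channels * config.Streams; // 70
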
@@ -1,8 +1,40 @@
{
"profiles": {
"PerformanceTest.Client": {
"PerformanceTest.Client (PingpongStreamingHub 1x1)": {
"commandName": "Project",
"commandLineArgs": "-u http://localhost:5000 -s PingpongStreamingHub --channels 1 --streams 1"
},
"PerformanceTest.Client (PingpongStreamingHub 70)": {
"commandName": "Project",
"commandLineArgs": "-u http://localhost:5000 -s PingpongStreamingHub --channels 70 --streams 1"
},
"PerformanceTest.Client (PingpongStreamingHub 70x1)": {
"commandName": "Project",
"commandLineArgs": "-u http://localhost:5000 -s PingpongStreamingHub --channels 1 --streams 70"
},
"PerformanceTest.Client (StreamingHubValueTask 1x1)": {
"commandName": "Project",
"commandLineArgs": "-u http://localhost:5000 -s StreamingHubValueTask --channels 1 --streams 1"
},
"PerformanceTest.Client (StreamingHubValueTask 70)": {
"commandName": "Project",
"commandLineArgs": "-u http://localhost:5000 -s StreamingHubValueTask --channels 70 --streams 1"
},
"PerformanceTest.Client (StreamingHubValueTask 70x1)": {
"commandName": "Project",
"commandLineArgs": "-u http://localhost:5000 -s StreamingHubValueTask --channels 1 --streams 70"
},
"PerformanceTest.Client (Unary 1x1)": {
"commandName": "Project",
"commandLineArgs": "-u http://localhost:5000 -s Unary --channels 1 --streams 1"
},
"PerformanceTest.Client (Unary 70)": {
"commandName": "Project",
"commandLineArgs": "-u http://localhost:5000 -s Unary --channels 70 --streams 1"
},
"PerformanceTest.Client (Unary 70x1)": {
"commandName": "Project",
"commandLineArgs": "-u http://localhost:5000 -s Unary --channels 1 --streams 70"
}
}
}
@@ -6,6 +6,7 @@ public abstract class StreamingHubLargePayloadScenarioBase : IScenario, IPerfTes
{
IPerfTestHub client = default!;
readonly int payloadSize;
readonly TimeProvider timeProvider = TimeProvider.System;

public StreamingHubLargePayloadScenarioBase(int payloadSize)
{
@@ -17,14 +18,16 @@ public async ValueTask PrepareAsync(GrpcChannel channel)
this.client = await StreamingHubClient.ConnectAsync<IPerfTestHub, IPerfTestHubReceiver>(channel, this);
}

public async ValueTask RunAsync(PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
public async ValueTask RunAsync(int connectionId, PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
{
var data = new byte[payloadSize];

while (!cancellationToken.IsCancellationRequested)
{
var begin = timeProvider.GetTimestamp();
_ = await client.CallMethodLargePayloadAsync("FooBarBaz🚀こんにちは世界", 123, 4567, 891011, data);
ctx.Increment();
ctx.Latency(connectionId, timeProvider.GetElapsedTime(begin));
}
}
}
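
The begin/GetElapsedTime pair used above is the allocation-free timing idiom from TimeProvider (available since .NET 8): GetTimestamp() reads the high-resolution clock as a raw tick count and GetElapsedTime(begin) converts the tick delta into a TimeSpan, so no Stopwatch instance is created per call. A minimal self-contained sketch, with Task.Delay standing in for an awaited RPC:

    // Minimal demonstration of the TimeProvider timing idiom used by the scenarios.
    var timeProvider = TimeProvider.System;

    var begin = timeProvider.GetTimestamp();          // raw high-resolution tick count
    await Task.Delay(25);                             // stand-in for an awaited RPC
    var elapsed = timeProvider.GetElapsedTime(begin); // tick delta converted to a TimeSpan

    Console.WriteLine($"elapsed: {elapsed.TotalMilliseconds:0.###} ms"); // roughly 25 ms, scheduling permitting
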
31 changes: 25 additions & 6 deletions perf/BenchmarkApp/PerformanceTest.Client/StreamingHubScenario.cs
@@ -5,118 +5,137 @@
public class StreamingHubScenario : IScenario, IPerfTestHubReceiver
{
IPerfTestHub client = default!;
readonly TimeProvider timeProvider = TimeProvider.System;

public async ValueTask PrepareAsync(GrpcChannel channel)
{
this.client = await StreamingHubClient.ConnectAsync<IPerfTestHub, IPerfTestHubReceiver>(channel, this);
}

public async ValueTask RunAsync(PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
public async ValueTask RunAsync(int connectionId, PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
{
while (!cancellationToken.IsCancellationRequested)
{
var begin = timeProvider.GetTimestamp(); // take the timestamp each iteration so the recorded value is per-call latency
await client.CallMethodAsync("FooBarBaz🚀こんにちは世界", 123, 4567, 891011);
ctx.Increment();
ctx.Latency(connectionId, timeProvider.GetElapsedTime(begin));
}
}
}

public class StreamingHubValueTaskScenario : IScenario, IPerfTestHubReceiver
{
IPerfTestHub client = default!;
readonly TimeProvider timeProvider = TimeProvider.System;

public async ValueTask PrepareAsync(GrpcChannel channel)
{
this.client = await StreamingHubClient.ConnectAsync<IPerfTestHub, IPerfTestHubReceiver>(channel, this);
}

public async ValueTask RunAsync(PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
public async ValueTask RunAsync(int connectionId, PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
{
while (!cancellationToken.IsCancellationRequested)
{
var begin = timeProvider.GetTimestamp();
await client.CallMethodValueTaskAsync("FooBarBaz🚀こんにちは世界", 123, 4567, 891011);
ctx.Increment();
ctx.Latency(connectionId, timeProvider.GetElapsedTime(begin));
}
}
}

public class StreamingHubComplexScenario : IScenario, IPerfTestHubReceiver
{
IPerfTestHub client = default!;
readonly TimeProvider timeProvider = TimeProvider.System;

public async ValueTask PrepareAsync(GrpcChannel channel)
{
this.client = await StreamingHubClient.ConnectAsync<IPerfTestHub, IPerfTestHubReceiver>(channel, this);
}

public async ValueTask RunAsync(PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
public async ValueTask RunAsync(int connectionId, PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
{
while (!cancellationToken.IsCancellationRequested)
{
var begin = timeProvider.GetTimestamp();
await client.CallMethodComplexAsync("FooBarBaz🚀こんにちは世界", 123, 4567, 891011);
ctx.Increment();
ctx.Latency(connectionId, timeProvider.GetElapsedTime(begin));
}
}
}

public class StreamingHubComplexValueTaskScenario : IScenario, IPerfTestHubReceiver
{
IPerfTestHub client = default!;
readonly TimeProvider timeProvider = TimeProvider.System;

public async ValueTask PrepareAsync(GrpcChannel channel)
{
this.client = await StreamingHubClient.ConnectAsync<IPerfTestHub, IPerfTestHubReceiver>(channel, this);
}

public async ValueTask RunAsync(PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
public async ValueTask RunAsync(int connectionId, PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
{
while (!cancellationToken.IsCancellationRequested)
{
var begin = timeProvider.GetTimestamp();
await client.CallMethodComplexValueTaskAsync("FooBarBaz🚀こんにちは世界", 123, 4567, 891011);
ctx.Increment();
ctx.Latency(connectionId, timeProvider.GetElapsedTime(begin));
}
}
}

public class PingpongStreamingHubScenario : IScenario, IPerfTestHubReceiver
{
IPerfTestHub client = default!;
readonly TimeProvider timeProvider = TimeProvider.System;

public async ValueTask PrepareAsync(GrpcChannel channel)
{
this.client = await StreamingHubClient.ConnectAsync<IPerfTestHub, IPerfTestHubReceiver>(channel, this);
}

public async ValueTask RunAsync(PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
public async ValueTask RunAsync(int connectionId, PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
{
while (!cancellationToken.IsCancellationRequested)
{
var begin = timeProvider.GetTimestamp();
await client.PingpongAsync(new SimpleRequest
{
Payload = new byte[100],
ResponseSize = 100,
UseCache = false,
});
ctx.Increment();
ctx.Latency(connectionId, timeProvider.GetElapsedTime(begin));
}
}
}

public class PingpongCachedStreamingHubScenario : IScenario, IPerfTestHubReceiver
{
IPerfTestHub client = default!;
readonly TimeProvider timeProvider = TimeProvider.System;

public async ValueTask PrepareAsync(GrpcChannel channel)
{
this.client = await StreamingHubClient.ConnectAsync<IPerfTestHub, IPerfTestHubReceiver>(channel, this);
}

public async ValueTask RunAsync(PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
public async ValueTask RunAsync(int connectionId, PerformanceTestRunningContext ctx, CancellationToken cancellationToken)
{
while (!cancellationToken.IsCancellationRequested)
{
var begin = timeProvider.GetTimestamp();
await client.PingpongAsync(SimpleRequest.Cached);
ctx.Increment();
ctx.Latency(connectionId, timeProvider.GetElapsedTime(begin));
}
}
}