From ff6725f07aa49182a4c9846a9623d871a9f0b021 Mon Sep 17 00:00:00 2001
From: Vishwesh Bankwar
Date: Tue, 12 Mar 2024 11:54:29 -0700
Subject: [PATCH] [otlp] Add Retry Handler (#5433)

---
 .../Implementation/ExportClient/OtlpRetry.cs  |  12 +-
 .../OtlpExporterRetryTransmissionHandler.cs   |  37 +++
 .../Transmission/RetryHelper.cs               |  32 +++
 .../MockCollectorIntegrationTests.cs          | 236 +++++++++++++++++-
 .../OtlpRetryTests.cs                         |  33 +--
 5 files changed, 322 insertions(+), 28 deletions(-)
 create mode 100644 src/OpenTelemetry.Exporter.OpenTelemetryProtocol/Implementation/Transmission/OtlpExporterRetryTransmissionHandler.cs
 create mode 100644 src/OpenTelemetry.Exporter.OpenTelemetryProtocol/Implementation/Transmission/RetryHelper.cs

diff --git a/src/OpenTelemetry.Exporter.OpenTelemetryProtocol/Implementation/ExportClient/OtlpRetry.cs b/src/OpenTelemetry.Exporter.OpenTelemetryProtocol/Implementation/ExportClient/OtlpRetry.cs
index 4d214fcfb7f..325236cc2a6 100644
--- a/src/OpenTelemetry.Exporter.OpenTelemetryProtocol/Implementation/ExportClient/OtlpRetry.cs
+++ b/src/OpenTelemetry.Exporter.OpenTelemetryProtocol/Implementation/ExportClient/OtlpRetry.cs
@@ -56,7 +56,6 @@ internal static class OtlpRetry
     public static bool TryGetHttpRetryResult(ExportClientHttpResponse response, int retryDelayInMilliSeconds, out RetryResult retryResult)
     {
-        retryResult = default;
         if (response.StatusCode.HasValue)
         {
             return TryGetRetryResult(response.StatusCode.Value, IsHttpStatusCodeRetryable, response.DeadlineUtc, response.Headers, TryGetHttpRetryDelay, retryDelayInMilliSeconds, out retryResult);
@@ -73,6 +72,7 @@ public static bool TryGetHttpRetryResult(ExportClientHttpResponse response, int
             }
         }
 
+        retryResult = default;
         return false;
     }
 }
@@ -83,9 +83,15 @@ public static bool ShouldHandleHttpRequestException(Exception? exception)
         return true;
     }
 
-    public static bool TryGetGrpcRetryResult(StatusCode statusCode, DateTime? deadline, Metadata trailers, int retryDelayMilliseconds, out RetryResult retryResult)
+    public static bool TryGetGrpcRetryResult(ExportClientGrpcResponse response, int retryDelayMilliseconds, out RetryResult retryResult)
     {
-        return TryGetRetryResult(statusCode, IsGrpcStatusCodeRetryable, deadline, trailers, TryGetGrpcRetryDelay, retryDelayMilliseconds, out retryResult);
+        if (response.Exception is RpcException rpcException)
+        {
+            return TryGetRetryResult(rpcException.StatusCode, IsGrpcStatusCodeRetryable, response.DeadlineUtc, rpcException.Trailers, TryGetGrpcRetryDelay, retryDelayMilliseconds, out retryResult);
+        }
+
+        retryResult = default;
+        return false;
     }
 
     private static bool TryGetRetryResult<TStatusCode, TCarrier>(TStatusCode statusCode, Func<TStatusCode, bool> isRetryable, DateTime? deadline, TCarrier carrier, Func<TCarrier, TimeSpan?> throttleGetter, int nextRetryDelayMilliseconds, out RetryResult retryResult)
diff --git a/src/OpenTelemetry.Exporter.OpenTelemetryProtocol/Implementation/Transmission/OtlpExporterRetryTransmissionHandler.cs b/src/OpenTelemetry.Exporter.OpenTelemetryProtocol/Implementation/Transmission/OtlpExporterRetryTransmissionHandler.cs
new file mode 100644
index 00000000000..d4be5c9d640
--- /dev/null
+++ b/src/OpenTelemetry.Exporter.OpenTelemetryProtocol/Implementation/Transmission/OtlpExporterRetryTransmissionHandler.cs
@@ -0,0 +1,37 @@
+// Copyright The OpenTelemetry Authors
+// SPDX-License-Identifier: Apache-2.0
+
+#nullable enable
+
+using OpenTelemetry.Exporter.OpenTelemetryProtocol.Implementation.ExportClient;
+
+namespace OpenTelemetry.Exporter.OpenTelemetryProtocol.Implementation.Transmission;
+
+internal sealed class OtlpExporterRetryTransmissionHandler<TRequest> : OtlpExporterTransmissionHandler<TRequest>
+{
+    internal OtlpExporterRetryTransmissionHandler(IExportClient<TRequest> exportClient, double timeoutMilliseconds)
+        : base(exportClient, timeoutMilliseconds)
+    {
+    }
+
+    protected override bool OnSubmitRequestFailure(TRequest request, ExportClientResponse response)
+    {
+        var nextRetryDelayMilliseconds = OtlpRetry.InitialBackoffMilliseconds;
+        while (RetryHelper.ShouldRetryRequest(request, response, nextRetryDelayMilliseconds, out var retryResult))
+        {
+            // Note: This delay cannot exceed the configured timeout period for otlp exporter.
+            // If the backend responds with `RetryAfter` duration that would result in exceeding the configured timeout period
+            // we would fail fast and drop the data.
+            Thread.Sleep(retryResult.RetryDelay);
+
+            if (this.TryRetryRequest(request, response.DeadlineUtc, out response))
+            {
+                return true;
+            }
+
+            nextRetryDelayMilliseconds = retryResult.NextRetryDelayMilliseconds;
+        }
+
+        return false;
+    }
+}
diff --git a/src/OpenTelemetry.Exporter.OpenTelemetryProtocol/Implementation/Transmission/RetryHelper.cs b/src/OpenTelemetry.Exporter.OpenTelemetryProtocol/Implementation/Transmission/RetryHelper.cs
new file mode 100644
index 00000000000..cf663a5f792
--- /dev/null
+++ b/src/OpenTelemetry.Exporter.OpenTelemetryProtocol/Implementation/Transmission/RetryHelper.cs
@@ -0,0 +1,32 @@
+// Copyright The OpenTelemetry Authors
+// SPDX-License-Identifier: Apache-2.0
+
+#nullable enable
+
+using OpenTelemetry.Exporter.OpenTelemetryProtocol.Implementation.ExportClient;
+
+namespace OpenTelemetry.Exporter.OpenTelemetryProtocol.Implementation.Transmission;
+
+internal static class RetryHelper
+{
+    internal static bool ShouldRetryRequest<TRequest>(TRequest request, ExportClientResponse response, int retryDelayMilliseconds, out OtlpRetry.RetryResult retryResult)
+    {
+        if (response is ExportClientGrpcResponse grpcResponse)
+        {
+            if (OtlpRetry.TryGetGrpcRetryResult(grpcResponse, retryDelayMilliseconds, out retryResult))
+            {
+                return true;
+            }
+        }
+        else if (response is ExportClientHttpResponse httpResponse)
+        {
+            if (OtlpRetry.TryGetHttpRetryResult(httpResponse, retryDelayMilliseconds, out retryResult))
+            {
+                return true;
+            }
+        }
+
+        retryResult = default;
+        return false;
+    }
+}
diff --git a/test/OpenTelemetry.Exporter.OpenTelemetryProtocol.Tests/MockCollectorIntegrationTests.cs b/test/OpenTelemetry.Exporter.OpenTelemetryProtocol.Tests/MockCollectorIntegrationTests.cs
index 51536cfd477..dc2a0aad5f8 100644
--- a/test/OpenTelemetry.Exporter.OpenTelemetryProtocol.Tests/MockCollectorIntegrationTests.cs
+++ b/test/OpenTelemetry.Exporter.OpenTelemetryProtocol.Tests/MockCollectorIntegrationTests.cs
@@ -3,11 +3,16 @@
 
 #if !NETFRAMEWORK
 using System.Diagnostics;
+using System.Net;
 using Grpc.Core;
 using Microsoft.AspNetCore.Builder;
 using Microsoft.AspNetCore.Hosting;
+using Microsoft.AspNetCore.Http;
 using Microsoft.Extensions.DependencyInjection;
 using Microsoft.Extensions.Hosting;
+using Microsoft.Extensions.Logging;
+using OpenTelemetry.Exporter.OpenTelemetryProtocol.Implementation.ExportClient;
+using OpenTelemetry.Exporter.OpenTelemetryProtocol.Implementation.Transmission;
 using OpenTelemetry.Metrics;
 using OpenTelemetry.Proto.Collector.Trace.V1;
 using OpenTelemetry.Tests;
@@ -18,21 +23,28 @@ namespace OpenTelemetry.Exporter.OpenTelemetryProtocol.Tests;
 
 public sealed class MockCollectorIntegrationTests
 {
+    private static int gRPCPort = 4317;
+    private static int httpPort = 5051;
+
     [Fact]
     public async Task TestRecoveryAfterFailedExport()
     {
+        var testGrpcPort = Interlocked.Increment(ref gRPCPort);
+        var testHttpPort = Interlocked.Increment(ref httpPort);
+
         using var host = await new HostBuilder()
             .ConfigureWebHostDefaults(webBuilder => webBuilder
                 .ConfigureKestrel(options =>
                 {
-                    options.ListenLocalhost(5050, listenOptions => listenOptions.Protocols = Microsoft.AspNetCore.Server.Kestrel.Core.HttpProtocols.Http1);
-                    options.ListenLocalhost(4317, listenOptions => listenOptions.Protocols = Microsoft.AspNetCore.Server.Kestrel.Core.HttpProtocols.Http2);
+                    options.ListenLocalhost(testHttpPort, listenOptions => listenOptions.Protocols = Microsoft.AspNetCore.Server.Kestrel.Core.HttpProtocols.Http1);
+                    options.ListenLocalhost(testGrpcPort, listenOptions => listenOptions.Protocols = Microsoft.AspNetCore.Server.Kestrel.Core.HttpProtocols.Http2);
                 })
                 .ConfigureServices(services =>
                 {
                     services.AddSingleton(new MockCollectorState());
                     services.AddGrpc();
                 })
+                .ConfigureLogging(loggingBuilder => loggingBuilder.ClearProviders())
                 .Configure(app =>
                 {
                     app.UseRouting();
@@ -52,13 +64,13 @@ public async Task TestRecoveryAfterFailedExport()
             }))
             .StartAsync();
 
-        var httpClient = new HttpClient() { BaseAddress = new Uri("http://localhost:5050") };
+        using var httpClient = new HttpClient() { BaseAddress = new Uri($"http://localhost:{testHttpPort}") };
 
         var codes = new[] { Grpc.Core.StatusCode.Unimplemented, Grpc.Core.StatusCode.OK };
         await httpClient.GetAsync($"/MockCollector/SetResponseCodes/{string.Join(",", codes.Select(x => (int)x))}");
 
         var exportResults = new List<ExportResult>();
-        var otlpExporter = new OtlpTraceExporter(new OtlpExporterOptions() { Endpoint = new Uri("http://localhost:4317") });
+        var otlpExporter = new OtlpTraceExporter(new OtlpExporterOptions() { Endpoint = new Uri($"http://localhost:{testGrpcPort}") });
         var delegatingExporter = new DelegatingExporter<Activity>
         {
             OnExportFunc = (batch) =>
@@ -91,6 +103,203 @@ public async Task TestRecoveryAfterFailedExport()
         await host.StopAsync();
     }
 
+    // For `Grpc.Core.StatusCode.DeadlineExceeded`
+    // See https://github.com/open-telemetry/opentelemetry-dotnet/issues/5436.
+    [Theory]
+    [InlineData(true, ExportResult.Success, Grpc.Core.StatusCode.Unavailable)]
+    [InlineData(true, ExportResult.Success, Grpc.Core.StatusCode.Cancelled)]
+    [InlineData(true, ExportResult.Success, Grpc.Core.StatusCode.Aborted)]
+    [InlineData(true, ExportResult.Success, Grpc.Core.StatusCode.OutOfRange)]
+    [InlineData(true, ExportResult.Success, Grpc.Core.StatusCode.DataLoss)]
+    [InlineData(true, ExportResult.Failure, Grpc.Core.StatusCode.Internal)]
+    [InlineData(true, ExportResult.Failure, Grpc.Core.StatusCode.InvalidArgument)]
+    [InlineData(true, ExportResult.Failure, Grpc.Core.StatusCode.Unimplemented)]
+    [InlineData(true, ExportResult.Failure, Grpc.Core.StatusCode.FailedPrecondition)]
+    [InlineData(true, ExportResult.Failure, Grpc.Core.StatusCode.PermissionDenied)]
+    [InlineData(true, ExportResult.Failure, Grpc.Core.StatusCode.Unauthenticated)]
+    [InlineData(true, ExportResult.Success, Grpc.Core.StatusCode.DeadlineExceeded)]
+    [InlineData(false, ExportResult.Failure, Grpc.Core.StatusCode.Unavailable)]
+    [InlineData(false, ExportResult.Failure, Grpc.Core.StatusCode.Cancelled)]
+    [InlineData(false, ExportResult.Failure, Grpc.Core.StatusCode.Aborted)]
+    [InlineData(false, ExportResult.Failure, Grpc.Core.StatusCode.OutOfRange)]
+    [InlineData(false, ExportResult.Failure, Grpc.Core.StatusCode.DataLoss)]
+    [InlineData(false, ExportResult.Failure, Grpc.Core.StatusCode.Internal)]
+    [InlineData(false, ExportResult.Failure, Grpc.Core.StatusCode.InvalidArgument)]
+    [InlineData(false, ExportResult.Failure, Grpc.Core.StatusCode.FailedPrecondition)]
+    [InlineData(false, ExportResult.Failure, Grpc.Core.StatusCode.DeadlineExceeded)]
+    public async Task GrpcRetryTests(bool useRetryTransmissionHandler, ExportResult expectedResult, Grpc.Core.StatusCode initialStatusCode)
+    {
+        var testGrpcPort = Interlocked.Increment(ref gRPCPort);
+        var testHttpPort = Interlocked.Increment(ref httpPort);
+
+        using var host = await new HostBuilder()
+            .ConfigureWebHostDefaults(webBuilder => webBuilder
+                .ConfigureKestrel(options =>
+                {
+                    options.ListenLocalhost(testHttpPort, listenOptions => listenOptions.Protocols = Microsoft.AspNetCore.Server.Kestrel.Core.HttpProtocols.Http1);
+                    options.ListenLocalhost(testGrpcPort, listenOptions => listenOptions.Protocols = Microsoft.AspNetCore.Server.Kestrel.Core.HttpProtocols.Http2);
+                })
+                .ConfigureServices(services =>
+                {
+                    services.AddSingleton(new MockCollectorState());
+                    services.AddGrpc();
+                })
+                .ConfigureLogging(loggingBuilder => loggingBuilder.ClearProviders())
+                .Configure(app =>
+                {
+                    app.UseRouting();
+
+                    app.UseEndpoints(endpoints =>
+                    {
+                        endpoints.MapGet(
+                            "/MockCollector/SetResponseCodes/{responseCodesCsv}",
+                            (MockCollectorState collectorState, string responseCodesCsv) =>
+                            {
+                                var codes = responseCodesCsv.Split(",").Select(x => int.Parse(x)).ToArray();
+                                collectorState.SetStatusCodes(codes);
+                            });
+
+                        endpoints.MapGrpcService<MockTraceService>();
+                    });
+                }))
+            .StartAsync();
+
+        using var httpClient = new HttpClient() { BaseAddress = new Uri($"http://localhost:{testHttpPort}") };
+
+        // First reply with failure and then Ok
+        var codes = new[] { initialStatusCode, Grpc.Core.StatusCode.OK };
+        await httpClient.GetAsync($"/MockCollector/SetResponseCodes/{string.Join(",", codes.Select(x => (int)x))}");
+
+        var endpoint = new Uri($"http://localhost:{testGrpcPort}");
+
+        var exporterOptions = new OtlpExporterOptions() { Endpoint = endpoint, TimeoutMilliseconds = 20000 };
+
+        var exportClient = new OtlpGrpcTraceExportClient(exporterOptions);
+
+        OtlpExporterTransmissionHandler<ExportTraceServiceRequest> transmissionHandler;
+
+        // TODO: update this to configure via experimental environment variable.
+        if (useRetryTransmissionHandler)
+        {
+            transmissionHandler = new OtlpExporterRetryTransmissionHandler<ExportTraceServiceRequest>(exportClient, exporterOptions.TimeoutMilliseconds);
+        }
+        else
+        {
+            transmissionHandler = new OtlpExporterTransmissionHandler<ExportTraceServiceRequest>(exportClient, exporterOptions.TimeoutMilliseconds);
+        }
+
+        var otlpExporter = new OtlpTraceExporter(exporterOptions, new(), transmissionHandler);
+
+        var activitySourceName = "otel.grpc.retry.test";
+        using var source = new ActivitySource(activitySourceName);
+
+        using var tracerProvider = Sdk.CreateTracerProviderBuilder()
+            .AddSource(activitySourceName)
+            .Build();
+
+        using var activity = source.StartActivity("GrpcRetryTest");
+        activity.Stop();
+        using var batch = new Batch<Activity>([activity], 1);
+
+        var exportResult = otlpExporter.Export(batch);
+
+        Assert.Equal(expectedResult, exportResult);
+
+        await host.StopAsync();
+    }
+
+    [Theory]
+    [InlineData(true, ExportResult.Success, HttpStatusCode.ServiceUnavailable)]
+    [InlineData(true, ExportResult.Success, HttpStatusCode.BadGateway)]
+    [InlineData(true, ExportResult.Success, HttpStatusCode.GatewayTimeout)]
+    [InlineData(true, ExportResult.Failure, HttpStatusCode.BadRequest)]
+    [InlineData(true, ExportResult.Success, HttpStatusCode.TooManyRequests)]
+    [InlineData(false, ExportResult.Failure, HttpStatusCode.ServiceUnavailable)]
+    [InlineData(false, ExportResult.Failure, HttpStatusCode.BadGateway)]
+    [InlineData(false, ExportResult.Failure, HttpStatusCode.GatewayTimeout)]
+    [InlineData(false, ExportResult.Failure, HttpStatusCode.TooManyRequests)]
+    [InlineData(false, ExportResult.Failure, HttpStatusCode.BadRequest)]
+    public async Task HttpRetryTests(bool useRetryTransmissionHandler, ExportResult expectedResult, HttpStatusCode initialHttpStatusCode)
+    {
+        var testHttpPort = Interlocked.Increment(ref httpPort);
+
+        using var host = await new HostBuilder()
+            .ConfigureWebHostDefaults(webBuilder => webBuilder
+                .ConfigureKestrel(options =>
+                {
+                    options.ListenLocalhost(testHttpPort, listenOptions => listenOptions.Protocols = Microsoft.AspNetCore.Server.Kestrel.Core.HttpProtocols.Http1);
+                })
+                .ConfigureServices(services =>
+                {
+                    services.AddSingleton(new MockCollectorHttpState());
+                })
+                .ConfigureLogging(loggingBuilder => loggingBuilder.ClearProviders())
+                .Configure(app =>
+                {
+                    app.UseRouting();
+
+                    app.UseEndpoints(endpoints =>
+                    {
+                        endpoints.MapGet(
+                            "/MockCollector/SetResponseCodes/{responseCodesCsv}",
+                            (MockCollectorHttpState collectorState, string responseCodesCsv) =>
+                            {
+                                var codes = responseCodesCsv.Split(",").Select(x => int.Parse(x)).ToArray();
+                                collectorState.SetStatusCodes(codes);
+                            });
+
+                        endpoints.MapPost("/v1/traces", async ctx =>
+                        {
+                            var state = ctx.RequestServices.GetRequiredService<MockCollectorHttpState>();
+                            ctx.Response.StatusCode = (int)state.NextStatus();
+
+                            await ctx.Response.WriteAsync("Request Received.");
+                        });
+                    });
+                }))
+            .StartAsync();
+
+        using var httpClient = new HttpClient() { BaseAddress = new Uri($"http://localhost:{testHttpPort}") };
+
+        var codes = new[] { initialHttpStatusCode, HttpStatusCode.OK };
+        await httpClient.GetAsync($"/MockCollector/SetResponseCodes/{string.Join(",", codes.Select(x => (int)x))}");
+
+        var endpoint = new Uri($"http://localhost:{testHttpPort}/v1/traces");
+
+        var exporterOptions = new OtlpExporterOptions() { Endpoint = endpoint, TimeoutMilliseconds = 20000 };
+
+        var exportClient = new OtlpHttpTraceExportClient(exporterOptions, new HttpClient());
+
+        OtlpExporterTransmissionHandler<ExportTraceServiceRequest> transmissionHandler;
+
+        // TODO: update this to configure via experimental environment variable.
+        if (useRetryTransmissionHandler)
+        {
+            transmissionHandler = new OtlpExporterRetryTransmissionHandler<ExportTraceServiceRequest>(exportClient, exporterOptions.TimeoutMilliseconds);
+        }
+        else
+        {
+            transmissionHandler = new OtlpExporterTransmissionHandler<ExportTraceServiceRequest>(exportClient, exporterOptions.TimeoutMilliseconds);
+        }
+
+        var otlpExporter = new OtlpTraceExporter(exporterOptions, new(), transmissionHandler);
+
+        var activitySourceName = "otel.http.retry.test";
+        using var source = new ActivitySource(activitySourceName);
+
+        using var tracerProvider = Sdk.CreateTracerProviderBuilder()
+            .AddSource(activitySourceName)
+            .Build();
+
+        using var activity = source.StartActivity("HttpRetryTest");
+        activity.Stop();
+        using var batch = new Batch<Activity>([activity], 1);
+
+        var exportResult = otlpExporter.Export(batch);
+
+        Assert.Equal(expectedResult, exportResult);
+    }
+
     private class MockCollectorState
     {
         private Grpc.Core.StatusCode[] statusCodes = { };
@@ -110,6 +319,25 @@ public Grpc.Core.StatusCode NextStatus()
         }
     }
 
+    private class MockCollectorHttpState
+    {
+        private HttpStatusCode[] statusCodes = { };
+        private int statusCodeIndex = 0;
+
+        public void SetStatusCodes(int[] statusCodes)
+        {
+            this.statusCodeIndex = 0;
+            this.statusCodes = statusCodes.Select(x => (HttpStatusCode)x).ToArray();
+        }
+
+        public HttpStatusCode NextStatus()
+        {
+            return this.statusCodeIndex < this.statusCodes.Length
+                ? this.statusCodes[this.statusCodeIndex++]
+                : HttpStatusCode.OK;
+        }
+    }
+
     private class MockTraceService : TraceService.TraceServiceBase
     {
         private readonly MockCollectorState state;
diff --git a/test/OpenTelemetry.Exporter.OpenTelemetryProtocol.Tests/OtlpRetryTests.cs b/test/OpenTelemetry.Exporter.OpenTelemetryProtocol.Tests/OtlpRetryTests.cs
index 4f7a0bef5f6..25acb30e3e6 100644
--- a/test/OpenTelemetry.Exporter.OpenTelemetryProtocol.Tests/OtlpRetryTests.cs
+++ b/test/OpenTelemetry.Exporter.OpenTelemetryProtocol.Tests/OtlpRetryTests.cs
@@ -31,10 +31,12 @@ public void TryGetGrpcRetryResultTest(GrpcRetryTestCase testCase)
         foreach (var retryAttempt in testCase.RetryAttempts)
         {
             ++attempts;
-            var statusCode = retryAttempt.RpcException.StatusCode;
-            var deadline = retryAttempt.CallOptions.Deadline;
-            var trailers = retryAttempt.RpcException.Trailers;
-            var success = OtlpRetry.TryGetGrpcRetryResult(statusCode, deadline, trailers, nextRetryDelayMilliseconds, out var retryResult);
+            var rpcException = retryAttempt.Response.Exception as RpcException;
+            Assert.NotNull(rpcException);
+            var statusCode = rpcException.StatusCode;
+            var deadline = retryAttempt.Response.DeadlineUtc;
+            var trailers = rpcException.Trailers;
+            var success = OtlpRetry.TryGetGrpcRetryResult(retryAttempt.Response, nextRetryDelayMilliseconds, out var retryResult);
 
             Assert.Equal(retryAttempt.ExpectedSuccess, success);
 
@@ -186,16 +188,6 @@ public static IEnumerable<object[]> GetGrpcTestCases()
                 },
                 expectedRetryAttempts: 9),
         };
-
-        yield return new[]
-        {
-            new GrpcRetryTestCase(
-                "Ridiculous throttling delay",
-                new GrpcRetryAttempt[]
-                {
-                    new(StatusCode.Unavailable, throttleDelay: Duration.FromTimeSpan(TimeSpan.FromDays(3000000)), expectedNextRetryDelayMilliseconds: 5000),
-                }),
-        };
     }
 
     public override string ToString()
@@ -222,11 +214,10 @@ private static Metadata GenerateTrailers(Duration throttleDelay)
 
     public struct GrpcRetryAttempt
     {
-        public RpcException RpcException;
-        public CallOptions CallOptions;
         public TimeSpan? ThrottleDelay;
         public int? ExpectedNextRetryDelayMilliseconds;
         public bool ExpectedSuccess;
+        internal ExportClientGrpcResponse Response;
 
         public GrpcRetryAttempt(
             StatusCode statusCode,
@@ -236,14 +227,17 @@ public GrpcRetryAttempt(
             bool expectedSuccess = true)
         {
             var status = new Status(statusCode, "Error");
-            this.RpcException = throttleDelay != null
+            var rpcException = throttleDelay != null
                 ? new RpcException(status, GenerateTrailers(throttleDelay))
                 : new RpcException(status);
 
-            this.CallOptions = deadlineExceeded ? new CallOptions(deadline: DateTime.UtcNow.AddSeconds(-1)) : default;
+            // Using arbitrary +1 hr for deadline for test purposes.
+            var deadlineUtc = deadlineExceeded ? DateTime.UtcNow.AddSeconds(-1) : DateTime.UtcNow.AddHours(1);
 
             this.ThrottleDelay = throttleDelay != null ? throttleDelay.ToTimeSpan() : null;
 
+            this.Response = new ExportClientGrpcResponse(expectedSuccess, deadlineUtc, rpcException);
+
             this.ExpectedNextRetryDelayMilliseconds = expectedNextRetryDelayMilliseconds;
 
             this.ExpectedSuccess = expectedSuccess;
@@ -316,7 +310,6 @@ public override string ToString()
     internal class HttpRetryAttempt
     {
         public ExportClientHttpResponse Response;
-        public DateTime? Deadline;
         public TimeSpan? ThrottleDelay;
         public int? ExpectedNextRetryDelayMilliseconds;
         public bool ExpectedSuccess;
@@ -346,8 +339,6 @@ internal HttpRetryAttempt(
             // Using arbitrary +1 hr for deadline for test purposes.
             var deadlineUtc = isDeadlineExceeded ? DateTime.UtcNow.AddMilliseconds(-1) : DateTime.UtcNow.AddHours(1);
             this.Response = new ExportClientHttpResponse(expectedSuccess, deadlineUtc, responseMessage, new HttpRequestException());
-
-            this.Deadline = isDeadlineExceeded ? DateTime.UtcNow.AddMilliseconds(-1) : null;
             this.ExpectedNextRetryDelayMilliseconds = expectedNextRetryDelayMilliseconds;
             this.ExpectedSuccess = expectedSuccess;
         }
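
Note (not part of the patch): a minimal sketch of how the new retry transmission handler is wired up, mirroring the test code above. The types involved (OtlpGrpcTraceExportClient, OtlpExporterRetryTransmissionHandler<T>, and the OtlpTraceExporter constructor overload taking a transmission handler) are internal, so this reflects test-only usage; the TODO in the tests suggests end-user opt-in is expected to move to an experimental environment variable later.

    // Sketch only: assumes the internal OTLP exporter types referenced by this patch.
    var options = new OtlpExporterOptions
    {
        Endpoint = new Uri("http://localhost:4317"),
        TimeoutMilliseconds = 10000,
    };

    // Client that performs the actual gRPC export calls.
    var exportClient = new OtlpGrpcTraceExportClient(options);

    // On failure, OnSubmitRequestFailure sleeps for the computed backoff (or the
    // server's throttling hint) and re-submits until success, a non-retryable
    // status, or the deadline derived from TimeoutMilliseconds is exceeded.
    var transmissionHandler = new OtlpExporterRetryTransmissionHandler<ExportTraceServiceRequest>(
        exportClient,
        options.TimeoutMilliseconds);

    // Second argument mirrors the test's target-typed new().
    var exporter = new OtlpTraceExporter(options, new(), transmissionHandler);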