diff --git a/Microsoft.Azure.Cosmos/FaultInjection/src/FaultInjectionRuleBuilder.cs b/Microsoft.Azure.Cosmos/FaultInjection/src/FaultInjectionRuleBuilder.cs
index efa166166b..dd512366a1 100644
--- a/Microsoft.Azure.Cosmos/FaultInjection/src/FaultInjectionRuleBuilder.cs
+++ b/Microsoft.Azure.Cosmos/FaultInjection/src/FaultInjectionRuleBuilder.cs
@@ -149,7 +149,11 @@ private void ValidateGatewayConnection()
{
if (serverErrorResult?.GetServerErrorType() != FaultInjectionServerErrorType.TooManyRequests
&& serverErrorResult?.GetServerErrorType() != FaultInjectionServerErrorType.ResponseDelay
- && serverErrorResult?.GetServerErrorType() != FaultInjectionServerErrorType.SendDelay)
+ && serverErrorResult?.GetServerErrorType() != FaultInjectionServerErrorType.SendDelay
+ && serverErrorResult?.GetServerErrorType() != FaultInjectionServerErrorType.DatabaseAccountNotFound
+ && serverErrorResult?.GetServerErrorType() != FaultInjectionServerErrorType.ServiceUnavailable
+ && serverErrorResult?.GetServerErrorType() != FaultInjectionServerErrorType.InternalServerError
+ && serverErrorResult?.GetServerErrorType() != FaultInjectionServerErrorType.LeaseNotFound)
{
throw new ArgumentException($"{serverErrorResult?.GetServerErrorType()} is not supported for metadata requests.");
}
diff --git a/Microsoft.Azure.Cosmos/FaultInjection/src/FaultInjectionServerErrorType.cs b/Microsoft.Azure.Cosmos/FaultInjection/src/FaultInjectionServerErrorType.cs
index bb3f7dae2d..12045d1155 100644
--- a/Microsoft.Azure.Cosmos/FaultInjection/src/FaultInjectionServerErrorType.cs
+++ b/Microsoft.Azure.Cosmos/FaultInjection/src/FaultInjectionServerErrorType.cs
@@ -72,8 +72,13 @@ public enum FaultInjectionServerErrorType
ServiceUnavailable,
///
- /// 404:1008 Database account not found from gateway
+ /// 403:1008 Database account not found from gateway
///
DatabaseAccountNotFound,
+
+ ///
+ /// 410:1022 Lease not found
+ ///
+ LeaseNotFound,
}
}
diff --git a/Microsoft.Azure.Cosmos/FaultInjection/src/implementation/FaultInjectionServerErrorResultInternal.cs b/Microsoft.Azure.Cosmos/FaultInjection/src/implementation/FaultInjectionServerErrorResultInternal.cs
index de5d6f7d60..6cac1ccbd1 100644
--- a/Microsoft.Azure.Cosmos/FaultInjection/src/implementation/FaultInjectionServerErrorResultInternal.cs
+++ b/Microsoft.Azure.Cosmos/FaultInjection/src/implementation/FaultInjectionServerErrorResultInternal.cs
@@ -461,7 +461,7 @@ public HttpResponseMessage GetInjectedServerError(DocumentServiceRequest dsr, st
httpResponse.Headers.Add(
WFConstants.BackendHeaders.SubStatus,
- ((int)SubStatusCodes.RUBudgetExceeded).ToString(CultureInfo.InvariantCulture));
+ ((int)SubStatusCodes.Unknown).ToString(CultureInfo.InvariantCulture));
httpResponse.Headers.Add(WFConstants.BackendHeaders.LocalLSN, lsn);
return httpResponse;
@@ -470,7 +470,7 @@ public HttpResponseMessage GetInjectedServerError(DocumentServiceRequest dsr, st
httpResponse = new HttpResponseMessage
{
- StatusCode = HttpStatusCode.NotFound,
+ StatusCode = HttpStatusCode.Forbidden,
Content = new FauntInjectionHttpContent(
new MemoryStream(
FaultInjectionResponseEncoding.GetBytes($"Fault Injection Server Error: DatabaseAccountNotFound, rule: {ruleId}"))),
@@ -488,6 +488,28 @@ public HttpResponseMessage GetInjectedServerError(DocumentServiceRequest dsr, st
return httpResponse;
+ case FaultInjectionServerErrorType.LeaseNotFound:
+
+ httpResponse = new HttpResponseMessage
+ {
+ StatusCode = HttpStatusCode.Gone,
+ Content = new FauntInjectionHttpContent(
+ new MemoryStream(
+ FaultInjectionResponseEncoding.GetBytes($"Fault Injection Server Error: LeaseNotFound, rule: {ruleId}"))),
+ };
+
+ foreach (string header in headers.AllKeys())
+ {
+ httpResponse.Headers.Add(header, headers.Get(header));
+ }
+
+ httpResponse.Headers.Add(
+ WFConstants.BackendHeaders.SubStatus,
+ ((int)SubStatusCodes.LeaseNotFound).ToString(CultureInfo.InvariantCulture));
+ httpResponse.Headers.Add(WFConstants.BackendHeaders.LocalLSN, lsn);
+
+ return httpResponse;
+
default:
throw new ArgumentException($"Server error type {this.serverErrorType} is not supported");
}
diff --git a/Microsoft.Azure.Cosmos/src/ClientRetryPolicy.cs b/Microsoft.Azure.Cosmos/src/ClientRetryPolicy.cs
index 50ffa81975..78f2960aa8 100644
--- a/Microsoft.Azure.Cosmos/src/ClientRetryPolicy.cs
+++ b/Microsoft.Azure.Cosmos/src/ClientRetryPolicy.cs
@@ -308,11 +308,12 @@ private async Task ShouldRetryInternalAsync(
this.documentServiceRequest?.RequestContext?.LocationEndpointToRoute?.ToString() ?? string.Empty,
this.documentServiceRequest?.ResourceAddress ?? string.Empty);
+ // Retry policy will retry on the next preferred region as the original request region is not accepting requests
return await this.ShouldRetryOnEndpointFailureAsync(
isReadRequest: this.isReadRequest,
markBothReadAndWriteAsUnavailable: false,
forceRefresh: false,
- retryOnPreferredLocations: false);
+ retryOnPreferredLocations: true);
}
if (statusCode == HttpStatusCode.NotFound
@@ -328,6 +329,13 @@ private async Task ShouldRetryInternalAsync(
isSystemResourceUnavailableForWrite: false);
}
+ // Received 500 status code or lease not found
+ if ((statusCode == HttpStatusCode.InternalServerError && this.isReadRequest)
+ || (statusCode == HttpStatusCode.Gone && subStatusCode == SubStatusCodes.LeaseNotFound))
+ {
+ return this.ShouldRetryOnUnavailableEndpointStatusCodes();
+ }
+
return null;
}
@@ -467,14 +475,15 @@ private ShouldRetryResult TryMarkEndpointUnavailableForPkRangeAndRetryOnServiceU
this.TryMarkEndpointUnavailableForPkRange(isSystemResourceUnavailableForWrite);
- return this.ShouldRetryOnServiceUnavailable();
+ return this.ShouldRetryOnUnavailableEndpointStatusCodes();
}
///
/// For a ServiceUnavailable (503.0) we could be having a timeout from Direct/TCP locally or a request to Gateway request with a similar response due to an endpoint not yet available.
/// We try and retry the request only if there are other regions available. The retry logic is applicable for single master write accounts as well.
+ /// Other status codes include InternalServerError (500.0) and LeaseNotFound (410.1022).
///
- private ShouldRetryResult ShouldRetryOnServiceUnavailable()
+ private ShouldRetryResult ShouldRetryOnUnavailableEndpointStatusCodes()
{
if (this.serviceUnavailableRetryCount++ >= ClientRetryPolicy.MaxServiceUnavailableRetryCount)
{
diff --git a/Microsoft.Azure.Cosmos/src/MetadataRequestThrottleRetryPolicy.cs b/Microsoft.Azure.Cosmos/src/MetadataRequestThrottleRetryPolicy.cs
index 928d2f2e87..204bcbd830 100644
--- a/Microsoft.Azure.Cosmos/src/MetadataRequestThrottleRetryPolicy.cs
+++ b/Microsoft.Azure.Cosmos/src/MetadataRequestThrottleRetryPolicy.cs
@@ -23,9 +23,9 @@ internal sealed class MetadataRequestThrottleRetryPolicy : IDocumentClientRetryP
private const int DefaultMaxWaitTimeInSeconds = 60;
///
- /// A constant integer defining the default maximum retry count on service unavailable.
+ /// A constant integer defining the default maximum retry count on unavailable endpoint.
///
- private const int DefaultMaxServiceUnavailableRetryCount = 1;
+ private const int DefaultMaxUnavailableEndpointRetryCount = 1;
///
/// An instance of .
@@ -38,9 +38,9 @@ internal sealed class MetadataRequestThrottleRetryPolicy : IDocumentClientRetryP
private readonly IDocumentClientRetryPolicy throttlingRetryPolicy;
///
- /// An integer defining the maximum retry count on service unavailable.
+ /// An integer defining the maximum retry count on unavailable endpoint.
///
- private readonly int maxServiceUnavailableRetryCount;
+ private readonly int maxUnavailableEndpointRetryCount;
///
/// An instance of containing the location endpoint where the partition key
@@ -49,9 +49,9 @@ internal sealed class MetadataRequestThrottleRetryPolicy : IDocumentClientRetryP
private MetadataRetryContext retryContext;
///
- /// An integer capturing the current retry count on service unavailable.
+ /// An integer capturing the current retry count on unavailable endpoint.
///
- private int serviceUnavailableRetryCount;
+ private int unavailableEndpointRetryCount;
///
/// The constructor to initialize an instance of .
@@ -66,8 +66,8 @@ public MetadataRequestThrottleRetryPolicy(
int maxRetryWaitTimeInSeconds = DefaultMaxWaitTimeInSeconds)
{
this.globalEndpointManager = endpointManager;
- this.maxServiceUnavailableRetryCount = Math.Max(
- MetadataRequestThrottleRetryPolicy.DefaultMaxServiceUnavailableRetryCount,
+ this.maxUnavailableEndpointRetryCount = Math.Max(
+ MetadataRequestThrottleRetryPolicy.DefaultMaxUnavailableEndpointRetryCount,
this.globalEndpointManager.PreferredLocationCount);
this.throttlingRetryPolicy = new ResourceThrottleRetryPolicy(
@@ -91,11 +91,43 @@ public Task ShouldRetryAsync(
Exception exception,
CancellationToken cancellationToken)
{
- if (exception is CosmosException cosmosException
- && cosmosException.StatusCode == HttpStatusCode.ServiceUnavailable
- && cosmosException.Headers.SubStatusCode == SubStatusCodes.TransportGenerated503)
+ if (exception is CosmosException cosmosException)
{
- if (this.IncrementRetryIndexOnServiceUnavailableForMetadataRead())
+ return this.ShouldRetryInternalAsync(
+ cosmosException.StatusCode,
+ (SubStatusCodes)cosmosException.SubStatusCode,
+ exception,
+ cancellationToken);
+ }
+
+ if (exception is DocumentClientException clientException)
+ {
+ return this.ShouldRetryInternalAsync(
+ clientException.StatusCode,
+ clientException.GetSubStatus(),
+ exception, cancellationToken);
+ }
+
+ return this.throttlingRetryPolicy.ShouldRetryAsync(exception, cancellationToken);
+ }
+
+ private Task ShouldRetryInternalAsync(
+ HttpStatusCode? statusCode,
+ SubStatusCodes subStatus,
+ Exception exception,
+ CancellationToken cancellationToken)
+ {
+ if (statusCode == null)
+ {
+ return this.throttlingRetryPolicy.ShouldRetryAsync(exception, cancellationToken);
+ }
+
+ if (statusCode == HttpStatusCode.ServiceUnavailable
+ || statusCode == HttpStatusCode.InternalServerError
+ || (statusCode == HttpStatusCode.Gone && subStatus == SubStatusCodes.LeaseNotFound)
+ || (statusCode == HttpStatusCode.Forbidden && subStatus == SubStatusCodes.DatabaseAccountNotFound))
+ {
+ if (this.IncrementRetryIndexOnUnavailableEndpointForMetadataRead())
{
return Task.FromResult(ShouldRetryResult.RetryAfter(TimeSpan.Zero));
}
@@ -114,16 +146,36 @@ public Task ShouldRetryAsync(
ResponseMessage cosmosResponseMessage,
CancellationToken cancellationToken)
{
- if (cosmosResponseMessage?.StatusCode == HttpStatusCode.ServiceUnavailable
- && cosmosResponseMessage?.Headers?.SubStatusCode == SubStatusCodes.TransportGenerated503)
+ return this.ShouldRetryInternalAsync(
+ cosmosResponseMessage.StatusCode,
+ (SubStatusCodes)Convert.ToInt32(cosmosResponseMessage.Headers[WFConstants.BackendHeaders.SubStatus]),
+ cosmosResponseMessage,
+ cancellationToken);
+ }
+
+ private Task ShouldRetryInternalAsync(
+ HttpStatusCode? statusCode,
+ SubStatusCodes subStatus,
+ ResponseMessage responseMessage,
+ CancellationToken cancellationToken)
+ {
+ if (statusCode == null)
+ {
+ return this.throttlingRetryPolicy.ShouldRetryAsync(responseMessage, cancellationToken);
+ }
+
+ if (statusCode == HttpStatusCode.ServiceUnavailable
+ || statusCode == HttpStatusCode.InternalServerError
+ || (statusCode == HttpStatusCode.Gone && subStatus == SubStatusCodes.LeaseNotFound)
+ || (statusCode == HttpStatusCode.Forbidden && subStatus == SubStatusCodes.DatabaseAccountNotFound))
{
- if (this.IncrementRetryIndexOnServiceUnavailableForMetadataRead())
+ if (this.IncrementRetryIndexOnUnavailableEndpointForMetadataRead())
{
return Task.FromResult(ShouldRetryResult.RetryAfter(TimeSpan.Zero));
}
}
- return this.throttlingRetryPolicy.ShouldRetryAsync(cosmosResponseMessage, cancellationToken);
+ return this.throttlingRetryPolicy.ShouldRetryAsync(responseMessage, cancellationToken);
}
///
@@ -146,23 +198,23 @@ public void OnBeforeSendRequest(DocumentServiceRequest request)
}
///
- /// Increments the location index when a service unavailable exception ocurrs, for any future read requests.
+ /// Increments the location index when an unavailable endpoint exception occurs, for any future read requests.
///
/// A boolean flag indicating if the operation was successful.
- private bool IncrementRetryIndexOnServiceUnavailableForMetadataRead()
+ private bool IncrementRetryIndexOnUnavailableEndpointForMetadataRead()
{
- if (this.serviceUnavailableRetryCount++ >= this.maxServiceUnavailableRetryCount)
+ if (this.unavailableEndpointRetryCount++ >= this.maxUnavailableEndpointRetryCount)
{
- DefaultTrace.TraceWarning("MetadataRequestThrottleRetryPolicy: Retry count: {0} has exceeded the maximum permitted retry count on service unavailable: {1}.", this.serviceUnavailableRetryCount, this.maxServiceUnavailableRetryCount);
+ DefaultTrace.TraceWarning("MetadataRequestThrottleRetryPolicy: Retry count: {0} has exceeded the maximum permitted retry count on unavailable endpoint: {1}.", this.unavailableEndpointRetryCount, this.maxUnavailableEndpointRetryCount);
return false;
}
// Retrying on second PreferredLocations.
// RetryCount is used as zero-based index.
- DefaultTrace.TraceWarning("MetadataRequestThrottleRetryPolicy: Incrementing the metadata retry location index to: {0}.", this.serviceUnavailableRetryCount);
+ DefaultTrace.TraceWarning("MetadataRequestThrottleRetryPolicy: Incrementing the metadata retry location index to: {0}.", this.unavailableEndpointRetryCount);
this.retryContext = new MetadataRetryContext()
{
- RetryLocationIndex = this.serviceUnavailableRetryCount,
+ RetryLocationIndex = this.unavailableEndpointRetryCount,
RetryRequestOnPreferredLocations = true,
};
diff --git a/Microsoft.Azure.Cosmos/tests/Microsoft.Azure.Cosmos.EmulatorTests/CosmosItemIntegrationTests.cs b/Microsoft.Azure.Cosmos/tests/Microsoft.Azure.Cosmos.EmulatorTests/CosmosItemIntegrationTests.cs
index 5ac6cdc06b..8a79051ca8 100644
--- a/Microsoft.Azure.Cosmos/tests/Microsoft.Azure.Cosmos.EmulatorTests/CosmosItemIntegrationTests.cs
+++ b/Microsoft.Azure.Cosmos/tests/Microsoft.Azure.Cosmos.EmulatorTests/CosmosItemIntegrationTests.cs
@@ -8,6 +8,7 @@
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
+ using Microsoft.Azure.Cosmos.Diagnostics;
using Microsoft.Azure.Cosmos.FaultInjection;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using static Microsoft.Azure.Cosmos.Routing.GlobalPartitionEndpointManagerCore;
@@ -149,6 +150,85 @@ public async Task ReadMany2UnreachablePartitionsTest()
}
}
+ [TestMethod]
+ [TestCategory("MultiRegion")]
+ [DataRow(FaultInjectionServerErrorType.ServiceUnavailable)]
+ [DataRow(FaultInjectionServerErrorType.InternalServerError)]
+ [DataRow(FaultInjectionServerErrorType.DatabaseAccountNotFound)]
+ [DataRow(FaultInjectionServerErrorType.LeaseNotFound)]
+ public async Task MetadataEndpointUnavailableCrossRegionalRetryTest(FaultInjectionServerErrorType serverErrorType)
+ {
+ FaultInjectionRule collReadBad = new FaultInjectionRuleBuilder(
+ id: "collread",
+ condition: new FaultInjectionConditionBuilder()
+ .WithOperationType(FaultInjectionOperationType.MetadataContainer)
+ .WithRegion(region1)
+ .Build(),
+ result: new FaultInjectionServerErrorResultBuilder(serverErrorType)
+ .Build())
+ .Build();
+
+ FaultInjectionRule pkRangeBad = new FaultInjectionRuleBuilder(
+ id: "pkrange",
+ condition: new FaultInjectionConditionBuilder()
+ .WithOperationType(FaultInjectionOperationType.MetadataPartitionKeyRange)
+ .WithRegion(region1)
+ .Build(),
+ result: new FaultInjectionServerErrorResultBuilder(serverErrorType)
+ .Build())
+ .Build();
+
+ collReadBad.Disable();
+ pkRangeBad.Disable();
+
+ FaultInjector faultInjector = new FaultInjector(new List<FaultInjectionRule> { pkRangeBad, collReadBad });
+
+ CosmosClientOptions cosmosClientOptions = new CosmosClientOptions()
+ {
+ ConsistencyLevel = ConsistencyLevel.Session,
+ ConnectionMode = ConnectionMode.Direct,
+ Serializer = this.cosmosSystemTextJsonSerializer,
+ FaultInjector = faultInjector,
+ ApplicationPreferredRegions = new List<string> { region1, region2, region3 }
+ };
+
+ using (CosmosClient fiClient = new CosmosClient(
+ connectionString: this.connectionString,
+ clientOptions: cosmosClientOptions))
+ {
+ Database fidb = fiClient.GetDatabase(MultiRegionSetupHelpers.dbName);
+ Container fic = fidb.GetContainer(MultiRegionSetupHelpers.containerName);
+
+ pkRangeBad.Enable();
+ collReadBad.Enable();
+
+ try
+ {
+ FeedIterator frTest = fic.GetItemQueryIterator("SELECT * FROM c");
+ while (frTest.HasMoreResults)
+ {
+ FeedResponse feedres = await frTest.ReadNextAsync();
+
+ Assert.AreEqual(HttpStatusCode.OK, feedres.StatusCode);
+ }
+ }
+ catch (CosmosException ex)
+ {
+ Assert.Fail(ex.Message);
+ }
+ finally
+ {
+ // Cross regional retry needs to occur (could trigger for other metadata call to try on secondary region so rule would not trigger)
+ Assert.IsTrue(pkRangeBad.GetHitCount() + collReadBad.GetHitCount() >= 1);
+
+ pkRangeBad.Disable();
+ collReadBad.Disable();
+
+ fiClient.Dispose();
+ }
+ }
+ }
+
[TestMethod]
[TestCategory("MultiRegion")]
public async Task AddressRefreshTimeoutTest()
diff --git a/Microsoft.Azure.Cosmos/tests/Microsoft.Azure.Cosmos.EmulatorTests/PartitionKeyRangeCacheTests.cs b/Microsoft.Azure.Cosmos/tests/Microsoft.Azure.Cosmos.EmulatorTests/PartitionKeyRangeCacheTests.cs
index 5ca4d1dd37..ee8c763a8c 100644
--- a/Microsoft.Azure.Cosmos/tests/Microsoft.Azure.Cosmos.EmulatorTests/PartitionKeyRangeCacheTests.cs
+++ b/Microsoft.Azure.Cosmos/tests/Microsoft.Azure.Cosmos.EmulatorTests/PartitionKeyRangeCacheTests.cs
@@ -54,8 +54,8 @@ public async Task VerifyPkRangeCacheRefreshOnSplitWithErrorsAsync()
ifNoneMatchValues.Add(request.Headers.IfNoneMatch.ToString());
pkRangeCalls++;
-
- if (pkRangeCalls == throwOnPkRefreshCount)
+ // Account for metadata retry
+ if (pkRangeCalls == throwOnPkRefreshCount || pkRangeCalls == throwOnPkRefreshCount + 1)
{
failedIfNoneMatchValue = request.Headers.IfNoneMatch.ToString();
if (signalSplitException.IsSet)
@@ -132,10 +132,10 @@ public async Task VerifyPkRangeCacheRefreshOnSplitWithErrorsAsync()
pkRangesRefreshed.Wait();
Assert.IsFalse(causeSplitExceptionInRntbdCall);
- Assert.AreEqual(4, pkRangeCalls);
+ Assert.AreEqual(5, pkRangeCalls);
Assert.AreEqual(1, ifNoneMatchValues.Count(x => string.IsNullOrEmpty(x)));
- Assert.AreEqual(3, ifNoneMatchValues.Count(x => x == failedIfNoneMatchValue), $"3 request with same if none value. 1 initial, 2 from the split errors. split exception count: {countSplitExceptions}; {string.Join(';', ifNoneMatchValues)}");
+ Assert.AreEqual(4, ifNoneMatchValues.Count(x => x == failedIfNoneMatchValue), $"4 request with same if none value. 1 initial, 2 from the split errors, 1 retry. split exception count: {countSplitExceptions}; {string.Join(';', ifNoneMatchValues)}");
HashSet verifyUniqueIfNoneHeaderValues = new HashSet();
foreach (string ifNoneValue in ifNoneMatchValues)
diff --git a/Microsoft.Azure.Cosmos/tests/Microsoft.Azure.Cosmos.Tests/ClientRetryPolicyTests.cs b/Microsoft.Azure.Cosmos/tests/Microsoft.Azure.Cosmos.Tests/ClientRetryPolicyTests.cs
index ae24e76449..a0eb64befb 100644
--- a/Microsoft.Azure.Cosmos/tests/Microsoft.Azure.Cosmos.Tests/ClientRetryPolicyTests.cs
+++ b/Microsoft.Azure.Cosmos/tests/Microsoft.Azure.Cosmos.Tests/ClientRetryPolicyTests.cs
@@ -164,13 +164,16 @@ public async Task ShouldRetryAsync_WhenRequestThrottledWithResourceNotAvailable_
}
///
- /// Tests to see if different 503 substatus codes are handeled correctly
+ /// Tests to see if different 503 substatus and other similar status codes are handled correctly
///
/// The substatus code being Tested.
- [DataRow((int)SubStatusCodes.Unknown)]
- [DataRow((int)SubStatusCodes.TransportGenerated503)]
+ [DataRow((int)StatusCodes.ServiceUnavailable, (int)SubStatusCodes.Unknown, "ServiceUnavailable")]
+ [DataRow((int)StatusCodes.ServiceUnavailable, (int)SubStatusCodes.TransportGenerated503, "ServiceUnavailable")]
+ [DataRow((int)StatusCodes.InternalServerError, (int)SubStatusCodes.Unknown, "InternalServerError")]
+ [DataRow((int)StatusCodes.Gone, (int)SubStatusCodes.LeaseNotFound, "LeaseNotFound")]
+ [DataRow((int)StatusCodes.Forbidden, (int)SubStatusCodes.DatabaseAccountNotFound, "DatabaseAccountNotFound")]
[DataTestMethod]
- public void Http503SubStatusHandelingTests(int testCode)
+ public void Http503LikeSubStatusHandlingTests(int statusCode, int SubStatusCode, string message)
{
const bool enableEndpointDiscovery = true;
@@ -187,14 +190,14 @@ public void Http503SubStatusHandelingTests(int testCode)
Exception serviceUnavailableException = new Exception();
Mock nameValueCollection = new Mock();
- HttpStatusCode serviceUnavailable = HttpStatusCode.ServiceUnavailable;
+ HttpStatusCode serviceUnavailable = (HttpStatusCode)statusCode;
DocumentClientException documentClientException = new DocumentClientException(
- message: "Service Unavailable",
+ message: message,
innerException: serviceUnavailableException,
responseHeaders: nameValueCollection.Object,
statusCode: serviceUnavailable,
- substatusCode: (SubStatusCodes)testCode,
+ substatusCode: (SubStatusCodes)SubStatusCode,
requestUri: null
);
diff --git a/Microsoft.Azure.Cosmos/tests/Microsoft.Azure.Cosmos.Tests/LocationCacheTests.cs b/Microsoft.Azure.Cosmos/tests/Microsoft.Azure.Cosmos.Tests/LocationCacheTests.cs
index 52e36c2908..d0af0ce709 100644
--- a/Microsoft.Azure.Cosmos/tests/Microsoft.Azure.Cosmos.Tests/LocationCacheTests.cs
+++ b/Microsoft.Azure.Cosmos/tests/Microsoft.Azure.Cosmos.Tests/LocationCacheTests.cs
@@ -945,7 +945,8 @@ await BackoffRetryUtility.ExecuteAsync(
Assert.IsNotNull(this.cache.EffectivePreferredLocations);
Assert.AreEqual(this.cache.EffectivePreferredLocations.Count, 1);
- expectedEndpoint = LocationCacheTests.EndpointByLocation[availableWriteLocations[1]];
+ // If the default endpoint is a regional endpoint, it will be the only valid read region for read only requests
+ expectedEndpoint = LocationCacheTests.EndpointByLocation[availableWriteLocations[0]];
}
else
{
diff --git a/Microsoft.Azure.Cosmos/tests/Microsoft.Azure.Cosmos.Tests/MetadataRequestThrottleRetryPolicyTests.cs b/Microsoft.Azure.Cosmos/tests/Microsoft.Azure.Cosmos.Tests/MetadataRequestThrottleRetryPolicyTests.cs
index 7f3fe7cdc9..6e3486e3c7 100644
--- a/Microsoft.Azure.Cosmos/tests/Microsoft.Azure.Cosmos.Tests/MetadataRequestThrottleRetryPolicyTests.cs
+++ b/Microsoft.Azure.Cosmos/tests/Microsoft.Azure.Cosmos.Tests/MetadataRequestThrottleRetryPolicyTests.cs
@@ -24,13 +24,10 @@ public class MetadataRequestThrottleRetryPolicyTests
{
[TestMethod]
[Owner("dkunda")]
- [DataRow(true, true, DisplayName = "Test when a response message with a valid substatus code was used.")]
- [DataRow(false, true, DisplayName = "Test when an exception was thrown with a valid substatus code.")]
- [DataRow(true, false, DisplayName = "Test when a response message with an invalid substatus code was used.")]
- [DataRow(false, false, DisplayName = "Test when an exception was thrown with an invalid substatus code.")]
+ [DataRow(true, DisplayName = "Test when a response message.")]
+ [DataRow(false, DisplayName = "Test when an exception was thrown.")]
public async Task ShouldRetryAsync_WithValidAndInvalidSubStatusCodes_ShouldIncrementLocationIndexOrSkip(
- bool useResponseMessage,
- bool isValidSubStatusCode)
+ bool useResponseMessage)
{
// Arrange.
ShouldRetryResult retryResult;
@@ -54,7 +51,7 @@ public async Task ShouldRetryAsync_WithValidAndInvalidSubStatusCodes_ShouldIncre
mockedGlobalEndpointManager
.SetupSequence(gem => gem.ResolveServiceEndpoint(It.IsAny()))
.Returns(primaryServiceEndpoint)
- .Returns(isValidSubStatusCode ? routedServiceEndpoint : primaryServiceEndpoint);
+ .Returns(routedServiceEndpoint);
MetadataRequestThrottleRetryPolicy policy = new(mockedGlobalEndpointManager.Object, 0);
policy.OnBeforeSendRequest(request);
@@ -66,9 +63,7 @@ public async Task ShouldRetryAsync_WithValidAndInvalidSubStatusCodes_ShouldIncre
{
Headers responseHeaders = new()
{
- SubStatusCode = isValidSubStatusCode
- ? SubStatusCodes.TransportGenerated503
- : SubStatusCodes.BWTermCountLimitExceeded
+ SubStatusCode = SubStatusCodes.TransportGenerated503
};
ResponseMessage responseMessage = new(
@@ -87,9 +82,7 @@ public async Task ShouldRetryAsync_WithValidAndInvalidSubStatusCodes_ShouldIncre
headers: new Headers()
{
ActivityId = System.Diagnostics.Trace.CorrelationManager.ActivityId.ToString(),
- SubStatusCode = isValidSubStatusCode
- ? SubStatusCodes.TransportGenerated503
- : SubStatusCodes.BWTermCountLimitExceeded
+ SubStatusCode = SubStatusCodes.TransportGenerated503
},
trace: NoOpTrace.Singleton,
innerException: null);
@@ -112,18 +105,9 @@ public async Task ShouldRetryAsync_WithValidAndInvalidSubStatusCodes_ShouldIncre
obj: policy);
Assert.IsNotNull(retryResult);
- if (isValidSubStatusCode)
- {
- Assert.AreEqual(true, retryResult.ShouldRetry, "MetadataRequestThrottleRetryPolicy should return true since the sub status code indicates to retry the request in the next preferred read region.");
- Assert.AreEqual(1, retryContext.RetryLocationIndex, "Indicates that the retry location index was incremented.");
- Assert.AreEqual(routedServiceEndpoint, request.RequestContext.LocationEndpointToRoute);
- }
- else
- {
- Assert.AreEqual(false, retryResult.ShouldRetry, "ResourceThrottleRetryPolicy should return false since the status code does not indicate the request was throttled.");
- Assert.AreEqual(0, retryContext.RetryLocationIndex, "Indicates that the retry location index remain unchanged.");
- Assert.AreEqual(primaryServiceEndpoint, request.RequestContext.LocationEndpointToRoute);
- }
+ Assert.AreEqual(true, retryResult.ShouldRetry, "MetadataRequestThrottleRetryPolicy should return true since the sub status code indicates to retry the request in the next preferred read region.");
+ Assert.AreEqual(1, retryContext.RetryLocationIndex, "Indicates that the retry location index was incremented.");
+ Assert.AreEqual(routedServiceEndpoint, request.RequestContext.LocationEndpointToRoute);
}
}
}
\ No newline at end of file