diff --git a/Sources/Typhoon/Classes/RetryPolicyService/RetryPolicyService.swift b/Sources/Typhoon/Classes/RetryPolicyService/RetryPolicyService.swift
index 5ebbcee..fe2d5c1 100644
--- a/Sources/Typhoon/Classes/RetryPolicyService/RetryPolicyService.swift
+++ b/Sources/Typhoon/Classes/RetryPolicyService/RetryPolicyService.swift
@@ -7,7 +7,49 @@ import Foundation
 
 // MARK: - RetryPolicyService
 
-/// A class that defines a service for retry policies
+/// `RetryPolicyService` provides a high-level API for retrying asynchronous
+/// operations using configurable retry strategies.
+///
+/// The service encapsulates retry logic such as:
+/// - limiting the number of retry attempts,
+/// - applying delays between retries (e.g. fixed, exponential, or custom),
+/// - reacting to errors on each failed attempt.
+///
+/// This class is typically used for retrying unstable operations like
+/// network requests, database calls, or interactions with external services.
+///
+/// ### Example
+/// ```swift
+/// let strategy = RetryPolicyStrategy.exponential(
+///     retry: 3,
+///     multiplier: 2.0,
+///     duration: .milliseconds(500)
+/// )
+///
+/// let retryService = RetryPolicyService(strategy: strategy)
+///
+/// let data = try await retryService.retry(
+///     strategy: nil,
+///     onFailure: { error in
+///         print("Request failed with error: \(error)")
+///
+///         // Return `true` to continue retrying,
+///         // or `false` to stop and rethrow the error.
+///         return true
+///     }
+/// ) {
+///     try await apiClient.fetchData()
+/// }
+/// ```
+///
+/// In this example:
+/// - The request will be retried up to 3 times.
+/// - The delay between retries grows exponentially.
+/// - Each failure is logged before the next attempt.
+/// - If all retries are exhausted, `RetryPolicyError.retryLimitExceeded` is thrown.
+///
+/// - Note: You can override the default strategy per call by passing a custom
+///   `RetryPolicyStrategy` into the `retry` method.
 public final class RetryPolicyService {
     // MARK: Private
 
@@ -40,9 +82,11 @@ extension RetryPolicyService: IRetryPolicyService {
         onFailure: (@Sendable (Error) async -> Bool)?,
         _ closure: @Sendable () async throws -> T
     ) async throws -> T {
-        for duration in RetrySequence(strategy: strategy ?? self.strategy) {
-            try Task.checkCancellation()
+        let effectiveStrategy = strategy ?? self.strategy
+        var iterator = RetrySequence(strategy: effectiveStrategy).makeIterator()
+
+        while true {
 
             do {
                 return try await closure()
             } catch {
@@ -51,11 +95,15 @@
                 if !shouldContinue {
                     throw error
                 }
-            }
 
-            try await Task.sleep(nanoseconds: duration)
-        }
+                guard let duration = iterator.next() else {
+                    throw RetryPolicyError.retryLimitExceeded
+                }
 
-        throw RetryPolicyError.retryLimitExceeded
+                try Task.checkCancellation()
+
+                try await Task.sleep(nanoseconds: duration)
+            }
+        }
     }
 }
diff --git a/Sources/Typhoon/Typhoon.docc/Articles/advanced-retry-strategies.md b/Sources/Typhoon/Typhoon.docc/Articles/advanced-retry-strategies.md
index 9bcd403..3869a6e 100644
--- a/Sources/Typhoon/Typhoon.docc/Articles/advanced-retry-strategies.md
+++ b/Sources/Typhoon/Typhoon.docc/Articles/advanced-retry-strategies.md
@@ -7,6 +7,34 @@ Master advanced retry patterns and optimization techniques.
 
 This guide covers advanced usage patterns, performance optimization, and sophisticated retry strategies for complex scenarios.
 
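+Before diving into the mechanics, here is the basic call shape the examples in
+this guide build on — a minimal sketch, assuming an `apiClient.fetchData()`
+async endpoint as a stand-in for your own operation:
+
+```swift
+let service = RetryPolicyService(
+    strategy: .exponential(retry: 3, multiplier: 2.0, duration: .seconds(1))
+)
+
+// Runs once immediately, then retries up to 3 times with exponential delays.
+// `apiClient.fetchData()` is a placeholder for any async throwing call.
+let data = try await service.retry {
+    try await apiClient.fetchData()
+}
+```
+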
+## How Retry Mechanism Works + +Understanding the retry flow is crucial for effective error handling: + +```swift +// Configuration: retry: 3 means 3 RETRY attempts +let strategy = RetryStrategy.exponential( + retry: 3, + multiplier: 2.0, + duration: .seconds(1) +) +``` + +**Total Execution Flow:** + +| Attempt Type | Attempt # | Delay Before | Description | +|--------------|-----------|--------------|-------------| +| Initial | 1 | 0s | First execution (not a retry) | +| Retry | 2 | 1s | First retry after failure | +| Retry | 3 | 2s | Second retry after failure | +| Retry | 4 | 4s | Third retry after failure | + +**Key Points:** +- The `retry` parameter specifies the number of **retry attempts**, not total attempts +- Total attempts = 1 (initial) + N (retries) +- `retry: 3` means **4 total attempts** (1 initial + 3 retries) +- `onFailure` callback is invoked after **every** failed attempt, including the initial one + ## Strategy Deep Dive ### Understanding Exponential Backoff @@ -15,33 +43,36 @@ Exponential backoff progressively increases wait times to avoid overwhelming rec ```swift let strategy = RetryStrategy.exponential( - retry: 5, + retry: 5, // 5 retry attempts multiplier: 2.0, duration: .seconds(1) ) ``` -**Calculation:** `delay = baseDuration × multiplier^retryCount` +**Calculation:** `delay = baseDuration × multiplier^(attemptNumber - 1)` + +| Attempt Type | Total Attempt | Calculation | Delay Before | +|--------------|---------------|-------------|--------------| +| Initial | 1 | - | 0s (immediate) | +| Retry 1 | 2 | 1 × 2⁰ | 1s | +| Retry 2 | 3 | 1 × 2¹ | 2s | +| Retry 3 | 4 | 1 × 2² | 4s | +| Retry 4 | 5 | 1 × 2³ | 8s | +| Retry 5 | 6 | 1 × 2⁴ | 16s | -| Attempt | Calculation | Delay | -|---------|-------------|-------| -| 1 | 1 × 2⁰ | 1s | -| 2 | 1 × 2¹ | 2s | -| 3 | 1 × 2² | 4s | -| 4 | 1 × 2³ | 8s | -| 5 | 1 × 2⁴ | 16s | +**Total: 6 attempts (1 initial + 5 retries)** **Multiplier effects:** ```swift // Aggressive backoff (multiplier: 3.0) -// 1s → 3s → 9s → 27s → 81s +// Initial: 0s → Retry: 1s → 3s → 9s → 27s → 81s // Moderate backoff (multiplier: 1.5) -// 1s → 1.5s → 2.25s → 3.375s → 5.0625s +// Initial: 0s → Retry: 1s → 1.5s → 2.25s → 3.375s → 5.0625s // Slow backoff (multiplier: 1.2) -// 1s → 1.2s → 1.44s → 1.728s → 2.074s +// Initial: 0s → Retry: 1s → 1.2s → 1.44s → 1.728s → 2.074s ``` ### Jitter: Preventing Thundering Herd @@ -50,7 +81,7 @@ When multiple clients retry simultaneously, they can overwhelm a recovering serv ```swift let strategy = RetryStrategy.exponentialWithJitter( - retry: 5, + retry: 5, // 5 retry attempts jitterFactor: 0.2, // ±20% randomization maxInterval: .seconds(30), // Cap at 30 seconds multiplier: 2.0, @@ -60,17 +91,17 @@ let strategy = RetryStrategy.exponentialWithJitter( **Without jitter:** ``` -Client 1: 0s → 1s → 2s → 4s → 8s -Client 2: 0s → 1s → 2s → 4s → 8s -Client 3: 0s → 1s → 2s → 4s → 8s +Client 1: 0s(init) → 1s → 2s → 4s → 8s → 16s +Client 2: 0s(init) → 1s → 2s → 4s → 8s → 16s +Client 3: 0s(init) → 1s → 2s → 4s → 8s → 16s All hit server simultaneously! 💥 ``` **With jitter:** ``` -Client 1: 0s → 0.9s → 2.1s → 3.8s → 8.2s -Client 2: 0s → 1.1s → 1.9s → 4.3s → 7.7s -Client 3: 0s → 0.8s → 2.2s → 3.9s → 8.1s +Client 1: 0s(init) → 0.9s → 2.1s → 3.8s → 8.2s → 15.7s +Client 2: 0s(init) → 1.1s → 1.9s → 4.3s → 7.7s → 16.4s +Client 3: 0s(init) → 0.8s → 2.2s → 3.9s → 8.1s → 15.8s Traffic spread out! 
✅ ``` @@ -80,17 +111,19 @@ Prevent delays from growing unbounded: ```swift .exponentialWithJitter( - retry: 10, + retry: 10, // 10 retry attempts = 11 total jitterFactor: 0.1, - maxInterval: .seconds(60), // Never wait more than 60 seconds + maxInterval: .seconds(60), // Never wait more than 60 seconds multiplier: 2.0, duration: .seconds(1) ) ``` -**Without cap:** 1s → 2s → 4s → 8s → 16s → 32s → 64s → 128s → 256s... +**Without cap:** +Initial → 1s → 2s → 4s → 8s → 16s → 32s → 64s → 128s → 256s → 512s -**With 60s cap:** 1s → 2s → 4s → 8s → 16s → 32s → 60s → 60s → 60s... +**With 60s cap:** +Initial → 1s → 2s → 4s → 8s → 16s → 32s → 60s → 60s → 60s → 60s ## Advanced Patterns @@ -132,7 +165,8 @@ func fetchWithConditionalRetry() async throws -> Data { } catch let error as RetryPolicyError { switch error { case .retryLimitExceeded: - // Retry linit exceeded + // All retry attempts exhausted + print("Retry limit exceeded after multiple attempts") throw error } } @@ -200,6 +234,7 @@ actor AdaptiveRetryService { private func selectStrategy() -> RetryPolicyStrategy { if consecutiveFailures >= maxConsecutiveFailures { // System under stress - use conservative strategy + // 1 initial + 3 retries with longer delays return .exponentialWithJitter( retry: 3, jitterFactor: 0.3, @@ -209,6 +244,7 @@ actor AdaptiveRetryService { ) } else { // Normal operation - use standard strategy + // 1 initial + 4 retries return .exponential( retry: 4, multiplier: 2.0, diff --git a/Sources/Typhoon/Typhoon.docc/Articles/quick-start.md b/Sources/Typhoon/Typhoon.docc/Articles/quick-start.md index f1c13b6..8aee244 100644 --- a/Sources/Typhoon/Typhoon.docc/Articles/quick-start.md +++ b/Sources/Typhoon/Typhoon.docc/Articles/quick-start.md @@ -33,7 +33,6 @@ This will: - Try your operation immediately - If it fails, wait 1 second and retry - Repeat up to 3 times -- Throw the last error if all attempts fail ### Network Request Example @@ -75,7 +74,7 @@ Best for predictable, fixed delays: .constant(retry: 5, duration: .seconds(2)) ``` -**Timeline:** 0s → 2s → 2s → 2s → 2s +**Timeline:** 0s (initial) → 2s → 2s → 2s → 2s → 2s ### Exponential Strategy @@ -86,7 +85,7 @@ Ideal for backing off from failing services: .exponential(retry: 4, multiplier: 2.0, duration: .seconds(1)) ``` -**Timeline:** 0s → 1s → 2s → 4s +**Timeline:** 0s (initial) → 1s → 2s → 4s → 8s ### Exponential with Jitter @@ -103,7 +102,7 @@ Best for preventing thundering herd problems: ) ``` -**Timeline:** 0s → ~1s → ~2s → ~4s → ~8s (with randomization) +**Timeline:** 0s (initial) → ~1s → ~2s → ~4s → ~8s → ~16s (with randomization) ## Common Patterns diff --git a/Tests/TyphoonTests/UnitTests/RetryPolicyServiceTests.swift b/Tests/TyphoonTests/UnitTests/RetryPolicyServiceTests.swift index 683936e..ef86069 100644 --- a/Tests/TyphoonTests/UnitTests/RetryPolicyServiceTests.swift +++ b/Tests/TyphoonTests/UnitTests/RetryPolicyServiceTests.swift @@ -9,15 +9,15 @@ import XCTest // MARK: - RetryPolicyServiceTests final class RetryPolicyServiceTests: XCTestCase { - // MARK: Private + // MARK: Properties private var sut: IRetryPolicyService! 
- // MARK: XCTestCase + // MARK: Lifecycle override func setUp() { super.setUp() - sut = RetryPolicyService(strategy: .constant(retry: .retry, duration: .seconds(0))) + sut = RetryPolicyService(strategy: .constant(retry: .defaultRetryCount, duration: .seconds(0))) } override func tearDown() { @@ -25,9 +25,12 @@ final class RetryPolicyServiceTests: XCTestCase { super.tearDown() } - // MARK: Tests + // MARK: Tests - Error Handling + + func test_thatRetryThrowsRetryLimitExceededError_whenAllRetriesFail() async throws { + // given + let expectedError = RetryPolicyError.retryLimitExceeded - func test_thatRetryServiceThrowsAnError_whenRetryLimitExceeded() async throws { // when var receivedError: Error? do { @@ -37,85 +40,264 @@ final class RetryPolicyServiceTests: XCTestCase { } // then - XCTAssertEqual(receivedError as? NSError, RetryPolicyError.retryLimitExceeded as NSError) + XCTAssertEqual(receivedError as? NSError, expectedError as NSError) } - func test_thatRetryServiceDoesNotThrowAnError_whenServiceDidReturnValue() async throws { + func test_thatRetryThrowsOriginalError_whenOnFailureReturnsFalse() async throws { // given - actor Counter { - // MARK: Properties + let originalError = URLError(.timedOut) + + // when + var receivedError: Error? + do { + _ = try await sut.retry( + strategy: .constant(retry: .defaultRetryCount, duration: .nanoseconds(1)), + onFailure: { _ in false } + ) { + throw originalError + } + } catch { + receivedError = error + } - private var value: Int = 0 + // then + XCTAssertEqual(receivedError as? URLError, originalError) + } - // MARK: Internal + // MARK: Tests - Success Cases + + func test_thatRetryReturnsValue_whenOperationSucceedsImmediately() async throws { + // given + let expectedValue = 42 - func increment() -> Int { - value += 1 - return value + // when + let result = try await sut.retry { + expectedValue + } + + // then + XCTAssertEqual(result, expectedValue) + } + + func test_thatRetryReturnsValue_whenOperationSucceedsAfterRetries() async throws { + // given + let counter = Counter() + let expectedValue = 100 + + // when + let result = try await sut.retry( + strategy: .constant(retry: .defaultRetryCount, duration: .nanoseconds(1)) + ) { + let currentCount = await counter.increment() + + if currentCount >= .defaultRetryCount { + return expectedValue } + throw URLError(.unknown) } + // then + XCTAssertEqual(result, expectedValue) + let finalCount = await counter.getValue() + XCTAssertEqual(finalCount, .defaultRetryCount) + } + + // MARK: Tests - Retry Count + + func test_thatRetryAttemptsCorrectNumberOfTimes_whenAllRetriesFail() async throws { + // given let counter = Counter() // when - _ = try await sut.retry( - strategy: .constant(retry: .retry, duration: .nanoseconds(1)), - { - let currentCounter = await counter.increment() + do { + _ = try await sut.retry( + strategy: .constant(retry: .defaultRetryCount, duration: .nanoseconds(1)) + ) { + _ = await counter.increment() + throw URLError(.unknown) + } + } catch {} - if currentCounter > .retry - 1 { - return 1 - } + // then + let attemptCount = await counter.getValue() + XCTAssertEqual(attemptCount, .defaultRetryCount + 1) + } + + func test_thatRetryStopsImmediately_whenOnFailureReturnsFalse() async throws { + // given + let counter = Counter() + + // when + do { + _ = try await sut.retry( + strategy: .constant(retry: .defaultRetryCount, duration: .nanoseconds(1)), + onFailure: { _ in false } + ) { + _ = await counter.increment() throw URLError(.unknown) } - ) + } catch {} // then - let finalValue = await 
counter.increment() - 1 - XCTAssertEqual(finalValue, .retry) + let attemptCount = await counter.getValue() + XCTAssertEqual(attemptCount, 1) } - func test_thatRetryServiceHandlesErrorOnFailureCallback_whenErrorOcurred() async { + // MARK: Tests - Failure Callback + + func test_thatRetryInvokesOnFailureCallback_whenErrorOccurs() async { // given - actor ErrorContainer { - // MARK: Private + let errorContainer = ErrorContainer() + let expectedError = URLError(.notConnectedToInternet) - private var error: NSError? + // when + do { + _ = try await sut.retry( + strategy: .constant(retry: .defaultRetryCount, duration: .nanoseconds(1)), + onFailure: { error in + await errorContainer.setError(error as NSError) + return false + } + ) { + throw expectedError + } + } catch {} + + // then + let capturedError = await errorContainer.getError() + XCTAssertEqual(capturedError as? URLError, expectedError) + } - // MARK: Internal + func test_thatRetryInvokesOnFailureMultipleTimes_whenMultipleRetriesFail() async { + // given + let counter = Counter() + let expectedCallCount = 3 - func setError(_ newError: NSError) { - error = newError + // when + do { + _ = try await sut.retry( + strategy: .constant(retry: expectedCallCount, duration: .nanoseconds(1)), + onFailure: { _ in + true + } + ) { + _ = await counter.increment() + throw URLError(.unknown) } + } catch {} - func getError() -> NSError? { - error + // then + let callCount = await counter.getValue() + XCTAssertEqual(callCount, expectedCallCount + 1) + } + + // MARK: Tests - Edge Cases + + func test_thatRetryReturnsValue_whenRetryCountIsZero() async throws { + // given + let expectedValue = 7 + let zeroRetryStrategy = RetryPolicyService( + strategy: .constant(retry: 0, duration: .nanoseconds(1)) + ) + + // when + let result = try await zeroRetryStrategy.retry { + expectedValue + } + + // then + XCTAssertEqual(result, expectedValue) + } + + func test_thatRetryThrowsError_whenRetryCountIsZeroAndOperationFails() async throws { + // given + let zeroRetryStrategy = RetryPolicyService( + strategy: .constant(retry: 0, duration: .nanoseconds(1)) + ) + + // when + var receivedError: Error? + do { + _ = try await zeroRetryStrategy.retry { + throw URLError(.badURL) } + } catch { + receivedError = error } + // then + XCTAssertNotNil(receivedError) + } + + func test_thatRetryHandlesDifferentErrorTypes_whenMultipleErrorsOccur() async { + // given let errorContainer = ErrorContainer() + let counter = Counter() + let errors: [Error] = [ + URLError(.badURL), + URLError(.timedOut), + URLError(.cannotFindHost), + ] // when do { _ = try await sut.retry( - strategy: .constant(retry: .retry, duration: .nanoseconds(1)), + strategy: .constant(retry: errors.count, duration: .nanoseconds(1)), onFailure: { error in await errorContainer.setError(error as NSError) - return false + return true } ) { - throw URLError(.unknown) + let index = await counter.increment() - 1 + throw errors[min(index, errors.count - 1)] } } catch {} // then - let capturedError = await errorContainer.getError() - XCTAssertEqual(capturedError as? URLError, URLError(.unknown)) + let lastError = await errorContainer.getError() + XCTAssertNotNil(lastError) + } +} + +// MARK: - Counter + +private actor Counter { + // MARK: Properties + + private var value: Int = 0 + + // MARK: Internal + + func increment() -> Int { + value += 1 + return value + } + + func getValue() -> Int { + value + } +} + +// MARK: - ErrorContainer + +private actor ErrorContainer { + // MARK: Properties + + private var error: NSError? 
+ + // MARK: Internal + + func setError(_ newError: NSError) { + error = newError + } + + func getError() -> NSError? { + error } } // MARK: - Constants private extension Int { - static let retry = 5 + static let defaultRetryCount = 5 }