diff --git a/.editorconfig b/.editorconfig
index c428507..72a2997 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -39,3 +39,7 @@ csharp_preserve_single_line_statements = false
dotnet_analyzer_diagnostic.category-security.severity = error
dotnet_analyzer_diagnostic.category-performance.severity = warning
dotnet_analyzer_diagnostic.category-reliability.severity = warning
+
+# Roslynator: require ConfigureAwait(false) in library code
+dotnet_diagnostic.RCS1090.severity = warning
+roslynator_configure_await = true
diff --git a/.specify/memory/constitution.md b/.specify/memory/constitution.md
deleted file mode 100644
index 72319ff..0000000
--- a/.specify/memory/constitution.md
+++ /dev/null
@@ -1,160 +0,0 @@
-
-
-# SharpSync Constitution
-
-## Core Principles
-
-### I. Library-First, UI-Agnostic
-
-SharpSync is a **pure .NET library** with zero UI dependencies. Every
-feature MUST be consumable by any .NET application—console, desktop,
-mobile, or server—without requiring a specific UI framework. No feature
-may introduce a hard dependency on a UI toolkit, platform-specific
-windowing API, or interactive prompt. Hooks (callbacks, events,
-delegates) MUST be provided so that callers supply their own UI when
-user interaction is needed (e.g., OAuth2 browser flow, conflict
-resolution dialogs).
-
-**Rationale**: SharpSync exists to power applications like Nimbus
-(Windows desktop client) while remaining usable in headless CI
-pipelines, Linux daemons, and cross-platform mobile apps. Coupling to
-any UI would break this contract.
-
-### II. Interface-Driven Design
-
-All major components MUST be defined by interfaces (`ISyncEngine`,
-`ISyncStorage`, `ISyncDatabase`, `IConflictResolver`, `ISyncFilter`,
-`IOAuth2Provider`). Concrete implementations MUST accept dependencies
-via constructor injection. New storage backends, conflict strategies,
-and database providers MUST be addable without modifying existing code.
-
-**Rationale**: Interface-based design enables testability (mocking),
-extensibility (new backends), and inversion of control for host
-applications that use dependency injection containers.
-
-### III. Async-First, Thread-Safe Where Documented
-
-All I/O-bound operations MUST use `async`/`await` with
-`CancellationToken` support. Public APIs that are safe to call from
-any thread (state properties, notification methods, pause/resume)
-MUST be explicitly documented as thread-safe. Only one sync operation
-may run at a time per `SyncEngine` instance—this invariant MUST be
-enforced, not merely documented.
-
-**Rationale**: Modern .NET consumers expect async APIs. Clear
-thread-safety contracts prevent data races in desktop and server
-applications that call SharpSync from multiple threads.
-
-### IV. Test Discipline
-
-Unit tests MUST accompany new functionality. Integration tests MUST
-exist for every storage backend and MUST run in CI via Docker services
-on Ubuntu. Tests MUST auto-skip gracefully (using `Skip.If()` or
-equivalent) on platforms where required services are unavailable.
-The test suite MUST pass on all CI matrix platforms (Ubuntu, Windows,
-macOS) before a PR may merge. Code coverage MUST be reported via
-Codecov.
-
-**Rationale**: SharpSync targets multiple OS platforms and multiple
-remote storage protocols. Automated, cross-platform testing is the
-primary defense against regressions.
-
-### V. Simplicity & YAGNI
-
-Features MUST solve a current, demonstrated need—not a hypothetical
-future one. Abstractions MUST be introduced only when two or more
-concrete consumers exist. Complexity (e.g., additional projects,
-repository patterns, plugin systems) MUST be justified in a
-Complexity Tracking table during planning. If a simpler alternative
-is sufficient, it MUST be preferred.
-
-**Rationale**: A synchronization library carries inherent complexity
-in conflict resolution, concurrency, and multi-protocol support.
-Unnecessary abstractions compound this complexity and slow both
-contributors and consumers.
-
-## Quality & Safety Standards
-
-- **Warnings as Errors**: The project MUST compile with
- `TreatWarningsAsErrors` enabled and .NET analyzers at the latest
- analysis level.
-- **Nullable Reference Types**: All projects MUST enable `
- enable`. Nullable warnings MUST be resolved, not
- suppressed.
-- **XML Documentation**: All public APIs MUST have XML doc comments.
- The `GenerateDocumentationFile` MSBuild property MUST remain enabled.
-- **No Native Dependencies**: SharpSync MUST remain a pure managed
- .NET library. Native/P-Invoke dependencies are prohibited in the
- core library (consumers may use native code in their own layers).
-- **Security**: Code MUST NOT introduce OWASP Top 10 vulnerabilities.
- Credentials, tokens, and secrets MUST NOT be logged or committed.
- Sensitive files (`.env`, credentials) MUST be excluded from source
- control.
-- **No Null Device Writes**: Code MUST NEVER copy or write to the
- Windows null device (`NUL`), `/dev/null`, or `Stream.Null` as a
- file operation target. This includes build scripts, tests, and
- CI pipelines. Discarding data via null device copies can mask
- errors and cause silent data loss on Windows.
-- **Licensing**: All dependencies MUST have licenses compatible with
- Apache-2.0.
-
-## Development Workflow
-
-- **Branching**: Feature work MUST occur on a named branch. Pull
- requests target `master`.
-- **CI Gate**: The GitHub Actions matrix (Ubuntu, Windows, macOS) MUST
- pass—build, format check, and all applicable tests—before merge.
-- **Integration Tests**: Run via Docker Compose on Ubuntu CI. Locally,
- developers use the provided scripts (`scripts/run-integration-tests
- .sh` / `.ps1`). Integration tests MUST NOT fail the build on
- platforms where Docker services are unavailable; they MUST skip.
-- **Code Review**: All PRs MUST be reviewed before merge. The reviewer
- MUST verify compliance with this constitution's principles.
-- **Commit Hygiene**: Commits MUST have concise, descriptive messages.
- Avoid mixing unrelated changes in a single commit.
-
-## Governance
-
-This constitution is the authoritative source of project principles
-and standards. In cases of conflict between this document and other
-project documentation, this constitution takes precedence.
-
-### Amendment Procedure
-
-1. Propose changes via a pull request modifying this file.
-2. The PR description MUST state the version bump type (MAJOR, MINOR,
- PATCH) with rationale.
-3. At least one maintainer MUST approve the amendment PR.
-4. Upon merge, update `CONSTITUTION_VERSION` and `LAST_AMENDED_DATE`.
-
-### Versioning Policy
-
-- **MAJOR**: Removal or incompatible redefinition of an existing
- principle.
-- **MINOR**: Addition of a new principle or material expansion of
- existing guidance.
-- **PATCH**: Clarifications, wording improvements, typo fixes.
-
-### Compliance Review
-
-Every pull request review SHOULD include a check that the proposed
-changes do not violate the principles above. The plan template's
-"Constitution Check" section MUST be filled before design work begins
-on any new feature.
-
-**Version**: 1.1.0 | **Ratified**: 2026-01-30 | **Last Amended**: 2026-01-30
diff --git a/.specify/scripts/powershell/check-prerequisites.ps1 b/.specify/scripts/powershell/check-prerequisites.ps1
deleted file mode 100644
index 91667e9..0000000
--- a/.specify/scripts/powershell/check-prerequisites.ps1
+++ /dev/null
@@ -1,148 +0,0 @@
-#!/usr/bin/env pwsh
-
-# Consolidated prerequisite checking script (PowerShell)
-#
-# This script provides unified prerequisite checking for Spec-Driven Development workflow.
-# It replaces the functionality previously spread across multiple scripts.
-#
-# Usage: ./check-prerequisites.ps1 [OPTIONS]
-#
-# OPTIONS:
-# -Json Output in JSON format
-# -RequireTasks Require tasks.md to exist (for implementation phase)
-# -IncludeTasks Include tasks.md in AVAILABLE_DOCS list
-# -PathsOnly Only output path variables (no validation)
-# -Help, -h Show help message
-
-[CmdletBinding()]
-param(
- [switch]$Json,
- [switch]$RequireTasks,
- [switch]$IncludeTasks,
- [switch]$PathsOnly,
- [switch]$Help
-)
-
-$ErrorActionPreference = 'Stop'
-
-# Show help if requested
-if ($Help) {
- Write-Output @"
-Usage: check-prerequisites.ps1 [OPTIONS]
-
-Consolidated prerequisite checking for Spec-Driven Development workflow.
-
-OPTIONS:
- -Json Output in JSON format
- -RequireTasks Require tasks.md to exist (for implementation phase)
- -IncludeTasks Include tasks.md in AVAILABLE_DOCS list
- -PathsOnly Only output path variables (no prerequisite validation)
- -Help, -h Show this help message
-
-EXAMPLES:
- # Check task prerequisites (plan.md required)
- .\check-prerequisites.ps1 -Json
-
- # Check implementation prerequisites (plan.md + tasks.md required)
- .\check-prerequisites.ps1 -Json -RequireTasks -IncludeTasks
-
- # Get feature paths only (no validation)
- .\check-prerequisites.ps1 -PathsOnly
-
-"@
- exit 0
-}
-
-# Source common functions
-. "$PSScriptRoot/common.ps1"
-
-# Get feature paths and validate branch
-$paths = Get-FeaturePathsEnv
-
-if (-not (Test-FeatureBranch -Branch $paths.CURRENT_BRANCH -HasGit:$paths.HAS_GIT)) {
- exit 1
-}
-
-# If paths-only mode, output paths and exit (support combined -Json -PathsOnly)
-if ($PathsOnly) {
- if ($Json) {
- [PSCustomObject]@{
- REPO_ROOT = $paths.REPO_ROOT
- BRANCH = $paths.CURRENT_BRANCH
- FEATURE_DIR = $paths.FEATURE_DIR
- FEATURE_SPEC = $paths.FEATURE_SPEC
- IMPL_PLAN = $paths.IMPL_PLAN
- TASKS = $paths.TASKS
- } | ConvertTo-Json -Compress
- } else {
- Write-Output "REPO_ROOT: $($paths.REPO_ROOT)"
- Write-Output "BRANCH: $($paths.CURRENT_BRANCH)"
- Write-Output "FEATURE_DIR: $($paths.FEATURE_DIR)"
- Write-Output "FEATURE_SPEC: $($paths.FEATURE_SPEC)"
- Write-Output "IMPL_PLAN: $($paths.IMPL_PLAN)"
- Write-Output "TASKS: $($paths.TASKS)"
- }
- exit 0
-}
-
-# Validate required directories and files
-if (-not (Test-Path $paths.FEATURE_DIR -PathType Container)) {
- Write-Output "ERROR: Feature directory not found: $($paths.FEATURE_DIR)"
- Write-Output "Run /speckit.specify first to create the feature structure."
- exit 1
-}
-
-if (-not (Test-Path $paths.IMPL_PLAN -PathType Leaf)) {
- Write-Output "ERROR: plan.md not found in $($paths.FEATURE_DIR)"
- Write-Output "Run /speckit.plan first to create the implementation plan."
- exit 1
-}
-
-# Check for tasks.md if required
-if ($RequireTasks -and -not (Test-Path $paths.TASKS -PathType Leaf)) {
- Write-Output "ERROR: tasks.md not found in $($paths.FEATURE_DIR)"
- Write-Output "Run /speckit.tasks first to create the task list."
- exit 1
-}
-
-# Build list of available documents
-$docs = @()
-
-# Always check these optional docs
-if (Test-Path $paths.RESEARCH) { $docs += 'research.md' }
-if (Test-Path $paths.DATA_MODEL) { $docs += 'data-model.md' }
-
-# Check contracts directory (only if it exists and has files)
-if ((Test-Path $paths.CONTRACTS_DIR) -and (Get-ChildItem -Path $paths.CONTRACTS_DIR -ErrorAction SilentlyContinue | Select-Object -First 1)) {
- $docs += 'contracts/'
-}
-
-if (Test-Path $paths.QUICKSTART) { $docs += 'quickstart.md' }
-
-# Include tasks.md if requested and it exists
-if ($IncludeTasks -and (Test-Path $paths.TASKS)) {
- $docs += 'tasks.md'
-}
-
-# Output results
-if ($Json) {
- # JSON output
- [PSCustomObject]@{
- FEATURE_DIR = $paths.FEATURE_DIR
- AVAILABLE_DOCS = $docs
- } | ConvertTo-Json -Compress
-} else {
- # Text output
- Write-Output "FEATURE_DIR:$($paths.FEATURE_DIR)"
- Write-Output "AVAILABLE_DOCS:"
-
- # Show status of each potential document
- Test-FileExists -Path $paths.RESEARCH -Description 'research.md' | Out-Null
- Test-FileExists -Path $paths.DATA_MODEL -Description 'data-model.md' | Out-Null
- Test-DirHasFiles -Path $paths.CONTRACTS_DIR -Description 'contracts/' | Out-Null
- Test-FileExists -Path $paths.QUICKSTART -Description 'quickstart.md' | Out-Null
-
- if ($IncludeTasks) {
- Test-FileExists -Path $paths.TASKS -Description 'tasks.md' | Out-Null
- }
-}
diff --git a/.specify/scripts/powershell/common.ps1 b/.specify/scripts/powershell/common.ps1
deleted file mode 100644
index b0be273..0000000
--- a/.specify/scripts/powershell/common.ps1
+++ /dev/null
@@ -1,137 +0,0 @@
-#!/usr/bin/env pwsh
-# Common PowerShell functions analogous to common.sh
-
-function Get-RepoRoot {
- try {
- $result = git rev-parse --show-toplevel 2>$null
- if ($LASTEXITCODE -eq 0) {
- return $result
- }
- } catch {
- # Git command failed
- }
-
- # Fall back to script location for non-git repos
- return (Resolve-Path (Join-Path $PSScriptRoot "../../..")).Path
-}
-
-function Get-CurrentBranch {
- # First check if SPECIFY_FEATURE environment variable is set
- if ($env:SPECIFY_FEATURE) {
- return $env:SPECIFY_FEATURE
- }
-
- # Then check git if available
- try {
- $result = git rev-parse --abbrev-ref HEAD 2>$null
- if ($LASTEXITCODE -eq 0) {
- return $result
- }
- } catch {
- # Git command failed
- }
-
- # For non-git repos, try to find the latest feature directory
- $repoRoot = Get-RepoRoot
- $specsDir = Join-Path $repoRoot "specs"
-
- if (Test-Path $specsDir) {
- $latestFeature = ""
- $highest = 0
-
- Get-ChildItem -Path $specsDir -Directory | ForEach-Object {
- if ($_.Name -match '^(\d{3})-') {
- $num = [int]$matches[1]
- if ($num -gt $highest) {
- $highest = $num
- $latestFeature = $_.Name
- }
- }
- }
-
- if ($latestFeature) {
- return $latestFeature
- }
- }
-
- # Final fallback
- return "main"
-}
-
-function Test-HasGit {
- try {
- git rev-parse --show-toplevel 2>$null | Out-Null
- return ($LASTEXITCODE -eq 0)
- } catch {
- return $false
- }
-}
-
-function Test-FeatureBranch {
- param(
- [string]$Branch,
- [bool]$HasGit = $true
- )
-
- # For non-git repos, we can't enforce branch naming but still provide output
- if (-not $HasGit) {
- Write-Warning "[specify] Warning: Git repository not detected; skipped branch validation"
- return $true
- }
-
- if ($Branch -notmatch '^[0-9]{3}-') {
- Write-Output "ERROR: Not on a feature branch. Current branch: $Branch"
- Write-Output "Feature branches should be named like: 001-feature-name"
- return $false
- }
- return $true
-}
-
-function Get-FeatureDir {
- param([string]$RepoRoot, [string]$Branch)
- Join-Path $RepoRoot "specs/$Branch"
-}
-
-function Get-FeaturePathsEnv {
- $repoRoot = Get-RepoRoot
- $currentBranch = Get-CurrentBranch
- $hasGit = Test-HasGit
- $featureDir = Get-FeatureDir -RepoRoot $repoRoot -Branch $currentBranch
-
- [PSCustomObject]@{
- REPO_ROOT = $repoRoot
- CURRENT_BRANCH = $currentBranch
- HAS_GIT = $hasGit
- FEATURE_DIR = $featureDir
- FEATURE_SPEC = Join-Path $featureDir 'spec.md'
- IMPL_PLAN = Join-Path $featureDir 'plan.md'
- TASKS = Join-Path $featureDir 'tasks.md'
- RESEARCH = Join-Path $featureDir 'research.md'
- DATA_MODEL = Join-Path $featureDir 'data-model.md'
- QUICKSTART = Join-Path $featureDir 'quickstart.md'
- CONTRACTS_DIR = Join-Path $featureDir 'contracts'
- }
-}
-
-function Test-FileExists {
- param([string]$Path, [string]$Description)
- if (Test-Path -Path $Path -PathType Leaf) {
- Write-Output " ✓ $Description"
- return $true
- } else {
- Write-Output " ✗ $Description"
- return $false
- }
-}
-
-function Test-DirHasFiles {
- param([string]$Path, [string]$Description)
- if ((Test-Path -Path $Path -PathType Container) -and (Get-ChildItem -Path $Path -ErrorAction SilentlyContinue | Where-Object { -not $_.PSIsContainer } | Select-Object -First 1)) {
- Write-Output " ✓ $Description"
- return $true
- } else {
- Write-Output " ✗ $Description"
- return $false
- }
-}
-
diff --git a/.specify/scripts/powershell/create-new-feature.ps1 b/.specify/scripts/powershell/create-new-feature.ps1
deleted file mode 100644
index 2f0172e..0000000
--- a/.specify/scripts/powershell/create-new-feature.ps1
+++ /dev/null
@@ -1,283 +0,0 @@
-#!/usr/bin/env pwsh
-# Create a new feature
-[CmdletBinding()]
-param(
- [switch]$Json,
- [string]$ShortName,
- [int]$Number = 0,
- [switch]$Help,
- [Parameter(ValueFromRemainingArguments = $true)]
- [string[]]$FeatureDescription
-)
-$ErrorActionPreference = 'Stop'
-
-# Show help if requested
-if ($Help) {
- Write-Host "Usage: ./create-new-feature.ps1 [-Json] [-ShortName ] [-Number N] "
- Write-Host ""
- Write-Host "Options:"
- Write-Host " -Json Output in JSON format"
- Write-Host " -ShortName Provide a custom short name (2-4 words) for the branch"
- Write-Host " -Number N Specify branch number manually (overrides auto-detection)"
- Write-Host " -Help Show this help message"
- Write-Host ""
- Write-Host "Examples:"
- Write-Host " ./create-new-feature.ps1 'Add user authentication system' -ShortName 'user-auth'"
- Write-Host " ./create-new-feature.ps1 'Implement OAuth2 integration for API'"
- exit 0
-}
-
-# Check if feature description provided
-if (-not $FeatureDescription -or $FeatureDescription.Count -eq 0) {
- Write-Error "Usage: ./create-new-feature.ps1 [-Json] [-ShortName ] "
- exit 1
-}
-
-$featureDesc = ($FeatureDescription -join ' ').Trim()
-
-# Resolve repository root. Prefer git information when available, but fall back
-# to searching for repository markers so the workflow still functions in repositories that
-# were initialized with --no-git.
-function Find-RepositoryRoot {
- param(
- [string]$StartDir,
- [string[]]$Markers = @('.git', '.specify')
- )
- $current = Resolve-Path $StartDir
- while ($true) {
- foreach ($marker in $Markers) {
- if (Test-Path (Join-Path $current $marker)) {
- return $current
- }
- }
- $parent = Split-Path $current -Parent
- if ($parent -eq $current) {
- # Reached filesystem root without finding markers
- return $null
- }
- $current = $parent
- }
-}
-
-function Get-HighestNumberFromSpecs {
- param([string]$SpecsDir)
-
- $highest = 0
- if (Test-Path $SpecsDir) {
- Get-ChildItem -Path $SpecsDir -Directory | ForEach-Object {
- if ($_.Name -match '^(\d+)') {
- $num = [int]$matches[1]
- if ($num -gt $highest) { $highest = $num }
- }
- }
- }
- return $highest
-}
-
-function Get-HighestNumberFromBranches {
- param()
-
- $highest = 0
- try {
- $branches = git branch -a 2>$null
- if ($LASTEXITCODE -eq 0) {
- foreach ($branch in $branches) {
- # Clean branch name: remove leading markers and remote prefixes
- $cleanBranch = $branch.Trim() -replace '^\*?\s+', '' -replace '^remotes/[^/]+/', ''
-
- # Extract feature number if branch matches pattern ###-*
- if ($cleanBranch -match '^(\d+)-') {
- $num = [int]$matches[1]
- if ($num -gt $highest) { $highest = $num }
- }
- }
- }
- } catch {
- # If git command fails, return 0
- Write-Verbose "Could not check Git branches: $_"
- }
- return $highest
-}
-
-function Get-NextBranchNumber {
- param(
- [string]$SpecsDir
- )
-
- # Fetch all remotes to get latest branch info (suppress errors if no remotes)
- try {
- git fetch --all --prune 2>$null | Out-Null
- } catch {
- # Ignore fetch errors
- }
-
- # Get highest number from ALL branches (not just matching short name)
- $highestBranch = Get-HighestNumberFromBranches
-
- # Get highest number from ALL specs (not just matching short name)
- $highestSpec = Get-HighestNumberFromSpecs -SpecsDir $SpecsDir
-
- # Take the maximum of both
- $maxNum = [Math]::Max($highestBranch, $highestSpec)
-
- # Return next number
- return $maxNum + 1
-}
-
-function ConvertTo-CleanBranchName {
- param([string]$Name)
-
- return $Name.ToLower() -replace '[^a-z0-9]', '-' -replace '-{2,}', '-' -replace '^-', '' -replace '-$', ''
-}
-$fallbackRoot = (Find-RepositoryRoot -StartDir $PSScriptRoot)
-if (-not $fallbackRoot) {
- Write-Error "Error: Could not determine repository root. Please run this script from within the repository."
- exit 1
-}
-
-try {
- $repoRoot = git rev-parse --show-toplevel 2>$null
- if ($LASTEXITCODE -eq 0) {
- $hasGit = $true
- } else {
- throw "Git not available"
- }
-} catch {
- $repoRoot = $fallbackRoot
- $hasGit = $false
-}
-
-Set-Location $repoRoot
-
-$specsDir = Join-Path $repoRoot 'specs'
-New-Item -ItemType Directory -Path $specsDir -Force | Out-Null
-
-# Function to generate branch name with stop word filtering and length filtering
-function Get-BranchName {
- param([string]$Description)
-
- # Common stop words to filter out
- $stopWords = @(
- 'i', 'a', 'an', 'the', 'to', 'for', 'of', 'in', 'on', 'at', 'by', 'with', 'from',
- 'is', 'are', 'was', 'were', 'be', 'been', 'being', 'have', 'has', 'had',
- 'do', 'does', 'did', 'will', 'would', 'should', 'could', 'can', 'may', 'might', 'must', 'shall',
- 'this', 'that', 'these', 'those', 'my', 'your', 'our', 'their',
- 'want', 'need', 'add', 'get', 'set'
- )
-
- # Convert to lowercase and extract words (alphanumeric only)
- $cleanName = $Description.ToLower() -replace '[^a-z0-9\s]', ' '
- $words = $cleanName -split '\s+' | Where-Object { $_ }
-
- # Filter words: remove stop words and words shorter than 3 chars (unless they're uppercase acronyms in original)
- $meaningfulWords = @()
- foreach ($word in $words) {
- # Skip stop words
- if ($stopWords -contains $word) { continue }
-
- # Keep words that are length >= 3 OR appear as uppercase in original (likely acronyms)
- if ($word.Length -ge 3) {
- $meaningfulWords += $word
- } elseif ($Description -match "\b$($word.ToUpper())\b") {
- # Keep short words if they appear as uppercase in original (likely acronyms)
- $meaningfulWords += $word
- }
- }
-
- # If we have meaningful words, use first 3-4 of them
- if ($meaningfulWords.Count -gt 0) {
- $maxWords = if ($meaningfulWords.Count -eq 4) { 4 } else { 3 }
- $result = ($meaningfulWords | Select-Object -First $maxWords) -join '-'
- return $result
- } else {
- # Fallback to original logic if no meaningful words found
- $result = ConvertTo-CleanBranchName -Name $Description
- $fallbackWords = ($result -split '-') | Where-Object { $_ } | Select-Object -First 3
- return [string]::Join('-', $fallbackWords)
- }
-}
-
-# Generate branch name
-if ($ShortName) {
- # Use provided short name, just clean it up
- $branchSuffix = ConvertTo-CleanBranchName -Name $ShortName
-} else {
- # Generate from description with smart filtering
- $branchSuffix = Get-BranchName -Description $featureDesc
-}
-
-# Determine branch number
-if ($Number -eq 0) {
- if ($hasGit) {
- # Check existing branches on remotes
- $Number = Get-NextBranchNumber -SpecsDir $specsDir
- } else {
- # Fall back to local directory check
- $Number = (Get-HighestNumberFromSpecs -SpecsDir $specsDir) + 1
- }
-}
-
-$featureNum = ('{0:000}' -f $Number)
-$branchName = "$featureNum-$branchSuffix"
-
-# GitHub enforces a 244-byte limit on branch names
-# Validate and truncate if necessary
-$maxBranchLength = 244
-if ($branchName.Length -gt $maxBranchLength) {
- # Calculate how much we need to trim from suffix
- # Account for: feature number (3) + hyphen (1) = 4 chars
- $maxSuffixLength = $maxBranchLength - 4
-
- # Truncate suffix
- $truncatedSuffix = $branchSuffix.Substring(0, [Math]::Min($branchSuffix.Length, $maxSuffixLength))
- # Remove trailing hyphen if truncation created one
- $truncatedSuffix = $truncatedSuffix -replace '-$', ''
-
- $originalBranchName = $branchName
- $branchName = "$featureNum-$truncatedSuffix"
-
- Write-Warning "[specify] Branch name exceeded GitHub's 244-byte limit"
- Write-Warning "[specify] Original: $originalBranchName ($($originalBranchName.Length) bytes)"
- Write-Warning "[specify] Truncated to: $branchName ($($branchName.Length) bytes)"
-}
-
-if ($hasGit) {
- try {
- git checkout -b $branchName | Out-Null
- } catch {
- Write-Warning "Failed to create git branch: $branchName"
- }
-} else {
- Write-Warning "[specify] Warning: Git repository not detected; skipped branch creation for $branchName"
-}
-
-$featureDir = Join-Path $specsDir $branchName
-New-Item -ItemType Directory -Path $featureDir -Force | Out-Null
-
-$template = Join-Path $repoRoot '.specify/templates/spec-template.md'
-$specFile = Join-Path $featureDir 'spec.md'
-if (Test-Path $template) {
- Copy-Item $template $specFile -Force
-} else {
- New-Item -ItemType File -Path $specFile | Out-Null
-}
-
-# Set the SPECIFY_FEATURE environment variable for the current session
-$env:SPECIFY_FEATURE = $branchName
-
-if ($Json) {
- $obj = [PSCustomObject]@{
- BRANCH_NAME = $branchName
- SPEC_FILE = $specFile
- FEATURE_NUM = $featureNum
- HAS_GIT = $hasGit
- }
- $obj | ConvertTo-Json -Compress
-} else {
- Write-Output "BRANCH_NAME: $branchName"
- Write-Output "SPEC_FILE: $specFile"
- Write-Output "FEATURE_NUM: $featureNum"
- Write-Output "HAS_GIT: $hasGit"
- Write-Output "SPECIFY_FEATURE environment variable set to: $branchName"
-}
-
diff --git a/.specify/scripts/powershell/setup-plan.ps1 b/.specify/scripts/powershell/setup-plan.ps1
deleted file mode 100644
index d0ed582..0000000
--- a/.specify/scripts/powershell/setup-plan.ps1
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/usr/bin/env pwsh
-# Setup implementation plan for a feature
-
-[CmdletBinding()]
-param(
- [switch]$Json,
- [switch]$Help
-)
-
-$ErrorActionPreference = 'Stop'
-
-# Show help if requested
-if ($Help) {
- Write-Output "Usage: ./setup-plan.ps1 [-Json] [-Help]"
- Write-Output " -Json Output results in JSON format"
- Write-Output " -Help Show this help message"
- exit 0
-}
-
-# Load common functions
-. "$PSScriptRoot/common.ps1"
-
-# Get all paths and variables from common functions
-$paths = Get-FeaturePathsEnv
-
-# Check if we're on a proper feature branch (only for git repos)
-if (-not (Test-FeatureBranch -Branch $paths.CURRENT_BRANCH -HasGit $paths.HAS_GIT)) {
- exit 1
-}
-
-# Ensure the feature directory exists
-New-Item -ItemType Directory -Path $paths.FEATURE_DIR -Force | Out-Null
-
-# Copy plan template if it exists, otherwise note it or create empty file
-$template = Join-Path $paths.REPO_ROOT '.specify/templates/plan-template.md'
-if (Test-Path $template) {
- Copy-Item $template $paths.IMPL_PLAN -Force
- Write-Output "Copied plan template to $($paths.IMPL_PLAN)"
-} else {
- Write-Warning "Plan template not found at $template"
- # Create a basic plan file if template doesn't exist
- New-Item -ItemType File -Path $paths.IMPL_PLAN -Force | Out-Null
-}
-
-# Output results
-if ($Json) {
- $result = [PSCustomObject]@{
- FEATURE_SPEC = $paths.FEATURE_SPEC
- IMPL_PLAN = $paths.IMPL_PLAN
- SPECS_DIR = $paths.FEATURE_DIR
- BRANCH = $paths.CURRENT_BRANCH
- HAS_GIT = $paths.HAS_GIT
- }
- $result | ConvertTo-Json -Compress
-} else {
- Write-Output "FEATURE_SPEC: $($paths.FEATURE_SPEC)"
- Write-Output "IMPL_PLAN: $($paths.IMPL_PLAN)"
- Write-Output "SPECS_DIR: $($paths.FEATURE_DIR)"
- Write-Output "BRANCH: $($paths.CURRENT_BRANCH)"
- Write-Output "HAS_GIT: $($paths.HAS_GIT)"
-}
diff --git a/.specify/scripts/powershell/update-agent-context.ps1 b/.specify/scripts/powershell/update-agent-context.ps1
deleted file mode 100644
index ffdab4b..0000000
--- a/.specify/scripts/powershell/update-agent-context.ps1
+++ /dev/null
@@ -1,448 +0,0 @@
-#!/usr/bin/env pwsh
-<#!
-.SYNOPSIS
-Update agent context files with information from plan.md (PowerShell version)
-
-.DESCRIPTION
-Mirrors the behavior of scripts/bash/update-agent-context.sh:
- 1. Environment Validation
- 2. Plan Data Extraction
- 3. Agent File Management (create from template or update existing)
- 4. Content Generation (technology stack, recent changes, timestamp)
- 5. Multi-Agent Support (claude, gemini, copilot, cursor-agent, qwen, opencode, codex, windsurf, kilocode, auggie, roo, codebuddy, amp, shai, q, bob, qoder)
-
-.PARAMETER AgentType
-Optional agent key to update a single agent. If omitted, updates all existing agent files (creating a default Claude file if none exist).
-
-.EXAMPLE
-./update-agent-context.ps1 -AgentType claude
-
-.EXAMPLE
-./update-agent-context.ps1 # Updates all existing agent files
-
-.NOTES
-Relies on common helper functions in common.ps1
-#>
-param(
- [Parameter(Position=0)]
- [ValidateSet('claude','gemini','copilot','cursor-agent','qwen','opencode','codex','windsurf','kilocode','auggie','roo','codebuddy','amp','shai','q','bob','qoder')]
- [string]$AgentType
-)
-
-$ErrorActionPreference = 'Stop'
-
-# Import common helpers
-$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
-. (Join-Path $ScriptDir 'common.ps1')
-
-# Acquire environment paths
-$envData = Get-FeaturePathsEnv
-$REPO_ROOT = $envData.REPO_ROOT
-$CURRENT_BRANCH = $envData.CURRENT_BRANCH
-$HAS_GIT = $envData.HAS_GIT
-$IMPL_PLAN = $envData.IMPL_PLAN
-$NEW_PLAN = $IMPL_PLAN
-
-# Agent file paths
-$CLAUDE_FILE = Join-Path $REPO_ROOT 'CLAUDE.md'
-$GEMINI_FILE = Join-Path $REPO_ROOT 'GEMINI.md'
-$COPILOT_FILE = Join-Path $REPO_ROOT '.github/agents/copilot-instructions.md'
-$CURSOR_FILE = Join-Path $REPO_ROOT '.cursor/rules/specify-rules.mdc'
-$QWEN_FILE = Join-Path $REPO_ROOT 'QWEN.md'
-$AGENTS_FILE = Join-Path $REPO_ROOT 'AGENTS.md'
-$WINDSURF_FILE = Join-Path $REPO_ROOT '.windsurf/rules/specify-rules.md'
-$KILOCODE_FILE = Join-Path $REPO_ROOT '.kilocode/rules/specify-rules.md'
-$AUGGIE_FILE = Join-Path $REPO_ROOT '.augment/rules/specify-rules.md'
-$ROO_FILE = Join-Path $REPO_ROOT '.roo/rules/specify-rules.md'
-$CODEBUDDY_FILE = Join-Path $REPO_ROOT 'CODEBUDDY.md'
-$QODER_FILE = Join-Path $REPO_ROOT 'QODER.md'
-$AMP_FILE = Join-Path $REPO_ROOT 'AGENTS.md'
-$SHAI_FILE = Join-Path $REPO_ROOT 'SHAI.md'
-$Q_FILE = Join-Path $REPO_ROOT 'AGENTS.md'
-$BOB_FILE = Join-Path $REPO_ROOT 'AGENTS.md'
-
-$TEMPLATE_FILE = Join-Path $REPO_ROOT '.specify/templates/agent-file-template.md'
-
-# Parsed plan data placeholders
-$script:NEW_LANG = ''
-$script:NEW_FRAMEWORK = ''
-$script:NEW_DB = ''
-$script:NEW_PROJECT_TYPE = ''
-
-function Write-Info {
- param(
- [Parameter(Mandatory=$true)]
- [string]$Message
- )
- Write-Host "INFO: $Message"
-}
-
-function Write-Success {
- param(
- [Parameter(Mandatory=$true)]
- [string]$Message
- )
- Write-Host "$([char]0x2713) $Message"
-}
-
-function Write-WarningMsg {
- param(
- [Parameter(Mandatory=$true)]
- [string]$Message
- )
- Write-Warning $Message
-}
-
-function Write-Err {
- param(
- [Parameter(Mandatory=$true)]
- [string]$Message
- )
- Write-Host "ERROR: $Message" -ForegroundColor Red
-}
-
-function Validate-Environment {
- if (-not $CURRENT_BRANCH) {
- Write-Err 'Unable to determine current feature'
- if ($HAS_GIT) { Write-Info "Make sure you're on a feature branch" } else { Write-Info 'Set SPECIFY_FEATURE environment variable or create a feature first' }
- exit 1
- }
- if (-not (Test-Path $NEW_PLAN)) {
- Write-Err "No plan.md found at $NEW_PLAN"
- Write-Info 'Ensure you are working on a feature with a corresponding spec directory'
- if (-not $HAS_GIT) { Write-Info 'Use: $env:SPECIFY_FEATURE=your-feature-name or create a new feature first' }
- exit 1
- }
- if (-not (Test-Path $TEMPLATE_FILE)) {
- Write-Err "Template file not found at $TEMPLATE_FILE"
- Write-Info 'Run specify init to scaffold .specify/templates, or add agent-file-template.md there.'
- exit 1
- }
-}
-
-function Extract-PlanField {
- param(
- [Parameter(Mandatory=$true)]
- [string]$FieldPattern,
- [Parameter(Mandatory=$true)]
- [string]$PlanFile
- )
- if (-not (Test-Path $PlanFile)) { return '' }
- # Lines like **Language/Version**: Python 3.12
- $regex = "^\*\*$([Regex]::Escape($FieldPattern))\*\*: (.+)$"
- Get-Content -LiteralPath $PlanFile -Encoding utf8 | ForEach-Object {
- if ($_ -match $regex) {
- $val = $Matches[1].Trim()
- if ($val -notin @('NEEDS CLARIFICATION','N/A')) { return $val }
- }
- } | Select-Object -First 1
-}
-
-function Parse-PlanData {
- param(
- [Parameter(Mandatory=$true)]
- [string]$PlanFile
- )
- if (-not (Test-Path $PlanFile)) { Write-Err "Plan file not found: $PlanFile"; return $false }
- Write-Info "Parsing plan data from $PlanFile"
- $script:NEW_LANG = Extract-PlanField -FieldPattern 'Language/Version' -PlanFile $PlanFile
- $script:NEW_FRAMEWORK = Extract-PlanField -FieldPattern 'Primary Dependencies' -PlanFile $PlanFile
- $script:NEW_DB = Extract-PlanField -FieldPattern 'Storage' -PlanFile $PlanFile
- $script:NEW_PROJECT_TYPE = Extract-PlanField -FieldPattern 'Project Type' -PlanFile $PlanFile
-
- if ($NEW_LANG) { Write-Info "Found language: $NEW_LANG" } else { Write-WarningMsg 'No language information found in plan' }
- if ($NEW_FRAMEWORK) { Write-Info "Found framework: $NEW_FRAMEWORK" }
- if ($NEW_DB -and $NEW_DB -ne 'N/A') { Write-Info "Found database: $NEW_DB" }
- if ($NEW_PROJECT_TYPE) { Write-Info "Found project type: $NEW_PROJECT_TYPE" }
- return $true
-}
-
-function Format-TechnologyStack {
- param(
- [Parameter(Mandatory=$false)]
- [string]$Lang,
- [Parameter(Mandatory=$false)]
- [string]$Framework
- )
- $parts = @()
- if ($Lang -and $Lang -ne 'NEEDS CLARIFICATION') { $parts += $Lang }
- if ($Framework -and $Framework -notin @('NEEDS CLARIFICATION','N/A')) { $parts += $Framework }
- if (-not $parts) { return '' }
- return ($parts -join ' + ')
-}
-
-function Get-ProjectStructure {
- param(
- [Parameter(Mandatory=$false)]
- [string]$ProjectType
- )
- if ($ProjectType -match 'web') { return "backend/`nfrontend/`ntests/" } else { return "src/`ntests/" }
-}
-
-function Get-CommandsForLanguage {
- param(
- [Parameter(Mandatory=$false)]
- [string]$Lang
- )
- switch -Regex ($Lang) {
- 'Python' { return "cd src; pytest; ruff check ." }
- 'Rust' { return "cargo test; cargo clippy" }
- 'JavaScript|TypeScript' { return "npm test; npm run lint" }
- default { return "# Add commands for $Lang" }
- }
-}
-
-function Get-LanguageConventions {
- param(
- [Parameter(Mandatory=$false)]
- [string]$Lang
- )
- if ($Lang) { "${Lang}: Follow standard conventions" } else { 'General: Follow standard conventions' }
-}
-
-function New-AgentFile {
- param(
- [Parameter(Mandatory=$true)]
- [string]$TargetFile,
- [Parameter(Mandatory=$true)]
- [string]$ProjectName,
- [Parameter(Mandatory=$true)]
- [datetime]$Date
- )
- if (-not (Test-Path $TEMPLATE_FILE)) { Write-Err "Template not found at $TEMPLATE_FILE"; return $false }
- $temp = New-TemporaryFile
- Copy-Item -LiteralPath $TEMPLATE_FILE -Destination $temp -Force
-
- $projectStructure = Get-ProjectStructure -ProjectType $NEW_PROJECT_TYPE
- $commands = Get-CommandsForLanguage -Lang $NEW_LANG
- $languageConventions = Get-LanguageConventions -Lang $NEW_LANG
-
- $escaped_lang = $NEW_LANG
- $escaped_framework = $NEW_FRAMEWORK
- $escaped_branch = $CURRENT_BRANCH
-
- $content = Get-Content -LiteralPath $temp -Raw -Encoding utf8
- $content = $content -replace '\[PROJECT NAME\]',$ProjectName
- $content = $content -replace '\[DATE\]',$Date.ToString('yyyy-MM-dd')
-
- # Build the technology stack string safely
- $techStackForTemplate = ""
- if ($escaped_lang -and $escaped_framework) {
- $techStackForTemplate = "- $escaped_lang + $escaped_framework ($escaped_branch)"
- } elseif ($escaped_lang) {
- $techStackForTemplate = "- $escaped_lang ($escaped_branch)"
- } elseif ($escaped_framework) {
- $techStackForTemplate = "- $escaped_framework ($escaped_branch)"
- }
-
- $content = $content -replace '\[EXTRACTED FROM ALL PLAN.MD FILES\]',$techStackForTemplate
- # For project structure we manually embed (keep newlines)
- $escapedStructure = [Regex]::Escape($projectStructure)
- $content = $content -replace '\[ACTUAL STRUCTURE FROM PLANS\]',$escapedStructure
- # Replace escaped newlines placeholder after all replacements
- $content = $content -replace '\[ONLY COMMANDS FOR ACTIVE TECHNOLOGIES\]',$commands
- $content = $content -replace '\[LANGUAGE-SPECIFIC, ONLY FOR LANGUAGES IN USE\]',$languageConventions
-
- # Build the recent changes string safely
- $recentChangesForTemplate = ""
- if ($escaped_lang -and $escaped_framework) {
- $recentChangesForTemplate = "- ${escaped_branch}: Added ${escaped_lang} + ${escaped_framework}"
- } elseif ($escaped_lang) {
- $recentChangesForTemplate = "- ${escaped_branch}: Added ${escaped_lang}"
- } elseif ($escaped_framework) {
- $recentChangesForTemplate = "- ${escaped_branch}: Added ${escaped_framework}"
- }
-
- $content = $content -replace '\[LAST 3 FEATURES AND WHAT THEY ADDED\]',$recentChangesForTemplate
- # Convert literal \n sequences introduced by Escape to real newlines
- $content = $content -replace '\\n',[Environment]::NewLine
-
- $parent = Split-Path -Parent $TargetFile
- if (-not (Test-Path $parent)) { New-Item -ItemType Directory -Path $parent | Out-Null }
- Set-Content -LiteralPath $TargetFile -Value $content -NoNewline -Encoding utf8
- Remove-Item $temp -Force
- return $true
-}
-
-function Update-ExistingAgentFile {
- param(
- [Parameter(Mandatory=$true)]
- [string]$TargetFile,
- [Parameter(Mandatory=$true)]
- [datetime]$Date
- )
- if (-not (Test-Path $TargetFile)) { return (New-AgentFile -TargetFile $TargetFile -ProjectName (Split-Path $REPO_ROOT -Leaf) -Date $Date) }
-
- $techStack = Format-TechnologyStack -Lang $NEW_LANG -Framework $NEW_FRAMEWORK
- $newTechEntries = @()
- if ($techStack) {
- $escapedTechStack = [Regex]::Escape($techStack)
- if (-not (Select-String -Pattern $escapedTechStack -Path $TargetFile -Quiet)) {
- $newTechEntries += "- $techStack ($CURRENT_BRANCH)"
- }
- }
- if ($NEW_DB -and $NEW_DB -notin @('N/A','NEEDS CLARIFICATION')) {
- $escapedDB = [Regex]::Escape($NEW_DB)
- if (-not (Select-String -Pattern $escapedDB -Path $TargetFile -Quiet)) {
- $newTechEntries += "- $NEW_DB ($CURRENT_BRANCH)"
- }
- }
- $newChangeEntry = ''
- if ($techStack) { $newChangeEntry = "- ${CURRENT_BRANCH}: Added ${techStack}" }
- elseif ($NEW_DB -and $NEW_DB -notin @('N/A','NEEDS CLARIFICATION')) { $newChangeEntry = "- ${CURRENT_BRANCH}: Added ${NEW_DB}" }
-
- $lines = Get-Content -LiteralPath $TargetFile -Encoding utf8
- $output = New-Object System.Collections.Generic.List[string]
- $inTech = $false; $inChanges = $false; $techAdded = $false; $changeAdded = $false; $existingChanges = 0
-
- for ($i=0; $i -lt $lines.Count; $i++) {
- $line = $lines[$i]
- if ($line -eq '## Active Technologies') {
- $output.Add($line)
- $inTech = $true
- continue
- }
- if ($inTech -and $line -match '^##\s') {
- if (-not $techAdded -and $newTechEntries.Count -gt 0) { $newTechEntries | ForEach-Object { $output.Add($_) }; $techAdded = $true }
- $output.Add($line); $inTech = $false; continue
- }
- if ($inTech -and [string]::IsNullOrWhiteSpace($line)) {
- if (-not $techAdded -and $newTechEntries.Count -gt 0) { $newTechEntries | ForEach-Object { $output.Add($_) }; $techAdded = $true }
- $output.Add($line); continue
- }
- if ($line -eq '## Recent Changes') {
- $output.Add($line)
- if ($newChangeEntry) { $output.Add($newChangeEntry); $changeAdded = $true }
- $inChanges = $true
- continue
- }
- if ($inChanges -and $line -match '^##\s') { $output.Add($line); $inChanges = $false; continue }
- if ($inChanges -and $line -match '^- ') {
- if ($existingChanges -lt 2) { $output.Add($line); $existingChanges++ }
- continue
- }
- if ($line -match '\*\*Last updated\*\*: .*\d{4}-\d{2}-\d{2}') {
- $output.Add(($line -replace '\d{4}-\d{2}-\d{2}',$Date.ToString('yyyy-MM-dd')))
- continue
- }
- $output.Add($line)
- }
-
- # Post-loop check: if we're still in the Active Technologies section and haven't added new entries
- if ($inTech -and -not $techAdded -and $newTechEntries.Count -gt 0) {
- $newTechEntries | ForEach-Object { $output.Add($_) }
- }
-
- Set-Content -LiteralPath $TargetFile -Value ($output -join [Environment]::NewLine) -Encoding utf8
- return $true
-}
-
-function Update-AgentFile {
- param(
- [Parameter(Mandatory=$true)]
- [string]$TargetFile,
- [Parameter(Mandatory=$true)]
- [string]$AgentName
- )
- if (-not $TargetFile -or -not $AgentName) { Write-Err 'Update-AgentFile requires TargetFile and AgentName'; return $false }
- Write-Info "Updating $AgentName context file: $TargetFile"
- $projectName = Split-Path $REPO_ROOT -Leaf
- $date = Get-Date
-
- $dir = Split-Path -Parent $TargetFile
- if (-not (Test-Path $dir)) { New-Item -ItemType Directory -Path $dir | Out-Null }
-
- if (-not (Test-Path $TargetFile)) {
- if (New-AgentFile -TargetFile $TargetFile -ProjectName $projectName -Date $date) { Write-Success "Created new $AgentName context file" } else { Write-Err 'Failed to create new agent file'; return $false }
- } else {
- try {
- if (Update-ExistingAgentFile -TargetFile $TargetFile -Date $date) { Write-Success "Updated existing $AgentName context file" } else { Write-Err 'Failed to update agent file'; return $false }
- } catch {
- Write-Err "Cannot access or update existing file: $TargetFile. $_"
- return $false
- }
- }
- return $true
-}
-
-function Update-SpecificAgent {
- param(
- [Parameter(Mandatory=$true)]
- [string]$Type
- )
- switch ($Type) {
- 'claude' { Update-AgentFile -TargetFile $CLAUDE_FILE -AgentName 'Claude Code' }
- 'gemini' { Update-AgentFile -TargetFile $GEMINI_FILE -AgentName 'Gemini CLI' }
- 'copilot' { Update-AgentFile -TargetFile $COPILOT_FILE -AgentName 'GitHub Copilot' }
- 'cursor-agent' { Update-AgentFile -TargetFile $CURSOR_FILE -AgentName 'Cursor IDE' }
- 'qwen' { Update-AgentFile -TargetFile $QWEN_FILE -AgentName 'Qwen Code' }
- 'opencode' { Update-AgentFile -TargetFile $AGENTS_FILE -AgentName 'opencode' }
- 'codex' { Update-AgentFile -TargetFile $AGENTS_FILE -AgentName 'Codex CLI' }
- 'windsurf' { Update-AgentFile -TargetFile $WINDSURF_FILE -AgentName 'Windsurf' }
- 'kilocode' { Update-AgentFile -TargetFile $KILOCODE_FILE -AgentName 'Kilo Code' }
- 'auggie' { Update-AgentFile -TargetFile $AUGGIE_FILE -AgentName 'Auggie CLI' }
- 'roo' { Update-AgentFile -TargetFile $ROO_FILE -AgentName 'Roo Code' }
- 'codebuddy' { Update-AgentFile -TargetFile $CODEBUDDY_FILE -AgentName 'CodeBuddy CLI' }
- 'qoder' { Update-AgentFile -TargetFile $QODER_FILE -AgentName 'Qoder CLI' }
- 'amp' { Update-AgentFile -TargetFile $AMP_FILE -AgentName 'Amp' }
- 'shai' { Update-AgentFile -TargetFile $SHAI_FILE -AgentName 'SHAI' }
- 'q' { Update-AgentFile -TargetFile $Q_FILE -AgentName 'Amazon Q Developer CLI' }
- 'bob' { Update-AgentFile -TargetFile $BOB_FILE -AgentName 'IBM Bob' }
- default { Write-Err "Unknown agent type '$Type'"; Write-Err 'Expected: claude|gemini|copilot|cursor-agent|qwen|opencode|codex|windsurf|kilocode|auggie|roo|codebuddy|amp|shai|q|bob|qoder'; return $false }
- }
-}
-
-function Update-AllExistingAgents {
- $found = $false
- $ok = $true
- if (Test-Path $CLAUDE_FILE) { if (-not (Update-AgentFile -TargetFile $CLAUDE_FILE -AgentName 'Claude Code')) { $ok = $false }; $found = $true }
- if (Test-Path $GEMINI_FILE) { if (-not (Update-AgentFile -TargetFile $GEMINI_FILE -AgentName 'Gemini CLI')) { $ok = $false }; $found = $true }
- if (Test-Path $COPILOT_FILE) { if (-not (Update-AgentFile -TargetFile $COPILOT_FILE -AgentName 'GitHub Copilot')) { $ok = $false }; $found = $true }
- if (Test-Path $CURSOR_FILE) { if (-not (Update-AgentFile -TargetFile $CURSOR_FILE -AgentName 'Cursor IDE')) { $ok = $false }; $found = $true }
- if (Test-Path $QWEN_FILE) { if (-not (Update-AgentFile -TargetFile $QWEN_FILE -AgentName 'Qwen Code')) { $ok = $false }; $found = $true }
- if (Test-Path $AGENTS_FILE) { if (-not (Update-AgentFile -TargetFile $AGENTS_FILE -AgentName 'Codex/opencode')) { $ok = $false }; $found = $true }
- if (Test-Path $WINDSURF_FILE) { if (-not (Update-AgentFile -TargetFile $WINDSURF_FILE -AgentName 'Windsurf')) { $ok = $false }; $found = $true }
- if (Test-Path $KILOCODE_FILE) { if (-not (Update-AgentFile -TargetFile $KILOCODE_FILE -AgentName 'Kilo Code')) { $ok = $false }; $found = $true }
- if (Test-Path $AUGGIE_FILE) { if (-not (Update-AgentFile -TargetFile $AUGGIE_FILE -AgentName 'Auggie CLI')) { $ok = $false }; $found = $true }
- if (Test-Path $ROO_FILE) { if (-not (Update-AgentFile -TargetFile $ROO_FILE -AgentName 'Roo Code')) { $ok = $false }; $found = $true }
- if (Test-Path $CODEBUDDY_FILE) { if (-not (Update-AgentFile -TargetFile $CODEBUDDY_FILE -AgentName 'CodeBuddy CLI')) { $ok = $false }; $found = $true }
- if (Test-Path $QODER_FILE) { if (-not (Update-AgentFile -TargetFile $QODER_FILE -AgentName 'Qoder CLI')) { $ok = $false }; $found = $true }
- if (Test-Path $SHAI_FILE) { if (-not (Update-AgentFile -TargetFile $SHAI_FILE -AgentName 'SHAI')) { $ok = $false }; $found = $true }
- if (Test-Path $Q_FILE) { if (-not (Update-AgentFile -TargetFile $Q_FILE -AgentName 'Amazon Q Developer CLI')) { $ok = $false }; $found = $true }
- if (Test-Path $BOB_FILE) { if (-not (Update-AgentFile -TargetFile $BOB_FILE -AgentName 'IBM Bob')) { $ok = $false }; $found = $true }
- if (-not $found) {
- Write-Info 'No existing agent files found, creating default Claude file...'
- if (-not (Update-AgentFile -TargetFile $CLAUDE_FILE -AgentName 'Claude Code')) { $ok = $false }
- }
- return $ok
-}
-
-function Print-Summary {
- Write-Host ''
- Write-Info 'Summary of changes:'
- if ($NEW_LANG) { Write-Host " - Added language: $NEW_LANG" }
- if ($NEW_FRAMEWORK) { Write-Host " - Added framework: $NEW_FRAMEWORK" }
- if ($NEW_DB -and $NEW_DB -ne 'N/A') { Write-Host " - Added database: $NEW_DB" }
- Write-Host ''
- Write-Info 'Usage: ./update-agent-context.ps1 [-AgentType claude|gemini|copilot|cursor-agent|qwen|opencode|codex|windsurf|kilocode|auggie|roo|codebuddy|amp|shai|q|bob|qoder]'
-}
-
-function Main {
- Validate-Environment
- Write-Info "=== Updating agent context files for feature $CURRENT_BRANCH ==="
- if (-not (Parse-PlanData -PlanFile $NEW_PLAN)) { Write-Err 'Failed to parse plan data'; exit 1 }
- $success = $true
- if ($AgentType) {
- Write-Info "Updating specific agent: $AgentType"
- if (-not (Update-SpecificAgent -Type $AgentType)) { $success = $false }
- }
- else {
- Write-Info 'No agent specified, updating all existing agent files...'
- if (-not (Update-AllExistingAgents)) { $success = $false }
- }
- Print-Summary
- if ($success) { Write-Success 'Agent context update completed successfully'; exit 0 } else { Write-Err 'Agent context update completed with errors'; exit 1 }
-}
-
-Main
-
diff --git a/.specify/templates/agent-file-template.md b/.specify/templates/agent-file-template.md
deleted file mode 100644
index 4cc7fd6..0000000
--- a/.specify/templates/agent-file-template.md
+++ /dev/null
@@ -1,28 +0,0 @@
-# [PROJECT NAME] Development Guidelines
-
-Auto-generated from all feature plans. Last updated: [DATE]
-
-## Active Technologies
-
-[EXTRACTED FROM ALL PLAN.MD FILES]
-
-## Project Structure
-
-```text
-[ACTUAL STRUCTURE FROM PLANS]
-```
-
-## Commands
-
-[ONLY COMMANDS FOR ACTIVE TECHNOLOGIES]
-
-## Code Style
-
-[LANGUAGE-SPECIFIC, ONLY FOR LANGUAGES IN USE]
-
-## Recent Changes
-
-[LAST 3 FEATURES AND WHAT THEY ADDED]
-
-
-
diff --git a/.specify/templates/checklist-template.md b/.specify/templates/checklist-template.md
deleted file mode 100644
index 806657d..0000000
--- a/.specify/templates/checklist-template.md
+++ /dev/null
@@ -1,40 +0,0 @@
-# [CHECKLIST TYPE] Checklist: [FEATURE NAME]
-
-**Purpose**: [Brief description of what this checklist covers]
-**Created**: [DATE]
-**Feature**: [Link to spec.md or relevant documentation]
-
-**Note**: This checklist is generated by the `/speckit.checklist` command based on feature context and requirements.
-
-
-
-## [Category 1]
-
-- [ ] CHK001 First checklist item with clear action
-- [ ] CHK002 Second checklist item
-- [ ] CHK003 Third checklist item
-
-## [Category 2]
-
-- [ ] CHK004 Another category item
-- [ ] CHK005 Item with specific criteria
-- [ ] CHK006 Final item in this category
-
-## Notes
-
-- Check items off as completed: `[x]`
-- Add comments or findings inline
-- Link to relevant resources or documentation
-- Items are numbered sequentially for easy reference
diff --git a/.specify/templates/plan-template.md b/.specify/templates/plan-template.md
deleted file mode 100644
index 6a8bfc6..0000000
--- a/.specify/templates/plan-template.md
+++ /dev/null
@@ -1,104 +0,0 @@
-# Implementation Plan: [FEATURE]
-
-**Branch**: `[###-feature-name]` | **Date**: [DATE] | **Spec**: [link]
-**Input**: Feature specification from `/specs/[###-feature-name]/spec.md`
-
-**Note**: This template is filled in by the `/speckit.plan` command. See `.specify/templates/commands/plan.md` for the execution workflow.
-
-## Summary
-
-[Extract from feature spec: primary requirement + technical approach from research]
-
-## Technical Context
-
-
-
-**Language/Version**: [e.g., Python 3.11, Swift 5.9, Rust 1.75 or NEEDS CLARIFICATION]
-**Primary Dependencies**: [e.g., FastAPI, UIKit, LLVM or NEEDS CLARIFICATION]
-**Storage**: [if applicable, e.g., PostgreSQL, CoreData, files or N/A]
-**Testing**: [e.g., pytest, XCTest, cargo test or NEEDS CLARIFICATION]
-**Target Platform**: [e.g., Linux server, iOS 15+, WASM or NEEDS CLARIFICATION]
-**Project Type**: [single/web/mobile - determines source structure]
-**Performance Goals**: [domain-specific, e.g., 1000 req/s, 10k lines/sec, 60 fps or NEEDS CLARIFICATION]
-**Constraints**: [domain-specific, e.g., <200ms p95, <100MB memory, offline-capable or NEEDS CLARIFICATION]
-**Scale/Scope**: [domain-specific, e.g., 10k users, 1M LOC, 50 screens or NEEDS CLARIFICATION]
-
-## Constitution Check
-
-*GATE: Must pass before Phase 0 research. Re-check after Phase 1 design.*
-
-[Gates determined based on constitution file]
-
-## Project Structure
-
-### Documentation (this feature)
-
-```text
-specs/[###-feature]/
-├── plan.md # This file (/speckit.plan command output)
-├── research.md # Phase 0 output (/speckit.plan command)
-├── data-model.md # Phase 1 output (/speckit.plan command)
-├── quickstart.md # Phase 1 output (/speckit.plan command)
-├── contracts/ # Phase 1 output (/speckit.plan command)
-└── tasks.md # Phase 2 output (/speckit.tasks command - NOT created by /speckit.plan)
-```
-
-### Source Code (repository root)
-
-
-```text
-# [REMOVE IF UNUSED] Option 1: Single project (DEFAULT)
-src/
-├── models/
-├── services/
-├── cli/
-└── lib/
-
-tests/
-├── contract/
-├── integration/
-└── unit/
-
-# [REMOVE IF UNUSED] Option 2: Web application (when "frontend" + "backend" detected)
-backend/
-├── src/
-│ ├── models/
-│ ├── services/
-│ └── api/
-└── tests/
-
-frontend/
-├── src/
-│ ├── components/
-│ ├── pages/
-│ └── services/
-└── tests/
-
-# [REMOVE IF UNUSED] Option 3: Mobile + API (when "iOS/Android" detected)
-api/
-└── [same as backend above]
-
-ios/ or android/
-└── [platform-specific structure: feature modules, UI flows, platform tests]
-```
-
-**Structure Decision**: [Document the selected structure and reference the real
-directories captured above]
-
-## Complexity Tracking
-
-> **Fill ONLY if Constitution Check has violations that must be justified**
-
-| Violation | Why Needed | Simpler Alternative Rejected Because |
-|-----------|------------|-------------------------------------|
-| [e.g., 4th project] | [current need] | [why 3 projects insufficient] |
-| [e.g., Repository pattern] | [specific problem] | [why direct DB access insufficient] |
diff --git a/.specify/templates/spec-template.md b/.specify/templates/spec-template.md
deleted file mode 100644
index c67d914..0000000
--- a/.specify/templates/spec-template.md
+++ /dev/null
@@ -1,115 +0,0 @@
-# Feature Specification: [FEATURE NAME]
-
-**Feature Branch**: `[###-feature-name]`
-**Created**: [DATE]
-**Status**: Draft
-**Input**: User description: "$ARGUMENTS"
-
-## User Scenarios & Testing *(mandatory)*
-
-
-
-### User Story 1 - [Brief Title] (Priority: P1)
-
-[Describe this user journey in plain language]
-
-**Why this priority**: [Explain the value and why it has this priority level]
-
-**Independent Test**: [Describe how this can be tested independently - e.g., "Can be fully tested by [specific action] and delivers [specific value]"]
-
-**Acceptance Scenarios**:
-
-1. **Given** [initial state], **When** [action], **Then** [expected outcome]
-2. **Given** [initial state], **When** [action], **Then** [expected outcome]
-
----
-
-### User Story 2 - [Brief Title] (Priority: P2)
-
-[Describe this user journey in plain language]
-
-**Why this priority**: [Explain the value and why it has this priority level]
-
-**Independent Test**: [Describe how this can be tested independently]
-
-**Acceptance Scenarios**:
-
-1. **Given** [initial state], **When** [action], **Then** [expected outcome]
-
----
-
-### User Story 3 - [Brief Title] (Priority: P3)
-
-[Describe this user journey in plain language]
-
-**Why this priority**: [Explain the value and why it has this priority level]
-
-**Independent Test**: [Describe how this can be tested independently]
-
-**Acceptance Scenarios**:
-
-1. **Given** [initial state], **When** [action], **Then** [expected outcome]
-
----
-
-[Add more user stories as needed, each with an assigned priority]
-
-### Edge Cases
-
-
-
-- What happens when [boundary condition]?
-- How does system handle [error scenario]?
-
-## Requirements *(mandatory)*
-
-
-
-### Functional Requirements
-
-- **FR-001**: System MUST [specific capability, e.g., "allow users to create accounts"]
-- **FR-002**: System MUST [specific capability, e.g., "validate email addresses"]
-- **FR-003**: Users MUST be able to [key interaction, e.g., "reset their password"]
-- **FR-004**: System MUST [data requirement, e.g., "persist user preferences"]
-- **FR-005**: System MUST [behavior, e.g., "log all security events"]
-
-*Example of marking unclear requirements:*
-
-- **FR-006**: System MUST authenticate users via [NEEDS CLARIFICATION: auth method not specified - email/password, SSO, OAuth?]
-- **FR-007**: System MUST retain user data for [NEEDS CLARIFICATION: retention period not specified]
-
-### Key Entities *(include if feature involves data)*
-
-- **[Entity 1]**: [What it represents, key attributes without implementation]
-- **[Entity 2]**: [What it represents, relationships to other entities]
-
-## Success Criteria *(mandatory)*
-
-
-
-### Measurable Outcomes
-
-- **SC-001**: [Measurable metric, e.g., "Users can complete account creation in under 2 minutes"]
-- **SC-002**: [Measurable metric, e.g., "System handles 1000 concurrent users without degradation"]
-- **SC-003**: [User satisfaction metric, e.g., "90% of users successfully complete primary task on first attempt"]
-- **SC-004**: [Business metric, e.g., "Reduce support tickets related to [X] by 50%"]
diff --git a/.specify/templates/tasks-template.md b/.specify/templates/tasks-template.md
deleted file mode 100644
index 60f9be4..0000000
--- a/.specify/templates/tasks-template.md
+++ /dev/null
@@ -1,251 +0,0 @@
----
-
-description: "Task list template for feature implementation"
----
-
-# Tasks: [FEATURE NAME]
-
-**Input**: Design documents from `/specs/[###-feature-name]/`
-**Prerequisites**: plan.md (required), spec.md (required for user stories), research.md, data-model.md, contracts/
-
-**Tests**: The examples below include test tasks. Tests are OPTIONAL - only include them if explicitly requested in the feature specification.
-
-**Organization**: Tasks are grouped by user story to enable independent implementation and testing of each story.
-
-## Format: `[ID] [P?] [Story] Description`
-
-- **[P]**: Can run in parallel (different files, no dependencies)
-- **[Story]**: Which user story this task belongs to (e.g., US1, US2, US3)
-- Include exact file paths in descriptions
-
-## Path Conventions
-
-- **Single project**: `src/`, `tests/` at repository root
-- **Web app**: `backend/src/`, `frontend/src/`
-- **Mobile**: `api/src/`, `ios/src/` or `android/src/`
-- Paths shown below assume single project - adjust based on plan.md structure
-
-
-
-## Phase 1: Setup (Shared Infrastructure)
-
-**Purpose**: Project initialization and basic structure
-
-- [ ] T001 Create project structure per implementation plan
-- [ ] T002 Initialize [language] project with [framework] dependencies
-- [ ] T003 [P] Configure linting and formatting tools
-
----
-
-## Phase 2: Foundational (Blocking Prerequisites)
-
-**Purpose**: Core infrastructure that MUST be complete before ANY user story can be implemented
-
-**⚠️ CRITICAL**: No user story work can begin until this phase is complete
-
-Examples of foundational tasks (adjust based on your project):
-
-- [ ] T004 Setup database schema and migrations framework
-- [ ] T005 [P] Implement authentication/authorization framework
-- [ ] T006 [P] Setup API routing and middleware structure
-- [ ] T007 Create base models/entities that all stories depend on
-- [ ] T008 Configure error handling and logging infrastructure
-- [ ] T009 Setup environment configuration management
-
-**Checkpoint**: Foundation ready - user story implementation can now begin in parallel
-
----
-
-## Phase 3: User Story 1 - [Title] (Priority: P1) 🎯 MVP
-
-**Goal**: [Brief description of what this story delivers]
-
-**Independent Test**: [How to verify this story works on its own]
-
-### Tests for User Story 1 (OPTIONAL - only if tests requested) ⚠️
-
-> **NOTE: Write these tests FIRST, ensure they FAIL before implementation**
-
-- [ ] T010 [P] [US1] Contract test for [endpoint] in tests/contract/test_[name].py
-- [ ] T011 [P] [US1] Integration test for [user journey] in tests/integration/test_[name].py
-
-### Implementation for User Story 1
-
-- [ ] T012 [P] [US1] Create [Entity1] model in src/models/[entity1].py
-- [ ] T013 [P] [US1] Create [Entity2] model in src/models/[entity2].py
-- [ ] T014 [US1] Implement [Service] in src/services/[service].py (depends on T012, T013)
-- [ ] T015 [US1] Implement [endpoint/feature] in src/[location]/[file].py
-- [ ] T016 [US1] Add validation and error handling
-- [ ] T017 [US1] Add logging for user story 1 operations
-
-**Checkpoint**: At this point, User Story 1 should be fully functional and testable independently
-
----
-
-## Phase 4: User Story 2 - [Title] (Priority: P2)
-
-**Goal**: [Brief description of what this story delivers]
-
-**Independent Test**: [How to verify this story works on its own]
-
-### Tests for User Story 2 (OPTIONAL - only if tests requested) ⚠️
-
-- [ ] T018 [P] [US2] Contract test for [endpoint] in tests/contract/test_[name].py
-- [ ] T019 [P] [US2] Integration test for [user journey] in tests/integration/test_[name].py
-
-### Implementation for User Story 2
-
-- [ ] T020 [P] [US2] Create [Entity] model in src/models/[entity].py
-- [ ] T021 [US2] Implement [Service] in src/services/[service].py
-- [ ] T022 [US2] Implement [endpoint/feature] in src/[location]/[file].py
-- [ ] T023 [US2] Integrate with User Story 1 components (if needed)
-
-**Checkpoint**: At this point, User Stories 1 AND 2 should both work independently
-
----
-
-## Phase 5: User Story 3 - [Title] (Priority: P3)
-
-**Goal**: [Brief description of what this story delivers]
-
-**Independent Test**: [How to verify this story works on its own]
-
-### Tests for User Story 3 (OPTIONAL - only if tests requested) ⚠️
-
-- [ ] T024 [P] [US3] Contract test for [endpoint] in tests/contract/test_[name].py
-- [ ] T025 [P] [US3] Integration test for [user journey] in tests/integration/test_[name].py
-
-### Implementation for User Story 3
-
-- [ ] T026 [P] [US3] Create [Entity] model in src/models/[entity].py
-- [ ] T027 [US3] Implement [Service] in src/services/[service].py
-- [ ] T028 [US3] Implement [endpoint/feature] in src/[location]/[file].py
-
-**Checkpoint**: All user stories should now be independently functional
-
----
-
-[Add more user story phases as needed, following the same pattern]
-
----
-
-## Phase N: Polish & Cross-Cutting Concerns
-
-**Purpose**: Improvements that affect multiple user stories
-
-- [ ] TXXX [P] Documentation updates in docs/
-- [ ] TXXX Code cleanup and refactoring
-- [ ] TXXX Performance optimization across all stories
-- [ ] TXXX [P] Additional unit tests (if requested) in tests/unit/
-- [ ] TXXX Security hardening
-- [ ] TXXX Run quickstart.md validation
-
----
-
-## Dependencies & Execution Order
-
-### Phase Dependencies
-
-- **Setup (Phase 1)**: No dependencies - can start immediately
-- **Foundational (Phase 2)**: Depends on Setup completion - BLOCKS all user stories
-- **User Stories (Phase 3+)**: All depend on Foundational phase completion
- - User stories can then proceed in parallel (if staffed)
- - Or sequentially in priority order (P1 → P2 → P3)
-- **Polish (Final Phase)**: Depends on all desired user stories being complete
-
-### User Story Dependencies
-
-- **User Story 1 (P1)**: Can start after Foundational (Phase 2) - No dependencies on other stories
-- **User Story 2 (P2)**: Can start after Foundational (Phase 2) - May integrate with US1 but should be independently testable
-- **User Story 3 (P3)**: Can start after Foundational (Phase 2) - May integrate with US1/US2 but should be independently testable
-
-### Within Each User Story
-
-- Tests (if included) MUST be written and FAIL before implementation
-- Models before services
-- Services before endpoints
-- Core implementation before integration
-- Story complete before moving to next priority
-
-### Parallel Opportunities
-
-- All Setup tasks marked [P] can run in parallel
-- All Foundational tasks marked [P] can run in parallel (within Phase 2)
-- Once Foundational phase completes, all user stories can start in parallel (if team capacity allows)
-- All tests for a user story marked [P] can run in parallel
-- Models within a story marked [P] can run in parallel
-- Different user stories can be worked on in parallel by different team members
-
----
-
-## Parallel Example: User Story 1
-
-```bash
-# Launch all tests for User Story 1 together (if tests requested):
-Task: "Contract test for [endpoint] in tests/contract/test_[name].py"
-Task: "Integration test for [user journey] in tests/integration/test_[name].py"
-
-# Launch all models for User Story 1 together:
-Task: "Create [Entity1] model in src/models/[entity1].py"
-Task: "Create [Entity2] model in src/models/[entity2].py"
-```
-
----
-
-## Implementation Strategy
-
-### MVP First (User Story 1 Only)
-
-1. Complete Phase 1: Setup
-2. Complete Phase 2: Foundational (CRITICAL - blocks all stories)
-3. Complete Phase 3: User Story 1
-4. **STOP and VALIDATE**: Test User Story 1 independently
-5. Deploy/demo if ready
-
-### Incremental Delivery
-
-1. Complete Setup + Foundational → Foundation ready
-2. Add User Story 1 → Test independently → Deploy/Demo (MVP!)
-3. Add User Story 2 → Test independently → Deploy/Demo
-4. Add User Story 3 → Test independently → Deploy/Demo
-5. Each story adds value without breaking previous stories
-
-### Parallel Team Strategy
-
-With multiple developers:
-
-1. Team completes Setup + Foundational together
-2. Once Foundational is done:
- - Developer A: User Story 1
- - Developer B: User Story 2
- - Developer C: User Story 3
-3. Stories complete and integrate independently
-
----
-
-## Notes
-
-- [P] tasks = different files, no dependencies
-- [Story] label maps task to specific user story for traceability
-- Each user story should be independently completable and testable
-- Verify tests fail before implementing
-- Commit after each task or logical group
-- Stop at any checkpoint to validate story independently
-- Avoid: vague tasks, same file conflicts, cross-story dependencies that break independence
diff --git a/CLAUDE.md b/CLAUDE.md
index 62afb03..43d5f70 100644
--- a/CLAUDE.md
+++ b/CLAUDE.md
@@ -92,7 +92,7 @@ SharpSync is a **pure .NET file synchronization library** with no native depende
- `ISyncDatabase` - Sync state persistence
- `IConflictResolver` - Pluggable conflict resolution strategies
- `ISyncFilter` - File filtering for selective sync
- - Domain models: `SyncItem` (with `IsSymlink` support), `SyncOptions`, `SyncProgress`, `SyncResult`
+ - Domain models: `SyncItem` (with `IsSymlink` support), `SyncOptions` (`ExcludePatterns` typed as `IList`), `SyncProgress`, `SyncResult`
2. **Storage Implementations** (`src/SharpSync/Storage/`)
- `LocalFileStorage` - Local filesystem operations with symlink detection, timestamp/permission preservation (fully implemented and tested)
@@ -104,7 +104,7 @@ SharpSync is a **pure .NET file synchronization library** with no native depende
Additional public types in `src/SharpSync/Storage/`:
- `ServerCapabilities` - Detected server features (Nextcloud, OCIS, chunking, TUS protocol version)
- `StorageOperation` - Enum: Upload, Download, Delete, Move
- - `StorageProgressEventArgs` - Byte-level progress for storage operations (path, bytes transferred, total, percent)
+ - `StorageProgressEventArgs` - Byte-level progress for storage operations (path, bytes transferred, total, percent); properties use `init` setters
- `ThrottledStream` - Token-bucket bandwidth throttling wrapper (internal)
- `ProgressStream` - Stream wrapper that fires progress events (internal)
@@ -113,17 +113,17 @@ SharpSync is a **pure .NET file synchronization library** with no native depende
- Pre-configured for Nextcloud and OCIS
4. **Database Layer** (`src/SharpSync/Database/`)
- - `SqliteSyncDatabase` - SQLite-based state tracking
+ - `SqliteSyncDatabase` (sealed) - SQLite-based state tracking, implements both `IDisposable` and `IAsyncDisposable`
- Optimized indexes for performance
- Transaction support for consistency
5. **Synchronization Engine** (`src/SharpSync/Sync/`)
- - `SyncEngine` - Production-ready sync implementation with:
+ - `SyncEngine` (sealed) - Production-ready sync implementation with:
- Incremental sync with change detection (timestamp, checksum-only, or size-only modes)
- Parallel processing for large file sets
- Three-phase optimization (directories/small files, large files, deletes/conflicts)
- All `SyncOptions` properties fully wired: `TimeoutSeconds`, `ChecksumOnly`, `SizeOnly`, `UpdateExisting`, `ConflictResolution` override, `ExcludePatterns`, `Verbose`, `FollowSymlinks`, `PreserveTimestamps`, `PreservePermissions`
- - `SyncFilter` - Pattern-based file filtering
+ - `SyncFilter` (sealed) - Pattern-based file filtering with ReDoS-safe `NonBacktracking` regex
Internal sync pipeline types (in `Oire.SharpSync.Sync` namespace):
- `IChange` / `AdditionChange` / `ModificationChange` / `DeletionChange` - Change detection models
@@ -167,7 +167,10 @@ SharpSync is a **pure .NET file synchronization library** with no native depende
- `SSH.NET` - SFTP protocol implementation
- `FluentFTP` - FTP/FTPS protocol implementation
- `AWSSDK.S3` - Amazon S3 and S3-compatible storage
+- `Roslynator.Analyzers` - Code quality analyzers (dev-only, `PrivateAssets="all"`)
+- `Microsoft.SourceLink.GitHub` - SourceLink for NuGet debugging (dev-only, `PrivateAssets="all"`)
- Target Framework: .NET 8.0
+- Deterministic builds enabled, embedded debug symbols, SourceLink configured
See `src/SharpSync/SharpSync.csproj` for current versions.
@@ -180,10 +183,10 @@ See `src/SharpSync/SharpSync.csproj` for current versions.
### Design Patterns
1. **Interface-Based Design**: All major components use interfaces for testability
-2. **Async/Await Throughout**: Modern async patterns for all I/O operations
+2. **Async/Await Throughout**: Modern async patterns for all I/O operations, `ConfigureAwait(false)` on every `await` (enforced by Roslynator RCS1090 via `.editorconfig`)
3. **Event-Driven Progress**: Events for progress and conflict notifications
4. **Dependency Injection Ready**: Constructor-based dependencies
-5. **Disposable Pattern**: Proper resource cleanup
+5. **Disposable Pattern**: Sealed classes with `IDisposable` and `IAsyncDisposable` where appropriate
### Important Considerations
@@ -482,9 +485,9 @@ The core library is production-ready. All critical items are complete and the li
- `S3Storage` - Fully implemented with multipart uploads and tested (LocalStack integration)
- `WebDavStorage` - OAuth2, chunking, platform optimizations, and tested
- `SqliteSyncDatabase` - Complete with transaction support and tests
-- `SmartConflictResolver` - Intelligent conflict analysis with tests
+- `SmartConflictResolver` - Intelligent conflict analysis with tests (`ConflictAnalysis.TimeDifference` is `TimeSpan`)
- `DefaultConflictResolver` - Strategy-based resolution with tests
-- `SyncFilter` - Pattern-based filtering with tests
+- `SyncFilter` (sealed) - Pattern-based filtering with tests, accepts optional `ILogger`
**Infrastructure**
- Clean solution structure
@@ -535,3 +538,15 @@ All critical items have been resolved.
- ✅ All `SyncOptions` properties wired and functional (TimeoutSeconds, ChecksumOnly, SizeOnly, UpdateExisting, ConflictResolution override, ExcludePatterns, Verbose, FollowSymlinks, PreserveTimestamps, PreservePermissions)
- ✅ `ISyncStorage.SetLastModifiedAsync` / `SetPermissionsAsync` default interface methods
- ✅ Symlink detection (`SyncItem.IsSymlink`) in Local and SFTP storage
+- ✅ `ConfigureAwait(false)` on all await calls (enforced by Roslynator RCS1090)
+- ✅ SourceLink, deterministic builds, embedded debug symbols
+- ✅ `SyncOptions.Clone()` deep copy fix for `ExcludePatterns`
+- ✅ Thread-safe `ChangeSet` for parallel directory scanning
+- ✅ `IAsyncDisposable` on `SqliteSyncDatabase`
+- ✅ Sealed `SyncEngine`, `SqliteSyncDatabase`, `SyncFilter` classes
+- ✅ Logging in all catch blocks (no more silent exception swallowing)
+- ✅ ReDoS protection via `RegexOptions.NonBacktracking` in `SyncFilter`
+- ✅ `StorageProgressEventArgs` immutable (`init` setters)
+- ✅ `SyncOptions.ExcludePatterns` typed as `IList<string>`
+- ✅ `ConflictAnalysis.TimeDifference` changed from `double` to `TimeSpan`
+- ✅ Removed `await Task.CompletedTask` antipattern from all methods
diff --git a/FIX-BEFORE-1.0.0.md b/FIX-BEFORE-1.0.0.md
index 5053c71..c1b35c1 100644
--- a/FIX-BEFORE-1.0.0.md
+++ b/FIX-BEFORE-1.0.0.md
@@ -3,319 +3,160 @@
**Audited**: 2026-02-14
**Build**: Clean (0 warnings, 0 errors)
**Tests**: 1,636 passed, 0 failed, 260 skipped (integration tests require Docker)
+**Fixes applied**: 2026-02-14
+**Post-fix build**: Clean (0 errors, 0 warnings except expected SourceLink local build warning)
+**Post-fix tests**: 818 passed, 130 skipped, 0 failed
---
## CRITICAL -- Must fix before release
-### C1. Missing `ConfigureAwait(false)` throughout the entire library
+### C1. Missing `ConfigureAwait(false)` throughout the entire library -- FIXED
-**Where**: Every `await` call in every `.cs` file under `src/SharpSync/`. This affects `SyncEngine.cs`, all five storage implementations (`WebDavStorage.cs`, `SftpStorage.cs`, `FtpStorage.cs`, `S3Storage.cs`, `LocalFileStorage.cs`), `SqliteSyncDatabase.cs`, `SmartConflictResolver.cs`, and `DefaultConflictResolver.cs`.
+**Where**: Every `await` call in every `.cs` file under `src/SharpSync/`.
-**What**: There is not a single call to `.ConfigureAwait(false)` anywhere in the library. When a consumer calls this library from a WPF, WinForms, or ASP.NET application, every `await` without `ConfigureAwait(false)` captures the synchronization context and marshals the continuation back to the calling thread (e.g., the UI thread). This causes:
+**What**: There was not a single call to `.ConfigureAwait(false)` anywhere in the library. This causes deadlocks and performance degradation when called from UI threads.
-- Deadlocks (the classic `.Result`/`.Wait()` scenario)
-- Performance degradation from unnecessary UI thread marshaling
+**Fix applied**: Added `.ConfigureAwait(false)` to every `await` call in the library via Roslynator.Analyzers (RCS1090) auto-fix + 4 manual fixes in FtpStorage.cs. Roslynator kept as PrivateAssets="all" dev dependency. `.editorconfig` updated with `dotnet_diagnostic.RCS1090.severity = warning` and `roslynator_configure_await = true` to enforce going forward.
-This is the #1 best practice for .NET library code per Microsoft's guidelines. For a library explicitly targeting desktop client integration, this is an outright correctness hazard.
+### C2. `SyncOptions.Clone()` produces a broken shallow copy -- FIXED
-**Fix**: Add `.ConfigureAwait(false)` to every `await` call in the library. Consider adding `ConfigureAwait.Fody` or `Meziantou.Analyzer` (rule MA0004) to enforce this going forward.
+**Where**: `src/SharpSync/Core/SyncOptions.cs`.
-### C2. `SyncOptions.Clone()` produces a broken shallow copy
+**What**: `Clone()` used `MemberwiseClone()` which shared the `ExcludePatterns` list reference.
-**Where**: `src/SharpSync/Core/SyncOptions.cs`, line 109.
-
-**What**: `Clone()` uses `MemberwiseClone()`, which creates a shallow copy. The `ExcludePatterns` property is a `List` (a reference type), so the cloned instance shares the same list object. Mutating `ExcludePatterns` on the clone corrupts the original, and vice versa.
-
-**Fix**:
-```csharp
-public SyncOptions Clone() {
- var clone = (SyncOptions)MemberwiseClone();
- clone.ExcludePatterns = new List<string>(ExcludePatterns);
- return clone;
-}
-```
+**Fix applied**: Deep copy using spread syntax: `clone.ExcludePatterns = [..ExcludePatterns];`
---
## SERIOUS -- Should fix before release
-### S1. No SourceLink, no deterministic builds, no debug symbols
-
-**Where**: `src/SharpSync/SharpSync.csproj`.
-
-**What**: Missing `PublishRepositoryUrl`, `EmbedUntrackedSources`, `DebugType` set to `embedded`/`portable`, `IncludeSymbols`/`SymbolPackageFormat`, and SourceLink package reference. `Deterministic` is not explicitly set. `IncludeSourceRevisionInInformationalVersion` is actually set to `false`, which suppresses the commit hash.
-
-Without SourceLink, consumers cannot step into SharpSync source code when debugging. Without deterministic builds, reproducibility is not guaranteed. These are table-stakes for any serious NuGet package.
-
-**Fix**: Add to the `<PropertyGroup>`:
-```xml
-<PublishRepositoryUrl>true</PublishRepositoryUrl>
-<EmbedUntrackedSources>true</EmbedUntrackedSources>
-<DebugType>embedded</DebugType>
-<Deterministic>true</Deterministic>
-<ContinuousIntegrationBuild>true</ContinuousIntegrationBuild>
-```
-And add:
-```xml
-<PackageReference Include="Microsoft.SourceLink.GitHub" Version="8.0.0" PrivateAssets="all" />
-```
-
-### S2. `SyncPlan` properties enumerate `Actions` on every access
-
-**Where**: `src/SharpSync/Core/SyncPlan.cs`, lines 20-75.
-
-**What**: Every property (`Downloads`, `Uploads`, `LocalDeletes`, `RemoteDeletes`, `Conflicts`) calls `.Where(...).ToList()` on the full `Actions` list each time it is accessed. The computed properties (`DownloadCount`, `UploadCount`, etc.) each trigger their respective property getter, which re-enumerates and re-allocates. Properties like `HasConflicts` call `ConflictCount`, which calls `Conflicts`, which allocates a full list just to check `.Count`.
-
-A consumer inspecting a plan in a UI will enumerate `Actions` 10+ times, creating a new `List` allocation each time.
-
-**Fix**: Cache the categorized lists lazily:
-```csharp
-private IReadOnlyList<SyncAction>? _downloads;
-public IReadOnlyList<SyncAction> Downloads =>
- _downloads ??= Actions.Where(a => a.ActionType == SyncActionType.Download).ToList();
-```
-Or compute all groups once in the constructor/init.
-
-### S4. Regex Denial of Service (ReDoS) risk in `SyncFilter`
-
-**Where**: `src/SharpSync/Sync/SyncFilter.cs`:
-- Line 88: `new Regex(pattern, RegexOptions.IgnoreCase | RegexOptions.Compiled)` (exclusion)
-- Line 117: `new Regex(pattern, RegexOptions.IgnoreCase | RegexOptions.Compiled)` (inclusion)
-- Line 224: `Regex.IsMatch(path, regexPattern, RegexOptions.IgnoreCase)` (wildcard matching)
-
-**What**: User-provided patterns are compiled into regex without `NonBacktracking` or a timeout. A malicious or accidental pattern could cause catastrophic backtracking, hanging the sync engine.
-
-**Fix**: Use `RegexOptions.NonBacktracking` (available in .NET 8+):
-```csharp
-var regex = new Regex(pattern, RegexOptions.IgnoreCase | RegexOptions.NonBacktracking);
-// ...
-return Regex.IsMatch(path, regexPattern, RegexOptions.IgnoreCase | RegexOptions.NonBacktracking);
-```
-Note: `NonBacktracking` and `Compiled` are mutually exclusive in .NET. `NonBacktracking` is preferred here since security > minor perf gain from compilation.
-
-### S4. `SqliteSyncDatabase.Dispose()` calls `.Wait()` on an async method
-
-**Where**: `src/SharpSync/Database/SqliteSyncDatabase.cs`, line 292.
-
-**What**: `Dispose()` calls `_connection?.CloseAsync().Wait()`, which blocks the calling thread waiting for an async operation. Combined with C1 (no `ConfigureAwait(false)`), this can deadlock on the UI thread.
-
-**Fix**: Implement `IAsyncDisposable` alongside `IDisposable`:
-```csharp
-public class SqliteSyncDatabase : ISyncDatabase, IAsyncDisposable {
- public async ValueTask DisposeAsync() {
- if (!_disposed) {
- if (_connection is not null)
- await _connection.CloseAsync().ConfigureAwait(false);
- _connection = null;
- _disposed = true;
- }
- GC.SuppressFinalize(this);
- }
-}
-```
-
-### S5. Four bare `catch { }` blocks swallow all exceptions silently
-
-**Where**:
-- `src/SharpSync/Sync/SyncEngine.cs:1430` -- `GetDomainFromUrl` catches URI parsing failures
-- `src/SharpSync/Sync/SyncEngine.cs:2158` -- `TryGetItemAsync` catches storage errors
-- `src/SharpSync/Sync/SyncFilter.cs:90` -- catches regex compilation failures (exclusion)
-- `src/SharpSync/Sync/SyncFilter.cs:119` -- catches regex compilation failures (inclusion)
-
-**What**: Silent exception swallowing makes debugging impossible. The `TryGetItemAsync` case is particularly severe since it directly affects sync behavior -- when a storage call fails, the file is silently skipped.
-
-**Fix**: At minimum, log at debug/trace level in each catch block. For `TryGetItemAsync`, change to:
-```csharp
-catch (Exception ex) {
- _logger.LogDebug(ex, "Failed to get item at {Path}", path);
- return null;
-}
-```
-For `SyncFilter`, log when a user-provided pattern fails to compile.
-
-### S6. Non-sealed public classes implementing `IDisposable` without proper pattern
-
-**Where**:
-- `src/SharpSync/Sync/SyncEngine.cs:38` -- `public class SyncEngine`
-- `src/SharpSync/Database/SqliteSyncDatabase.cs:13` -- `public class SqliteSyncDatabase`
-
-**What**: These are unsealed public classes implementing `IDisposable` without the `Dispose(bool disposing)` pattern and without `GC.SuppressFinalize(this)`. If subclassed, derived classes cannot properly participate in disposal. Violates CA1063.
-
-**Fix**: Either mark them `sealed`, or implement the full dispose pattern:
-```csharp
-protected virtual void Dispose(bool disposing) {
- if (!_disposed) {
- if (disposing) { /* dispose managed resources */ }
- _disposed = true;
- }
-}
-
-public void Dispose() {
- Dispose(disposing: true);
- GC.SuppressFinalize(this);
-}
-```
-Sealing is simpler and likely the right choice if subclassing is not a supported scenario.
-
-### S7. `SynchronizeAsync` wraps `OperationCanceledException` in dead code
-
-**Where**: `src/SharpSync/Sync/SyncEngine.cs`, lines 232, 1750, 1833.
-
-**What**: When a sync is cancelled, the code does:
-```csharp
-catch (OperationCanceledException) {
- result.Error = new InvalidOperationException("Synchronization was cancelled");
- throw;
-}
-```
-The `result.Error` assignment is dead code -- since the exception is re-thrown, `result` is never returned to the caller. It's also misleading to wrap `OperationCanceledException` in an `InvalidOperationException`.
-
-**Fix**: Simply re-throw without setting `result.Error`:
-```csharp
-catch (OperationCanceledException) {
- throw;
-}
-```
-Or remove the catch block entirely (it only re-throws).
-
----
-
-## MODERATE -- Should fix for polish
-
-### M1. No `PackageIcon` in the NuGet package
+### S1. No SourceLink, no deterministic builds, no debug symbols -- FIXED
**Where**: `src/SharpSync/SharpSync.csproj`.
-**What**: No `PackageIcon` property and no icon file. Packages without icons look unprofessional on NuGet.org.
+**Fix applied**: Added `PublishRepositoryUrl`, `EmbedUntrackedSources`, `DebugType=embedded`, `Deterministic`, `ContinuousIntegrationBuild` (CI-only). Added `Microsoft.SourceLink.GitHub` 8.0.0 as PrivateAssets="all". Removed `IncludeSourceRevisionInInformationalVersion=false`.
-**Fix**: Add a 128x128 PNG icon and reference it:
-```xml
-icon.png
-
-
-```
+### S2. `SyncPlan` properties enumerate `Actions` on every access -- FIXED
-### M2. No `CHANGELOG.md`
+**Where**: `src/SharpSync/Core/SyncPlan.cs`.
-**Where**: Repository root.
+**Fix applied**: Added lazy caching with `??=` pattern using private backing fields for all categorized lists (`_downloads`, `_uploads`, `_localDeletes`, `_remoteDeletes`, `_conflicts`).
-**What**: For a v1.0.0 release, consumers expect a changelog documenting what's in the release. Follow [Keep a Changelog](https://keepachangelog.com/) format.
+### S3. Regex Denial of Service (ReDoS) risk in `SyncFilter` -- FIXED
-### M3. `Array.Empty<T>()` instead of `[]` collection expressions
+**Where**: `src/SharpSync/Sync/SyncFilter.cs`.
-**Where**:
-- `src/SharpSync/Core/SyncPlan.cs:15`
-- `src/SharpSync/Sync/SyncEngine.cs:295, 341`
+**Fix applied**: Changed `RegexOptions.Compiled` to `RegexOptions.NonBacktracking` in all 3 regex usages (2 compiled patterns + 1 static `Regex.IsMatch` call).
-**What**: Uses `Array.Empty<T>()` where the C# 12 collection expression `[]` is preferred for .NET 8.
+### S4. `SqliteSyncDatabase.Dispose()` calls `.Wait()` on an async method -- FIXED
-### M4. `await Task.CompletedTask` antipattern
+**Where**: `src/SharpSync/Database/SqliteSyncDatabase.cs`.
-**Where**:
-- `src/SharpSync/Storage/LocalFileStorage.cs:191, 211, 238`
-- `src/SharpSync/Core/DefaultConflictResolver.cs:31`
-- `src/SharpSync/Core/SmartConflictResolver.cs:117`
+**Fix applied**: Added `IAsyncDisposable` interface with `DisposeAsync()` method. Changed `.Wait()` to `.GetAwaiter().GetResult()` in sync `Dispose()`. Added `GC.SuppressFinalize(this)` to both dispose methods.
-**What**: `await Task.CompletedTask` adds an unnecessary state machine allocation while providing no actual asynchrony. The comment "Make it truly async" is incorrect -- `Task.CompletedTask` completes synchronously.
+### S5. Four bare `catch { }` blocks swallow all exceptions silently -- FIXED
-**Fix**: Remove the `async` keyword and return `Task.FromResult(value)` directly, or use `ValueTask`.
+**Where**: `SyncEngine.cs` (2 locations), `SyncFilter.cs` (2 locations).
-### M5. `StorageProgressEventArgs` uses mutable setters
+**Fix applied**:
+- `GetDomainFromUrl`: Changed `catch {` to `catch (UriFormatException)` (typed catch, no need to log)
+- `TryGetItemAsync`: Made non-static, added `catch (Exception ex) when (ex is not OperationCanceledException)` with logging via `StorageItemRetrievalFailed` (EventId 46)
+- `SyncFilter` (2 locations): Added `ILogger` constructor parameter, changed `catch {` to `catch (ArgumentException ex)` with logging via `SyncFilterRegexCompilationFailed` (EventId 47)
-**Where**: `src/SharpSync/Storage/StorageProgressEventArgs.cs`, lines 10-30.
+### S6. Non-sealed public classes implementing `IDisposable` without proper pattern -- FIXED
-**What**: All properties have public `set` accessors. Event args should be immutable by convention -- multiple subscribers could mutate the same instance. Compare with `FileProgressEventArgs` and `SyncProgressEventArgs` which correctly use read-only properties.
+**Where**: `SyncEngine`, `SqliteSyncDatabase`, `SyncFilter`.
-**Fix**: Change to `init` or constructor-initialized read-only properties.
+**Fix applied**: Sealed all three classes (`public sealed class`). Added `GC.SuppressFinalize(this)` to `SyncEngine.Dispose()`.
-### M6. `SyncOptions.ExcludePatterns` exposes concrete `List`
+### S7. `SynchronizeAsync` wraps `OperationCanceledException` in dead code -- FIXED
-**Where**: `src/SharpSync/Core/SyncOptions.cs:60`.
+**Where**: `src/SharpSync/Sync/SyncEngine.cs` (3 locations).
-**What**: Initialized as `new List<string>()` (old-style syntax) and exposes the concrete `List<string>` type.
+**Fix applied**: Removed `result.Error = new InvalidOperationException("Synchronization was cancelled")` from all 3 `OperationCanceledException` catch blocks, leaving only `throw;`.
-**Fix**: Change initialization to `[]` and consider changing the property type to `IList`.
+---
-### M7. `SyncItem.Metadata` exposes `Dictionary`
+## MODERATE -- Should fix for polish
-**Where**: `src/SharpSync/Core/SyncItem.cs:45`.
+### M1. No `PackageIcon` in the NuGet package -- NOT FIXED
-**What**: Exposes a mutable concrete `Dictionary<string, object>` directly. Uses `object` as the value type, requiring casting by consumers. No documentation on expected keys.
+**Status**: Deferred. Requires creating an icon file.
-**Fix**: Document expected metadata keys. Consider `IDictionary` for a simpler public API.
+### M2. No `CHANGELOG.md` -- NOT FIXED
-### M8. `ChangeSet` race condition from parallel mutation
+**Status**: Deferred. To be created at release time.
-**Where**: `src/SharpSync/Sync/ChangeSet.cs` and `src/SharpSync/Sync/SyncEngine.cs:630-635`.
+### M3. `Array.Empty<T>()` instead of `[]` collection expressions -- FIXED
-**What**: `ChangeSet` contains `List` and `HashSet` properties that are mutated from parallel tasks. `ScanDirectoryRecursiveAsync` launches tasks via `Task.WhenAll` (line 635), and each task modifies the shared `ChangeSet` (adding to `LocalPaths`, `RemotePaths`, `Additions`, `Modifications`). `List` and `HashSet` are not thread-safe -- this is a race condition that can cause data corruption or exceptions.
+**Where**: Multiple files.
-**Fix**: Use `ConcurrentBag` / `ConcurrentDictionary` for thread-safe collections, or add locking around mutations to the `ChangeSet`.
+**Fix applied**: Replaced all `Array.Empty<T>()` with `[]` collection expressions across 6 files (OAuth2Config, ISyncStorage, S3Storage, WebDavStorage, SyncEngine, SmartConflictResolver). Also replaced `new[] { ... }` with `[]` in OAuth2Config and SmartConflictResolver.
----
+### M4. `await Task.CompletedTask` antipattern -- FIXED
-## NITPICK -- Would be nice to fix
+**Where**: LocalFileStorage (3 methods), DefaultConflictResolver, SmartConflictResolver.
-### N1. `SmartConflictResolver` references "Nimbus" by name
+**Fix applied**: Removed `async` keyword and replaced with `Task.FromResult`/`Task.CompletedTask` returns in all 5 methods.
-**Where**: `src/SharpSync/Core/SmartConflictResolver.cs:12`.
+### M5. `StorageProgressEventArgs` uses mutable setters -- FIXED
-**What**: XML doc says "Nimbus can implement this to show dialogs". Library public API docs should be consumer-agnostic.
+**Where**: `src/SharpSync/Storage/StorageProgressEventArgs.cs`.
-**Fix**: Change to "Desktop clients can implement this to show UI dialogs."
+**Fix applied**: Changed all `{ get; set; }` to `{ get; init; }`.
-### N2. Inconsistent collection initialization
+### M6. `SyncOptions.ExcludePatterns` exposes concrete `List` -- FIXED
-**Where**: Throughout the codebase.
+**Where**: `src/SharpSync/Core/SyncOptions.cs`.
-**What**: Mixes `new List()`, `new()`, and `[]` syntax. Example: `SyncFilter.cs` uses `new()`, `SyncOptions.cs` uses `new List()`, `OAuth2Config.cs` uses `new()` for dictionary.
+**Fix applied**: Changed property type from `List<string>` to `IList<string>`, initializer from `new List<string>()` to `[]`, and Clone deep copy to `[..ExcludePatterns]`.
-**Fix**: Standardize on `[]` collection expression syntax (C# 12 / .NET 8).
+### M7. `SyncItem.Metadata` exposes `Dictionary` -- NOT FIXED
-### N3. `OAuth2Config` property initialization inconsistency
+**Status**: Deferred. Low impact, would require broader API changes.
-**Where**: `src/SharpSync/Auth/OAuth2Config.cs`, lines 35 and 40.
+### M8. `ChangeSet` race condition from parallel mutation -- FIXED
-**What**: `Scopes` uses `Array.Empty<string>()` while `AdditionalParameters` uses `new()`. Two properties on the same type with different patterns.
+**Where**: `src/SharpSync/Sync/ChangeSet.cs` and `SyncEngine.cs`.
-**Fix**: Use `[]` for both.
+**Fix applied**: Added `SyncRoot` lock object to `ChangeSet`. Wrapped all mutations in `ScanDirectoryRecursiveAsync` with `lock (changeSet.SyncRoot)` blocks. Async `HasChangedAsync` calls run outside the lock.
-### N4. `GetSyncPlanAsync` catches `Exception` and returns empty plan silently
+---
-**Where**: `src/SharpSync/Sync/SyncEngine.cs:339`.
+## NITPICK -- Would be nice to fix
-**What**: Catches all exceptions and returns an empty plan, giving the caller no indication that something went wrong. If there's a network error or database failure, the consumer receives an empty plan and concludes there are no changes.
+### N1. `SmartConflictResolver` references "Nimbus" by name -- FIXED
-**Fix**: Log the exception and consider either propagating it or adding an error property to `SyncPlan`.
+**Fix applied**: Changed to "Desktop clients can implement this to show UI dialogs."
-### N5. `SyncEngine` constructor has 9 parameters
+### N2. Inconsistent collection initialization -- PARTIALLY FIXED
-**Where**: `src/SharpSync/Sync/SyncEngine.cs:138-148`.
+**Status**: Fixed all `Array.Empty<T>()` and `new[] { ... }` usages. Remaining `new List<T>()` in local variables left as-is because `var x = []` doesn't compile (type cannot be inferred).
-**What**: 4 required + 5 optional parameters. While optional parameters mitigate the pain, this is a code smell. An options/builder pattern would be more idiomatic.
+### N3. `OAuth2Config` property initialization inconsistency -- FIXED
-**Fix**: Consider a `SyncEngineOptions` class or builder pattern for the optional parameters. Not required for v1.0 but worth considering.
+**Fix applied**: `Scopes` now uses `[]`. `AdditionalParameters` stays `new()` because `[]` doesn't work for `Dictionary<string, string>`.
-### N6. Inconsistent XML doc summary trailing periods
+### N4. `GetSyncPlanAsync` catches `Exception` and returns empty plan silently -- FIXED
-**Where**: Multiple files.
+**Fix applied**: Added logging via `SyncPlanGenerationFailed` (EventId 48, Warning level).
-**What**: Some `` elements end with a period, others do not. For example, `ISyncStorage.cs` has no periods while `SyncEngine.cs` sometimes does.
+### N5. `SyncEngine` constructor has 9 parameters -- NOT FIXED
-**Fix**: Consistently end all `` text with a period.
+**Status**: Deferred per audit recommendation ("Not required for v1.0 but worth considering").
-### N7. `ConflictAnalysis.TimeDifference` is `double` instead of `TimeSpan`
+### N6. Inconsistent XML doc summary trailing periods -- NOT FIXED
-**Where**: `src/SharpSync/Core/ConflictAnalysis.cs:60`.
+**Status**: Deferred. Cosmetic, high churn.
-**What**: `TimeDifference` is typed as `double` representing seconds. Using `TimeSpan` would be more idiomatic and self-documenting.
+### N7. `ConflictAnalysis.TimeDifference` is `double` instead of `TimeSpan` -- FIXED
-### N8. Test class uses `.GetAwaiter().GetResult()` in constructor
+**Fix applied**: Changed property type from `double` to `TimeSpan`. Updated producer in `SmartConflictResolver` to use `TimeSpan.Duration()`. Updated all tests.
-**Where**: `tests/SharpSync.Tests/Sync/SyncEngineTests.cs:28`.
+### N8. Test class uses `.GetAwaiter().GetResult()` in constructor -- FIXED
-**What**: The test class constructor calls `_database.InitializeAsync().GetAwaiter().GetResult()`. xUnit supports `IAsyncLifetime` for async setup.
+**Fix applied**: Converted `SyncEngineTests` from `IDisposable` to `IAsyncLifetime`. Moved `_database.InitializeAsync()` to `InitializeAsync()` method. Also modernized `new[] { ... }` to `[]` for static field initializers.
---
@@ -347,17 +188,14 @@ For a v1.0 library with this much API surface, published API documentation would
---
-## Priority order for fixes
-
-1. **C1** -- `ConfigureAwait(false)` everywhere (release-blocker, deadlock risk)
-2. **C2** -- `SyncOptions.Clone()` deep copy (release-blocker, data corruption)
-3. **M8** -- `ChangeSet` thread safety (race condition causing potential crashes)
-4. **S4** -- `SqliteSyncDatabase.Dispose()` async (deadlock risk)
-5. **S5** -- Silent catch blocks (debuggability)
-6. **S6** -- Seal or fix dispose pattern (API correctness)
-7. **S7** -- Dead code in cancellation handlers (cleanup)
-8. **S1** -- SourceLink + deterministic builds (NuGet best practice)
-9. **S2** -- `SyncPlan` allocation waste (performance)
-10. **S3** -- ReDoS protection (security)
-11. **N1** -- Remove "Nimbus" reference (API neutrality)
-12. Everything else in order of severity
+## Summary
+
+| Category | Total | Fixed | Deferred |
+|----------|-------|-------|----------|
+| CRITICAL | 2 | 2 | 0 |
+| SERIOUS | 7 | 7 | 0 |
+| MODERATE | 8 | 6 | 2 (M1, M7) |
+| NITPICK | 8 | 6 | 2 (N5, N6) |
+| **Total** | **25** | **21** | **4** |
+
+All critical and serious issues are resolved. Remaining 4 deferred items are cosmetic or low-impact.
diff --git a/README.md b/README.md
index b01b9d5..50445db 100644
--- a/README.md
+++ b/README.md
@@ -414,7 +414,7 @@ var options = new SyncOptions
ConflictResolution = ConflictResolution.Ask,
TimeoutSeconds = 300, // 5 minute timeout
MaxBytesPerSecond = null, // No bandwidth limit
- ExcludePatterns = new List<string> { "*.tmp", "~*" }
+ ExcludePatterns = ["*.tmp", "~*"]
};
```
diff --git a/icon.png b/icon.png
new file mode 100644
index 0000000..637e4a6
Binary files /dev/null and b/icon.png differ
diff --git a/samples/SharpSync.Samples.Console/BasicSyncExample.cs b/samples/SharpSync.Samples.Console/BasicSyncExample.cs
index 3f06e56..3177233 100644
--- a/samples/SharpSync.Samples.Console/BasicSyncExample.cs
+++ b/samples/SharpSync.Samples.Console/BasicSyncExample.cs
@@ -239,7 +239,7 @@ public static async Task SyncWithOptionsAsync(ISyncEngine syncEngine) {
// Per-sync exclude patterns (applied in addition to the engine-level SyncFilter).
// Useful for one-off syncs that need extra filtering without modifying the filter.
var excludeOptions = new SyncOptions {
- ExcludePatterns = new List<string> { "*.bak", "thumbs.db", "*.tmp" }
+ ExcludePatterns = ["*.bak", "thumbs.db", "*.tmp"]
};
await syncEngine.SynchronizeAsync(excludeOptions);
@@ -269,7 +269,7 @@ public static async Task SyncWithOptionsAsync(ISyncEngine syncEngine) {
var combinedOptions = new SyncOptions {
ChecksumOnly = true,
PreserveTimestamps = true,
- ExcludePatterns = new List<string> { "*.log" },
+ ExcludePatterns = ["*.log"],
TimeoutSeconds = 600,
Verbose = true
};
diff --git a/samples/SharpSync.Samples.Console/ConsoleOAuth2Example.cs b/samples/SharpSync.Samples.Console/ConsoleOAuth2Example.cs
index 7fb461b..eb72269 100644
--- a/samples/SharpSync.Samples.Console/ConsoleOAuth2Example.cs
+++ b/samples/SharpSync.Samples.Console/ConsoleOAuth2Example.cs
@@ -131,25 +131,24 @@ public async Task RefreshTokenAsync(
}
///
- public async Task<bool> ValidateTokenAsync(
+ public Task<bool> ValidateTokenAsync(
OAuth2Result result,
CancellationToken cancellationToken = default) {
// Quick local check first
if (!result.IsValid) {
- return false;
+ return Task.FromResult(false);
}
// Check if token will expire within 30 seconds
if (result.WillExpireWithin(TimeSpan.FromSeconds(30))) {
- return false;
+ return Task.FromResult(false);
}
// Token appears valid based on expiry time.
// A production implementation could make a lightweight API call
// (e.g., GET /ocs/v2.php/cloud/user for Nextcloud) to verify
// the token is actually accepted by the server.
- await Task.CompletedTask; // Placeholder for async API validation
- return true;
+ return Task.FromResult(true);
}
private async Task ExchangeCodeForTokensAsync(
diff --git a/src/SharpSync/Auth/OAuth2Config.cs b/src/SharpSync/Auth/OAuth2Config.cs
index 0924a84..bc97900 100644
--- a/src/SharpSync/Auth/OAuth2Config.cs
+++ b/src/SharpSync/Auth/OAuth2Config.cs
@@ -32,7 +32,7 @@ public record OAuth2Config {
///
/// Requested OAuth scopes
///
- public string[] Scopes { get; init; } = Array.Empty<string>();
+ public string[] Scopes { get; init; } = [];
///
/// Additional parameters for authorization request
@@ -48,7 +48,7 @@ public static OAuth2Config ForNextcloud(string serverUrl, string clientId, strin
AuthorizeUrl = $"{serverUrl.TrimEnd('/')}/apps/oauth2/authorize",
TokenUrl = $"{serverUrl.TrimEnd('/')}/apps/oauth2/api/v1/token",
RedirectUri = redirectUri,
- Scopes = new[] { "files" },
+ Scopes = ["files"],
AdditionalParameters = new Dictionary<string, string>
{
{ "response_type", "code" }
@@ -65,7 +65,7 @@ public static OAuth2Config ForOcis(string serverUrl, string clientId, string red
AuthorizeUrl = $"{serverUrl.TrimEnd('/')}/oauth2/auth",
TokenUrl = $"{serverUrl.TrimEnd('/')}/oauth2/token",
RedirectUri = redirectUri,
- Scopes = new[] { "openid", "profile", "offline_access" },
+ Scopes = ["openid", "profile", "offline_access"],
AdditionalParameters = new Dictionary<string, string>
{
{ "response_type", "code" }
diff --git a/src/SharpSync/Core/ConflictAnalysis.cs b/src/SharpSync/Core/ConflictAnalysis.cs
index 85846d3..e12f940 100644
--- a/src/SharpSync/Core/ConflictAnalysis.cs
+++ b/src/SharpSync/Core/ConflictAnalysis.cs
@@ -55,9 +55,9 @@ public record ConflictAnalysis {
public DateTime? RemoteModified { get; init; }
///
- /// Absolute difference in modification times (seconds)
+ /// Absolute difference in modification times
///
- public double TimeDifference { get; init; }
+ public TimeSpan TimeDifference { get; init; }
///
/// Which version appears to be newer ("Local", "Remote", or null if unclear)
diff --git a/src/SharpSync/Core/DefaultConflictResolver.cs b/src/SharpSync/Core/DefaultConflictResolver.cs
index f7e41ca..3b498ef 100644
--- a/src/SharpSync/Core/DefaultConflictResolver.cs
+++ b/src/SharpSync/Core/DefaultConflictResolver.cs
@@ -23,13 +23,8 @@ public DefaultConflictResolver(ConflictResolution defaultResolution) {
/// The conflict event arguments
/// Cancellation token
/// The configured default resolution
- public async Task ResolveConflictAsync(FileConflictEventArgs conflict, CancellationToken cancellationToken = default) {
+ public Task ResolveConflictAsync(FileConflictEventArgs conflict, CancellationToken cancellationToken = default) {
cancellationToken.ThrowIfCancellationRequested();
-
- // For this implementation, we always return the default resolution
- // regardless of the conflict details
- await Task.CompletedTask; // Make it truly async for interface compliance
-
- return DefaultResolution;
+ return Task.FromResult(DefaultResolution);
}
}
diff --git a/src/SharpSync/Core/ISyncStorage.cs b/src/SharpSync/Core/ISyncStorage.cs
index b0f25c3..ba43643 100644
--- a/src/SharpSync/Core/ISyncStorage.cs
+++ b/src/SharpSync/Core/ISyncStorage.cs
@@ -126,5 +126,5 @@ public interface ISyncStorage {
/// Cancellation token to cancel the operation
/// A collection of remote changes detected since the specified time
Task> GetRemoteChangesAsync(DateTime since, CancellationToken cancellationToken = default)
- => Task.FromResult>(Array.Empty());
+ => Task.FromResult>([]);
}
diff --git a/src/SharpSync/Core/SmartConflictResolver.cs b/src/SharpSync/Core/SmartConflictResolver.cs
index 7d56bde..3dfdb12 100644
--- a/src/SharpSync/Core/SmartConflictResolver.cs
+++ b/src/SharpSync/Core/SmartConflictResolver.cs
@@ -8,24 +8,24 @@ namespace Oire.SharpSync.Core;
///
public class SmartConflictResolver: IConflictResolver {
///
- /// Delegate for UI-driven conflict resolution
- /// Nimbus can implement this to show dialogs
+ /// Delegate for UI-driven conflict resolution.
+ /// Desktop clients can implement this to show UI dialogs.
///
public delegate Task ConflictHandlerDelegate(ConflictAnalysis analysis, CancellationToken cancellationToken);
- private static readonly FrozenSet BinaryExtensions = new[] {
+ private static readonly FrozenSet BinaryExtensions = FrozenSet.ToFrozenSet([
".exe", ".dll", ".bin", ".zip", ".7z", ".rar",
".jpg", ".jpeg", ".png", ".gif", ".bmp", ".ico", ".webp",
".mp4", ".avi", ".mkv", ".mp3", ".wav", ".ogg", ".flac", ".mov", ".wmv", ".alac", ".wma",
".pdf", ".doc", ".docx", ".xls", ".xlsx", ".ppt", ".pptx", ".odt", ".ods", ".odp", ".mo", ".epub",
- }.ToFrozenSet(StringComparer.OrdinalIgnoreCase);
+ ], StringComparer.OrdinalIgnoreCase);
- private static readonly FrozenSet TextExtensions = new[] {
+ private static readonly FrozenSet TextExtensions = FrozenSet.ToFrozenSet([
".txt", ".md", ".json", ".xml", ".yml", ".yaml", ".om", ".toml", ".m3u", ".m3u8", ".fb2",
".cs", ".js", ".ts", ".py", ".java", ".cpp", ".c", ".h", ".rb", ".go", ".rs", ".swift", ".kt", ".dart", ".lua", ".sh", ".bat", ".ps1", ".sql", ".zig", ".d", ".lr", ".po",
".css", ".scss", ".less", ".html", ".htm", ".php",
".ini", ".cfg", ".conf", ".log"
- }.ToFrozenSet(StringComparer.OrdinalIgnoreCase);
+ ], StringComparer.OrdinalIgnoreCase);
private readonly ConflictHandlerDelegate? _conflictHandler;
private readonly ConflictResolution _defaultResolution;
@@ -45,11 +45,11 @@ public SmartConflictResolver(ConflictHandlerDelegate? conflictHandler = null, Co
///
public async Task ResolveConflictAsync(FileConflictEventArgs conflict, CancellationToken cancellationToken = default) {
// Analyze the conflict to provide rich information
- var analysis = await AnalyzeConflictAsync(conflict, cancellationToken);
+ var analysis = await AnalyzeConflictAsync(conflict, cancellationToken).ConfigureAwait(false);
// If we have a UI handler, let it decide
if (_conflictHandler is not null) {
- return await _conflictHandler(analysis, cancellationToken);
+ return await _conflictHandler(analysis, cancellationToken).ConfigureAwait(false);
}
// Otherwise, use intelligent automatic resolution
@@ -59,7 +59,7 @@ public async Task ResolveConflictAsync(FileConflictEventArgs
///
/// Analyzes a conflict to provide rich information for decision making
///
- private static async Task AnalyzeConflictAsync(FileConflictEventArgs conflict, CancellationToken cancellationToken) {
+ private static Task AnalyzeConflictAsync(FileConflictEventArgs conflict, CancellationToken cancellationToken) {
cancellationToken.ThrowIfCancellationRequested();
// Collect analysis data
@@ -68,7 +68,7 @@ private static async Task AnalyzeConflictAsync(FileConflictEve
long sizeDifference = 0;
DateTime? localModified = null;
DateTime? remoteModified = null;
- double timeDifference = 0;
+ var timeDifference = TimeSpan.Zero;
string? newerVersion = null;
var recommendedResolution = ConflictResolution.Ask;
@@ -83,7 +83,7 @@ private static async Task AnalyzeConflictAsync(FileConflictEve
if (conflict.LocalItem?.LastModified is not null && conflict.RemoteItem?.LastModified is not null) {
localModified = conflict.LocalItem.LastModified;
remoteModified = conflict.RemoteItem.LastModified;
- timeDifference = Math.Abs((conflict.RemoteItem.LastModified - conflict.LocalItem.LastModified).TotalSeconds);
+ timeDifference = (conflict.RemoteItem.LastModified - conflict.LocalItem.LastModified).Duration();
// Determine which is newer
if (conflict.RemoteItem.LastModified > conflict.LocalItem.LastModified) {
@@ -114,10 +114,8 @@ private static async Task AnalyzeConflictAsync(FileConflictEve
break;
}
- await Task.CompletedTask; // Make it truly async
-
// Create immutable analysis record
- return new ConflictAnalysis {
+ return Task.FromResult(new ConflictAnalysis {
FilePath = conflict.Path,
ConflictType = conflict.ConflictType,
LocalItem = conflict.LocalItem,
@@ -132,7 +130,7 @@ private static async Task AnalyzeConflictAsync(FileConflictEve
NewerVersion = newerVersion,
IsLikelyBinary = isLikelyBinary,
IsLikelyTextFile = isLikelyTextFile
- };
+ });
}
///
diff --git a/src/SharpSync/Core/SyncOptions.cs b/src/SharpSync/Core/SyncOptions.cs
index b47aa77..2330722 100644
--- a/src/SharpSync/Core/SyncOptions.cs
+++ b/src/SharpSync/Core/SyncOptions.cs
@@ -57,7 +57,7 @@ public class SyncOptions {
///
/// Gets or sets file patterns to exclude from synchronization
///
- public List ExcludePatterns { get; set; } = new List();
+ public IList ExcludePatterns { get; set; } = [];
///
/// Gets or sets the maximum transfer rate in bytes per second.
@@ -103,9 +103,13 @@ public class SyncOptions {
public VirtualFileCallbackDelegate? VirtualFileCallback { get; set; }
///
- /// Creates a shallow copy of the sync options
+ /// Creates a copy of the sync options with an independently copied exclude-pattern list.
///
- /// A new SyncOptions instance with the same values
- public SyncOptions Clone() => (SyncOptions)MemberwiseClone();
+ /// A new SyncOptions instance with the same values.
+ public SyncOptions Clone() {
+ var clone = (SyncOptions)MemberwiseClone();
+ clone.ExcludePatterns = [.. ExcludePatterns];
+ return clone;
+ }
}
diff --git a/src/SharpSync/Core/SyncPlan.cs b/src/SharpSync/Core/SyncPlan.cs
index 0570f18..6819254 100644
--- a/src/SharpSync/Core/SyncPlan.cs
+++ b/src/SharpSync/Core/SyncPlan.cs
@@ -9,78 +9,89 @@ namespace Oire.SharpSync.Core;
/// The plan groups actions by type for easier presentation in UI.
///
public sealed class SyncPlan {
+ private IReadOnlyList? _downloads;
+ private IReadOnlyList? _uploads;
+ private IReadOnlyList? _localDeletes;
+ private IReadOnlyList? _remoteDeletes;
+ private IReadOnlyList? _conflicts;
+
///
- /// Gets all planned actions, sorted by priority (highest first)
+ /// Gets all planned actions, sorted by priority (highest first).
///
- public IReadOnlyList Actions { get; init; } = Array.Empty();
+ public IReadOnlyList Actions { get; init; } = [];
///
- /// Gets actions that will download files or directories from remote to local
+ /// Gets actions that will download files or directories from remote to local.
///
- public IReadOnlyList Downloads => Actions.Where(a => a.ActionType == SyncActionType.Download).ToList();
+ public IReadOnlyList Downloads =>
+ _downloads ??= Actions.Where(a => a.ActionType == SyncActionType.Download).ToList();
///
- /// Gets actions that will upload files or directories from local to remote
+ /// Gets actions that will upload files or directories from local to remote.
///
- public IReadOnlyList Uploads => Actions.Where(a => a.ActionType == SyncActionType.Upload).ToList();
+ public IReadOnlyList Uploads =>
+ _uploads ??= Actions.Where(a => a.ActionType == SyncActionType.Upload).ToList();
///
- /// Gets actions that will delete files or directories from local storage
+ /// Gets actions that will delete files or directories from local storage.
///
- public IReadOnlyList LocalDeletes => Actions.Where(a => a.ActionType == SyncActionType.DeleteLocal).ToList();
+ public IReadOnlyList LocalDeletes =>
+ _localDeletes ??= Actions.Where(a => a.ActionType == SyncActionType.DeleteLocal).ToList();
///
- /// Gets actions that will delete files or directories from remote storage
+ /// Gets actions that will delete files or directories from remote storage.
///
- public IReadOnlyList RemoteDeletes => Actions.Where(a => a.ActionType == SyncActionType.DeleteRemote).ToList();
+ public IReadOnlyList RemoteDeletes =>
+ _remoteDeletes ??= Actions.Where(a => a.ActionType == SyncActionType.DeleteRemote).ToList();
///
- /// Gets actions representing conflicts that need resolution
+ /// Gets actions representing conflicts that need resolution.
///
- public IReadOnlyList Conflicts => Actions.Where(a => a.ActionType == SyncActionType.Conflict).ToList();
+ public IReadOnlyList Conflicts =>
+ _conflicts ??= Actions.Where(a => a.ActionType == SyncActionType.Conflict).ToList();
///
- /// Gets the total number of planned actions
+ /// Gets the total number of planned actions.
///
public int TotalActions => Actions.Count;
///
- /// Gets the total number of files that will be downloaded
+ /// Gets the total number of files that will be downloaded.
///
public int DownloadCount => Downloads.Count;
///
- /// Gets the total number of files that will be uploaded
+ /// Gets the total number of files that will be uploaded.
///
public int UploadCount => Uploads.Count;
///
- /// Gets the total number of deletions (both local and remote)
+ /// Gets the total number of deletions (both local and remote).
///
public int DeleteCount => LocalDeletes.Count + RemoteDeletes.Count;
///
- /// Gets the total number of conflicts
+ /// Gets the total number of conflicts.
///
public int ConflictCount => Conflicts.Count;
///
- /// Gets the total size of data that will be downloaded (in bytes)
+ /// Gets the total size of data that will be downloaded (in bytes).
///
public long TotalDownloadSize => Downloads.Where(a => !a.IsDirectory).Sum(a => a.Size);
///
- /// Gets the total size of data that will be uploaded (in bytes)
+ /// Gets the total size of data that will be uploaded (in bytes).
///
public long TotalUploadSize => Uploads.Where(a => !a.IsDirectory).Sum(a => a.Size);
///
- /// Gets whether this plan has any actions to perform
+ /// Gets whether this plan has any actions to perform.
///
public bool HasChanges => TotalActions > 0;
///
- /// Gets whether this plan contains any conflicts
+ /// Gets whether this plan contains any conflicts.
///
public bool HasConflicts => ConflictCount > 0;
}
diff --git a/src/SharpSync/Database/SqliteSyncDatabase.cs b/src/SharpSync/Database/SqliteSyncDatabase.cs
index a93ee14..2c06190 100644
--- a/src/SharpSync/Database/SqliteSyncDatabase.cs
+++ b/src/SharpSync/Database/SqliteSyncDatabase.cs
@@ -10,7 +10,7 @@ namespace Oire.SharpSync.Database;
/// This class provides persistent storage for sync state using SQLite. It tracks file metadata,
/// sync status, and conflict information. The database is automatically created if it doesn't exist.
///
-public class SqliteSyncDatabase: ISyncDatabase {
+public sealed class SqliteSyncDatabase: ISyncDatabase, IAsyncDisposable {
private readonly string _databasePath;
private SQLiteAsyncConnection? _connection;
private bool _disposed;
@@ -41,20 +41,20 @@ public async Task InitializeAsync(CancellationToken cancellationToken = default)
_connection = new SQLiteAsyncConnection(_databasePath);
- await _connection.CreateTableAsync();
- await _connection.CreateTableAsync();
+ await _connection.CreateTableAsync().ConfigureAwait(false);
+ await _connection.CreateTableAsync().ConfigureAwait(false);
await _connection.ExecuteAsync("""
CREATE INDEX IF NOT EXISTS idx_syncstates_status
ON SyncStates(Status)
- """);
+ """).ConfigureAwait(false);
await _connection.ExecuteAsync("""
CREATE INDEX IF NOT EXISTS idx_syncstates_lastsync
ON SyncStates(LastSyncTime)
- """);
+ """).ConfigureAwait(false);
await _connection.ExecuteAsync("""
CREATE INDEX IF NOT EXISTS idx_operationhistory_completedat
ON OperationHistory(CompletedAtTicks DESC)
- """);
+ """).ConfigureAwait(false);
}
///
@@ -68,7 +68,8 @@ ON OperationHistory(CompletedAtTicks DESC)
EnsureInitialized();
return await _connection!.Table()
.Where(s => s.Path == path)
- .FirstOrDefaultAsync();
+ .FirstOrDefaultAsync()
+ .ConfigureAwait(false);
}
///
@@ -85,9 +86,9 @@ public async Task UpdateSyncStateAsync(SyncState state, CancellationToken cancel
EnsureInitialized();
if (state.Id == 0) {
- await _connection!.InsertAsync(state);
+ await _connection!.InsertAsync(state).ConfigureAwait(false);
} else {
- await _connection!.UpdateAsync(state);
+ await _connection!.UpdateAsync(state).ConfigureAwait(false);
}
}
@@ -101,7 +102,8 @@ public async Task DeleteSyncStateAsync(string path, CancellationToken cancellati
EnsureInitialized();
await _connection!.Table()
.Where(s => s.Path == path)
- .DeleteAsync();
+ .DeleteAsync()
+ .ConfigureAwait(false);
}
///
@@ -112,7 +114,7 @@ public async Task DeleteSyncStateAsync(string path, CancellationToken cancellati
/// Thrown when the database is not initialized
public async Task> GetAllSyncStatesAsync(CancellationToken cancellationToken = default) {
EnsureInitialized();
- return await _connection!.Table().ToListAsync();
+ return await _connection!.Table().ToListAsync().ConfigureAwait(false);
}
///
@@ -134,7 +136,7 @@ public async Task> GetSyncStatesByPrefixAsync(string path
return await _connection!.QueryAsync(
"SELECT * FROM SyncStates WHERE Path = ? OR Path LIKE ?",
normalizedPrefix,
- normalizedPrefix + "/%");
+ normalizedPrefix + "/%").ConfigureAwait(false);
}
///
@@ -147,7 +149,8 @@ public async Task> GetPendingSyncStatesAsync(Cancellation
EnsureInitialized();
return await _connection!.Table()
.Where(s => s.Status != SyncStatus.Synced && s.Status != SyncStatus.Ignored)
- .ToListAsync();
+ .ToListAsync()
+ .ConfigureAwait(false);
}
///
@@ -157,7 +160,7 @@ public async Task> GetPendingSyncStatesAsync(Cancellation
/// Thrown when the database is not initialized
public async Task ClearAsync(CancellationToken cancellationToken = default) {
EnsureInitialized();
- await _connection!.DeleteAllAsync();
+ await _connection!.DeleteAllAsync().ConfigureAwait(false);
}
///
@@ -169,25 +172,26 @@ public async Task ClearAsync(CancellationToken cancellationToken = default) {
public async Task GetStatsAsync(CancellationToken cancellationToken = default) {
EnsureInitialized();
- var totalItems = await _connection!.Table().CountAsync();
+ var totalItems = await _connection!.Table().CountAsync().ConfigureAwait(false);
var syncedItems = await _connection!.Table()
- .Where(s => s.Status == SyncStatus.Synced).CountAsync();
+ .Where(s => s.Status == SyncStatus.Synced).CountAsync().ConfigureAwait(false);
var conflictedItems = await _connection!.Table()
- .Where(s => s.Status == SyncStatus.Conflict).CountAsync();
+ .Where(s => s.Status == SyncStatus.Conflict).CountAsync().ConfigureAwait(false);
var errorItems = await _connection!.Table()
- .Where(s => s.Status == SyncStatus.Error).CountAsync();
+ .Where(s => s.Status == SyncStatus.Error).CountAsync().ConfigureAwait(false);
var pendingItems = await _connection!.Table()
.Where(s => s.Status == SyncStatus.LocalNew ||
s.Status == SyncStatus.RemoteNew ||
s.Status == SyncStatus.LocalModified ||
s.Status == SyncStatus.RemoteModified ||
s.Status == SyncStatus.LocalDeleted ||
- s.Status == SyncStatus.RemoteDeleted).CountAsync();
+ s.Status == SyncStatus.RemoteDeleted).CountAsync().ConfigureAwait(false);
var lastSyncState = await _connection!.Table()
.Where(s => s.LastSyncTime != null)
.OrderByDescending(s => s.LastSyncTime)
- .FirstOrDefaultAsync();
+ .FirstOrDefaultAsync()
+ .ConfigureAwait(false);
var fileInfo = new FileInfo(_databasePath);
@@ -233,7 +237,7 @@ public async Task LogOperationAsync(
renamedFrom,
renamedTo);
- await _connection!.InsertAsync(record);
+ await _connection!.InsertAsync(record).ConfigureAwait(false);
}
///
@@ -252,11 +256,11 @@ public async Task> GetRecentOperationsAsync(
records = await _connection!.QueryAsync(
"SELECT * FROM OperationHistory WHERE CompletedAtTicks > ? ORDER BY CompletedAtTicks DESC LIMIT ?",
sinceTicks,
- limit);
+ limit).ConfigureAwait(false);
} else {
records = await _connection!.QueryAsync(
"SELECT * FROM OperationHistory ORDER BY CompletedAtTicks DESC LIMIT ?",
- limit);
+ limit).ConfigureAwait(false);
}
return records.Select(r => r.ToCompletedOperation()).ToList();
@@ -271,7 +275,7 @@ public async Task ClearOperationHistoryAsync(DateTime olderThan, Cancellati
var olderThanTicks = olderThan.Ticks;
return await _connection!.ExecuteAsync(
"DELETE FROM OperationHistory WHERE CompletedAtTicks < ?",
- olderThanTicks);
+ olderThanTicks).ConfigureAwait(false);
}
private void EnsureInitialized() {
@@ -281,17 +285,34 @@ private void EnsureInitialized() {
}
///
- /// Releases all resources used by the sync database
+ /// Asynchronously releases all resources used by the sync database.
+ ///
+ public async ValueTask DisposeAsync() {
+ if (!_disposed) {
+ if (_connection is not null) {
+ await _connection.CloseAsync().ConfigureAwait(false);
+ }
+
+ _connection = null;
+ _disposed = true;
+ }
+
+ GC.SuppressFinalize(this);
+ }
+
+ ///
+ /// Releases all resources used by the sync database.
///
///
- /// Closes the database connection and disposes of all resources.
- /// This method can be called multiple times safely. After disposal, the database instance cannot be reused.
+ /// Prefer <see cref="DisposeAsync"/> to avoid blocking the calling thread.
///
public void Dispose() {
if (!_disposed) {
- _connection?.CloseAsync().Wait();
+ _connection?.CloseAsync().GetAwaiter().GetResult();
_connection = null;
_disposed = true;
}
+
+ GC.SuppressFinalize(this);
}
}
diff --git a/src/SharpSync/Logging/LogMessages.cs b/src/SharpSync/Logging/LogMessages.cs
index 9495e1e..a88dc8a 100644
--- a/src/SharpSync/Logging/LogMessages.cs
+++ b/src/SharpSync/Logging/LogMessages.cs
@@ -299,4 +299,22 @@ internal static partial class LogMessages {
Level = LogLevel.Debug,
Message = "Could not retrieve item metadata for pending change at {Path}; file may have been deleted since notification")]
public static partial void PendingChangeItemNotFound(this ILogger logger, Exception ex, string path);
+
+ [LoggerMessage(
+ EventId = 46,
+ Level = LogLevel.Debug,
+ Message = "Failed to retrieve storage item at {Path}")]
+ public static partial void StorageItemRetrievalFailed(this ILogger logger, Exception ex, string path);
+
+ [LoggerMessage(
+ EventId = 47,
+ Level = LogLevel.Debug,
+ Message = "Failed to compile regex pattern '{Pattern}' for sync filter; treating as wildcard")]
+ public static partial void SyncFilterRegexCompilationFailed(this ILogger logger, Exception ex, string pattern);
+
+ [LoggerMessage(
+ EventId = 48,
+ Level = LogLevel.Warning,
+ Message = "Failed to generate sync plan; returning empty plan")]
+ public static partial void SyncPlanGenerationFailed(this ILogger logger, Exception ex);
}
diff --git a/src/SharpSync/SharpSync.csproj b/src/SharpSync/SharpSync.csproj
index 09656c5..215da49 100644
--- a/src/SharpSync/SharpSync.csproj
+++ b/src/SharpSync/SharpSync.csproj
@@ -11,6 +11,7 @@
true
true
README.md
+ icon.png
Oire.SharpSync
André Polykanine; Oire
Oire Software
@@ -24,13 +25,16 @@
git
file-sync;synchronization;webdav;sftp;ftp;ftps;s3;aws;nextcloud;owncloud;backup;sync
1.0.0
- false
+ embedded
+ true
+ true
$(NoWarn);NETSDK1206
+
@@ -39,6 +43,10 @@
+
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+ all
+
diff --git a/src/SharpSync/Storage/FtpStorage.cs b/src/SharpSync/Storage/FtpStorage.cs
index b290625..e6801b8 100644
--- a/src/SharpSync/Storage/FtpStorage.cs
+++ b/src/SharpSync/Storage/FtpStorage.cs
@@ -132,7 +132,7 @@ private async Task EnsureConnectedAsync(CancellationToken cancellationToken = de
return;
}
- await _connectionSemaphore.WaitAsync(cancellationToken);
+ await _connectionSemaphore.WaitAsync(cancellationToken).ConfigureAwait(false);
try {
if (_client?.IsConnected == true) {
return;
@@ -140,14 +140,14 @@ private async Task EnsureConnectedAsync(CancellationToken cancellationToken = de
// Dispose old client if exists
if (_client is not null) {
- await _client.Disconnect(cancellationToken);
+ await _client.Disconnect(cancellationToken).ConfigureAwait(false);
_client.Dispose();
}
// Create and connect client
_client = new AsyncFtpClient(_host, _username, _password, _port, _config);
- await _client.Connect(cancellationToken);
+ await _client.Connect(cancellationToken).ConfigureAwait(false);
} finally {
_connectionSemaphore.Release();
}
@@ -160,7 +160,7 @@ private async Task EnsureConnectedAsync(CancellationToken cancellationToken = de
/// True if connection is successful, false otherwise
public async Task TestConnectionAsync(CancellationToken cancellationToken = default) {
try {
- await EnsureConnectedAsync(cancellationToken);
+ await EnsureConnectedAsync(cancellationToken).ConfigureAwait(false);
return _client?.IsConnected == true;
} catch (Exception ex) {
_logger.ConnectionTestFailed(ex, "FTP");
@@ -176,18 +176,18 @@ public async Task TestConnectionAsync(CancellationToken cancellationToken
/// A collection of sync items representing files and directories
/// Thrown when authentication fails
public async Task> ListItemsAsync(string path, CancellationToken cancellationToken = default) {
- await EnsureConnectedAsync(cancellationToken);
+ await EnsureConnectedAsync(cancellationToken).ConfigureAwait(false);
var fullPath = GetFullPath(path);
return await ExecuteWithRetry(async () => {
var items = new List();
- if (!await _client!.DirectoryExists(fullPath, cancellationToken)) {
+ if (!await _client!.DirectoryExists(fullPath, cancellationToken).ConfigureAwait(false)) {
return items;
}
- var ftpItems = await _client.GetListing(fullPath, cancellationToken);
+ var ftpItems = await _client.GetListing(fullPath, cancellationToken).ConfigureAwait(false);
foreach (var item in ftpItems) {
cancellationToken.ThrowIfCancellationRequested();
@@ -209,7 +209,7 @@ public async Task> ListItemsAsync(string path, Cancellatio
}
return (IEnumerable)items;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -219,17 +219,17 @@ public async Task> ListItemsAsync(string path, Cancellatio
/// Cancellation token to cancel the operation
/// The sync item if it exists, null otherwise
public async Task GetItemAsync(string path, CancellationToken cancellationToken = default) {
- await EnsureConnectedAsync(cancellationToken);
+ await EnsureConnectedAsync(cancellationToken).ConfigureAwait(false);
var fullPath = GetFullPath(path);
return await ExecuteWithRetry(async () => {
- if (!await _client!.FileExists(fullPath, cancellationToken) &&
- !await _client.DirectoryExists(fullPath, cancellationToken)) {
+ if (!await _client!.FileExists(fullPath, cancellationToken).ConfigureAwait(false) &&
+ !await _client.DirectoryExists(fullPath, cancellationToken).ConfigureAwait(false)) {
return null;
}
- var item = await _client.GetObjectInfo(fullPath);
+ var item = await _client.GetObjectInfo(fullPath).ConfigureAwait(false);
if (item is null) {
return null;
}
@@ -241,7 +241,7 @@ public async Task> ListItemsAsync(string path, Cancellatio
LastModified = item.Modified.ToUniversalTime(),
Permissions = ConvertPermissionsToString(item)
};
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -257,19 +257,19 @@ public async Task> ListItemsAsync(string path, Cancellatio
/// For files larger than the configured chunk size, progress events will be raised via
///
public async Task ReadFileAsync(string path, CancellationToken cancellationToken = default) {
- await EnsureConnectedAsync(cancellationToken);
+ await EnsureConnectedAsync(cancellationToken).ConfigureAwait(false);
var fullPath = GetFullPath(path);
return await ExecuteWithRetry(async () => {
- if (!await _client!.FileExists(fullPath, cancellationToken)) {
+ if (!await _client!.FileExists(fullPath, cancellationToken).ConfigureAwait(false)) {
throw new FileNotFoundException($"File not found: {path}");
}
var memoryStream = new MemoryStream();
// Get file size for progress reporting
- var fileInfo = await _client.GetObjectInfo(fullPath);
+ var fileInfo = await _client.GetObjectInfo(fullPath).ConfigureAwait(false);
var needsProgress = fileInfo?.Size > _chunkSize;
if (needsProgress && fileInfo is not null) {
@@ -279,15 +279,15 @@ public async Task ReadFileAsync(string path, CancellationToken cancellat
RaiseProgressChanged(path, p.TransferredBytes, totalBytes, StorageOperation.Download);
});
- await _client.DownloadStream(memoryStream, fullPath, progress: progress, token: cancellationToken);
+ await _client.DownloadStream(memoryStream, fullPath, progress: progress, token: cancellationToken).ConfigureAwait(false);
} else {
// Download without progress
- await _client.DownloadStream(memoryStream, fullPath, token: cancellationToken);
+ await _client.DownloadStream(memoryStream, fullPath, token: cancellationToken).ConfigureAwait(false);
}
memoryStream.Position = 0;
return (Stream)memoryStream;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -302,14 +302,14 @@ public async Task ReadFileAsync(string path, CancellationToken cancellat
/// progress events will be raised via
///
public async Task WriteFileAsync(string path, Stream content, CancellationToken cancellationToken = default) {
- await EnsureConnectedAsync(cancellationToken);
+ await EnsureConnectedAsync(cancellationToken).ConfigureAwait(false);
var fullPath = GetFullPath(path);
// Ensure parent directories exist
var directory = GetParentDirectory(fullPath);
if (!string.IsNullOrEmpty(directory)) {
- await CreateDirectoryAsync(GetRelativePath(directory), cancellationToken);
+ await CreateDirectoryAsync(GetRelativePath(directory), cancellationToken).ConfigureAwait(false);
}
await ExecuteWithRetry(async () => {
@@ -322,14 +322,14 @@ await ExecuteWithRetry(async () => {
RaiseProgressChanged(path, p.TransferredBytes, totalBytes, StorageOperation.Upload);
});
- await _client!.UploadStream(content, fullPath, FtpRemoteExists.Overwrite, true, progress, cancellationToken);
+ await _client!.UploadStream(content, fullPath, FtpRemoteExists.Overwrite, true, progress, cancellationToken).ConfigureAwait(false);
} else {
// Upload without progress
- await _client!.UploadStream(content, fullPath, FtpRemoteExists.Overwrite, true, token: cancellationToken);
+ await _client!.UploadStream(content, fullPath, FtpRemoteExists.Overwrite, true, token: cancellationToken).ConfigureAwait(false);
}
return true;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -342,7 +342,7 @@ await ExecuteWithRetry(async () => {
/// If the directory already exists, this method completes successfully without error
///
public async Task CreateDirectoryAsync(string path, CancellationToken cancellationToken = default) {
- await EnsureConnectedAsync(cancellationToken);
+ await EnsureConnectedAsync(cancellationToken).ConfigureAwait(false);
var fullPath = GetFullPath(path);
@@ -352,15 +352,15 @@ public async Task CreateDirectoryAsync(string path, CancellationToken cancellati
}
await ExecuteWithRetry(async () => {
- if (await _client!.DirectoryExists(fullPath, cancellationToken)) {
+ if (await _client!.DirectoryExists(fullPath, cancellationToken).ConfigureAwait(false)) {
return true; // Directory already exists
}
// Create directory with parent directories
- await _client.CreateDirectory(fullPath, cancellationToken);
+ await _client.CreateDirectory(fullPath, cancellationToken).ConfigureAwait(false);
return true;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -374,21 +374,21 @@ await ExecuteWithRetry(async () => {
/// If the item does not exist, this method completes successfully without error
///
public async Task DeleteAsync(string path, CancellationToken cancellationToken = default) {
- await EnsureConnectedAsync(cancellationToken);
+ await EnsureConnectedAsync(cancellationToken).ConfigureAwait(false);
var fullPath = GetFullPath(path);
await ExecuteWithRetry(async () => {
- if (await _client!.DirectoryExists(fullPath, cancellationToken)) {
+ if (await _client!.DirectoryExists(fullPath, cancellationToken).ConfigureAwait(false)) {
// Delete directory recursively
- await _client.DeleteDirectory(fullPath, cancellationToken);
- } else if (await _client.FileExists(fullPath, cancellationToken)) {
+ await _client.DeleteDirectory(fullPath, cancellationToken).ConfigureAwait(false);
+ } else if (await _client.FileExists(fullPath, cancellationToken).ConfigureAwait(false)) {
// Delete file
- await _client.DeleteFile(fullPath, cancellationToken);
+ await _client.DeleteFile(fullPath, cancellationToken).ConfigureAwait(false);
}
return true;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -403,7 +403,7 @@ await ExecuteWithRetry(async () => {
/// Parent directories of the target path will be created if they don't exist
///
public async Task MoveAsync(string sourcePath, string targetPath, CancellationToken cancellationToken = default) {
- await EnsureConnectedAsync(cancellationToken);
+ await EnsureConnectedAsync(cancellationToken).ConfigureAwait(false);
var sourceFullPath = GetFullPath(sourcePath);
var targetFullPath = GetFullPath(targetPath);
@@ -411,19 +411,19 @@ public async Task MoveAsync(string sourcePath, string targetPath, CancellationTo
// Ensure target parent directory exists
var targetParentRelative = GetParentDirectory(NormalizePath(targetPath));
if (!string.IsNullOrEmpty(targetParentRelative)) {
- await CreateDirectoryAsync(targetParentRelative, cancellationToken);
+ await CreateDirectoryAsync(targetParentRelative, cancellationToken).ConfigureAwait(false);
}
await ExecuteWithRetry(async () => {
- if (!await _client!.FileExists(sourceFullPath, cancellationToken) &&
- !await _client.DirectoryExists(sourceFullPath, cancellationToken)) {
+ if (!await _client!.FileExists(sourceFullPath, cancellationToken).ConfigureAwait(false) &&
+ !await _client.DirectoryExists(sourceFullPath, cancellationToken).ConfigureAwait(false)) {
throw new FileNotFoundException($"Source not found: {sourcePath}");
}
- await _client.MoveFile(sourceFullPath, targetFullPath, FtpRemoteExists.Overwrite, cancellationToken);
+ await _client.MoveFile(sourceFullPath, targetFullPath, FtpRemoteExists.Overwrite, cancellationToken).ConfigureAwait(false);
return true;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -433,14 +433,14 @@ await ExecuteWithRetry(async () => {
/// Cancellation token to cancel the operation
/// True if the file or directory exists, false otherwise
public async Task ExistsAsync(string path, CancellationToken cancellationToken = default) {
- await EnsureConnectedAsync(cancellationToken);
+ await EnsureConnectedAsync(cancellationToken).ConfigureAwait(false);
var fullPath = GetFullPath(path);
return await ExecuteWithRetry(async () => {
- return await _client!.FileExists(fullPath, cancellationToken) ||
- await _client.DirectoryExists(fullPath, cancellationToken);
- }, cancellationToken);
+ return await _client!.FileExists(fullPath, cancellationToken).ConfigureAwait(false) ||
+ await _client.DirectoryExists(fullPath, cancellationToken).ConfigureAwait(false);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -453,14 +453,14 @@ public async Task ExistsAsync(string path, CancellationToken cancellationT
/// best-effort values which may be -1 if the server doesn't support the SIZE command
///
public async Task GetStorageInfoAsync(CancellationToken cancellationToken = default) {
- await EnsureConnectedAsync(cancellationToken);
+ await EnsureConnectedAsync(cancellationToken).ConfigureAwait(false);
// FTP doesn't have a standard way to get disk space
// Return unknown values
return await Task.FromResult(new StorageInfo {
TotalSpace = -1,
UsedSpace = -1
- });
+ }).ConfigureAwait(false);
}
///
@@ -476,10 +476,10 @@ public async Task GetStorageInfoAsync(CancellationToken cancellatio
///
public async Task ComputeHashAsync(string path, CancellationToken cancellationToken = default) {
// FTP doesn't have native hash support, so we download and hash
- using var stream = await ReadFileAsync(path, cancellationToken);
+ using var stream = await ReadFileAsync(path, cancellationToken).ConfigureAwait(false);
using var sha256 = SHA256.Create();
- var hashBytes = await sha256.ComputeHashAsync(stream, cancellationToken);
+ var hashBytes = await sha256.ComputeHashAsync(stream, cancellationToken).ConfigureAwait(false);
return Convert.ToBase64String(hashBytes);
}
@@ -487,13 +487,13 @@ public async Task ComputeHashAsync(string path, CancellationToken cancel
/// Sets the last modified time for a file on the FTP server
///
public async Task SetLastModifiedAsync(string path, DateTime lastModified, CancellationToken cancellationToken = default) {
- await EnsureConnectedAsync(cancellationToken);
+ await EnsureConnectedAsync(cancellationToken).ConfigureAwait(false);
var fullPath = GetFullPath(path);
await ExecuteWithRetry(async () => {
- await _client!.SetModifiedTime(fullPath, lastModified, cancellationToken);
+ await _client!.SetModifiedTime(fullPath, lastModified, cancellationToken).ConfigureAwait(false);
return true;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
#region Helper Methods
@@ -586,7 +586,7 @@ private async Task ExecuteWithRetry(Func> operation, CancellationT
for (int attempt = 0; attempt <= _maxRetries; attempt++) {
try {
cancellationToken.ThrowIfCancellationRequested();
- return await operation();
+ return await operation().ConfigureAwait(false);
} catch (Exception ex) when (attempt < _maxRetries && IsRetriableException(ex)) {
lastException = ex;
_logger.StorageOperationRetry("FTP", attempt + 1, _maxRetries);
@@ -596,17 +596,17 @@ private async Task ExecuteWithRetry(Func> operation, CancellationT
_logger.StorageReconnecting(attempt + 1, "FTP");
try {
if (_client is not null) {
- await _client.Disconnect(cancellationToken);
+ await _client.Disconnect(cancellationToken).ConfigureAwait(false);
_client.Dispose();
_client = null;
}
- await EnsureConnectedAsync(cancellationToken);
+ await EnsureConnectedAsync(cancellationToken).ConfigureAwait(false);
} catch (Exception reconnectEx) {
_logger.StorageReconnectFailed(reconnectEx, "FTP");
}
}
- await Task.Delay(_retryDelay * (attempt + 1), cancellationToken);
+ await Task.Delay(_retryDelay * (attempt + 1), cancellationToken).ConfigureAwait(false);
}
}
diff --git a/src/SharpSync/Storage/LocalFileStorage.cs b/src/SharpSync/Storage/LocalFileStorage.cs
index 187e2e7..e078826 100644
--- a/src/SharpSync/Storage/LocalFileStorage.cs
+++ b/src/SharpSync/Storage/LocalFileStorage.cs
@@ -108,7 +108,7 @@ public async Task> ListItemsAsync(string path, Cancellatio
items.Add(item);
}
- return await Task.FromResult(items);
+ return await Task.FromResult(items).ConfigureAwait(false);
}
///
@@ -128,7 +128,7 @@ public async Task> ListItemsAsync(string path, Cancellatio
IsSymlink = dirInfo.Attributes.HasFlag(FileAttributes.ReparsePoint),
LastModified = dirInfo.LastWriteTimeUtc,
Size = 0
- });
+ }).ConfigureAwait(false);
}
if (File.Exists(fullPath)) {
@@ -139,7 +139,7 @@ public async Task> ListItemsAsync(string path, Cancellatio
IsSymlink = fileInfo.Attributes.HasFlag(FileAttributes.ReparsePoint),
LastModified = fileInfo.LastWriteTimeUtc,
Size = fileInfo.Length
- });
+ }).ConfigureAwait(false);
}
return null;
@@ -159,7 +159,7 @@ public async Task ReadFileAsync(string path, CancellationToken cancellat
throw new FileNotFoundException($"File not found: {path}");
}
- return await Task.FromResult(File.OpenRead(fullPath));
+ return await Task.FromResult(File.OpenRead(fullPath)).ConfigureAwait(false);
}
///
@@ -177,7 +177,7 @@ public async Task WriteFileAsync(string path, Stream content, CancellationToken
}
using var fileStream = File.Create(fullPath);
- await content.CopyToAsync(fileStream, cancellationToken);
+ await content.CopyToAsync(fileStream, cancellationToken).ConfigureAwait(false);
}
///
@@ -185,10 +185,10 @@ public async Task WriteFileAsync(string path, Stream content, CancellationToken
///
/// The relative path to the directory to create
/// Cancellation token to cancel the operation
- public async Task CreateDirectoryAsync(string path, CancellationToken cancellationToken = default) {
+ public Task CreateDirectoryAsync(string path, CancellationToken cancellationToken = default) {
var fullPath = GetFullPath(path);
Directory.CreateDirectory(fullPath);
- await Task.CompletedTask;
+ return Task.CompletedTask;
}
///
@@ -199,7 +199,7 @@ public async Task CreateDirectoryAsync(string path, CancellationToken cancellati
///
/// If the path is a directory, it will be deleted recursively along with all its contents
///
- public async Task DeleteAsync(string path, CancellationToken cancellationToken = default) {
+ public Task DeleteAsync(string path, CancellationToken cancellationToken = default) {
var fullPath = GetFullPath(path);
if (Directory.Exists(fullPath)) {
@@ -208,7 +208,7 @@ public async Task DeleteAsync(string path, CancellationToken cancellationToken =
File.Delete(fullPath);
}
- await Task.CompletedTask;
+ return Task.CompletedTask;
}
///
@@ -218,7 +218,7 @@ public async Task DeleteAsync(string path, CancellationToken cancellationToken =
/// The relative path to the target location
/// Cancellation token to cancel the operation
/// Thrown when the source does not exist
- public async Task MoveAsync(string sourcePath, string targetPath, CancellationToken cancellationToken = default) {
+ public Task MoveAsync(string sourcePath, string targetPath, CancellationToken cancellationToken = default) {
var sourceFullPath = GetFullPath(sourcePath);
var targetFullPath = GetFullPath(targetPath);
@@ -235,7 +235,7 @@ public async Task MoveAsync(string sourcePath, string targetPath, CancellationTo
throw new FileNotFoundException($"Source not found: {sourcePath}");
}
- await Task.CompletedTask;
+ return Task.CompletedTask;
}
///
@@ -246,7 +246,7 @@ public async Task MoveAsync(string sourcePath, string targetPath, CancellationTo
/// True if the file or directory exists, false otherwise
public async Task ExistsAsync(string path, CancellationToken cancellationToken = default) {
var fullPath = GetFullPath(path);
- return await Task.FromResult(Directory.Exists(fullPath) || File.Exists(fullPath));
+ return await Task.FromResult(Directory.Exists(fullPath) || File.Exists(fullPath)).ConfigureAwait(false);
}
///
@@ -260,7 +260,7 @@ public async Task GetStorageInfoAsync(CancellationToken cancellatio
return await Task.FromResult(new StorageInfo {
TotalSpace = driveInfo.TotalSize,
UsedSpace = driveInfo.TotalSize - driveInfo.AvailableFreeSpace
- });
+ }).ConfigureAwait(false);
}
///
@@ -280,7 +280,7 @@ public async Task ComputeHashAsync(string path, CancellationToken cancel
using var stream = File.OpenRead(fullPath);
using var sha256 = SHA256.Create();
- var hashBytes = await sha256.ComputeHashAsync(stream, cancellationToken);
+ var hashBytes = await sha256.ComputeHashAsync(stream, cancellationToken).ConfigureAwait(false);
return Convert.ToBase64String(hashBytes);
}
@@ -290,7 +290,7 @@ public async Task ComputeHashAsync(string path, CancellationToken cancel
/// Cancellation token to cancel the operation
/// True if the root directory exists and is accessible
public async Task TestConnectionAsync(CancellationToken cancellationToken = default) {
- return await Task.FromResult(Directory.Exists(RootPath));
+ return await Task.FromResult(Directory.Exists(RootPath)).ConfigureAwait(false);
}
///
diff --git a/src/SharpSync/Storage/ProgressStream.cs b/src/SharpSync/Storage/ProgressStream.cs
index d8dc130..c343eb7 100644
--- a/src/SharpSync/Storage/ProgressStream.cs
+++ b/src/SharpSync/Storage/ProgressStream.cs
@@ -39,13 +39,13 @@ public override int Read(byte[] buffer, int offset, int count) {
}
public override async Task ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) {
- var bytesRead = await _innerStream.ReadAsync(buffer.AsMemory(offset, count), cancellationToken);
+ var bytesRead = await _innerStream.ReadAsync(buffer.AsMemory(offset, count), cancellationToken).ConfigureAwait(false);
UpdateProgress(bytesRead);
return bytesRead;
}
public override async ValueTask ReadAsync(Memory buffer, CancellationToken cancellationToken = default) {
- var bytesRead = await _innerStream.ReadAsync(buffer, cancellationToken);
+ var bytesRead = await _innerStream.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
UpdateProgress(bytesRead);
return bytesRead;
}
diff --git a/src/SharpSync/Storage/S3Storage.cs b/src/SharpSync/Storage/S3Storage.cs
index 0f13ca9..d3db36c 100644
--- a/src/SharpSync/Storage/S3Storage.cs
+++ b/src/SharpSync/Storage/S3Storage.cs
@@ -183,7 +183,7 @@ public async Task TestConnectionAsync(CancellationToken cancellationToken
Prefix = _prefix
};
- await _client.ListObjectsV2Async(request, cancellationToken);
+ await _client.ListObjectsV2Async(request, cancellationToken).ConfigureAwait(false);
return true;
} catch (Exception ex) {
_logger.ConnectionTestFailed(ex, "S3");
@@ -222,7 +222,7 @@ public async Task> ListItemsAsync(string path, Cancellatio
ListObjectsV2Response? response;
do {
- response = await _client.ListObjectsV2Async(request, cancellationToken);
+ response = await _client.ListObjectsV2Async(request, cancellationToken).ConfigureAwait(false);
// Add files (objects)
var s3Objects = response?.S3Objects ?? Enumerable.Empty();
@@ -270,7 +270,7 @@ public async Task> ListItemsAsync(string path, Cancellatio
} while (response is not null && response.IsTruncated.GetValueOrDefault());
return (IEnumerable)items;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -290,7 +290,7 @@ public async Task> ListItemsAsync(string path, Cancellatio
Key = fullPath
};
- var response = await _client.GetObjectMetadataAsync(request, cancellationToken);
+ var response = await _client.GetObjectMetadataAsync(request, cancellationToken).ConfigureAwait(false);
return new SyncItem {
Path = path,
@@ -310,7 +310,7 @@ public async Task> ListItemsAsync(string path, Cancellatio
MaxKeys = 1
};
- var listResponse = await _client.ListObjectsV2Async(listRequest, cancellationToken);
+ var listResponse = await _client.ListObjectsV2Async(listRequest, cancellationToken).ConfigureAwait(false);
var hasObjects = listResponse?.S3Objects is not null && listResponse.S3Objects.Count > 0;
var hasPrefixes = listResponse?.CommonPrefixes is not null && listResponse.CommonPrefixes.Count > 0;
@@ -326,7 +326,7 @@ public async Task> ListItemsAsync(string path, Cancellatio
return null;
}
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -348,7 +348,7 @@ public async Task ReadFileAsync(string path, CancellationToken cancellat
Key = fullPath
};
- var response = await _client.GetObjectAsync(request, cancellationToken);
+ var response = await _client.GetObjectAsync(request, cancellationToken).ConfigureAwait(false);
// Read the entire stream into memory
var memoryStream = new MemoryStream();
@@ -359,8 +359,8 @@ public async Task ReadFileAsync(string path, CancellationToken cancellat
var responseStream = response.ResponseStream;
int read;
- while ((read = await responseStream.ReadAsync(buffer.AsMemory(), cancellationToken)) > 0) {
- await memoryStream.WriteAsync(buffer.AsMemory(0, read), cancellationToken);
+ while ((read = await responseStream.ReadAsync(buffer.AsMemory(), cancellationToken).ConfigureAwait(false)) > 0) {
+ await memoryStream.WriteAsync(buffer.AsMemory(0, read), cancellationToken).ConfigureAwait(false);
bytesRead += read;
if (totalBytes > _chunkSize) {
@@ -373,7 +373,7 @@ public async Task ReadFileAsync(string path, CancellationToken cancellat
} catch (AmazonS3Exception ex) when (ex.StatusCode == System.Net.HttpStatusCode.NotFound) {
throw new FileNotFoundException($"File not found: {path}");
}
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -390,7 +390,7 @@ public async Task ReadFileAsync(string path, CancellationToken cancellat
public async Task WriteFileAsync(string path, Stream content, CancellationToken cancellationToken = default) {
var fullPath = GetFullPath(path);
- await _transferSemaphore.WaitAsync(cancellationToken);
+ await _transferSemaphore.WaitAsync(cancellationToken).ConfigureAwait(false);
try {
await ExecuteWithRetry(async () => {
var fileSize = content.CanSeek ? content.Length : -1;
@@ -412,7 +412,7 @@ await ExecuteWithRetry(async () => {
RaiseProgressChanged(path, args.TransferredBytes, args.TotalBytes, StorageOperation.Upload);
};
- await transferUtility.UploadAsync(uploadRequest, cancellationToken);
+ await transferUtility.UploadAsync(uploadRequest, cancellationToken).ConfigureAwait(false);
} else {
// Use simple put for small files
var putRequest = new PutObjectRequest {
@@ -422,7 +422,7 @@ await ExecuteWithRetry(async () => {
AutoCloseStream = false
};
- await _client.PutObjectAsync(putRequest, cancellationToken);
+ await _client.PutObjectAsync(putRequest, cancellationToken).ConfigureAwait(false);
if (fileSize > 0) {
RaiseProgressChanged(path, fileSize, fileSize, StorageOperation.Upload);
@@ -430,7 +430,7 @@ await ExecuteWithRetry(async () => {
}
return true;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
} finally {
_transferSemaphore.Release();
}
@@ -466,21 +466,21 @@ await ExecuteWithRetry(async () => {
Key = directoryKey
};
- await _client.GetObjectMetadataAsync(headRequest, cancellationToken);
+ await _client.GetObjectMetadataAsync(headRequest, cancellationToken).ConfigureAwait(false);
return true; // Marker already exists
} catch (AmazonS3Exception ex) when (ex.StatusCode == System.Net.HttpStatusCode.NotFound) {
// Marker doesn't exist, create it
var putRequest = new PutObjectRequest {
BucketName = _bucketName,
Key = directoryKey,
- InputStream = new MemoryStream(Array.Empty()),
+ InputStream = new MemoryStream([]),
ContentType = "application/x-directory"
};
- await _client.PutObjectAsync(putRequest, cancellationToken);
+ await _client.PutObjectAsync(putRequest, cancellationToken).ConfigureAwait(false);
return true;
}
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -496,7 +496,7 @@ public async Task DeleteAsync(string path, CancellationToken cancellationToken =
await ExecuteWithRetry(async () => {
// First, try to get the item to determine if it's a file or directory
- var item = await GetItemAsync(path, cancellationToken);
+ var item = await GetItemAsync(path, cancellationToken).ConfigureAwait(false);
if (item is null) {
return true; // Already deleted
@@ -504,7 +504,7 @@ await ExecuteWithRetry(async () => {
if (item.IsDirectory) {
// Delete all objects with this prefix
- await DeleteDirectoryRecursive(fullPath, cancellationToken);
+ await DeleteDirectoryRecursive(fullPath, cancellationToken).ConfigureAwait(false);
} else {
// Delete single object
var deleteRequest = new DeleteObjectRequest {
@@ -512,11 +512,11 @@ await ExecuteWithRetry(async () => {
Key = fullPath
};
- await _client.DeleteObjectAsync(deleteRequest, cancellationToken);
+ await _client.DeleteObjectAsync(deleteRequest, cancellationToken).ConfigureAwait(false);
}
return true;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -532,7 +532,7 @@ private async Task DeleteDirectoryRecursive(string prefix, CancellationToken can
ListObjectsV2Response? response;
do {
- response = await _client.ListObjectsV2Async(listRequest, cancellationToken);
+ response = await _client.ListObjectsV2Async(listRequest, cancellationToken).ConfigureAwait(false);
var objectsToDelete = response?.S3Objects ?? Enumerable.Empty();
if (objectsToDelete.Any()) {
var deleteRequest = new DeleteObjectsRequest {
@@ -540,7 +540,7 @@ private async Task DeleteDirectoryRecursive(string prefix, CancellationToken can
Objects = objectsToDelete.Select(obj => new KeyVersion { Key = obj.Key }).ToList()
};
- await _client.DeleteObjectsAsync(deleteRequest, cancellationToken);
+ await _client.DeleteObjectsAsync(deleteRequest, cancellationToken).ConfigureAwait(false);
}
listRequest.ContinuationToken = response?.NextContinuationToken;
@@ -553,7 +553,7 @@ private async Task DeleteDirectoryRecursive(string prefix, CancellationToken can
Key = directoryPrefix
};
- await _client.DeleteObjectAsync(deleteMarkerRequest, cancellationToken);
+ await _client.DeleteObjectAsync(deleteMarkerRequest, cancellationToken).ConfigureAwait(false);
} catch (AmazonS3Exception ex) {
_logger.S3DirectoryMarkerCleanupFailed(ex, directoryPrefix);
}
@@ -581,7 +581,7 @@ await ExecuteWithRetry(async () => {
Key = sourceFullPath
};
- await _client.GetObjectMetadataAsync(headRequest, cancellationToken);
+ await _client.GetObjectMetadataAsync(headRequest, cancellationToken).ConfigureAwait(false);
} catch (AmazonS3Exception ex) when (ex.StatusCode == System.Net.HttpStatusCode.NotFound) {
throw new FileNotFoundException($"Source not found: {sourcePath}");
}
@@ -594,7 +594,7 @@ await ExecuteWithRetry(async () => {
DestinationKey = targetFullPath
};
- await _client.CopyObjectAsync(copyRequest, cancellationToken);
+ await _client.CopyObjectAsync(copyRequest, cancellationToken).ConfigureAwait(false);
// Delete source object
var deleteRequest = new DeleteObjectRequest {
@@ -602,10 +602,10 @@ await ExecuteWithRetry(async () => {
Key = sourceFullPath
};
- await _client.DeleteObjectAsync(deleteRequest, cancellationToken);
+ await _client.DeleteObjectAsync(deleteRequest, cancellationToken).ConfigureAwait(false);
return true;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -615,7 +615,7 @@ await ExecuteWithRetry(async () => {
/// Cancellation token to cancel the operation
/// True if the object or directory exists, false otherwise
public async Task ExistsAsync(string path, CancellationToken cancellationToken = default) {
- var item = await GetItemAsync(path, cancellationToken);
+ var item = await GetItemAsync(path, cancellationToken).ConfigureAwait(false);
return item is not null;
}
@@ -636,7 +636,7 @@ public async Task GetStorageInfoAsync(CancellationToken cancellatio
return await Task.FromResult(new StorageInfo {
TotalSpace = -1,
UsedSpace = -1
- });
+ }).ConfigureAwait(false);
}
///
@@ -653,10 +653,10 @@ public async Task GetStorageInfoAsync(CancellationToken cancellatio
public async Task ComputeHashAsync(string path, CancellationToken cancellationToken = default) {
// S3 ETag is MD5-based and complex for multipart uploads
// Download and compute SHA256 for consistency
- using var stream = await ReadFileAsync(path, cancellationToken);
+ using var stream = await ReadFileAsync(path, cancellationToken).ConfigureAwait(false);
using var sha256 = SHA256.Create();
- var hashBytes = await sha256.ComputeHashAsync(stream, cancellationToken);
+ var hashBytes = await sha256.ComputeHashAsync(stream, cancellationToken).ConfigureAwait(false);
return Convert.ToBase64String(hashBytes);
}
@@ -721,11 +721,11 @@ private async Task ExecuteWithRetry(Func> operation, CancellationT
for (int attempt = 0; attempt <= _maxRetries; attempt++) {
try {
cancellationToken.ThrowIfCancellationRequested();
- return await operation();
+ return await operation().ConfigureAwait(false);
} catch (Exception ex) when (attempt < _maxRetries && IsRetriableException(ex)) {
lastException = ex;
_logger.StorageOperationRetry("S3", attempt + 1, _maxRetries);
- await Task.Delay(_retryDelay * (attempt + 1), cancellationToken);
+ await Task.Delay(_retryDelay * (attempt + 1), cancellationToken).ConfigureAwait(false);
}
}
@@ -787,7 +787,7 @@ public async Task> GetRemoteChangesAsync(DateTime sinc
ListObjectsV2Response? response;
do {
cancellationToken.ThrowIfCancellationRequested();
- response = await _client.ListObjectsV2Async(request, cancellationToken);
+ response = await _client.ListObjectsV2Async(request, cancellationToken).ConfigureAwait(false);
var s3Objects = response?.S3Objects ?? Enumerable.Empty();
foreach (var obj in s3Objects) {
diff --git a/src/SharpSync/Storage/SftpStorage.cs b/src/SharpSync/Storage/SftpStorage.cs
index 84c5c48..a7399c7 100644
--- a/src/SharpSync/Storage/SftpStorage.cs
+++ b/src/SharpSync/Storage/SftpStorage.cs
@@ -177,7 +177,7 @@ private async Task EnsureConnectedAsync(CancellationToken cancellationToken = de
return;
}
- await _connectionSemaphore.WaitAsync(cancellationToken);
+ await _connectionSemaphore.WaitAsync(cancellationToken).ConfigureAwait(false);
try {
if (_client?.IsConnected == true) {
return;
@@ -214,7 +214,7 @@ private async Task EnsureConnectedAsync(CancellationToken cancellationToken = de
// Create and connect client
_client = new SftpClient(connectionInfo);
- await Task.Run(() => _client.Connect(), cancellationToken);
+ await Task.Run(() => _client.Connect(), cancellationToken).ConfigureAwait(false);
// Detect server path handling based on root path configuration
// When no root is specified or root doesn't start with "/", assume chrooted environment
@@ -306,7 +306,7 @@ private async Task EnsureConnectedAsync(CancellationToken cancellationToken = de
/// True if connection is successful, false otherwise
public async Task TestConnectionAsync(CancellationToken cancellationToken = default) {
try {
- await EnsureConnectedAsync(cancellationToken);
+ await EnsureConnectedAsync(cancellationToken).ConfigureAwait(false);
return _client?.IsConnected == true;
} catch (Exception ex) {
_logger.ConnectionTestFailed(ex, "SFTP");
@@ -322,7 +322,7 @@ public async Task TestConnectionAsync(CancellationToken cancellationToken
/// A collection of sync items representing files and directories
/// Thrown when authentication fails
public async Task> ListItemsAsync(string path, CancellationToken cancellationToken = default) {
- await EnsureConnectedAsync(cancellationToken);
+ await EnsureConnectedAsync(cancellationToken).ConfigureAwait(false);
var fullPath = GetFullPath(path);
@@ -333,7 +333,7 @@ public async Task> ListItemsAsync(string path, Cancellatio
return items;
}
- var sftpFiles = await Task.Run(() => _client!.ListDirectory(fullPath), cancellationToken);
+ var sftpFiles = await Task.Run(() => _client!.ListDirectory(fullPath), cancellationToken).ConfigureAwait(false);
foreach (var file in sftpFiles) {
// Skip current and parent directory entries
@@ -354,7 +354,7 @@ public async Task> ListItemsAsync(string path, Cancellatio
}
return (IEnumerable)items;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -364,7 +364,7 @@ public async Task> ListItemsAsync(string path, Cancellatio
/// Cancellation token to cancel the operation
/// The sync item if it exists, null otherwise
public async Task GetItemAsync(string path, CancellationToken cancellationToken = default) {
- await EnsureConnectedAsync(cancellationToken);
+ await EnsureConnectedAsync(cancellationToken).ConfigureAwait(false);
var fullPath = GetFullPath(path);
@@ -373,7 +373,7 @@ public async Task> ListItemsAsync(string path, Cancellatio
return null;
}
- var file = await Task.Run(() => _client.Get(fullPath), cancellationToken);
+ var file = await Task.Run(() => _client.Get(fullPath), cancellationToken).ConfigureAwait(false);
return new SyncItem {
Path = path,
@@ -383,7 +383,7 @@ public async Task> ListItemsAsync(string path, Cancellatio
LastModified = file.LastWriteTimeUtc,
Permissions = ConvertPermissionsToString(file)
};
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -399,7 +399,7 @@ public async Task> ListItemsAsync(string path, Cancellatio
/// For files larger than the configured chunk size, progress events will be raised via
///
public async Task ReadFileAsync(string path, CancellationToken cancellationToken = default) {
- await EnsureConnectedAsync(cancellationToken);
+ await EnsureConnectedAsync(cancellationToken).ConfigureAwait(false);
var fullPath = GetFullPath(path);
@@ -426,15 +426,15 @@ await Task.Run(() => {
downloadedBytes = uploaded;
RaiseProgressChanged(path, (long)downloadedBytes, (long)totalBytes, StorageOperation.Download);
});
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
} else {
// Download without progress
- await Task.Run(() => _client.DownloadFile(fullPath, memoryStream), cancellationToken);
+ await Task.Run(() => _client.DownloadFile(fullPath, memoryStream), cancellationToken).ConfigureAwait(false);
}
memoryStream.Position = 0;
return (Stream)memoryStream;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -449,14 +449,14 @@ await Task.Run(() => {
/// progress events will be raised via
///
public async Task WriteFileAsync(string path, Stream content, CancellationToken cancellationToken = default) {
- await EnsureConnectedAsync(cancellationToken);
+ await EnsureConnectedAsync(cancellationToken).ConfigureAwait(false);
var fullPath = GetFullPath(path);
// Ensure parent directories exist
var directory = GetParentDirectory(fullPath);
if (!string.IsNullOrEmpty(directory)) {
- await CreateDirectoryAsync(GetRelativePath(directory), cancellationToken);
+ await CreateDirectoryAsync(GetRelativePath(directory), cancellationToken).ConfigureAwait(false);
}
await ExecuteWithRetry(async () => {
@@ -472,14 +472,14 @@ await Task.Run(() => {
uploadedBytes = uploaded;
RaiseProgressChanged(path, (long)uploadedBytes, (long)totalBytes, StorageOperation.Upload);
});
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
} else {
// Upload without progress
- await Task.Run(() => _client!.UploadFile(content, fullPath, true), cancellationToken);
+ await Task.Run(() => _client!.UploadFile(content, fullPath, true), cancellationToken).ConfigureAwait(false);
}
return true;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -492,7 +492,7 @@ await Task.Run(() => {
/// If the directory already exists, this method completes successfully without error
///
public async Task CreateDirectoryAsync(string path, CancellationToken cancellationToken = default) {
- await EnsureConnectedAsync(cancellationToken);
+ await EnsureConnectedAsync(cancellationToken).ConfigureAwait(false);
var fullPath = GetFullPath(path);
@@ -523,7 +523,7 @@ await ExecuteWithRetry(async () => {
if (!SafeExists(currentPath)) {
try {
- await Task.Run(() => _client!.CreateDirectory(currentPath), cancellationToken);
+ await Task.Run(() => _client!.CreateDirectory(currentPath), cancellationToken).ConfigureAwait(false);
} catch (Exception ex) when (ex is Renci.SshNet.Common.SftpPermissionDeniedException ||
ex is Renci.SshNet.Common.SftpPathNotFoundException) {
_logger.SftpPermissionDenied(ex, "directory creation", currentPath);
@@ -531,7 +531,7 @@ await ExecuteWithRetry(async () => {
var alternatePath = currentPath.StartsWith('/') ? currentPath.TrimStart('/') : "/" + currentPath;
if (!SafeExists(alternatePath)) {
try {
- await Task.Run(() => _client!.CreateDirectory(alternatePath), cancellationToken);
+ await Task.Run(() => _client!.CreateDirectory(alternatePath), cancellationToken).ConfigureAwait(false);
} catch (Exception ex2) when (ex2 is Renci.SshNet.Common.SftpPermissionDeniedException ||
ex2 is Renci.SshNet.Common.SftpPathNotFoundException) {
_logger.SftpPermissionDenied(ex2, "directory creation (alternate path)", alternatePath);
@@ -546,7 +546,7 @@ await ExecuteWithRetry(async () => {
}
return true;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -560,7 +560,7 @@ await ExecuteWithRetry(async () => {
/// If the item does not exist, this method completes successfully without error
///
public async Task DeleteAsync(string path, CancellationToken cancellationToken = default) {
- await EnsureConnectedAsync(cancellationToken);
+ await EnsureConnectedAsync(cancellationToken).ConfigureAwait(false);
var fullPath = GetFullPath(path);
@@ -572,13 +572,13 @@ await ExecuteWithRetry(async () => {
var file = _client.Get(fullPath);
if (file.IsDirectory) {
- await Task.Run(() => DeleteDirectoryRecursive(fullPath, cancellationToken), cancellationToken);
+ await Task.Run(() => DeleteDirectoryRecursive(fullPath, cancellationToken), cancellationToken).ConfigureAwait(false);
} else {
- await Task.Run(() => _client.DeleteFile(fullPath), cancellationToken);
+ await Task.Run(() => _client.DeleteFile(fullPath), cancellationToken).ConfigureAwait(false);
}
return true;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -614,7 +614,7 @@ private void DeleteDirectoryRecursive(string path, CancellationToken cancellatio
/// Parent directories of the target path will be created if they don't exist
///
public async Task MoveAsync(string sourcePath, string targetPath, CancellationToken cancellationToken = default) {
- await EnsureConnectedAsync(cancellationToken);
+ await EnsureConnectedAsync(cancellationToken).ConfigureAwait(false);
var sourceFullPath = GetFullPath(sourcePath);
var targetFullPath = GetFullPath(targetPath);
@@ -625,7 +625,7 @@ public async Task MoveAsync(string sourcePath, string targetPath, CancellationTo
var normalizedTargetPath = NormalizePath(targetPath);
var targetParentRelative = GetParentDirectory(normalizedTargetPath);
if (!string.IsNullOrEmpty(targetParentRelative)) {
- await CreateDirectoryAsync(targetParentRelative, cancellationToken);
+ await CreateDirectoryAsync(targetParentRelative, cancellationToken).ConfigureAwait(false);
}
await ExecuteWithRetry(async () => {
@@ -635,10 +635,10 @@ await ExecuteWithRetry(async () => {
// SSH.NET's RenameFile maps to SSH_FXP_RENAME, which handles both
// same-directory renames and cross-directory moves for files and directories
- await Task.Run(() => _client.RenameFile(sourceFullPath, targetFullPath), cancellationToken);
+ await Task.Run(() => _client.RenameFile(sourceFullPath, targetFullPath), cancellationToken).ConfigureAwait(false);
return true;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -648,13 +648,13 @@ await ExecuteWithRetry(async () => {
/// Cancellation token to cancel the operation
/// True if the file or directory exists, false otherwise
public async Task ExistsAsync(string path, CancellationToken cancellationToken = default) {
- await EnsureConnectedAsync(cancellationToken);
+ await EnsureConnectedAsync(cancellationToken).ConfigureAwait(false);
var fullPath = GetFullPath(path);
return await ExecuteWithRetry(async () => {
- return await Task.Run(() => _client!.Exists(fullPath), cancellationToken);
- }, cancellationToken);
+ return await Task.Run(() => _client!.Exists(fullPath), cancellationToken).ConfigureAwait(false);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -667,11 +667,11 @@ public async Task ExistsAsync(string path, CancellationToken cancellationT
/// best-effort values which may be -1 if the server doesn't support disk space queries
///
public async Task<StorageInfo> GetStorageInfoAsync(CancellationToken cancellationToken = default) {
- await EnsureConnectedAsync(cancellationToken);
+ await EnsureConnectedAsync(cancellationToken).ConfigureAwait(false);
return await ExecuteWithRetry(async () => {
try {
- var statVfs = await Task.Run(() => _client!.GetStatus(RootPath.Length != 0 ? RootPath : "/"), cancellationToken);
+ var statVfs = await Task.Run(() => _client!.GetStatus(RootPath.Length != 0 ? RootPath : "/"), cancellationToken).ConfigureAwait(false);
var totalSpace = (long)(statVfs.TotalBlocks * statVfs.BlockSize);
var usedSpace = (long)((statVfs.TotalBlocks - statVfs.FreeBlocks) * statVfs.BlockSize);
@@ -687,7 +687,7 @@ public async Task GetStorageInfoAsync(CancellationToken cancellatio
UsedSpace = -1
};
}
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -703,10 +703,10 @@ public async Task GetStorageInfoAsync(CancellationToken cancellatio
///
public async Task<string> ComputeHashAsync(string path, CancellationToken cancellationToken = default) {
// SFTP doesn't have native hash support, so we download and hash
- using var stream = await ReadFileAsync(path, cancellationToken);
+ using var stream = await ReadFileAsync(path, cancellationToken).ConfigureAwait(false);
using var sha256 = SHA256.Create();
- var hashBytes = await sha256.ComputeHashAsync(stream, cancellationToken);
+ var hashBytes = await sha256.ComputeHashAsync(stream, cancellationToken).ConfigureAwait(false);
return Convert.ToBase64String(hashBytes);
}
@@ -714,24 +714,24 @@ public async Task ComputeHashAsync(string path, CancellationToken cancel
/// Sets the last modified time for a file on the SFTP server
///
public async Task SetLastModifiedAsync(string path, DateTime lastModified, CancellationToken cancellationToken = default) {
- await EnsureConnectedAsync(cancellationToken);
+ await EnsureConnectedAsync(cancellationToken).ConfigureAwait(false);
var fullPath = GetFullPath(path);
await ExecuteWithRetry(async () => {
if (_client!.Exists(fullPath)) {
var attrs = _client.GetAttributes(fullPath);
attrs.LastWriteTime = lastModified;
- await Task.Run(() => _client.SetAttributes(fullPath, attrs), cancellationToken);
+ await Task.Run(() => _client.SetAttributes(fullPath, attrs), cancellationToken).ConfigureAwait(false);
}
return true;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
/// Sets file permissions on the SFTP server
///
public async Task SetPermissionsAsync(string path, string permissions, CancellationToken cancellationToken = default) {
- await EnsureConnectedAsync(cancellationToken);
+ await EnsureConnectedAsync(cancellationToken).ConfigureAwait(false);
var fullPath = GetFullPath(path);
await ExecuteWithRetry(async () => {
@@ -744,11 +744,11 @@ await ExecuteWithRetry(async () => {
&& permissions.All(c => c >= '0' && c <= '7')) {
var attrs = _client.GetAttributes(fullPath);
attrs.SetPermissions(mode);
- await Task.Run(() => _client.SetAttributes(fullPath, attrs), cancellationToken);
+ await Task.Run(() => _client.SetAttributes(fullPath, attrs), cancellationToken).ConfigureAwait(false);
}
}
return true;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
#region Helper Methods
@@ -896,7 +896,7 @@ private async Task ExecuteWithRetry(Func> operation, CancellationT
for (int attempt = 0; attempt <= _maxRetries; attempt++) {
try {
cancellationToken.ThrowIfCancellationRequested();
- return await operation();
+ return await operation().ConfigureAwait(false);
} catch (Exception ex) when (attempt < _maxRetries && IsRetriableException(ex)) {
lastException = ex;
_logger.StorageOperationRetry("SFTP", attempt + 1, _maxRetries);
@@ -908,13 +908,13 @@ private async Task ExecuteWithRetry(Func> operation, CancellationT
_client?.Disconnect();
_client?.Dispose();
_client = null;
- await EnsureConnectedAsync(cancellationToken);
+ await EnsureConnectedAsync(cancellationToken).ConfigureAwait(false);
} catch (Exception reconnectEx) {
_logger.StorageReconnectFailed(reconnectEx, "SFTP");
}
}
- await Task.Delay(_retryDelay * (attempt + 1), cancellationToken);
+ await Task.Delay(_retryDelay * (attempt + 1), cancellationToken).ConfigureAwait(false);
}
}
diff --git a/src/SharpSync/Storage/StorageProgressEventArgs.cs b/src/SharpSync/Storage/StorageProgressEventArgs.cs
index 713cb43..0c0416f 100644
--- a/src/SharpSync/Storage/StorageProgressEventArgs.cs
+++ b/src/SharpSync/Storage/StorageProgressEventArgs.cs
@@ -7,25 +7,25 @@ public class StorageProgressEventArgs: EventArgs {
///
/// Path of the file being processed
///
- public string Path { get; set; } = string.Empty;
+ public string Path { get; init; } = string.Empty;
///
/// Number of bytes transferred so far
///
- public long BytesTransferred { get; set; }
+ public long BytesTransferred { get; init; }
///
/// Total number of bytes to transfer
///
- public long TotalBytes { get; set; }
+ public long TotalBytes { get; init; }
///
/// Operation being performed
///
- public StorageOperation Operation { get; set; }
+ public StorageOperation Operation { get; init; }
///
/// Percentage complete (0-100)
///
- public int PercentComplete { get; set; }
+ public int PercentComplete { get; init; }
}
diff --git a/src/SharpSync/Storage/ThrottledStream.cs b/src/SharpSync/Storage/ThrottledStream.cs
index f93b23b..ef3feeb 100644
--- a/src/SharpSync/Storage/ThrottledStream.cs
+++ b/src/SharpSync/Storage/ThrottledStream.cs
@@ -55,15 +55,15 @@ public override int Read(byte[] buffer, int offset, int count) {
}
public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) {
- await ThrottleAsync(count, cancellationToken);
- var bytesRead = await _innerStream.ReadAsync(buffer.AsMemory(offset, count), cancellationToken);
+ await ThrottleAsync(count, cancellationToken).ConfigureAwait(false);
+ var bytesRead = await _innerStream.ReadAsync(buffer.AsMemory(offset, count), cancellationToken).ConfigureAwait(false);
RecordBytesTransferred(bytesRead);
return bytesRead;
}
public override async ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken = default) {
- await ThrottleAsync(buffer.Length, cancellationToken);
- var bytesRead = await _innerStream.ReadAsync(buffer, cancellationToken);
+ await ThrottleAsync(buffer.Length, cancellationToken).ConfigureAwait(false);
+ var bytesRead = await _innerStream.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
RecordBytesTransferred(bytesRead);
return bytesRead;
}
@@ -75,14 +75,14 @@ public override void Write(byte[] buffer, int offset, int count) {
}
public override async Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) {
- await ThrottleAsync(count, cancellationToken);
- await _innerStream.WriteAsync(buffer.AsMemory(offset, count), cancellationToken);
+ await ThrottleAsync(count, cancellationToken).ConfigureAwait(false);
+ await _innerStream.WriteAsync(buffer.AsMemory(offset, count), cancellationToken).ConfigureAwait(false);
RecordBytesTransferred(count);
}
public override async ValueTask WriteAsync(ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken = default) {
- await ThrottleAsync(buffer.Length, cancellationToken);
- await _innerStream.WriteAsync(buffer, cancellationToken);
+ await ThrottleAsync(buffer.Length, cancellationToken).ConfigureAwait(false);
+ await _innerStream.WriteAsync(buffer, cancellationToken).ConfigureAwait(false);
RecordBytesTransferred(buffer.Length);
}
@@ -106,7 +106,7 @@ private void ThrottleSync(int requestedBytes) {
private async Task ThrottleAsync(int requestedBytes, CancellationToken cancellationToken) {
var delay = CalculateDelay(requestedBytes);
if (delay > TimeSpan.Zero) {
- await Task.Delay(delay, cancellationToken);
+ await Task.Delay(delay, cancellationToken).ConfigureAwait(false);
}
}
diff --git a/src/SharpSync/Storage/WebDavStorage.cs b/src/SharpSync/Storage/WebDavStorage.cs
index 06c1ba1..2c5604d 100644
--- a/src/SharpSync/Storage/WebDavStorage.cs
+++ b/src/SharpSync/Storage/WebDavStorage.cs
@@ -140,13 +140,13 @@ public async Task GetServerCapabilitiesAsync(CancellationTok
return _serverCapabilities;
}
- await _capabilitiesSemaphore.WaitAsync(cancellationToken);
+ await _capabilitiesSemaphore.WaitAsync(cancellationToken).ConfigureAwait(false);
try {
if (_serverCapabilities is not null) {
return _serverCapabilities;
}
- _serverCapabilities = await DetectServerCapabilitiesAsync(cancellationToken);
+ _serverCapabilities = await DetectServerCapabilitiesAsync(cancellationToken).ConfigureAwait(false);
return _serverCapabilities;
} finally {
_capabilitiesSemaphore.Release();
@@ -161,7 +161,7 @@ public async Task AuthenticateAsync(CancellationToken cancellationToken =
return true; // No OAuth2 configured, assume basic auth or anonymous
}
- await _authSemaphore.WaitAsync(cancellationToken);
+ await _authSemaphore.WaitAsync(cancellationToken).ConfigureAwait(false);
try {
// Check if current token is still valid
if (_oauth2Result?.IsValid == true && !_oauth2Result.WillExpireWithin(TimeSpan.FromMinutes(5))) {
@@ -171,7 +171,7 @@ public async Task AuthenticateAsync(CancellationToken cancellationToken =
// Try refresh token first
if (_oauth2Result?.RefreshToken is not null) {
try {
- _oauth2Result = await _oauth2Provider.RefreshTokenAsync(_oauth2Config, _oauth2Result.RefreshToken, cancellationToken);
+ _oauth2Result = await _oauth2Provider.RefreshTokenAsync(_oauth2Config, _oauth2Result.RefreshToken, cancellationToken).ConfigureAwait(false);
UpdateClientAuth();
return true;
} catch (Exception ex) {
@@ -180,7 +180,7 @@ public async Task AuthenticateAsync(CancellationToken cancellationToken =
}
// Perform full OAuth2 authentication
- _oauth2Result = await _oauth2Provider.AuthenticateAsync(_oauth2Config, cancellationToken);
+ _oauth2Result = await _oauth2Provider.AuthenticateAsync(_oauth2Config, cancellationToken).ConfigureAwait(false);
UpdateClientAuth();
return _oauth2Result.IsValid;
} finally {
@@ -211,16 +211,16 @@ private void UpdateClientAuth() {
/// Cancellation token to cancel the operation
/// True if the connection is successful, false otherwise
public async Task<bool> TestConnectionAsync(CancellationToken cancellationToken = default) {
- if (!await EnsureAuthenticated(cancellationToken))
+ if (!await EnsureAuthenticated(cancellationToken).ConfigureAwait(false))
return false;
return await ExecuteWithRetry(async () => {
var result = await _client.Propfind(_baseUrl, new PropfindParameters {
RequestType = PropfindRequestType.AllProperties,
CancellationToken = cancellationToken
- });
+ }).ConfigureAwait(false);
return result.IsSuccessful;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -230,7 +230,7 @@ public async Task TestConnectionAsync(CancellationToken cancellationToken
/// Cancellation token to cancel the operation
/// A collection of sync items representing files and directories
public async Task<IEnumerable<SyncItem>> ListItemsAsync(string path, CancellationToken cancellationToken = default) {
- if (!await EnsureAuthenticated(cancellationToken))
+ if (!await EnsureAuthenticated(cancellationToken).ConfigureAwait(false))
throw new UnauthorizedAccessException("Authentication failed");
var fullPath = GetFullPath(path);
@@ -239,7 +239,7 @@ public async Task> ListItemsAsync(string path, Cancellatio
var result = await _client.Propfind(fullPath, new PropfindParameters {
RequestType = PropfindRequestType.AllProperties,
CancellationToken = cancellationToken
- });
+ }).ConfigureAwait(false);
if (!result.IsSuccessful) {
if (result.StatusCode == 404) {
@@ -259,7 +259,7 @@ public async Task> ListItemsAsync(string path, Cancellatio
LastModified = resource.LastModifiedDate?.ToUniversalTime() ?? DateTime.MinValue,
ETag = NormalizeETag(resource.ETag)
});
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -269,7 +269,7 @@ public async Task> ListItemsAsync(string path, Cancellatio
/// Cancellation token to cancel the operation
/// The sync item if it exists, null otherwise
public async Task<SyncItem?> GetItemAsync(string path, CancellationToken cancellationToken = default) {
- if (!await EnsureAuthenticated(cancellationToken))
+ if (!await EnsureAuthenticated(cancellationToken).ConfigureAwait(false))
return null;
var fullPath = GetFullPath(path);
@@ -278,7 +278,7 @@ public async Task> ListItemsAsync(string path, Cancellatio
var result = await _client.Propfind(fullPath, new PropfindParameters {
RequestType = PropfindRequestType.AllProperties,
CancellationToken = cancellationToken
- });
+ }).ConfigureAwait(false);
if (!result.IsSuccessful) {
return null;
@@ -296,7 +296,7 @@ public async Task> ListItemsAsync(string path, Cancellatio
LastModified = resource.LastModifiedDate?.ToUniversalTime() ?? DateTime.MinValue,
ETag = NormalizeETag(resource.ETag)
};
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -308,19 +308,19 @@ public async Task> ListItemsAsync(string path, Cancellatio
/// Thrown when the file does not exist
/// Thrown when authentication fails
public async Task<Stream> ReadFileAsync(string path, CancellationToken cancellationToken = default) {
- if (!await EnsureAuthenticated(cancellationToken))
+ if (!await EnsureAuthenticated(cancellationToken).ConfigureAwait(false))
throw new UnauthorizedAccessException("Authentication failed");
var fullPath = GetFullPath(path);
// Get file info first to determine if we need progress reporting
- var item = await GetItemAsync(path, cancellationToken);
+ var item = await GetItemAsync(path, cancellationToken).ConfigureAwait(false);
var needsProgress = item?.Size > _chunkSize;
return await ExecuteWithRetry(async () => {
var response = await _client.GetRawFile(fullPath, new GetFileParameters {
CancellationToken = cancellationToken
- });
+ }).ConfigureAwait(false);
if (!response.IsSuccessful) {
if (response.StatusCode == 404) {
@@ -337,7 +337,7 @@ public async Task ReadFileAsync(string path, CancellationToken cancellat
}
return response.Stream;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -351,20 +351,20 @@ public async Task ReadFileAsync(string path, CancellationToken cancellat
/// Progress events are raised during large file uploads.
///
public async Task WriteFileAsync(string path, Stream content, CancellationToken cancellationToken = default) {
- if (!await EnsureAuthenticated(cancellationToken))
+ if (!await EnsureAuthenticated(cancellationToken).ConfigureAwait(false))
throw new UnauthorizedAccessException("Authentication failed");
var fullPath = GetFullPath(path);
// Ensure root path exists first (if configured)
if (!string.IsNullOrEmpty(RootPath)) {
- await EnsureRootPathExistsAsync(cancellationToken);
+ await EnsureRootPathExistsAsync(cancellationToken).ConfigureAwait(false);
}
// Ensure parent directories exist
var directory = Path.GetDirectoryName(path);
if (!string.IsNullOrEmpty(directory)) {
- await CreateDirectoryAsync(directory, cancellationToken);
+ await CreateDirectoryAsync(directory, cancellationToken).ConfigureAwait(false);
}
// For small files, use regular upload
@@ -372,7 +372,7 @@ public async Task WriteFileAsync(string path, Stream content, CancellationToken
// Extract bytes once before retry loop
content.Position = 0;
using var tempStream = new MemoryStream();
- await content.CopyToAsync(tempStream, cancellationToken);
+ await content.CopyToAsync(tempStream, cancellationToken).ConfigureAwait(false);
var contentBytes = tempStream.ToArray();
await ExecuteWithRetry(async () => {
@@ -381,7 +381,7 @@ await ExecuteWithRetry(async () => {
var result = await _client.PutFile(fullPath, contentCopy, new PutFileParameters {
CancellationToken = cancellationToken
- });
+ }).ConfigureAwait(false);
if (!result.IsSuccessful) {
// 409 Conflict on PUT typically means parent directory issue
@@ -390,17 +390,17 @@ await ExecuteWithRetry(async () => {
// Ensure root path and parent directory exist
_rootPathCreated = false; // Force re-check
if (!string.IsNullOrEmpty(RootPath)) {
- await EnsureRootPathExistsAsync(cancellationToken);
+ await EnsureRootPathExistsAsync(cancellationToken).ConfigureAwait(false);
}
var dir = Path.GetDirectoryName(path);
if (!string.IsNullOrEmpty(dir)) {
- await CreateDirectoryAsync(dir, cancellationToken);
+ await CreateDirectoryAsync(dir, cancellationToken).ConfigureAwait(false);
}
// Retry the upload with fresh stream
using var retryStream = new MemoryStream(contentBytes);
var retryResult = await _client.PutFile(fullPath, retryStream, new PutFileParameters {
CancellationToken = cancellationToken
- });
+ }).ConfigureAwait(false);
if (retryResult.IsSuccessful) {
return true;
}
@@ -409,36 +409,36 @@ await ExecuteWithRetry(async () => {
}
return true;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
// Small delay for server propagation, then verify file exists
- await Task.Delay(50, cancellationToken);
+ await Task.Delay(50, cancellationToken).ConfigureAwait(false);
return;
}
// For large files, use chunked upload (if supported by server)
- await WriteFileChunkedAsync(fullPath, path, content, cancellationToken);
+ await WriteFileChunkedAsync(fullPath, path, content, cancellationToken).ConfigureAwait(false);
// Small delay for server propagation
- await Task.Delay(50, cancellationToken);
+ await Task.Delay(50, cancellationToken).ConfigureAwait(false);
}
///
/// Chunked upload implementation with platform-specific optimizations
///
private async Task WriteFileChunkedAsync(string fullPath, string relativePath, Stream content, CancellationToken cancellationToken) {
- var capabilities = await GetServerCapabilitiesAsync(cancellationToken);
+ var capabilities = await GetServerCapabilitiesAsync(cancellationToken).ConfigureAwait(false);
// Use platform-specific chunking if available
if (capabilities.IsNextcloud && capabilities.ChunkingVersion >= 2) {
_logger.UploadStrategySelected("Nextcloud chunking v2", relativePath);
- await WriteFileNextcloudChunkedAsync(fullPath, relativePath, content, cancellationToken);
+ await WriteFileNextcloudChunkedAsync(fullPath, relativePath, content, cancellationToken).ConfigureAwait(false);
} else if (capabilities.IsOcis && capabilities.SupportsOcisChunking) {
_logger.UploadStrategySelected("OCIS TUS", relativePath);
- await WriteFileOcisChunkedAsync(fullPath, relativePath, content, cancellationToken);
+ await WriteFileOcisChunkedAsync(fullPath, relativePath, content, cancellationToken).ConfigureAwait(false);
} else {
_logger.UploadStrategySelected("generic WebDAV", relativePath);
- await WriteFileGenericAsync(fullPath, relativePath, content, cancellationToken);
+ await WriteFileGenericAsync(fullPath, relativePath, content, cancellationToken).ConfigureAwait(false);
}
}
@@ -457,7 +457,7 @@ await ExecuteWithRetry(async () => {
var result = await _client.PutFile(fullPath, content, new PutFileParameters {
CancellationToken = cancellationToken
- });
+ }).ConfigureAwait(false);
if (!result.IsSuccessful) {
// 409 Conflict on PUT typically means parent directory issue
@@ -466,17 +466,17 @@ await ExecuteWithRetry(async () => {
// Ensure root path and parent directory exist
_rootPathCreated = false; // Force re-check
if (!string.IsNullOrEmpty(RootPath)) {
- await EnsureRootPathExistsAsync(cancellationToken);
+ await EnsureRootPathExistsAsync(cancellationToken).ConfigureAwait(false);
}
var dir = Path.GetDirectoryName(relativePath);
if (!string.IsNullOrEmpty(dir)) {
- await CreateDirectoryAsync(dir, cancellationToken);
+ await CreateDirectoryAsync(dir, cancellationToken).ConfigureAwait(false);
}
// Retry the upload
content.Position = 0;
var retryResult = await _client.PutFile(fullPath, content, new PutFileParameters {
CancellationToken = cancellationToken
- });
+ }).ConfigureAwait(false);
if (retryResult.IsSuccessful) {
RaiseProgressChanged(relativePath, totalSize, totalSize, StorageOperation.Upload);
return true;
@@ -489,7 +489,7 @@ await ExecuteWithRetry(async () => {
RaiseProgressChanged(relativePath, totalSize, totalSize, StorageOperation.Upload);
return true;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -502,7 +502,7 @@ private async Task WriteFileNextcloudChunkedAsync(string fullPath, string relati
try {
// Create chunking folder
- await CreateDirectoryAsync(chunkFolder, cancellationToken);
+ await CreateDirectoryAsync(chunkFolder, cancellationToken).ConfigureAwait(false);
// Upload chunks
var chunkNumber = 0;
@@ -512,7 +512,7 @@ private async Task WriteFileNextcloudChunkedAsync(string fullPath, string relati
content.Position = 0;
while (uploadedBytes < totalSize) {
- var bytesRead = await content.ReadAsync(buffer, cancellationToken);
+ var bytesRead = await content.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
if (bytesRead == 0) {
break;
}
@@ -524,14 +524,14 @@ private async Task WriteFileNextcloudChunkedAsync(string fullPath, string relati
await ExecuteWithRetry(async () => {
var result = await _client.PutFile(GetFullPath(chunkPath), chunkStream, new PutFileParameters {
CancellationToken = cancellationToken
- });
+ }).ConfigureAwait(false);
if (!result.IsSuccessful) {
throw new HttpRequestException($"Chunk upload failed: {result.StatusCode}");
}
return true;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
uploadedBytes += bytesRead;
chunkNumber++;
@@ -541,11 +541,11 @@ await ExecuteWithRetry(async () => {
}
// Assemble chunks
- await AssembleNextcloudChunksAsync(chunkFolder, fullPath, totalSize, cancellationToken);
+ await AssembleNextcloudChunksAsync(chunkFolder, fullPath, totalSize, cancellationToken).ConfigureAwait(false);
} finally {
// Clean up chunks folder
try {
- await DeleteAsync(chunkFolder, cancellationToken);
+ await DeleteAsync(chunkFolder, cancellationToken).ConfigureAwait(false);
} catch (Exception ex) {
_logger.ChunkCleanupFailed(ex, chunkFolder);
}
@@ -557,14 +557,14 @@ await ExecuteWithRetry(async () => {
///
private async Task WriteFileOcisChunkedAsync(string fullPath, string relativePath, Stream content, CancellationToken cancellationToken) {
try {
- await WriteFileOcisTusAsync(fullPath, relativePath, content, cancellationToken);
+ await WriteFileOcisTusAsync(fullPath, relativePath, content, cancellationToken).ConfigureAwait(false);
} catch (Exception ex) when (ex is not OperationCanceledException) {
_logger.TusUploadFallback(ex, relativePath);
// Fallback to generic upload if TUS fails
if (content.CanSeek) {
content.Position = 0;
}
- await WriteFileGenericAsync(fullPath, relativePath, content, cancellationToken);
+ await WriteFileGenericAsync(fullPath, relativePath, content, cancellationToken).ConfigureAwait(false);
}
}
@@ -584,7 +584,7 @@ private async Task WriteFileOcisTusAsync(string fullPath, string relativePath, S
RaiseProgressChanged(relativePath, 0, totalSize, StorageOperation.Upload);
// Create TUS upload
- var uploadUrl = await TusCreateUploadAsync(fullPath, totalSize, relativePath, cancellationToken);
+ var uploadUrl = await TusCreateUploadAsync(fullPath, totalSize, relativePath, cancellationToken).ConfigureAwait(false);
// Upload chunks
var offset = 0L;
@@ -598,17 +598,17 @@ private async Task WriteFileOcisTusAsync(string fullPath, string relativePath, S
// Read chunk from content stream
content.Position = offset;
- var bytesRead = await content.ReadAsync(buffer.AsMemory(0, chunkSize), cancellationToken);
+ var bytesRead = await content.ReadAsync(buffer.AsMemory(0, chunkSize), cancellationToken).ConfigureAwait(false);
if (bytesRead == 0) {
break;
}
try {
- offset = await TusPatchChunkAsync(uploadUrl, buffer, bytesRead, offset, cancellationToken);
+ offset = await TusPatchChunkAsync(uploadUrl, buffer, bytesRead, offset, cancellationToken).ConfigureAwait(false);
} catch (Exception ex) when (ex is not OperationCanceledException && IsRetriableException(ex)) {
// Try to resume by checking current offset
_logger.TusUploadResumeFailed(ex, relativePath, offset);
- var currentOffset = await TusGetOffsetAsync(uploadUrl, cancellationToken);
+ var currentOffset = await TusGetOffsetAsync(uploadUrl, cancellationToken).ConfigureAwait(false);
if (currentOffset >= 0 && currentOffset <= totalSize) {
offset = currentOffset;
continue;
@@ -638,9 +638,9 @@ private async Task TusCreateUploadAsync(string fullPath, long totalSize,
request.Headers.Add("Upload-Metadata", encodedMetadata);
// Empty content for POST
- request.Content = new ByteArrayContent(Array.Empty<byte>());
+ request.Content = new ByteArrayContent([]);
- var response = await httpClient.SendAsync(request, cancellationToken);
+ var response = await httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
if (!response.IsSuccessStatusCode) {
throw new HttpRequestException($"TUS upload creation failed: {(int)response.StatusCode} {response.ReasonPhrase}");
@@ -674,7 +674,7 @@ private async Task TusPatchChunkAsync(string uploadUrl, byte[] buffer, int
content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/offset+octet-stream");
request.Content = content;
- var response = await httpClient.SendAsync(request, cancellationToken);
+ var response = await httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
if (!response.IsSuccessStatusCode) {
throw new HttpRequestException($"TUS chunk upload failed: {(int)response.StatusCode} {response.ReasonPhrase}");
@@ -703,7 +703,7 @@ private async Task TusGetOffsetAsync(string uploadUrl, CancellationToken c
var request = new HttpRequestMessage(HttpMethod.Head, uploadUrl);
request.Headers.Add("Tus-Resumable", TusProtocolVersion);
- var response = await httpClient.SendAsync(request, cancellationToken);
+ var response = await httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
if (!response.IsSuccessStatusCode) {
return -1;
@@ -765,14 +765,14 @@ await ExecuteWithRetry(async () => {
using var assemblyStream = new MemoryStream(Encoding.UTF8.GetBytes(assemblyInfo));
var result = await _client.PutFile(assemblyPath, assemblyStream, new PutFileParameters {
CancellationToken = cancellationToken
- });
+ }).ConfigureAwait(false);
if (!result.IsSuccessful) {
throw new HttpRequestException($"Chunk assembly failed: {result.StatusCode}");
}
return true;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -781,12 +781,12 @@ await ExecuteWithRetry(async () => {
/// The relative path to the directory to create
/// Cancellation token to cancel the operation
public async Task CreateDirectoryAsync(string path, CancellationToken cancellationToken = default) {
- if (!await EnsureAuthenticated(cancellationToken))
+ if (!await EnsureAuthenticated(cancellationToken).ConfigureAwait(false))
throw new UnauthorizedAccessException("Authentication failed");
// Ensure root path exists first (if configured)
if (!string.IsNullOrEmpty(RootPath)) {
- await EnsureRootPathExistsAsync(cancellationToken);
+ await EnsureRootPathExistsAsync(cancellationToken).ConfigureAwait(false);
}
// Normalize the path
@@ -808,29 +808,29 @@ public async Task CreateDirectoryAsync(string path, CancellationToken cancellati
await ExecuteWithRetry(async () => {
// Check if directory already exists first
- if (await ExistsAsync(pathToCheck, cancellationToken)) {
+ if (await ExistsAsync(pathToCheck, cancellationToken).ConfigureAwait(false)) {
return true; // Directory already exists, skip creation
}
// Try to create the directory
var result = await _client.Mkcol(fullPath, new MkColParameters {
CancellationToken = cancellationToken
- });
+ }).ConfigureAwait(false);
// Treat 201 (Created), 405 (Already exists), and 409 (Conflict/race condition) as success
if (result.IsSuccessful || result.StatusCode == 201 || result.StatusCode == 405 || result.StatusCode == 409) {
// Verify the directory was actually created (with a short delay for server propagation)
- await Task.Delay(50, cancellationToken);
- if (await ExistsAsync(pathToCheck, cancellationToken)) {
+ await Task.Delay(50, cancellationToken).ConfigureAwait(false);
+ if (await ExistsAsync(pathToCheck, cancellationToken).ConfigureAwait(false)) {
return true;
}
// If it doesn't exist yet, give it more time and try again
- await Task.Delay(100, cancellationToken);
- return await ExistsAsync(pathToCheck, cancellationToken);
+ await Task.Delay(100, cancellationToken).ConfigureAwait(false);
+ return await ExistsAsync(pathToCheck, cancellationToken).ConfigureAwait(false);
}
throw new HttpRequestException($"Directory creation failed for {pathToCheck}: {result.StatusCode} {result.Description}");
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
}
@@ -843,7 +843,7 @@ await ExecuteWithRetry(async () => {
/// If the path is a directory, it will be deleted recursively along with all its contents
///
public async Task DeleteAsync(string path, CancellationToken cancellationToken = default) {
- if (!await EnsureAuthenticated(cancellationToken))
+ if (!await EnsureAuthenticated(cancellationToken).ConfigureAwait(false))
throw new UnauthorizedAccessException("Authentication failed");
var fullPath = GetFullPath(path);
@@ -851,13 +851,13 @@ public async Task DeleteAsync(string path, CancellationToken cancellationToken =
await ExecuteWithRetry(async () => {
var result = await _client.Delete(fullPath, new DeleteParameters {
CancellationToken = cancellationToken
- });
+ }).ConfigureAwait(false);
if (!result.IsSuccessful && result.StatusCode != 404) // 404 = already deleted
throw new HttpRequestException($"Delete failed: {result.StatusCode}");
return true;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -867,7 +867,7 @@ await ExecuteWithRetry(async () => {
/// The relative path to the target location
/// Cancellation token to cancel the operation
public async Task MoveAsync(string sourcePath, string targetPath, CancellationToken cancellationToken = default) {
- if (!await EnsureAuthenticated(cancellationToken))
+ if (!await EnsureAuthenticated(cancellationToken).ConfigureAwait(false))
throw new UnauthorizedAccessException("Authentication failed");
var sourceFullPath = GetFullPath(sourcePath);
@@ -876,20 +876,20 @@ public async Task MoveAsync(string sourcePath, string targetPath, CancellationTo
// Ensure target parent directory exists
var targetDirectory = Path.GetDirectoryName(targetPath);
if (!string.IsNullOrEmpty(targetDirectory)) {
- await CreateDirectoryAsync(targetDirectory, cancellationToken);
+ await CreateDirectoryAsync(targetDirectory, cancellationToken).ConfigureAwait(false);
}
await ExecuteWithRetry(async () => {
var result = await _client.Move(sourceFullPath, targetFullPath, new MoveParameters {
CancellationToken = cancellationToken
- });
+ }).ConfigureAwait(false);
if (!result.IsSuccessful) {
throw new HttpRequestException($"Move failed: {result.StatusCode}");
}
return true;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -899,7 +899,7 @@ await ExecuteWithRetry(async () => {
/// Cancellation token to cancel the operation
/// True if the file or directory exists, false otherwise
public async Task<bool> ExistsAsync(string path, CancellationToken cancellationToken = default) {
- if (!await EnsureAuthenticated(cancellationToken))
+ if (!await EnsureAuthenticated(cancellationToken).ConfigureAwait(false))
return false;
var fullPath = GetFullPath(path);
@@ -910,7 +910,7 @@ public async Task ExistsAsync(string path, CancellationToken cancellationT
// Use AllProperties for better compatibility with various WebDAV servers
RequestType = PropfindRequestType.AllProperties,
CancellationToken = cancellationToken
- });
+ }).ConfigureAwait(false);
// Check if the request was successful and we got at least one resource
if (!result.IsSuccessful || result.StatusCode == 404) {
@@ -919,7 +919,7 @@ public async Task ExistsAsync(string path, CancellationToken cancellationT
// Ensure we actually have resources in the response
return result.Resources.Count > 0;
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
} catch (HttpRequestException ex) when (ex.StatusCode == System.Net.HttpStatusCode.NotFound) {
return false;
} catch (Exception ex) {
@@ -934,14 +934,14 @@ public async Task ExistsAsync(string path, CancellationToken cancellationT
/// Cancellation token to cancel the operation
/// Storage information including total and used space, or -1 if not supported
public async Task GetStorageInfoAsync(CancellationToken cancellationToken = default) {
- if (!await EnsureAuthenticated(cancellationToken))
+ if (!await EnsureAuthenticated(cancellationToken).ConfigureAwait(false))
return new StorageInfo { TotalSpace = -1, UsedSpace = -1 };
return await ExecuteWithRetry(async () => {
// Try to get quota information from the root
var result = await _client.Propfind(_baseUrl, new PropfindParameters {
CancellationToken = cancellationToken
- });
+ }).ConfigureAwait(false);
if (result.IsSuccessful && result.Resources.Count != 0) {
var resource = result.Resources.First();
@@ -964,7 +964,7 @@ public async Task GetStorageInfoAsync(CancellationToken cancellatio
TotalSpace = -1,
UsedSpace = -1
};
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
///
@@ -983,18 +983,18 @@ public async Task GetStorageInfoAsync(CancellationToken cancellatio
///
public async Task ComputeHashAsync(string path, CancellationToken cancellationToken = default) {
// For Nextcloud/OCIS, try to get content-based checksum from properties
- var capabilities = await GetServerCapabilitiesAsync(cancellationToken);
+ var capabilities = await GetServerCapabilitiesAsync(cancellationToken).ConfigureAwait(false);
if (capabilities.IsNextcloud || capabilities.IsOcis) {
- var checksum = await GetServerChecksumAsync(path, cancellationToken);
+ var checksum = await GetServerChecksumAsync(path, cancellationToken).ConfigureAwait(false);
if (!string.IsNullOrEmpty(checksum))
return checksum;
}
// Compute SHA256 hash from file content (content-based, same for identical files)
- using var stream = await ReadFileAsync(path, cancellationToken);
+ using var stream = await ReadFileAsync(path, cancellationToken).ConfigureAwait(false);
using var sha256 = SHA256.Create();
- var hashBytes = await sha256.ComputeHashAsync(stream, cancellationToken);
+ var hashBytes = await sha256.ComputeHashAsync(stream, cancellationToken).ConfigureAwait(false);
return Convert.ToBase64String(hashBytes);
}
@@ -1024,7 +1024,7 @@ public async Task ComputeHashAsync(string path, CancellationToken cancel
var result = await _client.Propfind(fullPath, new PropfindParameters {
RequestType = PropfindRequestType.AllProperties,
CancellationToken = cancellationToken
- });
+ }).ConfigureAwait(false);
if (!result.IsSuccessful || result.Resources.Count == 0) {
return null;
@@ -1072,9 +1072,9 @@ private async Task DetectServerCapabilitiesAsync(Cancellatio
}
try {
- var response = await httpClient.GetAsync(statusUrl, cancellationToken);
+ var response = await httpClient.GetAsync(statusUrl, cancellationToken).ConfigureAwait(false);
if (response.IsSuccessStatusCode) {
- var json = await response.Content.ReadAsStringAsync(cancellationToken);
+ var json = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
using var doc = JsonDocument.Parse(json);
if (doc.RootElement.TryGetProperty("productname", out var productName)) {
@@ -1096,9 +1096,9 @@ private async Task DetectServerCapabilitiesAsync(Cancellatio
var capabilitiesUrl = $"{serverBase}/ocs/v1.php/cloud/capabilities";
try {
- var response = await httpClient.GetAsync(capabilitiesUrl, cancellationToken);
+ var response = await httpClient.GetAsync(capabilitiesUrl, cancellationToken).ConfigureAwait(false);
if (response.IsSuccessStatusCode) {
- var json = await response.Content.ReadAsStringAsync(cancellationToken);
+ var json = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
using var doc = JsonDocument.Parse(json);
// Check for chunking support
@@ -1226,7 +1226,7 @@ private async Task EnsureRootPathExistsAsync(CancellationToken cancellationToken
await ExecuteWithRetry(async () => {
var result = await _client.Mkcol(rootUrl, new MkColParameters {
CancellationToken = cancellationToken
- });
+ }).ConfigureAwait(false);
// Treat 201 (Created), 405 (Already exists), and 409 (Conflict) as success
if (result.IsSuccessful || result.StatusCode == 201 || result.StatusCode == 405 || result.StatusCode == 409) {
@@ -1235,12 +1235,12 @@ await ExecuteWithRetry(async () => {
}
throw new HttpRequestException($"Failed to create root path: {result.StatusCode} {result.Description}");
- }, cancellationToken);
+ }, cancellationToken).ConfigureAwait(false);
}
private async Task EnsureAuthenticated(CancellationToken cancellationToken) {
if (_oauth2Provider is not null) {
- return await AuthenticateAsync(cancellationToken);
+ return await AuthenticateAsync(cancellationToken).ConfigureAwait(false);
}
return true;
}
@@ -1251,13 +1251,13 @@ private async Task ExecuteWithRetry(Func> operation, CancellationT
for (int attempt = 0; attempt <= _maxRetries; attempt++) {
try {
cancellationToken.ThrowIfCancellationRequested();
- return await operation();
+ return await operation().ConfigureAwait(false);
} catch (Exception ex) when (attempt < _maxRetries && IsRetriableException(ex)) {
lastException = ex;
_logger.StorageOperationRetry("WebDAV", attempt + 1, _maxRetries);
// Exponential backoff: delay * 2^attempt (e.g., 1s, 2s, 4s, 8s...)
var delay = _retryDelay * (1 << attempt);
- await Task.Delay(delay, cancellationToken);
+ await Task.Delay(delay, cancellationToken).ConfigureAwait(false);
}
}
@@ -1314,9 +1314,9 @@ private void RaiseProgressChanged(string path, long completed, long total, Stora
/// Cancellation token to cancel the operation
/// A collection of remote changes detected since the specified time
public async Task> GetRemoteChangesAsync(DateTime since, CancellationToken cancellationToken = default) {
- var capabilities = await GetServerCapabilitiesAsync(cancellationToken);
+ var capabilities = await GetServerCapabilitiesAsync(cancellationToken).ConfigureAwait(false);
if (!capabilities.IsNextcloud && !capabilities.IsOcis) {
- return Array.Empty();
+ return [];
}
var changes = new List();
@@ -1337,18 +1337,18 @@ public async Task> GetRemoteChangesAsync(DateTime sinc
// OCS API requires this header
httpClient.DefaultRequestHeaders.Add("OCS-APIRequest", "true");
- var response = await httpClient.GetAsync(activityUrl, cancellationToken);
+ var response = await httpClient.GetAsync(activityUrl, cancellationToken).ConfigureAwait(false);
if (!response.IsSuccessStatusCode) {
- return Array.Empty();
+ return [];
}
- var json = await response.Content.ReadAsStringAsync(cancellationToken);
+ var json = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
using var doc = JsonDocument.Parse(json);
if (!doc.RootElement.TryGetProperty("ocs", out var ocs) ||
!ocs.TryGetProperty("data", out var data) ||
data.ValueKind != JsonValueKind.Array) {
- return Array.Empty();
+ return [];
}
foreach (var activity in data.EnumerateArray()) {
diff --git a/src/SharpSync/Sync/ChangeSet.cs b/src/SharpSync/Sync/ChangeSet.cs
index 4c7a537..0746005 100644
--- a/src/SharpSync/Sync/ChangeSet.cs
+++ b/src/SharpSync/Sync/ChangeSet.cs
@@ -3,9 +3,15 @@
namespace Oire.SharpSync.Sync;
///
-/// Represents a set of changes detected during synchronization
+/// Represents a set of changes detected during synchronization.
+/// Use to synchronize access during parallel operations.
///
internal sealed class ChangeSet {
+ ///
+ /// Lock object for thread-safe mutation during parallel directory scanning.
+ ///
+ internal readonly object SyncRoot = new();
+
public List Additions { get; } = [];
public List Modifications { get; } = [];
public List Deletions { get; } = [];
diff --git a/src/SharpSync/Sync/SyncEngine.cs b/src/SharpSync/Sync/SyncEngine.cs
index 040d176..b067f65 100644
--- a/src/SharpSync/Sync/SyncEngine.cs
+++ b/src/SharpSync/Sync/SyncEngine.cs
@@ -35,7 +35,7 @@ namespace Oire.SharpSync.Sync;
/// pause/resume from the main thread.
///
///
-public class SyncEngine: ISyncEngine {
+public sealed class SyncEngine: ISyncEngine {
private readonly ISyncStorage _localStorage;
private readonly ISyncStorage _remoteStorage;
private readonly ISyncDatabase _database;
@@ -183,7 +183,7 @@ public async Task SynchronizeAsync(SyncOptions? options = null, Canc
throw new ObjectDisposedException(nameof(SyncEngine));
}
- if (!await _syncSemaphore.WaitAsync(0, cancellationToken)) {
+ if (!await _syncSemaphore.WaitAsync(0, cancellationToken).ConfigureAwait(false)) {
throw new InvalidOperationException("Synchronization is already in progress");
}
@@ -211,7 +211,7 @@ public async Task SynchronizeAsync(SyncOptions? options = null, Canc
try {
// Phase 1: Fast change detection
RaiseProgress(new SyncProgress(), SyncOperation.Scanning);
- var changes = await DetectChangesAsync(options, syncToken);
+ var changes = await DetectChangesAsync(options, syncToken).ConfigureAwait(false);
if (changes.TotalChanges == 0) {
result.Success = true;
@@ -221,15 +221,14 @@ public async Task SynchronizeAsync(SyncOptions? options = null, Canc
// Phase 2: Process changes (respecting dry run mode)
RaiseProgress(new SyncProgress { TotalItems = changes.TotalChanges }, SyncOperation.Unknown);
- await ProcessChangesAsync(changes, options, result, syncToken);
+ await ProcessChangesAsync(changes, options, result, syncToken).ConfigureAwait(false);
// Phase 3: Update database state
- await UpdateDatabaseStateAsync(changes, syncToken);
+ await UpdateDatabaseStateAsync(changes, syncToken).ConfigureAwait(false);
result.Success = true;
_lastRemotePollTime = DateTime.UtcNow;
} catch (OperationCanceledException) {
- result.Error = new InvalidOperationException("Synchronization was cancelled");
throw;
} catch (Exception ex) {
result.Error = ex;
@@ -279,20 +278,20 @@ public async Task GetSyncPlanAsync(SyncOptions? options = null, Cancel
try {
// Detect changes
RaiseProgress(new SyncProgress(), SyncOperation.Scanning);
- var changes = await DetectChangesAsync(options, cancellationToken);
+ var changes = await DetectChangesAsync(options, cancellationToken).ConfigureAwait(false);
// Check cancellation after detection
cancellationToken.ThrowIfCancellationRequested();
// Incorporate pending changes from NotifyLocalChangeAsync calls
- await IncorporatePendingChangesAsync(changes, cancellationToken);
+ await IncorporatePendingChangesAsync(changes, cancellationToken).ConfigureAwait(false);
// Incorporate pending remote changes and poll remote storage
- await IncorporatePendingRemoteChangesAsync(changes, cancellationToken);
- await TryPollRemoteChangesAsync(changes, cancellationToken);
+ await IncorporatePendingRemoteChangesAsync(changes, cancellationToken).ConfigureAwait(false);
+ await TryPollRemoteChangesAsync(changes, cancellationToken).ConfigureAwait(false);
if (changes.TotalChanges == 0) {
- return new SyncPlan { Actions = Array.Empty() };
+ return new SyncPlan { Actions = [] };
}
// Analyze and prioritize changes
@@ -336,9 +335,9 @@ public async Task GetSyncPlanAsync(SyncOptions? options = null, Cancel
};
} catch (OperationCanceledException) {
throw;
- } catch (Exception) {
- // Return empty plan on error
- return new SyncPlan { Actions = Array.Empty() };
+ } catch (Exception ex) {
+ _logger.SyncPlanGenerationFailed(ex);
+ return new SyncPlan { Actions = [] };
}
}
@@ -358,10 +357,10 @@ private async Task IncorporatePendingChangesAsync(ChangeSet changeSet, Cancellat
case ChangeType.Created:
case ChangeType.Changed:
// Get the local item for additions/modifications
- var localItem = await TryGetItemAsync(_localStorage, pending.Path, cancellationToken);
+ var localItem = await TryGetItemAsync(_localStorage, pending.Path, cancellationToken).ConfigureAwait(false);
if (localItem is not null) {
changeSet.LocalPaths.Add(pending.Path);
- var tracked = await _database.GetSyncStateAsync(pending.Path, cancellationToken);
+ var tracked = await _database.GetSyncStateAsync(pending.Path, cancellationToken).ConfigureAwait(false);
if (tracked is null) {
// New file
changeSet.Additions.Add(new AdditionChange(pending.Path, localItem, IsLocal: true));
@@ -373,7 +372,7 @@ private async Task IncorporatePendingChangesAsync(ChangeSet changeSet, Cancellat
break;
case ChangeType.Deleted:
- var trackedForDelete = await _database.GetSyncStateAsync(pending.Path, cancellationToken);
+ var trackedForDelete = await _database.GetSyncStateAsync(pending.Path, cancellationToken).ConfigureAwait(false);
if (trackedForDelete is not null) {
changeSet.Deletions.Add(new DeletionChange(pending.Path, DeletedLocally: true, DeletedRemotely: false, trackedForDelete));
}
@@ -402,10 +401,10 @@ private async Task IncorporatePendingRemoteChangesAsync(ChangeSet changeSet, Can
case ChangeType.Created:
case ChangeType.Changed:
// Get the remote item for additions/modifications
- var remoteItem = await TryGetItemAsync(_remoteStorage, pending.Path, cancellationToken);
+ var remoteItem = await TryGetItemAsync(_remoteStorage, pending.Path, cancellationToken).ConfigureAwait(false);
if (remoteItem is not null) {
changeSet.RemotePaths.Add(pending.Path);
- var tracked = await _database.GetSyncStateAsync(pending.Path, cancellationToken);
+ var tracked = await _database.GetSyncStateAsync(pending.Path, cancellationToken).ConfigureAwait(false);
if (tracked is null) {
// New file on remote
changeSet.Additions.Add(new AdditionChange(pending.Path, remoteItem, IsLocal: false));
@@ -417,7 +416,7 @@ private async Task IncorporatePendingRemoteChangesAsync(ChangeSet changeSet, Can
break;
case ChangeType.Deleted:
- var trackedForDelete = await _database.GetSyncStateAsync(pending.Path, cancellationToken);
+ var trackedForDelete = await _database.GetSyncStateAsync(pending.Path, cancellationToken).ConfigureAwait(false);
if (trackedForDelete is not null) {
changeSet.Deletions.Add(new DeletionChange(pending.Path, DeletedLocally: false, DeletedRemotely: true, trackedForDelete));
}
@@ -436,7 +435,7 @@ private async Task IncorporatePendingRemoteChangesAsync(ChangeSet changeSet, Can
///
private async Task TryPollRemoteChangesAsync(ChangeSet changeSet, CancellationToken cancellationToken) {
try {
- var changes = await _remoteStorage.GetRemoteChangesAsync(_lastRemotePollTime, cancellationToken);
+ var changes = await _remoteStorage.GetRemoteChangesAsync(_lastRemotePollTime, cancellationToken).ConfigureAwait(false);
if (changes.Count == 0) {
return;
@@ -468,10 +467,10 @@ private async Task TryPollRemoteChangesAsync(ChangeSet changeSet, CancellationTo
switch (change.ChangeType) {
case ChangeType.Created:
case ChangeType.Changed:
- var remoteItem = await TryGetItemAsync(_remoteStorage, normalizedPath, cancellationToken);
+ var remoteItem = await TryGetItemAsync(_remoteStorage, normalizedPath, cancellationToken).ConfigureAwait(false);
if (remoteItem is not null) {
changeSet.RemotePaths.Add(normalizedPath);
- var tracked = await _database.GetSyncStateAsync(normalizedPath, cancellationToken);
+ var tracked = await _database.GetSyncStateAsync(normalizedPath, cancellationToken).ConfigureAwait(false);
if (tracked is null) {
changeSet.Additions.Add(new AdditionChange(normalizedPath, remoteItem, IsLocal: false));
} else {
@@ -481,7 +480,7 @@ private async Task TryPollRemoteChangesAsync(ChangeSet changeSet, CancellationTo
break;
case ChangeType.Deleted:
- var trackedForDelete = await _database.GetSyncStateAsync(normalizedPath, cancellationToken);
+ var trackedForDelete = await _database.GetSyncStateAsync(normalizedPath, cancellationToken).ConfigureAwait(false);
if (trackedForDelete is not null) {
changeSet.Deletions.Add(new DeletionChange(normalizedPath, DeletedLocally: false, DeletedRemotely: true, trackedForDelete));
}
@@ -504,14 +503,14 @@ private async Task DetectChangesAsync(SyncOptions? options, Cancellat
var changeSet = new ChangeSet();
// Get all tracked items from database
- var trackedItems = (await _database.GetAllSyncStatesAsync(cancellationToken))
+ var trackedItems = (await _database.GetAllSyncStatesAsync(cancellationToken).ConfigureAwait(false))
.ToDictionary(s => s.Path, StringComparer.OrdinalIgnoreCase);
// Scan local and remote in parallel
var localScanTask = ScanStorageAsync(_localStorage, trackedItems, true, changeSet, cancellationToken);
var remoteScanTask = ScanStorageAsync(_remoteStorage, trackedItems, false, changeSet, cancellationToken);
- await Task.WhenAll(localScanTask, remoteScanTask);
+ await Task.WhenAll(localScanTask, remoteScanTask).ConfigureAwait(false);
// Detect deletions by comparing DB state with current storage state
foreach (var tracked in trackedItems.Values) {
@@ -528,7 +527,7 @@ private async Task DetectChangesAsync(SyncOptions? options, Cancellat
// Handle DeleteExtraneous option - delete files that exist on remote but not locally
if (options?.DeleteExtraneous == true) {
- await DetectExtraneousFilesAsync(changeSet, cancellationToken);
+ await DetectExtraneousFilesAsync(changeSet, cancellationToken).ConfigureAwait(false);
}
if (options?.Verbose is true) {
@@ -562,7 +561,7 @@ private async Task DetectExtraneousFilesAsync(ChangeSet changeSet, CancellationT
// Check remote modifications too
foreach (var modification in changeSet.Modifications.Where(m => !m.IsLocal).ToList()) {
- if (!localPaths.Contains(modification.Path) && !await _localStorage.ExistsAsync(modification.Path, cancellationToken)) {
+ if (!localPaths.Contains(modification.Path) && !await _localStorage.ExistsAsync(modification.Path, cancellationToken).ConfigureAwait(false)) {
// Remote file modified but no local file - mark for deletion
changeSet.Modifications.Remove(modification);
changeSet.Deletions.Add(new DeletionChange(modification.Path, DeletedLocally: true, DeletedRemotely: false, modification.TrackedState));
@@ -580,7 +579,7 @@ private async Task ScanStorageAsync(
ChangeSet changeSet,
CancellationToken cancellationToken
) {
- await ScanDirectoryRecursiveAsync(storage, "", trackedItems, isLocal, changeSet, cancellationToken);
+ await ScanDirectoryRecursiveAsync(storage, "", trackedItems, isLocal, changeSet, cancellationToken).ConfigureAwait(false);
}
private async Task ScanDirectoryRecursiveAsync(
@@ -592,7 +591,7 @@ private async Task ScanDirectoryRecursiveAsync(
CancellationToken cancellationToken
) {
try {
- var items = await storage.ListItemsAsync(dirPath, cancellationToken);
+ var items = await storage.ListItemsAsync(dirPath, cancellationToken).ConfigureAwait(false);
var tasks = new List();
foreach (var item in items) {
@@ -605,24 +604,30 @@ CancellationToken cancellationToken
continue;
}
- changeSet.ProcessedPaths.Add(item.Path);
+ lock (changeSet.SyncRoot) {
+ changeSet.ProcessedPaths.Add(item.Path);
- // Track which side the item exists on
- if (isLocal) {
- changeSet.LocalPaths.Add(item.Path);
- } else {
- changeSet.RemotePaths.Add(item.Path);
+ // Track which side the item exists on
+ if (isLocal) {
+ changeSet.LocalPaths.Add(item.Path);
+ } else {
+ changeSet.RemotePaths.Add(item.Path);
+ }
}
- // Check if item is tracked
+ // Check if item is tracked (read-only lookup, no lock needed)
if (trackedItems.TryGetValue(item.Path, out var tracked)) {
- // Check for modifications
- if (await HasChangedAsync(storage, item, tracked, isLocal, cancellationToken)) {
- changeSet.Modifications.Add(new ModificationChange(item.Path, item, isLocal, tracked));
+ // Check for modifications (async, runs outside lock)
+ if (await HasChangedAsync(storage, item, tracked, isLocal, cancellationToken).ConfigureAwait(false)) {
+ lock (changeSet.SyncRoot) {
+ changeSet.Modifications.Add(new ModificationChange(item.Path, item, isLocal, tracked));
+ }
}
} else {
// New item
- changeSet.Additions.Add(new AdditionChange(item.Path, item, isLocal));
+ lock (changeSet.SyncRoot) {
+ changeSet.Additions.Add(new AdditionChange(item.Path, item, isLocal));
+ }
}
// Recursively scan directories
@@ -632,7 +637,7 @@ CancellationToken cancellationToken
}
if (tasks.Count != 0) {
- await Task.WhenAll(tasks);
+ await Task.WhenAll(tasks).ConfigureAwait(false);
}
} catch (Exception ex) when (ex is not OperationCanceledException) {
// Log error but continue scanning other directories
@@ -663,7 +668,7 @@ CancellationToken cancellationToken
// ChecksumOnly: compare only by checksum, skip timestamp checks
if ((_currentOptions?.ChecksumOnly ?? false) && !item.IsDirectory) {
- var hash = await storage.ComputeHashAsync(item.Path, cancellationToken);
+ var hash = await storage.ComputeHashAsync(item.Path, cancellationToken).ConfigureAwait(false);
return hash != tracked.LocalHash;
}
@@ -684,7 +689,7 @@ CancellationToken cancellationToken
// If using checksums, compute and compare
if (_useChecksums && !item.IsDirectory) {
- var hash = await storage.ComputeHashAsync(item.Path, cancellationToken);
+ var hash = await storage.ComputeHashAsync(item.Path, cancellationToken).ConfigureAwait(false);
return hash != tracked.LocalHash;
}
@@ -702,7 +707,7 @@ CancellationToken cancellationToken
// ChecksumOnly: compare only by checksum, skip timestamp checks
if ((_currentOptions?.ChecksumOnly ?? false) && !item.IsDirectory) {
- var hash = await storage.ComputeHashAsync(item.Path, cancellationToken);
+ var hash = await storage.ComputeHashAsync(item.Path, cancellationToken).ConfigureAwait(false);
return hash != tracked.RemoteHash;
}
@@ -723,7 +728,7 @@ CancellationToken cancellationToken
// If using checksums, compute and compare
if (_useChecksums && !item.IsDirectory) {
- var hash = await storage.ComputeHashAsync(item.Path, cancellationToken);
+ var hash = await storage.ComputeHashAsync(item.Path, cancellationToken).ConfigureAwait(false);
return hash != tracked.RemoteHash;
}
@@ -744,9 +749,9 @@ private async Task ProcessChangesAsync(ChangeSet changes, SyncOptions? options,
var actionGroups = AnalyzeAndPrioritizeChanges(changes, options);
// Process in phases for optimal efficiency
- await ProcessPhase1_DirectoriesAndSmallFilesAsync(actionGroups, threadSafeResult, progressCounter, totalChanges, cancellationToken);
- await ProcessPhase2_LargeFilesAsync(actionGroups, threadSafeResult, progressCounter, totalChanges, cancellationToken);
- await ProcessPhase3_DeletesAndConflictsAsync(actionGroups, threadSafeResult, progressCounter, totalChanges, cancellationToken);
+ await ProcessPhase1_DirectoriesAndSmallFilesAsync(actionGroups, threadSafeResult, progressCounter, totalChanges, cancellationToken).ConfigureAwait(false);
+ await ProcessPhase2_LargeFilesAsync(actionGroups, threadSafeResult, progressCounter, totalChanges, cancellationToken).ConfigureAwait(false);
+ await ProcessPhase3_DeletesAndConflictsAsync(actionGroups, threadSafeResult, progressCounter, totalChanges, cancellationToken).ConfigureAwait(false);
}
///
@@ -934,12 +939,12 @@ await Parallel.ForEachAsync(allSmallActions, parallelOptions, async (action, ct)
CurrentItem = action.Path
};
- if (!await CheckPausePointAsync(ct, currentProgress)) {
+ if (!await CheckPausePointAsync(ct, currentProgress).ConfigureAwait(false)) {
ct.ThrowIfCancellationRequested();
return;
}
- await ProcessActionAsync(action, result, ct);
+ await ProcessActionAsync(action, result, ct).ConfigureAwait(false);
var newCount = progressCounter.Increment();
@@ -955,7 +960,7 @@ await Parallel.ForEachAsync(allSmallActions, parallelOptions, async (action, ct)
result.IncrementFilesSkipped();
_logger.ProcessingError(ex, action.Path);
}
- });
+ }).ConfigureAwait(false);
if (_currentOptions?.Verbose is true) {
_logger.PhaseComplete(1, allSmallActions.Count);
@@ -984,7 +989,7 @@ private async Task ProcessPhase2_LargeFilesAsync(
tasks.Add(ProcessLargeFileAsync(action, result, progressCounter, totalChanges, semaphore, cancellationToken));
}
- await Task.WhenAll(tasks);
+ await Task.WhenAll(tasks).ConfigureAwait(false);
}
private async Task ProcessLargeFileAsync(
@@ -994,7 +999,7 @@ private async Task ProcessLargeFileAsync(
int totalChanges,
SemaphoreSlim semaphore,
CancellationToken cancellationToken) {
- await semaphore.WaitAsync(cancellationToken);
+ await semaphore.WaitAsync(cancellationToken).ConfigureAwait(false);
try {
// Check for pause point before processing large file
var currentProgress = new SyncProgress {
@@ -1003,7 +1008,7 @@ private async Task ProcessLargeFileAsync(
CurrentItem = action.Path
};
- if (!await CheckPausePointAsync(cancellationToken, currentProgress)) {
+ if (!await CheckPausePointAsync(cancellationToken, currentProgress).ConfigureAwait(false)) {
cancellationToken.ThrowIfCancellationRequested();
return;
}
@@ -1011,7 +1016,7 @@ private async Task ProcessLargeFileAsync(
// Report start of large file processing
RaiseProgress(currentProgress, GetOperationType(action.Type));
- await ProcessActionAsync(action, result, cancellationToken);
+ await ProcessActionAsync(action, result, cancellationToken).ConfigureAwait(false);
var newCount = progressCounter.Increment();
@@ -1049,12 +1054,12 @@ private async Task ProcessPhase3_DeletesAndConflictsAsync(
CurrentItem = action.Path
};
- if (!await CheckPausePointAsync(cancellationToken, currentProgress)) {
+ if (!await CheckPausePointAsync(cancellationToken, currentProgress).ConfigureAwait(false)) {
cancellationToken.ThrowIfCancellationRequested();
return;
}
- await ProcessActionAsync(action, result, cancellationToken);
+ await ProcessActionAsync(action, result, cancellationToken).ConfigureAwait(false);
var newCount = progressCounter.Increment();
RaiseProgress(new SyncProgress {
@@ -1082,12 +1087,12 @@ private async Task ProcessPhase3_DeletesAndConflictsAsync(
CurrentItem = action.Path
};
- if (!await CheckPausePointAsync(cancellationToken, currentProgress)) {
+ if (!await CheckPausePointAsync(cancellationToken, currentProgress).ConfigureAwait(false)) {
cancellationToken.ThrowIfCancellationRequested();
return;
}
- await ProcessActionAsync(action, result, cancellationToken);
+ await ProcessActionAsync(action, result, cancellationToken).ConfigureAwait(false);
var newCount = progressCounter.Increment();
RaiseProgress(new SyncProgress {
@@ -1114,23 +1119,23 @@ private async Task ProcessActionAsync(SyncAction action, ThreadSafeSyncResult re
try {
switch (action.Type) {
case SyncActionType.Download:
- await DownloadFileAsync(action, result, cancellationToken);
+ await DownloadFileAsync(action, result, cancellationToken).ConfigureAwait(false);
break;
case SyncActionType.Upload:
- await UploadFileAsync(action, result, cancellationToken);
+ await UploadFileAsync(action, result, cancellationToken).ConfigureAwait(false);
break;
case SyncActionType.DeleteLocal:
- await DeleteLocalAsync(action, result, cancellationToken);
+ await DeleteLocalAsync(action, result, cancellationToken).ConfigureAwait(false);
break;
case SyncActionType.DeleteRemote:
- await DeleteRemoteAsync(action, result, cancellationToken);
+ await DeleteRemoteAsync(action, result, cancellationToken).ConfigureAwait(false);
break;
case SyncActionType.Conflict:
- await ResolveConflictAsync(action, result, cancellationToken);
+ await ResolveConflictAsync(action, result, cancellationToken).ConfigureAwait(false);
break;
}
} catch (OperationCanceledException) {
@@ -1143,7 +1148,7 @@ private async Task ProcessActionAsync(SyncAction action, ThreadSafeSyncResult re
} finally {
// Log the operation unless it was cancelled
if (!cancellationToken.IsCancellationRequested) {
- await LogOperationAsync(action, startedAt, success, errorMessage);
+ await LogOperationAsync(action, startedAt, success, errorMessage).ConfigureAwait(false);
}
}
}
@@ -1185,7 +1190,7 @@ await _database.LogOperationAsync(
success,
errorMessage,
renamedFrom,
- renamedTo);
+ renamedTo).ConfigureAwait(false);
} catch (Exception ex) {
// Don't fail the sync operation if logging fails
_logger.OperationLoggingError(ex, action.Path);
@@ -1194,20 +1199,20 @@ await _database.LogOperationAsync(
private async Task DownloadFileAsync(SyncAction action, ThreadSafeSyncResult result, CancellationToken cancellationToken) {
if (action.RemoteItem!.IsDirectory) {
- await _localStorage.CreateDirectoryAsync(action.Path, cancellationToken);
+ await _localStorage.CreateDirectoryAsync(action.Path, cancellationToken).ConfigureAwait(false);
} else {
- using var remoteStream = await _remoteStorage.ReadFileAsync(action.Path, cancellationToken);
+ using var remoteStream = await _remoteStorage.ReadFileAsync(action.Path, cancellationToken).ConfigureAwait(false);
var streamToRead = WrapWithThrottling(remoteStream);
- await _localStorage.WriteFileAsync(action.Path, streamToRead, cancellationToken);
+ await _localStorage.WriteFileAsync(action.Path, streamToRead, cancellationToken).ConfigureAwait(false);
// Preserve timestamps if enabled
- await TryPreserveTimestampsAsync(_localStorage, action.Path, action.RemoteItem, cancellationToken);
+ await TryPreserveTimestampsAsync(_localStorage, action.Path, action.RemoteItem, cancellationToken).ConfigureAwait(false);
// Preserve permissions if enabled
- await TryPreservePermissionsAsync(_localStorage, action.Path, action.RemoteItem, cancellationToken);
+ await TryPreservePermissionsAsync(_localStorage, action.Path, action.RemoteItem, cancellationToken).ConfigureAwait(false);
// Invoke virtual file callback if enabled
- await TryInvokeVirtualFileCallbackAsync(action.Path, action.RemoteItem, cancellationToken);
+ await TryInvokeVirtualFileCallbackAsync(action.Path, action.RemoteItem, cancellationToken).ConfigureAwait(false);
}
result.IncrementFilesSynchronized();
@@ -1225,7 +1230,7 @@ private async Task TryInvokeVirtualFileCallbackAsync(string relativePath, SyncIt
// Construct the full local path from the storage root and relative path
var localFullPath = Path.Combine(_localStorage.RootPath, relativePath.Replace('/', Path.DirectorySeparatorChar));
- await _currentOptions.VirtualFileCallback(relativePath, localFullPath, fileMetadata, cancellationToken);
+ await _currentOptions.VirtualFileCallback(relativePath, localFullPath, fileMetadata, cancellationToken).ConfigureAwait(false);
// Update the item's virtual state to indicate it's now a placeholder
fileMetadata.VirtualState = VirtualFileState.Placeholder;
@@ -1237,36 +1242,36 @@ private async Task TryInvokeVirtualFileCallbackAsync(string relativePath, SyncIt
private async Task UploadFileAsync(SyncAction action, ThreadSafeSyncResult result, CancellationToken cancellationToken) {
if (action.LocalItem!.IsDirectory) {
- await _remoteStorage.CreateDirectoryAsync(action.Path, cancellationToken);
+ await _remoteStorage.CreateDirectoryAsync(action.Path, cancellationToken).ConfigureAwait(false);
} else {
- using var localStream = await _localStorage.ReadFileAsync(action.Path, cancellationToken);
+ using var localStream = await _localStorage.ReadFileAsync(action.Path, cancellationToken).ConfigureAwait(false);
var streamToRead = WrapWithThrottling(localStream);
- await _remoteStorage.WriteFileAsync(action.Path, streamToRead, cancellationToken);
+ await _remoteStorage.WriteFileAsync(action.Path, streamToRead, cancellationToken).ConfigureAwait(false);
// Preserve timestamps if enabled
- await TryPreserveTimestampsAsync(_remoteStorage, action.Path, action.LocalItem, cancellationToken);
+ await TryPreserveTimestampsAsync(_remoteStorage, action.Path, action.LocalItem, cancellationToken).ConfigureAwait(false);
// Preserve permissions if enabled
- await TryPreservePermissionsAsync(_remoteStorage, action.Path, action.LocalItem, cancellationToken);
+ await TryPreservePermissionsAsync(_remoteStorage, action.Path, action.LocalItem, cancellationToken).ConfigureAwait(false);
}
result.IncrementFilesSynchronized();
}
private async Task DeleteLocalAsync(SyncAction action, ThreadSafeSyncResult result, CancellationToken cancellationToken) {
- await _localStorage.DeleteAsync(action.Path, cancellationToken);
+ await _localStorage.DeleteAsync(action.Path, cancellationToken).ConfigureAwait(false);
result.IncrementFilesDeleted();
}
private async Task DeleteRemoteAsync(SyncAction action, ThreadSafeSyncResult result, CancellationToken cancellationToken) {
- await _remoteStorage.DeleteAsync(action.Path, cancellationToken);
+ await _remoteStorage.DeleteAsync(action.Path, cancellationToken).ConfigureAwait(false);
result.IncrementFilesDeleted();
}
private async Task ResolveConflictAsync(SyncAction action, ThreadSafeSyncResult result, CancellationToken cancellationToken) {
// Get full item details if needed
- var localItem = action.LocalItem ?? await _localStorage.GetItemAsync(action.Path, cancellationToken);
- var remoteItem = action.RemoteItem ?? await _remoteStorage.GetItemAsync(action.Path, cancellationToken);
+ var localItem = action.LocalItem ?? await _localStorage.GetItemAsync(action.Path, cancellationToken).ConfigureAwait(false);
+ var remoteItem = action.RemoteItem ?? await _remoteStorage.GetItemAsync(action.Path, cancellationToken).ConfigureAwait(false);
var conflictArgs = new FileConflictEventArgs(
action.Path,
@@ -1282,20 +1287,20 @@ private async Task ResolveConflictAsync(SyncAction action, ThreadSafeSyncResult
if (_currentOptions?.ConflictResolution is { } cr && cr != ConflictResolution.Ask) {
resolution = cr;
} else {
- resolution = await _conflictResolver.ResolveConflictAsync(conflictArgs, cancellationToken);
+ resolution = await _conflictResolver.ResolveConflictAsync(conflictArgs, cancellationToken).ConfigureAwait(false);
}
// Apply resolution
switch (resolution) {
case ConflictResolution.UseLocal:
if (action.LocalItem is not null) {
- await UploadFileAsync(action, result, cancellationToken);
+ await UploadFileAsync(action, result, cancellationToken).ConfigureAwait(false);
}
break;
case ConflictResolution.UseRemote:
if (action.RemoteItem is not null) {
- await DownloadFileAsync(action, result, cancellationToken);
+ await DownloadFileAsync(action, result, cancellationToken).ConfigureAwait(false);
}
break;
@@ -1306,27 +1311,27 @@ private async Task ResolveConflictAsync(SyncAction action, ThreadSafeSyncResult
case ConflictResolution.RenameLocal:
if (action.LocalItem is not null && action.RemoteItem is not null) {
// Generate unique conflict name for local file using computer name
- var conflictPath = await GenerateUniqueConflictNameAsync(action.Path, Environment.MachineName, _localStorage, cancellationToken);
+ var conflictPath = await GenerateUniqueConflictNameAsync(action.Path, Environment.MachineName, _localStorage, cancellationToken).ConfigureAwait(false);
// Move local file to conflict name
- await _localStorage.MoveAsync(action.Path, conflictPath, cancellationToken);
+ await _localStorage.MoveAsync(action.Path, conflictPath, cancellationToken).ConfigureAwait(false);
// Download remote file to original path
- await DownloadFileAsync(action, result, cancellationToken);
+ await DownloadFileAsync(action, result, cancellationToken).ConfigureAwait(false);
// Track the conflict file in database (exists locally, needs to be uploaded)
- var conflictItem = await _localStorage.GetItemAsync(conflictPath, cancellationToken);
+ var conflictItem = await _localStorage.GetItemAsync(conflictPath, cancellationToken).ConfigureAwait(false);
if (conflictItem is not null) {
var conflictState = new SyncState {
Path = conflictPath,
IsDirectory = conflictItem.IsDirectory,
Status = SyncStatus.LocalNew,
LastSyncTime = DateTime.UtcNow,
- LocalHash = conflictItem.IsDirectory ? null : (conflictItem.ETag ?? await _localStorage.ComputeHashAsync(conflictPath, cancellationToken)),
+ LocalHash = conflictItem.IsDirectory ? null : (conflictItem.ETag ?? await _localStorage.ComputeHashAsync(conflictPath, cancellationToken).ConfigureAwait(false)),
LocalSize = conflictItem.Size,
LocalModified = conflictItem.LastModified
};
- await _database.UpdateSyncStateAsync(conflictState, cancellationToken);
+ await _database.UpdateSyncStateAsync(conflictState, cancellationToken).ConfigureAwait(false);
}
} else {
result.IncrementFilesConflicted();
@@ -1336,27 +1341,27 @@ private async Task ResolveConflictAsync(SyncAction action, ThreadSafeSyncResult
case ConflictResolution.RenameRemote:
if (action.LocalItem is not null && action.RemoteItem is not null) {
// Generate unique conflict name for remote file using domain name
- var conflictPath = await GenerateUniqueConflictNameAsync(action.Path, GetDomainFromUrl(_remoteStorage.RootPath), _remoteStorage, cancellationToken);
+ var conflictPath = await GenerateUniqueConflictNameAsync(action.Path, GetDomainFromUrl(_remoteStorage.RootPath), _remoteStorage, cancellationToken).ConfigureAwait(false);
// Move remote file to conflict name
- await _remoteStorage.MoveAsync(action.Path, conflictPath, cancellationToken);
+ await _remoteStorage.MoveAsync(action.Path, conflictPath, cancellationToken).ConfigureAwait(false);
// Upload local file to original path
- await UploadFileAsync(action, result, cancellationToken);
+ await UploadFileAsync(action, result, cancellationToken).ConfigureAwait(false);
// Track the conflict file in database (exists remotely, needs to be downloaded)
- var conflictItem = await _remoteStorage.GetItemAsync(conflictPath, cancellationToken);
+ var conflictItem = await _remoteStorage.GetItemAsync(conflictPath, cancellationToken).ConfigureAwait(false);
if (conflictItem is not null) {
var conflictState = new SyncState {
Path = conflictPath,
IsDirectory = conflictItem.IsDirectory,
Status = SyncStatus.RemoteNew,
LastSyncTime = DateTime.UtcNow,
- RemoteHash = conflictItem.IsDirectory ? null : (conflictItem.ETag ?? await _remoteStorage.ComputeHashAsync(conflictPath, cancellationToken)),
+ RemoteHash = conflictItem.IsDirectory ? null : (conflictItem.ETag ?? await _remoteStorage.ComputeHashAsync(conflictPath, cancellationToken).ConfigureAwait(false)),
RemoteSize = conflictItem.Size,
RemoteModified = conflictItem.LastModified
};
- await _database.UpdateSyncStateAsync(conflictState, cancellationToken);
+ await _database.UpdateSyncStateAsync(conflictState, cancellationToken).ConfigureAwait(false);
}
} else {
result.IncrementFilesConflicted();
@@ -1393,7 +1398,7 @@ private static async Task GenerateUniqueConflictNameAsync(string path, s
: Path.Combine(directory, conflictFileName);
// Check if this path already exists
- if (!await storage.ExistsAsync(conflictPath, cancellationToken)) {
+ if (!await storage.ExistsAsync(conflictPath, cancellationToken).ConfigureAwait(false)) {
return conflictPath;
}
@@ -1404,7 +1409,7 @@ private static async Task GenerateUniqueConflictNameAsync(string path, s
? conflictFileName
: Path.Combine(directory, conflictFileName);
- if (!await storage.ExistsAsync(conflictPath, cancellationToken)) {
+ if (!await storage.ExistsAsync(conflictPath, cancellationToken).ConfigureAwait(false)) {
return conflictPath;
}
}
@@ -1427,7 +1432,7 @@ internal static string GetDomainFromUrl(string url) {
var uri = new Uri(url);
var host = uri.Host;
return string.IsNullOrEmpty(host) ? "remote" : host;
- } catch {
+ } catch (UriFormatException) {
return "remote";
}
}
@@ -1446,14 +1451,14 @@ private async Task UpdateDatabaseStateAsync(ChangeSet changes, CancellationToken
};
if (addition.IsLocal) {
- state.LocalHash = addition.Item.IsDirectory ? null : (addition.Item.ETag ?? await _localStorage.ComputeHashAsync(addition.Path, cancellationToken));
+ state.LocalHash = addition.Item.IsDirectory ? null : (addition.Item.ETag ?? await _localStorage.ComputeHashAsync(addition.Path, cancellationToken).ConfigureAwait(false));
state.LocalSize = addition.Item.Size;
state.LocalModified = addition.Item.LastModified;
state.RemoteHash = state.LocalHash;
state.RemoteSize = state.LocalSize;
state.RemoteModified = state.LocalModified;
} else {
- state.RemoteHash = addition.Item.IsDirectory ? null : (addition.Item.ETag ?? await _remoteStorage.ComputeHashAsync(addition.Path, cancellationToken));
+ state.RemoteHash = addition.Item.IsDirectory ? null : (addition.Item.ETag ?? await _remoteStorage.ComputeHashAsync(addition.Path, cancellationToken).ConfigureAwait(false));
state.RemoteSize = addition.Item.Size;
state.RemoteModified = addition.Item.LastModified;
state.LocalHash = state.RemoteHash;
@@ -1471,14 +1476,14 @@ private async Task UpdateDatabaseStateAsync(ChangeSet changes, CancellationToken
state.LastSyncTime = DateTime.UtcNow;
if (mod.IsLocal) {
- state.LocalHash = mod.Item.IsDirectory ? null : (mod.Item.ETag ?? await _localStorage.ComputeHashAsync(mod.Path, cancellationToken));
+ state.LocalHash = mod.Item.IsDirectory ? null : (mod.Item.ETag ?? await _localStorage.ComputeHashAsync(mod.Path, cancellationToken).ConfigureAwait(false));
state.LocalSize = mod.Item.Size;
state.LocalModified = mod.Item.LastModified;
state.RemoteHash = state.LocalHash;
state.RemoteSize = state.LocalSize;
state.RemoteModified = state.LocalModified;
} else {
- state.RemoteHash = mod.Item.IsDirectory ? null : (mod.Item.ETag ?? await _remoteStorage.ComputeHashAsync(mod.Path, cancellationToken));
+ state.RemoteHash = mod.Item.IsDirectory ? null : (mod.Item.ETag ?? await _remoteStorage.ComputeHashAsync(mod.Path, cancellationToken).ConfigureAwait(false));
state.RemoteSize = mod.Item.Size;
state.RemoteModified = mod.Item.LastModified;
state.LocalHash = state.RemoteHash;
@@ -1500,7 +1505,7 @@ private async Task UpdateDatabaseStateAsync(ChangeSet changes, CancellationToken
}
}
- await Task.WhenAll(updates);
+ await Task.WhenAll(updates).ConfigureAwait(false);
}
private static SyncOperation GetOperationType(SyncActionType actionType) => actionType switch {
@@ -1549,7 +1554,7 @@ private void OnStorageProgressChanged(object? sender, StorageProgressEventArgs e
/// Cancellation token to cancel the operation.
/// Database statistics including total files, directories, and sizes.
public async Task GetStatsAsync(CancellationToken cancellationToken = default) {
- return await _database.GetStatsAsync(cancellationToken);
+ return await _database.GetStatsAsync(cancellationToken).ConfigureAwait(false);
}
///
@@ -1561,7 +1566,7 @@ public async Task GetStatsAsync(CancellationToken cancellationTok
/// This is useful when the sync state becomes corrupted or when starting fresh.
///
public async Task ResetSyncStateAsync(CancellationToken cancellationToken = default) {
- await _database.ClearAsync(cancellationToken);
+ await _database.ClearAsync(cancellationToken).ConfigureAwait(false);
}
///
@@ -1688,7 +1693,7 @@ private async Task CheckPausePointAsync(CancellationToken cancellationToke
}
// Run the blocking wait on a thread pool thread to not block async context
- return await Task.Run(() => CheckPausePoint(cancellationToken, currentProgress), cancellationToken);
+ return await Task.Run(() => CheckPausePoint(cancellationToken, currentProgress), cancellationToken).ConfigureAwait(false);
}
///
@@ -1703,7 +1708,7 @@ public async Task SyncFolderAsync(string folderPath, SyncOptions? op
throw new ObjectDisposedException(nameof(SyncEngine));
}
- if (!await _syncSemaphore.WaitAsync(0, cancellationToken)) {
+ if (!await _syncSemaphore.WaitAsync(0, cancellationToken).ConfigureAwait(false)) {
throw new InvalidOperationException("Synchronization is already in progress");
}
@@ -1733,7 +1738,7 @@ public async Task SyncFolderAsync(string folderPath, SyncOptions? op
var normalizedPath = NormalizePath(folderPath);
RaiseProgress(new SyncProgress { CurrentItem = normalizedPath }, SyncOperation.Scanning);
- var changes = await DetectChangesForPathAsync(normalizedPath, options, syncToken);
+ var changes = await DetectChangesForPathAsync(normalizedPath, options, syncToken).ConfigureAwait(false);
if (changes.TotalChanges == 0) {
result.Success = true;
@@ -1742,12 +1747,11 @@ public async Task SyncFolderAsync(string folderPath, SyncOptions? op
RaiseProgress(new SyncProgress { TotalItems = changes.TotalChanges }, SyncOperation.Unknown);
- await ProcessChangesAsync(changes, options, result, syncToken);
- await UpdateDatabaseStateAsync(changes, syncToken);
+ await ProcessChangesAsync(changes, options, result, syncToken).ConfigureAwait(false);
+ await UpdateDatabaseStateAsync(changes, syncToken).ConfigureAwait(false);
result.Success = true;
} catch (OperationCanceledException) {
- result.Error = new InvalidOperationException("Synchronization was cancelled");
throw;
} catch (Exception ex) {
result.Error = ex;
@@ -1789,7 +1793,7 @@ public async Task SyncFilesAsync(IEnumerable filePaths, Sync
return new SyncResult { Success = true };
}
- if (!await _syncSemaphore.WaitAsync(0, cancellationToken)) {
+ if (!await _syncSemaphore.WaitAsync(0, cancellationToken).ConfigureAwait(false)) {
throw new InvalidOperationException("Synchronization is already in progress");
}
@@ -1816,7 +1820,7 @@ public async Task SyncFilesAsync(IEnumerable filePaths, Sync
try {
RaiseProgress(new SyncProgress { TotalItems = pathList.Count }, SyncOperation.Scanning);
- var changes = await DetectChangesForFilesAsync(pathList, options, syncToken);
+ var changes = await DetectChangesForFilesAsync(pathList, options, syncToken).ConfigureAwait(false);
if (changes.TotalChanges == 0) {
result.Success = true;
@@ -1825,12 +1829,11 @@ public async Task SyncFilesAsync(IEnumerable filePaths, Sync
RaiseProgress(new SyncProgress { TotalItems = changes.TotalChanges }, SyncOperation.Unknown);
- await ProcessChangesAsync(changes, options, result, syncToken);
- await UpdateDatabaseStateAsync(changes, syncToken);
+ await ProcessChangesAsync(changes, options, result, syncToken).ConfigureAwait(false);
+ await UpdateDatabaseStateAsync(changes, syncToken).ConfigureAwait(false);
result.Success = true;
} catch (OperationCanceledException) {
- result.Error = new InvalidOperationException("Synchronization was cancelled");
throw;
} catch (Exception ex) {
result.Error = ex;
@@ -1927,7 +1930,7 @@ public async Task> GetPendingOperationsAsync(Can
SyncItem? item = null;
if (pending.ChangeType != ChangeType.Deleted) {
try {
- item = await _localStorage.GetItemAsync(pending.Path, cancellationToken);
+ item = await _localStorage.GetItemAsync(pending.Path, cancellationToken).ConfigureAwait(false);
} catch (Exception ex) {
_logger.PendingChangeItemNotFound(ex, pending.Path);
}
@@ -1962,7 +1965,7 @@ public async Task> GetPendingOperationsAsync(Can
SyncItem? item = null;
if (pending.ChangeType != ChangeType.Deleted) {
try {
- item = await _remoteStorage.GetItemAsync(pending.Path, cancellationToken);
+ item = await _remoteStorage.GetItemAsync(pending.Path, cancellationToken).ConfigureAwait(false);
} catch (Exception ex) {
_logger.PendingChangeItemNotFound(ex, pending.Path);
}
@@ -1982,7 +1985,7 @@ public async Task> GetPendingOperationsAsync(Can
}
// Also include items from database that are not synced
- var pendingStates = await _database.GetPendingSyncStatesAsync(cancellationToken);
+ var pendingStates = await _database.GetPendingSyncStatesAsync(cancellationToken).ConfigureAwait(false);
foreach (var state in pendingStates) {
cancellationToken.ThrowIfCancellationRequested();
@@ -2027,14 +2030,14 @@ private async Task DetectChangesForPathAsync(string folderPath, SyncO
var changeSet = new ChangeSet();
// Get tracked items for this folder prefix only
- var trackedItems = (await _database.GetSyncStatesByPrefixAsync(folderPath, cancellationToken))
+ var trackedItems = (await _database.GetSyncStatesByPrefixAsync(folderPath, cancellationToken).ConfigureAwait(false))
.ToDictionary(s => s.Path, StringComparer.OrdinalIgnoreCase);
// Scan only the specified folder on both sides
var localScanTask = ScanFolderAsync(_localStorage, folderPath, trackedItems, true, changeSet, cancellationToken);
var remoteScanTask = ScanFolderAsync(_remoteStorage, folderPath, trackedItems, false, changeSet, cancellationToken);
- await Task.WhenAll(localScanTask, remoteScanTask);
+ await Task.WhenAll(localScanTask, remoteScanTask).ConfigureAwait(false);
// Detect deletions within the folder
foreach (var tracked in trackedItems.Values) {
@@ -2053,7 +2056,7 @@ private async Task DetectChangesForPathAsync(string folderPath, SyncO
}
if (options?.DeleteExtraneous == true) {
- await DetectExtraneousFilesAsync(changeSet, cancellationToken);
+ await DetectExtraneousFilesAsync(changeSet, cancellationToken).ConfigureAwait(false);
}
return changeSet;
@@ -2071,11 +2074,11 @@ private async Task ScanFolderAsync(
CancellationToken cancellationToken) {
try {
// First check if the folder exists
- if (!await storage.ExistsAsync(folderPath, cancellationToken)) {
+ if (!await storage.ExistsAsync(folderPath, cancellationToken).ConfigureAwait(false)) {
return;
}
- await ScanDirectoryRecursiveAsync(storage, folderPath, trackedItems, isLocal, changeSet, cancellationToken);
+ await ScanDirectoryRecursiveAsync(storage, folderPath, trackedItems, isLocal, changeSet, cancellationToken).ConfigureAwait(false);
} catch (Exception ex) when (ex is not OperationCanceledException) {
_logger.DirectoryScanError(ex, folderPath);
}
@@ -2096,11 +2099,11 @@ private async Task DetectChangesForFilesAsync(List filePaths,
}
// Get tracked state for this file
- var tracked = await _database.GetSyncStateAsync(path, cancellationToken);
+ var tracked = await _database.GetSyncStateAsync(path, cancellationToken).ConfigureAwait(false);
// Check local and remote existence and state
- var localItem = await TryGetItemAsync(_localStorage, path, cancellationToken);
- var remoteItem = await TryGetItemAsync(_remoteStorage, path, cancellationToken);
+ var localItem = await TryGetItemAsync(_localStorage, path, cancellationToken).ConfigureAwait(false);
+ var remoteItem = await TryGetItemAsync(_remoteStorage, path, cancellationToken).ConfigureAwait(false);
if (localItem is not null) {
changeSet.LocalPaths.Add(path);
@@ -2128,8 +2131,8 @@ private async Task DetectChangesForFilesAsync(List filePaths,
changeSet.Deletions.Add(new DeletionChange(path, DeletedLocally: false, DeletedRemotely: true, tracked));
} else {
// Both exist - check for modifications
- var localChanged = await HasChangedAsync(_localStorage, localItem, tracked, true, cancellationToken);
- var remoteChanged = await HasChangedAsync(_remoteStorage, remoteItem, tracked, false, cancellationToken);
+ var localChanged = await HasChangedAsync(_localStorage, localItem, tracked, true, cancellationToken).ConfigureAwait(false);
+ var remoteChanged = await HasChangedAsync(_remoteStorage, remoteItem, tracked, false, cancellationToken).ConfigureAwait(false);
if (localChanged) {
changeSet.Modifications.Add(new ModificationChange(path, localItem, IsLocal: true, tracked));
@@ -2149,13 +2152,14 @@ private async Task DetectChangesForFilesAsync(List filePaths,
///
/// Tries to get an item from storage, returning null if it doesn't exist.
///
- private static async Task TryGetItemAsync(ISyncStorage storage, string path, CancellationToken cancellationToken) {
+ private async Task TryGetItemAsync(ISyncStorage storage, string path, CancellationToken cancellationToken) {
try {
- if (!await storage.ExistsAsync(path, cancellationToken)) {
+ if (!await storage.ExistsAsync(path, cancellationToken).ConfigureAwait(false)) {
return null;
}
- return await storage.GetItemAsync(path, cancellationToken);
- } catch {
+ return await storage.GetItemAsync(path, cancellationToken).ConfigureAwait(false);
+ } catch (Exception ex) when (ex is not OperationCanceledException) {
+ _logger.StorageItemRetrievalFailed(ex, path);
return null;
}
}
@@ -2433,7 +2437,7 @@ public async Task> GetRecentOperationsAsync(
throw new ObjectDisposedException(nameof(SyncEngine));
}
- return await _database.GetRecentOperationsAsync(limit, since, cancellationToken);
+ return await _database.GetRecentOperationsAsync(limit, since, cancellationToken).ConfigureAwait(false);
}
///
@@ -2444,7 +2448,7 @@ public async Task ClearOperationHistoryAsync(DateTime olderThan, Cancellati
throw new ObjectDisposedException(nameof(SyncEngine));
}
- return await _database.ClearOperationHistoryAsync(olderThan, cancellationToken);
+ return await _database.ClearOperationHistoryAsync(olderThan, cancellationToken).ConfigureAwait(false);
}
///
@@ -2456,7 +2460,7 @@ private async Task TryPreserveTimestampsAsync(ISyncStorage storage, string path,
}
try {
- await storage.SetLastModifiedAsync(path, sourceItem.LastModified, cancellationToken);
+ await storage.SetLastModifiedAsync(path, sourceItem.LastModified, cancellationToken).ConfigureAwait(false);
} catch (Exception ex) {
_logger.TimestampPreservationError(ex, path);
}
@@ -2471,7 +2475,7 @@ private async Task TryPreservePermissionsAsync(ISyncStorage storage, string path
}
try {
- await storage.SetPermissionsAsync(path, sourceItem.Permissions, cancellationToken);
+ await storage.SetPermissionsAsync(path, sourceItem.Permissions, cancellationToken).ConfigureAwait(false);
} catch (Exception ex) {
_logger.PermissionPreservationError(ex, path);
}
@@ -2527,6 +2531,8 @@ public void Dispose() {
_pauseEvent?.Dispose();
_disposed = true;
}
+
+ GC.SuppressFinalize(this);
}
}
diff --git a/src/SharpSync/Sync/SyncFilter.cs b/src/SharpSync/Sync/SyncFilter.cs
index 227e4ca..12a76ef 100644
--- a/src/SharpSync/Sync/SyncFilter.cs
+++ b/src/SharpSync/Sync/SyncFilter.cs
@@ -1,16 +1,28 @@
using System.Text.RegularExpressions;
+using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Logging.Abstractions;
using Oire.SharpSync.Core;
+using Oire.SharpSync.Logging;
namespace Oire.SharpSync.Sync;
///
-/// Default implementation of sync filter with pattern matching
+/// Default implementation of sync filter with pattern matching.
///
-public class SyncFilter: ISyncFilter {
+public sealed class SyncFilter: ISyncFilter {
private readonly List _excludePatterns = new();
private readonly List _includePatterns = new();
private readonly List _excludeRegexes = new();
private readonly List _includeRegexes = new();
+ private readonly ILogger _logger;
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ /// Optional logger for diagnostic output.
+ public SyncFilter(ILogger? logger = null) {
+ _logger = logger ?? NullLogger.Instance;
+ }
///
/// Determines whether a file or directory should be synchronized
@@ -85,10 +97,10 @@ public void AddExclusionPattern(string pattern) {
// If it looks like a regex (contains regex special chars), compile it
if (IsRegexPattern(pattern)) {
try {
- var regex = new Regex(pattern, RegexOptions.IgnoreCase | RegexOptions.Compiled);
+ var regex = new Regex(pattern, RegexOptions.IgnoreCase | RegexOptions.NonBacktracking);
_excludeRegexes.Add(regex);
- } catch {
- // If regex compilation fails, treat as wildcard
+ } catch (ArgumentException ex) {
+ _logger.SyncFilterRegexCompilationFailed(ex, pattern);
_excludePatterns.Add(pattern);
}
} else {
@@ -114,10 +126,10 @@ public void AddInclusionPattern(string pattern) {
// If it looks like a regex, compile it
if (IsRegexPattern(pattern)) {
try {
- var regex = new Regex(pattern, RegexOptions.IgnoreCase | RegexOptions.Compiled);
+ var regex = new Regex(pattern, RegexOptions.IgnoreCase | RegexOptions.NonBacktracking);
_includeRegexes.Add(regex);
- } catch {
- // If regex compilation fails, treat as wildcard
+ } catch (ArgumentException ex) {
+ _logger.SyncFilterRegexCompilationFailed(ex, pattern);
_includePatterns.Add(pattern);
}
} else {
@@ -221,7 +233,7 @@ private static bool MatchesWildcard(string path, string pattern) {
regexPattern = regexPattern.Replace("^(.*/)\\?", "(.*/)");
}
- return Regex.IsMatch(path, regexPattern, RegexOptions.IgnoreCase);
+ return Regex.IsMatch(path, regexPattern, RegexOptions.IgnoreCase | RegexOptions.NonBacktracking);
}
private static bool IsRegexPattern(string pattern) {
diff --git a/tests/SharpSync.Tests/Core/ConflictAnalysisTests.cs b/tests/SharpSync.Tests/Core/ConflictAnalysisTests.cs
index 3f6881a..bfd40e2 100644
--- a/tests/SharpSync.Tests/Core/ConflictAnalysisTests.cs
+++ b/tests/SharpSync.Tests/Core/ConflictAnalysisTests.cs
@@ -40,7 +40,7 @@ public void Constructor_AllProperties_InitializesCorrectly() {
SizeDifference = 1024,
LocalModified = localModified,
RemoteModified = remoteModified,
- TimeDifference = 3600,
+ TimeDifference = TimeSpan.FromSeconds(3600),
NewerVersion = "Remote",
IsLikelyBinary = false,
IsLikelyTextFile = true
@@ -57,7 +57,7 @@ public void Constructor_AllProperties_InitializesCorrectly() {
Assert.Equal(1024, analysis.SizeDifference);
Assert.Equal(localModified, analysis.LocalModified);
Assert.Equal(remoteModified, analysis.RemoteModified);
- Assert.Equal(3600, analysis.TimeDifference);
+ Assert.Equal(TimeSpan.FromSeconds(3600), analysis.TimeDifference);
Assert.Equal("Remote", analysis.NewerVersion);
Assert.False(analysis.IsLikelyBinary);
Assert.True(analysis.IsLikelyTextFile);
@@ -219,11 +219,11 @@ public void TimeDifference_Zero_IsAllowed() {
var analysis = new ConflictAnalysis {
FilePath = "test.txt",
ConflictType = ConflictType.BothModified,
- TimeDifference = 0
+ TimeDifference = TimeSpan.Zero
};
// Assert
- Assert.Equal(0, analysis.TimeDifference);
+ Assert.Equal(TimeSpan.Zero, analysis.TimeDifference);
}
[Fact]
diff --git a/tests/SharpSync.Tests/Core/SmartConflictResolverTests.cs b/tests/SharpSync.Tests/Core/SmartConflictResolverTests.cs
index e3ce65e..e0ec376 100644
--- a/tests/SharpSync.Tests/Core/SmartConflictResolverTests.cs
+++ b/tests/SharpSync.Tests/Core/SmartConflictResolverTests.cs
@@ -378,7 +378,7 @@ public async Task ResolveConflictAsync_TimeDifference_CalculatesCorrectly() {
// Assert
Assert.NotNull(capturedAnalysis);
- Assert.Equal(600, capturedAnalysis.TimeDifference, 1.0); // Allow 1 second tolerance
+ Assert.InRange(capturedAnalysis.TimeDifference, TimeSpan.FromSeconds(599), TimeSpan.FromSeconds(601)); // Allow 1 second tolerance
Assert.Equal("Remote", capturedAnalysis.NewerVersion);
}
diff --git a/tests/SharpSync.Tests/Sync/SyncEngineTests.cs b/tests/SharpSync.Tests/Sync/SyncEngineTests.cs
index 744fa1e..628cfbc 100644
--- a/tests/SharpSync.Tests/Sync/SyncEngineTests.cs
+++ b/tests/SharpSync.Tests/Sync/SyncEngineTests.cs
@@ -1,6 +1,6 @@
namespace Oire.SharpSync.Tests.Sync;
-public class SyncEngineTests: IDisposable {
+public class SyncEngineTests: IAsyncLifetime {
private readonly string _localRootPath;
private readonly string _remoteRootPath;
private readonly string _dbPath;
@@ -8,12 +8,12 @@ public class SyncEngineTests: IDisposable {
private readonly LocalFileStorage _remoteStorage;
private readonly SqliteSyncDatabase _database;
private readonly SyncEngine _syncEngine;
- private static readonly string[] filePaths = new[] { "singlefile.txt" };
- private static readonly string[] filePathsArray = new[] { "sync1.txt", "sync2.txt" };
- private static readonly string[] filePathsArray0 = new[] { "SubDir/subfile.txt" };
- private static readonly string[] nonexistentFilePaths = new[] { "nonexistent.txt" };
- private static readonly string[] singleFilePaths = new[] { "file.txt" };
- private static readonly string[] clearmeFilePaths = new[] { "clearme.txt" };
+ private static readonly string[] filePaths = ["singlefile.txt"];
+ private static readonly string[] filePathsArray = ["sync1.txt", "sync2.txt"];
+ private static readonly string[] filePathsArray0 = ["SubDir/subfile.txt"];
+ private static readonly string[] nonexistentFilePaths = ["nonexistent.txt"];
+ private static readonly string[] singleFilePaths = ["file.txt"];
+ private static readonly string[] clearmeFilePaths = ["clearme.txt"];
public SyncEngineTests() {
_localRootPath = Path.Combine(Path.GetTempPath(), "SharpSyncTests", "Local", Guid.NewGuid().ToString());
@@ -25,14 +25,17 @@ public SyncEngineTests() {
_localStorage = new LocalFileStorage(_localRootPath);
_remoteStorage = new LocalFileStorage(_remoteRootPath);
_database = new SqliteSyncDatabase(_dbPath);
- _database.InitializeAsync().GetAwaiter().GetResult();
var filter = new SyncFilter();
var conflictResolver = new DefaultConflictResolver(ConflictResolution.UseLocal);
_syncEngine = new SyncEngine(_localStorage, _remoteStorage, _database, conflictResolver, filter);
}
- public void Dispose() {
+ public async Task InitializeAsync() {
+ await _database.InitializeAsync();
+ }
+
+ public Task DisposeAsync() {
_syncEngine?.Dispose();
_database?.Dispose();
@@ -47,6 +50,8 @@ public void Dispose() {
if (File.Exists(_dbPath)) {
File.Delete(_dbPath);
}
+
+ return Task.CompletedTask;
}
[Fact]
diff --git a/tests/SharpSync.Tests/SyncOptionsTests.cs b/tests/SharpSync.Tests/SyncOptionsTests.cs
index 0532a24..2317b0d 100644
--- a/tests/SharpSync.Tests/SyncOptionsTests.cs
+++ b/tests/SharpSync.Tests/SyncOptionsTests.cs
@@ -39,7 +39,9 @@ public void Properties_CanBeSetAndRetrieved() {
options.UpdateExisting = false;
options.ConflictResolution = ConflictResolution.UseLocal;
options.TimeoutSeconds = 300;
- options.ExcludePatterns.AddRange(excludePatterns);
+ foreach (var pattern in excludePatterns) {
+ options.ExcludePatterns.Add(pattern);
+ }
// Assert
Assert.False(options.PreservePermissions);