diff --git a/.claude/settings.json b/.claude/settings.json new file mode 100644 index 0000000..79aaaa9 --- /dev/null +++ b/.claude/settings.json @@ -0,0 +1,16 @@ +{ + "hooks": { + "PostToolUse": [ + { + "matcher": "Edit|Write", + "hooks": [ + { + "type": "command", + "command": "jq -r '.tool_input.file_path' | { read file_path; if [[ \"$file_path\" == *.rs ]]; then cargo fmt 2>/dev/null || true; fi; }", + "timeout": 30 + } + ] + } + ] + } +} \ No newline at end of file diff --git a/.env.example b/.env.example index cc84191..09ea00d 100644 --- a/.env.example +++ b/.env.example @@ -1,7 +1,19 @@ -DATABASE_URL=postgres://user:password@localhost:5432/dbname +DATABASE_URL=postgres://postgres:develop@localhost:5432/postgres # or DATABASE_HOST=localhost DATABASE_PORT=5432 -DATABASE_USER=user -DATABASE_PASSWORD=password -DATABASE_NAME=dbname \ No newline at end of file +DATABASE_USER=postgres +DATABASE_PASSWORD=develop +DATABASE_NAME=postgres + +HYDRA_ISSUER_URL=http://localhost:4444/ +HYDRA_ADMIN_URL=http://localhost:4445/ +KRATOS_PUBLIC_URL=http://localhost:4433/ +EXPECTED_AUDIENCE=account + +KETO_READ_URL=http://localhost:4466 +KETO_WRITE_URL=http://localhost:4467 + +REDIS_URL=redis://localhost:6379 +# or +REDIS_HOST=localhost \ No newline at end of file diff --git a/.gitignore b/.gitignore index a7510ff..1a5d6c7 100644 --- a/.gitignore +++ b/.gitignore @@ -2,4 +2,6 @@ .idea .direnv /migrations/dbml-error.log -.env \ No newline at end of file +.env +logs +master-key-password diff --git a/.review/keycloak-to-ory-migration.md b/.review/keycloak-to-ory-migration.md new file mode 100644 index 0000000..c1a4704 --- /dev/null +++ b/.review/keycloak-to-ory-migration.md @@ -0,0 +1,183 @@ +--- +feature: keycloak-to-ory-migration +started: 2026-03-11 +phase: implementing +--- + +# Keycloak to Ory Migration (Phase 1: Kratos + Hydra) + +## Requirements + +### Purpose + +Replace Keycloak with Ory Kratos (identity management) + Ory Hydra (OAuth2/OIDC) to reduce infrastructure weight. 
Enable self-service registration. Login/Consent UI is ShuttlePub frontend's responsibility. + +### Scope - Do + +- Remove all Keycloak dependencies from server layer (`axum-keycloak-auth`, `KeycloakAuthLayer`, `KeycloakToken`, `expect_role!` macro) +- Implement JWT validation middleware for Hydra-issued tokens + - Validate: `iss`, `aud`, `exp`, `sub` + - JWKS: auto-resolve via OIDC Discovery, in-memory cache, re-fetch on kid miss (rate-limited), lazy init on startup failure + - Audience: configurable via `EXPECTED_AUDIENCE` env var +- Implement Login/Consent Provider backend in server layer (Hydra login/consent accept/reject API) +- Value mapping: `AuthHost.url` = Hydra issuer URL, `AuthAccount.client_id` = Kratos identity UUID (JWT `sub`) +- Simplify authorization to auth-check only (JWT validity). Role-based authz deferred to Phase 2 (Keto) +- Dev environment: Keycloak -> Kratos + Hydra containers, self-service registration enabled +- Kratos identity schema: minimal (email + password) +- Seed data: test user (email: testuser@example.com / password: testuser) via Kratos import +- Existing AuthHost/AuthAccount data: discard (dev only, no migration) +- Env vars: `HYDRA_ISSUER_URL`, `HYDRA_ADMIN_URL`, `EXPECTED_AUDIENCE` +- Update server-layer auth tests. JWT unit tests use test RSA keypair + mock JWKS. Integration tests use `test_with::env` pattern + +### Scope - Don't + +- Ory Keto introduction (Phase 2) +- Authorization checker trait in kernel (Phase 2) +- Multi-AuthAccount permission separation (Phase 2+) +- Entity structure changes (AuthAccount, AuthHost, etc.) +- kernel / adapter / application / driver layer changes +- Login UI implementation (ShuttlePub frontend responsibility) + +## Design + +### A. JWT Validation Middleware + +Replace `axum-keycloak-auth` with `jsonwebtoken` + `reqwest`. 
+ +``` +server/src/auth.rs (new, replaces keycloak.rs) +├── OidcConfig — issuer URL, expected audience, jwks_refetch_interval_secs (configurable for tests) +├── JwksCache — in-memory (Arc>), kid miss → re-fetch (rate-limited), lazy init +├── AuthClaims — standard OIDC claims (iss, sub, aud, exp) +├── OidcAuthInfo — from AuthClaims: issuer → AuthHost.url, subject → AuthAccount.client_id +├── auth_middleware() — axum middleware: Bearer token → validate → Extension +└── resolve_auth_account_id() — rewritten with OidcAuthInfo (same find-or-create logic) +``` + +Value mapping: JWT `iss` → AuthHost.url, JWT `sub` (= Kratos identity UUID) → AuthAccount.client_id. +Hydra login accept sets `subject = Kratos identity.id`, so JWT `sub` = Kratos identity UUID. + +### B. Login/Consent Provider + +``` +server/src/route/oauth2.rs (new) — NOT under auth_middleware +├── GET /oauth2/login — login_challenge → Kratos session check → Hydra login accept/redirect +├── GET /oauth2/consent — consent_challenge → skip check → unified JSON response +│ skip: { action: "redirect", redirect_to: "..." } +│ non-skip: { action: "show_consent", consent_challenge, client_name, requested_scope } +├── POST /oauth2/consent — consent result → Hydra consent accept/reject +``` + +### C. Hydra Admin API Client + +``` +server/src/hydra.rs (new, reqwest-based) +├── HydraAdminClient +│ ├── get_login_request / accept_login / reject_login +│ └── get_consent_request / accept_consent / reject_consent +``` + +### D. Kratos Client + +``` +server/src/kratos.rs (new) +├── KratosClient +│ └── whoami(cookie) -> Option +``` + +Domain premise: Kratos and Emumet on same domain (subdomain) for cookie reachability. + +### E. Route Handler Changes + +account.rs, profile.rs, metadata.rs: KeycloakToken → AuthClaims, remove KeycloakAuthLayer, remove expect_role!. +route.rs: remove `to_permission_strings` and its test, add oauth2 module. + +Router structure in main.rs: +``` +Router::new() + .route_account/profile/metadata(...) 
+ .layer(auth_middleware(...)) // JWT required + .route_oauth2(...) // NO auth_middleware + .layer(CorsLayer) + .with_state(app) +``` + +### F. Handler / AppModule + +Handler gets: HydraAdminClient, KratosClient, JwksCache, OidcConfig as fields. +Access via `#[derive(References)]` auto-generated getters. No DependOn* traits needed (server-layer only). + +### G. Dev Environment + +podman-compose (docker-compose.yml). Helm charts for production separately later. + +``` +ory/ +├── kratos/ +│ ├── kratos.yml, identity.schema.json, seed-users.json +└── hydra/ + └── hydra.yml +docker-compose.yml — postgres, redis, kratos, hydra +``` + +Env vars: HYDRA_ISSUER_URL, HYDRA_ADMIN_URL, KRATOS_PUBLIC_URL, EXPECTED_AUDIENCE + +### H. Data Cleanup + +Dev switch: TRUNCATE auth_hosts, auth_accounts, auth_account_events. No schema migration needed. + +### I. Files NOT Changed/Deleted + +- permission.rs: kept (potential Phase 2 reference) +- kernel / adapter / application / driver: no changes + +### J. Testing + +- JWT validation: test RSA keypair + in-memory JwkSet injection (configurable refetch interval = 0) +- Login/Consent: test_with::env(HYDRA_ADMIN_URL) skip +- Route handlers: Extension directly set, bypass middleware + +## Tasks + +- [x] 1. Dev environment (Ory Kratos + Hydra) + - [x] 1.1 Kratos config files (kratos.yml, identity.schema.json, seed-users.json) (P) + - [x] 1.2 Hydra config file (hydra.yml) (P) + - [x] 1.3 docker-compose.yml (postgres, redis, kratos, hydra; replaces standalone podman run; shared postgres with DB name separation) + - [x] 1.4 .env.example update (add HYDRA_ISSUER_URL, HYDRA_ADMIN_URL, KRATOS_PUBLIC_URL, EXPECTED_AUDIENCE; remove KEYCLOAK_SERVER, KEYCLOAK_REALM) + - [x] 1.5 Startup verification (podman-compose up, health endpoints respond) +- [x] 2. 
Cargo.toml deps and type definitions + - [x] 2.1 Add jsonwebtoken / reqwest to Cargo.toml (keep axum-keycloak-auth for now) + - [x] 2.2 OidcConfig / AuthClaims / OidcAuthInfo type definitions (server/src/auth.rs new) +- [x] 3. JWT validation and external clients + - [x] 3.1 JwksCache (OIDC Discovery, in-memory cache, kid miss re-fetch, lazy init) (P) + - [x] 3.2 HydraAdminClient types and methods (server/src/hydra.rs) (P) + - [x] 3.3 KratosClient and whoami (server/src/kratos.rs) (P) + - [x] 3.4 auth_middleware (Bearer extraction, JWT validation, Extension) + - [x] 3.5 JWT validation unit tests (test RSA keypair + JwkSet injection) +- [x] R1. Code review: auth foundation (auth.rs, hydra.rs, kratos.rs) +- [x] 4. Login/Consent Provider endpoints + - [x] 4.1 OAuth2Router trait and oauth2 module in route.rs + - [x] 4.2 GET /oauth2/login endpoint + - [x] 4.3 GET /oauth2/consent endpoint (skip check, unified JSON response) + - [x] 4.4 POST /oauth2/consent endpoint (consent result -> Hydra accept/reject) +- [x] R2. Code review: OAuth2 flow (route/oauth2.rs) +- [x] 5. Keycloak removal and route handler rewrite + - [x] 5.1 Rewrite resolve_auth_account_id with OidcAuthInfo (in auth.rs) + - [x] 5.2 account.rs rewrite (KeycloakToken -> AuthClaims, remove KeycloakAuthLayer/expect_role!) (P) + - [x] 5.3 profile.rs rewrite (same) (P) + - [x] 5.4 metadata.rs rewrite (same) (P) + - [x] 5.5 Remove to_permission_strings + test from route.rs, remove expect_role! macro from keycloak.rs + - [x] 5.6 Route handler unit test updates (Extension direct injection) +- [x] 6. Handler / AppModule / main.rs integration + - [x] 6.1 Handler: add HydraAdminClient / KratosClient / JwksCache / OidcConfig fields + AppModule accessors + init + - [x] 6.2 main.rs rewrite (remove KeycloakAuthInstance, add auth_middleware + OAuth2 routes, middleware scoping) + - [x] 6.3 Remove axum-keycloak-auth from Cargo.toml +- [x] R3. Code review: integration (handler.rs, main.rs, route handlers consistency) +- [x] 7. 
Cleanup and documentation + - [x] 7.1 Delete keycloak.rs, keycloak-data/, remove keycloak-data lines from .gitignore (P) + - [x] 7.2 Update CLAUDE.md / README.md (podman-compose setup instructions) (P) + - [x] 7.3 Document data cleanup procedure (TRUNCATE auth_hosts / auth_accounts / auth_account_events) +- [x] 8. Integration tests and verification + - [ ] 8.1 Login/Consent flow integration test (test_with::env(HYDRA_ADMIN_URL) skip) + - [x] 8.2 E2E verification with podman-compose (register -> login -> JWT -> API access) +- [x] R4. Final review: full alignment check (requirements/design coverage, missing items, code quality) diff --git a/AGENTS.md b/AGENTS.md new file mode 120000 index 0000000..681311e --- /dev/null +++ b/AGENTS.md @@ -0,0 +1 @@ +CLAUDE.md \ No newline at end of file diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..6b3f470 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,198 @@ +# CLAUDE.md + +This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. + +## Project Overview + +Emumet is an Account Service for ShuttlePub, implementing Event Sourcing with CQRS pattern. The name derives from EMU (Extravehicular Mobility Unit) + Helmet. + +## Build & Development Commands + +```bash +# Build +cargo build + +# Run tests (requires DATABASE_URL environment variable) +cargo test + +# Run single test +cargo test + +# Run server +cargo run -p server +``` + +## Required Services + +Use `podman-compose up` (or `docker-compose up`) to start all required services: + +```bash +podman-compose up -d +``` + +This starts: PostgreSQL, Redis, Ory Kratos, and Ory Hydra. + +### Manual startup (alternative) + +#### PostgreSQL +```bash +podman run --rm --name emumet-postgres -e POSTGRES_PASSWORD=develop -p 5432:5432 docker.io/postgres +``` +- User: postgres / Password: develop + +#### Redis +Required for message queue (rikka-mq). 
+ +### Auth: Ory Kratos + Hydra + +- **Kratos** (identity management): http://localhost:4433 (public), http://localhost:4434 (admin) + - Self-service registration enabled + - Identity schema: email + password + - Test user: testuser@example.com / testuser +- **Hydra** (OAuth2/OIDC): http://localhost:4444 (public), http://localhost:4445 (admin) + - Login/Consent Provider: Emumet server (GET /oauth2/login, GET/POST /oauth2/consent) + - JWT issuer + +Config files: `ory/kratos/`, `ory/hydra/` + +## Environment Variables + +Copy `.env.example` to `.env`: +- `DATABASE_URL` or individual `DATABASE_HOST`, `DATABASE_PORT`, `DATABASE_USER`, `DATABASE_PASSWORD`, `DATABASE_NAME` +- `HYDRA_ISSUER_URL` — Hydra public URL for JWT validation (default: http://localhost:4444/) +- `HYDRA_ADMIN_URL` — Hydra admin URL for Login/Consent API (default: http://localhost:4445/) +- `KRATOS_PUBLIC_URL` — Kratos public URL for session verification (default: http://localhost:4433/) +- `EXPECTED_AUDIENCE` — Expected JWT audience claim (default: account) +- `REDIS_URL` or `REDIS_HOST` — Redis connection for message queue + +### Master Key Password + +Account creation requires a master key password file for signing key encryption: +- Production: `/run/secrets/master-key-password` +- Development: `./master-key-password` (create manually, `chmod 600`) + +## Architecture + +### Workspace Structure (5 crates with dependency flow) + +``` +kernel → adapter → application → server +kernel → driver → server +``` + +- **kernel**: Domain entities, interface traits (EventStore, ReadModel, Repository), Event Sourcing core. Traits are exposed via logical `pub mod interfaces {}` block in `lib.rs` (not a physical directory). 
+- **adapter**: CQRS processors (CommandProcessor/QueryProcessor) that compose kernel traits, crypto trait composition (SigningKeyGenerator) +- **application**: Use case services (Account CRUD use cases), event appliers (projection update), DTOs +- **driver**: PostgreSQL/Redis implementations of kernel interfaces +- **server**: Axum HTTP server, JWT auth (Ory Hydra), OAuth2 Login/Consent Provider, route handlers, DI wiring (Handler/AppModule) + +### CQRS + Event Sourcing Pattern + +Two entity types exist in the codebase: **CQRS-migrated** and **legacy (Query/Modifier)**. + +#### CQRS-migrated entities (Account, AuthAccount, Profile, Metadata) + +Each CQRS entity has these components across layers: + +``` +Command Flow: + REST handler → CommandProcessor (adapter) + → EventStore.persist_and_transform() (kernel trait, driver impl) + → EventApplier (kernel) → entity reconstruction + → [AuthAccount only: ReadModel.create() for immediate consistency] + → Signal → async applier → ReadModel projection update + +Query Flow: + REST handler → QueryProcessor (adapter) + → ReadModel.find_*() (kernel trait, driver impl) +``` + +**kernel** defines per-entity interface traits: +- `AccountEventStore` / `AuthAccountEventStore` / `ProfileEventStore` / `MetadataEventStore` — event persistence + retrieval per entity-specific table +- `AccountReadModel` / `AuthAccountReadModel` / `ProfileReadModel` / `MetadataReadModel` — projection reads + writes + +**adapter** provides processors with blanket impls: +- `AccountCommandProcessor` / `ProfileCommandProcessor` / `MetadataCommandProcessor` — EventStore + EventApplier + Signal (projection via async applier) +- `AuthAccountCommandProcessor` — EventStore + EventApplier + ReadModel.create() + Signal (synchronous projection for find-or-create pattern) +- `*QueryProcessor` — ReadModel facade + +**driver** implements per-entity stores: +- `PostgresAccountEventStore` → `account_events` table +- `PostgresAuthAccountEventStore` → 
`auth_account_events` table +- `PostgresProfileEventStore` → `profile_events` table +- `PostgresMetadataEventStore` → `metadata_events` table +- `PostgresAccountReadModel` → `accounts` table +- `PostgresAuthAccountReadModel` → `auth_accounts` table +- `PostgresProfileReadModel` → `profiles` table +- `PostgresMetadataReadModel` → `metadatas` table + +**application** provides use case services and event appliers: +- `GetAccountUseCase` / `CreateAccountUseCase` / `EditAccountUseCase` / `DeleteAccountUseCase` — Account CRUD orchestration via CommandProcessor/QueryProcessor +- `GetProfileUseCase` / `CreateProfileUseCase` / `EditProfileUseCase` / `DeleteProfileUseCase` — Profile CRUD +- `GetMetadataUseCase` / `CreateMetadataUseCase` / `EditMetadataUseCase` / `DeleteMetadataUseCase` — Metadata CRUD +- `UpdateAuthAccount` / `UpdateProfile` / `UpdateMetadata` — event appliers that replay events from EventStore, update ReadModel projections + +#### Repository entities (Follow, RemoteAccount, Image, AuthHost) + +These use the Repository pattern — a single trait combining read and write operations: +- `*Repository` traits in `kernel/src/repository/` — unified CRUD interface +- `Postgres*Repository` driver implementations in `driver/src/database/postgres/` +- Follow and RemoteAccount are pure CRUD (Event Sourcing removed) +- AuthHost and Image are pure CRUD (never had Event Sourcing) + +### Key Patterns + +**DependOn\* trait pattern**: Dependency injection via associated types. `DependOnFoo` provides `fn foo(&self) -> &Self::Foo`. Blanket impls auto-wire when dependencies are satisfied. + +**impl_database_delegation! macro** (kernel/src/lib.rs): Delegates all database `DependOn*` traits from a wrapper type to a database field. Used by `Handler` to wire `PostgresDatabase`. + +**EventApplier trait** (kernel/src/event.rs): Reconstructs entity state from events. `fn apply(entity: &mut Option, event: EventEnvelope) -> Result<()>`. Entity becomes `None` on Deleted events. 
+ +**Optimistic concurrency control**: Commands carry `prev_version: Option`. `KnownEventVersion::Nothing` = must be first event, `KnownEventVersion::Prev(version)` = must match latest version. EventStore validates before persisting. + +**Signal → Applier pipeline**: `Signal` trait emits entity IDs via Redis (rikka-mq). `ApplierContainer` (server/src/applier.rs) receives and dispatches to entity-specific appliers that update ReadModel projections. + +### Auth Architecture + +JWT validation middleware (`server/src/auth.rs`): +- OIDC Discovery → JWKS cache (with kid-miss re-fetch, rate-limited) +- Bearer token → RS256 validation → `Extension` inserted into request +- `AuthClaims` → `OidcAuthInfo` → `resolve_auth_account_id` (find-or-create AuthHost + AuthAccount) + +OAuth2 Login/Consent Provider (`server/src/route/oauth2.rs`): +- GET /oauth2/login — Kratos session → Hydra login accept +- GET /oauth2/consent — skip check → redirect or show consent +- POST /oauth2/consent — accept/reject with scope validation + +Value mapping: JWT `iss` → `AuthHost.url`, JWT `sub` (Kratos identity UUID) → `AuthAccount.client_id` + +### Entity Structure + +Entities use vodca macros (`References`, `Newln`, `Nameln`) and `destructure::Destructure` for field access. + +Event Sourcing対象エンティティ (Account, AuthAccount, Profile, Metadata): +- ID type (UUIDv7-based, provides temporal ordering) +- Event enum with variants (Created, Updated, Deleted) + `Nameln` for event name serialization +- `EventApplier` implementation +- `CommandEnvelope` factory methods (e.g., `Account::create()`, `Account::delete()`) + +純粋CRUDエンティティ (Follow, RemoteAccount, AuthHost, Image): +- ID type のみ。Event enum / EventApplier なし +- Repository パターンで直接 CRUD 操作 + +### Server DI Architecture + +`Handler` — owns PostgresDatabase + RedisDatabase + crypto providers + HydraAdminClient + KratosClient. `impl_database_delegation!` wires kernel traits. + +`AppModule` — wraps `Arc` + `Arc`. 
Manually implements `DependOn*` for adapter-layer traits (Signal, ReadModel, EventStore, Repository). Blanket impls provide CommandProcessor/QueryProcessor automatically. Provides `hydra_admin_client()` and `kratos_client()` accessors. + +### Testing + +Database tests use `#[test_with::env(DATABASE_URL)]` attribute to skip when database is unavailable. + +### Data Cleanup (after auth migration) + +If migrating from Keycloak to Ory, truncate auth-related tables: +```sql +TRUNCATE auth_hosts, auth_accounts, auth_account_events; +``` diff --git a/Cargo.lock b/Cargo.lock index 0c17b00..341c778 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,17 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 + +[[package]] +name = "adapter" +version = "0.1.0" +dependencies = [ + "error-stack", + "kernel", + "tracing", + "uuid", + "zeroize", +] [[package]] name = "addr2line" @@ -17,6 +28,52 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" +[[package]] +name = "aead" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0" +dependencies = [ + "crypto-common", + "generic-array", +] + +[[package]] +name = "aes" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" +dependencies = [ + "cfg-if", + "cipher", + "cpufeatures", +] + +[[package]] +name = "aes-gcm" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "831010a0f742e1209b3bcea8fab6a8e149051ba6099432c8cb2cc117dec3ead1" +dependencies = [ + "aead", + "aes", + "cipher", + "ctr", + "ghash", + "subtle", +] + +[[package]] +name = "ahash" +version = "0.7.8" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" +dependencies = [ + "getrandom", + "once_cell", + "version_check", +] + [[package]] name = "ahash" version = "0.8.11" @@ -30,12 +87,36 @@ dependencies = [ "zerocopy", ] +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + [[package]] name = "allocator-api2" version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + [[package]] name = "anyhow" version = "1.0.86" @@ -46,9 +127,51 @@ checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" name = "application" version = "0.1.0" dependencies = [ - "async-trait", + "adapter", + "driver", "error-stack", "kernel", + "serde", + "serde_json", + "tempfile", + "time", + "tokio", + "uuid", + "vodca", +] + +[[package]] +name = "arc-swap" +version = "1.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" + +[[package]] +name = "argon2" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c3610892ee6e0cbce8ae2700349fcf8f98adb0dbfbee85aec3c9179d29cc072" +dependencies = [ + "base64ct", + "blake2", + "cpufeatures", + 
"password-hash", +] + +[[package]] +name = "arrayvec" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" + +[[package]] +name = "assert-json-diff" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e4f2b81832e72834d7518d8487a0396a28cc408186a2e8854c0f98011faf12" +dependencies = [ + "serde", + "serde_json", ] [[package]] @@ -59,7 +182,7 @@ checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107" dependencies = [ "proc-macro2", "quote", - "syn 2.0.72", + "syn 2.0.96", ] [[package]] @@ -71,12 +194,73 @@ dependencies = [ "num-traits", ] +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + [[package]] name = "autocfg" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" +[[package]] +name = "axum" +version = "0.7.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "504e3947307ac8326a5437504c517c4b56716c9d98fac0028c2acc7ca47d70ae" +dependencies = [ + "async-trait", + "axum-core", + "bytes", + "futures-util", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-util", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum-core" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + 
"http", + "http-body", + "http-body-util", + "mime", + "pin-project-lite", + "rustversion", + "sync_wrapper", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "backtrace" version = "0.3.73" @@ -98,6 +282,12 @@ version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + [[package]] name = "base64ct" version = "1.6.0" @@ -119,6 +309,27 @@ dependencies = [ "serde", ] +[[package]] +name = "bitvec" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" +dependencies = [ + "funty", + "radium", + "tap", + "wyz", +] + +[[package]] +name = "blake2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" +dependencies = [ + "digest", +] + [[package]] name = "block-buffer" version = "0.10.4" @@ -128,6 +339,68 @@ dependencies = [ "generic-array", ] +[[package]] +name = "borsh" +version = "1.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5430e3be710b68d984d1391c854eb431a9d548640711faa54eecb1df93db91cc" +dependencies = [ + "borsh-derive", + "cfg_aliases", +] + +[[package]] +name = "borsh-derive" +version = "1.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8b668d39970baad5356d7c83a86fee3a539e6f93bf6764c97368243e17a0487" +dependencies = [ + "once_cell", + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "bumpalo" +version = "3.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" + +[[package]] +name = "byte-unit" +version = "5.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1cd29c3c585209b0cbc7309bfe3ed7efd8c84c21b7af29c8bfae908f8777174" +dependencies = [ + "rust_decimal", + "serde", + "utf8-width", +] + +[[package]] +name = "bytecheck" +version = "0.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23cdc57ce23ac53c931e88a43d06d070a6fd142f2617be5855eb75efc9beb1c2" +dependencies = [ + "bytecheck_derive", + "ptr_meta", + "simdutf8", +] + +[[package]] +name = "bytecheck_derive" +version = "0.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3db406d29fbcd95542e92559bed4d8ad92636d1ca8b3b72ede10b4bcc010e659" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "byteorder" version = "1.5.0" @@ -152,6 +425,36 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + +[[package]] +name = "chrono" +version = "0.4.39" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "js-sys", + "num-traits", + "wasm-bindgen", + "windows-targets 0.52.6", +] + +[[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +dependencies = [ + "crypto-common", + "inout", +] + [[package]] name = "combine" version = "4.6.7" @@ -184,9 +487,9 @@ dependencies = [ [[package]] name = 
"core-foundation-sys" -version = "0.8.6" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cpufeatures" @@ -212,6 +515,34 @@ version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" +[[package]] +name = "crossbeam-channel" +version = "0.5.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "crossbeam-queue" version = "0.3.11" @@ -234,29 +565,49 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array", + "rand_core", "typenum", ] +[[package]] +name = "ctr" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835" +dependencies = [ + "cipher", +] + [[package]] name = "deadpool" -version = "0.9.5" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "421fe0f90f2ab22016f32a9881be5134fdd71c65298917084b0c7477cbc3856e" +checksum = 
"fb84100978c1c7b37f09ed3ce3e5f843af02c2a2c431bae5b19230dad2c1b490" dependencies = [ "async-trait", "deadpool-runtime", "num_cpus", - "retain_mut", + "tokio", +] + +[[package]] +name = "deadpool" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6541a3916932fe57768d4be0b1ffb5ec7cbf74ca8c903fdfd5c0fe8aa958f0ed" +dependencies = [ + "deadpool-runtime", + "num_cpus", "tokio", ] [[package]] name = "deadpool-redis" -version = "0.12.0" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f1760f60ffc6653b4afd924c5792098d8c00d9a3deb6b3d989eac17949dc422" +checksum = "c136f185b3ca9d1f4e4e19c11570e1002f4bfdd592d589053e225716d613851f" dependencies = [ - "deadpool", + "deadpool 0.12.1", "redis", ] @@ -297,7 +648,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfd72fa5c0aa087c0ce4f7039c664b106d638a61a6088663415b34bf4e803881" dependencies = [ "quote", - "syn 2.0.72", + "syn 2.0.96", ] [[package]] @@ -312,6 +663,17 @@ dependencies = [ "subtle", ] +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + [[package]] name = "dotenvy" version = "0.15.7" @@ -322,17 +684,27 @@ checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" name = "driver" version = "0.1.0" dependencies = [ + "aes-gcm", + "argon2", "async-trait", + "base64 0.22.1", "deadpool-redis", "dotenvy", "error-stack", "kernel", + "rand", + "reqwest", + "rsa", "serde", "serde_json", "sqlx", + "tempfile", + "test-with", "time", "tokio", "uuid", + "vodca", + "zeroize", ] [[package]] @@ -344,6 +716,21 @@ dependencies = [ "serde", ] +[[package]] +name = "encoding_rs" +version = "0.8.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "env_home" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7f84e12ccf0a7ddc17a6c41c93326024c42920d7ee630d04950e6926645c0fe" + [[package]] name = "equivalent" version = "1.0.1" @@ -404,6 +791,12 @@ dependencies = [ "spin", ] +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + [[package]] name = "foreign-types" version = "0.3.2" @@ -429,26 +822,47 @@ dependencies = [ ] [[package]] -name = "futures-channel" -version = "0.3.30" +name = "funty" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" -dependencies = [ - "futures-core", - "futures-sink", -] +checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" [[package]] -name = "futures-core" -version = "0.3.30" +name = "futures" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" 
[[package]] name = "futures-executor" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" dependencies = [ "futures-core", "futures-task", @@ -468,30 +882,43 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" + +[[package]] +name = "futures-macro" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] [[package]] name = "futures-sink" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" [[package]] name = "futures-task" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" [[package]] name = "futures-util" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ + "futures-channel", "futures-core", "futures-io", + "futures-macro", "futures-sink", "futures-task", "memchr", @@ -517,8 +944,20 @@ 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", + "js-sys", "libc", "wasi", + "wasm-bindgen", +] + +[[package]] +name = "ghash" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0d8a4362ccb29cb0b265253fb0a2728f592895ee6854fd9bc13f2ffda266ff1" +dependencies = [ + "opaque-debug", + "polyval", ] [[package]] @@ -527,13 +966,41 @@ version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" +[[package]] +name = "h2" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +dependencies = [ + "ahash 0.7.8", +] + [[package]] name = "hashbrown" version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ - "ahash", + "ahash 0.8.11", "allocator-api2", ] @@ -543,7 +1010,7 @@ version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" dependencies = [ - "hashbrown", + "hashbrown 0.14.5", ] [[package]] @@ -594,14 +1061,285 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "http" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http", +] + +[[package]] +name = "http-body-util" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" +dependencies = [ + "bytes", + "futures-util", + "http", + "http-body", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "hyper" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d191583f3da1305256f22463b9bb0471acad48a4e534a5218b9963e9c1f59b2" +dependencies = [ + "futures-util", + "http", + "hyper", + "hyper-util", + "rustls", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tower-service", +] + +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http", + "http-body", + "hyper", + "pin-project-lite", + "socket2 0.5.7", + "tokio", + "tower-service", + "tracing", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows-core 0.52.0", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "icu_collections" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locid" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_locid_transform" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" +dependencies 
= [ + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_locid_transform_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + [[package]] name = "idna" -version = "0.5.0" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" dependencies = [ - "unicode-bidi", - "unicode-normalization", + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +dependencies = [ + "icu_normalizer", + "icu_properties", ] [[package]] @@ -611,15 +1349,55 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "de3fc2e30ba82dd1b3911c8de1ffc143c74a914a14e99514d7637e3099df5ea0" dependencies = [ "equivalent", - "hashbrown", + "hashbrown 0.14.5", ] +[[package]] +name = "inout" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" +dependencies = [ + "generic-array", +] + +[[package]] +name = "ipnet" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" + [[package]] name = "itoa" version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" +[[package]] +name = "js-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "jsonwebtoken" +version = "9.3.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9ae10193d25051e74945f1ea2d0b42e03cc3b890f7e4cc5faa44997d808193f" +dependencies = [ + "base64 0.21.7", + "js-sys", + "pem", + "ring", + "serde", + "serde_json", + "simple_asn1", +] + [[package]] name = "kernel" version = "0.1.0" @@ -633,6 +1411,7 @@ dependencies = [ "time", "uuid", "vodca", + "zeroize", ] [[package]] @@ -646,9 +1425,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.155" +version = "0.2.169" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" +checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" [[package]] name = "libm" @@ -673,6 +1452,12 @@ version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +[[package]] +name = "litemap" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" + [[package]] name = "lock_api" version = "0.4.12" @@ -689,6 +1474,21 @@ version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + +[[package]] +name = "matchit" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" + [[package]] name = "md-5" version = "0.10.6" @@ -705,6 +1505,12 @@ version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + [[package]] name = "minimal-lexical" version = "0.2.1" @@ -769,14 +1575,43 @@ dependencies = [ ] [[package]] -name = "num-bigint-dig" -version = "0.8.4" +name = "ntapi" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" +checksum = "e8a3895c6391c39d7fe7ebc444a87eb2991b2a0bc718fdabd071eec617fc68e4" dependencies = [ - "byteorder", - "lazy_static", - "libm", + "winapi", +] + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "num-bigint" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" +dependencies = [ + "num-integer", + "num-traits", +] + +[[package]] +name = "num-bigint-dig" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" +dependencies = [ + "byteorder", + "lazy_static", + "libm", "num-integer", "num-iter", "num-traits", @@ -846,6 +1681,12 @@ version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +[[package]] +name = "opaque-debug" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" + [[package]] name = "openssl" version = "0.10.66" @@ 
-869,7 +1710,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.72", + "syn 2.0.96", ] [[package]] @@ -890,6 +1731,12 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + [[package]] name = "parking_lot" version = "0.12.3" @@ -913,12 +1760,33 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "password-hash" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166" +dependencies = [ + "base64ct", + "rand_core", + "subtle", +] + [[package]] name = "paste" version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" +[[package]] +name = "pem" +version = "3.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e459365e590736a54c3fa561947c84837534b8e9af6fc5bf781307e82658fae" +dependencies = [ + "base64 0.22.1", + "serde", +] + [[package]] name = "pem-rfc7468" version = "0.7.0" @@ -946,6 +1814,17 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +[[package]] +name = "ping" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "122ee1f5a6843bec84fcbd5c6ba3622115337a6b8965b93a61aad347648f4e8d" +dependencies = [ + "rand", + "socket2 0.4.10", + "thiserror 1.0.63", +] + [[package]] name = "pkcs1" version = "0.7.5" @@ -973,6 +1852,18 @@ version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" +[[package]] +name = 
"polyval" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25" +dependencies = [ + "cfg-if", + "cpufeatures", + "opaque-debug", + "universal-hash", +] + [[package]] name = "powerfmt" version = "0.2.0" @@ -988,15 +1879,66 @@ dependencies = [ "zerocopy", ] +[[package]] +name = "proc-macro-crate" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b" +dependencies = [ + "toml_edit", +] + +[[package]] +name = "proc-macro-error-attr2" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5" +dependencies = [ + "proc-macro2", + "quote", +] + +[[package]] +name = "proc-macro-error2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802" +dependencies = [ + "proc-macro-error-attr2", + "proc-macro2", + "quote", + "syn 2.0.96", +] + [[package]] name = "proc-macro2" -version = "1.0.86" +version = "1.0.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" +checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" dependencies = [ "unicode-ident", ] +[[package]] +name = "ptr_meta" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0738ccf7ea06b608c10564b31debd4f5bc5e197fc8bfe088f68ae5ce81e7a4f1" +dependencies = [ + "ptr_meta_derive", +] + +[[package]] +name = "ptr_meta_derive" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac" +dependencies = [ + "proc-macro2", + "quote", + "syn 
1.0.109", +] + [[package]] name = "quote" version = "1.0.36" @@ -1006,6 +1948,12 @@ dependencies = [ "proc-macro2", ] +[[package]] +name = "radium" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" + [[package]] name = "rand" version = "0.8.5" @@ -1036,20 +1984,42 @@ dependencies = [ "getrandom", ] +[[package]] +name = "rayon" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + [[package]] name = "redis" -version = "0.23.3" +version = "0.29.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f49cdc0bb3f412bf8e7d1bd90fe1d9eb10bc5c399ba90973c14662a27b3f8ba" +checksum = "b110459d6e323b7cda23980c46c77157601199c9da6241552b284cd565a7a133" dependencies = [ - "async-trait", + "arc-swap", "bytes", "combine", "futures-util", "itoa", + "num-bigint", "percent-encoding", "pin-project-lite", "ryu", + "socket2 0.5.7", "tokio", "tokio-util", "url", @@ -1074,10 +2044,161 @@ dependencies = [ ] [[package]] -name = "retain_mut" -version = "0.1.9" +name = "regex" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38200e5ee88914975b69f657f0801b6f6dccafd44fd9326302a4aaeecfacb1d8" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata 0.4.8", + "regex-syntax 0.8.5", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" 
+dependencies = [ + "regex-syntax 0.6.29", +] + +[[package]] +name = "regex-automata" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax 0.8.5", +] + +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + +[[package]] +name = "regex-syntax" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" + +[[package]] +name = "rend" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71fe3824f5629716b1589be05dacd749f6aa084c87e00e016714a8cdfccc997c" +dependencies = [ + "bytecheck", +] + +[[package]] +name = "reqwest" +version = "0.12.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43e734407157c3c2034e0258f5e4473ddb361b1e85f95a66690d67264d7cd1da" +dependencies = [ + "base64 0.22.1", + "bytes", + "encoding_rs", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-tls", + "hyper-util", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls-pemfile", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "system-configuration", + "tokio", + "tokio-native-tls", + "tower", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "windows-registry", +] + +[[package]] +name = "rikka-mq" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52886d21f46f54dfa1ba31bc734f164940d8acca1c666d45114c01c6077725ad" +dependencies = [ + 
"deadpool-redis", + "destructure", + "serde", + "serde_json", + "thiserror 2.0.11", + "tokio", + "tracing", +] + +[[package]] +name = "ring" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +dependencies = [ + "cc", + "cfg-if", + "getrandom", + "libc", + "spin", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rkyv" +version = "0.7.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9008cd6385b9e161d8229e1f6549dd23c3d022f132a2ea37ac3a10ac4935779b" +dependencies = [ + "bitvec", + "bytecheck", + "bytes", + "hashbrown 0.12.3", + "ptr_meta", + "rend", + "rkyv_derive", + "seahash", + "tinyvec", + "uuid", +] + +[[package]] +name = "rkyv_derive" +version = "0.7.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4389f1d5789befaf6029ebd9f7dac4af7f7e3d61b69d4f30e2ac02b57e7712b0" +checksum = "503d1d27590a2b0a3a4ca4c94755aa2875657196ecbf401a42eff41d7de532c0" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] [[package]] name = "rsa" @@ -1093,12 +2214,29 @@ dependencies = [ "pkcs1", "pkcs8", "rand_core", + "sha2", "signature", "spki", "subtle", "zeroize", ] +[[package]] +name = "rust_decimal" +version = "1.36.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b082d80e3e3cc52b2ed634388d436fe1f4de6af5786cc2de9ba9737527bdf555" +dependencies = [ + "arrayvec", + "borsh", + "bytes", + "num-traits", + "rand", + "rkyv", + "serde", + "serde_json", +] + [[package]] name = "rustc-demangle" version = "0.1.24" @@ -1127,6 +2265,51 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "rustls" +version = "0.23.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f287924602bf649d949c63dc8ac8b235fa5387d394020705b80c4eb597ce5b8" +dependencies = [ + "once_cell", + "rustls-pki-types", + "rustls-webpki", + "subtle", + 
"zeroize", +] + +[[package]] +name = "rustls-pemfile" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2bf47e6ff922db3825eb750c4e2ff784c6ff8fb9e13046ef6a1d1c5401b0b37" + +[[package]] +name = "rustls-webpki" +version = "0.102.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" + [[package]] name = "ryu" version = "1.0.18" @@ -1148,6 +2331,12 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" +[[package]] +name = "seahash" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b" + [[package]] name = "security-framework" version = "2.11.1" @@ -1179,29 +2368,29 @@ checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" [[package]] name = "serde" -version = "1.0.205" +version = "1.0.217" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e33aedb1a7135da52b7c21791455563facbbcc43d0f0f66165b42c21b3dfb150" +checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.205" +version = "1.0.217" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "692d6f5ac90220161d6774db30c662202721e64aed9058d2c394f451261420c1" +checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.72", + "syn 2.0.96", ] [[package]] name = "serde_json" -version = "1.0.122" +version = "1.0.137" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "784b6203951c57ff748476b126ccb5e8e2959a5c19e5c617ab1956be3dbc68da" +checksum = "930cfb6e6abf99298aaad7d29abbef7a9999a9a8806a40088f55f0dcec03146b" dependencies = [ "itoa", "memchr", @@ -1210,29 +2399,77 @@ dependencies = [ ] [[package]] -name = "server" -version = "0.1.0" +name = "serde_path_to_error" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af99884400da37c88f5e9146b7f1fd0fbcae8f6eec4e9da38b67d05486f814a6" dependencies = [ - "application", - "driver", - "kernel", + "itoa", + "serde", ] [[package]] -name = "sha1" -version = "0.10.6" +name = "serde_urlencoded" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" dependencies = [ - "cfg-if", - "cpufeatures", - "digest", + "form_urlencoded", + "itoa", + "ryu", + "serde", ] [[package]] -name = "sha2" -version = "0.10.8" -source = "registry+https://github.com/rust-lang/crates.io-index" +name = "server" +version = "0.1.0" +dependencies = [ + "adapter", + "application", + "axum", + "base64 0.22.1", + "destructure", + "dotenvy", + "driver", + "error-stack", + "http-body-util", + "jsonwebtoken", + "kernel", + "rand", + "reqwest", + "rikka-mq", + "rsa", + "serde", + "serde_json", + "test-with", + "time", + "tokio", + "tower", + "tower-http", + "tracing", + "tracing-appender", + "tracing-subscriber", + "url", + "uuid", + "vodca", + "wiremock", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ "cfg-if", @@ -1240,6 +2477,15 @@ dependencies = [ "digest", ] +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + [[package]] name = "signature" version = "2.2.0" @@ -1250,6 +2496,24 @@ dependencies = [ "rand_core", ] +[[package]] +name = "simdutf8" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e" + +[[package]] +name = "simple_asn1" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "297f631f50729c8c99b84667867963997ec0b50f32b2a7dbcab828ef0541e8bb" +dependencies = [ + "num-bigint", + "num-traits", + "thiserror 2.0.11", + "time", +] + [[package]] name = "slab" version = "0.4.9" @@ -1265,6 +2529,16 @@ version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" +[[package]] +name = "socket2" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" +dependencies = [ + "libc", + "winapi", +] + [[package]] name = "socket2" version = "0.5.7" @@ -1323,7 +2597,7 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24ba59a9342a3d9bab6c56c118be528b27c9b60e490080e9711a04dccac83ef6" 
dependencies = [ - "ahash", + "ahash 0.8.11", "atoi", "byteorder", "bytes", @@ -1350,7 +2624,7 @@ dependencies = [ "sha2", "smallvec", "sqlformat", - "thiserror", + "thiserror 1.0.63", "time", "tokio", "tokio-stream", @@ -1405,7 +2679,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1ed31390216d20e538e447a7a9b959e06ed9fc51c37b514b46eb758016ecd418" dependencies = [ "atoi", - "base64", + "base64 0.21.7", "bitflags 2.6.0", "byteorder", "bytes", @@ -1435,7 +2709,7 @@ dependencies = [ "smallvec", "sqlx-core", "stringprep", - "thiserror", + "thiserror 1.0.63", "time", "tracing", "uuid", @@ -1449,7 +2723,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c824eb80b894f926f89a0b9da0c7f435d27cdd35b8c655b114e58223918577e" dependencies = [ "atoi", - "base64", + "base64 0.21.7", "bitflags 2.6.0", "byteorder", "crc", @@ -1475,7 +2749,7 @@ dependencies = [ "smallvec", "sqlx-core", "stringprep", - "thiserror", + "thiserror 1.0.63", "time", "tracing", "uuid", @@ -1507,6 +2781,12 @@ dependencies = [ "uuid", ] +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + [[package]] name = "stringprep" version = "0.1.5" @@ -1537,15 +2817,76 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.72" +version = "2.0.96" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc4b9b9bf2add8093d3f2c0204471e951b2285580335de42f9d2534f3ae7a8af" +checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] +[[package]] +name = "sync_wrapper" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" 
+version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "sysinfo" +version = "0.33.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fc858248ea01b66f19d8e8a6d55f41deaf91e9d495246fd01368d99935c6c01" +dependencies = [ + "core-foundation-sys", + "libc", + "memchr", + "ntapi", + "rayon", + "windows", +] + +[[package]] +name = "system-configuration" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" +dependencies = [ + "bitflags 2.6.0", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + [[package]] name = "tempfile" version = "3.12.0" @@ -1559,13 +2900,43 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "test-with" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb39a1199760f44d7e11b6644b620d35abe7e012fa34288abae9e5aa95a243da" +dependencies = [ + "byte-unit", + "chrono", + "num_cpus", + "ping", + "proc-macro-error2", + "proc-macro2", + "quote", + "regex", + "reqwest", + "syn 2.0.96", + "sysinfo", + "uzers", + "which", +] + [[package]] name = "thiserror" version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" dependencies = [ - "thiserror-impl", + "thiserror-impl 1.0.63", +] + +[[package]] +name = "thiserror" +version = "2.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d452f284b73e6d76dd36758a0c8684b1d5be31f92b89d07fd5822175732206fc" +dependencies = [ + "thiserror-impl 2.0.11", ] [[package]] @@ -1576,14 +2947,35 @@ checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" dependencies = [ "proc-macro2", "quote", - "syn 2.0.72", + "syn 2.0.96", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26afc1baea8a989337eeb52b6e72a039780ce45c3edfcc9c5b9d112feeb173c2" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "thread_local" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +dependencies = [ + "cfg-if", + "once_cell", ] [[package]] name = "time" -version = "0.3.36" +version = "0.3.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21" dependencies = [ "deranged", "itoa", @@ -1602,14 +2994,24 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.18" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de" dependencies = [ "num-conv", "time-core", ] +[[package]] +name = "tinystr" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +dependencies = [ + "displaydoc", + "zerovec", +] + [[package]] name = "tinyvec" version = "1.8.0" @@ -1627,29 +3029,49 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.39.2" +version = "1.43.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "daa4fb1bc778bd6f04cbfc4bb2d06a7396a8f299dc33ea1900cedaa316f467b1" +checksum = "3d61fa4ffa3de412bfea335c6ecff681de2b609ba3c77ef3e00e521813a9ed9e" dependencies = [ "backtrace", "bytes", "libc", "mio", "pin-project-lite", - "socket2", + "socket2 0.5.7", "tokio-macros", "windows-sys 0.52.0", ] [[package]] name = "tokio-macros" -version = "2.4.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" +checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.72", + "syn 2.0.96", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6d0975eaace0cf0fcadee4e4aaa5da15b5c079146f2cffb67c113be122bf37" +dependencies = [ + "rustls", + "tokio", ] [[package]] @@ -1676,11 +3098,73 @@ dependencies = [ "tokio", ] +[[package]] +name = "toml_datetime" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" + +[[package]] +name = "toml_edit" +version = "0.22.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"17b4795ff5edd201c7cd6dca065ae59972ce77d1b80fa0a84d94950ece7d1474" +dependencies = [ + "indexmap", + "toml_datetime", + "winnow", +] + +[[package]] +name = "tower" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-http" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9cd434a998747dd2c4276bc96ee2e0c7a2eadf3cae88e52be55a05fa9053f5" +dependencies = [ + "bitflags 2.6.0", + "bytes", + "http", + "http-body", + "http-body-util", + "pin-project-lite", + "tokio", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + [[package]] name = "tracing" -version = "0.1.40" +version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ "log", "pin-project-lite", @@ -1688,26 +3172,74 @@ dependencies = [ "tracing-core", ] +[[package]] +name = "tracing-appender" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3566e8ce28cc0a3fe42519fc80e6b4c943cc4c8cef275620eb8dac2d3d4e06cf" +dependencies = [ + "crossbeam-channel", + "thiserror 1.0.63", + "time", + "tracing-subscriber", +] + [[package]] name = "tracing-attributes" 
-version = "0.1.27" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.72", + "syn 2.0.96", ] [[package]] name = "tracing-core" -version = "0.1.32" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", + "valuable", ] +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + [[package]] name = "typenum" version = "1.17.0" @@ -1753,11 +3285,27 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" +[[package]] +name = "universal-hash" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea" +dependencies = [ + 
"crypto-common", + "subtle", +] + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + [[package]] name = "url" -version = "2.5.2" +version = "2.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", "idna", @@ -1770,16 +3318,50 @@ version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" +[[package]] +name = "utf16_iter" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8-width" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86bd8d4e895da8537e5315b8254664e6b769c4ff3db18321b297a1e7004392e3" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + [[package]] name = "uuid" -version = "1.10.0" +version = "1.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314" +checksum = "b3758f5e68192bb96cc8f9b7e2c2cfdabb435499a28499a42f8f984092adad4b" dependencies = [ "getrandom", "serde", ] +[[package]] +name = "uzers" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76d283dc7e8c901e79e32d077866eaf599156cbf427fffa8289aecc52c5c3f63" +dependencies = [ + "libc", + "log", +] + +[[package]] +name = "valuable" +version = "0.1.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + [[package]] name = "vcpkg" version = "0.2.15" @@ -1800,7 +3382,16 @@ checksum = "1d74d8fdab2e45cc05fa4964e5ff7ee72499f9edfffb38455bb94e62d1e3bcaa" dependencies = [ "proc-macro2", "quote", - "syn 2.0.72", + "syn 2.0.96", +] + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", ] [[package]] @@ -1815,6 +3406,99 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" +[[package]] +name = "wasm-bindgen" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" +dependencies = [ + "bumpalo", + "log", + "proc-macro2", + "quote", + "syn 2.0.96", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.50" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" +dependencies = [ + "cfg-if", + "js-sys", + "once_cell", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = 
"wasm-bindgen-macro-support" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "web-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "which" +version = "7.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2774c861e1f072b3aadc02f8ba886c26ad6321567ecc294c935434cad06f1283" +dependencies = [ + "either", + "env_home", + "rustix", + "winsafe", +] + [[package]] name = "whoami" version = "1.5.1" @@ -1825,6 +3509,120 @@ dependencies = [ "wasite", ] +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows" +version = "0.57.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "12342cb4d8e3b046f3d80effd474a7a02447231330ef77d71daa6fbc40681143" +dependencies = [ + "windows-core 0.57.0", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-core" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-core" +version = "0.57.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2ed2439a290666cd67ecce2b0ffaad89c2a56b976b736e6ece670297897832d" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-result 0.1.2", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-implement" +version = "0.57.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "windows-interface" +version = "0.57.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "windows-registry" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0" +dependencies = [ + "windows-result 0.2.0", + "windows-strings", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-result" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-result" +version = "0.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-strings" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" +dependencies = [ + "windows-result 0.2.0", + "windows-targets 0.52.6", +] + [[package]] name = "windows-sys" version = "0.48.0" @@ -1973,6 +3771,90 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" +[[package]] +name = "winnow" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59690dea168f2198d1a3b0cac23b8063efcd11012f10ae4698f284808c8ef603" +dependencies = [ + "memchr", +] + +[[package]] +name = "winsafe" +version = "0.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904" + +[[package]] +name = "wiremock" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2b8b99d4cdbf36b239a9532e31fe4fb8acc38d1897c1761e161550a7dc78e6a" +dependencies = [ + "assert-json-diff", + "async-trait", + "base64 0.22.1", + "deadpool 0.10.0", + "futures", + "http", + "http-body-util", + "hyper", + "hyper-util", + "log", + "once_cell", + "regex", + "serde", + "serde_json", + "tokio", + "url", +] + +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + +[[package]] +name = 
"wyz" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" +dependencies = [ + "tap", +] + +[[package]] +name = "yoke" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", + "synstructure", +] + [[package]] name = "zerocopy" version = "0.7.35" @@ -1991,7 +3873,28 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.72", + "syn 2.0.96", +] + +[[package]] +name = "zerofrom" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", + "synstructure", ] [[package]] @@ -1999,3 +3902,25 @@ name = "zeroize" version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + 
+[[package]] +name = "zerovec-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] diff --git a/Cargo.toml b/Cargo.toml index 0818774..4da0980 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,5 +1,6 @@ [workspace] members = [ + "adapter", "application", "driver", "kernel", @@ -12,8 +13,17 @@ serde = { version = "1", features = ["derive"] } uuid = { version = "1.4", features = ["serde", "v7"] } time = { version = "0.3", features = ["serde"] } nanoid = "0.4.0" +dotenvy = "0.15.7" + +vodca = "0.1.8" +destructure = "0.5.6" +qualified_do = "0.1.0" error-stack = "0.4.1" +tracing = "0.1.40" +tracing-subscriber = { version = "0.3.18", features = ["registry", "env-filter", "fmt"] } + +test-with = "0.14.7" [workspace.package] version = "0.1.0" diff --git a/README.md b/README.md index 829a4e3..8300e9e 100644 --- a/README.md +++ b/README.md @@ -1,17 +1,41 @@ # Emumet - - + + + -# DB -Podman(docker)にて環境構築が可能です +## Setup + +### Services + +```shell +podman-compose up -d +``` + +PostgreSQL, Redis, Ory Kratos, Ory Hydra が起動します。 + +### Auth: Ory Kratos + Hydra + +- **Kratos** (Identity Management): http://localhost:4433 + - Test user: testuser@example.com / testuser +- **Hydra** (OAuth2/OIDC): http://localhost:4444 + +### Environment + +```shell +cp .env.example .env +``` + +## DB + +`podman-compose` で PostgreSQL が起動します。手動起動する場合: ```shell podman run --rm --name emumet-postgres -e POSTGRES_PASSWORD=develop -p 5432:5432 docker.io/postgres ``` -> ユーザー名: postgres -> パスワード: develop +> User: postgres / Password: develop + +## Etymology -# 語源 -EMU(Extravehicular Mobility Unit=宇宙服)+Helmet +EMU(Extravehicular Mobility Unit) + Helmet diff --git a/adapter/Cargo.toml b/adapter/Cargo.toml new file mode 100644 index 0000000..a8c619f --- /dev/null +++ b/adapter/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "adapter" 
+version.workspace = true +edition.workspace = true +authors.workspace = true + +[dependencies] +kernel = { path = "../kernel" } +error-stack = { workspace = true } +tracing = { workspace = true } +uuid = { workspace = true } +zeroize = "1.7" diff --git a/adapter/src/crypto.rs b/adapter/src/crypto.rs new file mode 100644 index 0000000..ce1e2d5 --- /dev/null +++ b/adapter/src/crypto.rs @@ -0,0 +1,75 @@ +use error_stack::Result; +use kernel::interfaces::crypto::{ + DependOnKeyEncryptor, DependOnRawKeyGenerator, GeneratedKeyPair, KeyEncryptor, RawKeyGenerator, + SigningAlgorithm, +}; +use kernel::KernelError; + +/// Trait for signing key pair generation (composed from RawKeyGenerator + KeyEncryptor) +/// +/// This trait is automatically implemented for any type that implements both +/// [`DependOnRawKeyGenerator`] and [`DependOnKeyEncryptor`] via blanket implementation. +/// +/// # Architecture +/// +/// ```text +/// Application (uses SigningKeyGenerator) +/// ↓ +/// Adapter (composes RawKeyGenerator + KeyEncryptor) +/// ↓ +/// Kernel (defines traits) +/// ↑ +/// Driver (implements concrete crypto) +/// ``` +/// +/// # Example +/// +/// ```ignore +/// // If Handler implements DependOnRawKeyGenerator + DependOnKeyEncryptor, +/// // SigningKeyGenerator is automatically implemented. 
+/// let key_pair = handler.signing_key_generator().generate(password)?; +/// ``` +pub trait SigningKeyGenerator: Send + Sync { + /// Generate a new key pair, encrypting the private key with the given password + fn generate(&self, password: &[u8]) -> Result; + + /// Returns the algorithm used by this generator + fn algorithm(&self) -> SigningAlgorithm; +} + +pub trait DependOnSigningKeyGenerator: Send + Sync { + type SigningKeyGenerator: SigningKeyGenerator; + fn signing_key_generator(&self) -> &Self::SigningKeyGenerator; +} + +// Blanket implementation: any type with RawKeyGenerator + KeyEncryptor can generate signing keys +impl SigningKeyGenerator for T +where + T: DependOnRawKeyGenerator + DependOnKeyEncryptor + Send + Sync, +{ + fn generate(&self, password: &[u8]) -> Result { + let raw = self.raw_key_generator().generate_raw()?; + let encrypted = + self.key_encryptor() + .encrypt(&raw.private_key_pem, password, raw.algorithm)?; + Ok(GeneratedKeyPair { + public_key_pem: raw.public_key_pem, + encrypted_private_key: encrypted, + }) + } + + fn algorithm(&self) -> SigningAlgorithm { + self.raw_key_generator().algorithm() + } +} + +// Blanket implementation: any type that can generate signing keys provides DependOnSigningKeyGenerator +impl DependOnSigningKeyGenerator for T +where + T: DependOnRawKeyGenerator + DependOnKeyEncryptor + Send + Sync, +{ + type SigningKeyGenerator = Self; + fn signing_key_generator(&self) -> &Self::SigningKeyGenerator { + self + } +} diff --git a/adapter/src/lib.rs b/adapter/src/lib.rs new file mode 100644 index 0000000..131719b --- /dev/null +++ b/adapter/src/lib.rs @@ -0,0 +1,2 @@ +pub mod crypto; +pub mod processor; diff --git a/adapter/src/processor.rs b/adapter/src/processor.rs new file mode 100644 index 0000000..63247ca --- /dev/null +++ b/adapter/src/processor.rs @@ -0,0 +1,4 @@ +pub mod account; +pub mod auth_account; +pub mod metadata; +pub mod profile; diff --git a/adapter/src/processor/account.rs 
b/adapter/src/processor/account.rs new file mode 100644 index 0000000..0386667 --- /dev/null +++ b/adapter/src/processor/account.rs @@ -0,0 +1,252 @@ +use error_stack::Report; +use kernel::interfaces::database::{DatabaseConnection, DependOnDatabaseConnection, Executor}; +use kernel::interfaces::event::EventApplier; +use kernel::interfaces::event_store::{AccountEventStore, DependOnAccountEventStore}; +use kernel::interfaces::read_model::{AccountReadModel, DependOnAccountReadModel}; +use kernel::interfaces::signal::Signal; +use kernel::prelude::entity::{ + Account, AccountId, AccountIsBot, AccountName, AccountPrivateKey, AccountPublicKey, + AuthAccountId, Nanoid, +}; +use kernel::KernelError; +use std::future::Future; + +// --- Signal DI trait (adapter-specific) --- + +pub trait DependOnAccountSignal: Send + Sync { + type AccountSignal: Signal + Send + Sync + 'static; + fn account_signal(&self) -> &Self::AccountSignal; +} + +// --- AccountCommandProcessor --- + +pub trait AccountCommandProcessor: Send + Sync + 'static { + type Executor: Executor; + + fn create( + &self, + executor: &mut Self::Executor, + name: AccountName, + private_key: AccountPrivateKey, + public_key: AccountPublicKey, + is_bot: AccountIsBot, + auth_account_id: AuthAccountId, + ) -> impl Future> + Send; + + fn update( + &self, + executor: &mut Self::Executor, + account_id: AccountId, + is_bot: AccountIsBot, + current_version: kernel::prelude::entity::EventVersion, + ) -> impl Future> + Send; + + fn deactivate( + &self, + executor: &mut Self::Executor, + account_id: AccountId, + current_version: kernel::prelude::entity::EventVersion, + ) -> impl Future> + Send; +} + +impl AccountCommandProcessor for T +where + T: DependOnAccountEventStore + DependOnAccountSignal + Send + Sync + 'static, +{ + type Executor = + <::AccountEventStore as AccountEventStore>::Executor; + + async fn create( + &self, + executor: &mut Self::Executor, + name: AccountName, + private_key: AccountPrivateKey, + public_key: 
AccountPublicKey, + is_bot: AccountIsBot, + auth_account_id: AuthAccountId, + ) -> error_stack::Result { + let account_id = AccountId::default(); + let nanoid = Nanoid::::default(); + let command = Account::create( + account_id.clone(), + name, + private_key, + public_key, + is_bot, + nanoid, + auth_account_id, + ); + + let event_envelope = self + .account_event_store() + .persist_and_transform(executor, command) + .await?; + + let mut account = None; + Account::apply(&mut account, event_envelope)?; + let account = account.ok_or_else(|| { + Report::new(KernelError::Internal) + .attach_printable("Failed to construct account from created event") + })?; + + if let Err(e) = self.account_signal().emit(account_id).await { + tracing::warn!("Failed to emit account signal: {:?}", e); + } + + Ok(account) + } + + async fn update( + &self, + executor: &mut Self::Executor, + account_id: AccountId, + is_bot: AccountIsBot, + current_version: kernel::prelude::entity::EventVersion, + ) -> error_stack::Result<(), KernelError> { + let command = Account::update(account_id.clone(), is_bot, current_version); + + self.account_event_store() + .persist_and_transform(executor, command) + .await?; + + if let Err(e) = self.account_signal().emit(account_id).await { + tracing::warn!("Failed to emit account signal: {:?}", e); + } + + Ok(()) + } + + async fn deactivate( + &self, + executor: &mut Self::Executor, + account_id: AccountId, + current_version: kernel::prelude::entity::EventVersion, + ) -> error_stack::Result<(), KernelError> { + let command = Account::deactivate(account_id.clone(), current_version); + + self.account_event_store() + .persist_and_transform(executor, command) + .await?; + + if let Err(e) = self.account_signal().emit(account_id).await { + tracing::warn!("Failed to emit account signal: {:?}", e); + } + + Ok(()) + } +} + +pub trait DependOnAccountCommandProcessor: DependOnDatabaseConnection + Send + Sync { + type AccountCommandProcessor: AccountCommandProcessor< + Executor = 
<::DatabaseConnection as DatabaseConnection>::Executor, + >; + fn account_command_processor(&self) -> &Self::AccountCommandProcessor; +} + +impl DependOnAccountCommandProcessor for T +where + T: DependOnAccountEventStore + + DependOnAccountSignal + + DependOnDatabaseConnection + + Send + + Sync + + 'static, +{ + type AccountCommandProcessor = Self; + fn account_command_processor(&self) -> &Self::AccountCommandProcessor { + self + } +} + +// --- AccountQueryProcessor --- + +pub trait AccountQueryProcessor: Send + Sync + 'static { + type Executor: Executor; + + fn find_by_id( + &self, + executor: &mut Self::Executor, + id: &AccountId, + ) -> impl Future, KernelError>> + Send; + + fn find_by_auth_id( + &self, + executor: &mut Self::Executor, + auth_id: &AuthAccountId, + ) -> impl Future, KernelError>> + Send; + + fn find_by_nanoid( + &self, + executor: &mut Self::Executor, + nanoid: &Nanoid, + ) -> impl Future, KernelError>> + Send; + + fn find_by_nanoids( + &self, + executor: &mut Self::Executor, + nanoids: &[Nanoid], + ) -> impl Future, KernelError>> + Send; +} + +impl AccountQueryProcessor for T +where + T: DependOnAccountReadModel + Send + Sync + 'static, +{ + type Executor = + <::AccountReadModel as AccountReadModel>::Executor; + + async fn find_by_id( + &self, + executor: &mut Self::Executor, + id: &AccountId, + ) -> error_stack::Result, KernelError> { + self.account_read_model().find_by_id(executor, id).await + } + + async fn find_by_auth_id( + &self, + executor: &mut Self::Executor, + auth_id: &AuthAccountId, + ) -> error_stack::Result, KernelError> { + self.account_read_model() + .find_by_auth_id(executor, auth_id) + .await + } + + async fn find_by_nanoid( + &self, + executor: &mut Self::Executor, + nanoid: &Nanoid, + ) -> error_stack::Result, KernelError> { + self.account_read_model() + .find_by_nanoid(executor, nanoid) + .await + } + + async fn find_by_nanoids( + &self, + executor: &mut Self::Executor, + nanoids: &[Nanoid], + ) -> error_stack::Result, 
KernelError> { + self.account_read_model() + .find_by_nanoids(executor, nanoids) + .await + } +} + +pub trait DependOnAccountQueryProcessor: DependOnDatabaseConnection + Send + Sync { + type AccountQueryProcessor: AccountQueryProcessor< + Executor = <::DatabaseConnection as DatabaseConnection>::Executor, + >; + fn account_query_processor(&self) -> &Self::AccountQueryProcessor; +} + +impl DependOnAccountQueryProcessor for T +where + T: DependOnAccountReadModel + DependOnDatabaseConnection + Send + Sync + 'static, +{ + type AccountQueryProcessor = Self; + fn account_query_processor(&self) -> &Self::AccountQueryProcessor { + self + } +} diff --git a/adapter/src/processor/auth_account.rs b/adapter/src/processor/auth_account.rs new file mode 100644 index 0000000..e773b2f --- /dev/null +++ b/adapter/src/processor/auth_account.rs @@ -0,0 +1,159 @@ +use error_stack::Report; +use kernel::interfaces::database::{DatabaseConnection, DependOnDatabaseConnection, Executor}; +use kernel::interfaces::event::EventApplier; +use kernel::interfaces::event_store::{AuthAccountEventStore, DependOnAuthAccountEventStore}; +use kernel::interfaces::read_model::{AuthAccountReadModel, DependOnAuthAccountReadModel}; +use kernel::interfaces::signal::Signal; +use kernel::prelude::entity::{AuthAccount, AuthAccountClientId, AuthAccountId, AuthHostId}; +use kernel::KernelError; +use std::future::Future; + +// --- Signal DI trait (adapter-specific) --- + +pub trait DependOnAuthAccountSignal: Send + Sync { + type AuthAccountSignal: Signal + Send + Sync + 'static; + fn auth_account_signal(&self) -> &Self::AuthAccountSignal; +} + +// --- AuthAccountCommandProcessor --- + +pub trait AuthAccountCommandProcessor: Send + Sync + 'static { + type Executor: Executor; + + fn create( + &self, + executor: &mut Self::Executor, + host: AuthHostId, + client_id: AuthAccountClientId, + ) -> impl Future> + Send; +} + +impl AuthAccountCommandProcessor for T +where + T: DependOnAuthAccountEventStore + + 
DependOnAuthAccountReadModel + + DependOnAuthAccountSignal + + Send + + Sync + + 'static, +{ + type Executor = <::AuthAccountEventStore as AuthAccountEventStore>::Executor; + + async fn create( + &self, + executor: &mut Self::Executor, + host: AuthHostId, + client_id: AuthAccountClientId, + ) -> error_stack::Result { + let auth_account_id = AuthAccountId::default(); + let command = AuthAccount::create(auth_account_id.clone(), host, client_id); + + let event_envelope = self + .auth_account_event_store() + .persist_and_transform(executor, command) + .await?; + + let mut auth_account = None; + AuthAccount::apply(&mut auth_account, event_envelope)?; + let auth_account = auth_account.ok_or_else(|| { + Report::new(KernelError::Internal) + .attach_printable("Failed to construct auth account from created event") + })?; + + self.auth_account_read_model() + .create(executor, &auth_account) + .await?; + + if let Err(e) = self.auth_account_signal().emit(auth_account_id).await { + tracing::warn!("Failed to emit auth account signal: {:?}", e); + } + + Ok(auth_account) + } +} + +pub trait DependOnAuthAccountCommandProcessor: DependOnDatabaseConnection + Send + Sync { + type AuthAccountCommandProcessor: AuthAccountCommandProcessor< + Executor = <::DatabaseConnection as DatabaseConnection>::Executor, + >; + fn auth_account_command_processor(&self) -> &Self::AuthAccountCommandProcessor; +} + +impl DependOnAuthAccountCommandProcessor for T +where + T: DependOnAuthAccountEventStore + + DependOnAuthAccountReadModel + + DependOnAuthAccountSignal + + DependOnDatabaseConnection + + Send + + Sync + + 'static, +{ + type AuthAccountCommandProcessor = Self; + fn auth_account_command_processor(&self) -> &Self::AuthAccountCommandProcessor { + self + } +} + +// --- AuthAccountQueryProcessor --- + +pub trait AuthAccountQueryProcessor: Send + Sync + 'static { + type Executor: Executor; + + fn find_by_id( + &self, + executor: &mut Self::Executor, + id: &AuthAccountId, + ) -> impl Future, 
KernelError>> + Send; + + fn find_by_client_id( + &self, + executor: &mut Self::Executor, + client_id: &AuthAccountClientId, + ) -> impl Future, KernelError>> + Send; +} + +impl AuthAccountQueryProcessor for T +where + T: DependOnAuthAccountReadModel + Send + Sync + 'static, +{ + type Executor = + <::AuthAccountReadModel as AuthAccountReadModel>::Executor; + + async fn find_by_id( + &self, + executor: &mut Self::Executor, + id: &AuthAccountId, + ) -> error_stack::Result, KernelError> { + self.auth_account_read_model() + .find_by_id(executor, id) + .await + } + + async fn find_by_client_id( + &self, + executor: &mut Self::Executor, + client_id: &AuthAccountClientId, + ) -> error_stack::Result, KernelError> { + self.auth_account_read_model() + .find_by_client_id(executor, client_id) + .await + } +} + +pub trait DependOnAuthAccountQueryProcessor: DependOnDatabaseConnection + Send + Sync { + type AuthAccountQueryProcessor: AuthAccountQueryProcessor< + Executor = <::DatabaseConnection as DatabaseConnection>::Executor, + >; + fn auth_account_query_processor(&self) -> &Self::AuthAccountQueryProcessor; +} + +impl DependOnAuthAccountQueryProcessor for T +where + T: DependOnAuthAccountReadModel + DependOnDatabaseConnection + Send + Sync + 'static, +{ + type AuthAccountQueryProcessor = Self; + fn auth_account_query_processor(&self) -> &Self::AuthAccountQueryProcessor { + self + } +} diff --git a/adapter/src/processor/metadata.rs b/adapter/src/processor/metadata.rs new file mode 100644 index 0000000..0d92437 --- /dev/null +++ b/adapter/src/processor/metadata.rs @@ -0,0 +1,226 @@ +use error_stack::Report; +use kernel::interfaces::database::{DatabaseConnection, DependOnDatabaseConnection, Executor}; +use kernel::interfaces::event::EventApplier; +use kernel::interfaces::event_store::{DependOnMetadataEventStore, MetadataEventStore}; +use kernel::interfaces::read_model::{DependOnMetadataReadModel, MetadataReadModel}; +use kernel::interfaces::signal::Signal; +use 
kernel::prelude::entity::{ + AccountId, EventVersion, Metadata, MetadataContent, MetadataId, MetadataLabel, Nanoid, +}; +use kernel::KernelError; +use std::future::Future; + +// --- Signal DI trait (adapter-specific) --- + +pub trait DependOnMetadataSignal: Send + Sync { + type MetadataSignal: Signal + Send + Sync + 'static; + fn metadata_signal(&self) -> &Self::MetadataSignal; +} + +// --- MetadataCommandProcessor --- + +pub trait MetadataCommandProcessor: Send + Sync + 'static { + type Executor: Executor; + + fn create( + &self, + executor: &mut Self::Executor, + account_id: AccountId, + label: MetadataLabel, + content: MetadataContent, + nano_id: Nanoid, + ) -> impl Future> + Send; + + fn update( + &self, + executor: &mut Self::Executor, + metadata_id: MetadataId, + label: MetadataLabel, + content: MetadataContent, + current_version: EventVersion, + ) -> impl Future> + Send; + + fn delete( + &self, + executor: &mut Self::Executor, + metadata_id: MetadataId, + current_version: EventVersion, + ) -> impl Future> + Send; +} + +impl MetadataCommandProcessor for T +where + T: DependOnMetadataEventStore + DependOnMetadataSignal + Send + Sync + 'static, +{ + type Executor = + <::MetadataEventStore as MetadataEventStore>::Executor; + + async fn create( + &self, + executor: &mut Self::Executor, + account_id: AccountId, + label: MetadataLabel, + content: MetadataContent, + nano_id: Nanoid, + ) -> error_stack::Result { + let metadata_id = MetadataId::new(uuid::Uuid::now_v7()); + let command = Metadata::create(metadata_id.clone(), account_id, label, content, nano_id); + + let event_envelope = self + .metadata_event_store() + .persist_and_transform(executor, command) + .await?; + + let mut metadata = None; + Metadata::apply(&mut metadata, event_envelope)?; + let metadata = metadata.ok_or_else(|| { + Report::new(KernelError::Internal) + .attach_printable("Failed to construct metadata from created event") + })?; + + if let Err(e) = self.metadata_signal().emit(metadata_id).await 
{ + tracing::warn!("Failed to emit metadata signal: {:?}", e); + } + + Ok(metadata) + } + + async fn update( + &self, + executor: &mut Self::Executor, + metadata_id: MetadataId, + label: MetadataLabel, + content: MetadataContent, + current_version: EventVersion, + ) -> error_stack::Result<(), KernelError> { + let command = Metadata::update(metadata_id.clone(), label, content, current_version); + + self.metadata_event_store() + .persist_and_transform(executor, command) + .await?; + + if let Err(e) = self.metadata_signal().emit(metadata_id).await { + tracing::warn!("Failed to emit metadata signal: {:?}", e); + } + + Ok(()) + } + + async fn delete( + &self, + executor: &mut Self::Executor, + metadata_id: MetadataId, + current_version: EventVersion, + ) -> error_stack::Result<(), KernelError> { + let command = Metadata::delete(metadata_id.clone(), current_version); + + self.metadata_event_store() + .persist_and_transform(executor, command) + .await?; + + if let Err(e) = self.metadata_signal().emit(metadata_id).await { + tracing::warn!("Failed to emit metadata signal: {:?}", e); + } + + Ok(()) + } +} + +pub trait DependOnMetadataCommandProcessor: DependOnDatabaseConnection + Send + Sync { + type MetadataCommandProcessor: MetadataCommandProcessor< + Executor = <::DatabaseConnection as DatabaseConnection>::Executor, + >; + fn metadata_command_processor(&self) -> &Self::MetadataCommandProcessor; +} + +impl DependOnMetadataCommandProcessor for T +where + T: DependOnMetadataEventStore + + DependOnMetadataSignal + + DependOnDatabaseConnection + + Send + + Sync + + 'static, +{ + type MetadataCommandProcessor = Self; + fn metadata_command_processor(&self) -> &Self::MetadataCommandProcessor { + self + } +} + +// --- MetadataQueryProcessor --- + +pub trait MetadataQueryProcessor: Send + Sync + 'static { + type Executor: Executor; + + fn find_by_id( + &self, + executor: &mut Self::Executor, + id: &MetadataId, + ) -> impl Future, KernelError>> + Send; + + fn find_by_account_id( + 
&self, + executor: &mut Self::Executor, + account_id: &AccountId, + ) -> impl Future, KernelError>> + Send; + + fn find_by_account_ids( + &self, + executor: &mut Self::Executor, + account_ids: &[AccountId], + ) -> impl Future, KernelError>> + Send; +} + +impl MetadataQueryProcessor for T +where + T: DependOnMetadataReadModel + Send + Sync + 'static, +{ + type Executor = + <::MetadataReadModel as MetadataReadModel>::Executor; + + async fn find_by_id( + &self, + executor: &mut Self::Executor, + id: &MetadataId, + ) -> error_stack::Result, KernelError> { + self.metadata_read_model().find_by_id(executor, id).await + } + + async fn find_by_account_id( + &self, + executor: &mut Self::Executor, + account_id: &AccountId, + ) -> error_stack::Result, KernelError> { + self.metadata_read_model() + .find_by_account_id(executor, account_id) + .await + } + + async fn find_by_account_ids( + &self, + executor: &mut Self::Executor, + account_ids: &[AccountId], + ) -> error_stack::Result, KernelError> { + self.metadata_read_model() + .find_by_account_ids(executor, account_ids) + .await + } +} + +pub trait DependOnMetadataQueryProcessor: DependOnDatabaseConnection + Send + Sync { + type MetadataQueryProcessor: MetadataQueryProcessor< + Executor = <::DatabaseConnection as DatabaseConnection>::Executor, + >; + fn metadata_query_processor(&self) -> &Self::MetadataQueryProcessor; +} + +impl DependOnMetadataQueryProcessor for T +where + T: DependOnMetadataReadModel + DependOnDatabaseConnection + Send + Sync + 'static, +{ + type MetadataQueryProcessor = Self; + fn metadata_query_processor(&self) -> &Self::MetadataQueryProcessor { + self + } +} diff --git a/adapter/src/processor/profile.rs b/adapter/src/processor/profile.rs new file mode 100644 index 0000000..3200f78 --- /dev/null +++ b/adapter/src/processor/profile.rs @@ -0,0 +1,224 @@ +use error_stack::Report; +use kernel::interfaces::database::{DatabaseConnection, DependOnDatabaseConnection, Executor}; +use 
kernel::interfaces::event::EventApplier; +use kernel::interfaces::event_store::{DependOnProfileEventStore, ProfileEventStore}; +use kernel::interfaces::read_model::{DependOnProfileReadModel, ProfileReadModel}; +use kernel::interfaces::signal::Signal; +use kernel::prelude::entity::{ + AccountId, EventVersion, ImageId, Nanoid, Profile, ProfileDisplayName, ProfileId, + ProfileSummary, +}; +use kernel::KernelError; +use std::future::Future; + +// --- Signal DI trait (adapter-specific) --- + +pub trait DependOnProfileSignal: Send + Sync { + type ProfileSignal: Signal + Send + Sync + 'static; + fn profile_signal(&self) -> &Self::ProfileSignal; +} + +// --- ProfileCommandProcessor --- + +pub trait ProfileCommandProcessor: Send + Sync + 'static { + type Executor: Executor; + + fn create( + &self, + executor: &mut Self::Executor, + account_id: AccountId, + display_name: Option, + summary: Option, + icon: Option, + banner: Option, + nano_id: Nanoid, + ) -> impl Future> + Send; + + fn update( + &self, + executor: &mut Self::Executor, + profile_id: ProfileId, + display_name: Option, + summary: Option, + icon: Option, + banner: Option, + current_version: EventVersion, + ) -> impl Future> + Send; +} + +impl ProfileCommandProcessor for T +where + T: DependOnProfileEventStore + DependOnProfileSignal + Send + Sync + 'static, +{ + type Executor = + <::ProfileEventStore as ProfileEventStore>::Executor; + + async fn create( + &self, + executor: &mut Self::Executor, + account_id: AccountId, + display_name: Option, + summary: Option, + icon: Option, + banner: Option, + nano_id: Nanoid, + ) -> error_stack::Result { + let profile_id = ProfileId::new(uuid::Uuid::now_v7()); + let command = Profile::create( + profile_id.clone(), + account_id, + display_name, + summary, + icon, + banner, + nano_id, + ); + + let event_envelope = self + .profile_event_store() + .persist_and_transform(executor, command) + .await?; + + let mut profile = None; + Profile::apply(&mut profile, event_envelope)?; + let 
profile = profile.ok_or_else(|| { + Report::new(KernelError::Internal) + .attach_printable("Failed to construct profile from created event") + })?; + + if let Err(e) = self.profile_signal().emit(profile_id).await { + tracing::warn!("Failed to emit profile signal: {:?}", e); + } + + Ok(profile) + } + + async fn update( + &self, + executor: &mut Self::Executor, + profile_id: ProfileId, + display_name: Option, + summary: Option, + icon: Option, + banner: Option, + current_version: EventVersion, + ) -> error_stack::Result<(), KernelError> { + let command = Profile::update( + profile_id.clone(), + display_name, + summary, + icon, + banner, + current_version, + ); + + self.profile_event_store() + .persist_and_transform(executor, command) + .await?; + + if let Err(e) = self.profile_signal().emit(profile_id).await { + tracing::warn!("Failed to emit profile signal: {:?}", e); + } + + Ok(()) + } +} + +pub trait DependOnProfileCommandProcessor: DependOnDatabaseConnection + Send + Sync { + type ProfileCommandProcessor: ProfileCommandProcessor< + Executor = <::DatabaseConnection as DatabaseConnection>::Executor, + >; + fn profile_command_processor(&self) -> &Self::ProfileCommandProcessor; +} + +impl DependOnProfileCommandProcessor for T +where + T: DependOnProfileEventStore + + DependOnProfileSignal + + DependOnDatabaseConnection + + Send + + Sync + + 'static, +{ + type ProfileCommandProcessor = Self; + fn profile_command_processor(&self) -> &Self::ProfileCommandProcessor { + self + } +} + +// --- ProfileQueryProcessor --- + +pub trait ProfileQueryProcessor: Send + Sync + 'static { + type Executor: Executor; + + fn find_by_id( + &self, + executor: &mut Self::Executor, + id: &ProfileId, + ) -> impl Future, KernelError>> + Send; + + fn find_by_account_id( + &self, + executor: &mut Self::Executor, + account_id: &AccountId, + ) -> impl Future, KernelError>> + Send; + + fn find_by_account_ids( + &self, + executor: &mut Self::Executor, + account_ids: &[AccountId], + ) -> impl Future, 
KernelError>> + Send; +} + +impl ProfileQueryProcessor for T +where + T: DependOnProfileReadModel + Send + Sync + 'static, +{ + type Executor = + <::ProfileReadModel as ProfileReadModel>::Executor; + + async fn find_by_id( + &self, + executor: &mut Self::Executor, + id: &ProfileId, + ) -> error_stack::Result, KernelError> { + self.profile_read_model().find_by_id(executor, id).await + } + + async fn find_by_account_id( + &self, + executor: &mut Self::Executor, + account_id: &AccountId, + ) -> error_stack::Result, KernelError> { + self.profile_read_model() + .find_by_account_id(executor, account_id) + .await + } + + async fn find_by_account_ids( + &self, + executor: &mut Self::Executor, + account_ids: &[AccountId], + ) -> error_stack::Result, KernelError> { + self.profile_read_model() + .find_by_account_ids(executor, account_ids) + .await + } +} + +pub trait DependOnProfileQueryProcessor: DependOnDatabaseConnection + Send + Sync { + type ProfileQueryProcessor: ProfileQueryProcessor< + Executor = <::DatabaseConnection as DatabaseConnection>::Executor, + >; + fn profile_query_processor(&self) -> &Self::ProfileQueryProcessor; +} + +impl DependOnProfileQueryProcessor for T +where + T: DependOnProfileReadModel + DependOnDatabaseConnection + Send + Sync + 'static, +{ + type ProfileQueryProcessor = Self; + fn profile_query_processor(&self) -> &Self::ProfileQueryProcessor { + self + } +} diff --git a/application/Cargo.toml b/application/Cargo.toml index 5f48561..858dc6b 100644 --- a/application/Cargo.toml +++ b/application/Cargo.toml @@ -7,7 +7,21 @@ authors.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -async-trait = "0.1" +time = { workspace = true } +vodca = { workspace = true } +serde = { workspace = true } + error-stack = { workspace = true } +serde_json = "1" + +adapter = { path = "../adapter" } kernel = { path = "../kernel" } + +uuid = { workspace = true } + +[dev-dependencies] +tokio = 
{ workspace = true, features = ["macros", "test-util"] } +tempfile = "3" + +driver.path = "../driver" \ No newline at end of file diff --git a/application/src/lib.rs b/application/src/lib.rs new file mode 100644 index 0000000..6936a4d --- /dev/null +++ b/application/src/lib.rs @@ -0,0 +1,3 @@ +pub mod permission; +pub mod service; +pub mod transfer; diff --git a/application/src/main.rs b/application/src/main.rs deleted file mode 100644 index e7a11a9..0000000 --- a/application/src/main.rs +++ /dev/null @@ -1,3 +0,0 @@ -fn main() { - println!("Hello, world!"); -} diff --git a/application/src/permission.rs b/application/src/permission.rs new file mode 100644 index 0000000..5fd9b10 --- /dev/null +++ b/application/src/permission.rs @@ -0,0 +1,58 @@ +use error_stack::Report; +use kernel::interfaces::permission::{ + DependOnPermissionChecker, Permission, PermissionChecker, PermissionReq, Relation, Resource, +}; +use kernel::prelude::entity::{AccountId, AuthAccountId}; +use kernel::KernelError; + +pub fn account_view(account_id: &AccountId) -> Permission { + Permission::new(PermissionReq::new( + Resource::Account(account_id.clone()), + [Relation::Owner, Relation::Editor, Relation::Signer], + )) +} + +pub fn account_edit(account_id: &AccountId) -> Permission { + Permission::new(PermissionReq::new( + Resource::Account(account_id.clone()), + [Relation::Owner, Relation::Editor], + )) +} + +pub fn account_deactivate(account_id: &AccountId) -> Permission { + Permission::new(PermissionReq::new( + Resource::Account(account_id.clone()), + [Relation::Owner], + )) +} + +pub fn account_sign(account_id: &AccountId) -> Permission { + Permission::new(PermissionReq::new( + Resource::Account(account_id.clone()), + [Relation::Owner, Relation::Signer], + )) +} + +pub fn instance_moderate() -> Permission { + Permission::new(PermissionReq::new( + Resource::Instance, + [Relation::Admin, Relation::Moderator], + )) +} + +pub async fn check_permission( + deps: &T, + subject: &AuthAccountId, + 
permission: &Permission, +) -> error_stack::Result<(), KernelError> { + if !deps + .permission_checker() + .satisfies(subject, permission) + .await? + { + return Err( + Report::new(KernelError::PermissionDenied).attach_printable("Insufficient permissions") + ); + } + Ok(()) +} diff --git a/application/src/service.rs b/application/src/service.rs new file mode 100644 index 0000000..63247ca --- /dev/null +++ b/application/src/service.rs @@ -0,0 +1,4 @@ +pub mod account; +pub mod auth_account; +pub mod metadata; +pub mod profile; diff --git a/application/src/service/account.rs b/application/src/service/account.rs new file mode 100644 index 0000000..fb3e6db --- /dev/null +++ b/application/src/service/account.rs @@ -0,0 +1,268 @@ +use crate::permission::{account_deactivate, account_edit, account_view, check_permission}; +use crate::transfer::account::AccountDto; +use crate::transfer::pagination::{apply_pagination, Pagination}; +use adapter::crypto::{DependOnSigningKeyGenerator, SigningKeyGenerator}; +use adapter::processor::account::{ + AccountCommandProcessor, AccountQueryProcessor, DependOnAccountCommandProcessor, + DependOnAccountQueryProcessor, +}; +use error_stack::Report; +use kernel::interfaces::crypto::{DependOnPasswordProvider, PasswordProvider}; +use kernel::interfaces::database::DatabaseConnection; +use kernel::interfaces::permission::{ + DependOnPermissionChecker, DependOnPermissionWriter, PermissionWriter, Relation, Resource, +}; +use kernel::prelude::entity::{ + Account, AccountIsBot, AccountName, AccountPrivateKey, AccountPublicKey, AuthAccountId, Nanoid, +}; +use kernel::KernelError; +use serde_json; +use std::future::Future; + +pub trait GetAccountUseCase: + 'static + Sync + Send + DependOnAccountQueryProcessor + DependOnPermissionChecker +{ + fn get_all_accounts( + &self, + auth_account_id: &AuthAccountId, + Pagination { + direction, + cursor, + limit, + }: Pagination, + ) -> impl Future>, KernelError>> + Send + { + async move { + let mut transaction = 
self.database_connection().begin_transaction().await?; + let accounts = self + .account_query_processor() + .find_by_auth_id(&mut transaction, auth_account_id) + .await?; + let cursor = if let Some(cursor) = cursor { + let id: Nanoid = Nanoid::new(cursor); + self.account_query_processor() + .find_by_nanoid(&mut transaction, &id) + .await? + } else { + None + }; + let accounts = apply_pagination(accounts, limit, cursor, direction); + Ok(Some(accounts.into_iter().map(AccountDto::from).collect())) + } + } + + fn get_accounts_by_ids( + &self, + auth_account_id: &AuthAccountId, + ids: Vec, + ) -> impl Future, KernelError>> + Send { + async move { + let mut transaction = self.database_connection().begin_transaction().await?; + + let nanoids: Vec> = + ids.into_iter().map(Nanoid::::new).collect(); + let accounts = self + .account_query_processor() + .find_by_nanoids(&mut transaction, &nanoids) + .await?; + + let mut result = Vec::new(); + for account in accounts { + if check_permission(self, auth_account_id, &account_view(account.id())) + .await + .is_ok() + { + result.push(AccountDto::from(account)); + } + } + + Ok(result) + } + } +} + +impl GetAccountUseCase for T where + T: 'static + DependOnAccountQueryProcessor + DependOnPermissionChecker +{ +} + +pub trait CreateAccountUseCase: + 'static + + Sync + + Send + + DependOnAccountCommandProcessor + + DependOnPasswordProvider + + DependOnSigningKeyGenerator + + DependOnPermissionWriter +{ + fn create_account( + &self, + auth_account_id: AuthAccountId, + name: String, + is_bot: bool, + ) -> impl Future> + Send { + async move { + let mut transaction = self.database_connection().begin_transaction().await?; + + // Generate key pair + let master_password = self.password_provider().get_password()?; + let key_pair = self.signing_key_generator().generate(&master_password)?; + + let encrypted_private_key_json = serde_json::to_string(&key_pair.encrypted_private_key) + .map_err(|e| { + Report::new(KernelError::Internal) + 
.attach_printable(format!("Failed to serialize encrypted private key: {e}")) + })?; + + let private_key = AccountPrivateKey::new(encrypted_private_key_json); + let public_key = AccountPublicKey::new(key_pair.public_key_pem); + let account_name = AccountName::new(name); + let account_is_bot = AccountIsBot::new(is_bot); + + let account = self + .account_command_processor() + .create( + &mut transaction, + account_name, + private_key, + public_key, + account_is_bot, + auth_account_id.clone(), + ) + .await?; + + self.permission_writer() + .create_relation( + &Resource::Account(account.id().clone()), + Relation::Owner, + &auth_account_id, + ) + .await?; + + Ok(AccountDto::from(account)) + } + } +} + +impl CreateAccountUseCase for T where + T: 'static + + DependOnAccountCommandProcessor + + DependOnPasswordProvider + + DependOnSigningKeyGenerator + + DependOnPermissionWriter +{ +} + +pub trait EditAccountUseCase: + 'static + + Sync + + Send + + DependOnAccountCommandProcessor + + DependOnAccountQueryProcessor + + DependOnPermissionChecker +{ + fn edit_account( + &self, + auth_account_id: &AuthAccountId, + account_id: String, + is_bot: bool, + ) -> impl Future> + Send { + async move { + let mut transaction = self.database_connection().begin_transaction().await?; + + let nanoid = Nanoid::::new(account_id); + let account = self + .account_query_processor() + .find_by_nanoid(&mut transaction, &nanoid) + .await? 
+ .ok_or_else(|| { + Report::new(KernelError::NotFound).attach_printable(format!( + "Account not found with nanoid: {}", + nanoid.as_ref() + )) + })?; + + check_permission(self, auth_account_id, &account_edit(account.id())).await?; + + let account_id = account.id().clone(); + let current_version = account.version().clone(); + self.account_command_processor() + .update( + &mut transaction, + account_id, + AccountIsBot::new(is_bot), + current_version, + ) + .await?; + + Ok(()) + } + } +} + +impl EditAccountUseCase for T where + T: 'static + + DependOnAccountCommandProcessor + + DependOnAccountQueryProcessor + + DependOnPermissionChecker +{ +} + +pub trait DeactivateAccountUseCase: + 'static + + Sync + + Send + + DependOnAccountCommandProcessor + + DependOnAccountQueryProcessor + + DependOnPermissionChecker + + DependOnPermissionWriter +{ + fn deactivate_account( + &self, + auth_account_id: &AuthAccountId, + account_id: String, + ) -> impl Future> + Send { + async move { + let mut transaction = self.database_connection().begin_transaction().await?; + + let nanoid = Nanoid::::new(account_id); + let account = self + .account_query_processor() + .find_by_nanoid(&mut transaction, &nanoid) + .await? 
+ .ok_or_else(|| { + Report::new(KernelError::NotFound).attach_printable(format!( + "Account not found with nanoid: {}", + nanoid.as_ref() + )) + })?; + + check_permission(self, auth_account_id, &account_deactivate(account.id())).await?; + + let account_id = account.id().clone(); + let current_version = account.version().clone(); + self.account_command_processor() + .deactivate(&mut transaction, account_id.clone(), current_version) + .await?; + + self.permission_writer() + .delete_relation( + &Resource::Account(account_id), + Relation::Owner, + auth_account_id, + ) + .await?; + + Ok(()) + } + } +} + +impl DeactivateAccountUseCase for T where + T: 'static + + DependOnAccountCommandProcessor + + DependOnAccountQueryProcessor + + DependOnPermissionChecker + + DependOnPermissionWriter +{ +} diff --git a/application/src/service/auth_account.rs b/application/src/service/auth_account.rs new file mode 100644 index 0000000..111c58a --- /dev/null +++ b/application/src/service/auth_account.rs @@ -0,0 +1,72 @@ +use kernel::interfaces::database::{DatabaseConnection, DependOnDatabaseConnection}; +use kernel::interfaces::event::EventApplier; +use kernel::interfaces::event_store::{AuthAccountEventStore, DependOnAuthAccountEventStore}; +use kernel::interfaces::read_model::{AuthAccountReadModel, DependOnAuthAccountReadModel}; +use kernel::prelude::entity::{AuthAccount, AuthAccountId, EventId}; +use kernel::KernelError; +use std::future::Future; + +pub trait UpdateAuthAccount: + 'static + DependOnDatabaseConnection + DependOnAuthAccountReadModel + DependOnAuthAccountEventStore +{ + fn update_auth_account( + &self, + auth_account_id: AuthAccountId, + ) -> impl Future> { + async move { + let mut transaction = self.database_connection().begin_transaction().await?; + let existing = self + .auth_account_read_model() + .find_by_id(&mut transaction, &auth_account_id) + .await?; + let event_id = EventId::from(auth_account_id.clone()); + + if let Some(auth_account) = existing { + let events = 
self + .auth_account_event_store() + .find_by_id(&mut transaction, &event_id, Some(auth_account.version())) + .await?; + if events + .last() + .map(|event| &event.version != auth_account.version()) + .unwrap_or(false) + { + let mut auth_account = Some(auth_account); + for event in events { + AuthAccount::apply(&mut auth_account, event)?; + } + if let Some(auth_account) = auth_account { + self.auth_account_read_model() + .update(&mut transaction, &auth_account) + .await?; + } + } + } else { + let events = self + .auth_account_event_store() + .find_by_id(&mut transaction, &event_id, None) + .await?; + if !events.is_empty() { + let mut auth_account = None; + for event in events { + AuthAccount::apply(&mut auth_account, event)?; + } + if let Some(auth_account) = auth_account { + self.auth_account_read_model() + .create(&mut transaction, &auth_account) + .await?; + } + } + } + Ok(()) + } + } +} + +impl UpdateAuthAccount for T where + T: 'static + + DependOnDatabaseConnection + + DependOnAuthAccountReadModel + + DependOnAuthAccountEventStore +{ +} diff --git a/application/src/service/metadata.rs b/application/src/service/metadata.rs new file mode 100644 index 0000000..0412e8e --- /dev/null +++ b/application/src/service/metadata.rs @@ -0,0 +1,365 @@ +use crate::permission::{account_edit, account_view, check_permission}; +use crate::transfer::metadata::MetadataDto; +use adapter::processor::account::{AccountQueryProcessor, DependOnAccountQueryProcessor}; +use adapter::processor::metadata::{ + DependOnMetadataCommandProcessor, DependOnMetadataQueryProcessor, MetadataCommandProcessor, + MetadataQueryProcessor, +}; +use error_stack::Report; +use kernel::interfaces::database::{DatabaseConnection, DependOnDatabaseConnection}; +use kernel::interfaces::event::EventApplier; +use kernel::interfaces::event_store::{DependOnMetadataEventStore, MetadataEventStore}; +use kernel::interfaces::permission::DependOnPermissionChecker; +use 
kernel::interfaces::read_model::{DependOnMetadataReadModel, MetadataReadModel}; +use kernel::prelude::entity::{ + Account, AuthAccountId, EventId, Metadata, MetadataContent, MetadataId, MetadataLabel, Nanoid, +}; +use kernel::KernelError; +use std::future::Future; + +pub trait UpdateMetadata: + 'static + DependOnDatabaseConnection + DependOnMetadataReadModel + DependOnMetadataEventStore +{ + fn update_metadata( + &self, + metadata_id: MetadataId, + ) -> impl Future> { + async move { + let mut transaction = self.database_connection().begin_transaction().await?; + let existing = self + .metadata_read_model() + .find_by_id(&mut transaction, &metadata_id) + .await?; + let event_id = EventId::from(metadata_id.clone()); + + if let Some(metadata) = existing { + let events = self + .metadata_event_store() + .find_by_id(&mut transaction, &event_id, Some(metadata.version())) + .await?; + if events + .last() + .map(|event| &event.version != metadata.version()) + .unwrap_or(false) + { + let mut metadata = Some(metadata); + for event in events { + Metadata::apply(&mut metadata, event)?; + } + if let Some(metadata) = metadata { + self.metadata_read_model() + .update(&mut transaction, &metadata) + .await?; + } else { + self.metadata_read_model() + .delete(&mut transaction, &metadata_id) + .await?; + } + } + } else { + let events = self + .metadata_event_store() + .find_by_id(&mut transaction, &event_id, None) + .await?; + if !events.is_empty() { + let mut metadata = None; + for event in events { + Metadata::apply(&mut metadata, event)?; + } + if let Some(metadata) = metadata { + self.metadata_read_model() + .create(&mut transaction, &metadata) + .await?; + } + } + } + Ok(()) + } + } +} + +impl UpdateMetadata for T where + T: 'static + + DependOnDatabaseConnection + + DependOnMetadataReadModel + + DependOnMetadataEventStore +{ +} + +pub trait GetMetadataUseCase: + 'static + + Sync + + Send + + DependOnMetadataQueryProcessor + + DependOnAccountQueryProcessor + + 
DependOnPermissionChecker +{ + fn get_metadata_batch( + &self, + auth_account_id: &AuthAccountId, + account_nanoids: Vec, + ) -> impl Future, KernelError>> + Send { + async move { + let mut transaction = self.database_connection().begin_transaction().await?; + + let nanoids: Vec> = account_nanoids + .into_iter() + .map(Nanoid::::new) + .collect(); + let accounts = self + .account_query_processor() + .find_by_nanoids(&mut transaction, &nanoids) + .await?; + + let mut permitted_accounts = Vec::new(); + for account in accounts { + if check_permission(self, auth_account_id, &account_view(account.id())) + .await + .is_ok() + { + permitted_accounts.push(account); + } + } + + if permitted_accounts.is_empty() { + return Ok(Vec::new()); + } + + let account_ids: Vec<_> = permitted_accounts.iter().map(|a| a.id().clone()).collect(); + let nanoid_map: std::collections::HashMap<_, _> = permitted_accounts + .iter() + .map(|a| (a.id().clone(), a.nanoid().as_ref().to_string())) + .collect(); + + let metadata_list = self + .metadata_query_processor() + .find_by_account_ids(&mut transaction, &account_ids) + .await?; + + Ok(metadata_list + .into_iter() + .filter_map(|metadata| { + let account_nanoid = nanoid_map.get(metadata.account_id())?.clone(); + Some(MetadataDto::new(metadata, account_nanoid)) + }) + .collect()) + } + } +} + +impl GetMetadataUseCase for T where + T: 'static + + Sync + + Send + + DependOnMetadataQueryProcessor + + DependOnAccountQueryProcessor + + DependOnPermissionChecker +{ +} + +pub trait CreateMetadataUseCase: + 'static + + Sync + + Send + + DependOnMetadataCommandProcessor + + DependOnAccountQueryProcessor + + DependOnPermissionChecker +{ + fn create_metadata( + &self, + auth_account_id: &AuthAccountId, + account_nanoid: String, + label: String, + content: String, + ) -> impl Future> + Send { + async move { + let mut transaction = self.database_connection().begin_transaction().await?; + + let nanoid = kernel::prelude::entity::Nanoid::::new(account_nanoid); + 
let account = self + .account_query_processor() + .find_by_nanoid(&mut transaction, &nanoid) + .await? + .ok_or_else(|| { + Report::new(KernelError::NotFound).attach_printable(format!( + "Account not found with nanoid: {}", + nanoid.as_ref() + )) + })?; + + check_permission(self, auth_account_id, &account_edit(account.id())).await?; + + let account_nanoid_str = account.nanoid().as_ref().to_string(); + let account_id = account.id().clone(); + let metadata_nanoid = Nanoid::::default(); + let metadata = self + .metadata_command_processor() + .create( + &mut transaction, + account_id, + MetadataLabel::new(label), + MetadataContent::new(content), + metadata_nanoid, + ) + .await?; + + Ok(MetadataDto::new(metadata, account_nanoid_str)) + } + } +} + +impl CreateMetadataUseCase for T where + T: 'static + + Sync + + Send + + DependOnMetadataCommandProcessor + + DependOnAccountQueryProcessor + + DependOnPermissionChecker +{ +} + +pub trait EditMetadataUseCase: + 'static + + Sync + + Send + + DependOnMetadataCommandProcessor + + DependOnMetadataQueryProcessor + + DependOnAccountQueryProcessor + + DependOnPermissionChecker +{ + fn edit_metadata( + &self, + auth_account_id: &AuthAccountId, + account_nanoid: String, + metadata_nanoid: String, + label: String, + content: String, + ) -> impl Future> + Send { + async move { + let mut transaction = self.database_connection().begin_transaction().await?; + + let nanoid = kernel::prelude::entity::Nanoid::::new(account_nanoid); + let account = self + .account_query_processor() + .find_by_nanoid(&mut transaction, &nanoid) + .await? 
+ .ok_or_else(|| { + Report::new(KernelError::NotFound).attach_printable(format!( + "Account not found with nanoid: {}", + nanoid.as_ref() + )) + })?; + + check_permission(self, auth_account_id, &account_edit(account.id())).await?; + + let metadata_list = self + .metadata_query_processor() + .find_by_account_id(&mut transaction, account.id()) + .await?; + + let metadata = metadata_list + .into_iter() + .find(|m| m.nanoid().as_ref() == &metadata_nanoid) + .ok_or_else(|| { + Report::new(KernelError::NotFound).attach_printable(format!( + "Metadata not found with nanoid: {}", + metadata_nanoid + )) + })?; + + let metadata_id = metadata.id().clone(); + let current_version = metadata.version().clone(); + self.metadata_command_processor() + .update( + &mut transaction, + metadata_id, + MetadataLabel::new(label), + MetadataContent::new(content), + current_version, + ) + .await?; + + Ok(()) + } + } +} + +impl EditMetadataUseCase for T where + T: 'static + + Sync + + Send + + DependOnMetadataCommandProcessor + + DependOnMetadataQueryProcessor + + DependOnAccountQueryProcessor + + DependOnPermissionChecker +{ +} + +pub trait DeleteMetadataUseCase: + 'static + + Sync + + Send + + DependOnMetadataCommandProcessor + + DependOnMetadataQueryProcessor + + DependOnAccountQueryProcessor + + DependOnPermissionChecker +{ + fn delete_metadata( + &self, + auth_account_id: &AuthAccountId, + account_nanoid: String, + metadata_nanoid: String, + ) -> impl Future> + Send { + async move { + let mut transaction = self.database_connection().begin_transaction().await?; + + let nanoid = kernel::prelude::entity::Nanoid::::new(account_nanoid); + let account = self + .account_query_processor() + .find_by_nanoid(&mut transaction, &nanoid) + .await? 
+ .ok_or_else(|| { + Report::new(KernelError::NotFound).attach_printable(format!( + "Account not found with nanoid: {}", + nanoid.as_ref() + )) + })?; + + check_permission(self, auth_account_id, &account_edit(account.id())).await?; + + let metadata_list = self + .metadata_query_processor() + .find_by_account_id(&mut transaction, account.id()) + .await?; + + let metadata = metadata_list + .into_iter() + .find(|m| m.nanoid().as_ref() == &metadata_nanoid) + .ok_or_else(|| { + Report::new(KernelError::NotFound).attach_printable(format!( + "Metadata not found with nanoid: {}", + metadata_nanoid + )) + })?; + + let metadata_id = metadata.id().clone(); + let current_version = metadata.version().clone(); + self.metadata_command_processor() + .delete(&mut transaction, metadata_id, current_version) + .await?; + + Ok(()) + } + } +} + +impl DeleteMetadataUseCase for T where + T: 'static + + Sync + + Send + + DependOnMetadataCommandProcessor + + DependOnMetadataQueryProcessor + + DependOnAccountQueryProcessor + + DependOnPermissionChecker +{ +} diff --git a/application/src/service/profile.rs b/application/src/service/profile.rs new file mode 100644 index 0000000..6e1acdb --- /dev/null +++ b/application/src/service/profile.rs @@ -0,0 +1,302 @@ +use crate::permission::{account_edit, account_view, check_permission}; +use crate::transfer::profile::ProfileDto; +use adapter::processor::account::{AccountQueryProcessor, DependOnAccountQueryProcessor}; +use adapter::processor::profile::{ + DependOnProfileCommandProcessor, DependOnProfileQueryProcessor, ProfileCommandProcessor, + ProfileQueryProcessor, +}; +use error_stack::Report; +use kernel::interfaces::database::{DatabaseConnection, DependOnDatabaseConnection}; +use kernel::interfaces::event::EventApplier; +use kernel::interfaces::event_store::{DependOnProfileEventStore, ProfileEventStore}; +use kernel::interfaces::permission::DependOnPermissionChecker; +use kernel::interfaces::read_model::{DependOnProfileReadModel, 
ProfileReadModel}; +use kernel::prelude::entity::{ + Account, AuthAccountId, EventId, ImageId, Nanoid, Profile, ProfileDisplayName, ProfileId, + ProfileSummary, +}; +use kernel::KernelError; +use std::future::Future; + +pub trait UpdateProfile: + 'static + DependOnDatabaseConnection + DependOnProfileReadModel + DependOnProfileEventStore +{ + fn update_profile( + &self, + profile_id: ProfileId, + ) -> impl Future> { + async move { + let mut transaction = self.database_connection().begin_transaction().await?; + let existing = self + .profile_read_model() + .find_by_id(&mut transaction, &profile_id) + .await?; + let event_id = EventId::from(profile_id.clone()); + + if let Some(profile) = existing { + let events = self + .profile_event_store() + .find_by_id(&mut transaction, &event_id, Some(profile.version())) + .await?; + if events + .last() + .map(|event| &event.version != profile.version()) + .unwrap_or(false) + { + let mut profile = Some(profile); + for event in events { + Profile::apply(&mut profile, event)?; + } + if let Some(profile) = profile { + self.profile_read_model() + .update(&mut transaction, &profile) + .await?; + } + } + } else { + let events = self + .profile_event_store() + .find_by_id(&mut transaction, &event_id, None) + .await?; + if !events.is_empty() { + let mut profile = None; + for event in events { + Profile::apply(&mut profile, event)?; + } + if let Some(profile) = profile { + self.profile_read_model() + .create(&mut transaction, &profile) + .await?; + } + } + } + Ok(()) + } + } +} + +impl UpdateProfile for T where + T: 'static + DependOnDatabaseConnection + DependOnProfileReadModel + DependOnProfileEventStore +{ +} + +pub trait GetProfileUseCase: + 'static + + Sync + + Send + + DependOnProfileQueryProcessor + + DependOnAccountQueryProcessor + + DependOnPermissionChecker +{ + fn get_profiles_batch( + &self, + auth_account_id: &AuthAccountId, + account_nanoids: Vec, + ) -> impl Future, KernelError>> + Send { + async move { + let mut 
transaction = self.database_connection().begin_transaction().await?; + + let nanoids: Vec> = account_nanoids + .into_iter() + .map(Nanoid::::new) + .collect(); + let accounts = self + .account_query_processor() + .find_by_nanoids(&mut transaction, &nanoids) + .await?; + + let mut permitted_accounts = Vec::new(); + for account in accounts { + if check_permission(self, auth_account_id, &account_view(account.id())) + .await + .is_ok() + { + permitted_accounts.push(account); + } + } + + if permitted_accounts.is_empty() { + return Ok(Vec::new()); + } + + let account_ids: Vec<_> = permitted_accounts.iter().map(|a| a.id().clone()).collect(); + let nanoid_map: std::collections::HashMap<_, _> = permitted_accounts + .iter() + .map(|a| (a.id().clone(), a.nanoid().as_ref().to_string())) + .collect(); + + let profiles = self + .profile_query_processor() + .find_by_account_ids(&mut transaction, &account_ids) + .await?; + + Ok(profiles + .into_iter() + .filter_map(|profile| { + let account_nanoid = nanoid_map.get(profile.account_id())?.clone(); + Some(ProfileDto::new(profile, account_nanoid)) + }) + .collect()) + } + } +} + +impl GetProfileUseCase for T where + T: 'static + + Sync + + Send + + DependOnProfileQueryProcessor + + DependOnAccountQueryProcessor + + DependOnPermissionChecker +{ +} + +pub trait CreateProfileUseCase: + 'static + + Sync + + Send + + DependOnProfileCommandProcessor + + DependOnProfileQueryProcessor + + DependOnAccountQueryProcessor + + DependOnPermissionChecker +{ + fn create_profile( + &self, + auth_account_id: &AuthAccountId, + account_nanoid: String, + display_name: Option, + summary: Option, + icon: Option, + banner: Option, + ) -> impl Future> + Send { + async move { + let mut transaction = self.database_connection().begin_transaction().await?; + + let nanoid = kernel::prelude::entity::Nanoid::::new(account_nanoid); + let account = self + .account_query_processor() + .find_by_nanoid(&mut transaction, &nanoid) + .await? 
+ .ok_or_else(|| { + Report::new(KernelError::NotFound).attach_printable(format!( + "Account not found with nanoid: {}", + nanoid.as_ref() + )) + })?; + + check_permission(self, auth_account_id, &account_edit(account.id())).await?; + + let existing_profile = self + .profile_query_processor() + .find_by_account_id(&mut transaction, account.id()) + .await?; + if existing_profile.is_some() { + return Err(Report::new(KernelError::Concurrency) + .attach_printable("Profile already exists for this account")); + } + + let account_nanoid_str = account.nanoid().as_ref().to_string(); + let account_id = account.id().clone(); + let profile_nanoid = Nanoid::::default(); + let profile = self + .profile_command_processor() + .create( + &mut transaction, + account_id, + display_name.map(ProfileDisplayName::new), + summary.map(ProfileSummary::new), + icon, + banner, + profile_nanoid, + ) + .await?; + + Ok(ProfileDto::new(profile, account_nanoid_str)) + } + } +} + +impl CreateProfileUseCase for T where + T: 'static + + Sync + + Send + + DependOnProfileCommandProcessor + + DependOnProfileQueryProcessor + + DependOnAccountQueryProcessor + + DependOnPermissionChecker +{ +} + +pub trait EditProfileUseCase: + 'static + + Sync + + Send + + DependOnProfileCommandProcessor + + DependOnProfileQueryProcessor + + DependOnAccountQueryProcessor + + DependOnPermissionChecker +{ + fn edit_profile( + &self, + auth_account_id: &AuthAccountId, + account_nanoid: String, + display_name: Option, + summary: Option, + icon: Option, + banner: Option, + ) -> impl Future> + Send { + async move { + let mut transaction = self.database_connection().begin_transaction().await?; + + let nanoid = kernel::prelude::entity::Nanoid::::new(account_nanoid); + let account = self + .account_query_processor() + .find_by_nanoid(&mut transaction, &nanoid) + .await? 
+ .ok_or_else(|| { + Report::new(KernelError::NotFound).attach_printable(format!( + "Account not found with nanoid: {}", + nanoid.as_ref() + )) + })?; + + check_permission(self, auth_account_id, &account_edit(account.id())).await?; + + let profile = self + .profile_query_processor() + .find_by_account_id(&mut transaction, account.id()) + .await? + .ok_or_else(|| { + Report::new(KernelError::NotFound) + .attach_printable("Profile not found for this account") + })?; + + let profile_id = profile.id().clone(); + let current_version = profile.version().clone(); + self.profile_command_processor() + .update( + &mut transaction, + profile_id, + display_name.map(ProfileDisplayName::new), + summary.map(ProfileSummary::new), + icon, + banner, + current_version, + ) + .await?; + + Ok(()) + } + } +} + +impl EditProfileUseCase for T where + T: 'static + + Sync + + Send + + DependOnProfileCommandProcessor + + DependOnProfileQueryProcessor + + DependOnAccountQueryProcessor + + DependOnPermissionChecker +{ +} diff --git a/application/src/transfer.rs b/application/src/transfer.rs new file mode 100644 index 0000000..bf0d70b --- /dev/null +++ b/application/src/transfer.rs @@ -0,0 +1,4 @@ +pub mod account; +pub mod metadata; +pub mod pagination; +pub mod profile; diff --git a/application/src/transfer/account.rs b/application/src/transfer/account.rs new file mode 100644 index 0000000..008bfef --- /dev/null +++ b/application/src/transfer/account.rs @@ -0,0 +1,23 @@ +use kernel::prelude::entity::Account; +use time::OffsetDateTime; + +#[derive(Debug)] +pub struct AccountDto { + pub nanoid: String, + pub name: String, + pub public_key: String, + pub is_bot: bool, + pub created_at: OffsetDateTime, +} + +impl From for AccountDto { + fn from(account: Account) -> Self { + Self { + nanoid: account.nanoid().as_ref().to_string(), + name: account.name().as_ref().to_string(), + public_key: account.public_key().as_ref().to_string(), + is_bot: *account.is_bot().as_ref(), + created_at: 
*account.created_at().as_ref(), + } + } +} diff --git a/application/src/transfer/metadata.rs b/application/src/transfer/metadata.rs new file mode 100644 index 0000000..b8f4fcd --- /dev/null +++ b/application/src/transfer/metadata.rs @@ -0,0 +1,56 @@ +use kernel::prelude::entity::Metadata; + +#[derive(Debug)] +pub struct MetadataDto { + pub account_nanoid: String, + pub nanoid: String, + pub label: String, + pub content: String, +} + +impl MetadataDto { + pub fn new(metadata: Metadata, account_nanoid: String) -> Self { + Self { + account_nanoid, + nanoid: metadata.nanoid().as_ref().to_string(), + label: metadata.label().as_ref().to_string(), + content: metadata.content().as_ref().to_string(), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use kernel::prelude::entity::{ + AccountId, EventVersion, Metadata, MetadataContent, MetadataId, MetadataLabel, Nanoid, + }; + use uuid::Uuid; + + #[test] + fn test_metadata_dto_new() { + let metadata_id = MetadataId::new(Uuid::now_v7()); + let account_id = AccountId::new(Uuid::now_v7()); + let label = MetadataLabel::new("test label".to_string()); + let content = MetadataContent::new("test content".to_string()); + let nanoid = Nanoid::default(); + let version = EventVersion::new(Uuid::now_v7()); + let account_nanoid = "acc-nanoid-789".to_string(); + + let metadata = Metadata::new( + metadata_id, + account_id, + label.clone(), + content.clone(), + version, + nanoid.clone(), + ); + + let dto = MetadataDto::new(metadata, account_nanoid.clone()); + + assert_eq!(dto.account_nanoid, account_nanoid); + assert_eq!(dto.nanoid, nanoid.as_ref().to_string()); + assert_eq!(dto.label, label.as_ref().to_string()); + assert_eq!(dto.content, content.as_ref().to_string()); + } +} diff --git a/application/src/transfer/pagination.rs b/application/src/transfer/pagination.rs new file mode 100644 index 0000000..b914664 --- /dev/null +++ b/application/src/transfer/pagination.rs @@ -0,0 +1,81 @@ +use vodca::Nameln; + +#[derive(Debug, Nameln)] 
+#[vodca(snake_case)] +pub enum Direction { + NEXT, + PREV, +} + +impl Default for Direction { + fn default() -> Self { + Self::NEXT + } +} + +impl TryFrom for Direction { + type Error = String; + + fn try_from(value: String) -> Result { + match value.as_str() { + "next" => Ok(Self::NEXT), + "prev" => Ok(Self::PREV), + other => Err(format!("Invalid direction: {}", other)), + } + } +} + +#[derive(Debug)] +pub struct Pagination { + pub limit: u32, + pub cursor: Option, + pub direction: Direction, +} + +impl Pagination { + pub fn new(limit: Option, cursor: Option, direction: Direction) -> Self { + Self { + limit: limit.unwrap_or(5), + cursor, + direction, + } + } +} + +pub(crate) fn apply_pagination( + vec: Vec, + limit: u32, + cursor_data: Option, + direction: Direction, +) -> Vec { + let mut vec = vec; + match direction { + Direction::NEXT => { + vec.sort(); + vec = vec + .into_iter() + .filter(|x| { + cursor_data + .as_ref() + .map(|cursor_data| x > cursor_data) + .unwrap_or(true) + }) + .take(limit as usize) + .collect(); + } + Direction::PREV => { + vec.sort_by(|a, b| b.cmp(a)); + vec = vec + .into_iter() + .filter(|x| { + cursor_data + .as_ref() + .map(|cursor_data| x < cursor_data) + .unwrap_or(true) + }) + .take(limit as usize) + .collect(); + } + }; + vec +} diff --git a/application/src/transfer/profile.rs b/application/src/transfer/profile.rs new file mode 100644 index 0000000..4073101 --- /dev/null +++ b/application/src/transfer/profile.rs @@ -0,0 +1,100 @@ +use kernel::prelude::entity::Profile; +use uuid::Uuid; + +#[derive(Debug)] +pub struct ProfileDto { + pub account_nanoid: String, + pub nanoid: String, + pub display_name: Option, + pub summary: Option, + pub icon_id: Option, + pub banner_id: Option, +} + +impl ProfileDto { + pub fn new(profile: Profile, account_nanoid: String) -> Self { + Self { + account_nanoid, + nanoid: profile.nanoid().as_ref().to_string(), + display_name: profile + .display_name() + .as_ref() + .map(|d| d.as_ref().to_string()), + 
summary: profile.summary().as_ref().map(|s| s.as_ref().to_string()), + icon_id: profile.icon().as_ref().map(|i| *i.as_ref()), + banner_id: profile.banner().as_ref().map(|b| *b.as_ref()), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use kernel::prelude::entity::{ + AccountId, EventVersion, ImageId, Nanoid, Profile, ProfileDisplayName, ProfileId, + ProfileSummary, + }; + use uuid::Uuid; + + #[test] + fn test_profile_dto_with_all_fields() { + let profile_id = ProfileId::new(Uuid::now_v7()); + let account_id = AccountId::new(Uuid::now_v7()); + let nanoid = Nanoid::default(); + let display_name = ProfileDisplayName::new("Test User".to_string()); + let summary = ProfileSummary::new("A test summary".to_string()); + let icon_id = ImageId::new(Uuid::now_v7()); + let banner_id = ImageId::new(Uuid::now_v7()); + let version = EventVersion::new(Uuid::now_v7()); + let account_nanoid = "acc-nanoid-123".to_string(); + + let profile = Profile::new( + profile_id, + account_id, + Some(display_name.clone()), + Some(summary.clone()), + Some(icon_id.clone()), + Some(banner_id.clone()), + version, + nanoid.clone(), + ); + + let dto = ProfileDto::new(profile, account_nanoid.clone()); + + assert_eq!(dto.account_nanoid, account_nanoid); + assert_eq!(dto.nanoid, nanoid.as_ref().to_string()); + assert_eq!(dto.display_name, Some(display_name.as_ref().to_string())); + assert_eq!(dto.summary, Some(summary.as_ref().to_string())); + assert_eq!(dto.icon_id, Some(*icon_id.as_ref())); + assert_eq!(dto.banner_id, Some(*banner_id.as_ref())); + } + + #[test] + fn test_profile_dto_with_no_optional_fields() { + let profile_id = ProfileId::new(Uuid::now_v7()); + let account_id = AccountId::new(Uuid::now_v7()); + let nanoid = Nanoid::default(); + let version = EventVersion::new(Uuid::now_v7()); + let account_nanoid = "acc-nanoid-456".to_string(); + + let profile = Profile::new( + profile_id, + account_id, + None, + None, + None, + None, + version, + nanoid.clone(), + ); + + let dto = 
ProfileDto::new(profile, account_nanoid.clone()); + + assert_eq!(dto.account_nanoid, account_nanoid); + assert_eq!(dto.nanoid, nanoid.as_ref().to_string()); + assert!(dto.display_name.is_none()); + assert!(dto.summary.is_none()); + assert!(dto.icon_id.is_none()); + assert!(dto.banner_id.is_none()); + } +} diff --git a/compose.yml b/compose.yml new file mode 100644 index 0000000..e4d6035 --- /dev/null +++ b/compose.yml @@ -0,0 +1,167 @@ +services: + postgres: + image: docker.io/postgres:16 + container_name: emumet-postgres + environment: + POSTGRES_PASSWORD: develop + POSTGRES_USER: postgres + ports: + - "5432:5432" + volumes: + - type: volume + source: postgres-data + target: /var/lib/postgresql/data + - type: bind + source: ./docker-entrypoint-initdb.d + target: /docker-entrypoint-initdb.d + bind: + selinux: z + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 5s + timeout: 5s + retries: 5 + + redis: + image: docker.io/redis:7 + container_name: emumet-redis + ports: + - "6379:6379" + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 5s + timeout: 5s + retries: 5 + + kratos-migrate: + image: docker.io/oryd/kratos:v1.3.1 + container_name: emumet-kratos-migrate + environment: + DSN: postgres://postgres:develop@postgres:5432/kratos?sslmode=disable + volumes: + - type: bind + source: ./ory/kratos + target: /etc/config/kratos + bind: + selinux: z + command: migrate sql -e --yes + depends_on: + postgres: + condition: service_healthy + + kratos: + image: docker.io/oryd/kratos:v1.3.1 + container_name: emumet-kratos + environment: + DSN: postgres://postgres:develop@postgres:5432/kratos?sslmode=disable + volumes: + - type: bind + source: ./ory/kratos + target: /etc/config/kratos + bind: + selinux: z + command: serve -c /etc/config/kratos/kratos.yml --dev --watch-courier + ports: + - "4433:4433" + - "4434:4434" + depends_on: + kratos-migrate: + condition: service_completed_successfully + healthcheck: + test: ["CMD-SHELL", "wget --no-verbose 
--tries=1 --spider http://localhost:4433/health/alive || exit 1"] + interval: 10s + timeout: 5s + retries: 5 + + kratos-import: + image: docker.io/oryd/kratos:v1.3.1 + container_name: emumet-kratos-import + environment: + DSN: postgres://postgres:develop@postgres:5432/kratos?sslmode=disable + volumes: + - type: bind + source: ./ory/kratos + target: /etc/config/kratos + bind: + selinux: z + command: import identities /etc/config/kratos/seed-users.json -e http://kratos:4434 + depends_on: + kratos: + condition: service_healthy + + hydra-migrate: + image: docker.io/oryd/hydra:v2.3.0 + container_name: emumet-hydra-migrate + environment: + DSN: postgres://postgres:develop@postgres:5432/hydra?sslmode=disable + command: migrate sql -e --yes + depends_on: + postgres: + condition: service_healthy + + hydra: + image: docker.io/oryd/hydra:v2.3.0 + container_name: emumet-hydra + environment: + DSN: postgres://postgres:develop@postgres:5432/hydra?sslmode=disable + volumes: + - type: bind + source: ./ory/hydra + target: /etc/config/hydra + bind: + selinux: z + command: serve all -c /etc/config/hydra/hydra.yml --dev + ports: + - "4444:4444" + - "4445:4445" + depends_on: + hydra-migrate: + condition: service_completed_successfully + healthcheck: + test: ["CMD-SHELL", "wget --no-verbose --tries=1 --spider http://localhost:4444/health/alive || exit 1"] + interval: 10s + timeout: 5s + retries: 5 + + keto-migrate: + image: docker.io/oryd/keto:v0.12.0 + container_name: emumet-keto-migrate + environment: + DSN: postgres://postgres:develop@postgres:5432/keto?sslmode=disable + volumes: + - type: bind + source: ./ory/keto + target: /etc/config/keto + bind: + selinux: z + command: migrate up -y -c /etc/config/keto/keto.yml + depends_on: + postgres: + condition: service_healthy + + keto: + image: docker.io/oryd/keto:v0.12.0 + container_name: emumet-keto + environment: + DSN: postgres://postgres:develop@postgres:5432/keto?sslmode=disable + volumes: + - type: bind + source: ./ory/keto + target: 
/etc/config/keto + bind: + selinux: z + command: serve -c /etc/config/keto/keto.yml + ports: + - "4466:4466" + - "4467:4467" + depends_on: + keto-migrate: + condition: service_completed_successfully + healthcheck: + test: ["CMD-SHELL", "wget --no-verbose --tries=1 --spider http://localhost:4466/health/alive || exit 1"] + interval: 10s + timeout: 5s + retries: 5 + +volumes: + postgres-data: diff --git a/docker-entrypoint-initdb.d/01-create-databases.sh b/docker-entrypoint-initdb.d/01-create-databases.sh new file mode 100755 index 0000000..f75355e --- /dev/null +++ b/docker-entrypoint-initdb.d/01-create-databases.sh @@ -0,0 +1,8 @@ +#!/bin/bash +set -e + +psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL + CREATE DATABASE kratos; + CREATE DATABASE hydra; + CREATE DATABASE keto; +EOSQL diff --git a/driver/Cargo.toml b/driver/Cargo.toml index e2c90a4..4aeabf3 100644 --- a/driver/Cargo.toml +++ b/driver/Cargo.toml @@ -7,18 +7,30 @@ authors.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -dotenvy = "0.15.7" -deadpool-redis = "0.12.0" +dotenvy = { workspace = true } +deadpool-redis = "0.20.0" sqlx = { version = "0.7", features = ["uuid", "time", "postgres", "runtime-tokio-native-tls", "json"] } serde_json = "1" serde = { workspace = true } uuid = { workspace = true } time = { workspace = true } async-trait = "0.1" +reqwest = { version = "0.12", features = ["json"] } +vodca.workspace = true error-stack = { workspace = true } +# Cryptography (moved from application) +rsa = { version = "0.9", features = ["sha2", "pem"] } +argon2 = "0.5" +aes-gcm = "0.10" +base64 = "0.22" +rand = "0.8" +zeroize = "1.7" + kernel = { path = "../kernel" } [dev-dependencies] tokio = { workspace = true, features = ["macros", "test-util"] } +test-with.workspace = true +tempfile = "3" diff --git a/driver/src/crypto/encryption.rs b/driver/src/crypto/encryption.rs new file mode 100644 
index 0000000..617945b --- /dev/null +++ b/driver/src/crypto/encryption.rs @@ -0,0 +1,212 @@ +use aes_gcm::{ + aead::{Aead, KeyInit}, + Aes256Gcm, Nonce, +}; +use argon2::Argon2; +use base64::{engine::general_purpose::STANDARD as BASE64, Engine}; +use error_stack::{Report, Result}; +use kernel::interfaces::crypto::{EncryptedPrivateKey, KeyEncryptor, SigningAlgorithm}; +use kernel::KernelError; +use rand::{rngs::OsRng, RngCore}; +use zeroize::Zeroizing; + +/// Argon2id parameters (OWASP recommended) +#[derive(Debug, Clone)] +pub struct Argon2Params { + /// Memory cost in KiB (default: 64 MiB = 65536 KiB) + pub memory_cost: u32, + /// Number of iterations (default: 3) + pub time_cost: u32, + /// Degree of parallelism (default: 4) + pub parallelism: u32, +} + +impl Default for Argon2Params { + fn default() -> Self { + Self { + memory_cost: 65536, // 64 MiB + time_cost: 3, + parallelism: 4, + } + } +} + +/// Argon2id-based private key encryptor using AES-256-GCM +#[derive(Debug, Clone)] +pub struct Argon2Encryptor { + params: Argon2Params, +} + +impl Argon2Encryptor { + pub fn new(params: Argon2Params) -> Self { + Self { params } + } +} + +impl Default for Argon2Encryptor { + fn default() -> Self { + Self::new(Argon2Params::default()) + } +} + +impl KeyEncryptor for Argon2Encryptor { + fn encrypt( + &self, + private_key_pem: &[u8], + password: &[u8], + algorithm: SigningAlgorithm, + ) -> Result { + // Generate random salt (16 bytes) + let mut salt = [0u8; 16]; + OsRng.fill_bytes(&mut salt); + + // Derive key using Argon2id (32 bytes for AES-256) + let argon2_params = argon2::Params::new( + self.params.memory_cost, + self.params.time_cost, + self.params.parallelism, + Some(32), + ) + .map_err(|e| { + Report::new(KernelError::Internal) + .attach_printable(format!("Invalid Argon2 parameters: {e}")) + })?; + + let argon2 = Argon2::new( + argon2::Algorithm::Argon2id, + argon2::Version::V0x13, + argon2_params, + ); + + // Derived key is zeroized on drop + let mut derived_key 
= Zeroizing::new([0u8; 32]); + argon2 + .hash_password_into(password, &salt, &mut *derived_key) + .map_err(|e| { + Report::new(KernelError::Internal) + .attach_printable(format!("Argon2id key derivation failed: {e}")) + })?; + + // Encrypt with AES-256-GCM + let cipher = Aes256Gcm::new_from_slice(&*derived_key).map_err(|e| { + Report::new(KernelError::Internal) + .attach_printable(format!("Failed to create AES-GCM cipher: {e}")) + })?; + + let mut nonce_bytes = [0u8; 12]; + OsRng.fill_bytes(&mut nonce_bytes); + let nonce = Nonce::from_slice(&nonce_bytes); + + let ciphertext = cipher.encrypt(nonce, private_key_pem).map_err(|e| { + Report::new(KernelError::Internal) + .attach_printable(format!("AES-GCM encryption failed: {e}")) + })?; + + Ok(EncryptedPrivateKey { + ciphertext: BASE64.encode(&ciphertext), + nonce: BASE64.encode(nonce_bytes), + salt: BASE64.encode(salt), + algorithm, + }) + } + + fn decrypt( + &self, + encrypted: &EncryptedPrivateKey, + password: &[u8], + ) -> Result, KernelError> { + // Decode Base64 fields (use generic error message to prevent information leakage) + let salt = BASE64.decode(&encrypted.salt).map_err(|_| { + Report::new(KernelError::Internal).attach_printable("Invalid encrypted data format") + })?; + + let nonce_bytes = BASE64.decode(&encrypted.nonce).map_err(|_| { + Report::new(KernelError::Internal).attach_printable("Invalid encrypted data format") + })?; + + let ciphertext = BASE64.decode(&encrypted.ciphertext).map_err(|_| { + Report::new(KernelError::Internal).attach_printable("Invalid encrypted data format") + })?; + + // Derive key using Argon2id + // Use generic error messages to prevent information leakage during decryption + let argon2_params = argon2::Params::new( + self.params.memory_cost, + self.params.time_cost, + self.params.parallelism, + Some(32), + ) + .map_err(|_| { + Report::new(KernelError::Internal) + .attach_printable("Decryption failed: invalid password or corrupted data") + })?; + + let argon2 = Argon2::new( + 
argon2::Algorithm::Argon2id, + argon2::Version::V0x13, + argon2_params, + ); + + // Derived key is zeroized on drop + let mut derived_key = Zeroizing::new([0u8; 32]); + argon2 + .hash_password_into(password, &salt, &mut *derived_key) + .map_err(|_| { + Report::new(KernelError::Internal) + .attach_printable("Decryption failed: invalid password or corrupted data") + })?; + + // Decrypt with AES-256-GCM + let cipher = Aes256Gcm::new_from_slice(&*derived_key).map_err(|_| { + Report::new(KernelError::Internal) + .attach_printable("Decryption failed: invalid password or corrupted data") + })?; + + let nonce = Nonce::from_slice(&nonce_bytes); + + // Use generic error message to prevent timing attacks + cipher.decrypt(nonce, ciphertext.as_ref()).map_err(|_| { + Report::new(KernelError::Internal) + .attach_printable("Decryption failed: invalid password or corrupted data") + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_encrypt_decrypt_roundtrip() { + let original = b"-----BEGIN PRIVATE KEY-----\ntest data\n-----END PRIVATE KEY-----"; + let password = b"test-password-123"; + let encryptor = Argon2Encryptor::default(); + + let encrypted = encryptor + .encrypt(original, password, SigningAlgorithm::Rsa2048) + .unwrap(); + + assert!(!encrypted.ciphertext.is_empty()); + assert!(!encrypted.nonce.is_empty()); + assert!(!encrypted.salt.is_empty()); + assert_eq!(encrypted.algorithm, SigningAlgorithm::Rsa2048); + + let decrypted = encryptor.decrypt(&encrypted, password).unwrap(); + assert_eq!(decrypted, original); + } + + #[test] + fn test_wrong_password_fails() { + let original = b"secret data"; + let password = b"correct-password"; + let wrong_password = b"wrong-password"; + let encryptor = Argon2Encryptor::default(); + + let encrypted = encryptor + .encrypt(original, password, SigningAlgorithm::Rsa2048) + .unwrap(); + + let result = encryptor.decrypt(&encrypted, wrong_password); + assert!(result.is_err()); + } +} diff --git a/driver/src/crypto/mod.rs 
b/driver/src/crypto/mod.rs new file mode 100644 index 0000000..e9ed78a --- /dev/null +++ b/driver/src/crypto/mod.rs @@ -0,0 +1,7 @@ +mod encryption; +mod password; +mod rsa; + +pub use encryption::{Argon2Encryptor, Argon2Params}; +pub use password::FilePasswordProvider; +pub use rsa::{Rsa2048RawGenerator, Rsa2048Signer, Rsa2048Verifier}; diff --git a/driver/src/crypto/password.rs b/driver/src/crypto/password.rs new file mode 100644 index 0000000..dce944c --- /dev/null +++ b/driver/src/crypto/password.rs @@ -0,0 +1,175 @@ +use error_stack::{Report, Result}; +use kernel::interfaces::crypto::PasswordProvider; +use kernel::KernelError; +use std::path::Path; +use zeroize::Zeroizing; + +const SECRETS_PATH: &str = "/run/secrets/master-key-password"; +const FALLBACK_PATH: &str = "./master-key-password"; + +/// Validate file permissions (Unix only) +/// Rejects files with group or other permissions (must be 0o600 or 0o400) +#[cfg(unix)] +fn validate_file_permissions(path: &str) -> Result<(), KernelError> { + use std::os::unix::fs::PermissionsExt; + + let metadata = std::fs::metadata(path).map_err(|e| { + Report::new(KernelError::Internal).attach_printable(format!( + "Failed to read file metadata for '{}': {}", + path, e + )) + })?; + + let mode = metadata.permissions().mode(); + // Check if group or other have any permissions + if mode & 0o077 != 0 { + return Err(Report::new(KernelError::Internal).attach_printable(format!( + "Master password file '{}' has insecure permissions: {:o} (expected 0o600 or 0o400)", + path, mode + ))); + } + + Ok(()) +} + +#[cfg(not(unix))] +fn validate_file_permissions(_path: &str) -> Result<(), KernelError> { + // Skip permission check on non-Unix systems + Ok(()) +} + +/// File-based password provider with fallback support +/// +/// Tries to read from `/run/secrets/master-key-password` first, +/// then falls back to `./master-key-password` if not found. +/// +/// On Unix systems, validates that the file has secure permissions (0o600 or 0o400). 
+#[derive(Clone)] +pub struct FilePasswordProvider { + secrets_path: String, + fallback_path: String, +} + +impl FilePasswordProvider { + /// Create a new provider with default paths + pub fn new() -> Self { + Self { + secrets_path: SECRETS_PATH.to_string(), + fallback_path: FALLBACK_PATH.to_string(), + } + } + + /// Create a provider with custom paths (useful for testing) + pub fn with_paths, P2: AsRef>( + secrets_path: P1, + fallback_path: P2, + ) -> Self { + Self { + secrets_path: secrets_path.as_ref().to_string_lossy().into_owned(), + fallback_path: fallback_path.as_ref().to_string_lossy().into_owned(), + } + } +} + +impl Default for FilePasswordProvider { + fn default() -> Self { + Self::new() + } +} + +impl PasswordProvider for FilePasswordProvider { + fn get_password(&self) -> Result>, KernelError> { + // Try secrets path first + if Path::new(&self.secrets_path).exists() { + validate_file_permissions(&self.secrets_path)?; + let password = std::fs::read(&self.secrets_path).map_err(|e| { + Report::new(KernelError::Internal).attach_printable(format!( + "Failed to read master password from '{}': {}", + self.secrets_path, e + )) + })?; + if password.is_empty() { + return Err(Report::new(KernelError::Internal) + .attach_printable("Master password file is empty")); + } + return Ok(Zeroizing::new(password)); + } + + // Fall back to local path + if Path::new(&self.fallback_path).exists() { + validate_file_permissions(&self.fallback_path)?; + let password = std::fs::read(&self.fallback_path).map_err(|e| { + Report::new(KernelError::Internal).attach_printable(format!( + "Failed to read master password from '{}': {}", + self.fallback_path, e + )) + })?; + if password.is_empty() { + return Err(Report::new(KernelError::Internal) + .attach_printable("Master password file is empty")); + } + return Ok(Zeroizing::new(password)); + } + + Err(Report::new(KernelError::Internal).attach_printable(format!( + "Master password file not found. 
Tried: '{}', '{}'", + self.secrets_path, self.fallback_path + ))) + } +} + +/// In-memory password provider for testing +#[cfg(test)] +pub struct InMemoryPasswordProvider { + password: Vec, +} + +#[cfg(test)] +impl InMemoryPasswordProvider { + pub fn new(password: impl Into>) -> Self { + Self { + password: password.into(), + } + } +} + +#[cfg(test)] +impl PasswordProvider for InMemoryPasswordProvider { + fn get_password(&self) -> Result>, KernelError> { + Ok(Zeroizing::new(self.password.clone())) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::io::Write; + use tempfile::NamedTempFile; + + #[test] + fn test_in_memory_provider() { + let provider = InMemoryPasswordProvider::new(b"test-password".to_vec()); + let password = provider.get_password().unwrap(); + assert_eq!(password.as_slice(), b"test-password"); + } + + #[test] + fn test_file_provider_with_custom_path() { + let mut temp_file = NamedTempFile::new().unwrap(); + temp_file.write_all(b"file-password").unwrap(); + + let provider = FilePasswordProvider::with_paths("/nonexistent/path", temp_file.path()); + + let password = provider.get_password().unwrap(); + assert_eq!(password.as_slice(), b"file-password"); + } + + #[test] + fn test_file_provider_no_file() { + let provider = + FilePasswordProvider::with_paths("/nonexistent/secrets", "/nonexistent/fallback"); + + let result = provider.get_password(); + assert!(result.is_err()); + } +} diff --git a/driver/src/crypto/rsa.rs b/driver/src/crypto/rsa.rs new file mode 100644 index 0000000..9403374 --- /dev/null +++ b/driver/src/crypto/rsa.rs @@ -0,0 +1,173 @@ +use error_stack::{Report, Result}; +use kernel::interfaces::crypto::{ + RawKeyGenerator, RawKeyPair, SignatureVerifier, Signer, SigningAlgorithm, +}; +use kernel::KernelError; +use rand::rngs::OsRng; +use rsa::sha2::Sha256; +use rsa::{ + pkcs1v15::{SigningKey, VerifyingKey}, + pkcs8::{DecodePrivateKey, DecodePublicKey, EncodePrivateKey, EncodePublicKey, LineEnding}, + signature::{SignatureEncoding, 
Signer as RsaSigner, Verifier}, + RsaPrivateKey, RsaPublicKey, +}; +use zeroize::Zeroizing; + +/// RSA-2048 raw key pair generator (without encryption) +#[derive(Debug, Clone, Copy, Default)] +pub struct Rsa2048RawGenerator; + +impl RawKeyGenerator for Rsa2048RawGenerator { + fn generate_raw(&self) -> Result { + // Generate RSA-2048 key pair + let private_key = RsaPrivateKey::new(&mut OsRng, 2048).map_err(|e| { + Report::new(KernelError::Internal) + .attach_printable(format!("Failed to generate RSA-2048 key: {e}")) + })?; + + let public_key = RsaPublicKey::from(&private_key); + + // Convert to PEM format + // Note: SecretDocument already implements Zeroize on drop + let private_key_pem = private_key.to_pkcs8_pem(LineEnding::LF).map_err(|e| { + Report::new(KernelError::Internal) + .attach_printable(format!("Failed to encode private key as PEM: {e}")) + })?; + + let public_key_pem = public_key.to_public_key_pem(LineEnding::LF).map_err(|e| { + Report::new(KernelError::Internal) + .attach_printable(format!("Failed to encode public key as PEM: {e}")) + })?; + + Ok(RawKeyPair { + public_key_pem, + // Note: SecretDocument (private_key_pem source) already implements Zeroize on drop. + // The copy to Vec is wrapped in Zeroizing for defense in depth. 
+ private_key_pem: Zeroizing::new(private_key_pem.as_bytes().to_vec()), + algorithm: SigningAlgorithm::Rsa2048, + }) + } + + fn algorithm(&self) -> SigningAlgorithm { + SigningAlgorithm::Rsa2048 + } +} + +/// RSA-2048 signer using PKCS#1 v1.5 + SHA-256 +#[derive(Debug, Clone, Copy, Default)] +pub struct Rsa2048Signer; + +impl Signer for Rsa2048Signer { + fn sign(&self, data: &[u8], private_key_pem: &[u8]) -> Result, KernelError> { + let pem_str = std::str::from_utf8(private_key_pem).map_err(|e| { + Report::new(KernelError::Internal) + .attach_printable(format!("Invalid UTF-8 in private key PEM: {e}")) + })?; + + let private_key = RsaPrivateKey::from_pkcs8_pem(pem_str).map_err(|e| { + Report::new(KernelError::Internal) + .attach_printable(format!("Failed to parse private key PEM: {e}")) + })?; + + let signing_key = SigningKey::::new(private_key); + let signature = signing_key.sign(data); + + Ok(signature.to_bytes().to_vec()) + } +} + +/// RSA-2048 signature verifier using PKCS#1 v1.5 + SHA-256 +#[derive(Debug, Clone, Copy, Default)] +pub struct Rsa2048Verifier; + +impl SignatureVerifier for Rsa2048Verifier { + fn verify( + &self, + data: &[u8], + signature: &[u8], + public_key_pem: &[u8], + ) -> Result { + let pem_str = std::str::from_utf8(public_key_pem).map_err(|e| { + Report::new(KernelError::Internal) + .attach_printable(format!("Invalid UTF-8 in public key PEM: {e}")) + })?; + + let public_key = RsaPublicKey::from_public_key_pem(pem_str).map_err(|e| { + Report::new(KernelError::Internal) + .attach_printable(format!("Failed to parse public key PEM: {e}")) + })?; + + let verifying_key = VerifyingKey::::new(public_key); + + let sig = rsa::pkcs1v15::Signature::try_from(signature).map_err(|e| { + Report::new(KernelError::Internal) + .attach_printable(format!("Invalid signature format: {e}")) + })?; + + match verifying_key.verify(data, &sig) { + Ok(()) => Ok(true), + Err(_) => Ok(false), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn 
test_rsa2048_raw_generation() { + let generator = Rsa2048RawGenerator; + + let result = generator.generate_raw(); + assert!(result.is_ok()); + + let key_pair = result.unwrap(); + assert!(key_pair.public_key_pem.contains("BEGIN PUBLIC KEY")); + assert!(!key_pair.private_key_pem.is_empty()); + assert_eq!(key_pair.algorithm, SigningAlgorithm::Rsa2048); + + // Verify private key is valid PEM + let private_key_str = String::from_utf8_lossy(&key_pair.private_key_pem); + assert!(private_key_str.contains("BEGIN PRIVATE KEY")); + } + + #[test] + fn test_sign_and_verify() { + let generator = Rsa2048RawGenerator; + let key_pair = generator.generate_raw().unwrap(); + + let signer = Rsa2048Signer; + let verifier = Rsa2048Verifier; + + let data = b"Hello, ActivityPub!"; + let signature = signer.sign(data, &key_pair.private_key_pem).unwrap(); + + let is_valid = verifier + .verify(data, &signature, key_pair.public_key_pem.as_bytes()) + .unwrap(); + assert!(is_valid); + } + + #[test] + fn test_verify_wrong_data_fails() { + let generator = Rsa2048RawGenerator; + let key_pair = generator.generate_raw().unwrap(); + + let signer = Rsa2048Signer; + let verifier = Rsa2048Verifier; + + let data = b"Original message"; + let signature = signer.sign(data, &key_pair.private_key_pem).unwrap(); + + let tampered_data = b"Tampered message"; + let is_valid = verifier + .verify( + tampered_data, + &signature, + key_pair.public_key_pem.as_bytes(), + ) + .unwrap(); + assert!(!is_valid); + } +} diff --git a/driver/src/database.rs b/driver/src/database.rs index ab71a54..e27c66b 100644 --- a/driver/src/database.rs +++ b/driver/src/database.rs @@ -1,10 +1,13 @@ mod postgres; +mod redis; use error_stack::Report; use kernel::KernelError; -pub use postgres::*; use std::env; +pub use postgres::*; +pub use redis::*; + pub(crate) fn env(key: &str) -> error_stack::Result, KernelError> { let result = dotenvy::var(key); match result { diff --git a/driver/src/database/postgres.rs b/driver/src/database/postgres.rs 
index bef3deb..c09bc56 100644 --- a/driver/src/database/postgres.rs +++ b/driver/src/database/postgres.rs @@ -1,17 +1,20 @@ mod account; -mod event; +mod account_event_store; +mod auth_account; +mod auth_account_event_store; +mod auth_host; mod follow; mod image; mod metadata; +mod metadata_event_store; mod profile; +mod profile_event_store; mod remote_account; -mod stellar_account; -mod stellar_host; use crate::database::env; use crate::ConvertError; use error_stack::{Report, ResultExt}; -use kernel::interfaces::database::{DatabaseConnection, Transaction}; +use kernel::interfaces::database::{DatabaseConnection, Executor}; use kernel::KernelError; use sqlx::pool::PoolConnection; use sqlx::{Error, PgConnection, Pool, Postgres}; @@ -67,7 +70,7 @@ pub(in crate::database::postgres) struct CountRow { pub struct PostgresConnection(PoolConnection); -impl Transaction for PostgresConnection {} +impl Executor for PostgresConnection {} impl Deref for PostgresConnection { type Target = PgConnection; @@ -83,8 +86,8 @@ impl DerefMut for PostgresConnection { } impl DatabaseConnection for PostgresDatabase { - type Transaction = PostgresConnection; - async fn begin_transaction(&self) -> error_stack::Result { + type Executor = PostgresConnection; + async fn begin_transaction(&self) -> error_stack::Result { let connection = self.pool.acquire().await.convert_error()?; Ok(PostgresConnection(connection)) } diff --git a/driver/src/database/postgres/account.rs b/driver/src/database/postgres/account.rs index d65ee66..37a225f 100644 --- a/driver/src/database/postgres/account.rs +++ b/driver/src/database/postgres/account.rs @@ -1,10 +1,9 @@ use crate::database::{PostgresConnection, PostgresDatabase}; use crate::ConvertError; -use kernel::interfaces::modify::{AccountModifier, DependOnAccountModifier}; -use kernel::interfaces::query::{AccountQuery, DependOnAccountQuery}; +use kernel::interfaces::read_model::{AccountReadModel, DependOnAccountReadModel}; use kernel::prelude::entity::{ - Account, 
AccountId, AccountIsBot, AccountName, AccountPrivateKey, AccountPublicKey, DeletedAt, - EventVersion, Nanoid, StellarAccountId, + Account, AccountId, AccountIsBot, AccountName, AccountPrivateKey, AccountPublicKey, + AuthAccountId, CreatedAt, DeletedAt, EventVersion, Nanoid, }; use kernel::KernelError; use sqlx::types::time::OffsetDateTime; @@ -21,6 +20,7 @@ struct AccountRow { deleted_at: Option, version: Uuid, nanoid: String, + created_at: OffsetDateTime, } impl From for Account { @@ -34,25 +34,26 @@ impl From for Account { value.deleted_at.map(DeletedAt::new), EventVersion::new(value.version), Nanoid::new(value.nanoid), + CreatedAt::new(value.created_at), ) } } -pub struct PostgresAccountRepository; +pub struct PostgresAccountReadModel; -impl AccountQuery for PostgresAccountRepository { - type Transaction = PostgresConnection; +impl AccountReadModel for PostgresAccountReadModel { + type Executor = PostgresConnection; async fn find_by_id( &self, - transaction: &mut Self::Transaction, + executor: &mut Self::Executor, id: &AccountId, ) -> error_stack::Result, KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; sqlx::query_as::<_, AccountRow>( //language=postgresql r#" - SELECT id, name, private_key, public_key, is_bot, deleted_at, version, nanoid + SELECT id, name, private_key, public_key, is_bot, deleted_at, version, nanoid, created_at FROM accounts WHERE id = $1 AND deleted_at IS NULL "#, @@ -64,22 +65,22 @@ impl AccountQuery for PostgresAccountRepository { .map(|option| option.map(Account::from)) } - async fn find_by_stellar_id( + async fn find_by_auth_id( &self, - transaction: &mut Self::Transaction, - stellar_id: &StellarAccountId, + executor: &mut Self::Executor, + auth_id: &AuthAccountId, ) -> error_stack::Result, KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; sqlx::query_as::<_, AccountRow>( //language=postgresql r#" - SELECT id, name, private_key, 
public_key, is_bot, deleted_at, version, nanoid + SELECT id, name, private_key, public_key, is_bot, deleted_at, version, nanoid, created_at FROM accounts - INNER JOIN stellar_emumet_accounts ON stellar_emumet_accounts.emumet_id = accounts.id - WHERE stellar_emumet_accounts.stellar_id = $1 AND deleted_at IS NULL + INNER JOIN auth_emumet_accounts ON auth_emumet_accounts.emumet_id = accounts.id + WHERE auth_emumet_accounts.auth_id = $1 AND deleted_at IS NULL "#, ) - .bind(stellar_id.as_ref()) + .bind(auth_id.as_ref()) .fetch_all(con) .await .convert_error() @@ -88,14 +89,14 @@ impl AccountQuery for PostgresAccountRepository { async fn find_by_name( &self, - transaction: &mut Self::Transaction, + executor: &mut Self::Executor, name: &AccountName, ) -> error_stack::Result, KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; sqlx::query_as::<_, AccountRow>( //language=postgresql r#" - SELECT id, name, private_key, public_key, is_bot, deleted_at, version, nanoid + SELECT id, name, private_key, public_key, is_bot, deleted_at, version, nanoid, created_at FROM accounts WHERE name = $1 AND deleted_at IS NULL "#, @@ -106,30 +107,61 @@ impl AccountQuery for PostgresAccountRepository { .convert_error() .map(|option| option.map(Account::from)) } -} - -impl DependOnAccountQuery for PostgresDatabase { - type AccountQuery = PostgresAccountRepository; - fn account_query(&self) -> &Self::AccountQuery { - &PostgresAccountRepository + async fn find_by_nanoid( + &self, + executor: &mut Self::Executor, + nanoid: &Nanoid, + ) -> error_stack::Result, KernelError> { + let con: &mut PgConnection = executor; + sqlx::query_as::<_, AccountRow>( + //language=postgresql + r#" + SELECT id, name, private_key, public_key, is_bot, deleted_at, version, nanoid, created_at + FROM accounts + WHERE nanoid = $1 AND deleted_at IS NULL + "#, + ) + .bind(nanoid.as_ref()) + .fetch_optional(con) + .await + .convert_error() + .map(|option| option.map(Account::from)) 
} -} -impl AccountModifier for PostgresAccountRepository { - type Transaction = PostgresConnection; + async fn find_by_nanoids( + &self, + executor: &mut Self::Executor, + nanoids: &[Nanoid], + ) -> error_stack::Result, KernelError> { + let con: &mut PgConnection = executor; + let nanoid_strs: Vec<&str> = nanoids.iter().map(|n| n.as_ref().as_str()).collect(); + sqlx::query_as::<_, AccountRow>( + //language=postgresql + r#" + SELECT id, name, private_key, public_key, is_bot, deleted_at, version, nanoid, created_at + FROM accounts + WHERE nanoid = ANY($1) AND deleted_at IS NULL + "#, + ) + .bind(&nanoid_strs) + .fetch_all(con) + .await + .convert_error() + .map(|rows| rows.into_iter().map(Account::from).collect()) + } async fn create( &self, - transaction: &mut Self::Transaction, + executor: &mut Self::Executor, account: &Account, ) -> error_stack::Result<(), KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; sqlx::query( //language=postgresql r#" - INSERT INTO accounts (id, name, private_key, public_key, is_bot, version, nanoid) - VALUES ($1, $2, $3, $4, $5, $6, $7) + INSERT INTO accounts (id, name, private_key, public_key, is_bot, version, nanoid, created_at) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8) "#, ) .bind(account.id().as_ref()) @@ -139,6 +171,7 @@ impl AccountModifier for PostgresAccountRepository { .bind(account.is_bot().as_ref()) .bind(account.version().as_ref()) .bind(account.nanoid().as_ref()) + .bind(account.created_at().as_ref()) .execute(con) .await .convert_error()?; @@ -147,15 +180,15 @@ impl AccountModifier for PostgresAccountRepository { async fn update( &self, - transaction: &mut Self::Transaction, + executor: &mut Self::Executor, account: &Account, ) -> error_stack::Result<(), KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; sqlx::query( //language=postgresql r#" UPDATE accounts - SET name = $2, private_key = $3, public_key = $4, is_bot = $5, 
version = $6 + SET name = $2, private_key = $3, public_key = $4, is_bot = $5, version = $6, deleted_at = $7 WHERE id = $1 "#, ) @@ -165,18 +198,19 @@ impl AccountModifier for PostgresAccountRepository { .bind(account.public_key().as_ref()) .bind(account.is_bot().as_ref()) .bind(account.version().as_ref()) + .bind(account.deleted_at().as_ref().map(|d| d.as_ref())) .execute(con) .await .convert_error()?; Ok(()) } - async fn delete( + async fn deactivate( &self, - transaction: &mut Self::Transaction, + executor: &mut Self::Executor, account_id: &AccountId, ) -> error_stack::Result<(), KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; sqlx::query( //language=postgresql r#" @@ -191,29 +225,70 @@ impl AccountModifier for PostgresAccountRepository { .convert_error()?; Ok(()) } + + async fn unlink_all_auth_accounts( + &self, + executor: &mut Self::Executor, + account_id: &AccountId, + ) -> error_stack::Result<(), KernelError> { + let con: &mut PgConnection = executor; + sqlx::query( + //language=postgresql + r#" + DELETE FROM auth_emumet_accounts WHERE emumet_id = $1 + "#, + ) + .bind(account_id.as_ref()) + .execute(con) + .await + .convert_error()?; + Ok(()) + } + + async fn link_auth_account( + &self, + executor: &mut Self::Executor, + account_id: &AccountId, + auth_account_id: &AuthAccountId, + ) -> error_stack::Result<(), KernelError> { + let con: &mut PgConnection = executor; + sqlx::query( + //language=postgresql + r#" + INSERT INTO auth_emumet_accounts (emumet_id, auth_id) VALUES ($1, $2) + "#, + ) + .bind(account_id.as_ref()) + .bind(auth_account_id.as_ref()) + .execute(con) + .await + .convert_error()?; + Ok(()) + } } -impl DependOnAccountModifier for PostgresDatabase { - type AccountModifier = PostgresAccountRepository; +impl DependOnAccountReadModel for PostgresDatabase { + type AccountReadModel = PostgresAccountReadModel; - fn account_modifier(&self) -> &Self::AccountModifier { - &PostgresAccountRepository + fn 
account_read_model(&self) -> &Self::AccountReadModel { + &PostgresAccountReadModel } } #[cfg(test)] mod test { - mod query { + mod read_model { use crate::database::PostgresDatabase; use kernel::interfaces::database::DatabaseConnection; - use kernel::interfaces::modify::{AccountModifier, DependOnAccountModifier}; - use kernel::interfaces::query::{AccountQuery, DependOnAccountQuery}; + use kernel::interfaces::read_model::{AccountReadModel, DependOnAccountReadModel}; use kernel::prelude::entity::{ Account, AccountId, AccountIsBot, AccountName, AccountPrivateKey, AccountPublicKey, - EventVersion, Nanoid, StellarAccountId, + AuthAccountId, CreatedAt, DeletedAt, EventVersion, Nanoid, }; + use sqlx::types::time::OffsetDateTime; use sqlx::types::Uuid; + #[test_with::env(DATABASE_URL)] #[tokio::test] async fn find_by_id() { let database = PostgresDatabase::new().await.unwrap(); @@ -229,39 +304,42 @@ mod test { None, EventVersion::new(Uuid::now_v7()), Nanoid::default(), + CreatedAt::now(), ); database - .account_modifier() + .account_read_model() .create(&mut transaction, &account) .await .unwrap(); let result = database - .account_query() + .account_read_model() .find_by_id(&mut transaction, &id) .await .unwrap(); assert_eq!(result.as_ref().map(Account::id), Some(account.id())); } + #[test_with::env(DATABASE_URL)] #[tokio::test] - async fn find_by_stellar_id() { + async fn find_by_auth_id() { let database = PostgresDatabase::new().await.unwrap(); let mut transaction = database.begin_transaction().await.unwrap(); let accounts = database - .account_query() - .find_by_stellar_id(&mut transaction, &StellarAccountId::new(Uuid::now_v7())) + .account_read_model() + .find_by_auth_id(&mut transaction, &AuthAccountId::new(Uuid::now_v7())) .await .unwrap(); assert!(accounts.is_empty()); } + #[test_with::env(DATABASE_URL)] #[tokio::test] async fn find_by_name() { let database = PostgresDatabase::new().await.unwrap(); let mut transaction = database.begin_transaction().await.unwrap(); - 
let name = AccountName::new("findbynametest"); + let name = AccountName::new(Uuid::now_v7().to_string()); let account = Account::new( AccountId::new(Uuid::now_v7()), name.clone(), @@ -271,39 +349,65 @@ mod test { None, EventVersion::new(Uuid::now_v7()), Nanoid::default(), + CreatedAt::now(), ); database - .account_modifier() + .account_read_model() .create(&mut transaction, &account) .await .unwrap(); let result = database - .account_query() + .account_read_model() .find_by_name(&mut transaction, &name) .await .unwrap(); assert_eq!(result.as_ref().map(Account::id), Some(account.id())); database - .account_modifier() - .delete(&mut transaction, account.id()) + .account_read_model() + .deactivate(&mut transaction, account.id()) .await .unwrap(); } - } - mod modify { - use crate::database::PostgresDatabase; - use kernel::interfaces::database::DatabaseConnection; - use kernel::interfaces::modify::{AccountModifier, DependOnAccountModifier}; - use kernel::interfaces::query::{AccountQuery, DependOnAccountQuery}; - use kernel::prelude::entity::{ - Account, AccountId, AccountIsBot, AccountName, AccountPrivateKey, AccountPublicKey, - DeletedAt, EventVersion, Nanoid, - }; - use sqlx::types::time::OffsetDateTime; - use sqlx::types::Uuid; + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn find_by_nanoid() { + let database = PostgresDatabase::new().await.unwrap(); + let mut transaction = database.begin_transaction().await.unwrap(); + + let nanoid = Nanoid::default(); + let account = Account::new( + AccountId::new(Uuid::now_v7()), + AccountName::new("test"), + AccountPrivateKey::new("test"), + AccountPublicKey::new("test"), + AccountIsBot::new(false), + None, + EventVersion::new(Uuid::now_v7()), + nanoid.clone(), + CreatedAt::now(), + ); + database + .account_read_model() + .create(&mut transaction, &account) + .await + .unwrap(); + + let result = database + .account_read_model() + .find_by_nanoid(&mut transaction, &nanoid) + .await + .unwrap(); + 
assert_eq!(result.as_ref().map(Account::id), Some(account.id())); + database + .account_read_model() + .deactivate(&mut transaction, account.id()) + .await + .unwrap(); + } + #[test_with::env(DATABASE_URL)] #[tokio::test] async fn create() { let database = PostgresDatabase::new().await.unwrap(); @@ -318,14 +422,15 @@ mod test { None, EventVersion::new(Uuid::now_v7()), Nanoid::default(), + CreatedAt::now(), ); database - .account_modifier() + .account_read_model() .create(&mut transaction, &account) .await .unwrap(); let result = database - .account_query() + .account_read_model() .find_by_id(&mut transaction, account.id()) .await .unwrap() @@ -333,6 +438,7 @@ mod test { assert_eq!(result.id(), account.id()); } + #[test_with::env(DATABASE_URL)] #[tokio::test] async fn update() { let database = PostgresDatabase::new().await.unwrap(); @@ -347,9 +453,10 @@ mod test { None, EventVersion::new(Uuid::now_v7()), Nanoid::default(), + CreatedAt::now(), ); database - .account_modifier() + .account_read_model() .create(&mut transaction, &account) .await .unwrap(); @@ -362,22 +469,24 @@ mod test { None, EventVersion::new(Uuid::now_v7()), Nanoid::default(), + CreatedAt::now(), ); database - .account_modifier() + .account_read_model() .update(&mut transaction, &updated_account) .await .unwrap(); let result = database - .account_query() + .account_read_model() .find_by_id(&mut transaction, account.id()) .await .unwrap(); assert_eq!(result.as_ref().map(Account::id), Some(updated_account.id())); } + #[test_with::env(DATABASE_URL)] #[tokio::test] - async fn delete() { + async fn deactivate() { let database = PostgresDatabase::new().await.unwrap(); let mut transaction = database.begin_transaction().await.unwrap(); @@ -390,20 +499,21 @@ mod test { None, EventVersion::new(Uuid::now_v7()), Nanoid::default(), + CreatedAt::now(), ); database - .account_modifier() + .account_read_model() .create(&mut transaction, &account) .await .unwrap(); database - .account_modifier() - .delete(&mut 
transaction, account.id()) + .account_read_model() + .deactivate(&mut transaction, account.id()) .await .unwrap(); let result = database - .account_query() + .account_read_model() .find_by_id(&mut transaction, account.id()) .await .unwrap(); @@ -419,20 +529,21 @@ mod test { Some(DeletedAt::new(OffsetDateTime::now_utc())), EventVersion::new(Uuid::now_v7()), Nanoid::default(), + CreatedAt::now(), ); database - .account_modifier() + .account_read_model() .create(&mut transaction, &account) .await .unwrap(); database - .account_modifier() - .delete(&mut transaction, account.id()) + .account_read_model() + .deactivate(&mut transaction, account.id()) .await .unwrap(); let result = database - .account_query() + .account_read_model() .find_by_id(&mut transaction, account.id()) .await .unwrap(); diff --git a/driver/src/database/postgres/account_event_store.rs b/driver/src/database/postgres/account_event_store.rs new file mode 100644 index 0000000..d6865a0 --- /dev/null +++ b/driver/src/database/postgres/account_event_store.rs @@ -0,0 +1,420 @@ +use crate::database::postgres::{CountRow, VersionRow}; +use crate::database::{PostgresConnection, PostgresDatabase}; +use crate::ConvertError; +use error_stack::Report; +use kernel::interfaces::event_store::{AccountEventStore, DependOnAccountEventStore}; +use kernel::prelude::entity::{ + Account, AccountEvent, CommandEnvelope, EventEnvelope, EventId, EventVersion, KnownEventVersion, +}; +use kernel::KernelError; +use serde_json; +use sqlx::PgConnection; +use uuid::Uuid; + +#[derive(sqlx::FromRow)] +struct EventRow { + version: Uuid, + id: Uuid, + #[allow(dead_code)] + event_name: String, + data: serde_json::Value, +} + +impl TryFrom for EventEnvelope { + type Error = Report; + fn try_from(value: EventRow) -> Result { + let event: AccountEvent = serde_json::from_value(value.data).convert_error()?; + Ok(EventEnvelope::new( + EventId::new(value.id), + event, + EventVersion::new(value.version), + )) + } +} + +pub struct 
PostgresAccountEventStore; + +impl AccountEventStore for PostgresAccountEventStore { + type Executor = PostgresConnection; + + async fn find_by_id( + &self, + executor: &mut Self::Executor, + id: &EventId, + since: Option<&EventVersion>, + ) -> error_stack::Result>, KernelError> { + let con: &mut PgConnection = executor; + let rows = if let Some(version) = since { + sqlx::query_as::<_, EventRow>( + //language=postgresql + r#" + SELECT version, id, event_name, data + FROM account_events + WHERE id = $1 AND version > $2 + ORDER BY version + "#, + ) + .bind(id.as_ref()) + .bind(version.as_ref()) + .fetch_all(con) + .await + .convert_error()? + } else { + sqlx::query_as::<_, EventRow>( + //language=postgresql + r#" + SELECT version, id, event_name, data + FROM account_events + WHERE id = $1 + ORDER BY version + "#, + ) + .bind(id.as_ref()) + .fetch_all(con) + .await + .convert_error()? + }; + rows.into_iter() + .map(|row| row.try_into()) + .collect::, KernelError>>() + } + + async fn persist( + &self, + executor: &mut Self::Executor, + command: &CommandEnvelope, + ) -> error_stack::Result<(), KernelError> { + self.persist_internal(executor, command, Uuid::now_v7()) + .await + } + + async fn persist_and_transform( + &self, + executor: &mut Self::Executor, + command: CommandEnvelope, + ) -> error_stack::Result, KernelError> { + let version = Uuid::now_v7(); + self.persist_internal(executor, &command, version).await?; + + let command = command.into_destruct(); + Ok(EventEnvelope::new( + command.id, + command.event, + EventVersion::new(version), + )) + } +} + +impl PostgresAccountEventStore { + async fn persist_internal( + &self, + executor: &mut PostgresConnection, + command: &CommandEnvelope, + version: Uuid, + ) -> error_stack::Result<(), KernelError> { + let con: &mut PgConnection = executor; + + let event_name = command.event_name(); + let prev_version = command.prev_version().as_ref(); + if let Some(prev_version) = prev_version { + match prev_version { + 
KnownEventVersion::Nothing => { + let amount = sqlx::query_as::<_, CountRow>( + //language=postgresql + r#" + SELECT COUNT(*) + FROM account_events + WHERE id = $1 + "#, + ) + .bind(command.id().as_ref()) + .fetch_one(&mut *con) + .await + .convert_error()?; + if amount.count != 0 { + return Err(Report::new(KernelError::Concurrency).attach_printable( + format!("Event {} already exists", command.id().as_ref()), + )); + } + } + KnownEventVersion::Prev(prev_version) => { + let last_version = sqlx::query_as::<_, VersionRow>( + //language=postgresql + r#" + SELECT version + FROM account_events + WHERE id = $1 + ORDER BY version DESC + LIMIT 1 + "#, + ) + .bind(command.id().as_ref()) + .fetch_optional(&mut *con) + .await + .convert_error()?; + if last_version + .map(|row: VersionRow| &row.version != prev_version.as_ref()) + .unwrap_or(true) + { + return Err(Report::new(KernelError::Concurrency).attach_printable( + format!( + "Event {} version {} already exists", + command.id().as_ref(), + prev_version.as_ref() + ), + )); + } + } + }; + } + + sqlx::query( + //language=postgresql + r#" + INSERT INTO account_events (version, id, event_name, data) + VALUES ($1, $2, $3, $4) + "#, + ) + .bind(version) + .bind(command.id().as_ref()) + .bind(event_name) + .bind(serde_json::to_value(command.event()).convert_error()?) 
+ .execute(con) + .await + .convert_error()?; + + Ok(()) + } +} + +impl DependOnAccountEventStore for PostgresDatabase { + type AccountEventStore = PostgresAccountEventStore; + + fn account_event_store(&self) -> &Self::AccountEventStore { + &PostgresAccountEventStore + } +} + +#[cfg(test)] +mod test { + mod query { + use crate::database::PostgresDatabase; + use kernel::interfaces::database::DatabaseConnection; + use kernel::interfaces::event_store::{AccountEventStore, DependOnAccountEventStore}; + use kernel::prelude::entity::{ + Account, AccountEvent, AccountId, AccountIsBot, AccountName, AccountPrivateKey, + AccountPublicKey, AuthAccountId, CommandEnvelope, EventId, KnownEventVersion, Nanoid, + }; + use uuid::Uuid; + + fn create_account_command(account_id: AccountId) -> CommandEnvelope { + let event = AccountEvent::Created { + name: AccountName::new("test"), + private_key: AccountPrivateKey::new("test"), + public_key: AccountPublicKey::new("test"), + is_bot: AccountIsBot::new(false), + nanoid: Nanoid::default(), + auth_account_id: AuthAccountId::new(uuid::Uuid::now_v7()), + }; + CommandEnvelope::new( + EventId::from(account_id), + event.name(), + event, + Some(KnownEventVersion::Nothing), + ) + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn find_by_id() { + let db = PostgresDatabase::new().await.unwrap(); + let mut transaction = db.begin_transaction().await.unwrap(); + let account_id = AccountId::new(Uuid::now_v7()); + let event_id = EventId::from(account_id.clone()); + let events = db + .account_event_store() + .find_by_id(&mut transaction, &event_id, None) + .await + .unwrap(); + assert_eq!(events.len(), 0); + let created_account = create_account_command(account_id.clone()); + let update_event = AccountEvent::Updated { + is_bot: AccountIsBot::new(true), + }; + let updated_account = CommandEnvelope::new( + EventId::from(account_id.clone()), + update_event.name(), + update_event, + None, + ); + let delete_event = AccountEvent::Deactivated; + 
let deleted_account = CommandEnvelope::new( + EventId::from(account_id.clone()), + delete_event.name(), + delete_event, + None, + ); + + db.account_event_store() + .persist(&mut transaction, &created_account) + .await + .unwrap(); + db.account_event_store() + .persist(&mut transaction, &updated_account) + .await + .unwrap(); + db.account_event_store() + .persist(&mut transaction, &deleted_account) + .await + .unwrap(); + let events = db + .account_event_store() + .find_by_id(&mut transaction, &event_id, None) + .await + .unwrap(); + assert_eq!(events.len(), 3); + assert_eq!(&events[0].event, created_account.event()); + assert_eq!(&events[1].event, updated_account.event()); + assert_eq!(&events[2].event, deleted_account.event()); + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn find_by_id_since_version() { + let db = PostgresDatabase::new().await.unwrap(); + let mut transaction = db.begin_transaction().await.unwrap(); + let account_id = AccountId::new(Uuid::now_v7()); + let event_id = EventId::from(account_id.clone()); + + let created_account = create_account_command(account_id.clone()); + let update_event = AccountEvent::Updated { + is_bot: AccountIsBot::new(true), + }; + let updated_account = CommandEnvelope::new( + EventId::from(account_id.clone()), + update_event.name(), + update_event, + None, + ); + let delete_event = AccountEvent::Deactivated; + let deleted_account = CommandEnvelope::new( + EventId::from(account_id.clone()), + delete_event.name(), + delete_event, + None, + ); + + db.account_event_store() + .persist(&mut transaction, &created_account) + .await + .unwrap(); + db.account_event_store() + .persist(&mut transaction, &updated_account) + .await + .unwrap(); + db.account_event_store() + .persist(&mut transaction, &deleted_account) + .await + .unwrap(); + + // Get all events to obtain the first version + let all_events = db + .account_event_store() + .find_by_id(&mut transaction, &event_id, None) + .await + .unwrap(); + 
assert_eq!(all_events.len(), 3); + + // Query since the first event's version — should return the 2nd and 3rd events + let since_events = db + .account_event_store() + .find_by_id(&mut transaction, &event_id, Some(&all_events[0].version)) + .await + .unwrap(); + assert_eq!(since_events.len(), 2); + assert_eq!(&since_events[0].event, updated_account.event()); + assert_eq!(&since_events[1].event, deleted_account.event()); + + // Query since the last event's version — should return no events + let no_events = db + .account_event_store() + .find_by_id(&mut transaction, &event_id, Some(&all_events[2].version)) + .await + .unwrap(); + assert_eq!(no_events.len(), 0); + } + } + + mod persist { + use crate::database::PostgresDatabase; + use kernel::interfaces::database::DatabaseConnection; + use kernel::interfaces::event_store::{AccountEventStore, DependOnAccountEventStore}; + use kernel::prelude::entity::{ + Account, AccountEvent, AccountId, AccountIsBot, AccountName, AccountPrivateKey, + AccountPublicKey, AuthAccountId, CommandEnvelope, EventId, KnownEventVersion, Nanoid, + }; + use uuid::Uuid; + + fn create_account_command(account_id: AccountId) -> CommandEnvelope { + let event = AccountEvent::Created { + name: AccountName::new("test"), + private_key: AccountPrivateKey::new("test"), + public_key: AccountPublicKey::new("test"), + is_bot: AccountIsBot::new(false), + nanoid: Nanoid::default(), + auth_account_id: AuthAccountId::new(uuid::Uuid::now_v7()), + }; + CommandEnvelope::new( + EventId::from(account_id), + event.name(), + event, + Some(KnownEventVersion::Nothing), + ) + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn basic_creation() { + let db = PostgresDatabase::new().await.unwrap(); + let mut transaction = db.begin_transaction().await.unwrap(); + let account_id = AccountId::new(Uuid::now_v7()); + let created_account = create_account_command(account_id.clone()); + db.account_event_store() + .persist(&mut transaction, &created_account) + .await + 
.unwrap(); + let events = db + .account_event_store() + .find_by_id(&mut transaction, &EventId::from(account_id), None) + .await + .unwrap(); + assert_eq!(events.len(), 1); + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn persist_and_transform_test() { + let db = PostgresDatabase::new().await.unwrap(); + let mut transaction = db.begin_transaction().await.unwrap(); + let account_id = AccountId::new(Uuid::now_v7()); + let created_account = create_account_command(account_id.clone()); + + let event_envelope = db + .account_event_store() + .persist_and_transform(&mut transaction, created_account.clone()) + .await + .unwrap(); + + assert_eq!(event_envelope.id, EventId::from(account_id.clone())); + assert_eq!(&event_envelope.event, created_account.event()); + + let events = db + .account_event_store() + .find_by_id(&mut transaction, &EventId::from(account_id), None) + .await + .unwrap(); + assert_eq!(events.len(), 1); + assert_eq!(&events[0].event, created_account.event()); + } + } +} diff --git a/driver/src/database/postgres/auth_account.rs b/driver/src/database/postgres/auth_account.rs new file mode 100644 index 0000000..b4348f3 --- /dev/null +++ b/driver/src/database/postgres/auth_account.rs @@ -0,0 +1,339 @@ +use crate::database::{PostgresConnection, PostgresDatabase}; +use crate::ConvertError; +use kernel::interfaces::read_model::{AuthAccountReadModel, DependOnAuthAccountReadModel}; +use kernel::prelude::entity::{ + AuthAccount, AuthAccountClientId, AuthAccountId, AuthHostId, EventVersion, +}; +use kernel::KernelError; +use sqlx::PgConnection; +use uuid::Uuid; + +#[derive(sqlx::FromRow)] +struct AuthAccountRow { + id: Uuid, + host_id: Uuid, + client_id: String, + version: Uuid, +} + +impl From for AuthAccount { + fn from(value: AuthAccountRow) -> Self { + AuthAccount::new( + AuthAccountId::new(value.id), + AuthHostId::new(value.host_id), + AuthAccountClientId::new(value.client_id), + EventVersion::new(value.version), + ) + } +} + +pub struct 
PostgresAuthAccountReadModel; + +impl AuthAccountReadModel for PostgresAuthAccountReadModel { + type Executor = PostgresConnection; + + async fn find_by_id( + &self, + executor: &mut Self::Executor, + account_id: &AuthAccountId, + ) -> error_stack::Result, KernelError> { + let con: &mut PgConnection = executor; + sqlx::query_as::<_, AuthAccountRow>( + //language=postgresql + r#" + SELECT id, host_id, client_id, version + FROM auth_accounts + WHERE id = $1 + "#, + ) + .bind(account_id.as_ref()) + .fetch_optional(con) + .await + .convert_error() + .map(|option| option.map(|row| row.into())) + } + + async fn find_by_client_id( + &self, + executor: &mut Self::Executor, + client_id: &AuthAccountClientId, + ) -> error_stack::Result, KernelError> { + let con: &mut PgConnection = executor; + sqlx::query_as::<_, AuthAccountRow>( + //language=postgresql + r#" + SELECT id, host_id, client_id, version + FROM auth_accounts + WHERE client_id = $1 + "#, + ) + .bind(client_id.as_ref()) + .fetch_optional(con) + .await + .convert_error() + .map(|option| option.map(|row| row.into())) + } + + async fn create( + &self, + executor: &mut Self::Executor, + auth_account: &AuthAccount, + ) -> error_stack::Result<(), KernelError> { + let con: &mut PgConnection = executor; + sqlx::query( + //language=postgresql + r#" + INSERT INTO auth_accounts (id, host_id, client_id, version) VALUES ($1, $2, $3, $4) + "#, + ) + .bind(auth_account.id().as_ref()) + .bind(auth_account.host().as_ref()) + .bind(auth_account.client_id().as_ref()) + .bind(auth_account.version().as_ref()) + .execute(con) + .await + .convert_error()?; + Ok(()) + } + + async fn update( + &self, + executor: &mut Self::Executor, + auth_account: &AuthAccount, + ) -> error_stack::Result<(), KernelError> { + let con: &mut PgConnection = executor; + sqlx::query( + //language=postgresql + r#" + UPDATE auth_accounts SET host_id = $2, client_id = $3, version = $4 + WHERE id = $1 + "#, + ) + .bind(auth_account.id().as_ref()) + 
.bind(auth_account.host().as_ref()) + .bind(auth_account.client_id().as_ref()) + .bind(auth_account.version().as_ref()) + .execute(con) + .await + .convert_error()?; + Ok(()) + } + + async fn delete( + &self, + executor: &mut Self::Executor, + account_id: &AuthAccountId, + ) -> error_stack::Result<(), KernelError> { + let con: &mut PgConnection = executor; + sqlx::query( + //language=postgresql + r#" + DELETE FROM auth_accounts WHERE id = $1 + "#, + ) + .bind(account_id.as_ref()) + .execute(con) + .await + .convert_error()?; + Ok(()) + } +} + +impl DependOnAuthAccountReadModel for PostgresDatabase { + type AuthAccountReadModel = PostgresAuthAccountReadModel; + + fn auth_account_read_model(&self) -> &Self::AuthAccountReadModel { + &PostgresAuthAccountReadModel + } +} + +#[cfg(test)] +mod test { + mod query { + use crate::database::PostgresDatabase; + use kernel::interfaces::database::DatabaseConnection; + use kernel::interfaces::read_model::{AuthAccountReadModel, DependOnAuthAccountReadModel}; + use kernel::interfaces::repository::{AuthHostRepository, DependOnAuthHostRepository}; + use kernel::prelude::entity::{ + AuthAccount, AuthAccountClientId, AuthAccountId, AuthHost, AuthHostId, AuthHostUrl, + EventVersion, + }; + use uuid::Uuid; + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn find_by_id() { + let database = PostgresDatabase::new().await.unwrap(); + let mut transaction = database.begin_transaction().await.unwrap(); + + let auth_host_id = AuthHostId::new(Uuid::now_v7()); + let auth_host = AuthHost::new(auth_host_id.clone(), AuthHostUrl::new(Uuid::now_v7())); + database + .auth_host_repository() + .create(&mut transaction, &auth_host) + .await + .unwrap(); + let account_id = AuthAccountId::new(Uuid::now_v7()); + let auth_account = AuthAccount::new( + account_id.clone(), + auth_host_id, + AuthAccountClientId::new("client_id".to_string()), + EventVersion::new(Uuid::now_v7()), + ); + + database + .auth_account_read_model() + .create(&mut transaction, 
&auth_account) + .await + .unwrap(); + let result = database + .auth_account_read_model() + .find_by_id(&mut transaction, &account_id) + .await + .unwrap(); + assert_eq!(result, Some(auth_account.clone())); + database + .auth_account_read_model() + .delete(&mut transaction, auth_account.id()) + .await + .unwrap(); + } + } + + mod modify { + use crate::database::PostgresDatabase; + use kernel::interfaces::database::DatabaseConnection; + use kernel::interfaces::read_model::{AuthAccountReadModel, DependOnAuthAccountReadModel}; + use kernel::interfaces::repository::{AuthHostRepository, DependOnAuthHostRepository}; + use kernel::prelude::entity::{ + AuthAccount, AuthAccountClientId, AuthAccountId, AuthHost, AuthHostId, AuthHostUrl, + EventVersion, + }; + use uuid::Uuid; + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn create() { + let database = PostgresDatabase::new().await.unwrap(); + let mut transaction = database.begin_transaction().await.unwrap(); + + let host_id = AuthHostId::new(Uuid::now_v7()); + let account_id = AuthAccountId::new(Uuid::now_v7()); + let auth_host = AuthHost::new(host_id.clone(), AuthHostUrl::new(Uuid::now_v7())); + database + .auth_host_repository() + .create(&mut transaction, &auth_host) + .await + .unwrap(); + let auth_account = AuthAccount::new( + account_id.clone(), + host_id, + AuthAccountClientId::new("client_id".to_string()), + EventVersion::new(Uuid::now_v7()), + ); + database + .auth_account_read_model() + .create(&mut transaction, &auth_account) + .await + .unwrap(); + let result = database + .auth_account_read_model() + .find_by_id(&mut transaction, &account_id) + .await + .unwrap(); + assert_eq!(result, Some(auth_account.clone())); + database + .auth_account_read_model() + .delete(&mut transaction, auth_account.id()) + .await + .unwrap(); + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn update() { + let database = PostgresDatabase::new().await.unwrap(); + let mut transaction = 
database.begin_transaction().await.unwrap(); + + let host_id = AuthHostId::new(Uuid::now_v7()); + let account_id = AuthAccountId::new(Uuid::now_v7()); + let auth_host = AuthHost::new(host_id.clone(), AuthHostUrl::new(Uuid::now_v7())); + database + .auth_host_repository() + .create(&mut transaction, &auth_host) + .await + .unwrap(); + let auth_account = AuthAccount::new( + account_id.clone(), + host_id.clone(), + AuthAccountClientId::new("client_id".to_string()), + EventVersion::new(Uuid::now_v7()), + ); + database + .auth_account_read_model() + .create(&mut transaction, &auth_account) + .await + .unwrap(); + let updated_auth_account = AuthAccount::new( + account_id.clone(), + host_id, + AuthAccountClientId::new("updated_client_id".to_string()), + EventVersion::new(Uuid::now_v7()), + ); + database + .auth_account_read_model() + .update(&mut transaction, &updated_auth_account) + .await + .unwrap(); + let result = database + .auth_account_read_model() + .find_by_id(&mut transaction, &account_id) + .await + .unwrap(); + assert_eq!(result, Some(updated_auth_account)); + database + .auth_account_read_model() + .delete(&mut transaction, auth_account.id()) + .await + .unwrap(); + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn delete() { + let database = PostgresDatabase::new().await.unwrap(); + let mut transaction = database.begin_transaction().await.unwrap(); + + let host_id = AuthHostId::new(Uuid::now_v7()); + let auth_host = AuthHost::new(host_id.clone(), AuthHostUrl::new(Uuid::now_v7())); + database + .auth_host_repository() + .create(&mut transaction, &auth_host) + .await + .unwrap(); + let account_id = AuthAccountId::new(Uuid::now_v7()); + let auth_account = AuthAccount::new( + account_id.clone(), + host_id, + AuthAccountClientId::new("client_id".to_string()), + EventVersion::new(Uuid::now_v7()), + ); + database + .auth_account_read_model() + .create(&mut transaction, &auth_account) + .await + .unwrap(); + database + .auth_account_read_model() + 
.delete(&mut transaction, &account_id) + .await + .unwrap(); + let result = database + .auth_account_read_model() + .find_by_id(&mut transaction, &account_id) + .await + .unwrap(); + assert_eq!(result, None); + } + } +} diff --git a/driver/src/database/postgres/auth_account_event_store.rs b/driver/src/database/postgres/auth_account_event_store.rs new file mode 100644 index 0000000..d005732 --- /dev/null +++ b/driver/src/database/postgres/auth_account_event_store.rs @@ -0,0 +1,351 @@ +use crate::database::postgres::{CountRow, VersionRow}; +use crate::database::{PostgresConnection, PostgresDatabase}; +use crate::ConvertError; +use error_stack::Report; +use kernel::interfaces::event_store::{AuthAccountEventStore, DependOnAuthAccountEventStore}; +use kernel::prelude::entity::{ + AuthAccount, AuthAccountEvent, CommandEnvelope, EventEnvelope, EventId, EventVersion, + KnownEventVersion, +}; +use kernel::KernelError; +use serde_json; +use sqlx::PgConnection; +use uuid::Uuid; + +#[derive(sqlx::FromRow)] +struct EventRow { + version: Uuid, + id: Uuid, + #[allow(dead_code)] + event_name: String, + data: serde_json::Value, +} + +impl TryFrom for EventEnvelope { + type Error = Report; + fn try_from(value: EventRow) -> Result { + let event: AuthAccountEvent = serde_json::from_value(value.data).convert_error()?; + Ok(EventEnvelope::new( + EventId::new(value.id), + event, + EventVersion::new(value.version), + )) + } +} + +pub struct PostgresAuthAccountEventStore; + +impl AuthAccountEventStore for PostgresAuthAccountEventStore { + type Executor = PostgresConnection; + + async fn find_by_id( + &self, + executor: &mut Self::Executor, + id: &EventId, + since: Option<&EventVersion>, + ) -> error_stack::Result>, KernelError> { + let con: &mut PgConnection = executor; + let rows = if let Some(version) = since { + sqlx::query_as::<_, EventRow>( + //language=postgresql + r#" + SELECT version, id, event_name, data + FROM auth_account_events + WHERE id = $1 AND version > $2 + ORDER BY 
version + "#, + ) + .bind(id.as_ref()) + .bind(version.as_ref()) + .fetch_all(con) + .await + .convert_error()? + } else { + sqlx::query_as::<_, EventRow>( + //language=postgresql + r#" + SELECT version, id, event_name, data + FROM auth_account_events + WHERE id = $1 + ORDER BY version + "#, + ) + .bind(id.as_ref()) + .fetch_all(con) + .await + .convert_error()? + }; + rows.into_iter() + .map(|row| row.try_into()) + .collect::, KernelError>>() + } + + async fn persist( + &self, + executor: &mut Self::Executor, + command: &CommandEnvelope, + ) -> error_stack::Result<(), KernelError> { + self.persist_internal(executor, command, Uuid::now_v7()) + .await + } + + async fn persist_and_transform( + &self, + executor: &mut Self::Executor, + command: CommandEnvelope, + ) -> error_stack::Result, KernelError> { + let version = Uuid::now_v7(); + self.persist_internal(executor, &command, version).await?; + + let command = command.into_destruct(); + Ok(EventEnvelope::new( + command.id, + command.event, + EventVersion::new(version), + )) + } +} + +impl PostgresAuthAccountEventStore { + async fn persist_internal( + &self, + executor: &mut PostgresConnection, + command: &CommandEnvelope, + version: Uuid, + ) -> error_stack::Result<(), KernelError> { + let con: &mut PgConnection = executor; + + let event_name = command.event_name(); + let prev_version = command.prev_version().as_ref(); + if let Some(prev_version) = prev_version { + match prev_version { + KnownEventVersion::Nothing => { + let amount = sqlx::query_as::<_, CountRow>( + //language=postgresql + r#" + SELECT COUNT(*) + FROM auth_account_events + WHERE id = $1 + "#, + ) + .bind(command.id().as_ref()) + .fetch_one(&mut *con) + .await + .convert_error()?; + if amount.count != 0 { + return Err(Report::new(KernelError::Concurrency).attach_printable( + format!("Event {} already exists", command.id().as_ref()), + )); + } + } + KnownEventVersion::Prev(prev_version) => { + let last_version = sqlx::query_as::<_, VersionRow>( + 
//language=postgresql + r#" + SELECT version + FROM auth_account_events + WHERE id = $1 + ORDER BY version DESC + LIMIT 1 + "#, + ) + .bind(command.id().as_ref()) + .fetch_optional(&mut *con) + .await + .convert_error()?; + if last_version + .map(|row: VersionRow| &row.version != prev_version.as_ref()) + .unwrap_or(true) + { + return Err(Report::new(KernelError::Concurrency).attach_printable( + format!( + "Event {} version {} already exists", + command.id().as_ref(), + prev_version.as_ref() + ), + )); + } + } + }; + } + + sqlx::query( + //language=postgresql + r#" + INSERT INTO auth_account_events (version, id, event_name, data) + VALUES ($1, $2, $3, $4) + "#, + ) + .bind(version) + .bind(command.id().as_ref()) + .bind(event_name) + .bind(serde_json::to_value(command.event()).convert_error()?) + .execute(con) + .await + .convert_error()?; + + Ok(()) + } +} + +impl DependOnAuthAccountEventStore for PostgresDatabase { + type AuthAccountEventStore = PostgresAuthAccountEventStore; + + fn auth_account_event_store(&self) -> &Self::AuthAccountEventStore { + &PostgresAuthAccountEventStore + } +} + +#[cfg(test)] +mod test { + use crate::database::PostgresDatabase; + use kernel::interfaces::database::DatabaseConnection; + use kernel::interfaces::event_store::{AuthAccountEventStore, DependOnAuthAccountEventStore}; + use kernel::prelude::entity::{ + AuthAccount, AuthAccountClientId, AuthAccountEvent, AuthAccountId, AuthHostId, + CommandEnvelope, EventId, + }; + use uuid::Uuid; + + fn create_auth_account_command( + id: AuthAccountId, + ) -> CommandEnvelope { + AuthAccount::create( + id, + AuthHostId::new(Uuid::now_v7()), + AuthAccountClientId::new("test_client"), + ) + } + + mod query { + use super::*; + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn find_by_id() { + let db = PostgresDatabase::new().await.unwrap(); + let mut transaction = db.begin_transaction().await.unwrap(); + let id = AuthAccountId::new(Uuid::now_v7()); + let event_id = 
EventId::from(id.clone()); + let events = db + .auth_account_event_store() + .find_by_id(&mut transaction, &event_id, None) + .await + .unwrap(); + assert_eq!(events.len(), 0); + + let created = create_auth_account_command(id.clone()); + db.auth_account_event_store() + .persist_and_transform(&mut transaction, created.clone()) + .await + .unwrap(); + + let events = db + .auth_account_event_store() + .find_by_id(&mut transaction, &event_id, None) + .await + .unwrap(); + assert_eq!(events.len(), 1); + assert_eq!(&events[0].event, created.event()); + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn find_by_id_since_version() { + let db = PostgresDatabase::new().await.unwrap(); + let mut transaction = db.begin_transaction().await.unwrap(); + let id = AuthAccountId::new(Uuid::now_v7()); + let event_id = EventId::from(id.clone()); + + let created = create_auth_account_command(id.clone()); + let create_envelope = db + .auth_account_event_store() + .persist_and_transform(&mut transaction, created.clone()) + .await + .unwrap(); + + let all_events = db + .auth_account_event_store() + .find_by_id(&mut transaction, &event_id, None) + .await + .unwrap(); + assert_eq!(all_events.len(), 1); + + let no_events = db + .auth_account_event_store() + .find_by_id(&mut transaction, &event_id, Some(&create_envelope.version)) + .await + .unwrap(); + assert_eq!(no_events.len(), 0); + } + } + + mod persist { + use super::*; + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn basic_creation() { + let db = PostgresDatabase::new().await.unwrap(); + let mut transaction = db.begin_transaction().await.unwrap(); + let id = AuthAccountId::new(Uuid::now_v7()); + let created = create_auth_account_command(id.clone()); + db.auth_account_event_store() + .persist(&mut transaction, &created) + .await + .unwrap(); + let events = db + .auth_account_event_store() + .find_by_id(&mut transaction, &EventId::from(id), None) + .await + .unwrap(); + assert_eq!(events.len(), 1); + } + + 
#[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn persist_and_transform_test() { + let db = PostgresDatabase::new().await.unwrap(); + let mut transaction = db.begin_transaction().await.unwrap(); + let id = AuthAccountId::new(Uuid::now_v7()); + let created = create_auth_account_command(id.clone()); + + let event_envelope = db + .auth_account_event_store() + .persist_and_transform(&mut transaction, created.clone()) + .await + .unwrap(); + + assert_eq!(event_envelope.id, EventId::from(id.clone())); + assert_eq!(&event_envelope.event, created.event()); + + let events = db + .auth_account_event_store() + .find_by_id(&mut transaction, &EventId::from(id), None) + .await + .unwrap(); + assert_eq!(events.len(), 1); + assert_eq!(&events[0].event, created.event()); + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn optimistic_concurrency_nothing() { + let db = PostgresDatabase::new().await.unwrap(); + let mut transaction = db.begin_transaction().await.unwrap(); + let id = AuthAccountId::new(Uuid::now_v7()); + let created = create_auth_account_command(id.clone()); + db.auth_account_event_store() + .persist(&mut transaction, &created) + .await + .unwrap(); + + let duplicate = create_auth_account_command(id.clone()); + let result = db + .auth_account_event_store() + .persist(&mut transaction, &duplicate) + .await; + assert!(result.is_err()); + } + } +} diff --git a/driver/src/database/postgres/auth_host.rs b/driver/src/database/postgres/auth_host.rs new file mode 100644 index 0000000..f645dc2 --- /dev/null +++ b/driver/src/database/postgres/auth_host.rs @@ -0,0 +1,240 @@ +use crate::database::{PostgresConnection, PostgresDatabase}; +use crate::ConvertError; +use kernel::interfaces::repository::{AuthHostRepository, DependOnAuthHostRepository}; +use kernel::prelude::entity::{AuthHost, AuthHostId, AuthHostUrl}; +use kernel::KernelError; +use sqlx::PgConnection; +use uuid::Uuid; + +#[derive(sqlx::FromRow)] +struct AuthHostRow { + id: Uuid, + url: String, 
+} + +impl From for AuthHost { + fn from(row: AuthHostRow) -> Self { + AuthHost::new(AuthHostId::new(row.id), AuthHostUrl::new(row.url)) + } +} + +pub struct PostgresAuthHostRepository; + +impl AuthHostRepository for PostgresAuthHostRepository { + type Executor = PostgresConnection; + async fn find_by_id( + &self, + executor: &mut Self::Executor, + id: &AuthHostId, + ) -> error_stack::Result, KernelError> { + let con: &mut PgConnection = executor; + sqlx::query_as::<_, AuthHostRow>( + // language=postgresql + r#" + SELECT id, url + FROM auth_hosts + WHERE id = $1 + "#, + ) + .bind(id.as_ref()) + .fetch_optional(con) + .await + .convert_error() + .map(|row| row.map(AuthHost::from)) + } + + async fn find_by_url( + &self, + executor: &mut Self::Executor, + domain: &AuthHostUrl, + ) -> error_stack::Result, KernelError> { + let con: &mut PgConnection = executor; + sqlx::query_as::<_, AuthHostRow>( + // language=postgresql + r#" + SELECT id, url + FROM auth_hosts + WHERE url = $1 + "#, + ) + .bind(domain.as_ref()) + .fetch_optional(con) + .await + .convert_error() + .map(|row| row.map(AuthHost::from)) + } + + async fn create( + &self, + executor: &mut Self::Executor, + auth_host: &AuthHost, + ) -> error_stack::Result<(), KernelError> { + let con: &mut PgConnection = executor; + sqlx::query( + // language=postgresql + r#" + INSERT INTO auth_hosts (id, url) + VALUES ($1, $2) + "#, + ) + .bind(auth_host.id().as_ref()) + .bind(auth_host.url().as_ref()) + .execute(con) + .await + .convert_error() + .map(|_| ()) + } + + async fn update( + &self, + executor: &mut Self::Executor, + auth_host: &AuthHost, + ) -> error_stack::Result<(), KernelError> { + let con: &mut PgConnection = executor; + sqlx::query( + // language=postgresql + r#" + UPDATE auth_hosts + SET url = $2 + WHERE id = $1 + "#, + ) + .bind(auth_host.id().as_ref()) + .bind(auth_host.url().as_ref()) + .execute(con) + .await + .convert_error() + .map(|_| ()) + } +} + +impl DependOnAuthHostRepository for PostgresDatabase 
{ + type AuthHostRepository = PostgresAuthHostRepository; + + fn auth_host_repository(&self) -> &Self::AuthHostRepository { + &PostgresAuthHostRepository + } +} + +#[cfg(test)] +mod test { + use kernel::prelude::entity::AuthHostUrl; + use uuid::Uuid; + + fn url() -> AuthHostUrl { + AuthHostUrl::new(format!("https://{}.example.com", Uuid::now_v7())) + } + + mod query { + use crate::database::postgres::auth_host::test::url; + use crate::database::PostgresDatabase; + use kernel::interfaces::database::DatabaseConnection; + use kernel::interfaces::repository::{AuthHostRepository, DependOnAuthHostRepository}; + use kernel::prelude::entity::{AuthHost, AuthHostId}; + use uuid::Uuid; + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn find_by_id() { + let database = PostgresDatabase::new().await.unwrap(); + let mut transaction = database.begin_transaction().await.unwrap(); + + let auth_host = AuthHost::new(AuthHostId::new(Uuid::now_v7()), url()); + database + .auth_host_repository() + .create(&mut transaction, &auth_host) + .await + .unwrap(); + + let found_auth_host = database + .auth_host_repository() + .find_by_id(&mut transaction, auth_host.id()) + .await + .unwrap() + .unwrap(); + assert_eq!(auth_host, found_auth_host); + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn find_by_url() { + let database = PostgresDatabase::new().await.unwrap(); + let mut transaction = database.begin_transaction().await.unwrap(); + + let auth_host = AuthHost::new(AuthHostId::new(Uuid::now_v7()), url()); + database + .auth_host_repository() + .create(&mut transaction, &auth_host) + .await + .unwrap(); + + let found_auth_host = database + .auth_host_repository() + .find_by_url(&mut transaction, auth_host.url()) + .await + .unwrap() + .unwrap(); + assert_eq!(auth_host, found_auth_host); + } + } + + mod modify { + use crate::database::postgres::auth_host::test::url; + use crate::database::PostgresDatabase; + use kernel::interfaces::database::DatabaseConnection; + 
use kernel::interfaces::repository::{AuthHostRepository, DependOnAuthHostRepository}; + use kernel::prelude::entity::{AuthHost, AuthHostId}; + use uuid::Uuid; + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn create() { + let database = PostgresDatabase::new().await.unwrap(); + let mut transaction = database.begin_transaction().await.unwrap(); + + let auth_host = AuthHost::new(AuthHostId::new(Uuid::now_v7()), url()); + database + .auth_host_repository() + .create(&mut transaction, &auth_host) + .await + .unwrap(); + + let found_auth_host = database + .auth_host_repository() + .find_by_id(&mut transaction, auth_host.id()) + .await + .unwrap() + .unwrap(); + assert_eq!(auth_host, found_auth_host); + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn update() { + let database = PostgresDatabase::new().await.unwrap(); + let mut transaction = database.begin_transaction().await.unwrap(); + + let auth_host = AuthHost::new(AuthHostId::new(Uuid::now_v7()), url()); + database + .auth_host_repository() + .create(&mut transaction, &auth_host) + .await + .unwrap(); + + let updated_auth_host = AuthHost::new(auth_host.id().clone(), url()); + database + .auth_host_repository() + .update(&mut transaction, &updated_auth_host) + .await + .unwrap(); + + let found_auth_host = database + .auth_host_repository() + .find_by_id(&mut transaction, auth_host.id()) + .await + .unwrap() + .unwrap(); + assert_eq!(updated_auth_host, found_auth_host); + } + } +} diff --git a/driver/src/database/postgres/event.rs b/driver/src/database/postgres/event.rs deleted file mode 100644 index b8fdb1b..0000000 --- a/driver/src/database/postgres/event.rs +++ /dev/null @@ -1,319 +0,0 @@ -use crate::database::postgres::{CountRow, VersionRow}; -use crate::database::{PostgresConnection, PostgresDatabase}; -use crate::ConvertError; -use error_stack::Report; -use kernel::interfaces::modify::{DependOnEventModifier, EventModifier}; -use kernel::interfaces::query::{DependOnEventQuery, 
EventQuery}; -use kernel::prelude::entity::{ - CommandEnvelope, EventEnvelope, EventId, EventVersion, KnownEventVersion, -}; -use kernel::KernelError; -use serde::{Deserialize, Serialize}; -use sqlx::PgConnection; -use uuid::Uuid; - -#[derive(sqlx::FromRow)] -struct EventRow { - version: Uuid, - id: Uuid, - event_name: String, - data: serde_json::Value, -} - -impl Deserialize<'a>, Entity> TryFrom for EventEnvelope { - type Error = Report; - fn try_from(value: EventRow) -> Result { - let event: Event = serde_json::from_value(value.data).convert_error()?; - Ok(EventEnvelope::new( - EventId::new(value.id), - event, - EventVersion::new(value.version), - )) - } -} - -pub struct PostgresEventRepository; - -impl EventQuery for PostgresEventRepository { - type Transaction = PostgresConnection; - - async fn find_by_id Deserialize<'de>, Entity>( - &self, - transaction: &mut Self::Transaction, - id: &EventId, - since: Option<&EventVersion>, - ) -> error_stack::Result>, KernelError> { - let con: &mut PgConnection = transaction; - if let Some(version) = since { - sqlx::query_as::<_, EventRow>( - //language=postgresql - r#" - SELECT version, id, event_name, data - FROM event_streams - WHERE id = $2 AND version > $1 - ORDER BY version - "#, - ) - .bind(version.as_ref()) - } else { - sqlx::query_as::<_, EventRow>( - //language=postgresql - r#" - SELECT version, id, event_name, data - FROM event_streams - WHERE id = $1 - ORDER BY version - "#, - ) - } - .bind(id.as_ref()) - .fetch_all(con) - .await - .convert_error() - .and_then(|versions| { - versions - .into_iter() - .map(|row| row.try_into()) - .collect::>, KernelError>>() - }) - } -} - -impl DependOnEventQuery for PostgresDatabase { - type EventQuery = PostgresEventRepository; - - fn event_query(&self) -> &Self::EventQuery { - &PostgresEventRepository - } -} - -impl EventModifier for PostgresEventRepository { - type Transaction = PostgresConnection; - - async fn handle( - &self, - transaction: &mut Self::Transaction, - event: 
&CommandEnvelope, - ) -> error_stack::Result<(), KernelError> { - let con: &mut PgConnection = transaction; - - let event_name = event.event_name(); - let version = event.prev_version().as_ref(); - if let Some(prev_version) = version { - match prev_version { - KnownEventVersion::Nothing => { - let amount = sqlx::query_as::<_, CountRow>( - //language=postgresql - r#" - SELECT COUNT(*) - FROM event_streams - WHERE id = $1 - "#, - ) - .bind(event.id().as_ref()) - .fetch_one(&mut *con) - .await - .convert_error()?; - if amount.count != 0 { - return Err(Report::new(KernelError::Concurrency).attach_printable( - format!("Event {} already exists", event.id().as_ref()), - )); - } - } - KnownEventVersion::Prev(prev_version) => { - let last_version = sqlx::query_as::<_, VersionRow>( - //language=postgresql - r#" - SELECT version - FROM event_streams - WHERE id = $1 - ORDER BY version DESC - LIMIT 1 - "#, - ) - .bind(event.id().as_ref()) - .fetch_optional(&mut *con) - .await - .convert_error()?; - if last_version - .map(|row: VersionRow| &row.version != prev_version.as_ref()) - .unwrap_or(true) - { - return Err(Report::new(KernelError::Concurrency).attach_printable( - format!( - "Event {} version {} already exists", - event.id().as_ref(), - prev_version.as_ref() - ), - )); - } - } - }; - } - sqlx::query( - //language=postgresql - r#" - INSERT INTO event_streams (version, id, event_name, data) - VALUES ($1, $2, $3, $4) - "#, - ) - .bind(Uuid::now_v7()) - .bind(event.id().as_ref()) - .bind(event_name) - .bind(serde_json::to_value(event.event()).convert_error()?) 
- .execute(con) - .await - .convert_error()?; - Ok(()) - } -} - -impl DependOnEventModifier for PostgresDatabase { - type EventModifier = PostgresEventRepository; - - fn event_modifier(&self) -> &Self::EventModifier { - &PostgresEventRepository - } -} - -#[cfg(test)] -mod test { - mod query { - use uuid::Uuid; - - use kernel::interfaces::database::DatabaseConnection; - use kernel::interfaces::modify::{DependOnEventModifier, EventModifier}; - use kernel::interfaces::query::{DependOnEventQuery, EventQuery}; - use kernel::prelude::entity::{ - Account, AccountId, AccountIsBot, AccountName, AccountPrivateKey, AccountPublicKey, - EventId, Nanoid, - }; - - use crate::database::PostgresDatabase; - - #[tokio::test] - async fn find_by_id() { - let db = PostgresDatabase::new().await.unwrap(); - let mut transaction = db.begin_transaction().await.unwrap(); - let account_id = AccountId::new(Uuid::now_v7()); - let event_id = EventId::from(account_id.clone()); - let events = db - .event_query() - .find_by_id(&mut transaction, &event_id, None) - .await - .unwrap(); - assert_eq!(events.len(), 0); - let created_account = Account::create( - account_id.clone(), - AccountName::new("test"), - AccountPrivateKey::new("test"), - AccountPublicKey::new("test"), - AccountIsBot::new(false), - Nanoid::default(), - ); - let updated_account = Account::update(account_id.clone(), AccountIsBot::new(true)); - let deleted_account = Account::delete(account_id.clone()); - - db.event_modifier() - .handle(&mut transaction, &created_account) - .await - .unwrap(); - db.event_modifier() - .handle(&mut transaction, &updated_account) - .await - .unwrap(); - db.event_modifier() - .handle(&mut transaction, &deleted_account) - .await - .unwrap(); - let events = db - .event_query() - .find_by_id(&mut transaction, &event_id, None) - .await - .unwrap(); - assert_eq!(events.len(), 3); - assert_eq!(&events[0].event, created_account.event()); - assert_eq!(&events[1].event, updated_account.event()); - 
assert_eq!(&events[2].event, deleted_account.event()); - } - - #[tokio::test] - #[should_panic] - async fn find_by_id_with_version() { - let db = PostgresDatabase::new().await.unwrap(); - let mut transaction = db.begin_transaction().await.unwrap(); - let account_id = AccountId::new(Uuid::now_v7()); - let event_id = EventId::from(account_id.clone()); - let created_account = Account::create( - account_id.clone(), - AccountName::new("test"), - AccountPrivateKey::new("test"), - AccountPublicKey::new("test"), - AccountIsBot::new(false), - Nanoid::default(), - ); - let updated_account = Account::update(account_id.clone(), AccountIsBot::new(true)); - db.event_modifier() - .handle(&mut transaction, &created_account) - .await - .unwrap(); - db.event_modifier() - .handle(&mut transaction, &updated_account) - .await - .unwrap(); - - let all_events = db - .event_query() - .find_by_id(&mut transaction, &event_id, None) - .await - .unwrap(); - let events = db - .event_query() - .find_by_id(&mut transaction, &event_id, Some(&all_events[1].version)) - .await - .unwrap(); - assert_eq!(events.len(), 1); - let event = &events[0]; - assert_eq!(&event.event, updated_account.event()); - } - } - - mod modify { - use uuid::Uuid; - - use kernel::interfaces::database::DatabaseConnection; - use kernel::interfaces::modify::{DependOnEventModifier, EventModifier}; - use kernel::interfaces::query::{DependOnEventQuery, EventQuery}; - use kernel::prelude::entity::{ - Account, AccountId, AccountIsBot, AccountName, AccountPrivateKey, AccountPublicKey, - EventId, Nanoid, - }; - - use crate::database::PostgresDatabase; - - #[tokio::test] - async fn basic_creation() { - let db = PostgresDatabase::new().await.unwrap(); - let mut transaction = db.begin_transaction().await.unwrap(); - let account_id = AccountId::new(Uuid::now_v7()); - let created_account = Account::create( - account_id.clone(), - AccountName::new("test"), - AccountPrivateKey::new("test"), - AccountPublicKey::new("test"), - 
AccountIsBot::new(false), - Nanoid::default(), - ); - db.event_modifier() - .handle(&mut transaction, &created_account) - .await - .unwrap(); - let events = db - .event_query() - .find_by_id(&mut transaction, &EventId::from(account_id), None) - .await - .unwrap(); - assert_eq!(events.len(), 1); - } - } -} diff --git a/driver/src/database/postgres/follow.rs b/driver/src/database/postgres/follow.rs index 8c90e87..985eae6 100644 --- a/driver/src/database/postgres/follow.rs +++ b/driver/src/database/postgres/follow.rs @@ -1,8 +1,7 @@ use crate::database::{PostgresConnection, PostgresDatabase}; use crate::ConvertError; use error_stack::Report; -use kernel::interfaces::modify::{DependOnFollowModifier, FollowModifier}; -use kernel::interfaces::query::{DependOnFollowQuery, FollowQuery}; +use kernel::interfaces::repository::{DependOnFollowRepository, FollowRepository}; use kernel::prelude::entity::{ AccountId, Follow, FollowApprovedAt, FollowId, FollowTargetId, RemoteAccountId, }; @@ -62,15 +61,22 @@ impl TryFrom for Follow { pub struct PostgresFollowRepository; -impl FollowQuery for PostgresFollowRepository { - type Transaction = PostgresConnection; +fn split_follow_target_id(target_id: &FollowTargetId) -> (Option<&Uuid>, Option<&Uuid>) { + match target_id { + FollowTargetId::Local(account_id) => (Some(account_id.as_ref()), None), + FollowTargetId::Remote(remote_account_id) => (None, Some(remote_account_id.as_ref())), + } +} + +impl FollowRepository for PostgresFollowRepository { + type Executor = PostgresConnection; async fn find_followings( &self, - transaction: &mut Self::Transaction, + executor: &mut Self::Executor, source_id: &FollowTargetId, ) -> error_stack::Result, KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; match source_id { FollowTargetId::Local(account_id) => { sqlx::query_as::<_, FollowRow>( @@ -100,10 +106,10 @@ impl FollowQuery for PostgresFollowRepository { async fn find_followers( &self, - transaction: 
&mut Self::Transaction, + executor: &mut Self::Executor, destination_id: &FollowTargetId, ) -> error_stack::Result, KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; match destination_id { FollowTargetId::Local(account_id) => { sqlx::query_as::<_, FollowRow>( @@ -130,32 +136,13 @@ impl FollowQuery for PostgresFollowRepository { .convert_error() .and_then(|rows| rows.into_iter().map(Follow::try_from).collect::>()) } -} - -impl DependOnFollowQuery for PostgresDatabase { - type FollowQuery = PostgresFollowRepository; - - fn follow_query(&self) -> &Self::FollowQuery { - &PostgresFollowRepository - } -} - -fn split_follow_target_id(target_id: &FollowTargetId) -> (Option<&Uuid>, Option<&Uuid>) { - match target_id { - FollowTargetId::Local(account_id) => (Some(account_id.as_ref()), None), - FollowTargetId::Remote(remote_account_id) => (None, Some(remote_account_id.as_ref())), - } -} - -impl FollowModifier for PostgresFollowRepository { - type Transaction = PostgresConnection; async fn create( &self, - transaction: &mut Self::Transaction, + executor: &mut Self::Executor, follow: &Follow, ) -> error_stack::Result<(), KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; let (follower_local_id, follower_remote_id) = split_follow_target_id(follow.source()); let (followee_local_id, followee_remote_id) = split_follow_target_id(follow.destination()); sqlx::query( @@ -178,10 +165,10 @@ impl FollowModifier for PostgresFollowRepository { async fn update( &self, - transaction: &mut Self::Transaction, + executor: &mut Self::Executor, follow: &Follow, ) -> error_stack::Result<(), KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; let (follower_local_id, follower_remote_id) = split_follow_target_id(follow.source()); let (followee_local_id, followee_remote_id) = split_follow_target_id(follow.destination()); sqlx::query( @@ -205,10 +192,10 @@ 
impl FollowModifier for PostgresFollowRepository { async fn delete( &self, - transaction: &mut Self::Transaction, + executor: &mut Self::Executor, follow_id: &FollowId, ) -> error_stack::Result<(), KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; sqlx::query( //language=postgresql r#" @@ -223,10 +210,10 @@ impl FollowModifier for PostgresFollowRepository { } } -impl DependOnFollowModifier for PostgresDatabase { - type FollowModifier = PostgresFollowRepository; +impl DependOnFollowRepository for PostgresDatabase { + type FollowRepository = PostgresFollowRepository; - fn follow_modifier(&self) -> &Self::FollowModifier { + fn follow_repository(&self) -> &Self::FollowRepository { &PostgresFollowRepository } } @@ -236,16 +223,15 @@ mod test { mod query { use crate::database::PostgresDatabase; use kernel::interfaces::database::DatabaseConnection; - use kernel::interfaces::modify::{ - AccountModifier, DependOnAccountModifier, DependOnFollowModifier, FollowModifier, - }; - use kernel::interfaces::query::{DependOnFollowQuery, FollowQuery}; + use kernel::interfaces::read_model::{AccountReadModel, DependOnAccountReadModel}; + use kernel::interfaces::repository::{DependOnFollowRepository, FollowRepository}; use kernel::prelude::entity::{ Account, AccountId, AccountIsBot, AccountName, AccountPrivateKey, AccountPublicKey, - EventVersion, Follow, FollowApprovedAt, FollowId, FollowTargetId, Nanoid, + CreatedAt, EventVersion, Follow, FollowApprovedAt, FollowId, FollowTargetId, Nanoid, }; use uuid::Uuid; + #[test_with::env(DATABASE_URL)] #[tokio::test] async fn find_followers() { let database = PostgresDatabase::new().await.unwrap(); @@ -260,9 +246,10 @@ mod test { None, EventVersion::new(Uuid::now_v7()), Nanoid::default(), + CreatedAt::now(), ); database - .account_modifier() + .account_read_model() .create(&mut transaction, &follower_account) .await .unwrap(); @@ -276,9 +263,10 @@ mod test { None, 
EventVersion::new(Uuid::now_v7()), Nanoid::default(), + CreatedAt::now(), ); database - .account_modifier() + .account_read_model() .create(&mut transaction, &followee_account) .await .unwrap(); @@ -291,41 +279,42 @@ mod test { .unwrap(); database - .follow_modifier() + .follow_repository() .create(&mut transaction, &follow) .await .unwrap(); let followers = database - .follow_query() + .follow_repository() .find_followings(&mut transaction, &FollowTargetId::from(follower_id)) .await .unwrap(); assert_eq!(followers[0].id(), follow.id()); let followers = database - .follow_query() + .follow_repository() .find_followings(&mut transaction, &FollowTargetId::from(followee_id)) .await .unwrap(); assert!(followers.is_empty()); database - .follow_modifier() + .follow_repository() .delete(&mut transaction, follow.id()) .await .unwrap(); database - .account_modifier() - .delete(&mut transaction, follower_account.id()) + .account_read_model() + .deactivate(&mut transaction, follower_account.id()) .await .unwrap(); database - .account_modifier() - .delete(&mut transaction, followee_account.id()) + .account_read_model() + .deactivate(&mut transaction, followee_account.id()) .await .unwrap(); } + #[test_with::env(DATABASE_URL)] #[tokio::test] async fn find_followings() { let database = PostgresDatabase::new().await.unwrap(); @@ -340,9 +329,10 @@ mod test { None, EventVersion::new(Uuid::now_v7()), Nanoid::default(), + CreatedAt::now(), ); database - .account_modifier() + .account_read_model() .create(&mut transaction, &follower_account) .await .unwrap(); @@ -356,9 +346,10 @@ mod test { None, EventVersion::new(Uuid::now_v7()), Nanoid::default(), + CreatedAt::now(), ); database - .account_modifier() + .account_read_model() .create(&mut transaction, &followee_account) .await .unwrap(); @@ -371,37 +362,37 @@ mod test { .unwrap(); database - .follow_modifier() + .follow_repository() .create(&mut transaction, &follow) .await .unwrap(); let followings = database - .follow_query() + 
.follow_repository() .find_followers(&mut transaction, &FollowTargetId::from(followee_id)) .await .unwrap(); assert_eq!(followings[0].id(), follow.id()); let followings = database - .follow_query() + .follow_repository() .find_followers(&mut transaction, &FollowTargetId::from(follower_id)) .await .unwrap(); assert!(followings.is_empty()); database - .follow_modifier() + .follow_repository() .delete(&mut transaction, follow.id()) .await .unwrap(); database - .account_modifier() - .delete(&mut transaction, follower_account.id()) + .account_read_model() + .deactivate(&mut transaction, follower_account.id()) .await .unwrap(); database - .account_modifier() - .delete(&mut transaction, followee_account.id()) + .account_read_model() + .deactivate(&mut transaction, followee_account.id()) .await .unwrap(); } @@ -410,16 +401,15 @@ mod test { mod modify { use crate::database::PostgresDatabase; use kernel::interfaces::database::DatabaseConnection; - use kernel::interfaces::modify::{ - AccountModifier, DependOnAccountModifier, DependOnFollowModifier, FollowModifier, - }; - use kernel::interfaces::query::{DependOnFollowQuery, FollowQuery}; + use kernel::interfaces::read_model::{AccountReadModel, DependOnAccountReadModel}; + use kernel::interfaces::repository::{DependOnFollowRepository, FollowRepository}; use kernel::prelude::entity::{ Account, AccountId, AccountIsBot, AccountName, AccountPrivateKey, AccountPublicKey, - EventVersion, Follow, FollowApprovedAt, FollowId, FollowTargetId, Nanoid, + CreatedAt, EventVersion, Follow, FollowApprovedAt, FollowId, FollowTargetId, Nanoid, }; use uuid::Uuid; + #[test_with::env(DATABASE_URL)] #[tokio::test] async fn create() { let database = PostgresDatabase::new().await.unwrap(); @@ -434,9 +424,10 @@ mod test { None, EventVersion::new(Uuid::now_v7()), Nanoid::default(), + CreatedAt::now(), ); database - .account_modifier() + .account_read_model() .create(&mut transaction, &follower_account) .await .unwrap(); @@ -450,9 +441,10 @@ mod test { 
None, EventVersion::new(Uuid::now_v7()), Nanoid::default(), + CreatedAt::now(), ); database - .account_modifier() + .account_read_model() .create(&mut transaction, &followee_account) .await .unwrap(); @@ -465,27 +457,28 @@ mod test { .unwrap(); database - .follow_modifier() + .follow_repository() .create(&mut transaction, &follow) .await .unwrap(); database - .follow_modifier() + .follow_repository() .delete(&mut transaction, follow.id()) .await .unwrap(); database - .account_modifier() - .delete(&mut transaction, follower_account.id()) + .account_read_model() + .deactivate(&mut transaction, follower_account.id()) .await .unwrap(); database - .account_modifier() - .delete(&mut transaction, followee_account.id()) + .account_read_model() + .deactivate(&mut transaction, followee_account.id()) .await .unwrap(); } + #[test_with::env(DATABASE_URL)] #[tokio::test] async fn update() { let database = PostgresDatabase::new().await.unwrap(); @@ -501,9 +494,10 @@ mod test { None, EventVersion::new(Uuid::now_v7()), Nanoid::default(), + CreatedAt::now(), ); database - .account_modifier() + .account_read_model() .create(&mut transaction, &follower_account) .await .unwrap(); @@ -517,9 +511,10 @@ mod test { None, EventVersion::new(Uuid::now_v7()), Nanoid::default(), + CreatedAt::now(), ); database - .account_modifier() + .account_read_model() .create(&mut transaction, &followee_account) .await .unwrap(); @@ -532,14 +527,14 @@ mod test { .unwrap(); let following = database - .follow_query() + .follow_repository() .find_followings(&mut transaction, &FollowTargetId::from(follower_id.clone())) .await .unwrap(); assert!(following.is_empty()); database - .follow_modifier() + .follow_repository() .create(&mut transaction, &follow) .await .unwrap(); @@ -552,34 +547,35 @@ mod test { ) .unwrap(); database - .follow_modifier() + .follow_repository() .update(&mut transaction, &follow) .await .unwrap(); let following = database - .follow_query() + .follow_repository() .find_followers(&mut 
transaction, &FollowTargetId::from(followee_id)) .await .unwrap(); assert_eq!(following[0].id(), follow.id()); database - .follow_modifier() + .follow_repository() .delete(&mut transaction, follow.id()) .await .unwrap(); database - .account_modifier() - .delete(&mut transaction, follower_account.id()) + .account_read_model() + .deactivate(&mut transaction, follower_account.id()) .await .unwrap(); database - .account_modifier() - .delete(&mut transaction, followee_account.id()) + .account_read_model() + .deactivate(&mut transaction, followee_account.id()) .await .unwrap(); } + #[test_with::env(DATABASE_URL)] #[tokio::test] async fn delete() { let database = PostgresDatabase::new().await.unwrap(); @@ -594,9 +590,10 @@ mod test { None, EventVersion::new(Uuid::now_v7()), Nanoid::default(), + CreatedAt::now(), ); database - .account_modifier() + .account_read_model() .create(&mut transaction, &follower_account) .await .unwrap(); @@ -610,9 +607,10 @@ mod test { None, EventVersion::new(Uuid::now_v7()), Nanoid::default(), + CreatedAt::now(), ); database - .account_modifier() + .account_read_model() .create(&mut transaction, &followee_account) .await .unwrap(); @@ -625,31 +623,31 @@ mod test { .unwrap(); database - .follow_modifier() + .follow_repository() .create(&mut transaction, &follow) .await .unwrap(); database - .follow_modifier() + .follow_repository() .delete(&mut transaction, follow.id()) .await .unwrap(); let following = database - .follow_query() + .follow_repository() .find_followers(&mut transaction, &FollowTargetId::from(follower_id)) .await .unwrap(); assert!(following.is_empty()); database - .account_modifier() - .delete(&mut transaction, follower_account.id()) + .account_read_model() + .deactivate(&mut transaction, follower_account.id()) .await .unwrap(); database - .account_modifier() - .delete(&mut transaction, followee_account.id()) + .account_read_model() + .deactivate(&mut transaction, followee_account.id()) .await .unwrap(); } diff --git 
a/driver/src/database/postgres/image.rs b/driver/src/database/postgres/image.rs index 0ef3afa..ffcec81 100644 --- a/driver/src/database/postgres/image.rs +++ b/driver/src/database/postgres/image.rs @@ -1,7 +1,6 @@ use crate::database::{PostgresConnection, PostgresDatabase}; use crate::ConvertError; -use kernel::interfaces::modify::{DependOnImageModifier, ImageModifier}; -use kernel::interfaces::query::{DependOnImageQuery, ImageQuery}; +use kernel::interfaces::repository::{DependOnImageRepository, ImageRepository}; use kernel::prelude::entity::{Image, ImageBlurHash, ImageHash, ImageId, ImageUrl}; use kernel::KernelError; use sqlx::PgConnection; @@ -28,15 +27,15 @@ impl From for Image { pub struct PostgresImageRepository; -impl ImageQuery for PostgresImageRepository { - type Transaction = PostgresConnection; +impl ImageRepository for PostgresImageRepository { + type Executor = PostgresConnection; async fn find_by_id( &self, - transaction: &mut Self::Transaction, + executor: &mut Self::Executor, id: &ImageId, ) -> error_stack::Result, KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; sqlx::query_as::<_, ImageRow>( // language=postgresql r#" @@ -52,10 +51,10 @@ impl ImageQuery for PostgresImageRepository { async fn find_by_url( &self, - transaction: &mut Self::Transaction, + executor: &mut Self::Executor, url: &ImageUrl, ) -> error_stack::Result, KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; sqlx::query_as::<_, ImageRow>( // language=postgresql r#" @@ -68,25 +67,13 @@ impl ImageQuery for PostgresImageRepository { .convert_error() .map(|option| option.map(|row| row.into())) } -} - -impl DependOnImageQuery for PostgresDatabase { - type ImageQuery = PostgresImageRepository; - - fn image_query(&self) -> &Self::ImageQuery { - &PostgresImageRepository - } -} - -impl ImageModifier for PostgresImageRepository { - type Transaction = PostgresConnection; async fn create( &self, 
- transaction: &mut Self::Transaction, + executor: &mut Self::Executor, image: &Image, ) -> error_stack::Result<(), KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; sqlx::query( // language=postgresql r#" @@ -105,10 +92,10 @@ impl ImageModifier for PostgresImageRepository { async fn delete( &self, - transaction: &mut Self::Transaction, + executor: &mut Self::Executor, image_id: &ImageId, ) -> error_stack::Result<(), KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; sqlx::query( // language=postgresql r#" @@ -123,10 +110,10 @@ impl ImageModifier for PostgresImageRepository { } } -impl DependOnImageModifier for PostgresDatabase { - type ImageModifier = PostgresImageRepository; +impl DependOnImageRepository for PostgresDatabase { + type ImageRepository = PostgresImageRepository; - fn image_modifier(&self) -> &Self::ImageModifier { + fn image_repository(&self) -> &Self::ImageRepository { &PostgresImageRepository } } @@ -148,11 +135,11 @@ mod test { use crate::database::postgres::image::test::url; use crate::database::PostgresDatabase; use kernel::interfaces::database::DatabaseConnection; - use kernel::interfaces::modify::{DependOnImageModifier, ImageModifier}; - use kernel::interfaces::query::{DependOnImageQuery, ImageQuery}; + use kernel::interfaces::repository::{DependOnImageRepository, ImageRepository}; use kernel::prelude::entity::{Image, ImageBlurHash, ImageHash, ImageId}; use uuid::Uuid; + #[test_with::env(DATABASE_URL)] #[tokio::test] async fn find_by_id() { let database = PostgresDatabase::new().await.unwrap(); @@ -168,23 +155,24 @@ mod test { ); database - .image_modifier() + .image_repository() .create(&mut transaction, &image) .await .unwrap(); let result = database - .image_query() + .image_repository() .find_by_id(&mut transaction, &id) .await .unwrap(); assert_eq!(result, Some(image)); database - .image_modifier() + .image_repository() .delete(&mut 
transaction, &id) .await .unwrap(); } + #[test_with::env(DATABASE_URL)] #[tokio::test] async fn find_by_url() { let database = PostgresDatabase::new().await.unwrap(); @@ -200,18 +188,18 @@ mod test { ); database - .image_modifier() + .image_repository() .create(&mut transaction, &image) .await .unwrap(); let result = database - .image_query() + .image_repository() .find_by_url(&mut transaction, &url) .await .unwrap(); assert_eq!(result, Some(image.clone())); database - .image_modifier() + .image_repository() .delete(&mut transaction, image.id()) .await .unwrap(); @@ -222,10 +210,11 @@ mod test { use crate::database::postgres::image::test::url; use crate::database::PostgresDatabase; use kernel::interfaces::database::DatabaseConnection; - use kernel::interfaces::modify::{DependOnImageModifier, ImageModifier}; + use kernel::interfaces::repository::{DependOnImageRepository, ImageRepository}; use kernel::prelude::entity::{Image, ImageBlurHash, ImageHash, ImageId}; use uuid::Uuid; + #[test_with::env(DATABASE_URL)] #[tokio::test] async fn create() { let database = PostgresDatabase::new().await.unwrap(); @@ -241,17 +230,18 @@ mod test { ); database - .image_modifier() + .image_repository() .create(&mut transaction, &image) .await .unwrap(); database - .image_modifier() + .image_repository() .delete(&mut transaction, image.id()) .await .unwrap(); } + #[test_with::env(DATABASE_URL)] #[tokio::test] async fn delete() { let database = PostgresDatabase::new().await.unwrap(); @@ -267,12 +257,12 @@ mod test { ); database - .image_modifier() + .image_repository() .create(&mut transaction, &image) .await .unwrap(); database - .image_modifier() + .image_repository() .delete(&mut transaction, &id) .await .unwrap(); diff --git a/driver/src/database/postgres/metadata.rs b/driver/src/database/postgres/metadata.rs index fed3f07..168d63c 100644 --- a/driver/src/database/postgres/metadata.rs +++ b/driver/src/database/postgres/metadata.rs @@ -1,7 +1,6 @@ use 
crate::database::{PostgresConnection, PostgresDatabase}; use crate::ConvertError; -use kernel::interfaces::modify::{DependOnMetadataModifier, MetadataModifier}; -use kernel::interfaces::query::{DependOnMetadataQuery, MetadataQuery}; +use kernel::interfaces::read_model::{DependOnMetadataReadModel, MetadataReadModel}; use kernel::prelude::entity::{ AccountId, EventVersion, Metadata, MetadataContent, MetadataId, MetadataLabel, Nanoid, }; @@ -32,17 +31,17 @@ impl From for Metadata { } } -pub struct PostgresMetadataRepository; +pub struct PostgresMetadataReadModel; -impl MetadataQuery for PostgresMetadataRepository { - type Transaction = PostgresConnection; +impl MetadataReadModel for PostgresMetadataReadModel { + type Executor = PostgresConnection; async fn find_by_id( &self, - transaction: &mut Self::Transaction, - metadata_id: &MetadataId, + executor: &mut Self::Executor, + id: &MetadataId, ) -> error_stack::Result, KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; sqlx::query_as::<_, MetadataRow>( // language=postgresql r#" @@ -51,7 +50,7 @@ impl MetadataQuery for PostgresMetadataRepository { WHERE id = $1 "#, ) - .bind(metadata_id.as_ref()) + .bind(id.as_ref()) .fetch_optional(con) .await .convert_error() @@ -60,10 +59,10 @@ impl MetadataQuery for PostgresMetadataRepository { async fn find_by_account_id( &self, - transaction: &mut Self::Transaction, + executor: &mut Self::Executor, account_id: &AccountId, ) -> error_stack::Result, KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; sqlx::query_as::<_, MetadataRow>( // language=postgresql r#" @@ -78,25 +77,35 @@ impl MetadataQuery for PostgresMetadataRepository { .convert_error() .map(|rows| rows.into_iter().map(|row| row.into()).collect()) } -} -impl DependOnMetadataQuery for PostgresDatabase { - type MetadataQuery = PostgresMetadataRepository; - - fn metadata_query(&self) -> &Self::MetadataQuery { - 
&PostgresMetadataRepository + async fn find_by_account_ids( + &self, + executor: &mut Self::Executor, + account_ids: &[AccountId], + ) -> error_stack::Result, KernelError> { + let con: &mut PgConnection = executor; + let ids: Vec = account_ids.iter().map(|id| *id.as_ref()).collect(); + sqlx::query_as::<_, MetadataRow>( + // language=postgresql + r#" + SELECT id, account_id, label, content, version, nanoid + FROM metadatas + WHERE account_id = ANY($1) + "#, + ) + .bind(&ids) + .fetch_all(con) + .await + .convert_error() + .map(|rows| rows.into_iter().map(|row| row.into()).collect()) } -} - -impl MetadataModifier for PostgresMetadataRepository { - type Transaction = PostgresConnection; async fn create( &self, - transaction: &mut Self::Transaction, + executor: &mut Self::Executor, metadata: &Metadata, ) -> error_stack::Result<(), KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; sqlx::query( // language=postgresql r#" @@ -118,10 +127,10 @@ impl MetadataModifier for PostgresMetadataRepository { async fn update( &self, - transaction: &mut Self::Transaction, + executor: &mut Self::Executor, metadata: &Metadata, ) -> error_stack::Result<(), KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; sqlx::query( // language=postgresql r#" @@ -142,10 +151,10 @@ impl MetadataModifier for PostgresMetadataRepository { async fn delete( &self, - transaction: &mut Self::Transaction, + executor: &mut Self::Executor, metadata_id: &MetadataId, ) -> error_stack::Result<(), KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; sqlx::query( // language=postgresql r#" @@ -161,306 +170,290 @@ impl MetadataModifier for PostgresMetadataRepository { } } -impl DependOnMetadataModifier for PostgresDatabase { - type MetadataModifier = PostgresMetadataRepository; +impl DependOnMetadataReadModel for PostgresDatabase { + type MetadataReadModel = 
PostgresMetadataReadModel; - fn metadata_modifier(&self) -> &Self::MetadataModifier { - &PostgresMetadataRepository + fn metadata_read_model(&self) -> &Self::MetadataReadModel { + &PostgresMetadataReadModel } } #[cfg(test)] mod test { - - mod query { + mod read_model { use crate::database::PostgresDatabase; use kernel::interfaces::database::DatabaseConnection; - use kernel::interfaces::modify::{ - AccountModifier, DependOnAccountModifier, DependOnMetadataModifier, MetadataModifier, + use kernel::interfaces::read_model::{ + AccountReadModel, DependOnAccountReadModel, DependOnMetadataReadModel, + MetadataReadModel, }; - use kernel::interfaces::query::{DependOnMetadataQuery, MetadataQuery}; use kernel::prelude::entity::{ Account, AccountId, AccountIsBot, AccountName, AccountPrivateKey, AccountPublicKey, - EventVersion, Metadata, MetadataContent, MetadataId, MetadataLabel, Nanoid, + CreatedAt, EventVersion, Metadata, MetadataContent, MetadataId, MetadataLabel, Nanoid, }; use uuid::Uuid; + fn make_account(account_id: AccountId) -> Account { + Account::new( + account_id, + AccountName::new("name"), + AccountPrivateKey::new("private_key"), + AccountPublicKey::new("public_key"), + AccountIsBot::new(false), + None, + EventVersion::new(Uuid::now_v7()), + Nanoid::default(), + CreatedAt::now(), + ) + } + + fn make_metadata(metadata_id: MetadataId, account_id: AccountId) -> Metadata { + Metadata::new( + metadata_id, + account_id, + MetadataLabel::new("label"), + MetadataContent::new("content"), + EventVersion::new(Uuid::now_v7()), + Nanoid::default(), + ) + } + + #[test_with::env(DATABASE_URL)] #[tokio::test] async fn find_by_id() { let database = PostgresDatabase::new().await.unwrap(); let mut transaction = database.begin_transaction().await.unwrap(); let account_id = AccountId::new(Uuid::now_v7()); - - let account = Account::new( - account_id.clone(), - AccountName::new("name".to_string()), - AccountPrivateKey::new("private_key".to_string()), - 
AccountPublicKey::new("public_key".to_string()), - AccountIsBot::new(false), - None, - EventVersion::new(Uuid::now_v7()), - Nanoid::default(), - ); + let account = make_account(account_id.clone()); + let metadata_id = MetadataId::new(Uuid::now_v7()); + let metadata = make_metadata(metadata_id.clone(), account_id.clone()); database - .account_modifier() + .account_read_model() .create(&mut transaction, &account) .await .unwrap(); - let metadata = Metadata::new( - MetadataId::new(Uuid::now_v7()), - account_id.clone(), - MetadataLabel::new("label".to_string()), - MetadataContent::new("content".to_string()), - EventVersion::new(Uuid::now_v7()), - Nanoid::default(), - ); - database - .metadata_modifier() + .metadata_read_model() .create(&mut transaction, &metadata) .await .unwrap(); let found = database - .metadata_query() - .find_by_id(&mut transaction, metadata.id()) + .metadata_read_model() + .find_by_id(&mut transaction, &metadata_id) .await .unwrap(); assert_eq!(found.as_ref().map(Metadata::id), Some(metadata.id())); + + // Non-existent id returns None + let not_found = database + .metadata_read_model() + .find_by_id(&mut transaction, &MetadataId::new(Uuid::now_v7())) + .await + .unwrap(); + assert!(not_found.is_none()); + + database + .account_read_model() + .deactivate(&mut transaction, account.id()) + .await + .unwrap(); } + #[test_with::env(DATABASE_URL)] #[tokio::test] async fn find_by_account_id() { let database = PostgresDatabase::new().await.unwrap(); let mut transaction = database.begin_transaction().await.unwrap(); let account_id = AccountId::new(Uuid::now_v7()); - let account = Account::new( + let account = make_account(account_id.clone()); + + let metadata1 = make_metadata(MetadataId::new(Uuid::now_v7()), account_id.clone()); + let metadata2 = Metadata::new( + MetadataId::new(Uuid::now_v7()), account_id.clone(), - AccountName::new("name".to_string()), - AccountPrivateKey::new("private_key".to_string()), - AccountPublicKey::new("public_key".to_string()), 
- AccountIsBot::new(false), - None, + MetadataLabel::new("label2"), + MetadataContent::new("content2"), EventVersion::new(Uuid::now_v7()), Nanoid::default(), ); database - .account_modifier() + .account_read_model() .create(&mut transaction, &account) .await .unwrap(); - let metadata = Metadata::new( - MetadataId::new(Uuid::now_v7()), - account_id.clone(), - MetadataLabel::new("label".to_string()), - MetadataContent::new("content".to_string()), - EventVersion::new(Uuid::now_v7()), - Nanoid::default(), - ); - let metadata2 = Metadata::new( - MetadataId::new(Uuid::now_v7()), - account_id.clone(), - MetadataLabel::new("label2".to_string()), - MetadataContent::new("content2".to_string()), - EventVersion::new(Uuid::now_v7()), - Nanoid::default(), - ); - database - .metadata_modifier() - .create(&mut transaction, &metadata) + .metadata_read_model() + .create(&mut transaction, &metadata1) .await .unwrap(); database - .metadata_modifier() + .metadata_read_model() .create(&mut transaction, &metadata2) .await .unwrap(); let found = database - .metadata_query() + .metadata_read_model() .find_by_account_id(&mut transaction, &account_id) .await .unwrap(); - assert_eq!( - found.iter().map(Metadata::id).collect::>(), - vec![metadata.id(), metadata2.id()] - ); + assert_eq!(found.len(), 2); + let ids: Vec<_> = found.iter().map(Metadata::id).collect(); + assert!(ids.contains(&metadata1.id())); + assert!(ids.contains(&metadata2.id())); + + // Non-existent account_id returns empty vec + let not_found = database + .metadata_read_model() + .find_by_account_id(&mut transaction, &AccountId::new(Uuid::now_v7())) + .await + .unwrap(); + assert!(not_found.is_empty()); + + database + .account_read_model() + .deactivate(&mut transaction, account.id()) + .await + .unwrap(); } - } - mod modify { - use crate::database::PostgresDatabase; - use kernel::interfaces::database::DatabaseConnection; - use kernel::interfaces::modify::{ - AccountModifier, DependOnAccountModifier, DependOnMetadataModifier, 
MetadataModifier, - }; - use kernel::interfaces::query::{DependOnMetadataQuery, MetadataQuery}; - use kernel::prelude::entity::{ - Account, AccountId, AccountIsBot, AccountName, AccountPrivateKey, AccountPublicKey, - EventVersion, Metadata, MetadataContent, MetadataId, MetadataLabel, Nanoid, - }; - use uuid::Uuid; + #[test_with::env(DATABASE_URL)] #[tokio::test] async fn create() { let database = PostgresDatabase::new().await.unwrap(); let mut transaction = database.begin_transaction().await.unwrap(); let account_id = AccountId::new(Uuid::now_v7()); - let account = Account::new( - account_id.clone(), - AccountName::new("name".to_string()), - AccountPrivateKey::new("private_key".to_string()), - AccountPublicKey::new("public_key".to_string()), - AccountIsBot::new(false), - None, - EventVersion::new(Uuid::now_v7()), - Nanoid::default(), - ); + let account = make_account(account_id.clone()); + let metadata_id = MetadataId::new(Uuid::now_v7()); + let metadata = make_metadata(metadata_id.clone(), account_id.clone()); database - .account_modifier() + .account_read_model() .create(&mut transaction, &account) .await .unwrap(); - let metadata = Metadata::new( - MetadataId::new(Uuid::now_v7()), - account_id.clone(), - MetadataLabel::new("label".to_string()), - MetadataContent::new("content".to_string()), - EventVersion::new(Uuid::now_v7()), - Nanoid::default(), - ); - database - .metadata_modifier() + .metadata_read_model() .create(&mut transaction, &metadata) .await .unwrap(); let found = database - .metadata_query() - .find_by_id(&mut transaction, metadata.id()) + .metadata_read_model() + .find_by_id(&mut transaction, &metadata_id) + .await + .unwrap() + .unwrap(); + assert_eq!(found.id(), metadata.id()); + assert_eq!(found.label(), metadata.label()); + assert_eq!(found.content(), metadata.content()); + + database + .account_read_model() + .deactivate(&mut transaction, account.id()) .await .unwrap(); - assert_eq!(found.as_ref().map(Metadata::id), Some(metadata.id())); } + 
#[test_with::env(DATABASE_URL)] #[tokio::test] async fn update() { let database = PostgresDatabase::new().await.unwrap(); let mut transaction = database.begin_transaction().await.unwrap(); let account_id = AccountId::new(Uuid::now_v7()); - let account = Account::new( - account_id.clone(), - AccountName::new("name".to_string()), - AccountPrivateKey::new("private_key".to_string()), - AccountPublicKey::new("public_key".to_string()), - AccountIsBot::new(false), - None, - EventVersion::new(Uuid::now_v7()), - Nanoid::default(), - ); + let account = make_account(account_id.clone()); + let metadata_id = MetadataId::new(Uuid::now_v7()); + let metadata = make_metadata(metadata_id.clone(), account_id.clone()); database - .account_modifier() + .account_read_model() .create(&mut transaction, &account) .await .unwrap(); - let metadata = Metadata::new( - MetadataId::new(Uuid::now_v7()), - account_id.clone(), - MetadataLabel::new("label".to_string()), - MetadataContent::new("content".to_string()), - EventVersion::new(Uuid::now_v7()), - Nanoid::default(), - ); - database - .metadata_modifier() + .metadata_read_model() .create(&mut transaction, &metadata) .await .unwrap(); let updated_metadata = Metadata::new( - metadata.id().clone(), + metadata_id.clone(), account_id.clone(), - MetadataLabel::new("label2".to_string()), - MetadataContent::new("content2".to_string()), + MetadataLabel::new("updated_label"), + MetadataContent::new("updated_content"), EventVersion::new(Uuid::now_v7()), Nanoid::default(), ); - database - .metadata_modifier() + .metadata_read_model() .update(&mut transaction, &updated_metadata) .await .unwrap(); let found = database - .metadata_query() - .find_by_id(&mut transaction, metadata.id()) + .metadata_read_model() + .find_by_id(&mut transaction, &metadata_id) + .await + .unwrap() + .unwrap(); + assert_eq!(found.id(), updated_metadata.id()); + assert_eq!(found.label(), updated_metadata.label()); + assert_eq!(found.content(), updated_metadata.content()); + + 
database + .account_read_model() + .deactivate(&mut transaction, account.id()) .await .unwrap(); - assert_eq!( - found.as_ref().map(Metadata::id), - Some(updated_metadata.id()) - ); } + #[test_with::env(DATABASE_URL)] #[tokio::test] async fn delete() { let database = PostgresDatabase::new().await.unwrap(); let mut transaction = database.begin_transaction().await.unwrap(); let account_id = AccountId::new(Uuid::now_v7()); - let account = Account::new( - account_id.clone(), - AccountName::new("name".to_string()), - AccountPrivateKey::new("private_key".to_string()), - AccountPublicKey::new("public_key".to_string()), - AccountIsBot::new(false), - None, - EventVersion::new(Uuid::now_v7()), - Nanoid::default(), - ); + let account = make_account(account_id.clone()); + let metadata_id = MetadataId::new(Uuid::now_v7()); + let metadata = make_metadata(metadata_id.clone(), account_id.clone()); database - .account_modifier() + .account_read_model() .create(&mut transaction, &account) .await .unwrap(); - let metadata = Metadata::new( - MetadataId::new(Uuid::now_v7()), - account_id.clone(), - MetadataLabel::new("label".to_string()), - MetadataContent::new("content".to_string()), - EventVersion::new(Uuid::now_v7()), - Nanoid::default(), - ); - database - .metadata_modifier() + .metadata_read_model() .create(&mut transaction, &metadata) .await .unwrap(); + database - .metadata_modifier() - .delete(&mut transaction, metadata.id()) + .metadata_read_model() + .delete(&mut transaction, &metadata_id) .await .unwrap(); let found = database - .metadata_query() - .find_by_id(&mut transaction, metadata.id()) + .metadata_read_model() + .find_by_id(&mut transaction, &metadata_id) .await .unwrap(); assert!(found.is_none()); + + database + .account_read_model() + .deactivate(&mut transaction, account.id()) + .await + .unwrap(); } } } diff --git a/driver/src/database/postgres/metadata_event_store.rs b/driver/src/database/postgres/metadata_event_store.rs new file mode 100644 index 
0000000..7636154 --- /dev/null +++ b/driver/src/database/postgres/metadata_event_store.rs @@ -0,0 +1,445 @@ +use crate::database::postgres::{CountRow, VersionRow}; +use crate::database::{PostgresConnection, PostgresDatabase}; +use crate::ConvertError; +use error_stack::Report; +use kernel::interfaces::event_store::{DependOnMetadataEventStore, MetadataEventStore}; +use kernel::prelude::entity::{ + CommandEnvelope, EventEnvelope, EventId, EventVersion, KnownEventVersion, Metadata, + MetadataEvent, +}; +use kernel::KernelError; +use serde_json; +use sqlx::PgConnection; +use uuid::Uuid; + +#[derive(sqlx::FromRow)] +struct EventRow { + version: Uuid, + id: Uuid, + #[allow(dead_code)] + event_name: String, + data: serde_json::Value, +} + +impl TryFrom for EventEnvelope { + type Error = Report; + fn try_from(value: EventRow) -> Result { + let event: MetadataEvent = serde_json::from_value(value.data).convert_error()?; + Ok(EventEnvelope::new( + EventId::new(value.id), + event, + EventVersion::new(value.version), + )) + } +} + +pub struct PostgresMetadataEventStore; + +impl MetadataEventStore for PostgresMetadataEventStore { + type Executor = PostgresConnection; + + async fn find_by_id( + &self, + executor: &mut Self::Executor, + id: &EventId, + since: Option<&EventVersion>, + ) -> error_stack::Result>, KernelError> { + let con: &mut PgConnection = executor; + let rows = if let Some(version) = since { + sqlx::query_as::<_, EventRow>( + //language=postgresql + r#" + SELECT version, id, event_name, data + FROM metadata_events + WHERE id = $1 AND version > $2 + ORDER BY version + "#, + ) + .bind(id.as_ref()) + .bind(version.as_ref()) + .fetch_all(con) + .await + .convert_error()? + } else { + sqlx::query_as::<_, EventRow>( + //language=postgresql + r#" + SELECT version, id, event_name, data + FROM metadata_events + WHERE id = $1 + ORDER BY version + "#, + ) + .bind(id.as_ref()) + .fetch_all(con) + .await + .convert_error()? 
+ }; + rows.into_iter() + .map(|row| row.try_into()) + .collect::, KernelError>>() + } + + async fn persist( + &self, + executor: &mut Self::Executor, + command: &CommandEnvelope, + ) -> error_stack::Result<(), KernelError> { + self.persist_internal(executor, command, Uuid::now_v7()) + .await + } + + async fn persist_and_transform( + &self, + executor: &mut Self::Executor, + command: CommandEnvelope, + ) -> error_stack::Result, KernelError> { + let version = Uuid::now_v7(); + self.persist_internal(executor, &command, version).await?; + + let command = command.into_destruct(); + Ok(EventEnvelope::new( + command.id, + command.event, + EventVersion::new(version), + )) + } +} + +impl PostgresMetadataEventStore { + async fn persist_internal( + &self, + executor: &mut PostgresConnection, + command: &CommandEnvelope, + version: Uuid, + ) -> error_stack::Result<(), KernelError> { + let con: &mut PgConnection = executor; + + let event_name = command.event_name(); + let prev_version = command.prev_version().as_ref(); + if let Some(prev_version) = prev_version { + match prev_version { + KnownEventVersion::Nothing => { + let amount = sqlx::query_as::<_, CountRow>( + //language=postgresql + r#" + SELECT COUNT(*) + FROM metadata_events + WHERE id = $1 + "#, + ) + .bind(command.id().as_ref()) + .fetch_one(&mut *con) + .await + .convert_error()?; + if amount.count != 0 { + return Err(Report::new(KernelError::Concurrency).attach_printable( + format!("Event {} already exists", command.id().as_ref()), + )); + } + } + KnownEventVersion::Prev(prev_version) => { + let last_version = sqlx::query_as::<_, VersionRow>( + //language=postgresql + r#" + SELECT version + FROM metadata_events + WHERE id = $1 + ORDER BY version DESC + LIMIT 1 + "#, + ) + .bind(command.id().as_ref()) + .fetch_optional(&mut *con) + .await + .convert_error()?; + if last_version + .map(|row: VersionRow| &row.version != prev_version.as_ref()) + .unwrap_or(true) + { + return 
Err(Report::new(KernelError::Concurrency).attach_printable( + format!( + "Event {} version {} already exists", + command.id().as_ref(), + prev_version.as_ref() + ), + )); + } + } + }; + } + + sqlx::query( + //language=postgresql + r#" + INSERT INTO metadata_events (version, id, event_name, data) + VALUES ($1, $2, $3, $4) + "#, + ) + .bind(version) + .bind(command.id().as_ref()) + .bind(event_name) + .bind(serde_json::to_value(command.event()).convert_error()?) + .execute(con) + .await + .convert_error()?; + + Ok(()) + } +} + +impl DependOnMetadataEventStore for PostgresDatabase { + type MetadataEventStore = PostgresMetadataEventStore; + + fn metadata_event_store(&self) -> &Self::MetadataEventStore { + &PostgresMetadataEventStore + } +} + +#[cfg(test)] +mod test { + mod query { + use crate::database::PostgresDatabase; + use kernel::interfaces::database::DatabaseConnection; + use kernel::interfaces::event_store::{DependOnMetadataEventStore, MetadataEventStore}; + use kernel::prelude::entity::{ + AccountId, CommandEnvelope, EventId, KnownEventVersion, Metadata, MetadataContent, + MetadataEvent, MetadataId, MetadataLabel, Nanoid, + }; + use uuid::Uuid; + + fn create_metadata_command( + metadata_id: MetadataId, + ) -> CommandEnvelope { + let event = MetadataEvent::Created { + account_id: AccountId::new(Uuid::now_v7()), + label: MetadataLabel::new("label".to_string()), + content: MetadataContent::new("content".to_string()), + nanoid: Nanoid::default(), + }; + CommandEnvelope::new( + EventId::from(metadata_id), + event.name(), + event, + Some(KnownEventVersion::Nothing), + ) + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn find_by_id() { + let db = PostgresDatabase::new().await.unwrap(); + let mut transaction = db.begin_transaction().await.unwrap(); + let metadata_id = MetadataId::new(Uuid::now_v7()); + let event_id = EventId::from(metadata_id.clone()); + let events = db + .metadata_event_store() + .find_by_id(&mut transaction, &event_id, None) + .await 
+ .unwrap(); + assert_eq!(events.len(), 0); + let created_metadata = create_metadata_command(metadata_id.clone()); + let update_event = MetadataEvent::Updated { + label: MetadataLabel::new("new_label".to_string()), + content: MetadataContent::new("new_content".to_string()), + }; + let updated_metadata = CommandEnvelope::new( + EventId::from(metadata_id.clone()), + update_event.name(), + update_event, + None, + ); + let delete_event = MetadataEvent::Deleted; + let deleted_metadata = CommandEnvelope::new( + EventId::from(metadata_id.clone()), + delete_event.name(), + delete_event, + None, + ); + + db.metadata_event_store() + .persist(&mut transaction, &created_metadata) + .await + .unwrap(); + db.metadata_event_store() + .persist(&mut transaction, &updated_metadata) + .await + .unwrap(); + db.metadata_event_store() + .persist(&mut transaction, &deleted_metadata) + .await + .unwrap(); + let events = db + .metadata_event_store() + .find_by_id(&mut transaction, &event_id, None) + .await + .unwrap(); + assert_eq!(events.len(), 3); + assert_eq!(&events[0].event, created_metadata.event()); + assert_eq!(&events[1].event, updated_metadata.event()); + assert_eq!(&events[2].event, deleted_metadata.event()); + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn find_by_id_since_version() { + let db = PostgresDatabase::new().await.unwrap(); + let mut transaction = db.begin_transaction().await.unwrap(); + let metadata_id = MetadataId::new(Uuid::now_v7()); + let event_id = EventId::from(metadata_id.clone()); + + let created_metadata = create_metadata_command(metadata_id.clone()); + let update_event = MetadataEvent::Updated { + label: MetadataLabel::new("new_label".to_string()), + content: MetadataContent::new("new_content".to_string()), + }; + let updated_metadata = CommandEnvelope::new( + EventId::from(metadata_id.clone()), + update_event.name(), + update_event, + None, + ); + let delete_event = MetadataEvent::Deleted; + let deleted_metadata = CommandEnvelope::new( 
+ EventId::from(metadata_id.clone()), + delete_event.name(), + delete_event, + None, + ); + + db.metadata_event_store() + .persist(&mut transaction, &created_metadata) + .await + .unwrap(); + db.metadata_event_store() + .persist(&mut transaction, &updated_metadata) + .await + .unwrap(); + db.metadata_event_store() + .persist(&mut transaction, &deleted_metadata) + .await + .unwrap(); + + // Get all events to obtain the first version + let all_events = db + .metadata_event_store() + .find_by_id(&mut transaction, &event_id, None) + .await + .unwrap(); + assert_eq!(all_events.len(), 3); + + // Query since the first event's version — should return the 2nd and 3rd events + let since_events = db + .metadata_event_store() + .find_by_id(&mut transaction, &event_id, Some(&all_events[0].version)) + .await + .unwrap(); + assert_eq!(since_events.len(), 2); + assert_eq!(&since_events[0].event, updated_metadata.event()); + assert_eq!(&since_events[1].event, deleted_metadata.event()); + + // Query since the last event's version — should return no events + let no_events = db + .metadata_event_store() + .find_by_id(&mut transaction, &event_id, Some(&all_events[2].version)) + .await + .unwrap(); + assert_eq!(no_events.len(), 0); + } + } + + mod persist { + use crate::database::PostgresDatabase; + use kernel::interfaces::database::DatabaseConnection; + use kernel::interfaces::event_store::{DependOnMetadataEventStore, MetadataEventStore}; + use kernel::prelude::entity::{ + AccountId, CommandEnvelope, EventId, KnownEventVersion, Metadata, MetadataContent, + MetadataEvent, MetadataId, MetadataLabel, Nanoid, + }; + use uuid::Uuid; + + fn create_metadata_command( + metadata_id: MetadataId, + ) -> CommandEnvelope { + let event = MetadataEvent::Created { + account_id: AccountId::new(Uuid::now_v7()), + label: MetadataLabel::new("label".to_string()), + content: MetadataContent::new("content".to_string()), + nanoid: Nanoid::default(), + }; + CommandEnvelope::new( + EventId::from(metadata_id), + 
event.name(), + event, + Some(KnownEventVersion::Nothing), + ) + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn basic_creation() { + let db = PostgresDatabase::new().await.unwrap(); + let mut transaction = db.begin_transaction().await.unwrap(); + let metadata_id = MetadataId::new(Uuid::now_v7()); + let created_metadata = create_metadata_command(metadata_id.clone()); + db.metadata_event_store() + .persist(&mut transaction, &created_metadata) + .await + .unwrap(); + let events = db + .metadata_event_store() + .find_by_id(&mut transaction, &EventId::from(metadata_id), None) + .await + .unwrap(); + assert_eq!(events.len(), 1); + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn persist_and_transform_test() { + let db = PostgresDatabase::new().await.unwrap(); + let mut transaction = db.begin_transaction().await.unwrap(); + let metadata_id = MetadataId::new(Uuid::now_v7()); + let created_metadata = create_metadata_command(metadata_id.clone()); + + let event_envelope = db + .metadata_event_store() + .persist_and_transform(&mut transaction, created_metadata.clone()) + .await + .unwrap(); + + assert_eq!(event_envelope.id, EventId::from(metadata_id.clone())); + assert_eq!(&event_envelope.event, created_metadata.event()); + + let events = db + .metadata_event_store() + .find_by_id(&mut transaction, &EventId::from(metadata_id), None) + .await + .unwrap(); + assert_eq!(events.len(), 1); + assert_eq!(&events[0].event, created_metadata.event()); + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn known_event_version_nothing_prevents_duplicate() { + let db = PostgresDatabase::new().await.unwrap(); + let mut transaction = db.begin_transaction().await.unwrap(); + let metadata_id = MetadataId::new(Uuid::now_v7()); + let created_metadata = create_metadata_command(metadata_id.clone()); + + // First persist should succeed + db.metadata_event_store() + .persist(&mut transaction, &created_metadata) + .await + .unwrap(); + + // Second 
persist with KnownEventVersion::Nothing should fail (concurrency error) + let result = db + .metadata_event_store() + .persist(&mut transaction, &created_metadata) + .await; + assert!(result.is_err()); + } + } +} diff --git a/driver/src/database/postgres/profile.rs b/driver/src/database/postgres/profile.rs index a59ed0f..ebc7a2c 100644 --- a/driver/src/database/postgres/profile.rs +++ b/driver/src/database/postgres/profile.rs @@ -1,8 +1,7 @@ use sqlx::PgConnection; use uuid::Uuid; -use kernel::interfaces::modify::{DependOnProfileModifier, ProfileModifier}; -use kernel::interfaces::query::{DependOnProfileQuery, ProfileQuery}; +use kernel::interfaces::read_model::{DependOnProfileReadModel, ProfileReadModel}; use kernel::prelude::entity::{ AccountId, EventVersion, ImageId, Nanoid, Profile, ProfileDisplayName, ProfileId, ProfileSummary, @@ -39,17 +38,17 @@ impl From for Profile { } } -pub struct PostgresProfileRepository; +pub struct PostgresProfileReadModel; -impl ProfileQuery for PostgresProfileRepository { - type Transaction = PostgresConnection; +impl ProfileReadModel for PostgresProfileReadModel { + type Executor = PostgresConnection; async fn find_by_id( &self, - transaction: &mut Self::Transaction, + executor: &mut Self::Executor, id: &ProfileId, ) -> error_stack::Result, KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; sqlx::query_as::<_, ProfileRow>( //language=postgresql r#" @@ -61,27 +60,56 @@ impl ProfileQuery for PostgresProfileRepository { .fetch_optional(con) .await .convert_error() - .map(|option| option.map(|row| row.into())) + .map(|option| option.map(Profile::from)) } -} - -impl DependOnProfileQuery for PostgresDatabase { - type ProfileQuery = PostgresProfileRepository; - fn profile_query(&self) -> &Self::ProfileQuery { - &PostgresProfileRepository + async fn find_by_account_id( + &self, + executor: &mut Self::Executor, + account_id: &AccountId, + ) -> error_stack::Result, KernelError> { + let con: 
&mut PgConnection = executor; + sqlx::query_as::<_, ProfileRow>( + //language=postgresql + r#" + SELECT id, account_id, display, summary, icon_id, banner_id, version, nanoid + FROM profiles WHERE account_id = $1 + "#, + ) + .bind(account_id.as_ref()) + .fetch_optional(con) + .await + .convert_error() + .map(|option| option.map(Profile::from)) } -} -impl ProfileModifier for PostgresProfileRepository { - type Transaction = PostgresConnection; + async fn find_by_account_ids( + &self, + executor: &mut Self::Executor, + account_ids: &[AccountId], + ) -> error_stack::Result, KernelError> { + let con: &mut PgConnection = executor; + let ids: Vec = account_ids.iter().map(|id| *id.as_ref()).collect(); + sqlx::query_as::<_, ProfileRow>( + //language=postgresql + r#" + SELECT id, account_id, display, summary, icon_id, banner_id, version, nanoid + FROM profiles WHERE account_id = ANY($1) + "#, + ) + .bind(&ids) + .fetch_all(con) + .await + .convert_error() + .map(|rows| rows.into_iter().map(Profile::from).collect()) + } async fn create( &self, - transaction: &mut Self::Transaction, + executor: &mut Self::Executor, profile: &Profile, ) -> error_stack::Result<(), KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; sqlx::query( //language=postgresql r#" @@ -110,64 +138,82 @@ impl ProfileModifier for PostgresProfileRepository { async fn update( &self, - transaction: &mut Self::Transaction, + executor: &mut Self::Executor, profile: &Profile, ) -> error_stack::Result<(), KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; sqlx::query( //language=postgresql r#" UPDATE profiles SET display = $2, summary = $3, icon_id = $4, banner_id = $5, version = $6 - WHERE account_id = $1 - "# + WHERE id = $1 + "#, + ) + .bind(profile.id().as_ref()) + .bind( + profile + .display_name() + .as_ref() + .map(ProfileDisplayName::as_ref), ) - .bind(profile.id().as_ref()) - 
.bind(profile.display_name().as_ref().map(ProfileDisplayName::as_ref)) - .bind(profile.summary().as_ref().map(ProfileSummary::as_ref)) - .bind(profile.icon().as_ref().map(ImageId::as_ref)) - .bind(profile.banner().as_ref().map(ImageId::as_ref)) - .bind(profile.version().as_ref()) - .execute(con) - .await - .convert_error()?; + .bind(profile.summary().as_ref().map(ProfileSummary::as_ref)) + .bind(profile.icon().as_ref().map(ImageId::as_ref)) + .bind(profile.banner().as_ref().map(ImageId::as_ref)) + .bind(profile.version().as_ref()) + .execute(con) + .await + .convert_error()?; + Ok(()) + } + + async fn delete( + &self, + executor: &mut Self::Executor, + profile_id: &ProfileId, + ) -> error_stack::Result<(), KernelError> { + let con: &mut PgConnection = executor; + sqlx::query( + //language=postgresql + r#" + DELETE FROM profiles WHERE id = $1 + "#, + ) + .bind(profile_id.as_ref()) + .execute(con) + .await + .convert_error()?; Ok(()) } } -impl DependOnProfileModifier for PostgresDatabase { - type ProfileModifier = PostgresProfileRepository; +impl DependOnProfileReadModel for PostgresDatabase { + type ProfileReadModel = PostgresProfileReadModel; - fn profile_modifier(&self) -> &Self::ProfileModifier { - &PostgresProfileRepository + fn profile_read_model(&self) -> &Self::ProfileReadModel { + &PostgresProfileReadModel } } #[cfg(test)] mod test { - mod query { + mod read_model { use uuid::Uuid; use kernel::interfaces::database::DatabaseConnection; - use kernel::interfaces::modify::{ - AccountModifier, DependOnAccountModifier, DependOnProfileModifier, ProfileModifier, + use kernel::interfaces::read_model::{ + AccountReadModel, DependOnAccountReadModel, DependOnProfileReadModel, ProfileReadModel, }; - use kernel::interfaces::query::{DependOnProfileQuery, ProfileQuery}; use kernel::prelude::entity::{ Account, AccountId, AccountIsBot, AccountName, AccountPrivateKey, AccountPublicKey, - EventVersion, Nanoid, Profile, ProfileDisplayName, ProfileId, ProfileSummary, + CreatedAt, 
EventVersion, Nanoid, Profile, ProfileDisplayName, ProfileId, + ProfileSummary, }; use crate::database::PostgresDatabase; - #[tokio::test] - async fn find_by_id() { - let database = PostgresDatabase::new().await.unwrap(); - let mut transaction = database.begin_transaction().await.unwrap(); - - let profile_id = ProfileId::new(Uuid::now_v7()); - let account_id = AccountId::new(Uuid::now_v7()); - let account = Account::new( - account_id.clone(), + fn make_account(account_id: AccountId) -> Account { + Account::new( + account_id, AccountName::new("test"), AccountPrivateKey::new("test"), AccountPublicKey::new("test"), @@ -175,9 +221,13 @@ mod test { None, EventVersion::new(Uuid::now_v7()), Nanoid::default(), - ); - let profile = Profile::new( - profile_id.clone(), + CreatedAt::now(), + ) + } + + fn make_profile(profile_id: ProfileId, account_id: AccountId) -> Profile { + Profile::new( + profile_id, account_id, Some(ProfileDisplayName::new("display name")), Some(ProfileSummary::new("summary")), @@ -185,47 +235,90 @@ mod test { None, EventVersion::new(Uuid::now_v7()), Nanoid::default(), - ); + ) + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn find_by_id() { + let database = PostgresDatabase::new().await.unwrap(); + let mut transaction = database.begin_transaction().await.unwrap(); + + let profile_id = ProfileId::new(Uuid::now_v7()); + let account_id = AccountId::new(Uuid::now_v7()); + let account = make_account(account_id.clone()); + let profile = make_profile(profile_id.clone(), account_id.clone()); + database - .account_modifier() + .account_read_model() .create(&mut transaction, &account) .await .unwrap(); database - .profile_modifier() + .profile_read_model() .create(&mut transaction, &profile) .await .unwrap(); let result = database - .profile_query() + .profile_read_model() .find_by_id(&mut transaction, &profile_id) .await .unwrap(); assert_eq!(result.as_ref().map(Profile::id), Some(profile.id())); + database - .account_modifier() - .delete(&mut 
transaction, account.id()) + .account_read_model() + .deactivate(&mut transaction, account.id()) .await .unwrap(); } - } - mod modify { - use uuid::Uuid; + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn find_by_account_id() { + let database = PostgresDatabase::new().await.unwrap(); + let mut transaction = database.begin_transaction().await.unwrap(); - use kernel::interfaces::database::DatabaseConnection; - use kernel::interfaces::modify::{ - AccountModifier, DependOnAccountModifier, DependOnProfileModifier, ProfileModifier, - }; - use kernel::interfaces::query::{DependOnProfileQuery, ProfileQuery}; - use kernel::prelude::entity::{ - Account, AccountId, AccountIsBot, AccountName, AccountPrivateKey, AccountPublicKey, - EventVersion, Nanoid, Profile, ProfileDisplayName, ProfileId, ProfileSummary, - }; + let profile_id = ProfileId::new(Uuid::now_v7()); + let account_id = AccountId::new(Uuid::now_v7()); + let account = make_account(account_id.clone()); + let profile = make_profile(profile_id.clone(), account_id.clone()); - use crate::database::PostgresDatabase; + database + .account_read_model() + .create(&mut transaction, &account) + .await + .unwrap(); + database + .profile_read_model() + .create(&mut transaction, &profile) + .await + .unwrap(); + + let result = database + .profile_read_model() + .find_by_account_id(&mut transaction, &account_id) + .await + .unwrap(); + assert_eq!(result.as_ref().map(Profile::id), Some(profile.id())); + + // Non-existent account_id returns None + let not_found = database + .profile_read_model() + .find_by_account_id(&mut transaction, &AccountId::new(Uuid::now_v7())) + .await + .unwrap(); + assert!(not_found.is_none()); + database + .account_read_model() + .deactivate(&mut transaction, account.id()) + .await + .unwrap(); + } + + #[test_with::env(DATABASE_URL)] #[tokio::test] async fn create() { let database = PostgresDatabase::new().await.unwrap(); @@ -233,43 +326,36 @@ mod test { let profile_id = 
ProfileId::new(Uuid::now_v7()); let account_id = AccountId::new(Uuid::now_v7()); - let account = Account::new( - account_id.clone(), - AccountName::new("test"), - AccountPrivateKey::new("test"), - AccountPublicKey::new("test"), - AccountIsBot::new(false), - None, - EventVersion::new(Uuid::now_v7()), - Nanoid::default(), - ); - let profile = Profile::new( - profile_id, - account_id, - Some(ProfileDisplayName::new("display name")), - Some(ProfileSummary::new("summary")), - None, - None, - EventVersion::new(Uuid::now_v7()), - Nanoid::default(), - ); + let account = make_account(account_id.clone()); + let profile = make_profile(profile_id.clone(), account_id.clone()); + database - .account_modifier() + .account_read_model() .create(&mut transaction, &account) .await .unwrap(); database - .profile_modifier() + .profile_read_model() .create(&mut transaction, &profile) .await .unwrap(); + + let result = database + .profile_read_model() + .find_by_id(&mut transaction, &profile_id) + .await + .unwrap() + .unwrap(); + assert_eq!(result.id(), profile.id()); + database - .account_modifier() - .delete(&mut transaction, account.id()) + .account_read_model() + .deactivate(&mut transaction, account.id()) .await .unwrap(); } + #[test_with::env(DATABASE_URL)] #[tokio::test] async fn update() { let database = PostgresDatabase::new().await.unwrap(); @@ -277,40 +363,23 @@ mod test { let profile_id = ProfileId::new(Uuid::now_v7()); let account_id = AccountId::new(Uuid::now_v7()); - let account = Account::new( - account_id.clone(), - AccountName::new("test"), - AccountPrivateKey::new("test"), - AccountPublicKey::new("test"), - AccountIsBot::new(false), - None, - EventVersion::new(Uuid::now_v7()), - Nanoid::default(), - ); - let profile = Profile::new( - profile_id.clone(), - account_id.clone(), - Some(ProfileDisplayName::new("display name")), - Some(ProfileSummary::new("summary")), - None, - None, - EventVersion::new(Uuid::now_v7()), - Nanoid::default(), - ); + let account = 
make_account(account_id.clone()); + let profile = make_profile(profile_id.clone(), account_id.clone()); + database - .account_modifier() + .account_read_model() .create(&mut transaction, &account) .await .unwrap(); database - .profile_modifier() + .profile_read_model() .create(&mut transaction, &profile) .await .unwrap(); let updated_profile = Profile::new( profile_id.clone(), - account_id, + account_id.clone(), Some(ProfileDisplayName::new("updated display name")), Some(ProfileSummary::new("updated summary")), None, @@ -319,20 +388,66 @@ mod test { Nanoid::default(), ); database - .profile_modifier() + .profile_read_model() .update(&mut transaction, &updated_profile) .await .unwrap(); let result = database - .profile_query() + .profile_read_model() .find_by_id(&mut transaction, &profile_id) .await + .unwrap() + .unwrap(); + assert_eq!(result.id(), updated_profile.id()); + assert_eq!(result.display_name(), updated_profile.display_name()); + assert_eq!(result.summary(), updated_profile.summary()); + + database + .account_read_model() + .deactivate(&mut transaction, account.id()) + .await .unwrap(); - assert_eq!(result.as_ref().map(Profile::id), Some(updated_profile.id())); + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn delete() { + let database = PostgresDatabase::new().await.unwrap(); + let mut transaction = database.begin_transaction().await.unwrap(); + + let profile_id = ProfileId::new(Uuid::now_v7()); + let account_id = AccountId::new(Uuid::now_v7()); + let account = make_account(account_id.clone()); + let profile = make_profile(profile_id.clone(), account_id.clone()); + + database + .account_read_model() + .create(&mut transaction, &account) + .await + .unwrap(); + database + .profile_read_model() + .create(&mut transaction, &profile) + .await + .unwrap(); + + database + .profile_read_model() + .delete(&mut transaction, &profile_id) + .await + .unwrap(); + + let result = database + .profile_read_model() + .find_by_id(&mut transaction, 
&profile_id) + .await + .unwrap(); + assert!(result.is_none()); + database - .account_modifier() - .delete(&mut transaction, account.id()) + .account_read_model() + .deactivate(&mut transaction, account.id()) .await .unwrap(); } diff --git a/driver/src/database/postgres/profile_event_store.rs b/driver/src/database/postgres/profile_event_store.rs new file mode 100644 index 0000000..f00f09e --- /dev/null +++ b/driver/src/database/postgres/profile_event_store.rs @@ -0,0 +1,424 @@ +use crate::database::postgres::{CountRow, VersionRow}; +use crate::database::{PostgresConnection, PostgresDatabase}; +use crate::ConvertError; +use error_stack::Report; +use kernel::interfaces::event_store::{DependOnProfileEventStore, ProfileEventStore}; +use kernel::prelude::entity::{ + CommandEnvelope, EventEnvelope, EventId, EventVersion, KnownEventVersion, Profile, ProfileEvent, +}; +use kernel::KernelError; +use serde_json; +use sqlx::PgConnection; +use uuid::Uuid; + +#[derive(sqlx::FromRow)] +struct EventRow { + version: Uuid, + id: Uuid, + #[allow(dead_code)] + event_name: String, + data: serde_json::Value, +} + +impl TryFrom for EventEnvelope { + type Error = Report; + fn try_from(value: EventRow) -> Result { + let event: ProfileEvent = serde_json::from_value(value.data).convert_error()?; + Ok(EventEnvelope::new( + EventId::new(value.id), + event, + EventVersion::new(value.version), + )) + } +} + +pub struct PostgresProfileEventStore; + +impl ProfileEventStore for PostgresProfileEventStore { + type Executor = PostgresConnection; + + async fn find_by_id( + &self, + executor: &mut Self::Executor, + id: &EventId, + since: Option<&EventVersion>, + ) -> error_stack::Result>, KernelError> { + let con: &mut PgConnection = executor; + let rows = if let Some(version) = since { + sqlx::query_as::<_, EventRow>( + //language=postgresql + r#" + SELECT version, id, event_name, data + FROM profile_events + WHERE id = $1 AND version > $2 + ORDER BY version + "#, + ) + .bind(id.as_ref()) + 
.bind(version.as_ref()) + .fetch_all(con) + .await + .convert_error()? + } else { + sqlx::query_as::<_, EventRow>( + //language=postgresql + r#" + SELECT version, id, event_name, data + FROM profile_events + WHERE id = $1 + ORDER BY version + "#, + ) + .bind(id.as_ref()) + .fetch_all(con) + .await + .convert_error()? + }; + rows.into_iter() + .map(|row| row.try_into()) + .collect::, KernelError>>() + } + + async fn persist( + &self, + executor: &mut Self::Executor, + command: &CommandEnvelope, + ) -> error_stack::Result<(), KernelError> { + self.persist_internal(executor, command, Uuid::now_v7()) + .await + } + + async fn persist_and_transform( + &self, + executor: &mut Self::Executor, + command: CommandEnvelope, + ) -> error_stack::Result, KernelError> { + let version = Uuid::now_v7(); + self.persist_internal(executor, &command, version).await?; + + let command = command.into_destruct(); + Ok(EventEnvelope::new( + command.id, + command.event, + EventVersion::new(version), + )) + } +} + +impl PostgresProfileEventStore { + async fn persist_internal( + &self, + executor: &mut PostgresConnection, + command: &CommandEnvelope, + version: Uuid, + ) -> error_stack::Result<(), KernelError> { + let con: &mut PgConnection = executor; + + let event_name = command.event_name(); + let prev_version = command.prev_version().as_ref(); + if let Some(prev_version) = prev_version { + match prev_version { + KnownEventVersion::Nothing => { + let amount = sqlx::query_as::<_, CountRow>( + //language=postgresql + r#" + SELECT COUNT(*) + FROM profile_events + WHERE id = $1 + "#, + ) + .bind(command.id().as_ref()) + .fetch_one(&mut *con) + .await + .convert_error()?; + if amount.count != 0 { + return Err(Report::new(KernelError::Concurrency).attach_printable( + format!("Event {} already exists", command.id().as_ref()), + )); + } + } + KnownEventVersion::Prev(prev_version) => { + let last_version = sqlx::query_as::<_, VersionRow>( + //language=postgresql + r#" + SELECT version + FROM 
profile_events + WHERE id = $1 + ORDER BY version DESC + LIMIT 1 + "#, + ) + .bind(command.id().as_ref()) + .fetch_optional(&mut *con) + .await + .convert_error()?; + if last_version + .map(|row: VersionRow| &row.version != prev_version.as_ref()) + .unwrap_or(true) + { + return Err(Report::new(KernelError::Concurrency).attach_printable( + format!( + "Event {} version {} already exists", + command.id().as_ref(), + prev_version.as_ref() + ), + )); + } + } + }; + } + + sqlx::query( + //language=postgresql + r#" + INSERT INTO profile_events (version, id, event_name, data) + VALUES ($1, $2, $3, $4) + "#, + ) + .bind(version) + .bind(command.id().as_ref()) + .bind(event_name) + .bind(serde_json::to_value(command.event()).convert_error()?) + .execute(con) + .await + .convert_error()?; + + Ok(()) + } +} + +impl DependOnProfileEventStore for PostgresDatabase { + type ProfileEventStore = PostgresProfileEventStore; + + fn profile_event_store(&self) -> &Self::ProfileEventStore { + &PostgresProfileEventStore + } +} + +#[cfg(test)] +mod test { + mod query { + use crate::database::PostgresDatabase; + use kernel::interfaces::database::DatabaseConnection; + use kernel::interfaces::event_store::{DependOnProfileEventStore, ProfileEventStore}; + use kernel::prelude::entity::{ + AccountId, CommandEnvelope, EventId, KnownEventVersion, Nanoid, Profile, ProfileEvent, + ProfileId, + }; + use uuid::Uuid; + + fn create_profile_command(profile_id: ProfileId) -> CommandEnvelope { + let event = ProfileEvent::Created { + account_id: AccountId::new(Uuid::now_v7()), + display_name: None, + summary: None, + icon: None, + banner: None, + nanoid: Nanoid::default(), + }; + CommandEnvelope::new( + EventId::from(profile_id), + event.name(), + event, + Some(KnownEventVersion::Nothing), + ) + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn find_by_id() { + let db = PostgresDatabase::new().await.unwrap(); + let mut transaction = db.begin_transaction().await.unwrap(); + let profile_id = 
ProfileId::new(Uuid::now_v7()); + let event_id = EventId::from(profile_id.clone()); + let events = db + .profile_event_store() + .find_by_id(&mut transaction, &event_id, None) + .await + .unwrap(); + assert_eq!(events.len(), 0); + let created_profile = create_profile_command(profile_id.clone()); + let update_event = ProfileEvent::Updated { + display_name: None, + summary: None, + icon: None, + banner: None, + }; + let updated_profile = CommandEnvelope::new( + EventId::from(profile_id.clone()), + update_event.name(), + update_event, + None, + ); + + db.profile_event_store() + .persist(&mut transaction, &created_profile) + .await + .unwrap(); + db.profile_event_store() + .persist(&mut transaction, &updated_profile) + .await + .unwrap(); + let events = db + .profile_event_store() + .find_by_id(&mut transaction, &event_id, None) + .await + .unwrap(); + assert_eq!(events.len(), 2); + assert_eq!(&events[0].event, created_profile.event()); + assert_eq!(&events[1].event, updated_profile.event()); + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn find_by_id_since_version() { + let db = PostgresDatabase::new().await.unwrap(); + let mut transaction = db.begin_transaction().await.unwrap(); + let profile_id = ProfileId::new(Uuid::now_v7()); + let event_id = EventId::from(profile_id.clone()); + + let created_profile = create_profile_command(profile_id.clone()); + let update_event = ProfileEvent::Updated { + display_name: None, + summary: None, + icon: None, + banner: None, + }; + let updated_profile = CommandEnvelope::new( + EventId::from(profile_id.clone()), + update_event.name(), + update_event, + None, + ); + + db.profile_event_store() + .persist(&mut transaction, &created_profile) + .await + .unwrap(); + db.profile_event_store() + .persist(&mut transaction, &updated_profile) + .await + .unwrap(); + + // Get all events to obtain the first version + let all_events = db + .profile_event_store() + .find_by_id(&mut transaction, &event_id, None) + .await + 
.unwrap(); + assert_eq!(all_events.len(), 2); + + // Query since the first event's version — should return the 2nd event + let since_events = db + .profile_event_store() + .find_by_id(&mut transaction, &event_id, Some(&all_events[0].version)) + .await + .unwrap(); + assert_eq!(since_events.len(), 1); + assert_eq!(&since_events[0].event, updated_profile.event()); + + // Query since the last event's version — should return no events + let no_events = db + .profile_event_store() + .find_by_id(&mut transaction, &event_id, Some(&all_events[1].version)) + .await + .unwrap(); + assert_eq!(no_events.len(), 0); + } + } + + mod persist { + use crate::database::PostgresDatabase; + use kernel::interfaces::database::DatabaseConnection; + use kernel::interfaces::event_store::{DependOnProfileEventStore, ProfileEventStore}; + use kernel::prelude::entity::{ + AccountId, CommandEnvelope, EventId, KnownEventVersion, Nanoid, Profile, ProfileEvent, + ProfileId, + }; + use uuid::Uuid; + + fn create_profile_command(profile_id: ProfileId) -> CommandEnvelope { + let event = ProfileEvent::Created { + account_id: AccountId::new(Uuid::now_v7()), + display_name: None, + summary: None, + icon: None, + banner: None, + nanoid: Nanoid::default(), + }; + CommandEnvelope::new( + EventId::from(profile_id), + event.name(), + event, + Some(KnownEventVersion::Nothing), + ) + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn basic_creation() { + let db = PostgresDatabase::new().await.unwrap(); + let mut transaction = db.begin_transaction().await.unwrap(); + let profile_id = ProfileId::new(Uuid::now_v7()); + let created_profile = create_profile_command(profile_id.clone()); + db.profile_event_store() + .persist(&mut transaction, &created_profile) + .await + .unwrap(); + let events = db + .profile_event_store() + .find_by_id(&mut transaction, &EventId::from(profile_id), None) + .await + .unwrap(); + assert_eq!(events.len(), 1); + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async 
fn persist_and_transform_test() { + let db = PostgresDatabase::new().await.unwrap(); + let mut transaction = db.begin_transaction().await.unwrap(); + let profile_id = ProfileId::new(Uuid::now_v7()); + let created_profile = create_profile_command(profile_id.clone()); + + let event_envelope = db + .profile_event_store() + .persist_and_transform(&mut transaction, created_profile.clone()) + .await + .unwrap(); + + assert_eq!(event_envelope.id, EventId::from(profile_id.clone())); + assert_eq!(&event_envelope.event, created_profile.event()); + + let events = db + .profile_event_store() + .find_by_id(&mut transaction, &EventId::from(profile_id), None) + .await + .unwrap(); + assert_eq!(events.len(), 1); + assert_eq!(&events[0].event, created_profile.event()); + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn known_event_version_nothing_prevents_duplicate() { + let db = PostgresDatabase::new().await.unwrap(); + let mut transaction = db.begin_transaction().await.unwrap(); + let profile_id = ProfileId::new(Uuid::now_v7()); + let created_profile = create_profile_command(profile_id.clone()); + + // First persist should succeed + db.profile_event_store() + .persist(&mut transaction, &created_profile) + .await + .unwrap(); + + // Second persist with KnownEventVersion::Nothing should fail (concurrency error) + let result = db + .profile_event_store() + .persist(&mut transaction, &created_profile) + .await; + assert!(result.is_err()); + } + } +} diff --git a/driver/src/database/postgres/remote_account.rs b/driver/src/database/postgres/remote_account.rs index 5ec4fc6..d32c757 100644 --- a/driver/src/database/postgres/remote_account.rs +++ b/driver/src/database/postgres/remote_account.rs @@ -1,7 +1,6 @@ use crate::database::{PostgresConnection, PostgresDatabase}; use crate::ConvertError; -use kernel::interfaces::modify::{DependOnRemoteAccountModifier, RemoteAccountModifier}; -use kernel::interfaces::query::{DependOnRemoteAccountQuery, RemoteAccountQuery}; +use 
kernel::interfaces::repository::{DependOnRemoteAccountRepository, RemoteAccountRepository}; use kernel::prelude::entity::{ ImageId, RemoteAccount, RemoteAccountAcct, RemoteAccountId, RemoteAccountUrl, }; @@ -30,15 +29,15 @@ impl From for RemoteAccount { pub struct PostgresRemoteAccountRepository; -impl RemoteAccountQuery for PostgresRemoteAccountRepository { - type Transaction = PostgresConnection; +impl RemoteAccountRepository for PostgresRemoteAccountRepository { + type Executor = PostgresConnection; async fn find_by_id( &self, - transaction: &mut Self::Transaction, + executor: &mut Self::Executor, id: &RemoteAccountId, ) -> error_stack::Result, KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; sqlx::query_as::<_, RemoteAccountRow>( // language=postgresql r#" @@ -56,10 +55,10 @@ impl RemoteAccountQuery for PostgresRemoteAccountRepository { async fn find_by_acct( &self, - transaction: &mut Self::Transaction, + executor: &mut Self::Executor, acct: &RemoteAccountAcct, ) -> error_stack::Result, KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; sqlx::query_as::<_, RemoteAccountRow>( // language=postgresql r#" @@ -77,10 +76,10 @@ impl RemoteAccountQuery for PostgresRemoteAccountRepository { async fn find_by_url( &self, - transaction: &mut Self::Transaction, + executor: &mut Self::Executor, url: &RemoteAccountUrl, ) -> error_stack::Result, KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; sqlx::query_as::<_, RemoteAccountRow>( // language=postgresql r#" @@ -95,25 +94,13 @@ impl RemoteAccountQuery for PostgresRemoteAccountRepository { .convert_error() .map(|option| option.map(RemoteAccount::from)) } -} - -impl DependOnRemoteAccountQuery for PostgresDatabase { - type RemoteAccountQuery = PostgresRemoteAccountRepository; - - fn remote_account_query(&self) -> &Self::RemoteAccountQuery { - &PostgresRemoteAccountRepository - } 
-} - -impl RemoteAccountModifier for PostgresRemoteAccountRepository { - type Transaction = PostgresConnection; async fn create( &self, - transaction: &mut Self::Transaction, + executor: &mut Self::Executor, account: &RemoteAccount, ) -> error_stack::Result<(), KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; sqlx::query( // language=postgresql r#" @@ -133,10 +120,10 @@ impl RemoteAccountModifier for PostgresRemoteAccountRepository { async fn update( &self, - transaction: &mut Self::Transaction, + executor: &mut Self::Executor, account: &RemoteAccount, ) -> error_stack::Result<(), KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; sqlx::query( // language=postgresql r#" @@ -157,10 +144,10 @@ impl RemoteAccountModifier for PostgresRemoteAccountRepository { async fn delete( &self, - transaction: &mut Self::Transaction, + executor: &mut Self::Executor, account_id: &RemoteAccountId, ) -> error_stack::Result<(), KernelError> { - let con: &mut PgConnection = transaction; + let con: &mut PgConnection = executor; sqlx::query( // language=postgresql r#" @@ -176,10 +163,10 @@ impl RemoteAccountModifier for PostgresRemoteAccountRepository { } } -impl DependOnRemoteAccountModifier for PostgresDatabase { - type RemoteAccountModifier = PostgresRemoteAccountRepository; +impl DependOnRemoteAccountRepository for PostgresDatabase { + type RemoteAccountRepository = PostgresRemoteAccountRepository; - fn remote_account_modifier(&self) -> &Self::RemoteAccountModifier { + fn remote_account_repository(&self) -> &Self::RemoteAccountRepository { &PostgresRemoteAccountRepository } } @@ -209,11 +196,13 @@ mod test { use crate::database::postgres::remote_account::test::acct_url; use crate::database::PostgresDatabase; use kernel::interfaces::database::DatabaseConnection; - use kernel::interfaces::modify::{DependOnRemoteAccountModifier, RemoteAccountModifier}; - use 
kernel::interfaces::query::{DependOnRemoteAccountQuery, RemoteAccountQuery}; + use kernel::interfaces::repository::{ + DependOnRemoteAccountRepository, RemoteAccountRepository, + }; use kernel::prelude::entity::{RemoteAccount, RemoteAccountId}; use uuid::Uuid; + #[test_with::env(DATABASE_URL)] #[tokio::test] async fn find_by_id() { let database = PostgresDatabase::new().await.unwrap(); @@ -223,23 +212,24 @@ mod test { let (acct, url) = acct_url(None); let remote_account = RemoteAccount::new(id.clone(), acct, url, None); database - .remote_account_modifier() + .remote_account_repository() .create(&mut transaction, &remote_account) .await .unwrap(); let result = database - .remote_account_query() + .remote_account_repository() .find_by_id(&mut transaction, &id) .await .unwrap(); assert_eq!(result, Some(remote_account)); database - .remote_account_modifier() + .remote_account_repository() .delete(&mut transaction, &id) .await .unwrap(); } + #[test_with::env(DATABASE_URL)] #[tokio::test] async fn find_by_acct() { let database = PostgresDatabase::new().await.unwrap(); @@ -253,24 +243,25 @@ mod test { None, ); database - .remote_account_modifier() + .remote_account_repository() .create(&mut transaction, &remote_account) .await .unwrap(); let result = database - .remote_account_query() + .remote_account_repository() .find_by_acct(&mut transaction, &acct) .await .unwrap(); assert_eq!(result, Some(remote_account.clone())); database - .remote_account_modifier() + .remote_account_repository() .delete(&mut transaction, remote_account.id()) .await .unwrap(); } + #[test_with::env(DATABASE_URL)] #[tokio::test] async fn find_by_url() { let database = PostgresDatabase::new().await.unwrap(); @@ -284,18 +275,18 @@ mod test { None, ); database - .remote_account_modifier() + .remote_account_repository() .create(&mut transaction, &remote_account) .await .unwrap(); let result = database - .remote_account_query() + .remote_account_repository() .find_by_url(&mut transaction, &url) .await 
.unwrap(); assert_eq!(result, Some(remote_account.clone())); database - .remote_account_modifier() + .remote_account_repository() .delete(&mut transaction, remote_account.id()) .await .unwrap(); @@ -306,11 +297,13 @@ mod test { use crate::database::postgres::remote_account::test::acct_url; use crate::database::PostgresDatabase; use kernel::interfaces::database::DatabaseConnection; - use kernel::interfaces::modify::{DependOnRemoteAccountModifier, RemoteAccountModifier}; - use kernel::interfaces::query::{DependOnRemoteAccountQuery, RemoteAccountQuery}; + use kernel::interfaces::repository::{ + DependOnRemoteAccountRepository, RemoteAccountRepository, + }; use kernel::prelude::entity::{RemoteAccount, RemoteAccountId}; use uuid::Uuid; + #[test_with::env(DATABASE_URL)] #[tokio::test] async fn create() { let database = PostgresDatabase::new().await.unwrap(); @@ -320,17 +313,18 @@ mod test { let (acct, url) = acct_url(None); let remote_account = RemoteAccount::new(id, acct, url, None); database - .remote_account_modifier() + .remote_account_repository() .create(&mut transaction, &remote_account) .await .unwrap(); database - .remote_account_modifier() + .remote_account_repository() .delete(&mut transaction, remote_account.id()) .await .unwrap(); } + #[test_with::env(DATABASE_URL)] #[tokio::test] async fn update() { let database = PostgresDatabase::new().await.unwrap(); @@ -340,7 +334,7 @@ mod test { let (acct, url) = acct_url(None); let remote_account = RemoteAccount::new(id.clone(), acct, url, None); database - .remote_account_modifier() + .remote_account_repository() .create(&mut transaction, &remote_account) .await .unwrap(); @@ -348,23 +342,24 @@ mod test { let (acct, url) = acct_url(None); let remote_account = RemoteAccount::new(id.clone(), acct, url, None); database - .remote_account_modifier() + .remote_account_repository() .update(&mut transaction, &remote_account) .await .unwrap(); let result = database - .remote_account_query() + .remote_account_repository() 
.find_by_id(&mut transaction, &id) .await .unwrap(); assert_eq!(result, Some(remote_account.clone())); database - .remote_account_modifier() + .remote_account_repository() .delete(&mut transaction, remote_account.id()) .await .unwrap(); } + #[test_with::env(DATABASE_URL)] #[tokio::test] async fn delete() { let database = PostgresDatabase::new().await.unwrap(); @@ -374,18 +369,18 @@ mod test { let (acct, url) = acct_url(None); let remote_account = RemoteAccount::new(id.clone(), acct, url, None); database - .remote_account_modifier() + .remote_account_repository() .create(&mut transaction, &remote_account) .await .unwrap(); database - .remote_account_modifier() + .remote_account_repository() .delete(&mut transaction, &id) .await .unwrap(); let result = database - .remote_account_query() + .remote_account_repository() .find_by_id(&mut transaction, &id) .await .unwrap(); diff --git a/driver/src/database/postgres/stellar_account.rs b/driver/src/database/postgres/stellar_account.rs deleted file mode 100644 index 06ec468..0000000 --- a/driver/src/database/postgres/stellar_account.rs +++ /dev/null @@ -1,363 +0,0 @@ -use crate::database::{PostgresConnection, PostgresDatabase}; -use crate::ConvertError; -use kernel::interfaces::modify::{DependOnStellarAccountModifier, StellarAccountModifier}; -use kernel::interfaces::query::{DependOnStellarAccountQuery, StellarAccountQuery}; -use kernel::prelude::entity::{ - EventVersion, StellarAccount, StellarAccountAccessToken, StellarAccountClientId, - StellarAccountId, StellarAccountRefreshToken, StellarHostId, -}; -use kernel::KernelError; -use sqlx::PgConnection; -use uuid::Uuid; - -#[derive(sqlx::FromRow)] -struct StellarAccountRow { - id: Uuid, - host_id: Uuid, - client_id: String, - access_token: String, - refresh_token: String, - version: Uuid, -} - -impl From for StellarAccount { - fn from(value: StellarAccountRow) -> Self { - StellarAccount::new( - StellarAccountId::new(value.id), - StellarHostId::new(value.host_id), - 
StellarAccountClientId::new(value.client_id), - StellarAccountAccessToken::new(value.access_token), - StellarAccountRefreshToken::new(value.refresh_token), - EventVersion::new(value.version), - ) - } -} - -pub struct PostgresStellarAccountRepository; - -impl StellarAccountQuery for PostgresStellarAccountRepository { - type Transaction = PostgresConnection; - - async fn find_by_id( - &self, - transaction: &mut Self::Transaction, - account_id: &StellarAccountId, - ) -> error_stack::Result, KernelError> { - let con: &mut PgConnection = transaction; - sqlx::query_as::<_, StellarAccountRow>( - //language=postgresql - r#" - SELECT id, host_id, client_id, access_token, refresh_token, version - FROM stellar_accounts - WHERE id = $1 - "#, - ) - .bind(account_id.as_ref()) - .fetch_optional(con) - .await - .convert_error() - .map(|option| option.map(|row| row.into())) - } -} - -impl DependOnStellarAccountQuery for PostgresDatabase { - type StellarAccountQuery = PostgresStellarAccountRepository; - - fn stellar_account_query(&self) -> &Self::StellarAccountQuery { - &PostgresStellarAccountRepository - } -} - -impl StellarAccountModifier for PostgresStellarAccountRepository { - type Transaction = PostgresConnection; - - async fn create( - &self, - transaction: &mut Self::Transaction, - stellar_account: &StellarAccount, - ) -> error_stack::Result<(), KernelError> { - let con: &mut PgConnection = transaction; - sqlx::query( - //language=postgresql - r#" - INSERT INTO stellar_accounts (id, host_id, client_id, access_token, refresh_token, version) VALUES ($1, $2, $3, $4, $5, $6) - "# - ) - .bind(stellar_account.id().as_ref()) - .bind(stellar_account.host().as_ref()) - .bind(stellar_account.client_id().as_ref()) - .bind(stellar_account.access_token().as_ref()) - .bind(stellar_account.refresh_token().as_ref()) - .bind(stellar_account.version().as_ref()) - .execute(con) - .await - .convert_error()?; - Ok(()) - } - - async fn update( - &self, - transaction: &mut Self::Transaction, - 
stellar_account: &StellarAccount, - ) -> error_stack::Result<(), KernelError> { - let con: &mut PgConnection = transaction; - sqlx::query( - //language=postgresql - r#" - UPDATE stellar_accounts SET host_id = $2, client_id = $3, access_token = $4, refresh_token = $5, version = $6 - WHERE id = $1 - "# - ) - .bind(stellar_account.id().as_ref()) - .bind(stellar_account.host().as_ref()) - .bind(stellar_account.client_id().as_ref()) - .bind(stellar_account.access_token().as_ref()) - .bind(stellar_account.refresh_token().as_ref()) - .bind(stellar_account.version().as_ref()) - .execute(con) - .await - .convert_error()?; - Ok(()) - } - - async fn delete( - &self, - transaction: &mut Self::Transaction, - account_id: &StellarAccountId, - ) -> error_stack::Result<(), KernelError> { - let con: &mut PgConnection = transaction; - sqlx::query( - //language=postgresql - r#" - DELETE FROM stellar_accounts WHERE id = $1 - "#, - ) - .bind(account_id.as_ref()) - .execute(con) - .await - .convert_error()?; - Ok(()) - } -} - -impl DependOnStellarAccountModifier for PostgresDatabase { - type StellarAccountModifier = PostgresStellarAccountRepository; - - fn stellar_account_modifier(&self) -> &Self::StellarAccountModifier { - &PostgresStellarAccountRepository - } -} - -#[cfg(test)] -mod test { - mod query { - use crate::database::PostgresDatabase; - use kernel::interfaces::database::DatabaseConnection; - use kernel::interfaces::modify::{ - DependOnStellarAccountModifier, DependOnStellarHostModifier, StellarAccountModifier, - StellarHostModifier, - }; - use kernel::interfaces::query::{DependOnStellarAccountQuery, StellarAccountQuery}; - use kernel::prelude::entity::{ - EventVersion, StellarAccount, StellarAccountAccessToken, StellarAccountClientId, - StellarAccountId, StellarAccountRefreshToken, StellarHost, StellarHostId, - StellarHostUrl, - }; - use uuid::Uuid; - - #[tokio::test] - async fn find_by_id() { - let database = PostgresDatabase::new().await.unwrap(); - let mut transaction = 
database.begin_transaction().await.unwrap(); - - let stellar_host_id = StellarHostId::new(Uuid::now_v7()); - let stellar_host = - StellarHost::new(stellar_host_id.clone(), StellarHostUrl::new(Uuid::now_v7())); - database - .stellar_host_modifier() - .create(&mut transaction, &stellar_host) - .await - .unwrap(); - let account_id = StellarAccountId::new(Uuid::now_v7()); - let stellar_account = StellarAccount::new( - account_id.clone(), - stellar_host_id, - StellarAccountClientId::new("client_id".to_string()), - StellarAccountAccessToken::new("access_token".to_string()), - StellarAccountRefreshToken::new("refresh_token".to_string()), - EventVersion::new(Uuid::now_v7()), - ); - - database - .stellar_account_modifier() - .create(&mut transaction, &stellar_account) - .await - .unwrap(); - let result = database - .stellar_account_query() - .find_by_id(&mut transaction, &account_id) - .await - .unwrap(); - assert_eq!(result, Some(stellar_account.clone())); - database - .stellar_account_modifier() - .delete(&mut transaction, stellar_account.id()) - .await - .unwrap(); - } - } - - mod modify { - use crate::database::PostgresDatabase; - use kernel::interfaces::database::DatabaseConnection; - use kernel::interfaces::modify::{ - DependOnStellarAccountModifier, DependOnStellarHostModifier, StellarAccountModifier, - StellarHostModifier, - }; - use kernel::interfaces::query::{DependOnStellarAccountQuery, StellarAccountQuery}; - use kernel::prelude::entity::{ - EventVersion, StellarAccount, StellarAccountAccessToken, StellarAccountClientId, - StellarAccountId, StellarAccountRefreshToken, StellarHost, StellarHostId, - StellarHostUrl, - }; - use uuid::Uuid; - - #[tokio::test] - async fn create() { - let database = PostgresDatabase::new().await.unwrap(); - let mut transaction = database.begin_transaction().await.unwrap(); - - let host_id = StellarHostId::new(Uuid::now_v7()); - let account_id = StellarAccountId::new(Uuid::now_v7()); - let stellar_host = - 
StellarHost::new(host_id.clone(), StellarHostUrl::new(Uuid::now_v7())); - database - .stellar_host_modifier() - .create(&mut transaction, &stellar_host) - .await - .unwrap(); - let stellar_account = StellarAccount::new( - account_id.clone(), - host_id, - StellarAccountClientId::new("client_id".to_string()), - StellarAccountAccessToken::new("access_token".to_string()), - StellarAccountRefreshToken::new("refresh_token".to_string()), - EventVersion::new(Uuid::now_v7()), - ); - database - .stellar_account_modifier() - .create(&mut transaction, &stellar_account) - .await - .unwrap(); - let result = database - .stellar_account_query() - .find_by_id(&mut transaction, &account_id) - .await - .unwrap(); - assert_eq!(result, Some(stellar_account.clone())); - database - .stellar_account_modifier() - .delete(&mut transaction, stellar_account.id()) - .await - .unwrap(); - } - - #[tokio::test] - async fn update() { - let database = PostgresDatabase::new().await.unwrap(); - let mut transaction = database.begin_transaction().await.unwrap(); - - let host_id = StellarHostId::new(Uuid::now_v7()); - let account_id = StellarAccountId::new(Uuid::now_v7()); - let stellar_host = - StellarHost::new(host_id.clone(), StellarHostUrl::new(Uuid::now_v7())); - database - .stellar_host_modifier() - .create(&mut transaction, &stellar_host) - .await - .unwrap(); - let stellar_account = StellarAccount::new( - account_id.clone(), - host_id.clone(), - StellarAccountClientId::new("client_id".to_string()), - StellarAccountAccessToken::new("access_token".to_string()), - StellarAccountRefreshToken::new("refresh_token".to_string()), - EventVersion::new(Uuid::now_v7()), - ); - database - .stellar_account_modifier() - .create(&mut transaction, &stellar_account) - .await - .unwrap(); - let updated_stellar_account = StellarAccount::new( - account_id.clone(), - host_id, - StellarAccountClientId::new("updated_client_id".to_string()), - StellarAccountAccessToken::new("updated_access_token".to_string()), - 
StellarAccountRefreshToken::new("updated_refresh_token".to_string()), - EventVersion::new(Uuid::now_v7()), - ); - database - .stellar_account_modifier() - .update(&mut transaction, &updated_stellar_account) - .await - .unwrap(); - let result = database - .stellar_account_query() - .find_by_id(&mut transaction, &account_id) - .await - .unwrap(); - assert_eq!(result, Some(updated_stellar_account)); - database - .stellar_account_modifier() - .delete(&mut transaction, stellar_account.id()) - .await - .unwrap(); - } - - #[tokio::test] - async fn delete() { - let database = PostgresDatabase::new().await.unwrap(); - let mut transaction = database.begin_transaction().await.unwrap(); - - let host_id = StellarHostId::new(Uuid::now_v7()); - let stellar_host = - StellarHost::new(host_id.clone(), StellarHostUrl::new(Uuid::now_v7())); - database - .stellar_host_modifier() - .create(&mut transaction, &stellar_host) - .await - .unwrap(); - let account_id = StellarAccountId::new(Uuid::now_v7()); - let stellar_account = StellarAccount::new( - account_id.clone(), - host_id, - StellarAccountClientId::new("client_id".to_string()), - StellarAccountAccessToken::new("access_token".to_string()), - StellarAccountRefreshToken::new("refresh_token".to_string()), - EventVersion::new(Uuid::now_v7()), - ); - database - .stellar_account_modifier() - .create(&mut transaction, &stellar_account) - .await - .unwrap(); - database - .stellar_account_modifier() - .delete(&mut transaction, &account_id) - .await - .unwrap(); - let result = database - .stellar_account_query() - .find_by_id(&mut transaction, &account_id) - .await - .unwrap(); - assert_eq!(result, None); - database - .stellar_account_modifier() - .delete(&mut transaction, stellar_account.id()) - .await - .unwrap(); - } - } -} diff --git a/driver/src/database/postgres/stellar_host.rs b/driver/src/database/postgres/stellar_host.rs deleted file mode 100644 index 2cdfccb..0000000 --- a/driver/src/database/postgres/stellar_host.rs +++ /dev/null @@ 
-1,250 +0,0 @@ -use crate::database::{PostgresConnection, PostgresDatabase}; -use crate::ConvertError; -use kernel::interfaces::modify::{DependOnStellarHostModifier, StellarHostModifier}; -use kernel::interfaces::query::{DependOnStellarHostQuery, StellarHostQuery}; -use kernel::prelude::entity::{StellarHost, StellarHostId, StellarHostUrl}; -use kernel::KernelError; -use sqlx::PgConnection; -use uuid::Uuid; - -#[derive(sqlx::FromRow)] -struct StellarHostRow { - id: Uuid, - url: String, -} - -impl From for StellarHost { - fn from(row: StellarHostRow) -> Self { - StellarHost::new(StellarHostId::new(row.id), StellarHostUrl::new(row.url)) - } -} - -pub struct PostgresStellarHostRepository; - -impl StellarHostQuery for PostgresStellarHostRepository { - type Transaction = PostgresConnection; - async fn find_by_id( - &self, - transaction: &mut Self::Transaction, - id: &StellarHostId, - ) -> error_stack::Result, KernelError> { - let con: &mut PgConnection = transaction; - sqlx::query_as::<_, StellarHostRow>( - // language=postgresql - r#" - SELECT id, url - FROM stellar_hosts - WHERE id = $1 - "#, - ) - .bind(id.as_ref()) - .fetch_optional(con) - .await - .convert_error() - .map(|row| row.map(StellarHost::from)) - } - - async fn find_by_url( - &self, - transaction: &mut Self::Transaction, - domain: &StellarHostUrl, - ) -> error_stack::Result, KernelError> { - let con: &mut PgConnection = transaction; - sqlx::query_as::<_, StellarHostRow>( - // language=postgresql - r#" - SELECT id, url - FROM stellar_hosts - WHERE url = $1 - "#, - ) - .bind(domain.as_ref()) - .fetch_optional(con) - .await - .convert_error() - .map(|row| row.map(StellarHost::from)) - } -} - -impl DependOnStellarHostQuery for PostgresDatabase { - type StellarHostQuery = PostgresStellarHostRepository; - - fn stellar_host_query(&self) -> &Self::StellarHostQuery { - &PostgresStellarHostRepository - } -} - -impl StellarHostModifier for PostgresStellarHostRepository { - type Transaction = PostgresConnection; - 
async fn create( - &self, - transaction: &mut Self::Transaction, - stellar_host: &StellarHost, - ) -> error_stack::Result<(), KernelError> { - let con: &mut PgConnection = transaction; - sqlx::query( - // language=postgresql - r#" - INSERT INTO stellar_hosts (id, url) - VALUES ($1, $2) - "#, - ) - .bind(stellar_host.id().as_ref()) - .bind(stellar_host.url().as_ref()) - .execute(con) - .await - .convert_error() - .map(|_| ()) - } - - async fn update( - &self, - transaction: &mut Self::Transaction, - stellar_host: &StellarHost, - ) -> error_stack::Result<(), KernelError> { - let con: &mut PgConnection = transaction; - sqlx::query( - // language=postgresql - r#" - UPDATE stellar_hosts - SET url = $2 - WHERE id = $1 - "#, - ) - .bind(stellar_host.id().as_ref()) - .bind(stellar_host.url().as_ref()) - .execute(con) - .await - .convert_error() - .map(|_| ()) - } -} - -impl DependOnStellarHostModifier for PostgresDatabase { - type StellarHostModifier = PostgresStellarHostRepository; - - fn stellar_host_modifier(&self) -> &Self::StellarHostModifier { - &PostgresStellarHostRepository - } -} - -#[cfg(test)] -mod test { - use kernel::prelude::entity::StellarHostUrl; - use uuid::Uuid; - - fn url() -> StellarHostUrl { - StellarHostUrl::new(format!("https://{}.example.com", Uuid::now_v7())) - } - - mod query { - use crate::database::postgres::stellar_host::test::url; - use crate::database::PostgresDatabase; - use kernel::interfaces::database::DatabaseConnection; - use kernel::interfaces::modify::{DependOnStellarHostModifier, StellarHostModifier}; - use kernel::interfaces::query::{DependOnStellarHostQuery, StellarHostQuery}; - use kernel::prelude::entity::{StellarHost, StellarHostId}; - use uuid::Uuid; - - #[tokio::test] - async fn find_by_id() { - let database = PostgresDatabase::new().await.unwrap(); - let mut transaction = database.begin_transaction().await.unwrap(); - - let stellar_host = StellarHost::new(StellarHostId::new(Uuid::now_v7()), url()); - database - 
.stellar_host_modifier() - .create(&mut transaction, &stellar_host) - .await - .unwrap(); - - let found_stellar_host = database - .stellar_host_query() - .find_by_id(&mut transaction, stellar_host.id()) - .await - .unwrap() - .unwrap(); - assert_eq!(stellar_host, found_stellar_host); - } - - #[tokio::test] - async fn find_by_url() { - let database = PostgresDatabase::new().await.unwrap(); - let mut transaction = database.begin_transaction().await.unwrap(); - - let stellar_host = StellarHost::new(StellarHostId::new(Uuid::now_v7()), url()); - database - .stellar_host_modifier() - .create(&mut transaction, &stellar_host) - .await - .unwrap(); - - let found_stellar_host = database - .stellar_host_query() - .find_by_url(&mut transaction, stellar_host.url()) - .await - .unwrap() - .unwrap(); - assert_eq!(stellar_host, found_stellar_host); - } - } - - mod modify { - use crate::database::postgres::stellar_host::test::url; - use crate::database::PostgresDatabase; - use kernel::interfaces::database::DatabaseConnection; - use kernel::interfaces::modify::{DependOnStellarHostModifier, StellarHostModifier}; - use kernel::interfaces::query::{DependOnStellarHostQuery, StellarHostQuery}; - use kernel::prelude::entity::{StellarHost, StellarHostId}; - use uuid::Uuid; - - #[tokio::test] - async fn create() { - let database = PostgresDatabase::new().await.unwrap(); - let mut transaction = database.begin_transaction().await.unwrap(); - - let stellar_host = StellarHost::new(StellarHostId::new(Uuid::now_v7()), url()); - database - .stellar_host_modifier() - .create(&mut transaction, &stellar_host) - .await - .unwrap(); - - let found_stellar_host = database - .stellar_host_query() - .find_by_id(&mut transaction, stellar_host.id()) - .await - .unwrap() - .unwrap(); - assert_eq!(stellar_host, found_stellar_host); - } - - #[tokio::test] - async fn update() { - let database = PostgresDatabase::new().await.unwrap(); - let mut transaction = database.begin_transaction().await.unwrap(); - - let 
stellar_host = StellarHost::new(StellarHostId::new(Uuid::now_v7()), url()); - database - .stellar_host_modifier() - .create(&mut transaction, &stellar_host) - .await - .unwrap(); - - let updated_stellar_host = StellarHost::new(stellar_host.id().clone(), url()); - database - .stellar_host_modifier() - .update(&mut transaction, &updated_stellar_host) - .await - .unwrap(); - - let found_stellar_host = database - .stellar_host_query() - .find_by_id(&mut transaction, stellar_host.id()) - .await - .unwrap() - .unwrap(); - assert_eq!(updated_stellar_host, found_stellar_host); - } - } -} diff --git a/driver/src/database/redis.rs b/driver/src/database/redis.rs new file mode 100644 index 0000000..c1a24e9 --- /dev/null +++ b/driver/src/database/redis.rs @@ -0,0 +1,62 @@ +use crate::database::env; +use deadpool_redis::{Config, Pool, Runtime}; +use error_stack::{Report, ResultExt}; +use kernel::interfaces::database::{DatabaseConnection, Executor}; +use kernel::KernelError; +use std::ops::Deref; +use vodca::References; + +// redis://127.0.0.1 +const REDIS_URL: &str = "REDIS_URL"; + +// 127.0.0.1 +const HOST: &str = "REDIS_HOST"; + +#[derive(Clone, References)] +pub struct RedisDatabase { + pool: Pool, +} + +impl RedisDatabase { + pub fn new() -> error_stack::Result { + let url = if let Some(env) = env(REDIS_URL)? 
{ + env + } else { + let host = env(HOST)?.ok_or_else(|| { + Report::new(KernelError::Internal) + .attach_printable(format!("Failed to get env: {HOST}")) + })?; + format!("redis://{}", host) + }; + let config = Config::from_url(&url); + let pool = config + .create_pool(Some(Runtime::Tokio1)) + .change_context_lazy(|| KernelError::Internal)?; + Ok(Self { pool }) + } +} + +pub struct RedisConnection(deadpool_redis::Connection); + +impl Executor for RedisConnection {} + +impl Deref for RedisConnection { + type Target = deadpool_redis::Connection; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DatabaseConnection for RedisDatabase { + type Executor = RedisConnection; + + async fn begin_transaction(&self) -> error_stack::Result { + let pool = self + .pool + .get() + .await + .change_context_lazy(|| KernelError::Internal)?; + Ok(RedisConnection(pool)) + } +} diff --git a/driver/src/keto.rs b/driver/src/keto.rs new file mode 100644 index 0000000..3ee3603 --- /dev/null +++ b/driver/src/keto.rs @@ -0,0 +1,147 @@ +use error_stack::{Report, ResultExt}; +use kernel::interfaces::permission::{ + PermissionChecker, PermissionReq, PermissionWriter, Relation, Resource, +}; +use kernel::prelude::entity::AuthAccountId; +use kernel::KernelError; +use reqwest::Client; +use serde::{Deserialize, Serialize}; + +pub struct KetoClient { + read_url: String, + write_url: String, + http_client: Client, +} + +impl KetoClient { + pub fn new(read_url: String, write_url: String) -> Self { + let read_url = read_url.trim_end_matches('/').to_string(); + let write_url = write_url.trim_end_matches('/').to_string(); + Self { + read_url, + write_url, + http_client: Client::new(), + } + } +} + +#[derive(Debug, Serialize)] +struct CheckRequest { + namespace: String, + object: String, + relation: String, + subject_id: String, +} + +#[derive(Debug, Deserialize)] +struct CheckResponse { + allowed: bool, +} + +#[derive(Debug, Serialize)] +struct RelationTuple { + namespace: String, + object: 
String, + relation: String, + subject_id: String, +} + +impl PermissionChecker for KetoClient { + async fn check( + &self, + subject: &AuthAccountId, + req: &PermissionReq, + ) -> error_stack::Result { + let subject_id = subject.as_ref().to_string(); + + for relation in req.relations() { + let body = CheckRequest { + namespace: req.resource().namespace().to_string(), + object: req.resource().object_id(), + relation: relation.as_str().to_string(), + subject_id: subject_id.clone(), + }; + + let response = self + .http_client + .post(format!("{}/relation-tuples/check", self.read_url)) + .json(&body) + .send() + .await + .change_context_lazy(|| KernelError::Internal) + .attach_printable("Failed to check permission with Keto")?; + + if response.status().is_success() { + let check: CheckResponse = response + .json() + .await + .change_context_lazy(|| KernelError::Internal) + .attach_printable("Failed to parse Keto check response")?; + + if check.allowed { + return Ok(true); + } + } + } + + Ok(false) + } +} + +impl PermissionWriter for KetoClient { + async fn create_relation( + &self, + resource: &Resource, + relation: Relation, + subject: &AuthAccountId, + ) -> error_stack::Result<(), KernelError> { + let tuple = RelationTuple { + namespace: resource.namespace().to_string(), + object: resource.object_id(), + relation: relation.as_str().to_string(), + subject_id: subject.as_ref().to_string(), + }; + + self.http_client + .put(format!("{}/admin/relation-tuples", self.write_url)) + .json(&tuple) + .send() + .await + .change_context_lazy(|| KernelError::Internal) + .attach_printable("Failed to create relation tuple in Keto")? 
+ .error_for_status() + .map_err(|e| { + Report::new(KernelError::Internal) + .attach_printable(format!("Keto write error: {e}")) + })?; + + Ok(()) + } + + async fn delete_relation( + &self, + resource: &Resource, + relation: Relation, + subject: &AuthAccountId, + ) -> error_stack::Result<(), KernelError> { + self.http_client + .delete(format!("{}/admin/relation-tuples", self.write_url)) + .query(&[ + ("namespace", resource.namespace()), + ("object", &resource.object_id()), + ("relation", relation.as_str()), + ("subject_id", &subject.as_ref().to_string()), + ]) + .send() + .await + .change_context_lazy(|| KernelError::Internal) + .attach_printable("Failed to delete relation tuple from Keto")? + .error_for_status() + .map_err(|e| { + Report::new(KernelError::Internal) + .attach_printable(format!("Keto delete error: {e}")) + })?; + + Ok(()) + } +} diff --git a/driver/src/lib.rs b/driver/src/lib.rs index 4008cf3..b400a9e 100644 --- a/driver/src/lib.rs +++ b/driver/src/lib.rs @@ -1,4 +1,6 @@ -mod database; +pub mod crypto; +pub mod database; mod error; +pub mod keto; pub use self::error::*; diff --git a/flake.lock b/flake.lock index 7004b4c..bcf2e8b 100644 --- a/flake.lock +++ b/flake.lock @@ -5,11 +5,11 @@ "systems": "systems" }, "locked": { - "lastModified": 1710146030, - "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=", + "lastModified": 1731533236, + "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", "owner": "numtide", "repo": "flake-utils", - "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a", + "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", "type": "github" }, "original": { @@ -19,11 +19,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1710608262, - "narHash": "sha256-Tf2zqUWgU1iofcECQ+xj7HJVtoCz6yWG/oEIDmXxwXg=", + "lastModified": 1767242400, + "narHash": "sha256-knFaYjeg7swqG1dljj1hOxfg39zrIy8pfGuicjm9s+o=", "owner": "nixos", "repo": "nixpkgs", - "rev": "d211b80d2944a41899a6ab24009d9729cca05e49", + "rev": 
"c04833a1e584401bb63c1a63ddc51a71e6aa457a", "type": "github" }, "original": { @@ -75,11 +75,11 @@ "systems": "systems_2" }, "locked": { - "lastModified": 1710146030, - "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=", + "lastModified": 1731533236, + "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", "owner": "numtide", "repo": "flake-utils", - "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a", + "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 327428e..90f8d0c 100644 --- a/flake.nix +++ b/flake.nix @@ -11,16 +11,33 @@ pkgs = import nixpkgs { inherit system; }; + emumet = pkgs.rustPlatform.buildRustPackage { + pname = "server"; + name = "emumet"; + src = ./.; + cargoLock.lockFile = ./Cargo.lock; + nativeBuildInputs = [ pkgs.pkg-config ]; + buildInputs = [ pkgs.openssl ]; + PKG_CONFIG_PATH = "${pkgs.openssl.dev}/lib/pkgconfig"; + }; in - with pkgs; rec { + with pkgs; { formatter = nixpkgs-fmt; - devShells.default = mkShell { + packages.default = emumet; + devShells.default = mkShell rec { nativeBuildInputs = [ pkg-config ]; buildInputs = [ openssl ]; packages = [ nodePackages.pnpm sqlx-cli ]; + env = { + LD_LIBRARY_PATH = lib.makeLibraryPath buildInputs; + }; + shellHook = '' + #export CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_LINKER="${pkgs.clang}/bin/clang" + #export CARGO_TARGET_X86_64-UNKNOWN_LINUX_GNU_RUSTFLAGS="-C link-arg=-fuse-ld=${pkgs.mold-wrapped.override(old: { extraPackages = nativeBuildInputs ++ buildInputs; })}/bin/mold" + ''; }; }); } diff --git a/kernel/Cargo.toml b/kernel/Cargo.toml index c3b4a58..969c4c2 100644 --- a/kernel/Cargo.toml +++ b/kernel/Cargo.toml @@ -8,12 +8,13 @@ authors.workspace = true [dependencies] rand = "0.8" -destructure = "0.5.6" -vodca = "0.1.8" +destructure = { workspace = true } +vodca = { workspace = true } serde = { workspace = true } uuid = { workspace = true } time = { workspace = true } nanoid = { workspace = true } 
+zeroize = "1.7" async-trait = "0.1" error-stack = { workspace = true } diff --git a/kernel/src/crypto.rs b/kernel/src/crypto.rs new file mode 100644 index 0000000..81dfdfc --- /dev/null +++ b/kernel/src/crypto.rs @@ -0,0 +1,132 @@ +use crate::KernelError; +use error_stack::Result; +use serde::{Deserialize, Serialize}; +use zeroize::Zeroizing; + +/// Supported signing algorithms +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum SigningAlgorithm { + Rsa2048, + Ed25519, +} + +impl std::fmt::Display for SigningAlgorithm { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Rsa2048 => write!(f, "rsa2048"), + Self::Ed25519 => write!(f, "ed25519"), + } + } +} + +impl Default for SigningAlgorithm { + fn default() -> Self { + Self::Rsa2048 + } +} + +/// Encrypted private key with metadata for decryption +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct EncryptedPrivateKey { + /// Base64-encoded ciphertext (encrypted PEM) + pub ciphertext: String, + /// Base64-encoded nonce (12 bytes for AES-GCM) + pub nonce: String, + /// Base64-encoded salt (16 bytes for Argon2id) + pub salt: String, + /// Algorithm used to generate the key pair + pub algorithm: SigningAlgorithm, +} + +/// Generated key pair with public key in PEM format and encrypted private key +pub struct GeneratedKeyPair { + /// Public key in PEM format + pub public_key_pem: String, + /// Encrypted private key with metadata + pub encrypted_private_key: EncryptedPrivateKey, +} + +/// Raw key pair before encryption (private key is zeroized on drop) +pub struct RawKeyPair { + /// Public key in PEM format + pub public_key_pem: String, + /// Private key in PEM format (zeroized on drop) + pub private_key_pem: Zeroizing>, + /// Algorithm used to generate the key pair + pub algorithm: SigningAlgorithm, +} + +/// Trait for providing master password +pub trait PasswordProvider: Send + Sync { + fn 
get_password(&self) -> Result>, KernelError>; +} + +/// Trait for raw key pair generation (without encryption) +pub trait RawKeyGenerator: Send + Sync { + /// Generate a new raw key pair (unencrypted) + fn generate_raw(&self) -> Result; + + /// Returns the algorithm used by this generator + fn algorithm(&self) -> SigningAlgorithm; +} + +/// Trait for encrypting/decrypting private keys +pub trait KeyEncryptor: Send + Sync { + fn encrypt( + &self, + private_key_pem: &[u8], + password: &[u8], + algorithm: SigningAlgorithm, + ) -> Result; + + fn decrypt( + &self, + encrypted: &EncryptedPrivateKey, + password: &[u8], + ) -> Result, KernelError>; +} + +/// Trait for signing data with a private key +pub trait Signer: Send + Sync { + /// Sign data using PKCS#1 v1.5 + SHA-256 (for RSA) or Ed25519 + fn sign(&self, data: &[u8], private_key_pem: &[u8]) -> Result, KernelError>; +} + +/// Trait for verifying signatures with a public key +pub trait SignatureVerifier: Send + Sync { + /// Verify a signature using the public key + fn verify( + &self, + data: &[u8], + signature: &[u8], + public_key_pem: &[u8], + ) -> Result; +} + +// --- DI Traits --- + +pub trait DependOnPasswordProvider: Send + Sync { + type PasswordProvider: PasswordProvider; + fn password_provider(&self) -> &Self::PasswordProvider; +} + +pub trait DependOnRawKeyGenerator: Send + Sync { + type RawKeyGenerator: RawKeyGenerator; + fn raw_key_generator(&self) -> &Self::RawKeyGenerator; +} + +pub trait DependOnKeyEncryptor: Send + Sync { + type KeyEncryptor: KeyEncryptor; + fn key_encryptor(&self) -> &Self::KeyEncryptor; +} + +pub trait DependOnSigner: Send + Sync { + type Signer: Signer; + fn signer(&self) -> &Self::Signer; +} + +pub trait DependOnSignatureVerifier: Send + Sync { + type SignatureVerifier: SignatureVerifier; + fn signature_verifier(&self) -> &Self::SignatureVerifier; +} diff --git a/kernel/src/database.rs b/kernel/src/database.rs index c2d5307..af71727 100644 --- a/kernel/src/database.rs +++ 
b/kernel/src/database.rs @@ -4,13 +4,13 @@ use std::future::Future; /// Databaseのトランザクション処理を示すトレイト /// /// 現状は何もないが、将来的にトランザクション時に使える機能を示す可能性を考えて用意している -pub trait Transaction {} +pub trait Executor: Send {} pub trait DatabaseConnection: Sync + Send + 'static { - type Transaction: Transaction; + type Executor: Executor; fn begin_transaction( &self, - ) -> impl Future> + Send; + ) -> impl Future> + Send; } pub trait DependOnDatabaseConnection: Sync + Send { diff --git a/kernel/src/entity.rs b/kernel/src/entity.rs index ab6c058..58f1652 100644 --- a/kernel/src/entity.rs +++ b/kernel/src/entity.rs @@ -1,4 +1,6 @@ mod account; +mod auth_account; +mod auth_host; mod common; mod event; mod follow; @@ -6,10 +8,10 @@ mod image; mod metadata; mod profile; mod remote_account; -mod stellar_account; -mod stellar_host; pub use self::account::*; +pub use self::auth_account::*; +pub use self::auth_host::*; pub use self::common::*; pub use self::event::*; pub use self::follow::*; @@ -17,5 +19,3 @@ pub use self::image::*; pub use self::metadata::*; pub use self::profile::*; pub use self::remote_account::*; -pub use self::stellar_account::*; -pub use self::stellar_host::*; diff --git a/kernel/src/entity/account.rs b/kernel/src/entity/account.rs index c4b3438..3649933 100644 --- a/kernel/src/entity/account.rs +++ b/kernel/src/entity/account.rs @@ -5,7 +5,8 @@ use serde::Serialize; use vodca::{Nameln, Newln, References}; use crate::entity::{ - CommandEnvelope, DeletedAt, EventEnvelope, EventId, EventVersion, KnownEventVersion, Nanoid, + AuthAccountId, CommandEnvelope, CreatedAt, DeletedAt, EventEnvelope, EventId, EventVersion, + KnownEventVersion, Nanoid, }; use crate::event::EventApplier; use crate::KernelError; @@ -34,6 +35,7 @@ pub struct Account { deleted_at: Option>, version: EventVersion, nanoid: Nanoid, + created_at: CreatedAt, } #[derive(Debug, Clone, Eq, PartialEq, Nameln, Serialize, Deserialize)] @@ -46,11 +48,13 @@ pub enum AccountEvent { public_key: AccountPublicKey, 
is_bot: AccountIsBot, nanoid: Nanoid, + auth_account_id: AuthAccountId, }, Updated { is_bot: AccountIsBot, }, - Deleted, + #[serde(alias = "deleted")] + Deactivated, } impl Account { @@ -60,14 +64,16 @@ impl Account { private_key: AccountPrivateKey, public_key: AccountPublicKey, is_bot: AccountIsBot, - nano_id: Nanoid, + nanoid: Nanoid, + auth_account_id: AuthAccountId, ) -> CommandEnvelope { let event = AccountEvent::Created { name, private_key, public_key, is_bot, - nanoid: nano_id, + nanoid, + auth_account_id, }; CommandEnvelope::new( EventId::from(id), @@ -77,14 +83,43 @@ impl Account { ) } - pub fn update(id: AccountId, is_bot: AccountIsBot) -> CommandEnvelope { + pub fn update( + id: AccountId, + is_bot: AccountIsBot, + current_version: EventVersion, + ) -> CommandEnvelope { let event = AccountEvent::Updated { is_bot }; - CommandEnvelope::new(EventId::from(id), event.name(), event, None) + CommandEnvelope::new( + EventId::from(id), + event.name(), + event, + Some(KnownEventVersion::Prev(current_version)), + ) + } + + pub fn deactivate( + id: AccountId, + current_version: EventVersion, + ) -> CommandEnvelope { + let event = AccountEvent::Deactivated; + CommandEnvelope::new( + EventId::from(id), + event.name(), + event, + Some(KnownEventVersion::Prev(current_version)), + ) } +} + +impl PartialOrd for Account { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} - pub fn delete(id: AccountId) -> CommandEnvelope { - let event = AccountEvent::Deleted; - CommandEnvelope::new(EventId::from(id), event.name(), event, None) +impl Ord for Account { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.id.cmp(&other.id) } } @@ -103,11 +138,17 @@ impl EventApplier for Account { public_key, is_bot, nanoid: nano_id, + auth_account_id: _, } => { if let Some(entity) = entity { return Err(Report::new(KernelError::Internal) .attach_printable(Self::already_exists(entity))); } + let created_at = if let Some(timestamp) = 
event.id.as_ref().get_timestamp() { + CreatedAt::try_from(timestamp)? + } else { + CreatedAt::now() + }; *entity = Some(Account { id: AccountId::new(event.id), name, @@ -117,6 +158,7 @@ impl EventApplier for Account { deleted_at: None, version: event.version, nanoid: nano_id, + created_at, }); } AccountEvent::Updated { is_bot } => { @@ -128,10 +170,14 @@ impl EventApplier for Account { .attach_printable(Self::not_exists(event.id.as_ref()))); } } - AccountEvent::Deleted => { - if let Some(entity) = entity { - entity.deleted_at = Some(DeletedAt::now()); - entity.version = event.version; + AccountEvent::Deactivated => { + if let Some(account) = entity { + if account.deleted_at.is_some() { + return Err(Report::new(KernelError::Internal) + .attach_printable("Account is already deactivated")); + } + account.deleted_at = Some(DeletedAt::now()); + account.version = event.version; } else { return Err(Report::new(KernelError::Internal) .attach_printable(Self::not_exists(event.id.as_ref()))); @@ -145,10 +191,11 @@ impl EventApplier for Account { #[cfg(test)] mod test { use crate::entity::{ - Account, AccountId, AccountIsBot, AccountName, AccountPrivateKey, AccountPublicKey, - EventEnvelope, EventVersion, Nanoid, + Account, AccountEvent, AccountId, AccountIsBot, AccountName, AccountPrivateKey, + AccountPublicKey, AuthAccountId, CreatedAt, EventEnvelope, EventId, EventVersion, Nanoid, }; use crate::event::EventApplier; + use crate::KernelError; use uuid::Uuid; #[test] @@ -159,17 +206,17 @@ mod test { let public_key = AccountPublicKey::new("public_key".to_string()); let is_bot = AccountIsBot::new(false); let nano_id = Nanoid::default(); - let event = Account::create( - id.clone(), - name.clone(), - private_key.clone(), - public_key.clone(), - is_bot.clone(), - nano_id.clone(), - ); + let event = AccountEvent::Created { + name: name.clone(), + private_key: private_key.clone(), + public_key: public_key.clone(), + is_bot: is_bot.clone(), + nanoid: nano_id.clone(), + 
auth_account_id: AuthAccountId::new(Uuid::now_v7()), + }; let envelope = EventEnvelope::new( - event.id().clone(), - event.event().clone(), + EventId::from(id.clone()), + event, EventVersion::new(Uuid::now_v7()), ); let mut account = None; @@ -185,7 +232,6 @@ mod test { } #[test] - #[should_panic] fn create_exist_account() { let id = AccountId::new(Uuid::now_v7()); let name = AccountName::new("test"); @@ -202,22 +248,24 @@ mod test { None, EventVersion::new(Uuid::now_v7()), nano_id.clone(), + CreatedAt::now(), ); - let event = Account::create( - id.clone(), - name.clone(), - private_key.clone(), - public_key.clone(), - is_bot.clone(), - nano_id.clone(), - ); + let event = AccountEvent::Created { + name: name.clone(), + private_key: private_key.clone(), + public_key: public_key.clone(), + is_bot: is_bot.clone(), + nanoid: nano_id.clone(), + auth_account_id: AuthAccountId::new(Uuid::now_v7()), + }; let envelope = EventEnvelope::new( - event.id().clone(), - event.event().clone(), + EventId::from(id.clone()), + event, EventVersion::new(Uuid::now_v7()), ); let mut account = Some(account); - Account::apply(&mut account, envelope).unwrap(); + assert!(Account::apply(&mut account, envelope) + .is_err_and(|e| e.current_context() == &KernelError::Internal)); } #[test] @@ -237,8 +285,10 @@ mod test { None, EventVersion::new(Uuid::now_v7()), nano_id.clone(), + CreatedAt::now(), ); - let event = Account::update(id.clone(), AccountIsBot::new(true)); + let version = account.version().clone(); + let event = Account::update(id.clone(), AccountIsBot::new(true), version); let envelope = EventEnvelope::new( event.id().clone(), event.event().clone(), @@ -253,21 +303,22 @@ mod test { } #[test] - #[should_panic] fn update_not_exist_account() { let id = AccountId::new(Uuid::now_v7()); - let event = Account::update(id.clone(), AccountIsBot::new(true)); + let version = EventVersion::new(Uuid::now_v7()); + let event = Account::update(id.clone(), AccountIsBot::new(true), version); let envelope 
= EventEnvelope::new( event.id().clone(), event.event().clone(), EventVersion::new(Uuid::now_v7()), ); let mut account = None; - Account::apply(&mut account, envelope).unwrap(); + assert!(Account::apply(&mut account, envelope) + .is_err_and(|e| e.current_context() == &KernelError::Internal)); } #[test] - fn delete_account() { + fn deactivate_account() { let id = AccountId::new(Uuid::now_v7()); let name = AccountName::new("test"); let private_key = AccountPrivateKey::new("private_key".to_string()); @@ -283,8 +334,10 @@ mod test { None, EventVersion::new(Uuid::now_v7()), nano_id.clone(), + CreatedAt::now(), ); - let event = Account::delete(id.clone()); + let version = account.version().clone(); + let event = Account::deactivate(id.clone(), version); let envelope = EventEnvelope::new( event.id().clone(), event.event().clone(), @@ -292,23 +345,66 @@ mod test { ); let mut account = Some(account); Account::apply(&mut account, envelope.clone()).unwrap(); + assert!(account.is_some()); let account = account.unwrap(); assert!(account.deleted_at().is_some()); - assert_eq!(account.version(), &envelope.version); - assert_eq!(account.nanoid(), &nano_id); } #[test] - #[should_panic] - fn delete_not_exist_account() { + fn deactivate_already_deactivated_account() { + let id = AccountId::new(Uuid::now_v7()); + let name = AccountName::new("test"); + let private_key = AccountPrivateKey::new("private_key".to_string()); + let public_key = AccountPublicKey::new("public_key".to_string()); + let is_bot = AccountIsBot::new(false); + let nano_id = Nanoid::default(); + let account = Account::new( + id.clone(), + name.clone(), + private_key.clone(), + public_key.clone(), + is_bot.clone(), + None, + EventVersion::new(Uuid::now_v7()), + nano_id.clone(), + CreatedAt::now(), + ); + let version = account.version().clone(); + let event = Account::deactivate(id.clone(), version); + let envelope = EventEnvelope::new( + event.id().clone(), + event.event().clone(), + EventVersion::new(Uuid::now_v7()), + 
); + let mut account = Some(account); + Account::apply(&mut account, envelope).unwrap(); + assert!(account.is_some()); + assert!(account.as_ref().unwrap().deleted_at().is_some()); + + // Second deactivation should fail + let version2 = account.as_ref().unwrap().version().clone(); + let event2 = Account::deactivate(id.clone(), version2); + let envelope2 = EventEnvelope::new( + event2.id().clone(), + event2.event().clone(), + EventVersion::new(Uuid::now_v7()), + ); + assert!(Account::apply(&mut account, envelope2) + .is_err_and(|e| e.current_context() == &KernelError::Internal)); + } + + #[test] + fn deactivate_not_exist_account() { let id = AccountId::new(Uuid::now_v7()); - let event = Account::delete(id.clone()); + let version = EventVersion::new(Uuid::now_v7()); + let event = Account::deactivate(id.clone(), version); let envelope = EventEnvelope::new( event.id().clone(), event.event().clone(), EventVersion::new(Uuid::now_v7()), ); let mut account = None; - Account::apply(&mut account, envelope).unwrap(); + assert!(Account::apply(&mut account, envelope) + .is_err_and(|e| e.current_context() == &KernelError::Internal)); } } diff --git a/kernel/src/entity/account/id.rs b/kernel/src/entity/account/id.rs index 85cd4f4..6c29b2e 100644 --- a/kernel/src/entity/account/id.rs +++ b/kernel/src/entity/account/id.rs @@ -3,9 +3,28 @@ use serde::{Deserialize, Serialize}; use uuid::Uuid; use vodca::{AsRefln, Fromln, Newln}; -#[derive(Debug, Clone, PartialEq, Eq, Hash, Fromln, AsRefln, Newln, Serialize, Deserialize)] +#[derive( + Debug, + Clone, + PartialEq, + Eq, + Hash, + Ord, + PartialOrd, + Fromln, + AsRefln, + Newln, + Serialize, + Deserialize, +)] pub struct AccountId(Uuid); +impl Default for AccountId { + fn default() -> Self { + AccountId(Uuid::now_v7()) + } +} + impl From for EventId { fn from(account_id: AccountId) -> Self { EventId::new(account_id.0) diff --git a/kernel/src/entity/auth_account.rs b/kernel/src/entity/auth_account.rs new file mode 100644 index 
0000000..6bb8765 --- /dev/null +++ b/kernel/src/entity/auth_account.rs @@ -0,0 +1,109 @@ +mod client_id; +mod id; + +pub use self::client_id::*; +pub use self::id::*; +use crate::entity::{ + AuthHostId, CommandEnvelope, EventEnvelope, EventId, EventVersion, KnownEventVersion, +}; +use crate::event::EventApplier; +use crate::KernelError; +use destructure::Destructure; +use error_stack::Report; +use serde::Deserialize; +use serde::Serialize; +use vodca::{Nameln, Newln, References}; + +#[derive( + Debug, Clone, Hash, Eq, PartialEq, References, Newln, Serialize, Deserialize, Destructure, +)] +pub struct AuthAccount { + id: AuthAccountId, + host: AuthHostId, + client_id: AuthAccountClientId, + version: EventVersion, +} + +#[derive(Debug, Clone, Eq, PartialEq, Nameln, Serialize, Deserialize)] +#[serde(tag = "type", rename_all_fields = "snake_case")] +#[vodca(prefix = "auth_account", snake_case)] +pub enum AuthAccountEvent { + Created { + host: AuthHostId, + client_id: AuthAccountClientId, + }, +} + +impl AuthAccount { + pub fn create( + id: AuthAccountId, + host: AuthHostId, + client_id: AuthAccountClientId, + ) -> CommandEnvelope { + let event = AuthAccountEvent::Created { host, client_id }; + CommandEnvelope::new( + EventId::from(id), + event.name(), + event, + Some(KnownEventVersion::Nothing), + ) + } +} + +impl EventApplier for AuthAccount { + type Event = AuthAccountEvent; + const ENTITY_NAME: &'static str = "AuthAccount"; + + fn apply( + entity: &mut Option, + event: EventEnvelope, + ) -> error_stack::Result<(), KernelError> + where + Self: Sized, + { + match event.event { + AuthAccountEvent::Created { host, client_id } => { + if let Some(entity) = entity { + return Err(Report::new(KernelError::Internal) + .attach_printable(Self::already_exists(entity))); + } + *entity = Some(AuthAccount { + id: AuthAccountId::new(event.id), + host, + client_id, + version: event.version, + }); + } + } + Ok(()) + } +} + +#[cfg(test)] +mod test { + use crate::entity::{ + AuthAccount, 
AuthAccountClientId, AuthAccountId, AuthHostId, EventEnvelope, EventVersion, + }; + use crate::event::EventApplier; + use uuid::Uuid; + + #[test] + fn create_auth_account() { + let id = AuthAccountId::new(Uuid::now_v7()); + let host = AuthHostId::new(Uuid::now_v7()); + let client_id = AuthAccountClientId::new(Uuid::now_v7()); + let create_account = AuthAccount::create(id.clone(), host.clone(), client_id.clone()); + let envelope = EventEnvelope::new( + create_account.id().clone(), + create_account.event().clone(), + EventVersion::new(Uuid::now_v7()), + ); + let mut account = None; + AuthAccount::apply(&mut account, envelope).unwrap(); + assert!(account.is_some()); + let account = account.unwrap(); + assert_eq!(account.id(), &id); + assert_eq!(account.host(), &host); + assert_eq!(account.client_id(), &client_id); + } +} diff --git a/kernel/src/entity/stellar_account/client_id.rs b/kernel/src/entity/auth_account/client_id.rs similarity index 79% rename from kernel/src/entity/stellar_account/client_id.rs rename to kernel/src/entity/auth_account/client_id.rs index 3baa401..e197180 100644 --- a/kernel/src/entity/stellar_account/client_id.rs +++ b/kernel/src/entity/auth_account/client_id.rs @@ -2,4 +2,4 @@ use serde::{Deserialize, Serialize}; use vodca::{AsRefln, Fromln, Newln}; #[derive(Debug, Clone, PartialEq, Eq, Hash, Fromln, AsRefln, Newln, Serialize, Deserialize)] -pub struct StellarAccountClientId(String); +pub struct AuthAccountClientId(String); diff --git a/kernel/src/entity/auth_account/id.rs b/kernel/src/entity/auth_account/id.rs new file mode 100644 index 0000000..985939d --- /dev/null +++ b/kernel/src/entity/auth_account/id.rs @@ -0,0 +1,19 @@ +use crate::entity::{AuthAccount, AuthAccountEvent, EventId}; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; +use vodca::{AsRefln, Fromln, Newln}; + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Fromln, AsRefln, Newln, Serialize, Deserialize)] +pub struct AuthAccountId(Uuid); + +impl Default for AuthAccountId { + 
fn default() -> Self { + AuthAccountId(Uuid::now_v7()) + } +} + +impl From for EventId { + fn from(auth_account_id: AuthAccountId) -> Self { + EventId::new(auth_account_id.0) + } +} diff --git a/kernel/src/entity/auth_host.rs b/kernel/src/entity/auth_host.rs new file mode 100644 index 0000000..551c1f3 --- /dev/null +++ b/kernel/src/entity/auth_host.rs @@ -0,0 +1,15 @@ +mod id; +mod url; + +pub use self::{id::*, url::*}; +use destructure::Destructure; +use serde::{Deserialize, Serialize}; +use vodca::{Newln, References}; + +#[derive( + Debug, Clone, PartialEq, Eq, Hash, References, Newln, Serialize, Deserialize, Destructure, +)] +pub struct AuthHost { + id: AuthHostId, + url: AuthHostUrl, +} diff --git a/kernel/src/entity/stellar_host/id.rs b/kernel/src/entity/auth_host/id.rs similarity index 58% rename from kernel/src/entity/stellar_host/id.rs rename to kernel/src/entity/auth_host/id.rs index c485211..cf00636 100644 --- a/kernel/src/entity/stellar_host/id.rs +++ b/kernel/src/entity/auth_host/id.rs @@ -3,4 +3,10 @@ use uuid::Uuid; use vodca::{AsRefln, Fromln, Newln}; #[derive(Debug, Clone, PartialEq, Eq, Hash, Fromln, AsRefln, Newln, Serialize, Deserialize)] -pub struct StellarHostId(Uuid); +pub struct AuthHostId(Uuid); + +impl Default for AuthHostId { + fn default() -> Self { + AuthHostId(Uuid::now_v7()) + } +} diff --git a/kernel/src/entity/stellar_host/url.rs b/kernel/src/entity/auth_host/url.rs similarity index 82% rename from kernel/src/entity/stellar_host/url.rs rename to kernel/src/entity/auth_host/url.rs index 00b2cee..00e8c22 100644 --- a/kernel/src/entity/stellar_host/url.rs +++ b/kernel/src/entity/auth_host/url.rs @@ -2,4 +2,4 @@ use serde::{Deserialize, Serialize}; use vodca::{AsRefln, Fromln, Newln}; #[derive(Debug, Clone, PartialEq, Eq, Hash, Fromln, AsRefln, Newln, Serialize, Deserialize)] -pub struct StellarHostUrl(String); +pub struct AuthHostUrl(String); diff --git a/kernel/src/entity/common/created_at.rs b/kernel/src/entity/common/created_at.rs 
index 8044f0d..e15dc06 100644 --- a/kernel/src/entity/common/created_at.rs +++ b/kernel/src/entity/common/created_at.rs @@ -1,6 +1,9 @@ +use crate::KernelError; +use error_stack::{Report, ResultExt}; use serde::{Deserialize, Deserializer, Serialize}; use std::marker::PhantomData; use time::OffsetDateTime; +use uuid::Timestamp; use vodca::{AsRefln, Fromln}; #[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Fromln, AsRefln)] @@ -17,6 +20,20 @@ impl CreatedAt { } } +impl TryFrom for CreatedAt { + type Error = Report; + fn try_from(value: Timestamp) -> Result { + let (seconds, nanos) = value.to_unix(); + let datetime = OffsetDateTime::from_unix_timestamp(seconds as i64) + .change_context_lazy(|| KernelError::Internal) + .attach_printable_lazy(|| format!("Invalid seconds: {seconds}"))? + .replace_nanosecond(nanos) + .change_context_lazy(|| KernelError::Internal) + .attach_printable_lazy(|| format!("Invalid nanos: {nanos}"))?; + Ok(Self::new(datetime)) + } +} + impl Serialize for CreatedAt { fn serialize(&self, serializer: S) -> Result where diff --git a/kernel/src/entity/event/version.rs b/kernel/src/entity/event/version.rs index 3373ccd..66b760f 100644 --- a/kernel/src/entity/event/version.rs +++ b/kernel/src/entity/event/version.rs @@ -12,6 +12,12 @@ impl EventVersion { } } +impl Default for EventVersion { + fn default() -> Self { + Self(Uuid::now_v7(), PhantomData) + } +} + impl Serialize for EventVersion { fn serialize(&self, serializer: S) -> Result where diff --git a/kernel/src/entity/follow.rs b/kernel/src/entity/follow.rs index 5afd479..d979290 100644 --- a/kernel/src/entity/follow.rs +++ b/kernel/src/entity/follow.rs @@ -4,13 +4,10 @@ mod target_id; pub use self::{approved_at::*, id::*, target_id::*}; -use crate::entity::{CommandEnvelope, EventEnvelope, EventId}; -use crate::event::EventApplier; use crate::KernelError; -use error_stack::{Report, ResultExt}; +use error_stack::ResultExt; use serde::{Deserialize, Serialize}; -use time::OffsetDateTime; 
-use vodca::{Nameln, References}; +use vodca::References; #[derive(Debug, Clone, Hash, Eq, PartialEq, References, Serialize, Deserialize)] pub struct Follow { @@ -43,175 +40,3 @@ impl Follow { } } } - -#[derive(Debug, Clone, Eq, PartialEq, Nameln, Serialize, Deserialize)] -#[serde(tag = "type", rename_all = "snake_case")] -#[vodca(prefix = "follow", snake_case)] -pub enum FollowEvent { - Created { - source: FollowTargetId, - destination: FollowTargetId, - }, - Approved, - Deleted, -} - -impl Follow { - pub fn create( - id: FollowId, - source: FollowTargetId, - destination: FollowTargetId, - ) -> error_stack::Result, KernelError> { - match (source, destination) { - (source @ FollowTargetId::Remote(_), destination @ FollowTargetId::Remote(_)) => { - Err(KernelError::Internal).attach_printable(format!( - "Cannot create remote to remote follow data. source: {:?}, destination: {:?}", - source, destination - )) - } - (source, destination) => { - let event = FollowEvent::Created { - source, - destination, - }; - Ok(CommandEnvelope::new( - EventId::from(id), - event.name(), - event, - None, - )) - } - } - } - - pub fn approve(id: FollowId) -> CommandEnvelope { - let event = FollowEvent::Approved; - CommandEnvelope::new(EventId::from(id), event.name(), event, None) - } - - pub fn delete(id: FollowId) -> CommandEnvelope { - let event = FollowEvent::Deleted; - CommandEnvelope::new(EventId::from(id), event.name(), event, None) - } -} - -impl EventApplier for Follow { - type Event = FollowEvent; - const ENTITY_NAME: &'static str = "Follow"; - - fn apply( - entity: &mut Option, - event: EventEnvelope, - ) -> error_stack::Result<(), KernelError> - where - Self: Sized, - { - match event.event { - FollowEvent::Created { - source, - destination, - } => { - if let Some(entity) = entity { - return Err(KernelError::Internal) - .attach_printable(Self::already_exists(entity)); - } - *entity = Some(Follow::new( - FollowId::new(event.id), - source, - destination, - None, - )?); - } - 
FollowEvent::Approved => { - if let Some(entity) = entity { - let timestamp = event.id.as_ref().get_timestamp().ok_or_else(|| { - Report::new(KernelError::Internal) - .attach_printable("Failed to get timestamp from uuid") - })?; - let (seconds, nanos) = timestamp.to_unix(); - let datetime = OffsetDateTime::from_unix_timestamp_nanos( - i128::from(nanos) + i128::from(seconds * 1_000_000_000), - ) - .change_context_lazy(|| KernelError::Internal)?; - entity.approved_at = Some(FollowApprovedAt::new(datetime)); - } else { - return Err(KernelError::Internal) - .attach_printable(Self::not_exists(event.id.as_ref())); - } - } - FollowEvent::Deleted => { - *entity = None; - } - } - Ok(()) - } -} - -#[cfg(test)] -mod test { - use crate::entity::{ - AccountId, EventEnvelope, EventVersion, Follow, FollowId, FollowTargetId, RemoteAccountId, - }; - use crate::event::EventApplier; - use uuid::Uuid; - - #[test] - fn create_event() { - let id = FollowId::new(Uuid::now_v7()); - let source = FollowTargetId::from(AccountId::new(Uuid::now_v7())); - let destination = FollowTargetId::from(RemoteAccountId::new(Uuid::now_v7())); - let event = Follow::create(id.clone(), source.clone(), destination.clone()).unwrap(); - let envelope = EventEnvelope::new( - event.id().clone(), - event.event().clone(), - EventVersion::new(Uuid::now_v7()), - ); - let mut entity = None; - Follow::apply(&mut entity, envelope).unwrap(); - assert!(entity.is_some()); - let entity = entity.unwrap(); - assert_eq!(entity.id(), &id); - assert_eq!(entity.source(), &source); - assert_eq!(entity.destination(), &destination); - assert!(entity.approved_at().is_none()); - } - - #[test] - fn update_event() { - let id = FollowId::new(Uuid::now_v7()); - let source = FollowTargetId::from(AccountId::new(Uuid::now_v7())); - let destination = FollowTargetId::from(RemoteAccountId::new(Uuid::now_v7())); - let follow = Follow::new(id.clone(), source.clone(), destination.clone(), None).unwrap(); - let event = Follow::approve(id.clone()); - 
let envelope = EventEnvelope::new( - event.id().clone(), - event.event().clone(), - EventVersion::new(Uuid::now_v7()), - ); - let mut entity = Some(follow); - Follow::apply(&mut entity, envelope).unwrap(); - assert!(entity.is_some()); - let entity = entity.unwrap(); - assert_eq!(entity.id(), &id); - assert_eq!(entity.source(), &source); - assert_eq!(entity.destination(), &destination); - assert!(entity.approved_at().is_some()); - } - - #[test] - fn delete_event() { - let id = FollowId::new(Uuid::now_v7()); - let source = FollowTargetId::from(AccountId::new(Uuid::now_v7())); - let destination = FollowTargetId::from(RemoteAccountId::new(Uuid::now_v7())); - let follow = Follow::new(id.clone(), source.clone(), destination.clone(), None).unwrap(); - let event = Follow::delete(id.clone()); - let envelope = EventEnvelope::new( - event.id().clone(), - event.event().clone(), - EventVersion::new(Uuid::now_v7()), - ); - let mut entity = Some(follow); - Follow::apply(&mut entity, envelope).unwrap(); - assert!(entity.is_none()); - } -} diff --git a/kernel/src/entity/follow/id.rs b/kernel/src/entity/follow/id.rs index aea4bdf..f2d256a 100644 --- a/kernel/src/entity/follow/id.rs +++ b/kernel/src/entity/follow/id.rs @@ -1,13 +1,6 @@ -use crate::entity::{EventId, Follow, FollowEvent}; use serde::{Deserialize, Serialize}; use uuid::Uuid; use vodca::{AsRefln, Fromln, Newln}; #[derive(Debug, Clone, PartialEq, Eq, Hash, Fromln, AsRefln, Newln, Serialize, Deserialize)] pub struct FollowId(Uuid); - -impl From for EventId { - fn from(value: FollowId) -> Self { - EventId::new(value.0) - } -} diff --git a/kernel/src/entity/metadata.rs b/kernel/src/entity/metadata.rs index a41b68c..a0fc92d 100644 --- a/kernel/src/entity/metadata.rs +++ b/kernel/src/entity/metadata.rs @@ -25,7 +25,7 @@ pub struct Metadata { nanoid: Nanoid, } -#[derive(Debug, Clone, Nameln, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq, Nameln, Serialize, Deserialize)] #[serde(tag = "type", rename_all = 
"snake_case")] #[vodca(prefix = "metadata", snake_case)] pub enum MetadataEvent { @@ -68,14 +68,28 @@ impl Metadata { id: MetadataId, label: MetadataLabel, content: MetadataContent, + current_version: EventVersion, ) -> CommandEnvelope { let event = MetadataEvent::Updated { label, content }; - CommandEnvelope::new(EventId::from(id), event.name(), event, None) + CommandEnvelope::new( + EventId::from(id), + event.name(), + event, + Some(KnownEventVersion::Prev(current_version)), + ) } - pub fn delete(id: MetadataId) -> CommandEnvelope { + pub fn delete( + id: MetadataId, + current_version: EventVersion, + ) -> CommandEnvelope { let event = MetadataEvent::Deleted; - CommandEnvelope::new(EventId::from(id), event.name(), event, None) + CommandEnvelope::new( + EventId::from(id), + event.name(), + event, + Some(KnownEventVersion::Prev(current_version)), + ) } } @@ -188,7 +202,9 @@ mod test { ); let label = MetadataLabel::new("new_label".to_string()); let content = MetadataContent::new("new_content".to_string()); - let update_event = Metadata::update(id.clone(), label.clone(), content.clone()); + let current_version = metadata.version().clone(); + let update_event = + Metadata::update(id.clone(), label.clone(), content.clone(), current_version); let version = EventVersion::new(Uuid::now_v7()); let envelope = EventEnvelope::new( update_event.id().clone(), @@ -222,7 +238,8 @@ mod test { EventVersion::new(Uuid::now_v7()), nano_id.clone(), ); - let delete_event = Metadata::delete(id.clone()); + let current_version = metadata.version().clone(); + let delete_event = Metadata::delete(id.clone(), current_version); let envelope = EventEnvelope::new( delete_event.id().clone(), delete_event.event().clone(), diff --git a/kernel/src/entity/profile.rs b/kernel/src/entity/profile.rs index db8ed2a..4eaa879 100644 --- a/kernel/src/entity/profile.rs +++ b/kernel/src/entity/profile.rs @@ -31,7 +31,7 @@ pub struct Profile { nanoid: Nanoid, } -#[derive(Debug, Clone, Nameln, Serialize, 
Deserialize)] +#[derive(Debug, Clone, PartialEq, Nameln, Serialize, Deserialize)] #[serde(tag = "type", rename_all = "snake_case")] #[vodca(prefix = "profile", snake_case)] pub enum ProfileEvent { @@ -83,6 +83,7 @@ impl Profile { summary: Option, icon: Option, banner: Option, + current_version: EventVersion, ) -> CommandEnvelope { let event = ProfileEvent::Updated { display_name, @@ -90,7 +91,12 @@ impl Profile { icon, banner, }; - CommandEnvelope::new(EventId::from(id), event.name(), event, None) + CommandEnvelope::new( + EventId::from(id), + event.name(), + event, + Some(KnownEventVersion::Prev(current_version)), + ) } } @@ -216,12 +222,14 @@ mod test { let summary = ProfileSummary::new("summary".to_string()); let icon = ImageId::new(Uuid::now_v7()); let banner = ImageId::new(Uuid::now_v7()); + let current_version = profile.version().clone(); let update_event = Profile::update( id.clone(), Some(display_name.clone()), Some(summary.clone()), Some(icon.clone()), Some(banner.clone()), + current_version, ); let version = EventVersion::new(Uuid::now_v7()); let envelope = EventEnvelope::new( diff --git a/kernel/src/entity/remote_account.rs b/kernel/src/entity/remote_account.rs index f2a986a..f4f1a53 100644 --- a/kernel/src/entity/remote_account.rs +++ b/kernel/src/entity/remote_account.rs @@ -6,12 +6,8 @@ pub use self::acct::*; pub use self::id::*; pub use self::url::*; use crate::entity::image::ImageId; -use crate::entity::{CommandEnvelope, EventEnvelope, EventId, KnownEventVersion}; -use crate::event::EventApplier; -use crate::KernelError; -use error_stack::Report; use serde::{Deserialize, Serialize}; -use vodca::{Nameln, Newln, References}; +use vodca::{Newln, References}; #[derive(Debug, Clone, Eq, PartialEq, References, Newln, Serialize, Deserialize)] pub struct RemoteAccount { @@ -20,159 +16,3 @@ pub struct RemoteAccount { url: RemoteAccountUrl, icon_id: Option, } - -#[derive(Debug, Clone, Eq, PartialEq, Nameln, Serialize, Deserialize)] -#[serde(tag = "type", 
rename_all_fields = "snake_case")] -#[vodca(prefix = "remote_account", snake_case)] -pub enum RemoteAccountEvent { - Created { - acct: RemoteAccountAcct, - url: RemoteAccountUrl, - icon_id: Option, - }, - Updated { - icon_id: Option, - }, - Deleted, -} - -impl RemoteAccount { - pub fn create( - id: RemoteAccountId, - acct: RemoteAccountAcct, - url: RemoteAccountUrl, - icon_id: Option, - ) -> CommandEnvelope { - let event = RemoteAccountEvent::Created { acct, url, icon_id }; - CommandEnvelope::new( - EventId::from(id), - event.name(), - event, - Some(KnownEventVersion::Nothing), - ) - } - - pub fn update( - id: RemoteAccountId, - icon_id: Option, - ) -> CommandEnvelope { - let event = RemoteAccountEvent::Updated { icon_id }; - CommandEnvelope::new(EventId::from(id), event.name(), event, None) - } - - pub fn delete(id: RemoteAccountId) -> CommandEnvelope { - let event = RemoteAccountEvent::Deleted; - CommandEnvelope::new(EventId::from(id), event.name(), event, None) - } -} - -impl EventApplier for RemoteAccount { - type Event = RemoteAccountEvent; - const ENTITY_NAME: &'static str = "RemoteAccount"; - - fn apply( - entity: &mut Option, - event: EventEnvelope, - ) -> error_stack::Result<(), KernelError> - where - Self: Sized, - { - match event.event { - RemoteAccountEvent::Created { acct, url, icon_id } => { - if let Some(entity) = entity { - return Err(Report::new(KernelError::Internal) - .attach_printable(Self::already_exists(entity))); - } - *entity = Some(RemoteAccount { - id: RemoteAccountId::new(event.id), - acct, - url, - icon_id, - }); - } - RemoteAccountEvent::Updated { icon_id } => { - if let Some(entity) = entity { - entity.icon_id = icon_id; - } else { - return Err(Report::new(KernelError::Internal) - .attach_printable(Self::not_exists(event.id.as_ref()))); - } - } - RemoteAccountEvent::Deleted => { - *entity = None; - } - } - Ok(()) - } -} - -#[cfg(test)] -mod test { - use crate::entity::{ - EventEnvelope, EventVersion, ImageId, RemoteAccount, 
RemoteAccountAcct, RemoteAccountId, - RemoteAccountUrl, - }; - use crate::event::EventApplier; - use uuid::Uuid; - - #[test] - fn create_remote_account() { - let id = RemoteAccountId::new(Uuid::now_v7()); - let acct = RemoteAccountAcct::new("acct:".to_string()); - let url = RemoteAccountUrl::new("https://example.com".to_string()); - let create = RemoteAccount::create(id.clone(), acct.clone(), url.clone(), None); - let envelope = EventEnvelope::new( - create.id().clone(), - create.event().clone(), - EventVersion::new(Uuid::now_v7()), - ); - let mut entity = None; - RemoteAccount::apply(&mut entity, envelope).unwrap(); - assert!(entity.is_some()); - let entity = entity.unwrap(); - assert_eq!(entity.id(), &id); - assert_eq!(entity.acct(), &acct); - assert_eq!(entity.url(), &url); - assert!(entity.icon_id().is_none()); - } - - #[test] - fn update_remote_account() { - let id = RemoteAccountId::new(Uuid::now_v7()); - let acct = RemoteAccountAcct::new("acct:".to_string()); - let url = RemoteAccountUrl::new("https://example.com".to_string()); - let remote_account = RemoteAccount::new(id.clone(), acct.clone(), url.clone(), None); - let new_icon_id = Some(ImageId::new(Uuid::now_v7())); - let update = RemoteAccount::update(id.clone(), new_icon_id); - let envelope = EventEnvelope::new( - update.id().clone(), - update.event().clone(), - EventVersion::new(Uuid::now_v7()), - ); - let mut entity = Some(remote_account); - RemoteAccount::apply(&mut entity, envelope).unwrap(); - assert!(entity.is_some()); - let entity = entity.unwrap(); - assert_eq!(entity.id(), &id); - assert_eq!(entity.acct(), &acct); - assert_eq!(entity.url(), &url); - assert!(entity.icon_id().is_some()); - } - - #[test] - fn delete_remote_account() { - let id = RemoteAccountId::new(Uuid::now_v7()); - let acct = RemoteAccountAcct::new("acct:".to_string()); - let url = RemoteAccountUrl::new("https://example.com".to_string()); - let remote_account = RemoteAccount::new(id.clone(), acct.clone(), url.clone(), None); - 
let delete = RemoteAccount::delete(id.clone()); - let envelope = EventEnvelope::new( - delete.id().clone(), - delete.event().clone(), - EventVersion::new(Uuid::now_v7()), - ); - let mut entity = Some(remote_account); - RemoteAccount::apply(&mut entity, envelope).unwrap(); - assert!(entity.is_none()); - } -} diff --git a/kernel/src/entity/remote_account/id.rs b/kernel/src/entity/remote_account/id.rs index 3e2232c..329f3d9 100644 --- a/kernel/src/entity/remote_account/id.rs +++ b/kernel/src/entity/remote_account/id.rs @@ -1,13 +1,6 @@ -use crate::entity::{EventId, RemoteAccount, RemoteAccountEvent}; use serde::{Deserialize, Serialize}; use uuid::Uuid; use vodca::{AsRefln, Fromln, Newln}; #[derive(Debug, Clone, PartialEq, Eq, Hash, Fromln, AsRefln, Newln, Serialize, Deserialize)] pub struct RemoteAccountId(Uuid); - -impl From for EventId { - fn from(id: RemoteAccountId) -> Self { - EventId::new(id.0) - } -} diff --git a/kernel/src/entity/stellar_account.rs b/kernel/src/entity/stellar_account.rs deleted file mode 100644 index f9c5c3a..0000000 --- a/kernel/src/entity/stellar_account.rs +++ /dev/null @@ -1,250 +0,0 @@ -mod access_token; -mod client_id; -mod id; -mod refresh_token; - -pub use self::access_token::*; -pub use self::client_id::*; -pub use self::id::*; -pub use self::refresh_token::*; -use crate::entity::{ - CommandEnvelope, EventEnvelope, EventId, EventVersion, KnownEventVersion, StellarHostId, -}; -use crate::event::EventApplier; -use crate::KernelError; -use destructure::Destructure; -use error_stack::Report; -use serde::Deserialize; -use serde::Serialize; -use vodca::{Nameln, Newln, References}; - -#[derive( - Debug, Clone, Hash, Eq, PartialEq, References, Newln, Serialize, Deserialize, Destructure, -)] -pub struct StellarAccount { - id: StellarAccountId, - host: StellarHostId, - client_id: StellarAccountClientId, - access_token: StellarAccountAccessToken, - refresh_token: StellarAccountRefreshToken, - version: EventVersion, -} - -#[derive(Debug, Clone, 
Eq, PartialEq, Nameln, Serialize, Deserialize)] -#[serde(tag = "type", rename_all_fields = "snake_case")] -#[vodca(prefix = "stellar_account", snake_case)] -pub enum StellarAccountEvent { - Created { - host: StellarHostId, - client_id: StellarAccountClientId, - access_token: StellarAccountAccessToken, - refresh_token: StellarAccountRefreshToken, - }, - Updated { - access_token: StellarAccountAccessToken, - refresh_token: StellarAccountRefreshToken, - }, - Deleted, -} - -impl StellarAccount { - pub fn create( - id: StellarAccountId, - host: StellarHostId, - client_id: StellarAccountClientId, - access_token: StellarAccountAccessToken, - refresh_token: StellarAccountRefreshToken, - ) -> CommandEnvelope { - let event = StellarAccountEvent::Created { - host, - client_id, - access_token, - refresh_token, - }; - CommandEnvelope::new( - EventId::from(id), - event.name(), - event, - Some(KnownEventVersion::Nothing), - ) - } - - pub fn update( - id: StellarAccountId, - access_token: StellarAccountAccessToken, - refresh_token: StellarAccountRefreshToken, - ) -> CommandEnvelope { - let event = StellarAccountEvent::Updated { - access_token, - refresh_token, - }; - CommandEnvelope::new(EventId::from(id), event.name(), event, None) - } - - pub fn delete(id: StellarAccountId) -> CommandEnvelope { - let event = StellarAccountEvent::Deleted; - CommandEnvelope::new(EventId::from(id), event.name(), event, None) - } -} - -impl EventApplier for StellarAccount { - type Event = StellarAccountEvent; - const ENTITY_NAME: &'static str = "StellarAccount"; - - fn apply( - entity: &mut Option, - event: EventEnvelope, - ) -> error_stack::Result<(), KernelError> - where - Self: Sized, - { - match event.event { - StellarAccountEvent::Created { - host, - client_id, - access_token, - refresh_token, - } => { - if let Some(entity) = entity { - return Err(Report::new(KernelError::Internal) - .attach_printable(Self::already_exists(entity))); - } - *entity = Some(StellarAccount { - id: 
StellarAccountId::new(event.id), - host, - client_id, - access_token, - refresh_token, - version: event.version, - }); - } - StellarAccountEvent::Updated { - access_token, - refresh_token, - } => { - if let Some(entity) = entity { - entity.access_token = access_token; - entity.refresh_token = refresh_token; - entity.version = event.version; - } else { - return Err(Report::new(KernelError::Internal) - .attach_printable(Self::not_exists(event.id.as_ref()))); - } - } - StellarAccountEvent::Deleted => { - if entity.is_none() { - return Err(Report::new(KernelError::Internal) - .attach_printable(Self::not_exists(event.id.as_ref()))); - } - *entity = None; - } - } - Ok(()) - } -} - -#[cfg(test)] -mod test { - use crate::entity::{ - EventEnvelope, EventVersion, StellarAccount, StellarAccountAccessToken, - StellarAccountClientId, StellarAccountId, StellarAccountRefreshToken, StellarHostId, - }; - use crate::event::EventApplier; - use uuid::Uuid; - - #[test] - fn create_stellar_account() { - let id = StellarAccountId::new(Uuid::now_v7()); - let host = StellarHostId::new(Uuid::now_v7()); - let client_id = StellarAccountClientId::new(Uuid::now_v7()); - let access_token = StellarAccountAccessToken::new(Uuid::now_v7()); - let refresh_token = StellarAccountRefreshToken::new(Uuid::now_v7()); - let create_account = StellarAccount::create( - id.clone(), - host.clone(), - client_id.clone(), - access_token.clone(), - refresh_token.clone(), - ); - let envelope = EventEnvelope::new( - create_account.id().clone(), - create_account.event().clone(), - EventVersion::new(Uuid::now_v7()), - ); - let mut account = None; - StellarAccount::apply(&mut account, envelope).unwrap(); - assert!(account.is_some()); - let account = account.unwrap(); - assert_eq!(account.id(), &id); - assert_eq!(account.host(), &host); - assert_eq!(account.client_id(), &client_id); - assert_eq!(account.access_token(), &access_token); - assert_eq!(account.refresh_token(), &refresh_token); - } - - #[test] - fn 
update_stellar_account() { - let id = StellarAccountId::new(Uuid::now_v7()); - let host = StellarHostId::new(Uuid::now_v7()); - let client_id = StellarAccountClientId::new(Uuid::now_v7()); - let access_token = StellarAccountAccessToken::new(Uuid::now_v7()); - let refresh_token = StellarAccountRefreshToken::new(Uuid::now_v7()); - let account = StellarAccount::new( - id.clone(), - host.clone(), - client_id.clone(), - access_token.clone(), - refresh_token.clone(), - EventVersion::new(Uuid::now_v7()), - ); - let new_access_token = StellarAccountAccessToken::new(Uuid::now_v7()); - let new_refresh_token = StellarAccountRefreshToken::new(Uuid::now_v7()); - let update_account = StellarAccount::update( - id.clone(), - new_access_token.clone(), - new_refresh_token.clone(), - ); - let version = EventVersion::new(Uuid::now_v7()); - let envelope = EventEnvelope::new( - update_account.id().clone(), - update_account.event().clone(), - version.clone(), - ); - let mut account = Some(account); - StellarAccount::apply(&mut account, envelope).unwrap(); - assert!(account.is_some()); - let account = account.unwrap(); - assert_eq!(account.id(), &id); - assert_eq!(account.host(), &host); - assert_eq!(account.client_id(), &client_id); - assert_eq!(account.access_token(), &new_access_token); - assert_eq!(account.refresh_token(), &new_refresh_token); - assert_eq!(account.version(), &version); - } - - #[test] - fn delete_stellar_account() { - let id = StellarAccountId::new(Uuid::now_v7()); - let host = StellarHostId::new(Uuid::now_v7()); - let client_id = StellarAccountClientId::new(Uuid::now_v7()); - let access_token = StellarAccountAccessToken::new(Uuid::now_v7()); - let refresh_token = StellarAccountRefreshToken::new(Uuid::now_v7()); - let account = StellarAccount::new( - id.clone(), - host.clone(), - client_id.clone(), - access_token.clone(), - refresh_token.clone(), - EventVersion::new(Uuid::now_v7()), - ); - let delete_account = StellarAccount::delete(id.clone()); - let envelope = 
EventEnvelope::new( - delete_account.id().clone(), - delete_account.event().clone(), - EventVersion::new(Uuid::now_v7()), - ); - let mut account = Some(account); - StellarAccount::apply(&mut account, envelope).unwrap(); - assert!(account.is_none()); - } -} diff --git a/kernel/src/entity/stellar_account/access_token.rs b/kernel/src/entity/stellar_account/access_token.rs deleted file mode 100644 index 28da751..0000000 --- a/kernel/src/entity/stellar_account/access_token.rs +++ /dev/null @@ -1,5 +0,0 @@ -use serde::{Deserialize, Serialize}; -use vodca::{AsRefln, Fromln, Newln}; - -#[derive(Debug, Clone, PartialEq, Eq, Hash, Fromln, AsRefln, Newln, Serialize, Deserialize)] -pub struct StellarAccountAccessToken(String); diff --git a/kernel/src/entity/stellar_account/id.rs b/kernel/src/entity/stellar_account/id.rs deleted file mode 100644 index afbf46f..0000000 --- a/kernel/src/entity/stellar_account/id.rs +++ /dev/null @@ -1,13 +0,0 @@ -use crate::entity::{EventId, StellarAccount, StellarAccountEvent}; -use serde::{Deserialize, Serialize}; -use uuid::Uuid; -use vodca::{AsRefln, Fromln, Newln}; - -#[derive(Debug, Clone, PartialEq, Eq, Hash, Fromln, AsRefln, Newln, Serialize, Deserialize)] -pub struct StellarAccountId(Uuid); - -impl From for EventId { - fn from(stellar_account_id: StellarAccountId) -> Self { - EventId::new(stellar_account_id.0) - } -} diff --git a/kernel/src/entity/stellar_account/refresh_token.rs b/kernel/src/entity/stellar_account/refresh_token.rs deleted file mode 100644 index 61ab683..0000000 --- a/kernel/src/entity/stellar_account/refresh_token.rs +++ /dev/null @@ -1,5 +0,0 @@ -use serde::{Deserialize, Serialize}; -use vodca::{AsRefln, Fromln, Newln}; - -#[derive(Debug, Clone, PartialEq, Eq, Hash, Fromln, AsRefln, Newln, Serialize, Deserialize)] -pub struct StellarAccountRefreshToken(String); diff --git a/kernel/src/entity/stellar_host.rs b/kernel/src/entity/stellar_host.rs deleted file mode 100644 index fcf209b..0000000 --- 
a/kernel/src/entity/stellar_host.rs +++ /dev/null @@ -1,12 +0,0 @@ -mod id; -mod url; - -pub use self::{id::*, url::*}; -use serde::{Deserialize, Serialize}; -use vodca::{Newln, References}; - -#[derive(Debug, Clone, PartialEq, Eq, Hash, References, Newln, Serialize, Deserialize)] -pub struct StellarHost { - id: StellarHostId, - url: StellarHostUrl, -} diff --git a/kernel/src/error.rs b/kernel/src/error.rs index 9b3591c..72d56be 100644 --- a/kernel/src/error.rs +++ b/kernel/src/error.rs @@ -1,11 +1,13 @@ use error_stack::Context; use std::fmt::{Display, Formatter}; -#[derive(Debug)] +#[derive(Debug, Eq, PartialEq)] pub enum KernelError { Concurrency, Timeout, Internal, + PermissionDenied, + NotFound, } impl Display for KernelError { @@ -14,6 +16,8 @@ impl Display for KernelError { KernelError::Concurrency => write!(f, "Concurrency error"), KernelError::Timeout => write!(f, "Process Timed out"), KernelError::Internal => write!(f, "Internal kernel error"), + KernelError::PermissionDenied => write!(f, "Permission denied"), + KernelError::NotFound => write!(f, "Resource not found"), } } } diff --git a/kernel/src/event_store.rs b/kernel/src/event_store.rs new file mode 100644 index 0000000..fa7d58c --- /dev/null +++ b/kernel/src/event_store.rs @@ -0,0 +1,9 @@ +mod account; +mod auth_account; +mod metadata; +mod profile; + +pub use self::account::*; +pub use self::auth_account::*; +pub use self::metadata::*; +pub use self::profile::*; diff --git a/kernel/src/event_store/account.rs b/kernel/src/event_store/account.rs new file mode 100644 index 0000000..2e67d4e --- /dev/null +++ b/kernel/src/event_store/account.rs @@ -0,0 +1,38 @@ +use crate::database::{DatabaseConnection, DependOnDatabaseConnection, Executor}; +use crate::entity::{Account, AccountEvent, CommandEnvelope, EventEnvelope, EventId, EventVersion}; +use crate::KernelError; +use std::future::Future; + +pub trait AccountEventStore: Sync + Send + 'static { + type Executor: Executor; + + fn persist( + &self, + 
executor: &mut Self::Executor, + command: &CommandEnvelope, + ) -> impl Future> + Send; + + fn persist_and_transform( + &self, + executor: &mut Self::Executor, + command: CommandEnvelope, + ) -> impl Future, KernelError>> + + Send; + + fn find_by_id( + &self, + executor: &mut Self::Executor, + id: &EventId, + since: Option<&EventVersion>, + ) -> impl Future< + Output = error_stack::Result>, KernelError>, + > + Send; +} + +pub trait DependOnAccountEventStore: Sync + Send + DependOnDatabaseConnection { + type AccountEventStore: AccountEventStore< + Executor = ::Executor, + >; + + fn account_event_store(&self) -> &Self::AccountEventStore; +} diff --git a/kernel/src/event_store/auth_account.rs b/kernel/src/event_store/auth_account.rs new file mode 100644 index 0000000..089d89f --- /dev/null +++ b/kernel/src/event_store/auth_account.rs @@ -0,0 +1,44 @@ +use crate::database::{DatabaseConnection, DependOnDatabaseConnection, Executor}; +use crate::entity::{ + AuthAccount, AuthAccountEvent, CommandEnvelope, EventEnvelope, EventId, EventVersion, +}; +use crate::KernelError; +use std::future::Future; + +pub trait AuthAccountEventStore: Sync + Send + 'static { + type Executor: Executor; + + fn persist( + &self, + executor: &mut Self::Executor, + command: &CommandEnvelope, + ) -> impl Future> + Send; + + fn persist_and_transform( + &self, + executor: &mut Self::Executor, + command: CommandEnvelope, + ) -> impl Future< + Output = error_stack::Result, KernelError>, + > + Send; + + fn find_by_id( + &self, + executor: &mut Self::Executor, + id: &EventId, + since: Option<&EventVersion>, + ) -> impl Future< + Output = error_stack::Result< + Vec>, + KernelError, + >, + > + Send; +} + +pub trait DependOnAuthAccountEventStore: Sync + Send + DependOnDatabaseConnection { + type AuthAccountEventStore: AuthAccountEventStore< + Executor = ::Executor, + >; + + fn auth_account_event_store(&self) -> &Self::AuthAccountEventStore; +} diff --git a/kernel/src/event_store/metadata.rs 
b/kernel/src/event_store/metadata.rs new file mode 100644 index 0000000..95cd152 --- /dev/null +++ b/kernel/src/event_store/metadata.rs @@ -0,0 +1,40 @@ +use crate::database::{DatabaseConnection, DependOnDatabaseConnection, Executor}; +use crate::entity::{ + CommandEnvelope, EventEnvelope, EventId, EventVersion, Metadata, MetadataEvent, +}; +use crate::KernelError; +use std::future::Future; + +pub trait MetadataEventStore: Sync + Send + 'static { + type Executor: Executor; + + fn persist( + &self, + executor: &mut Self::Executor, + command: &CommandEnvelope, + ) -> impl Future> + Send; + + fn persist_and_transform( + &self, + executor: &mut Self::Executor, + command: CommandEnvelope, + ) -> impl Future, KernelError>> + + Send; + + fn find_by_id( + &self, + executor: &mut Self::Executor, + id: &EventId, + since: Option<&EventVersion>, + ) -> impl Future< + Output = error_stack::Result>, KernelError>, + > + Send; +} + +pub trait DependOnMetadataEventStore: Sync + Send + DependOnDatabaseConnection { + type MetadataEventStore: MetadataEventStore< + Executor = ::Executor, + >; + + fn metadata_event_store(&self) -> &Self::MetadataEventStore; +} diff --git a/kernel/src/event_store/profile.rs b/kernel/src/event_store/profile.rs new file mode 100644 index 0000000..5e32323 --- /dev/null +++ b/kernel/src/event_store/profile.rs @@ -0,0 +1,38 @@ +use crate::database::{DatabaseConnection, DependOnDatabaseConnection, Executor}; +use crate::entity::{CommandEnvelope, EventEnvelope, EventId, EventVersion, Profile, ProfileEvent}; +use crate::KernelError; +use std::future::Future; + +pub trait ProfileEventStore: Sync + Send + 'static { + type Executor: Executor; + + fn persist( + &self, + executor: &mut Self::Executor, + command: &CommandEnvelope, + ) -> impl Future> + Send; + + fn persist_and_transform( + &self, + executor: &mut Self::Executor, + command: CommandEnvelope, + ) -> impl Future, KernelError>> + + Send; + + fn find_by_id( + &self, + executor: &mut Self::Executor, + id: 
&EventId, + since: Option<&EventVersion>, + ) -> impl Future< + Output = error_stack::Result>, KernelError>, + > + Send; +} + +pub trait DependOnProfileEventStore: Sync + Send + DependOnDatabaseConnection { + type ProfileEventStore: ProfileEventStore< + Executor = ::Executor, + >; + + fn profile_event_store(&self) -> &Self::ProfileEventStore; +} diff --git a/kernel/src/lib.rs b/kernel/src/lib.rs index cd6b287..c3e40ea 100644 --- a/kernel/src/lib.rs +++ b/kernel/src/lib.rs @@ -1,9 +1,13 @@ +mod crypto; mod database; mod entity; mod error; mod event; -mod modify; -mod query; +mod event_store; +mod permission; +mod read_model; +mod repository; +mod signal; pub use self::error::*; @@ -16,16 +20,147 @@ pub mod prelude { #[cfg(feature = "interfaces")] pub mod interfaces { + pub mod crypto { + pub use crate::crypto::*; + } pub mod database { pub use crate::database::*; } - pub mod query { - pub use crate::query::*; - } - pub mod modify { - pub use crate::modify::*; - } pub mod event { pub use crate::event::*; } + pub mod event_store { + pub use crate::event_store::*; + } + pub mod read_model { + pub use crate::read_model::*; + } + pub mod repository { + pub use crate::repository::*; + } + pub mod permission { + pub use crate::permission::*; + } + pub mod signal { + pub use crate::signal::*; + } +} + +/// Macro to delegate database-related DependOn* traits to a field. 
+/// +/// This macro generates implementations for: +/// - DependOnDatabaseConnection +/// - DependOnAccountReadModel, DependOnAccountEventStore +/// - DependOnAuthAccountReadModel, DependOnAuthAccountEventStore +/// - DependOnProfileReadModel, DependOnProfileEventStore +/// - DependOnMetadataReadModel, DependOnMetadataEventStore +/// - DependOnAuthHostRepository +/// - DependOnFollowRepository +/// - DependOnRemoteAccountRepository +/// - DependOnImageRepository +/// +/// # Usage +/// ```ignore +/// impl_database_delegation!(Handler, pgpool, PostgresDatabase); +/// ``` +/// +/// When switching DB implementation, only the macro arguments need to change: +/// ```ignore +/// impl_database_delegation!(Handler, db, MysqlDatabase); +/// ``` +#[macro_export] +macro_rules! impl_database_delegation { + ($impl_type:ty, $field:ident, $db_type:ty) => { + impl $crate::interfaces::database::DependOnDatabaseConnection for $impl_type { + type DatabaseConnection = $db_type; + fn database_connection(&self) -> &Self::DatabaseConnection { + &self.$field + } + } + + impl $crate::interfaces::read_model::DependOnAccountReadModel for $impl_type { + type AccountReadModel = <$db_type as $crate::interfaces::read_model::DependOnAccountReadModel>::AccountReadModel; + fn account_read_model(&self) -> &Self::AccountReadModel { + $crate::interfaces::read_model::DependOnAccountReadModel::account_read_model(&self.$field) + } + } + + impl $crate::interfaces::event_store::DependOnAccountEventStore for $impl_type { + type AccountEventStore = <$db_type as $crate::interfaces::event_store::DependOnAccountEventStore>::AccountEventStore; + fn account_event_store(&self) -> &Self::AccountEventStore { + $crate::interfaces::event_store::DependOnAccountEventStore::account_event_store(&self.$field) + } + } + + impl $crate::interfaces::read_model::DependOnAuthAccountReadModel for $impl_type { + type AuthAccountReadModel = <$db_type as 
$crate::interfaces::read_model::DependOnAuthAccountReadModel>::AuthAccountReadModel; + fn auth_account_read_model(&self) -> &Self::AuthAccountReadModel { + $crate::interfaces::read_model::DependOnAuthAccountReadModel::auth_account_read_model(&self.$field) + } + } + + impl $crate::interfaces::event_store::DependOnAuthAccountEventStore for $impl_type { + type AuthAccountEventStore = <$db_type as $crate::interfaces::event_store::DependOnAuthAccountEventStore>::AuthAccountEventStore; + fn auth_account_event_store(&self) -> &Self::AuthAccountEventStore { + $crate::interfaces::event_store::DependOnAuthAccountEventStore::auth_account_event_store(&self.$field) + } + } + + impl $crate::interfaces::read_model::DependOnProfileReadModel for $impl_type { + type ProfileReadModel = <$db_type as $crate::interfaces::read_model::DependOnProfileReadModel>::ProfileReadModel; + fn profile_read_model(&self) -> &Self::ProfileReadModel { + $crate::interfaces::read_model::DependOnProfileReadModel::profile_read_model(&self.$field) + } + } + + impl $crate::interfaces::event_store::DependOnProfileEventStore for $impl_type { + type ProfileEventStore = <$db_type as $crate::interfaces::event_store::DependOnProfileEventStore>::ProfileEventStore; + fn profile_event_store(&self) -> &Self::ProfileEventStore { + $crate::interfaces::event_store::DependOnProfileEventStore::profile_event_store(&self.$field) + } + } + + impl $crate::interfaces::read_model::DependOnMetadataReadModel for $impl_type { + type MetadataReadModel = <$db_type as $crate::interfaces::read_model::DependOnMetadataReadModel>::MetadataReadModel; + fn metadata_read_model(&self) -> &Self::MetadataReadModel { + $crate::interfaces::read_model::DependOnMetadataReadModel::metadata_read_model(&self.$field) + } + } + + impl $crate::interfaces::event_store::DependOnMetadataEventStore for $impl_type { + type MetadataEventStore = <$db_type as $crate::interfaces::event_store::DependOnMetadataEventStore>::MetadataEventStore; + fn 
metadata_event_store(&self) -> &Self::MetadataEventStore { + $crate::interfaces::event_store::DependOnMetadataEventStore::metadata_event_store(&self.$field) + } + } + + impl $crate::interfaces::repository::DependOnAuthHostRepository for $impl_type { + type AuthHostRepository = <$db_type as $crate::interfaces::repository::DependOnAuthHostRepository>::AuthHostRepository; + fn auth_host_repository(&self) -> &Self::AuthHostRepository { + $crate::interfaces::repository::DependOnAuthHostRepository::auth_host_repository(&self.$field) + } + } + + impl $crate::interfaces::repository::DependOnFollowRepository for $impl_type { + type FollowRepository = <$db_type as $crate::interfaces::repository::DependOnFollowRepository>::FollowRepository; + fn follow_repository(&self) -> &Self::FollowRepository { + $crate::interfaces::repository::DependOnFollowRepository::follow_repository(&self.$field) + } + } + + impl $crate::interfaces::repository::DependOnRemoteAccountRepository for $impl_type { + type RemoteAccountRepository = <$db_type as $crate::interfaces::repository::DependOnRemoteAccountRepository>::RemoteAccountRepository; + fn remote_account_repository(&self) -> &Self::RemoteAccountRepository { + $crate::interfaces::repository::DependOnRemoteAccountRepository::remote_account_repository(&self.$field) + } + } + + impl $crate::interfaces::repository::DependOnImageRepository for $impl_type { + type ImageRepository = <$db_type as $crate::interfaces::repository::DependOnImageRepository>::ImageRepository; + fn image_repository(&self) -> &Self::ImageRepository { + $crate::interfaces::repository::DependOnImageRepository::image_repository(&self.$field) + } + } + + }; } diff --git a/kernel/src/modify.rs b/kernel/src/modify.rs deleted file mode 100644 index 8893e6f..0000000 --- a/kernel/src/modify.rs +++ /dev/null @@ -1,14 +0,0 @@ -mod account; -mod event; -mod follow; -mod image; -mod metadata; -mod profile; -mod remote_account; -mod stellar_account; -mod stellar_host; - -pub use self::{ - 
account::*, event::*, follow::*, image::*, metadata::*, profile::*, remote_account::*, - stellar_account::*, stellar_host::*, -}; diff --git a/kernel/src/modify/account.rs b/kernel/src/modify/account.rs deleted file mode 100644 index 63d5d3b..0000000 --- a/kernel/src/modify/account.rs +++ /dev/null @@ -1,34 +0,0 @@ -use crate::database::{DatabaseConnection, DependOnDatabaseConnection, Transaction}; -use crate::entity::{Account, AccountId}; -use crate::KernelError; -use std::future::Future; - -pub trait AccountModifier: Sync + Send + 'static { - type Transaction: Transaction; - - fn create( - &self, - transaction: &mut Self::Transaction, - account: &Account, - ) -> impl Future> + Send; - - fn update( - &self, - transaction: &mut Self::Transaction, - account: &Account, - ) -> impl Future> + Send; - - fn delete( - &self, - transaction: &mut Self::Transaction, - account_id: &AccountId, - ) -> impl Future> + Send; -} - -pub trait DependOnAccountModifier: Sync + Send + DependOnDatabaseConnection { - type AccountModifier: AccountModifier< - Transaction = ::Transaction, - >; - - fn account_modifier(&self) -> &Self::AccountModifier; -} diff --git a/kernel/src/modify/event.rs b/kernel/src/modify/event.rs deleted file mode 100644 index db43658..0000000 --- a/kernel/src/modify/event.rs +++ /dev/null @@ -1,23 +0,0 @@ -use crate::database::{DatabaseConnection, DependOnDatabaseConnection, Transaction}; -use crate::entity::CommandEnvelope; -use crate::KernelError; -use serde::Serialize; -use std::future::Future; - -pub trait EventModifier: 'static + Sync + Send { - type Transaction: Transaction; - - fn handle( - &self, - transaction: &mut Self::Transaction, - event: &CommandEnvelope, - ) -> impl Future> + Send; -} - -pub trait DependOnEventModifier: Sync + Send + DependOnDatabaseConnection { - type EventModifier: EventModifier< - Transaction = ::Transaction, - >; - - fn event_modifier(&self) -> &Self::EventModifier; -} diff --git a/kernel/src/modify/follow.rs 
b/kernel/src/modify/follow.rs deleted file mode 100644 index 3f0f7d6..0000000 --- a/kernel/src/modify/follow.rs +++ /dev/null @@ -1,34 +0,0 @@ -use crate::database::{DatabaseConnection, DependOnDatabaseConnection, Transaction}; -use crate::entity::{Follow, FollowId}; -use crate::KernelError; -use std::future::Future; - -pub trait FollowModifier: Sync + Send + 'static { - type Transaction: Transaction; - - fn create( - &self, - transaction: &mut Self::Transaction, - follow: &Follow, - ) -> impl Future> + Send; - - fn update( - &self, - transaction: &mut Self::Transaction, - follow: &Follow, - ) -> impl Future> + Send; - - fn delete( - &self, - transaction: &mut Self::Transaction, - follow_id: &FollowId, - ) -> impl Future> + Send; -} - -pub trait DependOnFollowModifier: Sync + Send + DependOnDatabaseConnection { - type FollowModifier: FollowModifier< - Transaction = ::Transaction, - >; - - fn follow_modifier(&self) -> &Self::FollowModifier; -} diff --git a/kernel/src/modify/image.rs b/kernel/src/modify/image.rs deleted file mode 100644 index d95d107..0000000 --- a/kernel/src/modify/image.rs +++ /dev/null @@ -1,28 +0,0 @@ -use crate::database::{DatabaseConnection, DependOnDatabaseConnection, Transaction}; -use crate::entity::{Image, ImageId}; -use crate::KernelError; -use std::future::Future; - -pub trait ImageModifier: Sync + Send + 'static { - type Transaction: Transaction; - - fn create( - &self, - transaction: &mut Self::Transaction, - image: &Image, - ) -> impl Future> + Send; - - fn delete( - &self, - transaction: &mut Self::Transaction, - image_id: &ImageId, - ) -> impl Future> + Send; -} - -pub trait DependOnImageModifier: Sync + Send + DependOnDatabaseConnection { - type ImageModifier: ImageModifier< - Transaction = ::Transaction, - >; - - fn image_modifier(&self) -> &Self::ImageModifier; -} diff --git a/kernel/src/modify/metadata.rs b/kernel/src/modify/metadata.rs deleted file mode 100644 index 80555ac..0000000 --- a/kernel/src/modify/metadata.rs +++ 
/dev/null @@ -1,34 +0,0 @@ -use crate::database::{DependOnDatabaseConnection, Transaction}; -use crate::entity::{Metadata, MetadataId}; -use crate::KernelError; -use std::future::Future; - -pub trait MetadataModifier: Sync + Send + 'static { - type Transaction: Transaction; - - fn create( - &self, - transaction: &mut Self::Transaction, - metadata: &Metadata, - ) -> impl Future> + Send; - - fn update( - &self, - transaction: &mut Self::Transaction, - metadata: &Metadata, - ) -> impl Future> + Send; - - fn delete( - &self, - transaction: &mut Self::Transaction, - metadata_id: &MetadataId, - ) -> impl Future> + Send; -} - -pub trait DependOnMetadataModifier: Sync + Send + DependOnDatabaseConnection { - type MetadataModifier: MetadataModifier< - Transaction = ::Transaction, - >; - - fn metadata_modifier(&self) -> &Self::MetadataModifier; -} diff --git a/kernel/src/modify/profile.rs b/kernel/src/modify/profile.rs deleted file mode 100644 index 146217e..0000000 --- a/kernel/src/modify/profile.rs +++ /dev/null @@ -1,28 +0,0 @@ -use crate::database::{DependOnDatabaseConnection, Transaction}; -use crate::entity::Profile; -use crate::KernelError; -use std::future::Future; - -pub trait ProfileModifier: Sync + Send + 'static { - type Transaction: Transaction; - - fn create( - &self, - transaction: &mut Self::Transaction, - profile: &Profile, - ) -> impl Future> + Send; - - fn update( - &self, - transaction: &mut Self::Transaction, - profile: &Profile, - ) -> impl Future> + Send; -} - -pub trait DependOnProfileModifier: Sync + Send + DependOnDatabaseConnection { - type ProfileModifier: ProfileModifier< - Transaction = ::Transaction, - >; - - fn profile_modifier(&self) -> &Self::ProfileModifier; -} diff --git a/kernel/src/modify/remote_account.rs b/kernel/src/modify/remote_account.rs deleted file mode 100644 index b8a7c1c..0000000 --- a/kernel/src/modify/remote_account.rs +++ /dev/null @@ -1,34 +0,0 @@ -use crate::database::{DatabaseConnection, DependOnDatabaseConnection, 
Transaction}; -use crate::entity::{RemoteAccount, RemoteAccountId}; -use crate::KernelError; -use std::future::Future; - -pub trait RemoteAccountModifier: Sync + Send + 'static { - type Transaction: Transaction; - - fn create( - &self, - transaction: &mut Self::Transaction, - account: &RemoteAccount, - ) -> impl Future> + Send; - - fn update( - &self, - transaction: &mut Self::Transaction, - account: &RemoteAccount, - ) -> impl Future> + Send; - - fn delete( - &self, - transaction: &mut Self::Transaction, - account_id: &RemoteAccountId, - ) -> impl Future> + Send; -} - -pub trait DependOnRemoteAccountModifier: Sync + Send + DependOnDatabaseConnection { - type RemoteAccountModifier: RemoteAccountModifier< - Transaction = ::Transaction, - >; - - fn remote_account_modifier(&self) -> &Self::RemoteAccountModifier; -} diff --git a/kernel/src/modify/stellar_account.rs b/kernel/src/modify/stellar_account.rs deleted file mode 100644 index 912c741..0000000 --- a/kernel/src/modify/stellar_account.rs +++ /dev/null @@ -1,34 +0,0 @@ -use crate::database::{DependOnDatabaseConnection, Transaction}; -use crate::entity::{StellarAccount, StellarAccountId}; -use crate::KernelError; -use std::future::Future; - -pub trait StellarAccountModifier: Sync + Send + 'static { - type Transaction: Transaction; - - fn create( - &self, - transaction: &mut Self::Transaction, - stellar_account: &StellarAccount, - ) -> impl Future> + Send; - - fn update( - &self, - transaction: &mut Self::Transaction, - stellar_account: &StellarAccount, - ) -> impl Future> + Send; - - fn delete( - &self, - transaction: &mut Self::Transaction, - account_id: &StellarAccountId, - ) -> impl Future> + Send; -} - -pub trait DependOnStellarAccountModifier: Sync + Send + DependOnDatabaseConnection { - type StellarAccountModifier: StellarAccountModifier< - Transaction = ::Transaction, - >; - - fn stellar_account_modifier(&self) -> &Self::StellarAccountModifier; -} diff --git a/kernel/src/modify/stellar_host.rs 
b/kernel/src/modify/stellar_host.rs deleted file mode 100644 index 8dffeba..0000000 --- a/kernel/src/modify/stellar_host.rs +++ /dev/null @@ -1,28 +0,0 @@ -use crate::database::{DatabaseConnection, DependOnDatabaseConnection, Transaction}; -use crate::entity::StellarHost; -use crate::KernelError; -use std::future::Future; - -pub trait StellarHostModifier: Sync + Send + 'static { - type Transaction: Transaction; - - fn create( - &self, - transaction: &mut Self::Transaction, - stellar_host: &StellarHost, - ) -> impl Future> + Send; - - fn update( - &self, - transaction: &mut Self::Transaction, - stellar_host: &StellarHost, - ) -> impl Future> + Send; -} - -pub trait DependOnStellarHostModifier: Sync + Send + DependOnDatabaseConnection { - type StellarHostModifier: StellarHostModifier< - Transaction = ::Transaction, - >; - - fn stellar_host_modifier(&self) -> &Self::StellarHostModifier; -} diff --git a/kernel/src/permission.rs b/kernel/src/permission.rs new file mode 100644 index 0000000..c8377f5 --- /dev/null +++ b/kernel/src/permission.rs @@ -0,0 +1,146 @@ +use crate::entity::{AccountId, AuthAccountId}; +use crate::KernelError; +use std::collections::HashSet; +use std::future::Future; +use std::ops::Add; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum Relation { + Owner, + Editor, + Signer, + Admin, + Moderator, +} + +impl Relation { + pub fn as_str(&self) -> &'static str { + match self { + Relation::Owner => "owner", + Relation::Editor => "editor", + Relation::Signer => "signer", + Relation::Admin => "admin", + Relation::Moderator => "moderator", + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum Resource { + Account(AccountId), + Instance, +} + +impl Resource { + pub fn namespace(&self) -> &'static str { + match self { + Resource::Account(_) => "accounts", + Resource::Instance => "instance", + } + } + + pub fn object_id(&self) -> String { + match self { + Resource::Account(id) => id.as_ref().to_string(), + Resource::Instance => 
"singleton".to_string(), + } + } +} + +#[derive(Debug, Clone)] +pub struct PermissionReq { + resource: Resource, + relations: HashSet, +} + +impl PermissionReq { + pub fn new(resource: Resource, relations: impl IntoIterator) -> Self { + Self { + resource, + relations: relations.into_iter().collect(), + } + } + + pub fn resource(&self) -> &Resource { + &self.resource + } + + pub fn relations(&self) -> &HashSet { + &self.relations + } +} + +#[derive(Debug, Clone)] +pub struct Permission(Vec); + +impl Permission { + pub fn new(req: PermissionReq) -> Self { + Self(vec![req]) + } + + pub fn all(reqs: Vec) -> Self { + Self(reqs) + } + + pub fn requirements(&self) -> &[PermissionReq] { + &self.0 + } +} + +impl Add for Permission { + type Output = Permission; + + fn add(mut self, rhs: Self) -> Self::Output { + self.0.extend(rhs.0); + self + } +} + +pub trait PermissionChecker: Send + Sync + 'static { + fn check( + &self, + subject: &AuthAccountId, + req: &PermissionReq, + ) -> impl Future> + Send; + + fn satisfies( + &self, + subject: &AuthAccountId, + permission: &Permission, + ) -> impl Future> + Send { + async move { + for req in permission.requirements() { + if !self.check(subject, req).await? 
{ + return Ok(false); + } + } + Ok(true) + } + } +} + +pub trait DependOnPermissionChecker: Send + Sync { + type PermissionChecker: PermissionChecker; + fn permission_checker(&self) -> &Self::PermissionChecker; +} + +pub trait PermissionWriter: Send + Sync + 'static { + fn create_relation( + &self, + resource: &Resource, + relation: Relation, + subject: &AuthAccountId, + ) -> impl Future> + Send; + + fn delete_relation( + &self, + resource: &Resource, + relation: Relation, + subject: &AuthAccountId, + ) -> impl Future> + Send; +} + +pub trait DependOnPermissionWriter: Send + Sync { + type PermissionWriter: PermissionWriter; + fn permission_writer(&self) -> &Self::PermissionWriter; +} diff --git a/kernel/src/query.rs b/kernel/src/query.rs deleted file mode 100644 index 8893e6f..0000000 --- a/kernel/src/query.rs +++ /dev/null @@ -1,14 +0,0 @@ -mod account; -mod event; -mod follow; -mod image; -mod metadata; -mod profile; -mod remote_account; -mod stellar_account; -mod stellar_host; - -pub use self::{ - account::*, event::*, follow::*, image::*, metadata::*, profile::*, remote_account::*, - stellar_account::*, stellar_host::*, -}; diff --git a/kernel/src/query/account.rs b/kernel/src/query/account.rs deleted file mode 100644 index a69d39a..0000000 --- a/kernel/src/query/account.rs +++ /dev/null @@ -1,34 +0,0 @@ -use crate::database::{DatabaseConnection, DependOnDatabaseConnection, Transaction}; -use crate::entity::{Account, AccountId, AccountName, StellarAccountId}; -use crate::KernelError; -use std::future::Future; - -pub trait AccountQuery: Sync + Send + 'static { - type Transaction: Transaction; - - fn find_by_id( - &self, - transaction: &mut Self::Transaction, - id: &AccountId, - ) -> impl Future, KernelError>> + Send; - - fn find_by_stellar_id( - &self, - transaction: &mut Self::Transaction, - stellar_id: &StellarAccountId, - ) -> impl Future, KernelError>> + Send; - - fn find_by_name( - &self, - transaction: &mut Self::Transaction, - name: &AccountName, - ) -> 
impl Future, KernelError>> + Send; -} - -pub trait DependOnAccountQuery: Sync + Send + DependOnDatabaseConnection { - type AccountQuery: AccountQuery< - Transaction = ::Transaction, - >; - - fn account_query(&self) -> &Self::AccountQuery; -} diff --git a/kernel/src/query/event.rs b/kernel/src/query/event.rs deleted file mode 100644 index 994003a..0000000 --- a/kernel/src/query/event.rs +++ /dev/null @@ -1,24 +0,0 @@ -use crate::database::{DatabaseConnection, DependOnDatabaseConnection, Transaction}; -use crate::entity::{EventEnvelope, EventId, EventVersion}; -use crate::KernelError; -use serde::Deserialize; -use std::future::Future; - -pub trait EventQuery: Sync + Send + 'static { - type Transaction: Transaction; - - fn find_by_id Deserialize<'de> + Sync, Entity: Sync>( - &self, - transaction: &mut Self::Transaction, - id: &EventId, - since: Option<&EventVersion>, - ) -> impl Future>, KernelError>> + Send; -} - -pub trait DependOnEventQuery: Sync + Send + DependOnDatabaseConnection { - type EventQuery: EventQuery< - Transaction = ::Transaction, - >; - - fn event_query(&self) -> &Self::EventQuery; -} diff --git a/kernel/src/query/follow.rs b/kernel/src/query/follow.rs deleted file mode 100644 index b6730cd..0000000 --- a/kernel/src/query/follow.rs +++ /dev/null @@ -1,28 +0,0 @@ -use crate::database::{DatabaseConnection, DependOnDatabaseConnection, Transaction}; -use crate::entity::{Follow, FollowTargetId}; -use crate::KernelError; -use std::future::Future; - -pub trait FollowQuery: Sync + Send + 'static { - type Transaction: Transaction; - - fn find_followings( - &self, - transaction: &mut Self::Transaction, - source: &FollowTargetId, - ) -> impl Future, KernelError>> + Send; - - fn find_followers( - &self, - transaction: &mut Self::Transaction, - destination: &FollowTargetId, - ) -> impl Future, KernelError>> + Send; -} - -pub trait DependOnFollowQuery: Sync + Send + DependOnDatabaseConnection { - type FollowQuery: FollowQuery< - Transaction = ::Transaction, - >; - 
- fn follow_query(&self) -> &Self::FollowQuery; -} diff --git a/kernel/src/query/image.rs b/kernel/src/query/image.rs deleted file mode 100644 index ef34a40..0000000 --- a/kernel/src/query/image.rs +++ /dev/null @@ -1,28 +0,0 @@ -use crate::database::{DatabaseConnection, DependOnDatabaseConnection, Transaction}; -use crate::entity::{Image, ImageId, ImageUrl}; -use crate::KernelError; -use std::future::Future; - -pub trait ImageQuery: Sync + Send + 'static { - type Transaction: Transaction; - - fn find_by_id( - &self, - transaction: &mut Self::Transaction, - id: &ImageId, - ) -> impl Future, KernelError>> + Send; - - fn find_by_url( - &self, - transaction: &mut Self::Transaction, - url: &ImageUrl, - ) -> impl Future, KernelError>> + Send; -} - -pub trait DependOnImageQuery: Sync + Send + DependOnDatabaseConnection { - type ImageQuery: ImageQuery< - Transaction = ::Transaction, - >; - - fn image_query(&self) -> &Self::ImageQuery; -} diff --git a/kernel/src/query/metadata.rs b/kernel/src/query/metadata.rs deleted file mode 100644 index 6d491fd..0000000 --- a/kernel/src/query/metadata.rs +++ /dev/null @@ -1,28 +0,0 @@ -use crate::database::{DependOnDatabaseConnection, Transaction}; -use crate::entity::{AccountId, Metadata, MetadataId}; -use crate::KernelError; -use std::future::Future; - -pub trait MetadataQuery: Sync + Send + 'static { - type Transaction: Transaction; - - fn find_by_id( - &self, - transaction: &mut Self::Transaction, - metadata_id: &MetadataId, - ) -> impl Future, KernelError>> + Send; - - fn find_by_account_id( - &self, - transaction: &mut Self::Transaction, - account_id: &AccountId, - ) -> impl Future, KernelError>> + Send; -} - -pub trait DependOnMetadataQuery: Sync + Send + DependOnDatabaseConnection { - type MetadataQuery: MetadataQuery< - Transaction = ::Transaction, - >; - - fn metadata_query(&self) -> &Self::MetadataQuery; -} diff --git a/kernel/src/query/profile.rs b/kernel/src/query/profile.rs deleted file mode 100644 index d8d2cb9..0000000 
--- a/kernel/src/query/profile.rs +++ /dev/null @@ -1,22 +0,0 @@ -use crate::database::{DependOnDatabaseConnection, Transaction}; -use crate::entity::{Profile, ProfileId}; -use crate::KernelError; -use std::future::Future; - -pub trait ProfileQuery: Sync + Send + 'static { - type Transaction: Transaction; - - fn find_by_id( - &self, - transaction: &mut Self::Transaction, - id: &ProfileId, - ) -> impl Future, KernelError>> + Send; -} - -pub trait DependOnProfileQuery: Sync + Send + DependOnDatabaseConnection { - type ProfileQuery: ProfileQuery< - Transaction = ::Transaction, - >; - - fn profile_query(&self) -> &Self::ProfileQuery; -} diff --git a/kernel/src/query/remote_account.rs b/kernel/src/query/remote_account.rs deleted file mode 100644 index 8538fab..0000000 --- a/kernel/src/query/remote_account.rs +++ /dev/null @@ -1,34 +0,0 @@ -use crate::database::{DatabaseConnection, DependOnDatabaseConnection, Transaction}; -use crate::entity::{RemoteAccount, RemoteAccountAcct, RemoteAccountId, RemoteAccountUrl}; -use crate::KernelError; -use std::future::Future; - -pub trait RemoteAccountQuery: Sync + Send + 'static { - type Transaction: Transaction; - - fn find_by_id( - &self, - transaction: &mut Self::Transaction, - id: &RemoteAccountId, - ) -> impl Future, KernelError>> + Send; - - fn find_by_acct( - &self, - transaction: &mut Self::Transaction, - acct: &RemoteAccountAcct, - ) -> impl Future, KernelError>> + Send; - - fn find_by_url( - &self, - transaction: &mut Self::Transaction, - url: &RemoteAccountUrl, - ) -> impl Future, KernelError>> + Send; -} - -pub trait DependOnRemoteAccountQuery: Sync + Send + DependOnDatabaseConnection { - type RemoteAccountQuery: RemoteAccountQuery< - Transaction = ::Transaction, - >; - - fn remote_account_query(&self) -> &Self::RemoteAccountQuery; -} diff --git a/kernel/src/query/stellar_account.rs b/kernel/src/query/stellar_account.rs deleted file mode 100644 index 4d91ee3..0000000 --- a/kernel/src/query/stellar_account.rs +++ /dev/null 
@@ -1,22 +0,0 @@ -use crate::database::{DependOnDatabaseConnection, Transaction}; -use crate::entity::{StellarAccount, StellarAccountId}; -use crate::KernelError; -use std::future::Future; - -pub trait StellarAccountQuery: Sync + Send + 'static { - type Transaction: Transaction; - - fn find_by_id( - &self, - transaction: &mut Self::Transaction, - account_id: &StellarAccountId, - ) -> impl Future, KernelError>> + Send; -} - -pub trait DependOnStellarAccountQuery: Sync + Send + DependOnDatabaseConnection { - type StellarAccountQuery: StellarAccountQuery< - Transaction = ::Transaction, - >; - - fn stellar_account_query(&self) -> &Self::StellarAccountQuery; -} diff --git a/kernel/src/query/stellar_host.rs b/kernel/src/query/stellar_host.rs deleted file mode 100644 index d8692fa..0000000 --- a/kernel/src/query/stellar_host.rs +++ /dev/null @@ -1,28 +0,0 @@ -use crate::database::{DatabaseConnection, DependOnDatabaseConnection, Transaction}; -use crate::entity::{StellarHost, StellarHostId, StellarHostUrl}; -use crate::KernelError; -use std::future::Future; - -pub trait StellarHostQuery: Sync + Send + 'static { - type Transaction: Transaction; - - fn find_by_id( - &self, - transaction: &mut Self::Transaction, - id: &StellarHostId, - ) -> impl Future, KernelError>> + Send; - - fn find_by_url( - &self, - transaction: &mut Self::Transaction, - domain: &StellarHostUrl, - ) -> impl Future, KernelError>> + Send; -} - -pub trait DependOnStellarHostQuery: Sync + Send + DependOnDatabaseConnection { - type StellarHostQuery: StellarHostQuery< - Transaction = ::Transaction, - >; - - fn stellar_host_query(&self) -> &Self::StellarHostQuery; -} diff --git a/kernel/src/read_model.rs b/kernel/src/read_model.rs new file mode 100644 index 0000000..fa7d58c --- /dev/null +++ b/kernel/src/read_model.rs @@ -0,0 +1,9 @@ +mod account; +mod auth_account; +mod metadata; +mod profile; + +pub use self::account::*; +pub use self::auth_account::*; +pub use self::metadata::*; +pub use self::profile::*; 
diff --git a/kernel/src/read_model/account.rs b/kernel/src/read_model/account.rs new file mode 100644 index 0000000..df25353 --- /dev/null +++ b/kernel/src/read_model/account.rs @@ -0,0 +1,79 @@ +use crate::database::{DatabaseConnection, DependOnDatabaseConnection, Executor}; +use crate::entity::{Account, AccountId, AccountName, AuthAccountId, Nanoid}; +use crate::KernelError; +use std::future::Future; + +pub trait AccountReadModel: Sync + Send + 'static { + type Executor: Executor; + + // Query operations (projection reads) + fn find_by_id( + &self, + executor: &mut Self::Executor, + id: &AccountId, + ) -> impl Future, KernelError>> + Send; + + fn find_by_auth_id( + &self, + executor: &mut Self::Executor, + auth_id: &AuthAccountId, + ) -> impl Future, KernelError>> + Send; + + fn find_by_name( + &self, + executor: &mut Self::Executor, + name: &AccountName, + ) -> impl Future, KernelError>> + Send; + + fn find_by_nanoid( + &self, + executor: &mut Self::Executor, + nanoid: &Nanoid, + ) -> impl Future, KernelError>> + Send; + + fn find_by_nanoids( + &self, + executor: &mut Self::Executor, + nanoids: &[Nanoid], + ) -> impl Future, KernelError>> + Send; + + // Projection update operations (called by EventApplier pipeline) + fn create( + &self, + executor: &mut Self::Executor, + account: &Account, + ) -> impl Future> + Send; + + fn update( + &self, + executor: &mut Self::Executor, + account: &Account, + ) -> impl Future> + Send; + + fn deactivate( + &self, + executor: &mut Self::Executor, + account_id: &AccountId, + ) -> impl Future> + Send; + + fn unlink_all_auth_accounts( + &self, + executor: &mut Self::Executor, + account_id: &AccountId, + ) -> impl Future> + Send; + + fn link_auth_account( + &self, + executor: &mut Self::Executor, + account_id: &AccountId, + auth_account_id: &AuthAccountId, + ) -> impl Future> + Send; +} + +pub trait DependOnAccountReadModel: Sync + Send + DependOnDatabaseConnection { + type AccountReadModel: AccountReadModel< + Executor = 
::Executor, + >; + + fn account_read_model(&self) -> &Self::AccountReadModel; +} diff --git a/kernel/src/read_model/auth_account.rs b/kernel/src/read_model/auth_account.rs new file mode 100644 index 0000000..33e3fee --- /dev/null +++ b/kernel/src/read_model/auth_account.rs @@ -0,0 +1,48 @@ +use crate::database::{DatabaseConnection, DependOnDatabaseConnection, Executor}; +use crate::entity::{AuthAccount, AuthAccountClientId, AuthAccountId}; +use crate::KernelError; +use std::future::Future; + +pub trait AuthAccountReadModel: Sync + Send + 'static { + type Executor: Executor; + + // Query operations (projection reads) + fn find_by_id( + &self, + executor: &mut Self::Executor, + id: &AuthAccountId, + ) -> impl Future, KernelError>> + Send; + + fn find_by_client_id( + &self, + executor: &mut Self::Executor, + client_id: &AuthAccountClientId, + ) -> impl Future, KernelError>> + Send; + + // Projection update operations (called by EventApplier pipeline) + fn create( + &self, + executor: &mut Self::Executor, + auth_account: &AuthAccount, + ) -> impl Future> + Send; + + fn update( + &self, + executor: &mut Self::Executor, + auth_account: &AuthAccount, + ) -> impl Future> + Send; + + fn delete( + &self, + executor: &mut Self::Executor, + account_id: &AuthAccountId, + ) -> impl Future> + Send; +} + +pub trait DependOnAuthAccountReadModel: Sync + Send + DependOnDatabaseConnection { + type AuthAccountReadModel: AuthAccountReadModel< + Executor = ::Executor, + >; + + fn auth_account_read_model(&self) -> &Self::AuthAccountReadModel; +} diff --git a/kernel/src/read_model/metadata.rs b/kernel/src/read_model/metadata.rs new file mode 100644 index 0000000..bbe35f3 --- /dev/null +++ b/kernel/src/read_model/metadata.rs @@ -0,0 +1,54 @@ +use crate::database::{DatabaseConnection, DependOnDatabaseConnection, Executor}; +use crate::entity::{AccountId, Metadata, MetadataId}; +use crate::KernelError; +use std::future::Future; + +pub trait MetadataReadModel: Sync + Send + 'static { + type 
Executor: Executor; + + // Query operations (projection reads) + fn find_by_id( + &self, + executor: &mut Self::Executor, + id: &MetadataId, + ) -> impl Future, KernelError>> + Send; + + fn find_by_account_id( + &self, + executor: &mut Self::Executor, + account_id: &AccountId, + ) -> impl Future, KernelError>> + Send; + + fn find_by_account_ids( + &self, + executor: &mut Self::Executor, + account_ids: &[AccountId], + ) -> impl Future, KernelError>> + Send; + + // Projection update operations (called by EventApplier pipeline) + fn create( + &self, + executor: &mut Self::Executor, + metadata: &Metadata, + ) -> impl Future> + Send; + + fn update( + &self, + executor: &mut Self::Executor, + metadata: &Metadata, + ) -> impl Future> + Send; + + fn delete( + &self, + executor: &mut Self::Executor, + metadata_id: &MetadataId, + ) -> impl Future> + Send; +} + +pub trait DependOnMetadataReadModel: Sync + Send + DependOnDatabaseConnection { + type MetadataReadModel: MetadataReadModel< + Executor = ::Executor, + >; + + fn metadata_read_model(&self) -> &Self::MetadataReadModel; +} diff --git a/kernel/src/read_model/profile.rs b/kernel/src/read_model/profile.rs new file mode 100644 index 0000000..0f12e9f --- /dev/null +++ b/kernel/src/read_model/profile.rs @@ -0,0 +1,54 @@ +use crate::database::{DatabaseConnection, DependOnDatabaseConnection, Executor}; +use crate::entity::{AccountId, Profile, ProfileId}; +use crate::KernelError; +use std::future::Future; + +pub trait ProfileReadModel: Sync + Send + 'static { + type Executor: Executor; + + // Query operations (projection reads) + fn find_by_id( + &self, + executor: &mut Self::Executor, + id: &ProfileId, + ) -> impl Future, KernelError>> + Send; + + fn find_by_account_id( + &self, + executor: &mut Self::Executor, + account_id: &AccountId, + ) -> impl Future, KernelError>> + Send; + + fn find_by_account_ids( + &self, + executor: &mut Self::Executor, + account_ids: &[AccountId], + ) -> impl Future, KernelError>> + Send; + + // 
Projection update operations (called by EventApplier pipeline) + fn create( + &self, + executor: &mut Self::Executor, + profile: &Profile, + ) -> impl Future> + Send; + + fn update( + &self, + executor: &mut Self::Executor, + profile: &Profile, + ) -> impl Future> + Send; + + fn delete( + &self, + executor: &mut Self::Executor, + profile_id: &ProfileId, + ) -> impl Future> + Send; +} + +pub trait DependOnProfileReadModel: Sync + Send + DependOnDatabaseConnection { + type ProfileReadModel: ProfileReadModel< + Executor = ::Executor, + >; + + fn profile_read_model(&self) -> &Self::ProfileReadModel; +} diff --git a/kernel/src/repository.rs b/kernel/src/repository.rs new file mode 100644 index 0000000..41ed3c9 --- /dev/null +++ b/kernel/src/repository.rs @@ -0,0 +1,9 @@ +mod auth_host; +mod follow; +mod image; +mod remote_account; + +pub use self::auth_host::*; +pub use self::follow::*; +pub use self::image::*; +pub use self::remote_account::*; diff --git a/kernel/src/repository/auth_host.rs b/kernel/src/repository/auth_host.rs new file mode 100644 index 0000000..3add4c2 --- /dev/null +++ b/kernel/src/repository/auth_host.rs @@ -0,0 +1,40 @@ +use crate::database::{DatabaseConnection, DependOnDatabaseConnection, Executor}; +use crate::entity::{AuthHost, AuthHostId, AuthHostUrl}; +use crate::KernelError; +use std::future::Future; + +pub trait AuthHostRepository: Sync + Send + 'static { + type Executor: Executor; + + fn find_by_id( + &self, + executor: &mut Self::Executor, + id: &AuthHostId, + ) -> impl Future, KernelError>> + Send; + + fn find_by_url( + &self, + executor: &mut Self::Executor, + url: &AuthHostUrl, + ) -> impl Future, KernelError>> + Send; + + fn create( + &self, + executor: &mut Self::Executor, + auth_host: &AuthHost, + ) -> impl Future> + Send; + + fn update( + &self, + executor: &mut Self::Executor, + auth_host: &AuthHost, + ) -> impl Future> + Send; +} + +pub trait DependOnAuthHostRepository: Sync + Send + DependOnDatabaseConnection { + type 
AuthHostRepository: AuthHostRepository< + Executor = ::Executor, + >; + + fn auth_host_repository(&self) -> &Self::AuthHostRepository; +} diff --git a/kernel/src/repository/follow.rs b/kernel/src/repository/follow.rs new file mode 100644 index 0000000..11e7508 --- /dev/null +++ b/kernel/src/repository/follow.rs @@ -0,0 +1,46 @@ +use crate::database::{DatabaseConnection, DependOnDatabaseConnection, Executor}; +use crate::entity::{Follow, FollowId, FollowTargetId}; +use crate::KernelError; +use std::future::Future; + +pub trait FollowRepository: Sync + Send + 'static { + type Executor: Executor; + + fn find_followings( + &self, + executor: &mut Self::Executor, + source: &FollowTargetId, + ) -> impl Future, KernelError>> + Send; + + fn find_followers( + &self, + executor: &mut Self::Executor, + destination: &FollowTargetId, + ) -> impl Future, KernelError>> + Send; + + fn create( + &self, + executor: &mut Self::Executor, + follow: &Follow, + ) -> impl Future> + Send; + + fn update( + &self, + executor: &mut Self::Executor, + follow: &Follow, + ) -> impl Future> + Send; + + fn delete( + &self, + executor: &mut Self::Executor, + follow_id: &FollowId, + ) -> impl Future> + Send; +} + +pub trait DependOnFollowRepository: Sync + Send + DependOnDatabaseConnection { + type FollowRepository: FollowRepository< + Executor = ::Executor, + >; + + fn follow_repository(&self) -> &Self::FollowRepository; +} diff --git a/kernel/src/repository/image.rs b/kernel/src/repository/image.rs new file mode 100644 index 0000000..8c0ba1c --- /dev/null +++ b/kernel/src/repository/image.rs @@ -0,0 +1,40 @@ +use crate::database::{DatabaseConnection, DependOnDatabaseConnection, Executor}; +use crate::entity::{Image, ImageId, ImageUrl}; +use crate::KernelError; +use std::future::Future; + +pub trait ImageRepository: Sync + Send + 'static { + type Executor: Executor; + + fn find_by_id( + &self, + executor: &mut Self::Executor, + id: &ImageId, + ) -> impl Future, KernelError>> + Send; + + fn 
find_by_url( + &self, + executor: &mut Self::Executor, + url: &ImageUrl, + ) -> impl Future, KernelError>> + Send; + + fn create( + &self, + executor: &mut Self::Executor, + image: &Image, + ) -> impl Future> + Send; + + fn delete( + &self, + executor: &mut Self::Executor, + image_id: &ImageId, + ) -> impl Future> + Send; +} + +pub trait DependOnImageRepository: Sync + Send + DependOnDatabaseConnection { + type ImageRepository: ImageRepository< + Executor = ::Executor, + >; + + fn image_repository(&self) -> &Self::ImageRepository; +} diff --git a/kernel/src/repository/remote_account.rs b/kernel/src/repository/remote_account.rs new file mode 100644 index 0000000..beab5d5 --- /dev/null +++ b/kernel/src/repository/remote_account.rs @@ -0,0 +1,52 @@ +use crate::database::{DatabaseConnection, DependOnDatabaseConnection, Executor}; +use crate::entity::{RemoteAccount, RemoteAccountAcct, RemoteAccountId, RemoteAccountUrl}; +use crate::KernelError; +use std::future::Future; + +pub trait RemoteAccountRepository: Sync + Send + 'static { + type Executor: Executor; + + fn find_by_id( + &self, + executor: &mut Self::Executor, + id: &RemoteAccountId, + ) -> impl Future, KernelError>> + Send; + + fn find_by_acct( + &self, + executor: &mut Self::Executor, + acct: &RemoteAccountAcct, + ) -> impl Future, KernelError>> + Send; + + fn find_by_url( + &self, + executor: &mut Self::Executor, + url: &RemoteAccountUrl, + ) -> impl Future, KernelError>> + Send; + + fn create( + &self, + executor: &mut Self::Executor, + account: &RemoteAccount, + ) -> impl Future> + Send; + + fn update( + &self, + executor: &mut Self::Executor, + account: &RemoteAccount, + ) -> impl Future> + Send; + + fn delete( + &self, + executor: &mut Self::Executor, + account_id: &RemoteAccountId, + ) -> impl Future> + Send; +} + +pub trait DependOnRemoteAccountRepository: Sync + Send + DependOnDatabaseConnection { + type RemoteAccountRepository: RemoteAccountRepository< + Executor = ::Executor, + >; + + fn 
remote_account_repository(&self) -> &Self::RemoteAccountRepository; +} diff --git a/kernel/src/signal.rs b/kernel/src/signal.rs new file mode 100644 index 0000000..718f794 --- /dev/null +++ b/kernel/src/signal.rs @@ -0,0 +1,9 @@ +use crate::KernelError; +use std::future::Future; + +pub trait Signal { + fn emit( + &self, + signal_id: ID, + ) -> impl Future> + Send; +} diff --git a/migrations/20230707210300_init.dbml b/migrations/20230707210300_init.dbml index 6fbdf25..b91a4ff 100644 --- a/migrations/20230707210300_init.dbml +++ b/migrations/20230707210300_init.dbml @@ -16,6 +16,7 @@ Table accounts { deleted_at TIMESTAMPTZ version UUID [not null] nanoid TEXT [not null, unique] + created_at TIMESTAMPTZ [not null] } Table remote_accounts { @@ -53,31 +54,29 @@ Table metadatas { Ref: metadatas.account_id > accounts.id [delete: cascade] -Table stellar_hosts { +Table auth_hosts { id UUID [not null, pk] url TEXT [not null, unique] } -Table stellar_accounts { +Table auth_accounts { id UUID [not null, pk] host_id UUID [not null] client_id TEXT [not null] - access_token TEXT [not null] - refresh_token TEXT [not null] version UUID [not null] } -Ref: stellar_accounts.host_id > stellar_hosts.id [delete: cascade] +Ref: auth_accounts.host_id > auth_hosts.id [delete: cascade] -Table stellar_emumet_accounts { +Table auth_emumet_accounts { emumet_id UUID [not null] - stellar_id UUID [not null, ref: > stellar_accounts.id] + auth_id UUID [not null, ref: > auth_accounts.id] Indexes { - (emumet_id, stellar_id) [pk] + (emumet_id, auth_id) [pk] } } -Ref: stellar_emumet_accounts.emumet_id > accounts.id [delete: cascade] +Ref: auth_emumet_accounts.emumet_id > accounts.id [delete: cascade] Table follows { id UUID [not null, pk] @@ -107,14 +106,14 @@ Table images { // } // // Table moderators { -// stellar_id UUID [not null, pk, ref: > stellar_accounts.id] +// auth_id UUID [not null, pk, ref: > auth_accounts.id] // role_id UUID [not null, pk, ref: > moderator_roles.id] // } // // Table 
account_reports { // id UUID [not null, pk] // target_id UUID [not null, ref: > accounts.id] -// reported_id UUID [not null, ref: > stellar_accounts.id] +// reported_id UUID [not null, ref: > auth_accounts.id] // type TEXT [not null] // comment TEXT [not null] // created_at TIMESTAMPTZ [not null] @@ -125,7 +124,7 @@ Table images { // Table account_moderations { // id UUID [not null, pk] // target_id UUID [not null, ref: > accounts.id] -// moderator_id UUID [not null, ref: > stellar_accounts.id] +// moderator_id UUID [not null, ref: > auth_accounts.id] // type TEXT [not null] // comment TEXT [not null] // created_at TIMESTAMPTZ [not null] @@ -139,17 +138,17 @@ Table images { // } // } // -// Table stellar_account_moderations { +// Table auth_account_moderations { // id UUID [not null, pk] -// target_id UUID [not null, ref: > stellar_accounts.id] -// moderator_id UUID [not null, ref: > stellar_accounts.id] +// target_id UUID [not null, ref: > auth_accounts.id] +// moderator_id UUID [not null, ref: > auth_accounts.id] // type TEXT [not null] // comment TEXT [not null] // created_at TIMESTAMPTZ [not null] // closed_at TIMESTAMPTZ // } -// Table stellar_account_moderation_reports { -// moderation_id UUID [not null, ref: > stellar_account_moderations.id] +// Table auth_account_moderation_reports { +// moderation_id UUID [not null, ref: > auth_account_moderations.id] // report_id UUID [not null, ref: > account_reports.id] // Indexes { // (moderation_id, report_id) [pk] @@ -158,8 +157,8 @@ Table images { // // Table host_moderations { // id UUID [not null, pk] -// host_id UUID [not null, ref: > stellar_hosts.id] -// moderator_id UUID [not null, ref: > stellar_accounts.id] +// host_id UUID [not null, ref: > auth_hosts.id] +// moderator_id UUID [not null, ref: > auth_accounts.id] // type TEXT [not null] // comment TEXT [not null] // created_at TIMESTAMPTZ [not null] diff --git a/migrations/20230707210300_init.sql b/migrations/20230707210300_init.sql index f5256dd..8d3ec73 
100644 --- a/migrations/20230707210300_init.sql +++ b/migrations/20230707210300_init.sql @@ -1,6 +1,6 @@ -- SQL dump generated using DBML (dbml.dbdiagram.io) -- Database: PostgreSQL --- Generated at: 2024-10-24T12:48:43.363Z +-- Generated at: 2025-01-25T10:07:34.134Z CREATE TABLE "event_streams" ( "version" UUID NOT NULL, @@ -18,7 +18,8 @@ CREATE TABLE "accounts" ( "is_bot" BOOLEAN NOT NULL, "deleted_at" TIMESTAMPTZ, "version" UUID NOT NULL, - "nanoid" TEXT UNIQUE NOT NULL + "nanoid" TEXT UNIQUE NOT NULL, + "created_at" TIMESTAMPTZ NOT NULL ); CREATE TABLE "remote_accounts" ( @@ -48,24 +49,22 @@ CREATE TABLE "metadatas" ( "nanoid" TEXT UNIQUE NOT NULL ); -CREATE TABLE "stellar_hosts" ( +CREATE TABLE "auth_hosts" ( "id" UUID PRIMARY KEY NOT NULL, "url" TEXT UNIQUE NOT NULL ); -CREATE TABLE "stellar_accounts" ( +CREATE TABLE "auth_accounts" ( "id" UUID PRIMARY KEY NOT NULL, "host_id" UUID NOT NULL, "client_id" TEXT NOT NULL, - "access_token" TEXT NOT NULL, - "refresh_token" TEXT NOT NULL, "version" UUID NOT NULL ); -CREATE TABLE "stellar_emumet_accounts" ( +CREATE TABLE "auth_emumet_accounts" ( "emumet_id" UUID NOT NULL, - "stellar_id" UUID NOT NULL, - PRIMARY KEY ("emumet_id", "stellar_id") + "auth_id" UUID NOT NULL, + PRIMARY KEY ("emumet_id", "auth_id") ); CREATE TABLE "follows" ( @@ -94,11 +93,11 @@ ALTER TABLE "profiles" ADD FOREIGN KEY ("banner_id") REFERENCES "images" ("id") ALTER TABLE "metadatas" ADD FOREIGN KEY ("account_id") REFERENCES "accounts" ("id") ON DELETE CASCADE; -ALTER TABLE "stellar_accounts" ADD FOREIGN KEY ("host_id") REFERENCES "stellar_hosts" ("id") ON DELETE CASCADE; +ALTER TABLE "auth_accounts" ADD FOREIGN KEY ("host_id") REFERENCES "auth_hosts" ("id") ON DELETE CASCADE; -ALTER TABLE "stellar_emumet_accounts" ADD FOREIGN KEY ("stellar_id") REFERENCES "stellar_accounts" ("id"); +ALTER TABLE "auth_emumet_accounts" ADD FOREIGN KEY ("auth_id") REFERENCES "auth_accounts" ("id"); -ALTER TABLE "stellar_emumet_accounts" ADD FOREIGN KEY 
("emumet_id") REFERENCES "accounts" ("id") ON DELETE CASCADE; +ALTER TABLE "auth_emumet_accounts" ADD FOREIGN KEY ("emumet_id") REFERENCES "accounts" ("id") ON DELETE CASCADE; ALTER TABLE "follows" ADD FOREIGN KEY ("follower_local_id") REFERENCES "accounts" ("id") ON DELETE CASCADE; diff --git a/migrations/20260308000000_account_events.sql b/migrations/20260308000000_account_events.sql new file mode 100644 index 0000000..5d2245d --- /dev/null +++ b/migrations/20260308000000_account_events.sql @@ -0,0 +1,8 @@ +CREATE TABLE "account_events" ( + "version" UUID NOT NULL, + "id" UUID NOT NULL, + "event_name" TEXT NOT NULL, + "data" JSONB NOT NULL, + "occurred_at" TIMESTAMPTZ NOT NULL DEFAULT now(), + PRIMARY KEY ("id", "version") +); diff --git a/migrations/20260309000000_auth_account_events.sql b/migrations/20260309000000_auth_account_events.sql new file mode 100644 index 0000000..066f315 --- /dev/null +++ b/migrations/20260309000000_auth_account_events.sql @@ -0,0 +1,8 @@ +CREATE TABLE "auth_account_events" ( + "version" UUID NOT NULL, + "id" UUID NOT NULL, + "event_name" TEXT NOT NULL, + "data" JSONB NOT NULL, + "occurred_at" TIMESTAMPTZ NOT NULL DEFAULT now(), + PRIMARY KEY ("id", "version") +); diff --git a/migrations/20260310000000_profile_events.sql b/migrations/20260310000000_profile_events.sql new file mode 100644 index 0000000..0ba3a77 --- /dev/null +++ b/migrations/20260310000000_profile_events.sql @@ -0,0 +1,10 @@ +CREATE TABLE "profile_events" ( + "version" UUID NOT NULL, + "id" UUID NOT NULL, + "event_name" TEXT NOT NULL, + "data" JSONB NOT NULL, + "occurred_at" TIMESTAMPTZ NOT NULL DEFAULT now(), + PRIMARY KEY ("id", "version") +); + +ALTER TABLE "profiles" ADD CONSTRAINT "profiles_account_id_unique" UNIQUE ("account_id"); diff --git a/migrations/20260311000000_metadata_events.sql b/migrations/20260311000000_metadata_events.sql new file mode 100644 index 0000000..b763920 --- /dev/null +++ b/migrations/20260311000000_metadata_events.sql @@ -0,0 +1,8 @@ 
+CREATE TABLE "metadata_events" ( + "version" UUID NOT NULL, + "id" UUID NOT NULL, + "event_name" TEXT NOT NULL, + "data" JSONB NOT NULL, + "occurred_at" TIMESTAMPTZ NOT NULL DEFAULT now(), + PRIMARY KEY ("id", "version") +); diff --git a/migrations/gensql.sh b/migrations/gensql.sh index 7d7cb79..d7adc28 100755 --- a/migrations/gensql.sh +++ b/migrations/gensql.sh @@ -1,3 +1,3 @@ #!/usr/bin/env sh -curl -o 20230707210300_init.dbml https://raw.githubusercontent.com/ShuttlePub/document/afd96a4bf572786e336ed43ec7c8ad5cc0696bad/packages/document/dbml/emumet.dbml +curl -o 20230707210300_init.dbml https://raw.githubusercontent.com/ShuttlePub/document/6f36e9cd0eb1d2ec46f06eb9daab52c108e625fd/packages/document/dbml/emumet.dbml pnpm --package=@dbml/cli dlx dbml2sql 20230707210300_init.dbml -o 20230707210300_init.sql \ No newline at end of file diff --git a/ory/hydra/hydra.yml b/ory/hydra/hydra.yml new file mode 100644 index 0000000..cb224c2 --- /dev/null +++ b/ory/hydra/hydra.yml @@ -0,0 +1,31 @@ +serve: + public: + port: 4444 + admin: + port: 4445 + +urls: + self: + issuer: http://localhost:4444/ + login: http://localhost:8080/oauth2/login + consent: http://localhost:8080/oauth2/consent + post_logout_redirect: http://localhost:3000/ + +secrets: + system: + - "dev-secret-do-not-use-in-production-32ch" + +strategies: + access_token: jwt + +oidc: + subject_identifiers: + supported_types: + - public + +log: + level: debug + +ttl: + access_token: 1h + refresh_token: 720h diff --git a/ory/keto/keto.yml b/ory/keto/keto.yml new file mode 100644 index 0000000..904f09b --- /dev/null +++ b/ory/keto/keto.yml @@ -0,0 +1,17 @@ +version: v0.12.0 + +dsn: postgres://postgres:develop@postgres:5432/keto?sslmode=disable + +namespaces: + - id: 0 + name: accounts + - id: 1 + name: instance + +serve: + read: + host: 0.0.0.0 + port: 4466 + write: + host: 0.0.0.0 + port: 4467 diff --git a/ory/kratos/identity.schema.json b/ory/kratos/identity.schema.json new file mode 100644 index 0000000..2cc2952 
--- /dev/null +++ b/ory/kratos/identity.schema.json @@ -0,0 +1,28 @@ +{ + "$id": "https://emumet.shuttlepub.example/identity.schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Identity", + "type": "object", + "properties": { + "traits": { + "type": "object", + "properties": { + "email": { + "type": "string", + "format": "email", + "title": "Email", + "ory.sh/kratos": { + "credentials": { + "password": { + "identifier": true + } + } + } + } + }, + "required": ["email"], + "additionalProperties": false + } + }, + "additionalProperties": false +} diff --git a/ory/kratos/kratos.yml b/ory/kratos/kratos.yml new file mode 100644 index 0000000..d2ba802 --- /dev/null +++ b/ory/kratos/kratos.yml @@ -0,0 +1,33 @@ +serve: + public: + base_url: http://localhost:4433/ + admin: + base_url: http://localhost:4434/ + +selfservice: + default_browser_return_url: http://localhost:3000/ + + flows: + registration: + enabled: true + ui_url: http://localhost:3000/registration + + login: + ui_url: http://localhost:3000/login + + methods: + password: + enabled: true + +identity: + default_schema_id: default + schemas: + - id: default + url: file:///etc/config/kratos/identity.schema.json + +log: + level: debug + +courier: + smtp: + connection_uri: smtp://localhost:1025/?skip_ssl_verify=true diff --git a/ory/kratos/seed-users.json b/ory/kratos/seed-users.json new file mode 100644 index 0000000..0b4a467 --- /dev/null +++ b/ory/kratos/seed-users.json @@ -0,0 +1,14 @@ +{ + "schema_id": "default", + "state": "active", + "traits": { + "email": "testuser@example.com" + }, + "credentials": { + "password": { + "config": { + "password": "testuser" + } + } + } +} diff --git a/server/Cargo.toml b/server/Cargo.toml index a35a846..5bd5a66 100644 --- a/server/Cargo.toml +++ b/server/Cargo.toml @@ -7,6 +7,40 @@ authors.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] +time = { workspace = true } 
+uuid = { workspace = true, features = ["v4"] } +url = { version = "2.5.4", features = [] } +dotenvy = { workspace = true} + +tracing = { workspace = true } +tracing-appender = "0.2.3" +tracing-subscriber = { workspace = true } + +axum = { version = "0.7.4", features = ["json", "tracing"] } +tower = { version = "0.5", features = ["util"] } +tower-http = { version = "0.5.1", features = ["tokio", "cors"] } +tokio = { workspace = true, features = ["rt", "rt-multi-thread", "macros"] } + +jsonwebtoken = "9" +reqwest = { version = "0.12", features = ["json"] } +serde_json = "1" + +rikka-mq = { version = "0.1.3", features = ["redis", "tracing"] } +error-stack = { workspace = true } + +vodca = { workspace = true } +destructure = { workspace = true } +serde = { workspace = true } + +adapter = { path = "../adapter" } application = { path = "../application" } driver = { path = "../driver" } kernel = { path = "../kernel" } + +[dev-dependencies] +rand = "0.8" +rsa = { version = "0.9" } +base64 = "0.22" +wiremock = "0.6" +http-body-util = "0.1" +test-with.workspace = true diff --git a/server/src/applier.rs b/server/src/applier.rs new file mode 100644 index 0000000..20737bd --- /dev/null +++ b/server/src/applier.rs @@ -0,0 +1,46 @@ +use crate::handler::Handler; +use account_applier::AccountApplier; +use auth_account_applier::AuthAccountApplier; +use kernel::interfaces::signal::Signal; +use kernel::prelude::entity::{AccountId, AuthAccountId, MetadataId, ProfileId}; +use metadata_applier::MetadataApplier; +use profile_applier::ProfileApplier; +use std::sync::Arc; + +mod account_applier; +mod auth_account_applier; +mod metadata_applier; +mod profile_applier; + +pub struct ApplierContainer { + account_applier: AccountApplier, + auth_account_applier: AuthAccountApplier, + profile_applier: ProfileApplier, + metadata_applier: MetadataApplier, +} + +impl ApplierContainer { + pub fn new(module: Arc) -> Self { + Self { + account_applier: AccountApplier::new(module.clone()), + 
auth_account_applier: AuthAccountApplier::new(module.clone()), + profile_applier: ProfileApplier::new(module.clone()), + metadata_applier: MetadataApplier::new(module.clone()), + } + } +} + +macro_rules! impl_signal { + ($type:ty, $field:ident) => { + impl Signal<$type> for ApplierContainer { + async fn emit(&self, signal_id: $type) -> error_stack::Result<(), kernel::KernelError> { + self.$field.emit(signal_id).await + } + } + }; +} + +impl_signal!(AccountId, account_applier); +impl_signal!(AuthAccountId, auth_account_applier); +impl_signal!(ProfileId, profile_applier); +impl_signal!(MetadataId, metadata_applier); diff --git a/server/src/applier/account_applier.rs b/server/src/applier/account_applier.rs new file mode 100644 index 0000000..97ff87a --- /dev/null +++ b/server/src/applier/account_applier.rs @@ -0,0 +1,198 @@ +use crate::handler::Handler; +use error_stack::ResultExt; +use kernel::interfaces::database::{DatabaseConnection, DependOnDatabaseConnection}; +use kernel::interfaces::event::EventApplier; +use kernel::interfaces::event_store::{AccountEventStore, DependOnAccountEventStore}; +use kernel::interfaces::read_model::{ + AccountReadModel, DependOnAccountReadModel, DependOnMetadataReadModel, + DependOnProfileReadModel, MetadataReadModel, ProfileReadModel, +}; +use kernel::interfaces::repository::{DependOnFollowRepository, FollowRepository}; +use kernel::interfaces::signal::Signal; +use kernel::prelude::entity::{Account, AccountEvent, AccountId, EventId, FollowTargetId}; +use kernel::KernelError; +use rikka_mq::config::MQConfig; +use rikka_mq::define::redis::mq::RedisMessageQueue; +use rikka_mq::error::ErrorOperation; +use rikka_mq::info::QueueInfo; +use rikka_mq::mq::MessageQueue; +use std::sync::Arc; +use uuid::Uuid; + +pub(crate) struct AccountApplier(RedisMessageQueue, Uuid, AccountId>); + +impl AccountApplier { + pub fn new(handler: Arc) -> Self { + let queue = RedisMessageQueue::new( + handler.redis().pool().clone(), + handler, + 
"account_applier".to_string(), + MQConfig::default(), + Uuid::new_v4, + |handler: Arc, id: AccountId| async move { + let mut tx = handler + .database_connection() + .begin_transaction() + .await + .map_err(|e| ErrorOperation::Delay(format!("{:?}", e)))?; + let event_id = EventId::from(id.clone()); + + // 既存Projection取得 + let existing = handler + .account_read_model() + .find_by_id(&mut tx, &id) + .await + .map_err(|e| ErrorOperation::Delay(format!("{:?}", e)))?; + let since_version = existing.as_ref().map(|a| a.version().clone()); + + // 新規イベント取得 + let events = handler + .account_event_store() + .find_by_id(&mut tx, &event_id, since_version.as_ref()) + .await + .map_err(|e| ErrorOperation::Delay(format!("{:?}", e)))?; + if events.is_empty() { + return Ok(()); + } + + // Created イベントから auth_account_id 抽出 + let mut auth_account_id_for_link = None; + for event in &events { + if let AccountEvent::Created { + auth_account_id, .. + } = &event.event + { + auth_account_id_for_link = Some(auth_account_id.clone()); + } + } + + // イベント適用 + let mut entity = existing; + for event in events { + Account::apply(&mut entity, event) + .map_err(|e| ErrorOperation::Delay(format!("{:?}", e)))?; + } + + // Projection更新 + match (&entity, &since_version) { + (Some(account), None) => { + handler + .account_read_model() + .create(&mut tx, account) + .await + .map_err(|e| ErrorOperation::Delay(format!("{:?}", e)))?; + if let Some(auth_id) = auth_account_id_for_link { + handler + .account_read_model() + .link_auth_account(&mut tx, &id, &auth_id) + .await + .map_err(|e| ErrorOperation::Delay(format!("{:?}", e)))?; + } + } + (Some(account), Some(_)) => { + if account.deleted_at().is_some() { + // Account deactivated: cascade delete related data + handler + .account_read_model() + .deactivate(&mut tx, &id) + .await + .map_err(|e| ErrorOperation::Delay(format!("{:?}", e)))?; + + // Delete profile + if let Some(profile) = handler + .profile_read_model() + .find_by_account_id(&mut tx, &id) + .await 
+ .map_err(|e| ErrorOperation::Delay(format!("{:?}", e)))? + { + handler + .profile_read_model() + .delete(&mut tx, profile.id()) + .await + .map_err(|e| ErrorOperation::Delay(format!("{:?}", e)))?; + } + + // Delete all metadata + let metadata_list = handler + .metadata_read_model() + .find_by_account_id(&mut tx, &id) + .await + .map_err(|e| ErrorOperation::Delay(format!("{:?}", e)))?; + for metadata in &metadata_list { + handler + .metadata_read_model() + .delete(&mut tx, metadata.id()) + .await + .map_err(|e| ErrorOperation::Delay(format!("{:?}", e)))?; + } + + // Delete all follow relationships (as follower and followee) + let target_id = FollowTargetId::from(id.clone()); + let followings = handler + .follow_repository() + .find_followings(&mut tx, &target_id) + .await + .map_err(|e| ErrorOperation::Delay(format!("{:?}", e)))?; + for follow in &followings { + handler + .follow_repository() + .delete(&mut tx, follow.id()) + .await + .map_err(|e| ErrorOperation::Delay(format!("{:?}", e)))?; + } + let followers = handler + .follow_repository() + .find_followers(&mut tx, &target_id) + .await + .map_err(|e| ErrorOperation::Delay(format!("{:?}", e)))?; + for follow in &followers { + handler + .follow_repository() + .delete(&mut tx, follow.id()) + .await + .map_err(|e| ErrorOperation::Delay(format!("{:?}", e)))?; + } + + // Unlink all auth accounts + handler + .account_read_model() + .unlink_all_auth_accounts(&mut tx, &id) + .await + .map_err(|e| ErrorOperation::Delay(format!("{:?}", e)))?; + } else { + handler + .account_read_model() + .update(&mut tx, account) + .await + .map_err(|e| ErrorOperation::Delay(format!("{:?}", e)))?; + } + } + (None, Some(_)) => { + tracing::warn!( + "Account applier: entity became None with existing projection for id {:?} — this should not happen after Deactivated migration", + id + ); + } + (None, None) => { + tracing::warn!( + "Account applier: entity is None with no prior projection for id {:?}", + id + ); + } + } + Ok(()) + }, + ); + 
AccountApplier(queue) + } +} + +impl Signal for AccountApplier { + async fn emit(&self, signal_id: AccountId) -> error_stack::Result<(), KernelError> { + self.0 + .queue(QueueInfo::new(Uuid::new_v4(), signal_id)) + .await + .map_err(|e| error_stack::Report::new(e)) + .change_context_lazy(|| KernelError::Internal) + } +} diff --git a/server/src/applier/auth_account_applier.rs b/server/src/applier/auth_account_applier.rs new file mode 100644 index 0000000..9717393 --- /dev/null +++ b/server/src/applier/auth_account_applier.rs @@ -0,0 +1,41 @@ +use crate::handler::Handler; +use application::service::auth_account::UpdateAuthAccount; +use error_stack::ResultExt; +use kernel::interfaces::signal::Signal; +use kernel::prelude::entity::AuthAccountId; +use rikka_mq::define::redis::mq::RedisMessageQueue; +use rikka_mq::mq::MessageQueue; +use std::sync::Arc; +use uuid::Uuid; + +pub(crate) struct AuthAccountApplier(RedisMessageQueue, Uuid, AuthAccountId>); + +impl AuthAccountApplier { + pub fn new(handler: Arc) -> Self { + let queue = RedisMessageQueue::new( + handler.redis().pool().clone(), + handler, + "auth_account_applier".to_string(), + rikka_mq::config::MQConfig::default(), + Uuid::new_v4, + |handler: Arc, id: AuthAccountId| async move { + handler + .pgpool() + .update_auth_account(id) + .await + .map_err(|e| rikka_mq::error::ErrorOperation::Delay(format!("{:?}", e))) + }, + ); + AuthAccountApplier(queue) + } +} + +impl Signal for AuthAccountApplier { + async fn emit(&self, signal_id: AuthAccountId) -> error_stack::Result<(), kernel::KernelError> { + self.0 + .queue(rikka_mq::info::QueueInfo::new(Uuid::new_v4(), signal_id)) + .await + .map_err(|e| error_stack::Report::new(e)) + .change_context_lazy(|| kernel::KernelError::Internal) + } +} diff --git a/server/src/applier/metadata_applier.rs b/server/src/applier/metadata_applier.rs new file mode 100644 index 0000000..0fe659b --- /dev/null +++ b/server/src/applier/metadata_applier.rs @@ -0,0 +1,41 @@ +use 
crate::handler::Handler; +use application::service::metadata::UpdateMetadata; +use error_stack::ResultExt; +use kernel::interfaces::signal::Signal; +use kernel::prelude::entity::MetadataId; +use rikka_mq::define::redis::mq::RedisMessageQueue; +use rikka_mq::mq::MessageQueue; +use std::sync::Arc; +use uuid::Uuid; + +pub(crate) struct MetadataApplier(RedisMessageQueue, Uuid, MetadataId>); + +impl MetadataApplier { + pub fn new(handler: Arc) -> Self { + let queue = RedisMessageQueue::new( + handler.redis().pool().clone(), + handler, + "metadata_applier".to_string(), + rikka_mq::config::MQConfig::default(), + Uuid::new_v4, + |handler: Arc, id: MetadataId| async move { + handler + .pgpool() + .update_metadata(id) + .await + .map_err(|e| rikka_mq::error::ErrorOperation::Delay(format!("{:?}", e))) + }, + ); + MetadataApplier(queue) + } +} + +impl Signal for MetadataApplier { + async fn emit(&self, signal_id: MetadataId) -> error_stack::Result<(), kernel::KernelError> { + self.0 + .queue(rikka_mq::info::QueueInfo::new(Uuid::new_v4(), signal_id)) + .await + .map_err(|e| error_stack::Report::new(e)) + .change_context_lazy(|| kernel::KernelError::Internal) + } +} diff --git a/server/src/applier/profile_applier.rs b/server/src/applier/profile_applier.rs new file mode 100644 index 0000000..2405813 --- /dev/null +++ b/server/src/applier/profile_applier.rs @@ -0,0 +1,41 @@ +use crate::handler::Handler; +use application::service::profile::UpdateProfile; +use error_stack::ResultExt; +use kernel::interfaces::signal::Signal; +use kernel::prelude::entity::ProfileId; +use rikka_mq::define::redis::mq::RedisMessageQueue; +use rikka_mq::mq::MessageQueue; +use std::sync::Arc; +use uuid::Uuid; + +pub(crate) struct ProfileApplier(RedisMessageQueue, Uuid, ProfileId>); + +impl ProfileApplier { + pub fn new(handler: Arc) -> Self { + let queue = RedisMessageQueue::new( + handler.redis().pool().clone(), + handler, + "profile_applier".to_string(), + rikka_mq::config::MQConfig::default(), + 
Uuid::new_v4, + |handler: Arc, id: ProfileId| async move { + handler + .pgpool() + .update_profile(id) + .await + .map_err(|e| rikka_mq::error::ErrorOperation::Delay(format!("{:?}", e))) + }, + ); + ProfileApplier(queue) + } +} + +impl Signal for ProfileApplier { + async fn emit(&self, signal_id: ProfileId) -> error_stack::Result<(), kernel::KernelError> { + self.0 + .queue(rikka_mq::info::QueueInfo::new(Uuid::new_v4(), signal_id)) + .await + .map_err(|e| error_stack::Report::new(e)) + .change_context_lazy(|| kernel::KernelError::Internal) + } +} diff --git a/server/src/auth.rs b/server/src/auth.rs new file mode 100644 index 0000000..b6dec2d --- /dev/null +++ b/server/src/auth.rs @@ -0,0 +1,645 @@ +use axum::body::Body; +use axum::extract::State; +use axum::http::{Request, StatusCode}; +use axum::middleware::Next; +use axum::response::Response; +use jsonwebtoken::jwk::KeyAlgorithm; +use jsonwebtoken::{decode, decode_header, Algorithm, DecodingKey, Validation}; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use std::time::{Duration, Instant}; +use tokio::sync::{Mutex, RwLock}; + +// --------------------------------------------------------------------------- +// OidcConfig +// --------------------------------------------------------------------------- + +pub struct OidcConfig { + pub issuer_url: String, + pub expected_audience: String, + /// Minimum interval between JWKS re-fetches. Set to 0 in tests. + pub jwks_refetch_interval_secs: u64, +} + +impl OidcConfig { + /// Initialize from environment variables `HYDRA_ISSUER_URL` and `EXPECTED_AUDIENCE`. 
+ pub fn from_env() -> Self { + let issuer_url = dotenvy::var("HYDRA_ISSUER_URL").unwrap_or_else(|_| { + let default = "http://localhost:4444".to_string(); + tracing::warn!("HYDRA_ISSUER_URL not set, using default: {default}"); + default + }); + let expected_audience = dotenvy::var("EXPECTED_AUDIENCE").unwrap_or_else(|_| { + let default = "emumet".to_string(); + tracing::warn!("EXPECTED_AUDIENCE not set, using default: {default}"); + default + }); + Self { + issuer_url, + expected_audience, + jwks_refetch_interval_secs: 300, + } + } +} + +// --------------------------------------------------------------------------- +// AuthClaims +// --------------------------------------------------------------------------- + +/// JWT claims issued by Hydra. +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct AuthClaims { + pub iss: String, + pub sub: String, + /// `aud` may be a single string or an array of strings. + pub aud: OneOrMany, + pub exp: u64, +} + +/// Represents a JSON value that can be either a single string or a list of strings. +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +#[serde(untagged)] +pub enum OneOrMany { + One(String), + Many(Vec), +} + +impl OneOrMany { + pub fn contains(&self, value: &str) -> bool { + match self { + OneOrMany::One(s) => s == value, + OneOrMany::Many(v) => v.iter().any(|s| s == value), + } + } +} + +// --------------------------------------------------------------------------- +// OidcAuthInfo +// --------------------------------------------------------------------------- + +/// Extracted auth info consumed by `resolve_auth_account_id` (task 5.1). 
pub struct OidcAuthInfo {
    /// Hydra issuer URL → used as `AuthHost.url`
    pub issuer: String,
    /// Kratos identity UUID → used as `AuthAccount.client_id`
    pub subject: String,
}

impl From<AuthClaims> for OidcAuthInfo {
    fn from(claims: AuthClaims) -> Self {
        // Only `iss` and `sub` survive into the auth-info view; the other
        // claims (`aud`, `exp`) have already been validated by this point.
        Self {
            issuer: claims.iss,
            subject: claims.sub,
        }
    }
}

// ---------------------------------------------------------------------------
// OIDC Discovery response
// ---------------------------------------------------------------------------

/// Subset of the OIDC discovery document — only the JWKS endpoint is needed.
#[derive(Deserialize)]
struct OidcDiscovery {
    jwks_uri: String,
}

// ---------------------------------------------------------------------------
// JwkSet wrapper (re-export from jsonwebtoken)
// ---------------------------------------------------------------------------

pub use jsonwebtoken::jwk::JwkSet;

// ---------------------------------------------------------------------------
// JwksCache
// ---------------------------------------------------------------------------

/// Mutable cache state, guarded by `JwksCache::inner`.
struct JwksCacheInner {
    jwks: Option<JwkSet>,
    jwks_uri: Option<String>,
    last_fetch: Instant,
}

/// In-memory JWKS cache with rate-limited re-fetch on `kid` miss.
pub struct JwksCache {
    inner: RwLock<JwksCacheInner>,
    /// Serialises refresh attempts to prevent thundering herd.
    refresh_mutex: Mutex<()>,
    issuer_url: String,
    http_client: reqwest::Client,
    min_refetch_interval: Duration,
}

impl JwksCache {
    /// Create an empty cache. The initial `last_fetch` is back-dated past the
    /// refetch interval so the very first key miss is allowed to fetch.
    pub fn new(issuer_url: String, min_refetch_interval: Duration) -> Self {
        // `checked_sub` guards against an Instant underflow shortly after boot.
        let backdated_fetch = Instant::now()
            .checked_sub(min_refetch_interval + Duration::from_secs(1))
            .unwrap_or_else(Instant::now);

        Self {
            inner: RwLock::new(JwksCacheInner {
                jwks: None,
                jwks_uri: None,
                last_fetch: backdated_fetch,
            }),
            refresh_mutex: Mutex::new(()),
            issuer_url,
            http_client: reqwest::Client::new(),
            min_refetch_interval,
        }
    }

    /// Construct a `JwksCache` with a pre-populated `JwkSet` (test helper).
    /// The refetch interval is set to zero so re-fetches are always eligible.
+ pub fn new_with_jwks(issuer_url: String, jwks: JwkSet) -> Self { + let past = Instant::now() + .checked_sub(Duration::from_secs(1)) + .unwrap_or_else(Instant::now); + Self { + inner: RwLock::new(JwksCacheInner { + jwks: Some(jwks), + jwks_uri: None, + last_fetch: past, + }), + refresh_mutex: Mutex::new(()), + issuer_url, + http_client: reqwest::Client::new(), + min_refetch_interval: Duration::from_secs(0), + } + } + + /// Attempt to initialise the cache by performing OIDC Discovery and fetching + /// the JWKS. Failures are logged but do not panic (lazy initialisation). + pub async fn try_init(&self) { + let discovery_url = format!( + "{}/.well-known/openid-configuration", + self.issuer_url.trim_end_matches('/') + ); + + let discovery: OidcDiscovery = match self.http_client.get(&discovery_url).send().await { + Ok(resp) => match resp.json().await { + Ok(d) => d, + Err(e) => { + tracing::warn!("JwksCache: failed to parse OIDC discovery: {e}"); + return; + } + }, + Err(e) => { + tracing::warn!("JwksCache: OIDC discovery request failed ({discovery_url}): {e}"); + return; + } + }; + + let jwks_uri = discovery.jwks_uri.clone(); + self.fetch_jwks_inner(&jwks_uri).await; + // Update jwks_uri after successful discovery. + self.inner.write().await.jwks_uri = Some(jwks_uri); + } + + /// Fetch JWKS from the given URI and update the cache atomically. + async fn fetch_jwks_inner(&self, jwks_uri: &str) { + match self.http_client.get(jwks_uri).send().await { + Ok(resp) => match resp.json::().await { + Ok(jwks) => { + let mut inner = self.inner.write().await; + inner.jwks = Some(jwks); + inner.last_fetch = Instant::now(); + tracing::info!("JwksCache: JWKS refreshed from {jwks_uri}"); + } + Err(e) => { + tracing::warn!("JwksCache: failed to parse JWKS response: {e}"); + } + }, + Err(e) => { + tracing::warn!("JwksCache: JWKS fetch failed ({jwks_uri}): {e}"); + } + } + } + + /// Look up a decoding key by `kid`. 
+ /// + /// If the key is not found **and** the minimum refetch interval has elapsed, + /// the JWKS is re-fetched once before returning. A mutex ensures only one + /// concurrent refresh. + pub async fn get_key(&self, kid: &str) -> Option { + // First attempt: check current cache. + if let Some(key) = self.key_from_cache(kid).await { + return Some(key); + } + + // Acquire refresh mutex to prevent thundering herd. + let _guard = self.refresh_mutex.lock().await; + + // Re-check cache: another task may have refreshed while we waited. + if let Some(key) = self.key_from_cache(kid).await { + return Some(key); + } + + // Check if we are allowed to re-fetch. + let elapsed = self.inner.read().await.last_fetch.elapsed(); + + if elapsed >= self.min_refetch_interval { + tracing::info!("JwksCache: kid '{kid}' not found – re-fetching JWKS"); + + let jwks_uri = self.inner.read().await.jwks_uri.clone(); + + if let Some(uri) = jwks_uri { + self.fetch_jwks_inner(&uri).await; + } else { + self.try_init().await; + } + + // Second attempt after re-fetch. + return self.key_from_cache(kid).await; + } + + None + } + + /// Extract a `DecodingKey` for the given `kid` from the current in-memory cache. + /// Rejects JWKs whose `alg` field does not match RS256. + async fn key_from_cache(&self, kid: &str) -> Option { + let inner = self.inner.read().await; + let jwks = inner.jwks.as_ref()?; + let jwk = jwks.find(kid)?; + if let Some(alg) = &jwk.common.key_algorithm { + if *alg != KeyAlgorithm::RS256 { + tracing::warn!("JwksCache: JWK kid={kid} has unexpected algorithm {alg:?}"); + return None; + } + } + DecodingKey::from_jwk(jwk).ok() + } +} + +// --------------------------------------------------------------------------- +// auth_middleware (axum middleware layer) +// --------------------------------------------------------------------------- + +/// Axum middleware that validates Bearer JWTs and inserts +/// [`Extension`] into the request. 
+/// +/// Usage with router: +/// ```ignore +/// let state = (config, jwks_cache); +/// router.layer(axum::middleware::from_fn_with_state(state, auth_middleware)) +/// ``` +pub async fn auth_middleware( + State((config, jwks_cache)): State<(Arc, Arc)>, + mut request: Request, + next: Next, +) -> Result { + auth_middleware_core(config, jwks_cache, &mut request).await?; + Ok(next.run(request).await) +} + +/// Core middleware logic: extracts and validates the Bearer token, then +/// inserts `Extension` into the request extensions. +async fn auth_middleware_core( + config: Arc, + jwks_cache: Arc, + request: &mut Request, +) -> Result<(), StatusCode> { + // 1. Extract the Bearer token from the Authorization header. + let token = extract_bearer_token(request)?; + + // 2. Decode the JWT header to obtain the `kid`. + let header = decode_header(token).map_err(|e| { + tracing::warn!("auth_middleware: failed to decode JWT header: {e}"); + StatusCode::UNAUTHORIZED + })?; + + let kid = header.kid.ok_or_else(|| { + tracing::warn!("auth_middleware: JWT header missing 'kid'"); + StatusCode::UNAUTHORIZED + })?; + + // 3. Fetch the public key from JWKS cache. + let decoding_key = jwks_cache.get_key(&kid).await.ok_or_else(|| { + tracing::warn!("auth_middleware: no JWKS key found for kid='{kid}'"); + StatusCode::UNAUTHORIZED + })?; + + // 4. Validate the JWT (RS256, iss, aud, exp). + let mut validation = Validation::new(Algorithm::RS256); + validation.set_audience(&[&config.expected_audience]); + validation.set_issuer(&[&config.issuer_url]); + + let token_data = decode::(token, &decoding_key, &validation).map_err(|e| { + tracing::warn!("auth_middleware: JWT validation failed: {e}"); + StatusCode::UNAUTHORIZED + })?; + + // 5. Insert claims as request extension. 
+ request.extensions_mut().insert(token_data.claims); + + Ok(()) +} + +// --------------------------------------------------------------------------- +// resolve_auth_account_id +// --------------------------------------------------------------------------- + +use crate::handler::AppModule; +use adapter::processor::auth_account::{ + AuthAccountCommandProcessor, AuthAccountQueryProcessor, DependOnAuthAccountCommandProcessor, + DependOnAuthAccountQueryProcessor, +}; +use kernel::interfaces::database::{DatabaseConnection, DependOnDatabaseConnection}; +use kernel::interfaces::repository::{AuthHostRepository, DependOnAuthHostRepository}; +use kernel::prelude::entity::{ + AuthAccountClientId, AuthAccountId, AuthHost, AuthHostId, AuthHostUrl, +}; +use kernel::KernelError; + +pub async fn resolve_auth_account_id( + app: &AppModule, + auth_info: OidcAuthInfo, +) -> error_stack::Result { + let client_id = AuthAccountClientId::new(auth_info.subject); + let mut executor = app.database_connection().begin_transaction().await?; + let auth_account = app + .auth_account_query_processor() + .find_by_client_id(&mut executor, &client_id) + .await?; + let auth_account = if let Some(auth_account) = auth_account { + auth_account + } else { + let url = AuthHostUrl::new(auth_info.issuer); + let auth_host = app + .auth_host_repository() + .find_by_url(&mut executor, &url) + .await?; + let auth_host = if let Some(auth_host) = auth_host { + auth_host + } else { + let auth_host = AuthHost::new(AuthHostId::default(), url); + app.auth_host_repository() + .create(&mut executor, &auth_host) + .await?; + auth_host + }; + let host_id = auth_host.into_destruct().id; + app.auth_account_command_processor() + .create(&mut executor, host_id, client_id) + .await? + }; + Ok(auth_account.id().clone()) +} + +/// Extract the raw Bearer token string from the `Authorization` header. 
+fn extract_bearer_token<'a>(request: &'a Request) -> Result<&'a str, StatusCode> { + let header_value = request + .headers() + .get(axum::http::header::AUTHORIZATION) + .ok_or_else(|| { + tracing::warn!("auth_middleware: missing Authorization header"); + StatusCode::UNAUTHORIZED + })?; + + let header_str = header_value.to_str().map_err(|_| { + tracing::warn!("auth_middleware: Authorization header is not valid UTF-8"); + StatusCode::UNAUTHORIZED + })?; + + let token = header_str.strip_prefix("Bearer ").ok_or_else(|| { + tracing::warn!("auth_middleware: Authorization header does not start with 'Bearer '"); + StatusCode::UNAUTHORIZED + })?; + + Ok(token) +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +#[cfg(test)] +mod tests { + use super::*; + use jsonwebtoken::jwk::{ + AlgorithmParameters, CommonParameters, Jwk, JwkSet, KeyAlgorithm, PublicKeyUse, + RSAKeyParameters, + }; + use jsonwebtoken::{encode, EncodingKey, Header}; + use rsa::pkcs1::EncodeRsaPrivateKey; + use rsa::RsaPrivateKey; + use std::time::{SystemTime, UNIX_EPOCH}; + + // ----------------------------------------------------------------------- + // Test helpers + // ----------------------------------------------------------------------- + + fn unix_now() -> u64 { + SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_secs() + } + + struct TestKeys { + encoding_key: EncodingKey, + jwk_set: JwkSet, + kid: String, + } + + /// Generate a fresh 2048-bit RSA key pair and wrap it as a `JwkSet`. 
+ fn generate_test_keys() -> TestKeys { + use base64::{engine::general_purpose::URL_SAFE_NO_PAD, Engine}; + use rsa::traits::PublicKeyParts; + + let mut rng = rand::thread_rng(); + let private_key = RsaPrivateKey::new(&mut rng, 2048).expect("generate RSA key"); + + // PEM → EncodingKey + let pem = private_key + .to_pkcs1_pem(rsa::pkcs8::LineEnding::LF) + .expect("encode pkcs1 pem"); + let encoding_key = + EncodingKey::from_rsa_pem(pem.as_bytes()).expect("parse EncodingKey from PEM"); + + // Build JWK from RSA public key components. + let pub_key = private_key.to_public_key(); + let n = URL_SAFE_NO_PAD.encode(pub_key.n().to_bytes_be()); + let e = URL_SAFE_NO_PAD.encode(pub_key.e().to_bytes_be()); + + let kid = "test-key-1".to_string(); + let jwk = Jwk { + common: CommonParameters { + public_key_use: Some(PublicKeyUse::Signature), + key_id: Some(kid.clone()), + key_algorithm: Some(KeyAlgorithm::RS256), + ..Default::default() + }, + algorithm: AlgorithmParameters::RSA(RSAKeyParameters { + n, + e, + ..Default::default() + }), + }; + let jwk_set = JwkSet { keys: vec![jwk] }; + + TestKeys { + encoding_key, + jwk_set, + kid, + } + } + + fn make_claims(iss: &str, aud: &str, sub: &str, exp_offset_secs: i64) -> AuthClaims { + let exp = (unix_now() as i64 + exp_offset_secs) as u64; + AuthClaims { + iss: iss.to_string(), + sub: sub.to_string(), + aud: OneOrMany::One(aud.to_string()), + exp, + } + } + + fn encode_jwt(claims: &AuthClaims, encoding_key: &EncodingKey, kid: &str) -> String { + let mut header = Header::new(Algorithm::RS256); + header.kid = Some(kid.to_string()); + encode(&header, claims, encoding_key).expect("encode JWT") + } + + fn make_config(issuer: &str, audience: &str) -> Arc { + Arc::new(OidcConfig { + issuer_url: issuer.to_string(), + expected_audience: audience.to_string(), + jwks_refetch_interval_secs: 0, + }) + } + + async fn validate( + config: Arc, + cache: Arc, + token: &str, + ) -> Result { + let mut req: Request = Request::builder() + 
.header("Authorization", format!("Bearer {token}")) + .body(Body::empty()) + .unwrap(); + auth_middleware_core(config, cache, &mut req).await?; + Ok(req.extensions().get::().unwrap().clone()) + } + + // ----------------------------------------------------------------------- + // Test cases + // ----------------------------------------------------------------------- + + #[tokio::test] + async fn valid_jwt_succeeds() { + let keys = generate_test_keys(); + let issuer = "https://hydra.example.com"; + let audience = "emumet"; + let config = make_config(issuer, audience); + let cache = Arc::new(JwksCache::new_with_jwks(issuer.to_string(), keys.jwk_set)); + + let claims = make_claims(issuer, audience, "kratos-uuid-123", 3600); + let token = encode_jwt(&claims, &keys.encoding_key, &keys.kid); + + let result = validate(config, cache, &token).await; + assert!(result.is_ok(), "expected Ok, got {result:?}"); + let decoded = result.unwrap(); + assert_eq!(decoded.sub, "kratos-uuid-123"); + assert_eq!(decoded.iss, issuer); + } + + #[tokio::test] + async fn expired_jwt_fails() { + let keys = generate_test_keys(); + let issuer = "https://hydra.example.com"; + let audience = "emumet"; + let config = make_config(issuer, audience); + let cache = Arc::new(JwksCache::new_with_jwks(issuer.to_string(), keys.jwk_set)); + + // exp well in the past (beyond default 60s leeway) + let claims = make_claims(issuer, audience, "sub", -120); + let token = encode_jwt(&claims, &keys.encoding_key, &keys.kid); + + let result = validate(config, cache, &token).await; + assert_eq!(result, Err(StatusCode::UNAUTHORIZED)); + } + + #[tokio::test] + async fn wrong_audience_fails() { + let keys = generate_test_keys(); + let issuer = "https://hydra.example.com"; + let config = make_config(issuer, "emumet"); + let cache = Arc::new(JwksCache::new_with_jwks(issuer.to_string(), keys.jwk_set)); + + let claims = make_claims(issuer, "wrong-audience", "sub", 3600); + let token = encode_jwt(&claims, &keys.encoding_key, 
&keys.kid); + + let result = validate(config, cache, &token).await; + assert_eq!(result, Err(StatusCode::UNAUTHORIZED)); + } + + #[tokio::test] + async fn wrong_issuer_fails() { + let keys = generate_test_keys(); + let issuer = "https://hydra.example.com"; + let audience = "emumet"; + let config = make_config(issuer, audience); + let cache = Arc::new(JwksCache::new_with_jwks(issuer.to_string(), keys.jwk_set)); + + let claims = make_claims("https://evil-issuer.example.com", audience, "sub", 3600); + let token = encode_jwt(&claims, &keys.encoding_key, &keys.kid); + + let result = validate(config, cache, &token).await; + assert_eq!(result, Err(StatusCode::UNAUTHORIZED)); + } + + #[tokio::test] + async fn missing_authorization_header_fails() { + let keys = generate_test_keys(); + let issuer = "https://hydra.example.com"; + let config = make_config(issuer, "emumet"); + let cache = Arc::new(JwksCache::new_with_jwks(issuer.to_string(), keys.jwk_set)); + + let mut req: Request = Request::builder().body(Body::empty()).unwrap(); + let result = auth_middleware_core(config, cache, &mut req).await; + assert_eq!(result, Err(StatusCode::UNAUTHORIZED)); + } + + #[tokio::test] + async fn wrong_signing_key_fails() { + let keys = generate_test_keys(); + let wrong_keys = generate_test_keys(); // different RSA key pair + let issuer = "https://hydra.example.com"; + let audience = "emumet"; + let config = make_config(issuer, audience); + // Cache has `keys.jwk_set`, but token is signed with `wrong_keys` + let cache = Arc::new(JwksCache::new_with_jwks(issuer.to_string(), keys.jwk_set)); + + let claims = make_claims(issuer, audience, "sub", 3600); + // Sign with wrong key but use the kid from the cached keyset + let token = encode_jwt(&claims, &wrong_keys.encoding_key, &keys.kid); + + let result = validate(config, cache, &token).await; + assert_eq!(result, Err(StatusCode::UNAUTHORIZED)); + } + + #[tokio::test] + async fn oidc_auth_info_from_claims() { + let claims = AuthClaims { + iss: 
"https://hydra.example.com".to_string(), + sub: "kratos-uuid-abc".to_string(), + aud: OneOrMany::One("emumet".to_string()), + exp: unix_now() + 3600, + }; + let info: OidcAuthInfo = claims.into(); + assert_eq!(info.issuer, "https://hydra.example.com"); + assert_eq!(info.subject, "kratos-uuid-abc"); + } + + #[test] + fn one_or_many_variants() { + let one = OneOrMany::One("emumet".to_string()); + assert!(one.contains("emumet")); + assert!(!one.contains("other")); + + let many = OneOrMany::Many(vec!["emumet".to_string(), "other".to_string()]); + assert!(many.contains("emumet")); + assert!(many.contains("other")); + assert!(!many.contains("missing")); + } +} diff --git a/server/src/error.rs b/server/src/error.rs new file mode 100644 index 0000000..f67c7e1 --- /dev/null +++ b/server/src/error.rs @@ -0,0 +1,62 @@ +use axum::http::StatusCode; +use axum::response::IntoResponse; +use error_stack::Report; +use kernel::KernelError; +use std::process::{ExitCode, Termination}; + +#[derive(Debug)] +pub struct StackTrace(Report); + +impl From> for StackTrace { + fn from(e: Report) -> Self { + StackTrace(e) + } +} + +impl Termination for StackTrace { + fn report(self) -> ExitCode { + self.0.report() + } +} + +#[derive(Debug)] +pub enum ErrorStatus { + Report(Report), + StatusCode(StatusCode), + StatusCodeWithMessage(StatusCode, String), +} + +impl From> for ErrorStatus { + fn from(e: Report) -> Self { + ErrorStatus::Report(e) + } +} + +impl From for ErrorStatus { + fn from(code: StatusCode) -> Self { + ErrorStatus::StatusCode(code) + } +} + +impl From<(StatusCode, String)> for ErrorStatus { + fn from((code, message): (StatusCode, String)) -> Self { + ErrorStatus::StatusCodeWithMessage(code, message) + } +} + +impl IntoResponse for ErrorStatus { + fn into_response(self) -> axum::response::Response { + match self { + ErrorStatus::Report(e) => match e.current_context() { + KernelError::Concurrency => StatusCode::CONFLICT, + KernelError::Timeout => StatusCode::REQUEST_TIMEOUT, + 
KernelError::Internal => StatusCode::INTERNAL_SERVER_ERROR, + KernelError::PermissionDenied => StatusCode::FORBIDDEN, + KernelError::NotFound => StatusCode::NOT_FOUND, + } + .into_response(), + ErrorStatus::StatusCode(code) => code.into_response(), + ErrorStatus::StatusCodeWithMessage(code, message) => (code, message).into_response(), + } + } +} diff --git a/server/src/handler.rs b/server/src/handler.rs new file mode 100644 index 0000000..60b72fb --- /dev/null +++ b/server/src/handler.rs @@ -0,0 +1,374 @@ +use crate::applier::ApplierContainer; +use crate::hydra::HydraAdminClient; +use crate::kratos::KratosClient; +use adapter::processor::account::DependOnAccountSignal; +use adapter::processor::auth_account::DependOnAuthAccountSignal; +use adapter::processor::metadata::DependOnMetadataSignal; +use adapter::processor::profile::DependOnProfileSignal; +use driver::crypto::{ + Argon2Encryptor, FilePasswordProvider, Rsa2048RawGenerator, Rsa2048Signer, Rsa2048Verifier, +}; +use driver::database::{PostgresDatabase, RedisDatabase}; +use driver::keto::KetoClient; +use kernel::interfaces::crypto::{ + DependOnKeyEncryptor, DependOnPasswordProvider, DependOnRawKeyGenerator, + DependOnSignatureVerifier, DependOnSigner, +}; +use kernel::interfaces::database::DependOnDatabaseConnection; +use kernel::interfaces::permission::{DependOnPermissionChecker, DependOnPermissionWriter}; +use kernel::KernelError; +use std::sync::Arc; +use vodca::References; + +#[derive(Clone, References)] +pub struct AppModule { + handler: Arc, + applier_container: Arc, +} + +impl AppModule { + pub async fn new() -> error_stack::Result { + let handler = Arc::new(Handler::init().await?); + let applier_container = Arc::new(ApplierContainer::new(handler.clone())); + Ok(Self { + handler, + applier_container, + }) + } + + pub fn hydra_admin_client(&self) -> &HydraAdminClient { + &self.handler.hydra_admin_client + } + + pub fn kratos_client(&self) -> &KratosClient { + &self.handler.kratos_client + } +} + +// --- 
DependOn* implementations for AppModule (delegate to handler/applier_container) --- + +impl kernel::interfaces::database::DependOnDatabaseConnection for AppModule { + type DatabaseConnection = PostgresDatabase; + fn database_connection(&self) -> &Self::DatabaseConnection { + self.handler.as_ref().database_connection() + } +} + +impl kernel::interfaces::read_model::DependOnAccountReadModel for AppModule { + type AccountReadModel = ::AccountReadModel; + fn account_read_model(&self) -> &Self::AccountReadModel { + kernel::interfaces::read_model::DependOnAccountReadModel::account_read_model( + self.handler.as_ref().database_connection(), + ) + } +} + +impl kernel::interfaces::event_store::DependOnAccountEventStore for AppModule { + type AccountEventStore = ::AccountEventStore; + fn account_event_store(&self) -> &Self::AccountEventStore { + kernel::interfaces::event_store::DependOnAccountEventStore::account_event_store( + self.handler.as_ref().database_connection(), + ) + } +} + +impl DependOnPasswordProvider for AppModule { + type PasswordProvider = FilePasswordProvider; + fn password_provider(&self) -> &Self::PasswordProvider { + self.handler.as_ref().password_provider() + } +} + +impl DependOnRawKeyGenerator for AppModule { + type RawKeyGenerator = Rsa2048RawGenerator; + fn raw_key_generator(&self) -> &Self::RawKeyGenerator { + self.handler.as_ref().raw_key_generator() + } +} + +impl DependOnKeyEncryptor for AppModule { + type KeyEncryptor = Argon2Encryptor; + fn key_encryptor(&self) -> &Self::KeyEncryptor { + self.handler.as_ref().key_encryptor() + } +} + +impl DependOnAccountSignal for AppModule { + type AccountSignal = ApplierContainer; + fn account_signal(&self) -> &Self::AccountSignal { + &self.applier_container + } +} + +impl DependOnAuthAccountSignal for AppModule { + type AuthAccountSignal = ApplierContainer; + fn auth_account_signal(&self) -> &Self::AuthAccountSignal { + &self.applier_container + } +} + +impl 
kernel::interfaces::read_model::DependOnAuthAccountReadModel for AppModule { + type AuthAccountReadModel = ::AuthAccountReadModel; + fn auth_account_read_model(&self) -> &Self::AuthAccountReadModel { + kernel::interfaces::read_model::DependOnAuthAccountReadModel::auth_account_read_model( + self.handler.as_ref().database_connection(), + ) + } +} + +impl kernel::interfaces::event_store::DependOnAuthAccountEventStore for AppModule { + type AuthAccountEventStore = ::AuthAccountEventStore; + fn auth_account_event_store(&self) -> &Self::AuthAccountEventStore { + kernel::interfaces::event_store::DependOnAuthAccountEventStore::auth_account_event_store( + self.handler.as_ref().database_connection(), + ) + } +} + +impl kernel::interfaces::read_model::DependOnProfileReadModel for AppModule { + type ProfileReadModel = ::ProfileReadModel; + fn profile_read_model(&self) -> &Self::ProfileReadModel { + kernel::interfaces::read_model::DependOnProfileReadModel::profile_read_model( + self.handler.as_ref().database_connection(), + ) + } +} + +impl kernel::interfaces::event_store::DependOnProfileEventStore for AppModule { + type ProfileEventStore = ::ProfileEventStore; + fn profile_event_store(&self) -> &Self::ProfileEventStore { + kernel::interfaces::event_store::DependOnProfileEventStore::profile_event_store( + self.handler.as_ref().database_connection(), + ) + } +} + +impl DependOnProfileSignal for AppModule { + type ProfileSignal = ApplierContainer; + fn profile_signal(&self) -> &Self::ProfileSignal { + &self.applier_container + } +} + +impl kernel::interfaces::read_model::DependOnMetadataReadModel for AppModule { + type MetadataReadModel = ::MetadataReadModel; + fn metadata_read_model(&self) -> &Self::MetadataReadModel { + kernel::interfaces::read_model::DependOnMetadataReadModel::metadata_read_model( + self.handler.as_ref().database_connection(), + ) + } +} + +impl kernel::interfaces::event_store::DependOnMetadataEventStore for AppModule { + type MetadataEventStore = 
::MetadataEventStore; + fn metadata_event_store(&self) -> &Self::MetadataEventStore { + kernel::interfaces::event_store::DependOnMetadataEventStore::metadata_event_store( + self.handler.as_ref().database_connection(), + ) + } +} + +impl DependOnMetadataSignal for AppModule { + type MetadataSignal = ApplierContainer; + fn metadata_signal(&self) -> &Self::MetadataSignal { + &self.applier_container + } +} + +impl kernel::interfaces::repository::DependOnAuthHostRepository for AppModule { + type AuthHostRepository = + ::AuthHostRepository; + fn auth_host_repository(&self) -> &Self::AuthHostRepository { + kernel::interfaces::repository::DependOnAuthHostRepository::auth_host_repository( + self.handler.as_ref().database_connection(), + ) + } +} + +impl kernel::interfaces::repository::DependOnFollowRepository for AppModule { + type FollowRepository = + ::FollowRepository; + fn follow_repository(&self) -> &Self::FollowRepository { + kernel::interfaces::repository::DependOnFollowRepository::follow_repository( + self.handler.as_ref().database_connection(), + ) + } +} + +impl kernel::interfaces::repository::DependOnRemoteAccountRepository for AppModule { + type RemoteAccountRepository = + ::RemoteAccountRepository; + fn remote_account_repository(&self) -> &Self::RemoteAccountRepository { + kernel::interfaces::repository::DependOnRemoteAccountRepository::remote_account_repository( + self.handler.as_ref().database_connection(), + ) + } +} + +impl kernel::interfaces::repository::DependOnImageRepository for AppModule { + type ImageRepository = + ::ImageRepository; + fn image_repository(&self) -> &Self::ImageRepository { + kernel::interfaces::repository::DependOnImageRepository::image_repository( + self.handler.as_ref().database_connection(), + ) + } +} + +impl DependOnPermissionChecker for AppModule { + type PermissionChecker = KetoClient; + fn permission_checker(&self) -> &Self::PermissionChecker { + self.handler.as_ref().permission_checker() + } +} + +impl 
DependOnPermissionWriter for AppModule { + type PermissionWriter = KetoClient; + fn permission_writer(&self) -> &Self::PermissionWriter { + self.handler.as_ref().permission_writer() + } +} + +// Note: DependOnSigningKeyGenerator, DependOnAccountCommandProcessor, +// DependOnAccountQueryProcessor, and all UseCase traits are provided +// automatically via blanket impls in adapter. + +/// Aggregates every concrete infrastructure dependency (databases, crypto +/// providers, Ory clients) that the DependOn* impls above delegate to. +#[derive(References)] +pub struct Handler { + pgpool: PostgresDatabase, + redis: RedisDatabase, + // Crypto providers + password_provider: FilePasswordProvider, + raw_key_generator: Rsa2048RawGenerator, + key_encryptor: Argon2Encryptor, + signer: Rsa2048Signer, + verifier: Rsa2048Verifier, + // Ory clients + pub(crate) hydra_admin_client: HydraAdminClient, + pub(crate) kratos_client: KratosClient, + keto_client: KetoClient, +} + +impl Handler { + /// Builds a Handler with service URLs taken from env vars, falling back to + /// local-dev defaults (Hydra admin :4445, Kratos public :4433, Keto read/write :4466/:4467). + pub async fn init() -> error_stack::Result { + let hydra_admin_url = + dotenvy::var("HYDRA_ADMIN_URL").unwrap_or_else(|_| "http://localhost:4445".to_string()); + let kratos_public_url = dotenvy::var("KRATOS_PUBLIC_URL") + .unwrap_or_else(|_| "http://localhost:4433".to_string()); + let keto_read_url = + dotenvy::var("KETO_READ_URL").unwrap_or_else(|_| "http://localhost:4466".to_string()); + let keto_write_url = + dotenvy::var("KETO_WRITE_URL").unwrap_or_else(|_| "http://localhost:4467".to_string()); + Self::init_with_urls( + hydra_admin_url, + kratos_public_url, + keto_read_url, + keto_write_url, + ) + .await + } + + /// Shared constructor: connects to Postgres/Redis and wires up the crypto + /// providers and Ory (Hydra/Kratos/Keto) clients from explicit URLs. + async fn init_with_urls( + hydra_admin_url: String, + kratos_public_url: String, + keto_read_url: String, + keto_write_url: String, + ) -> error_stack::Result { + let pgpool = PostgresDatabase::new().await?; + let redis = RedisDatabase::new()?; + Ok(Self { + pgpool, + redis, + password_provider: FilePasswordProvider::new(), + raw_key_generator: Rsa2048RawGenerator, + key_encryptor: Argon2Encryptor::default(), + signer: Rsa2048Signer, + verifier: Rsa2048Verifier, + hydra_admin_client: HydraAdminClient::new(hydra_admin_url), + 
kratos_client: KratosClient::new(kratos_public_url), + keto_client: KetoClient::new(keto_read_url, keto_write_url), + }) + } +} + +#[cfg(test)] +impl AppModule { + pub(crate) async fn new_for_oauth2_test( + hydra_admin_url: String, + kratos_public_url: String, + ) -> error_stack::Result { + let keto_read_url = + dotenvy::var("KETO_READ_URL").unwrap_or_else(|_| "http://localhost:4466".to_string()); + let keto_write_url = + dotenvy::var("KETO_WRITE_URL").unwrap_or_else(|_| "http://localhost:4467".to_string()); + let handler = Arc::new( + Handler::init_with_urls( + hydra_admin_url, + kratos_public_url, + keto_read_url, + keto_write_url, + ) + .await?, + ); + let applier_container = Arc::new(ApplierContainer::new(handler.clone())); + Ok(Self { + handler, + applier_container, + }) + } +} + +// --- Database DI implementations (via macro) --- + +kernel::impl_database_delegation!(Handler, pgpool, PostgresDatabase); + +// --- Crypto DI implementations --- + +impl DependOnPasswordProvider for Handler { + type PasswordProvider = FilePasswordProvider; + fn password_provider(&self) -> &Self::PasswordProvider { + &self.password_provider + } +} + +impl DependOnRawKeyGenerator for Handler { + type RawKeyGenerator = Rsa2048RawGenerator; + fn raw_key_generator(&self) -> &Self::RawKeyGenerator { + &self.raw_key_generator + } +} + +impl DependOnKeyEncryptor for Handler { + type KeyEncryptor = Argon2Encryptor; + fn key_encryptor(&self) -> &Self::KeyEncryptor { + &self.key_encryptor + } +} + +impl DependOnSigner for Handler { + type Signer = Rsa2048Signer; + fn signer(&self) -> &Self::Signer { + &self.signer + } +} + +impl DependOnSignatureVerifier for Handler { + type SignatureVerifier = Rsa2048Verifier; + fn signature_verifier(&self) -> &Self::SignatureVerifier { + &self.verifier + } +} + +impl DependOnPermissionChecker for Handler { + type PermissionChecker = KetoClient; + fn permission_checker(&self) -> &Self::PermissionChecker { + &self.keto_client + } +} + +impl 
DependOnPermissionWriter for Handler { + type PermissionWriter = KetoClient; + fn permission_writer(&self) -> &Self::PermissionWriter { + &self.keto_client + } +} diff --git a/server/src/hydra.rs b/server/src/hydra.rs new file mode 100644 index 0000000..e9bdd3b --- /dev/null +++ b/server/src/hydra.rs @@ -0,0 +1,212 @@ +use reqwest::Client; +use serde::{Deserialize, Serialize}; +use url::Url; + +pub struct HydraAdminClient { + admin_url: String, + http_client: Client, +} + +impl HydraAdminClient { + /// Create a new HydraAdminClient. Panics if `admin_url` is not a valid URL. + pub fn new(admin_url: String) -> Self { + let admin_url = admin_url.trim_end_matches('/').to_string(); + // Validate URL at construction time to fail fast. + Url::parse(&admin_url) + .unwrap_or_else(|e| panic!("HYDRA_ADMIN_URL is not a valid URL ({admin_url}): {e}")); + Self { + admin_url, + http_client: Client::new(), + } + } + + /// Build a URL with a properly encoded challenge query parameter. + /// The base URL was validated in `new()`, so `parse_with_params` will not fail. + fn build_url(&self, path: &str, param_name: &str, challenge: &str) -> Url { + let base = format!("{}{}", self.admin_url, path); + Url::parse_with_params(&base, &[(param_name, challenge)]) + .expect("base URL was validated at construction time") + } + + pub async fn get_login_request(&self, challenge: &str) -> Result { + let url = self.build_url( + "/admin/oauth2/auth/requests/login", + "login_challenge", + challenge, + ); + self.http_client + .get(url) + .send() + .await? + .error_for_status()? + .json::() + .await + } + + pub async fn accept_login( + &self, + challenge: &str, + body: &AcceptLoginRequest, + ) -> Result { + let url = self.build_url( + "/admin/oauth2/auth/requests/login/accept", + "login_challenge", + challenge, + ); + self.http_client + .put(url) + .json(body) + .send() + .await? + .error_for_status()? 
+ .json::() + .await + } + + pub async fn reject_login( + &self, + challenge: &str, + body: &RejectRequest, + ) -> Result { + let url = self.build_url( + "/admin/oauth2/auth/requests/login/reject", + "login_challenge", + challenge, + ); + self.http_client + .put(url) + .json(body) + .send() + .await? + .error_for_status()? + .json::() + .await + } + + pub async fn get_consent_request( + &self, + challenge: &str, + ) -> Result { + let url = self.build_url( + "/admin/oauth2/auth/requests/consent", + "consent_challenge", + challenge, + ); + self.http_client + .get(url) + .send() + .await? + .error_for_status()? + .json::() + .await + } + + pub async fn accept_consent( + &self, + challenge: &str, + body: &AcceptConsentRequest, + ) -> Result { + let url = self.build_url( + "/admin/oauth2/auth/requests/consent/accept", + "consent_challenge", + challenge, + ); + self.http_client + .put(url) + .json(body) + .send() + .await? + .error_for_status()? + .json::() + .await + } + + pub async fn reject_consent( + &self, + challenge: &str, + body: &RejectRequest, + ) -> Result { + let url = self.build_url( + "/admin/oauth2/auth/requests/consent/reject", + "consent_challenge", + challenge, + ); + self.http_client + .put(url) + .json(body) + .send() + .await? + .error_for_status()? 
+ .json::() + .await + } +} + +#[derive(Debug, Deserialize)] +pub struct LoginRequest { + pub challenge: String, + pub skip: bool, + pub subject: String, + pub client: Option, + pub requested_scope: Vec, + pub requested_access_token_audience: Vec, + pub request_url: String, +} + +#[derive(Debug, Deserialize)] +pub struct OAuth2Client { + pub client_id: Option, + pub client_name: Option, + pub skip_consent: Option, +} + +#[derive(Debug, Serialize)] +pub struct AcceptLoginRequest { + pub subject: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub remember: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub remember_for: Option, +} + +#[derive(Debug, Deserialize)] +pub struct ConsentRequest { + pub challenge: String, + pub skip: bool, + pub subject: String, + pub client: Option, + pub requested_scope: Vec, + pub requested_access_token_audience: Vec, +} + +#[derive(Debug, Serialize)] +pub struct AcceptConsentRequest { + pub grant_scope: Vec, + pub grant_access_token_audience: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + pub remember: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub remember_for: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub session: Option, +} + +#[derive(Debug, Serialize)] +pub struct ConsentSession { + #[serde(skip_serializing_if = "Option::is_none")] + pub access_token: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub id_token: Option, +} + +#[derive(Debug, Serialize)] +pub struct RejectRequest { + pub error: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub error_description: Option, +} + +#[derive(Debug, Deserialize)] +pub struct RedirectResponse { + pub redirect_to: String, +} diff --git a/server/src/kratos.rs b/server/src/kratos.rs new file mode 100644 index 0000000..ed883a5 --- /dev/null +++ b/server/src/kratos.rs @@ -0,0 +1,70 @@ +use reqwest::Client; +use serde::Deserialize; +use url::Url; + +pub struct KratosClient { + 
public_url: String, + http_client: Client, +} + +impl KratosClient { + /// Create a new KratosClient. Panics if `public_url` is not a valid URL. + pub fn new(public_url: String) -> Self { + let public_url = public_url.trim_end_matches('/').to_string(); + Url::parse(&public_url) + .unwrap_or_else(|e| panic!("KRATOS_PUBLIC_URL is not a valid URL ({public_url}): {e}")); + Self { + public_url, + http_client: Client::new(), + } + } + + /// Calls the Kratos `/sessions/whoami` endpoint and returns the + /// session if a valid (active) one exists. + /// Returns `None` when there is no session (HTTP 401) or it is inactive. + pub async fn whoami(&self, cookie: &str) -> Result, reqwest::Error> { + let url = format!("{}/sessions/whoami", self.public_url); + tracing::debug!("Calling Kratos whoami: url={url}"); + + // Forward the caller's cookies so Kratos can resolve the browser session. + let response = self + .http_client + .get(&url) + .header("cookie", cookie) + .send() + .await?; + + let status = response.status(); + tracing::debug!("Kratos whoami response: status={status}"); + + // 401 means "no session" — that is a normal outcome here, not an error. + if status == reqwest::StatusCode::UNAUTHORIZED { + return Ok(None); + } + + let session = response.error_for_status()?.json::().await?; + tracing::debug!( + "Kratos whoami session: id={}, active={}", + session.id, + session.active + ); + if !session.active { + tracing::debug!("Kratos whoami: session is not active, treating as unauthenticated"); + return Ok(None); + } + Ok(Some(session)) + } +} + +/// Subset of the Kratos session object returned by `/sessions/whoami`. +#[derive(Debug, Clone, Deserialize)] +pub struct KratosSession { + pub id: String, + // Defaults to false so a missing field is treated as unauthenticated. + #[serde(default)] + pub active: bool, + pub identity: KratosIdentity, +} + +/// The Kratos identity attached to a session; `id` is the identity UUID. +#[derive(Debug, Clone, Deserialize)] +pub struct KratosIdentity { + pub id: String, + #[serde(default)] + pub traits: serde_json::Value, +} diff --git a/server/src/main.rs b/server/src/main.rs index e7a11a9..64ebd5a 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -1,3 +1,90 @@ -fn main() { - println!("Hello, world!"); +mod applier; +mod auth; +mod error; +mod handler; +mod hydra; +mod kratos; +mod route; + +use crate::auth::{JwksCache, OidcConfig}; +use crate::error::StackTrace; 
+use crate::handler::AppModule; +use crate::route::account::AccountRouter; +use crate::route::metadata::MetadataRouter; +use crate::route::oauth2::OAuth2Router; +use crate::route::profile::ProfileRouter; +use error_stack::ResultExt; +use kernel::KernelError; +use std::net::SocketAddr; +use std::sync::Arc; +use std::time::Duration; +use tokio::net::TcpListener; +use tower_http::cors::CorsLayer; +use tracing_subscriber::layer::SubscriberExt; +use tracing_subscriber::util::SubscriberInitExt; +use tracing_subscriber::Layer; + +#[tokio::main] +async fn main() -> Result<(), StackTrace> { + let appender = tracing_appender::rolling::daily(std::path::Path::new("./logs/"), "debug.log"); + let (non_blocking_appender, _guard) = tracing_appender::non_blocking(appender); + tracing_subscriber::registry() + .with( + tracing_subscriber::fmt::layer() + .with_filter(tracing_subscriber::EnvFilter::new( + std::env::var("RUST_LOG").unwrap_or_else(|_| { + "driver=debug,server=debug,tower_http=debug,hyper=debug,sqlx=debug".into() + }), + )) + .with_filter(tracing_subscriber::filter::LevelFilter::DEBUG), + ) + .with( + tracing_subscriber::fmt::Layer::default() + .with_writer(non_blocking_appender) + .with_ansi(false) + .with_filter(tracing_subscriber::filter::LevelFilter::DEBUG), + ) + .init(); + + // OIDC / JWT auth setup + let oidc_config = OidcConfig::from_env(); + let jwks_cache = Arc::new(JwksCache::new( + oidc_config.issuer_url.clone(), + Duration::from_secs(oidc_config.jwks_refetch_interval_secs), + )); + // Attempt eager JWKS init (non-fatal if Hydra is not yet available). 
+ jwks_cache.try_init().await; + let oidc_config = Arc::new(oidc_config); + + let app = AppModule::new().await?; + + // Routes that require JWT auth + let authed_routes = axum::Router::new() + .route_account() + .route_profile() + .route_metadata() + .layer(axum::middleware::from_fn_with_state( + (oidc_config, jwks_cache), + auth::auth_middleware, + )); + + // Routes that do NOT require JWT auth (OAuth2 Login/Consent Provider) + let public_routes = axum::Router::new().route_oauth2(); + + // NOTE(review): CorsLayer::new() is the most restrictive default (no origins + // allowed) — confirm this is the intended CORS policy for browser clients. + let router = authed_routes + .merge(public_routes) + .layer(CorsLayer::new()) + .with_state(app); + + let bind = SocketAddr::from(([0, 0, 0, 0], 8080)); + let tcp = TcpListener::bind(bind) + .await + .change_context_lazy(|| KernelError::Internal) + .attach_printable_lazy(|| "Failed to bind to port 8080")?; + + axum::serve(tcp, router.into_make_service()) + .await + .change_context_lazy(|| KernelError::Internal)?; + + Ok(()) } diff --git a/server/src/route.rs b/server/src/route.rs new file mode 100644 index 0000000..50a788a --- /dev/null +++ b/server/src/route.rs @@ -0,0 +1,47 @@ +use crate::error::ErrorStatus; +use application::transfer::pagination::Direction; +use axum::http::StatusCode; + +pub mod account; +pub mod metadata; +pub mod oauth2; +pub mod profile; + +/// Maximum number of IDs accepted in one comma-separated batch query. +const MAX_BATCH_SIZE: usize = 100; + +/// Splits a comma-separated ID list, trimming whitespace and dropping empty +/// segments. Returns 400 Bad Request when the result is empty or exceeds +/// `MAX_BATCH_SIZE`. +fn parse_comma_ids(raw: &str) -> Result, ErrorStatus> { + let ids: Vec = raw + .split(',') + .map(|s| s.trim().to_string()) + .filter(|s| !s.is_empty()) + .collect(); + if ids.is_empty() { + return Err(ErrorStatus::from(( + StatusCode::BAD_REQUEST, + "ID list cannot be empty".to_string(), + ))); + } + if ids.len() > MAX_BATCH_SIZE { + return Err(ErrorStatus::from(( + StatusCode::BAD_REQUEST, + format!("Too many IDs: maximum is {MAX_BATCH_SIZE}"), + ))); + } + Ok(ids) +} + +/// Converts an optional query-string value into a pagination `Direction`. +trait DirectionConverter { + fn convert_to_direction(self) -> Result; +} + +impl DirectionConverter for Option { + fn convert_to_direction(self) -> Result { + match self { + Some(d) => match Direction::try_from(d) { 
+ Ok(d) => Ok(d), + Err(message) => Err((StatusCode::BAD_REQUEST, message).into()), + }, + None => Ok(Direction::default()), + } + } +} diff --git a/server/src/route/account.rs b/server/src/route/account.rs new file mode 100644 index 0000000..21489d7 --- /dev/null +++ b/server/src/route/account.rs @@ -0,0 +1,206 @@ +use crate::auth::{resolve_auth_account_id, AuthClaims, OidcAuthInfo}; +use crate::error::ErrorStatus; +use crate::handler::AppModule; +use crate::route::{parse_comma_ids, DirectionConverter}; +use application::service::account::{ + CreateAccountUseCase, DeactivateAccountUseCase, EditAccountUseCase, GetAccountUseCase, +}; +use application::transfer::pagination::Pagination; +use axum::extract::{Path, Query, State}; +use axum::http::StatusCode; +use axum::routing::{delete, get, post, put}; +use axum::{Extension, Json, Router}; +use serde::{Deserialize, Serialize}; +use time::OffsetDateTime; + +#[derive(Debug, Deserialize)] +struct GetAllAccountQuery { + ids: Option, + limit: Option, + cursor: Option, + direction: Option, +} + +#[derive(Debug, Deserialize)] +struct CreateAccountRequest { + name: String, + is_bot: bool, +} + +#[derive(Debug, Deserialize)] +struct UpdateAccountRequest { + is_bot: bool, +} + +#[derive(Debug, Serialize)] +struct AccountResponse { + id: String, + name: String, + public_key: String, + is_bot: bool, + created_at: OffsetDateTime, +} + +#[derive(Debug, Serialize)] +struct AccountsResponse { + first: Option, + last: Option, + items: Vec, +} + +pub trait AccountRouter { + fn route_account(self) -> Self; +} + +async fn get_accounts( + Extension(claims): Extension, + State(module): State, + Query(GetAllAccountQuery { + ids, + direction, + limit, + cursor, + }): Query, +) -> Result, ErrorStatus> { + let auth_info = OidcAuthInfo::from(claims); + let auth_account_id = resolve_auth_account_id(&module, auth_info) + .await + .map_err(ErrorStatus::from)?; + + let result = if let Some(ids) = ids { + if limit.is_some() || cursor.is_some() || 
direction.is_some() { + return Err(ErrorStatus::from(( + StatusCode::BAD_REQUEST, + "Cannot use ids with pagination parameters".to_string(), + ))); + } + let id_list = parse_comma_ids(&ids)?; + module + .get_accounts_by_ids(&auth_account_id, id_list) + .await + .map_err(ErrorStatus::from)? + } else { + let direction = direction.convert_to_direction()?; + let pagination = Pagination::new(limit, cursor, direction); + module + .get_all_accounts(&auth_account_id, pagination) + .await + .map_err(ErrorStatus::from)? + .ok_or(ErrorStatus::from(StatusCode::NOT_FOUND))? + }; + + if result.is_empty() { + return Err(ErrorStatus::from(StatusCode::NOT_FOUND)); + } + let response = AccountsResponse { + first: result.first().map(|account| account.nanoid.clone()), + last: result.last().map(|account| account.nanoid.clone()), + items: result + .into_iter() + .map(|account| AccountResponse { + id: account.nanoid, + name: account.name, + public_key: account.public_key, + is_bot: account.is_bot, + created_at: account.created_at, + }) + .collect(), + }; + Ok(Json(response)) +} + +async fn create_account( + Extension(claims): Extension, + State(module): State, + Json(request): Json, +) -> Result, ErrorStatus> { + let auth_info = OidcAuthInfo::from(claims); + + if request.name.trim().is_empty() { + return Err(ErrorStatus::from(( + StatusCode::BAD_REQUEST, + "Account name cannot be empty".to_string(), + ))); + } + + let auth_account_id = resolve_auth_account_id(&module, auth_info) + .await + .map_err(ErrorStatus::from)?; + + let account = module + .create_account(auth_account_id, request.name, request.is_bot) + .await + .map_err(ErrorStatus::from)?; + + let response = AccountResponse { + id: account.nanoid, + name: account.name, + public_key: account.public_key, + is_bot: account.is_bot, + created_at: account.created_at, + }; + + Ok(Json(response)) +} + +async fn update_account_by_id( + Extension(claims): Extension, + State(module): State, + Path(id): Path, + Json(request): Json, +) -> 
Result { + let auth_info = OidcAuthInfo::from(claims); + + if id.trim().is_empty() { + return Err(ErrorStatus::from(( + StatusCode::BAD_REQUEST, + "Account ID cannot be empty".to_string(), + ))); + } + + let auth_account_id = resolve_auth_account_id(&module, auth_info) + .await + .map_err(ErrorStatus::from)?; + + module + .edit_account(&auth_account_id, id, request.is_bot) + .await + .map_err(ErrorStatus::from)?; + + Ok(StatusCode::NO_CONTENT) +} + +async fn deactivate_account_by_id( + Extension(claims): Extension, + State(module): State, + Path(id): Path, +) -> Result { + let auth_info = OidcAuthInfo::from(claims); + + if id.trim().is_empty() { + return Err(ErrorStatus::from(( + StatusCode::BAD_REQUEST, + "Account ID cannot be empty".to_string(), + ))); + } + + let auth_account_id = resolve_auth_account_id(&module, auth_info) + .await + .map_err(ErrorStatus::from)?; + + module + .deactivate_account(&auth_account_id, id) + .await + .map_err(ErrorStatus::from)?; + + Ok(StatusCode::NO_CONTENT) +} + +impl AccountRouter for Router { + fn route_account(self) -> Self { + self.route("/accounts", get(get_accounts)) + .route("/accounts", post(create_account)) + .route("/accounts/:id", put(update_account_by_id)) + .route("/accounts/:id", delete(deactivate_account_by_id)) + } +} diff --git a/server/src/route/metadata.rs b/server/src/route/metadata.rs new file mode 100644 index 0000000..6e56321 --- /dev/null +++ b/server/src/route/metadata.rs @@ -0,0 +1,212 @@ +use crate::auth::{resolve_auth_account_id, AuthClaims, OidcAuthInfo}; +use crate::error::ErrorStatus; +use crate::handler::AppModule; +use crate::route::parse_comma_ids; +use application::service::metadata::{ + CreateMetadataUseCase, DeleteMetadataUseCase, EditMetadataUseCase, GetMetadataUseCase, +}; +use axum::extract::{Path, Query, State}; +use axum::http::StatusCode; +use axum::routing::{get, post, put}; +use axum::{Extension, Json, Router}; +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Deserialize)] 
+struct CreateMetadataRequest { + label: String, + content: String, +} + +#[derive(Debug, Deserialize)] +struct UpdateMetadataRequest { + label: String, + content: String, +} + +#[derive(Debug, Serialize)] +struct MetadataResponse { + account_id: String, + nanoid: String, + label: String, + content: String, +} + +impl From for MetadataResponse { + fn from(dto: application::transfer::metadata::MetadataDto) -> Self { + Self { + account_id: dto.account_nanoid, + nanoid: dto.nanoid, + label: dto.label, + content: dto.content, + } + } +} + +#[derive(Debug, Deserialize)] +struct GetMetadataQuery { + account_ids: String, +} + +pub trait MetadataRouter { + fn route_metadata(self) -> Self; +} + +async fn get_metadata_batch( + Extension(claims): Extension, + State(module): State, + Query(query): Query, +) -> Result>, ErrorStatus> { + let auth_info = OidcAuthInfo::from(claims); + + let account_ids = parse_comma_ids(&query.account_ids)?; + + let auth_account_id = resolve_auth_account_id(&module, auth_info) + .await + .map_err(ErrorStatus::from)?; + + let metadata_list = module + .get_metadata_batch(&auth_account_id, account_ids) + .await + .map_err(ErrorStatus::from)?; + + Ok(Json( + metadata_list + .into_iter() + .map(MetadataResponse::from) + .collect(), + )) +} + +async fn create_metadata( + Extension(claims): Extension, + State(module): State, + Path(account_id): Path, + Json(body): Json, +) -> Result<(StatusCode, Json), ErrorStatus> { + let auth_info = OidcAuthInfo::from(claims); + + if account_id.trim().is_empty() { + return Err(ErrorStatus::from(( + StatusCode::BAD_REQUEST, + "Account ID cannot be empty".to_string(), + ))); + } + + let auth_account_id = resolve_auth_account_id(&module, auth_info) + .await + .map_err(ErrorStatus::from)?; + + let metadata = module + .create_metadata(&auth_account_id, account_id, body.label, body.content) + .await + .map_err(ErrorStatus::from)?; + + Ok((StatusCode::CREATED, Json(MetadataResponse::from(metadata)))) +} + +async fn 
update_metadata( + Extension(claims): Extension, + State(module): State, + Path((account_id, metadata_id)): Path<(String, String)>, + Json(body): Json, +) -> Result { + let auth_info = OidcAuthInfo::from(claims); + + if account_id.trim().is_empty() { + return Err(ErrorStatus::from(( + StatusCode::BAD_REQUEST, + "Account ID cannot be empty".to_string(), + ))); + } + + if metadata_id.trim().is_empty() { + return Err(ErrorStatus::from(( + StatusCode::BAD_REQUEST, + "Metadata ID cannot be empty".to_string(), + ))); + } + + let auth_account_id = resolve_auth_account_id(&module, auth_info) + .await + .map_err(ErrorStatus::from)?; + + module + .edit_metadata( + &auth_account_id, + account_id, + metadata_id, + body.label, + body.content, + ) + .await + .map_err(ErrorStatus::from)?; + + Ok(StatusCode::NO_CONTENT) +} + +async fn delete_metadata( + Extension(claims): Extension, + State(module): State, + Path((account_id, metadata_id)): Path<(String, String)>, +) -> Result { + let auth_info = OidcAuthInfo::from(claims); + + if account_id.trim().is_empty() { + return Err(ErrorStatus::from(( + StatusCode::BAD_REQUEST, + "Account ID cannot be empty".to_string(), + ))); + } + + if metadata_id.trim().is_empty() { + return Err(ErrorStatus::from(( + StatusCode::BAD_REQUEST, + "Metadata ID cannot be empty".to_string(), + ))); + } + + let auth_account_id = resolve_auth_account_id(&module, auth_info) + .await + .map_err(ErrorStatus::from)?; + + module + .delete_metadata(&auth_account_id, account_id, metadata_id) + .await + .map_err(ErrorStatus::from)?; + + Ok(StatusCode::NO_CONTENT) +} + +impl MetadataRouter for Router { + fn route_metadata(self) -> Self { + self.route("/metadata", get(get_metadata_batch)) + .route("/accounts/:account_id/metadata", post(create_metadata)) + .route( + "/accounts/:account_id/metadata/:id", + put(update_metadata).delete(delete_metadata), + ) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use application::transfer::metadata::MetadataDto; + + #[test] + 
fn test_metadata_response_from_dto() { + let dto = MetadataDto { + account_nanoid: "acc-123".to_string(), + nanoid: "test-nanoid".to_string(), + label: "test-label".to_string(), + content: "test-content".to_string(), + }; + + let response = MetadataResponse::from(dto); + + assert_eq!(response.account_id, "acc-123"); + assert_eq!(response.nanoid, "test-nanoid"); + assert_eq!(response.label, "test-label"); + assert_eq!(response.content, "test-content"); + } +} diff --git a/server/src/route/oauth2.rs b/server/src/route/oauth2.rs new file mode 100644 index 0000000..a558174 --- /dev/null +++ b/server/src/route/oauth2.rs @@ -0,0 +1,823 @@ +use crate::handler::AppModule; +use crate::hydra::{AcceptConsentRequest, AcceptLoginRequest, RejectRequest}; +use crate::kratos::KratosClient; +use axum::extract::{Query, State}; +use axum::http::StatusCode; +use axum::routing::{get, post}; +use axum::{Json, Router}; +use serde::{Deserialize, Serialize}; +use std::collections::HashSet; + +const REMEMBER_FOR_SECS: i64 = 3600; + +// --------------------------------------------------------------------------- +// Query parameters +// --------------------------------------------------------------------------- + +#[derive(Debug, Deserialize)] +struct LoginQuery { + login_challenge: String, +} + +#[derive(Debug, Deserialize)] +struct ConsentQuery { + consent_challenge: String, +} + +// --------------------------------------------------------------------------- +// Response types +// --------------------------------------------------------------------------- + +#[derive(Debug, Serialize)] +#[serde(tag = "action")] +enum OAuth2Response { + #[serde(rename = "redirect")] + Redirect { redirect_to: String }, + #[serde(rename = "show_consent")] + ShowConsent { + consent_challenge: String, + client_name: Option, + requested_scope: Vec, + }, +} + +#[derive(Debug, Deserialize)] +struct ConsentDecision { + consent_challenge: String, + accept: bool, + grant_scope: Option>, +} + +// 
--------------------------------------------------------------------------- +// GET /oauth2/login +// --------------------------------------------------------------------------- + +async fn login( + State(module): State, + Query(LoginQuery { login_challenge }): Query, + headers: axum::http::HeaderMap, +) -> Result, StatusCode> { + let hydra = module.hydra_admin_client(); + let kratos = module.kratos_client(); + + // 1. Fetch login request from Hydra. + let login_request = hydra + .get_login_request(&login_challenge) + .await + .map_err(|e| { + tracing::error!("Failed to get login request from Hydra: {e}"); + StatusCode::BAD_GATEWAY + })?; + + // 2. If Hydra says skip (already authenticated), accept immediately. + if login_request.skip { + let redirect = hydra + .accept_login( + &login_challenge, + &AcceptLoginRequest { + subject: login_request.subject.clone(), + remember: Some(true), + remember_for: Some(REMEMBER_FOR_SECS), + }, + ) + .await + .map_err(|e| { + tracing::error!("Failed to accept login at Hydra: {e}"); + StatusCode::BAD_GATEWAY + })?; + + return Ok(Json(OAuth2Response::Redirect { + redirect_to: redirect.redirect_to, + })); + } + + // 3. Verify user has a valid Kratos session via cookie. + let cookie = headers + .get(axum::http::header::COOKIE) + .and_then(|v| v.to_str().ok()) + .unwrap_or(""); + + let kratos_session = verify_kratos_session(kratos, cookie).await?; + + // 4. Accept login with Kratos identity UUID as subject. + let redirect = hydra + .accept_login( + &login_challenge, + &AcceptLoginRequest { + subject: kratos_session.identity_id, + remember: Some(true), + remember_for: Some(REMEMBER_FOR_SECS), + }, + ) + .await + .map_err(|e| { + tracing::error!("Failed to accept login at Hydra: {e}"); + StatusCode::BAD_GATEWAY + })?; + + Ok(Json(OAuth2Response::Redirect { + redirect_to: redirect.redirect_to, + })) +} + +struct VerifiedSession { + identity_id: String, +} + +/// Verify Kratos session via cookie, returning the identity ID on success. 
+async fn verify_kratos_session(
+    kratos: &KratosClient,
+    cookie: &str,
+) -> Result<VerifiedSession, StatusCode> {
+    if cookie.is_empty() {
+        tracing::warn!("oauth2/login: no cookie header, cannot verify Kratos session");
+        return Err(StatusCode::UNAUTHORIZED);
+    }
+
+    // Extract only the Kratos session cookie to avoid leaking other cookies.
+    let kratos_cookie = cookie
+        .split(';')
+        .map(|c| c.trim())
+        .find(|c| c.starts_with("ory_kratos_session="))
+        .unwrap_or("");
+
+    if kratos_cookie.is_empty() {
+        tracing::warn!("oauth2/login: no ory_kratos_session cookie found");
+        return Err(StatusCode::UNAUTHORIZED);
+    }
+
+    let session = kratos.whoami(kratos_cookie).await.map_err(|e| {
+        tracing::error!("Kratos whoami request failed: {e}");
+        StatusCode::BAD_GATEWAY
+    })?;
+
+    match session {
+        Some(s) => Ok(VerifiedSession {
+            identity_id: s.identity.id,
+        }),
+        None => {
+            tracing::warn!("oauth2/login: Kratos session invalid or expired");
+            Err(StatusCode::UNAUTHORIZED)
+        }
+    }
+}
+
+// ---------------------------------------------------------------------------
+// GET /oauth2/consent
+// ---------------------------------------------------------------------------
+
+async fn get_consent(
+    State(module): State<AppModule>,
+    Query(ConsentQuery { consent_challenge }): Query<ConsentQuery>,
+) -> Result<Json<OAuth2Response>, StatusCode> {
+    let hydra = module.hydra_admin_client();
+
+    let consent_request = hydra
+        .get_consent_request(&consent_challenge)
+        .await
+        .map_err(|e| {
+            tracing::error!("Failed to get consent request from Hydra: {e}");
+            StatusCode::BAD_GATEWAY
+        })?;
+
+    // If the client is configured to skip consent, accept automatically. 
+ let skip_consent = consent_request + .client + .as_ref() + .and_then(|c| c.skip_consent) + .unwrap_or(false); + + if consent_request.skip || skip_consent { + let redirect = hydra + .accept_consent( + &consent_challenge, + &AcceptConsentRequest { + grant_scope: consent_request.requested_scope.clone(), + grant_access_token_audience: consent_request + .requested_access_token_audience + .clone(), + remember: Some(true), + remember_for: Some(REMEMBER_FOR_SECS), + session: None, + }, + ) + .await + .map_err(|e| { + tracing::error!("Failed to accept consent at Hydra: {e}"); + StatusCode::BAD_GATEWAY + })?; + + return Ok(Json(OAuth2Response::Redirect { + redirect_to: redirect.redirect_to, + })); + } + + // Non-skip: return consent details for frontend to display. + let client_name = consent_request + .client + .as_ref() + .and_then(|c| c.client_name.clone()); + + Ok(Json(OAuth2Response::ShowConsent { + consent_challenge, + client_name, + requested_scope: consent_request.requested_scope, + })) +} + +// --------------------------------------------------------------------------- +// POST /oauth2/consent +// --------------------------------------------------------------------------- + +async fn post_consent( + State(module): State, + Json(decision): Json, +) -> Result, StatusCode> { + let hydra = module.hydra_admin_client(); + + if decision.accept { + let grant_scope = decision.grant_scope.unwrap_or_default(); + + // Re-fetch consent request to get requested_access_token_audience + // and to validate that granted scopes are a subset of requested scopes. + let consent_request = hydra + .get_consent_request(&decision.consent_challenge) + .await + .map_err(|e| { + tracing::error!("Failed to get consent request from Hydra: {e}"); + StatusCode::BAD_GATEWAY + })?; + + // Validate: grant_scope must be a subset of requested_scope. 
+ let requested: HashSet<&str> = consent_request + .requested_scope + .iter() + .map(|s| s.as_str()) + .collect(); + for scope in &grant_scope { + if !requested.contains(scope.as_str()) { + tracing::warn!("Client attempted to grant unrequested scope: {scope}"); + return Err(StatusCode::BAD_REQUEST); + } + } + + let redirect = hydra + .accept_consent( + &decision.consent_challenge, + &AcceptConsentRequest { + grant_scope, + grant_access_token_audience: consent_request.requested_access_token_audience, + remember: Some(true), + remember_for: Some(REMEMBER_FOR_SECS), + session: None, + }, + ) + .await + .map_err(|e| { + tracing::error!("Failed to accept consent at Hydra: {e}"); + StatusCode::BAD_GATEWAY + })?; + + Ok(Json(OAuth2Response::Redirect { + redirect_to: redirect.redirect_to, + })) + } else { + let redirect = hydra + .reject_consent( + &decision.consent_challenge, + &RejectRequest { + error: "consent_denied".to_string(), + error_description: Some("The user denied the consent request.".to_string()), + }, + ) + .await + .map_err(|e| { + tracing::error!("Failed to reject consent at Hydra: {e}"); + StatusCode::BAD_GATEWAY + })?; + + Ok(Json(OAuth2Response::Redirect { + redirect_to: redirect.redirect_to, + })) + } +} + +// --------------------------------------------------------------------------- +// Router +// --------------------------------------------------------------------------- + +pub trait OAuth2Router { + fn route_oauth2(self) -> Self; +} + +impl OAuth2Router for Router { + fn route_oauth2(self) -> Self { + self.route("/oauth2/login", get(login)) + .route("/oauth2/consent", get(get_consent)) + .route("/oauth2/consent", post(post_consent)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use axum::body::Body; + use axum::http::Request; + use http_body_util::BodyExt; + use tower::ServiceExt; + use wiremock::matchers::{method, path, query_param}; + use wiremock::{Mock, MockServer, ResponseTemplate}; + + async fn build_app(hydra_url: &str, kratos_url: 
&str) -> Router {
+        let app = AppModule::new_for_oauth2_test(hydra_url.into(), kratos_url.into())
+            .await
+            .unwrap();
+        Router::new().route_oauth2().with_state(app)
+    }
+
+    async fn response_json(resp: axum::http::Response<Body>) -> serde_json::Value {
+        let body = resp.into_body().collect().await.unwrap().to_bytes();
+        serde_json::from_slice(&body).unwrap_or_else(|e| {
+            panic!(
+                "Failed to parse response as JSON: {e}\nBody: {}",
+                String::from_utf8_lossy(&body)
+            )
+        })
+    }
+
+    // -----------------------------------------------------------------------
+    // GET /oauth2/login
+    // -----------------------------------------------------------------------
+
+    #[test_with::env(DATABASE_URL)]
+    // NOTE(review): a duplicated #[test_with::env(DATABASE_URL)] attribute was removed here.
+    #[tokio::test]
+    async fn login_skip_returns_redirect() {
+        let hydra_mock = MockServer::start().await;
+        let kratos_mock = MockServer::start().await;
+
+        Mock::given(method("GET"))
+            .and(path("/admin/oauth2/auth/requests/login"))
+            .and(query_param("login_challenge", "test-challenge"))
+            .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({
+                "challenge": "test-challenge",
+                "skip": true,
+                "subject": "user-uuid",
+                "client": null,
+                "requested_scope": ["openid"],
+                "requested_access_token_audience": ["account"],
+                "request_url": "http://example.com"
+            })))
+            .mount(&hydra_mock)
+            .await;
+
+        Mock::given(method("PUT"))
+            .and(path("/admin/oauth2/auth/requests/login/accept"))
+            .and(query_param("login_challenge", "test-challenge"))
+            .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({
+                "redirect_to": "http://example.com/callback"
+            })))
+            .mount(&hydra_mock)
+            .await;
+
+        let app = build_app(&hydra_mock.uri(), &kratos_mock.uri()).await;
+
+        let resp = app
+            .oneshot(
+                Request::get("/oauth2/login?login_challenge=test-challenge")
+                    .body(Body::empty())
+                    .unwrap(),
+            )
+            .await
+            .unwrap();
+
+        assert_eq!(resp.status(), StatusCode::OK);
+        let json = response_json(resp).await;
+        assert_eq!(json["action"], 
"redirect"); + assert_eq!(json["redirect_to"], "http://example.com/callback"); + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn login_valid_kratos_session_returns_redirect() { + let hydra_mock = MockServer::start().await; + let kratos_mock = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/admin/oauth2/auth/requests/login")) + .and(query_param("login_challenge", "challenge-2")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "challenge": "challenge-2", + "skip": false, + "subject": "", + "client": null, + "requested_scope": ["openid"], + "requested_access_token_audience": ["account"], + "request_url": "http://example.com" + }))) + .mount(&hydra_mock) + .await; + + Mock::given(method("GET")) + .and(path("/sessions/whoami")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "id": "session-id", + "active": true, + "identity": { + "id": "identity-uuid", + "traits": {} + } + }))) + .mount(&kratos_mock) + .await; + + Mock::given(method("PUT")) + .and(path("/admin/oauth2/auth/requests/login/accept")) + .and(query_param("login_challenge", "challenge-2")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "redirect_to": "http://example.com/consent" + }))) + .mount(&hydra_mock) + .await; + + let app = build_app(&hydra_mock.uri(), &kratos_mock.uri()).await; + + let resp = app + .oneshot( + Request::get("/oauth2/login?login_challenge=challenge-2") + .header("cookie", "ory_kratos_session=test-session-token") + .body(Body::empty()) + .unwrap(), + ) + .await + .unwrap(); + + assert_eq!(resp.status(), StatusCode::OK); + let json = response_json(resp).await; + assert_eq!(json["action"], "redirect"); + assert_eq!(json["redirect_to"], "http://example.com/consent"); + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn login_no_cookie_returns_401() { + let hydra_mock = MockServer::start().await; + let kratos_mock = MockServer::start().await; + 
+ Mock::given(method("GET")) + .and(path("/admin/oauth2/auth/requests/login")) + .and(query_param("login_challenge", "challenge-3")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "challenge": "challenge-3", + "skip": false, + "subject": "", + "client": null, + "requested_scope": ["openid"], + "requested_access_token_audience": ["account"], + "request_url": "http://example.com" + }))) + .mount(&hydra_mock) + .await; + + let app = build_app(&hydra_mock.uri(), &kratos_mock.uri()).await; + + let resp = app + .oneshot( + Request::get("/oauth2/login?login_challenge=challenge-3") + .body(Body::empty()) + .unwrap(), + ) + .await + .unwrap(); + + assert_eq!(resp.status(), StatusCode::UNAUTHORIZED); + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn login_invalid_kratos_session_returns_401() { + let hydra_mock = MockServer::start().await; + let kratos_mock = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/admin/oauth2/auth/requests/login")) + .and(query_param("login_challenge", "challenge-4")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "challenge": "challenge-4", + "skip": false, + "subject": "", + "client": null, + "requested_scope": ["openid"], + "requested_access_token_audience": ["account"], + "request_url": "http://example.com" + }))) + .mount(&hydra_mock) + .await; + + Mock::given(method("GET")) + .and(path("/sessions/whoami")) + .respond_with(ResponseTemplate::new(401)) + .mount(&kratos_mock) + .await; + + let app = build_app(&hydra_mock.uri(), &kratos_mock.uri()).await; + + let resp = app + .oneshot( + Request::get("/oauth2/login?login_challenge=challenge-4") + .header("cookie", "ory_kratos_session=expired-token") + .body(Body::empty()) + .unwrap(), + ) + .await + .unwrap(); + + assert_eq!(resp.status(), StatusCode::UNAUTHORIZED); + } + + // ----------------------------------------------------------------------- + // GET /oauth2/consent + // 
----------------------------------------------------------------------- + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn consent_skip_returns_redirect() { + let hydra_mock = MockServer::start().await; + let kratos_mock = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/admin/oauth2/auth/requests/consent")) + .and(query_param("consent_challenge", "consent-1")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "challenge": "consent-1", + "skip": true, + "subject": "user-uuid", + "client": null, + "requested_scope": ["openid"], + "requested_access_token_audience": ["account"] + }))) + .mount(&hydra_mock) + .await; + + Mock::given(method("PUT")) + .and(path("/admin/oauth2/auth/requests/consent/accept")) + .and(query_param("consent_challenge", "consent-1")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "redirect_to": "http://example.com/token" + }))) + .mount(&hydra_mock) + .await; + + let app = build_app(&hydra_mock.uri(), &kratos_mock.uri()).await; + + let resp = app + .oneshot( + Request::get("/oauth2/consent?consent_challenge=consent-1") + .body(Body::empty()) + .unwrap(), + ) + .await + .unwrap(); + + assert_eq!(resp.status(), StatusCode::OK); + let json = response_json(resp).await; + assert_eq!(json["action"], "redirect"); + assert_eq!(json["redirect_to"], "http://example.com/token"); + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn consent_client_skip_consent_returns_redirect() { + let hydra_mock = MockServer::start().await; + let kratos_mock = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/admin/oauth2/auth/requests/consent")) + .and(query_param("consent_challenge", "consent-2")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "challenge": "consent-2", + "skip": false, + "subject": "user-uuid", + "client": { + "client_id": "my-app", + "client_name": "My App", + "skip_consent": true + }, + 
"requested_scope": ["openid", "offline"], + "requested_access_token_audience": ["account"] + }))) + .mount(&hydra_mock) + .await; + + Mock::given(method("PUT")) + .and(path("/admin/oauth2/auth/requests/consent/accept")) + .and(query_param("consent_challenge", "consent-2")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "redirect_to": "http://example.com/token2" + }))) + .mount(&hydra_mock) + .await; + + let app = build_app(&hydra_mock.uri(), &kratos_mock.uri()).await; + + let resp = app + .oneshot( + Request::get("/oauth2/consent?consent_challenge=consent-2") + .body(Body::empty()) + .unwrap(), + ) + .await + .unwrap(); + + assert_eq!(resp.status(), StatusCode::OK); + let json = response_json(resp).await; + assert_eq!(json["action"], "redirect"); + assert_eq!(json["redirect_to"], "http://example.com/token2"); + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn consent_no_skip_returns_show_consent() { + let hydra_mock = MockServer::start().await; + let kratos_mock = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/admin/oauth2/auth/requests/consent")) + .and(query_param("consent_challenge", "consent-3")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "challenge": "consent-3", + "skip": false, + "subject": "user-uuid", + "client": { + "client_id": "my-app", + "client_name": "My App", + "skip_consent": false + }, + "requested_scope": ["openid", "profile"], + "requested_access_token_audience": ["account"] + }))) + .mount(&hydra_mock) + .await; + + let app = build_app(&hydra_mock.uri(), &kratos_mock.uri()).await; + + let resp = app + .oneshot( + Request::get("/oauth2/consent?consent_challenge=consent-3") + .body(Body::empty()) + .unwrap(), + ) + .await + .unwrap(); + + assert_eq!(resp.status(), StatusCode::OK); + let json = response_json(resp).await; + assert_eq!(json["action"], "show_consent"); + assert_eq!(json["consent_challenge"], "consent-3"); + 
assert_eq!(json["client_name"], "My App"); + assert_eq!( + json["requested_scope"], + serde_json::json!(["openid", "profile"]) + ); + } + + // ----------------------------------------------------------------------- + // POST /oauth2/consent + // ----------------------------------------------------------------------- + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn consent_accept_valid_scopes_returns_redirect() { + let hydra_mock = MockServer::start().await; + let kratos_mock = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/admin/oauth2/auth/requests/consent")) + .and(query_param("consent_challenge", "consent-4")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "challenge": "consent-4", + "skip": false, + "subject": "user-uuid", + "client": null, + "requested_scope": ["openid", "profile"], + "requested_access_token_audience": ["account"] + }))) + .mount(&hydra_mock) + .await; + + Mock::given(method("PUT")) + .and(path("/admin/oauth2/auth/requests/consent/accept")) + .and(query_param("consent_challenge", "consent-4")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "redirect_to": "http://example.com/done" + }))) + .mount(&hydra_mock) + .await; + + let app = build_app(&hydra_mock.uri(), &kratos_mock.uri()).await; + + let resp = app + .oneshot( + Request::post("/oauth2/consent") + .header("content-type", "application/json") + .body(Body::from( + serde_json::to_string(&serde_json::json!({ + "consent_challenge": "consent-4", + "accept": true, + "grant_scope": ["openid"] + })) + .unwrap(), + )) + .unwrap(), + ) + .await + .unwrap(); + + assert_eq!(resp.status(), StatusCode::OK); + let json = response_json(resp).await; + assert_eq!(json["action"], "redirect"); + assert_eq!(json["redirect_to"], "http://example.com/done"); + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn consent_accept_invalid_scope_returns_400() { + let hydra_mock = 
MockServer::start().await; + let kratos_mock = MockServer::start().await; + + Mock::given(method("GET")) + .and(path("/admin/oauth2/auth/requests/consent")) + .and(query_param("consent_challenge", "consent-5")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "challenge": "consent-5", + "skip": false, + "subject": "user-uuid", + "client": null, + "requested_scope": ["openid"], + "requested_access_token_audience": ["account"] + }))) + .mount(&hydra_mock) + .await; + + let app = build_app(&hydra_mock.uri(), &kratos_mock.uri()).await; + + let resp = app + .oneshot( + Request::post("/oauth2/consent") + .header("content-type", "application/json") + .body(Body::from( + serde_json::to_string(&serde_json::json!({ + "consent_challenge": "consent-5", + "accept": true, + "grant_scope": ["openid", "admin"] + })) + .unwrap(), + )) + .unwrap(), + ) + .await + .unwrap(); + + assert_eq!(resp.status(), StatusCode::BAD_REQUEST); + } + + #[test_with::env(DATABASE_URL)] + #[tokio::test] + async fn consent_reject_returns_redirect() { + let hydra_mock = MockServer::start().await; + let kratos_mock = MockServer::start().await; + + Mock::given(method("PUT")) + .and(path("/admin/oauth2/auth/requests/consent/reject")) + .and(query_param("consent_challenge", "consent-6")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "redirect_to": "http://example.com/denied" + }))) + .mount(&hydra_mock) + .await; + + let app = build_app(&hydra_mock.uri(), &kratos_mock.uri()).await; + + let resp = app + .oneshot( + Request::post("/oauth2/consent") + .header("content-type", "application/json") + .body(Body::from( + serde_json::to_string(&serde_json::json!({ + "consent_challenge": "consent-6", + "accept": false, + "grant_scope": null + })) + .unwrap(), + )) + .unwrap(), + ) + .await + .unwrap(); + + assert_eq!(resp.status(), StatusCode::OK); + let json = response_json(resp).await; + assert_eq!(json["action"], "redirect"); + 
assert_eq!(json["redirect_to"], "http://example.com/denied"); + } +} diff --git a/server/src/route/profile.rs b/server/src/route/profile.rs new file mode 100644 index 0000000..36c2c8e --- /dev/null +++ b/server/src/route/profile.rs @@ -0,0 +1,214 @@ +use crate::auth::{resolve_auth_account_id, AuthClaims, OidcAuthInfo}; +use crate::error::ErrorStatus; +use crate::handler::AppModule; +use crate::route::parse_comma_ids; +use application::service::profile::{CreateProfileUseCase, EditProfileUseCase, GetProfileUseCase}; +use axum::extract::{Path, Query, State}; +use axum::http::StatusCode; +use axum::routing::{get, post}; +use axum::{Extension, Json, Router}; +use kernel::prelude::entity::ImageId; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +#[derive(Debug, Deserialize)] +struct CreateProfileRequest { + display_name: Option, + summary: Option, + icon: Option, + banner: Option, +} + +#[derive(Debug, Deserialize)] +struct UpdateProfileRequest { + display_name: Option, + summary: Option, + icon: Option, + banner: Option, +} + +#[derive(Debug, Serialize)] +struct ProfileResponse { + account_id: String, + nanoid: String, + display_name: Option, + summary: Option, + icon_id: Option, + banner_id: Option, +} + +impl From for ProfileResponse { + fn from(dto: application::transfer::profile::ProfileDto) -> Self { + Self { + account_id: dto.account_nanoid, + nanoid: dto.nanoid, + display_name: dto.display_name, + summary: dto.summary, + icon_id: dto.icon_id, + banner_id: dto.banner_id, + } + } +} + +#[derive(Debug, Deserialize)] +struct GetProfilesQuery { + account_ids: String, +} + +pub trait ProfileRouter { + fn route_profile(self) -> Self; +} + +async fn get_profiles_batch( + Extension(claims): Extension, + State(module): State, + Query(query): Query, +) -> Result>, ErrorStatus> { + let auth_info = OidcAuthInfo::from(claims); + + let account_ids = parse_comma_ids(&query.account_ids)?; + + let auth_account_id = resolve_auth_account_id(&module, auth_info) + .await + 
.map_err(ErrorStatus::from)?; + + let profiles = module + .get_profiles_batch(&auth_account_id, account_ids) + .await + .map_err(ErrorStatus::from)?; + + Ok(Json( + profiles.into_iter().map(ProfileResponse::from).collect(), + )) +} + +async fn create_profile( + Extension(claims): Extension, + State(module): State, + Path(account_id): Path, + Json(body): Json, +) -> Result<(StatusCode, Json), ErrorStatus> { + let auth_info = OidcAuthInfo::from(claims); + + if account_id.trim().is_empty() { + return Err(ErrorStatus::from(( + StatusCode::BAD_REQUEST, + "Account ID cannot be empty".to_string(), + ))); + } + + let auth_account_id = resolve_auth_account_id(&module, auth_info) + .await + .map_err(ErrorStatus::from)?; + + let icon = body.icon.map(ImageId::new); + let banner = body.banner.map(ImageId::new); + + let profile = module + .create_profile( + &auth_account_id, + account_id, + body.display_name, + body.summary, + icon, + banner, + ) + .await + .map_err(ErrorStatus::from)?; + + Ok((StatusCode::CREATED, Json(ProfileResponse::from(profile)))) +} + +async fn update_profile( + Extension(claims): Extension, + State(module): State, + Path(account_id): Path, + Json(body): Json, +) -> Result { + let auth_info = OidcAuthInfo::from(claims); + + if account_id.trim().is_empty() { + return Err(ErrorStatus::from(( + StatusCode::BAD_REQUEST, + "Account ID cannot be empty".to_string(), + ))); + } + + let auth_account_id = resolve_auth_account_id(&module, auth_info) + .await + .map_err(ErrorStatus::from)?; + + let icon = body.icon.map(ImageId::new); + let banner = body.banner.map(ImageId::new); + + module + .edit_profile( + &auth_account_id, + account_id, + body.display_name, + body.summary, + icon, + banner, + ) + .await + .map_err(ErrorStatus::from)?; + + Ok(StatusCode::NO_CONTENT) +} + +impl ProfileRouter for Router { + fn route_profile(self) -> Self { + self.route("/profiles", get(get_profiles_batch)).route( + "/accounts/:account_id/profile", + 
post(create_profile).put(update_profile), + ) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use application::transfer::profile::ProfileDto; + + #[test] + fn test_profile_response_from_dto_with_all_fields() { + let dto = ProfileDto { + account_nanoid: "acc-123".to_string(), + nanoid: "test-nanoid".to_string(), + display_name: Some("Test User".to_string()), + summary: Some("A test summary".to_string()), + icon_id: Some(Uuid::nil()), + banner_id: Some(Uuid::nil()), + }; + + let response = ProfileResponse::from(dto); + + assert_eq!(response.account_id, "acc-123"); + assert_eq!(response.nanoid, "test-nanoid"); + assert_eq!(response.display_name, Some("Test User".to_string())); + assert_eq!(response.summary, Some("A test summary".to_string())); + assert_eq!(response.icon_id, Some(Uuid::nil())); + assert_eq!(response.banner_id, Some(Uuid::nil())); + } + + #[test] + fn test_profile_response_from_dto_with_no_optional_fields() { + let dto = ProfileDto { + account_nanoid: "acc-456".to_string(), + nanoid: "test-nanoid-2".to_string(), + display_name: None, + summary: None, + icon_id: None, + banner_id: None, + }; + + let response = ProfileResponse::from(dto); + + assert_eq!(response.account_id, "acc-456"); + assert_eq!(response.nanoid, "test-nanoid-2"); + assert!(response.display_name.is_none()); + assert!(response.summary.is_none()); + assert!(response.icon_id.is_none()); + assert!(response.banner_id.is_none()); + } +}