diff --git a/.changeset/nasty-bananas-pretend.md b/.changeset/nasty-bananas-pretend.md new file mode 100644 index 000000000..98b6c7d21 --- /dev/null +++ b/.changeset/nasty-bananas-pretend.md @@ -0,0 +1,9 @@ +--- +'@powersync/service-module-postgres-storage': minor +'@powersync/service-module-mongodb-storage': minor +'@powersync/service-core-tests': minor +'@powersync/service-module-mongodb': minor +'@powersync/service-core': minor +--- + +Refactor `BucketStorageFactory` and `PersistedSyncRulesContent` to be abstract classes instead of interfaces. diff --git a/modules/module-mongodb-storage/src/storage/MongoBucketStorage.ts b/modules/module-mongodb-storage/src/storage/MongoBucketStorage.ts index 82a7d024b..0f11064af 100644 --- a/modules/module-mongodb-storage/src/storage/MongoBucketStorage.ts +++ b/modules/module-mongodb-storage/src/storage/MongoBucketStorage.ts @@ -1,8 +1,6 @@ -import { SqlSyncRules } from '@powersync/service-sync-rules'; - import { GetIntanceOptions, storage } from '@powersync/service-core'; -import { BaseObserver, ErrorCode, logger, ServiceError } from '@powersync/lib-services-framework'; +import { ErrorCode, ServiceError } from '@powersync/lib-services-framework'; import { v4 as uuid } from 'uuid'; import * as lib_mongo from '@powersync/lib-service-mongodb'; @@ -11,7 +9,7 @@ import { mongo } from '@powersync/lib-service-mongodb'; import { PowerSyncMongo } from './implementation/db.js'; import { SyncRuleDocument } from './implementation/models.js'; import { MongoPersistedSyncRulesContent } from './implementation/MongoPersistedSyncRulesContent.js'; -import { MongoSyncBucketStorage, MongoSyncBucketStorageOptions } from './implementation/MongoSyncBucketStorage.js'; +import { MongoSyncBucketStorage } from './implementation/MongoSyncBucketStorage.js'; import { generateSlotName } from '../utils/util.js'; import { MongoChecksumOptions } from './implementation/MongoChecksums.js'; @@ -19,10 +17,7 @@ export interface MongoBucketStorageOptions { checksumOptions?: Omit; } -export class MongoBucketStorage - extends BaseObserver - implements storage.BucketStorageFactory -{ +export class MongoBucketStorage extends storage.BucketStorageFactory { private readonly client: mongo.MongoClient; private readonly session: mongo.ClientSession; // TODO: This is still Postgres specific and needs to be reworked @@ -91,33 +86,13 @@ export class MongoBucketStorage }; } - async configureSyncRules(options: storage.UpdateSyncRulesOptions) { - const next = await this.getNextSyncRulesContent(); - const active = await this.getActiveSyncRulesContent(); - - if (next?.sync_rules_content == options.content) { - logger.info('Sync rules from configuration unchanged'); - return { updated: false }; - } else if (next == null && active?.sync_rules_content == options.content) { - logger.info('Sync rules from configuration unchanged'); - return { updated: false }; - } else { - logger.info('Sync rules updated from configuration'); - const persisted_sync_rules = await this.updateSyncRules(options); - return { updated: true, persisted_sync_rules, lock: persisted_sync_rules.current_lock ?? 
undefined }; - } - } - async restartReplication(sync_rules_group_id: number) { const next = await this.getNextSyncRulesContent(); const active = await this.getActiveSyncRulesContent(); if (next != null && next.id == sync_rules_group_id) { // We need to redo the "next" sync rules - await this.updateSyncRules({ - content: next.sync_rules_content, - validate: false - }); + await this.updateSyncRules(next.asUpdateOptions()); // Pro-actively stop replicating await this.db.sync_rules.updateOne( { @@ -133,10 +108,7 @@ export class MongoBucketStorage await this.db.notifyCheckpoint(); } else if (next == null && active?.id == sync_rules_group_id) { // Slot removed for "active" sync rules, while there is no "next" one. - await this.updateSyncRules({ - content: active.sync_rules_content, - validate: false - }); + await this.updateSyncRules(active.asUpdateOptions()); // In this case we keep the old one as active for clients, so that that existing clients // can still get the latest data while we replicate the new ones. @@ -173,19 +145,6 @@ export class MongoBucketStorage } async updateSyncRules(options: storage.UpdateSyncRulesOptions): Promise { - if (options.validate) { - // Parse and validate before applying any changes - SqlSyncRules.fromYaml(options.content, { - // No schema-based validation at this point - schema: undefined, - defaultSchema: 'not_applicable', // Not needed for validation - throwOnError: true - }); - } else { - // We do not validate sync rules at this point. - // That is done when using the sync rules, so that the diagnostics API can report the errors. - } - let rules: MongoPersistedSyncRulesContent | undefined = undefined; await this.session.withTransaction(async () => { @@ -219,7 +178,7 @@ export class MongoBucketStorage const doc: SyncRuleDocument = { _id: id, storage_version: storageVersion, - content: options.content, + content: options.config.yaml, last_checkpoint: null, last_checkpoint_lsn: null, no_checkpoint_before: null, @@ -258,11 +217,6 @@ export class MongoBucketStorage return new MongoPersistedSyncRulesContent(this.db, doc); } - async getActiveSyncRules(options: storage.ParseSyncRulesOptions): Promise { - const content = await this.getActiveSyncRulesContent(); - return content?.parsed(options) ?? null; - } - async getNextSyncRulesContent(): Promise { const doc = await this.db.sync_rules.findOne( { @@ -277,11 +231,6 @@ export class MongoBucketStorage return new MongoPersistedSyncRulesContent(this.db, doc); } - async getNextSyncRules(options: storage.ParseSyncRulesOptions): Promise { - const content = await this.getNextSyncRulesContent(); - return content?.parsed(options) ?? 
null; - } - async getReplicatingSyncRules(): Promise { const docs = await this.db.sync_rules .find({ diff --git a/modules/module-mongodb-storage/src/storage/implementation/MongoPersistedSyncRules.ts b/modules/module-mongodb-storage/src/storage/implementation/MongoPersistedSyncRules.ts deleted file mode 100644 index 58620e706..000000000 --- a/modules/module-mongodb-storage/src/storage/implementation/MongoPersistedSyncRules.ts +++ /dev/null @@ -1,42 +0,0 @@ -import { - CompatibilityOption, - DEFAULT_HYDRATION_STATE, - HydratedSyncRules, - HydrationState, - SyncConfigWithErrors, - versionedHydrationState -} from '@powersync/service-sync-rules'; - -import { storage } from '@powersync/service-core'; - -import { StorageConfig } from './models.js'; - -export class MongoPersistedSyncRules implements storage.PersistedSyncRules { - public readonly slot_name: string; - public readonly hydrationState: HydrationState; - - constructor( - public readonly id: number, - public readonly sync_rules: SyncConfigWithErrors, - public readonly checkpoint_lsn: string | null, - slot_name: string | null, - public readonly storageConfig: StorageConfig - ) { - this.slot_name = slot_name ?? `powersync_${id}`; - - if ( - storageConfig.versionedBuckets || - this.sync_rules.config.compatibility.isEnabled(CompatibilityOption.versionedBucketIds) - ) { - // For new sync config versions (using the new storage version), we always enable versioned bucket names. - // For older versions, this depends on the compatibility option. - this.hydrationState = versionedHydrationState(this.id); - } else { - this.hydrationState = DEFAULT_HYDRATION_STATE; - } - } - - hydratedSyncRules(): HydratedSyncRules { - return this.sync_rules.config.hydrate({ hydrationState: this.hydrationState }); - } -} diff --git a/modules/module-mongodb-storage/src/storage/implementation/MongoPersistedSyncRulesContent.ts b/modules/module-mongodb-storage/src/storage/implementation/MongoPersistedSyncRulesContent.ts index 8d3f09067..f1b1bd47f 100644 --- a/modules/module-mongodb-storage/src/storage/implementation/MongoPersistedSyncRulesContent.ts +++ b/modules/module-mongodb-storage/src/storage/implementation/MongoPersistedSyncRulesContent.ts @@ -1,49 +1,32 @@ import { mongo } from '@powersync/lib-service-mongodb'; import { storage } from '@powersync/service-core'; -import { SqlSyncRules } from '@powersync/service-sync-rules'; -import { MongoPersistedSyncRules } from './MongoPersistedSyncRules.js'; import { MongoSyncRulesLock } from './MongoSyncRulesLock.js'; import { PowerSyncMongo } from './db.js'; import { getMongoStorageConfig, SyncRuleDocument } from './models.js'; import { ErrorCode, ServiceError } from '@powersync/lib-services-framework'; -export class MongoPersistedSyncRulesContent implements storage.PersistedSyncRulesContent { - public readonly slot_name: string; - - public readonly id: number; - public readonly sync_rules_content: string; - public readonly last_checkpoint_lsn: string | null; - public readonly last_fatal_error: string | null; - public readonly last_fatal_error_ts: Date | null; - public readonly last_keepalive_ts: Date | null; - public readonly last_checkpoint_ts: Date | null; - public readonly active: boolean; - public readonly storageVersion: number; - +export class MongoPersistedSyncRulesContent extends storage.PersistedSyncRulesContent { public current_lock: MongoSyncRulesLock | null = null; constructor( private db: PowerSyncMongo, doc: mongo.WithId ) { - this.id = doc._id; - this.sync_rules_content = doc.content; - 
this.last_checkpoint_lsn = doc.last_checkpoint_lsn; - // Handle legacy values - this.slot_name = doc.slot_name ?? `powersync_${this.id}`; - this.last_fatal_error = doc.last_fatal_error; - this.last_fatal_error_ts = doc.last_fatal_error_ts; - this.last_checkpoint_ts = doc.last_checkpoint_ts; - this.last_keepalive_ts = doc.last_keepalive_ts; - this.active = doc.state == 'ACTIVE'; - this.storageVersion = doc.storage_version ?? storage.LEGACY_STORAGE_VERSION; + super({ + id: doc._id, + sync_rules_content: doc.content, + last_checkpoint_lsn: doc.last_checkpoint_lsn, + // Handle legacy values + slot_name: doc.slot_name ?? `powersync_${doc._id}`, + last_fatal_error: doc.last_fatal_error, + last_fatal_error_ts: doc.last_fatal_error_ts, + last_checkpoint_ts: doc.last_checkpoint_ts, + last_keepalive_ts: doc.last_keepalive_ts, + active: doc.state == 'ACTIVE', + storageVersion: doc.storage_version ?? storage.LEGACY_STORAGE_VERSION + }); } - /** - * Load the storage config. - * - * This may throw if the persisted storage version is not supported. - */ getStorageConfig() { const storageConfig = getMongoStorageConfig(this.storageVersion); if (storageConfig == null) { @@ -55,16 +38,6 @@ export class MongoPersistedSyncRulesContent implements storage.PersistedSyncRule return storageConfig; } - parsed(options: storage.ParseSyncRulesOptions) { - return new MongoPersistedSyncRules( - this.id, - SqlSyncRules.fromYaml(this.sync_rules_content, options), - this.last_checkpoint_lsn, - this.slot_name, - this.getStorageConfig() - ); - } - async lock() { const lock = await MongoSyncRulesLock.createLock(this.db, this); this.current_lock = lock; diff --git a/modules/module-mongodb-storage/src/storage/storage-index.ts b/modules/module-mongodb-storage/src/storage/storage-index.ts index cfb1d4ad0..1c754cb5f 100644 --- a/modules/module-mongodb-storage/src/storage/storage-index.ts +++ b/modules/module-mongodb-storage/src/storage/storage-index.ts @@ -2,7 +2,6 @@ export * from './implementation/db.js'; export * from './implementation/models.js'; export * from './implementation/MongoBucketBatch.js'; export * from './implementation/MongoIdSequence.js'; -export * from './implementation/MongoPersistedSyncRules.js'; export * from './implementation/MongoPersistedSyncRulesContent.js'; export * from './implementation/MongoStorageProvider.js'; export * from './implementation/MongoSyncBucketStorage.js'; diff --git a/modules/module-mongodb-storage/test/src/storage_compacting.test.ts b/modules/module-mongodb-storage/test/src/storage_compacting.test.ts index 4d1f3023f..66ae5902d 100644 --- a/modules/module-mongodb-storage/test/src/storage_compacting.test.ts +++ b/modules/module-mongodb-storage/test/src/storage_compacting.test.ts @@ -1,7 +1,7 @@ import { bucketRequest, bucketRequests, register, TEST_TABLE, test_utils } from '@powersync/service-core-tests'; import { describe, expect, test } from 'vitest'; import { INITIALIZED_MONGO_STORAGE_FACTORY } from './util.js'; -import { storage, SyncRulesBucketStorage } from '@powersync/service-core'; +import { storage, SyncRulesBucketStorage, updateSyncRulesFromYaml } from '@powersync/service-core'; describe('Mongo Sync Bucket Storage Compact', () => { register.registerCompactTests(INITIALIZED_MONGO_STORAGE_FACTORY); @@ -38,14 +38,14 @@ describe('Mongo Sync Bucket Storage Compact', () => { const setup = async () => { await using factory = await INITIALIZED_MONGO_STORAGE_FACTORY(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + 
updateSyncRulesFromYaml(` bucket_definitions: by_user: parameters: select request.user_id() as user_id data: [select * from test where owner_id = bucket.user_id] - ` - }); + `) + ); const bucketStorage = factory.getInstance(syncRules); const { checkpoint } = await populate(bucketStorage); @@ -89,14 +89,14 @@ bucket_definitions: const { factory } = await setup(); // Not populate another version (bucket definition name changed) - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: by_user2: parameters: select request.user_id() as user_id data: [select * from test where owner_id = bucket.user_id] - ` - }); + `) + ); const bucketStorage = factory.getInstance(syncRules); await populate(bucketStorage); diff --git a/modules/module-mongodb-storage/test/src/storage_sync.test.ts b/modules/module-mongodb-storage/test/src/storage_sync.test.ts index 4d61429b4..91731c78e 100644 --- a/modules/module-mongodb-storage/test/src/storage_sync.test.ts +++ b/modules/module-mongodb-storage/test/src/storage_sync.test.ts @@ -1,4 +1,4 @@ -import { storage } from '@powersync/service-core'; +import { storage, updateSyncRulesFromYaml } from '@powersync/service-core'; import { bucketRequest, register, TEST_TABLE, test_utils } from '@powersync/service-core-tests'; import { describe, expect, test } from 'vitest'; import { INITIALIZED_MONGO_STORAGE_FACTORY, TEST_STORAGE_VERSIONS } from './util.js'; @@ -12,15 +12,17 @@ function registerSyncStorageTests(storageFactory: storage.TestStorageFactory, st // but large enough in size to be split over multiple returned chunks. // Similar to the above test, but splits over 1MB chunks. await using factory = await storageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml( + ` bucket_definitions: global: data: - SELECT id, description FROM "%" `, - storageVersion - }); + { storageVersion } + ) + ); const bucketStorage = factory.getInstance(syncRules); const globalBucket = bucketRequest(syncRules, 'global[]'); diff --git a/modules/module-mongodb/test/src/change_stream_utils.ts b/modules/module-mongodb/test/src/change_stream_utils.ts index cd9e94056..cca169f3e 100644 --- a/modules/module-mongodb/test/src/change_stream_utils.ts +++ b/modules/module-mongodb/test/src/change_stream_utils.ts @@ -11,6 +11,7 @@ import { STORAGE_VERSION_CONFIG, SyncRulesBucketStorage, TestStorageOptions, + updateSyncRulesFromYaml, utils } from '@powersync/service-core'; import { METRICS_HELPER, test_utils } from '@powersync/service-core-tests'; @@ -99,11 +100,9 @@ export class ChangeStreamTestContext { } async updateSyncRules(content: string) { - const syncRules = await this.factory.updateSyncRules({ - content: content, - validate: true, - storageVersion: this.storageVersion - }); + const syncRules = await this.factory.updateSyncRules( + updateSyncRulesFromYaml(content, { validate: true, storageVersion: this.storageVersion }) + ); this.syncRulesId = syncRules.id; this.storage = this.factory.getInstance(syncRules); return this.storage!; diff --git a/modules/module-mssql/test/src/CDCStreamTestContext.ts b/modules/module-mssql/test/src/CDCStreamTestContext.ts index 6f7ed523c..00de56929 100644 --- a/modules/module-mssql/test/src/CDCStreamTestContext.ts +++ b/modules/module-mssql/test/src/CDCStreamTestContext.ts @@ -6,7 +6,8 @@ import { LEGACY_STORAGE_VERSION, OplogEntry, storage, - SyncRulesBucketStorage + 
SyncRulesBucketStorage, + updateSyncRulesFromYaml } from '@powersync/service-core'; import { METRICS_HELPER, test_utils } from '@powersync/service-core-tests'; import { clearTestDb, getClientCheckpoint, TEST_CONNECTION_OPTIONS } from './util.js'; @@ -73,11 +74,9 @@ export class CDCStreamTestContext implements AsyncDisposable { } async updateSyncRules(content: string) { - const syncRules = await this.factory.updateSyncRules({ - content: content, - validate: true, - storageVersion: LEGACY_STORAGE_VERSION - }); + const syncRules = await this.factory.updateSyncRules( + updateSyncRulesFromYaml(content, { validate: true, storageVersion: LEGACY_STORAGE_VERSION }) + ); this.storage = this.factory.getInstance(syncRules); return this.storage!; } diff --git a/modules/module-mysql/test/src/BinlogStreamUtils.ts b/modules/module-mysql/test/src/BinlogStreamUtils.ts index 8633fe5f1..065ae93a7 100644 --- a/modules/module-mysql/test/src/BinlogStreamUtils.ts +++ b/modules/module-mysql/test/src/BinlogStreamUtils.ts @@ -12,7 +12,8 @@ import { ProtocolOpId, ReplicationCheckpoint, storage, - SyncRulesBucketStorage + SyncRulesBucketStorage, + updateSyncRulesFromYaml } from '@powersync/service-core'; import { METRICS_HELPER, test_utils } from '@powersync/service-core-tests'; import mysqlPromise from 'mysql2/promise'; @@ -69,11 +70,9 @@ export class BinlogStreamTestContext { } async updateSyncRules(content: string): Promise { - const syncRules = await this.factory.updateSyncRules({ - content: content, - validate: true, - storageVersion: LEGACY_STORAGE_VERSION - }); + const syncRules = await this.factory.updateSyncRules( + updateSyncRulesFromYaml(content, { validate: true, storageVersion: LEGACY_STORAGE_VERSION }) + ); this.storage = this.factory.getInstance(syncRules); return this.storage!; } diff --git a/modules/module-postgres-storage/src/storage/PostgresBucketStorageFactory.ts b/modules/module-postgres-storage/src/storage/PostgresBucketStorageFactory.ts index c1d3d4611..176781298 100644 --- a/modules/module-postgres-storage/src/storage/PostgresBucketStorageFactory.ts +++ b/modules/module-postgres-storage/src/storage/PostgresBucketStorageFactory.ts @@ -1,4 +1,3 @@ -import * as framework from '@powersync/lib-services-framework'; import { GetIntanceOptions, storage, SyncRulesBucketStorage, UpdateSyncRulesOptions } from '@powersync/service-core'; import * as pg_wire from '@powersync/service-jpgwire'; import * as sync_rules from '@powersync/service-sync-rules'; @@ -19,10 +18,7 @@ export type PostgresBucketStorageOptions = { slot_name_prefix: string; }; -export class PostgresBucketStorageFactory - extends framework.BaseObserver - implements storage.BucketStorageFactory -{ +export class PostgresBucketStorageFactory extends storage.BucketStorageFactory { readonly db: lib_postgres.DatabaseClient; public readonly slot_name_prefix: string; @@ -145,42 +141,7 @@ export class PostgresBucketStorageFactory }; } - // TODO possibly share implementation in abstract class - async configureSyncRules(options: UpdateSyncRulesOptions): Promise<{ - updated: boolean; - persisted_sync_rules?: storage.PersistedSyncRulesContent; - lock?: storage.ReplicationLock; - }> { - const next = await this.getNextSyncRulesContent(); - const active = await this.getActiveSyncRulesContent(); - - if (next?.sync_rules_content == options.content) { - framework.logger.info('Sync rules from configuration unchanged'); - return { updated: false }; - } else if (next == null && active?.sync_rules_content == options.content) { - framework.logger.info('Sync rules 
from configuration unchanged'); - return { updated: false }; - } else { - framework.logger.info('Sync rules updated from configuration'); - const persisted_sync_rules = await this.updateSyncRules(options); - return { updated: true, persisted_sync_rules, lock: persisted_sync_rules.current_lock ?? undefined }; - } - } - async updateSyncRules(options: storage.UpdateSyncRulesOptions): Promise { - // TODO some shared implementation for this might be nice - if (options.validate) { - // Parse and validate before applying any changes - sync_rules.SqlSyncRules.fromYaml(options.content, { - // No schema-based validation at this point - schema: undefined, - defaultSchema: 'not_applicable', // Not needed for validation - throwOnError: true - }); - } else { - // Apply unconditionally. Any errors will be reported via the diagnostics API. - } - const storageVersion = options.storageVersion ?? storage.CURRENT_STORAGE_VERSION; return this.db.transaction(async (db) => { await db.sql` @@ -207,7 +168,7 @@ export class PostgresBucketStorageFactory FROM next_id ), - ${{ type: 'varchar', value: options.content }}, + ${{ type: 'varchar', value: options.config.yaml }}, ${{ type: 'varchar', value: storage.SyncRuleState.PROCESSING }}, CONCAT( ${{ type: 'varchar', value: this.slot_name_prefix }}, @@ -242,10 +203,8 @@ export class PostgresBucketStorageFactory // The current one will continue serving sync requests until the next one has finished processing. if (next != null && next.id == sync_rules_group_id) { // We need to redo the "next" sync rules - await this.updateSyncRules({ - content: next.sync_rules_content, - validate: false - }); + + await this.updateSyncRules(next.asUpdateOptions()); // Pro-actively stop replicating await this.db.sql` UPDATE sync_rules @@ -257,10 +216,7 @@ export class PostgresBucketStorageFactory `.execute(); } else if (next == null && active?.id == sync_rules_group_id) { // Slot removed for "active" sync rules, while there is no "next" one. - await this.updateSyncRules({ - content: active.sync_rules_content, - validate: false - }); + await this.updateSyncRules(active.asUpdateOptions()); // Pro-actively stop replicating, but still serve clients with existing data await this.db.sql` @@ -286,12 +242,6 @@ export class PostgresBucketStorageFactory } } - // TODO possibly share via abstract class - async getActiveSyncRules(options: storage.ParseSyncRulesOptions): Promise { - const content = await this.getActiveSyncRulesContent(); - return content?.parsed(options) ?? null; - } - async getActiveSyncRulesContent(): Promise { const activeRow = await this.db.sql` SELECT @@ -315,12 +265,6 @@ export class PostgresBucketStorageFactory return new PostgresPersistedSyncRulesContent(this.db, activeRow); } - // TODO possibly share via abstract class - async getNextSyncRules(options: storage.ParseSyncRulesOptions): Promise { - const content = await this.getNextSyncRulesContent(); - return content?.parsed(options) ?? 
null; - } - async getNextSyncRulesContent(): Promise { const nextRow = await this.db.sql` SELECT diff --git a/modules/module-postgres-storage/src/storage/sync-rules/PostgresPersistedSyncRulesContent.ts b/modules/module-postgres-storage/src/storage/sync-rules/PostgresPersistedSyncRulesContent.ts index 39854e618..79bc12e70 100644 --- a/modules/module-postgres-storage/src/storage/sync-rules/PostgresPersistedSyncRulesContent.ts +++ b/modules/module-postgres-storage/src/storage/sync-rules/PostgresPersistedSyncRulesContent.ts @@ -10,72 +10,24 @@ import { } from '@powersync/service-sync-rules'; import { models } from '../../types/types.js'; -export class PostgresPersistedSyncRulesContent implements storage.PersistedSyncRulesContent { - public readonly slot_name: string; - - public readonly id: number; - public readonly sync_rules_content: string; - public readonly last_checkpoint_lsn: string | null; - public readonly last_fatal_error: string | null; - public readonly last_keepalive_ts: Date | null; - public readonly last_checkpoint_ts: Date | null; - public readonly active: boolean; - public readonly storageVersion: number; +export class PostgresPersistedSyncRulesContent extends storage.PersistedSyncRulesContent { current_lock: storage.ReplicationLock | null = null; constructor( private db: lib_postgres.DatabaseClient, row: models.SyncRulesDecoded ) { - this.id = Number(row.id); - this.sync_rules_content = row.content; - this.last_checkpoint_lsn = row.last_checkpoint_lsn; - this.slot_name = row.slot_name; - this.last_fatal_error = row.last_fatal_error; - this.last_checkpoint_ts = row.last_checkpoint_ts ? new Date(row.last_checkpoint_ts) : null; - this.last_keepalive_ts = row.last_keepalive_ts ? new Date(row.last_keepalive_ts) : null; - this.active = row.state == 'ACTIVE'; - this.storageVersion = row.storage_version ?? storage.LEGACY_STORAGE_VERSION; - } - - /** - * Load the storage config. - * - * This may throw if the persisted storage version is not supported. - */ - getStorageConfig() { - const storageConfig = storage.STORAGE_VERSION_CONFIG[this.storageVersion]; - if (storageConfig == null) { - throw new ServiceError( - ErrorCode.PSYNC_S1005, - `Unsupported storage version ${this.storageVersion} for sync rules ${this.id}` - ); - } - return storageConfig; - } - - parsed(options: storage.ParseSyncRulesOptions): storage.PersistedSyncRules { - let hydrationState: HydrationState; - const syncRules = SqlSyncRules.fromYaml(this.sync_rules_content, options); - const storageConfig = this.getStorageConfig(); - if ( - storageConfig.versionedBuckets || - syncRules.config.compatibility.isEnabled(CompatibilityOption.versionedBucketIds) - ) { - hydrationState = versionedHydrationState(this.id); - } else { - hydrationState = DEFAULT_HYDRATION_STATE; - } - return { - id: this.id, - slot_name: this.slot_name, - sync_rules: syncRules, - hydratedSyncRules() { - return this.sync_rules.config.hydrate({ - hydrationState - }); - } - }; + super({ + id: Number(row.id), + sync_rules_content: row.content, + last_checkpoint_lsn: row.last_checkpoint_lsn, + slot_name: row.slot_name, + last_fatal_error: row.last_fatal_error, + last_checkpoint_ts: row.last_checkpoint_ts ? new Date(row.last_checkpoint_ts) : null, + last_keepalive_ts: row.last_keepalive_ts ? new Date(row.last_keepalive_ts) : null, + active: row.state == 'ACTIVE', + storageVersion: row.storage_version ?? 
storage.LEGACY_STORAGE_VERSION + }); } async lock(): Promise { diff --git a/modules/module-postgres-storage/test/src/storage.test.ts b/modules/module-postgres-storage/test/src/storage.test.ts index b8a82f4ce..1662f0320 100644 --- a/modules/module-postgres-storage/test/src/storage.test.ts +++ b/modules/module-postgres-storage/test/src/storage.test.ts @@ -1,4 +1,4 @@ -import { storage } from '@powersync/service-core'; +import { storage, updateSyncRulesFromYaml } from '@powersync/service-core'; import { bucketRequestMap, register, TEST_TABLE, test_utils } from '@powersync/service-core-tests'; import { describe, expect, test } from 'vitest'; import { POSTGRES_STORAGE_FACTORY } from './util.js'; @@ -25,14 +25,14 @@ describe('Postgres Sync Bucket Storage - pg-specific', () => { // but large enough in size to be split over multiple returned chunks. // Similar to the above test, but splits over 1MB chunks. await using factory = await POSTGRES_STORAGE_FACTORY(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: global: data: - SELECT id, description FROM "%" - ` - }); + `) + ); const bucketStorage = factory.getInstance(syncRules); const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => { diff --git a/modules/module-postgres-storage/test/src/storage_sync.test.ts b/modules/module-postgres-storage/test/src/storage_sync.test.ts index 62eeb479f..cfcfdbaa0 100644 --- a/modules/module-postgres-storage/test/src/storage_sync.test.ts +++ b/modules/module-postgres-storage/test/src/storage_sync.test.ts @@ -1,4 +1,4 @@ -import { storage } from '@powersync/service-core'; +import { storage, updateSyncRulesFromYaml } from '@powersync/service-core'; import { bucketRequest, register, TEST_TABLE, test_utils } from '@powersync/service-core-tests'; import { describe, expect, test } from 'vitest'; import { POSTGRES_STORAGE_FACTORY, TEST_STORAGE_VERSIONS } from './util.js'; @@ -19,15 +19,17 @@ function registerStorageVersionTests(storageVersion: number) { // but large enough in size to be split over multiple returned chunks. // Similar to the above test, but splits over 1MB chunks. 
await using factory = await storageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml( + ` bucket_definitions: global: data: - SELECT id, description FROM "%" `, - storageVersion - }); + { storageVersion } + ) + ); const bucketStorage = factory.getInstance(syncRules); const globalBucket = bucketRequest(syncRules, 'global[]'); diff --git a/modules/module-postgres/test/src/slow_tests.test.ts b/modules/module-postgres/test/src/slow_tests.test.ts index 112a49a39..216e6e18d 100644 --- a/modules/module-postgres/test/src/slow_tests.test.ts +++ b/modules/module-postgres/test/src/slow_tests.test.ts @@ -15,7 +15,11 @@ import * as pgwire from '@powersync/service-jpgwire'; import { SqliteRow } from '@powersync/service-sync-rules'; import { PgManager } from '@module/replication/PgManager.js'; -import { createCoreReplicationMetrics, initializeCoreReplicationMetrics } from '@powersync/service-core'; +import { + createCoreReplicationMetrics, + initializeCoreReplicationMetrics, + updateSyncRulesFromYaml +} from '@powersync/service-core'; import { METRICS_HELPER, test_utils } from '@powersync/service-core-tests'; import * as mongo_storage from '@powersync/service-module-mongodb-storage'; import * as postgres_storage from '@powersync/service-module-postgres-storage'; @@ -82,7 +86,7 @@ bucket_definitions: data: - SELECT * FROM "test_data" `; - const syncRules = await f.updateSyncRules({ content: syncRuleContent, storageVersion }); + const syncRules = await f.updateSyncRules(updateSyncRulesFromYaml(syncRuleContent, { storageVersion })); const storage = f.getInstance(syncRules); abortController = new AbortController(); const options: WalStreamOptions = { @@ -307,7 +311,8 @@ bucket_definitions: data: - SELECT id, description FROM "test_data" `; - const syncRules = await f.updateSyncRules({ content: syncRuleContent, storageVersion }); + + const syncRules = await f.updateSyncRules(updateSyncRulesFromYaml(syncRuleContent, { storageVersion })); const storage = f.getInstance(syncRules); // 1. 
Setup some base data that will be replicated in initial replication diff --git a/modules/module-postgres/test/src/validation.test.ts b/modules/module-postgres/test/src/validation.test.ts index c1d110fe8..6fd71c434 100644 --- a/modules/module-postgres/test/src/validation.test.ts +++ b/modules/module-postgres/test/src/validation.test.ts @@ -3,6 +3,7 @@ import { expect, test } from 'vitest'; import { INITIALIZED_MONGO_STORAGE_FACTORY } from './util.js'; import { WalStreamTestContext } from './wal_stream_utils.js'; +import { updateSyncRulesFromYaml } from '@powersync/service-core'; test('validate tables', async () => { await using context = await WalStreamTestContext.open(INITIALIZED_MONGO_STORAGE_FACTORY); @@ -19,7 +20,7 @@ bucket_definitions: - SELECT * FROM "other%" `; - const syncRules = await context.factory.updateSyncRules({ content: syncRuleContent }); + const syncRules = await context.factory.updateSyncRules(updateSyncRulesFromYaml(syncRuleContent)); const tablePatterns = syncRules.parsed({ defaultSchema: 'public' }).sync_rules.config.getSourceTables(); const tableInfo = await getDebugTablesInfo({ diff --git a/modules/module-postgres/test/src/wal_stream_utils.ts b/modules/module-postgres/test/src/wal_stream_utils.ts index d5a0ac2dc..540ba69c2 100644 --- a/modules/module-postgres/test/src/wal_stream_utils.ts +++ b/modules/module-postgres/test/src/wal_stream_utils.ts @@ -10,7 +10,8 @@ import { OplogEntry, STORAGE_VERSION_CONFIG, storage, - SyncRulesBucketStorage + SyncRulesBucketStorage, + updateSyncRulesFromYaml } from '@powersync/service-core'; import { METRICS_HELPER, test_utils } from '@powersync/service-core-tests'; import * as pgwire from '@powersync/service-jpgwire'; @@ -103,11 +104,9 @@ export class WalStreamTestContext implements AsyncDisposable { } async updateSyncRules(content: string) { - const syncRules = await this.factory.updateSyncRules({ - content: content, - validate: true, - storageVersion: this.storageVersion - }); + const syncRules = await this.factory.updateSyncRules( + updateSyncRulesFromYaml(content, { validate: true, storageVersion: this.storageVersion }) + ); this.syncRulesId = syncRules.id; this.storage = this.factory.getInstance(syncRules); return this.storage!; diff --git a/packages/service-core-tests/src/tests/register-compacting-tests.ts b/packages/service-core-tests/src/tests/register-compacting-tests.ts index bac08de74..59f60bfb3 100644 --- a/packages/service-core-tests/src/tests/register-compacting-tests.ts +++ b/packages/service-core-tests/src/tests/register-compacting-tests.ts @@ -1,4 +1,4 @@ -import { storage } from '@powersync/service-core'; +import { storage, updateSyncRulesFromYaml } from '@powersync/service-core'; import { expect, test } from 'vitest'; import * as test_utils from '../test-utils/test-utils-index.js'; import { bucketRequest, bucketRequestMap, bucketRequests } from './util.js'; @@ -8,13 +8,13 @@ const TEST_TABLE = test_utils.makeTestTable('test', ['id']); export function registerCompactTests(generateStorageFactory: storage.TestStorageFactory) { test('compacting (1)', async () => { await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: global: data: [select * from test] - ` - }); + `) + ); const bucketStorage = factory.getInstance(syncRules); const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => { @@ -127,13 +127,13 @@ bucket_definitions: 
test('compacting (2)', async () => { await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: global: data: [select * from test] - ` - }); + `) + ); const bucketStorage = factory.getInstance(syncRules); const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => { @@ -250,13 +250,13 @@ bucket_definitions: test('compacting (3)', async () => { await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: global: data: [select * from test] - ` - }); + `) + ); const bucketStorage = factory.getInstance(syncRules); const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => { @@ -338,16 +338,16 @@ bucket_definitions: test('compacting (4)', async () => { await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - /* yaml */ content: ` bucket_definitions: + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: grouped: # The parameter query here is not important # We specifically don't want to create bucket_parameter records here # since the op_ids for bucket_data could vary between storage implementations. parameters: select 'b' as b data: - - select * from test where b = bucket.b` - }); + - select * from test where b = bucket.b`) + ); const bucketStorage = factory.getInstance(syncRules); const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => { @@ -468,13 +468,13 @@ bucket_definitions: test('partial checksums after compacting', async () => { await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: global: data: [select * from test] - ` - }); + `) + ); const bucketStorage = factory.getInstance(syncRules); const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => { @@ -539,13 +539,13 @@ bucket_definitions: test('partial checksums after compacting (2)', async () => { await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: global: data: [select * from test] - ` - }); + `) + ); const bucketStorage = factory.getInstance(syncRules); const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => { diff --git a/packages/service-core-tests/src/tests/register-data-storage-checkpoint-tests.ts b/packages/service-core-tests/src/tests/register-data-storage-checkpoint-tests.ts index 72dd7dced..f99c7999f 100644 --- a/packages/service-core-tests/src/tests/register-data-storage-checkpoint-tests.ts +++ b/packages/service-core-tests/src/tests/register-data-storage-checkpoint-tests.ts @@ -1,4 +1,4 @@ -import { storage } from '@powersync/service-core'; +import { storage, updateSyncRulesFromYaml } from '@powersync/service-core'; import { expect, test } from 'vitest'; import * as test_utils from '../test-utils/test-utils-index.js'; @@ -15,14 +15,13 @@ import * as test_utils from '../test-utils/test-utils-index.js'; export function 
registerDataStorageCheckpointTests(generateStorageFactory: storage.TestStorageFactory) { test('managed write checkpoints - checkpoint after write', async (context) => { await using factory = await generateStorageFactory(); - const r = await factory.configureSyncRules({ - content: ` + const r = await factory.configureSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: mybucket: data: [] - `, - validate: false - }); + `) + ); const bucketStorage = factory.getInstance(r.persisted_sync_rules!); const abortController = new AbortController(); @@ -55,14 +54,13 @@ bucket_definitions: test('managed write checkpoints - write after checkpoint', async (context) => { await using factory = await generateStorageFactory(); - const r = await factory.configureSyncRules({ - content: ` + const r = await factory.configureSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: mybucket: data: [] - `, - validate: false - }); + `) + ); const bucketStorage = factory.getInstance(r.persisted_sync_rules!); const abortController = new AbortController(); @@ -117,14 +115,13 @@ bucket_definitions: test('custom write checkpoints - checkpoint after write', async (context) => { await using factory = await generateStorageFactory(); - const r = await factory.configureSyncRules({ - content: ` + const r = await factory.configureSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: mybucket: data: [] - `, - validate: false - }); + `) + ); const bucketStorage = factory.getInstance(r.persisted_sync_rules!); bucketStorage.setWriteCheckpointMode(storage.WriteCheckpointMode.CUSTOM); @@ -157,14 +154,13 @@ bucket_definitions: test('custom write checkpoints - standalone checkpoint', async (context) => { await using factory = await generateStorageFactory(); - const r = await factory.configureSyncRules({ - content: ` + const r = await factory.configureSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: mybucket: data: [] - `, - validate: false - }); + `) + ); const bucketStorage = factory.getInstance(r.persisted_sync_rules!); bucketStorage.setWriteCheckpointMode(storage.WriteCheckpointMode.CUSTOM); @@ -200,14 +196,13 @@ bucket_definitions: test('custom write checkpoints - write after checkpoint', async (context) => { await using factory = await generateStorageFactory(); - const r = await factory.configureSyncRules({ - content: ` + const r = await factory.configureSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: mybucket: data: [] - `, - validate: false - }); + `) + ); const bucketStorage = factory.getInstance(r.persisted_sync_rules!); bucketStorage.setWriteCheckpointMode(storage.WriteCheckpointMode.CUSTOM); diff --git a/packages/service-core-tests/src/tests/register-data-storage-data-tests.ts b/packages/service-core-tests/src/tests/register-data-storage-data-tests.ts index 26ded434d..c9e8c34e0 100644 --- a/packages/service-core-tests/src/tests/register-data-storage-data-tests.ts +++ b/packages/service-core-tests/src/tests/register-data-storage-data-tests.ts @@ -1,4 +1,10 @@ -import { BucketDataBatchOptions, getUuidReplicaIdentityBson, OplogEntry, storage } from '@powersync/service-core'; +import { + BucketDataBatchOptions, + getUuidReplicaIdentityBson, + OplogEntry, + storage, + updateSyncRulesFromYaml +} from '@powersync/service-core'; import { describe, expect, test } from 'vitest'; import * as test_utils from '../test-utils/test-utils-index.js'; import { bucketRequest, bucketRequestMap, bucketRequests, TEST_TABLE } from './util.js'; @@ -27,14 +33,14 @@ const normalizeOplogData = (data: 
OplogEntry['data']) => { export function registerDataStorageDataTests(generateStorageFactory: storage.TestStorageFactory) { test('removing row', async () => { await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: global: data: - SELECT id, description FROM "%" -` - }); +`) + ); const bucketStorage = factory.getInstance(syncRules); await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => { @@ -92,14 +98,14 @@ bucket_definitions: test('changing client ids', async () => { await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: global: data: - SELECT client_id as id, description FROM "%" -` - }); +`) + ); const bucketStorage = factory.getInstance(syncRules); const sourceTable = TEST_TABLE; @@ -159,14 +165,14 @@ bucket_definitions: test('re-apply delete', async () => { await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: global: data: - SELECT id, description FROM "%" -` - }); +`) + ); const bucketStorage = factory.getInstance(syncRules); await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => { @@ -240,14 +246,14 @@ bucket_definitions: test('re-apply update + delete', async () => { await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: global: data: - SELECT id, description FROM "%" -` - }); +`) + ); const bucketStorage = factory.getInstance(syncRules); await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => { @@ -374,14 +380,14 @@ bucket_definitions: // 2. Output order not being correct. 
await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: global: data: - SELECT id, description FROM "test" -` - }); +`) + ); const bucketStorage = factory.getInstance(syncRules); // Pre-setup @@ -532,14 +538,14 @@ bucket_definitions: ]); } await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: global: data: - SELECT id, description FROM "test" -` - }); +`) + ); const bucketStorage = factory.getInstance(syncRules); const sourceTable = test_utils.makeTestTable('test', ['id', 'description']); @@ -640,14 +646,14 @@ bucket_definitions: } await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: global: data: - SELECT id, description FROM "test" -` - }); +`) + ); const bucketStorage = factory.getInstance(syncRules); const sourceTable = test_utils.makeTestTable('test', ['id', 'description']); @@ -738,14 +744,14 @@ bucket_definitions: // The specific batch splits is an implementation detail of the storage driver, // and the test will have to updated when other implementations are added. await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: global: data: - SELECT id, description FROM "%" -` - }); +`) + ); const bucketStorage = factory.getInstance(syncRules); await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => { @@ -847,14 +853,14 @@ bucket_definitions: test('long batch', async () => { // Test syncing a batch of data that is limited by count. 
await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: global: data: - SELECT id, description FROM "%" -` - }); +`) + ); const bucketStorage = factory.getInstance(syncRules); await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => { @@ -931,8 +937,8 @@ bucket_definitions: describe('batch has_more', () => { const setup = async (options: BucketDataBatchOptions) => { await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: global1: data: @@ -940,8 +946,8 @@ bucket_definitions: global2: data: - SELECT id, description FROM test WHERE bucket = 'global2' - ` - }); + `) + ); const bucketStorage = factory.getInstance(syncRules); await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => { @@ -1094,7 +1100,7 @@ bucket_definitions: replication_size_bytes: 0 }); - const r = await f.configureSyncRules({ content: 'bucket_definitions: {}', validate: false }); + const r = await f.configureSyncRules(updateSyncRulesFromYaml('bucket_definitions: {}')); const storage = f.getInstance(r.persisted_sync_rules!); await storage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => { await batch.keepalive('1/0'); @@ -1109,15 +1115,15 @@ bucket_definitions: // but large enough in size to be split over multiple returned chunks. // Similar to the above test, but splits over 1MB chunks. await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: global: data: - SELECT id FROM test - SELECT id FROM test_ignore WHERE false - ` - }); + `) + ); const bucketStorage = factory.getInstance(syncRules); const sourceTable = test_utils.makeTestTable('test', ['id']); @@ -1156,14 +1162,14 @@ bucket_definitions: test('unchanged checksums', async () => { await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: global: data: - SELECT client_id as id, description FROM "%" -` - }); +`) + ); const bucketStorage = factory.getInstance(syncRules); const sourceTable = TEST_TABLE; @@ -1202,15 +1208,15 @@ bucket_definitions: export function testChecksumBatching(generateStorageFactory: storage.TestStorageFactory) { test('checksums for multiple buckets', async () => { await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: user: parameters: select request.user_id() as user_id data: - select id, description from test where user_id = bucket.user_id -` - }); +`) + ); const bucketStorage = factory.getInstance(syncRules); const sourceTable = TEST_TABLE; diff --git a/packages/service-core-tests/src/tests/register-data-storage-parameter-tests.ts b/packages/service-core-tests/src/tests/register-data-storage-parameter-tests.ts index 5618110a6..1b9763921 100644 --- a/packages/service-core-tests/src/tests/register-data-storage-parameter-tests.ts +++ b/packages/service-core-tests/src/tests/register-data-storage-parameter-tests.ts @@ -1,4 
+1,4 @@ -import { JwtPayload, storage } from '@powersync/service-core'; +import { JwtPayload, storage, updateSyncRulesFromYaml } from '@powersync/service-core'; import { RequestParameters, ScopedParameterLookup, SqliteJsonRow } from '@powersync/service-sync-rules'; import { expect, test } from 'vitest'; import * as test_utils from '../test-utils/test-utils-index.js'; @@ -20,15 +20,15 @@ export function registerDataStorageParameterTests(generateStorageFactory: storag test('save and load parameters', async () => { await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: mybucket: parameters: - SELECT group_id FROM test WHERE id1 = token_parameters.user_id OR id2 = token_parameters.user_id data: [] - ` - }); + `) + ); const bucketStorage = factory.getInstance(syncRules); await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => { @@ -70,15 +70,15 @@ bucket_definitions: test('it should use the latest version', async () => { await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: mybucket: parameters: - SELECT group_id FROM test WHERE id = token_parameters.user_id data: [] - ` - }); + `) + ); const bucketStorage = factory.getInstance(syncRules); await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => { @@ -126,8 +126,8 @@ bucket_definitions: test('it should use the latest version after updates', async () => { await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: mybucket: parameters: @@ -135,8 +135,8 @@ bucket_definitions: FROM todos WHERE list_id IN token_parameters.list_id data: [] - ` - }); + `) + ); const bucketStorage = factory.getInstance(syncRules); const table = test_utils.makeTestTable('todos', ['id', 'list_id']); @@ -201,15 +201,15 @@ bucket_definitions: test('save and load parameters with different number types', async () => { await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: mybucket: parameters: - SELECT group_id FROM test WHERE n1 = token_parameters.n1 and f2 = token_parameters.f2 and f3 = token_parameters.f3 data: [] - ` - }); + `) + ); const bucketStorage = factory.getInstance(syncRules); await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => { @@ -251,15 +251,15 @@ bucket_definitions: // test this to ensure correct deserialization. 
await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: mybucket: parameters: - SELECT group_id FROM test WHERE n1 = token_parameters.n1 data: [] - ` - }); + `) + ); const bucketStorage = factory.getInstance(syncRules); await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => { @@ -304,16 +304,16 @@ bucket_definitions: const WORKSPACE_TABLE = test_utils.makeTestTable('workspace', ['id']); await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: by_workspace: parameters: - SELECT id as workspace_id FROM workspace WHERE workspace."userId" = token_parameters.user_id data: [] - ` - }); + `) + ); const sync_rules = syncRules.parsed(test_utils.PARSE_OPTIONS).hydratedSyncRules(); const bucketStorage = factory.getInstance(syncRules); @@ -358,16 +358,16 @@ bucket_definitions: const WORKSPACE_TABLE = test_utils.makeTestTable('workspace'); await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: by_public_workspace: parameters: - SELECT id as workspace_id FROM workspace WHERE workspace.visibility = 'public' data: [] - ` - }); + `) + ); const sync_rules = syncRules.parsed(test_utils.PARSE_OPTIONS).hydratedSyncRules(); const bucketStorage = factory.getInstance(syncRules); @@ -444,8 +444,8 @@ bucket_definitions: const WORKSPACE_TABLE = test_utils.makeTestTable('workspace'); await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: by_workspace: parameters: @@ -454,8 +454,8 @@ bucket_definitions: - SELECT id as workspace_id FROM workspace WHERE workspace.user_id = token_parameters.user_id data: [] - ` - }); + `) + ); const sync_rules = syncRules.parsed(test_utils.PARSE_OPTIONS).hydratedSyncRules(); const bucketStorage = factory.getInstance(syncRules); @@ -541,15 +541,15 @@ bucket_definitions: test('truncate parameters', async () => { await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: mybucket: parameters: - SELECT group_id FROM test WHERE id1 = token_parameters.user_id OR id2 = token_parameters.user_id data: [] - ` - }); + `) + ); const bucketStorage = factory.getInstance(syncRules); await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => { @@ -576,16 +576,16 @@ bucket_definitions: test('invalidate cached parsed sync rules', async () => { await using bucketStorageFactory = await generateStorageFactory(); - const syncRules = await bucketStorageFactory.updateSyncRules({ - content: ` + const syncRules = await bucketStorageFactory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: by_workspace: parameters: - SELECT id as workspace_id FROM workspace WHERE workspace."userId" = token_parameters.user_id data: [] - ` - }); + `) + ); const syncBucketStorage = bucketStorageFactory.getInstance(syncRules); const parsedSchema1 = syncBucketStorage.getParsedSyncRules({ diff 
--git a/packages/service-core-tests/src/tests/register-parameter-compacting-tests.ts b/packages/service-core-tests/src/tests/register-parameter-compacting-tests.ts index 10263fc7d..23868d72e 100644 --- a/packages/service-core-tests/src/tests/register-parameter-compacting-tests.ts +++ b/packages/service-core-tests/src/tests/register-parameter-compacting-tests.ts @@ -1,4 +1,4 @@ -import { storage } from '@powersync/service-core'; +import { storage, updateSyncRulesFromYaml } from '@powersync/service-core'; import { ScopedParameterLookup } from '@powersync/service-sync-rules'; import { expect, test } from 'vitest'; import * as test_utils from '../test-utils/test-utils-index.js'; @@ -8,14 +8,14 @@ const TEST_TABLE = test_utils.makeTestTable('test', ['id']); export function registerParameterCompactTests(generateStorageFactory: storage.TestStorageFactory) { test('compacting parameters', async () => { await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: test: parameters: select id from test where id = request.user_id() data: [] - ` - }); + `) + ); const bucketStorage = factory.getInstance(syncRules); await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => { @@ -91,14 +91,14 @@ bucket_definitions: for (let cacheLimit of [1, 10]) { test(`compacting deleted parameters with cache size ${cacheLimit}`, async () => { await using factory = await generateStorageFactory(); - const syncRules = await factory.updateSyncRules({ - content: ` + const syncRules = await factory.updateSyncRules( + updateSyncRulesFromYaml(` bucket_definitions: test: parameters: select id from test where uid = request.user_id() data: [] - ` - }); + `) + ); const bucketStorage = factory.getInstance(syncRules); await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => { diff --git a/packages/service-core-tests/src/tests/register-sync-tests.ts b/packages/service-core-tests/src/tests/register-sync-tests.ts index 3a693ba85..b9260bf68 100644 --- a/packages/service-core-tests/src/tests/register-sync-tests.ts +++ b/packages/service-core-tests/src/tests/register-sync-tests.ts @@ -1,15 +1,14 @@ import { createCoreAPIMetrics, JwtPayload, - LEGACY_STORAGE_VERSION, storage, StreamingSyncCheckpoint, StreamingSyncCheckpointDiff, sync, + updateSyncRulesFromYaml, utils } from '@powersync/service-core'; import { JSONBig } from '@powersync/service-jsonbig'; -import { BucketSourceType, RequestParameters } from '@powersync/service-sync-rules'; import path from 'path'; import * as timers from 'timers/promises'; import { fileURLToPath } from 'url'; @@ -49,14 +48,13 @@ export function registerSyncTests(factory: storage.TestStorageFactory, options: maxDataFetchConcurrency: 2 }); - const updateSyncRules = ( - bucketStorageFactory: storage.BucketStorageFactory, - updateOptions: storage.UpdateSyncRulesOptions - ) => { - return bucketStorageFactory.updateSyncRules({ - ...updateOptions, - storageVersion: updateOptions.storageVersion ?? 
options.storageVersion - }); + const updateSyncRules = (bucketStorageFactory: storage.BucketStorageFactory, updateOptions: { content: string }) => { + return bucketStorageFactory.updateSyncRules( + updateSyncRulesFromYaml(updateOptions.content, { + validate: true, + storageVersion: options.storageVersion + }) + ); }; test('sync global data', async () => { diff --git a/packages/service-core/src/replication/AbstractReplicator.ts b/packages/service-core/src/replication/AbstractReplicator.ts index 8331a6c27..81dbc0af4 100644 --- a/packages/service-core/src/replication/AbstractReplicator.ts +++ b/packages/service-core/src/replication/AbstractReplicator.ts @@ -138,11 +138,9 @@ export abstract class AbstractReplicator { + throw new Error('Lock not implemented'); + } + + parsed(options: storage.ParseSyncRulesOptions): storage.PersistedSyncRules { + return { + ...this, + sync_rules: SqlSyncRules.fromYaml(this.sync_rules_content, { + ...this.apiHandler.getParseSyncRulesOptions(), + schema: this.schema + }), + hydratedSyncRules() { + return this.sync_rules.config.hydrate({ hydrationState: DEFAULT_HYDRATION_STATE }); + } + }; + } +} + export const validate = routeDefinition({ path: '/api/admin/v1/validate', method: router.HTTPMethod.POST, @@ -168,31 +198,15 @@ export const validate = routeDefinition({ const schemaData = await api.getConnectionsSchema(apiHandler); const schema = new StaticSchema(schemaData.connections); - const sync_rules: storage.PersistedSyncRulesContent = { + const sync_rules = new FakeSyncRulesContentForValidation(apiHandler, schema, { // Dummy values id: 0, slot_name: '', active: false, last_checkpoint_lsn: '', storageVersion: storage.LEGACY_STORAGE_VERSION, - - parsed() { - return { - ...this, - sync_rules: SqlSyncRules.fromYaml(content, { - ...apiHandler.getParseSyncRulesOptions(), - schema - }), - hydratedSyncRules() { - return this.sync_rules.config.hydrate({ hydrationState: DEFAULT_HYDRATION_STATE }); - } - }; - }, - sync_rules_content: content, - async lock() { - throw new Error('Lock not implemented'); - } - }; + sync_rules_content: content + }); const connectionStatus = await apiHandler.getConnectionStatus(); if (!connectionStatus) { diff --git a/packages/service-core/src/routes/endpoints/sync-rules.ts b/packages/service-core/src/routes/endpoints/sync-rules.ts index 9d32032e8..a27cc5059 100644 --- a/packages/service-core/src/routes/endpoints/sync-rules.ts +++ b/packages/service-core/src/routes/endpoints/sync-rules.ts @@ -1,11 +1,12 @@ import { ErrorCode, errors, router, schema } from '@powersync/lib-services-framework'; -import { SqlSyncRules, SyncRulesErrors } from '@powersync/service-sync-rules'; +import { SqlSyncRules, SyncConfigWithErrors, SyncRulesErrors } from '@powersync/service-sync-rules'; import type { FastifyPluginAsync } from 'fastify'; import * as t from 'ts-codec'; import { RouteAPI } from '../../api/RouteAPI.js'; import { authApi } from '../auth.js'; import { routeDefinition } from '../router.js'; +import { updateSyncRulesFromConfig, updateSyncRulesFromYaml } from '../../storage/BucketStorageFactory.js'; const DeploySyncRulesRequest = t.object({ content: t.string @@ -51,10 +52,11 @@ export const deploySyncRules = routeDefinition({ }); } const content = payload.params.content; + let syncConfig: SyncConfigWithErrors; try { const apiHandler = service_context.routerEngine.getAPI(); - SqlSyncRules.fromYaml(payload.params.content, { + syncConfig = SqlSyncRules.fromYaml(payload.params.content, { ...apiHandler.getParseSyncRulesOptions(), // We don't do any 
schema-level validation at this point schema: undefined @@ -68,11 +70,9 @@ export const deploySyncRules = routeDefinition({ }); } - const sync_rules = await storageEngine.activeBucketStorage.updateSyncRules({ - content: content, - // Aready validated above - validate: false - }); + const sync_rules = await storageEngine.activeBucketStorage.updateSyncRules( + updateSyncRulesFromConfig(syncConfig.config) + ); return { slot_name: sync_rules.slot_name @@ -168,12 +168,13 @@ export const reprocessSyncRules = routeDefinition({ }); } - const new_rules = await activeBucketStorage.updateSyncRules({ - content: sync_rules.sync_rules.config.content, - // These sync rules already passed validation. But if the rules are not valid anymore due - // to a service change, we do want to report the error here. - validate: true - }); + const new_rules = await activeBucketStorage.updateSyncRules( + updateSyncRulesFromYaml(sync_rules.sync_rules.config.content, { + // These sync rules already passed validation. But if the rules are not valid anymore due + // to a service change, we do want to report the error here. + validate: true + }) + ); return { slot_name: new_rules.slot_name }; diff --git a/packages/service-core/src/storage/BucketStorageFactory.ts b/packages/service-core/src/storage/BucketStorageFactory.ts index bc2f9bde5..1b69355af 100644 --- a/packages/service-core/src/storage/BucketStorageFactory.ts +++ b/packages/service-core/src/storage/BucketStorageFactory.ts @@ -1,9 +1,10 @@ -import { ObserverClient } from '@powersync/lib-services-framework'; +import { BaseObserver, logger } from '@powersync/lib-services-framework'; import { ParseSyncRulesOptions, PersistedSyncRules, PersistedSyncRulesContent } from './PersistedSyncRulesContent.js'; import { ReplicationEventPayload } from './ReplicationEventPayload.js'; import { ReplicationLock } from './ReplicationLock.js'; import { SyncRulesBucketStorage } from './SyncRulesBucketStorage.js'; import { ReportStorage } from './ReportStorage.js'; +import { SqlSyncRules, SyncConfig } from '@powersync/service-sync-rules'; /** * Represents a configured storage provider. @@ -13,23 +14,41 @@ import { ReportStorage } from './ReportStorage.js'; * * Storage APIs for a specific copy of sync rules are provided by the `SyncRulesBucketStorage` instances. */ -export interface BucketStorageFactory extends ObserverClient, AsyncDisposable { +export abstract class BucketStorageFactory + extends BaseObserver + implements AsyncDisposable +{ /** * Update sync rules from configuration, if changed. */ - configureSyncRules( + async configureSyncRules( options: UpdateSyncRulesOptions - ): Promise<{ updated: boolean; persisted_sync_rules?: PersistedSyncRulesContent; lock?: ReplicationLock }>; + ): Promise<{ updated: boolean; persisted_sync_rules?: PersistedSyncRulesContent; lock?: ReplicationLock }> { + const next = await this.getNextSyncRulesContent(); + const active = await this.getActiveSyncRulesContent(); + + if (next?.sync_rules_content == options.config.yaml) { + logger.info('Sync rules from configuration unchanged'); + return { updated: false }; + } else if (next == null && active?.sync_rules_content == options.config.yaml) { + logger.info('Sync rules from configuration unchanged'); + return { updated: false }; + } else { + logger.info('Sync rules updated from configuration'); + const persisted_sync_rules = await this.updateSyncRules(options); + return { updated: true, persisted_sync_rules, lock: persisted_sync_rules.current_lock ?? 
undefined }; + } + } /** * Get a storage instance to query sync data for specific sync rules. */ - getInstance(syncRules: PersistedSyncRulesContent, options?: GetIntanceOptions): SyncRulesBucketStorage; + abstract getInstance(syncRules: PersistedSyncRulesContent, options?: GetIntanceOptions): SyncRulesBucketStorage; /** * Deploy new sync rules. */ - updateSyncRules(options: UpdateSyncRulesOptions): Promise; + abstract updateSyncRules(options: UpdateSyncRulesOptions): Promise; /** * Indicate that a slot was removed, and we should re-sync by creating @@ -41,57 +60,65 @@ export interface BucketStorageFactory extends ObserverClient; + abstract restartReplication(sync_rules_group_id: number): Promise; /** * Get the sync rules used for querying. */ - getActiveSyncRules(options: ParseSyncRulesOptions): Promise; + async getActiveSyncRules(options: ParseSyncRulesOptions): Promise { + const content = await this.getActiveSyncRulesContent(); + return content?.parsed(options) ?? null; + } /** * Get the sync rules used for querying. */ - getActiveSyncRulesContent(): Promise; + abstract getActiveSyncRulesContent(): Promise; /** * Get the sync rules that will be active next once done with initial replicatino. */ - getNextSyncRules(options: ParseSyncRulesOptions): Promise; + async getNextSyncRules(options: ParseSyncRulesOptions): Promise { + const content = await this.getNextSyncRulesContent(); + return content?.parsed(options) ?? null; + } /** * Get the sync rules that will be active next once done with initial replicatino. */ - getNextSyncRulesContent(): Promise; + abstract getNextSyncRulesContent(): Promise; /** * Get all sync rules currently replicating. Typically this is the "active" and "next" sync rules. */ - getReplicatingSyncRules(): Promise; + abstract getReplicatingSyncRules(): Promise; /** * Get all sync rules stopped but not terminated yet. */ - getStoppedSyncRules(): Promise; + abstract getStoppedSyncRules(): Promise; /** * Get the active storage instance. */ - getActiveStorage(): Promise; + abstract getActiveStorage(): Promise; /** * Get storage size of active sync rules. */ - getStorageMetrics(): Promise; + abstract getStorageMetrics(): Promise; /** * Get the unique identifier for this instance of Powersync */ - getPowerSyncInstanceId(): Promise; + abstract getPowerSyncInstanceId(): Promise; /** * Get a unique identifier for the system used for storage. */ - getSystemIdentifier(): Promise; + abstract getSystemIdentifier(): Promise; + + abstract [Symbol.asyncDispose](): PromiseLike; } export interface BucketStorageFactoryListener { @@ -119,12 +146,32 @@ export interface StorageMetrics { } export interface UpdateSyncRulesOptions { - content: string; + config: { + yaml: string; + // TODO: Add serialized sync plan if available + }; lock?: boolean; - validate?: boolean; storageVersion?: number; } +export function updateSyncRulesFromYaml( + content: string, + options?: Omit & { validate?: boolean } +): UpdateSyncRulesOptions { + const { config } = SqlSyncRules.fromYaml(content, { + // No schema-based validation at this point + schema: undefined, + defaultSchema: 'not_applicable', // Not needed for validation + throwOnError: options?.validate ?? false + }); + + return updateSyncRulesFromConfig(config, options); +} + +export function updateSyncRulesFromConfig(parsed: SyncConfig, options?: Omit) { + return { config: { yaml: parsed.content }, ...options }; +} + export interface GetIntanceOptions { /** * Set to true to skip trigger any events for creating the instance. 
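For reference, a minimal sketch (not part of the diff) of the new `updateSyncRules` call path using the `updateSyncRulesFromYaml` helper added above. `createFactory` and the example YAML are placeholders for illustration; any concrete `BucketStorageFactory` (such as `MongoBucketStorage`) would work the same way.

import { storage, updateSyncRulesFromYaml } from '@powersync/service-core';

// Illustrative only: stands in for obtaining a concrete BucketStorageFactory instance.
declare function createFactory(): Promise<storage.BucketStorageFactory>;

async function deployExampleRules() {
  const factory = await createFactory();

  // Previously: factory.updateSyncRules({ content: yaml, validate: true }).
  // Now the YAML is parsed up front and the resulting config is handed to storage.
  const persisted = await factory.updateSyncRules(
    updateSyncRulesFromYaml(
      `
bucket_definitions:
  global:
    data:
      - SELECT * FROM todos
`,
      { validate: true }
    )
  );

  return persisted.slot_name;
}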
diff --git a/packages/service-core/src/storage/PersistedSyncRulesContent.ts b/packages/service-core/src/storage/PersistedSyncRulesContent.ts index 66b0d4f7a..da8bef840 100644 --- a/packages/service-core/src/storage/PersistedSyncRulesContent.ts +++ b/packages/service-core/src/storage/PersistedSyncRulesContent.ts @@ -1,11 +1,22 @@ -import { HydratedSyncRules, SyncConfig, SyncConfigWithErrors } from '@powersync/service-sync-rules'; +import { + CompatibilityOption, + DEFAULT_HYDRATION_STATE, + HydratedSyncRules, + HydrationState, + SqlSyncRules, + SyncConfigWithErrors, + versionedHydrationState +} from '@powersync/service-sync-rules'; import { ReplicationLock } from './ReplicationLock.js'; +import { STORAGE_VERSION_CONFIG, StorageVersionConfig } from './StorageVersionConfig.js'; +import { ErrorCode, ServiceError } from '@powersync/lib-services-framework'; +import { UpdateSyncRulesOptions } from './BucketStorageFactory.js'; export interface ParseSyncRulesOptions { defaultSchema: string; } -export interface PersistedSyncRulesContent { +export interface PersistedSyncRulesContentData { readonly id: number; readonly sync_rules_content: string; readonly slot_name: string; @@ -13,7 +24,6 @@ export interface PersistedSyncRulesContent { * True if this is the "active" copy of the sync rules. */ readonly active: boolean; - readonly storageVersion: number; readonly last_checkpoint_lsn: string | null; @@ -22,10 +32,75 @@ export interface PersistedSyncRulesContent { readonly last_fatal_error_ts?: Date | null; readonly last_keepalive_ts?: Date | null; readonly last_checkpoint_ts?: Date | null; +} + +export abstract class PersistedSyncRulesContent implements PersistedSyncRulesContentData { + readonly id!: number; + readonly sync_rules_content!: string; + readonly slot_name!: string; + readonly active!: boolean; + readonly storageVersion!: number; + + readonly last_checkpoint_lsn!: string | null; + + readonly last_fatal_error?: string | null; + readonly last_fatal_error_ts?: Date | null; + readonly last_keepalive_ts?: Date | null; + readonly last_checkpoint_ts?: Date | null; + + abstract readonly current_lock: ReplicationLock | null; + + constructor(data: PersistedSyncRulesContentData) { + Object.assign(this, data); + } + + /** + * Load the storage config. + * + * This may throw if the persisted storage version is not supported. 
+ */ + getStorageConfig(): StorageVersionConfig { + const storageConfig = STORAGE_VERSION_CONFIG[this.storageVersion]; + if (storageConfig == null) { + throw new ServiceError( + ErrorCode.PSYNC_S1005, + `Unsupported storage version ${this.storageVersion} for sync rules ${this.id}` + ); + } + return storageConfig; + } + + parsed(options: ParseSyncRulesOptions): PersistedSyncRules { + let hydrationState: HydrationState; + const syncRules = SqlSyncRules.fromYaml(this.sync_rules_content, options); + const storageConfig = this.getStorageConfig(); + if ( + storageConfig.versionedBuckets || + syncRules.config.compatibility.isEnabled(CompatibilityOption.versionedBucketIds) + ) { + hydrationState = versionedHydrationState(this.id); + } else { + hydrationState = DEFAULT_HYDRATION_STATE; + } + + return { + id: this.id, + slot_name: this.slot_name, + sync_rules: syncRules, + hydratedSyncRules: () => { + return syncRules.config.hydrate({ hydrationState }); + } + }; + } - parsed(options: ParseSyncRulesOptions): PersistedSyncRules; + asUpdateOptions(options?: Omit): UpdateSyncRulesOptions { + return { + config: { yaml: this.sync_rules_content }, + ...options + }; + } - lock(): Promise; + abstract lock(): Promise; } export interface PersistedSyncRules {
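To show what the interface-to-abstract-class change means for storage modules, here is a hedged, illustrative subclass. The name `InMemorySyncRulesContent` is hypothetical, and it assumes `PersistedSyncRulesContent`, `PersistedSyncRulesContentData` and `ReplicationLock` are reachable through the `storage` namespace export; a real implementation such as `MongoPersistedSyncRulesContent` would follow the same shape.

import { storage } from '@powersync/service-core';

// Hypothetical subclass, for illustration only. The base constructor copies the persisted
// row onto the instance, and parsed()/asUpdateOptions()/getStorageConfig() are inherited;
// only lock() and current_lock remain for the storage module to provide.
class InMemorySyncRulesContent extends storage.PersistedSyncRulesContent {
  current_lock: storage.ReplicationLock | null = null;

  constructor(data: storage.PersistedSyncRulesContentData) {
    super(data);
  }

  async lock(): Promise<storage.ReplicationLock> {
    // A real implementation would acquire a replication lock in its backing store.
    throw new Error('Locking is not implemented in this sketch');
  }
}

// Inherited behaviour, as used elsewhere in this diff:
//   content.parsed({ defaultSchema: 'public' }).hydratedSyncRules();
//   await factory.updateSyncRules(content.asUpdateOptions());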