diff --git a/.changeset/fix-optional-getnextpageparam.md b/.changeset/fix-optional-getnextpageparam.md deleted file mode 100644 index 4082eafa3..000000000 --- a/.changeset/fix-optional-getnextpageparam.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -'@tanstack/react-db': patch ---- - -fix(react-db): make getNextPageParam optional in useLiveInfiniteQuery diff --git a/.changeset/fix-setwindow-promises.md b/.changeset/fix-setwindow-promises.md deleted file mode 100644 index beca3632a..000000000 --- a/.changeset/fix-setwindow-promises.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -'@tanstack/react-db': patch ---- - -fix(react-db): handle rejected/stale setWindow promises in useLiveInfiniteQuery diff --git a/.changeset/upgrade-client-version.md b/.changeset/upgrade-client-version.md deleted file mode 100644 index c369e3612..000000000 --- a/.changeset/upgrade-client-version.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -'@tanstack/electric-db-collection': patch ---- - -fix(electric-db-collection): Upgrade to latest electric client version diff --git a/.github/workflows/e2e-tests.yml b/.github/workflows/e2e-tests.yml index b2aa42d79..3b26c63a0 100644 --- a/.github/workflows/e2e-tests.yml +++ b/.github/workflows/e2e-tests.yml @@ -62,6 +62,16 @@ jobs: env: ELECTRIC_URL: http://localhost:3000 + - name: Run Node SQLite persisted collection E2E tests + run: | + cd packages/db-node-sqlite-persisted-collection + pnpm test:e2e + + - name: Run Electron SQLite persisted collection E2E tests (full bridge) + run: | + cd packages/db-electron-sqlite-persisted-collection + TANSTACK_DB_ELECTRON_E2E_ALL=1 pnpm test:e2e + - name: Run React Native/Expo persisted collection E2E tests run: | cd packages/db-react-native-sqlite-persisted-collection diff --git a/_artifacts/skill_tree.yaml b/_artifacts/skill_tree.yaml new file mode 100644 index 000000000..abbfac7df --- /dev/null +++ b/_artifacts/skill_tree.yaml @@ -0,0 +1,273 @@ +# _artifacts/skill_tree.yaml +library: + name: '@tanstack/db' + version: '0.5.30' + 
repository: 'https://github.com/TanStack/db' + description: 'Reactive client store with normalized collections, sub-millisecond live queries, and instant optimistic mutations' +generated_from: + domain_map: 'domain_map.yaml' + skill_spec: 'skill_spec.md' +generated_at: '2026-03-04' + +skills: + # ─── Core overview (entry point) ─── + - name: 'db-core' + slug: 'db-core' + type: 'core' + domain: 'collection-setup' + path: 'skills/db-core/SKILL.md' + package: 'packages/db' + description: > + TanStack DB core concepts: createCollection, live queries via query builder, + optimistic mutations with draft proxy, transaction lifecycle. Entry point + for all TanStack DB skills with sub-skill routing table. + sources: + - 'TanStack/db:docs/overview.md' + - 'TanStack/db:packages/db/src/index.ts' + + # ─── Core sub-skills ─── + - name: 'Collection Setup' + slug: 'db-core/collection-setup' + type: 'sub-skill' + domain: 'collection-setup' + path: 'skills/db-core/collection-setup/SKILL.md' + package: 'packages/db' + description: > + Creating typed collections with createCollection, queryCollectionOptions, + electricCollectionOptions, powerSyncCollectionOptions, rxdbCollectionOptions, + trailbaseCollectionOptions, localOnlyCollectionOptions, localStorageCollectionOptions. + CollectionConfig (getKey, schema, sync, gcTime, autoIndex, syncMode). + StandardSchema validation with Zod/Valibot/ArkType. Collection lifecycle + (idle/loading/ready/error/cleaned-up). Adapter-specific sync patterns. 
+ requires: + - 'db-core' + sources: + - 'TanStack/db:docs/overview.md' + - 'TanStack/db:docs/guides/schemas.md' + - 'TanStack/db:docs/collections/query-collection.md' + - 'TanStack/db:docs/collections/electric-collection.md' + - 'TanStack/db:docs/collections/powersync-collection.md' + - 'TanStack/db:docs/collections/rxdb-collection.md' + - 'TanStack/db:docs/collections/trailbase-collection.md' + - 'TanStack/db:docs/collections/local-only-collection.md' + - 'TanStack/db:docs/collections/local-storage-collection.md' + - 'TanStack/db:packages/db/src/collection/index.ts' + subsystems: + - 'TanStack Query adapter' + - 'ElectricSQL adapter' + - 'PowerSync adapter' + - 'RxDB adapter' + - 'TrailBase adapter' + - 'Local-only' + - 'localStorage' + references: + - 'references/query-adapter.md' + - 'references/electric-adapter.md' + - 'references/powersync-adapter.md' + - 'references/rxdb-adapter.md' + - 'references/trailbase-adapter.md' + - 'references/local-adapters.md' + - 'references/schema-patterns.md' + + - name: 'Live Queries' + slug: 'db-core/live-queries' + type: 'sub-skill' + domain: 'live-queries' + path: 'skills/db-core/live-queries/SKILL.md' + package: 'packages/db' + description: > + Query builder fluent API: from, where, join, leftJoin, rightJoin, innerJoin, + fullJoin, select, fn.select, groupBy, having, orderBy, limit, offset, distinct, + findOne. Operators: eq, gt, gte, lt, lte, like, ilike, inArray, isNull, + isUndefined, and, or, not. Aggregates: count, sum, avg, min, max. String + functions: upper, lower, length, concat, coalesce. Math: add. + $selected namespace. createLiveQueryCollection for + standalone queries. Derived collections. Predicate push-down. IVM via + differential dataflow (d2ts). 
+ requires: + - 'db-core' + sources: + - 'TanStack/db:docs/guides/live-queries.md' + - 'TanStack/db:packages/db/src/query/builder/index.ts' + - 'TanStack/db:packages/db/src/query/compiler/index.ts' + - 'TanStack/db:packages/db-ivm/src/index.ts' + references: + - 'references/operators.md' + + - name: 'Mutations & Optimistic State' + slug: 'db-core/mutations-optimistic' + type: 'sub-skill' + domain: 'mutations-optimistic' + path: 'skills/db-core/mutations-optimistic/SKILL.md' + package: 'packages/db' + description: > + collection.insert, collection.update (Immer-style draft proxy), + collection.delete. createOptimisticAction (onMutate + mutationFn). + createPacedMutations with debounceStrategy, throttleStrategy, queueStrategy. + createTransaction, getActiveTransaction, ambient transaction context. + Transaction lifecycle (pending/persisting/completed/failed). Mutation merging + (insert+update→insert, insert+delete→cancel). onInsert/onUpdate/onDelete + handlers. PendingMutation (original, modified, changes, globalKey). + Transaction.isPersisted promise. TanStack Pacer integration. + requires: + - 'db-core' + sources: + - 'TanStack/db:docs/guides/mutations.md' + - 'TanStack/db:packages/db/src/transactions.ts' + - 'TanStack/db:packages/db/src/optimistic-action.ts' + - 'TanStack/db:packages/db/src/paced-mutations.ts' + - 'TanStack/db:packages/db/src/collection/mutations.ts' + references: + - 'references/transaction-api.md' + + - name: 'Custom Adapter Authoring' + slug: 'db-core/custom-adapter' + type: 'sub-skill' + domain: 'custom-adapter' + path: 'skills/db-core/custom-adapter/SKILL.md' + package: 'packages/db' + description: > + Building custom collection adapters. SyncConfig interface: sync function + receiving begin, write, commit, markReady, truncate primitives. ChangeMessage + format. loadSubset for on-demand sync. LoadSubsetOptions (where, orderBy, + limit, cursor). 
Expression parsing: parseWhereExpression, + parseOrderByExpression, extractSimpleComparisons, parseLoadSubsetOptions. + Collection options creator pattern. Subscription lifecycle and cleanup. + requires: + - 'db-core' + - 'db-core/collection-setup' + sources: + - 'TanStack/db:docs/guides/collection-options-creator.md' + - 'TanStack/db:packages/db/src/collection/sync.ts' + - 'TanStack/db:packages/db/src/query/ir.ts' + + # ─── Framework skills (one per adapter package) ─── + - name: 'React DB' + slug: 'react-db' + type: 'framework' + domain: 'framework-integration' + path: 'skills/react-db/SKILL.md' + package: 'packages/react-db' + description: > + React bindings for TanStack DB. useLiveQuery hook with dependency arrays + and 8 overloads (query function, config object, pre-created collection, + disabled state). useLiveSuspenseQuery for React Suspense with Error Boundaries. + useLiveInfiniteQuery for cursor-based pagination (pageSize, fetchNextPage, + hasNextPage). usePacedMutations for debounced React state. Return shape: + data, state, collection, status, isLoading, isReady, isError. + requires: + - 'db-core' + sources: + - 'TanStack/db:docs/framework/react/overview.md' + - 'TanStack/db:packages/react-db/src/useLiveQuery.ts' + - 'TanStack/db:packages/react-db/src/useLiveInfiniteQuery.ts' + - 'TanStack/db:packages/react-db/src/usePacedMutations.ts' + + - name: 'Vue DB' + slug: 'vue-db' + type: 'framework' + domain: 'framework-integration' + path: 'skills/vue-db/SKILL.md' + package: 'packages/vue-db' + description: > + Vue bindings for TanStack DB. useLiveQuery composable with + MaybeRefOrGetter query functions and ComputedRef return values. + Reactive deps via Vue refs and computed properties. 
+ requires: + - 'db-core' + sources: + - 'TanStack/db:docs/framework/vue/overview.md' + - 'TanStack/db:packages/vue-db/src/useLiveQuery.ts' + + - name: 'Svelte DB' + slug: 'svelte-db' + type: 'framework' + domain: 'framework-integration' + path: 'skills/svelte-db/SKILL.md' + package: 'packages/svelte-db' + description: > + Svelte 5 bindings for TanStack DB. useLiveQuery with Svelte 5 runes + ($state reactivity). Dependency arrays with getter functions for props. + requires: + - 'db-core' + sources: + - 'TanStack/db:docs/framework/svelte/overview.md' + - 'TanStack/db:packages/svelte-db/src/useLiveQuery.svelte.ts' + + - name: 'Solid DB' + slug: 'solid-db' + type: 'framework' + domain: 'framework-integration' + path: 'skills/solid-db/SKILL.md' + package: 'packages/solid-db' + description: > + SolidJS bindings for TanStack DB. useLiveQuery with fine-grained + reactivity (Accessor, createSignal). Signal reads must happen inside + the query function for tracking. + requires: + - 'db-core' + sources: + - 'TanStack/db:docs/framework/solid/overview.md' + - 'TanStack/db:packages/solid-db/src/useLiveQuery.ts' + + - name: 'Angular DB' + slug: 'angular-db' + type: 'framework' + domain: 'framework-integration' + path: 'skills/angular-db/SKILL.md' + package: 'packages/angular-db' + description: > + Angular bindings for TanStack DB. injectLiveQuery with Angular Signals. + Must be called in injection context. Supports reactive params via + { params: () => T, query: ({ params, q }) => QueryBuilder } pattern. + Uses inject(DestroyRef) for cleanup. 
+ requires: + - 'db-core' + sources: + - 'TanStack/db:docs/framework/angular/overview.md' + - 'TanStack/db:packages/angular-db/src/inject-live-query.ts' + + # ─── Composition skills ─── + - name: 'Meta-Framework Integration' + slug: 'meta-framework' + type: 'composition' + domain: 'meta-framework' + path: 'skills/meta-framework/SKILL.md' + package: 'packages/db' + description: > + Client-side preloading of TanStack DB collections in route loaders. + collection.preload(), stateWhenReady(), toArrayWhenReady(), onFirstReady(). + Pre-creating createLiveQueryCollection in loaders. Setting ssr: false on + routes (SSR not yet supported). TanStack Start and TanStack Router loader + patterns. Coordinating collection lifecycle with route transitions. + requires: + - 'db-core' + - 'db-core/collection-setup' + sources: + - 'TanStack/db:docs/guides/live-queries.md' + - 'TanStack/db:examples/react/projects/src/routes/_authenticated/project/$projectId.tsx' + - 'TanStack/db:examples/react/projects/README.md' + + # ─── Offline skill ─── + - name: 'Offline Transactions' + slug: 'offline' + type: 'composition' + domain: 'offline' + path: 'skills/offline/SKILL.md' + package: 'packages/offline-transactions' + description: > + Offline-first transaction queueing with @tanstack/offline-transactions. + OfflineExecutor, startOfflineExecutor, OfflineConfig (collections, + mutationFns, storage, maxConcurrency). Storage adapters (IndexedDBAdapter, + LocalStorageAdapter). Retry policies (DefaultRetryPolicy, BackoffCalculator, + NonRetriableError). Leader election (WebLocksLeader, BroadcastChannelLeader). + Online detection (WebOnlineDetector). OutboxManager, KeyScheduler, + TransactionSerializer. React Native support via @react-native-community/netinfo. 
+ requires: + - 'db-core' + - 'db-core/mutations-optimistic' + sources: + - 'TanStack/db:packages/offline-transactions/src/index.ts' + - 'TanStack/db:packages/offline-transactions/src/offline-executor.ts' + - 'TanStack/db:packages/offline-transactions/src/outbox.ts' + - 'TanStack/db:packages/offline-transactions/src/leader/' diff --git a/docs/framework/angular/reference/functions/injectLiveQuery.md b/docs/framework/angular/reference/functions/injectLiveQuery.md index 87aecaecd..5eb790a54 100644 --- a/docs/framework/angular/reference/functions/injectLiveQuery.md +++ b/docs/framework/angular/reference/functions/injectLiveQuery.md @@ -11,7 +11,7 @@ title: injectLiveQuery function injectLiveQuery(options): InjectLiveQueryResult; ``` -Defined in: [index.ts:87](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L87) +Defined in: [index.ts:89](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L89) ### Type Parameters @@ -45,7 +45,7 @@ Defined in: [index.ts:87](https://github.com/TanStack/db/blob/main/packages/angu function injectLiveQuery(options): InjectLiveQueryResult; ``` -Defined in: [index.ts:97](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L97) +Defined in: [index.ts:99](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L99) ### Type Parameters @@ -79,7 +79,7 @@ Defined in: [index.ts:97](https://github.com/TanStack/db/blob/main/packages/angu function injectLiveQuery(queryFn): InjectLiveQueryResult; ``` -Defined in: [index.ts:107](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L107) +Defined in: [index.ts:109](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L109) ### Type Parameters @@ -103,7 +103,7 @@ Defined in: [index.ts:107](https://github.com/TanStack/db/blob/main/packages/ang function injectLiveQuery(queryFn): InjectLiveQueryResult; ``` -Defined in: 
[index.ts:110](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L110) +Defined in: [index.ts:112](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L112) ### Type Parameters @@ -127,7 +127,7 @@ Defined in: [index.ts:110](https://github.com/TanStack/db/blob/main/packages/ang function injectLiveQuery(config): InjectLiveQueryResult; ``` -Defined in: [index.ts:115](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L115) +Defined in: [index.ts:117](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L117) ### Type Parameters @@ -151,7 +151,7 @@ Defined in: [index.ts:115](https://github.com/TanStack/db/blob/main/packages/ang function injectLiveQuery(liveQueryCollection): InjectLiveQueryResultWithCollection; ``` -Defined in: [index.ts:119](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L119) +Defined in: [index.ts:121](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L121) ### Type Parameters @@ -183,7 +183,7 @@ Defined in: [index.ts:119](https://github.com/TanStack/db/blob/main/packages/ang function injectLiveQuery(liveQueryCollection): InjectLiveQueryResultWithSingleResultCollection; ``` -Defined in: [index.ts:127](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L127) +Defined in: [index.ts:129](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L129) ### Type Parameters diff --git a/docs/framework/angular/reference/interfaces/InjectLiveQueryResult.md b/docs/framework/angular/reference/interfaces/InjectLiveQueryResult.md index 527a90f21..523f0dad9 100644 --- a/docs/framework/angular/reference/interfaces/InjectLiveQueryResult.md +++ b/docs/framework/angular/reference/interfaces/InjectLiveQueryResult.md @@ -5,7 +5,7 @@ title: InjectLiveQueryResult # Interface: InjectLiveQueryResult\ -Defined in: 
[index.ts:30](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L30) +Defined in: [index.ts:32](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L32) The result of calling `injectLiveQuery`. Contains reactive signals for the query state and data. @@ -27,7 +27,7 @@ collection: Signal< | null>; ``` -Defined in: [index.ts:36](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L36) +Defined in: [index.ts:38](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L38) A signal containing the underlying collection instance (null for disabled queries) @@ -39,7 +39,7 @@ A signal containing the underlying collection instance (null for disabled querie data: Signal>; ``` -Defined in: [index.ts:34](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L34) +Defined in: [index.ts:36](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L36) A signal containing the results as an array, or single result for findOne queries @@ -51,7 +51,7 @@ A signal containing the results as an array, or single result for findOne querie isCleanedUp: Signal; ``` -Defined in: [index.ts:52](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L52) +Defined in: [index.ts:54](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L54) A signal indicating whether the collection has been cleaned up @@ -63,7 +63,7 @@ A signal indicating whether the collection has been cleaned up isError: Signal; ``` -Defined in: [index.ts:50](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L50) +Defined in: [index.ts:52](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L52) A signal indicating whether the collection has an error @@ -75,7 +75,7 @@ A signal indicating whether the collection has an error isIdle: Signal; ``` -Defined in: 
[index.ts:48](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L48) +Defined in: [index.ts:50](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L50) A signal indicating whether the collection is idle @@ -87,7 +87,7 @@ A signal indicating whether the collection is idle isLoading: Signal; ``` -Defined in: [index.ts:44](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L44) +Defined in: [index.ts:46](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L46) A signal indicating whether the collection is currently loading @@ -99,7 +99,7 @@ A signal indicating whether the collection is currently loading isReady: Signal; ``` -Defined in: [index.ts:46](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L46) +Defined in: [index.ts:48](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L48) A signal indicating whether the collection is ready @@ -111,7 +111,7 @@ A signal indicating whether the collection is ready state: Signal>; ``` -Defined in: [index.ts:32](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L32) +Defined in: [index.ts:34](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L34) A signal containing the complete state map of results keyed by their ID @@ -123,6 +123,6 @@ A signal containing the complete state map of results keyed by their ID status: Signal; ``` -Defined in: [index.ts:42](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L42) +Defined in: [index.ts:44](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L44) A signal containing the current status of the collection diff --git a/docs/framework/angular/reference/interfaces/InjectLiveQueryResultWithCollection.md b/docs/framework/angular/reference/interfaces/InjectLiveQueryResultWithCollection.md index 5585894a8..c6f1c6c3b 100644 --- 
a/docs/framework/angular/reference/interfaces/InjectLiveQueryResultWithCollection.md +++ b/docs/framework/angular/reference/interfaces/InjectLiveQueryResultWithCollection.md @@ -5,7 +5,7 @@ title: InjectLiveQueryResultWithCollection # Interface: InjectLiveQueryResultWithCollection\ -Defined in: [index.ts:55](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L55) +Defined in: [index.ts:57](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L57) ## Type Parameters @@ -32,7 +32,7 @@ collection: Signal< | null>; ``` -Defined in: [index.ts:62](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L62) +Defined in: [index.ts:64](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L64) *** @@ -42,7 +42,7 @@ Defined in: [index.ts:62](https://github.com/TanStack/db/blob/main/packages/angu data: Signal; ``` -Defined in: [index.ts:61](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L61) +Defined in: [index.ts:63](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L63) *** @@ -52,7 +52,7 @@ Defined in: [index.ts:61](https://github.com/TanStack/db/blob/main/packages/angu isCleanedUp: Signal; ``` -Defined in: [index.ts:68](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L68) +Defined in: [index.ts:70](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L70) *** @@ -62,7 +62,7 @@ Defined in: [index.ts:68](https://github.com/TanStack/db/blob/main/packages/angu isError: Signal; ``` -Defined in: [index.ts:67](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L67) +Defined in: [index.ts:69](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L69) *** @@ -72,7 +72,7 @@ Defined in: [index.ts:67](https://github.com/TanStack/db/blob/main/packages/angu isIdle: Signal; ``` -Defined in: 
[index.ts:66](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L66) +Defined in: [index.ts:68](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L68) *** @@ -82,7 +82,7 @@ Defined in: [index.ts:66](https://github.com/TanStack/db/blob/main/packages/angu isLoading: Signal; ``` -Defined in: [index.ts:64](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L64) +Defined in: [index.ts:66](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L66) *** @@ -92,7 +92,7 @@ Defined in: [index.ts:64](https://github.com/TanStack/db/blob/main/packages/angu isReady: Signal; ``` -Defined in: [index.ts:65](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L65) +Defined in: [index.ts:67](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L67) *** @@ -102,7 +102,7 @@ Defined in: [index.ts:65](https://github.com/TanStack/db/blob/main/packages/angu state: Signal>; ``` -Defined in: [index.ts:60](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L60) +Defined in: [index.ts:62](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L62) *** @@ -112,4 +112,4 @@ Defined in: [index.ts:60](https://github.com/TanStack/db/blob/main/packages/angu status: Signal; ``` -Defined in: [index.ts:63](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L63) +Defined in: [index.ts:65](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L65) diff --git a/docs/framework/angular/reference/interfaces/InjectLiveQueryResultWithSingleResultCollection.md b/docs/framework/angular/reference/interfaces/InjectLiveQueryResultWithSingleResultCollection.md index a20e6af8c..2cf88f285 100644 --- a/docs/framework/angular/reference/interfaces/InjectLiveQueryResultWithSingleResultCollection.md +++ b/docs/framework/angular/reference/interfaces/InjectLiveQueryResultWithSingleResultCollection.md @@ -5,7 
+5,7 @@ title: InjectLiveQueryResultWithSingleResultCollection # Interface: InjectLiveQueryResultWithSingleResultCollection\ -Defined in: [index.ts:71](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L71) +Defined in: [index.ts:73](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L73) ## Type Parameters @@ -32,7 +32,7 @@ collection: Signal< | null>; ``` -Defined in: [index.ts:78](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L78) +Defined in: [index.ts:80](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L80) *** @@ -42,7 +42,7 @@ Defined in: [index.ts:78](https://github.com/TanStack/db/blob/main/packages/angu data: Signal; ``` -Defined in: [index.ts:77](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L77) +Defined in: [index.ts:79](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L79) *** @@ -52,7 +52,7 @@ Defined in: [index.ts:77](https://github.com/TanStack/db/blob/main/packages/angu isCleanedUp: Signal; ``` -Defined in: [index.ts:84](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L84) +Defined in: [index.ts:86](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L86) *** @@ -62,7 +62,7 @@ Defined in: [index.ts:84](https://github.com/TanStack/db/blob/main/packages/angu isError: Signal; ``` -Defined in: [index.ts:83](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L83) +Defined in: [index.ts:85](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L85) *** @@ -72,7 +72,7 @@ Defined in: [index.ts:83](https://github.com/TanStack/db/blob/main/packages/angu isIdle: Signal; ``` -Defined in: [index.ts:82](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L82) +Defined in: [index.ts:84](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L84) *** @@ -82,7 +82,7 @@ Defined in: 
[index.ts:82](https://github.com/TanStack/db/blob/main/packages/angu isLoading: Signal; ``` -Defined in: [index.ts:80](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L80) +Defined in: [index.ts:82](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L82) *** @@ -92,7 +92,7 @@ Defined in: [index.ts:80](https://github.com/TanStack/db/blob/main/packages/angu isReady: Signal; ``` -Defined in: [index.ts:81](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L81) +Defined in: [index.ts:83](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L83) *** @@ -102,7 +102,7 @@ Defined in: [index.ts:81](https://github.com/TanStack/db/blob/main/packages/angu state: Signal>; ``` -Defined in: [index.ts:76](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L76) +Defined in: [index.ts:78](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L78) *** @@ -112,4 +112,4 @@ Defined in: [index.ts:76](https://github.com/TanStack/db/blob/main/packages/angu status: Signal; ``` -Defined in: [index.ts:79](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L79) +Defined in: [index.ts:81](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L81) diff --git a/docs/framework/react/reference/functions/useLiveInfiniteQuery.md b/docs/framework/react/reference/functions/useLiveInfiniteQuery.md index 8fe91f730..20377885e 100644 --- a/docs/framework/react/reference/functions/useLiveInfiniteQuery.md +++ b/docs/framework/react/reference/functions/useLiveInfiniteQuery.md @@ -11,7 +11,7 @@ title: useLiveInfiniteQuery function useLiveInfiniteQuery(liveQueryCollection, config): UseLiveInfiniteQueryReturn; ``` -Defined in: [useLiveInfiniteQuery.ts:113](https://github.com/TanStack/db/blob/main/packages/react-db/src/useLiveInfiniteQuery.ts#L113) +Defined in: 
[useLiveInfiniteQuery.ts:118](https://github.com/TanStack/db/blob/main/packages/react-db/src/useLiveInfiniteQuery.ts#L118) Create an infinite query using a query function with live updates @@ -118,7 +118,7 @@ function useLiveInfiniteQuery( deps?): UseLiveInfiniteQueryReturn; ``` -Defined in: [useLiveInfiniteQuery.ts:123](https://github.com/TanStack/db/blob/main/packages/react-db/src/useLiveInfiniteQuery.ts#L123) +Defined in: [useLiveInfiniteQuery.ts:128](https://github.com/TanStack/db/blob/main/packages/react-db/src/useLiveInfiniteQuery.ts#L128) Create an infinite query using a query function with live updates diff --git a/docs/framework/react/reference/type-aliases/UseLiveInfiniteQueryConfig.md b/docs/framework/react/reference/type-aliases/UseLiveInfiniteQueryConfig.md index c0a4ad638..8ec4417d3 100644 --- a/docs/framework/react/reference/type-aliases/UseLiveInfiniteQueryConfig.md +++ b/docs/framework/react/reference/type-aliases/UseLiveInfiniteQueryConfig.md @@ -19,13 +19,13 @@ Defined in: [useLiveInfiniteQuery.ts:23](https://github.com/TanStack/db/blob/mai ## Properties -### getNextPageParam() +### ~~getNextPageParam()?~~ ```ts -getNextPageParam: (lastPage, allPages, lastPageParam, allPageParams) => number | undefined; +optional getNextPageParam: (lastPage, allPages, lastPageParam, allPageParams) => number | undefined; ``` -Defined in: [useLiveInfiniteQuery.ts:26](https://github.com/TanStack/db/blob/main/packages/react-db/src/useLiveInfiniteQuery.ts#L26) +Defined in: [useLiveInfiniteQuery.ts:31](https://github.com/TanStack/db/blob/main/packages/react-db/src/useLiveInfiniteQuery.ts#L31) #### Parameters @@ -49,6 +49,12 @@ Defined in: [useLiveInfiniteQuery.ts:26](https://github.com/TanStack/db/blob/mai `number` \| `undefined` +#### Deprecated + +This callback is not used by the current implementation. +Pagination is determined internally via a peek-ahead strategy. +Provided for API compatibility with TanStack Query conventions. + *** ### initialPageParam? 
diff --git a/docs/framework/react/reference/type-aliases/UseLiveInfiniteQueryReturn.md b/docs/framework/react/reference/type-aliases/UseLiveInfiniteQueryReturn.md index b7d68849d..e1f0cfc90 100644 --- a/docs/framework/react/reference/type-aliases/UseLiveInfiniteQueryReturn.md +++ b/docs/framework/react/reference/type-aliases/UseLiveInfiniteQueryReturn.md @@ -9,7 +9,7 @@ title: UseLiveInfiniteQueryReturn type UseLiveInfiniteQueryReturn = Omit, "data"> & object; ``` -Defined in: [useLiveInfiniteQuery.ts:34](https://github.com/TanStack/db/blob/main/packages/react-db/src/useLiveInfiniteQuery.ts#L34) +Defined in: [useLiveInfiniteQuery.ts:39](https://github.com/TanStack/db/blob/main/packages/react-db/src/useLiveInfiniteQuery.ts#L39) ## Type Declaration diff --git a/docs/guides/live-queries.md b/docs/guides/live-queries.md index 2f6054daa..a4371f888 100644 --- a/docs/guides/live-queries.md +++ b/docs/guides/live-queries.md @@ -32,6 +32,7 @@ The result types are automatically inferred from your query structure, providing ## Table of Contents - [Creating Live Query Collections](#creating-live-query-collections) +- [One-shot Queries with queryOnce](#one-shot-queries-with-queryonce) - [From Clause](#from-clause) - [Where Clauses](#where-clauses) - [Select Projections](#select) @@ -114,6 +115,36 @@ const activeUsers = createLiveQueryCollection((q) => ) ``` +## One-shot Queries with queryOnce + +If you need a one-time snapshot (no ongoing reactivity), use `queryOnce`. It +creates a live query collection, preloads it, extracts the results, and cleans +up automatically so you do not have to remember to call `cleanup()`. 
+ +```ts +import { eq, queryOnce } from '@tanstack/db' + +// Basic one-shot query +const activeUsers = await queryOnce((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.active, true)) + .select(({ user }) => ({ id: user.id, name: user.name })) +) + +// Single result with findOne() +const user = await queryOnce((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.id, userId)) + .findOne() +) +``` + +Use `queryOnce` for scripts, background tasks, data export, or AI/LLM context +building. `findOne()` resolves to `undefined` when no rows match. For UI +bindings and reactive updates, use live queries instead. + ### Using with Frameworks In React, you can use the `useLiveQuery` hook: diff --git a/docs/reference/classes/AggregateFunctionNotInSelectError.md b/docs/reference/classes/AggregateFunctionNotInSelectError.md index 0184d233c..79e85c6a8 100644 --- a/docs/reference/classes/AggregateFunctionNotInSelectError.md +++ b/docs/reference/classes/AggregateFunctionNotInSelectError.md @@ -5,7 +5,7 @@ title: AggregateFunctionNotInSelectError # Class: AggregateFunctionNotInSelectError -Defined in: [packages/db/src/errors.ts:594](https://github.com/TanStack/db/blob/main/packages/db/src/errors.ts#L594) +Defined in: [packages/db/src/errors.ts:605](https://github.com/TanStack/db/blob/main/packages/db/src/errors.ts#L605) ## Extends @@ -19,7 +19,7 @@ Defined in: [packages/db/src/errors.ts:594](https://github.com/TanStack/db/blob/ new AggregateFunctionNotInSelectError(functionName): AggregateFunctionNotInSelectError; ``` -Defined in: [packages/db/src/errors.ts:595](https://github.com/TanStack/db/blob/main/packages/db/src/errors.ts#L595) +Defined in: [packages/db/src/errors.ts:606](https://github.com/TanStack/db/blob/main/packages/db/src/errors.ts#L606) #### Parameters diff --git a/docs/reference/classes/AggregateNotSupportedError.md b/docs/reference/classes/AggregateNotSupportedError.md index 47bcd6cf7..2dbb09b2b 100644 --- 
a/docs/reference/classes/AggregateNotSupportedError.md +++ b/docs/reference/classes/AggregateNotSupportedError.md @@ -5,7 +5,7 @@ title: AggregateNotSupportedError # Class: AggregateNotSupportedError -Defined in: [packages/db/src/errors.ts:710](https://github.com/TanStack/db/blob/main/packages/db/src/errors.ts#L710) +Defined in: [packages/db/src/errors.ts:721](https://github.com/TanStack/db/blob/main/packages/db/src/errors.ts#L721) Error thrown when aggregate expressions are used outside of a GROUP BY context. @@ -21,7 +21,7 @@ Error thrown when aggregate expressions are used outside of a GROUP BY context. new AggregateNotSupportedError(): AggregateNotSupportedError; ``` -Defined in: [packages/db/src/errors.ts:711](https://github.com/TanStack/db/blob/main/packages/db/src/errors.ts#L711) +Defined in: [packages/db/src/errors.ts:722](https://github.com/TanStack/db/blob/main/packages/db/src/errors.ts#L722) #### Returns diff --git a/docs/reference/classes/BaseQueryBuilder.md b/docs/reference/classes/BaseQueryBuilder.md index 6cb9213b3..8cc8a5476 100644 --- a/docs/reference/classes/BaseQueryBuilder.md +++ b/docs/reference/classes/BaseQueryBuilder.md @@ -102,8 +102,7 @@ select(callback): QueryBuilder(message): message is ChangeMessage; ``` -Defined in: node\_modules/.pnpm/@electric-sql+client@1.5.8/node\_modules/@electric-sql/client/dist/index.d.ts:886 +Defined in: node\_modules/.pnpm/@electric-sql+client@1.5.12/node\_modules/@electric-sql/client/dist/index.d.ts:886 Type guard for checking Message is ChangeMessage. 
diff --git a/docs/reference/electric-db-collection/functions/isControlMessage.md b/docs/reference/electric-db-collection/functions/isControlMessage.md index 5fd10e31e..ef035985b 100644 --- a/docs/reference/electric-db-collection/functions/isControlMessage.md +++ b/docs/reference/electric-db-collection/functions/isControlMessage.md @@ -9,7 +9,7 @@ title: isControlMessage function isControlMessage(message): message is ControlMessage; ``` -Defined in: node\_modules/.pnpm/@electric-sql+client@1.5.8/node\_modules/@electric-sql/client/dist/index.d.ts:904 +Defined in: node\_modules/.pnpm/@electric-sql+client@1.5.12/node\_modules/@electric-sql/client/dist/index.d.ts:904 Type guard for checking Message is ControlMessage. diff --git a/docs/reference/functions/compileQuery.md b/docs/reference/functions/compileQuery.md index ae1e2e08a..80941c0c1 100644 --- a/docs/reference/functions/compileQuery.md +++ b/docs/reference/functions/compileQuery.md @@ -19,7 +19,7 @@ function compileQuery( queryMapping): CompilationResult; ``` -Defined in: [packages/db/src/query/compiler/index.ts:85](https://github.com/TanStack/db/blob/main/packages/db/src/query/compiler/index.ts#L85) +Defined in: [packages/db/src/query/compiler/index.ts:86](https://github.com/TanStack/db/blob/main/packages/db/src/query/compiler/index.ts#L86) Compiles a query IR into a D2 pipeline diff --git a/docs/reference/functions/queryOnce.md b/docs/reference/functions/queryOnce.md new file mode 100644 index 000000000..7adb84312 --- /dev/null +++ b/docs/reference/functions/queryOnce.md @@ -0,0 +1,100 @@ +--- +id: queryOnce +title: queryOnce +--- + +# Function: queryOnce() + +## Call Signature + +```ts +function queryOnce(queryFn): Promise>; +``` + +Defined in: [packages/db/src/query/query-once.ts:47](https://github.com/TanStack/db/blob/main/packages/db/src/query/query-once.ts#L47) + +Executes a one-shot query and returns the results as an array. 
+ +This function creates a live query collection, preloads it, extracts the results, +and automatically cleans up the collection. It's ideal for: +- AI/LLM context building +- Data export +- Background processing +- Testing + +### Type Parameters + +#### TContext + +`TContext` *extends* [`Context`](../interfaces/Context.md) + +### Parameters + +#### queryFn + +(`q`) => [`QueryBuilder`](../type-aliases/QueryBuilder.md)\<`TContext`\> + +A function that receives the query builder and returns a query + +### Returns + +`Promise`\<[`InferResultType`](../type-aliases/InferResultType.md)\<`TContext`\>\> + +A promise that resolves to an array of query results + +### Example + +```typescript +// Basic query +const users = await queryOnce((q) => + q.from({ user: usersCollection }) +) + +// With filtering and projection +const activeUserNames = await queryOnce((q) => + q.from({ user: usersCollection }) + .where(({ user }) => eq(user.active, true)) + .select(({ user }) => ({ name: user.name })) +) +``` + +## Call Signature + +```ts +function queryOnce(config): Promise>; +``` + +Defined in: [packages/db/src/query/query-once.ts:68](https://github.com/TanStack/db/blob/main/packages/db/src/query/query-once.ts#L68) + +Executes a one-shot query using a configuration object. 
+ +### Type Parameters + +#### TContext + +`TContext` *extends* [`Context`](../interfaces/Context.md) + +### Parameters + +#### config + +[`QueryOnceConfig`](../interfaces/QueryOnceConfig.md)\<`TContext`\> + +Configuration object with the query function + +### Returns + +`Promise`\<[`InferResultType`](../type-aliases/InferResultType.md)\<`TContext`\>\> + +A promise that resolves to an array of query results + +### Example + +```typescript +const recentOrders = await queryOnce({ + query: (q) => + q.from({ order: ordersCollection }) + .orderBy(({ order }) => desc(order.createdAt)) + .limit(100), +}) +``` diff --git a/docs/reference/index.md b/docs/reference/index.md index d2387c6e3..ad75b786d 100644 --- a/docs/reference/index.md +++ b/docs/reference/index.md @@ -34,6 +34,7 @@ title: "@tanstack/db" - [DuplicateKeyError](classes/DuplicateKeyError.md) - [DuplicateKeySyncError](classes/DuplicateKeySyncError.md) - [EmptyReferencePathError](classes/EmptyReferencePathError.md) +- [FnSelectWithGroupByError](classes/FnSelectWithGroupByError.md) - [GroupByError](classes/GroupByError.md) - [HavingRequiresGroupByError](classes/HavingRequiresGroupByError.md) - [IndexProxy](classes/IndexProxy.md) @@ -134,6 +135,7 @@ title: "@tanstack/db" - [Parser](interfaces/Parser.md) - [ParseWhereOptions](interfaces/ParseWhereOptions.md) - [PendingMutation](interfaces/PendingMutation.md) +- [QueryOnceConfig](interfaces/QueryOnceConfig.md) - [QueueStrategy](interfaces/QueueStrategy.md) - [QueueStrategyOptions](interfaces/QueueStrategyOptions.md) - [RangeQueryOptions](interfaces/RangeQueryOptions.md) @@ -293,6 +295,7 @@ title: "@tanstack/db" - [parseLoadSubsetOptions](functions/parseLoadSubsetOptions.md) - [parseOrderByExpression](functions/parseOrderByExpression.md) - [parseWhereExpression](functions/parseWhereExpression.md) +- [queryOnce](functions/queryOnce.md) - [queueStrategy](functions/queueStrategy.md) - [sum](functions/sum.md) - [throttleStrategy](functions/throttleStrategy.md) diff --git 
a/docs/reference/interfaces/QueryOnceConfig.md b/docs/reference/interfaces/QueryOnceConfig.md new file mode 100644 index 000000000..1286022d2 --- /dev/null +++ b/docs/reference/interfaces/QueryOnceConfig.md @@ -0,0 +1,30 @@ +--- +id: QueryOnceConfig +title: QueryOnceConfig +--- + +# Interface: QueryOnceConfig\ + +Defined in: [packages/db/src/query/query-once.ts:8](https://github.com/TanStack/db/blob/main/packages/db/src/query/query-once.ts#L8) + +Configuration options for queryOnce + +## Type Parameters + +### TContext + +`TContext` *extends* [`Context`](Context.md) + +## Properties + +### query + +```ts +query: + | QueryBuilder +| (q) => QueryBuilder; +``` + +Defined in: [packages/db/src/query/query-once.ts:12](https://github.com/TanStack/db/blob/main/packages/db/src/query/query-once.ts#L12) + +Query builder function that defines the query diff --git a/docs/reference/powersync-db-collection/classes/PowerSyncTransactor.md b/docs/reference/powersync-db-collection/classes/PowerSyncTransactor.md index e33ad5872..10d3c8d3e 100644 --- a/docs/reference/powersync-db-collection/classes/PowerSyncTransactor.md +++ b/docs/reference/powersync-db-collection/classes/PowerSyncTransactor.md @@ -113,7 +113,7 @@ Persists a Transaction to the PowerSync SQLite database. 
protected getMutationCollectionMeta(mutation): PowerSyncCollectionMeta; ``` -Defined in: [PowerSyncTransactor.ts:294](https://github.com/TanStack/db/blob/main/packages/powersync-db-collection/src/PowerSyncTransactor.ts#L294) +Defined in: [PowerSyncTransactor.ts:297](https://github.com/TanStack/db/blob/main/packages/powersync-db-collection/src/PowerSyncTransactor.ts#L297) #### Parameters @@ -136,7 +136,7 @@ protected handleDelete( waitForCompletion): Promise; ``` -Defined in: [PowerSyncTransactor.ts:221](https://github.com/TanStack/db/blob/main/packages/powersync-db-collection/src/PowerSyncTransactor.ts#L221) +Defined in: [PowerSyncTransactor.ts:223](https://github.com/TanStack/db/blob/main/packages/powersync-db-collection/src/PowerSyncTransactor.ts#L223) #### Parameters @@ -199,7 +199,7 @@ protected handleOperationWithCompletion( handler): Promise; ``` -Defined in: [PowerSyncTransactor.ts:263](https://github.com/TanStack/db/blob/main/packages/powersync-db-collection/src/PowerSyncTransactor.ts#L263) +Defined in: [PowerSyncTransactor.ts:266](https://github.com/TanStack/db/blob/main/packages/powersync-db-collection/src/PowerSyncTransactor.ts#L266) Helper function which wraps a persistence operation by: - Fetching the mutation's collection's SQLite table details @@ -239,7 +239,7 @@ protected handleUpdate( waitForCompletion): Promise; ``` -Defined in: [PowerSyncTransactor.ts:187](https://github.com/TanStack/db/blob/main/packages/powersync-db-collection/src/PowerSyncTransactor.ts#L187) +Defined in: [PowerSyncTransactor.ts:188](https://github.com/TanStack/db/blob/main/packages/powersync-db-collection/src/PowerSyncTransactor.ts#L188) #### Parameters @@ -267,7 +267,7 @@ Defined in: [PowerSyncTransactor.ts:187](https://github.com/TanStack/db/blob/mai protected processMutationMetadata(mutation): string | null; ``` -Defined in: [PowerSyncTransactor.ts:313](https://github.com/TanStack/db/blob/main/packages/powersync-db-collection/src/PowerSyncTransactor.ts#L313) +Defined in: 
[PowerSyncTransactor.ts:316](https://github.com/TanStack/db/blob/main/packages/powersync-db-collection/src/PowerSyncTransactor.ts#L316) Processes collection mutation metadata for persistence to the database. We only support storing string metadata. diff --git a/docs/reference/type-aliases/ApplyJoinOptionalityToMergedSchema.md b/docs/reference/type-aliases/ApplyJoinOptionalityToMergedSchema.md index 372c4c51c..7861710af 100644 --- a/docs/reference/type-aliases/ApplyJoinOptionalityToMergedSchema.md +++ b/docs/reference/type-aliases/ApplyJoinOptionalityToMergedSchema.md @@ -9,7 +9,7 @@ title: ApplyJoinOptionalityToMergedSchema type ApplyJoinOptionalityToMergedSchema = { [K in keyof TExistingSchema]: K extends TFromSourceName ? TJoinType extends "right" | "full" ? TExistingSchema[K] | undefined : TExistingSchema[K] : TExistingSchema[K] } & { [K in keyof TNewSchema]: TJoinType extends "left" | "full" ? TNewSchema[K] | undefined : TNewSchema[K] }; ``` -Defined in: [packages/db/src/query/builder/types.ts:622](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L622) +Defined in: [packages/db/src/query/builder/types.ts:636](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L636) ApplyJoinOptionalityToMergedSchema - Applies optionality rules when merging schemas diff --git a/docs/reference/type-aliases/FunctionalHavingRow.md b/docs/reference/type-aliases/FunctionalHavingRow.md index 279104ac3..0788c4e41 100644 --- a/docs/reference/type-aliases/FunctionalHavingRow.md +++ b/docs/reference/type-aliases/FunctionalHavingRow.md @@ -9,7 +9,7 @@ title: FunctionalHavingRow type FunctionalHavingRow = TContext["schema"] & TContext["result"] extends object ? 
object : object; ``` -Defined in: [packages/db/src/query/builder/types.ts:365](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L365) +Defined in: [packages/db/src/query/builder/types.ts:375](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L375) FunctionalHavingRow - Type for the row parameter in functional having callbacks diff --git a/docs/reference/type-aliases/GetResult.md b/docs/reference/type-aliases/GetResult.md index eaa044ea0..17bc3feda 100644 --- a/docs/reference/type-aliases/GetResult.md +++ b/docs/reference/type-aliases/GetResult.md @@ -9,7 +9,7 @@ title: GetResult type GetResult = Prettify; ``` -Defined in: [packages/db/src/query/builder/types.ts:678](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L678) +Defined in: [packages/db/src/query/builder/types.ts:692](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L692) GetResult - Determines the final result type of a query diff --git a/docs/reference/type-aliases/GroupByCallback.md b/docs/reference/type-aliases/GroupByCallback.md index 0ca5fd004..a716c39dc 100644 --- a/docs/reference/type-aliases/GroupByCallback.md +++ b/docs/reference/type-aliases/GroupByCallback.md @@ -9,7 +9,7 @@ title: GroupByCallback type GroupByCallback = (refs) => any; ``` -Defined in: [packages/db/src/query/builder/types.ts:328](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L328) +Defined in: [packages/db/src/query/builder/types.ts:338](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L338) GroupByCallback - Type for groupBy clause callback functions diff --git a/docs/reference/type-aliases/InferResultType.md b/docs/reference/type-aliases/InferResultType.md index dcb111c85..cd318feb8 100644 --- a/docs/reference/type-aliases/InferResultType.md +++ b/docs/reference/type-aliases/InferResultType.md @@ -9,7 +9,7 @@ title: InferResultType type 
InferResultType = TContext extends SingleResult ? GetResult | undefined : GetResult[]; ``` -Defined in: [packages/db/src/query/builder/types.ts:648](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L648) +Defined in: [packages/db/src/query/builder/types.ts:662](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L662) Utility type to infer the query result size (single row or an array) diff --git a/docs/reference/type-aliases/JoinOnCallback.md b/docs/reference/type-aliases/JoinOnCallback.md index b7db28bfa..60b50c32f 100644 --- a/docs/reference/type-aliases/JoinOnCallback.md +++ b/docs/reference/type-aliases/JoinOnCallback.md @@ -9,7 +9,7 @@ title: JoinOnCallback type JoinOnCallback = (refs) => any; ``` -Defined in: [packages/db/src/query/builder/types.ts:344](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L344) +Defined in: [packages/db/src/query/builder/types.ts:354](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L354) JoinOnCallback - Type for join condition callback functions diff --git a/docs/reference/type-aliases/MergeContextForJoinCallback.md b/docs/reference/type-aliases/MergeContextForJoinCallback.md index bfc431752..f19198080 100644 --- a/docs/reference/type-aliases/MergeContextForJoinCallback.md +++ b/docs/reference/type-aliases/MergeContextForJoinCallback.md @@ -9,7 +9,7 @@ title: MergeContextForJoinCallback type MergeContextForJoinCallback = object; ``` -Defined in: [packages/db/src/query/builder/types.ts:807](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L807) +Defined in: [packages/db/src/query/builder/types.ts:821](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L821) MergeContextForJoinCallback - Special context for join condition callbacks @@ -55,7 +55,7 @@ The simple intersection (&) merges schemas without any optionality transformatio baseSchema: 
TContext["baseSchema"]; ``` -Defined in: [packages/db/src/query/builder/types.ts:811](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L811) +Defined in: [packages/db/src/query/builder/types.ts:825](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L825) *** @@ -65,7 +65,7 @@ Defined in: [packages/db/src/query/builder/types.ts:811](https://github.com/TanS fromSourceName: TContext["fromSourceName"]; ``` -Defined in: [packages/db/src/query/builder/types.ts:814](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L814) +Defined in: [packages/db/src/query/builder/types.ts:828](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L828) *** @@ -75,7 +75,7 @@ Defined in: [packages/db/src/query/builder/types.ts:814](https://github.com/TanS hasJoins: true; ``` -Defined in: [packages/db/src/query/builder/types.ts:815](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L815) +Defined in: [packages/db/src/query/builder/types.ts:829](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L829) *** @@ -85,7 +85,7 @@ Defined in: [packages/db/src/query/builder/types.ts:815](https://github.com/TanS joinTypes: TContext["joinTypes"] extends Record ? 
TContext["joinTypes"] : object; ``` -Defined in: [packages/db/src/query/builder/types.ts:816](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L816) +Defined in: [packages/db/src/query/builder/types.ts:830](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L830) *** @@ -95,7 +95,7 @@ Defined in: [packages/db/src/query/builder/types.ts:816](https://github.com/TanS result: TContext["result"]; ``` -Defined in: [packages/db/src/query/builder/types.ts:819](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L819) +Defined in: [packages/db/src/query/builder/types.ts:833](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L833) *** @@ -105,4 +105,4 @@ Defined in: [packages/db/src/query/builder/types.ts:819](https://github.com/TanS schema: TContext["schema"] & TNewSchema; ``` -Defined in: [packages/db/src/query/builder/types.ts:813](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L813) +Defined in: [packages/db/src/query/builder/types.ts:827](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L827) diff --git a/docs/reference/type-aliases/MergeContextWithJoinType.md b/docs/reference/type-aliases/MergeContextWithJoinType.md index 0bac1b7db..a12cf3ba2 100644 --- a/docs/reference/type-aliases/MergeContextWithJoinType.md +++ b/docs/reference/type-aliases/MergeContextWithJoinType.md @@ -9,7 +9,7 @@ title: MergeContextWithJoinType type MergeContextWithJoinType = object & PreserveSingleResultFlag; ``` -Defined in: [packages/db/src/query/builder/types.ts:573](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L573) +Defined in: [packages/db/src/query/builder/types.ts:587](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L587) MergeContextWithJoinType - Creates a new context after a join operation diff --git 
a/docs/reference/type-aliases/OrderByCallback.md b/docs/reference/type-aliases/OrderByCallback.md index 6810877b3..fa78b20dc 100644 --- a/docs/reference/type-aliases/OrderByCallback.md +++ b/docs/reference/type-aliases/OrderByCallback.md @@ -9,7 +9,7 @@ title: OrderByCallback type OrderByCallback = (refs) => any; ``` -Defined in: [packages/db/src/query/builder/types.ts:292](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L292) +Defined in: [packages/db/src/query/builder/types.ts:302](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L302) OrderByCallback - Type for orderBy clause callback functions diff --git a/docs/reference/type-aliases/Prettify.md b/docs/reference/type-aliases/Prettify.md index 5700563ec..c55eea7fb 100644 --- a/docs/reference/type-aliases/Prettify.md +++ b/docs/reference/type-aliases/Prettify.md @@ -9,7 +9,7 @@ title: Prettify type Prettify = { [K in keyof T]: T[K] } & object; ``` -Defined in: [packages/db/src/query/builder/types.ts:845](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L845) +Defined in: [packages/db/src/query/builder/types.ts:859](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L859) Prettify - Utility type for clean IDE display diff --git a/docs/reference/type-aliases/Ref.md b/docs/reference/type-aliases/Ref.md index a346b0dc0..8c8015219 100644 --- a/docs/reference/type-aliases/Ref.md +++ b/docs/reference/type-aliases/Ref.md @@ -3,13 +3,13 @@ id: Ref title: Ref --- -# Type Alias: Ref\ +# Type Alias: Ref\ ```ts -type Ref = { [K in keyof T]: IsNonExactOptional extends true ? IsNonExactNullable extends true ? IsPlainObject> extends true ? Ref> | undefined : RefLeaf> | undefined : IsPlainObject> extends true ? Ref> | undefined : RefLeaf> | undefined : IsNonExactNullable extends true ? IsPlainObject> extends true ? Ref> | null : RefLeaf> | null : IsPlainObject extends true ? 
Ref : RefLeaf } & RefLeaf; +type Ref = { [K in keyof T]: IsNonExactOptional extends true ? IsNonExactNullable extends true ? IsPlainObject> extends true ? Ref, Nullable> | undefined : RefLeaf, Nullable> | undefined : IsPlainObject> extends true ? Ref, Nullable> | undefined : RefLeaf, Nullable> | undefined : IsNonExactNullable extends true ? IsPlainObject> extends true ? Ref, Nullable> | null : RefLeaf, Nullable> | null : IsPlainObject extends true ? Ref : RefLeaf } & RefLeaf; ``` -Defined in: [packages/db/src/query/builder/types.ts:496](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L496) +Defined in: [packages/db/src/query/builder/types.ts:502](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L502) Ref - The user-facing ref interface for the query builder @@ -18,15 +18,18 @@ designed for optimal IDE experience without internal implementation details. It provides a recursive interface that allows nested property access while preserving optionality and nullability correctly. -When spread in select clauses, it correctly produces the underlying data type -without Ref wrappers, enabling clean spread operations. +The `Nullable` parameter indicates whether this ref comes from a nullable +join side (left/right/full). When `true`, the `Nullable` flag propagates +through all nested property accesses, ensuring the result type includes +`| undefined` for all fields accessed through this ref. Example usage: ```typescript -// Clean interface - no internal properties visible -const users: Ref<{ id: number; profile?: { bio: string } }> = { ... 
} -users.id // Ref - clean display -users.profile?.bio // Ref - nested optional access works +// Non-nullable ref (inner join or from table): +select(({ user }) => ({ name: user.name })) // result: string + +// Nullable ref (left join right side): +select(({ dept }) => ({ name: dept.name })) // result: string | undefined // Spread operations work cleanly: select(({ user }) => ({ ...user })) // Returns User type, not Ref types @@ -37,3 +40,7 @@ select(({ user }) => ({ ...user })) // Returns User type, not Ref types ### T `T` = `any` + +### Nullable + +`Nullable` *extends* `boolean` = `false` diff --git a/docs/reference/type-aliases/RefsForContext.md b/docs/reference/type-aliases/RefsForContext.md index b4540de81..959f47f3d 100644 --- a/docs/reference/type-aliases/RefsForContext.md +++ b/docs/reference/type-aliases/RefsForContext.md @@ -6,29 +6,22 @@ title: RefsForContext # Type Alias: RefsForContext\ ```ts -type RefsForContext = { [K in keyof TContext["schema"]]: IsNonExactOptional extends true ? IsNonExactNullable extends true ? Ref> | undefined : Ref> | undefined : IsNonExactNullable extends true ? Ref> | null : Ref } & TContext["result"] extends object ? object : object; +type RefsForContext = { [K in keyof TContext["schema"]]: IsNonExactOptional extends true ? IsNonExactNullable extends true ? Ref, true> : Ref, true> : IsNonExactNullable extends true ? Ref, true> : Ref } & TContext["result"] extends object ? object : object; ``` -Defined in: [packages/db/src/query/builder/types.ts:391](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L391) +Defined in: [packages/db/src/query/builder/types.ts:394](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L394) -RefProxyForContext - Creates ref proxies for all tables/collections in a query context +RefsForContext - Creates ref proxies for all tables/collections in a query context This is the main entry point for creating ref objects in query builder callbacks. 
-It handles optionality by placing undefined/null OUTSIDE the RefProxy to enable -JavaScript's optional chaining operator (?.): +For nullable join sides (left/right/full joins), it produces `Ref` instead +of `Ref | undefined`. This accurately reflects that the proxy object is always +present at build time (it's a truthy proxy that records property access paths), +while the `Nullable` flag ensures the result type correctly includes `| undefined`. Examples: -- Required field: `RefProxy` → user.name works -- Optional field: `RefProxy | undefined` → user?.name works -- Nullable field: `RefProxy | null` → user?.name works -- Both optional and nullable: `RefProxy | undefined` → user?.name works - -The key insight is that `RefProxy` would NOT allow `user?.name` -because the undefined is "inside" the proxy, but `RefProxy | undefined` -does allow it because the undefined is "outside" the proxy. - -The logic prioritizes optional chaining by always placing `undefined` outside when -a type is both optional and nullable (e.g., `string | null | undefined`). +- Required field: `Ref` → user.name works, result is T +- Nullable join side: `Ref` → user.name works, result is T | undefined After `select()` is called, this type also includes `$selected` which provides access to the SELECT result fields via `$selected.fieldName` syntax. diff --git a/docs/reference/type-aliases/ResultTypeFromSelect.md b/docs/reference/type-aliases/ResultTypeFromSelect.md index 0bbf59cab..159c94170 100644 --- a/docs/reference/type-aliases/ResultTypeFromSelect.md +++ b/docs/reference/type-aliases/ResultTypeFromSelect.md @@ -6,7 +6,7 @@ title: ResultTypeFromSelect # Type Alias: ResultTypeFromSelect\ ```ts -type ResultTypeFromSelect = WithoutRefBrand extends true ? ExtractExpressionType : TSelectObject[K] extends Ref ? ExtractRef : TSelectObject[K] extends RefLeaf ? T : TSelectObject[K] extends RefLeaf | undefined ? T | undefined : TSelectObject[K] extends RefLeaf | null ? 
T | null : TSelectObject[K] extends Ref<(...)> | undefined ? ExtractRef<(...)> | undefined : (...)[(...)] extends (...) | (...) ? (...) | (...) : (...) extends (...) ? (...) : (...) }>>; +type ResultTypeFromSelect = WithoutRefBrand extends true ? ExtractExpressionType : TSelectObject[K] extends Ref ? ExtractRef : TSelectObject[K] extends RefLeaf ? IsNullableRef extends true ? T | undefined : T : TSelectObject[K] extends RefLeaf | undefined ? T | undefined : TSelectObject[K] extends RefLeaf | null ? IsNullableRef> extends true ? T | null | undefined : T | null : TSelectObject[K] extends Ref<(...)> | undefined ? ExtractRef<(...)> | undefined : (...)[(...)] extends (...) | (...) ? (...) | (...) : (...) extends (...) ? (...) : (...) }>>; ``` Defined in: [packages/db/src/query/builder/types.ts:226](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L226) diff --git a/docs/reference/type-aliases/WithResult.md b/docs/reference/type-aliases/WithResult.md index cb968175c..e0513937a 100644 --- a/docs/reference/type-aliases/WithResult.md +++ b/docs/reference/type-aliases/WithResult.md @@ -9,7 +9,7 @@ title: WithResult type WithResult = Prettify & object>; ``` -Defined in: [packages/db/src/query/builder/types.ts:836](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L836) +Defined in: [packages/db/src/query/builder/types.ts:850](https://github.com/TanStack/db/blob/main/packages/db/src/query/builder/types.ts#L850) WithResult - Updates a context with a new result type after select() diff --git a/domain_map.yaml b/domain_map.yaml new file mode 100644 index 000000000..20997e900 --- /dev/null +++ b/domain_map.yaml @@ -0,0 +1,707 @@ +# domain_map.yaml +# Generated by skill-domain-discovery +# Library: @tanstack/db +# Version: 0.5.30 +# Date: 2026-03-04 +# Status: reviewed + +library: + name: '@tanstack/db' + version: '0.5.30' + repository: 'https://github.com/TanStack/db' + description: 'Reactive client store with normalized 
collections, sub-millisecond live queries, and instant optimistic mutations' + primary_framework: 'framework-agnostic (React, Vue, Svelte, Solid, Angular adapters)' + +domains: + - name: 'Collection Setup & Schema' + slug: 'collection-setup' + description: 'Creating and configuring typed collections from any data source, with optional schema validation, type transformations, and adapter-specific sync patterns' + + - name: 'Live Query Construction' + slug: 'live-queries' + description: 'Building SQL-like reactive queries across collections using the fluent query builder API with expressions, joins, aggregations, and derived collections' + + - name: 'Framework Integration' + slug: 'framework-integration' + description: 'Binding live queries to UI framework components using framework-specific hooks, dependency tracking, Suspense, and pagination' + + - name: 'Mutations & Optimistic State' + slug: 'mutations-optimistic' + description: 'Writing data to collections with instant optimistic feedback, managing transaction lifecycles, and handling rollback on failure' + + - name: 'Meta-Framework Integration' + slug: 'meta-framework' + description: 'Client-side preloading of collections in route loaders for TanStack Start/Router, Next.js, Remix, Nuxt, and SvelteKit — ensuring data is ready before component render' + + - name: 'Custom Adapter Authoring' + slug: 'custom-adapter' + description: 'Building custom collection adapters that implement the SyncConfig contract for new backends' + + - name: 'Offline Transactions' + slug: 'offline' + description: 'Offline-first transaction queueing with persistence, retry, multi-tab coordination, and connectivity detection' + +skills: + - name: 'Collection Setup' + slug: 'collection-setup' + domain: 'collection-setup' + description: 'Creating and configuring typed collections backed by any data source' + type: 'core' + covers: + - 'createCollection' + - 'queryCollectionOptions (@tanstack/query-db-collection)' + - 'electricCollectionOptions 
(@tanstack/electric-db-collection)' + - 'trailbaseCollectionOptions (@tanstack/trailbase-db-collection)' + - 'powerSyncCollectionOptions (@tanstack/powersync-db-collection)' + - 'rxdbCollectionOptions (@tanstack/rxdb-db-collection)' + - 'localOnlyCollectionOptions' + - 'localStorageCollectionOptions' + - 'CollectionConfig interface (id, getKey, schema, sync, compare, autoIndex, startSync, gcTime, utils)' + - 'StandardSchema integration (Zod, Valibot, ArkType, Effect)' + - 'Schema validation (TInput vs TOutput, transformations, defaults)' + - 'Collection lifecycle (idle -> loading -> initialCommit -> ready -> error -> cleaned-up)' + - 'Collection status tracking (isReady, isLoading, isError, isCleanedUp)' + - 'localOnly -> real backend upgrade path' + - 'Adapter-specific sync patterns (Electric txid, Query direct writes, PowerSync persistence)' + - 'Sync modes (eager, on-demand, progressive)' + tasks: + - 'Create a collection backed by TanStack Query for REST API data' + - 'Create a collection synced with ElectricSQL for real-time Postgres data' + - 'Create a local-only collection for temporary UI state' + - 'Add schema validation with type transformations (e.g. 
string -> Date)' + - 'Configure collection with custom getKey, gcTime, and autoIndex' + - 'Create a localStorage collection for cross-tab persistent state' + - 'Configure PowerSync collection with custom serializer' + - 'Prototype with localOnlyCollectionOptions then swap to a real backend' + - 'Configure on-demand sync mode for large datasets' + - 'Track Electric txid to prevent optimistic state flash' + - 'Use direct writes to update query collection without refetch' + subsystems: + - name: 'TanStack Query adapter' + package: '@tanstack/query-db-collection' + config_surface: 'queryKey, queryFn, queryClient, select, enabled, refetchInterval, staleTime, syncMode' + - name: 'ElectricSQL adapter' + package: '@tanstack/electric-db-collection' + config_surface: 'shapeOptions, syncMode (eager/on-demand/progressive), txid tracking via awaitTxId/awaitMatch' + - name: 'PowerSync adapter' + package: '@tanstack/powersync-db-collection' + config_surface: 'database, table, conversions, batchSize, syncMode' + - name: 'RxDB adapter' + package: '@tanstack/rxdb-db-collection' + config_surface: 'rxCollection, syncBatchSize; keys always strings' + - name: 'TrailBase adapter' + package: '@tanstack/trailbase-db-collection' + config_surface: 'recordApi, conversions' + - name: 'Local-only' + package: '@tanstack/db' + config_surface: 'getKey, schema, initialData' + - name: 'localStorage' + package: '@tanstack/db' + config_surface: 'storageKey, getKey, schema' + failure_modes: + - mistake: 'queryFn returning empty array deletes all collection data' + mechanism: "queryCollectionOptions treats queryFn result as complete server state; returning [] means 'server has no items', causing all existing items to be deleted from the collection" + wrong_pattern: | + queryFn: async () => { + const res = await fetch('/api/todos?status=active') + return res.json() // returns [] when no active todos + } + correct_pattern: | + queryFn: async () => { + const res = await fetch('/api/todos') // fetch ALL 
todos + return res.json() + } + // Or use on-demand sync mode for filtered queries + source: 'docs/collections/query-collection.md - Full State Sync section' + priority: 'CRITICAL' + skills: ['collection-setup'] + + - mistake: 'Not knowing which collection type to use for a given backend' + mechanism: 'AI agents default to bare createCollection or localOnlyCollectionOptions when they should use queryCollectionOptions, electricCollectionOptions, etc.; each adapter handles sync, handlers, and utilities differently' + source: 'maintainer interview' + priority: 'CRITICAL' + skills: ['collection-setup'] + + - mistake: 'Using async schema validation' + mechanism: 'Schema validation must be synchronous; returning a Promise from a schema throws SchemaMustBeSynchronousError, but the error only surfaces at mutation time, not at collection creation' + source: 'packages/db/src/collection/mutations.ts:101' + priority: 'HIGH' + skills: ['collection-setup'] + + - mistake: 'getKey returning undefined for some items' + mechanism: "If getKey returns undefined for any item, throws UndefinedKeyError; common when accessing a nested property that doesn't exist on all items" + source: 'packages/db/src/collection/mutations.ts:148' + priority: 'HIGH' + skills: ['collection-setup'] + + - mistake: 'TInput not a superset of TOutput with schema transforms' + mechanism: 'When schema transforms types (e.g. 
string -> Date), the input type for mutations must accept the pre-transform type; mismatches cause type errors that are confusing because they reference internal schema types' + source: 'docs/guides/schemas.md - TInput must be superset of TOutput' + priority: 'HIGH' + skills: ['collection-setup'] + + - mistake: 'Providing both explicit type parameter and schema' + mechanism: 'When a schema is provided, the collection infers types from it; also passing an explicit generic type parameter creates conflicting type constraints' + source: 'docs/overview.md - schema type inference note' + priority: 'MEDIUM' + skills: ['collection-setup'] + + - mistake: 'React Native missing crypto.randomUUID polyfill' + mechanism: "TanStack DB uses crypto.randomUUID() internally for IDs; React Native doesn't provide this, causing runtime crash; must install react-native-random-uuid" + source: 'docs/overview.md - React Native section' + priority: 'HIGH' + skills: ['collection-setup'] + + - mistake: 'Electric txid queried outside mutation transaction' + mechanism: "If pg_current_xact_id() is queried in a separate transaction from the actual mutation, the txid won't match the mutation's transaction, causing awaitTxId to stall forever; must query txid INSIDE the same SQL transaction as the mutation" + source: 'docs/collections/electric-collection.md - Debugging txid section' + priority: 'CRITICAL' + skills: ['collection-setup'] + + - mistake: 'queryFn returning partial data without merging' + mechanism: 'queryFn result is treated as complete state; returning only new/changed items without merging with existing data causes all non-returned items to be deleted from the collection' + source: 'docs/collections/query-collection.md - Handling Partial/Incremental Fetches' + priority: 'CRITICAL' + skills: ['collection-setup'] + + - mistake: 'Direct writes overridden by next query sync' + mechanism: 'Direct writes (writeInsert, etc.) 
update the collection immediately, but the next queryFn execution returns the complete server state which overwrites the direct writes; must coordinate staleTime and refetch behavior' + source: 'docs/collections/query-collection.md - Direct Writes and Query Sync' + priority: 'MEDIUM' + skills: ['collection-setup'] + + - name: 'Live Queries' + slug: 'live-queries' + domain: 'live-queries' + description: 'Building SQL-like reactive queries across collections' + type: 'core' + covers: + - 'Query builder fluent API (.from, .where, .join, .select, .groupBy, .having, .orderBy, .limit, .offset, .distinct, .findOne)' + - 'Comparison operators (eq, ne, gt, gte, lt, lte, like, ilike, inArray, isNull, isUndefined)' + - 'Logical operators (and, or, not)' + - 'Aggregate functions (count, sum, avg, min, max)' + - 'String functions (upper, lower, length, concat, coalesce)' + - 'Math functions (add, subtract, multiply, divide)' + - 'Join types (inner, left, right, full)' + - 'Derived collections (query results are themselves collections)' + - 'createLiveQueryCollection (standalone queries outside components)' + - 'QueryIR (intermediate representation)' + - 'compileQuery (query compilation)' + - '$selected namespace for accessing SELECT fields in ORDER BY / HAVING' + - 'Predicate push-down (loadSubsetOptions for on-demand sync)' + - 'Incremental view maintenance via d2ts (differential dataflow)' + tasks: + - 'Filter collection items with complex WHERE conditions' + - 'Join two collections on a foreign key' + - 'Aggregate data with GROUP BY and HAVING' + - 'Sort and paginate query results' + - 'Create a derived collection from a query for reuse across components' + - 'Build a query with computed/projected fields' + - 'Use subqueries for complex data access patterns' + - 'Move JS array filtering/transformation logic into live queries for better performance' + reference_candidates: + - topic: 'Query operators' + reason: '>20 distinct operators (comparison, logical, aggregate, string, 
math) with signatures' + failure_modes: + - mistake: 'Using === instead of eq() in where clauses' + mechanism: 'JavaScript === in a where callback returns a boolean, not an expression object; the query silently evaluates to always-false or always-true instead of building the correct filter predicate. Throws InvalidWhereExpressionError.' + wrong_pattern: | + q.from({ users }).where(({ users }) => users.active === true) + correct_pattern: | + q.from({ users }).where(({ users }) => eq(users.active, true)) + source: 'packages/db/src/query/builder/index.ts:375' + priority: 'CRITICAL' + skills: ['live-queries'] + + - mistake: 'Filtering/transforming data in JS instead of using live query operators' + mechanism: "AI agents write .filter()/.map()/.reduce() on the data array instead of using the query builder's where/select/groupBy; this throws away incremental maintenance -- the JS code re-runs from scratch on every change, while the query only recomputes the delta" + wrong_pattern: | + const { data } = useLiveQuery(q => q.from({ todos })) + const active = data.filter(t => t.completed === false) + correct_pattern: | + const { data } = useLiveQuery(q => + q.from({ todos }).where(({ todos }) => eq(todos.completed, false)) + ) + source: 'maintainer interview' + priority: 'CRITICAL' + skills: ['live-queries'] + + - mistake: 'Not using the full set of available query operators' + mechanism: 'The library has a comprehensive operator set (string functions, math, aggregates, coalesce, etc.) 
but agents default to basic eq/gt/lt and do the rest in JS; every operator is incrementally maintained and should be preferred over JS equivalents' + source: 'maintainer interview' + priority: 'HIGH' + skills: ['live-queries'] + + - mistake: 'Using .distinct() without .select()' + mechanism: 'distinct() deduplicates by the entire selected object shape; without select(), the shape is undefined, throwing DistinctRequiresSelectError' + source: 'packages/db/src/query/compiler/index.ts:218' + priority: 'HIGH' + skills: ['live-queries'] + + - mistake: 'Using .having() without .groupBy()' + mechanism: 'HAVING filters aggregated groups; without GROUP BY there are no groups to filter, throwing HavingRequiresGroupByError' + source: 'packages/db/src/query/compiler/index.ts:293' + priority: 'HIGH' + skills: ['live-queries'] + + - mistake: 'Using .limit() or .offset() without .orderBy()' + mechanism: 'Without deterministic ordering, limit/offset results are non-deterministic and cannot be incrementally maintained; throws LimitOffsetRequireOrderByError' + source: 'packages/db/src/query/compiler/index.ts:356' + priority: 'HIGH' + skills: ['live-queries'] + + - mistake: 'Join condition using operator other than eq()' + mechanism: 'The D2 differential dataflow join operator only supports equality joins; using gt(), like(), etc. 
throws JoinConditionMustBeEqualityError' + source: 'packages/db/src/query/builder/index.ts:216' + priority: 'HIGH' + skills: ['live-queries'] + + - mistake: 'Passing source directly instead of as {alias: collection}' + mechanism: 'from() and join() require sources wrapped as {alias: collection}; passing the collection directly throws InvalidSourceTypeError' + wrong_pattern: | + q.from(usersCollection) + correct_pattern: | + q.from({ users: usersCollection }) + source: 'packages/db/src/query/builder/index.ts:79-96' + priority: 'MEDIUM' + skills: ['live-queries'] + + - name: 'Framework Integration' + slug: 'framework-integration' + domain: 'framework-integration' + description: 'Binding live queries to UI framework components' + type: 'framework' + covers: + - 'React: useLiveQuery, useLiveSuspenseQuery, useLiveInfiniteQuery, usePacedMutations' + - 'Vue: useLiveQuery composable with computed refs' + - 'Svelte: useLiveQuery with Svelte 5 runes' + - 'Solid: useLiveQuery with fine-grained reactivity' + - 'Angular: injectLiveQuery with signals' + - 'Dependency arrays for reactive query parameters' + - 'React Suspense integration with Error Boundaries' + - 'Infinite query pagination (cursor-based)' + - 'Return shape: { data, state, collection, status, isLoading, isReady, isError }' + tasks: + - 'Bind a live query to a React component with useLiveQuery' + - 'Use React Suspense for loading states with useLiveSuspenseQuery' + - 'Implement infinite scroll with useLiveInfiniteQuery' + - 'Pass reactive parameters to queries in Vue (refs) / Angular (signals) / Solid (signals)' + - 'Set up dependency arrays for dynamic query parameters' + subsystems: + - name: 'React' + package: '@tanstack/react-db' + config_surface: 'useLiveQuery, useLiveSuspenseQuery, useLiveInfiniteQuery, usePacedMutations' + - name: 'Vue' + package: '@tanstack/vue-db' + config_surface: 'useLiveQuery composable with MaybeRefOrGetter' + - name: 'Svelte' + package: '@tanstack/svelte-db' + config_surface: 
'useLiveQuery with Svelte 5 runes ($state)' + - name: 'Solid' + package: '@tanstack/solid-db' + config_surface: 'useLiveQuery with Accessor/createSignal' + - name: 'Angular' + package: '@tanstack/angular-db' + config_surface: 'injectLiveQuery with Signal, inject(DestroyRef)' + failure_modes: + - mistake: 'Missing external values in useLiveQuery dependency array' + mechanism: "When query uses external state (props, local state) not included in deps array, the query won't re-run when those values change, showing stale results" + wrong_pattern: | + const { data } = useLiveQuery(q => + q.from({ todos }).where(({ todos }) => eq(todos.userId, userId)) + ) // userId not in deps + correct_pattern: | + const { data } = useLiveQuery(q => + q.from({ todos }).where(({ todos }) => eq(todos.userId, userId)), + [userId] + ) + source: 'docs/framework/react/overview.md - dependency array section' + priority: 'CRITICAL' + skills: ['framework-integration'] + + - mistake: 'Reading Solid signals outside the query function' + mechanism: "Solid's reactivity tracks signal reads inside the query function; reading signals before passing to useLiveQuery means changes aren't tracked and query won't re-run" + source: 'docs/framework/solid/overview.md - fine-grained reactivity section' + priority: 'HIGH' + skills: ['framework-integration'] + + - mistake: 'Using useLiveSuspenseQuery without Error Boundary' + mechanism: 'Suspense query throws errors during rendering; without an Error Boundary wrapping the component, the entire app crashes instead of showing a fallback' + source: 'docs/guides/live-queries.md - React Suspense section' + priority: 'HIGH' + skills: ['framework-integration'] + + - mistake: 'Passing non-function deps in Svelte instead of getter functions' + mechanism: 'In Svelte 5, props and derived values should be wrapped in getter functions in the dependency array to maintain reactivity; passing values directly captures them at creation time' + source: 
'docs/framework/svelte/overview.md - Props in dependencies' + priority: 'MEDIUM' + skills: ['framework-integration'] + compositions: + - library: 'meta-framework' + skill: 'meta-framework' + + - name: 'Mutations & Optimistic State' + slug: 'mutations-optimistic' + domain: 'mutations-optimistic' + description: 'Writing data to collections with instant optimistic feedback' + type: 'core' + covers: + - 'collection.insert(), collection.update(), collection.delete()' + - 'createOptimisticAction (custom mutation actions)' + - 'createPacedMutations (debounced/throttled mutations)' + - 'createTransaction (manual transaction control)' + - 'getActiveTransaction (ambient transaction context)' + - 'Transaction lifecycle (pending -> persisting -> completed | failed)' + - 'Transaction stacking (concurrent transactions build on each other)' + - 'Mutation merging (insert+update -> insert, insert+delete -> null, etc.)' + - 'onInsert, onUpdate, onDelete handlers' + - 'Optimistic vs non-optimistic updates (optimistic: false)' + - 'Automatic rollback on handler error' + - 'Change tracking proxy (draft updates via Immer-like API)' + - 'PendingMutation type (original, modified, changes, globalKey)' + - 'Transaction.isPersisted promise' + - 'Temporary ID handling' + - 'TanStack Pacer integration for sequential execution' + tasks: + - 'Insert a new item with optimistic UI update' + - 'Update an item using the draft proxy pattern' + - 'Delete items with automatic rollback on server error' + - 'Create a custom optimistic action for complex mutations' + - 'Use paced mutations for real-time text editing' + - 'Batch multiple mutations into a single transaction' + - 'Handle temporary IDs that get replaced by server-generated IDs' + - 'Use pacer for sequential transaction execution to avoid conflicts' + failure_modes: + - mistake: 'Passing a new object to update() instead of mutating the draft' + mechanism: "collection.update(id, {...item, title: 'new'}) is wrong; the API uses an Immer-style 
draft proxy: collection.update(id, (draft) => { draft.title = 'new' }). Passing an object instead of a callback silently fails or throws a confusing error." + wrong_pattern: | + collection.update(id, { ...item, title: 'new' }) + correct_pattern: | + collection.update(id, (draft) => { draft.title = 'new' }) + source: 'maintainer interview' + priority: 'CRITICAL' + skills: ['mutations-optimistic'] + + - mistake: 'Hallucinating mutation API signatures' + mechanism: 'AI agents generate plausible but wrong mutation code -- inventing handler signatures, confusing createOptimisticAction with createTransaction, missing the ambient transaction pattern, or wrong PendingMutation property names (e.g. transaction.mutations[0].changes vs .data)' + source: 'maintainer interview' + priority: 'CRITICAL' + skills: ['mutations-optimistic'] + + - mistake: 'onMutate callback returning a Promise' + mechanism: 'onMutate in createOptimisticAction must be synchronous because optimistic state needs to be applied immediately in the current tick; returning a Promise throws OnMutateMustBeSynchronousError' + wrong_pattern: | + createOptimisticAction({ + onMutate: async (text) => { collection.insert({ id: await generateId(), text }) }, + mutationFn: async (text, { transaction }) => { ... } + }) + correct_pattern: | + createOptimisticAction({ + onMutate: (text) => { collection.insert({ id: crypto.randomUUID(), text }) }, + mutationFn: async (text, { transaction }) => { ... 
} + }) + source: 'packages/db/src/optimistic-action.ts:75' + priority: 'CRITICAL' + skills: ['mutations-optimistic'] + + - mistake: 'Calling insert/update/delete without handler or ambient transaction' + mechanism: 'Collection mutations require either an onInsert/onUpdate/onDelete handler or an ambient transaction from createTransaction; without either, throws MissingInsertHandlerError (or Update/Delete variant)' + source: 'packages/db/src/collection/mutations.ts:166' + priority: 'CRITICAL' + skills: ['mutations-optimistic'] + + - mistake: 'Calling .mutate() after transaction is no longer pending' + mechanism: "Transactions can only accept new mutations while in 'pending' state; calling mutate() after commit() or rollback() throws TransactionNotPendingMutateError" + source: 'packages/db/src/transactions.ts:289' + priority: 'HIGH' + skills: ['mutations-optimistic'] + + - mistake: "Attempting to change an item's primary key via update" + mechanism: 'The update proxy detects key changes and throws KeyUpdateNotAllowedError; primary keys are immutable once set' + source: 'packages/db/src/collection/mutations.ts:352' + priority: 'HIGH' + skills: ['mutations-optimistic'] + + - mistake: 'Inserting item with duplicate key' + mechanism: 'If an item with the same key already exists in the collection (synced or optimistic), throws DuplicateKeyError; common when using client-generated IDs without checking' + source: 'packages/db/src/collection/mutations.ts:181' + priority: 'HIGH' + skills: ['mutations-optimistic'] + + - mistake: 'Not awaiting refetch after mutation in query collection handler' + mechanism: "In query collection onInsert/onUpdate/onDelete handlers, the optimistic state is only held until the handler resolves; if you don't await the refetch or sync back, the optimistic state is dropped before new server state arrives, causing a flash of missing data" + wrong_pattern: | + onInsert: async ({ transaction }) => { + await 
api.createTodo(transaction.mutations[0].modified) + // missing: await collection.utils.refetch() + } + correct_pattern: | + onInsert: async ({ transaction }) => { + await api.createTodo(transaction.mutations[0].modified) + await collection.utils.refetch() + } + source: 'docs/overview.md - optimistic state lifecycle' + priority: 'HIGH' + skills: ['mutations-optimistic'] + + - name: 'Meta-Framework Integration' + slug: 'meta-framework' + domain: 'meta-framework' + description: 'Client-side preloading of collections in route loaders for meta-frameworks' + type: 'composition' + covers: + - 'collection.preload() in route loaders' + - 'collection.stateWhenReady() and toArrayWhenReady()' + - 'collection.onFirstReady(callback)' + - 'Pre-creating createLiveQueryCollection in loaders' + - 'Setting ssr: false on routes using collections' + - 'TanStack Start / TanStack Router loader patterns' + - 'Coordinating collection lifecycle with route transitions' + - 'Passing pre-loaded collections to components via loader data' + tasks: + - 'Preload a collection in a TanStack Router route loader' + - 'Pre-create a live query collection in a loader and pass to component' + - 'Configure ssr: false on routes that use TanStack DB collections' + - 'Coordinate multiple collection preloads in a single route loader' + - 'Handle route transitions when collections are still loading' + failure_modes: + - mistake: 'Not preloading collections in route loaders' + mechanism: 'Without preload() in the loader, the collection starts syncing only when the component mounts; this causes a loading flash even though the router could have started the sync during navigation' + wrong_pattern: | + export const Route = createFileRoute('/todos')({ + component: TodoList, + // no loader -- collection loads on mount + }) + correct_pattern: | + export const Route = createFileRoute('/todos')({ + component: TodoList, + ssr: false, + loader: async () => { + await todosCollection.preload() + }, + }) + source: 
'examples/react/projects/src/routes' + priority: 'HIGH' + skills: ['meta-framework'] + + - mistake: 'Not setting ssr: false on routes using collections' + mechanism: 'Collections are client-side only (no SSR support yet); rendering a route with collections on the server attempts to access browser-only APIs, causing crashes or hydration mismatches' + wrong_pattern: | + export const Route = createFileRoute('/todos')({ + component: TodoList, + loader: async () => { + await todosCollection.preload() + }, + }) + correct_pattern: | + export const Route = createFileRoute('/todos')({ + component: TodoList, + ssr: false, + loader: async () => { + await todosCollection.preload() + }, + }) + source: 'examples/react/projects/src/start.tsx - defaultSsr: false' + priority: 'CRITICAL' + skills: ['meta-framework'] + + - mistake: 'Creating new collection instances inside loaders on every navigation' + mechanism: 'createLiveQueryCollection should be called once and reused; creating new instances on each navigation leaks D2 graph nodes and subscriptions' + source: 'docs/guides/live-queries.md - standalone queries' + priority: 'HIGH' + skills: ['meta-framework'] + compositions: + - library: '@tanstack/react-router' + skill: 'framework-integration' + + - name: 'Custom Adapter Authoring' + slug: 'custom-adapter' + domain: 'custom-adapter' + description: 'Building custom collection adapters for new backends' + type: 'core' + covers: + - 'SyncConfig interface (sync, getSyncMetadata, rowUpdateMode)' + - 'Sync primitives (begin, write, commit, markReady, truncate)' + - 'ChangeMessage format (insert, update, delete)' + - 'loadSubset for on-demand sync mode' + - 'LoadSubsetOptions (where, orderBy, limit, cursor)' + - 'Expression parsing helpers (parseWhereExpression, parseOrderByExpression, extractSimpleComparisons)' + - 'Collection options creator pattern' + - 'Subscription lifecycle and cleanup' + tasks: + - 'Build a custom collection adapter for a new backend' + - 'Implement loadSubset for 
on-demand predicate push-down' + - 'Use expression parsing helpers to translate query predicates to API params' + - 'Handle the sync lifecycle correctly (begin/write/commit/markReady)' + failure_modes: + - mistake: 'Not calling markReady() in custom sync implementation' + mechanism: "markReady() transitions the collection from 'loading' to 'ready' status; forgetting to call it means live queries never resolve and useLiveSuspenseQuery hangs forever in Suspense" + wrong_pattern: | + sync: ({ begin, write, commit }) => { + fetchData().then(items => { + begin() + items.forEach(item => write({ type: 'insert', value: item })) + commit() + // forgot markReady()! + }) + } + correct_pattern: | + sync: ({ begin, write, commit, markReady }) => { + fetchData().then(items => { + begin() + items.forEach(item => write({ type: 'insert', value: item })) + commit() + markReady() + }) + } + source: 'docs/guides/collection-options-creator.md - markReady section' + priority: 'CRITICAL' + skills: ['custom-adapter'] + + - mistake: 'Race condition between initial sync and event subscription' + mechanism: "If live change events aren't subscribed BEFORE the initial data fetch, changes that occur during the fetch are lost; the sync implementation must start listening before fetching" + wrong_pattern: | + sync: ({ begin, write, commit, markReady }) => { + // BAD: fetch first, then subscribe + const data = await fetchAll() + writeAll(data) + subscribe(onChange) // missed changes during fetch! 
+ } + correct_pattern: | + sync: ({ begin, write, commit, markReady }) => { + // GOOD: subscribe first, then fetch + subscribe(onChange) + const data = await fetchAll() + writeAll(data) + } + source: 'docs/guides/collection-options-creator.md - Race condition prevention' + priority: 'HIGH' + skills: ['custom-adapter'] + + - mistake: 'write() called without begin() in sync implementation' + mechanism: 'Sync data must be written within a transaction (begin -> write -> commit); calling write() without begin() throws NoPendingSyncTransactionWriteError' + source: 'packages/db/src/collection/sync.ts:110' + priority: 'HIGH' + skills: ['custom-adapter'] + + - name: 'Offline Transactions' + slug: 'offline' + domain: 'offline' + description: 'Offline-first transaction queueing with persistence and retry' + type: 'composition' + covers: + - 'OfflineExecutor / startOfflineExecutor' + - 'OfflineConfig (collections, mutationFns, storage, maxConcurrency)' + - 'Storage adapters (IndexedDBAdapter, LocalStorageAdapter)' + - 'Retry policies (DefaultRetryPolicy, BackoffCalculator, NonRetriableError)' + - 'Leader election (WebLocksLeader, BroadcastChannelLeader)' + - 'Online detection (WebOnlineDetector)' + - 'OutboxManager (transaction queue)' + - 'KeyScheduler (prevents concurrent mutations on same key)' + - 'TransactionSerializer (persistence)' + - 'React Native support (@react-native-community/netinfo)' + tasks: + - 'Set up offline-first transactions with @tanstack/offline-transactions' + - 'Configure IndexedDB storage for transaction persistence' + - 'Handle multi-tab coordination with leader election' + - 'Implement custom retry logic with NonRetriableError' + - 'Set up offline support for React Native with NetInfo' + failure_modes: + - mistake: 'Using offline transactions when not needed' + mechanism: 'Offline is inherently complex; @tanstack/offline-transactions adds storage, leader election, and retry overhead. Only adopt when true offline support is required. 
PowerSync/RxDB handle their own local persistence, which is a different concern.' + source: 'maintainer interview' + priority: 'HIGH' + skills: ['offline'] + + - mistake: 'Not handling NonRetriableError for permanent failures' + mechanism: 'By default, failed transactions retry with exponential backoff; for permanent failures (e.g. 400 Bad Request), throw NonRetriableError to skip retry and move the transaction to a dead-letter state' + source: 'packages/offline-transactions/src/retry.ts' + priority: 'HIGH' + skills: ['offline'] + + - mistake: 'Multiple tabs executing the same queued transaction' + mechanism: 'Without leader election, each tab runs its own OfflineExecutor and processes the outbox independently, causing duplicate mutations; must configure WebLocksLeader or BroadcastChannelLeader' + source: 'packages/offline-transactions/src/leader/' + priority: 'CRITICAL' + skills: ['offline'] + +tensions: + - name: 'Simplicity vs. correctness in sync' + skills: ['collection-setup', 'custom-adapter'] + description: 'Getting-started simplicity (localOnlyCollectionOptions, eager sync) conflicts with production correctness (on-demand sync, proper adapter selection, race condition prevention)' + implication: 'Agents use localOnly or eager mode for everything; production apps need adapter-specific patterns and on-demand sync for large datasets' + + - name: 'Optimistic speed vs. data consistency' + skills: ['mutations-optimistic', 'collection-setup'] + description: 'Instant optimistic updates create a window where client state diverges from server state; resolving conflicts on rollback can lose user work' + implication: 'Agents apply optimistic updates without considering rollback UX or awaiting refetch in mutation handlers' + + - name: 'Query expressiveness vs. 
IVM constraints' + skills: ['live-queries', 'framework-integration'] + description: "The query builder looks like SQL but has constraints (equality joins only, orderBy required for limit, no distinct without select) that SQL doesn't have" + implication: 'Agents write SQL-style queries that violate IVM constraints, producing confusing errors' + + - name: 'Offline complexity vs. app simplicity' + skills: ['offline', 'mutations-optimistic'] + description: "Offline transaction support adds storage, leader election, and retry complexity; most apps don't need it but agents may recommend it prematurely" + implication: 'Agents add @tanstack/offline-transactions to apps that only need basic optimistic mutations' + +cross_references: + - from: 'framework-integration' + to: 'meta-framework' + reason: 'Framework hooks render data; meta-framework loaders preload it. Developers need both for production apps with routing.' + + - from: 'meta-framework' + to: 'framework-integration' + reason: 'Preloaded collections are consumed by framework hooks; understanding the hook API informs what to preload.' + + - from: 'collection-setup' + to: 'mutations-optimistic' + reason: 'Collection mutation handlers (onInsert/onUpdate/onDelete) are configured at setup time but execute during mutations; understanding both is required for working writes.' + + - from: 'mutations-optimistic' + to: 'collection-setup' + reason: 'Mutation handler signatures and behavior depend on which adapter is used (e.g. Electric txid return, Query refetch).' + + - from: 'live-queries' + to: 'collection-setup' + reason: 'Live queries reference collections by alias; understanding collection types and sync modes affects query behavior (e.g. on-demand predicate push-down).' + + - from: 'custom-adapter' + to: 'collection-setup' + reason: 'Custom adapters produce the same CollectionConfig shape that built-in adapters use; understanding the config contract is essential.' 
+ + - from: 'offline' + to: 'mutations-optimistic' + reason: 'Offline transactions wrap the same transaction/mutation model; understanding createTransaction and PendingMutation is prerequisite.' + +gaps: + - skill: 'meta-framework' + question: 'What are the specific patterns for non-TanStack-Start frameworks (Next.js App Router, Remix loaders, Nuxt middleware, SvelteKit load functions)?' + context: 'Only TanStack Start/Router patterns are documented in examples; other frameworks need guidance' + status: 'open' + + - skill: 'collection-setup' + question: 'What is the recommended pattern for collection cleanup/disposal in single-page apps with route-based code splitting?' + context: "gcTime defaults to 5 minutes, but docs don't clearly explain when/how collections are garbage collected or what triggers cleanup" + status: 'open' + + - skill: 'live-queries' + question: 'Are there performance cliffs with live queries? At what complexity/data size do queries degrade?' + context: "Docs claim sub-millisecond for 100k items, but don't discuss limits (e.g., 5-way joins, deeply nested aggregations)" + status: 'open' + + - skill: 'mutations-optimistic' + question: 'What is the recommended pattern for handling temporary IDs that get replaced by server-generated IDs?' + context: "The mutations guide mentions temporary IDs but the pattern for mapping client IDs to server IDs during sync isn't well documented" + status: 'open' + + - skill: 'meta-framework' + question: 'What are the specific patterns for TanStack Router integration with collection loading/prefetching?' + context: 'Maintainer reports this is a major composition pain point; agents struggle with the loading/prefetching pattern' + status: 'open' + + - skill: 'offline' + question: 'What happens to in-flight transactions when the browser goes offline mid-persist?' 
+ context: "The offline executor package handles queuing, but the interaction with the main transaction lifecycle isn't documented" + status: 'open' diff --git a/eslint.config.js b/eslint.config.js index 5c4455420..570c4d31b 100644 --- a/eslint.config.js +++ b/eslint.config.js @@ -9,6 +9,7 @@ export default [ `**/.nitro/**`, `**/traildepot/**`, `examples/angular/**`, + `packages/db-collection-e2e/vite.config.ts`, ], }, { diff --git a/examples/angular/todos/package.json b/examples/angular/todos/package.json index d45d61aa5..16345af90 100644 --- a/examples/angular/todos/package.json +++ b/examples/angular/todos/package.json @@ -28,8 +28,8 @@ "@angular/forms": "^20.3.16", "@angular/platform-browser": "^20.3.16", "@angular/router": "^20.3.16", - "@tanstack/angular-db": "^0.1.56", - "@tanstack/db": "^0.5.30", + "@tanstack/angular-db": "^0.1.58", + "@tanstack/db": "^0.5.32", "rxjs": "^7.8.2", "tslib": "^2.8.1", "zone.js": "~0.15.0" diff --git a/examples/react-native/offline-transactions/android/app/build.gradle b/examples/react-native/offline-transactions/android/app/build.gradle index 860628dd9..ccf47ff66 100644 --- a/examples/react-native/offline-transactions/android/app/build.gradle +++ b/examples/react-native/offline-transactions/android/app/build.gradle @@ -87,9 +87,9 @@ android { buildToolsVersion rootProject.ext.buildToolsVersion compileSdk rootProject.ext.compileSdkVersion - namespace "com.offlinetransactionsdemo" + namespace "com.tanstack.offlinetransactions" defaultConfig { - applicationId "com.offlinetransactionsdemo" + applicationId "com.tanstack.offlinetransactions" minSdkVersion rootProject.ext.minSdkVersion targetSdkVersion rootProject.ext.targetSdkVersion versionCode 1 diff --git a/examples/react-native/offline-transactions/app/_layout.tsx b/examples/react-native/offline-transactions/app/_layout.tsx index b9d10f17b..0d629ba2d 100644 --- a/examples/react-native/offline-transactions/app/_layout.tsx +++ 
b/examples/react-native/offline-transactions/app/_layout.tsx @@ -2,25 +2,21 @@ import '../src/polyfills' import { Stack } from 'expo-router' -import { QueryClientProvider } from '@tanstack/react-query' import { SafeAreaProvider } from 'react-native-safe-area-context' import { StatusBar } from 'expo-status-bar' -import { queryClient } from '../src/utils/queryClient' export default function RootLayout() { return ( - - - - - - + + + + ) } diff --git a/examples/react-native/offline-transactions/app/index.tsx b/examples/react-native/offline-transactions/app/index.tsx index 46f1e79d8..67bc1e2e7 100644 --- a/examples/react-native/offline-transactions/app/index.tsx +++ b/examples/react-native/offline-transactions/app/index.tsx @@ -1,10 +1,102 @@ +import React, { useEffect, useState } from 'react' +import { ActivityIndicator, StyleSheet, Text, View } from 'react-native' import { SafeAreaView } from 'react-native-safe-area-context' import { TodoList } from '../src/components/TodoList' +import { createTodos } from '../src/db/todos' +import type { TodosHandle } from '../src/db/todos' export default function HomeScreen() { + const [handle, setHandle] = useState(null) + const [error, setError] = useState(null) + + useEffect(() => { + let disposed = false + let currentHandle: TodosHandle | null = null + + try { + const h = createTodos() + if (disposed as boolean) { + h.close() + return + } + currentHandle = h + setHandle(h) + } catch (err) { + if (!(disposed as boolean)) { + console.error(`Failed to initialize:`, err) + setError(err instanceof Error ? err.message : `Failed to initialize`) + } + } + + return () => { + disposed = true + currentHandle?.close() + } + }, []) + + if (error) { + return ( + + + Initialization Error + + {error} + + + + ) + } + + if (!handle) { + return ( + + + + Initializing... 
+ + + ) + } + return ( - + ) } + +const styles = StyleSheet.create({ + errorContainer: { + flex: 1, + padding: 16, + backgroundColor: `#f5f5f5`, + }, + errorTitle: { + fontSize: 24, + fontWeight: `bold`, + color: `#111`, + marginBottom: 16, + }, + errorBox: { + backgroundColor: `#fee2e2`, + borderWidth: 1, + borderColor: `#fca5a5`, + borderRadius: 8, + padding: 12, + }, + errorText: { + color: `#dc2626`, + fontSize: 14, + }, + loadingContainer: { + flex: 1, + justifyContent: `center`, + alignItems: `center`, + gap: 12, + backgroundColor: `#f5f5f5`, + }, + loadingText: { + color: `#666`, + fontSize: 14, + }, +}) diff --git a/examples/react-native/offline-transactions/metro.config.js b/examples/react-native/offline-transactions/metro.config.js index 7bed46fc6..18b7c545c 100644 --- a/examples/react-native/offline-transactions/metro.config.js +++ b/examples/react-native/offline-transactions/metro.config.js @@ -11,39 +11,60 @@ config.watchFolders = [monorepoRoot] // Ensure symlinks are followed (important for pnpm) config.resolver.unstable_enableSymlinks = true +config.resolver.unstable_enablePackageExports = true -// Force all React-related packages to resolve from THIS project's node_modules -// This prevents the "multiple copies of React" error const localNodeModules = path.resolve(projectRoot, 'node_modules') -config.resolver.extraNodeModules = new Proxy( - { - react: path.resolve(localNodeModules, 'react'), - 'react-native': path.resolve(localNodeModules, 'react-native'), - 'react/jsx-runtime': path.resolve(localNodeModules, 'react/jsx-runtime'), - 'react/jsx-dev-runtime': path.resolve( - localNodeModules, - 'react/jsx-dev-runtime', - ), - }, - { - get: (target, name) => { - if (target[name]) { - return target[name] - } - // Fall back to normal resolution for other modules - return path.resolve(localNodeModules, name) - }, + +// Singleton packages that must resolve to exactly one copy. 
+// In a pnpm monorepo, workspace packages may resolve these to a different +// version in the .pnpm store. This custom resolveRequest forces every import +// of these packages (from anywhere) to the app's local node_modules copy. +const singletonPackages = ['react', 'react-native'] +const singletonPaths = {} +for (const pkg of singletonPackages) { + singletonPaths[pkg] = path.resolve(localNodeModules, pkg) +} + +const defaultResolveRequest = config.resolver.resolveRequest +config.resolver.resolveRequest = (context, moduleName, platform) => { + // Force singleton packages to resolve from the app's local node_modules, + // regardless of where the import originates. This prevents workspace + // packages (e.g. react-db) from pulling in their own copy of React. + for (const pkg of singletonPackages) { + if (moduleName === pkg || moduleName.startsWith(pkg + '/')) { + try { + const filePath = require.resolve(moduleName, { + paths: [projectRoot], + }) + return { type: 'sourceFile', filePath } + } catch {} + } + } + + if (defaultResolveRequest) { + return defaultResolveRequest(context, moduleName, platform) + } + return context.resolveRequest( + { ...context, resolveRequest: undefined }, + moduleName, + platform, + ) +} + +// Force singleton packages to resolve from the app's local node_modules +config.resolver.extraNodeModules = new Proxy(singletonPaths, { + get: (target, name) => { + if (target[name]) { + return target[name] + } + return path.resolve(localNodeModules, name) }, -) +}) // Block react-native 0.83 from root node_modules +const escMonorepoRoot = monorepoRoot.replace(/[.*+?^${}()|[\]\\]/g, '\\$&') config.resolver.blockList = [ - new RegExp( - `${monorepoRoot.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}/node_modules/\\.pnpm/react-native@0\\.83.*`, - ), - new RegExp( - `${monorepoRoot.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}/node_modules/\\.pnpm/react@(?!19\\.0\\.0).*`, - ), + new RegExp(`${escMonorepoRoot}/node_modules/\\.pnpm/react-native@0\\.83.*`), ] // Let Metro 
know where to resolve packages from (local first, then root) @@ -52,4 +73,8 @@ config.resolver.nodeModulesPaths = [ path.resolve(monorepoRoot, 'node_modules'), ] +// Allow dynamic imports with non-literal arguments (used by workspace packages +// for optional Node.js-only code paths that are never reached on React Native) +config.transformer.dynamicDepsInPackages = 'throwAtRuntime' + module.exports = config diff --git a/examples/react-native/offline-transactions/package.json b/examples/react-native/offline-transactions/package.json index bf0dcc6f6..e49fc06c3 100644 --- a/examples/react-native/offline-transactions/package.json +++ b/examples/react-native/offline-transactions/package.json @@ -12,11 +12,14 @@ }, "dependencies": { "@expo/metro-runtime": "~5.0.5", + "@op-engineering/op-sqlite": "^15.2.5", "@react-native-async-storage/async-storage": "2.1.2", "@react-native-community/netinfo": "11.4.1", - "@tanstack/offline-transactions": "^1.0.21", - "@tanstack/query-db-collection": "^1.0.27", - "@tanstack/react-db": "^0.1.74", + "@tanstack/db": "workspace:*", + "@tanstack/db-react-native-sqlite-persisted-collection": "workspace:*", + "@tanstack/offline-transactions": "^1.0.23", + "@tanstack/query-db-collection": "^1.0.29", + "@tanstack/react-db": "^0.1.76", "@tanstack/react-query": "^5.90.20", "expo": "~53.0.26", "expo-constants": "~17.1.0", @@ -24,7 +27,7 @@ "expo-router": "~5.1.11", "expo-status-bar": "~2.2.0", "metro": "0.82.5", - "react": "^19.2.4", + "react": "19.0.0", "react-native": "0.79.6", "react-native-safe-area-context": "5.4.0", "react-native-screens": "~4.11.1", diff --git a/examples/react-native/offline-transactions/server/index.ts b/examples/react-native/offline-transactions/server/index.ts index d7990b7a8..bf6a348a9 100644 --- a/examples/react-native/offline-transactions/server/index.ts +++ b/examples/react-native/offline-transactions/server/index.ts @@ -1,8 +1,12 @@ -import express from 'express' +import { readFileSync, writeFileSync } from 'node:fs' 
+import { dirname, join } from 'node:path' +import { fileURLToPath } from 'node:url' import cors from 'cors' +import express from 'express' const app = express() const PORT = 3001 +const DATA_FILE = join(dirname(fileURLToPath(import.meta.url)), 'todos.json') app.use(cors()) app.use(express.json()) @@ -24,21 +28,26 @@ function generateId(): string { return Math.random().toString(36).substring(2) + Date.now().toString(36) } -// Add some initial data -const initialTodos = [ - { id: '1', text: 'Learn TanStack DB', completed: false }, - { id: '2', text: 'Build offline-first app', completed: false }, - { id: '3', text: 'Test on React Native', completed: true }, -] +// Load persisted data or seed with initial data +function loadData() { + try { + const raw = readFileSync(DATA_FILE, 'utf-8') + const todos: Array = JSON.parse(raw) + todos.forEach((todo) => todosStore.set(todo.id, todo)) + console.log(`Loaded ${todos.length} todos from ${DATA_FILE}`) + } catch { + console.log(`No existing data file, starting empty`) + } +} -initialTodos.forEach((todo) => { - const now = new Date().toISOString() - todosStore.set(todo.id, { - ...todo, - createdAt: now, - updatedAt: now, - }) -}) +function saveData() { + writeFileSync( + DATA_FILE, + JSON.stringify(Array.from(todosStore.values()), null, 2), + ) +} + +loadData() // Simulate network delay const delay = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms)) @@ -58,20 +67,21 @@ app.post('/api/todos', async (req, res) => { console.log('POST /api/todos', req.body) await delay(200) - const { text, completed } = req.body + const { id, text, completed } = req.body if (!text || text.trim() === '') { return res.status(400).json({ error: 'Todo text is required' }) } const now = new Date().toISOString() const todo: Todo = { - id: generateId(), + id: id || generateId(), text, completed: completed ?? 
false, createdAt: now, updatedAt: now, } todosStore.set(todo.id, todo) + saveData() res.status(201).json(todo) }) @@ -91,6 +101,7 @@ app.put('/api/todos/:id', async (req, res) => { updatedAt: new Date().toISOString(), } todosStore.set(req.params.id, updated) + saveData() res.json(updated) }) @@ -102,6 +113,7 @@ app.delete('/api/todos/:id', async (req, res) => { if (!todosStore.delete(req.params.id)) { return res.status(404).json({ error: 'Todo not found' }) } + saveData() res.json({ success: true }) }) diff --git a/examples/react-native/offline-transactions/src/components/TodoList.tsx b/examples/react-native/offline-transactions/src/components/TodoList.tsx index 3f9b0ce54..875959880 100644 --- a/examples/react-native/offline-transactions/src/components/TodoList.tsx +++ b/examples/react-native/offline-transactions/src/components/TodoList.tsx @@ -1,69 +1,38 @@ import React, { useEffect, useMemo, useState } from 'react' import { - View, + ActivityIndicator, + FlatList, + StyleSheet, Text, TextInput, TouchableOpacity, - FlatList, - StyleSheet, - ActivityIndicator, + View, } from 'react-native' import NetInfo from '@react-native-community/netinfo' import { useLiveQuery } from '@tanstack/react-db' -import { - todoCollection, - createOfflineExecutor, - createTodoActions, -} from '../db/todos' +import { createTodoActions } from '../db/todos' +import type { Todo, TodosHandle } from '../db/todos' +import type { Collection } from '@tanstack/db' + +interface TodoListProps { + collection: Collection + executor: TodosHandle[`executor`] +} -export function TodoList() { +export function TodoList({ collection, executor }: TodoListProps) { const [newTodoText, setNewTodoText] = useState(``) const [error, setError] = useState(null) const [isOnline, setIsOnline] = useState(true) const [pendingCount, setPendingCount] = useState(0) - const [offline, setOffline] = useState | null>(null) - const [initError, setInitError] = useState(null) - const [isInitialized, setIsInitialized] = 
useState(false) - - // Initialize offline executor - useEffect(() => { - console.log(`[TodoList] Initializing...`) - try { - const executor = createOfflineExecutor() - setOffline(executor) - setIsInitialized(true) - console.log(`[TodoList] Executor created successfully`) - return () => { - executor.dispose() - } - } catch (err) { - console.error(`[TodoList] Failed to create executor:`, err) - setInitError(err instanceof Error ? err.message : `Failed to initialize`) - setIsInitialized(true) - } - }, []) - - // Create actions based on offline executor - const actions = useMemo(() => createTodoActions(offline), [offline]) - - // Use live query to get todos - const queryResult = useLiveQuery((q) => - q - .from({ todo: todoCollection }) - .orderBy(({ todo }) => todo.createdAt, `desc`), + const actions = useMemo( + () => createTodoActions(executor, collection), + [executor, collection], ) - const todoList = queryResult.data ?? [] - const isLoading = queryResult.isLoading - useEffect(() => { - console.log(`[TodoList] Query state:`, { - todoCount: todoList.length, - isLoading, - }) - }, [todoList.length, isLoading]) + const { data: todoList = [] } = useLiveQuery((q) => + q.from({ todo: collection }).orderBy(({ todo }) => todo.createdAt, `desc`), + ) // Monitor network status useEffect(() => { @@ -72,27 +41,25 @@ export function TodoList() { state.isConnected === true && state.isInternetReachable !== false setIsOnline(connected) - if (connected && offline) { - offline.notifyOnline() + if (connected) { + executor.notifyOnline() } }) return () => unsubscribe() - }, [offline]) + }, [executor]) // Monitor pending transactions useEffect(() => { - if (!offline) return - const interval = setInterval(() => { - setPendingCount(offline.getPendingCount()) + setPendingCount(executor.getPendingCount()) }, 100) return () => clearInterval(interval) - }, [offline]) + }, [executor]) const handleAddTodo = async () => { - if (!newTodoText.trim() || !actions.addTodo) return + if 
(!newTodoText.trim()) return try { setError(null) @@ -104,8 +71,6 @@ export function TodoList() { } const handleToggleTodo = async (id: string) => { - if (!actions.toggleTodo) return - try { setError(null) await actions.toggleTodo(id) @@ -115,8 +80,6 @@ export function TodoList() { } const handleDeleteTodo = async (id: string) => { - if (!actions.deleteTodo) return - try { setError(null) await actions.deleteTodo(id) @@ -125,40 +88,11 @@ export function TodoList() { } } - // Show init error if any - if (initError) { - return ( - - Initialization Error - - {initError} - - - ) - } - - // Show loading while initializing - if (!isInitialized) { - return ( - - Offline Transactions Demo - - - Initializing... - - - ) - } - return ( Offline Transactions Demo - TanStack DB on React Native - - {/* Debug info */} - - Init: {isInitialized ? `yes` : `no`} | Offline: {offline ? `yes` : `no`}{' '} - | Loading: {isLoading ? `yes` : `no`} | Todos: {todoList.length} + + SQLite persistence + offline sync to server {/* Status indicators */} @@ -180,22 +114,27 @@ export function TodoList() { + + + SQLite Persistence + + - {offline?.isOfflineEnabled ? `Offline Mode` : `Online Only`} + {executor.isOfflineEnabled ? `Offline Mode` : `Online Only`} @@ -222,31 +161,25 @@ export function TodoList() { onChangeText={setNewTodoText} placeholder="Add a new todo..." onSubmitEditing={handleAddTodo} - editable={!isLoading} /> Add {/* Todo list */} - {isLoading && todoList.length === 0 ? ( - - - Loading todos... - - ) : todoList.length === 0 ? ( + {todoList.length === 0 ? ( No todos yet. Add one above! - Try going offline to test offline mode + Todos persist in SQLite and sync to server when online ) : ( @@ -293,14 +226,16 @@ export function TodoList() { Try this: - 1. Add some todos while online + 1. Add some todos (persisted to SQLite + queued for server) - 2. Enable airplane mode - 3. Add more todos (queued locally) + 2. Close and reopen the app (data still there from SQLite) - 4. 
Disable airplane mode to sync + 3. Enable airplane mode, add more todos + + + 4. Disable airplane mode (queued mutations sync to server) @@ -359,12 +294,18 @@ const styles = StyleSheet.create({ offlineDot: { backgroundColor: `#ef4444`, }, - enabled: { + persisted: { backgroundColor: `#dbeafe`, }, - enabledDot: { + persistedDot: { backgroundColor: `#3b82f6`, }, + enabled: { + backgroundColor: `#e0e7ff`, + }, + enabledDot: { + backgroundColor: `#6366f1`, + }, disabled: { backgroundColor: `#e5e5e5`, }, @@ -416,16 +357,6 @@ const styles = StyleSheet.create({ fontWeight: `600`, fontSize: 16, }, - loadingContainer: { - flex: 1, - justifyContent: `center`, - alignItems: `center`, - gap: 12, - }, - loadingText: { - color: `#666`, - fontSize: 14, - }, emptyContainer: { flex: 1, justifyContent: `center`, diff --git a/examples/react-native/offline-transactions/src/db/todos.ts b/examples/react-native/offline-transactions/src/db/todos.ts index 88a690394..ff98c13f7 100644 --- a/examples/react-native/offline-transactions/src/db/todos.ts +++ b/examples/react-native/offline-transactions/src/db/todos.ts @@ -1,95 +1,123 @@ +import { open } from '@op-engineering/op-sqlite' import { createCollection } from '@tanstack/react-db' +import { + createReactNativeSQLitePersistence, + persistedCollectionOptions, +} from '@tanstack/db-react-native-sqlite-persisted-collection' import { queryCollectionOptions } from '@tanstack/query-db-collection' import { startOfflineExecutor } from '@tanstack/offline-transactions/react-native' -import { z } from 'zod' import { queryClient } from '../utils/queryClient' import { todoApi } from '../utils/api' import { AsyncStorageAdapter } from './AsyncStorageAdapter' -import type { Todo } from '../utils/api' -import type { PendingMutation } from '@tanstack/db' - -// Define schema -const todoSchema = z.object({ - id: z.string(), - text: z.string(), - completed: z.boolean(), - createdAt: z.date(), - updatedAt: z.date(), -}) - -// Create the todo collection with 
polling to sync changes from other devices -export const todoCollection = createCollection( - queryCollectionOptions({ - id: `todos-collection`, // Explicit ID to avoid crypto.randomUUID() on RN +import type { Collection, PendingMutation } from '@tanstack/db' + +export type Todo = { + id: string + text: string + completed: boolean + createdAt: string + updatedAt: string +} + +export type TodosHandle = { + collection: Collection + executor: ReturnType + close: () => void +} + +export function createTodos(): TodosHandle { + const database = open({ + name: `tanstack-db-demo.sqlite`, + location: `default`, + }) + + const persistence = createReactNativeSQLitePersistence({ database }) + + // Query collection options provide server sync (polling every 3s) + const queryOpts = queryCollectionOptions({ + id: `todos-collection`, queryClient, queryKey: [`todos`], queryFn: async (): Promise> => { const todos = await todoApi.getAll() - return todos + // Convert Date objects from API to ISO strings for SQLite storage + return todos.map((todo) => ({ + ...todo, + createdAt: todo.createdAt.toISOString(), + updatedAt: todo.updatedAt.toISOString(), + })) }, getKey: (item) => item.id, - schema: todoSchema, - // Poll every 3 seconds to sync changes from other devices refetchInterval: 3000, - }), -) - -// Sync function to push mutations to the "backend" -async function syncTodos({ - transaction, - idempotencyKey, -}: { - transaction: { mutations: Array } - idempotencyKey: string -}) { - const mutations = transaction.mutations - - console.log(`[Sync] Processing ${mutations.length} mutations`, idempotencyKey) - - for (const mutation of mutations) { - try { - switch (mutation.type) { - case `insert`: { - const todoData = mutation.modified as Todo - await todoApi.create({ - text: todoData.text, - completed: todoData.completed, - }) - break - } - - case `update`: { - const todoData = mutation.modified as Partial - const id = (mutation.modified as Todo).id - await todoApi.update(id, { - text: 
todoData.text, - completed: todoData.completed, - }) - break - } + }) - case `delete`: { - const id = (mutation.original as Todo).id - await todoApi.delete(id) - break + // Wrap query options with SQLite persistence — gives us both: + // 1. Server sync via polling (from queryCollectionOptions) + // 2. Local SQLite persistence (from persistedCollectionOptions) + const collection = createCollection( + persistedCollectionOptions({ + ...queryOpts, + persistence, + schemaVersion: 1, + }), + ) + + // Sync function to push mutations to the backend + async function syncTodos({ + transaction, + idempotencyKey, + }: { + transaction: { mutations: Array } + idempotencyKey: string + }) { + const mutations = transaction.mutations + + console.log( + `[Sync] Processing ${mutations.length} mutations`, + idempotencyKey, + ) + + for (const mutation of mutations) { + try { + switch (mutation.type) { + case `insert`: { + const todoData = mutation.modified as Todo + await todoApi.create({ + id: todoData.id, + text: todoData.text, + completed: todoData.completed, + }) + break + } + + case `update`: { + const todoData = mutation.modified as Partial + const id = (mutation.modified as Todo).id + await todoApi.update(id, { + text: todoData.text, + completed: todoData.completed, + }) + break + } + + case `delete`: { + const id = (mutation.original as Todo).id + await todoApi.delete(id) + break + } } + } catch (error) { + console.error(`[Sync] Error syncing mutation:`, mutation, error) + throw error } - } catch (error) { - console.error(`[Sync] Error syncing mutation:`, mutation, error) - throw error } - } - - // Refresh the collection after sync - await todoCollection.utils.refetch() -} -// Create the offline executor with React Native support -export function createOfflineExecutor() { - console.log(`[Offline] Creating executor with AsyncStorage adapter`) + // Refresh the collection after sync to pull latest server state + await collection.utils.refetch() + } const executor = 
startOfflineExecutor({ - collections: { todos: todoCollection }, + collections: { todos: collection }, storage: new AsyncStorageAdapter(`offline-todos:`), mutationFns: { syncTodos, @@ -103,59 +131,57 @@ export function createOfflineExecutor() { }) console.log(`[Offline] Executor mode:`, executor.mode) - console.log(`[Offline] Storage diagnostic:`, executor.storageDiagnostic) - return executor + return { + collection, + executor, + close: () => { + executor.dispose() + database.close() + }, + } } // Helper functions to create offline actions export function createTodoActions( - offline: ReturnType | null, + executor: TodosHandle[`executor`], + collection: Collection, ) { - if (!offline) { - return { - addTodo: null, - toggleTodo: null, - deleteTodo: null, - } - } - - const addTodoAction = offline.createOfflineAction({ + const addTodoAction = executor.createOfflineAction({ mutationFnName: `syncTodos`, onMutate: (text: string) => { - // Use Math.random based ID generation (crypto.randomUUID not available on RN Hermes) - const id = `${Date.now().toString(36)}-${Math.random().toString(36).substring(2, 10)}` - const newTodo = { - id, + const now = new Date().toISOString() + const newTodo: Todo = { + id: crypto.randomUUID(), text: text.trim(), completed: false, - createdAt: new Date(), - updatedAt: new Date(), + createdAt: now, + updatedAt: now, } - todoCollection.insert(newTodo) + collection.insert(newTodo) return newTodo }, }) - const toggleTodoAction = offline.createOfflineAction({ + const toggleTodoAction = executor.createOfflineAction({ mutationFnName: `syncTodos`, onMutate: (id: string) => { - const todo = todoCollection.get(id) + const todo = collection.get(id) if (!todo) return - todoCollection.update(id, (draft) => { + collection.update(id, (draft) => { draft.completed = !draft.completed - draft.updatedAt = new Date() + draft.updatedAt = new Date().toISOString() }) return todo }, }) - const deleteTodoAction = offline.createOfflineAction({ + const deleteTodoAction 
= executor.createOfflineAction({ mutationFnName: `syncTodos`, onMutate: (id: string) => { - const todo = todoCollection.get(id) + const todo = collection.get(id) if (todo) { - todoCollection.delete(id) + collection.delete(id) } return todo }, diff --git a/examples/react-native/offline-transactions/src/utils/api.ts b/examples/react-native/offline-transactions/src/utils/api.ts index c8ee50e1b..6a6430bb5 100644 --- a/examples/react-native/offline-transactions/src/utils/api.ts +++ b/examples/react-native/offline-transactions/src/utils/api.ts @@ -46,7 +46,11 @@ export const todoApi = { return data.map(parseTodo) }, - async create(data: { text: string; completed?: boolean }): Promise { + async create(data: { + id?: string + text: string + completed?: boolean + }): Promise { const response = await fetch(`${BASE_URL}/api/todos`, { method: 'POST', headers: { 'Content-Type': 'application/json' }, diff --git a/examples/react/offline-transactions/package.json b/examples/react/offline-transactions/package.json index badb36576..0a4835aef 100644 --- a/examples/react/offline-transactions/package.json +++ b/examples/react/offline-transactions/package.json @@ -9,9 +9,9 @@ "start": "node .output/server/index.mjs" }, "dependencies": { - "@tanstack/offline-transactions": "^1.0.21", - "@tanstack/query-db-collection": "^1.0.27", - "@tanstack/react-db": "^0.1.74", + "@tanstack/offline-transactions": "^1.0.23", + "@tanstack/query-db-collection": "^1.0.29", + "@tanstack/react-db": "^0.1.76", "@tanstack/react-query": "^5.90.20", "@tanstack/react-router": "^1.159.5", "@tanstack/react-router-devtools": "^1.159.5", diff --git a/examples/react/paced-mutations-demo/package.json b/examples/react/paced-mutations-demo/package.json index 88e1e0328..d4927215e 100644 --- a/examples/react/paced-mutations-demo/package.json +++ b/examples/react/paced-mutations-demo/package.json @@ -9,8 +9,8 @@ "preview": "vite preview" }, "dependencies": { - "@tanstack/db": "^0.5.30", - "@tanstack/react-db": "^0.1.74", + 
"@tanstack/db": "^0.5.32", + "@tanstack/react-db": "^0.1.76", "mitt": "^3.0.1", "react": "^19.2.4", "react-dom": "^19.2.4" diff --git a/examples/react/projects/package.json b/examples/react/projects/package.json index 226311efd..29d3d26e7 100644 --- a/examples/react/projects/package.json +++ b/examples/react/projects/package.json @@ -17,8 +17,8 @@ "dependencies": { "@tailwindcss/vite": "^4.1.18", "@tanstack/query-core": "^5.90.20", - "@tanstack/query-db-collection": "^1.0.27", - "@tanstack/react-db": "^0.1.74", + "@tanstack/query-db-collection": "^1.0.29", + "@tanstack/react-db": "^0.1.76", "@tanstack/react-router": "^1.159.5", "@tanstack/react-router-devtools": "^1.159.5", "@tanstack/react-router-with-query": "^1.130.17", diff --git a/examples/react/todo/package.json b/examples/react/todo/package.json index 6b781dc7a..1e2ddb5c7 100644 --- a/examples/react/todo/package.json +++ b/examples/react/todo/package.json @@ -3,13 +3,13 @@ "private": true, "version": "0.1.24", "dependencies": { - "@tanstack/electric-db-collection": "^0.2.37", + "@tanstack/electric-db-collection": "^0.2.40", "@tanstack/query-core": "^5.90.20", - "@tanstack/query-db-collection": "^1.0.27", - "@tanstack/react-db": "^0.1.74", + "@tanstack/query-db-collection": "^1.0.29", + "@tanstack/react-db": "^0.1.76", "@tanstack/react-router": "^1.159.5", "@tanstack/react-start": "^1.159.5", - "@tanstack/trailbase-db-collection": "^0.1.74", + "@tanstack/trailbase-db-collection": "^0.1.76", "cors": "^2.8.6", "drizzle-orm": "^0.45.1", "drizzle-zod": "^0.8.3", diff --git a/examples/solid/todo/package.json b/examples/solid/todo/package.json index a154f8838..9c51535f2 100644 --- a/examples/solid/todo/package.json +++ b/examples/solid/todo/package.json @@ -3,13 +3,13 @@ "private": true, "version": "0.0.34", "dependencies": { - "@tanstack/electric-db-collection": "^0.2.37", + "@tanstack/electric-db-collection": "^0.2.40", "@tanstack/query-core": "^5.90.20", - "@tanstack/query-db-collection": "^1.0.27", - 
"@tanstack/solid-db": "^0.2.10", + "@tanstack/query-db-collection": "^1.0.29", + "@tanstack/solid-db": "^0.2.12", "@tanstack/solid-router": "^1.159.5", "@tanstack/solid-start": "^1.159.5", - "@tanstack/trailbase-db-collection": "^0.1.74", + "@tanstack/trailbase-db-collection": "^0.1.76", "cors": "^2.8.6", "drizzle-orm": "^0.45.1", "drizzle-zod": "^0.8.3", diff --git a/package.json b/package.json index ff4f36257..b3877473f 100644 --- a/package.json +++ b/package.json @@ -28,7 +28,7 @@ "prepare": "husky", "test": "pnpm --filter \"./packages/**\" test", "test:docs": "node scripts/verify-links.ts", - "test:sherif": "sherif -i zod", + "test:sherif": "sherif -i zod -p offline-transactions-react-native", "generate-docs": "node scripts/generate-docs.ts" }, "devDependencies": { diff --git a/packages/angular-db/CHANGELOG.md b/packages/angular-db/CHANGELOG.md index c26efb16e..379b963f0 100644 --- a/packages/angular-db/CHANGELOG.md +++ b/packages/angular-db/CHANGELOG.md @@ -1,5 +1,21 @@ # @tanstack/angular-db +## 0.1.58 + +### Patch Changes + +- Updated dependencies [[`eeb5321`](https://github.com/TanStack/db/commit/eeb5321c578ffa2fbdfb7b0b3d64f579d1933522), [`495abc2`](https://github.com/TanStack/db/commit/495abc29fe8c088783b43402c7eeed35566d8524), [`a55e2bf`](https://github.com/TanStack/db/commit/a55e2bf54dbe78128adf5ce26d524a13dedf8145), [`41c0ea2`](https://github.com/TanStack/db/commit/41c0ea2d956f9de37d0216af371f58a461be6f1f)]: + - @tanstack/db@0.5.32 + +## 0.1.57 + +### Patch Changes + +- Add Intent agent skills (SKILL.md files) to guide AI coding agents. Include skills for core DB concepts, all 5 framework bindings, meta-framework integration, and offline transactions. Also add `export * from '@tanstack/db'` to angular-db for consistency with other framework packages. 
([#1330](https://github.com/TanStack/db/pull/1330)) + +- Updated dependencies [[`bf1d078`](https://github.com/TanStack/db/commit/bf1d078627de150bfca02e2ae2ad8b0289c19b37)]: + - @tanstack/db@0.5.31 + ## 0.1.56 ### Patch Changes diff --git a/packages/angular-db/package.json b/packages/angular-db/package.json index bb0daaa5e..575215dae 100644 --- a/packages/angular-db/package.json +++ b/packages/angular-db/package.json @@ -1,6 +1,6 @@ { "name": "@tanstack/angular-db", - "version": "0.1.56", + "version": "0.1.58", "description": "Angular integration for @tanstack/db", "author": "Ethan McDaniel", "license": "MIT", @@ -41,7 +41,8 @@ "sideEffects": false, "files": [ "dist", - "src" + "src", + "skills" ], "dependencies": { "@tanstack/db": "workspace:*" diff --git a/packages/angular-db/skills/angular-db/SKILL.md b/packages/angular-db/skills/angular-db/SKILL.md new file mode 100644 index 000000000..e595520f0 --- /dev/null +++ b/packages/angular-db/skills/angular-db/SKILL.md @@ -0,0 +1,265 @@ +--- +name: angular-db +description: > + Angular bindings for TanStack DB. injectLiveQuery inject function with + Angular signals (Signal) for all return values. Reactive params pattern + ({ params: () => T, query: ({ params, q }) => QueryBuilder }) for dynamic + queries. Must be called in injection context. Angular 17+ control flow + (@if, @for) and signal inputs supported. Import from + @tanstack/angular-db (re-exports all of @tanstack/db). +type: framework +library: db +framework: angular +library_version: '0.5.30' +requires: + - db-core +sources: + - 'TanStack/db:docs/framework/angular/overview.md' + - 'TanStack/db:packages/angular-db/src/index.ts' +--- + +This skill builds on db-core. Read it first for collection setup, query builder, and mutation patterns. 
+ +# TanStack DB — Angular + +## Setup + +```typescript +import { Component } from '@angular/core' +import { injectLiveQuery, eq, not } from '@tanstack/angular-db' + +@Component({ + selector: 'app-todo-list', + standalone: true, + template: ` + @if (query.isLoading()) { +
Loading...
+ } @else { +
    + @for (todo of query.data(); track todo.id) { +
  • {{ todo.text }}
  • + } +
+ } + `, +}) +export class TodoListComponent { + query = injectLiveQuery((q) => + q + .from({ todos: todosCollection }) + .where(({ todos }) => not(todos.completed)) + .orderBy(({ todos }) => todos.created_at, 'asc'), + ) +} +``` + +`@tanstack/angular-db` re-exports everything from `@tanstack/db`. + +## Inject Function + +### injectLiveQuery + +Returns an object with Angular `Signal` properties — call with `()` in templates: + +```typescript +// Static query — no reactive dependencies +const query = injectLiveQuery((q) => q.from({ todo: todoCollection })) +// query.data() → Array +// query.status() → CollectionStatus | 'disabled' +// query.isLoading(), query.isReady(), query.isError() +// query.isIdle(), query.isCleanedUp() (seldom used) +// query.state() → Map +// query.collection() → Collection | null + +// Reactive params — re-runs when params change +const query = injectLiveQuery({ + params: () => ({ minPriority: this.minPriority() }), + query: ({ params, q }) => + q + .from({ todo: todoCollection }) + .where(({ todo }) => gt(todo.priority, params.minPriority)), +}) + +// Config object +const query = injectLiveQuery({ + query: (q) => q.from({ todo: todoCollection }), + gcTime: 60000, +}) + +// Pre-created collection +const query = injectLiveQuery(preloadedCollection) + +// Conditional query — return undefined/null to disable +const query = injectLiveQuery({ + params: () => ({ userId: this.userId() }), + query: ({ params, q }) => { + if (!params.userId) return undefined + return q + .from({ todo: todoCollection }) + .where(({ todo }) => eq(todo.userId, params.userId)) + }, +}) +``` + +## Angular-Specific Patterns + +### Reactive params with signals + +```typescript +@Component({ + selector: 'app-filtered-todos', + standalone: true, + template: `
{{ query.data().length }} todos
`, +}) +export class FilteredTodosComponent { + minPriority = signal(5) + + query = injectLiveQuery({ + params: () => ({ minPriority: this.minPriority() }), + query: ({ params, q }) => + q + .from({ todos: todosCollection }) + .where(({ todos }) => gt(todos.priority, params.minPriority)), + }) +} +``` + +When `params()` return value changes, the previous collection is disposed and a new query is created. + +### Signal inputs (Angular 17+) + +```typescript +@Component({ + selector: 'app-user-todos', + standalone: true, + template: `
{{ query.data().length }} todos
`, +}) +export class UserTodosComponent { + userId = input.required() + + query = injectLiveQuery({ + params: () => ({ userId: this.userId() }), + query: ({ params, q }) => + q + .from({ todo: todoCollection }) + .where(({ todo }) => eq(todo.userId, params.userId)), + }) +} +``` + +### Legacy @Input (Angular 16) + +```typescript +export class UserTodosComponent { + @Input({ required: true }) userId!: number + + query = injectLiveQuery({ + params: () => ({ userId: this.userId }), + query: ({ params, q }) => + q + .from({ todo: todoCollection }) + .where(({ todo }) => eq(todo.userId, params.userId)), + }) +} +``` + +### Template syntax + +Angular 17+ control flow: + +```html +@if (query.isLoading()) { +
Loading...
+} @else { @for (todo of query.data(); track todo.id) { +
  • {{ todo.text }}
  • +} } +``` + +Angular 16 structural directives: + +```html +
    Loading...
    +
  • {{ todo.text }}
  • +``` + +## Common Mistakes + +### CRITICAL Using injectLiveQuery outside injection context + +Wrong: + +```typescript +export class TodoComponent { + ngOnInit() { + this.query = injectLiveQuery((q) => q.from({ todo: todoCollection })) + } +} +``` + +Correct: + +```typescript +export class TodoComponent { + query = injectLiveQuery((q) => q.from({ todo: todoCollection })) +} +``` + +`injectLiveQuery` calls `assertInInjectionContext` internally — it must be called during construction (field initializer or constructor), not in lifecycle hooks. + +Source: packages/angular-db/src/index.ts + +### HIGH Using query function for reactive values instead of params + +Wrong: + +```typescript +export class FilteredComponent { + status = signal('active') + + query = injectLiveQuery((q) => + q + .from({ todo: todoCollection }) + .where(({ todo }) => eq(todo.status, this.status())), + ) +} +``` + +Correct: + +```typescript +export class FilteredComponent { + status = signal('active') + + query = injectLiveQuery({ + params: () => ({ status: this.status() }), + query: ({ params, q }) => + q + .from({ todo: todoCollection }) + .where(({ todo }) => eq(todo.status, params.status)), + }) +} +``` + +The plain query function overload does not track Angular signal reads. Use the `params` pattern to make reactive values trigger query re-creation. + +Source: packages/angular-db/src/index.ts + +### MEDIUM Forgetting to call signals in templates + +Wrong: + +```html +
    {{ query.data.length }}
    +``` + +Correct: + +```html +
    {{ query.data().length }}
    +``` + +All return values are Angular signals. Without `()`, you get the signal object, not the value. + +See also: db-core/live-queries/SKILL.md — for query builder API. + +See also: db-core/mutations-optimistic/SKILL.md — for mutation patterns. diff --git a/packages/angular-db/src/index.ts b/packages/angular-db/src/index.ts index 329d08800..622ace95d 100644 --- a/packages/angular-db/src/index.ts +++ b/packages/angular-db/src/index.ts @@ -23,6 +23,8 @@ import type { } from '@tanstack/db' import type { Signal } from '@angular/core' +export * from '@tanstack/db' + /** * The result of calling `injectLiveQuery`. * Contains reactive signals for the query state and data. diff --git a/packages/db-collection-e2e/src/suites/joins.suite.ts b/packages/db-collection-e2e/src/suites/joins.suite.ts index 5f25bb20b..2a953d38c 100644 --- a/packages/db-collection-e2e/src/suites/joins.suite.ts +++ b/packages/db-collection-e2e/src/suites/joins.suite.ts @@ -24,9 +24,9 @@ export function createJoinsTestSuite(getConfig: () => Promise) { eq(user.id, post.userId), ) .select(({ user, post }) => ({ - id: post!.id, + id: post.id, userName: user.name, - postTitle: post!.title, + postTitle: post.title, })), ) @@ -53,12 +53,12 @@ export function createJoinsTestSuite(getConfig: () => Promise) { .join({ post: postsCollection }, ({ user, post }) => eq(user.id, post.userId), ) - .where(({ post }) => gt(post!.viewCount, 10)) + .where(({ post }) => gt(post.viewCount, 10)) .select(({ user, post }) => ({ - id: post!.id, + id: post.id, userName: user.name, - postTitle: post!.title, - viewCount: post!.viewCount, + postTitle: post.title, + viewCount: post.viewCount, })), ) @@ -88,9 +88,9 @@ export function createJoinsTestSuite(getConfig: () => Promise) { eq(user.id, post.userId), ) .select(({ user, post }) => ({ - id: post!.id, + id: post.id, userName: user.name, - postTitle: post!.title, + postTitle: post.title, })), ) @@ -117,12 +117,12 @@ export function createJoinsTestSuite(getConfig: () => 
Promise) { .join({ post: postsCollection }, ({ user, post }) => eq(user.id, post.userId), ) - .orderBy(({ post }) => post!.viewCount, `desc`) + .orderBy(({ post }) => post.viewCount, `desc`) .select(({ user, post }) => ({ - id: post!.id, + id: post.id, userName: user.name, - postTitle: post!.title, - viewCount: post!.viewCount, + postTitle: post.title, + viewCount: post.viewCount, })), ) @@ -149,7 +149,7 @@ export function createJoinsTestSuite(getConfig: () => Promise) { for (let i = 1; i < results.length; i++) { const prevCount = results[i - 1]!.viewCount const currCount = results[i]!.viewCount - expect(prevCount).toBeGreaterThanOrEqual(currCount) + expect(prevCount!).toBeGreaterThanOrEqual(currCount!) } await query.cleanup() @@ -166,13 +166,13 @@ export function createJoinsTestSuite(getConfig: () => Promise) { .join({ post: postsCollection }, ({ user, post }) => eq(user.id, post.userId), ) - .orderBy(({ post }) => post!.id, `asc`) + .orderBy(({ post }) => post.id, `asc`) .limit(10) .offset(5) .select(({ user, post }) => ({ - id: post!.id, + id: post.id, userName: user.name, - postTitle: post!.title, + postTitle: post.title, })), ) @@ -194,13 +194,13 @@ export function createJoinsTestSuite(getConfig: () => Promise) { .from({ user: users }) .join({ post: posts }, ({ user, post }) => eq(user.id, post.userId)) .join({ comment: comments }, ({ post, comment }) => - eq(post!.id, comment.postId), + eq(post.id, comment.postId), ) .select(({ user, post, comment }) => ({ - id: comment!.id, + id: comment.id, userName: user.name, - postTitle: post!.title, - commentText: comment!.text, + postTitle: post.title, + commentText: comment.text, })), ) @@ -225,16 +225,16 @@ export function createJoinsTestSuite(getConfig: () => Promise) { .from({ user: users }) .where(({ user }) => eq(user.isActive, true)) .join({ post: posts }, ({ user, post }) => eq(user.id, post.userId)) - .where(({ post }) => isNull(post!.deletedAt)) + .where(({ post }) => isNull(post.deletedAt)) .join({ comment: 
comments }, ({ post, comment }) => - eq(post!.id, comment.postId), + eq(post.id, comment.postId), ) - .where(({ comment }) => isNull(comment!.deletedAt)) + .where(({ comment }) => isNull(comment.deletedAt)) .select(({ user, post, comment }) => ({ - id: comment!.id, + id: comment.id, userName: user.name, - postTitle: post!.title, - commentText: comment!.text, + postTitle: post.title, + commentText: comment.text, })), ) @@ -264,13 +264,13 @@ export function createJoinsTestSuite(getConfig: () => Promise) { eq(user.id, post.userId), ) .join({ comment: commentsOnDemand }, ({ post, comment }) => - eq(post!.id, comment.postId), + eq(post.id, comment.postId), ) .select(({ user, post, comment }) => ({ - id: comment!.id, + id: comment.id, userName: user.name, - postTitle: post!.title, - commentText: comment!.text, + postTitle: post.title, + commentText: comment.text, })), ) @@ -296,12 +296,12 @@ export function createJoinsTestSuite(getConfig: () => Promise) { q .from({ user: users }) .join({ post: posts }, ({ user, post }) => eq(user.id, post.userId)) - .where(({ post }) => gt(post!.viewCount, 50)) + .where(({ post }) => gt(post.viewCount, 50)) .select(({ user, post }) => ({ - id: post!.id, + id: post.id, userName: user.name, - postTitle: post!.title, - viewCount: post!.viewCount, + postTitle: post.title, + viewCount: post.viewCount, })), ) @@ -326,10 +326,10 @@ export function createJoinsTestSuite(getConfig: () => Promise) { .where(({ user }) => gt(user.age, 30)) .join({ post: posts }, ({ user, post }) => eq(user.id, post.userId)) .select(({ user, post }) => ({ - id: post!.id, + id: post.id, userName: user.name, userAge: user.age, - postTitle: post!.title, + postTitle: post.title, })), ) @@ -359,7 +359,7 @@ export function createJoinsTestSuite(getConfig: () => Promise) { .select(({ user, post }) => ({ id: user.id, userName: user.name, - postTitle: post!.title, // May be null for users without posts + postTitle: post.title, // May be null for users without posts })), ) diff 
--git a/packages/db-collection-e2e/support/global-setup.ts b/packages/db-collection-e2e/support/global-setup.ts index 57fa7663b..667e4e2e4 100644 --- a/packages/db-collection-e2e/support/global-setup.ts +++ b/packages/db-collection-e2e/support/global-setup.ts @@ -1,5 +1,6 @@ +import { Client } from 'pg' +import type { ClientConfig } from 'pg' import type { GlobalSetupContext } from 'vitest/node' -import { Client, type ClientConfig } from 'pg' const ELECTRIC_URL = process.env.ELECTRIC_URL ?? 'http://localhost:3000' const POSTGRES_HOST = process.env.POSTGRES_HOST ?? 'localhost' diff --git a/packages/db-collection-e2e/support/test-context.ts b/packages/db-collection-e2e/support/test-context.ts index aab8f787f..17bb81f67 100644 --- a/packages/db-collection-e2e/support/test-context.ts +++ b/packages/db-collection-e2e/support/test-context.ts @@ -1,8 +1,8 @@ -import { test, inject } from 'vitest' -import { Client } from 'pg' +import { inject, test } from 'vitest' +import { generateSeedData } from '../src/fixtures/seed-data' import { makePgClient } from './global-setup' +import type { Client } from 'pg' import type { SeedDataResult } from '../src/types' -import { generateSeedData } from '../src/fixtures/seed-data' /** * Base fixture with database client and abort controller @@ -14,6 +14,7 @@ export const testWithDb = test.extend<{ testSchema: string tableName: (base: string) => string }>({ + // eslint-disable-next-line no-empty-pattern dbClient: async ({}, use) => { const schema = inject('testSchema') const client = makePgClient({ @@ -28,16 +29,19 @@ export const testWithDb = test.extend<{ await client.end() }, + // eslint-disable-next-line no-empty-pattern aborter: async ({}, use) => { const controller = new AbortController() await use(controller) controller.abort('Test complete') }, + // eslint-disable-next-line no-empty-pattern baseUrl: async ({}, use) => { await use(inject('baseUrl')) }, + // eslint-disable-next-line no-empty-pattern testSchema: async ({}, use) => { 
await use(inject('testSchema')) }, @@ -63,7 +67,7 @@ export const testWithTables = testWithDb.extend<{ }>({ usersTable: async ({ dbClient, tableName, task }, use) => { const name = tableName('users') - const taskFile = task.file?.name.replace(/'/g, '`') ?? 'unknown' + const taskFile = task.file.name.replace(/'/g, '`') const taskName = task.name.replace(/'/g, '`') await dbClient.query(` @@ -90,9 +94,10 @@ export const testWithTables = testWithDb.extend<{ } }, + // eslint-disable-next-line @typescript-eslint/no-unused-vars postsTable: async ({ dbClient, tableName, usersTable, task }, use) => { const name = tableName('posts') - const taskFile = task.file?.name.replace(/'/g, '`') ?? 'unknown' + const taskFile = task.file.name.replace(/'/g, '`') const taskName = task.name.replace(/'/g, '`') await dbClient.query(` @@ -119,11 +124,12 @@ export const testWithTables = testWithDb.extend<{ }, commentsTable: async ( + // eslint-disable-next-line @typescript-eslint/no-unused-vars { dbClient, tableName, postsTable, usersTable, task }, use, ) => { const name = tableName('comments') - const taskFile = task.file?.name.replace(/'/g, '`') ?? 'unknown' + const taskFile = task.file.name.replace(/'/g, '`') const taskName = task.name.replace(/'/g, '`') await dbClient.query(` @@ -171,6 +177,7 @@ export const testWithSeedData = testWithTables.extend<{ seedData: SeedDataResult insertSeedData: () => Promise }>({ + // eslint-disable-next-line no-empty-pattern seedData: async ({}, use) => { const seed = generateSeedData() await use(seed) diff --git a/packages/db-electron-sqlite-persisted-collection/README.md b/packages/db-electron-sqlite-persisted-collection/README.md new file mode 100644 index 000000000..d439d36ec --- /dev/null +++ b/packages/db-electron-sqlite-persisted-collection/README.md @@ -0,0 +1,71 @@ +# @tanstack/db-electron-sqlite-persisted-collection + +Thin Electron bridge for TanStack DB SQLite persistence. 
+ +## Public API + +- `exposeElectronSQLitePersistence(...)` (main process) +- `createElectronSQLitePersistence(...)` (renderer process) +- `persistedCollectionOptions(...)` (re-exported from core) + +Use `@tanstack/db-electron-sqlite-persisted-collection/main` and +`@tanstack/db-electron-sqlite-persisted-collection/renderer` if you prefer +explicit process-specific entrypoints. + +## Main process + +```ts +import { ipcMain } from 'electron' +import { createNodeSQLitePersistence } from '@tanstack/db-node-sqlite-persisted-collection' +import { exposeElectronSQLitePersistence } from '@tanstack/db-electron-sqlite-persisted-collection/main' +import Database from 'better-sqlite3' + +const database = new Database(`./tanstack-db.sqlite`) + +const persistence = createNodeSQLitePersistence({ + database, +}) + +const dispose = exposeElectronSQLitePersistence({ + ipcMain, + persistence, +}) + +// Call dispose() and database.close() during shutdown. +``` + +## Renderer process + +```ts +import { createCollection } from '@tanstack/db' +import { ipcRenderer } from 'electron' +import { + createElectronSQLitePersistence, + persistedCollectionOptions, +} from '@tanstack/db-electron-sqlite-persisted-collection' + +type Todo = { + id: string + title: string + completed: boolean +} + +const persistence = createElectronSQLitePersistence({ + ipcRenderer, +}) + +export const todosCollection = createCollection( + persistedCollectionOptions({ + id: `todos`, + getKey: (todo) => todo.id, + persistence, + schemaVersion: 1, // Per-collection schema version + }), +) +``` + +## Notes + +- The renderer API mirrors other runtimes: one shared `create...Persistence`. +- Collection mode (`sync-present` vs `sync-absent`) and `schemaVersion` are + resolved per collection and forwarded across IPC automatically. 
diff --git a/packages/db-electron-sqlite-persisted-collection/package.json b/packages/db-electron-sqlite-persisted-collection/package.json new file mode 100644 index 000000000..3e39b25a2 --- /dev/null +++ b/packages/db-electron-sqlite-persisted-collection/package.json @@ -0,0 +1,80 @@ +{ + "name": "@tanstack/db-electron-sqlite-persisted-collection", + "version": "0.1.0", + "description": "Electron SQLite persisted collection bridge for TanStack DB", + "author": "TanStack Team", + "license": "MIT", + "repository": { + "type": "git", + "url": "git+https://github.com/TanStack/db.git", + "directory": "packages/db-electron-sqlite-persisted-collection" + }, + "homepage": "https://tanstack.com/db", + "keywords": [ + "sqlite", + "electron", + "ipc", + "persistence", + "typescript" + ], + "scripts": { + "build": "vite build", + "dev": "vite build --watch", + "lint": "eslint . --fix", + "test": "vitest --run", + "test:e2e": "pnpm --filter @tanstack/db-ivm build && pnpm --filter @tanstack/db build && pnpm --filter @tanstack/db-sqlite-persisted-collection-core build && pnpm --filter @tanstack/db-node-sqlite-persisted-collection build && pnpm --filter @tanstack/db-electron-sqlite-persisted-collection build && vitest --config vitest.e2e.config.ts --run", + "test:e2e:all": "pnpm --filter @tanstack/db-ivm build && pnpm --filter @tanstack/db build && pnpm --filter @tanstack/db-sqlite-persisted-collection-core build && pnpm --filter @tanstack/db-node-sqlite-persisted-collection build && pnpm --filter @tanstack/db-electron-sqlite-persisted-collection build && TANSTACK_DB_ELECTRON_E2E_ALL=1 vitest --run && TANSTACK_DB_ELECTRON_E2E_ALL=1 vitest --config vitest.e2e.config.ts --run" + }, + "type": "module", + "main": "dist/cjs/index.cjs", + "module": "dist/esm/index.js", + "types": "dist/esm/index.d.ts", + "exports": { + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/cjs/index.d.cts", + "default": 
"./dist/cjs/index.cjs" + } + }, + "./main": { + "import": { + "types": "./dist/esm/main.d.ts", + "default": "./dist/esm/main.js" + }, + "require": { + "types": "./dist/cjs/main.d.cts", + "default": "./dist/cjs/main.cjs" + } + }, + "./renderer": { + "import": { + "types": "./dist/esm/renderer.d.ts", + "default": "./dist/esm/renderer.js" + }, + "require": { + "types": "./dist/cjs/renderer.d.cts", + "default": "./dist/cjs/renderer.cjs" + } + }, + "./package.json": "./package.json" + }, + "sideEffects": false, + "files": [ + "dist", + "src" + ], + "dependencies": { + "@tanstack/db-sqlite-persisted-collection-core": "workspace:*" + }, + "peerDependencies": { + "typescript": ">=4.7" + }, + "devDependencies": { + "@vitest/coverage-istanbul": "^3.2.4", + "electron": "^40.2.1" + } +} diff --git a/packages/db-electron-sqlite-persisted-collection/src/electron-coordinator.ts b/packages/db-electron-sqlite-persisted-collection/src/electron-coordinator.ts new file mode 100644 index 000000000..b8e93f3bb --- /dev/null +++ b/packages/db-electron-sqlite-persisted-collection/src/electron-coordinator.ts @@ -0,0 +1,842 @@ +import type { + ApplyLocalMutationsResponse, + PersistedCollectionCoordinator, + PersistedIndexSpec, + PersistedMutationEnvelope, + PersistenceAdapter, + ProtocolEnvelope, + PullSinceResponse, +} from '@tanstack/db-sqlite-persisted-collection-core' +import type { LoadSubsetOptions } from '@tanstack/db' + +// --------------------------------------------------------------------------- +// Constants +// --------------------------------------------------------------------------- + +const HEARTBEAT_INTERVAL_MS = 3_000 +const RPC_TIMEOUT_MS = 10_000 +const RPC_RETRY_ATTEMPTS = 2 +const RPC_RETRY_DELAY_MS = 200 +const WRITER_LOCK_BUSY_RETRY_MS = 50 +const WRITER_LOCK_MAX_RETRIES = 20 + +// --------------------------------------------------------------------------- +// Internal types +// --------------------------------------------------------------------------- + +type 
RPCRequest = + | { + type: `rpc:ensureRemoteSubset:req` + rpcId: string + options: LoadSubsetOptions + } + | { + type: `rpc:ensurePersistedIndex:req` + rpcId: string + signature: string + spec: PersistedIndexSpec + } + | { + type: `rpc:applyLocalMutations:req` + rpcId: string + envelopeId: string + mutations: Array + } + | { + type: `rpc:pullSince:req` + rpcId: string + fromRowVersion: number + } + +type RPCResponse = + | { + type: `rpc:ensureRemoteSubset:res` + rpcId: string + ok: boolean + error?: string + } + | { + type: `rpc:ensurePersistedIndex:res` + rpcId: string + ok: boolean + error?: string + } + | ApplyLocalMutationsResponse + | PullSinceResponse + +type PendingRPC = { + resolve: (response: RPCResponse) => void + reject: (error: Error) => void + timer: ReturnType +} + +type CollectionState = { + isLeader: boolean + lockAbortController: AbortController | null + heartbeatTimer: ReturnType | null + latestTerm: number + latestSeq: number + latestRowVersion: number + subscribers: Set<(message: ProtocolEnvelope) => void> +} + +// Adapter with pullSince support +type AdapterWithPullSince = PersistenceAdapter< + Record, + string | number +> & { + pullSince?: ( + collectionId: string, + fromRowVersion: number, + ) => Promise< + | { + latestRowVersion: number + requiresFullReload: true + } + | { + latestRowVersion: number + requiresFullReload: false + changedKeys: Array + deletedKeys: Array + } + > + getStreamPosition?: (collectionId: string) => Promise<{ + latestTerm: number + latestSeq: number + latestRowVersion: number + }> +} + +// --------------------------------------------------------------------------- +// Options +// --------------------------------------------------------------------------- + +export type ElectronCollectionCoordinatorOptions = { + dbName: string + adapter?: AdapterWithPullSince +} + +// --------------------------------------------------------------------------- +// ElectronCollectionCoordinator +// 
--------------------------------------------------------------------------- + +export class ElectronCollectionCoordinator implements PersistedCollectionCoordinator { + private readonly nodeId = crypto.randomUUID() + private readonly dbName: string + private adapter: AdapterWithPullSince | null + private readonly channel: BroadcastChannel + private readonly collections = new Map() + private readonly pendingRPCs = new Map() + private readonly appliedEnvelopeIds = new Map() + private disposed = false + + /** Method indirection to prevent TypeScript from narrowing `disposed` across awaits */ + private isDisposed(): boolean { + return this.disposed + } + + private requireAdapter(): AdapterWithPullSince { + if (!this.adapter) { + throw new Error( + `ElectronCollectionCoordinator: adapter not set. Call setAdapter() before using leader-side operations.`, + ) + } + return this.adapter + } + + constructor(options: ElectronCollectionCoordinatorOptions) { + this.dbName = options.dbName + this.adapter = options.adapter ?? null + this.channel = new BroadcastChannel(`tsdb:coord:${this.dbName}`) + this.channel.onmessage = (event: MessageEvent) => { + this.onChannelMessage(event.data) + } + } + + /** + * Set or replace the persistence adapter used for leader-side RPC handling. + * Called by `createElectronSQLitePersistence` to wire the internally-created + * adapter into the coordinator. 
+ */ + setAdapter(adapter: AdapterWithPullSince): void { + this.adapter = adapter + } + + // ----------------------------------------------------------------------- + // PersistedCollectionCoordinator interface + // ----------------------------------------------------------------------- + + getNodeId(): string { + return this.nodeId + } + + subscribe( + collectionId: string, + onMessage: (message: ProtocolEnvelope) => void, + ): () => void { + const state = this.ensureCollectionState(collectionId) + state.subscribers.add(onMessage) + return () => { + state.subscribers.delete(onMessage) + } + } + + publish(_collectionId: string, message: ProtocolEnvelope): void { + this.channel.postMessage(message) + } + + isLeader(collectionId: string): boolean { + return this.collections.get(collectionId)?.isLeader ?? false + } + + async ensureLeadership(collectionId: string): Promise { + const state = this.ensureCollectionState(collectionId) + if (state.isLeader) return + await this.acquireLeadership(collectionId, state) + } + + async requestEnsureRemoteSubset( + collectionId: string, + options: LoadSubsetOptions, + ): Promise { + if (this.isLeader(collectionId)) return + + const response = await this.sendRPC<{ + type: `rpc:ensureRemoteSubset:res` + rpcId: string + ok: boolean + error?: string + }>(collectionId, { + type: `rpc:ensureRemoteSubset:req`, + rpcId: crypto.randomUUID(), + options, + }) + + if (!response.ok) { + throw new Error( + `ensureRemoteSubset failed: ${response.error ?? 
`unknown error`}`, + ) + } + } + + async requestEnsurePersistedIndex( + collectionId: string, + signature: string, + spec: PersistedIndexSpec, + ): Promise { + if (this.isLeader(collectionId)) { + await this.requireAdapter().ensureIndex(collectionId, signature, spec) + return + } + + const response = await this.sendRPC<{ + type: `rpc:ensurePersistedIndex:res` + rpcId: string + ok: boolean + error?: string + }>(collectionId, { + type: `rpc:ensurePersistedIndex:req`, + rpcId: crypto.randomUUID(), + signature, + spec, + }) + + if (!response.ok) { + throw new Error( + `ensurePersistedIndex failed: ${response.error ?? `unknown error`}`, + ) + } + } + + async requestApplyLocalMutations( + collectionId: string, + mutations: Array, + ): Promise { + if (this.isLeader(collectionId)) { + return this.handleApplyLocalMutations(collectionId, { + type: `rpc:applyLocalMutations:req`, + rpcId: crypto.randomUUID(), + envelopeId: crypto.randomUUID(), + mutations, + }) + } + + return this.sendRPC(collectionId, { + type: `rpc:applyLocalMutations:req`, + rpcId: crypto.randomUUID(), + envelopeId: crypto.randomUUID(), + mutations, + }) + } + + async pullSince( + collectionId: string, + fromRowVersion: number, + ): Promise { + if (this.isLeader(collectionId)) { + return this.handlePullSince(collectionId, { + type: `rpc:pullSince:req`, + rpcId: crypto.randomUUID(), + fromRowVersion, + }) + } + + return this.sendRPC(collectionId, { + type: `rpc:pullSince:req`, + rpcId: crypto.randomUUID(), + fromRowVersion, + }) + } + + // ----------------------------------------------------------------------- + // Lifecycle + // ----------------------------------------------------------------------- + + dispose(): void { + this.disposed = true + + for (const [collectionId, state] of this.collections) { + this.releaseLeadership(collectionId, state) + } + + for (const [, pending] of this.pendingRPCs) { + clearTimeout(pending.timer) + pending.reject(new Error(`coordinator disposed`)) + } + 
this.pendingRPCs.clear() + + this.channel.close() + this.collections.clear() + } + + // ----------------------------------------------------------------------- + // Leadership via Web Locks + // ----------------------------------------------------------------------- + + private ensureCollectionState(collectionId: string): CollectionState { + let state = this.collections.get(collectionId) + if (!state) { + state = { + isLeader: false, + lockAbortController: null, + heartbeatTimer: null, + latestTerm: 0, + latestSeq: 0, + latestRowVersion: 0, + subscribers: new Set(), + } + this.collections.set(collectionId, state) + void this.acquireLeadership(collectionId, state) + } + return state + } + + private async acquireLeadership( + collectionId: string, + state: CollectionState, + ): Promise { + if (this.disposed || state.isLeader) return + + const lockName = `tsdb:leader:${this.dbName}:${collectionId}` + const abortController = new AbortController() + state.lockAbortController = abortController + + try { + await navigator.locks.request( + lockName, + { signal: abortController.signal }, + async () => { + if (this.isDisposed()) return + + try { + // Restore stream position from DB before claiming leadership + const adapter = this.requireAdapter() + if (adapter.getStreamPosition) { + const pos = await adapter.getStreamPosition(collectionId) + state.latestTerm = pos.latestTerm + state.latestSeq = pos.latestSeq + state.latestRowVersion = pos.latestRowVersion + } + + state.latestTerm++ + state.isLeader = true + + this.emitHeartbeat(collectionId, state) + state.heartbeatTimer = setInterval(() => { + this.emitHeartbeat(collectionId, state) + }, HEARTBEAT_INTERVAL_MS) + + // Hold the lock until disposed or aborted + await new Promise((resolve) => { + const onAbort = () => { + abortController.signal.removeEventListener(`abort`, onAbort) + resolve() + } + if (abortController.signal.aborted) { + resolve() + return + } + abortController.signal.addEventListener(`abort`, onAbort) + }) + 
} finally { + state.isLeader = false + if (state.heartbeatTimer) { + clearInterval(state.heartbeatTimer) + state.heartbeatTimer = null + } + } + }, + ) + } catch (error) { + if (error instanceof DOMException && error.name === `AbortError`) { + return + } + console.warn(`Failed to acquire leadership for ${collectionId}:`, error) + } + + // Re-acquire if not disposed (leadership was released by another means) + if (!this.isDisposed()) { + void this.acquireLeadership(collectionId, state) + } + } + + private releaseLeadership( + _collectionId: string, + state: CollectionState, + ): void { + if (state.lockAbortController) { + state.lockAbortController.abort() + state.lockAbortController = null + } + if (state.heartbeatTimer) { + clearInterval(state.heartbeatTimer) + state.heartbeatTimer = null + } + state.isLeader = false + } + + private emitHeartbeat(collectionId: string, state: CollectionState): void { + const envelope: ProtocolEnvelope = { + v: 1, + dbName: this.dbName, + collectionId, + senderId: this.nodeId, + ts: Date.now(), + payload: { + type: `leader:heartbeat`, + term: state.latestTerm, + leaderId: this.nodeId, + latestSeq: state.latestSeq, + latestRowVersion: state.latestRowVersion, + }, + } + this.channel.postMessage(envelope) + } + + // ----------------------------------------------------------------------- + // BroadcastChannel message handling + // ----------------------------------------------------------------------- + + private onChannelMessage(data: unknown): void { + if (!isProtocolEnvelope(data)) return + + const envelope = data + + // Ignore own messages + if (envelope.senderId === this.nodeId) return + + const payload = envelope.payload + if (!payload || typeof payload !== `object`) return + + const type = (payload as Record).type as string | undefined + + // Handle RPC responses (for pending outbound RPCs) + if (type && type.endsWith(`:res`)) { + const rpcId = (payload as { rpcId?: string }).rpcId + if (rpcId && this.pendingRPCs.has(rpcId)) { + 
const pending = this.pendingRPCs.get(rpcId)! + this.pendingRPCs.delete(rpcId) + clearTimeout(pending.timer) + pending.resolve(payload as RPCResponse) + return + } + } + + // Handle RPC requests (leader only) + if (type && type.endsWith(`:req`)) { + const collectionId = envelope.collectionId + if (this.isLeader(collectionId)) { + void this.handleRPCRequest(collectionId, payload as RPCRequest) + } + return + } + + // Forward protocol messages to subscribers + const state = this.collections.get(envelope.collectionId) + if (state) { + for (const subscriber of state.subscribers) { + subscriber(envelope) + } + } + } + + // ----------------------------------------------------------------------- + // RPC - Outbound (follower side) + // ----------------------------------------------------------------------- + + private async sendRPC( + collectionId: string, + request: RPCRequest, + ): Promise { + let lastError: Error | undefined + + for (let attempt = 0; attempt <= RPC_RETRY_ATTEMPTS; attempt++) { + if (attempt > 0) { + await sleep(RPC_RETRY_DELAY_MS * attempt) + } + + try { + return await this.sendRPCOnce(collectionId, request) + } catch (error) { + lastError = error instanceof Error ? error : new Error(String(error)) + } + } + + throw lastError ?? 
new Error(`RPC failed after retries`) + } + + private sendRPCOnce( + collectionId: string, + request: RPCRequest, + ): Promise { + return new Promise((resolve, reject) => { + const rpcId = request.rpcId + + const timer = setTimeout(() => { + this.pendingRPCs.delete(rpcId) + reject( + new Error(`RPC ${request.type} timed out after ${RPC_TIMEOUT_MS}ms`), + ) + }, RPC_TIMEOUT_MS) + + this.pendingRPCs.set(rpcId, { + resolve: resolve as (response: RPCResponse) => void, + reject, + timer, + }) + + const envelope: ProtocolEnvelope = { + v: 1, + dbName: this.dbName, + collectionId, + senderId: this.nodeId, + ts: Date.now(), + payload: request, + } + this.channel.postMessage(envelope) + }) + } + + // ----------------------------------------------------------------------- + // RPC - Inbound (leader side) + // ----------------------------------------------------------------------- + + private async handleRPCRequest( + collectionId: string, + request: RPCRequest, + ): Promise { + let response: RPCResponse + + try { + switch (request.type) { + case `rpc:ensureRemoteSubset:req`: + response = await this.handleEnsureRemoteSubset(collectionId, request) + break + case `rpc:ensurePersistedIndex:req`: + response = await this.handleEnsurePersistedIndex( + collectionId, + request, + ) + break + case `rpc:applyLocalMutations:req`: + response = await this.handleApplyLocalMutations(collectionId, request) + break + case `rpc:pullSince:req`: + response = await this.handlePullSince(collectionId, request) + break + default: + return + } + } catch (error) { + const errorMessage = + error instanceof Error ? 
error.message : String(error) + response = { + type: request.type.replace(`:req`, `:res`) as RPCResponse[`type`], + rpcId: request.rpcId, + ok: false, + error: errorMessage, + } as RPCResponse + } + + const envelope: ProtocolEnvelope = { + v: 1, + dbName: this.dbName, + collectionId, + senderId: this.nodeId, + ts: Date.now(), + payload: response, + } + this.channel.postMessage(envelope) + } + + private handleEnsureRemoteSubset( + _collectionId: string, + request: { type: `rpc:ensureRemoteSubset:req`; rpcId: string }, + ): RPCResponse { + // Leader doesn't need to do anything special — the remote subset + // is ensured by the leader's own sync connection + return { + type: `rpc:ensureRemoteSubset:res`, + rpcId: request.rpcId, + ok: true, + } + } + + private async handleEnsurePersistedIndex( + collectionId: string, + request: { + type: `rpc:ensurePersistedIndex:req` + rpcId: string + signature: string + spec: PersistedIndexSpec + }, + ): Promise { + await this.withWriterLock(() => + this.requireAdapter().ensureIndex( + collectionId, + request.signature, + request.spec, + ), + ) + return { + type: `rpc:ensurePersistedIndex:res`, + rpcId: request.rpcId, + ok: true, + } + } + + private async handleApplyLocalMutations( + collectionId: string, + request: { + type: `rpc:applyLocalMutations:req` + rpcId: string + envelopeId: string + mutations: Array + }, + ): Promise { + // Dedupe by envelopeId + if (this.appliedEnvelopeIds.has(request.envelopeId)) { + return { + type: `rpc:applyLocalMutations:res`, + rpcId: request.rpcId, + ok: false, + code: `CONFLICT`, + error: `envelope ${request.envelopeId} already applied`, + } + } + + const state = this.collections.get(collectionId) + if (!state || !state.isLeader) { + return { + type: `rpc:applyLocalMutations:res`, + rpcId: request.rpcId, + ok: false, + code: `NOT_LEADER`, + error: `not the leader for ${collectionId}`, + } + } + + // Assign stream position + state.latestSeq++ + state.latestRowVersion++ + + const term = 
state.latestTerm + const seq = state.latestSeq + const rowVersion = state.latestRowVersion + + // Build and apply the persisted transaction + const tx = { + txId: crypto.randomUUID(), + term, + seq, + rowVersion, + mutations: request.mutations.map((m) => ({ + type: m.type, + key: m.key, + value: m.value, + })), + } + + await this.withWriterLock(() => + this.requireAdapter().applyCommittedTx(collectionId, tx), + ) + + // Track envelope for dedup + this.appliedEnvelopeIds.set(request.envelopeId, Date.now()) + this.pruneAppliedEnvelopeIds() + + // Broadcast tx:committed to all tabs + const changedRows = request.mutations + .filter((m) => m.type !== `delete`) + .map((m) => ({ key: m.key, value: m.value })) + const deletedKeys = request.mutations + .filter((m) => m.type === `delete`) + .map((m) => m.key) + + const txCommitted: ProtocolEnvelope = { + v: 1, + dbName: this.dbName, + collectionId, + senderId: this.nodeId, + ts: Date.now(), + payload: { + type: `tx:committed`, + term, + seq, + txId: tx.txId, + latestRowVersion: rowVersion, + requiresFullReload: false, + changedRows, + deletedKeys, + }, + } + this.channel.postMessage(txCommitted) + + // Deliver to local subscribers too + for (const subscriber of state.subscribers) { + subscriber(txCommitted) + } + + return { + type: `rpc:applyLocalMutations:res`, + rpcId: request.rpcId, + ok: true, + term, + seq, + latestRowVersion: rowVersion, + acceptedMutationIds: request.mutations.map((m) => m.mutationId), + } + } + + private async handlePullSince( + collectionId: string, + request: { + type: `rpc:pullSince:req` + rpcId: string + fromRowVersion: number + }, + ): Promise { + const state = this.collections.get(collectionId) + + const adapter = this.requireAdapter() + if (!adapter.pullSince) { + return { + type: `rpc:pullSince:res`, + rpcId: request.rpcId, + ok: true, + latestTerm: state?.latestTerm ?? 0, + latestSeq: state?.latestSeq ?? 0, + latestRowVersion: state?.latestRowVersion ?? 
0, + requiresFullReload: true, + } + } + + const result = await adapter.pullSince(collectionId, request.fromRowVersion) + + if (result.requiresFullReload) { + return { + type: `rpc:pullSince:res`, + rpcId: request.rpcId, + ok: true, + latestTerm: state?.latestTerm ?? 0, + latestSeq: state?.latestSeq ?? 0, + latestRowVersion: result.latestRowVersion, + requiresFullReload: true, + } + } + + return { + type: `rpc:pullSince:res`, + rpcId: request.rpcId, + ok: true, + latestTerm: state?.latestTerm ?? 0, + latestSeq: state?.latestSeq ?? 0, + latestRowVersion: result.latestRowVersion, + requiresFullReload: false, + changedKeys: result.changedKeys, + deletedKeys: result.deletedKeys, + } + } + + // ----------------------------------------------------------------------- + // DB Writer Lock + // ----------------------------------------------------------------------- + + private async withWriterLock(fn: () => Promise): Promise { + const lockName = `tsdb:writer:${this.dbName}` + + for (let attempt = 0; attempt <= WRITER_LOCK_MAX_RETRIES; attempt++) { + try { + return await navigator.locks.request(lockName, async () => fn()) + } catch (error) { + if (error instanceof DOMException && error.name === `AbortError`) { + throw error + } + + if (attempt < WRITER_LOCK_MAX_RETRIES) { + await sleep(WRITER_LOCK_BUSY_RETRY_MS * Math.min(attempt + 1, 5)) + continue + } + + throw error + } + } + + // Unreachable but satisfies TypeScript + throw new Error(`writer lock acquisition failed`) + } + + // ----------------------------------------------------------------------- + // Helpers + // ----------------------------------------------------------------------- + + private pruneAppliedEnvelopeIds(): void { + // Keep envelopes for 60 seconds for dedup + const cutoff = Date.now() - 60_000 + for (const [id, ts] of this.appliedEnvelopeIds) { + if (ts < cutoff) { + this.appliedEnvelopeIds.delete(id) + } + } + } +} + +// --------------------------------------------------------------------------- +// 
Utilities +// --------------------------------------------------------------------------- + +function isProtocolEnvelope(data: unknown): data is ProtocolEnvelope { + if (!data || typeof data !== `object`) return false + const record = data as Record + return ( + record.v === 1 && + typeof record.dbName === `string` && + typeof record.collectionId === `string` && + typeof record.senderId === `string` && + typeof record.ts === `number` + ) +} + +function sleep(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)) +} diff --git a/packages/db-electron-sqlite-persisted-collection/src/errors.ts b/packages/db-electron-sqlite-persisted-collection/src/errors.ts new file mode 100644 index 000000000..33bd0c4ec --- /dev/null +++ b/packages/db-electron-sqlite-persisted-collection/src/errors.ts @@ -0,0 +1,110 @@ +import type { + ElectronPersistenceMethod, + ElectronSerializedError, +} from './protocol' + +type ElectronPersistenceErrorOptions = { + code?: string + cause?: unknown +} + +export class ElectronPersistenceError extends Error { + readonly code: string | undefined + + constructor(message: string, options?: ElectronPersistenceErrorOptions) { + super(message, { cause: options?.cause }) + this.name = `ElectronPersistenceError` + this.code = options?.code + } +} + +export class UnknownElectronPersistenceCollectionError extends ElectronPersistenceError { + readonly collectionId: string + + constructor(collectionId: string) { + super( + `Unknown electron persistence collection "${collectionId}". 
Register the collection adapter in the main process host.`, + { + code: `UNKNOWN_COLLECTION`, + }, + ) + this.name = `UnknownElectronPersistenceCollectionError` + this.collectionId = collectionId + } +} + +export class UnsupportedElectronPersistenceMethodError extends ElectronPersistenceError { + readonly method: ElectronPersistenceMethod + readonly collectionId: string + + constructor(method: ElectronPersistenceMethod, collectionId: string) { + super( + `Method "${method}" is not supported by the electron persistence adapter for collection "${collectionId}".`, + { + code: `UNSUPPORTED_METHOD`, + }, + ) + this.name = `UnsupportedElectronPersistenceMethodError` + this.method = method + this.collectionId = collectionId + } +} + +export class ElectronPersistenceProtocolError extends ElectronPersistenceError { + constructor(message: string, options?: ElectronPersistenceErrorOptions) { + super(message, { + code: options?.code ?? `INVALID_PROTOCOL`, + cause: options?.cause, + }) + this.name = `ElectronPersistenceProtocolError` + } +} + +export class ElectronPersistenceTimeoutError extends ElectronPersistenceError { + constructor(message: string) { + super(message, { + code: `TIMEOUT`, + }) + this.name = `ElectronPersistenceTimeoutError` + } +} + +export class ElectronPersistenceRpcError extends ElectronPersistenceError { + readonly method: ElectronPersistenceMethod + readonly collectionId: string + readonly requestId: string + readonly remoteName: string + + constructor( + method: ElectronPersistenceMethod, + collectionId: string, + requestId: string, + serializedError: ElectronSerializedError, + ) { + super( + `${serializedError.name}: ${serializedError.message} (method=${method}, collection=${collectionId}, request=${requestId})`, + { + code: serializedError.code ?? 
`REMOTE_ERROR`, + }, + ) + this.name = `ElectronPersistenceRpcError` + this.method = method + this.collectionId = collectionId + this.requestId = requestId + this.remoteName = serializedError.name + } + + static fromSerialized( + method: ElectronPersistenceMethod, + collectionId: string, + requestId: string, + serializedError: ElectronSerializedError, + ): ElectronPersistenceRpcError { + return new ElectronPersistenceRpcError( + method, + collectionId, + requestId, + serializedError, + ) + } +} diff --git a/packages/db-electron-sqlite-persisted-collection/src/index.ts b/packages/db-electron-sqlite-persisted-collection/src/index.ts new file mode 100644 index 000000000..d06335511 --- /dev/null +++ b/packages/db-electron-sqlite-persisted-collection/src/index.ts @@ -0,0 +1,17 @@ +export { exposeElectronSQLitePersistence } from './main' +export type { + ElectronIpcMainLike, + ElectronSQLiteMainProcessOptions, +} from './main' +export { createElectronSQLitePersistence } from './renderer' +export type { + ElectronIpcRendererLike, + ElectronSQLitePersistenceOptions, +} from './renderer' +export { ElectronCollectionCoordinator } from './electron-coordinator' +export type { ElectronCollectionCoordinatorOptions } from './electron-coordinator' +export { persistedCollectionOptions } from '@tanstack/db-sqlite-persisted-collection-core' +export type { + PersistedCollectionCoordinator, + PersistedCollectionPersistence, +} from '@tanstack/db-sqlite-persisted-collection-core' diff --git a/packages/db-electron-sqlite-persisted-collection/src/main.ts b/packages/db-electron-sqlite-persisted-collection/src/main.ts new file mode 100644 index 000000000..ccdb5dd12 --- /dev/null +++ b/packages/db-electron-sqlite-persisted-collection/src/main.ts @@ -0,0 +1,267 @@ +import { InvalidPersistedCollectionConfigError } from '@tanstack/db-sqlite-persisted-collection-core' +import { + DEFAULT_ELECTRON_PERSISTENCE_CHANNEL, + ELECTRON_PERSISTENCE_PROTOCOL_VERSION, +} from './protocol' +import type { + 
PersistedCollectionPersistence, + PersistenceAdapter, + SQLitePullSinceResult, +} from '@tanstack/db-sqlite-persisted-collection-core' +import type { + ElectronPersistedKey, + ElectronPersistedRow, + ElectronPersistenceRequestEnvelope, + ElectronPersistenceResponseEnvelope, + ElectronSerializedError, +} from './protocol' + +type ElectronMainPersistenceAdapter = PersistenceAdapter< + ElectronPersistedRow, + ElectronPersistedKey +> & { + pullSince?: ( + collectionId: string, + fromRowVersion: number, + ) => Promise> + getStreamPosition?: (collectionId: string) => Promise<{ + latestTerm: number + latestSeq: number + latestRowVersion: number + }> +} + +function serializeError(error: unknown): ElectronSerializedError { + const fallbackMessage = `Unknown electron persistence error` + + if (!(error instanceof Error)) { + return { + name: `Error`, + message: fallbackMessage, + code: undefined, + } + } + + const codedError = error as Error & { code?: unknown } + return { + name: error.name || `Error`, + message: error.message || fallbackMessage, + stack: error.stack, + code: typeof codedError.code === `string` ? 
codedError.code : undefined, + } +} + +function createErrorResponse( + request: ElectronPersistenceRequestEnvelope, + error: unknown, +): ElectronPersistenceResponseEnvelope { + return { + v: ELECTRON_PERSISTENCE_PROTOCOL_VERSION, + requestId: request.requestId, + method: request.method, + ok: false, + error: serializeError(error), + } +} + +function assertValidRequest(request: ElectronPersistenceRequestEnvelope): void { + if (request.v !== ELECTRON_PERSISTENCE_PROTOCOL_VERSION) { + throw new InvalidPersistedCollectionConfigError( + `Unsupported electron persistence protocol version "${request.v}"`, + ) + } + + if ( + typeof request.requestId !== `string` || + request.requestId.trim().length === 0 + ) { + throw new InvalidPersistedCollectionConfigError( + `Electron persistence requestId cannot be empty`, + ) + } + + if ( + typeof request.collectionId !== `string` || + request.collectionId.trim().length === 0 + ) { + throw new InvalidPersistedCollectionConfigError( + `Electron persistence collectionId cannot be empty`, + ) + } +} + +async function executeRequestAgainstAdapter( + request: ElectronPersistenceRequestEnvelope, + adapter: ElectronMainPersistenceAdapter, +): Promise { + switch (request.method) { + case `loadSubset`: { + const result = await adapter.loadSubset( + request.collectionId, + request.payload.options, + request.payload.ctx, + ) + return { + v: ELECTRON_PERSISTENCE_PROTOCOL_VERSION, + requestId: request.requestId, + method: request.method, + ok: true, + result, + } + } + + case `applyCommittedTx`: { + await adapter.applyCommittedTx(request.collectionId, request.payload.tx) + return { + v: ELECTRON_PERSISTENCE_PROTOCOL_VERSION, + requestId: request.requestId, + method: request.method, + ok: true, + result: null, + } + } + + case `ensureIndex`: { + await adapter.ensureIndex( + request.collectionId, + request.payload.signature, + request.payload.spec, + ) + return { + v: ELECTRON_PERSISTENCE_PROTOCOL_VERSION, + requestId: request.requestId, + method: 
request.method, + ok: true, + result: null, + } + } + + case `markIndexRemoved`: { + if (!adapter.markIndexRemoved) { + throw new InvalidPersistedCollectionConfigError( + `markIndexRemoved is not supported by the configured electron persistence adapter`, + ) + } + await adapter.markIndexRemoved( + request.collectionId, + request.payload.signature, + ) + return { + v: ELECTRON_PERSISTENCE_PROTOCOL_VERSION, + requestId: request.requestId, + method: request.method, + ok: true, + result: null, + } + } + + case `pullSince`: { + if (!adapter.pullSince) { + throw new InvalidPersistedCollectionConfigError( + `pullSince is not supported by the configured electron persistence adapter`, + ) + } + const result = await adapter.pullSince( + request.collectionId, + request.payload.fromRowVersion, + ) + return { + v: ELECTRON_PERSISTENCE_PROTOCOL_VERSION, + requestId: request.requestId, + method: request.method, + ok: true, + result, + } + } + + case `getStreamPosition`: { + if (!adapter.getStreamPosition) { + throw new InvalidPersistedCollectionConfigError( + `getStreamPosition is not supported by the configured electron persistence adapter`, + ) + } + const position = await adapter.getStreamPosition(request.collectionId) + return { + v: ELECTRON_PERSISTENCE_PROTOCOL_VERSION, + requestId: request.requestId, + method: request.method, + ok: true, + result: position, + } + } + } +} + +function resolveModeAwarePersistence( + persistence: PersistedCollectionPersistence< + ElectronPersistedRow, + ElectronPersistedKey + >, + request: ElectronPersistenceRequestEnvelope, +): PersistedCollectionPersistence { + const mode = request.resolution?.mode ?? 
`sync-absent` + const schemaVersion = request.resolution?.schemaVersion + const collectionAwarePersistence = + persistence.resolvePersistenceForCollection?.({ + collectionId: request.collectionId, + mode, + schemaVersion, + }) + if (collectionAwarePersistence) { + return collectionAwarePersistence + } + + const modeAwarePersistence = persistence.resolvePersistenceForMode?.(mode) + return modeAwarePersistence ?? persistence +} + +export type ElectronIpcMainLike = { + handle: ( + channel: string, + listener: ( + event: unknown, + request: ElectronPersistenceRequestEnvelope, + ) => Promise, + ) => void + removeHandler?: (channel: string) => void +} + +export type ElectronSQLiteMainProcessOptions = { + persistence: PersistedCollectionPersistence< + ElectronPersistedRow, + ElectronPersistedKey + > + ipcMain: ElectronIpcMainLike + channel?: string +} + +export function exposeElectronSQLitePersistence( + options: ElectronSQLiteMainProcessOptions, +): () => void { + const channel = options.channel ?? 
DEFAULT_ELECTRON_PERSISTENCE_CHANNEL + options.ipcMain.handle( + channel, + async ( + _event, + request: ElectronPersistenceRequestEnvelope, + ): Promise => { + try { + assertValidRequest(request) + const modeAwarePersistence = resolveModeAwarePersistence( + options.persistence, + request, + ) + return await executeRequestAgainstAdapter( + request, + modeAwarePersistence.adapter, + ) + } catch (error) { + return createErrorResponse(request, error) + } + }, + ) + + return () => { + options.ipcMain.removeHandler?.(channel) + } +} diff --git a/packages/db-electron-sqlite-persisted-collection/src/protocol.ts b/packages/db-electron-sqlite-persisted-collection/src/protocol.ts new file mode 100644 index 000000000..b9faa4cfa --- /dev/null +++ b/packages/db-electron-sqlite-persisted-collection/src/protocol.ts @@ -0,0 +1,123 @@ +import type { LoadSubsetOptions } from '@tanstack/db' +import type { + PersistedCollectionMode, + PersistedIndexSpec, + PersistedTx, + SQLitePullSinceResult, +} from '@tanstack/db-sqlite-persisted-collection-core' + +export const ELECTRON_PERSISTENCE_PROTOCOL_VERSION = 1 as const +export const DEFAULT_ELECTRON_PERSISTENCE_CHANNEL = `tanstack-db:sqlite-persistence` + +export type ElectronPersistedRow = Record +export type ElectronPersistedKey = string | number + +export type ElectronPersistenceResolution = { + mode: PersistedCollectionMode + schemaVersion?: number +} + +export type ElectronPersistenceMethod = + | `loadSubset` + | `applyCommittedTx` + | `ensureIndex` + | `markIndexRemoved` + | `pullSince` + | `getStreamPosition` + +export type ElectronPersistencePayloadMap = { + loadSubset: { + options: LoadSubsetOptions + ctx?: { requiredIndexSignatures?: ReadonlyArray } + } + applyCommittedTx: { + tx: PersistedTx + } + ensureIndex: { + signature: string + spec: PersistedIndexSpec + } + markIndexRemoved: { + signature: string + } + pullSince: { + fromRowVersion: number + } + getStreamPosition: {} +} + +export type ElectronPersistenceResultMap = { + 
loadSubset: Array<{ key: ElectronPersistedKey; value: ElectronPersistedRow }> + applyCommittedTx: null + ensureIndex: null + markIndexRemoved: null + pullSince: SQLitePullSinceResult + getStreamPosition: { + latestTerm: number + latestSeq: number + latestRowVersion: number + } +} + +export type ElectronSerializedError = { + name: string + message: string + stack?: string + code?: string +} + +export type ElectronPersistenceRequestByMethod = { + [Method in ElectronPersistenceMethod]: { + v: number + requestId: string + collectionId: string + resolution?: ElectronPersistenceResolution + method: Method + payload: ElectronPersistencePayloadMap[Method] + } +} + +export type ElectronPersistenceRequest< + TMethod extends ElectronPersistenceMethod = ElectronPersistenceMethod, +> = ElectronPersistenceRequestByMethod[TMethod] + +export type ElectronPersistenceRequestEnvelope = + ElectronPersistenceRequestByMethod[ElectronPersistenceMethod] + +type ElectronPersistenceSuccessResponseByMethod = { + [Method in ElectronPersistenceMethod]: { + v: number + requestId: string + method: Method + ok: true + result: ElectronPersistenceResultMap[Method] + } +} + +type ElectronPersistenceErrorResponseByMethod = { + [Method in ElectronPersistenceMethod]: { + v: number + requestId: string + method: Method + ok: false + error: ElectronSerializedError + } +} + +export type ElectronPersistenceResponse< + TMethod extends ElectronPersistenceMethod = ElectronPersistenceMethod, +> = + | ElectronPersistenceSuccessResponseByMethod[TMethod] + | ElectronPersistenceErrorResponseByMethod[TMethod] + +export type ElectronPersistenceResponseEnvelope = + ElectronPersistenceResponse + +export type ElectronPersistenceRequestHandler = ( + request: ElectronPersistenceRequestEnvelope, +) => Promise + +export type ElectronPersistenceInvoke = ( + channel: string, + request: ElectronPersistenceRequestEnvelope, +) => Promise diff --git a/packages/db-electron-sqlite-persisted-collection/src/renderer.ts 
b/packages/db-electron-sqlite-persisted-collection/src/renderer.ts new file mode 100644 index 000000000..99b0a0a3c --- /dev/null +++ b/packages/db-electron-sqlite-persisted-collection/src/renderer.ts @@ -0,0 +1,362 @@ +import { + InvalidPersistedCollectionConfigError, + SingleProcessCoordinator, +} from '@tanstack/db-sqlite-persisted-collection-core' +import { ElectronCollectionCoordinator } from './electron-coordinator' +import { + DEFAULT_ELECTRON_PERSISTENCE_CHANNEL, + ELECTRON_PERSISTENCE_PROTOCOL_VERSION, +} from './protocol' +import type { + PersistedCollectionCoordinator, + PersistedCollectionMode, + PersistedCollectionPersistence, + PersistedIndexSpec, + PersistedTx, + SQLitePullSinceResult, +} from '@tanstack/db-sqlite-persisted-collection-core' +import type { + ElectronPersistedKey, + ElectronPersistedRow, + ElectronPersistenceInvoke, + ElectronPersistenceMethod, + ElectronPersistencePayloadMap, + ElectronPersistenceRequest, + ElectronPersistenceRequestEnvelope, + ElectronPersistenceResolution, + ElectronPersistenceResponseEnvelope, + ElectronPersistenceResultMap, +} from './protocol' +import type { LoadSubsetOptions } from '@tanstack/db' + +const DEFAULT_REQUEST_TIMEOUT_MS = 5_000 +let nextRequestId = 1 + +function createRequestId(): string { + const requestId = nextRequestId + nextRequestId++ + return `electron-persistence-${requestId}` +} + +function withTimeout( + promise: Promise, + timeoutMs: number, + timeoutMessage: string, +): Promise { + if (timeoutMs <= 0) { + return promise + } + + return new Promise((resolve, reject) => { + const timer = setTimeout(() => { + reject(new InvalidPersistedCollectionConfigError(timeoutMessage)) + }, timeoutMs) + + promise.then( + (value) => { + clearTimeout(timer) + resolve(value) + }, + (error: unknown) => { + clearTimeout(timer) + reject(error) + }, + ) + }) +} + +function assertValidResponse( + response: ElectronPersistenceResponseEnvelope, + request: ElectronPersistenceRequestEnvelope, +): void { + if 
(response.v !== ELECTRON_PERSISTENCE_PROTOCOL_VERSION) { + throw new InvalidPersistedCollectionConfigError( + `Unexpected electron persistence protocol version "${response.v}" in response`, + ) + } + + if (response.requestId !== request.requestId) { + throw new InvalidPersistedCollectionConfigError( + `Mismatched electron persistence response request id. Expected "${request.requestId}", received "${response.requestId}"`, + ) + } + + if (response.method !== request.method) { + throw new InvalidPersistedCollectionConfigError( + `Mismatched electron persistence response method. Expected "${request.method}", received "${response.method}"`, + ) + } +} + +function createSerializableLoadSubsetOptions( + subsetOptions: LoadSubsetOptions, +): LoadSubsetOptions { + const { subscription: _subscription, ...serializableOptions } = subsetOptions + return serializableOptions +} + +type RendererRequestExecutor = ( + method: TMethod, + collectionId: string, + payload: ElectronPersistencePayloadMap[TMethod], + resolution?: ElectronPersistenceResolution, +) => Promise + +function createRendererRequestExecutor(options: { + invoke: ElectronPersistenceInvoke + channel?: string + timeoutMs?: number +}): RendererRequestExecutor { + const channel = options.channel ?? DEFAULT_ELECTRON_PERSISTENCE_CHANNEL + const timeoutMs = options.timeoutMs ?? 
DEFAULT_REQUEST_TIMEOUT_MS + + return async ( + method: TMethod, + collectionId: string, + payload: ElectronPersistencePayloadMap[TMethod], + resolution?: ElectronPersistenceResolution, + ) => { + const request = { + v: ELECTRON_PERSISTENCE_PROTOCOL_VERSION, + requestId: createRequestId(), + collectionId, + method, + resolution, + payload, + } as ElectronPersistenceRequest + + const response = await withTimeout( + options.invoke(channel, request), + timeoutMs, + `Electron persistence request timed out (method=${method}, collection=${collectionId}, timeoutMs=${timeoutMs})`, + ) + assertValidResponse(response, request) + + if (!response.ok) { + const remoteError = new InvalidPersistedCollectionConfigError( + `${response.error.name}: ${response.error.message}`, + ) + if (typeof response.error.stack === `string`) { + remoteError.stack = response.error.stack + } + if (typeof response.error.code === `string`) { + ;(remoteError as Error & { code?: string }).code = response.error.code + } + throw remoteError + } + + return response.result as ElectronPersistenceResultMap[TMethod] + } +} + +type ElectronRendererResolvedAdapter< + T extends object, + TKey extends string | number = string | number, +> = PersistedCollectionPersistence[`adapter`] & { + pullSince: ( + collectionId: string, + fromRowVersion: number, + ) => Promise> + getStreamPosition: (collectionId: string) => Promise<{ + latestTerm: number + latestSeq: number + latestRowVersion: number + }> +} + +function createResolvedRendererAdapter< + T extends object, + TKey extends string | number = string | number, +>( + executeRequest: RendererRequestExecutor, + resolution?: ElectronPersistenceResolution, +): ElectronRendererResolvedAdapter { + return { + loadSubset: async ( + collectionId: string, + subsetOptions: LoadSubsetOptions, + ctx?: { requiredIndexSignatures?: ReadonlyArray }, + ) => { + const result = await executeRequest( + `loadSubset`, + collectionId, + { + options: 
createSerializableLoadSubsetOptions(subsetOptions), + ctx, + }, + resolution, + ) + + return result as Array<{ key: TKey; value: T }> + }, + applyCommittedTx: async ( + collectionId: string, + tx: PersistedTx, + ): Promise => { + await executeRequest( + `applyCommittedTx`, + collectionId, + { + tx: tx as PersistedTx, + }, + resolution, + ) + }, + ensureIndex: async ( + collectionId: string, + signature: string, + spec: PersistedIndexSpec, + ): Promise => { + await executeRequest( + `ensureIndex`, + collectionId, + { + signature, + spec, + }, + resolution, + ) + }, + markIndexRemoved: async ( + collectionId: string, + signature: string, + ): Promise => { + await executeRequest( + `markIndexRemoved`, + collectionId, + { + signature, + }, + resolution, + ) + }, + pullSince: async ( + collectionId: string, + fromRowVersion: number, + ): Promise> => { + const result = await executeRequest( + `pullSince`, + collectionId, + { + fromRowVersion, + }, + resolution, + ) + return result as SQLitePullSinceResult + }, + getStreamPosition: async ( + collectionId: string, + ): Promise<{ + latestTerm: number + latestSeq: number + latestRowVersion: number + }> => { + return executeRequest(`getStreamPosition`, collectionId, {}, resolution) + }, + } +} + +export type ElectronIpcRendererLike = { + invoke: ( + channel: string, + request: ElectronPersistenceRequestEnvelope, + ) => Promise +} + +export type ElectronSQLitePersistenceOptions = { + invoke?: ElectronPersistenceInvoke + ipcRenderer?: ElectronIpcRendererLike + channel?: string + timeoutMs?: number + coordinator?: PersistedCollectionCoordinator +} + +function resolveInvoke( + options: ElectronSQLitePersistenceOptions, +): ElectronPersistenceInvoke { + if (options.invoke) { + return options.invoke + } + + if (options.ipcRenderer) { + return (channel, request) => options.ipcRenderer!.invoke(channel, request) + } + + throw new InvalidPersistedCollectionConfigError( + `Electron renderer persistence requires either invoke or 
ipcRenderer`, + ) +} + +export function createElectronSQLitePersistence< + T extends object, + TKey extends string | number = string | number, +>( + options: ElectronSQLitePersistenceOptions, +): PersistedCollectionPersistence { + const invoke = resolveInvoke(options) + const coordinator = options.coordinator ?? new SingleProcessCoordinator() + const executeRequest = createRendererRequestExecutor({ + invoke, + channel: options.channel, + timeoutMs: options.timeoutMs, + }) + const adapterCache = new Map< + string, + ElectronRendererResolvedAdapter, string | number> + >() + + const getAdapterForCollection = ( + mode: PersistedCollectionMode, + schemaVersion: number | undefined, + ) => { + const schemaVersionKey = + schemaVersion === undefined ? `schema:default` : `schema:${schemaVersion}` + const cacheKey = `mode:${mode}|${schemaVersionKey}` + const cachedAdapter = adapterCache.get(cacheKey) + if (cachedAdapter) { + return cachedAdapter + } + + const adapter = createResolvedRendererAdapter< + Record, + string | number + >(executeRequest, { + mode, + schemaVersion, + }) + adapterCache.set(cacheKey, adapter) + + // Wire the adapter into the coordinator so it can handle + // leader-side RPCs (applyCommittedTx, pullSince, getStreamPosition, etc.) + if (coordinator instanceof ElectronCollectionCoordinator) { + coordinator.setAdapter(adapter) + } + + return adapter + } + + const createCollectionPersistence = ( + mode: PersistedCollectionMode, + schemaVersion: number | undefined, + ): PersistedCollectionPersistence => ({ + adapter: getAdapterForCollection( + mode, + schemaVersion, + ) as unknown as PersistedCollectionPersistence[`adapter`], + coordinator, + }) + + const defaultPersistence = createCollectionPersistence( + `sync-absent`, + undefined, + ) + + return { + ...defaultPersistence, + resolvePersistenceForCollection: ({ mode, schemaVersion }) => + createCollectionPersistence(mode, schemaVersion), + // Backward compatible fallback for older callers. 
+ resolvePersistenceForMode: (mode) => + createCollectionPersistence(mode, undefined), + } +} diff --git a/packages/db-electron-sqlite-persisted-collection/tests/e2e/electron-process-client.ts b/packages/db-electron-sqlite-persisted-collection/tests/e2e/electron-process-client.ts new file mode 100644 index 000000000..f960721ed --- /dev/null +++ b/packages/db-electron-sqlite-persisted-collection/tests/e2e/electron-process-client.ts @@ -0,0 +1,425 @@ +import { spawn } from 'node:child_process' +import { existsSync } from 'node:fs' +import { createRequire } from 'node:module' +import { dirname, join } from 'node:path' +import { fileURLToPath } from 'node:url' +import { deserialize } from 'node:v8' +import { DEFAULT_ELECTRON_PERSISTENCE_CHANNEL } from '../../src/protocol' +import { + E2E_RESULT_BASE64_PREFIX, + E2E_RESULT_PREFIX, +} from './fixtures/runtime-bridge-types' +import type { ElectronPersistenceInvoke } from '../../src/protocol' +import type { + ElectronRuntimeBridgeAdapterOptions, + ElectronRuntimeBridgeHostKind, + ElectronRuntimeBridgeInput, + ElectronRuntimeBridgeProcessResult, + ElectronRuntimeBridgeScenarioResult, +} from './fixtures/runtime-bridge-types' + +const ELECTRON_SCENARIO_TIMEOUT_MS = 20_000 +const require = createRequire(import.meta.url) +const currentFilePath = fileURLToPath(import.meta.url) +const e2eDirectory = dirname(currentFilePath) +const testsDirectory = dirname(e2eDirectory) +const packageRoot = dirname(testsDirectory) +const electronRunnerPath = join(e2eDirectory, `fixtures`, `electron-main.mjs`) +const E2E_INPUT_ENV_VAR = `TANSTACK_DB_E2E_INPUT` +const E2E_TRANSPORT_TYPE_TAG = `__tanstack_db_e2e_transport_type__` +const E2E_TRANSPORT_VALUE_TAG = `value` + +export const ELECTRON_FULL_E2E_ENV_VAR = `TANSTACK_DB_ELECTRON_E2E_ALL` + +export function isElectronFullE2EEnabled(): boolean { + return process.env[ELECTRON_FULL_E2E_ENV_VAR] === `1` +} + +type CreateElectronRuntimeBridgeInvokeOptions = { + dbPath: string + collectionId: string 
+ allowAnyCollectionId?: boolean + timeoutMs?: number + hostKind?: ElectronRuntimeBridgeHostKind + adapterOptions?: ElectronRuntimeBridgeAdapterOptions +} + +function createElectronScenarioEnv( + input: ElectronRuntimeBridgeInput, +): NodeJS.ProcessEnv { + const childEnv: NodeJS.ProcessEnv = { ...process.env } + + delete childEnv.NODE_V8_COVERAGE + + for (const envKey of Object.keys(childEnv)) { + if ( + envKey.startsWith(`VITEST_`) || + envKey.startsWith(`__VITEST_`) || + envKey.startsWith(`NYC_`) + ) { + delete childEnv[envKey] + } + } + + childEnv[E2E_INPUT_ENV_VAR] = encodeInputForEnv(input) + childEnv.ELECTRON_DISABLE_SECURITY_WARNINGS = `true` + + return childEnv +} + +function resolveElectronBinaryPath(): string { + const electronModuleValue: unknown = require(`electron`) + if ( + typeof electronModuleValue !== `string` || + electronModuleValue.length === 0 + ) { + throw new Error(`Failed to resolve electron binary path`) + } + return electronModuleValue +} + +function parseScenarioResult( + stdoutBuffer: string, + stderrBuffer: string, + exitCode: number | null, +): ElectronRuntimeBridgeProcessResult { + const outputLines = stdoutBuffer.split(/\r?\n/u) + const base64ResultLine = outputLines.find((line) => + line.startsWith(E2E_RESULT_BASE64_PREFIX), + ) + if (base64ResultLine) { + const rawResult = base64ResultLine.slice(E2E_RESULT_BASE64_PREFIX.length) + const serializedResult = Buffer.from(rawResult, `base64`) + return deserialize(serializedResult) as ElectronRuntimeBridgeProcessResult + } + + const jsonResultLine = outputLines.find((line) => + line.startsWith(E2E_RESULT_PREFIX), + ) + + if (!jsonResultLine) { + throw new Error( + [ + `Electron e2e runner did not emit a result line`, + `exitCode=${String(exitCode)}`, + `stderr=${stderrBuffer}`, + `stdout=${stdoutBuffer}`, + ].join(`\n`), + ) + } + + const rawResult = jsonResultLine.slice(E2E_RESULT_PREFIX.length) + return JSON.parse(rawResult) as ElectronRuntimeBridgeProcessResult +} + +function 
encodeTransportValue( + value: unknown, + ancestors: WeakSet = new WeakSet(), +): unknown { + if (value === null) { + return null + } + + if ( + typeof value === `string` || + typeof value === `boolean` || + (typeof value === `number` && Number.isFinite(value)) + ) { + return value + } + + if (typeof value === `number`) { + if (Number.isNaN(value)) { + return { + [E2E_TRANSPORT_TYPE_TAG]: `nan`, + } + } + if (value === Number.POSITIVE_INFINITY) { + return { + [E2E_TRANSPORT_TYPE_TAG]: `infinity`, + } + } + if (value === Number.NEGATIVE_INFINITY) { + return { + [E2E_TRANSPORT_TYPE_TAG]: `-infinity`, + } + } + } + + if (typeof value === `bigint`) { + return { + [E2E_TRANSPORT_TYPE_TAG]: `bigint`, + [E2E_TRANSPORT_VALUE_TAG]: value.toString(), + } + } + + if (value instanceof Date) { + const timestamp = value.getTime() + if (Number.isNaN(timestamp)) { + return { + [E2E_TRANSPORT_TYPE_TAG]: `date_invalid`, + } + } + return { + [E2E_TRANSPORT_TYPE_TAG]: `date`, + [E2E_TRANSPORT_VALUE_TAG]: value.toISOString(), + } + } + + if (Array.isArray(value)) { + if (ancestors.has(value)) { + return undefined + } + ancestors.add(value) + try { + return value.map((item) => { + const encodedItem = encodeTransportValue(item, ancestors) + return encodedItem === undefined ? 
null : encodedItem + }) + } finally { + ancestors.delete(value) + } + } + + if ( + typeof value === `undefined` || + typeof value === `function` || + typeof value === `symbol` + ) { + return undefined + } + + if (typeof value === `object`) { + if (ancestors.has(value)) { + return undefined + } + ancestors.add(value) + try { + const encodedObject: Record = {} + for (const [key, objectValue] of Object.entries( + value as Record, + )) { + const encodedObjectValue = encodeTransportValue(objectValue, ancestors) + if (encodedObjectValue !== undefined) { + encodedObject[key] = encodedObjectValue + } + } + return encodedObject + } finally { + ancestors.delete(value) + } + } + + return undefined +} + +function encodeInputForEnv(input: ElectronRuntimeBridgeInput): string { + const encodedInput = encodeTransportValue(input) + if (!encodedInput || typeof encodedInput !== `object`) { + throw new Error(`Failed to encode e2e runtime input`) + } + return JSON.stringify(encodedInput) +} + +export async function runElectronRuntimeBridgeScenario( + input: ElectronRuntimeBridgeInput, +): Promise { + const scenarioTimeoutMs = Math.max( + ELECTRON_SCENARIO_TIMEOUT_MS, + (input.timeoutMs ?? 0) + 8_000, + ) + const electronBinaryPath = resolveElectronBinaryPath() + const xvfbRunPath = `/usr/bin/xvfb-run` + const hasXvfbRun = existsSync(xvfbRunPath) + const electronArgs = [ + `--disable-gpu`, + `--disable-dev-shm-usage`, + `--no-sandbox`, + electronRunnerPath, + ] + const command = hasXvfbRun ? xvfbRunPath : electronBinaryPath + const args = hasXvfbRun + ? 
[ + `-a`, + `--server-args=-screen 0 1280x720x24`, + electronBinaryPath, + ...electronArgs, + ] + : electronArgs + + const processResult = await new Promise( + (resolve, reject) => { + const child = spawn(command, args, { + cwd: packageRoot, + env: createElectronScenarioEnv(input), + stdio: [`ignore`, `pipe`, `pipe`], + }) + let stdoutBuffer = `` + let stderrBuffer = `` + let isSettled = false + let resultFromStdout: ElectronRuntimeBridgeProcessResult | undefined + let gracefulCloseTimeout: ReturnType | undefined + + const settle = ( + callback: (result: ElectronRuntimeBridgeProcessResult) => void, + result: ElectronRuntimeBridgeProcessResult, + ) => { + if (isSettled) { + return + } + isSettled = true + clearTimeout(timeout) + if (gracefulCloseTimeout) { + clearTimeout(gracefulCloseTimeout) + } + callback(result) + + if (!child.killed) { + child.kill(`SIGKILL`) + } + } + + const rejectOnce = (error: unknown) => { + if (isSettled) { + return + } + isSettled = true + clearTimeout(timeout) + if (gracefulCloseTimeout) { + clearTimeout(gracefulCloseTimeout) + } + reject(error) + if (!child.killed) { + child.kill(`SIGKILL`) + } + } + + const timeout = setTimeout(() => { + rejectOnce( + new Error( + [ + `Electron e2e scenario timed out after ${String(scenarioTimeoutMs)}ms`, + `stderr=${stderrBuffer}`, + `stdout=${stdoutBuffer}`, + ].join(`\n`), + ), + ) + }, scenarioTimeoutMs) + + child.on(`error`, (error) => { + rejectOnce(error) + }) + + child.stdout.on(`data`, (chunk: Buffer) => { + stdoutBuffer += chunk.toString() + + try { + const parsedResult = parseScenarioResult( + stdoutBuffer, + stderrBuffer, + null, + ) + if (!resultFromStdout) { + resultFromStdout = parsedResult + gracefulCloseTimeout = setTimeout(() => { + settle(resolve, parsedResult) + }, 1_000) + } + } catch { + // Result line might not be complete yet. 
+ } + }) + child.stderr.on(`data`, (chunk: Buffer) => { + stderrBuffer += chunk.toString() + }) + + child.on(`close`, (exitCode) => { + if (isSettled) { + return + } + + try { + if (resultFromStdout) { + settle(resolve, resultFromStdout) + return + } + + const parsedResult = parseScenarioResult( + stdoutBuffer, + stderrBuffer, + exitCode, + ) + settle(resolve, parsedResult) + } catch (error) { + rejectOnce(error) + } + }) + }, + ) + + if (!processResult.ok) { + throw new Error( + `Electron e2e runner failed: ${processResult.error.name}: ${processResult.error.message}`, + ) + } + + return processResult.result +} + +export function createElectronRuntimeBridgeInvoke( + options: CreateElectronRuntimeBridgeInvokeOptions, +): ElectronPersistenceInvoke { + let queue: Promise = Promise.resolve() + + return async (channel, request) => { + const queuedInvoke = queue.then( + () => + runElectronRuntimeBridgeScenario({ + dbPath: options.dbPath, + collectionId: options.collectionId, + allowAnyCollectionId: options.allowAnyCollectionId, + hostKind: options.hostKind, + adapterOptions: options.adapterOptions, + channel, + timeoutMs: options.timeoutMs ?? 4_000, + scenario: { + type: `invokeRequest`, + request, + }, + }), + () => + runElectronRuntimeBridgeScenario({ + dbPath: options.dbPath, + collectionId: options.collectionId, + allowAnyCollectionId: options.allowAnyCollectionId, + hostKind: options.hostKind, + adapterOptions: options.adapterOptions, + channel, + timeoutMs: options.timeoutMs ?? 
4_000, + scenario: { + type: `invokeRequest`, + request, + }, + }), + ) + queue = queuedInvoke.then( + () => undefined, + () => undefined, + ) + + const result = await queuedInvoke + + if (result.type !== `invokeRequest`) { + throw new Error(`Unexpected invokeRequest result: ${result.type}`) + } + + return result.response + } +} + +export function withDefaultElectronChannel( + invoke: ElectronPersistenceInvoke, +): ElectronPersistenceInvoke { + return (channel, request) => + invoke(channel || DEFAULT_ELECTRON_PERSISTENCE_CHANNEL, request) +} diff --git a/packages/db-electron-sqlite-persisted-collection/tests/e2e/fixtures/electron-main.mjs b/packages/db-electron-sqlite-persisted-collection/tests/e2e/fixtures/electron-main.mjs new file mode 100644 index 000000000..94ff930b9 --- /dev/null +++ b/packages/db-electron-sqlite-persisted-collection/tests/e2e/fixtures/electron-main.mjs @@ -0,0 +1,428 @@ +import { dirname, join } from 'node:path' +import { execFile } from 'node:child_process' +import { promisify } from 'node:util' +import { AsyncLocalStorage } from 'node:async_hooks' +import { copyFileSync, existsSync, mkdtempSync, rmSync } from 'node:fs' +import { tmpdir } from 'node:os' +import { fileURLToPath } from 'node:url' +import { serialize } from 'node:v8' +import { BrowserWindow, app, ipcMain } from 'electron' +import { createSQLiteCorePersistenceAdapter } from '@tanstack/db-sqlite-persisted-collection-core' +import { exposeElectronSQLitePersistence } from '../../../dist/esm/main.js' + +const E2E_RESULT_PREFIX = `__TANSTACK_DB_E2E_RESULT__:` +const E2E_RESULT_BASE64_PREFIX = `__TANSTACK_DB_E2E_RESULT_BASE64__:` +const E2E_INPUT_ENV_VAR = `TANSTACK_DB_E2E_INPUT` +const E2E_TRANSPORT_TYPE_TAG = `__tanstack_db_e2e_transport_type__` +const E2E_TRANSPORT_VALUE_TAG = `value` +const execFileAsync = promisify(execFile) + +function toSqlLiteral(value) { + if (value === null || value === undefined) { + return `NULL` + } + + if (typeof value === `number`) { + return 
/**
 * Renders a single JS value as a SQLite literal for the sqlite3 CLI.
 * null/undefined -> NULL, booleans -> 1/0, non-finite numbers -> NULL,
 * bigints verbatim; everything else is stringified with embedded single
 * quotes doubled per SQL escaping rules.
 */
function toSqlLiteral(value) {
  if (value === null || value === undefined) {
    return `NULL`
  }

  switch (typeof value) {
    case `number`:
      return Number.isFinite(value) ? String(value) : `NULL`
    case `boolean`:
      return value ? `1` : `0`
    case `bigint`:
      return value.toString()
    default: {
      const text = typeof value === `string` ? value : String(value)
      return `'${text.replace(/'/g, `''`)}'`
    }
  }
}

/**
 * Inlines positional `?` params into SQL text (the sqlite3 CLI has no bind
 * API). Throws when the placeholder count does not match params.length.
 * NOTE(review): a `?` inside a SQL string literal would also be replaced —
 * acceptable for these fixtures, not for general SQL.
 */
function interpolateSql(sql, params) {
  let usedCount = 0
  const rendered = sql.replace(/\?/g, () => toSqlLiteral(params[usedCount++]))

  if (usedCount !== params.length) {
    throw new Error(
      `SQL interpolation mismatch: used ${usedCount} params, received ${params.length}`,
    )
  }

  return rendered
}

/**
 * Minimal async SQLite driver backed by the `sqlite3` command-line binary.
 *
 * Non-transactional operations are serialized through an internal promise
 * queue. transaction() snapshots the database file into a temp copy, routes
 * the callback's statements to that copy via AsyncLocalStorage, and copies
 * the file back on success — a crude but isolated transaction that is good
 * enough for e2e fixtures.
 */
class SqliteCliDriver {
  // Holds the temp database path while a transaction callback is running.
  transactionDbPath = new AsyncLocalStorage()
  // Serializes all non-transactional operations.
  queue = Promise.resolve()

  constructor(dbPath) {
    this.dbPath = dbPath
  }

  async exec(sql) {
    const txPath = this.transactionDbPath.getStore()
    if (txPath) {
      // Inside a transaction: hit the tx copy directly, the queue is already
      // held by the enclosing transaction().
      await execFileAsync(`sqlite3`, [txPath, sql])
      return
    }

    await this.enqueue(async () => {
      await execFileAsync(`sqlite3`, [this.dbPath, sql])
    })
  }

  async query(sql, params = []) {
    const txPath = this.transactionDbPath.getStore()
    const renderedSql = interpolateSql(sql, params)
    const targetDbPath = txPath ?? this.dbPath

    const runQuery = async () => {
      const { stdout } = await execFileAsync(`sqlite3`, [
        `-json`,
        targetDbPath,
        renderedSql,
      ])
      const output = stdout.trim()
      return output ? JSON.parse(output) : []
    }

    return txPath ? runQuery() : this.enqueue(runQuery)
  }

  async run(sql, params = []) {
    const txPath = this.transactionDbPath.getStore()
    const renderedSql = interpolateSql(sql, params)
    const targetDbPath = txPath ?? this.dbPath

    if (txPath) {
      await execFileAsync(`sqlite3`, [targetDbPath, renderedSql])
      return
    }

    await this.enqueue(async () => {
      await execFileAsync(`sqlite3`, [targetDbPath, renderedSql])
    })
  }

  async transaction(fn) {
    if (this.transactionDbPath.getStore()) {
      // Nested call: reuse the already-active transaction copy.
      return fn(this)
    }

    return this.enqueue(async () => {
      const txDirectory = mkdtempSync(join(tmpdir(), `db-electron-e2e-tx-`))
      const txDbPath = join(txDirectory, `state.sqlite`)

      // Snapshot the current database (if it exists) into the tx copy.
      if (existsSync(this.dbPath)) {
        copyFileSync(this.dbPath, txDbPath)
      }

      try {
        const txResult = await this.transactionDbPath.run(txDbPath, async () =>
          fn(this),
        )
        // Commit: copy the mutated snapshot back over the real database.
        if (existsSync(txDbPath)) {
          copyFileSync(txDbPath, this.dbPath)
        }
        return txResult
      } finally {
        rmSync(txDirectory, { recursive: true, force: true })
      }
    })
  }

  // Chains an operation onto the serialization queue. The operation runs
  // whether or not the previous one failed, so a rejection never wedges the
  // queue; the caller still observes the operation's own result/rejection.
  enqueue(operation) {
    const queuedOperation = this.queue.then(operation, operation)
    this.queue = queuedOperation.then(
      () => undefined,
      () => undefined,
    )
    return queuedOperation
  }
}

/** Reads and decodes the scenario input passed through the env var. */
function parseInputFromEnv() {
  const rawInput = process.env[E2E_INPUT_ENV_VAR]
  if (!rawInput) {
    throw new Error(`Missing ${E2E_INPUT_ENV_VAR}`)
  }

  const decoded = decodeTransportValue(JSON.parse(rawInput))
  if (!decoded || typeof decoded !== `object`) {
    throw new Error(`Invalid ${E2E_INPUT_ENV_VAR} payload`)
  }

  return decoded
}

/** True when `value` is a tagged transport marker produced by the encoder. */
function isEncodedTransportValue(value) {
  return (
    value &&
    typeof value === `object` &&
    typeof value[E2E_TRANSPORT_TYPE_TAG] === `string`
  )
}
return BigInt(value[E2E_TRANSPORT_VALUE_TAG]) + case `date`: + return new Date(value[E2E_TRANSPORT_VALUE_TAG]) + case `date_invalid`: + return new Date(Number.NaN) + case `nan`: + return Number.NaN + case `infinity`: + return Number.POSITIVE_INFINITY + case `-infinity`: + return Number.NEGATIVE_INFINITY + default: + break + } + } + + if (typeof value === `object`) { + const decodedObject = {} + for (const [key, objectValue] of Object.entries(value)) { + decodedObject[key] = decodeTransportValue(objectValue) + } + return decodedObject + } + + return value +} + +function printProcessResult(result) { + try { + const serializedResult = Buffer.from(serialize(result)).toString(`base64`) + process.stdout.write(`${E2E_RESULT_BASE64_PREFIX}${serializedResult}\n`) + } catch { + process.stdout.write(`${E2E_RESULT_PREFIX}${JSON.stringify(result)}\n`) + } +} + +function getPreloadPath() { + const currentFile = fileURLToPath(import.meta.url) + return join(dirname(currentFile), `renderer-preload.cjs`) +} + +function getRendererPagePath() { + const currentFile = fileURLToPath(import.meta.url) + return join(dirname(currentFile), `renderer-page.html`) +} + +function serializeError(error) { + if (error instanceof Error) { + return { + name: error.name, + message: error.message, + stack: error.stack, + } + } + + return { + name: `Error`, + message: `Unknown runtime error`, + } +} + +function createUnknownCollectionError(collectionId) { + const error = new Error( + `Unknown electron persistence collection "${collectionId}"`, + ) + error.name = `UnknownElectronPersistenceCollectionError` + error.code = `UNKNOWN_COLLECTION` + return error +} + +function createMainPersistence(input, driver) { + const adapter = createSQLiteCorePersistenceAdapter({ + driver, + ...(input.adapterOptions ?? 
{}), + }) + + if (input.allowAnyCollectionId) { + return { + persistence: { + adapter, + }, + cleanup: () => {}, + } + } + + return { + persistence: { + adapter: { + loadSubset: (collectionId, options, ctx) => { + if (collectionId !== input.collectionId) { + throw createUnknownCollectionError(collectionId) + } + return adapter.loadSubset(collectionId, options, ctx) + }, + applyCommittedTx: (collectionId, tx) => { + if (collectionId !== input.collectionId) { + throw createUnknownCollectionError(collectionId) + } + return adapter.applyCommittedTx(collectionId, tx) + }, + ensureIndex: (collectionId, signature, spec) => { + if (collectionId !== input.collectionId) { + throw createUnknownCollectionError(collectionId) + } + return adapter.ensureIndex(collectionId, signature, spec) + }, + markIndexRemoved: (collectionId, signature) => { + if (collectionId !== input.collectionId) { + throw createUnknownCollectionError(collectionId) + } + return adapter.markIndexRemoved?.(collectionId, signature) + }, + pullSince: (collectionId, fromRowVersion) => { + if (collectionId !== input.collectionId) { + throw createUnknownCollectionError(collectionId) + } + return adapter.pullSince?.(collectionId, fromRowVersion) + }, + }, + }, + cleanup: () => {}, + } +} + +async function run() { + app.commandLine.appendSwitch(`disable-gpu`) + app.commandLine.appendSwitch(`disable-dev-shm-usage`) + app.commandLine.appendSwitch(`no-sandbox`) + + const input = parseInputFromEnv() + const driver = new SqliteCliDriver(input.dbPath) + const mainRuntime = createMainPersistence(input, driver) + const disposeIpc = exposeElectronSQLitePersistence({ + ipcMain, + persistence: mainRuntime.persistence, + channel: input.channel, + }) + + let window + try { + await app.whenReady() + window = new BrowserWindow({ + show: false, + webPreferences: { + contextIsolation: true, + nodeIntegration: false, + sandbox: false, + preload: getPreloadPath(), + }, + }) + + const rendererDiagnostics = [] + window.webContents.on( 
+ `console-message`, + (_event, level, message, line, sourceId) => { + rendererDiagnostics.push( + `[console:${String(level)}] ${sourceId}:${String(line)} ${message}`, + ) + }, + ) + window.webContents.on(`preload-error`, (_event, path, error) => { + rendererDiagnostics.push( + `[preload-error] ${path}: ${error?.message ?? `unknown preload error`}`, + ) + }) + + await window.loadFile(getRendererPagePath()) + + const scenarioInputBase64 = Buffer.from( + serialize({ + collectionId: input.collectionId, + allowAnyCollectionId: input.allowAnyCollectionId, + hostKind: input.hostKind, + adapterOptions: input.adapterOptions, + channel: input.channel, + timeoutMs: input.timeoutMs, + scenario: input.scenario, + }), + ).toString(`base64`) + + const hasBridgeApi = await window.webContents.executeJavaScript( + `typeof window.__tanstackDbRuntimeBridge__ === 'object'`, + true, + ) + if (!hasBridgeApi) { + throw new Error( + `Renderer preload bridge is unavailable.\n${rendererDiagnostics.join(`\n`)}`, + ) + } + + let result + try { + result = await window.webContents.executeJavaScript( + `window.__tanstackDbRuntimeBridge__.runScenarioFromBase64('${scenarioInputBase64}')`, + true, + ) + } catch (error) { + const message = error instanceof Error ? 
error.message : `Unknown error` + throw new Error( + `Renderer scenario execution failed: ${message}\n${rendererDiagnostics.join(`\n`)}`, + ) + } + + return { + ok: true, + result, + } + } finally { + if (window) { + window.destroy() + } + disposeIpc() + mainRuntime.cleanup() + await app.quit() + } +} + +void run() + .then((result) => { + printProcessResult(result) + process.exitCode = 0 + }) + .catch((error) => { + printProcessResult({ + ok: false, + error: serializeError(error), + }) + process.exitCode = 1 + }) diff --git a/packages/db-electron-sqlite-persisted-collection/tests/e2e/fixtures/renderer-page.html b/packages/db-electron-sqlite-persisted-collection/tests/e2e/fixtures/renderer-page.html new file mode 100644 index 000000000..4c51c20cc --- /dev/null +++ b/packages/db-electron-sqlite-persisted-collection/tests/e2e/fixtures/renderer-page.html @@ -0,0 +1,10 @@ + + + + + TanStack DB Electron Runtime Bridge E2E + + +
    runtime bridge e2e
    + + diff --git a/packages/db-electron-sqlite-persisted-collection/tests/e2e/fixtures/renderer-preload.cjs b/packages/db-electron-sqlite-persisted-collection/tests/e2e/fixtures/renderer-preload.cjs new file mode 100644 index 000000000..62899bfd5 --- /dev/null +++ b/packages/db-electron-sqlite-persisted-collection/tests/e2e/fixtures/renderer-preload.cjs @@ -0,0 +1,118 @@ +const { contextBridge, ipcRenderer } = require(`electron`) +const { deserialize } = require(`node:v8`) +const rendererModulePath = `${__dirname}/../../../dist/cjs/renderer.cjs` +const protocolModulePath = `${__dirname}/../../../dist/cjs/protocol.cjs` +const { createElectronSQLitePersistence } = require(rendererModulePath) +const { DEFAULT_ELECTRON_PERSISTENCE_CHANNEL } = require(protocolModulePath) + +async function runScenario(input) { + const invokeBridge = (channel, request) => + ipcRenderer.invoke(channel, request) + const persistence = createElectronSQLitePersistence({ + ipcRenderer, + channel: input.channel, + timeoutMs: input.timeoutMs, + }) + const adapter = persistence.adapter + + const scenario = input.scenario + switch (scenario.type) { + case `noop`: + return { type: `noop` } + + case `writeTodo`: { + await adapter.applyCommittedTx(input.collectionId, { + txId: scenario.txId, + term: 1, + seq: scenario.seq, + rowVersion: scenario.rowVersion, + mutations: [ + { + type: `insert`, + key: scenario.todo.id, + value: scenario.todo, + }, + ], + }) + return { type: `writeTodo` } + } + + case `loadTodos`: { + const rows = await adapter.loadSubset( + scenario.collectionId ?? input.collectionId, + {}, + ) + return { + type: `loadTodos`, + rows: rows.map((row) => ({ + key: String(row.key), + value: { + id: String(row.value?.id ?? ``), + title: String(row.value?.title ?? ``), + score: Number(row.value?.score ?? 
0), + }, + })), + } + } + + case `loadUnknownCollectionError`: { + try { + await adapter.loadSubset(scenario.collectionId, {}) + return { + type: `loadUnknownCollectionError`, + error: { + name: `Error`, + message: `Expected unknown collection error but operation succeeded`, + }, + } + } catch (error) { + if (error instanceof Error) { + return { + type: `loadUnknownCollectionError`, + error: { + name: error.name, + message: error.message, + code: + `code` in error && typeof error.code === `string` + ? error.code + : undefined, + }, + } + } + + return { + type: `loadUnknownCollectionError`, + error: { + name: `Error`, + message: `Unknown error type`, + }, + } + } + } + + case `invokeRequest`: { + const response = await invokeBridge( + input.channel ?? DEFAULT_ELECTRON_PERSISTENCE_CHANNEL, + scenario.request, + ) + return { + type: `invokeRequest`, + response, + } + } + + default: + throw new Error(`Unsupported electron runtime bridge scenario`) + } +} + +function runScenarioFromBase64(serializedInputBase64) { + const serializedInputBuffer = Buffer.from(serializedInputBase64, `base64`) + const input = deserialize(serializedInputBuffer) + return runScenario(input) +} + +contextBridge.exposeInMainWorld(`__tanstackDbRuntimeBridge__`, { + runScenario, + runScenarioFromBase64, +}) diff --git a/packages/db-electron-sqlite-persisted-collection/tests/e2e/fixtures/runtime-bridge-types.ts b/packages/db-electron-sqlite-persisted-collection/tests/e2e/fixtures/runtime-bridge-types.ts new file mode 100644 index 000000000..3ec1283af --- /dev/null +++ b/packages/db-electron-sqlite-persisted-collection/tests/e2e/fixtures/runtime-bridge-types.ts @@ -0,0 +1,93 @@ +import type { + RuntimeBridgeE2EContractError, + RuntimeBridgeE2EContractTodo, +} from '../../../../db-sqlite-persisted-collection-core/tests/contracts/runtime-bridge-e2e-contract' +import type { SQLiteCoreAdapterOptions } from '@tanstack/db-sqlite-persisted-collection-core' +import type { + 
ElectronPersistenceRequestEnvelope, + ElectronPersistenceResponseEnvelope, +} from '../../../src/protocol' + +export const E2E_RESULT_PREFIX = `__TANSTACK_DB_E2E_RESULT__:` +export const E2E_RESULT_BASE64_PREFIX = `__TANSTACK_DB_E2E_RESULT_BASE64__:` + +export type ElectronRuntimeBridgeHostKind = `core-host` | `node-registry` + +export type ElectronRuntimeBridgeAdapterOptions = Omit< + SQLiteCoreAdapterOptions, + `driver` +> + +export type ElectronRuntimeBridgeScenario = + | { + type: `noop` + } + | { + type: `writeTodo` + todo: RuntimeBridgeE2EContractTodo + txId: string + seq: number + rowVersion: number + } + | { + type: `loadTodos` + collectionId?: string + } + | { + type: `loadUnknownCollectionError` + collectionId: string + } + | { + type: `invokeRequest` + request: ElectronPersistenceRequestEnvelope + } + +export type ElectronRuntimeBridgeInput = { + dbPath: string + collectionId: string + allowAnyCollectionId?: boolean + hostKind?: ElectronRuntimeBridgeHostKind + adapterOptions?: ElectronRuntimeBridgeAdapterOptions + channel?: string + timeoutMs?: number + scenario: ElectronRuntimeBridgeScenario +} + +export type ElectronRuntimeBridgeScenarioResult = + | { + type: `noop` + } + | { + type: `writeTodo` + } + | { + type: `loadTodos` + rows: Array<{ + key: string + value: RuntimeBridgeE2EContractTodo + }> + } + | { + type: `loadUnknownCollectionError` + error: RuntimeBridgeE2EContractError + } + | { + type: `invokeRequest` + response: ElectronPersistenceResponseEnvelope + } + +export type ElectronRuntimeBridgeProcessError = { + name: string + message: string + stack?: string +} + +export type ElectronRuntimeBridgeProcessResult = + | { + ok: true + result: ElectronRuntimeBridgeScenarioResult + } + | { + ok: false + error: ElectronRuntimeBridgeProcessError + } diff --git a/packages/db-electron-sqlite-persisted-collection/tests/electron-ipc.test-d.ts b/packages/db-electron-sqlite-persisted-collection/tests/electron-ipc.test-d.ts new file mode 100644 index 
000000000..21648b29a --- /dev/null +++ b/packages/db-electron-sqlite-persisted-collection/tests/electron-ipc.test-d.ts @@ -0,0 +1,61 @@ +import { expectTypeOf, test } from 'vitest' +import { createElectronSQLitePersistence } from '../src' +import type { ElectronPersistenceInvoke } from '../src/protocol' + +test(`renderer persistence requires invoke transport`, () => { + const invoke: ElectronPersistenceInvoke = (_channel, request) => { + switch (request.method) { + case `loadSubset`: + return Promise.resolve({ + v: 1, + requestId: request.requestId, + method: request.method, + ok: true, + result: [], + }) + case `pullSince`: + return Promise.resolve({ + v: 1, + requestId: request.requestId, + method: request.method, + ok: true, + result: { + latestRowVersion: 0, + requiresFullReload: true, + }, + }) + case `getStreamPosition`: + return Promise.resolve({ + v: 1, + requestId: request.requestId, + method: request.method, + ok: true, + result: { + latestTerm: 0, + latestSeq: 0, + latestRowVersion: 0, + }, + }) + default: + return Promise.resolve({ + v: 1, + requestId: request.requestId, + method: request.method, + ok: true, + result: null, + }) + } + } + + const persistence = createElectronSQLitePersistence({ + invoke, + }) + + expectTypeOf(persistence.adapter).toHaveProperty(`loadSubset`) + + createElectronSQLitePersistence({ + invoke, + // @ts-expect-error renderer-side persistence must use invoke transport, not a direct driver + driver: {}, + }) +}) diff --git a/packages/db-electron-sqlite-persisted-collection/tests/electron-ipc.test.ts b/packages/db-electron-sqlite-persisted-collection/tests/electron-ipc.test.ts new file mode 100644 index 000000000..a97131d1b --- /dev/null +++ b/packages/db-electron-sqlite-persisted-collection/tests/electron-ipc.test.ts @@ -0,0 +1,391 @@ +import { mkdtempSync, rmSync } from 'node:fs' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { afterEach, describe, expect, it } from 'vitest' +import { 
InvalidPersistedCollectionConfigError } from '@tanstack/db-sqlite-persisted-collection-core' +import { createNodeSQLitePersistence } from '@tanstack/db-node-sqlite-persisted-collection' +import { BetterSqlite3SQLiteDriver } from '../../db-node-sqlite-persisted-collection/src/node-driver' +import { + createElectronSQLitePersistence, + exposeElectronSQLitePersistence, +} from '../src' +import { + DEFAULT_ELECTRON_PERSISTENCE_CHANNEL, + ELECTRON_PERSISTENCE_PROTOCOL_VERSION, +} from '../src/protocol' +import { + createElectronRuntimeBridgeInvoke, + isElectronFullE2EEnabled, +} from './e2e/electron-process-client' +import type { PersistedCollectionPersistence } from '@tanstack/db-sqlite-persisted-collection-core' +import type { + ElectronPersistenceInvoke, + ElectronPersistenceRequestEnvelope, + ElectronPersistenceResponseEnvelope, +} from '../src/protocol' + +type Todo = { + id: string + title: string + score: number +} + +type InvokeHarness = { + invoke: ElectronPersistenceInvoke + close: () => void +} + +type ElectronMainPersistence = PersistedCollectionPersistence< + Record, + string | number +> + +const electronRuntimeBridgeTimeoutMs = isElectronFullE2EEnabled() + ? 
45_000 + : 4_000 + +function createFilteredPersistence( + collectionId: string, + allowAnyCollectionId: boolean, + persistence: ElectronMainPersistence, +): ElectronMainPersistence { + if (allowAnyCollectionId) { + return persistence + } + + const baseAdapter = persistence.adapter + const assertKnownCollection = (requestedCollectionId: string) => { + if (requestedCollectionId !== collectionId) { + const error = new Error( + `Unknown electron persistence collection "${requestedCollectionId}"`, + ) + error.name = `UnknownElectronPersistenceCollectionError` + ;(error as Error & { code?: string }).code = `UNKNOWN_COLLECTION` + throw error + } + } + + const adapter: ElectronMainPersistence[`adapter`] = { + loadSubset: (requestedCollectionId, options, ctx) => { + assertKnownCollection(requestedCollectionId) + return baseAdapter.loadSubset(requestedCollectionId, options, ctx) + }, + applyCommittedTx: (requestedCollectionId, tx) => { + assertKnownCollection(requestedCollectionId) + return baseAdapter.applyCommittedTx(requestedCollectionId, tx) + }, + ensureIndex: (requestedCollectionId, signature, spec) => { + assertKnownCollection(requestedCollectionId) + return baseAdapter.ensureIndex(requestedCollectionId, signature, spec) + }, + markIndexRemoved: (requestedCollectionId, signature) => { + assertKnownCollection(requestedCollectionId) + if (!baseAdapter.markIndexRemoved) { + return Promise.resolve() + } + return baseAdapter.markIndexRemoved(requestedCollectionId, signature) + }, + } + + return { + coordinator: persistence.coordinator, + adapter, + } +} + +function createInvokeHarness( + dbPath: string, + collectionId: string, + allowAnyCollectionId: boolean = true, +): InvokeHarness { + if (isElectronFullE2EEnabled()) { + return { + invoke: createElectronRuntimeBridgeInvoke({ + dbPath, + collectionId, + allowAnyCollectionId, + timeoutMs: electronRuntimeBridgeTimeoutMs, + }), + close: () => {}, + } + } + + const driver = new BetterSqlite3SQLiteDriver({ filename: dbPath }) 
+ const persistence = createNodeSQLitePersistence< + Record, + string | number + >({ + database: driver.getDatabase(), + }) + const filteredPersistence = createFilteredPersistence( + collectionId, + allowAnyCollectionId, + persistence, + ) + + let handler: + | (( + event: unknown, + request: ElectronPersistenceRequestEnvelope, + ) => Promise) + | undefined + + const ipcMainLike = { + handle: ( + _channel: string, + listener: ( + event: unknown, + request: ElectronPersistenceRequestEnvelope, + ) => Promise, + ) => { + handler = listener + }, + removeHandler: () => {}, + } + const dispose = exposeElectronSQLitePersistence({ + ipcMain: ipcMainLike, + persistence: filteredPersistence, + }) + + return { + invoke: async (_channel, request) => { + if (!handler) { + throw new Error(`Electron IPC handler was not registered`) + } + return handler(undefined, request) + }, + close: () => { + dispose() + driver.close() + }, + } +} + +const activeCleanupFns: Array<() => void> = [] + +afterEach(() => { + while (activeCleanupFns.length > 0) { + const cleanupFn = activeCleanupFns.pop() + cleanupFn?.() + } +}) + +function createTempDbPath(): string { + const tempDirectory = mkdtempSync(join(tmpdir(), `db-electron-ipc-`)) + const dbPath = join(tempDirectory, `state.sqlite`) + activeCleanupFns.push(() => { + rmSync(tempDirectory, { recursive: true, force: true }) + }) + return dbPath +} + +describe(`electron sqlite persistence bridge`, () => { + it(`round-trips reads and writes through main process`, async () => { + const dbPath = createTempDbPath() + const invokeHarness = createInvokeHarness(dbPath, `todos`) + activeCleanupFns.push(() => invokeHarness.close()) + + const rendererPersistence = createElectronSQLitePersistence({ + invoke: async (channel, request) => { + expect(channel).toBe(DEFAULT_ELECTRON_PERSISTENCE_CHANNEL) + return invokeHarness.invoke(channel, request) + }, + timeoutMs: electronRuntimeBridgeTimeoutMs, + }) + + await 
rendererPersistence.adapter.applyCommittedTx(`todos`, { + txId: `tx-1`, + term: 1, + seq: 1, + rowVersion: 1, + mutations: [ + { + type: `insert`, + key: `1`, + value: { + id: `1`, + title: `From renderer`, + score: 10, + }, + }, + ], + }) + + const rows = await rendererPersistence.adapter.loadSubset(`todos`, {}) + expect(rows).toEqual([ + { + key: `1`, + value: { + id: `1`, + title: `From renderer`, + score: 10, + }, + }, + ]) + }) + + it(`persists data across main process restarts`, async () => { + const dbPath = createTempDbPath() + + if (isElectronFullE2EEnabled()) { + const invoke = createElectronRuntimeBridgeInvoke({ + dbPath, + collectionId: `todos`, + timeoutMs: electronRuntimeBridgeTimeoutMs, + }) + const rendererPersistence = createElectronSQLitePersistence( + { + invoke, + timeoutMs: electronRuntimeBridgeTimeoutMs, + }, + ) + + await rendererPersistence.adapter.applyCommittedTx(`todos`, { + txId: `tx-restart-1`, + term: 1, + seq: 1, + rowVersion: 1, + mutations: [ + { + type: `insert`, + key: `persisted`, + value: { + id: `persisted`, + title: `Survives restart`, + score: 42, + }, + }, + ], + }) + + const rows = await rendererPersistence.adapter.loadSubset(`todos`, {}) + expect(rows[0]?.value.title).toBe(`Survives restart`) + return + } + + const invokeHarnessA = createInvokeHarness(dbPath, `todos`) + const rendererPersistenceA = createElectronSQLitePersistence({ + invoke: invokeHarnessA.invoke, + timeoutMs: electronRuntimeBridgeTimeoutMs, + }) + await rendererPersistenceA.adapter.applyCommittedTx(`todos`, { + txId: `tx-restart-1`, + term: 1, + seq: 1, + rowVersion: 1, + mutations: [ + { + type: `insert`, + key: `persisted`, + value: { + id: `persisted`, + title: `Survives restart`, + score: 42, + }, + }, + ], + }) + invokeHarnessA.close() + + const invokeHarnessB = createInvokeHarness(dbPath, `todos`) + activeCleanupFns.push(() => invokeHarnessB.close()) + const rendererPersistenceB = createElectronSQLitePersistence({ + invoke: invokeHarnessB.invoke, + 
timeoutMs: electronRuntimeBridgeTimeoutMs, + }) + const rows = await rendererPersistenceB.adapter.loadSubset(`todos`, {}) + expect(rows[0]?.value.title).toBe(`Survives restart`) + }) + + it(`returns deterministic timeout errors`, async () => { + const neverInvoke: ElectronPersistenceInvoke = async () => + await new Promise(() => {}) + + const rendererPersistence = createElectronSQLitePersistence({ + invoke: neverInvoke, + timeoutMs: 5, + }) + + await expect( + rendererPersistence.adapter.loadSubset(`todos`, {}), + ).rejects.toBeInstanceOf(InvalidPersistedCollectionConfigError) + }) + + it(`returns remote errors for unknown collections`, async () => { + const dbPath = createTempDbPath() + const invokeHarness = createInvokeHarness(dbPath, `known`, false) + activeCleanupFns.push(() => invokeHarness.close()) + const rendererPersistence = createElectronSQLitePersistence({ + invoke: invokeHarness.invoke, + timeoutMs: electronRuntimeBridgeTimeoutMs, + }) + + await expect( + rendererPersistence.adapter.loadSubset(`missing`, {}), + ).rejects.toThrow(`Unknown electron persistence collection`) + }) + + it(`registers and unregisters ipc handlers through thin api`, async () => { + let registeredChannel: string | undefined + let registeredHandler: + | (( + event: unknown, + request: ElectronPersistenceRequestEnvelope, + ) => Promise) + | undefined + const removedChannels: Array = [] + + const fakeIpcMain = { + handle: ( + channel: string, + handler: ( + event: unknown, + request: ElectronPersistenceRequestEnvelope, + ) => Promise, + ) => { + registeredChannel = channel + registeredHandler = handler + }, + removeHandler: (channel: string) => { + removedChannels.push(channel) + }, + } + + const driver = new BetterSqlite3SQLiteDriver({ + filename: createTempDbPath(), + }) + activeCleanupFns.push(() => driver.close()) + const persistence = createNodeSQLitePersistence< + Record, + string | number + >({ + database: driver.getDatabase(), + }) + + const dispose = 
exposeElectronSQLitePersistence({ + ipcMain: fakeIpcMain, + persistence, + }) + + expect(registeredChannel).toBe(DEFAULT_ELECTRON_PERSISTENCE_CHANNEL) + expect(registeredHandler).toBeDefined() + + const response = await registeredHandler?.(undefined, { + v: ELECTRON_PERSISTENCE_PROTOCOL_VERSION, + requestId: `req-1`, + collectionId: `todos`, + method: `loadSubset`, + payload: { + options: {}, + }, + }) + expect(response).toMatchObject({ + ok: true, + requestId: `req-1`, + method: `loadSubset`, + }) + + dispose() + expect(removedChannels).toEqual([DEFAULT_ELECTRON_PERSISTENCE_CHANNEL]) + }) +}) diff --git a/packages/db-electron-sqlite-persisted-collection/tests/electron-persisted-collection.e2e.test.ts b/packages/db-electron-sqlite-persisted-collection/tests/electron-persisted-collection.e2e.test.ts new file mode 100644 index 000000000..16b91a038 --- /dev/null +++ b/packages/db-electron-sqlite-persisted-collection/tests/electron-persisted-collection.e2e.test.ts @@ -0,0 +1,351 @@ +import { mkdtempSync, rmSync } from 'node:fs' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { afterAll, afterEach, beforeAll } from 'vitest' +import { createCollection } from '@tanstack/db' +import { persistedCollectionOptions } from '@tanstack/db-sqlite-persisted-collection-core' +import { createNodeSQLitePersistence } from '@tanstack/db-node-sqlite-persisted-collection' +import { BetterSqlite3SQLiteDriver } from '../../db-node-sqlite-persisted-collection/src/node-driver' +import { + createElectronSQLitePersistence, + exposeElectronSQLitePersistence, +} from '../src' +import { generateSeedData } from '../../db-collection-e2e/src/fixtures/seed-data' +import { runPersistedCollectionConformanceSuite } from '../../db-sqlite-persisted-collection-core/tests/contracts/persisted-collection-conformance-contract' +import { + createElectronRuntimeBridgeInvoke, + isElectronFullE2EEnabled, +} from './e2e/electron-process-client' +import type { PersistedTx } from 
'@tanstack/db-sqlite-persisted-collection-core' +import type { Collection } from '@tanstack/db' +import type { + ElectronPersistenceInvoke, + ElectronPersistenceRequestEnvelope, + ElectronPersistenceResponseEnvelope, +} from '../src/protocol' +import type { + Comment, + E2ETestConfig, + Post, + User, +} from '../../db-collection-e2e/src/types' + +type PersistableRow = { + id: string +} + +type ElectronPersistedCollectionTestConfig = E2ETestConfig +type PersistedCollectionHarness = { + collection: Collection + seedPersisted: (rows: Array) => Promise +} + +type InvokeHarness = { + invoke: ElectronPersistenceInvoke + close: () => void +} + +let config: ElectronPersistedCollectionTestConfig | undefined +const runFullE2E = isElectronFullE2EEnabled() +const requestTimeoutMs = runFullE2E ? 45_000 : 5_000 + +function createInvokeHarness(dbPath: string): InvokeHarness { + if (runFullE2E) { + return { + invoke: createElectronRuntimeBridgeInvoke({ + dbPath, + collectionId: `seed`, + allowAnyCollectionId: true, + timeoutMs: requestTimeoutMs, + }), + close: () => {}, + } + } + + const driver = new BetterSqlite3SQLiteDriver({ filename: dbPath }) + const persistence = createNodeSQLitePersistence< + Record, + string | number + >({ + database: driver.getDatabase(), + }) + let handler: + | (( + event: unknown, + request: ElectronPersistenceRequestEnvelope, + ) => Promise) + | undefined + const dispose = exposeElectronSQLitePersistence({ + ipcMain: { + handle: (_channel, listener) => { + handler = listener + }, + removeHandler: () => {}, + }, + persistence, + }) + + return { + invoke: async (_channel, request) => { + if (!handler) { + throw new Error(`Electron IPC handler not registered`) + } + return handler(undefined, request) + }, + close: () => { + dispose() + driver.close() + }, + } +} + +function createSeedTx( + collectionId: string, + seedSequence: number, + rows: Array, +): PersistedTx { + return { + txId: `seed-${collectionId}-${seedSequence}`, + term: 1, + seq: 
seedSequence, + rowVersion: seedSequence, + mutations: rows.map((row) => ({ + type: `insert` as const, + key: row.id, + value: row, + })), + } +} + +function createPersistedCollection( + invoke: ElectronPersistenceInvoke, + id: string, + syncMode: `eager` | `on-demand`, +): PersistedCollectionHarness { + const persistence = createElectronSQLitePersistence({ + invoke, + timeoutMs: requestTimeoutMs, + }) + let seedSequence = 0 + const seedPersisted = async (rows: Array): Promise => { + if (rows.length === 0) { + return + } + seedSequence++ + await persistence.adapter.applyCommittedTx( + id, + createSeedTx(id, seedSequence, rows), + ) + } + + const collection = createCollection( + persistedCollectionOptions({ + id, + syncMode, + getKey: (item) => item.id, + persistence, + }), + ) + + return { + collection, + seedPersisted, + } +} + +type PersistedTransactionHandle = { + isPersisted: { + promise: Promise + } +} + +async function waitForPersisted( + transaction: PersistedTransactionHandle, +): Promise { + await transaction.isPersisted.promise +} + +async function seedCollection( + collection: Collection, + rows: Array, +): Promise { + const tx = collection.insert(rows) + await waitForPersisted(tx) +} + +async function insertRowIntoCollections( + collections: ReadonlyArray>, + row: T, +): Promise { + for (const collection of collections) { + const tx = collection.insert(row) + await waitForPersisted(tx) + } +} + +async function updateRowAcrossCollections( + collections: ReadonlyArray>, + id: string, + updates: Partial, +): Promise { + for (const collection of collections) { + if (!collection.has(id)) { + continue + } + const tx = collection.update(id, (draft) => { + Object.assign(draft, updates) + }) + await waitForPersisted(tx) + } +} + +async function deleteRowAcrossCollections( + collections: ReadonlyArray>, + id: string, +): Promise { + for (const collection of collections) { + if (!collection.has(id)) { + continue + } + const tx = collection.delete(id) + await 
waitForPersisted(tx) + } +} + +beforeAll(async () => { + const tempDirectory = mkdtempSync( + join(tmpdir(), `db-electron-persisted-e2e-`), + ) + const dbPath = join(tempDirectory, `state.sqlite`) + const suiteId = Date.now().toString(36) + const invokeHarness = createInvokeHarness(dbPath) + const seedData = generateSeedData() + + const eagerUsers = createPersistedCollection( + invokeHarness.invoke, + `electron-persisted-users-eager-${suiteId}`, + `eager`, + ) + const eagerPosts = createPersistedCollection( + invokeHarness.invoke, + `electron-persisted-posts-eager-${suiteId}`, + `eager`, + ) + const eagerComments = createPersistedCollection( + invokeHarness.invoke, + `electron-persisted-comments-eager-${suiteId}`, + `eager`, + ) + const onDemandUsers = createPersistedCollection( + invokeHarness.invoke, + `electron-persisted-users-ondemand-${suiteId}`, + `on-demand`, + ) + const onDemandPosts = createPersistedCollection( + invokeHarness.invoke, + `electron-persisted-posts-ondemand-${suiteId}`, + `on-demand`, + ) + const onDemandComments = createPersistedCollection( + invokeHarness.invoke, + `electron-persisted-comments-ondemand-${suiteId}`, + `on-demand`, + ) + + await eagerUsers.collection.preload() + await eagerPosts.collection.preload() + await eagerComments.collection.preload() + + await seedCollection(eagerUsers.collection, seedData.users) + await seedCollection(eagerPosts.collection, seedData.posts) + await seedCollection(eagerComments.collection, seedData.comments) + await onDemandUsers.seedPersisted(seedData.users) + await onDemandPosts.seedPersisted(seedData.posts) + await onDemandComments.seedPersisted(seedData.comments) + + config = { + collections: { + eager: { + users: eagerUsers.collection, + posts: eagerPosts.collection, + comments: eagerComments.collection, + }, + onDemand: { + users: onDemandUsers.collection, + posts: onDemandPosts.collection, + comments: onDemandComments.collection, + }, + }, + mutations: { + insertUser: async (user) => + 
insertRowIntoCollections( + [eagerUsers.collection, onDemandUsers.collection], + user, + ), + updateUser: async (id, updates) => + updateRowAcrossCollections( + [eagerUsers.collection, onDemandUsers.collection], + id, + updates, + ), + deleteUser: async (id) => + deleteRowAcrossCollections( + [eagerUsers.collection, onDemandUsers.collection], + id, + ), + insertPost: async (post) => + insertRowIntoCollections( + [eagerPosts.collection, onDemandPosts.collection], + post, + ), + }, + setup: async () => {}, + afterEach: async () => { + await onDemandUsers.collection.cleanup() + await onDemandPosts.collection.cleanup() + await onDemandComments.collection.cleanup() + + onDemandUsers.collection.startSyncImmediate() + onDemandPosts.collection.startSyncImmediate() + onDemandComments.collection.startSyncImmediate() + }, + teardown: async () => { + await eagerUsers.collection.cleanup() + await eagerPosts.collection.cleanup() + await eagerComments.collection.cleanup() + await onDemandUsers.collection.cleanup() + await onDemandPosts.collection.cleanup() + await onDemandComments.collection.cleanup() + invokeHarness.close() + rmSync(tempDirectory, { recursive: true, force: true }) + }, + } +}) + +afterEach(async () => { + if (config?.afterEach) { + await config.afterEach() + } +}) + +afterAll(async () => { + if (config) { + await config.teardown() + } +}) + +function getConfig(): Promise { + if (!config) { + throw new Error( + `Electron persisted collection conformance is not initialized`, + ) + } + return Promise.resolve(config) +} + +const conformanceMode = runFullE2E ? 
`real electron ipc` : `in-process invoke` + +runPersistedCollectionConformanceSuite( + `electron persisted collection conformance (${conformanceMode})`, + getConfig, +) diff --git a/packages/db-electron-sqlite-persisted-collection/tests/electron-runtime-bridge.e2e.test.ts b/packages/db-electron-sqlite-persisted-collection/tests/electron-runtime-bridge.e2e.test.ts new file mode 100644 index 000000000..ae163bbd4 --- /dev/null +++ b/packages/db-electron-sqlite-persisted-collection/tests/electron-runtime-bridge.e2e.test.ts @@ -0,0 +1,93 @@ +import { mkdtempSync, rmSync } from 'node:fs' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { runRuntimeBridgeE2EContractSuite } from '../../db-sqlite-persisted-collection-core/tests/contracts/runtime-bridge-e2e-contract' +import { runElectronRuntimeBridgeScenario } from './e2e/electron-process-client' +import type { + RuntimeBridgeE2EContractError, + RuntimeBridgeE2EContractHarness, + RuntimeBridgeE2EContractHarnessFactory, + RuntimeBridgeE2EContractTodo, +} from '../../db-sqlite-persisted-collection-core/tests/contracts/runtime-bridge-e2e-contract' +import type { + ElectronRuntimeBridgeInput, + ElectronRuntimeBridgeScenarioResult, +} from './e2e/fixtures/runtime-bridge-types' + +const createHarness: RuntimeBridgeE2EContractHarnessFactory = () => { + const tempDirectory = mkdtempSync( + join(tmpdir(), `db-electron-runtime-bridge-`), + ) + const dbPath = join(tempDirectory, `state.sqlite`) + const collectionId = `todos` + let nextSequence = 1 + + const runScenario = async ( + scenario: ElectronRuntimeBridgeInput[`scenario`], + ): Promise => + runElectronRuntimeBridgeScenario({ + dbPath, + collectionId, + timeoutMs: 4_000, + scenario, + }) + + const harness: RuntimeBridgeE2EContractHarness = { + writeTodoFromClient: async (todo: RuntimeBridgeE2EContractTodo) => { + const result = await runScenario({ + type: `writeTodo`, + todo, + txId: `tx-${nextSequence}`, + seq: nextSequence, + rowVersion: 
nextSequence, + }) + nextSequence++ + + if (result.type !== `writeTodo`) { + throw new Error(`Unexpected write scenario result: ${result.type}`) + } + }, + loadTodosFromClient: async (targetCollectionId?: string) => { + const result = await runScenario({ + type: `loadTodos`, + collectionId: targetCollectionId, + }) + if (result.type !== `loadTodos`) { + throw new Error(`Unexpected load scenario result: ${result.type}`) + } + return result.rows + }, + loadUnknownCollectionErrorFromClient: + async (): Promise => { + const result = await runScenario({ + type: `loadUnknownCollectionError`, + collectionId: `missing`, + }) + if (result.type !== `loadUnknownCollectionError`) { + throw new Error(`Unexpected error scenario result: ${result.type}`) + } + return result.error + }, + restartHost: async () => { + const result = await runScenario({ + type: `noop`, + }) + if (result.type !== `noop`) { + throw new Error(`Unexpected restart scenario result: ${result.type}`) + } + }, + cleanup: () => { + rmSync(tempDirectory, { recursive: true, force: true }) + }, + } + + return harness +} + +runRuntimeBridgeE2EContractSuite( + `electron runtime bridge e2e (real main/renderer IPC)`, + createHarness, + { + testTimeoutMs: 45_000, + }, +) diff --git a/packages/db-electron-sqlite-persisted-collection/tests/electron-sqlite-core-adapter-contract.test.ts b/packages/db-electron-sqlite-persisted-collection/tests/electron-sqlite-core-adapter-contract.test.ts new file mode 100644 index 000000000..22651f0e3 --- /dev/null +++ b/packages/db-electron-sqlite-persisted-collection/tests/electron-sqlite-core-adapter-contract.test.ts @@ -0,0 +1,115 @@ +import { mkdtempSync, rmSync } from 'node:fs' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { createSQLiteCorePersistenceAdapter } from '@tanstack/db-sqlite-persisted-collection-core' +import { runSQLiteCoreAdapterContractSuite } from '../../db-sqlite-persisted-collection-core/tests/contracts/sqlite-core-adapter-contract' 
+import { BetterSqlite3SQLiteDriver } from '../../db-node-sqlite-persisted-collection/src/node-driver' +import { + createElectronSQLitePersistence, + exposeElectronSQLitePersistence, +} from '../src' +import { + createElectronRuntimeBridgeInvoke, + isElectronFullE2EEnabled, +} from './e2e/electron-process-client' +import type { + SQLiteCoreAdapterContractTodo, + SQLiteCoreAdapterHarnessFactory, +} from '../../db-sqlite-persisted-collection-core/tests/contracts/sqlite-core-adapter-contract' +import type { + ElectronPersistenceInvoke, + ElectronPersistenceResponseEnvelope, +} from '../src/protocol' + +const createHarness: SQLiteCoreAdapterHarnessFactory = (options) => { + const tempDirectory = mkdtempSync(join(tmpdir(), `db-electron-contract-`)) + const dbPath = join(tempDirectory, `state.sqlite`) + const runFullE2E = isElectronFullE2EEnabled() + const requestTimeoutMs = runFullE2E ? 45_000 : 2_000 + const driver = new BetterSqlite3SQLiteDriver({ + filename: dbPath, + pragmas: runFullE2E + ? 
[`journal_mode = DELETE`, `synchronous = NORMAL`, `foreign_keys = ON`] + : undefined, + }) + + let invoke: ElectronPersistenceInvoke + let cleanupInvoke: () => void = () => {} + if (runFullE2E) { + invoke = createElectronRuntimeBridgeInvoke({ + dbPath, + collectionId: `todos`, + allowAnyCollectionId: true, + timeoutMs: requestTimeoutMs, + adapterOptions: options, + }) + } else { + const mainAdapter = createSQLiteCorePersistenceAdapter< + Record, + string | number + >({ + driver, + ...options, + }) + let handler: + | ((event: unknown, request: unknown) => Promise) + | undefined + const dispose = exposeElectronSQLitePersistence({ + ipcMain: { + handle: (_channel, listener) => { + handler = listener as ( + event: unknown, + request: unknown, + ) => Promise + }, + removeHandler: () => {}, + }, + persistence: { + adapter: mainAdapter, + }, + }) + invoke = async (_channel, request) => { + if (!handler) { + throw new Error(`Electron IPC handler not registered`) + } + return handler( + undefined, + request, + ) as Promise + } + cleanupInvoke = () => dispose() + } + + const rendererAdapter = createElectronSQLitePersistence< + SQLiteCoreAdapterContractTodo, + string + >({ + invoke, + timeoutMs: requestTimeoutMs, + }).adapter as ReturnType[`adapter`] + + return { + adapter: rendererAdapter, + driver, + cleanup: () => { + try { + cleanupInvoke() + } finally { + try { + driver.close() + } finally { + rmSync(tempDirectory, { recursive: true, force: true }) + } + } + }, + } +} + +const electronContractMode = isElectronFullE2EEnabled() + ? 
`real electron e2e invoke` + : `in-process invoke` + +runSQLiteCoreAdapterContractSuite( + `SQLiteCorePersistenceAdapter contract over electron IPC bridge (${electronContractMode})`, + createHarness, +) diff --git a/packages/db-electron-sqlite-persisted-collection/tsconfig.docs.json b/packages/db-electron-sqlite-persisted-collection/tsconfig.docs.json new file mode 100644 index 000000000..3fd384ad1 --- /dev/null +++ b/packages/db-electron-sqlite-persisted-collection/tsconfig.docs.json @@ -0,0 +1,15 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "paths": { + "@tanstack/db": ["../db/src"], + "@tanstack/db-node-sqlite-persisted-collection": [ + "../db-node-sqlite-persisted-collection/src" + ], + "@tanstack/db-sqlite-persisted-collection-core": [ + "../db-sqlite-persisted-collection-core/src" + ] + } + }, + "include": ["src"] +} diff --git a/packages/db-electron-sqlite-persisted-collection/tsconfig.json b/packages/db-electron-sqlite-persisted-collection/tsconfig.json new file mode 100644 index 000000000..b07723d02 --- /dev/null +++ b/packages/db-electron-sqlite-persisted-collection/tsconfig.json @@ -0,0 +1,33 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "target": "ES2020", + "module": "ESNext", + "moduleResolution": "Bundler", + "declaration": true, + "outDir": "dist", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "jsx": "react", + "paths": { + "@tanstack/db": ["../db/src"], + "@tanstack/db-ivm": ["../db-ivm/src"], + "@tanstack/db-node-sqlite-persisted-collection": [ + "../db-node-sqlite-persisted-collection/src" + ], + "@tanstack/db-sqlite-persisted-collection-core": [ + "../db-sqlite-persisted-collection-core/src" + ] + } + }, + "include": [ + "src", + "tests/**/*.test.ts", + "tests/**/*.test-d.ts", + "vite.config.ts", + "vitest.e2e.config.ts" + ], + "exclude": ["node_modules", "dist"] +} diff --git 
a/packages/db-electron-sqlite-persisted-collection/vite.config.ts b/packages/db-electron-sqlite-persisted-collection/vite.config.ts new file mode 100644 index 000000000..872ef9ccf --- /dev/null +++ b/packages/db-electron-sqlite-persisted-collection/vite.config.ts @@ -0,0 +1,30 @@ +import { defineConfig, mergeConfig } from 'vitest/config' +import { tanstackViteConfig } from '@tanstack/vite-config' +import packageJson from './package.json' + +const runElectronFullE2E = process.env.TANSTACK_DB_ELECTRON_E2E_ALL === `1` + +const config = defineConfig({ + test: { + name: packageJson.name, + include: [`tests/**/*.test.ts`], + exclude: [`tests/**/*.e2e.test.ts`], + environment: `node`, + fileParallelism: !runElectronFullE2E, + testTimeout: runElectronFullE2E ? 120_000 : undefined, + hookTimeout: runElectronFullE2E ? 180_000 : undefined, + coverage: { enabled: true, provider: `istanbul`, include: [`src/**/*`] }, + typecheck: { + enabled: true, + include: [`tests/**/*.test.ts`, `tests/**/*.test-d.ts`], + }, + }, +}) + +export default mergeConfig( + config, + tanstackViteConfig({ + entry: [`./src/index.ts`, `./src/main.ts`, `./src/renderer.ts`], + srcDir: `./src`, + }), +) diff --git a/packages/db-electron-sqlite-persisted-collection/vitest.e2e.config.ts b/packages/db-electron-sqlite-persisted-collection/vitest.e2e.config.ts new file mode 100644 index 000000000..70fe559f0 --- /dev/null +++ b/packages/db-electron-sqlite-persisted-collection/vitest.e2e.config.ts @@ -0,0 +1,35 @@ +import { dirname, resolve } from 'node:path' +import { fileURLToPath } from 'node:url' +import { defineConfig } from 'vitest/config' + +const packageDirectory = dirname(fileURLToPath(import.meta.url)) + +export default defineConfig({ + resolve: { + alias: { + '@tanstack/db': resolve(packageDirectory, `../db/src`), + '@tanstack/db-ivm': resolve(packageDirectory, `../db-ivm/src`), + '@tanstack/db-node-sqlite-persisted-collection': resolve( + packageDirectory, + 
`../db-node-sqlite-persisted-collection/src`, + ), + '@tanstack/db-sqlite-persisted-collection-core': resolve( + packageDirectory, + `../db-sqlite-persisted-collection-core/src`, + ), + }, + }, + test: { + include: [`tests/**/*.e2e.test.ts`], + environment: `node`, + fileParallelism: false, + testTimeout: 60_000, + hookTimeout: 120_000, + typecheck: { + enabled: false, + }, + coverage: { + enabled: false, + }, + }, +}) diff --git a/packages/db-node-sqlite-persisted-collection/README.md b/packages/db-node-sqlite-persisted-collection/README.md new file mode 100644 index 000000000..d13b9224b --- /dev/null +++ b/packages/db-node-sqlite-persisted-collection/README.md @@ -0,0 +1,49 @@ +# @tanstack/db-node-sqlite-persisted-collection + +Thin Node SQLite persistence for TanStack DB. + +## Public API + +- `createNodeSQLitePersistence(...)` +- `persistedCollectionOptions(...)` (re-exported from core) + +## Quick start + +```ts +import { createCollection } from '@tanstack/db' +import { + createNodeSQLitePersistence, + persistedCollectionOptions, +} from '@tanstack/db-node-sqlite-persisted-collection' +import Database from 'better-sqlite3' + +type Todo = { + id: string + title: string + completed: boolean +} + +// You own database lifecycle directly. +const database = new Database(`./tanstack-db.sqlite`) + +// One shared persistence instance for the whole database. +const persistence = createNodeSQLitePersistence({ + database, +}) + +export const todosCollection = createCollection( + persistedCollectionOptions({ + id: `todos`, + getKey: (todo) => todo.id, + persistence, + schemaVersion: 1, // Per-collection schema version + }), +) +``` + +## Notes + +- `createNodeSQLitePersistence` is shared across collections; it resolves + mode-specific behavior (`sync-present` vs `sync-absent`) automatically. +- `schemaVersion` is specified per collection via `persistedCollectionOptions`. +- Call `database.close()` when your app shuts down. 
diff --git a/packages/db-node-sqlite-persisted-collection/e2e/node-persisted-collection.e2e.test.ts b/packages/db-node-sqlite-persisted-collection/e2e/node-persisted-collection.e2e.test.ts new file mode 100644 index 000000000..6331e31cf --- /dev/null +++ b/packages/db-node-sqlite-persisted-collection/e2e/node-persisted-collection.e2e.test.ts @@ -0,0 +1,263 @@ +import { mkdtempSync, rmSync } from 'node:fs' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { afterAll, afterEach, beforeAll } from 'vitest' +import { createCollection } from '@tanstack/db' +import BetterSqlite3 from 'better-sqlite3' +import { createNodeSQLitePersistence, persistedCollectionOptions } from '../src' +import { generateSeedData } from '../../db-collection-e2e/src/fixtures/seed-data' +import { runPersistedCollectionConformanceSuite } from '../../db-sqlite-persisted-collection-core/tests/contracts/persisted-collection-conformance-contract' +import type { Collection } from '@tanstack/db' +import type { + Comment, + E2ETestConfig, + Post, + User, +} from '../../db-collection-e2e/src/types' + +type PersistableRow = { + id: string +} + +type NodePersistedCollectionTestConfig = E2ETestConfig +type PersistedCollectionHarness = { + collection: Collection + seedPersisted: (rows: Array) => Promise +} + +let config: NodePersistedCollectionTestConfig + +function createPersistedCollection( + database: InstanceType, + id: string, + syncMode: `eager` | `on-demand`, +): PersistedCollectionHarness { + const persistence = createNodeSQLitePersistence({ + database, + }) + let seedTxSequence = 0 + const seedPersisted = async (rows: Array): Promise => { + if (rows.length === 0) { + return + } + seedTxSequence++ + await persistence.adapter.applyCommittedTx(id, { + txId: `seed-${id}-${seedTxSequence}`, + term: 1, + seq: seedTxSequence, + rowVersion: seedTxSequence, + mutations: rows.map((row) => ({ + type: `insert` as const, + key: row.id, + value: row, + })), + }) + } + + const collection = 
createCollection( + persistedCollectionOptions({ + id, + syncMode, + getKey: (item) => item.id, + persistence, + }), + ) + + return { + collection, + seedPersisted, + } +} + +type PersistedTransactionHandle = { + isPersisted: { + promise: Promise + } +} + +async function waitForPersisted( + transaction: PersistedTransactionHandle, +): Promise { + await transaction.isPersisted.promise +} + +async function seedCollection( + collection: Collection, + rows: Array, +): Promise { + const tx = collection.insert(rows) + await waitForPersisted(tx) +} + +async function insertRowIntoCollections( + collections: ReadonlyArray>, + row: T, +): Promise { + for (const collection of collections) { + const tx = collection.insert(row) + await waitForPersisted(tx) + } +} + +async function updateRowAcrossCollections( + collections: ReadonlyArray>, + id: string, + updates: Partial, +): Promise { + for (const collection of collections) { + if (!collection.has(id)) { + continue + } + const tx = collection.update(id, (draft) => { + Object.assign(draft, updates) + }) + await waitForPersisted(tx) + } +} + +async function deleteRowAcrossCollections( + collections: ReadonlyArray>, + id: string, +): Promise { + for (const collection of collections) { + if (!collection.has(id)) { + continue + } + const tx = collection.delete(id) + await waitForPersisted(tx) + } +} + +beforeAll(async () => { + const tempDirectory = mkdtempSync(join(tmpdir(), `db-node-persisted-e2e-`)) + const dbPath = join(tempDirectory, `state.sqlite`) + const suiteId = Date.now().toString(36) + const database = new BetterSqlite3(dbPath) + const seedData = generateSeedData() + + const eagerUsers = createPersistedCollection( + database, + `node-persisted-users-eager-${suiteId}`, + `eager`, + ) + const eagerPosts = createPersistedCollection( + database, + `node-persisted-posts-eager-${suiteId}`, + `eager`, + ) + const eagerComments = createPersistedCollection( + database, + `node-persisted-comments-eager-${suiteId}`, + `eager`, + ) 
+ + const onDemandUsers = createPersistedCollection( + database, + `node-persisted-users-ondemand-${suiteId}`, + `on-demand`, + ) + const onDemandPosts = createPersistedCollection( + database, + `node-persisted-posts-ondemand-${suiteId}`, + `on-demand`, + ) + const onDemandComments = createPersistedCollection( + database, + `node-persisted-comments-ondemand-${suiteId}`, + `on-demand`, + ) + + await Promise.all([ + eagerUsers.collection.preload(), + eagerPosts.collection.preload(), + eagerComments.collection.preload(), + ]) + + await seedCollection(eagerUsers.collection, seedData.users) + await seedCollection(eagerPosts.collection, seedData.posts) + await seedCollection(eagerComments.collection, seedData.comments) + await onDemandUsers.seedPersisted(seedData.users) + await onDemandPosts.seedPersisted(seedData.posts) + await onDemandComments.seedPersisted(seedData.comments) + + config = { + collections: { + eager: { + users: eagerUsers.collection, + posts: eagerPosts.collection, + comments: eagerComments.collection, + }, + onDemand: { + users: onDemandUsers.collection, + posts: onDemandPosts.collection, + comments: onDemandComments.collection, + }, + }, + mutations: { + insertUser: async (user) => + insertRowIntoCollections( + [eagerUsers.collection, onDemandUsers.collection], + user, + ), + updateUser: async (id, updates) => + updateRowAcrossCollections( + [eagerUsers.collection, onDemandUsers.collection], + id, + updates, + ), + deleteUser: async (id) => + deleteRowAcrossCollections( + [eagerUsers.collection, onDemandUsers.collection], + id, + ), + insertPost: async (post) => + insertRowIntoCollections( + [eagerPosts.collection, onDemandPosts.collection], + post, + ), + }, + setup: async () => {}, + afterEach: async () => { + await Promise.all([ + onDemandUsers.collection.cleanup(), + onDemandPosts.collection.cleanup(), + onDemandComments.collection.cleanup(), + ]) + + onDemandUsers.collection.startSyncImmediate() + onDemandPosts.collection.startSyncImmediate() + 
onDemandComments.collection.startSyncImmediate() + }, + teardown: async () => { + await Promise.all([ + eagerUsers.collection.cleanup(), + eagerPosts.collection.cleanup(), + eagerComments.collection.cleanup(), + onDemandUsers.collection.cleanup(), + onDemandPosts.collection.cleanup(), + onDemandComments.collection.cleanup(), + ]) + database.close() + rmSync(tempDirectory, { recursive: true, force: true }) + }, + } +}) + +afterEach(async () => { + if (config.afterEach) { + await config.afterEach() + } +}) + +afterAll(async () => { + await config.teardown() +}) + +function getConfig(): Promise { + return Promise.resolve(config) +} + +runPersistedCollectionConformanceSuite( + `node persisted collection conformance`, + getConfig, +) diff --git a/packages/db-node-sqlite-persisted-collection/package.json b/packages/db-node-sqlite-persisted-collection/package.json new file mode 100644 index 000000000..727b95159 --- /dev/null +++ b/packages/db-node-sqlite-persisted-collection/package.json @@ -0,0 +1,60 @@ +{ + "name": "@tanstack/db-node-sqlite-persisted-collection", + "version": "0.1.0", + "description": "Node SQLite persisted collection adapter for TanStack DB", + "author": "TanStack Team", + "license": "MIT", + "repository": { + "type": "git", + "url": "git+https://github.com/TanStack/db.git", + "directory": "packages/db-node-sqlite-persisted-collection" + }, + "homepage": "https://tanstack.com/db", + "keywords": [ + "sqlite", + "node", + "persistence", + "optimistic", + "typescript" + ], + "scripts": { + "build": "vite build", + "dev": "vite build --watch", + "lint": "eslint . 
--fix", + "test": "vitest --run", + "test:e2e": "pnpm --filter @tanstack/db-ivm build && pnpm --filter @tanstack/db build && pnpm --filter @tanstack/db-sqlite-persisted-collection-core build && pnpm --filter @tanstack/db-node-sqlite-persisted-collection build && vitest --config vitest.e2e.config.ts --run" + }, + "type": "module", + "main": "dist/cjs/index.cjs", + "module": "dist/esm/index.js", + "types": "dist/esm/index.d.ts", + "exports": { + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/cjs/index.d.cts", + "default": "./dist/cjs/index.cjs" + } + }, + "./package.json": "./package.json" + }, + "sideEffects": false, + "files": [ + "dist", + "src" + ], + "dependencies": { + "@tanstack/db-sqlite-persisted-collection-core": "workspace:*", + "better-sqlite3": "^12.6.2" + }, + "peerDependencies": { + "typescript": ">=4.7" + }, + "devDependencies": { + "@types/better-sqlite3": "^7.6.13", + "@vitest/coverage-istanbul": "^3.2.4" + } +} diff --git a/packages/db-node-sqlite-persisted-collection/src/index.ts b/packages/db-node-sqlite-persisted-collection/src/index.ts new file mode 100644 index 000000000..0e649333f --- /dev/null +++ b/packages/db-node-sqlite-persisted-collection/src/index.ts @@ -0,0 +1,11 @@ +export { createNodeSQLitePersistence } from './node-persistence' +export type { + BetterSqlite3Database, + NodeSQLitePersistenceOptions, + NodeSQLiteSchemaMismatchPolicy, +} from './node-persistence' +export { persistedCollectionOptions } from '@tanstack/db-sqlite-persisted-collection-core' +export type { + PersistedCollectionCoordinator, + PersistedCollectionPersistence, +} from '@tanstack/db-sqlite-persisted-collection-core' diff --git a/packages/db-node-sqlite-persisted-collection/src/node-driver.ts b/packages/db-node-sqlite-persisted-collection/src/node-driver.ts new file mode 100644 index 000000000..b849a9f3d --- /dev/null +++ 
// b/packages/db-node-sqlite-persisted-collection/src/node-driver.ts
// @@ -0,0 +1,246 @@  (new file in the flattened patch this chunk came from)
//
// NOTE(review): this span of the patch was flattened during extraction —
// newlines, leading "+" diff markers, and single-line generic type arguments
// (e.g. <typeof BetterSqlite3>, <void>, <T>) were stripped. Formatting and the
// stripped type parameters are reconstructed below; confirm the reconstructed
// generics against the original commit. Diff metadata is kept as comments.

import { AsyncLocalStorage } from 'node:async_hooks'
import BetterSqlite3 from 'better-sqlite3'
import { InvalidPersistedCollectionConfigError } from '@tanstack/db-sqlite-persisted-collection-core'
import type { SQLiteDriver } from '@tanstack/db-sqlite-persisted-collection-core'

// Defaults applied to every database the driver touches: WAL allows concurrent
// readers during writes, NORMAL is the recommended synchronous level with WAL,
// and foreign-key enforcement is opt-in per connection in SQLite.
const DEFAULT_PRAGMAS = [
  `journal_mode = WAL`,
  `synchronous = NORMAL`,
  `foreign_keys = ON`,
] as const

// Rejects statement separators and comment openers so a user-supplied pragma
// cannot smuggle additional SQL into the `PRAGMA ${...}` interpolation below.
const INVALID_PRAGMA_PATTERN = /(;|--|\/\*)/

export type BetterSqlite3Database = InstanceType<typeof BetterSqlite3>
export type BetterSqlite3OpenOptions = ConstructorParameters<
  typeof BetterSqlite3
>[1]

// Wrap an already-open database handle; the driver does not own or close it.
type BetterSqlite3ExistingDatabaseOptions = {
  database: BetterSqlite3Database
  pragmas?: ReadonlyArray<string>
}

// Open (and own) a database file at `filename`.
type BetterSqlite3OpenFileOptions = {
  filename: string
  options?: BetterSqlite3OpenOptions
  pragmas?: ReadonlyArray<string>
}

export type BetterSqlite3DriverOptions =
  | BetterSqlite3ExistingDatabaseOptions
  | BetterSqlite3OpenFileOptions

// Stored in AsyncLocalStorage while a transaction callback is running.
type TransactionContext = {
  depth: number
}

/**
 * Rejects transaction callbacks that ignore the transaction-driver argument.
 * Relies on Function.length, so a callback declared only with rest/default
 * parameters would also be rejected — presumably intentional; confirm.
 */
function assertTransactionCallbackHasDriverArg(
  fn: (transactionDriver: SQLiteDriver) => Promise<unknown>,
): void {
  if (fn.length > 0) {
    return
  }

  throw new InvalidPersistedCollectionConfigError(
    `SQLiteDriver.transaction callback must accept the transaction driver argument`,
  )
}

// Discriminates the options union on the presence of a `database` handle.
function hasExistingDatabase(
  options: BetterSqlite3DriverOptions,
): options is BetterSqlite3ExistingDatabaseOptions {
  return `database` in options
}

/**
 * SQLiteDriver backed by better-sqlite3. better-sqlite3 is synchronous, so all
 * non-transactional operations are serialized through an internal promise
 * queue; statements issued from inside a transaction callback (detected via
 * AsyncLocalStorage) bypass the queue and run on the open transaction instead.
 */
export class BetterSqlite3SQLiteDriver implements SQLiteDriver {
  private readonly database: BetterSqlite3Database
  // True only when the driver opened the file itself; close() is a no-op for
  // caller-provided handles.
  private readonly ownsDatabase: boolean
  // Present (with nesting depth) while executing inside transaction().
  private readonly transactionContext =
    new AsyncLocalStorage<TransactionContext>()
  // Tail of the serial operation queue; never left in a rejected state.
  private queue: Promise<unknown> = Promise.resolve()
  // Monotonic counter used to mint unique SAVEPOINT names.
  private nextSavepointId = 1

  constructor(options: BetterSqlite3DriverOptions) {
    if (hasExistingDatabase(options)) {
      this.database = options.database
      this.ownsDatabase = false
      // Pragmas are applied even to borrowed handles (defaults if none given).
      this.applyPragmas(options.pragmas ?? DEFAULT_PRAGMAS)
      return
    }

    if (options.filename.trim().length === 0) {
      throw new InvalidPersistedCollectionConfigError(
        `Node SQLite driver filename cannot be empty`,
      )
    }

    this.database = new BetterSqlite3(options.filename, options.options)
    this.ownsDatabase = true
    this.applyPragmas(options.pragmas ?? DEFAULT_PRAGMAS)
  }

  /** Executes raw SQL (possibly multiple statements) with no result. */
  async exec(sql: string): Promise<void> {
    if (this.isInsideTransaction()) {
      this.database.exec(sql)
      return
    }

    await this.enqueue(() => {
      this.database.exec(sql)
    })
  }

  /**
   * Runs a prepared statement and returns all result rows.
   * TRow default reconstructed — confirm against the SQLiteDriver contract.
   */
  async query<TRow = Record<string, unknown>>(
    sql: string,
    params: ReadonlyArray<unknown> = [],
  ): Promise<ReadonlyArray<TRow>> {
    if (this.isInsideTransaction()) {
      return this.executeQuery(sql, params)
    }

    return this.enqueue(() => this.executeQuery(sql, params))
  }

  /** Runs a prepared statement for its side effects only. */
  async run(sql: string, params: ReadonlyArray<unknown> = []): Promise<void> {
    if (this.isInsideTransaction()) {
      this.executeRun(sql, params)
      return
    }

    await this.enqueue(() => {
      this.executeRun(sql, params)
    })
  }

  /**
   * Runs `fn` inside BEGIN IMMEDIATE / COMMIT, rolling back on error. Nested
   * calls (detected via AsyncLocalStorage) are translated into SAVEPOINTs.
   */
  async transaction<T>(
    fn: (transactionDriver: SQLiteDriver) => Promise<T>,
  ): Promise<T> {
    assertTransactionCallbackHasDriverArg(fn)

    if (this.isInsideTransaction()) {
      return this.runNestedTransaction(fn)
    }

    return this.enqueue(async () => {
      this.database.exec(`BEGIN IMMEDIATE`)
      try {
        const result = await this.transactionContext.run(
          { depth: 1 },
          async () => fn(this),
        )
        this.database.exec(`COMMIT`)
        return result
      } catch (error) {
        try {
          this.database.exec(`ROLLBACK`)
        } catch {
          // Keep the original transaction error as the primary failure.
        }
        throw error
      }
    })
  }

  /** Alias for transaction(); kept for API compatibility. */
  async transactionWithDriver<T>(
    fn: (transactionDriver: SQLiteDriver) => Promise<T>,
  ): Promise<T> {
    return this.transaction(fn)
  }

  /** Closes the underlying handle, but only if this driver opened it. */
  close(): void {
    if (!this.ownsDatabase) {
      return
    }

    this.database.close()
  }

  /** Exposes the raw better-sqlite3 handle (used by persistence helpers). */
  getDatabase(): BetterSqlite3Database {
    return this.database
  }

  // Validates and applies each pragma; blank entries are skipped.
  private applyPragmas(pragmas: ReadonlyArray<string>): void {
    for (const pragma of pragmas) {
      const trimmedPragma = pragma.trim()
      if (trimmedPragma.length === 0) {
        continue
      }

      if (INVALID_PRAGMA_PATTERN.test(trimmedPragma)) {
        throw new InvalidPersistedCollectionConfigError(
          `Invalid SQLite PRAGMA: "${pragma}"`,
        )
      }

      this.database.exec(`PRAGMA ${trimmedPragma}`)
    }
  }

  private isInsideTransaction(): boolean {
    return this.transactionContext.getStore() !== undefined
  }

  // Synchronous helpers below run either on the queue or inside a transaction.

  private executeQuery<TRow>(
    sql: string,
    params: ReadonlyArray<unknown>,
  ): ReadonlyArray<TRow> {
    const statement = this.database.prepare(sql)
    if (params.length === 0) {
      return statement.all() as ReadonlyArray<TRow>
    }

    return statement.all(...params) as ReadonlyArray<TRow>
  }

  private executeRun(sql: string, params: ReadonlyArray<unknown>): void {
    const statement = this.database.prepare(sql)
    if (params.length === 0) {
      statement.run()
      return
    }

    statement.run(...params)
  }

  // Appends `operation` to the serial queue. The operation runs whether the
  // previous entry resolved or rejected, and the queue tail itself always
  // settles fulfilled so one failure cannot poison later operations.
  private enqueue<T>(operation: () => Promise<T> | T): Promise<T> {
    const queuedOperation = this.queue.then(operation, operation)
    this.queue = queuedOperation.then(
      () => undefined,
      () => undefined,
    )
    return queuedOperation
  }

  // Emulates nested transactions with uniquely-named SAVEPOINTs.
  private async runNestedTransaction<T>(
    fn: (transactionDriver: SQLiteDriver) => Promise<T>,
  ): Promise<T> {
    const context = this.transactionContext.getStore()
    if (!context) {
      // Defensive: only reachable if called outside a transaction context.
      return fn(this)
    }

    const savepointName = `tsdb_sp_${this.nextSavepointId}`
    this.nextSavepointId++
    this.database.exec(`SAVEPOINT ${savepointName}`)

    try {
      const result = await this.transactionContext.run(
        { depth: context.depth + 1 },
        async () => fn(this),
      )
      this.database.exec(`RELEASE SAVEPOINT ${savepointName}`)
      return result
    } catch (error) {
      // Roll back to the savepoint, then release it so the outer transaction
      // can still commit.
      this.database.exec(`ROLLBACK TO SAVEPOINT ${savepointName}`)
      this.database.exec(`RELEASE SAVEPOINT ${savepointName}`)
      throw error
    }
  }
}

/** Convenience factory mirroring the class constructor. */
export function createBetterSqlite3Driver(
  options: BetterSqlite3DriverOptions,
): BetterSqlite3SQLiteDriver {
  return new BetterSqlite3SQLiteDriver(options)
}

// diff --git a/packages/db-node-sqlite-persisted-collection/src/node-persistence.ts
//          b/packages/db-node-sqlite-persisted-collection/src/node-persistence.ts
// new file mode 100644  index 000000000..993a1418a
// --- /dev/null
// +++ b/packages/db-node-sqlite-persisted-collection/src/node-persistence.ts
// @@ -0,0 +1,169 @@

import {
  SingleProcessCoordinator,
  createSQLiteCorePersistenceAdapter,
} from '@tanstack/db-sqlite-persisted-collection-core'
import { BetterSqlite3SQLiteDriver } from './node-driver'
import type {
  PersistedCollectionCoordinator,
  PersistedCollectionMode,
  PersistedCollectionPersistence,
  SQLiteCoreAdapterOptions,
  SQLiteDriver,
} from '@tanstack/db-sqlite-persisted-collection-core'
import type { BetterSqlite3Database } from './node-driver'

export type { BetterSqlite3Database } from './node-driver'

// Policies understood by the core adapter.
type NodeSQLiteCoreSchemaMismatchPolicy =
  | `sync-present-reset`
  | `sync-absent-error`
  | `reset`

// The public policy adds a `throw` alias (normalized to `sync-absent-error`).
export type NodeSQLiteSchemaMismatchPolicy =
  | NodeSQLiteCoreSchemaMismatchPolicy
  | `throw`

type NodeSQLitePersistenceBaseOptions = Omit<
  SQLiteCoreAdapterOptions,
  `driver` | `schemaVersion` | `schemaMismatchPolicy`
> & {
  database: BetterSqlite3Database
  pragmas?: ReadonlyArray<string>
  coordinator?: PersistedCollectionCoordinator
  schemaMismatchPolicy?: NodeSQLiteSchemaMismatchPolicy
}

export type NodeSQLitePersistenceOptions = NodeSQLitePersistenceBaseOptions

// Maps the public `throw` alias onto the core's error policy.
function normalizeSchemaMismatchPolicy(
  policy: NodeSQLiteSchemaMismatchPolicy,
): NodeSQLiteCoreSchemaMismatchPolicy {
  if (policy === `throw`) {
    return `sync-absent-error`
  }

  return policy
}

// Explicit policy wins; otherwise infer from whether a sync source exists:
// reset persisted data when sync can repopulate it, error when it cannot.
function resolveSchemaMismatchPolicy(
  explicitPolicy: NodeSQLiteSchemaMismatchPolicy | undefined,
  mode: PersistedCollectionMode,
): NodeSQLiteCoreSchemaMismatchPolicy {
  if (explicitPolicy) {
    return normalizeSchemaMismatchPolicy(explicitPolicy)
  }

  return mode === `sync-present` ? `sync-present-reset` : `sync-absent-error`
}

// Cache key for one adapter configuration (policy + schema version).
function createAdapterCacheKey(
  schemaMismatchPolicy: NodeSQLiteCoreSchemaMismatchPolicy,
  schemaVersion: number | undefined,
): string {
  const schemaVersionKey =
    schemaVersion === undefined ? `schema:default` : `schema:${schemaVersion}`
  return `${schemaMismatchPolicy}|${schemaVersionKey}`
}

// Wraps the caller's database handle in a driver (handle stays caller-owned).
function createInternalSQLiteDriver(
  options: NodeSQLitePersistenceOptions,
): SQLiteDriver {
  return new BetterSqlite3SQLiteDriver({
    database: options.database,
    ...(options.pragmas ? { pragmas: options.pragmas } : {}),
  })
}

// Extracts the core adapter options shared by every cached adapter instance.
function resolveAdapterBaseOptions(
  options: NodeSQLitePersistenceOptions,
): Omit<
  SQLiteCoreAdapterOptions,
  `driver` | `schemaVersion` | `schemaMismatchPolicy`
> {
  return {
    appliedTxPruneMaxRows: options.appliedTxPruneMaxRows,
    appliedTxPruneMaxAgeSeconds: options.appliedTxPruneMaxAgeSeconds,
    pullSinceReloadThreshold: options.pullSinceReloadThreshold,
  }
}

/**
 * Creates a shared SQLite persistence instance that can be reused by many
 * collections on the same database. Collection-specific schema versions are
 * resolved by `persistedCollectionOptions` via `resolvePersistenceForCollection`.
 *
 * Return type arguments reconstructed (stripped by extraction) — confirm.
 */
export function createNodeSQLitePersistence<
  T extends object,
  TKey extends string | number = string | number,
>(
  options: NodeSQLitePersistenceOptions,
): PersistedCollectionPersistence<T, TKey> {
  const { coordinator, schemaMismatchPolicy } = options
  const driver = createInternalSQLiteDriver(options)
  const adapterBaseOptions = resolveAdapterBaseOptions(options)
  const resolvedCoordinator = coordinator ??
new SingleProcessCoordinator()
  // One adapter per (mismatch policy, schema version) pair, sharing the driver.
  // Type arguments on Map/createSQLiteCorePersistenceAdapter reconstructed
  // (stripped by extraction) — confirm against the original commit.
  const adapterCache = new Map<
    string,
    ReturnType<
      typeof createSQLiteCorePersistenceAdapter<
        Record<string, unknown>,
        string | number
      >
    >
  >()

  // Returns the cached adapter for this configuration, creating it on demand.
  const getAdapterForCollection = (
    mode: PersistedCollectionMode,
    schemaVersion: number | undefined,
  ) => {
    const resolvedSchemaMismatchPolicy = resolveSchemaMismatchPolicy(
      schemaMismatchPolicy,
      mode,
    )
    const cacheKey = createAdapterCacheKey(
      resolvedSchemaMismatchPolicy,
      schemaVersion,
    )
    const cachedAdapter = adapterCache.get(cacheKey)
    if (cachedAdapter) {
      return cachedAdapter
    }

    const adapter = createSQLiteCorePersistenceAdapter<
      Record<string, unknown>,
      string | number
    >({
      ...adapterBaseOptions,
      driver,
      schemaMismatchPolicy: resolvedSchemaMismatchPolicy,
      ...(schemaVersion === undefined ? {} : { schemaVersion }),
    })
    adapterCache.set(cacheKey, adapter)
    return adapter
  }

  // Builds the per-collection persistence view over the shared adapter cache.
  const createCollectionPersistence = (
    mode: PersistedCollectionMode,
    schemaVersion: number | undefined,
  ): PersistedCollectionPersistence<T, TKey> => ({
    adapter: getAdapterForCollection(
      mode,
      schemaVersion,
    ) as unknown as PersistedCollectionPersistence<T, TKey>[`adapter`],
    coordinator: resolvedCoordinator,
  })

  const defaultPersistence = createCollectionPersistence(
    `sync-absent`,
    undefined,
  )

  return {
    ...defaultPersistence,
    resolvePersistenceForCollection: ({ mode, schemaVersion }) =>
      createCollectionPersistence(mode, schemaVersion),
    // Backward compatible fallback for older callers.
    resolvePersistenceForMode: (mode) =>
      createCollectionPersistence(mode, undefined),
  }
}

// diff --git a/packages/db-node-sqlite-persisted-collection/tests/node-driver.test.ts
//          b/packages/db-node-sqlite-persisted-collection/tests/node-driver.test.ts
// new file mode 100644  index 000000000..2979b9983
// --- /dev/null
// +++ b/packages/db-node-sqlite-persisted-collection/tests/node-driver.test.ts
// @@ -0,0 +1,25 @@

import { mkdtempSync, rmSync } from 'node:fs'
import { tmpdir } from 'node:os'
import { join } from 'node:path'
import { runSQLiteDriverContractSuite } from '../../db-sqlite-persisted-collection-core/tests/contracts/sqlite-driver-contract'
import { BetterSqlite3SQLiteDriver } from '../src/node-driver'
import type { SQLiteDriverContractHarness } from '../../db-sqlite-persisted-collection-core/tests/contracts/sqlite-driver-contract'

// Builds a contract harness around a driver backed by a throwaway on-disk
// database; cleanup always removes the temp directory, even if close() throws.
function createDriverHarness(): SQLiteDriverContractHarness {
  const tempDirectory = mkdtempSync(join(tmpdir(), `db-node-sqlite-`))
  const dbPath = join(tempDirectory, `state.sqlite`)
  const driver = new BetterSqlite3SQLiteDriver({ filename: dbPath })

  return {
    driver,
    cleanup: () => {
      try {
        driver.close()
      } finally {
        rmSync(tempDirectory, { recursive: true, force: true })
      }
    },
  }
}

runSQLiteDriverContractSuite(`better-sqlite3 node driver`, createDriverHarness)

// diff --git a/packages/db-node-sqlite-persisted-collection/tests/node-persistence.test.ts
//          b/packages/db-node-sqlite-persisted-collection/tests/node-persistence.test.ts
// new file mode 100644  index 000000000..c4f3a5f17
// --- /dev/null
// +++ b/packages/db-node-sqlite-persisted-collection/tests/node-persistence.test.ts
// @@ -0,0 +1,225 @@

import { mkdtempSync, rmSync } from 'node:fs'
import { tmpdir } from 'node:os'
import { join } from 'node:path'
import BetterSqlite3 from 'better-sqlite3'
import { describe, expect, it } from 'vitest'
import { createNodeSQLitePersistence, persistedCollectionOptions } from '../src'
import { BetterSqlite3SQLiteDriver }
from '../src/node-driver' +import { SingleProcessCoordinator } from '../../db-sqlite-persisted-collection-core/src' +import { runRuntimePersistenceContractSuite } from '../../db-sqlite-persisted-collection-core/tests/contracts/runtime-persistence-contract' +import type { + RuntimePersistenceContractTodo, + RuntimePersistenceDatabaseHarness, +} from '../../db-sqlite-persisted-collection-core/tests/contracts/runtime-persistence-contract' + +function createRuntimeDatabaseHarness(): RuntimePersistenceDatabaseHarness { + const tempDirectory = mkdtempSync(join(tmpdir(), `db-node-persistence-`)) + const dbPath = join(tempDirectory, `state.sqlite`) + const drivers = new Set() + + return { + createDriver: () => { + const driver = new BetterSqlite3SQLiteDriver({ filename: dbPath }) + drivers.add(driver) + return driver + }, + cleanup: () => { + for (const driver of drivers) { + try { + driver.close() + } catch { + // ignore cleanup errors from already-closed handles + } + } + drivers.clear() + rmSync(tempDirectory, { recursive: true, force: true }) + }, + } +} + +runRuntimePersistenceContractSuite(`node runtime persistence helpers`, { + createDatabaseHarness: createRuntimeDatabaseHarness, + createAdapter: (driver) => + createNodeSQLitePersistence({ + database: (driver as BetterSqlite3SQLiteDriver).getDatabase(), + }).adapter, + createPersistence: (driver, coordinator) => + createNodeSQLitePersistence({ + database: (driver as BetterSqlite3SQLiteDriver).getDatabase(), + coordinator, + }), + createCoordinator: () => new SingleProcessCoordinator(), +}) + +describe(`node persistence helpers`, () => { + it(`defaults coordinator to SingleProcessCoordinator`, () => { + const runtimeHarness = createRuntimeDatabaseHarness() + const driver = runtimeHarness.createDriver() + try { + const persistence = createNodeSQLitePersistence({ + database: (driver as BetterSqlite3SQLiteDriver).getDatabase(), + }) + expect(persistence.coordinator).toBeInstanceOf(SingleProcessCoordinator) + } finally { 
+ runtimeHarness.cleanup() + } + }) + + it(`allows overriding the default coordinator`, () => { + const runtimeHarness = createRuntimeDatabaseHarness() + const driver = runtimeHarness.createDriver() + try { + const coordinator = new SingleProcessCoordinator() + const persistence = createNodeSQLitePersistence({ + database: (driver as BetterSqlite3SQLiteDriver).getDatabase(), + coordinator, + }) + + expect(persistence.coordinator).toBe(coordinator) + } finally { + runtimeHarness.cleanup() + } + }) + + it(`accepts a bare better-sqlite3 database handle`, async () => { + const tempDirectory = mkdtempSync(join(tmpdir(), `db-node-direct-db-`)) + const dbPath = join(tempDirectory, `state.sqlite`) + const collectionId = `todos` + const database = new BetterSqlite3(dbPath) + + try { + const persistence = createNodeSQLitePersistence< + RuntimePersistenceContractTodo, + string + >({ + database, + }) + + await persistence.adapter.applyCommittedTx(collectionId, { + txId: `tx-direct-db-1`, + term: 1, + seq: 1, + rowVersion: 1, + mutations: [ + { + type: `insert`, + key: `1`, + value: { + id: `1`, + title: `from raw database`, + score: 1, + }, + }, + ], + }) + + const rows = await persistence.adapter.loadSubset(collectionId, {}) + expect(rows).toEqual([ + { + key: `1`, + value: { + id: `1`, + title: `from raw database`, + score: 1, + }, + }, + ]) + } finally { + database.close() + rmSync(tempDirectory, { recursive: true, force: true }) + } + }) + + it(`infers schema policy from sync mode`, async () => { + const tempDirectory = mkdtempSync(join(tmpdir(), `db-node-schema-infer-`)) + const dbPath = join(tempDirectory, `state.sqlite`) + const collectionId = `todos` + const firstDatabase = new BetterSqlite3(dbPath) + + try { + const firstPersistence = createNodeSQLitePersistence< + RuntimePersistenceContractTodo, + string + >({ + database: firstDatabase, + }) + const firstCollectionOptions = persistedCollectionOptions< + RuntimePersistenceContractTodo, + string + >({ + id: 
collectionId, + schemaVersion: 1, + getKey: (todo) => todo.id, + persistence: firstPersistence, + }) + + await firstCollectionOptions.persistence.adapter.applyCommittedTx( + collectionId, + { + txId: `tx-1`, + term: 1, + seq: 1, + rowVersion: 1, + mutations: [ + { + type: `insert`, + key: `1`, + value: { + id: `1`, + title: `before mismatch`, + score: 1, + }, + }, + ], + }, + ) + } finally { + firstDatabase.close() + } + + const secondDatabase = new BetterSqlite3(dbPath) + try { + const secondPersistence = createNodeSQLitePersistence< + RuntimePersistenceContractTodo, + string + >({ + database: secondDatabase, + }) + const syncAbsentOptions = persistedCollectionOptions< + RuntimePersistenceContractTodo, + string + >({ + id: collectionId, + schemaVersion: 2, + getKey: (todo) => todo.id, + persistence: secondPersistence, + }) + await expect( + syncAbsentOptions.persistence.adapter.loadSubset(collectionId, {}), + ).rejects.toThrow(`Schema version mismatch`) + + const syncPresentOptions = persistedCollectionOptions< + RuntimePersistenceContractTodo, + string + >({ + id: collectionId, + schemaVersion: 2, + getKey: (todo) => todo.id, + sync: { + sync: ({ markReady }) => { + markReady() + }, + }, + persistence: secondPersistence, + }) + const rows = await syncPresentOptions.persistence.adapter.loadSubset( + collectionId, + {}, + ) + expect(rows).toEqual([]) + } finally { + secondDatabase.close() + rmSync(tempDirectory, { recursive: true, force: true }) + } + }) +}) diff --git a/packages/db-node-sqlite-persisted-collection/tests/node-sqlite-core-adapter-contract.test.ts b/packages/db-node-sqlite-persisted-collection/tests/node-sqlite-core-adapter-contract.test.ts new file mode 100644 index 000000000..eb37c05d9 --- /dev/null +++ b/packages/db-node-sqlite-persisted-collection/tests/node-sqlite-core-adapter-contract.test.ts @@ -0,0 +1,40 @@ +import { mkdtempSync, rmSync } from 'node:fs' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { 
runSQLiteCoreAdapterContractSuite } from '../../db-sqlite-persisted-collection-core/tests/contracts/sqlite-core-adapter-contract' +import { BetterSqlite3SQLiteDriver } from '../src/node-driver' +import { SQLiteCorePersistenceAdapter } from '../../db-sqlite-persisted-collection-core/src' +import type { + SQLiteCoreAdapterContractTodo, + SQLiteCoreAdapterHarnessFactory, +} from '../../db-sqlite-persisted-collection-core/tests/contracts/sqlite-core-adapter-contract' + +const createHarness: SQLiteCoreAdapterHarnessFactory = (options) => { + const tempDirectory = mkdtempSync(join(tmpdir(), `db-node-sqlite-core-`)) + const dbPath = join(tempDirectory, `state.sqlite`) + const driver = new BetterSqlite3SQLiteDriver({ filename: dbPath }) + const adapter = new SQLiteCorePersistenceAdapter< + SQLiteCoreAdapterContractTodo, + string + >({ + driver, + ...options, + }) + + return { + adapter, + driver, + cleanup: () => { + try { + driver.close() + } finally { + rmSync(tempDirectory, { recursive: true, force: true }) + } + }, + } +} + +runSQLiteCoreAdapterContractSuite( + `SQLiteCorePersistenceAdapter (better-sqlite3 node driver)`, + createHarness, +) diff --git a/packages/db-node-sqlite-persisted-collection/tsconfig.docs.json b/packages/db-node-sqlite-persisted-collection/tsconfig.docs.json new file mode 100644 index 000000000..5fddb4598 --- /dev/null +++ b/packages/db-node-sqlite-persisted-collection/tsconfig.docs.json @@ -0,0 +1,12 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "paths": { + "@tanstack/db": ["../db/src"], + "@tanstack/db-sqlite-persisted-collection-core": [ + "../db-sqlite-persisted-collection-core/src" + ] + } + }, + "include": ["src"] +} diff --git a/packages/db-node-sqlite-persisted-collection/tsconfig.json b/packages/db-node-sqlite-persisted-collection/tsconfig.json new file mode 100644 index 000000000..4074d193d --- /dev/null +++ b/packages/db-node-sqlite-persisted-collection/tsconfig.json @@ -0,0 +1,24 @@ +{ + "extends": 
"../../tsconfig.json", + "compilerOptions": { + "target": "ES2020", + "module": "ESNext", + "moduleResolution": "Bundler", + "declaration": true, + "outDir": "dist", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "jsx": "react", + "paths": { + "@tanstack/db": ["../db/src"], + "@tanstack/db-ivm": ["../db-ivm/src"], + "@tanstack/db-sqlite-persisted-collection-core": [ + "../db-sqlite-persisted-collection-core/src" + ] + } + }, + "include": ["src", "tests", "e2e", "vite.config.ts", "vitest.e2e.config.ts"], + "exclude": ["node_modules", "dist"] +} diff --git a/packages/db-node-sqlite-persisted-collection/vite.config.ts b/packages/db-node-sqlite-persisted-collection/vite.config.ts new file mode 100644 index 000000000..ea27c667a --- /dev/null +++ b/packages/db-node-sqlite-persisted-collection/vite.config.ts @@ -0,0 +1,24 @@ +import { defineConfig, mergeConfig } from 'vitest/config' +import { tanstackViteConfig } from '@tanstack/vite-config' +import packageJson from './package.json' + +const config = defineConfig({ + test: { + name: packageJson.name, + include: [`tests/**/*.test.ts`], + environment: `node`, + coverage: { enabled: true, provider: `istanbul`, include: [`src/**/*`] }, + typecheck: { + enabled: true, + include: [`tests/**/*.test.ts`, `tests/**/*.test-d.ts`], + }, + }, +}) + +export default mergeConfig( + config, + tanstackViteConfig({ + entry: `./src/index.ts`, + srcDir: `./src`, + }), +) diff --git a/packages/db-node-sqlite-persisted-collection/vitest.e2e.config.ts b/packages/db-node-sqlite-persisted-collection/vitest.e2e.config.ts new file mode 100644 index 000000000..063029e51 --- /dev/null +++ b/packages/db-node-sqlite-persisted-collection/vitest.e2e.config.ts @@ -0,0 +1,24 @@ +import { dirname, resolve } from 'node:path' +import { fileURLToPath } from 'node:url' +import { defineConfig } from 'vitest/config' + +const packageDirectory = dirname(fileURLToPath(import.meta.url)) + +export 
default defineConfig({ + resolve: { + alias: { + '@tanstack/db': resolve(packageDirectory, `../db/src`), + '@tanstack/db-ivm': resolve(packageDirectory, `../db-ivm/src`), + '@tanstack/db-sqlite-persisted-collection-core': resolve( + packageDirectory, + `../db-sqlite-persisted-collection-core/src`, + ), + }, + }, + test: { + include: [`e2e/**/*.e2e.test.ts`], + fileParallelism: false, + testTimeout: 60_000, + environment: `jsdom`, + }, +}) diff --git a/packages/db-react-native-sqlite-persisted-collection/src/op-sqlite-driver.ts b/packages/db-react-native-sqlite-persisted-collection/src/op-sqlite-driver.ts index 062505b50..432d80630 100644 --- a/packages/db-react-native-sqlite-persisted-collection/src/op-sqlite-driver.ts +++ b/packages/db-react-native-sqlite-persisted-collection/src/op-sqlite-driver.ts @@ -87,9 +87,15 @@ async function resolveAsyncLocalStorageCtor(): Promise Promise<{ AsyncLocalStorage?: AsyncLocalStorageCtor - } + }> + const asyncHooksModule = await importFn(getNodeAsyncHooksSpecifier()) return typeof asyncHooksModule.AsyncLocalStorage === `function` ? asyncHooksModule.AsyncLocalStorage diff --git a/packages/db/CHANGELOG.md b/packages/db/CHANGELOG.md index cadf76d13..f3839250e 100644 --- a/packages/db/CHANGELOG.md +++ b/packages/db/CHANGELOG.md @@ -1,5 +1,25 @@ # @tanstack/db +## 0.5.32 + +### Patch Changes + +- fix(db): use `Ref` brand instead of `Ref | undefined` for nullable join refs in declarative select ([#1262](https://github.com/TanStack/db/pull/1262)) + + The declarative `select()` callback receives proxy objects that record property accesses. These proxies are always truthy at build time, but nullable join sides (left/right/full) were typed as `Ref | undefined`, misleading users into using `?.` and `??` operators that have no effect at runtime. Nullable join refs are now typed as `Ref`, which allows direct property access without optional chaining while correctly producing `T | undefined` in the result type. 
+ +- Fix unbounded WHERE expression growth in `DeduplicatedLoadSubset` when loading all data after accumulating specific predicates. The deduplication layer now correctly tracks the original request predicate (e.g., `where: undefined` for "load all") instead of the optimized difference query sent to the backend, ensuring `hasLoadedAllData` is properly set and subsequent requests are deduplicated. ([#1348](https://github.com/TanStack/db/pull/1348)) + +- fix(db): throw error when fn.select() is used with groupBy() ([#1324](https://github.com/TanStack/db/pull/1324)) + +- Add `queryOnce` helper for one-shot query execution, including `findOne()` support and optional QueryBuilder configs. ([#1211](https://github.com/TanStack/db/pull/1211)) + +## 0.5.31 + +### Patch Changes + +- Add Intent agent skills (SKILL.md files) to guide AI coding agents. Include skills for core DB concepts, all 5 framework bindings, meta-framework integration, and offline transactions. Also add `export * from '@tanstack/db'` to angular-db for consistency with other framework packages. 
([#1330](https://github.com/TanStack/db/pull/1330)) + ## 0.5.30 ### Patch Changes diff --git a/packages/db/package.json b/packages/db/package.json index 9e43b3858..396adaf0b 100644 --- a/packages/db/package.json +++ b/packages/db/package.json @@ -1,6 +1,6 @@ { "name": "@tanstack/db", - "version": "0.5.30", + "version": "0.5.32", "description": "A reactive client store for building super fast apps on sync", "author": "Kyle Mathews", "license": "MIT", @@ -41,7 +41,8 @@ "sideEffects": false, "files": [ "dist", - "src" + "src", + "skills" ], "dependencies": { "@standard-schema/spec": "^1.1.0", diff --git a/packages/db/skills/db-core/SKILL.md b/packages/db/skills/db-core/SKILL.md new file mode 100644 index 000000000..5cdc46502 --- /dev/null +++ b/packages/db/skills/db-core/SKILL.md @@ -0,0 +1,61 @@ +--- +name: db-core +description: > + TanStack DB core concepts: createCollection with queryCollectionOptions, + electricCollectionOptions, powerSyncCollectionOptions, rxdbCollectionOptions, + trailbaseCollectionOptions, localOnlyCollectionOptions. Live queries via + query builder (from, where, join, select, groupBy, orderBy, limit). Optimistic + mutations with draft proxy (collection.insert, collection.update, + collection.delete). createOptimisticAction, createTransaction, + createPacedMutations. Entry point for all TanStack DB skills. +type: core +library: db +library_version: '0.5.30' +--- + +# TanStack DB — Core Concepts + +TanStack DB is a reactive client-side data store. It loads data into typed +collections from any backend (REST APIs, sync engines, local storage), provides +sub-millisecond live queries via differential dataflow, and supports instant +optimistic mutations with automatic rollback. + +Framework packages (`@tanstack/react-db`, `@tanstack/vue-db`, `@tanstack/svelte-db`, +`@tanstack/solid-db`) re-export everything from `@tanstack/db` plus framework-specific +hooks. In framework projects, import from the framework package directly. 
+`@tanstack/angular-db` is the exception -- import operators from `@tanstack/db` separately. + +## Sub-Skills + +| Need to... | Read | +| ------------------------------------------------ | ---------------------------------------------------- | +| Create a collection, pick an adapter, add schema | db-core/collection-setup/SKILL.md | +| Query data with where, join, groupBy, select | db-core/live-queries/SKILL.md | +| Insert, update, delete with optimistic UI | db-core/mutations-optimistic/SKILL.md | +| Build a custom sync adapter | db-core/custom-adapter/SKILL.md | +| Preload collections in route loaders | meta-framework/SKILL.md | +| Add offline transaction queueing | offline/SKILL.md (in @tanstack/offline-transactions) | + +For framework-specific hooks: + +| Framework | Read | +| --------- | ------------------- | +| React | react-db/SKILL.md | +| Vue | vue-db/SKILL.md | +| Svelte | svelte-db/SKILL.md | +| Solid | solid-db/SKILL.md | +| Angular | angular-db/SKILL.md | + +## Quick Decision Tree + +- Setting up for the first time? → db-core/collection-setup +- Building queries on collection data? → db-core/live-queries +- Writing data / handling optimistic state? → db-core/mutations-optimistic +- Using React hooks? → react-db +- Preloading in route loaders (Start, Next, Remix)? → meta-framework +- Building an adapter for a new backend? → db-core/custom-adapter +- Need offline transaction persistence? → offline + +## Version + +Targets @tanstack/db v0.5.30. diff --git a/packages/db/skills/db-core/collection-setup/SKILL.md b/packages/db/skills/db-core/collection-setup/SKILL.md new file mode 100644 index 000000000..52e791778 --- /dev/null +++ b/packages/db/skills/db-core/collection-setup/SKILL.md @@ -0,0 +1,427 @@ +--- +name: db-core/collection-setup +description: > + Creating typed collections with createCollection. 
Adapter selection: + queryCollectionOptions (REST/TanStack Query), electricCollectionOptions + (ElectricSQL real-time sync), powerSyncCollectionOptions (PowerSync SQLite), + rxdbCollectionOptions (RxDB), trailbaseCollectionOptions (TrailBase), + localOnlyCollectionOptions, localStorageCollectionOptions. CollectionConfig + options: getKey, schema, sync, gcTime, autoIndex, syncMode (eager/on-demand/ + progressive). StandardSchema validation with Zod/Valibot/ArkType. Collection + lifecycle (idle/loading/ready/error). Adapter-specific sync patterns including + Electric txid tracking and Query direct writes. +type: sub-skill +library: db +library_version: '0.5.30' +sources: + - 'TanStack/db:docs/overview.md' + - 'TanStack/db:docs/guides/schemas.md' + - 'TanStack/db:docs/collections/query-collection.md' + - 'TanStack/db:docs/collections/electric-collection.md' + - 'TanStack/db:docs/collections/powersync-collection.md' + - 'TanStack/db:docs/collections/rxdb-collection.md' + - 'TanStack/db:docs/collections/trailbase-collection.md' + - 'TanStack/db:packages/db/src/collection/index.ts' +--- + +This skill builds on db-core. Read it first for the overall mental model. + +# Collection Setup & Schema + +## Setup + +```ts +import { createCollection } from '@tanstack/react-db' +import { queryCollectionOptions } from '@tanstack/query-db-collection' +import { QueryClient } from '@tanstack/query-core' +import { z } from 'zod' + +const queryClient = new QueryClient() + +const todoSchema = z.object({ + id: z.number(), + text: z.string(), + completed: z.boolean().default(false), + created_at: z + .union([z.string(), z.date()]) + .transform((val) => (typeof val === 'string' ? 
new Date(val) : val)), +}) + +const todoCollection = createCollection( + queryCollectionOptions({ + queryKey: ['todos'], + queryFn: async () => { + const res = await fetch('/api/todos') + return res.json() + }, + queryClient, + getKey: (item) => item.id, + schema: todoSchema, + onInsert: async ({ transaction }) => { + await api.todos.create(transaction.mutations[0].modified) + await todoCollection.utils.refetch() + }, + onUpdate: async ({ transaction }) => { + const mut = transaction.mutations[0] + await api.todos.update(mut.key, mut.changes) + await todoCollection.utils.refetch() + }, + onDelete: async ({ transaction }) => { + await api.todos.delete(transaction.mutations[0].key) + await todoCollection.utils.refetch() + }, + }), +) +``` + +## Choosing an Adapter + +| Backend | Adapter | Package | +| -------------------------------- | ------------------------------- | ----------------------------------- | +| REST API / TanStack Query | `queryCollectionOptions` | `@tanstack/query-db-collection` | +| ElectricSQL (real-time Postgres) | `electricCollectionOptions` | `@tanstack/electric-db-collection` | +| PowerSync (SQLite offline) | `powerSyncCollectionOptions` | `@tanstack/powersync-db-collection` | +| RxDB (reactive database) | `rxdbCollectionOptions` | `@tanstack/rxdb-db-collection` | +| TrailBase (event streaming) | `trailbaseCollectionOptions` | `@tanstack/trailbase-db-collection` | +| No backend (UI state) | `localOnlyCollectionOptions` | `@tanstack/db` | +| Browser localStorage | `localStorageCollectionOptions` | `@tanstack/db` | + +If the user specifies a backend (e.g. Electric, PowerSync), use that adapter directly. Only use `localOnlyCollectionOptions` when there is no backend yet — the collection API is uniform, so swapping to a real adapter later only changes the options creator. 
+ +## Sync Modes + +```ts +queryCollectionOptions({ + syncMode: 'eager', // default — loads all data upfront + // syncMode: "on-demand", // loads only what live queries request + // syncMode: "progressive", // (Electric only) query subset first, full sync in background +}) +``` + +| Mode | Best for | Data size | +| ------------- | ---------------------------------------------- | --------- | +| `eager` | Mostly-static datasets | <10k rows | +| `on-demand` | Search, catalogs, large tables | >50k rows | +| `progressive` | Collaborative apps needing instant first paint | Any | + +## Core Patterns + +### Local-only collection for prototyping + +```ts +import { + createCollection, + localOnlyCollectionOptions, +} from '@tanstack/react-db' + +const todoCollection = createCollection( + localOnlyCollectionOptions({ + getKey: (item) => item.id, + initialData: [{ id: 1, text: 'Learn TanStack DB', completed: false }], + }), +) +``` + +### Schema with type transformations + +```ts +const schema = z.object({ + id: z.number(), + title: z.string(), + due_date: z + .union([z.string(), z.date()]) + .transform((val) => (typeof val === 'string' ? new Date(val) : val)), + priority: z.number().default(0), +}) +``` + +Use `z.union([z.string(), z.date()])` for transformed fields — this ensures `TInput` is a superset of `TOutput` so that `update()` works correctly with the draft proxy. + +### ElectricSQL with txid tracking + +Always use a schema with Electric — without one, the collection types as `Record`. 
+ +```ts +import { electricCollectionOptions } from '@tanstack/electric-db-collection' +import { z } from 'zod' + +const todoSchema = z.object({ + id: z.string(), + text: z.string(), + completed: z.boolean(), + created_at: z.coerce.date(), +}) + +const todoCollection = createCollection( + electricCollectionOptions({ + schema: todoSchema, + shapeOptions: { url: '/api/electric/todos' }, + getKey: (item) => item.id, + onInsert: async ({ transaction }) => { + const res = await api.todos.create(transaction.mutations[0].modified) + return { txid: res.txid } + }, + }), +) +``` + +The returned `txid` tells the collection to hold optimistic state until Electric streams back that transaction. See the [Electric adapter reference](references/electric-adapter.md) for the full dual-path pattern (schema + parser). + +## Common Mistakes + +### CRITICAL queryFn returning empty array deletes all data + +Wrong: + +```ts +queryCollectionOptions({ + queryFn: async () => { + const res = await fetch('/api/todos?status=active') + return res.json() // returns [] when no active todos — deletes everything + }, +}) +``` + +Correct: + +```ts +queryCollectionOptions({ + queryFn: async () => { + const res = await fetch('/api/todos') // fetch complete state + return res.json() + }, + // Use on-demand mode + live query where() for filtering + syncMode: 'on-demand', +}) +``` + +`queryFn` result is treated as complete server state. Returning `[]` means "server has no items", deleting all existing collection data. + +Source: docs/collections/query-collection.md + +### CRITICAL Not using the correct adapter for your backend + +Wrong: + +```ts +const todoCollection = createCollection( + localOnlyCollectionOptions({ + getKey: (item) => item.id, + }), +) +// Manually fetching and inserting... 
+``` + +Correct: + +```ts +const todoCollection = createCollection( + queryCollectionOptions({ + queryKey: ['todos'], + queryFn: async () => fetch('/api/todos').then((r) => r.json()), + queryClient, + getKey: (item) => item.id, + }), +) +``` + +Each backend has a dedicated adapter that handles sync, mutation handlers, and utilities. Using `localOnlyCollectionOptions` or bare `createCollection` for a real backend bypasses all of this. + +Source: docs/overview.md + +### CRITICAL Electric txid queried outside mutation transaction + +Wrong: + +```ts +// Backend handler +app.post('/api/todos', async (req, res) => { + const txid = await generateTxId(sql) // WRONG: separate transaction + await sql`INSERT INTO todos ${sql(req.body)}` + res.json({ txid }) +}) +``` + +Correct: + +```ts +app.post('/api/todos', async (req, res) => { + let txid + await sql.begin(async (tx) => { + txid = await generateTxId(tx) // CORRECT: same transaction + await tx`INSERT INTO todos ${tx(req.body)}` + }) + res.json({ txid }) +}) +``` + +`pg_current_xact_id()` must be queried inside the same SQL transaction as the mutation. Otherwise the txid doesn't match and `awaitTxId` stalls forever. + +Source: docs/collections/electric-collection.md + +### CRITICAL queryFn returning partial data without merging + +Wrong: + +```ts +queryCollectionOptions({ + queryFn: async () => { + const newItems = await fetch('/api/todos?since=' + lastSync) + return newItems.json() // only new items — everything else deleted + }, +}) +``` + +Correct: + +```ts +queryCollectionOptions({ + queryFn: async (ctx) => { + const existing = ctx.queryClient.getQueryData(['todos']) || [] + const newItems = await fetch('/api/todos?since=' + lastSync).then((r) => + r.json(), + ) + return [...existing, ...newItems] + }, +}) +``` + +`queryFn` result replaces all collection data. For incremental fetches, merge with existing data. 
+ +Source: docs/collections/query-collection.md + +### HIGH Using async schema validation + +Wrong: + +```ts +const schema = z.object({ + email: z.string().refine(async (val) => { + const exists = await checkEmail(val) + return !exists + }), +}) +``` + +Correct: + +```ts +const schema = z.object({ + email: z.string().email(), +}) +// Do async validation in the mutation handler instead +``` + +Schema validation must be synchronous. Async validation throws `SchemaMustBeSynchronousError` at mutation time. + +Source: packages/db/src/collection/mutations.ts:101 + +### HIGH getKey returning undefined for some items + +Wrong: + +```ts +createCollection( + queryCollectionOptions({ + getKey: (item) => item.metadata.id, // undefined if metadata missing + }), +) +``` + +Correct: + +```ts +createCollection( + queryCollectionOptions({ + getKey: (item) => item.id, // always present + }), +) +``` + +`getKey` must return a defined value for every item. Throws `UndefinedKeyError` otherwise. + +Source: packages/db/src/collection/mutations.ts:148 + +### HIGH TInput not a superset of TOutput with schema transforms + +Wrong: + +```ts +const schema = z.object({ + created_at: z.string().transform((val) => new Date(val)), +}) +// update() fails — draft.created_at is Date but schema only accepts string +``` + +Correct: + +```ts +const schema = z.object({ + created_at: z + .union([z.string(), z.date()]) + .transform((val) => (typeof val === 'string' ? new Date(val) : val)), +}) +``` + +When a schema transforms types, `TInput` must accept both the pre-transform and post-transform types for `update()` to work with the draft proxy. + +Source: docs/guides/schemas.md + +### HIGH React Native missing crypto.randomUUID polyfill + +TanStack DB uses `crypto.randomUUID()` internally. React Native doesn't provide this. Install `react-native-random-uuid` and import it at your app entry point. 
+ +Source: docs/overview.md + +### MEDIUM Providing both explicit type parameter and schema + +Wrong: + +```ts +createCollection<Todo>(queryCollectionOptions({ schema: todoSchema, ... })) +``` + +Correct: + +```ts +createCollection(queryCollectionOptions({ schema: todoSchema, ... })) +``` + +When a schema is provided, the collection infers types from it. An explicit generic creates conflicting type constraints. + +Source: docs/overview.md + +### MEDIUM Direct writes overridden by next query sync + +Wrong: + +```ts +todoCollection.utils.writeInsert(newItem) +// Next queryFn execution replaces all data, losing the direct write +``` + +Correct: + +```ts +todoCollection.utils.writeInsert(newItem) +// Use staleTime to prevent immediate refetch +// Or return { refetch: false } from mutation handlers +``` + +Direct writes update the collection immediately, but the next `queryFn` returns complete server state which overwrites them. + +Source: docs/collections/query-collection.md + +## References + +- [TanStack Query adapter](references/query-adapter.md) +- [ElectricSQL adapter](references/electric-adapter.md) +- [PowerSync adapter](references/powersync-adapter.md) +- [RxDB adapter](references/rxdb-adapter.md) +- [TrailBase adapter](references/trailbase-adapter.md) +- [Local adapters (local-only, localStorage)](references/local-adapters.md) +- [Schema validation patterns](references/schema-patterns.md) + +See also: db-core/mutations-optimistic/SKILL.md — mutation handlers configured here execute during mutations. + +See also: db-core/custom-adapter/SKILL.md — for building your own adapter.
diff --git a/packages/db/skills/db-core/collection-setup/references/electric-adapter.md b/packages/db/skills/db-core/collection-setup/references/electric-adapter.md new file mode 100644 index 000000000..c575db064 --- /dev/null +++ b/packages/db/skills/db-core/collection-setup/references/electric-adapter.md @@ -0,0 +1,238 @@ +# Electric Adapter Reference + +## Install + +```bash +pnpm add @tanstack/electric-db-collection @tanstack/react-db +``` + +## Required Config + +```typescript +import { createCollection } from '@tanstack/react-db' +import { electricCollectionOptions } from '@tanstack/electric-db-collection' + +const collection = createCollection( + electricCollectionOptions({ + shapeOptions: { url: '/api/todos' }, + getKey: (item) => item.id, + }), +) +``` + +- `shapeOptions` -- ElectricSQL ShapeStream config; `url` is the proxy URL to Electric +- `getKey` -- extracts unique key from each item + +## Optional Config + +| Option | Default | Description | +| --------------------- | ------- | --------------------------------------------------- | +| `id` | (none) | Unique collection identifier | +| `schema` | (none) | StandardSchema validator | +| `shapeOptions.params` | (none) | Additional shape params (e.g. `{ table: 'todos' }`) | +| `onInsert` | (none) | Persistence handler; should return `{ txid }` | +| `onUpdate` | (none) | Persistence handler; should return `{ txid }` | +| `onDelete` | (none) | Persistence handler; should return `{ txid }` | + +## Three Sync Strategies + +### 1. Txid Return (Recommended) + +Handler returns `{ txid }`. Client waits for that txid in the Electric stream. + +```typescript +onInsert: async ({ transaction }) => { + const response = await api.todos.create(transaction.mutations[0].modified) + return { txid: response.txid } +}, +``` + +### 2. awaitMatch (Custom Match) + +Use when txids are unavailable. Import `isChangeMessage` to match on message content. 
+ +```typescript +import { isChangeMessage } from "@tanstack/electric-db-collection" + +onInsert: async ({ transaction, collection }) => { + const newItem = transaction.mutations[0].modified + await api.todos.create(newItem) + await collection.utils.awaitMatch( + (message) => + isChangeMessage(message) && + message.headers.operation === "insert" && + message.value.text === newItem.text, + 5000 // timeout ms, defaults to 3000 + ) +}, +``` + +### 3. Simple Timeout (Prototyping) + +```typescript +onInsert: async ({ transaction }) => { + await api.todos.create(transaction.mutations[0].modified) + await new Promise((resolve) => setTimeout(resolve, 2000)) +}, +``` + +## Utility Methods (`collection.utils`) + +- `awaitTxId(txid, timeout?)` -- wait for txid in Electric stream; default timeout 30s +- `awaitMatch(matchFn, timeout?)` -- wait for message matching predicate; default timeout 3000ms + +### Helper Exports + +```typescript +import { + isChangeMessage, + isControlMessage, +} from '@tanstack/electric-db-collection' +// isChangeMessage(msg) -- true for insert/update/delete +// isControlMessage(msg) -- true for up-to-date/must-refetch +``` + +## generateTxId Backend Pattern + +The txid **must** be queried inside the same Postgres transaction as the mutation. + +```typescript +async function generateTxId(tx: any): Promise<number> { + const result = await tx`SELECT pg_current_xact_id()::xid::text as txid` + const txid = result[0]?.txid + if (txid === undefined) throw new Error('Failed to get transaction ID') + return parseInt(txid, 10) +} + +async function createTodo(data) { + let txid!: number + const result = await sql.begin(async (tx) => { + txid = await generateTxId(tx) // INSIDE the transaction + const [todo] = await tx`INSERT INTO todos ${tx(data)} RETURNING *` + return todo + }) + return { todo: result, txid } +} +``` + +Querying txid outside the transaction produces a mismatched txid -- `awaitTxId` stalls indefinitely.
+ +## Schema vs Parser: Two Separate Paths + +When using Electric with a schema, data enters the collection via **two independent paths**: + +1. **Sync path** — Electric's `ShapeStream` applies the `parser` from `shapeOptions`. The schema is NOT applied to synced data. +2. **Mutation path** — `insert()` and `update()` run through the collection schema. The parser is not involved. + +For types that need transformation (e.g., `timestamptz`), you need BOTH configured: + +```typescript +const todosCollection = createCollection( + electricCollectionOptions({ + schema: z.object({ + id: z.string(), + text: z.string(), + completed: z.boolean(), // Electric auto-parses bools + created_at: z.coerce.date(), // mutation path: coerce string → Date + }), + shapeOptions: { + url: '/api/todos', + parser: { + timestamptz: (value: string) => new Date(value), // sync path: parse incoming strings + }, + }, + getKey: (item) => item.id, + }), +) +``` + +### Postgres → Electric type handling + +| PG type | Electric auto-parses? | Schema needed? | Parser needed? | +| -------------- | --------------------- | ----------------- | --------------------------------------------------- | +| `text`, `uuid` | Yes (string) | `z.string()` | No | +| `int4`, `int8` | Yes (number) | `z.number()` | No | +| `bool` | Yes (boolean) | `z.boolean()` | No | +| `timestamptz` | No (stays string) | `z.coerce.date()` | Yes — `parser: { timestamptz: (v) => new Date(v) }` | +| `jsonb` | Yes (parsed object) | As needed | No | + +Note: `z.coerce.date()` is Zod-specific. Other StandardSchema libraries have their own coercion patterns. + +## Proxy Route + +Electric collections connect to a proxy URL (`shapeOptions.url`), not directly to Electric. Your app server must forward shape requests to Electric, passing through the Electric protocol query params. + +The proxy route must: + +1. Accept GET requests at the URL you specify in `shapeOptions.url` +2. 
Forward all query parameters (these are Electric protocol params like `offset`, `handle`, `live`, etc.) +3. Proxy the response (SSE stream) back to the client +4. Optionally add authentication headers or filter params + +Implementation depends on your framework — use `createServerFn` in TanStack Start, API routes in Next.js, `loader` in Remix, etc. See the `@electric-sql/client` skills for proxy route examples: + +```bash +npx @electric-sql/client intent list +``` + +## Electric Client Skills + +For deeper Electric-specific guidance (ShapeStream config, shape filtering, etc.), load the Electric client's built-in skills: + +```bash +npx @electric-sql/client intent list +``` + +## Debug Logging + +```javascript +localStorage.debug = 'ts/db:electric' +``` + +## Complete Example + +Always use a schema — types are inferred automatically, avoiding generic placement confusion. + +```typescript +import { createCollection } from '@tanstack/react-db' +import { electricCollectionOptions } from '@tanstack/electric-db-collection' +import { z } from 'zod' + +const todoSchema = z.object({ + id: z.string(), + text: z.string().min(1), + completed: z.boolean(), + created_at: z.coerce.date(), +}) + +const todosCollection = createCollection( + electricCollectionOptions({ + id: 'todos', + schema: todoSchema, + getKey: (item) => item.id, + shapeOptions: { + url: '/api/todos', + params: { table: 'todos' }, + parser: { + timestamptz: (value: string) => new Date(value), // sync path + }, + }, + onInsert: async ({ transaction }) => { + const response = await api.todos.create(transaction.mutations[0].modified) + return { txid: response.txid } + }, + onUpdate: async ({ transaction }) => { + const { original, changes } = transaction.mutations[0] + const response = await api.todos.update({ + where: { id: original.id }, + data: changes, + }) + return { txid: response.txid } + }, + onDelete: async ({ transaction }) => { + const response = await api.todos.delete(transaction.mutations[0].key) + 
return { txid: response.txid } + }, + }), +) +``` diff --git a/packages/db/skills/db-core/collection-setup/references/local-adapters.md b/packages/db/skills/db-core/collection-setup/references/local-adapters.md new file mode 100644 index 000000000..cc3ffea28 --- /dev/null +++ b/packages/db/skills/db-core/collection-setup/references/local-adapters.md @@ -0,0 +1,220 @@ +# Local Adapters Reference + +Both adapters are included in the core package. + +## Install + +```bash +pnpm add @tanstack/react-db +``` + +--- + +## localOnlyCollectionOptions + +In-memory only. No persistence. No cross-tab sync. + +### Required Config + +```typescript +import { + createCollection, + localOnlyCollectionOptions, +} from '@tanstack/react-db' + +const collection = createCollection( + localOnlyCollectionOptions({ + id: 'ui-state', + getKey: (item) => item.id, + }), +) +``` + +- `id` -- unique collection identifier +- `getKey` -- extracts unique key from each item + +### Optional Config + +| Option | Default | Description | +| ------------- | ------- | -------------------------------------- | +| `schema` | (none) | StandardSchema validator | +| `initialData` | (none) | Array of items to populate on creation | +| `onInsert` | (none) | Handler before confirming inserts | +| `onUpdate` | (none) | Handler before confirming updates | +| `onDelete` | (none) | Handler before confirming deletes | + +### Direct Mutations + +```typescript +collection.insert({ id: 'theme', mode: 'dark' }) +collection.update('theme', (draft) => { + draft.mode = 'light' +}) +collection.delete('theme') +``` + +### initialData + +```typescript +localOnlyCollectionOptions({ + id: 'ui-state', + getKey: (item) => item.id, + initialData: [ + { id: 'sidebar', isOpen: false }, + { id: 'theme', mode: 'light' }, + ], +}) +``` + +### acceptMutations in Manual Transactions + +When using `createTransaction`, call `collection.utils.acceptMutations(transaction)` in `mutationFn`: + +```typescript +import { createTransaction } from 
'@tanstack/react-db' + +const tx = createTransaction({ + mutationFn: async ({ transaction }) => { + // Handle server mutations first, then: + localData.utils.acceptMutations(transaction) + }, +}) +tx.mutate(() => { + localData.insert({ id: 'draft-1', data: '...' }) +}) +await tx.commit() +``` + +--- + +## localStorageCollectionOptions + +Persists to `localStorage`. Cross-tab sync via storage events. Survives reloads. + +### Required Config + +```typescript +import { + createCollection, + localStorageCollectionOptions, +} from '@tanstack/react-db' + +const collection = createCollection( + localStorageCollectionOptions({ + id: 'user-preferences', + storageKey: 'app-user-prefs', + getKey: (item) => item.id, + }), +) +``` + +- `id` -- unique collection identifier +- `storageKey` -- localStorage key for all collection data +- `getKey` -- extracts unique key from each item + +### Optional Config + +| Option | Default | Description | +| ----------------- | -------------- | -------------------------------------------------------------------- | +| `schema` | (none) | StandardSchema validator | +| `storage` | `localStorage` | Custom storage (`sessionStorage` or any localStorage-compatible API) | +| `storageEventApi` | `window` | Event API for cross-tab sync | +| `onInsert` | (none) | Handler on insert | +| `onUpdate` | (none) | Handler on update | +| `onDelete` | (none) | Handler on delete | + +### Using sessionStorage + +```typescript +localStorageCollectionOptions({ + id: 'session-data', + storageKey: 'session-key', + storage: sessionStorage, + getKey: (item) => item.id, +}) +``` + +### Custom Storage Backend + +Provide any object with `getItem`, `setItem`, `removeItem`: + +```typescript +const encryptedStorage = { + getItem: (key) => { + const v = localStorage.getItem(key) + return v ? 
decrypt(v) : null + }, + setItem: (key, value) => localStorage.setItem(key, encrypt(value)), + removeItem: (key) => localStorage.removeItem(key), +} +localStorageCollectionOptions({ + id: 'secure', + storageKey: 'enc-key', + storage: encryptedStorage, + getKey: (i) => i.id, +}) +``` + +### acceptMutations + +Same as LocalOnly -- call `collection.utils.acceptMutations(transaction)` in manual transactions. + +--- + +## Comparison + +| Feature | LocalOnly | LocalStorage | +| --------------- | ---------------- | ------------ | +| Persistence | None (in-memory) | localStorage | +| Cross-tab sync | No | Yes | +| Survives reload | No | Yes | +| Performance | Fastest | Fast | +| Size limits | Memory | ~5-10MB | + +## Complete Example + +```typescript +import { + createCollection, + localOnlyCollectionOptions, + localStorageCollectionOptions, +} from '@tanstack/react-db' +import { z } from 'zod' + +// In-memory UI state +const modalState = createCollection( + localOnlyCollectionOptions({ + id: 'modal-state', + getKey: (item) => item.id, + initialData: [ + { id: 'confirm-delete', isOpen: false }, + { id: 'settings', isOpen: false }, + ], + }), +) + +// Persistent user prefs +const userPrefs = createCollection( + localStorageCollectionOptions({ + id: 'user-preferences', + storageKey: 'app-user-prefs', + getKey: (item) => item.id, + schema: z.object({ + id: z.string(), + theme: z.enum(['light', 'dark', 'auto']), + language: z.string(), + notifications: z.boolean(), + }), + }), +) + +modalState.update('settings', (draft) => { + draft.isOpen = true +}) +userPrefs.insert({ + id: 'current-user', + theme: 'dark', + language: 'en', + notifications: true, +}) +``` diff --git a/packages/db/skills/db-core/collection-setup/references/powersync-adapter.md b/packages/db/skills/db-core/collection-setup/references/powersync-adapter.md new file mode 100644 index 000000000..c552a14d1 --- /dev/null +++ b/packages/db/skills/db-core/collection-setup/references/powersync-adapter.md @@ -0,0 +1,241 
@@ +# PowerSync Adapter Reference + +## Install + +```bash +pnpm add @tanstack/powersync-db-collection @powersync/web @journeyapps/wa-sqlite +``` + +## Required Config + +```typescript +import { createCollection } from '@tanstack/react-db' +import { powerSyncCollectionOptions } from '@tanstack/powersync-db-collection' +import { Schema, Table, column, PowerSyncDatabase } from '@powersync/web' + +const APP_SCHEMA = new Schema({ + documents: new Table({ + name: column.text, + author: column.text, + created_at: column.text, + archived: column.integer, + }), +}) + +const db = new PowerSyncDatabase({ + database: { dbFilename: 'app.sqlite' }, + schema: APP_SCHEMA, +}) + +const documentsCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.documents, + }), +) +``` + +- `database` -- `PowerSyncDatabase` instance +- `table` -- PowerSync `Table` from schema (provides `getKey` and type inference) + +## Optional Config (with defaults) + +| Option | Default | Description | +| ------------------------ | ------- | ------------------------------------------------------------------------------------- | +| `schema` | (none) | StandardSchema for mutation validation | +| `deserializationSchema` | (none) | Transforms SQLite types to output types; required when input types differ from SQLite | +| `onDeserializationError` | (none) | Fatal error handler; **required** when using `schema` or `deserializationSchema` | +| `serializer` | (none) | Per-field functions to serialize output types back to SQLite | +| `syncBatchSize` | `1000` | Batch size for initial sync | + +### SQLite Type Mapping + +| PowerSync Column | TypeScript Type | +| ---------------- | ---------------- | +| `column.text` | `string \| null` | +| `column.integer` | `number \| null` | +| `column.real` | `number \| null` | + +All columns nullable by default. `id: string` is always included automatically. + +## Conversions (4 patterns) + +### 1. 
Type Inference Only (no schema) + +```typescript +const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.documents, + }), +) +// Input/Output: { id: string, name: string | null, created_at: string | null, ... } +``` + +### 2. Schema Validation (same SQLite types) + +```typescript +const schema = z.object({ + id: z.string(), + name: z.string().min(3), + author: z.string(), + created_at: z.string(), + archived: z.number(), +}) +const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.documents, + schema, + onDeserializationError: (error) => { + /* fatal */ + }, + }), +) +``` + +### 3. Transform SQLite to Rich Output Types + +```typescript +const schema = z.object({ + id: z.string(), + name: z.string().nullable(), + created_at: z + .string() + .nullable() + .transform((val) => (val ? new Date(val) : null)), + archived: z + .number() + .nullable() + .transform((val) => (val != null ? val > 0 : null)), +}) +const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.documents, + schema, + onDeserializationError: (error) => { + /* fatal */ + }, + serializer: { created_at: (value) => (value ? value.toISOString() : null) }, + }), +) +// Input: { created_at: string | null, ... } +// Output: { created_at: Date | null, archived: boolean | null, ... } +``` + +### 4. 
Custom Input + Output with deserializationSchema + +```typescript +const schema = z.object({ + id: z.string(), + name: z.string(), + created_at: z.date(), + archived: z.boolean(), +}) +const deserializationSchema = z.object({ + id: z.string(), + name: z.string(), + created_at: z.string().transform((val) => new Date(val)), + archived: z.number().transform((val) => val > 0), +}) +const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.documents, + schema, + deserializationSchema, + onDeserializationError: (error) => { + /* fatal */ + }, + }), +) +// Input: { created_at: Date, archived: boolean } +// Output: { created_at: Date, archived: boolean } +``` + +## Metadata Tracking + +Enable on the table, then pass metadata with operations: + +```typescript +const APP_SCHEMA = new Schema({ + documents: new Table({ name: column.text }, { trackMetadata: true }), +}) + +await collection.insert( + { id: crypto.randomUUID(), name: 'Report' }, + { metadata: { source: 'web-app', userId: 'user-123' } }, +).isPersisted.promise +``` + +Metadata appears as `entry.metadata` (stringified JSON) in PowerSync `CrudEntry`. 
+ +## Advanced Transactions + +```typescript +import { createTransaction } from '@tanstack/react-db' +import { PowerSyncTransactor } from '@tanstack/powersync-db-collection' + +const tx = createTransaction({ + autoCommit: false, + mutationFn: async ({ transaction }) => { + await new PowerSyncTransactor({ database: db }).applyTransaction( + transaction, + ) + }, +}) +tx.mutate(() => { + documentsCollection.insert({ + id: crypto.randomUUID(), + name: 'Doc 1', + created_at: new Date().toISOString(), + }) +}) +await tx.commit() +await tx.isPersisted.promise +``` + +## Complete Example + +```typescript +import { Schema, Table, column, PowerSyncDatabase } from '@powersync/web' +import { createCollection } from '@tanstack/react-db' +import { powerSyncCollectionOptions } from '@tanstack/powersync-db-collection' +import { z } from 'zod' + +const APP_SCHEMA = new Schema({ + tasks: new Table({ + title: column.text, + due_date: column.text, + completed: column.integer, + }), +}) +const db = new PowerSyncDatabase({ + database: { dbFilename: 'app.sqlite' }, + schema: APP_SCHEMA, +}) + +const taskSchema = z.object({ + id: z.string(), + title: z.string().nullable(), + due_date: z + .string() + .nullable() + .transform((val) => (val ? new Date(val) : null)), + completed: z + .number() + .nullable() + .transform((val) => (val != null ? 
val > 0 : null)), +}) + +const tasksCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.tasks, + schema: taskSchema, + onDeserializationError: (error) => console.error('Fatal:', error), + syncBatchSize: 500, + }), +) +``` diff --git a/packages/db/skills/db-core/collection-setup/references/query-adapter.md b/packages/db/skills/db-core/collection-setup/references/query-adapter.md new file mode 100644 index 000000000..24e21e4cd --- /dev/null +++ b/packages/db/skills/db-core/collection-setup/references/query-adapter.md @@ -0,0 +1,183 @@ +# Query Adapter Reference + +## Install + +```bash +pnpm add @tanstack/query-db-collection @tanstack/query-core @tanstack/db +``` + +## Required Config + +```typescript +import { QueryClient } from '@tanstack/query-core' +import { createCollection } from '@tanstack/db' +import { queryCollectionOptions } from '@tanstack/query-db-collection' + +const queryClient = new QueryClient() +const collection = createCollection( + queryCollectionOptions({ + queryKey: ['todos'], + queryFn: async () => fetch('/api/todos').then((r) => r.json()), + queryClient, + getKey: (item) => item.id, + }), +) +``` + +- `queryKey` -- TanStack Query cache key +- `queryFn` -- fetches data; must be provided (throws `QueryFnRequiredError` if missing) +- `queryClient` -- `QueryClient` instance +- `getKey` -- extracts unique key from each item + +## Optional Config (with defaults) + +| Option | Default | Description | +| ----------------- | ------------ | ----------------------------------------------- | +| `id` | (none) | Unique collection identifier | +| `schema` | (none) | StandardSchema validator | +| `select` | (none) | Extracts array items when wrapped with metadata | +| `enabled` | `true` | Whether query runs automatically | +| `refetchInterval` | `0` | Polling interval in ms; 0 = disabled | +| `retry` | (TQ default) | Retry config for failed queries | +| `retryDelay` | (TQ default) | Delay between retries | +| 
`staleTime` | (TQ default) | How long data is considered fresh | +| `meta` | (none) | Metadata passed to queryFn context | +| `startSync` | `true` | Start syncing immediately | +| `syncMode` | (none) | Set `"on-demand"` for predicate push-down | + +### Persistence Handlers + +```typescript +onInsert: async ({ transaction }) => { + await api.createTodos(transaction.mutations.map((m) => m.modified)) + // return nothing or { refetch: true } to trigger refetch + // return { refetch: false } to skip refetch +}, +onUpdate: async ({ transaction }) => { + await api.updateTodos(transaction.mutations.map((m) => ({ id: m.key, changes: m.changes }))) +}, +onDelete: async ({ transaction }) => { + await api.deleteTodos(transaction.mutations.map((m) => m.key)) +}, +``` + +## Utility Methods (`collection.utils`) + +- `refetch(opts?)` -- manual refetch; `opts.throwOnError` (default `false`); bypasses `enabled: false` +- `writeInsert(data)` -- insert directly to synced store (bypasses optimistic system) +- `writeUpdate(data)` -- update directly in synced store +- `writeDelete(keys)` -- delete directly from synced store +- `writeUpsert(data)` -- insert or update directly +- `writeBatch(callback)` -- multiple write ops atomically + +Direct writes bypass optimistic updates, do NOT trigger refetches, and update TQ cache immediately. + +```typescript +collection.utils.writeBatch(() => { + collection.utils.writeInsert({ id: '1', text: 'Buy milk' }) + collection.utils.writeUpdate({ id: '2', completed: true }) + collection.utils.writeDelete('3') +}) +``` + +## Predicate Push-Down (syncMode: "on-demand") + +Query predicates (where, orderBy, limit, offset) passed to `queryFn` via `ctx.meta.loadSubsetOptions`. 
+ +```typescript +import { parseLoadSubsetOptions } from '@tanstack/query-db-collection' + +queryFn: async (ctx) => { + const { filters, sorts, limit, offset } = parseLoadSubsetOptions( + ctx.meta?.loadSubsetOptions, + ) + // filters: [{ field: ['category'], operator: 'eq', value: 'electronics' }] + // sorts: [{ field: ['price'], direction: 'asc', nulls: 'last' }] +} +``` + +### Expression Helpers (from `@tanstack/db`) + +- `parseLoadSubsetOptions(opts)` -- returns `{ filters, sorts, limit, offset }` +- `parseWhereExpression(expr, { handlers })` -- custom handlers per operator +- `parseOrderByExpression(expr)` -- returns `[{ field, direction, nulls }]` +- `extractSimpleComparisons(expr)` -- flat AND-ed comparisons only + +Supported operators: `eq`, `gt`, `gte`, `lt`, `lte`, `and`, `or`, `in` + +## Dynamic queryKey + +```typescript +queryKey: (opts) => { + const parsed = parseLoadSubsetOptions(opts) + const key = ["products"] + parsed.filters.forEach((f) => key.push(`${f.field.join(".")}-${f.operator}-${f.value}`)) + if (parsed.limit) key.push(`limit-${parsed.limit}`) + return key +}, +``` + +## Complete Example + +```typescript +import { QueryClient } from '@tanstack/query-core' +import { createCollection } from '@tanstack/react-db' +import { + queryCollectionOptions, + parseLoadSubsetOptions, +} from '@tanstack/query-db-collection' + +const queryClient = new QueryClient() + +const productsCollection = createCollection( + queryCollectionOptions({ + id: 'products', + queryKey: ['products'], + queryClient, + getKey: (item) => item.id, + syncMode: 'on-demand', + queryFn: async (ctx) => { + const { filters, sorts, limit } = parseLoadSubsetOptions( + ctx.meta?.loadSubsetOptions, + ) + const params = new URLSearchParams() + filters.forEach(({ field, operator, value }) => { + params.set(`${field.join('.')}_${operator}`, String(value)) + }) + if (sorts.length > 0) { + params.set( + 'sort', + sorts.map((s) => `${s.field.join('.')}:${s.direction}`).join(','), + ) + } + if 
(limit) params.set('limit', String(limit)) + return fetch(`/api/products?${params}`).then((r) => r.json()) + }, + onInsert: async ({ transaction }) => { + const serverItems = await api.createProducts( + transaction.mutations.map((m) => m.modified), + ) + productsCollection.utils.writeBatch(() => { + serverItems.forEach((item) => + productsCollection.utils.writeInsert(item), + ) + }) + return { refetch: false } + }, + onUpdate: async ({ transaction }) => { + await api.updateProducts( + transaction.mutations.map((m) => ({ id: m.key, changes: m.changes })), + ) + }, + onDelete: async ({ transaction }) => { + await api.deleteProducts(transaction.mutations.map((m) => m.key)) + }, + }), +) +``` + +## Key Behaviors + +- `queryFn` result is treated as **complete state** -- missing items are deleted +- Empty array from `queryFn` deletes all items +- Direct writes update TQ cache but are overridden by subsequent `queryFn` results diff --git a/packages/db/skills/db-core/collection-setup/references/rxdb-adapter.md b/packages/db/skills/db-core/collection-setup/references/rxdb-adapter.md new file mode 100644 index 000000000..fcdcf84b0 --- /dev/null +++ b/packages/db/skills/db-core/collection-setup/references/rxdb-adapter.md @@ -0,0 +1,152 @@ +# RxDB Adapter Reference + +## Install + +```bash +pnpm add @tanstack/rxdb-db-collection rxdb @tanstack/react-db +``` + +## Required Config + +```typescript +import { createCollection } from '@tanstack/react-db' +import { rxdbCollectionOptions } from '@tanstack/rxdb-db-collection' + +const todosCollection = createCollection( + rxdbCollectionOptions({ + rxCollection: db.todos, + }), +) +``` + +- `rxCollection` -- the underlying RxDB `RxCollection` instance + +## Optional Config (with defaults) + +| Option | Default | Description | +| --------------- | ----------------------- | -------------------------------------------------------------------------------------------------- | +| `id` | (none) | Unique collection identifier | +| `schema` | 
(none) | StandardSchema validator (RxDB has its own validation; this adds TanStack DB-side validation) | +| `startSync` | `true` | Start ingesting RxDB data immediately | +| `syncBatchSize` | `1000` | Max documents per batch during initial sync from RxDB; only affects initial load, not live updates | +| `onInsert` | (default: `bulkUpsert`) | Override default insert persistence | +| `onUpdate` | (default: `patch`) | Override default update persistence | +| `onDelete` | (default: `bulkRemove`) | Override default delete persistence | + +## Key Behavior: String Keys + +RxDB primary keys are always strings. The `getKey` function is derived from the RxDB schema's `primaryKey` field automatically. All key values will be strings. + +## RxDB Setup (prerequisite) + +```typescript +import { createRxDatabase } from 'rxdb/plugins/core' +import { getRxStorageLocalstorage } from 'rxdb/plugins/storage-localstorage' + +const db = await createRxDatabase({ + name: 'my-app', + storage: getRxStorageLocalstorage(), +}) + +await db.addCollections({ + todos: { + schema: { + title: 'todos', + version: 0, + type: 'object', + primaryKey: 'id', + properties: { + id: { type: 'string', maxLength: 100 }, + text: { type: 'string' }, + completed: { type: 'boolean' }, + }, + required: ['id', 'text', 'completed'], + }, + }, +}) +``` + +## Backend Sync (optional, RxDB-managed) + +Replication is configured directly on the RxDB collection, independent of TanStack DB. Changes from replication flow into the TanStack DB collection via RxDB's change stream automatically. 
+ +```typescript +import { replicateRxCollection } from 'rxdb/plugins/replication' + +const replicationState = replicateRxCollection({ + collection: db.todos, + pull: { handler: myPullHandler }, + push: { handler: myPushHandler }, +}) +``` + +## Data Flow + +- Writes via `todosCollection.insert/update/delete` persist to RxDB +- Direct RxDB writes (or replication changes) flow into the TanStack collection via change streams +- Initial sync loads data in batches of `syncBatchSize` +- Ongoing updates stream one by one via RxDB's change feed + +## Indexes + +RxDB schema indexes do not affect TanStack DB query performance (queries run in-memory). Indexes may still matter if you query RxDB directly, use filtered replication, or selectively load subsets. + +## Complete Example + +```typescript +import { createRxDatabase } from 'rxdb/plugins/core' +import { getRxStorageLocalstorage } from 'rxdb/plugins/storage-localstorage' +import { createCollection } from '@tanstack/react-db' +import { rxdbCollectionOptions } from '@tanstack/rxdb-db-collection' +import { z } from 'zod' + +type Todo = { id: string; text: string; completed: boolean } + +const db = await createRxDatabase({ + name: 'my-todos', + storage: getRxStorageLocalstorage(), +}) + +await db.addCollections({ + todos: { + schema: { + title: 'todos', + version: 0, + type: 'object', + primaryKey: 'id', + properties: { + id: { type: 'string', maxLength: 100 }, + text: { type: 'string' }, + completed: { type: 'boolean' }, + }, + required: ['id', 'text', 'completed'], + }, + }, +}) + +const todoSchema = z.object({ + id: z.string(), + text: z.string().min(1), + completed: z.boolean(), +}) + +const todosCollection = createCollection( + rxdbCollectionOptions({ + rxCollection: db.todos, + schema: todoSchema, + startSync: true, + syncBatchSize: 500, + }), +) + +// Usage +todosCollection.insert({ + id: crypto.randomUUID(), + text: 'Buy milk', + completed: false, +}) +todosCollection.update('some-id', (draft) => { + draft.completed 
= true +}) +todosCollection.delete('some-id') +``` diff --git a/packages/db/skills/db-core/collection-setup/references/schema-patterns.md b/packages/db/skills/db-core/collection-setup/references/schema-patterns.md new file mode 100644 index 000000000..1bee2c134 --- /dev/null +++ b/packages/db/skills/db-core/collection-setup/references/schema-patterns.md @@ -0,0 +1,215 @@ +# Schema Patterns Reference + +## StandardSchema Integration + +TanStack DB accepts any [StandardSchema](https://standardschema.dev)-compatible library via the `schema` option. + +### Supported Libraries + +- [Zod](https://zod.dev), [Valibot](https://valibot.dev), [ArkType](https://arktype.io), [Effect Schema](https://effect.website/docs/schema/introduction/) + +## TInput vs TOutput + +- **TInput** -- type accepted by `insert()` and `update()` +- **TOutput** -- type stored in collection and returned from queries + +When no transforms exist, TInput === TOutput. + +```typescript +const schema = z.object({ + id: z.string(), + created_at: z.string().transform((val) => new Date(val)), +}) +// TInput: { id: string, created_at: string } +// TOutput: { id: string, created_at: Date } +``` + +## Union Pattern for Transforms (Required) + +When a schema transforms A to B, TInput **must** accept both A and B. During `update()`, the draft contains TOutput data. + +```typescript +// WRONG -- update() fails because draft.created_at is Date but schema expects string +z.string().transform((val) => new Date(val)) + +// CORRECT +z.union([z.string(), z.date()]).transform((val) => + typeof val === 'string' ? 
new Date(val) : val, +) +// TInput: string | Date, TOutput: Date +``` + +## Defaults + +```typescript +const schema = z.object({ + id: z.string(), + text: z.string(), + completed: z.boolean().default(false), + priority: z.number().default(0), + tags: z.array(z.string()).default([]), + created_at: z.date().default(() => new Date()), +}) +// insert({ id: "1", text: "Task" }) -- missing fields auto-filled +``` + +## Computed Fields + +```typescript +const schema = z + .object({ + id: z.string(), + first_name: z.string(), + last_name: z.string(), + }) + .transform((data) => ({ + ...data, + full_name: `${data.first_name} ${data.last_name}`, + })) +``` + +## Combining Defaults with Transforms + +```typescript +const schema = z.object({ + created_at: z + .string() + .default(() => new Date().toISOString()) + .transform((val) => new Date(val)), +}) +``` + +## Validation Examples + +```typescript +// Basic constraints +z.string().min(3).max(100) +z.string().email() +z.number().int().positive() +z.enum(['active', 'inactive']) +z.array(z.string()).min(1) + +// Optional/nullable +z.string().optional() // can be omitted +z.string().nullable() // can be null + +// Cross-field +z.object({ start: z.string(), end: z.string() }).refine( + (d) => new Date(d.end) > new Date(d.start), + 'End must be after start', +) + +// Custom +z.string().refine((v) => /^[a-zA-Z0-9_]+$/.test(v), 'Alphanumeric only') +``` + +## SchemaValidationError + +```typescript +import { SchemaValidationError } from '@tanstack/db' + +try { + collection.insert({ id: '1', email: 'bad', age: -5 }) +} catch (error) { + if (error instanceof SchemaValidationError) { + error.type // "insert" or "update" + error.message // "Validation failed with 2 issues" + error.issues // [{ path: ["email"], message: "Invalid email" }, ...] + } +} +``` + +## Scope: Schema vs Sync — Two Separate Paths + +**Schemas validate client mutations only** (`insert()`, `update()`). Synced data from backends (Electric, PowerSync, etc.) 
bypasses the schema entirely. + +This means for types that need transformation (e.g., `timestamptz`): + +- **Sync path**: handled by the adapter's parser (e.g., Electric's `shapeOptions.parser`) +- **Mutation path**: handled by the Zod schema + +You need BOTH configured for full type safety. See electric-adapter.md for the dual-path pattern. + +### Simpler date coercion (Zod-specific) + +With Zod, `z.coerce.date()` is simpler than the `z.union([z.string(), z.date()]).transform(...)` pattern: + +```typescript +// Zod-specific: z.coerce.date() accepts string, number, or Date as input +const schema = z.object({ + created_at: z.coerce.date(), +}) +// TInput: { created_at: string | number | Date } (coerce accepts many types) +// TOutput: { created_at: Date } +``` + +This satisfies the TInput-superset-of-TOutput requirement automatically. Other StandardSchema libraries have their own coercion patterns — consult library docs. + +### Important + +- Validation is synchronous, runs on every mutation +- Keep transforms simple for performance + +## Where TOutput Appears + +- Data stored in collection and returned from queries +- `PendingMutation.modified` +- Mutation handler `transaction.mutations[].modified` + +## Performance + +Keep transforms simple -- validation runs synchronously on every mutation. + +## Complete Example + +```typescript +import { z } from 'zod' +import { createCollection, SchemaValidationError } from '@tanstack/react-db' +import { queryCollectionOptions } from '@tanstack/query-db-collection' + +const todoSchema = z.object({ + id: z.string(), + text: z.string().min(1, 'Text is required'), + completed: z.boolean().default(false), + priority: z.enum(['low', 'medium', 'high']).default('medium'), + created_at: z + .union([z.string(), z.date()]) + .transform((val) => (typeof val === 'string' ? 
new Date(val) : val)) + .default(() => new Date()), +}) + +const todosCollection = createCollection( + queryCollectionOptions({ + queryKey: ['todos'], + queryFn: async () => fetch('/api/todos').then((r) => r.json()), + queryClient, + getKey: (item) => item.id, + schema: todoSchema, + onInsert: async ({ transaction }) => { + const todo = transaction.mutations[0].modified + await api.todos.create({ + ...todo, + created_at: todo.created_at.toISOString(), + }) + }, + }), +) + +// Defaults and transforms applied +todosCollection.insert({ id: '1', text: 'Buy groceries' }) +// => { id: "1", text: "Buy groceries", completed: false, priority: "medium", created_at: Date } + +// Update works -- draft contains TOutput, schema accepts via union +todosCollection.update('1', (draft) => { + draft.completed = true +}) + +// Error handling +try { + todosCollection.insert({ id: '2', text: '' }) +} catch (e) { + if (e instanceof SchemaValidationError) { + console.log(e.issues) // [{ path: ["text"], message: "Text is required" }] + } +} +``` diff --git a/packages/db/skills/db-core/collection-setup/references/trailbase-adapter.md b/packages/db/skills/db-core/collection-setup/references/trailbase-adapter.md new file mode 100644 index 000000000..a01b29ee6 --- /dev/null +++ b/packages/db/skills/db-core/collection-setup/references/trailbase-adapter.md @@ -0,0 +1,147 @@ +# TrailBase Adapter Reference + +## Install + +```bash +pnpm add @tanstack/trailbase-db-collection @tanstack/react-db trailbase +``` + +## Required Config + +```typescript +import { createCollection } from '@tanstack/react-db' +import { trailBaseCollectionOptions } from '@tanstack/trailbase-db-collection' +import { initClient } from 'trailbase' + +const trailBaseClient = initClient('https://your-trailbase-instance.com') + +const todosCollection = createCollection( + trailBaseCollectionOptions({ + id: 'todos', + recordApi: trailBaseClient.records('todos'), + getKey: (item) => item.id, + }), +) +``` + +- `id` -- unique 
collection identifier +- `recordApi` -- TrailBase Record API instance from `trailBaseClient.records(tableName)` +- `getKey` -- extracts unique key from each item + +## Optional Config + +| Option | Default | Description | +| ----------- | ------- | --------------------------------------------------------------------------------- | +| `schema` | (none) | StandardSchema validator | +| `parse` | (none) | Object mapping field names to functions that transform data coming FROM TrailBase | +| `serialize` | (none) | Object mapping field names to functions that transform data going TO TrailBase | +| `onInsert` | (none) | Handler called on insert | +| `onUpdate` | (none) | Handler called on update | +| `onDelete` | (none) | Handler called on delete | + +## Conversions (parse/serialize) + +TrailBase uses different data formats (e.g. Unix timestamps). Use `parse` and `serialize` for field-level transformations. + +```typescript +type SelectTodo = { + id: string + text: string + created_at: number // Unix timestamp from TrailBase + completed: boolean +} + +type Todo = { + id: string + text: string + created_at: Date // Rich JS type for app usage + completed: boolean +} + +const collection = createCollection( + trailBaseCollectionOptions({ + id: 'todos', + recordApi: trailBaseClient.records('todos'), + getKey: (item) => item.id, + parse: { + created_at: (ts) => new Date(ts * 1000), + }, + serialize: { + created_at: (date) => Math.floor(date.valueOf() / 1000), + }, + }), +) +``` + +## Real-time Subscriptions + +Automatic when `enable_subscriptions` is enabled on the TrailBase server. No additional client config needed -- the collection subscribes to changes automatically. 
+ +## Persistence Handlers + +```typescript +onInsert: async ({ transaction }) => { + const newItem = transaction.mutations[0].modified +}, +onUpdate: async ({ transaction }) => { + const { original, modified } = transaction.mutations[0] +}, +onDelete: async ({ transaction }) => { + const deletedItem = transaction.mutations[0].original +}, +``` + +TrailBase handles persistence through the Record API automatically. Custom handlers are for additional logic only. + +## Complete Example + +```typescript +import { createCollection } from '@tanstack/react-db' +import { trailBaseCollectionOptions } from '@tanstack/trailbase-db-collection' +import { initClient } from 'trailbase' +import { z } from 'zod' + +const trailBaseClient = initClient('https://your-trailbase-instance.com') + +const todoSchema = z.object({ + id: z.string(), + text: z.string(), + completed: z.boolean(), + created_at: z.date(), +}) + +type SelectTodo = { + id: string + text: string + completed: boolean + created_at: number +} + +type Todo = z.infer<typeof todoSchema> + +const todosCollection = createCollection( + trailBaseCollectionOptions({ + id: 'todos', + recordApi: trailBaseClient.records('todos'), + getKey: (item) => item.id, + schema: todoSchema, + parse: { + created_at: (ts) => new Date(ts * 1000), + }, + serialize: { + created_at: (date) => Math.floor(date.valueOf() / 1000), + }, + onInsert: async ({ transaction }) => { + console.log('Created:', transaction.mutations[0].modified) + }, + }), +) + +// Usage +todosCollection.insert({ + id: crypto.randomUUID(), + text: 'Review PR', + completed: false, + created_at: new Date(), +}) +``` diff --git a/packages/db/skills/db-core/custom-adapter/SKILL.md b/packages/db/skills/db-core/custom-adapter/SKILL.md new file mode 100644 index 000000000..abc8d3cce --- --- /dev/null +++ b/packages/db/skills/db-core/custom-adapter/SKILL.md @@ -0,0 +1,285 @@ +--- +name: db-core/custom-adapter +description: > + Building custom collection adapters for new backends. 
SyncConfig interface: + sync function receiving begin, write, commit, markReady, truncate primitives. + ChangeMessage format (insert, update, delete). loadSubset for on-demand sync. + LoadSubsetOptions (where, orderBy, limit, cursor). Expression parsing: + parseWhereExpression, parseOrderByExpression, extractSimpleComparisons, + parseLoadSubsetOptions. Collection options creator pattern. rowUpdateMode + (partial vs full). Subscription lifecycle and cleanup functions. +type: sub-skill +library: db +library_version: '0.5.30' +sources: + - 'TanStack/db:docs/guides/collection-options-creator.md' + - 'TanStack/db:packages/db/src/collection/sync.ts' +--- + +This skill builds on db-core and db-core/collection-setup. Read those first. + +# Custom Adapter Authoring + +## Setup + +```ts +import { createCollection } from '@tanstack/db' +import type { SyncConfig, CollectionConfig } from '@tanstack/db' + +interface MyItem { + id: string + name: string +} + +function myBackendCollectionOptions<T extends object>(config: { + endpoint: string + getKey: (item: T) => string +}): CollectionConfig<T> { + return { + getKey: config.getKey, + sync: { + sync: ({ begin, write, commit, markReady, collection }) => { + let isInitialSyncComplete = false + const bufferedEvents: Array<any> = [] + + // 1. Subscribe to real-time events FIRST + const unsubscribe = myWebSocket.subscribe(config.endpoint, (event) => { + if (!isInitialSyncComplete) { + bufferedEvents.push(event) + return + } + begin() + write({ type: event.type, key: event.id, value: event.data }) + commit() + }) + + // 2. Fetch initial data + fetch(config.endpoint).then(async (res) => { + const items = await res.json() + begin() + for (const item of items) { + write({ type: 'insert', value: item }) + } + commit() + + // 3. Process buffered events + isInitialSyncComplete = true + for (const event of bufferedEvents) { + begin() + write({ type: event.type, key: event.id, value: event.data }) + commit() + } + + // 4. Signal readiness + markReady() + }) + + // 5. 
Return cleanup function + return () => { + unsubscribe() + } + }, + rowUpdateMode: 'partial', + }, + onInsert: async ({ transaction }) => { + await fetch(config.endpoint, { + method: 'POST', + body: JSON.stringify(transaction.mutations[0].modified), + }) + }, + onUpdate: async ({ transaction }) => { + const mut = transaction.mutations[0] + await fetch(`${config.endpoint}/${mut.key}`, { + method: 'PATCH', + body: JSON.stringify(mut.changes), + }) + }, + onDelete: async ({ transaction }) => { + await fetch(`${config.endpoint}/${transaction.mutations[0].key}`, { + method: 'DELETE', + }) + }, + } +} +``` + +## Core Patterns + +### ChangeMessage format + +```ts +// Insert +write({ type: 'insert', value: item }) + +// Update (partial — only changed fields) +write({ type: 'update', key: itemId, value: partialItem }) + +// Update (full row replacement) +write({ type: 'update', key: itemId, value: fullItem }) +// Set rowUpdateMode: "full" in sync config + +// Delete +write({ type: 'delete', key: itemId, value: item }) +``` + +### On-demand sync with loadSubset + +```ts +import { parseLoadSubsetOptions } from "@tanstack/db" + +sync: { + sync: ({ begin, write, commit, markReady }) => { + // Initial sync... + markReady() + return () => {} + }, + loadSubset: async (options) => { + const { filters, sorts, limit, offset } = parseLoadSubsetOptions(options) + // filters: [{ field: ['category'], operator: 'eq', value: 'electronics' }] + // sorts: [{ field: ['price'], direction: 'asc', nulls: 'last' }] + const params = new URLSearchParams() + for (const f of filters) { + params.set(f.field.join("."), `${f.operator}:${f.value}`) + } + const res = await fetch(`/api/items?${params}`) + return res.json() + }, +} +``` + +### Managing optimistic state duration + +Mutation handlers must not resolve until server changes have synced back to the collection. Five strategies: + +1. **Refetch** (simplest): `await collection.utils.refetch()` +2. 
**Transaction ID**: return `{ txid }` and track via sync stream +3. **ID-based tracking**: await specific record ID appearing in sync stream +4. **Version/timestamp**: wait until sync stream catches up to mutation time +5. **Provider method**: `await backend.waitForPendingWrites()` + +### Expression parsing for predicate push-down + +```ts +import { + parseWhereExpression, + parseOrderByExpression, + extractSimpleComparisons, +} from '@tanstack/db' + +// In loadSubset or queryFn: +const comparisons = extractSimpleComparisons(options.where) +// Returns: [{ field: ['name'], operator: 'eq', value: 'John' }] + +const orderBy = parseOrderByExpression(options.orderBy) +// Returns: [{ field: ['created_at'], direction: 'desc', nulls: 'last' }] +``` + +## Common Mistakes + +### CRITICAL Not calling markReady() in sync implementation + +Wrong: + +```ts +sync: ({ begin, write, commit }) => { + fetchData().then((items) => { + begin() + items.forEach((item) => write({ type: 'insert', value: item })) + commit() + // forgot markReady()! + }) +} +``` + +Correct: + +```ts +sync: ({ begin, write, commit, markReady }) => { + fetchData().then((items) => { + begin() + items.forEach((item) => write({ type: 'insert', value: item })) + commit() + markReady() + }) +} +``` + +`markReady()` transitions the collection to "ready" status. Without it, live queries never resolve and `useLiveSuspenseQuery` hangs forever in Suspense. 
+ +Source: docs/guides/collection-options-creator.md + +### HIGH Race condition: subscribing after initial fetch + +Wrong: + +```ts +sync: ({ begin, write, commit, markReady }) => { + fetchAll().then((data) => { + writeAll(data) + subscribe(onChange) // changes during fetch are LOST + markReady() + }) +} +``` + +Correct: + +```ts +sync: ({ begin, write, commit, markReady }) => { + const buffer = [] + subscribe((event) => { + if (!ready) { + buffer.push(event) + return + } + begin() + write(event) + commit() + }) + fetchAll().then((data) => { + writeAll(data) + ready = true + buffer.forEach((e) => { + begin() + write(e) + commit() + }) + markReady() + }) +} +``` + +Subscribe to real-time events before fetching initial data. Buffer events during the fetch, then replay them after the initial sync completes. + +Source: docs/guides/collection-options-creator.md + +### HIGH write() called without begin() + +Wrong: + +```ts +onMessage((event) => { + write({ type: event.type, key: event.id, value: event.data }) + commit() +}) +``` + +Correct: + +```ts +onMessage((event) => { + begin() + write({ type: event.type, key: event.id, value: event.data }) + commit() +}) +``` + +Sync data must be written within a transaction (`begin` → `write` → `commit`). Calling `write()` without `begin()` throws `NoPendingSyncTransactionWriteError`. + +Source: packages/db/src/collection/sync.ts:110 + +## Tension: Simplicity vs. Correctness in Sync + +Getting-started simplicity (localOnly, eager mode) conflicts with production correctness (on-demand sync, race condition prevention, proper markReady handling). Agents optimizing for quick setup tend to skip buffering, markReady, and cleanup functions. + +See also: db-core/collection-setup/SKILL.md -- for built-in adapter patterns to model after. 
diff --git a/packages/db/skills/db-core/live-queries/SKILL.md b/packages/db/skills/db-core/live-queries/SKILL.md new file mode 100644 index 000000000..842788fef --- /dev/null +++ b/packages/db/skills/db-core/live-queries/SKILL.md @@ -0,0 +1,332 @@ +--- +name: db-core/live-queries +description: > + Query builder fluent API: from, where, join, leftJoin, rightJoin, innerJoin, + fullJoin, select, fn.select, groupBy, having, orderBy, limit, offset, distinct, + findOne. Operators: eq, gt, gte, lt, lte, like, ilike, inArray, isNull, + isUndefined, and, or, not. Aggregates: count, sum, avg, min, max. String + functions: upper, lower, length, concat, coalesce. Math: add. $selected + namespace. createLiveQueryCollection. Derived collections. Predicate push-down. + Incremental view maintenance via differential dataflow (d2ts). +type: sub-skill +library: db +library_version: '0.5.30' +sources: + - 'TanStack/db:docs/guides/live-queries.md' + - 'TanStack/db:packages/db/src/query/builder/index.ts' + - 'TanStack/db:packages/db/src/query/compiler/index.ts' +--- + +# Live Queries + +> This skill builds on db-core. + +TanStack DB live queries use a SQL-like fluent query builder to create **reactive derived collections** that automatically update when underlying data changes. The query engine compiles queries into incremental view maintenance (IVM) pipelines using differential dataflow (d2ts), so only deltas are recomputed. + +All operators, string functions, math functions, and aggregates are incrementally maintained. Prefer them over equivalent JS code. + +## Setup + +Minimal example using the core API (no framework hooks): + +```ts +import { + createCollection, + createLiveQueryCollection, + liveQueryCollectionOptions, + eq, +} from '@tanstack/db' + +// Assume usersCollection is already created via createCollection(...) 
+ +// Option 1: createLiveQueryCollection shorthand +const activeUsers = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.active, true)) + .select(({ user }) => ({ + id: user.id, + name: user.name, + email: user.email, + })), +) + +// Option 2: full options via liveQueryCollectionOptions +const activeUsers2 = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.active, true)) + .select(({ user }) => ({ + id: user.id, + name: user.name, + })), + getKey: (user) => user.id, + }), +) + +// The result is a live collection -- iterate, subscribe, or use as source +for (const user of activeUsers) { + console.log(user.name) +} +``` + +## Core Patterns + +### 1. Filtering with where + operators + +Chain `.where()` calls (ANDed together) using expression operators. Use `and()`, `or()`, `not()` for complex logic. + +```ts +import { eq, gt, or, and, not, inArray, like } from '@tanstack/db' + +const results = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.active, true)) + .where(({ user }) => + and( + gt(user.age, 18), + or(eq(user.role, 'admin'), eq(user.role, 'moderator')), + not(inArray(user.id, bannedIds)), + ), + ), +) +``` + +Boolean column references work directly: + +```ts +.where(({ user }) => user.active) // bare boolean ref +.where(({ user }) => not(user.suspended)) // negated boolean ref +``` + +### 2. Joining two collections + +Join conditions **must** use `eq()` (equality only -- IVM constraint). Default join type is `left`. Convenience methods: `leftJoin`, `rightJoin`, `innerJoin`, `fullJoin`. 
+ +```ts +import { eq } from '@tanstack/db' + +const userPosts = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .innerJoin({ post: postsCollection }, ({ user, post }) => + eq(user.id, post.userId), + ) + .select(({ user, post }) => ({ + userName: user.name, + postTitle: post.title, + })), +) +``` + +Multiple joins: + +```ts +q.from({ user: usersCollection }) + .join({ post: postsCollection }, ({ user, post }) => eq(user.id, post.userId)) + .join({ comment: commentsCollection }, ({ post, comment }) => + eq(post.id, comment.postId), + ) +``` + +### 3. Aggregation with groupBy + having + +Use `groupBy` to group rows, then aggregate in `select`. Filter groups with `having`. The `$selected` namespace lets `having` and `orderBy` reference fields defined in `select`. + +```ts +import { count, sum, gt } from '@tanstack/db' + +const topCustomers = createLiveQueryCollection((q) => + q + .from({ order: ordersCollection }) + .groupBy(({ order }) => order.customerId) + .select(({ order }) => ({ + customerId: order.customerId, + totalSpent: sum(order.amount), + orderCount: count(order.id), + })) + .having(({ $selected }) => gt($selected.totalSpent, 1000)) + .orderBy(({ $selected }) => $selected.totalSpent, 'desc') + .limit(10), +) +``` + +Without `groupBy`, aggregates in `select` treat the entire collection as one group: + +```ts +const stats = createLiveQueryCollection((q) => + q.from({ user: usersCollection }).select(({ user }) => ({ + totalUsers: count(user.id), + avgAge: avg(user.age), + })), +) +``` + +### 4. Standalone derived collection with createLiveQueryCollection + +Derived collections are themselves collections. 
Use one as a source for another query to cache intermediate results: + +```ts +// Base derived collection +const activeUsers = createLiveQueryCollection((q) => + q.from({ user: usersCollection }).where(({ user }) => eq(user.active, true)), +) + +// Second query uses the derived collection as its source +const activeUserPosts = createLiveQueryCollection((q) => + q + .from({ user: activeUsers }) + .join({ post: postsCollection }, ({ user, post }) => + eq(user.id, post.userId), + ) + .select(({ user, post }) => ({ + userName: user.name, + postTitle: post.title, + })), +) +``` + +Create derived collections once at module scope and reuse them. Do not recreate on every render or navigation. + +## Common Mistakes + +### CRITICAL: Using === instead of eq() + +JavaScript `===` in a where callback returns a boolean primitive, not an expression object. Throws `InvalidWhereExpressionError`. + +```ts +// WRONG +q.from({ user: usersCollection }).where(({ user }) => user.active === true) + +// CORRECT +q.from({ user: usersCollection }).where(({ user }) => eq(user.active, true)) +``` + +### CRITICAL: Filtering in JS instead of query operators + +JS `.filter()` / `.map()` on the result array throws away incremental maintenance -- the JS code re-runs from scratch on every change. + +```ts +// WRONG -- re-runs filter on every change +const { data } = useLiveQuery((q) => q.from({ todos: todosCollection })) +const active = data.filter((t) => t.completed === false) + +// CORRECT -- incrementally maintained +const { data } = useLiveQuery((q) => + q + .from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.completed, false)), +) +``` + +### HIGH: Not using the full operator set + +The library provides string functions (`upper`, `lower`, `length`, `concat`), math (`add`), utility (`coalesce`), and aggregates (`count`, `sum`, `avg`, `min`, `max`). All are incrementally maintained. Prefer them over JS equivalents. 
+ +```ts +// WRONG +.fn.select((row) => ({ + name: row.user.name.toUpperCase(), + total: row.order.price + row.order.tax, +})) + +// CORRECT +.select(({ user, order }) => ({ + name: upper(user.name), + total: add(order.price, order.tax), +})) +``` + +### HIGH: .distinct() without .select() + +`distinct()` deduplicates by the selected columns. Without `select()`, throws `DistinctRequiresSelectError`. + +```ts +// WRONG +q.from({ user: usersCollection }).distinct() + +// CORRECT +q.from({ user: usersCollection }) + .select(({ user }) => ({ country: user.country })) + .distinct() +``` + +### HIGH: .having() without .groupBy() + +`having` filters aggregated groups. Without `groupBy`, there are no groups. Throws `HavingRequiresGroupByError`. + +```ts +// WRONG +q.from({ order: ordersCollection }).having(({ order }) => + gt(count(order.id), 5), +) + +// CORRECT +q.from({ order: ordersCollection }) + .groupBy(({ order }) => order.customerId) + .having(({ order }) => gt(count(order.id), 5)) +``` + +### HIGH: .limit() / .offset() without .orderBy() + +Without deterministic ordering, limit/offset results are non-deterministic and cannot be incrementally maintained. Throws `LimitOffsetRequireOrderByError`. + +```ts +// WRONG +q.from({ user: usersCollection }).limit(10) + +// CORRECT +q.from({ user: usersCollection }) + .orderBy(({ user }) => user.name) + .limit(10) +``` + +### HIGH: Join condition using non-eq() operator + +The differential dataflow join operator only supports equality joins. Using `gt()`, `like()`, etc. throws `JoinConditionMustBeEqualityError`. 
+ +```ts +// WRONG +q.from({ user: usersCollection }).join( + { post: postsCollection }, + ({ user, post }) => gt(user.id, post.userId), +) + +// CORRECT +q.from({ user: usersCollection }).join( + { post: postsCollection }, + ({ user, post }) => eq(user.id, post.userId), +) +``` + +### MEDIUM: Passing source directly instead of {alias: collection} + +`from()` and `join()` require sources wrapped as `{alias: collection}`. Passing the collection directly throws `InvalidSourceTypeError`. + +```ts +// WRONG +q.from(usersCollection) + +// CORRECT +q.from({ users: usersCollection }) +``` + +## Tension: Query expressiveness vs. IVM constraints + +The query builder looks like SQL but has constraints that SQL does not: + +- **Equality joins only** -- `eq()` is the only allowed join condition operator. +- **orderBy required for limit/offset** -- non-deterministic pagination cannot be incrementally maintained. +- **distinct requires select** -- deduplication needs an explicit projection. +- **fn.select() cannot be used with groupBy()** -- the compiler must statically analyze select to discover aggregate functions. + +These constraints exist because the underlying d2ts differential dataflow engine requires them for correct incremental view maintenance. + +See also: react-db/SKILL.md for React hooks (`useLiveQuery`, `useLiveSuspenseQuery`, `useLiveInfiniteQuery`). + +## References + +- [Query Operators Reference](./references/operators.md) -- full signatures and examples for all operators, functions, and aggregates. diff --git a/packages/db/skills/db-core/live-queries/references/operators.md b/packages/db/skills/db-core/live-queries/references/operators.md new file mode 100644 index 000000000..be494de0d --- /dev/null +++ b/packages/db/skills/db-core/live-queries/references/operators.md @@ -0,0 +1,302 @@ +# Query Operators Reference + +All operators are imported from `@tanstack/db` (also re-exported by `@tanstack/react-db` and other framework packages). 
+ +```ts +import { + // Comparison + eq, + gt, + gte, + lt, + lte, + like, + ilike, + inArray, + isNull, + isUndefined, + // Logical + and, + or, + not, + // Aggregate + count, + sum, + avg, + min, + max, + // String + upper, + lower, + length, + concat, + // Math + add, + // Utility + coalesce, +} from '@tanstack/db' +``` + +--- + +## Comparison Operators + +### eq(left, right) -> BasicExpression\ + +Equality comparison. Works with any type. + +```ts +eq(user.id, 1) +eq(user.name, 'Alice') +``` + +### not(eq(left, right)) — not-equal pattern + +There is no `ne` operator. Use `not(eq(...))` for not-equal: + +```ts +not(eq(user.role, 'banned')) +``` + +### gt, gte, lt, lte (left, right) -> BasicExpression\ + +Ordering comparisons. Work with numbers, strings, dates. + +```ts +gt(user.age, 18) // greater than +gte(user.salary, 50000) // greater than or equal +lt(user.age, 65) // less than +lte(user.rating, 5) // less than or equal +gt(user.createdAt, new Date('2024-01-01')) +``` + +### like(left, right) -> BasicExpression\ + +Case-sensitive string pattern matching. Use `%` as wildcard. + +```ts +like(user.name, 'John%') // starts with John +like(user.email, '%@corp.com') // ends with @corp.com +``` + +### ilike(left, right) -> BasicExpression\ + +Case-insensitive string pattern matching. + +```ts +ilike(user.email, '%@gmail.com') +``` + +### inArray(value, array) -> BasicExpression\ + +Check if value is contained in an array. + +```ts +inArray(user.id, [1, 2, 3]) +inArray(user.role, ['admin', 'moderator']) +``` + +### isNull(value) -> BasicExpression\ + +Check if value is explicitly `null`. + +```ts +isNull(user.bio) +``` + +### isUndefined(value) -> BasicExpression\ + +Check if value is `undefined` (absent). Especially useful after left joins where unmatched rows produce `undefined`. 
+ +```ts +isUndefined(profile) // no matching profile in left join +``` + +--- + +## Logical Operators + +### and(...conditions) -> BasicExpression\ + +Combine two or more conditions with AND logic. + +```ts +and(eq(user.active, true), gt(user.age, 18)) +and(eq(user.active, true), gt(user.age, 18), eq(user.role, 'user')) +``` + +### or(...conditions) -> BasicExpression\ + +Combine two or more conditions with OR logic. + +```ts +or(eq(user.role, 'admin'), eq(user.role, 'moderator')) +``` + +### not(condition) -> BasicExpression\ + +Negate a condition. + +```ts +not(eq(user.active, false)) +not(inArray(user.id, bannedIds)) +``` + +--- + +## Aggregate Functions + +Used inside `.select()` with `.groupBy()`, or without `groupBy` to aggregate the entire collection as one group. + +### count(value) -> Aggregate\ + +Count non-null values in a group. + +```ts +count(user.id) +``` + +### sum(value), avg(value) -> Aggregate\ + +Sum or average of numeric values. + +```ts +sum(order.amount) +avg(user.salary) +``` + +### min(value), max(value) -> Aggregate\ + +Minimum/maximum value (numbers, strings, dates). + +```ts +min(order.amount) +max(user.createdAt) +``` + +--- + +## String Functions + +### upper(value), lower(value) -> BasicExpression\ + +Convert string case. + +```ts +upper(user.name) // 'ALICE' +lower(user.email) // 'alice@example.com' +``` + +### length(value) -> BasicExpression\ + +Get string or array length. + +```ts +length(user.name) // string length +length(user.tags) // array length +``` + +### concat(...values) -> BasicExpression\ + +Concatenate any number of values into a string. + +```ts +concat(user.firstName, ' ', user.lastName) +``` + +--- + +## Math Functions + +### add(left, right) -> BasicExpression\ + +Add two numeric values. + +```ts +add(order.price, order.tax) +add(user.salary, coalesce(user.bonus, 0)) +``` + +--- + +## Utility Functions + +### coalesce(...values) -> BasicExpression\ + +Return the first non-null, non-undefined value. 
+ +```ts +coalesce(user.displayName, user.name, 'Unknown') +coalesce(user.bonus, 0) +``` + +--- + +## $selected Namespace + +When a query has a `.select()` clause, the `$selected` namespace becomes available in `.orderBy()` and `.having()` callbacks. It provides access to the computed/aggregated fields defined in `select`. + +```ts +q.from({ order: ordersCollection }) + .groupBy(({ order }) => order.customerId) + .select(({ order }) => ({ + customerId: order.customerId, + totalSpent: sum(order.amount), + orderCount: count(order.id), + })) + .having(({ $selected }) => gt($selected.totalSpent, 1000)) + .orderBy(({ $selected }) => $selected.totalSpent, 'desc') +``` + +`$selected` is only available when `.select()` (or `.fn.select()`) has been called on the query. + +--- + +## Functional Variants (fn.select, fn.where, fn.having) + +Escape hatches for logic that cannot be expressed with declarative operators. These execute arbitrary JS on each row but **cannot be optimized** by the query compiler (no predicate push-down, no index use). + +### fn.select(callback) + +```ts +q.from({ user: usersCollection }).fn.select((row) => ({ + id: row.user.id, + domain: row.user.email.split('@')[1], + tier: row.user.salary > 100000 ? 'senior' : 'junior', +})) +``` + +**Limitation**: `fn.select()` cannot be used with `groupBy()`. The compiler must statically analyze select to discover aggregate functions. + +### fn.where(callback) + +```ts +q.from({ user: usersCollection }).fn.where( + (row) => row.user.active && row.user.email.endsWith('@company.com'), +) +``` + +### fn.having(callback) + +Receives `$selected` when a `select()` clause exists. 
+ +```ts +q.from({ order: ordersCollection }) + .groupBy(({ order }) => order.customerId) + .select(({ order }) => ({ + customerId: order.customerId, + totalSpent: sum(order.amount), + orderCount: count(order.id), + })) + .fn.having( + ({ $selected }) => $selected.totalSpent > 1000 && $selected.orderCount >= 3, + ) +``` + +### When to use functional variants + +- String manipulation not covered by `upper`/`lower`/`concat`/`like` (e.g., `split`, `slice`, regex) +- Complex conditional logic (ternaries, multi-branch) +- External function calls or lookups + +Prefer declarative operators whenever possible for incremental maintenance. diff --git a/packages/db/skills/db-core/mutations-optimistic/SKILL.md b/packages/db/skills/db-core/mutations-optimistic/SKILL.md new file mode 100644 index 000000000..e63eb3aec --- /dev/null +++ b/packages/db/skills/db-core/mutations-optimistic/SKILL.md @@ -0,0 +1,375 @@ +--- +name: db-core/mutations-optimistic +description: > + collection.insert, collection.update (Immer-style draft proxy), + collection.delete. createOptimisticAction (onMutate + mutationFn). + createPacedMutations with debounceStrategy, throttleStrategy, queueStrategy. + createTransaction, getActiveTransaction, ambient transaction context. + Transaction lifecycle (pending/persisting/completed/failed). Mutation merging. + onInsert/onUpdate/onDelete handlers. PendingMutation type. Transaction.isPersisted. +type: sub-skill +library: db +library_version: '0.5.30' +sources: + - 'TanStack/db:docs/guides/mutations.md' + - 'TanStack/db:packages/db/src/transactions.ts' + - 'TanStack/db:packages/db/src/optimistic-action.ts' + - 'TanStack/db:packages/db/src/paced-mutations.ts' +--- + +# Mutations & Optimistic State + +> **Depends on:** `db-core/collection-setup` -- you need a configured collection +> (with `getKey`, sync adapter, and optionally `onInsert`/`onUpdate`/`onDelete` +> handlers) before you can mutate. 
+ +TanStack DB mutations follow a unidirectional loop: +**optimistic mutation -> handler persists to backend -> sync back -> confirmed state**. +Optimistic state is applied in the current tick and dropped when the handler resolves. + +--- + +## Setup -- Collection Write Operations + +### insert + +```ts +// Single item +todoCollection.insert({ + id: crypto.randomUUID(), + text: 'Buy groceries', + completed: false, +}) + +// Multiple items +todoCollection.insert([ + { id: crypto.randomUUID(), text: 'Buy groceries', completed: false }, + { id: crypto.randomUUID(), text: 'Walk dog', completed: false }, +]) + +// With metadata / non-optimistic +todoCollection.insert(item, { metadata: { source: 'import' } }) +todoCollection.insert(item, { optimistic: false }) +``` + +### update (Immer-style draft proxy) + +```ts +// Single item -- mutate the draft, do NOT reassign it +todoCollection.update(todo.id, (draft) => { + draft.completed = true + draft.completedAt = new Date() +}) + +// Multiple items +todoCollection.update([id1, id2], (drafts) => { + drafts.forEach((d) => { + d.completed = true + }) +}) + +// With metadata +todoCollection.update( + todo.id, + { metadata: { reason: 'user-edit' } }, + (draft) => { + draft.text = 'Updated' + }, +) +``` + +### delete + +```ts +todoCollection.delete(todo.id) +todoCollection.delete([id1, id2]) +todoCollection.delete(todo.id, { metadata: { reason: 'completed' } }) +``` + +All three return a `Transaction` object. Use `tx.isPersisted.promise` to await +persistence or catch rollback errors. + +--- + +## Core Patterns + +### 1. createOptimisticAction -- intent-based mutations + +Use when the optimistic change is a _guess_ at how the server will transform +the data, or when you need to mutate multiple collections atomically. 
+ +```ts +import { createOptimisticAction } from '@tanstack/db' + +const likePost = createOptimisticAction({ + // MUST be synchronous -- applied in the current tick + onMutate: (postId) => { + postCollection.update(postId, (draft) => { + draft.likeCount += 1 + draft.likedByMe = true + }) + }, + mutationFn: async (postId, { transaction }) => { + await api.posts.like(postId) + // IMPORTANT: wait for server state to sync back before returning + await postCollection.utils.refetch() + }, +}) + +// Returns a Transaction +const tx = likePost(postId) +await tx.isPersisted.promise +``` + +Multi-collection example: + +```ts +const createProject = createOptimisticAction<{ name: string; ownerId: string }>( + { + onMutate: ({ name, ownerId }) => { + projectCollection.insert({ id: crypto.randomUUID(), name, ownerId }) + userCollection.update(ownerId, (d) => { + d.projectCount += 1 + }) + }, + mutationFn: async ({ name, ownerId }) => { + await api.projects.create({ name, ownerId }) + await Promise.all([ + projectCollection.utils.refetch(), + userCollection.utils.refetch(), + ]) + }, + }, +) +``` + +### 2. 
createPacedMutations -- auto-save with debounce / throttle / queue + +```ts +import { createPacedMutations, debounceStrategy } from '@tanstack/db' + +const autoSaveNote = createPacedMutations({ + onMutate: (text) => { + noteCollection.update(noteId, (draft) => { + draft.body = text + }) + }, + mutationFn: async ({ transaction }) => { + const mutation = transaction.mutations[0] + await api.notes.update(mutation.key, mutation.changes) + await noteCollection.utils.refetch() + }, + strategy: debounceStrategy({ wait: 500 }), +}) + +// Each call resets the debounce timer; mutations merge into one transaction +autoSaveNote('Hello') +autoSaveNote('Hello, world') // only this version persists +``` + +Other strategies: + +```ts +import { throttleStrategy, queueStrategy } from '@tanstack/db' + +// Evenly spaced (sliders, scroll) +throttleStrategy({ wait: 200, leading: true, trailing: true }) + +// Sequential FIFO -- every mutation persisted in order +queueStrategy({ wait: 0, maxSize: 100 }) +``` + +### 3. createTransaction -- manual batching + +```ts +import { createTransaction } from '@tanstack/db' + +const tx = createTransaction({ + autoCommit: false, // wait for explicit commit() + mutationFn: async ({ transaction }) => { + await api.batchUpdate(transaction.mutations) + }, +}) + +tx.mutate(() => { + todoCollection.update(id1, (d) => { + d.status = 'reviewed' + }) + todoCollection.update(id2, (d) => { + d.status = 'reviewed' + }) +}) + +// User reviews... then commits or rolls back +await tx.commit() +// OR: tx.rollback() +``` + +Inside `tx.mutate(() => { ... })`, the transaction is pushed onto an ambient +stack. Any `collection.insert/update/delete` call joins the ambient transaction +automatically via `getActiveTransaction()`. + +### 4. 
Mutation handler with refetch (QueryCollection pattern) + +```ts +const todoCollection = createCollection( + queryCollectionOptions({ + queryKey: ['todos'], + queryFn: () => api.todos.getAll(), + getKey: (t) => t.id, + onInsert: async ({ transaction }) => { + await Promise.all( + transaction.mutations.map((m) => api.todos.create(m.modified)), + ) + // IMPORTANT: handler must not resolve until server state is synced back + // QueryCollection auto-refetches after handler completes + }, + onUpdate: async ({ transaction }) => { + await Promise.all( + transaction.mutations.map((m) => + api.todos.update(m.original.id, m.changes), + ), + ) + }, + onDelete: async ({ transaction }) => { + await Promise.all( + transaction.mutations.map((m) => api.todos.delete(m.original.id)), + ) + }, + }), +) +``` + +For ElectricCollection, return `{ txid }` instead of refetching: + +```ts +onUpdate: async ({ transaction }) => { + const txids = await Promise.all( + transaction.mutations.map(async (m) => { + const res = await api.todos.update(m.original.id, m.changes) + return res.txid + }), + ) + return { txid: txids } +} +``` + +--- + +## Common Mistakes + +### CRITICAL: Passing an object to update() instead of a draft callback + +```ts +// WRONG -- silently fails or throws +collection.update(id, { ...item, title: 'new' }) + +// CORRECT -- mutate the draft proxy +collection.update(id, (draft) => { + draft.title = 'new' +}) +``` + +### CRITICAL: Hallucinating mutation API signatures + +The most common AI-generated errors: + +- Inventing handler signatures (e.g. `onMutate` on a collection config) +- Confusing `createOptimisticAction` with `createTransaction` +- Wrong PendingMutation property names (`mutation.data` does not exist -- + use `mutation.modified`, `mutation.changes`, `mutation.original`) +- Missing the ambient transaction pattern + +Always reference the exact types in `references/transaction-api.md`. 
+ +### CRITICAL: onMutate returning a Promise + +`onMutate` in `createOptimisticAction` **must be synchronous**. Optimistic state +is applied in the current tick. Returning a Promise throws +`OnMutateMustBeSynchronousError`. + +```ts +// WRONG +createOptimisticAction({ + onMutate: async (text) => { + collection.insert({ id: await generateId(), text }) + }, + ... +}) + +// CORRECT +createOptimisticAction({ + onMutate: (text) => { + collection.insert({ id: crypto.randomUUID(), text }) + }, + ... +}) +``` + +### CRITICAL: Mutations without handler or ambient transaction + +Collection mutations require either: + +1. An `onInsert`/`onUpdate`/`onDelete` handler on the collection, OR +2. An ambient transaction from `createTransaction`/`createOptimisticAction` + +Without either, throws `MissingInsertHandlerError` (or the Update/Delete variant). + +### HIGH: Calling .mutate() after transaction is no longer pending + +Transactions only accept new mutations while in `pending` state. Calling +`mutate()` after `commit()` or `rollback()` throws +`TransactionNotPendingMutateError`. Create a new transaction instead. + +### HIGH: Changing primary key via update + +The update proxy detects key changes and throws `KeyUpdateNotAllowedError`. +Primary keys are immutable once set. If you need a different key, delete and +re-insert. + +### HIGH: Inserting item with duplicate key + +If an item with the same key already exists (synced or optimistic), throws +`DuplicateKeyError`. Always generate a unique key (e.g. `crypto.randomUUID()`) +or check before inserting. + +### HIGH: Not awaiting refetch after mutation in query collection handler + +The optimistic state is held only until the handler resolves. If the handler +returns before server state has synced back, optimistic state is dropped and +users see a flash of missing data. 
+ +```ts +// WRONG -- optimistic state dropped before new server state arrives +onInsert: async ({ transaction }) => { + await api.createTodo(transaction.mutations[0].modified) + // missing: await collection.utils.refetch() +} + +// CORRECT +onInsert: async ({ transaction }) => { + await api.createTodo(transaction.mutations[0].modified) + await collection.utils.refetch() +} +``` + +--- + +## Tension: Optimistic Speed vs. Data Consistency + +Instant optimistic updates create a window where client state diverges from +server state. If the handler fails, the rollback removes the optimistic state -- +which can discard user work the user thought was saved. Consider: + +- Showing pending/saving indicators so users know state is unconfirmed +- Using `{ optimistic: false }` for destructive operations +- Designing idempotent server endpoints so retries are safe +- Handling `tx.isPersisted.promise` rejection to surface errors to the user + +--- + +## References + +- [Transaction API Reference](references/transaction-api.md) -- createTransaction config, + Transaction object, PendingMutation type, mutation merging rules, strategy types +- [TanStack DB Mutations Guide](https://tanstack.com/db/latest/docs/guides/mutations) diff --git a/packages/db/skills/db-core/mutations-optimistic/references/transaction-api.md b/packages/db/skills/db-core/mutations-optimistic/references/transaction-api.md new file mode 100644 index 000000000..5c8e91848 --- /dev/null +++ b/packages/db/skills/db-core/mutations-optimistic/references/transaction-api.md @@ -0,0 +1,207 @@ +# Transaction API Reference + +## createTransaction + +```ts +import { createTransaction } from "@tanstack/db" + +const tx = createTransaction({ + id?: string, // defaults to crypto.randomUUID() + autoCommit?: boolean, // default true -- commit after mutate() + mutationFn: MutationFn, // (params: { transaction }) => Promise + metadata?: Record, // custom data attached to the transaction +}) +``` + +## Transaction Object + +```ts 
+interface Transaction { + id: string + state: 'pending' | 'persisting' | 'completed' | 'failed' + mutations: Array> + autoCommit: boolean + createdAt: Date + sequenceNumber: number + metadata: Record + error?: { message: string; error: Error } + + // Deferred promise -- resolves when mutationFn completes, rejects on failure + isPersisted: { + promise: Promise> + resolve: (value: Transaction) => void + reject: (reason?: any) => void + } + + // Execute collection operations inside the ambient transaction context + mutate(callback: () => void): Transaction + + // Commit -- calls mutationFn, transitions to persisting -> completed|failed + commit(): Promise> + + // Rollback -- transitions to failed, also rolls back conflicting transactions + rollback(config?: { isSecondaryRollback?: boolean }): Transaction +} +``` + +**Lifecycle:** `pending` -> `persisting` -> `completed` | `failed` + +- `mutate()` only allowed in `pending` state (throws `TransactionNotPendingMutateError`) +- `commit()` only allowed in `pending` state (throws `TransactionNotPendingCommitError`) +- `rollback()` allowed in `pending` or `persisting` (throws `TransactionAlreadyCompletedRollbackError` if completed) +- Failed `mutationFn` automatically triggers `rollback()` +- Rollback cascades to other pending transactions sharing the same item keys + +## PendingMutation Type + +```ts +interface PendingMutation { + mutationId: string // unique id for this mutation + original: TOperation extends 'insert' ? 
{} : T // state before mutation + modified: T // state after mutation + changes: Partial // only the changed fields + key: any // collection-local key + globalKey: string // globally unique key (collectionId + key) + type: TOperation // "insert" | "update" | "delete" + metadata: unknown // user-provided metadata + syncMetadata: Record // adapter-specific metadata + optimistic: boolean // whether applied optimistically (default true) + createdAt: Date + updatedAt: Date + collection: Collection // reference to the source collection +} +``` + +## Mutation Merging Rules + +When multiple mutations target the same item (same `globalKey`) within a +transaction, they merge: + +| Existing | Incoming | Result | Notes | +| -------- | -------- | --------- | ---------------------------------- | +| insert | update | insert | Merge changes, keep empty original | +| insert | delete | _removed_ | Both mutations cancel out | +| update | update | update | Union changes, keep first original | +| update | delete | delete | Delete dominates | +| delete | delete | delete | Replace with latest | +| insert | insert | insert | Replace with latest | + +`(delete, update)` and `(delete, insert)` cannot occur -- the collection +prevents operations on deleted items within the same transaction. + +## getActiveTransaction / Ambient Transaction Context + +```ts +import { getActiveTransaction } from '@tanstack/db' + +const tx = getActiveTransaction() // Transaction | undefined +``` + +Inside `tx.mutate(() => { ... })`, the transaction is pushed onto an internal +stack. Any `collection.insert/update/delete` call automatically joins the +topmost ambient transaction. This is how `createOptimisticAction` and +`createPacedMutations` wire collection operations into their transactions. 
+ +## createOptimisticAction + +```ts +import { createOptimisticAction } from "@tanstack/db" + +const action = createOptimisticAction({ + // Synchronous -- apply optimistic state immediately (MUST NOT return a Promise) + onMutate: (variables: TVariables) => void, + + // Async -- persist to backend, wait for sync back + mutationFn: (variables: TVariables, params: { transaction }) => Promise, + + // Optional: same as createTransaction config + id?: string, + autoCommit?: boolean, // always true (commit happens after mutate) + metadata?: Record, +}) + +// Returns a function: (variables: TVariables) => Transaction +const tx = action(variables) +await tx.isPersisted.promise +``` + +## createPacedMutations + +```ts +import { createPacedMutations } from "@tanstack/db" + +const mutate = createPacedMutations({ + onMutate: (variables: TVariables) => void, // synchronous optimistic update + mutationFn: MutationFn, // persists merged transaction + strategy: Strategy, // timing control + metadata?: Record, +}) + +// Returns a function: (variables: TVariables) => Transaction +const tx = mutate(variables) +``` + +Rapid calls merge into the active transaction (via `applyMutations`) until the +strategy fires the commit. A new transaction is created for subsequent calls. 
+ +## Strategy Types + +### debounceStrategy + +```ts +import { debounceStrategy } from "@tanstack/db" + +debounceStrategy({ + wait: number, // ms to wait after last call before committing + leading?: boolean, // execute on the leading edge (default false) + trailing?: boolean, // execute on the trailing edge (default true) +}) +``` + +### throttleStrategy + +```ts +import { throttleStrategy } from "@tanstack/db" + +throttleStrategy({ + wait: number, // minimum ms between commits + leading?: boolean, // execute on the leading edge + trailing?: boolean, // execute on the trailing edge +}) +``` + +### queueStrategy + +```ts +import { queueStrategy } from "@tanstack/db" + +queueStrategy({ + wait?: number, // ms between processing items (default 0) + maxSize?: number, // drop items if queue exceeds this + addItemsTo?: "front" | "back", // default "back" (FIFO) + getItemsFrom?: "front" | "back", // default "front" (FIFO) +}) +``` + +Queue creates a **separate transaction per call** (unlike debounce/throttle +which merge). Each transaction commits and awaits `isPersisted` before the next +starts. Failed transactions do not block subsequent ones. + +## Transaction.isPersisted.promise + +```ts +const tx = collection.insert({ id: '1', text: 'Hello' }) + +try { + await tx.isPersisted.promise // resolves with the Transaction on success + console.log(tx.state) // "completed" +} catch (error) { + console.log(tx.state) // "failed" + // optimistic state has been rolled back +} +``` + +The promise is a `Deferred` -- it is created at transaction construction time +and settled when `commit()` completes or `rollback()` is called. For +`autoCommit: true` transactions, the promise settles shortly after `mutate()` +returns (the commit runs asynchronously). 
diff --git a/packages/db/skills/meta-framework/SKILL.md b/packages/db/skills/meta-framework/SKILL.md new file mode 100644 index 000000000..7032c7343 --- /dev/null +++ b/packages/db/skills/meta-framework/SKILL.md @@ -0,0 +1,361 @@ +--- +name: meta-framework +description: > + Integrating TanStack DB with meta-frameworks (TanStack Start, Next.js, + Remix, Nuxt, SvelteKit). Client-side only: SSR is NOT supported — routes + must disable SSR. Preloading collections in route loaders with + collection.preload(). Pattern: ssr: false + await collection.preload() in + loader. Multiple collection preloading with Promise.all. Framework-specific + loader APIs. +type: composition +library: db +library_version: '0.5.30' +requires: + - db-core + - db-core/collection-setup +sources: + - 'TanStack/db:examples/react/todo/src/routes/electric.tsx' + - 'TanStack/db:examples/react/todo/src/routes/query.tsx' + - 'TanStack/db:examples/react/todo/src/start.tsx' +--- + +This skill builds on db-core. Read it first for collection setup and query builder. + +# TanStack DB — Meta-Framework Integration + +## Setup + +TanStack DB collections are **client-side only**. SSR is not implemented. Routes using TanStack DB **must disable SSR**. The setup pattern is: + +1. Set `ssr: false` on the route +2. Call `collection.preload()` in the route loader +3. 
Use `useLiveQuery` in the component + +## TanStack Start + +### Global SSR disable + +```ts +// start.tsx +import { createStart } from '@tanstack/react-start' + +export const startInstance = createStart(() => { + return { + defaultSsr: false, + } +}) +``` + +### Per-route SSR disable + preload + +```tsx +import { createFileRoute } from '@tanstack/react-router' +import { useLiveQuery } from '@tanstack/react-db' + +export const Route = createFileRoute('/todos')({ + ssr: false, + loader: async () => { + await todoCollection.preload() + return null + }, + component: TodoPage, +}) + +function TodoPage() { + const { data: todos } = useLiveQuery((q) => q.from({ todo: todoCollection })) + return ( +
    <ul>
      {todos.map((t) => (
        <li key={t.id}>{t.text}</li>
      ))}
    </ul>
    + ) +} +``` + +### Multiple collection preloading + +```tsx +export const Route = createFileRoute('/electric')({ + ssr: false, + loader: async () => { + await Promise.all([todoCollection.preload(), configCollection.preload()]) + return null + }, + component: ElectricPage, +}) +``` + +## Next.js (App Router) + +### Client component with preloading + +```tsx +// app/todos/page.tsx +'use client' + +import { useEffect, useState } from 'react' +import { useLiveQuery } from '@tanstack/react-db' + +export default function TodoPage() { + const { data: todos, isLoading } = useLiveQuery((q) => + q.from({ todo: todoCollection }), + ) + + if (isLoading) return
<div>Loading...</div>
+  return (
+    <ul>
+      {todos.map((t) => (
+        <li key={t.id}>{t.text}</li>
+      ))}
+    </ul>
    + ) +} +``` + +Next.js App Router components using TanStack DB must be client components (`'use client'`). There is no server-side preloading — collections sync on mount. + +### With route-level preloading (experimental) + +```tsx +// app/todos/page.tsx +'use client' + +import { useEffect } from 'react' +import { useLiveQuery } from '@tanstack/react-db' + +// Trigger preload immediately when module is loaded +const preloadPromise = todoCollection.preload() + +export default function TodoPage() { + const { data: todos } = useLiveQuery((q) => q.from({ todo: todoCollection })) + return ( +
+    <ul>
+      {todos.map((t) => (
+        <li key={t.id}>{t.text}</li>
+      ))}
+    </ul>
    + ) +} +``` + +## Remix + +### Client loader pattern + +```tsx +// app/routes/todos.tsx +import { useLiveQuery } from '@tanstack/react-db' +import type { ClientLoaderFunctionArgs } from '@remix-run/react' + +export const clientLoader = async ({ request }: ClientLoaderFunctionArgs) => { + await todoCollection.preload() + return null +} + +// Prevent server loader from running +export const loader = () => null + +export default function TodoPage() { + const { data: todos } = useLiveQuery((q) => q.from({ todo: todoCollection })) + return ( +
+    <ul>
+      {todos.map((t) => (
+        <li key={t.id}>{t.text}</li>
+      ))}
+    </ul>
    + ) +} +``` + +## Nuxt + +### Client-only component + +```vue + + + + +``` + +Wrap TanStack DB components in `` to prevent SSR. + +## SvelteKit + +### Client-side only page + +```svelte + + + +{#if todosQuery} + {#each todosQuery.data as todo (todo.id)} +
+    <li>{todo.text}</li>
  • + {/each} +{/if} +``` + +Or disable SSR for the route: + +```ts +// src/routes/todos/+page.ts +export const ssr = false +``` + +## Core Patterns + +### What preload() does + +`collection.preload()` starts the sync process and returns a promise that resolves when the collection reaches "ready" status. This means: + +1. The sync function connects to the backend +2. Initial data is fetched and written to the collection +3. `markReady()` is called by the adapter +4. The promise resolves + +Subsequent calls to `preload()` on an already-ready collection return immediately. + +### Collection module pattern + +Define collections in a shared module, import in both loaders and components: + +```ts +// lib/collections.ts +import { createCollection, queryCollectionOptions } from '@tanstack/react-db' + +export const todoCollection = createCollection( + queryCollectionOptions({ ... }) +) +``` + +```tsx +// routes/todos.tsx — loader uses the same collection instance +import { todoCollection } from '../lib/collections' + +export const Route = createFileRoute('/todos')({ + ssr: false, + loader: async () => { + await todoCollection.preload() + return null + }, + component: () => { + const { data } = useLiveQuery((q) => q.from({ todo: todoCollection })) + // ... + }, +}) +``` + +## Server-Side Integration + +This skill covers the **client-side** read path only (preloading, live queries). For server-side concerns: + +- **Electric proxy route** (forwarding shape requests to Electric) — see the [Electric adapter reference](../db-core/collection-setup/references/electric-adapter.md) +- **Mutation endpoints** (`createServerFn` in TanStack Start, API routes in Next.js/Remix) — implement using your framework's server function pattern. See the Electric adapter reference for the txid handshake that mutations must return. 
+ +## Common Mistakes + +### CRITICAL Enabling SSR with TanStack DB + +Wrong: + +```tsx +export const Route = createFileRoute('/todos')({ + loader: async () => { + await todoCollection.preload() + return null + }, +}) +``` + +Correct: + +```tsx +export const Route = createFileRoute('/todos')({ + ssr: false, + loader: async () => { + await todoCollection.preload() + return null + }, +}) +``` + +TanStack DB collections are client-side only. Without `ssr: false`, the route loader runs on the server where collections cannot sync, causing hangs or errors. + +Source: examples/react/todo/src/start.tsx + +### HIGH Forgetting to preload in route loader + +Wrong: + +```tsx +export const Route = createFileRoute('/todos')({ + ssr: false, + component: TodoPage, +}) +``` + +Correct: + +```tsx +export const Route = createFileRoute('/todos')({ + ssr: false, + loader: async () => { + await todoCollection.preload() + return null + }, + component: TodoPage, +}) +``` + +Without preloading, the collection starts syncing only when the component mounts, causing a loading flash. Preloading in the route loader starts sync during navigation, making data available immediately when the component renders. + +### MEDIUM Creating separate collection instances + +Wrong: + +```tsx +// routes/todos.tsx +const todoCollection = createCollection(queryCollectionOptions({ ... })) + +export const Route = createFileRoute('/todos')({ + ssr: false, + loader: async () => { await todoCollection.preload() }, + component: () => { + const { data } = useLiveQuery((q) => q.from({ todo: todoCollection })) + }, +}) +``` + +Correct: + +```ts +// lib/collections.ts — single shared instance +export const todoCollection = createCollection(queryCollectionOptions({ ... })) +``` + +Collections are singletons. Creating multiple instances for the same data causes duplicate syncs, wasted bandwidth, and inconsistent state between components. 
+ +See also: react-db/SKILL.md, vue-db/SKILL.md, svelte-db/SKILL.md, solid-db/SKILL.md, angular-db/SKILL.md — for framework-specific hook usage. + +See also: db-core/collection-setup/SKILL.md — for collection creation and adapter selection. diff --git a/packages/db/src/collection/subscription.ts b/packages/db/src/collection/subscription.ts index 40060ca05..ae13e1295 100644 --- a/packages/db/src/collection/subscription.ts +++ b/packages/db/src/collection/subscription.ts @@ -571,20 +571,20 @@ export class CollectionSubscription if (whereFromCursor) { const { expression } = orderBy[0]! - const minValue = minValues[0] + const cursorMinValue = minValues[0] // Build the whereCurrent expression for the first orderBy column // For Date values, we need to handle precision differences between JS (ms) and backends (μs) // A JS Date represents a 1ms range, so we query for all values within that range let whereCurrentCursor: BasicExpression - if (minValue instanceof Date) { - const minValuePlus1ms = new Date(minValue.getTime() + 1) + if (cursorMinValue instanceof Date) { + const cursorMinValuePlus1ms = new Date(cursorMinValue.getTime() + 1) whereCurrentCursor = and( - gte(expression, new Value(minValue)), - lt(expression, new Value(minValuePlus1ms)), + gte(expression, new Value(cursorMinValue)), + lt(expression, new Value(cursorMinValuePlus1ms)), ) } else { - whereCurrentCursor = eq(expression, new Value(minValue)) + whereCurrentCursor = eq(expression, new Value(cursorMinValue)) } cursorExpressions = { diff --git a/packages/db/src/errors.ts b/packages/db/src/errors.ts index dc1c7b900..f62278c56 100644 --- a/packages/db/src/errors.ts +++ b/packages/db/src/errors.ts @@ -433,6 +433,17 @@ export class DistinctRequiresSelectError extends QueryCompilationError { } } +export class FnSelectWithGroupByError extends QueryCompilationError { + constructor() { + super( + `fn.select() cannot be used with groupBy(). 
` + + `groupBy requires the compiler to statically analyze aggregate functions (count, sum, max, etc.) in the SELECT clause, ` + + `which is not possible with fn.select() since it is an opaque function. ` + + `Use .select() instead of .fn.select() when combining with groupBy().`, + ) + } +} + export class HavingRequiresGroupByError extends QueryCompilationError { constructor() { super(`HAVING clause requires GROUP BY clause`) diff --git a/packages/db/src/query/builder/types.ts b/packages/db/src/query/builder/types.ts index 11360dd82..6dce531f8 100644 --- a/packages/db/src/query/builder/types.ts +++ b/packages/db/src/query/builder/types.ts @@ -227,18 +227,28 @@ export type ResultTypeFromSelect = WithoutRefBrand< Prettify<{ [K in keyof TSelectObject]: NeedsExtraction extends true ? ExtractExpressionType - : TSelectObject[K] extends Ref + : // Ref (full object ref or spread with RefBrand) - recursively process properties + TSelectObject[K] extends Ref ? ExtractRef - : TSelectObject[K] extends RefLeaf - ? T - : TSelectObject[K] extends RefLeaf | undefined + : // RefLeaf (simple property ref like user.name) + TSelectObject[K] extends RefLeaf + ? IsNullableRef extends true ? T | undefined - : TSelectObject[K] extends RefLeaf | null - ? T | null - : TSelectObject[K] extends Ref | undefined - ? ExtractRef | undefined - : TSelectObject[K] extends Ref | null - ? ExtractRef | null + : T + : // RefLeaf | undefined (schema-optional field) + TSelectObject[K] extends RefLeaf | undefined + ? T | undefined + : // RefLeaf | null (schema-nullable field) + TSelectObject[K] extends RefLeaf | null + ? IsNullableRef> extends true + ? T | null | undefined + : T | null + : // Ref | undefined (optional object-type schema field) + TSelectObject[K] extends Ref | undefined + ? ExtractRef> | undefined + : // Ref | null (nullable object-type schema field) + TSelectObject[K] extends Ref | null + ? ExtractRef> | null : TSelectObject[K] extends Aggregate ? 
T : TSelectObject[K] extends @@ -366,24 +376,17 @@ export type FunctionalHavingRow = TContext[`schema`] & (TContext[`result`] extends object ? { $selected: TContext[`result`] } : {}) /** - * RefProxyForContext - Creates ref proxies for all tables/collections in a query context + * RefsForContext - Creates ref proxies for all tables/collections in a query context * * This is the main entry point for creating ref objects in query builder callbacks. - * It handles optionality by placing undefined/null OUTSIDE the RefProxy to enable - * JavaScript's optional chaining operator (?.): + * For nullable join sides (left/right/full joins), it produces `Ref` instead + * of `Ref | undefined`. This accurately reflects that the proxy object is always + * present at build time (it's a truthy proxy that records property access paths), + * while the `Nullable` flag ensures the result type correctly includes `| undefined`. * * Examples: - * - Required field: `RefProxy` → user.name works - * - Optional field: `RefProxy | undefined` → user?.name works - * - Nullable field: `RefProxy | null` → user?.name works - * - Both optional and nullable: `RefProxy | undefined` → user?.name works - * - * The key insight is that `RefProxy` would NOT allow `user?.name` - * because the undefined is "inside" the proxy, but `RefProxy | undefined` - * does allow it because the undefined is "outside" the proxy. - * - * The logic prioritizes optional chaining by always placing `undefined` outside when - * a type is both optional and nullable (e.g., `string | null | undefined`). + * - Required field: `Ref` → user.name works, result is T + * - Nullable join side: `Ref` → user.name works, result is T | undefined * * After `select()` is called, this type also includes `$selected` which provides access * to the SELECT result fields via `$selected.fieldName` syntax. @@ -394,17 +397,17 @@ export type RefsForContext = { > extends true ? IsNonExactNullable extends true ? 
// T is both non-exact optional and non-exact nullable (e.g., string | null | undefined) - // Extract the non-undefined and non-null part and place undefined outside - Ref> | undefined + // Extract the non-undefined and non-null part, mark as nullable ref + Ref, true> : // T is optional (T | undefined) but not exactly undefined, and not nullable - // Extract the non-undefined part and place undefined outside - Ref> | undefined + // Extract the non-undefined part, mark as nullable ref + Ref, true> : IsNonExactNullable extends true ? // T is nullable (T | null) but not exactly null, and not optional - // Extract the non-null part and place null outside - Ref> | null + // Extract the non-null part, mark as nullable ref + Ref, true> : // T is exactly undefined, exactly null, or neither optional nor nullable - // Wrap in RefProxy as-is (includes exact undefined, exact null, and normal types) + // Wrap in Ref as-is (includes exact undefined, exact null, and normal types) Ref } & (TContext[`result`] extends object ? { $selected: Ref } @@ -479,41 +482,44 @@ type NonNull = T extends null ? never : T * It provides a recursive interface that allows nested property access while * preserving optionality and nullability correctly. * - * When spread in select clauses, it correctly produces the underlying data type - * without Ref wrappers, enabling clean spread operations. + * The `Nullable` parameter indicates whether this ref comes from a nullable + * join side (left/right/full). When `true`, the `Nullable` flag propagates + * through all nested property accesses, ensuring the result type includes + * `| undefined` for all fields accessed through this ref. * * Example usage: * ```typescript - * // Clean interface - no internal properties visible - * const users: Ref<{ id: number; profile?: { bio: string } }> = { ... 
} - * users.id // Ref - clean display - * users.profile?.bio // Ref - nested optional access works + * // Non-nullable ref (inner join or from table): + * select(({ user }) => ({ name: user.name })) // result: string + * + * // Nullable ref (left join right side): + * select(({ dept }) => ({ name: dept.name })) // result: string | undefined * * // Spread operations work cleanly: * select(({ user }) => ({ ...user })) // Returns User type, not Ref types * ``` */ -export type Ref = { +export type Ref = { [K in keyof T]: IsNonExactOptional extends true ? IsNonExactNullable extends true ? // Both optional and nullable IsPlainObject> extends true - ? Ref> | undefined - : RefLeaf> | undefined + ? Ref, Nullable> | undefined + : RefLeaf, Nullable> | undefined : // Optional only IsPlainObject> extends true - ? Ref> | undefined - : RefLeaf> | undefined + ? Ref, Nullable> | undefined + : RefLeaf, Nullable> | undefined : IsNonExactNullable extends true ? // Nullable only IsPlainObject> extends true - ? Ref> | null - : RefLeaf> | null + ? Ref, Nullable> | null + : RefLeaf, Nullable> | null : // Required IsPlainObject extends true - ? Ref - : RefLeaf -} & RefLeaf + ? Ref + : RefLeaf +} & RefLeaf /** * Ref - The user-facing ref type with clean IDE display @@ -527,11 +533,19 @@ export type Ref = { * - No internal properties like __refProxy, __path, __type are visible */ declare const RefBrand: unique symbol -export type RefLeaf = { readonly [RefBrand]?: T } +declare const NullableBrand: unique symbol +export type RefLeaf = { + readonly [RefBrand]?: T +} & ([Nullable] extends [true] ? { readonly [NullableBrand]?: true } : {}) + +// Detect NullableBrand by checking for the key's presence +type IsNullableRef = typeof NullableBrand extends keyof T ? true : false -// Helper type to remove RefBrand from objects +// Helper type to remove RefBrand and NullableBrand from objects type WithoutRefBrand = - T extends Record ? Omit : T + T extends Record + ? 
Omit + : T /** * PreserveSingleResultFlag - Conditionally includes the singleResult flag diff --git a/packages/db/src/query/compiler/index.ts b/packages/db/src/query/compiler/index.ts index d0d7469e2..885a7eaa6 100644 --- a/packages/db/src/query/compiler/index.ts +++ b/packages/db/src/query/compiler/index.ts @@ -4,6 +4,7 @@ import { CollectionInputNotFoundError, DistinctRequiresSelectError, DuplicateAliasInSubqueryError, + FnSelectWithGroupByError, HavingRequiresGroupByError, LimitOffsetRequireOrderByError, UnsupportedFromTypeError, @@ -218,6 +219,10 @@ export function compileQuery( throw new DistinctRequiresSelectError() } + if (query.fnSelect && query.groupBy && query.groupBy.length > 0) { + throw new FnSelectWithGroupByError() + } + // Process the SELECT clause early - always create $selected // This eliminates duplication and allows for DISTINCT implementation if (query.fnSelect) { diff --git a/packages/db/src/query/index.ts b/packages/db/src/query/index.ts index e5f989f6c..2d66ad524 100644 --- a/packages/db/src/query/index.ts +++ b/packages/db/src/query/index.ts @@ -79,6 +79,9 @@ export { liveQueryCollectionOptions, } from './live-query-collection.js' +// One-shot query execution +export { queryOnce, type QueryOnceConfig } from './query-once.js' + export { type LiveQueryCollectionConfig } from './live/types.js' export { type LiveQueryCollectionUtils } from './live/collection-config-builder.js' diff --git a/packages/db/src/query/query-once.ts b/packages/db/src/query/query-once.ts new file mode 100644 index 000000000..c0e04b7e3 --- /dev/null +++ b/packages/db/src/query/query-once.ts @@ -0,0 +1,115 @@ +import { createLiveQueryCollection } from './live-query-collection.js' +import type { InitialQueryBuilder, QueryBuilder } from './builder/index.js' +import type { Context, InferResultType } from './builder/types.js' + +/** + * Configuration options for queryOnce + */ +export interface QueryOnceConfig { + /** + * Query builder function that defines the query + */ + 
query: + | ((q: InitialQueryBuilder) => QueryBuilder) + | QueryBuilder + // Future: timeout, signal, etc. +} + +// Overload 1: Simple query function returning array (non-single result) +/** + * Executes a one-shot query and returns the results as an array. + * + * This function creates a live query collection, preloads it, extracts the results, + * and automatically cleans up the collection. It's ideal for: + * - AI/LLM context building + * - Data export + * - Background processing + * - Testing + * + * @param queryFn - A function that receives the query builder and returns a query + * @returns A promise that resolves to an array of query results + * + * @example + * ```typescript + * // Basic query + * const users = await queryOnce((q) => + * q.from({ user: usersCollection }) + * ) + * + * // With filtering and projection + * const activeUserNames = await queryOnce((q) => + * q.from({ user: usersCollection }) + * .where(({ user }) => eq(user.active, true)) + * .select(({ user }) => ({ name: user.name })) + * ) + * ``` + */ +export function queryOnce( + queryFn: (q: InitialQueryBuilder) => QueryBuilder, +): Promise> + +// Overload 2: Config object form returning array (non-single result) +/** + * Executes a one-shot query using a configuration object. + * + * @param config - Configuration object with the query function + * @returns A promise that resolves to an array of query results + * + * @example + * ```typescript + * const recentOrders = await queryOnce({ + * query: (q) => + * q.from({ order: ordersCollection }) + * .orderBy(({ order }) => desc(order.createdAt)) + * .limit(100), + * }) + * ``` + */ +export function queryOnce( + config: QueryOnceConfig, +): Promise> + +// Implementation +export async function queryOnce( + configOrQuery: + | QueryOnceConfig + | ((q: InitialQueryBuilder) => QueryBuilder), +): Promise> { + // Normalize input + const config: QueryOnceConfig = + typeof configOrQuery === `function` + ? 
{ query: configOrQuery } + : configOrQuery + + const query = (q: InitialQueryBuilder) => { + const queryConfig = config.query + return typeof queryConfig === `function` ? queryConfig(q) : queryConfig + } + + // Create collection with minimal GC time; preload handles sync start + const collection = createLiveQueryCollection({ + query, + gcTime: 1, // Cleanup in next tick when no subscribers (0 disables GC) + }) + + try { + // Wait for initial data load + await collection.preload() + + // Check if this is a single-result query (findOne was called) + const isSingleResult = + (collection.config as { singleResult?: boolean }).singleResult === true + + // Extract and return results + if (isSingleResult) { + const first = collection.values().next().value as + | InferResultType + | undefined + return first as InferResultType + } + return collection.toArray as InferResultType + } finally { + // Always cleanup, even on error + await collection.cleanup() + } +} diff --git a/packages/db/src/query/subset-dedupe.ts b/packages/db/src/query/subset-dedupe.ts index 4959d3cbb..87d703b74 100644 --- a/packages/db/src/query/subset-dedupe.ts +++ b/packages/db/src/query/subset-dedupe.ts @@ -126,28 +126,29 @@ export class DeduplicatedLoadSubset { return prom } - // Not fully covered by existing data - // Compute the subset of data that is not covered by the existing data - // such that we only have to load that subset of missing data - const clonedOptions = cloneOptions(options) + // Not fully covered by existing data — load the missing subset. + // We need two clones: trackingOptions preserves the original predicate for + // accurate tracking (e.g., where=undefined means "all data"), while loadOptions + // may be narrowed with a difference expression for the actual backend request. 
+ const trackingOptions = cloneOptions(options) + const loadOptions = cloneOptions(options) if (this.unlimitedWhere !== undefined && options.limit === undefined) { // Compute difference to get only the missing data // We can only do this for unlimited queries // and we can only remove data that was loaded from unlimited queries // because with limited queries we have no way to express that we already loaded part of the matching data - clonedOptions.where = - minusWherePredicates(clonedOptions.where, this.unlimitedWhere) ?? - clonedOptions.where + loadOptions.where = + minusWherePredicates(loadOptions.where, this.unlimitedWhere) ?? + loadOptions.where } // Call underlying loadSubset to load the missing data - const resultPromise = this._loadSubset(clonedOptions) + const resultPromise = this._loadSubset(loadOptions) // Handle both sync (true) and async (Promise) return values if (resultPromise === true) { - // Sync return - update tracking synchronously - // Clone options before storing to protect against caller mutation - this.updateTracking(clonedOptions) + // Sync return - update tracking with the original predicate + this.updateTracking(trackingOptions) return true } else { // Async return - track the promise and update tracking after it resolves @@ -158,16 +159,14 @@ export class DeduplicatedLoadSubset { // We need to create a reference to the in-flight entry so we can remove it later const inflightEntry = { - options: clonedOptions, // Store cloned options for subset matching + options: loadOptions, // Store load options for subset matching of in-flight requests promise: resultPromise .then((result) => { // Only update tracking if this request is still from the current generation // If reset() was called, the generation will have incremented and we should // not repopulate the state that was just cleared if (capturedGeneration === this.generation) { - // Use the cloned options that we captured before any caller mutations - // This ensures we track exactly what 
was loaded, not what the caller changed - this.updateTracking(clonedOptions) + this.updateTracking(trackingOptions) } return result }) diff --git a/packages/db/tests/collection-subscribe-changes.test.ts b/packages/db/tests/collection-subscribe-changes.test.ts index 02e987e1e..37ce36a04 100644 --- a/packages/db/tests/collection-subscribe-changes.test.ts +++ b/packages/db/tests/collection-subscribe-changes.test.ts @@ -1284,6 +1284,7 @@ describe(`Collection.subscribeChanges`, () => { commit() }, }, + // eslint-disable-next-line @typescript-eslint/require-await onDelete: async ({ transaction }) => { emitter.emit(`sync`, transaction.mutations) }, diff --git a/packages/db/tests/collection-subscriber-duplicate-inserts.test.ts b/packages/db/tests/collection-subscriber-duplicate-inserts.test.ts index 5657eb206..774d600d6 100644 --- a/packages/db/tests/collection-subscriber-duplicate-inserts.test.ts +++ b/packages/db/tests/collection-subscriber-duplicate-inserts.test.ts @@ -399,13 +399,13 @@ describe(`CollectionSubscriber duplicate insert prevention`, () => { const liveQueryCollection = createLiveQueryCollection((q) => q .from({ users: usersCollection }) - .join({ orders: ordersCollection }, ({ users, orders }) => - eq(users.id, orders.userId), + .join({ orders: ordersCollection }, ({ users: u, orders: o }) => + eq(u.id, o.userId), ) - .select(({ users, orders }) => ({ - orderId: orders!.id, - userName: users.name, - amount: orders!.amount, + .select(({ users: u2, orders: o2 }) => ({ + orderId: o2.id, + userName: u2.name, + amount: o2.amount, })), ) diff --git a/packages/db/tests/collection.test.ts b/packages/db/tests/collection.test.ts index ecb60361d..afeb9f682 100644 --- a/packages/db/tests/collection.test.ts +++ b/packages/db/tests/collection.test.ts @@ -1547,7 +1547,7 @@ describe(`Collection`, () => { expect(state.size).toBe(3) }) - it(`should allow deleting a row by passing only the key to write function`, async () => { + it(`should allow deleting a row by passing only 
the key to write function`, () => { let testSyncFunctions: any = null const collection = createCollection<{ id: number; value: string }>({ @@ -1612,7 +1612,7 @@ describe(`Collection`, () => { expect(Array.from(collection.state.keys())).toEqual([3]) }) - it(`should allow deleting a row by key with string keys`, async () => { + it(`should allow deleting a row by key with string keys`, () => { let testSyncFunctions: any = null const collection = createCollection<{ id: string; name: string }>({ diff --git a/packages/db/tests/local-only.test.ts b/packages/db/tests/local-only.test.ts index 0dd841a27..c963866e6 100644 --- a/packages/db/tests/local-only.test.ts +++ b/packages/db/tests/local-only.test.ts @@ -529,6 +529,7 @@ describe(`LocalOnly Collection`, () => { ) const tx = createTransaction({ + // eslint-disable-next-line @typescript-eslint/require-await mutationFn: async ({ transaction }: any) => { noIdCollection.utils.acceptMutations(transaction) }, diff --git a/packages/db/tests/optimistic-action.test.ts b/packages/db/tests/optimistic-action.test.ts index 5b2572bc9..5ecf5cff9 100644 --- a/packages/db/tests/optimistic-action.test.ts +++ b/packages/db/tests/optimistic-action.test.ts @@ -70,6 +70,7 @@ describe(`createOptimisticAction`, () => { }) const addTodo = createOptimisticAction({ + // eslint-disable-next-line @typescript-eslint/require-await onMutate: async (text) => { collection.insert({ id: `1`, text }) }, diff --git a/packages/db/tests/paced-mutations.test.ts b/packages/db/tests/paced-mutations.test.ts index 026ed314a..83d169206 100644 --- a/packages/db/tests/paced-mutations.test.ts +++ b/packages/db/tests/paced-mutations.test.ts @@ -458,7 +458,7 @@ describe(`createPacedMutations`, () => { describe(`error handling`, () => { it(`should handle mutationFn errors and set transaction to failed state`, async () => { const error = new Error(`Mutation failed`) - const mutationFn = vi.fn(async () => { + const mutationFn = vi.fn(() => { throw error }) diff --git 
a/packages/db/tests/proxy.test.ts b/packages/db/tests/proxy.test.ts index 40b5da5a3..bbac0151a 100644 --- a/packages/db/tests/proxy.test.ts +++ b/packages/db/tests/proxy.test.ts @@ -1829,9 +1829,7 @@ describe(`Proxy Library`, () => { const { proxy, getChanges } = createChangeProxy(obj) // Use find() to get an array item and modify it - const order = proxy.job.orders.find( - (order) => order.orderId === `order-1`, - ) + const order = proxy.job.orders.find((o) => o.orderId === `order-1`) if (order) { order.orderBinInt = 99 } diff --git a/packages/db/tests/query/builder/buildQuery.test.ts b/packages/db/tests/query/builder/buildQuery.test.ts index 431fbde06..292d6f892 100644 --- a/packages/db/tests/query/builder/buildQuery.test.ts +++ b/packages/db/tests/query/builder/buildQuery.test.ts @@ -67,7 +67,7 @@ describe(`buildQuery function`, () => { ) .select(({ employees, departments }) => ({ employee_name: employees.name, - department_name: departments?.name, + department_name: departments.name, })), ) diff --git a/packages/db/tests/query/builder/callback-types.test-d.ts b/packages/db/tests/query/builder/callback-types.test-d.ts index 0e901b7fb..52060e983 100644 --- a/packages/db/tests/query/builder/callback-types.test-d.ts +++ b/packages/db/tests/query/builder/callback-types.test-d.ts @@ -122,17 +122,15 @@ describe(`Query Builder Callback Types`, () => { expectTypeOf( user.department_id, ).toEqualTypeOf | null>() - expectTypeOf(dept?.id).toEqualTypeOf | undefined>() - expectTypeOf(dept?.name).toEqualTypeOf | undefined>() - expectTypeOf(dept?.budget).toEqualTypeOf< - RefLeaf | undefined - >() + expectTypeOf(dept.id).toEqualTypeOf>() + expectTypeOf(dept.name).toEqualTypeOf>() + expectTypeOf(dept.budget).toEqualTypeOf>() return { user_name: user.name, - dept_name: dept?.name, + dept_name: dept.name, user_email: user.email, - dept_budget: dept?.budget, + dept_budget: dept.budget, } }) }) @@ -263,17 +261,13 @@ describe(`Query Builder Callback Types`, () => { ) .where(({ user, 
dept }) => { expectTypeOf(user.active).toEqualTypeOf>() - expectTypeOf(dept?.active).toEqualTypeOf< - RefLeaf | undefined - >() - expectTypeOf(dept?.budget).toEqualTypeOf< - RefLeaf | undefined - >() + expectTypeOf(dept.active).toEqualTypeOf>() + expectTypeOf(dept.budget).toEqualTypeOf>() return and( eq(user.active, true), - eq(dept?.active, true), - gt(dept?.budget, 100000), + eq(dept.active, true), + gt(dept.budget, 100000), ) }) }) @@ -315,13 +309,13 @@ describe(`Query Builder Callback Types`, () => { ) .join({ project: projectsCollection }, ({ user, dept, project }) => { expectTypeOf(user.id).toEqualTypeOf>() - expectTypeOf(dept?.id).toEqualTypeOf | undefined>() + expectTypeOf(dept.id).toEqualTypeOf>() expectTypeOf(project.user_id).toEqualTypeOf>() expectTypeOf(project.department_id).toEqualTypeOf>() return and( eq(project.user_id, user.id), - eq(project.department_id, dept?.id), + eq(project.department_id, dept.id), ) }) }) @@ -360,10 +354,10 @@ describe(`Query Builder Callback Types`, () => { ) .orderBy(({ user, dept }) => { expectTypeOf(user.id).toEqualTypeOf>() - expectTypeOf(dept?.id).toEqualTypeOf | undefined>() - expectTypeOf(dept?.name).toEqualTypeOf | undefined>() + expectTypeOf(dept.id).toEqualTypeOf>() + expectTypeOf(dept.name).toEqualTypeOf>() - return dept?.name + return dept.name }) }) }) @@ -400,12 +394,10 @@ describe(`Query Builder Callback Types`, () => { ) .groupBy(({ user, dept }) => { expectTypeOf(user.id).toEqualTypeOf>() - expectTypeOf(dept?.id).toEqualTypeOf | undefined>() - expectTypeOf(dept?.location).toEqualTypeOf< - RefLeaf | undefined - >() + expectTypeOf(dept.id).toEqualTypeOf>() + expectTypeOf(dept.location).toEqualTypeOf>() - return dept?.location + return dept.location }) }) }) @@ -481,13 +473,11 @@ describe(`Query Builder Callback Types`, () => { .join({ dept: departmentsCollection }, ({ user, dept }) => eq(user.department_id, dept.id), ) - .groupBy(({ dept }) => dept?.location) + .groupBy(({ dept }) => dept.location) .having(({ 
user, dept }) => { expectTypeOf(user.id).toEqualTypeOf>() - expectTypeOf(dept?.id).toEqualTypeOf | undefined>() - expectTypeOf(dept?.location).toEqualTypeOf< - RefLeaf | undefined - >() + expectTypeOf(dept.id).toEqualTypeOf>() + expectTypeOf(dept.location).toEqualTypeOf>() return and(gt(count(user.id), 3), gt(avg(user.salary), 70000)) }) @@ -506,67 +496,51 @@ describe(`Query Builder Callback Types`, () => { }) .join({ project: projectsCollection }, ({ user, dept, project }) => { expectTypeOf(user.id).toEqualTypeOf>() - expectTypeOf(dept?.id).toEqualTypeOf | undefined>() - expectTypeOf(dept?.location).toEqualTypeOf< - RefLeaf | undefined - >() + expectTypeOf(dept.id).toEqualTypeOf>() + expectTypeOf(dept.location).toEqualTypeOf>() expectTypeOf(project.user_id).toEqualTypeOf>() expectTypeOf(project.department_id).toEqualTypeOf>() return eq(project.user_id, user.id) }) .where(({ user, dept, project }) => { expectTypeOf(user.id).toEqualTypeOf>() - expectTypeOf(dept?.id).toEqualTypeOf | undefined>() - expectTypeOf(dept?.location).toEqualTypeOf< - RefLeaf | undefined - >() - expectTypeOf(project?.user_id).toEqualTypeOf< - RefLeaf | undefined - >() - expectTypeOf(project?.department_id).toEqualTypeOf< - RefLeaf | undefined + expectTypeOf(dept.id).toEqualTypeOf>() + expectTypeOf(dept.location).toEqualTypeOf>() + expectTypeOf(project.user_id).toEqualTypeOf>() + expectTypeOf(project.department_id).toEqualTypeOf< + RefLeaf >() return and( eq(user.active, true), - eq(dept?.active, true), - eq(project?.status, `active`), + eq(dept.active, true), + eq(project.status, `active`), ) }) .groupBy(({ dept }) => { - expectTypeOf(dept?.id).toEqualTypeOf | undefined>() - expectTypeOf(dept?.location).toEqualTypeOf< - RefLeaf | undefined - >() - return dept?.location + expectTypeOf(dept.id).toEqualTypeOf>() + expectTypeOf(dept.location).toEqualTypeOf>() + return dept.location }) .having(({ user, project }) => { expectTypeOf(user.id).toEqualTypeOf>() - 
expectTypeOf(project?.budget).toEqualTypeOf< - RefLeaf | undefined - >() - return and(gt(count(user.id), 2), gt(avg(project?.budget), 50000)) + expectTypeOf(project.budget).toEqualTypeOf>() + return and(gt(count(user.id), 2), gt(avg(project.budget), 50000)) }) .select(({ user, dept, project }) => { expectTypeOf(user.id).toEqualTypeOf>() - expectTypeOf(dept?.location).toEqualTypeOf< - RefLeaf | undefined - >() - expectTypeOf(project?.budget).toEqualTypeOf< - RefLeaf | undefined - >() + expectTypeOf(dept.location).toEqualTypeOf>() + expectTypeOf(project.budget).toEqualTypeOf>() return { - location: dept?.location, + location: dept.location, user_count: count(user.id), avg_salary: avg(user.salary), - total_project_budget: sum(project?.budget), - avg_project_budget: avg(project?.budget), + total_project_budget: sum(project.budget), + avg_project_budget: avg(project.budget), } }) .orderBy(({ dept }) => { - expectTypeOf(dept?.location).toEqualTypeOf< - RefLeaf | undefined - >() - return dept?.location + expectTypeOf(dept.location).toEqualTypeOf>() + return dept.location }) }) }) diff --git a/packages/db/tests/query/builder/functional-variants.test.ts b/packages/db/tests/query/builder/functional-variants.test.ts index f39f914f7..b993d77b9 100644 --- a/packages/db/tests/query/builder/functional-variants.test.ts +++ b/packages/db/tests/query/builder/functional-variants.test.ts @@ -221,7 +221,7 @@ describe(`QueryBuilder functional variants (fn)`, () => { ({ employees, departments }) => eq(employees.department_id, departments.id), ) - .groupBy(({ departments }) => departments?.name) + .groupBy(({ departments }) => departments.name) .fn.having( (row) => row.employees.salary > 60000 && @@ -245,7 +245,7 @@ describe(`QueryBuilder functional variants (fn)`, () => { ) .fn.where((row) => row.employees.active) .fn.where((row) => row.employees.salary > 40000) - .groupBy(({ departments }) => departments?.name) + .groupBy(({ departments }) => departments.name) .fn.having((row) => 
row.employees.salary > 70000) .fn.select((row) => ({ departmentName: row.departments?.name || `Unknown`, diff --git a/packages/db/tests/query/builder/join.test.ts b/packages/db/tests/query/builder/join.test.ts index db75e3cbc..f76d8ad44 100644 --- a/packages/db/tests/query/builder/join.test.ts +++ b/packages/db/tests/query/builder/join.test.ts @@ -75,7 +75,7 @@ describe(`QueryBuilder.join`, () => { eq(employees.department_id, departments.id), ) .join({ projects: projectsCollection }, ({ departments, projects }) => - eq(departments?.id, projects.department_id), + eq(departments.id, projects.department_id), ) const builtQuery = getQueryIR(query) @@ -101,8 +101,8 @@ describe(`QueryBuilder.join`, () => { .select(({ employees, departments }) => ({ id: employees.id, name: employees.name, - department_name: departments?.name, - department_budget: departments?.budget, + department_name: departments.name, + department_budget: departments.budget, })) const builtQuery = getQueryIR(query) @@ -122,7 +122,7 @@ describe(`QueryBuilder.join`, () => { ({ employees, departments }) => eq(employees.department_id, departments.id), ) - .where(({ departments }) => gt(departments?.budget, 1000000)) + .where(({ departments }) => gt(departments.budget, 1000000)) const builtQuery = getQueryIR(query) expect(builtQuery.where).toBeDefined() @@ -160,13 +160,13 @@ describe(`QueryBuilder.join`, () => { eq(employees.department_id, departments.id), ) .where(({ employees, departments }) => - and(gt(employees.salary, 50000), gt(departments?.budget, 1000000)), + and(gt(employees.salary, 50000), gt(departments.budget, 1000000)), ) .select(({ employees, departments }) => ({ id: employees.id, name: employees.name, - department_name: departments?.name, - dept_location: departments?.location, + department_name: departments.name, + dept_location: departments.location, })) const builtQuery = getQueryIR(query) @@ -360,7 +360,7 @@ describe(`QueryBuilder.join`, () => { .innerJoin( { projects: projectsCollection 
}, ({ departments, projects }) => - eq(departments?.id, projects.department_id), + eq(departments.id, projects.department_id), ) const builtQuery = getQueryIR(query) diff --git a/packages/db/tests/query/builder/subqueries.test-d.ts b/packages/db/tests/query/builder/subqueries.test-d.ts index f8343e395..7009a899e 100644 --- a/packages/db/tests/query/builder/subqueries.test-d.ts +++ b/packages/db/tests/query/builder/subqueries.test-d.ts @@ -119,7 +119,7 @@ describe(`Subquery Types`, () => { .select(({ issue, activeUser }) => ({ issueId: issue.id, issueTitle: issue.title, - userName: activeUser?.name, + userName: activeUser.name, })) // Verify the result type @@ -148,7 +148,7 @@ describe(`Subquery Types`, () => { ) .select(({ issue, activeUser }) => ({ issueId: issue.id, - userName: activeUser?.name, + userName: activeUser.name, })) // Verify the result type @@ -250,7 +250,7 @@ describe(`Subquery Types`, () => { ) .select(({ issue, activeUser }) => ({ issueId: issue.id, - userName: activeUser?.name, + userName: activeUser.name, })) // Verify the result type @@ -274,7 +274,7 @@ describe(`Subquery Types`, () => { ) .select(({ issue, user }) => ({ issueId: issue.id, - userName: user?.name, + userName: user.name, })) // Verify the result type diff --git a/packages/db/tests/query/compiler/subqueries.test.ts b/packages/db/tests/query/compiler/subqueries.test.ts index 4b41eec18..52918252c 100644 --- a/packages/db/tests/query/compiler/subqueries.test.ts +++ b/packages/db/tests/query/compiler/subqueries.test.ts @@ -232,7 +232,7 @@ describe(`Query2 Subqueries`, () => { .select(({ issue, activeUser }) => ({ issueId: issue.id, issueTitle: issue.title, - userName: activeUser?.name, + userName: activeUser.name, })) const builtQuery = getQueryIR(query) @@ -267,7 +267,7 @@ describe(`Query2 Subqueries`, () => { .select(({ issue, activeUser }) => ({ issueId: issue.id, issueTitle: issue.title, - userName: activeUser?.name, + userName: activeUser.name, })) const builtQuery = 
getQueryIR(query) @@ -363,7 +363,7 @@ describe(`Query2 Subqueries`, () => { .select(({ issue, userInfo }) => ({ issueId: issue.id, issueTitle: issue.title, - userName: userInfo?.name, + userName: userInfo.name, })) const builtQuery = getQueryIR(outerQuery) diff --git a/packages/db/tests/query/distinct.test.ts b/packages/db/tests/query/distinct.test.ts index 1979b03e4..6a09460c3 100644 --- a/packages/db/tests/query/distinct.test.ts +++ b/packages/db/tests/query/distinct.test.ts @@ -703,7 +703,7 @@ function createDistinctTests(autoIndex: `off` | `eager`): void { ) .where(({ users }) => eq(users.active, true)) .select(({ departments }) => ({ - department: departments?.id, + department: departments.id, })) .distinct(), }) diff --git a/packages/db/tests/query/findone-joins.test-d.ts b/packages/db/tests/query/findone-joins.test-d.ts index 849a7811d..6c4c67324 100644 --- a/packages/db/tests/query/findone-joins.test-d.ts +++ b/packages/db/tests/query/findone-joins.test-d.ts @@ -187,7 +187,7 @@ describe(`findOne() with joins`, () => { ) .select(({ todo, todoOptions }) => ({ todoText: todo.text, - optionText: todoOptions?.optionText, + optionText: todoOptions.optionText, })) .findOne(), }) @@ -212,7 +212,7 @@ describe(`findOne() with joins`, () => { .findOne() .select(({ todo, todoOptions }) => ({ todoText: todo.text, - optionText: todoOptions?.optionText, + optionText: todoOptions.optionText, })), }) diff --git a/packages/db/tests/query/functional-variants.test-d.ts b/packages/db/tests/query/functional-variants.test-d.ts index f39083047..0e3171e6c 100644 --- a/packages/db/tests/query/functional-variants.test-d.ts +++ b/packages/db/tests/query/functional-variants.test-d.ts @@ -450,11 +450,11 @@ describe(`Functional Variants Types`, () => { .join({ dept: departmentsCollection }, ({ user, dept }) => eq(user.department_id, dept.id), ) - .groupBy(({ dept }) => dept?.name) + .groupBy(({ dept }) => dept.name) .fn.having((row) => row.dept?.name !== `HR`) .select(({ dept, user }) => 
({ - departmentId: dept?.id, - departmentName: dept?.name, + departmentId: dept.id, + departmentName: dept.name, totalEmployees: count(user.id), })), }) diff --git a/packages/db/tests/query/group-by.test.ts b/packages/db/tests/query/group-by.test.ts index 0875ec141..747c9bb6e 100644 --- a/packages/db/tests/query/group-by.test.ts +++ b/packages/db/tests/query/group-by.test.ts @@ -1944,7 +1944,7 @@ function createGroupByTests(autoIndex: `off` | `eager`): void { ) .select(({ customer, oc }) => ({ name: customer.name, - orderCount: oc?.orderCount, + orderCount: oc.orderCount, })) }, }) @@ -2167,6 +2167,31 @@ function createGroupByTests(autoIndex: `off` | `eager`): void { expect(result?.totalAmount).toBe(700) }) }) + + describe(`fn.select with groupBy throws error`, () => { + let ordersCollection: ReturnType + + beforeEach(() => { + ordersCollection = createOrdersCollection(autoIndex) + }) + + test(`fn.select with groupBy should throw FnSelectWithGroupByError`, () => { + expect(() => + createLiveQueryCollection({ + startSync: true, + query: (q) => + q + .from({ orders: ordersCollection }) + .groupBy(({ orders }) => orders.customer_id) + .fn.select((row) => ({ + customerId: row.orders.customer_id, + totalAmount: sum(row.orders.amount), + orderCount: count(row.orders.id), + })), + }), + ).toThrow(`fn.select() cannot be used with groupBy()`) + }) + }) }) } diff --git a/packages/db/tests/query/join-subquery.test-d.ts b/packages/db/tests/query/join-subquery.test-d.ts index 29d411854..d458d2b41 100644 --- a/packages/db/tests/query/join-subquery.test-d.ts +++ b/packages/db/tests/query/join-subquery.test-d.ts @@ -386,7 +386,7 @@ describe(`Join Subquery Types`, () => { ) .select(({ issue, activeUser }) => ({ issue_title: issue.title, - user_name: activeUser?.name, // Should now be string | undefined + user_name: activeUser.name, // Should now be string | undefined issue_status: issue.status, })) }, diff --git a/packages/db/tests/query/join-subquery.test.ts 
b/packages/db/tests/query/join-subquery.test.ts index 259fac7e2..daca3337d 100644 --- a/packages/db/tests/query/join-subquery.test.ts +++ b/packages/db/tests/query/join-subquery.test.ts @@ -268,7 +268,7 @@ function createJoinSubqueryTests(autoIndex: `off` | `eager`): void { ) .select(({ issue, activeUser }) => ({ issue_title: issue.title, - user_name: activeUser?.name, + user_name: activeUser.name, issue_status: issue.status, })) }, @@ -412,8 +412,8 @@ function createJoinSubqueryTests(autoIndex: `off` | `eager`): void { .select(({ issue, activeUser }) => ({ issue_title: issue.title, issue_status: issue.status, - user_name: activeUser?.name, - user_status: activeUser?.status, + user_name: activeUser.name, + user_status: activeUser.status, })) }, }) @@ -464,7 +464,7 @@ function createJoinSubqueryTests(autoIndex: `off` | `eager`): void { expect(results).toHaveLength(1) expect(results[0]!.product.id).toBe(1) expect(results[0]!.tried).toBeDefined() - expect(results[0]!.tried!.userId).toBe(1) + expect(results[0]!.tried.userId).toBe(1) expect(results[0]).toEqual({ product: { id: 1, a: `8` }, tried: sampleTrials[0], @@ -770,8 +770,8 @@ function createJoinSubqueryTests(autoIndex: `off` | `eager`): void { .select(({ issue, author }) => ({ issue_id: issue.id, issue_title: issue.title, - author_name: author?.userName, - author_bio: author?.profileBio, + author_name: author.userName, + author_bio: author.profileBio, })) }, }) diff --git a/packages/db/tests/query/join.test-d.ts b/packages/db/tests/query/join.test-d.ts index 9fc1a6bde..495240484 100644 --- a/packages/db/tests/query/join.test-d.ts +++ b/packages/db/tests/query/join.test-d.ts @@ -209,8 +209,8 @@ describe(`Join Types - Type Safety`, () => { ) .select(({ user, dept }) => ({ userName: user.name, - deptName: dept?.name, // This should still be accessible in select - deptBudget: dept?.budget, + deptName: dept.name, // This should still be accessible in select + deptBudget: dept.budget, })), }) @@ -383,8 +383,8 @@ 
describe(`Join Alias Methods - Type Safety`, () => { ) .select(({ user, dept }) => ({ userName: user.name, - deptName: dept?.name, // This should be string | undefined due to left join - deptBudget: dept?.budget, + deptName: dept.name, // This should be string | undefined due to left join + deptBudget: dept.budget, })), }) @@ -457,7 +457,7 @@ describe(`Join Alias Methods - Type Safety`, () => { ) .join( { project: projectsCollection }, - ({ dept, project }) => eq(dept?.id, project.department_id), + ({ dept, project }) => eq(dept.id, project.department_id), `inner`, ), }) @@ -687,7 +687,7 @@ describe(`Join Alias Methods - Type Safety`, () => { ) .select(({ post, user }) => ({ postTitle: post.title, - authorName: user?.name, // This will be string | undefined due to left join + authorName: user.name, // This will be string | undefined due to left join })), }) @@ -701,6 +701,127 @@ describe(`Join Alias Methods - Type Safety`, () => { }) }) +describe(`Declarative select refs should not use union with undefined for nullable joins`, () => { + test(`left-joined ref in declarative select should allow direct property access without optional chaining`, () => { + const usersCollection = createUsersCollection() + const departmentsCollection = createDepartmentsCollection() + + const query = createLiveQueryCollection({ + query: (q) => + q + .from({ user: usersCollection }) + .leftJoin({ dept: departmentsCollection }, ({ user, dept }) => + eq(user.department_id, dept.id), + ) + .select(({ user, dept }) => ({ + userName: user.name, + // dept is a proxy ref that is always present at build time, + // so direct property access should work without optional chaining + deptName: dept.name, + deptBudget: dept.budget, + })), + }) + + const results = query.toArray + + // Result fields from left-joined tables should still produce T | undefined + // because the actual data may have no matching row + expectTypeOf(results).toEqualTypeOf< + Array<{ + userName: string + deptName: string | 
undefined + deptBudget: number | undefined + }> + >() + }) + + test(`right-joined ref in declarative select should allow direct property access on nullable left table`, () => { + const usersCollection = createUsersCollection() + const departmentsCollection = createDepartmentsCollection() + + const query = createLiveQueryCollection({ + query: (q) => + q + .from({ user: usersCollection }) + .rightJoin({ dept: departmentsCollection }, ({ user, dept }) => + eq(user.department_id, dept.id), + ) + .select(({ user, dept }) => ({ + // user is the nullable side in a right join + userName: user.name, + deptName: dept.name, + })), + }) + + const results = query.toArray + + expectTypeOf(results).toEqualTypeOf< + Array<{ + userName: string | undefined + deptName: string + }> + >() + }) + + test(`full-joined refs in declarative select should allow direct property access on both nullable tables`, () => { + const usersCollection = createUsersCollection() + const departmentsCollection = createDepartmentsCollection() + + const query = createLiveQueryCollection({ + query: (q) => + q + .from({ user: usersCollection }) + .fullJoin({ dept: departmentsCollection }, ({ user, dept }) => + eq(user.department_id, dept.id), + ) + .select(({ user, dept }) => ({ + userName: user.name, + deptName: dept.name, + })), + }) + + const results = query.toArray + + // Both sides are nullable in a full join + expectTypeOf(results).toEqualTypeOf< + Array<{ + userName: string | undefined + deptName: string | undefined + }> + >() + }) + + test(`inner-joined ref in declarative select should allow direct property access with non-optional result`, () => { + const usersCollection = createUsersCollection() + const departmentsCollection = createDepartmentsCollection() + + const query = createLiveQueryCollection({ + query: (q) => + q + .from({ user: usersCollection }) + .innerJoin({ dept: departmentsCollection }, ({ user, dept }) => + eq(user.department_id, dept.id), + ) + .select(({ user, dept }) => ({ + 
userName: user.name, + deptName: dept.name, + deptBudget: dept.budget, + })), + }) + + const results = query.toArray + + // Inner join fields should never be undefined + expectTypeOf(results).toEqualTypeOf< + Array<{ + userName: string + deptName: string + deptBudget: number + }> + >() + }) +}) + describe(`Join with ArkType Schemas`, () => { test(`join with optional foreign key using ArkType schema should work`, () => { // Define ArkType schemas with optional foreign key @@ -835,7 +956,7 @@ describe(`Join with ArkType Schemas`, () => { ) .select(({ post, user }) => ({ postTitle: post.title, - authorName: user?.name, // This will be string | undefined due to left join + authorName: user.name, // This will be string | undefined due to left join })), }) diff --git a/packages/db/tests/query/join.test.ts b/packages/db/tests/query/join.test.ts index 49751eaa9..0219cdaf2 100644 --- a/packages/db/tests/query/join.test.ts +++ b/packages/db/tests/query/join.test.ts @@ -121,9 +121,9 @@ function testJoinType(joinType: JoinType, autoIndex: `off` | `eager`) { joinType, ) .select(({ user, dept }) => ({ - user_name: user?.name, - department_name: dept?.name, - budget: dept?.budget, + user_name: user.name, + department_name: dept.name, + budget: dept.budget, })), }) @@ -302,8 +302,8 @@ function testJoinType(joinType: JoinType, autoIndex: `off` | `eager`) { joinType, ) .select(({ user, dept }) => ({ - user_name: user?.name, - department_name: dept?.name, + user_name: user.name, + department_name: dept.name, })), }) @@ -345,8 +345,8 @@ function testJoinType(joinType: JoinType, autoIndex: `off` | `eager`) { joinType, ) .select(({ user, dept }) => ({ - user_name: user?.name, - department_name: dept?.name, + user_name: user.name, + department_name: dept.name, })), }) @@ -384,8 +384,8 @@ function testJoinType(joinType: JoinType, autoIndex: `off` | `eager`) { joinType, ) .select(({ user, dept }) => ({ - user_name: user?.name, - department_name: dept?.name, + user_name: user.name, + 
department_name: dept.name, })), }) @@ -433,8 +433,8 @@ function testJoinType(joinType: JoinType, autoIndex: `off` | `eager`) { joinType, ) .select(({ user, dept }) => ({ - user_name: user?.name, - department_name: dept?.name, + user_name: user.name, + department_name: dept.name, })), }) @@ -533,12 +533,12 @@ function testJoinType(joinType: JoinType, autoIndex: `off` | `eager`) { .leftJoin({ member: teamMembersCollection }, ({ team, member }) => eq(team.id, member.team_id), ) - .where(({ member }) => eq(member?.user_id, 100)) + .where(({ member }) => eq(member.user_id, 100)) .select(({ team, member }) => ({ team_id: team.id, team_name: team.name, - user_id: member?.user_id, - role: member?.role, + user_id: member.user_id, + role: member.role, })), }) } else if (joinType === `right`) { @@ -553,10 +553,10 @@ function testJoinType(joinType: JoinType, autoIndex: `off` | `eager`) { { member: teamMembersCollection }, ({ team, member }) => eq(team.id, member.team_id), ) - .where(({ team }) => eq(team?.active, true)) + .where(({ team }) => eq(team.active, true)) .select(({ team, member }) => ({ - team_id: team?.id, - team_name: team?.name, + team_id: team.id, + team_name: team.name, user_id: member.user_id, role: member.role, })), @@ -572,12 +572,12 @@ function testJoinType(joinType: JoinType, autoIndex: `off` | `eager`) { .fullJoin({ member: teamMembersCollection }, ({ team, member }) => eq(team.id, member.team_id), ) - .where(({ member }) => eq(member?.role, `admin`)) + .where(({ member }) => eq(member.role, `admin`)) .select(({ team, member }) => ({ - team_id: team?.id, - team_name: team?.name, - user_id: member?.user_id, - role: member?.role, + team_id: team.id, + team_name: team.name, + user_id: member.user_id, + role: member.role, })), }) } else { @@ -736,13 +736,13 @@ function testJoinType(joinType: JoinType, autoIndex: `off` | `eager`) { ) .join( { task: tasksCollection }, - ({ task, project }) => eq(task.project_id, project?.id), + ({ task, project }) => 
eq(task.project_id, project.id), joinType, ) .select(({ company, project, task }) => ({ - company_name: company?.name, - project_name: project?.name, - task_name: task?.name, + company_name: company.name, + project_name: project.name, + task_name: task.name, })), }) @@ -941,7 +941,7 @@ function createJoinTests(autoIndex: `off` | `eager`): void { user_id: user.id, user_name: user.name, department_id: user.department_id, - department_name: dept?.name, + department_name: dept.name, })), }) @@ -1653,7 +1653,7 @@ function createJoinTests(autoIndex: `off` | `eager`): void { id: event.id, parent_id: event.parent_id, parent: { - id: parent?.id, + id: parent.id, }, })), }) @@ -1714,16 +1714,14 @@ function createJoinTests(autoIndex: `off` | `eager`): void { ({ employee, manager }) => eq(employee.manager_id, manager.id), `left`, ) - .where(({ manager }) => - or(isNull(manager?.id), gt(manager?.age, 35)), - ) + .where(({ manager }) => or(isNull(manager.id), gt(manager.age, 35))) .select(({ employee, manager }) => ({ employeeId: employee.id, employeeName: employee.name, employeeAge: employee.age, - managerId: manager?.id, - managerName: manager?.name, - managerAge: manager?.age, + managerId: manager.id, + managerName: manager.name, + managerAge: manager.age, })), }) @@ -1888,7 +1886,7 @@ function createJoinTests(autoIndex: `off` | `eager`): void { q .from({ l: leftCollection }) .leftJoin({ r: rightCollection }, ({ l, r }) => eq(l.rightId, r.id)) - .where(({ r }) => isUndefined(r?.payload)) + .where(({ r }) => isUndefined(r.payload)) .select(({ l, r }) => ({ leftId: l.id, right: r })), }) @@ -1985,13 +1983,13 @@ function createJoinTests(autoIndex: `off` | `eager`): void { ) .join( { balance: balancesCollection }, - ({ balance, client }) => eq(balance.client, client?.name), + ({ balance, client }) => eq(balance.client, client.name), `left`, ) .select(({ player, client, balance }) => ({ player_name: player.name, - client_name: client?.name, - balance_amount: balance?.amount, + 
client_name: client.name, + balance_amount: balance.amount, })), }) diff --git a/packages/db/tests/query/live-query-collection.test.ts b/packages/db/tests/query/live-query-collection.test.ts index 3c73efa93..5fd1edec0 100644 --- a/packages/db/tests/query/live-query-collection.test.ts +++ b/packages/db/tests/query/live-query-collection.test.ts @@ -3,6 +3,7 @@ import { Temporal } from 'temporal-polyfill' import { createCollection } from '../../src/collection/index.js' import { and, + coalesce, createLiveQueryCollection, eq, ilike, @@ -103,7 +104,7 @@ describe(`createLiveQueryCollection`, () => { }) describe(`compareOptions inheritance`, () => { - it(`should inherit compareOptions from FROM collection`, async () => { + it(`should inherit compareOptions from FROM collection`, () => { // Create a collection with non-default compareOptions const sourceCollection = createCollection( mockSyncCollectionOptions({ @@ -130,7 +131,7 @@ describe(`createLiveQueryCollection`, () => { }) }) - it(`should inherit compareOptions from FROM collection via subquery`, async () => { + it(`should inherit compareOptions from FROM collection via subquery`, () => { // Create a collection with non-default compareOptions const sourceCollection = createCollection( mockSyncCollectionOptions({ @@ -167,7 +168,7 @@ describe(`createLiveQueryCollection`, () => { }) }) - it(`should use default compareOptions when FROM collection has no compareOptions`, async () => { + it(`should use default compareOptions when FROM collection has no compareOptions`, () => { // Create a collection without compareOptions (uses defaults) const sourceCollection = createCollection( mockSyncCollectionOptions({ @@ -199,7 +200,7 @@ describe(`createLiveQueryCollection`, () => { }) }) - it(`should use explicitly provided compareOptions instead of inheriting from FROM collection`, async () => { + it(`should use explicitly provided compareOptions instead of inheriting from FROM collection`, () => { // Create a collection with 
non-default compareOptions const sourceCollection = createCollection( mockSyncCollectionOptions({ @@ -2284,7 +2285,7 @@ describe(`createLiveQueryCollection`, () => { .select(({ base: b, related: r }) => ({ id: b.id, name: b.name, - value: r?.value, + value: r.value, })), getKey: (item) => item.id, // Valid for 1:1 joins with unique keys }) @@ -2324,9 +2325,9 @@ describe(`createLiveQueryCollection`, () => { .join({ users }, ({ comments: c, users: u }) => eq(c.userId, u.id)) .select(({ comments: c, users: u }) => ({ id: c.id, - userId: u?.id ?? c.userId, + userId: coalesce(u.id, c.userId), text: c.text, - userName: u?.name, + userName: u.name, })), getKey: (item) => item.userId, startSync: true, diff --git a/packages/db/tests/query/optional-fields-negative.test-d.ts b/packages/db/tests/query/optional-fields-negative.test-d.ts index 56436d10c..8b81f1c89 100644 --- a/packages/db/tests/query/optional-fields-negative.test-d.ts +++ b/packages/db/tests/query/optional-fields-negative.test-d.ts @@ -154,7 +154,7 @@ describe(`Optional Fields - Type Safety Tests`, () => { .select(({ user, dept }) => ({ user_name: user.name, - dept_name: dept?.name, // Should be string | undefined due to left join + dept_name: dept.name, // Should be string | undefined due to left join })), }) diff --git a/packages/db/tests/query/optional-fields-runtime.test.ts b/packages/db/tests/query/optional-fields-runtime.test.ts index 158df33bf..3778eb7d8 100644 --- a/packages/db/tests/query/optional-fields-runtime.test.ts +++ b/packages/db/tests/query/optional-fields-runtime.test.ts @@ -167,7 +167,7 @@ describe(`Optional Fields - Runtime Tests`, () => { .select(({ user, dept }) => ({ user_name: user.name, - dept_name: dept?.name, // Should be undefined for Bob + dept_name: dept.name, // Should be undefined for Bob })), }) diff --git a/packages/db/tests/query/order-by.test.ts b/packages/db/tests/query/order-by.test.ts index c35760599..2f958b28f 100644 --- a/packages/db/tests/query/order-by.test.ts +++ 
b/packages/db/tests/query/order-by.test.ts @@ -777,12 +777,12 @@ function createOrderByTests(autoIndex: `off` | `eager`): void { ({ employees, departments }) => eq(employees.department_id, departments.id), ) - .orderBy(({ departments }) => departments?.name, `asc`) + .orderBy(({ departments }) => departments.name, `asc`) .orderBy(({ employees }) => employees.salary, `desc`) .select(({ employees, departments }) => ({ id: employees.id, employee_name: employees.name, - department_name: departments?.name, + department_name: departments.name, salary: employees.salary, })), ) @@ -1846,12 +1846,12 @@ function createOrderByTests(autoIndex: `off` | `eager`): void { ({ employees, departments }) => eq(employees.department_id, departments.id), ) - .orderBy(({ departments }) => departments?.name, `asc`) + .orderBy(({ departments }) => departments.name, `asc`) .limit(5) .select(({ employees, departments }) => ({ employeeId: employees.id, employeeName: employees.name, - departmentName: departments?.name, + departmentName: departments.name, })), ) diff --git a/packages/db/tests/query/query-once.test.ts b/packages/db/tests/query/query-once.test.ts new file mode 100644 index 000000000..74efc105e --- /dev/null +++ b/packages/db/tests/query/query-once.test.ts @@ -0,0 +1,366 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest' +import { CollectionImpl, createCollection } from '../../src/collection/index.js' +import { eq, queryOnce } from '../../src/query/index.js' +import { Query } from '../../src/query/builder/index.js' +import { mockSyncCollectionOptions } from '../utils.js' + +// Sample user type for tests +type User = { + id: number + name: string + active: boolean + age: number +} + +// Sample data for tests +const sampleUsers: Array = [ + { id: 1, name: `Alice`, active: true, age: 30 }, + { id: 2, name: `Bob`, active: true, age: 25 }, + { id: 3, name: `Charlie`, active: false, age: 35 }, + { id: 4, name: `Diana`, active: true, age: 28 }, + { id: 5, name: `Eve`, 
active: false, age: 22 }, +] + +function createUsersCollection() { + return createCollection( + mockSyncCollectionOptions({ + id: `test-users-query-once`, + getKey: (user) => user.id, + initialData: sampleUsers, + }), + ) +} + +describe(`queryOnce`, () => { + let usersCollection: ReturnType + + beforeEach(() => { + usersCollection = createUsersCollection() + }) + + describe(`basic functionality`, () => { + it(`should execute a basic query and return results as an array`, async () => { + const users = await queryOnce((q) => q.from({ user: usersCollection })) + + expect(Array.isArray(users)).toBe(true) + expect(users.length).toBe(5) + expect(users.map((u) => u.name)).toEqual( + expect.arrayContaining([`Alice`, `Bob`, `Charlie`, `Diana`, `Eve`]), + ) + }) + + it(`should accept a query function directly`, async () => { + const users = await queryOnce((q) => q.from({ user: usersCollection })) + + expect(users.length).toBe(5) + }) + + it(`should accept a config object with query property`, async () => { + const users = await queryOnce({ + query: (q) => q.from({ user: usersCollection }), + }) + + expect(users.length).toBe(5) + }) + + it(`should accept a QueryBuilder instance via config`, async () => { + const queryBuilder = new Query() + .from({ user: usersCollection }) + .where(({ user }) => eq(user.active, true)) + + const users = await queryOnce({ + query: queryBuilder, + }) + + expect(users.length).toBe(3) + expect(users.every((u) => u.active)).toBe(true) + }) + }) + + describe(`filtering with where clause`, () => { + it(`should filter results with a where clause`, async () => { + const activeUsers = await queryOnce((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.active, true)), + ) + + expect(activeUsers.length).toBe(3) + expect(activeUsers.every((u) => u.active)).toBe(true) + }) + + it(`should handle empty results from filtering`, async () => { + const noUsers = await queryOnce((q) => + q + .from({ user: usersCollection }) + .where(({ 
user }) => eq(user.age, 100)), + ) + + expect(noUsers.length).toBe(0) + expect(Array.isArray(noUsers)).toBe(true) + }) + }) + + describe(`projection with select clause`, () => { + it(`should project results with a select clause`, async () => { + const userNames = await queryOnce((q) => + q + .from({ user: usersCollection }) + .select(({ user }) => ({ name: user.name })), + ) + + expect(userNames.length).toBe(5) + expect(userNames[0]).toHaveProperty(`name`) + expect(userNames[0]).not.toHaveProperty(`id`) + expect(userNames[0]).not.toHaveProperty(`active`) + }) + + it(`should project multiple fields`, async () => { + const projected = await queryOnce((q) => + q.from({ user: usersCollection }).select(({ user }) => ({ + id: user.id, + name: user.name, + })), + ) + + expect(projected.length).toBe(5) + expect(projected[0]).toHaveProperty(`id`) + expect(projected[0]).toHaveProperty(`name`) + expect(projected[0]).not.toHaveProperty(`active`) + expect(projected[0]).not.toHaveProperty(`age`) + }) + }) + + describe(`ordering and limits`, () => { + it(`should order results with orderBy clause`, async () => { + const orderedUsers = await queryOnce((q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.name, `asc`), + ) + + expect(orderedUsers.map((u) => u.name)).toEqual([ + `Alice`, + `Bob`, + `Charlie`, + `Diana`, + `Eve`, + ]) + }) + + it(`should order results in descending order`, async () => { + const orderedUsers = await queryOnce((q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.age, `desc`), + ) + + expect(orderedUsers[0]!.age).toBe(35) // Charlie + expect(orderedUsers[orderedUsers.length - 1]!.age).toBe(22) // Eve + }) + + it(`should limit results with limit clause`, async () => { + const limitedUsers = await queryOnce((q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.name, `asc`) + .limit(2), + ) + + expect(limitedUsers.length).toBe(2) + expect(limitedUsers.map((u) => u.name)).toEqual([`Alice`, 
`Bob`]) + }) + + it(`should support offset with limit`, async () => { + const offsetUsers = await queryOnce((q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.name, `asc`) + .offset(2) + .limit(2), + ) + + expect(offsetUsers.length).toBe(2) + expect(offsetUsers.map((u) => u.name)).toEqual([`Charlie`, `Diana`]) + }) + }) + + describe(`single result with findOne`, () => { + it(`should return a single result with findOne`, async () => { + const user = await queryOnce((q) => + q + .from({ user: usersCollection }) + .where(({ user: u }) => eq(u.id, 1)) + .findOne(), + ) + + expect(user).toBeDefined() + expect(user?.name).toBe(`Alice`) + }) + + it(`should return undefined when findOne matches no results`, async () => { + const user = await queryOnce((q) => + q + .from({ user: usersCollection }) + .where(({ user: u }) => eq(u.id, 999)) + .findOne(), + ) + + expect(user).toBeUndefined() + }) + }) + + describe(`joins`, () => { + it(`should support join queries`, async () => { + type Post = { + id: number + authorId: number + title: string + } + + const postsCollection = createCollection( + mockSyncCollectionOptions({ + id: `test-posts-query-once`, + getKey: (post) => post.id, + initialData: [ + { id: 1, authorId: 1, title: `Alice Post 1` }, + { id: 2, authorId: 1, title: `Alice Post 2` }, + { id: 3, authorId: 2, title: `Bob Post 1` }, + ], + }), + ) + + const usersWithPosts = await queryOnce((q) => + q + .from({ user: usersCollection }) + .join( + { post: postsCollection }, + ({ user, post }) => eq(user.id, post.authorId), + `inner`, + ) + .select(({ user, post }) => ({ + userName: user.name, + postTitle: post.title, + })), + ) + + expect(usersWithPosts.length).toBe(3) + expect(usersWithPosts.some((r) => r.userName === `Alice`)).toBe(true) + expect(usersWithPosts.some((r) => r.userName === `Bob`)).toBe(true) + }) + }) + + describe(`empty collections`, () => { + it(`should handle empty collections`, async () => { + const emptyCollection = 
createCollection( + mockSyncCollectionOptions({ + id: `empty-users-query-once`, + getKey: (user) => user.id, + initialData: [], + }), + ) + + const users = await queryOnce((q) => q.from({ user: emptyCollection })) + + expect(users.length).toBe(0) + expect(Array.isArray(users)).toBe(true) + }) + }) + + describe(`cleanup`, () => { + it(`should cleanup the collection after returning results`, async () => { + // Run the query + const users = await queryOnce((q) => q.from({ user: usersCollection })) + + // Verify we got results + expect(users.length).toBe(5) + + // The collection should be cleaned up (no way to directly test this, + // but if cleanup doesn't happen, memory would leak over time) + }) + + it(`should cleanup even if preload rejects`, async () => { + const preloadError = new Error(`preload failed`) + const preloadSpy = vi + .spyOn(CollectionImpl.prototype, `preload`) + .mockRejectedValueOnce(preloadError) + const cleanupSpy = vi.spyOn(CollectionImpl.prototype, `cleanup`) + + try { + await expect( + queryOnce((q) => q.from({ user: usersCollection })), + ).rejects.toThrow(`preload failed`) + + expect(cleanupSpy).toHaveBeenCalled() + } finally { + preloadSpy.mockRestore() + cleanupSpy.mockRestore() + } + }) + }) + + describe(`combined operations`, () => { + it(`should support complex queries with multiple operations`, async () => { + const result = await queryOnce((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.active, true)) + .select(({ user }) => ({ + id: user.id, + name: user.name, + age: user.age, + })) + .orderBy(({ user }) => user.age, `desc`) + .limit(2), + ) + + expect(result.length).toBe(2) + // Active users ordered by age desc, limited to 2 + // Active users: Alice (30), Bob (25), Diana (28) + // Ordered by age desc: Alice (30), Diana (28), Bob (25) + // Limited to 2: Alice, Diana + expect(result[0]!.name).toBe(`Alice`) + expect(result[1]!.name).toBe(`Diana`) + }) + }) + + describe(`type inference`, () => { + it(`should 
correctly infer types for simple queries`, async () => { + const users = await queryOnce((q) => q.from({ user: usersCollection })) + + // TypeScript should infer the correct type + const firstUser = users[0] + if (firstUser) { + // These should compile without errors + const _id: number = firstUser.id + const _name: string = firstUser.name + const _active: boolean = firstUser.active + const _age: number = firstUser.age + + expect(_id).toBeDefined() + expect(_name).toBeDefined() + expect(_active).toBeDefined() + expect(_age).toBeDefined() + } + }) + + it(`should correctly infer types for projected queries`, async () => { + const users = await queryOnce((q) => + q.from({ user: usersCollection }).select(({ user }) => ({ + userName: user.name, + userAge: user.age, + })), + ) + + const firstUser = users[0] + if (firstUser) { + // These should compile without errors + const _userName: string = firstUser.userName + const _userAge: number = firstUser.userAge + + expect(_userName).toBeDefined() + expect(_userAge).toBeDefined() + } + }) + }) +}) diff --git a/packages/db/tests/query/query-while-syncing.test.ts b/packages/db/tests/query/query-while-syncing.test.ts index 5616e08a8..60184dd22 100644 --- a/packages/db/tests/query/query-while-syncing.test.ts +++ b/packages/db/tests/query/query-while-syncing.test.ts @@ -427,7 +427,7 @@ describe(`Query while syncing`, () => { ) .select(({ user, dept }) => ({ user_name: user.name, - department_name: dept?.name, + department_name: dept.name, })), }) diff --git a/packages/db/tests/query/scheduler.test.ts b/packages/db/tests/query/scheduler.test.ts index adb85c979..40812d676 100644 --- a/packages/db/tests/query/scheduler.test.ts +++ b/packages/db/tests/query/scheduler.test.ts @@ -61,8 +61,8 @@ function setupLiveQueryCollections(id: string) { .join({ task: tasks }, ({ user, task }) => eq(user.id, task.userId)) .select(({ user, task }) => ({ userId: user.id, - taskId: task?.id, - title: task?.title, + taskId: task.id, + title: task.title, 
})), }) @@ -277,8 +277,8 @@ describe(`live query scheduler`, () => { `full`, ) .select(({ left, right }) => ({ - left: left?.value, - right: right?.value, + left: left.value, + right: right.value, })), }) @@ -364,8 +364,8 @@ describe(`live query scheduler`, () => { `full`, ) .select(({ left, right }) => ({ - left: left?.value, - right: right?.value, + left: left.value, + right: right.value, })), }) @@ -447,8 +447,8 @@ describe(`live query scheduler`, () => { `full`, ) .select(({ left, right }) => ({ - left: left?.value, - right: right?.value, + left: left.value, + right: right.value, })), }) @@ -600,7 +600,7 @@ describe(`live query scheduler`, () => { ) .select(({ account, user }) => ({ account: account, - profile: user?.profile, + profile: user.profile, })), }) @@ -626,7 +626,7 @@ describe(`live query scheduler`, () => { .select(({ accountWithUser, team }) => ({ account: accountWithUser.account, profile: accountWithUser.profile, - team: team?.team, + team: team.team, })), }) @@ -721,7 +721,7 @@ describe(`live query scheduler`, () => { .select(({ a, b }) => ({ id: a.id, aValue: a.value, - bValue: b?.value ?? 
null, + bValue: b.value, })), }) diff --git a/packages/db/tests/query/subset-dedupe.test.ts b/packages/db/tests/query/subset-dedupe.test.ts index 9067c931f..0234c31a1 100644 --- a/packages/db/tests/query/subset-dedupe.test.ts +++ b/packages/db/tests/query/subset-dedupe.test.ts @@ -4,7 +4,6 @@ import { cloneOptions, } from '../../src/query/subset-dedupe' import { Func, PropRef, Value } from '../../src/query/ir' -import { minusWherePredicates } from '../../src/query/predicate-utils' import type { BasicExpression, OrderBy } from '../../src/query/ir' import type { LoadSubsetOptions } from '../../src/types' @@ -517,9 +516,6 @@ describe(`createDeduplicatedLoadSubset`, () => { eq(ref(`status`), val(`active`)), ) - const test = minusWherePredicates(secondPredicate, firstPredicate) - console.log(`test`, test) - await deduplicated.loadSubset({ where: secondPredicate }) expect(callCount).toBe(2) expect(calls[1]).toEqual({ @@ -596,7 +592,135 @@ describe(`createDeduplicatedLoadSubset`, () => { // i.e. should request NOT (age > 20) await deduplicated.loadSubset({}) expect(callCount).toBe(2) - expect(calls[1]).toEqual({ where: not(gt(ref(`age`), val(20))) }) // Should request all data except what we already loaded + expect(calls[1]).toEqual({ where: not(gt(ref(`age`), val(20))) }) + + // After loading all data, subsequent calls should be deduplicated + const result = await deduplicated.loadSubset({ + where: gt(ref(`age`), val(5)), + }) + expect(result).toBe(true) + expect(callCount).toBe(2) + }) + + it(`should not produce unbounded WHERE expressions when loading all data after eq accumulation`, async () => { + // This test reproduces the production bug where accumulating many eq predicates + // and then loading all data (no WHERE clause) caused unboundedly growing + // expressions instead of correctly setting hasLoadedAllData=true. 
+ let callCount = 0 + const calls: Array = [] + const mockLoadSubset = (options: LoadSubsetOptions) => { + callCount++ + calls.push(cloneOptions(options)) + return Promise.resolve() + } + + const deduplicated = new DeduplicatedLoadSubset({ + loadSubset: mockLoadSubset, + }) + + // Simulate visiting multiple tasks, each adding an eq predicate + for (let i = 0; i < 10; i++) { + await deduplicated.loadSubset({ + where: eq(ref(`task_id`), val(`uuid-${i}`)), + }) + } + // After 10 eq calls, unlimitedWhere should be IN(task_id, [uuid-0, ..., uuid-9]) + expect(callCount).toBe(10) + + // Now load all data (no WHERE clause) + // This should send NOT(IN(...)) to the backend but track as "all data loaded" + await deduplicated.loadSubset({}) + expect(callCount).toBe(11) + + // The load request should be NOT(IN(task_id, [all accumulated uuids])) + const loadWhere = calls[10]!.where as any + expect(loadWhere.name).toBe(`not`) + expect(loadWhere.args[0].name).toBe(`in`) + expect(loadWhere.args[0].args[0].path).toEqual([`task_id`]) + const loadedUuids = ( + loadWhere.args[0].args[1].value as Array + ).sort() + const expectedUuids = Array.from( + { length: 10 }, + (_, i) => `uuid-${i}`, + ).sort() + expect(loadedUuids).toEqual(expectedUuids) + + // Critical: after loading all data, subsequent requests should be deduplicated + const result1 = await deduplicated.loadSubset({ + where: eq(ref(`task_id`), val(`uuid-999`)), + }) + expect(result1).toBe(true) // Covered by "all data" load + expect(callCount).toBe(11) // No additional call + + // Loading all data again should also be deduplicated + const result2 = await deduplicated.loadSubset({}) + expect(result2).toBe(true) + expect(callCount).toBe(11) // Still no additional call + }) + + it(`should not produce unbounded WHERE expressions with synchronous loadSubset`, () => { + // Same scenario as the async accumulation test, but with a sync mock + // to exercise the sync return path (line 150 of subset-dedupe.ts) + let callCount = 0 + 
const mockLoadSubset = () => { + callCount++ + return true as const + } + + const deduplicated = new DeduplicatedLoadSubset({ + loadSubset: mockLoadSubset, + }) + + // Accumulate eq predicates via sync returns + for (let i = 0; i < 10; i++) { + deduplicated.loadSubset({ + where: eq(ref(`task_id`), val(`uuid-${i}`)), + }) + } + expect(callCount).toBe(10) + + // Load all data (no WHERE clause) — should track as "all data loaded" + deduplicated.loadSubset({}) + expect(callCount).toBe(11) + + // Subsequent requests should be deduplicated + const result1 = deduplicated.loadSubset({ + where: eq(ref(`task_id`), val(`uuid-999`)), + }) + expect(result1).toBe(true) + expect(callCount).toBe(11) + + const result2 = deduplicated.loadSubset({}) + expect(result2).toBe(true) + expect(callCount).toBe(11) + }) + + it(`should handle multiple all-data loads without expression growth`, async () => { + let callCount = 0 + const mockLoadSubset = () => { + callCount++ + return Promise.resolve() + } + + const deduplicated = new DeduplicatedLoadSubset({ + loadSubset: mockLoadSubset, + }) + + // First: load some specific data + await deduplicated.loadSubset({ + where: eq(ref(`task_id`), val(`uuid-1`)), + }) + expect(callCount).toBe(1) + + // Load all data (first time) + await deduplicated.loadSubset({}) + expect(callCount).toBe(2) + + // Load all data (second time) - should be deduplicated since we already have everything + const result = await deduplicated.loadSubset({}) + expect(result).toBe(true) + expect(callCount).toBe(2) // No additional call - all data already loaded }) it(`should handle multiple overlapping unlimited calls`, async () => { diff --git a/packages/db/tests/query/validate-aliases.test.ts b/packages/db/tests/query/validate-aliases.test.ts index b4c29da38..e09635de9 100644 --- a/packages/db/tests/query/validate-aliases.test.ts +++ b/packages/db/tests/query/validate-aliases.test.ts @@ -74,7 +74,7 @@ describe(`Alias validation in subqueries`, () => { ) .select(({ vote, lock 
}) => ({ voteId: vote._id, - lockName: lock!.lockName, + lockName: lock.lockName, })) }, }) @@ -103,7 +103,7 @@ describe(`Alias validation in subqueries`, () => { ) .select(({ vote, lock }) => ({ voteId: vote._id, - lockName: lock!.lockName, + lockName: lock.lockName, })) }, }) diff --git a/packages/electric-db-collection/CHANGELOG.md b/packages/electric-db-collection/CHANGELOG.md index b85f1e5cb..9975e0d95 100644 --- a/packages/electric-db-collection/CHANGELOG.md +++ b/packages/electric-db-collection/CHANGELOG.md @@ -1,5 +1,27 @@ # @tanstack/electric-db-collection +## 0.2.40 + +### Patch Changes + +- Updated dependencies [[`eeb5321`](https://github.com/TanStack/db/commit/eeb5321c578ffa2fbdfb7b0b3d64f579d1933522), [`495abc2`](https://github.com/TanStack/db/commit/495abc29fe8c088783b43402c7eeed35566d8524), [`a55e2bf`](https://github.com/TanStack/db/commit/a55e2bf54dbe78128adf5ce26d524a13dedf8145), [`41c0ea2`](https://github.com/TanStack/db/commit/41c0ea2d956f9de37d0216af371f58a461be6f1f)]: + - @tanstack/db@0.5.32 + +## 0.2.39 + +### Patch Changes + +- fix(electric-db-collection): Upgrade to latest electric client version ([#1337](https://github.com/TanStack/db/pull/1337)) + +## 0.2.38 + +### Patch Changes + +- fix(electric-db-collection): Upgrade to latest electric client version ([#1321](https://github.com/TanStack/db/pull/1321)) + +- Updated dependencies [[`bf1d078`](https://github.com/TanStack/db/commit/bf1d078627de150bfca02e2ae2ad8b0289c19b37)]: + - @tanstack/db@0.5.31 + ## 0.2.37 ### Patch Changes diff --git a/packages/electric-db-collection/package.json b/packages/electric-db-collection/package.json index 9acb0d6ca..e2387cf03 100644 --- a/packages/electric-db-collection/package.json +++ b/packages/electric-db-collection/package.json @@ -1,6 +1,6 @@ { "name": "@tanstack/electric-db-collection", - "version": "0.2.37", + "version": "0.2.40", "description": "ElectricSQL collection for TanStack DB", "author": "Kyle Mathews", "license": "MIT", @@ -46,7 +46,7 @@ 
"src" ], "dependencies": { - "@electric-sql/client": "^1.5.10", + "@electric-sql/client": "^1.5.12", "@standard-schema/spec": "^1.1.0", "@tanstack/db": "workspace:*", "@tanstack/store": "^0.8.0", diff --git a/packages/electric-db-collection/tests/electric.test.ts b/packages/electric-db-collection/tests/electric.test.ts index 8ed73a863..deff48991 100644 --- a/packages/electric-db-collection/tests/electric.test.ts +++ b/packages/electric-db-collection/tests/electric.test.ts @@ -2439,8 +2439,8 @@ describe(`Electric Integration`, () => { const testCollection = createCollection(electricCollectionOptions(config)) mockStream.isUpToDate = true - mockForceDisconnectAndRefresh.mockImplementationOnce(async () => { - throw new Error(`PauseLock held`) + mockForceDisconnectAndRefresh.mockImplementationOnce(() => { + return Promise.reject(new Error(`PauseLock held`)) }) await testCollection._sync.loadSubset({ limit: 10 }) diff --git a/packages/offline-transactions/CHANGELOG.md b/packages/offline-transactions/CHANGELOG.md index af315b024..5768ace4a 100644 --- a/packages/offline-transactions/CHANGELOG.md +++ b/packages/offline-transactions/CHANGELOG.md @@ -1,5 +1,21 @@ # @tanstack/offline-transactions +## 1.0.23 + +### Patch Changes + +- Updated dependencies [[`eeb5321`](https://github.com/TanStack/db/commit/eeb5321c578ffa2fbdfb7b0b3d64f579d1933522), [`495abc2`](https://github.com/TanStack/db/commit/495abc29fe8c088783b43402c7eeed35566d8524), [`a55e2bf`](https://github.com/TanStack/db/commit/a55e2bf54dbe78128adf5ce26d524a13dedf8145), [`41c0ea2`](https://github.com/TanStack/db/commit/41c0ea2d956f9de37d0216af371f58a461be6f1f)]: + - @tanstack/db@0.5.32 + +## 1.0.22 + +### Patch Changes + +- Add Intent agent skills (SKILL.md files) to guide AI coding agents. Include skills for core DB concepts, all 5 framework bindings, meta-framework integration, and offline transactions. Also add `export * from '@tanstack/db'` to angular-db for consistency with other framework packages. 
([#1330](https://github.com/TanStack/db/pull/1330)) + +- Updated dependencies [[`bf1d078`](https://github.com/TanStack/db/commit/bf1d078627de150bfca02e2ae2ad8b0289c19b37)]: + - @tanstack/db@0.5.31 + ## 1.0.21 ### Patch Changes diff --git a/packages/offline-transactions/package.json b/packages/offline-transactions/package.json index 62b78ac13..b3e021cc6 100644 --- a/packages/offline-transactions/package.json +++ b/packages/offline-transactions/package.json @@ -1,6 +1,6 @@ { "name": "@tanstack/offline-transactions", - "version": "1.0.21", + "version": "1.0.23", "description": "Offline-first transaction capabilities for TanStack DB", "author": "TanStack", "license": "MIT", @@ -55,7 +55,8 @@ "sideEffects": false, "files": [ "dist", - "src" + "src", + "skills" ], "dependencies": { "@tanstack/db": "workspace:*" diff --git a/packages/offline-transactions/skills/offline/SKILL.md b/packages/offline-transactions/skills/offline/SKILL.md new file mode 100644 index 000000000..542c06572 --- /dev/null +++ b/packages/offline-transactions/skills/offline/SKILL.md @@ -0,0 +1,356 @@ +--- +name: offline +description: > + Offline transaction support for TanStack DB. OfflineExecutor orchestrates + persistent outbox (IndexedDB/localStorage), leader election (WebLocks/ + BroadcastChannel), retry with backoff, and connectivity detection. + createOfflineTransaction/createOfflineAction wrap TanStack DB primitives + with offline persistence. Idempotency keys for at-least-once delivery. + Graceful degradation to online-only mode when storage unavailable. + React Native support via separate entry point. +type: composition +library: db +library_version: '0.5.30' +requires: + - db-core + - db-core/mutations-optimistic +sources: + - 'TanStack/db:packages/offline-transactions/src/OfflineExecutor.ts' + - 'TanStack/db:packages/offline-transactions/src/types.ts' + - 'TanStack/db:packages/offline-transactions/src/index.ts' +--- + +This skill builds on db-core and mutations-optimistic. Read those first. 
+ +# TanStack DB — Offline Transactions + +## Setup + +```ts +import { + startOfflineExecutor, + IndexedDBAdapter, +} from '@tanstack/offline-transactions' +import { todoCollection } from './collections' + +const executor = startOfflineExecutor({ + collections: { todos: todoCollection }, + mutationFns: { + createTodo: async ({ transaction, idempotencyKey }) => { + const mutation = transaction.mutations[0] + await api.todos.create({ + ...mutation.modified, + idempotencyKey, + }) + }, + updateTodo: async ({ transaction, idempotencyKey }) => { + const mutation = transaction.mutations[0] + await api.todos.update(mutation.key, { + ...mutation.changes, + idempotencyKey, + }) + }, + }, +}) + +// Wait for initialization (storage probe, leader election, outbox replay) +await executor.waitForInit() +``` + +## Core API + +### createOfflineTransaction + +```ts +const tx = executor.createOfflineTransaction({ + mutationFnName: 'createTodo', +}) + +// Mutations run inside tx.mutate() — uses ambient transaction context +tx.mutate(() => { + todoCollection.insert({ id: crypto.randomUUID(), text: 'New todo' }) +}) +tx.commit() +``` + +If the executor is not the leader tab, falls back to `createTransaction` directly (no offline persistence). + +### createOfflineAction + +```ts +const addTodo = executor.createOfflineAction({ + mutationFnName: 'createTodo', + onMutate: (variables) => { + todoCollection.insert({ + id: crypto.randomUUID(), + text: variables.text, + }) + }, +}) + +// Call it +addTodo({ text: 'Buy milk' }) +``` + +If the executor is not the leader tab, falls back to `createOptimisticAction` directly. 
+ +## Architecture + +### Components + +| Component | Purpose | Default | +| ----------------------- | ------------------------------------------- | --------------------------------- | +| **Storage** | Persist transactions to survive page reload | IndexedDB → localStorage fallback | +| **OutboxManager** | FIFO queue of pending transactions | Automatic | +| **KeyScheduler** | Serialize transactions touching same keys | Automatic | +| **TransactionExecutor** | Execute with retry + backoff | Automatic | +| **LeaderElection** | Only one tab processes the outbox | WebLocks → BroadcastChannel | +| **OnlineDetector** | Pause/resume on connectivity changes | navigator.onLine + events | + +### Transaction lifecycle + +1. Mutation applied optimistically to collection (instant UI update) +2. Transaction serialized and persisted to storage (outbox) +3. Leader tab picks up transaction and executes `mutationFn` +4. On success: removed from outbox, optimistic state resolved +5. On failure: retried with exponential backoff +6. On page reload: outbox replayed, optimistic state restored + +### Leader election + +Only one tab processes the outbox to prevent duplicate execution. Non-leader tabs use regular `createTransaction`/`createOptimisticAction` (online-only, no persistence). + +```ts +const executor = startOfflineExecutor({ + // ... + onLeadershipChange: (isLeader) => { + console.log( + isLeader + ? 'This tab is processing offline transactions' + : 'Another tab is leader', + ) + }, +}) + +executor.isOfflineEnabled // true only if leader AND storage available +``` + +### Storage degradation + +The executor probes storage availability on startup: + +```ts +const executor = startOfflineExecutor({ + // ... 
+ onStorageFailure: (diagnostic) => { + // diagnostic.code: 'STORAGE_BLOCKED' | 'QUOTA_EXCEEDED' | 'UNKNOWN_ERROR' + // diagnostic.mode: 'online-only' + console.warn(diagnostic.message) + }, +}) + +executor.mode // 'offline' | 'online-only' +executor.storageDiagnostic // Full diagnostic info +``` + +When storage is unavailable (private browsing, quota exceeded), the executor operates in online-only mode — mutations work normally but aren't persisted across page reloads. + +## Configuration + +```ts +interface OfflineConfig { + collections: Record // Collections for optimistic state restoration + mutationFns: Record // Named mutation functions + storage?: StorageAdapter // Custom storage (default: auto-detect) + maxConcurrency?: number // Parallel execution limit + jitter?: boolean // Add jitter to retry delays + beforeRetry?: (txs) => txs // Transform/filter before retry + onUnknownMutationFn?: (name, tx) => void // Handle orphaned transactions + onLeadershipChange?: (isLeader) => void // Leadership state callback + onStorageFailure?: (diagnostic) => void // Storage probe failure callback + leaderElection?: LeaderElection // Custom leader election + onlineDetector?: OnlineDetector // Custom connectivity detection +} +``` + +### Custom storage adapter + +```ts +interface StorageAdapter { + get: (key: string) => Promise + set: (key: string, value: string) => Promise + delete: (key: string) => Promise + keys: () => Promise> + clear: () => Promise +} +``` + +## Error Handling + +### NonRetriableError + +```ts +import { NonRetriableError } from '@tanstack/offline-transactions' + +const executor = startOfflineExecutor({ + mutationFns: { + createTodo: async ({ transaction, idempotencyKey }) => { + const res = await fetch('/api/todos', { method: 'POST', body: ... 
}) + if (res.status === 409) { + throw new NonRetriableError('Duplicate detected') + } + if (!res.ok) throw new Error('Server error') + }, + }, +}) +``` + +Throwing `NonRetriableError` stops retry and removes the transaction from the outbox. Use for permanent failures (validation errors, conflicts, 4xx responses). + +### Idempotency keys + +Every offline transaction includes an `idempotencyKey`. Pass it to your API to prevent duplicate execution on retry: + +```ts +mutationFns: { + createTodo: async ({ transaction, idempotencyKey }) => { + await fetch('/api/todos', { + method: 'POST', + headers: { 'Idempotency-Key': idempotencyKey }, + body: JSON.stringify(transaction.mutations[0].modified), + }) + }, +} +``` + +## React Native + +```ts +import { + startOfflineExecutor, +} from '@tanstack/offline-transactions/react-native' + +// Uses ReactNativeOnlineDetector automatically +// Uses AsyncStorage-compatible storage +const executor = startOfflineExecutor({ ... }) +``` + +## Outbox Management + +```ts +// Inspect pending transactions +const pending = await executor.peekOutbox() + +// Get counts +executor.getPendingCount() // Queued transactions +executor.getRunningCount() // Currently executing + +// Clear all pending transactions +await executor.clearOutbox() + +// Cleanup +executor.dispose() +``` + +## Common Mistakes + +### CRITICAL Not passing idempotencyKey to the API + +Wrong: + +```ts +mutationFns: { + createTodo: async ({ transaction }) => { + await api.todos.create(transaction.mutations[0].modified) + }, +} +``` + +Correct: + +```ts +mutationFns: { + createTodo: async ({ transaction, idempotencyKey }) => { + await api.todos.create({ + ...transaction.mutations[0].modified, + idempotencyKey, + }) + }, +} +``` + +Offline transactions retry on failure. Without idempotency keys, retries can create duplicate records on the server. + +### HIGH Not waiting for initialization + +Wrong: + +```ts +const executor = startOfflineExecutor({ ... 
}) +const tx = executor.createOfflineTransaction({ mutationFnName: 'createTodo' }) +``` + +Correct: + +```ts +const executor = startOfflineExecutor({ ... }) +await executor.waitForInit() +const tx = executor.createOfflineTransaction({ mutationFnName: 'createTodo' }) +``` + +`startOfflineExecutor` initializes asynchronously (probes storage, requests leadership, replays outbox). Creating transactions before initialization completes may miss the leader election result and use the wrong code path. + +### HIGH Missing collection in collections map + +Wrong: + +```ts +const executor = startOfflineExecutor({ + collections: {}, + mutationFns: { createTodo: ... }, +}) +``` + +Correct: + +```ts +const executor = startOfflineExecutor({ + collections: { todos: todoCollection }, + mutationFns: { createTodo: ... }, +}) +``` + +The `collections` map is used to restore optimistic state from the outbox on page reload. Without it, previously pending mutations won't show their optimistic state while being replayed. + +### MEDIUM Not handling NonRetriableError for permanent failures + +Wrong: + +```ts +mutationFns: { + createTodo: async ({ transaction }) => { + const res = await fetch('/api/todos', { ... }) + if (!res.ok) throw new Error('Failed') + }, +} +``` + +Correct: + +```ts +mutationFns: { + createTodo: async ({ transaction }) => { + const res = await fetch('/api/todos', { ... }) + if (res.status >= 400 && res.status < 500) { + throw new NonRetriableError(`Client error: ${res.status}`) + } + if (!res.ok) throw new Error('Server error') + }, +} +``` + +Without distinguishing retriable from permanent errors, 4xx responses (validation, auth, not found) will retry forever until max retries, wasting resources and filling logs. + +See also: db-core/mutations-optimistic/SKILL.md — for the underlying mutation primitives. + +See also: db-core/collection-setup/SKILL.md — for setting up collections used with offline transactions. 
diff --git a/packages/offline-transactions/src/outbox/OutboxManager.ts b/packages/offline-transactions/src/outbox/OutboxManager.ts index bb2d8dda4..b2f1faca9 100644 --- a/packages/offline-transactions/src/outbox/OutboxManager.ts +++ b/packages/offline-transactions/src/outbox/OutboxManager.ts @@ -10,7 +10,7 @@ export class OutboxManager { constructor( storage: StorageAdapter, - // eslint-disable-next-line @typescript-eslint/no-explicit-any + collections: Record>, ) { this.storage = storage diff --git a/packages/offline-transactions/tests/leader-failover.test.ts b/packages/offline-transactions/tests/leader-failover.test.ts index dd041f5fa..e8a9a9a8c 100644 --- a/packages/offline-transactions/tests/leader-failover.test.ts +++ b/packages/offline-transactions/tests/leader-failover.test.ts @@ -430,7 +430,7 @@ describe(`leader failover`, () => { private listeners = new Set<(isLeader: boolean) => void>() private leader = false - async requestLeadership(): Promise { + requestLeadership(): Promise { // Simulate: lock is available, will return true immediately // but the actual lock acquisition (and callback) happens async setTimeout(() => { @@ -441,7 +441,7 @@ describe(`leader failover`, () => { } }, 10) - return true // Returns immediately before callback fires + return Promise.resolve(true) // Returns immediately before callback fires } releaseLeadership(): void { @@ -480,13 +480,13 @@ describe(`leader failover`, () => { let replayCount = 0 const env = createTestOfflineEnvironment({ storage: sharedStorage, - mutationFn: async (params) => { + mutationFn: (params) => { replayCount++ const mutations = params.transaction.mutations as Array< PendingMutation > env.applyMutations(mutations) - return { ok: true, mutations } + return Promise.resolve({ ok: true, mutations }) }, config: { leaderElection: new AsyncLeaderElection(), diff --git a/packages/powersync-db-collection/CHANGELOG.md b/packages/powersync-db-collection/CHANGELOG.md index fa5474d8d..01a14d10d 100644 --- 
a/packages/powersync-db-collection/CHANGELOG.md +++ b/packages/powersync-db-collection/CHANGELOG.md @@ -1,5 +1,19 @@ # @tanstack/powersync-db-collection +## 0.1.36 + +### Patch Changes + +- Updated dependencies [[`eeb5321`](https://github.com/TanStack/db/commit/eeb5321c578ffa2fbdfb7b0b3d64f579d1933522), [`495abc2`](https://github.com/TanStack/db/commit/495abc29fe8c088783b43402c7eeed35566d8524), [`a55e2bf`](https://github.com/TanStack/db/commit/a55e2bf54dbe78128adf5ce26d524a13dedf8145), [`41c0ea2`](https://github.com/TanStack/db/commit/41c0ea2d956f9de37d0216af371f58a461be6f1f)]: + - @tanstack/db@0.5.32 + +## 0.1.35 + +### Patch Changes + +- Updated dependencies [[`bf1d078`](https://github.com/TanStack/db/commit/bf1d078627de150bfca02e2ae2ad8b0289c19b37)]: + - @tanstack/db@0.5.31 + ## 0.1.34 ### Patch Changes diff --git a/packages/powersync-db-collection/package.json b/packages/powersync-db-collection/package.json index b984598e3..fbf4de800 100644 --- a/packages/powersync-db-collection/package.json +++ b/packages/powersync-db-collection/package.json @@ -1,6 +1,6 @@ { "name": "@tanstack/powersync-db-collection", - "version": "0.1.34", + "version": "0.1.36", "description": "PowerSync collection for TanStack DB", "author": "POWERSYNC", "license": "MIT", diff --git a/packages/powersync-db-collection/src/PowerSyncTransactor.ts b/packages/powersync-db-collection/src/PowerSyncTransactor.ts index a45a4c751..2542b6d54 100644 --- a/packages/powersync-db-collection/src/PowerSyncTransactor.ts +++ b/packages/powersync-db-collection/src/PowerSyncTransactor.ts @@ -160,6 +160,7 @@ export class PowerSyncTransactor { mutation, context, waitForCompletion, + // eslint-disable-next-line no-shadow async (tableName, mutation, serializeValue) => { const values = serializeValue(mutation.modified) const keys = Object.keys(values).map((key) => sanitizeSQL`${key}`) @@ -173,9 +174,9 @@ export class PowerSyncTransactor { await context.execute( ` - INSERT into ${tableName} - (${keys.join(`, `)}) - 
VALUES + INSERT into ${tableName} + (${keys.join(`, `)}) + VALUES (${keys.map((_) => `?`).join(`, `)}) `, queryParameters, @@ -195,6 +196,7 @@ export class PowerSyncTransactor { mutation, context, waitForCompletion, + // eslint-disable-next-line no-shadow async (tableName, mutation, serializeValue) => { const values = serializeValue(mutation.modified) const keys = Object.keys(values).map((key) => sanitizeSQL`${key}`) @@ -208,7 +210,7 @@ export class PowerSyncTransactor { await context.execute( ` - UPDATE ${tableName} + UPDATE ${tableName} SET ${keys.map((key) => `${key} = ?`).join(`, `)} WHERE id = ? `, @@ -229,6 +231,7 @@ export class PowerSyncTransactor { mutation, context, waitForCompletion, + // eslint-disable-next-line no-shadow async (tableName, mutation) => { const metadataValue = this.processMutationMetadata(mutation) if (metadataValue != null) { diff --git a/packages/powersync-db-collection/src/serialization.ts b/packages/powersync-db-collection/src/serialization.ts index a3719c5f8..f25815f80 100644 --- a/packages/powersync-db-collection/src/serialization.ts +++ b/packages/powersync-db-collection/src/serialization.ts @@ -51,6 +51,7 @@ export function serializeForSQLite< > = {}, ): ExtractedTable { return Object.fromEntries( + // eslint-disable-next-line no-shadow Object.entries(value).map(([key, value]) => { // First get the output schema type const outputType = diff --git a/packages/query-db-collection/CHANGELOG.md b/packages/query-db-collection/CHANGELOG.md index d64d83501..158acff87 100644 --- a/packages/query-db-collection/CHANGELOG.md +++ b/packages/query-db-collection/CHANGELOG.md @@ -1,5 +1,19 @@ # @tanstack/query-db-collection +## 1.0.29 + +### Patch Changes + +- Updated dependencies [[`eeb5321`](https://github.com/TanStack/db/commit/eeb5321c578ffa2fbdfb7b0b3d64f579d1933522), [`495abc2`](https://github.com/TanStack/db/commit/495abc29fe8c088783b43402c7eeed35566d8524), 
[`a55e2bf`](https://github.com/TanStack/db/commit/a55e2bf54dbe78128adf5ce26d524a13dedf8145), [`41c0ea2`](https://github.com/TanStack/db/commit/41c0ea2d956f9de37d0216af371f58a461be6f1f)]: + - @tanstack/db@0.5.32 + +## 1.0.28 + +### Patch Changes + +- Updated dependencies [[`bf1d078`](https://github.com/TanStack/db/commit/bf1d078627de150bfca02e2ae2ad8b0289c19b37)]: + - @tanstack/db@0.5.31 + ## 1.0.27 ### Patch Changes diff --git a/packages/query-db-collection/package.json b/packages/query-db-collection/package.json index 73b46ac12..6cab76ef6 100644 --- a/packages/query-db-collection/package.json +++ b/packages/query-db-collection/package.json @@ -1,6 +1,6 @@ { "name": "@tanstack/query-db-collection", - "version": "1.0.27", + "version": "1.0.29", "description": "TanStack Query collection for TanStack DB", "author": "Kyle Mathews", "license": "MIT", diff --git a/packages/query-db-collection/src/query.ts b/packages/query-db-collection/src/query.ts index c5feecbdf..7bc8f532b 100644 --- a/packages/query-db-collection/src/query.ts +++ b/packages/query-db-collection/src/query.ts @@ -803,6 +803,7 @@ export function queryCollectionOptions( type UpdateHandler = Parameters[0] + // eslint-disable-next-line no-shadow const makeQueryResultHandler = (queryKey: QueryKey) => { const hashedQueryKey = hashKey(queryKey) const handleQueryResult: UpdateHandler = (result) => { diff --git a/packages/query-db-collection/tests/query.test-d.ts b/packages/query-db-collection/tests/query.test-d.ts index 5722445b0..68b3696ec 100644 --- a/packages/query-db-collection/tests/query.test-d.ts +++ b/packages/query-db-collection/tests/query.test-d.ts @@ -1,3 +1,4 @@ +/* eslint-disable @typescript-eslint/require-await */ import { describe, expectTypeOf, it } from 'vitest' import { and, diff --git a/packages/query-db-collection/tests/query.test.ts b/packages/query-db-collection/tests/query.test.ts index 0a623de61..5c8a8d19a 100644 --- a/packages/query-db-collection/tests/query.test.ts +++ 
b/packages/query-db-collection/tests/query.test.ts @@ -2301,8 +2301,8 @@ describe(`QueryCollection`, () => { const serverBrands: Array = [{ id: `123`, brandName: `A` }] - const queryFn = vi.fn().mockImplementation(async () => { - return [...serverBrands] + const queryFn = vi.fn().mockImplementation(() => { + return Promise.resolve([...serverBrands]) }) // Track syncedData state immediately after writeUpsert @@ -3281,11 +3281,11 @@ describe(`QueryCollection`, () => { const queryFn = vi.fn().mockResolvedValue(items) - const onDelete = vi.fn(async ({ transaction, collection }) => { + const onDelete = vi.fn(({ transaction, collection }) => { const deletedItem = transaction.mutations[0]?.original // Call writeDelete inside onDelete handler - this should work without throwing collection.utils.writeDelete(deletedItem.id) - return { refetch: false } + return Promise.resolve({ refetch: false }) }) const config: QueryCollectionConfig = { @@ -4197,7 +4197,7 @@ describe(`QueryCollection`, () => { getKey: (item) => item.id, startSync: true, syncMode: `on-demand`, - onInsert: async () => ({ refetch: false }), + onInsert: () => Promise.resolve({ refetch: false }), } const options = queryCollectionOptions(config) diff --git a/packages/react-db/CHANGELOG.md b/packages/react-db/CHANGELOG.md index c727e87ff..13dde677c 100644 --- a/packages/react-db/CHANGELOG.md +++ b/packages/react-db/CHANGELOG.md @@ -1,5 +1,25 @@ # @tanstack/react-db +## 0.1.76 + +### Patch Changes + +- Updated dependencies [[`eeb5321`](https://github.com/TanStack/db/commit/eeb5321c578ffa2fbdfb7b0b3d64f579d1933522), [`495abc2`](https://github.com/TanStack/db/commit/495abc29fe8c088783b43402c7eeed35566d8524), [`a55e2bf`](https://github.com/TanStack/db/commit/a55e2bf54dbe78128adf5ce26d524a13dedf8145), [`41c0ea2`](https://github.com/TanStack/db/commit/41c0ea2d956f9de37d0216af371f58a461be6f1f)]: + - @tanstack/db@0.5.32 + +## 0.1.75 + +### Patch Changes + +- Add Intent agent skills (SKILL.md files) to guide AI coding 
agents. Include skills for core DB concepts, all 5 framework bindings, meta-framework integration, and offline transactions. Also add `export * from '@tanstack/db'` to angular-db for consistency with other framework packages. ([#1330](https://github.com/TanStack/db/pull/1330)) + +- fix(react-db): make getNextPageParam optional in useLiveInfiniteQuery ([#1268](https://github.com/TanStack/db/pull/1268)) + +- fix(react-db): handle rejected/stale setWindow promises in useLiveInfiniteQuery ([#1269](https://github.com/TanStack/db/pull/1269)) + +- Updated dependencies [[`bf1d078`](https://github.com/TanStack/db/commit/bf1d078627de150bfca02e2ae2ad8b0289c19b37)]: + - @tanstack/db@0.5.31 + ## 0.1.74 ### Patch Changes diff --git a/packages/react-db/package.json b/packages/react-db/package.json index c38f73fe1..f98c54343 100644 --- a/packages/react-db/package.json +++ b/packages/react-db/package.json @@ -1,6 +1,6 @@ { "name": "@tanstack/react-db", - "version": "0.1.74", + "version": "0.1.76", "description": "React integration for @tanstack/db", "author": "Kyle Mathews", "license": "MIT", @@ -42,7 +42,8 @@ "sideEffects": false, "files": [ "dist", - "src" + "src", + "skills" ], "dependencies": { "@tanstack/db": "workspace:*", @@ -52,7 +53,7 @@ "react": ">=16.8.0" }, "devDependencies": { - "@electric-sql/client": "^1.5.10", + "@electric-sql/client": "^1.5.12", "@testing-library/react": "^16.3.2", "@types/react": "^19.2.13", "@types/react-dom": "^19.2.3", diff --git a/packages/react-db/skills/react-db/SKILL.md b/packages/react-db/skills/react-db/SKILL.md new file mode 100644 index 000000000..813b14145 --- /dev/null +++ b/packages/react-db/skills/react-db/SKILL.md @@ -0,0 +1,317 @@ +--- +name: react-db +description: > + React bindings for TanStack DB. useLiveQuery hook with dependency arrays + (8 overloads: query function, config object, pre-created collection, + disabled state via returning undefined/null). 
useLiveSuspenseQuery for + React Suspense with Error Boundaries (data always defined). + useLiveInfiniteQuery for cursor-based pagination (pageSize, fetchNextPage, + hasNextPage, isFetchingNextPage). usePacedMutations for debounced React + state updates. Return shape: data, state, collection, status, isLoading, + isReady, isError. Import from @tanstack/react-db (re-exports all of + @tanstack/db). +type: framework +library: db +framework: react +library_version: '0.5.30' +requires: + - db-core +sources: + - 'TanStack/db:docs/framework/react/overview.md' + - 'TanStack/db:docs/guides/live-queries.md' + - 'TanStack/db:packages/react-db/src/useLiveQuery.ts' + - 'TanStack/db:packages/react-db/src/useLiveInfiniteQuery.ts' +--- + +This skill builds on db-core. Read it first for collection setup, query builder, and mutation patterns. + +# TanStack DB — React + +## Setup + +```tsx +import { useLiveQuery, eq, not } from '@tanstack/react-db' + +function TodoList() { + const { data: todos, isLoading } = useLiveQuery((q) => + q + .from({ todo: todoCollection }) + .where(({ todo }) => not(todo.completed)) + .orderBy(({ todo }) => todo.created_at, 'asc'), + ) + + if (isLoading) return
    <div>Loading...</div>

  return (
    <ul>
      {todos.map((todo) => (
        <li key={todo.id}>{todo.text}</li>
      ))}
    </ul>
    + ) +} +``` + +`@tanstack/react-db` re-exports everything from `@tanstack/db`. In React projects, import everything from `@tanstack/react-db`. + +## Hooks + +### useLiveQuery + +```tsx +// Query function with dependency array +const { + data, + state, + collection, + status, + isLoading, + isReady, + isError, + isIdle, + isCleanedUp, +} = useLiveQuery( + (q) => + q + .from({ todo: todoCollection }) + .where(({ todo }) => eq(todo.userId, userId)), + [userId], +) + +// Config object +const { data } = useLiveQuery({ + query: (q) => q.from({ todo: todoCollection }), + gcTime: 60000, +}) + +// Pre-created collection (from route loader) +const { data } = useLiveQuery(preloadedCollection) + +// Conditional query — return undefined/null to disable +const { data, status } = useLiveQuery( + (q) => { + if (!userId) return undefined + return q + .from({ todo: todoCollection }) + .where(({ todo }) => eq(todo.userId, userId)) + }, + [userId], +) +// When disabled: status='disabled', data=undefined +``` + +### useLiveSuspenseQuery + +```tsx +// data is ALWAYS defined — never undefined +// Must wrap in and +function TodoList() { + const { data: todos } = useLiveSuspenseQuery((q) => + q.from({ todo: todoCollection }), + ) + + return ( +
      <ul>
        {todos.map((t) => (
          <li key={t.id}>{t.text}</li>
        ))}
      </ul>
    + ) +} + +// With deps — re-suspends when deps change +const { data } = useLiveSuspenseQuery( + (q) => + q + .from({ todo: todoCollection }) + .where(({ todo }) => eq(todo.category, category)), + [category], +) +``` + +### useLiveInfiniteQuery + +```tsx +const { data, fetchNextPage, hasNextPage, isFetchingNextPage } = + useLiveInfiniteQuery( + (q) => + q + .from({ posts: postsCollection }) + .orderBy(({ posts }) => posts.createdAt, 'desc'), + { pageSize: 20 }, + [category], + ) + +// data is the flat array of all loaded pages +// fetchNextPage() loads the next page +// hasNextPage is true when more data is available +``` + +### usePacedMutations + +```tsx +import { usePacedMutations, debounceStrategy } from "@tanstack/react-db" + +const mutate = usePacedMutations({ + onMutate: (value: string) => { + noteCollection.update(noteId, (draft) => { + draft.content = value + }) + }, + mutationFn: async ({ transaction }) => { + await api.notes.update(noteId, transaction.mutations[0].changes) + }, + strategy: debounceStrategy({ wait: 500 }), +}) + +// In handler: +