diff --git a/examples/nextjs/todo/README.md b/examples/nextjs/todo/README.md new file mode 100644 index 00000000..3f317b78 --- /dev/null +++ b/examples/nextjs/todo/README.md @@ -0,0 +1,48 @@ +# Todo Example App - Next.js + +A todo application built with Next.js 15 (App Router), TanStack DB, and TypeScript. + +## Features + +- **Next.js 15** with App Router +- **TanStack DB** with both Electric and Query collections +- **Real-time updates** with Electric SQL +- **Optimistic mutations** for instant UI feedback +- **TypeScript** for type safety +- **Tailwind CSS** for styling +- **PostgreSQL** database with Drizzle ORM + +## How to run + +- Go to the root of the repository and run: + + - `pnpm install` + - `pnpm build` + +- Install packages + `pnpm install` + +- Start dev server & Docker containers + `pnpm dev` + +- Run db migrations + `pnpm db:push` + +## Architecture + +This example demonstrates the same functionality as the React version but using Next.js App Router: + +- **App Router**: Uses the latest Next.js app directory structure +- **Client Components**: The main todo interface is a client component for interactivity +- **Server Components**: Layout and other components use server components where possible +- **API Routes**: Express server runs separately for database operations +- **Real-time sync**: Electric SQL provides real-time database synchronization + +## Collection Types + +The app supports two collection types: + +1. **Query Collections**: Traditional API-based data fetching with polling +2. **Electric Collections**: Real-time streaming updates via Electric SQL + +You can switch between collection types using the toggle in the UI to see the difference in behavior. \ No newline at end of file diff --git a/examples/nextjs/todo/docker-compose.yml b/examples/nextjs/todo/docker-compose.yml new file mode 100644 index 00000000..5d6ece8c --- /dev/null +++ b/examples/nextjs/todo/docker-compose.yml @@ -0,0 +1,38 @@ +version: "3.8" +services: + postgres: + image: postgres:17-alpine + environment: + POSTGRES_DB: todo_app + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + ports: + - "54322:5432" + volumes: + - ./postgres.conf:/etc/postgresql/postgresql.conf:ro + tmpfs: + - /var/lib/postgresql/data + - /tmp + command: + - postgres + - -c + - config_file=/etc/postgresql/postgresql.conf + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 5s + timeout: 5s + retries: 5 + + electric: + image: electricsql/electric:canary + environment: + DATABASE_URL: postgresql://postgres:postgres@postgres:5432/todo_app?sslmode=disable + ELECTRIC_INSECURE: true + ports: + - 3003:3000 + depends_on: + postgres: + condition: service_healthy + +volumes: + postgres_data: diff --git a/examples/nextjs/todo/drizzle.config.ts b/examples/nextjs/todo/drizzle.config.ts new file mode 100644 index 00000000..55063343 --- /dev/null +++ b/examples/nextjs/todo/drizzle.config.ts @@ -0,0 +1,15 @@ +import type { Config } from "drizzle-kit" + +export default { + schema: `./src/db/schema.ts`, + out: `./drizzle`, + dialect: `postgresql`, + casing: `snake_case`, + dbCredentials: { + host: process.env.DB_HOST || `localhost`, + port: parseInt(process.env.DB_PORT || `54322`), + user: process.env.DB_USER || `postgres`, + password: process.env.DB_PASSWORD || `postgres`, + database: process.env.DB_NAME || `todo_app`, + }, +} satisfies Config diff --git a/examples/nextjs/todo/drizzle/0000_whole_sprite.sql b/examples/nextjs/todo/drizzle/0000_whole_sprite.sql new file mode 100644 index 00000000..8223fef7 --- 
/dev/null +++ b/examples/nextjs/todo/drizzle/0000_whole_sprite.sql @@ -0,0 +1,7 @@ +CREATE TABLE "todos" ( + "id" serial PRIMARY KEY NOT NULL, + "text" text NOT NULL, + "completed" boolean DEFAULT false NOT NULL, + "created_at" timestamp with time zone DEFAULT now() NOT NULL, + "updated_at" timestamp with time zone DEFAULT now() NOT NULL +); diff --git a/examples/nextjs/todo/drizzle/0001_sturdy_titania.sql b/examples/nextjs/todo/drizzle/0001_sturdy_titania.sql new file mode 100644 index 00000000..fefdf679 --- /dev/null +++ b/examples/nextjs/todo/drizzle/0001_sturdy_titania.sql @@ -0,0 +1,8 @@ +CREATE TABLE "config" ( + "id" serial PRIMARY KEY NOT NULL, + "key" text NOT NULL, + "value" text NOT NULL, + "created_at" timestamp with time zone DEFAULT now() NOT NULL, + "updated_at" timestamp with time zone DEFAULT now() NOT NULL, + CONSTRAINT "config_key_unique" UNIQUE("key") +); diff --git a/examples/nextjs/todo/drizzle/0002_update_timestamps_trigger.sql b/examples/nextjs/todo/drizzle/0002_update_timestamps_trigger.sql new file mode 100644 index 00000000..3e39c477 --- /dev/null +++ b/examples/nextjs/todo/drizzle/0002_update_timestamps_trigger.sql @@ -0,0 +1,29 @@ +-- Custom SQL migration file, put your code below! -- + +-- Create a function to update the updated_at timestamp +CREATE OR REPLACE FUNCTION update_updated_at_column() +RETURNS TRIGGER AS $$ +BEGIN + NEW.updated_at = NOW(); + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +-- Create trigger for todos table +DROP TRIGGER IF EXISTS update_todos_updated_at ON "todos"; +CREATE TRIGGER update_todos_updated_at +BEFORE UPDATE ON "todos" +FOR EACH ROW +EXECUTE FUNCTION update_updated_at_column(); + +-- Create trigger for config table +DROP TRIGGER IF EXISTS update_config_updated_at ON "config"; +CREATE TRIGGER update_config_updated_at +BEFORE UPDATE ON "config" +FOR EACH ROW +EXECUTE FUNCTION update_updated_at_column(); + +-- Insert default config for background color +INSERT INTO "config" ("key", "value") +VALUES ('backgroundColor', '#f5f5f5') +ON CONFLICT ("key") DO NOTHING; \ No newline at end of file diff --git a/examples/nextjs/todo/drizzle/meta/0000_snapshot.json b/examples/nextjs/todo/drizzle/meta/0000_snapshot.json new file mode 100644 index 00000000..407155f7 --- /dev/null +++ b/examples/nextjs/todo/drizzle/meta/0000_snapshot.json @@ -0,0 +1,65 @@ +{ + "id": "1649b703-2b2d-413c-8185-15390a9d97e7", + "prevId": "00000000-0000-0000-0000-000000000000", + "version": "7", + "dialect": "postgresql", + "tables": { + "public.todos": { + "name": "todos", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "serial", + "primaryKey": true, + "notNull": true + }, + "text": { + "name": "text", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "completed": { + "name": "completed", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + } + }, + "enums": {}, + "schemas": {}, + "sequences": {}, + "roles": {}, + "policies": {}, + "views": {}, + "_meta": { + "columns": {}, + "schemas": {}, + "tables": {} + } +} diff --git 
a/examples/nextjs/todo/drizzle/meta/0001_snapshot.json b/examples/nextjs/todo/drizzle/meta/0001_snapshot.json new file mode 100644 index 00000000..5774acf0 --- /dev/null +++ b/examples/nextjs/todo/drizzle/meta/0001_snapshot.json @@ -0,0 +1,116 @@ +{ + "id": "1b3a815a-1865-451b-b646-983e995ee97e", + "prevId": "1649b703-2b2d-413c-8185-15390a9d97e7", + "version": "7", + "dialect": "postgresql", + "tables": { + "public.config": { + "name": "config", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "serial", + "primaryKey": true, + "notNull": true + }, + "key": { + "name": "key", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "value": { + "name": "value", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "config_key_unique": { + "name": "config_key_unique", + "nullsNotDistinct": false, + "columns": ["key"] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.todos": { + "name": "todos", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "serial", + "primaryKey": true, + "notNull": true + }, + "text": { + "name": "text", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "completed": { + "name": "completed", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + } + }, + "enums": {}, + "schemas": {}, + "sequences": {}, + "roles": {}, + "policies": {}, + "views": {}, + "_meta": { + "columns": {}, + "schemas": {}, + "tables": {} + } +} diff --git a/examples/nextjs/todo/drizzle/meta/0002_snapshot.json b/examples/nextjs/todo/drizzle/meta/0002_snapshot.json new file mode 100644 index 00000000..dde3eae1 --- /dev/null +++ b/examples/nextjs/todo/drizzle/meta/0002_snapshot.json @@ -0,0 +1,116 @@ +{ + "id": "017c3a1c-9d70-4fed-ad07-9a2e1ab49bb4", + "prevId": "1b3a815a-1865-451b-b646-983e995ee97e", + "version": "7", + "dialect": "postgresql", + "tables": { + "public.config": { + "name": "config", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "serial", + "primaryKey": true, + "notNull": true + }, + "key": { + "name": "key", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "value": { + "name": "value", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "config_key_unique": { + 
"name": "config_key_unique", + "columns": ["key"], + "nullsNotDistinct": false + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.todos": { + "name": "todos", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "serial", + "primaryKey": true, + "notNull": true + }, + "text": { + "name": "text", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "completed": { + "name": "completed", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + } + }, + "enums": {}, + "schemas": {}, + "views": {}, + "sequences": {}, + "roles": {}, + "policies": {}, + "_meta": { + "columns": {}, + "schemas": {}, + "tables": {} + } +} diff --git a/examples/nextjs/todo/drizzle/meta/_journal.json b/examples/nextjs/todo/drizzle/meta/_journal.json new file mode 100644 index 00000000..bf477dc8 --- /dev/null +++ b/examples/nextjs/todo/drizzle/meta/_journal.json @@ -0,0 +1,27 @@ +{ + "version": "7", + "dialect": "postgresql", + "entries": [ + { + "idx": 0, + "version": "7", + "when": 1740502196428, + "tag": "0000_whole_sprite", + "breakpoints": true + }, + { + "idx": 1, + "version": "7", + "when": 1740601351239, + "tag": "0001_sturdy_titania", + "breakpoints": true + }, + { + "idx": 2, + "version": "7", + "when": 1740606615508, + "tag": "0002_update_timestamps_trigger", + "breakpoints": true + } + ] +} diff --git a/examples/nextjs/todo/next-env.d.ts b/examples/nextjs/todo/next-env.d.ts new file mode 100644 index 00000000..1b3be084 --- /dev/null +++ b/examples/nextjs/todo/next-env.d.ts @@ -0,0 +1,5 @@ +/// +/// + +// NOTE: This file should not be edited +// see https://nextjs.org/docs/app/api-reference/config/typescript for more information. 
diff --git a/examples/nextjs/todo/next.config.ts b/examples/nextjs/todo/next.config.ts new file mode 100644 index 00000000..e587634c --- /dev/null +++ b/examples/nextjs/todo/next.config.ts @@ -0,0 +1,10 @@ +import type { NextConfig } from "next" + +const nextConfig: NextConfig = { + reactStrictMode: true, + experimental: { + // Enable latest Next.js features if needed + }, +} + +export default nextConfig diff --git a/examples/nextjs/todo/package.json b/examples/nextjs/todo/package.json new file mode 100644 index 00000000..1b12dd7d --- /dev/null +++ b/examples/nextjs/todo/package.json @@ -0,0 +1,55 @@ +{ + "name": "@tanstack/db-example-nextjs-todo", + "private": true, + "version": "0.0.1", + "scripts": { + "api:dev": "tsx watch src/api/server.ts", + "build": "next build", + "db:ensure-config": "tsx scripts/ensure-default-config.ts", + "db:generate": "drizzle-kit generate", + "db:push": "tsx scripts/migrate.ts", + "db:studio": "drizzle-kit studio", + "dev": "docker compose up -d && concurrently \"pnpm api:dev\" \"next dev\"", + "lint": "next lint", + "start": "next start", + "preview": "next start" + }, + "dependencies": { + "@electric-sql/client": "1.0.0", + "@tanstack/db-collections": "^0.0.6", + "@tanstack/query-core": "^5.75.7", + "@tanstack/react-db": "^0.0.4", + "cors": "^2.8.5", + "drizzle-orm": "^0.40.1", + "drizzle-zod": "^0.7.0", + "express": "^4.19.2", + "next": "^15.3.2", + "postgres": "^3.4.5", + "react": "^19.0.0", + "react-dom": "^19.0.0", + "tailwindcss": "^4.0.17", + "zod": "^3.24.2" + }, + "devDependencies": { + "@eslint/js": "^9.22.0", + "@tailwindcss/postcss": "^4.0.0-alpha.8", + "@types/cors": "^2.8.17", + "@types/express": "^4.17.21", + "@types/node": "^22.13.10", + "@types/pg": "^8.11.11", + "@types/react": "^19.0.12", + "@types/react-dom": "^19.0.0", + "@typescript-eslint/eslint-plugin": "^8.26.1", + "@typescript-eslint/parser": "^8.26.1", + "concurrently": "^9.1.2", + "dotenv": "^16.3.1", + "drizzle-kit": "^0.30.5", + "eslint": "^9.22.0", + "eslint-config-next": "^15.3.2", + "pg": "^8.14.1", + "postcss": "^8.5.0", + "tsx": "^4.6.2", + "typescript": "^5.8.2" + }, + "type": "module" +} \ No newline at end of file diff --git a/examples/nextjs/todo/postcss.config.mjs b/examples/nextjs/todo/postcss.config.mjs new file mode 100644 index 00000000..df19a05d --- /dev/null +++ b/examples/nextjs/todo/postcss.config.mjs @@ -0,0 +1,5 @@ +export default { + plugins: { + '@tailwindcss/postcss': {}, + }, +} \ No newline at end of file diff --git a/examples/nextjs/todo/postgres.conf b/examples/nextjs/todo/postgres.conf new file mode 100644 index 00000000..cafbb9b3 --- /dev/null +++ b/examples/nextjs/todo/postgres.conf @@ -0,0 +1,17 @@ +listen_addresses = '*' +max_connections = 100 +shared_buffers = 128MB +dynamic_shared_memory_type = posix +max_wal_size = 1GB +min_wal_size = 80MB +log_timezone = 'UTC' +datestyle = 'iso, mdy' +timezone = 'UTC' +lc_messages = 'en_US.utf8' +lc_monetary = 'en_US.utf8' +lc_numeric = 'en_US.utf8' +lc_time = 'en_US.utf8' +default_text_search_config = 'pg_catalog.english' +wal_level = logical +max_replication_slots = 10 +max_wal_senders = 10 diff --git a/examples/nextjs/todo/scripts/migrate.ts b/examples/nextjs/todo/scripts/migrate.ts new file mode 100644 index 00000000..ee0089f3 --- /dev/null +++ b/examples/nextjs/todo/scripts/migrate.ts @@ -0,0 +1,29 @@ +import { drizzle } from "drizzle-orm/node-postgres" +import { migrate } from "drizzle-orm/node-postgres/migrator" +import pkg from "pg" +import * as dotenv from "dotenv" + +dotenv.config() + +const { Pool 
} = pkg +const pool = new Pool({ + host: process.env.DB_HOST || `localhost`, + port: parseInt(process.env.DB_PORT || `54322`), + user: process.env.DB_USER || `postgres`, + password: process.env.DB_PASSWORD || `postgres`, + database: process.env.DB_NAME || `todo_app`, +}) + +const db = drizzle(pool) + +async function main() { + console.log(`Running migrations...`) + await migrate(db, { migrationsFolder: `./drizzle` }) + console.log(`Migrations completed!`) + await pool.end() +} + +main().catch((err) => { + console.error(`Migration failed!`, err) + process.exit(1) +}) diff --git a/examples/nextjs/todo/src/api/server.ts b/examples/nextjs/todo/src/api/server.ts new file mode 100644 index 00000000..9f3bf164 --- /dev/null +++ b/examples/nextjs/todo/src/api/server.ts @@ -0,0 +1,302 @@ +/* eslint-disable @typescript-eslint/no-unnecessary-condition */ +import express from "express" +import cors from "cors" +import { sql } from "../db/postgres" +import { + validateInsertConfig, + validateInsertTodo, + validateUpdateConfig, + validateUpdateTodo, +} from "../db/validation" + +// Create Express app +const app = express() +const PORT = process.env.PORT || 3001 + +// Middleware +app.use(cors()) +app.use(express.json()) + +// Health check endpoint +app.get(`/api/health`, (req, res) => { + res.status(200).json({ status: `ok` }) +}) + +// Generate a transaction ID +async function generateTxId(tx: any): Promise { + const [{ txid }] = await tx`SELECT txid_current() as txid` + return Number(txid) +} + +// ===== TODOS API ===== + +// GET all todos +app.get(`/api/todos`, async (req, res) => { + try { + const todos = await sql`SELECT * FROM todos` + res.status(200).json(todos) + } catch (error) { + console.error(`Error fetching todos:`, error) + res.status(500).json({ + error: `Failed to fetch todos`, + details: error instanceof Error ? error.message : String(error), + }) + } +}) + +// GET a single todo by ID +app.get(`/api/todos/:id`, async (req, res) => { + try { + const { id } = req.params + const [todo] = await sql`SELECT * FROM todos WHERE id = ${id}` + + if (!todo) { + return res.status(404).json({ error: `Todo not found` }) + } + + res.status(200).json(todo) + } catch (error) { + console.error(`Error fetching todo:`, error) + res.status(500).json({ + error: `Failed to fetch todo`, + details: error instanceof Error ? error.message : String(error), + }) + } +}) + +// POST create a new todo +app.post(`/api/todos`, async (req, res) => { + try { + const todoData = validateInsertTodo(req.body) + + const { result: newTodo, txid } = await sql.begin(async (tx) => { + const generatedTxid = await generateTxId(tx) + + const [result] = await tx` + INSERT INTO todos ${tx(todoData)} + RETURNING * + ` + return { result, txid: generatedTxid } + }) + + res.status(201).json({ todo: newTodo, txid }) + } catch (error) { + console.error(`Error creating todo:`, error) + res.status(500).json({ + error: `Failed to create todo`, + details: error instanceof Error ? 
error.message : String(error), + }) + } +}) + +// PUT update a todo +app.put(`/api/todos/:id`, async (req, res) => { + try { + const { id } = req.params + const todoData = validateUpdateTodo(req.body) + + const { result: updatedTodo, txid } = await sql.begin(async (tx) => { + const generatedTxid = await generateTxId(tx) + + const [result] = await tx` + UPDATE todos + SET ${tx(todoData)} + WHERE id = ${id} + RETURNING * + ` + + if (!result) { + throw new Error(`Todo not found`) + } + + return { result, txid: generatedTxid } + }) + + res.status(200).json({ todo: updatedTodo, txid }) + } catch (error) { + if (error instanceof Error && error.message === `Todo not found`) { + return res.status(404).json({ error: `Todo not found` }) + } + + console.error(`Error updating todo:`, error) + res.status(500).json({ + error: `Failed to update todo`, + details: error instanceof Error ? error.message : String(error), + }) + } +}) + +// DELETE a todo +app.delete(`/api/todos/:id`, async (req, res) => { + try { + const { id } = req.params + + const txid = await sql.begin(async (tx) => { + const generatedTxid = await generateTxId(tx) + + const [result] = await tx` + DELETE FROM todos + WHERE id = ${id} + RETURNING id + ` + + if (!result) { + throw new Error(`Todo not found`) + } + + return generatedTxid + }) + + res.status(200).json({ success: true, txid }) + } catch (error) { + if (error instanceof Error && error.message === `Todo not found`) { + return res.status(404).json({ error: `Todo not found` }) + } + + console.error(`Error deleting todo:`, error) + res.status(500).json({ + error: `Failed to delete todo`, + details: error instanceof Error ? error.message : String(error), + }) + } +}) + +// ===== CONFIG API ===== + +// GET all config entries +app.get(`/api/config`, async (req, res) => { + try { + const config = await sql`SELECT * FROM config` + res.status(200).json(config) + } catch (error) { + console.error(`Error fetching config:`, error) + res.status(500).json({ + error: `Failed to fetch config`, + details: error instanceof Error ? error.message : String(error), + }) + } +}) + +// GET a single config by ID +app.get(`/api/config/:id`, async (req, res) => { + try { + const { id } = req.params + const [config] = await sql`SELECT * FROM config WHERE id = ${id}` + + if (!config) { + return res.status(404).json({ error: `Config not found` }) + } + + res.status(200).json(config) + } catch (error) { + console.error(`Error fetching config:`, error) + res.status(500).json({ + error: `Failed to fetch config`, + details: error instanceof Error ? error.message : String(error), + }) + } +}) + +// POST create a new config +app.post(`/api/config`, async (req, res) => { + try { + const configData = validateInsertConfig(req.body) + + const { result: newConfig, txid } = await sql.begin(async (tx) => { + const generatedTxid = await generateTxId(tx) + + const [result] = await tx` + INSERT INTO config ${tx(configData)} + RETURNING * + ` + return { result, txid: generatedTxid } + }) + + res.status(201).json({ config: newConfig, txid }) + } catch (error) { + console.error(`Error creating config:`, error) + res.status(500).json({ + error: `Failed to create config`, + details: error instanceof Error ? 
error.message : String(error), + }) + } +}) + +// PUT update a config +app.put(`/api/config/:id`, async (req, res) => { + try { + const { id } = req.params + const configData = validateUpdateConfig(req.body) + + const { result: updatedConfig, txid } = await sql.begin(async (tx) => { + const generatedTxid = await generateTxId(tx) + + const [result] = await tx` + UPDATE config + SET ${tx(configData)} + WHERE id = ${id} + RETURNING * + ` + + if (!result) { + throw new Error(`Config not found`) + } + + return { result, txid: generatedTxid } + }) + + res.status(200).json({ config: updatedConfig, txid }) + } catch (error) { + if (error instanceof Error && error.message === `Config not found`) { + return res.status(404).json({ error: `Config not found` }) + } + + console.error(`Error updating config:`, error) + res.status(500).json({ + error: `Failed to update config`, + details: error instanceof Error ? error.message : String(error), + }) + } +}) + +// DELETE a config +app.delete(`/api/config/:id`, async (req, res) => { + try { + const { id } = req.params + + const txid = await sql.begin(async (tx) => { + const generatedTxid = await generateTxId(tx) + + const [result] = await tx` + DELETE FROM config + WHERE id = ${id} + RETURNING id + ` + + if (!result) { + throw new Error(`Config not found`) + } + + return generatedTxid + }) + + res.status(200).json({ success: true, txid }) + } catch (error) { + if (error instanceof Error && error.message === `Config not found`) { + return res.status(404).json({ error: `Config not found` }) + } + + console.error(`Error deleting config:`, error) + res.status(500).json({ + error: `Failed to delete config`, + details: error instanceof Error ? error.message : String(error), + }) + } +}) + +// Start server +app.listen(PORT, () => { + console.log(`Server running on port ${PORT}`) +}) + +export default app diff --git a/examples/nextjs/todo/src/api/write-to-pg.ts b/examples/nextjs/todo/src/api/write-to-pg.ts new file mode 100644 index 00000000..1a0eb6ce --- /dev/null +++ b/examples/nextjs/todo/src/api/write-to-pg.ts @@ -0,0 +1,106 @@ +import type postgres from "postgres" +import type { PendingMutation } from "@tanstack/react-db" + +/** + * Get the table name from the relation metadata + */ +function getTableName(relation?: Array): string { + if (!relation || relation.length < 2) { + throw new Error(`could not find the table name`) + } + + // The table name is typically the second element in the relation array + // e.g. 
['public', 'todos'] -> 'todos' + return relation[1] +} + +/** + * Process an array of PendingMutations and write to the database + */ +export async function processMutations( + sql: postgres.Sql>, + pendingMutations: Array +): Promise { + return await sql.begin(async (tx) => { + // Get the transaction ID + const [{ txid }] = await tx`SELECT txid_current() as txid` + + // Process each mutation in order + for (const mutation of pendingMutations) { + // Get the table name from the relation metadata + const tableName = getTableName( + mutation.syncMetadata.relation as Array | undefined + ) + + // Get the primary key columns from metadata + const primaryKey = (mutation.syncMetadata.primaryKey as + | Array + | undefined) || [`id`] + + // Process based on operation type + switch (mutation.type) { + case `insert`: { + const columns = Object.keys(mutation.modified) + const values = Object.values(mutation.modified) + const placeholders = values.map((_, i) => `$${i + 1}`).join(`, `) + + await tx.unsafe( + `INSERT INTO ${tableName} (${columns.join(`, `)}) VALUES (${placeholders})`, + values + ) + break + } + + case `update`: { + // Build SET clause + const setColumns = Object.keys(mutation.changes) + const setValues = Object.values(mutation.changes) + const setClause = setColumns + .map((col, i) => `${col} = $${i + 1}`) + .join(`, `) + + // Build WHERE clause for primary key columns starting after SET values + const whereClause = primaryKey + .map((column, i) => `${column} = $${i + setValues.length + 1}`) + .join(` AND `) + + // Combine all values + const allValues = [ + ...setValues, + ...primaryKey.map((k) => mutation.original[k]), + ] + + await tx.unsafe( + `UPDATE ${tableName} + SET ${setClause} + WHERE ${whereClause}`, + allValues + ) + break + } + + case `delete`: { + // Build WHERE clause for primary key columns + const whereClause = primaryKey + .map((column, i) => `${column} = $${i + 1}`) + .join(` AND `) + + // Extract primary key values in same order as columns + const primaryKeyValues = primaryKey.map((k) => mutation.original[k]) + + await tx.unsafe( + `DELETE FROM ${tableName} + WHERE ${whereClause}`, + primaryKeyValues + ) + break + } + + default: + throw new Error(`Unknown operation type: ${mutation.type}`) + } + } + + return Number(txid) + }) +} diff --git a/examples/nextjs/todo/src/app/TodoClient.tsx b/examples/nextjs/todo/src/app/TodoClient.tsx new file mode 100644 index 00000000..6730491c --- /dev/null +++ b/examples/nextjs/todo/src/app/TodoClient.tsx @@ -0,0 +1,214 @@ +"use client" + +import React, { useEffect, useState } from "react" +import { + useConfigCollection, + useTodosCollection, +} from "../lib/useCollectionQuery" +import { useTodos } from "../hooks/useTodos" +import { useConfig } from "../hooks/useConfig" +import { getComplementaryColor } from "../lib/utils" +import { useCollectionClient } from "../lib/CollectionClientProvider" +import type { UpdateTodo } from "../db/validation" +import type { FormEvent } from "react" + +export default function TodoClient() { + const collectionClient = useCollectionClient() + const [newTodo, setNewTodo] = useState(``) + const [mounted, setMounted] = useState(false) + + useEffect(() => { + setMounted(true) + }, []) + + const { data: todos, collection: todoCollection } = useTodosCollection() + const { data: configData, collection: configCollection } = + useConfigCollection() + + const { addTodo, updateTodo, deleteTodo } = useTodos(todoCollection) + const { getConfigValue, setConfigValue } = useConfig( + configData, + configCollection + ) + 
+ const backgroundColor = getConfigValue(`backgroundColor`) || `#f5f5f5` + const titleColor = getComplementaryColor(backgroundColor) + + const handleColorChange = (e: React.ChangeEvent) => { + const newColor = e.target.value + setConfigValue(`backgroundColor`, newColor) + } + + const handleSubmit = (e: FormEvent) => { + e.preventDefault() + if (!newTodo.trim()) return + const newTodoItem = { + text: newTodo, + completed: false, + id: Math.round(Math.random() * 1000000), + } + addTodo.mutate(() => todoCollection.insert(newTodoItem)) + setNewTodo(``) + } + + const toggleTodo = (todo: UpdateTodo) => { + updateTodo.mutate(() => + todoCollection.update( + Array.from(todoCollection.state.values()).find( + (t: any) => t.id === todo.id + )!, + (draft: any) => { + draft.completed = !draft.completed + } + ) + ) + } + + const toggleAllTodos = () => { + const allCompleted = completedTodos.length === todos.length + const todosToToggle = allCompleted ? completedTodos : activeTodos + const togglingIds = new Set() + todosToToggle.forEach((t) => togglingIds.add(t.id)) + updateTodo.mutate(() => + todoCollection.update( + Array.from(todoCollection.state.values()).filter((t: any) => + togglingIds.has(t.id) + ), + (drafts: any) => { + drafts.forEach((draft: any) => (draft.completed = !allCompleted)) + } + ) + ) + } + + const clearCompleted = () => { + deleteTodo.mutate(() => + todoCollection.delete( + Array.from(todoCollection.state.values()).filter((t: any) => + completedTodos.some((ct) => ct.id === t.id) + ) + ) + ) + } + + const activeTodos = todos.filter((todo) => !todo.completed) + const completedTodos = todos.filter((todo) => todo.completed) + + return ( +
+      <div
+        className="min-h-screen flex justify-center py-8"
+        style={{ backgroundColor }}
+      >
+        <div className="w-full max-w-[550px]">
+          <h1
+            className="text-[80px] font-thin text-center"
+            style={{ color: titleColor }}
+          >
+            todos
+          </h1>
+          <div className="flex items-center justify-center gap-2 my-4">
+            <label htmlFor="backgroundColor" style={{ color: titleColor }}>
+              Background color
+            </label>
+            <input
+              id="backgroundColor"
+              type="color"
+              value={backgroundColor}
+              onChange={handleColorChange}
+            />
+          </div>
+          <section className="relative bg-white shadow-lg">
+            <form onSubmit={handleSubmit} className="relative">
+              {todos.length > 0 && (
+                <button
+                  type="button"
+                  onClick={toggleAllTodos}
+                  className="absolute left-0 top-0 h-full w-[60px] text-[30px] text-[#e6e6e6]"
+                >
+                  ❯
+                </button>
+              )}
+              <input
+                type="text"
+                value={newTodo}
+                onChange={(e) => setNewTodo(e.target.value)}
+                placeholder="What needs to be done?"
+                className="w-full py-4 pl-[60px] pr-4 text-2xl font-light border-none shadow-[inset_0_-2px_1px_rgba(0,0,0,0.03)] box-border"
+                style={{
+                  background: `rgba(0, 0, 0, 0.003)`,
+                }}
+              />
+            </form>
+
+            {todos.length > 0 && (
+              <>
+                <ul className="m-0 p-0 list-none">
+                  {todos.map((todo) => (
+                    <li key={todo.id} className="relative border-b border-[#ededed]">
+                      <div className="flex items-center h-[58px] pl-[60px] pr-4">
+                        <input
+                          type="checkbox"
+                          checked={todo.completed}
+                          onChange={() => toggleTodo(todo)}
+                          className="absolute left-[12px] top-0 bottom-0 my-auto h-[40px] w-[40px] cursor-pointer"
+                        />
+                        <label className="flex-1 text-2xl font-light">
+                          {todo.text}
+                        </label>
+                        <button
+                          onClick={() =>
+                            deleteTodo.mutate(() =>
+                              todoCollection.delete(
+                                Array.from(todoCollection.state.values()).find(
+                                  (t: any) => t.id === todo.id
+                                )!
+                              )
+                            )
+                          }
+                          className="text-[30px] text-[#cc9a9a] hover:text-[#af5b5e]"
+                        >
+                          ×
+                        </button>
+                      </div>
+                    </li>
+                  ))}
+                </ul>
+
+                <footer className="flex items-center justify-between px-4 py-2 text-sm text-[#777]">
+                  <span>
+                    {activeTodos.length}
+                    {` `}
+                    {activeTodos.length === 1 ? `item` : `items`} left
+                  </span>
+                  {completedTodos.length > 0 && (
+                    <button onClick={clearCompleted} className="hover:underline">
+                      Clear completed
+                    </button>
+                  )}
+                </footer>
+              </>
+            )}
+          </section>
+        </div>
+      </div>
+ ) +} diff --git a/examples/nextjs/todo/src/app/api/electric/route.ts b/examples/nextjs/todo/src/app/api/electric/route.ts new file mode 100644 index 00000000..7eb38a43 --- /dev/null +++ b/examples/nextjs/todo/src/app/api/electric/route.ts @@ -0,0 +1,50 @@ +export async function GET(request: Request) { + const url = new URL(request.url) + + const originUrl = new URL(`http://localhost:3003/v1/shape`) + + url.searchParams.forEach((value, key) => { + if ([`live`, `table`, `handle`, `offset`, `cursor`].includes(key)) { + originUrl.searchParams.set(key, value) + } + }) + + try { + const response = await fetch(originUrl) + + if (!response.ok) { + const responseText = await response.text() + return new Response(responseText, { + status: response.status, + statusText: response.statusText, + headers: response.headers, + }) + } + + const isLiveRequest = url.searchParams.get(`live`) === `true` + + if (isLiveRequest) { + const headers = new Headers(response.headers) + headers.delete(`content-encoding`) + headers.delete(`content-length`) + + return new Response(response.body, { + status: response.status, + statusText: response.statusText, + headers, + }) + } else { + const headers = new Headers(response.headers) + headers.delete(`content-encoding`) + headers.delete(`content-length`) + + return new Response(response.body, { + status: response.status, + statusText: response.statusText, + headers, + }) + } + } catch (error) { + return new Response(`Internal Server Error`, { status: 500 }) + } +} diff --git a/examples/nextjs/todo/src/app/globals.css b/examples/nextjs/todo/src/app/globals.css new file mode 100644 index 00000000..24944ae4 --- /dev/null +++ b/examples/nextjs/todo/src/app/globals.css @@ -0,0 +1,94 @@ +@import "tailwindcss"; + +html, +body { + margin: 0; + padding: 0; +} + +button { + margin: 0; + padding: 0; + border: 0; + background: none; + font-size: 100%; + vertical-align: baseline; + font-family: inherit; + font-weight: inherit; + color: inherit; + appearance: none; +} + +body { + font: + 14px "Helvetica Neue", + Helvetica, + Arial, + sans-serif; + line-height: 1.4em; + background: #f5f5f5; + color: #4d4d4d; + margin: 0 auto; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; + font-weight: 300; +} + +:root { + font-family: Inter, system-ui, Avenir, Helvetica, Arial, sans-serif; + line-height: 1.5; + font-weight: 400; + + color-scheme: light dark; + color: rgba(255, 255, 255, 0.87); + background-color: #242424; + + font-synthesis: none; + text-rendering: optimizeLegibility; +} + +a { + font-weight: 500; + color: #646cff; + text-decoration: inherit; +} +a:hover { + color: #535bf2; +} + +h1 { + font-size: 3.2em; + line-height: 1.1; +} + +button { + border-radius: 8px; + border: 1px solid transparent; + padding: 0.6em 1.2em; + font-size: 1em; + font-weight: 500; + font-family: inherit; + background-color: #1a1a1a; + cursor: pointer; + transition: border-color 0.25s; +} +button:hover { + border-color: #646cff; +} +button:focus, +button:focus-visible { + outline: 4px auto -webkit-focus-ring-color; +} + +@media (prefers-color-scheme: light) { + :root { + color: #213547; + background-color: #ffffff; + } + a:hover { + color: #747bff; + } + button { + background-color: #f9f9f9; + } +} \ No newline at end of file diff --git a/examples/nextjs/todo/src/app/layout.tsx b/examples/nextjs/todo/src/app/layout.tsx new file mode 100644 index 00000000..cc6921b1 --- /dev/null +++ b/examples/nextjs/todo/src/app/layout.tsx @@ -0,0 +1,22 @@ +import Providers from "./providers" +import 
type { Metadata } from "next" +import "./globals.css" + +export const metadata: Metadata = { + title: `Todo App - TanStack DB Example`, + description: `A todo application built with Next.js and TanStack DB`, +} + +export default function RootLayout({ + children, +}: { + children: React.ReactNode +}) { + return ( + + + {children} + + + ) +} diff --git a/examples/nextjs/todo/src/app/page.tsx b/examples/nextjs/todo/src/app/page.tsx new file mode 100644 index 00000000..010c648d --- /dev/null +++ b/examples/nextjs/todo/src/app/page.tsx @@ -0,0 +1,21 @@ +import React from "react" +import { dehydrateCollections } from "@tanstack/db-collections" +import { CollectionHydrationBoundary } from "../lib/CollectionHydrationBoundary" +import TodoClient from "./TodoClient" +import { makeCollections } from "@/lib/collections" + +export default async function TodoPage() { + const collections = makeCollections() + const promises = Object.values(collections).map(async (collection) => { + await collection.stateWhenReady() + }) + await Promise.all(promises) + + const dehydratedState = dehydrateCollections(collections) + + return ( + + + + ) +} diff --git a/examples/nextjs/todo/src/app/providers.tsx b/examples/nextjs/todo/src/app/providers.tsx new file mode 100644 index 00000000..11b7d0a7 --- /dev/null +++ b/examples/nextjs/todo/src/app/providers.tsx @@ -0,0 +1,38 @@ +"use client" + +import type { CollectionClient } from "@/lib/CollectionHydrationBoundary" +import { CollectionClientProvider } from "@/lib/CollectionClientProvider" +import { makeCollectionClient } from "@/lib/collections" + +const isServer = typeof window === `undefined` + +let browserCollectionClient: CollectionClient | undefined = undefined + +function getCollectionClient() { + if (isServer) { + // Server: always make a new query client + return makeCollectionClient() + } else { + // Browser: make a new query client if we don't already have one + // This is very important, so we don't re-make a new client if React + // suspends during the initial render. 
This may not be needed if we + // have a suspense boundary BELOW the creation of the query client + if (!browserCollectionClient) + browserCollectionClient = makeCollectionClient() + return browserCollectionClient + } +} + +export default function Providers({ children }: { children: React.ReactNode }) { + // NOTE: Avoid useState when initializing the query client if you don't + // have a suspense boundary between this and the code that may + // suspend because React will throw away the client on the initial + // render if it suspends and there is no boundary + const collectionClient = getCollectionClient() + + return ( + + {children} + + ) +} diff --git a/examples/nextjs/todo/src/db/index.ts b/examples/nextjs/todo/src/db/index.ts new file mode 100644 index 00000000..5a856e69 --- /dev/null +++ b/examples/nextjs/todo/src/db/index.ts @@ -0,0 +1,15 @@ +import { drizzle } from "drizzle-orm/node-postgres" +import { Pool } from "pg" +import * as schema from "./schema" + +// Create a PostgreSQL pool +const pool = new Pool({ + host: process.env.DB_HOST || `localhost`, + port: parseInt(process.env.DB_PORT || `5432`), + user: process.env.DB_USER || `postgres`, + password: process.env.DB_PASSWORD, + database: process.env.DB_NAME || `todo_app`, +}) + +// Create drizzle database instance +export const db = drizzle(pool, { schema }) diff --git a/examples/nextjs/todo/src/db/postgres.ts b/examples/nextjs/todo/src/db/postgres.ts new file mode 100644 index 00000000..67434843 --- /dev/null +++ b/examples/nextjs/todo/src/db/postgres.ts @@ -0,0 +1,10 @@ +import postgres from "postgres" + +// Create a postgres instance +export const sql = postgres({ + host: `localhost`, + port: 54322, + user: `postgres`, + password: `postgres`, + database: `todo_app`, +}) diff --git a/examples/nextjs/todo/src/db/schema.ts b/examples/nextjs/todo/src/db/schema.ts new file mode 100644 index 00000000..4bf9e0d0 --- /dev/null +++ b/examples/nextjs/todo/src/db/schema.ts @@ -0,0 +1,31 @@ +import { boolean, pgTable, serial, text, timestamp } from "drizzle-orm/pg-core" + +export const todos = pgTable(`todos`, { + id: serial(`id`).primaryKey(), + text: text(`text`).notNull(), + completed: boolean(`completed`).notNull().default(false), + created_at: timestamp(`created_at`, { withTimezone: true }) + .defaultNow() + .notNull(), + updated_at: timestamp(`updated_at`, { withTimezone: true }) + .notNull() + .defaultNow(), +}) + +export type Todo = typeof todos.$inferSelect +export type NewTodo = typeof todos.$inferInsert + +export const config = pgTable(`config`, { + id: serial(`id`).primaryKey(), + key: text(`key`).notNull().unique(), + value: text(`value`).notNull(), + created_at: timestamp(`created_at`, { withTimezone: true }) + .notNull() + .defaultNow(), + updated_at: timestamp(`updated_at`, { withTimezone: true }) + .notNull() + .defaultNow(), +}) + +export type Config = typeof config.$inferSelect +export type NewConfig = typeof config.$inferInsert diff --git a/examples/nextjs/todo/src/db/validation.ts b/examples/nextjs/todo/src/db/validation.ts new file mode 100644 index 00000000..1fc244b5 --- /dev/null +++ b/examples/nextjs/todo/src/db/validation.ts @@ -0,0 +1,77 @@ +import { createInsertSchema, createSelectSchema } from "drizzle-zod" +import { config, todos } from "./schema" +import type { z } from "zod" + +// Auto-generated schemas from Drizzle schema +export const insertTodoSchema = createInsertSchema(todos) +export const selectTodoSchema = createSelectSchema(todos) + +// Partial schema for updates +export const updateTodoSchema = 
insertTodoSchema.partial().strict() + +// Config schemas +export const insertConfigSchema = createInsertSchema(config).strict() +export const selectConfigSchema = createSelectSchema(config) +export const updateConfigSchema = insertConfigSchema.partial().strict() + +// Type inference +export type InsertTodo = z.infer +export type SelectTodo = z.infer +export type UpdateTodo = z.infer + +export type InsertConfig = z.infer +export type SelectConfig = z.infer +export type UpdateConfig = z.infer + +// Validation functions +export const validateInsertTodo = (data: unknown): InsertTodo => { + if (data.text === `really hard todo`) { + throw new Error(`we don't want to do really hard todos`) + } + return insertTodoSchema.parse(data) +} + +export const validateSelectTodo = (data: unknown): SelectTodo => { + return selectTodoSchema.parse(data) +} + +export const validateUpdateTodo = (data: unknown): UpdateTodo => { + return updateTodoSchema.parse(data) +} + +export const validateInsertConfig = (data: unknown): InsertConfig => { + return insertConfigSchema.parse(data) +} + +export const validateSelectConfig = (data: unknown): SelectConfig => { + return selectConfigSchema.parse(data) +} + +export const validateUpdateConfig = (data: unknown): UpdateConfig => { + return updateConfigSchema.parse(data) +} + +// Safe parsing functions that return Result type instead of throwing +export const safeParseInsertTodo = (data: unknown) => { + return insertTodoSchema.safeParse(data) +} + +export const safeParseSelectTodo = (data: unknown) => { + return selectTodoSchema.safeParse(data) +} + +export const safeParseUpdateTodo = (data: unknown) => { + return updateTodoSchema.safeParse(data) +} + +export const safeParseInsertConfig = (data: unknown) => { + return insertConfigSchema.safeParse(data) +} + +export const safeParseSelectConfig = (data: unknown) => { + return selectConfigSchema.safeParse(data) +} + +export const safeParseUpdateConfig = (data: unknown) => { + return updateConfigSchema.safeParse(data) +} diff --git a/examples/nextjs/todo/src/hooks/useConfig.ts b/examples/nextjs/todo/src/hooks/useConfig.ts new file mode 100644 index 00000000..cda288e3 --- /dev/null +++ b/examples/nextjs/todo/src/hooks/useConfig.ts @@ -0,0 +1,70 @@ +/* eslint-disable @typescript-eslint/no-unnecessary-condition */ +import { useOptimisticMutation } from "@tanstack/react-db" +import { api } from "../lib/api" +import { collectionSync } from "../lib/collections" +import type { Collection, PendingMutation } from "@tanstack/react-db" +import type { UpdateConfig } from "../db/validation" + +export const useConfig = ( + configData: Array, + configCollection: Collection +) => { + const createConfig = useOptimisticMutation({ + mutationFn: async ({ transaction }) => { + const mutation = transaction.mutations[0] as PendingMutation + const { modified } = mutation + const response = await api.config.create(modified) + await collectionSync(mutation, response.txid) + }, + }) + + const updateConfig = useOptimisticMutation({ + mutationFn: async ({ transaction }) => { + const mutation = transaction.mutations[0] as PendingMutation + const { original, changes } = mutation + const response = await api.config.update(original.id as number, changes) + await collectionSync(mutation, response.txid) + }, + }) + + // Helper function to get config values + const getConfigValue = (key: string): string => { + if (!configData) return `` + for (const config of configData) { + if (config.key === key) { + return config.value! 
+ } + } + return `` + } + + // Helper function to update config values + const setConfigValue = (key: string, value: string): void => { + for (const config of configData) { + if (config.key === key) { + updateConfig.mutate(() => + configCollection.update( + Array.from(configCollection.state.values())[0], + (draft) => { + draft.value = value + } + ) + ) + return + } + } + + // If the config doesn't exist yet, create it + createConfig.mutate(() => + configCollection.insert({ + key, + value, + }) + ) + } + + return { + getConfigValue, + setConfigValue, + } +} diff --git a/examples/nextjs/todo/src/hooks/useTodos.ts b/examples/nextjs/todo/src/hooks/useTodos.ts new file mode 100644 index 00000000..02a34a8d --- /dev/null +++ b/examples/nextjs/todo/src/hooks/useTodos.ts @@ -0,0 +1,40 @@ +import { useOptimisticMutation } from "@tanstack/react-db" +import { api } from "../lib/api" +import { collectionSync } from "../lib/collections" +import type { Collection, PendingMutation } from "@tanstack/react-db" +import type { UpdateTodo } from "../db/validation" + +export const useTodos = (todoCollection: Collection) => { + const addTodo = useOptimisticMutation({ + mutationFn: async ({ transaction }) => { + const mutation = transaction.mutations[0] as PendingMutation + const { modified } = mutation + const response = await api.todos.create(modified) + await collectionSync(mutation, response.txid) + }, + }) + + const updateTodo = useOptimisticMutation({ + mutationFn: async ({ transaction }) => { + const mutation = transaction.mutations[0] as PendingMutation + const { original, changes } = mutation + const response = await api.todos.update(original.id, changes) + await collectionSync(mutation, response.txid) + }, + }) + + const deleteTodo = useOptimisticMutation({ + mutationFn: async ({ transaction }) => { + const mutation = transaction.mutations[0] as PendingMutation + const { original } = mutation + const response = await api.todos.delete(original.id) + await collectionSync(mutation, response.txid) + }, + }) + + return { + addTodo, + updateTodo, + deleteTodo, + } +} diff --git a/examples/nextjs/todo/src/lib/CollectionClientProvider.tsx b/examples/nextjs/todo/src/lib/CollectionClientProvider.tsx new file mode 100644 index 00000000..df743acb --- /dev/null +++ b/examples/nextjs/todo/src/lib/CollectionClientProvider.tsx @@ -0,0 +1,40 @@ +"use client" + +import * as React from "react" +import type { CollectionClient } from "./CollectionHydrationBoundary" + +export const CollectionClientContext = React.createContext< + CollectionClient | undefined +>(undefined) + +export const useCollectionClient = (collectionClient?: CollectionClient) => { + const client = React.useContext(CollectionClientContext) + + if (collectionClient) { + return collectionClient + } + + if (!client) { + throw new Error( + `No CollectionClient set, use CollectionClientProvider to set one` + ) + } + + return client +} + +export type CollectionClientProviderProps = { + client: CollectionClient + children?: React.ReactNode +} + +export const CollectionClientProvider = ({ + client, + children, +}: CollectionClientProviderProps): React.JSX.Element => { + return ( + + {children} + + ) +} diff --git a/examples/nextjs/todo/src/lib/CollectionHydrationBoundary.tsx b/examples/nextjs/todo/src/lib/CollectionHydrationBoundary.tsx new file mode 100644 index 00000000..0f462b0d --- /dev/null +++ b/examples/nextjs/todo/src/lib/CollectionHydrationBoundary.tsx @@ -0,0 +1,34 @@ +"use client" + +import * as React from "react" +import { useCollectionClient } from 
"./CollectionClientProvider" +import type { + DehydratedState, + ElectricCollection, +} from "@tanstack/db-collections" + +export type CollectionClient = Record +export interface CollectionHydrationBoundaryProps { + state?: DehydratedState + children?: React.ReactNode + collectionClient?: CollectionClient +} + +export const CollectionHydrationBoundary = ({ + children, + state, + collectionClient: providedClient, +}: CollectionHydrationBoundaryProps) => { + const client = useCollectionClient(providedClient) + + React.useMemo(() => { + if (state && Object.keys(state).length > 0) { + const collections = client + Object.entries(state).forEach(([collectionName, collectionState]) => { + collections[collectionName].hydrate(collectionState) + }) + } + }, [state, client]) + + return children as React.ReactElement +} diff --git a/examples/nextjs/todo/src/lib/api.ts b/examples/nextjs/todo/src/lib/api.ts new file mode 100644 index 00000000..d59b6161 --- /dev/null +++ b/examples/nextjs/todo/src/lib/api.ts @@ -0,0 +1,73 @@ +import type { UpdateConfig, UpdateTodo } from "../db/validation" + +const API_BASE_URL = `http://localhost:3001/api` + +export const api = { + // Todo API methods + todos: { + create: async ( + todo: Partial + ): Promise<{ todo: UpdateTodo; txid: number }> => { + const response = await fetch(`${API_BASE_URL}/todos`, { + method: `POST`, + headers: { "Content-Type": `application/json` }, + body: JSON.stringify(todo), + }) + if (!response.ok) + throw new Error(`HTTP error! Status: ${response.status}`) + return response.json() + }, + update: async ( + id: unknown, + changes: Partial + ): Promise<{ todo: UpdateTodo; txid: number }> => { + const response = await fetch(`${API_BASE_URL}/todos/${id}`, { + method: `PUT`, + headers: { "Content-Type": `application/json` }, + body: JSON.stringify(changes), + }) + if (!response.ok) + throw new Error(`HTTP error! Status: ${response.status}`) + return response.json() + }, + delete: async ( + id: unknown + ): Promise<{ success: boolean; txid: number }> => { + const response = await fetch(`${API_BASE_URL}/todos/${id}`, { + method: `DELETE`, + }) + if (!response.ok) + throw new Error(`HTTP error! Status: ${response.status}`) + return response.json() + }, + }, + + // Config API methods + config: { + create: async ( + config: Partial + ): Promise<{ config: UpdateConfig; txid: number }> => { + const response = await fetch(`${API_BASE_URL}/config`, { + method: `POST`, + headers: { "Content-Type": `application/json` }, + body: JSON.stringify(config), + }) + if (!response.ok) + throw new Error(`HTTP error! Status: ${response.status}`) + return response.json() + }, + update: async ( + id: number, + changes: Partial + ): Promise<{ config: UpdateConfig; txid: number }> => { + const response = await fetch(`${API_BASE_URL}/config/${id}`, { + method: `PUT`, + headers: { "Content-Type": `application/json` }, + body: JSON.stringify(changes), + }) + if (!response.ok) + throw new Error(`HTTP error! 
Status: ${response.status}`) + return response.json() + }, + }, +} diff --git a/examples/nextjs/todo/src/lib/collections.ts b/examples/nextjs/todo/src/lib/collections.ts new file mode 100644 index 00000000..4890424b --- /dev/null +++ b/examples/nextjs/todo/src/lib/collections.ts @@ -0,0 +1,46 @@ +import { createElectricCollection } from "@tanstack/db-collections" +import { updateConfigSchema, updateTodoSchema } from "../db/validation" +import type { ElectricCollection } from "@tanstack/db-collections" +import type { PendingMutation, Row } from "@tanstack/react-db" +import type { UpdateConfig, UpdateTodo } from "../db/validation" +import type { CollectionClient } from "./CollectionHydrationBoundary" + +const isServer = typeof window === `undefined` +const baseUrl = isServer ? `http://localhost:3000` : window.location.origin +export function makeCollections() { + return { + todos: createElectricCollection({ + id: `todos`, + streamOptions: { + url: `${baseUrl}/api/electric`, + params: { table: `todos` }, + subscribe: !isServer, + parser: { + timestamptz: (date: string) => new Date(date), + }, + }, + primaryKey: [`id`], + schema: updateTodoSchema, + }), + config: createElectricCollection({ + id: `config`, + streamOptions: { + url: `${baseUrl}/api/electric`, + params: { table: `config` }, + subscribe: !isServer, + parser: { + timestamptz: (date: string) => new Date(date), + }, + }, + primaryKey: [`id`], + schema: updateConfigSchema, + }), + } +} + +export function makeCollectionClient(): CollectionClient { + return makeCollections() +} +export async function collectionSync(mutation: PendingMutation, txid: number) { + await (mutation.collection as ElectricCollection).awaitTxId(txid) +} diff --git a/examples/nextjs/todo/src/lib/useCollectionQuery.ts b/examples/nextjs/todo/src/lib/useCollectionQuery.ts new file mode 100644 index 00000000..4ff1f6b3 --- /dev/null +++ b/examples/nextjs/todo/src/lib/useCollectionQuery.ts @@ -0,0 +1,44 @@ +import { useLiveQuery } from "@tanstack/react-db" +import { useCollectionClient } from "./CollectionClientProvider" +import type { UpdateConfig, UpdateTodo } from "../db/validation" +import type { ElectricCollection } from "@tanstack/db-collections" +import type { CollectionClient } from "./CollectionHydrationBoundary" + +// Hook to use todos collection (similar to useQuery) +export function useTodosCollection(collectionClient?: CollectionClient) { + const client = useCollectionClient(collectionClient) + const todoCollection: ElectricCollection = client.todos + + const { data: todos } = useLiveQuery((q) => + q + .from({ todoCollection: todoCollection }) + .keyBy(`@id`) + .orderBy(`@created_at`) + .select(`@id`, `@created_at`, `@text`, `@completed`) + ) + + return { + data: todos, + collection: todoCollection, + } +} + +// Hook to use config collection (similar to useQuery) +export function useConfigCollection(collectionClient?: CollectionClient) { + const client = useCollectionClient(collectionClient) + const configCollection: ElectricCollection = client.config + + const { data: configData } = useLiveQuery((q) => + q + .from({ + configCollection: configCollection, + }) + .keyBy(`@id`) + .select(`@id`, `@key`, `@value`) + ) + + return { + data: configData, + collection: configCollection, + } +} diff --git a/examples/nextjs/todo/src/lib/utils.ts b/examples/nextjs/todo/src/lib/utils.ts new file mode 100644 index 00000000..f47551f0 --- /dev/null +++ b/examples/nextjs/todo/src/lib/utils.ts @@ -0,0 +1,43 @@ +// Function to generate a complementary color +export const 
getComplementaryColor = (hexColor: string): string => { + // Default to a nice blue if no color is provided + if (!hexColor) return `#3498db` + + // Remove the hash if it exists + const color = hexColor.replace(`#`, ``) + + // Convert hex to RGB + const r = parseInt(color.substr(0, 2), 16) + const g = parseInt(color.substr(2, 2), 16) + const b = parseInt(color.substr(4, 2), 16) + + // Calculate complementary color (inverting the RGB values) + const compR = 255 - r + const compG = 255 - g + const compB = 255 - b + + // Convert back to hex + const compHex = + `#` + + ((1 << 24) + (compR << 16) + (compG << 8) + compB).toString(16).slice(1) + + // Calculate brightness of the background + const brightness = r * 0.299 + g * 0.587 + b * 0.114 + + // If the complementary color doesn't have enough contrast, adjust it + const compBrightness = compR * 0.299 + compG * 0.587 + compB * 0.114 + const brightnessDiff = Math.abs(brightness - compBrightness) + + if (brightnessDiff < 128) { + // Not enough contrast, use a more vibrant alternative + if (brightness > 128) { + // Dark color for light background + return `#8e44ad` // Purple + } else { + // Light color for dark background + return `#f1c40f` // Yellow + } + } + + return compHex +} diff --git a/examples/nextjs/todo/tailwind.config.ts b/examples/nextjs/todo/tailwind.config.ts new file mode 100644 index 00000000..cc855140 --- /dev/null +++ b/examples/nextjs/todo/tailwind.config.ts @@ -0,0 +1,13 @@ +const config = { + content: [ + `./src/pages/**/*.{js,ts,jsx,tsx,mdx}`, + `./src/components/**/*.{js,ts,jsx,tsx,mdx}`, + `./src/app/**/*.{js,ts,jsx,tsx,mdx}`, + ], + theme: { + extend: {}, + }, + plugins: [], +} + +export default config diff --git a/examples/nextjs/todo/tsconfig.json b/examples/nextjs/todo/tsconfig.json new file mode 100644 index 00000000..873e9994 --- /dev/null +++ b/examples/nextjs/todo/tsconfig.json @@ -0,0 +1,28 @@ +{ + "compilerOptions": { + "target": "ES2022", + "lib": ["dom", "dom.iterable", "esnext"], + "allowJs": true, + "skipLibCheck": true, + "strict": true, + "noEmit": true, + "esModuleInterop": true, + "module": "esnext", + "moduleResolution": "bundler", + "resolveJsonModule": true, + "isolatedModules": true, + "jsx": "preserve", + "incremental": true, + "plugins": [ + { + "name": "next" + } + ], + "baseUrl": ".", + "paths": { + "@/*": ["./src/*"] + } + }, + "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"], + "exclude": ["node_modules"] +} diff --git a/packages/db-collections/src/electric.ts b/packages/db-collections/src/electric.ts index ca9f1c31..4a4c8d2c 100644 --- a/packages/db-collections/src/electric.ts +++ b/packages/db-collections/src/electric.ts @@ -9,15 +9,27 @@ import type { CollectionConfig, SyncConfig } from "@tanstack/db" import type { ControlMessage, Message, + Offset, Row, ShapeStreamOptions, } from "@electric-sql/client" +/** + * Initial data structure for server-side rendering + */ +export interface ElectricInitialData> { + data: Array<{ key: string; value: T; metadata?: Record }> + txids: Array + schema?: string + lastOffset?: string + shapeHandle?: string +} + /** * Configuration interface for ElectricCollection */ export interface ElectricCollectionConfig> - extends Omit, `sync`> { + extends Omit, `sync` | `initialData`> { /** * Configuration options for the ElectricSQL ShapeStream */ @@ -27,6 +39,19 @@ export interface ElectricCollectionConfig> * Array of column names that form the primary key of the shape */ primaryKey: Array + + /** + * Initial data from server-side rendering + 
* Allows hydration from server-loaded Electric data + */ + initialData?: ElectricInitialData +} + +/** + * Dehydrated state interface + */ +export interface DehydratedState { + [key: string]: ElectricInitialData } /** @@ -36,17 +61,97 @@ export class ElectricCollection< T extends Row = Record, > extends Collection { private seenTxids: Store> + private schema: Store + private lastOffset: Store + private shapeHandle: Store constructor(config: ElectricCollectionConfig) { - const seenTxids = new Store>(new Set([Math.random()])) + const initialTxids = config.initialData?.txids || [Math.random()] + const seenTxids = new Store>(new Set(initialTxids)) + const sync = createElectricSync(config.streamOptions, { primaryKey: config.primaryKey, seenTxids, + initialData: config.initialData, }) - super({ ...config, sync }) + super({ + ...config, + sync, + initialData: undefined, + }) this.seenTxids = seenTxids + + // Initialize stores for Electric-specific data + this.schema = new Store(config.initialData?.schema) + this.lastOffset = new Store( + config.initialData?.lastOffset + ) + this.shapeHandle = new Store( + config.initialData?.shapeHandle + ) + + if (config.initialData?.data && config.initialData.data.length > 0) { + this.seedFromElectricInitialData(config.initialData.data) + } + } + + /** + * Hydrates the collection with new data, typically from server-side rendering + * or a cache. This method updates the collection's data and transaction IDs. + * @param dataToHydrate The data to hydrate the collection with. + */ + public hydrate(dataToHydrate: ElectricInitialData): void { + if (dataToHydrate.txids.length > 0) { + this.seenTxids.setState((currentTxids) => { + const updatedTxids = new Set(currentTxids) + dataToHydrate.txids.forEach((txid) => updatedTxids.add(txid)) + return updatedTxids + }) + } + + // Update Electric-specific data + if (dataToHydrate.schema !== undefined) { + this.schema.setState(() => dataToHydrate.schema) + } + if (dataToHydrate.lastOffset !== undefined) { + this.lastOffset.setState(() => dataToHydrate.lastOffset) + } + if (dataToHydrate.shapeHandle !== undefined) { + this.shapeHandle.setState(() => dataToHydrate.shapeHandle) + } + + if (dataToHydrate.data.length > 0) { + this.seedFromElectricInitialData(dataToHydrate.data) + } + } + + private seedFromElectricInitialData( + items: Array<{ key: string; value: T; metadata?: Record }> + ): void { + const keys = new Set() + + this.syncedData.setState((prevData) => { + const newData = new Map(prevData) + items.forEach(({ key, value }) => { + keys.add(key) + newData.set(key, value) + this.objectKeyMap.set(value, key) + }) + return newData + }) + + this.syncedMetadata.setState((prevMetadata) => { + const newMetadata = new Map(prevMetadata) + items.forEach(({ key, metadata }) => { + const syncMetadata = this.config.sync.getSyncMetadata?.() || {} + newMetadata.set(key, { ...syncMetadata, ...metadata }) + }) + return newMetadata + }) + + this.onFirstCommit(() => {}) } /** @@ -74,6 +179,37 @@ export class ElectricCollection< }) }) } + + /** + * Dehydrates the collection's state into a serializable format + * @returns ElectricInitialData containing the collection's current state + */ + public dehydrate(): ElectricInitialData { + const collectionData: Array<{ + key: string + value: T + metadata?: Record + }> = [] + + this.state.forEach((value, key) => { + const metadata = this.syncedMetadata.state.get(key) as + | Record + | undefined + collectionData.push({ + key, + value, + metadata, + }) + }) + + return { + data: collectionData, + txids: 
Array.from(this.seenTxids.state), + schema: this.schema.state, + lastOffset: this.lastOffset.state, + shapeHandle: this.shapeHandle.state, + } + } } function isUpToDateMessage = Row>( @@ -114,12 +250,18 @@ export function createElectricCollection>( */ function createElectricSync>( streamOptions: ShapeStreamOptions, - options: { primaryKey: Array; seenTxids: Store> } + options: { + primaryKey: Array + seenTxids: Store> + initialData?: ElectricInitialData + } ): SyncConfig { - const { primaryKey, seenTxids } = options + const { primaryKey, seenTxids, initialData } = options // Store for the relation schema information - const relationSchema = new Store(undefined) + const relationSchema = new Store( + initialData?.schema || undefined + ) /** * Get the sync metadata for insert operations @@ -140,7 +282,19 @@ function createElectricSync>( return { sync: (params: Parameters[`sync`]>[0]) => { const { begin, write, commit } = params - const stream = new ShapeStream(streamOptions) + + // Resume from where server left off if we have initial data + const resumeOptions: ShapeStreamOptions = { + ...streamOptions, + ...(initialData?.lastOffset && { + offset: initialData.lastOffset as Offset, + }), + ...(initialData?.shapeHandle && { + shapeHandle: initialData.shapeHandle, + }), + } + + const stream = new ShapeStream(resumeOptions) let transactionStarted = false let newTxids = new Set() @@ -214,3 +368,20 @@ export interface ElectricSyncOptions { */ primaryKey: Array } + +/** + * Dehydrates multiple collections into a serializable format + * @param collections Record of collection instances to dehydrate + * @returns DehydratedState containing all collections' states + */ +export function dehydrateCollections( + collections: Record> +): DehydratedState { + const dehydratedState: DehydratedState = {} + + Object.entries(collections).forEach(([key, collection]) => { + dehydratedState[key] = collection.dehydrate() + }) + + return dehydratedState +} diff --git a/packages/db-collections/src/index.ts b/packages/db-collections/src/index.ts index 4e90ac87..63614520 100644 --- a/packages/db-collections/src/index.ts +++ b/packages/db-collections/src/index.ts @@ -1,10 +1,15 @@ export { ElectricCollection, createElectricCollection, - type ElectricCollectionConfig, + dehydrateCollections, +} from "./electric" +export type { + ElectricInitialData, + DehydratedState, + ElectricCollectionConfig, } from "./electric" export { QueryCollection, - createQueryCollection, type QueryCollectionConfig, + createQueryCollection, } from "./query" diff --git a/packages/db-collections/tests/electric.test.ts b/packages/db-collections/tests/electric.test.ts index 99a650fa..eb3ee2c0 100644 --- a/packages/db-collections/tests/electric.test.ts +++ b/packages/db-collections/tests/electric.test.ts @@ -1,7 +1,7 @@ import { beforeEach, describe, expect, it, vi } from "vitest" import { createTransaction } from "@tanstack/db" import { createElectricCollection } from "../src/electric" -import type { ElectricCollection } from "../src/electric" +import type { ElectricCollection, ElectricInitialData } from "../src/electric" import type { PendingMutation, Transaction } from "@tanstack/db" import type { Message, Row } from "@electric-sql/client" @@ -19,6 +19,26 @@ vi.mock(`@electric-sql/client`, async () => { } }) +const mockConstants = { + PRIMARY_KEY_COLUMN: `id`, +} + +// Initial data for testing the seeding functionality +const testInitialData: ElectricInitialData = { + data: [ + { + key: `initialKey1`, + value: { id: `initialId1`, name: `Initial 
User 1` }, + metadata: { source: `seed` }, + }, + { key: `initialKey2`, value: { id: `initialId2`, name: `Initial User 2` } }, + ], + txids: [99901, 99902], + schema: `seeded_schema`, + lastOffset: `seedOffset123`, + shapeHandle: `seedHandle456`, +} + describe(`Electric Integration`, () => { let collection: ElectricCollection let subscriber: (messages: Array>) => void @@ -398,4 +418,409 @@ describe(`Electric Integration`, () => { expect(metadata).toHaveProperty(`primaryKey`) expect(metadata.primaryKey).toEqual([`id`]) }) + + // Tests for initial data functionality + describe(`initial data support`, () => { + it(`should accept initial data during construction`, () => { + const initialData = { + data: [ + { + key: `user1`, + value: { id: 1, name: `Alice` }, + metadata: { source: `server` }, + }, + { + key: `user2`, + value: { id: 2, name: `Bob` }, + metadata: { source: `server` }, + }, + ], + txids: [100, 101], + schema: `public`, + lastOffset: `1234567890`, + shapeHandle: `shape_abc123`, + } + + const collection = createElectricCollection({ + id: `test-with-initial-data`, + streamOptions: { + url: `http://test-url`, + params: { table: `users` }, + }, + primaryKey: [`id`], + initialData, + }) + + // Should have initial data immediately available + expect(collection.state.size).toBe(2) + + // Check that the data is present (keys will be auto-generated) + const values = Array.from(collection.state.values()) + expect(values).toContainEqual({ id: 1, name: `Alice` }) + expect(values).toContainEqual({ id: 2, name: `Bob` }) + }) + + it(`should track txids from initial data`, async () => { + const initialData = { + data: [{ key: `user1`, value: { id: 1, name: `Alice` } }], + txids: [555, 556], + schema: `public`, + lastOffset: `1234567890`, + shapeHandle: `shape_abc123`, + } + + const collection = createElectricCollection({ + id: `test-txids`, + streamOptions: { + url: `http://test-url`, + params: { table: `users` }, + }, + primaryKey: [`id`], + initialData, + }) + + // Should have txids from initial data immediately available + await expect(collection.awaitTxId(555)).resolves.toBe(true) + await expect(collection.awaitTxId(556)).resolves.toBe(true) + }) + + it(`should resume from lastOffset and shapeHandle in stream options`, async () => { + // Get the actual mock from vitest + const electricModule = await import(`@electric-sql/client`) + const ShapeStreamMock = vi.mocked(electricModule.ShapeStream) + + const initialData = { + data: [], + txids: [], + lastOffset: `resume_offset_123`, + shapeHandle: `shape_handle_abc`, + } + + createElectricCollection({ + id: `test-resume`, + streamOptions: { + url: `http://test-url`, + params: { table: `users` }, + }, + primaryKey: [`id`], + initialData, + }) + + // Verify ShapeStream was constructed with resume options + expect(ShapeStreamMock).toHaveBeenCalledWith( + expect.objectContaining({ + url: `http://test-url`, + params: { table: `users` }, + offset: `resume_offset_123`, + shapeHandle: `shape_handle_abc`, + }) + ) + }) + + it(`should have proper object key mappings for initial data`, () => { + const initialData = { + data: [ + { key: `user1`, value: { id: 1, name: `Alice` } }, + { key: `user2`, value: { id: 2, name: `Bob` } }, + ], + txids: [100], + schema: `public`, + } + + const collection = createElectricCollection({ + id: `test-key-mapping`, + streamOptions: { + url: `http://test-url`, + params: { table: `users` }, + }, + primaryKey: [`id`], + initialData, + }) + + // Verify object key mappings are set correctly + const alice = 
Array.from(collection.state.values()).find( + (item) => item.name === `Alice` + )! + const bob = Array.from(collection.state.values()).find( + (item) => item.name === `Bob` + )! + + expect(collection.objectKeyMap.get(alice)).toBeDefined() + expect(collection.objectKeyMap.get(bob)).toBeDefined() + + // The keys should be different + expect(collection.objectKeyMap.get(alice)).not.toBe( + collection.objectKeyMap.get(bob) + ) + }) + + it(`should handle empty initial data gracefully`, () => { + const initialData = { + data: [], + txids: [], + } + + const collection = createElectricCollection({ + id: `test-empty-initial`, + streamOptions: { + url: `http://test-url`, + params: { table: `users` }, + }, + primaryKey: [`id`], + initialData, + }) + + expect(collection.state.size).toBe(0) + }) + + it(`should work normally when no initial data is provided`, () => { + const collection = createElectricCollection({ + id: `test-no-initial`, + streamOptions: { + url: `http://test-url`, + params: { table: `users` }, + }, + primaryKey: [`id`], + }) + + expect(collection.state.size).toBe(0) + + // Should still handle sync messages normally + subscriber([ + { + key: `1`, + value: { id: 1, name: `Test User` }, + headers: { operation: `insert` }, + }, + { + headers: { control: `up-to-date` }, + }, + ]) + + expect(collection.state.get(`1`)).toEqual({ id: 1, name: `Test User` }) + }) + + it(`should merge incoming sync data with initial data correctly`, () => { + const initialData = { + data: [{ key: `user1`, value: { id: 1, name: `Alice` } }], + txids: [100], + lastOffset: `initial_offset`, + shapeHandle: `initial_handle`, + } + + const collection = createElectricCollection({ + id: `test-merge`, + streamOptions: { + url: `http://test-url`, + params: { table: `users` }, + }, + primaryKey: [`id`], + initialData, + }) + + // Should have initial data + const initialValues = Array.from(collection.state.values()) + expect(initialValues).toContainEqual({ id: 1, name: `Alice` }) + + // Sync new data from server + subscriber([ + { + key: `user2`, + value: { id: 2, name: `Bob` }, + headers: { operation: `insert` }, + }, + { + headers: { control: `up-to-date` }, + }, + ]) + + // Should have both initial and synced data + expect(collection.state.size).toBe(2) + const allValues = Array.from(collection.state.values()) + expect(allValues).toContainEqual({ id: 1, name: `Alice` }) + expect(allValues).toContainEqual({ id: 2, name: `Bob` }) + }) + + it(`should update existing initial data with sync changes`, () => { + const initialData = { + data: [{ key: `user1`, value: { id: 1, name: `Alice` } }], + txids: [100], + } + + const collection = createElectricCollection({ + id: `test-update`, + streamOptions: { + url: `http://test-url`, + params: { table: `users` }, + }, + primaryKey: [`id`], + initialData, + }) + + // Should have initial data + const initialValues = Array.from(collection.state.values()) + expect(initialValues).toContainEqual({ id: 1, name: `Alice` }) + + // Find the auto-generated key for the initial user + const aliceKey = Array.from(collection.state.entries()).find( + ([key, value]) => value.name === `Alice` + )?.[0] + + // Update the existing user via sync using the same auto-generated key + subscriber([ + { + key: aliceKey!, + value: { id: 1, name: `Alice Updated`, email: `alice@example.com` }, + headers: { operation: `update` }, + }, + { + headers: { control: `up-to-date` }, + }, + ]) + + // Should have updated data + const updatedValues = Array.from(collection.state.values()) + 
expect(updatedValues).toContainEqual({ + id: 1, + name: `Alice Updated`, + email: `alice@example.com`, + }) + }) + + it(`should handle schema from initial data`, () => { + const mockShapeStreamConstructor = vi.mocked( + require(`@electric-sql/client`).ShapeStream + ) + + const initialData = { + data: [], + txids: [], + schema: `custom_schema`, + } + + const collection = createElectricCollection({ + id: `test-schema`, + streamOptions: { + url: `http://test-url`, + params: { table: `users` }, + }, + primaryKey: [`id`], + initialData, + }) + + // Verify getSyncMetadata includes the schema from initial data + const metadata = collection.config.sync.getSyncMetadata?.() + expect(metadata?.relation).toEqual([`custom_schema`, `users`]) + }) + }) +}) + +describe(`Initial Data Seeding`, () => { + let collectionWithInitialData: ElectricCollection + + beforeEach(() => { + // Create collection with Electric configuration specifically for initial data tests + collectionWithInitialData = createElectricCollection({ + id: `test-initial-seed`, + streamOptions: { + url: `http://test-url`, + params: { + table: `seed_table`, // Use a distinct table name for these tests + }, + }, + primaryKey: [mockConstants.PRIMARY_KEY_COLUMN], + initialData: testInitialData, + }) + }) + + // Tests for seedFromElectricInitialData will go here + it(`should correctly seed syncedData from initialData`, () => { + const expectedSyncedData = new Map() + testInitialData.data.forEach((item: { key: string; value: Row }) => { + expectedSyncedData.set(item.key, item.value) + }) + expect(collectionWithInitialData.syncedData.state).toEqual( + expectedSyncedData + ) + }) + + it(`should correctly seed syncedMetadata from initialData`, () => { + const expectedSyncedMetadata = new Map() + const defaultSyncMetadata = { + primaryKey: [mockConstants.PRIMARY_KEY_COLUMN], + relation: [testInitialData.schema, `seed_table`], // schema from initialData, table from streamOptions + } + testInitialData.data.forEach( + (item: { + key: string + value: Row + metadata?: Record + }) => { + expectedSyncedMetadata.set(item.key, { + ...defaultSyncMetadata, + ...item.metadata, + }) + } + ) + expect(collectionWithInitialData.syncedMetadata.state).toEqual( + expectedSyncedMetadata + ) + }) + + it(`should correctly populate objectKeyMap from initialData`, () => { + const expectedObjectKeyMapEntries = testInitialData.data.map( + (item: { key: string; value: Row }) => + [item.value, item.key] as [Row, string] + ) + const actualObjectKeyMap = (collectionWithInitialData as any) + .objectKeyMap as WeakMap + + // Check if all expected entries are present in the WeakMap + expectedObjectKeyMapEntries.forEach(([value, key]) => { + expect(actualObjectKeyMap.has(value)).toBe(true) + expect(actualObjectKeyMap.get(value)).toBe(key) + }) + + // Optionally, verify the size if possible and makes sense for WeakMap (though not directly possible) + // For a more thorough check, one might need to iterate over the known objects that were inserted. + // However, WeakMap's nature is that it doesn't prevent its keys (objects) from being garbage collected, + // so checking size or iterating isn't as straightforward as with a Map. + // The above check (all expected items are there) is usually sufficient. + }) + + it(`should call onFirstCommit when initialData is provided`, () => { + // onFirstCommit is called internally during the seeding process. + // A direct spy is hard due to its private nature and immediate invocation. 
+ // This test primarily ensures that the collection setup completes without error, + // implying onFirstCommit was called as part of the seeding. + // A more robust test might involve checking a side effect of onFirstCommit if one exists + // or temporarily making it more testable. + expect(collectionWithInitialData).toBeDefined() + // We can also check if the collection considers itself 'committed' or 'synced' + // if such a public state exists and is set by onFirstCommit during seeding. + // For now, ensuring no error during setup is the main check. + }) + + it(`should track txids from initialData`, async () => { + // Ensure txids from initialData are defined and available before testing + const firstTxid = testInitialData.txids[0] + if (firstTxid !== undefined) { + await expect( + collectionWithInitialData.awaitTxId(firstTxid) + ).resolves.toBe(true) + } + + const secondTxid = testInitialData.txids[1] + if (secondTxid !== undefined) { + await expect( + collectionWithInitialData.awaitTxId(secondTxid) + ).resolves.toBe(true) + } + // Attempt to await a txid not in initialData to ensure it doesn't resolve immediately + const unknownTxid = 123456789 + const promise = collectionWithInitialData.awaitTxId(unknownTxid, 50) // Short timeout + await expect(promise).rejects.toThrow( + `Timeout waiting for txId: ${unknownTxid}` + ) + }) }) diff --git a/packages/db/src/collection.ts b/packages/db/src/collection.ts index f6a43d38..2d37a128 100644 --- a/packages/db/src/collection.ts +++ b/packages/db/src/collection.ts @@ -344,6 +344,11 @@ export class Collection> { this.derivedState.mount() + // Seed with initial data if provided + if (config.initialData && config.initialData.length > 0) { + this.seedFromInitialData(config.initialData) + } + // Start the sync process config.sync.sync({ collection: this, @@ -389,6 +394,52 @@ export class Collection> { }) } + /** + * Seeds the collection with initial data from server-side rendering + * @param items Array of items to seed the collection with + */ + private seedFromInitialData(items: Array): void { + this.hasReceivedFirstCommit = true + + const keys = new Set() + + batch(() => { + items.forEach((item, index) => { + // Validate the data against the schema if one exists + const validatedData = this.validateData(item, `insert`) + + // Generate unique key even for identical objects by including index + const key = `${this.generateKey(item)}_${index}` + keys.add(key) + + this.syncedKeys.add(key) + + this.syncedData.setState((prevData) => { + const newData = new Map(prevData) + newData.set(key, validatedData) + return newData + }) + + this.syncedMetadata.setState((prevData) => { + const newData = new Map(prevData) + newData.set(key, this.config.sync.getSyncMetadata?.() || {}) + return newData + }) + }) + }) + + keys.forEach((key) => { + const value = this.syncedData.state.get(key) + if (value) { + this.objectKeyMap.set(value, key) + } + }) + + const callbacks = [...this.onFirstCommitCallbacks] + this.onFirstCommitCallbacks = [] + callbacks.forEach((callback) => callback()) + } + /** * Attempts to commit pending synced transactions if there are no active transactions * This method processes operations from pending transactions and applies them to the synced data diff --git a/packages/db/src/types.ts b/packages/db/src/types.ts index f381b322..8584a7d0 100644 --- a/packages/db/src/types.ts +++ b/packages/db/src/types.ts @@ -118,6 +118,11 @@ export interface CollectionConfig> { id: string sync: SyncConfig schema?: StandardSchema + /** + * Initial data for 
server-side rendering + * Allows hydration from server-loaded data + */ + initialData?: Array } export type ChangesPayload> = Array< diff --git a/packages/db/tests/collection.test.ts b/packages/db/tests/collection.test.ts index 2d3f6917..17126830 100644 --- a/packages/db/tests/collection.test.ts +++ b/packages/db/tests/collection.test.ts @@ -626,3 +626,361 @@ describe(`Collection with schema validation`, () => { } }) }) + +// Tests for initial data functionality +describe(`initial data support`, () => { + it(`should accept initial data during construction`, () => { + const initialData = [ + { id: 1, name: `Alice`, email: `alice@example.com` }, + { id: 2, name: `Bob`, email: `bob@example.com` }, + { id: 3, name: `Charlie`, email: `charlie@example.com` }, + ] + + const collection = new Collection<{ + id: number + name: string + email: string + }>({ + id: `test-with-initial-data`, + sync: { + sync: () => { + // No-op sync for this test + }, + }, + initialData, + }) + + // Should have initial data immediately available + expect(collection.state.size).toBe(3) + + // Check each item was added with correct generated key + const items = Array.from(collection.state.values()) + expect(items).toContainEqual({ + id: 1, + name: `Alice`, + email: `alice@example.com`, + }) + expect(items).toContainEqual({ + id: 2, + name: `Bob`, + email: `bob@example.com`, + }) + expect(items).toContainEqual({ + id: 3, + name: `Charlie`, + email: `charlie@example.com`, + }) + }) + + it(`should have proper object key mappings for initial data`, () => { + const initialData = [ + { id: 1, name: `Alice` }, + { id: 2, name: `Bob` }, + ] + + const collection = new Collection<{ id: number; name: string }>({ + id: `test-key-mapping`, + sync: { + sync: () => {}, + }, + initialData, + }) + + // Verify object key mappings are set correctly + const alice = Array.from(collection.state.values()).find( + (item) => item.name === `Alice` + )! + const bob = Array.from(collection.state.values()).find( + (item) => item.name === `Bob` + )! 
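+      // Note: objectKeyMap is a WeakMap from each stored object reference to its
+      // generated key, so items that look identical still resolve to distinct keys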
+ + expect(collection.objectKeyMap.get(alice)).toBeDefined() + expect(collection.objectKeyMap.get(bob)).toBeDefined() + + // The keys should be different + expect(collection.objectKeyMap.get(alice)).not.toBe( + collection.objectKeyMap.get(bob) + ) + }) + + it(`should mark hasReceivedFirstCommit as true when seeded with initial data`, async () => { + const initialData = [{ id: 1, name: `Test` }] + + const collection = new Collection<{ id: number; name: string }>({ + id: `test-first-commit`, + sync: { + sync: () => {}, + }, + initialData, + }) + + // Should resolve immediately since first commit already happened with initial data + const state = await collection.stateWhenReady() + expect(state.size).toBe(1) + }) + + it(`should trigger onFirstCommit callbacks for initial data`, () => { + const callback = vi.fn() + const initialData = [{ id: 1, name: `Test` }] + + const collection = new Collection<{ id: number; name: string }>({ + id: `test-callbacks`, + sync: { + sync: () => {}, + }, + initialData, + }) + + // Register callback after construction (should not be called since commit already happened) + collection.onFirstCommit(callback) + + // Since initial data already triggered first commit, new callbacks won't be called + expect(callback).not.toHaveBeenCalled() + }) + + it(`should resolve stateWhenReady immediately when seeded with initial data`, async () => { + const initialData = [{ id: 1, name: `Alice` }] + + const collection = new Collection<{ id: number; name: string }>({ + id: `test-ready`, + sync: { + sync: () => {}, + }, + initialData, + }) + + // Should resolve immediately since we have initial data + const state = await collection.stateWhenReady() + expect(state.size).toBe(1) + expect(Array.from(state.values())[0]).toEqual({ id: 1, name: `Alice` }) + }) + + it(`should resolve toArrayWhenReady immediately when seeded with initial data`, async () => { + const initialData = [ + { id: 1, name: `Alice` }, + { id: 2, name: `Bob` }, + ] + + const collection = new Collection<{ id: number; name: string }>({ + id: `test-array-ready`, + sync: { + sync: () => {}, + }, + initialData, + }) + + // Should resolve immediately since we have initial data + const items = await collection.toArrayWhenReady() + expect(items.length).toBe(2) + expect(items).toContainEqual({ id: 1, name: `Alice` }) + expect(items).toContainEqual({ id: 2, name: `Bob` }) + }) + + it(`should handle empty initial data gracefully`, () => { + const collection = new Collection<{ id: number; name: string }>({ + id: `test-empty-initial`, + sync: { + sync: () => {}, + }, + initialData: [], + }) + + expect(collection.state.size).toBe(0) + // Should have marked as having received first commit even with empty data + expect(collection.state.size).toBe(0) + }) + + it(`should work normally when no initial data is provided`, () => { + const collection = new Collection<{ id: number; name: string }>({ + id: `test-no-initial`, + sync: { + sync: ({ begin, write, commit }) => { + begin() + write({ + type: `insert`, + key: `user1`, + value: { id: 1, name: `Synced User` }, + }) + commit() + }, + }, + }) + + expect(collection.state.size).toBe(1) + expect(collection.state.get(`user1`)).toEqual({ + id: 1, + name: `Synced User`, + }) + }) + + it(`should merge sync data with initial data correctly`, () => { + const initialData = [{ id: 1, name: `Alice` }] + + const collection = new Collection<{ id: number; name: string }>({ + id: `test-merge`, + sync: { + sync: ({ begin, write, commit }) => { + begin() + write({ + type: `insert`, + key: `user2`, + 
value: { id: 2, name: `Synced Bob` }, + }) + commit() + }, + }, + initialData, + }) + + // Should have both initial and synced data + expect(collection.state.size).toBe(2) + + const values = Array.from(collection.state.values()) + expect(values).toContainEqual({ id: 1, name: `Alice` }) + expect(values).toContainEqual({ id: 2, name: `Synced Bob` }) + }) + + it(`should handle mutations on initial data`, async () => { + const initialData = [{ id: 1, name: `Alice`, value: `initial` }] + + const collection = new Collection<{ + id: number + name: string + value: string + }>({ + id: `test-mutations`, + sync: { + sync: () => {}, + }, + initialData, + }) + + const mutationFn: MutationFn = () => { + // Mock successful persistence + return Promise.resolve() + } + + // Find the initial item + const alice = Array.from(collection.state.values()).find( + (item) => item.name === `Alice` + )! + + // Create transaction and update the item + const tx = createTransaction({ mutationFn }) + tx.mutate(() => { + collection.update(alice, (draft) => { + draft.value = `updated` + }) + }) + + // The optimistic update should be visible immediately (before persistence) + const updatedAlice = Array.from(collection.state.values()).find( + (item) => item.name === `Alice` + )! + expect(updatedAlice.value).toBe(`updated`) + + await tx.isPersisted.promise + }) + + it(`should include initial data in currentStateAsChanges`, () => { + const initialData = [ + { id: 1, name: `Alice` }, + { id: 2, name: `Bob` }, + ] + + const collection = new Collection<{ id: number; name: string }>({ + id: `test-changes`, + sync: { + sync: () => {}, + }, + initialData, + }) + + const changes = collection.currentStateAsChanges() + expect(changes.length).toBe(2) + + // All changes should be inserts + expect(changes.every((change) => change.type === `insert`)).toBe(true) + + // Values should match initial data + const values = changes.map((change) => change.value) + expect(values).toContainEqual({ id: 1, name: `Alice` }) + expect(values).toContainEqual({ id: 2, name: `Bob` }) + }) + + it(`should emit initial data through subscribeChanges`, () => { + const changeCallback = vi.fn() + const initialData = [{ id: 1, name: `Alice` }] + + const collection = new Collection<{ id: number; name: string }>({ + id: `test-subscribe`, + sync: { + sync: () => {}, + }, + initialData, + }) + + // Subscribe to changes + collection.subscribeChanges(changeCallback) + + // Should have been called with initial data + expect(changeCallback).toHaveBeenCalledTimes(1) + const changes = changeCallback.mock.calls[0]![0] + expect(changes.length).toBe(1) + expect(changes[0].type).toBe(`insert`) + expect(changes[0].value).toEqual({ id: 1, name: `Alice` }) + }) + + it(`should generate different keys for identical objects in initial data`, () => { + const initialData = [ + { name: `Duplicate` }, + { name: `Duplicate` }, + { name: `Duplicate` }, + ] + + const collection = new Collection<{ name: string }>({ + id: `test-duplicate-keys`, + sync: { + sync: () => {}, + }, + initialData, + }) + + // Should have all 3 items despite being identical (due to index suffix) + expect(collection.state.size).toBe(3) + + // All keys should be different + const keys = Array.from(collection.state.keys()) + expect(new Set(keys).size).toBe(3) + }) + + it(`should respect schema validation for initial data if provided`, () => { + const schema = z.object({ + id: z.number(), + name: z.string().min(1), + email: z.string().email(), + }) + + // This should work fine with valid data + expect(() => { + new 
Collection<{ id: number; name: string; email: string }>({ + id: `test-valid-schema`, + sync: { sync: () => {} }, + schema, + initialData: [{ id: 1, name: `Alice`, email: `alice@example.com` }], + }) + }).not.toThrow() + + // This should throw with invalid data + expect(() => { + new Collection<{ id: number; name: string; email: string }>({ + id: `test-invalid-schema`, + sync: { sync: () => {} }, + schema, + initialData: [ + { id: `not-a-number` as any, name: ``, email: `not-an-email` }, + ], + }) + }).toThrow(SchemaValidationError) + }) +})
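
Below is a minimal usage sketch of the SSR hydration flow this patch introduces (`initialData`, `dehydrate`/`dehydrateCollections`, and `hydrate`). The `loadTodosSnapshot` helper, the shape URL, and the `Todo` type are illustrative assumptions for the Next.js todo example; only the collection APIs themselves come from this change.

```ts
import {
  createElectricCollection,
  dehydrateCollections,
  type ElectricInitialData,
} from "@tanstack/db-collections"

// Illustrative row type for the example app's todos table
type Todo = { id: number; text: string; completed: boolean }

// --- Server (e.g. a Next.js server component or route handler) ---
// Hypothetical helper: however the server gathers the current rows plus the
// shape offset/handle/txids, it returns them in the ElectricInitialData shape
// so they can be serialized and passed to the client.
declare function loadTodosSnapshot(): Promise<ElectricInitialData<Todo>>

// --- Client ---
// Seeding a collection from the server snapshot: synced state is populated
// synchronously, and the ShapeStream resumes from initialData.lastOffset /
// initialData.shapeHandle when they are present.
export function createTodoCollection(initialData?: ElectricInitialData<Todo>) {
  return createElectricCollection<Todo>({
    id: `todos`,
    streamOptions: {
      url: `http://localhost:3003/v1/shape`, // Electric port from docker-compose
      params: { table: `todos` },
    },
    primaryKey: [`id`],
    initialData,
  })
}

// Handing state back off (caching, or serializing again on the server):
// const dehydrated = dehydrateCollections({ todos: todoCollection })
// ...and later, merging it into a live collection:
// todoCollection.hydrate(dehydrated.todos)
```

The base `Collection` accepts a plain `initialData?: Array<T>` in the same way, which is what the new collection.test.ts cases above exercise directly.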