diff --git a/packages/db/CHANGELOG.md b/packages/db/CHANGELOG.md
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/packages/db/components/Renderer.astro b/packages/db/components/Renderer.astro
new file mode 100644
index 0000000000..7cdc0c62bf
--- /dev/null
+++ b/packages/db/components/Renderer.astro
@@ -0,0 +1,14 @@
+---
+import { Renderer as MarkdocRenderer } from '@astrojs/markdoc/components';
+import { Markdoc } from '@astrojs/markdoc/config';
+
+interface Props {
+ content: string;
+}
+
+const { content } = Astro.props;
+
+const ast = Markdoc.parse(content);
+---
+
+<MarkdocRenderer config={{}} stringifiedAst={JSON.stringify(ast)} />
diff --git a/packages/db/components/astro-env.d.ts b/packages/db/components/astro-env.d.ts
new file mode 100644
index 0000000000..f964fe0cff
--- /dev/null
+++ b/packages/db/components/astro-env.d.ts
@@ -0,0 +1 @@
+/// <reference types="astro/client" />
diff --git a/packages/db/components/index.ts b/packages/db/components/index.ts
new file mode 100644
index 0000000000..3c9b6a600a
--- /dev/null
+++ b/packages/db/components/index.ts
@@ -0,0 +1,2 @@
+// @ts-expect-error: missing types
+export { default as Renderer } from './Renderer.astro';
diff --git a/packages/db/components/tsconfig.json b/packages/db/components/tsconfig.json
new file mode 100644
index 0000000000..107fc42fc7
--- /dev/null
+++ b/packages/db/components/tsconfig.json
@@ -0,0 +1,7 @@
+{
+ "extends": "astro/tsconfigs/strict",
+ "include": ["."],
+ "compilerOptions": {
+ "strictNullChecks": true
+ }
+}
diff --git a/packages/db/package.json b/packages/db/package.json
new file mode 100644
index 0000000000..6a9ae30b02
--- /dev/null
+++ b/packages/db/package.json
@@ -0,0 +1,76 @@
+{
+ "name": "@astrojs/db",
+ "version": "0.1.3",
+ "description": "",
+ "type": "module",
+ "license": "UNLICENSED",
+ "types": "./dist/index.d.ts",
+ "exports": {
+ ".": {
+ "types": "./dist/index.d.ts",
+ "import": "./dist/index.js"
+ },
+ "./components": "./components/index.ts",
+ "./internal": {
+ "types": "./dist/internal.d.ts",
+ "import": "./dist/internal.js"
+ },
+ "./internal-local": {
+ "types": "./dist/internal-local.d.ts",
+ "import": "./dist/internal-local.js"
+ }
+ },
+ "typesVersions": {
+ "*": {
+ ".": [
+ "./dist/index.d.ts"
+ ],
+ "internal": [
+ "./dist/internal.d.ts"
+ ]
+ }
+ },
+ "files": [
+ "dist",
+ "components"
+ ],
+ "scripts": {
+ "build": "astro-scripts build \"src/**/*.ts\" && tsc --project tsconfig.build.json",
+ "typecheck": "tsc -b",
+ "prepack": "pnpm run build",
+ "dev": "pnpm /^dev:/",
+ "dev:build": "astro-scripts dev \"src/**/*.ts\"",
+ "dev:types": "tsc --project tsconfig.build.json --watch",
+ "test": "pnpm run build && mocha test/**/*.js",
+ "test:match": "mocha test/**/*.js -g"
+ },
+ "dependencies": {
+ "@astrojs/cli-kit": "^0.3.0",
+ "@astrojs/markdoc": "^0.5.2",
+ "@libsql/client": "0.4.0-pre.5",
+ "astro": "^3.6.0",
+ "better-sqlite3": "^8.7.0",
+ "circle-rhyme-yes-measure": "workspace:^",
+ "drizzle-orm": "^0.28.6",
+ "esbuild": "^0.19.4",
+ "github-slugger": "^2.0.0",
+ "kleur": "^4.1.5",
+ "miniflare": "^3.20231002.1",
+ "nanoid": "^5.0.1",
+ "open": "^9.1.0",
+ "ora": "^7.0.1",
+ "prompts": "^2.4.2",
+ "yargs-parser": "^21.1.1",
+ "zod": "^3.22.4"
+ },
+ "devDependencies": {
+ "@types/chai": "^4.3.6",
+ "@types/mocha": "^10.0.2",
+ "@types/prompts": "^2.4.5",
+ "@types/yargs-parser": "^21.0.1",
+ "chai": "^4.3.10",
+ "mocha": "^10.2.0",
+ "typescript": "^5.2.2",
+ "vite": "^4.4.11"
+ }
+}
diff --git a/packages/db/src/config.ts b/packages/db/src/config.ts
new file mode 100644
index 0000000000..8514c43bcf
--- /dev/null
+++ b/packages/db/src/config.ts
@@ -0,0 +1,56 @@
+import {
+ type BooleanField,
+ type DBFieldInput,
+ type DateFieldInput,
+ type JsonField,
+ type NumberField,
+ type TextField,
+ type collectionSchema,
+ collectionsSchema,
+} from 'circle-rhyme-yes-measure';
+import { z } from 'zod';
+
+export const adjustedConfigSchema = z.object({
+ collections: collectionsSchema.optional(),
+});
+
+export type DBUserConfig = z.input<typeof adjustedConfigSchema>;
+
+export const astroConfigWithDBValidator = z.object({
+ db: adjustedConfigSchema.optional(),
+});
+
+export function defineCollection(
+ userConfig: z.input<typeof collectionSchema>,
+): z.input<typeof collectionSchema> {
+ return userConfig;
+}
+
+export type AstroConfigWithDB = z.infer<typeof astroConfigWithDBValidator>;
+
+type FieldOpts<T extends DBFieldInput = DBFieldInput> = Omit<T, 'type'>;
+
+const baseDefaults = {
+ optional: false,
+ unique: false,
+ label: undefined,
+ default: undefined,
+};
+
+export const field = {
+ number(opts: FieldOpts<NumberField> = {}): NumberField {
+ return { type: 'number', ...baseDefaults, ...opts };
+ },
+ boolean(opts: FieldOpts<BooleanField> = {}): BooleanField {
+ return { type: 'boolean', ...baseDefaults, ...opts };
+ },
+ text(opts: FieldOpts<TextField> = {}): TextField {
+ return { type: 'text', multiline: false, ...baseDefaults, ...opts };
+ },
+ date(opts: FieldOpts<DateFieldInput> = {}): DateFieldInput {
+ return { type: 'date', ...baseDefaults, ...opts };
+ },
+ json(opts: FieldOpts<JsonField> = {}): JsonField {
+ return { type: 'json', ...baseDefaults, ...opts };
+ },
+};
diff --git a/packages/db/src/error-map.ts b/packages/db/src/error-map.ts
new file mode 100644
index 0000000000..d2697c9caf
--- /dev/null
+++ b/packages/db/src/error-map.ts
@@ -0,0 +1,104 @@
+/**
+ * This is a modified version of Astro's error map. source:
+ * https://github.com/withastro/astro/blob/main/packages/astro/src/content/error-map.ts
+ */
+import type { z } from 'astro/zod';
+
+interface TypeOrLiteralErrByPathEntry {
+ code: 'invalid_type' | 'invalid_literal';
+ received: unknown;
+ expected: unknown[];
+}
+
+export const errorMap: z.ZodErrorMap = (baseError, ctx) => {
+ const baseErrorPath = flattenErrorPath(baseError.path);
+ if (baseError.code === 'invalid_union') {
+ // Optimization: Combine type and literal errors for keys that are common across ALL union types
+ // Ex. a union between `{ key: z.literal('tutorial') }` and `{ key: z.literal('blog') }` will
+ // raise a single error when `key` does not match:
+ // > Did not match union.
+ // > key: Expected `'tutorial' | 'blog'`, received 'foo'
+ const typeOrLiteralErrByPath = new Map<string, TypeOrLiteralErrByPathEntry>();
+ for (const unionError of baseError.unionErrors.flatMap((e) => e.errors)) {
+ if (unionError.code === 'invalid_type' || unionError.code === 'invalid_literal') {
+ const flattenedErrorPath = flattenErrorPath(unionError.path);
+ const typeOrLiteralErr = typeOrLiteralErrByPath.get(flattenedErrorPath);
+ if (typeOrLiteralErr) {
+ typeOrLiteralErr.expected.push(unionError.expected);
+ } else {
+ typeOrLiteralErrByPath.set(flattenedErrorPath, {
+ code: unionError.code,
+ received: (unionError as any).received,
+ expected: [unionError.expected],
+ });
+ }
+ }
+ }
+ const messages: string[] = [
+ prefix(
+ baseErrorPath,
+ typeOrLiteralErrByPath.size ? 'Did not match union:' : 'Did not match union.',
+ ),
+ ];
+ return {
+ message: messages
+ .concat(
+ [...typeOrLiteralErrByPath.entries()]
+ // If type or literal error isn't common to ALL union types,
+ // filter it out. Can lead to confusing noise.
+ .filter(([, error]) => error.expected.length === baseError.unionErrors.length)
+ .map(([key, error]) =>
+ // Avoid printing the key again if it's a base error
+ key === baseErrorPath
+ ? `> ${getTypeOrLiteralMsg(error)}`
+ : `> ${prefix(key, getTypeOrLiteralMsg(error))}`,
+ ),
+ )
+ .join('\n'),
+ };
+ }
+ if (baseError.code === 'invalid_literal' || baseError.code === 'invalid_type') {
+ return {
+ message: prefix(
+ baseErrorPath,
+ getTypeOrLiteralMsg({
+ code: baseError.code,
+ received: (baseError as any).received,
+ expected: [baseError.expected],
+ }),
+ ),
+ };
+ } else if (baseError.message) {
+ return { message: prefix(baseErrorPath, baseError.message) };
+ } else {
+ return { message: prefix(baseErrorPath, ctx.defaultError) };
+ }
+};
+
+const getTypeOrLiteralMsg = (error: TypeOrLiteralErrByPathEntry): string => {
+ if (error.received === 'undefined') return 'Required';
+ const expectedDeduped = new Set(error.expected);
+ switch (error.code) {
+ case 'invalid_type':
+ return `Expected type \`${unionExpectedVals(expectedDeduped)}\`, received ${JSON.stringify(
+ error.received,
+ )}`;
+ case 'invalid_literal':
+ return `Expected \`${unionExpectedVals(expectedDeduped)}\`, received ${JSON.stringify(
+ error.received,
+ )}`;
+ }
+};
+
+const prefix = (key: string, msg: string) => (key.length ? `**${key}**: ${msg}` : msg);
+
+const unionExpectedVals = (expectedVals: Set<unknown>) =>
+ [...expectedVals]
+ .map((expectedVal, idx) => {
+ if (idx === 0) return JSON.stringify(expectedVal);
+ const sep = ' | ';
+ return `${sep}${JSON.stringify(expectedVal)}`;
+ })
+ .join('');
+
+const flattenErrorPath = (errorPath: Array<string | number>) => errorPath.join('.');
diff --git a/packages/db/src/index.ts b/packages/db/src/index.ts
new file mode 100644
index 0000000000..662ec7698c
--- /dev/null
+++ b/packages/db/src/index.ts
@@ -0,0 +1 @@
+export { defineCollection, field } from './config.js';
diff --git a/packages/db/src/internal-local.ts b/packages/db/src/internal-local.ts
new file mode 100644
index 0000000000..2d9fdf2b92
--- /dev/null
+++ b/packages/db/src/internal-local.ts
@@ -0,0 +1,25 @@
+import { createClient } from '@libsql/client';
+import type { DBCollections } from 'circle-rhyme-yes-measure';
+import { type SQL, sql } from 'drizzle-orm';
+import { LibSQLDatabase, drizzle } from 'drizzle-orm/libsql';
+import { getCreateTableQuery } from './cli/sync/queries.js';
+
+export async function createLocalDb(collections: DBCollections) {
+ const client = createClient({ url: ':memory:' });
+ const db = drizzle(client);
+
+ await createDbTables(db, collections);
+ return db;
+}
+
+async function createDbTables(db: LibSQLDatabase, collections: DBCollections) {
+ const setupQueries: SQL[] = [];
+ for (const [name, collection] of Object.entries(collections)) {
+ const dropQuery = sql.raw(`DROP TABLE IF EXISTS ${name}`);
+ const createQuery = sql.raw(getCreateTableQuery(name, collection));
+ setupQueries.push(dropQuery, createQuery);
+ }
+ for (const q of setupQueries) {
+ await db.run(q);
+ }
+}
diff --git a/packages/db/src/internal.ts b/packages/db/src/internal.ts
new file mode 100644
index 0000000000..fdd92ba359
--- /dev/null
+++ b/packages/db/src/internal.ts
@@ -0,0 +1,112 @@
+import type { ColumnBaseConfig, ColumnDataType } from 'drizzle-orm';
+import type { SQLiteColumn, SQLiteTableWithColumns, TableConfig } from 'drizzle-orm/sqlite-core';
+import type { SqliteRemoteDatabase } from 'drizzle-orm/sqlite-proxy';
+
+export { collectionToTable, createDb } from 'circle-rhyme-yes-measure';
+
+export {
+ sql,
+ eq,
+ gt,
+ gte,
+ lt,
+ lte,
+ ne,
+ isNull,
+ isNotNull,
+ inArray,
+ notInArray,
+ exists,
+ notExists,
+ between,
+ notBetween,
+ like,
+ notIlike,
+ not,
+ asc,
+ desc,
+ and,
+ or,
+} from 'drizzle-orm';
+export type SqliteDB = SqliteRemoteDatabase;
+
+export type AstroTable<T extends Pick<TableConfig, 'name' | 'columns'>> = SQLiteTableWithColumns<
+ T & {
+ schema: undefined;
+ dialect: 'sqlite';
+ }
+>;
+
+type GeneratedConfig<T extends ColumnDataType = ColumnDataType> = Pick<
+ ColumnBaseConfig<T, string>,
+ 'name' | 'tableName' | 'notNull' | 'hasDefault'
+>;
+
+export type AstroText<T extends GeneratedConfig> = SQLiteColumn<
+ T & {
+ data: string;
+ dataType: 'string';
+ columnType: 'SQLiteText';
+ driverParam: string;
+ enumValues: never;
+ baseColumn: never;
+ }
+>;
+
+export type AstroDate<T extends GeneratedConfig> = SQLiteColumn<
+ T & {
+ data: Date;
+ dataType: 'custom';
+ columnType: 'SQLiteCustomColumn';
+ driverParam: string;
+ enumValues: never;
+ baseColumn: never;
+ }
+>;
+
+export type AstroBoolean<T extends GeneratedConfig> = SQLiteColumn<
+ T & {
+ data: boolean;
+ dataType: 'boolean';
+ columnType: 'SQLiteBoolean';
+ driverParam: number;
+ enumValues: never;
+ baseColumn: never;
+ }
+>;
+
+export type AstroNumber<T extends GeneratedConfig> = SQLiteColumn<
+ T & {
+ data: number;
+ dataType: 'number';
+ columnType: 'SQLiteInteger';
+ driverParam: number;
+ enumValues: never;
+ baseColumn: never;
+ }
+>;
+
+export type AstroJson<T extends GeneratedConfig> = SQLiteColumn<
+ T & {
+ data: unknown;
+ dataType: 'custom';
+ columnType: 'SQLiteCustomColumn';
+ driverParam: string;
+ enumValues: never;
+ baseColumn: never;
+ }
+>;
+
+export type AstroId<T extends Pick<GeneratedConfig, 'tableName'>> = SQLiteColumn<
+ T & {
+ name: 'id';
+ hasDefault: true;
+ notNull: true;
+ data: string;
+ dataType: 'custom';
+ columnType: 'SQLiteCustomColumn';
+ driverParam: string;
+ enumValues: never;
+ baseColumn: never;
+ }
+>;
diff --git a/packages/db/src/vite-plugin-db.ts b/packages/db/src/vite-plugin-db.ts
new file mode 100644
index 0000000000..52844607cc
--- /dev/null
+++ b/packages/db/src/vite-plugin-db.ts
@@ -0,0 +1,101 @@
+import { existsSync } from 'node:fs';
+import { fileURLToPath } from 'node:url';
+import type { DBCollections } from 'circle-rhyme-yes-measure';
+import { red } from 'kleur/colors';
+import {
+ INTERNAL_LOCAL_PKG_IMP,
+ INTERNAL_PKG_IMP,
+ ROOT,
+ SUPPORTED_SEED_FILES,
+ VIRTUAL_MODULE_ID,
+ drizzleFilterExps,
+} from './consts.js';
+import type { VitePlugin } from './utils.js';
+
+const resolvedVirtualModuleId = '\0' + VIRTUAL_MODULE_ID;
+
+type Opts = { mode: 'dev' } | { mode: 'prod'; projectId: string; token: string };
+
+export function vitePluginDb(collections: DBCollections, opts: Opts): VitePlugin {
+ return {
+ name: 'astro:db',
+ enforce: 'pre',
+ resolveId(id) {
+ if (id === VIRTUAL_MODULE_ID) {
+ return resolvedVirtualModuleId;
+ }
+ },
+ load(id) {
+ if (id !== resolvedVirtualModuleId) return;
+
+ if (opts.mode === 'dev') {
+ return getLocalVirtualModuleContents({ collections });
+ }
+
+ return getProdVirtualModuleContents({
+ collections,
+ projectId: opts.projectId,
+ appToken: opts.token,
+ });
+ },
+ };
+}
+
+const seedErrorMessage = `${red(
+ '⚠️ Failed to seed data.',
+)} Is the seed file out-of-date with recent schema changes?`;
+
+export function getLocalVirtualModuleContents({ collections }: { collections: DBCollections }) {
+ const seedFile = SUPPORTED_SEED_FILES.map((f) => fileURLToPath(new URL(f, ROOT))).find((f) =>
+ existsSync(f),
+ );
+ return `
+import { collectionToTable } from ${INTERNAL_PKG_IMP};
+import { createLocalDb } from ${INTERNAL_LOCAL_PKG_IMP};
+
+export const db = await createLocalDb(${JSON.stringify(collections)});
+${drizzleFilterExps}
+
+${getStringifiedCollectionExports(collections)}
+
+${
+ seedFile
+ ? `try {
+ await import(${JSON.stringify(seedFile)});
+} catch {
+ console.error(${JSON.stringify(seedErrorMessage)});
+}`
+ : ''
+}
+`;
+}
+
+export function getProdVirtualModuleContents({
+ collections,
+ projectId,
+ appToken,
+}: {
+ collections: DBCollections;
+ projectId: string;
+ appToken: string;
+}) {
+ return `
+import { collectionToTable, createDb } from ${INTERNAL_PKG_IMP};
+
+export const db = createDb(${JSON.stringify(projectId)}, ${JSON.stringify(appToken)});
+${drizzleFilterExps}
+
+${getStringifiedCollectionExports(collections)}
+`;
+}
+
+function getStringifiedCollectionExports(collections: DBCollections) {
+ return Object.entries(collections)
+ .map(
+ ([name, collection]) =>
+ `export const ${name} = collectionToTable(${JSON.stringify(name)}, ${JSON.stringify(
+ collection,
+ )}, false)`,
+ )
+ .join('\n');
+}
diff --git a/packages/db/src/vite-plugin-inject-env-ts.ts b/packages/db/src/vite-plugin-inject-env-ts.ts
new file mode 100644
index 0000000000..0d4b38660c
--- /dev/null
+++ b/packages/db/src/vite-plugin-inject-env-ts.ts
@@ -0,0 +1,54 @@
+import { existsSync } from 'node:fs';
+import { readFile, writeFile } from 'node:fs/promises';
+import path from 'node:path';
+import { fileURLToPath } from 'node:url';
+import type { AstroConfig } from 'astro';
+import { bold, cyan } from 'kleur/colors';
+import { normalizePath } from 'vite';
+import { DOT_ASTRO_DIR, DB_TYPES_FILE } from './consts.js';
+import type { VitePlugin } from './utils.js';
+
+export function getEnvTsPath({ srcDir }: { srcDir: URL }) {
+ return new URL('env.d.ts', srcDir);
+}
+
+export function vitePluginInjectEnvTs({ config }: { config: AstroConfig }): VitePlugin {
+ return {
+ name: 'db-inject-env-ts',
+ // Use `post` to ensure project setup is complete
+ // Ex. `.astro` types have been written
+ enforce: 'post',
+ async config() {
+ await setUpEnvTs({ config });
+ },
+ };
+}
+
+export async function setUpEnvTs({ config }: { config: AstroConfig }) {
+ const envTsPath = getEnvTsPath(config);
+ const envTsPathRelativetoRoot = normalizePath(
+ path.relative(fileURLToPath(config.root), fileURLToPath(envTsPath)),
+ );
+
+ if (existsSync(envTsPath)) {
+ let typesEnvContents = await readFile(envTsPath, 'utf-8');
+
+ if (!existsSync(DOT_ASTRO_DIR)) return;
+
+ const dbTypeReference = getDBTypeReference(config);
+
+ if (!typesEnvContents.includes(dbTypeReference)) {
+ typesEnvContents = `${dbTypeReference}\n${typesEnvContents}`;
+ await writeFile(envTsPath, typesEnvContents, 'utf-8');
+ console.info(`${cyan(bold('[astro:db]'))} Added ${bold(envTsPathRelativetoRoot)} types`);
+ }
+ }
+}
+
+function getDBTypeReference({ srcDir }: { srcDir: URL }) {
+ const contentTypesRelativeToSrcDir = normalizePath(
+ path.relative(fileURLToPath(srcDir), fileURLToPath(DB_TYPES_FILE)),
+ );
+
+ return `/// <reference path=${JSON.stringify(contentTypesRelativeToSrcDir)} />`;
+}
diff --git a/packages/db/test/sync.js b/packages/db/test/sync.js
new file mode 100644
index 0000000000..20a126fb01
--- /dev/null
+++ b/packages/db/test/sync.js
@@ -0,0 +1,464 @@
+// @ts-nocheck
+import { D1Database, D1DatabaseAPI } from '@miniflare/d1';
+import { createSQLiteDB } from '@miniflare/shared';
+import { expect } from 'chai';
+import { collectionSchema } from 'circle-rhyme-yes-measure';
+import { describe, it } from 'mocha';
+import { z } from 'zod';
+import {
+ getCollectionChangeQueries,
+ getCreateTableQuery,
+ getMigrationQueries,
+} from '../dist/cli/sync/queries.js';
+import { field } from '../dist/config.js';
+
+const COLLECTION_NAME = 'Users';
+
+const userInitial = collectionSchema.parse({
+ fields: {
+ name: field.text(),
+ age: field.number(),
+ email: field.text({ unique: true }),
+ mi: field.text({ optional: true }),
+ },
+});
+
+const defaultPromptResponse = {
+ allowDataLoss: false,
+ fieldRenames: new Proxy(
+ {},
+ {
+ get: () => false,
+ },
+ ),
+ collectionRenames: new Proxy(
+ {},
+ {
+ get: () => false,
+ },
+ ),
+};
+
+function userChangeQueries(oldCollection, newCollection, promptResponses = defaultPromptResponse) {
+ return getCollectionChangeQueries({
+ collectionName: COLLECTION_NAME,
+ oldCollection,
+ newCollection,
+ promptResponses,
+ });
+}
+
+function configChangeQueries(
+ oldCollections,
+ newCollections,
+ promptResponses = defaultPromptResponse,
+) {
+ return getMigrationQueries({
+ oldCollections,
+ newCollections,
+ promptResponses,
+ });
+}
+
+describe('getMigrationQueries', () => {
+ it('should be empty when collections are the same', async () => {
+ const oldCollections = { [COLLECTION_NAME]: userInitial };
+ const newCollections = { [COLLECTION_NAME]: userInitial };
+ const queries = await configChangeQueries(oldCollections, newCollections);
+ expect(queries).to.deep.equal([]);
+ });
+
+ it('should create table for new collections', async () => {
+ const oldCollections = {};
+ const newCollections = { [COLLECTION_NAME]: userInitial };
+ const queries = await configChangeQueries(oldCollections, newCollections);
+ expect(queries).to.deep.equal([getCreateTableQuery(COLLECTION_NAME, userInitial)]);
+ });
+
+ it('should drop table for removed collections', async () => {
+ const oldCollections = { [COLLECTION_NAME]: userInitial };
+ const newCollections = {};
+ const queries = await configChangeQueries(oldCollections, newCollections);
+ expect(queries).to.deep.equal([`DROP TABLE "${COLLECTION_NAME}"`]);
+ });
+
+ it('should rename table for renamed collections', async () => {
+ const rename = 'Peeps';
+ const oldCollections = { [COLLECTION_NAME]: userInitial };
+ const newCollections = { [rename]: userInitial };
+ const queries = await configChangeQueries(oldCollections, newCollections, {
+ ...defaultPromptResponse,
+ collectionRenames: { [rename]: COLLECTION_NAME },
+ });
+ expect(queries).to.deep.equal([`ALTER TABLE "${COLLECTION_NAME}" RENAME TO "${rename}"`]);
+ });
+});
+
+describe('getCollectionChangeQueries', () => {
+ it('should be empty when collections are the same', async () => {
+ const queries = await userChangeQueries(userInitial, userInitial);
+ expect(queries).to.deep.equal([]);
+ });
+
+ it('should be empty when type updated to same underlying SQL type', async () => {
+ const blogInitial = collectionSchema.parse({
+ fields: {
+ title: field.text(),
+ draft: field.boolean(),
+ },
+ });
+ const blogFinal = collectionSchema.parse({
+ fields: {
+ ...blogInitial.fields,
+ draft: field.number(),
+ },
+ });
+ const queries = await userChangeQueries(blogInitial, blogFinal);
+ expect(queries).to.deep.equal([]);
+ });
+
+ describe('ALTER RENAME COLUMN', () => {
+ it('when renaming a field', async () => {
+ const userFinal = {
+ fields: {
+ ...userInitial.fields,
+ },
+ };
+ userFinal.fields.middleInitial = userFinal.fields.mi;
+ delete userFinal.fields.mi;
+
+ const queries = await userChangeQueries(userInitial, userFinal, {
+ ...defaultPromptResponse,
+ fieldRenames: { middleInitial: 'mi' },
+ });
+ expect(queries).to.deep.equal([
+ `ALTER TABLE "${COLLECTION_NAME}" RENAME COLUMN "mi" TO "middleInitial"`,
+ ]);
+ await runsOnD1WithoutFailing({ queries });
+ });
+ });
+
+ describe('Lossy table recreate', () => {
+ it('when changing a field type', async () => {
+ const userFinal = {
+ fields: {
+ ...userInitial.fields,
+ age: field.text(),
+ },
+ };
+
+ const queries = await userChangeQueries(userInitial, userFinal, {
+ ...defaultPromptResponse,
+ allowDataLoss: true,
+ });
+ expect(queries).to.have.lengthOf(3);
+
+ const tempTableName = getTempTableName(queries[0]);
+ expect(tempTableName).to.be.a('string');
+ expect(queries).to.deep.equal([
+ `CREATE TABLE "${tempTableName}" ("id" text PRIMARY KEY, "name" text NOT NULL, "age" text NOT NULL, "email" text NOT NULL UNIQUE, "mi" text)`,
+ 'DROP TABLE "Users"',
+ `ALTER TABLE "${tempTableName}" RENAME TO "Users"`,
+ ]);
+ await runsOnD1WithoutFailing({ queries, allowDataLoss: true });
+ });
+
+ it('when changing a field to unique', async () => {
+ const userFinal = {
+ fields: {
+ ...userInitial.fields,
+ age: field.text({ unique: true }),
+ },
+ };
+
+ const queries = await userChangeQueries(userInitial, userFinal, {
+ ...defaultPromptResponse,
+ allowDataLoss: true,
+ });
+ expect(queries).to.have.lengthOf(3);
+
+ const tempTableName = getTempTableName(queries[0]);
+ expect(tempTableName).to.be.a('string');
+ expect(queries).to.deep.equal([
+ `CREATE TABLE "${tempTableName}" ("id" text PRIMARY KEY, "name" text NOT NULL, "age" text NOT NULL UNIQUE, "email" text NOT NULL UNIQUE, "mi" text)`,
+ 'DROP TABLE "Users"',
+ `ALTER TABLE "${tempTableName}" RENAME TO "Users"`,
+ ]);
+ await runsOnD1WithoutFailing({ queries, allowDataLoss: true });
+ });
+
+ it('when changing a field to required without default', async () => {
+ const userFinal = {
+ fields: {
+ ...userInitial.fields,
+ mi: field.text(),
+ },
+ };
+
+ const queries = await userChangeQueries(userInitial, userFinal, {
+ ...defaultPromptResponse,
+ allowDataLoss: true,
+ });
+
+ expect(queries).to.have.lengthOf(3);
+
+ const tempTableName = getTempTableName(queries[0]);
+ expect(tempTableName).to.be.a('string');
+ expect(queries).to.deep.equal([
+ `CREATE TABLE "${tempTableName}" ("id" text PRIMARY KEY, "name" text NOT NULL, "age" integer NOT NULL, "email" text NOT NULL UNIQUE, "mi" text NOT NULL)`,
+ 'DROP TABLE "Users"',
+ `ALTER TABLE "${tempTableName}" RENAME TO "Users"`,
+ ]);
+ await runsOnD1WithoutFailing({ queries, allowDataLoss: true });
+ });
+
+ it('when changing a field to required with default', async () => {
+ const userFinal = {
+ fields: {
+ ...userInitial.fields,
+ mi: field.text({ default: 'A' }),
+ },
+ };
+
+ const queries = await userChangeQueries(userInitial, userFinal, {
+ ...defaultPromptResponse,
+ allowDataLoss: true,
+ });
+
+ expect(queries).to.have.lengthOf(3);
+
+ const tempTableName = getTempTableName(queries[0]);
+ expect(tempTableName).to.be.a('string');
+ expect(queries).to.deep.equal([
+ `CREATE TABLE "${tempTableName}" ("id" text PRIMARY KEY, "name" text NOT NULL, "age" integer NOT NULL, "email" text NOT NULL UNIQUE, "mi" text NOT NULL DEFAULT 'A')`,
+ 'DROP TABLE "Users"',
+ `ALTER TABLE "${tempTableName}" RENAME TO "Users"`,
+ ]);
+ await runsOnD1WithoutFailing({ queries, allowDataLoss: true });
+ });
+
+ it('when adding a required field without a default', async () => {
+ const userFinal = {
+ fields: {
+ ...userInitial.fields,
+ phoneNumber: field.text(),
+ },
+ };
+
+ const queries = await userChangeQueries(userInitial, userFinal, {
+ ...defaultPromptResponse,
+ allowDataLoss: true,
+ });
+ expect(queries).to.have.lengthOf(3);
+
+ const tempTableName = getTempTableName(queries[0]);
+ expect(tempTableName).to.be.a('string');
+ expect(queries).to.deep.equal([
+ `CREATE TABLE "${tempTableName}" ("id" text PRIMARY KEY, "name" text NOT NULL, "age" integer NOT NULL, "email" text NOT NULL UNIQUE, "mi" text, "phoneNumber" text NOT NULL)`,
+ 'DROP TABLE "Users"',
+ `ALTER TABLE "${tempTableName}" RENAME TO "Users"`,
+ ]);
+ await runsOnD1WithoutFailing({ queries, allowDataLoss: true });
+ });
+ });
+
+ describe('Lossless table recreate', () => {
+ it('when adding an optional unique field', async () => {
+ const userFinal = {
+ fields: {
+ ...userInitial.fields,
+ phoneNumber: field.text({ unique: true, optional: true }),
+ },
+ };
+
+ const queries = await userChangeQueries(userInitial, userFinal, {
+ ...defaultPromptResponse,
+ allowDataLoss: true,
+ });
+ expect(queries).to.have.lengthOf(4);
+
+ const tempTableName = getTempTableName(queries[0]);
+ expect(tempTableName).to.be.a('string');
+ expect(queries).to.deep.equal([
+ `CREATE TABLE "${tempTableName}" ("id" text PRIMARY KEY, "name" text NOT NULL, "age" integer NOT NULL, "email" text NOT NULL UNIQUE, "mi" text, "phoneNumber" text UNIQUE)`,
+ `INSERT INTO "${tempTableName}" ("id", "name", "age", "email", "mi") SELECT "id", "name", "age", "email", "mi" FROM "Users"`,
+ 'DROP TABLE "Users"',
+ `ALTER TABLE "${tempTableName}" RENAME TO "Users"`,
+ ]);
+ await runsOnD1WithoutFailing({ queries });
+ });
+
+ it('when dropping unique column', async () => {
+ const userFinal = {
+ fields: {
+ ...userInitial.fields,
+ },
+ };
+ delete userFinal.fields.email;
+
+ const queries = await userChangeQueries(userInitial, userFinal);
+ expect(queries).to.have.lengthOf(4);
+
+ const tempTableName = getTempTableName(queries[0]);
+ expect(tempTableName).to.be.a('string');
+ expect(queries).to.deep.equal([
+ `CREATE TABLE "${tempTableName}" ("id" text PRIMARY KEY, "name" text NOT NULL, "age" integer NOT NULL, "mi" text)`,
+ `INSERT INTO "${tempTableName}" ("id", "name", "age", "mi") SELECT "id", "name", "age", "mi" FROM "Users"`,
+ 'DROP TABLE "Users"',
+ `ALTER TABLE "${tempTableName}" RENAME TO "Users"`,
+ ]);
+ await runsOnD1WithoutFailing({ queries });
+ });
+
+ it('when updating to a runtime default', async () => {
+ const initial = collectionSchema.parse({
+ fields: {
+ ...userInitial.fields,
+ age: field.date(),
+ },
+ });
+
+ const userFinal = {
+ fields: {
+ ...initial.fields,
+ age: field.date({ default: 'now' }),
+ },
+ };
+
+ const queries = await userChangeQueries(initial, userFinal);
+ expect(queries).to.have.lengthOf(4);
+
+ const tempTableName = getTempTableName(queries[0]);
+ expect(tempTableName).to.be.a('string');
+ expect(queries).to.deep.equal([
+ `CREATE TABLE "${tempTableName}" ("id" text PRIMARY KEY, "name" text NOT NULL, "age" text NOT NULL DEFAULT CURRENT_TIMESTAMP, "email" text NOT NULL UNIQUE, "mi" text)`,
+ `INSERT INTO "${tempTableName}" ("id", "name", "age", "email", "mi") SELECT "id", "name", "age", "email", "mi" FROM "Users"`,
+ 'DROP TABLE "Users"',
+ `ALTER TABLE "${tempTableName}" RENAME TO "Users"`,
+ ]);
+ await runsOnD1WithoutFailing({ queries });
+ });
+
+ it('when adding a field with a runtime default', async () => {
+ const userFinal = {
+ fields: {
+ ...userInitial.fields,
+ birthday: field.date({ default: 'now' }),
+ },
+ };
+
+ const queries = await userChangeQueries(userInitial, userFinal);
+ expect(queries).to.have.lengthOf(4);
+
+ const tempTableName = getTempTableName(queries[0]);
+ expect(tempTableName).to.be.a('string');
+ expect(queries).to.deep.equal([
+ `CREATE TABLE "${tempTableName}" ("id" text PRIMARY KEY, "name" text NOT NULL, "age" integer NOT NULL, "email" text NOT NULL UNIQUE, "mi" text, "birthday" text NOT NULL DEFAULT CURRENT_TIMESTAMP)`,
+ `INSERT INTO "${tempTableName}" ("id", "name", "age", "email", "mi") SELECT "id", "name", "age", "email", "mi" FROM "Users"`,
+ 'DROP TABLE "Users"',
+ `ALTER TABLE "${tempTableName}" RENAME TO "Users"`,
+ ]);
+ await runsOnD1WithoutFailing({ queries });
+ });
+ });
+
+ describe('ALTER ADD COLUMN', () => {
+ it('when adding an optional field', async () => {
+ const userFinal = {
+ fields: {
+ ...userInitial.fields,
+ birthday: field.date({ optional: true }),
+ },
+ };
+
+ const queries = await userChangeQueries(userInitial, userFinal);
+ expect(queries).to.deep.equal(['ALTER TABLE "Users" ADD COLUMN "birthday" text']);
+ await runsOnD1WithoutFailing({ queries });
+ });
+
+ it('when adding a required field with default', async () => {
+ const defaultDate = new Date('2023-01-01');
+ const userFinal = collectionSchema.parse({
+ fields: {
+ ...userInitial.fields,
+ birthday: field.date({ default: new Date('2023-01-01') }),
+ },
+ });
+
+ const queries = await userChangeQueries(userInitial, userFinal);
+ expect(queries).to.deep.equal([
+ `ALTER TABLE "Users" ADD COLUMN "birthday" text NOT NULL DEFAULT '${defaultDate.toISOString()}'`,
+ ]);
+ await runsOnD1WithoutFailing({ queries });
+ });
+ });
+
+ describe('ALTER DROP COLUMN', () => {
+ it('when removing optional or required fields', async () => {
+ const userFinal = {
+ fields: {
+ name: userInitial.fields.name,
+ email: userInitial.fields.email,
+ },
+ };
+
+ const queries = await userChangeQueries(userInitial, userFinal);
+ expect(queries).to.deep.equal([
+ 'ALTER TABLE "Users" DROP COLUMN "age"',
+ 'ALTER TABLE "Users" DROP COLUMN "mi"',
+ ]);
+ await runsOnD1WithoutFailing({ queries });
+ });
+ });
+});
+
+/** @param {string} query */
+function getTempTableName(query) {
+ return query.match(/Users_([a-z0-9]+)/)?.[0];
+}
+
+/** @param {{ queries: string[]; oldCollection?: typeof userInitial; allowDataLoss?: boolean }} queries */
+async function runsOnD1WithoutFailing({
+ queries,
+ oldCollection = userInitial,
+ allowDataLoss = false,
+}) {
+ const sqlite = await createSQLiteDB(':memory:');
+ const d1 = new D1Database(new D1DatabaseAPI(sqlite));
+
+ const createTable = getCreateTableQuery(COLLECTION_NAME, oldCollection);
+ const insertExampleEntries = [
+ `INSERT INTO "Users" ("id", "name", "age", "email") VALUES ('1', 'John', 20, 'john@test.gov')`,
+ `INSERT INTO "Users" ("id", "name", "age", "email") VALUES ('2', 'Jane', 21, 'jane@test.club')`,
+ ];
+ await d1.batch([createTable, ...insertExampleEntries].map((q) => d1.prepare(q)));
+
+ try {
+ await d1.batch(queries.map((q) => d1.prepare(q)));
+ const userQuery = d1.prepare(`SELECT * FROM "Users"`);
+ const { results } = await userQuery.all();
+ expect(results).to.have.lengthOf(allowDataLoss ? 0 : insertExampleEntries.length);
+ sqlite.close();
+ expect(true).to.be.true;
+ } catch (err) {
+ expect.fail(getErrorMessage(err));
+ }
+}
+
+const d1ErrorValidator = z.object({
+ message: z.string().refine((s) => s.startsWith('D1_')),
+ cause: z.object({ message: z.string() }),
+});
+
+/**
+ * @param {unknown} e
+ * @returns {string}
+ */
+function getErrorMessage(e) {
+ if (e instanceof Error) {
+ const d1Error = d1ErrorValidator.safeParse(e);
+ if (d1Error.success) return d1Error.data.cause.message;
+ return e.message;
+ }
+ return JSON.stringify(e);
+}
diff --git a/packages/db/tsconfig.build.json b/packages/db/tsconfig.build.json
new file mode 100644
index 0000000000..4f3345b924
--- /dev/null
+++ b/packages/db/tsconfig.build.json
@@ -0,0 +1,11 @@
+{
+ "extends": "./tsconfig.json",
+ "include": ["src"],
+ "compilerOptions": {
+ "noEmit": false,
+ "declaration": true,
+ "emitDeclarationOnly": true,
+ "outDir": "./dist",
+ "rootDir": "./src"
+ }
+}
diff --git a/packages/db/tsconfig.json b/packages/db/tsconfig.json
new file mode 100644
index 0000000000..9b5603dc69
--- /dev/null
+++ b/packages/db/tsconfig.json
@@ -0,0 +1,4 @@
+{
+ "extends": "../../tsconfig.base.json",
+ "exclude": ["node_modules", "dist", "test", "bin"]
+}