0
Fork 0
mirror of https://github.com/withastro/astro.git synced 2025-03-24 23:21:57 -05:00

finalize WIP API (#10280)

* feat: no more readable / writable

* fix: table typegen

* wip: move data seeding

* chore: add scripts to basics

* feat: data() -> seed file

* refactor: ensure precedence of file name

* feat: db execute command

* fix: test imports

* chore: remove old readable error tests

* feat: support local db with `db execute`

* refactor: remove integrations from test for now

* chore: stray comment

* chore: remove `table` config object

* feat: `db.batch`!

* refactor: move migrations/ inside db/

* fix: move ticketing-example to seed file

* fix: disable foreign keys when recreating tables

* refactor: standardize migrations dir

* feat: move to db/config.ts

* feat: file watching for db/config.ts dependencies

* feat: remove unsafeDisableStudio

* chore: remove bad import

* feat: parse config.ts from cli

* chore: remove async from localDatabaseClient

* fix: update recipes config and seed

* chore: update unit tests

* chore: update tests to dev server

* refactor: collectionToTable -> asDrizzleTable

* chore: tidy up collection -> table error states

* refactor: regexp -> endsWith

* feat: pretty error inserting into table

* refactor: try/catch -> catch()

* feat: expose utils for integration seed files

* fix: add config import to db client modules

* fix: just use generic "seeding database" error

* chore: remove unused link args

* fix: migration queries import

* chore: remove irrelevant glob/ example

* feat: format migration file path

* feat: support all config file names

* chore: remove db.batch() for now

* chore: remove `db` object

* chore: remove unused integration file

* chore: changeset

* fix: foreign key empty error message

* chore: remove old TODO

* fix: bad context reference

* refactor: seedDev -> seedLocal

* wip: throw some console logs at github

* wip: avoid seeding astro:db imported by seed file

* wip: use anything in db/

* refactor: only seed when loaded within srcDir

* refactor: avoid resolution when not seeding

* chore: remove logs

* refactor: seed within create local db client

* refactor: use normalizePath

* wip: logs

* wip: logs

* refactor: early return

* chore: more logs

* refactor: no batch

* fix: use beforeAll

* refactor: move all tests to base block

* wip: log dev server starting

* chore: remove logs

* wip: demo ready

* chore: remove duplicate recreateTables() call

* Revert "wip: demo ready"

This reverts commit 37585ce5cb.

* refactor: beforeEach to isolate dev servers

* chore: remove useBundledDbUrl

* refactor: naming and seed scope

* chore: remove stray console logs

* wip: fix windows file import

* wip: try fileURLToPath

* Revert "wip: try fileURLToPath"

This reverts commit 46fd65d61a.

* Revert "wip: fix windows file import"

This reverts commit 1a669ea646.

* refactor: dir -> directory

* refactor: move execute file to cli

* refactor: remove seed.dev convention

* wip: attempt fileURLToPath

* wip: debug the file exists

* fix: use mjs??

* chore: remove duplicate seedLocal

* chore: remove log check

* refactor: use in memory db for tests

* chore: clean up test comment

* fix: avoid file writes for db setup on in memory db

* chore: bump db changeset to minor

---------

Co-authored-by: Nate Moore <nate@astro.build>
This commit is contained in:
Ben Holmes 2024-03-01 19:29:55 -05:00 committed by GitHub
parent 4b6e2fb69b
commit 3488be9b59
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
58 changed files with 1097 additions and 1240 deletions

View file

@ -0,0 +1,6 @@
---
"astro": patch
"@astrojs/db": minor
---
Finalize db API to a shared db/ directory.

View file

@ -108,7 +108,6 @@ export const AstroConfigSchema = z.object({
.optional()
.default('attribute'),
adapter: z.object({ name: z.string(), hooks: z.object({}).passthrough().default({}) }).optional(),
db: z.object({}).passthrough().default({}).optional(),
integrations: z.preprocess(
// preprocess
(val) => (Array.isArray(val) ? val.flat(Infinity).filter(Boolean) : val),

View file

@ -1,4 +0,0 @@
declare namespace Config {
type DBUserConfig = import('./dist/core/types.js').DBUserConfig;
export interface Database extends DBUserConfig {}
}

View file

@ -1,3 +1,5 @@
/// <reference types="./config-augment.d.ts" />
export * from './dist/index.js';
export { default } from './dist/index.js';
export { default, cli } from './dist/index.js';
declare module 'astro:db' {
export { defineTable, defineDB, column, sql, NOW, TRUE, FALSE } from './dist/index.js';
}

View file

@ -12,6 +12,10 @@
"types": "./index.d.ts",
"import": "./dist/index.js"
},
"./utils": {
"types": "./dist/utils.d.ts",
"import": "./dist/utils.js"
},
"./runtime": {
"types": "./dist/runtime/index.d.ts",
"import": "./dist/runtime/index.js"
@ -20,6 +24,10 @@
"types": "./dist/runtime/drizzle.d.ts",
"import": "./dist/runtime/drizzle.js"
},
"./runtime/config": {
"types": "./dist/runtime/config.d.ts",
"import": "./dist/runtime/config.js"
},
"./package.json": "./package.json"
},
"typesVersions": {
@ -27,11 +35,17 @@
".": [
"./index.d.ts"
],
"utils": [
"./dist/utils.d.ts"
],
"runtime": [
"./dist/runtime/index.d.ts"
],
"runtime/drizzle": [
"./dist/runtime/drizzle.d.ts"
],
"runtime/config": [
"./dist/runtime/config.d.ts"
]
}
},

View file

@ -0,0 +1,40 @@
import type { AstroConfig } from 'astro';
import type { Arguments } from 'yargs-parser';
import { MISSING_EXECUTE_PATH_ERROR, FILE_NOT_FOUND_ERROR } from '../../../errors.js';
import { existsSync } from 'node:fs';
import { getManagedAppTokenOrExit } from '../../../tokens.js';
import { type DBConfig } from '../../../types.js';
import { bundleFile, importBundledFile } from '../../../load-file.js';
import { getStudioVirtualModContents } from '../../../integration/vite-plugin-db.js';
export async function cmd({
astroConfig,
dbConfig,
flags,
}: {
astroConfig: AstroConfig;
dbConfig: DBConfig;
flags: Arguments;
}) {
const filePath = flags._[4];
if (typeof filePath !== 'string') {
console.error(MISSING_EXECUTE_PATH_ERROR);
process.exit(1);
}
const fileUrl = new URL(filePath, astroConfig.root);
if (!existsSync(fileUrl)) {
console.error(FILE_NOT_FOUND_ERROR(filePath));
process.exit(1);
}
const appToken = await getManagedAppTokenOrExit(flags.token);
const virtualModContents = getStudioVirtualModContents({
tables: dbConfig.tables ?? {},
appToken: appToken.token,
});
const { code } = await bundleFile({ virtualModContents, root: astroConfig.root, fileUrl });
// Executable files use top-level await. Importing will run the file.
await importBundledFile({ code, root: astroConfig.root });
}

View file

@ -1,6 +1,7 @@
import { fileURLToPath } from 'node:url';
import { writeFile } from 'node:fs/promises';
import type { AstroConfig } from 'astro';
import { bgRed, red, reset } from 'kleur/colors';
import { bold, bgRed, red, reset } from 'kleur/colors';
import type { Arguments } from 'yargs-parser';
import { getMigrationQueries } from '../../migration-queries.js';
import {
@ -9,12 +10,23 @@ import {
getMigrationStatus,
initializeMigrationsDirectory,
} from '../../migrations.js';
import { getMigrationsDirectoryUrl } from '../../../utils.js';
import type { DBConfig } from '../../../types.js';
import { relative } from 'node:path';
export async function cmd({ config }: { config: AstroConfig; flags: Arguments }) {
const migration = await getMigrationStatus(config);
export async function cmd({
astroConfig,
dbConfig,
}: {
astroConfig: AstroConfig;
dbConfig: DBConfig;
flags: Arguments;
}) {
const migration = await getMigrationStatus({ dbConfig, root: astroConfig.root });
const migrationsDir = getMigrationsDirectoryUrl(astroConfig.root);
if (migration.state === 'no-migrations-found') {
await initializeMigrationsDirectory(migration.currentSnapshot);
await initializeMigrationsDirectory(migration.currentSnapshot, migrationsDir);
console.log(MIGRATIONS_CREATED);
return;
} else if (migration.state === 'up-to-date') {
@ -30,14 +42,15 @@ export async function cmd({ config }: { config: AstroConfig; flags: Arguments })
// Warn the user about any changes that lead to data-loss.
// When the user runs `db push`, they will be prompted to confirm these changes.
confirmations.map((message) => console.log(bgRed(' !!! ') + ' ' + red(message)));
const migrationFileContent = {
const content = {
diff,
db: migrationQueries,
// TODO(fks): Encode the relevant data, instead of the raw message.
// This will give `db push` more control over the formatting of the message.
confirm: confirmations.map((c) => reset(c)),
};
const migrationFileName = `./migrations/${newFilename}`;
await writeFile(migrationFileName, JSON.stringify(migrationFileContent, undefined, 2));
console.log(migrationFileName + ' created!');
const fileUrl = new URL(newFilename, migrationsDir);
const relativePath = relative(fileURLToPath(astroConfig.root), fileURLToPath(fileUrl));
await writeFile(fileUrl, JSON.stringify(content, undefined, 2));
console.log(bold(relativePath) + ' created!');
}

View file

@ -1,17 +1,15 @@
import { mkdir, writeFile } from 'node:fs/promises';
import { homedir } from 'node:os';
import { basename } from 'node:path';
import type { AstroConfig } from 'astro';
import { slug } from 'github-slugger';
import { bgRed, cyan } from 'kleur/colors';
import ora from 'ora';
import prompts from 'prompts';
import type { Arguments } from 'yargs-parser';
import { MISSING_SESSION_ID_ERROR } from '../../../errors.js';
import { PROJECT_ID_FILE, getSessionIdFromFile } from '../../../tokens.js';
import { getAstroStudioUrl } from '../../../utils.js';
export async function cmd({}: { config: AstroConfig; flags: Arguments }) {
export async function cmd() {
const sessionToken = await getSessionIdFromFile();
if (!sessionToken) {
console.error(MISSING_SESSION_ID_ERROR);

View file

@ -7,6 +7,7 @@ import open from 'open';
import ora from 'ora';
import type { Arguments } from 'yargs-parser';
import { SESSION_LOGIN_FILE } from '../../../tokens.js';
import type { DBConfig } from '../../../types.js';
import { getAstroStudioUrl } from '../../../utils.js';
// NOTE(fks): How the Astro CLI login process works:
@ -47,7 +48,13 @@ async function createServer(): Promise<{ url: string; promise: Promise<string> }
return { url: serverUrl, promise: sessionPromise };
}
export async function cmd({ flags }: { config: AstroConfig; flags: Arguments }) {
export async function cmd({
flags,
}: {
astroConfig: AstroConfig;
dbConfig: DBConfig;
flags: Arguments;
}) {
let session = flags.session;
if (!session) {

View file

@ -1,9 +1,7 @@
import { unlink } from 'node:fs/promises';
import type { AstroConfig } from 'astro';
import type { Arguments } from 'yargs-parser';
import { SESSION_LOGIN_FILE } from '../../../tokens.js';
export async function cmd({}: { config: AstroConfig; flags: Arguments }) {
export async function cmd() {
await unlink(SESSION_LOGIN_FILE);
console.log('Successfully logged out of Astro Studio.');
}

View file

@ -1,32 +1,36 @@
import { type InStatement, createClient } from '@libsql/client';
import type { AstroConfig } from 'astro';
import { drizzle as drizzleLibsql } from 'drizzle-orm/libsql';
import { SQLiteAsyncDialect } from 'drizzle-orm/sqlite-core';
import { drizzle as drizzleProxy } from 'drizzle-orm/sqlite-proxy';
import { red } from 'kleur/colors';
import prompts from 'prompts';
import type { Arguments } from 'yargs-parser';
import { MISSING_SESSION_ID_ERROR } from '../../../errors.js';
import { recreateTables, seedData } from '../../../queries.js';
import { getManagedAppTokenOrExit } from '../../../tokens.js';
import { type AstroConfigWithDB, type DBSnapshot, tablesSchema } from '../../../types.js';
import { getRemoteDatabaseUrl } from '../../../utils.js';
import { type DBConfig, type DBSnapshot } from '../../../types.js';
import { getMigrationsDirectoryUrl, getRemoteDatabaseUrl } from '../../../utils.js';
import { getMigrationQueries } from '../../migration-queries.js';
import {
MIGRATIONS_NOT_INITIALIZED,
MIGRATIONS_UP_TO_DATE,
MIGRATION_NEEDED,
createEmptySnapshot,
getMigrationStatus,
getMigrations,
getMigrationStatus,
INITIAL_SNAPSHOT,
loadInitialSnapshot,
loadMigration,
MIGRATION_NEEDED,
MIGRATIONS_NOT_INITIALIZED,
MIGRATIONS_UP_TO_DATE,
} from '../../migrations.js';
import { MISSING_SESSION_ID_ERROR } from '../../../errors.js';
export async function cmd({ config, flags }: { config: AstroConfig; flags: Arguments }) {
export async function cmd({
astroConfig,
dbConfig,
flags,
}: {
astroConfig: AstroConfig;
dbConfig: DBConfig;
flags: Arguments;
}) {
const isDryRun = flags.dryRun;
const appToken = await getManagedAppTokenOrExit(flags.token);
const migration = await getMigrationStatus(config);
const migration = await getMigrationStatus({ dbConfig, root: astroConfig.root });
if (migration.state === 'no-migrations-found') {
console.log(MIGRATIONS_NOT_INITIALIZED);
process.exit(1);
@ -34,9 +38,10 @@ export async function cmd({ config, flags }: { config: AstroConfig; flags: Argum
console.log(MIGRATION_NEEDED);
process.exit(1);
}
const migrationsDir = getMigrationsDirectoryUrl(astroConfig.root);
// get all migrations from the filesystem
const allLocalMigrations = await getMigrations();
const allLocalMigrations = await getMigrations(migrationsDir);
let missingMigrations: string[] = [];
try {
const { data } = await prepareMigrateQuery({
@ -63,14 +68,12 @@ export async function cmd({ config, flags }: { config: AstroConfig; flags: Argum
console.log(`Pushing ${missingMigrations.length} migrations...`);
await pushSchema({
migrations: missingMigrations,
migrationsDir,
appToken: appToken.token,
isDryRun,
currentSnapshot: migration.currentSnapshot,
});
}
// push the database seed data
console.info('Pushing data...');
await pushData({ config, appToken: appToken.token, isDryRun });
// cleanup and exit
await appToken.destroy();
console.info('Push complete!');
@ -78,25 +81,29 @@ export async function cmd({ config, flags }: { config: AstroConfig; flags: Argum
async function pushSchema({
migrations,
migrationsDir,
appToken,
isDryRun,
currentSnapshot,
}: {
migrations: string[];
migrationsDir: URL;
appToken: string;
isDryRun: boolean;
currentSnapshot: DBSnapshot;
}) {
// load all missing migrations
const initialSnapshot = migrations.find((m) => m === '0000_snapshot.json');
const filteredMigrations = migrations.filter((m) => m !== '0000_snapshot.json');
const missingMigrationContents = await Promise.all(filteredMigrations.map(loadMigration));
const initialSnapshot = migrations.find((m) => m === INITIAL_SNAPSHOT);
const filteredMigrations = migrations.filter((m) => m !== INITIAL_SNAPSHOT);
const missingMigrationContents = await Promise.all(
filteredMigrations.map((m) => loadMigration(m, migrationsDir))
);
// create a migration for the initial snapshot, if needed
const initialMigrationBatch = initialSnapshot
? (
await getMigrationQueries({
oldSnapshot: createEmptySnapshot(),
newSnapshot: await loadInitialSnapshot(),
newSnapshot: await loadInitialSnapshot(migrationsDir),
})
).queries
: [];
@ -130,76 +137,6 @@ async function pushSchema({
await runMigrateQuery({ queries, migrations, snapshot: currentSnapshot, appToken, isDryRun });
}
const sqlite = new SQLiteAsyncDialect();
async function pushData({
config,
appToken,
isDryRun,
}: {
config: AstroConfigWithDB;
appToken: string;
isDryRun?: boolean;
}) {
const queries: InStatement[] = [];
if (config.db?.data) {
const libsqlClient = createClient({ url: ':memory:' });
// Stand up tables locally to mirror inserts.
// Needed to generate return values.
await recreateTables({
db: drizzleLibsql(libsqlClient),
tables: tablesSchema.parse(config.db.tables ?? {}),
});
for (const [collectionName, { writable }] of Object.entries(config.db.tables ?? {})) {
if (!writable) {
queries.push({
sql: `DELETE FROM ${sqlite.escapeName(collectionName)}`,
args: [],
});
}
}
// Use proxy to trace all queries to queue up in a batch.
const db = await drizzleProxy(async (sqlQuery, params, method) => {
const stmt: InStatement = { sql: sqlQuery, args: params };
queries.push(stmt);
// Use in-memory database to generate results for `returning()`.
const { rows } = await libsqlClient.execute(stmt);
const rowValues: unknown[][] = [];
for (const row of rows) {
if (row != null && typeof row === 'object') {
rowValues.push(Object.values(row));
}
}
if (method === 'get') {
return { rows: rowValues[0] };
}
return { rows: rowValues };
});
await seedData({
db,
mode: 'build',
data: config.db.data,
});
}
const url = new URL('/db/query', getRemoteDatabaseUrl());
if (isDryRun) {
console.info('[DRY RUN] Batch data seed:', JSON.stringify(queries, null, 2));
return new Response(null, { status: 200 });
}
return await fetch(url, {
method: 'POST',
headers: new Headers({
Authorization: `Bearer ${appToken}`,
}),
body: JSON.stringify(queries),
});
}
async function runMigrateQuery({
queries: baseQueries,
migrations,

View file

@ -4,8 +4,15 @@ import type { Arguments } from 'yargs-parser';
import { createRemoteDatabaseClient } from '../../../../runtime/db-client.js';
import { getManagedAppTokenOrExit } from '../../../tokens.js';
import { getRemoteDatabaseUrl } from '../../../utils.js';
import type { DBConfigInput } from '../../../types.js';
export async function cmd({ flags }: { config: AstroConfig; flags: Arguments }) {
export async function cmd({
flags,
}: {
dbConfig: DBConfigInput;
astroConfig: AstroConfig;
flags: Arguments;
}) {
const query = flags.query;
const appToken = await getManagedAppTokenOrExit(flags.token);
const db = createRemoteDatabaseClient(appToken.token, getRemoteDatabaseUrl());

View file

@ -7,9 +7,18 @@ import {
MIGRATION_NEEDED,
getMigrationStatus,
} from '../../migrations.js';
import type { DBConfig } from '../../../types.js';
export async function cmd({ config, flags }: { config: AstroConfig; flags: Arguments }) {
const status = await getMigrationStatus(config);
export async function cmd({
astroConfig,
dbConfig,
flags,
}: {
astroConfig: AstroConfig;
dbConfig: DBConfig;
flags: Arguments;
}) {
const status = await getMigrationStatus({ dbConfig, root: astroConfig.root });
const { state } = status;
if (flags.json) {
if (state === 'ahead') {

View file

@ -1,50 +1,56 @@
import type { AstroConfig } from 'astro';
import type { Arguments } from 'yargs-parser';
import { STUDIO_CONFIG_MISSING_CLI_ERROR } from '../errors.js';
import { loadDbConfigFile } from '../load-file.js';
import { dbConfigSchema } from '../types.js';
export async function cli({ flags, config }: { flags: Arguments; config: AstroConfig }) {
export async function cli({
flags,
config: astroConfig,
}: {
flags: Arguments;
config: AstroConfig;
}) {
const args = flags._ as string[];
// Most commands are `astro db foo`, but for now login/logout
// are also handled by this package, so first check if this is a db command.
const command = args[2] === 'db' ? args[3] : args[2];
switch (command) {
case 'login': {
const { cmd } = await import('./commands/login/index.js');
return await cmd({ config, flags });
}
case 'logout': {
const { cmd } = await import('./commands/logout/index.js');
return await cmd({ config, flags });
}
}
if (!config.db?.studio) {
console.log(STUDIO_CONFIG_MISSING_CLI_ERROR);
process.exit(1);
}
const { mod } = await loadDbConfigFile(astroConfig.root);
// TODO: parseConfigOrExit()
const dbConfig = dbConfigSchema.parse(mod?.default ?? {});
switch (command) {
case 'shell': {
const { cmd } = await import('./commands/shell/index.js');
return await cmd({ config, flags });
return await cmd({ astroConfig, dbConfig, flags });
}
case 'gen':
case 'sync': {
const { cmd } = await import('./commands/gen/index.js');
return await cmd({ config, flags });
return await cmd({ astroConfig, dbConfig, flags });
}
case 'push': {
const { cmd } = await import('./commands/push/index.js');
return await cmd({ config, flags });
return await cmd({ astroConfig, dbConfig, flags });
}
case 'verify': {
const { cmd } = await import('./commands/verify/index.js');
return await cmd({ config, flags });
return await cmd({ astroConfig, dbConfig, flags });
}
case 'execute': {
const { cmd } = await import('./commands/execute/index.js');
return await cmd({ astroConfig, dbConfig, flags });
}
case 'login': {
const { cmd } = await import('./commands/login/index.js');
return await cmd({ astroConfig, dbConfig, flags });
}
case 'logout': {
const { cmd } = await import('./commands/logout/index.js');
return await cmd();
}
case 'link': {
const { cmd } = await import('./commands/link/index.js');
return await cmd({ config, flags });
return await cmd();
}
default: {
if (command == null) {

View file

@ -12,7 +12,7 @@ import {
getReferencesConfig,
hasDefault,
schemaTypeToSqlType,
} from '../queries.js';
} from '../../runtime/queries.js';
import {
type BooleanColumn,
type ColumnType,

View file

@ -1,8 +1,8 @@
import type { AstroConfig } from 'astro';
import deepDiff from 'deep-diff';
import { mkdir, readFile, readdir, writeFile } from 'fs/promises';
import { type DBSnapshot, type DBConfig } from '../types.js';
import { cyan, green, yellow } from 'kleur/colors';
import { type DBSnapshot, tablesSchema } from '../types.js';
import { getMigrationsDirectoryUrl } from '../utils.js';
const { applyChange, diff: generateDiff } = deepDiff;
export type MigrationStatus =
@ -24,9 +24,18 @@ export type MigrationStatus =
currentSnapshot: DBSnapshot;
};
export async function getMigrationStatus(config: AstroConfig): Promise<MigrationStatus> {
const currentSnapshot = createCurrentSnapshot(config);
const allMigrationFiles = await getMigrations();
export const INITIAL_SNAPSHOT = '0000_snapshot.json';
export async function getMigrationStatus({
dbConfig,
root,
}: {
dbConfig: DBConfig;
root: URL;
}): Promise<MigrationStatus> {
const currentSnapshot = createCurrentSnapshot(dbConfig);
const dir = getMigrationsDirectoryUrl(root);
const allMigrationFiles = await getMigrations(dir);
if (allMigrationFiles.length === 0) {
return {
@ -35,7 +44,7 @@ export async function getMigrationStatus(config: AstroConfig): Promise<Migration
};
}
const previousSnapshot = await initializeFromMigrations(allMigrationFiles);
const previousSnapshot = await initializeFromMigrations(allMigrationFiles, dir);
const diff = generateDiff(previousSnapshot, currentSnapshot);
if (diff) {
@ -83,8 +92,8 @@ function getNewMigrationNumber(allMigrationFiles: string[]): number {
}, 0);
}
export async function getMigrations(): Promise<string[]> {
const migrationFiles = await readdir('./migrations').catch((err) => {
export async function getMigrations(dir: URL): Promise<string[]> {
const migrationFiles = await readdir(dir).catch((err) => {
if (err.code === 'ENOENT') {
return [];
}
@ -94,13 +103,14 @@ export async function getMigrations(): Promise<string[]> {
}
export async function loadMigration(
migration: string
migration: string,
dir: URL
): Promise<{ diff: any[]; db: string[]; confirm?: string[] }> {
return JSON.parse(await readFile(`./migrations/${migration}`, 'utf-8'));
return JSON.parse(await readFile(new URL(migration, dir), 'utf-8'));
}
export async function loadInitialSnapshot(): Promise<DBSnapshot> {
const snapshot = JSON.parse(await readFile('./migrations/0000_snapshot.json', 'utf-8'));
export async function loadInitialSnapshot(dir: URL): Promise<DBSnapshot> {
const snapshot = JSON.parse(await readFile(new URL(INITIAL_SNAPSHOT, dir), 'utf-8'));
// `experimentalVersion: 1` -- added the version column
if (snapshot.experimentalVersion === 1) {
return snapshot;
@ -112,16 +122,19 @@ export async function loadInitialSnapshot(): Promise<DBSnapshot> {
throw new Error('Invalid snapshot format');
}
export async function initializeMigrationsDirectory(currentSnapshot: DBSnapshot) {
await mkdir('./migrations', { recursive: true });
await writeFile('./migrations/0000_snapshot.json', JSON.stringify(currentSnapshot, undefined, 2));
export async function initializeMigrationsDirectory(currentSnapshot: DBSnapshot, dir: URL) {
await mkdir(dir, { recursive: true });
await writeFile(new URL(INITIAL_SNAPSHOT, dir), JSON.stringify(currentSnapshot, undefined, 2));
}
export async function initializeFromMigrations(allMigrationFiles: string[]): Promise<DBSnapshot> {
const prevSnapshot = await loadInitialSnapshot();
export async function initializeFromMigrations(
allMigrationFiles: string[],
dir: URL
): Promise<DBSnapshot> {
const prevSnapshot = await loadInitialSnapshot(dir);
for (const migration of allMigrationFiles) {
if (migration === '0000_snapshot.json') continue;
const migrationContent = await loadMigration(migration);
if (migration === INITIAL_SNAPSHOT) continue;
const migrationContent = await loadMigration(migration, dir);
migrationContent.diff.forEach((change: any) => {
applyChange(prevSnapshot, {}, change);
});
@ -129,10 +142,8 @@ export async function initializeFromMigrations(allMigrationFiles: string[]): Pro
return prevSnapshot;
}
export function createCurrentSnapshot(config: AstroConfig): DBSnapshot {
// Parse to resolve non-serializable types like () => references
const tablesConfig = tablesSchema.parse(config.db?.tables ?? {});
const schema = JSON.parse(JSON.stringify(tablesConfig));
export function createCurrentSnapshot({ tables = {} }: DBConfig): DBSnapshot {
const schema = JSON.parse(JSON.stringify(tables));
return { experimentalVersion: 1, schema };
}
export function createEmptySnapshot(): DBSnapshot {

View file

@ -6,9 +6,12 @@ export const PACKAGE_NAME = JSON.parse(
export const RUNTIME_IMPORT = JSON.stringify(`${PACKAGE_NAME}/runtime`);
export const RUNTIME_DRIZZLE_IMPORT = JSON.stringify(`${PACKAGE_NAME}/runtime/drizzle`);
export const RUNTIME_CONFIG_IMPORT = JSON.stringify(`${PACKAGE_NAME}/runtime/config`);
export const DB_TYPES_FILE = 'db-types.d.ts';
export const VIRTUAL_MODULE_ID = 'astro:db';
export const DB_PATH = '.astro/content.db';
export const CONFIG_FILE_NAMES = ['config.ts', 'config.js', 'config.mts', 'config.mjs'];

View file

@ -10,44 +10,41 @@ export const MISSING_PROJECT_ID_ERROR = `${red('▶ Directory not linked.')}
To link this directory to an Astro Studio project, run
${cyan('astro db link')}\n`;
export const STUDIO_CONFIG_MISSING_WRITABLE_TABLE_ERROR = (tableName: string) => `${red(
`▶ Writable table ${bold(tableName)} requires Astro Studio or the ${yellow(
'unsafeWritable'
)} option.`
)}
Visit ${cyan('https://astro.build/studio')} to create your account
and set ${green('studio: true')} in your astro.config.mjs file to enable Studio.\n`;
export const UNSAFE_WRITABLE_WARNING = `${yellow(
'unsafeWritable'
)} option is enabled and you are using writable tables.
Redeploying your app may result in wiping away your database.
I hope you know what you are doing.\n`;
export const STUDIO_CONFIG_MISSING_CLI_ERROR = `${red('▶ This command requires Astro Studio.')}
Visit ${cyan('https://astro.build/studio')} to create your account
and set ${green('studio: true')} in your astro.config.mjs file to enable Studio.\n`;
export const MIGRATIONS_NOT_INITIALIZED = `${yellow(
'▶ No migrations found!'
)}\n\n To scaffold your migrations folder, run\n ${cyan('astro db sync')}\n`;
export const SEED_WRITABLE_IN_PROD_ERROR = (tableName: string) => {
return `${red(
`Writable tables should not be seeded in production with data().`
)} You can seed ${bold(
export const MISSING_EXECUTE_PATH_ERROR = `${red(
'▶ No file path provided.'
)} Provide a path by running ${cyan('astro db execute <path>')}\n`;
export const FILE_NOT_FOUND_ERROR = (path: string) =>
`${red('▶ File not found:')} ${bold(path)}\n`;
export const SEED_ERROR = (error: string) => {
return `${red(`Error while seeding database:`)}\n\n${error}`;
};
export const REFERENCE_DNE_ERROR = (columnName: string) => {
return `Column ${bold(
columnName
)} references a table that does not exist. Did you apply the referenced table to the \`tables\` object in your db config?`;
};
export const FOREIGN_KEY_DNE_ERROR = (tableName: string) => {
return `Table ${bold(
tableName
)} in development mode only using the "mode" flag. See the docs for more: https://www.notion.so/astroinc/astrojs-db-README-dcf6fa10de9a4f528be56cee96e8c054?pvs=4#278aed3fc37e4cec80240d1552ff6ac5`;
)} references a table that does not exist. Did you apply the referenced table to the \`tables\` object in your db config?`;
};
export const SEED_ERROR = (tableName: string, error: string) => {
return `${red(`Error seeding table ${bold(tableName)}:`)}\n\n${error}`;
export const FOREIGN_KEY_REFERENCES_LENGTH_ERROR = (tableName: string) => {
return `Foreign key on ${bold(
tableName
)} is misconfigured. \`columns\` and \`references\` must be the same length.`;
};
export const SEED_EMPTY_ARRAY_ERROR = (tableName: string) => {
// Drizzle error says "values() must be called with at least one value."
// This is specific to db.insert(). Prettify for seed().
return SEED_ERROR(tableName, `Empty array was passed. seed() must receive at least one value.`);
export const FOREIGN_KEY_REFERENCES_EMPTY_ERROR = (tableName: string) => {
return `Foreign key on ${bold(
tableName
)} is misconfigured. \`references\` array cannot be empty.`;
};

View file

@ -1,28 +1,32 @@
import { existsSync } from 'fs';
import { CONFIG_FILE_NAMES, DB_PATH } from '../consts.js';
import { dbConfigSchema, type DBConfig } from '../types.js';
import { getDbDirectoryUrl, type VitePlugin } from '../utils.js';
import { errorMap } from './error-map.js';
import { dirname } from 'path';
import { fileURLToPath } from 'url';
import type { AstroIntegration } from 'astro';
import { mkdir, rm, writeFile } from 'fs/promises';
import { blue, yellow } from 'kleur/colors';
import { createLocalDatabaseClient } from '../../runtime/db-client.js';
import { DB_PATH } from '../consts.js';
import { STUDIO_CONFIG_MISSING_WRITABLE_TABLE_ERROR, UNSAFE_WRITABLE_WARNING } from '../errors.js';
import { recreateTables, seedData } from '../queries.js';
import { type ManagedAppToken, getManagedAppTokenOrExit } from '../tokens.js';
import { type DBTables, astroConfigWithDbSchema } from '../types.js';
import { type VitePlugin } from '../utils.js';
import { errorMap } from './error-map.js';
import { fileURLIntegration } from './file-url.js';
import { getManagedAppTokenOrExit, type ManagedAppToken } from '../tokens.js';
import { loadDbConfigFile } from '../load-file.js';
import { vitePluginDb, type LateTables } from './vite-plugin-db.js';
import { typegen } from './typegen.js';
import { vitePluginDb } from './vite-plugin-db.js';
import { vitePluginInjectEnvTs } from './vite-plugin-inject-env-ts.js';
function astroDBIntegration(): AstroIntegration {
let connectedToRemote = false;
let connectToStudio = false;
let configFileDependencies: string[] = [];
let root: URL;
let appToken: ManagedAppToken | undefined;
let schemas = {
tables(): DBTables {
throw new Error('tables not found');
let dbConfig: DBConfig;
// Make table loading "late" to pass to plugins from `config:setup`,
// but load during `config:done` to wait for integrations to settle.
let tables: LateTables = {
get() {
throw new Error('[astro:db] INTERNAL Tables not loaded yet');
},
};
let command: 'dev' | 'build' | 'preview';
@ -31,25 +35,28 @@ function astroDBIntegration(): AstroIntegration {
hooks: {
'astro:config:setup': async ({ updateConfig, config, command: _command, logger }) => {
command = _command;
if (_command === 'preview') return;
root = config.root;
if (command === 'preview') return;
let dbPlugin: VitePlugin | undefined = undefined;
const studio = config.db?.studio ?? false;
connectToStudio = command === 'build';
if (studio && command === 'build' && process.env.ASTRO_DB_TEST_ENV !== '1') {
if (connectToStudio) {
appToken = await getManagedAppTokenOrExit();
connectedToRemote = true;
dbPlugin = vitePluginDb({
connectToStudio: true,
connectToStudio,
appToken: appToken.token,
schemas,
tables,
root: config.root,
srcDir: config.srcDir,
});
} else {
dbPlugin = vitePluginDb({
connectToStudio: false,
schemas,
tables,
root: config.root,
srcDir: config.srcDir,
});
}
@ -60,67 +67,50 @@ function astroDBIntegration(): AstroIntegration {
},
});
},
'astro:config:done': async ({ config, logger }) => {
'astro:config:done': async ({ config }) => {
// TODO: refine where we load tables
// @matthewp: may want to load tables by path at runtime
const configWithDb = astroConfigWithDbSchema.parse(config, { errorMap });
const tables = configWithDb.db?.tables ?? {};
// Redefine getTables so our integration can grab them
schemas.tables = () => tables;
const { mod, dependencies } = await loadDbConfigFile(config.root);
configFileDependencies = dependencies;
dbConfig = dbConfigSchema.parse(mod?.default ?? {}, {
errorMap,
});
// TODO: resolve integrations here?
tables.get = () => dbConfig.tables ?? {};
const studio = configWithDb.db?.studio ?? false;
const unsafeWritable = Boolean(configWithDb.db?.unsafeWritable);
const foundWritableCollection = Object.entries(tables).find(([, c]) => c.writable);
const writableAllowed = studio || unsafeWritable;
if (!writableAllowed && foundWritableCollection) {
logger.error(STUDIO_CONFIG_MISSING_WRITABLE_TABLE_ERROR(foundWritableCollection[0]));
process.exit(1);
}
// Using writable tables with the opt-in flag. Warn them to let them
// know the risk.
else if (unsafeWritable && foundWritableCollection) {
logger.warn(UNSAFE_WRITABLE_WARNING);
}
if (!connectedToRemote) {
if (!connectToStudio && !process.env.TEST_IN_MEMORY_DB) {
const dbUrl = new URL(DB_PATH, config.root);
if (existsSync(dbUrl)) {
await rm(dbUrl);
}
await mkdir(dirname(fileURLToPath(dbUrl)), { recursive: true });
await writeFile(dbUrl, '');
using db = await createLocalDatabaseClient({
tables,
dbUrl: dbUrl.toString(),
seeding: true,
});
await recreateTables({ db, tables });
if (configWithDb.db?.data) {
await seedData({
db,
data: configWithDb.db.data,
logger,
mode: command === 'dev' ? 'dev' : 'build',
});
}
logger.debug('Database setup complete.');
}
await typegen({ tables, root: config.root });
await typegen({ tables: tables.get() ?? {}, root: config.root });
},
'astro:server:start': async ({ logger }) => {
// Wait for the server startup to log, so that this can come afterwards.
setTimeout(() => {
logger.info(
connectedToRemote ? 'Connected to remote database.' : 'New local database created.'
connectToStudio ? 'Connected to remote database.' : 'New local database created.'
);
}, 100);
},
'astro:server:setup': async ({ server }) => {
const filesToWatch = [
...CONFIG_FILE_NAMES.map((c) => new URL(c, getDbDirectoryUrl(root))),
...configFileDependencies.map((c) => new URL(c, root)),
];
server.watcher.on('all', (event, relativeEntry) => {
const entry = new URL(relativeEntry, root);
if (filesToWatch.some((f) => entry.href === f.href)) {
server.restart();
}
});
},
'astro:build:start': async ({ logger }) => {
logger.info(
'database: ' + (connectedToRemote ? yellow('remote') : blue('local database.'))
);
logger.info('database: ' + (connectToStudio ? yellow('remote') : blue('local database.')));
},
'astro:build:done': async ({}) => {
await appToken?.destroy();

View file

@ -28,19 +28,7 @@ ${Object.entries(tables)
function generateTableType(name: string, collection: DBTable): string {
let tableType = ` export const ${name}: import(${RUNTIME_IMPORT}).Table<
${JSON.stringify(name)},
${JSON.stringify(
Object.fromEntries(
Object.entries(collection.columns).map(([columnName, column]) => [
columnName,
{
// Only select columns Drizzle needs for inference
type: column.type,
optional: column.schema.optional,
default: column.schema.default,
},
])
)
)}
${JSON.stringify(collection.columns)}
>;`;
return tableType;
}

View file

@ -1,70 +1,125 @@
import { DB_PATH, RUNTIME_DRIZZLE_IMPORT, RUNTIME_IMPORT, VIRTUAL_MODULE_ID } from '../consts.js';
import { fileURLToPath } from 'node:url';
import { SEED_DEV_FILE_NAME } from '../../runtime/queries.js';
import {
DB_PATH,
RUNTIME_CONFIG_IMPORT,
RUNTIME_DRIZZLE_IMPORT,
RUNTIME_IMPORT,
VIRTUAL_MODULE_ID,
} from '../consts.js';
import type { DBTables } from '../types.js';
import { type VitePlugin, getRemoteDatabaseUrl } from '../utils.js';
import { getDbDirectoryUrl, getRemoteDatabaseUrl, type VitePlugin } from '../utils.js';
import { normalizePath } from 'vite';
const LOCAL_DB_VIRTUAL_MODULE_ID = 'astro:local';
const resolvedVirtualModuleId = '\0' + VIRTUAL_MODULE_ID;
const resolvedLocalDbVirtualModuleId = LOCAL_DB_VIRTUAL_MODULE_ID + '/local-db';
const resolvedSeedVirtualModuleId = '\0' + VIRTUAL_MODULE_ID + '?shouldSeed';
type LateSchema = {
tables: () => DBTables;
export type LateTables = {
get: () => DBTables;
};
type VitePluginDBParams =
| {
connectToStudio: false;
schemas: LateSchema;
tables: LateTables;
srcDir: URL;
root: URL;
}
| {
connectToStudio: true;
schemas: LateSchema;
tables: LateTables;
appToken: string;
srcDir: URL;
root: URL;
};
export function vitePluginDb(params: VitePluginDBParams): VitePlugin {
const srcDirPath = normalizePath(fileURLToPath(params.srcDir));
return {
name: 'astro:db',
enforce: 'pre',
resolveId(id) {
if (id === VIRTUAL_MODULE_ID) {
return resolvedVirtualModuleId;
async resolveId(id, rawImporter) {
if (id === LOCAL_DB_VIRTUAL_MODULE_ID) return resolvedLocalDbVirtualModuleId;
if (id !== VIRTUAL_MODULE_ID) return;
if (params.connectToStudio) return resolvedVirtualModuleId;
const importer = rawImporter ? await this.resolve(rawImporter) : null;
if (!importer) return resolvedVirtualModuleId;
if (importer.id.startsWith(srcDirPath)) {
// Seed only if the importer is in the src directory.
// Otherwise, we may get recursive seed calls (ex. import from db/seed.ts).
return resolvedSeedVirtualModuleId;
}
return resolvedVirtualModuleId;
},
load(id) {
if (id !== resolvedVirtualModuleId) return;
if (id === resolvedLocalDbVirtualModuleId) {
const dbUrl = new URL(DB_PATH, params.root);
return `import { createLocalDatabaseClient } from ${RUNTIME_IMPORT};
const dbUrl = ${JSON.stringify(dbUrl)};
export const db = createLocalDatabaseClient({ dbUrl });`;
}
if (id !== resolvedVirtualModuleId && id !== resolvedSeedVirtualModuleId) return;
if (params.connectToStudio) {
return getStudioVirtualModContents({
appToken: params.appToken,
tables: params.schemas.tables(),
tables: params.tables.get(),
});
}
return getVirtualModContents({
return getLocalVirtualModContents({
root: params.root,
tables: params.schemas.tables(),
tables: params.tables.get(),
shouldSeed: id === resolvedSeedVirtualModuleId,
});
},
};
}
export function getVirtualModContents({ tables, root }: { tables: DBTables; root: URL }) {
const dbUrl = new URL(DB_PATH, root);
// Virtual module contents used when bundling `db/config.ts` from the CLI:
// re-exports the runtime config helpers (defineDB, defineTable, column, ...).
// NOTE(review): assumes RUNTIME_CONFIG_IMPORT is a pre-quoted module
// specifier (it is interpolated without added quotes) — confirm in consts.
export function getConfigVirtualModContents() {
	return `export * from ${RUNTIME_CONFIG_IMPORT}`;
}
export function getLocalVirtualModContents({
tables,
shouldSeed,
}: {
tables: DBTables;
root: URL;
shouldSeed: boolean;
}) {
const seedFilePaths = SEED_DEV_FILE_NAME.map(
// Format as /db/[name].ts
// for Vite import.meta.glob
(name) => new URL(name, getDbDirectoryUrl('file:///')).pathname
);
return `
import { collectionToTable, createLocalDatabaseClient } from ${RUNTIME_IMPORT};
import dbUrl from ${JSON.stringify(`${dbUrl}?fileurl`)};
import { asDrizzleTable, seedLocal } from ${RUNTIME_IMPORT};
import { db as _db } from ${JSON.stringify(LOCAL_DB_VIRTUAL_MODULE_ID)};
const params = ${JSON.stringify({
tables,
seeding: false,
})};
params.dbUrl = dbUrl;
export const db = _db;
export const db = await createLocalDatabaseClient(params);
${
shouldSeed
? `await seedLocal({
db: _db,
tables: ${JSON.stringify(tables)},
fileGlob: import.meta.glob(${JSON.stringify(seedFilePaths)}),
})`
: ''
}
export * from ${RUNTIME_DRIZZLE_IMPORT};
export * from ${RUNTIME_CONFIG_IMPORT};
${getStringifiedCollectionExports(tables)}
`;
${getStringifiedCollectionExports(tables)}`;
}
export function getStudioVirtualModContents({
@ -75,13 +130,14 @@ export function getStudioVirtualModContents({
appToken: string;
}) {
return `
import {collectionToTable, createRemoteDatabaseClient} from ${RUNTIME_IMPORT};
import {asDrizzleTable, createRemoteDatabaseClient} from ${RUNTIME_IMPORT};
export const db = await createRemoteDatabaseClient(${JSON.stringify(
appToken
// Respect runtime env for user overrides in SSR
)}, import.meta.env.ASTRO_STUDIO_REMOTE_DB_URL ?? ${JSON.stringify(getRemoteDatabaseUrl())});
export * from ${RUNTIME_DRIZZLE_IMPORT};
export * from ${RUNTIME_CONFIG_IMPORT};
${getStringifiedCollectionExports(tables)}
`;
@ -91,7 +147,7 @@ function getStringifiedCollectionExports(tables: DBTables) {
return Object.entries(tables)
.map(
([name, collection]) =>
`export const ${name} = collectionToTable(${JSON.stringify(name)}, ${JSON.stringify(
`export const ${name} = asDrizzleTable(${JSON.stringify(name)}, ${JSON.stringify(
collection
)}, false)`
)

View file

@ -0,0 +1,117 @@
import { build as esbuild } from 'esbuild';
import { CONFIG_FILE_NAMES, VIRTUAL_MODULE_ID } from './consts.js';
import { fileURLToPath } from 'node:url';
import { getConfigVirtualModContents } from './integration/vite-plugin-db.js';
import { writeFile, unlink } from 'node:fs/promises';
import { existsSync } from 'node:fs';
import { getDbDirectoryUrl } from './utils.js';
/**
 * Locate, bundle, and import the project's `db/config.*` file.
 *
 * @param root - Project root URL used to resolve the `db/` directory.
 * @returns The imported config module (or `undefined` when no config file
 * exists) plus the list of bundled input files, used for file watching.
 */
export async function loadDbConfigFile(
	root: URL
): Promise<{ mod: { default?: unknown } | undefined; dependencies: string[] }> {
	let configFileUrl: URL | undefined;
	// CONFIG_FILE_NAMES is ordered by precedence: stop at the FIRST match.
	// (Previously the loop kept overwriting, so the last match silently won.)
	for (const fileName of CONFIG_FILE_NAMES) {
		const fileUrl = new URL(fileName, getDbDirectoryUrl(root));
		if (existsSync(fileUrl)) {
			configFileUrl = fileUrl;
			break;
		}
	}
	if (!configFileUrl) {
		return { mod: undefined, dependencies: [] };
	}
	const { code, dependencies } = await bundleFile({
		virtualModContents: getConfigVirtualModContents(),
		root,
		fileUrl: configFileUrl,
	});
	return {
		mod: await importBundledFile({ code, root }),
		dependencies,
	};
}
/**
 * Bundle arbitrary `mjs` or `ts` file.
 * Simplified fork from Vite's `bundleConfigFile` function.
 *
 * @see https://github.com/vitejs/vite/blob/main/packages/vite/src/node/config.ts#L961
 */
export async function bundleFile({
	fileUrl,
	root,
	virtualModContents,
}: {
	fileUrl: URL;
	root: URL;
	virtualModContents: string;
}) {
	const result = await esbuild({
		absWorkingDir: process.cwd(),
		entryPoints: [fileURLToPath(fileUrl)],
		// `write: false` below means this name is never written to disk.
		outfile: 'out.js',
		// Keep node_modules packages external; only bundle the user's files.
		packages: 'external',
		write: false,
		target: ['node16'],
		platform: 'node',
		bundle: true,
		format: 'esm',
		sourcemap: 'inline',
		// The metafile's inputs double as the dependency list for watching.
		metafile: true,
		define: {
			'import.meta.env.ASTRO_STUDIO_REMOTE_DB_URL': 'undefined',
		},
		plugins: [
			{
				name: 'resolve-astro-db',
				// Swap `astro:db` imports for the provided virtual module
				// contents so the config can build without a Vite server.
				setup(build) {
					build.onResolve({ filter: /^astro:db$/ }, ({ path }) => {
						return { path, namespace: VIRTUAL_MODULE_ID };
					});
					build.onLoad({ namespace: VIRTUAL_MODULE_ID, filter: /.*/ }, () => {
						return {
							contents: virtualModContents,
							// Needed to resolve runtime dependencies
							resolveDir: fileURLToPath(root),
						};
					});
				},
			},
		],
	});
	const file = result.outputFiles[0];
	if (!file) {
		throw new Error(`Unexpected: no output file`);
	}
	return {
		code: file.text,
		dependencies: Object.keys(result.metafile.inputs),
	};
}
/**
 * Forked from Vite config loader, replacing CJS-based path concat with ESM only
 *
 * @see https://github.com/vitejs/vite/blob/main/packages/vite/src/node/config.ts#L1074
 *
 * @param code - Bundled ESM source text to execute.
 * @param root - Directory URL the temp module file is written into.
 * @returns The module namespace of the imported bundle.
 */
export async function importBundledFile({
	code,
	root,
}: {
	code: string;
	root: URL;
}): Promise<{ default?: unknown }> {
	// Write it to disk, load it with native Node ESM, then delete the file.
	const tmpFileUrl = new URL(`./db.timestamp-${Date.now()}.mjs`, root);
	await writeFile(tmpFileUrl, code, { encoding: 'utf8' });
	try {
		// Import by file URL (`href`), not `pathname`: a URL pathname like
		// `/C:/...` is not a valid ESM specifier on Windows.
		return await import(/* @vite-ignore */ tmpFileUrl.href);
	} finally {
		try {
			await unlink(tmpFileUrl);
		} catch {
			// already removed if this function is called twice simultaneously
		}
	}
}

View file

@ -1,292 +0,0 @@
import type { AstroIntegrationLogger } from 'astro';
import { type SQL, getTableName, sql } from 'drizzle-orm';
import { SQLiteAsyncDialect, type SQLiteInsert } from 'drizzle-orm/sqlite-core';
import type { SqliteRemoteDatabase } from 'drizzle-orm/sqlite-proxy';
import { bold } from 'kleur/colors';
import {
type BooleanColumn,
type ColumnType,
type DBColumn,
type DBTable,
type DBTables,
type DateColumn,
type JsonColumn,
type NumberColumn,
type TextColumn,
} from '../core/types.js';
import type {
ColumnsConfig,
DBUserConfig,
MaybeArray,
ResolvedCollectionConfig,
} from '../core/types.js';
import { hasPrimaryKey } from '../runtime/index.js';
import { isSerializedSQL } from '../runtime/types.js';
import { SEED_EMPTY_ARRAY_ERROR, SEED_ERROR, SEED_WRITABLE_IN_PROD_ERROR } from './errors.js';
const sqlite = new SQLiteAsyncDialect();
/**
 * Drop and re-create every configured table (plus its indexes).
 * Statements run sequentially against the given database client.
 */
export async function recreateTables({
	db,
	tables,
}: {
	db: SqliteRemoteDatabase;
	tables: DBTables;
}) {
	// Build the full statement list up front: drop, create, then indexes,
	// per table, in config order.
	const statements: SQL[] = Object.entries(tables).flatMap(([name, table]) => [
		sql.raw(`DROP TABLE IF EXISTS ${sqlite.escapeName(name)}`),
		sql.raw(getCreateTableQuery(name, table)),
		...getCreateIndexQueries(name, table).map((indexQuery) => sql.raw(indexQuery)),
	]);
	for (const statement of statements) {
		await db.run(statement);
	}
}
/**
 * Run the user-provided `data()` seed function(s) against the database.
 * Each function receives `seed`/`seedReturning` helpers plus the raw `db`.
 * Seed errors are logged rather than rethrown, so callers keep running.
 */
export async function seedData({
	db,
	data,
	logger,
	mode,
}: {
	db: SqliteRemoteDatabase;
	data: DBUserConfig['data'];
	logger?: AstroIntegrationLogger;
	mode: 'dev' | 'build';
}) {
	// Accept either a single seed function or an array of them.
	const dataFns = Array.isArray(data) ? data : [data];
	try {
		for (const dataFn of dataFns) {
			await dataFn({
				// Insert rows; wraps driver failures in a table-specific message.
				seed: async (config, values) => {
					seedErrorChecks(mode, config, values);
					try {
						await db.insert(config.table).values(values as any);
					} catch (e) {
						const msg = e instanceof Error ? e.message : String(e);
						throw new Error(SEED_ERROR(getTableName(config.table), msg));
					}
				},
				// Insert rows and return them (a single object for a single row).
				// NOTE(review): the insert builder is lazy — the query executes
				// when the CALLER awaits it, so this try/catch may never observe
				// driver errors. Confirm intended behavior.
				seedReturning: async (config, values) => {
					seedErrorChecks(mode, config, values);
					try {
						let result: SQLiteInsert<any, any, any, any> = db
							.insert(config.table)
							.values(values as any)
							.returning();
						if (!Array.isArray(values)) {
							result = result.get();
						}
						return result;
					} catch (e) {
						const msg = e instanceof Error ? e.message : String(e);
						throw new Error(SEED_ERROR(getTableName(config.table), msg));
					}
				},
				db,
				mode,
			});
		}
	} catch (e) {
		// Non-Error throws propagate; Error messages are reported, not fatal.
		if (!(e instanceof Error)) throw e;
		(logger ?? console).error(e.message);
	}
}
/**
 * Validation guards run before any seed insert is attempted.
 * Throws on (1) seeding a writable table during a production build and
 * (2) an empty-array insert, which is always a user mistake.
 */
function seedErrorChecks<T extends ColumnsConfig>(
	mode: 'dev' | 'build',
	{ table, writable }: ResolvedCollectionConfig<T, boolean>,
	values: MaybeArray<unknown>
) {
	const tableName = getTableName(table);
	const isTestEnv = process.env.ASTRO_DB_TEST_ENV === '1';
	if (mode === 'build' && writable && !isTestEnv) {
		throw new Error(SEED_WRITABLE_IN_PROD_ERROR(tableName));
	}
	if (Array.isArray(values) && !values.length) {
		throw new Error(SEED_EMPTY_ARRAY_ERROR(tableName));
	}
}
export function getCreateTableQuery(collectionName: string, collection: DBTable) {
let query = `CREATE TABLE ${sqlite.escapeName(collectionName)} (`;
const colQueries = [];
const colHasPrimaryKey = Object.entries(collection.columns).find(([, column]) =>
hasPrimaryKey(column)
);
if (!colHasPrimaryKey) {
colQueries.push('_id INTEGER PRIMARY KEY');
}
for (const [columnName, column] of Object.entries(collection.columns)) {
const colQuery = `${sqlite.escapeName(columnName)} ${schemaTypeToSqlType(
column.type
)}${getModifiers(columnName, column)}`;
colQueries.push(colQuery);
}
colQueries.push(...getCreateForeignKeyQueries(collectionName, collection));
query += colQueries.join(', ') + ')';
return query;
}
/**
 * Build one `CREATE [UNIQUE] INDEX` statement per configured index.
 */
export function getCreateIndexQueries(
	collectionName: string,
	collection: Pick<DBTable, 'indexes'>
) {
	return Object.entries(collection.indexes ?? {}).map(([indexName, indexProps]) => {
		// `on` may be a single column name or an array of them.
		const columnList = asArray(indexProps.on)
			.map((colName) => sqlite.escapeName(colName))
			.join(', ');
		const uniqueModifier = indexProps.unique ? 'UNIQUE ' : '';
		return `CREATE ${uniqueModifier}INDEX ${sqlite.escapeName(
			indexName
		)} ON ${sqlite.escapeName(collectionName)} (${columnList})`;
	});
}
/**
 * Build `FOREIGN KEY (...) REFERENCES ...` clauses for a table definition.
 * Throws when `columns` and `references` lengths differ, or when
 * `references` is empty (no referenced table can be determined).
 */
export function getCreateForeignKeyQueries(collectionName: string, collection: DBTable) {
	const queries: string[] = [];
	for (const foreignKey of collection.foreignKeys ?? []) {
		const columns = asArray(foreignKey.columns);
		const references = asArray(foreignKey.references);
		if (columns.length !== references.length) {
			throw new Error(
				`Foreign key on ${collectionName} is misconfigured. \`columns\` and \`references\` must be the same length.`
			);
		}
		// The referenced table name is tracked on each referenced column's schema.
		const referencedCollection = references[0]?.schema.collection;
		if (!referencedCollection) {
			throw new Error(
				`Foreign key on ${collectionName} is misconfigured. \`references\` cannot be empty.`
			);
		}
		const localColumns = columns.map((f) => sqlite.escapeName(f)).join(', ');
		const foreignColumns = references.map((r) => sqlite.escapeName(r.schema.name!)).join(', ');
		queries.push(
			`FOREIGN KEY (${localColumns}) REFERENCES ${sqlite.escapeName(
				referencedCollection
			)}(${foreignColumns})`
		);
	}
	return queries;
}
/** Normalize a single value or an array of values into an array. */
function asArray<T>(value: T | T[]) {
	if (Array.isArray(value)) return value;
	return [value];
}
/**
 * Map an astro:db column type to its SQLite storage class.
 * Dates and JSON are serialized to text; booleans are stored as integers.
 */
export function schemaTypeToSqlType(type: ColumnType): 'text' | 'integer' {
	return type === 'number' || type === 'boolean' ? 'integer' : 'text';
}
/**
 * Build the column modifier suffix (` PRIMARY KEY`, ` NOT NULL`, ` UNIQUE`,
 * ` DEFAULT …`, ` REFERENCES …`) for a column definition. Each modifier is
 * prefixed with a single space; an empty string means no modifiers.
 */
export function getModifiers(columnName: string, column: DBColumn) {
	// A primary key stands alone; no other modifiers apply.
	if (hasPrimaryKey(column)) {
		return ' PRIMARY KEY';
	}
	const parts: string[] = [];
	if (!column.schema.optional) {
		parts.push('NOT NULL');
	}
	if (column.schema.unique) {
		parts.push('UNIQUE');
	}
	if (hasDefault(column)) {
		parts.push(`DEFAULT ${getDefaultValueSql(columnName, column)}`);
	}
	const references = getReferencesConfig(column);
	if (references) {
		const { collection, name } = references.schema;
		if (!collection || !name) {
			throw new Error(
				`Column ${collection}.${name} references a collection that does not exist. Did you apply the referenced collection to the \`tables\` object in your Astro config?`
			);
		}
		parts.push(`REFERENCES ${sqlite.escapeName(collection)} (${sqlite.escapeName(name)})`);
	}
	return parts.map((part) => ` ${part}`).join('');
}
/**
 * Return a column's `references` config, or `undefined` for column types
 * that cannot declare foreign-key references.
 */
export function getReferencesConfig(column: DBColumn) {
	// Only number and text columns may reference another table's column.
	switch (column.type) {
		case 'number':
		case 'text':
			return column.schema.references;
		default:
			return undefined;
	}
}
// Using `DBColumn` will not narrow `default` based on the column `type`
// Handle each column separately
type WithDefaultDefined<T extends DBColumn> = T & {
schema: Required<Pick<T['schema'], 'default'>>;
};
type DBColumnWithDefault =
| WithDefaultDefined<TextColumn>
| WithDefaultDefined<DateColumn>
| WithDefaultDefined<NumberColumn>
| WithDefaultDefined<BooleanColumn>
| WithDefaultDefined<JsonColumn>;
// Type narrowing the default fails on union types, so use a type guard.
// A column "has a default" when one is declared explicitly, or when it is a
// numeric primary key (the database supplies the value).
export function hasDefault(column: DBColumn): column is DBColumnWithDefault {
	return (
		column.schema.default !== undefined || (hasPrimaryKey(column) && column.type === 'number')
	);
}
/**
 * Serialize a plain JS default value into SQL literal text.
 * Strings are escaped, booleans become TRUE/FALSE, everything else is
 * stringified as-is.
 */
function toDefault<T>(def: T | SQL<any>): string {
	switch (typeof def) {
		case 'string':
			return sqlite.escapeString(def as string);
		case 'boolean':
			return def ? 'TRUE' : 'FALSE';
		default:
			return def + '';
	}
}
/**
 * Render a column's default value as SQL literal text.
 *
 * @param columnName - Used only for the invalid-JSON error message.
 * @param column - Column known to carry a default (see `hasDefault`).
 * @returns SQL text for the `DEFAULT` clause.
 */
function getDefaultValueSql(columnName: string, column: DBColumnWithDefault): string {
	// Raw SQL defaults (ex. NOW) are emitted verbatim.
	if (isSerializedSQL(column.schema.default)) {
		return column.schema.default.sql;
	}
	switch (column.type) {
		case 'boolean':
		case 'number':
		case 'text':
		case 'date':
			return toDefault(column.schema.default);
		case 'json': {
			let stringified = '';
			try {
				stringified = JSON.stringify(column.schema.default);
			} catch (e) {
				// eslint-disable-next-line no-console
				console.log(
					`Invalid default value for column ${bold(
						columnName
					)}. Defaults must be valid JSON when using the \`json()\` type.`
				);
				// Exit with a FAILURE code: a non-serializable default is a user
				// error. (Was `process.exit(0)`, which reported success.)
				process.exit(1);
			}
			return sqlite.escapeString(stringified);
		}
	}
}

View file

@ -1,8 +1,6 @@
import type { InferSelectModel } from 'drizzle-orm';
import { SQL } from 'drizzle-orm';
import { SQLiteAsyncDialect, type SQLiteInsertValue } from 'drizzle-orm/sqlite-core';
import { SQLiteAsyncDialect } from 'drizzle-orm/sqlite-core';
import { type ZodTypeDef, z } from 'zod';
import { type SqliteDB, type Table, collectionToTable } from '../runtime/index.js';
import { SERIALIZED_SQL_KEY, type SerializedSQL } from '../runtime/types.js';
import { errorMap } from './integration/error-map.js';
@ -26,6 +24,7 @@ const baseColumnSchema = z.object({
// Defined when `defineReadableTable()` is called
name: z.string().optional(),
// TODO: rename to `tableName`. Breaking schema change
collection: z.string().optional(),
});
@ -181,41 +180,26 @@ const foreignKeysSchema: z.ZodType<ForeignKeysOutput, ZodTypeDef, ForeignKeysInp
export type Indexes = Record<string, z.infer<typeof indexSchema>>;
const baseCollectionSchema = z.object({
export const tableSchema = z.object({
columns: columnsSchema,
indexes: z.record(indexSchema).optional(),
foreignKeys: z.array(foreignKeysSchema).optional(),
});
export const readableCollectionSchema = baseCollectionSchema.extend({
writable: z.literal(false),
});
export const writableCollectionSchema = baseCollectionSchema.extend({
writable: z.literal(true),
});
export const collectionSchema = z.union([readableCollectionSchema, writableCollectionSchema]);
export const tablesSchema = z.preprocess((rawCollections) => {
export const tablesSchema = z.preprocess((rawTables) => {
// Use `z.any()` to avoid breaking object references
const tables = z.record(z.any()).parse(rawCollections, { errorMap });
for (const [collectionName, collection] of Object.entries(tables)) {
// Append `table` object for data seeding.
// Must append at runtime so table name exists.
collection.table = collectionToTable(
collectionName,
collectionSchema.parse(collection, { errorMap })
);
// Append collection and column names to columns.
// Used to track collection info for references.
const { columns } = z.object({ columns: z.record(z.any()) }).parse(collection, { errorMap });
const tables = z.record(z.any()).parse(rawTables, { errorMap });
for (const [tableName, table] of Object.entries(tables)) {
// Append table and column names to columns.
// Used to track table info for references.
const { columns } = z.object({ columns: z.record(z.any()) }).parse(table, { errorMap });
for (const [columnName, column] of Object.entries(columns)) {
column.schema.name = columnName;
column.schema.collection = collectionName;
column.schema.collection = tableName;
}
}
return rawCollections;
}, z.record(collectionSchema));
return rawTables;
}, z.record(tableSchema));
export type BooleanColumn = z.infer<typeof booleanColumnSchema>;
export type BooleanColumnInput = z.input<typeof booleanColumnSchema>;
@ -243,7 +227,7 @@ export type DBColumnInput =
| TextColumnInput
| JsonColumnInput;
export type DBColumns = z.infer<typeof columnsSchema>;
export type DBTable = z.infer<typeof readableCollectionSchema | typeof writableCollectionSchema>;
export type DBTable = z.infer<typeof tableSchema>;
export type DBTables = Record<string, DBTable>;
export type DBSnapshot = {
schema: Record<string, DBTable>;
@ -253,62 +237,24 @@ export type DBSnapshot = {
*/
experimentalVersion: number;
};
export type ReadableDBTable = z.infer<typeof readableCollectionSchema>;
export type WritableDBTable = z.infer<typeof writableCollectionSchema>;
export type DBDataContext = {
db: SqliteDB;
seed: <TColumns extends ColumnsConfig>(
collection: ResolvedCollectionConfig<TColumns>,
data: MaybeArray<SQLiteInsertValue<Table<string, TColumns>>>
) => Promise<void>;
seedReturning: <
TColumns extends ColumnsConfig,
TData extends MaybeArray<SQLiteInsertValue<Table<string, TColumns>>>,
>(
collection: ResolvedCollectionConfig<TColumns>,
data: TData
) => Promise<
TData extends Array<SQLiteInsertValue<Table<string, TColumns>>>
? InferSelectModel<Table<string, TColumns>>[]
: InferSelectModel<Table<string, TColumns>>
>;
mode: 'dev' | 'build';
};
export function defineData(fn: (ctx: DBDataContext) => MaybePromise<void>) {
return fn;
}
const dbDataFn = z.function().returns(z.union([z.void(), z.promise(z.void())]));
export const dbConfigSchema = z.object({
studio: z.boolean().optional(),
tables: tablesSchema.optional(),
data: z.union([dbDataFn, z.array(dbDataFn)]).optional(),
unsafeWritable: z.boolean().optional().default(false),
});
type DataFunction = (params: DBDataContext) => MaybePromise<void>;
export type DBConfigInput = z.input<typeof dbConfigSchema>;
export type DBConfig = z.infer<typeof dbConfigSchema>;
export type DBUserConfig = Omit<z.input<typeof dbConfigSchema>, 'data'> & {
data: DataFunction | DataFunction[];
};
export type ColumnsConfig = z.input<typeof tableSchema>['columns'];
export type OutputColumnsConfig = z.output<typeof tableSchema>['columns'];
export const astroConfigWithDbSchema = z.object({
db: dbConfigSchema.optional(),
});
export type ColumnsConfig = z.input<typeof collectionSchema>['columns'];
interface CollectionConfig<TColumns extends ColumnsConfig = ColumnsConfig>
export interface TableConfig<TColumns extends ColumnsConfig = ColumnsConfig>
// use `extends` to ensure types line up with zod,
// only adding generics for type completions.
extends Pick<z.input<typeof collectionSchema>, 'columns' | 'indexes' | 'foreignKeys'> {
extends Pick<z.input<typeof tableSchema>, 'columns' | 'indexes' | 'foreignKeys'> {
columns: TColumns;
foreignKeys?: Array<{
columns: MaybeArray<Extract<keyof TColumns, string>>;
// TODO: runtime error if parent collection doesn't match for all columns. Can't put a generic here...
references: () => MaybeArray<z.input<typeof referenceableColumnSchema>>;
}>;
indexes?: Record<string, IndexConfig<TColumns>>;
@ -318,69 +264,11 @@ interface IndexConfig<TColumns extends ColumnsConfig> extends z.input<typeof ind
on: MaybeArray<Extract<keyof TColumns, string>>;
}
export type ResolvedCollectionConfig<
TColumns extends ColumnsConfig = ColumnsConfig,
Writable extends boolean = boolean,
> = CollectionConfig<TColumns> & {
writable: Writable;
table: Table<string, TColumns>;
};
function baseDefineCollection<TColumns extends ColumnsConfig, TWritable extends boolean>(
userConfig: CollectionConfig<TColumns>,
writable: TWritable
): ResolvedCollectionConfig<TColumns, TWritable> {
return {
...userConfig,
writable,
// set at runtime to get the table name
table: null!,
};
}
export function defineReadableTable<TColumns extends ColumnsConfig>(
userConfig: CollectionConfig<TColumns>
): ResolvedCollectionConfig<TColumns, false> {
return baseDefineCollection(userConfig, false);
}
export function defineWritableTable<TColumns extends ColumnsConfig>(
userConfig: CollectionConfig<TColumns>
): ResolvedCollectionConfig<TColumns, true> {
return baseDefineCollection(userConfig, true);
}
export type AstroConfigWithDB = z.input<typeof astroConfigWithDbSchema>;
/** @deprecated Use `TableConfig` instead */
export type ResolvedCollectionConfig<TColumns extends ColumnsConfig = ColumnsConfig> =
TableConfig<TColumns>;
// We cannot use `Omit<NumberColumn | TextColumn, 'type'>`,
// since Omit collapses our union type on primary key.
type NumberColumnOpts = z.input<typeof numberColumnOptsSchema>;
type TextColumnOpts = z.input<typeof textColumnOptsSchema>;
function createColumn<S extends string, T extends Record<string, unknown>>(type: S, schema: T) {
return {
type,
/**
* @internal
*/
schema,
};
}
export const column = {
number: <T extends NumberColumnOpts>(opts: T = {} as T) => {
return createColumn('number', opts) satisfies { type: 'number' };
},
boolean: <T extends BooleanColumnInput['schema']>(opts: T = {} as T) => {
return createColumn('boolean', opts) satisfies { type: 'boolean' };
},
text: <T extends TextColumnOpts>(opts: T = {} as T) => {
return createColumn('text', opts) satisfies { type: 'text' };
},
date<T extends DateColumnInput['schema']>(opts: T = {} as T) {
return createColumn('date', opts) satisfies { type: 'date' };
},
json<T extends JsonColumnInput['schema']>(opts: T = {} as T) {
return createColumn('json', opts) satisfies { type: 'json' };
},
};
export type NumberColumnOpts = z.input<typeof numberColumnOptsSchema>;
export type TextColumnOpts = z.input<typeof textColumnOptsSchema>;

View file

@ -17,3 +17,11 @@ export function getAstroStudioUrl(): string {
const env = getAstroStudioEnv();
return env.ASTRO_STUDIO_URL || 'https://stardate.astro.build';
}
/** Resolve the project's `db/` directory URL from its root. */
export function getDbDirectoryUrl(root: URL | string) {
	const dbDir = new URL('db/', root);
	return dbDir;
}

/** Resolve the `db/migrations/` directory URL from the project root. */
export function getMigrationsDirectoryUrl(root: URL | string) {
	const migrationsDir = new URL('migrations/', getDbDirectoryUrl(root));
	return migrationsDir;
}

View file

@ -1,5 +1,4 @@
export { defineReadableTable, defineWritableTable, defineData, column } from './core/types.js';
export type { ResolvedCollectionConfig, DBDataContext } from './core/types.js';
export type { ResolvedCollectionConfig, TableConfig } from './core/types.js';
export { cli } from './core/cli/index.js';
export { integration as default } from './core/integration/index.js';
export { sql, NOW, TRUE, FALSE } from './runtime/index.js';
export { sql, NOW, TRUE, FALSE, defineDB, defineTable, column } from './runtime/config.js';

View file

@ -0,0 +1,48 @@
import type {
BooleanColumnInput,
ColumnsConfig,
DBConfigInput,
DateColumnInput,
JsonColumnInput,
NumberColumnOpts,
TableConfig,
TextColumnOpts,
} from '../core/types.js';
/**
 * Internal factory shared by the `column` helpers: pairs a column `type`
 * tag with its user-provided options under the `schema` key.
 */
function createColumn<S extends string, T extends Record<string, unknown>>(type: S, schema: T) {
	const built = {
		type,
		/**
		 * @internal
		 */
		schema,
	};
	return built;
}

/** Column builders used inside `defineTable()` schemas. */
export const column = {
	number<T extends NumberColumnOpts>(opts: T = {} as T) {
		return createColumn('number', opts) satisfies { type: 'number' };
	},
	boolean<T extends BooleanColumnInput['schema']>(opts: T = {} as T) {
		return createColumn('boolean', opts) satisfies { type: 'boolean' };
	},
	text<T extends TextColumnOpts>(opts: T = {} as T) {
		return createColumn('text', opts) satisfies { type: 'text' };
	},
	date<T extends DateColumnInput['schema']>(opts: T = {} as T) {
		return createColumn('date', opts) satisfies { type: 'date' };
	},
	json<T extends JsonColumnInput['schema']>(opts: T = {} as T) {
		return createColumn('json', opts) satisfies { type: 'json' };
	},
};
/**
 * Identity helper: returns the table config unchanged. Exists purely to
 * give users type checking and editor completions for `TableConfig`.
 */
export function defineTable<TColumns extends ColumnsConfig>(userConfig: TableConfig<TColumns>) {
	return userConfig;
}

/**
 * Identity helper: returns the db config unchanged. Exists purely to give
 * users type checking and editor completions for `DBConfigInput`.
 */
export function defineDB(userConfig: DBConfigInput) {
	return userConfig;
}
export { sql, NOW, TRUE, FALSE } from './index.js';

View file

@ -1,59 +1,19 @@
import type { InStatement } from '@libsql/client';
import { createClient } from '@libsql/client';
import { getTableName } from 'drizzle-orm';
import type { LibSQLDatabase } from 'drizzle-orm/libsql';
import { drizzle as drizzleLibsql } from 'drizzle-orm/libsql';
import { type SQLiteTable } from 'drizzle-orm/sqlite-core';
import { drizzle as drizzleProxy } from 'drizzle-orm/sqlite-proxy';
import { z } from 'zod';
import { type DBTables } from '../core/types.js';
const isWebContainer = !!process.versions?.webcontainer;
interface LocalDatabaseClient extends LibSQLDatabase, Disposable {}
export async function createLocalDatabaseClient({
tables,
dbUrl,
seeding,
}: {
dbUrl: string;
tables: DBTables;
seeding: boolean;
}): Promise<LocalDatabaseClient> {
export function createLocalDatabaseClient({ dbUrl }: { dbUrl: string }): LibSQLDatabase {
const url = isWebContainer ? 'file:content.db' : dbUrl;
const client = createClient({ url });
const db = Object.assign(drizzleLibsql(client), {
[Symbol.dispose || Symbol.for('Symbol.dispose')]() {
client.close();
},
});
console.log('memory', process.env.TEST_IN_MEMORY_DB);
const client = createClient({ url: process.env.TEST_IN_MEMORY_DB ? ':memory:' : url });
const db = drizzleLibsql(client);
if (seeding) return db;
const { insert: drizzleInsert, update: drizzleUpdate, delete: drizzleDelete } = db;
return Object.assign(db, {
insert(Table: SQLiteTable) {
checkIfModificationIsAllowed(tables, Table);
return drizzleInsert.call(this, Table);
},
update(Table: SQLiteTable) {
checkIfModificationIsAllowed(tables, Table);
return drizzleUpdate.call(this, Table);
},
delete(Table: SQLiteTable) {
checkIfModificationIsAllowed(tables, Table);
return drizzleDelete.call(this, Table);
},
});
}
function checkIfModificationIsAllowed(tables: DBTables, Table: SQLiteTable) {
const tableName = getTableName(Table);
const collection = tables[tableName];
if (!collection.writable) {
throw new Error(`The [${tableName}] collection is read-only.`);
}
return db;
}
export function createRemoteDatabaseClient(appToken: string, remoteDbURL: string) {
@ -61,8 +21,6 @@ export function createRemoteDatabaseClient(appToken: string, remoteDbURL: string
const db = drizzleProxy(async (sql, parameters, method) => {
const requestBody: InStatement = { sql, args: parameters };
// eslint-disable-next-line no-console
console.info(JSON.stringify(requestBody));
const res = await fetch(url, {
method: 'POST',
headers: {
@ -107,5 +65,9 @@ export function createRemoteDatabaseClient(appToken: string, remoteDbURL: string
return { rows: rowValues };
});
(db as any).batch = (_drizzleQueries: Array<Promise<unknown>>) => {
throw new Error('db.batch() is not currently supported.');
};
return db;
}

View file

@ -1,3 +1,4 @@
import type { LibSQLDatabase } from 'drizzle-orm/libsql';
import { type ColumnBuilderBaseConfig, type ColumnDataType, sql } from 'drizzle-orm';
import {
type IndexBuilder,
@ -8,14 +9,14 @@ import {
sqliteTable,
text,
} from 'drizzle-orm/sqlite-core';
import type { SqliteRemoteDatabase } from 'drizzle-orm/sqlite-proxy';
import { type DBColumn, type DBTable } from '../core/types.js';
import { type SerializedSQL, isSerializedSQL } from './types.js';
export { sql };
export type SqliteDB = SqliteRemoteDatabase;
export type SqliteDB = LibSQLDatabase;
export type { Table } from './types.js';
export { createRemoteDatabaseClient, createLocalDatabaseClient } from './db-client.js';
export { seedLocal } from './queries.js';
export function hasPrimaryKey(column: DBColumn) {
return 'primaryKey' in column.schema && !!column.schema.primaryKey;
@ -54,17 +55,17 @@ type D1ColumnBuilder = SQLiteColumnBuilderBase<
ColumnBuilderBaseConfig<ColumnDataType, string> & { data: unknown }
>;
export function collectionToTable(name: string, collection: DBTable) {
export function asDrizzleTable(name: string, table: DBTable) {
const columns: Record<string, D1ColumnBuilder> = {};
if (!Object.entries(collection.columns).some(([, column]) => hasPrimaryKey(column))) {
if (!Object.entries(table.columns).some(([, column]) => hasPrimaryKey(column))) {
columns['_id'] = integer('_id').primaryKey();
}
for (const [columnName, column] of Object.entries(collection.columns)) {
for (const [columnName, column] of Object.entries(table.columns)) {
columns[columnName] = columnMapper(columnName, column);
}
const table = sqliteTable(name, columns, (ormTable) => {
const drizzleTable = sqliteTable(name, columns, (ormTable) => {
const indexes: Record<string, IndexBuilder> = {};
for (const [indexName, indexProps] of Object.entries(collection.indexes ?? {})) {
for (const [indexName, indexProps] of Object.entries(table.indexes ?? {})) {
const onColNames = Array.isArray(indexProps.on) ? indexProps.on : [indexProps.on];
const onCols = onColNames.map((colName) => ormTable[colName]);
if (!atLeastOne(onCols)) continue;
@ -73,7 +74,7 @@ export function collectionToTable(name: string, collection: DBTable) {
}
return indexes;
});
return table;
return drizzleTable;
}
function atLeastOne<T>(arr: T[]): arr is [T, ...T[]] {

View file

@ -0,0 +1,244 @@
import type {
BooleanColumn,
DBTable,
DBTables,
DBColumn,
DateColumn,
ColumnType,
JsonColumn,
NumberColumn,
TextColumn,
} from '../core/types.js';
import { bold } from 'kleur/colors';
import { type SQL, sql } from 'drizzle-orm';
import { SQLiteAsyncDialect } from 'drizzle-orm/sqlite-core';
import { hasPrimaryKey, type SqliteDB } from './index.js';
import { isSerializedSQL } from './types.js';
import {
FOREIGN_KEY_REFERENCES_LENGTH_ERROR,
FOREIGN_KEY_REFERENCES_EMPTY_ERROR,
REFERENCE_DNE_ERROR,
FOREIGN_KEY_DNE_ERROR,
SEED_ERROR,
} from '../core/errors.js';
import { LibsqlError } from '@libsql/client';
const sqlite = new SQLiteAsyncDialect();
export const SEED_DEV_FILE_NAME = ['seed.ts', 'seed.js', 'seed.mjs', 'seed.mts'];

/**
 * Recreate all local tables from scratch, then run the user's seed file.
 * Seed files are looked up by extension in `SEED_DEV_FILE_NAME` order;
 * only the first match in `fileGlob` is executed.
 */
export async function seedLocal({
	db,
	tables,
	// Glob all potential seed files to catch renames and deletions.
	fileGlob,
}: {
	db: SqliteDB;
	tables: DBTables;
	fileGlob: Record<string, () => Promise<void>>;
}) {
	await recreateTables({ db, tables });
	const globKeys = Object.keys(fileGlob);
	for (const fileName of SEED_DEV_FILE_NAME) {
		const match = globKeys.find((key) => key.endsWith(fileName));
		if (!match) continue;
		try {
			await fileGlob[match]();
		} catch (e) {
			// Surface database errors with a friendlier seeding message.
			if (e instanceof LibsqlError) {
				throw new Error(SEED_ERROR(e.message));
			}
			throw e;
		}
		return;
	}
}
/**
 * Drop and recreate every configured table (plus its indexes)
 * as a single batched transaction.
 */
export async function recreateTables({ db, tables }: { db: SqliteDB; tables: DBTables }) {
	const setupQueries: SQL[] = Object.entries(tables).flatMap(([name, table]) => [
		sql.raw(`DROP TABLE IF EXISTS ${sqlite.escapeName(name)}`),
		sql.raw(getCreateTableQuery(name, table)),
		...getCreateIndexQueries(name, table).map((indexQuery) => sql.raw(indexQuery)),
	]);
	// Defer foreign key checks so tables can be dropped and recreated in any order.
	await db.batch([
		db.run(sql`pragma defer_foreign_keys=true;`),
		...setupQueries.map((q) => db.run(q)),
	]);
}
export function getCreateTableQuery(tableName: string, table: DBTable) {
let query = `CREATE TABLE ${sqlite.escapeName(tableName)} (`;
const colQueries = [];
const colHasPrimaryKey = Object.entries(table.columns).find(([, column]) =>
hasPrimaryKey(column)
);
if (!colHasPrimaryKey) {
colQueries.push('_id INTEGER PRIMARY KEY');
}
for (const [columnName, column] of Object.entries(table.columns)) {
const colQuery = `${sqlite.escapeName(columnName)} ${schemaTypeToSqlType(
column.type
)}${getModifiers(columnName, column)}`;
colQueries.push(colQuery);
}
colQueries.push(...getCreateForeignKeyQueries(tableName, table));
query += colQueries.join(', ') + ')';
return query;
}
/** Build one `CREATE [UNIQUE] INDEX` statement per configured index. */
export function getCreateIndexQueries(tableName: string, table: Pick<DBTable, 'indexes'>) {
	return Object.entries(table.indexes ?? {}).map(([indexName, indexProps]) => {
		const escapedCols = asArray(indexProps.on).map((colName) => sqlite.escapeName(colName));
		const unique = indexProps.unique ? 'UNIQUE ' : '';
		return `CREATE ${unique}INDEX ${sqlite.escapeName(indexName)} ON ${sqlite.escapeName(
			tableName
		)} (${escapedCols.join(', ')})`;
	});
}
/**
 * Build `FOREIGN KEY (...) REFERENCES ...` clauses for a table definition.
 *
 * @throws when the local column count and referenced column count differ,
 *         when no referenced column is given, or when the referenced table
 *         name cannot be resolved from the first reference.
 */
export function getCreateForeignKeyQueries(tableName: string, table: DBTable) {
	const queries: string[] = [];
	for (const foreignKey of table.foreignKeys ?? []) {
		const columns = asArray(foreignKey.columns);
		const references = asArray(foreignKey.references);

		if (columns.length !== references.length) {
			throw new Error(FOREIGN_KEY_REFERENCES_LENGTH_ERROR(tableName));
		}
		const firstReference = references[0];
		if (!firstReference) {
			throw new Error(FOREIGN_KEY_REFERENCES_EMPTY_ERROR(tableName));
		}
		// The referenced table is resolved from the first reference; all
		// references in one foreign key must live on the same table.
		const referencedTable = firstReference.schema.collection;
		if (!referencedTable) {
			throw new Error(FOREIGN_KEY_DNE_ERROR(tableName));
		}
		const localCols = columns.map((f) => sqlite.escapeName(f)).join(', ');
		const referencedCols = references.map((r) => sqlite.escapeName(r.schema.name!)).join(', ');
		queries.push(
			`FOREIGN KEY (${localCols}) REFERENCES ${sqlite.escapeName(referencedTable)}(${referencedCols})`
		);
	}
	return queries;
}
/** Normalize a value-or-array config field into an array. */
function asArray<T>(value: T | T[]): T[] {
	if (Array.isArray(value)) return value;
	return [value];
}
/**
 * Map a column config type to its underlying SQLite storage type.
 * Dates and JSON are serialized to strings; booleans are stored as integers.
 */
export function schemaTypeToSqlType(type: ColumnType): 'text' | 'integer' {
	const sqlTypeFor: Record<ColumnType, 'text' | 'integer'> = {
		date: 'text',
		text: 'text',
		json: 'text',
		number: 'integer',
		boolean: 'integer',
	};
	return sqlTypeFor[type];
}
/**
 * Build the column modifier suffix (` NOT NULL`, ` UNIQUE`, ` DEFAULT …`,
 * ` REFERENCES …`) for a CREATE TABLE column definition.
 * Returns an empty string when no modifiers apply.
 * @throws when a reference config is missing its table or column name.
 */
export function getModifiers(columnName: string, column: DBColumn) {
	// A primary key column takes no other modifiers.
	if (hasPrimaryKey(column)) {
		return ' PRIMARY KEY';
	}
	const parts: string[] = [];
	if (!column.schema.optional) {
		parts.push('NOT NULL');
	}
	if (column.schema.unique) {
		parts.push('UNIQUE');
	}
	if (hasDefault(column)) {
		parts.push(`DEFAULT ${getDefaultValueSql(columnName, column)}`);
	}
	const references = getReferencesConfig(column);
	if (references) {
		const { collection: tableName, name } = references.schema;
		if (!tableName || !name) {
			throw new Error(REFERENCE_DNE_ERROR(columnName));
		}
		parts.push(`REFERENCES ${sqlite.escapeName(tableName)} (${sqlite.escapeName(name)})`);
	}
	return parts.length ? ` ${parts.join(' ')}` : '';
}
/**
 * Return the column's reference config, or `undefined` for column types
 * that cannot reference other columns (only number and text can).
 */
export function getReferencesConfig(column: DBColumn) {
	switch (column.type) {
		case 'number':
		case 'text':
			return column.schema.references;
		default:
			return undefined;
	}
}
// Using `DBColumn` will not narrow `default` based on the column `type`,
// so handle each concrete column type separately: the union below gives
// every member a `schema.default` that is guaranteed present.
type WithDefaultDefined<T extends DBColumn> = T & {
	schema: Required<Pick<T['schema'], 'default'>>;
};

// Union of every column type with a defined default value.
type DBColumnWithDefault =
	| WithDefaultDefined<TextColumn>
	| WithDefaultDefined<DateColumn>
	| WithDefaultDefined<NumberColumn>
	| WithDefaultDefined<BooleanColumn>
	| WithDefaultDefined<JsonColumn>;
// Type narrowing the default fails on union types, so use a type guard.
/**
 * True when the column has an explicit `default`, or is a numeric
 * primary key (which gets an implicit ROWID-style default).
 */
export function hasDefault(column: DBColumn): column is DBColumnWithDefault {
	return (
		column.schema.default !== undefined ||
		(column.type === 'number' && hasPrimaryKey(column))
	);
}
/**
 * Render a plain default value as a SQL literal: strings are escaped,
 * booleans become TRUE/FALSE, everything else is stringified as-is.
 */
function toDefault<T>(def: T | SQL<any>): string {
	switch (typeof def) {
		case 'string':
			return sqlite.escapeString(def as string);
		case 'boolean':
			return def ? 'TRUE' : 'FALSE';
		default:
			return def + '';
	}
}
/**
 * Render a column's configured default as a SQL literal for CREATE TABLE.
 * Raw `sql` expression defaults are emitted verbatim; JSON defaults are
 * stringified and escaped as text.
 */
function getDefaultValueSql(columnName: string, column: DBColumnWithDefault): string {
	// Serialized sql`...` expressions (e.g. CURRENT_TIMESTAMP) pass through as-is.
	if (isSerializedSQL(column.schema.default)) {
		return column.schema.default.sql;
	}

	switch (column.type) {
		case 'boolean':
		case 'number':
		case 'text':
		case 'date':
			return toDefault(column.schema.default);
		case 'json': {
			let stringified = '';
			try {
				stringified = JSON.stringify(column.schema.default);
			} catch (e) {
				// eslint-disable-next-line no-console
				console.log(
					`Invalid default value for column ${bold(
						columnName
					)}. Defaults must be valid JSON when using the \`json()\` type.`
				);
				// NOTE(review): exits with code 0 on an error path, so callers/CI
				// cannot detect the failure — confirm whether exit(1) was intended.
				process.exit(0);
			}
			// Store the JSON document as an escaped text literal.
			return sqlite.escapeString(stringified);
		}
	}
}

View file

@ -1,6 +1,6 @@
import type { ColumnBaseConfig, ColumnDataType } from 'drizzle-orm';
import type { SQLiteColumn, SQLiteTableWithColumns } from 'drizzle-orm/sqlite-core';
import type { ColumnsConfig, DBColumn } from '../core/types.js';
import type { ColumnsConfig, DBColumn, OutputColumnsConfig } from '../core/types.js';
type GeneratedConfig<T extends ColumnDataType = ColumnDataType> = Pick<
ColumnBaseConfig<T, string>,
@ -76,7 +76,7 @@ export type Column<T extends DBColumn['type'], S extends GeneratedConfig> = T ex
export type Table<
TTableName extends string,
TColumns extends ColumnsConfig,
TColumns extends OutputColumnsConfig | ColumnsConfig,
> = SQLiteTableWithColumns<{
name: TTableName;
schema: undefined;

1
packages/db/src/utils.ts Normal file
View file

@ -0,0 +1 @@
export { asDrizzleTable } from './runtime/index.js';

View file

@ -3,10 +3,6 @@ import { load as cheerioLoad } from 'cheerio';
import testAdapter from '../../astro/test/test-adapter.js';
import { loadFixture } from '../../astro/test/test-utils.js';
// TODO(fks): Rename this to something more generic/generally useful
// like `ASTRO_MONOREPO_TEST_ENV` if @astrojs/db is merged into astro.
process.env.ASTRO_DB_TEST_ENV = '1';
describe('astro:db', () => {
let fixture;
before(async () => {
@ -17,16 +13,25 @@ describe('astro:db', () => {
});
});
describe('production', () => {
// Note(bholmesdev): Use in-memory db to avoid
// Multiple dev servers trying to unlink and remount
// the same database file.
process.env.TEST_IN_MEMORY_DB = 'true';
describe('development', () => {
let devServer;
before(async () => {
await fixture.build();
console.log('starting dev server');
devServer = await fixture.startDevServer();
});
after(async () => {
await devServer.stop();
process.env.TEST_IN_MEMORY_DB = undefined;
});
it('Prints the list of authors', async () => {
const app = await fixture.loadTestAdapterApp();
const request = new Request('http://example.com/');
const res = await app.render(request);
const html = await res.text();
const html = await fixture.fetch('/').then((res) => res.text());
const $ = cheerioLoad(html);
const ul = $('.authors-list');
@ -34,71 +39,36 @@ describe('astro:db', () => {
expect(ul.children().eq(0).text()).to.equal('Ben');
});
it('Errors when inserting to a readonly collection', async () => {
const app = await fixture.loadTestAdapterApp();
const request = new Request('http://example.com/insert-into-readonly');
const res = await app.render(request);
const html = await res.text();
it('Allows expression defaults for date columns', async () => {
const html = await fixture.fetch('/').then((res) => res.text());
const $ = cheerioLoad(html);
expect($('#error').text()).to.equal('The [Author] collection is read-only.');
const themeAdded = $($('.themes-list .theme-added')[0]).text();
expect(new Date(themeAdded).getTime()).to.not.be.NaN;
});
it('Does not error when inserting into writable collection', async () => {
const app = await fixture.loadTestAdapterApp();
const request = new Request('http://example.com/insert-into-writable');
const res = await app.render(request);
const html = await res.text();
it('Defaults can be overridden for dates', async () => {
const html = await fixture.fetch('/').then((res) => res.text());
const $ = cheerioLoad(html);
expect($('#error').text()).to.equal('');
const themeAdded = $($('.themes-list .theme-added')[1]).text();
expect(new Date(themeAdded).getTime()).to.not.be.NaN;
});
describe('Expression defaults', () => {
let app;
before(async () => {
app = await fixture.loadTestAdapterApp();
});
it('Allows expression defaults for text columns', async () => {
const html = await fixture.fetch('/').then((res) => res.text());
const $ = cheerioLoad(html);
it('Allows expression defaults for date columns', async () => {
const request = new Request('http://example.com/');
const res = await app.render(request);
const html = await res.text();
const $ = cheerioLoad(html);
const themeOwner = $($('.themes-list .theme-owner')[0]).text();
expect(themeOwner).to.equal('');
});
const themeAdded = $($('.themes-list .theme-added')[0]).text();
expect(new Date(themeAdded).getTime()).to.not.be.NaN;
});
it('Allows expression defaults for boolean columns', async () => {
const html = await fixture.fetch('/').then((res) => res.text());
const $ = cheerioLoad(html);
it('Defaults can be overridden for dates', async () => {
const request = new Request('http://example.com/');
const res = await app.render(request);
const html = await res.text();
const $ = cheerioLoad(html);
const themeAdded = $($('.themes-list .theme-added')[1]).text();
expect(new Date(themeAdded).getTime()).to.not.be.NaN;
});
it('Allows expression defaults for text columns', async () => {
const request = new Request('http://example.com/');
const res = await app.render(request);
const html = await res.text();
const $ = cheerioLoad(html);
const themeOwner = $($('.themes-list .theme-owner')[0]).text();
expect(themeOwner).to.equal('');
});
it('Allows expression defaults for boolean columns', async () => {
const request = new Request('http://example.com/');
const res = await app.render(request);
const html = await res.text();
const $ = cheerioLoad(html);
const themeDark = $($('.themes-list .theme-dark')[0]).text();
expect(themeDark).to.equal('dark mode');
});
const themeDark = $($('.themes-list .theme-dark')[0]).text();
expect(themeDark).to.equal('dark mode');
});
});
});

View file

@ -0,0 +1,7 @@
import db from '@astrojs/db';
import { defineConfig } from 'astro/config';

// https://astro.build/config
// Register the Astro DB integration so db/config.ts and db/seed.ts are picked up.
export default defineConfig({
	integrations: [db()],
});

View file

@ -1,28 +0,0 @@
import db, { defineReadableTable, column } from '@astrojs/db';
import { defineConfig } from 'astro/config';
import { themes } from './themes-integration';
const Author = defineReadableTable({
columns: {
name: column.text(),
},
});
// https://astro.build/config
export default defineConfig({
integrations: [db(), themes()],
db: {
studio: false,
unsafeWritable: true,
tables: { Author },
async data({ seed }) {
await seed(Author, [
{ name: 'Ben' },
{ name: 'Nate' },
{ name: 'Erika' },
{ name: 'Bjorn' },
{ name: 'Sarah' },
]);
},
},
});

View file

@ -0,0 +1,12 @@
import { defineDB, defineTable, column } from 'astro:db';
import { Themes } from './theme';

// Authors rendered on the fixture's index page; rows come from db/seed.ts.
const Author = defineTable({
	columns: {
		name: column.text(),
	},
});

export default defineDB({
	tables: { Author, Themes },
});

View file

@ -0,0 +1,19 @@
import { db, Author } from 'astro:db';
import { Themes as ThemesConfig } from './theme';
import { asDrizzleTable } from '@astrojs/db/utils';

// asDrizzleTable converts a table config object into a Drizzle table
// that can be used directly in queries.
const Themes = asDrizzleTable('Themes', ThemesConfig);

// Seed themes; `added` defaults are exercised by the test suite.
await db
	.insert(Themes)
	.values([{ name: 'dracula' }, { name: 'monokai', added: new Date() }])
	.returning({ name: Themes.name });

// Seed the authors listed on the index page.
await db
	.insert(Author)
	.values([
		{ name: 'Ben' },
		{ name: 'Nate' },
		{ name: 'Erika' },
		{ name: 'Bjorn' },
		{ name: 'Sarah' },
	]);

View file

@ -0,0 +1,15 @@
import { defineTable, column, NOW, sql } from 'astro:db';

// Exercises expression defaults (raw SQL and the NOW helper)
// across date, boolean, and text columns.
export const Themes = defineTable({
	columns: {
		name: column.text(),
		added: column.date({
			// Raw SQL expression default.
			default: sql`CURRENT_TIMESTAMP`,
		}),
		updated: column.date({
			// NOW helper exported by astro:db.
			default: NOW,
		}),
		isDark: column.boolean({ default: sql`TRUE` }),
		owner: column.text({ optional: true, default: sql`NULL` }),
	},
});

View file

@ -2,6 +2,11 @@
"name": "@test/db-aliases",
"version": "0.0.0",
"private": true,
"scripts": {
"dev": "astro dev",
"build": "astro build",
"preview": "astro preview"
},
"dependencies": {
"@astrojs/db": "workspace:*",
"astro": "workspace:*"

View file

@ -1,4 +1,5 @@
---
/// <reference path="../../.astro/db-types.d.ts" />
import { Author, db, Themes } from 'astro:db';
const authors = await db.select().from(Author);

View file

@ -1,14 +0,0 @@
---
import { Author, db } from 'astro:db';
const authors = await db.select().from(Author);
let error: any = {};
try {
db.insert(Author).values({ name: 'Person A' });
} catch (err) {
error = err;
}
---
<div id="error">{error.message}</div>

View file

@ -1,12 +0,0 @@
---
import { Themes, db } from 'astro:db';
let error: any = {};
try {
db.insert(Themes).values({ name: 'Person A' });
} catch (err) {
error = err;
}
---
<div id="error">{error.message}</div>

View file

@ -1,36 +0,0 @@
import { NOW, column, defineWritableTable, sql } from '@astrojs/db';
import type { AstroIntegration } from 'astro';
const Themes = defineWritableTable({
columns: {
name: column.text(),
added: column.date({
default: sql`CURRENT_TIMESTAMP`,
}),
updated: column.date({
default: NOW,
}),
isDark: column.boolean({ default: sql`TRUE` }),
owner: column.text({ optional: true, default: sql`NULL` }),
},
});
export function themes(): AstroIntegration {
return {
name: 'themes-integration',
hooks: {
'astro:config:setup': ({ updateConfig }) => {
updateConfig({
db: {
tables: { Themes },
async data({ seed }) {
// Seed writable tables in dev mode, only
// but in this case we do it for both, due to tests
await seed(Themes, [{ name: 'dracula' }, { name: 'monokai', added: new Date() }]);
},
},
});
},
},
};
}

View file

@ -1,25 +0,0 @@
import db, { defineReadableTable, column } from '@astrojs/db';
import { defineConfig } from 'astro/config';
import { asJson, createGlob } from './utils';
const Quote = defineReadableTable({
columns: {
author: column.text(),
body: column.text(),
file: column.text({ unique: true }),
},
});
export default defineConfig({
db: {
tables: { Quote },
data({ seed, ...ctx }) {
const glob = createGlob(ctx);
glob('quotes/*.json', {
into: Quote,
parse: asJson,
});
},
},
integrations: [db()],
});

View file

@ -1,21 +0,0 @@
{
"name": "glob",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"dev": "astro dev",
"build": "astro build",
"preview": "astro preview"
},
"dependencies": {
"@astrojs/db": "workspace:*",
"astro": "workspace:*",
"chokidar": "^3.5.3",
"drizzle-orm": "^0.28.6",
"fast-glob": "^3.3.2"
},
"keywords": [],
"author": "",
"license": "ISC"
}

View file

@ -1,4 +0,0 @@
{
"author": "Erika",
"body": "Put the quote in the database."
}

View file

@ -1,4 +0,0 @@
{
"author": "Tony Sull",
"body": "All content is data, but not all data is content."
}

View file

@ -1,25 +0,0 @@
---
/// <reference types="../../.astro/db-types.d.ts" />
import { Quote, db } from 'astro:db';
const quotes = await db.select().from(Quote);
---
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Document</title>
</head>
<body>
{
quotes.map((q) => (
<figure>
<blockquote>{q.body}</blockquote>
<figcaption>{q.author}</figcaption>
</figure>
))
}
</body>
</html>

View file

@ -1,60 +0,0 @@
import { type DBDataContext, type ResolvedCollectionConfig } from '@astrojs/db';
import chokidar from 'chokidar';
import { eq } from 'drizzle-orm';
import fastGlob from 'fast-glob';
import { readFile } from 'fs/promises';
export function createGlob({ db, mode }: Pick<DBDataContext, 'db' | 'mode'>) {
return async function glob(
pattern: string,
opts: {
into: ResolvedCollectionConfig;
parse: (params: { file: string; content: string }) => Record<string, any>;
}
) {
// TODO: expose `table`
const { table } = opts.into as any;
const fileColumn = table.file;
if (!fileColumn) {
throw new Error('`file` column is required for glob tables.');
}
if (mode === 'dev') {
chokidar
.watch(pattern)
.on('add', async (file) => {
const content = await readFile(file, 'utf-8');
const parsed = opts.parse({ file, content });
await db.insert(table).values({ ...parsed, file });
})
.on('change', async (file) => {
const content = await readFile(file, 'utf-8');
const parsed = opts.parse({ file, content });
await db
.insert(table)
.values({ ...parsed, file })
.onConflictDoUpdate({
target: fileColumn,
set: parsed,
});
})
.on('unlink', async (file) => {
await db.delete(table).where(eq(fileColumn, file));
});
} else {
const files = await fastGlob(pattern);
for (const file of files) {
const content = await readFile(file, 'utf-8');
const parsed = opts.parse({ file, content });
await db.insert(table).values({ ...parsed, file });
}
}
};
}
export function asJson(params: { file: string; content: string }) {
try {
return JSON.parse(params.content);
} catch (e) {
throw new Error(`Error parsing ${params.file}: ${e.message}`);
}
}

View file

@ -1,82 +1,6 @@
import astroDb, { defineReadableTable, column } from '@astrojs/db';
import astroDb from '@astrojs/db';
import { defineConfig } from 'astro/config';
const Recipe = defineReadableTable({
columns: {
id: column.number({ primaryKey: true }),
title: column.text(),
description: column.text(),
},
});
const Ingredient = defineReadableTable({
columns: {
id: column.number({ primaryKey: true }),
name: column.text(),
quantity: column.number(),
recipeId: column.number(),
},
indexes: {
recipeIdx: { on: 'recipeId' },
},
foreignKeys: [{ columns: 'recipeId', references: () => [Recipe.columns.id] }],
});
export default defineConfig({
integrations: [astroDb()],
db: {
tables: { Recipe, Ingredient },
async data({ seed, seedReturning }) {
const pancakes = await seedReturning(Recipe, {
title: 'Pancakes',
description: 'A delicious breakfast',
});
await seed(Ingredient, [
{
name: 'Flour',
quantity: 1,
recipeId: pancakes.id,
},
{
name: 'Eggs',
quantity: 2,
recipeId: pancakes.id,
},
{
name: 'Milk',
quantity: 1,
recipeId: pancakes.id,
},
]);
const pizza = await seedReturning(Recipe, {
title: 'Pizza',
description: 'A delicious dinner',
});
await seed(Ingredient, [
{
name: 'Flour',
quantity: 1,
recipeId: pizza.id,
},
{
name: 'Eggs',
quantity: 2,
recipeId: pizza.id,
},
{
name: 'Milk',
quantity: 1,
recipeId: pizza.id,
},
{
name: 'Tomato Sauce',
quantity: 1,
recipeId: pizza.id,
},
]);
},
},
});

View file

@ -0,0 +1,26 @@
import { defineTable, defineDB, column } from 'astro:db';
const Recipe = defineTable({
columns: {
id: column.number({ primaryKey: true }),
title: column.text(),
description: column.text(),
},
});
const Ingredient = defineTable({
columns: {
id: column.number({ primaryKey: true }),
name: column.text(),
quantity: column.number(),
recipeId: column.number(),
},
indexes: {
recipeIdx: { on: 'recipeId' },
},
foreignKeys: [{ columns: 'recipeId', references: () => [Recipe.columns.id] }],
});
export default defineDB({
tables: { Recipe, Ingredient },
});

View file

@ -0,0 +1,60 @@
import { db, Recipe, Ingredient } from 'astro:db';

/**
 * Insert one recipe and its ingredients, wiring each ingredient row
 * to the newly created recipe's id. Extracted to avoid duplicating
 * the insert/returning/ingredient-mapping dance per recipe.
 * @param {{ title: string, description: string, ingredients: Array<{ name: string, quantity: number }> }} params
 */
async function seedRecipe({ title, description, ingredients }) {
	const recipe = await db.insert(Recipe).values({ title, description }).returning().get();
	await db
		.insert(Ingredient)
		.values(ingredients.map((ingredient) => ({ ...ingredient, recipeId: recipe.id })));
}

await seedRecipe({
	title: 'Pancakes',
	description: 'A delicious breakfast',
	ingredients: [
		{ name: 'Flour', quantity: 1 },
		{ name: 'Eggs', quantity: 2 },
		{ name: 'Milk', quantity: 1 },
	],
});

await seedRecipe({
	title: 'Pizza',
	description: 'A delicious dinner',
	ingredients: [
		{ name: 'Flour', quantity: 1 },
		{ name: 'Eggs', quantity: 2 },
		{ name: 'Milk', quantity: 1 },
		{ name: 'Tomato Sauce', quantity: 1 },
	],
});

View file

@ -1,32 +1,9 @@
import db, { defineReadableTable, defineWritableTable, column } from '@astrojs/db';
import db from '@astrojs/db';
import node from '@astrojs/node';
import react from '@astrojs/react';
import { defineConfig } from 'astro/config';
import simpleStackForm from 'simple-stack-form';
const Event = defineReadableTable({
columns: {
id: column.number({
primaryKey: true,
}),
name: column.text(),
description: column.text(),
ticketPrice: column.number(),
date: column.date(),
location: column.text(),
},
});
const Ticket = defineWritableTable({
columns: {
eventId: column.number({ references: () => Event.columns.id }),
email: column.text(),
quantity: column.number(),
newsletter: column.boolean({
default: false,
}),
},
});
// https://astro.build/config
export default defineConfig({
integrations: [simpleStackForm(), db(), react()],
@ -34,23 +11,4 @@ export default defineConfig({
adapter: node({
mode: 'standalone',
}),
db: {
studio: true,
tables: {
Event,
Ticket,
},
data({ seed }) {
seed(Event, [
{
name: 'Sampha LIVE in Brooklyn',
description:
'Sampha is on tour with his new, flawless album Lahai. Come see the live performance outdoors in Prospect Park. Yes, there will be a grand piano 🎹',
date: new Date('2024-01-01'),
ticketPrice: 10000,
location: 'Brooklyn, NY',
},
]);
},
},
});

View file

@ -0,0 +1,27 @@
import { defineDB, defineTable, column } from 'astro:db';

// Events available for ticket purchase.
const Event = defineTable({
	columns: {
		id: column.number({
			primaryKey: true,
		}),
		name: column.text(),
		description: column.text(),
		ticketPrice: column.number(),
		date: column.date(),
		location: column.text(),
	},
});

// Purchased tickets; each row references an Event by id.
const Ticket = defineTable({
	columns: {
		eventId: column.number({ references: () => Event.columns.id }),
		email: column.text(),
		quantity: column.number(),
		newsletter: column.boolean({
			default: true,
		}),
	},
});

export default defineDB({ tables: { Event, Ticket } });

View file

@ -0,0 +1,10 @@
import { Event, db } from 'astro:db';

// Seed a single demo event for local development.
await db.insert(Event).values({
	name: 'Sampha LIVE in Brooklyn',
	description:
		'Sampha is on tour with his new, flawless album Lahai. Come see the live performance outdoors in Prospect Park. Yes, there will be a grand piano 🎹',
	date: new Date('2024-01-01'),
	ticketPrice: 10000,
	location: 'Brooklyn, NY',
});

View file

@ -4,16 +4,17 @@ import {
getCollectionChangeQueries,
getMigrationQueries,
} from '../../dist/core/cli/migration-queries.js';
import { getCreateTableQuery } from '../../dist/core/queries.js';
import { collectionSchema, column, defineReadableTable } from '../../dist/core/types.js';
import { getCreateTableQuery } from '../../dist/runtime/queries.js';
import { column, defineTable } from '../../dist/runtime/config.js';
import { tableSchema } from '../../dist/core/types.js';
import { NOW } from '../../dist/runtime/index.js';
const COLLECTION_NAME = 'Users';
const TABLE_NAME = 'Users';
// `parse` to resolve schema transformations
// ex. convert column.date() to ISO strings
const userInitial = collectionSchema.parse(
defineReadableTable({
const userInitial = tableSchema.parse(
defineTable({
columns: {
name: column.text(),
age: column.number(),
@ -28,15 +29,11 @@ const defaultAmbiguityResponses = {
columnRenames: {},
};
function userChangeQueries(
oldCollection,
newCollection,
ambiguityResponses = defaultAmbiguityResponses
) {
function userChangeQueries(oldTable, newTable, ambiguityResponses = defaultAmbiguityResponses) {
return getCollectionChangeQueries({
collectionName: COLLECTION_NAME,
oldCollection,
newCollection,
collectionName: TABLE_NAME,
oldCollection: oldTable,
newCollection: newTable,
ambiguityResponses,
});
}
@ -56,35 +53,35 @@ function configChangeQueries(
describe('column queries', () => {
describe('getMigrationQueries', () => {
it('should be empty when tables are the same', async () => {
const oldCollections = { [COLLECTION_NAME]: userInitial };
const newCollections = { [COLLECTION_NAME]: userInitial };
const oldCollections = { [TABLE_NAME]: userInitial };
const newCollections = { [TABLE_NAME]: userInitial };
const { queries } = await configChangeQueries(oldCollections, newCollections);
expect(queries).to.deep.equal([]);
});
it('should create table for new tables', async () => {
const oldCollections = {};
const newCollections = { [COLLECTION_NAME]: userInitial };
const newCollections = { [TABLE_NAME]: userInitial };
const { queries } = await configChangeQueries(oldCollections, newCollections);
expect(queries).to.deep.equal([getCreateTableQuery(COLLECTION_NAME, userInitial)]);
expect(queries).to.deep.equal([getCreateTableQuery(TABLE_NAME, userInitial)]);
});
it('should drop table for removed tables', async () => {
const oldCollections = { [COLLECTION_NAME]: userInitial };
const oldCollections = { [TABLE_NAME]: userInitial };
const newCollections = {};
const { queries } = await configChangeQueries(oldCollections, newCollections);
expect(queries).to.deep.equal([`DROP TABLE "${COLLECTION_NAME}"`]);
expect(queries).to.deep.equal([`DROP TABLE "${TABLE_NAME}"`]);
});
it('should rename table for renamed tables', async () => {
const rename = 'Peeps';
const oldCollections = { [COLLECTION_NAME]: userInitial };
const oldCollections = { [TABLE_NAME]: userInitial };
const newCollections = { [rename]: userInitial };
const { queries } = await configChangeQueries(oldCollections, newCollections, {
...defaultAmbiguityResponses,
collectionRenames: { [rename]: COLLECTION_NAME },
collectionRenames: { [rename]: TABLE_NAME },
});
expect(queries).to.deep.equal([`ALTER TABLE "${COLLECTION_NAME}" RENAME TO "${rename}"`]);
expect(queries).to.deep.equal([`ALTER TABLE "${TABLE_NAME}" RENAME TO "${rename}"`]);
});
});
@ -95,14 +92,14 @@ describe('column queries', () => {
});
it('should be empty when type updated to same underlying SQL type', async () => {
const blogInitial = collectionSchema.parse({
const blogInitial = tableSchema.parse({
...userInitial,
columns: {
title: column.text(),
draft: column.boolean(),
},
});
const blogFinal = collectionSchema.parse({
const blogFinal = tableSchema.parse({
...userInitial,
columns: {
...blogInitial.columns,
@ -114,7 +111,7 @@ describe('column queries', () => {
});
it('should respect user primary key without adding a hidden id', async () => {
const user = collectionSchema.parse({
const user = tableSchema.parse({
...userInitial,
columns: {
...userInitial.columns,
@ -122,7 +119,7 @@ describe('column queries', () => {
},
});
const userFinal = collectionSchema.parse({
const userFinal = tableSchema.parse({
...user,
columns: {
...user.columns,
@ -155,10 +152,10 @@ describe('column queries', () => {
const { queries } = await userChangeQueries(userInitial, userFinal, {
collectionRenames: {},
columnRenames: { [COLLECTION_NAME]: { middleInitial: 'mi' } },
columnRenames: { [TABLE_NAME]: { middleInitial: 'mi' } },
});
expect(queries).to.deep.equal([
`ALTER TABLE "${COLLECTION_NAME}" RENAME COLUMN "mi" TO "middleInitial"`,
`ALTER TABLE "${TABLE_NAME}" RENAME COLUMN "mi" TO "middleInitial"`,
]);
});
});
@ -287,7 +284,7 @@ describe('column queries', () => {
});
it('when updating to a runtime default', async () => {
const initial = collectionSchema.parse({
const initial = tableSchema.parse({
...userInitial,
columns: {
...userInitial.columns,
@ -295,7 +292,7 @@ describe('column queries', () => {
},
});
const userFinal = collectionSchema.parse({
const userFinal = tableSchema.parse({
...initial,
columns: {
...initial.columns,
@ -317,7 +314,7 @@ describe('column queries', () => {
});
it('when adding a column with a runtime default', async () => {
const userFinal = collectionSchema.parse({
const userFinal = tableSchema.parse({
...userInitial,
columns: {
...userInitial.columns,
@ -407,7 +404,7 @@ describe('column queries', () => {
it('when adding a required column with default', async () => {
const defaultDate = new Date('2023-01-01');
const userFinal = collectionSchema.parse({
const userFinal = tableSchema.parse({
...userInitial,
columns: {
...userInitial.columns,

View file

@ -1,9 +1,10 @@
import { expect } from 'chai';
import { describe, it } from 'mocha';
import { getCollectionChangeQueries } from '../../dist/core/cli/migration-queries.js';
import { collectionSchema, column } from '../../dist/core/types.js';
import { column } from '../../dist/runtime/config.js';
import { tableSchema } from '../../dist/core/types.js';
const userInitial = collectionSchema.parse({
const userInitial = tableSchema.parse({
columns: {
name: column.text(),
age: column.number(),

View file

@ -1,9 +1,10 @@
import { expect } from 'chai';
import { describe, it } from 'mocha';
import { getCollectionChangeQueries } from '../../dist/core/cli/migration-queries.js';
import { column, defineReadableTable, tablesSchema } from '../../dist/core/types.js';
import { column, defineTable } from '../../dist/runtime/config.js';
import { tablesSchema } from '../../dist/core/types.js';
const BaseUser = defineReadableTable({
const BaseUser = defineTable({
columns: {
id: column.number({ primaryKey: true }),
name: column.text(),
@ -13,7 +14,7 @@ const BaseUser = defineReadableTable({
},
});
const BaseSentBox = defineReadableTable({
const BaseSentBox = defineTable({
columns: {
to: column.number(),
toName: column.text(),
@ -58,7 +59,7 @@ describe('reference queries', () => {
it('adds references with lossless table recreate', async () => {
const { SentBox: Initial } = resolveReferences();
const { SentBox: Final } = resolveReferences({
SentBox: defineReadableTable({
SentBox: defineTable({
columns: {
...BaseSentBox.columns,
to: column.number({ references: () => BaseUser.columns.id }),
@ -82,7 +83,7 @@ describe('reference queries', () => {
it('removes references with lossless table recreate', async () => {
const { SentBox: Initial } = resolveReferences({
SentBox: defineReadableTable({
SentBox: defineTable({
columns: {
...BaseSentBox.columns,
to: column.number({ references: () => BaseUser.columns.id }),
@ -108,7 +109,7 @@ describe('reference queries', () => {
it('does not use ADD COLUMN when adding optional column with reference', async () => {
const { SentBox: Initial } = resolveReferences();
const { SentBox: Final } = resolveReferences({
SentBox: defineReadableTable({
SentBox: defineTable({
columns: {
...BaseSentBox.columns,
from: column.number({ references: () => BaseUser.columns.id, optional: true }),
@ -131,13 +132,13 @@ describe('reference queries', () => {
it('adds and updates foreign key with lossless table recreate', async () => {
const { SentBox: InitialWithoutFK } = resolveReferences();
const { SentBox: InitialWithDifferentFK } = resolveReferences({
SentBox: defineReadableTable({
SentBox: defineTable({
...BaseSentBox,
foreignKeys: [{ columns: ['to'], references: () => [BaseUser.columns.id] }],
}),
});
const { SentBox: Final } = resolveReferences({
SentBox: defineReadableTable({
SentBox: defineTable({
...BaseSentBox,
foreignKeys: [
{