Mirror of https://github.com/withastro/astro.git (synced 2025-01-06 22:10:10 -05:00)
add new migrations system (#10312)
This commit is contained in:
parent
718eed704a
commit
93ec9e264a
15 changed files with 189 additions and 604 deletions
.changeset/good-maps-deny.md (new file, +5)
@@ -0,0 +1,5 @@
---
"@astrojs/db": minor
---

Revamp migrations system
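The user-facing shape of the revamp, per the hunks below: `astro db gen` and `astro db sync` become no-ops, migration files under `db/migrations/` go away, and `astro db push` diffs the deployed schema snapshot directly against the local config. A minimal sketch of such a config, pieced together from the test fixtures later in this commit (the default export via `defineDB({ tables })` is an assumption, not shown in any hunk):

// db/config.ts: minimal sketch. `Author`, `column`, `defineDB`, and
// `defineTable` come from the test fixtures in this commit; the export
// shape is assumed.
import { column, defineDB, defineTable } from 'astro:db';

const Author = defineTable({
  columns: {
    name: column.text(),
    // `optional: true` lets the column be added without a destructive change
    age2: column.number({ optional: true }),
  },
});

export default defineDB({ tables: { Author } });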
@@ -1,56 +0,0 @@
import { writeFile } from 'node:fs/promises';
import { relative } from 'node:path';
import { fileURLToPath } from 'node:url';
import type { AstroConfig } from 'astro';
import { bgRed, bold, red, reset } from 'kleur/colors';
import type { Arguments } from 'yargs-parser';
import type { DBConfig } from '../../../types.js';
import { getMigrationsDirectoryUrl } from '../../../utils.js';
import { getMigrationQueries } from '../../migration-queries.js';
import {
  MIGRATIONS_CREATED,
  MIGRATIONS_UP_TO_DATE,
  getMigrationStatus,
  initializeMigrationsDirectory,
} from '../../migrations.js';

export async function cmd({
  astroConfig,
  dbConfig,
}: {
  astroConfig: AstroConfig;
  dbConfig: DBConfig;
  flags: Arguments;
}) {
  const migration = await getMigrationStatus({ dbConfig, root: astroConfig.root });
  const migrationsDir = getMigrationsDirectoryUrl(astroConfig.root);

  if (migration.state === 'no-migrations-found') {
    await initializeMigrationsDirectory(migration.currentSnapshot, migrationsDir);
    console.log(MIGRATIONS_CREATED);
    return;
  } else if (migration.state === 'up-to-date') {
    console.log(MIGRATIONS_UP_TO_DATE);
    return;
  }

  const { oldSnapshot, newSnapshot, newFilename, diff } = migration;
  const { queries: migrationQueries, confirmations } = await getMigrationQueries({
    oldSnapshot,
    newSnapshot,
  });
  // Warn the user about any changes that lead to data loss.
  // When the user runs `db push`, they will be prompted to confirm these changes.
  confirmations.map((message) => console.log(bgRed(' !!! ') + ' ' + red(message)));
  const content = {
    diff,
    db: migrationQueries,
    // TODO(fks): Encode the relevant data, instead of the raw message.
    // This will give `db push` more control over the formatting of the message.
    confirm: confirmations.map((c) => reset(c)),
  };
  const fileUrl = new URL(newFilename, migrationsDir);
  const relativePath = relative(fileURLToPath(astroConfig.root), fileURLToPath(fileUrl));
  await writeFile(fileUrl, JSON.stringify(content, undefined, 2));
  console.log(bold(relativePath) + ' created!');
}
@@ -1,26 +1,16 @@
import type { AstroConfig } from 'astro';
import { red } from 'kleur/colors';
import prompts from 'prompts';
import type { Arguments } from 'yargs-parser';
import { MISSING_SESSION_ID_ERROR } from '../../../errors.js';
import { getManagedAppTokenOrExit } from '../../../tokens.js';
import { type DBConfig, type DBSnapshot } from '../../../types.js';
import { getMigrationsDirectoryUrl, getRemoteDatabaseUrl } from '../../../utils.js';
import { getMigrationQueries } from '../../migration-queries.js';
import { getRemoteDatabaseUrl } from '../../../utils.js';
import {
  INITIAL_SNAPSHOT,
  MIGRATIONS_NOT_INITIALIZED,
  MIGRATIONS_UP_TO_DATE,
  MIGRATION_NEEDED,
  getMigrationQueries,
  createCurrentSnapshot,
  createEmptySnapshot,
  getMigrationStatus,
  getMigrations,
  loadInitialSnapshot,
  loadMigration,
} from '../../migrations.js';
  getProductionCurrentSnapshot,
} from '../../migration-queries.js';

export async function cmd({
  astroConfig,
  dbConfig,
  flags,
}: {

@@ -29,49 +19,31 @@ export async function cmd({
  flags: Arguments;
}) {
  const isDryRun = flags.dryRun;
  const isForceReset = flags.forceReset;
  const appToken = await getManagedAppTokenOrExit(flags.token);
  const migration = await getMigrationStatus({ dbConfig, root: astroConfig.root });
  if (migration.state === 'no-migrations-found') {
    console.log(MIGRATIONS_NOT_INITIALIZED);
    process.exit(1);
  } else if (migration.state === 'ahead') {
    console.log(MIGRATION_NEEDED);
    process.exit(1);
  }
  const migrationsDir = getMigrationsDirectoryUrl(astroConfig.root);
  const productionSnapshot = await getProductionCurrentSnapshot({ appToken: appToken.token });
  const currentSnapshot = createCurrentSnapshot(dbConfig);
  const isFromScratch = isForceReset || JSON.stringify(productionSnapshot) === '{}';
  const { queries: migrationQueries } = await getMigrationQueries({
    oldSnapshot: isFromScratch ? createEmptySnapshot() : productionSnapshot,
    newSnapshot: currentSnapshot,
  });

  // get all migrations from the filesystem
  const allLocalMigrations = await getMigrations(migrationsDir);
  let missingMigrations: string[] = [];
  try {
    const { data } = await prepareMigrateQuery({
      migrations: allLocalMigrations,
      appToken: appToken.token,
    });
    missingMigrations = data;
  } catch (error) {
    if (error instanceof Error) {
      if (error.message.startsWith('{')) {
        const { error: { code } = { code: '' } } = JSON.parse(error.message);
        if (code === 'TOKEN_UNAUTHORIZED') {
          console.error(MISSING_SESSION_ID_ERROR);
        }
      }
    }
    console.error(error);
    process.exit(1);
  }
  // push the database schema
  if (missingMigrations.length === 0) {
    console.log(MIGRATIONS_UP_TO_DATE);
  // push the database schema
  if (migrationQueries.length === 0) {
    console.log('Database schema is up to date.');
  } else {
    console.log(`Pushing ${missingMigrations.length} migrations...`);
    console.log(`Database schema is out of date.`);
  }
  if (isDryRun) {
    console.log('Statements:', JSON.stringify(migrationQueries, undefined, 2));
  } else {
    console.log(`Pushing database schema updates...`);
    await pushSchema({
      migrations: missingMigrations,
      migrationsDir,
      statements: migrationQueries,
      appToken: appToken.token,
      isDryRun,
      currentSnapshot: migration.currentSnapshot,
      currentSnapshot: currentSnapshot,
    });
  }
  // cleanup and exit
@@ -80,92 +52,26 @@ export async function cmd({
}

async function pushSchema({
  migrations,
  migrationsDir,
  statements,
  appToken,
  isDryRun,
  currentSnapshot,
}: {
  migrations: string[];
  migrationsDir: URL;
  statements: string[];
  appToken: string;
  isDryRun: boolean;
  currentSnapshot: DBSnapshot;
}) {
  // load all missing migrations
  const initialSnapshot = migrations.find((m) => m === INITIAL_SNAPSHOT);
  const filteredMigrations = migrations.filter((m) => m !== INITIAL_SNAPSHOT);
  const missingMigrationContents = await Promise.all(
    filteredMigrations.map((m) => loadMigration(m, migrationsDir))
  );
  // create a migration for the initial snapshot, if needed
  const initialMigrationBatch = initialSnapshot
    ? (
        await getMigrationQueries({
          oldSnapshot: createEmptySnapshot(),
          newSnapshot: await loadInitialSnapshot(migrationsDir),
        })
      ).queries
    : [];

  // combine all missing migrations into a single batch
  const confirmations = missingMigrationContents.reduce((acc, curr) => {
    return [...acc, ...(curr.confirm || [])];
  }, [] as string[]);
  if (confirmations.length > 0) {
    const response = await prompts([
      ...confirmations.map((message, index) => ({
        type: 'confirm' as const,
        name: String(index),
        message: red('Warning: ') + message + '\nContinue?',
        initial: true,
      })),
    ]);
    if (
      Object.values(response).length === 0 ||
      Object.values(response).some((value) => value === false)
    ) {
      process.exit(1);
    }
  }

  // combine all missing migrations into a single batch
  const queries = missingMigrationContents.reduce((acc, curr) => {
    return [...acc, ...curr.db];
  }, initialMigrationBatch);
  // apply the batch to the DB
  await runMigrateQuery({ queries, migrations, snapshot: currentSnapshot, appToken, isDryRun });
}

async function runMigrateQuery({
  queries: baseQueries,
  migrations,
  snapshot,
  appToken,
  isDryRun,
}: {
  queries: string[];
  migrations: string[];
  snapshot: DBSnapshot;
  appToken: string;
  isDryRun?: boolean;
}) {
  const queries = ['pragma defer_foreign_keys=true;', ...baseQueries];

  const requestBody = {
    snapshot,
    migrations,
    sql: queries,
    snapshot: currentSnapshot,
    sql: statements,
    experimentalVersion: 1,
  };

  if (isDryRun) {
    console.info('[DRY RUN] Batch query:', JSON.stringify(requestBody, null, 2));
    return new Response(null, { status: 200 });
  }

  const url = new URL('/migrations/run', getRemoteDatabaseUrl());

  const url = new URL('/db/push', getRemoteDatabaseUrl());
  return await fetch(url, {
    method: 'POST',
    headers: new Headers({

@@ -174,28 +80,3 @@ async function runMigrateQuery({
    body: JSON.stringify(requestBody),
  });
}

async function prepareMigrateQuery({
  migrations,
  appToken,
}: {
  migrations: string[];
  appToken: string;
}) {
  const url = new URL('/migrations/prepare', getRemoteDatabaseUrl());
  const requestBody = {
    migrations,
    experimentalVersion: 1,
  };
  const result = await fetch(url, {
    method: 'POST',
    headers: new Headers({
      Authorization: `Bearer ${appToken}`,
    }),
    body: JSON.stringify(requestBody),
  });
  if (result.status >= 400) {
    throw new Error(await result.text());
  }
  return await result.json();
}
@@ -1,16 +1,13 @@
import type { AstroConfig } from 'astro';
import type { Arguments } from 'yargs-parser';
import type { DBConfig } from '../../../types.js';
import { getMigrationQueries } from '../../migration-queries.js';
import {
  MIGRATIONS_NOT_INITIALIZED,
  MIGRATIONS_UP_TO_DATE,
  MIGRATION_NEEDED,
  getMigrationStatus,
} from '../../migrations.js';
import {
  getMigrationQueries,
  createCurrentSnapshot,
  createEmptySnapshot,
  getProductionCurrentSnapshot,
} from '../../migration-queries.js';
import { getManagedAppTokenOrExit } from '../../../tokens.js';

export async function cmd({
  astroConfig,
  dbConfig,
  flags,
}: {

@@ -18,35 +15,20 @@ export async function cmd({
  dbConfig: DBConfig;
  flags: Arguments;
}) {
  const status = await getMigrationStatus({ dbConfig, root: astroConfig.root });
  const { state } = status;
  if (flags.json) {
    if (state === 'ahead') {
      const { queries: migrationQueries } = await getMigrationQueries({
        oldSnapshot: status.oldSnapshot,
        newSnapshot: status.newSnapshot,
      });
      const newFileContent = {
        diff: status.diff,
        db: migrationQueries,
      };
      status.newFileContent = JSON.stringify(newFileContent, null, 2);
    }
    console.log(JSON.stringify(status));
    process.exit(state === 'up-to-date' ? 0 : 1);
  }
  switch (state) {
    case 'no-migrations-found': {
      console.log(MIGRATIONS_NOT_INITIALIZED);
      process.exit(1);
    }
    case 'ahead': {
      console.log(MIGRATION_NEEDED);
      process.exit(1);
    }
    case 'up-to-date': {
      console.log(MIGRATIONS_UP_TO_DATE);
      return;
    }
  const appToken = await getManagedAppTokenOrExit(flags.token);
  const productionSnapshot = await getProductionCurrentSnapshot({ appToken: appToken.token });
  const currentSnapshot = createCurrentSnapshot(dbConfig);
  const { queries: migrationQueries } = await getMigrationQueries({
    oldSnapshot: JSON.stringify(productionSnapshot) !== '{}' ? productionSnapshot : createEmptySnapshot(),
    newSnapshot: currentSnapshot,
  });

  if (migrationQueries.length === 0) {
    console.log(`Database schema is up to date.`);
  } else {
    console.log(`Database schema is out of date.`);
    console.log(`Run 'astro db push' to push up your latest changes.`);
  }

  await appToken.destroy();
}
@@ -23,10 +23,13 @@ export async function cli({
      const { cmd } = await import('./commands/shell/index.js');
      return await cmd({ astroConfig, dbConfig, flags });
    }
    case 'gen':
    case 'gen': {
      console.log('"astro db gen" is no longer needed! Visit the docs for more information.');
      return;
    }
    case 'sync': {
      const { cmd } = await import('./commands/gen/index.js');
      return await cmd({ astroConfig, dbConfig, flags });
      console.log('"astro db sync" is no longer needed! Visit the docs for more information.');
      return;
    }
    case 'push': {
      const { cmd } = await import('./commands/push/index.js');

@@ -76,7 +79,7 @@ astro logout  End your authenticated session with Astro Studio
astro link       Link this directory to an Astro Studio project

astro db gen     Creates snapshot based on your schema
astro db push    Pushes migrations to Astro Studio
astro db verify  Verifies migrations have been pushed and errors if not`;
astro db push    Pushes schema updates to Astro Studio
astro db verify  Tests schema updates /w Astro Studio (good for CI)`;
}
}
@@ -2,11 +2,11 @@ import deepDiff from 'deep-diff';
import { SQLiteAsyncDialect } from 'drizzle-orm/sqlite-core';
import * as color from 'kleur/colors';
import { customAlphabet } from 'nanoid';
import prompts from 'prompts';
import { hasPrimaryKey } from '../../runtime/index.js';
import {
  getCreateIndexQueries,
  getCreateTableQuery,
  getDropTableIfExistsQuery,
  getModifiers,
  getReferencesConfig,
  hasDefault,

@@ -18,6 +18,7 @@ import {
  type ColumnType,
  type DBColumn,
  type DBColumns,
  type DBConfig,
  type DBSnapshot,
  type DBTable,
  type DBTables,
@@ -28,49 +29,39 @@ import {
  type TextColumn,
  columnSchema,
} from '../types.js';
import { getRemoteDatabaseUrl } from '../utils.js';
import { RENAME_COLUMN_ERROR, RENAME_TABLE_ERROR } from '../errors.js';

const sqlite = new SQLiteAsyncDialect();
const genTempTableName = customAlphabet('abcdefghijklmnopqrstuvwxyz', 10);

/** Dependency injected for unit testing */
type AmbiguityResponses = {
  collectionRenames: Record<string, string>;
  columnRenames: {
    [collectionName: string]: Record<string, string>;
  };
};

export async function getMigrationQueries({
  oldSnapshot,
  newSnapshot,
  ambiguityResponses,
}: {
  oldSnapshot: DBSnapshot;
  newSnapshot: DBSnapshot;
  ambiguityResponses?: AmbiguityResponses;
}): Promise<{ queries: string[]; confirmations: string[] }> {
  const queries: string[] = [];
  const confirmations: string[] = [];
  let added = getAddedCollections(oldSnapshot, newSnapshot);
  let dropped = getDroppedCollections(oldSnapshot, newSnapshot);
  if (!isEmpty(added) && !isEmpty(dropped)) {
    const resolved = await resolveCollectionRenames(added, dropped, ambiguityResponses);
    added = resolved.added;
    dropped = resolved.dropped;
    for (const { from, to } of resolved.renamed) {
      const renameQuery = `ALTER TABLE ${sqlite.escapeName(from)} RENAME TO ${sqlite.escapeName(
        to
      )}`;
      queries.push(renameQuery);
    }
  const addedCollections = getAddedCollections(oldSnapshot, newSnapshot);
  const droppedTables = getDroppedCollections(oldSnapshot, newSnapshot);
  const notDeprecatedDroppedTables = Object.fromEntries(
    Object.entries(droppedTables).filter(([, table]) => !table.deprecated)
  );
  if (!isEmpty(addedCollections) && !isEmpty(notDeprecatedDroppedTables)) {
    throw new Error(
      RENAME_TABLE_ERROR(Object.keys(addedCollections)[0], Object.keys(notDeprecatedDroppedTables)[0])
    );
  }

  for (const [collectionName, collection] of Object.entries(added)) {
  for (const [collectionName, collection] of Object.entries(addedCollections)) {
    queries.push(getDropTableIfExistsQuery(collectionName));
    queries.push(getCreateTableQuery(collectionName, collection));
    queries.push(...getCreateIndexQueries(collectionName, collection));
  }

  for (const [collectionName] of Object.entries(dropped)) {
  for (const [collectionName] of Object.entries(droppedTables)) {
    const dropQuery = `DROP TABLE ${sqlite.escapeName(collectionName)}`;
    queries.push(dropQuery);
  }
@@ -78,6 +69,19 @@ export async function getMigrationQueries({
  for (const [collectionName, newCollection] of Object.entries(newSnapshot.schema)) {
    const oldCollection = oldSnapshot.schema[collectionName];
    if (!oldCollection) continue;
    const addedColumns = getAdded(oldCollection.columns, newCollection.columns);
    const droppedColumns = getDropped(oldCollection.columns, newCollection.columns);
    const notDeprecatedDroppedColumns = Object.fromEntries(
      Object.entries(droppedColumns).filter(([key, col]) => !col.schema.deprecated)
    );
    if (!isEmpty(addedColumns) && !isEmpty(notDeprecatedDroppedColumns)) {
      throw new Error(
        RENAME_COLUMN_ERROR(
          `${collectionName}.${Object.keys(addedColumns)[0]}`,
          `${collectionName}.${Object.keys(notDeprecatedDroppedColumns)[0]}`
        )
      );
    }
    const result = await getCollectionChangeQueries({
      collectionName,
      oldCollection,
@@ -93,18 +97,16 @@ export async function getCollectionChangeQueries({
  collectionName,
  oldCollection,
  newCollection,
  ambiguityResponses,
}: {
  collectionName: string;
  oldCollection: DBTable;
  newCollection: DBTable;
  ambiguityResponses?: AmbiguityResponses;
}): Promise<{ queries: string[]; confirmations: string[] }> {
  const queries: string[] = [];
  const confirmations: string[] = [];
  const updated = getUpdatedColumns(oldCollection.columns, newCollection.columns);
  let added = getAdded(oldCollection.columns, newCollection.columns);
  let dropped = getDropped(oldCollection.columns, newCollection.columns);
  const added = getAdded(oldCollection.columns, newCollection.columns);
  const dropped = getDropped(oldCollection.columns, newCollection.columns);
  /** Any foreign key changes require a full table recreate */
  const hasForeignKeyChanges = Boolean(
    deepDiff(oldCollection.foreignKeys, newCollection.foreignKeys)
@@ -120,12 +122,7 @@ export async function getCollectionChangeQueries({
      confirmations,
    };
  }
  if (!hasForeignKeyChanges && !isEmpty(added) && !isEmpty(dropped)) {
    const resolved = await resolveColumnRenames(collectionName, added, dropped, ambiguityResponses);
    added = resolved.added;
    dropped = resolved.dropped;
    queries.push(...getColumnRenameQueries(collectionName, resolved.renamed));
  }

  if (
    !hasForeignKeyChanges &&
    isEmpty(updated) &&
@@ -207,116 +204,6 @@ function getChangeIndexQueries({
  return queries;
}

type Renamed = Array<{ from: string; to: string }>;

async function resolveColumnRenames(
  collectionName: string,
  mightAdd: DBColumns,
  mightDrop: DBColumns,
  ambiguityResponses?: AmbiguityResponses
): Promise<{ added: DBColumns; dropped: DBColumns; renamed: Renamed }> {
  const added: DBColumns = {};
  const dropped: DBColumns = {};
  const renamed: Renamed = [];

  for (const [columnName, column] of Object.entries(mightAdd)) {
    let oldColumnName = ambiguityResponses
      ? ambiguityResponses.columnRenames[collectionName]?.[columnName] ?? '__NEW__'
      : undefined;
    if (!oldColumnName) {
      const res = await prompts(
        {
          type: 'select',
          name: 'columnName',
          message:
            'New column ' +
            color.blue(color.bold(`${collectionName}.${columnName}`)) +
            ' detected. Was this renamed from an existing column?',
          choices: [
            { title: 'New column (not renamed from existing)', value: '__NEW__' },
            ...Object.keys(mightDrop)
              .filter((key) => !(key in renamed))
              .map((key) => ({ title: key, value: key })),
          ],
        },
        {
          onCancel: () => {
            process.exit(1);
          },
        }
      );
      oldColumnName = res.columnName as string;
    }

    if (oldColumnName === '__NEW__') {
      added[columnName] = column;
    } else {
      renamed.push({ from: oldColumnName, to: columnName });
    }
  }
  for (const [droppedColumnName, droppedColumn] of Object.entries(mightDrop)) {
    if (!renamed.find((r) => r.from === droppedColumnName)) {
      dropped[droppedColumnName] = droppedColumn;
    }
  }

  return { added, dropped, renamed };
}

async function resolveCollectionRenames(
  mightAdd: DBTables,
  mightDrop: DBTables,
  ambiguityResponses?: AmbiguityResponses
): Promise<{ added: DBTables; dropped: DBTables; renamed: Renamed }> {
  const added: DBTables = {};
  const dropped: DBTables = {};
  const renamed: Renamed = [];

  for (const [collectionName, collection] of Object.entries(mightAdd)) {
    let oldCollectionName = ambiguityResponses
      ? ambiguityResponses.collectionRenames[collectionName] ?? '__NEW__'
      : undefined;
    if (!oldCollectionName) {
      const res = await prompts(
        {
          type: 'select',
          name: 'collectionName',
          message:
            'New collection ' +
            color.blue(color.bold(collectionName)) +
            ' detected. Was this renamed from an existing collection?',
          choices: [
            { title: 'New collection (not renamed from existing)', value: '__NEW__' },
            ...Object.keys(mightDrop)
              .filter((key) => !(key in renamed))
              .map((key) => ({ title: key, value: key })),
          ],
        },
        {
          onCancel: () => {
            process.exit(1);
          },
        }
      );
      oldCollectionName = res.collectionName as string;
    }

    if (oldCollectionName === '__NEW__') {
      added[collectionName] = collection;
    } else {
      renamed.push({ from: oldCollectionName, to: collectionName });
    }
  }

  for (const [droppedCollectionName, droppedCollection] of Object.entries(mightDrop)) {
    if (!renamed.find((r) => r.from === droppedCollectionName)) {
      dropped[droppedCollectionName] = droppedCollection;
    }
  }

  return { added, dropped, renamed };
}

function getAddedCollections(oldCollections: DBSnapshot, newCollections: DBSnapshot): DBTables {
  const added: DBTables = {};
  for (const [key, newCollection] of Object.entries(newCollections.schema)) {
@@ -333,20 +220,6 @@ function getDroppedCollections(oldCollections: DBSnapshot, newCollections: DBSnapshot)
  return dropped;
}

function getColumnRenameQueries(unescapedCollectionName: string, renamed: Renamed): string[] {
  const queries: string[] = [];
  const collectionName = sqlite.escapeName(unescapedCollectionName);

  for (const { from, to } of renamed) {
    const q = `ALTER TABLE ${collectionName} RENAME COLUMN ${sqlite.escapeName(
      from
    )} TO ${sqlite.escapeName(to)}`;
    queries.push(q);
  }

  return queries;
}

/**
 * Get ALTER TABLE queries to update the table schema. Assumes all added and dropped columns pass
 * `canUseAlterTableAddColumn` and `canAlterTableDropColumn` checks!
@@ -552,3 +425,29 @@ type DBColumnWithDefault
function hasRuntimeDefault(column: DBColumn): column is DBColumnWithDefault {
  return !!(column.schema.default && isSerializedSQL(column.schema.default));
}

export async function getProductionCurrentSnapshot({
  appToken,
}: {
  appToken: string;
}): Promise<DBSnapshot> {
  const url = new URL('/db/schema', getRemoteDatabaseUrl());

  const response = await fetch(url, {
    method: 'POST',
    headers: new Headers({
      Authorization: `Bearer ${appToken}`,
    }),
  });
  const result = await response.json();
  return result.data;
}

export function createCurrentSnapshot({ tables = {} }: DBConfig): DBSnapshot {
  const schema = JSON.parse(JSON.stringify(tables));
  return { experimentalVersion: 1, schema };
}

export function createEmptySnapshot(): DBSnapshot {
  return { experimentalVersion: 1, schema: {} };
}
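Pulling the additions above together, a condensed sketch of how the new `astro db push` decides what to run. This restates the push command earlier in this diff; no new API is introduced, and `flags`/`dbConfig` are the command's own inputs:

// Sketch of the new push flow: fetch the deployed snapshot, snapshot the
// local config, then diff the two into a list of SQL statements.
const appToken = await getManagedAppTokenOrExit(flags.token);
const productionSnapshot = await getProductionCurrentSnapshot({ appToken: appToken.token });
const currentSnapshot = createCurrentSnapshot(dbConfig);
// An empty remote snapshot (or --force-reset) means: rebuild from scratch.
const isFromScratch = flags.forceReset || JSON.stringify(productionSnapshot) === '{}';
const { queries } = await getMigrationQueries({
  oldSnapshot: isFromScratch ? createEmptySnapshot() : productionSnapshot,
  newSnapshot: currentSnapshot,
});
// An empty `queries` list means the deployed schema is already up to date;
// otherwise the statements are POSTed to `/db/push` with the new snapshot.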
@@ -1,151 +0,0 @@
import deepDiff from 'deep-diff';
import { mkdir, readFile, readdir, writeFile } from 'fs/promises';
import { cyan, green, yellow } from 'kleur/colors';
import { type DBConfig, type DBSnapshot } from '../types.js';
import { getMigrationsDirectoryUrl } from '../utils.js';
const { applyChange, diff: generateDiff } = deepDiff;

export type MigrationStatus =
  | {
      state: 'no-migrations-found';
      currentSnapshot: DBSnapshot;
    }
  | {
      state: 'ahead';
      oldSnapshot: DBSnapshot;
      newSnapshot: DBSnapshot;
      diff: deepDiff.Diff<DBSnapshot, DBSnapshot>[];
      newFilename: string;
      summary: string;
      newFileContent?: string;
    }
  | {
      state: 'up-to-date';
      currentSnapshot: DBSnapshot;
    };

export const INITIAL_SNAPSHOT = '0000_snapshot.json';

export async function getMigrationStatus({
  dbConfig,
  root,
}: {
  dbConfig: DBConfig;
  root: URL;
}): Promise<MigrationStatus> {
  const currentSnapshot = createCurrentSnapshot(dbConfig);
  const dir = getMigrationsDirectoryUrl(root);
  const allMigrationFiles = await getMigrations(dir);

  if (allMigrationFiles.length === 0) {
    return {
      state: 'no-migrations-found',
      currentSnapshot,
    };
  }

  const previousSnapshot = await initializeFromMigrations(allMigrationFiles, dir);
  const diff = generateDiff(previousSnapshot, currentSnapshot);

  if (diff) {
    const n = getNewMigrationNumber(allMigrationFiles);
    const newFilename = `${String(n + 1).padStart(4, '0')}_migration.json`;
    return {
      state: 'ahead',
      oldSnapshot: previousSnapshot,
      newSnapshot: currentSnapshot,
      diff,
      newFilename,
      summary: generateDiffSummary(diff),
    };
  }

  return {
    state: 'up-to-date',
    currentSnapshot,
  };
}

export const MIGRATIONS_CREATED = `${green(
  '■ Migrations initialized!'
)}\n\n To execute your migrations, run\n ${cyan('astro db push')}`;
export const MIGRATIONS_UP_TO_DATE = `${green(
  '■ No migrations needed!'
)}\n\n Your database is up to date.\n`;
export const MIGRATIONS_NOT_INITIALIZED = `${yellow(
  '▶ No migrations found!'
)}\n\n To scaffold your migrations folder, run\n ${cyan('astro db sync')}\n`;
export const MIGRATION_NEEDED = `${yellow(
  '▶ Changes detected!'
)}\n\n To create the necessary migration file, run\n ${cyan('astro db sync')}\n`;

function generateDiffSummary(diff: deepDiff.Diff<DBSnapshot, DBSnapshot>[]) {
  // TODO: human readable summary
  return JSON.stringify(diff, null, 2);
}

function getNewMigrationNumber(allMigrationFiles: string[]): number {
  const len = allMigrationFiles.length - 1;
  return allMigrationFiles.reduce((acc, curr) => {
    const num = Number.parseInt(curr.split('_')[0] ?? len, 10);
    return num > acc ? num : acc;
  }, 0);
}

export async function getMigrations(dir: URL): Promise<string[]> {
  const migrationFiles = await readdir(dir).catch((err) => {
    if (err.code === 'ENOENT') {
      return [];
    }
    throw err;
  });
  return migrationFiles;
}

export async function loadMigration(
  migration: string,
  dir: URL
): Promise<{ diff: any[]; db: string[]; confirm?: string[] }> {
  return JSON.parse(await readFile(new URL(migration, dir), 'utf-8'));
}

export async function loadInitialSnapshot(dir: URL): Promise<DBSnapshot> {
  const snapshot = JSON.parse(await readFile(new URL(INITIAL_SNAPSHOT, dir), 'utf-8'));
  // `experimentalVersion: 1` -- added the version column
  if (snapshot.experimentalVersion === 1) {
    return snapshot;
  }
  // `experimentalVersion: 0` -- initial format
  if (!snapshot.schema) {
    return { experimentalVersion: 1, schema: snapshot };
  }
  throw new Error('Invalid snapshot format');
}

export async function initializeMigrationsDirectory(currentSnapshot: DBSnapshot, dir: URL) {
  await mkdir(dir, { recursive: true });
  await writeFile(new URL(INITIAL_SNAPSHOT, dir), JSON.stringify(currentSnapshot, undefined, 2));
}

export async function initializeFromMigrations(
  allMigrationFiles: string[],
  dir: URL
): Promise<DBSnapshot> {
  const prevSnapshot = await loadInitialSnapshot(dir);
  for (const migration of allMigrationFiles) {
    if (migration === INITIAL_SNAPSHOT) continue;
    const migrationContent = await loadMigration(migration, dir);
    migrationContent.diff.forEach((change: any) => {
      applyChange(prevSnapshot, {}, change);
    });
  }
  return prevSnapshot;
}

export function createCurrentSnapshot({ tables = {} }: DBConfig): DBSnapshot {
  const schema = JSON.parse(JSON.stringify(tables));
  return { experimentalVersion: 1, schema };
}
export function createEmptySnapshot(): DBSnapshot {
  return { experimentalVersion: 1, schema: {} };
}
@@ -10,14 +10,26 @@ export const MISSING_PROJECT_ID_ERROR = `${red('▶ Directory not linked.')}
To link this directory to an Astro Studio project, run
  ${cyan('astro db link')}\n`;

export const MIGRATIONS_NOT_INITIALIZED = `${yellow(
  '▶ No migrations found!'
)}\n\n To scaffold your migrations folder, run\n ${cyan('astro db sync')}\n`;

export const MISSING_EXECUTE_PATH_ERROR = `${red(
  '▶ No file path provided.'
)} Provide a path by running ${cyan('astro db execute <path>')}\n`;

export const RENAME_TABLE_ERROR = (oldTable: string, newTable: string) => {
  return (
    red('▶ Potential table rename detected: ' + oldTable + ', ' + newTable) +
    `\n You cannot add and remove tables in the same schema update batch.` +
    `\n To resolve, add a 'deprecated: true' flag to '${oldTable}' instead.`
  );
};

export const RENAME_COLUMN_ERROR = (oldSelector: string, newSelector: string) => {
  return (
    red('▶ Potential column rename detected: ' + oldSelector + ', ' + newSelector) +
    `\n You cannot add and remove columns in the same table.` +
    `\n To resolve, add a 'deprecated: true' flag to '${oldSelector}' instead.`
  );
};

export const FILE_NOT_FOUND_ERROR = (path: string) =>
  `${red('▶ File not found:')} ${bold(path)}\n`;
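The two rename errors above replace the old interactive rename prompts: adding and removing a table or column in one batch is now rejected outright, and the escape hatch is the new `deprecated` flag. A sketch of the resolution the error message suggests, modeled on the theme.ts fixture below (assuming `sql` is importable from 'astro:db' alongside `column`/`defineTable`, as that fixture's usage implies):

// Instead of dropping or renaming `isDark`, which would trip
// RENAME_COLUMN_ERROR when paired with a new column in the same batch,
// keep the column and mark it deprecated:
import { column, defineTable, sql } from 'astro:db';

const Themes = defineTable({
  columns: {
    // stays in the database, but is hidden from the generated types
    isDark: column.boolean({ default: sql`TRUE`, deprecated: true }),
    owner: column.text({ optional: true, default: sql`NULL` }),
  },
});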
@@ -26,9 +26,14 @@ ${Object.entries(tables)
}

function generateTableType(name: string, collection: DBTable): string {
  const sanitizedColumnsList = Object.entries(collection.columns)
    // Filter out deprecated columns from the typegen, so that they don't
    // appear as queryable fields in the generated types / your codebase.
    .filter(([key, val]) => !val.schema.deprecated);
  const sanitizedColumns = Object.fromEntries(sanitizedColumnsList);
  let tableType = `  export const ${name}: import(${RUNTIME_IMPORT}).Table<
    ${JSON.stringify(name)},
    ${JSON.stringify(collection.columns)}
    ${JSON.stringify(sanitizedColumns)}
  >;`;
  return tableType;
}
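The effect of the typegen filter above, as a hypothetical output sketch: the import specifier stands in for whatever `RUNTIME_IMPORT` resolves to, and the column config bodies are elided, so treat the shape (not the names) as the point:

// Hypothetical generated declaration for the Themes fixture, with
// `isDark` marked deprecated. Only non-deprecated columns are exposed:
export const Themes: import('@astrojs/db/runtime').Table<
  'Themes',
  {
    updated: { /* column config */ };
    owner: { /* column config */ };
    // `isDark` is omitted: deprecated columns are no longer queryable
  }
>;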
@@ -21,7 +21,8 @@ const baseColumnSchema = z.object({
  label: z.string().optional(),
  optional: z.boolean().optional().default(false),
  unique: z.boolean().optional().default(false),
  deprecated: z.boolean().optional().default(false),

  // Defined when `defineReadableTable()` is called
  name: z.string().optional(),
  // TODO: rename to `tableName`. Breaking schema change

@@ -184,6 +185,7 @@ export const tableSchema = z.object({
  columns: columnsSchema,
  indexes: z.record(indexSchema).optional(),
  foreignKeys: z.array(foreignKeysSchema).optional(),
  deprecated: z.boolean().optional().default(false),
});

export const tablesSchema = z.preprocess((rawTables) => {

@@ -258,6 +260,7 @@ export interface TableConfig<TColumns extends ColumnsConfig = ColumnsConfig>
  references: () => MaybeArray<z.input<typeof referenceableColumnSchema>>;
}>;
indexes?: Record<string, IndexConfig<TColumns>>;
deprecated?: boolean;
}

interface IndexConfig<TColumns extends ColumnsConfig> extends z.input<typeof indexSchema> {
@@ -21,7 +21,3 @@ export function getAstroStudioUrl(): string {
export function getDbDirectoryUrl(root: URL | string) {
  return new URL('db/', root);
}

export function getMigrationsDirectoryUrl(root: URL | string) {
  return new URL('migrations/', getDbDirectoryUrl(root));
}
@@ -66,6 +66,10 @@ export async function recreateTables({ db, tables }: { db: SqliteDB; tables: DBTables })
  ]);
}

export function getDropTableIfExistsQuery(tableName: string) {
  return `DROP TABLE IF EXISTS ${sqlite.escapeName(tableName)}`;
}

export function getCreateTableQuery(tableName: string, table: DBTable) {
  let query = `CREATE TABLE ${sqlite.escapeName(tableName)} (`;
@@ -4,6 +4,7 @@ import { column, defineDB, defineTable } from 'astro:db';
const Author = defineTable({
  columns: {
    name: column.text(),
    age2: column.number({ optional: true }),
  },
});
packages/db/test/fixtures/basics/db/theme.ts (vendored, 2 lines changed)
@@ -9,7 +9,7 @@ export const Themes = defineTable({
    updated: column.date({
      default: NOW,
    }),
    isDark: column.boolean({ default: sql`TRUE` }),
    isDark: column.boolean({ default: sql`TRUE`, deprecated: true }),
    owner: column.text({ optional: true, default: sql`NULL` }),
  },
});
@@ -24,29 +24,21 @@ const userInitial = tableSchema.parse(
  })
);

const defaultAmbiguityResponses = {
  collectionRenames: {},
  columnRenames: {},
};

function userChangeQueries(oldTable, newTable, ambiguityResponses = defaultAmbiguityResponses) {
function userChangeQueries(oldTable, newTable) {
  return getCollectionChangeQueries({
    collectionName: TABLE_NAME,
    oldCollection: oldTable,
    newCollection: newTable,
    ambiguityResponses,
  });
}

function configChangeQueries(
  oldCollections,
  newCollections,
  ambiguityResponses = defaultAmbiguityResponses
) {
  return getMigrationQueries({
    oldSnapshot: { schema: oldCollections, experimentalVersion: 1 },
    newSnapshot: { schema: newCollections, experimentalVersion: 1 },
    ambiguityResponses,
  });
}

@@ -63,7 +55,10 @@ describe('column queries', () => {
    const oldCollections = {};
    const newCollections = { [TABLE_NAME]: userInitial };
    const { queries } = await configChangeQueries(oldCollections, newCollections);
    expect(queries).to.deep.equal([getCreateTableQuery(TABLE_NAME, userInitial)]);
    expect(queries).to.deep.equal([
      `DROP TABLE IF EXISTS "${TABLE_NAME}"`,
      `CREATE TABLE "${TABLE_NAME}" (_id INTEGER PRIMARY KEY, "name" text NOT NULL, "age" integer NOT NULL, "email" text NOT NULL UNIQUE, "mi" text)`,
    ]);
  });

  it('should drop table for removed tables', async () => {

@@ -73,15 +68,42 @@ describe('column queries', () => {
    expect(queries).to.deep.equal([`DROP TABLE "${TABLE_NAME}"`]);
  });

  it('should rename table for renamed tables', async () => {
  it('should error if possible table rename is detected', async () => {
    const rename = 'Peeps';
    const oldCollections = { [TABLE_NAME]: userInitial };
    const newCollections = { [rename]: userInitial };
    const { queries } = await configChangeQueries(oldCollections, newCollections, {
      ...defaultAmbiguityResponses,
      collectionRenames: { [rename]: TABLE_NAME },
    let error = null;
    try {
      await configChangeQueries(oldCollections, newCollections, {
        collectionRenames: { [rename]: TABLE_NAME },
      });
    } catch (e) {
      error = e.message;
    }
    expect(error).to.include.string('Potential table rename detected');
  });

  it('should error if possible column rename is detected', async () => {
    const blogInitial = tableSchema.parse({
      columns: {
        title: column.text(),
      },
    });
    expect(queries).to.deep.equal([`ALTER TABLE "${TABLE_NAME}" RENAME TO "${rename}"`]);
    const blogFinal = tableSchema.parse({
      columns: {
        title2: column.text(),
      },
    });
    let error = null;
    try {
      await configChangeQueries(
        { [TABLE_NAME]: blogInitial },
        { [TABLE_NAME]: blogFinal },
      );
    } catch (e) {
      error = e.message;
    }
    expect(error).to.include.string('Potential column rename detected');
  });
});

@@ -139,27 +161,6 @@ describe('column queries', () => {
    ]);
  });

  describe('ALTER RENAME COLUMN', () => {
    it('when renaming a column', async () => {
      const userFinal = {
        ...userInitial,
        columns: {
          ...userInitial.columns,
        },
      };
      userFinal.columns.middleInitial = userFinal.columns.mi;
      delete userFinal.columns.mi;

      const { queries } = await userChangeQueries(userInitial, userFinal, {
        collectionRenames: {},
        columnRenames: { [TABLE_NAME]: { middleInitial: 'mi' } },
      });
      expect(queries).to.deep.equal([
        `ALTER TABLE "${TABLE_NAME}" RENAME COLUMN "mi" TO "middleInitial"`,
      ]);
    });
  });

  describe('Lossy table recreate', () => {
    it('when changing a column type', async () => {
      const userFinal = {