0
Fork 0
mirror of https://github.com/logto-io/logto.git synced 2024-12-16 20:26:19 -05:00

refactor(core,schemas): rename migration to alteration (#2002)

This commit is contained in:
wangsijie 2022-09-26 16:38:27 +08:00 committed by GitHub
parent 999867ea40
commit 2cb4091039
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
20 changed files with 134 additions and 124 deletions

View file

@ -11,7 +11,7 @@
"prepack": "lerna run --stream prepack",
"dev": "lerna run --stream prepack -- --incremental && lerna --ignore=@logto/integration-test run --parallel dev",
"start": "cd packages/core && NODE_ENV=production node . --from-root",
"migration-deploy": "cd packages/core && pnpm migration-deploy",
"alteration": "cd packages/core && pnpm alteration",
"ci:build": "lerna run --stream build",
"ci:lint": "lerna run --parallel lint",
"ci:stylelint": "lerna run --parallel stylelint",

View file

@ -16,8 +16,8 @@
"start": "NODE_ENV=production node build/index.js",
"add-connector": "node build/cli/add-connector.js",
"add-official-connectors": "node build/cli/add-official-connectors.js",
"migration-deploy": "node build/cli/migration-deploy.js",
"test": "jest --testPathIgnorePatterns=/core/connectors/",
"alteration": "node build/cli/alteration.js",
"test": "jest",
"test:coverage": "jest --coverage --silent",
"test:report": "codecov -F core"
},

View file

@ -0,0 +1,3 @@
export const alterationStateKey = 'alterationState';
export const logtoConfigsTableFilePath = 'node_modules/@logto/schemas/tables/logto_configs.sql';
export const alterationFilesDirectory = 'node_modules/@logto/schemas/alterations';

View file

@ -5,7 +5,7 @@ import { convertToIdentifiers } from '@/database/utils';
import { QueryType, expectSqlAssert } from '@/utils/test-utils';
import * as functions from '.';
import { migrationStateKey } from './constants';
import { alterationStateKey } from './constants';
const mockQuery: jest.MockedFunction<QueryType> = jest.fn();
const {
@ -13,7 +13,7 @@ const {
isLogtoConfigsTableExists,
updateDatabaseTimestamp,
getCurrentDatabaseTimestamp,
getUndeployedMigrations,
getUndeployedAlterations,
} = functions;
const pool = createMockPool({
query: async (sql, values) => {
@ -59,7 +59,7 @@ describe('getCurrentDatabaseTimestamp()', () => {
mockQuery.mockImplementationOnce(async (sql, values) => {
expectSqlAssert(sql, expectSql.sql);
expect(values).toEqual([migrationStateKey]);
expect(values).toEqual([alterationStateKey]);
return createMockQueryResult([]);
});
@ -74,7 +74,7 @@ describe('getCurrentDatabaseTimestamp()', () => {
mockQuery.mockImplementationOnce(async (sql, values) => {
expectSqlAssert(sql, expectSql.sql);
expect(values).toEqual([migrationStateKey]);
expect(values).toEqual([alterationStateKey]);
return createMockQueryResult([{ value: 'some_value' }]);
});
@ -89,7 +89,7 @@ describe('getCurrentDatabaseTimestamp()', () => {
mockQuery.mockImplementationOnce(async (sql, values) => {
expectSqlAssert(sql, expectSql.sql);
expect(values).toEqual([migrationStateKey]);
expect(values).toEqual([alterationStateKey]);
// @ts-expect-error createMockQueryResult doesn't support jsonb
return createMockQueryResult([{ value: { timestamp, updatedAt: 'now' } }]);
@ -148,7 +148,7 @@ describe('updateDatabaseTimestamp()', () => {
it('sends upsert sql with timestamp and updatedAt', async () => {
mockQuery.mockImplementationOnce(async (sql, values) => {
expectSqlAssert(sql, expectSql.sql);
expect(values).toEqual([migrationStateKey, JSON.stringify({ timestamp, updatedAt })]);
expect(values).toEqual([alterationStateKey, JSON.stringify({ timestamp, updatedAt })]);
return createMockQueryResult([]);
});
@ -158,10 +158,10 @@ describe('updateDatabaseTimestamp()', () => {
});
});
describe('getUndeployedMigrations()', () => {
describe('getUndeployedAlterations()', () => {
beforeEach(() => {
jest
.spyOn(functions, 'getMigrationFiles')
.spyOn(functions, 'getAlterationFiles')
.mockResolvedValueOnce([
'1.0.0-1663923770-a.js',
'1.0.0-1663923772-c.js',
@ -169,10 +169,10 @@ describe('getUndeployedMigrations()', () => {
]);
});
it('returns all files with right order if database migration timestamp is null', async () => {
it('returns all files with right order if database timestamp is null', async () => {
jest.spyOn(functions, 'getCurrentDatabaseTimestamp').mockResolvedValueOnce(null);
await expect(getUndeployedMigrations(pool)).resolves.toEqual([
await expect(getUndeployedAlterations(pool)).resolves.toEqual([
'1.0.0-1663923770-a.js',
'1.0.0-1663923771-b.js',
'1.0.0-1663923772-c.js',
@ -182,7 +182,7 @@ describe('getUndeployedMigrations()', () => {
it('returns files whose timestamp is greater than database timestamp', async () => {
jest.spyOn(functions, 'getCurrentDatabaseTimestamp').mockResolvedValueOnce(1_663_923_770);
await expect(getUndeployedMigrations(pool)).resolves.toEqual([
await expect(getUndeployedAlterations(pool)).resolves.toEqual([
'1.0.0-1663923771-b.js',
'1.0.0-1663923772-c.js',
]);

View file

@ -4,10 +4,10 @@ import path from 'path';
import { LogtoConfig, LogtoConfigs } from '@logto/schemas';
import {
MigrationScript,
MigrationState,
migrationStateGuard,
} from '@logto/schemas/migrations/types';
AlterationScript,
AlterationState,
alterationStateGuard,
} from '@logto/schemas/alterations/types';
import { conditionalString } from '@silverhand/essentials';
import chalk from 'chalk';
import { DatabasePool, sql } from 'slonik';
@ -15,8 +15,12 @@ import { raw } from 'slonik-sql-tag-raw';
import { convertToIdentifiers } from '@/database/utils';
import { logtoConfigsTableFilePath, migrationFilesDirectory, migrationStateKey } from './constants';
import { getTimestampFromFileName, migrationFileNameRegex } from './utils';
import {
logtoConfigsTableFilePath,
alterationStateKey,
alterationFilesDirectory,
} from './constants';
import { getTimestampFromFileName, alterationFileNameRegex } from './utils';
const { table, fields } = convertToIdentifiers(LogtoConfigs);
@ -36,9 +40,9 @@ export const isLogtoConfigsTableExists = async (pool: DatabasePool) => {
export const getCurrentDatabaseTimestamp = async (pool: DatabasePool) => {
try {
const query = await pool.maybeOne<LogtoConfig>(
sql`select * from ${table} where ${fields.key}=${migrationStateKey}`
sql`select * from ${table} where ${fields.key}=${alterationStateKey}`
);
const { timestamp } = migrationStateGuard.parse(query?.value);
const { timestamp } = alterationStateGuard.parse(query?.value);
return timestamp;
} catch {
@ -56,48 +60,48 @@ export const updateDatabaseTimestamp = async (pool: DatabasePool, timestamp?: nu
await createLogtoConfigsTable(pool);
}
const value: MigrationState = {
timestamp: timestamp ?? (await getLatestMigrationTiemstamp()),
const value: AlterationState = {
timestamp: timestamp ?? (await getLatestAlterationTimestamp()),
updatedAt: new Date().toISOString(),
};
await pool.query(
sql`
insert into ${table} (${fields.key}, ${fields.value})
values (${migrationStateKey}, ${JSON.stringify(value)})
values (${alterationStateKey}, ${JSON.stringify(value)})
on conflict (${fields.key}) do update set ${fields.value}=excluded.${fields.value}
`
);
};
export const getLatestMigrationTiemstamp = async () => {
const files = await getMigrationFiles();
export const getLatestAlterationTimestamp = async () => {
const files = await getAlterationFiles();
const latestFile = files[files.length - 1];
if (!latestFile) {
throw new Error('No migration files found.');
throw new Error('No alteration files found.');
}
return getTimestampFromFileName(latestFile);
};
export const getMigrationFiles = async () => {
if (!existsSync(migrationFilesDirectory)) {
export const getAlterationFiles = async () => {
if (!existsSync(alterationFilesDirectory)) {
return [];
}
const directory = await readdir(migrationFilesDirectory);
const files = directory.filter((file) => migrationFileNameRegex.test(file));
const directory = await readdir(alterationFilesDirectory);
const files = directory.filter((file) => alterationFileNameRegex.test(file));
return files
.slice()
.sort((file1, file2) => getTimestampFromFileName(file1) - getTimestampFromFileName(file2));
};
export const getUndeployedMigrations = async (pool: DatabasePool) => {
export const getUndeployedAlterations = async (pool: DatabasePool) => {
const databaseTimestamp = await getCurrentDatabaseTimestamp(pool);
const files = await getMigrationFiles();
const files = await getAlterationFiles();
return files
.filter((file) => !databaseTimestamp || getTimestampFromFileName(file) > databaseTimestamp)
@ -105,18 +109,18 @@ export const getUndeployedMigrations = async (pool: DatabasePool) => {
.sort((file1, file2) => getTimestampFromFileName(file1) - getTimestampFromFileName(file2));
};
const importMigration = async (file: string): Promise<MigrationScript> => {
const importAlteration = async (file: string): Promise<AlterationScript> => {
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
const module = await import(
path.join(migrationFilesDirectory, file).replace('node_modules/', '')
path.join(alterationFilesDirectory, file).replace('node_modules/', '')
);
// eslint-disable-next-line no-restricted-syntax
return module.default as MigrationScript;
return module.default as AlterationScript;
};
const runMigration = async (pool: DatabasePool, file: string) => {
const { up } = await importMigration(file);
const deployAlteration = async (pool: DatabasePool, file: string) => {
const { up } = await importAlteration(file);
try {
await pool.transaction(async (connect) => {
@ -124,7 +128,7 @@ const runMigration = async (pool: DatabasePool, file: string) => {
});
} catch (error: unknown) {
if (error instanceof Error) {
console.log(`${chalk.red('[migration]')} run ${file} failed: ${error.message}.`);
console.log(`${chalk.red('[alteration]')} run ${file} failed: ${error.message}.`);
return;
}
@ -133,21 +137,21 @@ const runMigration = async (pool: DatabasePool, file: string) => {
}
await updateDatabaseTimestamp(pool, getTimestampFromFileName(file));
console.log(`${chalk.blue('[migration]')} run ${file} succeeded.`);
console.log(`${chalk.blue('[alteration]')} run ${file} succeeded.`);
};
export const runMigrations = async (pool: DatabasePool) => {
const migrations = await getUndeployedMigrations(pool);
export const deployAlterations = async (pool: DatabasePool) => {
const alterations = await getUndeployedAlterations(pool);
console.log(
`${chalk.blue('[migration]')} found ${migrations.length} migration${conditionalString(
migrations.length > 1 && 's'
`${chalk.blue('[alteration]')} found ${alterations.length} alteration${conditionalString(
alterations.length > 1 && 's'
)}`
);
// The await inside the loop is intended, migrations should run in order
for (const migration of migrations) {
// The await inside the loop is intended, alterations should run in order
for (const alteration of alterations) {
// eslint-disable-next-line no-await-in-loop
await runMigration(pool, migration);
await deployAlteration(pool, alteration);
}
};

View file

@ -1,7 +1,7 @@
export const migrationFileNameRegex = /-(\d{10,11})-?.*\.js$/;
export const alterationFileNameRegex = /-(\d{10,11})-?.*\.js$/;
export const getTimestampFromFileName = (fileName: string) => {
const match = migrationFileNameRegex.exec(fileName);
const match = alterationFileNameRegex.exec(fileName);
if (!match?.[1]) {
throw new Error(`Can not get timestamp: ${fileName}`);

View file

@ -2,16 +2,22 @@ import 'module-alias/register';
import { assertEnv } from '@silverhand/essentials';
import { createPool } from 'slonik';
import { deployAlterations } from '@/alteration';
import { configDotEnv } from '@/env-set/dot-env';
import { runMigrations } from '@/migration';
configDotEnv();
const deploy = async () => {
const databaseUrl = assertEnv('DB_URL');
const pool = await createPool(databaseUrl);
await runMigrations(pool);
await deployAlterations(pool);
await pool.end();
};
const command = process.argv[2];
if (command !== 'deploy') {
throw new Error('Unsupported command.');
}
void deploy();

View file

@ -8,7 +8,7 @@ import { createPool, parseDsn, sql, stringifyDsn } from 'slonik';
import { createInterceptors } from 'slonik-interceptor-preset';
import { raw } from 'slonik-sql-tag-raw';
import { updateDatabaseTimestamp } from '@/migration';
import { updateDatabaseTimestamp } from '@/alteration';
import { buildApplicationSecret } from '@/utils/id';
import { convertToPrimitiveOrSql } from './utils';
@ -82,7 +82,7 @@ export const createDatabaseCli = async (dsn: string) => {
}
await updateDatabaseTimestamp(pool);
console.log(`${chalk.blue('[create-tables]')} Update migration state succeeded.`);
console.log(`${chalk.blue('[create-tables]')} Update alteration state succeeded.`);
};
const seedTables = async () => {

View file

@ -1,19 +1,19 @@
import inquirer from 'inquirer';
import { DatabasePool } from 'slonik';
import { getUndeployedMigrations, runMigrations } from '@/migration';
import { getUndeployedAlterations, deployAlterations } from '@/alteration';
import { allYes } from './parameters';
export const checkMigrationState = async (pool: DatabasePool) => {
const migrations = await getUndeployedMigrations(pool);
export const checkAlterationState = async (pool: DatabasePool) => {
const alterations = await getUndeployedAlterations(pool);
if (migrations.length === 0) {
if (alterations.length === 0) {
return;
}
const error = new Error(
`Found undeployed migrations, you must deploy them first by "pnpm migration-deploy" command, reference: https://docs.logto.io/docs/recipes/deployment/#migration`
`Found undeployed database alterations, you must deploy them first by "pnpm alteration deploy" command, reference: https://docs.logto.io/docs/recipes/deployment/#database-alteration`
);
if (allYes) {
@ -23,12 +23,12 @@ export const checkMigrationState = async (pool: DatabasePool) => {
const deploy = await inquirer.prompt({
type: 'confirm',
name: 'value',
message: `Found undeployed migrations, would you like to deploy now?`,
message: `Found undeployed alterations, would you like to deploy now?`,
});
if (!deploy.value) {
throw error;
}
await runMigrations(pool);
await deployAlterations(pool);
};

View file

@ -6,7 +6,7 @@ import { DatabasePool } from 'slonik';
import { appendPath } from '@/utils/url';
import { addConnectors } from './add-connectors';
import { checkMigrationState } from './check-migration-state';
import { checkAlterationState } from './check-migration-state';
import createPoolByEnv from './create-pool-by-env';
import loadOidcValues from './oidc';
import { isTrue } from './parameters';
@ -83,7 +83,7 @@ function createEnvSet() {
await addConnectors(values.connectorDirectory);
if (pool) {
await checkMigrationState(pool);
await checkAlterationState(pool);
}
},
};

View file

@ -1,3 +0,0 @@
export const migrationStateKey = 'migrationState';
export const logtoConfigsTableFilePath = 'node_modules/@logto/schemas/tables/logto_configs.sql';
export const migrationFilesDirectory = 'node_modules/@logto/schemas/migrations';

View file

@ -1,4 +1,4 @@
/src/db-entries
/migrations/*.d.ts
/migrations/*.js
/migrations/*.tsbuildinfo
/alterations/*.d.ts
/alterations/*.js
/alterations/*.tsbuildinfo

View file

@ -0,0 +1,42 @@
# Database Alteration
The folder for all alteration files.
## Format
The alteration files are named in the format of `<version>-<timestamp>-name.js` where `<timestamp>` is the unix timestamp of when the alteration was created and `name` is the name of the alteration, `version` is this npm package's version number.
As for development, the `version` is "next" until the package is released.
Note that, you SHOULD NOT change the content of the alteration files after they are created. If you need to change the alteration, you should create a new alteration file with the new content.
## Typing
```ts
type AlterationScript = {
up: (connection: DatabaseTransactionConnection) => Promise<void>;
down: (connection: DatabaseTransactionConnection) => Promise<void>;
};
```
When the alteration script is executed, the `up` function is called to alter the database schema.
The `down` function is designed for the future downgrade feature.
## Example
```ts
export const up = async (connection) => {
await connection.query(`
ALTER TABLE "user"
ADD COLUMN "email" VARCHAR(255) NOT NULL;
`);
};
export const down = async (connection) => {
await connection.query(`
ALTER TABLE "user"
DROP COLUMN "email";
`);
};
```

View file

@ -1,11 +1,11 @@
import { DatabasePool, sql } from 'slonik';
export type MigrationScript = {
export type AlterationScript = {
up: (pool: DatabasePool) => Promise<void>;
down: (pool: DatabasePool) => Promise<void>;
};
const migration: MigrationScript = {
const alteration: AlterationScript = {
up: async (pool) => {
// [Pull] feat(core): machine to machine apps #1973
await pool.query(sql`
@ -31,4 +31,4 @@ const migration: MigrationScript = {
},
};
export default migration;
export default alteration;

View file

@ -1,14 +1,14 @@
import { DatabaseTransactionConnection } from 'slonik';
import { z } from 'zod';
export const migrationStateGuard = z.object({
export const alterationStateGuard = z.object({
timestamp: z.number(),
updatedAt: z.string().optional(),
});
export type MigrationState = z.infer<typeof migrationStateGuard>;
export type AlterationState = z.infer<typeof alterationStateGuard>;
export type MigrationScript = {
export type AlterationScript = {
up: (connection: DatabaseTransactionConnection) => Promise<void>;
down: (connection: DatabaseTransactionConnection) => Promise<void>;
};

View file

@ -1,42 +0,0 @@
# Database Migrations
The folder for all migration files.
## Format
The migration files are named in the format of `<version>-<timestamp>-name.js` where `<timestamp>` is the unix timestamp of when the migration was created and `name` is the name of the migration, `version` is this npm package's version number.
As for development, the `version` is "next" until the package is released.
Note that, you SHOULD NOT change the content of the migration files after they are created. If you need to change the migration, you should create a new migration file with the new content.
## Typing
```ts
type MigrationScript = {
up: (connection: DatabaseTransactionConnection) => Promise<void>;
down: (connection: DatabaseTransactionConnection) => Promise<void>;
};
```
When the migration script is executed, the `up` function is called to alter the database schema.
The `down` function is designed for the future downgrade feature.
## Example
```ts
export const up = async (connection) => {
await connection.query(`
ALTER TABLE "user"
ADD COLUMN "email" VARCHAR(255) NOT NULL;
`);
};
export const down = async (connection) => {
await connection.query(`
ALTER TABLE "user"
DROP COLUMN "email";
`);
};
```

View file

@ -6,14 +6,14 @@
"license": "MPL-2.0",
"files": [
"lib",
"migrations"
"alterations"
],
"private": true,
"scripts": {
"precommit": "lint-staged",
"generate": "rm -rf src/db-entries && ts-node src/gen/index.ts && eslint \"src/db-entries/**\" --fix",
"build:migrations": "rm -rf migrations/*.d.ts migrations/*.js && tsc -p tsconfig.build.migrations.json",
"build": "pnpm generate && rm -rf lib/ && tsc -p tsconfig.build.json && pnpm build:migrations",
"build:alterations": "rm -rf alterations/*.d.ts alterations/*.js && tsc -p tsconfig.build.alterations.json",
"build": "pnpm generate && rm -rf lib/ && tsc -p tsconfig.build.json && pnpm build:alterations",
"dev": "tsc -p tsconfig.build.json --watch --preserveWatchOutput --incremental",
"lint": "eslint --ext .ts src",
"lint:report": "pnpm lint --format json --output-file report.json",

View file

@ -1,8 +1,8 @@
{
"extends": "./tsconfig",
"compilerOptions": {
"outDir": "migrations",
"outDir": "alterations",
},
"include": ["migrations"],
"include": ["alterations"],
"exclude": []
}

View file

@ -6,7 +6,7 @@
},
"include": [
"src",
"migrations",
"jest.config.ts", "migrations",
"alterations",
"jest.config.ts", "alterations",
]
}