Merge pull request #2050 from logto-io/gao-log-4314-cli-alteration-deploy-command
feat(cli): `db alteration deploy` command

Commit 5f61450400
14 changed files with 252 additions and 35 deletions

packages/cli/.gitignore (new file, vendored, 1 line)
@@ -0,0 +1 @@
alteration-scripts/

@@ -35,9 +35,11 @@
  },
  "dependencies": {
    "@logto/schemas": "^1.0.0-beta.10",
    "@silverhand/essentials": "^1.2.1",
    "chalk": "^4.1.2",
    "decamelize": "^5.0.0",
    "find-up": "^5.0.0",
    "fs-extra": "^10.1.0",
    "got": "^11.8.2",
    "hpagent": "^1.0.0",
    "inquirer": "^8.2.2",
@@ -56,6 +58,7 @@
    "@silverhand/eslint-config": "1.0.0",
    "@silverhand/ts-config": "1.0.0",
    "@types/decompress": "^4.2.4",
    "@types/fs-extra": "^9.0.13",
    "@types/inquirer": "^8.2.1",
    "@types/node": "^16.0.0",
    "@types/semver": "^7.3.12",

packages/cli/src/commands/database/alteration.ts (new file, 134 lines)
@@ -0,0 +1,134 @@
import path from 'path';

import { AlterationScript } from '@logto/schemas/lib/types/alteration';
import { conditionalString } from '@silverhand/essentials';
import chalk from 'chalk';
import { copy, existsSync, remove, readdir } from 'fs-extra';
import { DatabasePool } from 'slonik';
import { CommandModule } from 'yargs';

import { createPoolFromConfig } from '../../database';
import {
  getCurrentDatabaseAlterationTimestamp,
  updateDatabaseTimestamp,
} from '../../queries/logto-config';
import { getPathInModule, log } from '../../utilities';

const alterationFileNameRegex = /-(\d+)-?.*\.js$/;

const getTimestampFromFileName = (fileName: string) => {
  const match = alterationFileNameRegex.exec(fileName);

  if (!match?.[1]) {
    throw new Error(`Can not get timestamp: ${fileName}`);
  }

  return Number(match[1]);
};

const importAlterationScript = async (filePath: string): Promise<AlterationScript> => {
  // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
  const module = await import(filePath);

  // eslint-disable-next-line no-restricted-syntax
  return module.default as AlterationScript;
};

type AlterationFile = { path: string; filename: string };

const getAlterationFiles = async (): Promise<AlterationFile[]> => {
  const alterationDirectory = getPathInModule('@logto/schemas', 'alterations');
  // Until we migrate to ESM
  // eslint-disable-next-line unicorn/prefer-module
  const localAlterationDirectory = path.resolve(__dirname, './alteration-scripts');

  if (!existsSync(alterationDirectory)) {
    return [];
  }

  // We need to copy alteration files to execute in the CLI context to make `slonik` available
  await remove(localAlterationDirectory);
  await copy(alterationDirectory, localAlterationDirectory);

  const directory = await readdir(localAlterationDirectory);
  const files = directory.filter((file) => alterationFileNameRegex.test(file));

  return files
    .slice()
    .sort((file1, file2) => getTimestampFromFileName(file1) - getTimestampFromFileName(file2))
    .map((filename) => ({ path: path.join(localAlterationDirectory, filename), filename }));
};

export const getLatestAlterationTimestamp = async () => {
  const files = await getAlterationFiles();
  const lastFile = files[files.length - 1];

  if (!lastFile) {
    return 0;
  }

  return getTimestampFromFileName(lastFile.filename);
};

const getUndeployedAlterations = async (pool: DatabasePool) => {
  const databaseTimestamp = await getCurrentDatabaseAlterationTimestamp(pool);
  const files = await getAlterationFiles();

  return files.filter(({ filename }) => getTimestampFromFileName(filename) > databaseTimestamp);
};

const deployAlteration = async (
  pool: DatabasePool,
  { path: filePath, filename }: AlterationFile
) => {
  const { up } = await importAlterationScript(filePath);

  try {
    await pool.transaction(async (connection) => {
      await up(connection);
      await updateDatabaseTimestamp(connection, getTimestampFromFileName(filename));
    });
  } catch (error: unknown) {
    console.error(error);

    await pool.end();
    log.error(
      `Error occurred during running alteration ${chalk.green(filename)}.\n\n` +
        " This alteration didn't change anything since it was in a transaction.\n" +
        ' Try to fix the error and deploy again.'
    );
  }

  log.info(`Run alteration ${filename} succeeded`);
};

const alteration: CommandModule<unknown, { action: string }> = {
  command: ['alteration <action>', 'alt', 'alter'],
  describe: 'Perform database alteration',
  builder: (yargs) =>
    yargs.positional('action', {
      describe: 'The action to perform, now it only accepts `deploy`',
      type: 'string',
      demandOption: true,
    }),
  handler: async () => {
    const pool = await createPoolFromConfig();
    const alterations = await getUndeployedAlterations(pool);

    log.info(
      `Found ${alterations.length} alteration${conditionalString(
        alterations.length > 1 && 's'
      )} to deploy`
    );

    // The await inside the loop is intended, alterations should run in order
    for (const alteration of alterations) {
      // eslint-disable-next-line no-await-in-loop
      await deployAlteration(pool, alteration);
    }

    await pool.end();
  },
};

export default alteration;
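
For reference, a small sketch (not part of the diff) of what the file-name handling above does: the regex pulls the numeric timestamp out of a compiled script name, and the undeployed scripts are the ones whose timestamp is greater than the database's current `alterationState.timestamp`. The second file name below is made up for illustration.

// Sketch only: mirrors the private helpers above to show their behavior.
const alterationFileNameRegex = /-(\d+)-?.*\.js$/;

const getTimestampFromFileName = (fileName: string) => {
  const match = alterationFileNameRegex.exec(fileName);

  if (!match?.[1]) {
    throw new Error(`Can not get timestamp: ${fileName}`);
  }

  return Number(match[1]);
};

// The script added in this PR compiles to `1.0.0_beta.10-1-logto-config.js`, which yields 1.
console.log(getTimestampFromFileName('1.0.0_beta.10-1-logto-config.js')); // 1

// With a (hypothetical) later script and a database already at timestamp 1,
// only the newer script would be deployed, in timestamp order.
const files = ['1.0.0_beta.10-1-logto-config.js', '1.0.0_beta.11-2-add-index.js'];
const databaseTimestamp = 1;
const undeployed = files
  .slice()
  .sort((a, b) => getTimestampFromFileName(a) - getTimestampFromFileName(b))
  .filter((file) => getTimestampFromFileName(file) > databaseTimestamp);
console.log(undeployed); // ['1.0.0_beta.11-2-add-index.js']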

@@ -1,6 +1,7 @@
 import { CommandModule } from 'yargs';

 import { noop } from '../../utilities';
+import alteration from './alteration';
 import { getKey, setKey } from './key';
 import seed from './seed';
 import { getUrl, setUrl } from './url';
@@ -15,6 +16,7 @@ const database: CommandModule = {
       .command(getKey)
       .command(setKey)
       .command(seed)
+      .command(alteration)
       .demandCommand(1),
   handler: noop,
 };
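
As a rough sketch (not in this diff), the CLI entry point that makes the new subcommand reachable would register the `database` module with yargs along these lines; the import path, the default export, and the `db` alias are assumptions (the alias comes from the PR title, not from these hunks).

import yargs from 'yargs';
import { hideBin } from 'yargs/helpers';

// Assumed import path; the diff does not include the entry point itself.
import database from './commands/database';

// Registering the `database` module is what exposes `logto db alteration deploy`
// alongside the existing key/seed/url subcommands.
void yargs(hideBin(process.argv)).command(database).demandCommand(1).parse();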

@@ -15,7 +15,9 @@ import { CommandModule } from 'yargs';
 import { z } from 'zod';

 import { createPoolFromConfig, getDatabaseUrlFromConfig, insertInto } from '../../database';
-import { buildApplicationSecret, log } from '../../utilities';
+import { updateDatabaseTimestamp } from '../../queries/logto-config';
+import { buildApplicationSecret, getPathInModule, log } from '../../utilities';
+import { getLatestAlterationTimestamp } from './alteration';

 /**
  * Create a database pool with the database URL in config.
@@ -57,13 +59,7 @@
 };

 const createTables = async (connection: DatabaseTransactionConnection) => {
-  // https://stackoverflow.com/a/49455609/12514940
-  const tableDirectory = path.join(
-    // Until we migrate to ESM
-    // eslint-disable-next-line unicorn/prefer-module
-    path.dirname(require.resolve('@logto/schemas/package.json')),
-    'tables'
-  );
+  const tableDirectory = getPathInModule('@logto/schemas', 'tables');
   const directoryFiles = await readdir(tableDirectory);
   const tableFiles = directoryFiles.filter((file) => file.endsWith('.sql'));
   const queries = await Promise.all(
@@ -90,8 +86,6 @@ const seedTables = async (connection: DatabaseTransactionConnection) => {
     defaultRole,
   } = seeds;

-  // TODO: update database alteration timestamp when migrate alteration process from core
-
   await Promise.all([
     connection.query(insertInto(managementResource, 'resources')),
     connection.query(insertInto(createDefaultSetting(), 'settings')),
@@ -100,6 +94,7 @@ const seedTables = async (connection: DatabaseTransactionConnection) => {
       insertInto(createDemoAppApplication(buildApplicationSecret()), 'applications')
     ),
     connection.query(insertInto(defaultRole, 'roles')),
+    updateDatabaseTimestamp(connection, await getLatestAlterationTimestamp()),
   ]);
   log.info('Seed tables succeeded.');
 };
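
To make the effect of that last hunk concrete: a freshly seeded database is stamped with the latest alteration timestamp, so no alterations are pending right after seeding. Assuming the single script added in this PR is the only alteration present (its derived timestamp is 1), the stored value would look roughly like this; the `updatedAt` string is illustrative.

import { AlterationState } from '@logto/schemas';

// Illustrative only: what updateDatabaseTimestamp() stores after a fresh seed.
// The real code uses new Date().toISOString() for `updatedAt`.
const seededState: AlterationState = {
  timestamp: 1,
  updatedAt: '2022-09-28T00:00:00.000Z',
};
console.log(seededState);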

@@ -1,5 +1,12 @@
-import { LogtoConfig, logtoConfigGuards, LogtoConfigKey, LogtoConfigs } from '@logto/schemas';
-import { DatabasePool, sql } from 'slonik';
+import {
+  AlterationState,
+  alterationStateGuard,
+  LogtoConfig,
+  logtoConfigGuards,
+  LogtoConfigKey,
+  LogtoConfigs,
+} from '@logto/schemas';
+import { DatabasePool, DatabaseTransactionConnection, sql } from 'slonik';
 import { z } from 'zod';

 import { convertToIdentifiers } from '../database';
@@ -13,7 +20,7 @@ export const getRowsByKeys = async (pool: DatabasePool, keys: LogtoConfigKey[])
   `);

 export const updateValueByKey = async <T extends LogtoConfigKey>(
-  pool: DatabasePool,
+  pool: DatabasePool | DatabaseTransactionConnection,
   key: T,
   value: z.infer<typeof logtoConfigGuards[T]>
 ) =>
@@ -24,3 +31,36 @@
       on conflict (${fields.key}) do update set ${fields.value}=excluded.${fields.value}
     `
   );
+
+export const getCurrentDatabaseAlterationTimestamp = async (pool: DatabasePool) => {
+  try {
+    const result = await pool.maybeOne<LogtoConfig>(
+      sql`select * from ${table} where ${fields.key}=${LogtoConfigKey.AlterationState}`
+    );
+    const parsed = alterationStateGuard.safeParse(result?.value);
+
+    return (parsed.success && parsed.data.timestamp) || 0;
+  } catch (error: unknown) {
+    const result = z.object({ code: z.string() }).safeParse(error);
+
+    // Relation does not exist, treat as 0
+    // https://www.postgresql.org/docs/14/errcodes-appendix.html
+    if (result.success && result.data.code === '42P01') {
+      return 0;
+    }
+
+    throw error;
+  }
+};
+
+export const updateDatabaseTimestamp = async (
+  connection: DatabaseTransactionConnection,
+  timestamp: number
+) => {
+  const value: AlterationState = {
+    timestamp,
+    updatedAt: new Date().toISOString(),
+  };
+
+  return updateValueByKey(connection, LogtoConfigKey.AlterationState, value);
+};
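
A minimal usage sketch (not part of the diff) of the new query helper above, using the same relative imports as the command files in this PR and a slonik pool from `createPoolFromConfig()`:

// Same relative imports as the command files above.
import { createPoolFromConfig } from '../../database';
import { getCurrentDatabaseAlterationTimestamp } from '../../queries/logto-config';

const printAlterationTimestamp = async () => {
  const pool = await createPoolFromConfig();

  // Returns 0 when there is no `alterationState` row yet, or when the config table
  // itself does not exist (Postgres error code 42P01), per the catch branch above.
  const timestamp = await getCurrentDatabaseAlterationTimestamp(pool);
  console.log(`Current database alteration timestamp: ${timestamp}`);

  await pool.end();
};

void printAlterationTimestamp();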

@@ -1,5 +1,6 @@
 import { execSync } from 'child_process';
 import { createWriteStream } from 'fs';
+import path from 'path';

 import chalk from 'chalk';
 import got, { Progress } from 'got';
@@ -70,6 +71,15 @@ export const downloadFile = async (url: string, destination: string) => {
   });
 };

+export const getPathInModule = (moduleName: string, relativePath = '/') =>
+  // https://stackoverflow.com/a/49455609/12514940
+  path.join(
+    // Until we migrate to ESM
+    // eslint-disable-next-line unicorn/prefer-module
+    path.dirname(require.resolve(`${moduleName}/package.json`)),
+    relativePath
+  );
+
 // TODO: Move to `@silverhand/essentials`
 // Intended
 // eslint-disable-next-line @typescript-eslint/no-empty-function
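
A short usage note (not from the diff): `getPathInModule` is how this PR locates directories shipped inside the installed `@logto/schemas` package; the resolved absolute path in the comment is only an assumption about a typical workspace layout.

import { getPathInModule } from '../../utilities';

// e.g. something like <project>/node_modules/@logto/schemas/alterations
const alterationDirectory = getPathInModule('@logto/schemas', 'alterations'); // used by alteration.ts
const tableDirectory = getPathInModule('@logto/schemas', 'tables'); // used by seed.ts
console.log(alterationDirectory, tableDirectory);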

@@ -8,5 +8,6 @@
   },
   "include": [
     "src"
-  ]
+  ],
+  "exclude": ["**/alteration-scripts"]
 }

@@ -1,6 +1,3 @@
-import { LogtoConfigKey } from '@logto/schemas';
-
-export const alterationStateKey: LogtoConfigKey = 'alterationState';
 export const logtoConfigsTableFilePath = 'node_modules/@logto/schemas/tables/logto_configs.sql';
 export const alterationFilesDirectorySource = 'node_modules/@logto/schemas/alterations';
 export const alterationFilesDirectory = 'alterations/';

@@ -1,11 +1,10 @@
-import { LogtoConfigs } from '@logto/schemas';
+import { LogtoConfigKey, LogtoConfigs } from '@logto/schemas';
 import { createMockPool, createMockQueryResult, sql } from 'slonik';

 import { convertToIdentifiers } from '@/database/utils';
 import { QueryType, expectSqlAssert } from '@/utils/test-utils';

 import * as functions from '.';
-import { alterationStateKey } from './constants';

 const mockQuery: jest.MockedFunction<QueryType> = jest.fn();
 const {
@@ -59,7 +58,7 @@ describe('getCurrentDatabaseTimestamp()', () => {

     mockQuery.mockImplementationOnce(async (sql, values) => {
       expectSqlAssert(sql, expectSql.sql);
-      expect(values).toEqual([alterationStateKey]);
+      expect(values).toEqual([LogtoConfigKey.AlterationState]);

       return createMockQueryResult([]);
     });
@@ -74,7 +73,7 @@ describe('getCurrentDatabaseTimestamp()', () => {

     mockQuery.mockImplementationOnce(async (sql, values) => {
       expectSqlAssert(sql, expectSql.sql);
-      expect(values).toEqual([alterationStateKey]);
+      expect(values).toEqual([LogtoConfigKey.AlterationState]);

       return createMockQueryResult([{ value: 'some_value' }]);
     });
@@ -89,7 +88,7 @@ describe('getCurrentDatabaseTimestamp()', () => {

     mockQuery.mockImplementationOnce(async (sql, values) => {
       expectSqlAssert(sql, expectSql.sql);
-      expect(values).toEqual([alterationStateKey]);
+      expect(values).toEqual([LogtoConfigKey.AlterationState]);

       // @ts-expect-error createMockQueryResult doesn't support jsonb
       return createMockQueryResult([{ value: { timestamp, updatedAt: 'now' } }]);
@@ -148,7 +147,10 @@ describe('updateDatabaseTimestamp()', () => {
   it('sends upsert sql with timestamp and updatedAt', async () => {
     mockQuery.mockImplementationOnce(async (sql, values) => {
       expectSqlAssert(sql, expectSql.sql);
-      expect(values).toEqual([alterationStateKey, JSON.stringify({ timestamp, updatedAt })]);
+      expect(values).toEqual([
+        LogtoConfigKey.AlterationState,
+        JSON.stringify({ timestamp, updatedAt }),
+      ]);

       return createMockQueryResult([]);
     });

@@ -2,7 +2,13 @@ import { existsSync } from 'fs';
 import { readdir, readFile } from 'fs/promises';
 import path from 'path';

-import { LogtoConfig, LogtoConfigs, AlterationState, alterationStateGuard } from '@logto/schemas';
+import {
+  LogtoConfig,
+  LogtoConfigs,
+  AlterationState,
+  alterationStateGuard,
+  LogtoConfigKey,
+} from '@logto/schemas';
 import { AlterationScript } from '@logto/schemas/lib/types/alteration';
 import { conditionalString } from '@silverhand/essentials';
 import chalk from 'chalk';
@@ -14,7 +20,6 @@ import { convertToIdentifiers } from '@/database/utils';

 import {
   logtoConfigsTableFilePath,
-  alterationStateKey,
   alterationFilesDirectory,
   alterationFilesDirectorySource,
 } from './constants';
@@ -38,7 +43,7 @@ export const isLogtoConfigsTableExists = async (pool: DatabasePool) => {
 export const getCurrentDatabaseTimestamp = async (pool: DatabasePool) => {
   try {
     const query = await pool.maybeOne<LogtoConfig>(
-      sql`select * from ${table} where ${fields.key}=${alterationStateKey}`
+      sql`select * from ${table} where ${fields.key}=${LogtoConfigKey.AlterationState}`
     );
     const { timestamp } = alterationStateGuard.parse(query?.value);

@@ -66,7 +71,7 @@ export const updateDatabaseTimestamp = async (pool: DatabasePool, timestamp?: nu
   await pool.query(
     sql`
       insert into ${table} (${fields.key}, ${fields.value})
-      values (${alterationStateKey}, ${sql.jsonb(value)})
+      values (${LogtoConfigKey.AlterationState}, ${sql.jsonb(value)})
       on conflict (${fields.key}) do update set ${fields.value}=excluded.${fields.value}
     `
   );

packages/schemas/alterations/1.0.0_beta.10-1-logto-config.ts (new file, 20 lines)
@@ -0,0 +1,20 @@
import { sql } from 'slonik';

import { AlterationScript } from '../lib/types/alteration';

const alteration: AlterationScript = {
  up: async (pool) => {
    await pool.query(sql`
      create table _logto_configs (
        key varchar(256) not null,
        value jsonb /* @use ArbitraryObject */ not null default '{}'::jsonb,
        primary key (key)
      );
    `);
  },
  down: async (pool) => {
    await pool.query(sql`drop table _logto_configs;`);
  },
};

export default alteration;

@@ -18,13 +18,14 @@ export const logtoOidcConfigGuard = z.object({
 export type LogtoOidcConfig = z.infer<typeof logtoOidcConfigGuard>;

 // Summary
+export enum LogtoConfigKey {
+  AlterationState = 'alterationState',
+  OidcConfig = 'oidcConfig',
+}
+
+export const logtoConfigKeys = Object.values(LogtoConfigKey);
+
 export const logtoConfigGuards = Object.freeze({
-  alterationState: alterationStateGuard,
-  oidcConfig: logtoOidcConfigGuard,
+  [LogtoConfigKey.AlterationState]: alterationStateGuard,
+  [LogtoConfigKey.OidcConfig]: logtoOidcConfigGuard,
 } as const);
-
-export type LogtoConfigKey = keyof typeof logtoConfigGuards;
-
-// `as` is intended since we'd like to keep `logtoConfigGuards` as the SSOT of keys
-// eslint-disable-next-line no-restricted-syntax
-export const logtoConfigKeys = Object.keys(logtoConfigGuards) as LogtoConfigKey[];
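
A small sketch (not in the diff) of how the new enum and the guard map are meant to be used together after this change; the raw row value below is made up for illustration.

import { LogtoConfigKey, logtoConfigGuards } from '@logto/schemas';

// Pick the guard by key, then parse an untrusted `value` column read from the config table.
const rawValue: unknown = { timestamp: 1, updatedAt: '2022-09-28T00:00:00.000Z' }; // made-up row value
const alterationState = logtoConfigGuards[LogtoConfigKey.AlterationState].parse(rawValue);
console.log(alterationState.timestamp); // 1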

pnpm-lock.yaml (generated, 6 lines changed)
@@ -22,8 +22,10 @@ importers:
    specifiers:
      '@logto/schemas': ^1.0.0-beta.10
      '@silverhand/eslint-config': 1.0.0
      '@silverhand/essentials': ^1.2.1
      '@silverhand/ts-config': 1.0.0
      '@types/decompress': ^4.2.4
      '@types/fs-extra': ^9.0.13
      '@types/inquirer': ^8.2.1
      '@types/node': ^16.0.0
      '@types/semver': ^7.3.12
@@ -33,6 +35,7 @@ importers:
      decamelize: ^5.0.0
      eslint: ^8.21.0
      find-up: ^5.0.0
      fs-extra: ^10.1.0
      got: ^11.8.2
      hpagent: ^1.0.0
      inquirer: ^8.2.2
@@ -53,9 +56,11 @@ importers:
      zod: ^3.18.0
    dependencies:
      '@logto/schemas': link:../schemas
      '@silverhand/essentials': 1.2.1
      chalk: 4.1.2
      decamelize: 5.0.1
      find-up: 5.0.0
      fs-extra: 10.1.0
      got: 11.8.3
      hpagent: 1.0.0
      inquirer: 8.2.2
@@ -73,6 +78,7 @@ importers:
      '@silverhand/eslint-config': 1.0.0_swk2g7ygmfleszo5c33j4vooni
      '@silverhand/ts-config': 1.0.0_typescript@4.7.4
      '@types/decompress': 4.2.4
      '@types/fs-extra': 9.0.13
      '@types/inquirer': 8.2.1
      '@types/node': 16.11.12
      '@types/semver': 7.3.12