Mirror of https://github.com/logto-io/logto.git (synced 2024-12-16 20:26:19 -05:00)
refactor: remove alteration from core
parent ff900c3a65
commit 9696060997
20 changed files with 217 additions and 474 deletions
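In short, the database alteration (migration) tooling moves out of @logto/core and into @logto/cli: the deploy logic and its tests now live under packages/cli/src/commands/database/, and core's old build/cli/alteration.js entry point is deleted. A minimal sketch of how the command would be invoked after this change, assuming the CLI registers the alteration command under the "db" namespace with an "alt" alias, as the new core script "alteration": "logto db alt" suggests:

    # from packages/core: the package script now delegates to the CLI
    pnpm alteration deploy    # expands to: logto db alt deploy

    # or invoke the CLI directly
    logto db alt deploy

The handler only accepts the "deploy" action; any other value logs "Unsupported action".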
@@ -11,6 +11,7 @@
    "prepack": "lerna run --stream prepack",
    "dev": "lerna run --stream prepack -- --incremental && lerna --ignore=@logto/integration-tests run --parallel dev",
    "start": "cd packages/core && NODE_ENV=production node . --from-root",
    "cli": "cd packages/core && logto",
    "alteration": "cd packages/core && pnpm alteration",
    "ci:build": "lerna run --stream build",
    "ci:lint": "lerna run --parallel lint",
packages/cli/jest.config.ts (new file, 1 line)
@@ -0,0 +1 @@
export { default } from '@silverhand/jest-config';
@@ -6,6 +6,9 @@
  "homepage": "https://github.com/logto-io/logto#readme",
  "license": "MPL-2.0",
  "main": "lib/index.js",
  "exports": {
    ".": "./lib"
  },
  "bin": {
    "logto": "bin/logto",
    "lg": "bin/logto"
@@ -20,11 +23,13 @@
  },
  "scripts": {
    "precommit": "lint-staged",
    "build": "rimraf lib && tsc",
    "build": "rimraf lib && tsc -p tsconfig.build.json",
    "start": "node .",
    "start:dev": "ts-node --files src/index.ts",
    "lint": "eslint --ext .ts src",
    "lint:report": "pnpm lint --format json --output-file report.json",
    "test": "jest",
    "test:ci": "jest",
    "prepack": "pnpm build"
  },
  "engines": {
@@ -56,15 +61,18 @@
  },
  "devDependencies": {
    "@silverhand/eslint-config": "1.0.0",
    "@silverhand/jest-config": "1.0.0",
    "@silverhand/ts-config": "1.0.0",
    "@types/decompress": "^4.2.4",
    "@types/fs-extra": "^9.0.13",
    "@types/inquirer": "^8.2.1",
    "@types/jest": "^28.1.6",
    "@types/node": "^16.0.0",
    "@types/semver": "^7.3.12",
    "@types/tar": "^6.1.2",
    "@types/yargs": "^17.0.13",
    "eslint": "^8.21.0",
    "jest": "^28.1.3",
    "lint-staged": "^13.0.0",
    "prettier": "^2.7.1",
    "rimraf": "^3.0.2",
packages/cli/src/commands/database/alteration.test.ts (new file, 40 lines)
@@ -0,0 +1,40 @@
import { createMockPool } from 'slonik';

import * as queries from '../../queries/logto-config';
import { QueryType } from '../../test-utilities';
import * as functions from './alteration';

const mockQuery: jest.MockedFunction<QueryType> = jest.fn();

const pool = createMockPool({
  query: async (sql, values) => {
    return mockQuery(sql, values);
  },
});

const files = Object.freeze([
  { filename: '1.0.0-1663923770-a.js', path: '/alterations/1.0.0-1663923770-a.js' },
  { filename: '1.0.0-1663923771-b.js', path: '/alterations/1.0.0-1663923771-b.js' },
  { filename: '1.0.0-1663923772-c.js', path: '/alterations/1.0.0-1663923772-c.js' },
]);

describe('getUndeployedAlterations()', () => {
  beforeEach(() => {
    // `getAlterationFiles()` will ensure the order
    jest.spyOn(functions, 'getAlterationFiles').mockResolvedValueOnce([...files]);
  });

  it('returns all files if database timestamp is 0', async () => {
    jest.spyOn(queries, 'getCurrentDatabaseAlterationTimestamp').mockResolvedValueOnce(0);

    await expect(functions.getUndeployedAlterations(pool)).resolves.toEqual(files);
  });

  it('returns files whose timestamp is greater than database timestamp', async () => {
    jest
      .spyOn(queries, 'getCurrentDatabaseAlterationTimestamp')
      .mockResolvedValueOnce(1_663_923_770);

    await expect(functions.getUndeployedAlterations(pool)).resolves.toEqual([files[1], files[2]]);
  });
});
@@ -36,7 +36,7 @@ const importAlterationScript = async (filePath: string): Promise<AlterationScrip

type AlterationFile = { path: string; filename: string };

const getAlterationFiles = async (): Promise<AlterationFile[]> => {
export const getAlterationFiles = async (): Promise<AlterationFile[]> => {
  const alterationDirectory = getPathInModule('@logto/schemas', 'alterations');
  // Until we migrate to ESM
  // eslint-disable-next-line unicorn/prefer-module
@@ -70,7 +70,7 @@ export const getLatestAlterationTimestamp = async () => {
  return getTimestampFromFileName(lastFile.filename);
};

const getUndeployedAlterations = async (pool: DatabasePool) => {
export const getUndeployedAlterations = async (pool: DatabasePool) => {
  const databaseTimestamp = await getCurrentDatabaseAlterationTimestamp(pool);
  const files = await getAlterationFiles();

@@ -111,7 +111,11 @@ const alteration: CommandModule<unknown, { action: string }> = {
      type: 'string',
      demandOption: true,
    }),
  handler: async () => {
  handler: async ({ action }) => {
    if (action !== 'deploy') {
      log.error('Unsupported action');
    }

    const pool = await createPoolFromConfig();
    const alterations = await getUndeployedAlterations(pool);

packages/cli/src/queries/logto-config.test.ts (new file, 102 lines)
@@ -0,0 +1,102 @@
import { LogtoConfigKey, LogtoConfigs } from '@logto/schemas';
import { createMockPool, createMockQueryResult, sql } from 'slonik';

import { convertToIdentifiers } from '../database';
import { expectSqlAssert, QueryType } from '../test-utilities';
import { updateDatabaseTimestamp, getCurrentDatabaseAlterationTimestamp } from './logto-config';

const mockQuery: jest.MockedFunction<QueryType> = jest.fn();

const pool = createMockPool({
  query: async (sql, values) => {
    return mockQuery(sql, values);
  },
});
const { table, fields } = convertToIdentifiers(LogtoConfigs);
const timestamp = 1_663_923_776;

describe('getCurrentDatabaseAlterationTimestamp()', () => {
  it('returns 0 if query failed (table not found)', async () => {
    mockQuery.mockRejectedValueOnce({ code: '42P01' });

    await expect(getCurrentDatabaseAlterationTimestamp(pool)).resolves.toBe(0);
  });

  it('returns 0 if the row is not found', async () => {
    const expectSql = sql`
      select * from ${table} where ${fields.key}=$1
    `;

    mockQuery.mockImplementationOnce(async (sql, values) => {
      expectSqlAssert(sql, expectSql.sql);
      expect(values).toEqual([LogtoConfigKey.AlterationState]);

      return createMockQueryResult([]);
    });

    await expect(getCurrentDatabaseAlterationTimestamp(pool)).resolves.toBe(0);
  });

  it('returns 0 if the value is in bad format', async () => {
    const expectSql = sql`
      select * from ${table} where ${fields.key}=$1
    `;

    mockQuery.mockImplementationOnce(async (sql, values) => {
      expectSqlAssert(sql, expectSql.sql);
      expect(values).toEqual([LogtoConfigKey.AlterationState]);

      return createMockQueryResult([{ value: 'some_value' }]);
    });

    await expect(getCurrentDatabaseAlterationTimestamp(pool)).resolves.toBe(0);
  });

  it('returns the timestamp from database', async () => {
    const expectSql = sql`
      select * from ${table} where ${fields.key}=$1
    `;

    mockQuery.mockImplementationOnce(async (sql, values) => {
      expectSqlAssert(sql, expectSql.sql);
      expect(values).toEqual([LogtoConfigKey.AlterationState]);

      // @ts-expect-error createMockQueryResult doesn't support jsonb
      return createMockQueryResult([{ value: { timestamp, updatedAt: 'now' } }]);
    });

    await expect(getCurrentDatabaseAlterationTimestamp(pool)).resolves.toEqual(timestamp);
  });
});

describe('updateDatabaseTimestamp()', () => {
  const expectSql = sql`
    insert into ${table} (${fields.key}, ${fields.value})
    values ($1, $2::jsonb)
    on conflict (${fields.key}) do update set ${fields.value}=excluded.${fields.value}
  `;
  const updatedAt = '2022-09-21T06:32:46.583Z';

  beforeAll(() => {
    jest.useFakeTimers();
    jest.setSystemTime(new Date(updatedAt));
  });

  afterAll(() => {
    jest.useRealTimers();
  });

  it('sends upsert sql with timestamp and updatedAt', async () => {
    mockQuery.mockImplementationOnce(async (sql, values) => {
      expectSqlAssert(sql, expectSql.sql);
      expect(values).toEqual([
        LogtoConfigKey.AlterationState,
        JSON.stringify({ timestamp, updatedAt }),
      ]);

      return createMockQueryResult([]);
    });

    await updateDatabaseTimestamp(pool, timestamp);
  });
});
packages/cli/src/test-utilities.ts (new file, 26 lines)
@@ -0,0 +1,26 @@
// Copied from core

import { QueryResult, QueryResultRow } from 'slonik';
import { PrimitiveValueExpression } from 'slonik/dist/src/types.d';

export type QueryType = (
  sql: string,
  values: readonly PrimitiveValueExpression[]
) => Promise<QueryResult<QueryResultRow>>;

/**
 * Slonik Query Mock Utils
 **/
export const expectSqlAssert = (sql: string, expectSql: string) => {
  expect(
    sql
      .split('\n')
      .map((row) => row.trim())
      .filter(Boolean)
  ).toEqual(
    expectSql
      .split('\n')
      .map((row) => row.trim())
      .filter(Boolean)
  );
};
packages/cli/tsconfig.build.json (new file, 4 lines)
@@ -0,0 +1,4 @@
{
  "extends": "./tsconfig",
  "include": ["src"],
}
@@ -7,7 +7,8 @@
    "target": "es2022"
  },
  "include": [
    "src"
    "src",
    "jest.config.ts"
  ],
  "exclude": ["**/alteration-scripts"]
}
packages/cli/tsconfig.test.json (new file, 6 lines)
@@ -0,0 +1,6 @@
{
  "extends": "./tsconfig",
  "compilerOptions": {
    "allowJs": true
  }
}
@@ -16,7 +16,7 @@
    "start": "NODE_ENV=production node build/index.js",
    "add-connector": "node build/cli/add-connector.js",
    "add-official-connectors": "node build/cli/add-official-connectors.js",
    "alteration": "node build/cli/alteration.js",
    "alteration": "logto db alt",
    "cli": "logto",
    "test": "jest",
    "test:ci": "jest --coverage --silent",
@@ -1,3 +0,0 @@
export const logtoConfigsTableFilePath = 'node_modules/@logto/schemas/tables/logto_configs.sql';
export const alterationFilesDirectorySource = 'node_modules/@logto/schemas/alterations';
export const alterationFilesDirectory = 'alterations/';
@@ -1,192 +0,0 @@
import { LogtoConfigKey, LogtoConfigs } from '@logto/schemas';
import { createMockPool, createMockQueryResult, sql } from 'slonik';

import { convertToIdentifiers } from '@/database/utils';
import { QueryType, expectSqlAssert } from '@/utils/test-utils';

import * as functions from '.';

const mockQuery: jest.MockedFunction<QueryType> = jest.fn();
const {
  createLogtoConfigsTable,
  isLogtoConfigsTableExists,
  updateDatabaseTimestamp,
  getCurrentDatabaseTimestamp,
  getUndeployedAlterations,
} = functions;
const pool = createMockPool({
  query: async (sql, values) => {
    return mockQuery(sql, values);
  },
});
const { table, fields } = convertToIdentifiers(LogtoConfigs);
const timestamp = 1_663_923_776;

describe('isLogtoConfigsTableExists()', () => {
  it('generates "select exists" sql and query for result', async () => {
    const expectSql = sql`
      select exists (
        select from
          pg_tables
        where
          tablename = $1
      );
    `;

    mockQuery.mockImplementationOnce(async (sql, values) => {
      expectSqlAssert(sql, expectSql.sql);
      expect(values).toEqual([LogtoConfigs.table]);

      return createMockQueryResult([{ exists: true }]);
    });

    await expect(isLogtoConfigsTableExists(pool)).resolves.toEqual(true);
  });
});

describe('getCurrentDatabaseTimestamp()', () => {
  it('returns null if query failed (table not found)', async () => {
    mockQuery.mockRejectedValueOnce(new Error('table not found'));

    await expect(getCurrentDatabaseTimestamp(pool)).resolves.toBeNull();
  });

  it('returns null if the row is not found', async () => {
    const expectSql = sql`
      select * from ${table} where ${fields.key}=$1
    `;

    mockQuery.mockImplementationOnce(async (sql, values) => {
      expectSqlAssert(sql, expectSql.sql);
      expect(values).toEqual([LogtoConfigKey.AlterationState]);

      return createMockQueryResult([]);
    });

    await expect(getCurrentDatabaseTimestamp(pool)).resolves.toBeNull();
  });

  it('returns null if the value is in bad format', async () => {
    const expectSql = sql`
      select * from ${table} where ${fields.key}=$1
    `;

    mockQuery.mockImplementationOnce(async (sql, values) => {
      expectSqlAssert(sql, expectSql.sql);
      expect(values).toEqual([LogtoConfigKey.AlterationState]);

      return createMockQueryResult([{ value: 'some_value' }]);
    });

    await expect(getCurrentDatabaseTimestamp(pool)).resolves.toBeNull();
  });

  it('returns the timestamp from database', async () => {
    const expectSql = sql`
      select * from ${table} where ${fields.key}=$1
    `;

    mockQuery.mockImplementationOnce(async (sql, values) => {
      expectSqlAssert(sql, expectSql.sql);
      expect(values).toEqual([LogtoConfigKey.AlterationState]);

      // @ts-expect-error createMockQueryResult doesn't support jsonb
      return createMockQueryResult([{ value: { timestamp, updatedAt: 'now' } }]);
    });

    await expect(getCurrentDatabaseTimestamp(pool)).resolves.toEqual(timestamp);
  });
});

describe('createLogtoConfigsTable()', () => {
  it('sends sql to create target table', async () => {
    mockQuery.mockImplementationOnce(async (sql, values) => {
      expect(sql).toContain(LogtoConfigs.table);
      expect(sql).toContain('create table');

      return createMockQueryResult([]);
    });

    await createLogtoConfigsTable(pool);
  });
});

describe('updateDatabaseTimestamp()', () => {
  const expectSql = sql`
    insert into ${table} (${fields.key}, ${fields.value})
    values ($1, $2::jsonb)
    on conflict (${fields.key}) do update set ${fields.value}=excluded.${fields.value}
  `;
  const updatedAt = '2022-09-21T06:32:46.583Z';

  beforeAll(() => {
    jest.useFakeTimers();
    jest.setSystemTime(new Date(updatedAt));
  });

  afterAll(() => {
    jest.useRealTimers();
  });

  it('calls createLogtoConfigsTable() if table does not exist', async () => {
    mockQuery.mockImplementationOnce(async (sql, values) => {
      expectSqlAssert(sql, expectSql.sql);

      return createMockQueryResult([]);
    });

    const mockCreateLogtoConfigsTable = jest
      .spyOn(functions, 'createLogtoConfigsTable')
      .mockImplementationOnce(jest.fn());
    jest.spyOn(functions, 'isLogtoConfigsTableExists').mockResolvedValueOnce(false);

    await updateDatabaseTimestamp(pool, timestamp);
    expect(mockCreateLogtoConfigsTable).toHaveBeenCalled();
  });

  it('sends upsert sql with timestamp and updatedAt', async () => {
    mockQuery.mockImplementationOnce(async (sql, values) => {
      expectSqlAssert(sql, expectSql.sql);
      expect(values).toEqual([
        LogtoConfigKey.AlterationState,
        JSON.stringify({ timestamp, updatedAt }),
      ]);

      return createMockQueryResult([]);
    });
    jest.spyOn(functions, 'isLogtoConfigsTableExists').mockResolvedValueOnce(true);

    await updateDatabaseTimestamp(pool, timestamp);
  });
});

describe('getUndeployedAlterations()', () => {
  beforeEach(() => {
    jest
      .spyOn(functions, 'getAlterationFiles')
      .mockResolvedValueOnce([
        '1.0.0-1663923770-a.js',
        '1.0.0-1663923772-c.js',
        '1.0.0-1663923771-b.js',
      ]);
  });

  it('returns all files with right order if database timestamp is null', async () => {
    jest.spyOn(functions, 'getCurrentDatabaseTimestamp').mockResolvedValueOnce(null);

    await expect(getUndeployedAlterations(pool)).resolves.toEqual([
      '1.0.0-1663923770-a.js',
      '1.0.0-1663923771-b.js',
      '1.0.0-1663923772-c.js',
    ]);
  });

  it('returns files whose timestamp is greater than database timestamp', async () => {
    jest.spyOn(functions, 'getCurrentDatabaseTimestamp').mockResolvedValueOnce(1_663_923_770);

    await expect(getUndeployedAlterations(pool)).resolves.toEqual([
      '1.0.0-1663923771-b.js',
      '1.0.0-1663923772-c.js',
    ]);
  });
});
@@ -1,163 +0,0 @@
import { existsSync } from 'fs';
import { readdir, readFile } from 'fs/promises';
import path from 'path';

import {
  LogtoConfig,
  LogtoConfigs,
  AlterationState,
  alterationStateGuard,
  LogtoConfigKey,
} from '@logto/schemas';
import { AlterationScript } from '@logto/schemas/lib/types/alteration';
import { conditionalString } from '@silverhand/essentials';
import chalk from 'chalk';
import { copy, remove } from 'fs-extra';
import { DatabasePool, sql } from 'slonik';
import { raw } from 'slonik-sql-tag-raw';

import { convertToIdentifiers } from '@/database/utils';

import {
  logtoConfigsTableFilePath,
  alterationFilesDirectory,
  alterationFilesDirectorySource,
} from './constants';
import { getTimestampFromFileName, alterationFileNameRegex } from './utils';

const { table, fields } = convertToIdentifiers(LogtoConfigs);

export const isLogtoConfigsTableExists = async (pool: DatabasePool) => {
  const { exists } = await pool.one<{ exists: boolean }>(sql`
    select exists (
      select from
        pg_tables
      where
        tablename = ${LogtoConfigs.table}
    );
  `);

  return exists;
};

export const getCurrentDatabaseTimestamp = async (pool: DatabasePool) => {
  try {
    const query = await pool.maybeOne<LogtoConfig>(
      sql`select * from ${table} where ${fields.key}=${LogtoConfigKey.AlterationState}`
    );
    const { timestamp } = alterationStateGuard.parse(query?.value);

    return timestamp;
  } catch {
    return null;
  }
};

export const createLogtoConfigsTable = async (pool: DatabasePool) => {
  const tableQuery = await readFile(logtoConfigsTableFilePath, 'utf8');
  await pool.query(sql`${raw(tableQuery)}`);
};

export const updateDatabaseTimestamp = async (pool: DatabasePool, timestamp?: number) => {
  if (!(await isLogtoConfigsTableExists(pool))) {
    await createLogtoConfigsTable(pool);
  }

  const value: AlterationState = {
    timestamp: timestamp ?? (await getLatestAlterationTimestamp()),
    updatedAt: new Date().toISOString(),
  };

  await pool.query(
    sql`
      insert into ${table} (${fields.key}, ${fields.value})
      values (${LogtoConfigKey.AlterationState}, ${sql.jsonb(value)})
      on conflict (${fields.key}) do update set ${fields.value}=excluded.${fields.value}
    `
  );
};

export const getLatestAlterationTimestamp = async () => {
  const files = await getAlterationFiles();

  const latestFile = files[files.length - 1];

  if (!latestFile) {
    throw new Error('No alteration files found.');
  }

  return getTimestampFromFileName(latestFile);
};

export const getAlterationFiles = async () => {
  if (!existsSync(alterationFilesDirectorySource)) {
    return [];
  }

  await remove(alterationFilesDirectory);
  await copy(alterationFilesDirectorySource, alterationFilesDirectory);

  const directory = await readdir(alterationFilesDirectory);
  const files = directory.filter((file) => alterationFileNameRegex.test(file));

  return files
    .slice()
    .sort((file1, file2) => getTimestampFromFileName(file1) - getTimestampFromFileName(file2));
};

export const getUndeployedAlterations = async (pool: DatabasePool) => {
  const databaseTimestamp = await getCurrentDatabaseTimestamp(pool);
  const files = await getAlterationFiles();

  return files
    .filter((file) => !databaseTimestamp || getTimestampFromFileName(file) > databaseTimestamp)
    .slice()
    .sort((file1, file2) => getTimestampFromFileName(file1) - getTimestampFromFileName(file2));
};

const importAlteration = async (file: string): Promise<AlterationScript> => {
  // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
  const module = await import(path.join(process.cwd(), alterationFilesDirectory, file));

  // eslint-disable-next-line no-restricted-syntax
  return module.default as AlterationScript;
};

const deployAlteration = async (pool: DatabasePool, file: string) => {
  const { up } = await importAlteration(file);

  try {
    await pool.transaction(async (connect) => {
      await up(connect);
    });
  } catch (error: unknown) {
    if (error instanceof Error) {
      console.log(`${chalk.red('[alteration]')} run ${file} failed: ${error.message}.`);

      return;
    }

    throw error;
  }

  await updateDatabaseTimestamp(pool, getTimestampFromFileName(file));
  console.log(`${chalk.blue('[alteration]')} run ${file} succeeded.`);
};

export const deployAlterations = async (pool: DatabasePool) => {
  const alterations = await getUndeployedAlterations(pool);

  console.log(
    `${chalk.blue('[alteration]')} found ${alterations.length} alteration${conditionalString(
      alterations.length > 1 && 's'
    )}`
  );

  // The await inside the loop is intended, alterations should run in order
  for (const alteration of alterations) {
    // eslint-disable-next-line no-await-in-loop
    await deployAlteration(pool, alteration);
  }

  console.log(`${chalk.blue('[alteration]')} ✓ done`);
};
@@ -1,15 +0,0 @@
import { getTimestampFromFileName } from './utils';

describe('getTimestampFromFileName()', () => {
  it('should get for 1.0.0-1663923211.js', () => {
    expect(getTimestampFromFileName('1.0.0-1663923211.js')).toEqual(1_663_923_211);
  });

  it('should get for 1.0.0-1663923211-user-table.js', () => {
    expect(getTimestampFromFileName('1.0.0-1663923211-user-table.js')).toEqual(1_663_923_211);
  });

  it('should throw for 166392321.js', () => {
    expect(() => getTimestampFromFileName('166392321.js')).toThrowError();
  });
});
@@ -1,11 +0,0 @@
export const alterationFileNameRegex = /-(\d{10,11})-?.*\.js$/;

export const getTimestampFromFileName = (fileName: string) => {
  const match = alterationFileNameRegex.exec(fileName);

  if (!match?.[1]) {
    throw new Error(`Can not get timestamp: ${fileName}`);
  }

  return Number(match[1]);
};
@@ -1,23 +0,0 @@
import 'module-alias/register';
import { assertEnv } from '@silverhand/essentials';
import { createPool } from 'slonik';

import { deployAlterations } from '@/alteration';
import { configDotEnv } from '@/env-set/dot-env';

configDotEnv();

const deploy = async () => {
  const databaseUrl = assertEnv('DB_URL');
  const pool = await createPool(databaseUrl);
  await deployAlterations(pool);
  await pool.end();
};

const command = process.argv[2];

if (command !== 'deploy') {
  throw new Error('Unsupported command.');
}

void deploy();
@@ -1,10 +1,6 @@
import inquirer from 'inquirer';
import { getUndeployedAlterations } from '@logto/cli/lib/commands/database/alteration';
import { DatabasePool } from 'slonik';

import { getUndeployedAlterations, deployAlterations } from '@/alteration';

import { allYes } from './parameters';

export const checkAlterationState = async (pool: DatabasePool) => {
  const alterations = await getUndeployedAlterations(pool);

@ -12,23 +8,7 @@ export const checkAlterationState = async (pool: DatabasePool) => {
|
|||
return;
|
||||
}
|
||||
|
||||
const error = new Error(
|
||||
throw new Error(
|
||||
`Found undeployed database alterations, you must deploy them first by "npm run alteration deploy" command, reference: https://docs.logto.io/docs/recipes/deployment/#database-alteration`
|
||||
);
|
||||
|
||||
if (allYes) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
const deploy = await inquirer.prompt({
|
||||
type: 'confirm',
|
||||
name: 'value',
|
||||
message: `Found undeployed alterations, would you like to deploy now?`,
|
||||
});
|
||||
|
||||
if (!deploy.value) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
await deployAlterations(pool);
|
||||
};
|
||||
|
|
|
@@ -7,6 +7,6 @@
  "include": [
    "src",
    "alterations",
    "jest.config.ts", "alterations",
    "jest.config.ts"
  ]
}
@@ -23,10 +23,12 @@ importers:
      '@logto/schemas': ^1.0.0-beta.10
      '@silverhand/eslint-config': 1.0.0
      '@silverhand/essentials': ^1.2.1
      '@silverhand/jest-config': 1.0.0
      '@silverhand/ts-config': 1.0.0
      '@types/decompress': ^4.2.4
      '@types/fs-extra': ^9.0.13
      '@types/inquirer': ^8.2.1
      '@types/jest': ^28.1.6
      '@types/node': ^16.0.0
      '@types/semver': ^7.3.12
      '@types/tar': ^6.1.2
@@ -39,6 +41,7 @@ importers:
      got: ^11.8.2
      hpagent: ^1.0.0
      inquirer: ^8.2.2
      jest: ^28.1.3
      lint-staged: ^13.0.0
      nanoid: ^3.3.4
      ora: ^5.0.0
@@ -76,15 +79,18 @@ importers:
      zod: 3.18.0
    devDependencies:
      '@silverhand/eslint-config': 1.0.0_swk2g7ygmfleszo5c33j4vooni
      '@silverhand/jest-config': 1.0.0_bi2kohzqnxavgozw3csgny5hju
      '@silverhand/ts-config': 1.0.0_typescript@4.7.4
      '@types/decompress': 4.2.4
      '@types/fs-extra': 9.0.13
      '@types/inquirer': 8.2.1
      '@types/jest': 28.1.6
      '@types/node': 16.11.12
      '@types/semver': 7.3.12
      '@types/tar': 6.1.2
      '@types/yargs': 17.0.13
      eslint: 8.21.0
      jest: 28.1.3_k5ytkvaprncdyzidqqws5bqksq
      lint-staged: 13.0.0
      prettier: 2.7.1
      rimraf: 3.0.2
@@ -696,7 +702,7 @@
    resolution: {integrity: sha512-hoyByceqwKirw7w3Z7gnIIZC3Wx3J484Y3L/cMpXFbr7d9ZQj2mODrirNzcJa+SM3UlpWXYvKV4RlRpFXlWgXg==}
    engines: {node: '>=6.0.0'}
    dependencies:
      '@jridgewell/trace-mapping': 0.3.4
      '@jridgewell/trace-mapping': 0.3.15
    dev: true

  /@babel/code-frame/7.16.7:
@@ -1557,7 +1563,7 @@
      '@jest/test-result': 28.1.3
      '@jest/transform': 28.1.3
      '@jest/types': 28.1.3
      '@jridgewell/trace-mapping': 0.3.14
      '@jridgewell/trace-mapping': 0.3.15
      '@types/node': 17.0.23
      chalk: 4.1.2
      collect-v8-coverage: 1.0.1
@@ -1592,7 +1598,7 @@
    resolution: {integrity: sha512-cV8Lx3BeStJb8ipPHnqVw/IM2VCMWO3crWZzYodSIkxXnRcXJipCdx1JCK0K5MsJJouZQTH73mzf4vgxRaH9ww==}
    engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0}
    dependencies:
      '@jridgewell/trace-mapping': 0.3.14
      '@jridgewell/trace-mapping': 0.3.15
      callsites: 3.1.0
      graceful-fs: 4.2.9
    dev: true
@@ -1623,7 +1629,7 @@
    dependencies:
      '@babel/core': 7.17.9
      '@jest/types': 28.1.3
      '@jridgewell/trace-mapping': 0.3.14
      '@jridgewell/trace-mapping': 0.3.15
      babel-plugin-istanbul: 6.1.1
      chalk: 4.1.2
      convert-source-map: 1.8.0
@@ -1659,7 +1665,7 @@
      '@types/istanbul-lib-coverage': 2.0.3
      '@types/istanbul-reports': 3.0.1
      '@types/node': 17.0.23
      '@types/yargs': 17.0.10
      '@types/yargs': 17.0.13
      chalk: 4.1.2
    dev: true

@@ -1672,11 +1678,6 @@
      '@jridgewell/trace-mapping': 0.3.15
    dev: true

  /@jridgewell/resolve-uri/3.0.5:
    resolution: {integrity: sha512-VPeQ7+wH0itvQxnG+lIzWgkysKIr3L9sslimFW55rHMdGu/qCQ5z5h9zq4gI8uBtqkpHhsF4Z/OwExufUCThew==}
    engines: {node: '>=6.0.0'}
    dev: true

  /@jridgewell/resolve-uri/3.1.0:
    resolution: {integrity: sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==}
    engines: {node: '>=6.0.0'}
@@ -1694,21 +1695,10 @@
      '@jridgewell/trace-mapping': 0.3.15
    dev: true

  /@jridgewell/sourcemap-codec/1.4.11:
    resolution: {integrity: sha512-Fg32GrJo61m+VqYSdRSjRXMjQ06j8YIYfcTqndLYVAaHmroZHLJZCydsWBOTDqXS2v+mjxohBWEMfg97GXmYQg==}
    dev: true

  /@jridgewell/sourcemap-codec/1.4.14:
    resolution: {integrity: sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==}
    dev: true

  /@jridgewell/trace-mapping/0.3.14:
    resolution: {integrity: sha512-bJWEfQ9lPTvm3SneWwRFVLzrh6nhjwqw7TUFFBEMzwvg7t7PCDenf2lDwqo4NQXzdpgBXyFgDWnQA+2vkruksQ==}
    dependencies:
      '@jridgewell/resolve-uri': 3.0.5
      '@jridgewell/sourcemap-codec': 1.4.11
    dev: true

  /@jridgewell/trace-mapping/0.3.15:
    resolution: {integrity: sha512-oWZNOULl+UbhsgB51uuZzglikfIKSUBO/M9W2OfEjn7cmqoAiCgmv9lyACTUacZwBz0ITnJ2NqjU8Tx0DHL88g==}
    dependencies:
@@ -1716,13 +1706,6 @@
      '@jridgewell/sourcemap-codec': 1.4.14
    dev: true

  /@jridgewell/trace-mapping/0.3.4:
    resolution: {integrity: sha512-vFv9ttIedivx0ux3QSjhgtCVjPZd5l46ZOMDSCwnH1yUO2e964gO8LZGyv2QkqcgR6TnBU1v+1IFqmeoG+0UJQ==}
    dependencies:
      '@jridgewell/resolve-uri': 3.0.5
      '@jridgewell/sourcemap-codec': 1.4.11
    dev: true

  /@jridgewell/trace-mapping/0.3.9:
    resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==}
    dependencies:
@@ -4740,12 +4723,6 @@
      '@types/yargs-parser': 20.2.1
    dev: true

  /@types/yargs/17.0.10:
    resolution: {integrity: sha512-gmEaFwpj/7f/ROdtIlci1R1VYU1J4j95m8T+Tj3iBgiBFKg1foE/PSl93bBd5T9LDXNPo8UlNN6W0qwD8O5OaA==}
    dependencies:
      '@types/yargs-parser': 20.2.1
    dev: true

  /@types/yargs/17.0.13:
    resolution: {integrity: sha512-9sWaruZk2JGxIQU+IhI1fhPYRcQ0UuTNuKuCW9bR5fp7qi2Llf7WDzNa17Cy7TKnh3cdxDOiyTu6gaLS0eDatg==}
    dependencies:
@@ -5826,7 +5803,7 @@
    dev: false

  /co/4.6.0:
    resolution: {integrity: sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ=}
    resolution: {integrity: sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==}
    engines: {iojs: '>= 1.0.0', node: '>= 0.12.0'}

  /code-point-at/1.1.0:
@@ -6443,7 +6420,7 @@
      mimic-response: 3.1.0

  /dedent/0.7.0:
    resolution: {integrity: sha1-JJXduvbrh0q7Dhvp3yLS5aVEMmw=}
    resolution: {integrity: sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA==}
    dev: true

  /deep-equal/1.0.1:
@@ -9162,7 +9139,7 @@
      jest-util: 28.1.3
      jest-validate: 28.1.3
      prompts: 2.4.2
      yargs: 17.4.1
      yargs: 17.6.0
    transitivePeerDependencies:
      - '@types/node'
      - supports-color
@@ -15395,7 +15372,7 @@
    resolution: {integrity: sha512-74Y4LqY74kLE6IFyIjPtkSTWzUZmj8tdHT9Ii/26dvQ6K9Dl2NbEfj0XgU2sHCtKgt5VupqhlO/5aWuqS+IY1w==}
    engines: {node: '>=10.12.0'}
    dependencies:
      '@jridgewell/trace-mapping': 0.3.14
      '@jridgewell/trace-mapping': 0.3.15
      '@types/istanbul-lib-coverage': 2.0.3
      convert-source-map: 1.8.0
    dev: true