diff --git a/package.json b/package.json
index bf98e7676..1d0cd2a61 100644
--- a/package.json
+++ b/package.json
@@ -11,6 +11,7 @@
     "prepack": "lerna run --stream prepack",
     "dev": "lerna run --stream prepack -- --incremental && lerna --ignore=@logto/integration-test run --parallel dev",
     "start": "cd packages/core && NODE_ENV=production node . --from-root",
+    "migration-deploy": "cd packages/core && pnpm migration-deploy",
     "ci:build": "lerna run --stream build",
     "ci:lint": "lerna run --parallel lint",
     "ci:stylelint": "lerna run --parallel stylelint",
diff --git a/packages/core/src/env-set/check-migration-state.ts b/packages/core/src/env-set/check-migration-state.ts
new file mode 100644
index 000000000..6ce0c13cb
--- /dev/null
+++ b/packages/core/src/env-set/check-migration-state.ts
@@ -0,0 +1,34 @@
+import inquirer from 'inquirer';
+import { DatabasePool } from 'slonik';
+
+import { getUndeployedMigrations, runMigrations } from '@/migration';
+
+import { allYes } from './parameters';
+
+export const checkMigrationState = async (pool: DatabasePool) => {
+  const migrations = await getUndeployedMigrations(pool);
+
+  if (migrations.length === 0) {
+    return;
+  }
+
+  const error = new Error(
+    `Found undeployed migrations. Deploy them first with the "pnpm migration-deploy" command. Reference: https://docs.logto.io/docs/recipes/deployment/#migration`
+  );
+
+  if (allYes) {
+    throw error;
+  }
+
+  const deploy = await inquirer.prompt({
+    type: 'confirm',
+    name: 'value',
+    message: `Found undeployed migrations, would you like to deploy them now?`,
+  });
+
+  if (!deploy.value) {
+    throw error;
+  }
+
+  await runMigrations(pool);
+};
diff --git a/packages/core/src/env-set/index.ts b/packages/core/src/env-set/index.ts
index 2f7487375..3383417e5 100644
--- a/packages/core/src/env-set/index.ts
+++ b/packages/core/src/env-set/index.ts
@@ -6,6 +6,7 @@ import { DatabasePool } from 'slonik';
 import { appendPath } from '@/utils/url';
 
 import { addConnectors } from './add-connectors';
+import { checkMigrationState } from './check-migration-state';
 import createPoolByEnv from './create-pool-by-env';
 import loadOidcValues from './oidc';
 import { isTrue } from './parameters';
@@ -80,6 +81,10 @@ function createEnvSet() {
       values = await loadEnvValues();
       pool = await createPoolByEnv(values.isTest);
       await addConnectors(values.connectorDirectory);
+
+      if (pool) {
+        await checkMigrationState(pool);
+      }
     },
   };
 }
diff --git a/packages/core/src/migration/index.test.ts b/packages/core/src/migration/index.test.ts
index 7e773b1a6..f1d87a33d 100644
--- a/packages/core/src/migration/index.test.ts
+++ b/packages/core/src/migration/index.test.ts
@@ -22,17 +22,6 @@ const pool = createMockPool({
   },
 });
 const { table, fields } = convertToIdentifiers(LogtoConfigs);
-const existsSync = jest.fn();
-const readdir = jest.fn();
-
-jest.mock('fs', () => ({
-  existsSync: () => existsSync(),
-}));
-
-jest.mock('fs/promises', () => ({
-  ...jest.requireActual('fs/promises'),
-  readdir: async () => readdir(),
-}));
 
 describe('isLogtoConfigsTableExists()', () => {
   it('generates "select exists" sql and query for result', async () => {
@@ -171,20 +160,6 @@ describe('updateDatabaseVersion()', () => {
   });
 });
 
-describe('getMigrationFiles()', () => {
-  it('returns [] if directory does not exist', async () => {
-    existsSync.mockReturnValueOnce(false);
-    await expect(getMigrationFiles()).resolves.toEqual([]);
-  });
-
-  it('returns files without "next"', async () => {
-    existsSync.mockReturnValueOnce(true);
-    readdir.mockResolvedValueOnce(['next.js', '1.0.0.js', '1.0.2.js', '1.0.1.js']);
-
-    await expect(getMigrationFiles()).resolves.toEqual(['1.0.0.js', '1.0.2.js', '1.0.1.js']);
-  });
-});
-
 describe('getUndeployedMigrations()', () => {
   beforeEach(() => {
     jest
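
Note (not part of the diff): a minimal sketch of how the new `checkMigrationState` could be unit-tested, reusing the lazy `jest.fn()` wrapper pattern that this change removes from `packages/core/src/migration/index.test.ts`. The mocked module specifiers, the `@` path alias resolution in Jest, and the mocked return values are illustrative assumptions, not code from this change.

```ts
// Hypothetical test sketch for checkMigrationState(); not part of this change.
import { createMockPool, createMockQueryResult } from 'slonik';

import { checkMigrationState } from '@/env-set/check-migration-state';

const getUndeployedMigrations = jest.fn();
const runMigrations = jest.fn();

// Assumed shape of '@/migration': the two functions imported by check-migration-state.ts.
// The arrow-function wrappers defer dereferencing until the mocks are actually called.
jest.mock('@/migration', () => ({
  getUndeployedMigrations: async () => getUndeployedMigrations(),
  runMigrations: async () => runMigrations(),
}));

// Force the non-interactive path so the check throws instead of prompting via inquirer.
jest.mock('@/env-set/parameters', () => ({ allYes: true }));

const pool = createMockPool({
  query: async () => createMockQueryResult([]),
});

describe('checkMigrationState()', () => {
  it('resolves without deploying when there are no undeployed migrations', async () => {
    getUndeployedMigrations.mockReturnValueOnce([]);

    await expect(checkMigrationState(pool)).resolves.toBeUndefined();
    expect(runMigrations).not.toHaveBeenCalled();
  });

  it('throws when undeployed migrations exist and prompting is disabled', async () => {
    getUndeployedMigrations.mockReturnValueOnce(['1.0.0.js']);

    await expect(checkMigrationState(pool)).rejects.toThrow();
  });
});
```

Wrapping the `jest.fn()` instances in arrow functions keeps the hoisted `jest.mock` factories from touching the variables before they are initialized, which is the same reason the removed `fs` and `fs/promises` mocks were written that way.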