
feat(core): update migration state after db init

wangsijie 2022-09-26 12:02:52 +08:00
parent bb4bfd3d41
commit f904b88f56
3 changed files with 22 additions and 5 deletions


@@ -8,6 +8,7 @@ import { createPool, parseDsn, sql, stringifyDsn } from 'slonik';
 import { createInterceptors } from 'slonik-interceptor-preset';
 import { raw } from 'slonik-sql-tag-raw';
+import { updateDatabaseTimestamp } from '@/migration';
 import { buildApplicationSecret } from '@/utils/id';
 import { convertToPrimitiveOrSql } from './utils';
@@ -79,6 +80,9 @@ export const createDatabaseCli = async (dsn: string) => {
     await pool.query(sql`${raw(query)}`);
     console.log(`${chalk.blue('[create-tables]')} Run ${file} succeeded.`);
   }
+
+  await updateDatabaseTimestamp(pool);
+  console.log(`${chalk.blue('[create-tables]')} Update migration state succeeded.`);
 };

 const seedTables = async () => {
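In the database CLI above, the table-creation loop now ends by recording the migration state. A minimal sketch of the resulting flow, where the `createTables` wrapper and its `queries` argument are illustrative stand-ins rather than the actual CLI code:

import chalk from 'chalk';
import { sql, type DatabasePool } from 'slonik';
import { raw } from 'slonik-sql-tag-raw';

import { updateDatabaseTimestamp } from '@/migration';

const createTables = async (pool: DatabasePool, queries: Array<{ file: string; query: string }>) => {
  for (const { file, query } of queries) {
    await pool.query(sql`${raw(query)}`);
    console.log(`${chalk.blue('[create-tables]')} Run ${file} succeeded.`);
  }

  // New in this commit: record the latest migration timestamp right after the
  // base tables are created, so a fresh database starts out "fully migrated".
  await updateDatabaseTimestamp(pool);
  console.log(`${chalk.blue('[create-tables]')} Update migration state succeeded.`);
};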


@@ -82,8 +82,7 @@ function createEnvSet() {
     pool = await createPoolByEnv(values.isTest);
     await addConnectors(values.connectorDirectory);

-    // FIXME: @sijie temparaly disable migration for integration test
-    if (pool && !values.isIntegrationTest) {
+    if (pool) {
       await checkMigrationState(pool);
     }
   },
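This env-set change drops the temporary integration-test guard: because the CLI now writes the migration state during init (previous file), a freshly created test database already reports the latest timestamp and the check can run unconditionally. A hypothetical, self-contained illustration of why such a check passes on a fresh database; the two stubs below stand in for Logto's real helpers and are not taken from the codebase:

// Stand-ins for the real helpers; on a fresh database the CLI has just written
// the latest timestamp, so both values agree.
const getLatestMigrationTimestampStub = async () => 1_664_000_000;
const getDeployedTimestampStub = async () => 1_664_000_000;

const checkMigrationStateSketch = async () => {
  const [latest, deployed] = await Promise.all([
    getLatestMigrationTimestampStub(),
    getDeployedTimestampStub(),
  ]);

  if (deployed < latest) {
    throw new Error('Database is behind; run the undeployed migrations first.');
  }
};

void checkMigrationStateSketch(); // resolves without throwing on a freshly initialized database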


@@ -51,13 +51,13 @@ export const createLogtoConfigsTable = async (pool: DatabasePool) => {
   await pool.query(sql`${raw(tableQuery)}`);
 };

-export const updateDatabaseTimestamp = async (pool: DatabasePool, timestamp: number) => {
+export const updateDatabaseTimestamp = async (pool: DatabasePool, timestamp?: number) => {
   if (!(await isLogtoConfigsTableExists(pool))) {
     await createLogtoConfigsTable(pool);
   }

   const value: MigrationState = {
-    timestamp,
+    timestamp: timestamp ?? (await getLatestMigrationTiemstamp()),
     updatedAt: new Date().toISOString(),
   };
@@ -70,6 +70,18 @@ export const updateDatabaseTimestamp = async (pool: DatabasePool, timestamp: number) => {
   );
 };

+export const getLatestMigrationTiemstamp = async () => {
+  const files = await getMigrationFiles();
+  const latestFile = files[files.length - 1];
+
+  if (!latestFile) {
+    throw new Error('No migration files found.');
+  }
+
+  return getTimestampFromFileName(latestFile);
+};
+
 export const getMigrationFiles = async () => {
   if (!existsSync(migrationFilesDirectory)) {
     return [];
@@ -78,7 +90,9 @@ export const getMigrationFiles = async () => {
   const directory = await readdir(migrationFilesDirectory);
   const files = directory.filter((file) => migrationFileNameRegex.test(file));

-  return files;
+  return files
+    .slice()
+    .sort((file1, file2) => getTimestampFromFileName(file1) - getTimestampFromFileName(file2));
 };

 export const getUndeployedMigrations = async (pool: DatabasePool) => {
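In the migration module, getMigrationFiles now returns files sorted by the timestamp parsed from their names, so the new getLatestMigrationTiemstamp helper can take the last entry as the newest migration, and updateDatabaseTimestamp falls back to it when no explicit timestamp is passed. A rough usage sketch of the updated API, assuming both helpers are exported from '@/migration' the same way updateDatabaseTimestamp is in the first file; pool handling is elided:

import type { DatabasePool } from 'slonik';

import { getLatestMigrationTiemstamp, updateDatabaseTimestamp } from '@/migration';

// After initializing a fresh schema, mark it as fully migrated; with no second
// argument the latest migration file's timestamp is used.
const markFreshDatabase = async (pool: DatabasePool) => updateDatabaseTimestamp(pool);

// When deploying a single migration, the exact timestamp can still be pinned.
const markDeployedMigration = async (pool: DatabasePool, timestamp: number) =>
  updateDatabaseTimestamp(pool, timestamp);

// The newest known migration timestamp can also be read directly.
const logLatestTimestamp = async () => {
  console.log('Latest migration timestamp:', await getLatestMigrationTiemstamp());
};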