Mirror of https://github.com/logto-io/logto.git (synced 2024-12-16 20:26:19 -05:00)
refactor: remove database seed from core

Parent: c324e29df3
Commit: a3dc967442

10 changed files with 183 additions and 279 deletions
@@ -93,7 +93,7 @@ const deployAlteration = async (

    await pool.end();
    log.error(
      `Error ocurred during running alteration ${chalk.green(filename)}.\n\n` +
      `Error ocurred during running alteration ${chalk.blue(filename)}.\n\n` +
        " This alteration didn't change anything since it was in a transaction.\n" +
        ' Try to fix the error and deploy again.'
    );
@@ -2,63 +2,22 @@ import { readdir, readFile } from 'fs/promises';
import path from 'path';

import { seeds } from '@logto/schemas';
import {
  createPool,
  DatabasePool,
  DatabaseTransactionConnection,
  parseDsn,
  sql,
  stringifyDsn,
} from 'slonik';
import chalk from 'chalk';
import ora from 'ora';
import { DatabasePool, DatabaseTransactionConnection, sql } from 'slonik';
import { raw } from 'slonik-sql-tag-raw';
import { CommandModule } from 'yargs';
import { z } from 'zod';

import { createPoolFromConfig, getDatabaseUrlFromConfig, insertInto } from '../../database';
import { createPoolAndDatabaseIfNeeded, insertInto } from '../../database';
import { updateDatabaseTimestamp } from '../../queries/logto-config';
import { buildApplicationSecret, getPathInModule, log } from '../../utilities';
import { getLatestAlterationTimestamp } from './alteration';

/**
 * Create a database pool with the database URL in config.
 * If the given database does not exists, it will try to create a new database by connecting to the maintenance database `postgres`.
 *
 * @returns A new database pool with the database URL in config.
 */
const createDatabasePool = async () => {
  try {
    return await createPoolFromConfig();
  } catch (error: unknown) {
    const result = z.object({ code: z.string() }).safeParse(error);

    // Database does not exist, try to create one
    // https://www.postgresql.org/docs/14/errcodes-appendix.html
    if (!(result.success && result.data.code === '3D000')) {
      log.error(error);
    }

    const databaseUrl = await getDatabaseUrlFromConfig();
    const dsn = parseDsn(databaseUrl);
    // It's ok to fall back to '?' since:
    // - Database name is required to connect in the previous pool
    // - It will throw error when creating database using '?'
    const databaseName = dsn.databaseName ?? '?';
    const maintenancePool = await createPool(stringifyDsn({ ...dsn, databaseName: 'postgres' }));
    await maintenancePool.query(sql`
      create database ${sql.identifier([databaseName])}
        with
        encoding = 'UTF8'
        connection_limit = -1;
    `);
    await maintenancePool.end();

    log.info(`Database ${databaseName} successfully created.`);

    return createPoolFromConfig();
  }
};

const createTables = async (connection: DatabaseTransactionConnection) => {
  const spinner = ora({
    text: 'Create tables',
    prefixText: chalk.blue('[info]'),
  }).start();
  const tableDirectory = getPathInModule('@logto/schemas', 'tables');
  const directoryFiles = await readdir(tableDirectory);
  const tableFiles = directoryFiles.filter((file) => file.endsWith('.sql'));
@@ -69,12 +28,17 @@ const createTables = async (connection: DatabaseTransactionConnection) => {
    ])
  );

  // Disable for spinner
  /* eslint-disable @silverhand/fp/no-mutation */
  // Await in loop is intended for better error handling
  for (const [file, query] of queries) {
    // eslint-disable-next-line no-await-in-loop
    await connection.query(sql`${raw(query)}`);
    log.info(`Run ${file} succeeded.`);
    spinner.text = `Run ${file} succeeded`;
  }

  spinner.succeed(`Created ${queries.length} tables`);
  /* eslint-enable @silverhand/fp/no-mutation */
};

const seedTables = async (connection: DatabaseTransactionConnection) => {
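Note on the loop above: the table files are plain .sql text, so they are executed through `raw` from slonik-sql-tag-raw; slonik's `sql` tag would otherwise bind the interpolated string as a query parameter instead of running it. A minimal sketch of that pattern (the file content below is invented for illustration):

import { sql } from 'slonik';
import { raw } from 'slonik-sql-tag-raw';

// Pretend this string came from readFile('create_example.sql', 'utf8').
const fileContent = 'create table example (id varchar(21) primary key);';

// sql`${fileContent}` would turn the text into a bound parameter ($1);
// raw() injects it verbatim so the DDL actually executes.
const query = sql`${raw(fileContent)}`;
// Usage: await connection.query(query);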
@@ -86,6 +50,11 @@ const seedTables = async (connection: DatabaseTransactionConnection) => {
    defaultRole,
  } = seeds;

  const spinner = ora({
    text: 'Seed data',
    prefixText: chalk.blue('[info]'),
  }).start();

  await Promise.all([
    connection.query(insertInto(managementResource, 'resources')),
    connection.query(insertInto(createDefaultSetting(), 'settings')),
@@ -96,7 +65,8 @@ const seedTables = async (connection: DatabaseTransactionConnection) => {
    connection.query(insertInto(defaultRole, 'roles')),
    updateDatabaseTimestamp(connection, await getLatestAlterationTimestamp()),
  ]);
  log.info('Seed tables succeeded.');

  spinner.succeed();
};

export const seedByPool = async (pool: DatabasePool) => {
@@ -110,7 +80,7 @@ const seed: CommandModule = {
  command: 'seed',
  describe: 'Create database and seed tables and data',
  handler: async () => {
    const pool = await createDatabasePool();
    const pool = await createPoolAndDatabaseIfNeeded();

    try {
      await seedByPool(pool);
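The `seed` object above follows the yargs CommandModule shape. The CLI entry file that registers it is not part of this diff, so the wiring below is only an assumed sketch of how such a module is typically mounted with yargs:

import yargs, { CommandModule } from 'yargs';
import { hideBin } from 'yargs/helpers';

// Stand-in for the real `seed` module defined in the hunk above.
const seed: CommandModule = {
  command: 'seed',
  describe: 'Create database and seed tables and data',
  handler: async () => {
    // createPoolAndDatabaseIfNeeded() + seedByPool(pool), as in the diff.
  },
};

void yargs(hideBin(process.argv)).command(seed).demandCommand(1).parse();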
@@ -4,14 +4,18 @@ import { mkdir } from 'fs/promises';
import os from 'os';
import path from 'path';

import { conditional } from '@silverhand/essentials';
import chalk from 'chalk';
import { remove, writeFile } from 'fs-extra';
import inquirer from 'inquirer';
import ora from 'ora';
import * as semver from 'semver';
import tar from 'tar';
import { CommandModule } from 'yargs';

import { createPoolAndDatabaseIfNeeded, getDatabaseUrlFromConfig } from '../database';
import { downloadFile, log, safeExecSync } from '../utilities';
import { seedByPool } from './database/seed';

export type InstallArgs = {
  path?: string;
@@ -36,12 +40,26 @@ const validateNodeVersion = () => {
  }
};

const validatePath = (value: string) =>
const inquireInstancePath = async (initialPath?: string) => {
  const { instancePath } = await inquirer.prompt<{ instancePath: string }>(
    {
      name: 'instancePath',
      message: 'Where should we create your Logto instance?',
      type: 'input',
      default: defaultPath,
      filter: (value: string) => value.trim(),
      validate: (value: string) =>
        existsSync(path.resolve(value))
          ? `The path ${chalk.green(value)} already exists, please try another.`
          : true;
          : true,
    },
    { instancePath: initialPath }
  );

const getInstancePath = async () => {
  return instancePath;
};

const validateDatabase = async () => {
  const { hasPostgresUrl } = await inquirer.prompt<{ hasPostgresUrl?: boolean }>({
    name: 'hasPostgresUrl',
    message: `Logto requires PostgreSQL >=${pgRequired.version} but cannot find in the current environment.\n Do you have a remote PostgreSQL instance ready?`,
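Worth noting in the hunk above: `inquirer.prompt` accepts a second argument of pre-filled answers, and passing `{ instancePath: initialPath }` is what lets the prompt be skipped when a path is already known (for example in silent installs). A minimal sketch of that pattern, detached from the Logto specifics:

import inquirer from 'inquirer';

const askForName = async (preset?: string) => {
  const { name } = await inquirer.prompt<{ name: string }>(
    { name: 'name', type: 'input', message: 'Pick a name' },
    // When `preset` is defined, inquirer treats the question as already
    // answered and does not prompt; when it is undefined, the prompt runs.
    { name: preset }
  );

  return name;
};

// askForName();          // prompts interactively
// askForName('my-app');  // resolves to 'my-app' without prompting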
@@ -59,17 +77,6 @@ const getInstancePath = async () => {
  if (hasPostgresUrl === false) {
    log.error('Logto requires a Postgres instance to run.');
  }

  const { instancePath } = await inquirer.prompt<{ instancePath: string }>({
    name: 'instancePath',
    message: 'Where should we create your Logto instance?',
    type: 'input',
    default: defaultPath,
    filter: (value: string) => value.trim(),
    validate: validatePath,
  });

  return instancePath;
};

const downloadRelease = async () => {
@@ -96,8 +103,6 @@ const decompress = async (toPath: string, tarPath: string) => {
  } catch (error: unknown) {
    decompressSpinner.fail();
    log.error(error);

    return;
  }

  decompressSpinner.succeed();
@@ -106,17 +111,56 @@ const decompress = async (toPath: string, tarPath: string) => {
const installLogto = async ({ path: pathArgument = defaultPath, silent = false }: InstallArgs) => {
  validateNodeVersion();

  const instancePath = (!silent && (await getInstancePath())) || pathArgument;
  const isValidPath = validatePath(instancePath);
  // Get instance path
  const instancePath = await inquireInstancePath(conditional(silent && pathArgument));

  if (isValidPath !== true) {
    log.error(isValidPath);
  }
  // Validate database URL
  await validateDatabase();

  // Download and decompress
  const tarPath = await downloadRelease();

  await decompress(instancePath, tarPath);

  try {
    // Seed database
    const pool = await createPoolAndDatabaseIfNeeded(); // It will ask for database URL and save to config
    await seedByPool(pool);
    await pool.end();
  } catch (error: unknown) {
    console.error(error);

    const { value } = await inquirer.prompt<{ value: boolean }>({
      name: 'value',
      type: 'confirm',
      message:
        'Error occurred during seeding your Logto database. Would you like to continue without seed?',
      default: false,
    });

    if (!value) {
      const spinner = ora({
        text: 'Clean up',
        prefixText: chalk.blue('[info]'),
      }).start();

      await remove(instancePath);
      spinner.succeed();
      // eslint-disable-next-line unicorn/no-process-exit
      process.exit(1);
    }

    log.info(`You can use ${chalk.green('db seed')} command to seed when ready.`);
  }

  // Save to dot env
  const databaseUrl = await getDatabaseUrlFromConfig();
  const dotEnvPath = path.resolve(instancePath, '.env');
  await writeFile(dotEnvPath, `DB_URL=${databaseUrl}`, {
    encoding: 'utf8',
  });
  log.info(`Saved database URL to ${chalk.blue(dotEnvPath)}`);

  // Finale
  const startCommand = `cd ${instancePath} && npm start`;
  log.info(
    `Use the command below to start Logto. Happy hacking!\n\n ${chalk.green(startCommand)}`
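A side note on `conditional(silent && pathArgument)` above: `conditional` from @silverhand/essentials collapses a short-circuit expression to its value when truthy and to undefined otherwise, which matches the optional `initialPath` parameter that `inquireInstancePath` expects. A tiny illustration with invented values:

import { conditional } from '@silverhand/essentials';

const silent = true;
const pathArgument = './logto';

// `silent && pathArgument` evaluates to './logto' here; if silent were false
// the expression would be `false`, and conditional() maps any falsy result to undefined.
const initialPath: string | undefined = conditional(silent && pathArgument);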
@@ -41,5 +41,5 @@ export const getConfig = async () => {
export const patchConfig = async (config: LogtoConfig) => {
  const configPath = await getConfigPath();
  await writeFile(configPath, JSON.stringify({ ...(await getConfig()), ...config }, undefined, 2));
  log.info(`Updated config in ${chalk.green(configPath)}`);
  log.info(`Updated config in ${chalk.blue(configPath)}`);
};
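`patchConfig` shallow-merges the given fields into whatever the config file already holds and rewrites it as pretty-printed JSON. A quick usage sketch (the URL is a placeholder, not a value from this diff):

import { patchConfig } from './config'; // the CLI config module shown above

// Reads the current config, merges the new field, and writes the file back.
await patchConfig({ databaseUrl: 'postgresql://localhost:5432/logto' });

// The config file afterwards contains roughly:
//   {
//     "databaseUrl": "postgresql://localhost:5432/logto",
//     ...whatever fields were already stored
//   }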
@@ -1,21 +1,45 @@
import { SchemaLike, SchemaValue, SchemaValuePrimitive } from '@logto/schemas';
import chalk from 'chalk';
import decamelize from 'decamelize';
import { createPool, IdentifierSqlToken, sql, SqlToken } from 'slonik';
import inquirer from 'inquirer';
import { createPool, IdentifierSqlToken, parseDsn, sql, SqlToken, stringifyDsn } from 'slonik';
import { createInterceptors } from 'slonik-interceptor-preset';
import { z } from 'zod';

import { getConfig } from './config';
import { getConfig, patchConfig } from './config';
import { log } from './utilities';

export const defaultDatabaseUrl = 'postgresql://localhost:5432/logto';

export const getDatabaseUrlFromConfig = async () => {
  const { databaseUrl } = await getConfig();

  if (!databaseUrl) {
    const { value } = await inquirer
      .prompt<{ value: string }>({
        type: 'input',
        name: 'value',
        message: 'Enter your Logto database URL',
        default: defaultDatabaseUrl,
      })
      .catch(async (error) => {
        if (error.isTtyError) {
          log.error(
            `No database URL configured. Set it via ${chalk.green('database set-url')} command first.`
            `No database URL configured. Set it via ${chalk.green(
              'database set-url'
            )} command first.`
          );
        }

        // The type definition does not give us type except `any`, throw it directly will honor the original behavior.
        // eslint-disable-next-line @typescript-eslint/no-throw-literal
        throw error;
      });
    await patchConfig({ databaseUrl: value });

    return value;
  }

  return databaseUrl;
};
@@ -27,6 +51,45 @@ export const createPoolFromConfig = async () => {
  });
};

/**
 * Create a database pool with the database URL in config.
 * If the given database does not exists, it will try to create a new database by connecting to the maintenance database `postgres`.
 *
 * @returns A new database pool with the database URL in config.
 */
export const createPoolAndDatabaseIfNeeded = async () => {
  try {
    return await createPoolFromConfig();
  } catch (error: unknown) {
    const result = z.object({ code: z.string() }).safeParse(error);

    // Database does not exist, try to create one
    // https://www.postgresql.org/docs/14/errcodes-appendix.html
    if (!(result.success && result.data.code === '3D000')) {
      log.error(error);
    }

    const databaseUrl = await getDatabaseUrlFromConfig();
    const dsn = parseDsn(databaseUrl);
    // It's ok to fall back to '?' since:
    // - Database name is required to connect in the previous pool
    // - It will throw error when creating database using '?'
    const databaseName = dsn.databaseName ?? '?';
    const maintenancePool = await createPool(stringifyDsn({ ...dsn, databaseName: 'postgres' }));
    await maintenancePool.query(sql`
      create database ${sql.identifier([databaseName])}
        with
        encoding = 'UTF8'
        connection_limit = -1;
    `);
    await maintenancePool.end();

    log.info(`${chalk.green('✔')} Created database ${databaseName}`);

    return createPoolFromConfig();
  }
};

// TODO: Move database utils to `core-kit`
export type Table = { table: string; fields: Record<string, string> };
export type FieldIdentifiers<Key extends string | number | symbol> = {
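The fallback above hinges on slonik's `parseDsn`/`stringifyDsn` pair: the configured URL is decomposed, the database name is swapped for the built-in `postgres` maintenance database, and the rest of the connection details are reused. A minimal sketch of just that step (the URL is illustrative, not taken from the diff):

import { parseDsn, stringifyDsn } from 'slonik';

// e.g. the value stored via the `database set-url` command
const databaseUrl = 'postgresql://localhost:5432/logto';

const dsn = parseDsn(databaseUrl); // { databaseName: 'logto', host: 'localhost', port: 5432, ... }

// Same host, port, and credentials, but pointed at the maintenance database,
// so `create database` can be issued even though `logto` does not exist yet.
const maintenanceUrl = stringifyDsn({ ...dsn, databaseName: 'postgres' });
// → something like 'postgresql://localhost:5432/postgres'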
@@ -17,11 +17,13 @@
    "add-connector": "node build/cli/add-connector.js",
    "add-official-connectors": "node build/cli/add-official-connectors.js",
    "alteration": "node build/cli/alteration.js",
    "cli": "logto",
    "test": "jest",
    "test:coverage": "jest --coverage --silent",
    "test:report": "codecov -F core"
  },
  "dependencies": {
    "@logto/cli": "^1.0.0-beta.10",
    "@logto/connector-kit": "^1.0.0-beta.13",
    "@logto/core-kit": "^1.0.0-beta.13",
    "@logto/phrases": "^1.0.0-beta.10",
@@ -1,100 +0,0 @@
import { readdir, readFile } from 'fs/promises';
import path from 'path';

import { SchemaLike, seeds } from '@logto/schemas';
import chalk from 'chalk';
import decamelize from 'decamelize';
import { createPool, parseDsn, sql, stringifyDsn } from 'slonik';
import { createInterceptors } from 'slonik-interceptor-preset';
import { raw } from 'slonik-sql-tag-raw';

import { updateDatabaseTimestamp } from '@/alteration';
import { buildApplicationSecret } from '@/utils/id';

import { convertToPrimitiveOrSql } from './utils';

const {
  managementResource,
  defaultSignInExperience,
  createDefaultSetting,
  createDemoAppApplication,
  defaultRole,
} = seeds;
const tableDirectory = 'node_modules/@logto/schemas/tables';

export const replaceDsnDatabase = (dsn: string, databaseName: string): string =>
  stringifyDsn({ ...parseDsn(dsn), databaseName });

/**
 * Create a database.
 * @returns DSN with the created database name.
 */
export const createDatabase = async (dsn: string, databaseName: string): Promise<string> => {
  const pool = await createPool(replaceDsnDatabase(dsn, 'postgres'));

  await pool.query(sql`
    create database ${sql.identifier([databaseName])}
      with
      encoding = 'UTF8'
      connection_limit = -1;
  `);
  await pool.end();

  console.log(`${chalk.blue('[create]')} Database ${databaseName} successfully created.`);

  return replaceDsnDatabase(dsn, databaseName);
};

export const insertInto = <T extends SchemaLike>(object: T, table: string) => {
  const keys = Object.keys(object);

  return sql`
    insert into ${sql.identifier([table])}
    (${sql.join(
      keys.map((key) => sql.identifier([decamelize(key)])),
      sql`, `
    )})
    values (${sql.join(
      keys.map((key) => convertToPrimitiveOrSql(key, object[key] ?? null)),
      sql`, `
    )})
  `;
};

export const createDatabaseCli = async (dsn: string) => {
  const pool = await createPool(dsn, { interceptors: createInterceptors() });

  const createTables = async () => {
    const directory = await readdir(tableDirectory);
    const tableFiles = directory.filter((file) => file.endsWith('.sql'));
    const queries = await Promise.all(
      tableFiles.map<Promise<[string, string]>>(async (file) => [
        file,
        await readFile(path.join(tableDirectory, file), 'utf8'),
      ])
    );

    // Await in loop is intended for better error handling
    for (const [file, query] of queries) {
      // eslint-disable-next-line no-await-in-loop
      await pool.query(sql`${raw(query)}`);
      console.log(`${chalk.blue('[create-tables]')} Run ${file} succeeded.`);
    }

    await updateDatabaseTimestamp(pool);
    console.log(`${chalk.blue('[create-tables]')} Update alteration state succeeded.`);
  };

  const seedTables = async () => {
    await Promise.all([
      pool.query(insertInto(managementResource, 'resources')),
      pool.query(insertInto(createDefaultSetting(), 'settings')),
      pool.query(insertInto(defaultSignInExperience, 'sign_in_experiences')),
      pool.query(insertInto(createDemoAppApplication(buildApplicationSecret()), 'applications')),
      pool.query(insertInto(defaultRole, 'roles')),
    ]);
    console.log(`${chalk.blue('[seed-tables]')} Seed tables succeeded.`);
  };

  return { createTables, seedTables, pool };
};
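The `insertInto` helper deleted here lives on in the CLI package (see the '../../database' imports earlier in this diff). For reference, it turns a camelCase schema object into a parameterized insert; a rough sketch of the shape it produces, using a made-up object:

// Illustrative only — not an object from the Logto seeds.
const application = { id: 'demo-app', clientSecret: 'some-random-secret' };

// insertInto(application, 'applications') builds approximately:
//
//   insert into "applications" ("id", "client_secret")
//   values ($1, $2)
//
// Keys are decamelized into column identifiers via sql.identifier, and values
// pass through convertToPrimitiveOrSql so plain fields become bound parameters
// while structured fields can be rendered as SQL tokens.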
@@ -13,7 +13,7 @@ export const checkAlterationState = async (pool: DatabasePool) => {
  }

  const error = new Error(
    `Found undeployed database alterations, you must deploy them first by "npm alteration deploy" command, reference: https://docs.logto.io/docs/recipes/deployment/#database-alteration`
    `Found undeployed database alterations, you must deploy them first by "npm run alteration deploy" command, reference: https://docs.logto.io/docs/recipes/deployment/#database-alteration`
  );

  if (allYes) {
@@ -1,82 +1,7 @@
import { assertEnv, conditional, getEnv, Optional } from '@silverhand/essentials';
import inquirer from 'inquirer';
import { assertEnv } from '@silverhand/essentials';
import chalk from 'chalk';
import { createPool } from 'slonik';
import { createInterceptors } from 'slonik-interceptor-preset';
import { z } from 'zod';

import { createDatabase, createDatabaseCli, replaceDsnDatabase } from '@/database/seed';

import { appendDotEnv } from './dot-env';
import { allYes, noInquiry } from './parameters';

const defaultDatabaseUrl = getEnv('DB_URL_DEFAULT', 'postgres://@localhost:5432');
const defaultDatabaseName = 'logto';

const initDatabase = async (dsn: string): Promise<[string, boolean]> => {
  try {
    return [await createDatabase(dsn, defaultDatabaseName), true];
  } catch (error: unknown) {
    const result = z.object({ code: z.string() }).safeParse(error);

    // https://www.postgresql.org/docs/12/errcodes-appendix.html
    const databaseExists = result.success && result.data.code === '42P04';

    if (!databaseExists) {
      throw error;
    }

    if (allYes) {
      return [replaceDsnDatabase(dsn, defaultDatabaseName), false];
    }

    const useCurrent = await inquirer.prompt({
      type: 'confirm',
      name: 'value',
      message: `A database named "${defaultDatabaseName}" already exists. Would you like to use it without filling the initial data?`,
    });

    if (useCurrent.value) {
      return [replaceDsnDatabase(dsn, defaultDatabaseName), false];
    }

    throw error;
  }
};

const inquireForLogtoDsn = async (key: string): Promise<[Optional<string>, boolean]> => {
  if (allYes) {
    return initDatabase(defaultDatabaseUrl);
  }

  const setUp = await inquirer.prompt({
    type: 'confirm',
    name: 'value',
    message: `No Postgres DSN (${key}) found in env variables. Would you like to set up a new Logto database?`,
  });

  if (!setUp.value) {
    const dsn = await inquirer.prompt({
      name: 'value',
      default: new URL(defaultDatabaseName, defaultDatabaseUrl).href,
      message: 'Please input the DSN which points to an existing Logto database:',
    });

    return [conditional<string>(dsn.value && String(dsn.value)), false];
  }

  const dsnAnswer = await inquirer.prompt({
    name: 'value',
    default: new URL(defaultDatabaseUrl).href,
    message: `Please input the DSN _WITHOUT_ database name:`,
  });
  const dsn = conditional<string>(dsnAnswer.value && String(dsnAnswer.value));

  if (!dsn) {
    return [dsn, false];
  }

  return initDatabase(dsn);
};

const createPoolByEnv = async (isTest: boolean) => {
  // Database connection is disabled in unit test environment
@@ -92,27 +17,25 @@ const createPoolByEnv = async (isTest: boolean) => {

    return await createPool(databaseDsn, { interceptors });
  } catch (error: unknown) {
    if (noInquiry) {
    if (error instanceof Error && error.message === `env variable ${key} not found`) {
      console.error(
        `${chalk.red('[error]')} No Postgres DSN (${chalk.green(
          key
        )}) found in env variables.\n\n` +
          ` Either provide it in your env, or add it to the ${chalk.blue(
            '.env'
          )} file in the Logto project root.\n\n` +
          ` If you want to set up a new Logto database, run ${chalk.green(
            'npm run cli db seed'
          )} before setting env ${chalk.green(key)}.\n\n` +
          ` Visit ${chalk.blue(
            'https://docs.logto.io/docs/references/core/configuration'
          )} for more info about setting up env.\n`
      );
    }

    throw error;
  }

  const [dsn, needsSeed] = await inquireForLogtoDsn(key);

  if (!dsn) {
    throw error;
  }

  const cli = await createDatabaseCli(dsn);

  if (needsSeed) {
    await cli.createTables();
    await cli.seedTables();
  }

  appendDotEnv(key, dsn);

  return cli.pool;
  }
};

export default createPoolByEnv;
@@ -223,6 +223,7 @@ importers:

  packages/core:
    specifiers:
      '@logto/cli': ^1.0.0-beta.10
      '@logto/connector-kit': ^1.0.0-beta.13
      '@logto/core-kit': ^1.0.0-beta.13
      '@logto/phrases': ^1.0.0-beta.10
@@ -301,6 +302,7 @@ importers:
      typescript: ^4.7.4
      zod: ^3.18.0
    dependencies:
      '@logto/cli': link:../cli
      '@logto/connector-kit': 1.0.0-beta.13
      '@logto/core-kit': 1.0.0-beta.13
      '@logto/phrases': link:../phrases