Mirror of https://github.com/logto-io/logto.git (synced 2024-12-16 20:26:19 -05:00)
Commit 2c6d2a6b1c: 51 changed files with 550 additions and 263 deletions
.github/workflows/integration-test.yml (vendored, 10 lines changed)
@@ -88,7 +88,7 @@ jobs:
       - name: Run Logto
         working-directory: logto/
-        run: npm start &
+        run: nohup npm start > nohup.out 2> nohup.err < /dev/null &
         env:
           INTEGRATION_TEST: true

@@ -101,3 +101,11 @@ jobs:
           cd tests/packages/integration-tests
           pnpm build
           pnpm test:${{ matrix.test_target }}
+
+      - name: Show logs
+        working-directory: logto/
+        run: cat nohup.out
+
+      - name: Show error logs
+        working-directory: logto/
+        run: cat nohup.err
@@ -117,20 +117,12 @@ assert.deepStrictEqual(...manifests);
 const queryDatabaseData = async (database) => {
   const pool = new pg.Pool({ database, user: 'postgres', password: 'postgres' });
   const result = await Promise.all(manifests[0].tables
-    .filter(({ table_name }) => !['logto_configs', '_logto_configs'].includes(table_name))
+    // system configs are usually generated or time-relative, ignore for now
+    .filter(({ table_name }) => !['logto_configs', '_logto_configs', 'systems'].includes(table_name))
     .map(async ({ table_name }) => {
       const { rows } = await pool.query(/* sql */`select * from ${table_name};`);

-      if (table_name === 'systems') {
-        return [
-          table_name,
-          rows.map(({ value, ...rest }) =>
-            ({ ...rest, value: omit(value, 'createdAt', 'updatedAt') })
-          ),
-        ];
-      }
-
-      return [table_name, omitArray(rows, 'created_at', 'updated_at', 'secret')];
+      return [table_name, omitArray(rows, 'created_at', 'updated_at', 'secret', 'db_user', 'db_user_password')];
     })
   );
@@ -1,102 +1,20 @@
import { readdir, readFile } from 'fs/promises';
import path from 'path';

import { generateStandardId } from '@logto/core-kit';
-import {
-  logtoConfigGuards,
-  LogtoOidcConfigKey,
-  managementResource,
-  defaultSignInExperience,
-  createDefaultAdminConsoleConfig,
-  createDemoAppApplication,
-  defaultRole,
-  managementResourceScope,
-  defaultRoleScopeRelation,
-  defaultTenant,
-} from '@logto/schemas';
-import { Hooks, Tenants } from '@logto/schemas/models';
+import { logtoConfigGuards, LogtoOidcConfigKey } from '@logto/schemas';
import chalk from 'chalk';
import type { DatabasePool, DatabaseTransactionConnection } from 'slonik';
import { sql } from 'slonik';
import { raw } from 'slonik-sql-tag-raw';
import type { CommandModule } from 'yargs';
import { z } from 'zod';

-import { createPoolAndDatabaseIfNeeded, insertInto } from '../../../database.js';
+import { createPoolAndDatabaseIfNeeded } from '../../../database.js';
import {
  getRowsByKeys,
  doesConfigsTableExist,
  updateValueByKey,
} from '../../../queries/logto-config.js';
-import { updateDatabaseTimestamp } from '../../../queries/system.js';
-import { getPathInModule, log, oraPromise } from '../../../utilities.js';
+import { log, oraPromise } from '../../../utilities.js';
import { getLatestAlterationTimestamp } from '../alteration/index.js';
import { getAlterationDirectory } from '../alteration/utils.js';
import { oidcConfigReaders } from './oidc-config.js';

-const getExplicitOrder = (query: string) => {
-  const matched = /\/\*\s*init_order\s*=\s*([\d.]+)\s*\*\//.exec(query)?.[1];
-
-  return matched ? Number(matched) : undefined;
-};
-
-const compareQuery = ([t1, q1]: [string, string], [t2, q2]: [string, string]) => {
-  const o1 = getExplicitOrder(q1);
-  const o2 = getExplicitOrder(q2);
-
-  if (o1 === undefined && o2 === undefined) {
-    return t1.localeCompare(t2);
-  }
-
-  if (o1 === undefined) {
-    return 1;
-  }
-
-  if (o2 === undefined) {
-    return -1;
-  }
-
-  return o1 - o2;
-};
-
-const createTables = async (connection: DatabaseTransactionConnection) => {
-  const tableDirectory = getPathInModule('@logto/schemas', 'tables');
-  const directoryFiles = await readdir(tableDirectory);
-  const tableFiles = directoryFiles.filter((file) => file.endsWith('.sql'));
-  const queries = await Promise.all(
-    tableFiles.map<Promise<[string, string]>>(async (file) => [
-      file,
-      await readFile(path.join(tableDirectory, file), 'utf8'),
-    ])
-  );
-
-  const allQueries: Array<[string, string]> = [
-    [Hooks.tableName, Hooks.raw],
-    [Tenants.tableName, Tenants.raw],
-    ...queries,
-  ];
-  const sorted = allQueries.slice().sort(compareQuery);
-
-  for (const [, query] of sorted) {
-    // eslint-disable-next-line no-await-in-loop
-    await connection.query(sql`${raw(query)}`);
-  }
-};
-
-const seedTables = async (connection: DatabaseTransactionConnection, latestTimestamp: number) => {
-  await connection.query(insertInto(defaultTenant, 'tenants'));
-
-  await Promise.all([
-    connection.query(insertInto(managementResource, 'resources')),
-    connection.query(insertInto(managementResourceScope, 'scopes')),
-    connection.query(insertInto(createDefaultAdminConsoleConfig(), 'logto_configs')),
-    connection.query(insertInto(defaultSignInExperience, 'sign_in_experiences')),
-    connection.query(insertInto(createDemoAppApplication(generateStandardId()), 'applications')),
-    connection.query(insertInto(defaultRole, 'roles')),
-    connection.query(insertInto(defaultRoleScopeRelation, 'roles_scopes')),
-    updateDatabaseTimestamp(connection, latestTimestamp),
-  ]);
-};
+import { createTables, seedTables } from './tables.js';

const seedOidcConfigs = async (pool: DatabaseTransactionConnection) => {
  const configGuard = z.object({
packages/cli/src/commands/database/seed/tables.ts (new file, 127 lines)
@@ -0,0 +1,127 @@
import { readdir, readFile } from 'fs/promises';
import path from 'path';

import { generateStandardId } from '@logto/core-kit';
import {
  managementResource,
  defaultSignInExperience,
  createDefaultAdminConsoleConfig,
  createDemoAppApplication,
  defaultRole,
  managementResourceScope,
  defaultRoleScopeRelation,
  defaultTenantId,
} from '@logto/schemas';
import { Hooks, Tenants } from '@logto/schemas/models';
import type { DatabaseTransactionConnection } from 'slonik';
import { sql } from 'slonik';
import { raw } from 'slonik-sql-tag-raw';

import { insertInto } from '../../../database.js';
import { getDatabaseName } from '../../../queries/database.js';
import { updateDatabaseTimestamp } from '../../../queries/system.js';
import { getPathInModule } from '../../../utilities.js';
import { createTenant } from './tenant.js';

const getExplicitOrder = (query: string) => {
  const matched = /\/\*\s*init_order\s*=\s*([\d.]+)\s*\*\//.exec(query)?.[1];

  return matched ? Number(matched) : undefined;
};

const compareQuery = ([t1, q1]: [string, string], [t2, q2]: [string, string]) => {
  const o1 = getExplicitOrder(q1);
  const o2 = getExplicitOrder(q2);

  if (o1 === undefined && o2 === undefined) {
    return t1.localeCompare(t2);
  }

  if (o1 === undefined) {
    return 1;
  }

  if (o2 === undefined) {
    return -1;
  }

  return o1 - o2;
};

type Lifecycle = 'before_all' | 'after_all' | 'after_each';

const lifecycleNames: readonly string[] = Object.freeze([
  'before_all',
  'after_all',
  'after_each',
] satisfies Lifecycle[]);

export const createTables = async (connection: DatabaseTransactionConnection) => {
  const tableDirectory = getPathInModule('@logto/schemas', 'tables');
  const directoryFiles = await readdir(tableDirectory);
  const tableFiles = directoryFiles.filter((file) => file.endsWith('.sql'));
  const queries = await Promise.all(
    tableFiles.map<Promise<[string, string]>>(async (file) => [
      file,
      await readFile(path.join(tableDirectory, file), 'utf8'),
    ])
  );

  const runLifecycleQuery = async (
    lifecycle: Lifecycle,
    parameters: { name?: string; database?: string } = {}
  ) => {
    const query = queries.find(([file]) => file.slice(1, -4) === lifecycle)?.[1];

    if (query) {
      await connection.query(
        sql`${raw(
          /* eslint-disable no-template-curly-in-string */
          query
            .replaceAll('${name}', parameters.name ?? '')
            .replaceAll('${database}', parameters.database ?? '')
          /* eslint-enable no-template-curly-in-string */
        )}`
      );
    }
  };

  const allQueries: Array<[string, string]> = [
    [Hooks.tableName, Hooks.raw],
    [Tenants.tableName, Tenants.raw],
    ...queries.filter(([file]) => !lifecycleNames.includes(file.slice(1, -4))),
  ];
  const sorted = allQueries.slice().sort(compareQuery);
  const database = await getDatabaseName(connection, true);

  await runLifecycleQuery('before_all', { database });

  /* eslint-disable no-await-in-loop */
  for (const [file, query] of sorted) {
    await connection.query(sql`${raw(query)}`);

    if (!query.includes('/* no_after_each */')) {
      await runLifecycleQuery('after_each', { name: file.split('.')[0], database });
    }
  }
  /* eslint-enable no-await-in-loop */

  await runLifecycleQuery('after_all', { database });
};

export const seedTables = async (
  connection: DatabaseTransactionConnection,
  latestTimestamp: number
) => {
  await createTenant(connection, defaultTenantId);
  await Promise.all([
    connection.query(insertInto(managementResource, 'resources')),
    connection.query(insertInto(managementResourceScope, 'scopes')),
    connection.query(insertInto(createDefaultAdminConsoleConfig(), 'logto_configs')),
    connection.query(insertInto(defaultSignInExperience, 'sign_in_experiences')),
    connection.query(insertInto(createDemoAppApplication(generateStandardId()), 'applications')),
    connection.query(insertInto(defaultRole, 'roles')),
    connection.query(insertInto(defaultRoleScopeRelation, 'roles_scopes')),
    updateDatabaseTimestamp(connection, latestTimestamp),
  ]);
};
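For context, a rough sketch of how these two helpers are meant to be called from the seed command. This is illustrative only and not part of the commit: `pool` is assumed to be a slonik DatabasePool and `latestTimestamp` the newest alteration timestamp, both prepared by the surrounding seed logic in seed/index.ts.

// Illustrative sketch: run table creation and seeding in one transaction so a
// failed seed does not leave a half-initialized database behind.
await pool.transaction(async (connection) => {
  await createTables(connection);
  await seedTables(connection, latestTimestamp);
});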
packages/cli/src/commands/database/seed/tenant.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
import { generateStandardId } from '@logto/core-kit';
import type { TenantModel } from '@logto/schemas';
import type { DatabaseTransactionConnection } from 'slonik';
import { sql } from 'slonik';
import { raw } from 'slonik-sql-tag-raw';

import { insertInto } from '../../../database.js';
import { getDatabaseName } from '../../../queries/database.js';

export const createTenant = async (connection: DatabaseTransactionConnection, tenantId: string) => {
  const database = await getDatabaseName(connection, true);
  const parentRole = `logto_tenant_${database}`;
  const role = `logto_tenant_${database}_${tenantId}`;
  const password = generateStandardId(32);
  const tenantModel: TenantModel = { id: tenantId, dbUser: role, dbUserPassword: password };

  await connection.query(insertInto(tenantModel, 'tenants'));
  await connection.query(sql`
    create role ${sql.identifier([role])} with inherit login
      password '${raw(password)}'
      in role ${sql.identifier([parentRole])};
  `);
};
packages/cli/src/queries/database.ts (new file, 10 lines)
@@ -0,0 +1,10 @@
import type { CommonQueryMethods } from 'slonik';
import { sql } from 'slonik';

export const getDatabaseName = async (pool: CommonQueryMethods, normalized = false) => {
  const { currentDatabase } = await pool.one<{ currentDatabase: string }>(sql`
    select current_database();
  `);

  return normalized ? currentDatabase.replaceAll('-', '_') : currentDatabase;
};
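The `normalized` flag only swaps hyphens for underscores so the result can be embedded in Postgres role names. A quick hypothetical example (the database name is made up):

// Assuming current_database() returns 'logto-dev':
await getDatabaseName(pool);       // 'logto-dev'
await getDatabaseName(pool, true); // 'logto_dev', usable in role names such as `logto_tenant_logto_dev`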
@@ -3,7 +3,6 @@
 */

import { createMockUtils } from '@logto/shared/esm';
import { createMockQueryResult, createMockPool } from 'slonik';

const { jest } = import.meta;
const { mockEsm, mockEsmWithActual, mockEsmDefault } = createMockUtils(jest);

@@ -12,6 +11,7 @@ process.env.DB_URL = 'postgres://mock.db.url';
process.env.ENDPOINT = 'https://logto.test';
process.env.NODE_ENV = 'test';

/* Mock for EnvSet */
mockEsm('#src/libraries/logto-config.js', () => ({
  createLogtoConfigLibrary: () => ({ getOidcConfigs: () => ({}) }),
}));

@@ -24,6 +24,7 @@ mockEsm('#src/env-set/check-alteration-state.js', () => ({
mockEsmDefault('#src/env-set/oidc.js', () => () => ({
  issuer: 'https://logto.test/oidc',
}));
/* End */

await mockEsmWithActual('#src/env-set/index.js', () => ({
  MountedApps: {

@@ -33,18 +34,6 @@ await mockEsmWithActual('#src/env-set/index.js', () => ({
    DemoApp: 'demo-app',
    Welcome: 'welcome',
  },
-  // TODO: Remove after clean up of default env sets
-  default: {
-    get oidc() {
-      return {
-        issuer: 'https://logto.test/oidc',
-      };
-    },
-    get pool() {
-      return createMockPool({ query: async () => createMockQueryResult([]) });
-    },
-    load: jest.fn(),
-  },
}));

// Logger is not considered in all test cases
@@ -34,8 +34,8 @@
    "@logto/schemas": "workspace:*",
    "@logto/shared": "workspace:*",
    "@silverhand/essentials": "2.1.0",
-    "@withtyped/postgres": "^0.4.0",
-    "@withtyped/server": "^0.4.0",
+    "@withtyped/postgres": "^0.5.1",
+    "@withtyped/server": "^0.5.1",
    "chalk": "^5.0.0",
    "clean-deep": "^3.4.0",
    "date-fns": "^2.29.3",
@@ -17,15 +17,18 @@ const logListening = (type: 'core' | 'admin' = 'core') => {
 };

 const getTenantId = () => {
-  if (!EnvSet.values.isMultiTenancy) {
+  const { isDomainBasedMultiTenancy, isProduction, isIntegrationTest, developmentTenantId } =
+    EnvSet.values;
+
+  if (!isDomainBasedMultiTenancy) {
+    if ((!isProduction || isIntegrationTest) && developmentTenantId) {
+      return developmentTenantId;
+    }
+
     return defaultTenant;
   }

-  if (EnvSet.values.multiTenancyMode === 'domain') {
-    throw new Error('Not implemented');
-  }
-
-  return !EnvSet.values.isProduction && EnvSet.values.developmentTenantId;
+  throw new Error('Not implemented');
 };

 export default async function initApp(app: Koa): Promise<void> {
@@ -1,5 +1,3 @@
-import net from 'net';
-
import { tryThat } from '@logto/shared';
import { assertEnv, getEnv, getEnvAsStringArray } from '@silverhand/essentials';

@@ -7,7 +5,6 @@ import UrlSet from './UrlSet.js';
import { isTrue } from './parameters.js';
import { throwErrorWithDsnMessage } from './throw-errors.js';

const enableMultiTenancyKey = 'ENABLE_MULTI_TENANCY';
const developmentTenantIdKey = 'DEVELOPMENT_TENANT_ID';

type MultiTenancyMode = 'domain' | 'env';

@@ -21,11 +18,11 @@ export default class GlobalValues {
  public readonly httpsKey = process.env.HTTPS_KEY_PATH;
  public readonly isHttpsEnabled = Boolean(this.httpsCert && this.httpsKey);

-  public readonly isMultiTenancy = isTrue(getEnv(enableMultiTenancyKey));
-
  public readonly urlSet = new UrlSet(this.isHttpsEnabled, 3001);
  public readonly adminUrlSet = new UrlSet(this.isHttpsEnabled, 3002, 'ADMIN_');

+  public readonly isDomainBasedMultiTenancy = this.urlSet.endpoint.includes('*');
+
  // eslint-disable-next-line unicorn/consistent-function-scoping
  public readonly databaseUrl = tryThat(() => assertEnv('DB_URL'), throwErrorWithDsnMessage);
+  public readonly developmentTenantId = getEnv(developmentTenantIdKey);

@@ -41,12 +38,4 @@ export default class GlobalValues {
  public get endpoint(): string {
    return this.urlSet.endpoint;
  }
-
-  public get multiTenancyMode(): MultiTenancyMode {
-    const { hostname } = new URL(this.endpoint);
-
-    return this.isMultiTenancy && !net.isIP(hostname) && hostname !== 'localhost'
-      ? 'domain'
-      : 'env';
-  }
}
@@ -7,7 +7,6 @@ import { createLogtoConfigLibrary } from '#src/libraries/logto-config.js';
import { appendPath } from '#src/utils/url.js';

import GlobalValues from './GlobalValues.js';
-import { checkAlterationState } from './check-alteration-state.js';
import createPool from './create-pool.js';
import createQueryClient from './create-query-client.js';
import loadOidcValues from './oidc.js';

@@ -23,12 +22,20 @@ export enum MountedApps {

export class EnvSet {
  static values = new GlobalValues();
  static default = new EnvSet(EnvSet.values.dbUrl);

  static get isTest() {
    return this.values.isTest;
  }

  static get dbUrl() {
    return this.values.dbUrl;
  }

  static queryClient = createQueryClient(this.dbUrl, this.isTest);

  /** @deprecated Only for backward compatibility; Will be replaced soon. */
  static pool = createPool(this.dbUrl, this.isTest);

  #pool: Optional<DatabasePool>;
  // Use another pool for `withtyped` while adopting the new model,
  // as we cannot extract the original PgPool from slonik

@@ -76,12 +83,11 @@ export class EnvSet {
    this.#queryClient = createQueryClient(this.databaseUrl, EnvSet.isTest);

    const { getOidcConfigs } = createLogtoConfigLibrary(pool);
-    const [, oidcConfigs] = await Promise.all([checkAlterationState(pool), getOidcConfigs()]);
+    const oidcConfigs = await getOidcConfigs();
    this.#oidc = await loadOidcValues(
      appendPath(EnvSet.values.endpoint, '/oidc').toString(),
      oidcConfigs
    );
  }
}

await EnvSet.default.load();
@@ -3,6 +3,8 @@ import dotenv from 'dotenv';
import { findUp } from 'find-up';
import Koa from 'koa';

+import { checkAlterationState } from './env-set/check-alteration-state.js';
+
dotenv.config({ path: await findUp('.env', {}) });

// Import after env has been configured

@@ -17,10 +19,10 @@ try {
  });
  await initI18n();
  await loadConnectorFactories();

-  if (EnvSet.values.isMultiTenancy) {
-    await checkRowLevelSecurity(EnvSet.default.queryClient);
-  }
+  await Promise.all([
+    checkRowLevelSecurity(EnvSet.queryClient),
+    checkAlterationState(await EnvSet.pool),
+  ]);

  // Import last until init completed
  const { default: initApp } = await import('./app/init.js');
@@ -6,6 +6,7 @@ import Sinon from 'sinon';

import { EnvSet } from '#src/env-set/index.js';
import RequestError from '#src/errors/RequestError/index.js';
+import { mockEnvSet } from '#src/test-utils/env-set.js';
import { createContextWithRouteParameters } from '#src/utils/test-utils.js';

import type { WithAuthContext } from './koa-auth.js';

@@ -63,7 +64,7 @@ describe('koaAuth middleware', () => {
      developmentUserId: 'foo',
    });

-    await koaAuth(EnvSet.default)(ctx, next);
+    await koaAuth(mockEnvSet)(ctx, next);
    expect(ctx.auth).toEqual({ type: 'user', id: 'foo' });

    stub.restore();

@@ -78,7 +79,7 @@ describe('koaAuth middleware', () => {
      },
    };

-    await koaAuth(EnvSet.default)(mockCtx, next);
+    await koaAuth(mockEnvSet)(mockCtx, next);
    expect(mockCtx.auth).toEqual({ type: 'user', id: 'foo' });
  });

@@ -90,7 +91,7 @@ describe('koaAuth middleware', () => {
      isIntegrationTest: true,
    });

-    await koaAuth(EnvSet.default)(ctx, next);
+    await koaAuth(mockEnvSet)(ctx, next);
    expect(ctx.auth).toEqual({ type: 'user', id: 'foo' });

    stub.restore();

@@ -111,7 +112,7 @@ describe('koaAuth middleware', () => {
      },
    };

-    await koaAuth(EnvSet.default)(mockCtx, next);
+    await koaAuth(mockEnvSet)(mockCtx, next);
    expect(mockCtx.auth).toEqual({ type: 'user', id: 'foo' });

    stub.restore();

@@ -124,12 +125,12 @@ describe('koaAuth middleware', () => {
        authorization: 'Bearer access_token',
      },
    };
-    await koaAuth(EnvSet.default)(ctx, next);
+    await koaAuth(mockEnvSet)(ctx, next);
    expect(ctx.auth).toEqual({ type: 'user', id: 'fooUser' });
  });

  it('expect to throw if authorization header is missing', async () => {
-    await expect(koaAuth(EnvSet.default)(ctx, next)).rejects.toMatchError(authHeaderMissingError);
+    await expect(koaAuth(mockEnvSet)(ctx, next)).rejects.toMatchError(authHeaderMissingError);
  });

  it('expect to throw if authorization header token type not recognized ', async () => {

@@ -140,7 +141,7 @@ describe('koaAuth middleware', () => {
      },
    };

-    await expect(koaAuth(EnvSet.default)(ctx, next)).rejects.toMatchError(tokenNotSupportedError);
+    await expect(koaAuth(mockEnvSet)(ctx, next)).rejects.toMatchError(tokenNotSupportedError);
  });

  it('expect to throw if jwt sub is missing', async () => {

@@ -153,7 +154,7 @@ describe('koaAuth middleware', () => {
      },
    };

-    await expect(koaAuth(EnvSet.default)(ctx, next)).rejects.toMatchError(jwtSubMissingError);
+    await expect(koaAuth(mockEnvSet)(ctx, next)).rejects.toMatchError(jwtSubMissingError);
  });

  it('expect to have `client` type per jwt verify result', async () => {

@@ -166,7 +167,7 @@ describe('koaAuth middleware', () => {
      },
    };

-    await koaAuth(EnvSet.default)(ctx, next);
+    await koaAuth(mockEnvSet)(ctx, next);
    expect(ctx.auth).toEqual({ type: 'app', id: 'bar' });
  });

@@ -180,7 +181,7 @@ describe('koaAuth middleware', () => {
      },
    };

-    await expect(koaAuth(EnvSet.default, UserRole.Admin)(ctx, next)).rejects.toMatchError(
+    await expect(koaAuth(mockEnvSet, UserRole.Admin)(ctx, next)).rejects.toMatchError(
      forbiddenError
    );
  });

@@ -197,7 +198,7 @@ describe('koaAuth middleware', () => {
      },
    };

-    await expect(koaAuth(EnvSet.default, UserRole.Admin)(ctx, next)).rejects.toMatchError(
+    await expect(koaAuth(mockEnvSet, UserRole.Admin)(ctx, next)).rejects.toMatchError(
      forbiddenError
    );
  });

@@ -213,7 +214,7 @@ describe('koaAuth middleware', () => {
      },
    };

-    await expect(koaAuth(EnvSet.default)(ctx, next)).rejects.toMatchError(
+    await expect(koaAuth(mockEnvSet)(ctx, next)).rejects.toMatchError(
      new RequestError({ code: 'auth.unauthorized', status: 401 }, new Error('unknown error'))
    );
  });
@@ -19,7 +19,7 @@ export default function koaConsoleRedirectProxy<
    return;
  }

-  if (ctx.path === '/console/welcome' && hasUser) {
+  if ((ctx.path === '/' || ctx.path === '/console/welcome') && hasUser) {
    ctx.redirect('/console');

    return;
@@ -3,7 +3,7 @@ import { createMockUtils } from '@logto/shared/esm';
import snakecaseKeys from 'snakecase-keys';

import { mockApplication } from '#src/__mocks__/index.js';
-import { EnvSet } from '#src/env-set/index.js';
+import { mockEnvSet } from '#src/test-utils/env-set.js';
import { MockQueries } from '#src/test-utils/tenant.js';

import { getConstantClientMetadata } from './utils.js';

@@ -48,7 +48,7 @@ const now = Date.now();
describe('postgres Adapter', () => {
  it('Client Modal', async () => {
    const rejectError = new Error('Not implemented');
-    const adapter = postgresAdapter(EnvSet.default, queries, 'Client');
+    const adapter = postgresAdapter(mockEnvSet, queries, 'Client');

    await expect(adapter.upsert('client', {}, 0)).rejects.toMatchError(rejectError);
    await expect(adapter.findByUserCode('foo')).rejects.toMatchError(rejectError);

@@ -72,7 +72,7 @@ describe('postgres Adapter', () => {
      client_id,
      client_name,
      client_secret,
-      ...getConstantClientMetadata(EnvSet.default, type),
+      ...getConstantClientMetadata(mockEnvSet, type),
      ...snakecaseKeys(oidcClientMetadata),
      ...customClientMetadata,
    });

@@ -85,7 +85,7 @@ describe('postgres Adapter', () => {
    const id = 'fooId';
    const grantId = 'grantId';
    const expireAt = 60;
-    const adapter = postgresAdapter(EnvSet.default, queries, modelName);
+    const adapter = postgresAdapter(mockEnvSet, queries, modelName);

    await adapter.upsert(id, { uid, userCode }, expireAt);
    expect(upsertInstance).toBeCalledWith({
@@ -1,4 +1,4 @@
-import { EnvSet } from '#src/env-set/index.js';
+import { mockEnvSet } from '#src/test-utils/env-set.js';
import { MockTenant } from '#src/test-utils/tenant.js';

import initOidc from './init.js';

@@ -7,6 +7,6 @@ describe('oidc provider init', () => {
  it('init should not throw', async () => {
    const { queries, libraries } = new MockTenant();

-    expect(() => initOidc(EnvSet.default, queries, libraries)).not.toThrow();
+    expect(() => initOidc(mockEnvSet, queries, libraries)).not.toThrow();
  });
});
@@ -1,6 +1,6 @@
import { ApplicationType, CustomClientMetadataKey, GrantType } from '@logto/schemas';

-import { EnvSet } from '#src/env-set/index.js';
+import { mockEnvSet } from '#src/test-utils/env-set.js';

import {
  isOriginAllowed,

@@ -10,22 +10,22 @@ import {
} from './utils.js';

describe('getConstantClientMetadata()', () => {
-  expect(getConstantClientMetadata(EnvSet.default, ApplicationType.SPA)).toEqual({
+  expect(getConstantClientMetadata(mockEnvSet, ApplicationType.SPA)).toEqual({
    application_type: 'web',
    grant_types: [GrantType.AuthorizationCode, GrantType.RefreshToken],
    token_endpoint_auth_method: 'none',
  });
-  expect(getConstantClientMetadata(EnvSet.default, ApplicationType.Native)).toEqual({
+  expect(getConstantClientMetadata(mockEnvSet, ApplicationType.Native)).toEqual({
    application_type: 'native',
    grant_types: [GrantType.AuthorizationCode, GrantType.RefreshToken],
    token_endpoint_auth_method: 'none',
  });
-  expect(getConstantClientMetadata(EnvSet.default, ApplicationType.Traditional)).toEqual({
+  expect(getConstantClientMetadata(mockEnvSet, ApplicationType.Traditional)).toEqual({
    application_type: 'web',
    grant_types: [GrantType.AuthorizationCode, GrantType.RefreshToken],
    token_endpoint_auth_method: 'client_secret_basic',
  });
-  expect(getConstantClientMetadata(EnvSet.default, ApplicationType.MachineToMachine)).toEqual({
+  expect(getConstantClientMetadata(mockEnvSet, ApplicationType.MachineToMachine)).toEqual({
    application_type: 'web',
    grant_types: [GrantType.ClientCredentials],
    token_endpoint_auth_method: 'client_secret_basic',
@@ -25,6 +25,10 @@ const middlewareList = [
  return mock;
});

+mockEsm('./utils.js', () => ({
+  getTenantDatabaseDsn: async () => 'postgres://mock.db.url',
+}));
+
// eslint-disable-next-line unicorn/consistent-function-scoping
mockEsmDefault('#src/oidc/init.js', () => () => createMockProvider());
@@ -23,23 +23,12 @@ import initRouter from '#src/routes/init.js';
import Libraries from './Libraries.js';
import Queries from './Queries.js';
import type TenantContext from './TenantContext.js';
-import { defaultTenant } from './consts.js';
import { getTenantDatabaseDsn } from './utils.js';

export default class Tenant implements TenantContext {
  static async create(id: string): Promise<Tenant> {
-    if (!EnvSet.values.isMultiTenancy) {
-      if (id !== defaultTenant) {
-        throw new Error(
-          `Trying to create a tenant instance with ID ${id} in single-tenancy mode. This is a no-op.`
-        );
-      }
-
-      return new Tenant(EnvSet.default, id);
-    }
-
-    // In multi-tenancy mode, treat the default database URL as the management URL
-    const envSet = new EnvSet(await getTenantDatabaseDsn(EnvSet.default, id));
+    // Treat the default database URL as the management URL
+    const envSet = new EnvSet(await getTenantDatabaseDsn(id));
    await envSet.load();

    return new Tenant(envSet, id);

@@ -56,7 +45,7 @@ export default class Tenant implements TenantContext {
    return mount(this.app);
  }

-  constructor(public readonly envSet: EnvSet, public readonly id: string) {
+  private constructor(public readonly envSet: EnvSet, public readonly id: string) {
    const modelRouters = createModelRouters(envSet.queryClient);
    const queries = new Queries(envSet.pool);
    const libraries = new Libraries(queries, modelRouters);
@@ -12,6 +12,7 @@ class TenantPool {
      return tenant;
    }

+    console.log('Init tenant:', tenantId);
    const newTenant = await Tenant.create(tenantId);
    this.cache.set(tenantId, newTenant);
@@ -1,37 +1,44 @@
+import { Systems } from '@logto/schemas';
import { Tenants } from '@logto/schemas/models';
import { isKeyInObject } from '@logto/shared';
-import { conditionalString } from '@silverhand/essentials';
+import { conditional, conditionalString } from '@silverhand/essentials';
import { identifier, sql } from '@withtyped/postgres';
import type { QueryClient } from '@withtyped/server';
import { parseDsn, stringifyDsn } from 'slonik';

-import type { EnvSet } from '#src/env-set/index.js';
+import { EnvSet } from '#src/env-set/index.js';

/**
 * This function is to fetch the tenant password for the corresponding Postgres user.
 *
 * In multi-tenancy mode, Logto should ALWAYS use a restricted user with RLS enforced to ensure data isolation between tenants.
 */
-export const getTenantDatabaseDsn = async (defaultEnvSet: EnvSet, tenantId: string) => {
+export const getTenantDatabaseDsn = async (tenantId: string) => {
+  const { queryClient, dbUrl } = EnvSet;
  const {
    tableName,
-    rawKeys: { id, dbUserPassword },
+    rawKeys: { id, dbUser, dbUserPassword },
  } = Tenants;

-  const { rows } = await defaultEnvSet.queryClient.query(sql`
-    select ${identifier(dbUserPassword)}
+  const { rows } = await queryClient.query(sql`
+    select ${identifier(dbUser)}, ${identifier(dbUserPassword)}
    from ${identifier(tableName)}
    where ${identifier(id)} = ${tenantId}
  `);
-  const password = rows[0]?.db_user_password;

-  if (!password || typeof password !== 'string') {
+  if (!rows[0]) {
    throw new Error(`Cannot find valid tenant credentials for ID ${tenantId}`);
  }

-  const options = parseDsn(defaultEnvSet.databaseUrl);
+  const options = parseDsn(dbUrl);
+  const username = rows[0][dbUser];
+  const password = rows[0][dbUserPassword];

-  return stringifyDsn({ ...options, username: `tenant_${tenantId}`, password });
+  return stringifyDsn({
+    ...options,
+    username: conditional(typeof username === 'string' && username),
+    password: conditional(typeof password === 'string' && password),
+  });
};

export const checkRowLevelSecurity = async (client: QueryClient) => {

@@ -42,9 +49,9 @@ export const checkRowLevelSecurity = async (client: QueryClient) => {
    and rowsecurity=false
  `);

-  if (rows.length > 0) {
+  if (rows.some(({ tablename }) => tablename !== Systems.table)) {
    throw new Error(
-      'Row-level security has to be enforced on EVERY table when starting Logto in multi-tenancy mode.\n' +
+      'Row-level security has to be enforced on EVERY business table when starting Logto.\n' +
      `Found following table(s) without RLS: ${rows
        .map((row) => conditionalString(isKeyInObject(row, 'tablename') && String(row.tablename)))
        .join(', ')}\n\n` +
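To connect the dots between this DSN lookup and the RLS setup elsewhere in this commit: the returned credentials log in as the per-tenant role created by `createTenant`, and the policy installed by `_after_each.sql` then scopes every query to that tenant's rows. A rough illustration, with a made-up tenant ID and database name:

-- Connected as the role logto_tenant_logto_default (tenant 'default' on database 'logto'):
-- the policy `using (tenant_id = (select id from tenants where db_user = current_user))`
-- means this query only ever returns rows whose tenant_id is 'default'.
select id, tenant_id from applications;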
packages/core/src/test-utils/env-set.ts (new file, 5 lines)
@@ -0,0 +1,5 @@
import { EnvSet } from '#src/env-set/index.js';

export const mockEnvSet = new EnvSet(EnvSet.values.dbUrl);

await mockEnvSet.load();
@@ -1,11 +1,11 @@
import { createMockPool, createMockQueryResult } from 'slonik';

-import { EnvSet } from '#src/env-set/index.js';
import { createModelRouters } from '#src/model-routers/index.js';
import Libraries from '#src/tenants/Libraries.js';
import Queries from '#src/tenants/Queries.js';
import type TenantContext from '#src/tenants/TenantContext.js';

+import { mockEnvSet } from './env-set.js';
import type { GrantMock } from './oidc-provider.js';
import { createMockProvider } from './oidc-provider.js';
import { MockQueryClient } from './query-client.js';

@@ -45,7 +45,7 @@ export type DeepPartial<T> = T extends object
export type Partial2<T> = { [key in keyof T]?: Partial<T[key]> };

export class MockTenant implements TenantContext {
-  public envSet = EnvSet.default;
+  public envSet = mockEnvSet;
  public queries: Queries;
  public libraries: Libraries;
  public modelRouters = createModelRouters(new MockQueryClient());
@@ -53,6 +53,6 @@
  },
  "prettier": "@silverhand/eslint-config/.prettierrc",
  "dependencies": {
-    "@withtyped/server": "^0.4.0"
+    "@withtyped/server": "^0.5.1"
  }
}
packages/schemas/README.md (new file, 24 lines)
@@ -0,0 +1,24 @@
# @logto/schemas

The central package for all database schemas and their TypeScript definitions and utilities.

## Table init

The Logto CLI will pick up all necessary SQL queries in `tables/` and `src/models/` and run them in the following order:

1. Run `tables/_before_all.sql`
2. Run `tables/*.sql` with the snippet `/* init_order = <number> */` in ascending order of `<number>`
3. Run `tables/*.sql` without the `init_order` snippet in ascending order of filename (`tables/`) or table name (`src/models/`)
4. Run `tables/_after_all.sql`

Additional rules for steps 2 and 3:

- If no snippet `/* no_after_each */` is found, run `tables/_after_each.sql` after each SQL file
- Exclude lifecycle scripts `tables/_[lifecycle].sql`, where `[lifecycle]` can be one of:
  - `after_all`
  - `after_each`
  - `before_all`

In the `after_each` lifecycle script, you can use `${name}` to represent the current filename (`tables/`) or table name (`src/models/`).

In all lifecycle scripts, you can use `${database}` to represent the current database.
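As an illustration of these rules, a hypothetical table file could opt into an early init slot and skip the per-table lifecycle like this (the `foos` table is made up for the example and is not part of this commit):

/* init_order = 0.5 */
create table foos (
  tenant_id varchar(21) not null
    references tenants (id) on update cascade on delete cascade,
  id varchar(21) not null,
  primary key (id)
);
/* no_after_each */  -- _after_each.sql (trigger + RLS policy) will NOT run for this file

With this marker present, the file runs after `_before_all.sql` and before any file without an `init_order` snippet, and `_after_each.sql` is skipped for it.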
@@ -0,0 +1,181 @@
import { generateStandardId } from '@logto/core-kit';
import type { CommonQueryMethods } from 'slonik';
import { sql } from 'slonik';
import { raw } from 'slonik-sql-tag-raw';

import type { AlterationScript } from '../lib/types/alteration.js';

const tables: string[] = [
  'applications_roles',
  'applications',
  'connectors',
  'custom_phrases',
  'logs',
  'logto_configs',
  'oidc_model_instances',
  'passcodes',
  'resources',
  'roles_scopes',
  'roles',
  'scopes',
  'sign_in_experiences',
  'users_roles',
  'users',
  'hooks',
];

const defaultTenantId = 'default';

const getId = (value: string) => sql.identifier([value]);

const getDatabaseName = async (pool: CommonQueryMethods) => {
  const { currentDatabase } = await pool.one<{ currentDatabase: string }>(sql`
    select current_database();
  `);

  return currentDatabase.replaceAll('-', '_');
};

const alteration: AlterationScript = {
  up: async (pool) => {
    const database = await getDatabaseName(pool);

    // Alter hooks table for multi-tenancy (missed before)
    await pool.query(sql`
      alter table hooks
        add column tenant_id varchar(21) not null default 'default'
          references tenants (id) on update cascade on delete cascade,
        alter column id type varchar(21); -- OK to downsize since we use length 21 for ID generation in core

      alter table hooks
        alter column tenant_id drop default;

      create index hooks__id on hooks (tenant_id, id);

      drop index hooks__event;
      create index hooks__event on hooks (tenant_id, event);

      create trigger set_tenant_id before insert on hooks
        for each row execute procedure set_tenant_id();
    `);

    // Add db_user column to tenants table
    await pool.query(sql`
      alter table tenants
        add column db_user varchar(128),
        add constraint tenants__db_user
          unique (db_user);
    `);

    // Create role and setup privileges
    const baseRole = `logto_tenant_${database}`;
    const baseRoleId = getId(baseRole);

    // See `_after_all.sql` for comments
    await pool.query(sql`
      create role ${baseRoleId} noinherit;

      grant select, insert, update, delete
        on all tables
        in schema public
        to ${baseRoleId};

      revoke all privileges
        on table tenants
        from ${baseRoleId};

      grant select (id, db_user)
        on table tenants
        to ${baseRoleId};

      alter table tenants enable row level security;

      create policy tenants_tenant_id on tenants
        to ${baseRoleId}
        using (db_user = current_user);

      revoke all privileges
        on table systems
        from ${baseRoleId};
    `);

    // Enable RLS
    await Promise.all(
      tables.map(async (tableName) =>
        pool.query(sql`
          alter table ${getId(tableName)} enable row level security;

          create policy ${getId(`${tableName}_tenant_id`)} on ${getId(tableName)}
            to ${baseRoleId}
            using (tenant_id = (select id from tenants where db_user = current_user));
        `)
      )
    );

    // Create database role for default tenant
    const role = `logto_tenant_${database}_${defaultTenantId}`;
    const password = generateStandardId(32);

    await pool.query(sql`
      update tenants
        set db_user=${role}, db_user_password=${password}
        where id=${defaultTenantId};
    `);
    await pool.query(sql`
      create role ${sql.identifier([role])} with inherit login
        password '${raw(password)}'
        in role ${sql.identifier([baseRole])};
    `);
  },
  down: async (pool) => {
    const database = await getDatabaseName(pool);
    const baseRoleId = getId(`logto_tenant_${database}`);
    const role = `logto_tenant_${database}_${defaultTenantId}`;

    // Disable RLS
    await Promise.all(
      tables.map(async (tableName) =>
        pool.query(sql`
          drop policy ${getId(`${tableName}_tenant_id`)} on ${getId(tableName)};
          alter table ${getId(tableName)} disable row level security;
        `)
      )
    );

    // Drop role
    await pool.query(sql`
      drop role ${getId(role)};

      revoke all privileges
        on all tables
        in schema public
        from ${baseRoleId};

      drop policy tenants_tenant_id on tenants;
      alter table tenants disable row level security;

      drop role ${baseRoleId};
    `);

    // Drop db_user column from tenants table
    await pool.query(sql`
      alter table tenants
        drop column db_user;
    `);

    // Revert hooks table from multi-tenancy
    await pool.query(sql`
      drop index hooks__id;

      alter table hooks
        drop column tenant_id,
        alter column id type varchar(32);

      create index hooks__event on hooks (event);

      drop trigger set_tenant_id on hooks;
    `);
  },
};

export default alteration;
@@ -83,7 +83,7 @@
    "@logto/language-kit": "workspace:*",
    "@logto/phrases": "workspace:*",
    "@logto/phrases-ui": "workspace:*",
-    "@withtyped/server": "^0.4.0",
+    "@withtyped/server": "^0.5.1",
    "zod": "^3.20.2"
  }
}
@@ -45,15 +45,20 @@ export const hookConfigGuard: z.ZodType<HookConfig> = z.object({

export const Hooks = createModel(/* sql */ `
  create table hooks (
-    id varchar(32) not null,
+    tenant_id varchar(21) not null
+      references tenants (id) on update cascade on delete cascade,
+    id varchar(21) not null,
    event varchar(128) not null,
    config jsonb /* @use HookConfig */ not null,
    created_at timestamptz not null default(now()),
    primary key (id)
  );

-  create index hooks__event on hooks (event);
+  create index hooks__id on hooks (tenant_id, id);
+
+  create index hooks__event on hooks (tenant_id, event);
`)
  .extend('id', { default: () => generateStandardId(), readonly: true })
  .extend('event', z.nativeEnum(HookEvent)) // Tried to use `.refine()` to show the correct error path, but not working.
-  .extend('config', hookConfigGuard);
+  .extend('config', hookConfigGuard)
+  .exclude('tenantId');
@@ -4,7 +4,11 @@ export const Tenants = createModel(/* sql */ `
  /* init_order = 0 */
  create table tenants (
    id varchar(21) not null,
+    db_user varchar(128),
    db_user_password varchar(128),
-    primary key (id)
+    primary key (id),
+    constraint tenants__db_user
+      unique (db_user)
  );
  /* no_after_each */
`);
@@ -4,8 +4,4 @@ import type { Tenants } from '../models/tenants.js';

export const defaultTenantId = 'default';
export const adminTenantId = 'admin';

-export const defaultTenant: InferModelType<typeof Tenants> = {
-  id: defaultTenantId,
-  dbUserPassword: null,
-};
+export type TenantModel = InferModelType<typeof Tenants>;
packages/schemas/tables/_after_all.sql (new file, 31 lines)
@@ -0,0 +1,31 @@
/* This SQL will run after all other queries. */

grant select, insert, update, delete
  on all tables
  in schema public
  to logto_tenant_${database};

-- Security policies for tenants table --

revoke all privileges
  on table tenants
  from logto_tenant_${database};

/* Allow limited select to perform RLS query in `after_each` (using select ... from tenants ...) */
grant select (id, db_user)
  on table tenants
  to logto_tenant_${database};

alter table tenants enable row level security;

/* Create RLS policy to minimize the privilege */
create policy tenants_tenant_id on tenants
  to logto_tenant_${database}
  using (db_user = current_user);

-- End --

/* Revoke all privileges on systems table for tenant roles */
revoke all privileges
  on table systems
  from logto_tenant_${database};
packages/schemas/tables/_after_each.sql (new file, 10 lines)
@@ -0,0 +1,10 @@
/* This SQL will run after each query files except lifecycle scripts and files that explicitly exclude `after_each`. */

create trigger set_tenant_id before insert on ${name}
  for each row execute procedure set_tenant_id();

alter table ${name} enable row level security;

create policy ${name}_tenant_id on ${name}
  to logto_tenant_${database}
  using (tenant_id = (select id from tenants where db_user = current_user));
packages/schemas/tables/_before_all.sql (new file, 3 lines)
@@ -0,0 +1,3 @@
/* This SQL will run before all other queries. */

create role logto_tenant_${database} noinherit;
@@ -12,3 +12,5 @@ $$ begin

  return new;
end; $$ language plpgsql;
+
+/* no_after_each */
@@ -18,6 +18,3 @@ create table applications (

create index applications__id
  on applications (tenant_id, id);
-
-create trigger set_tenant_id before insert on applications
-  for each row execute procedure set_tenant_id();

@@ -13,6 +13,3 @@ create table applications_roles (

create index applications_roles__id
  on applications_roles (tenant_id, id);
-
-create trigger set_tenant_id before insert on applications_roles
-  for each row execute procedure set_tenant_id();

@@ -12,6 +12,3 @@ create table connectors (

create index connectors__id
  on connectors (tenant_id, id);
-
-create trigger set_tenant_id before insert on connectors
-  for each row execute procedure set_tenant_id();

@@ -11,6 +11,3 @@ create table custom_phrases (

create index custom_phrases__id
  on custom_phrases (tenant_id, id);
-
-create trigger set_tenant_id before insert on custom_phrases
-  for each row execute procedure set_tenant_id();

@@ -19,6 +19,3 @@ create index logs__user_id

create index logs__application_id
  on logs (tenant_id, (payload->>'application_id') nulls last);
-
-create trigger set_tenant_id before insert on logs
-  for each row execute procedure set_tenant_id();

@@ -5,6 +5,3 @@ create table logto_configs (
  value jsonb /* @use ArbitraryObject */ not null default '{}'::jsonb,
  primary key (tenant_id, key)
);
-
-create trigger set_tenant_id before insert on logto_configs
-  for each row execute procedure set_tenant_id();

@@ -31,6 +31,3 @@ create index oidc_model_instances__model_name_payload_grant_id
  model_name,
  (payload->>'grantId')
);
-
-create trigger set_tenant_id before insert on oidc_model_instances
-  for each row execute procedure set_tenant_id();

@@ -24,6 +24,3 @@ create index passcodes__email_type

create index passcodes__phone_type
  on passcodes (tenant_id, phone, type);
-
-create trigger set_tenant_id before insert on passcodes
-  for each row execute procedure set_tenant_id();

@@ -14,6 +14,3 @@ create table resources (

create index resources__id
  on resources (tenant_id, id);
-
-create trigger set_tenant_id before insert on resources
-  for each row execute procedure set_tenant_id();

@@ -13,6 +13,3 @@ create table roles (

create index roles__id
  on roles (tenant_id, id);
-
-create trigger set_tenant_id before insert on roles
-  for each row execute procedure set_tenant_id();

@@ -13,6 +13,3 @@ create table roles_scopes (

create index roles_scopes__id
  on roles_scopes (tenant_id, id);
-
-create trigger set_tenant_id before insert on roles_scopes
-  for each row execute procedure set_tenant_id();

@@ -16,6 +16,3 @@ create table scopes (

create index scopes__id
  on scopes (tenant_id, id);
-
-create trigger set_tenant_id before insert on scopes
-  for each row execute procedure set_tenant_id();

@@ -17,6 +17,3 @@ create table sign_in_experiences (

create index sign_in_experiences__id
  on sign_in_experiences (tenant_id, id);
-
-create trigger set_tenant_id before insert on sign_in_experiences
-  for each row execute procedure set_tenant_id();

@@ -3,3 +3,5 @@ create table systems (
  value jsonb /* @use ArbitraryObject */ not null default '{}'::jsonb,
  primary key (key)
);
+
+/* no_after_each */

@@ -27,6 +27,3 @@ create index users__id

create index users__name
  on users (tenant_id, name);
-
-create trigger set_tenant_id before insert on users
-  for each row execute procedure set_tenant_id();

@@ -13,6 +13,3 @@ create table users_roles (

create index users_roles__id
  on users_roles (tenant_id, id);
-
-create trigger set_tenant_id before insert on users_roles
-  for each row execute procedure set_tenant_id();
@@ -280,8 +280,8 @@ importers:
      '@types/semver': ^7.3.12
      '@types/sinon': ^10.0.13
      '@types/supertest': ^2.0.11
-      '@withtyped/postgres': ^0.4.0
-      '@withtyped/server': ^0.4.0
+      '@withtyped/postgres': ^0.5.1
+      '@withtyped/server': ^0.5.1
      chalk: ^5.0.0
      clean-deep: ^3.4.0
      copyfiles: ^2.4.1

@@ -339,8 +339,8 @@ importers:
      '@logto/schemas': link:../schemas
      '@logto/shared': link:../shared
      '@silverhand/essentials': 2.1.0
-      '@withtyped/postgres': 0.4.0_@withtyped+server@0.4.0
-      '@withtyped/server': 0.4.0
+      '@withtyped/postgres': 0.5.1_@withtyped+server@0.5.1
+      '@withtyped/server': 0.5.1
      chalk: 5.1.2
      clean-deep: 3.4.0
      date-fns: 2.29.3

@@ -487,7 +487,7 @@ importers:
      '@types/jest': ^29.1.2
      '@types/jest-environment-puppeteer': ^5.0.2
      '@types/node': ^18.11.18
-      '@withtyped/server': ^0.4.0
+      '@withtyped/server': ^0.5.1
      dotenv: ^16.0.0
      eslint: ^8.21.0
      got: ^12.5.3

@@ -501,7 +501,7 @@ importers:
      text-encoder: ^0.0.4
      typescript: ^4.9.4
    dependencies:
-      '@withtyped/server': 0.4.0
+      '@withtyped/server': 0.5.1
    devDependencies:
      '@jest/types': 29.1.2
      '@logto/connector-kit': link:../toolkit/connector-kit

@@ -593,7 +593,7 @@ importers:
      '@types/jest': ^29.1.2
      '@types/node': ^18.11.18
      '@types/pluralize': ^0.0.29
-      '@withtyped/server': ^0.4.0
+      '@withtyped/server': ^0.5.1
      camelcase: ^7.0.0
      eslint: ^8.21.0
      jest: ^29.1.2

@@ -611,7 +611,7 @@ importers:
      '@logto/language-kit': link:../toolkit/language-kit
      '@logto/phrases': link:../phrases
      '@logto/phrases-ui': link:../phrases-ui
-      '@withtyped/server': 0.4.0
+      '@withtyped/server': 0.5.1
      zod: 3.20.2
    devDependencies:
      '@silverhand/eslint-config': 1.3.0_k3lfx77tsvurbevhk73p7ygch4

@@ -4473,21 +4473,21 @@ packages:
      eslint-visitor-keys: 3.3.0
    dev: true

-  /@withtyped/postgres/0.4.0_@withtyped+server@0.4.0:
-    resolution: {integrity: sha512-jzDdXhGNkIBeWlnEU3hft2CriyWgabI46a5n5T7faMUkHzjHlgIH4IscdT8Vq7n3YIdAC6ovFtQW8g6SNyVvlg==}
+  /@withtyped/postgres/0.5.1_@withtyped+server@0.5.1:
+    resolution: {integrity: sha512-Le4iIHEc4LRgDn4rjnwbGJ/J15PpqEoltgoZAOhYgnZznKBzkp4W3vxbav29x7IMOvzgum+Jo5HOW1q0kRfROg==}
    peerDependencies:
-      '@withtyped/server': ^0.4.0
+      '@withtyped/server': ^0.5.1
    dependencies:
      '@types/pg': 8.6.6
-      '@withtyped/server': 0.4.0
+      '@withtyped/server': 0.5.1
      '@withtyped/shared': 0.2.0
      pg: 8.8.0
    transitivePeerDependencies:
      - pg-native
    dev: false

-  /@withtyped/server/0.4.0:
-    resolution: {integrity: sha512-72WUKDnhJl5FZurPUrvrwCcyIrj+U5Vq4vghmB/Lg+Bb9eTgSFbsaKujJtJNFor+1eSEDdCNNNUvOxfwZEz2JQ==}
+  /@withtyped/server/0.5.1:
+    resolution: {integrity: sha512-CR7Y4R2YsUNJ7STEzhJjBjCKIJg49r2Jun5tFuTmmH8IAdHacisWPuKyGMz8o8jnatGTBRJNvc2wjjhg0l8ptw==}
    dependencies:
      '@withtyped/shared': 0.2.0
    dev: false