mirror of https://github.com/logto-io/logto.git (synced 2024-12-16 20:26:19 -05:00)

refactor: add function and trigger to set tenant_id

This commit is contained in:
parent 52c2222f4a
commit de4b0bae16

31 changed files with 169 additions and 24 deletions
@@ -49,6 +49,33 @@ const queryDatabaseManifest = async (database) => {
     order by indexname asc;
   `);
 
+  const { rows: funcs } = await pool.query(/* sql */`
+    select n.nspname as schema_name,
+      p.proname as specific_name,
+      case p.prokind
+        when 'f' then 'FUNCTION'
+        when 'p' then 'PROCEDURE'
+        when 'a' then 'AGGREGATE'
+        when 'w' then 'WINDOW'
+      end as kind,
+      l.lanname as language,
+      case when l.lanname = 'internal' then p.prosrc
+        else pg_get_functiondef(p.oid)
+      end as definition,
+      pg_get_function_arguments(p.oid) as arguments,
+      t.typname as return_type
+    from pg_proc p
+    left join pg_namespace n on p.pronamespace = n.oid
+    left join pg_language l on p.prolang = l.oid
+    left join pg_type t on t.oid = p.prorettype
+    where n.nspname not in ('pg_catalog', 'information_schema')
+      and l.lanname != 'c' -- Filter out c functions since we don't use them
+    order by schema_name,
+      specific_name;
+  `);
+
+  const { rows: triggers } = await pool.query(/* sql */`select * from information_schema.triggers;`);
+
   // Omit generated ids and values
   return {
     tables: omitArray(tables, 'table_catalog'),
@@ -72,6 +99,8 @@ const queryDatabaseManifest = async (database) => {
       'conexclop',
     ),
     indexes,
+    funcs,
+    triggers: omitArray(triggers, 'trigger_catalog', 'event_object_catalog'),
  };
};
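A quick way to confirm the manifest now picks up user-defined functions is to run a similar pg_proc lookup by hand; a minimal sketch (set_tenant_id refers to the trigger function introduced later in this commit):

  -- List the trigger function by name, mirroring the columns the manifest query collects.
  select p.proname, l.lanname as language, pg_get_function_arguments(p.oid) as arguments
  from pg_proc p
    join pg_language l on p.prolang = l.oid
  where p.proname = 'set_tenant_id';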
@@ -9,7 +9,7 @@ import { getPathInModule } from '../../../utilities.js';
 import type { AlterationFile } from './type.js';
 
 const currentDirname = path.dirname(fileURLToPath(import.meta.url));
-const alterationFilenameRegex = /-(\d+)-?.*\.js$/;
+const alterationFilenameRegex = /-([\d.]+)-?.*\.js$/;
 
 export const getTimestampFromFilename = (filename: string) => {
   const match = alterationFilenameRegex.exec(filename);
@@ -35,7 +35,7 @@ import { getAlterationDirectory } from '../alteration/utils.js';
 import { oidcConfigReaders } from './oidc-config.js';
 
 const getExplicitOrder = (query: string) => {
-  const matched = /\/\*\s*init_order\s*=\s*(\d+)\s*\*\//.exec(query)?.[1];
+  const matched = /\/\*\s*init_order\s*=\s*([\d.]+)\s*\*\//.exec(query)?.[1];
 
   return matched ? Number(matched) : undefined;
 };
@@ -43,7 +43,7 @@ export const createCustomPhraseQueries = (pool: CommonQueryMethods) => {
     {
       returning: true,
       onConflict: {
-        fields: [fields.languageTag],
+        fields: [fields.tenantId, fields.languageTag],
         setExcludedFields: [fields.translation],
       },
     }
@@ -44,7 +44,7 @@ describe('oidc-model-instance query', () => {
     const expectSql = sql`
       insert into ${table} ("model_name", "id", "payload", "expires_at")
       values ($1, $2, $3, to_timestamp($4::double precision / 1000))
-      on conflict ("model_name", "id") do update
+      on conflict ("tenant_id", "model_name", "id") do update
       set "payload"=excluded."payload", "expires_at"=excluded."expires_at"
     `;
 
@@ -63,7 +63,7 @@ export const createOidcModelInstanceQueries = (pool: CommonQueryMethods) => {
       OidcModelInstances,
       {
         onConflict: {
-          fields: [fields.modelName, fields.id],
+          fields: [fields.tenantId, fields.modelName, fields.id],
           setExcludedFields: [fields.payload, fields.expiresAt],
         },
       }
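The upserts emitted by these queries now use the tenant-scoped conflict target. A sketch of the resulting SQL for oidc_model_instances (placeholders instead of bound values), which relies on the unique constraint over (tenant_id, model_name, id) added to the schema later in this commit:

  insert into oidc_model_instances ("model_name", "id", "payload", "expires_at")
    values ($1, $2, $3, to_timestamp($4::double precision / 1000))
    on conflict ("tenant_id", "model_name", "id") do update
    set "payload" = excluded."payload", "expires_at" = excluded."expires_at";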
@@ -11,9 +11,9 @@ const { table, fields } = convertToIdentifiers(RolesScopes);
 export const createRolesScopesQueries = (pool: CommonQueryMethods) => {
   const insertRolesScopes = async (rolesScopes: CreateRolesScope[]) =>
     pool.query(sql`
-      insert into ${table} (${fields.scopeId}, ${fields.roleId}) values
+      insert into ${table} (${fields.id}, ${fields.scopeId}, ${fields.roleId}) values
       ${sql.join(
-        rolesScopes.map(({ scopeId, roleId }) => sql`(${scopeId}, ${roleId})`),
+        rolesScopes.map(({ id, scopeId, roleId }) => sql`(${id}, ${scopeId}, ${roleId})`),
         sql`, `
       )}
     `);
@@ -44,9 +44,9 @@ export const createUsersRolesQueries = (pool: CommonQueryMethods) => {
 
   const insertUsersRoles = async (usersRoles: CreateUsersRole[]) =>
     pool.query(sql`
-      insert into ${table} (${fields.userId}, ${fields.roleId}) values
+      insert into ${table} (${fields.id}, ${fields.userId}, ${fields.roleId}) values
       ${sql.join(
-        usersRoles.map(({ userId, roleId }) => sql`(${userId}, ${roleId})`),
+        usersRoles.map(({ id, userId, roleId }) => sql`(${id}, ${userId}, ${roleId})`),
         sql`, `
       )}
     `);
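Rows in these relation tables now carry their own id column, so the generated statement takes the shape below (the id values are hypothetical literals); tenant_id is still omitted and filled in by the set_tenant_id trigger:

  insert into roles_scopes (id, scope_id, role_id)
    values ('rel_1', 'scope_1', 'role_1'), ('rel_2', 'scope_2', 'role_1');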
@@ -143,7 +143,6 @@ export default async function submitInteraction(
   log?: LogEntry
 ) {
   const { hasActiveUsers, findUserById, updateUserById } = queries.users;
-  const { insertUsersRoles } = queries.usersRoles;
 
   const {
     users: { generateUserId, insertUser },
@@ -7,19 +7,16 @@ const alteration: AlterationScript = {
     await pool.query(sql`
       alter table oidc_model_instances
         drop constraint oidc_model_instances_pkey,
-        add primary key (id);
-    `);
-
-    await pool.query(sql`
-      create index oidc_model_instances__model_name_id
-        on oidc_model_instances (model_name, id);
+        add primary key (id),
+        add constraint oidc_model_instances__model_name_id
+          unique (model_name, id);
     `);
   },
   down: async (pool) => {
-    await pool.query(sql`drop index oidc_model_instances__model_name_id;`);
     await pool.query(sql`
       alter table oidc_model_instances
         drop constraint oidc_model_instances_pkey,
+        drop constraint oidc_model_instances__model_name_id,
         add primary key (model_name, id);
     `);
   },
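After this particular alteration runs, the previous standalone index is replaced by a unique constraint; one way to verify the resulting state (an illustrative check, not part of the commit):

  -- Expect the primary key on (id) plus the unique constraint on (model_name, id).
  select conname, contype, pg_get_constraintdef(oid) as definition
  from pg_constraint
  where conrelid = 'oidc_model_instances'::regclass;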
@@ -7,8 +7,8 @@ const getId = (value: string) => sql.identifier([value]);
 const tenantId = sql.identifier(['tenant_id']);
 
 const tables: string[] = [
-  'applications_roles',
   'applications',
+  'applications_roles',
   'connectors',
   'custom_phrases',
   'logs',
@@ -42,7 +42,6 @@ const indexes: IndexInfo[] = [
   {
     table: 'oidc_model_instances',
     indexes: [
-      { columns: ['model_name', 'id'] },
       { name: 'model_name_payload_user_code', columns: ['model_name', "(payload->>'userCode')"] },
       { name: 'model_name_payload_uid', columns: ['model_name', "(payload->>'uid')"] },
       { name: 'model_name_payload_grant_id', columns: ['model_name', "(payload->>'grantId')"] },
@@ -71,6 +70,7 @@ type ConstraintInfo = {
 const constraints: ConstraintInfo[] = [
   { table: 'applications_roles', columns: ['application_id', 'role_id'] },
   { table: 'custom_phrases', columns: ['language_tag'] },
+  { table: 'oidc_model_instances', columns: ['model_name', 'id'] },
   { table: 'roles_scopes', columns: ['role_id', 'scope_id'] },
   { table: 'users_roles', columns: ['user_id', 'role_id'] },
   { table: 'resources', columns: ['indicator'], original: 'index' },
@@ -0,0 +1,62 @@
+import { sql } from 'slonik';
+
+import type { AlterationScript } from '../lib/types/alteration.js';
+
+const tables: string[] = [
+  'applications',
+  'applications_roles',
+  'connectors',
+  'custom_phrases',
+  'logs',
+  'oidc_model_instances',
+  'passcodes',
+  'resources',
+  'roles_scopes',
+  'roles',
+  'scopes',
+  'settings',
+  'sign_in_experiences',
+  'users_roles',
+  'users',
+];
+
+const alteration: AlterationScript = {
+  up: async (pool) => {
+    await pool.query(sql`
+      create function set_tenant_id() returns trigger as
+      $$ begin
+        select tenants.id into new.tenant_id
+          from tenants
+          where ('tenant_user_' || tenants.id) = current_user;
+
+        if new.tenant_id is null then
+          new.tenant_id := 'default';
+        end if;
+
+        return new;
+      end; $$ language plpgsql;
+    `);
+
+    await Promise.all(
+      tables.map(async (table) =>
+        pool.query(sql`
+          create trigger set_tenant_id before insert on ${sql.identifier([table])}
+            for each row execute procedure set_tenant_id();
+        `)
+      )
+    );
+  },
+  down: async (pool) => {
+    await Promise.all(
+      tables.map(async (table) =>
+        pool.query(sql`
+          drop trigger set_tenant_id on ${sql.identifier([table])};
+        `)
+      )
+    );
+
+    await pool.query(sql`drop function set_tenant_id;`);
+  },
+};
+
+export default alteration;
packages/schemas/tables/_functions.sql (new file, 14 additions)
@@ -0,0 +1,14 @@
+/* init_order = 0.5 */
+
+create function set_tenant_id() returns trigger as
+$$ begin
+  select tenants.id into new.tenant_id
+    from tenants
+    where ('tenant_user_' || tenants.id) = current_user;
+
+  if new.tenant_id is null then
+    new.tenant_id := 'default';
+  end if;
+
+  return new;
+end; $$ language plpgsql;
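To illustrate how the function behaves at insert time, here is a minimal sketch using a hypothetical table and tenant role (demo_items, tenant_user_acme, and the 'acme' tenant row are all made up for illustration; any table with the trigger attached behaves the same way):

  -- Hypothetical table with the same before-insert trigger attached.
  create table demo_items (
    tenant_id varchar(21) not null,
    id varchar(21) not null primary key,
    name text
  );
  create trigger set_tenant_id before insert on demo_items
    for each row execute procedure set_tenant_id();

  -- When the session runs as a per-tenant role named tenant_user_<tenant id>,
  -- the trigger resolves tenant_id from current_user; otherwise it falls back to 'default'.
  set role tenant_user_acme;
  insert into demo_items (id, name) values ('item_1', 'hello');
  select tenant_id from demo_items where id = 'item_1';  -- expected: 'acme'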
@@ -17,4 +17,7 @@ create table applications (
 );
 
 create index applications__id
-  on applications (tenant_id, id)
+  on applications (tenant_id, id);
+
+create trigger set_tenant_id before insert on applications
+  for each row execute procedure set_tenant_id();
@@ -13,3 +13,6 @@ create table applications_roles (
 
 create index applications_roles__id
   on applications_roles (tenant_id, id);
+
+create trigger set_tenant_id before insert on applications_roles
+  for each row execute procedure set_tenant_id();
@@ -12,3 +12,6 @@ create table connectors (
 
 create index connectors__id
   on connectors (tenant_id, id);
+
+create trigger set_tenant_id before insert on connectors
+  for each row execute procedure set_tenant_id();
@@ -11,3 +11,6 @@ create table custom_phrases (
 
 create index custom_phrases__id
   on custom_phrases (tenant_id, id);
+
+create trigger set_tenant_id before insert on custom_phrases
+  for each row execute procedure set_tenant_id();
@@ -19,3 +19,6 @@ create index logs__user_id
 
 create index logs__application_id
   on logs (tenant_id, (payload->>'application_id') nulls last);
+
+create trigger set_tenant_id before insert on logs
+  for each row execute procedure set_tenant_id();
@@ -6,12 +6,11 @@ create table oidc_model_instances (
   payload jsonb /* @use OidcModelInstancePayload */ not null,
   expires_at timestamptz not null,
   consumed_at timestamptz,
-  primary key (id)
+  primary key (id),
+  constraint oidc_model_instances__model_name_id
+    unique (tenant_id, model_name, id)
 );
 
-create index oidc_model_instances__model_name_id
-  on oidc_model_instances (tenant_id, model_name, id);
-
 create index oidc_model_instances__model_name_payload_user_code
   on oidc_model_instances (
     tenant_id,
@@ -32,3 +31,6 @@ create index oidc_model_instances__model_name_payload_grant_id
     model_name,
     (payload->>'grantId')
   );
+
+create trigger set_tenant_id before insert on oidc_model_instances
+  for each row execute procedure set_tenant_id();
@@ -24,3 +24,6 @@ create index passcodes__email_type
 
 create index passcodes__phone_type
   on passcodes (tenant_id, phone, type);
+
+create trigger set_tenant_id before insert on passcodes
+  for each row execute procedure set_tenant_id();
@@ -14,3 +14,6 @@ create table resources (
 
 create index resources__id
   on resources (tenant_id, id);
+
+create trigger set_tenant_id before insert on resources
+  for each row execute procedure set_tenant_id();
@@ -13,3 +13,6 @@ create table roles (
 
 create index roles__id
   on roles (tenant_id, id);
+
+create trigger set_tenant_id before insert on roles
+  for each row execute procedure set_tenant_id();
@@ -13,3 +13,6 @@ create table roles_scopes (
 
 create index roles_scopes__id
   on roles_scopes (tenant_id, id);
+
+create trigger set_tenant_id before insert on roles_scopes
+  for each row execute procedure set_tenant_id();
@@ -16,3 +16,6 @@ create table scopes (
 
 create index scopes__id
   on scopes (tenant_id, id);
+
+create trigger set_tenant_id before insert on scopes
+  for each row execute procedure set_tenant_id();
@@ -8,3 +8,6 @@ create table settings (
 
 create index settings__id
   on settings (tenant_id, id);
+
+create trigger set_tenant_id before insert on settings
+  for each row execute procedure set_tenant_id();
@@ -17,3 +17,6 @@ create table sign_in_experiences (
 
 create index sign_in_experiences__id
   on sign_in_experiences (tenant_id, id);
+
+create trigger set_tenant_id before insert on sign_in_experiences
+  for each row execute procedure set_tenant_id();
@@ -27,3 +27,6 @@ create index users__id
 
 create index users__name
   on users (tenant_id, name);
+
+create trigger set_tenant_id before insert on users
+  for each row execute procedure set_tenant_id();
@@ -13,3 +13,6 @@ create table users_roles (
 
 create index users_roles__id
   on users_roles (tenant_id, id);
+
+create trigger set_tenant_id before insert on users_roles
+  for each row execute procedure set_tenant_id();
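Once the schema files (or the alteration above) have run, each of these tables should have the before-insert trigger attached; a quick illustrative check:

  -- Expect one row per tenant-scoped table listed in this commit.
  select event_object_table, action_timing, event_manipulation
  from information_schema.triggers
  where trigger_name = 'set_tenant_id'
  order by event_object_table;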