
Merge pull request #1990 from logto-io/sijie-log-4232-migration-timestamp

feat(core,schemas): use timestamp to version migrations
wangsijie 2022-09-26 13:54:46 +08:00 committed by GitHub
commit 450130b4b8
11 changed files with 77 additions and 106 deletions


@@ -57,7 +57,6 @@
     "query-string": "^7.0.1",
     "rimraf": "^3.0.2",
     "roarr": "^7.11.0",
-    "semver": "^7.3.7",
     "slonik": "^30.0.0",
     "slonik-interceptor-preset": "^1.2.10",
     "slonik-sql-tag-raw": "^1.1.4",
@@ -86,7 +85,6 @@
     "@types/node": "^16.3.1",
     "@types/oidc-provider": "^7.11.1",
     "@types/rimraf": "^3.0.2",
-    "@types/semver": "^7.3.12",
     "@types/supertest": "^2.0.11",
     "@types/tar": "^6.1.2",
     "copyfiles": "^2.4.1",


@@ -82,7 +82,8 @@ function createEnvSet() {
       pool = await createPoolByEnv(values.isTest);
       await addConnectors(values.connectorDirectory);
 
-      if (pool) {
+      // FIXME: @sijie temparaly disable migration for integration test
+      if (pool && !values.isIntegrationTest) {
        await checkMigrationState(pool);
      }
    },


@@ -1,3 +1,3 @@
-export const databaseVersionKey = 'databaseVersion';
+export const migrationStateKey = 'migrationState';
 export const logtoConfigsTableFilePath = 'node_modules/@logto/schemas/tables/logto_configs.sql';
-export const migrationFilesDirectory = 'node_modules/@logto/schemas/lib/migrations';
+export const migrationFilesDirectory = 'node_modules/@logto/schemas/migrations';


@@ -5,15 +5,14 @@ import { convertToIdentifiers } from '@/database/utils';
 import { QueryType, expectSqlAssert } from '@/utils/test-utils';
 
 import * as functions from '.';
-import { databaseVersionKey } from './constants';
+import { migrationStateKey } from './constants';
 
 const mockQuery: jest.MockedFunction<QueryType> = jest.fn();
 
 const {
   createLogtoConfigsTable,
-  getCurrentDatabaseVersion,
   isLogtoConfigsTableExists,
-  updateDatabaseVersion,
-  getMigrationFiles,
+  updateDatabaseTimestamp,
+  getCurrentDatabaseTimestamp,
   getUndeployedMigrations,
 } = functions;
@@ -22,6 +21,7 @@ const pool = createMockPool({
   },
 });
 
 const { table, fields } = convertToIdentifiers(LogtoConfigs);
+const timestamp = 1_663_923_776;
 
 describe('isLogtoConfigsTableExists()', () => {
   it('generates "select exists" sql and query for result', async () => {
@@ -45,11 +45,11 @@
   });
 });
 
-describe('getCurrentDatabaseVersion()', () => {
+describe('getCurrentDatabaseTimestamp()', () => {
   it('returns null if query failed (table not found)', async () => {
     mockQuery.mockRejectedValueOnce(new Error('table not found'));
 
-    await expect(getCurrentDatabaseVersion(pool)).resolves.toBeNull();
+    await expect(getCurrentDatabaseTimestamp(pool)).resolves.toBeNull();
   });
 
   it('returns null if the row is not found', async () => {
@@ -59,12 +59,12 @@ describe('getCurrentDatabaseVersion()', () => {
     mockQuery.mockImplementationOnce(async (sql, values) => {
       expectSqlAssert(sql, expectSql.sql);
-      expect(values).toEqual([databaseVersionKey]);
+      expect(values).toEqual([migrationStateKey]);
 
       return createMockQueryResult([]);
     });
 
-    await expect(getCurrentDatabaseVersion(pool)).resolves.toBeNull();
+    await expect(getCurrentDatabaseTimestamp(pool)).resolves.toBeNull();
   });
 
   it('returns null if the value is in bad format', async () => {
@@ -74,29 +74,28 @@ describe('getCurrentDatabaseVersion()', () => {
     mockQuery.mockImplementationOnce(async (sql, values) => {
       expectSqlAssert(sql, expectSql.sql);
-      expect(values).toEqual([databaseVersionKey]);
+      expect(values).toEqual([migrationStateKey]);
 
-      return createMockQueryResult([{ value: 'some_version' }]);
+      return createMockQueryResult([{ value: 'some_value' }]);
     });
 
-    await expect(getCurrentDatabaseVersion(pool)).resolves.toBeNull();
+    await expect(getCurrentDatabaseTimestamp(pool)).resolves.toBeNull();
   });
 
-  it('returns the version from database', async () => {
+  it('returns the timestamp from database', async () => {
     const expectSql = sql`
       select * from ${table} where ${fields.key}=$1
     `;
-    const version = 'version';
 
     mockQuery.mockImplementationOnce(async (sql, values) => {
       expectSqlAssert(sql, expectSql.sql);
-      expect(values).toEqual([databaseVersionKey]);
+      expect(values).toEqual([migrationStateKey]);
       // @ts-expect-error createMockQueryResult doesn't support jsonb
-      return createMockQueryResult([{ value: { version, updatedAt: 'now' } }]);
+      return createMockQueryResult([{ value: { timestamp, updatedAt: 'now' } }]);
     });
 
-    await expect(getCurrentDatabaseVersion(pool)).resolves.toEqual(version);
+    await expect(getCurrentDatabaseTimestamp(pool)).resolves.toEqual(timestamp);
   });
 });
@@ -113,13 +112,12 @@ describe('createLogtoConfigsTable()', () => {
   });
 });
 
-describe('updateDatabaseVersion()', () => {
+describe('updateDatabaseTimestamp()', () => {
   const expectSql = sql`
     insert into ${table} (${fields.key}, ${fields.value})
     values ($1, $2)
     on conflict (${fields.key}) do update set ${fields.value}=excluded.${fields.value}
   `;
-  const version = 'version';
   const updatedAt = '2022-09-21T06:32:46.583Z';
 
   beforeAll(() => {
@@ -143,20 +141,20 @@ describe('updateDatabaseVersion()', () => {
       .mockImplementationOnce(jest.fn());
     jest.spyOn(functions, 'isLogtoConfigsTableExists').mockResolvedValueOnce(false);
 
-    await updateDatabaseVersion(pool, version);
+    await updateDatabaseTimestamp(pool, timestamp);
 
     expect(mockCreateLogtoConfigsTable).toHaveBeenCalled();
   });
 
-  it('sends upsert sql with version and updatedAt', async () => {
+  it('sends upsert sql with timestamp and updatedAt', async () => {
     mockQuery.mockImplementationOnce(async (sql, values) => {
       expectSqlAssert(sql, expectSql.sql);
-      expect(values).toEqual([databaseVersionKey, JSON.stringify({ version, updatedAt })]);
+      expect(values).toEqual([migrationStateKey, JSON.stringify({ timestamp, updatedAt })]);
 
       return createMockQueryResult([]);
     });
     jest.spyOn(functions, 'isLogtoConfigsTableExists').mockResolvedValueOnce(true);
 
-    await updateDatabaseVersion(pool, version);
+    await updateDatabaseTimestamp(pool, timestamp);
   });
 });
@@ -164,22 +162,29 @@ describe('getUndeployedMigrations()', () => {
   beforeEach(() => {
     jest
       .spyOn(functions, 'getMigrationFiles')
-      .mockResolvedValueOnce(['1.0.0.js', '1.0.2.js', '1.0.1.js']);
+      .mockResolvedValueOnce([
+        '1.0.0-1663923770-a.js',
+        '1.0.0-1663923772-c.js',
+        '1.0.0-1663923771-b.js',
+      ]);
   });
 
-  it('returns all files with right order if database version is null', async () => {
-    jest.spyOn(functions, 'getCurrentDatabaseVersion').mockResolvedValueOnce(null);
+  it('returns all files with right order if database migration timestamp is null', async () => {
+    jest.spyOn(functions, 'getCurrentDatabaseTimestamp').mockResolvedValueOnce(null);
 
     await expect(getUndeployedMigrations(pool)).resolves.toEqual([
-      '1.0.0.js',
-      '1.0.1.js',
-      '1.0.2.js',
+      '1.0.0-1663923770-a.js',
+      '1.0.0-1663923771-b.js',
+      '1.0.0-1663923772-c.js',
     ]);
   });
 
-  it('returns files whose version is greater then database version', async () => {
-    jest.spyOn(functions, 'getCurrentDatabaseVersion').mockResolvedValueOnce('1.0.0');
-    await expect(getUndeployedMigrations(pool)).resolves.toEqual(['1.0.1.js', '1.0.2.js']);
+  it('returns files whose timestamp is greater then database timstamp', async () => {
+    jest.spyOn(functions, 'getCurrentDatabaseTimestamp').mockResolvedValueOnce(1_663_923_770);
+    await expect(getUndeployedMigrations(pool)).resolves.toEqual([
+      '1.0.0-1663923771-b.js',
+      '1.0.0-1663923772-c.js',
+    ]);
   });
 });


@@ -4,9 +4,9 @@ import path from 'path';
 import { LogtoConfig, LogtoConfigs } from '@logto/schemas';
 import {
-  DatabaseVersion,
-  databaseVersionGuard,
   MigrationScript,
+  MigrationState,
+  migrationStateGuard,
 } from '@logto/schemas/migrations/types';
 import { conditionalString } from '@silverhand/essentials';
 import chalk from 'chalk';
@@ -15,12 +15,8 @@ import { raw } from 'slonik-sql-tag-raw';
 
 import { convertToIdentifiers } from '@/database/utils';
 
-import {
-  databaseVersionKey,
-  logtoConfigsTableFilePath,
-  migrationFilesDirectory,
-} from './constants';
-import { compareVersion, getVersionFromFileName, migrationFileNameRegex } from './utils';
+import { logtoConfigsTableFilePath, migrationFilesDirectory, migrationStateKey } from './constants';
+import { getTimestampFromFileName, migrationFileNameRegex } from './utils';
 
 const { table, fields } = convertToIdentifiers(LogtoConfigs);
@@ -37,14 +33,14 @@ export const isLogtoConfigsTableExists = async (pool: DatabasePool) => {
   return exists;
 };
 
-export const getCurrentDatabaseVersion = async (pool: DatabasePool) => {
+export const getCurrentDatabaseTimestamp = async (pool: DatabasePool) => {
   try {
     const query = await pool.maybeOne<LogtoConfig>(
-      sql`select * from ${table} where ${fields.key}=${databaseVersionKey}`
+      sql`select * from ${table} where ${fields.key}=${migrationStateKey}`
     );
-    const databaseVersion = databaseVersionGuard.parse(query?.value);
+    const { timestamp } = migrationStateGuard.parse(query?.value);
 
-    return databaseVersion.version;
+    return timestamp;
   } catch {
     return null;
   }
@@ -55,20 +51,20 @@ export const createLogtoConfigsTable = async (pool: DatabasePool) => {
   await pool.query(sql`${raw(tableQuery)}`);
 };
 
-export const updateDatabaseVersion = async (pool: DatabasePool, version: string) => {
+export const updateDatabaseTimestamp = async (pool: DatabasePool, timestamp: number) => {
   if (!(await isLogtoConfigsTableExists(pool))) {
     await createLogtoConfigsTable(pool);
   }
 
-  const value: DatabaseVersion = {
-    version,
+  const value: MigrationState = {
+    timestamp,
     updatedAt: new Date().toISOString(),
   };
 
   await pool.query(
     sql`
       insert into ${table} (${fields.key}, ${fields.value})
-      values (${databaseVersionKey}, ${JSON.stringify(value)})
+      values (${migrationStateKey}, ${JSON.stringify(value)})
       on conflict (${fields.key}) do update set ${fields.value}=excluded.${fields.value}
     `
   );
@@ -86,18 +82,13 @@ export const getMigrationFiles = async () => {
 };
 
 export const getUndeployedMigrations = async (pool: DatabasePool) => {
-  const databaseVersion = await getCurrentDatabaseVersion(pool);
+  const databaseTimestamp = await getCurrentDatabaseTimestamp(pool);
   const files = await getMigrationFiles();
 
   return files
-    .filter(
-      (file) =>
-        !databaseVersion || compareVersion(getVersionFromFileName(file), databaseVersion) > 0
-    )
+    .filter((file) => !databaseTimestamp || getTimestampFromFileName(file) > databaseTimestamp)
     .slice()
-    .sort((file1, file2) =>
-      compareVersion(getVersionFromFileName(file1), getVersionFromFileName(file2))
-    );
+    .sort((file1, file2) => getTimestampFromFileName(file1) - getTimestampFromFileName(file2));
 };
 
 const importMigration = async (file: string): Promise<MigrationScript> => {
@@ -127,7 +118,7 @@ const runMigration = async (pool: DatabasePool, file: string) => {
     throw error;
   }
 
-  await updateDatabaseVersion(pool, getVersionFromFileName(file));
+  await updateDatabaseTimestamp(pool, getTimestampFromFileName(file));
   console.log(`${chalk.blue('[migration]')} run ${file} succeeded.`);
 };
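
To make the new selection rule concrete, here is a standalone sketch of the filter/sort above, reusing the helper shape from `./utils` and the sample file names from the test diff earlier in this commit; the inlined constants and the `console.log` are illustrative, not part of the change:

```ts
// Sketch of the new getUndeployedMigrations ordering, based on creation timestamps.
const migrationFileNameRegex = /-(\d{10,11})-?.*\.js$/;

const getTimestampFromFileName = (fileName: string): number => {
  const match = migrationFileNameRegex.exec(fileName);

  if (!match?.[1]) {
    throw new Error(`Can not get timestamp: ${fileName}`);
  }

  return Number(match[1]);
};

// File names as mocked in the test; the stored migrationState timestamp is 1_663_923_770.
const files = ['1.0.0-1663923770-a.js', '1.0.0-1663923772-c.js', '1.0.0-1663923771-b.js'];
const databaseTimestamp: number | null = 1_663_923_770;

const undeployed = files
  // Keep only migrations created after the deployed timestamp (or all of them on a fresh database).
  .filter((file) => !databaseTimestamp || getTimestampFromFileName(file) > databaseTimestamp)
  .slice()
  // Run in ascending creation order, regardless of the package version prefix.
  .sort((file1, file2) => getTimestampFromFileName(file1) - getTimestampFromFileName(file2));

console.log(undeployed); // ['1.0.0-1663923771-b.js', '1.0.0-1663923772-c.js']
```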


@@ -1,25 +1,15 @@
-import { compareVersion, getVersionFromFileName } from './utils';
+import { getTimestampFromFileName } from './utils';
 
-describe('compareVersion', () => {
-  it('should return 1 for 1.0.0 and 1.0.0-beta.9', () => {
-    expect(compareVersion('1.0.0', '1.0.0-beta.9')).toBe(1);
+describe('getTimestampFromFileName()', () => {
+  it('should get for 1.0.0-1663923211.js', () => {
+    expect(getTimestampFromFileName('1.0.0-1663923211.js')).toEqual(1_663_923_211);
   });
 
-  it('should return 1 for 1.0.0-beta.10 and 1.0.0-beta.9', () => {
-    expect(compareVersion('1.0.0-beta.10', '1.0.0-beta.9')).toBe(1);
+  it('should get for 1.0.0-1663923211-user-table.js', () => {
+    expect(getTimestampFromFileName('1.0.0-1663923211-user-table.js')).toEqual(1_663_923_211);
   });
 
-  it('should return 1 for 1.0.0 and 0.0.8', () => {
-    expect(compareVersion('1.0.0', '0.0.8')).toBe(1);
-  });
-});
-
-describe('getVersionFromFileName', () => {
-  it('should get version for 1.0.2.js', () => {
-    expect(getVersionFromFileName('1.0.2.js')).toEqual('1.0.2');
-  });
-
-  it('should throw for next.js', () => {
-    expect(() => getVersionFromFileName('next.js')).toThrowError();
+  it('should throw for 166392321.js', () => {
+    expect(() => getTimestampFromFileName('166392321.js')).toThrowError();
   });
 });


@@ -1,21 +1,11 @@
-import semver from 'semver';
-
-export const migrationFileNameRegex = /^(((?!next).)*)\.js$/;
+export const migrationFileNameRegex = /-(\d{10,11})-?.*\.js$/;
 
-export const getVersionFromFileName = (fileName: string) => {
+export const getTimestampFromFileName = (fileName: string) => {
   const match = migrationFileNameRegex.exec(fileName);
 
   if (!match?.[1]) {
-    throw new Error(`Can not find version name: ${fileName}`);
+    throw new Error(`Can not get timestamp: ${fileName}`);
   }
 
-  return match[1];
-};
-
-export const compareVersion = (version1: string, version2: string) => {
-  if (semver.eq(version1, version2)) {
-    return 0;
-  }
-
-  return semver.gt(version1, version2) ? 1 : -1;
+  return Number(match[1]);
 };


@@ -4,10 +4,12 @@ The folder for all migration files.
 ## Format
 
-The migration files are named in the format of `<version>.ts` where `version` is this npm package's version number.
+The migration files are named in the format of `<version>-<timestamp>-name.js` where `<timestamp>` is the unix timestamp of when the migration was created and `name` is the name of the migration, `version` is this npm package's version number.
 
 As for development, the `version` is "next" until the package is released.
 
+Note that, you SHOULD NOT change the content of the migration files after they are created. If you need to change the migration, you should create a new migration file with the new content.
+
 ## Typing
 
 ```ts
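
As a concrete illustration of the naming scheme described in the README change above, a new migration file name could be assembled like this (a sketch; the variable names and the example migration name are illustrative, not part of the PR):

```ts
// `<version>-<timestamp>-name.js`: this package's version ("next" during development),
// unix seconds at creation time, then a short human-readable name.
const version = 'next';
const timestamp = Math.floor(Date.now() / 1000); // e.g. 1663923211
const name = 'add-user-custom-data'; // hypothetical migration name

const fileName = `${version}-${timestamp}-${name}.js`;
// => 'next-1663923211-add-user-custom-data.js'
```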


@@ -1,12 +1,12 @@
 import { DatabaseTransactionConnection } from 'slonik';
 import { z } from 'zod';
 
-export const databaseVersionGuard = z.object({
-  version: z.string(),
+export const migrationStateGuard = z.object({
+  timestamp: z.number(),
   updatedAt: z.string().optional(),
 });
 
-export type DatabaseVersion = z.infer<typeof databaseVersionGuard>;
+export type MigrationState = z.infer<typeof migrationStateGuard>;
 
 export type MigrationScript = {
   up: (connection: DatabaseTransactionConnection) => Promise<void>;
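
For reference, a sketch of how the new guard handles the stored `migrationState` row; the zod shape mirrors the diff above, and the sample value reuses the timestamp and updatedAt from the test file earlier in this commit:

```ts
import { z } from 'zod';

// Same shape as migrationStateGuard above.
const migrationStateGuard = z.object({
  timestamp: z.number(),
  updatedAt: z.string().optional(),
});

// Value stored as jsonb under the `migrationState` key in logto_configs.
const stored: unknown = { timestamp: 1_663_923_776, updatedAt: '2022-09-21T06:32:46.583Z' };

// parse() returns the typed state, and throws on anything else (e.g. the old
// { version: '...' } shape), which is why getCurrentDatabaseTimestamp falls back to null.
const { timestamp } = migrationStateGuard.parse(stored);
console.log(timestamp); // 1663923776
```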

pnpm-lock.yaml generated

@@ -174,7 +174,6 @@ importers:
       '@types/node': ^16.3.1
       '@types/oidc-provider': ^7.11.1
       '@types/rimraf': ^3.0.2
-      '@types/semver': ^7.3.12
       '@types/supertest': ^2.0.11
       '@types/tar': ^6.1.2
       chalk: ^4
@@ -217,7 +216,6 @@ importers:
       query-string: ^7.0.1
       rimraf: ^3.0.2
       roarr: ^7.11.0
-      semver: ^7.3.7
       slonik: ^30.0.0
       slonik-interceptor-preset: ^1.2.10
       slonik-sql-tag-raw: ^1.1.4
@@ -263,7 +261,6 @@ importers:
       query-string: 7.0.1
       rimraf: 3.0.2
       roarr: 7.11.0
-      semver: 7.3.7
       slonik: 30.1.2
       slonik-interceptor-preset: 1.2.10
       slonik-sql-tag-raw: 1.1.4_roarr@7.11.0+slonik@30.1.2
@@ -291,7 +288,6 @@ importers:
       '@types/node': 16.11.12
       '@types/oidc-provider': 7.11.1
       '@types/rimraf': 3.0.2
-      '@types/semver': 7.3.12
       '@types/supertest': 2.0.11
       '@types/tar': 6.1.2
       copyfiles: 2.4.1
@@ -4557,10 +4553,6 @@
     resolution: {integrity: sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==}
     dev: true
 
-  /@types/semver/7.3.12:
-    resolution: {integrity: sha512-WwA1MW0++RfXmCr12xeYOOC5baSC9mSb0ZqCquFzKhcoF4TvHu5MKOuXsncgZcpVFhB1pXd5hZmM0ryAoCp12A==}
-    dev: true
-
   /@types/serve-static/1.13.10:
     resolution: {integrity: sha512-nCkHGI4w7ZgAdNkrEu0bv+4xNV/XDqW+DydknebMOQwkpDGx8G+HTlj7R7ABI8i8nKxVw0wtKPi1D+lPOkh4YQ==}
     dependencies:
@@ -5917,8 +5909,8 @@
     engines: {node: '>=10'}
     hasBin: true
     dependencies:
-      JSONStream: 1.3.5
       is-text-path: 1.0.1
+      JSONStream: 1.3.5
       lodash: 4.17.21
       meow: 8.1.2
       split2: 3.2.2
@@ -10337,6 +10329,7 @@
     engines: {node: '>=10'}
     dependencies:
       yallist: 4.0.0
+    dev: true
 
   /lru-cache/7.10.1:
     resolution: {integrity: sha512-BQuhQxPuRl79J5zSXRP+uNzPOyZw2oFI9JLRQ80XswSvg21KMKNtQza9eF42rfI/3Z40RvzBdXgziEkudzjo8A==}
@@ -13678,6 +13671,7 @@
     hasBin: true
     dependencies:
       lru-cache: 6.0.0
+    dev: true
 
   /serialize-error/7.0.1:
     resolution: {integrity: sha512-8I8TjW5KMOKsZQTvoxjuSIa7foAwPWGOts+6o7sgjz41/qMD9VQHEDxi6PBvK2l0MXUmqZyNpUK+T2tQaaElvw==}