mirror of https://github.com/withastro/astro.git
synced 2025-01-20 22:12:38 -05:00

feat: glob support with { db, table } signature

This commit is contained in:
parent 83933143ac
commit 1c25081767

5 changed files with 90 additions and 25 deletions
@ -14,6 +14,7 @@ import {
 } from './types.js';
 import { z } from 'zod';
 import type { SQLiteInsertValue } from 'drizzle-orm/sqlite-core';
+import type { SqliteDB } from './internal.js';

 export const dbConfigSchema = z.object({
   studio: z.boolean().optional(),
@ -26,18 +27,33 @@ export const dbConfigSchema = z.object({
     .optional(),
 });

+export type SetDataFn<
+  TFields extends z.input<typeof collectionSchema>['fields'] = z.input<
+    typeof collectionSchema
+  >['fields'],
+> = (params: {
+  db: SqliteDB;
+  table: Table<
+    string,
+    /** TODO: true type inference */ Record<Extract<keyof TFields, string>, DBField>
+  >;
+  mode: 'dev' | 'build';
+}) => MaybePromise<void>;
+
 export type DBUserConfig = Omit<z.input<typeof dbConfigSchema>, 'data'> & {
   data(params: {
     set<TFields extends z.input<typeof collectionSchema>['fields']>(
       collection: ResolvedCollectionConfig<TFields, boolean>,
-      data: MaybeArray<
-        SQLiteInsertValue<
-          Table<
-            string,
-            /** TODO: true type inference */ Record<Extract<keyof TFields, string>, DBField>
-          >
-        >
-      >
+      data:
+        | MaybeArray<
+            SQLiteInsertValue<
+              Table<
+                string,
+                /** TODO: true type inference */ Record<Extract<keyof TFields, string>, DBField>
+              >
+            >
+          >
+        | SetDataFn<TFields>
     ): Promise<any> /** TODO: type output */;
   }): MaybePromise<void>;
 };
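For orientation, here is a minimal sketch of how a config author could exercise the new union: `set` keeps accepting plain insert values, and can now also take a `SetDataFn` that receives the live `db` and `table` handles plus the `mode`. The `Quote` collection mirrors the glob test fixture further down; the row contents and the `seedQuotes` name are illustrative, and the loose typing reflects the `TODO: true type inference` notes in the diff.

// Sketch only: `Quote` mirrors the glob test fixture; rows and names are illustrative.
import { defineCollection, field, type SetDataFn } from '@astrojs/db';

const Quote = defineCollection({
  fields: {
    author: field.text(),
    body: field.text(),
  },
});

// 1. Plain rows, as before (MaybeArray<SQLiteInsertValue<...>>).
const staticRows = [{ author: 'Ada Lovelace', body: 'Imagination is the Discovering Faculty.' }];

// 2. New function form: receives { db, table, mode } and writes directly.
const seedQuotes: SetDataFn = async ({ db, table, mode }) => {
  if (mode === 'build') {
    await db.insert(table).values(staticRows as any); // cast while inference is TODO
  }
};

// Inside the config's data({ set }) hook, both shapes are accepted:
//   set(Quote, staticRows);
//   set(Quote, seedQuotes);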
@ -63,7 +79,6 @@ type CollectionConfig<
   seed?: Writable extends false
     ? never
     : () => MaybePromise<Array<Record<keyof TFields, any> & { id?: string }>>;
-  _: CollectionMeta;
 }
   : {
     fields: TFields;

@ -71,7 +86,6 @@ type CollectionConfig<
   data?: Writable extends true
     ? never
     : () => MaybePromise<Array<Record<keyof TFields, any> & { id?: string }>>;
-  _: CollectionMeta;
 };

 type ResolvedCollectionConfig<

@ -79,6 +93,7 @@ type ResolvedCollectionConfig<
   Writable extends boolean,
 > = CollectionConfig<TFields, Writable> & {
   writable: Writable;
+  _: CollectionMeta;
 };

 export function defineCollection<TFields extends z.input<typeof collectionSchema>['fields']>(
@ -1,4 +1,4 @@
-export { defineCollection, defineWritableCollection, field } from './config.js';
+export { defineCollection, defineWritableCollection, field, type SetDataFn } from './config.js';

 export { cli } from './cli/index.js';
 export { integration as default } from './integration.js';
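Since `SetDataFn` is now re-exported from the package entry point, user code can type reusable loaders against it. A minimal sketch under that assumption; the `seedFromArray` helper name is invented, not part of the package:

// Illustrative helper typed against the newly exported SetDataFn.
import { type SetDataFn } from '@astrojs/db';

export function seedFromArray(rows: Record<string, unknown>[]): SetDataFn {
  return async ({ db, table }) => {
    await db.insert(table).values(rows as any); // cast while table typing is TODO
  };
}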
@ -120,6 +120,9 @@ export async function setupDbTables({
     );
   }
   const table = collectionToTable(collectionName, collectionSchema.parse(collection));
+  if (typeof values === 'function') {
+    return await values({ db, table: table as any, mode });
+  }
   const result = Array.isArray(values)
     ? await db.insert(table).values(values).returning()
     : await db.insert(table).values(values).returning().get();
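To make the control flow above easier to follow, here is a condensed, self-contained sketch of the dispatch. `applySetValue` is an invented name, not the signature used in the package, and the `any` types stand in for the still-TODO inference.

// Condensed sketch of the dispatch added above; names and types are simplified.
type Mode = 'dev' | 'build';
type SetValue =
  | Record<string, unknown>
  | Record<string, unknown>[]
  | ((params: { db: any; table: any; mode: Mode }) => Promise<void>);

async function applySetValue(db: any, table: any, mode: Mode, values: SetValue) {
  if (typeof values === 'function') {
    // New SetDataFn form: hand db/table to user code and let it insert.
    return await values({ db, table, mode });
  }
  // Plain row(s): insert directly; an array yields all inserted rows,
  // a single object yields one row via .get().
  return Array.isArray(values)
    ? await db.insert(table).values(values).returning()
    : await db.insert(table).values(values).returning().get();
}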
packages/db/test/fixtures/glob/astro.config.ts (vendored, 19 changed lines)

@ -1,30 +1,21 @@
 import { defineConfig } from 'astro/config';
 import db, { defineCollection, field } from '@astrojs/db';
-import glob from 'fast-glob';
-import { readFile } from 'fs/promises';
+import { asJson, glob } from './utils';

 const Quote = defineCollection({
   fields: {
     author: field.text(),
     body: field.text(),
-  },
-  async data() {
-    const quotes = await glob('quotes/*.json');
-    return Promise.all(
-      quotes.map(async (quote) => {
-        const data = JSON.parse(await readFile(quote, 'utf-8'));
-        return {
-          author: data.author,
-          body: data.body,
-        };
-      })
-    );
+    file: field.text({ unique: true }),
   },
 });

 export default defineConfig({
   db: {
     collections: { Quote },
+    data({ set }) {
+      set(Quote, glob('quotes/*.json', asJson));
+    },
   },
   integrations: [db()],
 });
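Because the helpers in utils.ts only rely on a unique `file` field and the parser contract, the same pattern extends to other collections. A hypothetical variant of the fixture config; the `Tip` collection and the `tips/*.json` pattern are invented for illustration:

// Hypothetical second collection reusing the fixture's helpers.
import { defineConfig } from 'astro/config';
import db, { defineCollection, field } from '@astrojs/db';
import { asJson, glob } from './utils';

const Tip = defineCollection({
  fields: {
    title: field.text(),
    body: field.text(),
    file: field.text({ unique: true }), // glob() requires a unique `file` field
  },
});

export default defineConfig({
  db: {
    collections: { Tip },
    data({ set }) {
      set(Tip, glob('tips/*.json', asJson));
    },
  },
  integrations: [db()],
});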
packages/db/test/fixtures/glob/utils.ts (vendored, new file, 56 lines)

@ -0,0 +1,56 @@
import fastGlob from 'fast-glob';
import { readFile } from 'fs/promises';
import chokidar from 'chokidar';
import { eq } from 'drizzle-orm';
import { type SetDataFn } from '@astrojs/db';

export function glob(
  pattern: string,
  parser: (params: { file: string; content: string }) => Record<string, any>
) {
  const setDataFn: SetDataFn = async (ctx) => {
    const fileField = ctx.table.file;
    if (!fileField) {
      throw new Error('`file` field is required for glob collections.');
    }
    if (ctx.mode === 'dev') {
      chokidar
        .watch(pattern)
        .on('add', async (file) => {
          const content = await readFile(file, 'utf-8');
          const parsed = parser({ file, content });
          await ctx.db.insert(ctx.table).values({ ...parsed, file });
        })
        .on('change', async (file) => {
          const content = await readFile(file, 'utf-8');
          const parsed = parser({ file, content });
          await ctx.db
            .insert(ctx.table)
            .values({ ...parsed, file })
            .onConflictDoUpdate({
              target: fileField,
              set: parsed,
            });
        })
        .on('unlink', async (file) => {
          await ctx.db.delete(ctx.table).where(eq(fileField, file));
        });
    } else {
      const files = await fastGlob(pattern);
      for (const file of files) {
        const content = await readFile(file, 'utf-8');
        const parsed = parser({ file, content });
        await ctx.db.insert(ctx.table).values({ ...parsed, file });
      }
    }
  };
  return setDataFn;
}

export function asJson(params: { file: string; content: string }) {
  try {
    return JSON.parse(params.content);
  } catch (e) {
    throw new Error(`Error parsing ${params.file}: ${e.message}`);
  }
}
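Any function matching the `parser` parameter shape can be swapped in for `asJson`. A hypothetical plain-text parser that fills the fixture's `author`/`body` fields; the `asText` name and the way fields are derived are assumptions, not part of the commit:

// Illustrative parser with the same shape as `asJson`.
import { basename } from 'node:path';

export function asText(params: { file: string; content: string }): Record<string, any> {
  return {
    author: basename(params.file, '.txt'), // derive a field from the file name
    body: params.content.trim(),
  };
}

// Usage with the glob() helper above (in astro.config.ts):
//   set(Quote, glob('quotes/*.txt', asText));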