Mirror of https://github.com/withastro/astro.git, synced 2024-12-30 22:03:56 -05:00
fix: separate image extraction from schema parsing in content layer (#11884)
* fix: separate image extraction from schema parsing in content layer
* rm unused imports
parent 11ebf3bd15
commit e45070459f
9 changed files with 45 additions and 28 deletions
.changeset/odd-tips-jam.md (new file, 5 lines)
@@ -0,0 +1,5 @@
+---
+'astro': patch
+---
+
+Correctly handles content layer data where the transformed value does not match the input schema
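For context, the failure mode this targets can be seen in the spacecraft fixture further down in this diff: a field whose transformed value has a different shape than its declared input. A minimal sketch of such a schema, illustrative only and modelled on that fixture:

import { defineCollection, z } from 'astro:content';

// Sketch only: `something` is declared as a string, but the transform returns
// an object, so the parsed (stored) value no longer matches the input schema.
const spacecraft = defineCollection({
	schema: ({ image }) => z.object({
		heroImage: image().optional(),
		something: z.string().optional().transform((str) => ({ type: 'test', content: str })),
	}),
});

export const collections = { spacecraft };

The test changes below assert that this transformed value (data.something.content === 'transform me') survives repeated builds.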
@@ -1,6 +1,4 @@
 import { promises as fs, existsSync } from 'node:fs';
-import { isAbsolute } from 'node:path';
-import { fileURLToPath } from 'node:url';
 import * as fastq from 'fastq';
 import type { FSWatcher } from 'vite';
 import xxhash from 'xxhash-wasm';
@@ -19,7 +17,6 @@ import {
 	getEntryConfigByExtMap,
 	getEntryDataAndImages,
 	globalContentConfigObserver,
-	posixRelative,
 } from './utils.js';

 export interface ContentLayerOptions {
@@ -188,7 +185,7 @@ export class ContentLayer {
 		const collectionWithResolvedSchema = { ...collection, schema };

 		const parseData: LoaderContext['parseData'] = async ({ id, data, filePath = '' }) => {
-			const { imageImports, data: parsedData } = await getEntryDataAndImages(
+			const { data: parsedData } = await getEntryDataAndImages(
 				{
 					id,
 					collection: name,
@@ -201,15 +198,6 @@
 				collectionWithResolvedSchema,
 				false,
 			);
-			if (imageImports?.length) {
-				this.#store.addAssetImports(
-					imageImports,
-					// This path may already be relative, if we're re-parsing an existing entry
-					isAbsolute(filePath)
-						? posixRelative(fileURLToPath(this.#settings.config.root), filePath)
-						: filePath,
-				);
-			}

 			return parsedData;
 		};
@@ -33,6 +33,7 @@ export interface DataEntry<TData extends Record<string, unknown> = Record<string
 	 * If an entry is a deferred, its rendering phase is delegated to a virtual module during the runtime phase when calling `renderEntry`.
 	 */
 	deferredRender?: boolean;
+	assetImports?: Array<string>;
 }

 /**
@@ -107,15 +107,11 @@ export function glob(globOptions: GlobOptions): Loader {
 				store.addModuleImport(existingEntry.filePath);
 			}

-			if (existingEntry.rendered?.metadata?.imagePaths?.length) {
+			if (existingEntry.assetImports?.length) {
 				// Add asset imports for existing entries
-				store.addAssetImports(
-					existingEntry.rendered.metadata.imagePaths,
-					existingEntry.filePath,
-				);
+				store.addAssetImports(existingEntry.assetImports, existingEntry.filePath);
 			}
 			// Re-parsing to resolve images and other effects
 			await parseData(existingEntry);

 			return;
 		}
@@ -156,10 +152,9 @@
 				filePath: relativePath,
 				digest,
 				rendered,
+				assetImports: rendered?.metadata?.imagePaths,
 			});
-			if (rendered?.metadata?.imagePaths?.length) {
-				store.addAssetImports(rendered.metadata.imagePaths, relativePath);
-			}

 			// todo: add an explicit way to opt in to deferred rendering
 		} else if ('contentModuleTypes' in entryType) {
 			store.set({
@@ -1,7 +1,9 @@
 import { promises as fs, type PathLike, existsSync } from 'node:fs';
 import * as devalue from 'devalue';
+import { Traverse } from 'neotraverse/modern';
 import { imageSrcToImportId, importIdToSymbolName } from '../assets/utils/resolveImports.js';
 import { AstroError, AstroErrorData } from '../core/errors/index.js';
+import { IMAGE_IMPORT_PREFIX } from './consts.js';
 import { type DataEntry, DataStore, type RenderedContent } from './data-store.js';
 import { contentModuleToId } from './utils.js';

@@ -53,7 +55,7 @@ export class MutableDataStore extends DataStore {
 		this.#saveToDiskDebounced();
 	}

-	addAssetImport(assetImport: string, filePath: string) {
+	addAssetImport(assetImport: string, filePath?: string) {
 		const id = imageSrcToImportId(assetImport, filePath);
 		if (id) {
 			this.#assetImports.add(id);
@@ -64,7 +66,7 @@ export class MutableDataStore extends DataStore {
 		}
 	}

-	addAssetImports(assets: Array<string>, filePath: string) {
+	addAssetImports(assets: Array<string>, filePath?: string) {
 		assets.forEach((asset) => this.addAssetImport(asset, filePath));
 	}

@@ -195,7 +197,7 @@ export default new Map([\n${lines.join(',\n')}]);
 			entries: () => this.entries(collectionName),
 			values: () => this.values(collectionName),
 			keys: () => this.keys(collectionName),
-			set: ({ id: key, data, body, filePath, deferredRender, digest, rendered }) => {
+			set: ({ id: key, data, body, filePath, deferredRender, digest, rendered, assetImports }) => {
 				if (!key) {
 					throw new Error(`ID must be a non-empty string`);
 				}
@@ -206,6 +208,15 @@ export default new Map([\n${lines.join(',\n')}]);
 						return false;
 					}
 				}
+				const foundAssets = new Set<string>(assetImports);
+				// Check for image imports in the data. These will have been prefixed during schema parsing
+				new Traverse(data).forEach((_, val) => {
+					if (typeof val === 'string' && val.startsWith(IMAGE_IMPORT_PREFIX)) {
+						const src = val.replace(IMAGE_IMPORT_PREFIX, '');
+						foundAssets.add(src);
+					}
+				});
+
 				const entry: DataEntry = {
 					id,
 					data,
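For illustration, here is a self-contained sketch of what that traversal does. The prefix literal below is an assumption made for the example; the real value is whatever IMAGE_IMPORT_PREFIX from './consts.js' resolves to.

import { Traverse } from 'neotraverse/modern';

// Assumed prefix, for illustration only; the store imports IMAGE_IMPORT_PREFIX from './consts.js'.
const IMAGE_IMPORT_PREFIX = '__ASTRO_IMAGE_';

// Entry data as it might look after schema parsing has prefixed image fields.
const data = {
	title: 'Endeavour',
	heroImage: `${IMAGE_IMPORT_PREFIX}./shuttle.jpg`,
	gallery: [{ src: `${IMAGE_IMPORT_PREFIX}./launch.jpg` }],
};

// Walk every value in the (possibly nested) data and collect the prefixed paths.
const foundAssets = new Set<string>();
new Traverse(data).forEach((_, val) => {
	if (typeof val === 'string' && val.startsWith(IMAGE_IMPORT_PREFIX)) {
		foundAssets.add(val.replace(IMAGE_IMPORT_PREFIX, ''));
	}
});

console.log([...foundAssets]); // [ './shuttle.jpg', './launch.jpg' ]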
@@ -221,6 +232,12 @@ export default new Map([\n${lines.join(',\n')}]);
 					}
 					entry.filePath = filePath;
 				}
+
+				if (foundAssets.size) {
+					entry.assetImports = Array.from(foundAssets);
+					this.addAssetImports(entry.assetImports, filePath);
+				}
+
 				if (digest) {
 					entry.digest = digest;
 				}
@@ -334,6 +351,12 @@ export interface ScopedDataStore {
 			 * If an entry is a deferred, its rendering phase is delegated to a virtual module during the runtime phase.
 			 */
 			deferredRender?: boolean;
+			/**
+			 * Assets such as images to process during the build. These should be files on disk, with a path relative to filePath.
+			 * Any values that use image() in the schema will already be added automatically.
+			 * @internal
+			 */
+			assetImports?: Array<string>;
 		}) => boolean;
 	values: () => Array<DataEntry>;
 	keys: () => Array<string>;
@@ -162,17 +162,21 @@ describe('Content Layer', () => {

 		it('updates the store on new builds', async () => {
 			assert.equal(json.increment.data.lastValue, 1);
+			assert.equal(json.entryWithReference.data.something?.content, 'transform me');
 			await fixture.build();
 			const newJson = devalue.parse(await fixture.readFile('/collections.json'));
 			assert.equal(newJson.increment.data.lastValue, 2);
+			assert.equal(newJson.entryWithReference.data.something?.content, 'transform me');
 		});

 		it('clears the store on new build with force flag', async () => {
 			let newJson = devalue.parse(await fixture.readFile('/collections.json'));
 			assert.equal(newJson.increment.data.lastValue, 2);
+			assert.equal(newJson.entryWithReference.data.something?.content, 'transform me');
 			await fixture.build({ force: true }, {});
 			newJson = devalue.parse(await fixture.readFile('/collections.json'));
 			assert.equal(newJson.increment.data.lastValue, 1);
+			assert.equal(newJson.entryWithReference.data.something?.content, 'transform me');
 		});

 		it('clears the store on new build if the config has changed', async () => {
@@ -5,6 +5,7 @@ publishedDate: 'Sat May 21 2022 00:00:00 GMT-0400 (Eastern Daylight Time)'
 tags: [space, 90s]
 cat: tabby
 heroImage: "./shuttle.jpg"
+something: "transform me"
 ---

 **Source:** [Wikipedia](https://en.wikipedia.org/wiki/Space_Shuttle_Endeavour)
@@ -78,6 +78,7 @@ const spacecraft = defineCollection({
 			tags: z.array(z.string()),
 			heroImage: image().optional(),
 			cat: reference('cats').optional(),
+			something: z.string().optional().transform(str => ({ type: 'test', content: str }))
 		}),
 });

@@ -120,9 +121,9 @@ const increment = defineCollection({
 		schema: async () => z.object({
 			lastValue: z.number(),
 			lastUpdated: z.date(),
-
 		}),
 	},
+
 });

 export const collections = { blog, dogs, cats, numbers, spacecraft, increment, images };
@@ -17,7 +17,6 @@ export async function GET() {
 	const increment = await getEntry('increment', 'value');
-
 	const images = await getCollection('images');

 	return new Response(
 		devalue.stringify({
 			customLoader,