mirror of https://github.com/withastro/astro.git synced 2024-12-16 21:46:22 -05:00

chore: use Biome VCS ignore file (#12465)

Emanuele Stoppa 2024-11-18 13:34:43 +00:00 committed by GitHub
parent cc03d907c1
commit 3bde4aa751
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
5 changed files with 110 additions and 106 deletions

View file

@@ -18,34 +18,34 @@ const descriptors = [
 'updates',
 ];
 const verbs = [
-"just went out!",
-"just launched!",
-"now available!",
-"in the wild!",
-"now live!",
-"hit the registry!",
-"to share!",
-"for you!",
-"for yall! 🤠",
-"comin your way!",
-"comin atcha!",
-"comin in hot!",
-"freshly minted on the blockchain! (jk)",
-"[is] out (now with 100% more reticulated splines!)",
-"(as seen on TV!)",
-"just dropped!",
-" artisanally hand-crafted just for you.",
-" oh happy day!",
-" enjoy!",
-"now out. Be the first on your block to download!",
-"made with love 💕",
-"[is] out! Our best [version] yet!",
-"[is] here. DOWNLOAD! DOWNLOAD! DOWNLOAD!",
-"... HUZZAH!",
-"[has] landed!",
-"landed! The internet just got a little more fun.",
-" from our family to yours.",
-" go forth and build!"
+'just went out!',
+'just launched!',
+'now available!',
+'in the wild!',
+'now live!',
+'hit the registry!',
+'to share!',
+'for you!',
+'for yall! 🤠',
+'comin your way!',
+'comin atcha!',
+'comin in hot!',
+'freshly minted on the blockchain! (jk)',
+'[is] out (now with 100% more reticulated splines!)',
+'(as seen on TV!)',
+'just dropped!',
+' artisanally hand-crafted just for you.',
+' oh happy day!',
+' enjoy!',
+'now out. Be the first on your block to download!',
+'made with love 💕',
+'[is] out! Our best [version] yet!',
+'[is] here. DOWNLOAD! DOWNLOAD! DOWNLOAD!',
+'... HUZZAH!',
+'[has] landed!',
+'landed! The internet just got a little more fun.',
+' from our family to yours.',
+' go forth and build!',
 ];
 const extraVerbs = [
 'new',
@@ -72,7 +72,7 @@ const plurals = new Map([
 function pluralize(text) {
 return text.replace(/(\[([^\]]+)\])/gm, (_, _full, match) =>
-plurals.has(match) ? plurals.get(match) : `${match}s`
+plurals.has(match) ? plurals.get(match) : `${match}s`,
 );
 }
@@ -91,7 +91,7 @@ async function generatePackageMap() {
 const pkgFile = fileURLToPath(new URL(pkg, packageRoot));
 const content = await readFile(pkgFile).then((res) => JSON.parse(res.toString()));
 packageMap.set(content.name, `./packages/${pkg.replace('/package.json', '')}`);
-})
+}),
 );
 }
@@ -110,7 +110,7 @@ async function generateMessage() {
 version,
 url: new URL(`${p}/CHANGELOG.md#${version.replace(/\./g, '')}`, baseUrl).toString(),
 };
-})
+}),
 );
 const emoji = item(emojis);
@@ -122,7 +122,7 @@ async function generateMessage() {
 if (packages.length === 1) {
 const { name, version, url } = packages[0];
 message += `${emoji} \`${name}@${version}\` ${singularlize(
-verb
+verb,
 )}\nRead the [release notes ](<${url}>)\n`;
 } else {
 message += `${emoji} Some ${descriptor} ${pluralize(verb)}\n\n`;

View file

@@ -4,15 +4,15 @@ import { existsSync } from 'node:fs';
 const CLIENT_RUNTIME_PATH = 'packages/astro/src/runtime/client/';
 function formatBytes(bytes, decimals = 2) {
 if (bytes === 0) return '0 B';
 const k = 1024;
 const dm = decimals < 0 ? 0 : decimals;
 const sizes = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'];
 const i = Math.floor(Math.log(bytes) / Math.log(k));
 return parseFloat((bytes / Math.pow(k, i)).toFixed(dm)) + ' ' + sizes[i];
 }
 export default async function checkBundleSize({ github, context }) {
@@ -24,7 +24,7 @@ export default async function checkBundleSize({ github, context }) {
 pull_number: PR_NUM,
 });
 const clientRuntimeFiles = files.filter((file) => {
-return file.filename.startsWith(CLIENT_RUNTIME_PATH) && file.status !== 'removed'
+return file.filename.startsWith(CLIENT_RUNTIME_PATH) && file.status !== 'removed';
 });
 if (clientRuntimeFiles.length === 0) return;
@@ -35,17 +35,24 @@ export default async function checkBundleSize({ github, context }) {
 const output = await bundle(clientRuntimeFiles);
 for (let [filename, { oldSize, newSize, sourceFile }] of Object.entries(output)) {
-filename = ['idle', 'load', 'media', 'only', 'visible'].includes(filename) ? `client:${filename}` : filename;
-const prefix = (newSize - oldSize) === 0 ? '' : (newSize - oldSize) > 0 ? '+ ' : '- ';
+filename = ['idle', 'load', 'media', 'only', 'visible'].includes(filename)
+? `client:${filename}`
+: filename;
+const prefix = newSize - oldSize === 0 ? '' : newSize - oldSize > 0 ? '+ ' : '- ';
 const change = `${prefix}${formatBytes(newSize - oldSize)}`;
-table.push(`| [\`${filename}\`](https://github.com/${context.repo.owner}/${context.repo.repo}/tree/${context.payload.pull_request.head.ref}/${sourceFile}) | ${formatBytes(oldSize)} | ${formatBytes(newSize)} | ${change} |`);
+table.push(
+`| [\`${filename}\`](https://github.com/${context.repo.owner}/${context.repo.repo}/tree/${context.payload.pull_request.head.ref}/${sourceFile}) | ${formatBytes(oldSize)} | ${formatBytes(newSize)} | ${change} |`,
+);
 }
 const { data: comments } = await github.rest.issues.listComments({
 ...context.repo,
-issue_number: PR_NUM
-})
-const comment = comments.find(comment => comment.user.login === 'github-actions[bot]' && comment.body.includes('Bundle Size Check'));
+issue_number: PR_NUM,
+});
+const comment = comments.find(
+(comment) =>
+comment.user.login === 'github-actions[bot]' && comment.body.includes('Bundle Size Check'),
+);
 const method = comment ? 'updateComment' : 'createComment';
 const payload = comment ? { comment_id: comment.id } : { issue_number: PR_NUM };
 await github.rest.issues[method]({
@@ -60,9 +67,11 @@ ${table.join('\n')}`,
 }
 async function bundle(files) {
 const { metafile } = await build({
-entryPoints: [...files.map(({ filename }) => filename), ...files.map(({ filename }) => `main/${filename}`).filter(f => existsSync(f))],
+entryPoints: [
+...files.map(({ filename }) => filename),
+...files.map(({ filename }) => `main/${filename}`).filter((f) => existsSync(f)),
+],
 bundle: true,
 minify: true,
 sourcemap: false,
@@ -70,17 +79,24 @@ async function bundle(files) {
 outdir: 'out',
 external: ['astro:*', 'aria-query', 'axobject-query'],
 metafile: true,
-})
+});
 return Object.entries(metafile.outputs).reduce((acc, [filename, info]) => {
 filename = filename.slice('out/'.length);
 if (filename.startsWith('main/')) {
 filename = filename.slice('main/'.length).replace(CLIENT_RUNTIME_PATH, '').replace('.js', '');
 const oldSize = info.bytes;
-return Object.assign(acc, { [filename]: Object.assign(acc[filename] ?? { oldSize: 0, newSize: 0 }, { oldSize }) });
+return Object.assign(acc, {
+[filename]: Object.assign(acc[filename] ?? { oldSize: 0, newSize: 0 }, { oldSize }),
+});
 }
 filename = filename.replace(CLIENT_RUNTIME_PATH, '').replace('.js', '');
 const newSize = info.bytes;
-return Object.assign(acc, { [filename]: Object.assign(acc[filename] ?? { oldSize: 0, newSize: 0 }, { newSize, sourceFile: Object.keys(info.inputs).find(src => src.endsWith('.ts')) }) });
+return Object.assign(acc, {
+[filename]: Object.assign(acc[filename] ?? { oldSize: 0, newSize: 0 }, {
+newSize,
+sourceFile: Object.keys(info.inputs).find((src) => src.endsWith('.ts')),
+}),
+});
 }, {});
 }

View file

@@ -1,59 +1,53 @@
-import * as fs from 'node:fs'
-import * as os from 'node:os'
-import * as crypto from 'node:crypto'
+import * as fs from 'node:fs';
+import * as os from 'node:os';
+import * as crypto from 'node:crypto';
 /** Based on https://github.com/actions/toolkit/blob/4e3b068ce116d28cb840033c02f912100b4592b0/packages/core/src/file-command.ts */
 export function setOutput(key, value) {
-const filePath = process.env['GITHUB_OUTPUT'] || ''
+const filePath = process.env['GITHUB_OUTPUT'] || '';
 if (filePath) {
-return issueFileCommand('OUTPUT', prepareKeyValueMessage(key, value))
+return issueFileCommand('OUTPUT', prepareKeyValueMessage(key, value));
 }
-process.stdout.write(os.EOL)
+process.stdout.write(os.EOL);
 }
 function issueFileCommand(command, message) {
-const filePath = process.env[`GITHUB_${command}`]
+const filePath = process.env[`GITHUB_${command}`];
 if (!filePath) {
-throw new Error(
-`Unable to find environment variable for file command ${command}`
-)
-}
-if (!fs.existsSync(filePath)) {
-throw new Error(`Missing file at path: ${filePath}`)
-}
+throw new Error(`Unable to find environment variable for file command ${command}`);
+}
+if (!fs.existsSync(filePath)) {
+throw new Error(`Missing file at path: ${filePath}`);
+}
 fs.appendFileSync(filePath, `${toCommandValue(message)}${os.EOL}`, {
-encoding: 'utf8'
-})
+encoding: 'utf8',
+});
 }
 function prepareKeyValueMessage(key, value) {
-const delimiter = `gh-delimiter-${crypto.randomUUID()}`
-const convertedValue = toCommandValue(value)
+const delimiter = `gh-delimiter-${crypto.randomUUID()}`;
+const convertedValue = toCommandValue(value);
 // These should realistically never happen, but just in case someone finds a
 // way to exploit uuid generation let's not allow keys or values that contain
 // the delimiter.
 if (key.includes(delimiter)) {
-throw new Error(
-`Unexpected input: name should not contain the delimiter "${delimiter}"`
-)
-}
+throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
+}
 if (convertedValue.includes(delimiter)) {
-throw new Error(
-`Unexpected input: value should not contain the delimiter "${delimiter}"`
-)
-}
+throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
+}
-return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`
+return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;
 }
 function toCommandValue(input) {
 if (input === null || input === undefined) {
-return ''
+return '';
 } else if (typeof input === 'string' || input instanceof String) {
-return input
+return input;
 }
-return JSON.stringify(input)
+return JSON.stringify(input);
 }

View file

@@ -1,19 +1,13 @@
 {
 "$schema": "https://biomejs.dev/schemas/1.9.3/schema.json",
 "files": {
-"ignore": [
-"vendor",
-"**/dist/**",
-"**/smoke/**",
-"**/fixtures/**",
-"**/_temp-fixtures/**",
-"**/vendor/**",
-"**/.vercel/**",
-"benchmark/projects/",
-"benchmark/results/",
-"benchmark/bench/_template.js",
-],
-"include": ["test/**", "e2e/**", "packages/**", "/scripts/**", "benchmark/bench"],
+"ignore": ["**/smoke/**", "**/fixtures/**", "**/_temp-fixtures/**", "**/vendor/**"],
+"include": ["test/**", "e2e/**", "packages/**", "scripts/**", "benchmark/bench"],
+},
+"vcs": {
+"enabled": true,
+"clientKind": "git",
+"useIgnoreFile": true,
 },
 "formatter": {
 "indentStyle": "tab",

View file

@@ -26,7 +26,7 @@ for (const workspaceDir of workspaceDirs) {
 const packageJson = await readAndParsePackageJson(packageJsonPath);
 if (!packageJson) continue;
-if (packageJson.private === true) continue
+if (packageJson.private === true) continue;
 if (!packageJson.name) {
 throw new Error(`${packageJsonPath} does not contain a "name" field.`);