import mri from 'mri';
import fs from 'node:fs/promises';
import path from 'node:path';
import { fileURLToPath, pathToFileURL } from 'node:url';
import { makeProject } from './bench/_util.js';
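// Parse CLI arguments with mri: the first positional argument selects a benchmark,
// and `--project` / `--output` override the test project and the result file.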
const args = mri(process.argv.slice(2));
if (args.help || args.h) {
  console.log(`\
astro-benchmark <command> [options]

Command
  [empty]           Run all benchmarks
  memory            Run build memory and speed test
  render            Run rendering speed test
  server-stress     Run server stress test
  cli-startup       Run CLI startup speed test

Options
  --project <project-name>   Project to use for benchmark, see benchmark/make-project/ for available names
  --output <output-file>     Output file to write results to
`);
  process.exit(0);
}
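// Example invocations (illustrative; available project names live in benchmark/make-project/):
//   astro-benchmark                  # run every benchmark with each module's default project
//   astro-benchmark render           # run a single benchmark
//   astro-benchmark memory --output ./memory-results.json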
const commandName = args._[0];
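// Map of benchmark names to lazy imports, so only the selected benchmark's module is
// loaded. Judging from how `benchMod` is used below, each module is expected to export
// a `defaultProject` name and an async `run(projectDir, outputFile)` function, roughly
// (illustrative sketch, not an actual module):
//
//   export const defaultProject = '<project-name>'; // a template under benchmark/make-project/
//   export async function run(projectDir, outputFile) {
//     // ...measure, then write results to `outputFile`
//   }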
const benchmarks = {
  memory: () => import('./bench/memory.js'),
  render: () => import('./bench/render.js'),
  'server-stress': () => import('./bench/server-stress.js'),
  'cli-startup': () => import('./bench/cli-startup.js'),
};
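// Fail fast on an unknown command; valid names are the keys of `benchmarks` above.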
if (commandName && !(commandName in benchmarks)) {
  console.error(`Invalid benchmark name: ${commandName}`);
  process.exit(1);
}
if (commandName) {
  // Run single benchmark
  const bench = benchmarks[commandName];
  const benchMod = await bench();
  const projectDir = await makeProject(args.project || benchMod.defaultProject);
  const outputFile = await getOutputFile(commandName);
  await benchMod.run(projectDir, outputFile);
} else {
  // Run all benchmarks
  for (const name in benchmarks) {
    const bench = benchmarks[name];
    const benchMod = await bench();
    const projectDir = await makeProject(args.project || benchMod.defaultProject);
    const outputFile = await getOutputFile(name);
    await benchMod.run(projectDir, outputFile);
  }
}
/**
 * Resolve the output file location for a benchmark run as a file URL, taken from the
 * `--output` flag or defaulting to a timestamped file under `./results/`.
 * @param {string} benchmarkName
 */
export async function getOutputFile(benchmarkName) {
  let file;
  if (args.output) {
    file = pathToFileURL(path.resolve(args.output));
  } else {
    file = new URL(`./results/${benchmarkName}-bench-${Date.now()}.json`, import.meta.url);
  }

  // Prepare output file directory
  await fs.mkdir(new URL('./', file), { recursive: true });
  return file;
}