Mirror of https://github.com/withastro/astro.git
Prevent duplicate snowpack logs (#316)
* Prevent duplicate snowpack logs
* Fix test to wait on expected messages
* Update the snowpack/plugin-vue
* Add changeset
* Debugging
* Skip the logging test on windows :(
* arg
parent c57e18d2f3
commit b1364afbae

8 changed files with 136 additions and 8 deletions
.changeset/cool-pots-double.md (new file, 6 lines)
@@ -0,0 +1,6 @@
+---
+'astro': patch
+'@astrojs/renderer-vue': patch
+---
+
+Updates logging to display messages from Snowpack

@@ -104,7 +104,8 @@
     "@types/node": "^14.14.31",
     "@types/sass": "^1.16.0",
     "@types/yargs-parser": "^20.2.0",
-    "astro-scripts": "0.0.1"
+    "astro-scripts": "0.0.1",
+    "is-windows": "^1.0.2"
   },
   "engines": {
     "node": ">=14.0.0",

@@ -12,6 +12,7 @@ import { CompileError } from 'astro-parser';
 import { loadConfiguration, logger as snowpackLogger, startServer as startSnowpackServer } from 'snowpack';
 import { canonicalURL, getSrcPath, stopTimer } from './build/util.js';
 import { debug, info } from './logger.js';
+import { configureSnowpackLogger } from './snowpack-logger.js';
 import { searchForPage } from './search.js';
 import snowpackExternals from './external.js';

@@ -45,7 +46,7 @@ type LoadResultError = { statusCode: 500 } & ({ type: 'parse-error'; error: Comp
 export type LoadResult = (LoadResultSuccess | LoadResultNotFound | LoadResultRedirect | LoadResultError) & { collectionInfo?: CollectionInfo };

 // Disable snowpack from writing to stdout/err.
-snowpackLogger.level = process.argv.includes('--verbose') ? 'debug' : 'silent';
+configureSnowpackLogger(snowpackLogger);

 /** Pass a URL to Astro to resolve and build */
 async function load(config: RuntimeConfig, rawPathname: string | undefined): Promise<LoadResult> {

packages/astro/src/snowpack-logger.ts (new file, 34 lines)
@@ -0,0 +1,34 @@
+import { logger as snowpackLogger } from 'snowpack';
+
+function verboseLogging() {
+  return process.argv.includes('--verbose');
+}
+
+const onceMessages = [
+  'Ready!',
+  'watching for file changes'
+].map((str) => new RegExp(`\\[snowpack\\](.*?)${str}`));
+
+export function configureSnowpackLogger(logger: typeof snowpackLogger) {
+  const messageCache = new Set<string>();
+
+  if (verboseLogging()) {
+    logger.level = 'debug';
+  }
+
+  logger.on('info', (message) => {
+    // Cache messages that should only be shown once.
+    // This is due to having 2 snowpack instances. Once that is removed we can
+    // get rid of this workaround.
+    if (messageCache.has(message)) {
+      return;
+    }
+
+    const shouldBeCached = onceMessages.some((exp) => exp.test(message));
+    if (shouldBeCached) {
+      messageCache.add(message);
+    }
+
+    console.log(message);
+  });
+}

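The once-only cache above can be exercised without Snowpack itself. Here is a minimal sketch, using a hypothetical EventEmitter-based mockLogger as a stand-in for Snowpack's logger (whose internals this diff doesn't show), demonstrating that a message matching a once-only pattern prints a single time:

    import { EventEmitter } from 'events';

    // Hypothetical stand-in for snowpack's logger: an emitter plus a `level` field.
    const mockLogger = Object.assign(new EventEmitter(), { level: 'info' });

    const onceMessages = ['Ready!', 'watching for file changes'].map((str) => new RegExp(`\\[snowpack\\](.*?)${str}`));
    const messageCache = new Set<string>();

    mockLogger.on('info', (message: string) => {
      if (messageCache.has(message)) return; // duplicate: drop it
      if (onceMessages.some((exp) => exp.test(message))) {
        messageCache.add(message); // remember once-only messages
      }
      console.log(message);
    });

    // Two snowpack instances would each announce readiness; only the first prints.
    mockLogger.emit('info', '[snowpack] Ready!');
    mockLogger.emit('info', '[snowpack] Ready!');

Running it prints '[snowpack] Ready!' once; the second emit is swallowed by the cache.
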
@@ -4,6 +4,7 @@ import { readFile } from 'fs/promises';
 import { createRuntime } from '#astro/runtime';
 import { loadConfig } from '#astro/config';
 import execa from 'execa';
+import del from 'del';

 const MAX_STARTUP_TIME = 7000; // max time startup may take
 const MAX_TEST_TIME = 10000; // max time an individual test may take

@@ -93,3 +94,8 @@ export function runDevServer(root, additionalArgs = []) {
   const proc = execa('node', args);
   return proc;
 }
+
+export async function clearCache() {
+  const cacheDir = new URL('../../../node_modules/.cache', import.meta.url);
+  await del(fileURLToPath(cacheDir));
+}

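A note on clearCache: by default the `del` package refuses to delete paths outside the current working directory as a safety guard. If the tests ever run from a cwd that doesn't contain the repo-root node_modules, the helper would need `force: true`; a sketch of that variant (the cwd concern is an assumption, this is not the code in the commit):

    import del from 'del';
    import { fileURLToPath } from 'url';

    export async function clearCache() {
      const cacheDir = new URL('../../../node_modules/.cache', import.meta.url);
      // force: true lets del remove a directory that sits outside process.cwd().
      await del(fileURLToPath(cacheDir), { force: true });
    }
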
packages/astro/test/snowpack-log.test.js (new file, 80 lines)
@@ -0,0 +1,80 @@
+import { suite } from 'uvu';
+import * as assert from 'uvu/assert';
+import { clearCache, runDevServer } from './helpers.js';
+import isWindows from 'is-windows';
+
+// For some reason Windows isn't getting anything from stdout in this test, not sure why.
+if (!isWindows()) {
+  const SnowpackLogging = suite('snowpack logging');
+  const MAX_TEST_TIME = 10000; // max time this test suite may take
+
+  function numberOfEntries(stdout, message) {
+    const exp = new RegExp(message, 'g');
+    let count = 0;
+    while (exp.exec(stdout) !== null) {
+      count++;
+    }
+    return count;
+  }
+
+  const root = new URL('./fixtures/astro/basic/', import.meta.url);
+  const timers = {};
+  let runError = null;
+
+  SnowpackLogging.before(async (context) => {
+    await clearCache();
+
+    let importantMessages = 0;
+    let stdout = '';
+    try {
+      const process = runDevServer(root, []);
+
+      process.stdout.setEncoding('utf8');
+      for await (const chunk of process.stdout) {
+        stdout += chunk;
+        if (/Server started/.test(chunk)) {
+          importantMessages++;
+        }
+        if (/Ready/.test(chunk)) {
+          importantMessages++;
+        }
+        if (/watching for file changes/.test(chunk)) {
+          importantMessages++;
+        }
+        if (importantMessages === 3) {
+          break;
+        }
+      }
+
+      context.stdout = stdout;
+      process.kill();
+    } catch (err) {
+      console.error(err);
+      runError = err;
+    }
+  });
+
+  SnowpackLogging.before.each(({ __test__ }) => {
+    timers[__test__] = setTimeout(() => {
+      throw new Error(`Test "${__test__}" did not finish within allowed time`);
+    }, MAX_TEST_TIME);
+  });
+
+  SnowpackLogging('dev server started up', () => {
+    assert.equal(runError, null);
+  });
+
+  SnowpackLogging('logs the Ready message once', ({ stdout }) => {
+    assert.equal(numberOfEntries(stdout, 'Ready'), 1);
+  });
+
+  SnowpackLogging('logs [watching for file changes] once', ({ stdout }) => {
+    assert.equal(numberOfEntries(stdout, 'watching for file changes'), 1);
+  });
+
+  SnowpackLogging.after.each(({ __test__ }) => {
+    clearTimeout(timers[__test__]);
+  });
+
+  SnowpackLogging.run();
+}

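As an aside, the exec loop in numberOfEntries can be collapsed: with a global flag, String.prototype.match returns every match at once, so an equivalent helper (a sketch, behavior-identical for these literal patterns) is:

    function numberOfEntries(stdout, message) {
      // match() with a /g regexp returns an array of all matches, or null when there are none.
      return (stdout.match(new RegExp(message, 'g')) || []).length;
    }
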
@@ -11,6 +11,6 @@
   "dependencies": {
     "vue": "^3.0.10",
     "@vue/server-renderer": "^3.0.10",
-    "@snowpack/plugin-vue": "^2.6.0"
+    "@snowpack/plugin-vue": "^2.6.1"
   }
 }

yarn.lock (10 lines changed)
@@ -1338,10 +1338,10 @@
     svelte-hmr "^0.13.2"
     svelte-preprocess "^4.7.2"

-"@snowpack/plugin-vue@^2.6.0":
-  version "2.6.0"
-  resolved "https://registry.yarnpkg.com/@snowpack/plugin-vue/-/plugin-vue-2.6.0.tgz#9e8ac6eeb15d5e17d3ca297364c140c96d88a667"
-  integrity sha512-kRjfSHMckf2wwPwpQdgDzxxX637rwC2MZIk9ib6GWlrvbFDAjLozeQMY883naQSu8RwZ4jUC0gW1OHzgz8lLPw==
+"@snowpack/plugin-vue@^2.6.1":
+  version "2.6.1"
+  resolved "https://registry.yarnpkg.com/@snowpack/plugin-vue/-/plugin-vue-2.6.1.tgz#ef883dd0b666500d1f401c069d2623ea573ce8c4"
+  integrity sha512-UsnhuSDT7yzoKWBe4sKHmcA1hi9h66hF1Bds2aa6UUcYLPFYZiBozMCZecqQD4LEtHASuTydNET6BYqrmqoAaw==
   dependencies:
     "@rollup/plugin-replace" "^2.4.2"
     "@vue/compiler-sfc" "^3.0.10"

@@ -5454,7 +5454,7 @@ is-whitespace@^0.3.0:
   resolved "https://registry.yarnpkg.com/is-whitespace/-/is-whitespace-0.3.0.tgz#1639ecb1be036aec69a54cbb401cfbed7114ab7f"
   integrity sha1-Fjnssb4DauxppUy7QBz77XEUq38=

-is-windows@^1.0.0, is-windows@^1.0.1:
+is-windows@^1.0.0, is-windows@^1.0.1, is-windows@^1.0.2:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d"
   integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==