diff --git a/packages/snowpack/package.json b/packages/snowpack/package.json index 4c434808b2..96e8276fa1 100644 --- a/packages/snowpack/package.json +++ b/packages/snowpack/package.json @@ -59,7 +59,6 @@ "@rollup/plugin-replace": "^2.1.0", "@snowpack/plugin-build-script": "^2.0.4", "@snowpack/plugin-run-script": "^2.0.4", - "ansi-regex": "^5.0.0", "cacache": "^15.0.0", "cachedir": "^2.3.0", "chokidar": "^3.4.0", @@ -79,7 +78,7 @@ "http-proxy": "^1.18.1", "is-builtin-module": "^3.0.0", "jsonschema": "^1.2.5", - "kleur": "^4.0.1", + "kleur": "^4.1.0", "mime-types": "^2.1.26", "mkdirp": "^1.0.3", "npm-run-path": "^4.0.1", diff --git a/packages/snowpack/src/commands/build.ts b/packages/snowpack/src/commands/build.ts index 5d1a0f1883..1683880558 100644 --- a/packages/snowpack/src/commands/build.ts +++ b/packages/snowpack/src/commands/build.ts @@ -20,14 +20,14 @@ import {removeLeadingSlash} from '../config'; import createLogger from '../logger'; import {stopEsbuild} from '../plugins/plugin-esbuild'; import {transformFileImports} from '../rewrite-imports'; -import {CommandOptions, SnowpackSourceFile} from '../types/snowpack'; -import {getEncodingType, replaceExt, jsSourceMappingURL, cssSourceMappingURL} from '../util'; +import {CommandOptions, ImportMap, SnowpackConfig, SnowpackSourceFile} from '../types/snowpack'; +import {cssSourceMappingURL, getEncodingType, jsSourceMappingURL, replaceExt} from '../util'; import {getInstallTargets, run as installRunner} from './install'; const logger = createLogger({name: 'snowpack'}); async function installOptimizedDependencies( - allFilesToResolveImports: Record, + scannedFiles: SnowpackSourceFile[], installDest: string, commandOptions: CommandOptions, ) { @@ -35,14 +35,13 @@ async function installOptimizedDependencies( installOptions: { dest: installDest, env: {NODE_ENV: process.env.NODE_ENV || 'production'}, - treeshake: commandOptions.config.installOptions.treeshake ?? 
true, + treeshake: commandOptions.config.buildOptions.watch + ? false + : commandOptions.config.installOptions.treeshake ?? true, }, }); // 1. Scan imports from your final built JS files. - const installTargets = await getInstallTargets( - installConfig, - Object.values(allFilesToResolveImports), - ); + const installTargets = await getInstallTargets(installConfig, scannedFiles); // 2. Install dependencies, based on the scan of your final build. const installResult = await installRunner({ ...commandOptions, @@ -52,6 +51,181 @@ async function installOptimizedDependencies( return installResult; } +/** + * FileBuilder - This class is responsible for building a file. It is broken into + * individual stages so that the entire application build process can be tackled + * in stages (build -> resolve -> write to disk). + */ +class FileBuilder { + output: Record = {}; + filesToResolve: Record = {}; + filesToProxy: string[] = []; + + readonly filepath: string; + readonly outDir: string; + readonly config: SnowpackConfig; + + constructor({ + filepath, + outDir, + config, + }: { + filepath: string; + outDir: string; + config: SnowpackConfig; + }) { + this.filepath = filepath; + this.outDir = outDir; + this.config = config; + } + + async buildFile() { + this.filesToResolve = {}; + const srcExt = path.extname(this.filepath); + const builtFileOutput = await buildFile(this.filepath, { + plugins: this.config.plugins, + isDev: false, + isHmrEnabled: false, + sourceMaps: this.config.buildOptions.sourceMaps, + logLevel: 'info', + }); + for (const [fileExt, buildResult] of Object.entries(builtFileOutput)) { + let {code, map} = buildResult; + if (!code) { + continue; + } + + const outFilename = replaceExt(path.basename(this.filepath), srcExt, fileExt); + const outLoc = path.join(this.outDir, outFilename); + const sourceMappingURL = outFilename + '.map'; + switch (fileExt) { + case '.css': { + if (map) code = cssSourceMappingURL(code, sourceMappingURL); + break; + } + + case '.js': { + if 
(builtFileOutput['.css']) { + // inject CSS if imported directly + const cssFilename = outFilename.replace(/\.js$/i, '.css'); + code = `import './${cssFilename}';\n` + code; + } + code = wrapImportMeta({code, env: true, isDev: false, hmr: false, config: this.config}); + if (map) code = jsSourceMappingURL(code, sourceMappingURL); + this.filesToResolve[outLoc] = { + baseExt: fileExt, + expandedExt: fileExt, + contents: code, + locOnDisk: this.filepath, + }; + break; + } + + case '.html': { + code = wrapHtmlResponse({code, isDev: false, hmr: false, config: this.config}); + this.filesToResolve[outLoc] = { + baseExt: fileExt, + expandedExt: fileExt, + contents: code, + locOnDisk: this.filepath, + }; + break; + } + } + + this.output[outLoc] = code; + if (map) { + this.output[path.join(this.outDir, sourceMappingURL)] = map; + } + } + } + + async resolveImports(importMap: ImportMap) { + let isSuccess = true; + this.filesToProxy = []; + for (const [outLoc, file] of Object.entries(this.filesToResolve)) { + const resolveImportSpecifier = createImportResolver({ + fileLoc: file.locOnDisk!, // we’re confident these are reading from disk because we just read them + dependencyImportMap: importMap, + config: this.config, + }); + const resolvedCode = await transformFileImports(file, (spec) => { + // Try to resolve the specifier to a known URL in the project + let resolvedImportUrl = resolveImportSpecifier(spec); + // NOTE: If the import cannot be resolved, we'll need to re-install + // your dependencies. We don't support this yet, but we will. + // Until supported, just exit here. 
+        if (!resolvedImportUrl) {
+          isSuccess = false;
+          console.error(`${file.locOnDisk} - Could not resolve unknown import "${spec}".`);
+          return spec;
+        }
+        // Ignore "http://*" imports
+        if (url.parse(resolvedImportUrl).protocol) {
+          return spec;
+        }
+        // Handle normal "./" & "../" import specifiers
+        const extName = path.extname(resolvedImportUrl);
+        const isProxyImport = extName && extName !== '.js';
+        const isAbsoluteUrlPath = path.posix.isAbsolute(resolvedImportUrl);
+        let resolvedImportPath = removeLeadingSlash(path.normalize(resolvedImportUrl));
+        // We treat ".proxy.js" files special: we need to make sure that they exist on disk
+        // in the final build, so we mark them to be written to disk at the next step.
+        if (isProxyImport) {
+          if (isAbsoluteUrlPath) {
+            this.filesToProxy.push(path.resolve(this.config.devOptions.out, resolvedImportPath));
+          } else {
+            this.filesToProxy.push(path.resolve(path.dirname(outLoc), resolvedImportPath));
+          }
+        }
+
+        if (isProxyImport) {
+          resolvedImportPath = resolvedImportPath + '.proxy.js';
+          resolvedImportUrl = resolvedImportUrl + '.proxy.js';
+        }
+
+        // When dealing with an absolute import path, we need to honor the baseUrl
+        if (isAbsoluteUrlPath) {
+          resolvedImportUrl = path
+            .relative(
+              path.dirname(outLoc),
+              path.resolve(this.config.devOptions.out, resolvedImportPath),
+            )
+            .replace(/\\/g, '/'); // replace Windows backslashes at the end, after resolution
+        }
+        // Make sure that a relative URL always starts with "./"
+        if (!resolvedImportUrl.startsWith('.') && !resolvedImportUrl.startsWith('/')) {
+          resolvedImportUrl = './' + resolvedImportUrl;
+        }
+        return resolvedImportUrl;
+      });
+      this.output[outLoc] = resolvedCode;
+    }
+    return isSuccess;
+  }
+
+  async writeToDisk() {
+    mkdirp.sync(this.outDir);
+    for (const [outLoc, code] of Object.entries(this.output)) {
+      await fs.writeFile(outLoc, code, getEncodingType(path.extname(outLoc)));
+    }
+  }
+
+  async writeProxyToDisk(originalFileLoc: string) {
+    const proxiedCode = 
this.output[originalFileLoc]; + const importProxyFileLoc = originalFileLoc + '.proxy.js'; + const proxiedUrl = originalFileLoc.substr(this.config.devOptions.out.length).replace(/\\/g, '/'); + const proxyCode = await wrapImportProxy({ + url: proxiedUrl, + code: proxiedCode, + isDev: false, + hmr: false, + config: this.config, + }); + await fs.writeFile(importProxyFileLoc, proxyCode, getEncodingType('.js')); + } +} + export async function command(commandOptions: CommandOptions) { const {cwd, config, logLevel = 'info'} = commandOptions; @@ -59,6 +233,14 @@ export async function command(commandOptions: CommandOptions) { const buildDirectoryLoc = config.devOptions.out; const internalFilesBuildLoc = path.join(buildDirectoryLoc, config.buildOptions.metaDir); + const mountedDirectories: [string, string][] = Object.entries(config.mount).map( + ([fromDisk, toUrl]) => { + return [ + path.resolve(cwd, fromDisk), + path.resolve(buildDirectoryLoc, removeLeadingSlash(toUrl)), + ]; + }, + ); if (config.buildOptions.clean) { rimraf.sync(buildDirectoryLoc); @@ -66,11 +248,6 @@ export async function command(commandOptions: CommandOptions) { mkdirp.sync(buildDirectoryLoc); mkdirp.sync(internalFilesBuildLoc); - let relDest = path.relative(cwd, config.devOptions.out); - if (!relDest.startsWith(`..${path.sep}`)) { - relDest = `.${path.sep}` + relDest; - } - for (const runPlugin of config.plugins) { if (runPlugin.run) { await runPlugin @@ -93,104 +270,70 @@ export async function command(commandOptions: CommandOptions) { // Write the `import.meta.env` contents file to disk await fs.writeFile(path.join(internalFilesBuildLoc, 'env.js'), generateEnvModule('production')); - const includeFileSets: [string, string, string[]][] = []; - for (const [fromDisk, toUrl] of Object.entries(config.mount)) { - const dirDisk = path.resolve(cwd, fromDisk); - const dirDest = path.resolve(buildDirectoryLoc, removeLeadingSlash(toUrl)); + logger.info(colors.yellow('! 
building source…')); + const buildStart = performance.now(); + const buildPipelineFiles: Record = {}; + + /** Install all needed dependencies, based on the master buildPipelineFiles list. */ + async function installDependencies() { + const scannedFiles = Object.values(buildPipelineFiles) + .map((f) => Object.values(f.filesToResolve)) + .filter(Boolean) + .reduce((flat, item) => flat.concat(item), []); + const installDest = path.join(buildDirectoryLoc, config.buildOptions.webModulesUrl); + const installResult = await installOptimizedDependencies(scannedFiles, installDest, { + ...commandOptions, + logLevel: 'error', + }); + if (!installResult.success || installResult.hasError || !installResult.importMap) { + process.exit(1); + } const allFiles = glob.sync(`**/*`, { ignore: config.exclude, - cwd: dirDisk, + cwd: installDest, absolute: true, nodir: true, dot: true, }); - const allBuildNeededFiles: string[] = []; - await Promise.all( - allFiles.map(async (f) => { - f = path.resolve(f); // this is necessary since glob.sync() returns paths with / on windows. path.resolve() will switch them to the native path separator. - allBuildNeededFiles.push(f); - }), - ); - includeFileSets.push([dirDisk, dirDest, allBuildNeededFiles]); + for (const installedFileLoc of allFiles) { + if ( + !installedFileLoc.endsWith('import-map.json') && + path.extname(installedFileLoc) !== '.js' + ) { + const proxiedCode = await fs.readFile(installedFileLoc, {encoding: 'utf-8'}); + const importProxyFileLoc = installedFileLoc + '.proxy.js'; + const proxiedUrl = installedFileLoc.substr(installDest.length).replace(/\\/g, '/'); + const proxyCode = await wrapImportProxy({ + url: proxiedUrl, + code: proxiedCode, + isDev: false, + hmr: false, + config: config, + }); + await fs.writeFile(importProxyFileLoc, proxyCode, getEncodingType('.js')); + } + } + return installResult; } - const allBuiltFromFiles = new Set(); - const allFilesToResolveImports: Record = {}; - - logger.info(colors.yellow('! 
building source…')); - const buildStart = performance.now(); - - for (const [dirDisk, dirDest, allFiles] of includeFileSets) { - for (const locOnDisk of allFiles) { - const srcExt = path.extname(locOnDisk); - const builtFileOutput = await buildFile(locOnDisk, { - plugins: config.plugins, - isDev: false, - isHmrEnabled: false, - sourceMaps: config.buildOptions.sourceMaps, - logLevel, - }); - allBuiltFromFiles.add(locOnDisk); - - for (const [fileExt, buildResult] of Object.entries(builtFileOutput)) { - let {code, map} = buildResult; - if (!code) { - continue; - } - - const outDir = path.dirname(locOnDisk.replace(dirDisk, dirDest)); - const outFilename = replaceExt(path.basename(locOnDisk), srcExt, fileExt); - const outLoc = path.join(outDir, outFilename); - const sourceMappingURL = outFilename + '.map'; - - switch (fileExt) { - case '.css': { - if (map) code = cssSourceMappingURL(code, sourceMappingURL); - break; - } - case '.js': { - if (builtFileOutput['.css']) { - // inject CSS if imported directly - const cssFilename = outFilename.replace(/\.js$/i, '.css'); - code = `import './${cssFilename}';\n` + code; - } - - code = wrapImportMeta({code, env: true, isDev: false, hmr: false, config}); - - if (map) code = jsSourceMappingURL(code, sourceMappingURL); - - allFilesToResolveImports[outLoc] = { - baseExt: fileExt, - expandedExt: fileExt, - contents: code, - locOnDisk, - }; - break; - } - case '.html': { - code = wrapHtmlResponse({code, isDev: false, hmr: false, config}); - allFilesToResolveImports[outLoc] = { - baseExt: fileExt, - expandedExt: fileExt, - contents: code, - locOnDisk, - }; - break; - } - } - - // make directory if doesn’t exist - mkdirp.sync(outDir); - - // write source map - if (map) await fs.writeFile(path.join(outDir, sourceMappingURL), map, 'utf-8'); - - // write file - await fs.writeFile(outLoc, code, getEncodingType(fileExt)); - } + // 1. Load & build all files for the first time, from source. 
+ for (const [fromDisk, dirDest] of mountedDirectories) { + const allFiles = glob.sync(`**/*`, { + ignore: config.exclude, + cwd: fromDisk, + absolute: true, + nodir: true, + dot: true, + }); + for (const rawLocOnDisk of allFiles) { + const locOnDisk = path.resolve(rawLocOnDisk); // this is necessary since glob.sync() returns paths with / on windows. path.resolve() will switch them to the native path separator. + const finalDest = locOnDisk.replace(fromDisk, dirDest); + const outDir = path.dirname(finalDest); + const buildPipelineFile = new FileBuilder({filepath: locOnDisk, outDir, config}); + buildPipelineFiles[locOnDisk] = buildPipelineFile; + await buildPipelineFile.buildFile(); } } - stopEsbuild(); const buildEnd = performance.now(); logger.info( @@ -199,107 +342,121 @@ export async function command(commandOptions: CommandOptions) { )}`, ); - // install - const installDest = path.join(buildDirectoryLoc, config.buildOptions.webModulesUrl); - const installResult = await installOptimizedDependencies(allFilesToResolveImports, installDest, { - ...commandOptions, - logLevel: 'error', - }); - if (!installResult.success || installResult.hasError) { - process.exit(1); - } + // 2. Install all dependencies. This gets us the import map we need to resolve imports. 
+ let installResult = await installDependencies(); - const allImportProxyFiles = new Set(); - for (const [outLoc, file] of Object.entries(allFilesToResolveImports)) { - const resolveImportSpecifier = createImportResolver({ - fileLoc: file.locOnDisk!, // we’re confident these are reading from disk because we just read them - dependencyImportMap: installResult.importMap, - config, - }); - const resolvedCode = await transformFileImports(file, (spec) => { - // Try to resolve the specifier to a known URL in the project - let resolvedImportUrl = resolveImportSpecifier(spec); - if (!resolvedImportUrl || url.parse(resolvedImportUrl).protocol) { - return spec; - } - const extName = path.extname(resolvedImportUrl); - const isProxyImport = extName && extName !== '.js'; - if (isProxyImport) { - resolvedImportUrl = resolvedImportUrl + '.proxy.js'; - } - const isAbsoluteUrlPath = path.posix.isAbsolute(resolvedImportUrl); - const resolvedImportPath = removeLeadingSlash(path.normalize(resolvedImportUrl)); + // 3. Resolve all built file imports. + const allBuildPipelineFiles = Object.values(buildPipelineFiles); + for (const buildPipelineFile of allBuildPipelineFiles) { + await buildPipelineFile.resolveImports(installResult.importMap!); + } - // We treat ".proxy.js" files special: we need to make sure that they exist on disk - // in the final build, so we mark them to be written to disk at the next step. 
- if (isProxyImport) { - if (isAbsoluteUrlPath) { - allImportProxyFiles.add(path.resolve(buildDirectoryLoc, resolvedImportPath)); - } else { - allImportProxyFiles.add(path.resolve(path.dirname(outLoc), resolvedImportPath)); - } - } - // When dealing with an absolute import path, we need to honor the baseUrl - if (isAbsoluteUrlPath) { - resolvedImportUrl = path - .relative(path.dirname(outLoc), path.resolve(buildDirectoryLoc, resolvedImportPath)) - .replace(/\\/g, '/'); // replace Windows backslashes at the end, after resolution - } - // Make sure that a relative URL always starts with "./" - if (!resolvedImportUrl.startsWith('.') && !resolvedImportUrl.startsWith('/')) { - resolvedImportUrl = './' + resolvedImportUrl; + // 4. Write files to disk. + const allImportProxyFiles = new Set( + allBuildPipelineFiles.map((b) => b.filesToProxy).reduce((flat, item) => flat.concat(item), []), + ); + for (const buildPipelineFile of allBuildPipelineFiles) { + await buildPipelineFile.writeToDisk(); + for (const builtFile of Object.keys(buildPipelineFile.output)) { + if (allImportProxyFiles.has(builtFile)) { + await buildPipelineFile.writeProxyToDisk(builtFile); } - return resolvedImportUrl; - }); - await fs.mkdir(path.dirname(outLoc), {recursive: true}); - await fs.writeFile(outLoc, resolvedCode); + } } - for (const importProxyFileLoc of allImportProxyFiles) { - const originalFileLoc = importProxyFileLoc.replace('.proxy.js', ''); - const proxiedExt = path.extname(originalFileLoc); - const proxiedCode = await fs.readFile(originalFileLoc, getEncodingType(proxiedExt)); - const proxiedUrl = originalFileLoc.substr(buildDirectoryLoc.length).replace(/\\/g, '/'); - const proxyCode = await wrapImportProxy({ - url: proxiedUrl, - code: proxiedCode, + // 5. Optimize the build. 
+ if (!config.buildOptions.watch) { + stopEsbuild(); + await runPipelineOptimizeStep(buildDirectoryLoc, { + plugins: config.plugins, isDev: false, - hmr: false, - config, + isHmrEnabled: false, + sourceMaps: config.buildOptions.sourceMaps, }); - await fs.writeFile(importProxyFileLoc, proxyCode, getEncodingType('.js')); + + // minify + if (config.buildOptions.minify) { + const minifierStart = performance.now(); + logger.info(colors.yellow('! minifying javascript...')); + const minifierService = await esbuild.startService(); + const allJsFiles = glob.sync(path.join(buildDirectoryLoc, '**/*.js'), { + ignore: [`**/${config.buildOptions.metaDir}/**/*`], // don’t minify meta dir + }); + await Promise.all( + allJsFiles.map(async (jsFile) => { + const jsFileContents = await fs.readFile(jsFile, 'utf-8'); + const {js} = await minifierService.transform(jsFileContents, {minify: true}); + return fs.writeFile(jsFile, js, 'utf-8'); + }), + ); + const minifierEnd = performance.now(); + logger.info( + `${colors.green('✔')} minification complete ${colors.dim( + `[${((minifierEnd - minifierStart) / 1000).toFixed(2)}s]`, + )}`, + ); + minifierService.stop(); + } + + logger.info(`${colors.underline(colors.green(colors.bold('▶ Build Complete!')))}\n\n`); + return; } - await runPipelineOptimizeStep(buildDirectoryLoc, { - plugins: config.plugins, - isDev: false, - isHmrEnabled: false, - sourceMaps: config.buildOptions.sourceMaps, - }); + // "--watch" mode - Start watching the file system. + // Defer "chokidar" loading to here, to reduce impact on overall startup time + const chokidar = await import('chokidar'); - // minify - if (config.buildOptions.minify) { - const minifierStart = performance.now(); - logger.info(colors.yellow('! 
minifying javascript...')); - const minifierService = await esbuild.startService(); - const allJsFiles = glob.sync(path.join(buildDirectoryLoc, '**/*.js'), { - ignore: [`**/${config.buildOptions.metaDir}/**/*`], // don’t minify meta dir - }); - await Promise.all( - allJsFiles.map(async (jsFile) => { - const jsFileContents = await fs.readFile(jsFile, 'utf-8'); - const {js} = await minifierService.transform(jsFileContents, {minify: true}); - return fs.writeFile(jsFile, js, 'utf-8'); - }), - ); - const minifierEnd = performance.now(); - logger.info( - `${colors.green('✔')} minification complete ${colors.dim( - `[${((minifierEnd - minifierStart) / 1000).toFixed(2)}s]`, - )}`, + function onDeleteEvent(fileLoc: string) { + delete buildPipelineFiles[fileLoc]; + } + async function onWatchEvent(fileLoc: string) { + const [fromDisk, dirDest] = + mountedDirectories.find(([fromDisk]) => fileLoc.startsWith(fromDisk)) || []; + if (!fromDisk || !dirDest) { + return; + } + const finalDest = fileLoc.replace(fromDisk, dirDest); + const outDir = path.dirname(finalDest); + const changedPipelineFile = new FileBuilder({filepath: fileLoc, outDir, config}); + buildPipelineFiles[fileLoc] = changedPipelineFile; + // 1. Build the file. + await changedPipelineFile.buildFile(); + // 2. Resolve any ESM imports. Handle new imports by triggering a re-install. + let resolveSuccess = await changedPipelineFile.resolveImports(installResult.importMap!); + if (!resolveSuccess) { + await installDependencies(); + resolveSuccess = await changedPipelineFile.resolveImports(installResult.importMap!); + if (!resolveSuccess) { + console.error('Exiting...'); + process.exit(1); + } + } + // 3. Write to disk. If any proxy imports are needed, write those as well. 
+ await changedPipelineFile.writeToDisk(); + const allBuildPipelineFiles = Object.values(buildPipelineFiles); + const allImportProxyFiles = new Set( + allBuildPipelineFiles + .map((b) => b.filesToProxy) + .reduce((flat, item) => flat.concat(item), []), ); - minifierService.stop(); + for (const builtFile of Object.keys(changedPipelineFile.output)) { + if (allImportProxyFiles.has(builtFile)) { + await changedPipelineFile.writeProxyToDisk(builtFile); + } + } } + const watcher = chokidar.watch( + mountedDirectories.map(([dirDisk]) => dirDisk), + { + ignored: config.exclude, + ignoreInitial: true, + persistent: true, + disableGlobbing: false, + }, + ); + watcher.on('add', (fileLoc) => onWatchEvent(fileLoc)); + watcher.on('change', (fileLoc) => onWatchEvent(fileLoc)); + watcher.on('unlink', (fileLoc) => onDeleteEvent(fileLoc)); - logger.info(`${colors.underline(colors.green(colors.bold('▶ Build Complete!')))}\n\n`); + return new Promise(() => {}); } diff --git a/packages/snowpack/src/config.ts b/packages/snowpack/src/config.ts index 5a91a3fc4e..886dbd6148 100644 --- a/packages/snowpack/src/config.ts +++ b/packages/snowpack/src/config.ts @@ -62,6 +62,7 @@ const DEFAULT_CONFIG: Partial = { metaDir: '__snowpack__', minify: true, sourceMaps: false, + watch: false, }, }; @@ -139,6 +140,7 @@ const configSchema = { metaDir: {type: 'string'}, minify: {type: 'boolean'}, sourceMaps: {type: 'boolean'}, + watch: {type: 'boolean'}, }, }, proxy: { diff --git a/packages/snowpack/src/logger.ts b/packages/snowpack/src/logger.ts index 219b79a718..1f2d79e2ec 100644 --- a/packages/snowpack/src/logger.ts +++ b/packages/snowpack/src/logger.ts @@ -1,8 +1,5 @@ import * as colors from 'kleur/colors'; import pino, {LoggerOptions} from 'pino'; -import ansiRegex from 'ansi-regex'; - -const NO_COLOR_ENABLED = process.env.FORCE_COLOR === '0' || process.env.NO_COLOR; const LEVEL = { TRACE: 10, @@ -13,11 +10,6 @@ const LEVEL = { FATAL: 60, }; -// @see https://no-color.org -function stripColor(msg: 
string) { - return msg.replace(ansiRegex(), ''); -} - /** http://getpino.io/#/docs/pretty */ export function prettifier(options: LoggerOptions) { return (inputData: pino.LogDescriptor) => { @@ -28,7 +20,7 @@ export function prettifier(options: LoggerOptions) { if (inputData.level === LEVEL.ERROR || inputData.level === LEVEL.FATAL) msg = colors.red(msg); if (inputData.level === LEVEL.WARN) msg = colors.yellow(msg); - return NO_COLOR_ENABLED ? stripColor(msg) : msg; + return msg; }; } diff --git a/packages/snowpack/src/types/snowpack.ts b/packages/snowpack/src/types/snowpack.ts index ac30bc8219..6187596dc3 100644 --- a/packages/snowpack/src/types/snowpack.ts +++ b/packages/snowpack/src/types/snowpack.ts @@ -140,6 +140,7 @@ export interface SnowpackConfig { metaDir: string; minify: boolean; sourceMaps: boolean; + watch: boolean; }; _extensionMap: Record; } diff --git a/yarn.lock b/yarn.lock index 49c66b45c0..c0b9393652 100644 --- a/yarn.lock +++ b/yarn.lock @@ -9319,16 +9319,16 @@ kleur@^3.0.3: resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== -kleur@^4.0.1: - version "4.0.2" - resolved "https://registry.yarnpkg.com/kleur/-/kleur-4.0.2.tgz#57b36cc5235601f824c33e6e45db10cd5493dbf5" - integrity sha512-FGCCxczbrZuF5CtMeO0xfnjhzkVZSXfcWK90IPLucDWZwskrpYN7pmRIgvd8muU0mrPrzy4A2RBGuwCjLHI+nw== - kleur@^4.0.2: version "4.0.3" resolved "https://registry.yarnpkg.com/kleur/-/kleur-4.0.3.tgz#c66f27b804dfb89f804de5f44290c9001979e685" integrity sha512-s84+52u0qkMmOI+eFJSxOCS/2Xt4j2A7Uacg8j+Ja1uFrED2zV3bILKtdAmpJCER4ufTej7p1OwuHbUz5ZVUyg== +kleur@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/kleur/-/kleur-4.1.0.tgz#eea37aa8c210a06c42a591ee56a13f137a13add3" + integrity sha512-s9jD+1dPhJVOzEAGjuabDC1NJdNr1UGj84SftcGukJDpQaLbwuJTakb7FqAhm4cFkhaIvS2DV092FXMyUgPt+w== + last-call-webpack-plugin@^3.0.0: version 
"3.0.0" resolved "https://registry.yarnpkg.com/last-call-webpack-plugin/-/last-call-webpack-plugin-3.0.0.tgz#9742df0e10e3cf46e5c0381c2de90d3a7a2d7555"