// @ts-check

/*
Produces production builds and stitches together d.ts files.

To specify the package to build, simply pass its name and the desired build
formats to output (defaults to `buildOptions.formats` specified in that package,
or "esm,cjs"):

```
# name supports fuzzy match. will build all packages with name containing "dom":
nr build dom

# specify the format to output
nr build core --formats cjs
```
*/

import fs from 'node:fs'
import { parseArgs } from 'node:util'
import { existsSync, readFileSync } from 'node:fs'
import path from 'node:path'
import { brotliCompressSync, gzipSync } from 'node:zlib'
import pico from 'picocolors'
import { cpus } from 'node:os'
import { targets as allTargets, exec, fuzzyMatchTarget } from './utils.js'
import { scanEnums } from './inline-enums.js'
import prettyBytes from 'pretty-bytes'
import { spawnSync } from 'node:child_process'

const commit = spawnSync('git', ['rev-parse', '--short=7', 'HEAD'])
  .stdout.toString()
  .trim()

const { values, positionals: targets } = parseArgs({
  allowPositionals: true,
  options: {
    formats: {
      type: 'string',
      short: 'f',
    },
    devOnly: {
      type: 'boolean',
      short: 'd',
    },
    prodOnly: {
      type: 'boolean',
      short: 'p',
    },
    withTypes: {
      type: 'boolean',
      short: 't',
    },
    sourceMap: {
      type: 'boolean',
      short: 's',
    },
    release: {
      type: 'boolean',
    },
    all: {
      type: 'boolean',
      short: 'a',
    },
    size: {
      type: 'boolean',
    },
  },
})

const {
  formats,
  all: buildAllMatching,
  devOnly,
  prodOnly,
  withTypes: buildTypes,
  sourceMap,
  release: isRelease,
  size: writeSize,
} = values

const sizeDir = path.resolve('temp/size')

run()

async function run() {
  if (writeSize) fs.mkdirSync(sizeDir, { recursive: true })
  const removeCache = scanEnums()
  try {
    const resolvedTargets = targets.length
      ? fuzzyMatchTarget(targets, buildAllMatching)
      : allTargets
    await buildAll(resolvedTargets)
    await checkAllSizes(resolvedTargets)
    if (buildTypes) {
      await exec(
        'pnpm',
        [
          'run',
          'build-dts',
          ...(targets.length
            ? ['--environment', `TARGETS:${resolvedTargets.join(',')}`]
            : []),
        ],
        {
          stdio: 'inherit',
        },
      )
    }
  } finally {
    removeCache()
  }
}

/**
 * Builds all the targets in parallel.
 * @param {Array<string>} targets - An array of targets to build.
 * @returns {Promise<void>} - A promise representing the build process.
 */
async function buildAll(targets) {
  await runParallel(cpus().length, targets, build)
}

/**
 * Runs iterator function in parallel.
 * @template T - The type of items in the data source
 * @param {number} maxConcurrency - The maximum concurrency.
 * @param {Array<T>} source - The data source
 * @param {(item: T) => Promise<void>} iteratorFn - The iteratorFn
 * @returns {Promise<void[]>} - A Promise array containing all iteration results.
 */
async function runParallel(maxConcurrency, source, iteratorFn) {
  /** @type {Promise<void>[]} */
  const ret = []
  /** @type {Promise<void>[]} */
  const executing = []
  for (const item of source) {
    const p = Promise.resolve().then(() => iteratorFn(item))
    ret.push(p)

    if (maxConcurrency <= source.length) {
      const e = p.then(() => {
        executing.splice(executing.indexOf(e), 1)
      })
      executing.push(e)
      if (executing.length >= maxConcurrency) {
        await Promise.race(executing)
      }
    }
  }
  return Promise.all(ret)
}
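// For example, runParallel(2, ['a', 'b', 'c'], fn) keeps at most two fn calls
// in flight at a time; the limit only applies when the source has at least
// `maxConcurrency` items (illustrative values, not used by this script).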
const privatePackages = fs.readdirSync('packages-private')

/**
 * Builds the target.
 * @param {string} target - The target to build.
 * @returns {Promise<void>} - A promise representing the build process.
 */
async function build(target) {
  const pkgBase = privatePackages.includes(target)
    ? `packages-private`
    : `packages`
  const pkgDir = path.resolve(`${pkgBase}/${target}`)
  const pkg = JSON.parse(readFileSync(`${pkgDir}/package.json`, 'utf-8'))

  // if this is a full build (no specific targets), ignore private packages
  if ((isRelease || !targets.length) && pkg.private) {
    return
  }

  // if building a specific format, do not remove dist.
  if (!formats && existsSync(`${pkgDir}/dist`)) {
    fs.rmSync(`${pkgDir}/dist`, { recursive: true })
  }

  const env =
    (pkg.buildOptions && pkg.buildOptions.env) ||
    (devOnly ? 'development' : 'production')

  await exec(
    'rollup',
    [
      '-c',
      '--environment',
      [
        `COMMIT:${commit}`,
        `NODE_ENV:${env}`,
        `TARGET:${target}`,
        formats ? `FORMATS:${formats}` : ``,
        prodOnly ? `PROD_ONLY:true` : ``,
        sourceMap ? `SOURCE_MAP:true` : ``,
      ]
        .filter(Boolean)
        .join(','),
    ],
    { stdio: 'inherit' },
  )
}

/**
 * Checks the sizes of all targets.
 * @param {string[]} targets - The targets to check sizes for.
 * @returns {Promise<void>}
 */
async function checkAllSizes(targets) {
  if (devOnly || (formats && !formats.includes('global'))) {
    return
  }
  console.log()
  for (const target of targets) {
    await checkSize(target)
  }
  console.log()
}

/**
 * Checks the size of a target.
 * @param {string} target - The target to check the size for.
 * @returns {Promise<void>}
 */
async function checkSize(target) {
  const pkgDir = path.resolve(`packages/${target}`)
  await checkFileSize(`${pkgDir}/dist/${target}.global.prod.js`)
  if (!formats || formats.includes('global-runtime')) {
    await checkFileSize(`${pkgDir}/dist/${target}.runtime.global.prod.js`)
  }
}

/**
 * Checks the file size.
 * @param {string} filePath - The path of the file to check the size for.
 * @returns {Promise<void>}
 */
async function checkFileSize(filePath) {
  if (!existsSync(filePath)) {
    return
  }
  const file = fs.readFileSync(filePath)
  const fileName = path.basename(filePath)

  const gzipped = gzipSync(file)
  const brotli = brotliCompressSync(file)
  console.log(
    `${pico.gray(pico.bold(fileName))} min:${prettyBytes(
      file.length,
    )} / gzip:${prettyBytes(gzipped.length)} / brotli:${prettyBytes(
      brotli.length,
    )}`,
  )

  if (writeSize)
    fs.writeFileSync(
      path.resolve(sizeDir, `${fileName}.json`),
      JSON.stringify({
        file: fileName,
        size: file.length,
        gzip: gzipped.length,
        brotli: brotli.length,
      }),
      'utf-8',
    )
}
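// When the --size flag is passed, each bundle checked above also gets a JSON
// report under temp/size, e.g. temp/size/vue.global.prod.js.json containing
// { file, size, gzip, brotli } byte counts (the file name here is illustrative).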