refactor: move build system and don't bundle code

Derrick Hammer 2023-07-11 18:37:07 -04:00
parent 8f97d626a5
commit 655dfe72b0
Signed by: pcfreak30
GPG Key ID: C997C339BE476FF2
106 changed files with 19367 additions and 10827 deletions


@@ -1,116 +0,0 @@
name: build, test (node and browser), coverage, publish to NPM
on:
push:
tags:
- "v*.*.*"
workflow_dispatch:
jobs:
build:
name: build
runs-on: ubuntu-latest
steps:
- name: Git checkout
uses: actions/checkout@v3
- name: Use Node.js 18
uses: actions/setup-node@v3
with:
node-version: 18.x
registry-url: "https://registry.npmjs.org"
- name: install
run: npm ci
- name: build
run: npm run build
nodetests:
name: tests in Node.js
needs: [build]
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
node-version: [16.x, 18.x, 20.x]
# When set to true, GitHub cancels all in-progress jobs if any matrix job fails.
fail-fast: false
# The maximum number of jobs that can run simultaneously. Set to 1 if you can't run tests in parallel
# max-parallel: 1
steps:
- name: Git checkout
uses: actions/checkout@v3
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node-version }}
registry-url: "https://registry.npmjs.org"
- name: install
run: npm i
- name: node esm tests
run: npm run test:node-esm
# env:
# VARIABLE1: ${{ secrets.VARIABLE1 }}
# VARIABLE2: ${{ secrets.VARIABLE2 }}
- name: node cjs tests
run: npm run test:node-cjs
# env:
# VARIABLE1: ${{ secrets.VARIABLE1 }}
# VARIABLE2: ${{ secrets.VARIABLE2 }}
browsertests:
needs: [build]
name: tests in browser
runs-on: ubuntu-latest
steps:
- name: Git checkout
uses: actions/checkout@v3
- name: Use Node.js 18
uses: actions/setup-node@v3
with:
node-version: 18.x
registry-url: "https://registry.npmjs.org"
- name: install
run: npm ci
- name: browser tests
run: npm run test:browser-headless
# env:
# VARIABLE1: ${{ secrets.VARIABLE1 }}
# VARIABLE2: ${{ secrets.VARIABLE2 }}
publish:
needs: [nodetests, browsertests]
runs-on: ubuntu-latest
steps:
- name: Git checkout
uses: actions/checkout@v3
- name: Install Node.js, NPM and Yarn
uses: actions/setup-node@v3
with:
node-version: "18.x"
registry-url: "https://registry.npmjs.org"
- name: install
run: npm ci
- name: coverage
run: npm run coverage
- name: send report to coveralls.io
uses: coverallsapp/github-action@master
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: NPM publish
run: npm publish --access public
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

.github/workflows/ci.yml (new file)

@@ -0,0 +1,13 @@
name: Build/Publish
on:
push:
branches:
- master
- develop
- develop-*
jobs:
main:
uses: lumeweb/github-node-deploy-workflow/.github/workflows/main.yml@master
secrets: inherit

.presetterrc.json (new file)

@@ -0,0 +1,5 @@
{
"preset": [
"@lumeweb/node-library-preset"
]
}


@@ -1,134 +0,0 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, caste, color, religion, or sexual
identity and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the overall
community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or advances of
any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email address,
without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
[INSERT CONTACT METHOD].
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series of
actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or permanent
ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within the
community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.1, available at
[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1].
Community Impact Guidelines were inspired by
[Mozilla's code of conduct enforcement ladder][Mozilla CoC].
For answers to common questions about this code of conduct, see the FAQ at
[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at
[https://www.contributor-covenant.org/translations][translations].
[homepage]: https://www.contributor-covenant.org
[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html
[Mozilla CoC]: https://github.com/mozilla/diversity
[FAQ]: https://www.contributor-covenant.org/faq
[translations]: https://www.contributor-covenant.org/translations


@@ -1,168 +0,0 @@
#! /usr/bin/env node
const fs = require('fs')
const path = require('path')
const glob = require('glob')
const minimatch = require('minimatch').minimatch
const rimraf = require('rimraf')
const runScript = require('../run-script.cjs')
const rootDir = path.join(__dirname, '../..')
const pkgJson = require(path.join(rootDir, 'package.json'))
const mochaTsRelativeDir = pkgJson.directories['mocha-ts']
const mochaTsDir = path.join(rootDir, mochaTsRelativeDir)
// clean .mocha-ts directory
rimraf.sync(mochaTsDir)
const semaphorePath = `${mochaTsRelativeDir}/semaphore`
const tempDir = mochaTsDir
fs.mkdirSync(tempDir, { recursive: true })
const usage = `Usage: mocha-ts [options] [spec]
mocha against ts tests and modules
Arguments:
spec One or more files, directories, or globs to test (default:
"{src/ts/**/*.spec.ts,test/**/*.ts}")
Options:
-w, --watch run in watch mode. Since mocha only supports CJS in watch
mode, this option implies -cjs as well (default: false)
-cjs, --commonjs run tests against the CJS bundle instead of the ESM one
(default: false)
-h, --help display help for command
`
function parse () {
const args = process.argv.slice(2)
const help = getBooleanOption(args, '--help', '-h')
if (help) {
console.log(usage)
process.exit()
}
const requiredFile = getOption(args, '--require')
const watch = getBooleanOption(args, '--watch', '-w')
const commonjs = getBooleanOption(args, '--commonjs', '-cjs')
if (commonjs === false && watch === true) {
console.log('ERROR: mocha in watch mode only supports commonjs')
console.log(usage)
process.exit(1)
}
let testsGlob = args.pop()
if (testsGlob === undefined) {
testsGlob = '{src/ts/**/*.spec.ts,test/**/*.ts}'
} else {
testsGlob = testsGlob.replace(/^['"]/, '').replace(/['"]$/, '') // strip surrounding quotes (they cause issues on Windows)
}
const mochaArgs = []
if (requiredFile !== '') {
mochaArgs.push('--require')
mochaArgs.push(requiredFile)
}
mochaArgs.push('--require')
mochaArgs.push('build/testing/mocha/mocha-init.cjs')
if (watch) {
mochaArgs.push('-w')
mochaArgs.push('--watch-files')
mochaArgs.push(semaphorePath)
}
if (testsGlob.substring(0, 1) === '-') {
console.log(usage)
process.exit(9)
}
let filenames = []
try {
filenames = glob.sync(testsGlob, { cwd: rootDir, matchBase: true })
} catch (error) {}
if (filenames.length === 0) {
console.error('invalid or empty glob pattern: ' + testsGlob)
console.log()
console.log(usage)
process.exit(9)
}
const testFiles = []
const jsTestFiles = []
if (filenames.length > 0) {
filenames.forEach(file => {
const isTsTestFile = minimatch(file, '{test/**/*.ts,src/**/*.spec.ts}', { matchBase: true })
if (isTsTestFile) {
testFiles.push(file)
const extension = commonjs ? 'cjs' : 'js'
jsTestFiles.push(`${mochaTsRelativeDir}/${file.slice(0, -3)}.${extension}`)
}
})
}
mochaArgs.push(...jsTestFiles)
return {
mochaArgs,
testFiles,
commonjs
}
}
const processedArgs = parse()
const commonjs = processedArgs.commonjs
const testFiles = processedArgs.testFiles
const mochaArgs = processedArgs.mochaArgs
// Prepare the mocha setup (written to a JSON file that mocha-init.cjs loads)
const mochaSetup = {
testFiles,
commonjs
}
fs.writeFileSync(path.join(tempDir, 'testSetup.json'), JSON.stringify(mochaSetup, undefined, 2), { encoding: 'utf-8' })
if (commonjs) {
console.log('\x1b[33m [mocha-ts] Running tests against the CommonJS module \x1b[0m\n')
} else {
console.log('\x1b[33m [mocha-ts] Running tests against the ESM module \x1b[0m\n')
}
const rollupBuilder = require('../testing/mocha/builders/RollupBuilder.cjs').rollupBuilder
rollupBuilder.start({ commonjs, watch: false }).then(() => {
rollupBuilder.close()
const testsBuilder = require('../testing/mocha/builders/TestsBuilder.cjs').testBuilder
testsBuilder.start({ commonjs, testFiles }).then(() => {
testsBuilder.close()
// Now run mocha
runScript(path.join(rootDir, 'node_modules/mocha/bin/mocha'), mochaArgs)
})
})
function getBooleanOption (args, ...optionNames) {
let found = false
optionNames.forEach((option) => {
const index = args.indexOf(option)
if (index > -1) {
found = true
args.splice(index, 1)
}
})
return found
}
function getOption (args, option) {
const index = args.indexOf(option)
if (index > -1 && index < args.length - 2) {
return args.splice(index, 2)[1]
}
return ''
}
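As a quick illustration of how the two option helpers above consume argv (values are hypothetical and assume the snippet runs inside this script):

// Hypothetical argv after `mocha-ts --require setup.cjs -w 'test/**/*.ts'`
const exampleArgs = ['--require', 'setup.cjs', '-w', "'test/**/*.ts'"]
getOption(exampleArgs, '--require')            // -> 'setup.cjs'; splices both tokens out of exampleArgs
getBooleanOption(exampleArgs, '--watch', '-w') // -> true; removes the '-w' flag
// exampleArgs is now ["'test/**/*.ts'"], which parse() pops as the tests glob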


@@ -1,204 +0,0 @@
'use strict'
const fs = require('fs')
const TypeDoc = require('typedoc')
const path = require('path')
const json5 = require('json5')
const pkgJson = require('../package.json')
const rimraf = require('rimraf')
const rootDir = path.join(__dirname, '..')
const templateFilePath = path.join(rootDir, pkgJson.directories.src, 'docs/index.md')
let template = fs.readFileSync(templateFilePath, { encoding: 'utf-8' })
async function main () {
// Generate API doc with typedoc
await typedoc()
// Translate relative links to the project's root
replaceRelativeLinks()
// Let us replace variables and badges
variableReplacements()
const readmeFile = path.join(rootDir, 'README.md')
fs.writeFileSync(readmeFile, template)
}
main()
/* ------------------------------------------------------------------------- |
| UTILITY FUNCTIONS |
| ------------------------------------------------------------------------- */
function camelise (str) {
return str.replace(/-([a-z])/g,
function (m, w) {
return w.toUpperCase()
})
}
async function typedoc () {
const app = new TypeDoc.Application()
// prepare tsconfig
const tsConfigPath = path.join(rootDir, 'tsconfig.json')
const tempTsConfigPath = path.join(rootDir, '.tsconfig.json')
const tsConfig = json5.parse(fs.readFileSync(tsConfigPath, 'utf8'))
tsConfig.include = ['src/ts/**/*', 'build/typings/**/*.d.ts']
tsConfig.exclude = ['src/**/*.spec.ts']
fs.writeFileSync(tempTsConfigPath, JSON.stringify(tsConfig, undefined, 2))
// If you want TypeDoc to load tsconfig.json / typedoc.json files
app.options.addReader(new TypeDoc.TSConfigReader())
// app.options.addReader(new TypeDoc.TypeDocReader())
app.bootstrap({
// typedoc options here
tsconfig: tempTsConfigPath,
entryPoints: ['src/ts/index.ts'],
plugin: ['typedoc-plugin-markdown'],
includeVersion: true,
entryDocument: 'API.md',
readme: 'none',
hideBreadcrumbs: true,
excludePrivate: true
})
const project = app.convert()
if (project) {
// Project may not have converted correctly
const output = path.join(rootDir, './docs')
// Rendered docs
await app.generateDocs(project, output)
}
rimraf.sync(tempTsConfigPath)
}
function getRepositoryData () {
let ret
if (typeof pkgJson.repository === 'string') {
const repodata = pkgJson.repository.split(/[:/]/)
const repoProvider = repodata[0]
if (repoProvider === 'github' || repoProvider === 'gitlab' || repoProvider === 'bitbucket') {
ret = {
repoProvider,
repoUsername: repodata[1],
repoName: repodata.slice(2).join('/')
}
}
} else if (typeof pkgJson.repository === 'object' && pkgJson.repository.type === 'git' && pkgJson.repository.url !== 'undefined') {
const regex = /(?:.+?\+)?http[s]?:\/\/(?<repoProvider>[\w._-]+)\.\w{2,3}\/(?<repoUsername>[\w._-]+)\/(?<repoName>[\w._\-/]+?)\.git/
const match = pkgJson.repository.url.match(regex)
ret = {
repoProvider: match[1],
repoUsername: match[2],
repoName: match[3],
repoDirectory: pkgJson.repository.directory
}
}
if (typeof ret === 'object') {
if (typeof pkgJson.nodeBrowserSkel === 'object' && typeof pkgJson.nodeBrowserSkel.git === 'object' && typeof pkgJson.nodeBrowserSkel.git.branch === 'string') {
ret.branch = pkgJson.nodeBrowserSkel.git.branch
} else {
ret.branch = (ret.repoProvider === 'github') ? 'main' : 'master'
}
}
return ret
}
function variableReplacements () {
const { repoProvider, repoUsername, repoName, repoDirectory, branch } = getRepositoryData() || {}
const regex = /^(?:(?<scope>@.*?)\/)?(?<name>.*)/ // We are going to take only the package name part if there is a scope, e.g. @my-org/package-name
const { name } = pkgJson.name.match(regex).groups
const camelCaseName = camelise(name)
const iifeBundlePath = pkgJson.exports['./iife-browser-bundle'] !== undefined ? path.relative('.', pkgJson.exports['./iife-browser-bundle']) : undefined
const esmBundlePath = pkgJson.exports['./esm-browser-bundle'] !== undefined ? path.relative('.', pkgJson.exports['./esm-browser-bundle']) : undefined
const umdBundlePath = pkgJson.exports['./umd-browser-bundle'] !== undefined ? path.relative('.', pkgJson.exports['./umd-browser-bundle']) : undefined
let useWorkflowBadge = false
let useCoverallsBadge = false
if (pkgJson.nodeBrowserSkel !== undefined && pkgJson.nodeBrowserSkel.badges !== undefined) {
if (pkgJson.nodeBrowserSkel.badges.workflow === true) {
useWorkflowBadge = true
}
if (pkgJson.nodeBrowserSkel.badges.coveralls === true) {
useCoverallsBadge = true
}
}
let iifeBundle, esmBundle, umdBundle, workflowBadge, coverallsBadge
if (repoProvider) {
switch (repoProvider) {
case 'github':
iifeBundle = iifeBundlePath !== undefined ? `[IIFE bundle](https://raw.githubusercontent.com/${repoUsername}/${repoName}/${branch}/${repoDirectory !== undefined ? repoDirectory + '/' : ''}${iifeBundlePath})` : undefined
esmBundle = esmBundlePath !== undefined ? `[ESM bundle](https://raw.githubusercontent.com/${repoUsername}/${repoName}/${branch}/${repoDirectory !== undefined ? repoDirectory + '/' : ''}${esmBundlePath})` : undefined
umdBundle = umdBundlePath !== undefined ? `[UMD bundle](https://raw.githubusercontent.com/${repoUsername}/${repoName}/${branch}/${repoDirectory !== undefined ? repoDirectory + '/' : ''}${umdBundlePath})` : undefined
workflowBadge = useWorkflowBadge ? `[![Node.js CI](https://github.com/${repoUsername}/${repoName}/actions/workflows/build-and-test.yml/badge.svg)](https://github.com/${repoUsername}/${repoName}/actions/workflows/build-and-test.yml)` : undefined
coverallsBadge = useCoverallsBadge ? `[![Coverage Status](https://coveralls.io/repos/github/${repoUsername}/${repoName}/badge.svg?branch=${branch})](https://coveralls.io/github/${repoUsername}/${repoName}?branch=${branch})` : undefined
break
case 'gitlab':
iifeBundle = iifeBundlePath !== undefined ? `[IIFE bundle](https://gitlab.com/${repoUsername}/${repoName}/-/raw/${branch}/${repoDirectory !== undefined ? repoDirectory + '/' : ''}${iifeBundlePath}?inline=false)` : undefined
esmBundle = esmBundlePath !== undefined ? `[ESM bundle](https://gitlab.com/${repoUsername}/${repoName}/-/raw/${branch}/${repoDirectory !== undefined ? repoDirectory + '/' : ''}${esmBundlePath}?inline=false)` : undefined
umdBundle = umdBundlePath !== undefined ? `[UMD bundle](https://gitlab.com/${repoUsername}/${repoName}/-/raw/${branch}/${repoDirectory !== undefined ? repoDirectory + '/' : ''}${umdBundlePath}?inline=false)` : undefined
break
default:
break
}
}
template = template
.replace(/\{\{PKG_NAME\}\}/g, pkgJson.name)
.replace(/\{\{PKG_LICENSE\}\}/g, pkgJson.license.replace('-', '_'))
.replace(/\{\{PKG_DESCRIPTION\}\}/g, pkgJson.description)
.replace(/\{\{PKG_CAMELCASE\}\}/g, camelCaseName)
.replace(/\{\{IIFE_BUNDLE\}\}/g, iifeBundle || 'IIFE bundle')
.replace(/\{\{ESM_BUNDLE\}\}/g, esmBundle || 'ESM bundle')
.replace(/\{\{UMD_BUNDLE\}\}/g, umdBundle || 'UMD bundle')
if (repoProvider && repoProvider === 'github') {
template = template.replace(/\{\{GITHUB_ACTIONS_BADGES\}\}\n/gs, (workflowBadge ? `${workflowBadge}\n` : '') + (coverallsBadge ? `${coverallsBadge}\n` : ''))
} else {
template = template.replace(/\{\{GITHUB_ACTIONS_BADGES\}\}\n/gs, '')
}
}
function replaceRelativeLinks () {
const replacements = []
const relativePathRegex = /(\[[\w\s\d]+\]\()(?!(?:http:\/\/)|(?:https:\/\/))([\w\d;,/?:@&=+$-_.!~*'()\\#]+)\)/g
const matches = template.matchAll(relativePathRegex)
if (matches) {
for (const match of matches) {
const index = (match.index ?? 0) + match[1].length
const filepath = match[2]
if (!path.isAbsolute(filepath)) {
const absoluteFilePath = path.join(path.dirname(templateFilePath), filepath)
if (!fs.existsSync(absoluteFilePath)) {
console.warn(`File ${absoluteFilePath} is linked in your index.md but it does not exist. Ignoring`)
} else {
const replacement = path.relative(rootDir, absoluteFilePath)
replacements.push({ index, length: filepath.length, replacement })
}
}
}
const sortedReplacements = replacements.sort((a, b) => a.index - b.index)
let ret = ''
let index = 0
for (const replacement of sortedReplacements) {
ret += template.slice(index, replacement.index)
ret += replacement.replacement
index = replacement.index + replacement.length
}
ret += template.slice(index)
template = ret
}
}
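For example, run inside this script, getRepositoryData() above resolves the two package.json "repository" forms like this (package names are hypothetical):

// Shorthand: "repository": "github:my-org/my-lib"
console.log(getRepositoryData())
// -> { repoProvider: 'github', repoUsername: 'my-org', repoName: 'my-lib', branch: 'main' }
// Long form: { "type": "git", "url": "git+https://github.com/my-org/my-lib.git", "directory": "packages/my-lib" }
// -> same provider/username/name fields, plus repoDirectory: 'packages/my-lib'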


@@ -1,52 +0,0 @@
import { mkdirSync, writeFileSync } from 'fs'
import ts from 'typescript'
import { join, dirname, extname } from 'path'
import { sync } from 'rimraf'
import * as url from 'url'
const __dirname = url.fileURLToPath(new URL('.', import.meta.url))
const { readJsonConfigFile, sys, parseJsonSourceFileConfigFileContent, createCompilerHost, createProgram } = ts
const rootDir = join(__dirname, '..')
const srcFile = join(rootDir, 'src/ts/index.ts')
const tsConfigPath = join(rootDir, 'tsconfig.json')
const configFile = readJsonConfigFile(tsConfigPath, (file) => {
return sys.readFile(file)
})
const tsConfig = parseJsonSourceFileConfigFileContent(configFile, sys, dirname(tsConfigPath))
export const compile = (outDir) => {
const compilerOptions = {
...tsConfig.options,
removeComments: false,
declaration: true,
declarationMap: true,
emitDeclarationOnly: true,
outDir
}
const host = createCompilerHost(compilerOptions)
host.writeFile = (fileName, contents) => {
mkdirSync(dirname(fileName), { recursive: true })
writeFileSync(fileName, contents)
// we also write the .d.cts types
let fileName2 = ''
if (extname(fileName) === '.ts') {
fileName2 = fileName.slice(0, -2) + 'cts'
} else { // ext is .d.ts.map
fileName2 = fileName.slice(0, -6) + 'cts.map'
}
writeFileSync(fileName2, contents)
}
// Clear the types dir
sync(outDir)
// Prepare and emit the d.ts files
const program = createProgram([srcFile], compilerOptions, host)
program.emit()
}
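A minimal usage sketch of the compile() helper above, mirroring how rollup.config.js (next file) consumes it; the output directory is illustrative:

import { join, dirname } from 'path'
import { fileURLToPath } from 'url'
import { compile } from './rollup-plugin-dts.js' // same specifier rollup.config.js imports

const here = dirname(fileURLToPath(import.meta.url))
// Clears the target dir, then emits index.d.ts / index.d.cts (and their source maps) for src/ts/index.ts
compile(join(here, '../.types'))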


@@ -1,243 +0,0 @@
'use strict'
import commonjs from '@rollup/plugin-commonjs'
import inject from '@rollup/plugin-inject'
import json from '@rollup/plugin-json'
import { nodeResolve as resolve } from '@rollup/plugin-node-resolve'
import replace from '@rollup/plugin-replace'
import terser from '@rollup/plugin-terser'
import rollupPluginTs from '@rollup/plugin-typescript'
import { existsSync, readFileSync } from 'fs'
import { builtinModules } from 'module'
import { join } from 'path'
import dts from 'rollup-plugin-dts'
import { compile } from './rollup-plugin-dts.js'
import * as url from 'url'
const __dirname = url.fileURLToPath(new URL('.', import.meta.url))
const rootDir = join(__dirname, '..')
const pkgJson = JSON.parse(readFileSync(join(rootDir, 'package.json')))
const pkgJsonLock = JSON.parse(readFileSync(join(rootDir, 'package-lock.json')))
const srcDir = join(rootDir, 'src', 'ts')
const tsConfigPath = join(rootDir, 'tsconfig.json')
function camelise (str) {
return str.replace(/-([a-z])/g,
function (m, w) {
return w.toUpperCase()
})
}
function isDevDependency (moduleName) {
const packageEntry = pkgJsonLock.packages['node_modules/' + moduleName]
return (packageEntry ?? {}).dev === true
}
const regex = /^(?:(?<scope>@.*?)\/)?(?<name>.*)/ // We are going to take only the package name part if there is a scope, e.g. @my-org/package-name
const { name } = pkgJson.name.match(regex).groups
const pkgCamelisedName = camelise(name)
const input = join(srcDir, 'index.ts')
if (existsSync(input) !== true) throw new Error('The entry point should be index.ts')
const tsPluginOptions = {
tsconfig: tsConfigPath,
outDir: undefined,
include: ['src/ts/**/*', 'build/typings/**/*.d.ts'],
exclude: ['src/**/*.spec.ts']
}
const sourcemapOutputOptions = {
sourcemap: 'inline',
sourcemapExcludeSources: true
}
function compileDts (outDir) {
return {
name: 'compile-dts',
closeBundle () {
compile(outDir)
}
}
}
function resolveOnly (module) { // if a dev dependency is imported we will resolve it so that the dist modules always work
const moduleNameMatch = module.match(/^(?:@[a-z0-9_-]+\/)?(?:node:)?[a-z0-9_-]+/)
if (moduleNameMatch === null || moduleNameMatch.length !== 1) {
return false
}
const moduleName = moduleNameMatch[0].replace(/^node:/, '')
// don't resolve if it is a native module
if (builtinModules.includes(moduleName)) {
return false
}
if (isDevDependency(moduleName)) {
console.warn(`\x1b[33m⚠ WARN: dev dependency \x1b[0m${module}\x1b[33m being bundled. Should it be a dependency instead?\x1b[0m`)
return true
}
return false
}
const tmpDeclarationsDir = join(rootDir, '.types')
export default [
{ // Browser ESM
input,
output: [
{
file: join(rootDir, pkgJson.exports['.'].default.default),
...sourcemapOutputOptions,
format: 'es',
plugins: [
terser()
]
}
],
plugins: [
replace({
IS_BROWSER: true,
environment: 'browser',
_MODULE_TYPE: "'ESM'",
_NPM_PKG_VERSION: `'${process.env.npm_package_version ?? '0.0.1'}'`,
preventAssignment: true
}),
rollupPluginTs(tsPluginOptions),
commonjs({ extensions: ['.js', '.cjs', '.jsx', '.cjsx'] }),
json(),
resolve({
browser: true,
exportConditions: ['browser', 'default'],
mainFields: ['browser', 'module', 'main'],
resolveOnly
})
]
},
{ // Browser bundles
input,
output: [
{
file: join(rootDir, pkgJson.exports['./esm-browser-bundle-nomin']),
format: 'es'
},
{
file: join(rootDir, pkgJson.exports['./esm-browser-bundle']),
format: 'es',
plugins: [terser()]
},
{
file: join(rootDir, pkgJson.exports['./iife-browser-bundle']),
format: 'iife',
name: pkgCamelisedName,
plugins: [terser()]
},
{
file: join(rootDir, pkgJson.exports['./umd-browser-bundle']),
format: 'umd',
name: pkgCamelisedName,
plugins: [terser()]
}
],
plugins: [
replace({
IS_BROWSER: true,
environment: 'browser',
_MODULE_TYPE: "'BUNDLE'",
_NPM_PKG_VERSION: `'${process.env.npm_package_version ?? '0.0.1'}'`,
preventAssignment: true
}),
rollupPluginTs({
...tsPluginOptions,
sourceMap: false
}),
commonjs({ extensions: ['.js', '.cjs', '.jsx', '.cjsx'] }),
json(),
resolve({ browser: true })
]
},
{ // Node CJS
input,
output: [
{
file: join(rootDir, pkgJson.exports['.'].node.require.default),
...sourcemapOutputOptions,
format: 'cjs',
exports: 'auto',
interop: 'auto',
dynamicImportInCjs: false,
plugins: [terser()]
}
],
plugins: [
replace({
IS_BROWSER: false,
environment: 'nodejs',
_MODULE_TYPE: "'CJS'",
_NPM_PKG_VERSION: `'${process.env.npm_package_version ?? '0.0.1'}'`,
preventAssignment: true
}),
rollupPluginTs(tsPluginOptions),
inject({
crypto: ['crypto', 'webcrypto']
}),
commonjs({ extensions: ['.js', '.cjs', '.jsx', '.cjsx'] }),
json(),
resolve({
exportConditions: ['node'],
resolveOnly
})
]
},
{ // Node ESM and type declarations
input,
output: [
{
file: join(rootDir, pkgJson.exports['.'].node.import.default),
...sourcemapOutputOptions,
format: 'es',
plugins: [terser()]
}
],
plugins: [
replace({
IS_BROWSER: false,
environment: 'nodejs',
_MODULE_TYPE: "'ESM'",
_NPM_PKG_VERSION: `'${process.env.npm_package_version ?? '0.0.1'}'`,
__filename: 'fileURLToPath(import.meta.url)',
__dirname: 'fileURLToPath(new URL(\'.\', import.meta.url))',
preventAssignment: true
}),
rollupPluginTs(tsPluginOptions),
compileDts(tmpDeclarationsDir),
inject({
crypto: ['crypto', 'webcrypto'],
fileURLToPath: ['url', 'fileURLToPath']
}),
commonjs({ extensions: ['.js', '.cjs', '.jsx', '.cjsx'] }),
json(),
resolve({
exportConditions: ['node'],
resolveOnly
})
]
},
{
input: join(tmpDeclarationsDir, 'index.d.ts'),
output: [{ file: 'dist/index.d.ts', format: 'es' }],
plugins: [
dts({
respectExternal: true
})
],
external: (module) => {
if (/^[./]/.test(module)) {
return false
}
return !resolveOnly(module)
}
}
]
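The config above derives every output path from the package.json "exports" map. A hedged sketch of the shape it expects (paths are illustrative, not this package's actual entries):

// Illustrative only — the real entries live in this repo's package.json
const exportsField = {
  '.': {
    node: {
      require: { default: './dist/index.node.cjs' },
      import: { default: './dist/index.node.js' }
    },
    default: { default: './dist/index.browser.js' }
  },
  './esm-browser-bundle': './dist/bundles/esm.min.js',
  './esm-browser-bundle-nomin': './dist/bundles/esm.js',
  './iife-browser-bundle': './dist/bundles/iife.js',
  './umd-browser-bundle': './dist/bundles/umd.js'
}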


@@ -1,26 +0,0 @@
const childProcess = require('child_process')
const rootDir = require('path').join(__dirname, '../')
function runScript (scriptPath, args) {
return new Promise((resolve, reject) => {
const cmd = childProcess.fork(scriptPath, args, {
cwd: rootDir
})
cmd.on('error', (error) => {
reject(error)
})
// execute the callback once the process has finished running
cmd.on('exit', function (code) {
if (code !== 0) {
const error = new Error('exit code ' + code)
reject(error)
}
resolve()
})
})
}
module.exports = runScript
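A minimal usage sketch of runScript (paths are illustrative; this mirrors how the mocha and browser-test runners elsewhere in this commit invoke it):

const path = require('path')
const runScript = require('./run-script.cjs') // adjust the relative path to this helper

runScript(path.join(__dirname, '../node_modules/mocha/bin/mocha'), ['--reporter', 'spec'])
  .then(() => console.log('child process exited cleanly'))
  .catch(err => { console.error(err); process.exit(1) })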

Binary file not shown (deleted; 326 B).


@@ -1,120 +0,0 @@
const path = require('path')
const puppeteer = require('puppeteer')
const minimatch = require('minimatch').minimatch
const glob = require('glob')
const rootDir = path.join(__dirname, '../../..')
const pkgJson = require(path.join(rootDir, 'package.json'))
const browserTests = async (
{
logWarnings = false,
serverPort = 38000,
keepServerRunning = false,
puppeteerOptions = {
headless: false,
devtools: true
}
}, testFiles) => {
const server = require('./server.cjs').server
await server.init(testFiles)
await server.listen(serverPort)
const browser = await puppeteer.launch(puppeteerOptions)
const page = (await browser.pages())[0]
page.on('console', function (message) {
const ignore = message.type() === 'warning' && !logWarnings
if (ignore) return
let text = (message.args().length > 0) ? message.args()[0].remoteObject().value : message.text()
const args = []
if (message.args() !== undefined && message.args().length > 1) {
for (let i = 1; i < message.args().length; i++) {
args.push(message.args()[i].remoteObject().value)
}
}
if (message.type() === 'error' && message.location()) {
text = `${message.location().url} : ${text}`
}
let consoleType = 'log'
switch (message.type()) {
case 'error':
consoleType = 'error'
break
case 'warning':
consoleType = 'warn'
break
default:
break
}
console[consoleType](text, ...args)
})
page.on('error', function (err) { page.emit(new Error(err)) })
page.on('close', async () => {
await close()
})
page.goto('http://localhost:38000/').then(async () => {
const watchDog = page.waitForFunction('_mocha.state === \'stopped\'', { timeout: 0 })
await watchDog.catch(async (reason) => {
console.error(reason)
})
if (puppeteerOptions.headless === 'new') {
await close()
}
}).catch(async (reason) => {
console.error(reason)
})
async function close () {
console.log('Closing browser tests...')
await browser.close().catch(() => {})
if (keepServerRunning !== true) {
await server.close().catch(() => {})
}
}
}
function processedTestFiles (testFilesStr) {
if (testFilesStr === undefined) {
testFilesStr = [pkgJson.directories.test + '/**/*.ts', pkgJson.directories.src + '/**/*.spec.ts']
} else {
// Strip surrounding quotes first (they cause issues on Windows)
testFilesStr = testFilesStr.replace(/^['"]/, '').replace(/['"]$/, '')
}
const filenames = glob.sync(testFilesStr, { cwd: rootDir, matchBase: true })
if (filenames.length === 0) {
throw new Error('no test files found for ' + testFilesStr)
} else {
filenames.forEach(file => {
const isTsTestFile = minimatch(file, '{test/**/*.ts,src/**/*.spec.ts}', { matchBase: true })
if (!isTsTestFile) {
throw new Error(`'${file}' is not a valid TypeScript test file`)
}
})
}
return filenames
}
const opts = {
// puppeteer options
puppeteerOptions: {
headless: false,
devtools: true,
// slowMo: 100,
timeout: 0
},
logWarnings: false, // log warnings in Node console (usually not needed)
keepServerRunning: false, // keep server running until manually closed with ctrl-c. In combination with puppeteerOptions.headless (or just connecting any browser to the test page) allows debugging in browser
serverPort: 38000
}
const args = process.argv.slice(2)
if (args[0] === 'headless') {
opts.puppeteerOptions.headless = 'new'
args.shift()
}
browserTests(opts, processedTestFiles(args[0]))
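Per the option comments above, a hedged sketch of how one might tweak the final call for interactive debugging, keeping the server alive so any browser can attach to http://localhost:38000 (the test glob is hypothetical):

browserTests({
  ...opts,
  keepServerRunning: true, // leave the test server up until ctrl-c
  puppeteerOptions: { ...opts.puppeteerOptions, headless: false, devtools: true }
}, processedTestFiles('src/ts/**/*.spec.ts'))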


@@ -1,211 +0,0 @@
'use strict'
const fs = require('fs')
const http = require('http')
const path = require('path')
const pkgJson = require('../../../package.json')
require('dotenv').config()
const rollup = require('rollup')
const resolve = require('@rollup/plugin-node-resolve').nodeResolve
const replace = require('@rollup/plugin-replace')
const typescriptPlugin = require('@rollup/plugin-typescript')
const commonjs = require('@rollup/plugin-commonjs')
const json = require('@rollup/plugin-json')
const multi = require('@rollup/plugin-multi-entry')
const runScript = require('../../run-script.cjs')
const rootDir = path.join(__dirname, '..', '..', '..')
const regex = /^(?:(?<scope>@.*?)\/)?(?<name>.*)/ // We are going to take only the package name part if there is a scope, e.g. @my-org/package-name
const { name } = pkgJson.name.match(regex).groups
const indexHtml = `<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8" />
<title>${name}</title>
<script src="/mocha.js"></script>
<script src="/chai.js"></script>
</head>
<body>
<div id="mocha"></div>
<script>
mocha.setup({
ui: 'bdd',
reporter: 'spec',
color: 'true',
timeout: 90000
})
</script>
<script type="module">
import './tests.js'
window._mocha = mocha.run()
</script>
</body>
</html>`
const tsBundleOptions = {
tsconfig: path.join(rootDir, 'tsconfig.json'),
outDir: undefined, // ignore outDir in tsconfig.json
sourceMap: false
// include: ['src/ts/**/*', 'build/typings/**/*.d.ts']
}
async function buildTests (testFiles) {
// create a bundle
const inputOptions = {
input: testFiles,
plugins: [
multi(),
replace({
IS_BROWSER: true,
_MODULE_TYPE: "'ESM'",
_NPM_PKG_VERSION: `'${process.env.npm_package_version ?? '0.0.1'}'`,
preventAssignment: true
}),
typescriptPlugin(tsBundleOptions),
commonjs({ extensions: ['.js', '.cjs', '.jsx', '.cjsx'] }),
json(),
resolve({ browser: true }),
replace({
'#pkg': `/${name}.esm.js`,
delimiters: ['', ''],
preventAssignment: true
})
],
external: [`/${name}.esm.js`]
}
const bundle = await rollup.rollup(inputOptions)
const { output } = await bundle.generate({ format: 'es' })
await bundle.close()
let bundledCode = output[0].code
const replacements = _getEnvVarsReplacements(bundledCode)
for (const replacement in replacements) {
const regExp = new RegExp(replacement, 'g')
bundledCode = bundledCode.replace(regExp, replacements[replacement])
}
return bundledCode
}
class TestServer {
constructor () {
this.server = http.createServer()
}
async init (testFiles) {
/** Let us first check if the necessary files are built, and if not, build */
if (!fs.existsSync(pkgJson.exports['./esm-browser-bundle-nomin'])) {
await runScript(path.join(rootDir, 'node_modules', '.bin', 'rollup'), ['-c', 'build/rollup.config.js'])
}
const tests = await buildTests(testFiles)
this.server.on('request', function (req, res) {
if (req.url === `/${name}.esm.js`) {
fs.readFile(path.join(rootDir, pkgJson.exports['./esm-browser-bundle-nomin']), function (err, data) {
if (err) {
res.writeHead(404)
res.end(JSON.stringify(err))
return
}
res.writeHead(200, { 'Content-Type': 'text/javascript' })
res.end(data)
})
} else if (req.url === '/index.html' || req.url === '/') {
res.writeHead(200)
res.end(indexHtml)
} else if (req.url === '/tests.js') {
res.writeHead(200, { 'Content-Type': 'text/javascript' })
res.end(tests)
} else if (req.url === '/mocha.js') {
fs.readFile(path.join(rootDir, 'node_modules/mocha/mocha.js'), function (err, data) {
if (err) {
res.writeHead(404)
res.end(JSON.stringify(err))
return
}
res.writeHead(200, { 'Content-Type': 'text/javascript' })
res.end(data)
})
} else if (req.url === '/mocha.js.map') {
fs.readFile(path.join(rootDir, 'node_modules/mocha/mocha.js.map'), function (err, data) {
if (err) {
res.writeHead(404)
res.end(JSON.stringify(err))
return
}
res.writeHead(200, { 'Content-Type': 'text/javascript' })
res.end(data)
})
} else if (req.url === '/chai.js' || req.url === '/chai') {
fs.readFile(path.join(rootDir, 'node_modules/chai/chai.js'), function (err, data) {
if (err) {
res.writeHead(404)
res.end(JSON.stringify(err))
return
}
res.writeHead(200, { 'Content-Type': 'text/javascript' })
res.end(data)
})
} else if (req.url === '/favicon.ico') {
fs.readFile(path.join(__dirname, 'favicon.ico'), function (err, data) {
if (err) {
res.writeHead(404)
res.end(JSON.stringify(err))
return
}
res.writeHead(200, { 'Content-Type': 'application/ico' })
res.end(data)
})
} else {
res.writeHead(404)
res.end()
}
})
}
listen (port = 38080) {
return new Promise((resolve, reject) => {
this.server.listen(port, error => {
if (error) return reject(error)
console.log(`Testing server listening at http://localhost:${port}`)
return resolve()
})
})
}
close () {
return new Promise((resolve, reject) => {
this.server.close(error => (error) ? reject(error) : resolve())
})
}
}
function _getEnvVarsReplacements (testsCode) {
const replacements = {}
const missingEnvVars = []
for (const match of testsCode.matchAll(/process\.env\.(\w+)/g)) {
const envVar = match[1]
if (process.env[envVar] !== undefined) {
replacements[match[0]] = '`' + process.env[envVar] + '`'
} else {
replacements[match[0]] = undefined
missingEnvVars.push(envVar)
}
}
for (const match of testsCode.matchAll(/process\.env\[['"](\w+)['"]\]/g)) {
const envVar = match[1]
if (process.env[envVar] !== undefined) {
replacements[match[0]] = '`' + process.env[envVar] + '`'
} else {
replacements[match[0]] = undefined
missingEnvVars.push(envVar)
}
}
if (missingEnvVars.length > 0) {
console.warn('The following environment variables are missing in your .env file and will be replaced with "undefined": ' + [...(new Set(missingEnvVars)).values()].join(', '))
}
return replacements
}
exports.server = new TestServer()
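As a small illustration of the replacement table built by _getEnvVarsReplacements() above (variable name and value are hypothetical, and the call assumes it runs inside this module):

// With MY_TOKEN=abc123 set in the environment (or .env):
_getEnvVarsReplacements('const t = process.env.MY_TOKEN')
// -> { 'process.env.MY_TOKEN': '`abc123`' }
// Unset variables map to undefined, so the bundled test code ends up with the literal
// text "undefined" in their place and the warning above lists them.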


@@ -1,62 +0,0 @@
const EventEmitter = require('events')
const fs = require('fs')
const path = require('path')
module.exports = class Builder extends EventEmitter {
constructor (semaphoreFile, name = 'builder') {
super()
this.name = name
fs.mkdirSync(path.dirname(semaphoreFile), { recursive: true })
this.semaphoreFile = semaphoreFile
if (!fs.existsSync(this.semaphoreFile)) {
fs.writeFileSync(this.semaphoreFile, '', { encoding: 'utf8' })
}
this._ready = false
this.on('message', (...message) => {
if (message !== undefined) {
console.log(`\x1b[33m [${this.name}]`, ...message, '\x1b[0m')
}
})
this.on('error', (...error) => {
if (error !== undefined) {
console.error(`\x1b[31m❗ [${this.name}]`, ...error, '\x1b[0m')
}
})
this.on('ready', (updateSemaphore = true) => {
const now = Date.now()
if (updateSemaphore) {
fs.utimesSync(this.semaphoreFile, now, now)
}
this._ready = true
})
this.on('busy', () => {
this._ready = false
})
}
ready () {
return new Promise(resolve => {
if (this._ready === true) return resolve()
this.once('ready', () => {
resolve()
})
})
}
async start () {
}
async close () {}
clean () {
fs.rmSync(this.semaphoreFile, { force: true })
}
}
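A minimal sketch of how a concrete builder uses this base class (the RollupBuilder in the next file follows the same pattern; the class name and semaphore path here are illustrative):

const Builder = require('./Builder.cjs')

class ExampleBuilder extends Builder {
  async start () {
    await super.start()
    this.emit('busy')                 // mark the builder as not ready
    // ... perform the actual build work here ...
    this.emit('message', 'build finished')
    this.emit('ready')                // touch the semaphore file and resolve ready()
  }
}

const builder = new ExampleBuilder('.mocha-ts/semaphore', 'example')
builder.start().then(async () => {
  await builder.ready()
  await builder.close()
})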


@@ -1,154 +0,0 @@
const EventEmitter = require('events')
const fs = require('fs')
const path = require('path')
const rollup = require('rollup')
const loadAndParseConfigFile = require('rollup/loadConfigFile').loadConfigFile
const Builder = require('./Builder.cjs')
const rootDir = path.join(__dirname, '../../../../')
const pkgJson = require(path.join(rootDir, 'package.json'))
const mochaTsRelativeDir = pkgJson.directories['mocha-ts']
const mochaTsDir = path.join(rootDir, mochaTsRelativeDir)
class RollupBuilder extends Builder {
constructor ({ name, configPath, tempDir }) {
super(path.join(tempDir, 'semaphore'), name)
this.tempDir = tempDir
this.configPath = configPath
this.firstBuild = true
}
async start ({ watch = false, commonjs = false }) {
await super.start()
this.watch = watch
this.commonjs = commonjs
this.watchedModule = commonjs ? pkgJson.exports['.'].node.require.default : pkgJson.exports['.'].node.import.default
const { options } = await loadAndParseConfigFile(this.configPath)
// Instead of compiling all the outputs let us just take the one we are using with mocha (either cjs or esm)
const rollupOptions = options.filter(bundle => {
const file = (bundle.output[0].dir !== undefined)
? path.join(bundle.output[0].dir, bundle.output[0].entryFileNames)
: bundle.output[0].file
return file === path.join(rootDir, this.watchedModule)
})[0]
if (rollupOptions.output.length > 1) {
rollupOptions.output = rollupOptions.output[0]
}
this.builder = new RollupBundler({ rollupOptions, watch: this.watch, watchedModule: this.watchedModule })
this.builder.on('event', event => {
let updateSemaphore = true
switch (event.code) {
case 'START':
this.emit('busy')
if (this.firstBuild === true) {
this.emit('message', 'building your module...')
} else {
this.emit('message', 'file changes detected. Rebuilding module files...')
}
break
case 'BUNDLE_END':
if (event.result) event.result.close()
break
case 'END':
if (event.result) event.result.close()
// fs.mkdirSync(path.join(this.tempDir, path.dirname(this.watchedModule)), { recursive: true })