diff --git a/.editorconfig b/.editorconfig index 3361fd10..0835e8ee 100644 --- a/.editorconfig +++ b/.editorconfig @@ -9,7 +9,7 @@ charset = utf-8 trim_trailing_whitespace = true insert_final_newline = true -[*.js] +[*.{js,ts,[cm]js,[cm]ts}] quote_type = single curly_bracket_next_line = true indent_brace_style = Allman diff --git a/.eslintrc b/.eslintrc index cfe35f51..75260ea5 100644 --- a/.eslintrc +++ b/.eslintrc @@ -1,20 +1,21 @@ { "root": true, - "extends": "airbnb-base", - "rules": { - "comma-dangle": 0, - "indent": 2, - "object-shorthand": 0, - "func-names": 0, - "max-len": [1, 120, 2], - "no-use-before-define": 1, - "no-param-reassign": 0, - "strict": 0, - "import/no-extraneous-dependencies": 1, - "prefer-spread": 0, - "prefer-rest-params": 0 - }, - "parser-options": { - "ecmaVersion": 6 + "parserOptions": { + "ecmaVersion": 2018, + "sourceType": "module" + }, + "extends": ["airbnb-base", "prettier"], + "plugins": ["prettier", "import"], + "rules": { + "comma-dangle": 0, + "indent": 2, + "func-names": 0, + "max-len": [1, 120, 2], + "no-cond-assign": ["error", "except-parens"], + "no-use-before-define": ["warn"], + "no-param-reassign": 0, + "no-plusplus": ["error", { "allowForLoopAfterthoughts": true }], + "strict": 1, + "import/no-extraneous-dependencies": 1 } } diff --git a/.gitattributes b/.gitattributes index 70d5e00f..307a0319 100644 --- a/.gitattributes +++ b/.gitattributes @@ -8,13 +8,21 @@ # for example, after the build script is run) .* text eol=lf *.css text eol=lf +*.scss text eol=lf *.html text eol=lf *.js text eol=lf +*.cjs text eol=lf +*.mjs text eol=lf +*.ts text eol=lf +*.cts text eol=lf +*.mts text eol=lf *.json text eol=lf *.md text eol=lf *.sh text eol=lf *.txt text eol=lf *.xml text eol=lf +*.yml text eol=lf +.husky/* text eol=lf # Exclude the `.htaccess` file from GitHub's language statistics # https://github.com/github/linguist#using-gitattributes diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml new file mode 100644 index 00000000..efb34a09 --- /dev/null +++ b/.github/workflows/codeql-analysis.yml @@ -0,0 +1,72 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. +# +name: "CodeQL" + +on: + push: + branches: [ "master" ] + pull_request: + # The branches below must be a subset of the branches above + branches: [ "master" ] + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [ 'javascript' ] + # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] + # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. 
+ # Prefix the list here with "+" to use these queries and those in the config file. + + # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs + # queries: security-extended,security-and-quality + + + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v2 + + # ℹ️ Command-line programs to run using the OS shell. + # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun + + # If the Autobuild fails above, remove it and uncomment the following three lines. + # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. + + # - run: | + # echo "Run, Build Application using script" + # ./location_of_script_within_repo/buildscript.sh + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 + with: + category: "/language:${{matrix.language}}" diff --git a/.github/workflows/node.js.yml b/.github/workflows/node.js.yml new file mode 100644 index 00000000..4f47b100 --- /dev/null +++ b/.github/workflows/node.js.yml @@ -0,0 +1,82 @@ +# This workflow will do a clean installation of node dependencies, cache/restore them, build the source code and run tests across different versions of node +# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions + +name: Node.js CI + +on: + push: + branches: [ "master" ] + pull_request: + branches: [ "master" ] + workflow_dispatch: {} + +jobs: + build: + + runs-on: ${{ matrix.os }} + + defaults: + run: + shell: bash + + strategy: + matrix: + os: [ubuntu-latest, windows-latest] + node-version: [8.x, 10.x, 12.x, 14.x, 16.x, 18.x] + # See supported Node.js release schedule at https://nodejs.org/en/about/releases/ + include: + - node-version: 8.x + npm-i: "eslint@6.x eslint-config-airbnb-base@14.x eslint-config-prettier@6.x eslint-plugin-prettier@3.x fs-extra@8.x nyc@14.x tap@14.x" + + - node-version: 10.x + npm-i: "eslint@7.x fs-extra@9.x nyc@14.x tap@14.x" + + - node-version: 12.x + npm-i: "fs-extra@10.x nyc@14.x tap@14.x" + + - node-version: 14.x + npm-i: "nyc@14.x tap@14.x" + + steps: + - uses: actions/checkout@v3 + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v3 + with: + node-version: ${{ matrix.node-version }} + cache: 'npm' + + - name: Configure npm + run: npm config set loglevel error + + - name: Get npm version + id: npm-version + run: | + npm -v + npmMajorVer=$(npm -v | cut -d. 
-f1) + echo "major=$npmMajorVer" >> $GITHUB_OUTPUT + + - name: Disable prettier on older Node.js (8.x, 10.x, 12.x) + run: | + sed -i '/"prettier": "prettier/d' package.json + if: contains(fromJson('["8.x", "10.x", "12.x"]'), matrix.node-version) + + - name: Install downgraded modules ${{ matrix.npm-i }} + run: | + npm install --save-dev ${{ matrix.npm-i }} + if [ ${{ steps.npm-version.outputs.major }} -le 5 ]; then + npm install + fi + if: matrix.npm-i != '' + + - run: npm install + if: matrix.npm-i == '' && steps.npm-version.outputs.major <= 5 + + - run: npm ci + if: matrix.npm-i == '' && steps.npm-version.outputs.major > 5 + + - name: List dependencies + run: npm ls --depth=0 --dev && npm ls --depth=0 --prod + + - run: npm run build --if-present + - run: npm test + - run: npm run typings diff --git a/.github/workflows/npm-audit.yml b/.github/workflows/npm-audit.yml new file mode 100644 index 00000000..59fe7176 --- /dev/null +++ b/.github/workflows/npm-audit.yml @@ -0,0 +1,35 @@ +# This workflow will do a clean installation of node dependencies, cache/restore them, build the source code and run tests across different versions of node +# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions + +name: npm audit + +on: + push: + branches: [ "master" ] + pull_request: + branches: [ "master" ] + workflow_dispatch: {} + schedule: + - cron: '0 0 * * 3' + +jobs: + build: + + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: [latest] + # See supported Node.js release schedule at https://nodejs.org/en/about/releases/ + + steps: + - uses: actions/checkout@v3 + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v3 + with: + node-version: ${{ matrix.node-version }} + cache: 'npm' + + - run: npm audit + env: + NODE_ENV: production diff --git a/.github/workflows/npm-publish.yml b/.github/workflows/npm-publish.yml new file mode 100644 index 00000000..97f38df4 --- /dev/null +++ b/.github/workflows/npm-publish.yml @@ -0,0 +1,39 @@ +# This workflow will run tests using node and then publish a package to GitHub Packages when a milestone is closed +# For more information see: https://help.github.com/actions/language-and-framework-guides/publishing-nodejs-packages + +name: Node.js Package + +on: + milestone: + types: [closed] + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-node@v3 + with: + node-version: 16 + - run: npm ci + - run: npm test + - run: npm run typings + + publish-npm: + needs: build + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-node@v3 + with: + node-version: 16 + registry-url: https://registry.npmjs.org/ + - run: npm ci + - run: | + git config user.name github-actions + git config user.email github-actions@github.com + - run: npm version ${{ github.event.milestone.title }} + - run: git push && git push --tags + - run: npm publish + env: + NODE_AUTH_TOKEN: ${{secrets.npm_token}} diff --git a/.gitignore b/.gitignore index 580c0303..a8415082 100644 --- a/.gitignore +++ b/.gitignore @@ -19,3 +19,10 @@ test/streams/test-* yarn.lock coverage/ .nyc_output/ +_site +Gemfile.lock +Dockerfile +docker-compose.yml + +#personal config +.env diff --git a/.husky/commit-msg b/.husky/commit-msg new file mode 100644 index 00000000..e8105222 --- /dev/null +++ b/.husky/commit-msg @@ -0,0 +1,4 @@ +#!/usr/bin/env sh +.
"$(dirname -- "$0")/_/husky.sh" + +npx --no -- commitlint --edit $1 diff --git a/.husky/pre-push b/.husky/pre-push new file mode 100644 index 00000000..d5a85978 --- /dev/null +++ b/.husky/pre-push @@ -0,0 +1,4 @@ +#!/usr/bin/env sh +. "$(dirname -- "$0")/_/husky.sh" + +npm test && npm run typings diff --git a/.npmignore b/.npmignore index 27089378..3c480294 100644 --- a/.npmignore +++ b/.npmignore @@ -12,3 +12,8 @@ lib-cov coverage.html Makefile coverage +Gemfile +Gemfile.lock +docker-compose.yml +Dockerfile +.bob diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 00000000..b6f4cc40 --- /dev/null +++ b/.prettierignore @@ -0,0 +1,2 @@ +**/.* +coverage diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 45f1ad0e..00000000 --- a/.travis.yml +++ /dev/null @@ -1,9 +0,0 @@ -language: node_js -sudo: false -node_js: - - "7" - - "6" - - "5" - - "4" -after_success: - - npm run codecov diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 00000000..ba2281f0 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,477 @@ +# log4js-node Changelog + +## [6.9.1](https://github.com/log4js-node/log4js-node/milestone/94) + +- [fix(7922e82): regex for stacktrace](https://github.com/log4js-node/log4js-node/pull/1378) - thanks [@lamweili](https://github.com/lamweili) + - addresses [#1377](https://github.com/log4js-node/log4js-node/issues/1377) which has a regression since [6.8.0](https://github.com/log4js-node/log4js-node/milestone/92) from [#1363](https://github.com/log4js-node/log4js-node/pull/1363) at commit [7922e82](https://github.com/log4js-node/log4js-node/commit/7922e8257806811666c1d4cf6774b4426276ea16) + +## [6.9.0](https://github.com/log4js-node/log4js-node/milestone/93) + +- [feat: support for idempotent logging on browser](https://github.com/log4js-node/log4js-node/pull/1374) - thanks [@aellerton](https://github.com/aellerton) + - addresses [#968](https://github.com/log4js-node/log4js-node/issues/968), [#1270](https://github.com/log4js-node/log4js-node/issues/1270), [#1288](https://github.com/log4js-node/log4js-node/issues/1288), [#1372](https://github.com/log4js-node/log4js-node/issues/1372) +- [docs: added that `log4js.getLogger()` may call `log4js.configure()`](https://github.com/log4js-node/log4js-node/pull/1375) - thanks [@lamweili](https://github.com/lamweili) + +## [6.8.0](https://github.com/log4js-node/log4js-node/milestone/92) + +- [feat: added `log4js.isConfigured()` API](https://github.com/log4js-node/log4js-node/pull/1364) - thanks [@lamweili](https://github.com/lamweili) + - [docs: added `log4js.isConfigured()`](https://github.com/log4js-node/log4js-node/pull/1369) - thanks [@lamweili](https://github.com/lamweili) +- [feat(layout): support a specifier on %m](https://github.com/log4js-node/log4js-node/pull/1346) - thanks [@lamweili](https://github.com/lamweili) +- [fix: tilde expansion for windows](https://github.com/log4js-node/log4js-node/pull/1363) - thanks [@lamweili](https://github.com/lamweili) +- [docs: updated typescript usage](https://github.com/log4js-node/log4js-node/pull/1361) - thanks [@lamweili](https://github.com/lamweili) +- [test: improved test for fileAppender](https://github.com/log4js-node/log4js-node/pull/1365) - thanks [@lamweili](https://github.com/lamweili) +- [ci: generate coverage report in both text and html](https://github.com/log4js-node/log4js-node/pull/1368) - thanks [@lamweili](https://github.com/lamweili) +- [ci: replaced deprecated github set-output](https://github.com/log4js-node/log4js-node/pull/1351) - thanks 
[@lamweili](https://github.com/lamweili) +- [chore(deps): updated dependencies](https://github.com/log4js-node/log4js-node/pull/1367) - thanks [@lamweili](https://github.com/lamweili) + - chore(deps): bump streamroller from 3.1.3 to 3.1.5 + - chore(deps): updated package-lock.json +- [chore(deps-dev): updated dependencies](https://github.com/log4js-node/log4js-node/pull/1366) - thanks [@lamweili](https://github.com/lamweili) + - chore(deps-dev): bump @commitlint/cli from 17.3.0 to 17.4.4 + - chore(deps-dev): bump @commitlint/config-conventional from 17.3.0 to 17.4.4 + - chore(deps-dev): bump eslint from 8.28.0 to 8.34.0 + - chore(deps-dev): bump eslint-config-prettier from 8.5.0 to 8.6.0 + - chore(deps-dev): bump eslint-import-resolver-node from 0.3.6 to 0.3.7 + - chore(deps-dev): bump eslint-plugin-import from 2.26.0 to 2.27.5 + - chore(deps-dev): bump fs-extra from 10.1.0 to 11.1.0 + - chore(deps-dev): bump husky from 8.0.2 to 8.0.3 + - chore(deps-dev): bump prettier from 2.8.0 to 2.8.4 + - chore(deps-dev): bump tap from 16.3.2 to 16.3.4 + - chore(deps-dev): bump typescript from 4.9.3 to 4.9.5 + - chore(deps-dev): updated package-lock.json +- [chore(deps-dev): bump json5 from 1.0.1 to 1.0.2](https://github.com/log4js-node/log4js-node/pull/1357) - thanks [@Dependabot](https://github.com/dependabot) + +## [6.7.1](https://github.com/log4js-node/log4js-node/milestone/91) + +- [type: updated Configuration.levels type to allow for custom log levels](https://github.com/log4js-node/log4js-node/pull/1348) - thanks [@lamweili](https://github.com/lamweili) +- [docs: fixed typo in `layouts.md`](https://github.com/log4js-node/log4js-node/pull/1338) - thanks [@dtslvr](https://github.com/dtslvr) +- [chore(deps-dev): updated dependencies](https://github.com/log4js-node/log4js-node/pull/1349) - thanks [@lamweili](https://github.com/lamweili) + - chore(deps-dev): bump @commitlint/cli from 17.1.2 to 17.3.0 + - chore(deps-dev): bump @commitlint/config-conventional from 17.1.0 to 17.3.0 + - chore(deps-dev): bump eslint from 8.24.0 to 8.28.0 + - chore(deps-dev): bump husky from 8.0.1 to 8.0.2 + - chore(deps-dev): bump prettier from 2.7.1 to 2.8.0 + - chore(deps-dev): bump tap from 16.3.0 to 16.3.2 + - chore(deps-dev): bump typescript from 4.8.4 to 4.9.3 + - chore(deps-dev): updated package-lock.json + +## [6.7.0](https://github.com/log4js-node/log4js-node/milestone/90) + +- [feat(log4js): if cb is passed to shutdown(), it must be a function or it will throw error immediately](https://github.com/log4js-node/log4js-node/pull/1334) - thanks [@lamweili](https://github.com/lamweili) +- [feat: patternLayout function name, class name and function alias](https://github.com/log4js-node/log4js-node/pull/1316) - thanks [@l0ner](https://github.com/l0ner) + - [refactor(#1316): code flow and readability (%C:class, %M:function, %A:alias, %F:full - in order)](https://github.com/log4js-node/log4js-node/pull/1317) - thanks [@lamweili](https://github.com/lamweili) +- [feat: add ability to use passed in Errors for callstacks and adjust how deeply you want to look for information](https://github.com/log4js-node/log4js-node/pull/1269) - thanks [@ZachHaber](https://github.com/ZachHaber) +- [fix(LoggingEvent): serde for NaN, Infinity, -Infinity, undefined](https://github.com/log4js-node/log4js-node/pull/1332) - thanks [@lamweili](https://github.com/lamweili) +- [fix: make shutdown return value consistent](https://github.com/log4js-node/log4js-node/pull/1082) - thanks [@richardhinkamp](https://github.com/richardhinkamp) + - 
[refactor(#1082): removed return value for `log4js.shutdown()`](https://github.com/log4js-node/log4js-node/pull/1319) - thanks [@lamweili](https://github.com/lamweili) +- [test: adjusted timeout interval for OS operations](https://github.com/log4js-node/log4js-node/pull/1318) - thanks [@lamweili](https://github.com/lamweili) +- [refactor(LoggingEvent): loop through location keys instead of hard-coding one-by-one](https://github.com/log4js-node/log4js-node/pull/1333) - thanks [@lamweili](https://github.com/lamweili) +- [docs(noLogFilter): fix heading (`Category Filter` --> `No-Log Filter`)](https://github.com/log4js-node/log4js-node/pull/1322) - thanks [@gkalpak](https://github.com/gkalpak) +- [docs(migration-guide): fix typo (`have been remove` --> `have been removed`)](https://github.com/log4js-node/log4js-node/pull/1321) - thanks [@gkalpak](https://github.com/gkalpak) +- [docs: added quotes for `just-errors` JSON property key ](https://github.com/log4js-node/log4js-node/pull/1312) - thanks [@lamweili](https://github.com/lamweili) +- [ci: skip prettier on older Node.js (8.x, 10.x, 12.x)](https://github.com/log4js-node/log4js-node/pull/1328) - thanks [@lamweili](https://github.com/lamweili) +- [ci: manually downgrade dev dependencies for older versions](https://github.com/log4js-node/log4js-node/pull/1326) - thanks [@lamweili](https://github.com/lamweili) +- [ci: removed scheduled job from codeql and separated npm audit](https://github.com/log4js-node/log4js-node/pull/1325) - thanks [@lamweili](https://github.com/lamweili) +- [chore(deps): bump typescript from 4.8.3 to 4.8.4](https://github.com/log4js-node/log4js-node/pull/1330) - thanks [@lamweili](https://github.com/lamweili) +- [chore(deps-dev): updated dependencies](https://github.com/log4js-node/log4js-node/pull/1329) - thanks [@lamweili](https://github.com/lamweili) + - chore(deps-dev): bump nyc from 14.1.1 to 15.1.0 + - chore(deps-dev): bump tap from 14.11.0 to 16.3.0 + - chore(deps-dev): bump eslint-plugin-prettier from 3.4.1 to 4.2.1 + - chore(deps-dev): bump prettier from 1.19.1 to 2.7.1 + - chore(deps-dev): updated package-lock.json +- [chore(deps): updated dependencies](https://github.com/log4js-node/log4js-node/pull/1327) - thanks [@lamweili](https://github.com/lamweili) + - chore(deps-dev): bump @commitlint/cli from 17.0.3 to 17.1.2 + - chore(deps-dev): bump @commitlint/config-conventional from 17.0.3 to 17.1.0 + - chore(deps-dev): bump eslint from 6.8.0 to 8.24.0 + - chore(deps-dev): bump eslint-config-airbnb-base from 14.2.1 to 15.0.0 + - chore(deps-dev): bump eslint-config-prettier from 6.15.0 to 8.5.0 + - chore(deps-dev): bump fs-extra from 8.1.0 to 10.1.0 + - chore(deps-dev): bump typescript from 4.7.4 to 4.8.3 + - chore(deps): bump date-format from 4.0.13 to 4.0.14 + - chore(deps): bump flatted from 3.2.6 to 3.2.7 + - chore(deps): bump streamroller from 3.1.2 to 3.1.3 + - chore(deps): updated package-lock.json + +## [6.6.1](https://github.com/log4js-node/log4js-node/milestone/89) + +- [fix: connectlogger nolog function](https://github.com/log4js-node/log4js-node/pull/1285) - thanks [@eyoboue](https://github.com/eyoboue) +- [type: corrected AppenderModule interface and Recording interface](https://github.com/log4js-node/log4js-node/pull/1304) - thanks [@lamweili](https://github.com/lamweili) +- test: extended timeout interval for OS operations - thanks [@lamweili](https://github.com/lamweili) + - test: [#1306](https://github.com/log4js-node/log4js-node/pull/1306) + - test: 
[#1297](https://github.com/log4js-node/log4js-node/pull/1297) +- [test: support older Node.js versions](https://github.com/log4js-node/log4js-node/pull/1295) - thanks [@lamweili](https://github.com/lamweili) +- [ci: added tests for Node.js 8.x](https://github.com/log4js-node/log4js-node/pull/1303) - thanks [@lamweili](https://github.com/lamweili) +- [ci: added tests for Node.js 10.x, 18.x](https://github.com/log4js-node/log4js-node/pull/1301) - thanks [@lamweili](https://github.com/lamweili) +- [ci: updated codeql from v1 to v2](https://github.com/log4js-node/log4js-node/pull/1302) - thanks [@lamweili](https://github.com/lamweili) +- [chore(deps-dev): bump lodash from 4.17.19 to 4.17.21](https://github.com/log4js-node/log4js-node/pull/1309) - thanks [@Dependabot](https://github.com/dependabot) +- [chore(deps-dev): bump path-parse from 1.0.6 to 1.0.7](https://github.com/log4js-node/log4js-node/pull/1308) - thanks [@Dependabot](https://github.com/dependabot) +- [chore(deps-dev): downgraded nyc from 15.1.0 to 14.1.1](https://github.com/log4js-node/log4js-node/pull/1305) - thanks [@lamweili](https://github.com/lamweili) +- [chore(deps): updated dependencies](https://github.com/log4js-node/log4js-node/pull/1296) - thanks [@lamweili](https://github.com/lamweili) + - chore(deps): bump date-format from 4.0.11 to 4.0.13 + - chore(deps): bump flatted from 3.2.5 to 3.2.6 + - chore(deps): bump streamroller from 3.1.1 to 3.1.2 + - chore(deps-dev): bump @commitlint/cli from 17.0.2 to 17.0.3 + - chore(deps-dev): bump @commitlint/config-conventional from 17.0.2 to 17.0.3 + - [chore(deps-dev): bump eslint from 8.16.0 to 8.20.0](https://github.com/log4js-node/log4js-node/pull/1300) + - chore(deps-dev): bump eslint-plugin-prettier from 4.0.0 to 4.2.1 + - chore(deps-dev): bump prettier from 2.6.0 to 2.7.1 + - chore(deps-dev): bump tap from 16.2.0 to 16.3.0 + - chore(deps-dev): bump typescript from 4.7.2 to 4.7.4 + - chore(deps): updated package-lock.json + +## [6.6.0](https://github.com/log4js-node/log4js-node/milestone/87) + +- [feat: adding function(req, res) support to connectLogger nolog](https://github.com/log4js-node/log4js-node/pull/1279) - thanks [@eyoboue](https://github.com/eyoboue) +- [fix: ability to load CJS appenders (through .cjs extension) for ESM packages](https://github.com/log4js-node/log4js-node/pull/1280) - thanks [@lamweili](https://github.com/lamweili) +- [type: consistent typing for Logger](https://github.com/log4js-node/log4js-node/pull/1276) - thanks [@taozi0818](https://github.com/taozi0818) +- [type: Make Appender Type extensible from other modules and the user](https://github.com/log4js-node/log4js-node/pull/1267) - thanks [@ZachHaber](https://github.com/ZachHaber) +- [refactor: clearer logic for invalid level and LOG synonym](https://github.com/log4js-node/log4js-node/pull/1264) - thanks [@lamweili](https://github.com/lamweili) +- [style: ran prettier and requires prettier for ci](https://github.com/log4js-node/log4js-node/pull/1271) - thanks [@ZachHaber](https://github.com/ZachHaber) +- [docs: renamed peteriman to lamweili in changelog](https://github.com/log4js-node/log4js-node/pull/1272) - thanks [@lamweili](https://github.com/lamweili) +- [ci: replaced validate-commit-msg, fixed husky config, removed codecov](https://github.com/log4js-node/log4js-node/pull/1274) - thanks [@ZachHaber](https://github.com/ZachHaber) +- [chore(deps): updated dependencies](https://github.com/log4js-node/log4js-node/pull/1266) - thanks [@lamweili](https://github.com/lamweili) + - chore(deps-dev): 
bump typescript from 4.6.4 to 4.7.2 + - chore(deps): bump date-format from 4.0.10 to 4.0.11 + - chore(deps): updated package-lock.json + +## [6.5.2](https://github.com/log4js-node/log4js-node/milestone/86) + +- [type: add LogEvent.serialise](https://github.com/log4js-node/log4js-node/pull/1260) - thanks [@marrowleaves](https://github.com/marrowleaves) + +## [6.5.1](https://github.com/log4js-node/log4js-node/milestone/85) + +- [fix: fs.appendFileSync should use flag instead of flags](https://github.com/log4js-node/log4js-node/pull/1257) - thanks [@lamweili](https://github.com/lamweili) +- [chore(deps): updated dependencies](https://github.com/log4js-node/log4js-node/pull/1258) - thanks [@lamweili](https://github.com/lamweili) + - chore(deps): bump streamroller from 3.1.0 to 3.1.1 + - chore(deps): updated package-lock.json + +## [6.5.0](https://github.com/log4js-node/log4js-node/milestone/84) + +- [feat: logger.log() to be synonym of logger.info()](https://github.com/log4js-node/log4js-node/pull/1254) - thanks [@lamweili](https://github.com/lamweili) +- [feat: tilde expansion for filename](https://github.com/log4js-node/log4js-node/pull/1252) - thanks [@lamweili](https://github.com/lamweili) +- [fix: better file validation](https://github.com/log4js-node/log4js-node/pull/1251) - thanks [@lamweili](https://github.com/lamweili) +- [fix: fallback for logger.log outputs nothing](https://github.com/log4js-node/log4js-node/pull/1247) - thanks [@lamweili](https://github.com/lamweili) +- [docs: updated fileAppender maxLogSize documentation](https://github.com/log4js-node/log4js-node/pull/1248) - thanks [@lamweili](https://github.com/lamweili) +- [ci: enforced 100% test coverage tests](https://github.com/log4js-node/log4js-node/pull/1253) - thanks [@lamweili](https://github.com/lamweili) +- [chore(deps): updated dependencies](https://github.com/log4js-node/log4js-node/pull/1256) - thanks [@lamweili](https://github.com/lamweili) + - chore(deps-dev): bump eslint from 8.15.0 to 8.16.0 + - chore(deps): bump streamroller from 3.0.9 to 3.1.0 + - chore(deps): updated package-lock.json + +## [6.4.7](https://github.com/log4js-node/log4js-node/milestone/83) + +- [fix: dateFileAppender unable to use units in maxLogSize](https://github.com/log4js-node/log4js-node/pull/1243) - thanks [@lamweili](https://github.com/lamweili) +- [type: added fileNameSep for FileAppender and DateFileAppender](https://github.com/log4js-node/log4js-node/pull/1241) - thanks [@lamweili](https://github.com/lamweili) +- [docs: updated usage of units for maxLogSize](https://github.com/log4js-node/log4js-node/pull/1242) - thanks [@lamweili](https://github.com/lamweili) +- [docs: updated comments in typescript def](https://github.com/log4js-node/log4js-node/pull/1240) - thanks [@lamweili](https://github.com/lamweili) +- [chore(deps): updated dependencies](https://github.com/log4js-node/log4js-node/pull/1244) - thanks [@lamweili](https://github.com/lamweili) + - chore(deps-dev): bump eslint from 8.14.0 to 8.15.0 + - chore(deps-dev): bump husky from 7.0.4 to 8.0.1 + - chore(deps-dev): bump tap from 16.1.0 to 16.2.0 + - chore(deps-dev): bump typescript from 4.6.3 to 4.6.4 + - chore(deps): bump date-format from 4.0.9 to 4.0.10 + - chore(deps): bump streamroller from 3.0.8 to 3.0.9 + - chore(deps): updated package-lock.json +- [chore(deps): updated dependencies](https://github.com/log4js-node/log4js-node/pull/1238) - thanks [@lamweili](https://github.com/lamweili) + - chore(deps-dev): bump tap from 16.0.1 to 16.1.0 + - chore(deps-dev): updated 
package-lock.json + +## [6.4.6](https://github.com/log4js-node/log4js-node/milestone/82) + +- [chore(deps): updated dependencies](https://github.com/log4js-node/log4js-node/pull/1236) - thanks [@lamweili](https://github.com/lamweili) + - chore(deps-dev): bump eslint from 8.13.0 to 8.14.0 + - chore(deps): bump date-format from 4.0.7 to 4.0.9 + - chore(deps): bump streamroller from 3.0.7 to 3.0.8 + - fix: [#1216](https://github.com/log4js-node/log4js-node/issues/1216) where promise rejection is not handled ([streamroller@3.0.8 changelog](https://github.com/log4js-node/streamroller/blob/master/CHANGELOG.md)) + - chore(deps): updated package-lock.json +- [chore(deps): updated dependencies](https://github.com/log4js-node/log4js-node/pull/1234) - thanks [@lamweili](https://github.com/lamweili) + - chore(deps): bump fs-extra from 10.0.1 to 10.1.0 + - chore(deps): updated package-lock.json + +## [6.4.5](https://github.com/log4js-node/log4js-node/milestone/81) + +- [fix: deserialise for enableCallStack features: filename, lineNumber, columnNumber, callStack](https://github.com/log4js-node/log4js-node/pull/1230) - thanks [@lamweili](https://github.com/lamweili) +- [fix: fileDepth for ESM](https://github.com/log4js-node/log4js-node/pull/1224) - thanks [@lamweili](https://github.com/lamweili) +- [refactor: replace deprecated String.prototype.substr()](https://github.com/log4js-node/log4js-node/pull/1223) - thanks [@CommanderRoot](https://github.com/CommanderRoot) +- [type: LogEvent types](https://github.com/log4js-node/log4js-node/pull/1231) - thanks [@lamweili](https://github.com/lamweili) +- [docs: updated typescript usage](https://github.com/log4js-node/log4js-node/pull/1229) - thanks [@lamweili](https://github.com/lamweili) +- [chore(deps): updated dependencies](https://github.com/log4js-node/log4js-node/pull/1232) - thanks [@lamweili](https://github.com/lamweili) + - chore(deps): bump date-format from 4.0.6 to 4.0.7 + - chore(deps): bump streamroller from 3.0.6 to 3.0.7 + - fix: [#1225](https://github.com/log4js-node/log4js-node/issues/1225) where fs-extra throws error when fs.realpath.native is undefined ([streamroller@3.0.7 changelog](https://github.com/log4js-node/streamroller/blob/master/CHANGELOG.md)) + - chore(deps): updated package-lock.json +- [chore(deps): updated dependencies](https://github.com/log4js-node/log4js-node/pull/1228) - thanks [@lamweili](https://github.com/lamweili) + - chore(deps-dev): bump eslint from 8.11.0 to 8.13.0 + - chore(deps-dev): bump eslint-plugin-import from 2.25.4 to 2.26.0 + - chore(deps-dev): bump tap from 16.0.0 to 16.0.1 + - chore(deps-dev): bump typescript from 4.6.2 to 4.6.3 + - chore(deps-dev): updated package-lock.json +- [chore(deps-dev): bump minimist from 1.2.5 to 1.2.6](https://github.com/log4js-node/log4js-node/pull/1227) - thanks [@Dependabot](https://github.com/dependabot) + +## [6.4.4](https://github.com/log4js-node/log4js-node/milestone/80) + +- [fix: set logger.level on runtime will no longer wrongly reset useCallStack](https://github.com/log4js-node/log4js-node/pull/1217) - thanks [@lamweili](https://github.com/lamweili) +- [docs: updated docs for broken links and inaccessible pages](https://github.com/log4js-node/log4js-node/pull/1219) - thanks [@lamweili](https://github.com/lamweili) +- [docs: broken link to gelf appender](https://github.com/log4js-node/log4js-node/pull/1218) - thanks [@mattalexx](https://github.com/mattalexx) +- [docs: updated docs for appenders module loading](https://github.com/log4js-node/log4js-node/pull/985) - thanks 
[@leonimurilo](https://github.com/leonimurilo) +- [chore(deps): updated dependencies](https://github.com/log4js-node/log4js-node/pull/1221) - thanks [@lamweili](https://github.com/lamweili) + - chore(deps): bump streamroller from 3.0.5 to 3.0.6 + - chore(deps): bump debug from 4.3.3 to 4.3.4 + - chore(deps): bump date-format from 4.0.5 to 4.0.6 + - chore(deps-dev): bump prettier from 2.5.1 to 2.6.0 + - chore(deps): updated package-lock.json + +## [6.4.3](https://github.com/log4js-node/log4js-node/milestone/79) + +- [fix: added filename validation](https://github.com/log4js-node/log4js-node/pull/1201) - thanks [@lamweili](https://github.com/lamweili) +- [refactor: do not initialise default appenders as it will be done again by configure()](https://github.com/log4js-node/log4js-node/pull/1210) - thanks [@lamweili](https://github.com/lamweili) +- [refactor: defensive coding for cluster=null if require('cluster') fails in try-catch ](https://github.com/log4js-node/log4js-node/pull/1199) - thanks [@lamweili](https://github.com/lamweili) +- [refactor: removed redundant logic in tcp-serverAppender](https://github.com/log4js-node/log4js-node/pull/1198) - thanks [@lamweili](https://github.com/lamweili) +- [refactor: removed redundant logic in multiprocessAppender](https://github.com/log4js-node/log4js-node/pull/1197) - thanks [@lamweili](https://github.com/lamweili) +- test: 100% test coverage - thanks [@lamweili](https://github.com/lamweili) + - test: part 1 of 3: [#1200](https://github.com/log4js-node/log4js-node/pull/1200) + - test: part 2 of 3: [#1204](https://github.com/log4js-node/log4js-node/pull/1204) + - test: part 3 of 3: [#1205](https://github.com/log4js-node/log4js-node/pull/1205) + - [test: improved test cases](https://github.com/log4js-node/log4js-node/pull/1211) +- [docs: updated README.md with badges](https://github.com/log4js-node/log4js-node/pull/1209) - thanks [@lamweili](https://github.com/lamweili) +- [docs: added docs for istanbul ignore](https://github.com/log4js-node/log4js-node/pull/1208) - thanks [@lamweili](https://github.com/lamweili) +- [docs: updated logger api docs](https://github.com/log4js-node/log4js-node/pull/1203) - thanks [@lamweili](https://github.com/lamweili) +- [docs: updated file and fileSync appender docs](https://github.com/log4js-node/log4js-node/pull/1202) - thanks [@lamweili](https://github.com/lamweili) +- [chore(lint): improve eslint rules](https://github.com/log4js-node/log4js-node/pull/1206) - thanks [@lamweili](https://github.com/lamweili) +- [chore(deps): updated dependencies](https://github.com/log4js-node/log4js-node/pull/1207) - thanks [@lamweili](https://github.com/lamweili) + - chore(deps-dev): bump eslint from 8.10.0 to 8.11.0 + - chore(deps-dev): bump eslint-config-airbnb-base from 13.2.0 to 15.0.0 + - chore(deps-dev): bump eslint-config-prettier from 8.4.0 to 8.5.0 + - chore(deps-dev): bump tap from 15.1.6 to 16.0.0 + - chore(deps): bump date-format from 4.0.4 to 4.0.5 + - chore(deps): bump streamroller from 3.0.4 to 3.0.5 + - chore(deps): updated package-lock.json + +## [6.4.2](https://github.com/log4js-node/log4js-node/milestone/78) + +- [fix: fileSync appender to create directory recursively](https://github.com/log4js-node/log4js-node/pull/1191) - thanks [@lamweili](https://github.com/lamweili) +- [fix: serialise() for NaN, Infinity, -Infinity and undefined](https://github.com/log4js-node/log4js-node/pull/1188) - thanks [@lamweili](https://github.com/lamweili) +- [fix: connectLogger not logging on 
close](https://github.com/log4js-node/log4js-node/pull/1179) - thanks [@lamweili](https://github.com/lamweili) +- [refactor: defensive coding](https://github.com/log4js-node/log4js-node/pull/1183) - thanks [@lamweili](https://github.com/lamweili) +- [type: fixed Logger constructor](https://github.com/log4js-node/log4js-node/pull/1177) - thanks [@lamweili](https://github.com/lamweili) +- [test: improve test coverage](https://github.com/log4js-node/log4js-node/pull/1184) - thanks [@lamweili](https://github.com/lamweili) +- [test: refactor and replaced tap deprecation in preparation for tap v15](https://github.com/log4js-node/log4js-node/pull/1172) - thanks [@lamweili](https://github.com/lamweili) +- [test: added e2e test for multiprocess Appender](https://github.com/log4js-node/log4js-node/pull/1170) - thanks [@nicojs](https://github.com/nicojs) +- [docs: updated file appender docs](https://github.com/log4js-node/log4js-node/pull/1182) - thanks [@lamweili](https://github.com/lamweili) +- [docs: updated dateFile appender docs](https://github.com/log4js-node/log4js-node/pull/1181) - thanks [@lamweili](https://github.com/lamweili) +- [docs: corrected typo in sample code for multiFile appender](https://github.com/log4js-node/log4js-node/pull/1180) - thanks [@lamweili](https://github.com/lamweili) +- [chore(deps): updated deps-dev](https://github.com/log4js-node/log4js-node/pull/1194) - thanks [@lamweili](https://github.com/lamweili) + - chore(deps): bump date-format from 4.0.3 to 4.0.4 + - chore(deps): bump streamroller from 3.0.2 to 3.0.4 + - fix: [#1189](https://github.com/log4js-node/log4js-node/issues/1189) for an compatibility issue with directory creation for NodeJS < 10.12.0 ([streamroller@3.0.3 changelog](https://github.com/log4js-node/streamroller/blob/master/CHANGELOG.md)) + - chore(deps-dev): bump eslint from 8.8.0 to 8.10.0 + - chore(deps-dev): bump eslint-config-prettier from 8.3.0 to 8.4.0 + - chore(deps-dev): bump fs-extra from 10.0.0 to 10.0.1 + - chore(deps-dev): bump typescript from 4.5.5 to 4.6.2 +- [chore(deps): updated deps-dev](https://github.com/log4js-node/log4js-node/pull/1185) - thanks [@lamweili](https://github.com/lamweili) + - chore(deps): bump flatted from 3.2.4 to 3.2.5 + - chore(deps-dev): bump eslint from 8.7.0 to 8.8.0 +- [chore(deps): updated package-lock.json](https://github.com/log4js-node/log4js-node/pull/1174) - thanks [@lamweili](https://github.com/lamweili) +- [chore(deps-dev): bump tap from 14.10.7 to 15.1.6](https://github.com/log4js-node/log4js-node/pull/1173) - thanks [@lamweili](https://github.com/lamweili) + +## [6.4.1](https://github.com/log4js-node/log4js-node/milestone/77) + +- [fix: startup multiprocess even when no direct appenders](https://github.com/log4js-node/log4js-node/pull/1162) - thanks [@nicojs](https://github.com/nicojs) + - [refactor: fixed eslint warnings](https://github.com/log4js-node/log4js-node/pull/1165) - thanks [@lamweili](https://github.com/lamweili) +- [refactor: additional alias for date patterns](https://github.com/log4js-node/log4js-node/pull/1163) - thanks [@lamweili](https://github.com/lamweili) +- [refactor: added emitWarning for deprecation](https://github.com/log4js-node/log4js-node/pull/1164) - thanks [@lamweili](https://github.com/lamweili) +- [type: Fixed wrong types from 6.4.0 regression](https://github.com/log4js-node/log4js-node/pull/1158) - thanks [@glasser](https://github.com/glasser) +- [docs: changed author to contributors in package.json](https://github.com/log4js-node/log4js-node/pull/1153) - thanks 
[@lamweili](https://github.com/lamweili) +- [chore(deps): bump node-fetch from 2.6.6 to 2.6.7](https://github.com/log4js-node/log4js-node/pull/1167) - thanks [@Dependabot](https://github.com/dependabot) +- [chore(deps-dev): bump typescript from 4.5.4 to 4.5.5](https://github.com/log4js-node/log4js-node/pull/1166) - thanks [@lamweili](https://github.com/lamweili) + +## [6.4.0](https://github.com/log4js-node/log4js-node/milestone/76) - BREAKING CHANGE 💥 + +New default file permissions may cause external applications unable to read logs. +A [manual code/configuration change](https://github.com/log4js-node/log4js-node/pull/1141#issuecomment-1076224470) is required. + +- [feat: added warnings when log() is used with invalid levels before fallbacking to INFO](https://github.com/log4js-node/log4js-node/pull/1062) - thanks [@abernh](https://github.com/abernh) +- [feat: exposed Recording](https://github.com/log4js-node/log4js-node/pull/1103) - thanks [@polo-language](https://github.com/polo-language) +- [fix: default file permission to be 0o600 instead of 0o644](https://github.com/log4js-node/log4js-node/pull/1141) - thanks [ranjit-git](https://www.huntr.dev/users/ranjit-git) and [@lamweili](https://github.com/lamweili) + - [docs: updated fileSync.md and misc comments](https://github.com/log4js-node/log4js-node/pull/1148) - thanks [@lamweili](https://github.com/lamweili) +- [fix: file descriptor leak if repeated configure()](https://github.com/log4js-node/log4js-node/pull/1113) - thanks [@lamweili](https://github.com/lamweili) +- [fix: MaxListenersExceededWarning from NodeJS](https://github.com/log4js-node/log4js-node/pull/1110) - thanks [@lamweili](https://github.com/lamweili) + - [test: added assertion for increase of SIGHUP listeners on log4js.configure()](https://github.com/log4js-node/log4js-node/pull/1142) - thanks [@lamweili](https://github.com/lamweili) +- [fix: missing TCP appender with Webpack and Typescript](https://github.com/log4js-node/log4js-node/pull/1028) - thanks [@techmunk](https://github.com/techmunk) +- [fix: dateFile appender exiting NodeJS on error](https://github.com/log4js-node/log4js-node/pull/1097) - thanks [@4eb0da](https://github.com/4eb0da) + - [refactor: using writer.writable instead of alive for checking](https://github.com/log4js-node/log4js-node/pull/1144) - thanks [@lamweili](https://github.com/lamweili) +- [fix: TCP appender exiting NodeJS on error](https://github.com/log4js-node/log4js-node/pull/1089) - thanks [@jhonatanTeixeira](https://github.com/jhonatanTeixeira) +- [fix: multiprocess appender exiting NodeJS on error](https://github.com/log4js-node/log4js-node/pull/529) - thanks [@harlentan](https://github.com/harlentan) +- [test: update fakeFS.read as graceful-fs uses it](https://github.com/log4js-node/log4js-node/pull/1127) - thanks [@lamweili](https://github.com/lamweili) +- [test: update fakeFS.realpath as fs-extra uses it](https://github.com/log4js-node/log4js-node/pull/1128) - thanks [@lamweili](https://github.com/lamweili) +- test: added tap.tearDown() to clean up test files + - test: [#1143](https://github.com/log4js-node/log4js-node/pull/1143) - thanks [@lamweili](https://github.com/lamweili) + - test: [#1022](https://github.com/log4js-node/log4js-node/pull/1022) - thanks [@abetomo](https://github.com/abetomo) +- [type: improved @types for AppenderModule](https://github.com/log4js-node/log4js-node/pull/1079) - thanks [@nicobao](https://github.com/nicobao) +- [type: Updated fileSync appender types](https://github.com/log4js-node/log4js-node/pull/1116) 
- thanks [@lamweili](https://github.com/lamweili) +- [type: Removed erroneous type in file appender](https://github.com/log4js-node/log4js-node/pull/1031) - thanks [@vdmtrv](https://github.com/vdmtrv) +- [type: Updated Logger.log type](https://github.com/log4js-node/log4js-node/pull/1115) - thanks [@ZLundqvist](https://github.com/ZLundqvist) +- [type: Updated Logger.\_log type](https://github.com/log4js-node/log4js-node/pull/1117) - thanks [@lamweili](https://github.com/lamweili) +- [type: Updated Logger.level type](https://github.com/log4js-node/log4js-node/pull/1118) - thanks [@lamweili](https://github.com/lamweili) +- [type: Updated Levels.getLevel type](https://github.com/log4js-node/log4js-node/pull/1072) - thanks [@saulzhong](https://github.com/saulzhong) +- [chore(deps): bump streamroller from 3.0.1 to 3.0.2](https://github.com/log4js-node/log4js-node/pull/1147) - thanks [@lamweili](https://github.com/lamweili) +- [chore(deps): bump date-format from 4.0.2 to 4.0.3](https://github.com/log4js-node/log4js-node/pull/1146) - thanks [@lamweili](https://github.com/lamweili) +- [chore(deps-dev): bump eslint from from 8.6.0 to 8.7.0](https://github.com/log4js-node/log4js-node/pull/1145) - thanks [@lamweili](https://github.com/lamweili) +- [chore(deps-dev): bump nyc from 14.1.1 to 15.1.0](https://github.com/log4js-node/log4js-node/pull/1140) - thanks [@lamweili](https://github.com/lamweili) +- [chore(deps-dev): bump eslint from 5.16.0 to 8.6.0](https://github.com/log4js-node/log4js-node/pull/1138) - thanks [@lamweili](https://github.com/lamweili) +- [chore(deps): bump flatted from 2.0.2 to 3.2.4](https://github.com/log4js-node/log4js-node/pull/1137) - thanks [@lamweili](https://github.com/lamweili) +- [chore(deps-dev): bump fs-extra from 8.1.0 to 10.0.0](https://github.com/log4js-node/log4js-node/pull/1136) - thanks [@lamweili](https://github.com/lamweili) +- [chore(deps): bump streamroller from 2.2.4 to 3.0.1](https://github.com/log4js-node/log4js-node/pull/1135) - thanks [@lamweili](https://github.com/lamweili) + - [fix: compressed file ignores dateFile appender "mode"](https://github.com/log4js-node/streamroller/pull/65) - thanks [@rnd-debug](https://github.com/rnd-debug) + - fix: [#1039](https://github.com/log4js-node/log4js-node/issues/1039) where there is an additional separator in filename ([streamroller@3.0.0 changelog](https://github.com/log4js-node/streamroller/blob/master/CHANGELOG.md)) + - fix: [#1035](https://github.com/log4js-node/log4js-node/issues/1035), [#1080](https://github.com/log4js-node/log4js-node/issues/1080) for daysToKeep naming confusion ([streamroller@3.0.0 changelog](https://github.com/log4js-node/streamroller/blob/master/CHANGELOG.md)) + - [refactor: migrated from daysToKeep to numBackups due to streamroller@^3.0.0](https://github.com/log4js-node/log4js-node/pull/1149) - thanks [@lamweili](https://github.com/lamweili) + - [feat: allows for zero backups](https://github.com/log4js-node/log4js-node/pull/1151) - thanks [@lamweili](https://github.com/lamweili) +- [chore(deps): bump date-format from 3.0.0 to 4.0.2](https://github.com/log4js-node/log4js-node/pull/1134) - thanks [@lamweili](https://github.com/lamweili) +- [chore(deps): updated dependencies](https://github.com/log4js-node/log4js-node/pull/1130) - thanks [@lamweili](https://github.com/lamweili) + - chore(deps-dev): bump eslint-config-prettier from 6.15.0 to 8.3.0 + - chore(deps-dev): bump eslint-plugin-prettier from 3.4.1 to 4.0.0 + - chore(deps-dev): bump husky from 3.1.0 to 7.0.4 + - chore(deps-dev): 
bump prettier from 1.19.0 to 2.5.1 + - chore(deps-dev): bump typescript from 3.9.10 to 4.5.4 +- [chore(deps-dev): bump eslint-config-prettier from 6.15.0 to 8.3.0](https://github.com/log4js-node/log4js-node/pull/1129) - thanks [@lamweili](https://github.com/lamweili) +- [chore(deps): updated dependencies](https://github.com/log4js-node/log4js-node/pull/1121) - thanks [@lamweili](https://github.com/lamweili) + - chore(deps-dev): bump codecov from 3.6.1 to 3.8.3 + - chore(deps-dev): bump eslint-config-prettier from 6.5.0 to 6.15.0 + - chore(deps-dev): bump eslint-import-resolver-node from 0.3.2 to 0.3.6 + - chore(deps-dev): bump eslint-plugin-import" from 2.18.2 to 2.25.4 + - chore(deps-dev): bump eslint-plugin-prettier from 3.1.1 to 3.4.1 + - chore(deps-dev): bump husky from 3.0.9 to 3.1.0 + - chore(deps-dev): bump prettier from 1.18.2 to 1.19.1 + - chore(deps-dev): bump typescript from 3.7.2 to 3.9.10 +- [chore(deps): bump path-parse from 1.0.6 to 1.0.7](https://github.com/log4js-node/log4js-node/pull/1120) - thanks [@Dependabot](https://github.com/dependabot) +- [chore(deps): bump glob-parent from 5.1.1 to 5.1.2](https://github.com/log4js-node/log4js-node/pull/1084) - thanks [@Dependabot](https://github.com/dependabot) +- [chore(deps): bump hosted-git-info from 2.7.1 to 2.8.9](https://github.com/log4js-node/log4js-node/pull/1076) - thanks [@Dependabot](https://github.com/dependabot) +- [chore(deps): bump lodash from 4.17.14 to 4.17.21](https://github.com/log4js-node/log4js-node/pull/1075) - thanks [@Dependabot](https://github.com/dependabot) +- [chore(deps): bump y18n from 4.0.0 to 4.0.1](https://github.com/log4js-node/log4js-node/pull/1070) - thanks [@Dependabot](https://github.com/dependabot) +- [chore(deps): bump node-fetch from 2.6.0 to 2.6.1](https://github.com/log4js-node/log4js-node/pull/1047) - thanks [@Dependabot](https://github.com/dependabot) +- [chore(deps): bump yargs-parser from 13.1.1 to 13.1.2](https://github.com/log4js-node/log4js-node/pull/1045) - thanks [@Dependabot](https://github.com/dependabot) +- [chore(deps-dev): bump codecov from 3.6.5 to 3.7.1](https://github.com/log4js-node/log4js-node/pull/1033) - thanks [@Dependabot](https://github.com/dependabot) + +## [6.3.0](https://github.com/log4js-node/log4js-node/milestone/75) + +- [Add option to file appender to remove ANSI colours](https://github.com/log4js-node/log4js-node/pull/1001) - thanks [@BlueCocoa](https://github.com/BlueCocoa) +- [Do not create appender if no categories use it](https://github.com/log4js-node/log4js-node/pull/1002) - thanks [@rnd-debug](https://github.com/rnd-debug) +- [Docs: better categories inheritance description](https://github.com/log4js-node/log4js-node/pull/1003) - thanks [@rnd-debug](https://github.com/rnd-debug) +- [Better jsdoc docs](https://github.com/log4js-node/log4js-node/pull/1004) - thanks [@wataash](https://github.com/wataash) +- [Typescript: access category field in Logger](https://github.com/log4js-node/log4js-node/pull/1006) - thanks [@rtvd](https://github.com/rtvd) +- [Docs: influxdb appender](https://github.com/log4js-node/log4js-node/pull/1014) - thanks [@rnd-debug](https://github.com/rnd-debug) +- [Support for fileSync appender in webpack](https://github.com/log4js-node/log4js-node/pull/1015) - thanks [@lauren-li](https://github.com/lauren-li) +- [Docs: UDP appender](https://github.com/log4js-node/log4js-node/pull/1018) - thanks [@iassasin](https://github.com/iassasin) +- [Style: spaces and tabs](https://github.com/log4js-node/log4js-node/pull/1016) - thanks 
[@abetomo](https://github.com/abetomo) + +## [6.2.1](https://github.com/log4js-node/log4js-node/milestone/74) + +- [Update streamroller to 2.2.4 to fix incorrect filename matching during log rotation](https://github.com/log4js-node/log4js-node/pull/996) + +## [6.2.0](https://github.com/log4js-node/log4js-node/milestone/73) + +- [Add custom message end token to TCP appender](https://github.com/log4js-node/log4js-node/pull/994) - thanks [@rnd-debug](https://github.com/rnd-debug) +- [Update acorn (dev dep of a dep)](https://github.com/log4js-node/log4js-node/pull/992) - thanks Github Robots. + +## [6.1.2](https://github.com/log4js-node/log4js-node/milestone/72) + +- [Handle out-of-order appender loading](https://github.com/log4js-node/log4js-node/pull/986) - thanks [@mvastola](https://github.com/mvastola) + +## [6.1.1](https://github.com/log4js-node/log4js-node/milestone/71) + +- [Add guards for undefined shutdown callback](https://github.com/log4js-node/log4js-node/pull/972) - thanks [@aaron-edwards](https://github.com/aaron-edwards) +- [Ignore .bob files](https://github.com/log4js-node/log4js-node/pull/975) - thanks [@cesine](https://github.com/cesine) +- [Add mark method to type definitions](https://github.com/log4js-node/log4js-node/pull/984) - thanks [@techmunk](https://github.com/techmunk) + +## [6.1.0](https://github.com/log4js-node/log4js-node/milestone/70) + +- [Add pause event to dateFile appender](https://github.com/log4js-node/log4js-node/pull/965) - thanks [@shayantabatabaee](https://github.com/shayantabatabaee) +- [Add pause event to file appender](https://github.com/log4js-node/log4js-node/pull/938) - thanks [@shayantabatabaee](https://github.com/shayantabatabaee) +- [Add pause/resume event to docs](https://github.com/log4js-node/log4js-node/pull/966) + +## [6.0.0](https://github.com/log4js-node/log4js-node/milestone/69) + +- [Update streamroller to fix unhandled promise rejection](https://github.com/log4js-node/log4js-node/pull/962) +- [Updated date-format library](https://github.com/log4js-node/log4js-node/pull/960) + +## [5.3.0](https://github.com/log4js-node/log4js-node/milestone/68) + +- [Padding and truncation changes](https://github.com/log4js-node/log4js-node/pull/956) + +## [5.2.2](https://github.com/log4js-node/log4js-node/milestone/67) + +- [Update streamroller to fix overwriting old files when using date rolling](https://github.com/log4js-node/log4js-node/pull/951) + +## [5.2.1](https://github.com/log4js-node/log4js-node/milestone/66) + +- [Update streamroller to fix numToKeep not working with dateFile pattern that is all digits](https://github.com/log4js-node/log4js-node/pull/949) + +## [5.2.0](https://github.com/log4js-node/log4js-node/milestone/65) + +- [Update streamroller to 2.2.0 (copy and truncate when file is busy)](https://github.com/log4js-node/log4js-node/pull/948) + +## [5.1.0](https://github.com/log4js-node/log4js-node/milestone/64) + +- [Update streamroller to 2.1.0 (windows fixes)](https://github.com/log4js-node/log4js-node/pull/933) + +## [5.0.0](https://github.com/log4js-node/log4js-node/milestone/61) + +- [Update streamroller to 2.0.0 (remove support for node v6)](https://github.com/log4js-node/log4js-node/pull/922) +- [Update dependencies (mostly dev deps)](https://github.com/log4js-node/log4js-node/pull/923) +- [Fix error when cluster not available](https://github.com/log4js-node/log4js-node/pull/930) +- [Test coverage improvements](https://github.com/log4js-node/log4js-node/pull/925) + +## 
[4.5.1](https://github.com/log4js-node/log4js-node/milestone/63) + +- [Update streamroller 1.0.5 -> 1.0.6 (to fix overwriting old backup log files)](https://github.com/log4js-node/log4js-node/pull/918) +- [Dependency update: lodash 4.17.4 (dependency of a dependency, not log4js)](https://github.com/log4js-node/log4js-node/pull/917) - thanks Github Automated Security Thing. +- [Dependency update: lodash 4.4.0 -> 4.5.0 (dependency of a dependency, not log4js)](https://github.com/log4js-node/log4js-node/pull/915) - thanks Github Automated Security Thing. + +## [4.5.0](https://github.com/log4js-node/log4js-node/milestone/62) + +- [Override call stack parsing](https://github.com/log4js-node/log4js-node/pull/914) - thanks [@rommni](https://github.com/rommni) +- [patternLayout filename depth token](https://github.com/log4js-node/log4js-node/pull/913) - thanks [@rommni](https://github.com/rommni) + +## [4.4.0](https://github.com/log4js-node/log4js-node/milestone/60) + +- [Add option to pass appender module in config](https://github.com/log4js-node/log4js-node/pull/833) - thanks [@kaxelson](https://github.com/kaxelson) +- [Added docs for passing appender module](https://github.com/log4js-node/log4js-node/pull/904) +- [Updated dependencies](https://github.com/log4js-node/log4js-node/pull/900) + +## [4.3.2](https://github.com/log4js-node/log4js-node/milestone/59) + +- [Types for enableCallStack](https://github.com/log4js-node/log4js-node/pull/897) - thanks [@citrusjunoss](https://github.com/citrusjunoss) + +## [4.3.1](https://github.com/log4js-node/log4js-node/milestone/58) + +- [Fix for maxLogSize in dateFile appender](https://github.com/log4js-node/log4js-node/pull/889) + +## [4.3.0](https://github.com/log4js-node/log4js-node/milestone/57) + +- [Feature: line number support](https://github.com/log4js-node/log4js-node/pull/879) - thanks [@victor0801x](https://github.com/victor0801x) +- [Fix for missing core appenders in webpack](https://github.com/log4js-node/log4js-node/pull/882) + +## [4.2.0](https://github.com/log4js-node/log4js-node/milestone/56) + +- [Feature: add appender and level inheritance](https://github.com/log4js-node/log4js-node/pull/863) - thanks [@pharapiak](https://github.com/pharapiak) +- [Feature: add response to context for connectLogger](https://github.com/log4js-node/log4js-node/pull/862) - thanks [@leak4mk0](https://github.com/leak4mk0) +- [Fix for broken sighup handler](https://github.com/log4js-node/log4js-node/pull/873) +- [Add missing types for Level](https://github.com/log4js-node/log4js-node/pull/872) - thanks [@Ivkaa](https://github.com/Ivkaa) +- [Typescript fixes for connect logger context](https://github.com/log4js-node/log4js-node/pull/876) - thanks [@leak4mk0](https://github.com/leak4mk0) +- [Upgrade to streamroller-1.0.5 to fix log rotation bug](https://github.com/log4js-node/log4js-node/pull/878) + +## [4.1.1](https://github.com/log4js-node/log4js-node/milestone/55) + +- [Various test fixes for node v12](https://github.com/log4js-node/log4js-node/pull/870) +- [Fix layout problem in node v12](https://github.com/log4js-node/log4js-node/pull/860) - thanks [@bjornstar](https://github.com/bjornstar) +- [Add missing types for addLevels](https://github.com/log4js-node/log4js-node/pull/867) - thanks [@Ivkaa](https://github.com/Ivkaa) +- [Allow any return type for layout function](https://github.com/log4js-node/log4js-node/pull/845) - thanks [@xinbenlv](https://github.com/xinbenlv) + +## [4.1.0](https://github.com/log4js-node/log4js-node/milestone/54) + +- Updated 
streamroller to 1.0.4, to fix a bug where the initial size of an existing file was ignored when appending +- [Updated streamroller to 1.0.3](https://github.com/log4js-node/log4js-node/pull/841), to fix a crash bug if the date pattern was all digits. +- [Updated dependencies](https://github.com/log4js-node/log4js-node/pull/840) + +## Previous versions + +Change information for older versions can be found by looking at the [milestones](https://github.com/log4js-node/log4js-node/milestones) in github. diff --git a/README.md b/README.md index e3d28b8e..8621b63c 100644 --- a/README.md +++ b/README.md @@ -1,148 +1,117 @@ -# log4js-node [![Build Status](https://secure.travis-ci.org/nomiddlename/log4js-node.png?branch=master)](http://travis-ci.org/nomiddlename/log4js-node) +# log4js-node [![CodeQL](https://github.com/log4js-node/log4js-node/actions/workflows/codeql-analysis.yml/badge.svg)](https://github.com/log4js-node/log4js-node/actions/workflows/codeql-analysis.yml) [![Node.js CI](https://github.com/log4js-node/log4js-node/actions/workflows/node.js.yml/badge.svg)](https://github.com/log4js-node/log4js-node/actions/workflows/node.js.yml) [![NPM](https://nodei.co/npm/log4js.png?downloads=true&downloadRank=true&stars=true)](https://nodei.co/npm/log4js/) This is a conversion of the [log4js](https://github.com/stritti/log4js) -framework to work with [node](http://nodejs.org). I've mainly stripped out the browser-specific code and tidied up some of the javascript. +framework to work with [node](http://nodejs.org). I started out just stripping out the browser-specific code and tidying up some of the javascript to work better in node. It grew from there. Although it's got a similar name to the Java library [log4j](https://logging.apache.org/log4j/2.x/), thinking that it will behave the same way will only bring you sorrow and confusion. -Out of the box it supports the following features: +The full documentation is available [here](https://log4js-node.github.io/log4js-node/). -* coloured console logging to stdout or stderr -* replacement of node's console.log functions (optional) -* file appender, with configurable log rolling based on file size or date -* SMTP appender -* GELF appender -* Loggly appender -* Logstash UDP appender -* logFaces appender -* multiprocess appender (useful when you've got worker processes) -* a logger for connect/express servers -* configurable log message layout/patterns -* different log levels for different log categories (make some parts of your app log as DEBUG, others only ERRORS, etc.) +[Changes in version 3.x](https://log4js-node.github.io/log4js-node/v3-changes.md) -## Important changes in 1.0 +There have been a few changes between log4js 1.x and 2.x (and 0.x too). You should probably read this [migration guide](https://log4js-node.github.io/log4js-node/migration-guide.html) if things aren't working. -The default appender has been changed from `console` to `stdout` - this alleviates a memory problem that happens when logging using console. If you're using log4js in a browser (via browserify), then you'll probably need to explicitly configure log4js to use the console appender now (unless browserify handles process.stdout). +Out of the box it supports the following features: -I'm also trying to move away from `vows` for the tests, and use `tape` instead. New tests should be added to `test/tape`, not the vows ones.
+- coloured console logging to stdout or stderr +- file appender, with configurable log rolling based on file size or date +- a logger for connect/express servers +- configurable log message layout/patterns +- different log levels for different log categories (make some parts of your app log as DEBUG, others only ERRORS, etc.) -log4js also no longer supports node versions below 0.12.x. +Optional appenders are available: -NOTE: from log4js 0.5 onwards you'll need to explicitly enable replacement of node's console.log functions. Do this either by calling `log4js.replaceConsole()` or configuring with an object or json file like this: +- [SMTP](https://github.com/log4js-node/smtp) +- [GELF](https://github.com/log4js-node/gelf) +- [Loggly](https://github.com/log4js-node/loggly) +- Logstash ([UDP](https://github.com/log4js-node/logstashUDP) and [HTTP](https://github.com/log4js-node/logstashHTTP)) +- logFaces ([UDP](https://github.com/log4js-node/logFaces-UDP) and [HTTP](https://github.com/log4js-node/logFaces-HTTP)) +- [RabbitMQ](https://github.com/log4js-node/rabbitmq) +- [Redis](https://github.com/log4js-node/redis) +- [Hipchat](https://github.com/log4js-node/hipchat) +- [Slack](https://github.com/log4js-node/slack) +- [mailgun](https://github.com/log4js-node/mailgun) +- [InfluxDB](https://github.com/rnd-debug/log4js-influxdb-appender) -```javascript -{ - appenders: [ - { type: "console" } - ], - replaceConsole: true -} -``` +## Getting help + +Having problems? Jump on the [slack](https://join.slack.com/t/log4js-node/shared_invite/enQtODkzMDQ3MzExMDczLWUzZmY0MmI0YWI1ZjFhODY0YjI0YmU1N2U5ZTRkOTYyYzg3MjY5NWI4M2FjZThjYjdiOGM0NjU2NzBmYTJjOGI) channel, or create an issue. If you want to help out with the development, the slack channel is a good place to go as well. ## installation +```bash npm install log4js - +``` ## usage Minimalist version: + ```javascript -var log4js = require('log4js'); +var log4js = require("log4js"); var logger = log4js.getLogger(); +logger.level = "debug"; logger.debug("Some debug messages"); ``` -By default, log4js outputs to stdout with the coloured layout (thanks to [masylum](http://github.com/masylum)), so for the above you would see: + +By default, log4js will not output any logs (so that it can safely be used in libraries). The `level` for the `default` category is set to `OFF`. To enable logs, set the level (as in the example). 
This will then output to stdout with the coloured layout (thanks to [masylum](http://github.com/masylum)), so for the above you would see: + ```bash [2010-01-17 11:43:37.987] [DEBUG] [default] - Some debug messages ``` -See example.js for a full example, but here's a snippet (also in fromreadme.js): + +See example.js for a full example, but here's a snippet (also in `examples/fromreadme.js`): + ```javascript -var log4js = require('log4js'); -//console log is loaded by default, so you won't normally need to do this -//log4js.loadAppender('console'); -log4js.loadAppender('file'); -//log4js.addAppender(log4js.appenders.console()); -log4js.addAppender(log4js.appenders.file('logs/cheese.log'), 'cheese'); - -var logger = log4js.getLogger('cheese'); -logger.setLevel('ERROR'); - -logger.trace('Entering cheese testing'); -logger.debug('Got cheese.'); -logger.info('Cheese is Gouda.'); -logger.warn('Cheese is quite smelly.'); -logger.error('Cheese is too ripe!'); -logger.fatal('Cheese was breeding ground for listeria.'); +const log4js = require("log4js"); +log4js.configure({ + appenders: { cheese: { type: "file", filename: "cheese.log" } }, + categories: { default: { appenders: ["cheese"], level: "error" } }, +}); + +const logger = log4js.getLogger("cheese"); +logger.trace("Entering cheese testing"); +logger.debug("Got cheese."); +logger.info("Cheese is Comté."); +logger.warn("Cheese is quite smelly."); +logger.error("Cheese is too ripe!"); +logger.fatal("Cheese was breeding ground for listeria."); ``` -Output: + +Output (in `cheese.log`): + ```bash [2010-01-17 11:43:37.987] [ERROR] cheese - Cheese is too ripe! [2010-01-17 11:43:37.990] [FATAL] cheese - Cheese was breeding ground for listeria. -``` -The first 5 lines of the code above could also be written as: -```javascript -var log4js = require('log4js'); -log4js.configure({ - appenders: [ - { type: 'console' }, - { type: 'file', filename: 'logs/cheese.log', category: 'cheese' } - ] -}); ``` -## configuration +## Note for library makers -You can configure the appenders and log levels manually (as above), or provide a -configuration file (`log4js.configure('path/to/file.json')`), or a configuration object. The -configuration file location may also be specified via the environment variable -LOG4JS_CONFIG (`export LOG4JS_CONFIG=path/to/file.json`). -An example file can be found in `test/vows/log4js.json`. An example config file with log rolling is in `test/vows/with-log-rolling.json`. -You can configure log4js to check for configuration file changes at regular intervals, and if changed, reload. This allows changes to logging levels to occur without restarting the application. +If you're writing a library and would like to include support for log4js, without introducing a dependency headache for your users, take a look at [log4js-api](https://github.com/log4js-node/log4js-api). -To turn it on and specify a period: +## Documentation -```javascript -log4js.configure('file.json', { reloadSecs: 300 }); -``` -For FileAppender you can also pass the path to the log directory as an option where all your log files would be stored. +Available [here](https://log4js-node.github.io/log4js-node/). -```javascript -log4js.configure('my_log4js_configuration.json', { cwd: '/absolute/path/to/log/dir' }); -``` -If you have already defined an absolute path for one of the FileAppenders in the configuration file, you could add a "absolute": true to the particular FileAppender to override the cwd option passed. 
Here is an example configuration file: - -#### my_log4js_configuration.json #### -```json -{ - "appenders": [ - { - "type": "file", - "filename": "relative/path/to/log_file.log", - "maxLogSize": 20480, - "backups": 3, - "category": "relative-logger" - }, - { - "type": "file", - "absolute": true, - "filename": "/absolute/path/to/log_file.log", - "maxLogSize": 20480, - "backups": 10, - "category": "absolute-logger" - } - ] -} -``` -Documentation for most of the core appenders can be found on the [wiki](https://github.com/nomiddlename/log4js-node/wiki/Appenders), otherwise take a look at the tests and the examples. +There's also [an example application](https://github.com/log4js-node/log4js-example). -## Documentation -See the [wiki](https://github.com/nomiddlename/log4js-node/wiki). Improve the [wiki](https://github.com/nomiddlename/log4js-node/wiki), please. +## TypeScript + +```ts +import * as log4js from "log4js"; +log4js.configure({ + appenders: { cheese: { type: "file", filename: "cheese.log" } }, + categories: { default: { appenders: ["cheese"], level: "error" } }, +}); -There's also [an example application](https://github.com/nomiddlename/log4js-example). +const logger = log4js.getLogger(); +logger.level = "debug"; +logger.debug("Some debug messages"); +``` ## Contributing -Contributions welcome, but take a look at the [rules](https://github.com/nomiddlename/log4js-node/wiki/Contributing) first. + +We're always looking for people to help out. Jump on [slack](https://join.slack.com/t/log4js-node/shared_invite/enQtODkzMDQ3MzExMDczLWUzZmY0MmI0YWI1ZjFhODY0YjI0YmU1N2U5ZTRkOTYyYzg3MjY5NWI4M2FjZThjYjdiOGM0NjU2NzBmYTJjOGI) and discuss what you want to do. Also, take a look at the [rules](https://log4js-node.github.io/log4js-node/contrib-guidelines.html) before submitting a pull request. ## License diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 00000000..83060309 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,19 @@ +# Security Policy + +## Supported Versions + +We're aiming to only support the latest major version of log4js. Older than that is usually _very_ old. + +| Version | Supported | +| ------- | ------------------ | +| 6.x | :white_check_mark: | +| < 6.0 | :x: | + +## Reporting a Vulnerability + +Report vulnerabilities via email to: + +- Gareth Jones +- Lam Wei Li + +Please put "[log4js:security]" in the subject line. We will aim to respond within a day or two. diff --git a/commitlint.config.js b/commitlint.config.js new file mode 100644 index 00000000..422b1944 --- /dev/null +++ b/commitlint.config.js @@ -0,0 +1 @@ +module.exports = { extends: ['@commitlint/config-conventional'] }; diff --git a/docs/Gemfile b/docs/Gemfile new file mode 100644 index 00000000..37f5eaa4 --- /dev/null +++ b/docs/Gemfile @@ -0,0 +1,2 @@ +source 'https://rubygems.org' +gem 'github-pages', group: :jekyll_plugins diff --git a/docs/_config.yml b/docs/_config.yml new file mode 100644 index 00000000..8ce87bfc --- /dev/null +++ b/docs/_config.yml @@ -0,0 +1,2 @@ +theme: jekyll-theme-minimal +repository: nomiddlename/log4js-node diff --git a/docs/_layouts/default.html b/docs/_layouts/default.html new file mode 100644 index 00000000..ba8235bb --- /dev/null +++ b/docs/_layouts/default.html @@ -0,0 +1,115 @@ + + + + + + + {{ site.title | default: site.github.repository_name }} by {{ + site.github.owner_name }} + + + + + + + +
+      {{ site.title | default: site.github.repository_name }}
+      {{ site.description | default: site.github.project_tagline }}
+      {% if site.github.is_project_page %}
+        View the Project on GitHub {{ github_name }}
+      {% endif %}
+      {% if site.github.is_user_page %}
+        View My GitHub Profile
+      {% endif %}
+      {{ content }}
+ + + {% if site.google_analytics %} + + {% endif %} + + diff --git a/docs/api.md b/docs/api.md new file mode 100644 index 00000000..95b69f7b --- /dev/null +++ b/docs/api.md @@ -0,0 +1,58 @@ +## API + +## configuration - `log4js.configure(object || string)` + +There is one entry point for configuring log4js. A string argument is treated as a filename to load configuration from. Config files should be JSON, and contain a configuration object (see format below). You can also pass a configuration object directly to `configure`. + +Configuration should take place immediately after requiring log4js for the first time in your application. If you do not call `configure`, log4js will use `LOG4JS_CONFIG` (if defined) or the default config. The default config defines one appender, which would log to stdout with the coloured layout, but also defines the default log level to be `OFF` - which means no logs will be output. + +If you are using `cluster`, then include the call to `configure` in the worker processes as well as the master. That way the worker processes will pick up the right levels for your categories, and any custom levels you may have defined. Appenders will only be defined on the master process, so there is no danger of multiple processes attempting to write to the same appender. No special configuration is needed to use log4js with clusters, unlike previous versions. + +Configuration objects must define at least one appender, and a default category. Log4js will throw an exception if the configuration is invalid. + +`configure` method call returns the configured log4js object. + +### Configuration Object + +Properties: + +- `levels` (optional, object) - used for defining custom log levels, or redefining existing ones; this is a map with the level name as the key (string, case insensitive), and an object as the value. The object should have two properties: the level value (integer) as the value, and the colour. Log levels are used to assign importance to log messages, with the integer value being used to sort them. If you do not specify anything in your configuration, the default values are used (ALL < TRACE < DEBUG < INFO < WARN < ERROR < FATAL < MARK < OFF - note that OFF is intended to be used to turn off logging, not as a level for actual logging, i.e. you would never call `logger.off('some log message')`). Levels defined here are used in addition to the default levels, with the integer value being used to determine their relation to the default levels. If you define a level with the same name as a default level, then the integer value in the config takes precedence. Level names must begin with a letter, and can only contain letters, numbers and underscores. +- `appenders` (object) - a map of named appenders (string) to appender definitions (object); appender definitions must have a property `type` (string) - other properties depend on the appender type. +- `categories` (object) - a map of named categories (string) to category definitions (object). You must define the `default` category which is used for all log events that do not match a specific category. Category definitions have two properties: + - `appenders` (array of strings) - the list of appender names to be used for this category. A category must have at least one appender. + - `level` (string, case insensitive) - the minimum log level that this category will send to the appenders. 
For example, if set to 'error' then the appenders will only receive log events of level 'error', 'fatal', 'mark' - log events of 'info', 'warn', 'debug', or 'trace' will be ignored. + - `enableCallStack` (boolean, optional, defaults to `false`) - setting this to `true` will make log events for this category use the call stack to generate line numbers and file names in the event. See [pattern layout](layouts.md) for how to output these values in your appenders. If you log an Error object, that Error object (or the first of many) will be used to generate the line numbers and file name instead. +- `pm2` (boolean) (optional) - set this to true if you're running your app using [pm2](http://pm2.keymetrics.io), otherwise logs will not work (you'll also need to install pm2-intercom as a pm2 module: `pm2 install pm2-intercom`) +- `pm2InstanceVar` (string) (optional, defaults to 'NODE_APP_INSTANCE') - set this if you're using pm2 and have changed the default name of the NODE_APP_INSTANCE variable. +- `disableClustering` (boolean) (optional) - set this to true if you liked the way log4js used to just ignore clustered environments, or you're having trouble with PM2 logging. Each worker process will do its own logging. Be careful with this if you're logging to files, weirdness can occur. + +## configured - `log4js.isConfigured()` + +The `isConfigured` method returns a boolean indicating whether `log4js.configure()` has previously been called successfully. An implicit `log4js.configure()` call made by `log4js.getLogger()` will also affect this value. + +## Loggers - `log4js.getLogger([category])` + +To support the minimalist usage, this function will implicitly call `log4js.configure()` with the [default configurations](https://github.com/log4js-node/log4js-node/blob/faebee0e8235cf00227ca88642eeff3471fc407c/lib/log4js.js#L158-L163) if it hasn't been configured before. + +This function takes a single optional string argument to denote the category to be used for log events on this logger. If no category is specified, the events will be routed to the appender for the `default` category. The function returns a `Logger` object which has its level set to the level specified for that category in the config and implements the following functions: + +- `<level>(args...)` - where `<level>` can be any of the lower case names of the levels (including any custom levels defined). For example: `logger.info('some info')` will dispatch a log event with a level of info. If you're using the basic, coloured or message pass-through [layouts](layouts.md), the logged string will have its formatting (placeholders like `%s`, `%d`, etc) delegated to [util.format](https://nodejs.org/api/util.html#util_util_format_format_args). +- `is<level>Enabled()` - returns true if a log event of level `<level>` (camel case) would be dispatched to the appender defined for the logger's category. For example: `logger.isInfoEnabled()` will return true if the level for the logger is INFO or lower. +- `addContext(<key>, <value>)` - where `<key>` is a string and `<value>` can be anything. This stores a key-value pair that is added to all log events generated by the logger. A typical use would be to add ids for tracking a user through your application. Currently only the `logFaces` appenders make use of the context values. +- `removeContext(<key>)` - removes a previously defined key-value pair from the context. +- `clearContext()` - removes all context pairs from the logger.
+- `setParseCallStackFunction(function | undefined)` - Allow to override the default way to parse the callstack data for the layout pattern, a generic javascript Error object is passed to the function. Must return an object with properties : `fileName` / `lineNumber` / `columnNumber` / `callStack` / `className` / `functionName` / `functionAlias` / `callerName`. Can, for example, be used if all of your log call are made from one "debug" class and you would to "erase" this class from the callstack to only show the function which called your "debug" class. If you pass `undefined` as the argument, it will be reset to the default parser. + +The `Logger` object has the following properties: + +- `level` - where `level` is a log4js level or a string that matches a level (e.g. 'info', 'INFO', etc). This allows overriding the configured level for this logger. Changing this value applies to all loggers of the same category. +- `useCallStack` - where `useCallStack` is a boolean to indicate if log events for this category use the call stack to generate line numbers and file names in the event. This allows overriding the configured useCallStack for this logger. Changing this value applies to all loggers of the same category. +- `callStackLinesToSkip` - where `callStackLinesToSkip` is a number (0 by default) that allows you to customize how many lines of the call stack should be skipped when parsing the Error stack. For example, if you call the logger from within a dedicated logging function, you can use `callStackLinesToSkip = 1` to ignore that function when looking at stack traces. + +## Shutdown - `log4js.shutdown([callback])` + +`shutdown` accepts a callback that will be called when log4js has closed all appenders and finished writing log events. Use this when your programme exits to make sure all your logs are written to files, sockets are closed, etc. + +## Custom Layouts - `log4js.addLayout(type, fn)` + +This function is used to add user-defined layout functions. See [layouts](layouts.md) for more details and an example. diff --git a/docs/appenders.md b/docs/appenders.md new file mode 100644 index 00000000..d85979be --- /dev/null +++ b/docs/appenders.md @@ -0,0 +1,101 @@ +# Log4js - Appenders + +Appenders serialise log events to some form of output. They can write to files, send emails, send data over the network. All appenders have a `type` which determines which appender gets used. For example: + +```javascript +const log4js = require("log4js"); +log4js.configure({ + appenders: { + out: { type: "stdout" }, + app: { type: "file", filename: "application.log" }, + }, + categories: { + default: { appenders: ["out", "app"], level: "debug" }, + }, +}); +``` + +This defines two appenders named 'out' and 'app'. 'out' uses the [stdout](stdout.md) appender which writes to standard out. 'app' uses the [file](file.md) appender, configured to write to 'application.log'. + +## Core Appenders + +The following appenders are included with log4js. Some require extra dependencies that are not included as part of log4js (the [smtp](https://github.com/log4js-node/smtp) appender needs [nodemailer](https://www.npmjs.com/package/nodemailer) for example), and these will be noted in the docs for that appender. If you don't use those appenders, then you don't need the extra dependencies. 
+ +- [categoryFilter](categoryFilter.md) +- [console](console.md) +- [dateFile](dateFile.md) +- [file](file.md) +- [fileSync](fileSync.md) +- [logLevelFilter](logLevelFilter.md) +- [multiFile](multiFile.md) +- [multiprocess](multiprocess.md) +- [noLogFilter](noLogFilter.md) +- [recording](recording.md) +- [stderr](stderr.md) +- [stdout](stdout.md) +- [tcp](tcp.md) +- [tcp-server](tcp-server.md) + +## Optional Appenders + +The following appenders are supported by log4js, but are no longer distributed with log4js core from version 3 onwards. + +- [gelf](https://github.com/log4js-node/gelf) +- [hipchat](https://github.com/log4js-node/hipchat) +- [logFaces-HTTP](https://github.com/log4js-node/logFaces-HTTP) +- [logFaces-UDP](https://github.com/log4js-node/logFaces-UDP) +- [loggly](https://github.com/log4js-node/loggly) +- [logstashHTTP](https://github.com/log4js-node/logstashHTTP) +- [logstashUDP](https://github.com/log4js-node/logstashUDP) +- [mailgun](https://github.com/log4js-node/mailgun) +- [rabbitmq](https://github.com/log4js-node/rabbitmq) +- [redis](https://github.com/log4js-node/redis) +- [slack](https://github.com/log4js-node/slack) +- [smtp](https://github.com/log4js-node/smtp) + +For example, if you were previously using the gelf appender (`type: 'gelf'`) then you should add `@log4js-node/gelf` to your dependencies and change the type to `type: '@log4js-node/gelf'`. + +## Other Appenders + +These appenders are maintained by its own authors and may be useful for you: + +- [udp](https://github.com/iassasin/log4js-udp-appender) +- [cloudwatch](https://github.com/arch-group/log4js-appender-cloudwatch) +- [sentry](https://github.com/arch-group/log4js-appender-sentry) + +## Custom Appenders + +Log4js can load appenders from outside the core appenders. The `type` config value is used as a require path if no matching appender can be found. For example, the following configuration will attempt to load an appender from the module 'cheese/appender', passing the rest of the config for the appender to that module: + +```javascript +log4js.configure({ + appenders: { gouda: { type: "cheese/appender", flavour: "tasty" } }, + categories: { default: { appenders: ["gouda"], level: "debug" } }, +}); +``` + +Log4js checks the following places (in this order) for appenders based on the type value: + +1. The core appenders: `require('./appenders/' + type)` +2. node_modules: `require(type)` +3. relative to the main file of your application: `require(path.dirname(require.main.filename) + '/' + type)` +4. relative to the process' current working directory: `require(process.cwd() + '/' + type)` + +If you want to write your own appender, read the [documentation](writing-appenders.md) first. + +## Advanced configuration + +If you've got a custom appender of your own, or are using webpack (or some other bundler), you may find it easier to pass +in the appender module in the config instead of loading from the node.js require path. Here's an example: + +```javascript +const myAppenderModule = { + configure: (config, layouts, findAppender, levels) => { + /* ...your appender config... 
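       (as a general sketch: `configure` is expected to return the appender itself,
       i.e. a function that receives each loggingEvent and writes it out)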
*/ + }, +}; +log4js.configure({ + appenders: { custom: { type: myAppenderModule } }, + categories: { default: { appenders: ["custom"], level: "debug" } }, +}); +``` diff --git a/docs/assets/css/style.scss b/docs/assets/css/style.scss new file mode 100644 index 00000000..6cf088a4 --- /dev/null +++ b/docs/assets/css/style.scss @@ -0,0 +1,29 @@ +@import "{{ site.theme }}"; + +header ul { + display: block; + background-color: white; + border: none; + width: auto; + height: auto; + padding: 0; + list-style: disc; + clear: both; + margin-left: 20px; +} + +header li { + display: list-item; + width: auto; + border: none; + float: none; + height: auto; +} + +header ul a { + display: inline; + width: auto; + text-align: left; + color: #39c; + font-size: 14px; +} diff --git a/docs/categories.md b/docs/categories.md new file mode 100644 index 00000000..ed16e0de --- /dev/null +++ b/docs/categories.md @@ -0,0 +1,64 @@ +# Categories + +Categories are groups of log events. The category for log events is defined when you get a _Logger_ from log4js (`log4js.getLogger('somecategory')`). Log events with the same _category_ will go to the same _appenders_. + +## Default configuration + +When defining your appenders through a configuration, at least one category must be defined. + +```javascript +const log4js = require("log4js"); +log4js.configure({ + appenders: { + out: { type: "stdout" }, + app: { type: "file", filename: "application.log" }, + }, + categories: { + default: { appenders: ["out"], level: "trace" }, + app: { appenders: ["app"], level: "trace" }, + }, +}); + +const logger = log4js.getLogger(); +logger.trace("This will use the default category and go to stdout"); +const logToFile = log4js.getLogger("app"); +logToFile.trace("This will go to a file"); +``` + +## Categories inheritance + +Log4js supports a hierarchy for categories, using dots to separate layers - for example, log events in the category 'myapp.submodule' will use the level for 'myapp' if none is defined for 'myapp.submodule', and also any appenders defined for 'myapp'. +This behaviour can be disabled by setting inherit=false on the sub-category. + +```javascript +const log4js = require("log4js"); +log4js.configure({ + appenders: { + console: { type: "console" }, + app: { type: "file", filename: "application.log" }, + }, + categories: { + default: { appenders: ["console"], level: "trace" }, + catA: { appenders: ["console"], level: "error" }, + "catA.catB": { appenders: ["app"], level: "trace" }, + }, +}); + +const loggerA = log4js.getLogger("catA"); +loggerA.error("This will be written to console with log level ERROR"); +loggerA.trace("This will not be written"); +const loggerAB = log4js.getLogger("catA.catB"); +loggerAB.error( + "This will be written with log level ERROR to console and to a file" +); +loggerAB.trace( + "This will be written with log level TRACE to console and to a file" +); +``` + +Two categories are defined: + +- Log events with category 'catA' will go to appender 'console' only. +- Log events with category 'catA.catB' will go to appenders 'console' and 'app'. + +Appenders will see and log an event only if the category level is less than or equal to the event's level. diff --git a/docs/categoryFilter.md b/docs/categoryFilter.md new file mode 100644 index 00000000..e9c3aaee --- /dev/null +++ b/docs/categoryFilter.md @@ -0,0 +1,53 @@ +# Category Filter + +This is not strictly an appender - it wraps around another appender and stops log events from specific categories from being written to that appender. 
This could be useful when debugging your application, but you have one component that logs noisily, or is irrelevant to your investigation. + +## Configuration + +- `type` - `"categoryFilter"` +- `exclude` - `string | Array` - the category (or categories if you provide an array of values) that will be excluded from the appender. +- `appender` - `string` - the name of the appender to filter. + +## Example + +```javascript +log4js.configure({ + appenders: { + everything: { type: "file", filename: "all-the-logs.log" }, + "no-noise": { + type: "categoryFilter", + exclude: "noisy.component", + appender: "everything", + }, + }, + categories: { + default: { appenders: ["no-noise"], level: "debug" }, + }, +}); + +const logger = log4js.getLogger(); +const noisyLogger = log4js.getLogger("noisy.component"); +logger.debug("I will be logged in all-the-logs.log"); +noisyLogger.debug("I will not be logged."); +``` + +Note that you can achieve the same outcome without using the category filter, like this: + +```javascript +log4js.configure({ + appenders: { + everything: { type: "file", filename: "all-the-logs.log" }, + }, + categories: { + default: { appenders: ["everything"], level: "debug" }, + "noisy.component": { appenders: ["everything"], level: "off" }, + }, +}); + +const logger = log4js.getLogger(); +const noisyLogger = log4js.getLogger("noisy.component"); +logger.debug("I will be logged in all-the-logs.log"); +noisyLogger.debug("I will not be logged."); +``` + +Category filter becomes useful when you have many categories you want to exclude, passing them as an array. diff --git a/docs/clustering.md b/docs/clustering.md new file mode 100644 index 00000000..6696bdb6 --- /dev/null +++ b/docs/clustering.md @@ -0,0 +1,32 @@ +# Clustering / Multi-process Logging + +If you're running log4js in an application that uses [node's core cluster](https://nodejs.org/dist/latest-v8.x/docs/api/cluster.html) then log4js will transparently handle making sure the processes don't try to log at the same time. All logging is done on the master process, with the worker processes sending their log messages to the master via `process.send`. This ensures that you don't get multiple processes trying to write to the same file (or rotate the log files) at the same time. + +This can cause problems in some rare circumstances, if you're experiencing weird logging problems, then use the `disableClustering: true` option in your log4js configuration to have every process behave as if it were the master process. Be careful if you're logging to files. + +## I'm using PM2, but I'm not getting any logs! + +To get log4js working with [PM2](http://pm2.keymetrics.io), you'll need to install the [pm2-intercom](https://www.npmjs.com/package/pm2-intercom) module. + +```bash +pm2 install pm2-intercom +``` + +Then add the value `pm2: true` to your log4js configuration. If you're also using `node-config`, then you'll probably have renamed your `NODE_APP_INSTANCE` environment variable. If so, you'll also need to add `pm2InstanceVar: ''` where `` should be replaced with the new name you gave the instance environment variable. + +```javascript +log4js.configure({ + appenders: { out: { type: "stdout" } }, + categories: { default: { appenders: ["out"], level: "info" } }, + pm2: true, + pm2InstanceVar: "INSTANCE_ID", +}); +``` + +## I'm using Passenger, but I'm not getting any logs! + +[Passenger](https://www.phusionpassenger.com/library/) replaces the node.js core cluster module with a non-functional stub, so you won't see any output using log4js. 
To fix this, add `disableClustering: true` to your configuration. Again, be careful if you're logging to files. + +## I'm not using clustering/pm2/passenger but I do have multiple processes that I'd like to all log to the same place + +Ok, you probably want to look at the [tcp-server](tcp-server.md) and [tcp appender](tcp.md) documentation. diff --git a/docs/connect-logger.md b/docs/connect-logger.md new file mode 100644 index 00000000..6dd59398 --- /dev/null +++ b/docs/connect-logger.md @@ -0,0 +1,155 @@ +# Connect / Express Logger + +The connect/express logger was added to log4js by [danbell](https://github.com/danbell). This allows connect/express servers to log using log4js. See `example-connect-logger.js`. + +```javascript +var log4js = require("log4js"); +var express = require("express"); + +log4js.configure({ + appenders: { + console: { type: "console" }, + file: { type: "file", filename: "cheese.log" }, + }, + categories: { + cheese: { appenders: ["file"], level: "info" }, + default: { appenders: ["console"], level: "info" }, + }, +}); + +var logger = log4js.getLogger("cheese"); +var app = express(); +app.use(log4js.connectLogger(logger, { level: "info" })); +app.get("/", function (req, res) { + res.send("hello world"); +}); +app.listen(5000); +``` + +The log4js.connectLogger supports the passing of an options object that can be used to set the following: + +- log level +- log format string or function (the same as the connect/express logger) +- nolog expressions (represented as a string, regexp, array, or function(req, res)) +- status code rulesets + +For example: + +```javascript +app.use( + log4js.connectLogger(logger, { + level: log4js.levels.INFO, + format: ":method :url", + }) +); +``` + +or: + +```javascript +app.use( + log4js.connectLogger(logger, { + level: "auto", + // include the Express request ID in the logs + format: (req, res, format) => + format( + `:remote-addr - ${req.id} - ":method :url HTTP/:http-version" :status :content-length ":referrer" ":user-agent"` + ), + }) +); +``` + +When you request of POST, you want to log the request body parameter like JSON. +The log format function is very useful. +Please use log format function instead "tokens" property for use express's request or response. + +```javascript +app.use( + log4js.connectLogger(logger, { + level: "info", + format: (req, res, format) => + format(`:remote-addr :method :url ${JSON.stringify(req.body)}`), + }) +); +``` + +Added automatic level detection to connect-logger, depends on http status response, compatible with express 3.x and 4.x. + +- http responses 3xx, level = WARN +- http responses 4xx & 5xx, level = ERROR +- else, level = INFO + +```javascript +app.use(log4js.connectLogger(logger, { level: "auto" })); +``` + +The levels of returned status codes can be configured via status code rulesets. + +```javascript +app.use( + log4js.connectLogger(logger, { + level: "auto", + statusRules: [ + { from: 200, to: 299, level: "debug" }, + { codes: [303, 304], level: "info" }, + ], + }) +); +``` + +The log4js.connectLogger also supports a nolog option where you can specify a string, regexp, array, or function(req, res) to omit certain log messages. Example of 1.2 below. 
+ +```javascript +app.use( + log4js.connectLogger(logger, { + level: "auto", + format: ":method :url", + nolog: "\\.gif|\\.jpg$", + }) +); +``` + +or + +```javascript +app.use( + log4js.connectLogger(logger, { + level: "auto", + format: ":method :url", + nolog: (req, res) => res.statusCode < 400, + }) +); +``` + +The log4js.connectLogger can add a response of express to context if `context` flag is set to `true`. +Application can use it in layouts or appenders. + +In application: + +```javascript +app.use(log4js.connectLogger(logger, { context: true })); +``` + +In layout: + +```javascript +log4js.addLayout("customLayout", () => { + return (loggingEvent) => { + const res = loggingEvent.context.res; + return util.format( + ...loggingEvent.data, + res ? `status: ${res.statusCode}` : "" + ); + }; +}); +``` + +## Example nolog values + +| nolog value | Will Not Log | Will Log | +| --------------------------------- | --------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------ | +| `"\\.gif"` | http://example.com/hoge.gif http://example.com/hoge.gif?fuga | http://example.com/hoge.agif | +| `"\\.gif\|\\.jpg$"` | http://example.com/hoge.gif http://example.com/hoge.gif?fuga http://example.com/hoge.jpg?fuga | http://example.com/hoge.agif http://example.com/hoge.ajpg http://example.com/hoge.jpg?hoge | +| `"\\.(gif\|jpe?g\|png)$"` | http://example.com/hoge.gif http://example.com/hoge.jpeg | http://example.com/hoge.gif?uid=2 http://example.com/hoge.jpg?pid=3 | +| `/\.(gif\|jpe?g\|png)$/` | as above | as above | +| `["\\.jpg$", "\\.png", "\\.gif"]` | same as `"\\.jpg\|\\.png\|\\.gif"` | same as `"\\.jpg\|\\.png\|\\.gif"` | diff --git a/docs/console.md b/docs/console.md new file mode 100644 index 00000000..abce5ac3 --- /dev/null +++ b/docs/console.md @@ -0,0 +1,19 @@ +# Console Appender + +This appender uses node's console object to write log events. It can also be used in the browser, if you're using browserify or something similar. Be aware that writing a high volume of output to the console can make your application use a lot of memory. If you experience this problem, try switching to the [stdout](stdout.md) appender. + +# Configuration + +- `type` - `console` +- `layout` - `object` (optional, defaults to colouredLayout) - see [layouts](layouts.md) + +Note that all log events are output using `console.log` regardless of the event's level (so `ERROR` events will not be logged using `console.error`). + +# Example + +```javascript +log4js.configure({ + appenders: { console: { type: "console" } }, + categories: { default: { appenders: ["console"], level: "info" } }, +}); +``` diff --git a/docs/contrib-guidelines.md b/docs/contrib-guidelines.md new file mode 100644 index 00000000..67a55615 --- /dev/null +++ b/docs/contrib-guidelines.md @@ -0,0 +1,8 @@ +# Want to help? + +I love pull requests, and I need all the help I can get. However, there are a few rules to follow if you want a better chance of having your pull request merged: + +- Fork the repo, make a feature branch just for your changes +- On the branch, only commit changes for the feature you're adding. Each pull request should concentrate on a single change - don't mix multiple features. +- Your feature should be covered by tests. Run the tests with npm test. This is very important - without tests, your feature may be broken by subsequent changes and I may never know. 
Plus it's always nice to know that your changes work :-) +- Don't bump the npm version - yours may not be the only feature that makes it into a version, and you don't know when your pull request may get merged (the version may have changed by then). diff --git a/docs/contributors.md b/docs/contributors.md new file mode 100644 index 00000000..edac88f6 --- /dev/null +++ b/docs/contributors.md @@ -0,0 +1,9 @@ +# Contributors + +Many people have helped make log4js what it is today. Here's a list of everyone who has contributed to the code. There are lots of people who've helped by submitting bug reports or pull requests that I haven't merged, but I have used their ideas to implement a different way. Thanks to you all. This library also owes a huge amount to the [original log4js project](https://github.com/stritti/log4js). If you'd like to help out, take a look at the [contributor guidelines](contrib-guidelines.md). + + diff --git a/docs/dateFile.md b/docs/dateFile.md new file mode 100644 index 00000000..35eea3c4 --- /dev/null +++ b/docs/dateFile.md @@ -0,0 +1,81 @@ +# Date Rolling File Appender + +This is a file appender that rolls log files based on a configurable time, rather than the file size. When using the date file appender, you should also call `log4js.shutdown` when your application terminates, to ensure that any remaining asynchronous writes have finished. Although the date file appender uses the [streamroller](https://github.com/nomiddlename/streamroller) library, this is included as a dependency of log4js so you do not need to include it yourself. + +## Configuration + +- `type` - `"dateFile"` +- `filename` - `string` - the path of the file where you want your logs written. +- `pattern` - `string` (optional, defaults to `yyyy-MM-dd`) - the pattern to use to determine when to roll the logs. +- `layout` - (optional, defaults to basic layout) - see [layouts](layouts.md) + +Any other configuration parameters will be passed to the underlying [streamroller](https://github.com/nomiddlename/streamroller) implementation (see also node.js core file streams): + +- `encoding` - `string` (default "utf-8") +- `mode` - `integer` (default 0o600 - [node.js file modes](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_modes)) +- `flags` - `string` (default 'a' - [node.js file flags](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_system_flags)) +- `compress` - `boolean` (default false) - compress the backup files using gzip (backup files will have `.gz` extension) +- `keepFileExt` - `boolean` (default false) - preserve the file extension when rotating log files (`file.log` becomes `file.2017-05-30.log` instead of `file.log.2017-05-30`). +- `fileNameSep` - `string` (default '.') - the filename separator when rolling. e.g.: abc.log`.`2013-08-30 or abc`.`2013-08-30.log (keepFileExt) +- `alwaysIncludePattern` - `boolean` (default false) - include the pattern in the name of the current log file. +- `numBackups` - `integer` (default 1) - the number of old files that matches the pattern to keep (excluding the hot file). + +The `pattern` is used to determine when the current log file should be renamed and a new log file created. For example, with a filename of 'cheese.log', and the default pattern of `.yyyy-MM-dd` - on startup this will result in a file called `cheese.log` being created and written to until the next write after midnight. When this happens, `cheese.log` will be renamed to `cheese.log.2017-04-30` and a new `cheese.log` file created. 
The appender uses the [date-format](https://github.com/nomiddlename/date-format) library to parse the `pattern`, and any of the valid formats can be used. Also note that there is no timer controlling the log rolling - changes in the pattern are determined on every log write. If no writes occur, then no log rolling will happen. If your application logs infrequently this could result in no log file being written for a particular time period. + +Note that, from version 4.x of log4js onwards, the file appender can take any of the options for the [file appender](file.md) as well. So you could roll files by both date and size. + +## Example (default daily log rolling) + +```javascript +log4js.configure({ + appenders: { + everything: { type: "dateFile", filename: "all-the-logs.log" }, + }, + categories: { + default: { appenders: ["everything"], level: "debug" }, + }, +}); +``` + +This example will result in files being rolled every day. The initial file will be `all-the-logs.log`, with the daily backups being `all-the-logs.log.2017-04-30`, etc. + +## Example with hourly log rolling (and compressed backups) + +```javascript +log4js.configure({ + appenders: { + everything: { + type: "dateFile", + filename: "all-the-logs.log", + pattern: "yyyy-MM-dd-hh", + compress: true, + }, + }, + categories: { + default: { appenders: ["everything"], level: "debug" }, + }, +}); +``` + +This will result in one current log file (`all-the-logs.log`). Every hour this file will be compressed and renamed to `all-the-logs.log.2017-04-30-08.gz` (for example) and a new `all-the-logs.log` created. + +## Memory usage + +If your application logs a large volume of messages, and find memory usage increasing due to buffering log messages before being written to a file, then you can listen for "log4js:pause" events emitted by the file appenders. Your application should stop logging when it receives one of these events with a value of `true` and resume when it receives an event with a value of `false`. + +```javascript +log4js.configure({ + appenders: { + output: { type: "dateFile", filename: "out.log" }, + }, + categories: { default: { appenders: ["output"], level: "debug" } }, +}); + +let paused = false; +process.on("log4js:pause", (value) => (paused = value)); + +const logger = log4js.getLogger(); +while (!paused) { + logger.info("I'm logging, but I will stop once we start buffering"); +} +``` diff --git a/docs/faq.md b/docs/faq.md new file mode 100644 index 00000000..667f8691 --- /dev/null +++ b/docs/faq.md @@ -0,0 +1,79 @@ +# Frequently Asked Questions + +## I want errors to go to a special file, but still want everything written to another file - how do I do that? + +You'll need to use the [logLevelFilter](logLevelFilter.md). Here's an example configuration: + +```javascript +log4js.configure({ + appenders: { + everything: { type: "file", filename: "all-the-logs.log" }, + emergencies: { type: "file", filename: "oh-no-not-again.log" }, + "just-errors": { + type: "logLevelFilter", + appender: "emergencies", + level: "error", + }, + }, + categories: { + default: { appenders: ["just-errors", "everything"], level: "debug" }, + }, +}); + +const logger = log4js.getLogger(); +logger.debug("This goes to all-the-logs.log"); +logger.info("As does this."); +logger.error("This goes to all-the-logs.log and oh-no-not-again.log"); +``` + +## I want to reload the configuration when I change my config file - how do I do that? + +Previous versions of log4js used to watch for changes in the configuration file and reload when it changed. 
It didn't always work well, sometimes leaving file handles or sockets open. This feature was removed in version 2.x. As a replacement, I'd suggest using a library like [watchr](https://www.npmjs.com/package/watchr) to notify you of file changes. Then you can call `log4js.shutdown` followed by `log4js.configure` again. + +## What happened to `replaceConsole` - it doesn't work any more? + +I removed `replaceConsole` - it caused a few weird errors, and I wasn't entirely comfortable with messing around with a core part of node. If you still want to do this, then code like this should do the trick: + +```javascript +log4js.configure(...); // set up your categories and appenders +const logger = log4js.getLogger('console'); +console.log = logger.info.bind(logger); // do the same for others - console.debug, etc. +``` + +## I'm using pm2/passenger/some other third thing and I'm not getting any logs! + +Take a look at the [clustering](clustering.md) docs, they should help you out. + +## NPM complains about nodemailer being deprecated, what should I do? + +Nodemailer version 4.0.1 (the not-deprecated version) requires a node version >= 6, but log4js supports node versions >= 4. So until I stop supporting node versions less than 6 I can't update the dependency. It's only an optional dependency anyway, so you're free to install nodemailer@4.0.1 if you want - as far as I know it should work, the API looks the same to me. If you know that the smtp appender definitely doesn't work with nodemailer v4, then please create an issue with some details about the problem. + +## I want line numbers in my logs! + +You need to enable call stack for the category, and use pattern layout to output the values. e.g. + +```javascript +const log4js = require("log4js"); +log4js.configure({ + appenders: { + out: { + type: "stdout", + layout: { + type: "pattern", + pattern: "%d %p %c %f:%l %m%n", + }, + }, + }, + categories: { + default: { appenders: ["out"], level: "info", enableCallStack: true }, + }, +}); +const logger = log4js.getLogger("thing"); +logger.info("this should give me a line number now"); +``` + +Would output something like this: + +```bash +2019-05-22T08:41:07.312 INFO thing index.js:16 this should give me a line number now +``` diff --git a/docs/file.md b/docs/file.md new file mode 100644 index 00000000..838b197c --- /dev/null +++ b/docs/file.md @@ -0,0 +1,84 @@ +# File Appender + +The file appender writes log events to a file. It supports an optional maximum file size, and will keep a configurable number of backups. When using the file appender, you should also call `log4js.shutdown` when your application terminates, to ensure that any remaining asynchronous writes have finished. Although the file appender uses the [streamroller](https://github.com/nomiddlename/streamroller) library, this is included as a dependency of log4js so you do not need to include it yourself. + +## Configuration + +- `type` - `"file"` +- `filename` - `string` - the path of the file where you want your logs written. +- `maxLogSize` - `integer` (optional, defaults to undefined) - the maximum size (in bytes) for the log file. If not specified or 0, then no log rolling will happen. + `maxLogSize` can also accept `string` with the size suffixes: **_K_**, **_M_**, **_G_** such as `1K`, `1M`, `1G`. +- `backups` - `integer` (optional, defaults to 5) - the number of old log files to keep during log rolling (excluding the hot file). 
+- `layout` - (optional, defaults to basic layout) - see [layouts](layouts.md) +- `removeColor` - `boolean` (optional, defaults to false) - remove embedded ANSI color sequence + +Any other configuration parameters will be passed to the underlying [streamroller](https://github.com/nomiddlename/streamroller) implementation (see also node.js core file streams): + +- `encoding` - `string` (default "utf-8") +- `mode` - `integer` (default 0o600 - [node.js file modes](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_modes)) +- `flags` - `string` (default 'a' - [node.js file flags](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_system_flags)) +- `compress` - `boolean` (default false) - compress the backup files using gzip (backup files will have `.gz` extension) +- `keepFileExt` - `boolean` (default false) - preserve the file extension when rotating log files (`file.log` becomes `file.1.log` instead of `file.log.1`). +- `fileNameSep` - `string` (default '.') - the filename separator when rolling. e.g.: abc.log`.`1 or abc`.`1.log (keepFileExt) + +Note that, from version 4.x of log4js onwards, the file appender can take any of the options for the [dateFile appender](dateFile.md) as well. So you could roll files by both date and size. + +## Example + +```javascript +log4js.configure({ + appenders: { + everything: { type: "file", filename: "all-the-logs.log" }, + }, + categories: { + default: { appenders: ["everything"], level: "debug" }, + }, +}); + +const logger = log4js.getLogger(); +logger.debug("I will be logged in all-the-logs.log"); +``` + +This example will result in a single log file (`all-the-logs.log`) containing the log messages. + +## Example with log rolling (and compressed backups) + +```javascript +log4js.configure({ + appenders: { + everything: { + type: "file", + filename: "all-the-logs.log", + maxLogSize: 10485760, + backups: 3, + compress: true, + }, + }, + categories: { + default: { appenders: ["everything"], level: "debug" }, + }, +}); +``` + +This will result in one current log file (`all-the-logs.log`). When that reaches 10Mb in size, it will be renamed and compressed to `all-the-logs.log.1.gz` and a new file opened called `all-the-logs.log`. When `all-the-logs.log` reaches 10Mb again, then `all-the-logs.log.1.gz` will be renamed to `all-the-logs.log.2.gz`, and so on. + +## Memory usage + +If your application logs a large volume of messages, and find memory usage increasing due to buffering log messages before being written to a file, then you can listen for "log4js:pause" events emitted by the file appenders. Your application should stop logging when it receives one of these events with a value of `true` and resume when it receives an event with a value of `false`. + +```javascript +log4js.configure({ + appenders: { + output: { type: "file", filename: "out.log" }, + }, + categories: { default: { appenders: ["output"], level: "debug" } }, +}); + +let paused = false; +process.on("log4js:pause", (value) => (paused = value)); + +const logger = log4js.getLogger(); +while (!paused) { + logger.info("I'm logging, but I will stop once we start buffering"); +} +``` diff --git a/docs/fileSync.md b/docs/fileSync.md new file mode 100644 index 00000000..a3ca50fc --- /dev/null +++ b/docs/fileSync.md @@ -0,0 +1,56 @@ +# Synchronous File Appender + +The sync file appender writes log events to a file, the only difference to the normal file appender is that all the writes are synchronous. 
This can make writing tests easier, or in situations where you need an absolute guarantee that a log message has been written to the file. Making synchronous I/O calls does mean you lose a lot of the benefits of using node.js though. It supports an optional maximum file size, and will keep a configurable number of backups. Note that the synchronous file appender, unlike the asynchronous version, does not support compressing the backup files. + +## Configuration + +- `type` - `"fileSync"` +- `filename` - `string` - the path of the file where you want your logs written. +- `maxLogSize` - `integer` (optional, defaults to undefined) - the maximum size (in bytes) for the log file. If not specified or 0, then no log rolling will happen. + `maxLogSize` can also accept `string` with the size suffixes: **_K_**, **_M_**, **_G_** such as `1K`, `1M`, `1G`. +- `backups` - `integer` (optional, defaults to 5) - the number of old log files to keep during log rolling (excluding the hot file). +- `layout` - (optional, defaults to basic layout) - see [layouts](layouts.md) + +Any other configuration parameters will be passed to the underlying node.js core stream implementation: + +- `encoding` - `string` (default "utf-8") +- `mode` - `integer` (default 0o600 - [node.js file modes](https://nodejs.org/dist/latest-v12.x/docs/api/fs.html#fs_file_modes)) +- `flags` - `string` (default 'a') + +## Example + +```javascript +log4js.configure({ + appenders: { + everything: { type: "fileSync", filename: "all-the-logs.log" }, + }, + categories: { + default: { appenders: ["everything"], level: "debug" }, + }, +}); + +const logger = log4js.getLogger(); +logger.debug("I will be logged in all-the-logs.log"); +``` + +This example will result in a single log file (`all-the-logs.log`) containing the log messages. + +## Example with log rolling + +```javascript +log4js.configure({ + appenders: { + everything: { + type: "file", + filename: "all-the-logs.log", + maxLogSize: 10458760, + backups: 3, + }, + }, + categories: { + default: { appenders: ["everything"], level: "debug" }, + }, +}); +``` + +This will result in one current log file (`all-the-logs.log`). When that reaches 10Mb in size, it will be renamed and compressed to `all-the-logs.log.1.gz` and a new file opened called `all-the-logs.log`. When `all-the-logs.log` reaches 10Mb again, then `all-the-logs.log.1.gz` will be renamed to `all-the-logs.log.2.gz`, and so on. diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 00000000..78da762d --- /dev/null +++ b/docs/index.md @@ -0,0 +1,57 @@ +# log4js-node + +This is a conversion of the [log4js](https://github.com/stritti/log4js) +framework to work with [node](http://nodejs.org). I started out just stripping out the browser-specific code and tidying up some of the javascript to work better in node. It grew from there. Although it's got a similar name to the Java library [log4j](https://logging.apache.org/log4j/2.x/), thinking that it will behave the same way will only bring you sorrow and confusion. + +[Changes in version 3.x](v3-changes.md) + +## Migrating from log4js < v2.x? + +There have been a few changes between log4js 1.x and 2.x (and 0.x too). You should probably read this [migration guide](migration-guide.md) if things aren't working. 
+ +## Features + +- coloured console logging to [stdout](stdout.md) or [stderr](stderr.md) +- [file appender](file.md), with configurable log rolling based on file size or [date](dateFile.md) +- [SMTP appender](https://github.com/log4js-node/smtp) +- [GELF appender](https://github.com/log4js-node/gelf) +- [Loggly appender](https://github.com/log4js-node/loggly) +- [Logstash UDP appender](https://github.com/log4js-node/logstashUDP) +- logFaces ([UDP](https://github.com/log4js-node/logFaces-UDP) and [HTTP](https://github.com/log4js-node/logFaces-HTTP)) appender +- [TCP appender](tcp.md) (useful when you've got multiple servers but want to centralise logging) +- a [logger for connect/express](connect-logger.md) servers +- configurable log message [layout/patterns](layouts.md) +- different log levels for different log categories (make some parts of your app log as DEBUG, others only ERRORS, etc.) +- built-in support for logging with node core's `cluster` module +- third-party [InfluxDB appender](https://github.com/rnd-debug/log4js-influxdb-appender) + +## Installation + +```bash +npm install log4js +``` + +## Usage + +Minimalist version: + +```javascript +var log4js = require("log4js"); +var logger = log4js.getLogger(); +logger.level = "debug"; // default level is OFF - which means no logs at all. +logger.debug("Some debug messages"); +``` + +## Clustering + +If you use node's cluster, or passenger, or pm2, then you should read this [clustering guide](clustering.md) + +## Note for library makers + +If you're writing a library and would like to include support for log4js, without introducing a dependency headache for your users, take a look at [log4js-api](https://github.com/log4js-node/log4js-api). + +## License + +The original log4js was distributed under the Apache 2.0 License, and so is this. I've tried to +keep the original copyright and author credits in place, except in sections that I have rewritten +extensively. diff --git a/docs/layouts.md b/docs/layouts.md new file mode 100644 index 00000000..1e11e2ea --- /dev/null +++ b/docs/layouts.md @@ -0,0 +1,256 @@ +# Layouts + +Layouts are functions used by appenders to format log events for output. They take a log event as an argument and return a string. Log4js comes with several appenders built-in, and provides ways to create your own if these are not suitable. + +For most use cases you will not need to configure layouts - there are some appenders which do not need layouts defined (for example, [logFaces-UDP](https://github.com/log4js-node/logFaces-UDP)); all the appenders that use layouts will have a sensible default defined. + +## Configuration + +Most appender configuration will take a field called `layout`, which is an object - typically with a single field `type` which is the name of a layout defined below. Some layouts require extra configuration options, which should be included in the same object. + +## Example + +```javascript +log4js.configure({ + appenders: { out: { type: "stdout", layout: { type: "basic" } } }, + categories: { default: { appenders: ["out"], level: "info" } }, +}); +``` + +This configuration replaces the [stdout](stdout.md) appender's default `coloured` layout with `basic` layout. + +# Built-in Layouts + +## Basic + +- `type` - `basic` + +Basic layout will output the timestamp, level, category, followed by the formatted log event data. 
+ +## Example + +```javascript +log4js.configure({ + appenders: { out: { type: "stdout", layout: { type: "basic" } } }, + categories: { default: { appenders: ["out"], level: "info" } }, +}); +const logger = log4js.getLogger("cheese"); +logger.error("Cheese is too ripe!"); +``` + +This will output: + +``` +[2017-03-30 07:57:00.113] [ERROR] cheese - Cheese is too ripe! +``` + +## Coloured + +- `type` - `coloured` (or `colored`) + +This layout is the same as `basic`, except that the timestamp, level and category will be coloured according to the log event's level (if your terminal/file supports it - if you see some weird characters in your output and no colour then you should probably switch to `basic`). The colours used are: + +- `TRACE` - 'blue' +- `DEBUG` - 'cyan' +- `INFO` - 'green' +- `WARN` - 'yellow' +- `ERROR` - 'red' +- `FATAL` - 'magenta' + +## Message Pass-Through + +- `type` - `messagePassThrough` + +This layout just formats the log event data, and does not output a timestamp, level or category. It is typically used in appenders that serialise the events using a specific format (e.g. [gelf](https://github.com/log4js-node/gelf)). + +## Example + +```javascript +log4js.configure({ + appenders: { + out: { type: "stdout", layout: { type: "messagePassThrough" } }, + }, + categories: { default: { appenders: ["out"], level: "info" } }, +}); +const logger = log4js.getLogger("cheese"); +const cheeseName = "gouda"; +logger.error("Cheese is too ripe! Cheese was: ", cheeseName); +``` + +This will output: + +``` +Cheese is too ripe! Cheese was: gouda +``` + +## Dummy + +- `type` - `dummy` + +This layout only outputs the first value in the log event's data. It was added for the [logstashUDP](https://github.com/log4js-node/logstashUDP) appender, and I'm not sure there's much use for it outside that. + +## Example + +```javascript +log4js.configure({ + appenders: { out: { type: "stdout", layout: { type: "dummy" } } }, + categories: { default: { appenders: ["out"], level: "info" } }, +}); +const logger = log4js.getLogger("cheese"); +const cheeseName = "gouda"; +logger.error("Cheese is too ripe! Cheese was: ", cheeseName); +``` + +This will output: + +``` +Cheese is too ripe! Cheese was: +``` + +## Pattern + +- `type` - `pattern` +- `pattern` - `string` - specifier for the output format, using placeholders as described below +- `tokens` - `object` (optional) - user-defined tokens to be used in the pattern + +## Pattern format + +The pattern string can contain any characters, but sequences beginning with `%` will be replaced with values taken from the log event, and other environmental values. +Format for specifiers is `%[padding].[truncation][field]{[format]}` - padding and truncation are optional, and format only applies to a few tokens (notably, date). Both padding and truncation values can be negative. + +- Positive truncation - truncate the string starting from the beginning +- Negative truncation - truncate the string starting from the end of the string +- Positive padding - left pad the string to make it this length, if the string is longer than the padding value then nothing happens +- Negative padding - right pad the string to make it this length, if the string is longer than the padding value then nothing happens + To make fixed-width columns in your log output, set padding and truncation to the same size (they don't have to have the same sign though, you could have right truncated, left padded columns that are always 10 characters wide with a pattern like "%10.-10m"). + +e.g. 
%5.10p - left pad the log level by up to 5 characters, keep the whole string to a max length of 10. +So, for a log level of INFO the output would be " INFO", for DEBUG it would be "DEBUG" and for a (custom) log level of CATASTROPHIC it would be "CATASTROPH". + +Fields can be any of: + +- `%r` time in toLocaleTimeString format +- `%p` log level +- `%c` log category +- `%h` hostname +- `%m` log data +- `%m{l}` where l is an integer, log data.slice(l) +- `%m{l,u}` where l and u are integers, log data.slice(l, u) +- `%d` date, formatted - default is `ISO8601`, format options are: `ISO8601`, `ISO8601_WITH_TZ_OFFSET`, `ABSOLUTETIME`, `DATETIME`, or any string compatible with the [date-format](https://www.npmjs.com/package/date-format) library. e.g. `%d{DATETIME}`, `%d{yyyy/MM/dd-hh.mm.ss}` +- `%%` % - for when you want a literal `%` in your output +- `%n` newline +- `%z` process id (from `process.pid`) +- `%f` full path of filename (requires `enableCallStack: true` on the category, see [configuration object](api.md)) +- `%f{depth}` path's depth let you chose to have only filename (`%f{1}`) or a chosen number of directories +- `%l` line number (requires `enableCallStack: true` on the category, see [configuration object](api.md)) +- `%o` column position (requires `enableCallStack: true` on the category, see [configuration object](api.md)) +- `%s` call stack (requires `enableCallStack: true` on the category, see [configuration object](api.md)) +- `%C` class name (requires `enableCallStack: true` on the category, see [configuration object](api.md) and [#1316](https://github.com/log4js-node/log4js-node/pull/1316)) +- `%M` method or function name (requires `enableCallStack: true` on the category, see [configuration object](api.md) and [#1316](https://github.com/log4js-node/log4js-node/pull/1316)) +- `%A` method or function alias (requires `enableCallStack: true` on the category, see [configuration object](api.md) and [#1316](https://github.com/log4js-node/log4js-node/pull/1316)) +- `%F` fully qualified caller name (requires `enableCallStack: true` on the category, see [configuration object](api.md) and [#1316](https://github.com/log4js-node/log4js-node/pull/1316)) +- `%x{}` add dynamic tokens to your log. Tokens are specified in the tokens parameter. +- `%X{}` add values from the Logger context. Tokens are keys into the context values. +- `%[` start a coloured block (colour will be taken from the log level, similar to `colouredLayout`) +- `%]` end a coloured block + +## Tokens + +User-defined tokens can be either a string or a function. Functions will be passed the log event, and should return a string. For example, you could define a custom token that outputs the log event's context value for 'user' like so: + +```javascript +log4js.configure({ + appenders: { + out: { + type: "stdout", + layout: { + type: "pattern", + pattern: "%d %p %c %x{user} %m%n", + tokens: { + user: function (logEvent) { + return AuthLibrary.currentUser(); + }, + }, + }, + }, + }, + categories: { default: { appenders: ["out"], level: "info" } }, +}); +const logger = log4js.getLogger(); +logger.info("doing something."); +``` + +This would output: + +``` +2017-06-01 08:32:56.283 INFO default charlie doing something. +``` + +You can also use the Logger context to store tokens (sometimes called Nested Diagnostic Context, or Mapped Diagnostic Context) and use them in your layouts. 
+ +```javascript +log4js.configure({ + appenders: { + out: { + type: "stdout", + layout: { + type: "pattern", + pattern: "%d %p %c %X{user} %m%n", + }, + }, + }, + categories: { default: { appenders: ["out"], level: "info" } }, +}); +const logger = log4js.getLogger(); +logger.addContext("user", "charlie"); +logger.info("doing something."); +``` + +This would output: + +``` +2017-06-01 08:32:56.283 INFO default charlie doing something. +``` + +Note that you can also add functions to the Logger Context, and they will be passed the logEvent as well. + +# Adding your own layouts + +You can add your own layouts by calling `log4js.addLayout(type, fn)` before calling `log4js.configure`. `type` is the label you want to use to refer to your layout in appender configuration. `fn` is a function that takes a single object argument, which will contain the configuration for the layout instance, and returns a layout function. A layout function takes a log event argument and returns a string (usually, although you could return anything as long as the appender knows what to do with it). + +## Custom Layout Example + +This example can also be found in examples/custom-layout.js. + +```javascript +const log4js = require("log4js"); + +log4js.addLayout("json", function (config) { + return function (logEvent) { + return JSON.stringify(logEvent) + config.separator; + }; +}); + +log4js.configure({ + appenders: { + out: { type: "stdout", layout: { type: "json", separator: "," } }, + }, + categories: { + default: { appenders: ["out"], level: "info" }, + }, +}); + +const logger = log4js.getLogger("json-test"); +logger.info("this is just a test"); +logger.error("of a custom appender"); +logger.warn("that outputs json"); +log4js.shutdown(() => {}); +``` + +This example outputs the following: + +```javascript +{"startTime":"2017-06-05T22:23:08.479Z","categoryName":"json-test","data":["this is just a test"],"level":{"level":20000,"levelStr":"INFO"},"context":{}}, +{"startTime":"2017-06-05T22:23:08.483Z","categoryName":"json-test","data":["of a custom appender"],"level":{"level":40000,"levelStr":"ERROR"},"context":{}}, +{"startTime":"2017-06-05T22:23:08.483Z","categoryName":"json-test","data":["that outputs json"],"level":{"level":30000,"levelStr":"WARN"},"context":{}}, +``` diff --git a/docs/logLevelFilter.md b/docs/logLevelFilter.md new file mode 100644 index 00000000..1c300387 --- /dev/null +++ b/docs/logLevelFilter.md @@ -0,0 +1,33 @@ +# Log Level Filter + +The log level filter allows you to restrict the log events that an appender will record based on the level of those events. This is useful when you want most logs to go to a file, but errors to be sent as emails, for example. The filter works by wrapping around another appender and controlling which events get sent to it. + +## Configuration + +- `type` - `logLevelFilter` +- `appender` - `string` - the name of an appender, defined in the same configuration, that you want to filter +- `level` - `string` - the minimum level of event to allow through the filter +- `maxLevel` - `string` (optional, defaults to `FATAL`) - the maximum level of event to allow through the filter + +If an event's level is greater than or equal to `level` and less than or equal to `maxLevel` then it will be sent to the appender. 
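+
+For example, combining `level` and `maxLevel` selects a band of levels. The sketch below (the appender and file names are only illustrative) lets only `warn` and `error` events through to the wrapped file appender, so `fatal` events could be routed elsewhere:
+
+```javascript
+log4js.configure({
+  appenders: {
+    warningsFile: { type: "file", filename: "warnings.log" },
+    "warn-and-error-only": {
+      type: "logLevelFilter",
+      appender: "warningsFile",
+      level: "warn",
+      maxLevel: "error",
+    },
+  },
+  categories: {
+    default: { appenders: ["warn-and-error-only"], level: "debug" },
+  },
+});
+```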
+
+## Example
+
+```javascript
+log4js.configure({
+  appenders: {
+    everything: { type: "file", filename: "all-the-logs.log" },
+    emergencies: { type: "file", filename: "panic-now.log" },
+    "just-errors": {
+      type: "logLevelFilter",
+      appender: "emergencies",
+      level: "error",
+    },
+  },
+  categories: {
+    default: { appenders: ["just-errors", "everything"], level: "debug" },
+  },
+});
+```
+
+Log events of `debug`, `info`, `error`, and `fatal` will go to `all-the-logs.log`. Events of `error` and `fatal` will also go to `panic-now.log`.
diff --git a/docs/migration-guide.md b/docs/migration-guide.md
new file mode 100644
index 00000000..c565e81f
--- /dev/null
+++ b/docs/migration-guide.md
@@ -0,0 +1,75 @@
+# Migrating from log4js versions older than 2.x
+
+## Configuration
+
+If you try to use your v1 configuration with v2 code, you'll most likely get an error that says something like 'must have property "appenders" of type object'. The format of the configuration object has changed (see the [api](api.md) docs for details). The main changes are a need for you to name your appenders, and you also have to define the default category. For example, if your v1 config looked like this:
+
+```javascript
+{
+  appenders: [
+    { type: "console" },
+    {
+      type: "dateFile",
+      filename: "logs/task",
+      pattern: "-dd.log",
+      alwaysIncludePattern: true,
+      category: "task",
+    },
+  ]
+}
+```
+
+Then your v2 config should be something like this:
+
+```javascript
+{
+  appenders: {
+    out: { type: 'console' },
+    task: {
+      type: 'dateFile',
+      filename: 'logs/task',
+      pattern: '-dd.log',
+      alwaysIncludePattern: true
+    }
+  },
+  categories: {
+    default: { appenders: [ 'out' ], level: 'info' },
+    task: { appenders: [ 'task' ], level: 'info' }
+  }
+}
+```
+
+The functions to define the configuration programmatically have been removed (`addAppender`, `loadAppender`, etc). All configuration should now be done through the single `configure` function, passing in a filename or object.
+
+## Console replacement
+
+V1 used to allow you to replace the node.js console functions with versions that would log to a log4js appender. This used to cause some weird errors, so I decided it was better to remove it from the log4js core functionality. If you still want to do this, you can replicate the behaviour with code similar to this:
+
+```javascript
+log4js.configure(...); // set up your categories and appenders
+const logger = log4js.getLogger('console'); // any category will work
+console.log = logger.info.bind(logger); // do the same for others - console.debug, etc.
+```
+
+## Config Reloading
+
+Previous versions of log4js used to watch for changes in the configuration file and reload when it changed. It didn't always work well, sometimes leaving file handles or sockets open. This feature was removed in version 2.x. As a replacement, I'd suggest using a library like [watchr](https://www.npmjs.com/package/watchr) to notify you of file changes. Then you can call `log4js.shutdown` followed by `log4js.configure` again.
+
+## Appenders
+
+If you have written your own custom appenders, they will not work without modification in v2. See the guide to [writing appenders](writing-appenders.md) for details on how appenders work in 2.x. Note that if you want to write your appender to work with both 1.x and 2.x, then you can tell what version you're running in by examining the number of arguments passed to the `configure` function of your appender: 2 arguments means v1, 4 arguments means v2.
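+
+As a rough sketch (this is not code from any particular log4js appender), the version check could look like this, with the real appender logic elided:
+
+```javascript
+function configure(config, layouts, findAppender, levels) {
+  if (arguments.length === 2) {
+    // two arguments - we are being loaded by log4js v1
+    // ... set the appender up using the v1 conventions ...
+  } else {
+    // four arguments - we are being loaded by log4js v2
+    // ... use layouts, findAppender and levels as described in writing-appenders.md ...
+  }
+  // both branches must return the appender function; this placeholder just writes JSON
+  return (loggingEvent) => {
+    process.stdout.write(`${JSON.stringify(loggingEvent)}\n`);
+  };
+}
+
+exports.configure = configure;
+```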
+ +All the core appenders have been upgraded to work with v2, except for the clustered appender which has been removed. The core log4js code handles cluster mode transparently. + +The `logFaces` appender was split into two versions to make testing easier and the code simpler; one has HTTP support, the other UDP. + +## Exit listeners + +Some appenders used to define their own `exit` listeners, and it was never clear whose responsibility it was to clean up resources. Now log4js does not define any `exit` listeners. Instead your application should register an `exit` listener, and call `log4js.shutdown` to be sure that all log messages get written before your application terminates. + +## New Features + +- MDC contexts - you can now add key-value pairs to a logger (for grouping all log messages from a particular user, for example). Support for these values exists in the [pattern layout](layouts.md), the logFaces ([UDP](https://github.com/log4js-node/logFaces-UDP) and [HTTP](https://github.com/log4js-node/logFaces-HTTP)) appender, and the [multi-file appender](multiFile.md). +- Automatic cluster support - log4js now handles clusters transparently +- Custom levels - you can define your own log levels in the configuration object, including the colours +- Improved performance - several changes have been made to improve performance, especially for the file appenders. diff --git a/docs/multiFile.md b/docs/multiFile.md new file mode 100644 index 00000000..c69a04c6 --- /dev/null +++ b/docs/multiFile.md @@ -0,0 +1,65 @@ +# MultiFile Appender + +The multiFile appender can be used to dynamically write logs to multiple files, based on a property of the logging event. Use this as a way to write separate log files for each category when the number of categories is unknown, for instance. It creates [file](file.md) appenders under the hood, so all the options that apply to that appender (apart from filename) can be used with this one, allowing the log files to be rotated and capped at a certain size. + +## Configuration + +- `type` - `"multiFile"` +- `base` - `string` - the base part of the generated log filename +- `property` - `string` - the value to use to split files (see below). +- `extension` - `string` - the suffix for the generated log filename. +- `timeout` - `integer` - optional activity timeout in ms after which the file will be closed. + +All other properties will be passed to the created [file](file.md) appenders. For the property value, `categoryName` is probably the most useful - although you could use `pid` or `level`. If the property is not found then the appender will look for the value in the context map. If that fails, then the logger will not output the logging event, without an error. This is to allow for dynamic properties which may not exist for all log messages. + +## Example (split on category) + +```javascript +log4js.configure({ + appenders: { + multi: { + type: "multiFile", + base: "logs/", + property: "categoryName", + extension: ".log", + }, + }, + categories: { + default: { appenders: ["multi"], level: "debug" }, + }, +}); + +const logger = log4js.getLogger(); +logger.debug("I will be logged in logs/default.log"); +const otherLogger = log4js.getLogger("cheese"); +otherLogger.info("Cheese is cheddar - this will be logged in logs/cheese.log"); +``` + +This example will result in two log files (`logs/default.log` and `logs/cheese.log`) containing the log messages. 
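+
+If you don't want idle file handles kept open, the optional `timeout` can be added to the same configuration (the value of 60000 below - one minute - is only illustrative):
+
+```javascript
+log4js.configure({
+  appenders: {
+    multi: {
+      type: "multiFile",
+      base: "logs/",
+      property: "categoryName",
+      extension: ".log",
+      // close a category's file after 60s with no logging activity
+      timeout: 60000,
+    },
+  },
+  categories: {
+    default: { appenders: ["multi"], level: "debug" },
+  },
+});
+```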
+ +## Example with log rolling (and compressed backups) + +```javascript +log4js.configure({ + appenders: { + everything: { + type: "multiFile", + base: "logs/", + property: "userID", + extension: ".log", + maxLogSize: 10485760, + backups: 3, + compress: true, + }, + }, + categories: { + default: { appenders: ["everything"], level: "debug" }, + }, +}); + +const userLogger = log4js.getLogger("user"); +userLogger.addContext("userID", user.getID()); +userLogger.info("this user just logged in"); +``` + +This will result in one log file (`logs/u12345.log`), capped at 10Mb in size, with three backups kept when rolling the file. If more users were logged, each user would get their own files, and their own backups. diff --git a/docs/multiprocess.md b/docs/multiprocess.md new file mode 100644 index 00000000..f9a1fd7c --- /dev/null +++ b/docs/multiprocess.md @@ -0,0 +1,51 @@ +# Multiprocess Appender + +_You probably want to use the [tcp server](tcp-server.md) or [tcp appender](tcp.md) instead of this - they are more flexible_ + +_Note that if you're just using node core's `cluster` module then you don't need to use this appender - log4js will handle logging within the cluster transparently._ + +The multiprocess appender sends log events to a master server over TCP sockets. It can be used as a simple way to centralise logging when you have multiple servers or processes. It uses the node.js core networking modules, and so does not require any extra dependencies. Remember to call `log4js.shutdown` when your application terminates, so that the sockets get closed cleanly. + +## Configuration + +- `type` - `multiprocess` +- `mode` - `master|worker` - controls whether the appender listens for log events sent over the network, or is responsible for serialising events and sending them to a server. +- `appender` - `string` (only needed if `mode` == `master`)- the name of the appender to send the log events to +- `loggerPort` - `integer` (optional, defaults to `5000`) - the port to listen on, or send to +- `loggerHost` - `string` (optional, defaults to `localhost`) - the host/IP address to listen on, or send to + +## Example (master) + +```javascript +log4js.configure({ + appenders: { + file: { type: "file", filename: "all-the-logs.log" }, + server: { + type: "multiprocess", + mode: "master", + appender: "file", + loggerHost: "0.0.0.0", + }, + }, + categories: { + default: { appenders: ["file"], level: "info" }, + }, +}); +``` + +This creates a log server listening on port 5000, on all IP addresses the host has assigned to it. Note that the appender is not included in the appenders listed for the categories. Also note that the multiprocess master appender will send every event it receives to the underlying appender, regardless of level settings. + +## Example (worker) + +```javascript +log4js.configure({ + appenders: { + network: { type: "multiprocess", mode: "worker", loggerHost: "log.server" }, + }, + categories: { + default: { appenders: ["network"], level: "error" }, + }, +}); +``` + +This will send all error messages to `log.server:5000`. diff --git a/docs/noLogFilter.md b/docs/noLogFilter.md new file mode 100644 index 00000000..0b70137f --- /dev/null +++ b/docs/noLogFilter.md @@ -0,0 +1,61 @@ +# No-Log Filter + +The no log filter allows you to exclude the log events that an appender will record. +The log events will be excluded depending on the regular expressions provided in the configuration. 
+This can be useful when you are debugging your application and want to exclude noisy logs that are irrelevant to your investigation.
+You can stop logging them by providing a regular expression.
+
+## Configuration
+
+- `type` - `"noLogFilter"`
+- `exclude` - `string | Array` - the regular expression (or the regular expressions if you provide an array of values) used for evaluating the events passed to the appender. Events that match the regular expression will be excluded and so not logged.
+- `appender` - `string` - the name of an appender, defined in the same configuration, that you want to filter.
+
+## Example
+
+```javascript
+log4js.configure({
+  appenders: {
+    everything: { type: "file", filename: "all-the-logs.log" },
+    filtered: {
+      type: "noLogFilter",
+      exclude: "not",
+      appender: "everything",
+    },
+  },
+  categories: {
+    default: { appenders: ["filtered"], level: "debug" },
+  },
+});
+
+const logger = log4js.getLogger();
+logger.debug("I will be logged in all-the-logs.log");
+logger.debug("I will be not logged in all-the-logs.log");
+```
+
+Note that:
+
+- an array of strings can be specified in the configuration
+- a case-insensitive match will be done
+- empty strings will not be considered, and so are removed from the array of values
+
+```javascript
+log4js.configure({
+  appenders: {
+    everything: { type: "file", filename: "all-the-logs.log" },
+    filtered: {
+      type: "noLogFilter",
+      exclude: ["NOT", "\\d", ""],
+      appender: "everything",
+    },
+  },
+  categories: {
+    default: { appenders: ["filtered"], level: "debug" },
+  },
+});
+
+const logger = log4js.getLogger();
+logger.debug("I will be logged in all-the-logs.log");
+logger.debug("I will be not logged in all-the-logs.log");
+logger.debug("A 2nd message that will be excluded in all-the-logs.log");
+```
diff --git a/docs/recording.md b/docs/recording.md
new file mode 100644
index 00000000..ecdafb46
--- /dev/null
+++ b/docs/recording.md
@@ -0,0 +1,36 @@
+# Recording Appender
+
+This appender stores the log events in memory. It is mainly useful for testing (see the tests for the category filter, for instance).
+
+## Configuration
+
+- `type` - `recording`
+- `maxLength` - `integer` (optional, defaults to undefined) - the maximum array length for the recording. If not specified, the array will grow until cleared
+
+There is no other configuration for this appender.
+
+## Usage
+
+The array that stores log events is shared across all recording appender instances, and is accessible from the recording module. `require('log4js/lib/appenders/recording')` returns a module with the following functions exported:
+
+- `replay` - returns `Array` - get all the events recorded.
+- `playback` - synonym for `replay`
+- `reset` - clears the array of events recorded.
+- `erase` - synonym for `reset`
+
+## Example
+
+```javascript
+const recording = require("log4js/lib/appenders/recording");
+const log4js = require("log4js");
+log4js.configure({
+  appenders: { vcr: { type: "recording" } },
+  categories: { default: { appenders: ["vcr"], level: "info" } },
+});
+
+const logger = log4js.getLogger();
+logger.info("some log event");
+
+const events = recording.replay(); // events is an array of LogEvent objects.
+recording.erase(); // clear the appender's array.
+```
diff --git a/docs/stderr.md b/docs/stderr.md
new file mode 100644
index 00000000..396b73d3
--- /dev/null
+++ b/docs/stderr.md
@@ -0,0 +1,17 @@
+# Standard Error Appender
+
+This appender writes all log events to the standard error stream.
+ +# Configuration + +- `type` - `stderr` +- `layout` - `object` (optional, defaults to colouredLayout) - see [layouts](layouts.md) + +# Example + +```javascript +log4js.configure({ + appenders: { err: { type: "stderr" } }, + categories: { default: { appenders: ["err"], level: "ERROR" } }, +}); +``` diff --git a/docs/stdout.md b/docs/stdout.md new file mode 100644 index 00000000..e8cd9275 --- /dev/null +++ b/docs/stdout.md @@ -0,0 +1,17 @@ +# Standard Output Appender + +This appender writes all log events to the standard output stream. It is the default appender for log4js. + +# Configuration + +- `type` - `stdout` +- `layout` - `object` (optional, defaults to colouredLayout) - see [layouts](layouts.md) + +# Example + +```javascript +log4js.configure({ + appenders: { out: { type: "stdout" } }, + categories: { default: { appenders: ["out"], level: "info" } }, +}); +``` diff --git a/docs/tcp-server.md b/docs/tcp-server.md new file mode 100644 index 00000000..28f0f786 --- /dev/null +++ b/docs/tcp-server.md @@ -0,0 +1,25 @@ +# TCP Server Appender + +Strictly speaking, this is not an appender - but it is configured as one. The TCP server listens for log messages on a port, taking JSON-encoded log events and then forwarding them to the other appenders. It can be used as a simple way to centralise logging when you have multiple servers or processes. It uses the node.js core networking modules, and so does not require any extra dependencies. Remember to call `log4js.shutdown` when your application terminates, so that the sockets get closed cleanly. It is designed to work with the [tcp appender](tcp.md), but could work with anything that sends correctly formatted JSON log events. + +## Configuration + +- `type` - `tcp-server` +- `port` - `integer` (optional, defaults to `5000`) - the port to listen on +- `host` - `string` (optional, defaults to `localhost`) - the host/IP address to listen on + +## Example (master) + +```javascript +log4js.configure({ + appenders: { + file: { type: "file", filename: "all-the-logs.log" }, + server: { type: "tcp-server", host: "0.0.0.0" }, + }, + categories: { + default: { appenders: ["file"], level: "info" }, + }, +}); +``` + +This creates a log server listening on port 5000, on all IP addresses the host has assigned to it. Note that the appender is not included in the appenders listed for the categories. All events received on the socket will be forwarded to the other appenders, as if they had originated on the same server. diff --git a/docs/tcp.md b/docs/tcp.md new file mode 100644 index 00000000..1a2e70c6 --- /dev/null +++ b/docs/tcp.md @@ -0,0 +1,26 @@ +# TCP Appender + +The TCP appender sends log events to a master server over TCP sockets. It can be used as a simple way to centralise logging when you have multiple servers or processes. It uses the node.js core networking modules, and so does not require any extra dependencies. Remember to call `log4js.shutdown` when your application terminates, so that the sockets get closed cleanly. It's designed to work with the [tcp-server](tcp-server.md), but it doesn't necessarily have to, just make sure whatever is listening at the other end is expecting JSON objects as strings. 
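+
+As a rough sketch of what "whatever is listening at the other end" could look like (this is not part of log4js - any TCP server that splits the stream on the delimiter and parses the JSON will do; it assumes the default `__LOG4JS__` end-of-message delimiter described in the configuration below):
+
+```javascript
+const net = require("net");
+
+net
+  .createServer((socket) => {
+    let buffer = "";
+    socket.on("data", (chunk) => {
+      buffer += chunk.toString();
+      let end;
+      // each serialised log event is terminated by the delimiter
+      while ((end = buffer.indexOf("__LOG4JS__")) >= 0) {
+        const event = JSON.parse(buffer.slice(0, end));
+        console.log(event); // do whatever you need with the parsed log event here
+        buffer = buffer.slice(end + "__LOG4JS__".length);
+      }
+    });
+  })
+  .listen(5000);
+```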
+ +## Configuration + +- `type` - `tcp` +- `port` - `integer` (optional, defaults to `5000`) - the port to send to +- `host` - `string` (optional, defaults to `localhost`) - the host/IP address to send to +- `endMsg` - `string` (optional, defaults to `__LOG4JS__`) - the delimiter that marks the end of a log message +- `layout` - `object` (optional, defaults to a serialized log event) - see [layouts](layouts.md) + +## Example + +```javascript +log4js.configure({ + appenders: { + network: { type: "tcp", host: "log.server" }, + }, + categories: { + default: { appenders: ["network"], level: "error" }, + }, +}); +``` + +This will send all error messages to `log.server:5000`. diff --git a/docs/terms.md b/docs/terms.md new file mode 100644 index 00000000..dc7371e1 --- /dev/null +++ b/docs/terms.md @@ -0,0 +1,13 @@ +## Terminology + +`Level` - a log level is the severity or priority of a log event (debug, info, etc). Whether an _appender_ will see the event or not is determined by the _category_'s level. If this is less than or equal to the event's level, it will be sent to the category's appender(s). + +`Category` - a label for grouping log events. This can be based on module (e.g. 'auth', 'payment', 'http'), or anything you like. Log events with the same _category_ will go to the same _appenders_. Log4js supports a hierarchy for categories, using dots to separate layers - for example, log events in the category 'myapp.submodule' will use the level for 'myapp' if none is defined for 'myapp.submodule', and also any appenders defined for 'myapp'. (This behaviour can be disabled by setting inherit=false on the sub-category.) The category for log events is defined when you get a _Logger_ from log4js (`log4js.getLogger('somecategory')`). + +`Appender` - appenders are responsible for output of log events. They may write events to files, send emails, store them in a database, or anything. Most appenders use _layouts_ to serialise the events to strings for output. + +`Logger` - this is your code's main interface with log4js. A logger instance may have an optional _category_, defined when you create the instance. Loggers provide the `info`, `debug`, `error`, etc functions that create _LogEvents_ and pass them on to appenders. + +`Layout` - a function for converting a _LogEvent_ into a string representation. Log4js comes with a few different implementations: basic, coloured, and a more configurable pattern based layout. + +`LogEvent` - a log event has a timestamp, a level, and optional category, data, and context properties. When you call `logger.info('cheese value:', edam)` the _logger_ will create a log event with the timestamp of now, a _level_ of INFO, a _category_ that was chosen when the logger was created, and a data array with two values (the string 'cheese value:', and the object 'edam'), along with any context data that was added to the logger. diff --git a/docs/v3-changes.md b/docs/v3-changes.md new file mode 100644 index 00000000..9016e960 --- /dev/null +++ b/docs/v3-changes.md @@ -0,0 +1,28 @@ +# Changes in version 3.x of log4js + +log4js no longer supports node versions less than 6. 
+ +The following appenders have been removed from the core, and moved to their own projects: + +- [gelf](https://github.com/log4js-node/gelf) +- [hipchat](https://github.com/log4js-node/hipchat) +- [logFaces-HTTP](https://github.com/log4js-node/logFaces-HTTP) +- [logFaces-UDP](https://github.com/log4js-node/logFaces-UDP) +- [loggly](https://github.com/log4js-node/loggly) +- [logstashHTTP](https://github.com/log4js-node/logstashHTTP) +- [logstashUDP](https://github.com/log4js-node/logstashUDP) +- [mailgun](https://github.com/log4js-node/mailgun) +- [rabbitmq](https://github.com/log4js-node/rabbitmq) +- [redis](https://github.com/log4js-node/redis) +- [slack](https://github.com/log4js-node/slack) +- [smtp](https://github.com/log4js-node/smtp) + +If you were using them, you'll need to `npm i @log4js-node/`. + +Removing the optional appenders removed all the security vulnerabilities. + +The TCP [client](tcp.md)/[server](tcp-server.md) was introduced to replace the multiprocess appender. + +[Issues resolved in 3.0.0](https://github.com/log4js-node/log4js-node/milestone/31?closed=1) + +[PR for the code changes](https://github.com/log4js-node/log4js-node/pull/754) diff --git a/docs/webpack.md b/docs/webpack.md new file mode 100644 index 00000000..6d1fec7b --- /dev/null +++ b/docs/webpack.md @@ -0,0 +1,12 @@ +# Working with webpack + +Log4js uses dynamic require for loading appenders. Webpack doesn't know at build time which appender will be used at runtime so a small workaround is necessary. + +``` +const stdout = require('log4js/lib/appenders/stdout'); +import * as Configuration from 'log4js/lib/configuration'; + +Configuration.prototype.loadAppenderModule = function(type) { + return stdout; +}; +``` diff --git a/docs/writing-appenders.md b/docs/writing-appenders.md new file mode 100644 index 00000000..b1a0648d --- /dev/null +++ b/docs/writing-appenders.md @@ -0,0 +1,81 @@ +# Writing Appenders for Log4js + +Log4js can load appenders from outside its core set. To add a custom appender, the easiest way is to make it a stand-alone module and publish to npm. You can also load appenders from your own application, but they must be defined in a module. + +## Loading mechanism + +When log4js parses your configuration, it loops through the defined appenders. For each one, it will `require` the appender initially using the `type` value prepended with './appenders' as the module identifier - this is to try loading from the core appenders first. If that fails (the module could not be found in the core appenders), then log4js will try to require the module using variations of the `type` value. + +Log4js checks the following places (in this order) for appenders based on the type value: + +1. Bundled core appenders (within appenders directory): `require('./' + type)` +2. node_modules: `require(type)` +3. relative to the main file of your application: `require(path.dirname(require.main.filename) + '/' + type)` +4. relative to the process' current working directory: `require(process.cwd() + '/' + type)` + +If that fails, an error will be raised. + +## Appender Modules + +An appender module should export a single function called `configure`. 
The function should accept the following arguments: + +- `config` - `object` - the appender's configuration object +- `layouts` - `module` - gives access to the [layouts](layouts.md) module, which most appenders will need + - `layout` - `function(type, config)` - this is the main function that appenders will use to find a layout +- `findAppender` - `function(name)` - if your appender is a wrapper around another appender (like the [logLevelFilter](logLevelFilter.md) for example), this function can be used to find another appender by name +- `levels` - `module` - gives access to the [levels](levels.md) module, which most appenders will need + +`configure` should return a function which accepts a logEvent, which is the appender itself. One of the simplest examples is the [stdout](stdout.md) appender. Let's run through the code. + +## Example + +```javascript +// This is the function that generates an appender function +function stdoutAppender(layout, timezoneOffset) { + // This is the appender function itself + return (loggingEvent) => { + process.stdout.write(`${layout(loggingEvent, timezoneOffset)}\n`); + }; +} + +// stdout configure doesn't need to use findAppender, or levels +function configure(config, layouts) { + // the default layout for the appender + let layout = layouts.colouredLayout; + // check if there is another layout specified + if (config.layout) { + // load the layout + layout = layouts.layout(config.layout.type, config.layout); + } + //create a new appender instance + return stdoutAppender(layout, config.timezoneOffset); +} + +//export the only function needed +exports.configure = configure; +``` + +# Shutdown functions + +It's a good idea to implement a `shutdown` function on your appender instances. This function will get called by `log4js.shutdown` and signals that `log4js` has been asked to stop logging. Usually this is because of a fatal exception, or the application is being stopped. Your shutdown function should make sure that all asynchronous operations finish, and that any resources are cleaned up. The function must be named `shutdown`, take one callback argument, and be a property of the appender instance. Let's add a shutdown function to the `stdout` appender as an example. + +## Example (shutdown) + +```javascript +// This is the function that generates an appender function +function stdoutAppender(layout, timezoneOffset) { + // This is the appender function itself + const appender = (loggingEvent) => { + process.stdout.write(`${layout(loggingEvent, timezoneOffset)}\n`); + }; + + // add a shutdown function. + appender.shutdown = (done) => { + process.stdout.write("", done); + }; + + return appender; +} + +// ... 
rest of the code as above +``` diff --git a/examples/cluster.js b/examples/cluster.js new file mode 100644 index 00000000..12d9a44b --- /dev/null +++ b/examples/cluster.js @@ -0,0 +1,24 @@ +'use strict'; + +const cluster = require('cluster'); +const log4js = require('../lib/log4js'); + +log4js.configure({ + appenders: { + out: { type: 'stdout' }, + }, + categories: { default: { appenders: ['out'], level: 'debug' } }, +}); + +let logger; +if (cluster.isMaster) { + logger = log4js.getLogger('master'); + cluster.fork(); + logger.info('master is done', process.pid, new Error('flaps')); +} else { + logger = log4js.getLogger('worker'); + logger.info("I'm a worker, with pid ", process.pid, new Error('pants')); + logger.info("I'm a worker, with pid ", process.pid, new Error()); + logger.info('cluster.worker ', cluster.worker); + cluster.worker.disconnect(); +} diff --git a/examples/custom-layout.js b/examples/custom-layout.js new file mode 100644 index 00000000..42b78d0e --- /dev/null +++ b/examples/custom-layout.js @@ -0,0 +1,24 @@ +const log4js = require('../lib/log4js'); + +log4js.addLayout( + 'json', + (config) => + function (logEvent) { + return JSON.stringify(logEvent) + config.separator; + } +); + +log4js.configure({ + appenders: { + out: { type: 'stdout', layout: { type: 'json', separator: ',' } }, + }, + categories: { + default: { appenders: ['out'], level: 'info' }, + }, +}); + +const logger = log4js.getLogger('json-test'); +logger.info('this is just a test'); +logger.error('of a custom appender'); +logger.warn('that outputs json'); +log4js.shutdown(() => {}); diff --git a/examples/date-file-rolling.js b/examples/date-file-rolling.js new file mode 100644 index 00000000..7cba62b2 --- /dev/null +++ b/examples/date-file-rolling.js @@ -0,0 +1,23 @@ +'use strict'; + +const log4js = require('../lib/log4js'); + +log4js.configure({ + appenders: { + file: { + type: 'dateFile', + filename: 'thing.log', + numBackups: 3, + pattern: '.mm', + }, + }, + categories: { + default: { appenders: ['file'], level: 'debug' }, + }, +}); + +const logger = log4js.getLogger('thing'); + +setInterval(() => { + logger.info('just doing the thing'); +}, 1000); diff --git a/examples/example-connect-logger.js b/examples/example-connect-logger.js index ed7b0133..95bd2d1b 100644 --- a/examples/example-connect-logger.js +++ b/examples/example-connect-logger.js @@ -1,44 +1,47 @@ -//The connect/express logger was added to log4js by danbell. This allows connect/express servers to log using log4js. -//https://github.com/nomiddlename/log4js-node/wiki/Connect-Logger +// The connect/express logger was added to log4js by danbell. This allows connect/express servers to log using log4js. 
+// https://github.com/nomiddlename/log4js-node/wiki/Connect-Logger // load modules -var log4js = require('log4js'); -var express = require("express"); -var app = express(); +const log4js = require('log4js'); +const express = require('express'); -//config +const app = express(); + +// config log4js.configure({ - appenders: [ - { type: 'console' }, - { type: 'file', filename: 'logs/log4jsconnect.log', category: 'log4jslog' } - ] + appenders: { + console: { type: 'console' }, + file: { type: 'file', filename: 'logs/log4jsconnect.log' }, + }, + categories: { + default: { appenders: ['console'], level: 'debug' }, + log4jslog: { appenders: ['file'], level: 'debug' }, + }, }); -//define logger -var logger = log4js.getLogger('log4jslog'); +// define logger +const logger = log4js.getLogger('log4jslog'); // set at which time msg is logged print like: only on error & above // logger.setLevel('ERROR'); -//express app -app.configure(function() { - app.use(express.favicon('')); - // app.use(log4js.connectLogger(logger, { level: log4js.levels.INFO })); - // app.use(log4js.connectLogger(logger, { level: 'auto', format: ':method :url :status' })); - - //### AUTO LEVEL DETECTION - //http responses 3xx, level = WARN - //http responses 4xx & 5xx, level = ERROR - //else.level = INFO - app.use(log4js.connectLogger(logger, { level: 'auto' })); -}); +// express app +app.use(express.favicon('')); +// app.use(log4js.connectLogger(logger, { level: log4js.levels.INFO })); +// app.use(log4js.connectLogger(logger, { level: 'auto', format: ':method :url :status' })); + +// ### AUTO LEVEL DETECTION +// http responses 3xx, level = WARN +// http responses 4xx & 5xx, level = ERROR +// else.level = INFO +app.use(log4js.connectLogger(logger, { level: 'auto' })); -//route -app.get('/', function(req,res) { - res.send('hello world'); +// route +app.get('/', (req, res) => { + res.send('hello world'); }); -//start app +// start app app.listen(5000); console.log('server runing at localhost:5000'); diff --git a/examples/example-socket.js b/examples/example-socket.js index 31bb5ab2..d88d2986 100644 --- a/examples/example-socket.js +++ b/examples/example-socket.js @@ -1,45 +1,48 @@ -var log4js = require('./lib/log4js') -, cluster = require('cluster') -, numCPUs = require('os').cpus().length -, i = 0; +const log4js = require('../lib/log4js'); +const cluster = require('cluster'); +const numCPUs = require('os').cpus().length; + +let i = 0; if (cluster.isMaster) { - log4js.configure({ - appenders: [ - { - type: "multiprocess", - mode: "master", - appender: { - type: "console" - } - } - ] - }); + log4js.configure({ + appenders: { + console: { type: 'console' }, + master: { + type: 'multiprocess', + mode: 'master', + appender: 'console', + }, + }, + categories: { + default: { appenders: ['console'], level: 'info' }, + }, + }); - console.info("Master creating %d workers", numCPUs); - for (i=0; i < numCPUs; i++) { - cluster.fork(); - } + console.info('Master creating %d workers', numCPUs); + for (i = 0; i < numCPUs; i++) { + cluster.fork(); + } - cluster.on('death', function(worker) { - console.info("Worker %d died.", worker.pid); - }); + cluster.on('death', (worker) => { + console.info('Worker %d died.', worker.pid); + }); } else { - log4js.configure({ - appenders: [ - { - type: "multiprocess", - mode: "worker" - } - ] - }); - var logger = log4js.getLogger('example-socket'); + log4js.configure({ + appenders: { + worker: { type: 'multiprocess', mode: 'worker' }, + }, + categories: { + default: { appenders: ['worker'], level: 'info' }, + }, + 
}); + const logger = log4js.getLogger('example-socket'); - console.info("Worker %d started.", process.pid); - for (i=0; i < 1000; i++) { - logger.info("Worker %d - logging something %d", process.pid, i); - } + console.info('Worker %d started.', process.pid); + for (i = 0; i < 1000; i++) { + logger.info('Worker %d - logging something %d', process.pid, i); + } + log4js.shutdown(() => { + process.exit(); + }); } - - - diff --git a/examples/example.js b/examples/example.js index 8879a232..440ff1c5 100644 --- a/examples/example.js +++ b/examples/example.js @@ -1,58 +1,50 @@ -"use strict"; -var log4js = require('../lib/log4js'); -//log the cheese logger messages to a file, and the console ones as well. +'use strict'; + +const log4js = require('../lib/log4js'); +// log the cheese logger messages to a file, and the console ones as well. log4js.configure({ - appenders: [ - { - type: "file", - filename: "cheese.log", - category: [ 'cheese','console' ] - }, - { - type: "console" - } - ], - replaceConsole: true + appenders: { + cheeseLogs: { type: 'file', filename: 'cheese.log' }, + console: { type: 'console' }, + }, + categories: { + cheese: { appenders: ['cheeseLogs'], level: 'error' }, + another: { appenders: ['console'], level: 'trace' }, + default: { appenders: ['console', 'cheeseLogs'], level: 'trace' }, + }, }); -//to add an appender programmatically, and without clearing other appenders -//loadAppender is only necessary if you haven't already configured an appender of this type -log4js.loadAppender('file'); -log4js.addAppender(log4js.appenders.file('pants.log'), 'pants'); -//a custom logger outside of the log4js/lib/appenders directory can be accessed like so -//log4js.loadAppender('what/you/would/put/in/require'); -//log4js.addAppender(log4js.appenders['what/you/would/put/in/require'](args)); -//or through configure as: -//log4js.configure({ -// appenders: [ { type: 'what/you/would/put/in/require', otherArgs: 'blah' } ] -//}); +// a custom logger outside of the log4js/lib/appenders directory can be accessed like so +// log4js.configure({ +// appenders: { outside: { type: 'what/you/would/put/in/require', otherArgs: 'blah' } } +// ... +// }); -var logger = log4js.getLogger('cheese'); -//only errors and above get logged. -//you can also set this log level in the config object -//via the levels field. -logger.setLevel('ERROR'); +const logger = log4js.getLogger('cheese'); +// only errors and above get logged. +const otherLogger = log4js.getLogger(); -//console logging methods have been replaced with log4js ones. -//so this will get coloured output on console, and appear in cheese.log -console.error("AAArgh! Something went wrong", { some: "otherObject", useful_for: "debug purposes" }); -console.log("This should appear as info output"); +// this will get coloured output on console, and appear in cheese.log +otherLogger.error('AAArgh! 
Something went wrong', { + some: 'otherObject', + useful_for: 'debug purposes', +}); +otherLogger.log('This should appear as info output'); -//these will not appear (logging level beneath error) +// these will not appear (logging level beneath error) logger.trace('Entering cheese testing'); logger.debug('Got cheese.'); logger.info('Cheese is Gouda.'); logger.log('Something funny about cheese.'); logger.warn('Cheese is quite smelly.'); -//these end up on the console and in cheese.log -logger.error('Cheese %s is too ripe!', "gouda"); +// these end up only in cheese.log +logger.error('Cheese %s is too ripe!', 'gouda'); logger.fatal('Cheese was breeding ground for listeria.'); -//these don't end up in cheese.log, but will appear on the console -var anotherLogger = log4js.getLogger('another'); -anotherLogger.debug("Just checking"); +// these don't end up in cheese.log, but will appear on the console +const anotherLogger = log4js.getLogger('another'); +anotherLogger.debug('Just checking'); -//one for pants.log -//will also go to console, since that's configured for all categories -var pantsLog = log4js.getLogger('pants'); -pantsLog.debug("Something for pants"); +// will also go to console and cheese.log, since that's configured for all categories +const pantsLog = log4js.getLogger('pants'); +pantsLog.debug('Something for pants'); diff --git a/examples/flush-on-exit.js b/examples/flush-on-exit.js index 19c661c4..d27ffd9d 100644 --- a/examples/flush-on-exit.js +++ b/examples/flush-on-exit.js @@ -2,26 +2,31 @@ * run this, then "ab -c 10 -n 100 localhost:4444/" to test (in * another shell) */ -var log4js = require('../lib/log4js'); +const log4js = require('../lib/log4js'); + log4js.configure({ - appenders: [ - { type: 'file', filename: 'cheese.log', category: 'cheese' }, - { type: 'console'} - ] + appenders: { + cheese: { type: 'file', filename: 'cheese.log' }, + }, + categories: { + default: { appenders: ['cheese'], level: 'debug' }, + }, }); -var logger = log4js.getLogger('cheese'); -logger.setLevel('INFO'); - -var http=require('http'); +const logger = log4js.getLogger('cheese'); +const http = require('http'); -var server = http.createServer(function(request, response){ - response.writeHead(200, {'Content-Type': 'text/plain'}); - var rd = Math.random() * 50; - logger.info("hello " + rd); +http + .createServer((request, response) => { + response.writeHead(200, { 'Content-Type': 'text/plain' }); + const rd = Math.random() * 50; + logger.info(`hello ${rd}`); response.write('hello '); - if (Math.floor(rd) == 30){ - log4js.shutdown(function() { process.exit(1); }); + if (Math.floor(rd) === 30) { + log4js.shutdown(() => { + process.exit(1); + }); } response.end(); -}).listen(4444); + }) + .listen(4444); diff --git a/examples/fromreadme.js b/examples/fromreadme.js index 8d837f42..f9f0017b 100644 --- a/examples/fromreadme.js +++ b/examples/fromreadme.js @@ -1,15 +1,13 @@ -//remember to change the require to just 'log4js' if you've npm install'ed it -var log4js = require('../lib/log4js'); -//by default the console appender is loaded -//log4js.loadAppender('console'); -//you'd only need to add the console appender if you -//had previously called log4js.clearAppenders(); -//log4js.addAppender(log4js.appenders.console()); -log4js.loadAppender('file'); -log4js.addAppender(log4js.appenders.file('cheese.log'), 'cheese'); +// remember to change the require to just 'log4js' if you've npm install'ed it +const log4js = require('../lib/log4js'); -var logger = log4js.getLogger('cheese'); -logger.setLevel('ERROR'); 
+log4js.configure({ + appenders: { cheese: { type: 'file', filename: 'cheese.log' } }, + categories: { default: { appenders: ['cheese'], level: 'error' } }, +}); + +const logger = log4js.getLogger('cheese'); +logger.level = 'ERROR'; logger.trace('Entering cheese testing'); logger.debug('Got cheese.'); diff --git a/examples/hipchat-appender.js b/examples/hipchat-appender.js index 1cdf674a..8e04ba83 100644 --- a/examples/hipchat-appender.js +++ b/examples/hipchat-appender.js @@ -4,51 +4,56 @@ * - npm install hipchat-notifier */ -var log4js = require('../lib/log4js'); +const log4js = require('../lib/log4js'); log4js.configure({ - "appenders": [ - { - "type" : "hipchat", - "hipchat_token": process.env.HIPCHAT_TOKEN || '< User token with Notification Privileges >', - "hipchat_room": process.env.HIPCHAT_ROOM || '< Room ID or Name >' - } - ] + appenders: { + hipchat: { + type: 'hipchat', + hipchat_token: + process.env.HIPCHAT_TOKEN || + '< User token with Notification Privileges >', + hipchat_room: process.env.HIPCHAT_ROOM || '< Room ID or Name >', + }, + }, + categories: { + default: { appenders: ['hipchat'], level: 'trace' }, + }, }); -var logger = log4js.getLogger("hipchat"); -logger.warn("Test Warn message"); -logger.info("Test Info message"); -logger.debug("Test Debug Message"); -logger.trace("Test Trace Message"); -logger.fatal("Test Fatal Message"); -logger.error("Test Error Message"); - +const logger = log4js.getLogger('hipchat'); +logger.warn('Test Warn message'); +logger.info('Test Info message'); +logger.debug('Test Debug Message'); +logger.trace('Test Trace Message'); +logger.fatal('Test Fatal Message'); +logger.error('Test Error Message'); // alternative configuration demonstrating callback + custom layout -/////////////////////////////////////////////////////////////////// +// ///////////////////////////////////////////////////////////////// // use a custom layout function (in this case, the provided basicLayout) // format: [TIMESTAMP][LEVEL][category] - [message] -var customLayout = require('../lib/layouts').basicLayout; - log4js.configure({ - "appenders": [ - { - "type" : "hipchat", - "hipchat_token": process.env.HIPCHAT_TOKEN || '< User token with Notification Privileges >', - "hipchat_room": process.env.HIPCHAT_ROOM || '< Room ID or Name >', - "hipchat_from": "Mr. Semantics", - "hipchat_notify": false, - "hipchat_response_callback": function(err, response, body){ - if(err || response.statusCode > 300){ + appenders: { + hipchat: { + type: 'hipchat', + hipchat_token: + process.env.HIPCHAT_TOKEN || + '< User token with Notification Privileges >', + hipchat_room: process.env.HIPCHAT_ROOM || '< Room ID or Name >', + hipchat_from: 'Mr. Semantics', + hipchat_notify: false, + hipchat_response_callback: function (err, response, body) { + if (err || response.statusCode > 300) { throw new Error('hipchat-notifier failed'); } console.log('mr semantics callback success'); }, - "layout": customLayout - } - ] + layout: { type: 'basic' }, + }, + }, + categories: { default: { appenders: ['hipchat'], level: 'trace' } }, }); -logger.info("Test customLayout from Mr. Semantics"); +logger.info('Test customLayout from Mr. 
Semantics'); diff --git a/examples/layouts.js b/examples/layouts.js new file mode 100644 index 00000000..338a505c --- /dev/null +++ b/examples/layouts.js @@ -0,0 +1,13 @@ +const log4js = require('../lib/log4js'); + +log4js.configure({ + appenders: { + out: { type: 'stdout', layout: { type: 'messagePassThrough' } }, + }, + categories: { + default: { appenders: ['out'], level: 'info' }, + }, +}); + +const logger = log4js.getLogger('thing'); +logger.info('This should not have a timestamp'); diff --git a/examples/log-rolling-bug.js b/examples/log-rolling-bug.js new file mode 100644 index 00000000..a23e62a2 --- /dev/null +++ b/examples/log-rolling-bug.js @@ -0,0 +1,31 @@ +const log4js = require('../lib/log4js'); + +log4js.configure({ + appenders: { + handler: { + type: 'file', + filename: 'logs/handler.log', + maxLogSize: 100000, + backups: 5, + keepFileExt: true, + compress: true, + }, + }, + categories: { + default: { appenders: ['handler'], level: 'debug' }, + handler: { appenders: ['handler'], level: 'debug' }, + }, +}); + +const logsToTest = ['handler']; + +const logStartDate = new Date(); + +const loggers = logsToTest.map((log) => log4js.getLogger(log)); + +// write out a lot +setInterval(() => { + loggers.forEach((logger) => + logger.info(`TESTING LOGGER!!!!!!${logStartDate}`) + ); +}, 10); diff --git a/examples/log-rolling.js b/examples/log-rolling.js index 7519c5f2..7f1cc880 100644 --- a/examples/log-rolling.js +++ b/examples/log-rolling.js @@ -1,27 +1,27 @@ -var log4js = require('../lib/log4js') -, log -, i = 0; +const log4js = require('../lib/log4js'); + log4js.configure({ - "appenders": [ - { - type: "console" - , category: "console" - }, - { - "type": "file", - "filename": "tmp-test.log", - "maxLogSize": 1024, - "backups": 3, - "category": "test" - } - ] + appenders: { + console: { + type: 'console', + }, + file: { + type: 'file', + filename: 'tmp-test.log', + maxLogSize: 1024, + backups: 3, + }, + }, + categories: { + default: { appenders: ['console', 'file'], level: 'info' }, + }, }); -log = log4js.getLogger("test"); +const log = log4js.getLogger('test'); function doTheLogging(x) { - log.info("Logging something %d", x); + log.info('Logging something %d', x); +} +let i = 0; +for (; i < 5000; i += 1) { + doTheLogging(i); } - -for ( ; i < 5000; i++) { - doTheLogging(i); -} \ No newline at end of file diff --git a/examples/log-to-files.js b/examples/log-to-files.js index 6f140daa..fc19d897 100644 --- a/examples/log-to-files.js +++ b/examples/log-to-files.js @@ -1,36 +1,35 @@ -"use strict"; -var path = require('path') -, log4js = require('../lib/log4js'); +const log4js = require('../lib/log4js'); -log4js.configure( - { - appenders: [ - { - type: "file", - filename: "important-things.log", - maxLogSize: 10*1024*1024, // = 10Mb - numBackups: 5, // keep five backup files - compress: true, // compress the backups - encoding: 'utf-8', - mode: parseInt('0640', 8), - flags: 'w+' - }, - { - type: "dateFile", - filename: "more-important-things.log", - pattern: "yyyy-MM-dd-hh", - compress: true - }, - { - type: "stdout" - } - ] - } -); +log4js.configure({ + appenders: { + file: { + type: 'file', + filename: 'important-things.log', + maxLogSize: 10 * 1024 * 1024, // = 10Mb + backups: 5, // keep five backup files + compress: true, // compress the backups + encoding: 'utf-8', + mode: 0o0640, + flags: 'w+', + }, + dateFile: { + type: 'dateFile', + filename: 'more-important-things.log', + pattern: 'yyyy-MM-dd-hh', + compress: true, + }, + out: { + type: 'stdout', + }, + }, + categories: { + default: 
{ appenders: ['file', 'dateFile', 'out'], level: 'trace' }, + }, +}); -var logger = log4js.getLogger('things'); -logger.debug("This little thing went to market"); -logger.info("This little thing stayed at home"); -logger.error("This little thing had roast beef"); -logger.fatal("This little thing had none"); -logger.trace("and this little thing went wee, wee, wee, all the way home."); +const logger = log4js.getLogger('things'); +logger.debug('This little thing went to market'); +logger.info('This little thing stayed at home'); +logger.error('This little thing had roast beef'); +logger.fatal('This little thing had none'); +logger.trace('and this little thing went wee, wee, wee, all the way home.'); diff --git a/examples/logFaces-appender.js b/examples/logFaces-appender.js index 2f398f0c..12a8b15c 100644 --- a/examples/logFaces-appender.js +++ b/examples/logFaces-appender.js @@ -1,4 +1,4 @@ -var log4js = require('../lib/log4js'); +const log4js = require('../lib/log4js'); /* logFaces server configured with UDP receiver, using JSON format, @@ -6,19 +6,21 @@ var log4js = require('../lib/log4js'); */ log4js.configure({ - "appenders": [ - { - "type": "logFacesAppender", // (mandatory) appender type - "application": "MY-NODEJS", // (optional) name of the application (domain) - "remoteHost": "localhost", // (optional) logFaces server host or IP address - "port": 55201, // (optional) logFaces UDP receiver port (must use JSON format) - "layout": { // (optional) the layout to use for messages - "type": "pattern", - "pattern": "%m" - } - } - ] + appenders: { + logFaces: { + type: '@log4js-node/logfaces-udp', // (mandatory) appender type + application: 'MY-NODEJS', // (optional) name of the application (domain) + remoteHost: 'localhost', // (optional) logFaces server host or IP address + port: 55201, // (optional) logFaces UDP receiver port (must use JSON format) + layout: { + // (optional) the layout to use for messages + type: 'pattern', + pattern: '%m', + }, + }, + }, + categories: { default: { appenders: ['logFaces'], level: 'info' } }, }); -var logger = log4js.getLogger("myLogger"); -logger.info("Testing message %s", "arg1"); +const logger = log4js.getLogger('myLogger'); +logger.info('Testing message %s', 'arg1'); diff --git a/examples/loggly-appender.js b/examples/loggly-appender.js index 1465c922..a03b7aa8 100644 --- a/examples/loggly-appender.js +++ b/examples/loggly-appender.js @@ -1,24 +1,26 @@ -//Note that loggly appender needs node-loggly to work. -//If you haven't got node-loggly installed, you'll get cryptic -//"cannot find module" errors when using the loggly appender -var log4js = require('../lib/log4js'); +// Note that loggly appender needs node-loggly to work. 
+// If you haven't got node-loggly installed, you'll get cryptic +// "cannot find module" errors when using the loggly appender +const log4js = require('../lib/log4js'); log4js.configure({ - "appenders": [ - { - type: "console", - category: "test" + appenders: { + console: { + type: 'console', }, - { - "type" : "loggly", - "token" : "12345678901234567890", - "subdomain": "your-subdomain", - "tags" : ["test"], - "category" : "loggly" - } - ] + loggly: { + type: 'loggly', + token: '12345678901234567890', + subdomain: 'your-subdomain', + tags: ['test'], + }, + }, + categories: { + default: { appenders: ['console'], level: 'info' }, + loggly: { appenders: ['loggly'], level: 'info' }, + }, }); -var logger = log4js.getLogger("loggly"); -logger.info("Test log message"); -//logger.debug("Test log message"); \ No newline at end of file +const logger = log4js.getLogger('loggly'); +logger.info('Test log message'); +// logger.debug("Test log message"); diff --git a/examples/logstashHTTP.js b/examples/logstashHTTP.js new file mode 100644 index 00000000..ab48f6d0 --- /dev/null +++ b/examples/logstashHTTP.js @@ -0,0 +1,26 @@ +const log4js = require('../lib/log4js'); + +log4js.configure({ + appenders: { + console: { + type: 'console', + }, + logstash: { + url: 'http://172.17.0.5:9200/_bulk', + type: '@log4js-node/logstash-http', + logType: 'application', + logChannel: 'node', + application: 'logstash-log4js', + layout: { + type: 'pattern', + pattern: '%m', + }, + }, + }, + categories: { + default: { appenders: ['console', 'logstash'], level: 'info' }, + }, +}); + +const logger = log4js.getLogger('myLogger'); +logger.info('Test log message %s', 'arg1', 'arg2'); diff --git a/examples/logstashUDP.js b/examples/logstashUDP.js index 871f1570..2322dfa0 100644 --- a/examples/logstashUDP.js +++ b/examples/logstashUDP.js @@ -1,4 +1,4 @@ -var log4js = require('../lib/log4js'); +const log4js = require('../lib/log4js'); /* Sample logstash config: @@ -12,28 +12,30 @@ var log4js = require('../lib/log4js'); */ log4js.configure({ - "appenders": [ - { - type: "console", - category: "myLogger" + appenders: { + console: { + type: 'console', }, - { - "host": "127.0.0.1", - "port": 10001, - "type": "logstashUDP", - "logType": "myAppType", // Optional, defaults to 'category' - "fields": { // Optional, will be added to the 'fields' object in logstash - "field1": "value1", - "field2": "value2" + logstash: { + host: '127.0.0.1', + port: 10001, + type: 'logstashUDP', + logType: 'myAppType', // Optional, defaults to 'category' + fields: { + // Optional, will be added to the 'fields' object in logstash + field1: 'value1', + field2: 'value2', }, - "layout": { - "type": "pattern", - "pattern": "%m" + layout: { + type: 'pattern', + pattern: '%m', }, - "category": "myLogger" - } - ] + }, + }, + categories: { + default: { appenders: ['console', 'logstash'], level: 'info' }, + }, }); -var logger = log4js.getLogger("myLogger"); -logger.info("Test log message %s", "arg1", "arg2"); +const logger = log4js.getLogger('myLogger'); +logger.info('Test log message %s', 'arg1', 'arg2'); diff --git a/examples/memory-test.js b/examples/memory-test.js index 4cc6f2db..256cc812 100644 --- a/examples/memory-test.js +++ b/examples/memory-test.js @@ -1,37 +1,35 @@ -var log4js = require('../lib/log4js') -, logger -, usage -, i; +const log4js = require('../lib/log4js'); -log4js.configure( - { - appenders: [ - { - category: "memory-test" - , type: "file" - , filename: "memory-test.log" - }, - { - type: "console" - , category: "memory-usage" - }, - { - type: 
"file" - , filename: "memory-usage.log" - , category: "memory-usage" - , layout: { - type: "messagePassThrough" - } - } - ] - } -); -logger = log4js.getLogger("memory-test"); -usage = log4js.getLogger("memory-usage"); +log4js.configure({ + appenders: { + logs: { + type: 'file', + filename: 'memory-test.log', + }, + console: { + type: 'stdout', + }, + file: { + type: 'file', + filename: 'memory-usage.log', + layout: { + type: 'messagePassThrough', + }, + }, + }, + categories: { + default: { appenders: ['console'], level: 'info' }, + 'memory-test': { appenders: ['logs'], level: 'info' }, + 'memory-usage': { appenders: ['console', 'file'], level: 'info' }, + }, +}); +const logger = log4js.getLogger('memory-test'); +const usage = log4js.getLogger('memory-usage'); -for (i=0; i < 1000000; i++) { - if ( (i % 5000) === 0) { - usage.info("%d %d", i, process.memoryUsage().rss); - } - logger.info("Doing something."); +for (let i = 0; i < 1000000; i += 1) { + if (i % 5000 === 0) { + usage.info('%d %d', i, process.memoryUsage().rss); + } + logger.info('Doing something.'); } +log4js.shutdown(() => {}); diff --git a/examples/patternLayout-tokens.js b/examples/patternLayout-tokens.js index 84b171c4..764508b1 100644 --- a/examples/patternLayout-tokens.js +++ b/examples/patternLayout-tokens.js @@ -1,21 +1,24 @@ -var log4js = require('./lib/log4js'); +const log4js = require('../lib/log4js'); -var config = { - "appenders": [ - { - "type": "console", - "layout": { - "type": "pattern", - "pattern": "%[%r (%x{pid}) %p %c -%] %m%n", - "tokens": { - "pid" : function() { return process.pid; } - } - } - } - ] - }; +log4js.configure({ + appenders: { + out: { + type: 'console', + layout: { + type: 'pattern', + pattern: '%[%r (%x{pid}) %p %c -%] %m%n', + tokens: { + pid: function () { + return process.pid; + }, + }, + }, + }, + }, + categories: { + default: { appenders: ['out'], level: 'info' }, + }, +}); -log4js.configure(config, {}); - -var logger = log4js.getLogger("app"); -logger.info("Test log message"); \ No newline at end of file +const logger = log4js.getLogger('app'); +logger.info('Test log message'); diff --git a/examples/pm2.js b/examples/pm2.js new file mode 100644 index 00000000..910969e0 --- /dev/null +++ b/examples/pm2.js @@ -0,0 +1,26 @@ +const log4js = require('../lib/log4js'); + +// NOTE: for PM2 support to work you'll need to install the pm2-intercom module +// `pm2 install pm2-intercom` +log4js.configure({ + appenders: { + out: { type: 'file', filename: 'pm2logs.log' }, + }, + categories: { + default: { appenders: ['out'], level: 'info' }, + }, + pm2: true, + pm2InstanceVar: 'INSTANCE_ID', +}); +const logger = log4js.getLogger('app'); +logger.info("I'm forever blowing bubbles ", process.env.INSTANCE_ID); +logger.info("I'm forever blowing bubbles ", process.env.INSTANCE_ID); +logger.info("I'm forever blowing bubbles ", process.env.INSTANCE_ID); +logger.info("I'm forever blowing bubbles ", process.env.INSTANCE_ID); +logger.info('last bubbles', process.env.INSTANCE_ID); +// give pm2 time to set everything up, before we tear it down +setTimeout(() => { + log4js.shutdown(() => { + console.error('All done, shutdown cb returned.'); + }); +}, 5000); diff --git a/examples/pm2.json b/examples/pm2.json new file mode 100644 index 00000000..e03a0711 --- /dev/null +++ b/examples/pm2.json @@ -0,0 +1,11 @@ +{ + "apps": [ + { + "name": "testing", + "script": "pm2.js", + "instances": 0, + "instance_var": "INSTANCE_ID", + "exec_mode": "cluster" + } + ] +} diff --git a/examples/rabbitmq-appender.js 
b/examples/rabbitmq-appender.js new file mode 100755 index 00000000..23c60112 --- /dev/null +++ b/examples/rabbitmq-appender.js @@ -0,0 +1,49 @@ +// Note that rabbitmq appender needs install amqplib to work. + +const log4js = require('../lib/log4js'); + +log4js.configure({ + appenders: { + out: { + type: 'console', + }, + file: { + type: 'dateFile', + filename: 'logs/log.txt', + pattern: 'yyyyMMdd', + alwaysIncludePattern: false, + }, + mq: { + type: '@log4js-node/rabbitmq', + host: '127.0.0.1', + port: 5672, + username: 'guest', + password: 'guest', + routing_key: 'logstash', + exchange: 'exchange_logs', + mq_type: 'direct', + durable: true, + layout: { + type: 'pattern', + pattern: '%d{yyyy-MM-dd hh:mm:ss:SSS}#%p#%m', + }, + }, + }, + categories: { + default: { appenders: ['out'], level: 'info' }, + dateFile: { appenders: ['file'], level: 'info' }, + rabbitmq: { appenders: ['mq'], level: 'info' }, + }, +}); + +const log = log4js.getLogger('console'); +const logRabbitmq = log4js.getLogger('rabbitmq'); + +function doTheLogging(x) { + log.info('Logging something %d', x); + logRabbitmq.info('Logging something %d', x); +} + +for (let i = 0; i < 500; i += 1) { + doTheLogging(i); +} diff --git a/examples/redis-appender.js b/examples/redis-appender.js new file mode 100644 index 00000000..d45d5936 --- /dev/null +++ b/examples/redis-appender.js @@ -0,0 +1,45 @@ +// Note that redis appender needs install redis to work. + +const log4js = require('../lib/log4js'); + +log4js.configure({ + appenders: { + out: { + type: 'console', + }, + file: { + type: 'dateFile', + filename: 'logs/log.txt', + pattern: 'yyyyMMdd', + alwaysIncludePattern: false, + }, + db: { + type: '@log4js-node/redis', + host: '127.0.0.1', + port: 6379, + pass: '', + channel: 'q_log', + layout: { + type: 'pattern', + pattern: '%d{yyyy-MM-dd hh:mm:ss:SSS}#%p#%m', + }, + }, + }, + categories: { + default: { appenders: ['out'], level: 'info' }, + dateFile: { appenders: ['file'], level: 'info' }, + redis: { appenders: ['db'], level: 'info' }, + }, +}); + +const log = log4js.getLogger('console'); +const logRedis = log4js.getLogger('redis'); + +function doTheLogging(x) { + log.info('Logging something %d', x); + logRedis.info('Logging something %d', x); +} + +for (let i = 0; i < 500; i += 1) { + doTheLogging(i); +} diff --git a/examples/reload.js b/examples/reload.js deleted file mode 100644 index a8ede43c..00000000 --- a/examples/reload.js +++ /dev/null @@ -1,14 +0,0 @@ -"use strict"; -var path = require('path') -, log4js = require('../lib/log4js'); - -log4js.configure( - // config reloading only works with file-based config (obvs) - path.join(__dirname, '../test/tape/test-config.json'), - { reloadSecs: 10 } -); - -log4js.getLogger('testing').info("Just testing"); -log4js.shutdown(function() { - //callback gets you notified when log4js has finished shutting down. -}); diff --git a/examples/slack-appender.js b/examples/slack-appender.js index eb8d4198..0f11967d 100644 --- a/examples/slack-appender.js +++ b/examples/slack-appender.js @@ -1,24 +1,26 @@ -//Note that slack appender needs slack-node package to work. -var log4js = require('../lib/log4js'); +// Note that slack appender needs slack-node package to work. 
+const log4js = require('../lib/log4js'); log4js.configure({ - "appenders": [ - { - "type" : "slack", - "token": 'TOKEN', - "channel_id": "#CHANNEL", - "username": "USERNAME", - "format": "text", - "category" : "slack", - "icon_url" : "ICON_URL" - } - ] + appenders: { + slack: { + type: '@log4js-node/slack', + token: 'TOKEN', + channel_id: '#CHANNEL', + username: 'USERNAME', + format: 'text', + icon_url: 'ICON_URL', + }, + }, + categories: { + default: { appenders: ['slack'], level: 'info' }, + }, }); -var logger = log4js.getLogger("slack"); -logger.warn("Test Warn message"); -logger.info("Test Info message"); -logger.debug("Test Debug Message"); -logger.trace("Test Trace Message"); -logger.fatal("Test Fatal Message"); -logger.error("Test Error Message"); +const logger = log4js.getLogger('slack'); +logger.warn('Test Warn message'); +logger.info('Test Info message'); +logger.debug('Test Debug Message'); +logger.trace('Test Trace Message'); +logger.fatal('Test Fatal Message'); +logger.error('Test Error Message'); diff --git a/examples/smtp-appender.js b/examples/smtp-appender.js index 134ce900..716b3991 100644 --- a/examples/smtp-appender.js +++ b/examples/smtp-appender.js @@ -1,43 +1,43 @@ -//Note that smtp appender needs nodemailer to work. -//If you haven't got nodemailer installed, you'll get cryptic -//"cannot find module" errors when using the smtp appender -var log4js = require('../lib/log4js') -, log -, logmailer -, i = 0; +// Note that smtp appender needs nodemailer to work. +// If you haven't got nodemailer installed, you'll get cryptic +// "cannot find module" errors when using the smtp appender +const log4js = require('../lib/log4js'); + log4js.configure({ - "appenders": [ - { - type: "console", - category: "test" + appenders: { + out: { + type: 'console', }, - { - "type": "smtp", - "recipients": "logfilerecipient@logging.com", - "sendInterval": 5, - "transport": "SMTP", - "SMTP": { - "host": "smtp.gmail.com", - "secureConnection": true, - "port": 465, - "auth": { - "user": "someone@gmail", - "pass": "********************" + mail: { + type: '@log4js-node/smtp', + recipients: 'logfilerecipient@logging.com', + sendInterval: 5, + transport: 'SMTP', + SMTP: { + host: 'smtp.gmail.com', + secureConnection: true, + port: 465, + auth: { + user: 'someone@gmail', + pass: '********************', }, - "debug": true + debug: true, }, - "category": "mailer" - } - ] + }, + }, + categories: { + default: { appenders: ['out'], level: 'info' }, + mailer: { appenders: ['mail'], level: 'info' }, + }, }); -log = log4js.getLogger("test"); -logmailer = log4js.getLogger("mailer"); +const log = log4js.getLogger('test'); +const logmailer = log4js.getLogger('mailer'); function doTheLogging(x) { - log.info("Logging something %d", x); - logmailer.info("Logging something %d", x); + log.info('Logging something %d', x); + logmailer.info('Logging something %d', x); } -for ( ; i < 500; i++) { - doTheLogging(i); +for (let i = 0; i < 500; i += 1) { + doTheLogging(i); } diff --git a/examples/stacktrace.js b/examples/stacktrace.js new file mode 100644 index 00000000..ff3abf49 --- /dev/null +++ b/examples/stacktrace.js @@ -0,0 +1,22 @@ +const log4js = require('../lib/log4js'); + +log4js.configure({ + appenders: { + 'console-appender': { + type: 'console', + layout: { + type: 'pattern', + pattern: '%[[%p]%] - %10.-100f{2} | %7.12l:%7.12o - %[%m%]', + }, + }, + }, + categories: { + default: { + appenders: ['console-appender'], + enableCallStack: true, + level: 'info', + }, + }, +}); + +log4js.getLogger().info('This 
should not cause problems'); diff --git a/lib/LoggingEvent.js b/lib/LoggingEvent.js new file mode 100644 index 00000000..d6704a02 --- /dev/null +++ b/lib/LoggingEvent.js @@ -0,0 +1,161 @@ +/* eslint max-classes-per-file: ["error", 2] */ +/* eslint no-underscore-dangle: ["error", { "allow": ["_getLocationKeys"] }] */ + +const flatted = require('flatted'); +const levels = require('./levels'); + +class SerDe { + constructor() { + const deserialise = { + __LOG4JS_undefined__: undefined, + __LOG4JS_NaN__: Number('abc'), + __LOG4JS_Infinity__: 1 / 0, + '__LOG4JS_-Infinity__': -1 / 0, + }; + this.deMap = deserialise; + this.serMap = {}; + Object.keys(this.deMap).forEach((key) => { + const value = this.deMap[key]; + this.serMap[value] = key; + }); + } + + canSerialise(key) { + if (typeof key === 'string') return false; + try { + return key in this.serMap; + } catch (e) { + return false; + } + } + + serialise(key) { + if (this.canSerialise(key)) return this.serMap[key]; + return key; + } + + canDeserialise(key) { + return key in this.deMap; + } + + deserialise(key) { + if (this.canDeserialise(key)) return this.deMap[key]; + return key; + } +} +const serde = new SerDe(); + +/** + * @name LoggingEvent + * @namespace Log4js + */ +class LoggingEvent { + /** + * Models a logging event. + * @constructor + * @param {string} categoryName name of category + * @param {Log4js.Level} level level of message + * @param {Array} data objects to log + * @param {Error} [error] + * @author Seth Chisamore + */ + constructor(categoryName, level, data, context, location, error) { + this.startTime = new Date(); + this.categoryName = categoryName; + this.data = data; + this.level = level; + this.context = Object.assign({}, context); // eslint-disable-line prefer-object-spread + this.pid = process.pid; + this.error = error; + + if (typeof location !== 'undefined') { + if (!location || typeof location !== 'object' || Array.isArray(location)) + throw new TypeError( + 'Invalid location type passed to LoggingEvent constructor' + ); + + this.constructor._getLocationKeys().forEach((key) => { + if (typeof location[key] !== 'undefined') this[key] = location[key]; + }); + } + } + + /** @private */ + static _getLocationKeys() { + return [ + 'fileName', + 'lineNumber', + 'columnNumber', + 'callStack', + 'className', + 'functionName', + 'functionAlias', + 'callerName', + ]; + } + + serialise() { + return flatted.stringify(this, (key, value) => { + // JSON.stringify(new Error('test')) returns {}, which is not really useful for us. + // The following allows us to serialize errors (semi) correctly. + if (value instanceof Error) { + // eslint-disable-next-line prefer-object-spread + value = Object.assign( + { message: value.message, stack: value.stack }, + value + ); + } + // JSON.stringify({a: Number('abc'), b: 1/0, c: -1/0}) returns {a: null, b: null, c: null}. + // The following allows us to serialize to NaN, Infinity and -Infinity correctly. + // JSON.stringify([undefined]) returns [null]. + // The following allows us to serialize to undefined correctly. 
+ return serde.serialise(value); + }); + } + + static deserialise(serialised) { + let event; + try { + const rehydratedEvent = flatted.parse(serialised, (key, value) => { + if (value && value.message && value.stack) { + const fakeError = new Error(value); + Object.keys(value).forEach((k) => { + fakeError[k] = value[k]; + }); + value = fakeError; + } + return serde.deserialise(value); + }); + this._getLocationKeys().forEach((key) => { + if (typeof rehydratedEvent[key] !== 'undefined') { + if (!rehydratedEvent.location) rehydratedEvent.location = {}; + rehydratedEvent.location[key] = rehydratedEvent[key]; + } + }); + event = new LoggingEvent( + rehydratedEvent.categoryName, + levels.getLevel(rehydratedEvent.level.levelStr), + rehydratedEvent.data, + rehydratedEvent.context, + rehydratedEvent.location, + rehydratedEvent.error + ); + event.startTime = new Date(rehydratedEvent.startTime); + event.pid = rehydratedEvent.pid; + if (rehydratedEvent.cluster) { + event.cluster = rehydratedEvent.cluster; + } + } catch (e) { + event = new LoggingEvent('log4js', levels.ERROR, [ + 'Unable to parse log:', + serialised, + 'because: ', + e, + ]); + } + + return event; + } +} + +module.exports = LoggingEvent; diff --git a/lib/appenders/adapters.js b/lib/appenders/adapters.js new file mode 100644 index 00000000..648c3621 --- /dev/null +++ b/lib/appenders/adapters.js @@ -0,0 +1,46 @@ +function maxFileSizeUnitTransform(maxLogSize) { + if (typeof maxLogSize === 'number' && Number.isInteger(maxLogSize)) { + return maxLogSize; + } + + const units = { + K: 1024, + M: 1024 * 1024, + G: 1024 * 1024 * 1024, + }; + const validUnit = Object.keys(units); + const unit = maxLogSize.slice(-1).toLocaleUpperCase(); + const value = maxLogSize.slice(0, -1).trim(); + + if (validUnit.indexOf(unit) < 0 || !Number.isInteger(Number(value))) { + throw Error(`maxLogSize: "${maxLogSize}" is invalid`); + } else { + return value * units[unit]; + } +} + +function adapter(configAdapter, config) { + const newConfig = Object.assign({}, config); // eslint-disable-line prefer-object-spread + Object.keys(configAdapter).forEach((key) => { + if (newConfig[key]) { + newConfig[key] = configAdapter[key](config[key]); + } + }); + return newConfig; +} + +function fileAppenderAdapter(config) { + const configAdapter = { + maxLogSize: maxFileSizeUnitTransform, + }; + return adapter(configAdapter, config); +} + +const adapters = { + dateFile: fileAppenderAdapter, + file: fileAppenderAdapter, + fileSync: fileAppenderAdapter, +}; + +module.exports.modifyConfig = (config) => + adapters[config.type] ? 
adapters[config.type](config) : config; diff --git a/lib/appenders/categoryFilter.js b/lib/appenders/categoryFilter.js index c4ab9d7f..e3373c11 100644 --- a/lib/appenders/categoryFilter.js +++ b/lib/appenders/categoryFilter.js @@ -1,21 +1,19 @@ -'use strict'; - -const log4js = require('../log4js'); +const debug = require('debug')('log4js:categoryFilter'); function categoryFilter(excludes, appender) { if (typeof excludes === 'string') excludes = [excludes]; return (logEvent) => { + debug(`Checking ${logEvent.categoryName} against ${excludes}`); if (excludes.indexOf(logEvent.categoryName) === -1) { + debug('Not excluded, sending to appender'); appender(logEvent); } }; } -function configure(config, options) { - log4js.loadAppender(config.appender.type); - const appender = log4js.appenderMakers[config.appender.type](config.appender, options); +function configure(config, layouts, findAppender) { + const appender = findAppender(config.appender); return categoryFilter(config.exclude, appender); } -module.exports.appender = categoryFilter; module.exports.configure = configure; diff --git a/lib/appenders/clustered.js b/lib/appenders/clustered.js deleted file mode 100755 index 350209fc..00000000 --- a/lib/appenders/clustered.js +++ /dev/null @@ -1,140 +0,0 @@ -/* eslint-disable no-plusplus */ - -'use strict'; - -const cluster = require('cluster'); -const log4js = require('../log4js'); - -/** - * Takes a loggingEvent object, returns string representation of it. - */ -function serializeLoggingEvent(loggingEvent) { - // JSON.stringify(new Error('test')) returns {}, which is not really useful for us. - // The following allows us to serialize errors correctly. - for (let i = 0; i < loggingEvent.data.length; i++) { - const item = loggingEvent.data[i]; - // Validate that we really are in this case - if (item && item.stack && JSON.stringify(item) === '{}') { - loggingEvent.data[i] = { stack: item.stack }; - } - } - return JSON.stringify(loggingEvent); -} - -/** - * Takes a string, returns an object with - * the correct log properties. - * - * This method has been "borrowed" from the `multiprocess` appender - * by `nomiddlename` - * (https://github.com/nomiddlename/log4js-node/blob/master/lib/appenders/multiprocess.js) - * - * Apparently, node.js serializes everything to strings when using `process.send()`, - * so we need smart deserialization that will recreate log date and level for further - * processing by log4js internals. - */ -function deserializeLoggingEvent(loggingEventString) { - let loggingEvent; - - try { - loggingEvent = JSON.parse(loggingEventString); - loggingEvent.startTime = new Date(loggingEvent.startTime); - loggingEvent.level = log4js.levels.toLevel(loggingEvent.level.levelStr); - // Unwrap serialized errors - for (let i = 0; i < loggingEvent.data.length; i++) { - const item = loggingEvent.data[i]; - if (item && item.stack) { - loggingEvent.data[i] = item.stack; - } - } - } catch (e) { - // JSON.parse failed, just log the contents probably a naughty. - loggingEvent = { - startTime: new Date(), - categoryName: 'log4js', - level: log4js.levels.ERROR, - data: ['Unable to parse log:', loggingEventString] - }; - } - return loggingEvent; -} - -/** - * Creates an appender. - * - * If the current process is a master (`cluster.isMaster`), then this will be a "master appender". - * Otherwise this will be a worker appender, that just sends loggingEvents to the master process. 
- * - * If you are using this method directly, make sure to provide it with `config.actualAppenders` - * array of actual appender instances. - * - * Or better use `configure(config, options)` - */ -function createAppender(config) { - if (cluster.isMaster) { - const masterAppender = (loggingEvent) => { - if (config.actualAppenders) { - const size = config.actualAppenders.length; - for (let i = 0; i < size; i++) { - if ( - !config.appenders[i].category || - config.appenders[i].category === loggingEvent.categoryName - ) { - // Relying on the index is not a good practice but otherwise - // the change would have been bigger. - config.actualAppenders[i](loggingEvent); - } - } - } - }; - - // Listen on new workers - cluster.on('fork', (worker) => { - worker.on('message', (message) => { - if (message.type && message.type === '::log-message') { - const loggingEvent = deserializeLoggingEvent(message.event); - - // Adding PID metadata - loggingEvent.pid = worker.process.pid; - loggingEvent.cluster = { - master: process.pid, - worker: worker.process.pid, - workerId: worker.id - }; - - masterAppender(loggingEvent); - } - }); - }); - - return masterAppender; - } - - return (loggingEvent) => { - // If inside the worker process, then send the logger event to master. - if (cluster.isWorker) { - // console.log("worker " + cluster.worker.id + " is sending message"); - process.send({ type: '::log-message', event: serializeLoggingEvent(loggingEvent) }); - } - }; -} - -function configure(config, options) { - if (config.appenders && cluster.isMaster) { - const size = config.appenders.length; - config.actualAppenders = new Array(size); - - for (let i = 0; i < size; i++) { - log4js.loadAppender(config.appenders[i].type); - config.actualAppenders[i] = log4js.appenderMakers[config.appenders[i].type]( - config.appenders[i], - options - ); - } - } - - return createAppender(config); -} - -module.exports.appender = createAppender; -module.exports.configure = configure; diff --git a/lib/appenders/console.js b/lib/appenders/console.js index 6b2e6919..2e2efa61 100644 --- a/lib/appenders/console.js +++ b/lib/appenders/console.js @@ -1,23 +1,18 @@ -'use strict'; - -const layouts = require('../layouts'); - +// eslint-disable-next-line no-console const consoleLog = console.log.bind(console); function consoleAppender(layout, timezoneOffset) { - layout = layout || layouts.colouredLayout; return (loggingEvent) => { consoleLog(layout(loggingEvent, timezoneOffset)); }; } -function configure(config) { - let layout; +function configure(config, layouts) { + let layout = layouts.colouredLayout; if (config.layout) { layout = layouts.layout(config.layout.type, config.layout); } return consoleAppender(layout, config.timezoneOffset); } -module.exports.appender = consoleAppender; module.exports.configure = configure; diff --git a/lib/appenders/dateFile.js b/lib/appenders/dateFile.js index 3b31bdc0..36811b34 100644 --- a/lib/appenders/dateFile.js +++ b/lib/appenders/dateFile.js @@ -1,51 +1,58 @@ -'use strict'; - const streams = require('streamroller'); -const layouts = require('../layouts'); -const path = require('path'); const os = require('os'); -const eol = os.EOL || '\n'; -const openFiles = []; +const eol = os.EOL; -// close open files on process exit. 
-process.on('exit', () => { - openFiles.forEach((file) => { - file.end(); +function openTheStream(filename, pattern, options) { + const stream = new streams.DateRollingFileStream(filename, pattern, options); + stream.on('error', (err) => { + // eslint-disable-next-line no-console + console.error( + 'log4js.dateFileAppender - Writing to file %s, error happened ', + filename, + err + ); + }); + stream.on('drain', () => { + process.emit('log4js:pause', false); }); -}); + return stream; +} /** * File appender that rolls files according to a date pattern. - * @filename base filename. - * @pattern the format that will be added to the end of filename when rolling, + * @param filename base filename. + * @param pattern the format that will be added to the end of filename when rolling, * also used to check when to roll files - defaults to '.yyyy-MM-dd' - * @layout layout function for log messages - defaults to basicLayout - * @timezoneOffset optional timezone offset in minutes - defaults to system local + * @param layout layout function for log messages - defaults to basicLayout + * @param options - options to be passed to the underlying stream + * @param timezoneOffset - optional timezone offset in minutes (default system local) */ -function appender( - filename, - pattern, - layout, - options, - timezoneOffset -) { - layout = layout || layouts.basicLayout; - const logFile = new streams.DateRollingFileStream( - filename, - pattern, - options - ); - openFiles.push(logFile); +function appender(filename, pattern, layout, options, timezoneOffset) { + // the options for file appender use maxLogSize, but the docs say any file appender + // options should work for dateFile as well. + options.maxSize = options.maxLogSize; + + const writer = openTheStream(filename, pattern, options); - return (logEvent) => { - logFile.write(layout(logEvent, timezoneOffset) + eol, 'utf8'); + const app = function (logEvent) { + if (!writer.writable) { + return; + } + if (!writer.write(layout(logEvent, timezoneOffset) + eol, 'utf8')) { + process.emit('log4js:pause', true); + } + }; + + app.shutdown = function (complete) { + writer.end('', 'utf-8', complete); }; -} -function configure(config, options) { - let layout; + return app; +} +function configure(config, layouts) { + let layout = layouts.basicLayout; if (config.layout) { layout = layouts.layout(config.layout.type, config.layout); } @@ -54,9 +61,8 @@ function configure(config, options) { config.alwaysIncludePattern = false; } - if (options && options.cwd && !config.absolute) { - config.filename = path.join(options.cwd, config.filename); - } + // security default (instead of relying on streamroller default) + config.mode = config.mode || 0o600; return appender( config.filename, @@ -67,31 +73,4 @@ function configure(config, options) { ); } -function shutdown(cb) { - let completed = 0; - let error; - const complete = (err) => { - error = error || err; - completed++; // eslint-disable-line no-plusplus - if (completed >= openFiles.length) { - cb(error); - } - }; - if (!openFiles.length) { - return cb(); - } - - return openFiles.forEach((file) => { - if (!file.write(eol, 'utf-8')) { - file.once('drain', () => { - file.end(complete); - }); - } else { - file.end(complete); - } - }); -} - -module.exports.appender = appender; module.exports.configure = configure; -module.exports.shutdown = shutdown; diff --git a/lib/appenders/file.js b/lib/appenders/file.js index 9284e146..379e3e67 100644 --- a/lib/appenders/file.js +++ b/lib/appenders/file.js @@ -1,36 +1,22 @@ -'use strict'; - 
const debug = require('debug')('log4js:file'); -const layouts = require('../layouts'); const path = require('path'); const streams = require('streamroller'); const os = require('os'); -const eol = os.EOL || '\n'; -const openFiles = []; +const eol = os.EOL; -// close open files on process exit. -process.on('exit', () => { - debug('Exit handler called.'); - openFiles.forEach((file) => { - file.end(); - }); -}); - -// On SIGHUP, close and reopen all files. This allows this appender to work with -// logrotate. Note that if you are using logrotate, you should not set -// `logSize`. -process.on('SIGHUP', () => { - debug('SIGHUP handler called.'); - openFiles.forEach((writer) => { - writer.closeTheStream(writer.openTheStream.bind(writer)); +let mainSighupListenerStarted = false; +const sighupListeners = new Set(); +function mainSighupHandler() { + sighupListeners.forEach((app) => { + app.sighupHandler(); }); -}); +} /** * File Appender writing the logs to a text file. Supports rolling of logs by size. * - * @param file file log messages will be written to + * @param file the file log messages will be written to * @param layout a function that takes a logEvent and returns a string * (defaults to basicLayout). * @param logSize - the maximum size (in bytes) for a log file, @@ -40,53 +26,120 @@ process.on('SIGHUP', () => { * @param options - options to be passed to the underlying stream * @param timezoneOffset - optional timezone offset in minutes (default system local) */ -function fileAppender(file, layout, logSize, numBackups, options, timezoneOffset) { +function fileAppender( + file, + layout, + logSize, + numBackups, + options, + timezoneOffset +) { + if (typeof file !== 'string' || file.length === 0) { + throw new Error(`Invalid filename: ${file}`); + } else if (file.endsWith(path.sep)) { + throw new Error(`Filename is a directory: ${file}`); + } else if (file.indexOf(`~${path.sep}`) === 0) { + // handle ~ expansion: https://github.com/nodejs/node/issues/684 + // exclude ~ and ~filename as these can be valid files + file = file.replace('~', os.homedir()); + } file = path.normalize(file); - layout = layout || layouts.basicLayout; - numBackups = numBackups === undefined ? 5 : numBackups; - // there has to be at least one backup if logSize has been specified - numBackups = numBackups === 0 ? 1 : numBackups; - - debug('Creating file appender (', - file, ', ', - logSize, ', ', - numBackups, ', ', - options, ', ', - timezoneOffset, ')' + numBackups = !numBackups && numBackups !== 0 ? 
5 : numBackups; + + debug( + 'Creating file appender (', + file, + ', ', + logSize, + ', ', + numBackups, + ', ', + options, + ', ', + timezoneOffset, + ')' ); - const writer = openTheStream(file, logSize, numBackups, options); - // push file to the stack of open handlers - openFiles.push(writer); + function openTheStream(filePath, fileSize, numFiles, opt) { + const stream = new streams.RollingFileStream( + filePath, + fileSize, + numFiles, + opt + ); + stream.on('error', (err) => { + // eslint-disable-next-line no-console + console.error( + 'log4js.fileAppender - Writing to file %s, error happened ', + filePath, + err + ); + }); + stream.on('drain', () => { + process.emit('log4js:pause', false); + }); + return stream; + } + + let writer = openTheStream(file, logSize, numBackups, options); - return function (loggingEvent) { - writer.write(layout(loggingEvent, timezoneOffset) + eol, 'utf8'); + const app = function (loggingEvent) { + if (!writer.writable) { + return; + } + if (options.removeColor === true) { + // eslint-disable-next-line no-control-regex + const regex = /\x1b[[0-9;]*m/g; + loggingEvent.data = loggingEvent.data.map((d) => { + if (typeof d === 'string') return d.replace(regex, ''); + return d; + }); + } + if (!writer.write(layout(loggingEvent, timezoneOffset) + eol, 'utf8')) { + process.emit('log4js:pause', true); + } }; -} -function openTheStream(file, fileSize, numFiles, options) { - const stream = new streams.RollingFileStream( - file, - fileSize, - numFiles, - options - ); - stream.on('error', (err) => { - console.error('log4js.fileAppender - Writing to file %s, error happened ', file, err); - }); - return stream; -} + app.reopen = function () { + writer.end(() => { + writer = openTheStream(file, logSize, numBackups, options); + }); + }; + app.sighupHandler = function () { + debug('SIGHUP handler called.'); + app.reopen(); + }; -function configure(config, options) { - let layout; + app.shutdown = function (complete) { + sighupListeners.delete(app); + if (sighupListeners.size === 0 && mainSighupListenerStarted) { + process.removeListener('SIGHUP', mainSighupHandler); + mainSighupListenerStarted = false; + } + writer.end('', 'utf-8', complete); + }; + + // On SIGHUP, close and reopen all files. This allows this appender to work with + // logrotate. Note that if you are using logrotate, you should not set + // `logSize`. 
+ sighupListeners.add(app); + if (!mainSighupListenerStarted) { + process.on('SIGHUP', mainSighupHandler); + mainSighupListenerStarted = true; + } + + return app; +} + +function configure(config, layouts) { + let layout = layouts.basicLayout; if (config.layout) { layout = layouts.layout(config.layout.type, config.layout); } - if (options && options.cwd && !config.absolute) { - config.filename = path.join(options.cwd, config.filename); - } + // security default (instead of relying on streamroller default) + config.mode = config.mode || 0o600; return fileAppender( config.filename, @@ -98,31 +151,4 @@ function configure(config, options) { ); } -function shutdown(cb) { - let completed = 0; - let error; - const complete = (err) => { - error = error || err; - completed++; // eslint-disable-line no-plusplus - if (completed >= openFiles.length) { - cb(error); - } - }; - if (!openFiles.length) { - return cb(); - } - - return openFiles.forEach((file) => { - if (!file.write(eol, 'utf-8')) { - file.once('drain', () => { - file.end(complete); - }); - } else { - file.end(complete); - } - }); -} - -module.exports.appender = fileAppender; module.exports.configure = configure; -module.exports.shutdown = shutdown; diff --git a/lib/appenders/fileSync.js b/lib/appenders/fileSync.js index dab551c3..5f22dd64 100755 --- a/lib/appenders/fileSync.js +++ b/lib/appenders/fileSync.js @@ -1,38 +1,70 @@ -'use strict'; - const debug = require('debug')('log4js:fileSync'); -const layouts = require('../layouts'); const path = require('path'); const fs = require('fs'); const os = require('os'); -const eol = os.EOL || '\n'; +const eol = os.EOL; + +function touchFile(file, options) { + // attempt to create the directory + const mkdir = (dir) => { + try { + return fs.mkdirSync(dir, { recursive: true }); + } catch (e) { + // backward-compatible fs.mkdirSync for nodejs pre-10.12.0 (without recursive option) + // recursive creation of parent first + if (e.code === 'ENOENT') { + mkdir(path.dirname(dir)); + return mkdir(dir); + } -class RollingFileSync { - constructor(filename, size, backups, options) { - debug('In RollingFileStream'); + // throw error for all except EEXIST and EROFS (read-only filesystem) + if (e.code !== 'EEXIST' && e.code !== 'EROFS') { + throw e; + } - function throwErrorIfArgumentsAreNotValid() { - if (!filename || !size || size <= 0) { - throw new Error('You must specify a filename and file size'); + // EEXIST: throw if file and not directory + // EROFS : throw if directory not found + else { + try { + if (fs.statSync(dir).isDirectory()) { + return dir; + } + throw e; + } catch (err) { + throw e; + } } } + }; + mkdir(path.dirname(file)); + + // try to throw EISDIR, EROFS, EACCES + fs.appendFileSync(file, '', { mode: options.mode, flag: options.flags }); +} + +class RollingFileSync { + constructor(filename, maxLogSize, backups, options) { + debug('In RollingFileStream'); - throwErrorIfArgumentsAreNotValid(); + if (maxLogSize < 0) { + throw new Error(`maxLogSize (${maxLogSize}) should be > 0`); + } this.filename = filename; - this.size = size; - this.backups = backups || 1; - this.options = options || { encoding: 'utf8', mode: parseInt('0644', 8), flags: 'a' }; // eslint-disable-line + this.size = maxLogSize; + this.backups = backups; + this.options = options; this.currentSize = 0; function currentFileSize(file) { let fileSize = 0; + try { fileSize = fs.statSync(file).size; } catch (e) { // file does not exist - fs.appendFileSync(filename, ''); + touchFile(file, options); } return fileSize; } @@ -41,7 +73,11 @@ 
class RollingFileSync { } shouldRoll() { - debug('should roll with current size %d, and max size %d', this.currentSize, this.size); + debug( + 'should roll with current size %d, and max size %d', + this.currentSize, + this.size + ); return this.currentSize >= this.size; } @@ -54,23 +90,21 @@ class RollingFileSync { } function index(filename_) { - return parseInt(filename_.substring((`${path.basename(filename)}.`).length), 10) || 0; + return ( + parseInt(filename_.slice(`${path.basename(filename)}.`.length), 10) || 0 + ); } function byIndex(a, b) { - if (index(a) > index(b)) { - return 1; - } else if (index(a) < index(b)) { - return -1; - } - - return 0; + return index(a) - index(b); } function increaseFileIndex(fileToRename) { const idx = index(fileToRename); debug(`Index of ${fileToRename} is ${idx}`); - if (idx < that.backups) { + if (that.backups === 0) { + fs.truncateSync(filename, 0); + } else if (idx < that.backups) { // on windows, you can get a EEXIST error if you rename a file to an existing file // so, we'll try to delete the file we're renaming to first try { @@ -80,7 +114,10 @@ class RollingFileSync { } debug(`Renaming ${fileToRename} -> ${filename}.${idx + 1}`); - fs.renameSync(path.join(path.dirname(filename), fileToRename), `${filename}.${idx + 1}`); + fs.renameSync( + path.join(path.dirname(filename), fileToRename), + `${filename}.${idx + 1}` + ); } } @@ -89,18 +126,21 @@ class RollingFileSync { debug('Renaming the old files'); const files = fs.readdirSync(path.dirname(filename)); - files.filter(justTheseFiles).sort(byIndex).reverse().forEach(increaseFileIndex); + files + .filter(justTheseFiles) + .sort(byIndex) + .reverse() + .forEach(increaseFileIndex); } debug('Rolling, rolling, rolling'); renameTheFiles(); } - /* eslint no-unused-vars:0 */ + // eslint-disable-next-line no-unused-vars write(chunk, encoding) { const that = this; - function writeTheChunk() { debug('writing the chunk to the file'); that.currentSize += chunk.length; @@ -109,7 +149,6 @@ class RollingFileSync { debug('in write'); - if (this.shouldRoll()) { this.currentSize = 0; this.roll(this.filename); @@ -122,46 +161,66 @@ class RollingFileSync { /** * File Appender writing the logs to a text file. Supports rolling of logs by size. * - * @param file file log messages will be written to + * @param file the file log messages will be written to * @param layout a function that takes a logevent and returns a string * (defaults to basicLayout). * @param logSize - the maximum size (in bytes) for a log file, * if not provided then logs won't be rotated. 
* @param numBackups - the number of log files to keep after logSize * has been reached (default 5) - * @param timezoneOffset - optional timezone offset in minutes - * (default system local) + * @param options - options to be passed to the underlying stream + * @param timezoneOffset - optional timezone offset in minutes (default system local) */ -function fileAppender(file, layout, logSize, numBackups, timezoneOffset) { - debug('fileSync appender created'); +function fileAppender( + file, + layout, + logSize, + numBackups, + options, + timezoneOffset +) { + if (typeof file !== 'string' || file.length === 0) { + throw new Error(`Invalid filename: ${file}`); + } else if (file.endsWith(path.sep)) { + throw new Error(`Filename is a directory: ${file}`); + } else if (file.indexOf(`~${path.sep}`) === 0) { + // handle ~ expansion: https://github.com/nodejs/node/issues/684 + // exclude ~ and ~filename as these can be valid files + file = file.replace('~', os.homedir()); + } file = path.normalize(file); - layout = layout || layouts.basicLayout; - numBackups = numBackups === undefined ? 5 : numBackups; - // there has to be at least one backup if logSize has been specified - numBackups = numBackups === 0 ? 1 : numBackups; + numBackups = !numBackups && numBackups !== 0 ? 5 : numBackups; + + debug( + 'Creating fileSync appender (', + file, + ', ', + logSize, + ', ', + numBackups, + ', ', + options, + ', ', + timezoneOffset, + ')' + ); function openTheStream(filePath, fileSize, numFiles) { let stream; if (fileSize) { - stream = new RollingFileSync( - filePath, - fileSize, - numFiles - ); + stream = new RollingFileSync(filePath, fileSize, numFiles, options); } else { - stream = (((f) => { - // create file if it doesn't exist - if (!fs.existsSync(f)) { - fs.appendFileSync(f, ''); - } + stream = ((f) => { + // touch the file to apply flags (like w to truncate the file) + touchFile(f, options); return { write(data) { fs.appendFileSync(f, data); - } + }, }; - }))(filePath); + })(filePath); } return stream; @@ -174,24 +233,26 @@ function fileAppender(file, layout, logSize, numBackups, timezoneOffset) { }; } -function configure(config, options) { - let layout; +function configure(config, layouts) { + let layout = layouts.basicLayout; if (config.layout) { layout = layouts.layout(config.layout.type, config.layout); } - if (options && options.cwd && !config.absolute) { - config.filename = path.join(options.cwd, config.filename); - } + const options = { + flags: config.flags || 'a', + encoding: config.encoding || 'utf8', + mode: config.mode || 0o600, + }; return fileAppender( config.filename, layout, config.maxLogSize, config.backups, + options, config.timezoneOffset ); } -module.exports.appender = fileAppender; module.exports.configure = configure; diff --git a/lib/appenders/gelf.js b/lib/appenders/gelf.js deleted file mode 100644 index eb809edc..00000000 --- a/lib/appenders/gelf.js +++ /dev/null @@ -1,159 +0,0 @@ -'use strict'; - -const zlib = require('zlib'); -const layouts = require('../layouts'); -const levels = require('../levels'); -const dgram = require('dgram'); -const util = require('util'); -const OS = require('os'); -const debug = require('debug')('log4js:gelf'); - -/* eslint no-unused-vars:0 */ -const LOG_EMERG = 0; // system is unusable(unused) -const LOG_ALERT = 1; // action must be taken immediately(unused) -const LOG_CRIT = 2; // critical conditions -const LOG_ERROR = 3; // error conditions -const LOG_WARNING = 4; // warning conditions -const LOG_NOTICE = 5; // normal, but significant, 
condition(unused) -const LOG_INFO = 6; // informational message -const LOG_DEBUG = 7; // debug-level message - -const levelMapping = {}; -levelMapping[levels.ALL] = LOG_DEBUG; -levelMapping[levels.TRACE] = LOG_DEBUG; -levelMapping[levels.DEBUG] = LOG_DEBUG; -levelMapping[levels.INFO] = LOG_INFO; -levelMapping[levels.WARN] = LOG_WARNING; -levelMapping[levels.ERROR] = LOG_ERROR; -levelMapping[levels.FATAL] = LOG_CRIT; - -let client; - -/** - * GELF appender that supports sending UDP packets to a GELF compatible server such as Graylog - * - * @param layout a function that takes a logevent and returns a string (defaults to none). - * @param host - host to which to send logs (default:localhost) - * @param port - port at which to send logs to (default:12201) - * @param hostname - hostname of the current host (default:OS hostname) - * @param facility - facility to log to (default:nodejs-server) - */ -/* eslint no-underscore-dangle:0 */ -function gelfAppender(layout, host, port, hostname, facility) { - let config; - let customFields; - if (typeof host === 'object') { - config = host; - host = config.host; - port = config.port; - hostname = config.hostname; - facility = config.facility; - customFields = config.customFields; - } - - host = host || 'localhost'; - port = port || 12201; - hostname = hostname || OS.hostname(); - layout = layout || layouts.messagePassThroughLayout; - - const defaultCustomFields = customFields || {}; - - if (facility) { - defaultCustomFields._facility = facility; - } - - client = dgram.createSocket('udp4'); - - process.on('exit', () => { - if (client) client.close(); - }); - - /** - * Add custom fields (start with underscore ) - * - if the first object passed to the logger contains 'GELF' field, - * copy the underscore fields to the message - * @param loggingEvent - * @param msg - */ - function addCustomFields(loggingEvent, msg) { - /* append defaultCustomFields firsts */ - Object.keys(defaultCustomFields).forEach((key) => { - // skip _id field for graylog2, skip keys not starts with UNDERSCORE - if (key.match(/^_/) && key !== '_id') { - msg[key] = defaultCustomFields[key]; - } - }); - - /* append custom fields per message */ - const data = loggingEvent.data; - if (!Array.isArray(data) || data.length === 0) return; - const firstData = data[0]; - - if (!firstData.GELF) return; // identify with GELF field defined - // Remove the GELF key, some gelf supported logging systems drop the message with it - delete firstData.GELF; - Object.keys(firstData).forEach((key) => { - // skip _id field for graylog2, skip keys not starts with UNDERSCORE - if (key.match(/^_/) || key !== '_id') { - msg[key] = firstData[key]; - } - }); - - /* the custom field object should be removed, so it will not be looged by the later appenders */ - loggingEvent.data.shift(); - } - - function preparePacket(loggingEvent) { - const msg = {}; - addCustomFields(loggingEvent, msg); - msg.short_message = layout(loggingEvent); - - msg.version = '1.1'; - msg.timestamp = msg.timestamp || new Date().getTime() / 1000; // log should use millisecond - msg.host = hostname; - msg.level = levelMapping[loggingEvent.level || levels.DEBUG]; - return msg; - } - - function sendPacket(packet) { - client.send(packet, 0, packet.length, port, host, (err) => { - if (err) { - console.error(err); - } - }); - } - - return (loggingEvent) => { - const message = preparePacket(loggingEvent); - zlib.gzip(new Buffer(JSON.stringify(message)), (err, packet) => { - if (err) { - console.error(err.stack); - } else { - if (packet.length > 8192) { 
// eslint-disable-line - debug(`Message packet length (${packet.length}) is larger than 8k. Not sending`); - } else { - sendPacket(packet); - } - } - }); - }; -} - -function configure(config) { - let layout; - if (config.layout) { - layout = layouts.layout(config.layout.type, config.layout); - } - return gelfAppender(layout, config); -} - -function shutdown(cb) { - if (client) { - client.close(cb); - client = null; - } -} - -module.exports.appender = gelfAppender; -module.exports.configure = configure; -module.exports.shutdown = shutdown; diff --git a/lib/appenders/hipchat.js b/lib/appenders/hipchat.js deleted file mode 100644 index 8c3a3bea..00000000 --- a/lib/appenders/hipchat.js +++ /dev/null @@ -1,88 +0,0 @@ -'use strict'; - -const hipchat = require('hipchat-notifier'); -const layouts = require('../layouts'); - -module.exports.name = 'hipchat'; -module.exports.appender = hipchatAppender; -module.exports.configure = hipchatConfigure; - -/** - @invoke as - - log4js.configure({ - 'appenders': [ - { - 'type' : 'hipchat', - 'hipchat_token': '< User token with Notification Privileges >', - 'hipchat_room': '< Room ID or Name >', - // optionl - 'hipchat_from': '[ additional from label ]', - 'hipchat_notify': '[ notify boolean to bug people ]', - 'hipchat_host' : 'api.hipchat.com' - } - ] - }); - - var logger = log4js.getLogger('hipchat'); - logger.warn('Test Warn message'); - - @invoke - */ -/* eslint no-unused-vars:0 */ -function hipchatNotifierResponseCallback(err, response, body) { - if (err) { - throw err; - } -} - -function hipchatAppender(config) { - const notifier = hipchat.make(config.hipchat_room, config.hipchat_token); - - // @lint W074 This function's cyclomatic complexity is too high. (10) - return (loggingEvent) => { - let notifierFn; - - notifier.setRoom(config.hipchat_room); - notifier.setFrom(config.hipchat_from || ''); - notifier.setNotify(config.hipchat_notify || false); - - if (config.hipchat_host) { - notifier.setHost(config.hipchat_host); - } - - switch (loggingEvent.level.toString()) { - case 'TRACE': - case 'DEBUG': - notifierFn = 'info'; - break; - case 'WARN': - notifierFn = 'warning'; - break; - case 'ERROR': - case 'FATAL': - notifierFn = 'failure'; - break; - default: - notifierFn = 'success'; - } - - // @TODO, re-work in timezoneOffset ? 
- const layoutMessage = config.layout(loggingEvent); - - // dispatch hipchat api request, do not return anything - // [overide hipchatNotifierResponseCallback] - notifier[notifierFn](layoutMessage, config.hipchat_response_callback || - hipchatNotifierResponseCallback); - }; -} - -function hipchatConfigure(config) { - let layout; - - if (!config.layout) { - config.layout = layouts.messagePassThroughLayout; - } - - return hipchatAppender(config, layout); -} diff --git a/lib/appenders/ignoreBrowser.js b/lib/appenders/ignoreBrowser.js new file mode 100644 index 00000000..e69de29b diff --git a/lib/appenders/index.js b/lib/appenders/index.js new file mode 100644 index 00000000..aa81599c --- /dev/null +++ b/lib/appenders/index.js @@ -0,0 +1,182 @@ +const path = require('path'); +const debug = require('debug')('log4js:appenders'); +const configuration = require('../configuration'); +const clustering = require('../clustering'); +const levels = require('../levels'); +const layouts = require('../layouts'); +const adapters = require('./adapters'); + +// pre-load the core appenders so that webpack can find them +const coreAppenders = new Map(); +coreAppenders.set('console', require('./console')); +coreAppenders.set('stdout', require('./stdout')); +coreAppenders.set('stderr', require('./stderr')); +coreAppenders.set('logLevelFilter', require('./logLevelFilter')); +coreAppenders.set('categoryFilter', require('./categoryFilter')); +coreAppenders.set('noLogFilter', require('./noLogFilter')); +coreAppenders.set('file', require('./file')); +coreAppenders.set('dateFile', require('./dateFile')); +coreAppenders.set('fileSync', require('./fileSync')); +coreAppenders.set('tcp', require('./tcp')); + +const appenders = new Map(); + +const tryLoading = (modulePath, config) => { + let resolvedPath; + try { + const modulePathCJS = `${modulePath}.cjs`; + resolvedPath = require.resolve(modulePathCJS); + debug('Loading module from ', modulePathCJS); + } catch (e) { + resolvedPath = modulePath; + debug('Loading module from ', modulePath); + } + try { + // eslint-disable-next-line global-require, import/no-dynamic-require + return require(resolvedPath); + } catch (e) { + // if the module was found, and we still got an error, then raise it + configuration.throwExceptionIf( + config, + e.code !== 'MODULE_NOT_FOUND', + `appender "${modulePath}" could not be loaded (error was: ${e})` + ); + return undefined; + } +}; + +const loadAppenderModule = (type, config) => + coreAppenders.get(type) || + tryLoading(`./${type}`, config) || + tryLoading(type, config) || + (require.main && + require.main.filename && + tryLoading(path.join(path.dirname(require.main.filename), type), config)) || + tryLoading(path.join(process.cwd(), type), config); + +const appendersLoading = new Set(); + +const getAppender = (name, config) => { + if (appenders.has(name)) return appenders.get(name); + if (!config.appenders[name]) return false; + if (appendersLoading.has(name)) + throw new Error(`Dependency loop detected for appender ${name}.`); + appendersLoading.add(name); + + debug(`Creating appender ${name}`); + // eslint-disable-next-line no-use-before-define + const appender = createAppender(name, config); + appendersLoading.delete(name); + appenders.set(name, appender); + return appender; +}; + +const createAppender = (name, config) => { + const appenderConfig = config.appenders[name]; + const appenderModule = appenderConfig.type.configure + ? 
appenderConfig.type + : loadAppenderModule(appenderConfig.type, config); + configuration.throwExceptionIf( + config, + configuration.not(appenderModule), + `appender "${name}" is not valid (type "${appenderConfig.type}" could not be found)` + ); + if (appenderModule.appender) { + process.emitWarning( + `Appender ${appenderConfig.type} exports an appender function.`, + 'DeprecationWarning', + 'log4js-node-DEP0001' + ); + debug( + '[log4js-node-DEP0001]', + `DEPRECATION: Appender ${appenderConfig.type} exports an appender function.` + ); + } + if (appenderModule.shutdown) { + process.emitWarning( + `Appender ${appenderConfig.type} exports a shutdown function.`, + 'DeprecationWarning', + 'log4js-node-DEP0002' + ); + debug( + '[log4js-node-DEP0002]', + `DEPRECATION: Appender ${appenderConfig.type} exports a shutdown function.` + ); + } + + debug(`${name}: clustering.isMaster ? ${clustering.isMaster()}`); + debug( + // eslint-disable-next-line global-require + `${name}: appenderModule is ${require('util').inspect(appenderModule)}` + ); + return clustering.onlyOnMaster( + () => { + debug( + `calling appenderModule.configure for ${name} / ${appenderConfig.type}` + ); + return appenderModule.configure( + adapters.modifyConfig(appenderConfig), + layouts, + (appender) => getAppender(appender, config), + levels + ); + }, + /* istanbul ignore next: fn never gets called by non-master yet needed to pass config validation */ () => {} + ); +}; + +const setup = (config) => { + appenders.clear(); + appendersLoading.clear(); + if (!config) { + return; + } + + const usedAppenders = []; + Object.values(config.categories).forEach((category) => { + usedAppenders.push(...category.appenders); + }); + Object.keys(config.appenders).forEach((name) => { + // dodgy hard-coding of special case for tcp-server and multiprocess which may not have + // any categories associated with it, but needs to be started up anyway + if ( + usedAppenders.includes(name) || + config.appenders[name].type === 'tcp-server' || + config.appenders[name].type === 'multiprocess' + ) { + getAppender(name, config); + } + }); +}; + +const init = () => { + setup(); +}; +init(); + +configuration.addListener((config) => { + configuration.throwExceptionIf( + config, + configuration.not(configuration.anObject(config.appenders)), + 'must have a property "appenders" of type object.' + ); + const appenderNames = Object.keys(config.appenders); + configuration.throwExceptionIf( + config, + configuration.not(appenderNames.length), + 'must define at least one appender.' + ); + + appenderNames.forEach((name) => { + configuration.throwExceptionIf( + config, + configuration.not(config.appenders[name].type), + `appender "${name}" is not valid (must be an object with property "type")` + ); + }); +}); + +configuration.addListener(setup); + +module.exports = appenders; +module.exports.init = init; diff --git a/lib/appenders/logFacesAppender.js b/lib/appenders/logFacesAppender.js deleted file mode 100644 index ba7bda6a..00000000 --- a/lib/appenders/logFacesAppender.js +++ /dev/null @@ -1,130 +0,0 @@ -/** - * logFaces appender sends JSON formatted log events to logFaces receivers. - * There are two types of receivers supported - raw UDP sockets (for server side apps), - * and HTTP (for client side apps). 
Depending on the usage, this appender - * requires either of the two: - * - * For UDP require 'dgram', see 'https://nodejs.org/api/dgram.html' - * For HTTP require 'axios', see 'https://www.npmjs.com/package/axios' - * - * Make sure your project have relevant dependancy installed before using this appender. - */ -/* eslint global-require:0 */ - -'use strict'; - -const util = require('util'); - -const context = {}; - -function datagram(config) { - const sock = require('dgram').createSocket('udp4'); - const host = config.remoteHost || '127.0.0.1'; - const port = config.port || 55201; - - return function (event) { - const buff = new Buffer(JSON.stringify(event)); - sock.send(buff, 0, buff.length, port, host, (err) => { - if (err) { - console.error('log4js.logFacesAppender failed to %s:%d, error: %s', - host, port, err); - } - }); - }; -} - -function servlet(config) { - const axios = require('axios').create(); - axios.defaults.baseURL = config.url; - axios.defaults.timeout = config.timeout || 5000; - axios.defaults.headers = { 'Content-Type': 'application/json' }; - axios.defaults.withCredentials = true; - - return function (lfsEvent) { - axios.post('', lfsEvent) - .then((response) => { - if (response.status !== 200) { - console.error('log4js.logFacesAppender post to %s failed: %d', - config.url, response.status); - } - }) - .catch((response) => { - console.error('log4js.logFacesAppender post to %s excepted: %s', - config.url, response.status); - }); - }; -} - -/** - * For UDP (node.js) use the following configuration params: - * { -* "type": "logFacesAppender", // must be present for instantiation -* "application": "LFS-TEST", // name of the application (domain) -* "remoteHost": "127.0.0.1", // logFaces server address (hostname) -* "port": 55201 // UDP receiver listening port -* } - * - * For HTTP (browsers or node.js) use the following configuration params: - * { -* "type": "logFacesAppender", // must be present for instantiation -* "application": "LFS-TEST", // name of the application (domain) -* "url": "http://lfs-server/logs", // logFaces receiver servlet URL -* } - */ -function logFacesAppender(config) { - let send = config.send; - if (send === undefined) { - send = (config.url === undefined) ? datagram(config) : servlet(config); - } - - return function log(event) { - // convert to logFaces compact json format - const lfsEvent = { - a: config.application || '', // application name - t: event.startTime.getTime(), // time stamp - p: event.level.levelStr, // level (priority) - g: event.categoryName, // logger name - m: format(event.data) // message text - }; - - // add context variables if exist - Object.keys(context).forEach((key) => { - lfsEvent[`p_${key}`] = context[key]; - }); - - // send to server - send(lfsEvent); - }; -} - -function configure(config) { - return logFacesAppender(config); -} - -function setContext(key, value) { - context[key] = value; -} - -function format(logData) { - const data = Array.isArray(logData) ? 
- logData : Array.prototype.slice.call(arguments); - return util.format.apply(util, wrapErrorsWithInspect(data)); -} - -function wrapErrorsWithInspect(items) { - return items.map((item) => { - if ((item instanceof Error) && item.stack) { - return { - inspect: function () { - return `${util.format(item)}\n${item.stack}`; - } - }; - } - - return item; - }); -} - -module.exports.appender = logFacesAppender; -module.exports.configure = configure; -module.exports.setContext = setContext; diff --git a/lib/appenders/logLevelFilter.js b/lib/appenders/logLevelFilter.js index ea0d4202..2e759cd0 100644 --- a/lib/appenders/logLevelFilter.js +++ b/lib/appenders/logLevelFilter.js @@ -1,24 +1,20 @@ -'use strict'; - -const levels = require('../levels'); -const log4js = require('../log4js'); - -function logLevelFilter(minLevelString, maxLevelString, appender) { - const minLevel = levels.toLevel(minLevelString); - const maxLevel = levels.toLevel(maxLevelString, levels.FATAL); +function logLevelFilter(minLevelString, maxLevelString, appender, levels) { + const minLevel = levels.getLevel(minLevelString); + const maxLevel = levels.getLevel(maxLevelString, levels.FATAL); return (logEvent) => { const eventLevel = logEvent.level; - if (eventLevel.isGreaterThanOrEqualTo(minLevel) && eventLevel.isLessThanOrEqualTo(maxLevel)) { + if ( + minLevel.isLessThanOrEqualTo(eventLevel) && + maxLevel.isGreaterThanOrEqualTo(eventLevel) + ) { appender(logEvent); } }; } -function configure(config, options) { - log4js.loadAppender(config.appender.type); - const appender = log4js.appenderMakers[config.appender.type](config.appender, options); - return logLevelFilter(config.level, config.maxLevel, appender); +function configure(config, layouts, findAppender, levels) { + const appender = findAppender(config.appender); + return logLevelFilter(config.level, config.maxLevel, appender, levels); } -module.exports.appender = logLevelFilter; module.exports.configure = configure; diff --git a/lib/appenders/loggly.js b/lib/appenders/loggly.js deleted file mode 100644 index 77afd06b..00000000 --- a/lib/appenders/loggly.js +++ /dev/null @@ -1,121 +0,0 @@ -/* eslint no-prototype-builtins:1,no-restricted-syntax:[1, "ForInStatement"] */ - -'use strict'; - -const layouts = require('../layouts'); -const loggly = require('loggly'); -const os = require('os'); - -const passThrough = layouts.messagePassThroughLayout; - -let openRequests = 0; -let shutdownCB; - -function isAnyObject(value) { - return value !== null && (typeof value === 'object' || typeof value === 'function'); -} - -function numKeys(obj) { - let res = 0; - for (const key in obj) { - if (obj.hasOwnProperty(key)) { - res++; // eslint-disable-line no-plusplus - } - } - return res; -} - -/** - * @param msgListArgs - * @returns Object{ deTaggedMsg: [...], additionalTags: [...] } - */ -function processTags(msgListArgs) { - const msgList = (msgListArgs.length === 1 ? [msgListArgs[0]] : msgListArgs); - - return msgList.reduce((accumulate, element) => { - if (isAnyObject(element) && Array.isArray(element.tags) && numKeys(element) === 1) { - accumulate.additionalTags = accumulate.additionalTags.concat(element.tags); - } else { - accumulate.deTaggedData.push(element); - } - return accumulate; - }, { deTaggedData: [], additionalTags: [] }); -} - -/** - * Loggly Appender. Sends logging events to Loggly using node-loggly, optionally adding tags. 
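
// Example configuration for the reworked logLevelFilter above; a sketch, not part of
// this patch. The filter's "appender" property names another appender in the same
// config, and "level"/"maxLevel" bound which events are forwarded to it (maxLevel
// defaults to FATAL). Appender names below are hypothetical.
const log4js = require('log4js');

log4js.configure({
  appenders: {
    everything: { type: 'stdout' },
    alerts: { type: 'stderr' },
    'errors-only': { type: 'logLevelFilter', appender: 'alerts', level: 'error' },
  },
  categories: {
    // ERROR and FATAL go to stderr via the filter; all events still go to stdout
    default: { appenders: ['errors-only', 'everything'], level: 'debug' },
  },
});
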
- * - * This appender will scan the msg from the logging event, and pull out any argument of the - * shape `{ tags: [] }` so that it's possible to add tags in a normal logging call. - * - * For example: - * - * logger.info({ tags: ['my-tag-1', 'my-tag-2'] }, 'Some message', someObj, ...) - * - * And then this appender will remove the tags param and append it to the config.tags. - * - * @param config object with loggly configuration data - * { - * token: 'your-really-long-input-token', - * subdomain: 'your-subdomain', - * tags: ['loggly-tag1', 'loggly-tag2', .., 'loggly-tagn'] - * } - * @param layout a function that takes a logevent and returns a string (defaults to objectLayout). - */ -function logglyAppender(config, layout) { - const client = loggly.createClient(config); - if (!layout) layout = passThrough; - - return (loggingEvent) => { - const result = processTags(loggingEvent.data); - const deTaggedData = result.deTaggedData; - const additionalTags = result.additionalTags; - - // Replace the data property with the deTaggedData - loggingEvent.data = deTaggedData; - - const msg = layout(loggingEvent); - - openRequests += 1; - - client.log({ - msg: msg, - level: loggingEvent.level.levelStr, - category: loggingEvent.categoryName, - hostname: os.hostname().toString(), - }, additionalTags, (error) => { - if (error) { - console.error('log4js.logglyAppender - error occurred: ', error); - } - - openRequests -= 1; - - if (shutdownCB && openRequests === 0) { - shutdownCB(); - - shutdownCB = undefined; - } - }); - }; -} - -function configure(config) { - let layout; - if (config.layout) { - layout = layouts.layout(config.layout.type, config.layout); - } - return logglyAppender(config, layout); -} - -function shutdown(cb) { - if (openRequests === 0) { - cb(); - } else { - shutdownCB = cb; - } -} - -module.exports.name = 'loggly'; -module.exports.appender = logglyAppender; -module.exports.configure = configure; -module.exports.shutdown = shutdown; diff --git a/lib/appenders/logstashUDP.js b/lib/appenders/logstashUDP.js deleted file mode 100644 index 7805e09c..00000000 --- a/lib/appenders/logstashUDP.js +++ /dev/null @@ -1,79 +0,0 @@ -'use strict'; - -const layouts = require('../layouts'); -const dgram = require('dgram'); -const util = require('util'); - -function logstashUDP(config, layout) { - const udp = dgram.createSocket('udp4'); - const type = config.logType ? config.logType : config.category; - layout = layout || layouts.dummyLayout; - - if (!config.fields) { - config.fields = {}; - } - - return function log(loggingEvent) { - /* - https://gist.github.com/jordansissel/2996677 - { - 'message' => 'hello world', - '@version' => '1', - '@timestamp' => '2014-04-22T23:03:14.111Z', - 'type' => 'stdin', - 'host' => 'hello.local' - } - @timestamp is the ISO8601 high-precision timestamp for the event. - @version is the version number of this json schema - Every other field is valid and fine. 
- */ - /* eslint no-prototype-builtins:1,no-restricted-syntax:[1, "ForInStatement"] */ - if (loggingEvent.data.length > 1) { - const secondEvData = loggingEvent.data[1]; - for (const key in secondEvData) { - if (secondEvData.hasOwnProperty(key)) { - config.fields[key] = secondEvData[key]; - } - } - } - config.fields.level = loggingEvent.level.levelStr; - config.fields.category = loggingEvent.categoryName; - - const logObject = { - '@version': '1', - '@timestamp': (new Date(loggingEvent.startTime)).toISOString(), - type: type, - message: layout(loggingEvent), - fields: config.fields - }; - - const keys = Object.keys(config.fields); - for (let i = 0, length = keys.length; i < length; i += 1) { - logObject[keys[i]] = config.fields[keys[i]]; - } - sendLog(udp, config.host, config.port, logObject); - }; -} - -function sendLog(udp, host, port, logObject) { - const buffer = new Buffer(JSON.stringify(logObject)); - - /* eslint no-unused-vars:0 */ - udp.send(buffer, 0, buffer.length, port, host, (err, bytes) => { - if (err) { - console.error('log4js.logstashUDP - %s:%p Error: %s', host, port, util.inspect(err)); - } - }); -} - -function configure(config) { - let layout; - if (config.layout) { - layout = layouts.layout(config.layout.type, config.layout); - } - - return logstashUDP(config, layout); -} - -module.exports.appender = logstashUDP; -module.exports.configure = configure; diff --git a/lib/appenders/mailgun.js b/lib/appenders/mailgun.js deleted file mode 100644 index 11341ff8..00000000 --- a/lib/appenders/mailgun.js +++ /dev/null @@ -1,45 +0,0 @@ -'use strict'; - -const layouts = require('../layouts'); -const mailgunFactory = require('mailgun-js'); - -let layout; -let config; -let mailgun; - -function mailgunAppender(_config, _layout) { - config = _config; - layout = _layout || layouts.basicLayout; - - return (loggingEvent) => { - const data = { - from: _config.from, - to: _config.to, - subject: _config.subject, - text: layout(loggingEvent, config.timezoneOffset) - }; - - /* eslint no-unused-vars:0 */ - mailgun.messages().send(data, (error, body) => { - if (error !== null) console.error('log4js.mailgunAppender - Error happened', error); - }); - }; -} - -function configure(_config) { - config = _config; - - if (_config.layout) { - layout = layouts.layout(_config.layout.type, _config.layout); - } - - mailgun = mailgunFactory({ - apiKey: _config.apikey, - domain: _config.domain - }); - - return mailgunAppender(_config, layout); -} - -module.exports.appender = mailgunAppender; -module.exports.configure = configure; diff --git a/lib/appenders/multiFile.js b/lib/appenders/multiFile.js new file mode 100644 index 00000000..1a0746fc --- /dev/null +++ b/lib/appenders/multiFile.js @@ -0,0 +1,91 @@ +const debug = require('debug')('log4js:multiFile'); +const path = require('path'); +const fileAppender = require('./file'); + +const findFileKey = (property, event) => + event[property] || event.context[property]; + +module.exports.configure = (config, layouts) => { + debug('Creating a multi-file appender'); + const files = new Map(); + const timers = new Map(); + + function checkForTimeout(fileKey) { + const timer = timers.get(fileKey); + const app = files.get(fileKey); + /* istanbul ignore else: failsafe */ + if (timer && app) { + if (Date.now() - timer.lastUsed > timer.timeout) { + debug('%s not used for > %d ms => close', fileKey, timer.timeout); + clearInterval(timer.interval); + timers.delete(fileKey); + files.delete(fileKey); + app.shutdown((err) => { + if (err) { + debug('ignore error on file 
shutdown: %s', err.message); + } + }); + } + } else { + // will never get here as files and timers are coupled to be added and deleted at same place + debug('timer or app does not exist'); + } + } + + const appender = (logEvent) => { + const fileKey = findFileKey(config.property, logEvent); + debug('fileKey for property ', config.property, ' is ', fileKey); + if (fileKey) { + let file = files.get(fileKey); + debug('existing file appender is ', file); + if (!file) { + debug('creating new file appender'); + config.filename = path.join(config.base, fileKey + config.extension); + file = fileAppender.configure(config, layouts); + files.set(fileKey, file); + if (config.timeout) { + debug('creating new timer'); + timers.set(fileKey, { + timeout: config.timeout, + lastUsed: Date.now(), + interval: setInterval( + checkForTimeout.bind(null, fileKey), + config.timeout + ), + }); + } + } else if (config.timeout) { + debug('%s extending activity', fileKey); + timers.get(fileKey).lastUsed = Date.now(); + } + + file(logEvent); + } else { + debug('No fileKey for logEvent, quietly ignoring this log event'); + } + }; + + appender.shutdown = (cb) => { + let shutdownFunctions = files.size; + if (shutdownFunctions <= 0) { + cb(); + } + let error; + timers.forEach((timer, fileKey) => { + debug('clearing timer for ', fileKey); + clearInterval(timer.interval); + }); + files.forEach((app, fileKey) => { + debug('calling shutdown for ', fileKey); + app.shutdown((err) => { + error = error || err; + shutdownFunctions -= 1; + if (shutdownFunctions <= 0) { + cb(error); + } + }); + }); + }; + + return appender; +}; diff --git a/lib/appenders/multiprocess.js b/lib/appenders/multiprocess.js index d56a7144..06d9fb52 100644 --- a/lib/appenders/multiprocess.js +++ b/lib/appenders/multiprocess.js @@ -1,158 +1,191 @@ -'use strict'; - -const log4js = require('../log4js'); +const debug = require('debug')('log4js:multiprocess'); const net = require('net'); +const LoggingEvent = require('../LoggingEvent'); const END_MSG = '__LOG4JS__'; -const servers = []; /** * Creates a server, listening on config.loggerPort, config.loggerHost. * Output goes to config.actualAppender (config.appender is used to * set up that appender). */ -function logServer(config) { +function logServer(config, actualAppender, levels) { /** * Takes a utf-8 string, returns an object with * the correct log properties. */ function deserializeLoggingEvent(clientSocket, msg) { - let loggingEvent; - try { - loggingEvent = JSON.parse(msg); - loggingEvent.startTime = new Date(loggingEvent.startTime); - loggingEvent.level = log4js.levels.toLevel(loggingEvent.level.levelStr); - } catch (e) { - // JSON.parse failed, just log the contents probably a naughty. 
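
// Example configuration for the new multiFile appender above; a sketch, not part of
// this patch. The file key is read from the named "property" on the logging event (or
// its context), and each distinct key gets its own file at base + key + extension.
// Paths and the timeout value are hypothetical.
const log4js = require('log4js');

log4js.configure({
  appenders: {
    multi: {
      type: 'multiFile',
      base: 'logs/',
      property: 'categoryName', // one file per category
      extension: '.log',
      timeout: 30000, // optional: close files that have been idle for 30s
    },
  },
  categories: { default: { appenders: ['multi'], level: 'info' } },
});

// log4js.getLogger('orders').info('hello') would then be written to logs/orders.log
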
- loggingEvent = { - startTime: new Date(), - categoryName: 'log4js', - level: log4js.levels.ERROR, - data: ['Unable to parse log:', msg] - }; - } - + debug('(master) deserialising log event'); + const loggingEvent = LoggingEvent.deserialise(msg); loggingEvent.remoteAddress = clientSocket.remoteAddress; loggingEvent.remotePort = clientSocket.remotePort; return loggingEvent; } - const actualAppender = config.actualAppender; - - /* eslint prefer-arrow-callback:0 */ - const server = net.createServer(function serverCreated(clientSocket) { + const server = net.createServer((clientSocket) => { + debug('(master) connection received'); clientSocket.setEncoding('utf8'); let logMessage = ''; function logTheMessage(msg) { - if (logMessage.length > 0) { - actualAppender(deserializeLoggingEvent(clientSocket, msg)); - } + debug('(master) deserialising log event and sending to actual appender'); + actualAppender(deserializeLoggingEvent(clientSocket, msg)); } function chunkReceived(chunk) { + debug('(master) chunk of data received'); let event; logMessage += chunk || ''; if (logMessage.indexOf(END_MSG) > -1) { - event = logMessage.substring(0, logMessage.indexOf(END_MSG)); + event = logMessage.slice(0, logMessage.indexOf(END_MSG)); logTheMessage(event); - logMessage = logMessage.substring(event.length + END_MSG.length) || ''; + logMessage = logMessage.slice(event.length + END_MSG.length) || ''; // check for more, maybe it was a big chunk chunkReceived(); } } + function handleError(error) { + const loggingEvent = { + startTime: new Date(), + categoryName: 'log4js', + level: levels.ERROR, + data: ['A worker log process hung up unexpectedly', error], + remoteAddress: clientSocket.remoteAddress, + remotePort: clientSocket.remotePort, + }; + actualAppender(loggingEvent); + } + clientSocket.on('data', chunkReceived); clientSocket.on('end', chunkReceived); + clientSocket.on('error', handleError); }); - server.listen(config.loggerPort || 5000, config.loggerHost || 'localhost', function () { - servers.push(server); - // allow the process to exit, if this is the only socket active - server.unref(); - }); + server.listen( + config.loggerPort || 5000, + config.loggerHost || 'localhost', + (e) => { + debug('(master) master server listening, error was ', e); + // allow the process to exit, if this is the only socket active + server.unref(); + } + ); + + function app(event) { + debug('(master) log event sent directly to actual appender (local event)'); + return actualAppender(event); + } - return actualAppender; + app.shutdown = function (cb) { + debug('(master) master shutdown called, closing server'); + server.close(cb); + }; + + return app; } function workerAppender(config) { let canWrite = false; const buffer = []; let socket; + let shutdownAttempts = 3; function write(loggingEvent) { - // JSON.stringify(new Error('test')) returns {}, which is not really useful for us. - // The following allows us to serialize errors correctly. 
- // Validate that we really are in this case - if (loggingEvent && loggingEvent.stack && JSON.stringify(loggingEvent) === '{}') { - loggingEvent = { stack: loggingEvent.stack }; - } - socket.write(JSON.stringify(loggingEvent), 'utf8'); + debug('(worker) Writing log event to socket'); + socket.write(loggingEvent.serialise(), 'utf8'); socket.write(END_MSG, 'utf8'); } function emptyBuffer() { let evt; - - /* eslint no-cond-assign:0 */ + debug('(worker) emptying worker buffer'); while ((evt = buffer.shift())) { write(evt); } } function createSocket() { - socket = net.createConnection(config.loggerPort || 5000, config.loggerHost || 'localhost'); + debug( + `(worker) worker appender creating socket to ${ + config.loggerHost || 'localhost' + }:${config.loggerPort || 5000}` + ); + socket = net.createConnection( + config.loggerPort || 5000, + config.loggerHost || 'localhost' + ); socket.on('connect', () => { + debug('(worker) worker socket connected'); emptyBuffer(); canWrite = true; }); socket.on('timeout', socket.end.bind(socket)); - // don't bother listening for 'error', 'close' gets called after that anyway + socket.on('error', (e) => { + debug('connection error', e); + canWrite = false; + emptyBuffer(); + }); socket.on('close', createSocket); } createSocket(); - return function log(loggingEvent) { + function log(loggingEvent) { if (canWrite) { write(loggingEvent); } else { + debug( + '(worker) worker buffering log event because it cannot write at the moment' + ); buffer.push(loggingEvent); } + } + log.shutdown = function (cb) { + debug('(worker) worker shutdown called'); + if (buffer.length && shutdownAttempts) { + debug('(worker) worker buffer has items, waiting 100ms to empty'); + shutdownAttempts -= 1; + setTimeout(() => { + log.shutdown(cb); + }, 100); + } else { + socket.removeAllListeners('close'); + socket.end(cb); + } }; + return log; } -function createAppender(config) { +function createAppender(config, appender, levels) { if (config.mode === 'master') { - return logServer(config); + debug('Creating master appender'); + return logServer(config, appender, levels); } + debug('Creating worker appender'); return workerAppender(config); } -function configure(config, options) { - let actualAppender; - if (config.appender && config.mode === 'master') { - log4js.loadAppender(config.appender.type); - actualAppender = log4js.appenderMakers[config.appender.type](config.appender, options); - config.actualAppender = actualAppender; +function configure(config, layouts, findAppender, levels) { + let appender; + debug(`configure with mode = ${config.mode}`); + if (config.mode === 'master') { + if (!config.appender) { + debug(`no appender found in config ${config}`); + throw new Error('multiprocess master must have an "appender" defined'); + } + debug(`actual appender is ${config.appender}`); + appender = findAppender(config.appender); + if (!appender) { + debug(`actual appender "${config.appender}" not found`); + throw new Error( + `multiprocess master appender "${config.appender}" not defined` + ); + } } - return createAppender(config); -} - -function shutdown(done) { - let toBeClosed = servers.length; - servers.forEach(function (server) { - server.close(function () { - toBeClosed -= 1; - if (toBeClosed < 1) { - done(); - } - }); - }); + return createAppender(config, appender, levels); } -module.exports.appender = createAppender; module.exports.configure = configure; -module.exports.shutdown = shutdown; diff --git a/lib/appenders/noLogFilter.js b/lib/appenders/noLogFilter.js new file mode 100644 
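
// Example of the reworked multiprocess appender above; a sketch, not part of this
// patch. The master names a concrete appender to write to, while workers only point at
// the master's host and port. Host names and file names are hypothetical. Note that
// index.js starts 'multiprocess' (and 'tcp-server') appenders even when no category
// references them.
const log4js = require('log4js');

// in the master process:
log4js.configure({
  appenders: {
    file: { type: 'file', filename: 'all-the-logs.log' },
    server: {
      type: 'multiprocess',
      mode: 'master',
      appender: 'file', // must name an appender defined in this config
      loggerHost: '0.0.0.0',
      loggerPort: 5000,
    },
  },
  categories: { default: { appenders: ['file'], level: 'info' } },
});

// in each worker process:
log4js.configure({
  appenders: {
    network: {
      type: 'multiprocess',
      mode: 'worker',
      loggerHost: 'log-master',
      loggerPort: 5000,
    },
  },
  categories: { default: { appenders: ['network'], level: 'info' } },
});
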
index 00000000..82129ccf --- /dev/null +++ b/lib/appenders/noLogFilter.js @@ -0,0 +1,43 @@ +const debug = require('debug')('log4js:noLogFilter'); + +/** + * The function removes empty or null regexp from the array + * @param {string[]} regexp + * @returns {string[]} a filtered string array with not empty or null regexp + */ +function removeNullOrEmptyRegexp(regexp) { + const filtered = regexp.filter((el) => el != null && el !== ''); + return filtered; +} + +/** + * Returns a function that will exclude the events in case they match + * with the regular expressions provided + * @param {(string|string[])} filters contains the regexp that will be used for the evaluation + * @param {*} appender + * @returns {function} + */ +function noLogFilter(filters, appender) { + return (logEvent) => { + debug(`Checking data: ${logEvent.data} against filters: ${filters}`); + if (typeof filters === 'string') { + filters = [filters]; + } + filters = removeNullOrEmptyRegexp(filters); + const regex = new RegExp(filters.join('|'), 'i'); + if ( + filters.length === 0 || + logEvent.data.findIndex((value) => regex.test(value)) < 0 + ) { + debug('Not excluded, sending to appender'); + appender(logEvent); + } + }; +} + +function configure(config, layouts, findAppender) { + const appender = findAppender(config.appender); + return noLogFilter(config.exclude, appender); +} + +module.exports.configure = configure; diff --git a/lib/appenders/recording.js b/lib/appenders/recording.js new file mode 100644 index 00000000..ec917139 --- /dev/null +++ b/lib/appenders/recording.js @@ -0,0 +1,32 @@ +const debug = require('debug')('log4js:recording'); + +const recordedEvents = []; + +function configure(config) { + return function (logEvent) { + debug( + `received logEvent, number of events now ${recordedEvents.length + 1}` + ); + debug('log event was ', logEvent); + if (config.maxLength && recordedEvents.length >= config.maxLength) { + recordedEvents.shift(); + } + recordedEvents.push(logEvent); + }; +} + +function replay() { + return recordedEvents.slice(); +} + +function reset() { + recordedEvents.length = 0; +} + +module.exports = { + configure, + replay, + playback: replay, + reset, + erase: reset, +}; diff --git a/lib/appenders/slack.js b/lib/appenders/slack.js deleted file mode 100644 index ae366cd9..00000000 --- a/lib/appenders/slack.js +++ /dev/null @@ -1,46 +0,0 @@ -'use strict'; - -const Slack = require('slack-node'); -const layouts = require('../layouts'); - -let layout; -let slack; - -function slackAppender(_config, _layout) { - layout = _layout || layouts.basicLayout; - - return (loggingEvent) => { - const data = { - channel_id: _config.channel_id, - text: layout(loggingEvent, _config.timezoneOffset), - icon_url: _config.icon_url, - username: _config.username - }; - - /* eslint no-unused-vars:0 */ - slack.api('chat.postMessage', { - channel: data.channel_id, - text: data.text, - icon_url: data.icon_url, - username: data.username - }, (err, response) => { - if (err) { - throw err; - } - }); - }; -} - -function configure(_config) { - if (_config.layout) { - layout = layouts.layout(_config.layout.type, _config.layout); - } - - slack = new Slack(_config.token); - - return slackAppender(_config, layout); -} - -module.exports.name = 'slack'; -module.exports.appender = slackAppender; -module.exports.configure = configure; diff --git a/lib/appenders/smtp.js b/lib/appenders/smtp.js deleted file mode 100644 index dca9a3f2..00000000 --- a/lib/appenders/smtp.js +++ /dev/null @@ -1,156 +0,0 @@ -'use strict'; - -const layouts = 
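
// Example configuration for the new noLogFilter appender above; a sketch, not part of
// this patch. "exclude" takes a regexp source string or an array of them; events whose
// data matches any of them (case-insensitively) are dropped, everything else is passed
// on to the wrapped appender. Names and patterns below are hypothetical.
const log4js = require('log4js');

log4js.configure({
  appenders: {
    everything: { type: 'file', filename: 'app.log' },
    filtered: {
      type: 'noLogFilter',
      exclude: ['password', '\\d{16}'], // drop events mentioning passwords or 16-digit numbers
      appender: 'everything',
    },
  },
  categories: { default: { appenders: ['filtered'], level: 'debug' } },
});
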
require('../layouts'); -const mailer = require('nodemailer'); -const os = require('os'); - -const logEventBuffer = []; -let subjectLayout; -let layout; - -let unsentCount = 0; -let shutdownTimeout; - -let sendInterval; -let sendTimer; - -let config; - -function sendBuffer() { - if (logEventBuffer.length > 0) { - const transportOpts = getTransportOptions(config); - const transport = mailer.createTransport(transportOpts); - const firstEvent = logEventBuffer[0]; - let body = ''; - const count = logEventBuffer.length; - while (logEventBuffer.length > 0) { - body += `${layout(logEventBuffer.shift(), config.timezoneOffset)}\n`; - } - - const msg = { - to: config.recipients, - subject: config.subject || subjectLayout(firstEvent), - headers: { Hostname: os.hostname() } - }; - - if (config.attachment.enable === true) { - msg[config.html ? 'html' : 'text'] = config.attachment.message; - msg.attachments = [ - { - filename: config.attachment.filename, - contentType: 'text/x-log', - content: body - } - ]; - } else { - msg[config.html ? 'html' : 'text'] = body; - } - - if (config.sender) { - msg.from = config.sender; - } - transport.sendMail(msg, (error) => { - if (error) { - console.error('log4js.smtpAppender - Error happened', error); - } - transport.close(); - unsentCount -= count; - }); - } -} - -function getTransportOptions() { - let transportOpts = null; - if (config.SMTP) { - transportOpts = config.SMTP; - } else if (config.transport) { - const plugin = config.transport.plugin || 'smtp'; - const transportModule = `nodemailer-${plugin}-transport`; - - /* eslint global-require:0 */ - const transporter = require(transportModule); // eslint-disable-line - transportOpts = transporter(config.transport.options); - } - - return transportOpts; -} - -function scheduleSend() { - if (!sendTimer) { - sendTimer = setTimeout(() => { - sendTimer = null; - sendBuffer(); - }, sendInterval); - } -} - -/** - * SMTP Appender. Sends logging events using SMTP protocol. - * It can either send an email on each event or group several - * logging events gathered during specified interval. - * - * @param _config appender configuration data - * config.sendInterval time between log emails (in seconds), if 0 - * then every event sends an email - * config.shutdownTimeout time to give up remaining emails (in seconds; defaults to 5). - * @param _layout a function that takes a logevent and returns a string (defaults to basicLayout). - */ -function smtpAppender(_config, _layout) { - config = _config; - - if (!config.attachment) { - config.attachment = {}; - } - - config.attachment.enable = !!config.attachment.enable; - config.attachment.message = config.attachment.message || 'See logs as attachment'; - config.attachment.filename = config.attachment.filename || 'default.log'; - layout = _layout || layouts.basicLayout; - subjectLayout = layouts.messagePassThroughLayout; - sendInterval = config.sendInterval * 1000 || 0; - - shutdownTimeout = ('shutdownTimeout' in config ? 
config.shutdownTimeout : 5) * 1000; - - return (loggingEvent) => { - unsentCount++; // eslint-disable-line no-plusplus - logEventBuffer.push(loggingEvent); - if (sendInterval > 0) { - scheduleSend(); - } else { - sendBuffer(); - } - }; -} - -function configure(_config) { - config = _config; - if (_config.layout) { - layout = layouts.layout(_config.layout.type, _config.layout); - } - return smtpAppender(_config, layout); -} - -function shutdown(cb) { - if (shutdownTimeout > 0) { - setTimeout(() => { - if (sendTimer) { - clearTimeout(sendTimer); - } - - sendBuffer(); - }, shutdownTimeout); - } - - (function checkDone() { - if (unsentCount > 0) { - setTimeout(checkDone, 100); - } else { - cb(); - } - }()); -} - -module.exports.name = 'smtp'; -module.exports.appender = smtpAppender; -module.exports.configure = configure; -module.exports.shutdown = shutdown; diff --git a/lib/appenders/stderr.js b/lib/appenders/stderr.js index 8944468e..690d1df8 100644 --- a/lib/appenders/stderr.js +++ b/lib/appenders/stderr.js @@ -1,21 +1,15 @@ -'use strict'; - -const layouts = require('../layouts'); - function stderrAppender(layout, timezoneOffset) { - layout = layout || layouts.colouredLayout; return (loggingEvent) => { process.stderr.write(`${layout(loggingEvent, timezoneOffset)}\n`); }; } -function configure(config) { - let layout; +function configure(config, layouts) { + let layout = layouts.colouredLayout; if (config.layout) { layout = layouts.layout(config.layout.type, config.layout); } return stderrAppender(layout, config.timezoneOffset); } -module.exports.appender = stderrAppender; module.exports.configure = configure; diff --git a/lib/appenders/stdout.js b/lib/appenders/stdout.js index 124ac974..9f78b9f4 100644 --- a/lib/appenders/stdout.js +++ b/lib/appenders/stdout.js @@ -1,21 +1,15 @@ -'use strict'; - -const layouts = require('../layouts'); - function stdoutAppender(layout, timezoneOffset) { - layout = layout || layouts.colouredLayout; - return function (loggingEvent) { + return (loggingEvent) => { process.stdout.write(`${layout(loggingEvent, timezoneOffset)}\n`); }; } -function configure(config) { - let layout; +function configure(config, layouts) { + let layout = layouts.colouredLayout; if (config.layout) { layout = layouts.layout(config.layout.type, config.layout); } return stdoutAppender(layout, config.timezoneOffset); } -exports.appender = stdoutAppender; exports.configure = configure; diff --git a/lib/appenders/tcp-server.js b/lib/appenders/tcp-server.js new file mode 100644 index 00000000..3459c311 --- /dev/null +++ b/lib/appenders/tcp-server.js @@ -0,0 +1,49 @@ +const debug = require('debug')('log4js:tcp-server'); +const net = require('net'); +const clustering = require('../clustering'); +const LoggingEvent = require('../LoggingEvent'); + +const DELIMITER = '__LOG4JS__'; + +exports.configure = (config) => { + debug('configure called with ', config); + + const server = net.createServer((socket) => { + let dataSoFar = ''; + const send = (data) => { + if (data) { + dataSoFar += data; + if (dataSoFar.indexOf(DELIMITER)) { + const events = dataSoFar.split(DELIMITER); + if (!dataSoFar.endsWith(DELIMITER)) { + dataSoFar = events.pop(); + } else { + dataSoFar = ''; + } + events + .filter((e) => e.length) + .forEach((e) => { + clustering.send(LoggingEvent.deserialise(e)); + }); + } else { + dataSoFar = ''; + } + } + }; + socket.setEncoding('utf8'); + socket.on('data', send); + socket.on('end', send); + }); + + server.listen(config.port || 5000, config.host || 'localhost', () => { + debug(`listening 
on ${config.host || 'localhost'}:${config.port || 5000}`); + server.unref(); + }); + + return { + shutdown: (cb) => { + debug('shutdown called.'); + server.close(cb); + }, + }; +}; diff --git a/lib/appenders/tcp.js b/lib/appenders/tcp.js new file mode 100644 index 00000000..5a9854a3 --- /dev/null +++ b/lib/appenders/tcp.js @@ -0,0 +1,92 @@ +const debug = require('debug')('log4js:tcp'); +const net = require('net'); + +function appender(config, layout) { + let canWrite = false; + const buffer = []; + let socket; + let shutdownAttempts = 3; + let endMsg = '__LOG4JS__'; + + function write(loggingEvent) { + debug('Writing log event to socket'); + canWrite = socket.write(`${layout(loggingEvent)}${endMsg}`, 'utf8'); + } + + function emptyBuffer() { + let evt; + debug('emptying buffer'); + while ((evt = buffer.shift())) { + write(evt); + } + } + + function createSocket() { + debug( + `appender creating socket to ${config.host || 'localhost'}:${ + config.port || 5000 + }` + ); + endMsg = `${config.endMsg || '__LOG4JS__'}`; + socket = net.createConnection( + config.port || 5000, + config.host || 'localhost' + ); + socket.on('connect', () => { + debug('socket connected'); + emptyBuffer(); + canWrite = true; + }); + socket.on('drain', () => { + debug('drain event received, emptying buffer'); + canWrite = true; + emptyBuffer(); + }); + socket.on('timeout', socket.end.bind(socket)); + socket.on('error', (e) => { + debug('connection error', e); + canWrite = false; + emptyBuffer(); + }); + socket.on('close', createSocket); + } + + createSocket(); + + function log(loggingEvent) { + if (canWrite) { + write(loggingEvent); + } else { + debug('buffering log event because it cannot write at the moment'); + buffer.push(loggingEvent); + } + } + + log.shutdown = function (cb) { + debug('shutdown called'); + if (buffer.length && shutdownAttempts) { + debug('buffer has items, waiting 100ms to empty'); + shutdownAttempts -= 1; + setTimeout(() => { + log.shutdown(cb); + }, 100); + } else { + socket.removeAllListeners('close'); + socket.end(cb); + } + }; + return log; +} + +function configure(config, layouts) { + debug(`configure with config = ${config}`); + let layout = function (loggingEvent) { + return loggingEvent.serialise(); + }; + if (config.layout) { + layout = layouts.layout(config.layout.type, config.layout); + } + return appender(config, layout); +} + +module.exports.configure = configure; diff --git a/lib/categories.js b/lib/categories.js new file mode 100644 index 00000000..10ed016a --- /dev/null +++ b/lib/categories.js @@ -0,0 +1,219 @@ +const debug = require('debug')('log4js:categories'); +const configuration = require('./configuration'); +const levels = require('./levels'); +const appenders = require('./appenders'); + +const categories = new Map(); + +/** + * Add inherited config to this category. That includes extra appenders from parent, + * and level, if none is set on this category. + * This is recursive, so each parent also gets loaded with inherited appenders. 
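
// Example pairing of the new tcp and tcp-server appenders above; a sketch, not part of
// this patch. A sending process serialises events over TCP (default localhost:5000,
// delimited by __LOG4JS__); the receiving process runs tcp-server, which deserialises
// them and re-dispatches them through its own category routing. Host names and file
// names are hypothetical.
const log4js = require('log4js');

// sending process:
log4js.configure({
  appenders: { network: { type: 'tcp', host: 'log-aggregator', port: 5000 } },
  categories: { default: { appenders: ['network'], level: 'info' } },
});

// receiving process (tcp-server needs no category; index.js starts it regardless):
log4js.configure({
  appenders: {
    file: { type: 'file', filename: 'aggregated.log' },
    server: { type: 'tcp-server', host: '0.0.0.0', port: 5000 },
  },
  categories: { default: { appenders: ['file'], level: 'info' } },
});
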
+ * Inheritance is blocked if a category has inherit=false + * @param {*} config + * @param {*} category the child category + * @param {string} categoryName dotted path to category + * @return {void} + */ +function inheritFromParent(config, category, categoryName) { + if (category.inherit === false) return; + const lastDotIndex = categoryName.lastIndexOf('.'); + if (lastDotIndex < 0) return; // category is not a child + const parentCategoryName = categoryName.slice(0, lastDotIndex); + let parentCategory = config.categories[parentCategoryName]; + + if (!parentCategory) { + // parent is missing, so implicitly create it, so that it can inherit from its parents + parentCategory = { inherit: true, appenders: [] }; + } + + // make sure parent has had its inheritance taken care of before pulling its properties to this child + inheritFromParent(config, parentCategory, parentCategoryName); + + // if the parent is not in the config (because we just created it above), + // and it inherited a valid configuration, add it to config.categories + if ( + !config.categories[parentCategoryName] && + parentCategory.appenders && + parentCategory.appenders.length && + parentCategory.level + ) { + config.categories[parentCategoryName] = parentCategory; + } + + category.appenders = category.appenders || []; + category.level = category.level || parentCategory.level; + + // merge in appenders from parent (parent is already holding its inherited appenders) + parentCategory.appenders.forEach((ap) => { + if (!category.appenders.includes(ap)) { + category.appenders.push(ap); + } + }); + category.parent = parentCategory; +} + +/** + * Walk all categories in the config, and pull down any configuration from parent to child. + * This includes inherited appenders, and level, where level is not set. + * Inheritance is skipped where a category has inherit=false. + * @param {*} config + */ +function addCategoryInheritance(config) { + if (!config.categories) return; + const categoryNames = Object.keys(config.categories); + categoryNames.forEach((name) => { + const category = config.categories[name]; + // add inherited appenders and level to this category + inheritFromParent(config, category, name); + }); +} + +configuration.addPreProcessingListener((config) => + addCategoryInheritance(config) +); + +configuration.addListener((config) => { + configuration.throwExceptionIf( + config, + configuration.not(configuration.anObject(config.categories)), + 'must have a property "categories" of type object.' + ); + + const categoryNames = Object.keys(config.categories); + configuration.throwExceptionIf( + config, + configuration.not(categoryNames.length), + 'must define at least one category.' 
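
// Example of the category inheritance implemented above; a sketch, not part of this
// patch. Child categories are named with dots; during pre-processing a child with no
// level inherits its parent's level, and the parent's appenders are merged into the
// child's. Appender names and levels below are hypothetical.
const log4js = require('log4js');

log4js.configure({
  appenders: {
    console: { type: 'console' },
    everything: { type: 'file', filename: 'all.log' },
  },
  categories: {
    default: { appenders: ['console'], level: 'info' },
    app: { appenders: ['everything'], level: 'debug' },
    'app.db': { appenders: ['console'] }, // no level, so it inherits 'debug' from "app"
  },
});
// effective config for "app.db": appenders ['console', 'everything'], level 'debug'
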
+ ); + + categoryNames.forEach((name) => { + const category = config.categories[name]; + configuration.throwExceptionIf( + config, + [ + configuration.not(category.appenders), + configuration.not(category.level), + ], + `category "${name}" is not valid (must be an object with properties "appenders" and "level")` + ); + + configuration.throwExceptionIf( + config, + configuration.not(Array.isArray(category.appenders)), + `category "${name}" is not valid (appenders must be an array of appender names)` + ); + + configuration.throwExceptionIf( + config, + configuration.not(category.appenders.length), + `category "${name}" is not valid (appenders must contain at least one appender name)` + ); + + if (Object.prototype.hasOwnProperty.call(category, 'enableCallStack')) { + configuration.throwExceptionIf( + config, + typeof category.enableCallStack !== 'boolean', + `category "${name}" is not valid (enableCallStack must be boolean type)` + ); + } + + category.appenders.forEach((appender) => { + configuration.throwExceptionIf( + config, + configuration.not(appenders.get(appender)), + `category "${name}" is not valid (appender "${appender}" is not defined)` + ); + }); + + configuration.throwExceptionIf( + config, + configuration.not(levels.getLevel(category.level)), + `category "${name}" is not valid (level "${category.level}" not recognised;` + + ` valid levels are ${levels.levels.join(', ')})` + ); + }); + + configuration.throwExceptionIf( + config, + configuration.not(config.categories.default), + 'must define a "default" category.' + ); +}); + +const setup = (config) => { + categories.clear(); + if (!config) { + return; + } + + const categoryNames = Object.keys(config.categories); + categoryNames.forEach((name) => { + const category = config.categories[name]; + const categoryAppenders = []; + category.appenders.forEach((appender) => { + categoryAppenders.push(appenders.get(appender)); + debug(`Creating category ${name}`); + categories.set(name, { + appenders: categoryAppenders, + level: levels.getLevel(category.level), + enableCallStack: category.enableCallStack || false, + }); + }); + }); +}; + +const init = () => { + setup(); +}; +init(); + +configuration.addListener(setup); + +const configForCategory = (category) => { + debug(`configForCategory: searching for config for ${category}`); + if (categories.has(category)) { + debug(`configForCategory: ${category} exists in config, returning it`); + return categories.get(category); + } + + let sourceCategoryConfig; + if (category.indexOf('.') > 0) { + debug(`configForCategory: ${category} has hierarchy, cloning from parents`); + sourceCategoryConfig = { + ...configForCategory(category.slice(0, category.lastIndexOf('.'))), + }; + } else { + if (!categories.has('default')) { + setup({ categories: { default: { appenders: ['out'], level: 'OFF' } } }); + } + debug('configForCategory: cloning default category'); + sourceCategoryConfig = { ...categories.get('default') }; + } + categories.set(category, sourceCategoryConfig); + return sourceCategoryConfig; +}; + +const appendersForCategory = (category) => + configForCategory(category).appenders; + +const getLevelForCategory = (category) => configForCategory(category).level; +const setLevelForCategory = (category, level) => { + configForCategory(category).level = level; +}; + +const getEnableCallStackForCategory = (category) => + configForCategory(category).enableCallStack === true; +const setEnableCallStackForCategory = (category, useCallStack) => { + configForCategory(category).enableCallStack = useCallStack; 
+}; + +module.exports = categories; +module.exports = Object.assign(module.exports, { + appendersForCategory, + getLevelForCategory, + setLevelForCategory, + getEnableCallStackForCategory, + setEnableCallStackForCategory, + init, +}); diff --git a/lib/clustering.js b/lib/clustering.js new file mode 100644 index 00000000..ba445e79 --- /dev/null +++ b/lib/clustering.js @@ -0,0 +1,105 @@ +const debug = require('debug')('log4js:clustering'); +const LoggingEvent = require('./LoggingEvent'); +const configuration = require('./configuration'); + +let disabled = false; +let cluster = null; +try { + // eslint-disable-next-line global-require + cluster = require('cluster'); +} catch (e) { + debug('cluster module not present'); + disabled = true; +} + +const listeners = []; + +let pm2 = false; +let pm2InstanceVar = 'NODE_APP_INSTANCE'; + +const isPM2Master = () => pm2 && process.env[pm2InstanceVar] === '0'; +const isMaster = () => + disabled || (cluster && cluster.isMaster) || isPM2Master(); + +const sendToListeners = (logEvent) => { + listeners.forEach((l) => l(logEvent)); +}; + +// in a multi-process node environment, worker loggers will use +// process.send +const receiver = (worker, message) => { + // prior to node v6, the worker parameter was not passed (args were message, handle) + debug('cluster message received from worker ', worker, ': ', message); + if (worker.topic && worker.data) { + message = worker; + worker = undefined; + } + if (message && message.topic && message.topic === 'log4js:message') { + debug('received message: ', message.data); + const logEvent = LoggingEvent.deserialise(message.data); + sendToListeners(logEvent); + } +}; + +if (!disabled) { + configuration.addListener((config) => { + // clear out the listeners, because configure has been called. + listeners.length = 0; + + ({ + pm2, + disableClustering: disabled, + pm2InstanceVar = 'NODE_APP_INSTANCE', + } = config); + + debug(`clustering disabled ? ${disabled}`); + debug(`cluster.isMaster ? ${cluster && cluster.isMaster}`); + debug(`pm2 enabled ? ${pm2}`); + debug(`pm2InstanceVar = ${pm2InstanceVar}`); + debug(`process.env[${pm2InstanceVar}] = ${process.env[pm2InstanceVar]}`); + + // just in case configure is called after shutdown + if (pm2) { + process.removeListener('message', receiver); + } + if (cluster && cluster.removeListener) { + cluster.removeListener('message', receiver); + } + + if (disabled || config.disableClustering) { + debug('Not listening for cluster messages, because clustering disabled.'); + } else if (isPM2Master()) { + // PM2 cluster support + // PM2 runs everything as workers - install pm2-intercom for this to work. + // we only want one of the app instances to write logs + debug('listening for PM2 broadcast messages'); + process.on('message', receiver); + } else if (cluster && cluster.isMaster) { + debug('listening for cluster messages'); + cluster.on('message', receiver); + } else { + debug('not listening for messages, because we are not a master process'); + } + }); +} + +module.exports = { + onlyOnMaster: (fn, notMaster) => (isMaster() ? 
fn() : notMaster), + isMaster, + send: (msg) => { + if (isMaster()) { + sendToListeners(msg); + } else { + if (!pm2) { + msg.cluster = { + workerId: cluster.worker.id, + worker: process.pid, + }; + } + process.send({ topic: 'log4js:message', data: msg.serialise() }); + } + }, + onMessage: (listener) => { + listeners.push(listener); + }, +}; diff --git a/lib/clusteringBrowser.js b/lib/clusteringBrowser.js new file mode 100644 index 00000000..fcae3019 --- /dev/null +++ b/lib/clusteringBrowser.js @@ -0,0 +1,19 @@ +/* istanbul ignore file */ +// This is used in browsers only and is designed to allow the rest of +// log4js to continue as if `clustering.js` is in use. +const isMaster = () => true; + +const listeners = []; + +const sendToListeners = (logEvent) => { + listeners.forEach((l) => l(logEvent)); +}; + +module.exports = { + onlyOnMaster: (fn, notMaster) => (isMaster() ? fn() : notMaster), + isMaster, + send: sendToListeners, + onMessage: (listener) => { + listeners.push(listener); + }, +}; diff --git a/lib/configuration.js b/lib/configuration.js new file mode 100644 index 00000000..e02a4e55 --- /dev/null +++ b/lib/configuration.js @@ -0,0 +1,64 @@ +const util = require('util'); +const debug = require('debug')('log4js:configuration'); + +const preProcessingListeners = []; +const listeners = []; + +const not = (thing) => !thing; + +const anObject = (thing) => + thing && typeof thing === 'object' && !Array.isArray(thing); + +const validIdentifier = (thing) => /^[A-Za-z][A-Za-z0-9_]*$/g.test(thing); + +const anInteger = (thing) => + thing && typeof thing === 'number' && Number.isInteger(thing); + +const addListener = (fn) => { + listeners.push(fn); + debug(`Added listener, now ${listeners.length} listeners`); +}; + +const addPreProcessingListener = (fn) => { + preProcessingListeners.push(fn); + debug( + `Added pre-processing listener, now ${preProcessingListeners.length} listeners` + ); +}; + +const throwExceptionIf = (config, checks, message) => { + const tests = Array.isArray(checks) ? checks : [checks]; + tests.forEach((test) => { + if (test) { + throw new Error( + `Problem with log4js configuration: (${util.inspect(config, { + depth: 5, + })}) - ${message}` + ); + } + }); +}; + +const configure = (candidate) => { + debug('New configuration to be validated: ', candidate); + throwExceptionIf(candidate, not(anObject(candidate)), 'must be an object.'); + + debug(`Calling pre-processing listeners (${preProcessingListeners.length})`); + preProcessingListeners.forEach((listener) => listener(candidate)); + debug('Configuration pre-processing finished.'); + + debug(`Calling configuration listeners (${listeners.length})`); + listeners.forEach((listener) => listener(candidate)); + debug('Configuration finished.'); +}; + +module.exports = { + configure, + addListener, + addPreProcessingListener, + throwExceptionIf, + anObject, + anInteger, + validIdentifier, + not, +}; diff --git a/lib/connect-logger.js b/lib/connect-logger.js index 2c60d3c3..b73822af 100755 --- a/lib/connect-logger.js +++ b/lib/connect-logger.js @@ -1,115 +1,24 @@ -/* eslint-disable no-plusplus */ - -'use strict'; +/* eslint no-underscore-dangle: ["error", { "allow": ["__statusCode", "_remoteAddress", "__headers", "_logging"] }] */ const levels = require('./levels'); -const DEFAULT_FORMAT = ':remote-addr - -' + +const DEFAULT_FORMAT = + ':remote-addr - -' + ' ":method :url HTTP/:http-version"' + ' :status :content-length ":referrer"' + ' ":user-agent"'; /** - * Log requests with the given `options` or a `format` string. 
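
// Example of the clustering-related configuration handled above; a sketch, not part of
// this patch. Under pm2 every instance runs as a worker, so "pm2: true" (together with
// the pm2-intercom module) tells log4js to let instance 0 do the writing; the instance
// id variable can be overridden with "pm2InstanceVar". Setting "disableClustering: true"
// makes every process append on its own. Appender names are hypothetical.
const log4js = require('log4js');

log4js.configure({
  appenders: { out: { type: 'stdout' } },
  categories: { default: { appenders: ['out'], level: 'info' } },
  pm2: true,
  pm2InstanceVar: 'INSTANCE_ID', // only needed if pm2 uses a non-default instance variable
});

// or, to opt out of cluster/pm2 handling entirely:
log4js.configure({
  appenders: { out: { type: 'stdout' } },
  categories: { default: { appenders: ['out'], level: 'info' } },
  disableClustering: true,
});
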
- * - * Options: - * - * - `format` Format string, see below for tokens - * - `level` A log4js levels instance. Supports also 'auto' - * - `nolog` A string or RegExp to exclude target logs - * - * Tokens: - * - * - `:req[header]` ex: `:req[Accept]` - * - `:res[header]` ex: `:res[Content-Length]` - * - `:http-version` - * - `:response-time` - * - `:remote-addr` - * - `:date` - * - `:method` - * - `:url` - * - `:referrer` - * - `:user-agent` - * - `:status` + * Return request url path, + * adding this function prevents the Cyclomatic Complexity, + * for the assemble_tokens function at low, to pass the tests. * - * @return {Function} - * @param logger4js - * @param options - * @api public + * @param {IncomingMessage} req + * @return {string} + * @api private */ -function getLogger(logger4js, options) { - /* eslint no-underscore-dangle:0 */ - if (typeof options === 'object') { - options = options || {}; - } else if (options) { - options = { format: options }; - } else { - options = {}; - } - - const thisLogger = logger4js; - let level = levels.toLevel(options.level, levels.INFO); - const fmt = options.format || DEFAULT_FORMAT; - const nolog = options.nolog ? createNoLogCondition(options.nolog) : null; - - return (req, res, next) => { - // mount safety - if (req._logging) return next(); - - // nologs - if (nolog && nolog.test(req.originalUrl)) return next(); - - if (thisLogger.isLevelEnabled(level) || options.level === 'auto') { - const start = new Date(); - const writeHead = res.writeHead; - - // flag as logging - req._logging = true; - - // proxy for statusCode. - res.writeHead = (code, headers) => { - res.writeHead = writeHead; - res.writeHead(code, headers); - - res.__statusCode = code; - res.__headers = headers || {}; - - // status code response level handling - if (options.level === 'auto') { - level = levels.INFO; - if (code >= 300) level = levels.WARN; - if (code >= 400) level = levels.ERROR; - } else { - level = levels.toLevel(options.level, levels.INFO); - } - }; - - // hook on end request to emit the log entry of the HTTP request. 
- res.on('finish', () => { - res.responseTime = new Date() - start; - // status code response level handling - if (res.statusCode && options.level === 'auto') { - level = levels.INFO; - if (res.statusCode >= 300) level = levels.WARN; - if (res.statusCode >= 400) level = levels.ERROR; - } - - if (thisLogger.isLevelEnabled(level)) { - const combinedTokens = assembleTokens(req, res, options.tokens || []); - - if (typeof fmt === 'function') { - const line = fmt(req, res, str => format(str, combinedTokens)); - if (line) thisLogger.log(level, line); - } else { - thisLogger.log(level, format(fmt, combinedTokens)); - } - } - }); - } - - // ensure next gets always called - return next(); - }; +function getUrl(req) { + return req.originalUrl || req.url; } /** @@ -128,9 +37,9 @@ function assembleTokens(req, res, customTokens) { for (let i = 0; i < a.length; ++i) { for (let j = i + 1; j < a.length; ++j) { // not === because token can be regexp object - /* eslint eqeqeq:0 */ + // eslint-disable-next-line eqeqeq if (a[i].token == a[j].token) { - a.splice(j--, 1); + a.splice(j--, 1); // eslint-disable-line no-plusplus } } } @@ -142,73 +51,69 @@ function assembleTokens(req, res, customTokens) { defaultTokens.push({ token: ':protocol', replacement: req.protocol }); defaultTokens.push({ token: ':hostname', replacement: req.hostname }); defaultTokens.push({ token: ':method', replacement: req.method }); - defaultTokens.push({ token: ':status', replacement: res.__statusCode || res.statusCode }); - defaultTokens.push({ token: ':response-time', replacement: res.responseTime }); + defaultTokens.push({ + token: ':status', + replacement: res.__statusCode || res.statusCode, + }); + defaultTokens.push({ + token: ':response-time', + replacement: res.responseTime, + }); defaultTokens.push({ token: ':date', replacement: new Date().toUTCString() }); defaultTokens.push({ token: ':referrer', - replacement: req.headers.referer || req.headers.referrer || '' + replacement: req.headers.referer || req.headers.referrer || '', }); defaultTokens.push({ token: ':http-version', - replacement: `${req.httpVersionMajor}.${req.httpVersionMinor}` + replacement: `${req.httpVersionMajor}.${req.httpVersionMinor}`, }); defaultTokens.push({ token: ':remote-addr', - replacement: req.headers['x-forwarded-for'] || - req.ip || - req._remoteAddress || - (req.socket && - (req.socket.remoteAddress || - (req.socket.socket && req.socket.socket.remoteAddress) - ) - ) + replacement: + req.headers['x-forwarded-for'] || + req.ip || + req._remoteAddress || + (req.socket && + (req.socket.remoteAddress || + (req.socket.socket && req.socket.socket.remoteAddress))), + }); + defaultTokens.push({ + token: ':user-agent', + replacement: req.headers['user-agent'], }); - defaultTokens.push({ token: ':user-agent', replacement: req.headers['user-agent'] }); defaultTokens.push({ token: ':content-length', - replacement: (res._headers && res._headers['content-length']) || - (res.__headers && res.__headers['Content-Length']) || - '-' + replacement: + res.getHeader('content-length') || + (res.__headers && res.__headers['Content-Length']) || + '-', }); defaultTokens.push({ token: /:req\[([^\]]+)]/g, - replacement: function (_, field) { + replacement(_, field) { return req.headers[field.toLowerCase()]; - } + }, }); defaultTokens.push({ token: /:res\[([^\]]+)]/g, - replacement: function (_, field) { - return res._headers ? 
- (res._headers[field.toLowerCase()] || res.__headers[field]) - : (res.__headers && res.__headers[field]); - } + replacement(_, field) { + return ( + res.getHeader(field.toLowerCase()) || + (res.__headers && res.__headers[field]) + ); + }, }); return arrayUniqueTokens(customTokens.concat(defaultTokens)); } -/** - * Return request url path, - * adding this function prevents the Cyclomatic Complexity, - * for the assemble_tokens function at low, to pass the tests. - * - * @param {IncomingMessage} req - * @return {String} - * @api private - */ - -function getUrl(req) { - return req.originalUrl || req.url; -} - /** * Return formatted log line. * - * @param {String} str + * @param {string} str * @param {Array} tokens - * @return {String} + * @return {string} * @api private */ function format(str, tokens) { @@ -221,7 +126,7 @@ function format(str, tokens) { /** * Return RegExp Object about nolog * - * @param {String|Array} nolog + * @param {(string|Array)} nolog * @return {RegExp} * @api private * @@ -248,23 +153,172 @@ function format(str, tokens) { function createNoLogCondition(nolog) { let regexp = null; - if (nolog) { - if (nolog instanceof RegExp) { - regexp = nolog; - } + if (nolog instanceof RegExp) { + regexp = nolog; + } - if (typeof nolog === 'string') { - regexp = new RegExp(nolog); - } + if (typeof nolog === 'string') { + regexp = new RegExp(nolog); + } - if (Array.isArray(nolog)) { - // convert to strings - const regexpsAsStrings = nolog.map(reg => (reg.source ? reg.source : reg)); - regexp = new RegExp(regexpsAsStrings.join('|')); - } + if (Array.isArray(nolog)) { + // convert to strings + const regexpsAsStrings = nolog.map((reg) => + reg.source ? reg.source : reg + ); + regexp = new RegExp(regexpsAsStrings.join('|')); } return regexp; } -module.exports.connectLogger = getLogger; +/** + * Allows users to define rules around status codes to assign them to a specific + * logging level. + * There are two types of rules: + * - RANGE: matches a code within a certain range + * E.g. { 'from': 200, 'to': 299, 'level': 'info' } + * - CONTAINS: matches a code to a set of expected codes + * E.g. { 'codes': [200, 203], 'level': 'debug' } + * Note*: Rules are respected only in order of prescendence. + * + * @param {Number} statusCode + * @param {Level} currentLevel + * @param {Object} ruleSet + * @return {Level} + * @api private + */ +function matchRules(statusCode, currentLevel, ruleSet) { + let level = currentLevel; + + if (ruleSet) { + const matchedRule = ruleSet.find((rule) => { + let ruleMatched = false; + if (rule.from && rule.to) { + ruleMatched = statusCode >= rule.from && statusCode <= rule.to; + } else { + ruleMatched = rule.codes.indexOf(statusCode) !== -1; + } + return ruleMatched; + }); + if (matchedRule) { + level = levels.getLevel(matchedRule.level, level); + } + } + return level; +} + +/** + * Log requests with the given `options` or a `format` string. + * + * Options: + * + * - `format` Format string, see below for tokens + * - `level` A log4js levels instance. 
Supports also 'auto' + * - `nolog` A string or RegExp to exclude target logs or function(req, res): boolean + * - `statusRules` A array of rules for setting specific logging levels base on status codes + * - `context` Whether to add a response of express to the context + * + * Tokens: + * + * - `:req[header]` ex: `:req[Accept]` + * - `:res[header]` ex: `:res[Content-Length]` + * - `:http-version` + * - `:response-time` + * - `:remote-addr` + * - `:date` + * - `:method` + * - `:url` + * - `:referrer` + * - `:user-agent` + * - `:status` + * + * @return {Function} + * @param logger4js + * @param options + * @api public + */ +module.exports = function getLogger(logger4js, options) { + if (typeof options === 'string' || typeof options === 'function') { + options = { format: options }; + } else { + options = options || {}; + } + + const thisLogger = logger4js; + let level = levels.getLevel(options.level, levels.INFO); + const fmt = options.format || DEFAULT_FORMAT; + + return (req, res, next) => { + // mount safety + if (typeof req._logging !== 'undefined') return next(); + + // nologs + if (typeof options.nolog !== 'function') { + const nolog = createNoLogCondition(options.nolog); + if (nolog && nolog.test(req.originalUrl)) return next(); + } + + if (thisLogger.isLevelEnabled(level) || options.level === 'auto') { + const start = new Date(); + const { writeHead } = res; + + // flag as logging + req._logging = true; + + // proxy for statusCode. + res.writeHead = (code, headers) => { + res.writeHead = writeHead; + res.writeHead(code, headers); + + res.__statusCode = code; + res.__headers = headers || {}; + return res; + }; + + // hook on end request to emit the log entry of the HTTP request. + let finished = false; + const handler = () => { + if (finished) { + return; + } + finished = true; + + // nologs + if (typeof options.nolog === 'function') { + if (options.nolog(req, res) === true) { + req._logging = false; + return; + } + } + + res.responseTime = new Date() - start; + // status code response level handling + if (res.statusCode && options.level === 'auto') { + level = levels.INFO; + if (res.statusCode >= 300) level = levels.WARN; + if (res.statusCode >= 400) level = levels.ERROR; + } + level = matchRules(res.statusCode, level, options.statusRules); + + const combinedTokens = assembleTokens(req, res, options.tokens || []); + + if (options.context) thisLogger.addContext('res', res); + if (typeof fmt === 'function') { + const line = fmt(req, res, (str) => format(str, combinedTokens)); + if (line) thisLogger.log(level, line); + } else { + thisLogger.log(level, format(fmt, combinedTokens)); + } + if (options.context) thisLogger.removeContext('res'); + }; + res.on('end', handler); + res.on('finish', handler); + res.on('error', handler); + res.on('close', handler); + } + + // ensure next gets always called + return next(); + }; +}; diff --git a/lib/layouts.js b/lib/layouts.js index 9a153176..13f0cf17 100644 --- a/lib/layouts.js +++ b/lib/layouts.js @@ -1,68 +1,9 @@ -'use strict'; - const dateFormat = require('date-format'); const os = require('os'); const util = require('util'); - -const eol = os.EOL || '\n'; -const layoutMakers = { - messagePassThrough: function () { - return messagePassThroughLayout; - }, - basic: function () { - return basicLayout; - }, - colored: function () { - return colouredLayout; - }, - coloured: function () { - return colouredLayout; - }, - pattern: function (config) { - return patternLayout(config && config.pattern, config && config.tokens); - }, - dummy: function () { - 
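
// Usage example for the reworked connect-logger above; a sketch, not part of this
// patch, and it assumes the top-level log4js module still exposes this middleware as
// connectLogger. The express app, URL, and rule values are hypothetical. With
// "level: 'auto'" the level is derived from the status code (WARN for >= 300, ERROR
// for >= 400) and then refined by statusRules (the first matching rule wins); a nolog
// function can veto logging per request.
const log4js = require('log4js');
const express = require('express');

const app = express();
const httpLogger = log4js.getLogger('http');

app.use(
  log4js.connectLogger(httpLogger, {
    level: 'auto',
    statusRules: [
      { from: 200, to: 299, level: 'debug' }, // range rule: 2xx responses at DEBUG
      { codes: [303, 304], level: 'info' }, // contains rule: these exact codes at INFO
    ],
    nolog: (req) => req.originalUrl === '/healthcheck',
    format: ':method :url :status :response-time ms',
  })
);
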
return dummyLayout; - } -}; -const colours = { - ALL: 'grey', - TRACE: 'blue', - DEBUG: 'cyan', - INFO: 'green', - WARN: 'yellow', - ERROR: 'red', - FATAL: 'magenta', - OFF: 'grey' -}; -const semver = require('semver'); - -function wrapErrorsWithInspect(items) { - return items.map((item) => { - if ((item instanceof Error) && item.stack) { - return { - inspect: function () { - return semver.satisfies(process.version, '>=6') ? util.format(item) : `${util.format(item)}\n${item.stack}`; - } - }; - } - return item; - }); -} - -/* eslint prefer-rest-params:0 */ -// todo: once node v4 support dropped, use rest parameter instead -function formatLogData(logData) { - let data = logData; - if (!Array.isArray(data)) { - const numArgs = arguments.length; - data = new Array(numArgs); - for (let i = 0; i < numArgs; i++) { // eslint-disable-line no-plusplus - data[i] = arguments[i]; - } - } - return util.format.apply(util, wrapErrorsWithInspect(data)); -} +const path = require('path'); +const url = require('url'); +const debug = require('debug')('log4js:layouts'); const styles = { // styles @@ -79,8 +20,8 @@ const styles = { cyan: [36, 39], green: [32, 39], magenta: [35, 39], - red: [31, 39], - yellow: [33, 39] + red: [91, 39], + yellow: [33, 39], }; function colorizeStart(style) { @@ -98,15 +39,15 @@ function colorize(str, style) { return colorizeStart(style) + str + colorizeEnd(style); } -function timestampLevelAndCategory(loggingEvent, colour, timezoneOffset) { +function timestampLevelAndCategory(loggingEvent, colour) { return colorize( - formatLogData( - '[%s] [%s] %s - ' - , dateFormat.asString(loggingEvent.startTime, timezoneOffset) - , loggingEvent.level - , loggingEvent.categoryName - ) - , colour + util.format( + '[%s] [%s] %s - ', + dateFormat.asString(loggingEvent.startTime), + loggingEvent.level.toString(), + loggingEvent.categoryName + ), + colour ); } @@ -119,28 +60,25 @@ function timestampLevelAndCategory(loggingEvent, colour, timezoneOffset) { * * @author Stephan Strittmatter */ -function basicLayout(loggingEvent, timezoneOffset) { - return timestampLevelAndCategory( - loggingEvent, - undefined, - timezoneOffset - ) + formatLogData(loggingEvent.data); +function basicLayout(loggingEvent) { + return ( + timestampLevelAndCategory(loggingEvent) + util.format(...loggingEvent.data) + ); } /** * colouredLayout - taken from masylum's fork. * same as basicLayout, but with colours. */ -function colouredLayout(loggingEvent, timezoneOffset) { - return timestampLevelAndCategory( - loggingEvent, - colours[loggingEvent.level.toString()], - timezoneOffset - ) + formatLogData(loggingEvent.data); +function colouredLayout(loggingEvent) { + return ( + timestampLevelAndCategory(loggingEvent, loggingEvent.level.colour) + + util.format(...loggingEvent.data) + ); } function messagePassThroughLayout(loggingEvent) { - return formatLogData(loggingEvent.data); + return util.format(...loggingEvent.data); } function dummyLayout(loggingEvent) { @@ -151,17 +89,34 @@ function dummyLayout(loggingEvent) { * PatternLayout * Format for specifiers is %[padding].[truncation][field]{[format]} * e.g. %5.10p - left pad the log level by 5 characters, up to a max of 10 + * both padding and truncation can be negative. 
+ * Negative truncation = trunc from end of string + * Positive truncation = trunc from start of string + * Negative padding = pad right + * Positive padding = pad left + * * Fields can be any of: * - %r time in toLocaleTimeString format * - %p log level * - %c log category * - %h hostname * - %m log data + * - %m{l} where l is an integer, log data.slice(l) + * - %m{l,u} where l and u are integers, log data.slice(l, u) * - %d date in constious formats * - %% % * - %n newline * - %z pid + * - %f filename + * - %l line number + * - %o column postion + * - %s call stack + * - %C class name [#1316](https://github.com/log4js-node/log4js-node/pull/1316) + * - %M method or function name [#1316](https://github.com/log4js-node/log4js-node/pull/1316) + * - %A method or function alias [#1316](https://github.com/log4js-node/log4js-node/pull/1316) + * - %F fully qualified caller name [#1316](https://github.com/log4js-node/log4js-node/pull/1316) * - %x{} add dynamic tokens to your log. Tokens are specified in the tokens parameter + * - %X{} add dynamic tokens to your log. Tokens are specified in logger context * You can use %[ and %] to define a colored block. * * Tokens are specified as simple key:value objects. @@ -179,9 +134,10 @@ function dummyLayout(loggingEvent) { * * @authors ['Stephan Strittmatter', 'Jan Schmidle'] */ -function patternLayout(pattern, tokens, timezoneOffset) { +function patternLayout(pattern, tokens) { const TTCC_CONVERSION_PATTERN = '%r %p %c - %m%n'; - const regex = /%(-?[0-9]+)?(\.?[0-9]+)?([[\]cdhmnprzxy%])(\{([^}]+)\})?|([^%]+)/; + const regex = + /%(-?[0-9]+)?(\.?-?[0-9]+)?([[\]cdhmnprzxXyflosCMAF%])(\{([^}]+)\})?|([^%]+)/; pattern = pattern || TTCC_CONVERSION_PATTERN; @@ -191,7 +147,9 @@ function patternLayout(pattern, tokens, timezoneOffset) { const precision = parseInt(specifier, 10); const loggerNameBits = loggerName.split('.'); if (precision < loggerNameBits.length) { - loggerName = loggerNameBits.slice(loggerNameBits.length - precision).join('.'); + loggerName = loggerNameBits + .slice(loggerNameBits.length - precision) + .join('.'); } } return loggerName; @@ -202,30 +160,69 @@ function patternLayout(pattern, tokens, timezoneOffset) { if (specifier) { format = specifier; // Pick up special cases - if (format === 'ISO8601') { - format = dateFormat.ISO8601_FORMAT; - } else if (format === 'ISO8601_WITH_TZ_OFFSET') { - format = dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT; - } else if (format === 'ABSOLUTE') { - format = dateFormat.ABSOLUTETIME_FORMAT; - } else if (format === 'DATE') { - format = dateFormat.DATETIME_FORMAT; + switch (format) { + case 'ISO8601': + case 'ISO8601_FORMAT': + format = dateFormat.ISO8601_FORMAT; + break; + case 'ISO8601_WITH_TZ_OFFSET': + case 'ISO8601_WITH_TZ_OFFSET_FORMAT': + format = dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT; + break; + case 'ABSOLUTE': + process.emitWarning( + 'Pattern %d{ABSOLUTE} is deprecated in favor of %d{ABSOLUTETIME}. ' + + 'Please use %d{ABSOLUTETIME} instead.', + 'DeprecationWarning', + 'log4js-node-DEP0003' + ); + debug( + '[log4js-node-DEP0003]', + 'DEPRECATION: Pattern %d{ABSOLUTE} is deprecated and replaced by %d{ABSOLUTETIME}.' + ); + // falls through + case 'ABSOLUTETIME': + case 'ABSOLUTETIME_FORMAT': + format = dateFormat.ABSOLUTETIME_FORMAT; + break; + case 'DATE': + process.emitWarning( + 'Pattern %d{DATE} is deprecated due to the confusion it causes when used. 
' + + 'Please use %d{DATETIME} instead.', + 'DeprecationWarning', + 'log4js-node-DEP0004' + ); + debug( + '[log4js-node-DEP0004]', + 'DEPRECATION: Pattern %d{DATE} is deprecated and replaced by %d{DATETIME}.' + ); + // falls through + case 'DATETIME': + case 'DATETIME_FORMAT': + format = dateFormat.DATETIME_FORMAT; + break; + // no default } } // Format the date - return dateFormat.asString(format, loggingEvent.startTime, timezoneOffset); + return dateFormat.asString(format, loggingEvent.startTime); } function hostname() { return os.hostname().toString(); } - function formatMessage(loggingEvent) { - return formatLogData(loggingEvent.data); + function formatMessage(loggingEvent, specifier) { + let dataSlice = loggingEvent.data; + if (specifier) { + const [lowerBound, upperBound] = specifier.split(','); + dataSlice = dataSlice.slice(lowerBound, upperBound); + } + return util.format(...dataSlice); } function endOfLine() { - return eol; + return os.EOL; } function logLevel(loggingEvent) { @@ -233,15 +230,15 @@ function patternLayout(pattern, tokens, timezoneOffset) { } function startTime(loggingEvent) { - return dateFormat.asString('hh:mm:ss', loggingEvent.startTime, timezoneOffset); + return dateFormat.asString('hh:mm:ss', loggingEvent.startTime); } function startColour(loggingEvent) { - return colorizeStart(colours[loggingEvent.level.toString()]); + return colorizeStart(loggingEvent.level.colour); } function endColour(loggingEvent) { - return colorizeEnd(colours[loggingEvent.level.toString()]); + return colorizeEnd(loggingEvent.level.colour); } function percent() { @@ -249,45 +246,133 @@ function patternLayout(pattern, tokens, timezoneOffset) { } function pid(loggingEvent) { - return loggingEvent && loggingEvent.pid ? loggingEvent.pid : process.pid; + return loggingEvent && loggingEvent.pid + ? loggingEvent.pid.toString() + : process.pid.toString(); } - function clusterInfo(loggingEvent, specifier) { - if (loggingEvent.cluster && specifier) { - return specifier - .replace('%m', loggingEvent.cluster.master) - .replace('%w', loggingEvent.cluster.worker) - .replace('%i', loggingEvent.cluster.workerId); - } else if (loggingEvent.cluster) { - return `${loggingEvent.cluster.worker}@${loggingEvent.cluster.master}`; - } - + function clusterInfo() { + // this used to try to return the master and worker pids, + // but it would never have worked because master pid is not available to workers + // leaving this here to maintain compatibility for patterns return pid(); } function userDefined(loggingEvent, specifier) { if (typeof tokens[specifier] !== 'undefined') { - return typeof tokens[specifier] === 'function' ? tokens[specifier](loggingEvent) : tokens[specifier]; + return typeof tokens[specifier] === 'function' + ? tokens[specifier](loggingEvent) + : tokens[specifier]; } return null; } - /* eslint quote-props:0 */ + function contextDefined(loggingEvent, specifier) { + const resolver = loggingEvent.context[specifier]; + + if (typeof resolver !== 'undefined') { + return typeof resolver === 'function' ? 
resolver(loggingEvent) : resolver; + } + + return null; + } + + function fileName(loggingEvent, specifier) { + let filename = loggingEvent.fileName || ''; + + // support for ESM as it uses url instead of path for file + /* istanbul ignore next: unsure how to simulate ESM for test coverage */ + const convertFileURLToPath = function (filepath) { + const urlPrefix = 'file://'; + if (filepath.startsWith(urlPrefix)) { + // https://nodejs.org/api/url.html#urlfileurltopathurl + if (typeof url.fileURLToPath === 'function') { + filepath = url.fileURLToPath(filepath); + } + // backward-compatible for nodejs pre-10.12.0 (without url.fileURLToPath method) + else { + // posix: file:///hello/world/foo.txt -> /hello/world/foo.txt -> /hello/world/foo.txt + // win32: file:///C:/path/foo.txt -> /C:/path/foo.txt -> \C:\path\foo.txt -> C:\path\foo.txt + // win32: file://nas/foo.txt -> //nas/foo.txt -> nas\foo.txt -> \\nas\foo.txt + filepath = path.normalize( + filepath.replace(new RegExp(`^${urlPrefix}`), '') + ); + if (process.platform === 'win32') { + if (filepath.startsWith('\\')) { + filepath = filepath.slice(1); + } else { + filepath = path.sep + path.sep + filepath; + } + } + } + } + return filepath; + }; + filename = convertFileURLToPath(filename); + + if (specifier) { + const fileDepth = parseInt(specifier, 10); + const fileList = filename.split(path.sep); + if (fileList.length > fileDepth) { + filename = fileList.slice(-fileDepth).join(path.sep); + } + } + + return filename; + } + + function lineNumber(loggingEvent) { + return loggingEvent.lineNumber ? `${loggingEvent.lineNumber}` : ''; + } + + function columnNumber(loggingEvent) { + return loggingEvent.columnNumber ? `${loggingEvent.columnNumber}` : ''; + } + + function callStack(loggingEvent) { + return loggingEvent.callStack || ''; + } + + function className(loggingEvent) { + return loggingEvent.className || ''; + } + + function functionName(loggingEvent) { + return loggingEvent.functionName || ''; + } + + function functionAlias(loggingEvent) { + return loggingEvent.functionAlias || ''; + } + + function callerName(loggingEvent) { + return loggingEvent.callerName || ''; + } + const replacers = { - 'c': categoryName, - 'd': formatAsDate, - 'h': hostname, - 'm': formatMessage, - 'n': endOfLine, - 'p': logLevel, - 'r': startTime, + c: categoryName, + d: formatAsDate, + h: hostname, + m: formatMessage, + n: endOfLine, + p: logLevel, + r: startTime, '[': startColour, ']': endColour, - 'y': clusterInfo, - 'z': pid, + y: clusterInfo, + z: pid, '%': percent, - 'x': userDefined + x: userDefined, + X: contextDefined, + f: fileName, + l: lineNumber, + o: columnNumber, + s: callStack, + C: className, + M: functionName, + A: functionAlias, + F: callerName, }; function replaceToken(conversionCharacter, loggingEvent, specifier) { @@ -297,8 +382,9 @@ function patternLayout(pattern, tokens, timezoneOffset) { function truncate(truncation, toTruncate) { let len; if (truncation) { - len = parseInt(truncation.substr(1), 10); - return toTruncate.substring(0, len); + len = parseInt(truncation.slice(1), 10); + // negative truncate length means truncate from end of string + return len > 0 ? 
toTruncate.slice(0, len) : toTruncate.slice(len); } return toTruncate; @@ -308,7 +394,7 @@ function patternLayout(pattern, tokens, timezoneOffset) { let len; if (padding) { if (padding.charAt(0) === '-') { - len = parseInt(padding.substr(1), 10); + len = parseInt(padding.slice(1), 10); // Right pad with spaces while (toPad.length < len) { toPad += ' '; @@ -336,7 +422,6 @@ function patternLayout(pattern, tokens, timezoneOffset) { let result; let searchString = pattern; - /* eslint no-cond-assign:0 */ while ((result = regex.exec(searchString)) !== null) { // const matchedString = result[0]; const padding = result[1]; @@ -351,26 +436,51 @@ function patternLayout(pattern, tokens, timezoneOffset) { } else { // Create a raw replacement string based on the conversion // character and specifier - const replacement = replaceToken(conversionCharacter, loggingEvent, specifier); + const replacement = replaceToken( + conversionCharacter, + loggingEvent, + specifier + ); formattedString += truncateAndPad(replacement, truncation, padding); } - searchString = searchString.substr(result.index + result[0].length); + searchString = searchString.slice(result.index + result[0].length); } return formattedString; }; } +const layoutMakers = { + messagePassThrough() { + return messagePassThroughLayout; + }, + basic() { + return basicLayout; + }, + colored() { + return colouredLayout; + }, + coloured() { + return colouredLayout; + }, + pattern(config) { + return patternLayout(config && config.pattern, config && config.tokens); + }, + dummy() { + return dummyLayout; + }, +}; + module.exports = { - basicLayout: basicLayout, - messagePassThroughLayout: messagePassThroughLayout, - patternLayout: patternLayout, - colouredLayout: colouredLayout, + basicLayout, + messagePassThroughLayout, + patternLayout, + colouredLayout, coloredLayout: colouredLayout, - dummyLayout: dummyLayout, - addLayout: function (name, serializerGenerator) { + dummyLayout, + addLayout(name, serializerGenerator) { layoutMakers[name] = serializerGenerator; }, - layout: function (name, config) { + layout(name, config) { return layoutMakers[name] && layoutMakers[name](config); - } + }, }; diff --git a/lib/levels.js b/lib/levels.js index 2d981acf..4d319c8c 100644 --- a/lib/levels.js +++ b/lib/levels.js @@ -1,85 +1,155 @@ -'use strict'; +const configuration = require('./configuration'); + +const validColours = [ + 'white', + 'grey', + 'black', + 'blue', + 'cyan', + 'green', + 'magenta', + 'red', + 'yellow', +]; -/** - * @name Level - * @namespace Log4js - */ class Level { - constructor(level, levelStr) { + constructor(level, levelStr, colour) { this.level = level; this.levelStr = levelStr; + this.colour = colour; } toString() { return this.levelStr; } + /** + * converts given String to corresponding Level + * @param {(Level|string)} sArg -- String value of Level OR Log4js.Level + * @param {Level} [defaultLevel] -- default Level, if no String representation + * @return {Level} + */ + static getLevel(sArg, defaultLevel) { + if (!sArg) { + return defaultLevel; + } + + if (sArg instanceof Level) { + return sArg; + } + + // a json-serialised level won't be an instance of Level (see issue #768) + if (sArg instanceof Object && sArg.levelStr) { + sArg = sArg.levelStr; + } + + return Level[sArg.toString().toUpperCase()] || defaultLevel; + } + + static addLevels(customLevels) { + if (customLevels) { + const levels = Object.keys(customLevels); + levels.forEach((l) => { + const levelStr = l.toUpperCase(); + Level[levelStr] = new Level( + customLevels[l].value, + 
levelStr, + customLevels[l].colour + ); + const existingLevelIndex = Level.levels.findIndex( + (lvl) => lvl.levelStr === levelStr + ); + if (existingLevelIndex > -1) { + Level.levels[existingLevelIndex] = Level[levelStr]; + } else { + Level.levels.push(Level[levelStr]); + } + }); + Level.levels.sort((a, b) => a.level - b.level); + } + } + isLessThanOrEqualTo(otherLevel) { if (typeof otherLevel === 'string') { - otherLevel = toLevel(otherLevel); + otherLevel = Level.getLevel(otherLevel); } return this.level <= otherLevel.level; } isGreaterThanOrEqualTo(otherLevel) { if (typeof otherLevel === 'string') { - otherLevel = toLevel(otherLevel); + otherLevel = Level.getLevel(otherLevel); } return this.level >= otherLevel.level; } isEqualTo(otherLevel) { if (typeof otherLevel === 'string') { - otherLevel = toLevel(otherLevel); + otherLevel = Level.getLevel(otherLevel); } return this.level === otherLevel.level; } - } -/** - * converts given String to corresponding Level - * @param {Level|String} sArg -- String value of Level OR Log4js.Level - * @param {Level} [defaultLevel] -- default Level, if no String representation - * @return {Level} - */ -function toLevel(sArg, defaultLevel) { - if (!sArg) { - return defaultLevel; - } - - if (sArg instanceof Level) { - module.exports[sArg.toString()] = sArg; - return sArg; - } +Level.levels = []; +Level.addLevels({ + ALL: { value: Number.MIN_VALUE, colour: 'grey' }, + TRACE: { value: 5000, colour: 'blue' }, + DEBUG: { value: 10000, colour: 'cyan' }, + INFO: { value: 20000, colour: 'green' }, + WARN: { value: 30000, colour: 'yellow' }, + ERROR: { value: 40000, colour: 'red' }, + FATAL: { value: 50000, colour: 'magenta' }, + MARK: { value: 9007199254740992, colour: 'grey' }, // 2^53 + OFF: { value: Number.MAX_VALUE, colour: 'grey' }, +}); - if (typeof sArg === 'string') { - return module.exports[sArg.toUpperCase()] || defaultLevel; +configuration.addListener((config) => { + const levelConfig = config.levels; + if (levelConfig) { + configuration.throwExceptionIf( + config, + configuration.not(configuration.anObject(levelConfig)), + 'levels must be an object' + ); + const newLevels = Object.keys(levelConfig); + newLevels.forEach((l) => { + configuration.throwExceptionIf( + config, + configuration.not(configuration.validIdentifier(l)), + `level name "${l}" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)` + ); + configuration.throwExceptionIf( + config, + configuration.not(configuration.anObject(levelConfig[l])), + `level "${l}" must be an object` + ); + configuration.throwExceptionIf( + config, + configuration.not(levelConfig[l].value), + `level "${l}" must have a 'value' property` + ); + configuration.throwExceptionIf( + config, + configuration.not(configuration.anInteger(levelConfig[l].value)), + `level "${l}".value must have an integer value` + ); + configuration.throwExceptionIf( + config, + configuration.not(levelConfig[l].colour), + `level "${l}" must have a 'colour' property` + ); + configuration.throwExceptionIf( + config, + configuration.not(validColours.indexOf(levelConfig[l].colour) > -1), + `level "${l}".colour must be one of ${validColours.join(', ')}` + ); + }); } +}); - return toLevel(sArg.toString()); -} - -function getLevel(levelStr) { - let level; - if (typeof levelStr === 'string') { - const levelUpper = levelStr.toUpperCase(); - level = toLevel(levelUpper); - } - return level; -} +configuration.addListener((config) => { + Level.addLevels(config.levels); +}); -module.exports = { - ALL: new Level(Number.MIN_VALUE, 
'ALL'), - TRACE: new Level(5000, 'TRACE'), - DEBUG: new Level(10000, 'DEBUG'), - INFO: new Level(20000, 'INFO'), - WARN: new Level(30000, 'WARN'), - ERROR: new Level(40000, 'ERROR'), - FATAL: new Level(50000, 'FATAL'), - MARK: new Level(9007199254740992, 'MARK'), // 2^53 - OFF: new Level(Number.MAX_VALUE, 'OFF'), - toLevel: toLevel, - Level: Level, - getLevel: getLevel -}; +module.exports = Level; diff --git a/lib/log4js.js b/lib/log4js.js index ae6c8ca4..a18502fc 100644 --- a/lib/log4js.js +++ b/lib/log4js.js @@ -1,23 +1,11 @@ -/* eslint no-prototype-builtins:1,no-restricted-syntax:[1, "ForInStatement"],no-plusplus:0 */ - -'use strict'; - /** * @fileoverview log4js is a library to log in JavaScript in similar manner - * than in log4j for Java. The API should be nearly the same. + * than in log4j for Java (but not really). * *
  * <h3>Example:</h3>
  * <pre>
- *  let logging = require('log4js');
- *  //add an appender that logs all messages to stdout.
- *  logging.addAppender(logging.consoleAppender());
- *  //add an appender that logs 'some-category' to a file
- *  logging.addAppender(logging.fileAppender('file.log'), 'some-category');
- *  //get a logger
- *  let log = logging.getLogger('some-category');
- *  log.setLevel(logging.levels.TRACE); //set the Level
- *
- *  ...
+ *  const logging = require('log4js');
+ *  const log = logging.getLogger('some-category');
  *
  *  //call the log
  *  log.trace('trace me' );
@@ -25,516 +13,174 @@
  *
  * NOTE: the authors below are the original browser-based log4js authors
  * don't try to contact them about bugs in this version :)
- * @version 1.0
  * @author Stephan Strittmatter - http://jroller.com/page/stritti
  * @author Seth Chisamore - http://www.chisamore.com
  * @since 2005-05-20
- * @static
  * Website: http://log4js.berlios.de
  */
+const debug = require('debug')('log4js:main');
 const fs = require('fs');
-const util = require('util');
+const deepClone = require('rfdc')({ proto: true });
+const configuration = require('./configuration');
 const layouts = require('./layouts');
 const levels = require('./levels');
-const loggerModule = require('./logger');
-const connectLogger = require('./connect-logger').connectLogger;
-
-const Logger = loggerModule.Logger;
-
-const ALL_CATEGORIES = '[all]';
-const loggers = {};
-const appenderMakers = {};
-const appenderShutdowns = {};
-const defaultConfig = {
-  appenders: [
-    { type: 'stdout' }
-  ],
-  replaceConsole: false
-};
-
-let appenders = {};
-
-function hasLogger(logger) {
-  return loggers.hasOwnProperty(logger);
-}
-
-// todo: this method should be moved back to levels.js, but for loop require, need some refactor
-levels.forName = function (levelStr, levelVal) {
-  let level;
-  if (typeof levelStr === 'string' && typeof levelVal === 'number') {
-    const levelUpper = levelStr.toUpperCase();
-    level = new levels.Level(levelVal, levelUpper);
-    loggerModule.addLevelMethods(level);
-  }
-  return level;
-};
-
-function getBufferedLogger(categoryName) {
-  const baseLogger = getLogger(categoryName);
-  const logger = {};
-  logger.temp = [];
-  logger.target = baseLogger;
-  logger.flush = function () {
-    for (let i = 0; i < logger.temp.length; i++) {
-      const log = logger.temp[i];
-      logger.target[log.level](log.message);
-      delete logger.temp[i];
-    }
-  };
-  logger.trace = function (message) {
-    logger.temp.push({ level: 'trace', message: message });
-  };
-  logger.debug = function (message) {
-    logger.temp.push({ level: 'debug', message: message });
-  };
-  logger.info = function (message) {
-    logger.temp.push({ level: 'info', message: message });
-  };
-  logger.warn = function (message) {
-    logger.temp.push({ level: 'warn', message: message });
-  };
-  logger.error = function (message) {
-    logger.temp.push({ level: 'error', message: message });
-  };
-  logger.fatal = function (message) {
-    logger.temp.push({ level: 'fatal', message: message });
-  };
-
-  return logger;
-}
-
-function normalizeCategory(category) {
-  return `${category}.`;
-}
-
-function doesLevelEntryContainsLogger(levelCategory, loggerCategory) {
-  const normalizedLevelCategory = normalizeCategory(levelCategory);
-  const normalizedLoggerCategory = normalizeCategory(loggerCategory);
-  return normalizedLoggerCategory.substring(0, normalizedLevelCategory.length) === normalizedLevelCategory;
-}
-
-function doesAppenderContainsLogger(appenderCategory, loggerCategory) {
-  const normalizedAppenderCategory = normalizeCategory(appenderCategory);
-  const normalizedLoggerCategory = normalizeCategory(loggerCategory);
-  return normalizedLoggerCategory.substring(0, normalizedAppenderCategory.length) === normalizedAppenderCategory;
-}
-
-/**
- * Get a logger instance. Instance is cached on categoryName level.
- * @static
- * @param loggerCategoryName
- * @return {Logger} instance of logger for the category
- */
-function getLogger(loggerCategoryName) {
-  // Use default logger if categoryName is not specified or invalid
-  if (typeof loggerCategoryName !== 'string') {
-    loggerCategoryName = Logger.DEFAULT_CATEGORY;
-  }
-
-  if (!hasLogger(loggerCategoryName)) {
-    let level;
-
-    /* jshint -W073 */
-    // If there's a 'levels' entry in the configuration
-    if (levels.config) {
-      // Goes through the categories in the levels configuration entry,
-      // starting with the 'higher' ones.
-      const keys = Object.keys(levels.config).sort();
-      for (let idx = 0; idx < keys.length; idx++) {
-        const levelCategory = keys[idx];
-        if (doesLevelEntryContainsLogger(levelCategory, loggerCategoryName)) {
-          // level for the logger
-          level = levels.config[levelCategory];
-        }
-      }
-    }
-    /* jshint +W073 */
-
-    // Create the logger for this name if it doesn't already exist
-    loggers[loggerCategoryName] = new Logger(loggerCategoryName, level);
-
-    /* jshint -W083 */
-    let appenderList;
-    for (const appenderCategory in appenders) {
-      if (doesAppenderContainsLogger(appenderCategory, loggerCategoryName)) {
-        appenderList = appenders[appenderCategory];
-        appenderList.forEach((appender) => {
-          loggers[loggerCategoryName].addListener('log', appender);
-        });
-      }
-    }
-    /* jshint +W083 */
-
-    if (appenders[ALL_CATEGORIES]) {
-      appenderList = appenders[ALL_CATEGORIES];
-      appenderList.forEach((appender) => {
-        loggers[loggerCategoryName].addListener('log', appender);
-      });
-    }
-  }
-
-  return loggers[loggerCategoryName];
-}
-
-/**
- * args are appender, optional shutdown function, then zero or more categories
- */
-function addAppender() {
-  /* eslint prefer-rest-params:0 */
-  // todo: once node v4 support dropped, use rest parameter instead
-  let args = Array.from(arguments);
-  const appender = args.shift();
-  // check for a shutdown fn
-  if (args.length > 0 && typeof args[0] === 'function') {
-    appenderShutdowns[appender] = args.shift();
-  }
-
-  if (args.length === 0 || args[0] === undefined) {
-    args = [ALL_CATEGORIES];
-  }
-  // argument may already be an array
-  if (Array.isArray(args[0])) {
-    args = args[0];
-  }
-
-  args.forEach((appenderCategory) => {
-    addAppenderToCategory(appender, appenderCategory);
-
-    if (appenderCategory === ALL_CATEGORIES) {
-      addAppenderToAllLoggers(appender);
-    } else {
-      for (const loggerCategory in loggers) {
-        if (doesAppenderContainsLogger(appenderCategory, loggerCategory)) {
-          loggers[loggerCategory].addListener('log', appender);
-        }
-      }
-    }
+const appenders = require('./appenders');
+const categories = require('./categories');
+const Logger = require('./logger');
+const clustering = require('./clustering');
+const connectLogger = require('./connect-logger');
+const recordingModule = require('./appenders/recording');
+
+let enabled = false;
+
+function sendLogEventToAppender(logEvent) {
+  if (!enabled) return;
+  debug('Received log event ', logEvent);
+  const categoryAppenders = categories.appendersForCategory(
+    logEvent.categoryName
+  );
+  categoryAppenders.forEach((appender) => {
+    appender(logEvent);
   });
 }
 
-function addAppenderToAllLoggers(appender) {
-  for (const logger in loggers) {
-    if (hasLogger(logger)) {
-      loggers[logger].addListener('log', appender);
-    }
-  }
-}
-
-function addAppenderToCategory(appender, category) {
-  if (!appenders[category]) {
-    appenders[category] = [];
-  }
-  appenders[category].push(appender);
-}
-
-function clearAppenders() {
-  // if we're calling clearAppenders, we're probably getting ready to write
-  // so turn log writes back on, just in case this is after a shutdown
-  loggerModule.enableAllLogWrites();
-  appenders = {};
-  for (const logger in loggers) {
-    if (hasLogger(logger)) {
-      loggers[logger].removeAllListeners('log');
-    }
-  }
-}
-
-function configureAppenders(appenderList, options) {
-  clearAppenders();
-  if (appenderList) {
-    appenderList.forEach((appenderConfig) => {
-      loadAppender(appenderConfig.type);
-      let appender;
-      appenderConfig.makers = appenderMakers;
-      try {
-        appender = appenderMakers[appenderConfig.type](appenderConfig, options);
-        addAppender(appender, appenderConfig.category);
-      } catch (e) {
-        throw new Error(`log4js configuration problem for ${util.inspect(appenderConfig)}`, e);
-      }
-    });
-  }
-}
-
-function configureLevels(_levels) {
-  levels.config = _levels; // Keep it so we can create loggers later using this cfg
-  if (_levels) {
-    const keys = Object.keys(levels.config).sort();
-
-    /* eslint-disable guard-for-in */
-    for (const idx in keys) {
-      const category = keys[idx];
-      if (category === ALL_CATEGORIES) {
-        setGlobalLogLevel(_levels[category]);
-      }
-
-      for (const loggerCategory in loggers) {
-        if (doesLevelEntryContainsLogger(category, loggerCategory)) {
-          loggers[loggerCategory].setLevel(_levels[category]);
-        }
-      }
-    }
-  }
-}
-
-function setGlobalLogLevel(level) {
-  Logger.prototype.level = levels.toLevel(level, levels.TRACE);
-}
-
-/**
- * Get the default logger instance.
- * @return {Logger} instance of default logger
- * @static
- */
-function getDefaultLogger() {
-  return getLogger(Logger.DEFAULT_CATEGORY);
-}
-
-const configState = {};
-
 function loadConfigurationFile(filename) {
-  if (filename) {
-    return JSON.parse(fs.readFileSync(filename, 'utf8'));
-  }
-  return undefined;
-}
-
-function configureOnceOff(config, options) {
-  if (config) {
-    try {
-      restoreConsole();
-      configureLevels(config.levels);
-      configureAppenders(config.appenders, options);
-
-      if (config.replaceConsole) {
-        replaceConsole();
-      }
-    } catch (e) {
-      throw new Error(
-        `Problem reading log4js config ${util.inspect(config)}. Error was '${e.message}' (${e.stack})`
-      );
-    }
-  }
-}
-
-function reloadConfiguration(options) {
-  const mtime = getMTime(configState.filename);
-  if (!mtime) return;
-
-  if (configState.lastMTime && (mtime.getTime() > configState.lastMTime.getTime())) {
-    configureOnceOff(loadConfigurationFile(configState.filename), options);
-  }
-  configState.lastMTime = mtime;
-}
-
-function getMTime(filename) {
-  let mtime;
+  debug(`Loading configuration from ${filename}`);
   try {
-    mtime = fs.statSync(configState.filename).mtime;
+    return JSON.parse(fs.readFileSync(filename, 'utf8'));
   } catch (e) {
-    getLogger('log4js').warn(`Failed to load configuration file ${filename}`);
+    throw new Error(
+      `Problem reading config from file "${filename}". Error was ${e.message}`,
+      e
+    );
   }
-  return mtime;
 }
 
-function initReloadConfiguration(filename, options) {
-  if (configState.timerId) {
-    clearInterval(configState.timerId);
-    delete configState.timerId;
+function configure(configurationFileOrObject) {
+  if (enabled) {
+    // eslint-disable-next-line no-use-before-define
+    shutdown();
   }
-  configState.filename = filename;
-  configState.lastMTime = getMTime(filename);
-  configState.timerId = setInterval(reloadConfiguration, options.reloadSecs * 1000, options);
-}
 
-function configure(configurationFileOrObject, options) {
-  let config = configurationFileOrObject;
-  config = config || process.env.LOG4JS_CONFIG;
-  options = options || {};
+  let configObject = configurationFileOrObject;
 
-  if (config === undefined || config === null || typeof config === 'string') {
-    if (options.reloadSecs) {
-      initReloadConfiguration(config, options);
-    }
-    config = loadConfigurationFile(config) || defaultConfig;
-  } else {
-    if (options.reloadSecs) { // eslint-disable-line
-      getLogger('log4js').warn(
-        'Ignoring configuration reload parameter for "object" configuration.'
-      );
-    }
+  if (typeof configObject === 'string') {
+    configObject = loadConfigurationFile(configurationFileOrObject);
   }
-  configureOnceOff(config, options);
-}
+  debug(`Configuration is ${configObject}`);
 
-const originalConsoleFunctions = {
-  log: console.log,
-  debug: console.debug,
-  info: console.info,
-  warn: console.warn,
-  error: console.error
-};
+  configuration.configure(deepClone(configObject));
 
-function replaceConsole(logger) {
-  function replaceWith(fn) {
-    return function () {
-      /* eslint prefer-rest-params:0 */
-      // todo: once node v4 support dropped, use rest parameter instead
-      fn.apply(logger, Array.from(arguments));
-    };
-  }
+  clustering.onMessage(sendLogEventToAppender);
 
-  logger = logger || getLogger('console');
+  enabled = true;
 
-  ['log', 'debug', 'info', 'warn', 'error'].forEach((item) => {
-    console[item] = replaceWith(item === 'log' ? logger.info : logger[item]);
-  });
+  // eslint-disable-next-line no-use-before-define
+  return log4js;
 }
 
-function restoreConsole() {
-  ['log', 'debug', 'info', 'warn', 'error'].forEach((item) => {
-    console[item] = originalConsoleFunctions[item];
-  });
+function isConfigured() {
+  return enabled;
 }
 
-/* eslint global-require:0 */
-/**
- * Load an appenderModule based on the provided appender filepath. Will first
- * check if the appender path is a subpath of the log4js 'lib/appenders' directory.
- * If not, it will attempt to load the the appender as complete path.
- *
- * @param {string} appender The filepath for the appender.
- * @returns {Object|null} The required appender or null if appender could not be loaded.
- * @private
- */
-function requireAppender(appender) {
-  let appenderModule;
-  try {
-    appenderModule = require(`./appenders/${appender}`); // eslint-disable-line
-  } catch (e) {
-    appenderModule = require(appender); // eslint-disable-line
-  }
-  return appenderModule;
+function recording() {
+  return recordingModule;
 }
 
 /**
- * Load an appender. Provided the appender path to be loaded. If appenderModule is defined,
- * it will be used in place of requiring the appender module.
+ * This callback type is called `shutdownCallback` and is displayed as a global symbol.
  *
- * @param {string} appender The path to the appender module.
- * @param {Object|void} [appenderModule] The pre-required appender module. When provided,
- * instead of requiring the appender by its path, this object will be used.
- * @returns {void}
- * @private
+ * @callback shutdownCallback
+ * @param {Error} [error]
  */
-function loadAppender(appender, appenderModule) {
-  appenderModule = appenderModule || requireAppender(appender);
-
-  if (!appenderModule) {
-    throw new Error(`Invalid log4js appender: ${util.inspect(appender)}`);
-  }
-
-  log4js.appenders[appender] = appenderModule.appender.bind(appenderModule);
-  if (appenderModule.shutdown) {
-    appenderShutdowns[appender] = appenderModule.shutdown.bind(appenderModule);
-  }
-  appenderMakers[appender] = appenderModule.configure.bind(appenderModule);
-}
 
 /**
  * Shutdown all log appenders. This will first disable all writing to appenders
  * and then call the shutdown function each appender.
  *
- * @params {Function} cb - The callback to be invoked once all appenders have
+ * @param {shutdownCallback} [callback] - The callback to be invoked once all appenders have
  *  shutdown. If an error occurs, the callback will be given the error object
  *  as the first argument.
  */
-function shutdown(cb) {
+function shutdown(callback = () => {}) {
+  if (typeof callback !== 'function') {
+    throw new TypeError('Invalid callback passed to shutdown');
+  }
+  debug('Shutdown called. Disabling all log writing.');
   // First, disable all writing to appenders. This prevents appenders from
   // not being able to be drained because of run-away log writes.
-  loggerModule.disableAllLogWrites();
+  enabled = false;
+
+  // Clone out to maintain a reference
+  const appendersToCheck = Array.from(appenders.values());
+
+  // Reset immediately to prevent leaks
+  appenders.init();
+  categories.init();
 
-  // turn off config reloading
-  if (configState.timerId) {
-    clearInterval(configState.timerId);
+  // Count the number of shutdown functions
+  const shutdownFunctions = appendersToCheck.reduce(
+    (accum, next) => (next.shutdown ? accum + 1 : accum),
+    0
+  );
+  if (shutdownFunctions === 0) {
+    debug('No appenders with shutdown functions found.');
+    callback();
   }
 
-  // Call each of the shutdown functions in parallel
   let completed = 0;
   let error;
-  const shutdownFunctions = [];
-
+  debug(`Found ${shutdownFunctions} appenders with shutdown functions.`);
   function complete(err) {
     error = error || err;
-    completed++;
-    if (completed >= shutdownFunctions.length) {
-      cb(error);
+    completed += 1;
+    debug(`Appender shutdowns complete: ${completed} / ${shutdownFunctions}`);
+    if (completed >= shutdownFunctions) {
+      debug('All shutdown functions completed.');
+      callback(error);
     }
   }
 
-  for (const category in appenderShutdowns) {
-    if (appenderShutdowns.hasOwnProperty(category)) {
-      shutdownFunctions.push(appenderShutdowns[category]);
-    }
-  }
+  // Call each of the shutdown functions
+  appendersToCheck
+    .filter((a) => a.shutdown)
+    .forEach((a) => a.shutdown(complete));
+}
 
-  if (!shutdownFunctions.length) {
-    return cb();
+/**
+ * Get a logger instance.
+ * @static
+ * @param {string} [category=default]
+ * @return {Logger} instance of logger for the category
+ */
+function getLogger(category) {
+  if (!enabled) {
+    configure(
+      process.env.LOG4JS_CONFIG || {
+        appenders: { out: { type: 'stdout' } },
+        categories: { default: { appenders: ['out'], level: 'OFF' } },
+      }
+    );
   }
-
-  shutdownFunctions.forEach((shutdownFct) => {
-    shutdownFct(complete);
-  });
-
-  return null;
+  return new Logger(category || 'default');
 }
 
 /**
  * @name log4js
  * @namespace Log4js
- * @property getBufferedLogger
  * @property getLogger
- * @property getDefaultLogger
- * @property hasLogger
- * @property addAppender
- * @property loadAppender
- * @property clearAppenders
  * @property configure
  * @property shutdown
- * @property replaceConsole
- * @property restoreConsole
- * @property levels
- * @property setGlobalLogLevel
- * @property layouts
- * @property appenders
- * @property appenderMakers
- * @property connectLogger
  */
 const log4js = {
-  getBufferedLogger,
   getLogger,
-  getDefaultLogger,
-  hasLogger,
-
-  addAppender,
-  loadAppender,
-  clearAppenders,
   configure,
+  isConfigured,
   shutdown,
-
-  replaceConsole,
-  restoreConsole,
-
+  connectLogger,
   levels,
-  setGlobalLogLevel,
-
-  layouts,
-  appenders: {},
-  appenderMakers,
-  connectLogger
+  addLayout: layouts.addLayout,
+  recording,
 };
 
 module.exports = log4js;
-
-// set ourselves up
-configure();
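
For orientation, a minimal usage sketch of the reworked lib/log4js.js API shown above, assuming the stdout appender and default category from getLogger()'s fallback configuration; the 'info' level and the log message are illustrative, not taken from the patch:

// Usage sketch; appender/category names mirror the fallback config in getLogger() above.
const log4js = require('log4js');

// configure() now accepts a config object or a path to a JSON config file.
log4js.configure({
  appenders: { out: { type: 'stdout' } },
  categories: { default: { appenders: ['out'], level: 'info' } },
});

const logger = log4js.getLogger(); // no category given, falls back to 'default'
logger.info('hello from the new API');

// shutdown() drains appenders; the callback is optional, but passing a
// non-function argument now throws a TypeError.
log4js.shutdown((err) => {
  if (err) console.error('appender shutdown failed', err);
});
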
diff --git a/lib/logger.js b/lib/logger.js
index 1da0cae3..b21f9e1e 100644
--- a/lib/logger.js
+++ b/lib/logger.js
@@ -1,35 +1,77 @@
-/* eslint no-underscore-dangle:0 */
-
-'use strict';
+/* eslint no-underscore-dangle: ["error", { "allow": ["_log"] }] */
 
+const debug = require('debug')('log4js:logger');
+const LoggingEvent = require('./LoggingEvent');
 const levels = require('./levels');
-const EventEmitter = require('events');
-
-const DEFAULT_CATEGORY = '[default]';
+const clustering = require('./clustering');
+const categories = require('./categories');
+const configuration = require('./configuration');
 
-let logWritesEnabled = true;
+const stackReg = /^(?:\s*)at (?:(.+) \()?(?:([^(]+?):(\d+):(\d+))\)?$/;
+/**
+ * The top entry is the Error
+ */
+const baseCallStackSkip = 1;
+/**
+ * The _log function is 3 levels deep, we need to skip those to make it to the callSite
+ */
+const defaultErrorCallStackSkip = 3;
 
 /**
- * @name LoggingEvent
- * @namespace Log4js
+ *
+ * @param {Error} data
+ * @param {number} skipIdx
+ * @returns {import('../types/log4js').CallStack | null}
  */
-class LoggingEvent {
-  /**
-   * Models a logging event.
-   * @constructor
-   * @param {String} categoryName name of category
-   * @param {Log4js.Level} level level of message
-   * @param {Array} data objects to log
-   * @param {Logger} logger the associated logger
-   * @author Seth Chisamore
-   */
-  constructor(categoryName, level, data, logger) {
-    this.startTime = new Date();
-    this.categoryName = categoryName;
-    this.data = data;
-    this.level = level;
-    this.logger = logger;
+function defaultParseCallStack(
+  data,
+  skipIdx = defaultErrorCallStackSkip + baseCallStackSkip
+) {
+  try {
+    const stacklines = data.stack.split('\n').slice(skipIdx);
+    if (!stacklines.length) {
+      // There's no stack in this stack
+      // Should we try a previous index if skipIdx was set?
+      return null;
+    }
+    const lineMatch = stackReg.exec(stacklines[0]);
+    /* istanbul ignore else: failsafe */
+    if (lineMatch && lineMatch.length === 5) {
+      // extract class, function and alias names
+      let className = '';
+      let functionName = '';
+      let functionAlias = '';
+      if (lineMatch[1] && lineMatch[1] !== '') {
+        // WARN: this will unset alias if alias is not present.
+        [functionName, functionAlias] = lineMatch[1]
+          .replace(/[[\]]/g, '')
+          .split(' as ');
+        functionAlias = functionAlias || '';
+
+        if (functionName.includes('.'))
+          [className, functionName] = functionName.split('.');
+      }
+
+      return {
+        fileName: lineMatch[2],
+        lineNumber: parseInt(lineMatch[3], 10),
+        columnNumber: parseInt(lineMatch[4], 10),
+        callStack: stacklines.join('\n'),
+        className,
+        functionName,
+        functionAlias,
+        callerName: lineMatch[1] || '',
+      };
+      // eslint-disable-next-line no-else-return
+    } else {
+      // will never get here unless nodejs has changes to Error
+      console.error('log4js.logger - defaultParseCallStack error'); // eslint-disable-line no-console
+    }
+  } catch (err) {
+    // will never get error unless nodejs has breaking changes to Error
+    console.error('log4js.logger - defaultParseCallStack error', err); // eslint-disable-line no-console
   }
+  return null;
 }
 
 /**
@@ -39,38 +81,79 @@ class LoggingEvent {
  * @name Logger
  * @namespace Log4js
  * @param name name of category to log to
- * @param level
+ * @param level - the loglevel for the category
+ * @param dispatch - the function which will receive the logevents
  *
  * @author Stephan Strittmatter
  */
-class Logger extends EventEmitter {
-  constructor(name, level) {
-    super();
+class Logger {
+  constructor(name) {
+    if (!name) {
+      throw new Error('No category provided.');
+    }
+    this.category = name;
+    this.context = {};
+    /** @private */
+    this.callStackSkipIndex = 0;
+    /** @private */
+    this.parseCallStack = defaultParseCallStack;
+    debug(`Logger created (${this.category}, ${this.level})`);
+  }
 
-    this.category = name || DEFAULT_CATEGORY;
+  get level() {
+    return levels.getLevel(
+      categories.getLevelForCategory(this.category),
+      levels.OFF
+    );
+  }
 
-    if (level) {
-      this.setLevel(level);
-    }
+  set level(level) {
+    categories.setLevelForCategory(
+      this.category,
+      levels.getLevel(level, this.level)
+    );
   }
 
-  setLevel(level) {
-    this.level = levels.toLevel(level, this.level || levels.TRACE);
+  get useCallStack() {
+    return categories.getEnableCallStackForCategory(this.category);
   }
 
-  removeLevel() {
-    delete this.level;
+  set useCallStack(bool) {
+    categories.setEnableCallStackForCategory(this.category, bool === true);
   }
 
-  log() {
-    /* eslint prefer-rest-params:0 */
-    // todo: once node v4 support dropped, use rest parameter instead
-    const args = Array.from(arguments);
-    const logLevel = levels.toLevel(args[0], levels.INFO);
-    if (!this.isLevelEnabled(logLevel)) {
-      return;
+  get callStackLinesToSkip() {
+    return this.callStackSkipIndex;
+  }
+
+  set callStackLinesToSkip(number) {
+    if (typeof number !== 'number') {
+      throw new TypeError('Must be a number');
+    }
+    if (number < 0) {
+      throw new RangeError('Must be >= 0');
+    }
+    this.callStackSkipIndex = number;
+  }
+
+  log(level, ...args) {
+    const logLevel = levels.getLevel(level);
+    if (!logLevel) {
+      if (configuration.validIdentifier(level) && args.length > 0) {
+        // logLevel not found but of valid signature, WARN before fallback to INFO
+        this.log(
+          levels.WARN,
+          'log4js:logger.log: valid log-level not found as first parameter given:',
+          level
+        );
+        this.log(levels.INFO, `[${level}]`, ...args);
+      } else {
+        // apart from fallback, allow .log(...args) to be synonym with .log("INFO", ...args)
+        this.log(levels.INFO, level, ...args);
+      }
+    } else if (this.isLevelEnabled(logLevel)) {
+      this._log(logLevel, args);
     }
-    this._log(logLevel, args.slice(1));
   }
 
   isLevelEnabled(otherLevel) {
@@ -78,55 +161,85 @@ class Logger extends EventEmitter {
   }
 
   _log(level, data) {
-    const loggingEvent = new LoggingEvent(this.category, level, data, this);
-    this.emit('log', loggingEvent);
+    debug(`sending log data (${level}) to appenders`);
+    const error = data.find((item) => item instanceof Error);
+    let callStack;
+    if (this.useCallStack) {
+      try {
+        if (error) {
+          callStack = this.parseCallStack(
+            error,
+            this.callStackSkipIndex + baseCallStackSkip
+          );
+        }
+      } catch (_err) {
+        // Ignore Error and use the original method of creating a new Error.
+      }
+      callStack =
+        callStack ||
+        this.parseCallStack(
+          new Error(),
+          this.callStackSkipIndex +
+            defaultErrorCallStackSkip +
+            baseCallStackSkip
+        );
+    }
+    const loggingEvent = new LoggingEvent(
+      this.category,
+      level,
+      data,
+      this.context,
+      callStack,
+      error
+    );
+    clustering.send(loggingEvent);
   }
-}
 
-Logger.DEFAULT_CATEGORY = DEFAULT_CATEGORY;
-Logger.prototype.level = levels.TRACE;
+  addContext(key, value) {
+    this.context[key] = value;
+  }
 
-['Trace', 'Debug', 'Info', 'Warn', 'Error', 'Fatal', 'Mark'].forEach(addLevelMethods);
+  removeContext(key) {
+    delete this.context[key];
+  }
+
+  clearContext() {
+    this.context = {};
+  }
+
+  setParseCallStackFunction(parseFunction) {
+    if (typeof parseFunction === 'function') {
+      this.parseCallStack = parseFunction;
+    } else if (typeof parseFunction === 'undefined') {
+      this.parseCallStack = defaultParseCallStack;
+    } else {
+      throw new TypeError('Invalid type passed to setParseCallStackFunction');
+    }
+  }
+}
 
 function addLevelMethods(target) {
-  const level = levels.toLevel(target);
+  const level = levels.getLevel(target);
 
   const levelStrLower = level.toString().toLowerCase();
-  const levelMethod = levelStrLower.replace(/_([a-z])/g, g => g[1].toUpperCase());
+  const levelMethod = levelStrLower.replace(/_([a-z])/g, (g) =>
+    g[1].toUpperCase()
+  );
   const isLevelMethod = levelMethod[0].toUpperCase() + levelMethod.slice(1);
 
   Logger.prototype[`is${isLevelMethod}Enabled`] = function () {
-    return this.isLevelEnabled(level.toString());
+    return this.isLevelEnabled(level);
   };
 
-  Logger.prototype[levelMethod] = function () {
-    /* eslint prefer-rest-params:0 */
-    // todo: once node v4 support dropped, use rest parameter instead
-    const args = Array.from(arguments);
-    if (logWritesEnabled && this.isLevelEnabled(level)) {
-      this._log(level, args);
-    }
+  Logger.prototype[levelMethod] = function (...args) {
+    this.log(level, ...args);
   };
 }
 
-/**
- * Disable all log writes.
- * @returns {void}
- */
-function disableAllLogWrites() {
-  logWritesEnabled = false;
-}
+levels.levels.forEach(addLevelMethods);
 
-/**
- * Enable log writes.
- * @returns {void}
- */
-function enableAllLogWrites() {
-  logWritesEnabled = true;
-}
+configuration.addListener(() => {
+  levels.levels.forEach(addLevelMethods);
+});
 
-module.exports.LoggingEvent = LoggingEvent;
-module.exports.Logger = Logger;
-module.exports.disableAllLogWrites = disableAllLogWrites;
-module.exports.enableAllLogWrites = enableAllLogWrites;
-module.exports.addLevelMethods = addLevelMethods;
+module.exports = Logger;
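
A hedged usage sketch tying the new Logger features above (context, call-stack capture) to the pattern-layout tokens added earlier in this diff; the pattern string, the 'request' category name, and the 'requestId' context key are illustrative assumptions, not part of the patch:

// Usage sketch; category, context key, and pattern are assumed for illustration.
const log4js = require('log4js');

log4js.configure({
  appenders: {
    out: {
      type: 'stdout',
      // %X{requestId} is resolved by the new contextDefined replacer;
      // %f:%l come from the parsed call stack when useCallStack is enabled.
      layout: { type: 'pattern', pattern: '%d %p %c %f:%l %X{requestId} - %m' },
    },
  },
  categories: { default: { appenders: ['out'], level: 'debug' } },
});

const logger = log4js.getLogger('request');
logger.useCallStack = true;                // populates fileName/lineNumber via defaultParseCallStack
logger.addContext('requestId', 'abc-123'); // stored on logger.context, read by %X{requestId}
logger.debug('handling request');
logger.removeContext('requestId');
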
diff --git a/package-lock.json b/package-lock.json
new file mode 100644
index 00000000..08635231
--- /dev/null
+++ b/package-lock.json
@@ -0,0 +1,5683 @@
+{
+  "name": "log4js",
+  "version": "6.9.1",
+  "lockfileVersion": 1,
+  "requires": true,
+  "dependencies": {
+    "@ampproject/remapping": {
+      "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.0.tgz",
+      "integrity": "sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==",
+      "dev": true,
+      "requires": {
+        "@jridgewell/gen-mapping": "^0.1.0",
+        "@jridgewell/trace-mapping": "^0.3.9"
+      }
+    },
+    "@babel/code-frame": {
+      "version": "7.18.6",
+      "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.18.6.tgz",
+      "integrity": "sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==",
+      "dev": true,
+      "requires": {
+        "@babel/highlight": "^7.18.6"
+      }
+    },
+    "@babel/compat-data": {
+      "version": "7.20.14",
+      "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.20.14.tgz",
+      "integrity": "sha512-0YpKHD6ImkWMEINCyDAD0HLLUH/lPCefG8ld9it8DJB2wnApraKuhgYTvTY1z7UFIfBTGy5LwncZ+5HWWGbhFw==",
+      "dev": true
+    },
+    "@babel/core": {
+      "version": "7.20.12",
+      "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.20.12.tgz",
+      "integrity": "sha512-XsMfHovsUYHFMdrIHkZphTN/2Hzzi78R08NuHfDBehym2VsPDL6Zn/JAD/JQdnRvbSsbQc4mVaU1m6JgtTEElg==",
+      "dev": true,
+      "requires": {
+        "@ampproject/remapping": "^2.1.0",
+        "@babel/code-frame": "^7.18.6",
+        "@babel/generator": "^7.20.7",
+        "@babel/helper-compilation-targets": "^7.20.7",
+        "@babel/helper-module-transforms": "^7.20.11",
+        "@babel/helpers": "^7.20.7",
+        "@babel/parser": "^7.20.7",
+        "@babel/template": "^7.20.7",
+        "@babel/traverse": "^7.20.12",
+        "@babel/types": "^7.20.7",
+        "convert-source-map": "^1.7.0",
+        "debug": "^4.1.0",
+        "gensync": "^1.0.0-beta.2",
+        "json5": "^2.2.2",
+        "semver": "^6.3.0"
+      },
+      "dependencies": {
+        "json5": {
+          "version": "2.2.3",
+          "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz",
+          "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==",
+          "dev": true
+        },
+        "semver": {
+          "version": "6.3.0",
+          "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
+          "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
+          "dev": true
+        }
+      }
+    },
+    "@babel/generator": {
+      "version": "7.20.14",
+      "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.20.14.tgz",
+      "integrity": "sha512-AEmuXHdcD3A52HHXxaTmYlb8q/xMEhoRP67B3T4Oq7lbmSoqroMZzjnGj3+i1io3pdnF8iBYVu4Ilj+c4hBxYg==",
+      "dev": true,
+      "requires": {
+        "@babel/types": "^7.20.7",
+        "@jridgewell/gen-mapping": "^0.3.2",
+        "jsesc": "^2.5.1"
+      },
+      "dependencies": {
+        "@jridgewell/gen-mapping": {
+          "version": "0.3.2",
+          "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz",
+          "integrity": "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==",
+          "dev": true,
+          "requires": {
+            "@jridgewell/set-array": "^1.0.1",
+            "@jridgewell/sourcemap-codec": "^1.4.10",
+            "@jridgewell/trace-mapping": "^0.3.9"
+          }
+        }
+      }
+    },
+    "@babel/helper-compilation-targets": {
+      "version": "7.20.7",
+      "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.20.7.tgz",
+      "integrity": "sha512-4tGORmfQcrc+bvrjb5y3dG9Mx1IOZjsHqQVUz7XCNHO+iTmqxWnVg3KRygjGmpRLJGdQSKuvFinbIb0CnZwHAQ==",
+      "dev": true,
+      "requires": {
+        "@babel/compat-data": "^7.20.5",
+        "@babel/helper-validator-option": "^7.18.6",
+        "browserslist": "^4.21.3",
+        "lru-cache": "^5.1.1",
+        "semver": "^6.3.0"
+      },
+      "dependencies": {
+        "lru-cache": {
+          "version": "5.1.1",
+          "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz",
+          "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==",
+          "dev": true,
+          "requires": {
+            "yallist": "^3.0.2"
+          }
+        },
+        "semver": {
+          "version": "6.3.0",
+          "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
+          "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
+          "dev": true
+        },
+        "yallist": {
+          "version": "3.1.1",
+          "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
+          "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==",
+          "dev": true
+        }
+      }
+    },
+    "@babel/helper-environment-visitor": {
+      "version": "7.18.9",
+      "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz",
+      "integrity": "sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==",
+      "dev": true
+    },
+    "@babel/helper-module-imports": {
+      "version": "7.18.6",
+      "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz",
+      "integrity": "sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA==",
+      "dev": true,
+      "requires": {
+        "@babel/types": "^7.18.6"
+      }
+    },
+    "@babel/helper-module-transforms": {
+      "version": "7.20.11",
+      "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.20.11.tgz",
+      "integrity": "sha512-uRy78kN4psmji1s2QtbtcCSaj/LILFDp0f/ymhpQH5QY3nljUZCaNWz9X1dEj/8MBdBEFECs7yRhKn8i7NjZgg==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-environment-visitor": "^7.18.9",
+        "@babel/helper-module-imports": "^7.18.6",
+        "@babel/helper-simple-access": "^7.20.2",
+        "@babel/helper-split-export-declaration": "^7.18.6",
+        "@babel/helper-validator-identifier": "^7.19.1",
+        "@babel/template": "^7.20.7",
+        "@babel/traverse": "^7.20.10",
+        "@babel/types": "^7.20.7"
+      }
+    },
+    "@babel/helper-simple-access": {
+      "version": "7.20.2",
+      "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.20.2.tgz",
+      "integrity": "sha512-+0woI/WPq59IrqDYbVGfshjT5Dmk/nnbdpcF8SnMhhXObpTq2KNBdLFRFrkVdbDOyUmHBCxzm5FHV1rACIkIbA==",
+      "dev": true,
+      "requires": {
+        "@babel/types": "^7.20.2"
+      }
+    },
+    "@babel/helper-split-export-declaration": {
+      "version": "7.18.6",
+      "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz",
+      "integrity": "sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==",
+      "dev": true,
+      "requires": {
+        "@babel/types": "^7.18.6"
+      }
+    },
+    "@babel/helper-string-parser": {
+      "version": "7.19.4",
+      "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.19.4.tgz",
+      "integrity": "sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw==",
+      "dev": true
+    },
+    "@babel/helper-validator-identifier": {
+      "version": "7.19.1",
+      "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz",
+      "integrity": "sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==",
+      "dev": true
+    },
+    "@babel/helper-validator-option": {
+      "version": "7.18.6",
+      "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz",
+      "integrity": "sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw==",
+      "dev": true
+    },
+    "@babel/helpers": {
+      "version": "7.20.13",
+      "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.20.13.tgz",
+      "integrity": "sha512-nzJ0DWCL3gB5RCXbUO3KIMMsBY2Eqbx8mBpKGE/02PgyRQFcPQLbkQ1vyy596mZLaP+dAfD+R4ckASzNVmW3jg==",
+      "dev": true,
+      "requires": {
+        "@babel/template": "^7.20.7",
+        "@babel/traverse": "^7.20.13",
+        "@babel/types": "^7.20.7"
+      }
+    },
+    "@babel/highlight": {
+      "version": "7.18.6",
+      "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.18.6.tgz",
+      "integrity": "sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-validator-identifier": "^7.18.6",
+        "chalk": "^2.0.0",
+        "js-tokens": "^4.0.0"
+      },
+      "dependencies": {
+        "ansi-styles": {
+          "version": "3.2.1",
+          "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
+          "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
+          "dev": true,
+          "requires": {
+            "color-convert": "^1.9.0"
+          }
+        },
+        "chalk": {
+          "version": "2.4.2",
+          "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
+          "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==",
+          "dev": true,
+          "requires": {
+            "ansi-styles": "^3.2.1",
+            "escape-string-regexp": "^1.0.5",
+            "supports-color": "^5.3.0"
+          }
+        },
+        "color-convert": {
+          "version": "1.9.3",
+          "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
+          "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
+          "dev": true,
+          "requires": {
+            "color-name": "1.1.3"
+          }
+        },
+        "color-name": {
+          "version": "1.1.3",
+          "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
+          "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==",
+          "dev": true
+        },
+        "has-flag": {
+          "version": "3.0.0",
+          "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
+          "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==",
+          "dev": true
+        },
+        "supports-color": {
+          "version": "5.5.0",
+          "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
+          "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
+          "dev": true,
+          "requires": {
+            "has-flag": "^3.0.0"
+          }
+        }
+      }
+    },
+    "@babel/parser": {
+      "version": "7.20.15",
+      "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.20.15.tgz",
+      "integrity": "sha512-DI4a1oZuf8wC+oAJA9RW6ga3Zbe8RZFt7kD9i4qAspz3I/yHet1VvC3DiSy/fsUvv5pvJuNPh0LPOdCcqinDPg==",
+      "dev": true
+    },
+    "@babel/template": {
+      "version": "7.20.7",
+      "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.20.7.tgz",
+      "integrity": "sha512-8SegXApWe6VoNw0r9JHpSteLKTpTiLZ4rMlGIm9JQ18KiCtyQiAMEazujAHrUS5flrcqYZa75ukev3P6QmUwUw==",
+      "dev": true,
+      "requires": {
+        "@babel/code-frame": "^7.18.6",
+        "@babel/parser": "^7.20.7",
+        "@babel/types": "^7.20.7"
+      }
+    },
+    "@babel/traverse": {
+      "version": "7.23.2",
+      "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.23.2.tgz",
+      "integrity": "sha512-azpe59SQ48qG6nu2CzcMLbxUudtN+dOM9kDbUqGq3HXUJRlo7i8fvPoxQUzYgLZ4cMVmuZgm8vvBpNeRhd6XSw==",
+      "dev": true,
+      "requires": {
+        "@babel/code-frame": "^7.22.13",
+        "@babel/generator": "^7.23.0",
+        "@babel/helper-environment-visitor": "^7.22.20",
+        "@babel/helper-function-name": "^7.23.0",
+        "@babel/helper-hoist-variables": "^7.22.5",
+        "@babel/helper-split-export-declaration": "^7.22.6",
+        "@babel/parser": "^7.23.0",
+        "@babel/types": "^7.23.0",
+        "debug": "^4.1.0",
+        "globals": "^11.1.0"
+      },
+      "dependencies": {
+        "@babel/code-frame": {
+          "version": "7.22.13",
+          "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.22.13.tgz",
+          "integrity": "sha512-XktuhWlJ5g+3TJXc5upd9Ks1HutSArik6jf2eAjYFyIOf4ej3RN+184cZbzDvbPnuTJIUhPKKJE3cIsYTiAT3w==",
+          "dev": true,
+          "requires": {
+            "@babel/highlight": "^7.22.13",
+            "chalk": "^2.4.2"
+          }
+        },
+        "@babel/generator": {
+          "version": "7.23.0",
+          "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.23.0.tgz",
+          "integrity": "sha512-lN85QRR+5IbYrMWM6Y4pE/noaQtg4pNiqeNGX60eqOfo6gtEj6uw/JagelB8vVztSd7R6M5n1+PQkDbHbBRU4g==",
+          "dev": true,
+          "requires": {
+            "@babel/types": "^7.23.0",
+            "@jridgewell/gen-mapping": "^0.3.2",
+            "@jridgewell/trace-mapping": "^0.3.17",
+            "jsesc": "^2.5.1"
+          }
+        },
+        "@babel/helper-environment-visitor": {
+          "version": "7.22.20",
+          "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz",
+          "integrity": "sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA==",
+          "dev": true
+        },
+        "@babel/helper-function-name": {
+          "version": "7.23.0",
+          "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz",
+          "integrity": "sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw==",
+          "dev": true,
+          "requires": {
+            "@babel/template": "^7.22.15",
+            "@babel/types": "^7.23.0"
+          }
+        },
+        "@babel/helper-hoist-variables": {
+          "version": "7.22.5",
+          "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz",
+          "integrity": "sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw==",
+          "dev": true,
+          "requires": {
+            "@babel/types": "^7.22.5"
+          }
+        },
+        "@babel/helper-split-export-declaration": {
+          "version": "7.22.6",
+          "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz",
+          "integrity": "sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g==",
+          "dev": true,
+          "requires": {
+            "@babel/types": "^7.22.5"
+          }
+        },
+        "@babel/helper-string-parser": {
+          "version": "7.22.5",
+          "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz",
+          "integrity": "sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw==",
+          "dev": true
+        },
+        "@babel/helper-validator-identifier": {
+          "version": "7.22.20",
+          "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz",
+          "integrity": "sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==",
+          "dev": true
+        },
+        "@babel/highlight": {
+          "version": "7.22.20",
+          "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.22.20.tgz",
+          "integrity": "sha512-dkdMCN3py0+ksCgYmGG8jKeGA/8Tk+gJwSYYlFGxG5lmhfKNoAy004YpLxpS1W2J8m/EK2Ew+yOs9pVRwO89mg==",
+          "dev": true,
+          "requires": {
+            "@babel/helper-validator-identifier": "^7.22.20",
+            "chalk": "^2.4.2",
+            "js-tokens": "^4.0.0"
+          }
+        },
+        "@babel/parser": {
+          "version": "7.23.0",
+          "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.23.0.tgz",
+          "integrity": "sha512-vvPKKdMemU85V9WE/l5wZEmImpCtLqbnTvqDS2U1fJ96KrxoW7KrXhNsNCblQlg8Ck4b85yxdTyelsMUgFUXiw==",
+          "dev": true
+        },
+        "@babel/template": {
+          "version": "7.22.15",
+          "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.22.15.tgz",
+          "integrity": "sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w==",
+          "dev": true,
+          "requires": {
+            "@babel/code-frame": "^7.22.13",
+            "@babel/parser": "^7.22.15",
+            "@babel/types": "^7.22.15"
+          }
+        },
+        "@babel/types": {
+          "version": "7.23.0",
+          "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.23.0.tgz",
+          "integrity": "sha512-0oIyUfKoI3mSqMvsxBdclDwxXKXAUA8v/apZbc+iSyARYou1o8ZGDxbUYyLFoW2arqS2jDGqJuZvv1d/io1axg==",
+          "dev": true,
+          "requires": {
+            "@babel/helper-string-parser": "^7.22.5",
+            "@babel/helper-validator-identifier": "^7.22.20",
+            "to-fast-properties": "^2.0.0"
+          }
+        },
+        "@jridgewell/gen-mapping": {
+          "version": "0.3.3",
+          "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz",
+          "integrity": "sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==",
+          "dev": true,
+          "requires": {
+            "@jridgewell/set-array": "^1.0.1",
+            "@jridgewell/sourcemap-codec": "^1.4.10",
+            "@jridgewell/trace-mapping": "^0.3.9"
+          }
+        },
+        "@jridgewell/trace-mapping": {
+          "version": "0.3.19",
+          "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.19.tgz",
+          "integrity": "sha512-kf37QtfW+Hwx/buWGMPcR60iF9ziHa6r/CZJIHbmcm4+0qrXiVdxegAH0F6yddEVQ7zdkjcGCgCzUu+BcbhQxw==",
+          "dev": true,
+          "requires": {
+            "@jridgewell/resolve-uri": "^3.1.0",
+            "@jridgewell/sourcemap-codec": "^1.4.14"
+          }
+        },
+        "ansi-styles": {
+          "version": "3.2.1",
+          "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
+          "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
+          "dev": true,
+          "requires": {
+            "color-convert": "^1.9.0"
+          }
+        },
+        "chalk": {
+          "version": "2.4.2",
+          "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
+          "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==",
+          "dev": true,
+          "requires": {
+            "ansi-styles": "^3.2.1",
+            "escape-string-regexp": "^1.0.5",
+            "supports-color": "^5.3.0"
+          }
+        },
+        "color-convert": {
+          "version": "1.9.3",
+          "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
+          "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
+          "dev": true,
+          "requires": {
+            "color-name": "1.1.3"
+          }
+        },
+        "color-name": {
+          "version": "1.1.3",
+          "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
+          "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==",
+          "dev": true
+        },
+        "globals": {
+          "version": "11.12.0",
+          "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz",
+          "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==",
+          "dev": true
+        },
+        "has-flag": {
+          "version": "3.0.0",
+          "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
+          "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==",
+          "dev": true
+        },
+        "supports-color": {
+          "version": "5.5.0",
+          "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
+          "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
+          "dev": true,
+          "requires": {
+            "has-flag": "^3.0.0"
+          }
+        }
+      }
+    },
+    "@babel/types": {
+      "version": "7.20.7",
+      "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.20.7.tgz",
+      "integrity": "sha512-69OnhBxSSgK0OzTJai4kyPDiKTIe3j+ctaHdIGVbRahTLAT7L3R9oeXHC2aVSuGYt3cVnoAMDmOCgJ2yaiLMvg==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-string-parser": "^7.19.4",
+        "@babel/helper-validator-identifier": "^7.19.1",
+        "to-fast-properties": "^2.0.0"
+      }
+    },
+    "@commitlint/cli": {
+      "version": "17.4.4",
+      "resolved": "https://registry.npmjs.org/@commitlint/cli/-/cli-17.4.4.tgz",
+      "integrity": "sha512-HwKlD7CPVMVGTAeFZylVNy14Vm5POVY0WxPkZr7EXLC/os0LH/obs6z4HRvJtH/nHCMYBvUBQhGwnufKfTjd5g==",
+      "dev": true,
+      "requires": {
+        "@commitlint/format": "^17.4.4",
+        "@commitlint/lint": "^17.4.4",
+        "@commitlint/load": "^17.4.4",
+        "@commitlint/read": "^17.4.4",
+        "@commitlint/types": "^17.4.4",
+        "execa": "^5.0.0",
+        "lodash.isfunction": "^3.0.9",
+        "resolve-from": "5.0.0",
+        "resolve-global": "1.0.0",
+        "yargs": "^17.0.0"
+      }
+    },
+    "@commitlint/config-conventional": {
+      "version": "17.4.4",
+      "resolved": "https://registry.npmjs.org/@commitlint/config-conventional/-/config-conventional-17.4.4.tgz",
+      "integrity": "sha512-u6ztvxqzi6NuhrcEDR7a+z0yrh11elY66nRrQIpqsqW6sZmpxYkDLtpRH8jRML+mmxYQ8s4qqF06Q/IQx5aJeQ==",
+      "dev": true,
+      "requires": {
+        "conventional-changelog-conventionalcommits": "^5.0.0"
+      }
+    },
+    "@commitlint/config-validator": {
+      "version": "17.4.4",
+      "resolved": "https://registry.npmjs.org/@commitlint/config-validator/-/config-validator-17.4.4.tgz",
+      "integrity": "sha512-bi0+TstqMiqoBAQDvdEP4AFh0GaKyLFlPPEObgI29utoKEYoPQTvF0EYqIwYYLEoJYhj5GfMIhPHJkTJhagfeg==",
+      "dev": true,
+      "requires": {
+        "@commitlint/types": "^17.4.4",
+        "ajv": "^8.11.0"
+      }
+    },
+    "@commitlint/ensure": {
+      "version": "17.4.4",
+      "resolved": "https://registry.npmjs.org/@commitlint/ensure/-/ensure-17.4.4.tgz",
+      "integrity": "sha512-AHsFCNh8hbhJiuZ2qHv/m59W/GRE9UeOXbkOqxYMNNg9pJ7qELnFcwj5oYpa6vzTSHtPGKf3C2yUFNy1GGHq6g==",
+      "dev": true,
+      "requires": {
+        "@commitlint/types": "^17.4.4",
+        "lodash.camelcase": "^4.3.0",
+        "lodash.kebabcase": "^4.1.1",
+        "lodash.snakecase": "^4.1.1",
+        "lodash.startcase": "^4.4.0",
+        "lodash.upperfirst": "^4.3.1"
+      }
+    },
+    "@commitlint/execute-rule": {
+      "version": "17.4.0",
+      "resolved": "https://registry.npmjs.org/@commitlint/execute-rule/-/execute-rule-17.4.0.tgz",
+      "integrity": "sha512-LIgYXuCSO5Gvtc0t9bebAMSwd68ewzmqLypqI2Kke1rqOqqDbMpYcYfoPfFlv9eyLIh4jocHWwCK5FS7z9icUA==",
+      "dev": true
+    },
+    "@commitlint/format": {
+      "version": "17.4.4",
+      "resolved": "https://registry.npmjs.org/@commitlint/format/-/format-17.4.4.tgz",
+      "integrity": "sha512-+IS7vpC4Gd/x+uyQPTAt3hXs5NxnkqAZ3aqrHd5Bx/R9skyCAWusNlNbw3InDbAK6j166D9asQM8fnmYIa+CXQ==",
+      "dev": true,
+      "requires": {
+        "@commitlint/types": "^17.4.4",
+        "chalk": "^4.1.0"
+      }
+    },
+    "@commitlint/is-ignored": {
+      "version": "17.4.4",
+      "resolved": "https://registry.npmjs.org/@commitlint/is-ignored/-/is-ignored-17.4.4.tgz",
+      "integrity": "sha512-Y3eo1SFJ2JQDik4rWkBC4tlRIxlXEFrRWxcyrzb1PUT2k3kZ/XGNuCDfk/u0bU2/yS0tOA/mTjFsV+C4qyACHw==",
+      "dev": true,
+      "requires": {
+        "@commitlint/types": "^17.4.4",
+        "semver": "7.3.8"
+      }
+    },
+    "@commitlint/lint": {
+      "version": "17.4.4",
+      "resolved": "https://registry.npmjs.org/@commitlint/lint/-/lint-17.4.4.tgz",
+      "integrity": "sha512-qgkCRRFjyhbMDWsti/5jRYVJkgYZj4r+ZmweZObnbYqPUl5UKLWMf9a/ZZisOI4JfiPmRktYRZ2JmqlSvg+ccw==",
+      "dev": true,
+      "requires": {
+        "@commitlint/is-ignored": "^17.4.4",
+        "@commitlint/parse": "^17.4.4",
+        "@commitlint/rules": "^17.4.4",
+        "@commitlint/types": "^17.4.4"
+      }
+    },
+    "@commitlint/load": {
+      "version": "17.4.4",
+      "resolved": "https://registry.npmjs.org/@commitlint/load/-/load-17.4.4.tgz",
+      "integrity": "sha512-z6uFIQ7wfKX5FGBe1AkOF4l/ShOQsaa1ml/nLMkbW7R/xF8galGS7Zh0yHvzVp/srtfS0brC+0bUfQfmpMPFVQ==",
+      "dev": true,
+      "requires": {
+        "@commitlint/config-validator": "^17.4.4",
+        "@commitlint/execute-rule": "^17.4.0",
+        "@commitlint/resolve-extends": "^17.4.4",
+        "@commitlint/types": "^17.4.4",
+        "@types/node": "*",
+        "chalk": "^4.1.0",
+        "cosmiconfig": "^8.0.0",
+        "cosmiconfig-typescript-loader": "^4.0.0",
+        "lodash.isplainobject": "^4.0.6",
+        "lodash.merge": "^4.6.2",
+        "lodash.uniq": "^4.5.0",
+        "resolve-from": "^5.0.0",
+        "ts-node": "^10.8.1",
+        "typescript": "^4.6.4"
+      }
+    },
+    "@commitlint/message": {
+      "version": "17.4.2",
+      "resolved": "https://registry.npmjs.org/@commitlint/message/-/message-17.4.2.tgz",
+      "integrity": "sha512-3XMNbzB+3bhKA1hSAWPCQA3lNxR4zaeQAQcHj0Hx5sVdO6ryXtgUBGGv+1ZCLMgAPRixuc6en+iNAzZ4NzAa8Q==",
+      "dev": true
+    },
+    "@commitlint/parse": {
+      "version": "17.4.4",
+      "resolved": "https://registry.npmjs.org/@commitlint/parse/-/parse-17.4.4.tgz",
+      "integrity": "sha512-EKzz4f49d3/OU0Fplog7nwz/lAfXMaDxtriidyGF9PtR+SRbgv4FhsfF310tKxs6EPj8Y+aWWuX3beN5s+yqGg==",
+      "dev": true,
+      "requires": {
+        "@commitlint/types": "^17.4.4",
+        "conventional-changelog-angular": "^5.0.11",
+        "conventional-commits-parser": "^3.2.2"
+      }
+    },
+    "@commitlint/read": {
+      "version": "17.4.4",
+      "resolved": "https://registry.npmjs.org/@commitlint/read/-/read-17.4.4.tgz",
+      "integrity": "sha512-B2TvUMJKK+Svzs6eji23WXsRJ8PAD+orI44lVuVNsm5zmI7O8RSGJMvdEZEikiA4Vohfb+HevaPoWZ7PiFZ3zA==",
+      "dev": true,
+      "requires": {
+        "@commitlint/top-level": "^17.4.0",
+        "@commitlint/types": "^17.4.4",
+        "fs-extra": "^11.0.0",
+        "git-raw-commits": "^2.0.0",
+        "minimist": "^1.2.6"
+      }
+    },
+    "@commitlint/resolve-extends": {
+      "version": "17.4.4",
+      "resolved": "https://registry.npmjs.org/@commitlint/resolve-extends/-/resolve-extends-17.4.4.tgz",
+      "integrity": "sha512-znXr1S0Rr8adInptHw0JeLgumS11lWbk5xAWFVno+HUFVN45875kUtqjrI6AppmD3JI+4s0uZlqqlkepjJd99A==",
+      "dev": true,
+      "requires": {
+        "@commitlint/config-validator": "^17.4.4",
+        "@commitlint/types": "^17.4.4",
+        "import-fresh": "^3.0.0",
+        "lodash.mergewith": "^4.6.2",
+        "resolve-from": "^5.0.0",
+        "resolve-global": "^1.0.0"
+      }
+    },
+    "@commitlint/rules": {
+      "version": "17.4.4",
+      "resolved": "https://registry.npmjs.org/@commitlint/rules/-/rules-17.4.4.tgz",
+      "integrity": "sha512-0tgvXnHi/mVcyR8Y8mjTFZIa/FEQXA4uEutXS/imH2v1UNkYDSEMsK/68wiXRpfW1euSgEdwRkvE1z23+yhNrQ==",
+      "dev": true,
+      "requires": {
+        "@commitlint/ensure": "^17.4.4",
+        "@commitlint/message": "^17.4.2",
+        "@commitlint/to-lines": "^17.4.0",
+        "@commitlint/types": "^17.4.4",
+        "execa": "^5.0.0"
+      }
+    },
+    "@commitlint/to-lines": {
+      "version": "17.4.0",
+      "resolved": "https://registry.npmjs.org/@commitlint/to-lines/-/to-lines-17.4.0.tgz",
+      "integrity": "sha512-LcIy/6ZZolsfwDUWfN1mJ+co09soSuNASfKEU5sCmgFCvX5iHwRYLiIuoqXzOVDYOy7E7IcHilr/KS0e5T+0Hg==",
+      "dev": true
+    },
+    "@commitlint/top-level": {
+      "version": "17.4.0",
+      "resolved": "https://registry.npmjs.org/@commitlint/top-level/-/top-level-17.4.0.tgz",
+      "integrity": "sha512-/1loE/g+dTTQgHnjoCy0AexKAEFyHsR2zRB4NWrZ6lZSMIxAhBJnmCqwao7b4H8888PsfoTBCLBYIw8vGnej8g==",
+      "dev": true,
+      "requires": {
+        "find-up": "^5.0.0"
+      },
+      "dependencies": {
+        "find-up": {
+          "version": "5.0.0",
+          "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz",
+          "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==",
+          "dev": true,
+          "requires": {
+            "locate-path": "^6.0.0",
+            "path-exists": "^4.0.0"
+          }
+        },
+        "locate-path": {
+          "version": "6.0.0",
+          "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
+          "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==",
+          "dev": true,
+          "requires": {
+            "p-locate": "^5.0.0"
+          }
+        },
+        "p-limit": {
+          "version": "3.1.0",
+          "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
+          "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
+          "dev": true,
+          "requires": {
+            "yocto-queue": "^0.1.0"
+          }
+        },
+        "p-locate": {
+          "version": "5.0.0",
+          "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz",
+          "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==",
+          "dev": true,
+          "requires": {
+            "p-limit": "^3.0.2"
+          }
+        }
+      }
+    },
+    "@commitlint/types": {
+      "version": "17.4.4",
+      "resolved": "https://registry.npmjs.org/@commitlint/types/-/types-17.4.4.tgz",
+      "integrity": "sha512-amRN8tRLYOsxRr6mTnGGGvB5EmW/4DDjLMgiwK3CCVEmN6Sr/6xePGEpWaspKkckILuUORCwe6VfDBw6uj4axQ==",
+      "dev": true,
+      "requires": {
+        "chalk": "^4.1.0"
+      }
+    },
+    "@cspotcode/source-map-support": {
+      "version": "0.8.1",
+      "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
+      "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==",
+      "dev": true,
+      "requires": {
+        "@jridgewell/trace-mapping": "0.3.9"
+      }
+    },
+    "@eslint/eslintrc": {
+      "version": "1.4.1",
+      "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.4.1.tgz",
+      "integrity": "sha512-XXrH9Uarn0stsyldqDYq8r++mROmWRI1xKMXa640Bb//SY1+ECYX6VzT6Lcx5frD0V30XieqJ0oX9I2Xj5aoMA==",
+      "dev": true,
+      "requires": {
+        "ajv": "^6.12.4",
+        "debug": "^4.3.2",
+        "espree": "^9.4.0",
+        "globals": "^13.19.0",
+        "ignore": "^5.2.0",
+        "import-fresh": "^3.2.1",
+        "js-yaml": "^4.1.0",
+        "minimatch": "^3.1.2",
+        "strip-json-comments": "^3.1.1"
+      },
+      "dependencies": {
+        "ajv": {
+          "version": "6.12.6",
+          "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
+          "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
+          "dev": true,
+          "requires": {
+            "fast-deep-equal": "^3.1.1",
+            "fast-json-stable-stringify": "^2.0.0",
+            "json-schema-traverse": "^0.4.1",
+            "uri-js": "^4.2.2"
+          }
+        },
+        "json-schema-traverse": {
+          "version": "0.4.1",
+          "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
+          "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==",
+          "dev": true
+        }
+      }
+    },
+    "@humanwhocodes/config-array": {
+      "version": "0.11.8",
+      "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.8.tgz",
+      "integrity": "sha512-UybHIJzJnR5Qc/MsD9Kr+RpO2h+/P1GhOwdiLPXK5TWk5sgTdu88bTD9UP+CKbPPh5Rni1u0GjAdYQLemG8g+g==",
+      "dev": true,
+      "requires": {
+        "@humanwhocodes/object-schema": "^1.2.1",
+        "debug": "^4.1.1",
+        "minimatch": "^3.0.5"
+      }
+    },
+    "@humanwhocodes/module-importer": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz",
+      "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==",
+      "dev": true
+    },
+    "@humanwhocodes/object-schema": {
+      "version": "1.2.1",
+      "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz",
+      "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==",
+      "dev": true
+    },
+    "@istanbuljs/load-nyc-config": {
+      "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz",
+      "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==",
+      "dev": true,
+      "requires": {
+        "camelcase": "^5.3.1",
+        "find-up": "^4.1.0",
+        "get-package-type": "^0.1.0",
+        "js-yaml": "^3.13.1",
+        "resolve-from": "^5.0.0"
+      },
+      "dependencies": {
+        "argparse": {
+          "version": "1.0.10",
+          "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
+          "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
+          "dev": true,
+          "requires": {
+            "sprintf-js": "~1.0.2"
+          }
+        },
+        "js-yaml": {
+          "version": "3.14.1",
+          "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz",
+          "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==",
+          "dev": true,
+          "requires": {
+            "argparse": "^1.0.7",
+            "esprima": "^4.0.0"
+          }
+        }
+      }
+    },
+    "@istanbuljs/schema": {
+      "version": "0.1.3",
+      "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz",
+      "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==",
+      "dev": true
+    },
+    "@jridgewell/gen-mapping": {
+      "version": "0.1.1",
+      "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz",
+      "integrity": "sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w==",
+      "dev": true,
+      "requires": {
+        "@jridgewell/set-array": "^1.0.0",
+        "@jridgewell/sourcemap-codec": "^1.4.10"
+      }
+    },
+    "@jridgewell/resolve-uri": {
+      "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz",
+      "integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==",
+      "dev": true
+    },
+    "@jridgewell/set-array": {
+      "version": "1.1.2",
+      "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz",
+      "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==",
+      "dev": true
+    },
+    "@jridgewell/sourcemap-codec": {
+      "version": "1.4.14",
+      "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz",
+      "integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==",
+      "dev": true
+    },
+    "@jridgewell/trace-mapping": {
+      "version": "0.3.9",
+      "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz",
+      "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==",
+      "dev": true,
+      "requires": {
+        "@jridgewell/resolve-uri": "^3.0.3",
+        "@jridgewell/sourcemap-codec": "^1.4.10"
+      }
+    },
+    "@log4js-node/sandboxed-module": {
+      "version": "2.2.1",
+      "resolved": "https://registry.npmjs.org/@log4js-node/sandboxed-module/-/sandboxed-module-2.2.1.tgz",
+      "integrity": "sha512-BtpxW7EReVwZ6WSNHPMyID2vVYuBKYkJyevJxbPsTtecWGqwm1wL4/O3oOQcyGhJsuNi7Y8JhNc5FE9jdXlZ0A==",
+      "dev": true,
+      "requires": {
+        "require-like": "0.1.2",
+        "stack-trace": "0.0.10"
+      }
+    },
+    "@nodelib/fs.scandir": {
+      "version": "2.1.5",
+      "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
+      "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==",
+      "dev": true,
+      "requires": {
+        "@nodelib/fs.stat": "2.0.5",
+        "run-parallel": "^1.1.9"
+      }
+    },
+    "@nodelib/fs.stat": {
+      "version": "2.0.5",
+      "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz",
+      "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==",
+      "dev": true
+    },
+    "@nodelib/fs.walk": {
+      "version": "1.2.8",
+      "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz",
+      "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==",
+      "dev": true,
+      "requires": {
+        "@nodelib/fs.scandir": "2.1.5",
+        "fastq": "^1.6.0"
+      }
+    },
+    "@tsconfig/node10": {
+      "version": "1.0.9",
+      "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz",
+      "integrity": "sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==",
+      "dev": true
+    },
+    "@tsconfig/node12": {
+      "version": "1.0.11",
+      "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz",
+      "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==",
+      "dev": true
+    },
+    "@tsconfig/node14": {
+      "version": "1.0.3",
+      "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz",
+      "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==",
+      "dev": true
+    },
+    "@tsconfig/node16": {
+      "version": "1.0.3",
+      "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.3.tgz",
+      "integrity": "sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ==",
+      "dev": true
+    },
+    "@types/json5": {
+      "version": "0.0.29",
+      "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz",
+      "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==",
+      "dev": true
+    },
+    "@types/minimist": {
+      "version": "1.2.2",
+      "resolved": "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.2.tgz",
+      "integrity": "sha512-jhuKLIRrhvCPLqwPcx6INqmKeiA5EWrsCOPhrlFSrbrmU4ZMPjj5Ul/oLCMDO98XRUIwVm78xICz4EPCektzeQ==",
+      "dev": true
+    },
+    "@types/node": {
+      "version": "18.14.0",
+      "resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.0.tgz",
+      "integrity": "sha512-5EWrvLmglK+imbCJY0+INViFWUHg1AHel1sq4ZVSfdcNqGy9Edv3UB9IIzzg+xPaUcAgZYcfVs2fBcwDeZzU0A==",
+      "dev": true
+    },
+    "@types/normalize-package-data": {
+      "version": "2.4.1",
+      "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.1.tgz",
+      "integrity": "sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw==",
+      "dev": true
+    },
+    "JSONStream": {
+      "version": "1.3.5",
+      "resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.5.tgz",
+      "integrity": "sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ==",
+      "dev": true,
+      "requires": {
+        "jsonparse": "^1.2.0",
+        "through": ">=2.2.7 <3"
+      }
+    },
+    "acorn": {
+      "version": "8.8.2",
+      "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.2.tgz",
+      "integrity": "sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw==",
+      "dev": true
+    },
+    "acorn-jsx": {
+      "version": "5.3.2",
+      "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz",
+      "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==",
+      "dev": true
+    },
+    "acorn-walk": {
+      "version": "8.2.0",
+      "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz",
+      "integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==",
+      "dev": true
+    },
+    "aggregate-error": {
+      "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz",
+      "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==",
+      "dev": true,
+      "requires": {
+        "clean-stack": "^2.0.0",
+        "indent-string": "^4.0.0"
+      }
+    },
+    "ajv": {
+      "version": "8.12.0",
+      "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz",
+      "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==",
+      "dev": true,
+      "requires": {
+        "fast-deep-equal": "^3.1.1",
+        "json-schema-traverse": "^1.0.0",
+        "require-from-string": "^2.0.2",
+        "uri-js": "^4.2.2"
+      }
+    },
+    "ansi-regex": {
+      "version": "5.0.1",
+      "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
+      "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+      "dev": true
+    },
+    "ansi-styles": {
+      "version": "4.3.0",
+      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+      "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
+      "dev": true,
+      "requires": {
+        "color-convert": "^2.0.1"
+      }
+    },
+    "anymatch": {
+      "version": "3.1.3",
+      "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz",
+      "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==",
+      "dev": true,
+      "requires": {
+        "normalize-path": "^3.0.0",
+        "picomatch": "^2.0.4"
+      }
+    },
+    "append-transform": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/append-transform/-/append-transform-2.0.0.tgz",
+      "integrity": "sha512-7yeyCEurROLQJFv5Xj4lEGTy0borxepjFv1g22oAdqFu//SrAlDl1O1Nxx15SH1RoliUml6p8dwJW9jvZughhg==",
+      "dev": true,
+      "requires": {
+        "default-require-extensions": "^3.0.0"
+      }
+    },
+    "archy": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/archy/-/archy-1.0.0.tgz",
+      "integrity": "sha512-Xg+9RwCg/0p32teKdGMPTPnVXKD0w3DfHnFTficozsAgsvq2XenPJq/MYpzzQ/v8zrOyJn6Ds39VA4JIDwFfqw==",
+      "dev": true
+    },
+    "arg": {
+      "version": "4.1.3",
+      "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz",
+      "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==",
+      "dev": true
+    },
+    "argparse": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
+      "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==",
+      "dev": true
+    },
+    "array-ify": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/array-ify/-/array-ify-1.0.0.tgz",
+      "integrity": "sha512-c5AMf34bKdvPhQ7tBGhqkgKNUzMr4WUs+WDtC2ZUGOUncbxKMTvqxYctiseW3+L4bA8ec+GcZ6/A/FW4m8ukng==",
+      "dev": true
+    },
+    "array-includes": {
+      "version": "3.1.6",
+      "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.6.tgz",
+      "integrity": "sha512-sgTbLvL6cNnw24FnbaDyjmvddQ2ML8arZsgaJhoABMoplz/4QRhtrYS+alr1BUM1Bwp6dhx8vVCBSLG+StwOFw==",
+      "dev": true,
+      "requires": {
+        "call-bind": "^1.0.2",
+        "define-properties": "^1.1.4",
+        "es-abstract": "^1.20.4",
+        "get-intrinsic": "^1.1.3",
+        "is-string": "^1.0.7"
+      }
+    },
+    "array.prototype.flat": {
+      "version": "1.3.1",
+      "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.1.tgz",
+      "integrity": "sha512-roTU0KWIOmJ4DRLmwKd19Otg0/mT3qPNt0Qb3GWW8iObuZXxrjB/pzn0R3hqpRSWg4HCwqx+0vwOnWnvlOyeIA==",
+      "dev": true,
+      "requires": {
+        "call-bind": "^1.0.2",
+        "define-properties": "^1.1.4",
+        "es-abstract": "^1.20.4",
+        "es-shim-unscopables": "^1.0.0"
+      }
+    },
+    "array.prototype.flatmap": {
+      "version": "1.3.1",
+      "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.1.tgz",
+      "integrity": "sha512-8UGn9O1FDVvMNB0UlLv4voxRMze7+FpHyF5mSMRjWHUMlpoDViniy05870VlxhfgTnLbpuwTzvD76MTtWxB/mQ==",
+      "dev": true,
+      "requires": {
+        "call-bind": "^1.0.2",
+        "define-properties": "^1.1.4",
+        "es-abstract": "^1.20.4",
+        "es-shim-unscopables": "^1.0.0"
+      }
+    },
+    "arrify": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz",
+      "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==",
+      "dev": true
+    },
+    "async-hook-domain": {
+      "version": "2.0.4",
+      "resolved": "https://registry.npmjs.org/async-hook-domain/-/async-hook-domain-2.0.4.tgz",
+      "integrity": "sha512-14LjCmlK1PK8eDtTezR6WX8TMaYNIzBIsd2D1sGoGjgx0BuNMMoSdk7i/drlbtamy0AWv9yv2tkB+ASdmeqFIw==",
+      "dev": true
+    },
+    "available-typed-arrays": {
+      "version": "1.0.5",
+      "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz",
+      "integrity": "sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==",
+      "dev": true
+    },
+    "balanced-match": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
+      "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
+      "dev": true
+    },
+    "binary-extensions": {
+      "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz",
+      "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==",
+      "dev": true
+    },
+    "bind-obj-methods": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/bind-obj-methods/-/bind-obj-methods-3.0.0.tgz",
+      "integrity": "sha512-nLEaaz3/sEzNSyPWRsN9HNsqwk1AUyECtGj+XwGdIi3xABnEqecvXtIJ0wehQXuuER5uZ/5fTs2usONgYjG+iw==",
+      "dev": true
+    },
+    "brace-expansion": {
+      "version": "1.1.11",
+      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
+      "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
+      "dev": true,
+      "requires": {
+        "balanced-match": "^1.0.0",
+        "concat-map": "0.0.1"
+      }
+    },
+    "braces": {
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
+      "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
+      "dev": true,
+      "requires": {
+        "fill-range": "^7.1.1"
+      },
+      "dependencies": {
+        "fill-range": {
+          "version": "7.1.1",
+          "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
+          "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
+          "dev": true,
+          "requires": {
+            "to-regex-range": "^5.0.1"
+          }
+        }
+      }
+    },
+    "browserslist": {
+      "version": "4.21.5",
+      "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.5.tgz",
+      "integrity": "sha512-tUkiguQGW7S3IhB7N+c2MV/HZPSCPAAiYBZXLsBhFB/PCy6ZKKsZrmBayHV9fdGV/ARIfJ14NkxKzRDjvp7L6w==",
+      "dev": true,
+      "requires": {
+        "caniuse-lite": "^1.0.30001449",
+        "electron-to-chromium": "^1.4.284",
+        "node-releases": "^2.0.8",
+        "update-browserslist-db": "^1.0.10"
+      }
+    },
+    "buffer-from": {
+      "version": "1.1.2",
+      "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz",
+      "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==",
+      "dev": true
+    },
+    "caching-transform": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/caching-transform/-/caching-transform-4.0.0.tgz",
+      "integrity": "sha512-kpqOvwXnjjN44D89K5ccQC+RUrsy7jB/XLlRrx0D7/2HNcTPqzsb6XgYoErwko6QsV184CA2YgS1fxDiiDZMWA==",
+      "dev": true,
+      "requires": {
+        "hasha": "^5.0.0",
+        "make-dir": "^3.0.0",
+        "package-hash": "^4.0.0",
+        "write-file-atomic": "^3.0.0"
+      }
+    },
+    "call-bind": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz",
+      "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==",
+      "dev": true,
+      "requires": {
+        "function-bind": "^1.1.1",
+        "get-intrinsic": "^1.0.2"
+      }
+    },
+    "callsites": {
+      "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
+      "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==",
+      "dev": true
+    },
+    "camelcase": {
+      "version": "5.3.1",
+      "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz",
+      "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==",
+      "dev": true
+    },
+    "camelcase-keys": {
+      "version": "6.2.2",
+      "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.2.2.tgz",
+      "integrity": "sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg==",
+      "dev": true,
+      "requires": {
+        "camelcase": "^5.3.1",
+        "map-obj": "^4.0.0",
+        "quick-lru": "^4.0.1"
+      }
+    },
+    "caniuse-lite": {
+      "version": "1.0.30001457",
+      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001457.tgz",
+      "integrity": "sha512-SDIV6bgE1aVbK6XyxdURbUE89zY7+k1BBBaOwYwkNCglXlel/E7mELiHC64HQ+W0xSKlqWhV9Wh7iHxUjMs4fA==",
+      "dev": true
+    },
+    "chalk": {
+      "version": "4.1.2",
+      "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
+      "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
+      "dev": true,
+      "requires": {
+        "ansi-styles": "^4.1.0",
+        "supports-color": "^7.1.0"
+      }
+    },
+    "chokidar": {
+      "version": "3.5.3",
+      "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz",
+      "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==",
+      "dev": true,
+      "requires": {
+        "anymatch": "~3.1.2",
+        "braces": "~3.0.2",
+        "fsevents": "~2.3.2",
+        "glob-parent": "~5.1.2",
+        "is-binary-path": "~2.1.0",
+        "is-glob": "~4.0.1",
+        "normalize-path": "~3.0.0",
+        "readdirp": "~3.6.0"
+      },
+      "dependencies": {
+        "glob-parent": {
+          "version": "5.1.2",
+          "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
+          "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
+          "dev": true,
+          "requires": {
+            "is-glob": "^4.0.1"
+          }
+        }
+      }
+    },
+    "ci-info": {
+      "version": "3.8.0",
+      "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.8.0.tgz",
+      "integrity": "sha512-eXTggHWSooYhq49F2opQhuHWgzucfF2YgODK4e1566GQs5BIfP30B0oenwBJHfWxAs2fyPB1s7Mg949zLf61Yw==",
+      "dev": true
+    },
+    "clean-stack": {
+      "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz",
+      "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==",
+      "dev": true
+    },
+    "cliui": {
+      "version": "8.0.1",
+      "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz",
+      "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==",
+      "dev": true,
+      "requires": {
+        "string-width": "^4.2.0",
+        "strip-ansi": "^6.0.1",
+        "wrap-ansi": "^7.0.0"
+      }
+    },
+    "color-convert": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
+      "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
+      "dev": true,
+      "requires": {
+        "color-name": "~1.1.4"
+      }
+    },
+    "color-name": {
+      "version": "1.1.4",
+      "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
+      "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
+      "dev": true
+    },
+    "color-support": {
+      "version": "1.1.3",
+      "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz",
+      "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==",
+      "dev": true
+    },
+    "commondir": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz",
+      "integrity": "sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==",
+      "dev": true
+    },
+    "compare-func": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/compare-func/-/compare-func-2.0.0.tgz",
+      "integrity": "sha512-zHig5N+tPWARooBnb0Zx1MFcdfpyJrfTJ3Y5L+IFvUm8rM74hHz66z0gw0x4tijh5CorKkKUCnW82R2vmpeCRA==",
+      "dev": true,
+      "requires": {
+        "array-ify": "^1.0.0",
+        "dot-prop": "^5.1.0"
+      }
+    },
+    "concat-map": {
+      "version": "0.0.1",
+      "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
+      "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==",
+      "dev": true
+    },
+    "confusing-browser-globals": {
+      "version": "1.0.11",
+      "resolved": "https://registry.npmjs.org/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz",
+      "integrity": "sha512-JsPKdmh8ZkmnHxDk55FZ1TqVLvEQTvoByJZRN9jzI0UjxK/QgAmsphz7PGtqgPieQZ/CQcHWXCR7ATDNhGe+YA==",
+      "dev": true
+    },
+    "conventional-changelog-angular": {
+      "version": "5.0.13",
+      "resolved": "https://registry.npmjs.org/conventional-changelog-angular/-/conventional-changelog-angular-5.0.13.tgz",
+      "integrity": "sha512-i/gipMxs7s8L/QeuavPF2hLnJgH6pEZAttySB6aiQLWcX3puWDL3ACVmvBhJGxnAy52Qc15ua26BufY6KpmrVA==",
+      "dev": true,
+      "requires": {
+        "compare-func": "^2.0.0",
+        "q": "^1.5.1"
+      }
+    },
+    "conventional-changelog-conventionalcommits": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/conventional-changelog-conventionalcommits/-/conventional-changelog-conventionalcommits-5.0.0.tgz",
+      "integrity": "sha512-lCDbA+ZqVFQGUj7h9QBKoIpLhl8iihkO0nCTyRNzuXtcd7ubODpYB04IFy31JloiJgG0Uovu8ot8oxRzn7Nwtw==",
+      "dev": true,
+      "requires": {
+        "compare-func": "^2.0.0",
+        "lodash": "^4.17.15",
+        "q": "^1.5.1"
+      }
+    },
+    "conventional-commits-parser": {
+      "version": "3.2.4",
+      "resolved": "https://registry.npmjs.org/conventional-commits-parser/-/conventional-commits-parser-3.2.4.tgz",
+      "integrity": "sha512-nK7sAtfi+QXbxHCYfhpZsfRtaitZLIA6889kFIouLvz6repszQDgxBu7wf2WbU+Dco7sAnNCJYERCwt54WPC2Q==",
+      "dev": true,
+      "requires": {
+        "JSONStream": "^1.0.4",
+        "is-text-path": "^1.0.1",
+        "lodash": "^4.17.15",
+        "meow": "^8.0.0",
+        "split2": "^3.0.0",
+        "through2": "^4.0.0"
+      }
+    },
+    "convert-source-map": {
+      "version": "1.9.0",
+      "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz",
+      "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==",
+      "dev": true
+    },
+    "cosmiconfig": {
+      "version": "8.0.0",
+      "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.0.0.tgz",
+      "integrity": "sha512-da1EafcpH6b/TD8vDRaWV7xFINlHlF6zKsGwS1TsuVJTZRkquaS5HTMq7uq6h31619QjbsYl21gVDOm32KM1vQ==",
+      "dev": true,
+      "requires": {
+        "import-fresh": "^3.2.1",
+        "js-yaml": "^4.1.0",
+        "parse-json": "^5.0.0",
+        "path-type": "^4.0.0"
+      }
+    },
+    "cosmiconfig-typescript-loader": {
+      "version": "4.3.0",
+      "resolved": "https://registry.npmjs.org/cosmiconfig-typescript-loader/-/cosmiconfig-typescript-loader-4.3.0.tgz",
+      "integrity": "sha512-NTxV1MFfZDLPiBMjxbHRwSh5LaLcPMwNdCutmnHJCKoVnlvldPWlllonKwrsRJ5pYZBIBGRWWU2tfvzxgeSW5Q==",
+      "dev": true
+    },
+    "create-require": {
+      "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz",
+      "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==",
+      "dev": true
+    },
+    "cross-spawn": {
+      "version": "7.0.3",
+      "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
+      "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
+      "dev": true,
+      "requires": {
+        "path-key": "^3.1.0",
+        "shebang-command": "^2.0.0",
+        "which": "^2.0.1"
+      }
+    },
+    "dargs": {
+      "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/dargs/-/dargs-7.0.0.tgz",
+      "integrity": "sha512-2iy1EkLdlBzQGvbweYRFxmFath8+K7+AKB0TlhHWkNuH+TmovaMH/Wp7V7R4u7f4SnX3OgLsU9t1NI9ioDnUpg==",
+      "dev": true
+    },
+    "date-format": {
+      "version": "4.0.14",
+      "resolved": "https://registry.npmjs.org/date-format/-/date-format-4.0.14.tgz",
+      "integrity": "sha512-39BOQLs9ZjKh0/patS9nrT8wc3ioX3/eA/zgbKNopnF2wCqJEoxywwwElATYvRsXdnOxA/OQeQoFZ3rFjVajhg=="
+    },
+    "debug": {
+      "version": "4.3.4",
+      "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
+      "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
+      "requires": {
+        "ms": "2.1.2"
+      }
+    },
+    "decamelize": {
+      "version": "1.2.0",
+      "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz",
+      "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==",
+      "dev": true
+    },
+    "decamelize-keys": {
+      "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/decamelize-keys/-/decamelize-keys-1.1.1.tgz",
+      "integrity": "sha512-WiPxgEirIV0/eIOMcnFBA3/IJZAZqKnwAwWyvvdi4lsr1WCN22nhdf/3db3DoZcUjTV2SqfzIwNyp6y2xs3nmg==",
+      "dev": true,
+      "requires": {
+        "decamelize": "^1.1.0",
+        "map-obj": "^1.0.0"
+      },
+      "dependencies": {
+        "map-obj": {
+          "version": "1.0.1",
+          "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz",
+          "integrity": "sha512-7N/q3lyZ+LVCp7PzuxrJr4KMbBE2hW7BT7YNia330OFxIf4d3r5zVpicP2650l7CPN6RM9zOJRl3NGpqSiw3Eg==",
+          "dev": true
+        }
+      }
+    },
+    "deep-freeze": {
+      "version": "0.0.1",
+      "resolved": "https://registry.npmjs.org/deep-freeze/-/deep-freeze-0.0.1.tgz",
+      "integrity": "sha512-Z+z8HiAvsGwmjqlphnHW5oz6yWlOwu6EQfFTjmeTWlDeda3FS2yv3jhq35TX/ewmsnqB+RX2IdsIOyjJCQN5tg==",
+      "dev": true
+    },
+    "deep-is": {
+      "version": "0.1.4",
+      "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz",
+      "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==",
+      "dev": true
+    },
+    "default-require-extensions": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/default-require-extensions/-/default-require-extensions-3.0.1.tgz",
+      "integrity": "sha512-eXTJmRbm2TIt9MgWTsOH1wEuhew6XGZcMeGKCtLedIg/NCsg1iBePXkceTdK4Fii7pzmN9tGsZhKzZ4h7O/fxw==",
+      "dev": true,
+      "requires": {
+        "strip-bom": "^4.0.0"
+      },
+      "dependencies": {
+        "strip-bom": {
+          "version": "4.0.0",
+          "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz",
+          "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==",
+          "dev": true
+        }
+      }
+    },
+    "define-properties": {
+      "version": "1.2.0",
+      "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.0.tgz",
+      "integrity": "sha512-xvqAVKGfT1+UAvPwKTVw/njhdQ8ZhXK4lI0bCIuCMrp2up9nPnaDftrLtmpTazqd1o+UY4zgzU+avtMbDP+ldA==",
+      "dev": true,
+      "requires": {
+        "has-property-descriptors": "^1.0.0",
+        "object-keys": "^1.1.1"
+      }
+    },
+    "diff": {
+      "version": "4.0.2",
+      "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
+      "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==",
+      "dev": true
+    },
+    "doctrine": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz",
+      "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==",
+      "dev": true,
+      "requires": {
+        "esutils": "^2.0.2"
+      }
+    },
+    "dot-prop": {
+      "version": "5.3.0",
+      "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz",
+      "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==",
+      "dev": true,
+      "requires": {
+        "is-obj": "^2.0.0"
+      }
+    },
+    "electron-to-chromium": {
+      "version": "1.4.302",
+      "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.302.tgz",
+      "integrity": "sha512-Uk7C+7aPBryUR1Fwvk9VmipBcN9fVsqBO57jV2ZjTm+IZ6BMNqu7EDVEg2HxCNufk6QcWlFsBkhQyQroB2VWKw==",
+      "dev": true
+    },
+    "emoji-regex": {
+      "version": "8.0.0",
+      "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
+      "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
+      "dev": true
+    },
+    "error-ex": {
+      "version": "1.3.2",
+      "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz",
+      "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==",
+      "dev": true,
+      "requires": {
+        "is-arrayish": "^0.2.1"
+      }
+    },
+    "es-abstract": {
+      "version": "1.21.1",
+      "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.21.1.tgz",
+      "integrity": "sha512-QudMsPOz86xYz/1dG1OuGBKOELjCh99IIWHLzy5znUB6j8xG2yMA7bfTV86VSqKF+Y/H08vQPR+9jyXpuC6hfg==",
+      "dev": true,
+      "requires": {
+        "available-typed-arrays": "^1.0.5",
+        "call-bind": "^1.0.2",
+        "es-set-tostringtag": "^2.0.1",
+        "es-to-primitive": "^1.2.1",
+        "function-bind": "^1.1.1",
+        "function.prototype.name": "^1.1.5",
+        "get-intrinsic": "^1.1.3",
+        "get-symbol-description": "^1.0.0",
+        "globalthis": "^1.0.3",
+        "gopd": "^1.0.1",
+        "has": "^1.0.3",
+        "has-property-descriptors": "^1.0.0",
+        "has-proto": "^1.0.1",
+        "has-symbols": "^1.0.3",
+        "internal-slot": "^1.0.4",
+        "is-array-buffer": "^3.0.1",
+        "is-callable": "^1.2.7",
+        "is-negative-zero": "^2.0.2",
+        "is-regex": "^1.1.4",
+        "is-shared-array-buffer": "^1.0.2",
+        "is-string": "^1.0.7",
+        "is-typed-array": "^1.1.10",
+        "is-weakref": "^1.0.2",
+        "object-inspect": "^1.12.2",
+        "object-keys": "^1.1.1",
+        "object.assign": "^4.1.4",
+        "regexp.prototype.flags": "^1.4.3",
+        "safe-regex-test": "^1.0.0",
+        "string.prototype.trimend": "^1.0.6",
+        "string.prototype.trimstart": "^1.0.6",
+        "typed-array-length": "^1.0.4",
+        "unbox-primitive": "^1.0.2",
+        "which-typed-array": "^1.1.9"
+      }
+    },
+    "es-set-tostringtag": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz",
+      "integrity": "sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg==",
+      "dev": true,
+      "requires": {
+        "get-intrinsic": "^1.1.3",
+        "has": "^1.0.3",
+        "has-tostringtag": "^1.0.0"
+      }
+    },
+    "es-shim-unscopables": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz",
+      "integrity": "sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==",
+      "dev": true,
+      "requires": {
+        "has": "^1.0.3"
+      }
+    },
+    "es-to-primitive": {
+      "version": "1.2.1",
+      "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz",
+      "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==",
+      "dev": true,
+      "requires": {
+        "is-callable": "^1.1.4",
+        "is-date-object": "^1.0.1",
+        "is-symbol": "^1.0.2"
+      }
+    },
+    "es6-error": {
+      "version": "4.1.1",
+      "resolved": "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz",
+      "integrity": "sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==",
+      "dev": true
+    },
+    "escalade": {
+      "version": "3.1.1",
+      "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz",
+      "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==",
+      "dev": true
+    },
+    "escape-string-regexp": {
+      "version": "1.0.5",
+      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
+      "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==",
+      "dev": true
+    },
+    "eslint": {
+      "version": "8.34.0",
+      "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.34.0.tgz",
+      "integrity": "sha512-1Z8iFsucw+7kSqXNZVslXS8Ioa4u2KM7GPwuKtkTFAqZ/cHMcEaR+1+Br0wLlot49cNxIiZk5wp8EAbPcYZxTg==",
+      "dev": true,
+      "requires": {
+        "@eslint/eslintrc": "^1.4.1",
+        "@humanwhocodes/config-array": "^0.11.8",
+        "@humanwhocodes/module-importer": "^1.0.1",
+        "@nodelib/fs.walk": "^1.2.8",
+        "ajv": "^6.10.0",
+        "chalk": "^4.0.0",
+        "cross-spawn": "^7.0.2",
+        "debug": "^4.3.2",
+        "doctrine": "^3.0.0",
+        "escape-string-regexp": "^4.0.0",
+        "eslint-scope": "^7.1.1",
+        "eslint-utils": "^3.0.0",
+        "eslint-visitor-keys": "^3.3.0",
+        "espree": "^9.4.0",
+        "esquery": "^1.4.0",
+        "esutils": "^2.0.2",
+        "fast-deep-equal": "^3.1.3",
+        "file-entry-cache": "^6.0.1",
+        "find-up": "^5.0.0",
+        "glob-parent": "^6.0.2",
+        "globals": "^13.19.0",
+        "grapheme-splitter": "^1.0.4",
+        "ignore": "^5.2.0",
+        "import-fresh": "^3.0.0",
+        "imurmurhash": "^0.1.4",
+        "is-glob": "^4.0.0",
+        "is-path-inside": "^3.0.3",
+        "js-sdsl": "^4.1.4",
+        "js-yaml": "^4.1.0",
+        "json-stable-stringify-without-jsonify": "^1.0.1",
+        "levn": "^0.4.1",
+        "lodash.merge": "^4.6.2",
+        "minimatch": "^3.1.2",
+        "natural-compare": "^1.4.0",
+        "optionator": "^0.9.1",
+        "regexpp": "^3.2.0",
+        "strip-ansi": "^6.0.1",
+        "strip-json-comments": "^3.1.0",
+        "text-table": "^0.2.0"
+      },
+      "dependencies": {
+        "ajv": {
+          "version": "6.12.6",
+          "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
+          "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
+          "dev": true,
+          "requires": {
+            "fast-deep-equal": "^3.1.1",
+            "fast-json-stable-stringify": "^2.0.0",
+            "json-schema-traverse": "^0.4.1",
+            "uri-js": "^4.2.2"
+          }
+        },
+        "escape-string-regexp": {
+          "version": "4.0.0",
+          "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz",
+          "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==",
+          "dev": true
+        },
+        "find-up": {
+          "version": "5.0.0",
+          "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz",
+          "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==",
+          "dev": true,
+          "requires": {
+            "locate-path": "^6.0.0",
+            "path-exists": "^4.0.0"
+          }
+        },
+        "json-schema-traverse": {
+          "version": "0.4.1",
+          "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
+          "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==",
+          "dev": true
+        },
+        "locate-path": {
+          "version": "6.0.0",
+          "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
+          "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==",
+          "dev": true,
+          "requires": {
+            "p-locate": "^5.0.0"
+          }
+        },
+        "p-limit": {
+          "version": "3.1.0",
+          "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
+          "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
+          "dev": true,
+          "requires": {
+            "yocto-queue": "^0.1.0"
+          }
+        },
+        "p-locate": {
+          "version": "5.0.0",
+          "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz",
+          "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==",
+          "dev": true,
+          "requires": {
+            "p-limit": "^3.0.2"
+          }
+        }
+      }
+    },
+    "eslint-config-airbnb-base": {
+      "version": "15.0.0",
+      "resolved": "https://registry.npmjs.org/eslint-config-airbnb-base/-/eslint-config-airbnb-base-15.0.0.tgz",
+      "integrity": "sha512-xaX3z4ZZIcFLvh2oUNvcX5oEofXda7giYmuplVxoOg5A7EXJMrUyqRgR+mhDhPK8LZ4PttFOBvCYDbX3sUoUig==",
+      "dev": true,
+      "requires": {
+        "confusing-browser-globals": "^1.0.10",
+        "object.assign": "^4.1.2",
+        "object.entries": "^1.1.5",
+        "semver": "^6.3.0"
+      },
+      "dependencies": {
+        "semver": {
+          "version": "6.3.0",
+          "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
+          "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
+          "dev": true
+        }
+      }
+    },
+    "eslint-config-prettier": {
+      "version": "8.6.0",
+      "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.6.0.tgz",
+      "integrity": "sha512-bAF0eLpLVqP5oEVUFKpMA+NnRFICwn9X8B5jrR9FcqnYBuPbqWEjTEspPWMj5ye6czoSLDweCzSo3Ko7gGrZaA==",
+      "dev": true
+    },
+    "eslint-import-resolver-node": {
+      "version": "0.3.7",
+      "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.7.tgz",
+      "integrity": "sha512-gozW2blMLJCeFpBwugLTGyvVjNoeo1knonXAcatC6bjPBZitotxdWf7Gimr25N4c0AAOo4eOUfaG82IJPDpqCA==",
+      "dev": true,
+      "requires": {
+        "debug": "^3.2.7",
+        "is-core-module": "^2.11.0",
+        "resolve": "^1.22.1"
+      },
+      "dependencies": {
+        "debug": {
+          "version": "3.2.7",
+          "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz",
+          "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==",
+          "dev": true,
+          "requires": {
+            "ms": "^2.1.1"
+          }
+        }
+      }
+    },
+    "eslint-module-utils": {
+      "version": "2.7.4",
+      "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.7.4.tgz",
+      "integrity": "sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA==",
+      "dev": true,
+      "requires": {
+        "debug": "^3.2.7"
+      },
+      "dependencies": {
+        "debug": {
+          "version": "3.2.7",
+          "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz",
+          "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==",
+          "dev": true,
+          "requires": {
+            "ms": "^2.1.1"
+          }
+        }
+      }
+    },
+    "eslint-plugin-import": {
+      "version": "2.27.5",
+      "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.27.5.tgz",
+      "integrity": "sha512-LmEt3GVofgiGuiE+ORpnvP+kAm3h6MLZJ4Q5HCyHADofsb4VzXFsRiWj3c0OFiV+3DWFh0qg3v9gcPlfc3zRow==",
+      "dev": true,
+      "requires": {
+        "array-includes": "^3.1.6",
+        "array.prototype.flat": "^1.3.1",
+        "array.prototype.flatmap": "^1.3.1",
+        "debug": "^3.2.7",
+        "doctrine": "^2.1.0",
+        "eslint-import-resolver-node": "^0.3.7",
+        "eslint-module-utils": "^2.7.4",
+        "has": "^1.0.3",
+        "is-core-module": "^2.11.0",
+        "is-glob": "^4.0.3",
+        "minimatch": "^3.1.2",
+        "object.values": "^1.1.6",
+        "resolve": "^1.22.1",
+        "semver": "^6.3.0",
+        "tsconfig-paths": "^3.14.1"
+      },
+      "dependencies": {
+        "debug": {
+          "version": "3.2.7",
+          "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz",
+          "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==",
+          "dev": true,
+          "requires": {
+            "ms": "^2.1.1"
+          }
+        },
+        "doctrine": {
+          "version": "2.1.0",
+          "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz",
+          "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==",
+          "dev": true,
+          "requires": {
+            "esutils": "^2.0.2"
+          }
+        },
+        "semver": {
+          "version": "6.3.0",
+          "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
+          "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
+          "dev": true
+        }
+      }
+    },
+    "eslint-plugin-prettier": {
+      "version": "4.2.1",
+      "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-4.2.1.tgz",
+      "integrity": "sha512-f/0rXLXUt0oFYs8ra4w49wYZBG5GKZpAYsJSm6rnYL5uVDjd+zowwMwVZHnAjf4edNrKpCDYfXDgmRE/Ak7QyQ==",
+      "dev": true,
+      "requires": {
+        "prettier-linter-helpers": "^1.0.0"
+      }
+    },
+    "eslint-scope": {
+      "version": "7.1.1",
+      "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.1.1.tgz",
+      "integrity": "sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==",
+      "dev": true,
+      "requires": {
+        "esrecurse": "^4.3.0",
+        "estraverse": "^5.2.0"
+      }
+    },
+    "eslint-utils": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz",
+      "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==",
+      "dev": true,
+      "requires": {
+        "eslint-visitor-keys": "^2.0.0"
+      },
+      "dependencies": {
+        "eslint-visitor-keys": {
+          "version": "2.1.0",
+          "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz",
+          "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==",
+          "dev": true
+        }
+      }
+    },
+    "eslint-visitor-keys": {
+      "version": "3.3.0",
+      "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz",
+      "integrity": "sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==",
+      "dev": true
+    },
+    "espree": {
+      "version": "9.4.1",
+      "resolved": "https://registry.npmjs.org/espree/-/espree-9.4.1.tgz",
+      "integrity": "sha512-XwctdmTO6SIvCzd9810yyNzIrOrqNYV9Koizx4C/mRhf9uq0o4yHoCEU/670pOxOL/MSraektvSAji79kX90Vg==",
+      "dev": true,
+      "requires": {
+        "acorn": "^8.8.0",
+        "acorn-jsx": "^5.3.2",
+        "eslint-visitor-keys": "^3.3.0"
+      }
+    },
+    "esprima": {
+      "version": "4.0.1",
+      "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz",
+      "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==",
+      "dev": true
+    },
+    "esquery": {
+      "version": "1.4.2",
+      "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.2.tgz",
+      "integrity": "sha512-JVSoLdTlTDkmjFmab7H/9SL9qGSyjElT3myyKp7krqjVFQCDLmj1QFaCLRFBszBKI0XVZaiiXvuPIX3ZwHe1Ng==",
+      "dev": true,
+      "requires": {
+        "estraverse": "^5.1.0"
+      }
+    },
+    "esrecurse": {
+      "version": "4.3.0",
+      "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz",
+      "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==",
+      "dev": true,
+      "requires": {
+        "estraverse": "^5.2.0"
+      }
+    },
+    "estraverse": {
+      "version": "5.3.0",
+      "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz",
+      "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==",
+      "dev": true
+    },
+    "esutils": {
+      "version": "2.0.3",
+      "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz",
+      "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==",
+      "dev": true
+    },
+    "events-to-array": {
+      "version": "1.1.2",
+      "resolved": "https://registry.npmjs.org/events-to-array/-/events-to-array-1.1.2.tgz",
+      "integrity": "sha512-inRWzRY7nG+aXZxBzEqYKB3HPgwflZRopAjDCHv0whhRx+MTUr1ei0ICZUypdyE0HRm4L2d5VEcIqLD6yl+BFA==",
+      "dev": true
+    },
+    "execa": {
+      "version": "5.1.1",
+      "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz",
+      "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==",
+      "dev": true,
+      "requires": {
+        "cross-spawn": "^7.0.3",
+        "get-stream": "^6.0.0",
+        "human-signals": "^2.1.0",
+        "is-stream": "^2.0.0",
+        "merge-stream": "^2.0.0",
+        "npm-run-path": "^4.0.1",
+        "onetime": "^5.1.2",
+        "signal-exit": "^3.0.3",
+        "strip-final-newline": "^2.0.0"
+      }
+    },
+    "fast-deep-equal": {
+      "version": "3.1.3",
+      "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
+      "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
+      "dev": true
+    },
+    "fast-diff": {
+      "version": "1.2.0",
+      "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.2.0.tgz",
+      "integrity": "sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w==",
+      "dev": true
+    },
+    "fast-json-stable-stringify": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
+      "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==",
+      "dev": true
+    },
+    "fast-levenshtein": {
+      "version": "2.0.6",
+      "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz",
+      "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==",
+      "dev": true
+    },
+    "fastq": {
+      "version": "1.15.0",
+      "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz",
+      "integrity": "sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==",
+      "dev": true,
+      "requires": {
+        "reusify": "^1.0.4"
+      }
+    },
+    "file-entry-cache": {
+      "version": "6.0.1",
+      "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz",
+      "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==",
+      "dev": true,
+      "requires": {
+        "flat-cache": "^3.0.4"
+      }
+    },
+    "fill-keys": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/fill-keys/-/fill-keys-1.0.2.tgz",
+      "integrity": "sha512-tcgI872xXjwFF4xgQmLxi76GnwJG3g/3isB1l4/G5Z4zrbddGpBjqZCO9oEAcB5wX0Hj/5iQB3toxfO7in1hHA==",
+      "dev": true,
+      "requires": {
+        "is-object": "~1.0.1",
+        "merge-descriptors": "~1.0.0"
+      }
+    },
+    "find-cache-dir": {
+      "version": "3.3.2",
+      "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz",
+      "integrity": "sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==",
+      "dev": true,
+      "requires": {
+        "commondir": "^1.0.1",
+        "make-dir": "^3.0.2",
+        "pkg-dir": "^4.1.0"
+      }
+    },
+    "find-up": {
+      "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
+      "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
+      "dev": true,
+      "requires": {
+        "locate-path": "^5.0.0",
+        "path-exists": "^4.0.0"
+      }
+    },
+    "findit": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/findit/-/findit-2.0.0.tgz",
+      "integrity": "sha512-ENZS237/Hr8bjczn5eKuBohLgaD0JyUd0arxretR1f9RO46vZHA1b2y0VorgGV3WaOT3c+78P8h7v4JGJ1i/rg==",
+      "dev": true
+    },
+    "flat-cache": {
+      "version": "3.0.4",
+      "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz",
+      "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==",
+      "dev": true,
+      "requires": {
+        "flatted": "^3.1.0",
+        "rimraf": "^3.0.2"
+      }
+    },
+    "flatted": {
+      "version": "3.2.7",
+      "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz",
+      "integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ=="
+    },
+    "for-each": {
+      "version": "0.3.3",
+      "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz",
+      "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==",
+      "dev": true,
+      "requires": {
+        "is-callable": "^1.1.3"
+      }
+    },
+    "foreground-child": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz",
+      "integrity": "sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==",
+      "dev": true,
+      "requires": {
+        "cross-spawn": "^7.0.0",
+        "signal-exit": "^3.0.2"
+      }
+    },
+    "fromentries": {
+      "version": "1.3.2",
+      "resolved": "https://registry.npmjs.org/fromentries/-/fromentries-1.3.2.tgz",
+      "integrity": "sha512-cHEpEQHUg0f8XdtZCc2ZAhrHzKzT0MrFUTcvx+hfxYu7rGMDc5SKoXFh+n4YigxsHXRzc6OrCshdR1bWH6HHyg==",
+      "dev": true
+    },
+    "fs-exists-cached": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/fs-exists-cached/-/fs-exists-cached-1.0.0.tgz",
+      "integrity": "sha512-kSxoARUDn4F2RPXX48UXnaFKwVU7Ivd/6qpzZL29MCDmr9sTvybv4gFCp+qaI4fM9m0z9fgz/yJvi56GAz+BZg==",
+      "dev": true
+    },
+    "fs-extra": {
+      "version": "11.1.0",
+      "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.1.0.tgz",
+      "integrity": "sha512-0rcTq621PD5jM/e0a3EJoGC/1TC5ZBCERW82LQuwfGnCa1V8w7dpYH1yNu+SLb6E5dkeCBzKEyLGlFrnr+dUyw==",
+      "dev": true,
+      "requires": {
+        "graceful-fs": "^4.2.0",
+        "jsonfile": "^6.0.1",
+        "universalify": "^2.0.0"
+      },
+      "dependencies": {
+        "jsonfile": {
+          "version": "6.1.0",
+          "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz",
+          "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==",
+          "dev": true,
+          "requires": {
+            "graceful-fs": "^4.1.6",
+            "universalify": "^2.0.0"
+          }
+        },
+        "universalify": {
+          "version": "2.0.0",
+          "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz",
+          "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==",
+          "dev": true
+        }
+      }
+    },
+    "fs.realpath": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
+      "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==",
+      "dev": true
+    },
+    "fsevents": {
+      "version": "2.3.2",
+      "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
+      "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
+      "dev": true,
+      "optional": true
+    },
+    "function-bind": {
+      "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz",
+      "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==",
+      "dev": true
+    },
+    "function-loop": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/function-loop/-/function-loop-2.0.1.tgz",
+      "integrity": "sha512-ktIR+O6i/4h+j/ZhZJNdzeI4i9lEPeEK6UPR2EVyTVBqOwcU3Za9xYKLH64ZR9HmcROyRrOkizNyjjtWJzDDkQ==",
+      "dev": true
+    },
+    "function.prototype.name": {
+      "version": "1.1.5",
+      "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz",
+      "integrity": "sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==",
+      "dev": true,
+      "requires": {
+        "call-bind": "^1.0.2",
+        "define-properties": "^1.1.3",
+        "es-abstract": "^1.19.0",
+        "functions-have-names": "^1.2.2"
+      }
+    },
+    "functions-have-names": {
+      "version": "1.2.3",
+      "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz",
+      "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==",
+      "dev": true
+    },
+    "gensync": {
+      "version": "1.0.0-beta.2",
+      "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz",
+      "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==",
+      "dev": true
+    },
+    "get-caller-file": {
+      "version": "2.0.5",
+      "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
+      "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
+      "dev": true
+    },
+    "get-intrinsic": {
+      "version": "1.2.0",
+      "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.0.tgz",
+      "integrity": "sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q==",
+      "dev": true,
+      "requires": {
+        "function-bind": "^1.1.1",
+        "has": "^1.0.3",
+        "has-symbols": "^1.0.3"
+      }
+    },
+    "get-package-type": {
+      "version": "0.1.0",
+      "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz",
+      "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==",
+      "dev": true
+    },
+    "get-stream": {
+      "version": "6.0.1",
+      "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz",
+      "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==",
+      "dev": true
+    },
+    "get-symbol-description": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz",
+      "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==",
+      "dev": true,
+      "requires": {
+        "call-bind": "^1.0.2",
+        "get-intrinsic": "^1.1.1"
+      }
+    },
+    "git-raw-commits": {
+      "version": "2.0.11",
+      "resolved": "https://registry.npmjs.org/git-raw-commits/-/git-raw-commits-2.0.11.tgz",
+      "integrity": "sha512-VnctFhw+xfj8Va1xtfEqCUD2XDrbAPSJx+hSrE5K7fGdjZruW7XV+QOrN7LF/RJyvspRiD2I0asWsxFp0ya26A==",
+      "dev": true,
+      "requires": {
+        "dargs": "^7.0.0",
+        "lodash": "^4.17.15",
+        "meow": "^8.0.0",
+        "split2": "^3.0.0",
+        "through2": "^4.0.0"
+      }
+    },
+    "glob": {
+      "version": "7.2.3",
+      "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
+      "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
+      "dev": true,
+      "requires": {
+        "fs.realpath": "^1.0.0",
+        "inflight": "^1.0.4",
+        "inherits": "2",
+        "minimatch": "^3.1.1",
+        "once": "^1.3.0",
+        "path-is-absolute": "^1.0.0"
+      }
+    },
+    "glob-parent": {
+      "version": "6.0.2",
+      "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz",
+      "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==",
+      "dev": true,
+      "requires": {
+        "is-glob": "^4.0.3"
+      }
+    },
+    "global-dirs": {
+      "version": "0.1.1",
+      "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-0.1.1.tgz",
+      "integrity": "sha512-NknMLn7F2J7aflwFOlGdNIuCDpN3VGoSoB+aap3KABFWbHVn1TCgFC+np23J8W2BiZbjfEw3BFBycSMv1AFblg==",
+      "dev": true,
+      "requires": {
+        "ini": "^1.3.4"
+      }
+    },
+    "globals": {
+      "version": "13.20.0",
+      "resolved": "https://registry.npmjs.org/globals/-/globals-13.20.0.tgz",
+      "integrity": "sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ==",
+      "dev": true,
+      "requires": {
+        "type-fest": "^0.20.2"
+      },
+      "dependencies": {
+        "type-fest": {
+          "version": "0.20.2",
+          "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz",
+          "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==",
+          "dev": true
+        }
+      }
+    },
+    "globalthis": {
+      "version": "1.0.3",
+      "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz",
+      "integrity": "sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==",
+      "dev": true,
+      "requires": {
+        "define-properties": "^1.1.3"
+      }
+    },
+    "gopd": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz",
+      "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==",
+      "dev": true,
+      "requires": {
+        "get-intrinsic": "^1.1.3"
+      }
+    },
+    "graceful-fs": {
+      "version": "4.2.10",
+      "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz",
+      "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA=="
+    },
+    "grapheme-splitter": {
+      "version": "1.0.4",
+      "resolved": "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz",
+      "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==",
+      "dev": true
+    },
+    "hard-rejection": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/hard-rejection/-/hard-rejection-2.1.0.tgz",
+      "integrity": "sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA==",
+      "dev": true
+    },
+    "has": {
+      "version": "1.0.3",
+      "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz",
+      "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==",
+      "dev": true,
+      "requires": {
+        "function-bind": "^1.1.1"
+      }
+    },
+    "has-bigints": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz",
+      "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==",
+      "dev": true
+    },
+    "has-flag": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
+      "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
+      "dev": true
+    },
+    "has-property-descriptors": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz",
+      "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==",
+      "dev": true,
+      "requires": {
+        "get-intrinsic": "^1.1.1"
+      }
+    },
+    "has-proto": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz",
+      "integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==",
+      "dev": true
+    },
+    "has-symbols": {
+      "version": "1.0.3",
+      "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz",
+      "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==",
+      "dev": true
+    },
+    "has-tostringtag": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz",
+      "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==",
+      "dev": true,
+      "requires": {
+        "has-symbols": "^1.0.2"
+      }
+    },
+    "hasha": {
+      "version": "5.2.2",
+      "resolved": "https://registry.npmjs.org/hasha/-/hasha-5.2.2.tgz",
+      "integrity": "sha512-Hrp5vIK/xr5SkeN2onO32H0MgNZ0f17HRNH39WfL0SYUNOTZ5Lz1TJ8Pajo/87dYGEFlLMm7mIc/k/s6Bvz9HQ==",
+      "dev": true,
+      "requires": {
+        "is-stream": "^2.0.0",
+        "type-fest": "^0.8.0"
+      },
+      "dependencies": {
+        "type-fest": {
+          "version": "0.8.1",
+          "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz",
+          "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==",
+          "dev": true
+        }
+      }
+    },
+    "hosted-git-info": {
+      "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz",
+      "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==",
+      "dev": true,
+      "requires": {
+        "lru-cache": "^6.0.0"
+      }
+    },
+    "html-escaper": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz",
+      "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==",
+      "dev": true
+    },
+    "human-signals": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz",
+      "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==",
+      "dev": true
+    },
+    "husky": {
+      "version": "8.0.3",
+      "resolved": "https://registry.npmjs.org/husky/-/husky-8.0.3.tgz",
+      "integrity": "sha512-+dQSyqPh4x1hlO1swXBiNb2HzTDN1I2IGLQx1GrBuiqFJfoMrnZWwVmatvSiO+Iz8fBUnf+lekwNo4c2LlXItg==",
+      "dev": true
+    },
+    "ignore": {
+      "version": "5.2.4",
+      "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz",
+      "integrity": "sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==",
+      "dev": true
+    },
+    "import-fresh": {
+      "version": "3.3.0",
+      "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz",
+      "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==",
+      "dev": true,
+      "requires": {
+        "parent-module": "^1.0.0",
+        "resolve-from": "^4.0.0"
+      },
+      "dependencies": {
+        "resolve-from": {
+          "version": "4.0.0",
+          "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz",
+          "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==",
+          "dev": true
+        }
+      }
+    },
+    "imurmurhash": {
+      "version": "0.1.4",
+      "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz",
+      "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==",
+      "dev": true
+    },
+    "indent-string": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz",
+      "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==",
+      "dev": true
+    },
+    "inflight": {
+      "version": "1.0.6",
+      "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
+      "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==",
+      "dev": true,
+      "requires": {
+        "once": "^1.3.0",
+        "wrappy": "1"
+      }
+    },
+    "inherits": {
+      "version": "2.0.4",
+      "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
+      "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
+      "dev": true
+    },
+    "ini": {
+      "version": "1.3.8",
+      "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz",
+      "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==",
+      "dev": true
+    },
+    "internal-slot": {
+      "version": "1.0.5",
+      "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.5.tgz",
+      "integrity": "sha512-Y+R5hJrzs52QCG2laLn4udYVnxsfny9CpOhNhUvk/SSSVyF6T27FzRbF0sroPidSu3X8oEAkOn2K804mjpt6UQ==",
+      "dev": true,
+      "requires": {
+        "get-intrinsic": "^1.2.0",
+        "has": "^1.0.3",
+        "side-channel": "^1.0.4"
+      }
+    },
+    "is-array-buffer": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.1.tgz",
+      "integrity": "sha512-ASfLknmY8Xa2XtB4wmbz13Wu202baeA18cJBCeCy0wXUHZF0IPyVEXqKEcd+t2fNSLLL1vC6k7lxZEojNbISXQ==",
+      "dev": true,
+      "requires": {
+        "call-bind": "^1.0.2",
+        "get-intrinsic": "^1.1.3",
+        "is-typed-array": "^1.1.10"
+      }
+    },
+    "is-arrayish": {
+      "version": "0.2.1",
+      "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz",
+      "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==",
+      "dev": true
+    },
+    "is-bigint": {
+      "version": "1.0.4",
+      "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz",
+      "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==",
+      "dev": true,
+      "requires": {
+        "has-bigints": "^1.0.1"
+      }
+    },
+    "is-binary-path": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
+      "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==",
+      "dev": true,
+      "requires": {
+        "binary-extensions": "^2.0.0"
+      }
+    },
+    "is-boolean-object": {
+      "version": "1.1.2",
+      "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz",
+      "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==",
+      "dev": true,
+      "requires": {
+        "call-bind": "^1.0.2",
+        "has-tostringtag": "^1.0.0"
+      }
+    },
+    "is-callable": {
+      "version": "1.2.7",
+      "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz",
+      "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==",
+      "dev": true
+    },
+    "is-ci": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-3.0.1.tgz",
+      "integrity": "sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==",
+      "dev": true,
+      "requires": {
+        "ci-info": "^3.2.0"
+      }
+    },
+    "is-core-module": {
+      "version": "2.11.0",
+      "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.11.0.tgz",
+      "integrity": "sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==",
+      "dev": true,
+      "requires": {
+        "has": "^1.0.3"
+      }
+    },
+    "is-date-object": {
+      "version": "1.0.5",
+      "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz",
+      "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==",
+      "dev": true,
+      "requires": {
+        "has-tostringtag": "^1.0.0"
+      }
+    },
+    "is-extglob": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
+      "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
+      "dev": true
+    },
+    "is-fullwidth-code-point": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
+      "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
+      "dev": true
+    },
+    "is-glob": {
+      "version": "4.0.3",
+      "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
+      "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
+      "dev": true,
+      "requires": {
+        "is-extglob": "^2.1.1"
+      }
+    },
+    "is-negative-zero": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz",
+      "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==",
+      "dev": true
+    },
+    "is-number": {
+      "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
+      "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
+      "dev": true
+    },
+    "is-number-object": {
+      "version": "1.0.7",
+      "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz",
+      "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==",
+      "dev": true,
+      "requires": {
+        "has-tostringtag": "^1.0.0"
+      }
+    },
+    "is-obj": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz",
+      "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==",
+      "dev": true
+    },
+    "is-object": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/is-object/-/is-object-1.0.2.tgz",
+      "integrity": "sha512-2rRIahhZr2UWb45fIOuvZGpFtz0TyOZLf32KxBbSoUCeZR495zCKlWUKKUByk3geS2eAs7ZAABt0Y/Rx0GiQGA==",
+      "dev": true
+    },
+    "is-path-inside": {
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz",
+      "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==",
+      "dev": true
+    },
+    "is-plain-obj": {
+      "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz",
+      "integrity": "sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==",
+      "dev": true
+    },
+    "is-regex": {
+      "version": "1.1.4",
+      "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz",
+      "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==",
+      "dev": true,
+      "requires": {
+        "call-bind": "^1.0.2",
+        "has-tostringtag": "^1.0.0"
+      }
+    },
+    "is-shared-array-buffer": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz",
+      "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==",
+      "dev": true,
+      "requires": {
+        "call-bind": "^1.0.2"
+      }
+    },
+    "is-stream": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
+      "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
+      "dev": true
+    },
+    "is-string": {
+      "version": "1.0.7",
+      "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz",
+      "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==",
+      "dev": true,
+      "requires": {
+        "has-tostringtag": "^1.0.0"
+      }
+    },
+    "is-symbol": {
+      "version": "1.0.4",
+      "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz",
+      "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==",
+      "dev": true,
+      "requires": {
+        "has-symbols": "^1.0.2"
+      }
+    },
+    "is-text-path": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/is-text-path/-/is-text-path-1.0.1.tgz",
+      "integrity": "sha512-xFuJpne9oFz5qDaodwmmG08e3CawH/2ZV8Qqza1Ko7Sk8POWbkRdwIoAWVhqvq0XeUzANEhKo2n0IXUGBm7A/w==",
+      "dev": true,
+      "requires": {
+        "text-extensions": "^1.0.0"
+      }
+    },
+    "is-typed-array": {
+      "version": "1.1.10",
+      "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.10.tgz",
+      "integrity": "sha512-PJqgEHiWZvMpaFZ3uTc8kHPM4+4ADTlDniuQL7cU/UDA0Ql7F70yGfHph3cLNe+c9toaigv+DFzTJKhc2CtO6A==",
+      "dev": true,
+      "requires": {
+        "available-typed-arrays": "^1.0.5",
+        "call-bind": "^1.0.2",
+        "for-each": "^0.3.3",
+        "gopd": "^1.0.1",
+        "has-tostringtag": "^1.0.0"
+      }
+    },
+    "is-typedarray": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz",
+      "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==",
+      "dev": true
+    },
+    "is-weakref": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz",
+      "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==",
+      "dev": true,
+      "requires": {
+        "call-bind": "^1.0.2"
+      }
+    },
+    "is-windows": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz",
+      "integrity": "sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==",
+      "dev": true
+    },
+    "isexe": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
+      "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
+      "dev": true
+    },
+    "istanbul-lib-coverage": {
+      "version": "3.2.0",
+      "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz",
+      "integrity": "sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==",
+      "dev": true
+    },
+    "istanbul-lib-hook": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/istanbul-lib-hook/-/istanbul-lib-hook-3.0.0.tgz",
+      "integrity": "sha512-Pt/uge1Q9s+5VAZ+pCo16TYMWPBIl+oaNIjgLQxcX0itS6ueeaA+pEfThZpH8WxhFgCiEb8sAJY6MdUKgiIWaQ==",
+      "dev": true,
+      "requires": {
+        "append-transform": "^2.0.0"
+      }
+    },
+    "istanbul-lib-instrument": {
+      "version": "4.0.3",
+      "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz",
+      "integrity": "sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ==",
+      "dev": true,
+      "requires": {
+        "@babel/core": "^7.7.5",
+        "@istanbuljs/schema": "^0.1.2",
+        "istanbul-lib-coverage": "^3.0.0",
+        "semver": "^6.3.0"
+      },
+      "dependencies": {
+        "semver": {
+          "version": "6.3.0",
+          "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
+          "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
+          "dev": true
+        }
+      }
+    },
+    "istanbul-lib-processinfo": {
+      "version": "2.0.3",
+      "resolved": "https://registry.npmjs.org/istanbul-lib-processinfo/-/istanbul-lib-processinfo-2.0.3.tgz",
+      "integrity": "sha512-NkwHbo3E00oybX6NGJi6ar0B29vxyvNwoC7eJ4G4Yq28UfY758Hgn/heV8VRFhevPED4LXfFz0DQ8z/0kw9zMg==",
+      "dev": true,
+      "requires": {
+        "archy": "^1.0.0",
+        "cross-spawn": "^7.0.3",
+        "istanbul-lib-coverage": "^3.2.0",
+        "p-map": "^3.0.0",
+        "rimraf": "^3.0.0",
+        "uuid": "^8.3.2"
+      }
+    },
+    "istanbul-lib-report": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz",
+      "integrity": "sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==",
+      "dev": true,
+      "requires": {
+        "istanbul-lib-coverage": "^3.0.0",
+        "make-dir": "^3.0.0",
+        "supports-color": "^7.1.0"
+      }
+    },
+    "istanbul-lib-source-maps": {
+      "version": "4.0.1",
+      "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz",
+      "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==",
+      "dev": true,
+      "requires": {
+        "debug": "^4.1.1",
+        "istanbul-lib-coverage": "^3.0.0",
+        "source-map": "^0.6.1"
+      }
+    },
+    "istanbul-reports": {
+      "version": "3.1.5",
+      "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.5.tgz",
+      "integrity": "sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w==",
+      "dev": true,
+      "requires": {
+        "html-escaper": "^2.0.0",
+        "istanbul-lib-report": "^3.0.0"
+      }
+    },
+    "jackspeak": {
+      "version": "1.4.2",
+      "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-1.4.2.tgz",
+      "integrity": "sha512-GHeGTmnuaHnvS+ZctRB01bfxARuu9wW83ENbuiweu07SFcVlZrJpcshSre/keGT7YGBhLHg/+rXCNSrsEHKU4Q==",
+      "dev": true,
+      "requires": {
+        "cliui": "^7.0.4"
+      },
+      "dependencies": {
+        "cliui": {
+          "version": "7.0.4",
+          "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz",
+          "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==",
+          "dev": true,
+          "requires": {
+            "string-width": "^4.2.0",
+            "strip-ansi": "^6.0.0",
+            "wrap-ansi": "^7.0.0"
+          }
+        }
+      }
+    },
+    "js-sdsl": {
+      "version": "4.3.0",
+      "resolved": "https://registry.npmjs.org/js-sdsl/-/js-sdsl-4.3.0.tgz",
+      "integrity": "sha512-mifzlm2+5nZ+lEcLJMoBK0/IH/bDg8XnJfd/Wq6IP+xoCjLZsTOnV2QpxlVbX9bMnkl5PdEjNtBJ9Cj1NjifhQ==",
+      "dev": true
+    },
+    "js-tokens": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
+      "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==",
+      "dev": true
+    },
+    "js-yaml": {
+      "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
+      "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
+      "dev": true,
+      "requires": {
+        "argparse": "^2.0.1"
+      }
+    },
+    "jsesc": {
+      "version": "2.5.2",
+      "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz",
+      "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==",
+      "dev": true
+    },
+    "json-parse-even-better-errors": {
+      "version": "2.3.1",
+      "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz",
+      "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==",
+      "dev": true
+    },
+    "json-schema-traverse": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
+      "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==",
+      "dev": true
+    },
+    "json-stable-stringify-without-jsonify": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz",
+      "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==",
+      "dev": true
+    },
+    "json5": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz",
+      "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==",
+      "dev": true,
+      "requires": {
+        "minimist": "^1.2.0"
+      }
+    },
+    "jsonfile": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz",
+      "integrity": "sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg==",
+      "requires": {
+        "graceful-fs": "^4.1.6"
+      }
+    },
+    "jsonparse": {
+      "version": "1.3.1",
+      "resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz",
+      "integrity": "sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg==",
+      "dev": true
+    },
+    "kind-of": {
+      "version": "6.0.3",
+      "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz",
+      "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==",
+      "dev": true
+    },
+    "levn": {
+      "version": "0.4.1",
+      "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz",
+      "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==",
+      "dev": true,
+      "requires": {
+        "prelude-ls": "^1.2.1",
+        "type-check": "~0.4.0"
+      }
+    },
+    "libtap": {
+      "version": "1.4.0",
+      "resolved": "https://registry.npmjs.org/libtap/-/libtap-1.4.0.tgz",
+      "integrity": "sha512-STLFynswQ2A6W14JkabgGetBNk6INL1REgJ9UeNKw5llXroC2cGLgKTqavv0sl8OLVztLLipVKMcQ7yeUcqpmg==",
+      "dev": true,
+      "requires": {
+        "async-hook-domain": "^2.0.4",
+        "bind-obj-methods": "^3.0.0",
+        "diff": "^4.0.2",
+        "function-loop": "^2.0.1",
+        "minipass": "^3.1.5",
+        "own-or": "^1.0.0",
+        "own-or-env": "^1.0.2",
+        "signal-exit": "^3.0.4",
+        "stack-utils": "^2.0.4",
+        "tap-parser": "^11.0.0",
+        "tap-yaml": "^1.0.0",
+        "tcompare": "^5.0.6",
+        "trivial-deferred": "^1.0.1"
+      }
+    },
+    "lines-and-columns": {
+      "version": "1.2.4",
+      "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz",
+      "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==",
+      "dev": true
+    },
+    "locate-path": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz",
+      "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
+      "dev": true,
+      "requires": {
+        "p-locate": "^4.1.0"
+      }
+    },
+    "lodash": {
+      "version": "4.17.21",
+      "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
+      "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==",
+      "dev": true
+    },
+    "lodash.camelcase": {
+      "version": "4.3.0",
+      "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz",
+      "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==",
+      "dev": true
+    },
+    "lodash.flattendeep": {
+      "version": "4.4.0",
+      "resolved": "https://registry.npmjs.org/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz",
+      "integrity": "sha512-uHaJFihxmJcEX3kT4I23ABqKKalJ/zDrDg0lsFtc1h+3uw49SIJ5beyhx5ExVRti3AvKoOJngIj7xz3oylPdWQ==",
+      "dev": true
+    },
+    "lodash.isfunction": {
+      "version": "3.0.9",
+      "resolved": "https://registry.npmjs.org/lodash.isfunction/-/lodash.isfunction-3.0.9.tgz",
+      "integrity": "sha512-AirXNj15uRIMMPihnkInB4i3NHeb4iBtNg9WRWuK2o31S+ePwwNmDPaTL3o7dTJ+VXNZim7rFs4rxN4YU1oUJw==",
+      "dev": true
+    },
+    "lodash.isplainobject": {
+      "version": "4.0.6",
+      "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz",
+      "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==",
+      "dev": true
+    },
+    "lodash.kebabcase": {
+      "version": "4.1.1",
+      "resolved": "https://registry.npmjs.org/lodash.kebabcase/-/lodash.kebabcase-4.1.1.tgz",
+      "integrity": "sha512-N8XRTIMMqqDgSy4VLKPnJ/+hpGZN+PHQiJnSenYqPaVV/NCqEogTnAdZLQiGKhxX+JCs8waWq2t1XHWKOmlY8g==",
+      "dev": true
+    },
+    "lodash.merge": {
+      "version": "4.6.2",
+      "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
+      "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==",
+      "dev": true
+    },
+    "lodash.mergewith": {
+      "version": "4.6.2",
+      "resolved": "https://registry.npmjs.org/lodash.mergewith/-/lodash.mergewith-4.6.2.tgz",
+      "integrity": "sha512-GK3g5RPZWTRSeLSpgP8Xhra+pnjBC56q9FZYe1d5RN3TJ35dbkGy3YqBSMbyCrlbi+CM9Z3Jk5yTL7RCsqboyQ==",
+      "dev": true
+    },
+    "lodash.snakecase": {
+      "version": "4.1.1",
+      "resolved": "https://registry.npmjs.org/lodash.snakecase/-/lodash.snakecase-4.1.1.tgz",
+      "integrity": "sha512-QZ1d4xoBHYUeuouhEq3lk3Uq7ldgyFXGBhg04+oRLnIz8o9T65Eh+8YdroUwn846zchkA9yDsDl5CVVaV2nqYw==",
+      "dev": true
+    },
+    "lodash.startcase": {
+      "version": "4.4.0",
+      "resolved": "https://registry.npmjs.org/lodash.startcase/-/lodash.startcase-4.4.0.tgz",
+      "integrity": "sha512-+WKqsK294HMSc2jEbNgpHpd0JfIBhp7rEV4aqXWqFr6AlXov+SlcgB1Fv01y2kGe3Gc8nMW7VA0SrGuSkRfIEg==",
+      "dev": true
+    },
+    "lodash.uniq": {
+      "version": "4.5.0",
+      "resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz",
+      "integrity": "sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==",
+      "dev": true
+    },
+    "lodash.upperfirst": {
+      "version": "4.3.1",
+      "resolved": "https://registry.npmjs.org/lodash.upperfirst/-/lodash.upperfirst-4.3.1.tgz",
+      "integrity": "sha512-sReKOYJIJf74dhJONhU4e0/shzi1trVbSWDOhKYE5XV2O+H7Sb2Dihwuc7xWxVl+DgFPyTqIN3zMfT9cq5iWDg==",
+      "dev": true
+    },
+    "lru-cache": {
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
+      "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
+      "dev": true,
+      "requires": {
+        "yallist": "^4.0.0"
+      }
+    },
+    "make-dir": {
+      "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz",
+      "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==",
+      "dev": true,
+      "requires": {
+        "semver": "^6.0.0"
+      },
+      "dependencies": {
+        "semver": {
+          "version": "6.3.0",
+          "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
+          "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
+          "dev": true
+        }
+      }
+    },
+    "make-error": {
+      "version": "1.3.6",
+      "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz",
+      "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==",
+      "dev": true
+    },
+    "map-obj": {
+      "version": "4.3.0",
+      "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.3.0.tgz",
+      "integrity": "sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==",
+      "dev": true
+    },
+    "meow": {
+      "version": "8.1.2",
+      "resolved": "https://registry.npmjs.org/meow/-/meow-8.1.2.tgz",
+      "integrity": "sha512-r85E3NdZ+mpYk1C6RjPFEMSE+s1iZMuHtsHAqY0DT3jZczl0diWUZ8g6oU7h0M9cD2EL+PzaYghhCLzR0ZNn5Q==",
+      "dev": true,
+      "requires": {
+        "@types/minimist": "^1.2.0",
+        "camelcase-keys": "^6.2.2",
+        "decamelize-keys": "^1.1.0",
+        "hard-rejection": "^2.1.0",
+        "minimist-options": "4.1.0",
+        "normalize-package-data": "^3.0.0",
+        "read-pkg-up": "^7.0.1",
+        "redent": "^3.0.0",
+        "trim-newlines": "^3.0.0",
+        "type-fest": "^0.18.0",
+        "yargs-parser": "^20.2.3"
+      }
+    },
+    "merge-descriptors": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz",
+      "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==",
+      "dev": true
+    },
+    "merge-stream": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz",
+      "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==",
+      "dev": true
+    },
+    "mimic-fn": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz",
+      "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==",
+      "dev": true
+    },
+    "min-indent": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz",
+      "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==",
+      "dev": true
+    },
+    "minimatch": {
+      "version": "3.1.2",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
+      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
+      "dev": true,
+      "requires": {
+        "brace-expansion": "^1.1.7"
+      }
+    },
+    "minimist": {
+      "version": "1.2.8",
+      "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz",
+      "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==",
+      "dev": true
+    },
+    "minimist-options": {
+      "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/minimist-options/-/minimist-options-4.1.0.tgz",
+      "integrity": "sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==",
+      "dev": true,
+      "requires": {
+        "arrify": "^1.0.1",
+        "is-plain-obj": "^1.1.0",
+        "kind-of": "^6.0.3"
+      }
+    },
+    "minipass": {
+      "version": "3.3.6",
+      "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
+      "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
+      "dev": true,
+      "requires": {
+        "yallist": "^4.0.0"
+      }
+    },
+    "mkdirp": {
+      "version": "1.0.4",
+      "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz",
+      "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==",
+      "dev": true
+    },
+    "module-not-found-error": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/module-not-found-error/-/module-not-found-error-1.0.1.tgz",
+      "integrity": "sha512-pEk4ECWQXV6z2zjhRZUongnLJNUeGQJ3w6OQ5ctGwD+i5o93qjRQUk2Rt6VdNeu3sEP0AB4LcfvdebpxBRVr4g==",
+      "dev": true
+    },
+    "ms": {
+      "version": "2.1.2",
+      "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
+      "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
+    },
+    "natural-compare": {
+      "version": "1.4.0",
+      "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz",
+      "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==",
+      "dev": true
+    },
+    "node-preload": {
+      "version": "0.2.1",
+      "resolved": "https://registry.npmjs.org/node-preload/-/node-preload-0.2.1.tgz",
+      "integrity": "sha512-RM5oyBy45cLEoHqCeh+MNuFAxO0vTFBLskvQbOKnEE7YTTSN4tbN8QWDIPQ6L+WvKsB/qLEGpYe2ZZ9d4W9OIQ==",
+      "dev": true,
+      "requires": {
+        "process-on-spawn": "^1.0.0"
+      }
+    },
+    "node-releases": {
+      "version": "2.0.10",
+      "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.10.tgz",
+      "integrity": "sha512-5GFldHPXVG/YZmFzJvKK2zDSzPKhEp0+ZR5SVaoSag9fsL5YgHbUHDfnG5494ISANDcK4KwPXAx2xqVEydmd7w==",
+      "dev": true
+    },
+    "normalize-package-data": {
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz",
+      "integrity": "sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==",
+      "dev": true,
+      "requires": {
+        "hosted-git-info": "^4.0.1",
+        "is-core-module": "^2.5.0",
+        "semver": "^7.3.4",
+        "validate-npm-package-license": "^3.0.1"
+      }
+    },
+    "normalize-path": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
+      "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==",
+      "dev": true
+    },
+    "npm-run-path": {
+      "version": "4.0.1",
+      "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz",
+      "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==",
+      "dev": true,
+      "requires": {
+        "path-key": "^3.0.0"
+      }
+    },
+    "nyc": {
+      "version": "15.1.0",
+      "resolved": "https://registry.npmjs.org/nyc/-/nyc-15.1.0.tgz",
+      "integrity": "sha512-jMW04n9SxKdKi1ZMGhvUTHBN0EICCRkHemEoE5jm6mTYcqcdas0ATzgUgejlQUHMvpnOZqGB5Xxsv9KxJW1j8A==",
+      "dev": true,
+      "requires": {
+        "@istanbuljs/load-nyc-config": "^1.0.0",
+        "@istanbuljs/schema": "^0.1.2",
+        "caching-transform": "^4.0.0",
+        "convert-source-map": "^1.7.0",
+        "decamelize": "^1.2.0",
+        "find-cache-dir": "^3.2.0",
+        "find-up": "^4.1.0",
+        "foreground-child": "^2.0.0",
+        "get-package-type": "^0.1.0",
+        "glob": "^7.1.6",
+        "istanbul-lib-coverage": "^3.0.0",
+        "istanbul-lib-hook": "^3.0.0",
+        "istanbul-lib-instrument": "^4.0.0",
+        "istanbul-lib-processinfo": "^2.0.2",
+        "istanbul-lib-report": "^3.0.0",
+        "istanbul-lib-source-maps": "^4.0.0",
+        "istanbul-reports": "^3.0.2",
+        "make-dir": "^3.0.0",
+        "node-preload": "^0.2.1",
+        "p-map": "^3.0.0",
+        "process-on-spawn": "^1.0.0",
+        "resolve-from": "^5.0.0",
+        "rimraf": "^3.0.0",
+        "signal-exit": "^3.0.2",
+        "spawn-wrap": "^2.0.0",
+        "test-exclude": "^6.0.0",
+        "yargs": "^15.0.2"
+      },
+      "dependencies": {
+        "cliui": {
+          "version": "6.0.0",
+          "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz",
+          "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==",
+          "dev": true,
+          "requires": {
+            "string-width": "^4.2.0",
+            "strip-ansi": "^6.0.0",
+            "wrap-ansi": "^6.2.0"
+          }
+        },
+        "wrap-ansi": {
+          "version": "6.2.0",
+          "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz",
+          "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==",
+          "dev": true,
+          "requires": {
+            "ansi-styles": "^4.0.0",
+            "string-width": "^4.1.0",
+            "strip-ansi": "^6.0.0"
+          }
+        },
+        "y18n": {
+          "version": "4.0.3",
+          "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz",
+          "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==",
+          "dev": true
+        },
+        "yargs": {
+          "version": "15.4.1",
+          "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz",
+          "integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==",
+          "dev": true,
+          "requires": {
+            "cliui": "^6.0.0",
+            "decamelize": "^1.2.0",
+            "find-up": "^4.1.0",
+            "get-caller-file": "^2.0.1",
+            "require-directory": "^2.1.1",
+            "require-main-filename": "^2.0.0",
+            "set-blocking": "^2.0.0",
+            "string-width": "^4.2.0",
+            "which-module": "^2.0.0",
+            "y18n": "^4.0.0",
+            "yargs-parser": "^18.1.2"
+          }
+        },
+        "yargs-parser": {
+          "version": "18.1.3",
+          "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz",
+          "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==",
+          "dev": true,
+          "requires": {
+            "camelcase": "^5.0.0",
+            "decamelize": "^1.2.0"
+          }
+        }
+      }
+    },
+    "object-inspect": {
+      "version": "1.12.3",
+      "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz",
+      "integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==",
+      "dev": true
+    },
+    "object-keys": {
+      "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz",
+      "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==",
+      "dev": true
+    },
+    "object.assign": {
+      "version": "4.1.4",
+      "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz",
+      "integrity": "sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==",
+      "dev": true,
+      "requires": {
+        "call-bind": "^1.0.2",
+        "define-properties": "^1.1.4",
+        "has-symbols": "^1.0.3",
+        "object-keys": "^1.1.1"
+      }
+    },
+    "object.entries": {
+      "version": "1.1.6",
+      "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.6.tgz",
+      "integrity": "sha512-leTPzo4Zvg3pmbQ3rDK69Rl8GQvIqMWubrkxONG9/ojtFE2rD9fjMKfSI5BxW3osRH1m6VdzmqK8oAY9aT4x5w==",
+      "dev": true,
+      "requires": {
+        "call-bind": "^1.0.2",
+        "define-properties": "^1.1.4",
+        "es-abstract": "^1.20.4"
+      }
+    },
+    "object.values": {
+      "version": "1.1.6",
+      "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.6.tgz",
+      "integrity": "sha512-FVVTkD1vENCsAcwNs9k6jea2uHC/X0+JcjG8YA60FN5CMaJmG95wT9jek/xX9nornqGRrBkKtzuAu2wuHpKqvw==",
+      "dev": true,
+      "requires": {
+        "call-bind": "^1.0.2",
+        "define-properties": "^1.1.4",
+        "es-abstract": "^1.20.4"
+      }
+    },
+    "once": {
+      "version": "1.4.0",
+      "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
+      "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
+      "dev": true,
+      "requires": {
+        "wrappy": "1"
+      }
+    },
+    "onetime": {
+      "version": "5.1.2",
+      "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz",
+      "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==",
+      "dev": true,
+      "requires": {
+        "mimic-fn": "^2.1.0"
+      }
+    },
+    "opener": {
+      "version": "1.5.2",
+      "resolved": "https://registry.npmjs.org/opener/-/opener-1.5.2.tgz",
+      "integrity": "sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==",
+      "dev": true
+    },
+    "optionator": {
+      "version": "0.9.1",
+      "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz",
+      "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==",
+      "dev": true,
+      "requires": {
+        "deep-is": "^0.1.3",
+        "fast-levenshtein": "^2.0.6",
+        "levn": "^0.4.1",
+        "prelude-ls": "^1.2.1",
+        "type-check": "^0.4.0",
+        "word-wrap": "^1.2.3"
+      }
+    },
+    "own-or": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/own-or/-/own-or-1.0.0.tgz",
+      "integrity": "sha512-NfZr5+Tdf6MB8UI9GLvKRs4cXY8/yB0w3xtt84xFdWy8hkGjn+JFc60VhzS/hFRfbyxFcGYMTjnF4Me+RbbqrA==",
+      "dev": true
+    },
+    "own-or-env": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/own-or-env/-/own-or-env-1.0.2.tgz",
+      "integrity": "sha512-NQ7v0fliWtK7Lkb+WdFqe6ky9XAzYmlkXthQrBbzlYbmFKoAYbDDcwmOm6q8kOuwSRXW8bdL5ORksploUJmWgw==",
+      "dev": true,
+      "requires": {
+        "own-or": "^1.0.0"
+      }
+    },
+    "p-limit": {
+      "version": "2.3.0",
+      "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
+      "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==",
+      "dev": true,
+      "requires": {
+        "p-try": "^2.0.0"
+      }
+    },
+    "p-locate": {
+      "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz",
+      "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
+      "dev": true,
+      "requires": {
+        "p-limit": "^2.2.0"
+      }
+    },
+    "p-map": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz",
+      "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==",
+      "dev": true,
+      "requires": {
+        "aggregate-error": "^3.0.0"
+      }
+    },
+    "p-try": {
+      "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz",
+      "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==",
+      "dev": true
+    },
+    "package-hash": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/package-hash/-/package-hash-4.0.0.tgz",
+      "integrity": "sha512-whdkPIooSu/bASggZ96BWVvZTRMOFxnyUG5PnTSGKoJE2gd5mbVNmR2Nj20QFzxYYgAXpoqC+AiXzl+UMRh7zQ==",
+      "dev": true,
+      "requires": {
+        "graceful-fs": "^4.1.15",
+        "hasha": "^5.0.0",
+        "lodash.flattendeep": "^4.4.0",
+        "release-zalgo": "^1.0.0"
+      }
+    },
+    "parent-module": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
+      "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==",
+      "dev": true,
+      "requires": {
+        "callsites": "^3.0.0"
+      }
+    },
+    "parse-json": {
+      "version": "5.2.0",
+      "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz",
+      "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==",
+      "dev": true,
+      "requires": {
+        "@babel/code-frame": "^7.0.0",
+        "error-ex": "^1.3.1",
+        "json-parse-even-better-errors": "^2.3.0",
+        "lines-and-columns": "^1.1.6"
+      }
+    },
+    "path-exists": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
+      "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
+      "dev": true
+    },
+    "path-is-absolute": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
+      "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==",
+      "dev": true
+    },
+    "path-key": {
+      "version": "3.1.1",
+      "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
+      "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
+      "dev": true
+    },
+    "path-parse": {
+      "version": "1.0.7",
+      "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz",
+      "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==",
+      "dev": true
+    },
+    "path-type": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz",
+      "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==",
+      "dev": true
+    },
+    "picocolors": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz",
+      "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==",
+      "dev": true
+    },
+    "picomatch": {
+      "version": "2.3.1",
+      "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
+      "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
+      "dev": true
+    },
+    "pkg-dir": {
+      "version": "4.2.0",
+      "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz",
+      "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==",
+      "dev": true,
+      "requires": {
+        "find-up": "^4.0.0"
+      }
+    },
+    "prelude-ls": {
+      "version": "1.2.1",
+      "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz",
+      "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==",
+      "dev": true
+    },
+    "prettier": {
+      "version": "2.8.4",
+      "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.4.tgz",
+      "integrity": "sha512-vIS4Rlc2FNh0BySk3Wkd6xmwxB0FpOndW5fisM5H8hsZSxU2VWVB5CWIkIjWvrHjIhxk2g3bfMKM87zNTrZddw==",
+      "dev": true
+    },
+    "prettier-linter-helpers": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz",
+      "integrity": "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==",
+      "dev": true,
+      "requires": {
+        "fast-diff": "^1.1.2"
+      }
+    },
+    "process-on-spawn": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/process-on-spawn/-/process-on-spawn-1.0.0.tgz",
+      "integrity": "sha512-1WsPDsUSMmZH5LeMLegqkPDrsGgsWwk1Exipy2hvB0o/F0ASzbpIctSCcZIK1ykJvtTJULEH+20WOFjMvGnCTg==",
+      "dev": true,
+      "requires": {
+        "fromentries": "^1.2.0"
+      }
+    },
+    "proxyquire": {
+      "version": "2.1.3",
+      "resolved": "https://registry.npmjs.org/proxyquire/-/proxyquire-2.1.3.tgz",
+      "integrity": "sha512-BQWfCqYM+QINd+yawJz23tbBM40VIGXOdDw3X344KcclI/gtBbdWF6SlQ4nK/bYhF9d27KYug9WzljHC6B9Ysg==",
+      "dev": true,
+      "requires": {
+        "fill-keys": "^1.0.2",
+        "module-not-found-error": "^1.0.1",
+        "resolve": "^1.11.1"
+      }
+    },
+    "punycode": {
+      "version": "2.3.0",
+      "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz",
+      "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==",
+      "dev": true
+    },
+    "q": {
+      "version": "1.5.1",
+      "resolved": "https://registry.npmjs.org/q/-/q-1.5.1.tgz",
+      "integrity": "sha512-kV/CThkXo6xyFEZUugw/+pIOywXcDbFYgSct5cT3gqlbkBE1SJdwy6UQoZvodiWF/ckQLZyDE/Bu1M6gVu5lVw==",
+      "dev": true
+    },
+    "queue-microtask": {
+      "version": "1.2.3",
+      "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
+      "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==",
+      "dev": true
+    },
+    "quick-lru": {
+      "version": "4.0.1",
+      "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz",
+      "integrity": "sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==",
+      "dev": true
+    },
+    "react-devtools-core": {
+      "version": "4.28.4",
+      "resolved": "https://registry.npmjs.org/react-devtools-core/-/react-devtools-core-4.28.4.tgz",
+      "integrity": "sha512-IUZKLv3CimeM07G3vX4H4loxVpByrzq3HvfTX7v9migalwvLs9ZY5D3S3pKR33U+GguYfBBdMMZyToFhsSE/iQ==",
+      "dev": true,
+      "requires": {
+        "shell-quote": "^1.6.1",
+        "ws": "^7"
+      }
+    },
+    "read-pkg": {
+      "version": "5.2.0",
+      "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz",
+      "integrity": "sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==",
+      "dev": true,
+      "requires": {
+        "@types/normalize-package-data": "^2.4.0",
+        "normalize-package-data": "^2.5.0",
+        "parse-json": "^5.0.0",
+        "type-fest": "^0.6.0"
+      },
+      "dependencies": {
+        "hosted-git-info": {
+          "version": "2.8.9",
+          "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz",
+          "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==",
+          "dev": true
+        },
+        "normalize-package-data": {
+          "version": "2.5.0",
+          "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz",
+          "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==",
+          "dev": true,
+          "requires": {
+            "hosted-git-info": "^2.1.4",
+            "resolve": "^1.10.0",
+            "semver": "2 || 3 || 4 || 5",
+            "validate-npm-package-license": "^3.0.1"
+          }
+        },
+        "semver": {
+          "version": "5.7.1",
+          "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
+          "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==",
+          "dev": true
+        },
+        "type-fest": {
+          "version": "0.6.0",
+          "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz",
+          "integrity": "sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==",
+          "dev": true
+        }
+      }
+    },
+    "read-pkg-up": {
+      "version": "7.0.1",
+      "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz",
+      "integrity": "sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==",
+      "dev": true,
+      "requires": {
+        "find-up": "^4.1.0",
+        "read-pkg": "^5.2.0",
+        "type-fest": "^0.8.1"
+      },
+      "dependencies": {
+        "type-fest": {
+          "version": "0.8.1",
+          "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz",
+          "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==",
+          "dev": true
+        }
+      }
+    },
+    "readable-stream": {
+      "version": "3.6.0",
+      "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
+      "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
+      "dev": true,
+      "requires": {
+        "inherits": "^2.0.3",
+        "string_decoder": "^1.1.1",
+        "util-deprecate": "^1.0.1"
+      }
+    },
+    "readdirp": {
+      "version": "3.6.0",
+      "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz",
+      "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==",
+      "dev": true,
+      "requires": {
+        "picomatch": "^2.2.1"
+      }
+    },
+    "redent": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz",
+      "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==",
+      "dev": true,
+      "requires": {
+        "indent-string": "^4.0.0",
+        "strip-indent": "^3.0.0"
+      }
+    },
+    "regexp.prototype.flags": {
+      "version": "1.4.3",
+      "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz",
+      "integrity": "sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==",
+      "dev": true,
+      "requires": {
+        "call-bind": "^1.0.2",
+        "define-properties": "^1.1.3",
+        "functions-have-names": "^1.2.2"
+      }
+    },
+    "regexpp": {
+      "version": "3.2.0",
+      "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz",
+      "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==",
+      "dev": true
+    },
+    "release-zalgo": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/release-zalgo/-/release-zalgo-1.0.0.tgz",
+      "integrity": "sha512-gUAyHVHPPC5wdqX/LG4LWtRYtgjxyX78oanFNTMMyFEfOqdC54s3eE82imuWKbOeqYht2CrNf64Qb8vgmmtZGA==",
+      "dev": true,
+      "requires": {
+        "es6-error": "^4.0.1"
+      }
+    },
+    "require-directory": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
+      "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
+      "dev": true
+    },
+    "require-from-string": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz",
+      "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==",
+      "dev": true
+    },
+    "require-like": {
+      "version": "0.1.2",
+      "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz",
+      "integrity": "sha512-oyrU88skkMtDdauHDuKVrgR+zuItqr6/c//FXzvmxRGMexSDc6hNvJInGW3LL46n+8b50RykrvwSUIIQH2LQ5A==",
+      "dev": true
+    },
+    "require-main-filename": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz",
+      "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==",
+      "dev": true
+    },
+    "resolve": {
+      "version": "1.22.1",
+      "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz",
+      "integrity": "sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==",
+      "dev": true,
+      "requires": {
+        "is-core-module": "^2.9.0",
+        "path-parse": "^1.0.7",
+        "supports-preserve-symlinks-flag": "^1.0.0"
+      }
+    },
+    "resolve-from": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz",
+      "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==",
+      "dev": true
+    },
+    "resolve-global": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/resolve-global/-/resolve-global-1.0.0.tgz",
+      "integrity": "sha512-zFa12V4OLtT5XUX/Q4VLvTfBf+Ok0SPc1FNGM/z9ctUdiU618qwKpWnd0CHs3+RqROfyEg/DhuHbMWYqcgljEw==",
+      "dev": true,
+      "requires": {
+        "global-dirs": "^0.1.1"
+      }
+    },
+    "reusify": {
+      "version": "1.0.4",
+      "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz",
+      "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==",
+      "dev": true
+    },
+    "rfdc": {
+      "version": "1.3.0",
+      "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.0.tgz",
+      "integrity": "sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA=="
+    },
+    "rimraf": {
+      "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
+      "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
+      "dev": true,
+      "requires": {
+        "glob": "^7.1.3"
+      }
+    },
+    "run-parallel": {
+      "version": "1.2.0",
+      "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz",
+      "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==",
+      "dev": true,
+      "requires": {
+        "queue-microtask": "^1.2.2"
+      }
+    },
+    "safe-buffer": {
+      "version": "5.2.1",
+      "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
+      "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
+      "dev": true
+    },
+    "safe-regex-test": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.0.tgz",
+      "integrity": "sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==",
+      "dev": true,
+      "requires": {
+        "call-bind": "^1.0.2",
+        "get-intrinsic": "^1.1.3",
+        "is-regex": "^1.1.4"
+      }
+    },
+    "semver": {
+      "version": "7.3.8",
+      "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz",
+      "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==",
+      "dev": true,
+      "requires": {
+        "lru-cache": "^6.0.0"
+      }
+    },
+    "set-blocking": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
+      "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==",
+      "dev": true
+    },
+    "shebang-command": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
+      "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
+      "dev": true,
+      "requires": {
+        "shebang-regex": "^3.0.0"
+      }
+    },
+    "shebang-regex": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
+      "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
+      "dev": true
+    },
+    "shell-quote": {
+      "version": "1.8.1",
+      "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.1.tgz",
+      "integrity": "sha512-6j1W9l1iAs/4xYBI1SYOVZyFcCis9b4KCLQ8fgAGG07QvzaRLVVRQvAy85yNmmZSjYjg4MWh4gNvlPujU/5LpA==",
+      "dev": true
+    },
+    "side-channel": {
+      "version": "1.0.4",
+      "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz",
+      "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==",
+      "dev": true,
+      "requires": {
+        "call-bind": "^1.0.0",
+        "get-intrinsic": "^1.0.2",
+        "object-inspect": "^1.9.0"
+      }
+    },
+    "signal-exit": {
+      "version": "3.0.7",
+      "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
+      "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==",
+      "dev": true
+    },
+    "source-map": {
+      "version": "0.6.1",
+      "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
+      "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
+      "dev": true
+    },
+    "source-map-support": {
+      "version": "0.5.21",
+      "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz",
+      "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==",
+      "dev": true,
+      "requires": {
+        "buffer-from": "^1.0.0",
+        "source-map": "^0.6.0"
+      }
+    },
+    "spawn-wrap": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/spawn-wrap/-/spawn-wrap-2.0.0.tgz",
+      "integrity": "sha512-EeajNjfN9zMnULLwhZZQU3GWBoFNkbngTUPfaawT4RkMiviTxcX0qfhVbGey39mfctfDHkWtuecgQ8NJcyQWHg==",
+      "dev": true,
+      "requires": {
+        "foreground-child": "^2.0.0",
+        "is-windows": "^1.0.2",
+        "make-dir": "^3.0.0",
+        "rimraf": "^3.0.0",
+        "signal-exit": "^3.0.2",
+        "which": "^2.0.1"
+      }
+    },
+    "spdx-correct": {
+      "version": "3.1.1",
+      "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz",
+      "integrity": "sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==",
+      "dev": true,
+      "requires": {
+        "spdx-expression-parse": "^3.0.0",
+        "spdx-license-ids": "^3.0.0"
+      }
+    },
+    "spdx-exceptions": {
+      "version": "2.3.0",
+      "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz",
+      "integrity": "sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==",
+      "dev": true
+    },
+    "spdx-expression-parse": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz",
+      "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==",
+      "dev": true,
+      "requires": {
+        "spdx-exceptions": "^2.1.0",
+        "spdx-license-ids": "^3.0.0"
+      }
+    },
+    "spdx-license-ids": {
+      "version": "3.0.12",
+      "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.12.tgz",
+      "integrity": "sha512-rr+VVSXtRhO4OHbXUiAF7xW3Bo9DuuF6C5jH+q/x15j2jniycgKbxU09Hr0WqlSLUs4i4ltHGXqTe7VHclYWyA==",
+      "dev": true
+    },
+    "split2": {
+      "version": "3.2.2",
+      "resolved": "https://registry.npmjs.org/split2/-/split2-3.2.2.tgz",
+      "integrity": "sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg==",
+      "dev": true,
+      "requires": {
+        "readable-stream": "^3.0.0"
+      }
+    },
+    "sprintf-js": {
+      "version": "1.0.3",
+      "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
+      "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==",
+      "dev": true
+    },
+    "stack-trace": {
+      "version": "0.0.10",
+      "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz",
+      "integrity": "sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg==",
+      "dev": true
+    },
+    "stack-utils": {
+      "version": "2.0.6",
+      "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz",
+      "integrity": "sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==",
+      "dev": true,
+      "requires": {
+        "escape-string-regexp": "^2.0.0"
+      },
+      "dependencies": {
+        "escape-string-regexp": {
+          "version": "2.0.0",
+          "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz",
+          "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==",
+          "dev": true
+        }
+      }
+    },
+    "streamroller": {
+      "version": "3.1.5",
+      "resolved": "https://registry.npmjs.org/streamroller/-/streamroller-3.1.5.tgz",
+      "integrity": "sha512-KFxaM7XT+irxvdqSP1LGLgNWbYN7ay5owZ3r/8t77p+EtSUAfUgtl7be3xtqtOmGUl9K9YPO2ca8133RlTjvKw==",
+      "requires": {
+        "date-format": "^4.0.14",
+        "debug": "^4.3.4",
+        "fs-extra": "^8.1.0"
+      },
+      "dependencies": {
+        "fs-extra": {
+          "version": "8.1.0",
+          "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz",
+          "integrity": "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==",
+          "requires": {
+            "graceful-fs": "^4.2.0",
+            "jsonfile": "^4.0.0",
+            "universalify": "^0.1.0"
+          }
+        }
+      }
+    },
+    "string-width": {
+      "version": "4.2.3",
+      "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
+      "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
+      "dev": true,
+      "requires": {
+        "emoji-regex": "^8.0.0",
+        "is-fullwidth-code-point": "^3.0.0",
+        "strip-ansi": "^6.0.1"
+      }
+    },
+    "string.prototype.trimend": {
+      "version": "1.0.6",
+      "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.6.tgz",
+      "integrity": "sha512-JySq+4mrPf9EsDBEDYMOb/lM7XQLulwg5R/m1r0PXEFqrV0qHvl58sdTilSXtKOflCsK2E8jxf+GKC0T07RWwQ==",
+      "dev": true,
+      "requires": {
+        "call-bind": "^1.0.2",
+        "define-properties": "^1.1.4",
+        "es-abstract": "^1.20.4"
+      }
+    },
+    "string.prototype.trimstart": {
+      "version": "1.0.6",
+      "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.6.tgz",
+      "integrity": "sha512-omqjMDaY92pbn5HOX7f9IccLA+U1tA9GvtU4JrodiXFfYB7jPzzHpRzpglLAjtUV6bB557zwClJezTqnAiYnQA==",
+      "dev": true,
+      "requires": {
+        "call-bind": "^1.0.2",
+        "define-properties": "^1.1.4",
+        "es-abstract": "^1.20.4"
+      }
+    },
+    "string_decoder": {
+      "version": "1.3.0",
+      "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
+      "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
+      "dev": true,
+      "requires": {
+        "safe-buffer": "~5.2.0"
+      }
+    },
+    "strip-ansi": {
+      "version": "6.0.1",
+      "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
+      "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+      "dev": true,
+      "requires": {
+        "ansi-regex": "^5.0.1"
+      }
+    },
+    "strip-bom": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz",
+      "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==",
+      "dev": true
+    },
+    "strip-final-newline": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz",
+      "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==",
+      "dev": true
+    },
+    "strip-indent": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz",
+      "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==",
+      "dev": true,
+      "requires": {
+        "min-indent": "^1.0.0"
+      }
+    },
+    "strip-json-comments": {
+      "version": "3.1.1",
+      "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz",
+      "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==",
+      "dev": true
+    },
+    "supports-color": {
+      "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
+      "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
+      "dev": true,
+      "requires": {
+        "has-flag": "^4.0.0"
+      }
+    },
+    "supports-preserve-symlinks-flag": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz",
+      "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==",
+      "dev": true
+    },
+    "tap": {
+      "version": "16.3.4",
+      "resolved": "https://registry.npmjs.org/tap/-/tap-16.3.4.tgz",
+      "integrity": "sha512-SAexdt2ZF4XBgye6TPucFI2y7VE0qeFXlXucJIV1XDPCs+iJodk0MYacr1zR6Ycltzz7PYg8zrblDXKbAZM2LQ==",
+      "dev": true,
+      "requires": {
+        "@isaacs/import-jsx": "^4.0.1",
+        "@types/react": "^17.0.52",
+        "chokidar": "^3.3.0",
+        "findit": "^2.0.0",
+        "foreground-child": "^2.0.0",
+        "fs-exists-cached": "^1.0.0",
+        "glob": "^7.2.3",
+        "ink": "^3.2.0",
+        "isexe": "^2.0.0",
+        "istanbul-lib-processinfo": "^2.0.3",
+        "jackspeak": "^1.4.2",
+        "libtap": "^1.4.0",
+        "minipass": "^3.3.4",
+        "mkdirp": "^1.0.4",
+        "nyc": "^15.1.0",
+        "opener": "^1.5.1",
+        "react": "^17.0.2",
+        "rimraf": "^3.0.0",
+        "signal-exit": "^3.0.6",
+        "source-map-support": "^0.5.16",
+        "tap-mocha-reporter": "^5.0.3",
+        "tap-parser": "^11.0.2",
+        "tap-yaml": "^1.0.2",
+        "tcompare": "^5.0.7",
+        "treport": "^3.0.4",
+        "which": "^2.0.2"
+      },
+      "dependencies": {
+        "@ampproject/remapping": {
+          "version": "2.1.2",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "@jridgewell/trace-mapping": "^0.3.0"
+          }
+        },
+        "@babel/code-frame": {
+          "version": "7.16.7",
+          "bundled": true,
+          "requires": {
+            "@babel/highlight": "^7.16.7"
+          }
+        },
+        "@babel/compat-data": {
+          "version": "7.17.7",
+          "bundled": true,
+          "dev": true
+        },
+        "@babel/core": {
+          "version": "7.17.8",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "@ampproject/remapping": "^2.1.0",
+            "@babel/code-frame": "^7.16.7",
+            "@babel/generator": "^7.17.7",
+            "@babel/helper-compilation-targets": "^7.17.7",
+            "@babel/helper-module-transforms": "^7.17.7",
+            "@babel/helpers": "^7.17.8",
+            "@babel/parser": "^7.17.8",
+            "@babel/template": "^7.16.7",
+            "@babel/traverse": "^7.17.3",
+            "@babel/types": "^7.17.0",
+            "convert-source-map": "^1.7.0",
+            "debug": "^4.1.0",
+            "gensync": "^1.0.0-beta.2",
+            "json5": "^2.1.2",
+            "semver": "^6.3.0"
+          }
+        },
+        "@babel/generator": {
+          "version": "7.17.7",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "@babel/types": "^7.17.0",
+            "jsesc": "^2.5.1",
+            "source-map": "^0.5.0"
+          }
+        },
+        "@babel/helper-annotate-as-pure": {
+          "version": "7.16.7",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "@babel/types": "^7.16.7"
+          }
+        },
+        "@babel/helper-compilation-targets": {
+          "version": "7.17.7",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "@babel/compat-data": "^7.17.7",
+            "@babel/helper-validator-option": "^7.16.7",
+            "browserslist": "^4.17.5",
+            "semver": "^6.3.0"
+          }
+        },
+        "@babel/helper-environment-visitor": {
+          "version": "7.16.7",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "@babel/types": "^7.16.7"
+          }
+        },
+        "@babel/helper-function-name": {
+          "version": "7.16.7",
+          "bundled": true,
+          "requires": {
+            "@babel/helper-get-function-arity": "^7.16.7",
+            "@babel/template": "^7.16.7",
+            "@babel/types": "^7.16.7"
+          }
+        },
+        "@babel/helper-get-function-arity": {
+          "version": "7.16.7",
+          "bundled": true,
+          "requires": {
+            "@babel/types": "^7.16.7"
+          }
+        },
+        "@babel/helper-hoist-variables": {
+          "version": "7.16.7",
+          "bundled": true,
+          "requires": {
+            "@babel/types": "^7.16.7"
+          }
+        },
+        "@babel/helper-module-imports": {
+          "version": "7.16.7",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "@babel/types": "^7.16.7"
+          }
+        },
+        "@babel/helper-module-transforms": {
+          "version": "7.17.7",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "@babel/helper-environment-visitor": "^7.16.7",
+            "@babel/helper-module-imports": "^7.16.7",
+            "@babel/helper-simple-access": "^7.17.7",
+            "@babel/helper-split-export-declaration": "^7.16.7",
+            "@babel/helper-validator-identifier": "^7.16.7",
+            "@babel/template": "^7.16.7",
+            "@babel/traverse": "^7.17.3",
+            "@babel/types": "^7.17.0"
+          }
+        },
+        "@babel/helper-plugin-utils": {
+          "version": "7.16.7",
+          "bundled": true,
+          "dev": true
+        },
+        "@babel/helper-simple-access": {
+          "version": "7.17.7",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "@babel/types": "^7.17.0"
+          }
+        },
+        "@babel/helper-split-export-declaration": {
+          "version": "7.16.7",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "@babel/types": "^7.16.7"
+          }
+        },
+        "@babel/helper-validator-identifier": {
+          "version": "7.16.7",
+          "bundled": true
+        },
+        "@babel/helper-validator-option": {
+          "version": "7.16.7",
+          "bundled": true,
+          "dev": true
+        },
+        "@babel/helpers": {
+          "version": "7.17.8",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "@babel/template": "^7.16.7",
+            "@babel/traverse": "^7.17.3",
+            "@babel/types": "^7.17.0"
+          }
+        },
+        "@babel/highlight": {
+          "version": "7.16.10",
+          "bundled": true,
+          "requires": {
+            "@babel/helper-validator-identifier": "^7.16.7",
+            "chalk": "^2.0.0",
+            "js-tokens": "^4.0.0"
+          }
+        },
+        "@babel/parser": {
+          "version": "7.17.8",
+          "bundled": true
+        },
+        "@babel/plugin-proposal-object-rest-spread": {
+          "version": "7.17.3",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "@babel/compat-data": "^7.17.0",
+            "@babel/helper-compilation-targets": "^7.16.7",
+            "@babel/helper-plugin-utils": "^7.16.7",
+            "@babel/plugin-syntax-object-rest-spread": "^7.8.3",
+            "@babel/plugin-transform-parameters": "^7.16.7"
+          }
+        },
+        "@babel/plugin-syntax-jsx": {
+          "version": "7.16.7",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "@babel/helper-plugin-utils": "^7.16.7"
+          }
+        },
+        "@babel/plugin-syntax-object-rest-spread": {
+          "version": "7.8.3",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "@babel/helper-plugin-utils": "^7.8.0"
+          }
+        },
+        "@babel/plugin-transform-destructuring": {
+          "version": "7.17.7",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "@babel/helper-plugin-utils": "^7.16.7"
+          }
+        },
+        "@babel/plugin-transform-parameters": {
+          "version": "7.16.7",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "@babel/helper-plugin-utils": "^7.16.7"
+          }
+        },
+        "@babel/plugin-transform-react-jsx": {
+          "version": "7.17.3",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "@babel/helper-annotate-as-pure": "^7.16.7",
+            "@babel/helper-module-imports": "^7.16.7",
+            "@babel/helper-plugin-utils": "^7.16.7",
+            "@babel/plugin-syntax-jsx": "^7.16.7",
+            "@babel/types": "^7.17.0"
+          }
+        },
+        "@babel/template": {
+          "version": "7.16.7",
+          "bundled": true,
+          "requires": {
+            "@babel/code-frame": "^7.16.7",
+            "@babel/parser": "^7.16.7",
+            "@babel/types": "^7.16.7"
+          }
+        },
+        "@babel/types": {
+          "version": "7.17.0",
+          "bundled": true,
+          "requires": {
+            "@babel/helper-validator-identifier": "^7.16.7",
+            "to-fast-properties": "^2.0.0"
+          }
+        },
+        "@isaacs/import-jsx": {
+          "version": "4.0.1",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "@babel/core": "^7.5.5",
+            "@babel/plugin-proposal-object-rest-spread": "^7.5.5",
+            "@babel/plugin-transform-destructuring": "^7.5.0",
+            "@babel/plugin-transform-react-jsx": "^7.3.0",
+            "caller-path": "^3.0.1",
+            "find-cache-dir": "^3.2.0",
+            "make-dir": "^3.0.2",
+            "resolve-from": "^3.0.0",
+            "rimraf": "^3.0.0"
+          }
+        },
+        "@jridgewell/resolve-uri": {
+          "version": "3.0.5",
+          "bundled": true,
+          "dev": true
+        },
+        "@jridgewell/sourcemap-codec": {
+          "version": "1.4.11",
+          "bundled": true,
+          "dev": true
+        },
+        "@jridgewell/trace-mapping": {
+          "version": "0.3.4",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "@jridgewell/resolve-uri": "^3.0.3",
+            "@jridgewell/sourcemap-codec": "^1.4.10"
+          }
+        },
+        "@types/prop-types": {
+          "version": "15.7.4",
+          "bundled": true,
+          "dev": true
+        },
+        "@types/react": {
+          "version": "17.0.52",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "@types/prop-types": "*",
+            "@types/scheduler": "*",
+            "csstype": "^3.0.2"
+          }
+        },
+        "@types/scheduler": {
+          "version": "0.16.2",
+          "bundled": true,
+          "dev": true
+        },
+        "@types/yoga-layout": {
+          "version": "1.9.2",
+          "bundled": true,
+          "dev": true
+        },
+        "ansi-escapes": {
+          "version": "4.3.2",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "type-fest": "^0.21.3"
+          },
+          "dependencies": {
+            "type-fest": {
+              "version": "0.21.3",
+              "bundled": true,
+              "dev": true
+            }
+          }
+        },
+        "ansi-regex": {
+          "version": "5.0.1",
+          "bundled": true,
+          "dev": true
+        },
+        "ansi-styles": {
+          "version": "3.2.1",
+          "bundled": true,
+          "requires": {
+            "color-convert": "^1.9.0"
+          }
+        },
+        "ansicolors": {
+          "version": "0.3.2",
+          "bundled": true,
+          "dev": true
+        },
+        "astral-regex": {
+          "version": "2.0.0",
+          "bundled": true,
+          "dev": true
+        },
+        "auto-bind": {
+          "version": "4.0.0",
+          "bundled": true,
+          "dev": true
+        },
+        "balanced-match": {
+          "version": "1.0.2",
+          "bundled": true,
+          "dev": true
+        },
+        "brace-expansion": {
+          "version": "1.1.11",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "balanced-match": "^1.0.0",
+            "concat-map": "0.0.1"
+          }
+        },
+        "browserslist": {
+          "version": "4.20.2",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "caniuse-lite": "^1.0.30001317",
+            "electron-to-chromium": "^1.4.84",
+            "escalade": "^3.1.1",
+            "node-releases": "^2.0.2",
+            "picocolors": "^1.0.0"
+          }
+        },
+        "caller-callsite": {
+          "version": "4.1.0",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "callsites": "^3.1.0"
+          }
+        },
+        "caller-path": {
+          "version": "3.0.1",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "caller-callsite": "^4.1.0"
+          }
+        },
+        "callsites": {
+          "version": "3.1.0",
+          "bundled": true,
+          "dev": true
+        },
+        "caniuse-lite": {
+          "version": "1.0.30001319",
+          "bundled": true,
+          "dev": true
+        },
+        "cardinal": {
+          "version": "2.1.1",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "ansicolors": "~0.3.2",
+            "redeyed": "~2.1.0"
+          }
+        },
+        "chalk": {
+          "version": "2.4.2",
+          "bundled": true,
+          "requires": {
+            "ansi-styles": "^3.2.1",
+            "escape-string-regexp": "^1.0.5",
+            "supports-color": "^5.3.0"
+          }
+        },
+        "ci-info": {
+          "version": "2.0.0",
+          "bundled": true,
+          "dev": true
+        },
+        "cli-boxes": {
+          "version": "2.2.1",
+          "bundled": true,
+          "dev": true
+        },
+        "cli-cursor": {
+          "version": "3.1.0",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "restore-cursor": "^3.1.0"
+          }
+        },
+        "cli-truncate": {
+          "version": "2.1.0",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "slice-ansi": "^3.0.0",
+            "string-width": "^4.2.0"
+          }
+        },
+        "code-excerpt": {
+          "version": "3.0.0",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "convert-to-spaces": "^1.0.1"
+          }
+        },
+        "color-convert": {
+          "version": "1.9.3",
+          "bundled": true,
+          "requires": {
+            "color-name": "1.1.3"
+          }
+        },
+        "color-name": {
+          "version": "1.1.3",
+          "bundled": true
+        },
+        "commondir": {
+          "version": "1.0.1",
+          "bundled": true,
+          "dev": true
+        },
+        "concat-map": {
+          "version": "0.0.1",
+          "bundled": true,
+          "dev": true
+        },
+        "convert-source-map": {
+          "version": "1.8.0",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "safe-buffer": "~5.1.1"
+          }
+        },
+        "convert-to-spaces": {
+          "version": "1.0.2",
+          "bundled": true,
+          "dev": true
+        },
+        "csstype": {
+          "version": "3.0.11",
+          "bundled": true,
+          "dev": true
+        },
+        "debug": {
+          "version": "4.3.4",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "ms": "2.1.2"
+          }
+        },
+        "electron-to-chromium": {
+          "version": "1.4.89",
+          "bundled": true,
+          "dev": true
+        },
+        "emoji-regex": {
+          "version": "8.0.0",
+          "bundled": true,
+          "dev": true
+        },
+        "escalade": {
+          "version": "3.1.1",
+          "bundled": true,
+          "dev": true
+        },
+        "escape-string-regexp": {
+          "version": "1.0.5",
+          "bundled": true
+        },
+        "esprima": {
+          "version": "4.0.1",
+          "bundled": true,
+          "dev": true
+        },
+        "events-to-array": {
+          "version": "1.1.2",
+          "bundled": true,
+          "dev": true
+        },
+        "find-cache-dir": {
+          "version": "3.3.2",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "commondir": "^1.0.1",
+            "make-dir": "^3.0.2",
+            "pkg-dir": "^4.1.0"
+          }
+        },
+        "find-up": {
+          "version": "4.1.0",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "locate-path": "^5.0.0",
+            "path-exists": "^4.0.0"
+          }
+        },
+        "fs.realpath": {
+          "version": "1.0.0",
+          "bundled": true,
+          "dev": true
+        },
+        "gensync": {
+          "version": "1.0.0-beta.2",
+          "bundled": true,
+          "dev": true
+        },
+        "glob": {
+          "version": "7.2.3",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "fs.realpath": "^1.0.0",
+            "inflight": "^1.0.4",
+            "inherits": "2",
+            "minimatch": "^3.1.1",
+            "once": "^1.3.0",
+            "path-is-absolute": "^1.0.0"
+          }
+        },
+        "globals": {
+          "version": "11.12.0",
+          "bundled": true
+        },
+        "has-flag": {
+          "version": "3.0.0",
+          "bundled": true
+        },
+        "indent-string": {
+          "version": "4.0.0",
+          "bundled": true,
+          "dev": true
+        },
+        "inflight": {
+          "version": "1.0.6",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "once": "^1.3.0",
+            "wrappy": "1"
+          }
+        },
+        "inherits": {
+          "version": "2.0.4",
+          "bundled": true,
+          "dev": true
+        },
+        "ink": {
+          "version": "3.2.0",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "ansi-escapes": "^4.2.1",
+            "auto-bind": "4.0.0",
+            "chalk": "^4.1.0",
+            "cli-boxes": "^2.2.0",
+            "cli-cursor": "^3.1.0",
+            "cli-truncate": "^2.1.0",
+            "code-excerpt": "^3.0.0",
+            "indent-string": "^4.0.0",
+            "is-ci": "^2.0.0",
+            "lodash": "^4.17.20",
+            "patch-console": "^1.0.0",
+            "react-devtools-core": "^4.19.1",
+            "react-reconciler": "^0.26.2",
+            "scheduler": "^0.20.2",
+            "signal-exit": "^3.0.2",
+            "slice-ansi": "^3.0.0",
+            "stack-utils": "^2.0.2",
+            "string-width": "^4.2.2",
+            "type-fest": "^0.12.0",
+            "widest-line": "^3.1.0",
+            "wrap-ansi": "^6.2.0",
+            "ws": "^7.5.5",
+            "yoga-layout-prebuilt": "^1.9.6"
+          },
+          "dependencies": {
+            "ansi-styles": {
+              "version": "4.3.0",
+              "bundled": true,
+              "dev": true,
+              "requires": {
+                "color-convert": "^2.0.1"
+              }
+            },
+            "chalk": {
+              "version": "4.1.2",
+              "bundled": true,
+              "dev": true,
+              "requires": {
+                "ansi-styles": "^4.1.0",
+                "supports-color": "^7.1.0"
+              }
+            },
+            "color-convert": {
+              "version": "2.0.1",
+              "bundled": true,
+              "dev": true,
+              "requires": {
+                "color-name": "~1.1.4"
+              }
+            },
+            "color-name": {
+              "version": "1.1.4",
+              "bundled": true,
+              "dev": true
+            },
+            "has-flag": {
+              "version": "4.0.0",
+              "bundled": true,
+              "dev": true
+            },
+            "supports-color": {
+              "version": "7.2.0",
+              "bundled": true,
+              "dev": true,
+              "requires": {
+                "has-flag": "^4.0.0"
+              }
+            }
+          }
+        },
+        "is-ci": {
+          "version": "2.0.0",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "ci-info": "^2.0.0"
+          }
+        },
+        "is-fullwidth-code-point": {
+          "version": "3.0.0",
+          "bundled": true,
+          "dev": true
+        },
+        "js-tokens": {
+          "version": "4.0.0",
+          "bundled": true
+        },
+        "jsesc": {
+          "version": "2.5.2",
+          "bundled": true,
+          "dev": true
+        },
+        "json5": {
+          "version": "2.2.3",
+          "bundled": true,
+          "dev": true
+        },
+        "locate-path": {
+          "version": "5.0.0",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "p-locate": "^4.1.0"
+          }
+        },
+        "lodash": {
+          "version": "4.17.21",
+          "bundled": true,
+          "dev": true
+        },
+        "loose-envify": {
+          "version": "1.4.0",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "js-tokens": "^3.0.0 || ^4.0.0"
+          }
+        },
+        "make-dir": {
+          "version": "3.1.0",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "semver": "^6.0.0"
+          }
+        },
+        "mimic-fn": {
+          "version": "2.1.0",
+          "bundled": true,
+          "dev": true
+        },
+        "minimatch": {
+          "version": "3.1.2",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "brace-expansion": "^1.1.7"
+          }
+        },
+        "minipass": {
+          "version": "3.3.4",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "yallist": "^4.0.0"
+          }
+        },
+        "ms": {
+          "version": "2.1.2",
+          "bundled": true,
+          "dev": true
+        },
+        "node-releases": {
+          "version": "2.0.2",
+          "bundled": true,
+          "dev": true
+        },
+        "object-assign": {
+          "version": "4.1.1",
+          "bundled": true,
+          "dev": true
+        },
+        "once": {
+          "version": "1.4.0",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "wrappy": "1"
+          }
+        },
+        "onetime": {
+          "version": "5.1.2",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "mimic-fn": "^2.1.0"
+          }
+        },
+        "p-limit": {
+          "version": "2.3.0",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "p-try": "^2.0.0"
+          }
+        },
+        "p-locate": {
+          "version": "4.1.0",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "p-limit": "^2.2.0"
+          }
+        },
+        "p-try": {
+          "version": "2.2.0",
+          "bundled": true,
+          "dev": true
+        },
+        "patch-console": {
+          "version": "1.0.0",
+          "bundled": true,
+          "dev": true
+        },
+        "path-exists": {
+          "version": "4.0.0",
+          "bundled": true,
+          "dev": true
+        },
+        "path-is-absolute": {
+          "version": "1.0.1",
+          "bundled": true,
+          "dev": true
+        },
+        "picocolors": {
+          "version": "1.0.0",
+          "bundled": true,
+          "dev": true
+        },
+        "pkg-dir": {
+          "version": "4.2.0",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "find-up": "^4.0.0"
+          }
+        },
+        "punycode": {
+          "version": "2.1.1",
+          "bundled": true,
+          "dev": true
+        },
+        "react": {
+          "version": "17.0.2",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "loose-envify": "^1.1.0",
+            "object-assign": "^4.1.1"
+          }
+        },
+        "react-reconciler": {
+          "version": "0.26.2",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "loose-envify": "^1.1.0",
+            "object-assign": "^4.1.1",
+            "scheduler": "^0.20.2"
+          }
+        },
+        "redeyed": {
+          "version": "2.1.1",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "esprima": "~4.0.0"
+          }
+        },
+        "resolve-from": {
+          "version": "3.0.0",
+          "bundled": true,
+          "dev": true
+        },
+        "restore-cursor": {
+          "version": "3.1.0",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "onetime": "^5.1.0",
+            "signal-exit": "^3.0.2"
+          }
+        },
+        "rimraf": {
+          "version": "3.0.2",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "glob": "^7.1.3"
+          }
+        },
+        "safe-buffer": {
+          "version": "5.1.2",
+          "bundled": true,
+          "dev": true
+        },
+        "scheduler": {
+          "version": "0.20.2",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "loose-envify": "^1.1.0",
+            "object-assign": "^4.1.1"
+          }
+        },
+        "semver": {
+          "version": "6.3.0",
+          "bundled": true,
+          "dev": true
+        },
+        "shell-quote": {
+          "version": "1.7.3",
+          "bundled": true
+        },
+        "signal-exit": {
+          "version": "3.0.7",
+          "bundled": true,
+          "dev": true
+        },
+        "slice-ansi": {
+          "version": "3.0.0",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "ansi-styles": "^4.0.0",
+            "astral-regex": "^2.0.0",
+            "is-fullwidth-code-point": "^3.0.0"
+          },
+          "dependencies": {
+            "ansi-styles": {
+              "version": "4.3.0",
+              "bundled": true,
+              "dev": true,
+              "requires": {
+                "color-convert": "^2.0.1"
+              }
+            },
+            "color-convert": {
+              "version": "2.0.1",
+              "bundled": true,
+              "dev": true,
+              "requires": {
+                "color-name": "~1.1.4"
+              }
+            },
+            "color-name": {
+              "version": "1.1.4",
+              "bundled": true,
+              "dev": true
+            }
+          }
+        },
+        "source-map": {
+          "version": "0.5.7",
+          "bundled": true,
+          "dev": true
+        },
+        "stack-utils": {
+          "version": "2.0.5",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "escape-string-regexp": "^2.0.0"
+          },
+          "dependencies": {
+            "escape-string-regexp": {
+              "version": "2.0.0",
+              "bundled": true,
+              "dev": true
+            }
+          }
+        },
+        "string-width": {
+          "version": "4.2.3",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "emoji-regex": "^8.0.0",
+            "is-fullwidth-code-point": "^3.0.0",
+            "strip-ansi": "^6.0.1"
+          }
+        },
+        "strip-ansi": {
+          "version": "6.0.1",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "ansi-regex": "^5.0.1"
+          }
+        },
+        "supports-color": {
+          "version": "5.5.0",
+          "bundled": true,
+          "requires": {
+            "has-flag": "^3.0.0"
+          }
+        },
+        "tap-parser": {
+          "version": "11.0.2",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "events-to-array": "^1.0.1",
+            "minipass": "^3.1.6",
+            "tap-yaml": "^1.0.0"
+          }
+        },
+        "tap-yaml": {
+          "version": "1.0.2",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "yaml": "^1.10.2"
+          }
+        },
+        "to-fast-properties": {
+          "version": "2.0.0",
+          "bundled": true
+        },
+        "treport": {
+          "version": "3.0.4",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "@isaacs/import-jsx": "^4.0.1",
+            "cardinal": "^2.1.1",
+            "chalk": "^3.0.0",
+            "ink": "^3.2.0",
+            "ms": "^2.1.2",
+            "tap-parser": "^11.0.0",
+            "tap-yaml": "^1.0.0",
+            "unicode-length": "^2.0.2"
+          },
+          "dependencies": {
+            "ansi-styles": {
+              "version": "4.3.0",
+              "bundled": true,
+              "dev": true,
+              "requires": {
+                "color-convert": "^2.0.1"
+              }
+            },
+            "chalk": {
+              "version": "3.0.0",
+              "bundled": true,
+              "dev": true,
+              "requires": {
+                "ansi-styles": "^4.1.0",
+                "supports-color": "^7.1.0"
+              }
+            },
+            "color-convert": {
+              "version": "2.0.1",
+              "bundled": true,
+              "dev": true,
+              "requires": {
+                "color-name": "~1.1.4"
+              }
+            },
+            "color-name": {
+              "version": "1.1.4",
+              "bundled": true,
+              "dev": true
+            },
+            "has-flag": {
+              "version": "4.0.0",
+              "bundled": true,
+              "dev": true
+            },
+            "supports-color": {
+              "version": "7.2.0",
+              "bundled": true,
+              "dev": true,
+              "requires": {
+                "has-flag": "^4.0.0"
+              }
+            }
+          }
+        },
+        "type-fest": {
+          "version": "0.12.0",
+          "bundled": true,
+          "dev": true
+        },
+        "unicode-length": {
+          "version": "2.0.2",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "punycode": "^2.0.0",
+            "strip-ansi": "^3.0.1"
+          },
+          "dependencies": {
+            "ansi-regex": {
+              "version": "2.1.1",
+              "bundled": true,
+              "dev": true
+            },
+            "strip-ansi": {
+              "version": "3.0.1",
+              "bundled": true,
+              "dev": true,
+              "requires": {
+                "ansi-regex": "^2.0.0"
+              }
+            }
+          }
+        },
+        "widest-line": {
+          "version": "3.1.0",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "string-width": "^4.0.0"
+          }
+        },
+        "wrap-ansi": {
+          "version": "6.2.0",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "ansi-styles": "^4.0.0",
+            "string-width": "^4.1.0",
+            "strip-ansi": "^6.0.0"
+          },
+          "dependencies": {
+            "ansi-styles": {
+              "version": "4.3.0",
+              "bundled": true,
+              "dev": true,
+              "requires": {
+                "color-convert": "^2.0.1"
+              }
+            },
+            "color-convert": {
+              "version": "2.0.1",
+              "bundled": true,
+              "dev": true,
+              "requires": {
+                "color-name": "~1.1.4"
+              }
+            },
+            "color-name": {
+              "version": "1.1.4",
+              "bundled": true,
+              "dev": true
+            }
+          }
+        },
+        "wrappy": {
+          "version": "1.0.2",
+          "bundled": true,
+          "dev": true
+        },
+        "yallist": {
+          "version": "4.0.0",
+          "bundled": true,
+          "dev": true
+        },
+        "yaml": {
+          "version": "1.10.2",
+          "bundled": true,
+          "dev": true
+        },
+        "yoga-layout-prebuilt": {
+          "version": "1.10.0",
+          "bundled": true,
+          "dev": true,
+          "requires": {
+            "@types/yoga-layout": "1.9.2"
+          }
+        }
+      }
+    },
+    "tap-mocha-reporter": {
+      "version": "5.0.3",
+      "resolved": "https://registry.npmjs.org/tap-mocha-reporter/-/tap-mocha-reporter-5.0.3.tgz",
+      "integrity": "sha512-6zlGkaV4J+XMRFkN0X+yuw6xHbE9jyCZ3WUKfw4KxMyRGOpYSRuuQTRJyWX88WWuLdVTuFbxzwXhXuS2XE6o0g==",
+      "dev": true,
+      "requires": {
+        "color-support": "^1.1.0",
+        "debug": "^4.1.1",
+        "diff": "^4.0.1",
+        "escape-string-regexp": "^2.0.0",
+        "glob": "^7.0.5",
+        "tap-parser": "^11.0.0",
+        "tap-yaml": "^1.0.0",
+        "unicode-length": "^2.0.2"
+      },
+      "dependencies": {
+        "escape-string-regexp": {
+          "version": "2.0.0",
+          "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz",
+          "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==",
+          "dev": true
+        }
+      }
+    },
+    "tap-parser": {
+      "version": "11.0.2",
+      "resolved": "https://registry.npmjs.org/tap-parser/-/tap-parser-11.0.2.tgz",
+      "integrity": "sha512-6qGlC956rcORw+fg7Fv1iCRAY8/bU9UabUAhs3mXRH6eRmVZcNPLheSXCYaVaYeSwx5xa/1HXZb1537YSvwDZg==",
+      "dev": true,
+      "requires": {
+        "events-to-array": "^1.0.1",
+        "minipass": "^3.1.6",
+        "tap-yaml": "^1.0.0"
+      }
+    },
+    "tap-yaml": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/tap-yaml/-/tap-yaml-1.0.2.tgz",
+      "integrity": "sha512-GegASpuqBnRNdT1U+yuUPZ8rEU64pL35WPBpCISWwff4dErS2/438barz7WFJl4Nzh3Y05tfPidZnH+GaV1wMg==",
+      "dev": true,
+      "requires": {
+        "yaml": "^1.10.2"
+      }
+    },
+    "tcompare": {
+      "version": "5.0.7",
+      "resolved": "https://registry.npmjs.org/tcompare/-/tcompare-5.0.7.tgz",
+      "integrity": "sha512-d9iddt6YYGgyxJw5bjsN7UJUO1kGOtjSlNy/4PoGYAjQS5pAT/hzIoLf1bZCw+uUxRmZJh7Yy1aA7xKVRT9B4w==",
+      "dev": true,
+      "requires": {
+        "diff": "^4.0.2"
+      }
+    },
+    "test-exclude": {
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz",
+      "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==",
+      "dev": true,
+      "requires": {
+        "@istanbuljs/schema": "^0.1.2",
+        "glob": "^7.1.4",
+        "minimatch": "^3.0.4"
+      }
+    },
+    "text-extensions": {
+      "version": "1.9.0",
+      "resolved": "https://registry.npmjs.org/text-extensions/-/text-extensions-1.9.0.tgz",
+      "integrity": "sha512-wiBrwC1EhBelW12Zy26JeOUkQ5mRu+5o8rpsJk5+2t+Y5vE7e842qtZDQ2g1NpX/29HdyFeJ4nSIhI47ENSxlQ==",
+      "dev": true
+    },
+    "text-table": {
+      "version": "0.2.0",
+      "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz",
+      "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==",
+      "dev": true
+    },
+    "through": {
+      "version": "2.3.8",
+      "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz",
+      "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==",
+      "dev": true
+    },
+    "through2": {
+      "version": "4.0.2",
+      "resolved": "https://registry.npmjs.org/through2/-/through2-4.0.2.tgz",
+      "integrity": "sha512-iOqSav00cVxEEICeD7TjLB1sueEL+81Wpzp2bY17uZjZN0pWZPuo4suZ/61VujxmqSGFfgOcNuTZ85QJwNZQpw==",
+      "dev": true,
+      "requires": {
+        "readable-stream": "3"
+      }
+    },
+    "to-fast-properties": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz",
+      "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==",
+      "dev": true
+    },
+    "to-regex-range": {
+      "version": "5.0.1",
+      "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
+      "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
+      "dev": true,
+      "requires": {
+        "is-number": "^7.0.0"
+      }
+    },
+    "trim-newlines": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.1.tgz",
+      "integrity": "sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==",
+      "dev": true
+    },
+    "trivial-deferred": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/trivial-deferred/-/trivial-deferred-1.0.1.tgz",
+      "integrity": "sha512-dagAKX7vaesNNAwOc9Np9C2mJ+7YopF4lk+jE2JML9ta4kZ91Y6UruJNH65bLRYoUROD8EY+Pmi44qQWwXR7sw==",
+      "dev": true
+    },
+    "ts-node": {
+      "version": "10.9.1",
+      "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.1.tgz",
+      "integrity": "sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==",
+      "dev": true,
+      "requires": {
+        "@cspotcode/source-map-support": "^0.8.0",
+        "@tsconfig/node10": "^1.0.7",
+        "@tsconfig/node12": "^1.0.7",
+        "@tsconfig/node14": "^1.0.0",
+        "@tsconfig/node16": "^1.0.2",
+        "acorn": "^8.4.1",
+        "acorn-walk": "^8.1.1",
+        "arg": "^4.1.0",
+        "create-require": "^1.1.0",
+        "diff": "^4.0.1",
+        "make-error": "^1.1.1",
+        "v8-compile-cache-lib": "^3.0.1",
+        "yn": "3.1.1"
+      }
+    },
+    "tsconfig-paths": {
+      "version": "3.14.1",
+      "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz",
+      "integrity": "sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ==",
+      "dev": true,
+      "requires": {
+        "@types/json5": "^0.0.29",
+        "json5": "^1.0.1",
+        "minimist": "^1.2.6",
+        "strip-bom": "^3.0.0"
+      }
+    },
+    "type-check": {
+      "version": "0.4.0",
+      "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz",
+      "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==",
+      "dev": true,
+      "requires": {
+        "prelude-ls": "^1.2.1"
+      }
+    },
+    "type-fest": {
+      "version": "0.18.1",
+      "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.18.1.tgz",
+      "integrity": "sha512-OIAYXk8+ISY+qTOwkHtKqzAuxchoMiD9Udx+FSGQDuiRR+PJKJHc2NJAXlbhkGwTt/4/nKZxELY1w3ReWOL8mw==",
+      "dev": true
+    },
+    "typed-array-length": {
+      "version": "1.0.4",
+      "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.4.tgz",
+      "integrity": "sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng==",
+      "dev": true,
+      "requires": {
+        "call-bind": "^1.0.2",
+        "for-each": "^0.3.3",
+        "is-typed-array": "^1.1.9"
+      }
+    },
+    "typedarray-to-buffer": {
+      "version": "3.1.5",
+      "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz",
+      "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==",
+      "dev": true,
+      "requires": {
+        "is-typedarray": "^1.0.0"
+      }
+    },
+    "typescript": {
+      "version": "4.9.5",
+      "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz",
+      "integrity": "sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==",
+      "dev": true
+    },
+    "unbox-primitive": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz",
+      "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==",
+      "dev": true,
+      "requires": {
+        "call-bind": "^1.0.2",
+        "has-bigints": "^1.0.2",
+        "has-symbols": "^1.0.3",
+        "which-boxed-primitive": "^1.0.2"
+      }
+    },
+    "unicode-length": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/unicode-length/-/unicode-length-2.1.0.tgz",
+      "integrity": "sha512-4bV582zTV9Q02RXBxSUMiuN/KHo5w4aTojuKTNT96DIKps/SIawFp7cS5Mu25VuY1AioGXrmYyzKZUzh8OqoUw==",
+      "dev": true,
+      "requires": {
+        "punycode": "^2.0.0"
+      }
+    },
+    "universalify": {
+      "version": "0.1.2",
+      "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
+      "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg=="
+    },
+    "update-browserslist-db": {
+      "version": "1.0.10",
+      "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.10.tgz",
+      "integrity": "sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ==",
+      "dev": true,
+      "requires": {
+        "escalade": "^3.1.1",
+        "picocolors": "^1.0.0"
+      }
+    },
+    "uri-js": {
+      "version": "4.4.1",
+      "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz",
+      "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==",
+      "dev": true,
+      "requires": {
+        "punycode": "^2.1.0"
+      }
+    },
+    "util-deprecate": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
+      "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
+      "dev": true
+    },
+    "uuid": {
+      "version": "8.3.2",
+      "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
+      "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
+      "dev": true
+    },
+    "v8-compile-cache-lib": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz",
+      "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==",
+      "dev": true
+    },
+    "validate-npm-package-license": {
+      "version": "3.0.4",
+      "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz",
+      "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==",
+      "dev": true,
+      "requires": {
+        "spdx-correct": "^3.0.0",
+        "spdx-expression-parse": "^3.0.0"
+      }
+    },
+    "which": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
+      "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
+      "dev": true,
+      "requires": {
+        "isexe": "^2.0.0"
+      }
+    },
+    "which-boxed-primitive": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz",
+      "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==",
+      "dev": true,
+      "requires": {
+        "is-bigint": "^1.0.1",
+        "is-boolean-object": "^1.1.0",
+        "is-number-object": "^1.0.4",
+        "is-string": "^1.0.5",
+        "is-symbol": "^1.0.3"
+      }
+    },
+    "which-module": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz",
+      "integrity": "sha512-B+enWhmw6cjfVC7kS8Pj9pCrKSc5txArRyaYGe088shv/FGWH+0Rjx/xPgtsWfsUtS27FkP697E4DDhgrgoc0Q==",
+      "dev": true
+    },
+    "which-typed-array": {
+      "version": "1.1.9",
+      "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.9.tgz",
+      "integrity": "sha512-w9c4xkx6mPidwp7180ckYWfMmvxpjlZuIudNtDf4N/tTAUB8VJbX25qZoAsrtGuYNnGw3pa0AXgbGKRB8/EceA==",
+      "dev": true,
+      "requires": {
+        "available-typed-arrays": "^1.0.5",
+        "call-bind": "^1.0.2",
+        "for-each": "^0.3.3",
+        "gopd": "^1.0.1",
+        "has-tostringtag": "^1.0.0",
+        "is-typed-array": "^1.1.10"
+      }
+    },
+    "word-wrap": {
+      "version": "1.2.4",
+      "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.4.tgz",
+      "integrity": "sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==",
+      "dev": true
+    },
+    "wrap-ansi": {
+      "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
+      "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
+      "dev": true,
+      "requires": {
+        "ansi-styles": "^4.0.0",
+        "string-width": "^4.1.0",
+        "strip-ansi": "^6.0.0"
+      }
+    },
+    "wrappy": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
+      "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
+      "dev": true
+    },
+    "write-file-atomic": {
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz",
+      "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==",
+      "dev": true,
+      "requires": {
+        "imurmurhash": "^0.1.4",
+        "is-typedarray": "^1.0.0",
+        "signal-exit": "^3.0.2",
+        "typedarray-to-buffer": "^3.1.5"
+      }
+    },
+    "ws": {
+      "version": "7.5.10",
+      "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.10.tgz",
+      "integrity": "sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==",
+      "dev": true
+    },
+    "y18n": {
+      "version": "5.0.8",
+      "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
+      "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==",
+      "dev": true
+    },
+    "yallist": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
+      "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
+      "dev": true
+    },
+    "yaml": {
+      "version": "1.10.2",
+      "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz",
+      "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==",
+      "dev": true
+    },
+    "yargs": {
+      "version": "17.7.0",
+      "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.0.tgz",
+      "integrity": "sha512-dwqOPg5trmrre9+v8SUo2q/hAwyKoVfu8OC1xPHKJGNdxAvPl4sKxL4vBnh3bQz/ZvvGAFeA5H3ou2kcOY8sQQ==",
+      "dev": true,
+      "requires": {
+        "cliui": "^8.0.1",
+        "escalade": "^3.1.1",
+        "get-caller-file": "^2.0.5",
+        "require-directory": "^2.1.1",
+        "string-width": "^4.2.3",
+        "y18n": "^5.0.5",
+        "yargs-parser": "^21.1.1"
+      },
+      "dependencies": {
+        "yargs-parser": {
+          "version": "21.1.1",
+          "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz",
+          "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==",
+          "dev": true
+        }
+      }
+    },
+    "yargs-parser": {
+      "version": "20.2.9",
+      "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz",
+      "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==",
+      "dev": true
+    },
+    "yn": {
+      "version": "3.1.1",
+      "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz",
+      "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==",
+      "dev": true
+    },
+    "yocto-queue": {
+      "version": "0.1.0",
+      "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
+      "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
+      "dev": true
+    }
+  }
+}
diff --git a/package.json b/package.json
index be0e054b..a39a61ab 100644
--- a/package.json
+++ b/package.json
@@ -1,7 +1,14 @@
 {
   "name": "log4js",
-  "version": "1.1.0",
+  "version": "6.9.1",
   "description": "Port of Log4js to work with node.",
+  "homepage": "https://log4js-node.github.io/log4js-node/",
+  "files": [
+    "lib",
+    "types/*.d.ts",
+    "CHANGELOG.md",
+    "SECURITY.md"
+  ],
   "keywords": [
     "logging",
     "log",
@@ -10,81 +17,86 @@
   ],
   "license": "Apache-2.0",
   "main": "./lib/log4js",
-  "author": "Gareth Jones ",
+  "types": "./types/log4js.d.ts",
+  "contributors": [
+    "Gareth Jones ",
+    "Lam Wei Li "
+  ],
   "repository": {
     "type": "git",
-    "url": "https://github.com/nomiddlename/log4js-node.git"
+    "url": "https://github.com/log4js-node/log4js-node.git"
   },
   "bugs": {
-    "url": "http://github.com/nomiddlename/log4js-node/issues"
+    "url": "http://github.com/log4js-node/log4js-node/issues"
   },
   "engines": {
-    "node": ">=4.0"
+    "node": ">=8.0"
   },
   "scripts": {
-    "clean": "find test -type f ! -name '*.json' ! -name '*.js' ! -name '.eslintrc' -delete && rm *.log",
-    "lint": "eslint lib/ test/",
-    "prepush": "npm test",
-    "commitmsg": "validate-commit-msg",
-    "posttest": "npm run clean",
-    "pretest": "eslint lib/**/*",
-    "test": "tap 'test/tap/**/*.js'",
-    "coverage": "tap 'test/tap/**/*.js' --cov",
-    "codecov": "tap 'test/tap/**/*.js' --cov --coverage-report=lcov && codecov"
+    "prepare": "is-ci || husky install",
+    "pretest": "npm run prettier --if-present && eslint \"lib/**/*.js\" \"test/**/*.js\"",
+    "prettier": "prettier --check \"**/*.*\"",
+    "prettier:fix": "prettier --write \"**/*.*\"",
+    "test": "tap \"test/tap/**/*.js\" --cov --reporter=classic --coverage-report=text --coverage-report=html --timeout=45",
+    "typings": "tsc -p types/tsconfig.json"
   },
   "directories": {
     "test": "test",
     "lib": "lib"
   },
   "dependencies": {
-    "date-format": "^1.0.0",
-    "debug": "^2.2.0",
-    "semver": "^5.3.0",
-    "streamroller": "^0.3.0"
+    "date-format": "^4.0.14",
+    "debug": "^4.3.4",
+    "flatted": "^3.2.7",
+    "rfdc": "^1.3.0",
+    "streamroller": "^3.1.5"
   },
   "devDependencies": {
-    "codecov": "^1.0.1",
-    "conventional-changelog": "^1.1.0",
-    "eslint": "^3.12.0",
-    "eslint-config-airbnb-base": "^11.0.0",
-    "eslint-plugin-import": "^2.0.0",
-    "husky": "^0.12.0",
-    "nyc": "^10.0.0",
-    "sandboxed-module": "^2.0.3",
-    "tap": "^8.0.1",
-    "validate-commit-msg": "^2.6.1"
-  },
-  "optionalDependencies": {
-    "hipchat-notifier": "^1.1.0",
-    "loggly": "^1.1.0",
-    "mailgun-js": "^0.7.0",
-    "nodemailer": "^2.5.0",
-    "slack-node": "~0.2.0",
-    "axios": "^0.15.3"
+    "@commitlint/cli": "^17.4.4",
+    "@commitlint/config-conventional": "^17.4.4",
+    "@log4js-node/sandboxed-module": "^2.2.1",
+    "callsites": "^3.1.0",
+    "deep-freeze": "0.0.1",
+    "eslint": "^8.34.0",
+    "eslint-config-airbnb-base": "^15.0.0",
+    "eslint-config-prettier": "^8.6.0",
+    "eslint-import-resolver-node": "^0.3.7",
+    "eslint-plugin-import": "^2.27.5",
+    "eslint-plugin-prettier": "^4.2.1",
+    "fs-extra": "^11.1.0",
+    "husky": "^8.0.3",
+    "is-ci": "^3.0.1",
+    "nyc": "^15.1.0",
+    "prettier": "^2.8.4",
+    "proxyquire": "^2.1.3",
+    "tap": "^16.3.4",
+    "typescript": "^4.9.5"
   },
   "browser": {
-    "os": false
+    "os": false,
+    "streamroller": false,
+    "./lib/clustering.js": "./lib/clusteringBrowser.js",
+    "./lib/appenders/dateFile.js": "./lib/appenders/ignoreBrowser.js",
+    "./lib/appenders/file.js": "./lib/appenders/ignoreBrowser.js",
+    "./lib/appenders/fileSync.js": "./lib/appenders/ignoreBrowser.js",
+    "./lib/appenders/multiFile.js": "./lib/appenders/ignoreBrowser.js"
+  },
+  "prettier": {
+    "trailingComma": "es5",
+    "arrowParens": "always",
+    "overrides": [
+      {
+        "files": [
+          "*.cjs"
+        ],
+        "options": {
+          "parser": "typescript"
+        }
+      }
+    ]
   },
-  "config": {
-    "validate-commit-msg": {
-      "types": [
-        "feat",
-        "fix",
-        "docs",
-        "style",
-        "refactor",
-        "example",
-        "perf",
-        "test",
-        "chore",
-        "revert"
-      ],
-      "warnOnFail": false,
-      "maxSubjectLength": 72,
-      "subjectPattern": ".+",
-      "subjectPatternErrorMsg": "subject does not match subject pattern!",
-      "helpMessage": "\n# allowed type: feat, fix, docs, style, refactor, example, perf, test, chore, revert\n# subject no more than 50 chars\n# a body line no more than 72 chars"
-    }
+  "tap": {
+    "check-coverage": true
   },
   "nyc": {
     "all": true,
diff --git a/test/.eslintrc b/test/.eslintrc
index 985a565e..72248364 100644
--- a/test/.eslintrc
+++ b/test/.eslintrc
@@ -1,11 +1,11 @@
 {
   "extends": "../.eslintrc",
-	"rules": {
-		"no-plusplus": 0,
+  "rules": {
+    "no-plusplus": 0,
     "global-require": 0,
     "no-mixed-operators": 0,
     "no-underscore-dangle": 0,
     "guard-for-in": 0,
     "no-restricted-syntax": ["error", "WithStatement"]
-	}
+  }
 }
diff --git a/test/multiprocess-worker.js b/test/multiprocess-worker.js
new file mode 100644
index 00000000..f2e2dbf6
--- /dev/null
+++ b/test/multiprocess-worker.js
@@ -0,0 +1,14 @@
+if (process.argv.indexOf('start-multiprocess-worker') >= 0) {
+  const log4js = require('../lib/log4js');
+  const port = parseInt(process.argv[process.argv.length - 1], 10);
+  log4js.configure({
+    appenders: {
+      multi: { type: 'multiprocess', mode: 'worker', loggerPort: port },
+    },
+    categories: { default: { appenders: ['multi'], level: 'debug' } },
+  });
+  log4js.getLogger('worker').info('Logging from worker');
+  log4js.shutdown(() => {
+    process.send('worker is done');
+  });
+}
diff --git a/test/sandbox-coverage.js b/test/sandbox-coverage.js
index 8be97c8e..c6fb7595 100644
--- a/test/sandbox-coverage.js
+++ b/test/sandbox-coverage.js
@@ -1,15 +1,46 @@
-'use strict';
-
-const sandbox = require('sandboxed-module');
+const sandbox = require('@log4js-node/sandboxed-module');
 
 sandbox.configure({
   sourceTransformers: {
-    nyc: function (source) {
+    nyc(source) {
       if (this.filename.indexOf('node_modules') > -1) {
         return source;
       }
-      const nyc = new (require('nyc'))();
-      return nyc.instrumenter().instrumentSync(source, this.filename);
-    }
-  }
+      const nyc = new (require('nyc'))({});
+      return nyc
+        .instrumenter()
+        .instrumentSync(source, this.filename, { registerMap: () => {} });
+    },
+  },
 });
+
+// polyfill for Node.js <12
+Promise.allSettled =
+  Promise.allSettled ||
+  ((promises) =>
+    Promise.all(
+      promises.map((p) =>
+        p
+          .then((value) => ({
+            status: 'fulfilled',
+            value,
+          }))
+          .catch((reason) => ({
+            status: 'rejected',
+            reason,
+          }))
+      )
+    ));
+
+// polyfill for Node.js <10
+process.off = process.off || process.removeListener;
+
+// polyfill for Node.js <10
+const fs = require('fs'); // eslint-disable-line import/newline-after-import
+fs.promises = fs.promises || {};
+fs.promises.unlink =
+  fs.promises.unlink ||
+  ((...args) =>
+    new Promise((resolve, reject) => {
+      fs.unlink(...args, (err) => (err ? reject(err) : resolve()));
+    }));
diff --git a/test/tap/LoggingEvent-test.js b/test/tap/LoggingEvent-test.js
new file mode 100644
index 00000000..4c227cae
--- /dev/null
+++ b/test/tap/LoggingEvent-test.js
@@ -0,0 +1,252 @@
+const flatted = require('flatted');
+const { test } = require('tap');
+const LoggingEvent = require('../../lib/LoggingEvent');
+const levels = require('../../lib/levels');
+
+test('LoggingEvent', (batch) => {
+  batch.test('should throw error for invalid location', (t) => {
+    t.throws(
+      () =>
+        new LoggingEvent(
+          'cheese',
+          levels.DEBUG,
+          ['log message'],
+          undefined,
+          []
+        ),
+      'Invalid location type passed to LoggingEvent constructor'
+    );
+    t.end();
+  });
+
+  batch.test('should serialise to flatted', (t) => {
+    const nullPrototype = Object.create(null);
+    nullPrototype.hello = 'world';
+    const event = new LoggingEvent(
+      'cheese',
+      levels.DEBUG,
+      [
+        'log message',
+        Number('abc'),
+        'NaN',
+        1 / 0,
+        'Infinity',
+        -1 / 0,
+        '-Infinity',
+        undefined,
+        'undefined',
+        nullPrototype,
+      ],
+      {
+        user: 'bob',
+      }
+    );
+    // set the event date to a known value
+    event.startTime = new Date(Date.UTC(2018, 1, 4, 18, 30, 23, 10));
+    const rehydratedEvent = flatted.parse(event.serialise());
+    t.equal(rehydratedEvent.startTime, '2018-02-04T18:30:23.010Z');
+    t.equal(rehydratedEvent.categoryName, 'cheese');
+    t.equal(rehydratedEvent.level.levelStr, 'DEBUG');
+    t.equal(rehydratedEvent.data.length, 10);
+    t.equal(rehydratedEvent.data[0], 'log message');
+    t.equal(rehydratedEvent.data[1], '__LOG4JS_NaN__');
+    t.equal(rehydratedEvent.data[2], 'NaN');
+    t.equal(rehydratedEvent.data[3], '__LOG4JS_Infinity__');
+    t.equal(rehydratedEvent.data[4], 'Infinity');
+    t.equal(rehydratedEvent.data[5], '__LOG4JS_-Infinity__');
+    t.equal(rehydratedEvent.data[6], '-Infinity');
+    t.equal(rehydratedEvent.data[7], '__LOG4JS_undefined__');
+    t.equal(rehydratedEvent.data[8], 'undefined');
+    t.equal(
+      Object.entries(rehydratedEvent.data[9]).length,
+      Object.entries(nullPrototype).length
+    );
+    t.equal(rehydratedEvent.data[9].hello, 'world');
+    t.equal(rehydratedEvent.context.user, 'bob');
+    t.end();
+  });
+
+  batch.test('should deserialise from flatted', (t) => {
+    const dehydratedEvent = flatted.stringify({
+      startTime: '2018-02-04T10:25:23.010Z',
+      categoryName: 'biscuits',
+      level: {
+        levelStr: 'INFO',
+      },
+      data: [
+        'some log message',
+        { x: 1 },
+        '__LOG4JS_NaN__',
+        'NaN',
+        '__LOG4JS_Infinity__',
+        'Infinity',
+        '__LOG4JS_-Infinity__',
+        '-Infinity',
+        '__LOG4JS_undefined__',
+        'undefined',
+      ],
+      context: { thing: 'otherThing' },
+      pid: '1234',
+      functionName: 'bound',
+      fileName: 'domain.js',
+      lineNumber: 421,
+      columnNumber: 15,
+      callStack: 'at bound (domain.js:421:15)\n',
+    });
+    const event = LoggingEvent.deserialise(dehydratedEvent);
+    t.type(event, LoggingEvent);
+    t.same(event.startTime, new Date(Date.UTC(2018, 1, 4, 10, 25, 23, 10)));
+    t.equal(event.categoryName, 'biscuits');
+    t.same(event.level, levels.INFO);
+    t.equal(event.data.length, 10);
+    t.equal(event.data[0], 'some log message');
+    t.equal(event.data[1].x, 1);
+    t.ok(Number.isNaN(event.data[2]));
+    t.equal(event.data[3], 'NaN');
+    t.equal(event.data[4], 1 / 0);
+    t.equal(event.data[5], 'Infinity');
+    t.equal(event.data[6], -1 / 0);
+    t.equal(event.data[7], '-Infinity');
+    t.equal(event.data[8], undefined);
+    t.equal(event.data[9], 'undefined');
+    t.equal(event.context.thing, 'otherThing');
+    t.equal(event.pid, '1234');
+    t.equal(event.functionName, 'bound');
+    t.equal(event.fileName, 'domain.js');
+    t.equal(event.lineNumber, 421);
+    t.equal(event.columnNumber, 15);
+    t.equal(event.callStack, 'at bound (domain.js:421:15)\n');
+    t.end();
+  });
+
+  batch.test('Should correctly construct with/without location info', (t) => {
+    // console.log([Error('123').stack.split('\n').slice(1).join('\n')])
+    const callStack =
+      '    at repl:1:14\n    at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n    at REPLServer.defaultEval (repl.js:240:29)\n    at bound (domain.js:301:14)\n    at REPLServer.runBound [as eval] (domain.js:314:12)\n    at REPLServer.onLine (repl.js:468:10)\n    at emitOne (events.js:121:20)\n    at REPLServer.emit (events.js:211:7)\n    at REPLServer.Interface._onLine (readline.js:280:10)\n    at REPLServer.Interface._line (readline.js:629:8)'; // eslint-disable-line max-len
+    const fileName = '/log4js-node/test/tap/layouts-test.js';
+    const lineNumber = 1;
+    const columnNumber = 14;
+    const className = '';
+    const functionName = '';
+    const functionAlias = '';
+    const callerName = '';
+    const location = {
+      fileName,
+      lineNumber,
+      columnNumber,
+      callStack,
+      className,
+      functionName,
+      functionAlias,
+      callerName,
+    };
+    const event = new LoggingEvent(
+      'cheese',
+      levels.DEBUG,
+      ['log message'],
+      { user: 'bob' },
+      location
+    );
+    t.equal(event.fileName, fileName);
+    t.equal(event.lineNumber, lineNumber);
+    t.equal(event.columnNumber, columnNumber);
+    t.equal(event.callStack, callStack);
+    t.equal(event.className, className);
+    t.equal(event.functionName, functionName);
+    t.equal(event.functionAlias, functionAlias);
+    t.equal(event.callerName, callerName);
+
+    const event2 = new LoggingEvent('cheese', levels.DEBUG, ['log message'], {
+      user: 'bob',
+    });
+    t.equal(event2.fileName, undefined);
+    t.equal(event2.lineNumber, undefined);
+    t.equal(event2.columnNumber, undefined);
+    t.equal(event2.callStack, undefined);
+    t.equal(event2.className, undefined);
+    t.equal(event2.functionName, undefined);
+    t.equal(event2.functionAlias, undefined);
+    t.equal(event2.callerName, undefined);
+    t.end();
+  });
+
+  batch.test('Should contain class, method and alias names', (t) => {
+    // console.log([Error('123').stack.split('\n').slice(1).join('\n')])
+    const callStack =
+      '    at Foo.bar [as baz] (repl:1:14)\n    at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n    at REPLServer.defaultEval (repl.js:240:29)\n    at bound (domain.js:301:14)\n    at REPLServer.runBound [as eval] (domain.js:314:12)\n    at REPLServer.onLine (repl.js:468:10)\n    at emitOne (events.js:121:20)\n    at REPLServer.emit (events.js:211:7)\n    at REPLServer.Interface._onLine (readline.js:280:10)\n    at REPLServer.Interface._line (readline.js:629:8)'; // eslint-disable-line max-len
+    const fileName = '/log4js-node/test/tap/layouts-test.js';
+    const lineNumber = 1;
+    const columnNumber = 14;
+    const className = 'Foo';
+    const functionName = 'bar';
+    const functionAlias = 'baz';
+    const callerName = 'Foo.bar [as baz]';
+    const location = {
+      fileName,
+      lineNumber,
+      columnNumber,
+      callStack,
+      className,
+      functionName,
+      functionAlias,
+      callerName,
+    };
+    const event = new LoggingEvent(
+      'cheese',
+      levels.DEBUG,
+      ['log message'],
+      { user: 'bob' },
+      location
+    );
+    t.equal(event.fileName, fileName);
+    t.equal(event.lineNumber, lineNumber);
+    t.equal(event.columnNumber, columnNumber);
+    t.equal(event.callStack, callStack);
+    t.equal(event.className, className);
+    t.equal(event.functionName, functionName);
+    t.equal(event.functionAlias, functionAlias);
+    t.equal(event.callerName, callerName);
+    t.end();
+  });
+
+  batch.test('Should correctly serialize and deserialize', (t) => {
+    const error = new Error('test');
+    const location = {
+      fileName: __filename,
+      lineNumber: 123,
+      columnNumber: 52,
+      callStack: error.stack,
+      className: 'Foo',
+      functionName: 'test',
+      functionAlias: 'baz',
+      callerName: 'Foo.test [as baz]',
+    };
+    const event = new LoggingEvent(
+      'cheese',
+      levels.DEBUG,
+      [
+        error,
+        'log message',
+        Number('abc'),
+        'NaN',
+        1 / 0,
+        'Infinity',
+        -1 / 0,
+        '-Infinity',
+        undefined,
+        'undefined',
+      ],
+      {
+        user: 'bob',
+      },
+      location,
+      error
+    );
+    const event2 = LoggingEvent.deserialise(event.serialise());
+    t.match(event2, event);
+
+    t.end();
+  });
+
+  batch.end();
+});
diff --git a/test/tap/appender-dependencies-test.js b/test/tap/appender-dependencies-test.js
new file mode 100644
index 00000000..4c680e78
--- /dev/null
+++ b/test/tap/appender-dependencies-test.js
@@ -0,0 +1,115 @@
+const { test } = require('tap');
+
+const categories = {
+  default: { appenders: ['filtered'], level: 'debug' },
+};
+
+let log4js;
+let recording;
+
+test('log4js appender dependencies', (batch) => {
+  batch.beforeEach((done) => {
+    log4js = require('../../lib/log4js');
+    recording = require('../../lib/appenders/recording');
+    if (typeof done === 'function') {
+      done();
+    }
+  });
+  batch.afterEach((done) => {
+    recording.erase();
+    if (typeof done === 'function') {
+      done();
+    }
+  });
+  batch.test('in order', (t) => {
+    const config = {
+      categories,
+      appenders: {
+        recorder: { type: 'recording' },
+        filtered: {
+          type: 'logLevelFilter',
+          appender: 'recorder',
+          level: 'ERROR',
+        },
+      },
+    };
+    t.test('should resolve if defined in dependency order', (assert) => {
+      assert.doesNotThrow(() => {
+        log4js.configure(config);
+      }, 'this should not trigger an error');
+      assert.end();
+    });
+    const logger = log4js.getLogger('logLevelTest');
+    logger.debug('this should not trigger an event');
+    logger.error('this should, though');
+
+    const logEvents = recording.replay();
+    t.test('should process log events normally', (assert) => {
+      assert.equal(logEvents.length, 1);
+      assert.equal(logEvents[0].data[0], 'this should, though');
+      assert.end();
+    });
+    t.end();
+  });
+
+  batch.test('not in order', (t) => {
+    const config = {
+      categories,
+      appenders: {
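+        // 'filtered' refers to 'recorder' before it is declared; configure() should still resolve it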
+        filtered: {
+          type: 'logLevelFilter',
+          appender: 'recorder',
+          level: 'ERROR',
+        },
+        recorder: { type: 'recording' },
+      },
+    };
+    t.test('should resolve if defined out of dependency order', (assert) => {
+      assert.doesNotThrow(() => {
+        log4js.configure(config);
+      }, 'this should not trigger an error');
+      assert.end();
+    });
+    const logger = log4js.getLogger('logLevelTest');
+    logger.debug('this should not trigger an event');
+    logger.error('this should, though');
+
+    const logEvents = recording.replay();
+    t.test('should process log events normally', (assert) => {
+      assert.equal(logEvents.length, 1);
+      assert.equal(logEvents[0].data[0], 'this should, though');
+      assert.end();
+    });
+    t.end();
+  });
+
+  batch.test('with dependency loop', (t) => {
+    const config = {
+      categories,
+      appenders: {
+        filtered: {
+          type: 'logLevelFilter',
+          appender: 'filtered2',
+          level: 'ERROR',
+        },
+        filtered2: {
+          type: 'logLevelFilter',
+          appender: 'filtered',
+          level: 'ERROR',
+        },
+        recorder: { type: 'recording' },
+      },
+    };
+    t.test(
+      'should throw an error if a dependency loop is found',
+      (assert) => {
+        assert.throws(() => {
+          log4js.configure(config);
+        }, 'Dependency loop detected for appender filtered.');
+        assert.end();
+      }
+    );
+    t.end();
+  });
+  batch.end();
+});
diff --git a/test/tap/categoryFilter-test.js b/test/tap/categoryFilter-test.js
index 4cd10439..40c28409 100644
--- a/test/tap/categoryFilter-test.js
+++ b/test/tap/categoryFilter-test.js
@@ -1,78 +1,107 @@
-'use strict';
-
-const test = require('tap').test;
-const fs = require('fs');
-const EOL = require('os').EOL || '\n';
+const { test } = require('tap');
 const log4js = require('../../lib/log4js');
-
-function remove(filename) {
-  try {
-    fs.unlinkSync(filename);
-  } catch (e) {
-    // doesn't really matter if it failed
-  }
-}
-
-function cleanup(done) {
-  remove(`${__dirname}/categoryFilter-web.log`);
-  remove(`${__dirname}/categoryFilter-noweb.log`);
-  done();
-}
+const recording = require('../../lib/appenders/recording');
 
 test('log4js categoryFilter', (batch) => {
-  batch.beforeEach(cleanup);
+  batch.beforeEach((done) => {
+    recording.reset();
+    if (typeof done === 'function') {
+      done();
+    }
+  });
 
   batch.test('appender should exclude categories', (t) => {
-    const logEvents = [];
-    const appender = require(
-      '../../lib/appenders/categoryFilter'
-    ).appender(
-      ['app'],
-      (evt) => {
-        logEvents.push(evt);
-      }
-    );
-    log4js.clearAppenders();
-    log4js.addAppender(appender, ['app', 'web']);
+    log4js.configure({
+      appenders: {
+        recorder: { type: 'recording' },
+        filtered: {
+          type: 'categoryFilter',
+          exclude: 'web',
+          appender: 'recorder',
+        },
+      },
+      categories: { default: { appenders: ['filtered'], level: 'DEBUG' } },
+    });
 
     const webLogger = log4js.getLogger('web');
     const appLogger = log4js.getLogger('app');
 
-    webLogger.debug('This should get logged');
-    appLogger.debug('This should not');
+    webLogger.debug('This should not get logged');
+    appLogger.debug('This should get logged');
     webLogger.debug('Hello again');
-    log4js.getLogger('db').debug('This shouldn\'t be included by the appender anyway');
+    log4js
+      .getLogger('db')
+      .debug('This should be included by the appender anyway');
 
+    const logEvents = recording.replay();
     t.equal(logEvents.length, 2);
     t.equal(logEvents[0].data[0], 'This should get logged');
-    t.equal(logEvents[1].data[0], 'Hello again');
+    t.equal(
+      logEvents[1].data[0],
+      'This should be included by the appender anyway'
+    );
     t.end();
   });
 
-  batch.test('should work with configuration file', (t) => {
-    log4js.configure('test/tap/with-categoryFilter.json');
-    const logger = log4js.getLogger('app');
-    const weblogger = log4js.getLogger('web');
+  batch.test('appender should exclude multiple categories', (t) => {
+    log4js.configure({
+      appenders: {
+        recorder: { type: 'recording' },
+        filtered: {
+          type: 'categoryFilter',
+          exclude: ['app', 'web'],
+          appender: 'recorder',
+        },
+      },
+      categories: { default: { appenders: ['filtered'], level: 'DEBUG' } },
+    });
 
-    logger.info('Loading app');
-    logger.info('Initialising indexes');
-    weblogger.info('00:00:00 GET / 200');
-    weblogger.warn('00:00:00 GET / 500');
+    const webLogger = log4js.getLogger('web');
+    const appLogger = log4js.getLogger('app');
 
-    setTimeout(() => {
-      fs.readFile(`${__dirname}/categoryFilter-noweb.log`, 'utf8', (err, contents) => {
-        const noWebMessages = contents.trim().split(EOL);
-        t.same(noWebMessages, ['Loading app', 'Initialising indexes']);
+    webLogger.debug('This should not get logged');
+    appLogger.debug('This should get logged');
+    webLogger.debug('Hello again');
+    log4js
+      .getLogger('db')
+      .debug('This should be included by the appender anyway');
 
-        fs.readFile(`${__dirname}/categoryFilter-web.log`, 'utf8', (e, c) => {
-          const messages = c.trim().split(EOL);
-          t.same(messages, ['00:00:00 GET / 200', '00:00:00 GET / 500']);
-          t.end();
-        });
-      });
-    }, 500);
+    const logEvents = recording.replay();
+    t.equal(logEvents.length, 1);
+    t.equal(
+      logEvents[0].data[0],
+      'This should be included by the appender anyway'
+    );
+    t.end();
+  });
+
+  batch.test('should not really need a category filter any more', (t) => {
+    log4js.configure({
+      appenders: { recorder: { type: 'recording' } },
+      categories: {
+        default: { appenders: ['recorder'], level: 'DEBUG' },
+        web: { appenders: ['recorder'], level: 'OFF' },
+      },
+    });
+    const appLogger = log4js.getLogger('app');
+    const webLogger = log4js.getLogger('web');
+
+    webLogger.debug('This should not get logged');
+    appLogger.debug('This should get logged');
+    webLogger.debug('Hello again');
+    log4js
+      .getLogger('db')
+      .debug('This should be included by the appender anyway');
+
+    const logEvents = recording.replay();
+    t.equal(logEvents.length, 2);
+    t.equal(logEvents[0].data[0], 'This should get logged');
+    t.equal(
+      logEvents[1].data[0],
+      'This should be included by the appender anyway'
+    );
+    t.end();
   });
 
-  batch.afterEach(cleanup);
   batch.end();
 });
diff --git a/test/tap/cluster-test.js b/test/tap/cluster-test.js
new file mode 100644
index 00000000..e43cd059
--- /dev/null
+++ b/test/tap/cluster-test.js
@@ -0,0 +1,100 @@
+const { test } = require('tap');
+const cluster = require('cluster');
+const log4js = require('../../lib/log4js');
+const recorder = require('../../lib/appenders/recording');
+
+log4js.configure({
+  appenders: {
+    vcr: { type: 'recording' },
+  },
+  categories: { default: { appenders: ['vcr'], level: 'debug' } },
+});
+
+if (cluster.isMaster) {
+  cluster.fork();
+
+  const masterLogger = log4js.getLogger('master');
+  const masterPid = process.pid;
+  masterLogger.info('this is master');
+
+  let workerLevel;
+  cluster.on('message', (worker, message) => {
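+    // some Node versions emit 'message' with only the message argument (no worker),
+    // so detect that and treat the first argument as the message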
+    if (worker.type || worker.topic) {
+      message = worker;
+    }
+    if (message.type && message.type === '::testing') {
+      workerLevel = message.level;
+    }
+  });
+
+  cluster.on('exit', (worker) => {
+    const workerPid = worker.process.pid;
+    const logEvents = recorder.replay();
+
+    test('cluster master', (batch) => {
+      batch.test('events should be logged', (t) => {
+        t.equal(logEvents.length, 3);
+
+        t.equal(logEvents[0].categoryName, 'master');
+        t.equal(logEvents[0].pid, masterPid);
+
+        t.equal(logEvents[1].categoryName, 'worker');
+        t.equal(logEvents[1].pid, workerPid);
+        // serialising errors with stacks intact
+        t.type(logEvents[1].data[1], 'Error');
+        t.match(logEvents[1].data[1].stack, 'Error: oh dear');
+        // serialising circular references in objects
+        t.type(logEvents[1].data[2], 'object');
+        t.type(logEvents[1].data[2].me, 'object');
+        // serialising errors with custom properties
+        t.type(logEvents[1].data[3], 'Error');
+        t.match(logEvents[1].data[3].stack, 'Error: wtf');
+        t.equal(logEvents[1].data[3].alert, 'chartreuse');
+        // serialising things that are not errors, but look a bit like them
+        t.type(logEvents[1].data[4], 'object');
+        t.equal(logEvents[1].data[4].stack, 'this is not a stack trace');
+
+        t.equal(logEvents[2].categoryName, 'log4js');
+        t.equal(logEvents[2].level.toString(), 'ERROR');
+        t.equal(logEvents[2].data[0], 'Unable to parse log:');
+
+        t.end();
+      });
+
+      batch.end();
+    });
+
+    test('cluster worker', (batch) => {
+      batch.test('logger should get correct config', (t) => {
+        t.equal(workerLevel, 'DEBUG');
+        t.end();
+      });
+      batch.end();
+    });
+  });
+} else {
+  const workerLogger = log4js.getLogger('worker');
+  // test for serialising circular references
+  const circle = {};
+  circle.me = circle;
+  // test for serialising errors with their own properties
+  const someError = new Error('wtf');
+  someError.alert = 'chartreuse';
+  // test for serialising things that look like errors but aren't.
+  const notAnError = { stack: 'this is not a stack trace' };
+  workerLogger.info(
+    'this is worker',
+    new Error('oh dear'),
+    circle,
+    someError,
+    notAnError
+  );
+  // can't run the test in the worker, things get weird
+  process.send({
+    type: '::testing',
+    level: workerLogger.level.toString(),
+  });
+  // test sending a badly-formed log message
+  process.send({ topic: 'log4js:message', data: { cheese: 'gouda' } });
+  cluster.worker.disconnect();
+}
diff --git a/test/tap/clusteredAppender-test.js b/test/tap/clusteredAppender-test.js
deleted file mode 100644
index 83b1e5ee..00000000
--- a/test/tap/clusteredAppender-test.js
+++ /dev/null
@@ -1,147 +0,0 @@
-'use strict';
-
-const test = require('tap').test;
-const sandbox = require('sandboxed-module');
-const LoggingEvent = require('../../lib/logger').LoggingEvent;
-
-test('log4js cluster appender', (batch) => {
-  batch.test('when in master mode', (t) => {
-    const registeredClusterEvents = [];
-    const loggingEvents = [];
-    let onChildProcessForked;
-    let onMasterReceiveChildMessage;
-
-    // Fake cluster module, so no real cluster listeners be really added
-    const fakeCluster = {
-
-      on: function (event, callback) {
-        registeredClusterEvents.push(event);
-        onChildProcessForked = callback;
-      },
-
-      isMaster: true,
-      isWorker: false,
-
-    };
-    const fakeWorker = {
-      on: function (event, callback) {
-        onMasterReceiveChildMessage = callback;
-      },
-      process: {
-        pid: 123
-      },
-      id: 'workerid'
-    };
-
-    const fakeActualAppender = function (loggingEvent) {
-      loggingEvents.push(loggingEvent);
-    };
-
-    // Load appender and fake modules in it
-    const appenderModule = sandbox.require('../../lib/appenders/clustered', {
-      requires: {
-        cluster: fakeCluster,
-      }
-    });
-
-    const masterAppender = appenderModule.appender({
-      actualAppenders: [fakeActualAppender, fakeActualAppender, fakeActualAppender],
-      appenders: [{}, { category: 'test' }, { category: 'wovs' }]
-    });
-
-    // Actual test - log message using masterAppender
-    masterAppender(new LoggingEvent('wovs', 'Info', ['masterAppender test']));
-
-    // Simulate a 'fork' event to register the master's message handler on our fake worker.
-    onChildProcessForked(fakeWorker);
-    // Simulate a cluster message received by the masterAppender.
-    const simulatedLoggingEvent = new LoggingEvent(
-      'wovs',
-      'Error',
-      [
-        'message deserialization test',
-        { stack: 'my wrapped stack' }
-      ]
-    );
-    onMasterReceiveChildMessage({
-      type: '::log-message',
-      event: JSON.stringify(simulatedLoggingEvent)
-    });
-
-    t.test("should register 'fork' event listener on 'cluster'", (assert) => {
-      assert.equal(registeredClusterEvents[0], 'fork');
-      assert.end();
-    });
-
-    t.test('should log using actual appender', (assert) => {
-      assert.equal(loggingEvents.length, 4);
-      assert.equal(loggingEvents[0].data[0], 'masterAppender test');
-      assert.equal(loggingEvents[1].data[0], 'masterAppender test');
-      assert.equal(loggingEvents[2].data[0], 'message deserialization test');
-      assert.equal(loggingEvents[2].data[1], 'my wrapped stack');
-      assert.equal(loggingEvents[3].data[0], 'message deserialization test');
-      assert.equal(loggingEvents[3].data[1], 'my wrapped stack');
-      assert.end();
-    });
-
-    t.end();
-  });
-
-  batch.test('when in worker mode', (t) => {
-    const registeredProcessEvents = [];
-
-    // Fake cluster module, to fake we're inside a worker process
-    const fakeCluster = {
-
-      isMaster: false,
-      isWorker: true,
-
-    };
-
-    const fakeProcess = {
-
-      send: function (data) {
-        registeredProcessEvents.push(data);
-      },
-      env: process.env
-
-    };
-
-    // Load appender and fake modules in it
-    const appenderModule = sandbox.require('../../lib/appenders/clustered', {
-      requires: {
-        cluster: fakeCluster,
-      },
-      globals: {
-        process: fakeProcess,
-      }
-    });
-
-    const workerAppender = appenderModule.appender();
-
-    // Actual test - log message using masterAppender
-    workerAppender(new LoggingEvent('wovs', 'Info', ['workerAppender test']));
-    workerAppender(new LoggingEvent('wovs', 'Info', [new Error('Error test')]));
-
-    t.test('worker appender should call process.send', (assert) => {
-      assert.equal(registeredProcessEvents[0].type, '::log-message');
-      assert.equal(
-        JSON.parse(registeredProcessEvents[0].event).data[0],
-        'workerAppender test'
-      );
-      assert.end();
-    });
-
-    t.test('worker should serialize an Error correctly', (assert) => {
-      assert.equal(registeredProcessEvents[1].type, '::log-message');
-      assert.ok(JSON.parse(registeredProcessEvents[1].event).data[0].stack);
-      const actual = JSON.parse(registeredProcessEvents[1].event).data[0].stack;
-      assert.match(actual, /^Error: Error test/);
-      assert.end();
-    });
-
-    t.end();
-  });
-
-  batch.end();
-});
diff --git a/test/tap/configuration-inheritance-test.js b/test/tap/configuration-inheritance-test.js
new file mode 100644
index 00000000..ae49422b
--- /dev/null
+++ b/test/tap/configuration-inheritance-test.js
@@ -0,0 +1,361 @@
+const { test } = require('tap');
+const log4js = require('../../lib/log4js');
+const categories = require('../../lib/categories');
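+// the categories module exposes the resolved appenders and levels so the tests can assert on inheritance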
+
+test('log4js category inherit all appenders from direct parent', (batch) => {
+  batch.test('should inherit appenders from direct parent', (t) => {
+    const config = {
+      appenders: {
+        stdout1: { type: 'dummy-appender', label: 'stdout1' },
+        stdout2: { type: 'dummy-appender', label: 'stdout2' },
+      },
+      categories: {
+        default: { appenders: ['stdout1'], level: 'ERROR' },
+        catA: { appenders: ['stdout1', 'stdout2'], level: 'INFO' },
+        'catA.catB': { level: 'DEBUG' },
+      },
+    };
+
+    log4js.configure(config);
+
+    const childCategoryName = 'catA.catB';
+    const childAppenders = categories.appendersForCategory(childCategoryName);
+    const childLevel = categories.getLevelForCategory(childCategoryName);
+
+    t.ok(childAppenders);
+    t.equal(childAppenders.length, 2, 'inherited 2 appenders');
+    t.ok(
+      childAppenders.some((a) => a.label === 'stdout1'),
+      'inherited stdout1'
+    );
+    t.ok(
+      childAppenders.some((a) => a.label === 'stdout2'),
+      'inherited stdout2'
+    );
+    t.equal(childLevel.levelStr, 'DEBUG', 'child level overrides parent');
+    t.end();
+  });
+
+  batch.test(
+    'multiple children should inherit config from shared parent',
+    (t) => {
+      const config = {
+        appenders: {
+          stdout1: { type: 'dummy-appender', label: 'stdout1' },
+          stdout2: { type: 'dummy-appender', label: 'stdout2' },
+        },
+        categories: {
+          default: { appenders: ['stdout1'], level: 'ERROR' },
+          catA: { appenders: ['stdout1'], level: 'INFO' },
+          'catA.catB.cat1': { level: 'DEBUG' }, // should get stdout1, DEBUG
+          'catA.catB.cat2': { appenders: ['stdout2'] }, // should get stdout1, stdout2, INFO
+        },
+      };
+
+      log4js.configure(config);
+
+      const child1CategoryName = 'catA.catB.cat1';
+      const child1Appenders =
+        categories.appendersForCategory(child1CategoryName);
+      const child1Level = categories.getLevelForCategory(child1CategoryName);
+
+      t.equal(child1Appenders.length, 1, 'inherited 1 appender');
+      t.ok(
+        child1Appenders.some((a) => a.label === 'stdout1'),
+        'inherited stdout1'
+      );
+      t.equal(child1Level.levelStr, 'DEBUG', 'child level overrides parent');
+
+      const child2CategoryName = 'catA.catB.cat2';
+      const child2Appenders =
+        categories.appendersForCategory(child2CategoryName);
+      const child2Level = categories.getLevelForCategory(child2CategoryName);
+
+      t.ok(child2Appenders);
+      t.equal(
+        child2Appenders.length,
+        2,
+        'inherited 1 appender, plus its original'
+      );
+      t.ok(
+        child2Appenders.some((a) => a.label === 'stdout1'),
+        'inherited stdout1'
+      );
+      t.ok(
+        child2Appenders.some((a) => a.label === 'stdout2'),
+        'kept stdout2'
+      );
+      t.equal(child2Level.levelStr, 'INFO', 'inherited parent level');
+
+      t.end();
+    }
+  );
+
+  batch.test('should inherit appenders from multiple parents', (t) => {
+    const config = {
+      appenders: {
+        stdout1: { type: 'dummy-appender', label: 'stdout1' },
+        stdout2: { type: 'dummy-appender', label: 'stdout2' },
+      },
+      categories: {
+        default: { appenders: ['stdout1'], level: 'ERROR' },
+        catA: { appenders: ['stdout1'], level: 'INFO' },
+        'catA.catB': { appenders: ['stdout2'], level: 'INFO' }, // should get stdout1 and stdout2
+        'catA.catB.catC': { level: 'DEBUG' }, // should get stdout1 and stdout2
+      },
+    };
+
+    log4js.configure(config);
+
+    const childCategoryName = 'catA.catB.catC';
+    const childAppenders = categories.appendersForCategory(childCategoryName);
+
+    t.ok(childAppenders);
+    t.equal(childAppenders.length, 2, 'inherited 2 appenders');
+    t.ok(
+      childAppenders.some((a) => a.label === 'stdout1'),
+      'inherited stdout1'
+    );
+    t.ok(
+      childAppenders.some((a) => a.label === 'stdout2'),
+      'inherited stdout2'
+    );
+
+    const firstParentName = 'catA.catB';
+    const firstParentAppenders =
+      categories.appendersForCategory(firstParentName);
+
+    t.ok(firstParentAppenders);
+    t.equal(firstParentAppenders.length, 2, 'ended up with 2 appenders');
+    t.ok(
+      firstParentAppenders.some((a) => a.label === 'stdout1'),
+      'inherited stdout1'
+    );
+    t.ok(
+      firstParentAppenders.some((a) => a.label === 'stdout2'),
+      'kept stdout2'
+    );
+
+    t.end();
+  });
+
+  batch.test(
+    'should inherit appenders from deep parent with missing direct parent',
+    (t) => {
+      const config = {
+        appenders: {
+          stdout1: { type: 'dummy-appender', label: 'stdout1' },
+          stdout2: { type: 'dummy-appender', label: 'stdout2' },
+        },
+        categories: {
+          default: { appenders: ['stdout1'], level: 'ERROR' },
+          catA: { appenders: ['stdout1'], level: 'INFO' },
+          // no catA.catB, but should get created, with stdout1
+          'catA.catB.catC': { level: 'DEBUG' }, // should get stdout1
+        },
+      };
+
+      log4js.configure(config);
+
+      const childCategoryName = 'catA.catB.catC';
+      const childAppenders = categories.appendersForCategory(childCategoryName);
+
+      t.ok(childAppenders);
+      t.equal(childAppenders.length, 1, 'inherited 1 appender');
+      t.ok(
+        childAppenders.some((a) => a.label === 'stdout1'),
+        'inherited stdout1'
+      );
+
+      const firstParentCategoryName = 'catA.catB';
+      const firstParentAppenders = categories.appendersForCategory(
+        firstParentCategoryName
+      );
+
+      t.ok(firstParentAppenders, 'catA.catB got created implicitly');
+      t.equal(
+        firstParentAppenders.length,
+        1,
+        'created with 1 inherited appender'
+      );
+      t.ok(
+        firstParentAppenders.some((a) => a.label === 'stdout1'),
+        'inherited stdout1'
+      );
+
+      t.end();
+    }
+  );
+
+  batch.test('should deal gracefully with missing parent', (t) => {
+    const config = {
+      appenders: {
+        stdout1: { type: 'dummy-appender', label: 'stdout1' },
+        stdout2: { type: 'dummy-appender', label: 'stdout2' },
+      },
+      categories: {
+        default: { appenders: ['stdout1'], level: 'ERROR' },
+        // no catA nor catA.catB, but should get created, with default values
+        'catA.catB.catC': { appenders: ['stdout2'], level: 'DEBUG' }, // should get stdout2, DEBUG
+      },
+    };
+
+    log4js.configure(config);
+
+    const childCategoryName = 'catA.catB.catC';
+    const childAppenders = categories.appendersForCategory(childCategoryName);
+
+    t.ok(childAppenders);
+    t.equal(childAppenders.length, 1);
+    t.ok(childAppenders.some((a) => a.label === 'stdout2'));
+
+    t.end();
+  });
+
+  batch.test(
+    'should not get duplicate appenders if parent has the same one',
+    (t) => {
+      const config = {
+        appenders: {
+          stdout1: { type: 'dummy-appender', label: 'stdout1' },
+          stdout2: { type: 'dummy-appender', label: 'stdout2' },
+        },
+        categories: {
+          default: { appenders: ['stdout1'], level: 'ERROR' },
+          catA: { appenders: ['stdout1', 'stdout2'], level: 'INFO' },
+          'catA.catB': { appenders: ['stdout1'], level: 'DEBUG' },
+        },
+      };
+
+      log4js.configure(config);
+
+      const childCategoryName = 'catA.catB';
+      const childAppenders = categories.appendersForCategory(childCategoryName);
+
+      t.ok(childAppenders);
+      t.equal(childAppenders.length, 2, 'inherited 1 appender, no duplicates');
+      t.ok(
+        childAppenders.some((a) => a.label === 'stdout1'),
+        'still have stdout1'
+      );
+      t.ok(
+        childAppenders.some((a) => a.label === 'stdout2'),
+        'inherited stdout2'
+      );
+      t.end();
+    }
+  );
+
+  batch.test('inherit:false should disable inheritance', (t) => {
+    const config = {
+      appenders: {
+        stdout1: { type: 'dummy-appender', label: 'stdout1' },
+        stdout2: { type: 'dummy-appender', label: 'stdout2' },
+      },
+      categories: {
+        default: { appenders: ['stdout1'], level: 'ERROR' },
+        catA: { appenders: ['stdout1'], level: 'INFO' },
+        'catA.catB': { appenders: ['stdout2'], level: 'INFO', inherit: false }, // should not inherit from catA
+      },
+    };
+
+    log4js.configure(config);
+
+    const childCategoryName = 'catA.catB';
+    const childAppenders = categories.appendersForCategory(childCategoryName);
+
+    t.ok(childAppenders);
+    t.equal(childAppenders.length, 1, 'inherited no appender');
+    t.ok(
+      childAppenders.some((a) => a.label === 'stdout2'),
+      'kept stdout2'
+    );
+
+    t.end();
+  });
+
+  batch.test(
+    'inheritance should stop if direct parent has inherit off',
+    (t) => {
+      const config = {
+        appenders: {
+          stdout1: { type: 'dummy-appender', label: 'stdout1' },
+          stdout2: { type: 'dummy-appender', label: 'stdout2' },
+        },
+        categories: {
+          default: { appenders: ['stdout1'], level: 'ERROR' },
+          catA: { appenders: ['stdout1'], level: 'INFO' },
+          'catA.catB': {
+            appenders: ['stdout2'],
+            level: 'INFO',
+            inherit: false,
+          }, // should not inherit from catA
+          'catA.catB.catC': { level: 'DEBUG' }, // should inherit from catB only
+        },
+      };
+
+      log4js.configure(config);
+
+      const childCategoryName = 'catA.catB.catC';
+      const childAppenders = categories.appendersForCategory(childCategoryName);
+
+      t.ok(childAppenders);
+      t.equal(childAppenders.length, 1, 'inherited 1 appender');
+      t.ok(
+        childAppenders.some((a) => a.label === 'stdout2'),
+        'inherited stdout2'
+      );
+
+      const firstParentCategoryName = 'catA.catB';
+      const firstParentAppenders = categories.appendersForCategory(
+        firstParentCategoryName
+      );
+
+      t.ok(firstParentAppenders);
+      t.equal(firstParentAppenders.length, 1, 'did not inherit new appenders');
+      t.ok(
+        firstParentAppenders.some((a) => a.label === 'stdout2'),
+        'kept stdout2'
+      );
+
+      t.end();
+    }
+  );
+
+  batch.test('should inherit level when it is missing', (t) => {
+    const config = {
+      appenders: {
+        stdout1: { type: 'dummy-appender', label: 'stdout1' },
+        stdout2: { type: 'dummy-appender', label: 'stdout2' },
+      },
+      categories: {
+        default: { appenders: ['stdout1'], level: 'ERROR' },
+        catA: { appenders: ['stdout1'], level: 'INFO' },
+        // no catA.catB, but should get created, with stdout1, level INFO
+        'catA.catB.catC': {}, // should get stdout1, level INFO
+      },
+    };
+
+    log4js.configure(config);
+
+    const childCategoryName = 'catA.catB.catC';
+    const childLevel = categories.getLevelForCategory(childCategoryName);
+
+    t.equal(childLevel.levelStr, 'INFO', 'inherited level');
+
+    const firstParentCategoryName = 'catA.catB';
+    const firstParentLevel = categories.getLevelForCategory(
+      firstParentCategoryName
+    );
+
+    t.equal(
+      firstParentLevel.levelStr,
+      'INFO',
+      'generated parent category inherited level from its parent'
+    );
+
+    t.end();
+  });
+
+  batch.end();
+});
diff --git a/test/tap/configuration-test.js b/test/tap/configuration-test.js
index 9c84ebf2..36e40e4e 100644
--- a/test/tap/configuration-test.js
+++ b/test/tap/configuration-test.js
@@ -1,147 +1,108 @@
-'use strict';
-
-const test = require('tap').test;
-const sandbox = require('sandboxed-module');
-
-function makeTestAppender() {
-  return {
-    configure: function (config, options) {
-      this.configureCalled = true;
-      this.config = config;
-      this.options = options;
-      return this.appender();
-    },
-    appender: function () {
-      const self = this;
-      return function (logEvt) {
-        self.logEvt = logEvt;
-      };
-    }
-  };
-}
-
-test('log4js configure', (batch) => {
-  batch.test('when appenders specified by type', (t) => {
-    const testAppender = makeTestAppender();
-    const log4js = sandbox.require(
-      '../../lib/log4js',
-      {
-        singleOnly: true,
-        requires: {
-          './appenders/cheese': testAppender
-        }
-      }
-    );
-
-    log4js.configure(
-      {
-        appenders: [
-          { type: 'cheese', flavour: 'gouda' }
-        ]
-      },
-      { pants: 'yes' }
-    );
-    t.ok(testAppender.configureCalled, 'should load appender');
-    t.equal(testAppender.config.flavour, 'gouda', 'should pass config to appender');
-    t.equal(testAppender.options.pants, 'yes', 'should pass log4js options to appender');
-    t.end();
-  });
+const { test } = require('tap');
+const sandbox = require('@log4js-node/sandboxed-module');
+const realFS = require('fs');
 
-  batch.test('when core appender loaded via loadAppender', (t) => {
-    const testAppender = makeTestAppender();
-    const log4js = sandbox.require(
-      '../../lib/log4js',
-      {
-        singleOnly: true,
-        requires: { './appenders/cheese': testAppender }
-      }
-    );
-
-    log4js.loadAppender('cheese');
-
-    t.ok(log4js.appenders.cheese, 'should load appender from ../../lib/appenders');
-    t.type(log4js.appenderMakers.cheese, 'function', 'should add appender configure function to appenderMakers');
-    t.end();
-  });
+const modulePath = 'some/path/to/mylog4js.json';
+const pathsChecked = [];
 
-  batch.test('when appender in node_modules loaded via loadAppender', (t) => {
-    const testAppender = makeTestAppender();
-    const log4js = sandbox.require(
-      '../../lib/log4js',
-      {
-        singleOnly: true,
-        requires: { 'some/other/external': testAppender }
-      }
-    );
-
-    log4js.loadAppender('some/other/external');
-    t.ok(log4js.appenders['some/other/external'], 'should load appender via require');
-    t.type(
-      log4js.appenderMakers['some/other/external'], 'function',
-      'should add appender configure function to appenderMakers'
-    );
-    t.end();
-  });
+let fakeFS = {};
+let dependencies;
+let fileRead;
 
-  batch.test('when appender object loaded via loadAppender', (t) => {
-    const testAppender = makeTestAppender();
-    const log4js = sandbox.require('../../lib/log4js');
-
-    log4js.loadAppender('some/other/external', testAppender);
-
-    t.ok(log4js.appenders['some/other/external'], 'should load appender with provided object');
-    t.type(
-      log4js.appenderMakers['some/other/external'], 'function',
-      'should add appender configure function to appenderMakers'
-    );
-    t.end();
-  });
-
-  batch.test('when configuration file loaded via LOG4JS_CONFIG env variable', (t) => {
-    process.env.LOG4JS_CONFIG = 'some/path/to/mylog4js.json';
-    let fileRead = 0;
-    const modulePath = 'some/path/to/mylog4js.json';
-    const pathsChecked = [];
-    const mtime = new Date();
-
-    const fakeFS = {
+test('log4js configure', (batch) => {
+  batch.beforeEach((done) => {
+    fileRead = 0;
+
+    fakeFS = {
+      realpath: realFS.realpath, // fs-extra looks for this
+      ReadStream: realFS.ReadStream, // need to define these, because graceful-fs uses them
+      WriteStream: realFS.WriteStream,
+      read: realFS.read,
+      closeSync: () => {},
       config: {
-        appenders: [{ type: 'console', layout: { type: 'messagePassThrough' } }],
-        levels: { 'a-test': 'INFO' }
-      },
-      readdirSync: function (dir) {
-        return require('fs').readdirSync(dir);
+        appenders: {
+          console: {
+            type: 'console',
+            layout: { type: 'messagePassThrough' },
+          },
+        },
+        categories: {
+          default: {
+            appenders: ['console'],
+            level: 'INFO',
+          },
+        },
       },
-      readFileSync: function (file, encoding) {
+      readdirSync: (dir) => require('fs').readdirSync(dir),
+      readFileSync: (file, encoding) => {
         fileRead += 1;
-        t.type(file, 'string');
-        t.equal(file, modulePath);
-        t.equal(encoding, 'utf8');
+        batch.type(file, 'string');
+        batch.equal(file, modulePath);
+        batch.equal(encoding, 'utf8');
         return JSON.stringify(fakeFS.config);
       },
-      statSync: function (path) {
+      statSync: (path) => {
         pathsChecked.push(path);
         if (path === modulePath) {
-          return { mtime: mtime };
+          return { mtime: new Date() };
         }
         throw new Error('no such file');
-      }
+      },
     };
 
-    sandbox.require(
-      '../../lib/log4js',
-      {
-        requires: {
-          fs: fakeFS,
-        }
-      }
-    );
-
-    delete process.env.LOG4JS_CONFIG;
+    dependencies = {
+      requires: {
+        fs: fakeFS,
+      },
+    };
 
-    t.equal(fileRead, 1, 'should load the specified local config file');
-    t.end();
+    if (typeof done === 'function') {
+      done();
+    }
   });
 
+  batch.test(
+    'when configuration file loaded via LOG4JS_CONFIG env variable',
+    (t) => {
+      process.env.LOG4JS_CONFIG = 'some/path/to/mylog4js.json';
+
+      const log4js = sandbox.require('../../lib/log4js', dependencies);
+
+      t.notOk(log4js.isConfigured(), 'should not have configured');
+      log4js.getLogger('test-logger');
+      t.ok(log4js.isConfigured(), 'should be configured');
+
+      t.equal(fileRead, 1, 'should load the specified local config file');
+
+      delete process.env.LOG4JS_CONFIG;
+
+      t.end();
+    }
+  );
+
+  batch.test(
+    'when configuration is set via configure() method call, return the log4js object',
+    (t) => {
+      const log4js = sandbox
+        .require('../../lib/log4js', dependencies)
+        .configure(fakeFS.config);
+      t.type(
+        log4js,
+        'object',
+        'Configure method call should return the log4js object!'
+      );
+
+      const log = log4js.getLogger('daemon');
+      t.type(
+        log,
+        'object',
+        'log4js object returned by configure(...) should be able to create a logger object.'
+      );
+      t.type(log.info, 'function');
+
+      t.end();
+    }
+  );
+
   batch.end();
 });
diff --git a/test/tap/configuration-validation-test.js b/test/tap/configuration-validation-test.js
new file mode 100644
index 00000000..cd0ad06c
--- /dev/null
+++ b/test/tap/configuration-validation-test.js
@@ -0,0 +1,457 @@
+const { test } = require('tap');
+const util = require('util');
+const path = require('path');
+const sandbox = require('@log4js-node/sandboxed-module');
+const debug = require('debug')('log4js:test.configuration-validation');
+const deepFreeze = require('deep-freeze');
+const fs = require('fs');
+const log4js = require('../../lib/log4js');
+const configuration = require('../../lib/configuration');
+
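+// removes the given file(s) created during a test, ignoring any that do not exist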
+const removeFiles = async (filenames) => {
+  if (!Array.isArray(filenames)) filenames = [filenames];
+  const promises = filenames.map((filename) => fs.promises.unlink(filename));
+  await Promise.allSettled(promises);
+};
+
+const testAppender = (label, result) => ({
+  configure(config, layouts, findAppender) {
+    debug(
+      `testAppender(${label}).configure called, with config: ${util.inspect(
+        config
+      )}`
+    );
+    result.configureCalled = true;
+    result.type = config.type;
+    result.label = label;
+    result.config = config;
+    result.layouts = layouts;
+    result.findAppender = findAppender;
+    return {};
+  },
+});
+
+test('log4js configuration validation', (batch) => {
+  batch.test('should give error if config is just plain silly', (t) => {
+    [null, undefined, '', ' ', []].forEach((config) => {
+      const expectedError = new Error(
+        `Problem with log4js configuration: (${util.inspect(
+          config
+        )}) - must be an object.`
+      );
+      t.throws(() => configuration.configure(config), expectedError);
+    });
+
+    t.end();
+  });
+
+  batch.test('should give error if config is an empty object', (t) => {
+    t.throws(
+      () => log4js.configure({}),
+      '- must have a property "appenders" of type object.'
+    );
+    t.end();
+  });
+
+  batch.test('should give error if config has no appenders', (t) => {
+    t.throws(
+      () => log4js.configure({ categories: {} }),
+      '- must have a property "appenders" of type object.'
+    );
+    t.end();
+  });
+
+  batch.test('should give error if config has no categories', (t) => {
+    t.throws(
+      () => log4js.configure({ appenders: { out: { type: 'stdout' } } }),
+      '- must have a property "categories" of type object.'
+    );
+    t.end();
+  });
+
+  batch.test('should give error if appenders is not an object', (t) => {
+    t.throws(
+      () => log4js.configure({ appenders: [], categories: [] }),
+      '- must have a property "appenders" of type object.'
+    );
+    t.end();
+  });
+
+  batch.test('should give error if appenders are not all valid', (t) => {
+    t.throws(
+      () =>
+        log4js.configure({ appenders: { thing: 'cheese' }, categories: {} }),
+      '- appender "thing" is not valid (must be an object with property "type")'
+    );
+    t.end();
+  });
+
+  batch.test('should require at least one appender', (t) => {
+    t.throws(
+      () => log4js.configure({ appenders: {}, categories: {} }),
+      '- must define at least one appender.'
+    );
+    t.end();
+  });
+
+  batch.test('should give error if categories are not all valid', (t) => {
+    t.throws(
+      () =>
+        log4js.configure({
+          appenders: { stdout: { type: 'stdout' } },
+          categories: { thing: 'cheese' },
+        }),
+      '- category "thing" is not valid (must be an object with properties "appenders" and "level")'
+    );
+    t.end();
+  });
+
+  batch.test('should give error if default category not defined', (t) => {
+    t.throws(
+      () =>
+        log4js.configure({
+          appenders: { stdout: { type: 'stdout' } },
+          categories: { thing: { appenders: ['stdout'], level: 'ERROR' } },
+        }),
+      '- must define a "default" category.'
+    );
+    t.end();
+  });
+
+  batch.test('should require at least one category', (t) => {
+    t.throws(
+      () =>
+        log4js.configure({
+          appenders: { stdout: { type: 'stdout' } },
+          categories: {},
+        }),
+      '- must define at least one category.'
+    );
+    t.end();
+  });
+
+  batch.test('should give error if category.appenders is not an array', (t) => {
+    t.throws(
+      () =>
+        log4js.configure({
+          appenders: { stdout: { type: 'stdout' } },
+          categories: { thing: { appenders: {}, level: 'ERROR' } },
+        }),
+      '- category "thing" is not valid (appenders must be an array of appender names)'
+    );
+    t.end();
+  });
+
+  batch.test('should give error if category.appenders is empty', (t) => {
+    t.throws(
+      () =>
+        log4js.configure({
+          appenders: { stdout: { type: 'stdout' } },
+          categories: { thing: { appenders: [], level: 'ERROR' } },
+        }),
+      '- category "thing" is not valid (appenders must contain at least one appender name)'
+    );
+    t.end();
+  });
+
+  batch.test(
+    'should give error if categories do not refer to valid appenders',
+    (t) => {
+      t.throws(
+        () =>
+          log4js.configure({
+            appenders: { stdout: { type: 'stdout' } },
+            categories: { thing: { appenders: ['cheese'], level: 'ERROR' } },
+          }),
+        '- category "thing" is not valid (appender "cheese" is not defined)'
+      );
+      t.end();
+    }
+  );
+
+  batch.test('should give error if category level is not valid', (t) => {
+    t.throws(
+      () =>
+        log4js.configure({
+          appenders: { stdout: { type: 'stdout' } },
+          categories: { default: { appenders: ['stdout'], level: 'Biscuits' } },
+        }),
+      '- category "default" is not valid (level "Biscuits" not recognised; valid levels are ALL, TRACE'
+    );
+    t.end();
+  });
+
+  batch.test(
+    'should give error if category enableCallStack is not valid',
+    (t) => {
+      t.throws(
+        () =>
+          log4js.configure({
+            appenders: { stdout: { type: 'stdout' } },
+            categories: {
+              default: {
+                appenders: ['stdout'],
+                level: 'Debug',
+                enableCallStack: '123',
+              },
+            },
+          }),
+        '- category "default" is not valid (enableCallStack must be boolean type)'
+      );
+      t.end();
+    }
+  );
+
+  batch.test('should give error if appender type cannot be found', (t) => {
+    t.throws(
+      () =>
+        log4js.configure({
+          appenders: { thing: { type: 'cheese' } },
+          categories: { default: { appenders: ['thing'], level: 'ERROR' } },
+        }),
+      '- appender "thing" is not valid (type "cheese" could not be found)'
+    );
+    t.end();
+  });
+
+  batch.test('should create appender instances', (t) => {
+    const thing = {};
+    const sandboxedLog4js = sandbox.require('../../lib/log4js', {
+      requires: {
+        cheese: testAppender('cheesy', thing),
+      },
+      ignoreMissing: true,
+    });
+
+    sandboxedLog4js.configure({
+      appenders: { thing: { type: 'cheese' } },
+      categories: { default: { appenders: ['thing'], level: 'ERROR' } },
+    });
+
+    t.ok(thing.configureCalled);
+    t.equal(thing.type, 'cheese');
+    t.end();
+  });
+
+  batch.test(
+    'should use provided appender instance if instance provided',
+    (t) => {
+      const thing = {};
+      const cheese = testAppender('cheesy', thing);
+      const sandboxedLog4js = sandbox.require('../../lib/log4js', {
+        ignoreMissing: true,
+      });
+
+      sandboxedLog4js.configure({
+        appenders: { thing: { type: cheese } },
+        categories: { default: { appenders: ['thing'], level: 'ERROR' } },
+      });
+
+      t.ok(thing.configureCalled);
+      t.same(thing.type, cheese);
+      t.end();
+    }
+  );
+
+  batch.test('should not throw error if configure object is frozen', (t) => {
+    const testFile = 'test/tap/freeze-date-file-test';
+    t.teardown(async () => {
+      await removeFiles(testFile);
+    });
+    t.doesNotThrow(() =>
+      log4js.configure(
+        deepFreeze({
+          appenders: {
+            dateFile: {
+              type: 'dateFile',
+              filename: testFile,
+              alwaysIncludePattern: false,
+            },
+          },
+          categories: {
+            default: { appenders: ['dateFile'], level: log4js.levels.ERROR },
+          },
+        })
+      )
+    );
+    log4js.shutdown(() => {
+      t.end();
+    });
+  });
+
+  batch.test('should load appenders from core first', (t) => {
+    const result = {};
+    const sandboxedLog4js = sandbox.require('../../lib/log4js', {
+      requires: {
+        './cheese': testAppender('correct', result),
+        cheese: testAppender('wrong', result),
+      },
+      ignoreMissing: true,
+    });
+
+    sandboxedLog4js.configure({
+      appenders: { thing: { type: 'cheese' } },
+      categories: { default: { appenders: ['thing'], level: 'ERROR' } },
+    });
+
+    t.ok(result.configureCalled);
+    t.equal(result.type, 'cheese');
+    t.equal(result.label, 'correct');
+    t.end();
+  });
+
+  batch.test(
+    'should load appenders relative to main file if not in core, or node_modules',
+    (t) => {
+      const result = {};
+      const mainPath = path.dirname(require.main.filename);
+      const sandboxConfig = {
+        ignoreMissing: true,
+        requires: {},
+      };
+      sandboxConfig.requires[`${mainPath}/cheese`] = testAppender(
+        'correct',
+        result
+      );
+      // add this one, because when we're running coverage the main path is a bit different
+      sandboxConfig.requires[
+        `${path.join(mainPath, '../../node_modules/nyc/bin/cheese')}`
+      ] = testAppender('correct', result);
+      // in tap v15, the main path is at root of log4js (run `DEBUG=log4js:appenders npm test > /dev/null` to check)
+      sandboxConfig.requires[`${path.join(mainPath, '../../cheese')}`] =
+        testAppender('correct', result);
+      // in node v6, there's an extra layer of node modules for some reason, so add this one to work around it
+      sandboxConfig.requires[
+        `${path.join(
+          mainPath,
+          '../../node_modules/tap/node_modules/nyc/bin/cheese'
+        )}`
+      ] = testAppender('correct', result);
+
+      const sandboxedLog4js = sandbox.require(
+        '../../lib/log4js',
+        sandboxConfig
+      );
+
+      sandboxedLog4js.configure({
+        appenders: { thing: { type: 'cheese' } },
+        categories: { default: { appenders: ['thing'], level: 'ERROR' } },
+      });
+
+      t.ok(result.configureCalled);
+      t.equal(result.type, 'cheese');
+      t.equal(result.label, 'correct');
+      t.end();
+    }
+  );
+
+  batch.test(
+    'should load appenders relative to process.cwd if not found in core, node_modules',
+    (t) => {
+      const result = {};
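+      // fake process.cwd() so the appender loader resolves 'cheese' relative to /var/lib/cheese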
+      const fakeProcess = new Proxy(process, {
+        get(target, key) {
+          if (key === 'cwd') {
+            return () => '/var/lib/cheese';
+          }
+
+          return target[key];
+        },
+      });
+
+      // windows file paths are different to unix, so let's make this work for both.
+      const requires = {};
+      requires[path.join('/var', 'lib', 'cheese', 'cheese')] = testAppender(
+        'correct',
+        result
+      );
+
+      const sandboxedLog4js = sandbox.require('../../lib/log4js', {
+        ignoreMissing: true,
+        requires,
+        globals: {
+          process: fakeProcess,
+        },
+      });
+
+      sandboxedLog4js.configure({
+        appenders: { thing: { type: 'cheese' } },
+        categories: { default: { appenders: ['thing'], level: 'ERROR' } },
+      });
+
+      t.ok(result.configureCalled);
+      t.equal(result.type, 'cheese');
+      t.equal(result.label, 'correct');
+      t.end();
+    }
+  );
+
+  batch.test('should pass config, layout, findAppender to appenders', (t) => {
+    const result = {};
+    const sandboxedLog4js = sandbox.require('../../lib/log4js', {
+      ignoreMissing: true,
+      requires: {
+        cheese: testAppender('cheesy', result),
+        notCheese: testAppender('notCheesy', {}),
+      },
+    });
+
+    sandboxedLog4js.configure({
+      appenders: {
+        thing: { type: 'cheese', foo: 'bar' },
+        thing2: { type: 'notCheese' },
+      },
+      categories: { default: { appenders: ['thing'], level: 'ERROR' } },
+    });
+
+    t.ok(result.configureCalled);
+    t.equal(result.type, 'cheese');
+    t.equal(result.config.foo, 'bar');
+    t.type(result.layouts, 'object');
+    t.type(result.layouts.basicLayout, 'function');
+    t.type(result.findAppender, 'function');
+    t.type(result.findAppender('thing2'), 'object');
+    t.end();
+  });
+
+  batch.test(
+    'should not give error if level object is used instead of string',
+    (t) => {
+      t.doesNotThrow(() =>
+        log4js.configure({
+          appenders: { thing: { type: 'stdout' } },
+          categories: {
+            default: { appenders: ['thing'], level: log4js.levels.ERROR },
+          },
+        })
+      );
+      t.end();
+    }
+  );
+
+  batch.test(
+    'should not create appender instance if not used in categories',
+    (t) => {
+      const used = {};
+      const notUsed = {};
+      const sandboxedLog4js = sandbox.require('../../lib/log4js', {
+        requires: {
+          cat: testAppender('meow', used),
+          dog: testAppender('woof', notUsed),
+        },
+        ignoreMissing: true,
+      });
+
+      sandboxedLog4js.configure({
+        appenders: { used: { type: 'cat' }, notUsed: { type: 'dog' } },
+        categories: { default: { appenders: ['used'], level: 'ERROR' } },
+      });
+
+      t.ok(used.configureCalled);
+      t.notOk(notUsed.configureCalled);
+      t.end();
+    }
+  );
+
+  batch.end();
+});
diff --git a/test/tap/configureNoLevels-test.js b/test/tap/configureNoLevels-test.js
deleted file mode 100644
index 0c5988b4..00000000
--- a/test/tap/configureNoLevels-test.js
+++ /dev/null
@@ -1,38 +0,0 @@
-'use strict';
-
-// This test shows unexpected behaviour for log4js.configure() in log4js-node@0.4.3 and earlier:
-// 1) log4js.configure(), log4js.configure(null),
-// log4js.configure({}), log4js.configure()
-// all set all loggers levels to trace, even if they were previously set to something else.
-// 2) log4js.configure({levels:{}}), log4js.configure({levels: {foo:
-// bar}}) leaves previously set logger levels intact.
-//
-const test = require('tap').test;
-
-// setup the configurations we want to test
-const configs = [
-  undefined,
-  null,
-  {},
-  { foo: 'bar' },
-  { levels: null },
-  { levels: {} },
-  { levels: { foo: 'bar' } },
-  { levels: { A: 'INFO' } }
-];
-
-test('log4js dodgy config', (batch) => {
-  const log4js = require('../../lib/log4js');
-  const logger = log4js.getLogger('test-logger');
-  const error = log4js.levels.ERROR;
-  logger.setLevel('ERROR');
-
-  configs.forEach((config) => {
-    batch.test(`config of ${config} should not change logger level`, (t) => {
-      log4js.configure(config);
-      t.equal(logger.level, error);
-      t.end();
-    });
-  });
-  batch.end();
-});
diff --git a/test/tap/connect-context-test.js b/test/tap/connect-context-test.js
new file mode 100644
index 00000000..58d440e8
--- /dev/null
+++ b/test/tap/connect-context-test.js
@@ -0,0 +1,131 @@
+/* eslint max-classes-per-file: ["error", 2] */
+
+const { test } = require('tap');
+const EE = require('events').EventEmitter;
+const levels = require('../../lib/levels');
+
+class MockLogger {
+  constructor() {
+    this.level = levels.TRACE;
+    this.context = {};
+    this.contexts = [];
+  }
+
+  log() {
+    this.contexts.push(Object.assign({}, this.context)); // eslint-disable-line prefer-object-spread
+  }
+
+  isLevelEnabled(level) {
+    return level.isGreaterThanOrEqualTo(this.level);
+  }
+
+  addContext(key, value) {
+    this.context[key] = value;
+  }
+
+  removeContext(key) {
+    delete this.context[key];
+  }
+}
+
+function MockRequest(remoteAddr, method, originalUrl) {
+  this.socket = { remoteAddress: remoteAddr };
+  this.originalUrl = originalUrl;
+  this.method = method;
+  this.httpVersionMajor = '5';
+  this.httpVersionMinor = '0';
+  this.headers = {};
+}
+
+class MockResponse extends EE {
+  constructor(code) {
+    super();
+    this.statusCode = code;
+    this.cachedHeaders = {};
+  }
+
+  end() {
+    this.emit('finish');
+  }
+
+  setHeader(key, value) {
+    this.cachedHeaders[key.toLowerCase()] = value;
+  }
+
+  getHeader(key) {
+    return this.cachedHeaders[key.toLowerCase()];
+  }
+
+  writeHead(code /* , headers */) {
+    this.statusCode = code;
+    return this;
+  }
+}
+
+test('log4js connect logger', (batch) => {
+  const clm = require('../../lib/connect-logger');
+
+  batch.test('with context config', (t) => {
+    const ml = new MockLogger();
+    const cl = clm(ml, { context: true });
+
+    t.beforeEach((done) => {
+      ml.contexts = [];
+      if (typeof done === 'function') {
+        done();
+      }
+    });
+
+    t.test('response should be included in context', (assert) => {
+      const { contexts } = ml;
+      const req = new MockRequest(
+        'my.remote.addr',
+        'GET',
+        'http://url/hoge.png'
+      ); // not gif
+      const res = new MockResponse(200);
+      cl(req, res, () => {});
+      res.end('chunk', 'encoding');
+
+      assert.type(contexts, 'Array');
+      assert.equal(contexts.length, 1);
+      assert.type(contexts[0].res, MockResponse);
+      assert.end();
+    });
+
+    t.end();
+  });
+
+  batch.test('without context config', (t) => {
+    const ml = new MockLogger();
+    const cl = clm(ml, {});
+
+    t.beforeEach((done) => {
+      ml.contexts = [];
+      if (typeof done === 'function') {
+        done();
+      }
+    });
+
+    t.test('response should not be included in context', (assert) => {
+      const { contexts } = ml;
+      const req = new MockRequest(
+        'my.remote.addr',
+        'GET',
+        'http://url/hoge.png'
+      ); // not gif
+      const res = new MockResponse(200);
+      cl(req, res, () => {});
+      res.end('chunk', 'encoding');
+
+      assert.type(contexts, 'Array');
+      assert.equal(contexts.length, 1);
+      assert.type(contexts[0].res, undefined);
+      assert.end();
+    });
+
+    t.end();
+  });
+
+  batch.end();
+});
diff --git a/test/tap/connect-logger-test.js b/test/tap/connect-logger-test.js
index 5c61b99e..48dc4879 100644
--- a/test/tap/connect-logger-test.js
+++ b/test/tap/connect-logger-test.js
@@ -1,8 +1,6 @@
-/* jshint maxparams:7 */
+/* eslint max-classes-per-file: ["error", 2] */
 
-'use strict';
-
-const test = require('tap').test;
+const { test } = require('tap');
 const EE = require('events').EventEmitter;
 const levels = require('../../lib/levels');
 
@@ -11,7 +9,7 @@ class MockLogger {
     this.level = levels.TRACE;
     this.messages = [];
     this.log = function (level, message) {
-      this.messages.push({ level: level, message: message });
+      this.messages.push({ level, message });
     };
     this.isLevelEnabled = function (level) {
       return level.isGreaterThanOrEqualTo(this.level);
@@ -19,14 +17,21 @@ class MockLogger {
   }
 }
 
-function MockRequest(remoteAddr, method, originalUrl, headers) {
+function MockRequest(remoteAddr, method, originalUrl, headers, url, custom) {
   this.socket = { remoteAddress: remoteAddr };
   this.originalUrl = originalUrl;
+  this.url = url;
   this.method = method;
   this.httpVersionMajor = '5';
   this.httpVersionMinor = '0';
   this.headers = headers || {};
 
+  if (custom) {
+    for (const key of Object.keys(custom)) {
+      this[key] = custom[key];
+    }
+  }
+
   const self = this;
   Object.keys(this.headers).forEach((key) => {
     self.headers[key.toLowerCase()] = self.headers[key];
@@ -36,60 +41,92 @@ function MockRequest(remoteAddr, method, originalUrl, headers) {
 class MockResponse extends EE {
   constructor() {
     super();
-    const r = this;
-    this.end = function () {
-      r.emit('finish');
-    };
+    this.cachedHeaders = {};
+  }
 
-    this.writeHead = function (code, headers) {
-      this.statusCode = code;
-      this._headers = headers;
-    };
+  end() {
+    this.emit('finish');
+  }
+
+  setHeader(key, value) {
+    this.cachedHeaders[key.toLowerCase()] = value;
+  }
+
+  getHeader(key) {
+    return this.cachedHeaders[key.toLowerCase()];
+  }
+
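+  // returning `this` lets tests chain res.writeHead(code).end(...)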
+  writeHead(code /* , headers */) {
+    this.statusCode = code;
+    return this;
   }
 }
 
-function request(cl, method, url, code, reqHeaders, resHeaders) {
-  const req = new MockRequest('my.remote.addr', method, url, reqHeaders);
+function request(
+  cl,
+  method,
+  originalUrl,
+  code,
+  reqHeaders,
+  resHeaders,
+  next,
+  url,
+  custom = undefined
+) {
+  const req = new MockRequest(
+    'my.remote.addr',
+    method,
+    originalUrl,
+    reqHeaders,
+    url,
+    custom
+  );
   const res = new MockResponse();
-  cl(req, res, () => {
-  });
-  res.writeHead(code, resHeaders);
-  res.end('chunk', 'encoding');
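+  // if a `next` handler is provided (e.g. a second connect-logger), bind it to
+  // the same req/res so tests can check that chained loggers only log once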
+  if (next) {
+    next = next.bind(null, req, res, () => {});
+  } else {
+    next = () => {};
+  }
+  cl(req, res, next);
+  res.writeHead(code, resHeaders).end('chunk', 'encoding');
 }
 
 test('log4js connect logger', (batch) => {
   const clm = require('../../lib/connect-logger');
   batch.test('getConnectLoggerModule', (t) => {
-    t.type(clm, 'object', 'should return a connect logger factory');
+    t.type(clm, 'function', 'should return a connect logger factory');
 
-    t.test('should take a log4js logger and return a "connect logger"', (assert) => {
-      const ml = new MockLogger();
-      const cl = clm.connectLogger(ml);
+    t.test(
+      'should take a log4js logger and return a "connect logger"',
+      (assert) => {
+        const ml = new MockLogger();
+        const cl = clm(ml);
 
-      assert.type(cl, 'function');
-      assert.end();
-    });
+        assert.type(cl, 'function');
+        assert.end();
+      }
+    );
 
     t.test('log events', (assert) => {
       const ml = new MockLogger();
-      const cl = clm.connectLogger(ml);
+      const cl = clm(ml);
       request(cl, 'GET', 'http://url', 200);
 
-      const messages = ml.messages;
+      const { messages } = ml;
       assert.type(messages, 'Array');
       assert.equal(messages.length, 1);
       assert.ok(levels.INFO.isEqualTo(messages[0].level));
-      assert.include(messages[0].message, 'GET');
-      assert.include(messages[0].message, 'http://url');
-      assert.include(messages[0].message, 'my.remote.addr');
-      assert.include(messages[0].message, '200');
+      assert.match(messages[0].message, 'GET');
+      assert.match(messages[0].message, 'http://url');
+      assert.match(messages[0].message, 'my.remote.addr');
+      assert.match(messages[0].message, '200');
       assert.end();
     });
 
     t.test('log events with level below logging level', (assert) => {
       const ml = new MockLogger();
       ml.level = levels.FATAL;
-      const cl = clm.connectLogger(ml);
+      const cl = clm(ml);
       request(cl, 'GET', 'http://url', 200);
 
       assert.type(ml.messages, 'Array');
@@ -100,26 +137,42 @@ test('log4js connect logger', (batch) => {
     t.test('log events with non-default level and custom format', (assert) => {
       const ml = new MockLogger();
       ml.level = levels.INFO;
-      const cl = clm.connectLogger(ml, { level: levels.INFO, format: ':method :url' });
+      const cl = clm(ml, { level: levels.WARN, format: ':method :url' });
       request(cl, 'GET', 'http://url', 200);
 
-      const messages = ml.messages;
+      const { messages } = ml;
       assert.type(messages, Array);
       assert.equal(messages.length, 1);
-      assert.ok(levels.INFO.isEqualTo(messages[0].level));
+      assert.ok(levels.WARN.isEqualTo(messages[0].level));
       assert.equal(messages[0].message, 'GET http://url');
       assert.end();
     });
+
+    t.test('adding multiple loggers should only log once', (assert) => {
+      const ml = new MockLogger();
+      ml.level = levels.INFO;
+      const cl = clm(ml, { level: levels.WARN, format: ':method :url' });
+      const nextLogger = clm(ml, { level: levels.INFO, format: ':method' });
+      request(cl, 'GET', 'http://url', 200, null, null, nextLogger);
+
+      const { messages } = ml;
+      assert.type(messages, Array);
+      assert.equal(messages.length, 1);
+      assert.ok(levels.WARN.isEqualTo(messages[0].level));
+      assert.equal(messages[0].message, 'GET http://url');
+
+      assert.end();
+    });
     t.end();
   });
 
   batch.test('logger with options as string', (t) => {
     const ml = new MockLogger();
     ml.level = levels.INFO;
-    const cl = clm.connectLogger(ml, ':method :url');
+    const cl = clm(ml, ':method :url');
     request(cl, 'POST', 'http://meh', 200);
 
-    const messages = ml.messages;
+    const { messages } = ml;
     t.equal(messages[0].message, 'POST http://meh');
     t.end();
   });
@@ -127,14 +180,14 @@ test('log4js connect logger', (batch) => {
   batch.test('auto log levels', (t) => {
     const ml = new MockLogger();
     ml.level = levels.INFO;
-    const cl = clm.connectLogger(ml, { level: 'auto', format: ':method :url' });
+    const cl = clm(ml, { level: 'auto', format: ':method :url' });
     request(cl, 'GET', 'http://meh', 200);
     request(cl, 'GET', 'http://meh', 201);
     request(cl, 'GET', 'http://meh', 302);
     request(cl, 'GET', 'http://meh', 404);
     request(cl, 'GET', 'http://meh', 500);
 
-    const messages = ml.messages;
+    const { messages } = ml;
     t.test('should use INFO for 2xx', (assert) => {
       assert.ok(levels.INFO.isEqualTo(messages[0].level));
       assert.ok(levels.INFO.isEqualTo(messages[1].level));
@@ -158,25 +211,94 @@ test('log4js connect logger', (batch) => {
     t.end();
   });
 
+  batch.test('logger with status code rules applied', (t) => {
+    const ml = new MockLogger();
+    ml.level = levels.DEBUG;
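+    // expected mapping: 201/304 hit the explicit `codes` rule (DEBUG), other 2xx
+    // hit the 200-299 range (DEBUG), 3xx hit the 300-399 range (INFO), and
+    // 4xx/5xx fall back to the normal 'auto' behaviour (ERROR)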
+    const clr = [
+      { codes: [201, 304], level: levels.DEBUG.toString() },
+      { from: 200, to: 299, level: levels.DEBUG.toString() },
+      { from: 300, to: 399, level: levels.INFO.toString() },
+    ];
+    const cl = clm(ml, {
+      level: 'auto',
+      format: ':method :url',
+      statusRules: clr,
+    });
+    request(cl, 'GET', 'http://meh', 200);
+    request(cl, 'GET', 'http://meh', 201);
+    request(cl, 'GET', 'http://meh', 302);
+    request(cl, 'GET', 'http://meh', 304);
+    request(cl, 'GET', 'http://meh', 404);
+    request(cl, 'GET', 'http://meh', 500);
+
+    const { messages } = ml;
+    t.test('should use DEBUG for 2xx', (assert) => {
+      assert.ok(levels.DEBUG.isEqualTo(messages[0].level));
+      assert.ok(levels.DEBUG.isEqualTo(messages[1].level));
+      assert.end();
+    });
+
+    t.test('should use INFO for 3xx, DEBUG for 304', (assert) => {
+      assert.ok(levels.INFO.isEqualTo(messages[2].level));
+      assert.ok(levels.DEBUG.isEqualTo(messages[3].level));
+      assert.end();
+    });
+
+    t.test('should use ERROR for 4xx', (assert) => {
+      assert.ok(levels.ERROR.isEqualTo(messages[4].level));
+      assert.end();
+    });
+
+    t.test('should use ERROR for 5xx', (assert) => {
+      assert.ok(levels.ERROR.isEqualTo(messages[5].level));
+      assert.end();
+    });
+    t.end();
+  });
+
   batch.test('format using a function', (t) => {
     const ml = new MockLogger();
     ml.level = levels.INFO;
-    const cl = clm.connectLogger(ml, () => 'I was called');
+    const cl = clm(ml, () => 'I was called');
     request(cl, 'GET', 'http://blah', 200);
 
     t.equal(ml.messages[0].message, 'I was called');
     t.end();
   });
 
-  batch.test('format that includes request headers', (t) => {
+  batch.test('format using a function that also uses tokens', (t) => {
     const ml = new MockLogger();
     ml.level = levels.INFO;
-    const cl = clm.connectLogger(ml, ':req[Content-Type]');
-    request(
-      cl,
-      'GET', 'http://blah', 200,
-      { 'Content-Type': 'application/json' }
+    const cl = clm(
+      ml,
+      (req, res, tokenReplacer) => `${req.method} ${tokenReplacer(':status')}`
     );
+    request(cl, 'GET', 'http://blah', 200);
+
+    t.equal(ml.messages[0].message, 'GET 200');
+    t.end();
+  });
+
+  batch.test(
+    'format using a function, but do not log anything if the function returns nothing',
+    (t) => {
+      const ml = new MockLogger();
+      ml.level = levels.INFO;
+      const cl = clm(ml, () => null);
+      request(cl, 'GET', 'http://blah', 200);
+
+      t.equal(ml.messages.length, 0);
+      t.end();
+    }
+  );
+
+  batch.test('format that includes request headers', (t) => {
+    const ml = new MockLogger();
+    ml.level = levels.INFO;
+    const cl = clm(ml, ':req[Content-Type]');
+    request(cl, 'GET', 'http://blah', 200, {
+      'Content-Type': 'application/json',
+    });
 
     t.equal(ml.messages[0].message, 'application/json');
     t.end();
@@ -185,29 +307,36 @@ test('log4js connect logger', (batch) => {
   batch.test('format that includes response headers', (t) => {
     const ml = new MockLogger();
     ml.level = levels.INFO;
-    const cl = clm.connectLogger(ml, ':res[Content-Type]');
-    request(
-      cl,
-      'GET', 'http://blah', 200,
-      null,
-      { 'Content-Type': 'application/cheese' }
-    );
+    const cl = clm(ml, ':res[Content-Type]');
+    request(cl, 'GET', 'http://blah', 200, null, {
+      'Content-Type': 'application/cheese',
+    });
 
     t.equal(ml.messages[0].message, 'application/cheese');
     t.end();
   });
 
+  batch.test('url token should check originalUrl and url', (t) => {
+    const ml = new MockLogger();
+    const cl = clm(ml, ':url');
+    request(cl, 'GET', null, 200, null, null, null, 'http://cheese');
+
+    t.equal(ml.messages[0].message, 'http://cheese');
+    t.end();
+  });
+
   batch.test('log events with custom token', (t) => {
     const ml = new MockLogger();
     ml.level = levels.INFO;
-    const cl = clm.connectLogger(ml, {
+    const cl = clm(ml, {
       level: levels.INFO,
       format: ':method :url :custom_string',
       tokens: [
         {
-          token: ':custom_string', replacement: 'fooBAR'
-        }
-      ]
+          token: ':custom_string',
+          replacement: 'fooBAR',
+        },
+      ],
     });
     request(cl, 'GET', 'http://url', 200);
 
@@ -221,14 +350,15 @@ test('log4js connect logger', (batch) => {
   batch.test('log events with custom override token', (t) => {
     const ml = new MockLogger();
     ml.level = levels.INFO;
-    const cl = clm.connectLogger(ml, {
+    const cl = clm(ml, {
       level: levels.INFO,
       format: ':method :url :date',
       tokens: [
         {
-          token: ':date', replacement: '20150310'
-        }
-      ]
+          token: ':date',
+          replacement: '20150310',
+        },
+      ],
     });
     request(cl, 'GET', 'http://url', 200);
 
@@ -239,5 +369,73 @@ test('log4js connect logger', (batch) => {
     t.end();
   });
 
+  batch.test('log events with custom format', (t) => {
+    const ml = new MockLogger();
+    const body = { say: 'hi!' };
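+    // `body` is passed via the `custom` argument of request() below, which copies
+    // it onto the mock request so the format function can read req.body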
+    ml.level = levels.INFO;
+    const cl = clm(ml, {
+      level: levels.INFO,
+      format: (req, res, format) =>
+        format(`:method :url ${JSON.stringify(req.body)}`),
+    });
+    request(
+      cl,
+      'POST',
+      'http://url',
+      200,
+      { 'Content-Type': 'application/json' },
+      null,
+      null,
+      null,
+      { body }
+    );
+
+    t.ok(levels.INFO.isEqualTo(ml.messages[0].level));
+    t.equal(ml.messages[0].message, `POST http://url ${JSON.stringify(body)}`);
+    t.end();
+  });
+
+  batch.test(
+    'handle weird old node versions where socket contains socket',
+    (t) => {
+      const ml = new MockLogger();
+      const cl = clm(ml, ':remote-addr');
+      const req = new MockRequest(null, 'GET', 'http://blah');
+      req.socket = { socket: { remoteAddress: 'this is weird' } };
+
+      const res = new MockResponse();
+      cl(req, res, () => {});
+      res.writeHead(200, {});
+      res.end('chunk', 'encoding');
+
+      t.equal(ml.messages[0].message, 'this is weird');
+      t.end();
+    }
+  );
+
+  batch.test(
+    'logs as soon as any of the events end/finish/error/close fires (only once)',
+    (t) => {
+      const ml = new MockLogger();
+      const cl = clm(ml, ':remote-addr');
+      const req = new MockRequest(null, 'GET', 'http://blah');
+      req.socket = { socket: { remoteAddress: 'this is weird' } };
+
+      const res = new MockResponse();
+      cl(req, res, () => {});
+      res.writeHead(200, {});
+
+      t.equal(ml.messages.length, 0);
+      res.emit('end');
+      res.emit('finish');
+      res.emit('error');
+      res.emit('close');
+      t.equal(ml.messages.length, 1);
+
+      t.equal(ml.messages[0].message, 'this is weird');
+      t.end();
+    }
+  );
+
   batch.end();
 });
diff --git a/test/tap/connect-nolog-test.js b/test/tap/connect-nolog-test.js
index 8d3370da..1c605e44 100644
--- a/test/tap/connect-nolog-test.js
+++ b/test/tap/connect-nolog-test.js
@@ -1,6 +1,6 @@
-'use strict';
+/* eslint max-classes-per-file: ["error", 2] */
 
-const test = require('tap').test;
+const { test } = require('tap');
 const EE = require('events').EventEmitter;
 const levels = require('../../lib/levels');
 
@@ -10,7 +10,7 @@ class MockLogger {
     this.level = levels.TRACE;
 
     this.log = function (level, message) {
-      this.messages.push({ level: level, message: message });
+      this.messages.push({ level, message });
     };
 
     this.isLevelEnabled = function (level) {
@@ -29,14 +29,27 @@ function MockRequest(remoteAddr, method, originalUrl) {
 }
 
 class MockResponse extends EE {
-  constructor(statusCode) {
+  constructor(code) {
     super();
-    const r = this;
-    this.statusCode = statusCode;
+    this.statusCode = code;
+    this.cachedHeaders = {};
+  }
 
-    this.end = function () {
-      r.emit('finish');
-    };
+  end() {
+    this.emit('finish');
+  }
+
+  setHeader(key, value) {
+    this.cachedHeaders[key.toLowerCase()] = value;
+  }
+
+  getHeader(key) {
+    return this.cachedHeaders[key.toLowerCase()];
+  }
+
+  writeHead(code /* , headers */) {
+    this.statusCode = code;
+    return this;
   }
 }
 
@@ -45,30 +58,43 @@ test('log4js connect logger', (batch) => {
 
   batch.test('with nolog config', (t) => {
     const ml = new MockLogger();
-    const cl = clm.connectLogger(ml, { nolog: '\\.gif' });
+    const cl = clm(ml, { nolog: '\\.gif' });
 
-    t.beforeEach((done) => { ml.messages = []; done(); });
+    t.beforeEach((done) => {
+      ml.messages = [];
+      if (typeof done === 'function') {
+        done();
+      }
+    });
 
     t.test('check unmatch url request', (assert) => {
-      const messages = ml.messages;
-      const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
+      const { messages } = ml;
+      const req = new MockRequest(
+        'my.remote.addr',
+        'GET',
+        'http://url/hoge.png'
+      ); // not gif
       const res = new MockResponse(200);
-      cl(req, res, () => { });
+      cl(req, res, () => {});
       res.end('chunk', 'encoding');
 
       assert.type(messages, 'Array');
       assert.equal(messages.length, 1);
       assert.ok(levels.INFO.isEqualTo(messages[0].level));
-      assert.include(messages[0].message, 'GET');
-      assert.include(messages[0].message, 'http://url');
-      assert.include(messages[0].message, 'my.remote.addr');
-      assert.include(messages[0].message, '200');
+      assert.match(messages[0].message, 'GET');
+      assert.match(messages[0].message, 'http://url');
+      assert.match(messages[0].message, 'my.remote.addr');
+      assert.match(messages[0].message, '200');
       assert.end();
     });
 
     t.test('check match url request', (assert) => {
-      const messages = ml.messages;
-      const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
+      const { messages } = ml;
+      const req = new MockRequest(
+        'my.remote.addr',
+        'GET',
+        'http://url/hoge.gif'
+      ); // gif
       const res = new MockResponse(200);
       cl(req, res, () => {});
       res.end('chunk', 'encoding');
@@ -82,31 +108,44 @@ test('log4js connect logger', (batch) => {
 
   batch.test('nolog Strings', (t) => {
     const ml = new MockLogger();
-    const cl = clm.connectLogger(ml, { nolog: '\\.gif|\\.jpe?g' });
+    const cl = clm(ml, { nolog: '\\.gif|\\.jpe?g' });
 
-    t.beforeEach((done) => { ml.messages = []; done(); });
+    t.beforeEach((done) => {
+      ml.messages = [];
+      if (typeof done === 'function') {
+        done();
+      }
+    });
 
     t.test('check unmatch url request (png)', (assert) => {
-      const messages = ml.messages;
-      const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
+      const { messages } = ml;
+      const req = new MockRequest(
+        'my.remote.addr',
+        'GET',
+        'http://url/hoge.png'
+      ); // not gif
       const res = new MockResponse(200);
-      cl(req, res, () => { });
+      cl(req, res, () => {});
       res.end('chunk', 'encoding');
 
       assert.equal(messages.length, 1);
       assert.ok(levels.INFO.isEqualTo(messages[0].level));
-      assert.include(messages[0].message, 'GET');
-      assert.include(messages[0].message, 'http://url');
-      assert.include(messages[0].message, 'my.remote.addr');
-      assert.include(messages[0].message, '200');
+      assert.match(messages[0].message, 'GET');
+      assert.match(messages[0].message, 'http://url');
+      assert.match(messages[0].message, 'my.remote.addr');
+      assert.match(messages[0].message, '200');
       assert.end();
     });
 
     t.test('check match url request (gif)', (assert) => {
-      const messages = ml.messages;
-      const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif');
+      const { messages } = ml;
+      const req = new MockRequest(
+        'my.remote.addr',
+        'GET',
+        'http://url/hoge.gif'
+      );
       const res = new MockResponse(200);
-      cl(req, res, () => { });
+      cl(req, res, () => {});
       res.end('chunk', 'encoding');
 
       assert.equal(messages.length, 0);
@@ -114,10 +153,14 @@ test('log4js connect logger', (batch) => {
     });
 
     t.test('check match url request (jpeg)', (assert) => {
-      const messages = ml.messages;
-      const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg');
+      const { messages } = ml;
+      const req = new MockRequest(
+        'my.remote.addr',
+        'GET',
+        'http://url/hoge.jpeg'
+      );
       const res = new MockResponse(200);
-      cl(req, res, () => { });
+      cl(req, res, () => {});
       res.end('chunk', 'encoding');
 
       assert.equal(messages.length, 0);
@@ -129,31 +172,44 @@ test('log4js connect logger', (batch) => {
 
   batch.test('nolog Array', (t) => {
     const ml = new MockLogger();
-    const cl = clm.connectLogger(ml, { nolog: ['\\.gif', '\\.jpe?g'] });
+    const cl = clm(ml, { nolog: ['\\.gif', '\\.jpe?g'] });
 
-    t.beforeEach((done) => { ml.messages = []; done(); });
+    t.beforeEach((done) => {
+      ml.messages = [];
+      if (typeof done === 'function') {
+        done();
+      }
+    });
 
     t.test('check unmatch url request (png)', (assert) => {
-      const messages = ml.messages;
-      const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
+      const { messages } = ml;
+      const req = new MockRequest(
+        'my.remote.addr',
+        'GET',
+        'http://url/hoge.png'
+      ); // not gif
       const res = new MockResponse(200);
-      cl(req, res, () => { });
+      cl(req, res, () => {});
       res.end('chunk', 'encoding');
 
       assert.equal(messages.length, 1);
       assert.ok(levels.INFO.isEqualTo(messages[0].level));
-      assert.include(messages[0].message, 'GET');
-      assert.include(messages[0].message, 'http://url');
-      assert.include(messages[0].message, 'my.remote.addr');
-      assert.include(messages[0].message, '200');
+      assert.match(messages[0].message, 'GET');
+      assert.match(messages[0].message, 'http://url');
+      assert.match(messages[0].message, 'my.remote.addr');
+      assert.match(messages[0].message, '200');
       assert.end();
     });
 
     t.test('check match url request (gif)', (assert) => {
-      const messages = ml.messages;
-      const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
+      const { messages } = ml;
+      const req = new MockRequest(
+        'my.remote.addr',
+        'GET',
+        'http://url/hoge.gif'
+      ); // gif
       const res = new MockResponse(200);
-      cl(req, res, () => { });
+      cl(req, res, () => {});
       res.end('chunk', 'encoding');
 
       assert.equal(messages.length, 0);
@@ -161,10 +217,14 @@ test('log4js connect logger', (batch) => {
     });
 
     t.test('check match url request (jpeg)', (assert) => {
-      const messages = ml.messages;
-      const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // gif
+      const { messages } = ml;
+      const req = new MockRequest(
+        'my.remote.addr',
+        'GET',
+        'http://url/hoge.jpeg'
+      ); // jpeg
       const res = new MockResponse(200);
-      cl(req, res, () => { });
+      cl(req, res, () => {});
       res.end('chunk', 'encoding');
 
       assert.equal(messages.length, 0);
@@ -176,29 +236,106 @@ test('log4js connect logger', (batch) => {
 
   batch.test('nolog RegExp', (t) => {
     const ml = new MockLogger();
-    const cl = clm.connectLogger(ml, { nolog: /\.gif|\.jpe?g/ });
+    const cl = clm(ml, { nolog: /\.gif|\.jpe?g/ });
+
+    t.beforeEach((done) => {
+      ml.messages = [];
+      if (typeof done === 'function') {
+        done();
+      }
+    });
+
+    t.test('check unmatch url request (png)', (assert) => {
+      const { messages } = ml;
+      const req = new MockRequest(
+        'my.remote.addr',
+        'GET',
+        'http://url/hoge.png'
+      ); // not gif
+      const res = new MockResponse(200);
+      cl(req, res, () => {});
+      res.end('chunk', 'encoding');
+
+      assert.equal(messages.length, 1);
+      assert.ok(levels.INFO.isEqualTo(messages[0].level));
+      assert.match(messages[0].message, 'GET');
+      assert.match(messages[0].message, 'http://url');
+      assert.match(messages[0].message, 'my.remote.addr');
+      assert.match(messages[0].message, '200');
+      assert.end();
+    });
+
+    t.test('check match url request (gif)', (assert) => {
+      const { messages } = ml;
+      const req = new MockRequest(
+        'my.remote.addr',
+        'GET',
+        'http://url/hoge.gif'
+      ); // gif
+      const res = new MockResponse(200);
+      cl(req, res, () => {});
+      res.end('chunk', 'encoding');
+
+      assert.equal(messages.length, 0);
+      assert.end();
+    });
+
+    t.test('check match url request (jpeg)', (assert) => {
+      const { messages } = ml;
+      const req = new MockRequest(
+        'my.remote.addr',
+        'GET',
+        'http://url/hoge.jpeg'
+      ); // jpeg
+      const res = new MockResponse(200);
+      cl(req, res, () => {});
+      res.end('chunk', 'encoding');
+
+      assert.equal(messages.length, 0);
+      assert.end();
+    });
+
+    t.end();
+  });
+
+  batch.test('nolog Array of RegExp', (t) => {
+    const ml = new MockLogger();
+    const cl = clm(ml, { nolog: [/\.gif/, /\.jpe?g/] });
 
-    t.beforeEach((done) => { ml.messages = []; done(); });
+    t.beforeEach((done) => {
+      ml.messages = [];
+      if (typeof done === 'function') {
+        done();
+      }
+    });
 
     t.test('check unmatch url request (png)', (assert) => {
-      const messages = ml.messages;
-      const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
+      const { messages } = ml;
+      const req = new MockRequest(
+        'my.remote.addr',
+        'GET',
+        'http://url/hoge.png'
+      ); // not gif
       const res = new MockResponse(200);
-      cl(req, res, () => { });
+      cl(req, res, () => {});
       res.end('chunk', 'encoding');
 
       assert.equal(messages.length, 1);
       assert.ok(levels.INFO.isEqualTo(messages[0].level));
-      assert.include(messages[0].message, 'GET');
-      assert.include(messages[0].message, 'http://url');
-      assert.include(messages[0].message, 'my.remote.addr');
-      assert.include(messages[0].message, '200');
+      assert.match(messages[0].message, 'GET');
+      assert.match(messages[0].message, 'http://url');
+      assert.match(messages[0].message, 'my.remote.addr');
+      assert.match(messages[0].message, '200');
       assert.end();
     });
 
     t.test('check match url request (gif)', (assert) => {
-      const messages = ml.messages;
-      const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
+      const { messages } = ml;
+      const req = new MockRequest(
+        'my.remote.addr',
+        'GET',
+        'http://url/hoge.gif'
+      ); // gif
       const res = new MockResponse(200);
       cl(req, res, () => {});
       res.end('chunk', 'encoding');
@@ -208,8 +345,56 @@ test('log4js connect logger', (batch) => {
     });
 
     t.test('check match url request (jpeg)', (assert) => {
-      const messages = ml.messages;
-      const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // gif
+      const { messages } = ml;
+      const req = new MockRequest(
+        'my.remote.addr',
+        'GET',
+        'http://url/hoge.jpeg'
+      ); // jpeg
+      const res = new MockResponse(200);
+      cl(req, res, () => {});
+      res.end('chunk', 'encoding');
+
+      assert.equal(messages.length, 0);
+      assert.end();
+    });
+
+    t.end();
+  });
+
+  batch.test('nolog function', (t) => {
+    const ml = new MockLogger();
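+    // a nolog function that returns true suppresses logging for that request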
+    const cl = clm(ml, {
+      nolog: (_req, res) =>
+        res.getHeader('content-type') === 'image/png' || res.statusCode < 400,
+    });
+
+    t.beforeEach((done) => {
+      ml.messages = [];
+      if (typeof done === 'function') {
+        done();
+      }
+    });
+
+    t.test('check unmatch function return (statusCode >= 400)', (assert) => {
+      const { messages } = ml;
+      const req = new MockRequest('my.remote.addr', 'GET', 'http://url/log');
+      const res = new MockResponse(500);
+      cl(req, res, () => {});
+      res.end('chunk', 'encoding');
+
+      assert.equal(messages.length, 1);
+      assert.ok(levels.INFO.isEqualTo(messages[0].level));
+      assert.match(messages[0].message, 'GET');
+      assert.match(messages[0].message, 'http://url');
+      assert.match(messages[0].message, 'my.remote.addr');
+      assert.match(messages[0].message, '500');
+      assert.end();
+    });
+
+    t.test('check match function return (statusCode < 400)', (assert) => {
+      const { messages } = ml;
+      const req = new MockRequest('my.remote.addr', 'GET', 'http://url/nolog');
       const res = new MockResponse(200);
       cl(req, res, () => {});
       res.end('chunk', 'encoding');
@@ -218,6 +403,27 @@ test('log4js connect logger', (batch) => {
       assert.end();
     });
 
+    t.test(
+      'check match function return (response content-type header)',
+      (assert) => {
+        const { messages } = ml;
+        const req = new MockRequest(
+          'my.remote.addr',
+          'GET',
+          'http://url/nolog'
+        );
+        const res = new MockResponse(500);
+        res.on('finish', () => {
+          res.setHeader('content-type', 'image/png');
+        });
+        cl(req, res, () => {});
+        res.end('chunk', 'encoding');
+
+        assert.equal(messages.length, 0);
+        assert.end();
+      }
+    );
+
     t.end();
   });
 
diff --git a/test/tap/consoleAppender-test.js b/test/tap/consoleAppender-test.js
index 6fe32cdb..6853add9 100644
--- a/test/tap/consoleAppender-test.js
+++ b/test/tap/consoleAppender-test.js
@@ -1,28 +1,56 @@
-'use strict';
-
-const test = require('tap').test;
-const layouts = require('../../lib/layouts');
-const sandbox = require('sandboxed-module');
+const { test } = require('tap');
+const sandbox = require('@log4js-node/sandboxed-module');
+const consoleAppender = require('../../lib/appenders/console');
 
 test('log4js console appender', (batch) => {
+  batch.test('should export a configure function', (t) => {
+    t.type(consoleAppender.configure, 'function');
+    t.end();
+  });
+
+  batch.test('should use default layout if none specified', (t) => {
+    const messages = [];
+    const fakeConsole = {
+      log(msg) {
+        messages.push(msg);
+      },
+    };
+    const log4js = sandbox.require('../../lib/log4js', {
+      globals: {
+        console: fakeConsole,
+      },
+    });
+    log4js.configure({
+      appenders: { console: { type: 'console' } },
+      categories: { default: { appenders: ['console'], level: 'DEBUG' } },
+    });
+
+    log4js.getLogger().info('blah');
+
+    t.match(messages[0], /.*default.*blah/);
+    t.end();
+  });
+
   batch.test('should output to console', (t) => {
     const messages = [];
     const fakeConsole = {
-      log: function (msg) {
+      log(msg) {
         messages.push(msg);
-      }
+      },
     };
-    const appenderModule = sandbox.require(
-      '../../lib/appenders/console',
-      {
-        globals: {
-          console: fakeConsole
-        }
-      }
-    );
-
-    const appender = appenderModule.appender(layouts.messagePassThroughLayout);
-    appender({ data: ['blah'] });
+    const log4js = sandbox.require('../../lib/log4js', {
+      globals: {
+        console: fakeConsole,
+      },
+    });
+    log4js.configure({
+      appenders: {
+        console: { type: 'console', layout: { type: 'messagePassThrough' } },
+      },
+      categories: { default: { appenders: ['console'], level: 'DEBUG' } },
+    });
+
+    log4js.getLogger().info('blah');
 
     t.equal(messages[0], 'blah');
     t.end();
diff --git a/test/tap/dateFileAppender-test.js b/test/tap/dateFileAppender-test.js
index 768eb17d..a14f1abf 100644
--- a/test/tap/dateFileAppender-test.js
+++ b/test/tap/dateFileAppender-test.js
@@ -1,191 +1,301 @@
-'use strict';
+/* eslint max-classes-per-file: ["error", 3] */
 
-const test = require('tap').test;
+const { test } = require('tap');
 const path = require('path');
 const fs = require('fs');
-const sandbox = require('sandboxed-module');
-const log4js = require('../../lib/log4js');
 const EOL = require('os').EOL || '\n';
+const format = require('date-format');
+const sandbox = require('@log4js-node/sandboxed-module');
+const log4js = require('../../lib/log4js');
+
+const osDelay = process.platform === 'win32' ? 400 : 200;
 
 function removeFile(filename) {
   try {
     fs.unlinkSync(path.join(__dirname, filename));
-  } catch (e) {}
+  } catch (e) {
+    // doesn't matter
+  }
 }
 
 test('../../lib/appenders/dateFile', (batch) => {
-  batch.test('adding multiple dateFileAppenders', (t) => {
-    const listenersCount = process.listeners('exit').length;
-    const dateFileAppender = require('../../lib/appenders/dateFile');
-    let count = 5;
-    let logfile;
-
-    while (count--) {
-      logfile = path.join(__dirname, `datefa-default-test${count}.log`);
-      log4js.addAppender(dateFileAppender.appender(logfile));
-    }
-
-    t.teardown(() => {
-      removeFile('datefa-default-test0.log');
-      removeFile('datefa-default-test1.log');
-      removeFile('datefa-default-test2.log');
-      removeFile('datefa-default-test3.log');
-      removeFile('datefa-default-test4.log');
-    });
-
-    t.equal(process.listeners('exit').length, listenersCount + 1, 'should only add one exit listener');
-    t.end();
-  });
-
-  batch.test('exit listener', (t) => {
-    let exitListener;
-    const openedFiles = [];
-
-    const dateFileAppender = sandbox.require(
-      '../../lib/appenders/dateFile',
-      {
-        globals: {
-          process: {
-            on: function (evt, listener) {
-              exitListener = listener;
-            }
-          }
-        },
-        requires: {
-          streamroller: {
-            DateRollingFileStream: function (filename) {
-              openedFiles.push(filename);
-
-              this.end = function () {
-                openedFiles.shift();
-              };
-            }
-          }
-        }
-      }
-    );
-
-    for (let i = 0; i < 5; i += 1) {
-      dateFileAppender.appender(`test${i}`);
-    }
-    t.equal(openedFiles.length, 5);
-    exitListener();
-    t.equal(openedFiles.length, 0, 'should close all opened files');
-    t.end();
-  });
-
   batch.test('with default settings', (t) => {
     const testFile = path.join(__dirname, 'date-appender-default.log');
-    const appender = require('../../lib/appenders/dateFile').appender(testFile);
+    log4js.configure({
+      appenders: { date: { type: 'dateFile', filename: testFile } },
+      categories: { default: { appenders: ['date'], level: 'DEBUG' } },
+    });
+
     const logger = log4js.getLogger('default-settings');
-    log4js.clearAppenders();
-    log4js.addAppender(appender, 'default-settings');
 
     logger.info('This should be in the file.');
-    t.teardown(() => { removeFile('date-appender-default.log'); });
+    t.teardown(() => {
+      removeFile('date-appender-default.log');
+    });
 
     setTimeout(() => {
       fs.readFile(testFile, 'utf8', (err, contents) => {
-        t.include(contents, 'This should be in the file');
+        t.match(contents, 'This should be in the file');
         t.match(
           contents,
-          /\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
+          /\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
         );
         t.end();
       });
-    }, 100);
+    }, osDelay);
   });
 
   batch.test('configure with dateFileAppender', (t) => {
-    // this config file defines one file appender (to ./date-file-test.log)
-    // and sets the log level for "tests" to WARN
-    log4js.configure('test/tap/with-dateFile.json');
+    log4js.configure({
+      appenders: {
+        date: {
+          type: 'dateFile',
+          filename: 'test/tap/date-file-test.log',
+          pattern: '-yyyy-MM-dd',
+          layout: { type: 'messagePassThrough' },
+        },
+      },
+      categories: { default: { appenders: ['date'], level: 'WARN' } },
+    });
     const logger = log4js.getLogger('tests');
     logger.info('this should not be written to the file');
     logger.warn('this should be written to the file');
 
-    t.teardown(() => { removeFile('date-file-test.log'); });
+    log4js.shutdown(() => {
+      fs.readFile(
+        path.join(__dirname, 'date-file-test.log'),
+        'utf8',
+        (err, contents) => {
+          t.match(contents, `this should be written to the file${EOL}`);
+          t.equal(
+            contents.indexOf('this should not be written to the file'),
+            -1
+          );
+          t.end();
+        }
+      );
+    });
 
-    fs.readFile(path.join(__dirname, 'date-file-test.log'), 'utf8', (err, contents) => {
-      t.include(contents, `this should be written to the file${EOL}`);
-      t.equal(contents.indexOf('this should not be written to the file'), -1);
-      t.end();
+    t.teardown(() => {
+      removeFile('date-file-test.log');
     });
   });
 
   batch.test('configure with options.alwaysIncludePattern', (t) => {
-    const format = require('date-format');
-
     const options = {
-      appenders: [
-        {
+      appenders: {
+        date: {
           category: 'tests',
           type: 'dateFile',
           filename: 'test/tap/date-file-test',
-          pattern: '-from-MM-dd.log',
+          pattern: 'yyyy-MM-dd.log',
           alwaysIncludePattern: true,
           layout: {
-            type: 'messagePassThrough'
-          }
-        }
-      ]
+            type: 'messagePassThrough',
+          },
+        },
+      },
+      categories: { default: { appenders: ['date'], level: 'debug' } },
     };
 
-    const thisTime = format.asString(options.appenders[0].pattern, new Date());
-    fs.writeFileSync(
-      path.join(__dirname, `date-file-test${thisTime}`),
-      `this is existing data${EOL}`,
-      'utf8'
+    const thisTime = format.asString(
+      options.appenders.date.pattern,
+      new Date()
     );
-    log4js.clearAppenders();
+    const testFile = `date-file-test.${thisTime}`;
+    const existingFile = path.join(__dirname, testFile);
+    fs.writeFileSync(existingFile, `this is existing data${EOL}`, 'utf8');
     log4js.configure(options);
     const logger = log4js.getLogger('tests');
     logger.warn('this should be written to the file with the appended date');
 
-    t.teardown(() => { removeFile(`date-file-test${thisTime}`); });
+    t.teardown(() => {
+      removeFile(testFile);
+    });
 
     // wait for filesystem to catch up
-    setTimeout(() => {
-      fs.readFile(path.join(__dirname, `date-file-test${thisTime}`), 'utf8', (err, contents) => {
-        t.include(contents, 'this should be written to the file with the appended date');
-        t.include(contents, 'this is existing data', 'should not overwrite the file on open (issue #132)');
+    log4js.shutdown(() => {
+      fs.readFile(existingFile, 'utf8', (err, contents) => {
+        t.match(
+          contents,
+          'this is existing data',
+          'should not overwrite the file on open (issue #132)'
+        );
+        t.match(
+          contents,
+          'this should be written to the file with the appended date'
+        );
         t.end();
       });
-    }, 100);
+    });
   });
 
-  batch.test('configure with cwd option', (t) => {
-    let fileOpened;
+  batch.test('should flush logs on shutdown', (t) => {
+    const testFile = path.join(__dirname, 'date-appender-flush.log');
+    log4js.configure({
+      appenders: { test: { type: 'dateFile', filename: testFile } },
+      categories: { default: { appenders: ['test'], level: 'trace' } },
+    });
+    const logger = log4js.getLogger('default-settings');
+
+    logger.info('1');
+    logger.info('2');
+    logger.info('3');
+    t.teardown(() => {
+      removeFile('date-appender-flush.log');
+    });
 
-    const appender = sandbox.require(
+    log4js.shutdown(() => {
+      fs.readFile(testFile, 'utf8', (err, fileContents) => {
+        // 3 lines of output, plus the trailing newline.
+        t.equal(fileContents.split(EOL).length, 4);
+        t.match(
+          fileContents,
+          /\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
+        );
+        t.end();
+      });
+    });
+  });
+
+  batch.test('should map maxLogSize to maxSize', (t) => {
+    const fakeStreamroller = {};
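+    // capture the arguments the appender passes to DateRollingFileStream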
+    class DateRollingFileStream {
+      constructor(filename, pattern, options) {
+        fakeStreamroller.filename = filename;
+        fakeStreamroller.pattern = pattern;
+        fakeStreamroller.options = options;
+      }
+
+      on() {} // eslint-disable-line class-methods-use-this
+    }
+    fakeStreamroller.DateRollingFileStream = DateRollingFileStream;
+    const dateFileAppenderModule = sandbox.require(
       '../../lib/appenders/dateFile',
       {
-        requires: {
-          streamroller: {
-            DateRollingFileStream: function (file) {
-              fileOpened = file;
-              return {
-                on: function () {
-                },
-                end: function () {
-                }
-              };
-            }
-          }
-        }
+        requires: { streamroller: fakeStreamroller },
       }
     );
+    dateFileAppenderModule.configure(
+      {
+        filename: 'cheese.log',
+        pattern: 'yyyy',
+        maxLogSize: 100,
+      },
+      { basicLayout: () => {} }
+    );
+
+    t.equal(fakeStreamroller.options.maxSize, 100);
+    t.end();
+  });
+
+  batch.test('handling of writer.writable', (t) => {
+    const output = [];
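+    // `writable` is toggled by the sub-tests to simulate the stream refusing writes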
+    let writable = true;
+
+    const DateRollingFileStream = class {
+      write(loggingEvent) {
+        output.push(loggingEvent);
+        this.written = true;
+        return true;
+      }
+
+      // eslint-disable-next-line class-methods-use-this
+      on() {}
+
+      // eslint-disable-next-line class-methods-use-this
+      get writable() {
+        return writable;
+      }
+    };
+    const dateFileAppender = sandbox.require('../../lib/appenders/dateFile', {
+      requires: {
+        streamroller: {
+          DateRollingFileStream,
+        },
+      },
+    });
 
-    appender.configure(
+    const appender = dateFileAppender.configure(
+      { filename: 'test1.log', maxLogSize: 100 },
       {
-        filename: 'whatever.log',
-        maxLogSize: 10
+        basicLayout(loggingEvent) {
+          return loggingEvent.data;
+        },
+      }
+    );
+
+    t.test('should log when writer.writable=true', (assert) => {
+      writable = true;
+      appender({ data: 'something to log' });
+      assert.ok(output.length, 1);
+      assert.match(output[output.length - 1], 'something to log');
+      assert.end();
+    });
+
+    t.test('should not log when writer.writable=false', (assert) => {
+      writable = false;
+      appender({ data: 'this should not be logged' });
+      assert.ok(output.length, 1);
+      assert.notMatch(output[output.length - 1], 'this should not be logged');
+      assert.end();
+    });
+
+    t.end();
+  });
+
+  batch.test('when underlying stream errors', (t) => {
+    let consoleArgs;
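+    // holds the 'error' listener that the appender registers on the fake stream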
+    let errorHandler;
+
+    const DateRollingFileStream = class {
+      end() {
+        this.ended = true;
+      }
+
+      on(evt, cb) {
+        if (evt === 'error') {
+          this.errored = true;
+          errorHandler = cb;
+        }
+      }
+
+      write() {
+        this.written = true;
+        return true;
+      }
+    };
+    const dateFileAppender = sandbox.require('../../lib/appenders/dateFile', {
+      globals: {
+        console: {
+          error(...args) {
+            consoleArgs = args;
+          },
+        },
+      },
+      requires: {
+        streamroller: {
+          DateRollingFileStream,
+        },
       },
-      { cwd: '/absolute/path/to' }
+    });
+
+    dateFileAppender.configure(
+      { filename: 'test1.log', maxLogSize: 100 },
+      { basicLayout() {} }
     );
+    errorHandler({ error: 'aargh' });
 
-    const expected = path.sep + path.join('absolute', 'path', 'to', 'whatever.log');
-    t.equal(fileOpened, expected, 'should prepend options.cwd to config.filename');
+    t.test('should log the error to console.error', (assert) => {
+      assert.ok(consoleArgs);
+      assert.equal(
+        consoleArgs[0],
+        'log4js.dateFileAppender - Writing to file %s, error happened '
+      );
+      assert.equal(consoleArgs[1], 'test1.log');
+      assert.equal(consoleArgs[2].error, 'aargh');
+      assert.end();
+    });
     t.end();
   });
 
diff --git a/test/tap/default-settings-test.js b/test/tap/default-settings-test.js
index d3e6a6dc..f7720380 100644
--- a/test/tap/default-settings-test.js
+++ b/test/tap/default-settings-test.js
@@ -1,36 +1,161 @@
-'use strict';
+const { test } = require('tap');
+const debug = require('debug');
+const sandbox = require('@log4js-node/sandboxed-module');
 
-const test = require('tap').test;
-const sandbox = require('sandboxed-module');
+test('default settings', (batch) => {
+  const originalListener =
+    process.listeners('warning')[process.listeners('warning').length - 1];
+  const warningListener = (error) => {
+    if (error.name === 'DeprecationWarning') {
+      if (
+        error.code.startsWith('log4js-node-DEP0001') ||
+        error.code.startsWith('log4js-node-DEP0002')
+      ) {
+        return;
+      }
+    }
+    originalListener(error);
+  };
+  process.off('warning', originalListener);
+  process.on('warning', warningListener);
+
+  const debugWasEnabled = debug.enabled('log4js:appenders');
+  const debugLogs = [];
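+  // the `debug` module writes to stderr; capture it so the tests below can count
+  // the log4js-node-DEP0001/DEP0002 deprecation messages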
+  const originalWrite = process.stderr.write;
+  process.stderr.write = (string, encoding, fd) => {
+    debugLogs.push(string);
+    if (debugWasEnabled) {
+      originalWrite.apply(process.stderr, [string, encoding, fd]);
+    }
+  };
+  const originalNamespace = debug.disable();
+  debug.enable(`${originalNamespace}, log4js:appenders`);
+
+  batch.teardown(async () => {
+    // next event loop so that past warnings will not be printed
+    setImmediate(() => {
+      process.off('warning', warningListener);
+      process.on('warning', originalListener);
+    });
+    process.stderr.write = originalWrite;
+    debug.enable(originalNamespace);
+  });
 
-test('default settings', (t) => {
   const output = [];
+  const log4js = sandbox.require('../../lib/log4js', {
+    requires: {
+      './appenders/stdout': {
+        name: 'stdout',
+        appender() {
+          // deprecated
+          return function (evt) {
+            output.push(evt);
+          };
+        },
+        shutdown() {
+          // deprecated
+        },
+        configure() {
+          return this.appender();
+        },
+      },
+      debug,
+    },
+  });
 
-  const log4js = sandbox.require(
-    '../../lib/log4js',
-    {
-      requires: {
-        './appenders/stdout': {
-          name: 'stdout',
-          appender: function () {
-            return function (evt) {
-              output.push(evt);
-            };
-          },
-          configure: function () {
-            return this.appender();
-          }
-        }
-      }
+  let logger;
+
+  batch.test(
+    'should call configure() on getLogger() if not configured',
+    (t) => {
+      const DEP0001 = debugLogs.filter(
+        (e) => e.indexOf('log4js-node-DEP0001') > -1
+      ).length;
+      const DEP0002 = debugLogs.filter(
+        (e) => e.indexOf('log4js-node-DEP0002') > -1
+      ).length;
+      logger = log4js.getLogger('default-settings');
+      t.equal(
+        debugLogs.filter((e) => e.indexOf('log4js-node-DEP0001') > -1).length,
+        DEP0001 + 1,
+        'deprecation log4js-node-DEP0001 emitted'
+      );
+      t.equal(
+        debugLogs.filter((e) => e.indexOf('log4js-node-DEP0002') > -1).length,
+        DEP0002 + 1,
+        'deprecation log4js-node-DEP0002 emitted'
+      );
+      t.end();
     }
   );
 
-  const logger = log4js.getLogger('default-settings');
+  batch.test(
+    'nothing should be logged until level is set or configure() is called',
+    (t) => {
+      const originalLevel = logger.level;
+      t.equal(
+        originalLevel.levelStr,
+        'OFF',
+        'default logger.level should be OFF'
+      );
 
-  logger.info('This should go to stdout.');
+      logger.info('This should not be logged yet.');
+      t.equal(output.length, 0, 'nothing should be logged');
+
+      t.test('after level is set', (assert) => {
+        logger.level = 'debug';
+        logger.info('This should be logged.');
+        assert.equal(
+          output.length,
+          1,
+          'should log the message if level is set'
+        );
+        assert.equal(
+          output[output.length - 1].data[0],
+          'This should be logged.'
+        );
+        logger.level = originalLevel;
+        assert.end();
+      });
+
+      t.test('after configure() is called', (assert) => {
+        const DEP0001 = debugLogs.filter(
+          (e) => e.indexOf('log4js-node-DEP0001') > -1
+        ).length;
+        const DEP0002 = debugLogs.filter(
+          (e) => e.indexOf('log4js-node-DEP0002') > -1
+        ).length;
+        log4js.configure({
+          appenders: { stdout: { type: 'stdout' } },
+          categories: { default: { appenders: ['stdout'], level: 'debug' } },
+        });
+        assert.equal(
+          debugLogs.filter((e) => e.indexOf('log4js-node-DEP0001') > -1).length,
+          DEP0001 + 1,
+          'deprecation log4js-node-DEP0001 emitted'
+        );
+        assert.equal(
+          debugLogs.filter((e) => e.indexOf('log4js-node-DEP0002') > -1).length,
+          DEP0002 + 1,
+          'deprecation log4js-node-DEP0002 emitted'
+        );
+
+        logger.info('This should go to stdout.');
+        assert.equal(
+          output.length,
+          2,
+          'should log the message after configure() is called'
+        );
+        assert.equal(
+          output[output.length - 1].data[0],
+          'This should go to stdout.'
+        );
+        assert.end();
+      });
+
+      t.end();
+    }
+  );
 
-  t.plan(2);
-  t.equal(output.length, 1, 'It should log to stdout.');
-  t.equal(output[0].data[0], 'This should go to stdout.', 'It should log the message.');
-  t.end();
+  batch.end();
 });
diff --git a/test/tap/disable-cluster-test.js b/test/tap/disable-cluster-test.js
new file mode 100644
index 00000000..4128eaa6
--- /dev/null
+++ b/test/tap/disable-cluster-test.js
@@ -0,0 +1,57 @@
+const { test } = require('tap');
+const cluster = require('cluster');
+const log4js = require('../../lib/log4js');
+const recorder = require('../../lib/appenders/recording');
+
+cluster.removeAllListeners();
+
+log4js.configure({
+  appenders: {
+    vcr: { type: 'recording' },
+  },
+  categories: { default: { appenders: ['vcr'], level: 'debug' } },
+  disableClustering: true,
+});
+
+if (cluster.isMaster) {
+  cluster.fork();
+
+  const masterLogger = log4js.getLogger('master');
+  const masterPid = process.pid;
+  masterLogger.info('this is master');
+
+  cluster.on('exit', () => {
+    const logEvents = recorder.replay();
+
+    test('cluster master', (batch) => {
+      batch.test('only master events should be logged', (t) => {
+        t.equal(logEvents.length, 1);
+        t.equal(logEvents[0].categoryName, 'master');
+        t.equal(logEvents[0].pid, masterPid);
+        t.equal(logEvents[0].data[0], 'this is master');
+        t.end();
+      });
+
+      batch.end();
+    });
+  });
+} else {
+  const workerLogger = log4js.getLogger('worker');
+  workerLogger.info('this is worker', new Error('oh dear'));
+
+  const workerEvents = recorder.replay();
+  test('cluster worker', (batch) => {
+    batch.test('should send events to its own appender', (t) => {
+      t.equal(workerEvents.length, 1);
+      t.equal(workerEvents[0].categoryName, 'worker');
+      t.equal(workerEvents[0].data[0], 'this is worker');
+      t.type(workerEvents[0].data[1], 'Error');
+      t.match(workerEvents[0].data[1].stack, 'Error: oh dear');
+      t.end();
+    });
+    batch.end();
+  });
+  // test sending a cluster-style log message
+  process.send({ topic: 'log4js:message', data: { cheese: 'gouda' } });
+  cluster.worker.disconnect();
+}
diff --git a/test/tap/dummy-appender.cjs b/test/tap/dummy-appender.cjs
new file mode 100644
index 00000000..4d1b0309
--- /dev/null
+++ b/test/tap/dummy-appender.cjs
@@ -0,0 +1,20 @@
+// Dummy appender for test purposes; set config.label to identify instances in a test
+
+function createDummyAppender() {
+  // This is the function that generates an appender function
+  // This is the appender function itself
+  return (/* loggingEvent */) => {
+    // do nothing
+    // console.log(loggingEvent.data);
+  };
+}
+
+function configure(config) {
+  // create a new appender instance
+  const appender = createDummyAppender();
+  appender.label = config.label;
+  return appender;
+}
+
+// export the only function needed
+exports.configure = configure;
diff --git a/test/tap/file-descriptor-leak-test.js b/test/tap/file-descriptor-leak-test.js
new file mode 100644
index 00000000..8e97aeaa
--- /dev/null
+++ b/test/tap/file-descriptor-leak-test.js
@@ -0,0 +1,104 @@
+const { test } = require('tap');
+const fs = require('fs');
+const path = require('path');
+const log4js = require('../../lib/log4js');
+
+const osDelay = process.platform === 'win32' ? 400 : 200;
+
+const removeFiles = async (filenames) => {
+  if (!Array.isArray(filenames)) filenames = [filenames];
+  const promises = filenames.map((filename) => fs.promises.unlink(filename));
+  await Promise.allSettled(promises);
+};
+
+// /proc/self/fd is not available on Windows, so don't run these tests there
+if (process.platform !== 'win32') {
+  test('multiple log4js configure fd leak test', (batch) => {
+    const config = {
+      appenders: {},
+      categories: {
+        default: { appenders: [], level: 'debug' },
+      },
+    };
+
+    // create 11 appenders
+    const numOfAppenders = 11;
+    for (let i = 1; i <= numOfAppenders; i++) {
+      config.appenders[`app${i}`] = {
+        type: 'file',
+        filename: path.join(__dirname, `file${i}.log`),
+      };
+      config.categories.default.appenders.push(`app${i}`);
+    }
+
+    const initialFd = fs.readdirSync('/proc/self/fd').length;
+    let loadedFd;
+
+    batch.test(
+      'initial log4js configure to increase file descriptor count',
+      (t) => {
+        log4js.configure(config);
+
+        // wait for the file system to catch up
+        setTimeout(() => {
+          loadedFd = fs.readdirSync('/proc/self/fd').length;
+          t.equal(
+            loadedFd,
+            initialFd + numOfAppenders,
+            `file descriptor count should increase by ${numOfAppenders} after 1st configure() call`
+          );
+          t.end();
+        }, osDelay);
+      }
+    );
+
+    batch.test(
+      'repeated log4js configure to not increase file descriptor count',
+      (t) => {
+        log4js.configure(config);
+        log4js.configure(config);
+        log4js.configure(config);
+
+        // wait for the file system to catch up
+        setTimeout(() => {
+          t.equal(
+            fs.readdirSync('/proc/self/fd').length,
+            loadedFd,
+            `file descriptor count should be identical after repeated configure() calls`
+          );
+          t.end();
+        }, osDelay);
+      }
+    );
+
+    batch.test(
+      'file descriptor count should return back to initial count',
+      (t) => {
+        log4js.shutdown();
+
+        // wait for the file system to catch up
+        setTimeout(() => {
+          t.equal(
+            fs.readdirSync('/proc/self/fd').length,
+            initialFd,
+            `file descriptor count should be back to initial`
+          );
+          t.end();
+        }, osDelay);
+      }
+    );
+
+    batch.teardown(async () => {
+      await new Promise((resolve) => {
+        log4js.shutdown(resolve);
+      });
+
+      const filenames = Object.values(config.appenders).map(
+        (appender) => appender.filename
+      );
+      await removeFiles(filenames);
+    });
+
+    batch.end();
+  });
+}
diff --git a/test/tap/file-sighup-test.js b/test/tap/file-sighup-test.js
index 5ed6afa0..bebd12cb 100644
--- a/test/tap/file-sighup-test.js
+++ b/test/tap/file-sighup-test.js
@@ -1,45 +1,154 @@
-'use strict';
+const { test } = require('tap');
+const path = require('path');
+const fs = require('fs');
+const sandbox = require('@log4js-node/sandboxed-module');
 
-const test = require('tap').test;
-const sandbox = require('sandboxed-module');
+const osDelay = process.platform === 'win32' ? 400 : 200;
+
+const removeFiles = async (filenames) => {
+  if (!Array.isArray(filenames)) filenames = [filenames];
+  const promises = filenames.map((filename) => fs.promises.unlink(filename));
+  await Promise.allSettled(promises);
+};
+
+test('file appender single SIGHUP handler', (t) => {
+  const initialListeners = process.listenerCount('SIGHUP');
+
+  let warning;
+  const originalListener =
+    process.listeners('warning')[process.listeners('warning').length - 1];
+  const warningListener = (error) => {
+    if (
+      error.type === 'SIGHUP' &&
+      error.name === 'MaxListenersExceededWarning'
+    ) {
+      warning = error;
+      return;
+    }
+    originalListener(error);
+  };
+  process.off('warning', originalListener);
+  process.on('warning', warningListener);
+
+  const config = {
+    appenders: {},
+    categories: {
+      default: { appenders: [], level: 'debug' },
+    },
+  };
+
+  // create 11 appenders to make nodejs warn for >10 max listeners
+  const numOfAppenders = 11;
+  for (let i = 1; i <= numOfAppenders; i++) {
+    config.appenders[`app${i}`] = {
+      type: 'file',
+      filename: path.join(__dirname, `file${i}.log`),
+    };
+    config.categories.default.appenders.push(`app${i}`);
+  }
+
+  const log4js = require('../../lib/log4js');
+  log4js.configure(config);
+
+  t.teardown(async () => {
+    // next event loop so that past warnings will not be printed
+    setImmediate(() => {
+      process.off('warning', warningListener);
+      process.on('warning', originalListener);
+    });
+
+    await new Promise((resolve) => {
+      log4js.shutdown(resolve);
+    });
+
+    const filenames = Object.values(config.appenders).map(
+      (appender) => appender.filename
+    );
+    await removeFiles(filenames);
+  });
+
+  t.plan(2);
+  // next event loop to allow event emitter/listener to happen
+  setImmediate(() => {
+    t.notOk(warning, 'should not have MaxListenersExceededWarning for SIGHUP');
+    t.equal(
+      process.listenerCount('SIGHUP') - initialListeners,
+      1,
+      'should be 1 SIGHUP listener'
+    );
+    t.end();
+  });
+});
 
 test('file appender SIGHUP', (t) => {
   let closeCalled = 0;
   let openCalled = 0;
 
-  sandbox.require(
-    '../../lib/appenders/file',
-    {
+  sandbox
+    .require('../../lib/appenders/file', {
       requires: {
         streamroller: {
-          RollingFileStream: function () {
-            this.openTheStream = function () {
+          RollingFileStream: class RollingFileStream {
+            constructor() {
               openCalled++;
-            };
+              this.ended = false;
+            }
 
-            this.closeTheStream = function (cb) {
-              closeCalled++;
-              if (cb) {
-                cb();
-              }
-            };
+            on() {
+              this.dummy = 'easier than turning off lint rule';
+            }
 
-            this.on = function () {
-            };
+            end(cb) {
+              this.ended = true;
+              closeCalled++;
+              cb();
+            }
 
-            this.end = function () {
-            };
-          }
-        }
+            write() {
+              if (this.ended) {
+                throw new Error('write after end');
+              }
+              return true;
+            }
+          },
+        },
+      },
+    })
+    .configure(
+      { type: 'file', filename: 'sighup-test-file' },
+      {
+        basicLayout() {
+          return 'whatever';
+        },
       }
-    }
-  ).appender('sighup-test-file');
+    );
+
+  process.emit('SIGHUP', 'SIGHUP', 1);
 
-  process.kill(process.pid, 'SIGHUP');
   t.plan(2);
   setTimeout(() => {
-    t.equal(openCalled, 1, 'open should be called once');
+    t.equal(openCalled, 2, 'open should be called twice');
     t.equal(closeCalled, 1, 'close should be called once');
     t.end();
-  }, 10);
+  }, osDelay);
+});
+
+test('file appender SIGHUP handler leak', (t) => {
+  const log4js = require('../../lib/log4js');
+  const initialListeners = process.listenerCount('SIGHUP');
+  log4js.configure({
+    appenders: {
+      file: { type: 'file', filename: 'test.log' },
+    },
+    categories: { default: { appenders: ['file'], level: 'info' } },
+  });
+  t.teardown(async () => {
+    await removeFiles('test.log');
+  });
+  t.plan(2);
+  t.equal(process.listenerCount('SIGHUP'), initialListeners + 1);
+  log4js.shutdown(() => {
+    t.equal(process.listenerCount('SIGHUP'), initialListeners);
+    t.end();
+  });
 });
diff --git a/test/tap/fileAppender-test.js b/test/tap/fileAppender-test.js
index 95220ece..e51610cb 100644
--- a/test/tap/fileAppender-test.js
+++ b/test/tap/fileAppender-test.js
@@ -1,368 +1,507 @@
-'use strict';
+/* eslint max-classes-per-file: ["error", 2] */
 
-const test = require('tap').test;
-const fs = require('fs');
+const { test } = require('tap');
+const fs = require('fs-extra');
 const path = require('path');
-const sandbox = require('sandboxed-module');
-const log4js = require('../../lib/log4js');
+const sandbox = require('@log4js-node/sandboxed-module');
 const zlib = require('zlib');
-const EOL = require('os').EOL || '\n';
+const util = require('util');
+
+const osDelay = process.platform === 'win32' ? 400 : 200;
 
-log4js.clearAppenders();
+const sleep = util.promisify(setTimeout);
+const gunzip = util.promisify(zlib.gunzip);
+const EOL = require('os').EOL || '\n';
+const log4js = require('../../lib/log4js');
 
-function remove(filename) {
+const removeFile = async (filename) => {
   try {
-    fs.unlinkSync(filename);
+    await fs.unlink(filename);
   } catch (e) {
-    // doesn't really matter if it failed
+    // let's pretend this never happened
   }
-}
+};
 
 test('log4js fileAppender', (batch) => {
-  batch.test('adding multiple fileAppenders', (t) => {
-    const initialCount = process.listeners('exit').length;
-    let count = 5;
-    let logfile;
-
-    while (count--) {
-      logfile = path.join(__dirname, `fa-default-test${count}.log`);
-      log4js.addAppender(
-        require('../../lib/appenders/file').appender(logfile),
-        'default-settings'
-      );
-    }
+  batch.test('with default fileAppender settings', async (t) => {
+    const testFile = path.join(__dirname, 'fa-default-test.log');
+    const logger = log4js.getLogger('default-settings');
+    await removeFile(testFile);
 
-    t.equal(initialCount + 1, process.listeners('exit').length, 'should not add more than one exit listener');
+    t.teardown(async () => {
+      await new Promise((resolve) => {
+        log4js.shutdown(resolve);
+      });
+      await removeFile(testFile);
+    });
+
+    log4js.configure({
+      appenders: { file: { type: 'file', filename: testFile } },
+      categories: { default: { appenders: ['file'], level: 'debug' } },
+    });
+
+    logger.info('This should be in the file.');
+
+    await sleep(osDelay);
+    const fileContents = await fs.readFile(testFile, 'utf8');
+    t.match(fileContents, `This should be in the file.${EOL}`);
+    t.match(
+      fileContents,
+      /\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
+    );
     t.end();
   });
 
-  batch.test('exit listener', (t) => {
-    let exitListener;
-    const openedFiles = [];
-
-    const fileAppender = sandbox.require(
-      '../../lib/appenders/file',
-      {
-        globals: {
-          process: {
-            on: function (evt, listener) {
-              if (evt === 'exit') {
-                exitListener = listener;
-              }
-            }
-          }
+  batch.test('with tilde expansion in filename', async (t) => {
+    const fileName = 'tmpTilde.log';
+    const expandedPath = path.join(__dirname, fileName);
+    await removeFile(expandedPath);
+
+    const sandboxedLog4js = sandbox.require('../../lib/log4js', {
+      requires: {
+        os: {
+          homedir() {
+            return __dirname;
+          },
         },
-        singleOnly: true,
-        requires: {
-          streamroller: {
-            RollingFileStream: function (filename) {
-              openedFiles.push(filename);
-
-              this.end = function () {
-                openedFiles.shift();
-              };
-
-              this.on = function () {
-              };
-            }
-          }
-        }
-      }
+      },
+    });
+
+    t.teardown(async () => {
+      await new Promise((resolve) => {
+        sandboxedLog4js.shutdown(resolve);
+      });
+      await removeFile(expandedPath);
+    });
+
+    sandboxedLog4js.configure({
+      appenders: { file: { type: 'file', filename: path.join('~', fileName) } },
+      categories: { default: { appenders: ['file'], level: 'debug' } },
+    });
+
+    t.ok(
+      fs.existsSync(expandedPath),
+      'should expand tilde to create in home directory'
     );
+    t.end();
+  });
 
-    for (let i = 0; i < 5; i += 1) {
-      fileAppender.appender(`test${i}`, null, 100);
-    }
-    t.ok(openedFiles);
-    exitListener();
-    t.equal(openedFiles.length, 0, 'should close all open files');
+  batch.test('should give error if invalid filename', async (t) => {
+    const file = '';
+    t.throws(
+      () =>
+        log4js.configure({
+          appenders: {
+            file: {
+              type: 'file',
+              filename: file,
+            },
+          },
+          categories: {
+            default: { appenders: ['file'], level: 'debug' },
+          },
+        }),
+      new Error(`Invalid filename: ${file}`)
+    );
+    const dir = `.${path.sep}`;
+    t.throws(
+      () =>
+        log4js.configure({
+          appenders: {
+            file: {
+              type: 'file',
+              filename: dir,
+            },
+          },
+          categories: {
+            default: { appenders: ['file'], level: 'debug' },
+          },
+        }),
+      new Error(`Filename is a directory: ${dir}`)
+    );
     t.end();
   });
 
-  batch.test('with default fileAppender settings', (t) => {
+  batch.test('should flush logs on shutdown', async (t) => {
     const testFile = path.join(__dirname, 'fa-default-test.log');
     const logger = log4js.getLogger('default-settings');
-    remove(testFile);
-
-    log4js.clearAppenders();
-    log4js.addAppender(
-      require('../../lib/appenders/file').appender(testFile),
-      'default-settings'
-    );
-
-    logger.info('This should be in the file.');
+    await removeFile(testFile);
 
-    setTimeout(() => {
-      fs.readFile(testFile, 'utf8', (err, fileContents) => {
-        t.include(fileContents, `This should be in the file.${EOL}`);
-        t.match(
-          fileContents,
-          /\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
-        );
-        t.end();
+    t.teardown(async () => {
+      await new Promise((resolve) => {
+        log4js.shutdown(resolve);
       });
-    }, 100);
-  });
+      await removeFile(testFile);
+    });
 
-  batch.test('fileAppender subcategories', (t) => {
-    log4js.clearAppenders();
+    log4js.configure({
+      appenders: { test: { type: 'file', filename: testFile } },
+      categories: { default: { appenders: ['test'], level: 'trace' } },
+    });
 
-    function addAppender(cat) {
-      const testFile = path.join(
-        __dirname,
-        `fa-subcategories-test-${cat.join('-').replace(/\./g, '_')}.log`
-      );
-      remove(testFile);
-      log4js.addAppender(require('../../lib/appenders/file').appender(testFile), cat);
-      return testFile;
-    }
+    logger.info('1');
+    logger.info('2');
+    logger.info('3');
 
-    /* eslint-disable camelcase */
-    const file_sub1 = addAppender(['sub1']);
-    const file_sub1_sub12$sub1_sub13 = addAppender(['sub1.sub12', 'sub1.sub13']);
-    const file_sub1_sub12 = addAppender(['sub1.sub12']);
-    const logger_sub1_sub12_sub123 = log4js.getLogger('sub1.sub12.sub123');
-    const logger_sub1_sub13_sub133 = log4js.getLogger('sub1.sub13.sub133');
-    const logger_sub1_sub14 = log4js.getLogger('sub1.sub14');
-    const logger_sub2 = log4js.getLogger('sub2');
-
-    logger_sub1_sub12_sub123.info('sub1_sub12_sub123');
-    logger_sub1_sub13_sub133.info('sub1_sub13_sub133');
-    logger_sub1_sub14.info('sub1_sub14');
-    logger_sub2.info('sub2');
-
-    setTimeout(() => {
-      t.test('file contents', (assert) => {
-        const fileContents = {
-          file_sub1: fs.readFileSync(file_sub1).toString(),
-          file_sub1_sub12$sub1_sub13: fs.readFileSync(file_sub1_sub12$sub1_sub13).toString(),
-          file_sub1_sub12: fs.readFileSync(file_sub1_sub12).toString()
-        };
-        // everything but category 'sub2'
-        assert.match(
-          fileContents.file_sub1,
-          /^(\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] (sub1.sub12.sub123 - sub1_sub12_sub123|sub1.sub13.sub133 - sub1_sub13_sub133|sub1.sub14 - sub1_sub14)[\s\S]){3}$/ // eslint-disable-line
-        );
-        assert.ok(
-          fileContents.file_sub1.match(/sub123/) &&
-          fileContents.file_sub1.match(/sub133/) &&
-          fileContents.file_sub1.match(/sub14/)
-        );
-        assert.ok(!fileContents.file_sub1.match(/sub2/));
-
-        // only catgories starting with 'sub1.sub12' and 'sub1.sub13'
-        assert.match(
-          fileContents.file_sub1_sub12$sub1_sub13,
-          /^(\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] (sub1.sub12.sub123 - sub1_sub12_sub123|sub1.sub13.sub133 - sub1_sub13_sub133)[\s\S]){2}$/ // eslint-disable-line
-        );
-        assert.ok(
-          fileContents.file_sub1_sub12$sub1_sub13.match(/sub123/) &&
-          fileContents.file_sub1_sub12$sub1_sub13.match(/sub133/)
-        );
-        assert.ok(!fileContents.file_sub1_sub12$sub1_sub13.match(/sub14|sub2/));
-
-        // only catgories starting with 'sub1.sub12'
-        assert.match(
-          fileContents.file_sub1_sub12,
-          /^(\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] (sub1.sub12.sub123 - sub1_sub12_sub123)[\s\S]){1}$/ // eslint-disable-line
-        );
-        assert.ok(!fileContents.file_sub1_sub12.match(/sub14|sub2|sub13/));
-        assert.end();
-      });
-      t.end();
-    }, 3000);
+    await new Promise((resolve) => {
+      log4js.shutdown(resolve);
+    });
+    const fileContents = await fs.readFile(testFile, 'utf8');
+    // 3 lines of output, plus the trailing newline.
+    t.equal(fileContents.split(EOL).length, 4);
+    t.match(
+      fileContents,
+      /\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
+    );
+    t.end();
   });
 
-  batch.test('with a max file size and no backups', (t) => {
+  batch.test('with a max file size and no backups', async (t) => {
     const testFile = path.join(__dirname, 'fa-maxFileSize-test.log');
     const logger = log4js.getLogger('max-file-size');
-    remove(testFile);
-    remove(`${testFile}.1`);
+    await removeFile(testFile);
+
+    t.teardown(async () => {
+      await new Promise((resolve) => {
+        log4js.shutdown(resolve);
+      });
+      await removeFile(testFile);
+    });
+
     // log file of 100 bytes maximum, no backups
-    log4js.clearAppenders();
-    log4js.addAppender(
-      require('../../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 100, 0),
-      'max-file-size'
-    );
+    log4js.configure({
+      appenders: {
+        file: {
+          type: 'file',
+          filename: testFile,
+          maxLogSize: 100,
+          backups: 0,
+        },
+      },
+      categories: {
+        default: { appenders: ['file'], level: 'debug' },
+      },
+    });
+
     logger.info('This is the first log message.');
     logger.info('This is an intermediate log message.');
     logger.info('This is the second log message.');
     // wait for the file system to catch up
-    setTimeout(() => {
-      fs.readFile(testFile, 'utf8', (err, fileContents) => {
-        t.include(fileContents, 'This is the second log message.');
-        t.equal(fileContents.indexOf('This is the first log message.'), -1);
-        fs.readdir(__dirname, (e, files) => {
-          const logFiles = files.filter(
-            file => file.includes('fa-maxFileSize-test.log')
-          );
-          t.equal(logFiles.length, 2, 'should be 2 files');
-          t.end();
-        });
+    await sleep(osDelay * 2);
+    const fileContents = await fs.readFile(testFile, 'utf8');
+    t.match(fileContents, 'This is the second log message.');
+    t.equal(fileContents.indexOf('This is the first log message.'), -1);
+    const files = await fs.readdir(__dirname);
+    const logFiles = files.filter((file) =>
+      file.includes('fa-maxFileSize-test.log')
+    );
+    t.equal(logFiles.length, 1, 'should be 1 file');
+    t.end();
+  });
+
+  batch.test('with a max file size in wrong unit mode', async (t) => {
+    const invalidUnit = '1Z';
+    const expectedError = new Error(`maxLogSize: "${invalidUnit}" is invalid`);
+    t.throws(
+      () =>
+        log4js.configure({
+          appenders: {
+            file: {
+              type: 'file',
+              maxLogSize: invalidUnit,
+            },
+          },
+          categories: {
+            default: { appenders: ['file'], level: 'debug' },
+          },
+        }),
+      expectedError
+    );
+    t.end();
+  });
+
+  batch.test('with a max file size in unit mode and no backups', async (t) => {
+    const testFile = path.join(__dirname, 'fa-maxFileSize-unit-test.log');
+    const logger = log4js.getLogger('max-file-size-unit');
+    await Promise.all([removeFile(testFile), removeFile(`${testFile}.1`)]);
+
+    t.teardown(async () => {
+      await new Promise((resolve) => {
+        log4js.shutdown(resolve);
       });
-    }, 100);
+      await Promise.all([removeFile(testFile), removeFile(`${testFile}.1`)]);
+    });
+
+    // log file of 1K = 1024 bytes maximum, no backups
+    log4js.configure({
+      appenders: {
+        file: {
+          type: 'file',
+          filename: testFile,
+          maxLogSize: '1K',
+          backups: 0,
+          layout: { type: 'messagePassThrough' },
+        },
+      },
+      categories: {
+        default: { appenders: ['file'], level: 'debug' },
+      },
+    });
+    const maxLine = 22; // 1024 max file size / 47 bytes per line
+    for (let i = 0; i < maxLine; i++) {
+      logger.info('These are the log messages for the first file.'); // 46 bytes per line + '\n'
+    }
+
+    logger.info('This is the second log message.');
+
+    // wait for the file system to catch up
+    await sleep(osDelay);
+    const fileContents = await fs.readFile(testFile, 'utf8');
+    t.match(fileContents, 'This is the second log message.');
+    t.notMatch(fileContents, 'These are the log messages for the first file.');
+    const files = await fs.readdir(__dirname);
+    const logFiles = files.filter((file) =>
+      file.includes('fa-maxFileSize-unit-test.log')
+    );
+    t.equal(logFiles.length, 1, 'should be 1 file');
+    t.end();
   });
 
-  batch.test('with a max file size and 2 backups', (t) => {
-    const testFile = path.join(__dirname, 'fa-maxFileSize-with-backups-test.log');
+  batch.test('with a max file size and 2 backups', async (t) => {
+    const testFile = path.join(
+      __dirname,
+      'fa-maxFileSize-with-backups-test.log'
+    );
     const logger = log4js.getLogger('max-file-size-backups');
-    remove(testFile);
-    remove(`${testFile}.1`);
-    remove(`${testFile}.2`);
+    await Promise.all([
+      removeFile(testFile),
+      removeFile(`${testFile}.1`),
+      removeFile(`${testFile}.2`),
+    ]);
+
+    t.teardown(async () => {
+      await new Promise((resolve) => {
+        log4js.shutdown(resolve);
+      });
+      await Promise.all([
+        removeFile(testFile),
+        removeFile(`${testFile}.1`),
+        removeFile(`${testFile}.2`),
+      ]);
+    });
 
     // log file of 50 bytes maximum, 2 backups
-    log4js.clearAppenders();
-    log4js.addAppender(
-      require('../../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 50, 2),
-      'max-file-size-backups'
-    );
+    log4js.configure({
+      appenders: {
+        file: {
+          type: 'file',
+          filename: testFile,
+          maxLogSize: 50,
+          backups: 2,
+        },
+      },
+      categories: { default: { appenders: ['file'], level: 'debug' } },
+    });
+
     logger.info('This is the first log message.');
     logger.info('This is the second log message.');
     logger.info('This is the third log message.');
     logger.info('This is the fourth log message.');
     // give the system a chance to open the stream
-    setTimeout(() => {
-      fs.readdir(__dirname, (err, files) => {
-        const logFiles = files.sort().filter(
-          file => file.includes('fa-maxFileSize-with-backups-test.log')
-        );
-        t.equal(logFiles.length, 3);
-        t.same(logFiles, [
-          'fa-maxFileSize-with-backups-test.log',
-          'fa-maxFileSize-with-backups-test.log.1',
-          'fa-maxFileSize-with-backups-test.log.2'
-        ]);
-        t.test('the contents of the first file', (assert) => {
-          fs.readFile(path.join(__dirname, logFiles[0]), 'utf8', (e, contents) => {
-            assert.include(contents, 'This is the fourth log message.');
-            assert.end();
-          });
-        });
-        t.test('the contents of the second file', (assert) => {
-          fs.readFile(path.join(__dirname, logFiles[1]), 'utf8', (e, contents) => {
-            assert.include(contents, 'This is the third log message.');
-            assert.end();
-          });
-        });
-        t.test('the contents of the third file', (assert) => {
-          fs.readFile(path.join(__dirname, logFiles[2]), 'utf8', (e, contents) => {
-            assert.include(contents, 'This is the second log message.');
-            assert.end();
-          });
-        });
-        t.end();
-      });
-    }, 200);
+    await sleep(osDelay);
+    const files = await fs.readdir(__dirname);
+    const logFiles = files
+      .sort()
+      .filter((file) => file.includes('fa-maxFileSize-with-backups-test.log'));
+    t.equal(logFiles.length, 3);
+    t.same(logFiles, [
+      'fa-maxFileSize-with-backups-test.log',
+      'fa-maxFileSize-with-backups-test.log.1',
+      'fa-maxFileSize-with-backups-test.log.2',
+    ]);
+    let contents = await fs.readFile(path.join(__dirname, logFiles[0]), 'utf8');
+    t.match(contents, 'This is the fourth log message.');
+    contents = await fs.readFile(path.join(__dirname, logFiles[1]), 'utf8');
+    t.match(contents, 'This is the third log message.');
+    contents = await fs.readFile(path.join(__dirname, logFiles[2]), 'utf8');
+    t.match(contents, 'This is the second log message.');
+
+    t.end();
   });
 
-  batch.test('with a max file size and 2 compressed backups', (t) => {
-    const testFile = path.join(__dirname, 'fa-maxFileSize-with-backups-compressed-test.log');
+  batch.test('with a max file size and 2 compressed backups', async (t) => {
+    const testFile = path.join(
+      __dirname,
+      'fa-maxFileSize-with-backups-compressed-test.log'
+    );
     const logger = log4js.getLogger('max-file-size-backups');
-    remove(testFile);
-    remove(`${testFile}.1.gz`);
-    remove(`${testFile}.2.gz`);
+    await Promise.all([
+      removeFile(testFile),
+      removeFile(`${testFile}.1.gz`),
+      removeFile(`${testFile}.2.gz`),
+    ]);
+
+    t.teardown(async () => {
+      await new Promise((resolve) => {
+        log4js.shutdown(resolve);
+      });
+      await Promise.all([
+        removeFile(testFile),
+        removeFile(`${testFile}.1.gz`),
+        removeFile(`${testFile}.2.gz`),
+      ]);
+    });
 
     // log file of 50 bytes maximum, 2 backups
-    log4js.clearAppenders();
-    log4js.addAppender(
-      require('../../lib/appenders/file').appender(
-        testFile, log4js.layouts.basicLayout, 50, 2, { compress: true }
-      ),
-      'max-file-size-backups'
-    );
+    log4js.configure({
+      appenders: {
+        file: {
+          type: 'file',
+          filename: testFile,
+          maxLogSize: 50,
+          backups: 2,
+          compress: true,
+        },
+      },
+      categories: { default: { appenders: ['file'], level: 'debug' } },
+    });
     logger.info('This is the first log message.');
     logger.info('This is the second log message.');
     logger.info('This is the third log message.');
     logger.info('This is the fourth log message.');
     // give the system a chance to open the stream
-    setTimeout(() => {
-      fs.readdir(__dirname, (err, files) => {
-        const logFiles = files.sort().filter(
-          file => file.includes('fa-maxFileSize-with-backups-compressed-test.log')
-        );
-        t.equal(logFiles.length, 3, 'should be 3 files');
-        t.same(logFiles, [
-          'fa-maxFileSize-with-backups-compressed-test.log',
-          'fa-maxFileSize-with-backups-compressed-test.log.1.gz',
-          'fa-maxFileSize-with-backups-compressed-test.log.2.gz'
-        ]);
-        t.test('the contents of the first file', (assert) => {
-          fs.readFile(path.join(__dirname, logFiles[0]), 'utf8', (e, contents) => {
-            assert.include(contents, 'This is the fourth log message.');
-            assert.end();
-          });
-        });
-        t.test('the contents of the second file', (assert) => {
-          zlib.gunzip(fs.readFileSync(path.join(__dirname, logFiles[1])), (e, contents) => {
-            assert.include(contents.toString('utf8'), 'This is the third log message.');
-            assert.end();
-          });
-        });
-        t.test('the contents of the third file', (assert) => {
-          zlib.gunzip(fs.readFileSync(path.join(__dirname, logFiles[2])), (e, contents) => {
-            assert.include(contents.toString('utf8'), 'This is the second log message.');
-            assert.end();
-          });
-        });
-        t.end();
-      });
-    }, 1000);
+    await sleep(osDelay);
+    const files = await fs.readdir(__dirname);
+    const logFiles = files
+      .sort()
+      .filter((file) =>
+        file.includes('fa-maxFileSize-with-backups-compressed-test.log')
+      );
+    t.equal(logFiles.length, 3, 'should be 3 files');
+    t.same(logFiles, [
+      'fa-maxFileSize-with-backups-compressed-test.log',
+      'fa-maxFileSize-with-backups-compressed-test.log.1.gz',
+      'fa-maxFileSize-with-backups-compressed-test.log.2.gz',
+    ]);
+    let contents = await fs.readFile(path.join(__dirname, logFiles[0]), 'utf8');
+    t.match(contents, 'This is the fourth log message.');
+
+    contents = await gunzip(
+      await fs.readFile(path.join(__dirname, logFiles[1]))
+    );
+    t.match(contents.toString('utf8'), 'This is the third log message.');
+    contents = await gunzip(
+      await fs.readFile(path.join(__dirname, logFiles[2]))
+    );
+    t.match(contents.toString('utf8'), 'This is the second log message.');
+    t.end();
   });
 
-  batch.test('configure with fileAppender', (t) => {
-    // this config file defines one file appender (to ./tmp-tests.log)
-    // and sets the log level for "tests" to WARN
-    log4js.configure('./test/tap/log4js.json');
-    const logger = log4js.getLogger('tests');
-    logger.info('this should not be written to the file');
-    logger.warn('this should be written to the file');
+  batch.test('handling of writer.writable', (t) => {
+    const output = [];
+    let writable = true;
 
-    // wait for the file system to catch up
-    setTimeout(() => {
-      fs.readFile('tmp-tests.log', 'utf8', (err, contents) => {
-        t.include(contents, `this should be written to the file${EOL}`);
-        t.equal(contents.indexOf('this should not be written to the file'), -1);
-        t.end();
-      });
-    }, 100);
+    const RollingFileStream = class {
+      write(loggingEvent) {
+        output.push(loggingEvent);
+        this.written = true;
+        return true;
+      }
+
+      // eslint-disable-next-line class-methods-use-this
+      on() {}
+
+      // eslint-disable-next-line class-methods-use-this
+      get writable() {
+        return writable;
+      }
+    };
+    const fileAppender = sandbox.require('../../lib/appenders/file', {
+      requires: {
+        streamroller: {
+          RollingFileStream,
+        },
+      },
+    });
+
+    const appender = fileAppender.configure(
+      { filename: 'test1.log', maxLogSize: 100 },
+      {
+        basicLayout(loggingEvent) {
+          return loggingEvent.data;
+        },
+      }
+    );
+
+    t.test('should log when writer.writable=true', (assert) => {
+      writable = true;
+      assert.ok(output.length === 0);
+      appender({ data: 'something to log' });
+      assert.ok(output.length === 1);
+      assert.match(output[output.length - 1], 'something to log');
+      assert.end();
+    });
+
+    t.test('should not log when writer.writable=false', (assert) => {
+      writable = false;
+      assert.ok(output.length === 1);
+      appender({ data: 'this should not be logged' });
+      assert.ok(output.length === 1);
+      assert.notMatch(output[output.length - 1], 'this should not be logged');
+      assert.end();
+    });
+
+    t.end();
   });
 
   batch.test('when underlying stream errors', (t) => {
     let consoleArgs;
     let errorHandler;
 
-    const fileAppender = sandbox.require(
-      '../../lib/appenders/file',
-      {
-        globals: {
-          console: {
-            error: function () {
-              consoleArgs = Array.prototype.slice.call(arguments);
-            }
-          }
-        },
-        requires: {
-          streamroller: {
-            RollingFileStream: function () {
-              this.end = function () {
-              };
-              this.on = function (evt, cb) {
-                if (evt === 'error') {
-                  errorHandler = cb;
-                }
-              };
-            }
-          }
+    const RollingFileStream = class {
+      end() {
+        this.ended = true;
+      }
+
+      on(evt, cb) {
+        if (evt === 'error') {
+          this.errored = true;
+          errorHandler = cb;
         }
       }
-    );
 
-    fileAppender.appender('test1.log', null, 100);
+      write() {
+        this.written = true;
+        return true;
+      }
+    };
+    const fileAppender = sandbox.require('../../lib/appenders/file', {
+      globals: {
+        console: {
+          error(...args) {
+            consoleArgs = args;
+          },
+        },
+      },
+      requires: {
+        streamroller: {
+          RollingFileStream,
+        },
+      },
+    });
+
+    fileAppender.configure(
+      { filename: 'test1.log', maxLogSize: 100 },
+      { basicLayout() {} }
+    );
     errorHandler({ error: 'aargh' });
 
     t.test('should log the error to console.error', (assert) => {
       assert.ok(consoleArgs);
-      assert.equal(consoleArgs[0], 'log4js.fileAppender - Writing to file %s, error happened ');
+      assert.equal(
+        consoleArgs[0],
+        'log4js.fileAppender - Writing to file %s, error happened '
+      );
       assert.equal(consoleArgs[1], 'test1.log');
       assert.equal(consoleArgs[2].error, 'aargh');
       assert.end();
@@ -370,5 +509,61 @@ test('log4js fileAppender', (batch) => {
     t.end();
   });
 
+  batch.test('with removeColor fileAppender settings', async (t) => {
+    const testFilePlain = path.join(__dirname, 'fa-removeColor-test.log');
+    const testFileAsIs = path.join(__dirname, 'fa-asIs-test.log');
+    const logger = log4js.getLogger('default-settings');
+    await removeFile(testFilePlain);
+    await removeFile(testFileAsIs);
+
+    t.teardown(async () => {
+      await new Promise((resolve) => {
+        log4js.shutdown(resolve);
+      });
+      await removeFile(testFilePlain);
+      await removeFile(testFileAsIs);
+    });
+
+    log4js.configure({
+      appenders: {
+        plainFile: { type: 'file', filename: testFilePlain, removeColor: true },
+        asIsFile: { type: 'file', filename: testFileAsIs, removeColor: false },
+      },
+      categories: {
+        default: { appenders: ['plainFile', 'asIsFile'], level: 'debug' },
+      },
+    });
+
+    logger.info(
+      'This should be in the file.',
+      '\x1b[33mColor\x1b[0m \x1b[93;41mshould\x1b[0m be \x1b[38;5;8mplain\x1b[0m.',
+      {},
+      []
+    );
+
+    await sleep(osDelay);
+    let fileContents = await fs.readFile(testFilePlain, 'utf8');
+    t.match(
+      fileContents,
+      `This should be in the file. Color should be plain. {} []${EOL}`
+    );
+    t.match(
+      fileContents,
+      /\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
+    );
+
+    fileContents = await fs.readFile(testFileAsIs, 'utf8');
+    t.match(
+      fileContents,
+      'This should be in the file.',
+      `\x1b[33mColor\x1b[0m \x1b[93;41mshould\x1b[0m be \x1b[38;5;8mplain\x1b[0m. {} []${EOL}`
+    );
+    t.match(
+      fileContents,
+      /\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
+    );
+    t.end();
+  });
+
   batch.end();
 });
diff --git a/test/tap/fileSyncAppender-test.js b/test/tap/fileSyncAppender-test.js
index 4874862b..eba2c869 100644
--- a/test/tap/fileSyncAppender-test.js
+++ b/test/tap/fileSyncAppender-test.js
@@ -1,12 +1,9 @@
-'use strict';
-
-const test = require('tap').test;
+const { test } = require('tap');
 const fs = require('fs');
 const path = require('path');
-const log4js = require('../../lib/log4js');
 const EOL = require('os').EOL || '\n';
-
-log4js.clearAppenders();
+const sandbox = require('@log4js-node/sandboxed-module');
+const log4js = require('../../lib/log4js');
 
 function remove(filename) {
   try {
@@ -22,60 +19,255 @@ test('log4js fileSyncAppender', (batch) => {
     const logger = log4js.getLogger('default-settings');
     remove(testFile);
 
-    log4js.clearAppenders();
-    log4js.addAppender(
-      require('../../lib/appenders/fileSync').appender(testFile),
-      'default-settings'
-    );
+    t.teardown(() => {
+      remove(testFile);
+    });
+
+    log4js.configure({
+      appenders: { sync: { type: 'fileSync', filename: testFile } },
+      categories: { default: { appenders: ['sync'], level: 'debug' } },
+    });
 
     logger.info('This should be in the file.');
 
     fs.readFile(testFile, 'utf8', (err, fileContents) => {
-      t.include(fileContents, `This should be in the file.${EOL}`);
+      t.match(fileContents, `This should be in the file.${EOL}`);
       t.match(
         fileContents,
-        /\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
+        /\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
       );
       t.end();
     });
   });
 
+  batch.test('with tilde expansion in filename', (t) => {
+    const fileName = 'tmpTilde.log';
+    const expandedPath = path.join(__dirname, fileName);
+    remove(expandedPath);
+
+    const sandboxedLog4js = sandbox.require('../../lib/log4js', {
+      requires: {
+        os: {
+          homedir() {
+            return __dirname;
+          },
+        },
+      },
+    });
+
+    t.teardown(() => {
+      log4js.shutdown(() => {
+        remove(expandedPath);
+      });
+    });
+
+    sandboxedLog4js.configure({
+      appenders: {
+        sync: { type: 'fileSync', filename: path.join('~', fileName) },
+      },
+      categories: { default: { appenders: ['sync'], level: 'debug' } },
+    });
+
+    t.ok(
+      fs.existsSync(expandedPath),
+      'should expand tilde to create in home directory'
+    );
+    t.end();
+  });
+
+  batch.test('with existing file', (t) => {
+    const testFile = path.join(__dirname, '/fa-existing-file-sync-test.log');
+    const logger = log4js.getLogger('default-settings');
+    remove(testFile);
+
+    t.teardown(() => {
+      remove(testFile);
+    });
+
+    log4js.configure({
+      appenders: { sync: { type: 'fileSync', filename: testFile } },
+      categories: { default: { appenders: ['sync'], level: 'debug' } },
+    });
+
+    logger.info('This should be in the file.');
+
+    log4js.shutdown(() => {
+      log4js.configure({
+        appenders: { sync: { type: 'fileSync', filename: testFile } },
+        categories: { default: { appenders: ['sync'], level: 'debug' } },
+      });
+
+      logger.info('This should also be in the file.');
+
+      fs.readFile(testFile, 'utf8', (err, fileContents) => {
+        t.match(fileContents, `This should be in the file.${EOL}`);
+        t.match(fileContents, `This should also be in the file.${EOL}`);
+        t.match(
+          fileContents,
+          /\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
+        );
+        t.end();
+      });
+    });
+  });
+
+  batch.test('should give error if invalid filename', async (t) => {
+    const file = '';
+    t.throws(
+      () =>
+        log4js.configure({
+          appenders: {
+            file: {
+              type: 'fileSync',
+              filename: file,
+            },
+          },
+          categories: {
+            default: { appenders: ['file'], level: 'debug' },
+          },
+        }),
+      new Error(`Invalid filename: ${file}`)
+    );
+    const dir = `.${path.sep}`;
+    t.throws(
+      () =>
+        log4js.configure({
+          appenders: {
+            file: {
+              type: 'fileSync',
+              filename: dir,
+            },
+          },
+          categories: {
+            default: { appenders: ['file'], level: 'debug' },
+          },
+        }),
+      new Error(`Filename is a directory: ${dir}`)
+    );
+    t.end();
+  });
+
+  batch.test('should give error if invalid maxLogSize', async (t) => {
+    const maxLogSize = -1;
+    const expectedError = new Error(`maxLogSize (${maxLogSize}) should be > 0`);
+    t.throws(
+      () =>
+        log4js.configure({
+          appenders: {
+            file: {
+              type: 'fileSync',
+              filename: path.join(
+                __dirname,
+                'fa-invalidMaxFileSize-sync-test.log'
+              ),
+              maxLogSize: -1,
+            },
+          },
+          categories: {
+            default: { appenders: ['file'], level: 'debug' },
+          },
+        }),
+      expectedError
+    );
+    t.end();
+  });
+
   batch.test('with a max file size and no backups', (t) => {
     const testFile = path.join(__dirname, '/fa-maxFileSize-sync-test.log');
     const logger = log4js.getLogger('max-file-size');
     remove(testFile);
-    remove(`${testFile}.1`);
+
+    t.teardown(() => {
+      remove(testFile);
+    });
+
     // log file of 100 bytes maximum, no backups
-    log4js.clearAppenders();
-    log4js.addAppender(
-      require(
-        '../../lib/appenders/fileSync'
-      ).appender(
-        testFile,
-        log4js.layouts.basicLayout,
-        100,
-        0
-      ),
-      'max-file-size'
-    );
+    log4js.configure({
+      appenders: {
+        sync: {
+          type: 'fileSync',
+          filename: testFile,
+          maxLogSize: 100,
+          backups: 0,
+        },
+      },
+      categories: { default: { appenders: ['sync'], level: 'debug' } },
+    });
     logger.info('This is the first log message.');
     logger.info('This is an intermediate log message.');
     logger.info('This is the second log message.');
 
     t.test('log file should only contain the second message', (assert) => {
       fs.readFile(testFile, 'utf8', (err, fileContents) => {
-        assert.include(fileContents, `This is the second log message.${EOL}`);
-        assert.equal(fileContents.indexOf('This is the first log message.'), -1);
+        assert.match(fileContents, `This is the second log message.${EOL}`);
+        assert.equal(
+          fileContents.indexOf('This is the first log message.'),
+          -1
+        );
+        assert.end();
+      });
+    });
+
+    t.test('there should be one test file', (assert) => {
+      fs.readdir(__dirname, (err, files) => {
+        const logFiles = files.filter((file) =>
+          file.includes('fa-maxFileSize-sync-test.log')
+        );
+        assert.equal(logFiles.length, 1);
+        assert.end();
+      });
+    });
+    t.end();
+  });
+
+  batch.test('with a max file size in unit mode and no backups', (t) => {
+    const testFile = path.join(__dirname, '/fa-maxFileSize-unit-sync-test.log');
+    const logger = log4js.getLogger('max-file-size-unit');
+    remove(testFile);
+    remove(`${testFile}.1`);
+
+    t.teardown(() => {
+      remove(testFile);
+      remove(`${testFile}.1`);
+    });
+
+    // log file of 1K = 1024 bytes maximum, no backups
+    log4js.configure({
+      appenders: {
+        sync: {
+          type: 'fileSync',
+          filename: testFile,
+          maxLogSize: '1K',
+          backups: 0,
+          layout: { type: 'messagePassThrough' },
+        },
+      },
+      categories: { default: { appenders: ['sync'], level: 'debug' } },
+    });
+    const maxLine = 22; // 1024 max file size / 47 bytes per line
+    for (let i = 0; i < maxLine; i++) {
+      logger.info('These are the log messages for the first file.'); // 46 bytes per line + '\n'
+    }
+
+    logger.info('This is the second log message.');
+
+    t.test('log file should only contain the second message', (assert) => {
+      fs.readFile(testFile, 'utf8', (err, fileContents) => {
+        assert.match(fileContents, `This is the second log message.${EOL}`);
+        assert.notMatch(
+          fileContents,
+          'These are the log messages for the first file.'
+        );
         assert.end();
       });
     });
 
-    t.test('there should be two test files', (assert) => {
+    t.test('there should be one test file', (assert) => {
       fs.readdir(__dirname, (err, files) => {
-        const logFiles = files.filter(
-          file => file.includes('fa-maxFileSize-sync-test.log')
+        const logFiles = files.filter((file) =>
+          file.includes('fa-maxFileSize-unit-sync-test.log')
         );
-        assert.equal(logFiles.length, 2);
+        assert.equal(logFiles.length, 1);
         assert.end();
       });
     });
@@ -83,23 +275,33 @@ test('log4js fileSyncAppender', (batch) => {
   });
 
   batch.test('with a max file size and 2 backups', (t) => {
-    const testFile = path.join(__dirname, '/fa-maxFileSize-with-backups-sync-test.log');
+    const testFile = path.join(
+      __dirname,
+      '/fa-maxFileSize-with-backups-sync-test.log'
+    );
     const logger = log4js.getLogger('max-file-size-backups');
     remove(testFile);
     remove(`${testFile}.1`);
     remove(`${testFile}.2`);
 
+    t.teardown(() => {
+      remove(testFile);
+      remove(`${testFile}.1`);
+      remove(`${testFile}.2`);
+    });
+
     // log file of 50 bytes maximum, 2 backups
-    log4js.clearAppenders();
-    log4js.addAppender(
-      require('../../lib/appenders/fileSync').appender(
-        testFile,
-        log4js.layouts.basicLayout,
-        50,
-        2
-      ),
-      'max-file-size-backups'
-    );
+    log4js.configure({
+      appenders: {
+        sync: {
+          type: 'fileSync',
+          filename: testFile,
+          maxLogSize: 50,
+          backups: 2,
+        },
+      },
+      categories: { default: { appenders: ['sync'], level: 'debug' } },
+    });
     logger.info('This is the first log message.');
     logger.info('This is the second log message.');
     logger.info('This is the third log message.');
@@ -108,55 +310,320 @@ test('log4js fileSyncAppender', (batch) => {
     t.test('the log files', (assert) => {
       assert.plan(5);
       fs.readdir(__dirname, (err, files) => {
-        const logFiles = files.filter(
-          file => file.includes('fa-maxFileSize-with-backups-sync-test.log')
+        const logFiles = files.filter((file) =>
+          file.includes('fa-maxFileSize-with-backups-sync-test.log')
         );
         assert.equal(logFiles.length, 3, 'should be 3 files');
-        assert.same(logFiles, [
-          'fa-maxFileSize-with-backups-sync-test.log',
-          'fa-maxFileSize-with-backups-sync-test.log.1',
-          'fa-maxFileSize-with-backups-sync-test.log.2'
-        ], 'should be named in sequence');
-
-        fs.readFile(path.join(__dirname, logFiles[0]), 'utf8', (e, contents) => {
-          assert.include(contents, 'This is the fourth log message.');
-        });
-        fs.readFile(path.join(__dirname, logFiles[1]), 'utf8', (e, contents) => {
-          assert.include(contents, 'This is the third log message.');
-        });
-        fs.readFile(path.join(__dirname, logFiles[2]), 'utf8', (e, contents) => {
-          assert.include(contents, 'This is the second log message.');
-        });
+        assert.same(
+          logFiles,
+          [
+            'fa-maxFileSize-with-backups-sync-test.log',
+            'fa-maxFileSize-with-backups-sync-test.log.1',
+            'fa-maxFileSize-with-backups-sync-test.log.2',
+          ],
+          'should be named in sequence'
+        );
+
+        fs.readFile(
+          path.join(__dirname, logFiles[0]),
+          'utf8',
+          (e, contents) => {
+            assert.match(contents, 'This is the fourth log message.');
+          }
+        );
+        fs.readFile(
+          path.join(__dirname, logFiles[1]),
+          'utf8',
+          (e, contents) => {
+            assert.match(contents, 'This is the third log message.');
+          }
+        );
+        fs.readFile(
+          path.join(__dirname, logFiles[2]),
+          'utf8',
+          (e, contents) => {
+            assert.match(contents, 'This is the second log message.');
+          }
+        );
       });
     });
     t.end();
   });
 
   batch.test('configure with fileSyncAppender', (t) => {
+    const testFile = 'tmp-sync-tests.log';
+    remove(testFile);
+
+    t.teardown(() => {
+      remove(testFile);
+    });
+
     // this config defines one file appender (to ./tmp-sync-tests.log)
     // and sets the log level for "tests" to WARN
     log4js.configure({
-      appenders: [
-        {
-          category: 'tests',
-          type: 'file',
-          filename: 'tmp-sync-tests.log',
-          layout: { type: 'messagePassThrough' }
-        }
-      ],
-
-      levels: { tests: 'WARN' }
+      appenders: {
+        sync: {
+          type: 'fileSync',
+          filename: testFile,
+          layout: { type: 'messagePassThrough' },
+        },
+      },
+      categories: {
+        default: { appenders: ['sync'], level: 'debug' },
+        tests: { appenders: ['sync'], level: 'warn' },
+      },
     });
     const logger = log4js.getLogger('tests');
     logger.info('this should not be written to the file');
     logger.warn('this should be written to the file');
 
-    fs.readFile('tmp-sync-tests.log', 'utf8', (err, contents) => {
-      t.include(contents, `this should be written to the file${EOL}`);
+    fs.readFile(testFile, 'utf8', (err, contents) => {
+      t.match(contents, `this should be written to the file${EOL}`);
       t.equal(contents.indexOf('this should not be written to the file'), -1);
       t.end();
     });
   });
 
+  batch.test(
+    'configure with non-existent multi-directory (recursive, nodejs >= 10.12.0)',
+    (t) => {
+      const testFile = 'tmpA/tmpB/tmpC/tmp-sync-tests-recursive.log';
+      remove(testFile);
+
+      t.teardown(() => {
+        remove(testFile);
+        try {
+          fs.rmdirSync('tmpA/tmpB/tmpC');
+          fs.rmdirSync('tmpA/tmpB');
+          fs.rmdirSync('tmpA');
+        } catch (e) {
+          // doesn't matter
+        }
+      });
+
+      log4js.configure({
+        appenders: {
+          sync: {
+            type: 'fileSync',
+            filename: testFile,
+            layout: { type: 'messagePassThrough' },
+          },
+        },
+        categories: {
+          default: { appenders: ['sync'], level: 'debug' },
+        },
+      });
+      const logger = log4js.getLogger();
+      logger.info('this should be written to the file');
+
+      fs.readFile(testFile, 'utf8', (err, contents) => {
+        t.match(contents, `this should be written to the file${EOL}`);
+        t.end();
+      });
+    }
+  );
+
+  batch.test(
+    'configure with non-existent multi-directory (non-recursive, nodejs < 10.12.0)',
+    (t) => {
+      const testFile = 'tmpA/tmpB/tmpC/tmp-sync-tests-non-recursive.log';
+      remove(testFile);
+
+      t.teardown(() => {
+        remove(testFile);
+        try {
+          fs.rmdirSync('tmpA/tmpB/tmpC');
+          fs.rmdirSync('tmpA/tmpB');
+          fs.rmdirSync('tmpA');
+        } catch (e) {
+          // doesn't matter
+        }
+      });
+
+      const sandboxedLog4js = sandbox.require('../../lib/log4js', {
+        requires: {
+          fs: {
+            ...fs,
+            mkdirSync(dirPath, options) {
+              return fs.mkdirSync(dirPath, {
+                ...options,
+                ...{ recursive: false },
+              });
+            },
+          },
+        },
+      });
+      sandboxedLog4js.configure({
+        appenders: {
+          sync: {
+            type: 'fileSync',
+            filename: testFile,
+            layout: { type: 'messagePassThrough' },
+          },
+        },
+        categories: {
+          default: { appenders: ['sync'], level: 'debug' },
+        },
+      });
+      const logger = sandboxedLog4js.getLogger();
+      logger.info('this should be written to the file');
+
+      fs.readFile(testFile, 'utf8', (err, contents) => {
+        t.match(contents, `this should be written to the file${EOL}`);
+        t.end();
+      });
+    }
+  );
+
+  batch.test(
+    'configure with non-existent multi-directory (error handling)',
+    (t) => {
+      const testFile = 'tmpA/tmpB/tmpC/tmp-sync-tests-error-handling.log';
+      remove(testFile);
+
+      t.teardown(() => {
+        remove(testFile);
+        try {
+          fs.rmdirSync('tmpA/tmpB/tmpC');
+          fs.rmdirSync('tmpA/tmpB');
+          fs.rmdirSync('tmpA');
+        } catch (e) {
+          // doesn't matter
+        }
+      });
+
+      const errorEPERM = new Error('EPERM');
+      errorEPERM.code = 'EPERM';
+
+      let sandboxedLog4js = sandbox.require('../../lib/log4js', {
+        requires: {
+          fs: {
+            ...fs,
+            mkdirSync() {
+              throw errorEPERM;
+            },
+          },
+        },
+      });
+      t.throws(
+        () =>
+          sandboxedLog4js.configure({
+            appenders: {
+              sync: {
+                type: 'fileSync',
+                filename: testFile,
+                layout: { type: 'messagePassThrough' },
+              },
+            },
+            categories: {
+              default: { appenders: ['sync'], level: 'debug' },
+            },
+          }),
+        errorEPERM
+      );
+
+      const errorEROFS = new Error('EROFS');
+      errorEROFS.code = 'EROFS';
+
+      sandboxedLog4js = sandbox.require('../../lib/log4js', {
+        requires: {
+          fs: {
+            ...fs,
+            mkdirSync() {
+              throw errorEROFS;
+            },
+            statSync() {
+              return {
+                isDirectory() {
+                  return false;
+                },
+              };
+            },
+          },
+        },
+      });
+      t.throws(
+        () =>
+          sandboxedLog4js.configure({
+            appenders: {
+              sync: {
+                type: 'fileSync',
+                filename: testFile,
+                layout: { type: 'messagePassThrough' },
+              },
+            },
+            categories: {
+              default: { appenders: ['sync'], level: 'debug' },
+            },
+          }),
+        errorEROFS
+      );
+
+      fs.mkdirSync('tmpA');
+      fs.mkdirSync('tmpA/tmpB');
+      fs.mkdirSync('tmpA/tmpB/tmpC');
+
+      sandboxedLog4js = sandbox.require('../../lib/log4js', {
+        requires: {
+          fs: {
+            ...fs,
+            mkdirSync() {
+              throw errorEROFS;
+            },
+          },
+        },
+      });
+      t.doesNotThrow(() =>
+        sandboxedLog4js.configure({
+          appenders: {
+            sync: {
+              type: 'fileSync',
+              filename: testFile,
+              layout: { type: 'messagePassThrough' },
+            },
+          },
+          categories: {
+            default: { appenders: ['sync'], level: 'debug' },
+          },
+        })
+      );
+
+      t.end();
+    }
+  );
+
+  batch.test('test options', (t) => {
+    const testFile = 'tmp-options-tests.log';
+    remove(testFile);
+
+    t.teardown(() => {
+      remove(testFile);
+    });
+
+    // using non-standard options
+    log4js.configure({
+      appenders: {
+        sync: {
+          type: 'fileSync',
+          filename: testFile,
+          layout: { type: 'messagePassThrough' },
+          flags: 'w',
+          encoding: 'ascii',
+          mode: 0o666,
+        },
+      },
+      categories: {
+        default: { appenders: ['sync'], level: 'info' },
+      },
+    });
+    const logger = log4js.getLogger();
+    logger.warn('log message');
+
+    fs.readFile(testFile, 'ascii', (err, contents) => {
+      t.match(contents, `log message${EOL}`);
+      t.end();
+    });
+  });
+
   batch.end();
 });
diff --git a/test/tap/gelfAppender-test.js b/test/tap/gelfAppender-test.js
deleted file mode 100644
index fc822bef..00000000
--- a/test/tap/gelfAppender-test.js
+++ /dev/null
@@ -1,231 +0,0 @@
-'use strict';
-
-const test = require('tap').test;
-const sandbox = require('sandboxed-module');
-const log4js = require('../../lib/log4js');
-const realLayouts = require('../../lib/layouts');
-
-const setupLogging = function (options, category, compressedLength) {
-  const fakeDgram = {
-    sent: false,
-    socket: {
-      packetLength: 0,
-      closed: false,
-      close: function () {
-        this.closed = true;
-      },
-      send: function (pkt, offset, pktLength, port, host) {
-        fakeDgram.sent = true;
-        this.packet = pkt;
-        this.offset = offset;
-        this.packetLength = pktLength;
-        this.port = port;
-        this.host = host;
-      }
-    },
-    createSocket: function (type) {
-      this.type = type;
-      return this.socket;
-    }
-  };
-
-  const fakeZlib = {
-    gzip: function (objectToCompress, callback) {
-      fakeZlib.uncompressed = objectToCompress;
-      if (this.shouldError) {
-        callback({ stack: 'oh noes' });
-        return;
-      }
-
-      if (compressedLength) {
-        callback(null, { length: compressedLength });
-      } else {
-        callback(null, "I've been compressed");
-      }
-    }
-  };
-
-  let exitHandler;
-
-  const fakeConsole = {
-    error: function (message) {
-      this.message = message;
-    }
-  };
-
-  const fakeLayouts = {
-    layout: function (type, opt) {
-      this.type = type;
-      this.options = opt;
-      return realLayouts.messagePassThroughLayout;
-    },
-    messagePassThroughLayout: realLayouts.messagePassThroughLayout
-  };
-
-  const appender = sandbox.require('../../lib/appenders/gelf', {
-    singleOnly: true,
-    requires: {
-      dgram: fakeDgram,
-      zlib: fakeZlib,
-      '../layouts': fakeLayouts
-    },
-    globals: {
-      process: {
-        on: function (evt, handler) {
-          if (evt === 'exit') {
-            exitHandler = handler;
-          }
-        }
-      },
-      console: fakeConsole
-    }
-  });
-
-  log4js.clearAppenders();
-  log4js.addAppender(appender.configure(options || {}), category || 'gelf-test');
-  return {
-    dgram: fakeDgram,
-    compress: fakeZlib,
-    exitHandler: exitHandler,
-    console: fakeConsole,
-    layouts: fakeLayouts,
-    logger: log4js.getLogger(category || 'gelf-test')
-  };
-};
-
-test('log4js gelfAppender', (batch) => {
-  batch.test('with default gelfAppender settings', (t) => {
-    const setup = setupLogging();
-    setup.logger.info('This is a test');
-
-    const dgram = setup.dgram;
-
-    t.test('dgram packet should be sent via udp to the localhost gelf server', (assert) => {
-      assert.equal(dgram.type, 'udp4');
-      assert.equal(dgram.socket.host, 'localhost');
-      assert.equal(dgram.socket.port, 12201);
-      assert.equal(dgram.socket.offset, 0);
-      assert.ok(dgram.socket.packetLength > 0, 'Received blank message');
-      assert.equal(dgram.socket.packet, "I've been compressed");
-      assert.end();
-    });
-
-    const message = JSON.parse(setup.compress.uncompressed);
-    t.test('the uncompressed log message should be in the gelf format', (assert) => {
-      assert.equal(message.version, '1.1');
-      assert.equal(message.host, require('os').hostname());
-      assert.equal(message.level, 6); // INFO
-      assert.equal(message.short_message, 'This is a test');
-      assert.end();
-    });
-    t.end();
-  });
-
-  batch.test('with a message longer than 8k', (t) => {
-    const setup = setupLogging(undefined, undefined, 10240);
-    setup.logger.info('Blah.');
-
-    t.equal(setup.dgram.sent, false, 'the dgram packet should not be sent');
-    t.end();
-  });
-
-  batch.test('with non-default options', (t) => {
-    const setup = setupLogging({
-      host: 'somewhere',
-      port: 12345,
-      hostname: 'cheese',
-      facility: 'nonsense'
-    });
-    setup.logger.debug('Just testing.');
-
-    const dgram = setup.dgram;
-    t.test('the dgram packet should pick up the options', (assert) => {
-      assert.equal(dgram.socket.host, 'somewhere');
-      assert.equal(dgram.socket.port, 12345);
-      assert.end();
-    });
-
-    const message = JSON.parse(setup.compress.uncompressed);
-    t.test('the uncompressed packet should pick up the options', (assert) => {
-      assert.equal(message.host, 'cheese');
-      assert.equal(message._facility, 'nonsense');
-      assert.end();
-    });
-
-    t.end();
-  });
-
-  batch.test('on process.exit should close open sockets', (t) => {
-    const setup = setupLogging();
-    setup.exitHandler();
-
-    t.ok(setup.dgram.socket.closed);
-    t.end();
-  });
-
-  batch.test('on zlib error should output to console.error', (t) => {
-    const setup = setupLogging();
-    setup.compress.shouldError = true;
-    setup.logger.info('whatever');
-
-    t.equal(setup.console.message, 'oh noes');
-    t.end();
-  });
-
-  batch.test('with layout in configuration', (t) => {
-    const setup = setupLogging({
-      layout: {
-        type: 'madeuplayout',
-        earlgrey: 'yes, please'
-      }
-    });
-
-    t.test('should pass options to layout', (assert) => {
-      assert.equal(setup.layouts.type, 'madeuplayout');
-      assert.equal(setup.layouts.options.earlgrey, 'yes, please');
-      assert.end();
-    });
-    t.end();
-  });
-
-  batch.test('with custom fields options', (t) => {
-    const setup = setupLogging({
-      host: 'somewhere',
-      port: 12345,
-      hostname: 'cheese',
-      facility: 'nonsense',
-      customFields: {
-        _every1: 'Hello every one',
-        _every2: 'Hello every two'
-      }
-    });
-    const myFields = {
-      GELF: true,
-      _every2: 'Overwritten!',
-      _myField: 'This is my field!'
-    };
-    setup.logger.debug(myFields, 'Just testing.');
-
-    const dgram = setup.dgram;
-    t.test('the dgram packet should pick up the options', (assert) => {
-      assert.equal(dgram.socket.host, 'somewhere');
-      assert.equal(dgram.socket.port, 12345);
-      assert.end();
-    });
-
-    const message = JSON.parse(setup.compress.uncompressed);
-    t.test('the uncompressed packet should pick up the options', (assert) => {
-      assert.equal(message.host, 'cheese');
-      assert.notOk(message.GELF); // make sure flag was removed
-      assert.equal(message._facility, 'nonsense');
-      assert.equal(message._every1, 'Hello every one'); // the default value
-      assert.equal(message._every2, 'Overwritten!'); // the overwritten value
-      assert.equal(message._myField, 'This is my field!'); // the value for this message only
-      assert.equal(message.short_message, 'Just testing.'); // skip the field object
-      assert.end();
-    });
-    t.end();
-  });
-
-  batch.end();
-});
diff --git a/test/tap/global-log-level-test.js b/test/tap/global-log-level-test.js
deleted file mode 100644
index 14beb61b..00000000
--- a/test/tap/global-log-level-test.js
+++ /dev/null
@@ -1,126 +0,0 @@
-'use strict';
-
-const test = require('tap').test;
-
-test('log4js global loglevel', (batch) => {
-  batch.test('global loglevel', (t) => {
-    const log4js = require('../../lib/log4js');
-
-    t.test('set global loglevel on creation', (assert) => {
-      const log1 = log4js.getLogger('log1');
-      let level = 'OFF';
-      if (log1.level.toString() === level) {
-        level = 'TRACE';
-      }
-      assert.notEqual(log1.level.toString(), level);
-
-      log4js.setGlobalLogLevel(level);
-      assert.equal(log1.level.toString(), level);
-
-      const log2 = log4js.getLogger('log2');
-      assert.equal(log2.level.toString(), level);
-      assert.end();
-    });
-
-    t.test('global change loglevel', (assert) => {
-      const log1 = log4js.getLogger('log1');
-      const log2 = log4js.getLogger('log2');
-      let level = 'OFF';
-      if (log1.level.toString() === level) {
-        level = 'TRACE';
-      }
-      assert.notEqual(log1.level.toString(), level);
-
-      log4js.setGlobalLogLevel(level);
-      assert.equal(log1.level.toString(), level);
-      assert.equal(log2.level.toString(), level);
-      assert.end();
-    });
-
-    t.test('override loglevel', (assert) => {
-      const log1 = log4js.getLogger('log1');
-      const log2 = log4js.getLogger('log2');
-      let level = 'OFF';
-      if (log1.level.toString() === level) {
-        level = 'TRACE';
-      }
-      assert.notEqual(log1.level.toString(), level);
-
-      const oldLevel = log1.level.toString();
-      assert.equal(log2.level.toString(), oldLevel);
-
-      log2.setLevel(level);
-      assert.equal(log1.level.toString(), oldLevel);
-      assert.equal(log2.level.toString(), level);
-      assert.notEqual(oldLevel, level);
-
-      log2.removeLevel();
-      assert.equal(log1.level.toString(), oldLevel);
-      assert.equal(log2.level.toString(), oldLevel);
-      assert.end();
-    });
-
-    t.test('preload loglevel', (assert) => {
-      const log1 = log4js.getLogger('log1');
-      let level = 'OFF';
-      if (log1.level.toString() === level) {
-        level = 'TRACE';
-      }
-      assert.notEqual(log1.level.toString(), level);
-
-      const oldLevel = log1.level.toString();
-      log4js.getLogger('log2').setLevel(level);
-
-      assert.equal(log1.level.toString(), oldLevel);
-
-      // get again same logger but as different variable
-      const log2 = log4js.getLogger('log2');
-      assert.equal(log2.level.toString(), level);
-      assert.notEqual(oldLevel, level);
-
-      log2.removeLevel();
-      assert.equal(log1.level.toString(), oldLevel);
-      assert.equal(log2.level.toString(), oldLevel);
-      assert.end();
-    });
-
-    t.test('set level on all categories', (assert) => {
-      // Get 2 loggers
-      const log1 = log4js.getLogger('log1');
-      const log2 = log4js.getLogger('log2');
-
-      // First a test with 2 categories with different levels
-      const config = {
-        levels: {
-          log1: 'ERROR',
-          log2: 'WARN'
-        }
-      };
-      log4js.configure(config);
-
-      // Check if the levels are set correctly
-      assert.equal('ERROR', log1.level.toString());
-      assert.equal('WARN', log2.level.toString());
-
-      log1.removeLevel();
-      log2.removeLevel();
-
-      // Almost identical test, but now we set
-      // level on all categories
-      const config2 = {
-        levels: {
-          '[all]': 'DEBUG'
-        }
-      };
-      log4js.configure(config2);
-
-      // Check if the loggers got the DEBUG level
-      assert.equal('DEBUG', log1.level.toString());
-      assert.equal('DEBUG', log2.level.toString());
-      assert.end();
-    });
-    t.end();
-  });
-
-  batch.end();
-});
diff --git a/test/tap/hipchatAppender-test.js b/test/tap/hipchatAppender-test.js
deleted file mode 100644
index 032bde74..00000000
--- a/test/tap/hipchatAppender-test.js
+++ /dev/null
@@ -1,131 +0,0 @@
-'use strict';
-
-const test = require('tap').test;
-const log4js = require('../../lib/log4js');
-const sandbox = require('sandboxed-module');
-
-function setupLogging(category, options) {
-  const lastRequest = {};
-
-  const fakeRequest = function (args, level) {
-    lastRequest.notifier = this;
-    lastRequest.body = args[0];
-    lastRequest.callback = args[1];
-    lastRequest.level = level;
-  };
-
-  const fakeHipchatNotifier = {
-    make: function (room, token, from, host, notify) {
-      return {
-        room: room,
-        token: token,
-        from: from || '',
-        host: host || 'api.hipchat.com',
-        notify: notify || false,
-        setRoom: function (val) {
-          this.room = val;
-        },
-        setFrom: function (val) {
-          this.from = val;
-        },
-        setHost: function (val) {
-          this.host = val;
-        },
-        setNotify: function (val) {
-          this.notify = val;
-        },
-        info: function () {
-          fakeRequest.call(this, arguments, 'info');
-        },
-        warning: function () {
-          fakeRequest.call(this, arguments, 'warning');
-        },
-        failure: function () {
-          fakeRequest.call(this, arguments, 'failure');
-        },
-        success: function () {
-          fakeRequest.call(this, arguments, 'success');
-        }
-      };
-    }
-  };
-
-  const hipchatModule = sandbox.require('../../lib/appenders/hipchat', {
-    requires: {
-      'hipchat-notifier': fakeHipchatNotifier
-    }
-  });
-  log4js.clearAppenders();
-  log4js.addAppender(hipchatModule.configure(options), category);
-
-  return {
-    logger: log4js.getLogger(category),
-    lastRequest: lastRequest
-  };
-}
-
-test('HipChat appender', (batch) => {
-  batch.test('when logging to HipChat v2 API', (t) => {
-    const customCallback = function () {
-      return 'works';
-    };
-
-    const topic = setupLogging('myCategory', {
-      type: 'hipchat',
-      hipchat_token: 'User_Token_With_Notification_Privs',
-      hipchat_room: 'Room_ID_Or_Name',
-      hipchat_from: 'Log4js_Test',
-      hipchat_notify: true,
-      hipchat_host: 'hipchat.your-company.tld',
-      hipchat_response_callback: customCallback
-    });
-    topic.logger.warn('Log event #1');
-
-    t.test('a request to hipchat_host should be sent', (assert) => {
-      assert.equal(topic.lastRequest.notifier.host, 'hipchat.your-company.tld');
-      assert.equal(topic.lastRequest.notifier.notify, true);
-      assert.equal(topic.lastRequest.body, 'Log event #1');
-      assert.equal(topic.lastRequest.level, 'warning');
-      assert.end();
-    });
-
-    t.equal(topic.lastRequest.callback(), 'works', 'a custom callback to the HipChat response is supported');
-    t.end();
-  });
-
-  batch.test('when missing options', (t) => {
-    const topic = setupLogging('myLogger', {
-      type: 'hipchat',
-    });
-    topic.logger.error('Log event #2');
-
-    t.test('it sets some defaults', (assert) => {
-      assert.equal(topic.lastRequest.notifier.host, 'api.hipchat.com');
-      assert.equal(topic.lastRequest.notifier.notify, false);
-      assert.equal(topic.lastRequest.body, 'Log event #2');
-      assert.equal(topic.lastRequest.level, 'failure');
-      assert.end();
-    });
-    t.end();
-  });
-
-  batch.test('when basicLayout is provided', (t) => {
-    const topic = setupLogging('myLogger', {
-      type: 'hipchat',
-      layout: log4js.layouts.basicLayout
-    });
-    topic.logger.debug('Log event #3');
-
-    t.test('it should include the timestamp', (assert) => {
-      // basicLayout adds [TIMESTAMP] [LEVEL] category - message
-      // e.g. [2016-06-10 11:50:53.819] [DEBUG] myLogger - Log event #23
-
-      assert.match(topic.lastRequest.body, /^\[[^\]]+] \[[^\]]+].*Log event #3$/);
-      assert.equal(topic.lastRequest.level, 'info');
-      assert.end();
-    });
-    t.end();
-  });
-
-  batch.end();
-});
diff --git a/test/tap/layouts-test.js b/test/tap/layouts-test.js
index f816a931..3d842788 100644
--- a/test/tap/layouts-test.js
+++ b/test/tap/layouts-test.js
@@ -1,10 +1,9 @@
-'use strict';
-
-const test = require('tap').test;
+const { test } = require('tap');
+const debug = require('debug');
 const os = require('os');
-const semver = require('semver');
+const path = require('path');
 
-const EOL = os.EOL || '\n';
+const { EOL } = os;
 
 // used for patternLayout tests.
 function testPattern(assert, layout, event, tokens, pattern, value) {
@@ -21,74 +20,94 @@ test('log4js layouts', (batch) => {
         startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
         categoryName: 'cheese',
         level: {
-          toString: function () {
+          toString() {
             return 'ERROR';
-          }
-        }
+          },
+          colour: 'red',
+        },
       });
 
       assert.equal(
         output,
-        '\x1B[31m[2010-12-05 14:18:30.045] [ERROR] cheese - \x1B[39mnonsense'
+        '\x1B[91m[2010-12-05T14:18:30.045] [ERROR] cheese - \x1B[39mnonsense'
       );
       assert.end();
     });
 
-    t.test('should support the console.log format for the message', (assert) => {
-      const output = layout({
-        data: ['thing %d', 2],
-        startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
-        categoryName: 'cheese',
-        level: {
-          toString: function () {
-            return 'ERROR';
-          }
-        }
-      });
-      assert.equal(output, '\x1B[31m[2010-12-05 14:18:30.045] [ERROR] cheese - \x1B[39mthing 2');
-      assert.end();
-    });
+    t.test(
+      'should support the console.log format for the message',
+      (assert) => {
+        const output = layout({
+          data: ['thing %d', 2],
+          startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
+          categoryName: 'cheese',
+          level: {
+            toString() {
+              return 'ERROR';
+            },
+            colour: 'red',
+          },
+        });
+        assert.equal(
+          output,
+          '\x1B[91m[2010-12-05T14:18:30.045] [ERROR] cheese - \x1B[39mthing 2'
+        );
+        assert.end();
+      }
+    );
     t.end();
   });
 
   batch.test('messagePassThroughLayout', (t) => {
     const layout = require('../../lib/layouts').messagePassThroughLayout;
 
-    t.equal(layout({
-      data: ['nonsense'],
-      startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
-      categoryName: 'cheese',
-      level: {
-        colour: 'green',
-        toString: function () {
-          return 'ERROR';
-        }
-      }
-    }), 'nonsense', 'should take a logevent and output only the message');
+    t.equal(
+      layout({
+        data: ['nonsense'],
+        startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
+        categoryName: 'cheese',
+        level: {
+          colour: 'green',
+          toString() {
+            return 'ERROR';
+          },
+        },
+      }),
+      'nonsense',
+      'should take a logevent and output only the message'
+    );
 
-    t.equal(layout({
-      data: ['thing %d', 1, 'cheese'],
-      startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
-      categoryName: 'cheese',
-      level: {
-        colour: 'green',
-        toString: function () {
-          return 'ERROR';
-        }
-      }
-    }), 'thing 1 cheese', 'should support the console.log format for the message');
+    t.equal(
+      layout({
+        data: ['thing %d', 1, 'cheese'],
+        startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
+        categoryName: 'cheese',
+        level: {
+          colour: 'green',
+          toString() {
+            return 'ERROR';
+          },
+        },
+      }),
+      'thing 1 cheese',
+      'should support the console.log format for the message'
+    );
 
-    t.equal(layout({
-      data: [{ thing: 1 }],
-      startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
-      categoryName: 'cheese',
-      level: {
-        colour: 'green',
-        toString: function () {
-          return 'ERROR';
-        }
-      }
-    }), '{ thing: 1 }', 'should output the first item even if it is not a string');
+    t.equal(
+      layout({
+        data: [{ thing: 1 }],
+        startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
+        categoryName: 'cheese',
+        level: {
+          colour: 'green',
+          toString() {
+            return 'ERROR';
+          },
+        },
+      }),
+      '{ thing: 1 }',
+      'should output the first item even if it is not a string'
+    );
 
     t.match(
       layout({
@@ -97,12 +116,12 @@ test('log4js layouts', (batch) => {
         categoryName: 'cheese',
         level: {
           colour: 'green',
-          toString: function () {
+          toString() {
             return 'ERROR';
-          }
-        }
+          },
+        },
       }),
-      /at Object\.\s+\((.*)test[\\/]tap[\\/]layouts-test\.js:\d+:\d+\)/,
+      /at (Test\.batch\.test(\.t)?|Test\.)\s+\((.*)test[\\/]tap[\\/]layouts-test\.js:\d+:\d+\)/,
       'regexp did not return a match - should print the stacks of a passed error objects'
     );
 
@@ -117,19 +136,27 @@ test('log4js layouts', (batch) => {
         categoryName: 'cheese',
         level: {
           colour: 'green',
-          toString: function () {
+          toString() {
             return 'ERROR';
-          }
-        }
+          },
+        },
       });
 
-      assert.match(layoutOutput, /Error: My Unique Error Message/, 'should print the contained error message');
+      assert.match(
+        layoutOutput,
+        /Error: My Unique Error Message/,
+        'should print the contained error message'
+      );
       assert.match(
         layoutOutput,
         /augmented:\s'My Unique attribute value'/,
         'should print error augmented string attributes'
       );
-      assert.match(layoutOutput, /augObj:\s\{ at1: 'at2' \}/, 'should print error augmented object attributes');
+      assert.match(
+        layoutOutput,
+        /augObj:\s\{ at1: 'at2' \}/,
+        'should print error augmented object attributes'
+      );
       assert.end();
     });
     t.end();
@@ -143,96 +170,183 @@ test('log4js layouts', (batch) => {
       startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
       categoryName: 'tests',
       level: {
-        toString: function () {
+        toString() {
           return 'DEBUG';
-        }
-      }
+        },
+      },
     };
 
-    t.equal(layout(event), '[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test');
+    t.equal(
+      layout(event),
+      '[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test'
+    );
 
-    t.test('should output a stacktrace, message if the event has an error attached', (assert) => {
-      let i;
-      const error = new Error('Some made-up error');
-      const stack = error.stack.split(/\n/);
+    t.test(
+      'should output a stacktrace and message if the event has an error attached',
+      (assert) => {
+        let i;
+        const error = new Error('Some made-up error');
+        const stack = error.stack.split(/\n/);
 
-      event.data = ['this is a test', error];
-      const output = layout(event);
-      const lines = output.split(/\n/);
+        event.data = ['this is a test', error];
+        const output = layout(event);
+        const lines = output.split(/\n/);
 
-      if (semver.satisfies(process.version, '>=6')) {
         assert.equal(lines.length, stack.length);
         assert.equal(
           lines[0],
-          '[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test Error: Some made-up error'
+          '[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test Error: Some made-up error'
         );
         for (i = 1; i < stack.length; i++) {
           assert.equal(lines[i], stack[i]);
         }
-      } else {
-        assert.equal(lines.length - 1, stack.length);
+        assert.end();
+      }
+    );
+
+    t.test(
+      'should output any extra data in the log event as util.inspect strings',
+      (assert) => {
+        event.data = [
+          'this is a test',
+          {
+            name: 'Cheese',
+            message: 'Gorgonzola smells.',
+          },
+        ];
+        const output = layout(event);
         assert.equal(
-          lines[0],
-          '[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test [Error: Some made-up error]'
+          output,
+          '[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test ' +
+            "{ name: 'Cheese', message: 'Gorgonzola smells.' }"
         );
-        for (i = 1; i < stack.length; i++) {
-          assert.equal(lines[i + 2], stack[i + 1]);
-        }
+        assert.end();
       }
-      assert.end();
-    });
+    );
+    t.end();
+  });
 
-    t.test('should output any extra data in the log event as util.inspect strings', (assert) => {
-      event.data = [
-        'this is a test', {
-          name: 'Cheese',
-          message: 'Gorgonzola smells.'
-        }
-      ];
-      const output = layout(event);
-      assert.equal(
-        output,
-        '[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test ' +
-        "{ name: 'Cheese', message: 'Gorgonzola smells.' }"
-      );
+  batch.test('dummyLayout', (t) => {
+    const layout = require('../../lib/layouts').dummyLayout;
+
+    t.test('should output just the first element of the log data', (assert) => {
+      const event = {
+        data: ['this is the first value', 'this is not'],
+        startTime: new Date('2010-12-05 14:18:30.045'),
+        categoryName: 'multiple.levels.of.tests',
+        level: {
+          toString() {
+            return 'DEBUG';
+          },
+          colour: 'cyan',
+        },
+      };
+
+      assert.equal(layout(event), 'this is the first value');
       assert.end();
     });
     t.end();
   });
 
   batch.test('patternLayout', (t) => {
-    const event = {
-      data: ['this is a test'],
-      startTime: new Date('2010-12-05T14:18:30.045Z'), // new Date(2010, 11, 5, 14, 18, 30, 45),
-      categoryName: 'multiple.levels.of.tests',
-      level: {
-        toString: function () {
-          return 'DEBUG';
+    const originalListener =
+      process.listeners('warning')[process.listeners('warning').length - 1];
+    const warningListener = (error) => {
+      if (error.name === 'DeprecationWarning') {
+        if (
+          error.code.startsWith('log4js-node-DEP0003') ||
+          error.code.startsWith('log4js-node-DEP0004')
+        ) {
+          return;
         }
       }
+      originalListener(error);
     };
+    process.off('warning', originalListener);
+    process.on('warning', warningListener);
 
-    const layout = require('../../lib/layouts').patternLayout;
+    const debugWasEnabled = debug.enabled('log4js:layouts');
+    const debugLogs = [];
+    const originalWrite = process.stderr.write;
+    process.stderr.write = (string, encoding, fd) => {
+      debugLogs.push(string);
+      if (debugWasEnabled) {
+        originalWrite.apply(process.stderr, [string, encoding, fd]);
+      }
+    };
+    const originalNamespace = debug.disable();
+    debug.enable(`${originalNamespace}, log4js:layouts`);
+
+    batch.teardown(async () => {
+      // next event loop so that past warnings will not be printed
+      setImmediate(() => {
+        process.off('warning', warningListener);
+        process.on('warning', originalListener);
+      });
+      process.stderr.write = originalWrite;
+      debug.enable(originalNamespace);
+    });
 
     const tokens = {
       testString: 'testStringToken',
-      testFunction: function () {
+      testFunction() {
         return 'testFunctionToken';
       },
-      fnThatUsesLogEvent: function (logEvent) {
+      fnThatUsesLogEvent(logEvent) {
         return logEvent.level.toString();
-      }
+      },
     };
 
-    // override getTimezoneOffset
-    event.startTime.getTimezoneOffset = function () {
-      return 0;
+    // console.log([Error('123').stack.split('\n').slice(1).join('\n')])
+    const callStack =
+      '    at Foo.bar [as baz] (repl:1:14)\n    at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n    at REPLServer.defaultEval (repl.js:240:29)\n    at bound (domain.js:301:14)\n    at REPLServer.runBound [as eval] (domain.js:314:12)\n    at REPLServer.onLine (repl.js:468:10)\n    at emitOne (events.js:121:20)\n    at REPLServer.emit (events.js:211:7)\n    at REPLServer.Interface._onLine (readline.js:280:10)\n    at REPLServer.Interface._line (readline.js:629:8)'; // eslint-disable-line max-len
+    const fileName = path.normalize('/log4js-node/test/tap/layouts-test.js');
+    const lineNumber = 1;
+    const columnNumber = 14;
+    const className = 'Foo';
+    const functionName = 'bar';
+    const functionAlias = 'baz';
+    const callerName = 'Foo.bar [as baz]';
+    const event = {
+      data: ['this is a test'],
+      startTime: new Date('2010-12-05 14:18:30.045'),
+      categoryName: 'multiple.levels.of.tests',
+      level: {
+        toString() {
+          return 'DEBUG';
+        },
+        colour: 'cyan',
+      },
+      context: tokens,
+
+      // location
+      callStack,
+      fileName,
+      lineNumber,
+      columnNumber,
+      className,
+      functionName,
+      functionAlias,
+      callerName,
     };
+    event.startTime.getTimezoneOffset = () => -600;
 
-    t.test('should default to "time logLevel loggerName - message"', (assert) => {
-      testPattern(assert, layout, event, tokens, null, `14:18:30 DEBUG multiple.levels.of.tests - this is a test${EOL}`);
-      assert.end();
-    });
+    const layout = require('../../lib/layouts').patternLayout;
+
+    t.test(
+      'should default to "time logLevel loggerName - message"',
+      (assert) => {
+        testPattern(
+          assert,
+          layout,
+          event,
+          tokens,
+          null,
+          `14:18:30 DEBUG multiple.levels.of.tests - this is a test${EOL}`
+        );
+        assert.end();
+      }
+    );
 
     t.test('%r should output time only', (assert) => {
       testPattern(assert, layout, event, tokens, '%r', '14:18:30');
@@ -245,7 +359,14 @@ test('log4js layouts', (batch) => {
     });
 
     t.test('%c should output the log category', (assert) => {
-      testPattern(assert, layout, event, tokens, '%c', 'multiple.levels.of.tests');
+      testPattern(
+        assert,
+        layout,
+        event,
+        tokens,
+        '%c',
+        'multiple.levels.of.tests'
+      );
       assert.end();
     });
 
@@ -254,13 +375,115 @@ test('log4js layouts', (batch) => {
       assert.end();
     });
 
+    t.test('%m should apply util.format on data', (assert) => {
+      const eventWithSeveralDataEntry = JSON.parse(JSON.stringify(event));
+      eventWithSeveralDataEntry.data = [
+        'This %s a %s like other ones',
+        "isn't",
+        'test',
+      ];
+      testPattern(
+        assert,
+        layout,
+        eventWithSeveralDataEntry,
+        tokens,
+        '%m',
+        "This isn't a test like other ones"
+      );
+      assert.end();
+    });
+
+    t.test('%m{1} should only consider data.slice(1)', (assert) => {
+      const eventWithSeveralDataEntry = JSON.parse(JSON.stringify(event));
+      eventWithSeveralDataEntry.data = [
+        'This %s a %s like other ones',
+        "isn't",
+        'test',
+      ];
+      testPattern(
+        assert,
+        layout,
+        eventWithSeveralDataEntry,
+        tokens,
+        '%m{1}',
+        "isn't test"
+      );
+      assert.end();
+    });
+
+    t.test('%m{0,1} should behave like a dummy layout', (assert) => {
+      const eventWithSeveralDataEntry = JSON.parse(JSON.stringify(event));
+      eventWithSeveralDataEntry.data = [
+        'This %s a %s like other ones',
+        "isn't",
+        'test',
+      ];
+      testPattern(
+        assert,
+        layout,
+        eventWithSeveralDataEntry,
+        tokens,
+        '%m{0,1}',
+        'This %s a %s like other ones'
+      );
+      assert.end();
+    });
+
+    t.test('%m{1,2} should only consider data.slice(1, 2)', (assert) => {
+      const eventWithSeveralDataEntry = JSON.parse(JSON.stringify(event));
+      eventWithSeveralDataEntry.data = [
+        'This %s a %s like other ones',
+        "isn't",
+        'test',
+      ];
+      testPattern(
+        assert,
+        layout,
+        eventWithSeveralDataEntry,
+        tokens,
+        '%m{1,2}',
+        "isn't"
+      );
+      assert.end();
+    });
+
+    t.test(
+      '%m{0,-1} should consider the whole data except the last element',
+      (assert) => {
+        const eventWithSeveralDataEntry = JSON.parse(JSON.stringify(event));
+        eventWithSeveralDataEntry.data = [
+          'This %s a %s like %s ones',
+          "isn't",
+          'test',
+          'other',
+          "won't be considered in call to util.format",
+        ];
+        testPattern(
+          assert,
+          layout,
+          eventWithSeveralDataEntry,
+          tokens,
+          '%m{0,-1}',
+          "This isn't a test like other ones"
+        );
+        assert.end();
+      }
+    );
+
     t.test('%n should output a new line', (assert) => {
       testPattern(assert, layout, event, tokens, '%n', EOL);
       assert.end();
     });
 
     t.test('%h should output hostname', (assert) => {
-      testPattern(assert, layout, event, tokens, '%h', os.hostname().toString());
+      testPattern(
+        assert,
+        layout,
+        event,
+        tokens,
+        '%h',
+        os.hostname().toString()
+      );
       assert.end();
     });
 
@@ -269,29 +492,157 @@ test('log4js layouts', (batch) => {
       assert.end();
     });
 
-    t.test('%c should handle category names like java-style package names', (assert) => {
-      testPattern(assert, layout, event, tokens, '%c{1}', 'tests');
-      testPattern(assert, layout, event, tokens, '%c{2}', 'of.tests');
-      testPattern(assert, layout, event, tokens, '%c{3}', 'levels.of.tests');
-      testPattern(assert, layout, event, tokens, '%c{4}', 'multiple.levels.of.tests');
-      testPattern(assert, layout, event, tokens, '%c{5}', 'multiple.levels.of.tests');
-      testPattern(assert, layout, event, tokens, '%c{99}', 'multiple.levels.of.tests');
+    t.test('%z should pick up pid from log event if present', (assert) => {
+      event.pid = '1234';
+      testPattern(assert, layout, event, tokens, '%z', '1234');
+      delete event.pid;
       assert.end();
     });
 
+    t.test('%y should output pid (was cluster info)', (assert) => {
+      testPattern(assert, layout, event, tokens, '%y', process.pid.toString());
+      assert.end();
+    });
+
+    t.test(
+      '%c should handle category names like java-style package names',
+      (assert) => {
+        testPattern(assert, layout, event, tokens, '%c{1}', 'tests');
+        testPattern(assert, layout, event, tokens, '%c{2}', 'of.tests');
+        testPattern(assert, layout, event, tokens, '%c{3}', 'levels.of.tests');
+        testPattern(
+          assert,
+          layout,
+          event,
+          tokens,
+          '%c{4}',
+          'multiple.levels.of.tests'
+        );
+        testPattern(
+          assert,
+          layout,
+          event,
+          tokens,
+          '%c{5}',
+          'multiple.levels.of.tests'
+        );
+        testPattern(
+          assert,
+          layout,
+          event,
+          tokens,
+          '%c{99}',
+          'multiple.levels.of.tests'
+        );
+        assert.end();
+      }
+    );
+
     t.test('%d should output the date in ISO8601 format', (assert) => {
-      testPattern(assert, layout, event, tokens, '%d', '2010-12-05 14:18:30.045');
+      testPattern(
+        assert,
+        layout,
+        event,
+        tokens,
+        '%d',
+        '2010-12-05T14:18:30.045'
+      );
       assert.end();
     });
 
     t.test('%d should allow for format specification', (assert) => {
-      testPattern(assert, layout, event, tokens, '%d{ISO8601_WITH_TZ_OFFSET}', '2010-12-05T14:18:30.045-0000');
-      testPattern(assert, layout, event, tokens, '%d{ISO8601}', '2010-12-05 14:18:30.045');
-      testPattern(assert, layout, event, tokens, '%d{ABSOLUTE}', '14:18:30.045');
-      testPattern(assert, layout, event, tokens, '%d{DATE}', '05 12 2010 14:18:30.045');
-      testPattern(assert, layout, event, tokens, '%d{yy MM dd hh mm ss}', '10 12 05 14 18 30');
-      testPattern(assert, layout, event, tokens, '%d{yyyy MM dd}', '2010 12 05');
-      testPattern(assert, layout, event, tokens, '%d{yyyy MM dd hh mm ss SSS}', '2010 12 05 14 18 30 045');
+      testPattern(
+        assert,
+        layout,
+        event,
+        tokens,
+        '%d{ISO8601}',
+        '2010-12-05T14:18:30.045'
+      );
+
+      testPattern(
+        assert,
+        layout,
+        event,
+        tokens,
+        '%d{ISO8601_WITH_TZ_OFFSET}',
+        '2010-12-05T14:18:30.045+10:00'
+      );
+
+      const DEP0003 = debugLogs.filter(
+        (e) => e.indexOf('log4js-node-DEP0003') > -1
+      ).length;
+      testPattern(
+        assert,
+        layout,
+        event,
+        tokens,
+        '%d{ABSOLUTE}', // deprecated
+        '14:18:30.045'
+      );
+      assert.equal(
+        debugLogs.filter((e) => e.indexOf('log4js-node-DEP0003') > -1).length,
+        DEP0003 + 1,
+        'deprecation log4js-node-DEP0003 emitted'
+      );
+      testPattern(
+        assert,
+        layout,
+        event,
+        tokens,
+        '%d{ABSOLUTETIME}',
+        '14:18:30.045'
+      );
+
+      const DEP0004 = debugLogs.filter(
+        (e) => e.indexOf('log4js-node-DEP0004') > -1
+      ).length;
+      testPattern(
+        assert,
+        layout,
+        event,
+        tokens,
+        '%d{DATE}', // deprecated
+        '05 12 2010 14:18:30.045'
+      );
+      assert.equal(
+        debugLogs.filter((e) => e.indexOf('log4js-node-DEP0004') > -1).length,
+        DEP0004 + 1,
+        'deprecation log4js-node-DEP0004 emitted'
+      );
+      testPattern(
+        assert,
+        layout,
+        event,
+        tokens,
+        '%d{DATETIME}',
+        '05 12 2010 14:18:30.045'
+      );
+
+      testPattern(
+        assert,
+        layout,
+        event,
+        tokens,
+        '%d{yy MM dd hh mm ss}',
+        '10 12 05 14 18 30'
+      );
+      testPattern(
+        assert,
+        layout,
+        event,
+        tokens,
+        '%d{yyyy MM dd}',
+        '2010 12 05'
+      );
+      testPattern(
+        assert,
+        layout,
+        event,
+        tokens,
+        '%d{yyyy MM dd hh mm ss SSS}',
+        '2010 12 05 14 18 30 045'
+      );
       assert.end();
     });
 
@@ -300,19 +651,181 @@ test('log4js layouts', (batch) => {
       assert.end();
     });
 
-    t.test('should output anything not preceded by % as literal', (assert) => {
-      testPattern(assert, layout, event, tokens, 'blah blah blah', 'blah blah blah');
+    t.test('%f should output filename', (assert) => {
+      testPattern(assert, layout, event, tokens, '%f', fileName);
+      assert.end();
+    });
+
+    t.test('%f should handle filename depth', (assert) => {
+      testPattern(assert, layout, event, tokens, '%f{1}', 'layouts-test.js');
+      testPattern(
+        assert,
+        layout,
+        event,
+        tokens,
+        '%f{2}',
+        path.join('tap', 'layouts-test.js')
+      );
+      testPattern(
+        assert,
+        layout,
+        event,
+        tokens,
+        '%f{3}',
+        path.join('test', 'tap', 'layouts-test.js')
+      );
+      testPattern(
+        assert,
+        layout,
+        event,
+        tokens,
+        '%f{4}',
+        path.join('log4js-node', 'test', 'tap', 'layouts-test.js')
+      );
+      testPattern(
+        assert,
+        layout,
+        event,
+        tokens,
+        '%f{5}',
+        path.join('/log4js-node', 'test', 'tap', 'layouts-test.js')
+      );
+      testPattern(
+        assert,
+        layout,
+        event,
+        tokens,
+        '%f{99}',
+        path.join('/log4js-node', 'test', 'tap', 'layouts-test.js')
+      );
+      assert.end();
+    });
+
+    t.test('%f should accept truncation and padding', (assert) => {
+      testPattern(assert, layout, event, tokens, '%.5f', fileName.slice(0, 5));
+      testPattern(
+        assert,
+        layout,
+        event,
+        tokens,
+        '%20f{1}',
+        '     layouts-test.js'
+      );
+      testPattern(
+        assert,
+        layout,
+        event,
+        tokens,
+        '%30.30f{2}',
+        `           ${path.join('tap', 'layouts-test.js')}`
+      );
+      testPattern(assert, layout, event, tokens, '%10.-5f{1}', '     st.js');
+      assert.end();
+    });
+
+    t.test('%l should output line number', (assert) => {
+      testPattern(assert, layout, event, tokens, '%l', lineNumber.toString());
+      assert.end();
+    });
+
+    t.test('%l should accept truncation and padding', (assert) => {
+      testPattern(assert, layout, event, tokens, '%5.10l', '    1');
+      testPattern(assert, layout, event, tokens, '%.5l', '1');
+      testPattern(assert, layout, event, tokens, '%.-5l', '1');
+      testPattern(assert, layout, event, tokens, '%-5l', '1    ');
       assert.end();
     });
 
-    t.test('should output the original string if no replacer matches the token', (assert) => {
-      testPattern(assert, layout, event, tokens, '%a{3}', 'a{3}');
+    t.test('%o should output column position', (assert) => {
+      testPattern(assert, layout, event, tokens, '%o', columnNumber.toString());
+      assert.end();
+    });
+
+    t.test('%o should accept truncation and padding', (assert) => {
+      testPattern(assert, layout, event, tokens, '%5.10o', '   14');
+      testPattern(assert, layout, event, tokens, '%.5o', '14');
+      testPattern(assert, layout, event, tokens, '%.1o', '1');
+      testPattern(assert, layout, event, tokens, '%.-1o', '4');
+      testPattern(assert, layout, event, tokens, '%-5o', '14   ');
+      assert.end();
+    });
+
+    t.test('%s should output stack', (assert) => {
+      testPattern(assert, layout, event, tokens, '%s', callStack);
+      assert.end();
+    });
+
+    t.test(
+      '%f should output empty string when fileName does not exist',
+      (assert) => {
+        delete event.fileName;
+        testPattern(assert, layout, event, tokens, '%f', '');
+        assert.end();
+      }
+    );
+
+    t.test(
+      '%l should output empty string when lineNumber does not exist',
+      (assert) => {
+        delete event.lineNumber;
+        testPattern(assert, layout, event, tokens, '%l', '');
+        assert.end();
+      }
+    );
+
+    t.test(
+      '%o should output empty string when columnNumber does not exist',
+      (assert) => {
+        delete event.columnNumber;
+        testPattern(assert, layout, event, tokens, '%o', '');
+        assert.end();
+      }
+    );
+
+    t.test(
+      '%s should output empty string when callStack does not exist',
+      (assert) => {
+        delete event.callStack;
+        testPattern(assert, layout, event, tokens, '%s', '');
+        assert.end();
+      }
+    );
+
+    t.test('should output anything not preceded by % as literal', (assert) => {
+      testPattern(
+        assert,
+        layout,
+        event,
+        tokens,
+        'blah blah blah',
+        'blah blah blah'
+      );
       assert.end();
     });
 
+    t.test(
+      'should output the original string if no replacer matches the token',
+      (assert) => {
+        testPattern(assert, layout, event, tokens, '%a{3}', 'a{3}');
+        assert.end();
+      }
+    );
+
     t.test('should handle complicated patterns', (assert) => {
-      testPattern(assert, layout, event, tokens,
-        '%m%n %c{2} at %d{ABSOLUTE} cheese %p%n',
+      testPattern(
+        assert,
+        layout,
+        event,
+        tokens,
+        '%m%n %c{2} at %d{ABSOLUTE} cheese %p%n', // deprecated
+        `this is a test${EOL} of.tests at 14:18:30.045 cheese DEBUG${EOL}`
+      );
+      testPattern(
+        assert,
+        layout,
+        event,
+        tokens,
+        '%m%n %c{2} at %d{ABSOLUTETIME} cheese %p%n',
         `this is a test${EOL} of.tests at 14:18:30.045 cheese DEBUG${EOL}`
       );
       assert.end();
@@ -323,7 +836,15 @@ test('log4js layouts', (batch) => {
       testPattern(assert, layout, event, tokens, '%.7m', 'this is');
       testPattern(assert, layout, event, tokens, '%.9m', 'this is a');
       testPattern(assert, layout, event, tokens, '%.14m', 'this is a test');
-      testPattern(assert, layout, event, tokens, '%.2919102m', 'this is a test');
+      testPattern(
+        assert,
+        layout,
+        event,
+        tokens,
+        '%.2919102m',
+        'this is a test'
+      );
+      testPattern(assert, layout, event, tokens, '%.-4m', 'test');
       assert.end();
     });
 
@@ -340,35 +861,189 @@ test('log4js layouts', (batch) => {
     });
 
     t.test('%[%r%] should output colored time', (assert) => {
-      testPattern(assert, layout, event, tokens, '%[%r%]', '\x1B[36m14:18:30\x1B[39m');
+      testPattern(
+        assert,
+        layout,
+        event,
+        tokens,
+        '%[%r%]',
+        '\x1B[36m14:18:30\x1B[39m'
+      );
+      assert.end();
+    });
+
+    t.test(
+      '%x{testString} should output the string stored in tokens',
+      (assert) => {
+        testPattern(
+          assert,
+          layout,
+          event,
+          tokens,
+          '%x{testString}',
+          'testStringToken'
+        );
+        assert.end();
+      }
+    );
+
+    t.test(
+      '%x{testFunction} should output the result of the function stored in tokens',
+      (assert) => {
+        testPattern(
+          assert,
+          layout,
+          event,
+          tokens,
+          '%x{testFunction}',
+          'testFunctionToken'
+        );
+        assert.end();
+      }
+    );
+
+    t.test(
+      '%x{doesNotExist} should output the string stored in tokens',
+      (assert) => {
+        testPattern(assert, layout, event, tokens, '%x{doesNotExist}', 'null');
+        assert.end();
+      }
+    );
+
+    t.test(
+      '%x{fnThatUsesLogEvent} should be able to use the logEvent',
+      (assert) => {
+        testPattern(
+          assert,
+          layout,
+          event,
+          tokens,
+          '%x{fnThatUsesLogEvent}',
+          'DEBUG'
+        );
+        assert.end();
+      }
+    );
+
+    t.test('%x should output the string stored in tokens', (assert) => {
+      testPattern(assert, layout, event, tokens, '%x', 'null');
       assert.end();
     });
 
-    t.test('%x{testString} should output the string stored in tokens', (assert) => {
-      testPattern(assert, layout, event, tokens, '%x{testString}', 'testStringToken');
+    t.test(
+      '%X{testString} should output the string stored in tokens',
+      (assert) => {
+        testPattern(
+          assert,
+          layout,
+          event,
+          {},
+          '%X{testString}',
+          'testStringToken'
+        );
+        assert.end();
+      }
+    );
+
+    t.test(
+      '%X{testFunction} should output the result of the function stored in tokens',
+      (assert) => {
+        testPattern(
+          assert,
+          layout,
+          event,
+          {},
+          '%X{testFunction}',
+          'testFunctionToken'
+        );
+        assert.end();
+      }
+    );
+
+    t.test(
+      '%X{doesNotExist} should output the string stored in tokens',
+      (assert) => {
+        testPattern(assert, layout, event, {}, '%X{doesNotExist}', 'null');
+        assert.end();
+      }
+    );
+
+    t.test(
+      '%X{fnThatUsesLogEvent} should be able to use the logEvent',
+      (assert) => {
+        testPattern(
+          assert,
+          layout,
+          event,
+          {},
+          '%X{fnThatUsesLogEvent}',
+          'DEBUG'
+        );
+        assert.end();
+      }
+    );
+
+    t.test('%X should output the string stored in tokens', (assert) => {
+      testPattern(assert, layout, event, {}, '%X', 'null');
       assert.end();
     });
 
-    t.test('%x{testFunction} should output the result of the function stored in tokens', (assert) => {
-      testPattern(assert, layout, event, tokens, '%x{testFunction}', 'testFunctionToken');
+    t.test('%M should output function name', (assert) => {
+      testPattern(assert, layout, event, tokens, '%M', functionName);
       assert.end();
     });
 
-    t.test('%x{doesNotExist} should output the string stored in tokens', (assert) => {
-      testPattern(assert, layout, event, tokens, '%x{doesNotExist}', 'null');
+    t.test(
+      '%M should output empty string when functionName does not exist',
+      (assert) => {
+        delete event.functionName;
+        testPattern(assert, layout, event, tokens, '%M', '');
+        assert.end();
+      }
+    );
+
+    t.test('%C should output class name', (assert) => {
+      testPattern(assert, layout, event, tokens, '%C', className);
       assert.end();
     });
 
-    t.test('%x{fnThatUsesLogEvent} should be able to use the logEvent', (assert) => {
-      testPattern(assert, layout, event, tokens, '%x{fnThatUsesLogEvent}', 'DEBUG');
+    t.test(
+      '%C should output empty string when className does not exist',
+      (assert) => {
+        delete event.className;
+        testPattern(assert, layout, event, tokens, '%C', '');
+        assert.end();
+      }
+    );
+
+    t.test('%A should output function alias', (assert) => {
+      testPattern(assert, layout, event, tokens, '%A', functionAlias);
       assert.end();
     });
 
-    t.test('%x should output the string stored in tokens', (assert) => {
-      testPattern(assert, layout, event, tokens, '%x', 'null');
+    t.test(
+      '%A should output empty string when functionAlias does not exist',
+      (assert) => {
+        delete event.functionAlias;
+        testPattern(assert, layout, event, tokens, '%A', '');
+        assert.end();
+      }
+    );
+
+    t.test('%F should output fully qualified caller name', (assert) => {
+      testPattern(assert, layout, event, tokens, '%F', callerName);
       assert.end();
     });
 
+    t.test(
+      '%F should output empty string when callerName does not exist',
+      (assert) => {
+        delete event.callerName;
+        testPattern(assert, layout, event, tokens, '%F', '');
+        assert.end();
+      }
+    );
+
     t.end();
   });
 
@@ -381,8 +1056,23 @@ test('log4js layouts', (batch) => {
       assert.ok(layouts.layout('colored'));
       assert.ok(layouts.layout('coloured'));
       assert.ok(layouts.layout('pattern'));
+      assert.ok(layouts.layout('dummy'));
       assert.end();
     });
+
+    t.test(
+      'layout pattern maker should pass pattern and tokens to layout from config',
+      (assert) => {
+        let layout = layouts.layout('pattern', { pattern: '%%' });
+        assert.equal(layout({}), '%');
+        layout = layouts.layout('pattern', {
+          pattern: '%x{testStringToken}',
+          tokens: { testStringToken: 'cheese' },
+        });
+        assert.equal(layout({}), 'cheese');
+        assert.end();
+      }
+    );
     t.end();
   });
 
diff --git a/test/tap/levels-before-configure-test.js b/test/tap/levels-before-configure-test.js
new file mode 100644
index 00000000..e75820ae
--- /dev/null
+++ b/test/tap/levels-before-configure-test.js
@@ -0,0 +1,12 @@
+const { test } = require('tap');
+
+test('Accessing things setup in configure before configure is called', (batch) => {
+  batch.test('should work', (t) => {
+    const log4js = require('../../lib/log4js');
+    t.ok(log4js.levels);
+    t.ok(log4js.connectLogger);
+    t.end();
+  });
+
+  batch.end();
+});
diff --git a/test/tap/levels-test.js b/test/tap/levels-test.js
index 54471917..89d7980b 100644
--- a/test/tap/levels-test.js
+++ b/test/tap/levels-test.js
@@ -1,6 +1,4 @@
-'use strict';
-
-const test = require('tap').test;
+const { test } = require('tap');
 const levels = require('../../lib/levels');
 
 function assertThat(assert, level) {
@@ -11,24 +9,24 @@ function assertThat(assert, level) {
   }
 
   return {
-    isLessThanOrEqualTo: function (lvls) {
+    isLessThanOrEqualTo(lvls) {
       assertForEach(assert.ok, level.isLessThanOrEqualTo, lvls);
     },
-    isNotLessThanOrEqualTo: function (lvls) {
+    isNotLessThanOrEqualTo(lvls) {
       assertForEach(assert.notOk, level.isLessThanOrEqualTo, lvls);
     },
-    isGreaterThanOrEqualTo: function (lvls) {
+    isGreaterThanOrEqualTo(lvls) {
       assertForEach(assert.ok, level.isGreaterThanOrEqualTo, lvls);
     },
-    isNotGreaterThanOrEqualTo: function (lvls) {
+    isNotGreaterThanOrEqualTo(lvls) {
       assertForEach(assert.notOk, level.isGreaterThanOrEqualTo, lvls);
     },
-    isEqualTo: function (lvls) {
+    isEqualTo(lvls) {
       assertForEach(assert.ok, level.isEqualTo, lvls);
     },
-    isNotEqualTo: function (lvls) {
+    isNotEqualTo(lvls) {
       assertForEach(assert.notOk, level.isEqualTo, lvls);
-    }
+    },
   };
 }
 
@@ -49,126 +47,117 @@ test('levels', (batch) => {
 
     t.test('ALL', (assert) => {
       const all = levels.ALL;
-      assertThat(assert, all).isLessThanOrEqualTo(
-        [
-          levels.ALL,
-          levels.TRACE,
-          levels.DEBUG,
-          levels.INFO,
-          levels.WARN,
-          levels.ERROR,
-          levels.FATAL,
-          levels.MARK,
-          levels.OFF
-        ]
-      );
-      assertThat(assert, all).isNotGreaterThanOrEqualTo(
-        [
-          levels.TRACE,
-          levels.DEBUG,
-          levels.INFO,
-          levels.WARN,
-          levels.ERROR,
-          levels.FATAL,
-          levels.MARK,
-          levels.OFF
-        ]
-      );
-      assertThat(assert, all).isEqualTo([levels.toLevel('ALL')]);
-      assertThat(assert, all).isNotEqualTo(
-        [
-          levels.TRACE,
-          levels.DEBUG,
-          levels.INFO,
-          levels.WARN,
-          levels.ERROR,
-          levels.FATAL,
-          levels.MARK,
-          levels.OFF
-        ]
-      );
+      assertThat(assert, all).isLessThanOrEqualTo([
+        levels.ALL,
+        levels.TRACE,
+        levels.DEBUG,
+        levels.INFO,
+        levels.WARN,
+        levels.ERROR,
+        levels.FATAL,
+        levels.MARK,
+        levels.OFF,
+      ]);
+      assertThat(assert, all).isNotGreaterThanOrEqualTo([
+        levels.TRACE,
+        levels.DEBUG,
+        levels.INFO,
+        levels.WARN,
+        levels.ERROR,
+        levels.FATAL,
+        levels.MARK,
+        levels.OFF,
+      ]);
+      assertThat(assert, all).isEqualTo([levels.getLevel('ALL')]);
+      assertThat(assert, all).isNotEqualTo([
+        levels.TRACE,
+        levels.DEBUG,
+        levels.INFO,
+        levels.WARN,
+        levels.ERROR,
+        levels.FATAL,
+        levels.MARK,
+        levels.OFF,
+      ]);
       assert.end();
     });
 
     t.test('TRACE', (assert) => {
       const trace = levels.TRACE;
-      assertThat(assert, trace).isLessThanOrEqualTo(
-        [
-          levels.DEBUG,
-          levels.INFO,
-          levels.WARN,
-          levels.ERROR,
-          levels.FATAL,
-          levels.MARK,
-          levels.OFF
-        ]
-      );
+      assertThat(assert, trace).isLessThanOrEqualTo([
+        levels.DEBUG,
+        levels.INFO,
+        levels.WARN,
+        levels.ERROR,
+        levels.FATAL,
+        levels.MARK,
+        levels.OFF,
+      ]);
       assertThat(assert, trace).isNotLessThanOrEqualTo([levels.ALL]);
-      assertThat(assert, trace).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]);
-      assertThat(assert, trace).isNotGreaterThanOrEqualTo(
-        [
-          levels.DEBUG,
-          levels.INFO,
-          levels.WARN,
-          levels.ERROR,
-          levels.FATAL,
-          levels.MARK,
-          levels.OFF
-        ]
-      );
-      assertThat(assert, trace).isEqualTo([levels.toLevel('TRACE')]);
-      assertThat(assert, trace).isNotEqualTo(
-        [
-          levels.ALL,
-          levels.DEBUG,
-          levels.INFO,
-          levels.WARN,
-          levels.ERROR,
-          levels.FATAL,
-          levels.MARK,
-          levels.OFF
-        ]
-      );
+      assertThat(assert, trace).isGreaterThanOrEqualTo([
+        levels.ALL,
+        levels.TRACE,
+      ]);
+      assertThat(assert, trace).isNotGreaterThanOrEqualTo([
+        levels.DEBUG,
+        levels.INFO,
+        levels.WARN,
+        levels.ERROR,
+        levels.FATAL,
+        levels.MARK,
+        levels.OFF,
+      ]);
+      assertThat(assert, trace).isEqualTo([levels.getLevel('TRACE')]);
+      assertThat(assert, trace).isNotEqualTo([
+        levels.ALL,
+        levels.DEBUG,
+        levels.INFO,
+        levels.WARN,
+        levels.ERROR,
+        levels.FATAL,
+        levels.MARK,
+        levels.OFF,
+      ]);
       assert.end();
     });
 
     t.test('DEBUG', (assert) => {
       const debug = levels.DEBUG;
-      assertThat(assert, debug).isLessThanOrEqualTo(
-        [
-          levels.INFO,
-          levels.WARN,
-          levels.ERROR,
-          levels.FATAL,
-          levels.MARK,
-          levels.OFF
-        ]
-      );
-      assertThat(assert, debug).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE]);
-      assertThat(assert, debug).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]);
-      assertThat(assert, debug).isNotGreaterThanOrEqualTo(
-        [
-          levels.INFO,
-          levels.WARN,
-          levels.ERROR,
-          levels.FATAL,
-          levels.MARK,
-          levels.OFF
-        ]
-      );
-      assertThat(assert, debug).isEqualTo([levels.toLevel('DEBUG')]);
-      assertThat(assert, debug).isNotEqualTo(
-        [
-          levels.ALL,
-          levels.TRACE,
-          levels.INFO,
-          levels.WARN,
-          levels.ERROR,
-          levels.FATAL,
-          levels.MARK,
-          levels.OFF
-        ]
-      );
+      assertThat(assert, debug).isLessThanOrEqualTo([
+        levels.INFO,
+        levels.WARN,
+        levels.ERROR,
+        levels.FATAL,
+        levels.MARK,
+        levels.OFF,
+      ]);
+      assertThat(assert, debug).isNotLessThanOrEqualTo([
+        levels.ALL,
+        levels.TRACE,
+      ]);
+      assertThat(assert, debug).isGreaterThanOrEqualTo([
+        levels.ALL,
+        levels.TRACE,
+      ]);
+      assertThat(assert, debug).isNotGreaterThanOrEqualTo([
+        levels.INFO,
+        levels.WARN,
+        levels.ERROR,
+        levels.FATAL,
+        levels.MARK,
+        levels.OFF,
+      ]);
+      assertThat(assert, debug).isEqualTo([levels.getLevel('DEBUG')]);
+      assertThat(assert, debug).isNotEqualTo([
+        levels.ALL,
+        levels.TRACE,
+        levels.INFO,
+        levels.WARN,
+        levels.ERROR,
+        levels.FATAL,
+        levels.MARK,
+        levels.OFF,
+      ]);
       assert.end();
     });
 
@@ -179,18 +168,26 @@ test('levels', (batch) => {
         levels.ERROR,
         levels.FATAL,
         levels.MARK,
-        levels.OFF
+        levels.OFF,
+      ]);
+      assertThat(assert, info).isNotLessThanOrEqualTo([
+        levels.ALL,
+        levels.TRACE,
+        levels.DEBUG,
+      ]);
+      assertThat(assert, info).isGreaterThanOrEqualTo([
+        levels.ALL,
+        levels.TRACE,
+        levels.DEBUG,
       ]);
-      assertThat(assert, info).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]);
-      assertThat(assert, info).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]);
       assertThat(assert, info).isNotGreaterThanOrEqualTo([
         levels.WARN,
         levels.ERROR,
         levels.FATAL,
         levels.MARK,
-        levels.OFF
+        levels.OFF,
       ]);
-      assertThat(assert, info).isEqualTo([levels.toLevel('INFO')]);
+      assertThat(assert, info).isEqualTo([levels.getLevel('INFO')]);
       assertThat(assert, info).isNotEqualTo([
         levels.ALL,
         levels.TRACE,
@@ -199,30 +196,38 @@ test('levels', (batch) => {
         levels.ERROR,
         levels.FATAL,
         levels.MARK,
-        levels.OFF
+        levels.OFF,
       ]);
       assert.end();
     });
 
     t.test('WARN', (assert) => {
       const warn = levels.WARN;
-      assertThat(assert, warn).isLessThanOrEqualTo([levels.ERROR, levels.FATAL, levels.MARK, levels.OFF]);
+      assertThat(assert, warn).isLessThanOrEqualTo([
+        levels.ERROR,
+        levels.FATAL,
+        levels.MARK,
+        levels.OFF,
+      ]);
       assertThat(assert, warn).isNotLessThanOrEqualTo([
         levels.ALL,
         levels.TRACE,
         levels.DEBUG,
-        levels.INFO
+        levels.INFO,
       ]);
       assertThat(assert, warn).isGreaterThanOrEqualTo([
         levels.ALL,
         levels.TRACE,
         levels.DEBUG,
-        levels.INFO
+        levels.INFO,
       ]);
       assertThat(assert, warn).isNotGreaterThanOrEqualTo([
-        levels.ERROR, levels.FATAL, levels.MARK, levels.OFF
+        levels.ERROR,
+        levels.FATAL,
+        levels.MARK,
+        levels.OFF,
       ]);
-      assertThat(assert, warn).isEqualTo([levels.toLevel('WARN')]);
+      assertThat(assert, warn).isEqualTo([levels.getLevel('WARN')]);
       assertThat(assert, warn).isNotEqualTo([
         levels.ALL,
         levels.TRACE,
@@ -230,30 +235,38 @@ test('levels', (batch) => {
         levels.INFO,
         levels.ERROR,
         levels.FATAL,
-        levels.OFF
+        levels.OFF,
       ]);
       assert.end();
     });
 
     t.test('ERROR', (assert) => {
       const error = levels.ERROR;
-      assertThat(assert, error).isLessThanOrEqualTo([levels.FATAL, levels.MARK, levels.OFF]);
+      assertThat(assert, error).isLessThanOrEqualTo([
+        levels.FATAL,
+        levels.MARK,
+        levels.OFF,
+      ]);
       assertThat(assert, error).isNotLessThanOrEqualTo([
         levels.ALL,
         levels.TRACE,
         levels.DEBUG,
         levels.INFO,
-        levels.WARN
+        levels.WARN,
       ]);
       assertThat(assert, error).isGreaterThanOrEqualTo([
         levels.ALL,
         levels.TRACE,
         levels.DEBUG,
         levels.INFO,
-        levels.WARN
+        levels.WARN,
       ]);
-      assertThat(assert, error).isNotGreaterThanOrEqualTo([levels.FATAL, levels.MARK, levels.OFF]);
-      assertThat(assert, error).isEqualTo([levels.toLevel('ERROR')]);
+      assertThat(assert, error).isNotGreaterThanOrEqualTo([
+        levels.FATAL,
+        levels.MARK,
+        levels.OFF,
+      ]);
+      assertThat(assert, error).isEqualTo([levels.getLevel('ERROR')]);
       assertThat(assert, error).isNotEqualTo([
         levels.ALL,
         levels.TRACE,
@@ -262,7 +275,7 @@ test('levels', (batch) => {
         levels.WARN,
         levels.FATAL,
         levels.MARK,
-        levels.OFF
+        levels.OFF,
       ]);
       assert.end();
     });
@@ -276,7 +289,7 @@ test('levels', (batch) => {
         levels.DEBUG,
         levels.INFO,
         levels.WARN,
-        levels.ERROR
+        levels.ERROR,
       ]);
       assertThat(assert, fatal).isGreaterThanOrEqualTo([
         levels.ALL,
@@ -284,10 +297,13 @@ test('levels', (batch) => {
         levels.DEBUG,
         levels.INFO,
         levels.WARN,
-        levels.ERROR
+        levels.ERROR,
       ]);
-      assertThat(assert, fatal).isNotGreaterThanOrEqualTo([levels.MARK, levels.OFF]);
-      assertThat(assert, fatal).isEqualTo([levels.toLevel('FATAL')]);
+      assertThat(assert, fatal).isNotGreaterThanOrEqualTo([
+        levels.MARK,
+        levels.OFF,
+      ]);
+      assertThat(assert, fatal).isEqualTo([levels.getLevel('FATAL')]);
       assertThat(assert, fatal).isNotEqualTo([
         levels.ALL,
         levels.TRACE,
@@ -296,7 +312,7 @@ test('levels', (batch) => {
         levels.WARN,
         levels.ERROR,
         levels.MARK,
-        levels.OFF
+        levels.OFF,
       ]);
       assert.end();
     });
@@ -311,7 +327,7 @@ test('levels', (batch) => {
         levels.INFO,
         levels.WARN,
         levels.FATAL,
-        levels.ERROR
+        levels.ERROR,
       ]);
       assertThat(assert, mark).isGreaterThanOrEqualTo([
         levels.ALL,
@@ -320,10 +336,10 @@ test('levels', (batch) => {
         levels.INFO,
         levels.WARN,
         levels.ERROR,
-        levels.FATAL
+        levels.FATAL,
       ]);
       assertThat(assert, mark).isNotGreaterThanOrEqualTo([levels.OFF]);
-      assertThat(assert, mark).isEqualTo([levels.toLevel('MARK')]);
+      assertThat(assert, mark).isEqualTo([levels.getLevel('MARK')]);
       assertThat(assert, mark).isNotEqualTo([
         levels.ALL,
         levels.TRACE,
@@ -332,7 +348,7 @@ test('levels', (batch) => {
         levels.WARN,
         levels.ERROR,
         levels.FATAL,
-        levels.OFF
+        levels.OFF,
       ]);
       assert.end();
     });
@@ -347,7 +363,7 @@ test('levels', (batch) => {
         levels.WARN,
         levels.ERROR,
         levels.FATAL,
-        levels.MARK
+        levels.MARK,
       ]);
       assertThat(assert, off).isGreaterThanOrEqualTo([
         levels.ALL,
@@ -357,9 +373,9 @@ test('levels', (batch) => {
         levels.WARN,
         levels.ERROR,
         levels.FATAL,
-        levels.MARK
+        levels.MARK,
       ]);
-      assertThat(assert, off).isEqualTo([levels.toLevel('OFF')]);
+      assertThat(assert, off).isEqualTo([levels.getLevel('OFF')]);
       assertThat(assert, off).isNotEqualTo([
         levels.ALL,
         levels.TRACE,
@@ -368,7 +384,7 @@ test('levels', (batch) => {
         levels.WARN,
         levels.ERROR,
         levels.FATAL,
-        levels.MARK
+        levels.MARK,
       ]);
       assert.end();
     });
@@ -378,14 +394,26 @@ test('levels', (batch) => {
   batch.test('isGreaterThanOrEqualTo', (t) => {
     const info = levels.INFO;
     assertThat(t, info).isGreaterThanOrEqualTo(['all', 'trace', 'debug']);
-    assertThat(t, info).isNotGreaterThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'MARK', 'off']);
+    assertThat(t, info).isNotGreaterThanOrEqualTo([
+      'warn',
+      'ERROR',
+      'Fatal',
+      'MARK',
+      'off',
+    ]);
     t.end();
   });
 
   batch.test('isLessThanOrEqualTo', (t) => {
     const info = levels.INFO;
     assertThat(t, info).isNotLessThanOrEqualTo(['all', 'trace', 'debug']);
-    assertThat(t, info).isLessThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'MARK', 'off']);
+    assertThat(t, info).isLessThanOrEqualTo([
+      'warn',
+      'ERROR',
+      'Fatal',
+      'MARK',
+      'off',
+    ]);
     t.end();
   });
 
@@ -395,12 +423,16 @@ test('levels', (batch) => {
     t.end();
   });
 
-  batch.test('toLevel', (t) => {
-    t.equal(levels.toLevel('debug'), levels.DEBUG);
-    t.equal(levels.toLevel('DEBUG'), levels.DEBUG);
-    t.equal(levels.toLevel('DeBuG'), levels.DEBUG);
-    t.notOk(levels.toLevel('cheese'));
-    t.equal(levels.toLevel('cheese', levels.DEBUG), levels.DEBUG);
+  batch.test('getLevel', (t) => {
+    t.equal(levels.getLevel('debug'), levels.DEBUG);
+    t.equal(levels.getLevel('DEBUG'), levels.DEBUG);
+    t.equal(levels.getLevel('DeBuG'), levels.DEBUG);
+    t.notOk(levels.getLevel('cheese'));
+    t.equal(levels.getLevel('cheese', levels.DEBUG), levels.DEBUG);
+    t.equal(
+      levels.getLevel({ level: 10000, levelStr: 'DEBUG', colour: 'cyan' }),
+      levels.DEBUG
+    );
     t.end();
   });
 
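For orientation on the hunk above: the tests now exercise levels.getLevel, the replacement for the removed levels.toLevel, and the new assertion also passes a level-shaped object. A minimal sketch of the behaviour those assertions rely on, using the same in-repo require path as the test file (assumes it runs from test/tap, like the file above; not part of the diff itself):

    const assert = require('assert');
    const levels = require('../../lib/levels');

    assert.strictEqual(levels.getLevel('debug'), levels.DEBUG); // lookup by name
    assert.strictEqual(levels.getLevel('DeBuG'), levels.DEBUG); // case-insensitive
    assert.ok(!levels.getLevel('cheese')); // unknown names are falsy...
    assert.strictEqual(levels.getLevel('cheese', levels.DEBUG), levels.DEBUG); // ...unless a default is given
    // A level-shaped object resolves to the matching Level instance.
    assert.strictEqual(
      levels.getLevel({ level: 10000, levelStr: 'DEBUG', colour: 'cyan' }),
      levels.DEBUG
    );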
diff --git a/test/tap/log-abspath-test.js b/test/tap/log-abspath-test.js
deleted file mode 100644
index aa274ac7..00000000
--- a/test/tap/log-abspath-test.js
+++ /dev/null
@@ -1,88 +0,0 @@
-'use strict';
-
-const test = require('tap').test;
-const path = require('path');
-const sandbox = require('sandboxed-module');
-
-test('log4js-abspath', (batch) => {
-  batch.test('options', (t) => {
-    let appenderOptions;
-
-    const log4js = sandbox.require(
-      '../../lib/log4js',
-      {
-        singleOnly: true,
-        requires: {
-          './appenders/fake': {
-            name: 'fake',
-            appender: function () {
-            },
-            configure: function (configuration, options) {
-              appenderOptions = options;
-              return function () {
-              };
-            }
-          }
-        }
-      }
-    );
-
-    const config = {
-      appenders: [
-        {
-          type: 'fake',
-          filename: 'cheesy-wotsits.log'
-        }
-      ]
-    };
-
-    log4js.configure(config, {
-      cwd: '/absolute/path/to'
-    });
-    t.test('should be passed to appenders during configuration', (assert) => {
-      assert.equal(appenderOptions.cwd, '/absolute/path/to');
-      assert.end();
-    });
-    t.end();
-  });
-
-  batch.test('file appender', (t) => {
-    let fileOpened;
-
-    const fileAppender = sandbox.require(
-      '../../lib/appenders/file',
-      {
-        requires: {
-          streamroller: {
-            RollingFileStream: function (file) {
-              fileOpened = file;
-              return {
-                on: function () {
-                },
-                end: function () {
-                }
-              };
-            }
-          }
-        }
-      }
-    );
-
-    fileAppender.configure(
-      {
-        filename: 'whatever.log',
-        maxLogSize: 10
-      },
-      { cwd: '/absolute/path/to' }
-    );
-
-    t.test('should prepend options.cwd to config.filename', (assert) => {
-      const expected = path.sep + path.join('absolute', 'path', 'to', 'whatever.log');
-      assert.equal(fileOpened, expected);
-      assert.end();
-    });
-    t.end();
-  });
-
-  batch.end();
-});
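The deleted test covered the old second `options` argument to log4js.configure (a cwd that appenders could prepend to relative filenames). The rewritten configure calls shown elsewhere in this diff take a configuration object or filename with no options argument, so a caller who needs a specific directory builds the path explicitly. A hedged sketch of that pattern (the directory name is illustrative, not from the diff):

    const path = require('path');
    const log4js = require('../../lib/log4js');

    log4js.configure({
      appenders: {
        file: {
          type: 'file',
          // Resolve the directory yourself; there is no cwd option to do it for you.
          filename: path.join('/absolute/path/to', 'whatever.log'),
        },
      },
      categories: { default: { appenders: ['file'], level: 'debug' } },
    });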
diff --git a/test/tap/log4js.json b/test/tap/log4js.json
index 3a4e54a9..1bae43a3 100644
--- a/test/tap/log4js.json
+++ b/test/tap/log4js.json
@@ -1,16 +1,16 @@
 {
   "appenders": [
-    { 
-      "category": "tests", 
-      "type": "file", 
-      "filename": "tmp-tests.log", 
-      "layout": { 
-        "type": "messagePassThrough" 
-      } 
+    {
+      "category": "tests",
+      "type": "file",
+      "filename": "tmp-tests.log",
+      "layout": {
+        "type": "messagePassThrough"
+      }
     }
   ],
-  
+
   "levels": {
-    "tests":  "WARN"
+    "tests": "WARN"
   }
 }
diff --git a/test/tap/logFacesAppender-test.js b/test/tap/logFacesAppender-test.js
deleted file mode 100644
index fe1a6322..00000000
--- a/test/tap/logFacesAppender-test.js
+++ /dev/null
@@ -1,89 +0,0 @@
-'use strict';
-
-const test = require('tap').test;
-const log4js = require('../../lib/log4js');
-
-function setupLogging(category, options) {
-  const sent = {};
-
-  function fake(event) {
-    Object.keys(event).forEach((key) => {
-      sent[key] = event[key];
-    });
-  }
-
-  const lfsModule = require('../../lib/appenders/logFacesAppender');
-  options.send = fake;
-  log4js.clearAppenders();
-  log4js.addAppender(lfsModule.configure(options), category);
-  lfsModule.setContext('foo', 'bar');
-  lfsModule.setContext('bar', 'foo');
-
-  return {
-    logger: log4js.getLogger(category),
-    results: sent
-  };
-}
-
-test('logFaces appender', (batch) => {
-  batch.test('when using HTTP receivers', (t) => {
-    const setup = setupLogging('myCategory', {
-      type: 'logFacesAppender',
-      application: 'LFS-HTTP',
-      url: 'http://localhost/receivers/rx1'
-    });
-
-    setup.logger.warn('Log event #1');
-
-    t.test('an event should be sent', (assert) => {
-      const event = setup.results;
-      assert.equal(event.a, 'LFS-HTTP');
-      assert.equal(event.m, 'Log event #1');
-      assert.equal(event.g, 'myCategory');
-      assert.equal(event.p, 'WARN');
-      assert.equal(event.p_foo, 'bar');
-      assert.equal(event.p_bar, 'foo');
-
-      // Assert timestamp, up to hours resolution.
-      const date = new Date(event.t);
-      assert.equal(
-        date.toISOString().substring(0, 14),
-        new Date().toISOString().substring(0, 14)
-      );
-      assert.end();
-    });
-    t.end();
-  });
-
-  batch.test('when using UDP receivers', (t) => {
-    const setup = setupLogging('udpCategory', {
-      type: 'logFacesAppender',
-      application: 'LFS-UDP',
-      remoteHost: '127.0.0.1',
-      port: 55201
-    });
-
-    setup.logger.error('Log event #2');
-
-    t.test('an event should be sent', (assert) => {
-      const event = setup.results;
-      assert.equal(event.a, 'LFS-UDP');
-      assert.equal(event.m, 'Log event #2');
-      assert.equal(event.g, 'udpCategory');
-      assert.equal(event.p, 'ERROR');
-      assert.equal(event.p_foo, 'bar');
-      assert.equal(event.p_bar, 'foo');
-
-      // Assert timestamp, up to hours resolution.
-      const date = new Date(event.t);
-      assert.equal(
-        date.toISOString().substring(0, 14),
-        new Date().toISOString().substring(0, 14)
-      );
-      assert.end();
-    });
-    t.end();
-  });
-
-  batch.end();
-});
diff --git a/test/tap/logLevelFilter-test.js b/test/tap/logLevelFilter-test.js
index 9a09aefd..32fb6f99 100644
--- a/test/tap/logLevelFilter-test.js
+++ b/test/tap/logLevelFilter-test.js
@@ -1,11 +1,11 @@
-'use strict';
-
-const test = require('tap').test;
+const { test } = require('tap');
 const fs = require('fs');
 const os = require('os');
 
 const EOL = os.EOL || '\n';
 
+const osDelay = process.platform === 'win32' ? 400 : 200;
+
 function remove(filename) {
   try {
     fs.unlinkSync(filename);
@@ -17,20 +17,21 @@ function remove(filename) {
 test('log4js logLevelFilter', (batch) => {
   batch.test('appender', (t) => {
     const log4js = require('../../lib/log4js');
-    const logEvents = [];
+    const recording = require('../../lib/appenders/recording');
 
-    log4js.clearAppenders();
-    log4js.addAppender(
-      require('../../lib/appenders/logLevelFilter')
-        .appender(
-          'ERROR',
-          undefined,
-          (evt) => {
-            logEvents.push(evt);
-          }
-        ),
-      'logLevelTest'
-    );
+    log4js.configure({
+      appenders: {
+        recorder: { type: 'recording' },
+        filtered: {
+          type: 'logLevelFilter',
+          appender: 'recorder',
+          level: 'ERROR',
+        },
+      },
+      categories: {
+        default: { appenders: ['filtered'], level: 'debug' },
+      },
+    });
 
     const logger = log4js.getLogger('logLevelTest');
     logger.debug('this should not trigger an event');
@@ -38,12 +39,17 @@ test('log4js logLevelFilter', (batch) => {
     logger.error('this should, though');
     logger.fatal('so should this');
 
-    t.test('should only pass log events greater than or equal to its own level', (assert) => {
-      assert.equal(logEvents.length, 2);
-      assert.equal(logEvents[0].data[0], 'this should, though');
-      assert.equal(logEvents[1].data[0], 'so should this');
-      assert.end();
-    });
+    const logEvents = recording.replay();
+
+    t.test(
+      'should only pass log events greater than or equal to its own level',
+      (assert) => {
+        assert.equal(logEvents.length, 2);
+        assert.equal(logEvents[0].data[0], 'this should, though');
+        assert.equal(logEvents[1].data[0], 'so should this');
+        assert.end();
+      }
+    );
     t.end();
   });
 
@@ -54,7 +60,47 @@ test('log4js logLevelFilter', (batch) => {
     remove(`${__dirname}/logLevelFilter-warnings.log`);
     remove(`${__dirname}/logLevelFilter-debugs.log`);
 
-    log4js.configure('test/tap/with-logLevelFilter.json');
+    t.teardown(() => {
+      remove(`${__dirname}/logLevelFilter.log`);
+      remove(`${__dirname}/logLevelFilter-warnings.log`);
+      remove(`${__dirname}/logLevelFilter-debugs.log`);
+    });
+
+    log4js.configure({
+      appenders: {
+        'warning-file': {
+          type: 'file',
+          filename: 'test/tap/logLevelFilter-warnings.log',
+          layout: { type: 'messagePassThrough' },
+        },
+        warnings: {
+          type: 'logLevelFilter',
+          level: 'WARN',
+          appender: 'warning-file',
+        },
+        'debug-file': {
+          type: 'file',
+          filename: 'test/tap/logLevelFilter-debugs.log',
+          layout: { type: 'messagePassThrough' },
+        },
+        debugs: {
+          type: 'logLevelFilter',
+          level: 'TRACE',
+          maxLevel: 'DEBUG',
+          appender: 'debug-file',
+        },
+        tests: {
+          type: 'file',
+          filename: 'test/tap/logLevelFilter.log',
+          layout: {
+            type: 'messagePassThrough',
+          },
+        },
+      },
+      categories: {
+        default: { appenders: ['tests', 'warnings', 'debugs'], level: 'trace' },
+      },
+    });
     const logger = log4js.getLogger('tests');
     logger.debug('debug');
     logger.info('info');
@@ -64,29 +110,54 @@ test('log4js logLevelFilter', (batch) => {
     logger.trace('trace');
     // wait for the file system to catch up
     setTimeout(() => {
-      t.test('tmp-tests.log should contain all log messages', (assert) => {
-        fs.readFile(`${__dirname}/logLevelFilter.log`, 'utf8', (err, contents) => {
-          const messages = contents.trim().split(EOL);
-          assert.same(messages, ['debug', 'info', 'error', 'warn', 'debug', 'trace']);
-          assert.end();
-        });
-      });
-      t.test('tmp-tests-warnings.log should contain only error and warning logs', (assert) => {
-        fs.readFile(`${__dirname}/logLevelFilter-warnings.log`, 'utf8', (err, contents) => {
-          const messages = contents.trim().split(EOL);
-          assert.deepEqual(messages, ['error', 'warn']);
-          assert.end();
-        });
-      });
-      t.test('tmp-tests-debugs.log should contain only trace and debug logs', (assert) => {
-        fs.readFile(`${__dirname}/logLevelFilter-debugs.log`, 'utf8', (err, contents) => {
-          const messages = contents.trim().split(EOL);
-          assert.deepEqual(messages, ['debug', 'debug', 'trace']);
-          assert.end();
-        });
+      t.test('logLevelFilter.log should contain all log messages', (assert) => {
+        fs.readFile(
+          `${__dirname}/logLevelFilter.log`,
+          'utf8',
+          (err, contents) => {
+            const messages = contents.trim().split(EOL);
+            assert.same(messages, [
+              'debug',
+              'info',
+              'error',
+              'warn',
+              'debug',
+              'trace',
+            ]);
+            assert.end();
+          }
+        );
       });
+      t.test(
+        'logLevelFilter-warnings.log should contain only error and warning logs',
+        (assert) => {
+          fs.readFile(
+            `${__dirname}/logLevelFilter-warnings.log`,
+            'utf8',
+            (err, contents) => {
+              const messages = contents.trim().split(EOL);
+              assert.same(messages, ['error', 'warn']);
+              assert.end();
+            }
+          );
+        }
+      );
+      t.test(
+        'logLevelFilter-debugs.log should contain only trace and debug logs',
+        (assert) => {
+          fs.readFile(
+            `${__dirname}/logLevelFilter-debugs.log`,
+            'utf8',
+            (err, contents) => {
+              const messages = contents.trim().split(EOL);
+              assert.same(messages, ['debug', 'debug', 'trace']);
+              assert.end();
+            }
+          );
+        }
+      );
       t.end();
-    }, 500);
+    }, osDelay);
   });
 
   batch.end();
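For readers skimming the rewritten logLevelFilter tests: the filter is now wired through log4js.configure rather than the removed addAppender API. A minimal sketch of the configuration shape the first test exercises, with the recording appender used only because it makes events easy to replay (same in-repo require paths as the tests):

    const log4js = require('../../lib/log4js');
    const recording = require('../../lib/appenders/recording');

    log4js.configure({
      appenders: {
        recorder: { type: 'recording' },
        errorsOnly: {
          type: 'logLevelFilter',
          appender: 'recorder', // events that pass the filter go to this appender
          level: 'ERROR', // minimum level that passes
          // maxLevel: 'DEBUG' would add an upper bound, as the "debugs" appender above does
        },
      },
      categories: { default: { appenders: ['errorsOnly'], level: 'debug' } },
    });

    const logger = log4js.getLogger();
    logger.debug('filtered out');
    logger.error('passes the filter');

    // replay() returns the captured events; only the error should be present.
    const messages = recording.replay().map((e) => e.data[0]); // ['passes the filter']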
diff --git a/test/tap/logger-test.js b/test/tap/logger-test.js
index 6edf2295..3320bfb9 100644
--- a/test/tap/logger-test.js
+++ b/test/tap/logger-test.js
@@ -1,81 +1,542 @@
-'use strict';
-
-const test = require('tap').test;
+const { test } = require('tap');
+const debug = require('debug')('log4js:test.logger');
+const sandbox = require('@log4js-node/sandboxed-module');
+const callsites = require('callsites');
 const levels = require('../../lib/levels');
-const loggerModule = require('../../lib/logger');
+const categories = require('../../lib/categories');
+
+/** @type {import('../../types/log4js').LoggingEvent[]} */
+const events = [];
+/** @type {string[]} */
+const messages = [];
 
-const Logger = loggerModule.Logger;
+/**
+ * @typedef {import('../../types/log4js').Logger} LoggerClass
+ */
+
+/** @type {{new (): LoggerClass}} */
+const Logger = sandbox.require('../../lib/logger', {
+  requires: {
+    './levels': levels,
+    './categories': categories,
+    './clustering': {
+      isMaster: () => true,
+      onlyOnMaster: (fn) => fn(),
+      send: (evt) => {
+        debug('fake clustering got event:', evt);
+        events.push(evt);
+      },
+    },
+  },
+  globals: {
+    console: {
+      ...console,
+      error(msg) {
+        messages.push(msg);
+      },
+    },
+  },
+});
+
+const testConfig = {
+  level: levels.TRACE,
+};
 
 test('../../lib/logger', (batch) => {
+  batch.beforeEach((done) => {
+    events.length = 0;
+    testConfig.level = levels.TRACE;
+    if (typeof done === 'function') {
+      done();
+    }
+  });
+
   batch.test('constructor with no parameters', (t) => {
-    const logger = new Logger();
-    t.equal(logger.category, Logger.DEFAULT_CATEGORY, 'should use default category');
-    t.equal(logger.level, levels.TRACE, 'should use TRACE log level');
+    t.throws(() => new Logger(), new Error('No category provided.'));
     t.end();
   });
 
   batch.test('constructor with category', (t) => {
     const logger = new Logger('cheese');
     t.equal(logger.category, 'cheese', 'should use category');
-    t.equal(logger.level, levels.TRACE, 'should use TRACE log level');
+    t.equal(logger.level, levels.OFF, 'should use OFF log level');
     t.end();
   });
 
-  batch.test('constructor with category and level', (t) => {
-    const logger = new Logger('cheese', 'debug');
+  batch.test('set level should delegate', (t) => {
+    const logger = new Logger('cheese');
+    logger.level = 'debug';
     t.equal(logger.category, 'cheese', 'should use category');
     t.equal(logger.level, levels.DEBUG, 'should use level');
     t.end();
   });
 
   batch.test('isLevelEnabled', (t) => {
-    const logger = new Logger('cheese', 'info');
+    const logger = new Logger('cheese');
     const functions = [
-      'isTraceEnabled', 'isDebugEnabled', 'isInfoEnabled',
-      'isWarnEnabled', 'isErrorEnabled', 'isFatalEnabled'
+      'isTraceEnabled',
+      'isDebugEnabled',
+      'isInfoEnabled',
+      'isWarnEnabled',
+      'isErrorEnabled',
+      'isFatalEnabled',
     ];
-    t.test('should provide a level enabled function for all levels', (subtest) => {
-      subtest.plan(functions.length);
-      functions.forEach((fn) => {
-        subtest.type(logger[fn], 'function');
-      });
-    });
-    t.test('should return the right values', (subtest) => {
-      subtest.notOk(logger.isTraceEnabled());
-      subtest.notOk(logger.isDebugEnabled());
-      subtest.ok(logger.isInfoEnabled());
-      subtest.ok(logger.isWarnEnabled());
-      subtest.ok(logger.isErrorEnabled());
-      subtest.ok(logger.isFatalEnabled());
-      subtest.end();
-    });
+    t.test(
+      'should provide a level enabled function for all levels',
+      (subtest) => {
+        subtest.plan(functions.length);
+        functions.forEach((fn) => {
+          subtest.type(logger[fn], 'function');
+        });
+      }
+    );
+    logger.level = 'INFO';
+    t.notOk(logger.isTraceEnabled());
+    t.notOk(logger.isDebugEnabled());
+    t.ok(logger.isInfoEnabled());
+    t.ok(logger.isWarnEnabled());
+    t.ok(logger.isErrorEnabled());
+    t.ok(logger.isFatalEnabled());
     t.end();
   });
 
-  batch.test('should emit log events', (t) => {
-    const events = [];
-    const logger = new Logger();
-    logger.addListener('log', (logEvent) => {
-      events.push(logEvent);
-    });
+  batch.test('should send log events to dispatch function', (t) => {
+    const logger = new Logger('cheese');
+    logger.level = 'debug';
     logger.debug('Event 1');
-    loggerModule.disableAllLogWrites();
     logger.debug('Event 2');
-    loggerModule.enableAllLogWrites();
     logger.debug('Event 3');
 
-    t.test('when log writes are enabled', (assert) => {
-      assert.equal(events[0].data[0], 'Event 1');
-      assert.end();
-    });
+    t.equal(events.length, 3);
+    t.equal(events[0].data[0], 'Event 1');
+    t.equal(events[1].data[0], 'Event 2');
+    t.equal(events[2].data[0], 'Event 3');
+    t.end();
+  });
+
+  batch.test('should add context values to every event', (t) => {
+    const logger = new Logger('fromage');
+    logger.level = 'debug';
+    logger.debug('Event 1');
+    logger.addContext('cheese', 'edam');
+    logger.debug('Event 2');
+    logger.debug('Event 3');
+    logger.addContext('biscuits', 'timtam');
+    logger.debug('Event 4');
+    logger.removeContext('cheese');
+    logger.debug('Event 5');
+    logger.clearContext();
+    logger.debug('Event 6');
+
+    t.equal(events.length, 6);
+    t.same(events[0].context, {});
+    t.same(events[1].context, { cheese: 'edam' });
+    t.same(events[2].context, { cheese: 'edam' });
+    t.same(events[3].context, { cheese: 'edam', biscuits: 'timtam' });
+    t.same(events[4].context, { biscuits: 'timtam' });
+    t.same(events[5].context, {});
+    t.end();
+  });
+
+  batch.test('should not break when log data has no toString', (t) => {
+    const logger = new Logger('thing');
+    logger.level = 'debug';
+    logger.info('Just testing ', Object.create(null));
+
+    t.equal(events.length, 1);
+    t.end();
+  });
+
+  batch.test(
+    'useCallStack should default to disabled unless manually enabled',
+    (t) => {
+      const logger = new Logger('stack');
+      logger.level = 'debug';
+
+      t.equal(logger.useCallStack, false);
+
+      logger.debug('test no callStack');
+      let event = events.shift();
+      t.notMatch(event, { functionName: String });
+      t.notMatch(event, { fileName: String });
+      t.notMatch(event, { lineNumber: Number });
+      t.notMatch(event, { columnNumber: Number });
+      t.notMatch(event, { callStack: String });
+
+      logger.useCallStack = false;
+      t.equal(logger.useCallStack, false);
+
+      logger.useCallStack = 0;
+      t.equal(logger.useCallStack, false);
+
+      logger.useCallStack = '';
+      t.equal(logger.useCallStack, false);
+
+      logger.useCallStack = null;
+      t.equal(logger.useCallStack, false);
+
+      logger.useCallStack = undefined;
+      t.equal(logger.useCallStack, false);
+
+      logger.useCallStack = 'true';
+      t.equal(logger.useCallStack, false);
+
+      logger.useCallStack = true;
+      t.equal(logger.useCallStack, true);
+      logger.debug('test with callStack');
+      event = events.shift();
+      t.match(event, {
+        functionName: String,
+        fileName: String,
+        lineNumber: Number,
+        columnNumber: Number,
+        callStack: String,
+      });
+      t.end();
+    }
+  );
+
+  batch.test('should correctly switch on/off useCallStack', (t) => {
+    const logger = new Logger('stack');
+    logger.level = 'debug';
+    logger.useCallStack = true;
+    t.equal(logger.useCallStack, true);
+
+    logger.info('hello world');
+    const callsite = callsites()[0];
+
+    t.equal(events.length, 1);
+    t.equal(events[0].data[0], 'hello world');
+    t.equal(events[0].fileName, callsite.getFileName());
+    t.equal(events[0].lineNumber, callsite.getLineNumber() - 1);
+    t.equal(events[0].columnNumber, 12);
+
+    logger.useCallStack = false;
+    logger.info('disabled');
+    t.equal(logger.useCallStack, false);
+    t.equal(events[1].data[0], 'disabled');
+    t.equal(events[1].fileName, undefined);
+    t.equal(events[1].lineNumber, undefined);
+    t.equal(events[1].columnNumber, undefined);
+    t.end();
+  });
+
+  batch.test(
+    'switching useCallStack on/off should apply to all loggers in the same category',
+    (t) => {
+      const logger1 = new Logger('stack');
+      logger1.level = 'debug';
+      logger1.useCallStack = true;
+      const logger2 = new Logger('stack');
+      logger2.level = 'debug';
+
+      logger1.info('hello world');
+      const callsite = callsites()[0];
+
+      t.equal(logger1.useCallStack, true);
+      t.equal(events.length, 1);
+      t.equal(events[0].data[0], 'hello world');
+      t.equal(events[0].fileName, callsite.getFileName());
+      t.equal(events[0].lineNumber, callsite.getLineNumber() - 1);
+      t.equal(events[0].columnNumber, 15); // col of the '.' in logger1.info(...)
+
+      logger2.info('hello world');
+      const callsite2 = callsites()[0];
+
+      t.equal(logger2.useCallStack, true);
+      t.equal(events[1].data[0], 'hello world');
+      t.equal(events[1].fileName, callsite2.getFileName());
+      t.equal(events[1].lineNumber, callsite2.getLineNumber() - 1);
+      t.equal(events[1].columnNumber, 15); // col of the '.' in logger2.info(...)
+
+      logger1.useCallStack = false;
+      logger2.info('hello world');
+      t.equal(logger2.useCallStack, false);
+      t.equal(events[2].data[0], 'hello world');
+      t.equal(events[2].fileName, undefined);
+      t.equal(events[2].lineNumber, undefined);
+      t.equal(events[2].columnNumber, undefined);
+
+      t.end();
+    }
+  );
 
-    t.test('but not when log writes are disabled', (assert) => {
-      assert.equal(events.length, 2);
-      assert.equal(events[1].data[0], 'Event 3');
-      assert.end();
+  batch.test('parseCallStack function coverage', (t) => {
+    const logger = new Logger('stack');
+    logger.useCallStack = true;
+
+    let results;
+
+    results = logger.parseCallStack(new Error());
+    t.ok(results);
+    t.equal(messages.length, 0, 'should not have error');
+
+    results = logger.parseCallStack('');
+    t.notOk(results);
+    t.equal(messages.length, 1, 'should have error');
+
+    results = logger.parseCallStack(new Error(), 100);
+    t.equal(results, null);
+
+    t.end();
+  });
+
+  batch.test('parseCallStack names extraction', (t) => {
+    const logger = new Logger('stack');
+    logger.useCallStack = true;
+
+    let results;
+
+    const callStack1 =
+      '    at Foo.bar [as baz] (repl:1:14)\n    at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n    at REPLServer.defaultEval (repl.js:240:29)\n    at bound (domain.js:301:14)\n    at REPLServer.runBound [as eval] (domain.js:314:12)\n    at REPLServer.onLine (repl.js:468:10)\n    at emitOne (events.js:121:20)\n    at REPLServer.emit (events.js:211:7)\n    at REPLServer.Interface._onLine (readline.js:280:10)\n    at REPLServer.Interface._line (readline.js:629:8)'; // eslint-disable-line max-len
+    results = logger.parseCallStack({ stack: callStack1 }, 0);
+    t.ok(results);
+    t.equal(results.className, 'Foo');
+    t.equal(results.functionName, 'bar');
+    t.equal(results.functionAlias, 'baz');
+    t.equal(results.callerName, 'Foo.bar [as baz]');
+
+    const callStack2 =
+      '    at bar [as baz] (repl:1:14)\n    at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n    at REPLServer.defaultEval (repl.js:240:29)\n    at bound (domain.js:301:14)\n    at REPLServer.runBound [as eval] (domain.js:314:12)\n    at REPLServer.onLine (repl.js:468:10)\n    at emitOne (events.js:121:20)\n    at REPLServer.emit (events.js:211:7)\n    at REPLServer.Interface._onLine (readline.js:280:10)\n    at REPLServer.Interface._line (readline.js:629:8)'; // eslint-disable-line max-len
+    results = logger.parseCallStack({ stack: callStack2 }, 0);
+    t.ok(results);
+    t.equal(results.className, '');
+    t.equal(results.functionName, 'bar');
+    t.equal(results.functionAlias, 'baz');
+    t.equal(results.callerName, 'bar [as baz]');
+
+    const callStack3 =
+      '    at bar (repl:1:14)\n    at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n    at REPLServer.defaultEval (repl.js:240:29)\n    at bound (domain.js:301:14)\n    at REPLServer.runBound [as eval] (domain.js:314:12)\n    at REPLServer.onLine (repl.js:468:10)\n    at emitOne (events.js:121:20)\n    at REPLServer.emit (events.js:211:7)\n    at REPLServer.Interface._onLine (readline.js:280:10)\n    at REPLServer.Interface._line (readline.js:629:8)'; // eslint-disable-line max-len
+    results = logger.parseCallStack({ stack: callStack3 }, 0);
+    t.ok(results);
+    t.equal(results.className, '');
+    t.equal(results.functionName, 'bar');
+    t.equal(results.functionAlias, '');
+    t.equal(results.callerName, 'bar');
+
+    const callStack4 =
+      '    at repl:1:14\n    at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n    at REPLServer.defaultEval (repl.js:240:29)\n    at bound (domain.js:301:14)\n    at REPLServer.runBound [as eval] (domain.js:314:12)\n    at REPLServer.onLine (repl.js:468:10)\n    at emitOne (events.js:121:20)\n    at REPLServer.emit (events.js:211:7)\n    at REPLServer.Interface._onLine (readline.js:280:10)\n    at REPLServer.Interface._line (readline.js:629:8)'; // eslint-disable-line max-len
+    results = logger.parseCallStack({ stack: callStack4 }, 0);
+    t.ok(results);
+    t.equal(results.className, '');
+    t.equal(results.functionName, '');
+    t.equal(results.functionAlias, '');
+    t.equal(results.callerName, '');
+
+    const callStack5 =
+      '    at Foo.bar (repl:1:14)\n    at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n    at REPLServer.defaultEval (repl.js:240:29)\n    at bound (domain.js:301:14)\n    at REPLServer.runBound [as eval] (domain.js:314:12)\n    at REPLServer.onLine (repl.js:468:10)\n    at emitOne (events.js:121:20)\n    at REPLServer.emit (events.js:211:7)\n    at REPLServer.Interface._onLine (readline.js:280:10)\n    at REPLServer.Interface._line (readline.js:629:8)'; // eslint-disable-line max-len
+    results = logger.parseCallStack({ stack: callStack5 }, 0);
+    t.ok(results);
+    t.equal(results.className, 'Foo');
+    t.equal(results.functionName, 'bar');
+    t.equal(results.functionAlias, '');
+    t.equal(results.callerName, 'Foo.bar');
+
+    t.end();
+  });
+
+  batch.test('should correctly change the parseCallStack function', (t) => {
+    const logger = new Logger('stack');
+    logger.level = 'debug';
+    logger.useCallStack = true;
+
+    logger.info('test defaultParseCallStack');
+    const initialEvent = events.shift();
+    const parseFunction = function () {
+      return {
+        functionName: 'test function name',
+        fileName: 'test file name',
+        lineNumber: 15,
+        columnNumber: 25,
+        callStack: 'test callstack',
+      };
+    };
+    logger.setParseCallStackFunction(parseFunction);
+
+    t.equal(logger.parseCallStack, parseFunction);
+
+    logger.info('test parseCallStack');
+    t.equal(events[0].functionName, 'test function name');
+    t.equal(events[0].fileName, 'test file name');
+    t.equal(events[0].lineNumber, 15);
+    t.equal(events[0].columnNumber, 25);
+    t.equal(events[0].callStack, 'test callstack');
+
+    events.shift();
+
+    logger.setParseCallStackFunction(undefined);
+    logger.info('test restoredDefaultParseCallStack');
+
+    t.equal(events[0].functionName, initialEvent.functionName);
+    t.equal(events[0].fileName, initialEvent.fileName);
+    t.equal(events[0].columnNumber, initialEvent.columnNumber);
+
+    t.throws(
+      () => logger.setParseCallStackFunction('not a function'),
+      'Invalid type passed to setParseCallStackFunction'
+    );
+
+    t.end();
+  });
+
+  batch.test('should correctly change the stack levels to skip', (t) => {
+    const logger = new Logger('stack');
+    logger.level = 'debug';
+    logger.useCallStack = true;
+
+    t.equal(
+      logger.callStackLinesToSkip,
+      0,
+      'initial callStackLinesToSkip changed'
+    );
+
+    logger.info('get initial stack');
+    const initialEvent = events.shift();
+    const newStackSkip = 1;
+    logger.callStackLinesToSkip = newStackSkip;
+    t.equal(logger.callStackLinesToSkip, newStackSkip);
+    logger.info('test stack skip');
+    const event = events.shift();
+    t.not(event.functionName, initialEvent.functionName);
+    t.not(event.fileName, initialEvent.fileName);
+    t.equal(
+      event.callStack,
+      initialEvent.callStack.split('\n').slice(newStackSkip).join('\n')
+    );
+
+    t.throws(() => {
+      logger.callStackLinesToSkip = -1;
+    });
+    t.throws(() => {
+      logger.callStackLinesToSkip = '2';
     });
     t.end();
   });
 
+  batch.test('should utilize the first Error data value', (t) => {
+    const logger = new Logger('stack');
+    logger.level = 'debug';
+    logger.useCallStack = true;
+
+    const error = new Error();
+
+    logger.info(error);
+    const event = events.shift();
+    t.equal(event.error, error);
+
+    logger.info(error);
+
+    t.match(event, events.shift());
+
+    logger.callStackLinesToSkip = 1;
+    logger.info(error);
+    const event2 = events.shift();
+
+    t.equal(event2.callStack, event.callStack.split('\n').slice(1).join('\n'));
+    logger.callStackLinesToSkip = 0;
+    logger.info('hi', error);
+    const event3 = events.shift();
+    t.equal(event3.callStack, event.callStack);
+    t.equal(event3.error, error);
+
+    logger.info('hi', error, new Error());
+    const event4 = events.shift();
+    t.equal(event4.callStack, event.callStack);
+    t.equal(event4.error, error);
+
+    t.end();
+  });
+
+  batch.test('creating/cloning of category', (t) => {
+    const defaultLogger = new Logger('default');
+    defaultLogger.level = 'trace';
+    defaultLogger.useCallStack = true;
+
+    t.test(
+      'category should be cloned from parent/default if it does not exist',
+      (assert) => {
+        const originalLength = categories.size;
+
+        const logger = new Logger('cheese1');
+        assert.equal(
+          categories.size,
+          originalLength + 1,
+          'category should be cloned'
+        );
+        assert.equal(
+          logger.level,
+          levels.TRACE,
+          'should inherit level=TRACE from default-category'
+        );
+        assert.equal(
+          logger.useCallStack,
+          true,
+          'should inherit useCallStack=true from default-category'
+        );
+        assert.end();
+      }
+    );
+
+    t.test(
+      'changing level should not impact default-category or useCallStack',
+      (assert) => {
+        const logger = new Logger('cheese2');
+        logger.level = 'debug';
+        assert.equal(
+          logger.level,
+          levels.DEBUG,
+          'should be changed to level=DEBUG'
+        );
+        assert.equal(
+          defaultLogger.level,
+          levels.TRACE,
+          'default-category should remain as level=TRACE'
+        );
+        assert.equal(
+          logger.useCallStack,
+          true,
+          'should remain as useCallStack=true'
+        );
+        assert.equal(
+          defaultLogger.useCallStack,
+          true,
+          'default-category should remain as useCallStack=true'
+        );
+        assert.end();
+      }
+    );
+
+    t.test(
+      'changing useCallStack should not impact default-category or level',
+      (assert) => {
+        const logger = new Logger('cheese3');
+        logger.useCallStack = false;
+        assert.equal(
+          logger.useCallStack,
+          false,
+          'should be changed to useCallStack=false'
+        );
+        assert.equal(
+          defaultLogger.useCallStack,
+          true,
+          'default-category should remain as useCallStack=true'
+        );
+        assert.equal(
+          logger.level,
+          levels.TRACE,
+          'should remain as level=TRACE'
+        );
+        assert.equal(
+          defaultLogger.level,
+          levels.TRACE,
+          'default-category should remain as level=TRACE'
+        );
+        assert.end();
+      }
+    );
+
+    t.end();
+  });
+
   batch.end();
 });
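To summarise the behaviour the rewritten logger tests depend on without the sandboxed Logger construction, here is a minimal sketch through the public API (a sketch only, assuming a recording appender is configured; normal code never instantiates Logger directly):

    const log4js = require('../../lib/log4js');

    log4js.configure({
      appenders: { recorder: { type: 'recording' } },
      categories: { default: { appenders: ['recorder'], level: 'off' } },
    });

    const logger = log4js.getLogger('cheese');

    // Setting the level delegates to the logger's category (see "set level should delegate").
    logger.level = 'debug';

    // Context values are attached to every subsequent event for this logger.
    logger.addContext('cheese', 'edam');
    logger.debug('event with context');
    logger.removeContext('cheese');
    logger.clearContext();

    // useCallStack only honours a real boolean; truthy strings such as 'true' are ignored.
    logger.useCallStack = 'true'; // still false
    logger.useCallStack = true; // events now carry fileName, lineNumber, columnNumber, callStack
    logger.debug('event with call-stack info');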
diff --git a/test/tap/logging-test.js b/test/tap/logging-test.js
index 42321aaf..1248b489 100644
--- a/test/tap/logging-test.js
+++ b/test/tap/logging-test.js
@@ -1,83 +1,71 @@
-'use strict';
+const { test } = require('tap');
+const sandbox = require('@log4js-node/sandboxed-module');
+const util = require('util');
+const recording = require('../../lib/appenders/recording');
 
-const test = require('tap').test;
-const sandbox = require('sandboxed-module');
-
-function setupConsoleTest() {
-  const fakeConsole = {};
-  const logEvents = [];
-
-  ['trace', 'debug', 'log', 'info', 'warn', 'error'].forEach((fn) => {
-    fakeConsole[fn] = function () {
-      throw new Error('this should not be called.');
-    };
+test('log4js', (batch) => {
+  batch.test('should throw error for invalid callback to shutdown', (t) => {
+    const log4js = require('../../lib/log4js');
+    t.throws(() => log4js.shutdown([]));
+    t.end();
   });
 
-  const log4js = sandbox.require(
-    '../../lib/log4js',
-    {
-      globals: {
-        console: fakeConsole
-      }
-    }
-  );
-
-  log4js.clearAppenders();
-  log4js.addAppender((evt) => {
-    logEvents.push(evt);
-  });
+  batch.test(
+    'shutdown should return appenders and categories back to initial state',
+    (t) => {
+      const stringifyMap = (map) => JSON.stringify(Array.from(map));
+      const deepCopyMap = (map) => new Map(JSON.parse(stringifyMap(map)));
 
-  return { log4js: log4js, logEvents: logEvents, fakeConsole: fakeConsole };
-}
+      const log4js = require('../../lib/log4js');
 
-test('log4js', (batch) => {
-  batch.test('getBufferedLogger', (t) => {
-    const log4js = require('../../lib/log4js');
-    log4js.clearAppenders();
-    const logger = log4js.getBufferedLogger('tests');
+      const appenders = require('../../lib/appenders');
+      const categories = require('../../lib/categories');
+      const initialAppenders = deepCopyMap(appenders);
+      const initialCategories = deepCopyMap(categories);
 
-    t.test('should take a category and return a logger', (assert) => {
-      assert.equal(logger.target.category, 'tests');
-      assert.type(logger.flush, 'function');
-      assert.type(logger.trace, 'function');
-      assert.type(logger.debug, 'function');
-      assert.type(logger.info, 'function');
-      assert.type(logger.warn, 'function');
-      assert.type(logger.error, 'function');
-      assert.type(logger.fatal, 'function');
-      assert.end();
-    });
-
-    t.test('cache events', (assert) => {
-      const events = [];
-      logger.target.setLevel('TRACE');
-      logger.target.addListener('log', (logEvent) => {
-        events.push(logEvent);
+      log4js.configure({
+        appenders: { recorder: { type: 'recording' } },
+        categories: { default: { appenders: ['recorder'], level: 'DEBUG' } },
       });
-      logger.debug('Debug event');
-      logger.trace('Trace event 1');
-      logger.trace('Trace event 2');
-      logger.warn('Warning event');
-      logger.error('Aargh!', new Error('Pants are on fire!'));
-      logger.error(
-        'Simulated CouchDB problem',
-        { err: 127, cause: 'incendiary underwear' }
-      );
 
-      assert.equal(events.length, 0, 'should not emit log events if .flush() is not called.');
-      logger.flush();
-      assert.equal(events.length, 6, 'should emit log events when .flush() is called.');
-      assert.end();
-    });
-    t.end();
-  });
+      const configuredAppenders = deepCopyMap(appenders);
+      const configuredCategories = deepCopyMap(categories);
+      t.not(
+        stringifyMap(configuredAppenders),
+        stringifyMap(initialAppenders),
+        'appenders should be different from initial state'
+      );
+      t.not(
+        stringifyMap(configuredCategories),
+        stringifyMap(initialCategories),
+        'categories should be different from initial state'
+      );
 
+      log4js.shutdown(() => {
+        const finalAppenders = deepCopyMap(appenders);
+        const finalCategories = deepCopyMap(categories);
+        t.equal(
+          stringifyMap(finalAppenders),
+          stringifyMap(initialAppenders),
+          'appenders should revert back to initial state'
+        );
+        t.equal(
+          stringifyMap(finalCategories),
+          stringifyMap(initialCategories),
+          'categories should revert back to initial state'
+        );
+        t.end();
+      });
+    }
+  );
 
   batch.test('getLogger', (t) => {
     const log4js = require('../../lib/log4js');
-    log4js.clearAppenders();
+    log4js.configure({
+      appenders: { recorder: { type: 'recording' } },
+      categories: { default: { appenders: ['recorder'], level: 'DEBUG' } },
+    });
     const logger = log4js.getLogger('tests');
-    logger.setLevel('DEBUG');
 
     t.test('should take a category and return a logger', (assert) => {
       assert.equal(logger.category, 'tests');
@@ -91,16 +79,19 @@ test('log4js', (batch) => {
     });
 
     t.test('log events', (assert) => {
-      const events = [];
-      logger.addListener('log', (logEvent) => {
-        events.push(logEvent);
-      });
+      recording.reset();
+
       logger.debug('Debug event');
       logger.trace('Trace event 1');
       logger.trace('Trace event 2');
       logger.warn('Warning event');
       logger.error('Aargh!', new Error('Pants are on fire!'));
-      logger.error('Simulated CouchDB problem', { err: 127, cause: 'incendiary underwear' });
+      logger.error('Simulated CouchDB problem', {
+        err: 127,
+        cause: 'incendiary underwear',
+      });
+
+      const events = recording.replay();
 
       assert.equal(events[0].level.toString(), 'DEBUG');
       assert.equal(events[0].data[0], 'Debug event');
@@ -109,7 +100,11 @@ test('log4js', (batch) => {
       assert.equal(events.length, 4, 'should not emit events of a lower level');
       assert.equal(events[1].level.toString(), 'WARN');
 
-      assert.type(events[2].data[1], 'Error', 'should include the error if passed in');
+      assert.type(
+        events[2].data[1],
+        'Error',
+        'should include the error if passed in'
+      );
       assert.equal(events[2].data[1].message, 'Pants are on fire!');
       assert.end();
     });
@@ -119,490 +114,166 @@ test('log4js', (batch) => {
 
   batch.test('when shutdown is called', (t) => {
     const events = {
-      appenderShutdownCalled: false
+      shutdownCalled: [],
     };
 
-    const log4js = sandbox.require(
-      '../../lib/log4js',
-      {
-        requires: {
-          './appenders/file': {
-            name: 'file',
-            appender: function () {
-            },
-            configure: function () {
-              return function () {
-              };
-            },
-            shutdown: function (cb) {
-              events.appenderShutdownCalled = true;
-              cb();
+    const log4js = sandbox.require('../../lib/log4js', {
+      requires: {
+        './appenders/file': {
+          name: 'file',
+          configure() {
+            function thing(evt) {
+              events.event = evt;
+              return null;
             }
-          }
-        }
-      }
-    );
-
-    const config = {
-      appenders: [
-        {
-          type: 'file',
-          filename: 'cheesy-wotsits.log',
-          maxLogSize: 1024,
-          backups: 3
-        }
-      ]
-    };
 
-    log4js.configure(config);
-    log4js.shutdown(() => {
-      // Re-enable log writing so other tests that use logger are not
-      // affected.
-      require('../../lib/logger').enableAllLogWrites();
-      t.ok(events.appenderShutdownCalled, 'should invoke appender shutdowns');
-      t.end();
+            thing.shutdown = function (cb) {
+              events.shutdownCalled.push(true);
+              cb();
+            };
+            return thing;
+          },
+        },
+      },
     });
-  });
-
-  // 'invalid configuration': {
-  //   'should throw an exception': function () {
-  //     assert.throws(() => {
-  //       // todo: here is weird, it's not ideal test
-  //       require('../../lib/log4js').configure({ type: 'invalid' });
-  //     });
-  //   }
-  // },
-
-  batch.test('configuration when passed as object', (t) => {
-    let appenderConfig;
-
-    const log4js = sandbox.require(
-      '../../lib/log4js',
-      {
-        requires: {
-          './appenders/file': {
-            name: 'file',
-            appender: function () {
-            },
-            configure: function (configuration) {
-              appenderConfig = configuration;
-              return function () {
-              };
-            }
-          }
-        }
-      }
-    );
 
     const config = {
-      appenders: [
-        {
+      appenders: {
+        file: {
           type: 'file',
           filename: 'cheesy-wotsits.log',
           maxLogSize: 1024,
-          backups: 3
-        }
-      ]
-    };
-
-    log4js.configure(config);
-    t.equal(appenderConfig.filename, 'cheesy-wotsits.log', 'should be passed to appender config');
-    t.end();
-  });
-
-  batch.test('configuration that causes an error', (t) => {
-    const log4js = sandbox.require(
-      '../../lib/log4js',
-      {
-        requires: {
-          './appenders/file': {
-            name: 'file',
-            appender: function () {
-            },
-            configure: function () {
-              throw new Error('oh noes');
-            }
-          }
-        }
-      }
-    );
-
-    const config = {
-      appenders: [
-        {
+          backups: 3,
+        },
+        alsoFile: {
           type: 'file',
-          filename: 'cheesy-wotsits.log',
-          maxLogSize: 1024,
-          backups: 3
-        }
-      ]
+        },
+      },
+      categories: {
+        default: { appenders: ['file', 'alsoFile'], level: 'DEBUG' },
+      },
     };
 
-    try {
-      log4js.configure(config);
-    } catch (e) {
-      t.ok(e.message.includes('log4js configuration problem for'));
+    log4js.configure(config);
+    const logger = log4js.getLogger();
+    log4js.shutdown(() => {
+      t.equal(
+        events.shutdownCalled.length,
+        2,
+        'should invoke appender shutdowns'
+      );
+      logger.info('this should not go to the appenders');
+      logger.log('info', 'this should not go to the appenders');
+      logger._log(require('../../lib/levels').INFO, [
+        'this should not go to the appenders',
+      ]);
+      t.notOk(events.event);
       t.end();
-    }
+    });
   });
 
   batch.test('configuration when passed as filename', (t) => {
     let appenderConfig;
     let configFilename;
 
-    const log4js = sandbox.require(
-      '../../lib/log4js',
-      {
-        requires: {
-          fs: {
-            statSync: function () {
-              return { mtime: Date.now() };
-            },
-            readFileSync: function (filename) {
-              configFilename = filename;
-              return JSON.stringify({
-                appenders: [
-                  {
-                    type: 'file',
-                    filename: 'whatever.log'
-                  }
-                ]
-              });
-            },
-            readdirSync: function () {
-              return ['file'];
-            }
+    const log4js = sandbox.require('../../lib/log4js', {
+      ignoreMissing: true,
+      requires: {
+        fs: {
+          statSync() {
+            return { mtime: Date.now() };
           },
-          './appenders/file': {
-            name: 'file',
-            appender: function () {
-            },
-            configure: function (configuration) {
-              appenderConfig = configuration;
-              return function () {
-              };
-            }
-          }
-        }
-      }
-    );
+          readFileSync(filename) {
+            configFilename = filename;
+            return JSON.stringify({
+              appenders: {
+                file: {
+                  type: 'file',
+                  filename: 'whatever.log',
+                },
+              },
+              categories: { default: { appenders: ['file'], level: 'DEBUG' } },
+            });
+          },
+          readdirSync() {
+            return ['file'];
+          },
+        },
+        './file': {
+          configure(configuration) {
+            appenderConfig = configuration;
+            return function () {};
+          },
+        },
+      },
+    });
 
     log4js.configure('/path/to/cheese.json');
-    t.equal(configFilename, '/path/to/cheese.json', 'should read the config from a file');
-    t.equal(appenderConfig.filename, 'whatever.log', 'should pass config to appender');
+    t.equal(
+      configFilename,
+      '/path/to/cheese.json',
+      'should read the config from a file'
+    );
+    t.equal(
+      appenderConfig.filename,
+      'whatever.log',
+      'should pass config to appender'
+    );
     t.end();
   });
 
-  batch.test('with no appenders defined', (t) => {
+  batch.test('with configure not called', (t) => {
     const fakeStdoutAppender = {
-      name: 'stdout',
-      appender: function () {
+      configure() {
+        this.required = true;
         return function (evt) {
-          t.equal(evt.data[0], 'This is a test', 'should default to the stdout appender');
-          t.end();
+          fakeStdoutAppender.evt = evt;
         };
       },
-      configure: function () {
-        return fakeStdoutAppender.appender();
-      }
     };
 
-    const log4js = sandbox.require(
-      '../../lib/log4js',
-      {
-        requires: {
-          './appenders/stdout': fakeStdoutAppender
-        }
-      }
-    );
-
-    const logger = log4js.getLogger('some-logger');
-    logger.debug('This is a test');
-    // assert is back at the top, in the fake stdout appender
-  });
-
-  batch.test('addAppender', (t) => {
-    const log4js = require('../../lib/log4js');
-    log4js.clearAppenders();
-
-    t.test('without a category', (assert) => {
-      let appenderEvent;
-
-      const appender = function (evt) {
-        appenderEvent = evt;
-      };
-
-      const logger = log4js.getLogger('tests');
-
-      log4js.addAppender(appender);
-      logger.debug('This is a test');
-
-      assert.equal(
-        appenderEvent.data[0],
-        'This is a test',
-        'should register the function as a listener for all loggers'
-      );
-      assert.equal(appenderEvent.categoryName, 'tests');
-      assert.equal(appenderEvent.level.toString(), 'DEBUG');
-      assert.end();
-    });
-
-    t.test('if an appender for a category is defined', (assert) => {
-      let otherEvent;
-      let appenderEvent;
-
-      log4js.addAppender((evt) => {
-        appenderEvent = evt;
-      });
-      log4js.addAppender((evt) => {
-        otherEvent = evt;
-      }, 'cheese');
-
-      const cheeseLogger = log4js.getLogger('cheese');
-      cheeseLogger.debug('This is a test');
-
-      assert.same(appenderEvent, otherEvent, 'should register for that category');
-      assert.equal(otherEvent.data[0], 'This is a test');
-      assert.equal(otherEvent.categoryName, 'cheese');
-
-      otherEvent = undefined;
-      appenderEvent = undefined;
-      log4js.getLogger('pants').debug('this should not be propagated to otherEvent');
-      assert.notOk(otherEvent);
-      assert.equal(appenderEvent.data[0], 'this should not be propagated to otherEvent');
-      assert.end();
-    });
-
-    t.test('with a category', (assert) => {
-      let appenderEvent;
-
-      const appender = function (evt) {
-        appenderEvent = evt;
-      };
-
-      const logger = log4js.getLogger('tests');
-
-      log4js.addAppender(appender, 'tests');
-      logger.debug('this is a category test');
-      assert.equal(
-        appenderEvent.data[0],
-        'this is a category test',
-        'should only register the function as a listener for that category'
-      );
-
-      appenderEvent = undefined;
-      log4js.getLogger('some other category').debug('Cheese');
-      assert.notOk(appenderEvent);
-      assert.end();
-    });
-
-    t.test('with multiple categories', (assert) => {
-      let appenderEvent;
-
-      const appender = function (evt) {
-        appenderEvent = evt;
-      };
-
-      const logger = log4js.getLogger('tests');
-
-      log4js.addAppender(appender, 'tests', 'biscuits');
-
-      logger.debug('this is a test');
-      assert.equal(
-        appenderEvent.data[0],
-        'this is a test',
-        'should register the function as a listener for all the categories'
-      );
-
-      appenderEvent = undefined;
-      const otherLogger = log4js.getLogger('biscuits');
-      otherLogger.debug('mmm... garibaldis');
-      assert.equal(appenderEvent.data[0], 'mmm... garibaldis');
-
-      appenderEvent = undefined;
-
-      log4js.getLogger('something else').debug('pants');
-      assert.notOk(appenderEvent);
-      assert.end();
-    });
-
-    t.test('should register the function when the list of categories is an array', (assert) => {
-      let appenderEvent;
-
-      const appender = function (evt) {
-        appenderEvent = evt;
-      };
-
-      log4js.addAppender(appender, ['tests', 'pants']);
-
-      log4js.getLogger('tests').debug('this is a test');
-      assert.equal(appenderEvent.data[0], 'this is a test');
-
-      appenderEvent = undefined;
-
-      log4js.getLogger('pants').debug('big pants');
-      assert.equal(appenderEvent.data[0], 'big pants');
-
-      appenderEvent = undefined;
-
-      log4js.getLogger('something else').debug('pants');
-      assert.notOk(appenderEvent);
-      assert.end();
-    });
-
-    t.end();
-  });
-
-  batch.test('default setup', (t) => {
-    const appenderEvents = [];
-
-    const fakeStdout = {
-      name: 'stdout',
-      appender: function () {
-        return function (evt) {
-          appenderEvents.push(evt);
-        };
+    const log4js = sandbox.require('../../lib/log4js', {
+      requires: {
+        './appenders/stdout': fakeStdoutAppender,
       },
-      configure: function () {
-        return fakeStdout.appender();
-      }
-    };
-
-    const globalConsole = {
-      log: function () {
-      }
-    };
-
-    const log4js = sandbox.require(
-      '../../lib/log4js',
-      {
-        requires: {
-          './appenders/stdout': fakeStdout
-        },
-        globals: {
-          console: globalConsole
-        }
-      }
-    );
-
-    const logger = log4js.getLogger('a-test');
-
-    logger.debug('this is a test');
-    globalConsole.log('this should not be logged');
-
-    t.equal(appenderEvents[0].data[0], 'this is a test', 'should configure a stdout appender');
-    t.equal(appenderEvents.length, 1, 'should not replace console.log with log4js version');
-    t.end();
-  });
-
-  batch.test('console', (t) => {
-    const setup = setupConsoleTest();
-
-    t.test('when replaceConsole called', (assert) => {
-      setup.log4js.replaceConsole();
-
-      setup.fakeConsole.log('Some debug message someone put in a module');
-      setup.fakeConsole.debug('Some debug');
-      setup.fakeConsole.error('An error');
-      setup.fakeConsole.info('some info');
-      setup.fakeConsole.warn('a warning');
-
-      setup.fakeConsole.log('cheese (%s) and biscuits (%s)', 'gouda', 'garibaldis');
-      setup.fakeConsole.log({ lumpy: 'tapioca' });
-      setup.fakeConsole.log('count %d', 123);
-      setup.fakeConsole.log('stringify %j', { lumpy: 'tapioca' });
-
-      const logEvents = setup.logEvents;
-      assert.equal(logEvents.length, 9);
-      assert.equal(logEvents[0].data[0], 'Some debug message someone put in a module');
-      assert.equal(logEvents[0].level.toString(), 'INFO');
-      assert.equal(logEvents[1].data[0], 'Some debug');
-      assert.equal(logEvents[1].level.toString(), 'DEBUG');
-      assert.equal(logEvents[2].data[0], 'An error');
-      assert.equal(logEvents[2].level.toString(), 'ERROR');
-      assert.equal(logEvents[3].data[0], 'some info');
-      assert.equal(logEvents[3].level.toString(), 'INFO');
-      assert.equal(logEvents[4].data[0], 'a warning');
-      assert.equal(logEvents[4].level.toString(), 'WARN');
-      assert.equal(logEvents[5].data[0], 'cheese (%s) and biscuits (%s)');
-      assert.equal(logEvents[5].data[1], 'gouda');
-      assert.equal(logEvents[5].data[2], 'garibaldis');
-      assert.end();
     });
 
-    t.test('when turned off', (assert) => {
-      setup.log4js.restoreConsole();
-      try {
-        setup.fakeConsole.log('This should cause the error described in the setup');
-      } catch (e) {
-        assert.type(e, 'Error', 'should call the original console methods');
-        assert.equal(e.message, 'this should not be called.');
-        assert.end();
-      }
-    });
+    const logger = log4js.getLogger('some-logger');
+    logger.debug('This is a test');
+    t.ok(fakeStdoutAppender.required, 'stdout should be required');
+    t.notOk(fakeStdoutAppender.evt, 'should not log anything');
     t.end();
   });
 
-  batch.test('console configuration', (t) => {
-    const setup = setupConsoleTest();
-
-    t.test('when disabled', (assert) => {
-      setup.log4js.replaceConsole();
-      setup.log4js.configure({ replaceConsole: false });
-      try {
-        setup.fakeConsole.log('This should cause the error described in the setup');
-      } catch (e) {
-        assert.type(e, 'Error');
-        assert.equal(e.message, 'this should not be called.');
-        assert.end();
-      }
-    });
-
-    t.test('when enabled', (assert) => {
-      setup.log4js.restoreConsole();
-      setup.log4js.configure({ replaceConsole: true });
-      // log4js.configure clears all appenders
-      setup.log4js.addAppender((evt) => {
-        setup.logEvents.push(evt);
-      });
-
-      setup.fakeConsole.debug('Some debug');
-
-      const logEvents = setup.logEvents;
-      assert.equal(logEvents.length, 1);
-      assert.equal(logEvents[0].level.toString(), 'DEBUG');
-      assert.equal(logEvents[0].data[0], 'Some debug');
-      assert.end();
+  batch.test('with configure called with empty values', (t) => {
+    [null, undefined, '', ' ', []].forEach((config) => {
+      const log4js = require('../../lib/log4js');
+      const expectedError = `Problem reading config from file "${util.inspect(
+        config
+      )}". Error was ENOENT: no such file or directory`;
+      t.throws(() => log4js.configure(config), expectedError);
     });
 
     t.end();
   });
 
   batch.test('configuration persistence', (t) => {
-    let logEvent;
     const firstLog4js = require('../../lib/log4js');
-
-    firstLog4js.clearAppenders();
-    firstLog4js.addAppender((evt) => {
-      logEvent = evt;
+    firstLog4js.configure({
+      appenders: { recorder: { type: 'recording' } },
+      categories: { default: { appenders: ['recorder'], level: 'DEBUG' } },
     });
+    recording.reset();
 
     const secondLog4js = require('../../lib/log4js');
-    secondLog4js.getLogger().info('This should go to the appender defined in firstLog4js');
+    secondLog4js
+      .getLogger()
+      .info('This should go to the appender defined in firstLog4js');
 
-    t.equal(logEvent.data[0], 'This should go to the appender defined in firstLog4js');
-    t.end();
-  });
-
-  batch.test('getDefaultLogger', (t) => {
-    const logger = require('../../lib/log4js').getDefaultLogger();
-
-    t.test('should return a logger', (assert) => {
-      assert.ok(logger.info);
-      assert.ok(logger.debug);
-      assert.ok(logger.error);
-      assert.end();
-    });
+    t.equal(
+      recording.replay()[0].data[0],
+      'This should go to the appender defined in firstLog4js'
+    );
     t.end();
   });
 
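
The persistence test now goes through the built-in recording appender instead of wiring up a listener by hand. A minimal sketch of that pattern, using the same recording module the surrounding tests require:

    // Sketch: capture log events with the recording appender, then inspect them.
    const log4js = require('../../lib/log4js');
    const recording = require('../../lib/appenders/recording');

    log4js.configure({
      appenders: { recorder: { type: 'recording' } },
      categories: { default: { appenders: ['recorder'], level: 'debug' } },
    });
    recording.reset(); // drop anything recorded by earlier tests

    log4js.getLogger('example').info('hello');

    const events = recording.replay(); // logging events, oldest first
    // events[0].data[0] === 'hello'
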
diff --git a/test/tap/logglyAppender-test.js b/test/tap/logglyAppender-test.js
deleted file mode 100644
index 8fb25ad2..00000000
--- a/test/tap/logglyAppender-test.js
+++ /dev/null
@@ -1,115 +0,0 @@
-'use strict';
-
-const test = require('tap').test;
-const log4js = require('../../lib/log4js');
-const sandbox = require('sandboxed-module');
-
-function setupLogging(category, options) {
-  const msgs = [];
-
-  const fakeLoggly = {
-    createClient: function (opts) {
-      return {
-        config: opts,
-        log: function (msg, tags, cb) {
-          msgs.push({
-            msg: msg,
-            tags: tags,
-            cb: cb
-          });
-        }
-      };
-    }
-  };
-
-  const fakeLayouts = {
-    layout: function (type, config) {
-      this.type = type;
-      this.config = config;
-      return log4js.layouts.messagePassThroughLayout;
-    },
-    basicLayout: log4js.layouts.basicLayout,
-    messagePassThroughLayout: log4js.layouts.messagePassThroughLayout
-  };
-
-  const fakeConsole = {
-    errors: [],
-    error: function (msg, value) {
-      this.errors.push({ msg: msg, value: value });
-    }
-  };
-
-  const logglyModule = sandbox.require('../../lib/appenders/loggly', {
-    requires: {
-      loggly: fakeLoggly,
-      '../layouts': fakeLayouts
-    },
-    globals: {
-      console: fakeConsole
-    }
-  });
-
-  log4js.addAppender(
-    logglyModule.configure(options),
-    logglyModule.shutdown,
-    category);
-
-  return {
-    logger: log4js.getLogger(category),
-    loggly: fakeLoggly,
-    layouts: fakeLayouts,
-    console: fakeConsole,
-    results: msgs
-  };
-}
-
-log4js.clearAppenders();
-
-function setupTaggedLogging() {
-  return setupLogging('loggly', {
-    token: 'your-really-long-input-token',
-    subdomain: 'your-subdomain',
-    tags: ['loggly-tag1', 'loggly-tag2', 'loggly-tagn']
-  });
-}
-
-test('log4js logglyAppender', (batch) => {
-  batch.test('with minimal config', (t) => {
-    const setup = setupTaggedLogging();
-    setup.logger.log('trace', 'Log event #1', 'Log 2', { tags: ['tag1', 'tag2'] });
-
-    t.equal(setup.results.length, 1, 'has a results.length of 1');
-    t.equal(setup.results[0].msg.msg, 'Log event #1 Log 2', 'has a result msg with both args concatenated');
-    t.same(setup.results[0].tags, ['tag1', 'tag2'], 'has the correct result tags');
-    t.end();
-  });
-
-  batch.test('config with object with tags and other keys', (t) => {
-    const setup = setupTaggedLogging();
-    // ignore this tags object b/c there are 2 keys
-    setup.logger.log('trace', 'Log event #1', { other: 'other', tags: ['tag1', 'tag2'] });
-
-    t.equal(setup.results.length, 1, 'has a results.length of 1');
-    t.equal(
-      setup.results[0].msg.msg,
-      'Log event #1 { other: \'other\', tags: [ \'tag1\', \'tag2\' ] }',
-      'has a result msg with the args concatenated'
-    );
-    t.same(setup.results[0].tags, [], 'has a result tags with the arg that contains no tags');
-    t.end();
-  });
-
-  batch.test('with shutdown callback', (t) => {
-    const setup = setupTaggedLogging();
-    setup.logger.log('trace', 'Log event #1', 'Log 2', {
-      tags: ['tag1', 'tag2']
-    });
-
-    log4js.shutdown(() => { t.end(); });
-
-    // shutdown will wait until after the last message has been sent to loggly
-    setup.results[0].cb();
-  });
-
-  batch.end();
-});
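
The loggly test is removed along with its appender, but the stubbing technique it relied on is still used throughout this diff: sandboxed-module lets a test substitute the dependencies a module sees when it is required. A hedged sketch of that pattern (the fake net module here is purely illustrative):

    // Sketch: load log4js with a stubbed dependency via sandboxed-module.
    const sandbox = require('@log4js-node/sandboxed-module');

    const fakeNet = {
      createConnection() {
        // return a stub socket; a real test would record what gets written to it
        return { on() {}, write() {}, end() {} };
      },
    };

    const log4js = sandbox.require('../../lib/log4js', {
      requires: { net: fakeNet }, // only this require() sees the fake
    });
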
diff --git a/test/tap/logstashUDP-test.js b/test/tap/logstashUDP-test.js
deleted file mode 100644
index 5bacc4e5..00000000
--- a/test/tap/logstashUDP-test.js
+++ /dev/null
@@ -1,137 +0,0 @@
-'use strict';
-
-const test = require('tap').test;
-const log4js = require('../../lib/log4js');
-const sandbox = require('sandboxed-module');
-
-function setupLogging(category, options) {
-  const udpSent = {};
-
-  const fakeDgram = {
-    createSocket: function () {
-      return {
-        send: function (buffer, offset, length, port, host, callback) {
-          udpSent.date = new Date();
-          udpSent.host = host;
-          udpSent.port = port;
-          udpSent.length = length;
-          udpSent.offset = 0;
-          udpSent.buffer = buffer;
-          callback(undefined, length);
-        }
-      };
-    }
-  };
-
-  const logstashModule = sandbox.require('../../lib/appenders/logstashUDP', {
-    singleOnly: true,
-    requires: {
-      dgram: fakeDgram
-    }
-  });
-  log4js.clearAppenders();
-  log4js.addAppender(logstashModule.configure(options), category);
-
-  return {
-    logger: log4js.getLogger(category),
-    results: udpSent
-  };
-}
-
-test('logstashUDP appender', (batch) => {
-  batch.test('a UDP packet should be sent', (t) => {
-    const setup = setupLogging('myCategory', {
-      host: '127.0.0.1',
-      port: 10001,
-      type: 'logstashUDP',
-      logType: 'myAppType',
-      category: 'myLogger',
-      fields: {
-        field1: 'value1',
-        field2: 'value2'
-      },
-      layout: {
-        type: 'pattern',
-        pattern: '%m'
-      }
-    });
-    setup.logger.log('trace', 'Log event #1');
-
-    t.equal(setup.results.host, '127.0.0.1');
-    t.equal(setup.results.port, 10001);
-    t.equal(setup.results.offset, 0);
-
-    const json = JSON.parse(setup.results.buffer.toString());
-    t.equal(json.type, 'myAppType');
-    const fields = {
-      field1: 'value1',
-      field2: 'value2',
-      level: 'TRACE',
-      category: 'myCategory'
-    };
-
-    const keys = Object.keys(fields);
-    for (let i = 0, length = keys.length; i < length; i += 1) {
-        t.equal(json[keys[i]], fields[keys[i]]);
-    }
-
-    t.equal(JSON.stringify(json.fields), JSON.stringify(fields));
-    t.equal(json.message, 'Log event #1');
-    // Assert timestamp, up to hours resolution.
-    const date = new Date(json['@timestamp']);
-    t.equal(
-      date.toISOString().substring(0, 14),
-      setup.results.date.toISOString().substring(0, 14)
-    );
-
-    t.end();
-  });
-
-  batch.test('default options', (t) => {
-    const setup = setupLogging('myLogger', {
-      host: '127.0.0.1',
-      port: 10001,
-      type: 'logstashUDP',
-      category: 'myLogger',
-      layout: {
-        type: 'pattern',
-        pattern: '%m'
-      }
-    });
-    setup.logger.log('trace', 'Log event #1');
-
-    const json = JSON.parse(setup.results.buffer.toString());
-    t.equal(json.type, 'myLogger');
-    t.equal(
-      JSON.stringify(json.fields),
-      JSON.stringify({ level: 'TRACE', category: 'myLogger' })
-    );
-
-    t.end();
-  });
-
-  batch.test('extra fields should be added to the fields structure', (t) => {
-    const setup = setupLogging('myLogger', {
-      host: '127.0.0.1',
-      port: 10001,
-      type: 'logstashUDP',
-      category: 'myLogger',
-      layout: {
-        type: 'dummy'
-      }
-    });
-    setup.logger.log('trace', 'Log event #1', { extra1: 'value1', extra2: 'value2' });
-
-    const json = JSON.parse(setup.results.buffer.toString());
-    const fields = {
-      extra1: 'value1',
-      extra2: 'value2',
-      level: 'TRACE',
-      category: 'myLogger'
-    };
-    t.equal(JSON.stringify(json.fields), JSON.stringify(fields));
-    t.end();
-  });
-
-  batch.end();
-});
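
The removed logstashUDP test intercepted outgoing datagrams by faking dgram.createSocket and decoding the buffer handed to send(). For reference, a minimal sketch of that capture idiom, standalone and not tied to any particular appender:

    // Sketch: intercept dgram sends so a test can decode what would hit the wire.
    const sent = [];
    const fakeDgram = {
      createSocket() {
        return {
          send(buffer, offset, length, port, host, callback) {
            sent.push({ host, port, payload: JSON.parse(buffer.toString()) });
            callback(undefined, length);
          },
        };
      },
    };
    // inject fakeDgram with sandboxed-module, as in the removed test above
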
diff --git a/test/tap/mailgunAppender-test.js b/test/tap/mailgunAppender-test.js
deleted file mode 100644
index 3408a385..00000000
--- a/test/tap/mailgunAppender-test.js
+++ /dev/null
@@ -1,182 +0,0 @@
-'use strict';
-
-const test = require('tap').test;
-const log4js = require('../../lib/log4js');
-const sandbox = require('sandboxed-module');
-
-function setupLogging(category, options) {
-  const msgs = [];
-
-  const mailgunCredentials = {
-    apiKey: options.apikey,
-    domain: options.domain
-  };
-
-  const fakeMailgun = function () {
-    return {
-      messages: function () {
-        return {
-          config: options,
-          send: function (data, callback) {
-            msgs.push(data);
-            callback(false, { status: 'OK' });
-          }
-        };
-      }
-    };
-  };
-
-  const fakeLayouts = {
-    layout: function (type, config) {
-      this.type = type;
-      this.config = config;
-      return log4js.layouts.messagePassThroughLayout;
-    },
-    basicLayout: log4js.layouts.basicLayout,
-    messagePassThroughLayout: log4js.layouts.messagePassThroughLayout
-  };
-
-  const fakeConsole = {
-    errors: [],
-    logs: [],
-    error: function (msg, value) {
-      this.errors.push({ msg: msg, value: value });
-    },
-    log: function (msg, value) {
-      this.logs.push({ msg: msg, value: value });
-    }
-  };
-
-
-  const mailgunModule = sandbox.require('../../lib/appenders/mailgun', {
-    requires: {
-      'mailgun-js': fakeMailgun,
-      '../layouts': fakeLayouts
-    },
-    globals: {
-      console: fakeConsole
-    }
-  });
-
-
-  log4js.addAppender(mailgunModule.configure(options), category);
-
-  return {
-    logger: log4js.getLogger(category),
-    mailer: fakeMailgun,
-    layouts: fakeLayouts,
-    console: fakeConsole,
-    mails: msgs,
-    credentials: mailgunCredentials
-  };
-}
-
-function checkMessages(assert, result) {
-  for (let i = 0; i < result.mails.length; ++i) {
-    assert.equal(result.mails[i].from, 'sender@domain.com');
-    assert.equal(result.mails[i].to, 'recepient@domain.com');
-    assert.equal(result.mails[i].subject, 'This is subject');
-    assert.ok(new RegExp(`.+Log event #${i + 1}`).test(result.mails[i].text));
-  }
-}
-
-log4js.clearAppenders();
-
-test('log4js mailgunAppender', (batch) => {
-  batch.test('mailgun setup', (t) => {
-    const result = setupLogging('mailgun setup', {
-      apikey: 'APIKEY',
-      domain: 'DOMAIN',
-      from: 'sender@domain.com',
-      to: 'recepient@domain.com',
-      subject: 'This is subject'
-    });
-
-    t.test('mailgun credentials should match', (assert) => {
-      assert.equal(result.credentials.apiKey, 'APIKEY');
-      assert.equal(result.credentials.domain, 'DOMAIN');
-      assert.end();
-    });
-    t.end();
-  });
-
-  batch.test('basic usage', (t) => {
-    const result = setupLogging('basic usage', {
-      apikey: 'APIKEY',
-      domain: 'DOMAIN',
-      from: 'sender@domain.com',
-      to: 'recepient@domain.com',
-      subject: 'This is subject'
-    });
-
-    result.logger.info('Log event #1');
-
-    t.equal(result.mails.length, 1, 'should be one message only');
-    checkMessages(t, result);
-    t.end();
-  });
-
-  batch.test('config with layout', (t) => {
-    const result = setupLogging('config with layout', {
-      layout: {
-        type: 'tester'
-      }
-    });
-    t.equal(result.layouts.type, 'tester', 'should configure layout');
-    t.end();
-  });
-
-  batch.test('error when sending email', (t) => {
-    const setup = setupLogging('separate email for each event', {
-      apikey: 'APIKEY',
-      domain: 'DOMAIN',
-      from: 'sender@domain.com',
-      to: 'recepient@domain.com',
-      subject: 'This is subject'
-    });
-
-    setup.mailer.messages = function () {
-      return {
-        send: function (msg, cb) {
-          cb({ msg: 'log4js.mailgunAppender - Error happened' }, null);
-        }
-      };
-    };
-
-    setup.logger.info('This will break');
-    const cons = setup.console;
-
-    t.test('should be logged to console', (assert) => {
-      assert.equal(cons.errors.length, 1);
-      assert.equal(cons.errors[0].msg, 'log4js.mailgunAppender - Error happened');
-      assert.end();
-    });
-    t.end();
-  });
-
-  batch.test('separate email for each event', (t) => {
-    const setup = setupLogging('separate email for each event', {
-      apikey: 'APIKEY',
-      domain: 'DOMAIN',
-      from: 'sender@domain.com',
-      to: 'recepient@domain.com',
-      subject: 'This is subject'
-    });
-    setTimeout(() => {
-      setup.logger.info('Log event #1');
-    }, 0);
-    setTimeout(() => {
-      setup.logger.info('Log event #2');
-    }, 500);
-    setTimeout(() => {
-      setup.logger.info('Log event #3');
-    }, 1100);
-    setTimeout(() => {
-      t.equal(setup.mails.length, 3, 'should be three messages');
-      checkMessages(t, setup);
-      t.end();
-    }, 3000);
-  });
-
-  batch.end();
-});
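
Like the loggly test, the mailgun test leaned on sandboxed-module's globals option to swap in a fake console and collect the appender's error output. A brief sketch of that error-capture idiom; the module path below is a placeholder, not a real appender:

    // Sketch: capture console.error calls made by a module under test.
    const sandbox = require('@log4js-node/sandboxed-module');

    const fakeConsole = {
      errors: [],
      error(msg, value) {
        this.errors.push({ msg, value });
      },
    };

    // placeholder path for illustration; the pattern is what matters
    const moduleUnderTest = sandbox.require('../../lib/appenders/some-appender', {
      globals: { console: fakeConsole },
    });
    // ...drive the module, then assert on fakeConsole.errors
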
diff --git a/test/tap/multi-file-appender-test.js b/test/tap/multi-file-appender-test.js
new file mode 100644
index 00000000..097b43c4
--- /dev/null
+++ b/test/tap/multi-file-appender-test.js
@@ -0,0 +1,461 @@
+const process = require('process');
+const { test } = require('tap');
+const debug = require('debug');
+const fs = require('fs');
+const sandbox = require('@log4js-node/sandboxed-module');
+const log4js = require('../../lib/log4js');
+
+const osDelay = process.platform === 'win32' ? 400 : 200;
+
+const removeFiles = async (filenames) => {
+  if (!Array.isArray(filenames)) filenames = [filenames];
+  const promises = filenames.map((filename) => fs.promises.unlink(filename));
+  await Promise.allSettled(promises);
+};
+
+test('multiFile appender', (batch) => {
+  batch.test(
+    'should write to multiple files based on the loggingEvent property',
+    (t) => {
+      t.teardown(async () => {
+        await removeFiles(['logs/A.log', 'logs/B.log']);
+      });
+      log4js.configure({
+        appenders: {
+          multi: {
+            type: 'multiFile',
+            base: 'logs/',
+            property: 'categoryName',
+            extension: '.log',
+          },
+        },
+        categories: { default: { appenders: ['multi'], level: 'info' } },
+      });
+      const loggerA = log4js.getLogger('A');
+      const loggerB = log4js.getLogger('B');
+      loggerA.info('I am in logger A');
+      loggerB.info('I am in logger B');
+      log4js.shutdown(() => {
+        t.match(fs.readFileSync('logs/A.log', 'utf-8'), 'I am in logger A');
+        t.match(fs.readFileSync('logs/B.log', 'utf-8'), 'I am in logger B');
+        t.end();
+      });
+    }
+  );
+
+  batch.test(
+    'should write to multiple files based on loggingEvent.context properties',
+    (t) => {
+      t.teardown(async () => {
+        await removeFiles(['logs/C.log', 'logs/D.log']);
+      });
+      log4js.configure({
+        appenders: {
+          multi: {
+            type: 'multiFile',
+            base: 'logs/',
+            property: 'label',
+            extension: '.log',
+          },
+        },
+        categories: { default: { appenders: ['multi'], level: 'info' } },
+      });
+      const loggerC = log4js.getLogger('cheese');
+      const loggerD = log4js.getLogger('biscuits');
+      loggerC.addContext('label', 'C');
+      loggerD.addContext('label', 'D');
+      loggerC.info('I am in logger C');
+      loggerD.info('I am in logger D');
+      log4js.shutdown(() => {
+        t.match(fs.readFileSync('logs/C.log', 'utf-8'), 'I am in logger C');
+        t.match(fs.readFileSync('logs/D.log', 'utf-8'), 'I am in logger D');
+        t.end();
+      });
+    }
+  );
+
+  batch.test('should close file after timeout', (t) => {
+    /* checking that the file is closed after a timeout is done by looking at the debug logs
+      since detecting file locks with node.js is platform specific.
+     */
+    const debugWasEnabled = debug.enabled('log4js:multiFile');
+    const debugLogs = [];
+    const originalWrite = process.stderr.write;
+    process.stderr.write = (string, encoding, fd) => {
+      debugLogs.push(string);
+      if (debugWasEnabled) {
+        originalWrite.apply(process.stderr, [string, encoding, fd]);
+      }
+    };
+    const originalNamespace = debug.disable();
+    debug.enable(`${originalNamespace}, log4js:multiFile`);
+
+    t.teardown(async () => {
+      await new Promise((resolve) => {
+        log4js.shutdown(resolve);
+      });
+      await removeFiles('logs/C.log');
+      process.stderr.write = originalWrite;
+      debug.enable(originalNamespace);
+    });
+
+    const timeoutMs = 50;
+    log4js.configure({
+      appenders: {
+        multi: {
+          type: 'multiFile',
+          base: 'logs/',
+          property: 'label',
+          extension: '.log',
+          timeout: timeoutMs,
+        },
+      },
+      categories: { default: { appenders: ['multi'], level: 'info' } },
+    });
+    const loggerC = log4js.getLogger('cheese');
+    loggerC.addContext('label', 'C');
+    loggerC.info('I am in logger C');
+    setTimeout(() => {
+      t.match(
+        debugLogs[debugLogs.length - 1],
+        `C not used for > ${timeoutMs} ms => close`,
+        '(timeout1) should have closed'
+      );
+      t.end();
+    }, timeoutMs * 1 + osDelay);
+  });
+
+  batch.test('should close file safely after timeout', (t) => {
+    const error = new Error('fileAppender shutdown error');
+    const sandboxedLog4js = sandbox.require('../../lib/log4js', {
+      requires: {
+        './appenders/file': {
+          configure(config, layouts) {
+            const fileAppender = require('../../lib/appenders/file').configure(
+              config,
+              layouts
+            );
+            const originalShutdown = fileAppender.shutdown;
+            fileAppender.shutdown = function (complete) {
+              const onCallback = function () {
+                complete(error);
+              };
+              originalShutdown(onCallback);
+            };
+            return fileAppender;
+          },
+        },
+        debug,
+      },
+    });
+    /* checking that the file is closed after a timeout is done by looking at the debug logs
+      since detecting file locks with node.js is platform specific.
+     */
+    const debugWasEnabled = debug.enabled('log4js:multiFile');
+    const debugLogs = [];
+    const originalWrite = process.stderr.write;
+    process.stderr.write = (string, encoding, fd) => {
+      debugLogs.push(string);
+      if (debugWasEnabled) {
+        originalWrite.apply(process.stderr, [string, encoding, fd]);
+      }
+    };
+    const originalNamespace = debug.disable();
+    debug.enable(`${originalNamespace}, log4js:multiFile`);
+
+    t.teardown(async () => {
+      await new Promise((resolve) => {
+        sandboxedLog4js.shutdown(resolve);
+      });
+      await removeFiles('logs/C.log');
+      process.stderr.write = originalWrite;
+      debug.enable(originalNamespace);
+    });
+
+    const timeoutMs = 50;
+    sandboxedLog4js.configure({
+      appenders: {
+        multi: {
+          type: 'multiFile',
+          base: 'logs/',
+          property: 'label',
+          extension: '.log',
+          timeout: timeoutMs,
+        },
+      },
+      categories: { default: { appenders: ['multi'], level: 'info' } },
+    });
+    const loggerC = sandboxedLog4js.getLogger('cheese');
+    loggerC.addContext('label', 'C');
+    loggerC.info('I am in logger C');
+    setTimeout(() => {
+      t.match(
+        debugLogs[debugLogs.length - 2],
+        `C not used for > ${timeoutMs} ms => close`,
+        '(timeout1) should have closed'
+      );
+      t.match(
+        debugLogs[debugLogs.length - 1],
+        `ignore error on file shutdown: ${error.message}`,
+        'safely shutdown'
+      );
+      t.end();
+    }, timeoutMs * 1 + osDelay);
+  });
+
+  batch.test('should close file after extended timeout', (t) => {
+    /* checking that the file is closed after a timeout is done by looking at the debug logs
+      since detecting file locks with node.js is platform specific.
+     */
+    const debugWasEnabled = debug.enabled('log4js:multiFile');
+    const debugLogs = [];
+    const originalWrite = process.stderr.write;
+    process.stderr.write = (string, encoding, fd) => {
+      debugLogs.push(string);
+      if (debugWasEnabled) {
+        originalWrite.apply(process.stderr, [string, encoding, fd]);
+      }
+    };
+    const originalNamespace = debug.disable();
+    debug.enable(`${originalNamespace}, log4js:multiFile`);
+
+    t.teardown(async () => {
+      await new Promise((resolve) => {
+        log4js.shutdown(resolve);
+      });
+      await removeFiles('logs/D.log');
+      process.stderr.write = originalWrite;
+      debug.enable(originalNamespace);
+    });
+
+    const timeoutMs = 1000;
+    log4js.configure({
+      appenders: {
+        multi: {
+          type: 'multiFile',
+          base: 'logs/',
+          property: 'label',
+          extension: '.log',
+          timeout: timeoutMs,
+        },
+      },
+      categories: { default: { appenders: ['multi'], level: 'info' } },
+    });
+    const loggerD = log4js.getLogger('cheese');
+    loggerD.addContext('label', 'D');
+    loggerD.info('I am in logger D');
+    setTimeout(() => {
+      loggerD.info('extending activity!');
+      t.match(
+        debugLogs[debugLogs.length - 1],
+        'D extending activity',
+        'should have extended'
+      );
+    }, timeoutMs / 2);
+    setTimeout(() => {
+      t.notOk(
+        debugLogs.some(
+          (s) => s.indexOf(`D not used for > ${timeoutMs} ms => close`) !== -1
+        ),
+        '(timeout1) should not have closed'
+      );
+    }, timeoutMs * 1 + osDelay);
+    setTimeout(() => {
+      t.match(
+        debugLogs[debugLogs.length - 1],
+        `D not used for > ${timeoutMs} ms => close`,
+        '(timeout2) should have closed'
+      );
+      t.end();
+    }, timeoutMs * 2 + osDelay);
+  });
+
+  batch.test('should clear interval for active timers on shutdown', (t) => {
+    /* checking that the file is closed after a timeout is done by looking at the debug logs
+      since detecting file locks with node.js is platform specific.
+     */
+    const debugWasEnabled = debug.enabled('log4js:multiFile');
+    const debugLogs = [];
+    const originalWrite = process.stderr.write;
+    process.stderr.write = (string, encoding, fd) => {
+      debugLogs.push(string);
+      if (debugWasEnabled) {
+        originalWrite.apply(process.stderr, [string, encoding, fd]);
+      }
+    };
+    const originalNamespace = debug.disable();
+    debug.enable(`${originalNamespace}, log4js:multiFile`);
+
+    t.teardown(async () => {
+      await removeFiles('logs/D.log');
+      process.stderr.write = originalWrite;
+      debug.enable(originalNamespace);
+    });
+
+    const timeoutMs = 100;
+    log4js.configure({
+      appenders: {
+        multi: {
+          type: 'multiFile',
+          base: 'logs/',
+          property: 'label',
+          extension: '.log',
+          timeout: timeoutMs,
+        },
+      },
+      categories: { default: { appenders: ['multi'], level: 'info' } },
+    });
+    const loggerD = log4js.getLogger('cheese');
+    loggerD.addContext('label', 'D');
+    loggerD.info('I am in logger D');
+    log4js.shutdown(() => {
+      t.notOk(
+        debugLogs.some(
+          (s) => s.indexOf(`D not used for > ${timeoutMs} ms => close`) !== -1
+        ),
+        'should not have closed'
+      );
+      t.ok(
+        debugLogs.some((s) => s.indexOf('clearing timer for  D') !== -1),
+        'should have cleared timers'
+      );
+      t.match(
+        debugLogs[debugLogs.length - 1],
+        'calling shutdown for  D',
+        'should have called shutdown'
+      );
+      t.end();
+    });
+  });
+
+  batch.test(
+    'should fail silently if loggingEvent property has no value',
+    (t) => {
+      t.teardown(async () => {
+        await removeFiles('logs/E.log');
+      });
+      log4js.configure({
+        appenders: {
+          multi: {
+            type: 'multiFile',
+            base: 'logs/',
+            property: 'label',
+            extension: '.log',
+          },
+        },
+        categories: { default: { appenders: ['multi'], level: 'info' } },
+      });
+      const loggerE = log4js.getLogger();
+      loggerE.addContext('label', 'E');
+      loggerE.info('I am in logger E');
+      loggerE.removeContext('label');
+      loggerE.info('I am not in logger E');
+      loggerE.addContext('label', null);
+      loggerE.info('I am also not in logger E');
+      log4js.shutdown(() => {
+        const contents = fs.readFileSync('logs/E.log', 'utf-8');
+        t.match(contents, 'I am in logger E');
+        t.notMatch(contents, 'I am not in logger E');
+        t.notMatch(contents, 'I am also not in logger E');
+        t.end();
+      });
+    }
+  );
+
+  batch.test('should pass options to rolling file stream', (t) => {
+    t.teardown(async () => {
+      await removeFiles(['logs/F.log', 'logs/F.log.1', 'logs/F.log.2']);
+    });
+    log4js.configure({
+      appenders: {
+        multi: {
+          type: 'multiFile',
+          base: 'logs/',
+          property: 'label',
+          extension: '.log',
+          maxLogSize: 30,
+          backups: 2,
+          layout: { type: 'messagePassThrough' },
+        },
+      },
+      categories: { default: { appenders: ['multi'], level: 'info' } },
+    });
+    const loggerF = log4js.getLogger();
+    loggerF.addContext('label', 'F');
+    loggerF.info('Being in logger F is the best.');
+    loggerF.info('I am also in logger F, awesome');
+    loggerF.info('I am in logger F');
+    log4js.shutdown(() => {
+      let contents = fs.readFileSync('logs/F.log', 'utf-8');
+      t.match(contents, 'I am in logger F');
+      contents = fs.readFileSync('logs/F.log.1', 'utf-8');
+      t.match(contents, 'I am also in logger F');
+      contents = fs.readFileSync('logs/F.log.2', 'utf-8');
+      t.match(contents, 'Being in logger F is the best');
+      t.end();
+    });
+  });
+
+  batch.test('should inherit config from category hierarchy', (t) => {
+    t.teardown(async () => {
+      await removeFiles('logs/test.someTest.log');
+    });
+    log4js.configure({
+      appenders: {
+        out: { type: 'stdout' },
+        test: {
+          type: 'multiFile',
+          base: 'logs/',
+          property: 'categoryName',
+          extension: '.log',
+        },
+      },
+      categories: {
+        default: { appenders: ['out'], level: 'info' },
+        test: { appenders: ['test'], level: 'debug' },
+      },
+    });
+
+    const testLogger = log4js.getLogger('test.someTest');
+    testLogger.debug('This should go to the file');
+    log4js.shutdown(() => {
+      const contents = fs.readFileSync('logs/test.someTest.log', 'utf-8');
+      t.match(contents, 'This should go to the file');
+      t.end();
+    });
+  });
+
+  batch.test('should shutdown safely even if it is not used', (t) => {
+    log4js.configure({
+      appenders: {
+        out: { type: 'stdout' },
+        test: {
+          type: 'multiFile',
+          base: 'logs/',
+          property: 'categoryName',
+          extension: '.log',
+        },
+      },
+      categories: {
+        default: { appenders: ['out'], level: 'info' },
+        test: { appenders: ['test'], level: 'debug' },
+      },
+    });
+    log4js.shutdown(() => {
+      t.ok('callback is called');
+      t.end();
+    });
+  });
+
+  batch.teardown(async () => {
+    try {
+      const files = fs.readdirSync('logs');
+      await removeFiles(files.map((filename) => `logs/${filename}`));
+      fs.rmdirSync('logs');
+    } catch (e) {
+      // doesn't matter
+    }
+  });
+
+  batch.end();
+});
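
The multiFile tests above exercise the appender purely through its public configuration. As a quick reference, a minimal configuration that routes each category to its own file under logs/, mirroring the options used in the tests:

    // Sketch: one log file per category via the multiFile appender.
    const log4js = require('../../lib/log4js');

    log4js.configure({
      appenders: {
        multi: {
          type: 'multiFile',
          base: 'logs/',            // directory prefix for every file
          property: 'categoryName', // loggingEvent property that selects the file
          extension: '.log',
          timeout: 5000,            // optional: close files idle for 5 seconds
        },
      },
      categories: { default: { appenders: ['multi'], level: 'info' } },
    });

    log4js.getLogger('A').info('written to logs/A.log');
    log4js.getLogger('B').info('written to logs/B.log');
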
diff --git a/test/tap/multiprocess-shutdown-test.js b/test/tap/multiprocess-shutdown-test.js
index 1bde5919..e62cf765 100644
--- a/test/tap/multiprocess-shutdown-test.js
+++ b/test/tap/multiprocess-shutdown-test.js
@@ -1,30 +1,127 @@
-'use strict';
-
-const test = require('tap').test;
-const log4js = require('../../lib/log4js');
+const { test } = require('tap');
 const net = require('net');
+const childProcess = require('child_process');
+const sandbox = require('@log4js-node/sandboxed-module');
+const log4js = require('../../lib/log4js');
 
-test('multiprocess appender shutdown (master)', { timeout: 2000 }, (t) => {
+test('multiprocess appender shutdown (master)', { timeout: 10000 }, (t) => {
   log4js.configure({
-    appenders: [
-      {
+    appenders: {
+      stdout: { type: 'stdout' },
+      multi: {
         type: 'multiprocess',
         mode: 'master',
         loggerPort: 12345,
-        appender: { type: 'stdout' }
-      }
-    ]
+        appender: 'stdout',
+      },
+    },
+    categories: { default: { appenders: ['multi'], level: 'debug' } },
   });
 
   setTimeout(() => {
     log4js.shutdown(() => {
-      net.connect({ port: 12345 }, () => {
-        t.fail('connection should not still work');
-        t.end();
-      }).on('error', (err) => {
-        t.ok(err, 'we got a connection error');
-        t.end();
-      });
+      setTimeout(() => {
+        net
+          .connect({ port: 12345 }, () => {
+            t.fail('connection should not still work');
+            t.end();
+          })
+          .on('error', (err) => {
+            t.ok(err, 'we got a connection error');
+            t.end();
+          });
+      }, 1000);
     });
+  }, 1000);
+});
+
+test('multiprocess appender shutdown (worker)', (t) => {
+  const fakeConnection = {
+    evts: {},
+    msgs: [],
+    on(evt, cb) {
+      this.evts[evt] = cb;
+    },
+    write(data) {
+      this.msgs.push(data);
+    },
+    removeAllListeners() {
+      this.removeAllListenersCalled = true;
+    },
+    end(cb) {
+      this.endCb = cb;
+    },
+  };
+  const logLib = sandbox.require('../../lib/log4js', {
+    requires: {
+      net: {
+        createConnection() {
+          return fakeConnection;
+        },
+      },
+    },
+  });
+  logLib.configure({
+    appenders: { worker: { type: 'multiprocess', mode: 'worker' } },
+    categories: { default: { appenders: ['worker'], level: 'debug' } },
+  });
+
+  logLib
+    .getLogger()
+    .info(
+      'Putting something in the buffer before the connection is established'
+    );
+  // nothing has been written yet.
+  t.equal(fakeConnection.msgs.length, 0);
+
+  let shutdownFinished = false;
+  logLib.shutdown(() => {
+    shutdownFinished = true;
+  });
+
+  // still nothing has been written yet.
+  t.equal(fakeConnection.msgs.length, 0);
+
+  fakeConnection.evts.connect();
+
+  setTimeout(() => {
+    t.equal(fakeConnection.msgs.length, 2);
+    t.ok(fakeConnection.removeAllListenersCalled);
+    fakeConnection.endCb();
+
+    t.ok(shutdownFinished);
+    t.end();
   }, 500);
 });
+
+test('multiprocess appender crash (worker)', (t) => {
+  const loggerPort = 12346;
+  const vcr = require('../../lib/appenders/recording');
+  log4js.configure({
+    appenders: {
+      console: { type: 'recording' },
+      multi: {
+        type: 'multiprocess',
+        mode: 'master',
+        loggerPort,
+        appender: 'console',
+      },
+    },
+    categories: { default: { appenders: ['multi'], level: 'debug' } },
+  });
+
+  const worker = childProcess.fork(require.resolve('../multiprocess-worker'), [
+    'start-multiprocess-worker',
+    loggerPort,
+  ]);
+
+  worker.on('message', (m) => {
+    if (m === 'worker is done') {
+      setTimeout(() => {
+        worker.kill();
+        t.equal(vcr.replay()[0].data[0], 'Logging from worker');
+        log4js.shutdown(() => t.end());
+      }, 100);
+    }
+  });
+});
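
These shutdown tests drive both halves of the multiprocess appender. For orientation, a sketch of the minimal master/worker configuration pair they assume, with both processes shown back to back for brevity; the worker's default of localhost:5000 is asserted in the tests that follow, and the file appender target here is illustrative:

    // Sketch: a master process collects events that worker processes forward over TCP.
    const log4js = require('../../lib/log4js');

    // master process
    log4js.configure({
      appenders: {
        file: { type: 'file', filename: 'all-the-logs.log' }, // illustrative target
        server: {
          type: 'multiprocess',
          mode: 'master',
          appender: 'file',
          loggerPort: 5000,
        },
      },
      categories: { default: { appenders: ['server'], level: 'info' } },
    });

    // worker process (e.g. a forked child)
    log4js.configure({
      appenders: {
        net: { type: 'multiprocess', mode: 'worker', loggerPort: 5000 },
      },
      categories: { default: { appenders: ['net'], level: 'info' } },
    });
    log4js.getLogger().info('Logging from worker'); // ends up in the master's file
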
diff --git a/test/tap/multiprocess-test.js b/test/tap/multiprocess-test.js
index 0b0c61c9..7577b1d4 100644
--- a/test/tap/multiprocess-test.js
+++ b/test/tap/multiprocess-test.js
@@ -1,106 +1,127 @@
-'use strict';
-
-const test = require('tap').test;
-const sandbox = require('sandboxed-module');
+const childProcess = require('child_process');
+const { test } = require('tap');
+const flatted = require('flatted');
+const sandbox = require('@log4js-node/sandboxed-module');
+const recording = require('../../lib/appenders/recording');
 
 function makeFakeNet() {
   return {
-    logEvents: [],
     data: [],
     cbs: {},
     createConnectionCalled: 0,
-    fakeAppender: function (logEvent) {
-      this.logEvents.push(logEvent);
-    },
-    createConnection: function (port, host) {
+    createConnection(port, host) {
       const fakeNet = this;
       this.port = port;
       this.host = host;
       this.createConnectionCalled += 1;
       return {
-        on: function (evt, cb) {
+        on(evt, cb) {
           fakeNet.cbs[evt] = cb;
         },
-        write: function (data, encoding) {
+        write(data, encoding) {
           fakeNet.data.push(data);
           fakeNet.encoding = encoding;
         },
-        end: function () {
+        end() {
           fakeNet.closeCalled = true;
-        }
+        },
       };
     },
-    createServer: function (cb) {
+    createServer(cb) {
       const fakeNet = this;
       cb({
         remoteAddress: '1.2.3.4',
         remotePort: '1234',
-        setEncoding: function (encoding) {
+        setEncoding(encoding) {
           fakeNet.encoding = encoding;
         },
-        on: function (event, cb2) {
+        on(event, cb2) {
           fakeNet.cbs[event] = cb2;
-        }
+        },
       });
 
       return {
-        listen: function (port, host) {
+        listen(port, host) {
           fakeNet.port = port;
           fakeNet.host = host;
-        }
+        },
       };
-    }
+    },
   };
 }
 
-test('Multiprocess Appender', (batch) => {
+test('Multiprocess Appender', async (batch) => {
+  batch.beforeEach((done) => {
+    recording.erase();
+    if (typeof done === 'function') {
+      done();
+    }
+  });
+
   batch.test('worker', (t) => {
     const fakeNet = makeFakeNet();
 
-    const appender = sandbox.require(
-      '../../lib/appenders/multiprocess',
-      {
-        requires: {
-          net: fakeNet
-        }
-      }
-    ).appender({ mode: 'worker', loggerPort: 1234, loggerHost: 'pants' });
+    const log4js = sandbox.require('../../lib/log4js', {
+      requires: {
+        net: fakeNet,
+      },
+    });
+    log4js.configure({
+      appenders: {
+        worker: {
+          type: 'multiprocess',
+          mode: 'worker',
+          loggerPort: 1234,
+          loggerHost: 'pants',
+        },
+      },
+      categories: { default: { appenders: ['worker'], level: 'trace' } },
+    });
 
-    // don't need a proper log event for the worker tests
-    appender('before connect');
+    const logger = log4js.getLogger();
+    logger.info('before connect');
     fakeNet.cbs.connect();
-    appender('after connect');
-    fakeNet.cbs.close(true);
-    appender('after error, before connect');
+    logger.info('after connect');
+    fakeNet.cbs.close();
+    logger.info('after error, before connect');
     fakeNet.cbs.connect();
-    appender('after error, after connect');
-    appender(new Error('Error test'));
+    logger.info('after error, after connect');
+    logger.error(new Error('Error test'));
 
     const net = fakeNet;
-    t.test('should open a socket to the loggerPort and loggerHost', (assert) => {
-      assert.equal(net.port, 1234);
-      assert.equal(net.host, 'pants');
-      assert.end();
-    });
+    t.test(
+      'should open a socket to the loggerPort and loggerHost',
+      (assert) => {
+        assert.equal(net.port, 1234);
+        assert.equal(net.host, 'pants');
+        assert.end();
+      }
+    );
 
-    t.test('should buffer messages written before socket is connected', (assert) => {
-      assert.equal(net.data[0], JSON.stringify('before connect'));
-      assert.end();
-    });
+    t.test(
+      'should buffer messages written before socket is connected',
+      (assert) => {
+        assert.match(net.data[0], 'before connect');
+        assert.end();
+      }
+    );
 
-    t.test('should write log messages to socket as json strings with a terminator string', (assert) => {
-      assert.equal(net.data[0], JSON.stringify('before connect'));
-      assert.equal(net.data[1], '__LOG4JS__');
-      assert.equal(net.data[2], JSON.stringify('after connect'));
-      assert.equal(net.data[3], '__LOG4JS__');
-      assert.equal(net.encoding, 'utf8');
-      assert.end();
-    });
+    t.test(
+      'should write log messages to socket as flatted strings with a terminator string',
+      (assert) => {
+        assert.match(net.data[0], 'before connect');
+        assert.equal(net.data[1], '__LOG4JS__');
+        assert.match(net.data[2], 'after connect');
+        assert.equal(net.data[3], '__LOG4JS__');
+        assert.equal(net.encoding, 'utf8');
+        assert.end();
+      }
+    );
 
     t.test('should attempt to re-open the socket on error', (assert) => {
-      assert.equal(net.data[4], JSON.stringify('after error, before connect'));
+      assert.match(net.data[4], 'after error, before connect');
       assert.equal(net.data[5], '__LOG4JS__');
-      assert.equal(net.data[6], JSON.stringify('after error, after connect'));
+      assert.match(net.data[6], 'after error, after connect');
       assert.equal(net.data[7], '__LOG4JS__');
       assert.equal(net.createConnectionCalled, 2);
       assert.end();
@@ -108,48 +129,89 @@ test('Multiprocess Appender', (batch) => {
 
     t.test('should serialize an Error correctly', (assert) => {
       assert.ok(
-        JSON.parse(net.data[8]).stack,
-        `Expected:\n\n${net.data[8]}\n\n to have a 'stack' property`
+        flatted.parse(net.data[8]).data[0].stack,
+        `Expected:\n\n${net.data[8]}\n\n to have a 'data[0].stack' property`
       );
-      const actual = JSON.parse(net.data[8]).stack;
+      const actual = flatted.parse(net.data[8]).data[0].stack;
       assert.match(actual, /^Error: Error test/);
       assert.end();
     });
+
     t.end();
   });
 
   batch.test('worker with timeout', (t) => {
     const fakeNet = makeFakeNet();
 
-    const appender = sandbox.require(
-      '../../lib/appenders/multiprocess',
-      {
-        requires: {
-          net: fakeNet
-        }
-      }
-    ).appender({ mode: 'worker' });
+    const log4js = sandbox.require('../../lib/log4js', {
+      requires: {
+        net: fakeNet,
+      },
+    });
+    log4js.configure({
+      appenders: { worker: { type: 'multiprocess', mode: 'worker' } },
+      categories: { default: { appenders: ['worker'], level: 'trace' } },
+    });
 
-    // don't need a proper log event for the worker tests
-    appender('before connect');
+    const logger = log4js.getLogger();
+    logger.info('before connect');
     fakeNet.cbs.connect();
-    appender('after connect');
+    logger.info('after connect');
     fakeNet.cbs.timeout();
-    appender('after timeout, before close');
+    logger.info('after timeout, before close');
     fakeNet.cbs.close();
-    appender('after close, before connect');
+    logger.info('after close, before connect');
     fakeNet.cbs.connect();
-    appender('after close, after connect');
+    logger.info('after close, after connect');
 
     const net = fakeNet;
 
     t.test('should attempt to re-open the socket', (assert) => {
       // skipping the __LOG4JS__ separators
-      assert.equal(net.data[0], JSON.stringify('before connect'));
-      assert.equal(net.data[2], JSON.stringify('after connect'));
-      assert.equal(net.data[4], JSON.stringify('after timeout, before close'));
-      assert.equal(net.data[6], JSON.stringify('after close, before connect'));
-      assert.equal(net.data[8], JSON.stringify('after close, after connect'));
+      assert.match(net.data[0], 'before connect');
+      assert.match(net.data[2], 'after connect');
+      assert.match(net.data[4], 'after timeout, before close');
+      assert.match(net.data[6], 'after close, before connect');
+      assert.match(net.data[8], 'after close, after connect');
+      assert.equal(net.createConnectionCalled, 2);
+      assert.end();
+    });
+    t.end();
+  });
+
+  batch.test('worker with error', (t) => {
+    const fakeNet = makeFakeNet();
+
+    const log4js = sandbox.require('../../lib/log4js', {
+      requires: {
+        net: fakeNet,
+      },
+    });
+    log4js.configure({
+      appenders: { worker: { type: 'multiprocess', mode: 'worker' } },
+      categories: { default: { appenders: ['worker'], level: 'trace' } },
+    });
+
+    const logger = log4js.getLogger();
+    logger.info('before connect');
+    fakeNet.cbs.connect();
+    logger.info('after connect');
+    fakeNet.cbs.error();
+    logger.info('after error, before close');
+    fakeNet.cbs.close();
+    logger.info('after close, before connect');
+    fakeNet.cbs.connect();
+    logger.info('after close, after connect');
+
+    const net = fakeNet;
+
+    t.test('should attempt to re-open the socket', (assert) => {
+      // skipping the __LOG4JS__ separators
+      assert.match(net.data[0], 'before connect');
+      assert.match(net.data[2], 'after connect');
+      assert.match(net.data[4], 'after error, before close');
+      assert.match(net.data[6], 'after close, before connect');
+      assert.match(net.data[8], 'after close, after connect');
       assert.equal(net.createConnectionCalled, 2);
       assert.end();
     });
@@ -159,14 +221,15 @@ test('Multiprocess Appender', (batch) => {
   batch.test('worker defaults', (t) => {
     const fakeNet = makeFakeNet();
 
-    sandbox.require(
-      '../../lib/appenders/multiprocess',
-      {
-        requires: {
-          net: fakeNet
-        }
-      }
-    ).appender({ mode: 'worker' });
+    const log4js = sandbox.require('../../lib/log4js', {
+      requires: {
+        net: fakeNet,
+      },
+    });
+    log4js.configure({
+      appenders: { worker: { type: 'multiprocess', mode: 'worker' } },
+      categories: { default: { appenders: ['worker'], level: 'trace' } },
+    });
 
     t.test('should open a socket to localhost:5000', (assert) => {
       assert.equal(fakeNet.port, 5000);
@@ -179,106 +242,174 @@ test('Multiprocess Appender', (batch) => {
   batch.test('master', (t) => {
     const fakeNet = makeFakeNet();
 
-    const appender = sandbox.require(
-      '../../lib/appenders/multiprocess',
-      {
-        requires: {
-          net: fakeNet
-        }
-      }
-    ).appender({
-      mode: 'master',
-      loggerHost: 'server',
-      loggerPort: 1234,
-      actualAppender: fakeNet.fakeAppender.bind(fakeNet)
+    const log4js = sandbox.require('../../lib/log4js', {
+      requires: {
+        net: fakeNet,
+        './appenders/recording': recording,
+      },
+    });
+    log4js.configure({
+      appenders: {
+        recorder: { type: 'recording' },
+        master: {
+          type: 'multiprocess',
+          mode: 'master',
+          loggerPort: 1234,
+          loggerHost: 'server',
+          appender: 'recorder',
+        },
+      },
+      categories: { default: { appenders: ['master'], level: 'trace' } },
     });
-
-    appender('this should be sent to the actual appender directly');
 
     const net = fakeNet;
 
-    t.test('should listen for log messages on loggerPort and loggerHost', (assert) => {
-      assert.equal(net.port, 1234);
-      assert.equal(net.host, 'server');
-      assert.end();
-    });
+    t.test(
+      'should listen for log messages on loggerPort and loggerHost',
+      (assert) => {
+        assert.equal(net.port, 1234);
+        assert.equal(net.host, 'server');
+        assert.end();
+      }
+    );
 
     t.test('should return the underlying appender', (assert) => {
-      assert.equal(net.logEvents[0], 'this should be sent to the actual appender directly');
+      log4js
+        .getLogger()
+        .info('this should be sent to the actual appender directly');
+
+      assert.equal(
+        recording.replay()[0].data[0],
+        'this should be sent to the actual appender directly'
+      );
       assert.end();
     });
 
+    t.test('should log the error on "error" event', (assert) => {
+      net.cbs.error(new Error('Expected error'));
+      const logEvents = recording.replay();
+      assert.plan(2);
+      assert.equal(logEvents.length, 1);
+      assert.equal(
+        'A worker log process hung up unexpectedly',
+        logEvents[0].data[0]
+      );
+    });
+
     t.test('when a client connects', (assert) => {
-      const logString = `${JSON.stringify(
-        {
-          level: { level: 10000, levelStr: 'DEBUG' },
-          data: ['some debug']
-        }
-      )}__LOG4JS__`;
+      const logString = `${flatted.stringify({
+        level: { level: 10000, levelStr: 'DEBUG' },
+        data: ['some debug'],
+      })}__LOG4JS__`;
 
       net.cbs.data(
-        `${JSON.stringify(
-          {
-            level: { level: 40000, levelStr: 'ERROR' },
-            data: ['an error message']
-          }
-        )}__LOG4JS__`
+        `${flatted.stringify({
+          level: { level: 40000, levelStr: 'ERROR' },
+          data: ['an error message'],
+        })}__LOG4JS__`
       );
-      net.cbs.data(logString.substring(0, 10));
-      net.cbs.data(logString.substring(10));
+      net.cbs.data(logString.slice(0, 10));
+      net.cbs.data(logString.slice(10));
       net.cbs.data(logString + logString + logString);
       net.cbs.end(
-        `${JSON.stringify(
-          {
-            level: { level: 50000, levelStr: 'FATAL' },
-            data: ["that's all folks"]
-          }
-        )}__LOG4JS__`
+        `${flatted.stringify({
+          level: { level: 50000, levelStr: 'FATAL' },
+          data: ["that's all folks"],
+        })}__LOG4JS__`
       );
       net.cbs.data('bad message__LOG4JS__');
 
+      const logEvents = recording.replay();
       // should parse log messages into log events and send to appender
-      assert.equal(net.logEvents[1].level.toString(), 'ERROR');
-      assert.equal(net.logEvents[1].data[0], 'an error message');
-      assert.equal(net.logEvents[1].remoteAddress, '1.2.3.4');
-      assert.equal(net.logEvents[1].remotePort, '1234');
+      assert.equal(logEvents[0].level.toString(), 'ERROR');
+      assert.equal(logEvents[0].data[0], 'an error message');
+      assert.equal(logEvents[0].remoteAddress, '1.2.3.4');
+      assert.equal(logEvents[0].remotePort, '1234');
 
       // should parse log messages split into multiple chunks
-      assert.equal(net.logEvents[2].level.toString(), 'DEBUG');
-      assert.equal(net.logEvents[2].data[0], 'some debug');
-      assert.equal(net.logEvents[2].remoteAddress, '1.2.3.4');
-      assert.equal(net.logEvents[2].remotePort, '1234');
+      assert.equal(logEvents[1].level.toString(), 'DEBUG');
+      assert.equal(logEvents[1].data[0], 'some debug');
+      assert.equal(logEvents[1].remoteAddress, '1.2.3.4');
+      assert.equal(logEvents[1].remotePort, '1234');
 
       // should parse multiple log messages in a single chunk
-      assert.equal(net.logEvents[3].data[0], 'some debug');
-      assert.equal(net.logEvents[4].data[0], 'some debug');
-      assert.equal(net.logEvents[5].data[0], 'some debug');
+      assert.equal(logEvents[2].data[0], 'some debug');
+      assert.equal(logEvents[3].data[0], 'some debug');
+      assert.equal(logEvents[4].data[0], 'some debug');
 
       // should handle log messages sent as part of end event
-      assert.equal(net.logEvents[6].data[0], "that's all folks");
+      assert.equal(logEvents[5].data[0], "that's all folks");
 
       // should handle unparseable log messages
-      assert.equal(net.logEvents[7].level.toString(), 'ERROR');
-      assert.equal(net.logEvents[7].categoryName, 'log4js');
-      assert.equal(net.logEvents[7].data[0], 'Unable to parse log:');
-      assert.equal(net.logEvents[7].data[1], 'bad message');
+      assert.equal(logEvents[6].level.toString(), 'ERROR');
+      assert.equal(logEvents[6].categoryName, 'log4js');
+      assert.equal(logEvents[6].data[0], 'Unable to parse log:');
+      assert.equal(logEvents[6].data[1], 'bad message');
 
       assert.end();
     });
     t.end();
   });
 
+  batch.test('master without actual appender throws error', (t) => {
+    const fakeNet = makeFakeNet();
+
+    const log4js = sandbox.require('../../lib/log4js', {
+      requires: {
+        net: fakeNet,
+      },
+    });
+    t.throws(
+      () =>
+        log4js.configure({
+          appenders: { master: { type: 'multiprocess', mode: 'master' } },
+          categories: { default: { appenders: ['master'], level: 'trace' } },
+        }),
+      new Error('multiprocess master must have an "appender" defined')
+    );
+    t.end();
+  });
+
+  batch.test('master with unknown appender throws error', (t) => {
+    const fakeNet = makeFakeNet();
+
+    const log4js = sandbox.require('../../lib/log4js', {
+      requires: {
+        net: fakeNet,
+      },
+    });
+    t.throws(
+      () =>
+        log4js.configure({
+          appenders: {
+            master: {
+              type: 'multiprocess',
+              mode: 'master',
+              appender: 'cheese',
+            },
+          },
+          categories: { default: { appenders: ['master'], level: 'trace' } },
+        }),
+      new Error('multiprocess master appender "cheese" not defined')
+    );
+    t.end();
+  });
+
   batch.test('master defaults', (t) => {
     const fakeNet = makeFakeNet();
 
-    sandbox.require(
-      '../../lib/appenders/multiprocess',
-      {
-        requires: {
-          net: fakeNet
-        }
-      }
-    ).appender({ mode: 'master' });
+    const log4js = sandbox.require('../../lib/log4js', {
+      requires: {
+        net: fakeNet,
+      },
+    });
+    log4js.configure({
+      appenders: {
+        stdout: { type: 'stdout' },
+        master: { type: 'multiprocess', mode: 'master', appender: 'stdout' },
+      },
+      categories: { default: { appenders: ['master'], level: 'trace' } },
+    });
 
     t.test('should listen for log messages on localhost:5000', (assert) => {
       assert.equal(fakeNet.port, 5000);
@@ -288,43 +419,39 @@ test('Multiprocess Appender', (batch) => {
     t.end();
   });
 
-  batch.test('configure', (t) => {
-    const results = {};
-    const fakeNet = makeFakeNet();
-
-    sandbox.require(
-      '../../lib/appenders/multiprocess',
-      {
-        requires: {
-          net: fakeNet,
-          '../log4js': {
-            loadAppender: function (app) {
-              results.appenderLoaded = app;
-            },
-            appenderMakers: {
-              madeupappender: function (config, options) {
-                results.config = config;
-                results.options = options;
-              }
-            }
-          }
-        }
-      }
-    ).configure(
-      {
-        mode: 'master',
-        appender: {
-          type: 'madeupappender',
-          cheese: 'gouda'
-        }
+  await batch.test('e2e test', async (assert) => {
+    const log4js = sandbox.require('../../lib/log4js', {
+      requires: {
+        './appenders/recording': recording,
       },
-      { crackers: 'jacobs' }
+    });
+    log4js.configure({
+      appenders: {
+        recording: { type: 'recording' },
+        master: {
+          type: 'multiprocess',
+          mode: 'master',
+          appender: 'recording',
+          loggerPort: 5001,
+        },
+      },
+      categories: { default: { appenders: ['recording'], level: 'trace' } },
+    });
+    const child = childProcess.fork(
+      require.resolve('../multiprocess-worker.js'),
+      ['start-multiprocess-worker', '5001'],
+      { stdio: 'inherit' }
     );
+    const actualMsg = await new Promise((res, rej) => {
+      child.on('message', res);
+      child.on('error', rej);
+    });
 
-    t.equal(results.appenderLoaded, 'madeupappender', 'should load underlying appender for master');
-    t.equal(results.config.cheese, 'gouda', 'should pass config to underlying appender');
-    t.equal(results.options.crackers, 'jacobs', 'should pass options to underlying appender');
-    t.end();
+    const logEvents = recording.replay();
+    assert.equal(actualMsg, 'worker is done');
+    assert.equal(logEvents.length, 1);
+    assert.equal(logEvents[0].data[0], 'Logging from worker');
+    assert.end();
   });
 
   batch.end();
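
The master tests above feed the appender hand-built chunks, which documents the wire format: each event is serialised with flatted and followed by the literal terminator __LOG4JS__, and events can arrive split across or packed together within TCP chunks. A hedged sketch of decoding such a stream (buffering simplified; the real parsing lives in lib/appenders/multiprocess):

    // Sketch: decode the multiprocess wire format exercised in the tests above.
    const flatted = require('flatted');

    let buffer = '';
    function onData(chunk) {
      buffer += chunk;
      const pieces = buffer.split('__LOG4JS__');
      buffer = pieces.pop(); // the last piece may be an incomplete event
      return pieces.filter((p) => p.length).map((p) => flatted.parse(p));
    }

    // An event arriving in two chunks decodes once its terminator shows up:
    const evt = `${flatted.stringify({
      level: { level: 10000, levelStr: 'DEBUG' },
      data: ['some debug'],
    })}__LOG4JS__`;
    onData(evt.slice(0, 10));                      // [] - nothing complete yet
    console.log(onData(evt.slice(10))[0].data[0]); // 'some debug'
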
diff --git a/test/tap/newLevel-test.js b/test/tap/newLevel-test.js
index b817cfac..10714fef 100644
--- a/test/tap/newLevel-test.js
+++ b/test/tap/newLevel-test.js
@@ -1,65 +1,104 @@
-'use strict';
-
-const test = require('tap').test;
-const Level = require('../../lib/levels');
+const { test } = require('tap');
 const log4js = require('../../lib/log4js');
-const loggerModule = require('../../lib/logger');
-
-const Logger = loggerModule.Logger;
+const recording = require('../../lib/appenders/recording');
 
 test('../../lib/logger', (batch) => {
+  batch.beforeEach((done) => {
+    recording.reset();
+    if (typeof done === 'function') {
+      done();
+    }
+  });
+
   batch.test('creating a new log level', (t) => {
-    Level.forName('DIAG', 6000);
-    const logger = new Logger();
+    log4js.configure({
+      levels: {
+        DIAG: { value: 6000, colour: 'green' },
+      },
+      appenders: {
+        stdout: { type: 'stdout' },
+      },
+      categories: {
+        default: { appenders: ['stdout'], level: 'trace' },
+      },
+    });
+
+    const logger = log4js.getLogger();
 
     t.test('should export new log level in levels module', (assert) => {
-      assert.ok(Level.DIAG);
-      assert.equal(Level.DIAG.levelStr, 'DIAG');
-      assert.equal(Level.DIAG.level, 6000);
+      assert.ok(log4js.levels.DIAG);
+      assert.equal(log4js.levels.DIAG.levelStr, 'DIAG');
+      assert.equal(log4js.levels.DIAG.level, 6000);
+      assert.equal(log4js.levels.DIAG.colour, 'green');
       assert.end();
     });
 
-    t.type(logger.diag, 'function', 'should create named function on logger prototype');
-    t.type(logger.isDiagEnabled, 'function', 'should create isLevelEnabled function on logger prototype');
+    t.type(
+      logger.diag,
+      'function',
+      'should create named function on logger prototype'
+    );
+    t.type(
+      logger.isDiagEnabled,
+      'function',
+      'should create isLevelEnabled function on logger prototype'
+    );
+    t.type(logger.info, 'function', 'should retain default levels');
     t.end();
   });
 
   batch.test('creating a new log level with underscores', (t) => {
-    Level.forName('NEW_LEVEL_OTHER', 6000);
-    const logger = new Logger();
+    log4js.configure({
+      levels: {
+        NEW_LEVEL_OTHER: { value: 6000, colour: 'blue' },
+      },
+      appenders: { stdout: { type: 'stdout' } },
+      categories: { default: { appenders: ['stdout'], level: 'trace' } },
+    });
+    const logger = log4js.getLogger();
 
     t.test('should export new log level to levels module', (assert) => {
-      assert.ok(Level.NEW_LEVEL_OTHER);
-      assert.equal(Level.NEW_LEVEL_OTHER.levelStr, 'NEW_LEVEL_OTHER');
-      assert.equal(Level.NEW_LEVEL_OTHER.level, 6000);
+      assert.ok(log4js.levels.NEW_LEVEL_OTHER);
+      assert.equal(log4js.levels.NEW_LEVEL_OTHER.levelStr, 'NEW_LEVEL_OTHER');
+      assert.equal(log4js.levels.NEW_LEVEL_OTHER.level, 6000);
+      assert.equal(log4js.levels.NEW_LEVEL_OTHER.colour, 'blue');
       assert.end();
     });
 
     t.type(
-      logger.newLevelOther, 'function',
+      logger.newLevelOther,
+      'function',
       'should create named function on logger prototype in camel case'
     );
     t.type(
-      logger.isNewLevelOtherEnabled, 'function',
+      logger.isNewLevelOtherEnabled,
+      'function',
       'should create named isLevelEnabled function on logger prototype in camel case'
     );
     t.end();
   });
 
   batch.test('creating log events containing newly created log level', (t) => {
-    const events = [];
-    const logger = new Logger();
-    logger.addListener('log', (logEvent) => {
-      events.push(logEvent);
+    log4js.configure({
+      levels: {
+        LVL1: { value: 6000, colour: 'grey' },
+        LVL2: { value: 5000, colour: 'magenta' },
+      },
+      appenders: { recorder: { type: 'recording' } },
+      categories: {
+        default: { appenders: ['recorder'], level: 'LVL1' },
+      },
     });
+    const logger = log4js.getLogger();
 
-    logger.log(Level.forName('LVL1', 6000), 'Event 1');
-    logger.log(Level.getLevel('LVL1'), 'Event 2');
+    logger.log(log4js.levels.getLevel('LVL1', log4js.levels.DEBUG), 'Event 1');
+    logger.log(log4js.levels.getLevel('LVL1'), 'Event 2');
     logger.log('LVL1', 'Event 3');
     logger.lvl1('Event 4');
 
-    logger.setLevel(Level.forName('LVL2', 7000));
-    logger.lvl1('Event 5');
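+    // LVL2 (5000) is below the category threshold LVL1 (6000), so this event is dropped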
+    logger.lvl2('Event 5');
+
+    const events = recording.replay();
 
     t.test('should show log events with new log level', (assert) => {
       assert.equal(events[0].level.toString(), 'LVL1');
@@ -76,49 +115,200 @@ test('../../lib/logger', (batch) => {
       assert.end();
     });
 
-    t.equal(events.length, 4, 'should not be present if min log level is greater than newly created level');
+    t.equal(
+      events.length,
+      4,
+      'should not be present if min log level is greater than newly created level'
+    );
     t.end();
   });
 
   batch.test('creating a new log level with incorrect parameters', (t) => {
-    log4js.levels.forName(9000, 'FAIL_LEVEL_1');
-    log4js.levels.forName('FAIL_LEVEL_2');
+    t.throws(() => {
+      log4js.configure({
+        levels: {
+          cheese: { value: 'biscuits' },
+        },
+        appenders: { stdout: { type: 'stdout' } },
+        categories: { default: { appenders: ['stdout'], level: 'trace' } },
+      });
+    }, 'level "cheese".value must have an integer value');
+
+    t.throws(() => {
+      log4js.configure({
+        levels: {
+          cheese: 'biscuits',
+        },
+        appenders: { stdout: { type: 'stdout' } },
+        categories: { default: { appenders: ['stdout'], level: 'trace' } },
+      });
+    }, 'level "cheese" must be an object');
+
+    t.throws(() => {
+      log4js.configure({
+        levels: {
+          cheese: { thing: 'biscuits' },
+        },
+        appenders: { stdout: { type: 'stdout' } },
+        categories: { default: { appenders: ['stdout'], level: 'trace' } },
+      });
+    }, 'level "cheese" must have a \'value\' property');
+
+    t.throws(() => {
+      log4js.configure({
+        levels: {
+          cheese: { value: 3 },
+        },
+        appenders: { stdout: { type: 'stdout' } },
+        categories: { default: { appenders: ['stdout'], level: 'trace' } },
+      });
+    }, 'level "cheese" must have a \'colour\' property');
+
+    t.throws(() => {
+      log4js.configure({
+        levels: {
+          cheese: { value: 3, colour: 'pants' },
+        },
+        appenders: { stdout: { type: 'stdout' } },
+        categories: { default: { appenders: ['stdout'], level: 'trace' } },
+      });
+    }, 'level "cheese".colour must be one of white, grey, black, blue, cyan, green, magenta, red, yellow');
+
+    t.throws(() => {
+      log4js.configure({
+        levels: {
+          '#pants': 3,
+        },
+        appenders: { stdout: { type: 'stdout' } },
+        categories: { default: { appenders: ['stdout'], level: 'trace' } },
+      });
+    }, 'level name "#pants" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)');
+
+    t.throws(() => {
+      log4js.configure({
+        levels: {
+          'thing#pants': 3,
+        },
+        appenders: { stdout: { type: 'stdout' } },
+        categories: { default: { appenders: ['stdout'], level: 'trace' } },
+      });
+    }, 'level name "thing#pants" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)');
+
+    t.throws(() => {
+      log4js.configure({
+        levels: {
+          '1pants': 3,
+        },
+        appenders: { stdout: { type: 'stdout' } },
+        categories: { default: { appenders: ['stdout'], level: 'trace' } },
+      });
+    }, 'level name "1pants" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)');
+
+    t.throws(() => {
+      log4js.configure({
+        levels: {
+          2: 3,
+        },
+        appenders: { stdout: { type: 'stdout' } },
+        categories: { default: { appenders: ['stdout'], level: 'trace' } },
+      });
+    }, 'level name "2" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)');
+
+    t.throws(() => {
+      log4js.configure({
+        levels: {
+          'cheese!': 3,
+        },
+        appenders: { stdout: { type: 'stdout' } },
+        categories: { default: { appenders: ['stdout'], level: 'trace' } },
+      });
+    }, 'level name "cheese!" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)');
 
-    t.test('should fail to create the level', (assert) => {
-      assert.notOk(Level.FAIL_LEVEL_1);
-      assert.notOk(Level.FAIL_LEVEL_2);
-      assert.end();
-    });
     t.end();
   });
 
   batch.test('calling log with an undefined log level', (t) => {
-    const events = [];
-    const logger = new Logger();
-    logger.addListener('log', (logEvent) => {
-      events.push(logEvent);
+    log4js.configure({
+      appenders: { recorder: { type: 'recording' } },
+      categories: { default: { appenders: ['recorder'], level: 'trace' } },
     });
 
-    logger.log('LEVEL_DOES_NEXT_EXIST', 'Event 1');
-    logger.log(Level.forName('LEVEL_DOES_NEXT_EXIST'), 'Event 2');
+    const logger = log4js.getLogger();
 
-    t.equal(events[0].level.toString(), 'INFO', 'should fall back to INFO');
+    // fallback behavior
+    logger.log('LEVEL_DOES_NOT_EXIST', 'Event 1');
+    logger.log(
+      log4js.levels.getLevel('LEVEL_DOES_NOT_EXIST'),
+      'Event 2',
+      '2 Text'
+    );
+
+    // synonym behavior
+    logger.log('Event 3');
+    logger.log('Event 4', '4 Text');
+
+    const events = recording.replay();
+
+    t.equal(events[0].level.toString(), 'WARN', 'should log warning');
+    t.equal(
+      events[0].data[0],
+      'log4js:logger.log: valid log-level not found as first parameter given:'
+    );
+    t.equal(events[0].data[1], 'LEVEL_DOES_NOT_EXIST');
     t.equal(events[1].level.toString(), 'INFO', 'should fall back to INFO');
+    t.equal(events[1].data[0], '[LEVEL_DOES_NOT_EXIST]');
+    t.equal(events[1].data[1], 'Event 1');
+
+    t.equal(events[2].level.toString(), 'WARN', 'should log warning');
+    t.equal(
+      events[2].data[0],
+      'log4js:logger.log: valid log-level not found as first parameter given:'
+    );
+    t.equal(events[2].data[1], undefined);
+    t.equal(events[3].level.toString(), 'INFO', 'should fall back to INFO');
+    t.equal(events[3].data[0], '[undefined]');
+    t.equal(events[3].data[1], 'Event 2');
+    t.equal(events[3].data[2], '2 Text');
+
+    t.equal(events[4].level.toString(), 'INFO', 'LOG is synonym of INFO');
+    t.equal(events[4].data[0], 'Event 3');
+
+    t.equal(events[5].level.toString(), 'INFO', 'LOG is synonym of INFO');
+    t.equal(events[5].data[0], 'Event 4');
+    t.equal(events[5].data[1], '4 Text');
+
     t.end();
   });
 
   batch.test('creating a new level with an existing level name', (t) => {
-    const events = [];
-    const logger = new Logger();
-    logger.addListener('log', (logEvent) => {
-      events.push(logEvent);
+    log4js.configure({
+      levels: {
+        info: { value: 1234, colour: 'blue' },
+      },
+      appenders: { recorder: { type: 'recording' } },
+      categories: { default: { appenders: ['recorder'], level: 'all' } },
     });
 
-    logger.log(log4js.levels.forName('MY_LEVEL', 9000), 'Event 1');
-    logger.log(log4js.levels.forName('MY_LEVEL', 8000), 'Event 1');
+    t.equal(
+      log4js.levels.INFO.level,
+      1234,
+      'should override the existing log level'
+    );
+    t.equal(
+      log4js.levels.INFO.colour,
+      'blue',
+      'should override the existing log level'
+    );
 
-    t.equal(events[0].level.level, 9000, 'should override the existing log level');
-    t.equal(events[1].level.level, 8000, 'should override the existing log level');
+    const logger = log4js.getLogger();
+    logger.info('test message');
+
+    const events = recording.replay();
+    t.equal(
+      events[0].level.level,
+      1234,
+      'should override the existing log level'
+    );
     t.end();
   });
   batch.end();
diff --git a/test/tap/no-cluster-test.js b/test/tap/no-cluster-test.js
new file mode 100644
index 00000000..65a2ea77
--- /dev/null
+++ b/test/tap/no-cluster-test.js
@@ -0,0 +1,15 @@
+const { test } = require('tap');
+const proxyquire = require('proxyquire');
+
+test('clustering is disabled if cluster is not present', (t) => {
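+  // replacing the cluster module with null simulates an environment where it cannot be loaded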
+  const log4js = proxyquire('../../lib/log4js', { cluster: null });
+  const recorder = require('../../lib/appenders/recording');
+  log4js.configure({
+    appenders: { vcr: { type: 'recording' } },
+    categories: { default: { appenders: ['vcr'], level: 'debug' } },
+  });
+  log4js.getLogger().info('it should still work');
+  const events = recorder.replay();
+  t.equal(events[0].data[0], 'it should still work');
+  t.end();
+});
diff --git a/test/tap/noLogFilter-test.js b/test/tap/noLogFilter-test.js
new file mode 100644
index 00000000..00fe6a3d
--- /dev/null
+++ b/test/tap/noLogFilter-test.js
@@ -0,0 +1,205 @@
+const { test } = require('tap');
+const log4js = require('../../lib/log4js');
+const recording = require('../../lib/appenders/recording');
+
+/**
+ * test a simple regexp
+ */
+test('log4js noLogFilter', (batch) => {
+  batch.beforeEach((done) => {
+    recording.reset();
+    if (typeof done === 'function') {
+      done();
+    }
+  });
+
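+  // noLogFilter drops any event whose message matches one of the exclude
+  // patterns (matching is case-insensitive); empty patterns are ignored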
+  batch.test(
+    'appender should exclude events that match the regexp string',
+    (t) => {
+      log4js.configure({
+        appenders: {
+          recorder: { type: 'recording' },
+          filtered: {
+            type: 'noLogFilter',
+            exclude: 'This.*not',
+            appender: 'recorder',
+          },
+        },
+        categories: { default: { appenders: ['filtered'], level: 'DEBUG' } },
+      });
+
+      const logger = log4js.getLogger();
+      logger.debug('This should not get logged');
+      logger.debug('This should get logged');
+      logger.debug(
+        'Another case that does not match the regex, so it should get logged'
+      );
+      const logEvents = recording.replay();
+      t.equal(logEvents.length, 2);
+      t.equal(logEvents[0].data[0], 'This should get logged');
+      t.equal(
+        logEvents[1].data[0],
+        'Another case that does not match the regex, so it should get logged'
+      );
+      t.end();
+    }
+  );
+
+  /**
+   * test an array of regexp
+   */
+  batch.test(
+    'appender should exclude events that match the regexp string contained in the array',
+    (t) => {
+      log4js.configure({
+        appenders: {
+          recorder: { type: 'recording' },
+          filtered: {
+            type: 'noLogFilter',
+            exclude: ['This.*not', 'instead'],
+            appender: 'recorder',
+          },
+        },
+        categories: { default: { appenders: ['filtered'], level: 'DEBUG' } },
+      });
+
+      const logger = log4js.getLogger();
+      logger.debug('This should not get logged');
+      logger.debug('This should get logged');
+      logger.debug(
+        'Another case that does not match the regex, so it should get logged'
+      );
+      logger.debug('This case contains instead, so it should not get logged');
+      logger.debug('The last one that should get logged');
+      const logEvents = recording.replay();
+      t.equal(logEvents.length, 3);
+      t.equal(logEvents[0].data[0], 'This should get logged');
+      t.equal(
+        logEvents[1].data[0],
+        'Another case that does not match the regex, so it should get logged'
+      );
+      t.equal(logEvents[2].data[0], 'The last one that should get logged');
+      t.end();
+    }
+  );
+  /**
+   * test case-insensitive regexp
+   */
+  batch.test(
+    'appender should evaluate the regexp case-insensitively',
+    (t) => {
+      log4js.configure({
+        appenders: {
+          recorder: { type: 'recording' },
+          filtered: {
+            type: 'noLogFilter',
+            exclude: ['NOT', 'eX.*de'],
+            appender: 'recorder',
+          },
+        },
+        categories: { default: { appenders: ['filtered'], level: 'DEBUG' } },
+      });
+
+      const logger = log4js.getLogger();
+
+      logger.debug('This should not get logged');
+      logger.debug('This should get logged');
+      logger.debug('Exclude this string');
+      logger.debug('Include this string');
+      const logEvents = recording.replay();
+      t.equal(logEvents.length, 2);
+      t.equal(logEvents[0].data[0], 'This should get logged');
+      t.equal(logEvents[1].data[0], 'Include this string');
+      t.end();
+    }
+  );
+
+  /**
+   * test empty string or null regexp
+   */
+  batch.test(
+    'appender should skip the match in case of empty or null regexp',
+    (t) => {
+      log4js.configure({
+        appenders: {
+          recorder: { type: 'recording' },
+          filtered: {
+            type: 'noLogFilter',
+            exclude: ['', null, undefined],
+            appender: 'recorder',
+          },
+        },
+        categories: { default: { appenders: ['filtered'], level: 'DEBUG' } },
+      });
+
+      const logger = log4js.getLogger();
+      logger.debug('This should get logged');
+      logger.debug('Another string that should get logged');
+      const logEvents = recording.replay();
+      t.equal(logEvents.length, 2);
+      t.equal(logEvents[0].data[0], 'This should get logged');
+      t.equal(logEvents[1].data[0], 'Another string that should get logged');
+      t.end();
+    }
+  );
+
+  /**
+   * test for excluding all the events that contains digits
+   */
+  batch.test('appender should exclude the events that contains digits', (t) => {
+    log4js.configure({
+      appenders: {
+        recorder: { type: 'recording' },
+        filtered: {
+          type: 'noLogFilter',
+          exclude: '\\d',
+          appender: 'recorder',
+        },
+      },
+      categories: { default: { appenders: ['filtered'], level: 'DEBUG' } },
+    });
+
+    const logger = log4js.getLogger();
+    logger.debug('This should get logged');
+    logger.debug('The 2nd event should not get logged');
+    logger.debug('The 3rd event should not get logged, just like the 2nd');
+    const logEvents = recording.replay();
+    t.equal(logEvents.length, 1);
+    t.equal(logEvents[0].data[0], 'This should get logged');
+    t.end();
+  });
+
+  /**
+   * test the cases provided in the documentation
+   * https://log4js-node.github.io/log4js-node/noLogFilter.html
+   */
+  batch.test(
+    'appender should exclude events as described in the documentation',
+    (t) => {
+      log4js.configure({
+        appenders: {
+          recorder: { type: 'recording' },
+          filtered: {
+            type: 'noLogFilter',
+            exclude: ['NOT', '\\d', ''],
+            appender: 'recorder',
+          },
+        },
+        categories: { default: { appenders: ['filtered'], level: 'DEBUG' } },
+      });
+
+      const logger = log4js.getLogger();
+      logger.debug('I will be logged in all-the-logs.log');
+      logger.debug('I will not be logged in all-the-logs.log');
+      logger.debug('A 2nd message that will be excluded in all-the-logs.log');
+      logger.debug('Hello again');
+      const logEvents = recording.replay();
+      t.equal(logEvents.length, 2);
+      t.equal(logEvents[0].data[0], 'I will be logged in all-the-logs.log');
+      t.equal(logEvents[1].data[0], 'Hello again');
+      t.end();
+    }
+  );
+
+  batch.end();
+});
diff --git a/test/tap/passenger-test.js b/test/tap/passenger-test.js
new file mode 100644
index 00000000..2d3a1945
--- /dev/null
+++ b/test/tap/passenger-test.js
@@ -0,0 +1,53 @@
+const { test } = require('tap');
+const sandbox = require('@log4js-node/sandboxed-module');
+
+// passenger provides a non-functional cluster module,
+// but it does not implement the event emitter functions
+const passengerCluster = {
+  disconnect() {
+    return false;
+  },
+  fork() {
+    return false;
+  },
+  setupMaster() {
+    return false;
+  },
+  isWorker: true,
+  isMaster: false,
+  schedulingPolicy: false,
+  settings: false,
+  worker: false,
+  workers: false,
+};
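+// disableClustering (set in the configure call below) tells log4js to ignore
+// this stub entirely and log in-process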
+
+const vcr = require('../../lib/appenders/recording');
+
+const log4js = sandbox.require('../../lib/log4js', {
+  requires: {
+    cluster: passengerCluster,
+    './appenders/recording': vcr,
+  },
+});
+
+test('When running in Passenger', (batch) => {
+  batch.test('it should still log', (t) => {
+    log4js.configure({
+      appenders: {
+        vcr: { type: 'recording' },
+      },
+      categories: {
+        default: { appenders: ['vcr'], level: 'info' },
+      },
+      disableClustering: true,
+    });
+    log4js.getLogger().info('This should still work');
+
+    const events = vcr.replay();
+    t.equal(events.length, 1);
+    t.equal(events[0].data[0], 'This should still work');
+    t.end();
+  });
+
+  batch.end();
+});
diff --git a/test/tap/pause-test.js b/test/tap/pause-test.js
new file mode 100644
index 00000000..e5ec312c
--- /dev/null
+++ b/test/tap/pause-test.js
@@ -0,0 +1,122 @@
+const tap = require('tap');
+const fs = require('fs');
+const log4js = require('../../lib/log4js');
+
+const removeFiles = async (filenames) => {
+  if (!Array.isArray(filenames)) filenames = [filenames];
+  const promises = filenames.map((filename) => fs.promises.unlink(filename));
+  await Promise.allSettled(promises);
+};
+
+tap.test('Drain event test', (batch) => {
+  batch.test(
+    'Should emit pause event and resume when logging to a file at high frequency',
+    (t) => {
+      t.teardown(async () => {
+        process.off(
+          'log4js:pause',
+          process.listeners('log4js:pause')[
+            process.listeners('log4js:pause').length - 1
+          ]
+        );
+        await removeFiles('logs/drain.log');
+      });
+      // Generate a file logger with a 5KB highWaterMark
+      log4js.configure({
+        appenders: {
+          file: {
+            type: 'file',
+            filename: 'logs/drain.log',
+            highWaterMark: 5 * 1024,
+          },
+        },
+        categories: {
+          default: { appenders: ['file'], level: 'debug' },
+        },
+      });
+
+      let paused = false;
+      let resumed = false;
+
+      process.on('log4js:pause', (value) => {
+        if (value) {
+          paused = true;
+          t.ok(value, 'log4js:pause, true');
+        } else {
+          resumed = true;
+          t.ok(!value, 'log4js:pause, false');
+          t.end();
+        }
+      });
+
+      const logger = log4js.getLogger();
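+      // log synchronously in a tight loop until the file stream's buffer fills
+      // (emitting log4js:pause with true) and then drains (false), which ends the test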
+      while (!paused && !resumed) {
+        if (!paused) {
+          logger.info('This is a test for emitting drain event');
+        }
+      }
+    }
+  );
+
+  batch.test(
+    'Should emit pause event and resume when logging to a date file at high frequency',
+    (t) => {
+      t.teardown(async () => {
+        process.off(
+          'log4js:pause',
+          process.listeners('log4js:pause')[
+            process.listeners('log4js:pause').length - 1
+          ]
+        );
+        await removeFiles('logs/date-file-drain.log');
+      });
+      // Generate a date file logger with a 5KB highWaterMark
+      log4js.configure({
+        appenders: {
+          file: {
+            type: 'dateFile',
+            filename: 'logs/date-file-drain.log',
+            highWaterMark: 5 * 1024,
+          },
+        },
+        categories: {
+          default: { appenders: ['file'], level: 'debug' },
+        },
+      });
+
+      let paused = false;
+      let resumed = false;
+
+      process.on('log4js:pause', (value) => {
+        if (value) {
+          paused = true;
+          t.ok(value, 'log4js:pause, true');
+        } else {
+          resumed = true;
+          t.ok(!value, 'log4js:pause, false');
+          t.end();
+        }
+      });
+
+      const logger = log4js.getLogger();
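+      // same busy loop as above, but exercising the dateFile appender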
+      while (!paused && !resumed) {
+        if (!paused)
+          logger.info(
+            'This is a test for emitting drain event in date file logger'
+          );
+      }
+    }
+  );
+
+  batch.teardown(async () => {
+    try {
+      const files = fs.readdirSync('logs');
+      await removeFiles(files.map((filename) => `logs/${filename}`));
+      fs.rmdirSync('logs');
+    } catch (e) {
+      // doesn't matter
+    }
+  });
+
+  batch.end();
+});
diff --git a/test/tap/pm2-support-test.js b/test/tap/pm2-support-test.js
new file mode 100644
index 00000000..7d5ee440
--- /dev/null
+++ b/test/tap/pm2-support-test.js
@@ -0,0 +1,125 @@
+const { test } = require('tap');
+const cluster = require('cluster');
+const debug = require('debug')('log4js:pm2-test');
+
+// PM2 runs everything as workers
+// - no master in the cluster (PM2 acts as master itself)
+// - we will simulate that here (avoid having to include PM2 as a dev dep)
+if (cluster.isMaster) {
+  // create two worker forks
+  // PASS IN NODE_APP_INSTANCE HERE
+  const appEvents = {};
+  ['0', '1'].forEach((i) => {
+    cluster.fork({ NODE_APP_INSTANCE: i });
+  });
+
+  const messageHandler = (worker, msg) => {
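+    // in some Node versions the cluster 'message' event arrives without the worker
+    // argument, so detect that shape and treat the first argument as the message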
+    if (worker.type || worker.topic) {
+      msg = worker;
+    }
+    if (msg.type === 'testing') {
+      debug(
+        `Received testing message from ${msg.instance} with events ${msg.events}`
+      );
+      appEvents[msg.instance] = msg.events;
+    }
+
+    // we have to do the re-broadcasting that the pm2-intercom module would do.
+    if (msg.topic === 'log4js:message') {
+      debug(`Received log message ${msg}`);
+      for (const id in cluster.workers) {
+        cluster.workers[id].send(msg);
+      }
+    }
+  };
+
+  cluster.on('message', messageHandler);
+
+  let count = 0;
+  cluster.on('exit', () => {
+    count += 1;
+    if (count === 2) {
+      // wait for any IPC messages still to come, because it seems they are slooooow.
+      setTimeout(() => {
+        test('PM2 Support', (batch) => {
+          batch.test('should not get any events when turned off', (t) => {
+            t.notOk(
+              appEvents['0'].filter(
+                (e) => e && e.data[0].indexOf('will not be logged') > -1
+              ).length
+            );
+            t.notOk(
+              appEvents['1'].filter(
+                (e) => e && e.data[0].indexOf('will not be logged') > -1
+              ).length
+            );
+            t.end();
+          });
+
+          batch.test('should get events on app instance 0', (t) => {
+            t.equal(appEvents['0'].length, 2);
+            t.equal(appEvents['0'][0].data[0], 'this should now get logged');
+            t.equal(appEvents['0'][1].data[0], 'this should now get logged');
+            t.end();
+          });
+
+          batch.test('should not get events on app instance 1', (t) => {
+            t.equal(appEvents['1'].length, 0);
+            t.end();
+          });
+          batch.end();
+          cluster.removeListener('message', messageHandler);
+        });
+      }, 1000);
+    }
+  });
+} else {
+  const recorder = require('../../lib/appenders/recording');
+  const log4js = require('../../lib/log4js');
+  log4js.configure({
+    appenders: { out: { type: 'recording' } },
+    categories: { default: { appenders: ['out'], level: 'info' } },
+  });
+
+  const logger = log4js.getLogger('test');
+  logger.info(
+    'this is a test, but without enabling PM2 support it will not be logged'
+  );
+
+  // IPC messages can take a while to get through to start with.
+  setTimeout(() => {
+    log4js.shutdown(() => {
+      log4js.configure({
+        appenders: { out: { type: 'recording' } },
+        categories: { default: { appenders: ['out'], level: 'info' } },
+        pm2: true,
+      });
+      const anotherLogger = log4js.getLogger('test');
+      setTimeout(() => {
+        anotherLogger.info('this should now get logged');
+      }, 1000);
+
+      // if we're the pm2-master we should wait for the other process to send its log messages
+      setTimeout(() => {
+        log4js.shutdown(() => {
+          const events = recorder.replay();
+          debug(
+            `Sending test events ${events} from ${process.env.NODE_APP_INSTANCE}`
+          );
+          process.send(
+            {
+              type: 'testing',
+              instance: process.env.NODE_APP_INSTANCE,
+              events,
+            },
+            () => {
+              setTimeout(() => {
+                cluster.worker.disconnect();
+              }, 1000);
+            }
+          );
+        });
+      }, 3000);
+    });
+  }, 2000);
+}
diff --git a/test/tap/recordingAppender-test.js b/test/tap/recordingAppender-test.js
new file mode 100644
index 00000000..574c08f7
--- /dev/null
+++ b/test/tap/recordingAppender-test.js
@@ -0,0 +1,79 @@
+const { test } = require('tap');
+const log4js = require('../../lib/log4js');
+
+test('recording appender', (batch) => {
+  batch.test('should store logs in memory until cleared', (t) => {
+    log4js.configure({
+      appenders: { rec: { type: 'recording' } },
+      categories: { default: { appenders: ['rec'], level: 'debug' } },
+    });
+
+    const logger = log4js.getLogger();
+    logger.level = 'debug';
+    logger.debug('This will go to the recording!');
+    logger.debug('Another one');
+
+    const recording = log4js.recording();
+    const loggingEvents = recording.playback();
+
+    t.equal(loggingEvents.length, 2, 'There should be 2 recorded events');
+    t.equal(loggingEvents[0].data[0], 'This will go to the recording!');
+    t.equal(loggingEvents[1].data[0], 'Another one');
+
+    recording.reset();
+    const loggingEventsPostReset = recording.playback();
+
+    t.equal(
+      loggingEventsPostReset.length,
+      0,
+      'There should be 0 recorded events'
+    );
+
+    t.end();
+  });
+
+  batch.test('should store 2 rolling logs in memory until cleared', (t) => {
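+    // maxLength caps the recording buffer at two events, so the oldest entry is
+    // dropped when a third one arrives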
+    log4js.configure({
+      appenders: { rec2: { type: 'recording', maxLength: 2 } },
+      categories: { default: { appenders: ['rec2'], level: 'debug' } },
+    });
+
+    const logger = log4js.getLogger();
+    logger.level = 'debug';
+    logger.debug('First log entry');
+    logger.debug('Second log entry');
+
+    const recording = log4js.recording();
+
+    t.equal(
+      recording.playback().length,
+      2,
+      'There should be 2 recorded events'
+    );
+    t.equal(recording.playback()[0].data[0], 'First log entry');
+    t.equal(recording.playback()[1].data[0], 'Second log entry');
+
+    logger.debug('Third log entry');
+
+    t.equal(
+      recording.playback().length,
+      2,
+      'There should still be 2 recorded events'
+    );
+    t.equal(recording.playback()[0].data[0], 'Second log entry');
+    t.equal(recording.playback()[1].data[0], 'Third log entry');
+
+    recording.reset();
+    const loggingEventsPostReset = recording.playback();
+
+    t.equal(
+      loggingEventsPostReset.length,
+      0,
+      'There should be 0 recorded events'
+    );
+
+    t.end();
+  });
+
+  batch.end();
+});
diff --git a/test/tap/reload-shutdown-test.js b/test/tap/reload-shutdown-test.js
deleted file mode 100644
index 7b3175f3..00000000
--- a/test/tap/reload-shutdown-test.js
+++ /dev/null
@@ -1,34 +0,0 @@
-'use strict';
-
-const test = require('tap').test;
-const path = require('path');
-const sandbox = require('sandboxed-module');
-
-test('Reload configuration shutdown hook', (t) => {
-  let timerId;
-
-  const log4js = sandbox.require(
-    '../../lib/log4js',
-    {
-      globals: {
-        clearInterval: function (id) {
-          timerId = id;
-        },
-        setInterval: function () {
-          return '1234';
-        }
-      }
-    }
-  );
-
-  log4js.configure(
-    path.join(__dirname, 'test-config.json'),
-    { reloadSecs: 30 }
-  );
-
-  t.plan(1);
-  log4js.shutdown(() => {
-    t.equal(timerId, '1234', 'Shutdown should clear the reload timer');
-    t.end();
-  });
-});
diff --git a/test/tap/reloadConfiguration-test.js b/test/tap/reloadConfiguration-test.js
deleted file mode 100644
index 6ce338ca..00000000
--- a/test/tap/reloadConfiguration-test.js
+++ /dev/null
@@ -1,350 +0,0 @@
-'use strict';
-
-const test = require('tap').test;
-const sandbox = require('sandboxed-module');
-
-function setupConsoleTest() {
-  const fakeConsole = {};
-  const logEvents = [];
-
-  ['trace', 'debug', 'log', 'info', 'warn', 'error'].forEach((fn) => {
-    fakeConsole[fn] = function () {
-      throw new Error('this should not be called.');
-    };
-  });
-
-  const log4js = sandbox.require(
-    '../../lib/log4js',
-    {
-      globals: {
-        console: fakeConsole
-      }
-    }
-  );
-
-  log4js.clearAppenders();
-  log4js.addAppender((evt) => {
-    logEvents.push(evt);
-  });
-
-  return { log4js: log4js, logEvents: logEvents, fakeConsole: fakeConsole };
-}
-
-test('reload configuration', (batch) => {
-  batch.test('with config file changing', (t) => {
-    const pathsChecked = [];
-    const logEvents = [];
-    const modulePath = 'path/to/log4js.json';
-
-    const fakeFS = {
-      lastMtime: Date.now(),
-      config: {
-        appenders: [
-          { type: 'console', layout: { type: 'messagePassThrough' } }
-        ],
-        levels: { 'a-test': 'INFO' }
-      },
-      readFileSync: function (file, encoding) {
-        t.equal(file, modulePath);
-        t.equal(encoding, 'utf8');
-        return JSON.stringify(fakeFS.config);
-      },
-      statSync: function (path) {
-        pathsChecked.push(path);
-        if (path === modulePath) {
-          fakeFS.lastMtime += 1;
-          return { mtime: new Date(fakeFS.lastMtime) };
-        }
-        throw new Error('no such file');
-      }
-    };
-
-    const fakeConsole = {
-      name: 'console',
-      appender: function () {
-        return function (evt) {
-          logEvents.push(evt);
-        };
-      },
-      configure: function () {
-        return fakeConsole.appender();
-      }
-    };
-
-    let setIntervalCallback;
-
-    const fakeSetInterval = function (cb) {
-      setIntervalCallback = cb;
-    };
-
-    const log4js = sandbox.require(
-      '../../lib/log4js',
-      {
-        requires: {
-          fs: fakeFS,
-          './appenders/console': fakeConsole
-        },
-        globals: {
-          console: fakeConsole,
-          setInterval: fakeSetInterval,
-        }
-      }
-    );
-
-    log4js.configure('path/to/log4js.json', { reloadSecs: 30 });
-    const logger = log4js.getLogger('a-test');
-    logger.info('info1');
-    logger.debug('debug2 - should be ignored');
-    fakeFS.config.levels['a-test'] = 'DEBUG';
-    setIntervalCallback();
-    logger.info('info3');
-    logger.debug('debug4');
-
-    t.test('should configure log4js from first log4js.json found', (assert) => {
-      assert.equal(logEvents[0].data[0], 'info1');
-      assert.equal(logEvents[1].data[0], 'info3');
-      assert.equal(logEvents[2].data[0], 'debug4');
-      assert.equal(logEvents.length, 3);
-      assert.end();
-    });
-    t.end();
-  });
-
-  batch.test('with config file staying the same', (t) => {
-    const pathsChecked = [];
-    let fileRead = 0;
-    const logEvents = [];
-    const modulePath = require('path').normalize(`${__dirname}/../../lib/log4js.json`);
-    const mtime = new Date();
-
-    const fakeFS = {
-      config: {
-        appenders: [
-          { type: 'console', layout: { type: 'messagePassThrough' } }
-        ],
-        levels: { 'a-test': 'INFO' }
-      },
-      readFileSync: function (file, encoding) {
-        fileRead += 1;
-        t.type(file, 'string');
-        t.equal(file, modulePath);
-        t.equal(encoding, 'utf8');
-        return JSON.stringify(fakeFS.config);
-      },
-      statSync: function (path) {
-        pathsChecked.push(path);
-        if (path === modulePath) {
-          return { mtime: mtime };
-        }
-        throw new Error('no such file');
-      }
-    };
-
-    const fakeConsole = {
-      name: 'console',
-      appender: function () {
-        return function (evt) {
-          logEvents.push(evt);
-        };
-      },
-      configure: function () {
-        return fakeConsole.appender();
-      }
-    };
-
-    let setIntervalCallback;
-
-    const fakeSetInterval = function (cb) {
-      setIntervalCallback = cb;
-    };
-
-    const log4js = sandbox.require(
-      '../../lib/log4js',
-      {
-        requires: {
-          fs: fakeFS,
-          './appenders/console': fakeConsole
-        },
-        globals: {
-          console: fakeConsole,
-          setInterval: fakeSetInterval,
-        }
-      }
-    );
-
-    log4js.configure(modulePath, { reloadSecs: 3 });
-    const logger = log4js.getLogger('a-test');
-    logger.info('info1');
-    logger.debug('debug2 - should be ignored');
-    setIntervalCallback();
-    logger.info('info3');
-    logger.debug('debug4');
-
-    t.equal(fileRead, 1, 'should only read the configuration file once');
-    t.test('should configure log4js from first log4js.json found', (assert) => {
-      assert.equal(logEvents.length, 2);
-      assert.equal(logEvents[0].data[0], 'info1');
-      assert.equal(logEvents[1].data[0], 'info3');
-      assert.end();
-    });
-    t.end();
-  });
-
-  batch.test('when config file is removed', (t) => {
-    let fileRead = 0;
-    const logEvents = [];
-    const modulePath = require('path').normalize(`${__dirname}/../../lib/log4js.json`);
-
-    const fakeFS = {
-      config: {
-        appenders: [
-          { type: 'console', layout: { type: 'messagePassThrough' } }
-        ],
-        levels: { 'a-test': 'INFO' }
-      },
-      readFileSync: function (file, encoding) {
-        fileRead += 1;
-        t.type(file, 'string');
-        t.equal(file, modulePath);
-        t.equal(encoding, 'utf8');
-        return JSON.stringify(fakeFS.config);
-      },
-      statSync: function () {
-        this.statSync = function () {
-          throw new Error('no such file');
-        };
-        return { mtime: new Date() };
-      }
-    };
-
-    const fakeConsole = {
-      name: 'console',
-      appender: function () {
-        return function (evt) {
-          logEvents.push(evt);
-        };
-      },
-      configure: function () {
-        return fakeConsole.appender();
-      }
-    };
-
-    let setIntervalCallback;
-
-    const fakeSetInterval = function (cb) {
-      setIntervalCallback = cb;
-    };
-
-    const log4js = sandbox.require(
-      '../../lib/log4js',
-      {
-        requires: {
-          fs: fakeFS,
-          './appenders/console': fakeConsole
-        },
-        globals: {
-          console: fakeConsole,
-          setInterval: fakeSetInterval,
-        }
-      }
-    );
-
-    log4js.configure(modulePath, { reloadSecs: 3 });
-    const logger = log4js.getLogger('a-test');
-    logger.info('info1');
-    logger.debug('debug2 - should be ignored');
-    setIntervalCallback();
-    logger.info('info3');
-    logger.debug('debug4');
-
-    t.equal(fileRead, 1, 'should only read the configuration file once');
-    t.test('should not clear configuration when config file not found', (assert) => {
-      assert.equal(logEvents.length, 3);
-      assert.equal(logEvents[0].data[0], 'info1');
-      assert.equal(logEvents[1].level.toString(), 'WARN');
-      assert.include(logEvents[1].data[0], 'Failed to load configuration file');
-      assert.equal(logEvents[2].data[0], 'info3');
-      assert.end();
-    });
-    t.end();
-  });
-
-  batch.test('when passed an object', (t) => {
-    const setup = setupConsoleTest();
-    setup.log4js.configure({}, { reloadSecs: 30 });
-    const events = setup.logEvents;
-
-    t.test('should log a warning', (assert) => {
-      assert.equal(events[0].level.toString(), 'WARN');
-      assert.equal(
-        events[0].data[0],
-        'Ignoring configuration reload parameter for "object" configuration.'
-      );
-      assert.end();
-    });
-    t.end();
-  });
-
-  batch.test('when called twice with reload options', (t) => {
-    const modulePath = require('path').normalize(`${__dirname}/../../lib/log4js.json`);
-
-    const fakeFS = {
-      readFileSync: function () {
-        return JSON.stringify({});
-      },
-      statSync: function () {
-        return { mtime: new Date() };
-      }
-    };
-
-    const fakeConsole = {
-      name: 'console',
-      appender: function () {
-        return function () {
-        };
-      },
-      configure: function () {
-        return fakeConsole.appender();
-      }
-    };
-
-    let setIntervalCallback; // eslint-disable-line
-    let intervalCleared = false;
-    let clearedId;
-
-    const fakeSetInterval = function (cb) {
-      setIntervalCallback = cb;
-      return 1234;
-    };
-
-    const log4js = sandbox.require(
-      '../../lib/log4js',
-      {
-        requires: {
-          fs: fakeFS,
-          './appenders/console': fakeConsole
-        },
-        globals: {
-          console: fakeConsole,
-          setInterval: fakeSetInterval,
-          clearInterval: function (interval) {
-            intervalCleared = true;
-            clearedId = interval;
-          }
-        }
-      }
-    );
-
-    log4js.configure(modulePath, { reloadSecs: 3 });
-    log4js.configure(modulePath, { reloadSecs: 15 });
-
-    t.test('should clear the previous interval', (assert) => {
-      assert.ok(intervalCleared);
-      assert.equal(clearedId, 1234);
-      assert.end();
-    });
-    t.end();
-  });
-
-  batch.end();
-});
diff --git a/test/tap/server-test.js b/test/tap/server-test.js
new file mode 100644
index 00000000..2277b838
--- /dev/null
+++ b/test/tap/server-test.js
@@ -0,0 +1,192 @@
+const { test } = require('tap');
+const net = require('net');
+const log4js = require('../../lib/log4js');
+const vcr = require('../../lib/appenders/recording');
+const levels = require('../../lib/levels');
+const LoggingEvent = require('../../lib/LoggingEvent');
+
+test('TCP Server', (batch) => {
+  batch.test(
+    'should listen for TCP messages and re-send via process.send',
+    (t) => {
+      log4js.configure({
+        appenders: {
+          vcr: { type: 'recording' },
+          tcp: { type: 'tcp-server', port: 5678 },
+        },
+        categories: {
+          default: { appenders: ['vcr'], level: 'debug' },
+        },
+      });
+      // give the socket a chance to start up
+      setTimeout(() => {
+        const socket = net.connect(5678, () => {
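+          // the tcp-server splits incoming data on the __LOG4JS__ delimiter, so this
+          // single write carries two valid serialised events plus two unparseable payloads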
+          socket.write(
+            `${new LoggingEvent(
+              'test-category',
+              levels.INFO,
+              ['something'],
+              {}
+            ).serialise()}__LOG4JS__${new LoggingEvent(
+              'test-category',
+              levels.INFO,
+              ['something else'],
+              {}
+            ).serialise()}__LOG4JS__some nonsense__LOG4JS__{"some":"json"}__LOG4JS__`,
+            () => {
+              socket.end();
+              setTimeout(() => {
+                log4js.shutdown(() => {
+                  const logs = vcr.replay();
+                  t.equal(logs.length, 4);
+                  t.match(logs[0], {
+                    data: ['something'],
+                    categoryName: 'test-category',
+                    level: { levelStr: 'INFO' },
+                    context: {},
+                  });
+                  t.match(logs[1], {
+                    data: ['something else'],
+                    categoryName: 'test-category',
+                    level: { levelStr: 'INFO' },
+                    context: {},
+                  });
+                  t.match(logs[2], {
+                    data: [
+                      'Unable to parse log:',
+                      'some nonsense',
+                      'because: ',
+                      SyntaxError,
+                    ],
+                    categoryName: 'log4js',
+                    level: { levelStr: 'ERROR' },
+                    context: {},
+                  });
+                  t.match(logs[3], {
+                    data: [
+                      'Unable to parse log:',
+                      '{"some":"json"}',
+                      'because: ',
+                      TypeError,
+                    ],
+                    categoryName: 'log4js',
+                    level: { levelStr: 'ERROR' },
+                    context: {},
+                  });
+                  t.end();
+                });
+              }, 100);
+            }
+          );
+        });
+
+        socket.unref();
+      }, 100);
+    }
+  );
+
+  batch.test('sending incomplete messages in chunks', (t) => {
+    log4js.configure({
+      appenders: {
+        vcr: { type: 'recording' },
+        tcp: { type: 'tcp-server' },
+      },
+      categories: {
+        default: { appenders: ['vcr'], level: 'debug' },
+      },
+    });
+    // give the socket a chance to start up
+    setTimeout(() => {
+      const socket = net.connect(5000, () => {
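+        // write the chunks one at a time with a 100ms gap, forcing the server to
+        // stitch messages back together across separate TCP packets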
+        const syncWrite = (dataArray, finalCallback) => {
+          if (!Array.isArray(dataArray)) {
+            dataArray = [dataArray];
+          }
+          if (typeof finalCallback !== 'function') {
+            finalCallback = () => {};
+          }
+          setTimeout(() => {
+            if (!dataArray.length) {
+              finalCallback();
+            } else if (dataArray.length === 1) {
+              socket.write(dataArray.shift(), finalCallback);
+            } else {
+              socket.write(dataArray.shift(), () => {
+                syncWrite(dataArray, finalCallback);
+              });
+            }
+          }, 100);
+        };
+
+        const dataArray = [
+          '__LOG4JS__',
+          'Hello__LOG4JS__World',
+          '__LOG4JS__',
+          'testing nonsense',
+          `__LOG4JS__more nonsense__LOG4JS__`,
+        ];
+
+        const finalCallback = () => {
+          socket.end();
+          setTimeout(() => {
+            log4js.shutdown(() => {
+              const logs = vcr.replay();
+              t.equal(logs.length, 8);
+              t.match(logs[4], {
+                data: [
+                  'Unable to parse log:',
+                  'Hello',
+                  'because: ',
+                  SyntaxError,
+                ],
+                categoryName: 'log4js',
+                level: { levelStr: 'ERROR' },
+                context: {},
+              });
+              t.match(logs[5], {
+                data: [
+                  'Unable to parse log:',
+                  'World',
+                  'because: ',
+                  SyntaxError,
+                ],
+                categoryName: 'log4js',
+                level: { levelStr: 'ERROR' },
+                context: {},
+              });
+              t.match(logs[6], {
+                data: [
+                  'Unable to parse log:',
+                  'testing nonsense',
+                  'because: ',
+                  SyntaxError,
+                ],
+                categoryName: 'log4js',
+                level: { levelStr: 'ERROR' },
+                context: {},
+              });
+              t.match(logs[7], {
+                data: [
+                  'Unable to parse log:',
+                  'more nonsense',
+                  'because: ',
+                  SyntaxError,
+                ],
+                categoryName: 'log4js',
+                level: { levelStr: 'ERROR' },
+                context: {},
+              });
+              t.end();
+            });
+          }, 100);
+        };
+
+        syncWrite(dataArray, finalCallback);
+      });
+
+      socket.unref();
+    }, 100);
+  });
+
+  batch.end();
+});
diff --git a/test/tap/setLevel-asymmetry-test.js b/test/tap/setLevel-asymmetry-test.js
index c3d52220..4a9148ba 100644
--- a/test/tap/setLevel-asymmetry-test.js
+++ b/test/tap/setLevel-asymmetry-test.js
@@ -1,6 +1,3 @@
-'use strict';
-
-/* jshint loopfunc: true */
 // This test shows an asymmetry between setLevel and isLevelEnabled
 // (in log4js-node@0.4.3 and earlier):
 // 1) setLevel("foo") works, but setLevel(log4js.levels.foo) silently
@@ -8,22 +5,22 @@
 // 2) isLevelEnabled("foo") works as does isLevelEnabled(log4js.levels.foo).
 //
 
-const test = require('tap').test;
+const { test } = require('tap');
 const log4js = require('../../lib/log4js');
 
 const logger = log4js.getLogger('test-setLevel-asymmetry');
 
 // Define the array of levels as string to iterate over.
 const strLevels = ['Trace', 'Debug', 'Info', 'Warn', 'Error', 'Fatal'];
-const log4jsLevels = strLevels.map(log4js.levels.toLevel);
+const log4jsLevels = strLevels.map(log4js.levels.getLevel);
 
 test('log4js setLevel', (batch) => {
   strLevels.forEach((strLevel) => {
     batch.test(`is called with a ${strLevel} as string`, (t) => {
-      const log4jsLevel = log4js.levels.toLevel(strLevel);
+      const log4jsLevel = log4js.levels.getLevel(strLevel);
 
       t.test('should convert string to level correctly', (assert) => {
-        logger.setLevel(strLevel);
+        logger.level = strLevel;
         log4jsLevels.forEach((level) => {
           assert.equal(
             logger.isLevelEnabled(level),
@@ -34,7 +31,7 @@ test('log4js setLevel', (batch) => {
       });
 
       t.test('should also accept a Level', (assert) => {
-        logger.setLevel(log4jsLevel);
+        logger.level = log4jsLevel;
         log4jsLevels.forEach((level) => {
           assert.equal(
             logger.isLevelEnabled(level),
diff --git a/test/tap/slackAppender-test.js b/test/tap/slackAppender-test.js
deleted file mode 100644
index acc1bbbc..00000000
--- a/test/tap/slackAppender-test.js
+++ /dev/null
@@ -1,157 +0,0 @@
-'use strict';
-
-const test = require('tap').test;
-const log4js = require('../../lib/log4js');
-const sandbox = require('sandboxed-module');
-
-function setupLogging(category, options) {
-  const msgs = [];
-
-  const slackCredentials = {
-    token: options.token,
-    channel_id: options.channel_id,
-    username: options.username,
-    format: options.format,
-    icon_url: options.icon_url
-  };
-  const fakeSlack = (function (key) {
-    function constructor() {
-      return {
-        options: key,
-        api: function (action, data, callback) {
-          msgs.push(data);
-          callback(false, { status: 'sent' });
-        }
-      };
-    }
-
-    return constructor(key);
-  });
-
-  const fakeLayouts = {
-    layout: function (type, config) {
-      this.type = type;
-      this.config = config;
-      return log4js.layouts.messagePassThroughLayout;
-    },
-    basicLayout: log4js.layouts.basicLayout,
-    coloredLayout: log4js.layouts.coloredLayout,
-    messagePassThroughLayout: log4js.layouts.messagePassThroughLayout
-  };
-
-  const fakeConsole = {
-    errors: [],
-    logs: [],
-    error: function (msg, value) {
-      this.errors.push({ msg: msg, value: value });
-    },
-    log: function (msg, value) {
-      this.logs.push({ msg: msg, value: value });
-    }
-  };
-
-  const slackModule = sandbox.require('../../lib/appenders/slack', {
-    requires: {
-      'slack-node': fakeSlack,
-      '../layouts': fakeLayouts
-    },
-    globals: {
-      console: fakeConsole
-    }
-  });
-
-  log4js.addAppender(slackModule.configure(options), category);
-
-  return {
-    logger: log4js.getLogger(category),
-    mailer: fakeSlack,
-    layouts: fakeLayouts,
-    console: fakeConsole,
-    messages: msgs,
-    credentials: slackCredentials
-  };
-}
-
-function checkMessages(assert, result) {
-  for (let i = 0; i < result.messages.length; ++i) {
-    assert.equal(result.messages[i].channel, '#CHANNEL');
-    assert.equal(result.messages[i].username, 'USERNAME');
-    assert.ok(new RegExp(`.+Log event #${i + 1}`).test(result.messages[i].text));
-  }
-}
-
-log4js.clearAppenders();
-
-test('log4js slackAppender', (batch) => {
-  batch.test('slack setup', (t) => {
-    const result = setupLogging('slack setup', {
-      token: 'TOKEN',
-      channel_id: '#CHANNEL',
-      username: 'USERNAME',
-      format: 'FORMAT',
-      icon_url: 'ICON_URL'
-    });
-
-    t.test('slack credentials should match', (assert) => {
-      assert.equal(result.credentials.token, 'TOKEN');
-      assert.equal(result.credentials.channel_id, '#CHANNEL');
-      assert.equal(result.credentials.username, 'USERNAME');
-      assert.equal(result.credentials.format, 'FORMAT');
-      assert.equal(result.credentials.icon_url, 'ICON_URL');
-      assert.end();
-    });
-    t.end();
-  });
-
-  batch.test('basic usage', (t) => {
-    const setup = setupLogging('basic usage', {
-      token: 'TOKEN',
-      channel_id: '#CHANNEL',
-      username: 'USERNAME',
-      format: 'FORMAT',
-      icon_url: 'ICON_URL',
-    });
-
-    setup.logger.info('Log event #1');
-
-    t.equal(setup.messages.length, 1, 'should be one message only');
-    checkMessages(t, setup);
-    t.end();
-  });
-
-  batch.test('config with layout', (t) => {
-    const result = setupLogging('config with layout', {
-      layout: {
-        type: 'tester'
-      }
-    });
-    t.equal(result.layouts.type, 'tester', 'should configure layout');
-    t.end();
-  });
-
-  batch.test('separate notification for each event', (t) => {
-    const setup = setupLogging('separate notification for each event', {
-      token: 'TOKEN',
-      channel_id: '#CHANNEL',
-      username: 'USERNAME',
-      format: 'FORMAT',
-      icon_url: 'ICON_URL',
-    });
-    setTimeout(() => {
-      setup.logger.info('Log event #1');
-    }, 0);
-    setTimeout(() => {
-      setup.logger.info('Log event #2');
-    }, 500);
-    setTimeout(() => {
-      setup.logger.info('Log event #3');
-    }, 1100);
-    setTimeout(() => {
-      t.equal(setup.messages.length, 3, 'should be three messages');
-      checkMessages(t, setup);
-      t.end();
-    }, 3000);
-  });
-
-  batch.end();
-});
diff --git a/test/tap/smtpAppender-test.js b/test/tap/smtpAppender-test.js
deleted file mode 100644
index fef1361a..00000000
--- a/test/tap/smtpAppender-test.js
+++ /dev/null
@@ -1,287 +0,0 @@
-'use strict';
-
-const test = require('tap').test;
-const log4js = require('../../lib/log4js');
-const sandbox = require('sandboxed-module');
-
-function setupLogging(category, options) {
-  const msgs = [];
-
-  const fakeMailer = {
-    createTransport: function (name, opts) {
-      return {
-        config: opts,
-        sendMail: function (msg, callback) {
-          msgs.push(msg);
-          callback(null, true);
-        },
-        close: function () {
-        }
-      };
-    }
-  };
-
-  const fakeLayouts = {
-    layout: function (type, config) {
-      this.type = type;
-      this.config = config;
-      return log4js.layouts.messagePassThroughLayout;
-    },
-    basicLayout: log4js.layouts.basicLayout,
-    messagePassThroughLayout: log4js.layouts.messagePassThroughLayout
-  };
-
-  const fakeConsole = {
-    errors: [],
-    error: function (msg, value) {
-      this.errors.push({ msg: msg, value: value });
-    }
-  };
-
-  const fakeTransportPlugin = function () {
-  };
-
-  const smtpModule = sandbox.require('../../lib/appenders/smtp', {
-    singleOnly: true,
-    requires: {
-      nodemailer: fakeMailer,
-      'nodemailer-sendmail-transport': fakeTransportPlugin,
-      'nodemailer-smtp-transport': fakeTransportPlugin,
-      '../layouts': fakeLayouts
-    },
-    globals: {
-      console: fakeConsole
-    }
-  });
-
-  log4js.addAppender(smtpModule.configure(options), category);
-
-  return {
-    logger: log4js.getLogger(category),
-    mailer: fakeMailer,
-    layouts: fakeLayouts,
-    console: fakeConsole,
-    results: msgs
-  };
-}
-
-function checkMessages(assert, result, sender, subject) {
-  for (let i = 0; i < result.results.length; ++i) {
-    assert.equal(result.results[i].from, sender);
-    assert.equal(result.results[i].to, 'recipient@domain.com');
-    assert.equal(result.results[i].subject, subject ? subject : `Log event #${i + 1}`); // eslint-disable-line
-    assert.ok(new RegExp(`.+Log event #${i + 1}\n$`).test(result.results[i].text));
-  }
-}
-
-log4js.clearAppenders();
-
-test('log4js smtpAppender', (batch) => {
-  batch.test('minimal config', (t) => {
-    const setup = setupLogging('minimal config', {
-      recipients: 'recipient@domain.com',
-      SMTP: {
-        port: 25,
-        auth: {
-          user: 'user@domain.com'
-        }
-      }
-    });
-    setup.logger.info('Log event #1');
-
-    t.equal(setup.results.length, 1, 'should be one message only');
-    checkMessages(t, setup);
-    t.end();
-  });
-
-  batch.test('fancy config', (t) => {
-    const setup = setupLogging('fancy config', {
-      recipients: 'recipient@domain.com',
-      sender: 'sender@domain.com',
-      subject: 'This is subject',
-      SMTP: {
-        port: 25,
-        auth: {
-          user: 'user@domain.com'
-        }
-      }
-    });
-    setup.logger.info('Log event #1');
-
-    t.equal(setup.results.length, 1, 'should be one message only');
-    checkMessages(t, setup, 'sender@domain.com', 'This is subject');
-    t.end();
-  });
-
-  batch.test('config with layout', (t) => {
-    const setup = setupLogging('config with layout', {
-      layout: {
-        type: 'tester'
-      }
-    });
-    t.equal(setup.layouts.type, 'tester', 'should configure layout');
-    t.end();
-  });
-
-  batch.test('separate email for each event', (t) => {
-    const setup = setupLogging('separate email for each event', {
-      recipients: 'recipient@domain.com',
-      SMTP: {
-        port: 25,
-        auth: {
-          user: 'user@domain.com'
-        }
-      }
-    });
-    setTimeout(() => {
-      setup.logger.info('Log event #1');
-    }, 0);
-    setTimeout(() => {
-      setup.logger.info('Log event #2');
-    }, 500);
-    setTimeout(() => {
-      setup.logger.info('Log event #3');
-    }, 1100);
-    setTimeout(() => {
-      t.equal(setup.results.length, 3, 'there should be three messages');
-      checkMessages(t, setup);
-      t.end();
-    }, 3000);
-  });
-
-  batch.test('multiple events in one email', (t) => {
-    const setup = setupLogging('multiple events in one email', {
-      recipients: 'recipient@domain.com',
-      sendInterval: 1,
-      SMTP: {
-        port: 25,
-        auth: {
-          user: 'user@domain.com'
-        }
-      }
-    });
-    setTimeout(() => {
-      setup.logger.info('Log event #1');
-    }, 0);
-    setTimeout(() => {
-      setup.logger.info('Log event #2');
-    }, 100);
-    setTimeout(() => {
-      setup.logger.info('Log event #3');
-    }, 1500);
-    setTimeout(() => {
-      t.equal(setup.results.length, 2, 'there should be two messages');
-      t.equal(setup.results[0].to, 'recipient@domain.com');
-      t.equal(setup.results[0].subject, 'Log event #1');
-      t.equal(
-        setup.results[0].text.match(new RegExp('.+Log event #[1-2]$', 'gm')).length,
-        2
-      );
-      t.equal(setup.results[1].to, 'recipient@domain.com');
-      t.equal(setup.results[1].subject, 'Log event #3');
-      t.ok(/.+Log event #3\n$/.test(setup.results[1].text));
-      t.end();
-    }, 3000);
-  });
-
-  batch.test('error when sending email', (t) => {
-    const setup = setupLogging('error when sending email', {
-      recipients: 'recipient@domain.com',
-      sendInterval: 0,
-      SMTP: { port: 25, auth: { user: 'user@domain.com' } }
-    });
-
-    setup.mailer.createTransport = function () {
-      return {
-        sendMail: function (msg, cb) {
-          cb({ message: 'oh noes' });
-        },
-        close: function () {
-        }
-      };
-    };
-
-    setup.logger.info('This will break');
-
-    t.test('should be logged to console', (assert) => {
-      assert.equal(setup.console.errors.length, 1);
-      assert.equal(setup.console.errors[0].msg, 'log4js.smtpAppender - Error happened');
-      assert.equal(setup.console.errors[0].value.message, 'oh noes');
-      assert.end();
-    });
-    t.end();
-  });
-
-  batch.test('transport full config', (t) => {
-    const setup = setupLogging('transport full config', {
-      recipients: 'recipient@domain.com',
-      transport: {
-        plugin: 'sendmail',
-        options: {
-          path: '/usr/sbin/sendmail'
-        }
-      }
-    });
-    setup.logger.info('Log event #1');
-
-    t.equal(setup.results.length, 1, 'should be one message only');
-    checkMessages(t, setup);
-    t.end();
-  });
-
-  batch.test('transport no-options config', (t) => {
-    const setup = setupLogging('transport no-options config', {
-      recipients: 'recipient@domain.com',
-      transport: {
-        plugin: 'sendmail'
-      }
-    });
-    setup.logger.info('Log event #1');
-
-    t.equal(setup.results.length, 1, 'should be one message only');
-    checkMessages(t, setup);
-    t.end();
-  });
-
-  batch.test('transport no-plugin config', (t) => {
-    const setup = setupLogging('transport no-plugin config', {
-      recipients: 'recipient@domain.com',
-      transport: {}
-    });
-    setup.logger.info('Log event #1');
-
-    t.equal(setup.results.length, 1, 'should be one message only');
-    checkMessages(t, setup);
-    t.end();
-  });
-
-  batch.test('attachment config', (t) => {
-    const setup = setupLogging('attachment config', {
-      recipients: 'recipient@domain.com',
-      attachment: {
-        enable: true
-      },
-      SMTP: {
-        port: 25,
-        auth: {
-          user: 'user@domain.com'
-        }
-      }
-    });
-    setup.logger.info('Log event #1');
-
-    t.test('message should contain proper data', (assert) => {
-      assert.equal(setup.results.length, 1);
-      assert.equal(setup.results[0].attachments.length, 1);
-      const attachment = setup.results[0].attachments[0];
-      assert.equal(setup.results[0].text, 'See logs as attachment');
-      assert.equal(attachment.filename, 'default.log');
-      assert.equal(attachment.contentType, 'text/x-log');
-      assert.ok(new RegExp(`.+Log event #${1}\n$`).test(attachment.content));
-      assert.end();
-    });
-    t.end();
-  });
-
-  batch.end();
-});
diff --git a/test/tap/stacktraces-test.js b/test/tap/stacktraces-test.js
new file mode 100644
index 00000000..d8b018fc
--- /dev/null
+++ b/test/tap/stacktraces-test.js
@@ -0,0 +1,29 @@
+const { test } = require('tap');
+
+test('Stacktraces from errors in different VM context', (t) => {
+  const log4js = require('../../lib/log4js');
+  const recorder = require('../../lib/appenders/recording');
+  const layout = require('../../lib/layouts').basicLayout;
+  const vm = require('vm');
+
+  log4js.configure({
+    appenders: { vcr: { type: 'recording' } },
+    categories: { default: { appenders: ['vcr'], level: 'debug' } },
+  });
+
+  const logger = log4js.getLogger();
+
+  try {
+    // Access not defined variable.
+    vm.runInNewContext('myVar();', {}, 'myfile.js');
+  } catch (e) {
+    // Expect to have a stack trace printed.
+    logger.error(e);
+  }
+
+  const events = recorder.replay();
+  // recording appender events do not go through layouts, so let's do it
+  const output = layout(events[0]);
+  t.match(output, 'stacktraces-test.js');
+  t.end();
+});
diff --git a/test/tap/stderrAppender-test.js b/test/tap/stderrAppender-test.js
index 9fd4871c..3c107103 100644
--- a/test/tap/stderrAppender-test.js
+++ b/test/tap/stderrAppender-test.js
@@ -1,26 +1,26 @@
-'use strict';
-
-const test = require('tap').test;
+const { test } = require('tap');
+const sandbox = require('@log4js-node/sandboxed-module');
 const layouts = require('../../lib/layouts');
-const sandbox = require('sandboxed-module');
 
 test('stderr appender', (t) => {
   const output = [];
 
-  const appender = sandbox.require(
-    '../../lib/appenders/stderr',
-    {
+  const appender = sandbox
+    .require('../../lib/appenders/stderr', {
       globals: {
         process: {
           stderr: {
-            write: function (data) {
+            write(data) {
               output.push(data);
-            }
-          }
-        }
-      }
-    }
-  ).appender(layouts.messagePassThroughLayout);
+            },
+          },
+        },
+      },
+    })
+    .configure(
+      { type: 'stderr', layout: { type: 'messagePassThrough' } },
+      layouts
+    );
 
   appender({ data: ['biscuits'] });
   t.plan(2);
@@ -28,3 +28,32 @@ test('stderr appender', (t) => {
   t.equal(output[0], 'biscuits\n', 'The message should be biscuits.');
   t.end();
 });
+
+test('stderr appender with default layout', (t) => {
+  const output = [];
+  layouts.colouredLayout = () => 'I used the colouredLayout';
+
+  const appender = sandbox
+    .require('../../lib/appenders/stderr', {
+      globals: {
+        process: {
+          stderr: {
+            write(data) {
+              output.push(data);
+            },
+          },
+        },
+      },
+    })
+    .configure({ type: 'stderr' }, layouts);
+
+  appender({ data: ['biscuits'] });
+  t.plan(2);
+  t.equal(output.length, 1, 'There should be one message.');
+  t.equal(
+    output[0],
+    'I used the colouredLayout\n',
+    'The message should have gone through the default layout.'
+  );
+  t.end();
+});
diff --git a/test/tap/stdoutAppender-test.js b/test/tap/stdoutAppender-test.js
index 9ae5bafd..89fc2318 100644
--- a/test/tap/stdoutAppender-test.js
+++ b/test/tap/stdoutAppender-test.js
@@ -1,26 +1,26 @@
-'use strict';
-
-const test = require('tap').test;
+const { test } = require('tap');
+const sandbox = require('@log4js-node/sandboxed-module');
 const layouts = require('../../lib/layouts');
-const sandbox = require('sandboxed-module');
 
 test('stdout appender', (t) => {
   const output = [];
 
-  const appender = sandbox.require(
-    '../../lib/appenders/stdout',
-    {
+  const appender = sandbox
+    .require('../../lib/appenders/stdout', {
       globals: {
         process: {
           stdout: {
-            write: function (data) {
+            write(data) {
               output.push(data);
-            }
-          }
-        }
-      }
-    }
-  ).appender(layouts.messagePassThroughLayout);
+            },
+          },
+        },
+      },
+    })
+    .configure(
+      { type: 'stdout', layout: { type: 'messagePassThrough' } },
+      layouts
+    );
 
   appender({ data: ['cheese'] });
   t.plan(2);
diff --git a/test/tap/subcategories-test.js b/test/tap/subcategories-test.js
index f803c69a..4d71098a 100644
--- a/test/tap/subcategories-test.js
+++ b/test/tap/subcategories-test.js
@@ -1,18 +1,17 @@
-'use strict';
-
-const test = require('tap').test;
+const { test } = require('tap');
 const log4js = require('../../lib/log4js');
-const levels = require('../../lib/levels');
 
 test('subcategories', (batch) => {
   batch.test('loggers created after levels configuration is loaded', (t) => {
     log4js.configure({
-      levels: {
-        sub1: 'WARN',
-        'sub1.sub11': 'TRACE',
-        'sub1.sub11.sub111': 'WARN',
-        'sub1.sub12': 'INFO'
-      }
+      appenders: { stdout: { type: 'stdout' } },
+      categories: {
+        default: { appenders: ['stdout'], level: 'TRACE' },
+        sub1: { appenders: ['stdout'], level: 'WARN' },
+        'sub1.sub11': { appenders: ['stdout'], level: 'TRACE' },
+        'sub1.sub11.sub111': { appenders: ['stdout'], level: 'WARN' },
+        'sub1.sub12': { appenders: ['stdout'], level: 'INFO' },
+      },
     });
 
     const loggers = {
@@ -24,19 +23,19 @@ test('subcategories', (batch) => {
       sub13: log4js.getLogger('sub1.sub13'), // Inherits sub1: WARN
       sub112: log4js.getLogger('sub1.sub11.sub112'), // Inherits sub1.sub11: TRACE
       sub121: log4js.getLogger('sub1.sub12.sub121'), // Inherits sub12: INFO
-      sub0: log4js.getLogger('sub0') // Not defined, not inherited: TRACE
+      sub0: log4js.getLogger('sub0'), // Not defined, not inherited: TRACE
     };
 
     t.test('check logger levels', (assert) => {
-      assert.equal(loggers.sub1.level, levels.WARN);
-      assert.equal(loggers.sub11.level, levels.TRACE);
-      assert.equal(loggers.sub111.level, levels.WARN);
-      assert.equal(loggers.sub12.level, levels.INFO);
-
-      assert.equal(loggers.sub13.level, levels.WARN);
-      assert.equal(loggers.sub112.level, levels.TRACE);
-      assert.equal(loggers.sub121.level, levels.INFO);
-      assert.equal(loggers.sub0.level, levels.TRACE);
+      assert.equal(loggers.sub1.level, log4js.levels.WARN);
+      assert.equal(loggers.sub11.level, log4js.levels.TRACE);
+      assert.equal(loggers.sub111.level, log4js.levels.WARN);
+      assert.equal(loggers.sub12.level, log4js.levels.INFO);
+
+      assert.equal(loggers.sub13.level, log4js.levels.WARN);
+      assert.equal(loggers.sub112.level, log4js.levels.TRACE);
+      assert.equal(loggers.sub121.level, log4js.levels.INFO);
+      assert.equal(loggers.sub0.level, log4js.levels.TRACE);
       assert.end();
     });
 
@@ -44,6 +43,13 @@ test('subcategories', (batch) => {
   });
 
   batch.test('loggers created before levels configuration is loaded', (t) => {
+    // reset to defaults
+    log4js.configure({
+      appenders: { stdout: { type: 'stdout' } },
+      categories: { default: { appenders: ['stdout'], level: 'info' } },
+    });
+
+    // these should all get the default log level of INFO
     const loggers = {
       sub1: log4js.getLogger('sub1'), // WARN
       sub11: log4js.getLogger('sub1.sub11'), // TRACE
@@ -53,31 +59,68 @@ test('subcategories', (batch) => {
       sub13: log4js.getLogger('sub1.sub13'), // Inherits sub1: WARN
       sub112: log4js.getLogger('sub1.sub11.sub112'), // Inherits sub1.sub11: TRACE
       sub121: log4js.getLogger('sub1.sub12.sub121'), // Inherits sub12: INFO
-      sub0: log4js.getLogger('sub0') // Not defined, not inherited: TRACE
+      sub0: log4js.getLogger('sub0'), // Not defined, not inherited: TRACE
     };
 
     log4js.configure({
-      levels: {
-        sub1: 'WARN',
-        'sub1.sub11': 'TRACE',
-        'sub1.sub11.sub111': 'WARN',
-        'sub1.sub12': 'INFO'
-      }
+      appenders: { stdout: { type: 'stdout' } },
+      categories: {
+        default: { appenders: ['stdout'], level: 'TRACE' },
+        sub1: { appenders: ['stdout'], level: 'WARN' },
+        'sub1.sub11': { appenders: ['stdout'], level: 'TRACE' },
+        'sub1.sub11.sub111': { appenders: ['stdout'], level: 'WARN' },
+        'sub1.sub12': { appenders: ['stdout'], level: 'INFO' },
+      },
     });
 
-    t.test('check logger levels', (assert) => {
-      assert.equal(loggers.sub1.level, levels.WARN);
-      assert.equal(loggers.sub11.level, levels.TRACE);
-      assert.equal(loggers.sub111.level, levels.WARN);
-      assert.equal(loggers.sub12.level, levels.INFO);
-
-      assert.equal(loggers.sub13.level, levels.WARN);
-      assert.equal(loggers.sub112.level, levels.TRACE);
-      assert.equal(loggers.sub121.level, levels.INFO);
-      assert.equal(loggers.sub0.level, levels.TRACE);
+    t.test('should still get new levels', (assert) => {
+      // can't use .equal because by calling log4js.configure we create new instances
+      assert.same(loggers.sub1.level, log4js.levels.WARN);
+      assert.same(loggers.sub11.level, log4js.levels.TRACE);
+      assert.same(loggers.sub111.level, log4js.levels.WARN);
+      assert.same(loggers.sub12.level, log4js.levels.INFO);
+
+      assert.same(loggers.sub13.level, log4js.levels.WARN);
+      assert.same(loggers.sub112.level, log4js.levels.TRACE);
+      assert.same(loggers.sub121.level, log4js.levels.INFO);
+      assert.same(loggers.sub0.level, log4js.levels.TRACE);
       assert.end();
     });
     t.end();
   });
+
+  batch.test(
+    'setting level on subcategories should not set parent level',
+    (t) => {
+      log4js.configure({
+        appenders: { stdout: { type: 'stdout' } },
+        categories: {
+          default: { appenders: ['stdout'], level: 'trace' },
+          parent: { appenders: ['stdout'], level: 'error' },
+        },
+      });
+
+      const logger = log4js.getLogger('parent');
+      const subLogger = log4js.getLogger('parent.child');
+
+      t.test('should inherit parent level', (assert) => {
+        assert.same(subLogger.level, log4js.levels.ERROR);
+        assert.end();
+      });
+
+      t.test(
+        'changing child level should not change parent level',
+        (assert) => {
+          subLogger.level = 'info';
+          assert.same(subLogger.level, log4js.levels.INFO);
+          assert.same(logger.level, log4js.levels.ERROR);
+          assert.end();
+        }
+      );
+
+      t.end();
+    }
+  );
+
   batch.end();
 });
diff --git a/test/tap/tcp-appender-test.js b/test/tap/tcp-appender-test.js
new file mode 100644
index 00000000..068ff829
--- /dev/null
+++ b/test/tap/tcp-appender-test.js
@@ -0,0 +1,337 @@
+const { test } = require('tap');
+const net = require('net');
+const flatted = require('flatted');
+const sandbox = require('@log4js-node/sandboxed-module');
+const log4js = require('../../lib/log4js');
+const LoggingEvent = require('../../lib/LoggingEvent');
+
+let messages = [];
+let server = null;
+
+function makeServer(config) {
+  server = net.createServer((socket) => {
+    socket.setEncoding('utf8');
+
+    socket.on('data', (data) => {
+      data
+        .split(config.endMsg)
+        .filter((s) => s.length)
+        .forEach((s) => {
+          messages.push(config.deserialise(s));
+        });
+    });
+  });
+
+  server.unref();
+
+  return server;
+}
+
+function makeFakeNet() {
+  return {
+    data: [],
+    cbs: {},
+    createConnectionCalled: 0,
+    createConnection(port, host) {
+      const fakeNet = this;
+      this.port = port;
+      this.host = host;
+      this.createConnectionCalled += 1;
+      return {
+        on(evt, cb) {
+          fakeNet.cbs[evt] = cb;
+        },
+        write(data, encoding) {
+          fakeNet.data.push(data);
+          fakeNet.encoding = encoding;
+          return false;
+        },
+        end() {
+          fakeNet.closeCalled = true;
+        },
+      };
+    },
+    createServer(cb) {
+      const fakeNet = this;
+      cb({
+        remoteAddress: '1.2.3.4',
+        remotePort: '1234',
+        setEncoding(encoding) {
+          fakeNet.encoding = encoding;
+        },
+        on(event, cb2) {
+          fakeNet.cbs[event] = cb2;
+        },
+      });
+
+      return {
+        listen(port, host) {
+          fakeNet.port = port;
+          fakeNet.host = host;
+        },
+      };
+    },
+  };
+}
+
+test('TCP Appender', (batch) => {
+  batch.test('Default Configuration', (t) => {
+    messages = [];
+
+    const serverConfig = {
+      endMsg: '__LOG4JS__',
+      deserialise: (log) => LoggingEvent.deserialise(log),
+    };
+    server = makeServer(serverConfig);
+
+    server.listen(() => {
+      const { port } = server.address();
+      log4js.configure({
+        appenders: {
+          default: { type: 'tcp', port },
+        },
+        categories: {
+          default: { appenders: ['default'], level: 'debug' },
+        },
+      });
+
+      const logger = log4js.getLogger();
+      logger.info('This should be sent via TCP.');
+      logger.info('This should also be sent via TCP and not break things.');
+
+      log4js.shutdown(() => {
+        server.close(() => {
+          t.equal(messages.length, 2);
+          t.match(messages[0], {
+            data: ['This should be sent via TCP.'],
+            categoryName: 'default',
+            context: {},
+            level: { levelStr: 'INFO' },
+          });
+          t.match(messages[1], {
+            data: ['This should also be sent via TCP and not break things.'],
+            categoryName: 'default',
+            context: {},
+            level: { levelStr: 'INFO' },
+          });
+          t.end();
+        });
+      });
+    });
+  });
+
+  batch.test('Custom EndMessage String', (t) => {
+    messages = [];
+
+    const serverConfig = {
+      endMsg: '\n',
+      deserialise: (log) => LoggingEvent.deserialise(log),
+    };
+    server = makeServer(serverConfig);
+
+    server.listen(() => {
+      const { port } = server.address();
+      log4js.configure({
+        appenders: {
+          customEndMsg: { type: 'tcp', port, endMsg: '\n' },
+        },
+        categories: {
+          default: { appenders: ['customEndMsg'], level: 'debug' },
+        },
+      });
+
+      const logger = log4js.getLogger();
+      logger.info('This should be sent via TCP using a custom EndMsg string.');
+      logger.info(
+        'This should also be sent via TCP using a custom EndMsg string and not break things.'
+      );
+
+      log4js.shutdown(() => {
+        server.close(() => {
+          t.equal(messages.length, 2);
+          t.match(messages[0], {
+            data: ['This should be sent via TCP using a custom EndMsg string.'],
+            categoryName: 'default',
+            context: {},
+            level: { levelStr: 'INFO' },
+          });
+          t.match(messages[1], {
+            data: [
+              'This should also be sent via TCP using a custom EndMsg string and not break things.',
+            ],
+            categoryName: 'default',
+            context: {},
+            level: { levelStr: 'INFO' },
+          });
+          t.end();
+        });
+      });
+    });
+  });
+
+  batch.test('Custom Layout', (t) => {
+    messages = [];
+
+    const serverConfig = {
+      endMsg: '__LOG4JS__',
+      deserialise: (log) => JSON.parse(log),
+    };
+    server = makeServer(serverConfig);
+
+    log4js.addLayout(
+      'json',
+      () =>
+        function (logEvent) {
+          return JSON.stringify({
+            time: logEvent.startTime,
+            message: logEvent.data[0],
+            level: logEvent.level.toString(),
+          });
+        }
+    );
+
+    server.listen(() => {
+      const { port } = server.address();
+      log4js.configure({
+        appenders: {
+          customLayout: {
+            type: 'tcp',
+            port,
+            layout: { type: 'json' },
+          },
+        },
+        categories: {
+          default: { appenders: ['customLayout'], level: 'debug' },
+        },
+      });
+
+      const logger = log4js.getLogger();
+      logger.info('This should be sent as a customized json.');
+      logger.info(
+        'This should also be sent via TCP as a customized json and not break things.'
+      );
+
+      log4js.shutdown(() => {
+        server.close(() => {
+          t.equal(messages.length, 2);
+          t.match(messages[0], {
+            message: 'This should be sent as a customized json.',
+            level: 'INFO',
+          });
+          t.match(messages[1], {
+            message:
+              'This should also be sent via TCP as a customized json and not break things.',
+            level: 'INFO',
+          });
+          t.end();
+        });
+      });
+    });
+  });
+
+  batch.test('when underlying stream errors', (t) => {
+    const fakeNet = makeFakeNet();
+
+    const sandboxedLog4js = sandbox.require('../../lib/log4js', {
+      requires: {
+        net: fakeNet,
+      },
+    });
+    sandboxedLog4js.configure({
+      appenders: {
+        default: { type: 'tcp' },
+      },
+      categories: {
+        default: { appenders: ['default'], level: 'debug' },
+      },
+    });
+
+    const logger = sandboxedLog4js.getLogger();
+
+    logger.info('before connect');
+    t.test(
+      'should buffer messages written before socket is connected',
+      (assert) => {
+        assert.equal(fakeNet.data.length, 0);
+        assert.equal(fakeNet.createConnectionCalled, 1);
+        assert.end();
+      }
+    );
+
+    fakeNet.cbs.connect();
+    t.test('should flush buffered messages', (assert) => {
+      assert.equal(fakeNet.data.length, 1);
+      assert.equal(fakeNet.createConnectionCalled, 1);
+      assert.match(fakeNet.data[0], 'before connect');
+      assert.end();
+    });
+
+    logger.info('after connect');
+    t.test(
+      'should write log messages to socket as flatted strings with a terminator string',
+      (assert) => {
+        assert.equal(fakeNet.data.length, 2);
+        assert.match(fakeNet.data[0], 'before connect');
+        assert.ok(fakeNet.data[0].endsWith('__LOG4JS__'));
+        assert.match(fakeNet.data[1], 'after connect');
+        assert.ok(fakeNet.data[1].endsWith('__LOG4JS__'));
+        assert.equal(fakeNet.encoding, 'utf8');
+        assert.end();
+      }
+    );
+
+    fakeNet.cbs.error();
+    logger.info('after error, before close');
+    fakeNet.cbs.close();
+    logger.info('after close, before connect');
+    fakeNet.cbs.connect();
+    logger.info('after error, after connect');
+    t.test('should attempt to re-open the socket on error', (assert) => {
+      assert.equal(fakeNet.data.length, 5);
+      assert.equal(fakeNet.createConnectionCalled, 2);
+      assert.match(fakeNet.data[2], 'after error, before close');
+      assert.match(fakeNet.data[3], 'after close, before connect');
+      assert.match(fakeNet.data[4], 'after error, after connect');
+      assert.end();
+    });
+
+    t.test('should buffer messages until drain', (assert) => {
+      const previousLength = fakeNet.data.length;
+      logger.info('should not be flushed');
+      assert.equal(fakeNet.data.length, previousLength);
+      assert.notMatch(
+        fakeNet.data[fakeNet.data.length - 1],
+        'should not be flushed'
+      );
+
+      fakeNet.cbs.drain();
+      assert.equal(fakeNet.data.length, previousLength + 1);
+      assert.match(
+        fakeNet.data[fakeNet.data.length - 1],
+        'should not be flushed'
+      );
+      assert.end();
+    });
+
+    t.test('should serialize an Error correctly', (assert) => {
+      const previousLength = fakeNet.data.length;
+      logger.error(new Error('Error test'));
+      fakeNet.cbs.drain();
+      assert.equal(fakeNet.data.length, previousLength + 1);
+      const raw = fakeNet.data[fakeNet.data.length - 1];
+      const offset = raw.indexOf('__LOG4JS__');
+      assert.ok(
+        flatted.parse(raw.slice(0, offset !== -1 ? offset : 0)).data[0].stack,
+        `Expected:\n\n${fakeNet.data[6]}\n\n to have a 'data[0].stack' property`
+      );
+      const actual = flatted.parse(raw.slice(0, offset !== -1 ? offset : 0))
+        .data[0].stack;
+      assert.match(actual, /^Error: Error test/);
+      assert.end();
+    });
+
+    t.end();
+  });
+
+  batch.end();
+});
diff --git a/test/tap/test-config.json b/test/tap/test-config.json
index 2a69651b..bed4e5a0 100644
--- a/test/tap/test-config.json
+++ b/test/tap/test-config.json
@@ -1,5 +1,3 @@
 {
-  "appenders": [
-    { "type": "stdout" }
-  ]
+  "appenders": [{ "type": "stdout" }]
 }
diff --git a/test/tap/with-categoryFilter.json b/test/tap/with-categoryFilter.json
deleted file mode 100644
index f1efa4a7..00000000
--- a/test/tap/with-categoryFilter.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
-  "appenders": [
-    {
-      "type": "categoryFilter",
-      "exclude": "web",
-      "appender": {
-        "type": "file",
-        "filename": "test/tap/categoryFilter-noweb.log",
-        "layout": {
-          "type": "messagePassThrough"
-        }
-      }
-    },
-    {
-      "category": "web",
-      "type": "file",
-      "filename": "test/tap/categoryFilter-web.log", 
-      "layout": {
-        "type": "messagePassThrough"
-      }
-    }
-  ]
-}
diff --git a/test/tap/with-dateFile.json b/test/tap/with-dateFile.json
deleted file mode 100644
index 4691278e..00000000
--- a/test/tap/with-dateFile.json
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-  "appenders": [
-    {
-      "category": "tests",
-      "type": "dateFile",
-      "filename": "test/tap/date-file-test.log",
-      "pattern": "-from-MM-dd",
-      "layout": {
-        "type": "messagePassThrough"
-      }
-    }
-  ],
-
-  "levels": {
-    "tests":  "WARN"
-  }
-}
diff --git a/test/tap/with-logLevelFilter.json b/test/tap/with-logLevelFilter.json
deleted file mode 100644
index 0995d35c..00000000
--- a/test/tap/with-logLevelFilter.json
+++ /dev/null
@@ -1,41 +0,0 @@
-{
-  "appenders": [
-    {
-      "category": "tests",
-      "type": "logLevelFilter",
-      "level": "WARN",
-      "appender": {
-        "type": "file",
-        "filename": "test/tap/logLevelFilter-warnings.log",
-        "layout": {
-          "type": "messagePassThrough"
-        }
-      }
-    },
-    {
-      "category": "tests",
-      "type": "logLevelFilter",
-      "level": "TRACE",
-      "maxLevel": "DEBUG",
-      "appender": {
-        "type": "file",
-        "filename": "test/tap/logLevelFilter-debugs.log",
-        "layout": {
-          "type": "messagePassThrough"
-          }
-        }
-    },
-    {
-      "category": "tests",
-      "type": "file",
-      "filename": "test/tap/logLevelFilter.log",
-      "layout": {
-        "type": "messagePassThrough"
-      }
-    }
-  ],
-
-  "levels": {
-    "tests":  "TRACE"
-  }
-}
diff --git a/types/log4js.d.ts b/types/log4js.d.ts
new file mode 100644
index 00000000..6c5d5305
--- /dev/null
+++ b/types/log4js.d.ts
@@ -0,0 +1,486 @@
+// Type definitions for log4js
+
+type Format =
+  | string
+  | ((req: any, res: any, formatter: (str: string) => string) => string);
+
+export interface Log4js {
+  getLogger(category?: string): Logger;
+  configure(filename: string): Log4js;
+  configure(config: Configuration): Log4js;
+  isConfigured(): boolean;
+  addLayout(
+    name: string,
+    config: (a: any) => (logEvent: LoggingEvent) => string
+  ): void;
+  connectLogger(
+    logger: Logger,
+    options: { format?: Format; level?: string; nolog?: any }
+  ): any; // express.Handler;
+  levels: Levels;
+  shutdown(cb?: (error?: Error) => void): void;
+}
+
+export function getLogger(category?: string): Logger;
+
+export function configure(filename: string): Log4js;
+export function configure(config: Configuration): Log4js;
+export function isConfigured(): boolean;
+
+export function addLayout(
+  name: string,
+  config: (a: any) => (logEvent: LoggingEvent) => any
+): void;
+
+export function connectLogger(
+  logger: Logger,
+  options: {
+    format?: Format;
+    level?: string;
+    nolog?: any;
+    statusRules?: any[];
+    context?: boolean;
+  }
+): any; // express.Handler;
+
+export function recording(): Recording;
+
+export const levels: Levels;
+
+export function shutdown(cb?: (error?: Error) => void): void;
+
+export interface BasicLayout {
+  type: 'basic';
+}
+
+export interface ColoredLayout {
+  type: 'colored' | 'coloured';
+}
+
+export interface MessagePassThroughLayout {
+  type: 'messagePassThrough';
+}
+
+export interface DummyLayout {
+  type: 'dummy';
+}
+
+export interface Level {
+  isEqualTo(other: string): boolean;
+  isEqualTo(otherLevel: Level): boolean;
+  isLessThanOrEqualTo(other: string): boolean;
+  isLessThanOrEqualTo(otherLevel: Level): boolean;
+  isGreaterThanOrEqualTo(other: string): boolean;
+  isGreaterThanOrEqualTo(otherLevel: Level): boolean;
+  colour: string;
+  level: number;
+  levelStr: string;
+}
+/**
+ * A parsed CallStack from an `Error.stack` trace
+ */
+export interface CallStack {
+  functionName: string;
+  fileName: string;
+  lineNumber: number;
+  columnNumber: number;
+  /**
+   * The stack string after the skipped lines
+   */
+  callStack: string;
+}
+export interface LoggingEvent extends Partial<CallStack> {
+  categoryName: string; // name of category
+  level: Level; // level of message
+  data: any[]; // objects to log
+  startTime: Date;
+  pid: number;
+  context: any;
+  cluster?: {
+    workerId: number;
+    worker: number;
+  };
+  /**
+   * The first Error object in the data if there is one
+   */
+  error?: Error;
+  serialise(): string;
+}
+
+export type Token = ((logEvent: LoggingEvent) => string) | string;
+
+export interface PatternLayout {
+  type: 'pattern';
+  // specifier for the output format, using placeholders as described below
+  pattern: string;
+  // user-defined tokens to be used in the pattern
+  tokens?: { [name: string]: Token };
+}
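+
+// Illustrative pattern layout only (placeholders and the token name are example values, not a prescribed format):
+//   { type: 'pattern', pattern: '%d %p %c %x{user} %m%n', tokens: { user: () => 'jeff' } }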
+
+export interface CustomLayout {
+  [key: string]: any;
+  type: string;
+}
+
+export type Layout =
+  | BasicLayout
+  | ColoredLayout
+  | MessagePassThroughLayout
+  | DummyLayout
+  | PatternLayout
+  | CustomLayout;
+
+/**
+ * Category Filter
+ *
+ * @see https://log4js-node.github.io/log4js-node/categoryFilter.html
+ */
+export interface CategoryFilterAppender {
+  type: 'categoryFilter';
+  // the category (or categories if you provide an array of values) that will be excluded from the appender.
+  exclude?: string | string[];
+  // the name of the appender to filter. see https://log4js-node.github.io/log4js-node/layouts.html
+  appender?: string;
+}
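+
+// Illustrative categoryFilter config ('everything' stands for another appender defined in the same configuration):
+//   { type: 'categoryFilter', exclude: 'web', appender: 'everything' }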
+
+/**
+ * No Log Filter
+ *
+ * @see https://log4js-node.github.io/log4js-node/noLogFilter.html
+ */
+export interface NoLogFilterAppender {
+  type: 'noLogFilter';
+  // the regular expression (or the regular expressions if you provide an array of values)
+  // will be used for evaluating the events to pass to the appender.
+  // The events, which will match the regular expression, will be excluded and so not logged.
+  exclude: string | string[];
+  // the name of an appender, defined in the same configuration, that you want to filter.
+  appender: string;
+}
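+
+// Illustrative noLogFilter config (the exclude value is a regular expression string; values are examples only):
+//   { type: 'noLogFilter', exclude: 'password|secret', appender: 'app' }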
+
+/**
+ * Console Appender
+ *
+ * @see https://log4js-node.github.io/log4js-node/console.html
+ */
+export interface ConsoleAppender {
+  type: 'console';
+  // (defaults to ColoredLayout)
+  layout?: Layout;
+}
+
+export interface FileAppender {
+  type: 'file';
+  // the path of the file where you want your logs written.
+  filename: string;
+  // (defaults to undefined) the maximum size (in bytes) for the log file. If not specified or 0, then no log rolling will happen.
+  maxLogSize?: number | string;
+  // (defaults to 5) the number of old log files to keep (excluding the hot file).
+  backups?: number;
+  // (defaults to BasicLayout)
+  layout?: Layout;
+  // (defaults to utf-8)
+  encoding?: string;
+  // (defaults to 0o600)
+  mode?: number;
+  // (defaults to a)
+  flags?: string;
+  // (defaults to false) compress the backup files using gzip (backup files will have .gz extension)
+  compress?: boolean;
+  // (defaults to false) preserve the file extension when rotating log files (`file.log` becomes `file.1.log` instead of `file.log.1`).
+  keepFileExt?: boolean;
+  // (defaults to .) the filename separator when rolling. e.g.: abc.log`.`1 or abc`.`1.log (keepFileExt)
+  fileNameSep?: string;
+  // (defaults to false) remove embedded ANSI color sequence
+  removeColor?: boolean;
+}
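+
+// Illustrative file appender config (values are examples only):
+//   { type: 'file', filename: 'app.log', maxLogSize: 10485760, backups: 3, compress: true }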
+
+export interface SyncfileAppender {
+  type: 'fileSync';
+  // the path of the file where you want your logs written.
+  filename: string;
+  // (defaults to undefined) the maximum size (in bytes) for the log file. If not specified or 0, then no log rolling will happen.
+  maxLogSize?: number | string;
+  // (defaults to 5) the number of old log files to keep (excluding the hot file).
+  backups?: number;
+  // (defaults to BasicLayout)
+  layout?: Layout;
+  // (defaults to utf-8)
+  encoding?: string;
+  // (defaults to 0o600)
+  mode?: number;
+  // (defaults to a)
+  flags?: string;
+}
+
+export interface DateFileAppender {
+  type: 'dateFile';
+  // the path of the file where you want your logs written.
+  filename: string;
+  // (defaults to yyyy-MM-dd) the pattern to use to determine when to roll the logs.
+  /**
+   * The following strings are recognised in the pattern:
+   *  - yyyy : the full year, use yy for just the last two digits
+   *  - MM   : the month
+   *  - dd   : the day of the month
+   *  - hh   : the hour of the day (24-hour clock)
+   *  - mm   : the minute of the hour
+   *  - ss   : seconds
+   *  - SSS  : milliseconds (although I'm not sure you'd want to roll your logs every millisecond)
+   *  - O    : timezone (capital letter o)
+   */
+  pattern?: string;
+  // (defaults to BasicLayout)
+  layout?: Layout;
+  // (defaults to utf-8)
+  encoding?: string;
+  // (defaults to 0o600)
+  mode?: number;
+  // (defaults to a)
+  flags?: string;
+  // (defaults to false) compress the backup files using gzip (backup files will have .gz extension)
+  compress?: boolean;
+  // (defaults to false) preserve the file extension when rotating log files (`file.log` becomes `file.2017-05-30.log` instead of `file.log.2017-05-30`).
+  keepFileExt?: boolean;
+  // (defaults to .) the filename separator when rolling. e.g.: abc.log`.`2013-08-30 or abc`.`2013-08-30.log (keepFileExt)
+  fileNameSep?: string;
+  // (defaults to false) include the pattern in the name of the current log file.
+  alwaysIncludePattern?: boolean;
+  // (defaults to 1) the number of old files that matches the pattern to keep (excluding the hot file).
+  numBackups?: number;
+}
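+
+// Illustrative dateFile appender config (the pattern shown is the documented default):
+//   { type: 'dateFile', filename: 'app.log', pattern: 'yyyy-MM-dd', compress: true, numBackups: 7 }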
+
+export interface LogLevelFilterAppender {
+  type: 'logLevelFilter';
+  // the name of an appender, defined in the same configuration, that you want to filter
+  appender: string;
+  // the minimum level of event to allow through the filter
+  level: string;
+  // (defaults to FATAL) the maximum level of event to allow through the filter
+  maxLevel?: string;
+}
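+
+// Illustrative logLevelFilter config ('errors-file' stands for another appender defined in the same configuration):
+//   { type: 'logLevelFilter', appender: 'errors-file', level: 'ERROR' }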
+
+export interface MultiFileAppender {
+  type: 'multiFile';
+  // the base part of the generated log filename
+  base: string;
+  // the value to use to split files (see below).
+  property: string;
+  // the suffix for the generated log filename.
+  extension: string;
+}
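+
+// Illustrative multiFile config, splitting output into one file per category ('categoryName' as the property is an example):
+//   { type: 'multiFile', base: 'logs/', property: 'categoryName', extension: '.log' }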
+
+export interface MultiprocessAppender {
+  type: 'multiprocess';
+  // controls whether the appender listens for log events sent over the network, or is responsible for serialising events and sending them to a server.
+  mode: 'master' | 'worker';
+  // (only needed if mode == master) the name of the appender to send the log events to
+  appender?: string;
+  // (defaults to 5000) the port to listen on, or send to
+  loggerPort?: number;
+  // (defaults to localhost) the host/IP address to listen on, or send to
+  loggerHost?: string;
+}
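+
+// Illustrative multiprocess configs (the master forwards received events to a 'file' appender, a name assumed here):
+//   { type: 'multiprocess', mode: 'master', appender: 'file', loggerPort: 5000 }
+//   { type: 'multiprocess', mode: 'worker', loggerHost: 'log.example.com', loggerPort: 5000 }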
+
+export interface RecordingAppender {
+  type: 'recording';
+}
+
+export interface StandardErrorAppender {
+  type: 'stderr';
+  // (defaults to ColoredLayout)
+  layout?: Layout;
+}
+
+export interface StandardOutputAppender {
+  type: 'stdout';
+  // (defaults to ColoredLayout)
+  layout?: Layout;
+}
+
+/**
+ * TCP Appender
+ *
+ * @see https://log4js-node.github.io/log4js-node/tcp.html
+ */
+export interface TCPAppender {
+  type: 'tcp';
+  // (defaults to 5000)
+  port?: number;
+  // (defaults to localhost)
+  host?: string;
+  // (defaults to __LOG4JS__)
+  endMsg?: string;
+  // (defaults to a serialized log event)
+  layout?: Layout;
+}
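+
+// Illustrative tcp appender config (values are examples only; defaults are noted above):
+//   { type: 'tcp', port: 5000, host: 'log.example.com', endMsg: '\n' }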
+
+export interface CustomAppender {
+  type: string | AppenderModule;
+  [key: string]: any;
+}
+
+/**
+ * Mapping of all Appenders to allow for declaration merging
+ * @example
+ * declare module 'log4js' {
+ *   interface Appenders {
+ *     StorageTestAppender: {
+ *       type: 'storageTest';
+ *       storageMedium: 'dvd' | 'usb' | 'hdd';
+ *     };
+ *   }
+ * }
+ */
+export interface Appenders {
+  CategoryFilterAppender: CategoryFilterAppender;
+  ConsoleAppender: ConsoleAppender;
+  FileAppender: FileAppender;
+  SyncfileAppender: SyncfileAppender;
+  DateFileAppender: DateFileAppender;
+  LogLevelFilterAppender: LogLevelFilterAppender;
+  NoLogFilterAppender: NoLogFilterAppender;
+  MultiFileAppender: MultiFileAppender;
+  MultiprocessAppender: MultiprocessAppender;
+  RecordingAppender: RecordingAppender;
+  StandardErrorAppender: StandardErrorAppender;
+  StandardOutputAppender: StandardOutputAppender;
+  TCPAppender: TCPAppender;
+  CustomAppender: CustomAppender;
+}
+
+export interface AppenderModule {
+  configure: (
+    config?: Config,
+    layouts?: LayoutsParam,
+    findAppender?: () => AppenderFunction,
+    levels?: Levels
+  ) => AppenderFunction;
+}
+
+export type AppenderFunction = (loggingEvent: LoggingEvent) => void;
+
+// TODO: Actually add types here...
+// It's supposed to be the full config element
+export type Config = any;
+
+export interface LayoutsParam {
+  basicLayout: LayoutFunction;
+  messagePassThroughLayout: LayoutFunction;
+  patternLayout: LayoutFunction;
+  colouredLayout: LayoutFunction;
+  coloredLayout: LayoutFunction;
+  dummyLayout: LayoutFunction;
+  addLayout: (name: string, serializerGenerator: LayoutFunction) => void;
+  layout: (name: string, config: PatternToken) => LayoutFunction;
+}
+
+export interface PatternToken {
+  pattern: string; // TODO type this to enforce good pattern...
+  tokens: { [tokenName: string]: () => any };
+}
+
+export type LayoutFunction = (loggingEvent: LoggingEvent) => string;
+
+export type Appender = Appenders[keyof Appenders];
+
+export interface Levels {
+  ALL: Level;
+  MARK: Level;
+  TRACE: Level;
+  DEBUG: Level;
+  INFO: Level;
+  WARN: Level;
+  ERROR: Level;
+  FATAL: Level;
+  OFF: Level;
+  levels: Level[];
+  getLevel(level: Level | string, defaultLevel?: Level): Level;
+  addLevels(customLevels: object): void;
+}
+
+export interface Configuration {
+  appenders: { [name: string]: Appender };
+  categories: {
+    [name: string]: {
+      appenders: string[];
+      level: string;
+      enableCallStack?: boolean;
+    };
+  };
+  pm2?: boolean;
+  pm2InstanceVar?: string;
+  levels?:
+    | Levels
+    | {
+        [name: string]: {
+          value: number;
+          colour: string;
+        };
+      };
+  disableClustering?: boolean;
+}
+
+export interface Recording {
+  configure(): AppenderFunction;
+  replay(): LoggingEvent[];
+  playback(): LoggingEvent[];
+  reset(): void;
+  erase(): void;
+}
+
+export interface Logger {
+  new (name: string): Logger;
+
+  readonly category: string;
+  level: Level | string;
+
+  log(level: Level | string, ...args: any[]): void;
+
+  isLevelEnabled(level?: string): boolean;
+
+  isTraceEnabled(): boolean;
+  isDebugEnabled(): boolean;
+  isInfoEnabled(): boolean;
+  isWarnEnabled(): boolean;
+  isErrorEnabled(): boolean;
+  isFatalEnabled(): boolean;
+
+  _log(level: Level, data: any): void;
+
+  addContext(key: string, value: any): void;
+
+  removeContext(key: string): void;
+
+  clearContext(): void;
+
+  /**
+   * Replace the basic parse function with a new custom one
+   * - Note that linesToSkip will be based on the origin of the Error object in addition to the callStackLinesToSkip (at least 1)
+   * @param parseFunction the new parseFunction. Use `undefined` to reset to the base implementation
+   */
+  setParseCallStackFunction(
+    parseFunction: (error: Error, linesToSkip: number) => CallStack | undefined
+  ): void;
+
+  /**
+   * Adjust the value of linesToSkip when the parseFunction is called.
+   *
+   * Cannot be less than 0.
+   */
+  callStackLinesToSkip: number;
+
+  trace(message: any, ...args: any[]): void;
+
+  debug(message: any, ...args: any[]): void;
+
+  info(message: any, ...args: any[]): void;
+
+  warn(message: any, ...args: any[]): void;
+
+  error(message: any, ...args: any[]): void;
+
+  fatal(message: any, ...args: any[]): void;
+
+  mark(message: any, ...args: any[]): void;
+}
diff --git a/types/test.ts b/types/test.ts
new file mode 100644
index 00000000..ba2b6786
--- /dev/null
+++ b/types/test.ts
@@ -0,0 +1,227 @@
+import * as log4js from './log4js';
+
+console.log(log4js.isConfigured());
+log4js.configure('./filename');
+console.log(log4js.isConfigured());
+
+const logger1 = log4js.getLogger();
+logger1.level = 'debug';
+logger1.debug('Some debug messages');
+logger1.fatal({
+  whatever: 'foo',
+});
+
+const logger3 = log4js.getLogger('cheese');
+logger3.trace('Entering cheese testing');
+logger3.debug('Got cheese.');
+logger3.info('Cheese is Gouda.');
+logger3.warn('Cheese is quite smelly.');
+logger3.error('Cheese is too ripe!');
+logger3.fatal('Cheese was breeding ground for listeria.');
+
+log4js.configure({
+  appenders: { cheese: { type: 'console', filename: 'cheese.log' } },
+  categories: { default: { appenders: ['cheese'], level: 'error' } },
+});
+
+log4js.configure({
+  appenders: {
+    out: { type: 'file', filename: 'pm2logs.log' },
+  },
+  categories: {
+    default: { appenders: ['out'], level: 'info' },
+  },
+  pm2: true,
+  pm2InstanceVar: 'INSTANCE_ID',
+});
+
+log4js.addLayout(
+  'json',
+  (config) =>
+    function (logEvent) {
+      return JSON.stringify(logEvent) + config.separator;
+    }
+);
+
+log4js.configure({
+  appenders: {
+    out: { type: 'stdout', layout: { type: 'json', separator: ',' } },
+  },
+  categories: {
+    default: { appenders: ['out'], level: 'info' },
+  },
+});
+
+log4js.configure({
+  appenders: {
+    file: { type: 'dateFile', filename: 'thing.log', pattern: '.mm' },
+  },
+  categories: {
+    default: { appenders: ['file'], level: 'debug' },
+  },
+});
+
+const logger4 = log4js.getLogger('thing');
+logger4.log('logging a thing');
+
+const logger5 = log4js.getLogger('json-test');
+logger5.info('this is just a test');
+logger5.error('of a custom appender');
+logger5.warn('that outputs json');
+log4js.shutdown();
+
+log4js.configure({
+  appenders: {
+    cheeseLogs: { type: 'file', filename: 'cheese.log' },
+    console: { type: 'console' },
+  },
+  categories: {
+    cheese: { appenders: ['cheeseLogs'], level: 'error' },
+    another: { appenders: ['console'], level: 'trace' },
+    default: { appenders: ['console', 'cheeseLogs'], level: 'trace' },
+  },
+});
+
+const logger6 = log4js.getLogger('cheese');
+// only errors and above get logged.
+const otherLogger = log4js.getLogger();
+
+// this will get coloured output on console, and appear in cheese.log
+otherLogger.error('AAArgh! Something went wrong', {
+  some: 'otherObject',
+  useful_for: 'debug purposes',
+});
+otherLogger.log('This should appear as info output');
+
+// these will not appear (logging level beneath error)
+logger6.trace('Entering cheese testing');
+logger6.debug('Got cheese.');
+logger6.info('Cheese is Gouda.');
+logger6.log('Something funny about cheese.');
+logger6.warn('Cheese is quite smelly.');
+// these end up only in cheese.log
+logger6.error('Cheese %s is too ripe!', 'gouda');
+logger6.fatal('Cheese was breeding ground for listeria.');
+
+// these don't end up in cheese.log, but will appear on the console
+const anotherLogger = log4js.getLogger('another');
+anotherLogger.debug('Just checking');
+
+// will also go to console and cheese.log, since that's configured for all categories
+const pantsLog = log4js.getLogger('pants');
+pantsLog.debug('Something for pants');
+
+import { configure, getLogger } from './log4js';
+configure('./filename');
+const logger2 = getLogger();
+logger2.level = 'debug';
+logger2.debug('Some debug messages');
+
+configure({
+  appenders: { cheese: { type: 'file', filename: 'cheese.log' } },
+  categories: { default: { appenders: ['cheese'], level: 'error' } },
+});
+
+log4js.configure('./filename').getLogger();
+const logger7 = log4js.getLogger();
+logger7.level = 'debug';
+logger7.debug('Some debug messages');
+
+const levels: log4js.Levels = log4js.levels;
+const level: log4js.Level = levels.getLevel('info');
+
+log4js.connectLogger(logger1, {
+  format: ':x, :y',
+  level: 'info',
+  context: true,
+});
+
+log4js.connectLogger(logger2, {
+  format: (req, _res, format) =>
+    format(
+      `:remote-addr - ${req.id} - ":method :url HTTP/:http-version" :status :content-length ":referrer" ":user-agent"`
+    ),
+});
+
+// support for passing in an appender module
+log4js.configure({
+  appenders: { thing: { type: { configure: () => () => {} } } },
+  categories: { default: { appenders: ['thing'], level: 'debug' } },
+});
+
+declare module './log4js' {
+  interface Appenders {
+    StorageTestAppender: {
+      type: 'storageTest';
+      storageMedium: 'dvd' | 'usb' | 'hdd';
+    };
+  }
+}
+
+log4js.configure({
+  appenders: { test: { type: 'storageTest', storageMedium: 'dvd' } },
+  categories: { default: { appenders: ['test'], level: 'debug' } },
+});
+
+log4js.configure({
+  appenders: { rec: { type: 'recording' } },
+  categories: { default: { appenders: ['rec'], level: 'debug' } },
+});
+const logger8 = log4js.getLogger();
+logger8.level = 'debug';
+logger8.debug('This will go to the recording!');
+logger8.debug('Another one');
+const recording = log4js.recording();
+const loggingEvents = recording.playback();
+if (loggingEvents.length !== 2) {
+  throw new Error(`Expected 2 recorded events, got ${loggingEvents.length}`);
+}
+if (loggingEvents[0].data[0] !== 'This will go to the recording!') {
+  throw new Error(
+    `Expected message 'This will go to the recording!', got ${loggingEvents[0].data[0]}`
+  );
+}
+if (loggingEvents[1].data[0] !== 'Another one') {
+  throw new Error(
+    `Expected message 'Another one', got ${loggingEvents[1].data[0]}`
+  );
+}
+recording.reset();
+const loggingEventsPostReset = recording.playback();
+if (loggingEventsPostReset.length !== 0) {
+  throw new Error(
+    `Expected 0 recorded events after reset, got ${loggingEventsPostReset.length}`
+  );
+}
+
+log4js.configure({
+  levels: {
+    VERBOSE: { value: 10500, colour: 'cyan' },
+    SILLY: { value: 4500, colour: 'blue' },
+  },
+  appenders: {
+    console: { type: 'console' },
+  },
+  categories: {
+    testApp: { appenders: ['console'], level: 'info' },
+    default: { appenders: ['console'], level: 'info' },
+  },
+});
+
+const logger9 = log4js.getLogger('testApp');
+logger9.level = 'silly';
+logger9.debug('test 123');
+
+// update Logger interface with new custom levels
+declare module './log4js' {
+  interface Logger {
+    isVerboseEnabled: boolean;
+    isSillyEnabled: boolean;
+
+    verbose(message: any, ...args: any[]): void;
+    silly(message: any, ...args: any[]): void;
+  }
+}
+
+logger9.verbose('asdf');
+logger9.silly('asdfasdfasdf');
diff --git a/types/tsconfig.json b/types/tsconfig.json
new file mode 100644
index 00000000..35b9d6fb
--- /dev/null
+++ b/types/tsconfig.json
@@ -0,0 +1,9 @@
+{
+  "compileOnSave": false,
+  "compilerOptions": {
+    "strict": true,
+    "noUnusedParameters": true,
+    "noUnusedLocals": false,
+    "noEmit": true
+  }
+}
diff --git a/v2-changes.md b/v2-changes.md
new file mode 100644
index 00000000..1da1998b
--- /dev/null
+++ b/v2-changes.md
@@ -0,0 +1,10 @@
+# CHANGES
+
+- no exit listeners defined for appenders by default. users should call log4js.shutdown in their exit listeners.
+- context added to loggers (only logstash uses it so far)
+- logstash split into two appenders (udp and http)
+- no cwd, reload options in config
+- configure only by calling configure, no manual adding of appenders, etc
+- config format changed a lot, now need to define named appenders and at least one category (see the example below)
+- appender format changed, will break any non-core appenders (maybe create adapter?)
+- no replacement of console functions
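+
+For illustration, a minimal configuration in the new format (appender and category names are arbitrary):
+
+```javascript
+const log4js = require('log4js');
+
+log4js.configure({
+  appenders: { out: { type: 'stdout' } },
+  categories: { default: { appenders: ['out'], level: 'info' } },
+});
+
+const logger = log4js.getLogger();
+logger.info('hello');
+
+// no exit listeners are registered for you any more; call shutdown yourself
+log4js.shutdown(() => process.exit());
+```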