diff --git a/.editorconfig b/.editorconfig
index 7bdd406..2774da0 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -4,11 +4,3 @@ charset = utf-8
end_of_line = lf
trim_trailing_whitespace = true
insert_final_newline = true
-
-[*.cpp]
-indent_style = space
-indent_size = 4
-
-[*.js]
-indent_style = space
-indent_size = 2
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 069112c..3a3cce5 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -5,7 +5,7 @@
version: 2
updates:
- - package-ecosystem: "yarn" # See documentation for possible values
+ - package-ecosystem: "npm" # See documentation for possible values
directory: "/" # Location of package manifests
schedule:
interval: "weekly"
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 99a3ad3..9318491 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -2,100 +2,134 @@ name: CI
on: [push, pull_request]
-env:
- YARN_GPG: no
- npm_config_debug: yes
- npm_config_build_from_source: true
-
jobs:
+ lint:
+ name: Lint
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ submodules: true
+
+ - name: Install dependencies
+ run: npm ci
+
+ - name: Lint
+ run: npm run lint
+
+ coverage:
+ name: Report coverage
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ submodules: true
+
+ - name: Use Node.js 20
+ uses: actions/setup-node@v4
+ with:
+ cache: npm
+ node-version: 20
+
+ - name: Install dependencies
+ run: npm ci
+ env:
+ npm_config_debug: true
+
+ - name: Run tests
+ run: node --test --experimental-test-coverage --test-reporter=lcov --test-reporter-destination=lcov.info test.cjs
+
+ - name: Install lcov
+ run: sudo apt install -yq lcov
+
+ - name: Merge coverage reports
+ run: |
+ lcov --capture --directory . --no-external --output-file lcov-cpp.info
+ lcov --add-tracefile lcov-cpp.info --add-tracefile lcov.info --output-file lcov.info
+ lcov --remove lcov.info "*/node_modules/*" --output-file lcov.info
+
+ - name: "Send to Codacy"
+ uses: codacy/codacy-coverage-reporter-action@v1
+ with:
+ coverage-reports: lcov.info
+ project-token: ${{ secrets.CODACY_PROJECT_TOKEN }}
+
test:
strategy:
matrix:
- node-version: [16, 18, 20]
- os: [ubuntu-20.04, macos-11, macos-m1, windows-2019]
+ node-version: [18, 20, 22]
+ os: [ubuntu-22.04, macos-13, macos-14, windows-2019]
name: Test for node-${{ matrix.node-version }} on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
+
steps:
- - name: Checkout
- uses: actions/checkout@v3
- with:
- submodules: true
-
- - name: Use Node.js ${{ matrix.node-version }}
- uses: actions/setup-node@v3
- with:
- cache: yarn
- node-version: ${{ matrix.node-version }}
-
- - name: Install dependencies
- run: yarn install --frozen-lockfile
-
- - name: Run tests
- run: yarn test
-
- - name: "[Linux] Install lcov"
- if: matrix.os == 'ubuntu-20.04'
- run: sudo apt install lcov
-
- - name: "[Linux] Generate coverage"
- if: matrix.os == 'ubuntu-20.04'
- run: |
- cp -rl build/* build-tmp-napi-v3
- rm -r build
- yarn c8 report --reporter=text-lcov > lcov-js.info
- lcov -c -d . --no-external -o lcov-cpp.info
- lcov -r lcov-cpp.info "*/node_modules/*" -o lcov-cpp.info
- lcov -a lcov-js.info -a lcov-cpp.info -o lcov.info
-
- - name: "[Linux] Send to Codacy"
- if: matrix.os == 'ubuntu-20.04'
- uses: codacy/codacy-coverage-reporter-action@v1
- with:
- coverage-reports: lcov.info
- project-token: ${{ secrets.CODACY_PROJECT_TOKEN }}
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ submodules: true
+
+ - name: Use Node.js ${{ matrix.node-version }}
+ uses: actions/setup-node@v4
+ with:
+ cache: npm
+ node-version: ${{ matrix.node-version }}
+
+ - name: Install dependencies
+ run: npm ci
+
+ - name: Run tests
+ run: npm test
test-alpine:
- name: Test on Alpine Linux
+ strategy:
+ matrix:
+ node-version: [18, 20, 22]
+
+ name: Test for node-${{ matrix.node-version }} on Alpine Linux
runs-on: ubuntu-latest
+
container:
- image: node:16-alpine
+ image: node:${{ matrix.node-version }}-alpine3.18
steps:
- name: Install build deps
run: apk add make g++ python3 git
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
submodules: true
- name: Install dependencies
- run: yarn install --frozen-lockfile
+ run: npm ci
- name: Run tests
- run: yarn test
+ run: npm test
test-freebsd:
strategy:
matrix:
- node-version: [16, 18, 20]
+ node-version: [18, 20]
name: Test for node-${{ matrix.node-version }} on FreeBSD
- runs-on: macos-12
+ runs-on: ubuntu-latest
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
submodules: true
- - uses: vmactions/freebsd-vm@v0
+ - uses: vmactions/freebsd-vm@v1
with:
- envs: 'YARN_GPG npm_config_debug npm_config_build_from_source'
prepare: |
- pkg install -y gmake python3 yarn-node${{ matrix.node-version }}
+ pkg install -y gmake python3 npm-node${{ matrix.node-version }}
run: |
- yarn install --frozen-lockfile
- yarn test
+ npm ci
+ npm test
sync: sshfs
diff --git a/.github/workflows/clang-format.yml b/.github/workflows/clang-format.yml
index 4eac446..7a775b9 100644
--- a/.github/workflows/clang-format.yml
+++ b/.github/workflows/clang-format.yml
@@ -13,9 +13,14 @@ on:
jobs:
check-format:
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
+
+ - name: Setup LLVM repository
+ run: |
+ wget -qO- https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add -
+ sudo add-apt-repository -y 'deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy main'
- name: Install clang-format
run: sudo apt update -q && sudo apt install -yq clang-format
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 3eb3bab..ad5b0b3 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -1,177 +1,232 @@
name: Release
on:
+ push:
+
+ pull_request:
+ branches:
+ - master
+
release:
types: [published]
-env:
- npm_config_build_from_source: true
+ workflow_dispatch:
+ inputs:
+ new-version:
+ description: New version to be published, overrides tag
+ required: true
+ type: string
+
+ npm-tag:
+ description: NPM tag
+ required: true
+ default: latest
+ type: choice
+ options:
+ - latest
+ - next
jobs:
- publish:
- name: Publish package
- runs-on: ubuntu-20.04
-
- steps:
- - name: Checkout
- uses: actions/checkout@v3
- with:
- submodules: true
-
- - name: Use Node.js 16
- uses: actions/setup-node@v3
- with:
- cache: yarn
- node-version: 16
- registry-url: https://registry.npmjs.org/
-
- - name: Install dependencies
- run: yarn install --frozen-lockfile
-
- - name: Publish to NPM
- run: yarn publish
- env:
- NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
-
build:
strategy:
matrix:
- os: [ubuntu-20.04, macos-11, macos-m1, windows-2019]
-
- name: Build on ${{ matrix.os }}
+ include:
+ - os: ubuntu-22.04
+ arch: linux-x64-glibc
+ - os: ubuntu-22.04-arm
+ arch: linux-arm64-glibc
+ - os: macos-13
+ arch: darwin-x64
+ - os: macos-14
+ arch: darwin-arm64
+ - os: windows-2019
+ arch: win32-x64
+
+ name: Build for ${{ matrix.arch }}
runs-on: ${{ matrix.os }}
steps:
- - name: Checkout
- uses: actions/checkout@v3
- with:
- submodules: true
-
- - name: Use Node.js 16
- uses: actions/setup-node@v3
- with:
- cache: yarn
- node-version: 16
-
- - name: Install dependencies
- run: yarn install --frozen-lockfile
-
- - name: Package artifacts
- run: yarn node-pre-gyp package
-
- - name: Upload to Release
- uses: csexton/release-asset-action@v2
- with:
- pattern: build/stage/**/argon2*.tar.gz
- github-token: ${{ secrets.GITHUB_TOKEN }}
- release-url: ${{ github.event.release.upload_url }}
-
- build-alpine:
- name: Build on Alpine Linux
- runs-on: ubuntu-latest
- container:
- image: node:16-alpine
-
- steps:
- - name: Install build deps
- run: apk add make g++ python3 git
-
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
submodules: true
+ - name: Use Node.js 20
+ uses: actions/setup-node@v4
+ with:
+ cache: npm
+ node-version: 20
+
- name: Install dependencies
- run: yarn install --frozen-lockfile
+ run: npm ci
- - name: Package artifacts
- run: yarn node-pre-gyp package
+ - name: Prebuild
+ run: npm run build
- - name: Upload to Release
- uses: csexton/release-asset-action@v2
+ - name: Upload artifacts
+ uses: actions/upload-artifact@v4
with:
- pattern: build/stage/**/argon2*.tar.gz
- github-token: ${{ secrets.GITHUB_TOKEN }}
- release-url: ${{ github.event.release.upload_url }}
+ name: prebuild-${{ matrix.arch }}
+ path: prebuilds/**/*.node
- build-arm:
- name: Build on arm64
- runs-on: ubuntu-latest
+ build-musl:
strategy:
matrix:
include:
- - arch: aarch64
- distro: ubuntu20.04
- - arch: aarch64
- distro: alpine_latest
+ - os: ubuntu-22.04
+ arch: x64
+ platform: linux/amd64
+ - os: ubuntu-22.04-arm
+ arch: arm64
+ platform: linux/arm64
+ - os: ubuntu-22.04-arm
+ arch: armv7
+ platform: linux/arm/v7
+
+ name: Build for linux-${{ matrix.arch }}-musl
+ runs-on: ${{ matrix.os }}
steps:
- - name: Checkout
- uses: actions/checkout@v3
- with:
- submodules: true
-
- - uses: uraimo/run-on-arch-action@v2.2.0
- name: Package artifacts
- id: build
- with:
- arch: ${{ matrix.arch }}
- distro: ${{ matrix.distro }}
- setup: mkdir -p "${PWD}/artifacts"
- dockerRunArgs: --volume "${PWD}:/repo"
- env: |
- npm_config_build_from_source: true
- install: |
- case "${{ matrix.distro }}" in
- ubuntu*|jessie|stretch|buster)
- apt-get update -y
- apt-get install -y curl
- curl -fsSL https://deb.nodesource.com/setup_16.x | bash -
- apt-get install -y make g++ python nodejs
- npm install --global yarn
- ;;
- alpine*)
- apk add --update make g++ python3
- apk add --no-cache --repository https://dl-cdn.alpinelinux.org/alpine/v3.15/main/ nodejs~=16 npm
- npm install --global yarn
- ;;
- esac
- run: |
- cd /repo
- yarn install --frozen-lockfile
- yarn node-pre-gyp package
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ submodules: true
+
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v3
+ with:
+ platforms: ${{ matrix.platform }}
+
+ - name: Prebuild
+ uses: addnab/docker-run-action@v3
+ with:
+ image: node:20-alpine
+ options: --platform=${{ matrix.platform }} --volume=${{ github.workspace }}:/repo --workdir=/repo
+ run: |
+ apk add --no-cache g++ make python3
+ npm ci
+ npm run build
- - name: Upload to Release
- uses: csexton/release-asset-action@v2
- with:
- pattern: build/stage/**/argon2*.tar.gz
- github-token: ${{ secrets.GITHUB_TOKEN }}
- release-url: ${{ github.event.release.upload_url }}
+ - name: Upload artifacts
+ uses: actions/upload-artifact@v4
+ with:
+ name: prebuild-linux-${{ matrix.arch }}-musl
+ path: prebuilds/**/*.node
- build-freebsd:
- name: Build on FreeBSD
- runs-on: macos-12
+ build-freebsd-x64:
+ name: Build for freebsd-x64
+ runs-on: ubuntu-latest
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
submodules: true
- - uses: vmactions/freebsd-vm@v0
- name: Package artifacts
+ - name: Prebuild
+ uses: vmactions/freebsd-vm@v1
with:
- envs: 'npm_config_build_from_source'
prepare: |
- pkg install -y gmake python3 yarn-node16
+ pkg install -y gmake python3 npm-node20
run: |
- yarn install --frozen-lockfile
- yarn node-pre-gyp package
+ npm ci
+ npm run build
sync: sshfs
- - name: Upload to Release
- uses: csexton/release-asset-action@v2
+ - name: Upload artifacts
+ uses: actions/upload-artifact@v4
with:
- pattern: build/stage/**/argon2*.tar.gz
- github-token: ${{ secrets.GITHUB_TOKEN }}
- release-url: ${{ github.event.release.upload_url }}
+ name: prebuild-freebsd-x64
+ path: prebuilds/**/*.node
+
+ build-linux-armv7-glibc:
+ name: Build for linux-armv7-glibc
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ submodules: true
+
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v3
+ with:
+ platforms: linux/arm/v7
+
+ - name: Prebuild
+ uses: addnab/docker-run-action@v3
+ with:
+ image: node:20-bullseye
+ options: --platform=linux/arm/v7 --volume=${{ github.workspace }}:/repo --workdir=/repo
+ run: |
+ apt update -yq && apt install -yq wget
+          wget -qO- https://deb.nodesource.com/setup_20.x | bash -
+ apt install -yq g++ make python3 nodejs
+ npm ci
+ npm run build
+
+ - name: Upload artifacts
+ uses: actions/upload-artifact@v4
+ with:
+ name: prebuild-linux-armv7-glibc
+ path: prebuilds/**/*.node
+
+ publish:
+ name: Publish package
+ runs-on: ubuntu-latest
+
+ permissions:
+ contents: read
+ id-token: write
+
+ needs:
+ - build
+ - build-musl
+ - build-freebsd-x64
+ - build-linux-armv7-glibc
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ submodules: true
+
+ - name: Setup npm with Node.js 20
+ uses: actions/setup-node@v4
+ with:
+ cache: npm
+ node-version: 20
+ token: ${{ secrets.NPM_TOKEN }}
+ registry-url: 'https://registry.npmjs.org'
+
+ - name: Install dependencies
+ run: npm ci --ignore-scripts
+
+ - name: Download artifacts
+ id: download-artifact
+ uses: actions/download-artifact@v4
+
+ - name: Move prebuild artifacts
+ run: mkdir prebuilds && cp --recursive prebuild-*/* prebuilds/
+
+ - name: Pack package
+ run: npm pack
+ if: ${{ github.event_name == 'push' }}
+
+ - name: Upload package artifact
+ uses: actions/upload-artifact@v4
+ if: ${{ github.event_name == 'push' }}
+ with:
+ name: package
+ path: '*.tgz'
+
+ - name: Publish to NPM
+ run: |
+ npm version --allow-same-version --no-git-tag-version $VERSION
+ npm publish --provenance --tag $TAG
+ if: ${{ !env.ACT && github.event_name != 'push' }}
+ env:
+ NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
+ TAG: ${{ inputs.npm-tag || 'latest' }}
+ VERSION: ${{ inputs.new-version || github.ref_name }}
diff --git a/.gitignore b/.gitignore
index 48518c6..cc7e16e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,66 +1,22 @@
-
-# Created by https://www.gitignore.io/api/node
-
### Node ###
# Logs
logs
*.log
npm-debug.log*
-yarn-debug.log*
-yarn-error.log*
-
-# Runtime data
-pids
-*.pid
-*.seed
-*.pid.lock
-
-# Directory for instrumented libs generated by jscoverage/JSCover
-lib-cov
# Coverage directory used by tools like istanbul
coverage
-# nyc test coverage
-.nyc_output
-
-# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
-.grunt
-
-# Bower dependency directory (https://bower.io/)
-bower_components
-
-# node-waf configuration
-.lock-wscript
-
# Compiled binary addons (http://nodejs.org/api/addons.html)
lib/
build*/
# Dependency directories
node_modules/
-jspm_packages/
-
-# Typescript v1 declaration files
-typings/
-
-# Optional npm cache directory
-.npm
-
-# Optional eslint cache
-.eslintcache
-
-# Optional REPL history
-.node_repl_history
# Output of 'npm pack'
*.tgz
-# Yarn Integrity file
-.yarn-integrity
-
-# dotenv environment variables file
-.env
-
-
-# End of https://www.gitignore.io/api/node
+# Generated TypeScript declarations
+*.d.cts
+*.d.cts.map
diff --git a/README.md b/README.md
index 6be0f2e..49c7964 100644
--- a/README.md
+++ b/README.md
@@ -3,15 +3,10 @@
[![Financial contributors on Open Collective][opencollective-image]][opencollective-url]
[![Build status][actions-image]][actions-url]
[![NPM package][npm-image]][npm-url]
-[![Coverage status][coverage-image]][coverage-url]
-[![Code Quality][codequality-image]][codequality-url]
Bindings to the reference [Argon2](https://github.com/P-H-C/phc-winner-argon2)
implementation.
-**Want to use it on the command line? Instead check
-[node-argon2-cli](https://github.com/ranisalt/node-argon2-cli).**
-
## Usage
It's possible to hash using either Argon2i, Argon2d or Argon2id (default), and
verify if a password matches a hash.
@@ -68,8 +63,8 @@ The current prebuilt binaries are built and tested with the following systems:
- MacOS 11 (x86-64)
- MacOS 12 (ARM64 from v0.29.0)
- Windows Server 2019 (x86-64)
-- Alpine Linux 3.13 (x86-64 from v0.28.1; ARM64 from v0.28.2)
-- FreeBSD 13.1 (x86-64 from v0.29.1)
+- Alpine Linux 3.18 (x86-64 from v0.28.1; ARM64 from v0.28.2)
+- FreeBSD 14 (x86-64 from v0.29.1)
Binaries should also work for any version more recent than the ones listed
above. For example, the binary for Ubuntu 20.04 also works on Ubuntu 22.04, or
@@ -86,7 +81,7 @@ You **MUST** have a **node-gyp** global install before proceeding with the insta
along with GCC >= 5 / Clang >= 3.3. On Windows, you must compile under Visual
Studio 2015 or newer.
-**node-argon2** works only and is tested against Node >=14.0.0.
+**node-argon2** only works with, and is tested against, Node.js >=18.0.0.
### OSX
To install GCC >= 5 on OSX, use [homebrew](http://brew.sh/):
@@ -141,7 +136,7 @@ you'll need to specify that in the command.
2. Ignore `node-argon2` install script and build manually.
```bash
$ npm install argon2 --ignore-scripts
- $ npx @mapbox/node-pre-gyp rebuild -C ./node_modules/argon2
+ $ npx node-gyp rebuild -C ./node_modules/argon2
```
@@ -149,7 +144,7 @@ you'll need to specify that in the command.
I installed Node as a snap, and I can't install node-argon2.
-
+
This seems to be an issue related to snap (see [#345 (comment)](https://github.com/ranisalt/node-argon2/issues/345#issuecomment-1164178674)). Installing Node with another package manager, such as [asdf](https://asdf-vm.com/) or [nvm](https://github.com/nvm-sh/nvm), is a possible workaround.
@@ -212,7 +207,3 @@ license over Argon2 and the reference implementation.
[npm-url]: https://www.npmjs.com/package/argon2
[actions-image]: https://img.shields.io/github/actions/workflow/status/ranisalt/node-argon2/ci.yml?branch=master&style=flat-square
[actions-url]: https://github.com/ranisalt/node-argon2/actions
-[coverage-image]: https://img.shields.io/codacy/coverage/3aa6daee00154e1492660ecb2f788f73/master.svg?style=flat-square
-[coverage-url]: https://app.codacy.com/gh/ranisalt/node-argon2
-[codequality-image]: https://img.shields.io/codacy/grade/3aa6daee00154e1492660ecb2f788f73/master.svg?style=flat-square
-[codequality-url]: https://app.codacy.com/gh/ranisalt/node-argon2
diff --git a/argon2.cjs b/argon2.cjs
new file mode 100644
index 0000000..16b9a46
--- /dev/null
+++ b/argon2.cjs
@@ -0,0 +1,197 @@
+const assert = require("node:assert");
+const { randomBytes, timingSafeEqual } = require("node:crypto");
+const { promisify } = require("node:util");
+const { deserialize, serialize } = require("@phc/format");
+const gypBuild = require("node-gyp-build");
+
+const { hash: bindingsHash } = gypBuild(__dirname);
+
+/** @type {(size: number) => Promise<Buffer>} */
+const generateSalt = promisify(randomBytes);
+
+const argon2d = 0;
+const argon2i = 1;
+const argon2id = 2;
+
+module.exports.argon2d = argon2d;
+module.exports.argon2i = argon2i;
+module.exports.argon2id = argon2id;
+
+/** @enum {argon2i | argon2d | argon2id} */
+const types = Object.freeze({ argon2d, argon2i, argon2id });
+
+/** @enum {'argon2d' | 'argon2i' | 'argon2id'} */
+const names = Object.freeze({
+ [types.argon2d]: "argon2d",
+ [types.argon2i]: "argon2i",
+ [types.argon2id]: "argon2id",
+});
+
+const defaults = {
+ hashLength: 32,
+ timeCost: 3,
+ memoryCost: 1 << 16,
+ parallelism: 4,
+ type: argon2id,
+ version: 0x13,
+};
+
+/**
+ * @typedef {Object} Options
+ * @property {number} [hashLength=32]
+ * @property {number} [timeCost=3]
+ * @property {number} [memoryCost=65536]
+ * @property {number} [parallelism=4]
+ * @property {keyof typeof names} [type=argon2id]
+ * @property {number} [version=19]
+ * @property {Buffer} [salt]
+ * @property {Buffer} [associatedData]
+ * @property {Buffer} [secret]
+ */
+
+/**
+ * Hashes a password with Argon2, producing a raw hash
+ *
+ * @overload
+ * @param {Buffer | string} password The plaintext password to be hashed
+ * @param {Options & { raw: true }} options The parameters for Argon2
+ * @returns {Promise<Buffer>} The raw hash generated from `password`
+ */
+/**
+ * Hashes a password with Argon2, producing an encoded hash
+ *
+ * @overload
+ * @param {Buffer | string} password The plaintext password to be hashed
+ * @param {Options & { raw?: boolean }} [options] The parameters for Argon2
+ * @returns {Promise<string>} The encoded hash generated from `password`
+ */
+/**
+ * @param {Buffer | string} password The plaintext password to be hashed
+ * @param {Options & { raw?: boolean }} [options] The parameters for Argon2
+ */
+async function hash(password, options) {
+ let { raw, salt, ...rest } = { ...defaults, ...options };
+
+ if (rest.hashLength > 2 ** 32 - 1) {
+ throw new RangeError("Hash length is too large");
+ }
+
+ if (rest.memoryCost > 2 ** 32 - 1) {
+ throw new RangeError("Memory cost is too large");
+ }
+
+ if (rest.timeCost > 2 ** 32 - 1) {
+ throw new RangeError("Time cost is too large");
+ }
+
+ if (rest.parallelism > 2 ** 24 - 1) {
+ throw new RangeError("Parallelism is too large");
+ }
+
+ salt = salt ?? (await generateSalt(16));
+
+ const {
+ hashLength,
+ secret = Buffer.alloc(0),
+ type,
+ version,
+ memoryCost: m,
+ timeCost: t,
+ parallelism: p,
+ associatedData: data = Buffer.alloc(0),
+ } = rest;
+
+ const hash = await bindingsHash({
+ password: Buffer.from(password),
+ salt,
+ secret,
+ data,
+ hashLength,
+ m,
+ t,
+ p,
+ version,
+ type,
+ });
+ if (raw) {
+ return hash;
+ }
+
+ return serialize({
+ id: names[type],
+ version,
+ params: { m, t, p, ...(data.byteLength > 0 ? { data } : {}) },
+ salt,
+ hash,
+ });
+}
+module.exports.hash = hash;
+
+/**
+ * @param {string} digest The digest to be checked
+ * @param {Object} [options] The current parameters for Argon2
+ * @param {number} [options.timeCost=3]
+ * @param {number} [options.memoryCost=65536]
+ * @param {number} [options.parallelism=4]
+ * @param {number} [options.version=0x13]
+ * @returns {boolean} `true` if the digest parameters do not match the parameters in `options`, otherwise `false`
+ */
+function needsRehash(digest, options = {}) {
+ const { memoryCost, timeCost, parallelism, version } = {
+ ...defaults,
+ ...options,
+ };
+
+ const {
+ version: v,
+ params: { m, t, p },
+ } = deserialize(digest);
+
+ return (
+ +v !== +version ||
+ +m !== +memoryCost ||
+ +t !== +timeCost ||
+ +p !== +parallelism
+ );
+}
+module.exports.needsRehash = needsRehash;
+
+/**
+ * @param {string} digest The digest to be checked
+ * @param {Buffer | string} password The plaintext password to be verified
+ * @param {Object} [options] The current parameters for Argon2
+ * @param {Buffer} [options.secret]
+ * @returns {Promise<boolean>} `true` if the digest matches the hash generated from `password`, otherwise `false`
+ */
+async function verify(digest, password, options = {}) {
+ const { id, ...rest } = deserialize(digest);
+ if (!(id in types)) {
+ return false;
+ }
+
+ const {
+ version = 0x10,
+ params: { m, t, p, data = "" },
+ salt,
+ hash,
+ } = rest;
+
+ const { secret = Buffer.alloc(0) } = options;
+
+ return timingSafeEqual(
+ await bindingsHash({
+ password: Buffer.from(password),
+ salt,
+ secret,
+ data: Buffer.from(data, "base64"),
+ hashLength: hash.byteLength,
+ m: +m,
+ t: +t,
+ p: +p,
+ version: +version,
+ type: types[id],
+ }),
+ hash,
+ );
+}
+module.exports.verify = verify;
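
Editor's note on the new public API: the JSDoc above documents `hash`, `verify`, and `needsRehash`. A minimal usage sketch follows (not part of the diff), assuming the published package is installed and required as `argon2`; the option values shown simply make the module defaults explicit.

```js
// Minimal usage sketch for the rewritten argon2.cjs API (illustrative only).
const argon2 = require("argon2");

async function main() {
  // hash() resolves to a PHC-encoded string by default, or a raw Buffer with { raw: true }.
  const digest = await argon2.hash("password1", {
    type: argon2.argon2id, // default type
    memoryCost: 1 << 16, // in KiB, module default
    timeCost: 3,
    parallelism: 4,
  });

  // verify() re-hashes the password with the parameters stored in the digest.
  console.log(await argon2.verify(digest, "password1")); // true
  console.log(await argon2.verify(digest, "wrong")); // false

  // needsRehash() is true when the digest was created with different parameters.
  console.log(argon2.needsRehash(digest, { memoryCost: 1 << 17 })); // true
}

main().catch(console.error);
```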
diff --git a/argon2.cpp b/argon2.cpp
new file mode 100644
index 0000000..ddbb02b
--- /dev/null
+++ b/argon2.cpp
@@ -0,0 +1,114 @@
+#include "argon2/include/argon2.h"
+
+#include <napi.h>
+#include <vector>
+
+namespace {
+
+class HashWorker final : public Napi::AsyncWorker {
+public:
+  HashWorker(const Napi::Env &env, const Napi::Buffer<uint8_t> &plain,
+             const Napi::Buffer<uint8_t> &salt,
+             const Napi::Buffer<uint8_t> &secret,
+             const Napi::Buffer<uint8_t> &ad, uint32_t hash_length,
+ uint32_t memory_cost, uint32_t time_cost, uint32_t parallelism,
+ uint32_t version, uint32_t type)
+ : AsyncWorker{env, "argon2:HashWorker"}, deferred{env},
+ plain{plain.Data(), plain.Data() + plain.ByteLength()},
+ salt{salt.Data(), salt.Data() + salt.ByteLength()},
+ secret{secret.Data(), secret.Data() + secret.ByteLength()},
+ ad{ad.Data(), ad.Data() + ad.ByteLength()}, hash_length{hash_length},
+ memory_cost{memory_cost}, time_cost{time_cost},
+ parallelism{parallelism}, version{version},
+        type{static_cast<argon2_type>(type)} {}
+
+ auto GetPromise() -> Napi::Promise { return deferred.Promise(); }
+
+protected:
+ void Execute() override {
+ hash.resize(hash_length);
+
+ argon2_context ctx;
+ ctx.out = hash.data();
+    ctx.outlen = static_cast<uint32_t>(hash.size());
+    ctx.pwd = plain.data();
+    ctx.pwdlen = static_cast<uint32_t>(plain.size());
+    ctx.salt = salt.data();
+    ctx.saltlen = static_cast<uint32_t>(salt.size());
+    ctx.secret = secret.empty() ? nullptr : secret.data();
+    ctx.secretlen = static_cast<uint32_t>(secret.size());
+    ctx.ad = ad.empty() ? nullptr : ad.data();
+    ctx.adlen = static_cast<uint32_t>(ad.size());
+ ctx.m_cost = memory_cost;
+ ctx.t_cost = time_cost;
+ ctx.lanes = parallelism;
+ ctx.threads = parallelism;
+ ctx.allocate_cbk = nullptr;
+ ctx.free_cbk = nullptr;
+ ctx.flags = ARGON2_FLAG_CLEAR_PASSWORD | ARGON2_FLAG_CLEAR_SECRET;
+ ctx.version = version;
+
+ if (const int result = argon2_ctx(&ctx, type); result != ARGON2_OK) {
+ /* LCOV_EXCL_START */
+ SetError(argon2_error_message(result));
+ /* LCOV_EXCL_STOP */
+ }
+ }
+
+ void OnOK() override {
+ deferred.Resolve(
+        Napi::Buffer<uint8_t>::Copy(Env(), hash.data(), hash.size()));
+ }
+
+ void OnError(const Napi::Error &err) override {
+ deferred.Reject(err.Value());
+ }
+
+private:
+  using ustring = std::vector<uint8_t>;
+
+ Napi::Promise::Deferred deferred;
+ ustring hash = {};
+
+ ustring plain;
+ ustring salt;
+ ustring secret;
+ ustring ad;
+
+ uint32_t hash_length;
+ uint32_t memory_cost;
+ uint32_t time_cost;
+ uint32_t parallelism;
+ uint32_t version;
+
+ argon2_type type;
+};
+
+auto Hash(const Napi::CallbackInfo &info) -> Napi::Value {
+ NAPI_CHECK(info.Length() == 1, "Hash", "expected 1 argument");
+
+  const auto &args = info[0].As<Napi::Object>();
+ auto *worker = new HashWorker{info.Env(),
+                                args["password"].As<Napi::Buffer<uint8_t>>(),
+                                args["salt"].As<Napi::Buffer<uint8_t>>(),
+                                args["secret"].As<Napi::Buffer<uint8_t>>(),
+                                args["data"].As<Napi::Buffer<uint8_t>>(),
+ args["hashLength"].ToNumber(),
+ args["m"].ToNumber(),
+ args["t"].ToNumber(),
+ args["p"].ToNumber(),
+ args["version"].ToNumber(),
+ args["type"].ToNumber()};
+
+ worker->Queue();
+ return worker->GetPromise();
+}
+
+auto init(Napi::Env env, Napi::Object exports) -> Napi::Object {
+ exports["hash"] = Napi::Function::New(env, Hash);
+ return exports;
+}
+
+} // namespace
+
+NODE_API_MODULE(argon2_lib, init)
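
For readers looking only at the C++ side: `Hash` expects a single options object whose fields match the ones read above. A hedged sketch of a direct call through node-gyp-build follows; it mirrors what argon2.cjs does internally and assumes it runs from the package root where the compiled addon can be located — normal callers should use the public API instead.

```js
// Illustrative direct call to the native binding (assumption: run from the package root).
const { randomBytes } = require("node:crypto");
const gypBuild = require("node-gyp-build");

const { hash: bindingsHash } = gypBuild(__dirname);

// Field names correspond to what argon2.cpp reads from the options object.
bindingsHash({
  password: Buffer.from("password1"),
  salt: randomBytes(16),
  secret: Buffer.alloc(0), // empty means "no keyed hashing"
  data: Buffer.alloc(0), // empty means "no associated data"
  hashLength: 32,
  m: 1 << 16, // memory cost in KiB
  t: 3, // time cost (iterations)
  p: 4, // parallelism (lanes/threads)
  version: 0x13,
  type: 2, // 0 = argon2d, 1 = argon2i, 2 = argon2id
}).then((hash) => console.log(hash.toString("hex")));
```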
diff --git a/argon2.d.ts b/argon2.d.ts
deleted file mode 100644
index 0e7bc7a..0000000
--- a/argon2.d.ts
+++ /dev/null
@@ -1,50 +0,0 @@
-// Type definitions for argon2 v0.19.2
-
-/// <reference types="node" />
-
-export const argon2d: 0;
-export const argon2i: 1;
-export const argon2id: 2;
-
-export interface Options {
- hashLength?: number;
- timeCost?: number;
- memoryCost?: number;
- parallelism?: number;
- type?: typeof argon2d | typeof argon2i | typeof argon2id;
- version?: number;
- salt?: Buffer;
- saltLength?: number;
- raw?: boolean;
- secret?: Buffer;
- associatedData?: Buffer;
-}
-
-export interface NumericLimit {
- max: number;
- min: number;
-}
-
-export interface OptionLimits {
- hashLength: NumericLimit;
- memoryCost: NumericLimit;
- timeCost: NumericLimit;
- parallelism: NumericLimit;
-}
-
-export const defaults: Options;
-export const limits: OptionLimits;
-export function hash(
- plain: Buffer | string,
- options: Options & { raw: true },
-): Promise<Buffer>;
-export function hash(
- plain: Buffer | string,
- options?: Options & { raw?: false },
-): Promise<string>;
-export function verify(
- hash: string,
- plain: Buffer | string,
- options?: Options,
-): Promise<boolean>;
-export function needsRehash(hash: string, options?: Options): boolean;
diff --git a/argon2.js b/argon2.js
deleted file mode 100644
index 0e6cfa2..0000000
--- a/argon2.js
+++ /dev/null
@@ -1,121 +0,0 @@
-"use strict";
-const assert = require("assert");
-const { randomBytes, timingSafeEqual } = require("crypto");
-const path = require("path");
-const { promisify } = require("util");
-const binary = require("@mapbox/node-pre-gyp");
-
-const bindingPath = binary.find(path.resolve(__dirname, "./package.json"));
-const { hash: _hash } = require(bindingPath);
-
-const { deserialize, serialize } = require("@phc/format");
-
-const types = Object.freeze({ argon2d: 0, argon2i: 1, argon2id: 2 });
-
-const defaults = Object.freeze({
- hashLength: 32,
- saltLength: 16,
- timeCost: 3,
- memoryCost: 1 << 16,
- parallelism: 4,
- type: types.argon2id,
- version: 0x13,
-});
-
-const limits = Object.freeze({
- hashLength: { min: 4, max: 2 ** 32 - 1 },
- memoryCost: { min: 1 << 10, max: 2 ** 32 - 1 },
- timeCost: { min: 2, max: 2 ** 32 - 1 },
- parallelism: { min: 1, max: 2 ** 24 - 1 },
-});
-
-const names = Object.freeze({
- [types.argon2d]: "argon2d",
- [types.argon2i]: "argon2i",
- [types.argon2id]: "argon2id",
-});
-
-const bindingsHash = promisify(_hash);
-const generateSalt = promisify(randomBytes);
-
-const assertLimits =
- (options) =>
- ([key, { max, min }]) => {
- const value = options[key];
- assert(
- min <= value && value <= max,
- `Invalid ${key}, must be between ${min} and ${max}.`,
- );
- };
-
-const hash = async (plain, { raw, salt, ...options } = {}) => {
- options = { ...defaults, ...options };
-
- Object.entries(limits).forEach(assertLimits(options));
-
- salt = salt || (await generateSalt(options.saltLength));
-
- const hash = await bindingsHash(Buffer.from(plain), salt, options);
- if (raw) {
- return hash;
- }
-
- const {
- type,
- version,
- memoryCost: m,
- timeCost: t,
- parallelism: p,
- associatedData: data,
- } = options;
- return serialize({
- id: names[type],
- version,
- params: { m, t, p, ...(data ? { data } : {}) },
- salt,
- hash,
- });
-};
-
-const needsRehash = (digest, options) => {
- const { memoryCost, timeCost, version } = { ...defaults, ...options };
-
- const {
- version: v,
- params: { m, t },
- } = deserialize(digest);
- return +v !== +version || +m !== +memoryCost || +t !== +timeCost;
-};
-
-const verify = async (digest, plain, options) => {
- const obj = deserialize(digest);
- // Only these have the "params" key, so if the password was encoded
- // using any other method, the destructuring throws an error
- if (!(obj.id in types)) {
- return false;
- }
-
- const {
- id,
- version = 0x10,
- params: { m, t, p, data },
- salt,
- hash,
- } = obj;
-
- return timingSafeEqual(
- await bindingsHash(Buffer.from(plain), salt, {
- ...options,
- type: types[id],
- version: +version,
- hashLength: hash.length,
- memoryCost: +m,
- timeCost: +t,
- parallelism: +p,
- ...(data ? { associatedData: Buffer.from(data, "base64") } : {}),
- }),
- hash,
- );
-};
-
-module.exports = { defaults, limits, hash, needsRehash, verify, ...types };
diff --git a/argon2_node.cpp b/argon2_node.cpp
deleted file mode 100644
index 8b43233..0000000
--- a/argon2_node.cpp
+++ /dev/null
@@ -1,127 +0,0 @@
-
-#include "argon2/include/argon2.h"
-#include <cassert>
-#include <memory>
-#include <napi.h>
-#include <vector>
-
-using namespace Napi;
-using ustring = std::vector<uint8_t>;
-
-static ustring from_buffer(const Value &value) {
-    const auto &buf = value.As<Buffer<uint8_t>>();
- const auto &data = buf.Data();
- return {data, data + buf.Length()};
-}
-
-static Buffer<uint8_t> to_buffer(const Env &env, const ustring &str) {
-    return Buffer<uint8_t>::Copy(env, str.data(), str.size());
-}
-
-struct Options {
- ustring secret;
- ustring ad;
-
- uint32_t hash_length;
- uint32_t time_cost;
- uint32_t memory_cost;
- uint32_t parallelism;
- uint32_t version;
-
- argon2_type type;
-};
-
-static argon2_context make_context(uint8_t *buf, ustring &plain, ustring &salt,
- Options &opts) {
- argon2_context ctx;
-
- ctx.out = buf;
- ctx.outlen = opts.hash_length;
- ctx.pwd = plain.data();
- ctx.pwdlen = plain.size();
- ctx.salt = salt.data();
- ctx.saltlen = salt.size();
- ctx.secret = opts.secret.empty() ? nullptr : opts.secret.data();
- ctx.secretlen = opts.secret.size();
- ctx.ad = opts.ad.empty() ? nullptr : opts.ad.data();
- ctx.adlen = opts.ad.size();
- ctx.t_cost = opts.time_cost;
- ctx.m_cost = opts.memory_cost;
- ctx.lanes = opts.parallelism;
- ctx.threads = opts.parallelism;
- ctx.allocate_cbk = nullptr;
- ctx.free_cbk = nullptr;
- ctx.flags = ARGON2_FLAG_CLEAR_PASSWORD | ARGON2_FLAG_CLEAR_SECRET;
- ctx.version = opts.version;
-
- return ctx;
-}
-
-class HashWorker final : public AsyncWorker {
-public:
- HashWorker(const Function &callback, ustring &&plain, ustring &&salt,
- Options &&opts)
- : AsyncWorker{callback, "argon2:HashWorker"}, plain{std::move(plain)},
- salt{std::move(salt)}, opts{std::move(opts)} {}
-
- void Execute() override {
-        auto buf = std::make_unique<uint8_t[]>(opts.hash_length);
-
- auto ctx = make_context(buf.get(), plain, salt, opts);
- int result = argon2_ctx(&ctx, opts.type);
-
- if (result != ARGON2_OK) {
- /* LCOV_EXCL_START */
- SetError(argon2_error_message(result));
- /* LCOV_EXCL_STOP */
- } else {
- hash.assign(buf.get(), buf.get() + opts.hash_length);
- }
- }
-
- void OnOK() override {
- const auto &env = Env();
- HandleScope scope{env};
- Callback()({env.Undefined(), to_buffer(env, hash)});
- }
-
-private:
- ustring plain;
- ustring salt;
- Options opts;
-
- ustring hash;
-};
-
-static Options extract_opts(const Object &opts) {
- return {
- opts.Has("secret") ? from_buffer(opts["secret"]) : ustring{},
- opts.Has("associatedData") ? from_buffer(opts["associatedData"])
- : ustring{},
- opts["hashLength"].ToNumber(),
- opts["timeCost"].ToNumber(),
- opts["memoryCost"].ToNumber(),
- opts["parallelism"].ToNumber(),
- opts["version"].ToNumber(),
- argon2_type(int(opts["type"].ToNumber())),
- };
-}
-
-static Value Hash(const CallbackInfo &info) {
- assert(info.Length() == 4 && info[0].IsBuffer() && info[1].IsBuffer() &&
- info[2].IsObject() && info[3].IsFunction());
-
-    auto worker = new HashWorker{info[3].As<Function>(), from_buffer(info[0]),
- from_buffer(info[1]),
- extract_opts(info[2].As