From afb9307c596482853bedd0808e351225ec2b5231 Mon Sep 17 00:00:00 2001 From: Jannik Hollenbach Date: Wed, 3 Sep 2025 14:25:57 +0200 Subject: [PATCH] Migrate parser-sdk to typescript Signed-off-by: Jannik Hollenbach --- .templates/new-scanner/parser/parser.test.js | 8 +- .../integrating-a-scanner/parser-dir.md | 4 +- .../contributing/test-concept/scanner-test.md | 4 +- parser-sdk/nodejs/Dockerfile | 3 +- parser-sdk/nodejs/package-lock.json | 32 +++-- parser-sdk/nodejs/package.json | 4 +- parser-sdk/nodejs/parser-utils.js | 81 ----------- parser-sdk/nodejs/parser-utils.ts | 136 ++++++++++++++++++ .../{parser-wrapper.js => parser-wrapper.ts} | 50 +++++-- scanners/ffuf/parser/parser.test.js | 18 +-- .../git-repo-scanner/parser/parser.test.js | 4 +- scanners/gitleaks/parser/parser.test.js | 12 +- scanners/kube-hunter/parser/parser.test.js | 4 +- scanners/ncrack/parser/parser.test.js | 8 +- scanners/nikto/parser/parser.test.js | 8 +- scanners/nmap/parser/parser.test.js | 14 +- scanners/nuclei/parser/parser.test.js | 12 +- scanners/screenshooter/parser/parser.test.js | 4 +- scanners/semgrep/parser/parser.test.js | 6 +- scanners/ssh-audit/parser/parser.test.js | 6 +- scanners/sslyze/parser/parser.test.js | 18 +-- scanners/subfinder/parser/parser.test.js | 10 +- scanners/trivy-sbom/parser/parser.test.js | 4 +- scanners/trivy/parser/parser.test.js | 16 +-- scanners/whatweb/parser/parser.test.js | 8 +- scanners/wpscan/parser/parser.test.js | 6 +- .../parser/parser.test.js | 12 +- 27 files changed, 288 insertions(+), 204 deletions(-) delete mode 100644 parser-sdk/nodejs/parser-utils.js create mode 100644 parser-sdk/nodejs/parser-utils.ts rename parser-sdk/nodejs/{parser-wrapper.js => parser-wrapper.ts} (81%) diff --git a/.templates/new-scanner/parser/parser.test.js b/.templates/new-scanner/parser/parser.test.js index f747350079..5daf88095c 100644 --- a/.templates/new-scanner/parser/parser.test.js +++ b/.templates/new-scanner/parser/parser.test.js @@ -11,11 +11,11 @@ 
test("should properly parse new-scanner json file", async () => { const fileContent = JSON.parse( await readFile(import.meta.dirname + "/__testFiles__/example.com.json", { encoding: "utf8", - }) + }), ); const findings = await parse(fileContent); // validate findings - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(); }); @@ -23,10 +23,10 @@ test("should properly parse empty json file", async () => { const fileContent = JSON.parse( await readFile(import.meta.dirname + "/__testFiles__/empty.json", { encoding: "utf8", - }) + }), ); const findings = await parse(fileContent); // validate findings - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(); }); diff --git a/documentation/docs/contributing/integrating-a-scanner/parser-dir.md b/documentation/docs/contributing/integrating-a-scanner/parser-dir.md index 795cf3cdac..5840f449b3 100644 --- a/documentation/docs/contributing/integrating-a-scanner/parser-dir.md +++ b/documentation/docs/contributing/integrating-a-scanner/parser-dir.md @@ -28,7 +28,7 @@ COPY --from=build --chown=root:root --chmod=755 /home/app/node_modules/ ./node_m COPY --chown=root:root --chmod=755 ./parser.js ./parser.js ``` -If your parser does not require any external dependencies, A multi-stage build is not needed. +If your parser does not require any external dependencies, a multi-stage build is not needed. Instead, a simpler Dockerfile can be used. ```dockerfile @@ -94,7 +94,7 @@ Please provide some tests for your parser in the `parser.test.js` file. 
To make import { validateParser } from "@securecodebox/parser-sdk-nodejs/parser-utils"; const findings = await parse(fileContent); -await expect(validateParser(findings)).resolves.toBeUndefined(); +expect(validateParser(findings)).toBeUndefined(); ``` If you need additional files for your test please save these in the `__testFiles__` directory. Please take a look at [Integration Tests | secureCodeBox](/docs/contributing/integrating-a-scanner/integration-tests) for more information. diff --git a/documentation/docs/contributing/test-concept/scanner-test.md b/documentation/docs/contributing/test-concept/scanner-test.md index a996322f43..5af9945986 100644 --- a/documentation/docs/contributing/test-concept/scanner-test.md +++ b/documentation/docs/contributing/test-concept/scanner-test.md @@ -12,7 +12,7 @@ We employ two types of tests: Unit tests for the parser and integration-tests. B ### Unit Tests for Parser -Each scanner has a parser and each parser has a unit test file. The unit test file is named parser.test.js. This file contains different test scenarios. In each test, the results from parser.js and the folder `_snapshots_` are compared. If they are the same, the unit test is successful. +Each scanner has a parser and each parser has a unit test file. The unit test file is named parser.test.js. This file contains different test scenarios. In each test, the results from parser.js and the folder `_snapshots_` are compared. If they are the same, the unit test is successful. 
A unit test can look like this: ```js @@ -24,7 +24,7 @@ test("parser parses large json result without vulnerable extensions successfully } ); const findings = await parse(JSON.parse(fileContent)); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchSnapshot(); }); diff --git a/parser-sdk/nodejs/Dockerfile b/parser-sdk/nodejs/Dockerfile index 926973fde7..c5b2168448 100644 --- a/parser-sdk/nodejs/Dockerfile +++ b/parser-sdk/nodejs/Dockerfile @@ -6,7 +6,7 @@ FROM oven/bun:1.2 AS build WORKDIR /home/app/ COPY package.json package-lock.json ./ RUN bun install --ignore-scripts -COPY *.js ./ +COPY *.ts findings-schema.json ./ RUN bun run build FROM node:22-alpine @@ -14,7 +14,6 @@ ARG NODE_ENV RUN addgroup --system --gid 1001 app && adduser app --system --uid 1001 --ingroup app WORKDIR /home/app/parser-wrapper/ COPY --chown=root:root --chmod=755 ./package.json ./package-lock.json ./ -COPY --chown=root:root --chmod=755 ./findings-schema.json ./findings-schema.json COPY --from=build --chown=root:root --chmod=755 /home/app/build/ ./ USER 1001 ENV NODE_ENV=${NODE_ENV:-production} diff --git a/parser-sdk/nodejs/package-lock.json b/parser-sdk/nodejs/package-lock.json index 1e0ae4e296..5fadd103f4 100644 --- a/parser-sdk/nodejs/package-lock.json +++ b/parser-sdk/nodejs/package-lock.json @@ -15,7 +15,9 @@ "ajv-formats": "^3.0.1", "jsonpointer": "^5.0.1" }, - "devDependencies": {} + "devDependencies": { + "@types/node": "^22.18.0" + } }, "node_modules/@jsep-plugin/assignment": { "version": "1.3.0", @@ -72,12 +74,12 @@ "license": "MIT" }, "node_modules/@types/node": { - "version": "22.10.2", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.10.2.tgz", - "integrity": "sha512-Xxr6BBRCAOQixvonOye19wnzyDiUtTeqldOOmj3CkeblonbccA12PFwlufvRdrpjXxqnmUaeiU5EOA+7s5diUQ==", + "version": "22.18.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.18.0.tgz", + "integrity": 
"sha512-m5ObIqwsUp6BZzyiy4RdZpzWGub9bqLJMvZDD0QMXhxjqMHMENlj+SqF5QxoUwaQNFe+8kz8XM8ZQhqkQPTgMQ==", "license": "MIT", "dependencies": { - "undici-types": "~6.20.0" + "undici-types": "~6.21.0" } }, "node_modules/@types/node-fetch": { @@ -778,9 +780,9 @@ "license": "MIT" }, "node_modules/undici-types": { - "version": "6.20.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz", - "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==", + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", "license": "MIT" }, "node_modules/webidl-conversions": { @@ -869,11 +871,11 @@ "integrity": "sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg==" }, "@types/node": { - "version": "22.10.2", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.10.2.tgz", - "integrity": "sha512-Xxr6BBRCAOQixvonOye19wnzyDiUtTeqldOOmj3CkeblonbccA12PFwlufvRdrpjXxqnmUaeiU5EOA+7s5diUQ==", + "version": "22.18.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.18.0.tgz", + "integrity": "sha512-m5ObIqwsUp6BZzyiy4RdZpzWGub9bqLJMvZDD0QMXhxjqMHMENlj+SqF5QxoUwaQNFe+8kz8XM8ZQhqkQPTgMQ==", "requires": { - "undici-types": "~6.20.0" + "undici-types": "~6.21.0" } }, "@types/node-fetch": { @@ -1354,9 +1356,9 @@ "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" }, "undici-types": { - "version": "6.20.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz", - "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==" + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": 
"sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==" }, "webidl-conversions": { "version": "3.0.1", diff --git a/parser-sdk/nodejs/package.json b/parser-sdk/nodejs/package.json index 78ea9a283c..3f740bc2dd 100644 --- a/parser-sdk/nodejs/package.json +++ b/parser-sdk/nodejs/package.json @@ -17,5 +17,7 @@ "ajv-formats": "^3.0.1", "jsonpointer": "^5.0.1" }, - "devDependencies": {} + "devDependencies": { + "@types/node": "^22.18.0" + } } diff --git a/parser-sdk/nodejs/parser-utils.js b/parser-sdk/nodejs/parser-utils.js deleted file mode 100644 index 5f072896b9..0000000000 --- a/parser-sdk/nodejs/parser-utils.js +++ /dev/null @@ -1,81 +0,0 @@ -// SPDX-FileCopyrightText: the secureCodeBox authors -// -// SPDX-License-Identifier: Apache-2.0 - -import { readFile } from "node:fs/promises"; -import { randomUUID } from "node:crypto"; - -import addFormats from "ajv-formats"; -import { get } from "jsonpointer"; -import Ajv from "ajv-draft-04"; - -const ajv = new Ajv(); -addFormats(ajv); - -export async function validate(findings) { - const jsonSchemaString = await readFile( - import.meta.dirname + "/findings-schema.json", - "utf8", - ); - const jsonSchema = JSON.parse(jsonSchemaString); - const validator = ajv.compile(jsonSchema); - const valid = validator(findings); - if (!valid) { - const errorMessage = generateErrorMessage(validator.errors, findings); - throw new Error(errorMessage); - } -} - -export function addScanMetadata(findings, scan) { - const scanMetadata = { - created_at: scan.metadata.creationTimestamp, - name: scan.metadata.name, - namespace: scan.metadata.namespace, - scan_type: scan.spec.scanType, - }; - - return findings.map((finding) => ({ - ...finding, - scan: scanMetadata, - })); -} - -export function addIdsAndDates(findings) { - return findings.map((finding) => { - return { - ...finding, - id: randomUUID(), - parsed_at: new Date().toISOString(), - }; - }); -} - -// used for tests to validate if the parser sets 
all required fields correctly. Adds sample IDs and Dates to the findings which would normally be set by the parser-sdk. -export async function validateParser(findings) { - const sampleScan = { - metadata: { - creationTimestamp: new Date().toISOString(), - name: "sample-scan-name", - namespace: "sample-namespace", - }, - spec: { - scanType: "sample-scan-type", - }, - }; - // add sample IDs and Dates only if the findings Array is not empty - const extendedData = addScanMetadata(addIdsAndDates(findings), sampleScan); - return validate(extendedData); -} - -function generateErrorMessage(errors, findings) { - return JSON.stringify( - errors.map((error) => { - return { - ...error, - invalidValue: get(findings, error.instancePath), - }; - }), - null, - 2, - ); -} diff --git a/parser-sdk/nodejs/parser-utils.ts b/parser-sdk/nodejs/parser-utils.ts new file mode 100644 index 0000000000..d01b13075d --- /dev/null +++ b/parser-sdk/nodejs/parser-utils.ts @@ -0,0 +1,136 @@ +// SPDX-FileCopyrightText: the secureCodeBox authors +// +// SPDX-License-Identifier: Apache-2.0 + +import { randomUUID } from "node:crypto"; + +import addFormats from "ajv-formats"; +import { get } from "jsonpointer"; +import Ajv, { type ErrorObject } from "ajv-draft-04"; +import findingsSchema from "./findings-schema.json" with { type: "json" }; + +const ajv = new Ajv(); +addFormats(ajv); + +export type Severity = "INFORMATIONAL" | "LOW" | "MEDIUM" | "HIGH"; + +export interface Reference { + type: string; + value: string; +} + +export interface ScanSummary { + created_at: string; // ISO8601 date-time + name: string; + namespace: string; + scan_type: string; +} + +// parsers do not need to set all fields as fields like the ID are set by the parser-sdk +export interface FindingFromParser { + identified_at?: string | null; // ISO8601 date-time + name: string; + description?: string | null; + category: string; + severity: Severity; + mitigation?: string | null; + references?: Reference[] | null; + attributes?: 
Record; + location?: string | null; +} + +export interface FindingWithIdsAndDates extends FindingFromParser { + id: string; // UUID v4 + parsed_at: string; // ISO8601 date-time +} + +export interface Finding extends FindingWithIdsAndDates { + scan: ScanSummary; +} + +export interface Scan { + metadata: { + name: string; + namespace: string; + creationTimestamp: string; + }; + spec: { + scanType: string; + }; + status: { + rawResultType: string; + }; +} + +export function validate(findings: unknown): asserts findings is Finding[] { + const validator = ajv.compile(findingsSchema); + const valid = validator(findings); + if (!valid && validator.errors) { + const errorMessage = generateErrorMessage(validator.errors, findings); + throw new Error(errorMessage); + } else if (!valid) { + throw new Error("Validation of findings failed for unknown reasons."); + } +} + +export function addScanMetadata( + findings: FindingWithIdsAndDates[], + scan: Scan, +): Finding[] { + const scanMetadata = { + created_at: scan.metadata.creationTimestamp, + name: scan.metadata.name, + namespace: scan.metadata.namespace, + scan_type: scan.spec.scanType, + }; + + return findings.map((finding) => ({ + ...finding, + scan: scanMetadata, + })); +} + +export function addIdsAndDates( + findings: FindingFromParser[], +): FindingWithIdsAndDates[] { + return findings.map((finding) => { + return { + ...finding, + id: randomUUID(), + parsed_at: new Date().toISOString(), + }; + }); +} + +// used for tests to validate if the parser sets all required fields correctly. Adds sample IDs and Dates to the findings which would normally be set by the parser-sdk. 
+export function validateParser(findings: FindingFromParser[]) { + const sampleScan: Scan = { + metadata: { + creationTimestamp: new Date().toISOString(), + name: "sample-scan-name", + namespace: "sample-namespace", + }, + spec: { + scanType: "sample-scan-type", + }, + status: { + rawResultType: "example-results", + }, + }; + // add sample IDs and Dates only if the findings Array is not empty + const extendedData = addScanMetadata(addIdsAndDates(findings), sampleScan); + return validate(extendedData); +} + +function generateErrorMessage(errors: ErrorObject[], findings: Finding[]) { + return JSON.stringify( + errors.map((error) => { + return { + ...error, + invalidValue: get(findings, error.instancePath), + }; + }), + null, + 2, + ); +} diff --git a/parser-sdk/nodejs/parser-wrapper.js b/parser-sdk/nodejs/parser-wrapper.ts similarity index 81% rename from parser-sdk/nodejs/parser-wrapper.js rename to parser-sdk/nodejs/parser-wrapper.ts index 4534185bc7..e46c2c59f2 100644 --- a/parser-sdk/nodejs/parser-wrapper.js +++ b/parser-sdk/nodejs/parser-wrapper.ts @@ -10,17 +10,43 @@ import { PatchStrategy, } from "@kubernetes/client-node"; +// @ts-ignore: parsers are provided during the docker build of the actual parsers. import { parse } from "./parser/parser.js"; -import { validate, addIdsAndDates, addScanMetadata } from "./parser-utils.js"; +import { + validate, + addIdsAndDates, + addScanMetadata, + type Finding, + type Severity, + type Scan, +} from "./parser-utils.js"; const kc = new KubeConfig(); kc.loadFromCluster(); const k8sApi = kc.makeApiClient(CustomObjectsApi); -const scanName = process.env["SCAN_NAME"]; -const namespace = process.env["NAMESPACE"]; +const scanName = process.env["SCAN_NAME"]!; +if (!scanName) { + console.error( + "Parser was started without `SCAN_NAME` environment variable set. 
This is normally done by the operator.", + ); + console.error( + "If you are seeing this error during a normal scan execution please open up an issue..", + ); + process.exit(1); +} +const namespace = process.env["NAMESPACE"]!; +if (!namespace) { + console.error( + "Parser was started without `NAMESPACE` environment variable set. This is normally done by the operator.", + ); + console.error( + "If you are seeing this error during a normal scan execution please open up an issue..", + ); + process.exit(1); +} -function severityCount(findings, severity) { +function severityCount(findings: Finding[], severity: Severity) { return findings.filter( ({ severity: findingSeverity }) => findingSeverity.toUpperCase() === severity, @@ -28,8 +54,8 @@ function severityCount(findings, severity) { } async function uploadResultToFileStorageService( - resultUploadUrl, - findingsWithIdsAndDates, + resultUploadUrl: string, + findingsWithIdsAndDates: Finding[], ) { try { const res = await fetch(resultUploadUrl, { @@ -61,7 +87,7 @@ async function uploadResultToFileStorageService( } } -async function updateScanStatus(findings) { +async function updateScanStatus(findings: Finding[]) { try { const findingCategories = new Map(); for (const { category } of findings) { @@ -104,7 +130,7 @@ async function updateScanStatus(findings) { } } -async function extractScan() { +async function extractScan(): Promise { try { return await k8sApi.getNamespacedCustomObject({ group: "execution.securecodebox.io", @@ -120,7 +146,7 @@ async function extractScan() { } } -async function extractParseDefinition(scan) { +async function extractParseDefinition(scan: Scan) { try { return await k8sApi.getNamespacedCustomObject({ group: "execution.securecodebox.io", @@ -136,7 +162,7 @@ async function extractParseDefinition(scan) { } } -async function fetchResultFile(resultFileUrl, contentType) { +async function fetchResultFile(resultFileUrl: string, contentType?: "Binary") { try { const response = await 
fetch(resultFileUrl, { method: "GET" }); if (!response.ok) { @@ -164,7 +190,7 @@ async function main() { const resultUploadUrl = process.argv[3]; console.log("Fetching result file"); - let data = null; + let data: string | Buffer | null = null; try { data = await fetchResultFile( resultFileUrl, @@ -201,7 +227,7 @@ async function main() { crash_on_failed_validation, ); try { - await validate(findingsWithMetadata); + validate(findingsWithMetadata); console.log("The Findings were successfully validated"); } catch (error) { console.error("The Findings Validation failed with error(s):"); diff --git a/scanners/ffuf/parser/parser.test.js b/scanners/ffuf/parser/parser.test.js index a00a0eb604..14802095d8 100644 --- a/scanners/ffuf/parser/parser.test.js +++ b/scanners/ffuf/parser/parser.test.js @@ -16,7 +16,7 @@ test("should properly parse ffuf json file", async () => { ); const findings = await parse(fileContent); // validate findings - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ { @@ -81,7 +81,7 @@ test("should properly parse ffuf json file wih multiple fuzz keyword inputs", as const findings = await parse(fileContent); // validate findings - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ { @@ -122,7 +122,7 @@ test("should properly parse ffuf json file with postdata", async () => { ); const findings = await parse(fileContent); // validate findings - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ { @@ -162,7 +162,7 @@ test("should properly parse empty json file", async () => { }); const findings = await parse(fileContent); // validate findings - await expect(validateParser(findings)).resolves.toBeUndefined(); + 
expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(`[]`); }); @@ -175,7 +175,7 @@ test("should properly parse juice-shop findings json file", async () => { ); const findings = await parse(fileContent); // validate findings - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ { @@ -241,27 +241,27 @@ test("should properly parse zero findings json file", async () => { ); const findings = await parse(fileContent); // validate findings - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(`[]`); }); test("should properly parse empty string", async () => { const findings = await parse(""); // validate findings - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(`[]`); }); test("should properly parse null", async () => { const findings = await parse(null); // validate findings - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(`[]`); }); test("should properly parse undefined", async () => { const findings = await parse(undefined); // validate findings - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(`[]`); }); diff --git a/scanners/git-repo-scanner/parser/parser.test.js b/scanners/git-repo-scanner/parser/parser.test.js index 3180436f75..108df857f6 100644 --- a/scanners/git-repo-scanner/parser/parser.test.js +++ b/scanners/git-repo-scanner/parser/parser.test.js @@ -15,7 +15,7 @@ test("should properly parse empty json file", async () => { }, ); const findings = await parse(fileContent); - await 
expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(`[]`); }); @@ -27,7 +27,7 @@ test("should properly parse git-scanner json file", async () => { }, ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ { diff --git a/scanners/gitleaks/parser/parser.test.js b/scanners/gitleaks/parser/parser.test.js index fd83d8f03b..84da650a5f 100644 --- a/scanners/gitleaks/parser/parser.test.js +++ b/scanners/gitleaks/parser/parser.test.js @@ -15,7 +15,7 @@ test("should properly parse empty gitleaks json file", async () => { }, ); const findings = await parse(jsonContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchObject([]); }); @@ -27,7 +27,7 @@ test("should properly parse gitleaks json file with null result", async () => { }, ); const findings = await parse(jsonContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchObject([]); }); @@ -39,7 +39,7 @@ test("should properly parse gitleaks json file", async () => { }, ); const findings = await parse(jsonContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ { @@ -132,7 +132,7 @@ test("should define severity based on tags in result file", async () => { }, ); const findings = await parse(jsonContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ @@ -222,7 +222,7 @@ test("should properly construct commit URL if given in scan annotation without t }, ); const 
findings = await parse(jsonContent, scan); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ @@ -329,7 +329,7 @@ test("should properly construct commit URL if given in scan annotation with trai }, ); const findings = await parse(jsonContent, scan); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ diff --git a/scanners/kube-hunter/parser/parser.test.js b/scanners/kube-hunter/parser/parser.test.js index 9cddcab818..0c980c85a4 100644 --- a/scanners/kube-hunter/parser/parser.test.js +++ b/scanners/kube-hunter/parser/parser.test.js @@ -17,7 +17,7 @@ test("parses result from kind-1.18-in-cluster-scan correctly", async () => { ), ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchSnapshot(); }); @@ -29,6 +29,6 @@ test("should properly parse empty kube-hunter json file", async () => { }, ); const findings = await parse(jsonContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(`[]`); }); diff --git a/scanners/ncrack/parser/parser.test.js b/scanners/ncrack/parser/parser.test.js index 2e30ff3040..a74a0c8d3a 100644 --- a/scanners/ncrack/parser/parser.test.js +++ b/scanners/ncrack/parser/parser.test.js @@ -16,7 +16,7 @@ it("should return no findings when ncrack has not found credentials", async () = }, ); const findings = await parse(ncrackXML); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings.length).toBe(0); }); @@ -29,7 +29,7 @@ it("should return findings when ncrack found credentials", async () => { }, ); const findings = 
await parse(ncrackXML); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); const [finding, ...otherFindings] = findings; expect(finding).toMatchInlineSnapshot(` { @@ -64,7 +64,7 @@ it("should return no findings when ncrack has not found credentials scanning two }, ); const findings = await parse(ncrackXML); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings.length).toBe(0); }); @@ -78,7 +78,7 @@ it("should return findings when ncrack found two credentials scanning two servic }, ); const findings = await parse(ncrackXML); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ { diff --git a/scanners/nikto/parser/parser.test.js b/scanners/nikto/parser/parser.test.js index bf1737d27c..1a56dbbcda 100644 --- a/scanners/nikto/parser/parser.test.js +++ b/scanners/nikto/parser/parser.test.js @@ -15,7 +15,7 @@ test("parses www.securecodebox.io result file into findings", async () => { }, ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchSnapshot(); }); @@ -27,7 +27,7 @@ test("parses OWASP Juice Shop result file into findings", async () => { }, ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchSnapshot(); }); @@ -39,7 +39,7 @@ test("should properly parse empty json file", async () => { }, ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(`[]`); }); @@ -51,5 +51,5 @@ test("parses 'no web server 
found' finding correctly", async () => { }, ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); }); diff --git a/scanners/nmap/parser/parser.test.js b/scanners/nmap/parser/parser.test.js index 595f0a97e8..b6baedb458 100644 --- a/scanners/nmap/parser/parser.test.js +++ b/scanners/nmap/parser/parser.test.js @@ -16,7 +16,7 @@ test("should properly parse nmap xml file", async () => { ); const findings = await parse(xmlContent); // validate findings - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ { @@ -147,7 +147,7 @@ test("should properly parse a nmap xml without any ports", async () => { ); const findings = await parse(xmlContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ { @@ -178,7 +178,7 @@ test("should properly parse a nmap xml without any host", async () => { ); const findings = await parse(xmlContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(`[]`); }); @@ -191,7 +191,7 @@ test("should properly parse a nmap xml with missing service information", async ); const findings = await parse(xmlContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ { @@ -247,7 +247,7 @@ test("Should properly parse a nmap xml with script specific SMB findings", async ); const findings = await parse(xmlContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(await parse(xmlContent)).toMatchInlineSnapshot(` [ { @@ -452,7 +452,7 @@ 
test("should properly parse a script finding for ftp in an xml file", async () = }, ); const findings = await parse(xmlContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(await parse(xmlContent)).toMatchInlineSnapshot(` [ { @@ -544,7 +544,7 @@ test("should parse scanme.nmap.org results properly", async () => { }, ); const findings = await parse(xmlContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(await parse(xmlContent)).toMatchInlineSnapshot(` [ { diff --git a/scanners/nuclei/parser/parser.test.js b/scanners/nuclei/parser/parser.test.js index 7ed9d91189..2628589bd8 100644 --- a/scanners/nuclei/parser/parser.test.js +++ b/scanners/nuclei/parser/parser.test.js @@ -16,7 +16,7 @@ test("parses empty result correctly", async () => { ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchSnapshot(); }); @@ -29,7 +29,7 @@ test("parses the example.com result correctly", async () => { ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchSnapshot(); }); @@ -42,7 +42,7 @@ test("parses secureCodeBox.io result correctly", async () => { ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchSnapshot(); }); @@ -55,7 +55,7 @@ test("parses log4shell result correctly", async () => { ); const findings = await parse(JSON.parse(fileContent)); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchSnapshot(); }); @@ -69,7 +69,7 @@ 
test("parses results with requests & responses correctly", async () => { ); const findings = await parse(JSON.parse(fileContent)); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchSnapshot(); }); @@ -83,7 +83,7 @@ test("parses findings with hostnames which do not contain a port correctly", asy ); const findings = await parse(JSON.parse(fileContent)); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings[0]).toEqual( expect.objectContaining({ location: "example.com", diff --git a/scanners/screenshooter/parser/parser.test.js b/scanners/screenshooter/parser/parser.test.js index dceabb9798..7f804e5947 100644 --- a/scanners/screenshooter/parser/parser.test.js +++ b/scanners/screenshooter/parser/parser.test.js @@ -25,7 +25,7 @@ beforeEach(() => { test("should create finding correctly", async () => { const findings = await parse("thisisabinarystringformatedimage", scan); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ { @@ -46,6 +46,6 @@ test("should create finding correctly", async () => { test("should not create finding if image is empty", async () => { scan.spec.parameters = ["https://www.iteratec.de"]; const findings = await parse("", scan); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(`[]`); }); diff --git a/scanners/semgrep/parser/parser.test.js b/scanners/semgrep/parser/parser.test.js index 97c0e3b40a..01e88f8913 100644 --- a/scanners/semgrep/parser/parser.test.js +++ b/scanners/semgrep/parser/parser.test.js @@ -16,7 +16,7 @@ test("should properly parse file from inline semgrep usage", async () => { ); const findings = await parse(fileContent); // validate findings - 
await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ { @@ -45,7 +45,7 @@ test("should properly parse file with a single result", async () => { ); const findings = await parse(fileContent); // validate findings - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ { @@ -87,7 +87,7 @@ test("should properly parse file with multiple results", async () => { ); const findings = await parse(fileContent); // validate findings - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ { diff --git a/scanners/ssh-audit/parser/parser.test.js b/scanners/ssh-audit/parser/parser.test.js index 6bbc414ec2..0de7f751d6 100644 --- a/scanners/ssh-audit/parser/parser.test.js +++ b/scanners/ssh-audit/parser/parser.test.js @@ -12,7 +12,7 @@ test("ssh-audit parser parses a result into proper findings for dummy-ssh", asyn encoding: "utf8", }); const findings = await parse(hosts); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ { @@ -460,7 +460,7 @@ test("should properly parse empty json file", async () => { }, ); const findings = await parse(jsonContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(`[]`); }); @@ -469,7 +469,7 @@ test("ssh-audit parser parses a result into proper findings for an example with encoding: "utf8", }); const findings = await parse(hosts); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ { diff --git 
a/scanners/sslyze/parser/parser.test.js b/scanners/sslyze/parser/parser.test.js index 6acbb812c8..8c29b4fa55 100644 --- a/scanners/sslyze/parser/parser.test.js +++ b/scanners/sslyze/parser/parser.test.js @@ -16,7 +16,7 @@ test("parses result file for www.securecodebox.io correctly", async () => { ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ @@ -64,7 +64,7 @@ test("parses result file for tls-v1-0.badssl.com:1010 correctly", async () => { ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ @@ -171,7 +171,7 @@ test("parses result file for expired.badssl.com correctly", async () => { const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ @@ -293,7 +293,7 @@ test("parses result file for wrong.host.badssl.com correctly", async () => { ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ @@ -416,7 +416,7 @@ test("parses result file for untrusted-root.badssl.com correctly", async () => { const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ { @@ -537,7 +537,7 @@ test("parses result file for self-signed.badssl.com correctly", async () => { ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); 
expect(findings).toMatchInlineSnapshot(` [ @@ -659,7 +659,7 @@ test("parses result file for target without certificate_deployments correctly", ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ @@ -730,7 +730,7 @@ test("parses an empty result file correctly", async () => { ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toEqual([]); }); @@ -742,6 +742,6 @@ test("should properly parse empty json file", async () => { }, ); const findings = await parse(jsonContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(`[]`); }); diff --git a/scanners/subfinder/parser/parser.test.js b/scanners/subfinder/parser/parser.test.js index b13e75a1c6..9e1c85abc2 100644 --- a/scanners/subfinder/parser/parser.test.js +++ b/scanners/subfinder/parser/parser.test.js @@ -15,7 +15,7 @@ test("should properly parse subfinder json file without ip output", async () => ); const findings = await parse(fileContent); // validate findings - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchSnapshot(); }); @@ -28,8 +28,8 @@ test("should properly parse subfinder json file with ip output", async () => { ); const findings = await parse(fileContent); // validate findings - await expect(validateParser(findings)).resolves.toBeUndefined(); - expect(findings).toMatchSnapshot() + expect(validateParser(findings)).toBeUndefined(); + expect(findings).toMatchSnapshot(); }); test("should properly parse empty json file", async () => { @@ -38,6 +38,6 @@ test("should properly parse empty json file", async () => { }); const findings = 
await parse(fileContent); // validate findings - await expect(validateParser(findings)).resolves.toBeUndefined(); - expect(findings).toMatchSnapshot() + expect(validateParser(findings)).toBeUndefined(); + expect(findings).toMatchSnapshot(); }); diff --git a/scanners/trivy-sbom/parser/parser.test.js b/scanners/trivy-sbom/parser/parser.test.js index e5e016c992..45393059bd 100644 --- a/scanners/trivy-sbom/parser/parser.test.js +++ b/scanners/trivy-sbom/parser/parser.test.js @@ -36,7 +36,7 @@ test("should create finding correctly", async () => { }; const findings = await parse(JSON.stringify(result), scan); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ { @@ -62,7 +62,7 @@ test("should properly parse cyclonedx json sbom file", async () => { ); const findings = await parse(fileContent, scan); // validate findings - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ { diff --git a/scanners/trivy/parser/parser.test.js b/scanners/trivy/parser/parser.test.js index 99ee187c53..2c03624907 100644 --- a/scanners/trivy/parser/parser.test.js +++ b/scanners/trivy/parser/parser.test.js @@ -15,7 +15,7 @@ test("parses bkimminich/juice-shop:v10.2.0 result file into findings", async () }, ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchSnapshot(); }); @@ -27,7 +27,7 @@ test("parses bkimminich/juice-shop:v12.10.2 result file into findings", async () }, ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchSnapshot(); }); @@ -39,7 +39,7 @@ test("parses securecodebox:master result file into 
findings", async () => { }, ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchSnapshot(); }); @@ -51,7 +51,7 @@ test("should properly parse a json file with no .Results", async () => { }, ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(`[]`); }); @@ -63,7 +63,7 @@ test("should parse a trivy-k8s scan result of a cluster running secureCodeBox it }, ); const findings = await parse(jsonContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchSnapshot(); }); @@ -87,7 +87,7 @@ test("should parse a trivy-k8s scan result", async () => { }, ); const findings = await parse(jsonContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchSnapshot(); }); @@ -99,7 +99,7 @@ test("should properly parse a json file with empty .Results", async () => { }, ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(`[]`); }); @@ -111,6 +111,6 @@ test("should properly parse empty json file", async () => { }, ); const findings = await parse(jsonContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(`[]`); }); diff --git a/scanners/whatweb/parser/parser.test.js b/scanners/whatweb/parser/parser.test.js index 5e833a0cfc..742288ee82 100644 --- a/scanners/whatweb/parser/parser.test.js +++ b/scanners/whatweb/parser/parser.test.js @@ -16,7 +16,7 @@ test("should 
properly parse whatweb json file", async () => { ); const findings = await parse(fileContent); // validate findings - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ { @@ -49,7 +49,7 @@ test("should properly parse empty whatweb json file", async () => { ); const findings = await parse(fileContent); // validate findings - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(`[]`); }); @@ -62,7 +62,7 @@ test("should properly parse securecodebox.io whatweb json file with higher aggre ); const findings = await parse(fileContent); // validate findings - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ { @@ -140,7 +140,7 @@ test("should properly parse whatweb json file with two domains", async () => { ); const findings = await parse(fileContent); // validate findings - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ { diff --git a/scanners/wpscan/parser/parser.test.js b/scanners/wpscan/parser/parser.test.js index 068de0aff8..b85a6c4fcc 100644 --- a/scanners/wpscan/parser/parser.test.js +++ b/scanners/wpscan/parser/parser.test.js @@ -16,7 +16,7 @@ test("WPScan parser parses a successfully scan result with at least one informat ); const findings = await parse(scanResults); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ { @@ -195,7 +195,7 @@ test("WPScan parser parses a scan result file without a detected wp version corr ); const findings = await parse(scanResults); - await expect(validateParser(findings)).resolves.toBeUndefined(); 
+ expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(` [ { @@ -336,6 +336,6 @@ test("should properly parse empty json file", async () => { }, ); const findings = await parse(jsonContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(`[]`); }); diff --git a/scanners/zap-automation-framework/parser/parser.test.js b/scanners/zap-automation-framework/parser/parser.test.js index 204340d49c..213e5229b2 100644 --- a/scanners/zap-automation-framework/parser/parser.test.js +++ b/scanners/zap-automation-framework/parser/parser.test.js @@ -16,7 +16,7 @@ test("Parsing the juice-shop results.", async () => { ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchSnapshot(); }); @@ -29,7 +29,7 @@ test("Parsing the example.com results.", async () => { ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchSnapshot(); }); @@ -42,7 +42,7 @@ test("Parsing the docs.securecodebox.io results.", async () => { ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchSnapshot(); }); @@ -55,7 +55,7 @@ test("Parsing an empty result.", async () => { ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchInlineSnapshot(`[]`); }); @@ -68,7 +68,7 @@ test("Parsing a nginx result.", async () => { ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + 
expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchSnapshot(); }); @@ -81,6 +81,6 @@ test("Parsing a bodgeit result.", async () => { ); const findings = await parse(fileContent); - await expect(validateParser(findings)).resolves.toBeUndefined(); + expect(validateParser(findings)).toBeUndefined(); expect(findings).toMatchSnapshot(); });