diff --git a/.cargo/config.toml b/.cargo/config.toml
index c73baee6b159c..92d135658137c 100644
--- a/.cargo/config.toml
+++ b/.cargo/config.toml
@@ -34,12 +34,6 @@ rustflags = [
   "target-feature=+crt-static"
 ]
 
-[target.aarch64-pc-windows-msvc]
-linker = "rust-lld"
-
-[target.'cfg(target_os = "macos")']
-linker = "rust-lld"
-
 [target.'cfg(all(target_os = "linux", target_env = "gnu"))']
 rustflags = [
   "--cfg",
diff --git a/.changeset/README.md b/.changeset/README.md
new file mode 100644
index 0000000000000..e5b6d8d6a67ad
--- /dev/null
+++ b/.changeset/README.md
@@ -0,0 +1,8 @@
+# Changesets
+
+Hello and welcome! This folder has been automatically generated by `@changesets/cli`, a build tool that works
+with multi-package repos, or single-package repos to help you version and publish your code. You can
+find the full documentation for it [in our repository](https://github.com/changesets/changesets)
+
+We have a quick list of common questions to get you started engaging with this project in
+[our documentation](https://github.com/changesets/changesets/blob/main/docs/common-questions.md)
diff --git a/.changeset/config.json b/.changeset/config.json
new file mode 100644
index 0000000000000..f61ae85885617
--- /dev/null
+++ b/.changeset/config.json
@@ -0,0 +1,11 @@
+{
+  "$schema": "https://unpkg.com/@changesets/config@3.1.1/schema.json",
+  "changelog": ["@changesets/changelog-github", { "repo": "vercel/next.js" }],
+  "commit": false,
+  "fixed": [["next", "@next/swc"]],
+  "linked": [],
+  "access": "public",
+  "baseBranch": "canary",
+  "updateInternalDependencies": "patch",
+  "ignore": []
+}
diff --git a/.changeset/dry-roses-nail.md b/.changeset/dry-roses-nail.md
new file mode 100644
index 0000000000000..84b8b97378321
--- /dev/null
+++ b/.changeset/dry-roses-nail.md
@@ -0,0 +1,5 @@
+---
+"next": patch
+---
+
+Enable `ppr` when `dynamicIO` is enabled
diff --git a/.changeset/empty-paths-check.md b/.changeset/empty-paths-check.md
new file mode 100644
index 0000000000000..0074f64bdaab5
--- /dev/null
+++ b/.changeset/empty-paths-check.md
@@ -0,0 +1,5 @@
+---
+'next': patch
+---
+
+Add `regions` to the function config manifest file
diff --git a/.changeset/giant-bushes-sink.md b/.changeset/giant-bushes-sink.md
new file mode 100644
index 0000000000000..12493bb3b955e
--- /dev/null
+++ b/.changeset/giant-bushes-sink.md
@@ -0,0 +1,5 @@
+---
+'next': patch
+---
+
+Resolved bug where hitting the parameterized path directly would cause a fallback shell generation instead of just rendering the route with the parameterized placeholders.
diff --git a/.changeset/loose-cows-pump.md b/.changeset/loose-cows-pump.md
new file mode 100644
index 0000000000000..94a3f73c91175
--- /dev/null
+++ b/.changeset/loose-cows-pump.md
@@ -0,0 +1,5 @@
+---
+'next': patch
+---
+
+Fix dangling promise in unstable_cache
diff --git a/.changeset/lovely-bulldogs-dress.md b/.changeset/lovely-bulldogs-dress.md
new file mode 100644
index 0000000000000..cf8a76baebdbd
--- /dev/null
+++ b/.changeset/lovely-bulldogs-dress.md
@@ -0,0 +1,5 @@
+---
+"next": patch
+---
+
+Always pass implicit/soft tags into the `CacheHandler.get` method
diff --git a/.changeset/open-dodos-admire.md b/.changeset/open-dodos-admire.md
new file mode 100644
index 0000000000000..d596f86d830ef
--- /dev/null
+++ b/.changeset/open-dodos-admire.md
@@ -0,0 +1,5 @@
+---
+'next': patch
+---
+
+Fix to use https urls in meta data images when using --experimental-https flag
diff --git a/.changeset/pretty-suns-watch.md b/.changeset/pretty-suns-watch.md
new file mode 100644
index 0000000000000..87c9353fccf6b
--- /dev/null
+++ b/.changeset/pretty-suns-watch.md
@@ -0,0 +1,5 @@
+---
+"next": patch
+---
+
+Do not cache fetches with 'no-store' in "use cache" during SSG
diff --git a/.changeset/seven-seas-run.md b/.changeset/seven-seas-run.md
new file mode 100644
index 0000000000000..844d3ea79dcb2
--- /dev/null
+++ b/.changeset/seven-seas-run.md
@@ -0,0 +1,5 @@
+---
+"next": patch
+---
+
+[TypeScript Plugin] Match method signature (`someFunc(): void`) type for client boundary warnings.
diff --git a/.changeset/shaggy-owls-visit.md b/.changeset/shaggy-owls-visit.md
new file mode 100644
index 0000000000000..753479a46fb75
--- /dev/null
+++ b/.changeset/shaggy-owls-visit.md
@@ -0,0 +1,5 @@
+---
+'next': patch
+---
+
+Fixed rewrite params of the interception routes not being parsed correctly in certain deployed environments
diff --git a/.changeset/shaggy-pears-tell.md b/.changeset/shaggy-pears-tell.md
new file mode 100644
index 0000000000000..4cd6e97ad2275
--- /dev/null
+++ b/.changeset/shaggy-pears-tell.md
@@ -0,0 +1,5 @@
+---
+"next": patch
+---
+
+Use `onPostpone` to determine if segment prefetch is partial
diff --git a/.changeset/shy-impalas-add.md b/.changeset/shy-impalas-add.md
new file mode 100644
index 0000000000000..e9f93508de5b1
--- /dev/null
+++ b/.changeset/shy-impalas-add.md
@@ -0,0 +1,5 @@
+---
+"next": patch
+---
+
+[TypeScript Plugin] Moved the diagnostics' positions to the prop's type instead of the value for client-boundary warnings.
diff --git a/.changeset/smooth-bears-run.md b/.changeset/smooth-bears-run.md
new file mode 100644
index 0000000000000..2a2ea3b1c0355
--- /dev/null
+++ b/.changeset/smooth-bears-run.md
@@ -0,0 +1,8 @@
+---
+'next': patch
+---
+
+[dev-overlay] Show error overlay on any thrown value
+
+We used to only show the error overlay on thrown values with a stack property.
+On other thrown values we kept the overlay collapsed.
diff --git a/.changeset/spotty-hotels-train.md b/.changeset/spotty-hotels-train.md
new file mode 100644
index 0000000000000..fea1a4e6b846c
--- /dev/null
+++ b/.changeset/spotty-hotels-train.md
@@ -0,0 +1,5 @@
+---
+"next": patch
+---
+
+[Segment Cache] Fix: Ensure server references can be prerendered
diff --git a/.changeset/swift-socks-find.md b/.changeset/swift-socks-find.md
new file mode 100644
index 0000000000000..babf8d42cd199
--- /dev/null
+++ b/.changeset/swift-socks-find.md
@@ -0,0 +1,5 @@
+---
+'next': patch
+---
+
+Sourcemap errors during prerender if `experimental.enablePrerenderSourceMaps` is enabled
diff --git a/.changeset/tough-peaches-burn.md b/.changeset/tough-peaches-burn.md
new file mode 100644
index 0000000000000..5814def9a6b3d
--- /dev/null
+++ b/.changeset/tough-peaches-burn.md
@@ -0,0 +1,5 @@
+---
+"next": patch
+---
+
+Fix name tracking for closures in server actions transform
diff --git a/.changeset/tricky-planes-worry.md b/.changeset/tricky-planes-worry.md
new file mode 100644
index 0000000000000..b8d38dfa92379
--- /dev/null
+++ b/.changeset/tricky-planes-worry.md
@@ -0,0 +1,5 @@
+---
+"next": patch
+---
+
+[dynamicIO] Avoid timeout errors with dynamic params in `"use cache"`
diff --git a/.eslintrc.json b/.eslintrc.json
index c27a07867b783..a0ee52fcc9db6 100644
--- a/.eslintrc.json
+++ b/.eslintrc.json
@@ -48,7 +48,7 @@
       "jest/no-export": "off",
       "jest/no-standalone-expect": [
         "error",
-        { "additionalTestBlockFunctions": ["retry"] }
+        { "additionalTestBlockFunctions": ["retry", "itCI", "itHeaded"] }
       ]
     }
   },
@@ -126,8 +126,10 @@
     }
   },
   {
-    "files": ["packages/**/*.tsx?"],
+    "files": ["packages/**/*.ts", "packages/**/*.tsx"],
+    "plugins": ["@next/eslint-plugin-internal"],
     "rules": {
+      "@next/internal/typechecked-require": "error",
       "jsdoc/no-types": "error",
       "jsdoc/no-undefined-types": "error"
     }
@@ -166,6 +168,7 @@
     "files": ["packages/**"],
     "excludedFiles": [
       "packages/next/taskfile*.js",
+      "packages/next/next-devtools.webpack-config.js",
       "packages/next/next-runtime.webpack-config.js"
     ],
     "rules": {
@@ -294,6 +297,17 @@
     "no-octal": "error",
     "no-octal-escape": "error",
     "no-regex-spaces": "error",
+    "no-restricted-imports": [
+      "error",
+      {
+        "patterns": [
+          {
+            "group": ["*/next-devtools/dev-overlay*"],
+            "message": "Use `next/dist/compiled/next-devtools` (`src/next-devtools/dev-overlay/entrypoint.ts`) instead. Prefer `src/next-devtools/shared/` for shared utils."
+          }
+        ]
+      }
+    ],
     "no-restricted-syntax": [
       "error",
       "WithStatement",
diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
index 7999e1d4cdcb3..26bade4c2de9b 100644
--- a/.git-blame-ignore-revs
+++ b/.git-blame-ignore-revs
@@ -9,4 +9,30 @@
 64b718c6618b6c419872abbf22163ae543ac259e
 
 # Replace createNextDescribe with nextTestSetup
-c6320ed87ab41eee6f3ac54352ad02a239f329b2
\ No newline at end of file
+c6320ed87ab41eee6f3ac54352ad02a239f329b2
+
+# Enable prettier for turbopack
+eaad5b4c1451f35c4b3b6a3c9c67445b434c4a80
+
+# Turbopack: Make `turbopack-core` Rust 2024
+f5d537f23de7c61e427fd915b5b70d2298fdd5ad
+# Turbopack: Make `turbo-tasks-malloc` crate Rust 2024
+7e9b55da8b0d72c22708ae08a71581858e802bef
+# Turbopack: Make `turbopack-ecmascript` Rust 2024
+f9e78faee6e959a2bbc8891e3f7761d0d7d7dcb1
+# Turbopack: Make `turbopack-resolve` and `turbopack-trace-server` Rust 2024
+75bc3548e154453897d2dfaecab2cdf60912ae0c
+# Turbopack: Make `turbo-tasks-memory` crate Rust 2024
+1226d7e47c54173f568c9dd4bf87ef09f22122b5
+# Turbopack: Make `turbo-tasks-env` crate Rust 2024
+5a4dba153defeaa6330d2c23d0934594f2df3383
+# Turbopack: Make `turbo-macros` crate Rust 2024
+305635343df51bbabc19c0df8a82bb8302d86aef
+# Turbopack: Make `turbo-backend` crate Rust 2024
+9bf6e364d484f3ab7eed18b67fbd878dfc3bee6f
+# Turbopack: Make `turbo-rcstr` crate Rust 2024
+9156b3b25bcdd74b4dee58eede731ba895c9e0d9
+# Turbopack: Make `napi` crate Rust 2024
+02219cc880e0991b2060f1591757d97d9f678f9f
+# Turbopack: Make `turbo-tasks` crate Rust 2024
+47689a6e2552328cf90a2d1f14573d006868f197
diff --git a/.gitattributes b/.gitattributes
index 413fc08cb9b07..a032aa154c986 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -4,3 +4,6 @@ packages/next/compiled/** -text linguist-vendored
 
 # Make next/src/build folder indexable for github search
 build/** linguist-generated=false
+
+# Custom merge driver for auto-generated errors.json
+packages/next/errors.json merge=errors-json
diff --git a/.github/ISSUE_TEMPLATE/4.docs_report.yml b/.github/ISSUE_TEMPLATE/4.docs_report.yml
index fb8421c7329e3..79464bc4a23e5 100644
--- a/.github/ISSUE_TEMPLATE/4.docs_report.yml
+++ b/.github/ISSUE_TEMPLATE/4.docs_report.yml
@@ -9,9 +9,8 @@ body:
         Before opening a new documentation issue, is this something you can help us with? Your contributions are welcomed and appreciated. See our [Docs Contribution Guide](https://nextjs.org/docs/community/contribution-guide) to learn how to edit the Next.js docs.
 
         If you are reporting about an documentation request, please open it in our [discussions](https://github.com/vercel/next.js/discussions/new?category=ideas) instead.
-  - type: markdown
-    attributes:
-      value: Thank you for helping us update our docs!
+
+        Thank you for helping us update our docs!
   - type: textarea
     attributes:
       label: What is the documentation issue?
diff --git a/.github/actions/setup-rust/action.yml b/.github/actions/setup-rust/action.yml
index aeac13a92142a..f766dc3bea3c1 100644
--- a/.github/actions/setup-rust/action.yml
+++ b/.github/actions/setup-rust/action.yml
@@ -19,7 +19,9 @@ runs:
     - name: 'Install LLD (LLVM Linker) for Linux'
       if: runner.os == 'Linux'
       shell: bash
-      run: sudo apt-get -y update && sudo apt-get install -y lld
+      run: |
+        sudo apt-get -y -o DPkg::Lock::Timeout=60 update
+        sudo apt-get -o DPkg::Lock::Timeout=60 -y install lld
 
     - name: 'Add cargo problem matchers'
       shell: bash
diff --git a/.github/actions/upload-turboyet-data/dist/index.js b/.github/actions/upload-turboyet-data/dist/index.js
index df475597ce346..2b0b605c49954 100644
--- a/.github/actions/upload-turboyet-data/dist/index.js
+++ b/.github/actions/upload-turboyet-data/dist/index.js
@@ -5332,6 +5332,7 @@ async function main() {
     await collectAndUpload(kv, {
       jsonPrefix: 'rspack-',
       kvPrefix: 'rspack-',
+      deploymentDomain: 'arewerspackyet.com',
     })
   } catch (error) {
     console.log(error)
diff --git a/.github/actions/upload-turboyet-data/dist/index.js.map b/.github/actions/upload-turboyet-data/dist/index.js.map
index 174cbe3e02c58..629bf392569a1 100644
--- a/.github/actions/upload-turboyet-data/dist/index.js.map
+++ b/.github/actions/upload-turboyet-data/dist/index.js.map
@@ -1 +1 @@
-{"version":3,"file":"index.js","mappings":";;;;;;;AAAA;… [machine-generated single-line source map, truncated in this excerpt: the "mappings" string plus "sources"/"sourcesContent" for the bundled @upstash/redis, crypto-js, @vercel/kv, Node built-in externals, the webpack runtime, and ./src/main.js]
0;\n super([\"JSON.ARRTRIM\", cmd[0], path, start, stop], opts);\n }\n};\n\n// pkg/commands/json_clear.ts\nvar JsonClearCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.CLEAR\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_del.ts\nvar JsonDelCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.DEL\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_forget.ts\nvar JsonForgetCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.FORGET\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_get.ts\nvar JsonGetCommand = class extends Command {\n constructor(cmd, opts) {\n const command = [\"JSON.GET\"];\n if (typeof cmd[1] === \"string\") {\n command.push(...cmd);\n } else {\n command.push(cmd[0]);\n if (cmd[1]) {\n if (cmd[1].indent) {\n command.push(\"INDENT\", cmd[1].indent);\n }\n if (cmd[1].newline) {\n command.push(\"NEWLINE\", cmd[1].newline);\n }\n if (cmd[1].space) {\n command.push(\"SPACE\", cmd[1].space);\n }\n }\n command.push(...cmd.slice(2));\n }\n super(command, opts);\n }\n};\n\n// pkg/commands/json_mget.ts\nvar JsonMGetCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.MGET\", ...cmd[0], cmd[1]], opts);\n }\n};\n\n// pkg/commands/json_mset.ts\nvar JsonMSetCommand = class extends Command {\n constructor(cmd, opts) {\n const command = [\"JSON.MSET\"];\n for (const c of cmd) {\n command.push(c.key, c.path, c.value);\n }\n super(command, opts);\n }\n};\n\n// pkg/commands/json_numincrby.ts\nvar JsonNumIncrByCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.NUMINCRBY\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_nummultby.ts\nvar JsonNumMultByCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.NUMMULTBY\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_objkeys.ts\nvar JsonObjKeysCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.OBJKEYS\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_objlen.ts\nvar JsonObjLenCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.OBJLEN\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_resp.ts\nvar JsonRespCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.RESP\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_set.ts\nvar JsonSetCommand = class extends Command {\n constructor(cmd, opts) {\n const command = [\"JSON.SET\", cmd[0], cmd[1], cmd[2]];\n if (cmd[3]) {\n if (cmd[3].nx) {\n command.push(\"NX\");\n } else if (cmd[3].xx) {\n command.push(\"XX\");\n }\n }\n super(command, opts);\n }\n};\n\n// pkg/commands/json_strappend.ts\nvar JsonStrAppendCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.STRAPPEND\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_strlen.ts\nvar JsonStrLenCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.STRLEN\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_toggle.ts\nvar JsonToggleCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.TOGGLE\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_type.ts\nvar JsonTypeCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.TYPE\", ...cmd], opts);\n }\n};\n\n// pkg/commands/keys.ts\nvar KeysCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"keys\", ...cmd], opts);\n }\n};\n\n// pkg/commands/lindex.ts\nvar LIndexCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"lindex\", ...cmd], opts);\n }\n};\n\n// 
pkg/commands/linsert.ts\nvar LInsertCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"linsert\", ...cmd], opts);\n }\n};\n\n// pkg/commands/llen.ts\nvar LLenCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"llen\", ...cmd], opts);\n }\n};\n\n// pkg/commands/lmove.ts\nvar LMoveCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"lmove\", ...cmd], opts);\n }\n};\n\n// pkg/commands/lmpop.ts\nvar LmPopCommand = class extends Command {\n constructor(cmd, opts) {\n const [numkeys, keys, direction, count] = cmd;\n super([\"LMPOP\", numkeys, ...keys, direction, ...count ? [\"COUNT\", count] : []], opts);\n }\n};\n\n// pkg/commands/lpop.ts\nvar LPopCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"lpop\", ...cmd], opts);\n }\n};\n\n// pkg/commands/lpos.ts\nvar LPosCommand = class extends Command {\n constructor(cmd, opts) {\n const args = [\"lpos\", cmd[0], cmd[1]];\n if (typeof cmd[2]?.rank === \"number\") {\n args.push(\"rank\", cmd[2].rank);\n }\n if (typeof cmd[2]?.count === \"number\") {\n args.push(\"count\", cmd[2].count);\n }\n if (typeof cmd[2]?.maxLen === \"number\") {\n args.push(\"maxLen\", cmd[2].maxLen);\n }\n super(args, opts);\n }\n};\n\n// pkg/commands/lpush.ts\nvar LPushCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"lpush\", ...cmd], opts);\n }\n};\n\n// pkg/commands/lpushx.ts\nvar LPushXCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"lpushx\", ...cmd], opts);\n }\n};\n\n// pkg/commands/lrange.ts\nvar LRangeCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"lrange\", ...cmd], opts);\n }\n};\n\n// pkg/commands/lrem.ts\nvar LRemCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"lrem\", ...cmd], opts);\n }\n};\n\n// pkg/commands/lset.ts\nvar LSetCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"lset\", ...cmd], opts);\n }\n};\n\n// pkg/commands/ltrim.ts\nvar LTrimCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"ltrim\", ...cmd], opts);\n }\n};\n\n// pkg/commands/mget.ts\nvar MGetCommand = class extends Command {\n constructor(cmd, opts) {\n const keys = Array.isArray(cmd[0]) ? 
cmd[0] : cmd;\n super([\"mget\", ...keys], opts);\n }\n};\n\n// pkg/commands/mset.ts\nvar MSetCommand = class extends Command {\n constructor([kv], opts) {\n super([\"mset\", ...Object.entries(kv).flatMap(([key, value]) => [key, value])], opts);\n }\n};\n\n// pkg/commands/msetnx.ts\nvar MSetNXCommand = class extends Command {\n constructor([kv], opts) {\n super([\"msetnx\", ...Object.entries(kv).flat()], opts);\n }\n};\n\n// pkg/commands/persist.ts\nvar PersistCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"persist\", ...cmd], opts);\n }\n};\n\n// pkg/commands/pexpire.ts\nvar PExpireCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"pexpire\", ...cmd], opts);\n }\n};\n\n// pkg/commands/pexpireat.ts\nvar PExpireAtCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"pexpireat\", ...cmd], opts);\n }\n};\n\n// pkg/commands/pfadd.ts\nvar PfAddCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"pfadd\", ...cmd], opts);\n }\n};\n\n// pkg/commands/pfcount.ts\nvar PfCountCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"pfcount\", ...cmd], opts);\n }\n};\n\n// pkg/commands/pfmerge.ts\nvar PfMergeCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"pfmerge\", ...cmd], opts);\n }\n};\n\n// pkg/commands/ping.ts\nvar PingCommand = class extends Command {\n constructor(cmd, opts) {\n const command = [\"ping\"];\n if (cmd?.[0] !== void 0) {\n command.push(cmd[0]);\n }\n super(command, opts);\n }\n};\n\n// pkg/commands/psetex.ts\nvar PSetEXCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"psetex\", ...cmd], opts);\n }\n};\n\n// pkg/commands/pttl.ts\nvar PTtlCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"pttl\", ...cmd], opts);\n }\n};\n\n// pkg/commands/publish.ts\nvar PublishCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"publish\", ...cmd], opts);\n }\n};\n\n// pkg/commands/randomkey.ts\nvar RandomKeyCommand = class extends Command {\n constructor(opts) {\n super([\"randomkey\"], opts);\n }\n};\n\n// pkg/commands/rename.ts\nvar RenameCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"rename\", ...cmd], opts);\n }\n};\n\n// pkg/commands/renamenx.ts\nvar RenameNXCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"renamenx\", ...cmd], opts);\n }\n};\n\n// pkg/commands/rpop.ts\nvar RPopCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"rpop\", ...cmd], opts);\n }\n};\n\n// pkg/commands/rpush.ts\nvar RPushCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"rpush\", ...cmd], opts);\n }\n};\n\n// pkg/commands/rpushx.ts\nvar RPushXCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"rpushx\", ...cmd], opts);\n }\n};\n\n// pkg/commands/sadd.ts\nvar SAddCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"sadd\", ...cmd], opts);\n }\n};\n\n// pkg/commands/scan.ts\nvar ScanCommand = class extends Command {\n constructor([cursor, opts], cmdOpts) {\n const command = [\"scan\", cursor];\n if (opts?.match) {\n command.push(\"match\", opts.match);\n }\n if (typeof opts?.count === \"number\") {\n command.push(\"count\", opts.count);\n }\n if (opts?.type && opts.type.length > 0) {\n command.push(\"type\", opts.type);\n }\n super(command, {\n deserialize: deserializeScanResponse,\n ...cmdOpts\n });\n }\n};\n\n// pkg/commands/scard.ts\nvar SCardCommand = class extends Command {\n constructor(cmd, opts) {\n 
super([\"scard\", ...cmd], opts);\n }\n};\n\n// pkg/commands/script_exists.ts\nvar ScriptExistsCommand = class extends Command {\n constructor(hashes, opts) {\n super([\"script\", \"exists\", ...hashes], {\n deserialize: (result) => result,\n ...opts\n });\n }\n};\n\n// pkg/commands/script_flush.ts\nvar ScriptFlushCommand = class extends Command {\n constructor([opts], cmdOpts) {\n const cmd = [\"script\", \"flush\"];\n if (opts?.sync) {\n cmd.push(\"sync\");\n } else if (opts?.async) {\n cmd.push(\"async\");\n }\n super(cmd, cmdOpts);\n }\n};\n\n// pkg/commands/script_load.ts\nvar ScriptLoadCommand = class extends Command {\n constructor(args, opts) {\n super([\"script\", \"load\", ...args], opts);\n }\n};\n\n// pkg/commands/sdiff.ts\nvar SDiffCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"sdiff\", ...cmd], opts);\n }\n};\n\n// pkg/commands/sdiffstore.ts\nvar SDiffStoreCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"sdiffstore\", ...cmd], opts);\n }\n};\n\n// pkg/commands/set.ts\nvar SetCommand = class extends Command {\n constructor([key, value, opts], cmdOpts) {\n const command = [\"set\", key, value];\n if (opts) {\n if (\"nx\" in opts && opts.nx) {\n command.push(\"nx\");\n } else if (\"xx\" in opts && opts.xx) {\n command.push(\"xx\");\n }\n if (\"get\" in opts && opts.get) {\n command.push(\"get\");\n }\n if (\"ex\" in opts && typeof opts.ex === \"number\") {\n command.push(\"ex\", opts.ex);\n } else if (\"px\" in opts && typeof opts.px === \"number\") {\n command.push(\"px\", opts.px);\n } else if (\"exat\" in opts && typeof opts.exat === \"number\") {\n command.push(\"exat\", opts.exat);\n } else if (\"pxat\" in opts && typeof opts.pxat === \"number\") {\n command.push(\"pxat\", opts.pxat);\n } else if (\"keepTtl\" in opts && opts.keepTtl) {\n command.push(\"keepTtl\");\n }\n }\n super(command, cmdOpts);\n }\n};\n\n// pkg/commands/setbit.ts\nvar SetBitCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"setbit\", ...cmd], opts);\n }\n};\n\n// pkg/commands/setex.ts\nvar SetExCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"setex\", ...cmd], opts);\n }\n};\n\n// pkg/commands/setnx.ts\nvar SetNxCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"setnx\", ...cmd], opts);\n }\n};\n\n// pkg/commands/setrange.ts\nvar SetRangeCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"setrange\", ...cmd], opts);\n }\n};\n\n// pkg/commands/sinter.ts\nvar SInterCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"sinter\", ...cmd], opts);\n }\n};\n\n// pkg/commands/sinterstore.ts\nvar SInterStoreCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"sinterstore\", ...cmd], opts);\n }\n};\n\n// pkg/commands/sismember.ts\nvar SIsMemberCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"sismember\", ...cmd], opts);\n }\n};\n\n// pkg/commands/smembers.ts\nvar SMembersCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"smembers\", ...cmd], opts);\n }\n};\n\n// pkg/commands/smismember.ts\nvar SMIsMemberCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"smismember\", cmd[0], ...cmd[1]], opts);\n }\n};\n\n// pkg/commands/smove.ts\nvar SMoveCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"smove\", ...cmd], opts);\n }\n};\n\n// pkg/commands/spop.ts\nvar SPopCommand = class extends Command {\n constructor([key, count], opts) {\n const command = [\"spop\", key];\n 
if (typeof count === \"number\") {\n command.push(count);\n }\n super(command, opts);\n }\n};\n\n// pkg/commands/srandmember.ts\nvar SRandMemberCommand = class extends Command {\n constructor([key, count], opts) {\n const command = [\"srandmember\", key];\n if (typeof count === \"number\") {\n command.push(count);\n }\n super(command, opts);\n }\n};\n\n// pkg/commands/srem.ts\nvar SRemCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"srem\", ...cmd], opts);\n }\n};\n\n// pkg/commands/sscan.ts\nvar SScanCommand = class extends Command {\n constructor([key, cursor, opts], cmdOpts) {\n const command = [\"sscan\", key, cursor];\n if (opts?.match) {\n command.push(\"match\", opts.match);\n }\n if (typeof opts?.count === \"number\") {\n command.push(\"count\", opts.count);\n }\n super(command, {\n deserialize: deserializeScanResponse,\n ...cmdOpts\n });\n }\n};\n\n// pkg/commands/strlen.ts\nvar StrLenCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"strlen\", ...cmd], opts);\n }\n};\n\n// pkg/commands/sunion.ts\nvar SUnionCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"sunion\", ...cmd], opts);\n }\n};\n\n// pkg/commands/sunionstore.ts\nvar SUnionStoreCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"sunionstore\", ...cmd], opts);\n }\n};\n\n// pkg/commands/time.ts\nvar TimeCommand = class extends Command {\n constructor(opts) {\n super([\"time\"], opts);\n }\n};\n\n// pkg/commands/touch.ts\nvar TouchCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"touch\", ...cmd], opts);\n }\n};\n\n// pkg/commands/ttl.ts\nvar TtlCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"ttl\", ...cmd], opts);\n }\n};\n\n// pkg/commands/type.ts\nvar TypeCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"type\", ...cmd], opts);\n }\n};\n\n// pkg/commands/unlink.ts\nvar UnlinkCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"unlink\", ...cmd], opts);\n }\n};\n\n// pkg/commands/xack.ts\nvar XAckCommand = class extends Command {\n constructor([key, group, id], opts) {\n const ids = Array.isArray(id) ? [...id] : [id];\n super([\"XACK\", key, group, ...ids], opts);\n }\n};\n\n// pkg/commands/xadd.ts\nvar XAddCommand = class extends Command {\n constructor([key, id, entries, opts], commandOptions) {\n const command = [\"XADD\", key];\n if (opts) {\n if (opts.nomkStream) {\n command.push(\"NOMKSTREAM\");\n }\n if (opts.trim) {\n command.push(opts.trim.type, opts.trim.comparison, opts.trim.threshold);\n if (opts.trim.limit !== void 0) {\n command.push(\"LIMIT\", opts.trim.limit);\n }\n }\n }\n command.push(id);\n for (const [k, v] of Object.entries(entries)) {\n command.push(k, v);\n }\n super(command, commandOptions);\n }\n};\n\n// pkg/commands/xautoclaim.ts\nvar XAutoClaim = class extends Command {\n constructor([key, group, consumer, minIdleTime, start, options], opts) {\n const commands = [];\n if (options?.count) {\n commands.push(\"COUNT\", options.count);\n }\n if (options?.justId) {\n commands.push(\"JUSTID\");\n }\n super([\"XAUTOCLAIM\", key, group, consumer, minIdleTime, start, ...commands], opts);\n }\n};\n\n// pkg/commands/xclaim.ts\nvar XClaimCommand = class extends Command {\n constructor([key, group, consumer, minIdleTime, id, options], opts) {\n const ids = Array.isArray(id) ? 
[...id] : [id];\n const commands = [];\n if (options?.idleMS) {\n commands.push(\"IDLE\", options.idleMS);\n }\n if (options?.idleMS) {\n commands.push(\"TIME\", options.timeMS);\n }\n if (options?.retryCount) {\n commands.push(\"RETRYCOUNT\", options.retryCount);\n }\n if (options?.force) {\n commands.push(\"FORCE\");\n }\n if (options?.justId) {\n commands.push(\"JUSTID\");\n }\n if (options?.lastId) {\n commands.push(\"LASTID\", options.lastId);\n }\n super([\"XCLAIM\", key, group, consumer, minIdleTime, ...ids, ...commands], opts);\n }\n};\n\n// pkg/commands/xdel.ts\nvar XDelCommand = class extends Command {\n constructor([key, ids], opts) {\n const cmds = Array.isArray(ids) ? [...ids] : [ids];\n super([\"XDEL\", key, ...cmds], opts);\n }\n};\n\n// pkg/commands/xgroup.ts\nvar XGroupCommand = class extends Command {\n constructor([key, opts], commandOptions) {\n const command = [\"XGROUP\"];\n switch (opts.type) {\n case \"CREATE\": {\n command.push(\"CREATE\", key, opts.group, opts.id);\n if (opts.options) {\n if (opts.options.MKSTREAM) {\n command.push(\"MKSTREAM\");\n }\n if (opts.options.ENTRIESREAD !== void 0) {\n command.push(\"ENTRIESREAD\", opts.options.ENTRIESREAD.toString());\n }\n }\n break;\n }\n case \"CREATECONSUMER\": {\n command.push(\"CREATECONSUMER\", key, opts.group, opts.consumer);\n break;\n }\n case \"DELCONSUMER\": {\n command.push(\"DELCONSUMER\", key, opts.group, opts.consumer);\n break;\n }\n case \"DESTROY\": {\n command.push(\"DESTROY\", key, opts.group);\n break;\n }\n case \"SETID\": {\n command.push(\"SETID\", key, opts.group, opts.id);\n if (opts.options?.ENTRIESREAD !== void 0) {\n command.push(\"ENTRIESREAD\", opts.options.ENTRIESREAD.toString());\n }\n break;\n }\n default: {\n throw new Error(\"Invalid XGROUP\");\n }\n }\n super(command, commandOptions);\n }\n};\n\n// pkg/commands/xinfo.ts\nvar XInfoCommand = class extends Command {\n constructor([key, options], opts) {\n const cmds = [];\n if (options.type === \"CONSUMERS\") {\n cmds.push(\"CONSUMERS\", key, options.group);\n } else {\n cmds.push(\"GROUPS\", key);\n }\n super([\"XINFO\", ...cmds], opts);\n }\n};\n\n// pkg/commands/xlen.ts\nvar XLenCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"XLEN\", ...cmd], opts);\n }\n};\n\n// pkg/commands/xpending.ts\nvar XPendingCommand = class extends Command {\n constructor([key, group, start, end, count, options], opts) {\n const consumers = options?.consumer === void 0 ? [] : Array.isArray(options.consumer) ? [...options.consumer] : [options.consumer];\n super(\n [\n \"XPENDING\",\n key,\n group,\n ...options?.idleTime ? 
[\"IDLE\", options.idleTime] : [],\n start,\n end,\n count,\n ...consumers\n ],\n opts\n );\n }\n};\n\n// pkg/commands/xrange.ts\nfunction deserialize4(result) {\n const obj = {};\n for (const e of result) {\n while (e.length >= 2) {\n const streamId = e.shift();\n const entries = e.shift();\n if (!(streamId in obj)) {\n obj[streamId] = {};\n }\n while (entries.length >= 2) {\n const field = entries.shift();\n const value = entries.shift();\n try {\n obj[streamId][field] = JSON.parse(value);\n } catch {\n obj[streamId][field] = value;\n }\n }\n }\n }\n return obj;\n}\nvar XRangeCommand = class extends Command {\n constructor([key, start, end, count], opts) {\n const command = [\"XRANGE\", key, start, end];\n if (typeof count === \"number\") {\n command.push(\"COUNT\", count);\n }\n super(command, {\n deserialize: (result) => deserialize4(result),\n ...opts\n });\n }\n};\n\n// pkg/commands/xread.ts\nvar UNBALANCED_XREAD_ERR = \"ERR Unbalanced XREAD list of streams: for each stream key an ID or '$' must be specified\";\nvar XReadCommand = class extends Command {\n constructor([key, id, options], opts) {\n if (Array.isArray(key) && Array.isArray(id) && key.length !== id.length) {\n throw new Error(UNBALANCED_XREAD_ERR);\n }\n const commands = [];\n if (typeof options?.count === \"number\") {\n commands.push(\"COUNT\", options.count);\n }\n if (typeof options?.blockMS === \"number\") {\n commands.push(\"BLOCK\", options.blockMS);\n }\n commands.push(\n \"STREAMS\",\n ...Array.isArray(key) ? [...key] : [key],\n ...Array.isArray(id) ? [...id] : [id]\n );\n super([\"XREAD\", ...commands], opts);\n }\n};\n\n// pkg/commands/xreadgroup.ts\nvar UNBALANCED_XREADGROUP_ERR = \"ERR Unbalanced XREADGROUP list of streams: for each stream key an ID or '$' must be specified\";\nvar XReadGroupCommand = class extends Command {\n constructor([group, consumer, key, id, options], opts) {\n if (Array.isArray(key) && Array.isArray(id) && key.length !== id.length) {\n throw new Error(UNBALANCED_XREADGROUP_ERR);\n }\n const commands = [];\n if (typeof options?.count === \"number\") {\n commands.push(\"COUNT\", options.count);\n }\n if (typeof options?.blockMS === \"number\") {\n commands.push(\"BLOCK\", options.blockMS);\n }\n if (typeof options?.NOACK === \"boolean\" && options.NOACK) {\n commands.push(\"NOACK\");\n }\n commands.push(\n \"STREAMS\",\n ...Array.isArray(key) ? [...key] : [key],\n ...Array.isArray(id) ? 
[...id] : [id]\n );\n super([\"XREADGROUP\", \"GROUP\", group, consumer, ...commands], opts);\n }\n};\n\n// pkg/commands/xrevrange.ts\nvar XRevRangeCommand = class extends Command {\n constructor([key, end, start, count], opts) {\n const command = [\"XREVRANGE\", key, end, start];\n if (typeof count === \"number\") {\n command.push(\"COUNT\", count);\n }\n super(command, {\n deserialize: (result) => deserialize5(result),\n ...opts\n });\n }\n};\nfunction deserialize5(result) {\n const obj = {};\n for (const e of result) {\n while (e.length >= 2) {\n const streamId = e.shift();\n const entries = e.shift();\n if (!(streamId in obj)) {\n obj[streamId] = {};\n }\n while (entries.length >= 2) {\n const field = entries.shift();\n const value = entries.shift();\n try {\n obj[streamId][field] = JSON.parse(value);\n } catch {\n obj[streamId][field] = value;\n }\n }\n }\n }\n return obj;\n}\n\n// pkg/commands/xtrim.ts\nvar XTrimCommand = class extends Command {\n constructor([key, options], opts) {\n const { limit, strategy, threshold, exactness = \"~\" } = options;\n super([\"XTRIM\", key, strategy, exactness, threshold, ...limit ? [\"LIMIT\", limit] : []], opts);\n }\n};\n\n// pkg/commands/zadd.ts\nvar ZAddCommand = class extends Command {\n constructor([key, arg1, ...arg2], opts) {\n const command = [\"zadd\", key];\n if (\"nx\" in arg1 && arg1.nx) {\n command.push(\"nx\");\n } else if (\"xx\" in arg1 && arg1.xx) {\n command.push(\"xx\");\n }\n if (\"ch\" in arg1 && arg1.ch) {\n command.push(\"ch\");\n }\n if (\"incr\" in arg1 && arg1.incr) {\n command.push(\"incr\");\n }\n if (\"lt\" in arg1 && arg1.lt) {\n command.push(\"lt\");\n } else if (\"gt\" in arg1 && arg1.gt) {\n command.push(\"gt\");\n }\n if (\"score\" in arg1 && \"member\" in arg1) {\n command.push(arg1.score, arg1.member);\n }\n command.push(...arg2.flatMap(({ score, member }) => [score, member]));\n super(command, opts);\n }\n};\n\n// pkg/commands/zcard.ts\nvar ZCardCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"zcard\", ...cmd], opts);\n }\n};\n\n// pkg/commands/zcount.ts\nvar ZCountCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"zcount\", ...cmd], opts);\n }\n};\n\n// pkg/commands/zincrby.ts\nvar ZIncrByCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"zincrby\", ...cmd], opts);\n }\n};\n\n// pkg/commands/zinterstore.ts\nvar ZInterStoreCommand = class extends Command {\n constructor([destination, numKeys, keyOrKeys, opts], cmdOpts) {\n const command = [\"zinterstore\", destination, numKeys];\n if (Array.isArray(keyOrKeys)) {\n command.push(...keyOrKeys);\n } else {\n command.push(keyOrKeys);\n }\n if (opts) {\n if (\"weights\" in opts && opts.weights) {\n command.push(\"weights\", ...opts.weights);\n } else if (\"weight\" in opts && typeof opts.weight === \"number\") {\n command.push(\"weights\", opts.weight);\n }\n if (\"aggregate\" in opts) {\n command.push(\"aggregate\", opts.aggregate);\n }\n }\n super(command, cmdOpts);\n }\n};\n\n// pkg/commands/zlexcount.ts\nvar ZLexCountCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"zlexcount\", ...cmd], opts);\n }\n};\n\n// pkg/commands/zpopmax.ts\nvar ZPopMaxCommand = class extends Command {\n constructor([key, count], opts) {\n const command = [\"zpopmax\", key];\n if (typeof count === \"number\") {\n command.push(count);\n }\n super(command, opts);\n }\n};\n\n// pkg/commands/zpopmin.ts\nvar ZPopMinCommand = class extends Command {\n constructor([key, count], opts) {\n const command = 
[\"zpopmin\", key];\n if (typeof count === \"number\") {\n command.push(count);\n }\n super(command, opts);\n }\n};\n\n// pkg/commands/zrange.ts\nvar ZRangeCommand = class extends Command {\n constructor([key, min, max, opts], cmdOpts) {\n const command = [\"zrange\", key, min, max];\n if (opts?.byScore) {\n command.push(\"byscore\");\n }\n if (opts?.byLex) {\n command.push(\"bylex\");\n }\n if (opts?.rev) {\n command.push(\"rev\");\n }\n if (opts?.count !== void 0 && opts.offset !== void 0) {\n command.push(\"limit\", opts.offset, opts.count);\n }\n if (opts?.withScores) {\n command.push(\"withscores\");\n }\n super(command, cmdOpts);\n }\n};\n\n// pkg/commands/zrank.ts\nvar ZRankCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"zrank\", ...cmd], opts);\n }\n};\n\n// pkg/commands/zrem.ts\nvar ZRemCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"zrem\", ...cmd], opts);\n }\n};\n\n// pkg/commands/zremrangebylex.ts\nvar ZRemRangeByLexCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"zremrangebylex\", ...cmd], opts);\n }\n};\n\n// pkg/commands/zremrangebyrank.ts\nvar ZRemRangeByRankCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"zremrangebyrank\", ...cmd], opts);\n }\n};\n\n// pkg/commands/zremrangebyscore.ts\nvar ZRemRangeByScoreCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"zremrangebyscore\", ...cmd], opts);\n }\n};\n\n// pkg/commands/zrevrank.ts\nvar ZRevRankCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"zrevrank\", ...cmd], opts);\n }\n};\n\n// pkg/commands/zscan.ts\nvar ZScanCommand = class extends Command {\n constructor([key, cursor, opts], cmdOpts) {\n const command = [\"zscan\", key, cursor];\n if (opts?.match) {\n command.push(\"match\", opts.match);\n }\n if (typeof opts?.count === \"number\") {\n command.push(\"count\", opts.count);\n }\n super(command, {\n deserialize: deserializeScanResponse,\n ...cmdOpts\n });\n }\n};\n\n// pkg/commands/zscore.ts\nvar ZScoreCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"zscore\", ...cmd], opts);\n }\n};\n\n// pkg/commands/zunion.ts\nvar ZUnionCommand = class extends Command {\n constructor([numKeys, keyOrKeys, opts], cmdOpts) {\n const command = [\"zunion\", numKeys];\n if (Array.isArray(keyOrKeys)) {\n command.push(...keyOrKeys);\n } else {\n command.push(keyOrKeys);\n }\n if (opts) {\n if (\"weights\" in opts && opts.weights) {\n command.push(\"weights\", ...opts.weights);\n } else if (\"weight\" in opts && typeof opts.weight === \"number\") {\n command.push(\"weights\", opts.weight);\n }\n if (\"aggregate\" in opts) {\n command.push(\"aggregate\", opts.aggregate);\n }\n if (opts.withScores) {\n command.push(\"withscores\");\n }\n }\n super(command, cmdOpts);\n }\n};\n\n// pkg/commands/zunionstore.ts\nvar ZUnionStoreCommand = class extends Command {\n constructor([destination, numKeys, keyOrKeys, opts], cmdOpts) {\n const command = [\"zunionstore\", destination, numKeys];\n if (Array.isArray(keyOrKeys)) {\n command.push(...keyOrKeys);\n } else {\n command.push(keyOrKeys);\n }\n if (opts) {\n if (\"weights\" in opts && opts.weights) {\n command.push(\"weights\", ...opts.weights);\n } else if (\"weight\" in opts && typeof opts.weight === \"number\") {\n command.push(\"weights\", opts.weight);\n }\n if (\"aggregate\" in opts) {\n command.push(\"aggregate\", opts.aggregate);\n }\n }\n super(command, cmdOpts);\n }\n};\n\n// pkg/commands/zdiffstore.ts\nvar ZDiffStoreCommand = class 
extends Command {\n constructor(cmd, opts) {\n super([\"zdiffstore\", ...cmd], opts);\n }\n};\n\n// pkg/commands/zmscore.ts\nvar ZMScoreCommand = class extends Command {\n constructor(cmd, opts) {\n const [key, members] = cmd;\n super([\"zmscore\", key, ...members], opts);\n }\n};\n\n// pkg/pipeline.ts\nvar Pipeline = class {\n client;\n commands;\n commandOptions;\n multiExec;\n constructor(opts) {\n this.client = opts.client;\n this.commands = [];\n this.commandOptions = opts.commandOptions;\n this.multiExec = opts.multiExec ?? false;\n if (this.commandOptions?.latencyLogging) {\n const originalExec = this.exec.bind(this);\n this.exec = async (options) => {\n const start = performance.now();\n const result = await (options ? originalExec(options) : originalExec());\n const end = performance.now();\n const loggerResult = (end - start).toFixed(2);\n console.log(\n `Latency for \\x1B[38;2;19;185;39m${this.multiExec ? [\"MULTI-EXEC\"] : [\"PIPELINE\"].toString().toUpperCase()}\\x1B[0m: \\x1B[38;2;0;255;255m${loggerResult} ms\\x1B[0m`\n );\n return result;\n };\n }\n }\n exec = async (options) => {\n if (this.commands.length === 0) {\n throw new Error(\"Pipeline is empty\");\n }\n const path = this.multiExec ? [\"multi-exec\"] : [\"pipeline\"];\n const res = await this.client.request({\n path,\n body: Object.values(this.commands).map((c) => c.command)\n });\n return options?.keepErrors ? res.map(({ error, result }, i) => {\n return {\n error,\n result: this.commands[i].deserialize(result)\n };\n }) : res.map(({ error, result }, i) => {\n if (error) {\n throw new UpstashError(\n `Command ${i + 1} [ ${this.commands[i].command[0]} ] failed: ${error}`\n );\n }\n return this.commands[i].deserialize(result);\n });\n };\n /**\n * Returns the length of pipeline before the execution\n */\n length() {\n return this.commands.length;\n }\n /**\n * Pushes a command into the pipeline and returns a chainable instance of the\n * pipeline\n */\n chain(command) {\n this.commands.push(command);\n return this;\n }\n /**\n * @see https://redis.io/commands/append\n */\n append = (...args) => this.chain(new AppendCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/bitcount\n */\n bitcount = (...args) => this.chain(new BitCountCommand(args, this.commandOptions));\n /**\n * Returns an instance that can be used to execute `BITFIELD` commands on one key.\n *\n * @example\n * ```typescript\n * redis.set(\"mykey\", 0);\n * const result = await redis.pipeline()\n * .bitfield(\"mykey\")\n * .set(\"u4\", 0, 16)\n * .incr(\"u4\", \"#1\", 1)\n * .exec();\n * console.log(result); // [[0, 1]]\n * ```\n *\n * @see https://redis.io/commands/bitfield\n */\n bitfield = (...args) => new BitFieldCommand(args, this.client, this.commandOptions, this.chain.bind(this));\n /**\n * @see https://redis.io/commands/bitop\n */\n bitop = (op, destinationKey, sourceKey, ...sourceKeys) => this.chain(\n new BitOpCommand([op, destinationKey, sourceKey, ...sourceKeys], this.commandOptions)\n );\n /**\n * @see https://redis.io/commands/bitpos\n */\n bitpos = (...args) => this.chain(new BitPosCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/copy\n */\n copy = (...args) => this.chain(new CopyCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zdiffstore\n */\n zdiffstore = (...args) => this.chain(new ZDiffStoreCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/dbsize\n */\n dbsize = () => this.chain(new DBSizeCommand(this.commandOptions));\n 
/**\n * @see https://redis.io/commands/decr\n */\n decr = (...args) => this.chain(new DecrCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/decrby\n */\n decrby = (...args) => this.chain(new DecrByCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/del\n */\n del = (...args) => this.chain(new DelCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/echo\n */\n echo = (...args) => this.chain(new EchoCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/eval\n */\n eval = (...args) => this.chain(new EvalCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/evalsha\n */\n evalsha = (...args) => this.chain(new EvalshaCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/exists\n */\n exists = (...args) => this.chain(new ExistsCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/expire\n */\n expire = (...args) => this.chain(new ExpireCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/expireat\n */\n expireat = (...args) => this.chain(new ExpireAtCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/flushall\n */\n flushall = (args) => this.chain(new FlushAllCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/flushdb\n */\n flushdb = (...args) => this.chain(new FlushDBCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/geoadd\n */\n geoadd = (...args) => this.chain(new GeoAddCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/geodist\n */\n geodist = (...args) => this.chain(new GeoDistCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/geopos\n */\n geopos = (...args) => this.chain(new GeoPosCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/geohash\n */\n geohash = (...args) => this.chain(new GeoHashCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/geosearch\n */\n geosearch = (...args) => this.chain(new GeoSearchCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/geosearchstore\n */\n geosearchstore = (...args) => this.chain(new GeoSearchStoreCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/get\n */\n get = (...args) => this.chain(new GetCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/getbit\n */\n getbit = (...args) => this.chain(new GetBitCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/getdel\n */\n getdel = (...args) => this.chain(new GetDelCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/getrange\n */\n getrange = (...args) => this.chain(new GetRangeCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/getset\n */\n getset = (key, value) => this.chain(new GetSetCommand([key, value], this.commandOptions));\n /**\n * @see https://redis.io/commands/hdel\n */\n hdel = (...args) => this.chain(new HDelCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/hexists\n */\n hexists = (...args) => this.chain(new HExistsCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/hget\n */\n hget = (...args) => this.chain(new HGetCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/hgetall\n */\n hgetall = (...args) => this.chain(new HGetAllCommand(args, this.commandOptions));\n /**\n * @see 
https://redis.io/commands/hincrby\n */\n hincrby = (...args) => this.chain(new HIncrByCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/hincrbyfloat\n */\n hincrbyfloat = (...args) => this.chain(new HIncrByFloatCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/hkeys\n */\n hkeys = (...args) => this.chain(new HKeysCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/hlen\n */\n hlen = (...args) => this.chain(new HLenCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/hmget\n */\n hmget = (...args) => this.chain(new HMGetCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/hmset\n */\n hmset = (key, kv) => this.chain(new HMSetCommand([key, kv], this.commandOptions));\n /**\n * @see https://redis.io/commands/hrandfield\n */\n hrandfield = (key, count, withValues) => this.chain(new HRandFieldCommand([key, count, withValues], this.commandOptions));\n /**\n * @see https://redis.io/commands/hscan\n */\n hscan = (...args) => this.chain(new HScanCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/hset\n */\n hset = (key, kv) => this.chain(new HSetCommand([key, kv], this.commandOptions));\n /**\n * @see https://redis.io/commands/hsetnx\n */\n hsetnx = (key, field, value) => this.chain(new HSetNXCommand([key, field, value], this.commandOptions));\n /**\n * @see https://redis.io/commands/hstrlen\n */\n hstrlen = (...args) => this.chain(new HStrLenCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/hvals\n */\n hvals = (...args) => this.chain(new HValsCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/incr\n */\n incr = (...args) => this.chain(new IncrCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/incrby\n */\n incrby = (...args) => this.chain(new IncrByCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/incrbyfloat\n */\n incrbyfloat = (...args) => this.chain(new IncrByFloatCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/keys\n */\n keys = (...args) => this.chain(new KeysCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/lindex\n */\n lindex = (...args) => this.chain(new LIndexCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/linsert\n */\n linsert = (key, direction, pivot, value) => this.chain(new LInsertCommand([key, direction, pivot, value], this.commandOptions));\n /**\n * @see https://redis.io/commands/llen\n */\n llen = (...args) => this.chain(new LLenCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/lmove\n */\n lmove = (...args) => this.chain(new LMoveCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/lpop\n */\n lpop = (...args) => this.chain(new LPopCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/lmpop\n */\n lmpop = (...args) => this.chain(new LmPopCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/lpos\n */\n lpos = (...args) => this.chain(new LPosCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/lpush\n */\n lpush = (key, ...elements) => this.chain(new LPushCommand([key, ...elements], this.commandOptions));\n /**\n * @see https://redis.io/commands/lpushx\n */\n lpushx = (key, ...elements) => this.chain(new LPushXCommand([key, ...elements], this.commandOptions));\n /**\n * @see 
https://redis.io/commands/lrange\n */\n lrange = (...args) => this.chain(new LRangeCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/lrem\n */\n lrem = (key, count, value) => this.chain(new LRemCommand([key, count, value], this.commandOptions));\n /**\n * @see https://redis.io/commands/lset\n */\n lset = (key, index, value) => this.chain(new LSetCommand([key, index, value], this.commandOptions));\n /**\n * @see https://redis.io/commands/ltrim\n */\n ltrim = (...args) => this.chain(new LTrimCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/mget\n */\n mget = (...args) => this.chain(new MGetCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/mset\n */\n mset = (kv) => this.chain(new MSetCommand([kv], this.commandOptions));\n /**\n * @see https://redis.io/commands/msetnx\n */\n msetnx = (kv) => this.chain(new MSetNXCommand([kv], this.commandOptions));\n /**\n * @see https://redis.io/commands/persist\n */\n persist = (...args) => this.chain(new PersistCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/pexpire\n */\n pexpire = (...args) => this.chain(new PExpireCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/pexpireat\n */\n pexpireat = (...args) => this.chain(new PExpireAtCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/pfadd\n */\n pfadd = (...args) => this.chain(new PfAddCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/pfcount\n */\n pfcount = (...args) => this.chain(new PfCountCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/pfmerge\n */\n pfmerge = (...args) => this.chain(new PfMergeCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/ping\n */\n ping = (args) => this.chain(new PingCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/psetex\n */\n psetex = (key, ttl, value) => this.chain(new PSetEXCommand([key, ttl, value], this.commandOptions));\n /**\n * @see https://redis.io/commands/pttl\n */\n pttl = (...args) => this.chain(new PTtlCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/publish\n */\n publish = (...args) => this.chain(new PublishCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/randomkey\n */\n randomkey = () => this.chain(new RandomKeyCommand(this.commandOptions));\n /**\n * @see https://redis.io/commands/rename\n */\n rename = (...args) => this.chain(new RenameCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/renamenx\n */\n renamenx = (...args) => this.chain(new RenameNXCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/rpop\n */\n rpop = (...args) => this.chain(new RPopCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/rpush\n */\n rpush = (key, ...elements) => this.chain(new RPushCommand([key, ...elements], this.commandOptions));\n /**\n * @see https://redis.io/commands/rpushx\n */\n rpushx = (key, ...elements) => this.chain(new RPushXCommand([key, ...elements], this.commandOptions));\n /**\n * @see https://redis.io/commands/sadd\n */\n sadd = (key, member, ...members) => this.chain(new SAddCommand([key, member, ...members], this.commandOptions));\n /**\n * @see https://redis.io/commands/scan\n */\n scan = (...args) => this.chain(new ScanCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/scard\n */\n scard = (...args) => this.chain(new 
SCardCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/script-exists\n */\n scriptExists = (...args) => this.chain(new ScriptExistsCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/script-flush\n */\n scriptFlush = (...args) => this.chain(new ScriptFlushCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/script-load\n */\n scriptLoad = (...args) => this.chain(new ScriptLoadCommand(args, this.commandOptions));\n /*)*\n * @see https://redis.io/commands/sdiff\n */\n sdiff = (...args) => this.chain(new SDiffCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/sdiffstore\n */\n sdiffstore = (...args) => this.chain(new SDiffStoreCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/set\n */\n set = (key, value, opts) => this.chain(new SetCommand([key, value, opts], this.commandOptions));\n /**\n * @see https://redis.io/commands/setbit\n */\n setbit = (...args) => this.chain(new SetBitCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/setex\n */\n setex = (key, ttl, value) => this.chain(new SetExCommand([key, ttl, value], this.commandOptions));\n /**\n * @see https://redis.io/commands/setnx\n */\n setnx = (key, value) => this.chain(new SetNxCommand([key, value], this.commandOptions));\n /**\n * @see https://redis.io/commands/setrange\n */\n setrange = (...args) => this.chain(new SetRangeCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/sinter\n */\n sinter = (...args) => this.chain(new SInterCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/sinterstore\n */\n sinterstore = (...args) => this.chain(new SInterStoreCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/sismember\n */\n sismember = (key, member) => this.chain(new SIsMemberCommand([key, member], this.commandOptions));\n /**\n * @see https://redis.io/commands/smembers\n */\n smembers = (...args) => this.chain(new SMembersCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/smismember\n */\n smismember = (key, members) => this.chain(new SMIsMemberCommand([key, members], this.commandOptions));\n /**\n * @see https://redis.io/commands/smove\n */\n smove = (source, destination, member) => this.chain(new SMoveCommand([source, destination, member], this.commandOptions));\n /**\n * @see https://redis.io/commands/spop\n */\n spop = (...args) => this.chain(new SPopCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/srandmember\n */\n srandmember = (...args) => this.chain(new SRandMemberCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/srem\n */\n srem = (key, ...members) => this.chain(new SRemCommand([key, ...members], this.commandOptions));\n /**\n * @see https://redis.io/commands/sscan\n */\n sscan = (...args) => this.chain(new SScanCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/strlen\n */\n strlen = (...args) => this.chain(new StrLenCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/sunion\n */\n sunion = (...args) => this.chain(new SUnionCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/sunionstore\n */\n sunionstore = (...args) => this.chain(new SUnionStoreCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/time\n */\n time = () => this.chain(new TimeCommand(this.commandOptions));\n /**\n * @see 
https://redis.io/commands/touch\n */\n touch = (...args) => this.chain(new TouchCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/ttl\n */\n ttl = (...args) => this.chain(new TtlCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/type\n */\n type = (...args) => this.chain(new TypeCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/unlink\n */\n unlink = (...args) => this.chain(new UnlinkCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zadd\n */\n zadd = (...args) => {\n if (\"score\" in args[1]) {\n return this.chain(\n new ZAddCommand([args[0], args[1], ...args.slice(2)], this.commandOptions)\n );\n }\n return this.chain(\n new ZAddCommand(\n [args[0], args[1], ...args.slice(2)],\n this.commandOptions\n )\n );\n };\n /**\n * @see https://redis.io/commands/xadd\n */\n xadd = (...args) => this.chain(new XAddCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xack\n */\n xack = (...args) => this.chain(new XAckCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xdel\n */\n xdel = (...args) => this.chain(new XDelCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xgroup\n */\n xgroup = (...args) => this.chain(new XGroupCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xread\n */\n xread = (...args) => this.chain(new XReadCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xreadgroup\n */\n xreadgroup = (...args) => this.chain(new XReadGroupCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xinfo\n */\n xinfo = (...args) => this.chain(new XInfoCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xlen\n */\n xlen = (...args) => this.chain(new XLenCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xpending\n */\n xpending = (...args) => this.chain(new XPendingCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xclaim\n */\n xclaim = (...args) => this.chain(new XClaimCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xautoclaim\n */\n xautoclaim = (...args) => this.chain(new XAutoClaim(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xtrim\n */\n xtrim = (...args) => this.chain(new XTrimCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xrange\n */\n xrange = (...args) => this.chain(new XRangeCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xrevrange\n */\n xrevrange = (...args) => this.chain(new XRevRangeCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zcard\n */\n zcard = (...args) => this.chain(new ZCardCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zcount\n */\n zcount = (...args) => this.chain(new ZCountCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zincrby\n */\n zincrby = (key, increment, member) => this.chain(new ZIncrByCommand([key, increment, member], this.commandOptions));\n /**\n * @see https://redis.io/commands/zinterstore\n */\n zinterstore = (...args) => this.chain(new ZInterStoreCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zlexcount\n */\n zlexcount = (...args) => this.chain(new ZLexCountCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zmscore\n */\n zmscore = (...args) => this.chain(new 
ZMScoreCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zpopmax\n */\n zpopmax = (...args) => this.chain(new ZPopMaxCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zpopmin\n */\n zpopmin = (...args) => this.chain(new ZPopMinCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zrange\n */\n zrange = (...args) => this.chain(new ZRangeCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zrank\n */\n zrank = (key, member) => this.chain(new ZRankCommand([key, member], this.commandOptions));\n /**\n * @see https://redis.io/commands/zrem\n */\n zrem = (key, ...members) => this.chain(new ZRemCommand([key, ...members], this.commandOptions));\n /**\n * @see https://redis.io/commands/zremrangebylex\n */\n zremrangebylex = (...args) => this.chain(new ZRemRangeByLexCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zremrangebyrank\n */\n zremrangebyrank = (...args) => this.chain(new ZRemRangeByRankCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zremrangebyscore\n */\n zremrangebyscore = (...args) => this.chain(new ZRemRangeByScoreCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zrevrank\n */\n zrevrank = (key, member) => this.chain(new ZRevRankCommand([key, member], this.commandOptions));\n /**\n * @see https://redis.io/commands/zscan\n */\n zscan = (...args) => this.chain(new ZScanCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zscore\n */\n zscore = (key, member) => this.chain(new ZScoreCommand([key, member], this.commandOptions));\n /**\n * @see https://redis.io/commands/zunionstore\n */\n zunionstore = (...args) => this.chain(new ZUnionStoreCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zunion\n */\n zunion = (...args) => this.chain(new ZUnionCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/?group=json\n */\n get json() {\n return {\n /**\n * @see https://redis.io/commands/json.arrappend\n */\n arrappend: (...args) => this.chain(new JsonArrAppendCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.arrindex\n */\n arrindex: (...args) => this.chain(new JsonArrIndexCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.arrinsert\n */\n arrinsert: (...args) => this.chain(new JsonArrInsertCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.arrlen\n */\n arrlen: (...args) => this.chain(new JsonArrLenCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.arrpop\n */\n arrpop: (...args) => this.chain(new JsonArrPopCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.arrtrim\n */\n arrtrim: (...args) => this.chain(new JsonArrTrimCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.clear\n */\n clear: (...args) => this.chain(new JsonClearCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.del\n */\n del: (...args) => this.chain(new JsonDelCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.forget\n */\n forget: (...args) => this.chain(new JsonForgetCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.get\n */\n get: (...args) => this.chain(new JsonGetCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.mget\n */\n mget: (...args) => 
this.chain(new JsonMGetCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.mset\n */\n mset: (...args) => this.chain(new JsonMSetCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.numincrby\n */\n numincrby: (...args) => this.chain(new JsonNumIncrByCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.nummultby\n */\n nummultby: (...args) => this.chain(new JsonNumMultByCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.objkeys\n */\n objkeys: (...args) => this.chain(new JsonObjKeysCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.objlen\n */\n objlen: (...args) => this.chain(new JsonObjLenCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.resp\n */\n resp: (...args) => this.chain(new JsonRespCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.set\n */\n set: (...args) => this.chain(new JsonSetCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.strappend\n */\n strappend: (...args) => this.chain(new JsonStrAppendCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.strlen\n */\n strlen: (...args) => this.chain(new JsonStrLenCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.toggle\n */\n toggle: (...args) => this.chain(new JsonToggleCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.type\n */\n type: (...args) => this.chain(new JsonTypeCommand(args, this.commandOptions))\n };\n }\n};\n\n// pkg/script.ts\nvar import_enc_hex = __toESM(require(\"crypto-js/enc-hex.js\"));\nvar import_sha1 = __toESM(require(\"crypto-js/sha1.js\"));\nvar Script = class {\n script;\n sha1;\n redis;\n constructor(redis, script) {\n this.redis = redis;\n this.sha1 = this.digest(script);\n this.script = script;\n }\n /**\n * Send an `EVAL` command to redis.\n */\n async eval(keys, args) {\n return await this.redis.eval(this.script, keys, args);\n }\n /**\n * Calculates the sha1 hash of the script and then calls `EVALSHA`.\n */\n async evalsha(keys, args) {\n return await this.redis.evalsha(this.sha1, keys, args);\n }\n /**\n * Optimistically try to run `EVALSHA` first.\n * If the script is not loaded in redis, it will fall back and try again with `EVAL`.\n *\n * Following calls will be able to use the cached script\n */\n async exec(keys, args) {\n const res = await this.redis.evalsha(this.sha1, keys, args).catch(async (error) => {\n if (error instanceof Error && error.message.toLowerCase().includes(\"noscript\")) {\n return await this.redis.eval(this.script, keys, args);\n }\n throw error;\n });\n return res;\n }\n /**\n * Compute the sha1 hash of the script and return its hex representation.\n */\n digest(s) {\n return import_enc_hex.default.stringify((0, import_sha1.default)(s));\n }\n};\n\n// pkg/redis.ts\nvar Redis = class {\n client;\n opts;\n enableTelemetry;\n enableAutoPipelining;\n /**\n * Create a new redis client\n *\n * @example\n * ```typescript\n * const redis = new Redis({\n * url: \"\",\n * token: \"\",\n * });\n * ```\n */\n constructor(client, opts) {\n this.client = client;\n this.opts = opts;\n this.enableTelemetry = opts?.enableTelemetry ?? true;\n if (opts?.readYourWrites === false) {\n this.client.readYourWrites = false;\n }\n this.enableAutoPipelining = opts?.enableAutoPipelining ?? 
true;\n }\n get readYourWritesSyncToken() {\n return this.client.upstashSyncToken;\n }\n set readYourWritesSyncToken(session) {\n this.client.upstashSyncToken = session;\n }\n get json() {\n return {\n /**\n * @see https://redis.io/commands/json.arrappend\n */\n arrappend: (...args) => new JsonArrAppendCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.arrindex\n */\n arrindex: (...args) => new JsonArrIndexCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.arrinsert\n */\n arrinsert: (...args) => new JsonArrInsertCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.arrlen\n */\n arrlen: (...args) => new JsonArrLenCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.arrpop\n */\n arrpop: (...args) => new JsonArrPopCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.arrtrim\n */\n arrtrim: (...args) => new JsonArrTrimCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.clear\n */\n clear: (...args) => new JsonClearCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.del\n */\n del: (...args) => new JsonDelCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.forget\n */\n forget: (...args) => new JsonForgetCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.get\n */\n get: (...args) => new JsonGetCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.mget\n */\n mget: (...args) => new JsonMGetCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.mset\n */\n mset: (...args) => new JsonMSetCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.numincrby\n */\n numincrby: (...args) => new JsonNumIncrByCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.nummultby\n */\n nummultby: (...args) => new JsonNumMultByCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.objkeys\n */\n objkeys: (...args) => new JsonObjKeysCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.objlen\n */\n objlen: (...args) => new JsonObjLenCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.resp\n */\n resp: (...args) => new JsonRespCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.set\n */\n set: (...args) => new JsonSetCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.strappend\n */\n strappend: (...args) => new JsonStrAppendCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.strlen\n */\n strlen: (...args) => new JsonStrLenCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.toggle\n */\n toggle: (...args) => new JsonToggleCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.type\n */\n type: (...args) => new JsonTypeCommand(args, this.opts).exec(this.client)\n };\n }\n /**\n * Wrap a new middleware around the HTTP client.\n */\n use = (middleware) => {\n const makeRequest = this.client.request.bind(this.client);\n this.client.request = (req) => middleware(req, makeRequest);\n };\n /**\n * Technically this is not private, we can hide it from 
intellisense by doing this\n */\n addTelemetry = (telemetry) => {\n if (!this.enableTelemetry) {\n return;\n }\n try {\n this.client.mergeTelemetry(telemetry);\n } catch {\n }\n };\n createScript(script) {\n return new Script(this, script);\n }\n /**\n * Create a new pipeline that allows you to send requests in bulk.\n *\n * @see {@link Pipeline}\n */\n pipeline = () => new Pipeline({\n client: this.client,\n commandOptions: this.opts,\n multiExec: false\n });\n autoPipeline = () => {\n return createAutoPipelineProxy(this);\n };\n /**\n * Create a new transaction to allow executing multiple steps atomically.\n *\n * All the commands in a transaction are serialized and executed sequentially. A request sent by\n * another client will never be served in the middle of the execution of a Redis Transaction. This\n * guarantees that the commands are executed as a single isolated operation.\n *\n * @see {@link Pipeline}\n */\n multi = () => new Pipeline({\n client: this.client,\n commandOptions: this.opts,\n multiExec: true\n });\n /**\n * Returns an instance that can be used to execute `BITFIELD` commands on one key.\n *\n * @example\n * ```typescript\n * redis.set(\"mykey\", 0);\n * const result = await redis.bitfield(\"mykey\")\n * .set(\"u4\", 0, 16)\n * .incr(\"u4\", \"#1\", 1)\n * .exec();\n * console.log(result); // [0, 1]\n * ```\n *\n * @see https://redis.io/commands/bitfield\n */\n bitfield = (...args) => new BitFieldCommand(args, this.client, this.opts);\n /**\n * @see https://redis.io/commands/append\n */\n append = (...args) => new AppendCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/bitcount\n */\n bitcount = (...args) => new BitCountCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/bitop\n */\n bitop = (op, destinationKey, sourceKey, ...sourceKeys) => new BitOpCommand([op, destinationKey, sourceKey, ...sourceKeys], this.opts).exec(\n this.client\n );\n /**\n * @see https://redis.io/commands/bitpos\n */\n bitpos = (...args) => new BitPosCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/copy\n */\n copy = (...args) => new CopyCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/dbsize\n */\n dbsize = () => new DBSizeCommand(this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/decr\n */\n decr = (...args) => new DecrCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/decrby\n */\n decrby = (...args) => new DecrByCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/del\n */\n del = (...args) => new DelCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/echo\n */\n echo = (...args) => new EchoCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/eval\n */\n eval = (...args) => new EvalCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/evalsha\n */\n evalsha = (...args) => new EvalshaCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/exists\n */\n exists = (...args) => new ExistsCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/expire\n */\n expire = (...args) => new ExpireCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/expireat\n */\n expireat = (...args) => new ExpireAtCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/flushall\n */\n 
flushall = (args) => new FlushAllCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/flushdb\n */\n flushdb = (...args) => new FlushDBCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/geoadd\n */\n geoadd = (...args) => new GeoAddCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/geopos\n */\n geopos = (...args) => new GeoPosCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/geodist\n */\n geodist = (...args) => new GeoDistCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/geohash\n */\n geohash = (...args) => new GeoHashCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/geosearch\n */\n geosearch = (...args) => new GeoSearchCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/geosearchstore\n */\n geosearchstore = (...args) => new GeoSearchStoreCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/get\n */\n get = (...args) => new GetCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/getbit\n */\n getbit = (...args) => new GetBitCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/getdel\n */\n getdel = (...args) => new GetDelCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/getrange\n */\n getrange = (...args) => new GetRangeCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/getset\n */\n getset = (key, value) => new GetSetCommand([key, value], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hdel\n */\n hdel = (...args) => new HDelCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hexists\n */\n hexists = (...args) => new HExistsCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hget\n */\n hget = (...args) => new HGetCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hgetall\n */\n hgetall = (...args) => new HGetAllCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hincrby\n */\n hincrby = (...args) => new HIncrByCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hincrbyfloat\n */\n hincrbyfloat = (...args) => new HIncrByFloatCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hkeys\n */\n hkeys = (...args) => new HKeysCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hlen\n */\n hlen = (...args) => new HLenCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hmget\n */\n hmget = (...args) => new HMGetCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hmset\n */\n hmset = (key, kv) => new HMSetCommand([key, kv], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hrandfield\n */\n hrandfield = (key, count, withValues) => new HRandFieldCommand([key, count, withValues], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hscan\n */\n hscan = (...args) => new HScanCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hset\n */\n hset = (key, kv) => new HSetCommand([key, kv], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hsetnx\n */\n hsetnx = (key, field, value) => new 
HSetNXCommand([key, field, value], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hstrlen\n */\n hstrlen = (...args) => new HStrLenCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hvals\n */\n hvals = (...args) => new HValsCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/incr\n */\n incr = (...args) => new IncrCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/incrby\n */\n incrby = (...args) => new IncrByCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/incrbyfloat\n */\n incrbyfloat = (...args) => new IncrByFloatCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/keys\n */\n keys = (...args) => new KeysCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/lindex\n */\n lindex = (...args) => new LIndexCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/linsert\n */\n linsert = (key, direction, pivot, value) => new LInsertCommand([key, direction, pivot, value], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/llen\n */\n llen = (...args) => new LLenCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/lmove\n */\n lmove = (...args) => new LMoveCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/lpop\n */\n lpop = (...args) => new LPopCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/lmpop\n */\n lmpop = (...args) => new LmPopCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/lpos\n */\n lpos = (...args) => new LPosCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/lpush\n */\n lpush = (key, ...elements) => new LPushCommand([key, ...elements], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/lpushx\n */\n lpushx = (key, ...elements) => new LPushXCommand([key, ...elements], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/lrange\n */\n lrange = (...args) => new LRangeCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/lrem\n */\n lrem = (key, count, value) => new LRemCommand([key, count, value], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/lset\n */\n lset = (key, index, value) => new LSetCommand([key, index, value], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/ltrim\n */\n ltrim = (...args) => new LTrimCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/mget\n */\n mget = (...args) => new MGetCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/mset\n */\n mset = (kv) => new MSetCommand([kv], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/msetnx\n */\n msetnx = (kv) => new MSetNXCommand([kv], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/persist\n */\n persist = (...args) => new PersistCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/pexpire\n */\n pexpire = (...args) => new PExpireCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/pexpireat\n */\n pexpireat = (...args) => new PExpireAtCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/pfadd\n */\n pfadd = (...args) => new PfAddCommand(args, 
this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/pfcount\n */\n pfcount = (...args) => new PfCountCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/pfmerge\n */\n pfmerge = (...args) => new PfMergeCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/ping\n */\n ping = (args) => new PingCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/psetex\n */\n psetex = (key, ttl, value) => new PSetEXCommand([key, ttl, value], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/pttl\n */\n pttl = (...args) => new PTtlCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/publish\n */\n publish = (...args) => new PublishCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/randomkey\n */\n randomkey = () => new RandomKeyCommand().exec(this.client);\n /**\n * @see https://redis.io/commands/rename\n */\n rename = (...args) => new RenameCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/renamenx\n */\n renamenx = (...args) => new RenameNXCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/rpop\n */\n rpop = (...args) => new RPopCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/rpush\n */\n rpush = (key, ...elements) => new RPushCommand([key, ...elements], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/rpushx\n */\n rpushx = (key, ...elements) => new RPushXCommand([key, ...elements], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/sadd\n */\n sadd = (key, member, ...members) => new SAddCommand([key, member, ...members], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/scan\n */\n scan = (...args) => new ScanCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/scard\n */\n scard = (...args) => new SCardCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/script-exists\n */\n scriptExists = (...args) => new ScriptExistsCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/script-flush\n */\n scriptFlush = (...args) => new ScriptFlushCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/script-load\n */\n scriptLoad = (...args) => new ScriptLoadCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/sdiff\n */\n sdiff = (...args) => new SDiffCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/sdiffstore\n */\n sdiffstore = (...args) => new SDiffStoreCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/set\n */\n set = (key, value, opts) => new SetCommand([key, value, opts], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/setbit\n */\n setbit = (...args) => new SetBitCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/setex\n */\n setex = (key, ttl, value) => new SetExCommand([key, ttl, value], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/setnx\n */\n setnx = (key, value) => new SetNxCommand([key, value], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/setrange\n */\n setrange = (...args) => new SetRangeCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/sinter\n */\n sinter = (...args) => 
new SInterCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/sinterstore\n */\n sinterstore = (...args) => new SInterStoreCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/sismember\n */\n sismember = (key, member) => new SIsMemberCommand([key, member], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/smismember\n */\n smismember = (key, members) => new SMIsMemberCommand([key, members], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/smembers\n */\n smembers = (...args) => new SMembersCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/smove\n */\n smove = (source, destination, member) => new SMoveCommand([source, destination, member], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/spop\n */\n spop = (...args) => new SPopCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/srandmember\n */\n srandmember = (...args) => new SRandMemberCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/srem\n */\n srem = (key, ...members) => new SRemCommand([key, ...members], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/sscan\n */\n sscan = (...args) => new SScanCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/strlen\n */\n strlen = (...args) => new StrLenCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/sunion\n */\n sunion = (...args) => new SUnionCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/sunionstore\n */\n sunionstore = (...args) => new SUnionStoreCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/time\n */\n time = () => new TimeCommand().exec(this.client);\n /**\n * @see https://redis.io/commands/touch\n */\n touch = (...args) => new TouchCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/ttl\n */\n ttl = (...args) => new TtlCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/type\n */\n type = (...args) => new TypeCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/unlink\n */\n unlink = (...args) => new UnlinkCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xadd\n */\n xadd = (...args) => new XAddCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xack\n */\n xack = (...args) => new XAckCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xdel\n */\n xdel = (...args) => new XDelCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xgroup\n */\n xgroup = (...args) => new XGroupCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xread\n */\n xread = (...args) => new XReadCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xreadgroup\n */\n xreadgroup = (...args) => new XReadGroupCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xinfo\n */\n xinfo = (...args) => new XInfoCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xlen\n */\n xlen = (...args) => new XLenCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xpending\n */\n xpending = (...args) => new XPendingCommand(args, this.opts).exec(this.client);\n 
/**\n * @see https://redis.io/commands/xclaim\n */\n xclaim = (...args) => new XClaimCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xautoclaim\n */\n xautoclaim = (...args) => new XAutoClaim(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xtrim\n */\n xtrim = (...args) => new XTrimCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xrange\n */\n xrange = (...args) => new XRangeCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xrevrange\n */\n xrevrange = (...args) => new XRevRangeCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zadd\n */\n zadd = (...args) => {\n if (\"score\" in args[1]) {\n return new ZAddCommand([args[0], args[1], ...args.slice(2)], this.opts).exec(\n this.client\n );\n }\n return new ZAddCommand(\n [args[0], args[1], ...args.slice(2)],\n this.opts\n ).exec(this.client);\n };\n /**\n * @see https://redis.io/commands/zcard\n */\n zcard = (...args) => new ZCardCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zcount\n */\n zcount = (...args) => new ZCountCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zdiffstore\n */\n zdiffstore = (...args) => new ZDiffStoreCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zincrby\n */\n zincrby = (key, increment, member) => new ZIncrByCommand([key, increment, member], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zinterstore\n */\n zinterstore = (...args) => new ZInterStoreCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zlexcount\n */\n zlexcount = (...args) => new ZLexCountCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zmscore\n */\n zmscore = (...args) => new ZMScoreCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zpopmax\n */\n zpopmax = (...args) => new ZPopMaxCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zpopmin\n */\n zpopmin = (...args) => new ZPopMinCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zrange\n */\n zrange = (...args) => new ZRangeCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zrank\n */\n zrank = (key, member) => new ZRankCommand([key, member], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zrem\n */\n zrem = (key, ...members) => new ZRemCommand([key, ...members], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zremrangebylex\n */\n zremrangebylex = (...args) => new ZRemRangeByLexCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zremrangebyrank\n */\n zremrangebyrank = (...args) => new ZRemRangeByRankCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zremrangebyscore\n */\n zremrangebyscore = (...args) => new ZRemRangeByScoreCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zrevrank\n */\n zrevrank = (key, member) => new ZRevRankCommand([key, member], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zscan\n */\n zscan = (...args) => new ZScanCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zscore\n */\n zscore = (key, member) => new ZScoreCommand([key, member], this.opts).exec(this.client);\n 
/**\n * @see https://redis.io/commands/zunion\n */\n zunion = (...args) => new ZUnionCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zunionstore\n */\n zunionstore = (...args) => new ZUnionStoreCommand(args, this.opts).exec(this.client);\n};\n\n// version.ts\nvar VERSION = \"v1.34.3\";\n\n// platforms/nodejs.ts\nif (typeof atob === \"undefined\") {\n global.atob = (b64) => Buffer.from(b64, \"base64\").toString(\"utf8\");\n}\nvar Redis2 = class _Redis extends Redis {\n /**\n * Create a new redis client by providing a custom `Requester` implementation\n *\n * @example\n * ```ts\n *\n * import { UpstashRequest, Requester, UpstashResponse, Redis } from \"@upstash/redis\"\n *\n * const requester: Requester = {\n * request: (req: UpstashRequest): Promise> => {\n * // ...\n * }\n * }\n *\n * const redis = new Redis(requester)\n * ```\n */\n constructor(configOrRequester) {\n if (\"request\" in configOrRequester) {\n super(configOrRequester);\n return;\n }\n if (!configOrRequester.url) {\n console.warn(\n `[Upstash Redis] The 'url' property is missing or undefined in your Redis config.`\n );\n } else if (configOrRequester.url.startsWith(\" \") || configOrRequester.url.endsWith(\" \") || /\\r|\\n/.test(configOrRequester.url)) {\n console.warn(\n \"[Upstash Redis] The redis url contains whitespace or newline, which can cause errors!\"\n );\n }\n if (!configOrRequester.token) {\n console.warn(\n `[Upstash Redis] The 'token' property is missing or undefined in your Redis config.`\n );\n } else if (configOrRequester.token.startsWith(\" \") || configOrRequester.token.endsWith(\" \") || /\\r|\\n/.test(configOrRequester.token)) {\n console.warn(\n \"[Upstash Redis] The redis token contains whitespace or newline, which can cause errors!\"\n );\n }\n const client = new HttpClient({\n baseUrl: configOrRequester.url,\n retry: configOrRequester.retry,\n headers: { authorization: `Bearer ${configOrRequester.token}` },\n agent: configOrRequester.agent,\n responseEncoding: configOrRequester.responseEncoding,\n cache: configOrRequester.cache ?? \"no-store\",\n signal: configOrRequester.signal,\n keepAlive: configOrRequester.keepAlive,\n readYourWrites: configOrRequester.readYourWrites\n });\n super(client, {\n automaticDeserialization: configOrRequester.automaticDeserialization,\n enableTelemetry: !process.env.UPSTASH_DISABLE_TELEMETRY,\n latencyLogging: configOrRequester.latencyLogging,\n enableAutoPipelining: configOrRequester.enableAutoPipelining\n });\n this.addTelemetry({\n runtime: (\n // @ts-expect-error to silence compiler\n typeof EdgeRuntime === \"string\" ? \"edge-light\" : `node@${process.version}`\n ),\n platform: process.env.VERCEL ? \"vercel\" : process.env.AWS_REGION ? \"aws\" : \"unknown\",\n sdk: `@upstash/redis@${VERSION}`\n });\n if (this.enableAutoPipelining) {\n return this.autoPipeline();\n }\n }\n /**\n * Create a new Upstash Redis instance from environment variables.\n *\n * Use this to automatically load connection secrets from your environment\n * variables. For instance when using the Vercel integration.\n *\n * This tries to load `UPSTASH_REDIS_REST_URL` and `UPSTASH_REDIS_REST_TOKEN` from\n * your environment using `process.env`.\n */\n static fromEnv(config) {\n if (process.env === void 0) {\n throw new TypeError(\n '[Upstash Redis] Unable to get environment variables, `process.env` is undefined. 
If you are deploying to cloudflare, please import from \"@upstash/redis/cloudflare\" instead'\n );\n }\n const url = process.env.UPSTASH_REDIS_REST_URL || process.env.KV_REST_API_URL;\n if (!url) {\n console.warn(\"[Upstash Redis] Unable to find environment variable: `UPSTASH_REDIS_REST_URL`\");\n }\n const token = process.env.UPSTASH_REDIS_REST_TOKEN || process.env.KV_REST_API_TOKEN;\n if (!token) {\n console.warn(\n \"[Upstash Redis] Unable to find environment variable: `UPSTASH_REDIS_REST_TOKEN`\"\n );\n }\n return new _Redis({ ...config, url, token });\n }\n};\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n Redis,\n errors\n});\n",";(function (root, factory) {\n\tif (typeof exports === \"object\") {\n\t\t// CommonJS\n\t\tmodule.exports = exports = factory();\n\t}\n\telse if (typeof define === \"function\" && define.amd) {\n\t\t// AMD\n\t\tdefine([], factory);\n\t}\n\telse {\n\t\t// Global (browser)\n\t\troot.CryptoJS = factory();\n\t}\n}(this, function () {\n\n\t/*globals window, global, require*/\n\n\t/**\n\t * CryptoJS core components.\n\t */\n\tvar CryptoJS = CryptoJS || (function (Math, undefined) {\n\n\t var crypto;\n\n\t // Native crypto from window (Browser)\n\t if (typeof window !== 'undefined' && window.crypto) {\n\t crypto = window.crypto;\n\t }\n\n\t // Native crypto in web worker (Browser)\n\t if (typeof self !== 'undefined' && self.crypto) {\n\t crypto = self.crypto;\n\t }\n\n\t // Native crypto from worker\n\t if (typeof globalThis !== 'undefined' && globalThis.crypto) {\n\t crypto = globalThis.crypto;\n\t }\n\n\t // Native (experimental IE 11) crypto from window (Browser)\n\t if (!crypto && typeof window !== 'undefined' && window.msCrypto) {\n\t crypto = window.msCrypto;\n\t }\n\n\t // Native crypto from global (NodeJS)\n\t if (!crypto && typeof global !== 'undefined' && global.crypto) {\n\t crypto = global.crypto;\n\t }\n\n\t // Native crypto import via require (NodeJS)\n\t if (!crypto && typeof require === 'function') {\n\t try {\n\t crypto = require('crypto');\n\t } catch (err) {}\n\t }\n\n\t /*\n\t * Cryptographically secure pseudorandom number generator\n\t *\n\t * As Math.random() is cryptographically not safe to use\n\t */\n\t var cryptoSecureRandomInt = function () {\n\t if (crypto) {\n\t // Use getRandomValues method (Browser)\n\t if (typeof crypto.getRandomValues === 'function') {\n\t try {\n\t return crypto.getRandomValues(new Uint32Array(1))[0];\n\t } catch (err) {}\n\t }\n\n\t // Use randomBytes method (NodeJS)\n\t if (typeof crypto.randomBytes === 'function') {\n\t try {\n\t return crypto.randomBytes(4).readInt32LE();\n\t } catch (err) {}\n\t }\n\t }\n\n\t throw new Error('Native crypto module could not be used to get secure random number.');\n\t };\n\n\t /*\n\t * Local polyfill of Object.create\n\n\t */\n\t var create = Object.create || (function () {\n\t function F() {}\n\n\t return function (obj) {\n\t var subtype;\n\n\t F.prototype = obj;\n\n\t subtype = new F();\n\n\t F.prototype = null;\n\n\t return subtype;\n\t };\n\t }());\n\n\t /**\n\t * CryptoJS namespace.\n\t */\n\t var C = {};\n\n\t /**\n\t * Library namespace.\n\t */\n\t var C_lib = C.lib = {};\n\n\t /**\n\t * Base object for prototypal inheritance.\n\t */\n\t var Base = C_lib.Base = (function () {\n\n\n\t return {\n\t /**\n\t * Creates a new object that inherits from this object.\n\t *\n\t * @param {Object} overrides Properties to copy into the new object.\n\t *\n\t * @return {Object} The new object.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t 
* var MyType = CryptoJS.lib.Base.extend({\n\t * field: 'value',\n\t *\n\t * method: function () {\n\t * }\n\t * });\n\t */\n\t extend: function (overrides) {\n\t // Spawn\n\t var subtype = create(this);\n\n\t // Augment\n\t if (overrides) {\n\t subtype.mixIn(overrides);\n\t }\n\n\t // Create default initializer\n\t if (!subtype.hasOwnProperty('init') || this.init === subtype.init) {\n\t subtype.init = function () {\n\t subtype.$super.init.apply(this, arguments);\n\t };\n\t }\n\n\t // Initializer's prototype is the subtype object\n\t subtype.init.prototype = subtype;\n\n\t // Reference supertype\n\t subtype.$super = this;\n\n\t return subtype;\n\t },\n\n\t /**\n\t * Extends this object and runs the init method.\n\t * Arguments to create() will be passed to init().\n\t *\n\t * @return {Object} The new object.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t * var instance = MyType.create();\n\t */\n\t create: function () {\n\t var instance = this.extend();\n\t instance.init.apply(instance, arguments);\n\n\t return instance;\n\t },\n\n\t /**\n\t * Initializes a newly created object.\n\t * Override this method to add some logic when your objects are created.\n\t *\n\t * @example\n\t *\n\t * var MyType = CryptoJS.lib.Base.extend({\n\t * init: function () {\n\t * // ...\n\t * }\n\t * });\n\t */\n\t init: function () {\n\t },\n\n\t /**\n\t * Copies properties into this object.\n\t *\n\t * @param {Object} properties The properties to mix in.\n\t *\n\t * @example\n\t *\n\t * MyType.mixIn({\n\t * field: 'value'\n\t * });\n\t */\n\t mixIn: function (properties) {\n\t for (var propertyName in properties) {\n\t if (properties.hasOwnProperty(propertyName)) {\n\t this[propertyName] = properties[propertyName];\n\t }\n\t }\n\n\t // IE won't copy toString using the loop above\n\t if (properties.hasOwnProperty('toString')) {\n\t this.toString = properties.toString;\n\t }\n\t },\n\n\t /**\n\t * Creates a copy of this object.\n\t *\n\t * @return {Object} The clone.\n\t *\n\t * @example\n\t *\n\t * var clone = instance.clone();\n\t */\n\t clone: function () {\n\t return this.init.prototype.extend(this);\n\t }\n\t };\n\t }());\n\n\t /**\n\t * An array of 32-bit words.\n\t *\n\t * @property {Array} words The array of 32-bit words.\n\t * @property {number} sigBytes The number of significant bytes in this word array.\n\t */\n\t var WordArray = C_lib.WordArray = Base.extend({\n\t /**\n\t * Initializes a newly created word array.\n\t *\n\t * @param {Array} words (Optional) An array of 32-bit words.\n\t * @param {number} sigBytes (Optional) The number of significant bytes in the words.\n\t *\n\t * @example\n\t *\n\t * var wordArray = CryptoJS.lib.WordArray.create();\n\t * var wordArray = CryptoJS.lib.WordArray.create([0x00010203, 0x04050607]);\n\t * var wordArray = CryptoJS.lib.WordArray.create([0x00010203, 0x04050607], 6);\n\t */\n\t init: function (words, sigBytes) {\n\t words = this.words = words || [];\n\n\t if (sigBytes != undefined) {\n\t this.sigBytes = sigBytes;\n\t } else {\n\t this.sigBytes = words.length * 4;\n\t }\n\t },\n\n\t /**\n\t * Converts this word array to a string.\n\t *\n\t * @param {Encoder} encoder (Optional) The encoding strategy to use. 
Default: CryptoJS.enc.Hex\n\t *\n\t * @return {string} The stringified word array.\n\t *\n\t * @example\n\t *\n\t * var string = wordArray + '';\n\t * var string = wordArray.toString();\n\t * var string = wordArray.toString(CryptoJS.enc.Utf8);\n\t */\n\t toString: function (encoder) {\n\t return (encoder || Hex).stringify(this);\n\t },\n\n\t /**\n\t * Concatenates a word array to this word array.\n\t *\n\t * @param {WordArray} wordArray The word array to append.\n\t *\n\t * @return {WordArray} This word array.\n\t *\n\t * @example\n\t *\n\t * wordArray1.concat(wordArray2);\n\t */\n\t concat: function (wordArray) {\n\t // Shortcuts\n\t var thisWords = this.words;\n\t var thatWords = wordArray.words;\n\t var thisSigBytes = this.sigBytes;\n\t var thatSigBytes = wordArray.sigBytes;\n\n\t // Clamp excess bits\n\t this.clamp();\n\n\t // Concat\n\t if (thisSigBytes % 4) {\n\t // Copy one byte at a time\n\t for (var i = 0; i < thatSigBytes; i++) {\n\t var thatByte = (thatWords[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff;\n\t thisWords[(thisSigBytes + i) >>> 2] |= thatByte << (24 - ((thisSigBytes + i) % 4) * 8);\n\t }\n\t } else {\n\t // Copy one word at a time\n\t for (var j = 0; j < thatSigBytes; j += 4) {\n\t thisWords[(thisSigBytes + j) >>> 2] = thatWords[j >>> 2];\n\t }\n\t }\n\t this.sigBytes += thatSigBytes;\n\n\t // Chainable\n\t return this;\n\t },\n\n\t /**\n\t * Removes insignificant bits.\n\t *\n\t * @example\n\t *\n\t * wordArray.clamp();\n\t */\n\t clamp: function () {\n\t // Shortcuts\n\t var words = this.words;\n\t var sigBytes = this.sigBytes;\n\n\t // Clamp\n\t words[sigBytes >>> 2] &= 0xffffffff << (32 - (sigBytes % 4) * 8);\n\t words.length = Math.ceil(sigBytes / 4);\n\t },\n\n\t /**\n\t * Creates a copy of this word array.\n\t *\n\t * @return {WordArray} The clone.\n\t *\n\t * @example\n\t *\n\t * var clone = wordArray.clone();\n\t */\n\t clone: function () {\n\t var clone = Base.clone.call(this);\n\t clone.words = this.words.slice(0);\n\n\t return clone;\n\t },\n\n\t /**\n\t * Creates a word array filled with random bytes.\n\t *\n\t * @param {number} nBytes The number of random bytes to generate.\n\t *\n\t * @return {WordArray} The random word array.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t * var wordArray = CryptoJS.lib.WordArray.random(16);\n\t */\n\t random: function (nBytes) {\n\t var words = [];\n\n\t for (var i = 0; i < nBytes; i += 4) {\n\t words.push(cryptoSecureRandomInt());\n\t }\n\n\t return new WordArray.init(words, nBytes);\n\t }\n\t });\n\n\t /**\n\t * Encoder namespace.\n\t */\n\t var C_enc = C.enc = {};\n\n\t /**\n\t * Hex encoding strategy.\n\t */\n\t var Hex = C_enc.Hex = {\n\t /**\n\t * Converts a word array to a hex string.\n\t *\n\t * @param {WordArray} wordArray The word array.\n\t *\n\t * @return {string} The hex string.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t * var hexString = CryptoJS.enc.Hex.stringify(wordArray);\n\t */\n\t stringify: function (wordArray) {\n\t // Shortcuts\n\t var words = wordArray.words;\n\t var sigBytes = wordArray.sigBytes;\n\n\t // Convert\n\t var hexChars = [];\n\t for (var i = 0; i < sigBytes; i++) {\n\t var bite = (words[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff;\n\t hexChars.push((bite >>> 4).toString(16));\n\t hexChars.push((bite & 0x0f).toString(16));\n\t }\n\n\t return hexChars.join('');\n\t },\n\n\t /**\n\t * Converts a hex string to a word array.\n\t *\n\t * @param {string} hexStr The hex string.\n\t *\n\t * @return {WordArray} The word array.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t * var 
wordArray = CryptoJS.enc.Hex.parse(hexString);\n\t */\n\t parse: function (hexStr) {\n\t // Shortcut\n\t var hexStrLength = hexStr.length;\n\n\t // Convert\n\t var words = [];\n\t for (var i = 0; i < hexStrLength; i += 2) {\n\t words[i >>> 3] |= parseInt(hexStr.substr(i, 2), 16) << (24 - (i % 8) * 4);\n\t }\n\n\t return new WordArray.init(words, hexStrLength / 2);\n\t }\n\t };\n\n\t /**\n\t * Latin1 encoding strategy.\n\t */\n\t var Latin1 = C_enc.Latin1 = {\n\t /**\n\t * Converts a word array to a Latin1 string.\n\t *\n\t * @param {WordArray} wordArray The word array.\n\t *\n\t * @return {string} The Latin1 string.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t * var latin1String = CryptoJS.enc.Latin1.stringify(wordArray);\n\t */\n\t stringify: function (wordArray) {\n\t // Shortcuts\n\t var words = wordArray.words;\n\t var sigBytes = wordArray.sigBytes;\n\n\t // Convert\n\t var latin1Chars = [];\n\t for (var i = 0; i < sigBytes; i++) {\n\t var bite = (words[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff;\n\t latin1Chars.push(String.fromCharCode(bite));\n\t }\n\n\t return latin1Chars.join('');\n\t },\n\n\t /**\n\t * Converts a Latin1 string to a word array.\n\t *\n\t * @param {string} latin1Str The Latin1 string.\n\t *\n\t * @return {WordArray} The word array.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t * var wordArray = CryptoJS.enc.Latin1.parse(latin1String);\n\t */\n\t parse: function (latin1Str) {\n\t // Shortcut\n\t var latin1StrLength = latin1Str.length;\n\n\t // Convert\n\t var words = [];\n\t for (var i = 0; i < latin1StrLength; i++) {\n\t words[i >>> 2] |= (latin1Str.charCodeAt(i) & 0xff) << (24 - (i % 4) * 8);\n\t }\n\n\t return new WordArray.init(words, latin1StrLength);\n\t }\n\t };\n\n\t /**\n\t * UTF-8 encoding strategy.\n\t */\n\t var Utf8 = C_enc.Utf8 = {\n\t /**\n\t * Converts a word array to a UTF-8 string.\n\t *\n\t * @param {WordArray} wordArray The word array.\n\t *\n\t * @return {string} The UTF-8 string.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t * var utf8String = CryptoJS.enc.Utf8.stringify(wordArray);\n\t */\n\t stringify: function (wordArray) {\n\t try {\n\t return decodeURIComponent(escape(Latin1.stringify(wordArray)));\n\t } catch (e) {\n\t throw new Error('Malformed UTF-8 data');\n\t }\n\t },\n\n\t /**\n\t * Converts a UTF-8 string to a word array.\n\t *\n\t * @param {string} utf8Str The UTF-8 string.\n\t *\n\t * @return {WordArray} The word array.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t * var wordArray = CryptoJS.enc.Utf8.parse(utf8String);\n\t */\n\t parse: function (utf8Str) {\n\t return Latin1.parse(unescape(encodeURIComponent(utf8Str)));\n\t }\n\t };\n\n\t /**\n\t * Abstract buffered block algorithm template.\n\t *\n\t * The property blockSize must be implemented in a concrete subtype.\n\t *\n\t * @property {number} _minBufferSize The number of blocks that should be kept unprocessed in the buffer. Default: 0\n\t */\n\t var BufferedBlockAlgorithm = C_lib.BufferedBlockAlgorithm = Base.extend({\n\t /**\n\t * Resets this block algorithm's data buffer to its initial state.\n\t *\n\t * @example\n\t *\n\t * bufferedBlockAlgorithm.reset();\n\t */\n\t reset: function () {\n\t // Initial values\n\t this._data = new WordArray.init();\n\t this._nDataBytes = 0;\n\t },\n\n\t /**\n\t * Adds new data to this block algorithm's buffer.\n\t *\n\t * @param {WordArray|string} data The data to append. 
Strings are converted to a WordArray using UTF-8.\n\t *\n\t * @example\n\t *\n\t * bufferedBlockAlgorithm._append('data');\n\t * bufferedBlockAlgorithm._append(wordArray);\n\t */\n\t _append: function (data) {\n\t // Convert string to WordArray, else assume WordArray already\n\t if (typeof data == 'string') {\n\t data = Utf8.parse(data);\n\t }\n\n\t // Append\n\t this._data.concat(data);\n\t this._nDataBytes += data.sigBytes;\n\t },\n\n\t /**\n\t * Processes available data blocks.\n\t *\n\t * This method invokes _doProcessBlock(offset), which must be implemented by a concrete subtype.\n\t *\n\t * @param {boolean} doFlush Whether all blocks and partial blocks should be processed.\n\t *\n\t * @return {WordArray} The processed data.\n\t *\n\t * @example\n\t *\n\t * var processedData = bufferedBlockAlgorithm._process();\n\t * var processedData = bufferedBlockAlgorithm._process(!!'flush');\n\t */\n\t _process: function (doFlush) {\n\t var processedWords;\n\n\t // Shortcuts\n\t var data = this._data;\n\t var dataWords = data.words;\n\t var dataSigBytes = data.sigBytes;\n\t var blockSize = this.blockSize;\n\t var blockSizeBytes = blockSize * 4;\n\n\t // Count blocks ready\n\t var nBlocksReady = dataSigBytes / blockSizeBytes;\n\t if (doFlush) {\n\t // Round up to include partial blocks\n\t nBlocksReady = Math.ceil(nBlocksReady);\n\t } else {\n\t // Round down to include only full blocks,\n\t // less the number of blocks that must remain in the buffer\n\t nBlocksReady = Math.max((nBlocksReady | 0) - this._minBufferSize, 0);\n\t }\n\n\t // Count words ready\n\t var nWordsReady = nBlocksReady * blockSize;\n\n\t // Count bytes ready\n\t var nBytesReady = Math.min(nWordsReady * 4, dataSigBytes);\n\n\t // Process blocks\n\t if (nWordsReady) {\n\t for (var offset = 0; offset < nWordsReady; offset += blockSize) {\n\t // Perform concrete-algorithm logic\n\t this._doProcessBlock(dataWords, offset);\n\t }\n\n\t // Remove processed words\n\t processedWords = dataWords.splice(0, nWordsReady);\n\t data.sigBytes -= nBytesReady;\n\t }\n\n\t // Return processed words\n\t return new WordArray.init(processedWords, nBytesReady);\n\t },\n\n\t /**\n\t * Creates a copy of this object.\n\t *\n\t * @return {Object} The clone.\n\t *\n\t * @example\n\t *\n\t * var clone = bufferedBlockAlgorithm.clone();\n\t */\n\t clone: function () {\n\t var clone = Base.clone.call(this);\n\t clone._data = this._data.clone();\n\n\t return clone;\n\t },\n\n\t _minBufferSize: 0\n\t });\n\n\t /**\n\t * Abstract hasher template.\n\t *\n\t * @property {number} blockSize The number of 32-bit words this hasher operates on. 
Default: 16 (512 bits)\n\t */\n\t var Hasher = C_lib.Hasher = BufferedBlockAlgorithm.extend({\n\t /**\n\t * Configuration options.\n\t */\n\t cfg: Base.extend(),\n\n\t /**\n\t * Initializes a newly created hasher.\n\t *\n\t * @param {Object} cfg (Optional) The configuration options to use for this hash computation.\n\t *\n\t * @example\n\t *\n\t * var hasher = CryptoJS.algo.SHA256.create();\n\t */\n\t init: function (cfg) {\n\t // Apply config defaults\n\t this.cfg = this.cfg.extend(cfg);\n\n\t // Set initial values\n\t this.reset();\n\t },\n\n\t /**\n\t * Resets this hasher to its initial state.\n\t *\n\t * @example\n\t *\n\t * hasher.reset();\n\t */\n\t reset: function () {\n\t // Reset data buffer\n\t BufferedBlockAlgorithm.reset.call(this);\n\n\t // Perform concrete-hasher logic\n\t this._doReset();\n\t },\n\n\t /**\n\t * Updates this hasher with a message.\n\t *\n\t * @param {WordArray|string} messageUpdate The message to append.\n\t *\n\t * @return {Hasher} This hasher.\n\t *\n\t * @example\n\t *\n\t * hasher.update('message');\n\t * hasher.update(wordArray);\n\t */\n\t update: function (messageUpdate) {\n\t // Append\n\t this._append(messageUpdate);\n\n\t // Update the hash\n\t this._process();\n\n\t // Chainable\n\t return this;\n\t },\n\n\t /**\n\t * Finalizes the hash computation.\n\t * Note that the finalize operation is effectively a destructive, read-once operation.\n\t *\n\t * @param {WordArray|string} messageUpdate (Optional) A final message update.\n\t *\n\t * @return {WordArray} The hash.\n\t *\n\t * @example\n\t *\n\t * var hash = hasher.finalize();\n\t * var hash = hasher.finalize('message');\n\t * var hash = hasher.finalize(wordArray);\n\t */\n\t finalize: function (messageUpdate) {\n\t // Final message update\n\t if (messageUpdate) {\n\t this._append(messageUpdate);\n\t }\n\n\t // Perform concrete-hasher logic\n\t var hash = this._doFinalize();\n\n\t return hash;\n\t },\n\n\t blockSize: 512/32,\n\n\t /**\n\t * Creates a shortcut function to a hasher's object interface.\n\t *\n\t * @param {Hasher} hasher The hasher to create a helper for.\n\t *\n\t * @return {Function} The shortcut function.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t * var SHA256 = CryptoJS.lib.Hasher._createHelper(CryptoJS.algo.SHA256);\n\t */\n\t _createHelper: function (hasher) {\n\t return function (message, cfg) {\n\t return new hasher.init(cfg).finalize(message);\n\t };\n\t },\n\n\t /**\n\t * Creates a shortcut function to the HMAC's object interface.\n\t *\n\t * @param {Hasher} hasher The hasher to use in this HMAC helper.\n\t *\n\t * @return {Function} The shortcut function.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t * var HmacSHA256 = CryptoJS.lib.Hasher._createHmacHelper(CryptoJS.algo.SHA256);\n\t */\n\t _createHmacHelper: function (hasher) {\n\t return function (message, key) {\n\t return new C_algo.HMAC.init(hasher, key).finalize(message);\n\t };\n\t }\n\t });\n\n\t /**\n\t * Algorithm namespace.\n\t */\n\t var C_algo = C.algo = {};\n\n\t return C;\n\t}(Math));\n\n\n\treturn CryptoJS;\n\n}));",";(function (root, factory) {\n\tif (typeof exports === \"object\") {\n\t\t// CommonJS\n\t\tmodule.exports = exports = factory(require(\"./core\"));\n\t}\n\telse if (typeof define === \"function\" && define.amd) {\n\t\t// AMD\n\t\tdefine([\"./core\"], factory);\n\t}\n\telse {\n\t\t// Global (browser)\n\t\tfactory(root.CryptoJS);\n\t}\n}(this, function (CryptoJS) {\n\n\treturn CryptoJS.enc.Hex;\n\n}));",";(function (root, factory) {\n\tif (typeof exports === \"object\") {\n\t\t// 
CommonJS\n\t\tmodule.exports = exports = factory(require(\"./core\"));\n\t}\n\telse if (typeof define === \"function\" && define.amd) {\n\t\t// AMD\n\t\tdefine([\"./core\"], factory);\n\t}\n\telse {\n\t\t// Global (browser)\n\t\tfactory(root.CryptoJS);\n\t}\n}(this, function (CryptoJS) {\n\n\t(function () {\n\t // Shortcuts\n\t var C = CryptoJS;\n\t var C_lib = C.lib;\n\t var WordArray = C_lib.WordArray;\n\t var Hasher = C_lib.Hasher;\n\t var C_algo = C.algo;\n\n\t // Reusable object\n\t var W = [];\n\n\t /**\n\t * SHA-1 hash algorithm.\n\t */\n\t var SHA1 = C_algo.SHA1 = Hasher.extend({\n\t _doReset: function () {\n\t this._hash = new WordArray.init([\n\t 0x67452301, 0xefcdab89,\n\t 0x98badcfe, 0x10325476,\n\t 0xc3d2e1f0\n\t ]);\n\t },\n\n\t _doProcessBlock: function (M, offset) {\n\t // Shortcut\n\t var H = this._hash.words;\n\n\t // Working variables\n\t var a = H[0];\n\t var b = H[1];\n\t var c = H[2];\n\t var d = H[3];\n\t var e = H[4];\n\n\t // Computation\n\t for (var i = 0; i < 80; i++) {\n\t if (i < 16) {\n\t W[i] = M[offset + i] | 0;\n\t } else {\n\t var n = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16];\n\t W[i] = (n << 1) | (n >>> 31);\n\t }\n\n\t var t = ((a << 5) | (a >>> 27)) + e + W[i];\n\t if (i < 20) {\n\t t += ((b & c) | (~b & d)) + 0x5a827999;\n\t } else if (i < 40) {\n\t t += (b ^ c ^ d) + 0x6ed9eba1;\n\t } else if (i < 60) {\n\t t += ((b & c) | (b & d) | (c & d)) - 0x70e44324;\n\t } else /* if (i < 80) */ {\n\t t += (b ^ c ^ d) - 0x359d3e2a;\n\t }\n\n\t e = d;\n\t d = c;\n\t c = (b << 30) | (b >>> 2);\n\t b = a;\n\t a = t;\n\t }\n\n\t // Intermediate hash value\n\t H[0] = (H[0] + a) | 0;\n\t H[1] = (H[1] + b) | 0;\n\t H[2] = (H[2] + c) | 0;\n\t H[3] = (H[3] + d) | 0;\n\t H[4] = (H[4] + e) | 0;\n\t },\n\n\t _doFinalize: function () {\n\t // Shortcuts\n\t var data = this._data;\n\t var dataWords = data.words;\n\n\t var nBitsTotal = this._nDataBytes * 8;\n\t var nBitsLeft = data.sigBytes * 8;\n\n\t // Add padding\n\t dataWords[nBitsLeft >>> 5] |= 0x80 << (24 - nBitsLeft % 32);\n\t dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 14] = Math.floor(nBitsTotal / 0x100000000);\n\t dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 15] = nBitsTotal;\n\t data.sigBytes = dataWords.length * 4;\n\n\t // Hash final blocks\n\t this._process();\n\n\t // Return final computed hash\n\t return this._hash;\n\t },\n\n\t clone: function () {\n\t var clone = Hasher.clone.call(this);\n\t clone._hash = this._hash.clone();\n\n\t return clone;\n\t }\n\t });\n\n\t /**\n\t * Shortcut function to the hasher's object interface.\n\t *\n\t * @param {WordArray|string} message The message to hash.\n\t *\n\t * @return {WordArray} The hash.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t * var hash = CryptoJS.SHA1('message');\n\t * var hash = CryptoJS.SHA1(wordArray);\n\t */\n\t C.SHA1 = Hasher._createHelper(SHA1);\n\n\t /**\n\t * Shortcut function to the HMAC's object interface.\n\t *\n\t * @param {WordArray|string} message The message to hash.\n\t * @param {WordArray|string} key The secret key.\n\t *\n\t * @return {WordArray} The HMAC.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t * var hmac = CryptoJS.HmacSHA1(message, key);\n\t */\n\t C.HmacSHA1 = Hasher._createHmacHelper(SHA1);\n\t}());\n\n\n\treturn CryptoJS.SHA1;\n\n}));","module.exports = require(\"crypto\");","module.exports = require(\"fs/promises\");","module.exports = require(\"path\");","\"use strict\";Object.defineProperty(exports, \"__esModule\", {value: true});// src/index.ts\nvar _redis = require('@upstash/redis');\nvar _kv = 
null;\nprocess.env.UPSTASH_DISABLE_TELEMETRY = \"1\";\nvar VercelKV = class extends _redis.Redis {\n // This API is based on https://github.com/redis/node-redis#scan-iterator which is not supported in @upstash/redis\n /**\n * Same as `scan` but returns an AsyncIterator to allow iteration via `for await`.\n */\n async *scanIterator(options) {\n let cursor = \"0\";\n let keys;\n do {\n [cursor, keys] = await this.scan(cursor, options);\n for (const key of keys) {\n yield key;\n }\n } while (cursor !== \"0\");\n }\n /**\n * Same as `hscan` but returns an AsyncIterator to allow iteration via `for await`.\n */\n async *hscanIterator(key, options) {\n let cursor = \"0\";\n let items;\n do {\n [cursor, items] = await this.hscan(key, cursor, options);\n for (const item of items) {\n yield item;\n }\n } while (cursor !== \"0\");\n }\n /**\n * Same as `sscan` but returns an AsyncIterator to allow iteration via `for await`.\n */\n async *sscanIterator(key, options) {\n let cursor = \"0\";\n let items;\n do {\n [cursor, items] = await this.sscan(key, cursor, options);\n for (const item of items) {\n yield item;\n }\n } while (cursor !== \"0\");\n }\n /**\n * Same as `zscan` but returns an AsyncIterator to allow iteration via `for await`.\n */\n async *zscanIterator(key, options) {\n let cursor = \"0\";\n let items;\n do {\n [cursor, items] = await this.zscan(key, cursor, options);\n for (const item of items) {\n yield item;\n }\n } while (cursor !== \"0\");\n }\n};\nfunction createClient(config) {\n return new VercelKV({\n // The Next.js team recommends no value or `default` for fetch requests's `cache` option\n // upstash/redis defaults to `no-store`, so we enforce `default`\n cache: \"default\",\n enableAutoPipelining: true,\n ...config\n });\n}\nvar src_default = new Proxy(\n {},\n {\n get(target, prop, receiver) {\n if (prop === \"then\" || prop === \"parse\") {\n return Reflect.get(target, prop, receiver);\n }\n if (!_kv) {\n if (!process.env.KV_REST_API_URL || !process.env.KV_REST_API_TOKEN) {\n throw new Error(\n \"@vercel/kv: Missing required environment variables KV_REST_API_URL and KV_REST_API_TOKEN\"\n );\n }\n console.warn(\n '\\x1B[33m\"The default export has been moved to a named export and it will be removed in version 1, change to import { kv }\\x1B[0m\"'\n );\n _kv = createClient({\n url: process.env.KV_REST_API_URL,\n token: process.env.KV_REST_API_TOKEN\n });\n }\n return Reflect.get(_kv, prop);\n }\n }\n);\nvar kv = new Proxy(\n {},\n {\n get(target, prop) {\n if (!_kv) {\n if (!process.env.KV_REST_API_URL || !process.env.KV_REST_API_TOKEN) {\n throw new Error(\n \"@vercel/kv: Missing required environment variables KV_REST_API_URL and KV_REST_API_TOKEN\"\n );\n }\n _kv = createClient({\n url: process.env.KV_REST_API_URL,\n token: process.env.KV_REST_API_TOKEN\n });\n }\n return Reflect.get(_kv, prop);\n }\n }\n);\n\n\n\n\n\nexports.VercelKV = VercelKV; exports.createClient = createClient; exports.default = src_default; exports.kv = kv;\n//# sourceMappingURL=index.cjs.map","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module 
function\n\tvar threw = true;\n\ttry {\n\t\t__webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\t\tthrew = false;\n\t} finally {\n\t\tif(threw) delete __webpack_module_cache__[moduleId];\n\t}\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n","\nif (typeof __webpack_require__ !== 'undefined') __webpack_require__.ab = __dirname + \"/\";","const fs = require('fs/promises')\nconst path = require('path')\n\nconst { createClient } = require('@vercel/kv')\n\nasync function collectExamplesResult(manifestFile) {\n const file = path.join(process.cwd(), manifestFile)\n const contents = await fs.readFile(file, 'utf-8')\n const results = JSON.parse(contents)\n\n let failingCount = 0\n let passingCount = 0\n\n const currentDate = new Date()\n const isoString = currentDate.toISOString()\n const timestamp = isoString.slice(0, 19).replace('T', ' ')\n\n for (const isPassing of Object.values(results)) {\n if (isPassing) {\n passingCount += 1\n } else {\n failingCount += 1\n }\n }\n const status = `${process.env.GITHUB_SHA}\\t${timestamp}\\t${passingCount}/${\n passingCount + failingCount\n }`\n\n return {\n status,\n // Uses JSON.stringify to create minified JSON, otherwise whitespace is preserved.\n data: JSON.stringify(results),\n }\n}\n\nasync function collectResults(manifestFile) {\n const file = path.join(process.cwd(), manifestFile)\n const contents = await fs.readFile(file, 'utf-8')\n const results = JSON.parse(contents)\n\n let passingTests = ''\n let failingTests = ''\n let passCount = 0\n let failCount = 0\n\n const currentDate = new Date()\n const isoString = currentDate.toISOString()\n const timestamp = isoString.slice(0, 19).replace('T', ' ')\n\n if (results.version === 2) {\n for (const [testFileName, result] of Object.entries(results.suites)) {\n let suitePassCount = 0\n let suiteFailCount = 0\n\n suitePassCount += result.passed.length\n suiteFailCount += result.failed.length\n\n if (suitePassCount > 0) {\n passingTests += `${testFileName}\\n`\n }\n\n if (suiteFailCount > 0) {\n failingTests += `${testFileName}\\n`\n }\n\n for (const passed of result.passed) {\n const passedName = passed.replaceAll('`', '\\\\`')\n passingTests += `* ${passedName}\\n`\n }\n\n for (const passed of result.failed) {\n const failedName = passed.replaceAll('`', '\\\\`')\n failingTests += `* ${failedName}\\n`\n }\n\n passCount += suitePassCount\n failCount += suiteFailCount\n\n if (suitePassCount > 0) {\n passingTests += `\\n`\n }\n\n if (suiteFailCount > 0) {\n failingTests += `\\n`\n }\n }\n\n const testRun = `${process.env.GITHUB_SHA}\\t${timestamp}\\t${passCount}/${\n passCount + failCount\n }`\n return { testRun, passingTests, failingTests }\n } else {\n for (const [testFileName, result] of Object.entries(results)) {\n let suitePassCount = 0\n let suiteFailCount = 0\n\n suitePassCount += result.passed.length\n suiteFailCount += result.failed.length\n\n if (suitePassCount > 0) {\n passingTests += `${testFileName}\\n`\n }\n\n if (suiteFailCount > 0) {\n failingTests += `${testFileName}\\n`\n }\n\n for (const passed of result.passed) {\n const passedName = passed.replaceAll('`', '\\\\`')\n passingTests += `* ${passedName}\\n`\n }\n\n for (const passed of result.failed) {\n const failedName = passed.replaceAll('`', '\\\\`')\n failingTests += `* ${failedName}\\n`\n }\n\n passCount += suitePassCount\n failCount += suiteFailCount\n\n if (suitePassCount > 0) {\n passingTests += `\\n`\n }\n\n if (suiteFailCount > 0) {\n failingTests += `\\n`\n }\n 
}\n const testRun = `${process.env.GITHUB_SHA}\\t${timestamp}\\t${passCount}/${\n passCount + failCount\n }`\n\n return { testRun, passingTests, failingTests }\n }\n}\n\nasync function collectAndUpload(\n kv,\n { jsonPrefix, kvPrefix, deploymentDomain }\n) {\n const developmentResult = await collectResults(\n `test/${jsonPrefix}dev-tests-manifest.json`\n )\n const productionResult = await collectResults(\n `test/${jsonPrefix}build-tests-manifest.json`\n )\n const developmentExamplesResult = await collectExamplesResult(\n `test/${jsonPrefix}dev-examples-manifest.json`\n )\n\n console.log('TEST RESULT DEVELOPMENT')\n console.log(developmentResult.testRun)\n\n console.log('TEST RESULT PRODUCTION')\n console.log(productionResult.testRun)\n\n console.log('EXAMPLES RESULT')\n console.log(developmentExamplesResult.status)\n\n await kv.rpush(`${kvPrefix}test-runs`, developmentResult.testRun)\n await kv.rpush(`${kvPrefix}test-runs-production`, productionResult.testRun)\n await kv.rpush(`${kvPrefix}examples-runs`, developmentExamplesResult.status)\n console.log('SUCCESSFULLY SAVED RUNS')\n\n await kv.set(`${kvPrefix}passing-tests`, developmentResult.passingTests)\n await kv.set(\n `${kvPrefix}passing-tests-production`,\n productionResult.passingTests\n )\n console.log('SUCCESSFULLY SAVED PASSING')\n\n await kv.set(`${kvPrefix}failing-tests`, developmentResult.failingTests)\n await kv.set(\n `${kvPrefix}failing-tests-production`,\n productionResult.failingTests\n )\n console.log('SUCCESSFULLY SAVED FAILING')\n\n await kv.set(`${kvPrefix}examples-data`, developmentExamplesResult.data)\n console.log('SUCCESSFULLY SAVED EXAMPLES')\n\n if (deploymentDomain != null) {\n // Upstash does not provide strong consistency, so just wait a couple\n // seconds before invalidating the cache in case of replication lag.\n //\n // https://upstash.com/docs/redis/features/consistency\n await new Promise((resolve) => setTimeout(resolve, 2000))\n try {\n const response = await fetch(\n `https://${deploymentDomain}/api/revalidate`,\n {\n method: 'POST',\n headers: {\n 'X-Auth-Token': process.env.TURBOYET_TOKEN,\n 'Content-Type': 'application/json',\n },\n }\n )\n const responseJson = await response.json()\n if (!responseJson.revalidated) {\n throw new Error(responseJson.error)\n }\n console.log('SUCCESSFULLY REVALIDATED VERCEL DATA CACHE')\n } catch (error) {\n // non-fatal: the cache will eventually expire anyways\n console.error('FAILED TO REVALIDATE VERCEL DATA CACHE', error)\n }\n }\n}\n\nasync function main() {\n try {\n const kv = createClient({\n url: process.env.TURBOYET_KV_REST_API_URL,\n token: process.env.TURBOYET_KV_REST_API_TOKEN,\n })\n console.log('### UPLOADING TURBOPACK DATA')\n await collectAndUpload(kv, {\n jsonPrefix: 'turbopack-',\n kvPrefix: '',\n deploymentDomain: 'areweturboyet.com',\n })\n console.log('### UPLOADING RSPACK DATA')\n await collectAndUpload(kv, {\n jsonPrefix: 'rspack-',\n kvPrefix: 'rspack-',\n })\n } catch (error) {\n console.log(error)\n }\n}\n\nmain()\n"],"names":[],"sourceRoot":""} \ No newline at end of file 
+{"version":3,"file":"index.js","mappings":";;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/1HA;AACA;AACA;AACA;AACA;AACA;AAQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;ACtyBA;AACA;AACA;AACA;AACA;AACA;AAQA;AACA;AACA;AACA;AACA;;;;;;;ACjBA;AACA;AACA;AACA;AACA;AACA;AAQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrJA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;ACtHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AC7BA;AACA;;;;ACDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA","sources":["../../../node_modules/.pnpm/@upstash+redis@1.34.3/node_modules/@upstash/redis/nodejs.js","../../../node_modules/.pnpm/crypto-js@4.2.0/node_modules/crypto-js/core.js","../../../node_modules/.pnpm/crypto-js@4.2.0/node_modules/crypto-js/enc-hex.js","../../../node_modules/.pnpm/crypto-js@4.2.0/node_modules/crypto-js/sha1.js","../external node-commonjs \"crypto\"","../external node-commonjs \"fs/promises\"","../external node-commonjs \"path\"","../../../node_modules/.pnpm/@vercel+kv@3.0.0/node_modules/@vercel/kv/dist/index.cjs","../webpack/bootstrap","../webpack/runtime/compat",".././src/main.js"],"sourcesContent":["\"use strict\";\nvar __create = Object.create;\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __getProtoOf = Object.getPrototypeOf;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(\n // If the importer is in node compatibility mode or this is not an ESM\n // file that has been converted to a CommonJS file using a Babel-\n // compatible transform (i.e. \"__esModule\" has not been set), then set\n // \"default\" to the CommonJS \"module.exports\" for node compatibility.\n isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, \"default\", { value: mod, enumerable: true }) : target,\n mod\n));\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// platforms/nodejs.ts\nvar nodejs_exports = {};\n__export(nodejs_exports, {\n Redis: () => Redis2,\n errors: () => error_exports\n});\nmodule.exports = __toCommonJS(nodejs_exports);\n\n// pkg/error.ts\nvar error_exports = {};\n__export(error_exports, {\n UpstashError: () => UpstashError,\n UrlError: () => UrlError\n});\nvar UpstashError = class extends Error {\n constructor(message) {\n super(message);\n this.name = \"UpstashError\";\n }\n};\nvar UrlError = class extends Error {\n constructor(url) {\n super(\n `Upstash Redis client was passed an invalid URL. You should pass a URL starting with https. Received: \"${url}\". `\n );\n this.name = \"UrlError\";\n }\n};\n\n// pkg/http.ts\nvar HttpClient = class {\n baseUrl;\n headers;\n options;\n readYourWrites;\n upstashSyncToken = \"\";\n hasCredentials;\n retry;\n constructor(config) {\n this.options = {\n backend: config.options?.backend,\n agent: config.agent,\n responseEncoding: config.responseEncoding ?? \"base64\",\n // default to base64\n cache: config.cache,\n signal: config.signal,\n keepAlive: config.keepAlive ?? true\n };\n this.upstashSyncToken = \"\";\n this.readYourWrites = config.readYourWrites ?? true;\n this.baseUrl = (config.baseUrl || \"\").replace(/\\/$/, \"\");\n const urlRegex = /^https?:\\/\\/[^\\s#$./?].\\S*$/;\n if (this.baseUrl && !urlRegex.test(this.baseUrl)) {\n throw new UrlError(this.baseUrl);\n }\n this.headers = {\n \"Content-Type\": \"application/json\",\n ...config.headers\n };\n this.hasCredentials = Boolean(this.baseUrl && this.headers.authorization.split(\" \")[1]);\n if (this.options.responseEncoding === \"base64\") {\n this.headers[\"Upstash-Encoding\"] = \"base64\";\n }\n this.retry = typeof config.retry === \"boolean\" && !config.retry ? {\n attempts: 1,\n backoff: () => 0\n } : {\n attempts: config.retry?.retries ?? 5,\n backoff: config.retry?.backoff ?? ((retryCount) => Math.exp(retryCount) * 50)\n };\n }\n mergeTelemetry(telemetry) {\n this.headers = merge(this.headers, \"Upstash-Telemetry-Runtime\", telemetry.runtime);\n this.headers = merge(this.headers, \"Upstash-Telemetry-Platform\", telemetry.platform);\n this.headers = merge(this.headers, \"Upstash-Telemetry-Sdk\", telemetry.sdk);\n }\n async request(req) {\n const requestOptions = {\n //@ts-expect-error this should throw due to bun regression\n cache: this.options.cache,\n method: \"POST\",\n headers: this.headers,\n body: JSON.stringify(req.body),\n keepalive: this.options.keepAlive,\n agent: this.options.agent,\n signal: this.options.signal,\n /**\n * Fastly specific\n */\n backend: this.options.backend\n };\n if (!this.hasCredentials) {\n console.warn(\n \"[Upstash Redis] Redis client was initialized without url or token. Failed to execute command.\"\n );\n }\n if (this.readYourWrites) {\n const newHeader = this.upstashSyncToken;\n this.headers[\"upstash-sync-token\"] = newHeader;\n }\n let res = null;\n let error = null;\n for (let i = 0; i <= this.retry.attempts; i++) {\n try {\n res = await fetch([this.baseUrl, ...req.path ?? []].join(\"/\"), requestOptions);\n break;\n } catch (error_) {\n if (this.options.signal?.aborted) {\n const myBlob = new Blob([\n JSON.stringify({ result: this.options.signal.reason ?? \"Aborted\" })\n ]);\n const myOptions = {\n status: 200,\n statusText: this.options.signal.reason ?? 
\"Aborted\"\n };\n res = new Response(myBlob, myOptions);\n break;\n }\n error = error_;\n await new Promise((r) => setTimeout(r, this.retry.backoff(i)));\n }\n }\n if (!res) {\n throw error ?? new Error(\"Exhausted all retries\");\n }\n const body = await res.json();\n if (!res.ok) {\n throw new UpstashError(`${body.error}, command was: ${JSON.stringify(req.body)}`);\n }\n if (this.readYourWrites) {\n const headers = res.headers;\n this.upstashSyncToken = headers.get(\"upstash-sync-token\") ?? \"\";\n }\n if (this.readYourWrites) {\n const headers = res.headers;\n this.upstashSyncToken = headers.get(\"upstash-sync-token\") ?? \"\";\n }\n if (this.options.responseEncoding === \"base64\") {\n if (Array.isArray(body)) {\n return body.map(({ result: result2, error: error2 }) => ({\n result: decode(result2),\n error: error2\n }));\n }\n const result = decode(body.result);\n return { result, error: body.error };\n }\n return body;\n }\n};\nfunction base64decode(b64) {\n let dec = \"\";\n try {\n const binString = atob(b64);\n const size = binString.length;\n const bytes = new Uint8Array(size);\n for (let i = 0; i < size; i++) {\n bytes[i] = binString.charCodeAt(i);\n }\n dec = new TextDecoder().decode(bytes);\n } catch {\n dec = b64;\n }\n return dec;\n}\nfunction decode(raw) {\n let result = void 0;\n switch (typeof raw) {\n case \"undefined\": {\n return raw;\n }\n case \"number\": {\n result = raw;\n break;\n }\n case \"object\": {\n if (Array.isArray(raw)) {\n result = raw.map(\n (v) => typeof v === \"string\" ? base64decode(v) : Array.isArray(v) ? v.map((element) => decode(element)) : v\n );\n } else {\n result = null;\n }\n break;\n }\n case \"string\": {\n result = raw === \"OK\" ? \"OK\" : base64decode(raw);\n break;\n }\n default: {\n break;\n }\n }\n return result;\n}\nfunction merge(obj, key, value) {\n if (!value) {\n return obj;\n }\n obj[key] = obj[key] ? [obj[key], value].join(\",\") : value;\n return obj;\n}\n\n// pkg/auto-pipeline.ts\nfunction createAutoPipelineProxy(_redis, json) {\n const redis = _redis;\n if (!redis.autoPipelineExecutor) {\n redis.autoPipelineExecutor = new AutoPipelineExecutor(redis);\n }\n return new Proxy(redis, {\n get: (redis2, command) => {\n if (command === \"pipelineCounter\") {\n return redis2.autoPipelineExecutor.pipelineCounter;\n }\n if (command === \"json\") {\n return createAutoPipelineProxy(redis2, true);\n }\n const commandInRedisButNotPipeline = command in redis2 && !(command in redis2.autoPipelineExecutor.pipeline);\n if (commandInRedisButNotPipeline) {\n return redis2[command];\n }\n const isFunction = json ? typeof redis2.autoPipelineExecutor.pipeline.json[command] === \"function\" : typeof redis2.autoPipelineExecutor.pipeline[command] === \"function\";\n if (isFunction) {\n return (...args) => {\n return redis2.autoPipelineExecutor.withAutoPipeline((pipeline) => {\n if (json) {\n pipeline.json[command](\n ...args\n );\n } else {\n pipeline[command](...args);\n }\n });\n };\n }\n return redis2.autoPipelineExecutor.pipeline[command];\n }\n });\n}\nvar AutoPipelineExecutor = class {\n pipelinePromises = /* @__PURE__ */ new WeakMap();\n activePipeline = null;\n indexInCurrentPipeline = 0;\n redis;\n pipeline;\n // only to make sure that proxy can work\n pipelineCounter = 0;\n // to keep track of how many times a pipeline was executed\n constructor(redis) {\n this.redis = redis;\n this.pipeline = redis.pipeline();\n }\n async withAutoPipeline(executeWithPipeline) {\n const pipeline = this.activePipeline ?? 
this.redis.pipeline();\n if (!this.activePipeline) {\n this.activePipeline = pipeline;\n this.indexInCurrentPipeline = 0;\n }\n const index = this.indexInCurrentPipeline++;\n executeWithPipeline(pipeline);\n const pipelineDone = this.deferExecution().then(() => {\n if (!this.pipelinePromises.has(pipeline)) {\n const pipelinePromise = pipeline.exec({ keepErrors: true });\n this.pipelineCounter += 1;\n this.pipelinePromises.set(pipeline, pipelinePromise);\n this.activePipeline = null;\n }\n return this.pipelinePromises.get(pipeline);\n });\n const results = await pipelineDone;\n const commandResult = results[index];\n if (commandResult.error) {\n throw new UpstashError(`Command failed: ${commandResult.error}`);\n }\n return commandResult.result;\n }\n async deferExecution() {\n await Promise.resolve();\n await Promise.resolve();\n }\n};\n\n// pkg/util.ts\nfunction parseRecursive(obj) {\n const parsed = Array.isArray(obj) ? obj.map((o) => {\n try {\n return parseRecursive(o);\n } catch {\n return o;\n }\n }) : JSON.parse(obj);\n if (typeof parsed === \"number\" && parsed.toString() !== obj) {\n return obj;\n }\n return parsed;\n}\nfunction parseResponse(result) {\n try {\n return parseRecursive(result);\n } catch {\n return result;\n }\n}\nfunction deserializeScanResponse(result) {\n return [result[0], ...parseResponse(result.slice(1))];\n}\n\n// pkg/commands/command.ts\nvar defaultSerializer = (c) => {\n switch (typeof c) {\n case \"string\":\n case \"number\":\n case \"boolean\": {\n return c;\n }\n default: {\n return JSON.stringify(c);\n }\n }\n};\nvar Command = class {\n command;\n serialize;\n deserialize;\n /**\n * Create a new command instance.\n *\n * You can define a custom `deserialize` function. By default we try to deserialize as json.\n */\n constructor(command, opts) {\n this.serialize = defaultSerializer;\n this.deserialize = opts?.automaticDeserialization === void 0 || opts.automaticDeserialization ? opts?.deserialize ?? 
parseResponse : (x) => x;\n this.command = command.map((c) => this.serialize(c));\n if (opts?.latencyLogging) {\n const originalExec = this.exec.bind(this);\n this.exec = async (client) => {\n const start = performance.now();\n const result = await originalExec(client);\n const end = performance.now();\n const loggerResult = (end - start).toFixed(2);\n console.log(\n `Latency for \\x1B[38;2;19;185;39m${this.command[0].toString().toUpperCase()}\\x1B[0m: \\x1B[38;2;0;255;255m${loggerResult} ms\\x1B[0m`\n );\n return result;\n };\n }\n }\n /**\n * Execute the command using a client.\n */\n async exec(client) {\n const { result, error } = await client.request({\n body: this.command,\n upstashSyncToken: client.upstashSyncToken\n });\n if (error) {\n throw new UpstashError(error);\n }\n if (result === void 0) {\n throw new TypeError(\"Request did not return a result\");\n }\n return this.deserialize(result);\n }\n};\n\n// pkg/commands/append.ts\nvar AppendCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"append\", ...cmd], opts);\n }\n};\n\n// pkg/commands/bitcount.ts\nvar BitCountCommand = class extends Command {\n constructor([key, start, end], opts) {\n const command = [\"bitcount\", key];\n if (typeof start === \"number\") {\n command.push(start);\n }\n if (typeof end === \"number\") {\n command.push(end);\n }\n super(command, opts);\n }\n};\n\n// pkg/commands/bitfield.ts\nvar BitFieldCommand = class {\n constructor(args, client, opts, execOperation = (command) => command.exec(this.client)) {\n this.client = client;\n this.opts = opts;\n this.execOperation = execOperation;\n this.command = [\"bitfield\", ...args];\n }\n command;\n chain(...args) {\n this.command.push(...args);\n return this;\n }\n get(...args) {\n return this.chain(\"get\", ...args);\n }\n set(...args) {\n return this.chain(\"set\", ...args);\n }\n incrby(...args) {\n return this.chain(\"incrby\", ...args);\n }\n overflow(overflow) {\n return this.chain(\"overflow\", overflow);\n }\n exec() {\n const command = new Command(this.command, this.opts);\n return this.execOperation(command);\n }\n};\n\n// pkg/commands/bitop.ts\nvar BitOpCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"bitop\", ...cmd], opts);\n }\n};\n\n// pkg/commands/bitpos.ts\nvar BitPosCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"bitpos\", ...cmd], opts);\n }\n};\n\n// pkg/commands/copy.ts\nvar CopyCommand = class extends Command {\n constructor([key, destinationKey, opts], commandOptions) {\n super([\"COPY\", key, destinationKey, ...opts?.replace ? 
[\"REPLACE\"] : []], {\n ...commandOptions,\n deserialize(result) {\n if (result > 0) {\n return \"COPIED\";\n }\n return \"NOT_COPIED\";\n }\n });\n }\n};\n\n// pkg/commands/dbsize.ts\nvar DBSizeCommand = class extends Command {\n constructor(opts) {\n super([\"dbsize\"], opts);\n }\n};\n\n// pkg/commands/decr.ts\nvar DecrCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"decr\", ...cmd], opts);\n }\n};\n\n// pkg/commands/decrby.ts\nvar DecrByCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"decrby\", ...cmd], opts);\n }\n};\n\n// pkg/commands/del.ts\nvar DelCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"del\", ...cmd], opts);\n }\n};\n\n// pkg/commands/echo.ts\nvar EchoCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"echo\", ...cmd], opts);\n }\n};\n\n// pkg/commands/eval.ts\nvar EvalCommand = class extends Command {\n constructor([script, keys, args], opts) {\n super([\"eval\", script, keys.length, ...keys, ...args ?? []], opts);\n }\n};\n\n// pkg/commands/evalsha.ts\nvar EvalshaCommand = class extends Command {\n constructor([sha, keys, args], opts) {\n super([\"evalsha\", sha, keys.length, ...keys, ...args ?? []], opts);\n }\n};\n\n// pkg/commands/exists.ts\nvar ExistsCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"exists\", ...cmd], opts);\n }\n};\n\n// pkg/commands/expire.ts\nvar ExpireCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"expire\", ...cmd.filter(Boolean)], opts);\n }\n};\n\n// pkg/commands/expireat.ts\nvar ExpireAtCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"expireat\", ...cmd], opts);\n }\n};\n\n// pkg/commands/flushall.ts\nvar FlushAllCommand = class extends Command {\n constructor(args, opts) {\n const command = [\"flushall\"];\n if (args && args.length > 0 && args[0].async) {\n command.push(\"async\");\n }\n super(command, opts);\n }\n};\n\n// pkg/commands/flushdb.ts\nvar FlushDBCommand = class extends Command {\n constructor([opts], cmdOpts) {\n const command = [\"flushdb\"];\n if (opts?.async) {\n command.push(\"async\");\n }\n super(command, cmdOpts);\n }\n};\n\n// pkg/commands/geo_add.ts\nvar GeoAddCommand = class extends Command {\n constructor([key, arg1, ...arg2], opts) {\n const command = [\"geoadd\", key];\n if (\"nx\" in arg1 && arg1.nx) {\n command.push(\"nx\");\n } else if (\"xx\" in arg1 && arg1.xx) {\n command.push(\"xx\");\n }\n if (\"ch\" in arg1 && arg1.ch) {\n command.push(\"ch\");\n }\n if (\"latitude\" in arg1 && arg1.latitude) {\n command.push(arg1.longitude, arg1.latitude, arg1.member);\n }\n command.push(\n ...arg2.flatMap(({ latitude, longitude, member }) => [longitude, latitude, member])\n );\n super(command, opts);\n }\n};\n\n// pkg/commands/geo_dist.ts\nvar GeoDistCommand = class extends Command {\n constructor([key, member1, member2, unit = \"M\"], opts) {\n super([\"GEODIST\", key, member1, member2, unit], opts);\n }\n};\n\n// pkg/commands/geo_hash.ts\nvar GeoHashCommand = class extends Command {\n constructor(cmd, opts) {\n const [key] = cmd;\n const members = Array.isArray(cmd[1]) ? cmd[1] : cmd.slice(1);\n super([\"GEOHASH\", key, ...members], opts);\n }\n};\n\n// pkg/commands/geo_pos.ts\nvar GeoPosCommand = class extends Command {\n constructor(cmd, opts) {\n const [key] = cmd;\n const members = Array.isArray(cmd[1]) ? 
cmd[1] : cmd.slice(1);\n super([\"GEOPOS\", key, ...members], {\n deserialize: (result) => transform(result),\n ...opts\n });\n }\n};\nfunction transform(result) {\n const final = [];\n for (const pos of result) {\n if (!pos?.[0] || !pos?.[1]) {\n continue;\n }\n final.push({ lng: Number.parseFloat(pos[0]), lat: Number.parseFloat(pos[1]) });\n }\n return final;\n}\n\n// pkg/commands/geo_search.ts\nvar GeoSearchCommand = class extends Command {\n constructor([key, centerPoint, shape, order, opts], commandOptions) {\n const command = [\"GEOSEARCH\", key];\n if (centerPoint.type === \"FROMMEMBER\" || centerPoint.type === \"frommember\") {\n command.push(centerPoint.type, centerPoint.member);\n }\n if (centerPoint.type === \"FROMLONLAT\" || centerPoint.type === \"fromlonlat\") {\n command.push(centerPoint.type, centerPoint.coordinate.lon, centerPoint.coordinate.lat);\n }\n if (shape.type === \"BYRADIUS\" || shape.type === \"byradius\") {\n command.push(shape.type, shape.radius, shape.radiusType);\n }\n if (shape.type === \"BYBOX\" || shape.type === \"bybox\") {\n command.push(shape.type, shape.rect.width, shape.rect.height, shape.rectType);\n }\n command.push(order);\n if (opts?.count) {\n command.push(\"COUNT\", opts.count.limit, ...opts.count.any ? [\"ANY\"] : []);\n }\n const transform2 = (result) => {\n if (!opts?.withCoord && !opts?.withDist && !opts?.withHash) {\n return result.map((member) => {\n try {\n return { member: JSON.parse(member) };\n } catch {\n return { member };\n }\n });\n }\n return result.map((members) => {\n let counter = 1;\n const obj = {};\n try {\n obj.member = JSON.parse(members[0]);\n } catch {\n obj.member = members[0];\n }\n if (opts.withDist) {\n obj.dist = Number.parseFloat(members[counter++]);\n }\n if (opts.withHash) {\n obj.hash = members[counter++].toString();\n }\n if (opts.withCoord) {\n obj.coord = {\n long: Number.parseFloat(members[counter][0]),\n lat: Number.parseFloat(members[counter][1])\n };\n }\n return obj;\n });\n };\n super(\n [\n ...command,\n ...opts?.withCoord ? [\"WITHCOORD\"] : [],\n ...opts?.withDist ? [\"WITHDIST\"] : [],\n ...opts?.withHash ? [\"WITHHASH\"] : []\n ],\n {\n deserialize: transform2,\n ...commandOptions\n }\n );\n }\n};\n\n// pkg/commands/geo_search_store.ts\nvar GeoSearchStoreCommand = class extends Command {\n constructor([destination, key, centerPoint, shape, order, opts], commandOptions) {\n const command = [\"GEOSEARCHSTORE\", destination, key];\n if (centerPoint.type === \"FROMMEMBER\" || centerPoint.type === \"frommember\") {\n command.push(centerPoint.type, centerPoint.member);\n }\n if (centerPoint.type === \"FROMLONLAT\" || centerPoint.type === \"fromlonlat\") {\n command.push(centerPoint.type, centerPoint.coordinate.lon, centerPoint.coordinate.lat);\n }\n if (shape.type === \"BYRADIUS\" || shape.type === \"byradius\") {\n command.push(shape.type, shape.radius, shape.radiusType);\n }\n if (shape.type === \"BYBOX\" || shape.type === \"bybox\") {\n command.push(shape.type, shape.rect.width, shape.rect.height, shape.rectType);\n }\n command.push(order);\n if (opts?.count) {\n command.push(\"COUNT\", opts.count.limit, ...opts.count.any ? [\"ANY\"] : []);\n }\n super([...command, ...opts?.storeDist ? 
[\"STOREDIST\"] : []], commandOptions);\n }\n};\n\n// pkg/commands/get.ts\nvar GetCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"get\", ...cmd], opts);\n }\n};\n\n// pkg/commands/getbit.ts\nvar GetBitCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"getbit\", ...cmd], opts);\n }\n};\n\n// pkg/commands/getdel.ts\nvar GetDelCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"getdel\", ...cmd], opts);\n }\n};\n\n// pkg/commands/getrange.ts\nvar GetRangeCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"getrange\", ...cmd], opts);\n }\n};\n\n// pkg/commands/getset.ts\nvar GetSetCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"getset\", ...cmd], opts);\n }\n};\n\n// pkg/commands/hdel.ts\nvar HDelCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"hdel\", ...cmd], opts);\n }\n};\n\n// pkg/commands/hexists.ts\nvar HExistsCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"hexists\", ...cmd], opts);\n }\n};\n\n// pkg/commands/hget.ts\nvar HGetCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"hget\", ...cmd], opts);\n }\n};\n\n// pkg/commands/hgetall.ts\nfunction deserialize(result) {\n if (result.length === 0) {\n return null;\n }\n const obj = {};\n while (result.length >= 2) {\n const key = result.shift();\n const value = result.shift();\n try {\n const valueIsNumberAndNotSafeInteger = !Number.isNaN(Number(value)) && !Number.isSafeInteger(Number(value));\n obj[key] = valueIsNumberAndNotSafeInteger ? value : JSON.parse(value);\n } catch {\n obj[key] = value;\n }\n }\n return obj;\n}\nvar HGetAllCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"hgetall\", ...cmd], {\n deserialize: (result) => deserialize(result),\n ...opts\n });\n }\n};\n\n// pkg/commands/hincrby.ts\nvar HIncrByCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"hincrby\", ...cmd], opts);\n }\n};\n\n// pkg/commands/hincrbyfloat.ts\nvar HIncrByFloatCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"hincrbyfloat\", ...cmd], opts);\n }\n};\n\n// pkg/commands/hkeys.ts\nvar HKeysCommand = class extends Command {\n constructor([key], opts) {\n super([\"hkeys\", key], opts);\n }\n};\n\n// pkg/commands/hlen.ts\nvar HLenCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"hlen\", ...cmd], opts);\n }\n};\n\n// pkg/commands/hmget.ts\nfunction deserialize2(fields, result) {\n if (result.every((field) => field === null)) {\n return null;\n }\n const obj = {};\n for (const [i, field] of fields.entries()) {\n try {\n obj[field] = JSON.parse(result[i]);\n } catch {\n obj[field] = result[i];\n }\n }\n return obj;\n}\nvar HMGetCommand = class extends Command {\n constructor([key, ...fields], opts) {\n super([\"hmget\", key, ...fields], {\n deserialize: (result) => deserialize2(fields, result),\n ...opts\n });\n }\n};\n\n// pkg/commands/hmset.ts\nvar HMSetCommand = class extends Command {\n constructor([key, kv], opts) {\n super([\"hmset\", key, ...Object.entries(kv).flatMap(([field, value]) => [field, value])], opts);\n }\n};\n\n// pkg/commands/hrandfield.ts\nfunction deserialize3(result) {\n if (result.length === 0) {\n return null;\n }\n const obj = {};\n while (result.length >= 2) {\n const key = result.shift();\n const value = result.shift();\n try {\n obj[key] = JSON.parse(value);\n } catch {\n obj[key] = value;\n }\n }\n return obj;\n}\nvar HRandFieldCommand = class extends Command {\n 
constructor(cmd, opts) {\n const command = [\"hrandfield\", cmd[0]];\n if (typeof cmd[1] === \"number\") {\n command.push(cmd[1]);\n }\n if (cmd[2]) {\n command.push(\"WITHVALUES\");\n }\n super(command, {\n // @ts-expect-error to silence compiler\n deserialize: cmd[2] ? (result) => deserialize3(result) : opts?.deserialize,\n ...opts\n });\n }\n};\n\n// pkg/commands/hscan.ts\nvar HScanCommand = class extends Command {\n constructor([key, cursor, cmdOpts], opts) {\n const command = [\"hscan\", key, cursor];\n if (cmdOpts?.match) {\n command.push(\"match\", cmdOpts.match);\n }\n if (typeof cmdOpts?.count === \"number\") {\n command.push(\"count\", cmdOpts.count);\n }\n super(command, {\n deserialize: deserializeScanResponse,\n ...opts\n });\n }\n};\n\n// pkg/commands/hset.ts\nvar HSetCommand = class extends Command {\n constructor([key, kv], opts) {\n super([\"hset\", key, ...Object.entries(kv).flatMap(([field, value]) => [field, value])], opts);\n }\n};\n\n// pkg/commands/hsetnx.ts\nvar HSetNXCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"hsetnx\", ...cmd], opts);\n }\n};\n\n// pkg/commands/hstrlen.ts\nvar HStrLenCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"hstrlen\", ...cmd], opts);\n }\n};\n\n// pkg/commands/hvals.ts\nvar HValsCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"hvals\", ...cmd], opts);\n }\n};\n\n// pkg/commands/incr.ts\nvar IncrCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"incr\", ...cmd], opts);\n }\n};\n\n// pkg/commands/incrby.ts\nvar IncrByCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"incrby\", ...cmd], opts);\n }\n};\n\n// pkg/commands/incrbyfloat.ts\nvar IncrByFloatCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"incrbyfloat\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_arrappend.ts\nvar JsonArrAppendCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.ARRAPPEND\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_arrindex.ts\nvar JsonArrIndexCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.ARRINDEX\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_arrinsert.ts\nvar JsonArrInsertCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.ARRINSERT\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_arrlen.ts\nvar JsonArrLenCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.ARRLEN\", cmd[0], cmd[1] ?? \"$\"], opts);\n }\n};\n\n// pkg/commands/json_arrpop.ts\nvar JsonArrPopCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.ARRPOP\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_arrtrim.ts\nvar JsonArrTrimCommand = class extends Command {\n constructor(cmd, opts) {\n const path = cmd[1] ?? \"$\";\n const start = cmd[2] ?? 0;\n const stop = cmd[3] ?? 
0;\n super([\"JSON.ARRTRIM\", cmd[0], path, start, stop], opts);\n }\n};\n\n// pkg/commands/json_clear.ts\nvar JsonClearCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.CLEAR\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_del.ts\nvar JsonDelCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.DEL\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_forget.ts\nvar JsonForgetCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.FORGET\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_get.ts\nvar JsonGetCommand = class extends Command {\n constructor(cmd, opts) {\n const command = [\"JSON.GET\"];\n if (typeof cmd[1] === \"string\") {\n command.push(...cmd);\n } else {\n command.push(cmd[0]);\n if (cmd[1]) {\n if (cmd[1].indent) {\n command.push(\"INDENT\", cmd[1].indent);\n }\n if (cmd[1].newline) {\n command.push(\"NEWLINE\", cmd[1].newline);\n }\n if (cmd[1].space) {\n command.push(\"SPACE\", cmd[1].space);\n }\n }\n command.push(...cmd.slice(2));\n }\n super(command, opts);\n }\n};\n\n// pkg/commands/json_mget.ts\nvar JsonMGetCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.MGET\", ...cmd[0], cmd[1]], opts);\n }\n};\n\n// pkg/commands/json_mset.ts\nvar JsonMSetCommand = class extends Command {\n constructor(cmd, opts) {\n const command = [\"JSON.MSET\"];\n for (const c of cmd) {\n command.push(c.key, c.path, c.value);\n }\n super(command, opts);\n }\n};\n\n// pkg/commands/json_numincrby.ts\nvar JsonNumIncrByCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.NUMINCRBY\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_nummultby.ts\nvar JsonNumMultByCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.NUMMULTBY\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_objkeys.ts\nvar JsonObjKeysCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.OBJKEYS\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_objlen.ts\nvar JsonObjLenCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.OBJLEN\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_resp.ts\nvar JsonRespCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.RESP\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_set.ts\nvar JsonSetCommand = class extends Command {\n constructor(cmd, opts) {\n const command = [\"JSON.SET\", cmd[0], cmd[1], cmd[2]];\n if (cmd[3]) {\n if (cmd[3].nx) {\n command.push(\"NX\");\n } else if (cmd[3].xx) {\n command.push(\"XX\");\n }\n }\n super(command, opts);\n }\n};\n\n// pkg/commands/json_strappend.ts\nvar JsonStrAppendCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.STRAPPEND\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_strlen.ts\nvar JsonStrLenCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.STRLEN\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_toggle.ts\nvar JsonToggleCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.TOGGLE\", ...cmd], opts);\n }\n};\n\n// pkg/commands/json_type.ts\nvar JsonTypeCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"JSON.TYPE\", ...cmd], opts);\n }\n};\n\n// pkg/commands/keys.ts\nvar KeysCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"keys\", ...cmd], opts);\n }\n};\n\n// pkg/commands/lindex.ts\nvar LIndexCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"lindex\", ...cmd], opts);\n }\n};\n\n// 
pkg/commands/linsert.ts\nvar LInsertCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"linsert\", ...cmd], opts);\n }\n};\n\n// pkg/commands/llen.ts\nvar LLenCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"llen\", ...cmd], opts);\n }\n};\n\n// pkg/commands/lmove.ts\nvar LMoveCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"lmove\", ...cmd], opts);\n }\n};\n\n// pkg/commands/lmpop.ts\nvar LmPopCommand = class extends Command {\n constructor(cmd, opts) {\n const [numkeys, keys, direction, count] = cmd;\n super([\"LMPOP\", numkeys, ...keys, direction, ...count ? [\"COUNT\", count] : []], opts);\n }\n};\n\n// pkg/commands/lpop.ts\nvar LPopCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"lpop\", ...cmd], opts);\n }\n};\n\n// pkg/commands/lpos.ts\nvar LPosCommand = class extends Command {\n constructor(cmd, opts) {\n const args = [\"lpos\", cmd[0], cmd[1]];\n if (typeof cmd[2]?.rank === \"number\") {\n args.push(\"rank\", cmd[2].rank);\n }\n if (typeof cmd[2]?.count === \"number\") {\n args.push(\"count\", cmd[2].count);\n }\n if (typeof cmd[2]?.maxLen === \"number\") {\n args.push(\"maxLen\", cmd[2].maxLen);\n }\n super(args, opts);\n }\n};\n\n// pkg/commands/lpush.ts\nvar LPushCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"lpush\", ...cmd], opts);\n }\n};\n\n// pkg/commands/lpushx.ts\nvar LPushXCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"lpushx\", ...cmd], opts);\n }\n};\n\n// pkg/commands/lrange.ts\nvar LRangeCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"lrange\", ...cmd], opts);\n }\n};\n\n// pkg/commands/lrem.ts\nvar LRemCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"lrem\", ...cmd], opts);\n }\n};\n\n// pkg/commands/lset.ts\nvar LSetCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"lset\", ...cmd], opts);\n }\n};\n\n// pkg/commands/ltrim.ts\nvar LTrimCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"ltrim\", ...cmd], opts);\n }\n};\n\n// pkg/commands/mget.ts\nvar MGetCommand = class extends Command {\n constructor(cmd, opts) {\n const keys = Array.isArray(cmd[0]) ? 
cmd[0] : cmd;\n super([\"mget\", ...keys], opts);\n }\n};\n\n// pkg/commands/mset.ts\nvar MSetCommand = class extends Command {\n constructor([kv], opts) {\n super([\"mset\", ...Object.entries(kv).flatMap(([key, value]) => [key, value])], opts);\n }\n};\n\n// pkg/commands/msetnx.ts\nvar MSetNXCommand = class extends Command {\n constructor([kv], opts) {\n super([\"msetnx\", ...Object.entries(kv).flat()], opts);\n }\n};\n\n// pkg/commands/persist.ts\nvar PersistCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"persist\", ...cmd], opts);\n }\n};\n\n// pkg/commands/pexpire.ts\nvar PExpireCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"pexpire\", ...cmd], opts);\n }\n};\n\n// pkg/commands/pexpireat.ts\nvar PExpireAtCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"pexpireat\", ...cmd], opts);\n }\n};\n\n// pkg/commands/pfadd.ts\nvar PfAddCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"pfadd\", ...cmd], opts);\n }\n};\n\n// pkg/commands/pfcount.ts\nvar PfCountCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"pfcount\", ...cmd], opts);\n }\n};\n\n// pkg/commands/pfmerge.ts\nvar PfMergeCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"pfmerge\", ...cmd], opts);\n }\n};\n\n// pkg/commands/ping.ts\nvar PingCommand = class extends Command {\n constructor(cmd, opts) {\n const command = [\"ping\"];\n if (cmd?.[0] !== void 0) {\n command.push(cmd[0]);\n }\n super(command, opts);\n }\n};\n\n// pkg/commands/psetex.ts\nvar PSetEXCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"psetex\", ...cmd], opts);\n }\n};\n\n// pkg/commands/pttl.ts\nvar PTtlCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"pttl\", ...cmd], opts);\n }\n};\n\n// pkg/commands/publish.ts\nvar PublishCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"publish\", ...cmd], opts);\n }\n};\n\n// pkg/commands/randomkey.ts\nvar RandomKeyCommand = class extends Command {\n constructor(opts) {\n super([\"randomkey\"], opts);\n }\n};\n\n// pkg/commands/rename.ts\nvar RenameCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"rename\", ...cmd], opts);\n }\n};\n\n// pkg/commands/renamenx.ts\nvar RenameNXCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"renamenx\", ...cmd], opts);\n }\n};\n\n// pkg/commands/rpop.ts\nvar RPopCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"rpop\", ...cmd], opts);\n }\n};\n\n// pkg/commands/rpush.ts\nvar RPushCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"rpush\", ...cmd], opts);\n }\n};\n\n// pkg/commands/rpushx.ts\nvar RPushXCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"rpushx\", ...cmd], opts);\n }\n};\n\n// pkg/commands/sadd.ts\nvar SAddCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"sadd\", ...cmd], opts);\n }\n};\n\n// pkg/commands/scan.ts\nvar ScanCommand = class extends Command {\n constructor([cursor, opts], cmdOpts) {\n const command = [\"scan\", cursor];\n if (opts?.match) {\n command.push(\"match\", opts.match);\n }\n if (typeof opts?.count === \"number\") {\n command.push(\"count\", opts.count);\n }\n if (opts?.type && opts.type.length > 0) {\n command.push(\"type\", opts.type);\n }\n super(command, {\n deserialize: deserializeScanResponse,\n ...cmdOpts\n });\n }\n};\n\n// pkg/commands/scard.ts\nvar SCardCommand = class extends Command {\n constructor(cmd, opts) {\n 
super([\"scard\", ...cmd], opts);\n }\n};\n\n// pkg/commands/script_exists.ts\nvar ScriptExistsCommand = class extends Command {\n constructor(hashes, opts) {\n super([\"script\", \"exists\", ...hashes], {\n deserialize: (result) => result,\n ...opts\n });\n }\n};\n\n// pkg/commands/script_flush.ts\nvar ScriptFlushCommand = class extends Command {\n constructor([opts], cmdOpts) {\n const cmd = [\"script\", \"flush\"];\n if (opts?.sync) {\n cmd.push(\"sync\");\n } else if (opts?.async) {\n cmd.push(\"async\");\n }\n super(cmd, cmdOpts);\n }\n};\n\n// pkg/commands/script_load.ts\nvar ScriptLoadCommand = class extends Command {\n constructor(args, opts) {\n super([\"script\", \"load\", ...args], opts);\n }\n};\n\n// pkg/commands/sdiff.ts\nvar SDiffCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"sdiff\", ...cmd], opts);\n }\n};\n\n// pkg/commands/sdiffstore.ts\nvar SDiffStoreCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"sdiffstore\", ...cmd], opts);\n }\n};\n\n// pkg/commands/set.ts\nvar SetCommand = class extends Command {\n constructor([key, value, opts], cmdOpts) {\n const command = [\"set\", key, value];\n if (opts) {\n if (\"nx\" in opts && opts.nx) {\n command.push(\"nx\");\n } else if (\"xx\" in opts && opts.xx) {\n command.push(\"xx\");\n }\n if (\"get\" in opts && opts.get) {\n command.push(\"get\");\n }\n if (\"ex\" in opts && typeof opts.ex === \"number\") {\n command.push(\"ex\", opts.ex);\n } else if (\"px\" in opts && typeof opts.px === \"number\") {\n command.push(\"px\", opts.px);\n } else if (\"exat\" in opts && typeof opts.exat === \"number\") {\n command.push(\"exat\", opts.exat);\n } else if (\"pxat\" in opts && typeof opts.pxat === \"number\") {\n command.push(\"pxat\", opts.pxat);\n } else if (\"keepTtl\" in opts && opts.keepTtl) {\n command.push(\"keepTtl\");\n }\n }\n super(command, cmdOpts);\n }\n};\n\n// pkg/commands/setbit.ts\nvar SetBitCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"setbit\", ...cmd], opts);\n }\n};\n\n// pkg/commands/setex.ts\nvar SetExCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"setex\", ...cmd], opts);\n }\n};\n\n// pkg/commands/setnx.ts\nvar SetNxCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"setnx\", ...cmd], opts);\n }\n};\n\n// pkg/commands/setrange.ts\nvar SetRangeCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"setrange\", ...cmd], opts);\n }\n};\n\n// pkg/commands/sinter.ts\nvar SInterCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"sinter\", ...cmd], opts);\n }\n};\n\n// pkg/commands/sinterstore.ts\nvar SInterStoreCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"sinterstore\", ...cmd], opts);\n }\n};\n\n// pkg/commands/sismember.ts\nvar SIsMemberCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"sismember\", ...cmd], opts);\n }\n};\n\n// pkg/commands/smembers.ts\nvar SMembersCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"smembers\", ...cmd], opts);\n }\n};\n\n// pkg/commands/smismember.ts\nvar SMIsMemberCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"smismember\", cmd[0], ...cmd[1]], opts);\n }\n};\n\n// pkg/commands/smove.ts\nvar SMoveCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"smove\", ...cmd], opts);\n }\n};\n\n// pkg/commands/spop.ts\nvar SPopCommand = class extends Command {\n constructor([key, count], opts) {\n const command = [\"spop\", key];\n 
if (typeof count === \"number\") {\n command.push(count);\n }\n super(command, opts);\n }\n};\n\n// pkg/commands/srandmember.ts\nvar SRandMemberCommand = class extends Command {\n constructor([key, count], opts) {\n const command = [\"srandmember\", key];\n if (typeof count === \"number\") {\n command.push(count);\n }\n super(command, opts);\n }\n};\n\n// pkg/commands/srem.ts\nvar SRemCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"srem\", ...cmd], opts);\n }\n};\n\n// pkg/commands/sscan.ts\nvar SScanCommand = class extends Command {\n constructor([key, cursor, opts], cmdOpts) {\n const command = [\"sscan\", key, cursor];\n if (opts?.match) {\n command.push(\"match\", opts.match);\n }\n if (typeof opts?.count === \"number\") {\n command.push(\"count\", opts.count);\n }\n super(command, {\n deserialize: deserializeScanResponse,\n ...cmdOpts\n });\n }\n};\n\n// pkg/commands/strlen.ts\nvar StrLenCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"strlen\", ...cmd], opts);\n }\n};\n\n// pkg/commands/sunion.ts\nvar SUnionCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"sunion\", ...cmd], opts);\n }\n};\n\n// pkg/commands/sunionstore.ts\nvar SUnionStoreCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"sunionstore\", ...cmd], opts);\n }\n};\n\n// pkg/commands/time.ts\nvar TimeCommand = class extends Command {\n constructor(opts) {\n super([\"time\"], opts);\n }\n};\n\n// pkg/commands/touch.ts\nvar TouchCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"touch\", ...cmd], opts);\n }\n};\n\n// pkg/commands/ttl.ts\nvar TtlCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"ttl\", ...cmd], opts);\n }\n};\n\n// pkg/commands/type.ts\nvar TypeCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"type\", ...cmd], opts);\n }\n};\n\n// pkg/commands/unlink.ts\nvar UnlinkCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"unlink\", ...cmd], opts);\n }\n};\n\n// pkg/commands/xack.ts\nvar XAckCommand = class extends Command {\n constructor([key, group, id], opts) {\n const ids = Array.isArray(id) ? [...id] : [id];\n super([\"XACK\", key, group, ...ids], opts);\n }\n};\n\n// pkg/commands/xadd.ts\nvar XAddCommand = class extends Command {\n constructor([key, id, entries, opts], commandOptions) {\n const command = [\"XADD\", key];\n if (opts) {\n if (opts.nomkStream) {\n command.push(\"NOMKSTREAM\");\n }\n if (opts.trim) {\n command.push(opts.trim.type, opts.trim.comparison, opts.trim.threshold);\n if (opts.trim.limit !== void 0) {\n command.push(\"LIMIT\", opts.trim.limit);\n }\n }\n }\n command.push(id);\n for (const [k, v] of Object.entries(entries)) {\n command.push(k, v);\n }\n super(command, commandOptions);\n }\n};\n\n// pkg/commands/xautoclaim.ts\nvar XAutoClaim = class extends Command {\n constructor([key, group, consumer, minIdleTime, start, options], opts) {\n const commands = [];\n if (options?.count) {\n commands.push(\"COUNT\", options.count);\n }\n if (options?.justId) {\n commands.push(\"JUSTID\");\n }\n super([\"XAUTOCLAIM\", key, group, consumer, minIdleTime, start, ...commands], opts);\n }\n};\n\n// pkg/commands/xclaim.ts\nvar XClaimCommand = class extends Command {\n constructor([key, group, consumer, minIdleTime, id, options], opts) {\n const ids = Array.isArray(id) ? 
[...id] : [id];\n const commands = [];\n if (options?.idleMS) {\n commands.push(\"IDLE\", options.idleMS);\n }\n if (options?.timeMS) {\n commands.push(\"TIME\", options.timeMS);\n }\n if (options?.retryCount) {\n commands.push(\"RETRYCOUNT\", options.retryCount);\n }\n if (options?.force) {\n commands.push(\"FORCE\");\n }\n if (options?.justId) {\n commands.push(\"JUSTID\");\n }\n if (options?.lastId) {\n commands.push(\"LASTID\", options.lastId);\n }\n super([\"XCLAIM\", key, group, consumer, minIdleTime, ...ids, ...commands], opts);\n }\n};\n\n// pkg/commands/xdel.ts\nvar XDelCommand = class extends Command {\n constructor([key, ids], opts) {\n const cmds = Array.isArray(ids) ? [...ids] : [ids];\n super([\"XDEL\", key, ...cmds], opts);\n }\n};\n\n// pkg/commands/xgroup.ts\nvar XGroupCommand = class extends Command {\n constructor([key, opts], commandOptions) {\n const command = [\"XGROUP\"];\n switch (opts.type) {\n case \"CREATE\": {\n command.push(\"CREATE\", key, opts.group, opts.id);\n if (opts.options) {\n if (opts.options.MKSTREAM) {\n command.push(\"MKSTREAM\");\n }\n if (opts.options.ENTRIESREAD !== void 0) {\n command.push(\"ENTRIESREAD\", opts.options.ENTRIESREAD.toString());\n }\n }\n break;\n }\n case \"CREATECONSUMER\": {\n command.push(\"CREATECONSUMER\", key, opts.group, opts.consumer);\n break;\n }\n case \"DELCONSUMER\": {\n command.push(\"DELCONSUMER\", key, opts.group, opts.consumer);\n break;\n }\n case \"DESTROY\": {\n command.push(\"DESTROY\", key, opts.group);\n break;\n }\n case \"SETID\": {\n command.push(\"SETID\", key, opts.group, opts.id);\n if (opts.options?.ENTRIESREAD !== void 0) {\n command.push(\"ENTRIESREAD\", opts.options.ENTRIESREAD.toString());\n }\n break;\n }\n default: {\n throw new Error(\"Invalid XGROUP\");\n }\n }\n super(command, commandOptions);\n }\n};\n\n// pkg/commands/xinfo.ts\nvar XInfoCommand = class extends Command {\n constructor([key, options], opts) {\n const cmds = [];\n if (options.type === \"CONSUMERS\") {\n cmds.push(\"CONSUMERS\", key, options.group);\n } else {\n cmds.push(\"GROUPS\", key);\n }\n super([\"XINFO\", ...cmds], opts);\n }\n};\n\n// pkg/commands/xlen.ts\nvar XLenCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"XLEN\", ...cmd], opts);\n }\n};\n\n// pkg/commands/xpending.ts\nvar XPendingCommand = class extends Command {\n constructor([key, group, start, end, count, options], opts) {\n const consumers = options?.consumer === void 0 ? [] : Array.isArray(options.consumer) ? [...options.consumer] : [options.consumer];\n super(\n [\n \"XPENDING\",\n key,\n group,\n ...options?.idleTime ? 
[\"IDLE\", options.idleTime] : [],\n start,\n end,\n count,\n ...consumers\n ],\n opts\n );\n }\n};\n\n// pkg/commands/xrange.ts\nfunction deserialize4(result) {\n const obj = {};\n for (const e of result) {\n while (e.length >= 2) {\n const streamId = e.shift();\n const entries = e.shift();\n if (!(streamId in obj)) {\n obj[streamId] = {};\n }\n while (entries.length >= 2) {\n const field = entries.shift();\n const value = entries.shift();\n try {\n obj[streamId][field] = JSON.parse(value);\n } catch {\n obj[streamId][field] = value;\n }\n }\n }\n }\n return obj;\n}\nvar XRangeCommand = class extends Command {\n constructor([key, start, end, count], opts) {\n const command = [\"XRANGE\", key, start, end];\n if (typeof count === \"number\") {\n command.push(\"COUNT\", count);\n }\n super(command, {\n deserialize: (result) => deserialize4(result),\n ...opts\n });\n }\n};\n\n// pkg/commands/xread.ts\nvar UNBALANCED_XREAD_ERR = \"ERR Unbalanced XREAD list of streams: for each stream key an ID or '$' must be specified\";\nvar XReadCommand = class extends Command {\n constructor([key, id, options], opts) {\n if (Array.isArray(key) && Array.isArray(id) && key.length !== id.length) {\n throw new Error(UNBALANCED_XREAD_ERR);\n }\n const commands = [];\n if (typeof options?.count === \"number\") {\n commands.push(\"COUNT\", options.count);\n }\n if (typeof options?.blockMS === \"number\") {\n commands.push(\"BLOCK\", options.blockMS);\n }\n commands.push(\n \"STREAMS\",\n ...Array.isArray(key) ? [...key] : [key],\n ...Array.isArray(id) ? [...id] : [id]\n );\n super([\"XREAD\", ...commands], opts);\n }\n};\n\n// pkg/commands/xreadgroup.ts\nvar UNBALANCED_XREADGROUP_ERR = \"ERR Unbalanced XREADGROUP list of streams: for each stream key an ID or '$' must be specified\";\nvar XReadGroupCommand = class extends Command {\n constructor([group, consumer, key, id, options], opts) {\n if (Array.isArray(key) && Array.isArray(id) && key.length !== id.length) {\n throw new Error(UNBALANCED_XREADGROUP_ERR);\n }\n const commands = [];\n if (typeof options?.count === \"number\") {\n commands.push(\"COUNT\", options.count);\n }\n if (typeof options?.blockMS === \"number\") {\n commands.push(\"BLOCK\", options.blockMS);\n }\n if (typeof options?.NOACK === \"boolean\" && options.NOACK) {\n commands.push(\"NOACK\");\n }\n commands.push(\n \"STREAMS\",\n ...Array.isArray(key) ? [...key] : [key],\n ...Array.isArray(id) ? 
[...id] : [id]\n );\n super([\"XREADGROUP\", \"GROUP\", group, consumer, ...commands], opts);\n }\n};\n\n// pkg/commands/xrevrange.ts\nvar XRevRangeCommand = class extends Command {\n constructor([key, end, start, count], opts) {\n const command = [\"XREVRANGE\", key, end, start];\n if (typeof count === \"number\") {\n command.push(\"COUNT\", count);\n }\n super(command, {\n deserialize: (result) => deserialize5(result),\n ...opts\n });\n }\n};\nfunction deserialize5(result) {\n const obj = {};\n for (const e of result) {\n while (e.length >= 2) {\n const streamId = e.shift();\n const entries = e.shift();\n if (!(streamId in obj)) {\n obj[streamId] = {};\n }\n while (entries.length >= 2) {\n const field = entries.shift();\n const value = entries.shift();\n try {\n obj[streamId][field] = JSON.parse(value);\n } catch {\n obj[streamId][field] = value;\n }\n }\n }\n }\n return obj;\n}\n\n// pkg/commands/xtrim.ts\nvar XTrimCommand = class extends Command {\n constructor([key, options], opts) {\n const { limit, strategy, threshold, exactness = \"~\" } = options;\n super([\"XTRIM\", key, strategy, exactness, threshold, ...limit ? [\"LIMIT\", limit] : []], opts);\n }\n};\n\n// pkg/commands/zadd.ts\nvar ZAddCommand = class extends Command {\n constructor([key, arg1, ...arg2], opts) {\n const command = [\"zadd\", key];\n if (\"nx\" in arg1 && arg1.nx) {\n command.push(\"nx\");\n } else if (\"xx\" in arg1 && arg1.xx) {\n command.push(\"xx\");\n }\n if (\"ch\" in arg1 && arg1.ch) {\n command.push(\"ch\");\n }\n if (\"incr\" in arg1 && arg1.incr) {\n command.push(\"incr\");\n }\n if (\"lt\" in arg1 && arg1.lt) {\n command.push(\"lt\");\n } else if (\"gt\" in arg1 && arg1.gt) {\n command.push(\"gt\");\n }\n if (\"score\" in arg1 && \"member\" in arg1) {\n command.push(arg1.score, arg1.member);\n }\n command.push(...arg2.flatMap(({ score, member }) => [score, member]));\n super(command, opts);\n }\n};\n\n// pkg/commands/zcard.ts\nvar ZCardCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"zcard\", ...cmd], opts);\n }\n};\n\n// pkg/commands/zcount.ts\nvar ZCountCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"zcount\", ...cmd], opts);\n }\n};\n\n// pkg/commands/zincrby.ts\nvar ZIncrByCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"zincrby\", ...cmd], opts);\n }\n};\n\n// pkg/commands/zinterstore.ts\nvar ZInterStoreCommand = class extends Command {\n constructor([destination, numKeys, keyOrKeys, opts], cmdOpts) {\n const command = [\"zinterstore\", destination, numKeys];\n if (Array.isArray(keyOrKeys)) {\n command.push(...keyOrKeys);\n } else {\n command.push(keyOrKeys);\n }\n if (opts) {\n if (\"weights\" in opts && opts.weights) {\n command.push(\"weights\", ...opts.weights);\n } else if (\"weight\" in opts && typeof opts.weight === \"number\") {\n command.push(\"weights\", opts.weight);\n }\n if (\"aggregate\" in opts) {\n command.push(\"aggregate\", opts.aggregate);\n }\n }\n super(command, cmdOpts);\n }\n};\n\n// pkg/commands/zlexcount.ts\nvar ZLexCountCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"zlexcount\", ...cmd], opts);\n }\n};\n\n// pkg/commands/zpopmax.ts\nvar ZPopMaxCommand = class extends Command {\n constructor([key, count], opts) {\n const command = [\"zpopmax\", key];\n if (typeof count === \"number\") {\n command.push(count);\n }\n super(command, opts);\n }\n};\n\n// pkg/commands/zpopmin.ts\nvar ZPopMinCommand = class extends Command {\n constructor([key, count], opts) {\n const command = 
[\"zpopmin\", key];\n if (typeof count === \"number\") {\n command.push(count);\n }\n super(command, opts);\n }\n};\n\n// pkg/commands/zrange.ts\nvar ZRangeCommand = class extends Command {\n constructor([key, min, max, opts], cmdOpts) {\n const command = [\"zrange\", key, min, max];\n if (opts?.byScore) {\n command.push(\"byscore\");\n }\n if (opts?.byLex) {\n command.push(\"bylex\");\n }\n if (opts?.rev) {\n command.push(\"rev\");\n }\n if (opts?.count !== void 0 && opts.offset !== void 0) {\n command.push(\"limit\", opts.offset, opts.count);\n }\n if (opts?.withScores) {\n command.push(\"withscores\");\n }\n super(command, cmdOpts);\n }\n};\n\n// pkg/commands/zrank.ts\nvar ZRankCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"zrank\", ...cmd], opts);\n }\n};\n\n// pkg/commands/zrem.ts\nvar ZRemCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"zrem\", ...cmd], opts);\n }\n};\n\n// pkg/commands/zremrangebylex.ts\nvar ZRemRangeByLexCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"zremrangebylex\", ...cmd], opts);\n }\n};\n\n// pkg/commands/zremrangebyrank.ts\nvar ZRemRangeByRankCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"zremrangebyrank\", ...cmd], opts);\n }\n};\n\n// pkg/commands/zremrangebyscore.ts\nvar ZRemRangeByScoreCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"zremrangebyscore\", ...cmd], opts);\n }\n};\n\n// pkg/commands/zrevrank.ts\nvar ZRevRankCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"zrevrank\", ...cmd], opts);\n }\n};\n\n// pkg/commands/zscan.ts\nvar ZScanCommand = class extends Command {\n constructor([key, cursor, opts], cmdOpts) {\n const command = [\"zscan\", key, cursor];\n if (opts?.match) {\n command.push(\"match\", opts.match);\n }\n if (typeof opts?.count === \"number\") {\n command.push(\"count\", opts.count);\n }\n super(command, {\n deserialize: deserializeScanResponse,\n ...cmdOpts\n });\n }\n};\n\n// pkg/commands/zscore.ts\nvar ZScoreCommand = class extends Command {\n constructor(cmd, opts) {\n super([\"zscore\", ...cmd], opts);\n }\n};\n\n// pkg/commands/zunion.ts\nvar ZUnionCommand = class extends Command {\n constructor([numKeys, keyOrKeys, opts], cmdOpts) {\n const command = [\"zunion\", numKeys];\n if (Array.isArray(keyOrKeys)) {\n command.push(...keyOrKeys);\n } else {\n command.push(keyOrKeys);\n }\n if (opts) {\n if (\"weights\" in opts && opts.weights) {\n command.push(\"weights\", ...opts.weights);\n } else if (\"weight\" in opts && typeof opts.weight === \"number\") {\n command.push(\"weights\", opts.weight);\n }\n if (\"aggregate\" in opts) {\n command.push(\"aggregate\", opts.aggregate);\n }\n if (opts.withScores) {\n command.push(\"withscores\");\n }\n }\n super(command, cmdOpts);\n }\n};\n\n// pkg/commands/zunionstore.ts\nvar ZUnionStoreCommand = class extends Command {\n constructor([destination, numKeys, keyOrKeys, opts], cmdOpts) {\n const command = [\"zunionstore\", destination, numKeys];\n if (Array.isArray(keyOrKeys)) {\n command.push(...keyOrKeys);\n } else {\n command.push(keyOrKeys);\n }\n if (opts) {\n if (\"weights\" in opts && opts.weights) {\n command.push(\"weights\", ...opts.weights);\n } else if (\"weight\" in opts && typeof opts.weight === \"number\") {\n command.push(\"weights\", opts.weight);\n }\n if (\"aggregate\" in opts) {\n command.push(\"aggregate\", opts.aggregate);\n }\n }\n super(command, cmdOpts);\n }\n};\n\n// pkg/commands/zdiffstore.ts\nvar ZDiffStoreCommand = class 
extends Command {\n constructor(cmd, opts) {\n super([\"zdiffstore\", ...cmd], opts);\n }\n};\n\n// pkg/commands/zmscore.ts\nvar ZMScoreCommand = class extends Command {\n constructor(cmd, opts) {\n const [key, members] = cmd;\n super([\"zmscore\", key, ...members], opts);\n }\n};\n\n// pkg/pipeline.ts\nvar Pipeline = class {\n client;\n commands;\n commandOptions;\n multiExec;\n constructor(opts) {\n this.client = opts.client;\n this.commands = [];\n this.commandOptions = opts.commandOptions;\n this.multiExec = opts.multiExec ?? false;\n if (this.commandOptions?.latencyLogging) {\n const originalExec = this.exec.bind(this);\n this.exec = async (options) => {\n const start = performance.now();\n const result = await (options ? originalExec(options) : originalExec());\n const end = performance.now();\n const loggerResult = (end - start).toFixed(2);\n console.log(\n `Latency for \\x1B[38;2;19;185;39m${this.multiExec ? [\"MULTI-EXEC\"] : [\"PIPELINE\"].toString().toUpperCase()}\\x1B[0m: \\x1B[38;2;0;255;255m${loggerResult} ms\\x1B[0m`\n );\n return result;\n };\n }\n }\n exec = async (options) => {\n if (this.commands.length === 0) {\n throw new Error(\"Pipeline is empty\");\n }\n const path = this.multiExec ? [\"multi-exec\"] : [\"pipeline\"];\n const res = await this.client.request({\n path,\n body: Object.values(this.commands).map((c) => c.command)\n });\n return options?.keepErrors ? res.map(({ error, result }, i) => {\n return {\n error,\n result: this.commands[i].deserialize(result)\n };\n }) : res.map(({ error, result }, i) => {\n if (error) {\n throw new UpstashError(\n `Command ${i + 1} [ ${this.commands[i].command[0]} ] failed: ${error}`\n );\n }\n return this.commands[i].deserialize(result);\n });\n };\n /**\n * Returns the length of pipeline before the execution\n */\n length() {\n return this.commands.length;\n }\n /**\n * Pushes a command into the pipeline and returns a chainable instance of the\n * pipeline\n */\n chain(command) {\n this.commands.push(command);\n return this;\n }\n /**\n * @see https://redis.io/commands/append\n */\n append = (...args) => this.chain(new AppendCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/bitcount\n */\n bitcount = (...args) => this.chain(new BitCountCommand(args, this.commandOptions));\n /**\n * Returns an instance that can be used to execute `BITFIELD` commands on one key.\n *\n * @example\n * ```typescript\n * redis.set(\"mykey\", 0);\n * const result = await redis.pipeline()\n * .bitfield(\"mykey\")\n * .set(\"u4\", 0, 16)\n * .incr(\"u4\", \"#1\", 1)\n * .exec();\n * console.log(result); // [[0, 1]]\n * ```\n *\n * @see https://redis.io/commands/bitfield\n */\n bitfield = (...args) => new BitFieldCommand(args, this.client, this.commandOptions, this.chain.bind(this));\n /**\n * @see https://redis.io/commands/bitop\n */\n bitop = (op, destinationKey, sourceKey, ...sourceKeys) => this.chain(\n new BitOpCommand([op, destinationKey, sourceKey, ...sourceKeys], this.commandOptions)\n );\n /**\n * @see https://redis.io/commands/bitpos\n */\n bitpos = (...args) => this.chain(new BitPosCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/copy\n */\n copy = (...args) => this.chain(new CopyCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zdiffstore\n */\n zdiffstore = (...args) => this.chain(new ZDiffStoreCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/dbsize\n */\n dbsize = () => this.chain(new DBSizeCommand(this.commandOptions));\n 
/**\n * @see https://redis.io/commands/decr\n */\n decr = (...args) => this.chain(new DecrCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/decrby\n */\n decrby = (...args) => this.chain(new DecrByCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/del\n */\n del = (...args) => this.chain(new DelCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/echo\n */\n echo = (...args) => this.chain(new EchoCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/eval\n */\n eval = (...args) => this.chain(new EvalCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/evalsha\n */\n evalsha = (...args) => this.chain(new EvalshaCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/exists\n */\n exists = (...args) => this.chain(new ExistsCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/expire\n */\n expire = (...args) => this.chain(new ExpireCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/expireat\n */\n expireat = (...args) => this.chain(new ExpireAtCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/flushall\n */\n flushall = (args) => this.chain(new FlushAllCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/flushdb\n */\n flushdb = (...args) => this.chain(new FlushDBCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/geoadd\n */\n geoadd = (...args) => this.chain(new GeoAddCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/geodist\n */\n geodist = (...args) => this.chain(new GeoDistCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/geopos\n */\n geopos = (...args) => this.chain(new GeoPosCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/geohash\n */\n geohash = (...args) => this.chain(new GeoHashCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/geosearch\n */\n geosearch = (...args) => this.chain(new GeoSearchCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/geosearchstore\n */\n geosearchstore = (...args) => this.chain(new GeoSearchStoreCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/get\n */\n get = (...args) => this.chain(new GetCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/getbit\n */\n getbit = (...args) => this.chain(new GetBitCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/getdel\n */\n getdel = (...args) => this.chain(new GetDelCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/getrange\n */\n getrange = (...args) => this.chain(new GetRangeCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/getset\n */\n getset = (key, value) => this.chain(new GetSetCommand([key, value], this.commandOptions));\n /**\n * @see https://redis.io/commands/hdel\n */\n hdel = (...args) => this.chain(new HDelCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/hexists\n */\n hexists = (...args) => this.chain(new HExistsCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/hget\n */\n hget = (...args) => this.chain(new HGetCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/hgetall\n */\n hgetall = (...args) => this.chain(new HGetAllCommand(args, this.commandOptions));\n /**\n * @see 
https://redis.io/commands/hincrby\n */\n hincrby = (...args) => this.chain(new HIncrByCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/hincrbyfloat\n */\n hincrbyfloat = (...args) => this.chain(new HIncrByFloatCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/hkeys\n */\n hkeys = (...args) => this.chain(new HKeysCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/hlen\n */\n hlen = (...args) => this.chain(new HLenCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/hmget\n */\n hmget = (...args) => this.chain(new HMGetCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/hmset\n */\n hmset = (key, kv) => this.chain(new HMSetCommand([key, kv], this.commandOptions));\n /**\n * @see https://redis.io/commands/hrandfield\n */\n hrandfield = (key, count, withValues) => this.chain(new HRandFieldCommand([key, count, withValues], this.commandOptions));\n /**\n * @see https://redis.io/commands/hscan\n */\n hscan = (...args) => this.chain(new HScanCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/hset\n */\n hset = (key, kv) => this.chain(new HSetCommand([key, kv], this.commandOptions));\n /**\n * @see https://redis.io/commands/hsetnx\n */\n hsetnx = (key, field, value) => this.chain(new HSetNXCommand([key, field, value], this.commandOptions));\n /**\n * @see https://redis.io/commands/hstrlen\n */\n hstrlen = (...args) => this.chain(new HStrLenCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/hvals\n */\n hvals = (...args) => this.chain(new HValsCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/incr\n */\n incr = (...args) => this.chain(new IncrCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/incrby\n */\n incrby = (...args) => this.chain(new IncrByCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/incrbyfloat\n */\n incrbyfloat = (...args) => this.chain(new IncrByFloatCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/keys\n */\n keys = (...args) => this.chain(new KeysCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/lindex\n */\n lindex = (...args) => this.chain(new LIndexCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/linsert\n */\n linsert = (key, direction, pivot, value) => this.chain(new LInsertCommand([key, direction, pivot, value], this.commandOptions));\n /**\n * @see https://redis.io/commands/llen\n */\n llen = (...args) => this.chain(new LLenCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/lmove\n */\n lmove = (...args) => this.chain(new LMoveCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/lpop\n */\n lpop = (...args) => this.chain(new LPopCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/lmpop\n */\n lmpop = (...args) => this.chain(new LmPopCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/lpos\n */\n lpos = (...args) => this.chain(new LPosCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/lpush\n */\n lpush = (key, ...elements) => this.chain(new LPushCommand([key, ...elements], this.commandOptions));\n /**\n * @see https://redis.io/commands/lpushx\n */\n lpushx = (key, ...elements) => this.chain(new LPushXCommand([key, ...elements], this.commandOptions));\n /**\n * @see 
https://redis.io/commands/lrange\n */\n lrange = (...args) => this.chain(new LRangeCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/lrem\n */\n lrem = (key, count, value) => this.chain(new LRemCommand([key, count, value], this.commandOptions));\n /**\n * @see https://redis.io/commands/lset\n */\n lset = (key, index, value) => this.chain(new LSetCommand([key, index, value], this.commandOptions));\n /**\n * @see https://redis.io/commands/ltrim\n */\n ltrim = (...args) => this.chain(new LTrimCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/mget\n */\n mget = (...args) => this.chain(new MGetCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/mset\n */\n mset = (kv) => this.chain(new MSetCommand([kv], this.commandOptions));\n /**\n * @see https://redis.io/commands/msetnx\n */\n msetnx = (kv) => this.chain(new MSetNXCommand([kv], this.commandOptions));\n /**\n * @see https://redis.io/commands/persist\n */\n persist = (...args) => this.chain(new PersistCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/pexpire\n */\n pexpire = (...args) => this.chain(new PExpireCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/pexpireat\n */\n pexpireat = (...args) => this.chain(new PExpireAtCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/pfadd\n */\n pfadd = (...args) => this.chain(new PfAddCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/pfcount\n */\n pfcount = (...args) => this.chain(new PfCountCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/pfmerge\n */\n pfmerge = (...args) => this.chain(new PfMergeCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/ping\n */\n ping = (args) => this.chain(new PingCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/psetex\n */\n psetex = (key, ttl, value) => this.chain(new PSetEXCommand([key, ttl, value], this.commandOptions));\n /**\n * @see https://redis.io/commands/pttl\n */\n pttl = (...args) => this.chain(new PTtlCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/publish\n */\n publish = (...args) => this.chain(new PublishCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/randomkey\n */\n randomkey = () => this.chain(new RandomKeyCommand(this.commandOptions));\n /**\n * @see https://redis.io/commands/rename\n */\n rename = (...args) => this.chain(new RenameCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/renamenx\n */\n renamenx = (...args) => this.chain(new RenameNXCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/rpop\n */\n rpop = (...args) => this.chain(new RPopCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/rpush\n */\n rpush = (key, ...elements) => this.chain(new RPushCommand([key, ...elements], this.commandOptions));\n /**\n * @see https://redis.io/commands/rpushx\n */\n rpushx = (key, ...elements) => this.chain(new RPushXCommand([key, ...elements], this.commandOptions));\n /**\n * @see https://redis.io/commands/sadd\n */\n sadd = (key, member, ...members) => this.chain(new SAddCommand([key, member, ...members], this.commandOptions));\n /**\n * @see https://redis.io/commands/scan\n */\n scan = (...args) => this.chain(new ScanCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/scard\n */\n scard = (...args) => this.chain(new 
SCardCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/script-exists\n */\n scriptExists = (...args) => this.chain(new ScriptExistsCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/script-flush\n */\n scriptFlush = (...args) => this.chain(new ScriptFlushCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/script-load\n */\n scriptLoad = (...args) => this.chain(new ScriptLoadCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/sdiff\n */\n sdiff = (...args) => this.chain(new SDiffCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/sdiffstore\n */\n sdiffstore = (...args) => this.chain(new SDiffStoreCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/set\n */\n set = (key, value, opts) => this.chain(new SetCommand([key, value, opts], this.commandOptions));\n /**\n * @see https://redis.io/commands/setbit\n */\n setbit = (...args) => this.chain(new SetBitCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/setex\n */\n setex = (key, ttl, value) => this.chain(new SetExCommand([key, ttl, value], this.commandOptions));\n /**\n * @see https://redis.io/commands/setnx\n */\n setnx = (key, value) => this.chain(new SetNxCommand([key, value], this.commandOptions));\n /**\n * @see https://redis.io/commands/setrange\n */\n setrange = (...args) => this.chain(new SetRangeCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/sinter\n */\n sinter = (...args) => this.chain(new SInterCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/sinterstore\n */\n sinterstore = (...args) => this.chain(new SInterStoreCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/sismember\n */\n sismember = (key, member) => this.chain(new SIsMemberCommand([key, member], this.commandOptions));\n /**\n * @see https://redis.io/commands/smembers\n */\n smembers = (...args) => this.chain(new SMembersCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/smismember\n */\n smismember = (key, members) => this.chain(new SMIsMemberCommand([key, members], this.commandOptions));\n /**\n * @see https://redis.io/commands/smove\n */\n smove = (source, destination, member) => this.chain(new SMoveCommand([source, destination, member], this.commandOptions));\n /**\n * @see https://redis.io/commands/spop\n */\n spop = (...args) => this.chain(new SPopCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/srandmember\n */\n srandmember = (...args) => this.chain(new SRandMemberCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/srem\n */\n srem = (key, ...members) => this.chain(new SRemCommand([key, ...members], this.commandOptions));\n /**\n * @see https://redis.io/commands/sscan\n */\n sscan = (...args) => this.chain(new SScanCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/strlen\n */\n strlen = (...args) => this.chain(new StrLenCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/sunion\n */\n sunion = (...args) => this.chain(new SUnionCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/sunionstore\n */\n sunionstore = (...args) => this.chain(new SUnionStoreCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/time\n */\n time = () => this.chain(new TimeCommand(this.commandOptions));\n /**\n * @see 
https://redis.io/commands/touch\n */\n touch = (...args) => this.chain(new TouchCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/ttl\n */\n ttl = (...args) => this.chain(new TtlCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/type\n */\n type = (...args) => this.chain(new TypeCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/unlink\n */\n unlink = (...args) => this.chain(new UnlinkCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zadd\n */\n zadd = (...args) => {\n if (\"score\" in args[1]) {\n return this.chain(\n new ZAddCommand([args[0], args[1], ...args.slice(2)], this.commandOptions)\n );\n }\n return this.chain(\n new ZAddCommand(\n [args[0], args[1], ...args.slice(2)],\n this.commandOptions\n )\n );\n };\n /**\n * @see https://redis.io/commands/xadd\n */\n xadd = (...args) => this.chain(new XAddCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xack\n */\n xack = (...args) => this.chain(new XAckCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xdel\n */\n xdel = (...args) => this.chain(new XDelCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xgroup\n */\n xgroup = (...args) => this.chain(new XGroupCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xread\n */\n xread = (...args) => this.chain(new XReadCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xreadgroup\n */\n xreadgroup = (...args) => this.chain(new XReadGroupCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xinfo\n */\n xinfo = (...args) => this.chain(new XInfoCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xlen\n */\n xlen = (...args) => this.chain(new XLenCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xpending\n */\n xpending = (...args) => this.chain(new XPendingCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xclaim\n */\n xclaim = (...args) => this.chain(new XClaimCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xautoclaim\n */\n xautoclaim = (...args) => this.chain(new XAutoClaim(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xtrim\n */\n xtrim = (...args) => this.chain(new XTrimCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xrange\n */\n xrange = (...args) => this.chain(new XRangeCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/xrevrange\n */\n xrevrange = (...args) => this.chain(new XRevRangeCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zcard\n */\n zcard = (...args) => this.chain(new ZCardCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zcount\n */\n zcount = (...args) => this.chain(new ZCountCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zincrby\n */\n zincrby = (key, increment, member) => this.chain(new ZIncrByCommand([key, increment, member], this.commandOptions));\n /**\n * @see https://redis.io/commands/zinterstore\n */\n zinterstore = (...args) => this.chain(new ZInterStoreCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zlexcount\n */\n zlexcount = (...args) => this.chain(new ZLexCountCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zmscore\n */\n zmscore = (...args) => this.chain(new 
ZMScoreCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zpopmax\n */\n zpopmax = (...args) => this.chain(new ZPopMaxCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zpopmin\n */\n zpopmin = (...args) => this.chain(new ZPopMinCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zrange\n */\n zrange = (...args) => this.chain(new ZRangeCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zrank\n */\n zrank = (key, member) => this.chain(new ZRankCommand([key, member], this.commandOptions));\n /**\n * @see https://redis.io/commands/zrem\n */\n zrem = (key, ...members) => this.chain(new ZRemCommand([key, ...members], this.commandOptions));\n /**\n * @see https://redis.io/commands/zremrangebylex\n */\n zremrangebylex = (...args) => this.chain(new ZRemRangeByLexCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zremrangebyrank\n */\n zremrangebyrank = (...args) => this.chain(new ZRemRangeByRankCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zremrangebyscore\n */\n zremrangebyscore = (...args) => this.chain(new ZRemRangeByScoreCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zrevrank\n */\n zrevrank = (key, member) => this.chain(new ZRevRankCommand([key, member], this.commandOptions));\n /**\n * @see https://redis.io/commands/zscan\n */\n zscan = (...args) => this.chain(new ZScanCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zscore\n */\n zscore = (key, member) => this.chain(new ZScoreCommand([key, member], this.commandOptions));\n /**\n * @see https://redis.io/commands/zunionstore\n */\n zunionstore = (...args) => this.chain(new ZUnionStoreCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/zunion\n */\n zunion = (...args) => this.chain(new ZUnionCommand(args, this.commandOptions));\n /**\n * @see https://redis.io/commands/?group=json\n */\n get json() {\n return {\n /**\n * @see https://redis.io/commands/json.arrappend\n */\n arrappend: (...args) => this.chain(new JsonArrAppendCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.arrindex\n */\n arrindex: (...args) => this.chain(new JsonArrIndexCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.arrinsert\n */\n arrinsert: (...args) => this.chain(new JsonArrInsertCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.arrlen\n */\n arrlen: (...args) => this.chain(new JsonArrLenCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.arrpop\n */\n arrpop: (...args) => this.chain(new JsonArrPopCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.arrtrim\n */\n arrtrim: (...args) => this.chain(new JsonArrTrimCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.clear\n */\n clear: (...args) => this.chain(new JsonClearCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.del\n */\n del: (...args) => this.chain(new JsonDelCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.forget\n */\n forget: (...args) => this.chain(new JsonForgetCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.get\n */\n get: (...args) => this.chain(new JsonGetCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.mget\n */\n mget: (...args) => 
this.chain(new JsonMGetCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.mset\n */\n mset: (...args) => this.chain(new JsonMSetCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.numincrby\n */\n numincrby: (...args) => this.chain(new JsonNumIncrByCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.nummultby\n */\n nummultby: (...args) => this.chain(new JsonNumMultByCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.objkeys\n */\n objkeys: (...args) => this.chain(new JsonObjKeysCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.objlen\n */\n objlen: (...args) => this.chain(new JsonObjLenCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.resp\n */\n resp: (...args) => this.chain(new JsonRespCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.set\n */\n set: (...args) => this.chain(new JsonSetCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.strappend\n */\n strappend: (...args) => this.chain(new JsonStrAppendCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.strlen\n */\n strlen: (...args) => this.chain(new JsonStrLenCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.toggle\n */\n toggle: (...args) => this.chain(new JsonToggleCommand(args, this.commandOptions)),\n /**\n * @see https://redis.io/commands/json.type\n */\n type: (...args) => this.chain(new JsonTypeCommand(args, this.commandOptions))\n };\n }\n};\n\n// pkg/script.ts\nvar import_enc_hex = __toESM(require(\"crypto-js/enc-hex.js\"));\nvar import_sha1 = __toESM(require(\"crypto-js/sha1.js\"));\nvar Script = class {\n script;\n sha1;\n redis;\n constructor(redis, script) {\n this.redis = redis;\n this.sha1 = this.digest(script);\n this.script = script;\n }\n /**\n * Send an `EVAL` command to redis.\n */\n async eval(keys, args) {\n return await this.redis.eval(this.script, keys, args);\n }\n /**\n * Calculates the sha1 hash of the script and then calls `EVALSHA`.\n */\n async evalsha(keys, args) {\n return await this.redis.evalsha(this.sha1, keys, args);\n }\n /**\n * Optimistically try to run `EVALSHA` first.\n * If the script is not loaded in redis, it will fall back and try again with `EVAL`.\n *\n * Following calls will be able to use the cached script\n */\n async exec(keys, args) {\n const res = await this.redis.evalsha(this.sha1, keys, args).catch(async (error) => {\n if (error instanceof Error && error.message.toLowerCase().includes(\"noscript\")) {\n return await this.redis.eval(this.script, keys, args);\n }\n throw error;\n });\n return res;\n }\n /**\n * Compute the sha1 hash of the script and return its hex representation.\n */\n digest(s) {\n return import_enc_hex.default.stringify((0, import_sha1.default)(s));\n }\n};\n\n// pkg/redis.ts\nvar Redis = class {\n client;\n opts;\n enableTelemetry;\n enableAutoPipelining;\n /**\n * Create a new redis client\n *\n * @example\n * ```typescript\n * const redis = new Redis({\n * url: \"\",\n * token: \"\",\n * });\n * ```\n */\n constructor(client, opts) {\n this.client = client;\n this.opts = opts;\n this.enableTelemetry = opts?.enableTelemetry ?? true;\n if (opts?.readYourWrites === false) {\n this.client.readYourWrites = false;\n }\n this.enableAutoPipelining = opts?.enableAutoPipelining ?? 
true;\n }\n get readYourWritesSyncToken() {\n return this.client.upstashSyncToken;\n }\n set readYourWritesSyncToken(session) {\n this.client.upstashSyncToken = session;\n }\n get json() {\n return {\n /**\n * @see https://redis.io/commands/json.arrappend\n */\n arrappend: (...args) => new JsonArrAppendCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.arrindex\n */\n arrindex: (...args) => new JsonArrIndexCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.arrinsert\n */\n arrinsert: (...args) => new JsonArrInsertCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.arrlen\n */\n arrlen: (...args) => new JsonArrLenCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.arrpop\n */\n arrpop: (...args) => new JsonArrPopCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.arrtrim\n */\n arrtrim: (...args) => new JsonArrTrimCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.clear\n */\n clear: (...args) => new JsonClearCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.del\n */\n del: (...args) => new JsonDelCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.forget\n */\n forget: (...args) => new JsonForgetCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.get\n */\n get: (...args) => new JsonGetCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.mget\n */\n mget: (...args) => new JsonMGetCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.mset\n */\n mset: (...args) => new JsonMSetCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.numincrby\n */\n numincrby: (...args) => new JsonNumIncrByCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.nummultby\n */\n nummultby: (...args) => new JsonNumMultByCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.objkeys\n */\n objkeys: (...args) => new JsonObjKeysCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.objlen\n */\n objlen: (...args) => new JsonObjLenCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.resp\n */\n resp: (...args) => new JsonRespCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.set\n */\n set: (...args) => new JsonSetCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.strappend\n */\n strappend: (...args) => new JsonStrAppendCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.strlen\n */\n strlen: (...args) => new JsonStrLenCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.toggle\n */\n toggle: (...args) => new JsonToggleCommand(args, this.opts).exec(this.client),\n /**\n * @see https://redis.io/commands/json.type\n */\n type: (...args) => new JsonTypeCommand(args, this.opts).exec(this.client)\n };\n }\n /**\n * Wrap a new middleware around the HTTP client.\n */\n use = (middleware) => {\n const makeRequest = this.client.request.bind(this.client);\n this.client.request = (req) => middleware(req, makeRequest);\n };\n /**\n * Technically this is not private, we can hide it from 
intellisense by doing this\n */\n addTelemetry = (telemetry) => {\n if (!this.enableTelemetry) {\n return;\n }\n try {\n this.client.mergeTelemetry(telemetry);\n } catch {\n }\n };\n createScript(script) {\n return new Script(this, script);\n }\n /**\n * Create a new pipeline that allows you to send requests in bulk.\n *\n * @see {@link Pipeline}\n */\n pipeline = () => new Pipeline({\n client: this.client,\n commandOptions: this.opts,\n multiExec: false\n });\n autoPipeline = () => {\n return createAutoPipelineProxy(this);\n };\n /**\n * Create a new transaction to allow executing multiple steps atomically.\n *\n * All the commands in a transaction are serialized and executed sequentially. A request sent by\n * another client will never be served in the middle of the execution of a Redis Transaction. This\n * guarantees that the commands are executed as a single isolated operation.\n *\n * @see {@link Pipeline}\n */\n multi = () => new Pipeline({\n client: this.client,\n commandOptions: this.opts,\n multiExec: true\n });\n /**\n * Returns an instance that can be used to execute `BITFIELD` commands on one key.\n *\n * @example\n * ```typescript\n * redis.set(\"mykey\", 0);\n * const result = await redis.bitfield(\"mykey\")\n * .set(\"u4\", 0, 16)\n * .incr(\"u4\", \"#1\", 1)\n * .exec();\n * console.log(result); // [0, 1]\n * ```\n *\n * @see https://redis.io/commands/bitfield\n */\n bitfield = (...args) => new BitFieldCommand(args, this.client, this.opts);\n /**\n * @see https://redis.io/commands/append\n */\n append = (...args) => new AppendCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/bitcount\n */\n bitcount = (...args) => new BitCountCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/bitop\n */\n bitop = (op, destinationKey, sourceKey, ...sourceKeys) => new BitOpCommand([op, destinationKey, sourceKey, ...sourceKeys], this.opts).exec(\n this.client\n );\n /**\n * @see https://redis.io/commands/bitpos\n */\n bitpos = (...args) => new BitPosCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/copy\n */\n copy = (...args) => new CopyCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/dbsize\n */\n dbsize = () => new DBSizeCommand(this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/decr\n */\n decr = (...args) => new DecrCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/decrby\n */\n decrby = (...args) => new DecrByCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/del\n */\n del = (...args) => new DelCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/echo\n */\n echo = (...args) => new EchoCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/eval\n */\n eval = (...args) => new EvalCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/evalsha\n */\n evalsha = (...args) => new EvalshaCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/exists\n */\n exists = (...args) => new ExistsCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/expire\n */\n expire = (...args) => new ExpireCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/expireat\n */\n expireat = (...args) => new ExpireAtCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/flushall\n */\n 
flushall = (args) => new FlushAllCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/flushdb\n */\n flushdb = (...args) => new FlushDBCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/geoadd\n */\n geoadd = (...args) => new GeoAddCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/geopos\n */\n geopos = (...args) => new GeoPosCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/geodist\n */\n geodist = (...args) => new GeoDistCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/geohash\n */\n geohash = (...args) => new GeoHashCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/geosearch\n */\n geosearch = (...args) => new GeoSearchCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/geosearchstore\n */\n geosearchstore = (...args) => new GeoSearchStoreCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/get\n */\n get = (...args) => new GetCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/getbit\n */\n getbit = (...args) => new GetBitCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/getdel\n */\n getdel = (...args) => new GetDelCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/getrange\n */\n getrange = (...args) => new GetRangeCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/getset\n */\n getset = (key, value) => new GetSetCommand([key, value], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hdel\n */\n hdel = (...args) => new HDelCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hexists\n */\n hexists = (...args) => new HExistsCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hget\n */\n hget = (...args) => new HGetCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hgetall\n */\n hgetall = (...args) => new HGetAllCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hincrby\n */\n hincrby = (...args) => new HIncrByCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hincrbyfloat\n */\n hincrbyfloat = (...args) => new HIncrByFloatCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hkeys\n */\n hkeys = (...args) => new HKeysCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hlen\n */\n hlen = (...args) => new HLenCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hmget\n */\n hmget = (...args) => new HMGetCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hmset\n */\n hmset = (key, kv) => new HMSetCommand([key, kv], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hrandfield\n */\n hrandfield = (key, count, withValues) => new HRandFieldCommand([key, count, withValues], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hscan\n */\n hscan = (...args) => new HScanCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hset\n */\n hset = (key, kv) => new HSetCommand([key, kv], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hsetnx\n */\n hsetnx = (key, field, value) => new 
HSetNXCommand([key, field, value], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hstrlen\n */\n hstrlen = (...args) => new HStrLenCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/hvals\n */\n hvals = (...args) => new HValsCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/incr\n */\n incr = (...args) => new IncrCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/incrby\n */\n incrby = (...args) => new IncrByCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/incrbyfloat\n */\n incrbyfloat = (...args) => new IncrByFloatCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/keys\n */\n keys = (...args) => new KeysCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/lindex\n */\n lindex = (...args) => new LIndexCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/linsert\n */\n linsert = (key, direction, pivot, value) => new LInsertCommand([key, direction, pivot, value], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/llen\n */\n llen = (...args) => new LLenCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/lmove\n */\n lmove = (...args) => new LMoveCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/lpop\n */\n lpop = (...args) => new LPopCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/lmpop\n */\n lmpop = (...args) => new LmPopCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/lpos\n */\n lpos = (...args) => new LPosCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/lpush\n */\n lpush = (key, ...elements) => new LPushCommand([key, ...elements], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/lpushx\n */\n lpushx = (key, ...elements) => new LPushXCommand([key, ...elements], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/lrange\n */\n lrange = (...args) => new LRangeCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/lrem\n */\n lrem = (key, count, value) => new LRemCommand([key, count, value], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/lset\n */\n lset = (key, index, value) => new LSetCommand([key, index, value], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/ltrim\n */\n ltrim = (...args) => new LTrimCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/mget\n */\n mget = (...args) => new MGetCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/mset\n */\n mset = (kv) => new MSetCommand([kv], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/msetnx\n */\n msetnx = (kv) => new MSetNXCommand([kv], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/persist\n */\n persist = (...args) => new PersistCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/pexpire\n */\n pexpire = (...args) => new PExpireCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/pexpireat\n */\n pexpireat = (...args) => new PExpireAtCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/pfadd\n */\n pfadd = (...args) => new PfAddCommand(args, 
this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/pfcount\n */\n pfcount = (...args) => new PfCountCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/pfmerge\n */\n pfmerge = (...args) => new PfMergeCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/ping\n */\n ping = (args) => new PingCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/psetex\n */\n psetex = (key, ttl, value) => new PSetEXCommand([key, ttl, value], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/pttl\n */\n pttl = (...args) => new PTtlCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/publish\n */\n publish = (...args) => new PublishCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/randomkey\n */\n randomkey = () => new RandomKeyCommand().exec(this.client);\n /**\n * @see https://redis.io/commands/rename\n */\n rename = (...args) => new RenameCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/renamenx\n */\n renamenx = (...args) => new RenameNXCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/rpop\n */\n rpop = (...args) => new RPopCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/rpush\n */\n rpush = (key, ...elements) => new RPushCommand([key, ...elements], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/rpushx\n */\n rpushx = (key, ...elements) => new RPushXCommand([key, ...elements], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/sadd\n */\n sadd = (key, member, ...members) => new SAddCommand([key, member, ...members], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/scan\n */\n scan = (...args) => new ScanCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/scard\n */\n scard = (...args) => new SCardCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/script-exists\n */\n scriptExists = (...args) => new ScriptExistsCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/script-flush\n */\n scriptFlush = (...args) => new ScriptFlushCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/script-load\n */\n scriptLoad = (...args) => new ScriptLoadCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/sdiff\n */\n sdiff = (...args) => new SDiffCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/sdiffstore\n */\n sdiffstore = (...args) => new SDiffStoreCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/set\n */\n set = (key, value, opts) => new SetCommand([key, value, opts], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/setbit\n */\n setbit = (...args) => new SetBitCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/setex\n */\n setex = (key, ttl, value) => new SetExCommand([key, ttl, value], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/setnx\n */\n setnx = (key, value) => new SetNxCommand([key, value], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/setrange\n */\n setrange = (...args) => new SetRangeCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/sinter\n */\n sinter = (...args) => 
new SInterCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/sinterstore\n */\n sinterstore = (...args) => new SInterStoreCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/sismember\n */\n sismember = (key, member) => new SIsMemberCommand([key, member], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/smismember\n */\n smismember = (key, members) => new SMIsMemberCommand([key, members], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/smembers\n */\n smembers = (...args) => new SMembersCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/smove\n */\n smove = (source, destination, member) => new SMoveCommand([source, destination, member], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/spop\n */\n spop = (...args) => new SPopCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/srandmember\n */\n srandmember = (...args) => new SRandMemberCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/srem\n */\n srem = (key, ...members) => new SRemCommand([key, ...members], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/sscan\n */\n sscan = (...args) => new SScanCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/strlen\n */\n strlen = (...args) => new StrLenCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/sunion\n */\n sunion = (...args) => new SUnionCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/sunionstore\n */\n sunionstore = (...args) => new SUnionStoreCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/time\n */\n time = () => new TimeCommand().exec(this.client);\n /**\n * @see https://redis.io/commands/touch\n */\n touch = (...args) => new TouchCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/ttl\n */\n ttl = (...args) => new TtlCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/type\n */\n type = (...args) => new TypeCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/unlink\n */\n unlink = (...args) => new UnlinkCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xadd\n */\n xadd = (...args) => new XAddCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xack\n */\n xack = (...args) => new XAckCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xdel\n */\n xdel = (...args) => new XDelCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xgroup\n */\n xgroup = (...args) => new XGroupCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xread\n */\n xread = (...args) => new XReadCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xreadgroup\n */\n xreadgroup = (...args) => new XReadGroupCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xinfo\n */\n xinfo = (...args) => new XInfoCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xlen\n */\n xlen = (...args) => new XLenCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xpending\n */\n xpending = (...args) => new XPendingCommand(args, this.opts).exec(this.client);\n 
/**\n * @see https://redis.io/commands/xclaim\n */\n xclaim = (...args) => new XClaimCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xautoclaim\n */\n xautoclaim = (...args) => new XAutoClaim(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xtrim\n */\n xtrim = (...args) => new XTrimCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xrange\n */\n xrange = (...args) => new XRangeCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/xrevrange\n */\n xrevrange = (...args) => new XRevRangeCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zadd\n */\n zadd = (...args) => {\n if (\"score\" in args[1]) {\n return new ZAddCommand([args[0], args[1], ...args.slice(2)], this.opts).exec(\n this.client\n );\n }\n return new ZAddCommand(\n [args[0], args[1], ...args.slice(2)],\n this.opts\n ).exec(this.client);\n };\n /**\n * @see https://redis.io/commands/zcard\n */\n zcard = (...args) => new ZCardCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zcount\n */\n zcount = (...args) => new ZCountCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zdiffstore\n */\n zdiffstore = (...args) => new ZDiffStoreCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zincrby\n */\n zincrby = (key, increment, member) => new ZIncrByCommand([key, increment, member], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zinterstore\n */\n zinterstore = (...args) => new ZInterStoreCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zlexcount\n */\n zlexcount = (...args) => new ZLexCountCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zmscore\n */\n zmscore = (...args) => new ZMScoreCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zpopmax\n */\n zpopmax = (...args) => new ZPopMaxCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zpopmin\n */\n zpopmin = (...args) => new ZPopMinCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zrange\n */\n zrange = (...args) => new ZRangeCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zrank\n */\n zrank = (key, member) => new ZRankCommand([key, member], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zrem\n */\n zrem = (key, ...members) => new ZRemCommand([key, ...members], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zremrangebylex\n */\n zremrangebylex = (...args) => new ZRemRangeByLexCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zremrangebyrank\n */\n zremrangebyrank = (...args) => new ZRemRangeByRankCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zremrangebyscore\n */\n zremrangebyscore = (...args) => new ZRemRangeByScoreCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zrevrank\n */\n zrevrank = (key, member) => new ZRevRankCommand([key, member], this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zscan\n */\n zscan = (...args) => new ZScanCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zscore\n */\n zscore = (key, member) => new ZScoreCommand([key, member], this.opts).exec(this.client);\n 
/**\n * @see https://redis.io/commands/zunion\n */\n zunion = (...args) => new ZUnionCommand(args, this.opts).exec(this.client);\n /**\n * @see https://redis.io/commands/zunionstore\n */\n zunionstore = (...args) => new ZUnionStoreCommand(args, this.opts).exec(this.client);\n};\n\n// version.ts\nvar VERSION = \"v1.34.3\";\n\n// platforms/nodejs.ts\nif (typeof atob === \"undefined\") {\n global.atob = (b64) => Buffer.from(b64, \"base64\").toString(\"utf8\");\n}\nvar Redis2 = class _Redis extends Redis {\n /**\n * Create a new redis client by providing a custom `Requester` implementation\n *\n * @example\n * ```ts\n *\n * import { UpstashRequest, Requester, UpstashResponse, Redis } from \"@upstash/redis\"\n *\n * const requester: Requester = {\n * request: (req: UpstashRequest): Promise> => {\n * // ...\n * }\n * }\n *\n * const redis = new Redis(requester)\n * ```\n */\n constructor(configOrRequester) {\n if (\"request\" in configOrRequester) {\n super(configOrRequester);\n return;\n }\n if (!configOrRequester.url) {\n console.warn(\n `[Upstash Redis] The 'url' property is missing or undefined in your Redis config.`\n );\n } else if (configOrRequester.url.startsWith(\" \") || configOrRequester.url.endsWith(\" \") || /\\r|\\n/.test(configOrRequester.url)) {\n console.warn(\n \"[Upstash Redis] The redis url contains whitespace or newline, which can cause errors!\"\n );\n }\n if (!configOrRequester.token) {\n console.warn(\n `[Upstash Redis] The 'token' property is missing or undefined in your Redis config.`\n );\n } else if (configOrRequester.token.startsWith(\" \") || configOrRequester.token.endsWith(\" \") || /\\r|\\n/.test(configOrRequester.token)) {\n console.warn(\n \"[Upstash Redis] The redis token contains whitespace or newline, which can cause errors!\"\n );\n }\n const client = new HttpClient({\n baseUrl: configOrRequester.url,\n retry: configOrRequester.retry,\n headers: { authorization: `Bearer ${configOrRequester.token}` },\n agent: configOrRequester.agent,\n responseEncoding: configOrRequester.responseEncoding,\n cache: configOrRequester.cache ?? \"no-store\",\n signal: configOrRequester.signal,\n keepAlive: configOrRequester.keepAlive,\n readYourWrites: configOrRequester.readYourWrites\n });\n super(client, {\n automaticDeserialization: configOrRequester.automaticDeserialization,\n enableTelemetry: !process.env.UPSTASH_DISABLE_TELEMETRY,\n latencyLogging: configOrRequester.latencyLogging,\n enableAutoPipelining: configOrRequester.enableAutoPipelining\n });\n this.addTelemetry({\n runtime: (\n // @ts-expect-error to silence compiler\n typeof EdgeRuntime === \"string\" ? \"edge-light\" : `node@${process.version}`\n ),\n platform: process.env.VERCEL ? \"vercel\" : process.env.AWS_REGION ? \"aws\" : \"unknown\",\n sdk: `@upstash/redis@${VERSION}`\n });\n if (this.enableAutoPipelining) {\n return this.autoPipeline();\n }\n }\n /**\n * Create a new Upstash Redis instance from environment variables.\n *\n * Use this to automatically load connection secrets from your environment\n * variables. For instance when using the Vercel integration.\n *\n * This tries to load `UPSTASH_REDIS_REST_URL` and `UPSTASH_REDIS_REST_TOKEN` from\n * your environment using `process.env`.\n */\n static fromEnv(config) {\n if (process.env === void 0) {\n throw new TypeError(\n '[Upstash Redis] Unable to get environment variables, `process.env` is undefined. 
If you are deploying to cloudflare, please import from \"@upstash/redis/cloudflare\" instead'\n );\n }\n const url = process.env.UPSTASH_REDIS_REST_URL || process.env.KV_REST_API_URL;\n if (!url) {\n console.warn(\"[Upstash Redis] Unable to find environment variable: `UPSTASH_REDIS_REST_URL`\");\n }\n const token = process.env.UPSTASH_REDIS_REST_TOKEN || process.env.KV_REST_API_TOKEN;\n if (!token) {\n console.warn(\n \"[Upstash Redis] Unable to find environment variable: `UPSTASH_REDIS_REST_TOKEN`\"\n );\n }\n return new _Redis({ ...config, url, token });\n }\n};\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n Redis,\n errors\n});\n",";(function (root, factory) {\n\tif (typeof exports === \"object\") {\n\t\t// CommonJS\n\t\tmodule.exports = exports = factory();\n\t}\n\telse if (typeof define === \"function\" && define.amd) {\n\t\t// AMD\n\t\tdefine([], factory);\n\t}\n\telse {\n\t\t// Global (browser)\n\t\troot.CryptoJS = factory();\n\t}\n}(this, function () {\n\n\t/*globals window, global, require*/\n\n\t/**\n\t * CryptoJS core components.\n\t */\n\tvar CryptoJS = CryptoJS || (function (Math, undefined) {\n\n\t var crypto;\n\n\t // Native crypto from window (Browser)\n\t if (typeof window !== 'undefined' && window.crypto) {\n\t crypto = window.crypto;\n\t }\n\n\t // Native crypto in web worker (Browser)\n\t if (typeof self !== 'undefined' && self.crypto) {\n\t crypto = self.crypto;\n\t }\n\n\t // Native crypto from worker\n\t if (typeof globalThis !== 'undefined' && globalThis.crypto) {\n\t crypto = globalThis.crypto;\n\t }\n\n\t // Native (experimental IE 11) crypto from window (Browser)\n\t if (!crypto && typeof window !== 'undefined' && window.msCrypto) {\n\t crypto = window.msCrypto;\n\t }\n\n\t // Native crypto from global (NodeJS)\n\t if (!crypto && typeof global !== 'undefined' && global.crypto) {\n\t crypto = global.crypto;\n\t }\n\n\t // Native crypto import via require (NodeJS)\n\t if (!crypto && typeof require === 'function') {\n\t try {\n\t crypto = require('crypto');\n\t } catch (err) {}\n\t }\n\n\t /*\n\t * Cryptographically secure pseudorandom number generator\n\t *\n\t * As Math.random() is cryptographically not safe to use\n\t */\n\t var cryptoSecureRandomInt = function () {\n\t if (crypto) {\n\t // Use getRandomValues method (Browser)\n\t if (typeof crypto.getRandomValues === 'function') {\n\t try {\n\t return crypto.getRandomValues(new Uint32Array(1))[0];\n\t } catch (err) {}\n\t }\n\n\t // Use randomBytes method (NodeJS)\n\t if (typeof crypto.randomBytes === 'function') {\n\t try {\n\t return crypto.randomBytes(4).readInt32LE();\n\t } catch (err) {}\n\t }\n\t }\n\n\t throw new Error('Native crypto module could not be used to get secure random number.');\n\t };\n\n\t /*\n\t * Local polyfill of Object.create\n\n\t */\n\t var create = Object.create || (function () {\n\t function F() {}\n\n\t return function (obj) {\n\t var subtype;\n\n\t F.prototype = obj;\n\n\t subtype = new F();\n\n\t F.prototype = null;\n\n\t return subtype;\n\t };\n\t }());\n\n\t /**\n\t * CryptoJS namespace.\n\t */\n\t var C = {};\n\n\t /**\n\t * Library namespace.\n\t */\n\t var C_lib = C.lib = {};\n\n\t /**\n\t * Base object for prototypal inheritance.\n\t */\n\t var Base = C_lib.Base = (function () {\n\n\n\t return {\n\t /**\n\t * Creates a new object that inherits from this object.\n\t *\n\t * @param {Object} overrides Properties to copy into the new object.\n\t *\n\t * @return {Object} The new object.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t 
* var MyType = CryptoJS.lib.Base.extend({\n\t * field: 'value',\n\t *\n\t * method: function () {\n\t * }\n\t * });\n\t */\n\t extend: function (overrides) {\n\t // Spawn\n\t var subtype = create(this);\n\n\t // Augment\n\t if (overrides) {\n\t subtype.mixIn(overrides);\n\t }\n\n\t // Create default initializer\n\t if (!subtype.hasOwnProperty('init') || this.init === subtype.init) {\n\t subtype.init = function () {\n\t subtype.$super.init.apply(this, arguments);\n\t };\n\t }\n\n\t // Initializer's prototype is the subtype object\n\t subtype.init.prototype = subtype;\n\n\t // Reference supertype\n\t subtype.$super = this;\n\n\t return subtype;\n\t },\n\n\t /**\n\t * Extends this object and runs the init method.\n\t * Arguments to create() will be passed to init().\n\t *\n\t * @return {Object} The new object.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t * var instance = MyType.create();\n\t */\n\t create: function () {\n\t var instance = this.extend();\n\t instance.init.apply(instance, arguments);\n\n\t return instance;\n\t },\n\n\t /**\n\t * Initializes a newly created object.\n\t * Override this method to add some logic when your objects are created.\n\t *\n\t * @example\n\t *\n\t * var MyType = CryptoJS.lib.Base.extend({\n\t * init: function () {\n\t * // ...\n\t * }\n\t * });\n\t */\n\t init: function () {\n\t },\n\n\t /**\n\t * Copies properties into this object.\n\t *\n\t * @param {Object} properties The properties to mix in.\n\t *\n\t * @example\n\t *\n\t * MyType.mixIn({\n\t * field: 'value'\n\t * });\n\t */\n\t mixIn: function (properties) {\n\t for (var propertyName in properties) {\n\t if (properties.hasOwnProperty(propertyName)) {\n\t this[propertyName] = properties[propertyName];\n\t }\n\t }\n\n\t // IE won't copy toString using the loop above\n\t if (properties.hasOwnProperty('toString')) {\n\t this.toString = properties.toString;\n\t }\n\t },\n\n\t /**\n\t * Creates a copy of this object.\n\t *\n\t * @return {Object} The clone.\n\t *\n\t * @example\n\t *\n\t * var clone = instance.clone();\n\t */\n\t clone: function () {\n\t return this.init.prototype.extend(this);\n\t }\n\t };\n\t }());\n\n\t /**\n\t * An array of 32-bit words.\n\t *\n\t * @property {Array} words The array of 32-bit words.\n\t * @property {number} sigBytes The number of significant bytes in this word array.\n\t */\n\t var WordArray = C_lib.WordArray = Base.extend({\n\t /**\n\t * Initializes a newly created word array.\n\t *\n\t * @param {Array} words (Optional) An array of 32-bit words.\n\t * @param {number} sigBytes (Optional) The number of significant bytes in the words.\n\t *\n\t * @example\n\t *\n\t * var wordArray = CryptoJS.lib.WordArray.create();\n\t * var wordArray = CryptoJS.lib.WordArray.create([0x00010203, 0x04050607]);\n\t * var wordArray = CryptoJS.lib.WordArray.create([0x00010203, 0x04050607], 6);\n\t */\n\t init: function (words, sigBytes) {\n\t words = this.words = words || [];\n\n\t if (sigBytes != undefined) {\n\t this.sigBytes = sigBytes;\n\t } else {\n\t this.sigBytes = words.length * 4;\n\t }\n\t },\n\n\t /**\n\t * Converts this word array to a string.\n\t *\n\t * @param {Encoder} encoder (Optional) The encoding strategy to use. 
Default: CryptoJS.enc.Hex\n\t *\n\t * @return {string} The stringified word array.\n\t *\n\t * @example\n\t *\n\t * var string = wordArray + '';\n\t * var string = wordArray.toString();\n\t * var string = wordArray.toString(CryptoJS.enc.Utf8);\n\t */\n\t toString: function (encoder) {\n\t return (encoder || Hex).stringify(this);\n\t },\n\n\t /**\n\t * Concatenates a word array to this word array.\n\t *\n\t * @param {WordArray} wordArray The word array to append.\n\t *\n\t * @return {WordArray} This word array.\n\t *\n\t * @example\n\t *\n\t * wordArray1.concat(wordArray2);\n\t */\n\t concat: function (wordArray) {\n\t // Shortcuts\n\t var thisWords = this.words;\n\t var thatWords = wordArray.words;\n\t var thisSigBytes = this.sigBytes;\n\t var thatSigBytes = wordArray.sigBytes;\n\n\t // Clamp excess bits\n\t this.clamp();\n\n\t // Concat\n\t if (thisSigBytes % 4) {\n\t // Copy one byte at a time\n\t for (var i = 0; i < thatSigBytes; i++) {\n\t var thatByte = (thatWords[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff;\n\t thisWords[(thisSigBytes + i) >>> 2] |= thatByte << (24 - ((thisSigBytes + i) % 4) * 8);\n\t }\n\t } else {\n\t // Copy one word at a time\n\t for (var j = 0; j < thatSigBytes; j += 4) {\n\t thisWords[(thisSigBytes + j) >>> 2] = thatWords[j >>> 2];\n\t }\n\t }\n\t this.sigBytes += thatSigBytes;\n\n\t // Chainable\n\t return this;\n\t },\n\n\t /**\n\t * Removes insignificant bits.\n\t *\n\t * @example\n\t *\n\t * wordArray.clamp();\n\t */\n\t clamp: function () {\n\t // Shortcuts\n\t var words = this.words;\n\t var sigBytes = this.sigBytes;\n\n\t // Clamp\n\t words[sigBytes >>> 2] &= 0xffffffff << (32 - (sigBytes % 4) * 8);\n\t words.length = Math.ceil(sigBytes / 4);\n\t },\n\n\t /**\n\t * Creates a copy of this word array.\n\t *\n\t * @return {WordArray} The clone.\n\t *\n\t * @example\n\t *\n\t * var clone = wordArray.clone();\n\t */\n\t clone: function () {\n\t var clone = Base.clone.call(this);\n\t clone.words = this.words.slice(0);\n\n\t return clone;\n\t },\n\n\t /**\n\t * Creates a word array filled with random bytes.\n\t *\n\t * @param {number} nBytes The number of random bytes to generate.\n\t *\n\t * @return {WordArray} The random word array.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t * var wordArray = CryptoJS.lib.WordArray.random(16);\n\t */\n\t random: function (nBytes) {\n\t var words = [];\n\n\t for (var i = 0; i < nBytes; i += 4) {\n\t words.push(cryptoSecureRandomInt());\n\t }\n\n\t return new WordArray.init(words, nBytes);\n\t }\n\t });\n\n\t /**\n\t * Encoder namespace.\n\t */\n\t var C_enc = C.enc = {};\n\n\t /**\n\t * Hex encoding strategy.\n\t */\n\t var Hex = C_enc.Hex = {\n\t /**\n\t * Converts a word array to a hex string.\n\t *\n\t * @param {WordArray} wordArray The word array.\n\t *\n\t * @return {string} The hex string.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t * var hexString = CryptoJS.enc.Hex.stringify(wordArray);\n\t */\n\t stringify: function (wordArray) {\n\t // Shortcuts\n\t var words = wordArray.words;\n\t var sigBytes = wordArray.sigBytes;\n\n\t // Convert\n\t var hexChars = [];\n\t for (var i = 0; i < sigBytes; i++) {\n\t var bite = (words[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff;\n\t hexChars.push((bite >>> 4).toString(16));\n\t hexChars.push((bite & 0x0f).toString(16));\n\t }\n\n\t return hexChars.join('');\n\t },\n\n\t /**\n\t * Converts a hex string to a word array.\n\t *\n\t * @param {string} hexStr The hex string.\n\t *\n\t * @return {WordArray} The word array.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t * var 
wordArray = CryptoJS.enc.Hex.parse(hexString);\n\t */\n\t parse: function (hexStr) {\n\t // Shortcut\n\t var hexStrLength = hexStr.length;\n\n\t // Convert\n\t var words = [];\n\t for (var i = 0; i < hexStrLength; i += 2) {\n\t words[i >>> 3] |= parseInt(hexStr.substr(i, 2), 16) << (24 - (i % 8) * 4);\n\t }\n\n\t return new WordArray.init(words, hexStrLength / 2);\n\t }\n\t };\n\n\t /**\n\t * Latin1 encoding strategy.\n\t */\n\t var Latin1 = C_enc.Latin1 = {\n\t /**\n\t * Converts a word array to a Latin1 string.\n\t *\n\t * @param {WordArray} wordArray The word array.\n\t *\n\t * @return {string} The Latin1 string.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t * var latin1String = CryptoJS.enc.Latin1.stringify(wordArray);\n\t */\n\t stringify: function (wordArray) {\n\t // Shortcuts\n\t var words = wordArray.words;\n\t var sigBytes = wordArray.sigBytes;\n\n\t // Convert\n\t var latin1Chars = [];\n\t for (var i = 0; i < sigBytes; i++) {\n\t var bite = (words[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff;\n\t latin1Chars.push(String.fromCharCode(bite));\n\t }\n\n\t return latin1Chars.join('');\n\t },\n\n\t /**\n\t * Converts a Latin1 string to a word array.\n\t *\n\t * @param {string} latin1Str The Latin1 string.\n\t *\n\t * @return {WordArray} The word array.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t * var wordArray = CryptoJS.enc.Latin1.parse(latin1String);\n\t */\n\t parse: function (latin1Str) {\n\t // Shortcut\n\t var latin1StrLength = latin1Str.length;\n\n\t // Convert\n\t var words = [];\n\t for (var i = 0; i < latin1StrLength; i++) {\n\t words[i >>> 2] |= (latin1Str.charCodeAt(i) & 0xff) << (24 - (i % 4) * 8);\n\t }\n\n\t return new WordArray.init(words, latin1StrLength);\n\t }\n\t };\n\n\t /**\n\t * UTF-8 encoding strategy.\n\t */\n\t var Utf8 = C_enc.Utf8 = {\n\t /**\n\t * Converts a word array to a UTF-8 string.\n\t *\n\t * @param {WordArray} wordArray The word array.\n\t *\n\t * @return {string} The UTF-8 string.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t * var utf8String = CryptoJS.enc.Utf8.stringify(wordArray);\n\t */\n\t stringify: function (wordArray) {\n\t try {\n\t return decodeURIComponent(escape(Latin1.stringify(wordArray)));\n\t } catch (e) {\n\t throw new Error('Malformed UTF-8 data');\n\t }\n\t },\n\n\t /**\n\t * Converts a UTF-8 string to a word array.\n\t *\n\t * @param {string} utf8Str The UTF-8 string.\n\t *\n\t * @return {WordArray} The word array.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t * var wordArray = CryptoJS.enc.Utf8.parse(utf8String);\n\t */\n\t parse: function (utf8Str) {\n\t return Latin1.parse(unescape(encodeURIComponent(utf8Str)));\n\t }\n\t };\n\n\t /**\n\t * Abstract buffered block algorithm template.\n\t *\n\t * The property blockSize must be implemented in a concrete subtype.\n\t *\n\t * @property {number} _minBufferSize The number of blocks that should be kept unprocessed in the buffer. Default: 0\n\t */\n\t var BufferedBlockAlgorithm = C_lib.BufferedBlockAlgorithm = Base.extend({\n\t /**\n\t * Resets this block algorithm's data buffer to its initial state.\n\t *\n\t * @example\n\t *\n\t * bufferedBlockAlgorithm.reset();\n\t */\n\t reset: function () {\n\t // Initial values\n\t this._data = new WordArray.init();\n\t this._nDataBytes = 0;\n\t },\n\n\t /**\n\t * Adds new data to this block algorithm's buffer.\n\t *\n\t * @param {WordArray|string} data The data to append. 
Strings are converted to a WordArray using UTF-8.\n\t *\n\t * @example\n\t *\n\t * bufferedBlockAlgorithm._append('data');\n\t * bufferedBlockAlgorithm._append(wordArray);\n\t */\n\t _append: function (data) {\n\t // Convert string to WordArray, else assume WordArray already\n\t if (typeof data == 'string') {\n\t data = Utf8.parse(data);\n\t }\n\n\t // Append\n\t this._data.concat(data);\n\t this._nDataBytes += data.sigBytes;\n\t },\n\n\t /**\n\t * Processes available data blocks.\n\t *\n\t * This method invokes _doProcessBlock(offset), which must be implemented by a concrete subtype.\n\t *\n\t * @param {boolean} doFlush Whether all blocks and partial blocks should be processed.\n\t *\n\t * @return {WordArray} The processed data.\n\t *\n\t * @example\n\t *\n\t * var processedData = bufferedBlockAlgorithm._process();\n\t * var processedData = bufferedBlockAlgorithm._process(!!'flush');\n\t */\n\t _process: function (doFlush) {\n\t var processedWords;\n\n\t // Shortcuts\n\t var data = this._data;\n\t var dataWords = data.words;\n\t var dataSigBytes = data.sigBytes;\n\t var blockSize = this.blockSize;\n\t var blockSizeBytes = blockSize * 4;\n\n\t // Count blocks ready\n\t var nBlocksReady = dataSigBytes / blockSizeBytes;\n\t if (doFlush) {\n\t // Round up to include partial blocks\n\t nBlocksReady = Math.ceil(nBlocksReady);\n\t } else {\n\t // Round down to include only full blocks,\n\t // less the number of blocks that must remain in the buffer\n\t nBlocksReady = Math.max((nBlocksReady | 0) - this._minBufferSize, 0);\n\t }\n\n\t // Count words ready\n\t var nWordsReady = nBlocksReady * blockSize;\n\n\t // Count bytes ready\n\t var nBytesReady = Math.min(nWordsReady * 4, dataSigBytes);\n\n\t // Process blocks\n\t if (nWordsReady) {\n\t for (var offset = 0; offset < nWordsReady; offset += blockSize) {\n\t // Perform concrete-algorithm logic\n\t this._doProcessBlock(dataWords, offset);\n\t }\n\n\t // Remove processed words\n\t processedWords = dataWords.splice(0, nWordsReady);\n\t data.sigBytes -= nBytesReady;\n\t }\n\n\t // Return processed words\n\t return new WordArray.init(processedWords, nBytesReady);\n\t },\n\n\t /**\n\t * Creates a copy of this object.\n\t *\n\t * @return {Object} The clone.\n\t *\n\t * @example\n\t *\n\t * var clone = bufferedBlockAlgorithm.clone();\n\t */\n\t clone: function () {\n\t var clone = Base.clone.call(this);\n\t clone._data = this._data.clone();\n\n\t return clone;\n\t },\n\n\t _minBufferSize: 0\n\t });\n\n\t /**\n\t * Abstract hasher template.\n\t *\n\t * @property {number} blockSize The number of 32-bit words this hasher operates on. 
Default: 16 (512 bits)\n\t */\n\t var Hasher = C_lib.Hasher = BufferedBlockAlgorithm.extend({\n\t /**\n\t * Configuration options.\n\t */\n\t cfg: Base.extend(),\n\n\t /**\n\t * Initializes a newly created hasher.\n\t *\n\t * @param {Object} cfg (Optional) The configuration options to use for this hash computation.\n\t *\n\t * @example\n\t *\n\t * var hasher = CryptoJS.algo.SHA256.create();\n\t */\n\t init: function (cfg) {\n\t // Apply config defaults\n\t this.cfg = this.cfg.extend(cfg);\n\n\t // Set initial values\n\t this.reset();\n\t },\n\n\t /**\n\t * Resets this hasher to its initial state.\n\t *\n\t * @example\n\t *\n\t * hasher.reset();\n\t */\n\t reset: function () {\n\t // Reset data buffer\n\t BufferedBlockAlgorithm.reset.call(this);\n\n\t // Perform concrete-hasher logic\n\t this._doReset();\n\t },\n\n\t /**\n\t * Updates this hasher with a message.\n\t *\n\t * @param {WordArray|string} messageUpdate The message to append.\n\t *\n\t * @return {Hasher} This hasher.\n\t *\n\t * @example\n\t *\n\t * hasher.update('message');\n\t * hasher.update(wordArray);\n\t */\n\t update: function (messageUpdate) {\n\t // Append\n\t this._append(messageUpdate);\n\n\t // Update the hash\n\t this._process();\n\n\t // Chainable\n\t return this;\n\t },\n\n\t /**\n\t * Finalizes the hash computation.\n\t * Note that the finalize operation is effectively a destructive, read-once operation.\n\t *\n\t * @param {WordArray|string} messageUpdate (Optional) A final message update.\n\t *\n\t * @return {WordArray} The hash.\n\t *\n\t * @example\n\t *\n\t * var hash = hasher.finalize();\n\t * var hash = hasher.finalize('message');\n\t * var hash = hasher.finalize(wordArray);\n\t */\n\t finalize: function (messageUpdate) {\n\t // Final message update\n\t if (messageUpdate) {\n\t this._append(messageUpdate);\n\t }\n\n\t // Perform concrete-hasher logic\n\t var hash = this._doFinalize();\n\n\t return hash;\n\t },\n\n\t blockSize: 512/32,\n\n\t /**\n\t * Creates a shortcut function to a hasher's object interface.\n\t *\n\t * @param {Hasher} hasher The hasher to create a helper for.\n\t *\n\t * @return {Function} The shortcut function.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t * var SHA256 = CryptoJS.lib.Hasher._createHelper(CryptoJS.algo.SHA256);\n\t */\n\t _createHelper: function (hasher) {\n\t return function (message, cfg) {\n\t return new hasher.init(cfg).finalize(message);\n\t };\n\t },\n\n\t /**\n\t * Creates a shortcut function to the HMAC's object interface.\n\t *\n\t * @param {Hasher} hasher The hasher to use in this HMAC helper.\n\t *\n\t * @return {Function} The shortcut function.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t * var HmacSHA256 = CryptoJS.lib.Hasher._createHmacHelper(CryptoJS.algo.SHA256);\n\t */\n\t _createHmacHelper: function (hasher) {\n\t return function (message, key) {\n\t return new C_algo.HMAC.init(hasher, key).finalize(message);\n\t };\n\t }\n\t });\n\n\t /**\n\t * Algorithm namespace.\n\t */\n\t var C_algo = C.algo = {};\n\n\t return C;\n\t}(Math));\n\n\n\treturn CryptoJS;\n\n}));",";(function (root, factory) {\n\tif (typeof exports === \"object\") {\n\t\t// CommonJS\n\t\tmodule.exports = exports = factory(require(\"./core\"));\n\t}\n\telse if (typeof define === \"function\" && define.amd) {\n\t\t// AMD\n\t\tdefine([\"./core\"], factory);\n\t}\n\telse {\n\t\t// Global (browser)\n\t\tfactory(root.CryptoJS);\n\t}\n}(this, function (CryptoJS) {\n\n\treturn CryptoJS.enc.Hex;\n\n}));",";(function (root, factory) {\n\tif (typeof exports === \"object\") {\n\t\t// 
CommonJS\n\t\tmodule.exports = exports = factory(require(\"./core\"));\n\t}\n\telse if (typeof define === \"function\" && define.amd) {\n\t\t// AMD\n\t\tdefine([\"./core\"], factory);\n\t}\n\telse {\n\t\t// Global (browser)\n\t\tfactory(root.CryptoJS);\n\t}\n}(this, function (CryptoJS) {\n\n\t(function () {\n\t // Shortcuts\n\t var C = CryptoJS;\n\t var C_lib = C.lib;\n\t var WordArray = C_lib.WordArray;\n\t var Hasher = C_lib.Hasher;\n\t var C_algo = C.algo;\n\n\t // Reusable object\n\t var W = [];\n\n\t /**\n\t * SHA-1 hash algorithm.\n\t */\n\t var SHA1 = C_algo.SHA1 = Hasher.extend({\n\t _doReset: function () {\n\t this._hash = new WordArray.init([\n\t 0x67452301, 0xefcdab89,\n\t 0x98badcfe, 0x10325476,\n\t 0xc3d2e1f0\n\t ]);\n\t },\n\n\t _doProcessBlock: function (M, offset) {\n\t // Shortcut\n\t var H = this._hash.words;\n\n\t // Working variables\n\t var a = H[0];\n\t var b = H[1];\n\t var c = H[2];\n\t var d = H[3];\n\t var e = H[4];\n\n\t // Computation\n\t for (var i = 0; i < 80; i++) {\n\t if (i < 16) {\n\t W[i] = M[offset + i] | 0;\n\t } else {\n\t var n = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16];\n\t W[i] = (n << 1) | (n >>> 31);\n\t }\n\n\t var t = ((a << 5) | (a >>> 27)) + e + W[i];\n\t if (i < 20) {\n\t t += ((b & c) | (~b & d)) + 0x5a827999;\n\t } else if (i < 40) {\n\t t += (b ^ c ^ d) + 0x6ed9eba1;\n\t } else if (i < 60) {\n\t t += ((b & c) | (b & d) | (c & d)) - 0x70e44324;\n\t } else /* if (i < 80) */ {\n\t t += (b ^ c ^ d) - 0x359d3e2a;\n\t }\n\n\t e = d;\n\t d = c;\n\t c = (b << 30) | (b >>> 2);\n\t b = a;\n\t a = t;\n\t }\n\n\t // Intermediate hash value\n\t H[0] = (H[0] + a) | 0;\n\t H[1] = (H[1] + b) | 0;\n\t H[2] = (H[2] + c) | 0;\n\t H[3] = (H[3] + d) | 0;\n\t H[4] = (H[4] + e) | 0;\n\t },\n\n\t _doFinalize: function () {\n\t // Shortcuts\n\t var data = this._data;\n\t var dataWords = data.words;\n\n\t var nBitsTotal = this._nDataBytes * 8;\n\t var nBitsLeft = data.sigBytes * 8;\n\n\t // Add padding\n\t dataWords[nBitsLeft >>> 5] |= 0x80 << (24 - nBitsLeft % 32);\n\t dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 14] = Math.floor(nBitsTotal / 0x100000000);\n\t dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 15] = nBitsTotal;\n\t data.sigBytes = dataWords.length * 4;\n\n\t // Hash final blocks\n\t this._process();\n\n\t // Return final computed hash\n\t return this._hash;\n\t },\n\n\t clone: function () {\n\t var clone = Hasher.clone.call(this);\n\t clone._hash = this._hash.clone();\n\n\t return clone;\n\t }\n\t });\n\n\t /**\n\t * Shortcut function to the hasher's object interface.\n\t *\n\t * @param {WordArray|string} message The message to hash.\n\t *\n\t * @return {WordArray} The hash.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t * var hash = CryptoJS.SHA1('message');\n\t * var hash = CryptoJS.SHA1(wordArray);\n\t */\n\t C.SHA1 = Hasher._createHelper(SHA1);\n\n\t /**\n\t * Shortcut function to the HMAC's object interface.\n\t *\n\t * @param {WordArray|string} message The message to hash.\n\t * @param {WordArray|string} key The secret key.\n\t *\n\t * @return {WordArray} The HMAC.\n\t *\n\t * @static\n\t *\n\t * @example\n\t *\n\t * var hmac = CryptoJS.HmacSHA1(message, key);\n\t */\n\t C.HmacSHA1 = Hasher._createHmacHelper(SHA1);\n\t}());\n\n\n\treturn CryptoJS.SHA1;\n\n}));","module.exports = require(\"crypto\");","module.exports = require(\"fs/promises\");","module.exports = require(\"path\");","\"use strict\";Object.defineProperty(exports, \"__esModule\", {value: true});// src/index.ts\nvar _redis = require('@upstash/redis');\nvar _kv = 
null;\nprocess.env.UPSTASH_DISABLE_TELEMETRY = \"1\";\nvar VercelKV = class extends _redis.Redis {\n // This API is based on https://github.com/redis/node-redis#scan-iterator which is not supported in @upstash/redis\n /**\n * Same as `scan` but returns an AsyncIterator to allow iteration via `for await`.\n */\n async *scanIterator(options) {\n let cursor = \"0\";\n let keys;\n do {\n [cursor, keys] = await this.scan(cursor, options);\n for (const key of keys) {\n yield key;\n }\n } while (cursor !== \"0\");\n }\n /**\n * Same as `hscan` but returns an AsyncIterator to allow iteration via `for await`.\n */\n async *hscanIterator(key, options) {\n let cursor = \"0\";\n let items;\n do {\n [cursor, items] = await this.hscan(key, cursor, options);\n for (const item of items) {\n yield item;\n }\n } while (cursor !== \"0\");\n }\n /**\n * Same as `sscan` but returns an AsyncIterator to allow iteration via `for await`.\n */\n async *sscanIterator(key, options) {\n let cursor = \"0\";\n let items;\n do {\n [cursor, items] = await this.sscan(key, cursor, options);\n for (const item of items) {\n yield item;\n }\n } while (cursor !== \"0\");\n }\n /**\n * Same as `zscan` but returns an AsyncIterator to allow iteration via `for await`.\n */\n async *zscanIterator(key, options) {\n let cursor = \"0\";\n let items;\n do {\n [cursor, items] = await this.zscan(key, cursor, options);\n for (const item of items) {\n yield item;\n }\n } while (cursor !== \"0\");\n }\n};\nfunction createClient(config) {\n return new VercelKV({\n // The Next.js team recommends no value or `default` for fetch requests's `cache` option\n // upstash/redis defaults to `no-store`, so we enforce `default`\n cache: \"default\",\n enableAutoPipelining: true,\n ...config\n });\n}\nvar src_default = new Proxy(\n {},\n {\n get(target, prop, receiver) {\n if (prop === \"then\" || prop === \"parse\") {\n return Reflect.get(target, prop, receiver);\n }\n if (!_kv) {\n if (!process.env.KV_REST_API_URL || !process.env.KV_REST_API_TOKEN) {\n throw new Error(\n \"@vercel/kv: Missing required environment variables KV_REST_API_URL and KV_REST_API_TOKEN\"\n );\n }\n console.warn(\n '\\x1B[33m\"The default export has been moved to a named export and it will be removed in version 1, change to import { kv }\\x1B[0m\"'\n );\n _kv = createClient({\n url: process.env.KV_REST_API_URL,\n token: process.env.KV_REST_API_TOKEN\n });\n }\n return Reflect.get(_kv, prop);\n }\n }\n);\nvar kv = new Proxy(\n {},\n {\n get(target, prop) {\n if (!_kv) {\n if (!process.env.KV_REST_API_URL || !process.env.KV_REST_API_TOKEN) {\n throw new Error(\n \"@vercel/kv: Missing required environment variables KV_REST_API_URL and KV_REST_API_TOKEN\"\n );\n }\n _kv = createClient({\n url: process.env.KV_REST_API_URL,\n token: process.env.KV_REST_API_TOKEN\n });\n }\n return Reflect.get(_kv, prop);\n }\n }\n);\n\n\n\n\n\nexports.VercelKV = VercelKV; exports.createClient = createClient; exports.default = src_default; exports.kv = kv;\n//# sourceMappingURL=index.cjs.map","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module 
function\n\tvar threw = true;\n\ttry {\n\t\t__webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\t\tthrew = false;\n\t} finally {\n\t\tif(threw) delete __webpack_module_cache__[moduleId];\n\t}\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n","\nif (typeof __webpack_require__ !== 'undefined') __webpack_require__.ab = __dirname + \"/\";","const fs = require('fs/promises')\nconst path = require('path')\n\nconst { createClient } = require('@vercel/kv')\n\nasync function collectExamplesResult(manifestFile) {\n const file = path.join(process.cwd(), manifestFile)\n const contents = await fs.readFile(file, 'utf-8')\n const results = JSON.parse(contents)\n\n let failingCount = 0\n let passingCount = 0\n\n const currentDate = new Date()\n const isoString = currentDate.toISOString()\n const timestamp = isoString.slice(0, 19).replace('T', ' ')\n\n for (const isPassing of Object.values(results)) {\n if (isPassing) {\n passingCount += 1\n } else {\n failingCount += 1\n }\n }\n const status = `${process.env.GITHUB_SHA}\\t${timestamp}\\t${passingCount}/${\n passingCount + failingCount\n }`\n\n return {\n status,\n // Uses JSON.stringify to create minified JSON, otherwise whitespace is preserved.\n data: JSON.stringify(results),\n }\n}\n\nasync function collectResults(manifestFile) {\n const file = path.join(process.cwd(), manifestFile)\n const contents = await fs.readFile(file, 'utf-8')\n const results = JSON.parse(contents)\n\n let passingTests = ''\n let failingTests = ''\n let passCount = 0\n let failCount = 0\n\n const currentDate = new Date()\n const isoString = currentDate.toISOString()\n const timestamp = isoString.slice(0, 19).replace('T', ' ')\n\n if (results.version === 2) {\n for (const [testFileName, result] of Object.entries(results.suites)) {\n let suitePassCount = 0\n let suiteFailCount = 0\n\n suitePassCount += result.passed.length\n suiteFailCount += result.failed.length\n\n if (suitePassCount > 0) {\n passingTests += `${testFileName}\\n`\n }\n\n if (suiteFailCount > 0) {\n failingTests += `${testFileName}\\n`\n }\n\n for (const passed of result.passed) {\n const passedName = passed.replaceAll('`', '\\\\`')\n passingTests += `* ${passedName}\\n`\n }\n\n for (const passed of result.failed) {\n const failedName = passed.replaceAll('`', '\\\\`')\n failingTests += `* ${failedName}\\n`\n }\n\n passCount += suitePassCount\n failCount += suiteFailCount\n\n if (suitePassCount > 0) {\n passingTests += `\\n`\n }\n\n if (suiteFailCount > 0) {\n failingTests += `\\n`\n }\n }\n\n const testRun = `${process.env.GITHUB_SHA}\\t${timestamp}\\t${passCount}/${\n passCount + failCount\n }`\n return { testRun, passingTests, failingTests }\n } else {\n for (const [testFileName, result] of Object.entries(results)) {\n let suitePassCount = 0\n let suiteFailCount = 0\n\n suitePassCount += result.passed.length\n suiteFailCount += result.failed.length\n\n if (suitePassCount > 0) {\n passingTests += `${testFileName}\\n`\n }\n\n if (suiteFailCount > 0) {\n failingTests += `${testFileName}\\n`\n }\n\n for (const passed of result.passed) {\n const passedName = passed.replaceAll('`', '\\\\`')\n passingTests += `* ${passedName}\\n`\n }\n\n for (const passed of result.failed) {\n const failedName = passed.replaceAll('`', '\\\\`')\n failingTests += `* ${failedName}\\n`\n }\n\n passCount += suitePassCount\n failCount += suiteFailCount\n\n if (suitePassCount > 0) {\n passingTests += `\\n`\n }\n\n if (suiteFailCount > 0) {\n failingTests += `\\n`\n }\n 
}\n const testRun = `${process.env.GITHUB_SHA}\\t${timestamp}\\t${passCount}/${\n passCount + failCount\n }`\n\n return { testRun, passingTests, failingTests }\n }\n}\n\nasync function collectAndUpload(\n kv,\n { jsonPrefix, kvPrefix, deploymentDomain }\n) {\n const developmentResult = await collectResults(\n `test/${jsonPrefix}dev-tests-manifest.json`\n )\n const productionResult = await collectResults(\n `test/${jsonPrefix}build-tests-manifest.json`\n )\n const developmentExamplesResult = await collectExamplesResult(\n `test/${jsonPrefix}dev-examples-manifest.json`\n )\n\n console.log('TEST RESULT DEVELOPMENT')\n console.log(developmentResult.testRun)\n\n console.log('TEST RESULT PRODUCTION')\n console.log(productionResult.testRun)\n\n console.log('EXAMPLES RESULT')\n console.log(developmentExamplesResult.status)\n\n await kv.rpush(`${kvPrefix}test-runs`, developmentResult.testRun)\n await kv.rpush(`${kvPrefix}test-runs-production`, productionResult.testRun)\n await kv.rpush(`${kvPrefix}examples-runs`, developmentExamplesResult.status)\n console.log('SUCCESSFULLY SAVED RUNS')\n\n await kv.set(`${kvPrefix}passing-tests`, developmentResult.passingTests)\n await kv.set(\n `${kvPrefix}passing-tests-production`,\n productionResult.passingTests\n )\n console.log('SUCCESSFULLY SAVED PASSING')\n\n await kv.set(`${kvPrefix}failing-tests`, developmentResult.failingTests)\n await kv.set(\n `${kvPrefix}failing-tests-production`,\n productionResult.failingTests\n )\n console.log('SUCCESSFULLY SAVED FAILING')\n\n await kv.set(`${kvPrefix}examples-data`, developmentExamplesResult.data)\n console.log('SUCCESSFULLY SAVED EXAMPLES')\n\n if (deploymentDomain != null) {\n // Upstash does not provide strong consistency, so just wait a couple\n // seconds before invalidating the cache in case of replication lag.\n //\n // https://upstash.com/docs/redis/features/consistency\n await new Promise((resolve) => setTimeout(resolve, 2000))\n try {\n const response = await fetch(\n `https://${deploymentDomain}/api/revalidate`,\n {\n method: 'POST',\n headers: {\n 'X-Auth-Token': process.env.TURBOYET_TOKEN,\n 'Content-Type': 'application/json',\n },\n }\n )\n const responseJson = await response.json()\n if (!responseJson.revalidated) {\n throw new Error(responseJson.error)\n }\n console.log('SUCCESSFULLY REVALIDATED VERCEL DATA CACHE')\n } catch (error) {\n // non-fatal: the cache will eventually expire anyways\n console.error('FAILED TO REVALIDATE VERCEL DATA CACHE', error)\n }\n }\n}\n\nasync function main() {\n try {\n const kv = createClient({\n url: process.env.TURBOYET_KV_REST_API_URL,\n token: process.env.TURBOYET_KV_REST_API_TOKEN,\n })\n console.log('### UPLOADING TURBOPACK DATA')\n await collectAndUpload(kv, {\n jsonPrefix: 'turbopack-',\n kvPrefix: '',\n deploymentDomain: 'areweturboyet.com',\n })\n console.log('### UPLOADING RSPACK DATA')\n await collectAndUpload(kv, {\n jsonPrefix: 'rspack-',\n kvPrefix: 'rspack-',\n deploymentDomain: 'arewerspackyet.com',\n })\n } catch (error) {\n console.log(error)\n }\n}\n\nmain()\n"],"names":[],"sourceRoot":""} \ No newline at end of file diff --git a/.github/actions/upload-turboyet-data/src/main.js b/.github/actions/upload-turboyet-data/src/main.js index f518be5a81833..89fe7fa9ca586 100644 --- a/.github/actions/upload-turboyet-data/src/main.js +++ b/.github/actions/upload-turboyet-data/src/main.js @@ -224,6 +224,7 @@ async function main() { await collectAndUpload(kv, { jsonPrefix: 'rspack-', kvPrefix: 'rspack-', + deploymentDomain: 'arewerspackyet.com', }) } catch 
(error) { console.log(error) diff --git a/.github/labeler.json b/.github/labeler.json index 0b71fa368ec18..6cba95c70db4e 100644 --- a/.github/labeler.json +++ b/.github/labeler.json @@ -5,7 +5,35 @@ "examples": ["examples/**"], "Font (next/font)": ["**/*font*"], "tests": ["test/**", "bench/**"], - "Turbopack": ["crates/next-*/**"], + "Turbopack": ["crates/next-*/**", "crates/napi/**", "turbopack/**"], + "Rspack": [ + { "type": "user", "pattern": "9aoy" }, + { "type": "user", "pattern": "ahabhgk" }, + { "type": "user", "pattern": "bvanjoi" }, + { "type": "user", "pattern": "chenjiahan" }, + { "type": "user", "pattern": "CPunisher" }, + { "type": "user", "pattern": "easy1090" }, + { "type": "user", "pattern": "fi3ework" }, + { "type": "user", "pattern": "GiveMe-A-Name" }, + { "type": "user", "pattern": "h-a-n-a" }, + { "type": "user", "pattern": "hardfist" }, + { "type": "user", "pattern": "inottn" }, + { "type": "user", "pattern": "jerrykingxyz" }, + { "type": "user", "pattern": "JSerFeng" }, + { "type": "user", "pattern": "lingyucoder" }, + { "type": "user", "pattern": "nyqykk" }, + { "type": "user", "pattern": "sanyuan0704" }, + { "type": "user", "pattern": "ScriptedAlchemy" }, + { "type": "user", "pattern": "SoonIter" }, + { "type": "user", "pattern": "stormslowly" }, + { "type": "user", "pattern": "SyMind" }, + { "type": "user", "pattern": "Timeless0911" }, + { "type": "user", "pattern": "valorkin" }, + { "type": "user", "pattern": "xc2" }, + { "type": "user", "pattern": "zackarychapple" }, + { "type": "user", "pattern": "zoolsher" }, + "packages/next/src/build/**" + ], "created-by: Chrome Aurora": [ { "type": "user", "pattern": "atcastle" }, { "type": "user", "pattern": "devknoll" }, @@ -31,6 +59,7 @@ { "type": "user", "pattern": "ijjk" }, { "type": "user", "pattern": "lazarv" }, { "type": "user", "pattern": "lubieowoce" }, + { "type": "user", "pattern": "RobPruzan" }, { "type": "user", "pattern": "samcx" }, { "type": "user", "pattern": "sebmarkbage" }, { "type": "user", "pattern": "shuding" }, @@ -45,12 +74,14 @@ { "type": "user", "pattern": "leerob" }, { "type": "user", "pattern": "manovotny" }, { "type": "user", "pattern": "molebox" }, - { "type": "user", "pattern": "timeyoutakeit" } + { "type": "user", "pattern": "timeyoutakeit" }, + { "type": "user", "pattern": "icyJoseph" } ], "created-by: Turbopack team": [ { "type": "user", "pattern": "bgw" }, { "type": "user", "pattern": "Cy-Tek" }, { "type": "user", "pattern": "kdy1" }, + { "type": "user", "pattern": "lukesandberg" }, { "type": "user", "pattern": "mischnic" }, { "type": "user", "pattern": "padmaia" }, { "type": "user", "pattern": "sokra" }, diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml deleted file mode 100644 index f116ec5a09e42..0000000000000 --- a/.github/workflows/bench.yml +++ /dev/null @@ -1,70 +0,0 @@ -name: Benchmark - -on: - push: - branches: - - canary - paths: - - '**/crates/**' - pull_request: - types: ['labeled'] - -concurrency: - group: ${{ github.workflow }}-${{ github.sha }} - cancel-in-progress: ${{ github.event_name == 'pull_request' }} - -env: - CI: 1 - CARGO_INCREMENTAL: 0 - # For faster CI - RUST_LOG: 'off' - -jobs: - list-crates: - name: List crates to benchmark - runs-on: - - 'self-hosted' - - 'linux' - - 'x64' - - 'metal' - if: ${{ github.event.label.name == 'benchmark' }} - outputs: - crates: ${{ steps.list-crates.outputs.crates }} - steps: - - uses: actions/checkout@v4 - - - name: List crates - id: list-crates - run: echo 
"crates=$(./scripts/cargo/bench/list-crates-with-bench.sh)" >> $GITHUB_OUTPUT - - benchmark-crate: - name: Benchmark ${{ matrix.crate }} - runs-on: ubuntu-22.04 - needs: list-crates - # Limit the number of concurrent jobs to 1 - concurrency: - group: ${{ github.workflow }} - strategy: - matrix: - crate: ${{fromJson(needs.list-crates.outputs.crates)}} - steps: - - uses: actions/checkout@v4 - - - name: Install Rust - uses: actions-rs/toolchain@v1 - with: - profile: minimal - - - name: Install cargo-codspeed - uses: taiki-e/install-action@v2 - with: - tool: cargo-codspeed@2.8.1 - - - name: Build the benchmark target(s) - run: cargo codspeed build -p ${{ matrix.crate }} - - - name: Run the benchmarks - uses: CodSpeedHQ/action@v3 - with: - run: cargo codspeed run - token: ${{ secrets.CODSPEED_TOKEN }} diff --git a/.github/workflows/build_and_deploy.yml b/.github/workflows/build_and_deploy.yml index 950af755de81b..a069ee307df56 100644 --- a/.github/workflows/build_and_deploy.yml +++ b/.github/workflows/build_and_deploy.yml @@ -3,27 +3,41 @@ name: build-and-deploy on: push: - # TODO: Run only on canary pushes but PR syncs. - # Requires checking if CI is approved + # we need the preview tarball for deploy tests + pull_request: + types: [opened, synchronize] workflow_dispatch: env: - NAPI_CLI_VERSION: 2.16.2 + NAPI_CLI_VERSION: 2.18.4 TURBO_VERSION: 2.3.3 NODE_LTS_VERSION: 20 CARGO_PROFILE_RELEASE_LTO: 'true' TURBO_TEAM: 'vercel' TURBO_CACHE: 'remote:rw' + # Without this environment variable, rust-lld will fail because some dependencies defaults to newer version of macOS by default. + # + # See https://doc.rust-lang.org/rustc/platform-support/apple-darwin.html#os-version for more details + MACOSX_DEPLOYMENT_TARGET: 11.0 + # This will become "true" if the latest commit (merged release PR) is either: + # - "Version Packages (#)" + # - "Version Pacakges (canary/rc) (#)" + # set from scripts/check-is-release.js + __NEW_RELEASE: 'false' jobs: deploy-target: runs-on: ubuntu-latest + # Don't trigger this job on `pull_request` events from upstream branches. + # Those would already run this job on the `push` event + if: ${{ github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork }} outputs: value: ${{ steps.deploy-target.outputs.value }} steps: - uses: actions/checkout@v4 with: fetch-depth: 1 + - run: echo "${{ github.event.after }}" - name: Setup node uses: actions/setup-node@v4 with: @@ -39,7 +53,13 @@ jobs: # 'staging' for canary branch since that will eventually be published i.e. become the production build. id: deploy-target run: | - if [[ $(node ./scripts/check-is-release.js 2> /dev/null || :) = v* ]]; + # TODO: Remove the new release check once the new release workflow is fully replaced. + RELEASE_CHECK=$(node ./scripts/check-is-release.js 2> /dev/null || :) + if [[ $RELEASE_CHECK == 'new-release' ]]; + then + echo "__NEW_RELEASE=true" >> $GITHUB_ENV + echo "value=production" >> $GITHUB_OUTPUT + elif [[ $RELEASE_CHECK == v* ]]; then echo "value=production" >> $GITHUB_OUTPUT elif [ '${{ github.ref }}' == 'refs/heads/canary' ] @@ -516,7 +536,9 @@ jobs: path: crates/wasm - name: Create tarballs - run: node scripts/create-preview-tarballs.js "${{ github.sha }}" "${{ runner.temp }}/preview-tarballs" + # github.event.after is available on push and pull_request#synchronize events. + # For workflow_dispatch events, github.sha is the head commit. 
+ run: node scripts/create-preview-tarballs.js "${{ github.sha }}" "${{ github.event.after || github.sha }}" "${{ runner.temp }}/preview-tarballs" - name: Upload tarballs uses: actions/upload-artifact@v4 @@ -581,13 +603,37 @@ jobs: - run: npm i -g npm@10.4.0 # need latest version for provenance (pinning to avoid bugs) - run: echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc - run: ./scripts/publish-native.js + # Legacy release process - run: ./scripts/publish-release.js + if: ${{ env.__NEW_RELEASE == 'false' }} env: RELEASE_BOT_GITHUB_TOKEN: ${{ secrets.RELEASE_BOT_GITHUB_TOKEN }} + # New release process + - name: Publish to NPM + id: changesets + # TODO: Change to IS_RELEASE condition when new release becomes stable. + if: ${{ env.__NEW_RELEASE == 'true' }} + uses: changesets/action@v1 + with: + publish: pnpm ci:publish + env: + GITHUB_TOKEN: ${{ secrets.RELEASE_BOT_GITHUB_TOKEN }} + NPM_TOKEN: ${{ secrets.NPM_TOKEN_ELEVATED }} + + - name: Send a Slack notification of the publish status + # TODO: Change to IS_RELEASE condition when new release becomes stable. + if: ${{ env.__NEW_RELEASE == 'true' && (steps.changesets.outputs.published == 'true' || steps.changesets.outputs.published == 'false') }} + run: pnpm tsx scripts/release/slack.ts + env: + SLACK_TOKEN: ${{ secrets.SLACK_TOKEN }} + RELEASE_STATUS: ${{ steps.changesets.outputs.published }} + WORKFLOW_LINK: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + WORKFLOW_ACTOR: ${{ github.actor }} + - name: Upload npm log artifact + if: steps.changesets.outputs.published == 'true' uses: actions/upload-artifact@v4 - if: always() with: name: npm-publish-logs path: /home/runner/.npm/_logs/* @@ -651,6 +697,15 @@ jobs: VERCEL_API_TOKEN: ${{ secrets.VERCEL_API_TOKEN }} DEPLOY_ENVIRONMENT: production + buildPassed: + needs: ['deploy-target', 'build', 'build-wasm', 'build-native'] + if: ${{ always() && needs.deploy-target.outputs.value != '' }} + name: thank you, build + runs-on: ubuntu-latest + steps: + - run: exit 1 + if: ${{ always() && (contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled')) }} + releaseStats: name: Release Stats runs-on: diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml index 20b2158864cd5..a2fe5c97dba76 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yml @@ -6,27 +6,13 @@ on: pull_request: types: [opened, synchronize] +# NOTE: anything in `afterBuild` inherits environment variables defined in +# `build_reusable.yml` (not these!) because that job executes within the context +# of that workflow. Environment variables are not automatically passed to +# reusable workflows. 
env: - NAPI_CLI_VERSION: 2.14.7 - TURBO_VERSION: 2.3.3 NODE_MAINTENANCE_VERSION: 18 NODE_LTS_VERSION: 20 - TEST_CONCURRENCY: 8 - # disable backtrace for test snapshots - RUST_BACKTRACE: 0 - - TURBO_TEAM: 'vercel' - TURBO_CACHE: 'remote:rw' - NEXT_TELEMETRY_DISABLED: 1 - # we build a dev binary for use in CI so skip downloading - # canary next-swc binaries in the monorepo - NEXT_SKIP_NATIVE_POSTINSTALL: 1 - DATADOG_API_KEY: ${{ secrets.DATA_DOG_API_KEY }} - NEXT_JUNIT_TEST_REPORT: 'true' - DD_ENV: 'ci' - TEST_TIMINGS_TOKEN: ${{ secrets.TEST_TIMINGS_TOKEN }} - NEXT_TEST_JOB: 1 - NEXT_TEST_PREFER_OFFLINE: 1 jobs: optimize-ci: @@ -53,7 +39,8 @@ jobs: - name: check for release id: is-release run: | - if [[ $(node ./scripts/check-is-release.js 2> /dev/null || :) = v* ]]; + RELEASE_CHECK=$(node ./scripts/check-is-release.js 2> /dev/null || :) + if [[ $RELEASE_CHECK == "new-release" || $RELEASE_CHECK == v* ]]; then echo "IS_RELEASE=true" >> $GITHUB_OUTPUT else @@ -63,6 +50,11 @@ jobs: outputs: docs-only: ${{ steps.docs-change.outputs.DOCS_ONLY != 'false' }} is-release: ${{ steps.is-release.outputs.IS_RELEASE == 'true' }} + rspack: >- + ${{ + github.event_name == 'pull_request' && + contains(github.event.pull_request.labels.*.name, 'Rspack') + }} build-native: name: build-native @@ -100,7 +92,12 @@ jobs: needs: ['build-next'] uses: ./.github/workflows/build_reusable.yml with: - afterBuild: pnpm lint-no-typescript && pnpm check-examples + skipNativeBuild: 'yes' + skipNativeInstall: 'yes' + afterBuild: | + pnpm lint-no-typescript + pnpm check-examples + pnpm validate-externals-doc stepName: 'lint' secrets: inherit @@ -210,7 +207,11 @@ jobs: - '--scenario=heavy-npm-deps-build-turbo-cache-enabled --page=homepage' uses: ./.github/workflows/build_reusable.yml with: - afterBuild: pnpm install && ./node_modules/.bin/devlow-bench ./scripts/devlow-bench.mjs --datadog=ubuntu-latest-16-core ${{ matrix.mode }} ${{ matrix.selector }} + afterBuild: | + ./node_modules/.bin/devlow-bench ./scripts/devlow-bench.mjs \ + --datadog=ubuntu-latest-16-core \ + ${{ matrix.mode }} \ + ${{ matrix.selector }} stepName: 'devlow-bench-${{ matrix.mode }}-${{ matrix.selector }}' secrets: inherit @@ -220,8 +221,10 @@ jobs: if: ${{ needs.optimize-ci.outputs.skip == 'false' && needs.changes.outputs.docs-only == 'false' }} uses: ./.github/workflows/build_reusable.yml with: + skipNativeBuild: 'yes' stepName: 'test-devlow' - afterBuild: pnpm install && pnpm run --filter=devlow-bench test + afterBuild: | + pnpm run --filter=devlow-bench test secrets: inherit test-turbopack-dev: @@ -240,7 +243,17 @@ jobs: react: ['', '18.3.1'] uses: ./.github/workflows/build_reusable.yml with: - afterBuild: RUST_BACKTRACE=0 NEXT_EXTERNAL_TESTS_FILTERS="$(pwd)/test/turbopack-dev-tests-manifest.json" IS_TURBOPACK_TEST=1 TURBOPACK_DEV=1 NEXT_E2E_TEST_TIMEOUT=240000 NEXT_TEST_MODE=dev NEXT_TEST_REACT_VERSION="${{ matrix.react }}" node run-tests.js --test-pattern '^(test\/(development|e2e))/.*\.test\.(js|jsx|ts|tsx)$' --timings -g ${{ matrix.group }} -c ${TEST_CONCURRENCY} + afterBuild: | + export NEXT_EXTERNAL_TESTS_FILTERS="$(pwd)/test/turbopack-dev-tests-manifest.json" + export IS_TURBOPACK_TEST=1 + export TURBOPACK_DEV=1 + export NEXT_TEST_MODE=dev + export NEXT_TEST_REACT_VERSION="${{ matrix.react }}" + + node run-tests.js \ + --test-pattern '^(test\/(development|e2e))/.*\.test\.(js|jsx|ts|tsx)$' \ + --timings \ + -g ${{ matrix.group }} stepName: 'test-turbopack-dev-react-${{ matrix.react }}-${{ matrix.group }}' secrets: inherit @@ -262,7 +275,16 @@ jobs: 
uses: ./.github/workflows/build_reusable.yml with: nodeVersion: 18.18.2 - afterBuild: RUST_BACKTRACE=0 NEXT_EXTERNAL_TESTS_FILTERS="$(pwd)/test/turbopack-dev-tests-manifest.json" IS_TURBOPACK_TEST=1 TURBOPACK_DEV=1 NEXT_TEST_REACT_VERSION="${{ matrix.react }}" node run-tests.js --timings -g ${{ matrix.group }} -c ${TEST_CONCURRENCY} --type integration + afterBuild: | + export NEXT_EXTERNAL_TESTS_FILTERS="$(pwd)/test/turbopack-dev-tests-manifest.json" + export IS_TURBOPACK_TEST=1 + export TURBOPACK_DEV=1 + export NEXT_TEST_REACT_VERSION="${{ matrix.react }}" + + node run-tests.js \ + --timings \ + -g ${{ matrix.group }} \ + --type integration stepName: 'test-turbopack-integration-react-${{ matrix.react }}-${{ matrix.group }}' secrets: inherit @@ -287,7 +309,16 @@ jobs: uses: ./.github/workflows/build_reusable.yml with: nodeVersion: 18.18.2 - afterBuild: RUST_BACKTRACE=0 NEXT_EXTERNAL_TESTS_FILTERS="$(pwd)/test/turbopack-build-tests-manifest.json" IS_TURBOPACK_TEST=1 TURBOPACK_BUILD=1 NEXT_TEST_MODE=start NEXT_TEST_REACT_VERSION="${{ matrix.react }}" node run-tests.js --timings -g ${{ matrix.group }} -c ${TEST_CONCURRENCY} --type production + afterBuild: | + export NEXT_EXTERNAL_TESTS_FILTERS="$(pwd)/test/turbopack-build-tests-manifest.json" + export IS_TURBOPACK_TEST=1 + export TURBOPACK_BUILD=1 + export NEXT_TEST_MODE=start + export NEXT_TEST_REACT_VERSION="${{ matrix.react }}" + # TODO(PACK-4578): Remove + export TURBOPACK_TEMP_DISABLE_DUPLICATE_MODULES_CHECK=1 + + node run-tests.js --timings -g ${{ matrix.group }} --type production stepName: 'test-turbopack-production-react-${{ matrix.react }}-${{ matrix.group }}' secrets: inherit @@ -303,10 +334,136 @@ jobs: uses: ./.github/workflows/build_reusable.yml with: nodeVersion: 18.18.2 - afterBuild: RUST_BACKTRACE=0 NEXT_EXTERNAL_TESTS_FILTERS="$(pwd)/test/turbopack-build-tests-manifest.json" IS_TURBOPACK_TEST=1 TURBOPACK_BUILD=1 node run-tests.js --timings -g ${{ matrix.group }} -c ${TEST_CONCURRENCY} --type integration + afterBuild: | + export NEXT_EXTERNAL_TESTS_FILTERS="$(pwd)/test/turbopack-build-tests-manifest.json" + export IS_TURBOPACK_TEST=1 + export TURBOPACK_BUILD=1 + + node run-tests.js \ + --timings \ + -g ${{ matrix.group }} \ + --type integration stepName: 'test-turbopack-production-integration-${{ matrix.group }}' secrets: inherit + test-rspack-dev: + name: test rspack dev + needs: ['optimize-ci', 'changes', 'build-next', 'build-native'] + if: ${{ needs.optimize-ci.outputs.skip == 'false' && needs.changes.outputs.docs-only == 'false' && needs.changes.outputs.rspack == 'true' }} + strategy: + fail-fast: false + matrix: + group: [1/5, 2/5, 3/5, 4/5, 5/5] + uses: ./.github/workflows/build_reusable.yml + with: + afterBuild: | + export NEXT_EXTERNAL_TESTS_FILTERS="$(pwd)/test/rspack-dev-tests-manifest.json" + export NEXT_TEST_MODE=dev + + # rspack flags + export NEXT_RSPACK=1 + export NEXT_TEST_USE_RSPACK=1 + + # HACK: Despite the name, this environment variable is only used to gate + # tests, so it's applicable to rspack + export TURBOPACK_DEV=1 + + node run-tests.js \ + --test-pattern '^(test\/(development|e2e))/.*\.test\.(js|jsx|ts|tsx)$' \ + --timings \ + -g ${{ matrix.group }} + stepName: 'test-rspack-dev-react-${{ matrix.react }}-${{ matrix.group }}' + secrets: inherit + + test-rspack-integration: + name: test rspack development integration + needs: ['optimize-ci', 'changes', 'build-next', 'build-native'] + if: ${{ needs.optimize-ci.outputs.skip == 'false' && needs.changes.outputs.docs-only == 'false' && 
needs.changes.outputs.rspack == 'true' }} + strategy: + fail-fast: false + matrix: + group: [1/6, 2/6, 3/6, 4/6, 5/6, 6/6] + uses: ./.github/workflows/build_reusable.yml + with: + nodeVersion: 18.18.2 + afterBuild: | + export NEXT_EXTERNAL_TESTS_FILTERS="$(pwd)/test/rspack-dev-tests-manifest.json" + + # rspack flags + export NEXT_RSPACK=1 + export NEXT_TEST_USE_RSPACK=1 + + # HACK: Despite the name, this environment variable is only used to gate + # tests, so it's applicable to rspack + export TURBOPACK_DEV=1 + + node run-tests.js \ + --timings \ + -g ${{ matrix.group }} \ + --type integration + stepName: 'test-rspack-integration-react-${{ matrix.react }}-${{ matrix.group }}' + secrets: inherit + + test-rspack-production: + name: test rspack production + needs: ['optimize-ci', 'changes', 'build-next', 'build-native'] + if: ${{ needs.optimize-ci.outputs.skip == 'false' && needs.changes.outputs.docs-only == 'false' && needs.changes.outputs.rspack == 'true' }} + strategy: + fail-fast: false + matrix: + exclude: + # Excluding React 18 tests unless on `canary` branch until budget is approved. + - react: ${{ github.event_name == 'pull_request' && !contains(github.event.pull_request.labels.*.name, 'run-react-18-tests') && '18.3.1' }} + group: [1/7, 2/7, 3/7, 4/7, 5/7, 6/7, 7/7] + # Empty value uses default + uses: ./.github/workflows/build_reusable.yml + with: + nodeVersion: 18.18.2 + afterBuild: | + export NEXT_EXTERNAL_TESTS_FILTERS="$(pwd)/test/rspack-build-tests-manifest.json" + export NEXT_TEST_MODE=start + + # rspack flags + export NEXT_RSPACK=1 + export NEXT_TEST_USE_RSPACK=1 + + # HACK: Despite the name, this environment variable is only used to gate + # tests, so it's applicable to rspack + export TURBOPACK_BUILD=1 + + node run-tests.js --timings -g ${{ matrix.group }} --type production + stepName: 'test-rspack-production-react-${{ matrix.react }}-${{ matrix.group }}' + secrets: inherit + + test-rspack-production-integration: + name: test rspack production integration + needs: ['optimize-ci', 'changes', 'build-next', 'build-native'] + if: ${{ needs.optimize-ci.outputs.skip == 'false' && needs.changes.outputs.docs-only == 'false' && needs.changes.outputs.rspack == 'true' }} + strategy: + fail-fast: false + matrix: + group: [1/7, 2/7, 3/7, 4/7, 5/7, 6/7, 7/7] + uses: ./.github/workflows/build_reusable.yml + with: + nodeVersion: 18.18.2 + afterBuild: | + export NEXT_EXTERNAL_TESTS_FILTERS="$(pwd)/test/rspack-build-tests-manifest.json" + + # rspack flags + export NEXT_RSPACK=1 + export NEXT_TEST_USE_RSPACK=1 + + # HACK: Despite the name, this environment variable is only used to gate + # tests, so it's applicable to rspack + export TURBOPACK_BUILD=1 + + node run-tests.js \ + --timings \ + -g ${{ matrix.group }} \ + --type integration + stepName: 'test-rspack-production-integration-${{ matrix.group }}' + secrets: inherit + test-next-swc-wasm: name: test next-swc wasm needs: ['optimize-ci', 'changes', 'build-next'] @@ -314,7 +471,19 @@ jobs: uses: ./.github/workflows/build_reusable.yml with: - afterBuild: rustup target add wasm32-unknown-unknown && curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh && node ./scripts/normalize-version-bump.js && pnpm dlx turbo@${TURBO_VERSION} run build-wasm -- --target nodejs && git checkout . 
&& mv crates/wasm/pkg crates/wasm/pkg-nodejs && node ./scripts/setup-wasm.mjs && NEXT_TEST_MODE=start TEST_WASM=true node run-tests.js test/production/pages-dir/production/test/index.test.ts test/e2e/streaming-ssr/index.test.ts + skipNativeBuild: 'yes' + skipNativeInstall: 'yes' + afterBuild: | + rustup target add wasm32-unknown-unknown + node ./scripts/normalize-version-bump.js + pnpm dlx turbo@${TURBO_VERSION} run build-wasm -- --target nodejs + git checkout . + + export NEXT_TEST_MODE=start + export NEXT_TEST_WASM=true + node run-tests.js \ + test/production/pages-dir/production/test/index.test.ts \ + test/e2e/streaming-ssr/index.test.ts stepName: 'test-next-swc-wasm' secrets: inherit @@ -330,13 +499,17 @@ jobs: uses: ./.github/workflows/build_reusable.yml with: - afterBuild: rustup target add wasm32-wasip1-threads && pnpm dlx turbo@${TURBO_VERSION} run build-native-wasi + skipNativeBuild: 'yes' + skipNativeInstall: 'yes' + afterBuild: | + rustup target add wasm32-wasip1-threads + pnpm dlx turbo@${TURBO_VERSION} run build-native-wasi stepName: 'test-next-swc-napi-wasi' secrets: inherit test-unit: name: test unit - needs: ['changes'] + needs: ['changes', 'build-next', 'build-native'] if: ${{ needs.changes.outputs.docs-only == 'false' }} strategy: @@ -347,7 +520,7 @@ jobs: uses: ./.github/workflows/build_reusable.yml with: nodeVersion: ${{ matrix.node }} - afterBuild: node run-tests.js -c ${TEST_CONCURRENCY} --type unit + afterBuild: node run-tests.js --type unit stepName: 'test-unit-${{ matrix.node }}' secrets: inherit @@ -365,7 +538,7 @@ jobs: uses: ./.github/workflows/build_reusable.yml with: nodeVersion: ${{ matrix.node }} - afterBuild: node run-tests.js -c ${TEST_CONCURRENCY} --type unit + afterBuild: node run-tests.js --type unit stepName: 'test-unit-windows-${{ matrix.node }}' runs_on_labels: '["windows","self-hosted","x64"]' buildNativeTarget: 'x86_64-pc-windows-msvc' @@ -384,7 +557,11 @@ jobs: uses: ./.github/workflows/build_reusable.yml with: - afterBuild: node scripts/test-new-tests.mjs --flake-detection --mode dev --group ${{ matrix.group }} + afterBuild: | + node scripts/test-new-tests.mjs \ + --flake-detection \ + --mode dev \ + --group ${{ matrix.group }} stepName: 'test-new-tests-dev-${{matrix.group}}' timeout_minutes: 60 # Increase the default timeout as tests are intentionally run multiple times to detect flakes @@ -402,7 +579,11 @@ jobs: uses: ./.github/workflows/build_reusable.yml with: - afterBuild: node scripts/test-new-tests.mjs --flake-detection --mode start --group ${{ matrix.group }} + afterBuild: | + node scripts/test-new-tests.mjs \ + --flake-detection \ + --mode start \ + --group ${{ matrix.group }} stepName: 'test-new-tests-start-${{matrix.group}}' timeout_minutes: 60 # Increase the default timeout as tests are intentionally run multiple times to detect flakes @@ -412,7 +593,7 @@ jobs: name: Test new tests when deployed needs: ['optimize-ci', 'test-prod', 'test-new-tests-dev', 'test-new-tests-start'] - if: ${{ needs.optimize-ci.outputs.skip == 'false' && needs.changes.outputs.docs-only == 'false' && !github.event.pull_request.head.repo.fork }} + if: ${{ needs.optimize-ci.outputs.skip == 'false' }} strategy: fail-fast: false @@ -421,7 +602,12 @@ jobs: uses: ./.github/workflows/build_reusable.yml with: - afterBuild: NEXT_E2E_TEST_TIMEOUT=240000 node scripts/test-new-tests.mjs --mode deploy --group ${{ matrix.group }} + afterBuild: | + export NEXT_E2E_TEST_TIMEOUT=240000 + export GH_PR_NUMBER=${{ github.event.pull_request && github.event.pull_request.number || 
'' }} + node scripts/test-new-tests.mjs \ + --mode deploy \ + --group ${{ matrix.group }} stepName: 'test-new-tests-deploy-${{matrix.group}}' secrets: inherit @@ -442,7 +628,14 @@ jobs: react: ['', '18.3.1'] uses: ./.github/workflows/build_reusable.yml with: - afterBuild: NEXT_TEST_MODE=dev NEXT_TEST_REACT_VERSION="${{ matrix.react }}" node run-tests.js --timings -g ${{ matrix.group }} -c ${TEST_CONCURRENCY} --type development + afterBuild: | + export NEXT_TEST_MODE=dev + export NEXT_TEST_REACT_VERSION="${{ matrix.react }}" + + node run-tests.js \ + --timings \ + -g ${{ matrix.group }} \ + --type development stepName: 'test-dev-react-${{ matrix.react }}-${{ matrix.group }}' secrets: inherit @@ -460,7 +653,12 @@ jobs: uses: ./.github/workflows/build_reusable.yml with: - afterBuild: NEXT_TEST_MODE=dev node run-tests.js -c ${TEST_CONCURRENCY} test/e2e/app-dir/app/index.test.ts test/e2e/app-dir/app-edge/app-edge.test.ts + afterBuild: | + export NEXT_TEST_MODE=dev + + node run-tests.js \ + test/e2e/app-dir/app/index.test.ts \ + test/e2e/app-dir/app-edge/app-edge.test.ts stepName: 'test-dev-windows' runs_on_labels: '["windows","self-hosted","x64"]' buildNativeTarget: 'x86_64-pc-windows-msvc' @@ -481,7 +679,14 @@ jobs: uses: ./.github/workflows/build_reusable.yml with: nodeVersion: 18.18.2 - afterBuild: node run-tests.js -c 4 test/production/pages-dir/production/test/index.test.ts test/integration/css-client-nav/test/index.test.js test/integration/rewrites-has-condition/test/index.test.js test/integration/create-next-app/index.test.ts test/integration/create-next-app/package-manager/pnpm.test.ts + afterBuild: | + node run-tests.js \ + --concurrency 4 \ + test/production/pages-dir/production/test/index.test.ts \ + test/integration/css-client-nav/test/index.test.js \ + test/integration/rewrites-has-condition/test/index.test.js \ + test/integration/create-next-app/index.test.ts \ + test/integration/create-next-app/package-manager/pnpm.test.ts stepName: 'test-integration-windows' runs_on_labels: '["windows","self-hosted","x64"]' buildNativeTarget: 'x86_64-pc-windows-msvc' @@ -501,7 +706,13 @@ jobs: uses: ./.github/workflows/build_reusable.yml with: - afterBuild: NEXT_TEST_MODE=start node run-tests.js test/e2e/app-dir/app/index.test.ts test/e2e/app-dir/app-edge/app-edge.test.ts test/e2e/app-dir/metadata-edge/index.test.ts + afterBuild: | + export NEXT_TEST_MODE=start + + node run-tests.js \ + test/e2e/app-dir/app/index.test.ts \ + test/e2e/app-dir/app-edge/app-edge.test.ts \ + test/e2e/app-dir/metadata-edge/index.test.ts stepName: 'test-prod-windows' runs_on_labels: '["windows","self-hosted","x64"]' buildNativeTarget: 'x86_64-pc-windows-msvc' @@ -523,7 +734,11 @@ jobs: react: ['', '18.3.1'] uses: ./.github/workflows/build_reusable.yml with: - afterBuild: NEXT_TEST_MODE=start NEXT_TEST_REACT_VERSION="${{ matrix.react }}" node run-tests.js --timings -g ${{ matrix.group }} -c ${TEST_CONCURRENCY} --type production + afterBuild: | + export NEXT_TEST_MODE=start + export NEXT_TEST_REACT_VERSION="${{ matrix.react }}" + + node run-tests.js --timings -g ${{ matrix.group }} --type production stepName: 'test-prod-react-${{ matrix.react }}-${{ matrix.group }}' secrets: inherit @@ -558,7 +773,13 @@ jobs: uses: ./.github/workflows/build_reusable.yml with: nodeVersion: 18.18.2 - afterBuild: NEXT_TEST_REACT_VERSION="${{ matrix.react }}" node run-tests.js --timings -g ${{ matrix.group }} -c ${TEST_CONCURRENCY} --type integration + afterBuild: | + export NEXT_TEST_REACT_VERSION="${{ matrix.react }}" + + node 
run-tests.js \ + --timings \ + -g ${{ matrix.group }} \ + --type integration stepName: 'test-integration-${{ matrix.group }}-react-${{ matrix.react }}' secrets: inherit @@ -569,11 +790,22 @@ jobs: uses: ./.github/workflows/build_reusable.yml with: - afterBuild: pnpm playwright install && - BROWSER_NAME=firefox node run-tests.js test/production/pages-dir/production/test/index.test.ts && - NEXT_TEST_MODE=start BROWSER_NAME=safari node run-tests.js -c 1 test/production/pages-dir/production/test/index.test.ts test/e2e/basepath/basepath.test.ts && - BROWSER_NAME=safari DEVICE_NAME='iPhone XR' node run-tests.js -c 1 test/production/prerender-prefetch/index.test.ts + afterBuild: | + pnpm playwright install + # these all run without concurrency because they're heavier + export TEST_CONCURRENCY=1 + + BROWSER_NAME=firefox node run-tests.js \ + test/production/pages-dir/production/test/index.test.ts + + NEXT_TEST_MODE=start BROWSER_NAME=safari node run-tests.js \ + test/production/pages-dir/production/test/index.test.ts \ + test/e2e/basepath/basepath.test.ts \ + test/e2e/basepath/error-pages.test.ts + + BROWSER_NAME=safari DEVICE_NAME='iPhone XR' node run-tests.js \ + test/production/prerender-prefetch/index.test.ts stepName: 'test-firefox-safari' secrets: inherit @@ -587,7 +819,13 @@ jobs: uses: ./.github/workflows/build_reusable.yml with: nodeVersion: 18.18.2 - afterBuild: __NEXT_EXPERIMENTAL_PPR=true NEXT_EXTERNAL_TESTS_FILTERS="test/ppr-tests-manifest.json" node run-tests.js --timings -c ${TEST_CONCURRENCY} --type integration + afterBuild: | + export __NEXT_EXPERIMENTAL_PPR=true + export NEXT_EXTERNAL_TESTS_FILTERS="test/ppr-tests-manifest.json" + + node run-tests.js \ + --timings \ + --type integration stepName: 'test-ppr-integration' secrets: inherit @@ -602,7 +840,15 @@ jobs: group: [1/6, 2/6, 3/6, 4/6, 5/6, 6/6] uses: ./.github/workflows/build_reusable.yml with: - afterBuild: __NEXT_EXPERIMENTAL_PPR=true NEXT_EXTERNAL_TESTS_FILTERS="test/ppr-tests-manifest.json" NEXT_TEST_MODE=dev node run-tests.js --timings -g ${{ matrix.group }} -c ${TEST_CONCURRENCY} --type development + afterBuild: | + export __NEXT_EXPERIMENTAL_PPR=true + export NEXT_EXTERNAL_TESTS_FILTERS="test/ppr-tests-manifest.json" + export NEXT_TEST_MODE=dev + + node run-tests.js \ + --timings \ + -g ${{ matrix.group }} \ + --type development stepName: 'test-ppr-dev-${{ matrix.group }}' secrets: inherit @@ -617,7 +863,15 @@ jobs: group: [1/7, 2/7, 3/7, 4/7, 5/7, 6/7, 7/7] uses: ./.github/workflows/build_reusable.yml with: - afterBuild: __NEXT_EXPERIMENTAL_PPR=true NEXT_EXTERNAL_TESTS_FILTERS="test/ppr-tests-manifest.json" NEXT_TEST_MODE=start node run-tests.js --timings -g ${{ matrix.group }} -c ${TEST_CONCURRENCY} --type production + afterBuild: | + export __NEXT_EXPERIMENTAL_PPR=true + export NEXT_EXTERNAL_TESTS_FILTERS="test/ppr-tests-manifest.json" + export NEXT_TEST_MODE=start + + node run-tests.js \ + --timings \ + -g ${{ matrix.group }} \ + --type production stepName: 'test-ppr-prod-${{ matrix.group }}' secrets: inherit diff --git a/.github/workflows/build_reusable.yml b/.github/workflows/build_reusable.yml index 420ef70cf32b3..a0b290e27d38f 100644 --- a/.github/workflows/build_reusable.yml +++ b/.github/workflows/build_reusable.yml @@ -42,6 +42,11 @@ on: required: false description: 'if nextest rust dep is needed' type: string + rustBuildProfile: + required: false + description: 'The profile to use for the build, default is `release-with-assertions`, also supports `` for debug and `release` for normal release' + type: 
string + default: 'release-with-assertions' uploadSwcArtifact: required: false description: 'if swc artifact needs uploading' @@ -71,15 +76,19 @@ on: default: 'x86_64-unknown-linux-gnu' env: - NAPI_CLI_VERSION: 2.14.7 + NAPI_CLI_VERSION: 2.18.4 TURBO_VERSION: 2.3.3 NODE_LTS_VERSION: 20.9.0 + # run-tests.js reads `TEST_CONCURRENCY` if no explicit `--concurrency` or `-c` + # argument is provided TEST_CONCURRENCY: 8 # disable backtrace for test snapshots RUST_BACKTRACE: 0 TURBO_TEAM: 'vercel' TURBO_CACHE: 'remote:rw' + TURBO_API: ${{ secrets.HOSTED_TURBO_API }} + TURBO_TOKEN: ${{ secrets.HOSTED_TURBO_TOKEN }} NEXT_TELEMETRY_DISABLED: 1 # allow not skipping install-native postinstall script if we don't have a binary available already NEXT_SKIP_NATIVE_POSTINSTALL: ${{ inputs.skipNativeInstall == 'yes' && '1' || '' }} @@ -175,7 +184,7 @@ jobs: with: cache-provider: 'turbo' save-if: ${{ github.ref_name == 'canary' }} - shared-key: ${{ inputs.rustCacheKey }}-${{ inputs.buildNativeTarget }}-build-${{ hashFiles('.cargo/config.toml') }} + shared-key: ${{ inputs.rustCacheKey }}-${{ inputs.buildNativeTarget }}-build-${{ inputs.rustBuildProfile }}-${{ hashFiles('.cargo/config.toml') }} # clean up any previous artifacts to avoid hitting disk space limits - run: git clean -xdf && rm -rf /tmp/next-repo-*; rm -rf /tmp/next-install-* /tmp/yarn-* /tmp/ncc-cache target @@ -193,7 +202,7 @@ jobs: - run: node scripts/normalize-version-bump.js name: normalize versions - - run: pnpm dlx turbo@${TURBO_VERSION} run build-native-release -v --env-mode loose --remote-cache-timeout 90 --summarize -- --target ${{ inputs.buildNativeTarget }} + - run: pnpm dlx turbo@${TURBO_VERSION} run build-native-${{ inputs.rustBuildProfile }} -v --env-mode loose --remote-cache-timeout 90 --summarize -- --target ${{ inputs.buildNativeTarget }} if: ${{ inputs.skipNativeBuild != 'yes' }} - name: Upload next-swc artifact @@ -218,6 +227,16 @@ jobs: - run: ANALYZE=1 pnpm build if: ${{ inputs.skipInstallBuild != 'yes' }} + # Some packages e.g. `devlow-bench` depend on `pnpm build` to generate + # their `dist` directory. The first run of `pnpm install` will generate + # warnings because these don't exist yet. + # + # We need to run `pnpm install` a _second_ time to fix this. Fortunately, + # this second run is very fast and cheap. + - name: Re-run pnpm install to link built packages into node_modules/.bin + run: pnpm install + if: ${{ inputs.skipInstallBuild != 'yes' }} + - run: pnpm playwright install-deps if: ${{ inputs.skipInstallBuild != 'yes' }} diff --git a/.github/workflows/code_freeze.yml b/.github/workflows/code_freeze.yml index d3a7e9884d224..a229847f38054 100644 --- a/.github/workflows/code_freeze.yml +++ b/.github/workflows/code_freeze.yml @@ -16,7 +16,7 @@ on: name: Code Freeze env: - NAPI_CLI_VERSION: 2.14.7 + NAPI_CLI_VERSION: 2.18.4 TURBO_VERSION: 2.3.3 NODE_LTS_VERSION: 20 diff --git a/.github/workflows/force_merge_canary_release_pr.yml b/.github/workflows/force_merge_canary_release_pr.yml new file mode 100644 index 0000000000000..1f22b160fcc57 --- /dev/null +++ b/.github/workflows/force_merge_canary_release_pr.yml @@ -0,0 +1,23 @@ +name: Force Merge Canary Release PR + +on: pull_request + +permissions: + # To bypass and merge PR + pull-requests: write + +jobs: + force-merge-canary-release-pr: + runs-on: ubuntu-latest + # Validate the login, PR title, and the label to ensure the PR is + # from the release PR and prevent spoofing. 
+ if: | + github.event.pull_request.user.login == 'vercel-release-bot' && + github.event.pull_request.title == 'Version Packages (canary)' && + contains(github.event.pull_request.labels.*.name, 'created-by: CI') + steps: + - name: Bypass required status checks and merge PR + run: gh pr merge --admin --squash "$PR_URL" + env: + PR_URL: ${{github.event.pull_request.html_url}} + GH_TOKEN: ${{secrets.GITHUB_TOKEN}} diff --git a/.github/workflows/integration_tests_reusable.yml b/.github/workflows/integration_tests_reusable.yml index 686ad442a7b19..32f289d2641de 100644 --- a/.github/workflows/integration_tests_reusable.yml +++ b/.github/workflows/integration_tests_reusable.yml @@ -92,7 +92,6 @@ jobs: # e2e and ${{ inputs.test_type }} tests with `node run-tests.js` export NEXT_TEST_CONTINUE_ON_ERROR=TRUE - export NEXT_E2E_TEST_TIMEOUT=240000 export NEXT_TEST_MODE=${{ inputs.test_type == 'development' && 'dev' || 'start' }} @@ -101,7 +100,6 @@ jobs: node run-tests.js \ --group ${{ matrix.group }}/${{ inputs.e2e_groups }} \ - --concurrency $TEST_CONCURRENCY \ --retries ${{ inputs.num_retries }} \ --type ${{ inputs.test_type }} stepName: test-${{ inputs.name }}-${{ matrix.group }} @@ -126,7 +124,6 @@ jobs: # legacy integration tests with `node run-tests.js` export NEXT_TEST_CONTINUE_ON_ERROR=TRUE - export NEXT_E2E_TEST_TIMEOUT=240000 # HACK: Despite the name, these environment variables are just used to # gate tests, so they're applicable to both turbopack and rspack tests @@ -140,7 +137,6 @@ jobs: node run-tests.js \ --group ${{ matrix.group }}/${{ inputs.integration_groups }} \ - --concurrency $TEST_CONCURRENCY \ --retries ${{ inputs.num_retries }} \ --type integration stepName: test-${{ inputs.name }}-integration-${{ matrix.group }} diff --git a/.github/workflows/pull_request_stats.yml b/.github/workflows/pull_request_stats.yml index 2b56692cf9618..73c091ba6c4ec 100644 --- a/.github/workflows/pull_request_stats.yml +++ b/.github/workflows/pull_request_stats.yml @@ -5,7 +5,7 @@ on: name: Generate Pull Request Stats env: - NAPI_CLI_VERSION: 2.14.7 + NAPI_CLI_VERSION: 2.18.4 TURBO_VERSION: 2.3.3 NODE_LTS_VERSION: 20 TEST_CONCURRENCY: 6 diff --git a/.github/workflows/retry_test.yml b/.github/workflows/retry_test.yml index 6348ac8182a28..a7b396ee5c211 100644 --- a/.github/workflows/retry_test.yml +++ b/.github/workflows/retry_test.yml @@ -21,7 +21,6 @@ jobs: ${{ github.event.workflow_run.conclusion == 'failure' && github.repository == 'vercel/next.js' && - github.event.workflow.name != 'build-and-deploy' && github.event.workflow_run.run_attempt < 3 }} runs-on: ubuntu-latest @@ -43,7 +42,7 @@ jobs: if: >- ${{ github.event.workflow_run.conclusion == 'failure' && - (github.event.workflow_run.run_attempt >= 3 || github.event.workflow.name == 'build-and-deploy') && + github.event.workflow_run.run_attempt >= 3 && !github.event.workflow_run.head_repository.fork }} runs-on: ubuntu-latest diff --git a/.github/workflows/rspack-update-tests-manifest.yml b/.github/workflows/rspack-update-tests-manifest.yml index 686d79d2d6a4c..ed01821d35093 100644 --- a/.github/workflows/rspack-update-tests-manifest.yml +++ b/.github/workflows/rspack-update-tests-manifest.yml @@ -46,8 +46,8 @@ jobs: # possible for us to regress on tests. We need to skip the # only-promote-to-passing merge logic. 
SCRIPT: test/update-bundler-manifest.js --bundler rspack --test-suite dev --override - PR_TITLE: Update bundler development test manifest - PR_BODY: This auto-generated PR updates the development integration test manifest used when testing alternative bundlers. + PR_TITLE: Update Rspack development test manifest + PR_BODY: This auto-generated PR updates the development integration test manifest used when testing Rspack. update_build_manifest: name: Update and upload Rspack production test manifest if: github.repository_owner == 'vercel' @@ -82,5 +82,5 @@ jobs: GITHUB_TOKEN: ${{ secrets.GH_TOKEN_PULL_REQUESTS }} BRANCH_NAME: rspack-manifest SCRIPT: test/update-bundler-manifest.js --bundler rspack --test-suite build --override - PR_TITLE: Update bundler production test manifest - PR_BODY: This auto-generated PR updates the production integration test manifest used when testing alternative bundlers. + PR_TITLE: Update Rspack production test manifest + PR_BODY: This auto-generated PR updates the production integration test manifest used when testing Rspack. diff --git a/.github/workflows/test-turbopack-rust-bench-test.yml b/.github/workflows/test-turbopack-rust-bench-test.yml index 2751e68db2926..6afe541c848b0 100644 --- a/.github/workflows/test-turbopack-rust-bench-test.yml +++ b/.github/workflows/test-turbopack-rust-bench-test.yml @@ -42,6 +42,10 @@ jobs: check-latest: true - run: corepack enable + # We need to install the dependencies for the benchmark apps + - run: pnpm install + working-directory: turbopack/benchmark-apps + - name: Build benchmarks for tests timeout-minutes: 120 run: | diff --git a/.github/workflows/test_e2e_deploy_release.yml b/.github/workflows/test_e2e_deploy_release.yml index 7bb51cbda9499..c78f078f359b8 100644 --- a/.github/workflows/test_e2e_deploy_release.yml +++ b/.github/workflows/test_e2e_deploy_release.yml @@ -6,11 +6,20 @@ on: types: [published] # allow triggering manually as well workflow_dispatch: + inputs: + nextVersion: + description: canary or custom tarball URL + default: canary + type: string env: VERCEL_TEST_TEAM: vtest314-next-e2e-tests VERCEL_TEST_TOKEN: ${{ secrets.VERCEL_TEST_TOKEN }} DATADOG_API_KEY: ${{ secrets.DATA_DOG_API_KEY }} + TURBO_TEAM: 'vercel' + TURBO_CACHE: 'remote:rw' + TURBO_API: ${{ secrets.HOSTED_TURBO_API }} + TURBO_TOKEN: ${{ secrets.HOSTED_TURBO_TOKEN }} DD_ENV: 'ci' jobs: @@ -50,7 +59,7 @@ jobs: matrix: group: [1/6, 2/6, 3/6, 4/6, 5/6, 6/6] with: - afterBuild: npm i -g vercel@latest && NEXT_E2E_TEST_TIMEOUT=240000 NEXT_TEST_MODE=deploy NEXT_EXTERNAL_TESTS_FILTERS="test/deploy-tests-manifest.json" node run-tests.js --timings -g ${{ matrix.group }} -c 2 --type e2e + afterBuild: npm i -g vercel@latest && NEXT_E2E_TEST_TIMEOUT=240000 NEXT_TEST_MODE=deploy NEXT_EXTERNAL_TESTS_FILTERS="test/deploy-tests-manifest.json" NEXT_TEST_VERSION="${{ github.event.inputs.nextVersion || 'canary' }}" node run-tests.js --timings -g ${{ matrix.group }} -c 2 --type e2e skipNativeBuild: 'yes' skipNativeInstall: 'no' stepName: 'test-deploy-${{ matrix.group }}' diff --git a/.github/workflows/triage.yml b/.github/workflows/triage.yml index e09d97324e794..22c87c190f5f9 100644 --- a/.github/workflows/triage.yml +++ b/.github/workflows/triage.yml @@ -43,3 +43,5 @@ jobs: reproduction-invalid-label: 'invalid link' reproduction-issue-labels: 'bug,' comment-unhelpful-weight: 0.5 + webhook-url: ${{ secrets.NISSUER_WEBHOOK_URL }} + webhook-secret: ${{ secrets.NISSUER_WEBHOOK_SECRET }} diff --git a/.github/workflows/trigger_release.yml 
b/.github/workflows/trigger_release.yml index fb075cc7becd8..5a3e38e70430e 100644 --- a/.github/workflows/trigger_release.yml +++ b/.github/workflows/trigger_release.yml @@ -34,7 +34,7 @@ on: name: Trigger Release env: - NAPI_CLI_VERSION: 2.14.7 + NAPI_CLI_VERSION: 2.18.4 TURBO_VERSION: 2.3.3 NODE_LTS_VERSION: 20 diff --git a/.github/workflows/trigger_release_new.yml b/.github/workflows/trigger_release_new.yml new file mode 100644 index 0000000000000..330eb2b42e647 --- /dev/null +++ b/.github/workflows/trigger_release_new.yml @@ -0,0 +1,123 @@ +name: Trigger Release (New) + +on: + # Run every day at 23:15 UTC + # TODO: Disabled cron for now, but uncomment + # once replaced the old release workflow. + # schedule: + # - cron: '15 23 * * *' + # Run manually + workflow_dispatch: + inputs: + releaseType: + description: Release Type + required: true + type: choice + # Cron job will run canary release + default: canary + options: + - canary + - stable + - release-candidate + + force: + description: Forced Release + default: false + type: boolean + +concurrency: ${{ github.workflow }}-${{ github.ref }} + +env: + NAPI_CLI_VERSION: 2.18.4 + TURBO_VERSION: 2.3.3 + NODE_LTS_VERSION: 20 + +permissions: + # To create PR + pull-requests: write + +jobs: + start: + if: github.repository_owner == 'vercel' + runs-on: ubuntu-latest + env: + NEXT_TELEMETRY_DISABLED: 1 + # we build a dev binary for use in CI so skip downloading + # canary next-swc binaries in the monorepo + NEXT_SKIP_NATIVE_POSTINSTALL: 1 + + environment: release-${{ github.event.inputs.releaseType }} + steps: + - name: Setup node + uses: actions/setup-node@v4 + with: + node-version: ${{ env.NODE_LTS_VERSION }} + check-latest: true + + # Since actions/checkout won't include the latest tag information, + # use the old clone workflow while still preserving branch specific + # checkout behavior to support backports. + # x-ref: https://github.com/vercel/next.js/pull/63167 + - name: Clone Next.js repository + run: git clone https://github.com/vercel/next.js.git --depth=25 --single-branch --branch ${GITHUB_REF_NAME:-canary} . + + - name: Check token + run: gh auth status + env: + GITHUB_TOKEN: ${{ secrets.RELEASE_BOT_GITHUB_TOKEN }} + + - name: Get commit of the latest tag + run: echo "LATEST_TAG_COMMIT=$(git rev-list -n 1 $(git describe --tags --abbrev=0))" >> $GITHUB_ENV + + - name: Get latest commit + run: echo "LATEST_COMMIT=$(git rev-parse HEAD)" >> $GITHUB_ENV + + - name: Check if new commits since last tag + if: ${{ github.event.inputs.releaseType != 'stable' && github.event.inputs.force != true }} + run: | + if [ "$LATEST_TAG_COMMIT" = "$LATEST_COMMIT" ]; then + echo "No new commits. Exiting..." 
+ exit 1 + fi + + # https://github.com/actions/virtual-environments/issues/1187 + - name: tune linux network + run: sudo ethtool -K eth0 tx off rx off + + - name: Setup corepack + run: | + npm i -g corepack@0.31 + corepack enable + pnpm --version + + - id: get-store-path + run: echo STORE_PATH=$(pnpm store path) >> $GITHUB_OUTPUT + + - uses: actions/cache@v4 + timeout-minutes: 5 + id: cache-pnpm-store + with: + path: ${{ steps.get-store-path.outputs.STORE_PATH }} + key: pnpm-store-${{ hashFiles('pnpm-lock.yaml') }} + restore-keys: | + pnpm-store- + pnpm-store-${{ hashFiles('pnpm-lock.yaml') }} + + - run: pnpm install + - run: pnpm run build + + - name: Create Release Pull Request + id: changesets + uses: changesets/action@v1 + with: + version: pnpm ci:version + env: + GITHUB_TOKEN: ${{ secrets.RELEASE_BOT_GITHUB_TOKEN }} + RELEASE_TYPE: ${{ github.event.inputs.releaseType }} + + # Add label to verify the PR is created from this workflow. + - name: Add label to PR + if: steps.changesets.outputs.pullRequestNumber + run: 'gh pr edit ${{ steps.changesets.outputs.pullRequestNumber }} --add-label "created-by: CI"' + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/turbopack-benchmark.yml b/.github/workflows/turbopack-benchmark.yml new file mode 100644 index 0000000000000..14bb8234b0ffe --- /dev/null +++ b/.github/workflows/turbopack-benchmark.yml @@ -0,0 +1,112 @@ +name: Turbopack Benchmark + +on: + workflow_dispatch: + push: + branches: + - canary + pull_request: + types: ['opened', 'reopened', 'synchronize', 'labeled'] + paths: + - '**/crates/**' + - '**/Cargo.toml' + - '**/Cargo.lock' + +concurrency: + # Limit concurrent runs to 1 per PR, but allow concurrent runs on canary branch + group: ${{ github.event_name == 'pull_request' && format('{0}-{1}', github.workflow, github.event.pull_request.number) || format('{0}-{1}-{2}', github.workflow, github.ref_name, github.run_id) }} + cancel-in-progress: ${{ github.event_name == 'pull_request' }} + +env: + CI: 1 + CARGO_INCREMENTAL: 0 + # For faster CI + RUST_LOG: 'off' + TURBO_TEAM: 'vercel' + TURBO_CACHE: 'remote:rw' + TURBO_TOKEN: ${{ secrets.HOSTED_TURBO_TOKEN }} + +jobs: + benchmark-tiny: + name: Benchmark Rust Crates (tiny) + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Setup Rust toolchain + uses: ./.github/actions/setup-rust + + - name: Install cargo-codspeed + uses: taiki-e/install-action@v2 + with: + tool: cargo-codspeed@2.10.1 + + - name: Build app build benchmarks + run: cargo codspeed build -p next-api + + - name: Run the benchmarks + uses: CodSpeedHQ/action@v3 + with: + run: cargo codspeed run + token: ${{ secrets.CODSPEED_TOKEN }} + + benchmark-small-apps: + name: Benchmark Rust Crates (small apps) + runs-on: ['self-hosted', 'linux', 'x64', 'metal'] + steps: + - uses: actions/checkout@v4 + + - name: Setup Rust toolchain + uses: ./.github/actions/setup-rust + + - name: Install cargo-codspeed + uses: taiki-e/install-action@v2 + with: + tool: cargo-codspeed@2.10.1 + + - name: Cache on ${{ github.ref_name }} + uses: ijjk/rust-cache@turbo-cache-v1.0.8 + with: + save-if: 'true' + cache-provider: 'turbo' + shared-key: build-turbopack-benchmark-small-apps-${{ hashFiles('.cargo/config.toml') }} + + - name: Install pnpm dependencies + working-directory: turbopack/benchmark-apps + run: | + npm i -g corepack@0.31 + corepack enable + pnpm install --loglevel error + + - name: Build app build benchmarks + run: cargo codspeed build -p turbopack-cli small_apps + + - name: Run the benchmarks + 
uses: CodSpeedHQ/action@v3 + with: + run: cargo codspeed run + token: ${{ secrets.CODSPEED_TOKEN }} + + benchmark-large: + name: Benchmark Rust Crates (large) + if: ${{ github.event.label.name == 'benchmark' || github.event_name == 'workflow_dispatch' }} + runs-on: ['self-hosted', 'linux', 'x64', 'metal'] + steps: + - uses: actions/checkout@v4 + + - name: Setup Rust toolchain + uses: ./.github/actions/setup-rust + + - name: Install cargo-codspeed + uses: taiki-e/install-action@v2 + with: + tool: cargo-codspeed@2.10.1 + + - name: Build the benchmark target(s) + run: cargo codspeed build -p turbopack -p turbopack-bench + + - name: Run the benchmarks + uses: CodSpeedHQ/action@v3 + with: + run: cargo codspeed run + token: ${{ secrets.CODSPEED_TOKEN }} diff --git a/.gitignore b/.gitignore index f78dbd04e33ff..ed992f47cb2f3 100644 --- a/.gitignore +++ b/.gitignore @@ -8,6 +8,9 @@ tarballs/ packages/**/*.tgz .errors/ +# rust compiler crashes +rustc-ice-*.txt + # dependencies node_modules package-lock.json @@ -28,8 +31,9 @@ pids coverage # test output -test/**/out* +test/**/out/* test/**/next-env.d.ts +test/**/.next* .DS_Store /e2e-tests test/tmp/** @@ -42,7 +46,7 @@ test/traces .nvmrc # examples -examples/**/out +examples/**/out/* examples/**/.env*.local pr-stats.md diff --git a/.prettierignore b/.prettierignore index 2a6d925dda925..598a86ad2000c 100644 --- a/.prettierignore +++ b/.prettierignore @@ -17,6 +17,7 @@ packages/next/wasm/@next packages/next/errors.json .github/actions/next-stats-action/.work +.changeset/*.md crates/**/tests/**/output* crates/core/tests/loader/issue-32553/input.js @@ -55,7 +56,7 @@ test/e2e/app-dir/server-source-maps/fixtures/default/internal-pkg/sourcemapped.j test/e2e/app-dir/server-source-maps/fixtures/default/external-pkg/sourcemapped.js test/e2e/async-modules/amp-validator-wasm.js -# turbopack crates +# turbopack crates, disable for some tests and precompiled dependencies. 
/turbopack/crates/*/js/src/compiled /turbopack/crates/turbopack/bench.json /turbopack/crates/turbopack/tests @@ -64,7 +65,8 @@ test/e2e/async-modules/amp-validator-wasm.js /turbopack/crates/next-transform-strip-page-exports/tests /turbopack/crates/next-transform-dynamic/tests /turbopack/crates/turbopack-tests/tests/execution/turbopack/basic/error/input/broken.js +/turbopack/crates/turbopack-tests/tests/snapshot/import-meta/cjs/input/mod.cjs +/turbopack/crates/turbopack-tests/tests/snapshot/source_maps/* /turbopack/crates/turbopack-tests/tests/**/output* +/turbopack/crates/turbopack-tests/tests/**/static -# temporarily disable prettier for the turbopack directory -/turbopack/ diff --git a/.vscode/launch.json b/.vscode/launch.json index 41e591077f14e..a9d9f638642f9 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -45,19 +45,17 @@ "webpack://_N_E/[.]/(.*)": "${workspaceFolder}/${input:appDirname}/.next/server/$1", "webpack-internal:///(ssr)/./*": "${workspaceFolder}/${input:appDirname}/*", "webpack://(?:_N_E)?/(?:../)*src/(.*)": "${workspaceFolder}/packages/next/src/$1", - "webpack:///./dist/src/*": "${workspaceFolder}/packages/next/src/*", - "webpack:///./dist/compiled/*": "${workspaceFolder}/packages/next/src/compiled/*", + "webpack://next/./dist/src/*": "${workspaceFolder}/packages/next/src/*", + "webpack://next/./dist/compiled/*": "${workspaceFolder}/packages/next/src/compiled/*", "webpack:///(?:../)*packages/next/dist/compiled/(.*)": "${workspaceFolder}/packages/next/src/compiled/$1", - "webpack:///./src/*": "${workspaceFolder}/packages/next/src/*", + "webpack://next/./src/*": "${workspaceFolder}/packages/next/src/*", "webpack-internal:///\\(rsc\\)/(?:../)*packages/next/dist/(.*)": "${workspaceFolder}/packages/next/src/$1", "webpack-internal:///(react-server)/./dist/compiled/*": "${workspaceFolder}/packages/next/src/compiled/*", "turbopack:///[project]/*": "${workspaceFolder}/*" }, "env": { // Enable the following environment variables to use turbopack instead of webpack: - // "TURBOPACK": "1", - // "TURBOPACK_DEV": "1", - // "TURBOPACK_BUILD": "1", + // "IS_TURBOPACK_TEST": "1", "NEXT_PRIVATE_LOCAL_WEBPACK": "1", "NEXT_PRIVATE_SKIP_CANARY_CHECK": "1", "NEXT_TELEMETRY_DISABLED": "1" @@ -82,10 +80,10 @@ "webpack://_N_E/[.]/(.*)": "${workspaceFolder}/${fileDirname}/.next/server/$1", "webpack-internal:///(ssr)/./*": "${workspaceFolder}/${fileDirname}/*", "webpack://(?:_N_E)?/(?:../)*src/(.*)": "${workspaceFolder}/packages/next/src/$1", - "webpack:///./dist/src/*": "${workspaceFolder}/packages/next/src/*", - "webpack:///./dist/compiled/*": "${workspaceFolder}/packages/next/src/compiled/*", + "webpack://next/./dist/src/*": "${workspaceFolder}/packages/next/src/*", + "webpack://next/./dist/compiled/*": "${workspaceFolder}/packages/next/src/compiled/*", "webpack:///(?:../)*packages/next/dist/compiled/(.*)": "${workspaceFolder}/packages/next/src/compiled/$1", - "webpack:///./src/*": "${workspaceFolder}/packages/next/src/*", + "webpack://next/./src/*": "${workspaceFolder}/packages/next/src/*", "webpack-internal:///\\(rsc\\)/(?:../)*packages/next/dist/(.*)": "${workspaceFolder}/packages/next/src/$1", "webpack-internal:///(react-server)/./dist/compiled/*": "${workspaceFolder}/packages/next/src/compiled/*", "turbopack:///[project]/*": "${workspaceFolder}/*" @@ -111,10 +109,10 @@ "webpack://_N_E/[.]/(.*)": "${workspaceFolder}/${fileDirname}/.next/server/$1", "webpack-internal:///(ssr)/./*": "${workspaceFolder}/${fileDirname}/*", "webpack://(?:_N_E)?/(?:../)*src/(.*)": 
"${workspaceFolder}/packages/next/src/$1", - "webpack:///./dist/src/*": "${workspaceFolder}/packages/next/src/*", - "webpack:///./dist/compiled/*": "${workspaceFolder}/packages/next/src/compiled/*", + "webpack://next/./dist/src/*": "${workspaceFolder}/packages/next/src/*", + "webpack://next/./dist/compiled/*": "${workspaceFolder}/packages/next/src/compiled/*", "webpack:///(?:../)*packages/next/dist/compiled/(.*)": "${workspaceFolder}/packages/next/src/compiled/$1", - "webpack:///./src/*": "${workspaceFolder}/packages/next/src/*", + "webpack://next/./src/*": "${workspaceFolder}/packages/next/src/*", "webpack-internal:///\\(rsc\\)/(?:../)*packages/next/dist/(.*)": "${workspaceFolder}/packages/next/src/$1", "webpack-internal:///(react-server)/./dist/compiled/*": "${workspaceFolder}/packages/next/src/compiled/*", "turbopack:///[project]/*": "${workspaceFolder}/*" diff --git a/.vscode/settings.json b/.vscode/settings.json index 57313bc8e8d7a..b195325c3742b 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -59,7 +59,7 @@ // singleton modules should always use "*.external" instead of "*-instance" "packages/next/src/server/app-render/action-async-storage-instance.ts", "packages/next/src/server/app-render/after-task-async-storage-instance.ts", - "packages/next/src/server/app-render/clean-async-snapshot-instance.ts", + "packages/next/src/server/app-render/dynamic-access-async-storage-instance.ts", "packages/next/src/server/app-render/work-async-storage-instance.ts", "packages/next/src/server/app-render/work-unit-async-storage-instance.ts", "packages/next/src/client/components/segment-cache-impl/*" diff --git a/Cargo.lock b/Cargo.lock index ac6cfacff7a05..c93e7aa32a94e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -33,13 +33,25 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" +[[package]] +name = "afl" +version = "0.15.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eda81c043843d2eeb489c3f30774953326fa043b7a6470d4c2ad7c3cdfd9847b" +dependencies = [ + "home", + "libc", + "rustc_version", + "xdg", +] + [[package]] name = "ahash" version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" dependencies = [ - "getrandom", + "getrandom 0.2.15", "once_cell", "version_check", ] @@ -51,11 +63,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", - "getrandom", + "getrandom 0.2.15", "once_cell", "serde", "version_check", - "zerocopy", + "zerocopy 0.7.32", ] [[package]] @@ -172,7 +184,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "220044e6a1bb31ddee4e3db724d29767f352de47445a6cd75e1a173142136c83" dependencies = [ - "nom", + "nom 7.1.3", "vte", ] @@ -232,18 +244,21 @@ checksum = "70033777eb8b5124a81a1889416543dddef2de240019b674c81285a2635a7e1e" [[package]] name = "anyhow" -version = "1.0.95" +version = "1.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" +checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" dependencies = [ "backtrace", ] [[package]] name = "arbitrary" -version = "1.3.2" +version = "1.4.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110" +checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223" +dependencies = [ + "derive_arbitrary", +] [[package]] name = "arc-swap" @@ -259,7 +274,7 @@ checksum = "0ae92a5119aa49cdbcf6b9f893fe4e1d98b04ccbf82ee0584ad948a44a734dea" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -298,37 +313,14 @@ dependencies = [ [[package]] name = "ast_node" -version = "3.0.0" +version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91fb5864e2f5bf9fd9797b94b2dfd1554d4c3092b535008b27d7e15c86675a2f" +checksum = "c6ea666cbca3830383d6ce836593e88ade6f61b12c6066c09dc1257c3079a5b6" dependencies = [ "proc-macro2", "quote", "swc_macros_common", - "syn 2.0.95", -] - -[[package]] -name = "async-channel" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" -dependencies = [ - "concurrent-queue", - "event-listener 2.5.3", - "futures-core", -] - -[[package]] -name = "async-channel" -version = "2.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a" -dependencies = [ - "concurrent-queue", - "event-listener-strategy", - "futures-core", - "pin-project-lite", + "syn 2.0.100", ] [[package]] @@ -344,164 +336,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "async-executor" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7ebdfa2ebdab6b1760375fa7d6f382b9f486eac35fc994625a00e89280bdbb7" -dependencies = [ - "async-task", - "concurrent-queue", - "fastrand 2.2.0", - "futures-lite 2.3.0", - "slab", -] - -[[package]] -name = "async-global-executor" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" -dependencies = [ - "async-channel 2.3.1", - "async-executor", - "async-io 2.3.3", - "async-lock 3.4.0", - "blocking", - "futures-lite 2.3.0", - "once_cell", -] - -[[package]] -name = "async-io" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" -dependencies = [ - "async-lock 2.8.0", - "autocfg", - "cfg-if", - "concurrent-queue", - "futures-lite 1.13.0", - "log", - "parking", - "polling 2.8.0", - "rustix 0.37.27", - "slab", - "socket2 0.4.9", - "waker-fn", -] - -[[package]] -name = "async-io" -version = "2.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d6baa8f0178795da0e71bc42c9e5d13261aac7ee549853162e66a241ba17964" -dependencies = [ - "async-lock 3.4.0", - "cfg-if", - "concurrent-queue", - "futures-io", - "futures-lite 2.3.0", - "parking", - "polling 3.7.2", - "rustix 0.38.41", - "slab", - "tracing", - "windows-sys 0.52.0", -] - -[[package]] -name = "async-lock" -version = "2.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" -dependencies = [ - "event-listener 2.5.3", -] - -[[package]] -name = "async-lock" -version = "3.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18" -dependencies = [ - 
"event-listener 5.4.0", - "event-listener-strategy", - "pin-project-lite", -] - -[[package]] -name = "async-object-pool" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aeb901c30ebc2fc4ab46395bbfbdba9542c16559d853645d75190c3056caf3bc" -dependencies = [ - "async-std", -] - -[[package]] -name = "async-process" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea6438ba0a08d81529c69b36700fa2f95837bfe3e776ab39cde9c14d9149da88" -dependencies = [ - "async-io 1.13.0", - "async-lock 2.8.0", - "async-signal", - "blocking", - "cfg-if", - "event-listener 3.1.0", - "futures-lite 1.13.0", - "rustix 0.38.41", - "windows-sys 0.48.0", -] - -[[package]] -name = "async-signal" -version = "0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfb3634b73397aa844481f814fad23bbf07fdb0eabec10f2eb95e58944b1ec32" -dependencies = [ - "async-io 2.3.3", - "async-lock 3.4.0", - "atomic-waker", - "cfg-if", - "futures-core", - "futures-io", - "rustix 0.38.41", - "signal-hook-registry", - "slab", - "windows-sys 0.52.0", -] - -[[package]] -name = "async-std" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62565bb4402e926b29953c785397c6dc0391b7b446e45008b0049eb43cec6f5d" -dependencies = [ - "async-channel 1.9.0", - "async-global-executor", - "async-io 1.13.0", - "async-lock 2.8.0", - "async-process", - "crossbeam-utils", - "futures-channel", - "futures-core", - "futures-io", - "futures-lite 1.13.0", - "gloo-timers", - "kv-log-macro", - "log", - "memchr", - "once_cell", - "pin-project-lite", - "pin-utils", - "slab", - "wasm-bindgen-futures", -] - [[package]] name = "async-stream" version = "0.3.4" @@ -524,12 +358,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "async-task" -version = "4.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" - [[package]] name = "async-trait" version = "0.1.86" @@ -538,7 +366,7 @@ checksum = "644dd749086bf3771a2fbc5f256fdb982d53f011c7d5d560304eafeecebce79d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -575,8 +403,9 @@ name = "auto-hash-map" version = "0.1.0" dependencies = [ "hashbrown 0.14.5", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", + "shrink-to-fit", "smallvec", ] @@ -588,7 +417,7 @@ checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -607,7 +436,7 @@ dependencies = [ "arrayvec 0.7.4", "itertools 0.10.5", "log", - "nom", + "nom 7.1.3", "num-rational", "v_frame", ] @@ -643,7 +472,7 @@ dependencies = [ "rustversion", "serde", "sync_wrapper", - "tower", + "tower 0.4.13", "tower-layer", "tower-service", ] @@ -710,11 +539,21 @@ dependencies = [ "simd-abstraction", ] +[[package]] +name = "base64-simd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "339abbe78e73178762e23bea9dfd08e697eb3f3301cd4be981c0f78ba5859195" +dependencies = [ + "outref 0.5.2", + "vsimd", +] + [[package]] name = "better_scoped_tls" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50fd297a11c709be8348aec039c8b91de16075d2b2bdaee1bd562c0875993664" +checksum = "7cd228125315b132eed175bf47619ac79b945b26e56b848ba203ae4ea8603609" dependencies = [ "scoped-tls", ] 
@@ -734,7 +573,7 @@ version = "0.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f49d8fed880d473ea71efb9bf597651e77201bdd4893efe54c9e5d65ae04ce6f" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.9.0", "cexpr", "clang-sys", "itertools 0.13.0", @@ -745,21 +584,21 @@ dependencies = [ "regex", "rustc-hash 1.1.0", "shlex", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] name = "binding_macros" -version = "20.0.0" +version = "28.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e75fa4ddc354a7e46572bc5e745d19245fc1a4c980cd8a6a82debffdca13f39" +checksum = "cd2db24612502c080c6466d8a502a2b51954a831f2b5a87aa282ce84cbd2262a" dependencies = [ "anyhow", "console_error_panic_hook", "js-sys", "once_cell", "serde", - "serde-wasm-bindgen", + "serde-wasm-bindgen 0.4.5", "swc", "swc_common", "swc_ecma_ast", @@ -769,6 +608,26 @@ dependencies = [ "wasm-bindgen-futures", ] +[[package]] +name = "bitfield" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c7e6caee68becd795bfd65f1a026e4d00d8f0c2bc9be5eb568e1015f9ce3c34" +dependencies = [ + "bitfield-macros", +] + +[[package]] +name = "bitfield-macros" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "331afbb18ce7b644c0b428726d369c5dd37ca0b815d72a459fcc2896c3c8ad32" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.100", +] + [[package]] name = "bitflags" version = "1.3.2" @@ -777,9 +636,12 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.5.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" +checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" +dependencies = [ + "serde", +] [[package]] name = "bitreader" @@ -831,19 +693,6 @@ dependencies = [ "generic-array", ] -[[package]] -name = "blocking" -version = "1.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "703f41c54fc768e63e091340b424302bb1c29ef4aa0c7f10fe849dfb114d29ea" -dependencies = [ - "async-channel 2.3.1", - "async-task", - "futures-io", - "futures-lite 2.3.0", - "piper", -] - [[package]] name = "brotli-decompressor" version = "2.3.4" @@ -856,16 +705,16 @@ dependencies = [ [[package]] name = "browserslist-rs" -version = "0.17.0" +version = "0.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74c973b79d9b6b89854493185ab760c6ef8e54bcfad10ad4e33991e46b374ac8" +checksum = "2f95aff901882c66e4b642f3f788ceee152ef44f8a5ef12cb1ddee5479c483be" dependencies = [ "ahash 0.8.11", "chrono", "either", "indexmap 2.7.1", "itertools 0.13.0", - "nom", + "nom 7.1.3", "serde", "serde_json", "thiserror 1.0.69", @@ -938,7 +787,7 @@ checksum = "523363cbe1df49b68215efdf500b103ac3b0fb4836aed6d15689a076eadb8fff" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -961,13 +810,24 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.9.0" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" +checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" dependencies = [ "serde", ] +[[package]] +name = "bytes-str" +version = "0.2.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "95ea3da8de225a11bcd5cee66b00eddcc7fd03e55bbe839fa4735d3281afb758" +dependencies = [ + "bytes", + "rkyv 0.8.9", + "serde", +] + [[package]] name = "bytesize" version = "1.3.0" @@ -992,7 +852,7 @@ version = "8.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "031718ddb8f78aa5def78a09e90defe30151d1f6c672f937af4dd916429ed996" dependencies = [ - "semver 1.0.23", + "semver", "serde", "toml 0.5.11", "url", @@ -1015,7 +875,7 @@ checksum = "2d886547e41f740c616ae73108f6eb70afe6d940c7bc697cb30f13daec073037" dependencies = [ "camino", "cargo-platform", - "semver 1.0.23", + "semver", "serde", "serde_json", "thiserror 1.0.69", @@ -1029,7 +889,7 @@ checksum = "dd5eb614ed4c27c5d706420e4320fbe3216ab31fa1c33cd8246ac36dae4479ba" dependencies = [ "camino", "cargo-platform", - "semver 1.0.23", + "semver", "serde", "serde_json", "thiserror 2.0.12", @@ -1041,12 +901,6 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" -[[package]] -name = "castaway" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2698f953def977c68f935bb0dfa959375ad4638570e969e2f1e9f433cbf1af6" - [[package]] name = "castaway" version = "0.2.3" @@ -1078,7 +932,7 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" dependencies = [ - "nom", + "nom 7.1.3", ] [[package]] @@ -1103,6 +957,12 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + [[package]] name = "chromiumoxide" version = "0.5.4" @@ -1170,9 +1030,9 @@ dependencies = [ [[package]] name = "chrono" -version = "0.4.38" +version = "0.4.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +checksum = "1a7964611d71df112cb1730f2ee67324fcf4d0fc6606acbbe9bfe06df124637c" dependencies = [ "android-tzdata", "iana-time-zone", @@ -1180,7 +1040,7 @@ dependencies = [ "num-traits", "serde", "wasm-bindgen", - "windows-targets 0.52.6", + "windows-link", ] [[package]] @@ -1241,6 +1101,7 @@ dependencies = [ "anstyle", "clap_lex", "strsim 0.11.1", + "terminal_size", ] [[package]] @@ -1252,7 +1113,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -1276,6 +1137,101 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67ba02a97a2bd10f4b59b25c7973101c79642302776489e030cd13cdab09ed15" +[[package]] +name = "codspeed" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93f4cce9c27c49c4f101fffeebb1826f41a9df2e7498b7cd4d95c0658b796c6c" +dependencies = [ + "colored", + "libc", + "serde", + "serde_json", + "uuid", +] + +[[package]] +name = "codspeed-criterion-compat" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3c23d880a28a2aab52d38ca8481dd7a3187157d0a952196b6db1db3c8499725" +dependencies = [ + "codspeed", + "codspeed-criterion-compat-walltime", + 
"colored", + "futures", + "tokio", +] + +[[package]] +name = "codspeed-criterion-compat-walltime" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b0a2f7365e347f4f22a67e9ea689bf7bc89900a354e22e26cf8a531a42c8fbb" +dependencies = [ + "anes", + "cast", + "ciborium", + "clap", + "codspeed", + "criterion-plot", + "futures", + "is-terminal", + "itertools 0.10.5", + "num-traits", + "once_cell", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_derive", + "serde_json", + "tinytemplate", + "tokio", + "walkdir", +] + +[[package]] +name = "codspeed-divan-compat" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8620a09dfaf37b3c45f982c4b65bd8f9b0203944da3ffa705c0fcae6b84655ff" +dependencies = [ + "codspeed", + "codspeed-divan-compat-macros", + "codspeed-divan-compat-walltime", +] + +[[package]] +name = "codspeed-divan-compat-macros" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30fe872bc4214626b35d3a1706a905d0243503bb6ba3bb7be2fc59083d5d680c" +dependencies = [ + "divan-macros", + "itertools 0.14.0", + "proc-macro-crate 3.3.0", + "proc-macro2", + "quote", + "syn 2.0.100", +] + +[[package]] +name = "codspeed-divan-compat-walltime" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "104caa97b36d4092d89e24e4b103b40ede1edab03c0372d19e14a33f9393132b" +dependencies = [ + "cfg-if", + "clap", + "codspeed", + "condtype", + "divan-macros", + "libc", + "regex-lite", +] + [[package]] name = "color_quant" version = "1.1.0" @@ -1288,6 +1244,16 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" +[[package]] +name = "colored" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c" +dependencies = [ + "lazy_static", + "windows-sys 0.59.0", +] + [[package]] name = "combine" version = "4.6.7" @@ -1304,7 +1270,7 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f86b9c4c00838774a6d902ef931eff7470720c51d90c2e32cfe15dc304737b3f" dependencies = [ - "castaway 0.2.3", + "castaway", "cfg-if", "itoa", "ryu", @@ -1320,6 +1286,12 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "condtype" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf0a07a401f374238ab8e2f11a104d2851bf9ce711ec69804834de8af45c7af" + [[package]] name = "console" version = "0.15.10" @@ -1471,6 +1443,15 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "cow-replace" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6cab584c4b83b5b36f81a10bd15191fd77f70432d624787ee68ec64edd6d7ed" +dependencies = [ + "ascii", +] + [[package]] name = "cpufeatures" version = "0.2.9" @@ -1569,21 +1550,6 @@ version = "0.110.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56b08621c00321efcfa3eee6a3179adc009e21ea8d24ca7adc3c326184bc3f48" -[[package]] -name = "crc" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49fc9a695bca7f35f5f4c15cddc84415f66a74ea78eef08e90c5024f2b540e23" -dependencies = [ - "crc-catalog", -] - -[[package]] -name = "crc-catalog" -version = "1.1.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccaeedb56da03b09f598226e25e80088cb4cd25f316e6e4df7d695f0feeb1403" - [[package]] name = "crc32fast" version = "1.4.2" @@ -1593,34 +1559,6 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "criterion" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f" -dependencies = [ - "anes", - "cast", - "ciborium", - "clap", - "criterion-plot", - "futures", - "is-terminal", - "itertools 0.10.5", - "num-traits", - "once_cell", - "oorandom", - "plotters", - "rayon", - "regex", - "serde", - "serde_derive", - "serde_json", - "tinytemplate", - "tokio", - "walkdir", -] - [[package]] name = "criterion-plot" version = "0.5.0" @@ -1774,7 +1712,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331" dependencies = [ "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -1784,7 +1722,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd4056f63fce3b82d852c3da92b08ea59959890813a7f4ce9c0ff85b10cf301b" dependencies = [ "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -1797,37 +1735,6 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "curl" -version = "0.4.46" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e2161dd6eba090ff1594084e95fd67aeccf04382ffea77999ea94ed42ec67b6" -dependencies = [ - "curl-sys", - "libc", - "openssl-probe", - "openssl-sys", - "schannel", - "socket2 0.5.8", - "windows-sys 0.52.0", -] - -[[package]] -name = "curl-sys" -version = "0.4.74+curl-8.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8af10b986114528fcdc4b63b6f5f021b7057618411046a4de2ba0f0149a097bf" -dependencies = [ - "cc", - "libc", - "libnghttp2-sys", - "libz-sys", - "openssl-sys", - "pkg-config", - "vcpkg", - "windows-sys 0.52.0", -] - [[package]] name = "darling" version = "0.14.4" @@ -1873,7 +1780,7 @@ dependencies = [ "proc-macro2", "quote", "strsim 0.11.1", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -1895,7 +1802,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core 0.20.10", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -1961,14 +1868,14 @@ dependencies = [ ] [[package]] -name = "derivative" -version = "2.2.0" +name = "derive_arbitrary" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.100", ] [[package]] @@ -2010,7 +1917,7 @@ dependencies = [ "darling 0.20.10", "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -2030,7 +1937,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" dependencies = [ "derive_builder_core 0.20.2", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -2041,7 +1948,7 @@ checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -2061,7 +1968,7 @@ checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" dependencies = [ "proc-macro2", 
"quote", - "syn 2.0.95", + "syn 2.0.100", "unicode-xid", ] @@ -2112,7 +2019,18 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", +] + +[[package]] +name = "divan-macros" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8dc51d98e636f5e3b0759a39257458b22619cac7e96d932da6eeb052891bb67c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.100", ] [[package]] @@ -2130,7 +2048,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "88e3201db19ec4199af513d38c49fcbc5f8ca31d268f942e97324a826c9e9fdb" dependencies = [ - "nom", + "nom 7.1.3", ] [[package]] @@ -2228,7 +2146,7 @@ dependencies = [ "darling 0.20.10", "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -2249,29 +2167,12 @@ dependencies = [ [[package]] name = "errno" -version = "0.3.8" +version = "0.3.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +checksum = "cea14ef9355e3beab063703aa9dab15afd25f0667c341310c1e5274bb1d0da18" dependencies = [ "libc", - "windows-sys 0.52.0", -] - -[[package]] -name = "event-listener" -version = "2.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" - -[[package]] -name = "event-listener" -version = "3.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d93877bcde0eb80ca09131a08d23f0a5c18a620b01db137dba666d18cd9b30c2" -dependencies = [ - "concurrent-queue", - "parking", - "pin-project-lite", + "windows-sys 0.59.0", ] [[package]] @@ -2285,31 +2186,12 @@ dependencies = [ "pin-project-lite", ] -[[package]] -name = "event-listener-strategy" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f214dc438f977e6d4e3500aaa277f5ad94ca83fbbd9b1a15713ce2344ccc5a1" -dependencies = [ - "event-listener 5.4.0", - "pin-project-lite", -] - [[package]] name = "fallible-iterator" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649" -[[package]] -name = "fastrand" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" -dependencies = [ - "instant", -] - [[package]] name = "fastrand" version = "2.2.0" @@ -2365,6 +2247,12 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + [[package]] name = "foreign-types" version = "0.3.2" @@ -2391,13 +2279,13 @@ dependencies = [ [[package]] name = "from_variant" -version = "2.0.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d7ccf961415e7aa17ef93dcb6c2441faaa8e768abe09e659b908089546f74c5" +checksum = "accfe8b52dc15c1bace718020831f72ce91a4c096709a4d733868f4f4034e22a" dependencies = [ "proc-macro2", "swc_macros_common", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -2454,49 +2342,21 @@ checksum = 
"05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" [[package]] name = "futures-executor" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" -dependencies = [ - "futures-core", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-io" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" - -[[package]] -name = "futures-lite" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce" -dependencies = [ - "fastrand 1.9.0", - "futures-core", - "futures-io", - "memchr", - "parking", - "pin-project-lite", - "waker-fn", -] - -[[package]] -name = "futures-lite" -version = "2.3.0" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52527eb5074e35e9339c6b4e8d12600c7128b68fb25dcb9fa9dec18f7c25f3a5" +checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" dependencies = [ - "fastrand 2.2.0", "futures-core", - "futures-io", - "parking", - "pin-project-lite", + "futures-task", + "futures-util", ] +[[package]] +name = "futures-io" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" + [[package]] name = "futures-macro" version = "0.3.31" @@ -2505,7 +2365,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -2555,19 +2415,6 @@ dependencies = [ "slab", ] -[[package]] -name = "generator" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "979f00864edc7516466d6b3157706e06c032f22715700ddd878228a91d02bc56" -dependencies = [ - "cfg-if", - "libc", - "log", - "rustversion", - "windows 0.58.0", -] - [[package]] name = "generic-array" version = "0.14.7" @@ -2587,7 +2434,21 @@ dependencies = [ "cfg-if", "js-sys", "libc", - "wasi", + "wasi 0.11.0+wasi-snapshot-preview1", + "wasm-bindgen", +] + +[[package]] +name = "getrandom" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "r-efi", + "wasi 0.14.2+wasi-0.2.4", "wasm-bindgen", ] @@ -2626,9 +2487,9 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "globset" -version = "0.4.14" +version = "0.4.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1" +checksum = "54a1028dfc5f5df5da8a56a73e6c153c9a9708ec57232470703592a3f18e49f5" dependencies = [ "aho-corasick", "bstr", @@ -2637,18 +2498,6 @@ dependencies = [ "regex-syntax 0.8.5", ] -[[package]] -name = "gloo-timers" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" -dependencies = [ - "futures-channel", - "futures-core", - "js-sys", - "wasm-bindgen", -] - [[package]] name = "glyph-names" version = "0.2.0" @@ -2760,6 +2609,11 @@ name = "hashbrown" version = "0.15.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash", +] [[package]] name = "hdrhistogram" @@ -2770,7 +2624,7 @@ dependencies = [ "base64 0.21.4", "byteorder", "flate2", - "nom", + "nom 7.1.3", "num-traits", ] @@ -2782,9 +2636,9 @@ checksum = "db04bc24a18b9ea980628ecf00e6c0264f3c1426dac36c00cb49b6fbad8b0743" dependencies = [ "atomic-polyfill", "hash32", - "rustc_version 0.4.0", + "rustc_version", "serde", - "spin 0.9.8", + "spin", "stable_deref_trait", ] @@ -2832,15 +2686,15 @@ dependencies = [ [[package]] name = "hstr" -version = "1.0.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71399f53a92ef72ee336a4b30201c6e944827e14e0af23204c291aad9c24cc85" +checksum = "2d1638d2018a21b9ff65d7fc28c2271c76a5af6ff4f621b204d032bc649763a4" dependencies = [ "hashbrown 0.14.5", "new_debug_unreachable", "once_cell", "phf", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "triomphe 0.1.13", ] @@ -2902,9 +2756,9 @@ dependencies = [ [[package]] name = "httparse" -version = "1.8.0" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" +checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" [[package]] name = "httpdate" @@ -2912,33 +2766,6 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" -[[package]] -name = "httpmock" -version = "0.6.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b02e044d3b4c2f94936fb05f9649efa658ca788f44eb6b87554e2033fc8ce93" -dependencies = [ - "assert-json-diff", - "async-object-pool", - "async-trait", - "base64 0.21.4", - "crossbeam-utils", - "form_urlencoded", - "futures-util", - "hyper 0.14.28", - "isahc", - "lazy_static", - "levenshtein", - "log", - "regex", - "serde", - "serde_json", - "serde_regex", - "similar", - "tokio", - "url", -] - [[package]] name = "humantime" version = "2.1.0" @@ -2962,7 +2789,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2 0.4.9", + "socket2 0.5.10", "tokio", "tower-service", "tracing", @@ -2971,9 +2798,9 @@ dependencies = [ [[package]] name = "hyper" -version = "1.5.2" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "256fb8d4bd6413123cc9d91832d78325c48ff41677595be797d90f42969beae0" +checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" dependencies = [ "bytes", "futures-channel", @@ -2992,15 +2819,19 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.23.2" +version = "0.27.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1788965e61b367cd03a62950836d5cd41560c3577d90e40e0819373194d1661c" +checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" dependencies = [ - "http 0.2.11", - "hyper 0.14.28", - "rustls 0.20.9", + "http 1.1.0", + "hyper 1.6.0", + "hyper-util", + "rustls", + "rustls-pki-types", "tokio", "tokio-rustls", + "tower-service", + "webpki-roots 1.0.0", ] [[package]] @@ -3009,7 +2840,7 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" dependencies = [ - "hyper 1.5.2", + "hyper 1.6.0", "hyper-util", 
"pin-project-lite", "tokio", @@ -3029,6 +2860,22 @@ dependencies = [ "tokio-native-tls", ] +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper 1.6.0", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", +] + [[package]] name = "hyper-tungstenite" version = "0.9.0" @@ -3044,18 +2891,23 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.10" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" +checksum = "dc2fdfdbff08affe55bb779f33b053aa1fe5dd5b54c257343c17edfa55711bdb" dependencies = [ + "base64 0.22.1", "bytes", "futures-channel", + "futures-core", "futures-util", "http 1.1.0", "http-body 1.0.1", - "hyper 1.5.2", + "hyper 1.6.0", + "ipnet", + "libc", + "percent-encoding", "pin-project-lite", - "socket2 0.5.8", + "socket2 0.5.10", "tokio", "tower-service", "tracing", @@ -3072,7 +2924,7 @@ dependencies = [ "iana-time-zone-haiku", "js-sys", "wasm-bindgen", - "windows 0.48.0", + "windows", ] [[package]] @@ -3199,7 +3051,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -3330,7 +3182,6 @@ checksum = "8c9c992b02b5b4c94ea26e32fe5bccb7aa7d9f390ab5c1221ff895bc7ea8b652" dependencies = [ "equivalent", "hashbrown 0.15.2", - "rayon", "serde", ] @@ -3342,11 +3193,11 @@ checksum = "9f2cb48b81b1dc9f39676bf99f5499babfec7cd8fe14307f7b3d747208fb5690" [[package]] name = "inotify" -version = "0.9.6" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8069d3ec154eb856955c1c0fbffefbf5f3c40a104ec912d4797314c1801abff" +checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.9.0", "inotify-sys", "libc", ] @@ -3390,15 +3241,6 @@ dependencies = [ "similar", ] -[[package]] -name = "instant" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" -dependencies = [ - "cfg-if", -] - [[package]] name = "interpolate_name" version = "0.2.4" @@ -3407,18 +3249,7 @@ checksum = "c34819042dc3d3971c46c2190835914dfbe0c3c13f61449b2997f4e9722dfa60" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", -] - -[[package]] -name = "io-lifetimes" -version = "1.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" -dependencies = [ - "hermit-abi 0.3.2", - "libc", - "windows-sys 0.48.0", + "syn 2.0.100", ] [[package]] @@ -3436,6 +3267,16 @@ dependencies = [ "ipnet", ] +[[package]] +name = "iri-string" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2" +dependencies = [ + "memchr", + "serde", +] + [[package]] name = "is-macro" version = "0.3.5" @@ -3445,7 +3286,7 @@ dependencies = [ "Inflector", "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -3459,33 +3300,6 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "isahc" -version = "1.7.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "334e04b4d781f436dc315cb1e7515bd96826426345d498149e4bde36b67f8ee9" -dependencies = [ - "async-channel 1.9.0", - "castaway 0.1.2", - "crossbeam-utils", - "curl", - "curl-sys", - "encoding_rs", - "event-listener 2.5.3", - "futures-lite 1.13.0", - "http 0.2.11", - "log", - "mime", - "once_cell", - "polling 2.8.0", - "slab", - "sluice", - "tracing", - "tracing-futures", - "url", - "waker-fn", -] - [[package]] name = "itertools" version = "0.10.5" @@ -3513,12 +3327,62 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" +[[package]] +name = "jiff" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a064218214dc6a10fbae5ec5fa888d80c45d611aba169222fc272072bf7aef6" +dependencies = [ + "jiff-static", + "jiff-tzdb-platform", + "log", + "portable-atomic", + "portable-atomic-util", + "serde", + "windows-sys 0.59.0", +] + +[[package]] +name = "jiff-static" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "199b7932d97e325aff3a7030e141eafe7f2c6268e1d1b24859b753a627f45254" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.100", +] + +[[package]] +name = "jiff-tzdb" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1283705eb0a21404d2bfd6eef2a7593d240bc42a0bdb39db0ad6fa2ec026524" + +[[package]] +name = "jiff-tzdb-platform" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "875a5a69ac2bab1a891711cf5eccbec1ce0341ea805560dcd90b7a2e925132e8" +dependencies = [ + "jiff-tzdb", +] + [[package]] name = "jni" version = "0.21.1" @@ -3552,19 +3416,14 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.68" +version = "0.3.77" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "406cda4b368d531c842222cf9d2600a9a4acce8d29423695379c6868a143a9ee" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" dependencies = [ + "once_cell", "wasm-bindgen", ] -[[package]] -name = "json" -version = "0.12.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "078e285eafdfb6c4b434e0d31e8cfcb5115b651496faca5749b88fafd4f23bfd" - [[package]] name = "jsonc-parser" version = "0.21.0" @@ -3603,15 +3462,6 @@ dependencies = [ "libc", ] -[[package]] -name = "kv-log-macro" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" -dependencies = [ - "log", -] - [[package]] name = "lazy-regex" version = "3.0.1" @@ -3632,7 +3482,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -3647,12 +3497,6 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" -[[package]] -name = "levenshtein" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db13adb97ab515a3691f56e4dbab09283d0b86cb45abd991d8634a9d6f501760" 
- [[package]] name = "lexical" version = "6.1.1" @@ -3737,19 +3581,18 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.169" +version = "0.2.172" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" +checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa" [[package]] name = "libfuzzer-sys" -version = "0.4.7" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a96cfd5557eb82f2b83fed4955246c988d331975a002961b07c81584d107e7f7" +checksum = "cf78f52d400cf2d84a3a973a78a592b4adc535739e0a5597a0da6f0c357adc75" dependencies = [ "arbitrary", "cc", - "once_cell", ] [[package]] @@ -3773,14 +3616,10 @@ dependencies = [ ] [[package]] -name = "libnghttp2-sys" -version = "0.1.10+1.61.0" +name = "libunwind" +version = "1.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "959c25552127d2e1fa72f0e52548ec04fc386e827ba71a7bd01db46a447dc135" -dependencies = [ - "cc", - "libc", -] +checksum = "0c6639b70a7ce854b79c70d7e83f16b5dc0137cc914f3d7d03803b513ecc67ac" [[package]] name = "libyml" @@ -3792,32 +3631,21 @@ dependencies = [ "version_check", ] -[[package]] -name = "libz-sys" -version = "1.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9702761c3935f8cc2f101793272e202c72b99da8f4224a19ddcf1279a6450bbf" -dependencies = [ - "cc", - "libc", - "pkg-config", - "vcpkg", -] - [[package]] name = "lightningcss" -version = "1.0.0-alpha.65" +version = "1.0.0-alpha.67" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c84f971730745f4aaac013b6cf4328baf1548efc973c0d95cfd843a3c1ca07af" +checksum = "798fba4e1205eed356b8ed7754cc3f7f04914e27855ca641409f4a532e992149" dependencies = [ "ahash 0.8.11", - "bitflags 2.5.0", + "bitflags 2.9.0", + "browserslist-rs", "const-str", "cssparser", "cssparser-color", "dashmap 5.5.3", "data-encoding", - "getrandom", + "getrandom 0.2.15", "indexmap 2.7.1", "itertools 0.10.5", "lazy_static", @@ -3846,9 +3674,9 @@ dependencies = [ [[package]] name = "lightningcss-napi" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f5e41d371670417f71b779fe6d66959a68c21fbda563a747d795ac525f72450" +checksum = "2b254219299448a95dada4fa2b3bc70c73112d5d4858722b1e2b4ada1591ae22" dependencies = [ "cssparser", "lightningcss", @@ -3877,15 +3705,15 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.3.8" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" [[package]] name = "linux-raw-sys" -version = "0.4.14" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" [[package]] name = "litemap" @@ -3938,22 +3766,6 @@ name = "log" version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" -dependencies = [ - "value-bag", -] - -[[package]] -name = "loom" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca" 
-dependencies = [ - "cfg-if", - "generator", - "scoped-tls", - "tracing", - "tracing-subscriber", -] [[package]] name = "loop9" @@ -3973,6 +3785,12 @@ dependencies = [ "hashbrown 0.13.2", ] +[[package]] +name = "lru-slab" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" + [[package]] name = "lsp-server" version = "0.7.6" @@ -4025,6 +3843,17 @@ dependencies = [ "libc", ] +[[package]] +name = "macho-unwind-info" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb4bdc8b0ce69932332cf76d24af69c3a155242af95c226b2ab6c2e371ed1149" +dependencies = [ + "thiserror 2.0.12", + "zerocopy 0.8.24", + "zerocopy-derive 0.8.24", +] + [[package]] name = "malloc_buf" version = "0.0.6" @@ -4042,9 +3871,9 @@ checksum = "0ca88d725a0a943b096803bd34e73a4437208b6077654cc4ecb2947a5f91618d" [[package]] name = "markdown" -version = "1.0.0-alpha.22" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "790c11786cb51d02938e3eb38276575e1658b1dd8555875f5788a40670a33934" +checksum = "a5cab8f2cadc416a82d2e783a1946388b31654d391d1c7d92cc1f03e295b1deb" dependencies = [ "unicode-id", ] @@ -4081,11 +3910,11 @@ dependencies = [ [[package]] name = "mdxjs" -version = "0.3.3" -source = "git+https://github.com/kdy1/mdxjs-rs?branch=swc-core-21#477c41dd9cb462fa31f91359c9930b14fa920d67" +version = "1.0.4" +source = "git+https://github.com/kdy1/mdxjs-rs.git?branch=swc-core-29#14e6c9fff0ad0171ed04ea066ff66d932207962b" dependencies = [ "markdown", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "swc_core", ] @@ -4154,7 +3983,7 @@ checksum = "dcf09caffaac8068c346b6df2a7fc27a177fd20b39421a39ce0a211bde679a6c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -4225,7 +4054,7 @@ checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" dependencies = [ "libc", "log", - "wasi", + "wasi 0.11.0+wasi-snapshot-preview1", "windows-sys 0.48.0", ] @@ -4237,15 +4066,38 @@ checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" dependencies = [ "libc", "log", - "wasi", - "windows-sys 0.52.0", + "wasi 0.11.0+wasi-snapshot-preview1", + "windows-sys 0.52.0", +] + +[[package]] +name = "mockito" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7760e0e418d9b7e5777c0374009ca4c93861b9066f18cb334a20ce50ab63aa48" +dependencies = [ + "assert-json-diff", + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.6.0", + "hyper-util", + "log", + "rand 0.9.0", + "regex", + "serde_json", + "serde_urlencoded", + "similar", + "tokio", ] [[package]] name = "modularize_imports" -version = "0.80.0" +version = "0.89.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7216b54b877bf3f39bd39ba72ed94f4db6eb51832cf6259fee42c592a8b95d34" +checksum = "af33628faa5b89dc5d1ffeb3b587475ece84fc9531e9d552a0487a609e8b17c5" dependencies = [ "convert_case", "handlebars", @@ -4288,7 +4140,7 @@ checksum = "1bb5c1d8184f13f7d0ccbeeca0def2f9a181bce2624302793005f5ca8aa62e5e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -4298,7 +4150,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "214f07a80874bb96a8433b3cdfc84980d56c7b02e1a0d7ba4ba0db5cef785e2b" dependencies = [ "anyhow", - "bitflags 2.5.0", + "bitflags 2.9.0", 
"ctor", "napi-derive", "napi-sys", @@ -4325,7 +4177,7 @@ dependencies = [ "napi-derive-backend", "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -4339,8 +4191,8 @@ dependencies = [ "proc-macro2", "quote", "regex", - "semver 1.0.23", - "syn 2.0.95", + "semver", + "syn 2.0.100", ] [[package]] @@ -4396,34 +4248,34 @@ name = "next-api" version = "0.1.0" dependencies = [ "anyhow", - "auto-hash-map", + "codspeed-divan-compat", "either", "futures", "indexmap 2.7.1", "next-core", - "petgraph 0.6.3", "regex", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "serde_json", "swc_core", + "tempfile", + "tokio", "tracing", "turbo-rcstr", "turbo-tasks", + "turbo-tasks-backend", "turbo-tasks-build", "turbo-tasks-env", "turbo-tasks-fs", - "turbo-tasks-hash", - "turbo-tasks-memory", + "turbo-tasks-malloc", "turbopack", "turbopack-browser", - "turbopack-cli-utils", "turbopack-core", "turbopack-ecmascript", - "turbopack-env", "turbopack-node", "turbopack-nodejs", - "vergen 9.0.5", + "turbopack-wasm", + "vergen", ] [[package]] @@ -4431,6 +4283,7 @@ name = "next-build" version = "0.1.0" dependencies = [ "next-core", + "turbo-rcstr", "turbo-tasks-build", "turbopack-core", ] @@ -4444,7 +4297,7 @@ dependencies = [ "next-api", "next-core", "num_cpus", - "rand", + "rand 0.9.0", "serde_json", "tokio", "tokio-stream", @@ -4454,18 +4307,7 @@ dependencies = [ "turbo-tasks", "turbo-tasks-backend", "turbo-tasks-build", - "turbo-tasks-env", - "turbo-tasks-fs", "turbo-tasks-malloc", - "turbopack", - "turbopack-browser", - "turbopack-cli-utils", - "turbopack-core", - "turbopack-ecmascript", - "turbopack-ecmascript-runtime", - "turbopack-env", - "turbopack-node", - "turbopack-nodejs", "turbopack-trace-utils", ] @@ -4476,7 +4318,6 @@ dependencies = [ "allsorts", "anyhow", "async-trait", - "auto-hash-map", "base64 0.21.4", "either", "futures", @@ -4484,7 +4325,6 @@ dependencies = [ "indoc", "itertools 0.10.5", "lazy-regex", - "lazy_static", "mime_guess", "modularize_imports", "next-custom-transforms", @@ -4494,13 +4334,14 @@ dependencies = [ "react_remove_properties", "regex", "remove_console", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "serde_json", + "serde_path_to_error", "swc_core", - "swc_relay", "thiserror 1.0.69", "tracing", + "turbo-esregex", "turbo-rcstr", "turbo-tasks", "turbo-tasks-build", @@ -4515,12 +4356,10 @@ dependencies = [ "turbopack-ecmascript", "turbopack-ecmascript-plugins", "turbopack-ecmascript-runtime", - "turbopack-env", "turbopack-image", "turbopack-node", "turbopack-nodejs", "turbopack-static", - "turbopack-trace-server", "turbopack-trace-utils", ] @@ -4530,14 +4369,13 @@ version = "0.0.0" dependencies = [ "anyhow", "base64 0.21.4", + "bytes-str", "chrono", - "dashmap 6.1.0", "easy-error", "either", "hex", "indexmap 2.7.1", "indoc", - "lazy_static", "modularize_imports", "once_cell", "pathdiff", @@ -4545,8 +4383,7 @@ dependencies = [ "react_remove_properties", "regex", "remove_console", - "rustc-hash 1.1.0", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "serde_json", "sha1", @@ -4560,7 +4397,6 @@ dependencies = [ "turbo-rcstr", "turbopack-ecmascript-plugins", "urlencoding", - "walkdir", ] [[package]] @@ -4568,13 +4404,10 @@ name = "next-swc-napi" version = "0.0.0" dependencies = [ "anyhow", - "backtrace", "console-subscriber", - "dashmap 6.1.0", "dhat", - "getrandom", + "getrandom 0.2.15", "iana-time-zone", - "indexmap 2.7.1", "lightningcss-napi", "mdxjs", "napi", @@ -4586,9 +4419,8 @@ dependencies = [ "next-custom-transforms", "once_cell", "owo-colors 
3.5.0", - "par-core", - "rand", - "rustc-hash 2.1.0", + "rand 0.9.0", + "rustc-hash 2.1.1", "serde", "serde_json", "supports-hyperlinks", @@ -4604,8 +4436,6 @@ dependencies = [ "turbo-tasks-build", "turbo-tasks-fs", "turbo-tasks-malloc", - "turbo-tasks-memory", - "turbopack", "turbopack-core", "turbopack-ecmascript-hmr-protocol", "turbopack-ecmascript-plugins", @@ -4635,12 +4465,22 @@ version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.9.0", "cfg-if", - "cfg_aliases", + "cfg_aliases 0.1.1", "libc", ] +[[package]] +name = "nom" +version = "5.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08959a387a676302eebf4ddbcbc611da04285579f76f88ee0506c63b1a61dd4b" +dependencies = [ + "memchr", + "version_check", +] + [[package]] name = "nom" version = "7.1.3" @@ -4668,23 +4508,29 @@ dependencies = [ [[package]] name = "notify" -version = "6.1.1" +version = "8.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6205bd8bb1e454ad2e27422015fb5e4f2bcc7e08fa8f27058670d208324a4d2d" +checksum = "2fee8403b3d66ac7b26aee6e40a897d85dc5ce26f44da36b8b73e987cc52e943" dependencies = [ - "bitflags 2.5.0", - "crossbeam-channel", + "bitflags 2.9.0", "filetime", "fsevent-sys", "inotify", "kqueue", "libc", "log", - "mio 0.8.11", + "mio 1.0.3", + "notify-types", "walkdir", - "windows-sys 0.48.0", + "windows-sys 0.59.0", ] +[[package]] +name = "notify-types" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e0826a989adedc2a244799e823aece04662b66609d96af8dff7ac6df9a8925d" + [[package]] name = "nu-ansi-term" version = "0.46.0" @@ -4721,7 +4567,7 @@ checksum = "9e6a0fd4f737c707bd9086cc16c925f294943eb62eb71499e9fd4cf71f8b9f4e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -4790,7 +4636,7 @@ version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" dependencies = [ - "proc-macro-crate", + "proc-macro-crate 1.3.1", "proc-macro2", "quote", "syn 1.0.109", @@ -4915,7 +4761,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -4924,6 +4770,12 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f222829ae9293e33a9f5e9f440c6760a3d450a64affe1846486b140db81c1f4" +[[package]] +name = "outref" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a80800c0488c3a21695ea981a54918fbb37abf04f4d0720c453632255e2ff0e" + [[package]] name = "overload" version = "0.1.1" @@ -4957,9 +4809,9 @@ dependencies = [ [[package]] name = "par-core" -version = "1.0.3" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b506ab63a8bd3cd38858c7bfc2d078a189dc3210c7f8c9be1bbaf50c082a0ae" +checksum = "e96cbd21255b7fb29a5d51ef38a779b517a91abd59e2756c039583f43ef4c90f" dependencies = [ "once_cell", "rayon", @@ -4967,9 +4819,9 @@ dependencies = [ [[package]] name = "par-iter" -version = "1.0.2" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a5b20f31e9ba82bfcbbb54a67aa40be6cebec9f668ba5753be138f9523c531a" +checksum = "3eae0176a010bb94b9a67f0eb9da0fd31410817d58850649c54f485124c9a71a" dependencies = [ "either", 
"par-core", @@ -4977,17 +4829,17 @@ dependencies = [ [[package]] name = "parcel_selectors" -version = "0.28.1" +version = "0.28.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dccbc6fb560df303a44e511618256029410efbc87779018f751ef12c488271fe" +checksum = "54fd03f1ad26cb6b3ec1b7414fa78a3bd639e7dbb421b1a60513c96ce886a196" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.9.0", "cssparser", "log", "phf", "phf_codegen", "precomputed-hash", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "smallvec", "static-self", @@ -4999,7 +4851,7 @@ version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "485b74d7218068b2b7c0e3ff12fbc61ae11d57cb5d8224f525bd304c6be05bbb" dependencies = [ - "base64-simd", + "base64-simd 0.7.0", "data-url", "rkyv 0.7.45", "serde", @@ -5038,9 +4890,9 @@ dependencies = [ [[package]] name = "paste" -version = "1.0.14" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" [[package]] name = "path-clean" @@ -5106,7 +4958,7 @@ dependencies = [ "pest_meta", "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -5169,7 +5021,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" dependencies = [ "phf_shared", - "rand", + "rand 0.8.5", ] [[package]] @@ -5182,7 +5034,7 @@ dependencies = [ "phf_shared", "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -5211,7 +5063,7 @@ checksum = "39407670928234ebc5e6e580247dd567ad73a3578460c5990f9503df207e8f07" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -5226,17 +5078,6 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" -[[package]] -name = "piper" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae1d5c74c9876f070d3e8fd503d748c7d974c3e48da8f41350fa5222ef9b4391" -dependencies = [ - "atomic-waker", - "fastrand 2.2.0", - "futures-io", -] - [[package]] name = "pkg-config" version = "0.3.26" @@ -5279,7 +5120,7 @@ checksum = "52a40bc70c2c58040d2d8b167ba9a5ff59fc9dab7ad44771cfde3dcfde7a09c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -5296,34 +5137,18 @@ dependencies = [ ] [[package]] -name = "polling" -version = "2.8.0" +name = "portable-atomic" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" -dependencies = [ - "autocfg", - "bitflags 1.3.2", - "cfg-if", - "concurrent-queue", - "libc", - "log", - "pin-project-lite", - "windows-sys 0.48.0", -] +checksum = "350e9b48cbc6b0e028b0473b114454c6316e57336ee184ceab6e53f72c178b3e" [[package]] -name = "polling" -version = "3.7.2" +name = "portable-atomic-util" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3ed00ed3fbf728b5816498ecd316d1716eecaced9c0c8d2c5a6740ca214985b" +checksum = "d8a2f0d8d040d7848a709caf78912debcc3f33ee4b3cac47d73d1e1069e83507" dependencies = [ - "cfg-if", - "concurrent-queue", - "hermit-abi 0.4.0", - "pin-project-lite", - "rustix 0.38.41", - "tracing", - "windows-sys 0.52.0", + 
"portable-atomic", ] [[package]] @@ -5332,7 +5157,7 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "be97d76faf1bfab666e1375477b23fde79eccf0276e9b63b92a39d676a889ba9" dependencies = [ - "rand", + "rand 0.8.5", ] [[package]] @@ -5377,17 +5202,17 @@ checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" [[package]] name = "preset_env_base" -version = "2.0.1" +version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07852df2dda2f0ab8c3407a6fd19e9389563af11c20f6c299bd07ff9fc96d6ae" +checksum = "d7ef56d3bd1b2cb104e716ec6babbca1df3b59754d4e3e99426163572e6bc0cc" dependencies = [ "anyhow", "browserslist-rs", "dashmap 5.5.3", "from_variant", "once_cell", - "rustc-hash 2.1.0", - "semver 1.0.23", + "rustc-hash 2.1.1", + "semver", "serde", "st-map", "tracing", @@ -5410,7 +5235,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "483f8c21f64f3ea09fe0f30f5d48c3e8eefe5dac9129f0075f76593b4c1da705" dependencies = [ "proc-macro2", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -5423,6 +5248,15 @@ dependencies = [ "toml_edit 0.19.15", ] +[[package]] +name = "proc-macro-crate" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edce586971a4dfaa28950c6f18ed55e0406c1ab88bbce2c6f6293a7aaba73d35" +dependencies = [ + "toml_edit 0.22.27", +] + [[package]] name = "proc-macro-error" version = "1.0.4" @@ -5466,14 +5300,14 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] name = "proc-macro2" -version = "1.0.92" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" +checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84" dependencies = [ "unicode-ident", ] @@ -5494,7 +5328,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8021cf59c8ec9c432cfc2526ac6b8aa508ecaf29cd415f271b8406c1b851c3fd" dependencies = [ "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -5517,7 +5351,7 @@ dependencies = [ "itertools 0.13.0", "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -5575,7 +5409,7 @@ checksum = "ca414edb151b4c8d125c12566ab0d74dc9cdba36fb80eb7b848c15f495fd32d1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -5627,15 +5461,76 @@ dependencies = [ "parking_lot", ] +[[package]] +name = "quinn" +version = "0.11.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "626214629cda6781b6dc1d316ba307189c85ba657213ce642d9c77670f8202c8" +dependencies = [ + "bytes", + "cfg_aliases 0.2.1", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash 2.1.1", + "rustls", + "socket2 0.5.10", + "thiserror 2.0.12", + "tokio", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-proto" +version = "0.11.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49df843a9161c85bb8aae55f101bc0bac8bcafd637a620d9122fd7e0b2f7422e" +dependencies = [ + "bytes", + "getrandom 0.3.2", + "lru-slab", + "rand 0.9.0", + "ring", + "rustc-hash 2.1.1", + "rustls", + "rustls-pki-types", + "slab", + "thiserror 2.0.12", + "tinyvec", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-udp" +version = "0.5.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ee4e529991f949c5e25755532370b8af5d114acae52326361d68d47af64aa842" +dependencies = [ + "cfg_aliases 0.2.1", + "libc", + "once_cell", + "socket2 0.5.10", + "tracing", + "windows-sys 0.59.0", +] + [[package]] name = "quote" -version = "1.0.36" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" dependencies = [ "proc-macro2", ] +[[package]] +name = "r-efi" +version = "5.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" + [[package]] name = "radium" version = "0.7.0" @@ -5664,8 +5559,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", - "rand_chacha", - "rand_core", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94" +dependencies = [ + "rand_chacha 0.9.0", + "rand_core 0.9.3", + "zerocopy 0.8.24", ] [[package]] @@ -5675,7 +5581,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core 0.9.3", ] [[package]] @@ -5684,7 +5600,16 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom", + "getrandom 0.2.15", +] + +[[package]] +name = "rand_core" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" +dependencies = [ + "getrandom 0.3.2", ] [[package]] @@ -5713,8 +5638,8 @@ dependencies = [ "once_cell", "paste", "profiling", - "rand", - "rand_chacha", + "rand 0.8.5", + "rand_chacha 0.3.1", "simd_helpers", "system-deps", "thiserror 1.0.69", @@ -5764,9 +5689,9 @@ dependencies = [ [[package]] name = "react_remove_properties" -version = "0.34.0" +version = "0.43.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21596ac4c46d13af94ad83662e689118721b17faf82f04b8af294286d946b546" +checksum = "d41f0e87a906823014d055b3423ff6c47418d683252a9d77b2805f044f2ca788" dependencies = [ "serde", "swc_atoms", @@ -5802,7 +5727,7 @@ checksum = "7f7473c2cfcf90008193dd0e3e16599455cb601a9fce322b5bb55de799664925" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -5850,6 +5775,12 @@ dependencies = [ "regex-syntax 0.8.5", ] +[[package]] +name = "regex-lite" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53a49587ad06b26609c52e423de037e7f57f20d53535d66e08c695f347df952a" + [[package]] name = "regex-syntax" version = "0.6.29" @@ -5874,6 +5805,16 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "regress" +version = "0.10.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ef7fa9ed0256d64a688a3747d0fef7a88851c18a5e1d57f115f38ec2e09366" +dependencies = [ + "hashbrown 0.15.2", + "memchr", +] + [[package]] name = "relative-path" version = "1.9.0" @@ -5882,9 +5823,9 @@ checksum = "c707298afce11da2efef2f600116fa93ffa7a032b5d7b628aa17711ec81383ca" [[package]] name = "remove_console" -version = "0.35.0" +version = "0.44.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c92fdad4c3efa4d17866c535cdbf0d18e89151f2d23deee137509c017125c0f9" +checksum = "e57135d82b8e10acd8c15aaf8ea63d7edbdc9d32220740c6d6ba0727505e3c8e" dependencies = [ "serde", "swc_atoms", @@ -5933,8 +5874,7 @@ dependencies = [ "http 0.2.11", "http-body 0.4.5", "hyper 0.14.28", - "hyper-rustls", - "hyper-tls", + "hyper-tls 0.5.0", "ipnet", "js-sys", "log", @@ -5943,45 +5883,71 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls 0.20.9", - "rustls-pemfile", "serde", "serde_json", "serde_urlencoded", "tokio", "tokio-native-tls", - "tokio-rustls", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", - "webpki-roots 0.22.6", "winreg 0.10.1", ] [[package]] -name = "rgb" -version = "0.8.50" +name = "reqwest" +version = "0.12.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57397d16646700483b67d2dd6511d79318f9d057fdbd21a4066aeac8b41d310a" +checksum = "eabf4c97d9130e2bf606614eb937e86edac8292eaa6f422f995d7e8de1eb1813" dependencies = [ - "bytemuck", + "base64 0.22.1", + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.6.0", + "hyper-rustls", + "hyper-tls 0.6.0", + "hyper-util", + "js-sys", + "log", + "native-tls", + "percent-encoding", + "pin-project-lite", + "quinn", + "rustls", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tokio-native-tls", + "tokio-rustls", + "tokio-util", + "tower 0.5.2", + "tower-http", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "webpki-roots 1.0.0", ] [[package]] -name = "ring" -version = "0.16.20" +name = "rgb" +version = "0.8.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +checksum = "57397d16646700483b67d2dd6511d79318f9d057fdbd21a4066aeac8b41d310a" dependencies = [ - "cc", - "libc", - "once_cell", - "spin 0.5.2", - "untrusted 0.7.1", - "web-sys", - "winapi", + "bytemuck", ] [[package]] @@ -5992,13 +5958,24 @@ checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" dependencies = [ "cc", "cfg-if", - "getrandom", + "getrandom 0.2.15", "libc", - "spin 0.9.8", - "untrusted 0.9.0", + "spin", + "untrusted", "windows-sys 0.52.0", ] +[[package]] +name = "ringmap" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "963156bc0da83715cf1aa699f60533a20fcd771c4bbb06548d628eccf1c1ac3e" +dependencies = [ + "equivalent", + "hashbrown 0.15.2", + "serde", +] + [[package]] name = "rkyv" version = "0.7.45" @@ -6055,7 +6032,7 @@ checksum = "beb382a4d9f53bd5c0be86b10d8179c3f8a14c30bf774ff77096ed6581e35981" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -6078,7 +6055,7 @@ dependencies = [ "futures", "futures-timer", "rstest_macros", - "rustc_version 0.4.0", + "rustc_version", ] [[package]] @@ -6090,7 +6067,7 @@ 
dependencies = [ "cfg-if", "proc-macro2", "quote", - "rustc_version 0.4.0", + "rustc_version", "syn 1.0.109", "unicode-ident", ] @@ -6102,8 +6079,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "45f80dcc84beab3a327bbe161f77db25f336a1452428176787c8c79ac79d7073" dependencies = [ "quote", - "rand", - "rustc_version 0.4.0", + "rand 0.8.5", + "rustc_version", "syn 1.0.109", ] @@ -6121,18 +6098,9 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustc-hash" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7fb8039b3032c191086b10f11f319a6e99e1e82889c5cc6046f515c9db1d497" - -[[package]] -name = "rustc_version" -version = "0.2.3" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" -dependencies = [ - "semver 0.9.0", -] +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" [[package]] name = "rustc_version" @@ -6140,21 +6108,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver 1.0.23", -] - -[[package]] -name = "rustix" -version = "0.37.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fea8ca367a3a01fe35e6943c400addf443c0f57670e6ec51196f71a4b8762dd2" -dependencies = [ - "bitflags 1.3.2", - "errno", - "io-lifetimes", - "libc", - "linux-raw-sys 0.3.8", - "windows-sys 0.48.0", + "semver", ] [[package]] @@ -6163,7 +6117,7 @@ version = "0.38.41" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d7f649912bc1495e167a6edee79151c84b1bad49748cb4f1f1167f459f6224f6" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.9.0", "errno", "libc", "linux-raw-sys 0.4.14", @@ -6171,15 +6125,16 @@ dependencies = [ ] [[package]] -name = "rustls" -version = "0.20.9" +name = "rustix" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" +checksum = "c71e83d6afe7ff64890ec6b71d6a69bb8a610ab78ce364b3352876bb4c801266" dependencies = [ - "log", - "ring 0.16.20", - "sct", - "webpki", + "bitflags 2.9.0", + "errno", + "libc", + "linux-raw-sys 0.9.4", + "windows-sys 0.59.0", ] [[package]] @@ -6190,27 +6145,21 @@ checksum = "5065c3f250cbd332cd894be57c40fa52387247659b14a2d6041d121547903b1b" dependencies = [ "log", "once_cell", - "ring 0.17.8", + "ring", "rustls-pki-types", "rustls-webpki", "subtle", "zeroize", ] -[[package]] -name = "rustls-pemfile" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d194b56d58803a43635bdc398cd17e383d6f71f9182b9a192c127ca42494a59b" -dependencies = [ - "base64 0.21.4", -] - [[package]] name = "rustls-pki-types" version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2bf47e6ff922db3825eb750c4e2ff784c6ff8fb9e13046ef6a1d1c5401b0b37" +dependencies = [ + "web-time", +] [[package]] name = "rustls-webpki" @@ -6218,9 +6167,9 @@ version = "0.102.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" dependencies = [ - "ring 0.17.8", + "ring", "rustls-pki-types", - "untrusted 0.9.0", + "untrusted", ] [[package]] @@ -6265,6 +6214,16 @@ version = "1.0.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "4950d85bc52415f8432144c97c4791bd0c4f7954de32a7270ee9cccd3c22b12b" +[[package]] +name = "saffron" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03fb9a628596fc7590eb7edbf7b0613287be78df107f5f97b118aad59fb2eea9" +dependencies = [ + "chrono", + "nom 5.1.3", +] + [[package]] name = "same-file" version = "1.0.6" @@ -6290,6 +6249,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09c024468a378b7e36765cd36702b7a90cc3cba11654f6685c8f233408e89e92" dependencies = [ "dyn-clone", + "indexmap 2.7.1", "schemars_derive", "serde", "serde_json", @@ -6305,7 +6265,7 @@ dependencies = [ "proc-macro2", "quote", "serde_derive_internals", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -6320,16 +6280,6 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" -[[package]] -name = "sct" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" -dependencies = [ - "ring 0.16.20", - "untrusted 0.7.1", -] - [[package]] name = "seahash" version = "4.1.0" @@ -6365,15 +6315,6 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "58bf37232d3bb9a2c4e641ca2a11d83b5062066f88df7fed36c28772046d65ba" -[[package]] -name = "semver" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" -dependencies = [ - "semver-parser", -] - [[package]] name = "semver" version = "1.0.23" @@ -6383,20 +6324,20 @@ dependencies = [ "serde", ] -[[package]] -name = "semver-parser" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" - [[package]] name = "send-trace-to-jaeger" version = "0.1.0" dependencies = [ - "reqwest", + "reqwest 0.11.17", "serde_json", ] +[[package]] +name = "seq-macro" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bc711410fbe7399f390ca1c3b60ad0f53f80e95c5eb935e52268a0e2cd49acc" + [[package]] name = "serde" version = "1.0.217" @@ -6427,6 +6368,17 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "serde-wasm-bindgen" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8302e169f0eddcc139c70f139d19d6467353af16f9fce27e8c30158036a1e16b" +dependencies = [ + "js-sys", + "serde", + "wasm-bindgen", +] + [[package]] name = "serde_bytes" version = "0.11.15" @@ -6444,7 +6396,7 @@ checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -6455,7 +6407,7 @@ checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -6510,7 +6462,7 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -6570,7 +6522,7 @@ dependencies = [ "darling 0.20.10", "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -6641,6 +6593,7 @@ version = "0.2.10" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "939a4c696178684fc5fc1426625b882805418bbb56e056d21f9d4946a9d6ff51" dependencies = [ + "hashbrown 0.15.2", "indexmap 2.7.1", "serde_json", "shrink-to-fit-macro", @@ -6655,7 +6608,7 @@ checksum = "16d9bafdb4ba0cafd45a5aea7e8bc35b0f6280a603795c2ba9a823ca6afaba73" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -6694,7 +6647,7 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9cadb29c57caadc51ff8346233b5cec1d240b68ce55cf1afc764818791876987" dependencies = [ - "outref", + "outref 0.1.0", ] [[package]] @@ -6745,17 +6698,6 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "03b634d87b960ab1a38c4fe143b508576f075e7c978bfad18217645ebfdfa2ec" -[[package]] -name = "sluice" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d7400c0eff44aa2fcb5e31a5f24ba9716ed90138769e4977a2ba6014ae63eb5" -dependencies = [ - "async-channel 1.9.0", - "futures-core", - "futures-io", -] - [[package]] name = "smallvec" version = "1.13.1" @@ -6805,39 +6747,14 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.8" +version = "0.5.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" +checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" dependencies = [ "libc", "windows-sys 0.52.0", ] -[[package]] -name = "sourcemap" -version = "9.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27c4ea7042fd1a155ad95335b5d505ab00d5124ea0332a06c8390d200bb1a76a" -dependencies = [ - "base64-simd", - "bitvec", - "data-encoding", - "debugid", - "if_chain", - "rustc-hash 1.1.0", - "rustc_version 0.2.3", - "serde", - "serde_json", - "unicode-id-start", - "url", -] - -[[package]] -name = "spin" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" - [[package]] name = "spin" version = "0.9.8" @@ -6885,7 +6802,7 @@ dependencies = [ "pmutil", "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -6918,14 +6835,14 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "string_enum" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9fe66b8ee349846ce2f9557a26b8f1e74843c4a13fb381f9a3d73617a5f956a" +checksum = "24b0e5369ebc6ec5fadbc400599467eb6ba5a614c03de094fcb233dddac2f5f4" dependencies = [ "proc-macro2", "quote", "swc_macros_common", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -6942,14 +6859,14 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "styled_components" -version = "0.108.0" +version = "0.117.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e36ce3c43b2274eb050e25be507147a0fd9ba2def42f8631f9115b82648ffe39" +checksum = "4235b01243a0771e5652d14b179230f0196ebeb1a9c7ea8ae76c8c1b054d494c" dependencies = [ "Inflector", "once_cell", "regex", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "swc_atoms", "swc_common", @@ -6961,15 +6878,15 @@ dependencies = [ [[package]] name = "styled_jsx" -version = "0.84.0" +version = "0.93.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"cb8a3e56055d66000e98dbc5dbd490fcce5206e1c264c597c2c9070b86076e2a" +checksum = "61a00e432196d7f7779b5ba6e296680b8f3190d5885823d42bd1af528b9529f1" dependencies = [ "anyhow", "lightningcss", "parcel_selectors", "preset_env_base", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "swc_atoms", "swc_common", @@ -6981,7 +6898,7 @@ dependencies = [ "swc_css_prefixer", "swc_css_visit", "swc_ecma_ast", - "swc_ecma_minifier 14.0.0", + "swc_ecma_minifier", "swc_ecma_parser", "swc_ecma_transforms_base", "swc_ecma_utils", @@ -7004,12 +6921,13 @@ checksum = "804f44ed3c63152de6a9f90acbea1a110441de43006ea51bcce8f436196a288b" [[package]] name = "swc" -version = "20.0.0" +version = "28.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1311db8bbe20ad5374dd10a92d2dba3bf5ac59ade06292bdd381d1897c74664a" +checksum = "2ea9e984d162fe322e4dc496099ec71015add0ca59b69517b961d2d1d898bf75" dependencies = [ "anyhow", "base64 0.22.1", + "bytes-str", "dashmap 5.5.3", "either", "indexmap 2.7.1", @@ -7023,21 +6941,19 @@ dependencies = [ "parking_lot", "pathdiff", "regex", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "serde_json", - "sourcemap", "swc_atoms", - "swc_cached", "swc_common", "swc_compiler_base", "swc_config", "swc_ecma_ast", - "swc_ecma_codegen 10.0.0", + "swc_ecma_codegen", "swc_ecma_ext_transforms", "swc_ecma_lints", "swc_ecma_loader", - "swc_ecma_minifier 16.0.0", + "swc_ecma_minifier", "swc_ecma_parser", "swc_ecma_preset_env", "swc_ecma_transforms", @@ -7050,6 +6966,7 @@ dependencies = [ "swc_node_comments", "swc_plugin_proxy", "swc_plugin_runner", + "swc_sourcemap", "swc_timer", "swc_transform_common", "swc_typescript", @@ -7080,58 +6997,26 @@ dependencies = [ "bumpalo", "hashbrown 0.14.5", "ptr_meta 0.3.0", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "triomphe 0.1.13", ] [[package]] name = "swc_atoms" -version = "5.0.0" +version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d7077ba879f95406459bc0c81f3141c529b34580bc64d7ab7bd15e7118a0391" +checksum = "ebf4c40238f7224596754940676547dab6bbf8f33d9f4560b966fc66f2fe00db" dependencies = [ "bytecheck 0.8.0", "hstr", "once_cell", "rancor", "rkyv 0.8.9", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "shrink-to-fit", ] -[[package]] -name = "swc_bundler" -version = "15.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "900a8dba7470ecf731cba5d733e06134108142b1309147c25d32fab1f6846ac2" -dependencies = [ - "anyhow", - "crc", - "dashmap 5.5.3", - "indexmap 2.7.1", - "is-macro", - "once_cell", - "parking_lot", - "petgraph 0.7.1", - "radix_fmt", - "rayon", - "relative-path", - "rustc-hash 2.1.0", - "swc_atoms", - "swc_common", - "swc_ecma_ast", - "swc_ecma_codegen 10.0.0", - "swc_ecma_loader", - "swc_ecma_parser", - "swc_ecma_transforms_base", - "swc_ecma_transforms_optimization", - "swc_ecma_utils", - "swc_ecma_visit", - "swc_graph_analyzer", - "tracing", -] - [[package]] name = "swc_cached" version = "2.0.0" @@ -7142,20 +7027,21 @@ dependencies = [ "dashmap 5.5.3", "once_cell", "regex", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", ] [[package]] name = "swc_common" -version = "8.0.1" +version = "13.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e4a932c152e7142de2d5dba1c393e5523c47cd8fe656e5b0d411954bbaf1810" +checksum = "6865f71f363e63306cedec3f3cf1cb9e80acaa9229261ba2569467a19060c7c8" dependencies = [ "anyhow", "ast_node", "better_scoped_tls", "bytecheck 0.8.0", + "bytes-str", "cfg-if", "either", 
"from_variant", @@ -7165,14 +7051,14 @@ dependencies = [ "parking_lot", "rancor", "rkyv 0.8.9", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "shrink-to-fit", "siphasher", - "sourcemap", "swc_allocator", "swc_atoms", "swc_eq_ignore_macros", + "swc_sourcemap", "swc_visit", "termcolor", "tracing", @@ -7182,77 +7068,83 @@ dependencies = [ [[package]] name = "swc_compiler_base" -version = "17.0.0" +version = "25.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc96c4a212a3120deeefbc180bd7a4fed3d076af87d452d0b5773cc98b3934f6" +checksum = "2a45b0fe21cd1e18247069717627228075039305ee548d40e2969e6fefe57fef" dependencies = [ "anyhow", "base64 0.22.1", + "bytes-str", "napi", "napi-derive", "once_cell", "pathdiff", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "serde_json", - "sourcemap", "swc_allocator", "swc_atoms", "swc_common", "swc_config", "swc_ecma_ast", - "swc_ecma_codegen 10.0.0", - "swc_ecma_minifier 16.0.0", + "swc_ecma_codegen", + "swc_ecma_minifier", "swc_ecma_parser", "swc_ecma_visit", + "swc_sourcemap", "swc_timer", ] [[package]] name = "swc_config" -version = "2.0.0" +version = "3.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb63364aebd1a8490a80fa8933825c6916d4df55d5472312d5adb62c9fb4e4ba" +checksum = "d94f41e0f3c4c119a06af5e164674b63ae7eb6d7c1c60e46036c4a548f9fbe44" dependencies = [ "anyhow", + "bytes-str", + "dashmap 5.5.3", + "globset", "indexmap 2.7.1", + "once_cell", + "regex", + "regress", + "rustc-hash 2.1.1", "serde", "serde_json", - "sourcemap", - "swc_cached", "swc_config_macro", + "swc_sourcemap", ] [[package]] name = "swc_config_macro" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f2ebd37ef52a8555c8c9be78b694d64adcb5e3bc16c928f030d82f1d65fac57" +checksum = "7b416e8ce6de17dc5ea496e10c7012b35bbc0e3fef38d2e065eed936490db0b3" dependencies = [ "proc-macro2", "quote", "swc_macros_common", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] name = "swc_core" -version = "21.0.1" +version = "29.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c89e035b38b98cbfa1950ca38f7cae673e3377ce80ed4e1dc58242a0f81dd8b8" +checksum = "0df473ca42b70e49aab2e90d4f76574f0e3eb4a42ddc95a33f8c93d64af67b55" dependencies = [ "binding_macros", + "par-core", "swc", "swc_allocator", "swc_atoms", - "swc_bundler", - "swc_cached", "swc_common", "swc_ecma_ast", - "swc_ecma_codegen 10.0.0", + "swc_ecma_codegen", "swc_ecma_lints", "swc_ecma_loader", - "swc_ecma_minifier 16.0.0", + "swc_ecma_minifier", "swc_ecma_parser", "swc_ecma_preset_env", "swc_ecma_quote_macros", @@ -7269,14 +7161,14 @@ dependencies = [ "swc_plugin_proxy", "swc_plugin_runner", "testing", - "vergen 9.0.4", + "vergen", ] [[package]] name = "swc_css_ast" -version = "8.0.0" +version = "13.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9482f1ab79c5de548a8872421a6625fbf7a70102a354bb16da280689edd1768" +checksum = "ccf4593805ec1ea036deec19c79396cf0b40220209588ac7650f95e44206d919" dependencies = [ "is-macro", "string_enum", @@ -7286,13 +7178,13 @@ dependencies = [ [[package]] name = "swc_css_codegen" -version = "8.0.0" +version = "13.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f75578c97d9338cb6ae4007bda693690bac1c5b5d0bf817d43cd0b0e08632a1" +checksum = "93b5eaa8cb60684e99308b5b27449f95ef14aa0f190991b44a26541fe9c6276b" dependencies = [ "auto_impl", - "bitflags 2.5.0", - "rustc-hash 2.1.0", + 
"bitflags 2.9.0", + "rustc-hash 2.1.1", "serde", "swc_atoms", "swc_common", @@ -7303,23 +7195,23 @@ dependencies = [ [[package]] name = "swc_css_codegen_macros" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50abd25b3b79f18423cdf99b0d11dee24e64496be3b8abe18c10a2c40bd6c91f" +checksum = "3189549f4991e1f97ecbabf8a65c6c8c443581087575e8ca5ddc84a986670c59" dependencies = [ "proc-macro2", "quote", "swc_macros_common", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] name = "swc_css_compat" -version = "8.0.0" +version = "13.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbfec0694cca0950515b8d87dd8dda991e45d36e257f5efd8cb7682e7617c125" +checksum = "53ae4654e40c5be3d783d4288cddc23852858474b329328a14a5b48fea022978" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.9.0", "once_cell", "serde", "serde_json", @@ -7332,11 +7224,11 @@ dependencies = [ [[package]] name = "swc_css_minifier" -version = "8.0.0" +version = "13.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6183c2b64cc58c0d9fccc993c7084360242c08ec9a1e3f93e798bf99fcd37a23" +checksum = "3bfb906a537a51da83be9fb5f71bbad4f9fcbdc3995fb065097c480b3de966b7" dependencies = [ - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "swc_atoms", "swc_common", @@ -7347,9 +7239,9 @@ dependencies = [ [[package]] name = "swc_css_parser" -version = "8.0.0" +version = "13.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d445ed47fad61fa7f0a60c1bfd6364c6686d5f3482959f8c13bed2789e98ee26" +checksum = "fd735dbda9ced56a1fd244763a204e4d9c3bac188b7a97674d606fac220dd2e6" dependencies = [ "lexical", "serde", @@ -7360,13 +7252,13 @@ dependencies = [ [[package]] name = "swc_css_prefixer" -version = "9.0.0" +version = "15.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bac1e4bfc032749c2521ea63972e59bf3472a2897d6bbfa7747566eb7ccf7b89" +checksum = "0ab14c4f3989cf7124c0df82815d7c5c471ef67aa24bdf1beaa3efa5b58592d1" dependencies = [ "once_cell", "preset_env_base", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "serde_json", "swc_atoms", @@ -7378,12 +7270,12 @@ dependencies = [ [[package]] name = "swc_css_utils" -version = "8.0.0" +version = "13.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0976acf568ddc227f407ade30677f4e36122bd9906cc4c3997796bf76fb773cd" +checksum = "01dd9afe2c110ec0ed49c40eed88a06e3aaf96b70714e7c2bc6a586955eb7a78" dependencies = [ "once_cell", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "serde_json", "swc_atoms", @@ -7394,9 +7286,9 @@ dependencies = [ [[package]] name = "swc_css_visit" -version = "8.0.0" +version = "13.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2b941b45c434b6875146a0675a4c60c65a79d8c41a37457f8a33c05519d5c7f" +checksum = "722aea467676951de05afbe2df94082bb0abb73cf814bd50e20318f1ae09dcf3" dependencies = [ "serde", "swc_atoms", @@ -7407,17 +7299,19 @@ dependencies = [ [[package]] name = "swc_ecma_ast" -version = "8.1.0" +version = "13.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01f80679b1afc52ae0663eed0a2539cc3c108d48c287b5601712f9850d9fa9c2" +checksum = "f1ddc264ed13ae03aa30e1c89798502f9ddbe765a4ad695054add1074ffbc5cb" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.9.0", "bytecheck 0.8.0", "is-macro", "num-bigint", + "once_cell", "phf", "rancor", "rkyv 0.8.9", + "rustc-hash 2.1.1", "scoped-tls", "serde", 
"shrink-to-fit", @@ -7430,32 +7324,9 @@ dependencies = [ [[package]] name = "swc_ecma_codegen" -version = "8.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f131ade75f9a3cfea38dbce11893f5636b0954de973ad29a2556124322a08372" -dependencies = [ - "ascii", - "compact_str", - "memchr", - "num-bigint", - "once_cell", - "regex", - "rustc-hash 2.1.0", - "serde", - "sourcemap", - "swc_allocator", - "swc_atoms", - "swc_common", - "swc_ecma_ast", - "swc_ecma_codegen_macros 1.0.1", - "tracing", -] - -[[package]] -name = "swc_ecma_codegen" -version = "10.0.0" +version = "15.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b85453d346d0642f296c2b3aa204886a6ae2b9652262c3468d6f4556c1ed020d" +checksum = "1719b3bb5bff1c99cfb6fbd2129e7a7a363d3ddf50e22b95143c1877559d872a" dependencies = [ "ascii", "compact_str", @@ -7463,48 +7334,37 @@ dependencies = [ "num-bigint", "once_cell", "regex", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", + "ryu-js", "serde", - "sourcemap", "swc_allocator", "swc_atoms", "swc_common", "swc_ecma_ast", - "swc_ecma_codegen_macros 2.0.0", + "swc_ecma_codegen_macros", + "swc_sourcemap", "tracing", ] [[package]] name = "swc_ecma_codegen_macros" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ac2ff0957329e0dfcde86a1ac465382e189bf42a5989720d3476bea78eaa31a" -dependencies = [ - "proc-macro2", - "quote", - "swc_macros_common", - "syn 2.0.95", -] - -[[package]] -name = "swc_ecma_codegen_macros" -version = "2.0.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e99e1931669a67c83e2c2b4375674f6901d1480994a76aa75b23f1389e6c5076" +checksum = "845c8312c82545780f837992bb15fff1dc3464f644465d5ed0abd1196cd090d3" dependencies = [ "proc-macro2", "quote", "swc_macros_common", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] name = "swc_ecma_compat_bugfixes" -version = "12.0.0" +version = "19.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e908297dfe18472b82b391ae444a72dbd63c4b5f2823eba52c1bf7972903952" +checksum = "356e45a22368e4a4c7ff1438d778e071daf77a7bf1e506b3a9877d3b5d0d6d42" dependencies = [ - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "swc_atoms", "swc_common", "swc_ecma_ast", @@ -7518,9 +7378,9 @@ dependencies = [ [[package]] name = "swc_ecma_compat_common" -version = "12.0.0" +version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb2d5902317bbf8e8c1944e63f19057e6dff1fb60a8a73f33bb26bdb2d365662" +checksum = "bc2df0e12f54d47cca0255d99ae03766e314579ee10804f83699d0a8d7af17fa" dependencies = [ "swc_common", "swc_ecma_ast", @@ -7531,14 +7391,14 @@ dependencies = [ [[package]] name = "swc_ecma_compat_es2015" -version = "12.0.0" +version = "19.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb1efa640c57cbc4eaa40625275a86ff99a29cd0f4997668c88117e86390e821" +checksum = "613d59fc91170b523608ee9b2e3206dc969fc763ecd4709cca1cb7606b869f3d" dependencies = [ "arrayvec 0.7.4", "indexmap 2.7.1", "is-macro", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "serde_derive", "smallvec", @@ -7558,9 +7418,9 @@ dependencies = [ [[package]] name = "swc_ecma_compat_es2016" -version = "12.0.0" +version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5b890417e8080d460e1962c73d58f94cca5b27c5ec89f8ba37a114c7dd6a76b" +checksum = "223c7fe67b8586117a8ed969ce14cc9e4b9f7d4792052dc227485be4f6d58597" dependencies = [ 
"swc_atoms", "swc_common", @@ -7575,9 +7435,9 @@ dependencies = [ [[package]] name = "swc_ecma_compat_es2017" -version = "12.0.0" +version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c2d327146bb2b7b936b0d78e4212b039b1aa4149bbc187fd76db1ee3176e755" +checksum = "46ec946ed7f16de4850fd46022d99b6b10ed05c9736d6ee5efcbfa861b003c95" dependencies = [ "serde", "swc_atoms", @@ -7593,9 +7453,9 @@ dependencies = [ [[package]] name = "swc_ecma_compat_es2018" -version = "12.0.0" +version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a41fe86e2a237f1b87ed4d34c20a3721665328fc8f1b8e5e6bdeb022ce52f148" +checksum = "edb019c275026b788b240c86d07b41757bbcc883a3df5283f2310d2afee9af20" dependencies = [ "serde", "swc_atoms", @@ -7612,9 +7472,9 @@ dependencies = [ [[package]] name = "swc_ecma_compat_es2019" -version = "12.0.0" +version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e06197f2f74f2a6366cfbf68d4de4feabf42bd2532413c71347ba7cdbe964c40" +checksum = "2ad53e027532c3e30855395c7d88862501654b43aaa3c65316a66905c6a5b6cc" dependencies = [ "swc_atoms", "swc_common", @@ -7628,9 +7488,9 @@ dependencies = [ [[package]] name = "swc_ecma_compat_es2020" -version = "13.0.0" +version = "19.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92568d138eec2894c644fbf865401778026b42b45fa1073739b732cd66d55b42" +checksum = "6ea2e62880d1a83aee7e653443a7f878e03d9072b69c1649fceb6b2fbc8036a0" dependencies = [ "serde", "swc_atoms", @@ -7646,9 +7506,9 @@ dependencies = [ [[package]] name = "swc_ecma_compat_es2021" -version = "12.0.0" +version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b38614b689a8ed0b4cda05bee30a7f908ea621db6010888f407be282884ecbe" +checksum = "1d4d588af0fb0928ce0dc19db304e90a1dccb20701fac9ced61bdb7ea65b9227" dependencies = [ "swc_atoms", "swc_common", @@ -7662,11 +7522,11 @@ dependencies = [ [[package]] name = "swc_ecma_compat_es2022" -version = "13.0.0" +version = "19.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2923bf7ce2236f36aef951bd204ec115a17af421cdc696ff526c9ba22983533f" +checksum = "2e9390f39b879e5f7f2a62990a05d29e2c0e8dad2c6d3f76bab3104512c41e92" dependencies = [ - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "swc_atoms", "swc_common", "swc_ecma_ast", @@ -7682,9 +7542,9 @@ dependencies = [ [[package]] name = "swc_ecma_compat_es3" -version = "12.0.0" +version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4b386df40a8b1d0a71eb54b5766ce483bb4f9311c4df931035542a39341861" +checksum = "89174560c3fdc0964797afd57b7ea9ba084ba1946eeb4fcac3590602ef911336" dependencies = [ "swc_common", "swc_ecma_ast", @@ -7697,9 +7557,9 @@ dependencies = [ [[package]] name = "swc_ecma_ext_transforms" -version = "12.0.0" +version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0064bdc27ebff66cb92e596b13e9c0e13c671c56b327c0083c200e4793c8db2b" +checksum = "0583067e30e1eac8bd6adba3654d34739dd7d67175f3424b9fb00084cc9b0e8b" dependencies = [ "phf", "swc_atoms", @@ -7709,18 +7569,46 @@ dependencies = [ "swc_ecma_visit", ] +[[package]] +name = "swc_ecma_lexer" +version = "17.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61f2e87edcfeac99e09c8b47b7b4a921fe2fef0a733f330dc4420727e637d774" +dependencies = [ + "arrayvec 0.7.4", + "ascii", + "bitflags 2.9.0", + "cow-replace", + "either", + 
"new_debug_unreachable", + "num-bigint", + "num-traits", + "phf", + "rustc-hash 2.1.1", + "seq-macro", + "serde", + "smallvec", + "smartstring", + "stacker", + "swc_atoms", + "swc_common", + "swc_ecma_ast", + "tracing", + "typed-arena", +] + [[package]] name = "swc_ecma_lints" -version = "12.0.0" +version = "19.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac89356dc7ab49dc30e9219fdb57cfc35a80aec3c0ae2e12c2a3488f9cfce7dd" +checksum = "a6e0d4862d46c0b6b92828b266bd663580d684f5206d1c4932739fa7be136a23" dependencies = [ "auto_impl", "dashmap 5.5.3", "par-core", "parking_lot", "regex", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "swc_atoms", "swc_common", @@ -7732,9 +7620,9 @@ dependencies = [ [[package]] name = "swc_ecma_loader" -version = "8.0.0" +version = "13.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a801462c997b71e4add7684ce4953c7d6200c75b5552b8d594783da84ad9564c" +checksum = "9b08fa5f55ac0188a35a75f27574329e129b06cbb0d517ab7b2093eff45b2745" dependencies = [ "anyhow", "dashmap 5.5.3", @@ -7744,59 +7632,22 @@ dependencies = [ "parking_lot", "path-clean 0.1.0", "pathdiff", - "rustc-hash 2.1.0", - "serde", - "serde_json", - "swc_atoms", - "swc_cached", - "swc_common", - "tracing", -] - -[[package]] -name = "swc_ecma_minifier" -version = "14.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb6c3ea74a80bf1d21bba94f823aa9e90f903b81b345f99c1595faf87d732c63" -dependencies = [ - "arrayvec 0.7.4", - "indexmap 2.7.1", - "num-bigint", - "num_cpus", - "once_cell", - "par-core", - "par-iter", - "parking_lot", - "phf", - "radix_fmt", - "regex", - "rustc-hash 2.1.0", - "ryu-js", + "rustc-hash 2.1.1", "serde", "serde_json", - "swc_allocator", "swc_atoms", "swc_common", - "swc_config", - "swc_ecma_ast", - "swc_ecma_codegen 8.1.0", - "swc_ecma_parser", - "swc_ecma_transforms_base", - "swc_ecma_transforms_optimization", - "swc_ecma_usage_analyzer", - "swc_ecma_utils", - "swc_ecma_visit", - "swc_timer", "tracing", ] [[package]] name = "swc_ecma_minifier" -version = "16.0.0" +version = "23.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96929d4669712d21609fbd1ba24461c964d93ed23702bd87cc530f4b307ecc34" +checksum = "074576937cf3c1749edd06cd05573fc2fed9570c57e9e28f870c1e525ec97bb1" dependencies = [ "arrayvec 0.7.4", + "bitflags 2.9.0", "indexmap 2.7.1", "num-bigint", "num_cpus", @@ -7807,7 +7658,7 @@ dependencies = [ "phf", "radix_fmt", "regex", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "ryu-js", "serde", "serde_json", @@ -7816,7 +7667,7 @@ dependencies = [ "swc_common", "swc_config", "swc_ecma_ast", - "swc_ecma_codegen 10.0.0", + "swc_ecma_codegen", "swc_ecma_parser", "swc_ecma_transforms_base", "swc_ecma_transforms_optimization", @@ -7829,16 +7680,18 @@ dependencies = [ [[package]] name = "swc_ecma_parser" -version = "11.0.0" +version = "17.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41e06ecaef86a547831f7f01f342434e4b0d0f363762f8e7a2b84da7a0a5f92e" +checksum = "8317bbac117ce986efd166b32017f3f5c07def31291e91f709b764501e103890" dependencies = [ + "arrayvec 0.7.4", + "bitflags 2.9.0", "either", "new_debug_unreachable", "num-bigint", "num-traits", "phf", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "smallvec", "smartstring", @@ -7846,23 +7699,24 @@ dependencies = [ "swc_atoms", "swc_common", "swc_ecma_ast", + "swc_ecma_lexer", "tracing", "typed-arena", ] [[package]] name = "swc_ecma_preset_env" -version = "15.0.0" 
+version = "23.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd6e90087fe511cde69e3648bceb90b04be01236451ced67486371f827d691e1" +checksum = "e996c47428aab2686a420cc28aaac584427dcf58c54a7c44f3f5f64a3fed2813" dependencies = [ "anyhow", "dashmap 5.5.3", "indexmap 2.7.1", "once_cell", "preset_env_base", - "rustc-hash 2.1.0", - "semver 1.0.23", + "rustc-hash 2.1.1", + "semver", "serde", "serde_json", "st-map", @@ -7877,27 +7731,27 @@ dependencies = [ [[package]] name = "swc_ecma_quote_macros" -version = "11.0.0" +version = "17.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26132f0851c46a258f954cc00ca6c71fe6ab4520f6fde722e6e8a200c61f6c83" +checksum = "ad2796d0a1f8dc7f03773331349ddea2d3d9b3ba11f291c18590c6fb37e7d89b" dependencies = [ "anyhow", "proc-macro2", "quote", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "swc_atoms", "swc_common", "swc_ecma_ast", "swc_ecma_parser", "swc_macros_common", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] name = "swc_ecma_testing" -version = "8.0.0" +version = "14.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e72a43b7acd904fa0c6d244a72aeda66febbc5a9720975481cb836d6804b604" +checksum = "bccfc5af9527cf3efe6d59749011769bcc69213dda8eadbfa0211da2c19c4812" dependencies = [ "anyhow", "hex", @@ -7908,9 +7762,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms" -version = "15.0.0" +version = "22.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13aeeeb6ba750d144d49d96f900063706e8e4ff45d63d1ccde0ce5f441bcee6a" +checksum = "997f66127d99492f5eba755503915912dab5adfbac9a3a95257fb9088ac2e834" dependencies = [ "par-core", "swc_atoms", @@ -7929,18 +7783,18 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_base" -version = "12.0.0" +version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0b747f04a004d9b56b903305e4567e1d30c9cd226a8310a29cac06f7ac8173a" +checksum = "a6a9971c1f27f6b3ebcad7424e81861c35bfd009be81786da71a6e3a7002d808" dependencies = [ "better_scoped_tls", - "bitflags 2.5.0", + "bitflags 2.9.0", "indexmap 2.7.1", "once_cell", "par-core", "phf", "rayon", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "smallvec", "swc_atoms", @@ -7954,9 +7808,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_classes" -version = "12.0.0" +version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d871bbd46d14d032a48c14096abd778a8a87831638343f28b581c3025daa7086" +checksum = "5fd24f6d56d12d2170178d3d90593c7eb2df7f15656f05522b2f872f0ed7af5d" dependencies = [ "swc_atoms", "swc_common", @@ -7968,9 +7822,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_compat" -version = "13.0.0" +version = "20.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbfdfb50bd6db7991105f371b23ebb7cc79d48f43f53866a9a55dfbf7cfacd36" +checksum = "0a4920aadf61e7554104de3cf12a8b0f34b091a4e216d1486f47bdac3196780f" dependencies = [ "arrayvec 0.7.4", "indexmap 2.7.1", @@ -8006,35 +7860,35 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_macros" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6845dfb88569f3e8cd05901505916a8ebe98be3922f94769ca49f84e8ccec8f7" +checksum = "bc777288799bf6786e5200325a56e4fbabba590264a4a48a0c70b16ad0cf5cd8" dependencies = [ "proc-macro2", "quote", "swc_macros_common", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] name = 
"swc_ecma_transforms_module" -version = "13.0.0" +version = "20.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0cf50886962aa3d7d20317a486971b91002a930b236c1e4af1f1050280b4070" +checksum = "e9adf8a0e833abee9b6baf05e7504be1bdda9937837a65f877ea05599b3e4d1f" dependencies = [ "Inflector", "anyhow", - "bitflags 2.5.0", + "bitflags 2.9.0", "indexmap 2.7.1", "is-macro", "path-clean 1.0.1", "pathdiff", "regex", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "swc_atoms", - "swc_cached", "swc_common", + "swc_config", "swc_ecma_ast", "swc_ecma_loader", "swc_ecma_parser", @@ -8046,17 +7900,18 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_optimization" -version = "12.0.0" +version = "19.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6646a0a5e3662a2a86369a42f5203f1c92584c37502f9b79d4d10613db0c1fb3" +checksum = "148b59208253c618c0e1c363f6f5bdd6ff309fb09743dfa4c0fa0b1bd220e454" dependencies = [ + "bytes-str", "dashmap 5.5.3", "indexmap 2.7.1", "once_cell", "par-core", "petgraph 0.7.1", "rayon", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde_json", "swc_atoms", "swc_common", @@ -8071,12 +7926,12 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_proposal" -version = "12.0.0" +version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "048ba8acaa043f9468bb3bd1f5aae6f2e6b06865119226f9c45a971a012cc2d8" +checksum = "64f990b680ccc79fc42283051d6fd264d78849a1eb95fd8df7de8e00e5e1ab21" dependencies = [ "either", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "smallvec", "swc_atoms", @@ -8091,16 +7946,17 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_react" -version = "13.0.0" +version = "20.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b66c31438de864f9694493d3f3a08744a5604b59df03774d09e0f541f29976c" +checksum = "212612746ce4f866a966b980474b00b93378fde8fa1af37e7da6e9fc3d003eac" dependencies = [ "base64 0.22.1", + "bytes-str", "dashmap 5.5.3", "indexmap 2.7.1", "once_cell", "rayon", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "sha1", "string_enum", @@ -8118,9 +7974,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_testing" -version = "14.0.0" +version = "21.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "001e32bba70b2456a495dc7394673b233a19f76cdb46d7b0ce52ef1af8ecc573" +checksum = "dc4df5696950c51104c179e90ed76a711be652d68cc385ee76fb35031b18c6e6" dependencies = [ "ansi_term", "anyhow", @@ -8129,28 +7985,29 @@ dependencies = [ "serde", "serde_json", "sha2", - "sourcemap", "swc_allocator", "swc_common", "swc_ecma_ast", - "swc_ecma_codegen 10.0.0", + "swc_ecma_codegen", "swc_ecma_parser", "swc_ecma_testing", "swc_ecma_transforms_base", "swc_ecma_utils", "swc_ecma_visit", + "swc_sourcemap", "tempfile", "testing", ] [[package]] name = "swc_ecma_transforms_typescript" -version = "13.0.0" +version = "20.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cec3c91a2c37372746ebc5608e30b7c2c3af60216768b59ec6413ee2bfe44c29" +checksum = "020205bafa5e6a01023bff2aa66b1bcef03c10b4113a3f73443f478392d209c5" dependencies = [ + "bytes-str", "once_cell", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "ryu-js", "serde", "swc_atoms", @@ -8164,12 +8021,13 @@ dependencies = [ [[package]] name = "swc_ecma_usage_analyzer" -version = "13.0.0" +version = "19.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"037ca87d5d7c72a341f1aef8059b7eeca4785fedca7361e6d380f749a6f53c58" +checksum = "c357c7ff7ae2bf50adbe89c04ed94c9b99d34563df69386aba15a05e4560fc9a" dependencies = [ + "bitflags 2.9.0", "indexmap 2.7.1", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "swc_atoms", "swc_common", "swc_ecma_ast", @@ -8181,9 +8039,9 @@ dependencies = [ [[package]] name = "swc_ecma_utils" -version = "12.0.0" +version = "18.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71d6c8ba7d987dcc254f05ad2c23e7a6ec3f259611af2923a8c1a0602556cd21" +checksum = "938751c806f23256256e6839914ab33e4ac6e79a3fa2e717d004469881d2e3df" dependencies = [ "indexmap 2.7.1", "num_cpus", @@ -8191,7 +8049,7 @@ dependencies = [ "par-core", "par-iter", "rayon", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "ryu-js", "swc_atoms", "swc_common", @@ -8203,9 +8061,9 @@ dependencies = [ [[package]] name = "swc_ecma_visit" -version = "8.0.0" +version = "13.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f7a65fa06d0c0f709f1df4e820ccdc4eca7b3db7f9d131545e20c2ac2f1cd23" +checksum = "7ad28e3449b376bfe1f2bde28bfcf305961ba23c1e205bedb03a7c108a1d1ff6" dependencies = [ "new_debug_unreachable", "num-bigint", @@ -8219,45 +8077,46 @@ dependencies = [ [[package]] name = "swc_emotion" -version = "0.84.0" +version = "0.93.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25ed6d006839d82e7afee335b71ef02689baf985dc5774f2fe499d37c0f56126" +checksum = "597b4dc24051c53685f00d97b2b64f40197c2fafbf3fbce0aca43f2bd9caede6" dependencies = [ "base64 0.22.1", "byteorder", + "bytes-str", "once_cell", "radix_fmt", "regex", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", - "sourcemap", "swc_atoms", "swc_common", "swc_ecma_ast", - "swc_ecma_codegen 8.1.0", + "swc_ecma_codegen", "swc_ecma_transforms", "swc_ecma_utils", "swc_ecma_visit", + "swc_sourcemap", "swc_trace_macro", "tracing", ] [[package]] name = "swc_eq_ignore_macros" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e96e15288bf385ab85eb83cff7f9e2d834348da58d0a31b33bdb572e66ee413e" +checksum = "c16ce73424a6316e95e09065ba6a207eba7765496fed113702278b7711d4b632" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] name = "swc_error_reporters" -version = "9.1.2" +version = "15.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc812b793a12b737b6cb949a6e8c87dfcd006c07d9490a5c4529d61e890003dc" +checksum = "2f667f51b431565cf70ae62cee971eca8d9472e8b5ff17e37116842014dfacc8" dependencies = [ "anyhow", "miette", @@ -8269,47 +8128,34 @@ dependencies = [ "swc_common", ] -[[package]] -name = "swc_graph_analyzer" -version = "9.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc0f4e0f8aa5907f0070ab5d234b8efc2fb0542859421a0e155b401de1549d05" -dependencies = [ - "auto_impl", - "petgraph 0.7.1", - "rustc-hash 2.1.0", - "swc_common", - "tracing", -] - [[package]] name = "swc_macros_common" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a509f56fca05b39ba6c15f3e58636c3924c78347d63853632ed2ffcb6f5a0ac7" +checksum = "aae1efbaa74943dc5ad2a2fb16cbd78b77d7e4d63188f3c5b4df2b4dcd2faaae" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] name = "swc_node_comments" -version = "8.0.0" +version = "13.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f97dba66fc5f0df68c706dc99ade59bcba4ce55c585117eefccafe1337ca270f" +checksum = "76aeca25bbc7c9bc4a73ea9d1ea407f2392f7b4829e03b10f59b04af49bd4e96" dependencies = [ "dashmap 5.5.3", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "swc_atoms", "swc_common", ] [[package]] name = "swc_nodejs_common" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb8398dd81e4dc5ea31d9f932cbdcebef0c16fb85c289b7c4aa976f68a43d87e" +checksum = "53ccd9a395f5c7d485221b23206f0ce36d00d8e08cf9fa5d398ba56a8302b554" dependencies = [ "anyhow", "napi", @@ -8321,26 +8167,26 @@ dependencies = [ [[package]] name = "swc_plugin_macro" -version = "1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0917ccfdcd3fa6cf41bdacef2388702a3b274f9ea708d930e1e8db37c7c3e1c6" +checksum = "ace467dfafbbdf3aecff786b8605b35db57d945e92fd88800569aa2cba0cdf61" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] name = "swc_plugin_proxy" -version = "8.0.0" +version = "13.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a18c199683d9f946db8dfca444212a3551e74a7c563196b154d5ac30f3bf9de6" +checksum = "4592caaec04f5de44f5a507a8d14bedee21e9cd8ca366ccf48169347162ac250" dependencies = [ "better_scoped_tls", "bytecheck 0.8.0", "rancor", "rkyv 0.8.9", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "swc_common", "swc_ecma_ast", "swc_trace_macro", @@ -8349,26 +8195,27 @@ dependencies = [ [[package]] name = "swc_plugin_runner" -version = "9.0.0" +version = "15.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "132317a7245b344d654da8cd3c6cdb6fcd72d1e7d028d1085aa679cfb5e9801a" +checksum = "d893bf51dce4d733d45789bedffc8b312e33bec187ae1f2022408fc61b859d28" dependencies = [ "anyhow", "enumset", "futures", "once_cell", "parking_lot", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "serde_json", + "swc_atoms", "swc_common", "swc_ecma_ast", "swc_plugin_proxy", "swc_transform_common", "tokio", "tracing", - "vergen 9.0.4", - "virtual-fs 0.19.0", + "vergen", + "virtual-fs", "wasmer", "wasmer-cache", "wasmer-compiler-cranelift", @@ -8377,9 +8224,9 @@ dependencies = [ [[package]] name = "swc_relay" -version = "0.54.0" +version = "0.63.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3798e23ffcc216dc177f12ff184163adc6e546fd02888c5ac3bf9af46c6c4ec9" +checksum = "3418f3bb6a05b62b37f808b0c173251e18312789d935540504b434ff9dd42688" dependencies = [ "once_cell", "regex", @@ -8393,6 +8240,25 @@ dependencies = [ "tracing", ] +[[package]] +name = "swc_sourcemap" +version = "9.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9755c673c6a83c461e98fa018f681adb8394a3f44f89a06f27e80fd4fe4fa1e4" +dependencies = [ + "base64-simd 0.8.0", + "bitvec", + "bytes-str", + "data-encoding", + "debugid", + "if_chain", + "rustc-hash 2.1.1", + "serde", + "serde_json", + "unicode-id-start", + "url", +] + [[package]] name = "swc_timer" version = "1.0.0" @@ -8404,24 +8270,24 @@ dependencies = [ [[package]] name = "swc_trace_macro" -version = "2.0.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c78717a841565df57f811376a3d19c9156091c55175e12d378f3a522de70cef" +checksum = "559185db338f1bcb50297aafd4f79c0956c84dc71a66da4cffb57acf9d93fd88" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] name = "swc_transform_common" -version = "2.0.0" 
+version = "7.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e40bbeef964d6edd66081a31bbfeef913bb0be536e398392f99e8e91b7da63eb" +checksum = "f958ab7a99fad6a7c68bbf7a10e857255bbc4e9a23d79dec7ad2912cbb9292c1" dependencies = [ "better_scoped_tls", "once_cell", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "serde_json", "swc_common", @@ -8429,13 +8295,13 @@ dependencies = [ [[package]] name = "swc_typescript" -version = "11.0.1" +version = "17.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8538a8b2e8d8a3ebbf58fe7f933d7b4bb01a291fbd7356352ea255cc15bbc70" +checksum = "965205aa6b60a2edc9d59369bb5f984221fd7a0c612443f1a0892d56bcfe8889" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.9.0", "petgraph 0.7.1", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "swc_atoms", "swc_common", "swc_ecma_ast", @@ -8446,9 +8312,9 @@ dependencies = [ [[package]] name = "swc_visit" -version = "2.0.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9138b6a36bbe76dd6753c4c0794f7e26480ea757bee499738bedbbb3ae3ec5f3" +checksum = "62fb71484b486c185e34d2172f0eabe7f4722742aad700f426a494bb2de232a2" dependencies = [ "either", "new_debug_unreachable", @@ -8467,9 +8333,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.95" +version = "2.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46f71c0377baf4ef1cc3e3402ded576dccc315800fbc62dfc7fe04b009773b4a" +checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0" dependencies = [ "proc-macro2", "quote", @@ -8481,6 +8347,9 @@ name = "sync_wrapper" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] [[package]] name = "synstructure" @@ -8490,7 +8359,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -8564,16 +8433,22 @@ version = "0.12.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1" +[[package]] +name = "target-triple" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ac9aa371f599d22256307c24a9d748c041e548cbf599f35d890f9d365361790" + [[package]] name = "tempfile" -version = "3.14.0" +version = "3.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c" +checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" dependencies = [ - "cfg-if", - "fastrand 2.2.0", + "fastrand", + "getrandom 0.3.2", "once_cell", - "rustix 0.38.41", + "rustix 1.0.7", "windows-sys 0.59.0", ] @@ -8613,9 +8488,9 @@ dependencies = [ [[package]] name = "testing" -version = "8.0.0" +version = "14.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d32ddc0e2ebd072cbffe7424087267da990708a5bc3ae29e075904468450275" +checksum = "57ab605afeade5ad021de4acc02431c38a3beabca406d3a651a2feb445f60994" dependencies = [ "ansi_term", "cargo_metadata 0.18.1", @@ -8623,7 +8498,7 @@ dependencies = [ "once_cell", "pretty_assertions", "regex", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "serde_json", "swc_common", @@ -8635,9 +8510,9 @@ dependencies = [ [[package]] name = 
"testing_macros" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2d27bf245b90a80d5aa231133418ae7db98f032855ce5292e12071ab29c4b26" +checksum = "b7442bd3ca09f38d4788dc5ebafbc1967c3717726b4b074db011d470b353548b" dependencies = [ "anyhow", "glob", @@ -8646,7 +8521,7 @@ dependencies = [ "quote", "regex", "relative-path", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -8686,7 +8561,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -8697,7 +8572,7 @@ checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -8797,7 +8672,7 @@ dependencies = [ "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2 0.5.8", + "socket2 0.5.10", "tokio-macros", "tracing", "windows-sys 0.52.0", @@ -8811,7 +8686,7 @@ checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] @@ -8826,23 +8701,12 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.23.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" -dependencies = [ - "rustls 0.20.9", - "tokio", - "webpki", -] - -[[package]] -name = "tokio-scoped" -version = "0.2.0" +version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4beb8ba13bc53ac53ce1d52b42f02e5d8060f0f42138862869beb769722b256" +checksum = "5f6d0975eaace0cf0fcadee4e4aaa5da15b5c079146f2cffb67c113be122bf37" dependencies = [ + "rustls", "tokio", - "tokio-stream", ] [[package]] @@ -8907,21 +8771,22 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.19" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" +checksum = "c6a4b9e8023eb94392d3dca65d717c53abc5dad49c07cb65bb8fcd87115fa325" dependencies = [ + "indexmap 2.7.1", "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.22.20", + "toml_edit 0.21.1", ] [[package]] name = "toml_datetime" -version = "0.6.8" +version = "0.6.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" dependencies = [ "serde", ] @@ -8941,15 +8806,26 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.22.20" +version = "0.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d" +checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" dependencies = [ "indexmap 2.7.1", "serde", "serde_spanned", "toml_datetime", - "winnow 0.6.18", + "winnow 0.5.15", +] + +[[package]] +name = "toml_edit" +version = "0.22.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" +dependencies = [ + "indexmap 2.7.1", + "toml_datetime", + "winnow 0.7.11", ] [[package]] @@ -8967,16 +8843,16 @@ dependencies = [ "http 1.1.0", "http-body 1.0.1", "http-body-util", - "hyper 1.5.2", + "hyper 1.6.0", "hyper-timeout", "hyper-util", 
"percent-encoding", "pin-project", "prost", - "socket2 0.5.8", + "socket2 0.5.10", "tokio", "tokio-stream", - "tower", + "tower 0.4.13", "tower-layer", "tower-service", "tracing", @@ -8993,7 +8869,7 @@ dependencies = [ "indexmap 1.9.3", "pin-project", "pin-project-lite", - "rand", + "rand 0.8.5", "slab", "tokio", "tokio-util", @@ -9002,17 +8878,50 @@ dependencies = [ "tracing", ] +[[package]] +name = "tower" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-http" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" +dependencies = [ + "bitflags 2.9.0", + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "iri-string", + "pin-project-lite", + "tower 0.5.2", + "tower-layer", + "tower-service", +] + [[package]] name = "tower-layer" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" [[package]] name = "tower-service" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" @@ -9034,17 +8943,17 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] name = "tracing-chrome" -version = "0.5.0" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcb13184244c7cd22758b79e7c993c515ad67a8e730edcb7e05fe7bcabb283c7" +checksum = "bf0a738ed5d6450a9fb96e86a23ad808de2b727fd1394585da5cdd6788ffe724" dependencies = [ - "json", - "tracing", + "serde_json", + "tracing-core", "tracing-subscriber", ] @@ -9058,16 +8967,6 @@ dependencies = [ "valuable", ] -[[package]] -name = "tracing-futures" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" -dependencies = [ - "pin-project", - "tracing", -] - [[package]] name = "tracing-log" version = "0.2.0" @@ -9138,16 +9037,17 @@ checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" [[package]] name = "trybuild" -version = "1.0.99" +version = "1.0.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "207aa50d36c4be8d8c6ea829478be44a372c6a77669937bb39c698e52f1491e8" +checksum = "6ae08be68c056db96f0e6c6dd820727cca756ced9e1f4cc7fdd20e2a55e23898" dependencies = [ "glob", "serde", "serde_derive", "serde_json", + "target-triple", "termcolor", - "toml 0.8.19", + "toml 0.8.9", ] [[package]] @@ -9162,7 +9062,7 @@ dependencies = [ "http 0.2.11", "httparse", "log", - "rand", + "rand 0.8.5", "sha1", "thiserror 1.0.69", "url", @@ -9181,7 +9081,7 @@ dependencies = [ "http 0.2.11", "httparse", "log", - "rand", + "rand 0.8.5", "sha1", "thiserror 1.0.69", "url", @@ -9200,35 +9100,59 @@ dependencies = [ "http 1.1.0", "httparse", "log", 
- "rand", + "rand 0.8.5", "sha1", "thiserror 1.0.69", "url", "utf-8", ] +[[package]] +name = "turbo-esregex" +version = "0.1.0" +dependencies = [ + "anyhow", + "regex", + "regress", + "serde", + "serde_json", + "turbo-tasks", + "turbo-tasks-build", +] + [[package]] name = "turbo-persistence" version = "0.1.0" dependencies = [ "anyhow", "byteorder", + "either", + "jiff", "lzzzz", "memmap2 0.9.5", "parking_lot", "pot", "qfilter", "quick_cache", - "rand", + "rand 0.9.0", "rayon", - "rustc-hash 2.1.0", - "serde", + "rustc-hash 2.1.1", + "smallvec", "tempfile", "thread_local", + "tracing", "twox-hash 2.1.0", "zstd", ] +[[package]] +name = "turbo-persistence-tools" +version = "0.1.0" +dependencies = [ + "anyhow", + "turbo-persistence", +] + [[package]] name = "turbo-prehash" version = "0.1.0" @@ -9237,8 +9161,11 @@ version = "0.1.0" name = "turbo-rcstr" version = "0.1.0" dependencies = [ - "criterion", + "bytes-str", + "codspeed-criterion-compat", + "napi", "new_debug_unreachable", + "rustc-hash 2.1.1", "serde", "shrink-to-fit", "triomphe 0.1.12", @@ -9257,14 +9184,13 @@ dependencies = [ "lsp-server", "lsp-types", "proc-macro2", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "serde_json", "serde_path_to_error", - "syn 2.0.95", + "syn 2.0.100", "tracing", "tracing-subscriber", - "walkdir", ] [[package]] @@ -9278,7 +9204,7 @@ dependencies = [ "dashmap 6.1.0", "either", "erased-serde", - "event-listener 5.4.0", + "event-listener", "futures", "indexmap 2.7.1", "mopa", @@ -9287,7 +9213,7 @@ dependencies = [ "pin-project-lite", "rayon", "regex", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "serde_json", "serde_regex", @@ -9312,10 +9238,10 @@ version = "0.1.0" dependencies = [ "anyhow", "arc-swap", - "async-trait", "auto-hash-map", + "bitfield", "byteorder", - "criterion", + "codspeed-criterion-compat", "dashmap 6.1.0", "either", "hashbrown 0.14.5", @@ -9324,28 +9250,44 @@ dependencies = [ "once_cell", "parking_lot", "pot", - "rand", + "rand 0.9.0", "rayon", "regex", - "rustc-hash 2.1.0", + "ringmap", + "rstest", + "rustc-hash 2.1.1", "serde", "serde_json", "serde_path_to_error", "smallvec", + "tempfile", "thread_local", "tokio", - "tokio-scoped", "tracing", "turbo-persistence", - "turbo-prehash", "turbo-rcstr", "turbo-tasks", "turbo-tasks-build", - "turbo-tasks-hash", - "turbo-tasks-malloc", "turbo-tasks-testing", ] +[[package]] +name = "turbo-tasks-backend-fuzz" +version = "0.0.0" +dependencies = [ + "afl", + "anyhow", + "arbitrary", + "libfuzzer-sys", + "once_cell", + "serde", + "tokio", + "turbo-tasks", + "turbo-tasks-backend", + "turbo-tasks-build", + "turbo-tasks-malloc", +] + [[package]] name = "turbo-tasks-build" version = "0.1.0" @@ -9354,8 +9296,8 @@ dependencies = [ "cargo-lock", "glob", "quote", - "rustc-hash 2.1.0", - "syn 1.0.109", + "rustc-hash 2.1.1", + "syn 2.0.100", "turbo-tasks-macros-shared", ] @@ -9379,8 +9321,6 @@ version = "0.1.0" dependencies = [ "anyhow", "dotenvs", - "indexmap 2.7.1", - "serde", "turbo-rcstr", "turbo-tasks", "turbo-tasks-build", @@ -9392,15 +9332,14 @@ name = "turbo-tasks-fetch" version = "0.1.0" dependencies = [ "anyhow", - "httpmock", - "reqwest", - "serde", + "mockito", + "reqwest 0.12.20", "tokio", "turbo-rcstr", "turbo-tasks", + "turbo-tasks-backend", "turbo-tasks-build", "turbo-tasks-fs", - "turbo-tasks-memory", "turbo-tasks-testing", "turbopack-core", ] @@ -9413,8 +9352,8 @@ dependencies = [ "auto-hash-map", "bitflags 1.3.2", "bytes", + "codspeed-criterion-compat", "concurrent-queue", - "criterion", "dashmap 6.1.0", "dunce", "futures", @@ 
-9426,8 +9365,9 @@ dependencies = [ "notify", "parking_lot", "rayon", + "regex", "rstest", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "serde_bytes", "serde_json", @@ -9439,15 +9379,29 @@ dependencies = [ "triomphe 0.1.12", "turbo-rcstr", "turbo-tasks", + "turbo-tasks-backend", "turbo-tasks-build", "turbo-tasks-hash", - "turbo-tasks-memory", "turbo-tasks-testing", - "unicode-segmentation", - "unsize", "urlencoding", ] +[[package]] +name = "turbo-tasks-fuzz" +version = "0.0.0" +dependencies = [ + "anyhow", + "clap", + "rand 0.9.0", + "rustc-hash 2.1.1", + "tokio", + "turbo-rcstr", + "turbo-tasks", + "turbo-tasks-backend", + "turbo-tasks-build", + "turbo-tasks-fs", +] + [[package]] name = "turbo-tasks-hash" version = "0.1.0" @@ -9465,8 +9419,8 @@ dependencies = [ "proc-macro2", "quote", "regex", - "rustc-hash 2.1.0", - "syn 1.0.109", + "rustc-hash 2.1.1", + "syn 2.0.100", "turbo-tasks-macros-shared", ] @@ -9476,7 +9430,7 @@ version = "0.1.0" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.100", ] [[package]] @@ -9488,8 +9442,8 @@ dependencies = [ "tokio", "trybuild", "turbo-tasks", + "turbo-tasks-backend", "turbo-tasks-build", - "turbo-tasks-memory", "turbo-tasks-testing", ] @@ -9500,50 +9454,14 @@ dependencies = [ "mimalloc", ] -[[package]] -name = "turbo-tasks-memory" -version = "0.1.0" -dependencies = [ - "anyhow", - "auto-hash-map", - "concurrent-queue", - "criterion", - "dashmap 6.1.0", - "either", - "indexmap 2.7.1", - "loom", - "num_cpus", - "once_cell", - "parking_lot", - "rand", - "ref-cast", - "regex", - "rstest", - "rustc-hash 2.1.0", - "serde", - "serde_json", - "smallvec", - "tokio", - "tracing", - "turbo-prehash", - "turbo-rcstr", - "turbo-tasks", - "turbo-tasks-build", - "turbo-tasks-hash", - "turbo-tasks-malloc", - "turbo-tasks-testing", -] - [[package]] name = "turbo-tasks-testing" version = "0.1.0" dependencies = [ "anyhow", - "auto-hash-map", "futures", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "tokio", - "turbo-rcstr", "turbo-tasks", ] @@ -9552,28 +9470,26 @@ name = "turbopack" version = "0.1.0" dependencies = [ "anyhow", - "criterion", + "codspeed-criterion-compat", "difference", - "futures", - "indexmap 2.7.1", - "lazy_static", "regex", "rstest", "rstest_reuse", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "serde_json", "smallvec", "tokio", "tracing", + "turbo-esregex", "turbo-rcstr", "turbo-tasks", + "turbo-tasks-backend", "turbo-tasks-build", "turbo-tasks-env", "turbo-tasks-fs", "turbo-tasks-hash", "turbo-tasks-malloc", - "turbo-tasks-memory", "turbopack-core", "turbopack-css", "turbopack-ecmascript", @@ -9592,16 +9508,16 @@ version = "0.1.0" dependencies = [ "anyhow", "chromiumoxide", - "criterion", + "codspeed-criterion-compat", "futures", "nix 0.26.4", "once_cell", "owo-colors 3.5.0", "parking_lot", "portpicker", - "rand", + "rand 0.9.0", "regex", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde_json", "tempfile", "tokio", @@ -9618,9 +9534,7 @@ version = "0.1.0" dependencies = [ "anyhow", "either", - "indexmap 2.7.1", "indoc", - "rustc-hash 2.1.0", "serde", "serde_json", "serde_qs", @@ -9643,14 +9557,13 @@ version = "0.1.0" dependencies = [ "anyhow", "clap", + "codspeed-criterion-compat", "console-subscriber", - "criterion", "dunce", "futures", - "mime", "owo-colors 3.5.0", "regex", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "tokio", "tracing", @@ -9660,7 +9573,6 @@ dependencies = [ "turbo-tasks-backend", "turbo-tasks-build", "turbo-tasks-env", - "turbo-tasks-fetch", "turbo-tasks-fs", "turbo-tasks-malloc", 
"turbopack", @@ -9687,8 +9599,9 @@ dependencies = [ "clap", "crossterm 0.26.1", "owo-colors 3.5.0", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", + "turbo-rcstr", "turbo-tasks", "turbo-tasks-build", "turbo-tasks-fs", @@ -9704,12 +9617,12 @@ dependencies = [ "async-trait", "auto-hash-map", "browserslist-rs", + "bytes-str", "const_format", "data-encoding", "either", "futures", "indexmap 2.7.1", - "lazy_static", "once_cell", "patricia_tree", "petgraph 0.6.3", @@ -9717,13 +9630,13 @@ dependencies = [ "regex", "roaring", "rstest", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "serde_bytes", "serde_json", "smallvec", - "sourcemap", "swc_core", + "swc_sourcemap", "tokio", "tracing", "turbo-prehash", @@ -9753,14 +9666,11 @@ name = "turbopack-css" version = "0.1.0" dependencies = [ "anyhow", - "indexmap 2.7.1", "indoc", "lightningcss", - "once_cell", "parcel_selectors", "parcel_sourcemap", - "regex", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "smallvec", "swc_core", @@ -9770,10 +9680,8 @@ dependencies = [ "turbo-tasks", "turbo-tasks-build", "turbo-tasks-fs", - "turbo-tasks-hash", "turbopack-core", "turbopack-ecmascript", - "turbopack-swc-utils", "urlencoding", ] @@ -9787,12 +9695,11 @@ dependencies = [ "futures", "hyper 0.14.28", "hyper-tungstenite", - "indexmap 2.7.1", "mime", "mime_guess", "parking_lot", "pin-project-lite", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "serde_json", "serde_qs", @@ -9821,40 +9728,42 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", - "criterion", + "auto-hash-map", + "bitvec", + "bytes-str", + "codspeed-criterion-compat", "data-encoding", "either", "indexmap 2.7.1", "indoc", - "lazy_static", + "itertools 0.10.5", "num-bigint", "num-traits", "once_cell", - "par-core", "parking_lot", "petgraph 0.6.3", "regex", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "serde_json", "smallvec", - "sourcemap", "strsim 0.11.1", "swc_core", + "swc_sourcemap", "tokio", "tracing", + "turbo-esregex", "turbo-rcstr", "turbo-tasks", "turbo-tasks-build", "turbo-tasks-fs", "turbo-tasks-hash", - "turbo-tasks-memory", + "turbo-tasks-malloc", "turbo-tasks-testing", "turbopack-core", "turbopack-resolve", "turbopack-swc-utils", "url", - "urlencoding", ] [[package]] @@ -9863,6 +9772,7 @@ version = "0.1.0" dependencies = [ "serde", "serde_json", + "turbo-rcstr", "turbopack-cli-utils", "turbopack-core", ] @@ -9874,9 +9784,8 @@ dependencies = [ "anyhow", "async-trait", "indexmap 2.7.1", - "lightningcss", "modularize_imports", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "serde_json", "styled_components", @@ -9914,8 +9823,6 @@ name = "turbopack-env" version = "0.1.0" dependencies = [ "anyhow", - "indexmap 2.7.1", - "serde", "turbo-rcstr", "turbo-tasks", "turbo-tasks-build", @@ -9935,9 +9842,10 @@ dependencies = [ "mime", "once_cell", "regex", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "serde_with", + "turbo-rcstr", "turbo-tasks", "turbo-tasks-build", "turbo-tasks-fs", @@ -9949,7 +9857,6 @@ name = "turbopack-json" version = "0.1.0" dependencies = [ "anyhow", - "serde", "serde_json", "turbo-rcstr", "turbo-tasks", @@ -9966,7 +9873,6 @@ dependencies = [ "anyhow", "markdown", "mdxjs", - "serde", "turbo-rcstr", "turbo-tasks", "turbo-tasks-build", @@ -9982,18 +9888,17 @@ dependencies = [ "anyhow", "async-stream", "async-trait", + "base64 0.21.4", "const_format", "either", "futures", "futures-retry", - "indexmap 2.7.1", "indoc", - "mime", "once_cell", "owo-colors 3.5.0", "parking_lot", "regex", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", 
"serde", "serde_json", "serde_with", @@ -10017,12 +9922,7 @@ name = "turbopack-nodejs" version = "0.1.0" dependencies = [ "anyhow", - "either", - "indexmap 2.7.1", "indoc", - "rustc-hash 2.1.0", - "serde", - "smallvec", "tracing", "turbo-rcstr", "turbo-tasks", @@ -10040,8 +9940,6 @@ name = "turbopack-resolve" version = "0.1.0" dependencies = [ "anyhow", - "indexmap 2.7.1", - "lazy_static", "regex", "serde", "serde_json", @@ -10058,7 +9956,6 @@ name = "turbopack-static" version = "0.1.0" dependencies = [ "anyhow", - "serde", "turbo-rcstr", "turbo-tasks", "turbo-tasks-build", @@ -10089,7 +9986,7 @@ dependencies = [ "anyhow", "once_cell", "regex", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "similar", "turbo-rcstr", @@ -10107,15 +10004,12 @@ version = "0.1.0" dependencies = [ "anyhow", "dunce", - "futures", - "indexmap 2.7.1", "once_cell", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "serde_json", "testing", "tokio", - "tracing", "tracing-subscriber", "turbo-rcstr", "turbo-tasks", @@ -10126,7 +10020,6 @@ dependencies = [ "turbo-tasks-fs", "turbopack", "turbopack-browser", - "turbopack-cli-utils", "turbopack-core", "turbopack-ecmascript-plugins", "turbopack-ecmascript-runtime", @@ -10145,12 +10038,13 @@ dependencies = [ "anyhow", "either", "flate2", + "hashbrown 0.14.5", "indexmap 2.7.1", "itertools 0.10.5", "postcard", "rayon", "rustc-demangle", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "serde_json", "tungstenite 0.21.0", @@ -10168,7 +10062,7 @@ dependencies = [ "once_cell", "parking_lot", "postcard", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "serde", "thread_local", "tokio", @@ -10182,13 +10076,13 @@ name = "turbopack-wasm" version = "0.1.0" dependencies = [ "anyhow", - "indexmap 2.7.1", "indoc", "serde", "turbo-rcstr", "turbo-tasks", "turbo-tasks-build", "turbo-tasks-fs", + "turbo-tasks-hash", "turbopack-core", "turbopack-ecmascript", "wasmparser 0.110.0", @@ -10202,7 +10096,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" dependencies = [ "cfg-if", - "rand", + "rand 0.8.5", "static_assertions", ] @@ -10212,7 +10106,7 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e7b17f197b3050ba473acf9181f7b1d3b66d1cf7356c6cc57886662276e65908" dependencies = [ - "rand", + "rand 0.8.5", ] [[package]] @@ -10326,12 +10220,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "untrusted" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" - [[package]] name = "untrusted" version = "0.9.0" @@ -10348,7 +10236,7 @@ dependencies = [ "flate2", "log", "once_cell", - "rustls 0.23.20", + "rustls", "rustls-pki-types", "url", "webpki-roots 0.26.7", @@ -10398,9 +10286,14 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" [[package]] name = "uuid" -version = "1.5.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88ad59a7560b41a70d191093a945f0b87bc1deeda46fb237479708a1d6b6cdfc" +checksum = "3cf4199d1e5d15ddd86a694e4d0dffa9c323ce759fea589f00fef9d81cc1931d" +dependencies = [ + "getrandom 0.3.2", + "js-sys", + "wasm-bindgen", +] [[package]] name = "v_frame" @@ -10419,12 +10312,6 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" -[[package]] 
-name = "value-bag" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a84c137d37ab0142f0f2ddfe332651fdbf252e7b7dbb4e67b6c1f1b2e925101" - [[package]] name = "vcpkg" version = "0.2.15" @@ -10433,9 +10320,9 @@ checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "vergen" -version = "9.0.4" +version = "9.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0d2f179f8075b805a43a2a21728a46f0cc2921b3c58695b28fa8817e103cd9a" +checksum = "6b2bf58be11fc9414104c6d3a2e464163db5ef74b12296bda593cac37b6e4777" dependencies = [ "anyhow", "cargo_metadata 0.19.2", @@ -10443,33 +10330,21 @@ dependencies = [ "regex", "rustversion", "time", - "vergen-lib 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "vergen" -version = "9.0.5" -source = "git+https://github.com/bgw/vergen.git?branch=bgw%2Fno-optional-locks#0c9698692edd542772619f6db103681a832f61a7" -dependencies = [ - "anyhow", - "cargo_metadata 0.19.2", - "derive_builder 0.20.2", - "regex", - "rustversion", - "vergen-lib 0.1.6 (git+https://github.com/bgw/vergen.git?branch=bgw%2Fno-optional-locks)", + "vergen-lib", ] [[package]] name = "vergen-gitcl" -version = "1.0.6" -source = "git+https://github.com/bgw/vergen.git?branch=bgw%2Fno-optional-locks#0c9698692edd542772619f6db103681a832f61a7" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9dfc1de6eb2e08a4ddf152f1b179529638bedc0ea95e6d667c014506377aefe" dependencies = [ "anyhow", "derive_builder 0.20.2", "rustversion", "time", - "vergen 9.0.5", - "vergen-lib 0.1.6 (git+https://github.com/bgw/vergen.git?branch=bgw%2Fno-optional-locks)", + "vergen", + "vergen-lib", ] [[package]] @@ -10483,16 +10358,6 @@ dependencies = [ "rustversion", ] -[[package]] -name = "vergen-lib" -version = "0.1.6" -source = "git+https://github.com/bgw/vergen.git?branch=bgw%2Fno-optional-locks#0c9698692edd542772619f6db103681a832f61a7" -dependencies = [ - "anyhow", - "derive_builder 0.20.2", - "rustversion", -] - [[package]] name = "version-compare" version = "0.1.1" @@ -10507,33 +10372,9 @@ checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "virtual-fs" -version = "0.19.0" +version = "0.600.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14d2456ec960b74e5b0423159c70dd9796da1445de462013fe03eefd2545b631" -dependencies = [ - "async-trait", - "bytes", - "dashmap 6.1.0", - "derivative", - "dunce", - "futures", - "getrandom", - "indexmap 1.9.3", - "lazy_static", - "pin-project-lite", - "replace_with", - "shared-buffer", - "slab", - "thiserror 1.0.69", - "tokio", - "tracing", - "wasmer-package 0.2.0", -] - -[[package]] -name = "virtual-fs" -version = "0.21.0" -source = "git+https://github.com/kdy1/wasmer?branch=build-deps#afedc9315eb1c7fefddff7a3c6ada0235e78678a" +checksum = "558995609ae4e69538c3f1eec3ad1d195ee8a1ed9d39768713728a57ed4ba6fe" dependencies = [ "anyhow", "async-trait", @@ -10544,9 +10385,8 @@ dependencies = [ "filetime", "fs_extra", "futures", - "getrandom", - "indexmap 1.9.3", - "lazy_static", + "getrandom 0.2.15", + "indexmap 2.7.1", "libc", "pin-project-lite", "replace_with", @@ -10555,29 +10395,31 @@ dependencies = [ "thiserror 1.0.69", "tokio", "tracing", - "wasmer-package 0.4.0", + "wasmer-package", "webc", ] [[package]] name = "virtual-mio" -version = "0.7.0" -source = 
"git+https://github.com/kdy1/wasmer?branch=build-deps#afedc9315eb1c7fefddff7a3c6ada0235e78678a" +version = "0.600.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5564ace0b5098d394f6ca9aa62046da4e2e5912703cc7fec71a79dd3ae76b5d" dependencies = [ "async-trait", "bytes", "futures", "mio 1.0.3", "serde", - "socket2 0.5.8", + "socket2 0.5.10", "thiserror 1.0.69", "tracing", ] [[package]] name = "virtual-net" -version = "0.14.0" -source = "git+https://github.com/kdy1/wasmer?branch=build-deps#afedc9315eb1c7fefddff7a3c6ada0235e78678a" +version = "0.600.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31b44e5bdba1d7f0223ee4f9e6aab55f993c610629503a855fc55fabd6c10f4e" dependencies = [ "anyhow", "async-trait", @@ -10587,12 +10429,16 @@ dependencies = [ "bytes", "derive_more 1.0.0", "futures-util", + "idna_adapter", "ipnet", "iprange", + "libc", + "mio 1.0.3", "pin-project-lite", "rkyv 0.8.9", "serde", "smoltcp", + "socket2 0.5.10", "thiserror 1.0.69", "tokio", "tracing", @@ -10605,6 +10451,12 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "65dd7eed29412da847b0f78bcec0ac98588165988a8cfe41d4ea1d429f8ccfff" +[[package]] +name = "vsimd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c3082ca00d5a5ef149bb8b555a72ae84c9c59f7250f013ac822ac2e49b19c64" + [[package]] name = "vte" version = "0.10.1" @@ -10724,17 +10576,26 @@ version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +[[package]] +name = "wasi" +version = "0.14.2+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" +dependencies = [ + "wit-bindgen-rt", +] + [[package]] name = "wasm" version = "0.0.0" dependencies = [ "anyhow", "console_error_panic_hook", - "getrandom", + "getrandom 0.2.15", "js-sys", "mdxjs", "next-custom-transforms", - "serde-wasm-bindgen", + "serde-wasm-bindgen 0.4.5", "serde_json", "swc_core", "tracing", @@ -10744,26 +10605,27 @@ dependencies = [ [[package]] name = "wasm-bindgen" -version = "0.2.91" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1e124130aee3fb58c5bdd6b639a0509486b0338acaaae0c84a5124b0f588b7f" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" dependencies = [ "cfg-if", + "once_cell", + "rustversion", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.91" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9e7e1900c352b609c8488ad12639a311045f40a35491fb69ba8c12f758af70b" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" dependencies = [ "bumpalo", "log", - "once_cell", "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", "wasm-bindgen-shared", ] @@ -10781,9 +10643,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.91" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b30af9e2d358182b5c7449424f017eba305ed32a7010509ede96cdc4696c46ed" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -10791,22 +10653,25 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" 
-version = "0.2.91" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "642f325be6301eb8107a83d12a8ac6c1e1c54345a7ef1a9261962dfefda09e66" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.91" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f186bd2dcf04330886ce82d6f33dd75a7bfcf69ecf5763b89fcde53b6ac9838" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] [[package]] name = "wasm-encoder" @@ -10817,21 +10682,37 @@ dependencies = [ "leb128", ] +[[package]] +name = "wasm-streams" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e072d4e72f700fb3443d8fe94a39315df013eef1104903cdb0a2abd322bbecd" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + [[package]] name = "wasmer" -version = "5.0.5-rc1" -source = "git+https://github.com/kdy1/wasmer?branch=build-deps#afedc9315eb1c7fefddff7a3c6ada0235e78678a" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b8204e4eb959d89b41d4a536e61ce73f5416bccc81c7d3b7fa993995538ee97" dependencies = [ "bindgen", "bytes", "cfg-if", "cmake", - "indexmap 1.9.3", + "derive_more 1.0.0", + "indexmap 2.7.1", "js-sys", "more-asserts", + "paste", "rustc-demangle", "serde", - "serde-wasm-bindgen", + "serde-wasm-bindgen 0.6.5", "shared-buffer", "tar", "target-lexicon", @@ -10844,15 +10725,16 @@ dependencies = [ "wasmer-derive", "wasmer-types", "wasmer-vm", - "wasmparser 0.216.0", + "wasmparser 0.224.1", "wat", "windows-sys 0.59.0", ] [[package]] name = "wasmer-cache" -version = "5.0.5-rc1" -source = "git+https://github.com/kdy1/wasmer?branch=build-deps#afedc9315eb1c7fefddff7a3c6ada0235e78678a" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3077ff8340cc09dcc53d4a140681ea7fb3d5525eb028f3853db1bc81434c3bad" dependencies = [ "blake3", "hex", @@ -10862,17 +10744,18 @@ dependencies = [ [[package]] name = "wasmer-compiler" -version = "5.0.5-rc1" -source = "git+https://github.com/kdy1/wasmer?branch=build-deps#afedc9315eb1c7fefddff7a3c6ada0235e78678a" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "690827b8ec4f3858d8b001d96ddfc25c28a255cbfa984ba5bd1ed173f29ffc2a" dependencies = [ "backtrace", "bytes", "cfg-if", "enum-iterator", "enumset", - "lazy_static", "leb128", "libc", + "macho-unwind-info", "memmap2 0.6.2", "more-asserts", "object 0.32.2", @@ -10885,15 +10768,16 @@ dependencies = [ "thiserror 1.0.69", "wasmer-types", "wasmer-vm", - "wasmparser 0.216.0", + "wasmparser 0.224.1", "windows-sys 0.59.0", "xxhash-rust", ] [[package]] name = "wasmer-compiler-cranelift" -version = "5.0.5-rc1" -source = "git+https://github.com/kdy1/wasmer?branch=build-deps#afedc9315eb1c7fefddff7a3c6ada0235e78678a" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a46a83b498a2f0dcdc2e97d611db9eae92a38f20fc5dac4709d645bdfd8d2d6" dependencies = [ "cranelift-codegen", "cranelift-entity", @@ -10911,30 +10795,9 @@ dependencies = [ [[package]] name = "wasmer-config" -version = "0.10.0" +version = "0.600.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "666d97272c1042e20957be5f7e4a42f28ae5367c32a79ae953339335a55512e3" -dependencies = [ - "anyhow", - "bytesize", - "ciborium", - "derive_builder 0.12.0", - "hex", - "indexmap 2.7.1", - "schemars", - "semver 1.0.23", - "serde", - "serde_json", - "serde_yml", - "thiserror 1.0.69", - "toml 0.8.19", - "url", -] - -[[package]] -name = "wasmer-config" -version = "0.12.0" -source = "git+https://github.com/kdy1/wasmer?branch=build-deps#afedc9315eb1c7fefddff7a3c6ada0235e78678a" +checksum = "51dab2fa03bfb28b8b8c2f546531f56b341743557305e51ea621b1d8f7075b29" dependencies = [ "anyhow", "bytesize", @@ -10942,20 +10805,22 @@ dependencies = [ "derive_builder 0.12.0", "hex", "indexmap 2.7.1", + "saffron", "schemars", - "semver 1.0.23", + "semver", "serde", "serde_json", "serde_yml", "thiserror 1.0.69", - "toml 0.8.19", + "toml 0.8.9", "url", ] [[package]] name = "wasmer-derive" -version = "5.0.5-rc1" -source = "git+https://github.com/kdy1/wasmer?branch=build-deps#afedc9315eb1c7fefddff7a3c6ada0235e78678a" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccaedaf20c22736904ad842127cdbe46432998dbcdd840b024dda856a8b52265" dependencies = [ "proc-macro-error2", "proc-macro2", @@ -10965,8 +10830,9 @@ dependencies = [ [[package]] name = "wasmer-journal" -version = "0.18.0" -source = "git+https://github.com/kdy1/wasmer?branch=build-deps#afedc9315eb1c7fefddff7a3c6ada0235e78678a" +version = "0.600.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4dd746264554197deae474fa069949c6352e2758dceb3157389af1436e121e9e" dependencies = [ "anyhow", "async-trait", @@ -10980,52 +10846,30 @@ dependencies = [ "rkyv 0.8.9", "serde", "serde_json", + "shared-buffer", "thiserror 1.0.69", "tracing", - "virtual-fs 0.21.0", + "virtual-fs", "virtual-net", "wasmer", + "wasmer-config", "wasmer-wasix-types", ] [[package]] name = "wasmer-package" -version = "0.2.0" +version = "0.600.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98d05a5cd47f324ed784481d79351e12a02ad3289148dfa72432aa5d394634b8" -dependencies = [ - "anyhow", - "bytes", - "cfg-if", - "ciborium", - "flate2", - "insta", - "semver 1.0.23", - "serde", - "serde_json", - "sha2", - "shared-buffer", - "tar", - "tempfile", - "thiserror 1.0.69", - "toml 0.8.19", - "url", - "wasmer-config 0.10.0", - "webc", -] - -[[package]] -name = "wasmer-package" -version = "0.4.0" -source = "git+https://github.com/kdy1/wasmer?branch=build-deps#afedc9315eb1c7fefddff7a3c6ada0235e78678a" +checksum = "20614419fe563480822cec9f67818aced0b1b2cc26f88e96372bbaec9fd14c0f" dependencies = [ "anyhow", "bytes", "cfg-if", "ciborium", "flate2", + "ignore", "insta", - "semver 1.0.23", + "semver", "serde", "serde_json", "sha2", @@ -11033,21 +10877,23 @@ dependencies = [ "tar", "tempfile", "thiserror 1.0.69", - "toml 0.8.19", + "toml 0.8.9", "url", - "wasmer-config 0.12.0", + "wasmer-config", + "wasmer-types", "webc", ] [[package]] name = "wasmer-types" -version = "5.0.5-rc1" -source = "git+https://github.com/kdy1/wasmer?branch=build-deps#afedc9315eb1c7fefddff7a3c6ada0235e78678a" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b45fd1274b21365d3232732afe53c220ecbcdb78946405087e7016e7b2369a0" dependencies = [ "bytecheck 0.6.11", "enum-iterator", "enumset", - "getrandom", + "getrandom 0.2.15", "hex", "indexmap 2.7.1", "more-asserts", @@ -11056,13 +10902,15 @@ dependencies = [ "sha2", 
"target-lexicon", "thiserror 1.0.69", + "wasmparser 0.224.1", "xxhash-rust", ] [[package]] name = "wasmer-vm" -version = "5.0.5-rc1" -source = "git+https://github.com/kdy1/wasmer?branch=build-deps#afedc9315eb1c7fefddff7a3c6ada0235e78678a" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac4e7cec7b509e70664773f03907e6122d1633c100cb28009da770786806e6db" dependencies = [ "backtrace", "cc", @@ -11073,8 +10921,8 @@ dependencies = [ "enum-iterator", "fnv", "indexmap 2.7.1", - "lazy_static", "libc", + "libunwind", "mach2", "memoffset 0.9.0", "more-asserts", @@ -11087,8 +10935,9 @@ dependencies = [ [[package]] name = "wasmer-wasix" -version = "0.35.0" -source = "git+https://github.com/kdy1/wasmer?branch=build-deps#afedc9315eb1c7fefddff7a3c6ada0235e78678a" +version = "0.600.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fc74d209309baed0338fda8310847a9641bb77c2aeb1ae25af309006c22153f" dependencies = [ "anyhow", "async-trait", @@ -11103,12 +10952,11 @@ dependencies = [ "dashmap 6.1.0", "derive_more 1.0.0", "futures", - "getrandom", + "getrandom 0.2.15", "heapless", "hex", "http 1.1.0", "js-sys", - "lazy_static", "libc", "linked_hash_set", "lz4_flex", @@ -11117,10 +10965,11 @@ dependencies = [ "petgraph 0.6.3", "pin-project", "pin-utils", - "rand", + "rand 0.8.5", + "reqwest 0.12.20", "rkyv 0.8.9", "rusty_pool", - "semver 1.0.23", + "semver", "serde", "serde_derive", "serde_json", @@ -11133,20 +10982,20 @@ dependencies = [ "thiserror 1.0.69", "tokio", "tokio-stream", - "toml 0.8.19", + "toml 0.8.9", "tracing", "url", "urlencoding", - "virtual-fs 0.21.0", + "virtual-fs", "virtual-mio", "virtual-net", "waker-fn", "wasm-bindgen", "wasm-bindgen-futures", "wasmer", - "wasmer-config 0.12.0", + "wasmer-config", "wasmer-journal", - "wasmer-package 0.4.0", + "wasmer-package", "wasmer-types", "wasmer-wasix-types", "web-sys", @@ -11158,8 +11007,9 @@ dependencies = [ [[package]] name = "wasmer-wasix-types" -version = "0.35.0" -source = "git+https://github.com/kdy1/wasmer?branch=build-deps#afedc9315eb1c7fefddff7a3c6ada0235e78678a" +version = "0.600.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3732cc6f863b06ef80e066a3c2558c322de6fbe76caf704aefe9ca7ccaf25f10" dependencies = [ "anyhow", "bitflags 1.3.2", @@ -11186,20 +11036,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1dfcdb72d96f01e6c85b6bf20102e7423bdbaad5c337301bab2bbf253d26413c" dependencies = [ "indexmap 2.7.1", - "semver 1.0.23", + "semver", ] [[package]] name = "wasmparser" -version = "0.216.0" +version = "0.224.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcdee6bea3619d311fb4b299721e89a986c3470f804b6d534340e412589028e3" +checksum = "04f17a5917c2ddd3819e84c661fae0d6ba29d7b9c1f0e96c708c65a9c4188e11" dependencies = [ - "ahash 0.8.11", - "bitflags 2.5.0", - "hashbrown 0.14.5", - "indexmap 2.7.1", - "semver 1.0.23", + "bitflags 2.9.0", ] [[package]] @@ -11234,6 +11080,16 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + [[package]] name = "webbrowser" version = "0.8.15" @@ -11253,9 +11109,9 @@ dependencies = [ [[package]] name = "webc" -version = "7.0.0-rc.2" +version = "9.0.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6893cbe58d5b97a0daa2dd77055d621db1c8b94fe0f2bbd719c8de747226ea6" +checksum = "38544ae3a351279fa913b4dee9c548f0aa3b27ca05756531c3f2e6bc4e22c27d" dependencies = [ "anyhow", "base64 0.22.1", @@ -11264,13 +11120,13 @@ dependencies = [ "ciborium", "document-features", "ignore", - "indexmap 1.9.3", + "indexmap 2.7.1", "leb128", "lexical-sort", "libc", "once_cell", "path-clean 1.0.1", - "rand", + "rand 0.8.5", "serde", "serde_json", "sha2", @@ -11279,30 +11135,20 @@ dependencies = [ "url", ] -[[package]] -name = "webpki" -version = "0.22.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd" -dependencies = [ - "ring 0.16.20", - "untrusted 0.7.1", -] - [[package]] name = "webpki-roots" -version = "0.22.6" +version = "0.26.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" +checksum = "5d642ff16b7e79272ae451b7322067cdc17cadf68c23264be9d94a32319efe7e" dependencies = [ - "webpki", + "rustls-pki-types", ] [[package]] name = "webpki-roots" -version = "0.26.7" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d642ff16b7e79272ae451b7322067cdc17cadf68c23264be9d94a32319efe7e" +checksum = "2853738d1cc4f2da3a225c18ec6c3721abb31961096e9dbf5ab35fa88b19cfdb" dependencies = [ "rustls-pki-types", ] @@ -11366,68 +11212,10 @@ dependencies = [ ] [[package]] -name = "windows" -version = "0.58.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd04d41d93c4992d421894c18c8b43496aa748dd4c081bac0dc93eb0489272b6" -dependencies = [ - "windows-core", - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-core" -version = "0.58.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ba6d44ec8c2591c134257ce647b7ea6b20335bf6379a27dac5f1641fcf59f99" -dependencies = [ - "windows-implement", - "windows-interface", - "windows-result", - "windows-strings", - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-implement" -version = "0.58.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.95", -] - -[[package]] -name = "windows-interface" -version = "0.58.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.95", -] - -[[package]] -name = "windows-result" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" -dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-strings" -version = "0.1.0" +name = "windows-link" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" -dependencies = [ - "windows-result", - "windows-targets 0.52.6", -] +checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38" [[package]] name = "windows-sys" @@ -11669,9 +11457,9 @@ dependencies = [ [[package]] name = "winnow" -version = "0.6.18" +version = "0.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f" +checksum = "74c7b26e3480b707944fc872477815d29a8e429d2f93a1ce000f5fa84a15cbcd" dependencies = [ "memchr", ] @@ -11695,6 +11483,15 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "wit-bindgen-rt" +version = "0.39.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" +dependencies = [ + "bitflags 2.9.0", +] + [[package]] name = "write16" version = "1.0.0" @@ -11733,6 +11530,12 @@ dependencies = [ "rustix 0.38.41", ] +[[package]] +name = "xdg" +version = "2.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "213b7324336b53d2414b2db8537e56544d981803139155afa84f76eeebb7a546" + [[package]] name = "xtask" version = "0.1.0" @@ -11746,8 +11549,8 @@ dependencies = [ "num-format", "owo-colors 3.5.0", "plotters", - "rustc-hash 2.1.0", - "semver 1.0.23", + "rustc-hash 2.1.1", + "semver", "serde", "serde_json", "tabled", @@ -11786,7 +11589,7 @@ checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", "synstructure", ] @@ -11796,7 +11599,16 @@ version = "0.7.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be" dependencies = [ - "zerocopy-derive", + "zerocopy-derive 0.7.32", +] + +[[package]] +name = "zerocopy" +version = "0.8.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2586fea28e186957ef732a5f8b3be2da217d65c5969d4b1e17f973ebbe876879" +dependencies = [ + "zerocopy-derive 0.8.24", ] [[package]] @@ -11807,7 +11619,18 @@ checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a996a8f63c5c4448cd959ac1bab0aaa3306ccfd060472f85943ee0750f0169be" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.100", ] [[package]] @@ -11827,7 +11650,7 @@ checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", "synstructure", ] @@ -11856,7 +11679,7 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.95", + "syn 2.0.100", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 3ebf1c6ab718d..beb4161f232fb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -11,24 +11,24 @@ members = [ "crates/next-core", "crates/next-custom-transforms", "turbopack/crates/*", + "turbopack/crates/*/fuzz", "turbopack/xtask", ] -exclude = [ - "crates/next-error-code-swc-plugin", -] +exclude = ["crates/next-error-code-swc-plugin"] [workspace.lints.clippy] too_many_arguments = "allow" +[workspace.lints.rust] +unexpected_cfgs = { level = "warn", check-cfg = ['cfg(rust_analyzer)', 'cfg(fuzzing)'] } + # This crate is particularly sensitive to compiler optimizations -[profile.dev.package.turbo-tasks-memory] +[profile.dev.package.turbo-persistence] opt-level = 1 # Set the options for dependencies (not crates in the workspace), this mostly impacts cold builds [profile.dev.package."*"] -# This is a workaround for wasm timeout issue -debug-assertions = false opt-level = 1 # Set the settings for build scripts and 
proc-macros. @@ -162,7 +162,7 @@ opt-level = "s" [profile.release.package.zstd-sys] opt-level = 3 -[profile.release.package.sourcemap] +[profile.release.package.swc_sourcemap] opt-level = 3 [profile.release.package.turbopack-wasm] @@ -240,6 +240,12 @@ opt-level = 3 [profile.release.package.serde] opt-level = 3 +# Use a custom profile for CI where many tests are performance sensitive but we still want the additional validation of debug-assertions +[profile.release-with-assertions] +inherits = "release" +debug-assertions = true +overflow-checks = true + [workspace.dependencies] # Workspace crates next-api = { path = "crates/next-api" } @@ -249,9 +255,9 @@ next-custom-transforms = { path = "crates/next-custom-transforms" } # Turbopack auto-hash-map = { path = "turbopack/crates/turbo-tasks-auto-hash-map" } -swc-ast-explorer = { path = "turbopack/crates/turbopack-swc-ast-explorer" } turbo-prehash = { path = "turbopack/crates/turbo-prehash" } turbo-rcstr = { path = "turbopack/crates/turbo-rcstr" } +turbo-esregex = { path = "turbopack/crates/turbo-esregex" } turbo-persistence = { path = "turbopack/crates/turbo-persistence" } turbo-tasks-malloc = { path = "turbopack/crates/turbo-tasks-malloc", default-features = false } turbo-tasks = { path = "turbopack/crates/turbo-tasks" } @@ -264,13 +270,10 @@ turbo-tasks-fs = { path = "turbopack/crates/turbo-tasks-fs" } turbo-tasks-hash = { path = "turbopack/crates/turbo-tasks-hash" } turbo-tasks-macros = { path = "turbopack/crates/turbo-tasks-macros" } turbo-tasks-macros-shared = { path = "turbopack/crates/turbo-tasks-macros-shared" } -turbo-tasks-macros-tests = { path = "turbopack/crates/turbo-tasks-macros-tests" } -turbo-tasks-memory = { path = "turbopack/crates/turbo-tasks-memory" } turbo-tasks-testing = { path = "turbopack/crates/turbo-tasks-testing" } turbopack = { path = "turbopack/crates/turbopack" } turbopack-bench = { path = "turbopack/crates/turbopack-bench" } turbopack-nodejs = { path = "turbopack/crates/turbopack-nodejs" } -turbopack-cli = { path = "turbopack/crates/turbopack-cli" } turbopack-cli-utils = { path = "turbopack/crates/turbopack-cli-utils" } turbopack-core = { path = "turbopack/crates/turbopack-core" } turbopack-create-test-app = { path = "turbopack/crates/turbopack-create-test-app" } @@ -290,27 +293,26 @@ turbopack-resolve = { path = "turbopack/crates/turbopack-resolve" } turbopack-static = { path = "turbopack/crates/turbopack-static" } turbopack-swc-utils = { path = "turbopack/crates/turbopack-swc-utils" } turbopack-test-utils = { path = "turbopack/crates/turbopack-test-utils" } -turbopack-tests = { path = "turbopack/crates/turbopack-tests" } turbopack-trace-server = { path = "turbopack/crates/turbopack-trace-server" } turbopack-trace-utils = { path = "turbopack/crates/turbopack-trace-utils" } turbopack-wasm = { path = "turbopack/crates/turbopack-wasm" } # SWC crates -swc_core = { version = "21.0.1", features = [ +swc_core = { version = "29.2.0", features = [ "ecma_loader_lru", "ecma_loader_parking_lot", + "parallel_rayon", ] } -testing = { version = "8.0.0" } +testing = { version = "14.0.0" } # Keep consistent with preset_env_base through swc_core -browserslist-rs = { version = "0.17.0" } -miette = { version = "5.10.0", features = ["fancy"] } -mdxjs = "0.3.3" -modularize_imports = { version = "0.80.0" } -styled_components = { version = "0.108.0" } -styled_jsx = { version = "0.84.0" } -swc_emotion = { version = "0.84.0" } -swc_relay = { version = "0.54.0" } +browserslist-rs = { version = "0.18.0" } +mdxjs = "1.0.3" 
+modularize_imports = { version = "0.89.0" } +styled_components = { version = "0.117.0" } +styled_jsx = { version = "0.93.2" } +swc_emotion = { version = "0.93.0" } +swc_relay = { version = "0.63.0" } # General Deps chromiumoxide = { version = "0.5.4", features = [ @@ -324,27 +326,26 @@ tungstenite = "0.20.1" allsorts = { version = "0.14.0", default-features = false, features = [ "flate2_rust", ] } -anyhow = "1.0.69" -assert_cmd = "2.0.8" +anyhow = "1.0.98" async-compression = { version = "0.3.13", default-features = false, features = [ "gzip", "tokio", ] } async-trait = "0.1.64" -atty = "0.2.14" +bitfield = "0.18.0" bytes = "1.1.0" +bytes-str = "0.2.6" chrono = "0.4.23" clap = { version = "4.5.2", features = ["derive"] } concurrent-queue = "2.5.0" -console = "0.15.5" console-subscriber = "0.4.1" const_format = "0.2.30" -criterion = "0.5.1" +criterion = { package = "codspeed-criterion-compat", version = "2.10.1" } crossbeam-channel = "0.5.8" dashmap = "6.1.0" data-encoding = "2.3.3" +divan = { version = "2.10.1", package = "codspeed-divan-compat" } dhat = { version = "0.3.2" } -dialoguer = "0.10.3" dunce = "1.0.3" either = "1.9.0" erased-serde = "0.4.5" @@ -354,43 +355,49 @@ hashbrown = "0.14.5" httpmock = { version = "0.6.8", default-features = false } image = { version = "0.25.0", default-features = false } indexmap = "2.7.1" -indicatif = "0.17.3" indoc = "2.0.0" itertools = "0.10.5" -lazy_static = "1.4.0" -log = "0.4.17" -lightningcss = { version = "1.0.0-alpha.65", features = [ +lightningcss = { version = "1.0.0-alpha.67", features = [ "serde", "visitor", "into_owned", + "browserslist" +] } +lightningcss-napi = { version = "0.4.4", default-features = false, features = [ + "visitor", ] } -lightningcss-napi = { version = "0.4.3", default-features = false, features = [ - "visitor" -]} markdown = "1.0.0-alpha.18" mime = "0.3.16" -nohash-hasher = "0.2.0" -notify = "6.1.1" +napi = { version = "2", default-features = false, features = [ + "napi3", + "serde-json", + "tokio_rt", + "error_anyhow", + # Lightningcss uses this features + "napi4", + "napi5", + "compat-mode" +] } +notify = "8.0.0" once_cell = "1.17.1" owo-colors = "3.5.0" -par-core = { version = "1.0.3", features = ["rayon"] } -parcel_selectors = "0.28.1" +parcel_selectors = "0.28.2" parking_lot = "0.12.1" pathdiff = "0.2.1" petgraph = "0.6.3" pin-project-lite = "0.2.9" postcard = "1.0.4" -predicates = "2.1.5" -pretty_assertions = "1.3.0" proc-macro2 = "1.0.79" qstring = "0.7.2" quote = "1.0.23" -rand = "0.8.5" +rand = "0.9.0" rayon = "1.10.0" regex = "1.10.6" -reqwest = { version = "=0.11.17", default-features = false } +regress = "0.10.3" +reqwest = { version = "0.12.20", default-features = false } +ringmap = "0.1.3" rstest = "0.16.0" -rustc-hash = "2.1.0" +rustc-hash = "2.1.1" semver = "1.0.16" serde = { version = "1.0.217", features = ["derive"] } serde_json = "1.0.138" @@ -404,33 +411,28 @@ smallvec = { version = "1.13.1", features = [ "union", "const_new", ] } -sourcemap = "9.0.0" +swc_sourcemap = "9.3.2" strsim = "0.11.1" shrink-to-fit = "0.2.10" -swc-rustc-hash = { package = "rustc-hash", version = "1.1.0" } # used with swc -syn = "1.0.107" -tempfile = "3.3.0" +syn = "2.0.100" +tempfile = "3.20.0" thread_local = "1.1.8" thiserror = "1.0.48" -tiny-gradient = "0.1.0" tokio = "1.43.0" tokio-util = { version = "0.7.13", features = ["io", "rt"] } tracing = "0.1.37" tracing-subscriber = "0.3.16" triomphe = { git = "https://github.com/sokra/triomphe", branch = "sokra/unstable" } -unicode-segmentation = "1.10.1" unsize = "1.1.0" url 
= "2.2.2" urlencoding = "2.1.2" -vergen = { git = "https://github.com/bgw/vergen.git", branch = "bgw/no-optional-locks", features = ["cargo"] } -vergen-gitcl = { git = "https://github.com/bgw/vergen.git", branch = "bgw/no-optional-locks", features = ["cargo"] } +vergen = { version = "9.0.6", features = [ + "cargo", +] } +vergen-gitcl = { version = "1.0.8", features = [ + "cargo", +] } webbrowser = "0.8.7" [patch.crates-io] -# Remove this once https://github.com/wasmerio/wasmer/pull/5333 is merged and released -wasmer = { git = "https://github.com/kdy1/wasmer", branch = "build-deps" } -wasmer-cache = { git = "https://github.com/kdy1/wasmer", branch = "build-deps" } -wasmer-compiler-cranelift = { git = "https://github.com/kdy1/wasmer", branch = "build-deps" } -wasmer-wasix = { git = "https://github.com/kdy1/wasmer", branch = "build-deps" } - -mdxjs = { git = "https://github.com/kdy1/mdxjs-rs", branch = "swc-core-21" } +mdxjs = { git = "https://github.com/kdy1/mdxjs-rs.git", branch = "swc-core-29" } diff --git a/contributing/core/developing.md b/contributing/core/developing.md index 570776c1f8940..c186e1554b187 100644 --- a/contributing/core/developing.md +++ b/contributing/core/developing.md @@ -56,13 +56,13 @@ see **[Developing Using Your Local Version of Next.js](./developing-using-local- Since Turbopack doesn't support symlinks when pointing outside of the workspace directory, it can be difficult to develop against a local Next.js version. Neither `pnpm link` nor `file:` imports quite cut it. An alternative is to pack the Next.js version you want to test into a tarball and add it to the pnpm overrides of your test application. The following script will do it for you: ```bash -pnpm pack-next --release && pnpm unpack-next path/to/project +pnpm pack-next --tar && pnpm unpack-next path/to/project ``` Or without running the build: ```bash -pnpm pack-next --no-build --release && pnpm unpack-next path/to/project +pnpm pack-next --no-js-build --tar && pnpm unpack-next path/to/project ``` Without going through a tarball (only works if you've added the overrides from `pack-next`): @@ -74,21 +74,19 @@ pnpm patch-next path/to/project Supports the same arguments: ```bash -pnpm patch-next --no-build --release path/to/project +pnpm patch-next --no-js-build path/to/project ``` ### Explanation of the scripts ```bash # Generate a tarball of the Next.js version you want to test -$ pnpm pack-next +$ pnpm pack-next --tar -# If you need to build in release mode: -$ pnpm pack-next --release # You can also pass any cargo argument to the script # To skip the `pnpm i` and `pnpm build` steps in next.js (e. g. if you are running `pnpm dev`) -$ pnpm pack-next --no-build +$ pnpm pack-next --no-js-build ``` Afterwards, you'll need to unpack the tarball into your test project. You can either manually edit the `package.json` to point to the new tarballs (see the stdout from `pack-next` script), or you can automatically unpack it with: diff --git a/contributing/core/vscode-debugger.md b/contributing/core/vscode-debugger.md index 22be0b556b33d..52c9033e7823b 100644 --- a/contributing/core/vscode-debugger.md +++ b/contributing/core/vscode-debugger.md @@ -24,4 +24,4 @@ To see the changes you make to the Next.js codebase during development, you can When developing/debugging Next.js, you can set breakpoints anywhere in the `packages/next` source code that will stop the debugger at certain locations so you can examine the behavior. 
Read more about [breakpoints in the VS Code documentation](https://code.visualstudio.com/docs/nodejs/nodejs-debugging#_breakpoints). -To ensure that the original names are displayed in the "Variables" section, build the Next.js source code with `NEXT_SERVER_EVAL_SOURCE_MAPS=1`. This is automatically applied when using `pnpm dev`. +To ensure that the original names are displayed in the "Variables" section, build the Next.js source code with `NEXT_SERVER_NO_MANGLE=1`. This is automatically applied when using `pnpm dev`. diff --git a/crates/napi/Cargo.toml b/crates/napi/Cargo.toml index c2238735e50d6..ca9543bd35763 100644 --- a/crates/napi/Cargo.toml +++ b/crates/napi/Cargo.toml @@ -1,5 +1,5 @@ [package] -edition = "2021" +edition = "2024" name = "next-swc-napi" version = "0.0.0" publish = false @@ -43,23 +43,22 @@ tokio-console = ["dep:console-subscriber"] [lints] workspace = true +[package.metadata.cargo-shear] +ignored = [ + # we need to set features on these packages when building for WASM, but we don't directly use them + "getrandom", + "iana-time-zone", + # the plugins feature needs to set a feature on this transitively depended-on package, we never + # directly import it + "turbopack-ecmascript-plugins", +] + [dependencies] anyhow = "1.0.66" -backtrace = "0.3" console-subscriber = { workspace = true, optional = true } dhat = { workspace = true, optional = true } -indexmap = { workspace = true } owo-colors = { workspace = true } -napi = { version = "2", default-features = false, features = [ - "napi3", - "serde-json", - "tokio_rt", - "error_anyhow", - # Lightningcss uses this features - "napi4", - "napi5", - "compat-mode" -] } +napi = { workspace = true } napi-derive = "2" next-custom-transforms = { workspace = true } rand = { workspace = true } @@ -70,17 +69,14 @@ supports-hyperlinks = "3.1.0" terminal_hyperlink = "0.1.0" tracing = { workspace = true } tracing-subscriber = { workspace = true } -tracing-chrome = "0.5.0" +tracing-chrome = "0.7.2" url = { workspace = true } urlencoding = { workspace = true } once_cell = { workspace = true } -dashmap = { workspace = true } swc_core = { workspace = true, features = [ "base_concurrent", "base_node", - "bundler", - "bundler_concurrent", "common_concurrent", "ecma_ast", "ecma_ast_serde", @@ -97,15 +93,13 @@ swc_core = { workspace = true, features = [ "ecma_utils", "ecma_visit", ] } -par-core = { workspace = true, features = ["rayon"] } # Dependencies for the native, non-wasm32 build. 
[target.'cfg(not(target_arch = "wasm32"))'.dependencies] lightningcss-napi = { workspace = true } tokio = { workspace = true, features = ["full"] } -turbo-rcstr = { workspace = true } +turbo-rcstr = { workspace = true, features = ["napi"] } turbo-tasks = { workspace = true } -turbo-tasks-memory = { workspace = true } turbo-tasks-backend = { workspace = true } turbo-tasks-fs = { workspace = true } next-api = { workspace = true } @@ -118,7 +112,6 @@ turbo-tasks-malloc = { workspace = true, default-features = false, features = [ "custom_allocator" ] } -turbopack = { workspace = true } turbopack-core = { workspace = true } turbopack-ecmascript-hmr-protocol = { workspace = true } turbopack-trace-utils = { workspace = true } diff --git a/crates/napi/build.rs b/crates/napi/build.rs index b398cfc19b97a..f5a70875dc2f1 100644 --- a/crates/napi/build.rs +++ b/crates/napi/build.rs @@ -1,4 +1,6 @@ -use std::{env, process::Command, str}; +use std::{env, fs, path::Path, process::Command, str}; + +use serde_json::Value; extern crate napi_build; @@ -6,6 +8,25 @@ fn main() -> anyhow::Result<()> { println!("cargo:rerun-if-env-changed=CI"); let is_ci = env::var("CI").is_ok_and(|value| !value.is_empty()); + let nextjs_version = { + let package_json_path = Path::new(env!("CARGO_MANIFEST_DIR")) + .join("../..") + .join("packages/next/package.json"); + + println!("cargo:rerun-if-changed={}", package_json_path.display()); + + let package_json_content = fs::read_to_string(&package_json_path)?; + let package_json: Value = serde_json::from_str(&package_json_content)?; + + package_json["version"] + .as_str() + .expect("Expected a Next.js `version` string in its package.json") + .to_string() + }; + + // Make the Next.js version available as a build-time environment variable + println!("cargo:rustc-env=NEXTJS_VERSION={nextjs_version}"); + // Generates, stores build-time information as static values. // There are some places relying on correct values for this (i.e telemetry), // So failing build if this fails. 
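The `build.rs` hunk above reads the version field from `packages/next/package.json` and exports it to the compiler via `cargo:rustc-env=NEXTJS_VERSION=...`. As a minimal sketch (not part of this patch), a crate built with that build script could then pick the value up at compile time with `env!`; the constant name and call site below are illustrative assumptions, not code from the diff.

```rust
// Illustrative only: consumes the NEXTJS_VERSION variable exported by the
// build script shown above. The actual consumer in next-swc-napi is not
// part of this diff. This compiles only when the build script has set the
// variable via `cargo:rustc-env`.
pub const NEXTJS_VERSION: &str = env!("NEXTJS_VERSION");

fn main() {
    // The version string is baked into the binary at compile time, so no
    // runtime lookup of package.json is needed.
    println!("Built against Next.js {NEXTJS_VERSION}");
}
```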
diff --git a/crates/napi/src/lib.rs b/crates/napi/src/lib.rs index c9314e5df52e7..8544f38045644 100644 --- a/crates/napi/src/lib.rs +++ b/crates/napi/src/lib.rs @@ -50,6 +50,8 @@ pub mod minify; #[cfg(not(target_arch = "wasm32"))] pub mod next_api; pub mod parse; +pub mod react_compiler; +pub mod rspack; pub mod transform; #[cfg(not(target_arch = "wasm32"))] pub mod turbo_trace_server; @@ -67,11 +69,19 @@ static ALLOC: dhat::Alloc = dhat::Alloc; #[cfg(not(target_arch = "wasm32"))] #[napi::module_init] - fn init() { + use std::panic::{set_hook, take_hook}; + use tokio::runtime::Builder; + use turbo_tasks::panic_hooks::handle_panic; use turbo_tasks_malloc::TurboMalloc; + let prev_hook = take_hook(); + set_hook(Box::new(move |info| { + handle_panic(info); + prev_hook(info); + })); + let rt = Builder::new_multi_thread() .enable_all() .on_thread_stop(|| { diff --git a/crates/napi/src/mdx.rs b/crates/napi/src/mdx.rs index 4fcf76616685e..6d4bd57ffa708 100644 --- a/crates/napi/src/mdx.rs +++ b/crates/napi/src/mdx.rs @@ -1,4 +1,4 @@ -use mdxjs::{compile, Options}; +use mdxjs::{Options, compile}; use napi::bindgen_prelude::*; pub struct MdxCompileTask { @@ -40,5 +40,5 @@ pub fn mdx_compile_sync(value: String, option: Buffer) -> napi::Result { let option: Options = serde_json::from_slice(&option)?; compile(value.as_str(), &option) - .map_err(|err| napi::Error::new(Status::GenericFailure, format!("{:?}", err))) + .map_err(|err| napi::Error::new(Status::GenericFailure, format!("{err:?}"))) } diff --git a/crates/napi/src/minify.rs b/crates/napi/src/minify.rs index fa531f37ab5af..4d8aa6f95201f 100644 --- a/crates/napi/src/minify.rs +++ b/crates/napi/src/minify.rs @@ -26,52 +26,19 @@ IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ +use anyhow::Context; use napi::bindgen_prelude::*; -use rustc_hash::FxHashMap; -use serde::Deserialize; use swc_core::{ - base::{config::JsMinifyOptions, try_with_handler, BoolOrDataConfig, TransformOutput}, - common::{errors::ColorConfig, sync::Lrc, FileName, SourceFile, SourceMap, GLOBALS}, - ecma::minifier::option::{ - terser::{TerserCompressorOptions, TerserInlineOption}, - MangleOptions, - }, + base::{TransformOutput, config::JsMinifyOptions, try_with_handler}, + common::{FileName, GLOBALS, errors::ColorConfig}, }; use crate::{get_compiler, util::MapErr}; pub struct MinifyTask { c: swc_core::base::Compiler, - code: MinifyTarget, - opts: swc_core::base::config::JsMinifyOptions, -} - -#[derive(Deserialize)] -#[serde(untagged)] -enum MinifyTarget { - /// Code to minify. 
- Single(String), - /// `{ filename: code }` - Map(FxHashMap), -} - -impl MinifyTarget { - fn to_file(&self, cm: Lrc) -> Lrc { - match self { - MinifyTarget::Single(code) => cm.new_source_file(FileName::Anon.into(), code.clone()), - MinifyTarget::Map(codes) => { - assert_eq!( - codes.len(), - 1, - "swc.minify does not support concatenating multiple files yet" - ); - - let (filename, code) = codes.iter().next().unwrap(); - - cm.new_source_file(FileName::Real(filename.clone().into()).into(), code.clone()) - } - } - } + code: Option, + opts: JsMinifyOptions, } #[napi] @@ -81,6 +48,8 @@ impl Task for MinifyTask { type JsValue = TransformOutput; fn compute(&mut self) -> napi::Result { + let code = self.code.take().unwrap_or_default(); + try_with_handler( self.c.cm.clone(), swc_core::base::HandlerOpts { @@ -89,12 +58,13 @@ impl Task for MinifyTask { }, |handler| { GLOBALS.set(&Default::default(), || { - let fm = self.code.to_file(self.c.cm.clone()); + let fm = self.c.cm.new_source_file(FileName::Anon.into(), code); self.c.minify(fm, handler, &self.opts, Default::default()) }) }, ) + .map_err(|e| e.to_pretty_error()) .convert_err() } @@ -103,60 +73,38 @@ impl Task for MinifyTask { } } -/// **NOTE** `inline: 3` breaks some codes. -/// -/// -fn patch_opts(opts: &mut JsMinifyOptions) { - opts.compress = BoolOrDataConfig::from_obj(TerserCompressorOptions { - inline: Some(TerserInlineOption::Num(2)), - global_defs: [( - "process.env.__NEXT_PRIVATE_MINIMIZE_MACRO_FALSE".into(), - false.into(), - )] - .iter() - .cloned() - .collect(), - ..Default::default() - }); - - if !opts.mangle.is_false() { - let mut mangle = std::mem::take(&mut opts.mangle); - if mangle.is_true() { - mangle = BoolOrDataConfig::from_obj(MangleOptions::default()); - } - opts.mangle = mangle.map(|mut mangle_opts| { - mangle_opts.reserved.push("AbortSignal".into()); - mangle_opts - }); - } -} - #[napi] pub fn minify( input: Buffer, opts: Buffer, signal: Option, ) -> napi::Result> { - let code = serde_json::from_slice(&input)?; - let mut opts = serde_json::from_slice(&opts)?; - patch_opts(&mut opts); + let code = String::from_utf8(input.into()) + .context("failed to convert input to string") + .convert_err()?; + let opts = serde_json::from_slice(&opts)?; let c = get_compiler(); - let task = MinifyTask { c, code, opts }; + let task = MinifyTask { + c, + code: Some(code), + opts, + }; Ok(AsyncTask::with_optional_signal(task, signal)) } #[napi] pub fn minify_sync(input: Buffer, opts: Buffer) -> napi::Result { - let code: MinifyTarget = serde_json::from_slice(&input)?; - let mut opts = serde_json::from_slice(&opts)?; - patch_opts(&mut opts); + let code = String::from_utf8(input.into()) + .context("failed to convert input to string") + .convert_err()?; + let opts = serde_json::from_slice(&opts)?; let c = get_compiler(); - let fm = code.to_file(c.cm.clone()); + let fm = c.cm.new_source_file(FileName::Anon.into(), code); try_with_handler( c.cm.clone(), @@ -170,5 +118,6 @@ pub fn minify_sync(input: Buffer, opts: Buffer) -> napi::Result }) }, ) + .map_err(|e| e.to_pretty_error()) .convert_err() } diff --git a/crates/napi/src/next_api/endpoint.rs b/crates/napi/src/next_api/endpoint.rs index 9aa75aa661d60..83961eb77f6b3 100644 --- a/crates/napi/src/next_api/endpoint.rs +++ b/crates/napi/src/next_api/endpoint.rs @@ -1,13 +1,13 @@ use std::{ops::Deref, sync::Arc}; use anyhow::Result; -use napi::{bindgen_prelude::External, JsFunction}; +use napi::{JsFunction, bindgen_prelude::External}; use next_api::{ operation::OptionEndpoint, paths::ServerPath, 
route::{ - endpoint_client_changed_operation, endpoint_server_changed_operation, - endpoint_write_to_disk_operation, EndpointOutputPaths, + EndpointOutputPaths, endpoint_client_changed_operation, endpoint_server_changed_operation, + endpoint_write_to_disk_operation, }, }; use tracing::Instrument; @@ -15,8 +15,8 @@ use turbo_tasks::{Completion, Effects, OperationVc, ReadRef, Vc}; use turbopack_core::{diagnostics::PlainDiagnostic, error::PrettyPrintError, issue::PlainIssue}; use super::utils::{ - strongly_consistent_catch_collectables, subscribe, NapiDiagnostic, NapiIssue, RootTask, - TurbopackResult, VcArc, + NapiDiagnostic, NapiIssue, RootTask, TurbopackResult, VcArc, + strongly_consistent_catch_collectables, subscribe, }; #[napi(object)] diff --git a/crates/napi/src/next_api/project.rs b/crates/napi/src/next_api/project.rs index 155bbb70b5831..bcc3527325542 100644 --- a/crates/napi/src/next_api/project.rs +++ b/crates/napi/src/next_api/project.rs @@ -1,10 +1,10 @@ -use std::{io::Write, path::PathBuf, sync::Arc, thread, time::Duration}; +use std::{borrow::Cow, io::Write, path::PathBuf, sync::Arc, thread, time::Duration}; -use anyhow::{anyhow, bail, Context, Result}; +use anyhow::{Context, Result, anyhow, bail}; use napi::{ - bindgen_prelude::{within_runtime_if_available, External}, - threadsafe_function::{ThreadsafeFunction, ThreadsafeFunctionCallMode}, JsFunction, Status, + bindgen_prelude::{External, within_runtime_if_available}, + threadsafe_function::{ThreadsafeFunction, ThreadsafeFunctionCallMode}, }; use next_api::{ entrypoints::Entrypoints, @@ -19,33 +19,37 @@ use next_api::{ route::Endpoint, }; use next_core::tracing_presets::{ - TRACING_NEXT_OVERVIEW_TARGETS, TRACING_NEXT_TARGETS, TRACING_NEXT_TURBOPACK_TARGETS, - TRACING_NEXT_TURBO_TASKS_TARGETS, + TRACING_NEXT_OVERVIEW_TARGETS, TRACING_NEXT_TARGETS, TRACING_NEXT_TURBO_TASKS_TARGETS, + TRACING_NEXT_TURBOPACK_TARGETS, }; use once_cell::sync::Lazy; use rand::Rng; +use serde::{Deserialize, Serialize}; use tokio::{io::AsyncWriteExt, time::Instant}; use tracing::Instrument; -use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, Registry}; -use turbo_rcstr::RcStr; +use tracing_subscriber::{Registry, layer::SubscriberExt, util::SubscriberInitExt}; +use turbo_rcstr::{RcStr, rcstr}; use turbo_tasks::{ - get_effects, Completion, Effects, FxIndexSet, OperationVc, ReadRef, ResolvedVc, - TransientInstance, TryJoinIterExt, UpdateInfo, Vc, + Completion, Effects, FxIndexSet, NonLocalValue, OperationValue, OperationVc, ReadRef, + ResolvedVc, TaskInput, TransientInstance, TryJoinIterExt, UpdateInfo, Vc, get_effects, + message_queue::{CompilationEvent, Severity, TimingEvent}, + trace::TraceRawVcs, }; +use turbo_tasks_backend::db_invalidation::invalidation_reasons; use turbo_tasks_fs::{ - get_relative_path_to, util::uri_from_file, DiskFileSystem, FileContent, FileSystem, - FileSystemPath, + DiskFileSystem, FileContent, FileSystem, FileSystemPath, get_relative_path_to, + util::uri_from_file, }; use turbopack_core::{ + PROJECT_FILESYSTEM_NAME, SOURCE_URL_PROTOCOL, diagnostics::PlainDiagnostic, error::PrettyPrintError, issue::PlainIssue, output::{OutputAsset, OutputAssets}, - source_map::{OptionSourceMap, OptionStringifiedSourceMap, SourceMap, Token}, + source_map::{OptionStringifiedSourceMap, SourceMap, Token}, version::{PartialUpdate, TotalUpdate, Update, VersionState}, - PROJECT_FILESYSTEM_NAME, SOURCE_URL_PROTOCOL, }; -use turbopack_ecmascript_hmr_protocol::{ClientUpdateInstruction, ResourceIdentifier}; +use 
turbopack_ecmascript_hmr_protocol::{ClientUpdateInstruction, Issue, ResourceIdentifier}; use turbopack_trace_utils::{ exit::{ExitHandler, ExitReceiver}, filter_layer::FilterLayer, @@ -57,8 +61,8 @@ use url::Url; use super::{ endpoint::ExternalEndpoint, utils::{ - create_turbo_tasks, get_diagnostics, get_issues, subscribe, NapiDiagnostic, NapiIssue, - NextTurboTasks, RootTask, TurbopackResult, VcArc, + NapiDiagnostic, NapiIssue, NextTurboTasks, RootTask, TurbopackResult, VcArc, + create_turbo_tasks, get_diagnostics, get_issues, subscribe, }, }; use crate::{register, util::DhatProfilerGuard}; @@ -66,30 +70,30 @@ use crate::{register, util::DhatProfilerGuard}; /// Used by [`benchmark_file_io`]. This is a noisy benchmark, so set the /// threshold high. const SLOW_FILESYSTEM_THRESHOLD: Duration = Duration::from_millis(100); -static SOURCE_MAP_PREFIX: Lazy = Lazy::new(|| format!("{}///", SOURCE_URL_PROTOCOL)); +static SOURCE_MAP_PREFIX: Lazy = Lazy::new(|| format!("{SOURCE_URL_PROTOCOL}///")); static SOURCE_MAP_PREFIX_PROJECT: Lazy = - Lazy::new(|| format!("{}///[{}]/", SOURCE_URL_PROTOCOL, PROJECT_FILESYSTEM_NAME)); + Lazy::new(|| format!("{SOURCE_URL_PROTOCOL}///[{PROJECT_FILESYSTEM_NAME}]/")); #[napi(object)] #[derive(Clone, Debug)] pub struct NapiEnvVar { - pub name: String, - pub value: String, + pub name: RcStr, + pub value: RcStr, } #[napi(object)] pub struct NapiDraftModeOptions { - pub preview_mode_id: String, - pub preview_mode_encryption_key: String, - pub preview_mode_signing_key: String, + pub preview_mode_id: RcStr, + pub preview_mode_encryption_key: RcStr, + pub preview_mode_signing_key: RcStr, } impl From for DraftModeOptions { fn from(val: NapiDraftModeOptions) -> Self { DraftModeOptions { - preview_mode_id: val.preview_mode_id.into(), - preview_mode_encryption_key: val.preview_mode_encryption_key.into(), - preview_mode_signing_key: val.preview_mode_signing_key.into(), + preview_mode_id: val.preview_mode_id, + preview_mode_encryption_key: val.preview_mode_encryption_key, + preview_mode_signing_key: val.preview_mode_signing_key, } } } @@ -108,23 +112,23 @@ pub struct NapiWatchOptions { pub struct NapiProjectOptions { /// A root path from which all files must be nested under. Trying to access /// a file outside this root will fail. Think of this as a chroot. - pub root_path: String, + pub root_path: RcStr, /// A path inside the root_path which contains the app/pages directories. - pub project_path: String, + pub project_path: RcStr, /// next.config's distDir. Project initialization occurs eariler than /// deserializing next.config, so passing it as separate option. - pub dist_dir: String, + pub dist_dir: RcStr, /// Filesystem watcher options. pub watch: NapiWatchOptions, /// The contents of next.config.js, serialized to JSON. - pub next_config: String, + pub next_config: RcStr, /// The contents of ts/config read by load-jsconfig, serialized to JSON. - pub js_config: String, + pub js_config: RcStr, /// A map of environment variables to use when compiling code. pub env: Vec, @@ -137,16 +141,16 @@ pub struct NapiProjectOptions { pub dev: bool, /// The server actions encryption key. - pub encryption_key: String, + pub encryption_key: RcStr, /// The build id. - pub build_id: String, + pub build_id: RcStr, /// Options for draft mode. pub preview_props: NapiDraftModeOptions, /// The browserslist query to use for targeting browsers. 
- pub browserslist_query: String, + pub browserslist_query: RcStr, /// When the code is minified, this opts out of the default mangling of /// local names for variables, functions etc., which can be useful for @@ -159,23 +163,23 @@ pub struct NapiProjectOptions { pub struct NapiPartialProjectOptions { /// A root path from which all files must be nested under. Trying to access /// a file outside this root will fail. Think of this as a chroot. - pub root_path: Option, + pub root_path: Option, /// A path inside the root_path which contains the app/pages directories. - pub project_path: Option, + pub project_path: Option, /// next.config's distDir. Project initialization occurs eariler than /// deserializing next.config, so passing it as separate option. - pub dist_dir: Option>, + pub dist_dir: Option>, /// Filesystem watcher options. pub watch: Option, /// The contents of next.config.js, serialized to JSON. - pub next_config: Option, + pub next_config: Option, /// The contents of ts/config read by load-jsconfig, serialized to JSON. - pub js_config: Option, + pub js_config: Option, /// A map of environment variables to use when compiling code. pub env: Option>, @@ -188,16 +192,16 @@ pub struct NapiPartialProjectOptions { pub dev: Option, /// The server actions encryption key. - pub encryption_key: Option, + pub encryption_key: Option, /// The build id. - pub build_id: Option, + pub build_id: Option, /// Options for draft mode. pub preview_props: Option, /// The browserslist query to use for targeting browsers. - pub browserslist_query: Option, + pub browserslist_query: Option, /// When the code is minified, this opts out of the default mangling of /// local names for variables, functions etc., which can be useful for @@ -221,6 +225,8 @@ pub struct NapiTurboEngineOptions { pub memory_limit: Option, /// Track dependencies between tasks. If false, any change during build will error. pub dependency_tracking: Option, + /// Whether the project is running in a CI environment. 
+ pub is_ci: Option, } impl From for WatchOptions { @@ -238,22 +244,22 @@ impl From for WatchOptions { impl From for ProjectOptions { fn from(val: NapiProjectOptions) -> Self { ProjectOptions { - root_path: val.root_path.into(), - project_path: val.project_path.into(), + root_path: val.root_path, + project_path: val.project_path, watch: val.watch.into(), - next_config: val.next_config.into(), - js_config: val.js_config.into(), + next_config: val.next_config, + js_config: val.js_config, env: val .env .into_iter() - .map(|var| (var.name.into(), var.value.into())) + .map(|var| (var.name, var.value)) .collect(), define_env: val.define_env.into(), dev: val.dev, - encryption_key: val.encryption_key.into(), - build_id: val.build_id.into(), + encryption_key: val.encryption_key, + build_id: val.build_id, preview_props: val.preview_props.into(), - browserslist_query: val.browserslist_query.into(), + browserslist_query: val.browserslist_query, no_mangling: val.no_mangling, } } @@ -262,20 +268,18 @@ impl From for ProjectOptions { impl From for PartialProjectOptions { fn from(val: NapiPartialProjectOptions) -> Self { PartialProjectOptions { - root_path: val.root_path.map(From::from), - project_path: val.project_path.map(From::from), + root_path: val.root_path, + project_path: val.project_path, watch: val.watch.map(From::from), - next_config: val.next_config.map(From::from), - js_config: val.js_config.map(From::from), - env: val.env.map(|env| { - env.into_iter() - .map(|var| (var.name.into(), var.value.into())) - .collect() - }), + next_config: val.next_config, + js_config: val.js_config, + env: val + .env + .map(|env| env.into_iter().map(|var| (var.name, var.value)).collect()), define_env: val.define_env.map(|env| env.into()), dev: val.dev, - encryption_key: val.encryption_key.map(From::from), - build_id: val.build_id.map(From::from), + encryption_key: val.encryption_key, + build_id: val.build_id, preview_props: val.preview_props.map(|props| props.into()), } } @@ -287,17 +291,17 @@ impl From for DefineEnv { client: val .client .into_iter() - .map(|var| (var.name.into(), var.value.into())) + .map(|var| (var.name, var.value)) .collect(), edge: val .edge .into_iter() - .map(|var| (var.name.into(), var.value.into())) + .map(|var| (var.name, var.value)) .collect(), nodejs: val .nodejs .into_iter() - .map(|var| (var.name.into(), var.value.into())) + .map(|var| (var.name, var.value)) .collect(), } } @@ -362,9 +366,8 @@ pub async fn project_new( let subscriber = subscriber.with(console_subscriber::spawn()); let subscriber = subscriber.with(FilterLayer::try_new(&trace).unwrap()); - let dist_dir = options.dist_dir.clone(); - let internal_dir = PathBuf::from(&options.project_path).join(dist_dir); + let internal_dir = PathBuf::from(&options.project_path).join(&options.dist_dir); std::fs::create_dir_all(&internal_dir) .context("Unable to create .next directory") .unwrap(); @@ -374,7 +377,9 @@ pub async fn project_new( let subscriber = subscriber.with(RawTraceLayer::new(trace_writer)); exit.on_exit(async move { - tokio::task::spawn_blocking(move || drop(trace_writer_guard)); + tokio::task::spawn_blocking(move || drop(trace_writer_guard)) + .await + .unwrap(); }); let trace_server = std::env::var("NEXT_TURBOPACK_TRACE_SERVER").ok(); @@ -382,7 +387,7 @@ pub async fn project_new( thread::spawn(move || { turbopack_trace_server::start_turbopack_trace_server(trace_file); }); - println!("Turbopack trace server started. View trace at https://turbo-trace-viewer.vercel.app/"); + println!("Turbopack trace server started. 
View trace at https://trace.nextjs.org"); } subscriber.init(); @@ -394,12 +399,15 @@ pub async fn project_new( .unwrap_or(usize::MAX); let persistent_caching = turbo_engine_options.persistent_caching.unwrap_or_default(); let dependency_tracking = turbo_engine_options.dependency_tracking.unwrap_or(true); + let is_ci = turbo_engine_options.is_ci.unwrap_or(false); let turbo_tasks = create_turbo_tasks( PathBuf::from(&options.dist_dir), persistent_caching, memory_limit, dependency_tracking, + is_ci, )?; + let stats_path = std::env::var_os("NEXT_TURBOPACK_TASK_STATISTICS"); if let Some(stats_path) = stats_path { let task_stats = turbo_tasks.task_statistics().enable().clone(); @@ -419,7 +427,7 @@ pub async fn project_new( let options: ProjectOptions = options.into(); let container = turbo_tasks .run_once(async move { - let project = ProjectContainer::new("next.js".into(), options.dev); + let project = ProjectContainer::new(rcstr!("next.js"), options.dev); let project = project.to_resolved().await?; project.initialize(options).await?; Ok(project) @@ -427,8 +435,9 @@ pub async fn project_new( .await .map_err(|e| napi::Error::from_reason(PrettyPrintError(&e).to_string()))?; + let tasks_ref = turbo_tasks.clone(); turbo_tasks.spawn_once_task(async move { - benchmark_file_io(container.project().node_root()) + benchmark_file_io(tasks_ref, container.project().node_root()) .await .inspect_err(|err| tracing::warn!(%err, "failed to benchmark file IO")) }); @@ -442,6 +451,35 @@ pub async fn project_new( )) } +#[derive(Debug, Clone, Serialize)] +struct SlowFilesystemEvent { + directory: String, + duration_ms: u128, +} + +impl CompilationEvent for SlowFilesystemEvent { + fn type_name(&self) -> &'static str { + "SlowFilesystemEvent" + } + + fn severity(&self) -> Severity { + Severity::Warning + } + + fn message(&self) -> String { + format!( + "Slow filesystem detected. The benchmark took {}ms. If {} is a network drive, \ + consider moving it to a local folder. If you have an antivirus enabled, consider \ + excluding your project directory.", + self.duration_ms, self.directory + ) + } + + fn to_json(&self) -> String { + serde_json::to_string(self).unwrap() + } +} + /// A very simple and low-overhead, but potentially noisy benchmark to detect /// very slow disk IO. Warns the user (via `println!`) if the benchmark takes /// more than `SLOW_FILESYSTEM_THRESHOLD`. @@ -449,8 +487,11 @@ pub async fn project_new( /// This idea is copied from Bun: /// - https://x.com/jarredsumner/status/1637549427677364224 /// - https://github.com/oven-sh/bun/blob/06a9aa80c38b08b3148bfeabe560/src/install/install.zig#L3038 -#[tracing::instrument] -async fn benchmark_file_io(directory: Vc) -> Result> { +#[tracing::instrument(skip(turbo_tasks))] +async fn benchmark_file_io( + turbo_tasks: NextTurboTasks, + directory: Vc, +) -> Result> { // try to get the real file path on disk so that we can use it with tokio let fs = Vc::try_resolve_downcast_type::(directory.fs()) .await? @@ -466,7 +507,7 @@ async fn benchmark_file_io(directory: Vc) -> Result) -> Result SLOW_FILESYSTEM_THRESHOLD { + let duration = Instant::now().duration_since(start); + if duration > SLOW_FILESYSTEM_THRESHOLD { println!( - "Slow filesystem detected. If {} is a network drive, consider moving it to a local \ - folder. If you have an antivirus enabled, consider excluding your project directory.", + "Slow filesystem detected. The benchmark took {}ms. If {} is a network drive, \ + consider moving it to a local folder. 
If you have an antivirus enabled, consider \ + excluding your project directory.", + duration.as_millis(), directory.to_string_lossy(), ); + + turbo_tasks.send_compilation_event(Arc::new(SlowFilesystemEvent { + directory: directory.to_string_lossy().into(), + duration_ms: duration.as_millis(), + })); } Ok(Completion::new()) @@ -517,6 +566,24 @@ pub async fn project_update( Ok(()) } +/// Invalidates the persistent cache so that it will be deleted next time that a turbopack project +/// is created with persistent caching enabled. +#[napi] +pub async fn project_invalidate_persistent_cache( + #[napi(ts_arg_type = "{ __napiType: \"Project\" }")] project: External, +) -> napi::Result<()> { + tokio::task::spawn_blocking(move || { + // TODO: Let the JS caller specify a reason? We need to limit the reasons to ones we know + // how to generate a message for on the Rust side of the FFI. + project + .turbo_tasks + .invalidate_persistent_cache(invalidation_reasons::USER_REQUEST) + }) + .await + .context("panicked while invalidating persistent cache")??; + Ok(()) +} + /// Runs exit handlers for the project registered using the [`ExitHandler`] API. /// /// This is called by `project_shutdown`, so if you're calling that API, you shouldn't call this @@ -553,7 +620,7 @@ pub async fn project_shutdown( #[derive(Default)] pub struct AppPageNapiRoute { /// The relative path from project_path to the route file - pub original_name: Option, + pub original_name: Option, pub html_endpoint: Option>, pub rsc_endpoint: Option>, @@ -565,7 +632,7 @@ pub struct NapiRoute { /// The router path pub pathname: String, /// The relative path from project_path to the route file - pub original_name: Option, + pub original_name: Option, /// The type of route, eg a Page or App pub r#type: &'static str, @@ -611,7 +678,7 @@ impl NapiRoute { pages .into_iter() .map(|page_route| AppPageNapiRoute { - original_name: Some(page_route.original_name.into_owned()), + original_name: Some(page_route.original_name), html_endpoint: convert_endpoint(page_route.html_endpoint), rsc_endpoint: convert_endpoint(page_route.rsc_endpoint), }) @@ -624,7 +691,7 @@ impl NapiRoute { endpoint, } => NapiRoute { pathname, - original_name: Some(original_name.into_owned()), + original_name: Some(original_name), r#type: "app-route", endpoint: convert_endpoint(endpoint), ..Default::default() @@ -781,11 +848,14 @@ pub async fn project_write_all_entrypoints_to_disk( app_dir_only: bool, ) -> napi::Result> { let turbo_tasks = project.turbo_tasks.clone(); + let compilation_event_sender = turbo_tasks.clone(); + let (entrypoints, issues, diags) = turbo_tasks .run_once(async move { let entrypoints_with_issues_op = get_all_written_entrypoints_with_issues_operation(project.container, app_dir_only); + // Read and compile the files let EntrypointsWithIssues { entrypoints, issues, @@ -794,8 +864,19 @@ pub async fn project_write_all_entrypoints_to_disk( } = &*entrypoints_with_issues_op .read_strongly_consistent() .await?; + + // Start timing writing the files to disk + let now = Instant::now(); + + // Write the files to disk effects.apply().await?; + // Send a compilation event to indicate that the files have been written to disk + compilation_event_sender.send_compilation_event(Arc::new(TimingEvent::new( + "Finished writing to disk".to_owned(), + now.elapsed(), + ))); + Ok((entrypoints.clone(), issues.clone(), diagnostics.clone())) }) .await @@ -947,7 +1028,7 @@ fn project_hmr_update_operation( #[napi(ts_return_type = "{ __napiType: \"RootTask\" }")] pub fn project_hmr_events( 
#[napi(ts_arg_type = "{ __napiType: \"Project\" }")] project: External, - identifier: String, + identifier: RcStr, func: JsFunction, ) -> napi::Result> { let turbo_tasks = project.turbo_tasks.clone(); @@ -960,7 +1041,7 @@ pub fn project_hmr_events( let outer_identifier = identifier.clone(); let session = session.clone(); move || { - let identifier: RcStr = outer_identifier.clone().into(); + let identifier: RcStr = outer_identifier.clone(); let session = session.clone(); async move { let project = project.project().to_resolved().await?; @@ -992,7 +1073,7 @@ pub fn project_hmr_events( } .instrument(tracing::info_span!( "HMR subscription", - identifier = outer_identifier + identifier = %outer_identifier )) } }, @@ -1005,7 +1086,7 @@ pub fn project_hmr_events( .collect(); let update_issues = issues .iter() - .map(|issue| (&**issue).into()) + .map(|issue| Issue::from(&**issue)) .collect::>(); let identifier = ResourceIdentifier { @@ -1035,7 +1116,7 @@ pub fn project_hmr_events( #[napi(object)] struct HmrIdentifiers { - pub identifiers: Vec, + pub identifiers: Vec, } #[turbo_tasks::value(serialization = "none")] @@ -1101,10 +1182,7 @@ pub fn project_hmr_identifiers_subscribe( Ok(vec![TurbopackResult { result: HmrIdentifiers { - identifiers: identifiers - .iter() - .map(|ident| ident.to_string()) - .collect::>(), + identifiers: ReadRef::into_owned(identifiers), }, issues: issues .iter() @@ -1126,7 +1204,7 @@ pub enum UpdateMessage { #[napi(object)] struct NapiUpdateMessage { - pub update_type: String, + pub update_type: &'static str, pub value: Option, } @@ -1134,11 +1212,11 @@ impl From for NapiUpdateMessage { fn from(update_message: UpdateMessage) -> Self { match update_message { UpdateMessage::Start => NapiUpdateMessage { - update_type: "start".to_string(), + update_type: "start", value: None, }, UpdateMessage::End(info) => NapiUpdateMessage { - update_type: "end".to_string(), + update_type: "end", value: Some(info.into()), }, } @@ -1211,7 +1289,7 @@ pub fn project_update_info_subscribe( if !matches!(status, Status::Ok) { let error = anyhow!("Error calling JS function: {}", status); - eprintln!("{}", error); + eprintln!("{error}"); break; } } @@ -1219,21 +1297,74 @@ pub fn project_update_info_subscribe( Ok(()) } -#[turbo_tasks::value] -#[derive(Debug)] +/// Subscribes to all compilation events that are not cached like timing and progress information. 
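The `#[napi]` subscription wrapper below forwards each of these events to a JS callback via a threadsafe function. On the Rust side the same queue can be drained directly; a minimal sketch of that consumer path, assuming a `NextTurboTasks` handle named `turbo_tasks` is already in scope and using `"SlowFilesystemEvent"` purely as an illustrative filter (passing `None` would subscribe to every event type):

```rust
// Sketch only: drain compilation events from a NextTurboTasks handle we assume
// already exists. The filter list is optional; None means "all event types".
let mut events = turbo_tasks
    .get_compilation_events_stream(Some(vec!["SlowFilesystemEvent".to_owned()]));
tokio::spawn(async move {
    while let Some(event) = events.recv().await {
        // type_name/severity/message are the same accessors the N-API wrapper
        // copies onto the JS object it hands to the callback.
        let severity = event.severity().to_string();
        println!("[{severity}] {}: {}", event.type_name(), event.message());
    }
});
```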
+#[napi] +pub fn project_compilation_events_subscribe( + #[napi(ts_arg_type = "{ __napiType: \"Project\" }")] project: External, + func: JsFunction, + event_types: Option>, +) -> napi::Result<()> { + let turbo_tasks = project.turbo_tasks.clone(); + let tsfn: ThreadsafeFunction> = + func.create_threadsafe_function(0, |ctx| { + let event: Arc = ctx.value; + + let env = ctx.env; + let mut obj = env.create_object()?; + obj.set_named_property("typeName", event.type_name())?; + obj.set_named_property("severity", event.severity().to_string())?; + obj.set_named_property("message", event.message())?; + + let external = env.create_external(event, None); + obj.set_named_property("eventData", external)?; + + Ok(vec![obj]) + })?; + + tokio::spawn(async move { + let mut receiver = turbo_tasks.get_compilation_events_stream(event_types); + while let Some(msg) = receiver.recv().await { + let status = tsfn.call(Ok(msg), ThreadsafeFunctionCallMode::Blocking); + + if status != Status::Ok { + break; + } + } + }); + + Ok(()) +} + #[napi(object)] +#[derive( + Clone, + Debug, + Deserialize, + Eq, + Hash, + NonLocalValue, + OperationValue, + PartialEq, + Serialize, + TaskInput, + TraceRawVcs, +)] pub struct StackFrame { pub is_server: bool, pub is_internal: Option, - pub original_file: Option, - pub file: String, - // 1-indexed, unlike source map tokens + pub original_file: Option, + pub file: RcStr, + /// 1-indexed, unlike source map tokens pub line: Option, - // 1-indexed, unlike source map tokens + /// 1-indexed, unlike source map tokens pub column: Option, - pub method_name: Option, + pub method_name: Option, } +#[turbo_tasks::value(transparent)] +#[derive(Clone)] +pub struct OptionStackFrame(Option); + #[turbo_tasks::function] pub async fn get_source_map_rope( container: Vc, @@ -1300,119 +1431,126 @@ pub fn get_source_map_rope_operation( } #[turbo_tasks::function(operation)] -pub fn get_source_map_operation( +pub async fn project_trace_source_operation( container: ResolvedVc, - file_path: RcStr, -) -> Vc { - let map = get_source_map_rope(*container, file_path); - SourceMap::new_from_rope_cached(map) -} - -#[napi] -pub async fn project_trace_source( - #[napi(ts_arg_type = "{ __napiType: \"Project\" }")] project: External, frame: StackFrame, - current_directory_file_url: String, -) -> napi::Result> { - let turbo_tasks = project.turbo_tasks.clone(); - let container = project.container; - let traced_frame = turbo_tasks - .run_once(async move { - let Some(map) = &*get_source_map_operation(container, RcStr::from(frame.file)) - .read_strongly_consistent() - .await? - else { - return Ok(None); - }; + current_directory_file_url: RcStr, +) -> Result> { + let Some(map) = + &*SourceMap::new_from_rope_cached(get_source_map_rope(*container, frame.file)).await? 
+ else { + return Ok(Vc::cell(None)); + }; - let Some(line) = frame.line else { - return Ok(None); - }; + let Some(line) = frame.line else { + return Ok(Vc::cell(None)); + }; - let token = map - .lookup_token( - line.saturating_sub(1), - frame.column.unwrap_or(1).saturating_sub(1), - ) - .await?; + let token = map + .lookup_token( + line.saturating_sub(1), + frame.column.unwrap_or(1).saturating_sub(1), + ) + .await?; - let (original_file, line, column, name) = match token { - Token::Original(token) => ( - urlencoding::decode(&token.original_file)?.into_owned(), - // JS stack frames are 1-indexed, source map tokens are 0-indexed - Some(token.original_line + 1), - Some(token.original_column + 1), - token.name.clone(), - ), - Token::Synthetic(token) => { - let Some(file) = &token.guessed_original_file else { - return Ok(None); - }; - (file.to_owned(), None, None, None) - } + let (original_file, line, column, method_name) = match token { + Token::Original(token) => ( + match urlencoding::decode(&token.original_file)? { + Cow::Borrowed(_) => token.original_file, + Cow::Owned(original_file) => RcStr::from(original_file), + }, + // JS stack frames are 1-indexed, source map tokens are 0-indexed + Some(token.original_line + 1), + Some(token.original_column + 1), + token.name, + ), + Token::Synthetic(token) => { + let Some(original_file) = token.guessed_original_file else { + return Ok(Vc::cell(None)); }; + (original_file, None, None, None) + } + }; - let project_root_uri = - uri_from_file(project.container.project().project_root_path(), None).await? + "/"; - let (file, original_file, is_internal) = if let Some(source_file) = - original_file.strip_prefix(&project_root_uri) - { - // Client code uses file:// - ( + let project_root_uri = + uri_from_file(container.project().project_root_path(), None).await? + "/"; + let (file, original_file, is_internal) = + if let Some(source_file) = original_file.strip_prefix(&project_root_uri) { + // Client code uses file:// + ( + RcStr::from( get_relative_path_to(¤t_directory_file_url, &original_file) // TODO(sokra) remove this to include a ./ here to make it a relative path - .trim_start_matches("./") - .to_string(), - Some(source_file.to_string()), - false, - ) - } else if let Some(source_file) = - original_file.strip_prefix(&*SOURCE_MAP_PREFIX_PROJECT) - { - // Server code uses turbopack:///[project] - // TODO should this also be file://? - ( + .trim_start_matches("./"), + ), + Some(RcStr::from(source_file)), + false, + ) + } else if let Some(source_file) = original_file.strip_prefix(&*SOURCE_MAP_PREFIX_PROJECT) { + // Server code uses turbopack:///[project] + // TODO should this also be file://? + ( + RcStr::from( get_relative_path_to( ¤t_directory_file_url, - &format!("{}{}", project_root_uri, source_file), + &format!("{project_root_uri}{source_file}"), ) // TODO(sokra) remove this to include a ./ here to make it a relative path - .trim_start_matches("./") - .to_string(), - Some(source_file.to_string()), - false, - ) - } else if let Some(source_file) = original_file.strip_prefix(&*SOURCE_MAP_PREFIX) { - // All other code like turbopack:///[turbopack] is internal code - // TODO(veil): Should the protocol be preserved? 
- (source_file.to_string(), None, true) - } else { - bail!( - "Original file ({}) outside project ({})", - original_file, - project_root_uri - ) - }; - - Ok(Some(StackFrame { - file, + .trim_start_matches("./"), + ), + Some(RcStr::from(source_file)), + false, + ) + } else if let Some(source_file) = original_file.strip_prefix(&*SOURCE_MAP_PREFIX) { + // All other code like turbopack:///[turbopack] is internal code + // TODO(veil): Should the protocol be preserved? + (RcStr::from(source_file), None, true) + } else { + bail!( + "Original file ({}) outside project ({})", original_file, - method_name: name.as_ref().map(ToString::to_string), - line, - column, - is_server: frame.is_server, - is_internal: Some(is_internal), - })) + project_root_uri + ) + }; + + Ok(Vc::cell(Some(StackFrame { + file, + original_file, + method_name, + line, + column, + is_server: frame.is_server, + is_internal: Some(is_internal), + }))) +} + +#[napi] +pub async fn project_trace_source( + #[napi(ts_arg_type = "{ __napiType: \"Project\" }")] project: External, + frame: StackFrame, + current_directory_file_url: String, +) -> napi::Result> { + let turbo_tasks = project.turbo_tasks.clone(); + let container = project.container; + let traced_frame = turbo_tasks + .run_once(async move { + project_trace_source_operation( + container, + frame, + RcStr::from(current_directory_file_url), + ) + .read_strongly_consistent() + .await }) .await .map_err(|e| napi::Error::from_reason(PrettyPrintError(&e).to_string()))?; - Ok(traced_frame) + Ok(ReadRef::into_owned(traced_frame)) } #[napi] pub async fn project_get_source_for_asset( #[napi(ts_arg_type = "{ __napiType: \"Project\" }")] project: External, - file_path: String, + file_path: RcStr, ) -> napi::Result> { let turbo_tasks = project.turbo_tasks.clone(); let source = turbo_tasks @@ -1423,7 +1561,7 @@ pub async fn project_get_source_for_asset( .project_path() .fs() .root() - .join(file_path.clone().into()) + .join(file_path.clone()) .read() .await?; @@ -1442,14 +1580,14 @@ pub async fn project_get_source_for_asset( #[napi] pub async fn project_get_source_map( #[napi(ts_arg_type = "{ __napiType: \"Project\" }")] project: External, - file_path: String, + file_path: RcStr, ) -> napi::Result> { let turbo_tasks = project.turbo_tasks.clone(); let container = project.container; let source_map = turbo_tasks .run_once(async move { - let Some(map) = &*get_source_map_rope_operation(container, RcStr::from(file_path)) + let Some(map) = &*get_source_map_rope_operation(container, file_path) .read_strongly_consistent() .await? 
else { @@ -1466,7 +1604,7 @@ pub async fn project_get_source_map( #[napi] pub fn project_get_source_map_sync( #[napi(ts_arg_type = "{ __napiType: \"Project\" }")] project: External, - file_path: String, + file_path: RcStr, ) -> napi::Result> { within_runtime_if_available(|| { tokio::runtime::Handle::current().block_on(project_get_source_map(project, file_path)) diff --git a/crates/napi/src/next_api/utils.rs b/crates/napi/src/next_api/utils.rs index 62c2c6169f69c..9d9bed88cd946 100644 --- a/crates/napi/src/next_api/utils.rs +++ b/crates/napi/src/next_api/utils.rs @@ -1,20 +1,24 @@ use std::{future::Future, ops::Deref, path::PathBuf, sync::Arc, time::Duration}; -use anyhow::{anyhow, Context, Result}; +use anyhow::{Context, Result, anyhow}; use napi::{ + JsFunction, JsObject, JsUnknown, NapiRaw, NapiValue, Status, bindgen_prelude::{External, ToNapiValue}, threadsafe_function::{ThreadSafeCallContext, ThreadsafeFunction, ThreadsafeFunctionCallMode}, - JsFunction, JsObject, JsUnknown, NapiRaw, NapiValue, Status, }; use rustc_hash::FxHashMap; use serde::Serialize; +use tokio::sync::mpsc::Receiver; use turbo_tasks::{ - get_effects, task_statistics::TaskStatisticsApi, trace::TraceRawVcs, Effects, OperationVc, - ReadRef, TaskId, TryJoinIterExt, TurboTasks, TurboTasksApi, UpdateInfo, Vc, VcValueType, + Effects, OperationVc, ReadRef, TaskId, TryJoinIterExt, TurboTasks, TurboTasksApi, UpdateInfo, + Vc, VcValueType, get_effects, + message_queue::{CompilationEvent, Severity}, + task_statistics::TaskStatisticsApi, + trace::TraceRawVcs, }; use turbo_tasks_backend::{ - default_backing_storage, noop_backing_storage, DefaultBackingStorage, GitVersionInfo, - NoopBackingStorage, + BackingStorage, DefaultBackingStorage, GitVersionInfo, NoopBackingStorage, StartupCacheState, + db_invalidation::invalidation_reasons, default_backing_storage, noop_backing_storage, }; use turbo_tasks_fs::FileContent; use turbopack_core::{ @@ -124,6 +128,73 @@ impl NextTurboTasks { NextTurboTasks::PersistentCaching(turbo_tasks) => turbo_tasks.task_statistics(), } } + + pub fn get_compilation_events_stream( + &self, + event_types: Option>, + ) -> Receiver> { + match self { + NextTurboTasks::Memory(turbo_tasks) => { + turbo_tasks.subscribe_to_compilation_events(event_types) + } + NextTurboTasks::PersistentCaching(turbo_tasks) => { + turbo_tasks.subscribe_to_compilation_events(event_types) + } + } + } + + pub fn send_compilation_event(&self, event: Arc) { + match self { + NextTurboTasks::Memory(turbo_tasks) => turbo_tasks.send_compilation_event(event), + NextTurboTasks::PersistentCaching(turbo_tasks) => { + turbo_tasks.send_compilation_event(event) + } + } + } + + pub fn invalidate_persistent_cache(&self, reason_code: &str) -> Result<()> { + match self { + NextTurboTasks::Memory(_) => {} + NextTurboTasks::PersistentCaching(turbo_tasks) => turbo_tasks + .backend() + .backing_storage() + .invalidate(reason_code)?, + } + Ok(()) + } +} + +#[derive(Serialize)] +struct StartupCacheInvalidationEvent { + reason_code: Option, +} + +impl CompilationEvent for StartupCacheInvalidationEvent { + fn type_name(&self) -> &'static str { + "StartupCacheInvalidationEvent" + } + + fn severity(&self) -> Severity { + Severity::Warning + } + + fn message(&self) -> String { + let reason_msg = match self.reason_code.as_deref() { + Some(invalidation_reasons::PANIC) => { + " because we previously detected an internal error in Turbopack" + } + Some(invalidation_reasons::USER_REQUEST) => " as the result of a user request", + _ => "", // ignore unknown reasons + }; + 
format!( + "Turbopack's persistent cache has been deleted{reason_msg}. Builds or page loads may \ + be slower as a result." + ) + } + + fn to_json(&self) -> String { + serde_json::to_string(self).unwrap() + } } pub fn create_turbo_tasks( @@ -131,6 +202,7 @@ pub fn create_turbo_tasks( persistent_caching: bool, _memory_limit: usize, dependency_tracking: bool, + is_ci: bool, ) -> Result { Ok(if persistent_caching { let version_info = GitVersionInfo { @@ -138,20 +210,24 @@ pub fn create_turbo_tasks( dirty: option_env!("CI").is_none_or(|value| value.is_empty()) && env!("VERGEN_GIT_DIRTY") == "true", }; - NextTurboTasks::PersistentCaching(TurboTasks::new( - turbo_tasks_backend::TurboTasksBackend::new( - turbo_tasks_backend::BackendOptions { - storage_mode: Some(if std::env::var("TURBO_ENGINE_READ_ONLY").is_ok() { - turbo_tasks_backend::StorageMode::ReadOnly - } else { - turbo_tasks_backend::StorageMode::ReadWrite - }), - dependency_tracking, - ..Default::default() - }, - default_backing_storage(&output_path.join("cache/turbopack"), &version_info)?, - ), - )) + let (backing_storage, cache_state) = + default_backing_storage(&output_path.join("cache/turbopack"), &version_info, is_ci)?; + let tt = TurboTasks::new(turbo_tasks_backend::TurboTasksBackend::new( + turbo_tasks_backend::BackendOptions { + storage_mode: Some(if std::env::var("TURBO_ENGINE_READ_ONLY").is_ok() { + turbo_tasks_backend::StorageMode::ReadOnly + } else { + turbo_tasks_backend::StorageMode::ReadWrite + }), + dependency_tracking, + ..Default::default() + }, + backing_storage, + )); + if let StartupCacheState::Invalidated { reason_code } = cache_state { + tt.send_compilation_event(Arc::new(StartupCacheInvalidationEvent { reason_code })); + } + NextTurboTasks::PersistentCaching(tt) } else { NextTurboTasks::Memory(TurboTasks::new( turbo_tasks_backend::TurboTasksBackend::new( @@ -259,7 +335,7 @@ pub struct NapiIssue { pub detail: Option, pub source: Option, pub documentation_link: String, - pub sub_issues: Vec, + pub import_traces: serde_json::Value, } impl From<&PlainIssue> for NapiIssue { @@ -279,11 +355,7 @@ impl From<&PlainIssue> for NapiIssue { severity: issue.severity.as_str().to_string(), source: issue.source.as_ref().map(|source| source.into()), title: serde_json::to_value(StyledStringSerialize::from(&issue.title)).unwrap(), - sub_issues: issue - .sub_issues - .iter() - .map(|issue| (&**issue).into()) - .collect(), + import_traces: serde_json::to_value(&issue.import_traces).unwrap(), } } } @@ -428,10 +500,12 @@ impl ToNapiValue for TurbopackResult { env: napi::sys::napi_env, val: Self, ) -> napi::Result { - let mut obj = napi::Env::from_raw(env).create_object()?; + let mut obj = unsafe { napi::Env::from_raw(env).create_object()? }; - let result = T::to_napi_value(env, val.result)?; - let result = JsUnknown::from_raw(env, result)?; + let result = unsafe { + let result = T::to_napi_value(env, val.result)?; + JsUnknown::from_raw(env, result)? 
+ }; if matches!(result.get_type()?, napi::ValueType::Object) { // SAFETY: We know that result is an object, so we can cast it to a JsObject let result = unsafe { result.cast::() }; @@ -445,7 +519,7 @@ impl ToNapiValue for TurbopackResult { obj.set_named_property("issues", val.issues)?; obj.set_named_property("diagnostics", val.diagnostics)?; - Ok(obj.raw()) + Ok(unsafe { obj.raw() }) } } @@ -471,7 +545,7 @@ pub fn subscribe> + Send, ); if !matches!(status, Status::Ok) { let error = anyhow!("Error calling JS function: {}", status); - eprintln!("{}", error); + eprintln!("{error}"); return Err::, _>(error); } Ok(Default::default()) diff --git a/crates/napi/src/parse.rs b/crates/napi/src/parse.rs index 546db31757171..4ba37c725814f 100644 --- a/crates/napi/src/parse.rs +++ b/crates/napi/src/parse.rs @@ -5,7 +5,7 @@ use napi::bindgen_prelude::*; use swc_core::{ base::{config::ParseOptions, try_with_handler}, common::{ - comments::Comments, errors::ColorConfig, FileName, FilePathMapping, SourceMap, GLOBALS, + FileName, FilePathMapping, GLOBALS, SourceMap, comments::Comments, errors::ColorConfig, }, }; @@ -53,6 +53,7 @@ impl Task for ParseTask { ) }, ) + .map_err(|e| e.to_pretty_error()) .convert_err()?; let ast_json = serde_json::to_string(&program) diff --git a/crates/napi/src/react_compiler.rs b/crates/napi/src/react_compiler.rs new file mode 100644 index 0000000000000..84e82d7e330b8 --- /dev/null +++ b/crates/napi/src/react_compiler.rs @@ -0,0 +1,62 @@ +use std::{path::PathBuf, sync::Arc}; + +use napi::bindgen_prelude::*; +use next_custom_transforms::react_compiler; +use swc_core::{ + common::{GLOBALS, SourceMap}, + ecma::{ + ast::EsVersion, + parser::{Syntax, TsSyntax, parse_file_as_program}, + }, +}; + +pub struct CheckTask { + pub filename: PathBuf, +} + +#[napi] +impl Task for CheckTask { + type Output = bool; + type JsValue = bool; + + fn compute(&mut self) -> napi::Result { + GLOBALS.set(&Default::default(), || { + // + let cm = Arc::new(SourceMap::default()); + let Ok(fm) = cm.load_file(&self.filename.clone()) else { + return Ok(false); + }; + let mut errors = vec![]; + let Ok(program) = parse_file_as_program( + &fm, + Syntax::Typescript(TsSyntax { + tsx: true, + ..Default::default() + }), + EsVersion::EsNext, + None, + &mut errors, + ) else { + return Ok(false); + }; + if !errors.is_empty() { + return Ok(false); + } + + Ok(react_compiler::is_required(&program)) + }) + } + + fn resolve(&mut self, _env: Env, result: Self::Output) -> napi::Result { + Ok(result) + } +} + +#[napi] +pub fn is_react_compiler_required( + filename: String, + signal: Option, +) -> AsyncTask { + let filename = PathBuf::from(filename); + AsyncTask::with_optional_signal(CheckTask { filename }, signal) +} diff --git a/crates/napi/src/rspack.rs b/crates/napi/src/rspack.rs new file mode 100644 index 0000000000000..72e8680ae7e84 --- /dev/null +++ b/crates/napi/src/rspack.rs @@ -0,0 +1,302 @@ +use std::{cell::RefCell, fs, path::PathBuf, sync::Arc}; + +use napi::bindgen_prelude::*; +use swc_core::{ + base::{ + config::{IsModule, ParseOptions}, + try_with_handler, + }, + common::{ + FileName, FilePathMapping, GLOBALS, Mark, SourceMap, SyntaxContext, errors::ColorConfig, + }, + ecma::{ + ast::{Decl, EsVersion, Id}, + atoms::Atom, + parser::{EsSyntax, Syntax, TsSyntax}, + utils::{ExprCtx, find_pat_ids}, + visit::{Visit, VisitMutWith, VisitWith}, + }, + node::MapErr, +}; + +use crate::next_api::utils::{NapiIssueSourceRange, NapiSourcePos}; + +struct Finder { + pub named_exports: Vec, +} + +impl Visit for Finder { + fn 
visit_export_decl(&mut self, node: &swc_core::ecma::ast::ExportDecl) { + match &node.decl { + Decl::Class(class_decl) => { + self.named_exports.push(class_decl.ident.sym.clone()); + } + Decl::Fn(fn_decl) => { + self.named_exports.push(fn_decl.ident.sym.clone()); + } + Decl::Var(var_decl) => { + let ids: Vec = find_pat_ids(&var_decl.decls); + for id in ids { + self.named_exports.push(id.0); + } + } + _ => {} + } + } + + fn visit_export_named_specifier(&mut self, node: &swc_core::ecma::ast::ExportNamedSpecifier) { + let named_export = if let Some(exported) = &node.exported { + exported.atom().clone() + } else { + node.orig.atom().clone() + }; + self.named_exports.push(named_export); + } + + fn visit_export_namespace_specifier( + &mut self, + node: &swc_core::ecma::ast::ExportNamespaceSpecifier, + ) { + self.named_exports.push(node.name.atom().clone()); + } +} + +pub struct FinderTask { + pub resource_path: Option, +} + +impl Task for FinderTask { + type Output = Vec; + type JsValue = Array; + + fn compute(&mut self) -> napi::Result { + let resource_path = PathBuf::from(self.resource_path.take().unwrap()); + let src = fs::read_to_string(&resource_path) + .map_err(|e| napi::Error::from_reason(e.to_string()))?; + + let syntax = match resource_path + .extension() + .map(|os_str| os_str.to_string_lossy()) + { + Some(ext) if matches!(ext.as_ref(), "ts" | "mts" | "cts") => { + Syntax::Typescript(TsSyntax { + tsx: false, + decorators: true, + dts: false, + no_early_errors: true, + disallow_ambiguous_jsx_like: false, + }) + } + Some(ext) if matches!(ext.as_ref(), "tsx" | "mtsx" | "ctsx") => { + Syntax::Typescript(TsSyntax { + tsx: true, + decorators: true, + dts: false, + no_early_errors: true, + disallow_ambiguous_jsx_like: false, + }) + } + _ => Syntax::Es(EsSyntax { + jsx: true, + fn_bind: true, + decorators: true, + decorators_before_export: true, + export_default_from: true, + import_attributes: true, + allow_super_outside_method: true, + allow_return_outside_function: true, + auto_accessors: true, + explicit_resource_management: true, + }), + }; + + GLOBALS.set(&Default::default(), || { + let c = + swc_core::base::Compiler::new(Arc::new(SourceMap::new(FilePathMapping::empty()))); + + let options = ParseOptions { + comments: false, + syntax, + is_module: IsModule::Unknown, + target: EsVersion::default(), + }; + let fm = + c.cm.new_source_file(Arc::new(FileName::Real(resource_path)), src); + let program = try_with_handler( + c.cm.clone(), + swc_core::base::HandlerOpts { + color: ColorConfig::Never, + skip_filename: false, + }, + |handler| { + c.parse_js( + fm, + handler, + options.target, + options.syntax, + options.is_module, + None, + ) + }, + ) + .map_err(|e| e.to_pretty_error()) + .convert_err()?; + + let mut visitor = Finder { + named_exports: Vec::new(), + }; + // Visit the AST to find named exports + program.visit_with(&mut visitor); + + Ok(visitor.named_exports) + }) + } + + fn resolve(&mut self, env: Env, result: Self::Output) -> napi::Result { + let mut array = env.create_array(result.len() as u32)?; + for (i, name) in result.iter().enumerate() { + let js_val = env.create_string(name.as_str())?; + array.set(i as u32, js_val)?; + } + Ok(array) + } +} + +#[napi(ts_return_type = "Promise")] +pub fn get_module_named_exports(resource_path: String) -> AsyncTask { + AsyncTask::new(FinderTask { + resource_path: Some(resource_path), + }) +} + +#[napi(object)] +pub struct NapiSourceDiagnostic { + pub severity: &'static str, + pub message: String, + pub loc: NapiIssueSourceRange, +} + +pub struct 
AnalyzeTask { + pub source: Option, + pub is_production: bool, +} + +impl Task for AnalyzeTask { + type Output = Vec; + type JsValue = Vec; + + fn compute(&mut self) -> Result { + GLOBALS.set(&Default::default(), || { + let c = + swc_core::base::Compiler::new(Arc::new(SourceMap::new(FilePathMapping::empty()))); + + let options = ParseOptions { + comments: false, + syntax: Syntax::Es(EsSyntax { + jsx: true, + fn_bind: true, + decorators: true, + decorators_before_export: true, + export_default_from: true, + import_attributes: true, + allow_super_outside_method: true, + allow_return_outside_function: true, + auto_accessors: true, + explicit_resource_management: true, + }), + is_module: IsModule::Unknown, + target: EsVersion::default(), + }; + let source = self.source.take().unwrap(); + let fm = + c.cm.new_source_file(Arc::new(FileName::Anon), source); + let mut program = try_with_handler( + c.cm.clone(), + swc_core::base::HandlerOpts { + color: ColorConfig::Never, + skip_filename: false, + }, + |handler| { + c.parse_js( + fm, + handler, + options.target, + options.syntax, + options.is_module, + None, + ) + }, + ) + .map_err(|e| e.to_pretty_error()) + .convert_err()?; + + let diagnostics = RefCell::new(Vec::new()); + let top_level_mark = Mark::fresh(Mark::root()); + let unresolved_mark = Mark::fresh(Mark::root()); + let mut resolver_visitor = swc_core::ecma::transforms::base::resolver(unresolved_mark, top_level_mark, true); + let mut analyze_visitor = next_custom_transforms::transforms::warn_for_edge_runtime::warn_for_edge_runtime_with_handlers( + c.cm.clone(), + ExprCtx { + is_unresolved_ref_safe: true, + unresolved_ctxt: SyntaxContext::empty().apply_mark(unresolved_mark), + in_strict: false, + remaining_depth: 4, + }, + false, + self.is_production, + |span, msg| { + let start = c.cm.lookup_char_pos(span.lo); + let end = c.cm.lookup_char_pos(span.hi); + diagnostics.borrow_mut().push(NapiSourceDiagnostic { + severity: "Warning", + message: msg, + loc: NapiIssueSourceRange { + start: NapiSourcePos { + line: start.line as u32, + column: start.col_display as u32, + }, + end: NapiSourcePos { + line: end.line as u32, + column: end.col_display as u32, + } + } + }); + }, + |span, msg| { + let start = c.cm.lookup_char_pos(span.lo); + let end = c.cm.lookup_char_pos(span.hi); + diagnostics.borrow_mut().push(NapiSourceDiagnostic { + severity: "Error", + message: msg, + loc: NapiIssueSourceRange { + start: NapiSourcePos { + line: start.line as u32, + column: start.col_display as u32, + }, + end: NapiSourcePos { + line: end.line as u32, + column: end.col_display as u32, + } + } + }); + }); + + program.visit_mut_with(&mut resolver_visitor); + program.visit_with(&mut analyze_visitor); + + Ok(diagnostics.take()) + }) + } + + fn resolve(&mut self, _env: Env, output: Self::Output) -> Result { + Ok(output) + } +} + +#[napi(ts_return_type = "Promise")] +pub fn warn_for_edge_runtime(source: String, is_production: bool) -> AsyncTask { + AsyncTask::new(AnalyzeTask { + source: Some(source), + is_production, + }) +} diff --git a/crates/napi/src/transform.rs b/crates/napi/src/transform.rs index 89e5a4ed2d431..e464cd208a993 100644 --- a/crates/napi/src/transform.rs +++ b/crates/napi/src/transform.rs @@ -29,19 +29,19 @@ DEALINGS IN THE SOFTWARE. 
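Stepping back to the `Finder` visitor in the new `rspack.rs` above, a quick sketch of what `get_module_named_exports` ends up collecting; the input module and the resulting list are illustrative, not taken from a test in this change:

```rust
// Hypothetical module handed to get_module_named_exports as a file on disk:
//
//   export class Store {}
//   export function load() {}
//   export const [first, second] = pair();
//   export { internal as renamed };
//   export * as helpers from './helpers';
//
// Finder::visit_export_decl picks up class/function/var declarations (find_pat_ids
// flattens the destructuring pattern), visit_export_named_specifier prefers the
// exported alias over the original name, and visit_export_namespace_specifier adds
// the namespace binding, so the returned promise would resolve to roughly:
//
//   ["Store", "load", "first", "second", "renamed", "helpers"]
```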
use std::{ cell::RefCell, fs::read_to_string, - panic::{catch_unwind, AssertUnwindSafe}, + panic::{AssertUnwindSafe, catch_unwind}, rc::Rc, }; -use anyhow::{anyhow, bail, Context as _}; +use anyhow::{Context as _, anyhow, bail}; use napi::bindgen_prelude::*; -use next_custom_transforms::chain_transforms::{custom_before_pass, TransformOptions}; +use next_custom_transforms::chain_transforms::{TransformOptions, custom_before_pass}; use once_cell::sync::Lazy; use rustc_hash::{FxHashMap, FxHashSet}; use swc_core::{ atoms::Atom, - base::{try_with_handler, Compiler, TransformOutput}, - common::{comments::SingleThreadedComments, errors::ColorConfig, FileName, Mark, GLOBALS}, + base::{Compiler, TransformOutput, try_with_handler}, + common::{FileName, GLOBALS, Mark, comments::SingleThreadedComments, errors::ColorConfig}, ecma::ast::noop_pass, }; @@ -119,7 +119,7 @@ impl Task for TransformTask { self.c.cm.new_source_file( FileName::Real(filename.into()).into(), read_to_string(filename).with_context(|| { - format!("Failed to read source code from {}", filename) + format!("Failed to read source code from {filename}") })?, ) } @@ -177,11 +177,9 @@ impl Task for TransformTask { .into_inner(), ) }) + .map_err(|e| e.to_pretty_error()) .convert_err(), - Err(err) => Err(napi::Error::new( - Status::GenericFailure, - format!("{:?}", err), - )), + Err(err) => Err(napi::Error::new(Status::GenericFailure, format!("{err:?}"))), } }) } @@ -248,7 +246,7 @@ fn test_deser() { let tr: TransformOptions = serde_json::from_str(JSON_STR).unwrap(); - println!("{:#?}", tr); + println!("{tr:#?}"); } #[test] @@ -257,5 +255,5 @@ fn test_deserialize_transform_regenerator() { let tr: TransformOptions = serde_json::from_str(JSON_STR).unwrap(); - println!("{:#?}", tr); + println!("{tr:#?}"); } diff --git a/crates/napi/src/turbopack.rs b/crates/napi/src/turbopack.rs index 99ecde69fdd79..0574beb26f092 100644 --- a/crates/napi/src/turbopack.rs +++ b/crates/napi/src/turbopack.rs @@ -3,8 +3,8 @@ use std::path::PathBuf; use anyhow::Context; use napi::bindgen_prelude::*; use next_build::{ - build_options::{BuildContext, DefineEnv}, BuildOptions as NextBuildOptions, + build_options::{BuildContext, DefineEnv}, }; use next_core::next_config::{Rewrite, Rewrites, RouteHas}; @@ -159,7 +159,7 @@ pub enum NapiRouteHas { impl FromNapiValue for NapiRouteHas { unsafe fn from_napi_value(env: sys::napi_env, napi_val: sys::napi_value) -> Result { - let object = Object::from_napi_value(env, napi_val)?; + let object = unsafe { Object::from_napi_value(env, napi_val)? 
}; let type_ = object.get_named_property::("type")?; Ok(match type_.as_str() { "header" => NapiRouteHas::Header { @@ -180,8 +180,8 @@ impl FromNapiValue for NapiRouteHas { _ => { return Err(napi::Error::new( Status::GenericFailure, - format!("invalid type for RouteHas: {}", type_), - )) + format!("invalid type for RouteHas: {type_}"), + )); } }) } diff --git a/crates/napi/src/util.rs b/crates/napi/src/util.rs index 2289baf2ea366..71fef339dcac7 100644 --- a/crates/napi/src/util.rs +++ b/crates/napi/src/util.rs @@ -42,7 +42,7 @@ use once_cell::sync::Lazy; use owo_colors::OwoColorize; use terminal_hyperlink::Hyperlink; use tracing_chrome::{ChromeLayerBuilder, FlushGuard}; -use tracing_subscriber::{filter, prelude::*, util::SubscriberInitExt, Layer}; +use tracing_subscriber::{Layer, filter, prelude::*, util::SubscriberInitExt}; use turbopack_core::error::PrettyPrintError; static LOG_THROTTLE: Mutex> = Mutex::new(None); @@ -70,49 +70,49 @@ pub fn log_internal_error_and_inform(internal_error: &anyhow::Error) { // hold open this mutex guard to prevent concurrent writes to the file! let mut last_error_time = LOG_THROTTLE.lock().unwrap(); - if let Some(last_error_time) = last_error_time.as_ref() { - if last_error_time.elapsed().as_secs() < 1 { - // Throttle panic logging to once per second - return; - } + if let Some(last_error_time) = last_error_time.as_ref() + && last_error_time.elapsed().as_secs() < 1 + { + // Throttle panic logging to once per second + return; } *last_error_time = Some(Instant::now()); let size = std::fs::metadata(PANIC_LOG.as_path()).map(|m| m.len()); - if let Ok(size) = size { - if size > 512 * 1024 { - // Truncate the earliest error from log file if it's larger than 512KB - let new_lines = { - let log_read = OpenOptions::new() - .read(true) - .open(PANIC_LOG.as_path()) - .unwrap_or_else(|_| panic!("Failed to open {}", PANIC_LOG.to_string_lossy())); - - io::BufReader::new(&log_read) - .lines() - .skip(1) - .skip_while(|line| match line { - Ok(line) => !line.starts_with(LOG_DIVIDER), - Err(_) => false, - }) - .collect::>() - }; - - let mut log_write = OpenOptions::new() - .create(true) - .truncate(true) - .write(true) + if let Ok(size) = size + && size > 512 * 1024 + { + // Truncate the earliest error from log file if it's larger than 512KB + let new_lines = { + let log_read = OpenOptions::new() + .read(true) .open(PANIC_LOG.as_path()) .unwrap_or_else(|_| panic!("Failed to open {}", PANIC_LOG.to_string_lossy())); - for line in new_lines { - match line { - Ok(line) => { - writeln!(log_write, "{}", line).unwrap(); - } - Err(_) => { - break; - } + io::BufReader::new(&log_read) + .lines() + .skip(1) + .skip_while(|line| match line { + Ok(line) => !line.starts_with(LOG_DIVIDER), + Err(_) => false, + }) + .collect::>() + }; + + let mut log_write = OpenOptions::new() + .create(true) + .truncate(true) + .write(true) + .open(PANIC_LOG.as_path()) + .unwrap_or_else(|_| panic!("Failed to open {}", PANIC_LOG.to_string_lossy())); + + for line in new_lines { + match line { + Ok(line) => { + writeln!(log_write, "{line}").unwrap(); + } + Err(_) => { + break; } } } @@ -131,7 +131,11 @@ pub fn log_internal_error_and_inform(internal_error: &anyhow::Error) { "Turbopack Error: {}", internal_error_str.lines().next().unwrap_or("Unknown") ); - let version_str = format!("Turbopack version: `{}`", env!("VERGEN_GIT_DESCRIBE")); + let version_str = format!( + "Turbopack version: `{}`\nNext.js version: `{}`", + env!("VERGEN_GIT_DESCRIBE"), + env!("NEXTJS_VERSION") + ); let new_discussion_url = if 
supports_hyperlinks::supports_hyperlinks() { "clicking here.".hyperlink( format!( @@ -165,7 +169,7 @@ pub fn get_target_triple() -> &'static str { pub trait MapErr: Into> { fn convert_err(self) -> napi::Result { self.into() - .map_err(|err| napi::Error::new(Status::GenericFailure, format!("{:?}", err))) + .map_err(|err| napi::Error::new(Status::GenericFailure, format!("{err:?}"))) } } diff --git a/crates/next-api/Cargo.toml b/crates/next-api/Cargo.toml index 5377b2cc6551b..95eed8fc6c2ee 100644 --- a/crates/next-api/Cargo.toml +++ b/crates/next-api/Cargo.toml @@ -3,23 +3,25 @@ name = "next-api" version = "0.1.0" description = "TBD" license = "MIT" -edition = "2021" +edition = "2024" autobenches = false [lib] bench = false +[[bench]] +name = "hmr" +harness = false + [lints] workspace = true [dependencies] anyhow = { workspace = true, features = ["backtrace"] } -auto-hash-map = { workspace = true } either = { workspace = true } futures = { workspace = true } indexmap = { workspace = true } next-core = { workspace = true } -petgraph = { workspace = true, features = ["serde-1"]} regex = { workspace = true } rustc-hash = { workspace = true } serde = { workspace = true } @@ -27,19 +29,24 @@ serde_json = { workspace = true } swc_core = { workspace = true } tracing = { workspace = true } turbo-rcstr = { workspace = true } -turbo-tasks = { workspace = true } +turbo-tasks = { workspace = true, features = ["non_operation_vc_strongly_consistent"] } turbo-tasks-env = { workspace = true } turbo-tasks-fs = { workspace = true } -turbo-tasks-hash = { workspace = true } -turbo-tasks-memory = { workspace = true } turbopack = { workspace = true } turbopack-browser = { workspace = true } -turbopack-cli-utils = { workspace = true } turbopack-core = { workspace = true } turbopack-ecmascript = { workspace = true } -turbopack-env = { workspace = true } turbopack-node = { workspace = true } turbopack-nodejs = { workspace = true } +turbopack-wasm = { workspace = true } + +[dev-dependencies] +turbo-tasks-malloc = { workspace = true } +divan = { workspace = true } +tokio = { workspace = true } +tempfile = { workspace = true } +turbo-tasks-backend = { workspace = true } + [build-dependencies] anyhow = { workspace = true } diff --git a/crates/next-api/benches/hmr.rs b/crates/next-api/benches/hmr.rs new file mode 100644 index 0000000000000..53b587c69cb34 --- /dev/null +++ b/crates/next-api/benches/hmr.rs @@ -0,0 +1,474 @@ +use std::{ + env, + fs::{create_dir_all, write}, + mem::forget, + path::{Path, PathBuf}, + process::Command, + sync::Arc, + time::{Duration, Instant}, +}; + +use anyhow::{Context, Result}; +use next_api::{ + project::{DefineEnv, DraftModeOptions, ProjectContainer, ProjectOptions, WatchOptions}, + register, + route::endpoint_write_to_disk, +}; +use serde_json::json; +use tempfile::TempDir; +use tokio::runtime::Runtime; +use turbo_rcstr::RcStr; +use turbo_tasks::{ + TransientInstance, TurboTasks, TurboTasksApi, Vc, backend::Backend, trace::TraceRawVcs, +}; +use turbo_tasks_backend::noop_backing_storage; + +pub struct HmrBenchmark { + test_app: TestApp, + project_container: Vc, +} + +#[derive(Debug)] +pub struct TestApp { + _path: PathBuf, + /// Prevent temp directory from being dropped + _dir: TempDir, + modules: Vec<(PathBuf, usize)>, +} + +impl TestApp { + pub fn path(&self) -> &Path { + &self._path + } + + pub fn modules(&self) -> &[(PathBuf, usize)] { + &self.modules + } +} + +fn create_test_app(module_count: usize) -> Result { + let temp_dir = tempfile::tempdir().context("Failed to create temp 
directory")?; + let base_path = temp_dir.path().to_path_buf(); + + // Create basic Next.js structure + let pages_dir = base_path.join("pages"); + let app_dir = base_path.join("app"); + let src_dir = base_path.join("src"); + + create_dir_all(&pages_dir)?; + create_dir_all(&app_dir)?; + create_dir_all(&src_dir)?; + + let mut modules = Vec::new(); + + // Create index page + let index_content = r#"import React from 'react'; + +export default function Home() { + return
<div>Hello World</div>
; +} +"#; + let index_path = pages_dir.join("index.js"); + write(&index_path, index_content)?; + modules.push((index_path, 0)); + + // Create app layout + let layout_content = r#"export default function RootLayout({ children }) { + return ( + <html> + <body>{children}</body> + </html> + ); +} +"#; + let layout_path = app_dir.join("layout.js"); + write(&layout_path, layout_content)?; + modules.push((layout_path, 0)); + + // Create app page + let app_page_content = r#"export default function Page() { + return
<div>App Router Page</div>
; +} +"#; + let app_page_path = app_dir.join("page.js"); + write(&app_page_path, app_page_content)?; + modules.push((app_page_path, 0)); + + // Create additional modules based on module_count + for i in 3..module_count { + let component_content = format!( + r#"import React from 'react'; + +export default function Component{i}() {{ + return
<div>Component {i}</div>
; +}} +"# + ); + + let component_path = src_dir.join(format!("component{i}.js")); + write(&component_path, component_content)?; + modules.push((component_path, 1)); + } + + // Create package.json + let package_json = r#"{ + "name": "hmr-test-app", + "version": "1.0.0", + "dependencies": { + "react": "^19.0.0", + "react-dom": "^19.0.0", + "next": "^15.0.0" + } +} +"#; + write(base_path.join("package.json"), package_json)?; + + // Create next.config.js + let next_config = "module.exports = {}"; + write(base_path.join("next.config.js"), next_config)?; + + // Run `npm install` + let output = Command::new("npm") + .current_dir(&base_path) + .args(["install"]) + .output()?; + + if !output.status.success() { + return Err(anyhow::anyhow!("Failed to run `npm install`")); + } + + Ok(TestApp { + _path: base_path, + _dir: temp_dir, + modules, + }) +} + +fn load_next_config() -> RcStr { + serde_json::to_string(&json!({ + "sassOptions": { + + }, + })) + .unwrap() + .into() +} + +fn runtime() -> Runtime { + tokio::runtime::Builder::new_multi_thread() + .enable_all() + .on_thread_stop(|| { + turbo_tasks_malloc::TurboMalloc::thread_stop(); + }) + .build() + .context("Failed to build tokio runtime") + .unwrap() +} + +impl HmrBenchmark { + pub async fn new(module_count: usize) -> Result { + let test_app = create_test_app(module_count)?; + + let project_container = { + let container = ProjectContainer::new(RcStr::from("hmr-benchmark"), true) + .to_resolved() + .await?; + + let project_path = test_app.path().to_string_lossy().to_string(); + let root_path = test_app.path().to_string_lossy().to_string(); + + let options = ProjectOptions { + root_path: RcStr::from(root_path), + project_path: RcStr::from(project_path.clone()), + next_config: load_next_config(), + js_config: RcStr::from("{}"), + env: vec![( + RcStr::from("PATH"), + RcStr::from(env::var("PATH").unwrap_or_default()), + )], + define_env: DefineEnv { + client: vec![], + edge: vec![], + nodejs: vec![], + }, + watch: WatchOptions { + enable: true, + poll_interval: None, + }, + dev: true, + encryption_key: RcStr::from("test-key"), + build_id: RcStr::from("development"), + preview_props: DraftModeOptions { + preview_mode_id: RcStr::from("development"), + preview_mode_encryption_key: RcStr::from("test-key"), + preview_mode_signing_key: RcStr::from("test-key"), + }, + browserslist_query: RcStr::from("last 2 versions"), + no_mangling: false, + }; + + container.initialize(options).await?; + Ok::<_, anyhow::Error>(container) + }?; + + Ok(Self { + test_app, + project_container: *project_container, + }) + } + + /// Simulate file changes for HMR testing + pub fn make_file_change(&self, file_path: &Path, change_id: usize) -> Result<()> { + let mut content = + std::fs::read_to_string(file_path).context("Failed to read file content")?; + + // Add a comment with a unique identifier to trigger HMR + let change_marker = format!("// HMR_CHANGE_{change_id}\n"); + content.push_str(&change_marker); + + std::fs::write(file_path, content).context("Failed to write modified content")?; + + Ok(()) + } + + /// Benchmark HMR update detection and processing + pub async fn benchmark_hmr_update(&self, num_updates: usize) -> Result { + // Get entrypoints to trigger initial compilation + let entrypoints = self.project_container.entrypoints(); + let initial_result = entrypoints.await?; + + // Check if we have routes available + if initial_result.routes.is_empty() { + return Err(anyhow::anyhow!("No routes found in entrypoints")); + } + + // Get HMR identifiers + let hmr_identifiers = 
self.project_container.hmr_identifiers(); + let identifiers = hmr_identifiers.await?; + + if identifiers.is_empty() { + return Err(anyhow::anyhow!("No HMR identifiers found")); + } + + // Get project to access HMR methods + let project = self.project_container.project(); + + // Create multiple sessions to simulate real HMR usage + let mut update_durations = Vec::new(); + + for i in 0..num_updates { + let update_start = Instant::now(); + + // Use different identifiers for each update + let identifier = &identifiers[i % identifiers.len()]; + + // Get version state for this update + let session = TransientInstance::new(()); + let version_state = project.hmr_version_state(identifier.clone(), session); + + // Pick a module file to change + let module_index = i % self.test_app.modules().len(); + let (module_path, _) = &self.test_app.modules()[module_index]; + + // Make a file change + self.make_file_change(module_path, i)?; + + // Wait for HMR update and measure time + let _update_result = project + .hmr_update(identifier.clone(), version_state) + .await?; + + update_durations.push(update_start.elapsed()); + } + + Ok(update_durations.iter().sum::()) + } + + /// Benchmark HMR subscription and event handling + pub async fn benchmark_hmr_subscription(&self) -> Result { + let start_time = Instant::now(); + + // Get entrypoints first + let entrypoints = self.project_container.entrypoints(); + let _initial_result = entrypoints.await?; + + // Get HMR identifiers + let hmr_identifiers = self.project_container.hmr_identifiers(); + let identifiers = hmr_identifiers.await?; + + if identifiers.is_empty() { + return Err(anyhow::anyhow!("No HMR identifiers found")); + } + + let project = self.project_container.project(); + + // Test subscription to multiple identifiers + let mut version_states = Vec::new(); + for identifier in identifiers.iter().take(5) { + // Test with first 5 identifiers + let session = TransientInstance::new(()); + let version_state = project.hmr_version_state(identifier.clone(), session); + version_states.push((identifier.clone(), version_state)); + } + + // Simulate multiple rapid updates + for (i, (identifier, version_state)) in version_states.iter().enumerate() { + // Make a file change + if let Some((module_path, _)) = self.test_app.modules().get(i) { + self.make_file_change(module_path, i * 100)?; + + // Check for update + let _update_result = project + .hmr_update(identifier.clone(), *version_state) + .await?; + } + } + + Ok(start_time.elapsed()) + } + + /// Benchmark initial project setup and entrypoint detection + pub async fn benchmark_initial_compilation(&self) -> Result { + let start_time = Instant::now(); + + let entrypoints = self.project_container.entrypoints(); + let result = entrypoints.await?; + + for route in result.routes.values() { + match route { + next_api::route::Route::Page { + html_endpoint, + data_endpoint, + } => { + let _ = endpoint_write_to_disk(**html_endpoint).await?; + let _ = endpoint_write_to_disk(**data_endpoint).await?; + } + next_api::route::Route::PageApi { endpoint } => { + let _ = endpoint_write_to_disk(**endpoint).await?; + } + next_api::route::Route::AppPage(app_page_routes) => { + for route in app_page_routes.iter() { + let _ = endpoint_write_to_disk(*route.html_endpoint).await?; + let _ = endpoint_write_to_disk(*route.rsc_endpoint).await?; + } + } + next_api::route::Route::AppRoute { endpoint, .. 
} => { + let _ = endpoint_write_to_disk(**endpoint).await?; + } + next_api::route::Route::Conflict => {} + } + } + + Ok(start_time.elapsed()) + } + + /// Get the number of modules in the test app + pub fn module_count(&self) -> usize { + self.test_app.modules().len() + } +} + +async fn setup_benchmark(module_count: usize) -> HmrBenchmark { + register(); + HmrBenchmark::new(module_count).await.unwrap() +} + +fn setup_runtime() -> Runtime { + runtime() +} + +fn setup_turbo_tasks() -> Arc> { + TurboTasks::new(turbo_tasks_backend::TurboTasksBackend::new( + turbo_tasks_backend::BackendOptions { + storage_mode: None, + dependency_tracking: true, + ..Default::default() + }, + noop_backing_storage(), + )) +} + +#[derive(TraceRawVcs)] +struct Setup { + #[turbo_tasks(trace_ignore)] + rt: Arc, + #[turbo_tasks(trace_ignore)] + tt: Arc, + #[turbo_tasks(trace_ignore)] + benchmark: HmrBenchmark, +} + +fn setup_everything(module_count: usize) -> Arc { + let rt = Arc::new(setup_runtime()); + let tt = setup_turbo_tasks(); + + let arc = rt.clone().block_on(async move { + tt.clone() + .run_once(async move { + let benchmark = setup_benchmark(module_count).await; + benchmark.benchmark_initial_compilation().await.unwrap(); + + Ok(Arc::new(Setup { rt, tt, benchmark })) + }) + .await + .unwrap() + }); + + // I don't know why this is needed, but it is required to avoid dropping tokio runtime from + // async scope + forget(arc.clone()); + arc +} + +fn bench_update(bencher: divan::Bencher, module_count: usize, num_updates: usize) { + let s = setup_everything(module_count); + + bencher + .with_inputs(|| { + let setup = s.clone(); + + setup.clone().rt.block_on(async move { + setup.clone().tt.run_once(Box::pin(async move { + let _ = setup + .benchmark + .benchmark_initial_compilation() + .await + .unwrap(); + Ok(()) + })); + }); + + s.clone() + }) + .bench_values(|setup| { + setup.clone().rt.block_on(async move { + setup.clone().tt.run_once(Box::pin(async move { + setup + .benchmark + .benchmark_hmr_update(num_updates) + .await + .unwrap(); + Ok(()) + })); + }) + }); +} + +#[divan::bench(sample_size = 10000, max_time = 60)] +fn hmr_updates_small_5(bencher: divan::Bencher) { + bench_update(bencher, 100, 5); +} + +#[divan::bench(sample_size = 10000, max_time = 60)] +fn hmr_updates_medium_10(bencher: divan::Bencher) { + bench_update(bencher, 200, 10); +} + +#[divan::bench(sample_size = 10000, max_time = 60)] +fn hmr_updates_large_20(bencher: divan::Bencher) { + bench_update(bencher, 500, 20); +} + +fn main() { + divan::main(); +} diff --git a/crates/next-api/src/app.rs b/crates/next-api/src/app.rs index 6b5a7aba55e0d..2be69c2dfa321 100644 --- a/crates/next-api/src/app.rs +++ b/crates/next-api/src/app.rs @@ -1,67 +1,67 @@ -use anyhow::{bail, Context, Result}; +use anyhow::{Context, Result, bail}; use next_core::{ all_assets_from_entries, app_segment_config::NextSegmentConfig, app_structure::{ - get_entrypoints, AppPageLoaderTree, Entrypoint as AppEntrypoint, - Entrypoints as AppEntrypoints, FileSystemPathVec, MetadataItem, + AppPageLoaderTree, Entrypoint as AppEntrypoint, Entrypoints as AppEntrypoints, + FileSystemPathVec, MetadataItem, get_entrypoints, }, get_edge_resolve_options_context, get_next_package, next_app::{ - get_app_client_references_chunks, get_app_client_shared_chunk_group, get_app_page_entry, - get_app_route_entry, metadata::route::get_app_metadata_route_entry, AppEntry, AppPage, + AppEntry, AppPage, get_app_client_references_chunks, get_app_client_shared_chunk_group, + get_app_page_entry, 
get_app_route_entry, metadata::route::get_app_metadata_route_entry, }, next_client::{ - get_client_module_options_context, get_client_resolve_options_context, - get_client_runtime_entries, ClientContextType, RuntimeEntries, + ClientContextType, RuntimeEntries, get_client_module_options_context, + get_client_resolve_options_context, get_client_runtime_entries, }, next_client_reference::{ - find_server_entries, ClientReferenceGraphResult, NextCssClientReferenceTransition, - NextEcmascriptClientReferenceTransition, ServerEntries, + ClientReferenceGraphResult, NextCssClientReferenceTransition, + NextEcmascriptClientReferenceTransition, ServerEntries, find_server_entries, }, next_config::NextConfig, next_dynamic::NextDynamicTransition, next_edge::route_regex::get_named_middleware_regex, next_manifests::{ - client_reference_manifest::ClientReferenceManifestOptions, AppBuildManifest, - AppPathsManifest, BuildManifest, ClientReferenceManifest, EdgeFunctionDefinition, - MiddlewareMatcher, MiddlewaresManifestV2, PagesManifest, Regions, + AppBuildManifest, AppPathsManifest, BuildManifest, ClientReferenceManifest, + EdgeFunctionDefinition, MiddlewareMatcher, MiddlewaresManifestV2, PagesManifest, Regions, + client_reference_manifest::ClientReferenceManifestOptions, }, next_server::{ - get_server_module_options_context, get_server_resolve_options_context, - get_server_runtime_entries, ServerContextType, + ServerContextType, get_server_module_options_context, get_server_resolve_options_context, + get_server_runtime_entries, }, - next_server_utility::{NextServerUtilityTransition, NEXT_SERVER_UTILITY_MERGE_TAG}, + next_server_utility::{NEXT_SERVER_UTILITY_MERGE_TAG, NextServerUtilityTransition}, parse_segment_config_from_source, util::NextRuntime, }; use serde::{Deserialize, Serialize}; use tracing::Instrument; -use turbo_rcstr::RcStr; +use turbo_rcstr::{RcStr, rcstr}; use turbo_tasks::{ - fxindexmap, fxindexset, trace::TraceRawVcs, Completion, FxIndexSet, NonLocalValue, ResolvedVc, - TryJoinIterExt, Value, ValueToString, Vc, + Completion, FxIndexSet, NonLocalValue, ResolvedVc, TryJoinIterExt, ValueToString, Vc, + fxindexmap, fxindexset, trace::TraceRawVcs, }; use turbo_tasks_env::{CustomProcessEnv, ProcessEnv}; use turbo_tasks_fs::{File, FileContent, FileSystemPath}; use turbopack::{ - module_options::{transition_rule::TransitionRule, ModuleOptionsContext, RuleCondition}, - resolve_options_context::ResolveOptionsContext, - transition::{ContextTransition, FullContextTransition, Transition, TransitionOptions}, ModuleAssetContext, + module_options::{ModuleOptionsContext, RuleCondition, transition_rule::TransitionRule}, + resolve_options_context::ResolveOptionsContext, + transition::{FullContextTransition, Transition, TransitionOptions}, }; use turbopack_core::{ asset::AssetContent, chunk::{ - availability_info::AvailabilityInfo, ChunkGroupResult, ChunkingContext, ChunkingContextExt, - EvaluatableAsset, EvaluatableAssets, + ChunkGroupResult, ChunkingContext, ChunkingContextExt, EvaluatableAsset, EvaluatableAssets, + availability_info::AvailabilityInfo, }, file_source::FileSource, - ident::AssetIdent, + ident::{AssetIdent, Layer}, module::Module, module_graph::{ - chunk_group_info::{ChunkGroup, ChunkGroupEntry}, GraphEntries, ModuleGraph, SingleModuleGraph, VisitedModules, + chunk_group_info::{ChunkGroup, ChunkGroupEntry}, }, output::{OutputAsset, OutputAssets}, raw_output::RawOutput, @@ -73,7 +73,7 @@ use turbopack_core::{ use turbopack_ecmascript::resolve::cjs_resolve; use crate::{ - 
dynamic_imports::{collect_next_dynamic_chunks, NextDynamicChunkAvailability}, + dynamic_imports::{NextDynamicChunkAvailability, collect_next_dynamic_chunks}, font::create_font_manifest, loadable_manifest::create_react_loadable_manifest, module_graph::get_reduced_graphs_for_endpoint, @@ -99,8 +99,6 @@ pub struct OptionAppProject(Option>); impl AppProject {} -pub(crate) const ECMASCRIPT_CLIENT_TRANSITION_NAME: &str = "next-ecmascript-client-reference"; - fn styles_rule_condition() -> RuleCondition { RuleCondition::any(vec![ RuleCondition::all(vec![ @@ -115,6 +113,12 @@ fn styles_rule_condition() -> RuleCondition { RuleCondition::ResourcePathEndsWith(".sass".into()), RuleCondition::not(RuleCondition::ResourcePathEndsWith(".module.sass".into())), ]), + RuleCondition::all(vec![ + RuleCondition::ContentTypeStartsWith("text/css".into()), + RuleCondition::not(RuleCondition::ContentTypeStartsWith( + "text/css+module".into(), + )), + ]), ]) } fn module_styles_rule_condition() -> RuleCondition { @@ -122,8 +126,14 @@ fn module_styles_rule_condition() -> RuleCondition { RuleCondition::ResourcePathEndsWith(".module.css".into()), RuleCondition::ResourcePathEndsWith(".module.scss".into()), RuleCondition::ResourcePathEndsWith(".module.sass".into()), + RuleCondition::ContentTypeStartsWith("text/css+module".into()), ]) } +impl AppProject { + pub fn client_transition_name() -> RcStr { + rcstr!("next-ecmascript-client-reference") + } +} #[turbo_tasks::value_impl] impl AppProject { @@ -158,9 +168,7 @@ impl AppProject { client_transition: Some(ResolvedVc::upcast( self.client_transition().to_resolved().await?, )), - ecmascript_client_reference_transition_name: Some( - self.client_transition_name().to_resolved().await?, - ), + ecmascript_client_reference_transition_name: Some(Self::client_transition_name()), } .cell()) } @@ -170,9 +178,7 @@ impl AppProject { let this = self.await?; Ok(ServerContextType::AppRoute { app_dir: this.app_dir, - ecmascript_client_reference_transition_name: Some( - self.client_transition_name().to_resolved().await?, - ), + ecmascript_client_reference_transition_name: Some(Self::client_transition_name()), } .cell()) } @@ -187,7 +193,12 @@ impl AppProject { #[turbo_tasks::function] fn app_entrypoints(&self) -> Vc { - get_entrypoints(*self.app_dir, self.project.next_config().page_extensions()) + let conf = self.project.next_config(); + get_entrypoints( + *self.app_dir, + conf.page_extensions(), + conf.is_global_not_found_enabled(), + ) } #[turbo_tasks::function] @@ -196,7 +207,7 @@ impl AppProject { self.project().project_path(), self.project().execution_context(), self.project().client_compile_time_info().environment(), - Value::new(self.client_ty().owned().await?), + self.client_ty().owned().await?, self.project().next_mode(), self.project().next_config(), self.project().encryption_key(), @@ -208,18 +219,13 @@ impl AppProject { async fn client_resolve_options_context(self: Vc) -> Result> { Ok(get_client_resolve_options_context( self.project().project_path(), - Value::new(self.client_ty().owned().await?), + self.client_ty().owned().await?, self.project().next_mode(), self.project().next_config(), self.project().execution_context(), )) } - #[turbo_tasks::function] - pub(crate) fn client_transition_name(self: Vc) -> Vc { - Vc::cell(ECMASCRIPT_CLIENT_TRANSITION_NAME.into()) - } - #[turbo_tasks::function] fn client_transition(self: Vc) -> Vc { let module_context = self.client_module_context(); @@ -231,11 +237,12 @@ impl AppProject { Ok(get_server_module_options_context( 
self.project().project_path(), self.project().execution_context(), - Value::new(self.rsc_ty().owned().await?), + self.rsc_ty().owned().await?, self.project().next_mode(), self.project().next_config(), NextRuntime::NodeJs, self.project().encryption_key(), + self.project().server_compile_time_info().environment(), )) } @@ -244,11 +251,12 @@ impl AppProject { Ok(get_server_module_options_context( self.project().project_path(), self.project().execution_context(), - Value::new(self.rsc_ty().owned().await?), + self.rsc_ty().owned().await?, self.project().next_mode(), self.project().next_config(), NextRuntime::Edge, self.project().encryption_key(), + self.project().edge_compile_time_info().environment(), )) } @@ -257,11 +265,12 @@ impl AppProject { Ok(get_server_module_options_context( self.project().project_path(), self.project().execution_context(), - Value::new(self.route_ty().owned().await?), + self.route_ty().owned().await?, self.project().next_mode(), self.project().next_config(), NextRuntime::NodeJs, self.project().encryption_key(), + self.project().server_compile_time_info().environment(), )) } @@ -270,11 +279,12 @@ impl AppProject { Ok(get_server_module_options_context( self.project().project_path(), self.project().execution_context(), - Value::new(self.route_ty().owned().await?), + self.route_ty().owned().await?, self.project().next_mode(), self.project().next_config(), NextRuntime::Edge, self.project().encryption_key(), + self.project().edge_compile_time_info().environment(), )) } @@ -282,7 +292,7 @@ impl AppProject { async fn rsc_resolve_options_context(self: Vc) -> Result> { Ok(get_server_resolve_options_context( self.project().project_path(), - Value::new(self.rsc_ty().owned().await?), + self.rsc_ty().owned().await?, self.project().next_mode(), self.project().next_config(), self.project().execution_context(), @@ -293,7 +303,7 @@ impl AppProject { async fn edge_rsc_resolve_options_context(self: Vc) -> Result> { Ok(get_edge_resolve_options_context( self.project().project_path(), - Value::new(self.rsc_ty().owned().await?), + self.rsc_ty().owned().await?, self.project().next_mode(), self.project().next_config(), self.project().execution_context(), @@ -304,7 +314,7 @@ impl AppProject { async fn route_resolve_options_context(self: Vc) -> Result> { Ok(get_server_resolve_options_context( self.project().project_path(), - Value::new(self.route_ty().owned().await?), + self.route_ty().owned().await?, self.project().next_mode(), self.project().next_config(), self.project().execution_context(), @@ -317,7 +327,7 @@ impl AppProject { ) -> Result> { Ok(get_edge_resolve_options_context( self.project().project_path(), - Value::new(self.route_ty().owned().await?), + self.route_ty().owned().await?, self.project().next_mode(), self.project().next_config(), self.project().execution_context(), @@ -357,25 +367,28 @@ impl AppProject { Ok(TransitionOptions { named_transitions: [ ( - ECMASCRIPT_CLIENT_TRANSITION_NAME.into(), + AppProject::client_transition_name(), ecmascript_client_reference_transition.to_resolved().await?, ), ( - "next-dynamic".into(), + rcstr!("next-dynamic"), ResolvedVc::upcast(NextDynamicTransition::new_marker().to_resolved().await?), ), ( - "next-dynamic-client".into(), + rcstr!("next-dynamic-client"), ResolvedVc::upcast( NextDynamicTransition::new_client(Vc::upcast(self.client_transition())) .to_resolved() .await?, ), ), - ("next-ssr".into(), ssr_transition.to_resolved().await?), - ("next-shared".into(), shared_transition.to_resolved().await?), + (rcstr!("next-ssr"), 
ssr_transition.to_resolved().await?), + ( + rcstr!("next-shared"), + shared_transition.to_resolved().await?, + ), ( - "next-server-utility".into(), + rcstr!("next-server-utility"), ResolvedVc::upcast(NextServerUtilityTransition::new().to_resolved().await?), ), ] @@ -416,7 +429,7 @@ impl AppProject { } #[turbo_tasks::function] - async fn rsc_module_context(self: Vc) -> Result> { + fn rsc_module_context(self: Vc) -> Result> { Ok(ModuleAssetContext::new( self.get_rsc_transitions( self.ecmascript_client_reference_transition(), @@ -426,12 +439,12 @@ impl AppProject { self.project().server_compile_time_info(), self.rsc_module_options_context(), self.rsc_resolve_options_context(), - Vc::cell("app-rsc".into()), + Layer::new_with_user_friendly_name(rcstr!("app-rsc"), rcstr!("Server Component")), )) } #[turbo_tasks::function] - async fn edge_rsc_module_context(self: Vc) -> Result> { + fn edge_rsc_module_context(self: Vc) -> Result> { Ok(ModuleAssetContext::new( self.get_rsc_transitions( self.edge_ecmascript_client_reference_transition(), @@ -441,7 +454,10 @@ impl AppProject { self.project().edge_compile_time_info(), self.edge_rsc_module_options_context(), self.edge_rsc_resolve_options_context(), - Vc::cell("app-edge-rsc".into()), + Layer::new_with_user_friendly_name( + rcstr!("app-edge-rsc"), + rcstr!("Edge Server Component"), + ), )) } @@ -449,17 +465,17 @@ impl AppProject { async fn route_module_context(self: Vc) -> Result> { let transitions = [ ( - ECMASCRIPT_CLIENT_TRANSITION_NAME.into(), + AppProject::client_transition_name(), self.ecmascript_client_reference_transition() .to_resolved() .await?, ), ( - "next-dynamic".into(), + rcstr!("next-dynamic"), ResolvedVc::upcast(NextDynamicTransition::new_marker().to_resolved().await?), ), ( - "next-dynamic-client".into(), + rcstr!("next-dynamic-client"), ResolvedVc::upcast( NextDynamicTransition::new_client(Vc::upcast(self.client_transition())) .to_resolved() @@ -467,15 +483,15 @@ impl AppProject { ), ), ( - "next-ssr".into(), + rcstr!("next-ssr"), ResolvedVc::upcast(self.ssr_transition().to_resolved().await?), ), ( - "next-shared".into(), + rcstr!("next-shared"), ResolvedVc::upcast(self.shared_transition().to_resolved().await?), ), ( - "next-server-utility".into(), + rcstr!("next-server-utility"), ResolvedVc::upcast(NextServerUtilityTransition::new().to_resolved().await?), ), ] @@ -492,7 +508,7 @@ impl AppProject { self.project().server_compile_time_info(), self.route_module_options_context(), self.route_resolve_options_context(), - Vc::cell("app-route".into()), + Layer::new_with_user_friendly_name(rcstr!("app-route"), rcstr!("App Route")), )) } @@ -500,17 +516,17 @@ impl AppProject { async fn edge_route_module_context(self: Vc) -> Result> { let transitions = [ ( - ECMASCRIPT_CLIENT_TRANSITION_NAME.into(), + AppProject::client_transition_name(), self.edge_ecmascript_client_reference_transition() .to_resolved() .await?, ), ( - "next-dynamic".into(), + rcstr!("next-dynamic"), ResolvedVc::upcast(NextDynamicTransition::new_marker().to_resolved().await?), ), ( - "next-dynamic-client".into(), + rcstr!("next-dynamic-client"), ResolvedVc::upcast( NextDynamicTransition::new_client(Vc::upcast(self.client_transition())) .to_resolved() @@ -518,15 +534,15 @@ impl AppProject { ), ), ( - "next-ssr".into(), + rcstr!("next-ssr"), ResolvedVc::upcast(self.edge_ssr_transition().to_resolved().await?), ), ( - "next-shared".into(), + rcstr!("next-shared"), ResolvedVc::upcast(self.edge_shared_transition().to_resolved().await?), ), ( - "next-server-utility".into(), + 
rcstr!("next-server-utility"), ResolvedVc::upcast(NextServerUtilityTransition::new().to_resolved().await?), ), ] @@ -542,7 +558,7 @@ impl AppProject { self.project().edge_compile_time_info(), self.edge_route_module_options_context(), self.edge_route_resolve_options_context(), - Vc::cell("app-edge-route".into()), + Layer::new_with_user_friendly_name(rcstr!("app-edge-route"), rcstr!("Edge App Route")), )) } @@ -550,11 +566,11 @@ impl AppProject { async fn client_module_context(self: Vc) -> Result> { let transitions = [ ( - "next-dynamic".into(), + rcstr!("next-dynamic"), ResolvedVc::upcast(NextDynamicTransition::new_marker().to_resolved().await?), ), ( - "next-dynamic-client".into(), + rcstr!("next-dynamic-client"), ResolvedVc::upcast(NextDynamicTransition::new_marker().to_resolved().await?), ), ] @@ -569,7 +585,10 @@ impl AppProject { self.project().client_compile_time_info(), self.client_module_options_context(), self.client_resolve_options_context(), - Vc::cell("app-client".into()), + Layer::new_with_user_friendly_name( + rcstr!("app-client"), + rcstr!("Client Component Browser"), + ), )) } @@ -578,11 +597,12 @@ impl AppProject { Ok(get_server_module_options_context( self.project().project_path(), self.project().execution_context(), - Value::new(self.ssr_ty().owned().await?), + self.ssr_ty().owned().await?, self.project().next_mode(), self.project().next_config(), NextRuntime::NodeJs, self.project().encryption_key(), + self.project().server_compile_time_info().environment(), )) } @@ -591,11 +611,12 @@ impl AppProject { Ok(get_server_module_options_context( self.project().project_path(), self.project().execution_context(), - Value::new(self.ssr_ty().owned().await?), + self.ssr_ty().owned().await?, self.project().next_mode(), self.project().next_config(), NextRuntime::Edge, self.project().encryption_key(), + self.project().edge_compile_time_info().environment(), )) } @@ -603,7 +624,7 @@ impl AppProject { async fn ssr_resolve_options_context(self: Vc) -> Result> { Ok(get_server_resolve_options_context( self.project().project_path(), - Value::new(self.ssr_ty().owned().await?), + self.ssr_ty().owned().await?, self.project().next_mode(), self.project().next_config(), self.project().execution_context(), @@ -614,7 +635,7 @@ impl AppProject { async fn edge_ssr_resolve_options_context(self: Vc) -> Result> { Ok(get_edge_resolve_options_context( self.project().project_path(), - Value::new(self.ssr_ty().owned().await?), + self.ssr_ty().owned().await?, self.project().next_mode(), self.project().next_config(), self.project().execution_context(), @@ -625,11 +646,11 @@ impl AppProject { async fn ssr_module_context(self: Vc) -> Result> { let transitions = [ ( - "next-dynamic".into(), + rcstr!("next-dynamic"), ResolvedVc::upcast(NextDynamicTransition::new_marker().to_resolved().await?), ), ( - "next-dynamic-client".into(), + rcstr!("next-dynamic-client"), ResolvedVc::upcast( NextDynamicTransition::new_client(Vc::upcast(self.client_transition())) .to_resolved() @@ -637,7 +658,7 @@ impl AppProject { ), ), ( - "next-shared".into(), + rcstr!("next-shared"), ResolvedVc::upcast(self.shared_transition().to_resolved().await?), ), ] @@ -652,7 +673,7 @@ impl AppProject { self.project().server_compile_time_info(), self.ssr_module_options_context(), self.ssr_resolve_options_context(), - Vc::cell("app-ssr".into()), + Layer::new_with_user_friendly_name(rcstr!("app-ssr"), rcstr!("Client Component SSR")), )) } @@ -663,24 +684,33 @@ impl AppProject { } #[turbo_tasks::function] - fn shared_transition(self: Vc) -> Vc { - 
ContextTransition::new( + fn shared_module_context(self: Vc) -> Result> { + Ok(ModuleAssetContext::new( + TransitionOptions { + ..Default::default() + } + .cell(), self.project().server_compile_time_info(), self.ssr_module_options_context(), self.ssr_resolve_options_context(), - Vc::cell("app-shared".into()), - ) + Layer::new(rcstr!("app-shared")), + )) + } + + #[turbo_tasks::function] + fn shared_transition(self: Vc) -> Vc> { + Vc::upcast(FullContextTransition::new(self.shared_module_context())) } #[turbo_tasks::function] async fn edge_ssr_module_context(self: Vc) -> Result> { let transitions = [ ( - "next-dynamic".into(), + rcstr!("next-dynamic"), ResolvedVc::upcast(NextDynamicTransition::new_marker().to_resolved().await?), ), ( - "next-dynamic-client".into(), + rcstr!("next-dynamic-client"), ResolvedVc::upcast( NextDynamicTransition::new_client(Vc::upcast(self.client_transition())) .to_resolved() @@ -688,7 +718,7 @@ impl AppProject { ), ), ( - "next-shared".into(), + rcstr!("next-shared"), ResolvedVc::upcast(self.edge_shared_transition().to_resolved().await?), ), ] @@ -703,7 +733,10 @@ impl AppProject { self.project().edge_compile_time_info(), self.edge_ssr_module_options_context(), self.edge_ssr_resolve_options_context(), - Vc::cell("app-edge-ssr".into()), + Layer::new_with_user_friendly_name( + rcstr!("app-edge-ssr"), + rcstr!("Client Component SSR - Edge"), + ), )) } @@ -714,19 +747,30 @@ impl AppProject { } #[turbo_tasks::function] - fn edge_shared_transition(self: Vc) -> Vc { - ContextTransition::new( + fn edge_shared_module_context(self: Vc) -> Result> { + Ok(ModuleAssetContext::new( + TransitionOptions { + ..Default::default() + } + .cell(), self.project().edge_compile_time_info(), self.edge_ssr_module_options_context(), self.edge_ssr_resolve_options_context(), - Vc::cell("app-edge-shared".into()), - ) + Layer::new(rcstr!("app-edge-shared")), + )) + } + + #[turbo_tasks::function] + fn edge_shared_transition(self: Vc) -> Vc> { + Vc::upcast(FullContextTransition::new( + self.edge_shared_module_context(), + )) } #[turbo_tasks::function] async fn runtime_entries(self: Vc) -> Result> { Ok(get_server_runtime_entries( - Value::new(self.rsc_ty().owned().await?), + self.rsc_ty().owned().await?, self.project().next_mode(), )) } @@ -755,7 +799,7 @@ impl AppProject { async fn client_runtime_entries(self: Vc) -> Result> { Ok(get_client_runtime_entries( self.project().project_path(), - Value::new(self.client_ty().owned().await?), + self.client_ty().owned().await?, self.project().next_mode(), self.project().next_config(), self.project().execution_context(), @@ -792,10 +836,10 @@ impl AppProject { let client_main_module = cjs_resolve( Vc::upcast(PlainResolveOrigin::new( client_module_context, - self.project().project_path().join("_".into()), + self.project().project_path().join(rcstr!("_")), )), - Request::parse(Value::new(Pattern::Constant( - "next/dist/client/app-next-turbopack.js".into(), + Request::parse(Pattern::Constant(rcstr!( + "next/dist/client/app-next-turbopack.js" ))), None, false, @@ -823,6 +867,7 @@ impl AppProject { .map(|m| ResolvedVc::upcast(*m)) .collect(); + let should_trace = self.project.next_mode().await?.is_production(); if *self.project.per_page_module_graph().await? 
{ // Implements layout segment optimization to compute a graph "chain" for each layout // segment @@ -835,7 +880,7 @@ impl AppProject { let ServerEntries { server_utils, server_component_entries, - } = &*find_server_entries(*rsc_entry).await?; + } = &*find_server_entries(*rsc_entry, should_trace).await?; let graph = SingleModuleGraph::new_with_entries_visited_intern( vec![ @@ -851,6 +896,7 @@ impl AppProject { ChunkGroupEntry::Entry(client_shared_entries), ], VisitedModules::empty(), + should_trace, ); graphs.push(graph); let mut visited_modules = VisitedModules::from_graph(graph); @@ -867,6 +913,7 @@ impl AppProject { // but that breaks everything for some reason. vec![ChunkGroupEntry::Entry(vec![ResolvedVc::upcast(*module)])], visited_modules, + should_trace, ); graphs.push(graph); let is_layout = @@ -888,6 +935,7 @@ impl AppProject { let graph = SingleModuleGraph::new_with_entries_visited_intern( vec![ChunkGroupEntry::Entry(client_shared_entries)], VisitedModules::empty(), + should_trace, ); graphs.push(graph); VisitedModules::from_graph(graph) @@ -896,6 +944,7 @@ impl AppProject { let graph = SingleModuleGraph::new_with_entries_visited_intern( vec![rsc_entry_chunk_group], visited_modules, + should_trace, ); graphs.push(graph); visited_modules = visited_modules.concatenate(graph); @@ -905,6 +954,7 @@ impl AppProject { let additional_module_graph = SingleModuleGraph::new_with_entries_visited_intern( additional_entries.owned().await?, visited_modules, + should_trace, ); graphs.push(additional_module_graph); @@ -989,16 +1039,6 @@ pub fn app_entry_point_to_route( .cell() } -#[turbo_tasks::function] -fn client_shared_chunks_modifier() -> Vc { - Vc::cell("client-shared-chunks".into()) -} - -#[turbo_tasks::function] -fn server_utils_modifier() -> Vc { - Vc::cell("server-utils".into()) -} - #[turbo_tasks::value(transparent)] struct OutputAssetsWithAvailability((ResolvedVc, AvailabilityInfo)); @@ -1152,7 +1192,7 @@ impl AppEndpoint { let node_root = project.node_root().to_resolved().await?; let client_relative_path = project.client_relative_path().to_resolved().await?; - let server_path = node_root.join("server".into()); + let server_path = node_root.join(rcstr!("server")); let mut server_assets = fxindexset![]; let mut client_assets = fxindexset![]; @@ -1192,7 +1232,7 @@ impl AppEndpoint { let client_shared_chunk_group = get_app_client_shared_chunk_group( AssetIdent::from_path(project.project_path()) - .with_modifier(client_shared_chunks_modifier()), + .with_modifier(rcstr!("client-shared-chunks")), this.app_project.client_runtime_entries(), *module_graphs.full, *client_chunking_context, @@ -1222,6 +1262,7 @@ impl AppEndpoint { .get_client_references_for_endpoint( *rsc_entry, matches!(this.ty, AppEndpointType::Page { .. }), + project.next_mode().await?.is_production(), ) .to_resolved() .await?; @@ -1230,7 +1271,7 @@ impl AppEndpoint { *client_references, *module_graphs.full, *client_chunking_context, - Value::new(client_shared_availability_info), + client_shared_availability_info, ssr_chunking_context.map(|ctx| *ctx), ) .to_resolved() @@ -1292,11 +1333,11 @@ impl AppEndpoint { // load it as a RawModule. 
let next_package = get_next_package(project.project_path()); let polyfill_source = - FileSource::new(next_package.join("dist/build/polyfills/polyfill-nomodule.js".into())); + FileSource::new(next_package.join(rcstr!("dist/build/polyfills/polyfill-nomodule.js"))); let polyfill_output_path = client_chunking_context.chunk_path( Some(Vc::upcast(polyfill_source)), polyfill_source.ident(), - ".js".into(), + rcstr!(".js"), ); let polyfill_output_asset = ResolvedVc::upcast( RawOutput::new(polyfill_output_path, Vc::upcast(polyfill_source)) @@ -1420,7 +1461,6 @@ impl AppEndpoint { entry_name: app_entry.original_name.clone(), client_references, client_references_chunks, - rsc_app_entry_chunks: app_entry_chunks, client_chunking_context, ssr_chunking_context, async_module_info: module_graphs.full.async_module_info().to_resolved().await?, @@ -1457,10 +1497,10 @@ impl AppEndpoint { // // they are created in `setup-dev-bundler.ts` let mut file_paths_from_root = fxindexset![ - "server/server-reference-manifest.js".into(), - "server/middleware-build-manifest.js".into(), - "server/next-font-manifest.js".into(), - "server/interception-route-rewrite-manifest.js".into(), + rcstr!("server/server-reference-manifest.js"), + rcstr!("server/middleware-build-manifest.js"), + rcstr!("server/next-font-manifest.js"), + rcstr!("server/interception-route-rewrite-manifest.js"), ]; let mut wasm_paths_from_root = fxindexset![]; @@ -1481,7 +1521,7 @@ impl AppEndpoint { let all_assets = get_asset_paths_from_root(&node_root_value, &all_output_assets).await?; - let entry_file = "app-edge-has-no-entrypoint".into(); + let entry_file = rcstr!("app-edge-has-no-entrypoint"); if emit_manifests == EmitManifests::Full { let dynamic_import_entries = collect_next_dynamic_chunks( @@ -1522,7 +1562,7 @@ impl AppEndpoint { }; let edge_function_definition = EdgeFunctionDefinition { files: file_paths_from_root.into_iter().collect(), - wasm: wasm_paths_to_bindings(wasm_paths_from_root.into_iter().collect()), + wasm: wasm_paths_to_bindings(wasm_paths_from_root).await?, assets: paths_to_bindings(all_assets), name: app_entry.pathname.clone(), page: app_entry.original_name.clone(), @@ -1639,9 +1679,11 @@ impl AppEndpoint { .await? 
.is_production() { + let page_name = app_entry.pathname.clone(); server_assets.insert(ResolvedVc::upcast( NftJsonAsset::new( project, + Some(page_name), *rsc_chunk, client_reference_manifest .iter() @@ -1699,7 +1741,7 @@ impl AppEndpoint { .collect(), ), module_graph, - Value::new(AvailabilityInfo::Root), + AvailabilityInfo::Root, ) .await?; @@ -1715,7 +1757,7 @@ impl AppEndpoint { app_entry.rsc_entry.ident(), ChunkGroup::Entry(evaluatable_assets.collect()), module_graph, - Value::new(availability_info), + availability_info, ) .resolve() .await?, @@ -1747,11 +1789,11 @@ impl AppEndpoint { let chunk_group = chunking_context .chunk_group( AssetIdent::from_path(this.app_project.project().project_path()) - .with_modifier(server_utils_modifier()), + .with_modifier(rcstr!("server-utils")), // TODO this should be ChunkGroup::Shared ChunkGroup::Entry(server_utils), module_graph, - Value::new(current_availability_info), + current_availability_info, ) .await?; @@ -1789,7 +1831,7 @@ impl AppEndpoint { server_component.await?.module, )]), module_graph, - Value::new(current_availability_info), + current_availability_info, ) .await?; @@ -1818,7 +1860,7 @@ impl AppEndpoint { Vc::cell(evaluatable_assets), module_graph, current_chunks, - Value::new(current_availability_info), + current_availability_info, ) .to_resolved() .await?, @@ -1936,7 +1978,7 @@ impl Endpoint for AppEndpoint { } .instrument(span) .await - .with_context(|| format!("Failed to write app endpoint {}", page_name)) + .with_context(|| format!("Failed to write app endpoint {page_name}")) } #[turbo_tasks::function] diff --git a/crates/next-api/src/client_references.rs b/crates/next-api/src/client_references.rs index fabf70dafc5ff..925161cbb553c 100644 --- a/crates/next-api/src/client_references.rs +++ b/crates/next-api/src/client_references.rs @@ -7,7 +7,7 @@ use next_core::{ use rustc_hash::FxHashMap; use serde::{Deserialize, Serialize}; use turbo_tasks::{ - debug::ValueDebugFormat, trace::TraceRawVcs, NonLocalValue, ResolvedVc, TryFlatJoinIterExt, Vc, + NonLocalValue, ResolvedVc, TryFlatJoinIterExt, Vc, debug::ValueDebugFormat, trace::TraceRawVcs, }; use turbopack::css::chunk::CssChunkPlaceable; use turbopack_core::{module::Module, module_graph::SingleModuleGraph}; diff --git a/crates/next-api/src/dynamic_imports.rs b/crates/next-api/src/dynamic_imports.rs index c15c6eed2198d..14dd4779e46b5 100644 --- a/crates/next-api/src/dynamic_imports.rs +++ b/crates/next-api/src/dynamic_imports.rs @@ -26,13 +26,13 @@ use next_core::{ }; use serde::{Deserialize, Serialize}; use turbo_tasks::{ - debug::ValueDebugFormat, trace::TraceRawVcs, FxIndexMap, NonLocalValue, ReadRef, ResolvedVc, - TryFlatJoinIterExt, TryJoinIterExt, Value, Vc, + FxIndexMap, NonLocalValue, ReadRef, ResolvedVc, TryFlatJoinIterExt, TryJoinIterExt, Vc, + debug::ValueDebugFormat, trace::TraceRawVcs, }; use turbopack_core::{ chunk::{ - availability_info::AvailabilityInfo, ChunkItem, ChunkableModule, ChunkingContext, - ModuleChunkItemIdExt, ModuleId, + ChunkItem, ChunkableModule, ChunkingContext, ModuleChunkItemIdExt, ModuleId, + availability_info::AvailabilityInfo, }, module::Module, module_graph::{ModuleGraph, SingleModuleGraph, SingleModuleGraphModuleNode}, @@ -75,11 +75,8 @@ pub(crate) async fn collect_next_dynamic_chunks( } }; - let async_loader = chunking_context.async_loader_chunk_item( - *module, - module_graph, - Value::new(availability_info), - ); + let async_loader = + chunking_context.async_loader_chunk_item(*module, module_graph, availability_info); let async_chunk_group = 
async_loader.references().to_resolved().await?; let module_id = dynamic_entry @@ -125,20 +122,21 @@ pub async fn map_next_dynamic(graph: Vc) -> Result(*module) - { - return Ok(Some(( - *module, - DynamicImportEntriesMapType::DynamicEntry(dynamic_entry_module), - ))); - } + { + return Ok(Some(( + *module, + DynamicImportEntriesMapType::DynamicEntry(dynamic_entry_module), + ))); } // TODO add this check once these modules have the correct layer // if layer.is_some_and(|layer| &**layer == "app-rsc") { diff --git a/crates/next-api/src/empty.rs b/crates/next-api/src/empty.rs index e1565520780f5..7174aa69a1ca1 100644 --- a/crates/next-api/src/empty.rs +++ b/crates/next-api/src/empty.rs @@ -1,4 +1,4 @@ -use anyhow::{bail, Result}; +use anyhow::{Result, bail}; use turbo_tasks::{Completion, Vc}; use turbopack_core::module_graph::GraphEntries; diff --git a/crates/next-api/src/font.rs b/crates/next-api/src/font.rs index de24b5aac4853..335c147e16716 100644 --- a/crates/next-api/src/font.rs +++ b/crates/next-api/src/font.rs @@ -51,7 +51,7 @@ pub(crate) async fn create_font_manifest( Default::default() } else if app_dir { let dir_str = dir.to_string().await?; - let page_path = format!("{}{}", dir_str, original_name).into(); + let page_path = format!("{dir_str}{original_name}").into(); NextFontManifest { app: [(page_path, font_paths)].into_iter().collect(), diff --git a/crates/next-api/src/instrumentation.rs b/crates/next-api/src/instrumentation.rs index a1226107301c8..3947310d02155 100644 --- a/crates/next-api/src/instrumentation.rs +++ b/crates/next-api/src/instrumentation.rs @@ -1,25 +1,25 @@ -use anyhow::{bail, Result}; +use anyhow::{Result, bail}; use next_core::{ all_assets_from_entries, next_edge::entry::wrap_edge_entry, next_manifests::{InstrumentationDefinition, MiddlewaresManifestV2}, - next_server::{get_server_runtime_entries, ServerContextType}, + next_server::{ServerContextType, get_server_runtime_entries}, }; use tracing::Instrument; -use turbo_rcstr::RcStr; -use turbo_tasks::{Completion, ResolvedVc, Value, Vc}; +use turbo_rcstr::{RcStr, rcstr}; +use turbo_tasks::{Completion, ResolvedVc, Vc}; use turbo_tasks_fs::{File, FileContent, FileSystemPath}; use turbopack_core::{ asset::AssetContent, chunk::{ - availability_info::AvailabilityInfo, ChunkingContext, ChunkingContextExt, - EntryChunkGroupResult, + ChunkingContext, ChunkingContextExt, EntryChunkGroupResult, + availability_info::AvailabilityInfo, }, context::AssetContext, module::Module, module_graph::{ - chunk_group_info::{ChunkGroup, ChunkGroupEntry}, GraphEntries, + chunk_group_info::{ChunkGroup, ChunkGroupEntry}, }, output::{OutputAsset, OutputAssets}, reference_type::{EntryReferenceSubType, ReferenceType}, @@ -44,7 +44,7 @@ pub struct InstrumentationEndpoint { is_edge: bool, app_dir: Option>, - ecmascript_client_reference_transition_name: Option>, + ecmascript_client_reference_transition_name: Option, } #[turbo_tasks::value_impl] @@ -56,7 +56,7 @@ impl InstrumentationEndpoint { source: ResolvedVc>, is_edge: bool, app_dir: Option>, - ecmascript_client_reference_transition_name: Option>, + ecmascript_client_reference_transition_name: Option, ) -> Vc { Self { project, @@ -75,7 +75,7 @@ impl InstrumentationEndpoint { .asset_context .process( *self.source, - Value::new(ReferenceType::Entry(EntryReferenceSubType::Instrumentation)), + ReferenceType::Entry(EntryReferenceSubType::Instrumentation), ) .module() .to_resolved() @@ -85,7 +85,7 @@ impl InstrumentationEndpoint { *self.asset_context, self.project.project_path(), *userland_module, - 
"instrumentation".into(), + rcstr!("instrumentation"), ) .to_resolved() .await?; @@ -105,11 +105,12 @@ impl InstrumentationEndpoint { let module_graph = this.project.module_graph(*module); let evaluatable_assets = get_server_runtime_entries( - Value::new(ServerContextType::Instrumentation { + ServerContextType::Instrumentation { app_dir: this.app_dir, ecmascript_client_reference_transition_name: this - .ecmascript_client_reference_transition_name, - }), + .ecmascript_client_reference_transition_name + .clone(), + }, this.project.next_mode(), ) .resolve_entries(*this.asset_context) @@ -124,7 +125,7 @@ impl InstrumentationEndpoint { module.ident(), ChunkGroup::Entry(evaluatable_assets), module_graph, - Value::new(AvailabilityInfo::Root), + AvailabilityInfo::Root, ); Ok(edge_files) @@ -147,20 +148,21 @@ impl InstrumentationEndpoint { .entry_chunk_group( this.project .node_root() - .join("server/instrumentation.js".into()), + .join(rcstr!("server/instrumentation.js")), get_server_runtime_entries( - Value::new(ServerContextType::Instrumentation { + ServerContextType::Instrumentation { app_dir: this.app_dir, ecmascript_client_reference_transition_name: this - .ecmascript_client_reference_transition_name, - }), + .ecmascript_client_reference_transition_name + .clone(), + }, this.project.next_mode(), ) .resolve_entries(*this.asset_context) .with_entry(*module), module_graph, OutputAssets::empty(), - Value::new(AvailabilityInfo::Root), + AvailabilityInfo::Root, ) .await?; Ok(*chunk) @@ -187,8 +189,8 @@ impl InstrumentationEndpoint { let instrumentation_definition = InstrumentationDefinition { files: file_paths_from_root, - wasm: wasm_paths_to_bindings(wasm_paths_from_root), - name: "instrumentation".into(), + wasm: wasm_paths_to_bindings(wasm_paths_from_root).await?, + name: rcstr!("instrumentation"), ..Default::default() }; let middleware_manifest_v2 = MiddlewaresManifestV2 { @@ -196,7 +198,7 @@ impl InstrumentationEndpoint { ..Default::default() }; let middleware_manifest_v2 = VirtualOutputAsset::new( - node_root.join("server/instrumentation/middleware-manifest.json".into()), + node_root.join(rcstr!("server/instrumentation/middleware-manifest.json")), AssetContent::file( FileContent::Content(File::from(serde_json::to_string_pretty( &middleware_manifest_v2, @@ -214,7 +216,7 @@ impl InstrumentationEndpoint { let mut output_assets = vec![chunk]; if this.project.next_mode().await?.is_production() { output_assets.push(ResolvedVc::upcast( - NftJsonAsset::new(*this.project, *chunk, vec![]) + NftJsonAsset::new(*this.project, None, *chunk, vec![]) .to_resolved() .await?, )); diff --git a/crates/next-api/src/loadable_manifest.rs b/crates/next-api/src/loadable_manifest.rs index 4d0892bdec3ef..07f3740f7663c 100644 --- a/crates/next-api/src/loadable_manifest.rs +++ b/crates/next-api/src/loadable_manifest.rs @@ -1,8 +1,7 @@ use anyhow::Result; use next_core::{next_manifests::LoadableManifest, util::NextRuntime}; use rustc_hash::FxHashMap; -use turbo_rcstr::RcStr; -use turbo_tasks::{ResolvedVc, TryFlatJoinIterExt, ValueToString, Vc}; +use turbo_tasks::{ResolvedVc, TryFlatJoinIterExt, Vc}; use turbo_tasks_fs::{File, FileContent, FileSystemPath}; use turbopack_core::{ asset::AssetContent, @@ -22,12 +21,12 @@ pub async fn create_react_loadable_manifest( ) -> Result> { let dynamic_import_entries = &*dynamic_import_entries.await?; - let mut loadable_manifest: FxHashMap = FxHashMap::default(); + let mut loadable_manifest: FxHashMap = FxHashMap::default(); for (_, (module_id, chunk_output)) in 
dynamic_import_entries.into_iter() { let chunk_output = chunk_output.await?; - let id = module_id.to_string().owned().await?; + let id = &*module_id.await?; let client_relative_path_value = client_relative_path.await?; let files = chunk_output @@ -44,11 +43,11 @@ pub async fn create_react_loadable_manifest( .await?; let manifest_item = LoadableManifest { - id: id.clone(), + id: id.into(), files, }; - loadable_manifest.insert(id, manifest_item); + loadable_manifest.insert(id.to_string(), manifest_item); } let manifest_json = serde_json::to_string_pretty(&loadable_manifest)?; diff --git a/crates/next-api/src/middleware.rs b/crates/next-api/src/middleware.rs index eb85bc8dd6a66..2ab19166ae9f6 100644 --- a/crates/next-api/src/middleware.rs +++ b/crates/next-api/src/middleware.rs @@ -1,29 +1,29 @@ use std::future::IntoFuture; -use anyhow::{bail, Result}; +use anyhow::{Result, bail}; use next_core::{ all_assets_from_entries, middleware::get_middleware_module, next_edge::entry::wrap_edge_entry, next_manifests::{EdgeFunctionDefinition, MiddlewareMatcher, MiddlewaresManifestV2, Regions}, - next_server::{get_server_runtime_entries, ServerContextType}, - util::{parse_config_from_source, MiddlewareMatcherKind, NextRuntime}, + next_server::{ServerContextType, get_server_runtime_entries}, + util::{MiddlewareMatcherKind, NextRuntime, parse_config_from_source}, }; use tracing::Instrument; -use turbo_rcstr::RcStr; -use turbo_tasks::{Completion, ResolvedVc, Value, Vc}; +use turbo_rcstr::{RcStr, rcstr}; +use turbo_tasks::{Completion, ResolvedVc, Vc}; use turbo_tasks_fs::{self, File, FileContent, FileSystemPath}; use turbopack_core::{ asset::AssetContent, chunk::{ - availability_info::AvailabilityInfo, ChunkingContext, ChunkingContextExt, - EntryChunkGroupResult, + ChunkingContext, ChunkingContextExt, EntryChunkGroupResult, + availability_info::AvailabilityInfo, }, context::AssetContext, module::Module, module_graph::{ - chunk_group_info::{ChunkGroup, ChunkGroupEntry}, GraphEntries, + chunk_group_info::{ChunkGroup, ChunkGroupEntry}, }, output::{OutputAsset, OutputAssets}, reference_type::{EntryReferenceSubType, ReferenceType}, @@ -44,11 +44,10 @@ use crate::{ #[turbo_tasks::value] pub struct MiddlewareEndpoint { project: ResolvedVc, - build_id: RcStr, asset_context: ResolvedVc>, source: ResolvedVc>, app_dir: Option>, - ecmascript_client_reference_transition_name: Option>, + ecmascript_client_reference_transition_name: Option, } #[turbo_tasks::value_impl] @@ -56,15 +55,13 @@ impl MiddlewareEndpoint { #[turbo_tasks::function] pub fn new( project: ResolvedVc, - build_id: RcStr, asset_context: ResolvedVc>, source: ResolvedVc>, app_dir: Option>, - ecmascript_client_reference_transition_name: Option>, + ecmascript_client_reference_transition_name: Option, ) -> Vc { Self { project, - build_id, asset_context, source, app_dir, @@ -79,7 +76,7 @@ impl MiddlewareEndpoint { .asset_context .process( *self.source, - Value::new(ReferenceType::Entry(EntryReferenceSubType::Middleware)), + ReferenceType::Entry(EntryReferenceSubType::Middleware), ) .module(); @@ -98,7 +95,7 @@ impl MiddlewareEndpoint { *self.asset_context, self.project.project_path(), module, - "middleware".into(), + rcstr!("middleware"), )) } @@ -110,11 +107,12 @@ impl MiddlewareEndpoint { let module_graph = this.project.module_graph(*module); let evaluatable_assets = get_server_runtime_entries( - Value::new(ServerContextType::Middleware { + ServerContextType::Middleware { app_dir: this.app_dir, ecmascript_client_reference_transition_name: this - 
.ecmascript_client_reference_transition_name, - }), + .ecmascript_client_reference_transition_name + .clone(), + }, this.project.next_mode(), ) .resolve_entries(*this.asset_context) @@ -129,7 +127,7 @@ impl MiddlewareEndpoint { module.ident(), ChunkGroup::Entry(evaluatable_assets), module_graph, - Value::new(AvailabilityInfo::Root), + AvailabilityInfo::Root, ); Ok(edge_files) } @@ -149,20 +147,23 @@ impl MiddlewareEndpoint { let EntryChunkGroupResult { asset: chunk, .. } = *chunking_context .entry_chunk_group( - this.project.node_root().join("server/middleware.js".into()), + this.project + .node_root() + .join(rcstr!("server/middleware.js")), get_server_runtime_entries( - Value::new(ServerContextType::Middleware { + ServerContextType::Middleware { app_dir: this.app_dir, ecmascript_client_reference_transition_name: this - .ecmascript_client_reference_transition_name, - }), + .ecmascript_client_reference_transition_name + .clone(), + }, this.project.next_mode(), ) .resolve_entries(*this.asset_context) .with_entry(*module), module_graph, OutputAssets::empty(), - Value::new(AvailabilityInfo::Root), + AvailabilityInfo::Root, ) .await?; Ok(*chunk) @@ -236,8 +237,8 @@ impl MiddlewareEndpoint { .collect() } else { vec![MiddlewareMatcher { - regexp: Some("^/.*$".into()), - original_source: "/:path*".into(), + regexp: Some(rcstr!("^/.*$")), + original_source: rcstr!("/:path*"), ..Default::default() }] }; @@ -247,7 +248,7 @@ impl MiddlewareEndpoint { let mut output_assets = vec![chunk]; if this.project.next_mode().await?.is_production() { output_assets.push(ResolvedVc::upcast( - NftJsonAsset::new(*this.project, *chunk, vec![]) + NftJsonAsset::new(*this.project, None, *chunk, vec![]) .to_resolved() .await?, )); @@ -259,7 +260,7 @@ impl MiddlewareEndpoint { let middleware_manifest_v2 = VirtualOutputAsset::new( this.project .node_root() - .join("server/middleware/middleware-manifest.json".into()), + .join(rcstr!("server/middleware/middleware-manifest.json")), AssetContent::file( FileContent::Content(File::from(serde_json::to_string_pretty( &middleware_manifest_v2, @@ -304,22 +305,22 @@ impl MiddlewareEndpoint { let edge_function_definition = EdgeFunctionDefinition { files: file_paths_from_root, - wasm: wasm_paths_to_bindings(wasm_paths_from_root), + wasm: wasm_paths_to_bindings(wasm_paths_from_root).await?, assets: paths_to_bindings(all_assets), - name: "middleware".into(), - page: "/".into(), + name: rcstr!("middleware"), + page: rcstr!("/"), regions, matchers: matchers.clone(), env: this.project.edge_env().owned().await?, }; let middleware_manifest_v2 = MiddlewaresManifestV2 { - middleware: [("/".into(), edge_function_definition)] + middleware: [(rcstr!("/"), edge_function_definition)] .into_iter() .collect(), ..Default::default() }; let middleware_manifest_v2 = VirtualOutputAsset::new( - node_root.join("server/middleware/middleware-manifest.json".into()), + node_root.join(rcstr!("server/middleware/middleware-manifest.json")), AssetContent::file( FileContent::Content(File::from(serde_json::to_string_pretty( &middleware_manifest_v2, @@ -340,7 +341,7 @@ impl MiddlewareEndpoint { self.asset_context .process( *self.source, - Value::new(ReferenceType::Entry(EntryReferenceSubType::Middleware)), + ReferenceType::Entry(EntryReferenceSubType::Middleware), ) .module() } diff --git a/crates/next-api/src/module_graph.rs b/crates/next-api/src/module_graph.rs index 81766e38d3e62..783e15a4bb15c 100644 --- a/crates/next-api/src/module_graph.rs +++ b/crates/next-api/src/module_graph.rs @@ -4,29 +4,32 @@ use 
anyhow::Result; use either::Either; use next_core::{ next_client_reference::{ - find_server_entries, ClientReference, ClientReferenceGraphResult, ClientReferenceType, - ServerEntries, VisitedClientReferenceGraphNodes, + ClientReference, ClientReferenceGraphResult, ClientReferenceType, ServerEntries, + VisitedClientReferenceGraphNodes, find_server_entries, }, next_dynamic::NextDynamicEntryModule, next_manifests::ActionLayer, }; use rustc_hash::FxHashMap; use tracing::Instrument; +use turbo_rcstr::RcStr; use turbo_tasks::{ CollectiblesSource, FxIndexMap, FxIndexSet, ReadRef, ResolvedVc, TryFlatJoinIterExt, - TryJoinIterExt, Vc, + TryJoinIterExt, ValueToString, Vc, }; +use turbo_tasks_fs::FileSystemPath; +use turbopack::css::{CssModuleAsset, ModuleCssAsset}; use turbopack_core::{ context::AssetContext, - issue::Issue, + issue::{Issue, IssueExt, IssueSeverity, IssueStage, OptionStyledString, StyledString}, module::Module, module_graph::{GraphTraversalAction, ModuleGraph, SingleModuleGraph}, }; use crate::{ - client_references::{map_client_references, ClientReferenceMapType, ClientReferencesSet}, - dynamic_imports::{map_next_dynamic, DynamicImportEntries, DynamicImportEntriesMapType}, - server_actions::{map_server_actions, to_rsc_context, AllActions, AllModuleActions}, + client_references::{ClientReferenceMapType, ClientReferencesSet, map_client_references}, + dynamic_imports::{DynamicImportEntries, DynamicImportEntriesMapType, map_next_dynamic}, + server_actions::{AllActions, AllModuleActions, map_server_actions, to_rsc_context}, }; #[turbo_tasks::value] @@ -348,7 +351,7 @@ impl ClientReferencesGraph { }, |parent_info, node, state_map| { let Some((parent_node, _)) = parent_info else { - return; + return Ok(()); }; let parent_module = parent_node.module; @@ -378,6 +381,7 @@ impl ClientReferencesGraph { } _ => {} }; + Ok(()) }, )?; @@ -398,6 +402,164 @@ impl ClientReferencesGraph { } } +#[turbo_tasks::value(shared)] +struct CssGlobalImportIssue { + parent_module: ResolvedVc>, + module: ResolvedVc>, +} + +impl CssGlobalImportIssue { + fn new( + parent_module: ResolvedVc>, + module: ResolvedVc>, + ) -> Self { + Self { + parent_module, + module, + } + } +} + +#[turbo_tasks::value_impl] +impl Issue for CssGlobalImportIssue { + #[turbo_tasks::function] + async fn title(&self) -> Vc { + StyledString::Stack(vec![ + StyledString::Text("Failed to compile".into()), + StyledString::Text( + "Global CSS cannot be imported from files other than your Custom . Due to \ + the Global nature of stylesheets, and to avoid conflicts, Please move all \ + first-party global CSS imports to pages/_app.js. Or convert the import to \ + Component-Level CSS (CSS Modules)." + .into(), + ), + StyledString::Text("Read more: https://nextjs.org/docs/messages/css-global".into()), + ]) + .cell() + } + + #[turbo_tasks::function] + async fn description(&self) -> Result> { + let parent_path = &self.parent_module.ident().path(); + let module_path = &self.module.ident().path(); + let relative_import_location = parent_path.parent().await?; + + let import_path = match relative_import_location.get_relative_path_to(&*module_path.await?) 
+ { + Some(path) => path, + None => module_path.await?.path.clone(), + }; + let cleaned_import_path = + if import_path.ends_with(".scss.css") || import_path.ends_with(".sass.css") { + RcStr::from(import_path.trim_end_matches(".css")) + } else { + import_path + }; + + Ok(Vc::cell(Some( + StyledString::Stack(vec![ + StyledString::Text(format!("Location: {}", parent_path.await?.path).into()), + StyledString::Text(format!("Import path: {cleaned_import_path}",).into()), + ]) + .resolved_cell(), + ))) + } + + fn severity(&self) -> IssueSeverity { + IssueSeverity::Error + } + + #[turbo_tasks::function] + fn file_path(&self) -> Vc<FileSystemPath> { + self.parent_module.ident().path() + } + + #[turbo_tasks::function] + fn stage(&self) -> Vc<IssueStage> { + IssueStage::ProcessModule.into() + } +} + +type FxModuleNameMap = FxIndexMap<ResolvedVc<Box<dyn Module>>, RcStr>; + +#[turbo_tasks::value(transparent)] +struct ModuleNameMap(pub FxModuleNameMap); + +#[turbo_tasks::function] +async fn validate_pages_css_imports( + graph: Vc<SingleModuleGraph>, + is_single_page: bool, + entry: Vc<Box<dyn Module>>, + app_module: ResolvedVc<Box<dyn Module>>, + module_name_map: ResolvedVc<ModuleNameMap>, +) -> Result<()> { + let graph = &*graph.await?; + let entry = entry.to_resolved().await?; + let module_name_map = module_name_map.await?; + + let entries = if !is_single_page { + // TODO: Optimize this code by checking if the node is an entry using `get_module` and then + // checking if the node is an entry in the graph by looking for the reverse edges. + if !graph.entry_modules().any(|m| m == entry) { + // the graph doesn't contain the entry, e.g. for the additional module graph + return Ok(()); + } + Either::Left(std::iter::once(entry)) + } else { + Either::Right(graph.entry_modules()) + }; + + graph.traverse_edges_from_entries(entries, |parent_info, node| { + let module = node.module; + + // If the module being imported isn't a global css module, there is nothing to validate. + let module_is_global_css = + ResolvedVc::try_downcast_type::<CssModuleAsset>(module).is_some(); + + if !module_is_global_css { + return GraphTraversalAction::Continue; + } + + // We allow imports of global CSS files which are inside of `node_modules`. + let module_name_contains_node_modules = module_name_map + .get(&module) + .is_some_and(|s| s.contains("node_modules")); + + if module_name_contains_node_modules { + return GraphTraversalAction::Continue; + } + + // If we're at a root node, there is nothing importing this module and we can skip + // any further validations. + let Some((parent_node, _)) = parent_info else { + return GraphTraversalAction::Continue; + }; + + let parent_module = parent_node.module; + let parent_is_css_module = ResolvedVc::try_downcast_type::<ModuleCssAsset>(parent_module) + .is_some() + || ResolvedVc::try_downcast_type::<CssModuleAsset>(parent_module).is_some(); + + // We always allow .module css/scss/sass files to import global css files as well. + if parent_is_css_module { + return GraphTraversalAction::Continue; + } + + // If all of the above invariants have been checked, we look to see if the parent module is + // the same as the app module. If it isn't, we know it isn't a valid place to import global + // css.
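+ // For example, importing `styles/globals.css` (an illustrative path) from a regular page module such as `pages/index.js` is reported as an issue here, while the same import from `pages/_app.js` is allowed.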
+ if parent_module != app_module { + CssGlobalImportIssue::new(parent_module, module) + .resolved_cell() + .emit(); + } + + GraphTraversalAction::Continue + })?; + + Ok(()) +} + /// The consumers of this shouldn't need to care about the exact contents since it's abstracted away /// by the accessor functions, but /// - In dev, contains information about the modules of the current endpoint only @@ -407,6 +569,9 @@ pub struct ReducedGraphs { next_dynamic: Vec<ResolvedVc<NextDynamicGraph>>, server_actions: Vec<ResolvedVc<ServerActionsGraph>>, client_references: Vec<ResolvedVc<ClientReferencesGraph>>, + // Data for some more ad-hoc operations + bare_graphs: ResolvedVc<ModuleGraph>, + is_single_page: bool, // TODO add other graphs } @@ -414,9 +579,9 @@ impl ReducedGraphs { #[turbo_tasks::function] pub async fn new(graphs: Vc<ModuleGraph>, is_single_page: bool) -> Result<Vc<Self>> { - let graphs = &graphs.await?.graphs; + let graphs_ref = &graphs.await?.graphs; let next_dynamic = async { - graphs + graphs_ref .iter() .map(|graph| { NextDynamicGraph::new_with_entries(**graph, is_single_page).to_resolved() }) @@ -427,7 +592,7 @@ impl ReducedGraphs { .instrument(tracing::info_span!("generating next/dynamic graphs")); let server_actions = async { - graphs + graphs_ref .iter() .map(|graph| { ServerActionsGraph::new_with_entries(**graph, is_single_page).to_resolved() }) @@ -438,7 +603,7 @@ impl ReducedGraphs { .instrument(tracing::info_span!("generating server actions graphs")); let client_references = async { - graphs + graphs_ref .iter() .map(|graph| { ClientReferencesGraph::new_with_entries(**graph, is_single_page).to_resolved() }) @@ -455,6 +620,8 @@ impl ReducedGraphs { next_dynamic: next_dynamic?, server_actions: server_actions?, client_references: client_references?, + bare_graphs: graphs.to_resolved().await?, + is_single_page, } .cell()) } @@ -532,6 +699,7 @@ impl ReducedGraphs { &self, entry: Vc<Box<dyn Module>>, has_layout_segments: bool, + include_traced: bool, ) -> Result<Vc<ClientReferenceGraphResult>> { let span = tracing::info_span!("collect all client references for endpoint"); async move { @@ -567,7 +735,7 @@ impl ReducedGraphs { let ServerEntries { server_utils, server_component_entries, - } = &*find_server_entries(entry).await?; + } = &*find_server_entries(entry, include_traced).await?; result.server_utils = server_utils.clone(); result.server_component_entries = server_component_entries.clone(); } @@ -577,10 +745,70 @@ impl ReducedGraphs { .instrument(span) .await } + + #[turbo_tasks::function] + /// Validates that the global CSS/SCSS/SASS imports are only valid imports with the following + /// rules: + /// * The import is made from a `node_modules` package + /// * The import is made from a `.module.css` file + /// * The import is made from the `pages/_app.js`, or equivalent file. + pub async fn validate_pages_css_imports( + &self, + entry: Vc<Box<dyn Module>>, + app_module: Vc<Box<dyn Module>>, + ) -> Result<()> { + let span = tracing::info_span!("validate pages css imports"); + async move { + let graphs = &self.bare_graphs.await?.graphs; + + // We need to collect the module names here to pass into the + // `validate_pages_css_imports` function. This is because the function is + // called for each graph, and we need to know the module names of the parent + // modules to determine if the import is valid. We can't do this in the + // called function because it's within a closure that can't resolve turbo tasks. + let graph_to_module_ident_tuples = async |graph: &ResolvedVc<SingleModuleGraph>| { + graph + .await?
+ .graph + .node_weights() + .map(async |n| Ok((n.module(), n.module().ident().to_string().owned().await?))) + .try_join() + .await + }; + + let identifier_map = graphs + .iter() + .map(graph_to_module_ident_tuples) + .try_join() + .await? + .into_iter() + .flatten() + .collect::>(); + let identifier_map = ModuleNameMap(identifier_map).cell(); + + let _ = graphs + .iter() + .map(|graph| { + validate_pages_css_imports( + **graph, + self.is_single_page, + entry, + app_module, + identifier_map, + ) + }) + .try_join() + .await?; + + Ok(()) + } + .instrument(span) + .await + } } #[turbo_tasks::function(operation)] -async fn get_reduced_graphs_for_endpoint_inner_operation( +fn get_reduced_graphs_for_endpoint_inner_operation( module_graph: ResolvedVc, is_single_page: bool, ) -> Vc { diff --git a/crates/next-api/src/nft_json.rs b/crates/next-api/src/nft_json.rs index 972612f5329cb..4e2d15de0c3b1 100644 --- a/crates/next-api/src/nft_json.rs +++ b/crates/next-api/src/nft_json.rs @@ -1,10 +1,10 @@ -use std::collections::BTreeSet; +use std::collections::{BTreeSet, VecDeque}; -use anyhow::{bail, Result}; +use anyhow::{Result, bail}; use serde_json::json; use turbo_rcstr::RcStr; use turbo_tasks::{ResolvedVc, Vc}; -use turbo_tasks_fs::{File, FileSystem, FileSystemPath}; +use turbo_tasks_fs::{DirectoryEntry, File, FileSystem, FileSystemPath, glob::Glob}; use turbopack_core::{ asset::{Asset, AssetContent}, output::OutputAsset, @@ -30,6 +30,7 @@ pub struct NftJsonAsset { /// An example of this is the two-phase approach used by the `ClientReferenceManifest` in /// next.js. additional_assets: Vec>>, + page_name: Option, } #[turbo_tasks::value_impl] @@ -37,6 +38,7 @@ impl NftJsonAsset { #[turbo_tasks::function] pub fn new( project: ResolvedVc, + page_name: Option, chunk: ResolvedVc>, additional_assets: Vec>>, ) -> Vc { @@ -44,16 +46,10 @@ impl NftJsonAsset { chunk, project, additional_assets, + page_name, } .cell() } - - #[turbo_tasks::function] - async fn dist_dir(&self) -> Result> { - Ok(Vc::cell( - format!("/{}/", self.project.dist_dir().await?).into(), - )) - } } #[turbo_tasks::value_impl] @@ -102,6 +98,41 @@ fn get_output_specifier( bail!("NftJsonAsset: cannot handle filepath {}", path_ref); } +/// Apply outputFileTracingIncludes patterns to find additional files +async fn apply_includes( + project_root_path: Vc, + glob: Vc, + ident_folder: &FileSystemPath, +) -> Result> { + // Read files matching the glob pattern from the project root + let glob_result = project_root_path.read_glob(glob).await?; + + // Walk the full glob_result using an explicit stack to avoid async recursion overheads. 
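+ // Each entry in `inner` holds the glob results for a matched subdirectory, so pushing those onto the stack continues the walk iteratively rather than through recursive async calls.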
+ let mut result = BTreeSet::new(); + let mut stack = VecDeque::new(); + stack.push_back(glob_result); + while let Some(glob_result) = stack.pop_back() { + // Process direct results (files and directories at this level) + for entry in glob_result.results.values() { + let DirectoryEntry::File(file_path) = entry else { + continue; + }; + + let file_path_ref = file_path.await?; + // Convert to relative path from ident_folder to the file + if let Some(relative_path) = ident_folder.get_relative_path_to(&file_path_ref) { + result.insert(relative_path); + } + } + + for nested_result in glob_result.inner.values() { + let nested_result_ref = nested_result.await?; + stack.push_back(nested_result_ref); + } + } + Ok(result) +} + #[turbo_tasks::value_impl] impl Asset for NftJsonAsset { #[turbo_tasks::function] @@ -111,14 +142,21 @@ impl Asset for NftJsonAsset { let output_root_ref = this.project.output_fs().root().await?; let project_root_ref = this.project.project_fs().root().await?; + let next_config = this.project.next_config(); + + // Parse outputFileTracingIncludes and outputFileTracingExcludes from config + let output_file_tracing_includes = &*next_config.output_file_tracing_includes().await?; + let output_file_tracing_excludes = &*next_config.output_file_tracing_excludes().await?; + let client_root = this.project.client_fs().root(); let client_root_ref = client_root.await?; + let project_root_path = this.project.project_root_path(); // Example: [project] + // Example: [output]/apps/my-website/.next/server/app -- without the `.nft.json` let ident_folder = self.path().parent().await?; - let ident_folder_in_project_fs = this - .project - .project_path() - .join(ident_folder.path.clone()) + // Example: [project]/apps/my-website/.next/server/app -- without the `.nft.json` + let ident_folder_in_project_fs = project_root_path + .join(ident_folder.path.clone()) // apps/my-website/.next/server/app .await?; let chunk = this.chunk; @@ -128,6 +166,51 @@ impl Asset for NftJsonAsset { .copied() .chain(std::iter::once(chunk)) .collect(); + + let exclude_glob = if let Some(route) = &this.page_name { + let project_path = this.project.project_path().await?; + + if let Some(excludes_config) = output_file_tracing_excludes { + let mut combined_excludes = BTreeSet::new(); + + if let Some(excludes_obj) = excludes_config.as_object() { + for (glob_pattern, exclude_patterns) in excludes_obj { + // Check if the route matches the glob pattern + let glob = Glob::new(RcStr::from(glob_pattern.clone())).await?; + if glob.matches(route) + && let Some(patterns) = exclude_patterns.as_array() + { + for pattern in patterns { + if let Some(pattern_str) = pattern.as_str() { + combined_excludes.insert(pattern_str); + } + } + } + } + } + + let glob = Glob::new( + format!( + "{project_path}/{{{}}}", + combined_excludes + .iter() + .copied() + .collect::>() + .join(",") + ) + .into(), + ) + .await?; + + Some(glob) + } else { + None + } + } else { + None + }; + + // Collect base assets first for referenced_chunk in all_assets_from_entries(Vc::cell(entries)).await? 
{ if chunk.eq(referenced_chunk) { continue; @@ -138,6 +221,12 @@ impl Asset for NftJsonAsset { continue; } + if let Some(ref exclude_glob) = exclude_glob + && exclude_glob.matches(referenced_chunk_path.path.as_str()) + { + continue; + } + let Some(specifier) = get_output_specifier( &referenced_chunk_path, &ident_folder, @@ -152,6 +241,50 @@ impl Asset for NftJsonAsset { result.insert(specifier); } + // Apply outputFileTracingIncludes and outputFileTracingExcludes + // Extract route from chunk path for pattern matching + if let Some(route) = &this.page_name { + let project_path = this.project.project_path(); + let mut combined_includes = BTreeSet::new(); + + // Process includes + if let Some(includes_config) = output_file_tracing_includes + && let Some(includes_obj) = includes_config.as_object() + { + for (glob_pattern, include_patterns) in includes_obj { + // Check if the route matches the glob pattern + let glob = Glob::new(glob_pattern.as_str().into()).await?; + if glob.matches(route) + && let Some(patterns) = include_patterns.as_array() + { + for pattern in patterns { + if let Some(pattern_str) = pattern.as_str() { + combined_includes.insert(pattern_str); + } + } + } + } + } + + // Apply includes - find additional files that match the include patterns + if !combined_includes.is_empty() { + let glob = Glob::new( + format!( + "{{{}}}", + combined_includes + .iter() + .copied() + .collect::>() + .join(",") + ) + .into(), + ); + let additional_files = + apply_includes(project_path, glob, &ident_folder_in_project_fs).await?; + result.extend(additional_files); + } + } + let json = json!({ "version": 1, "files": result diff --git a/crates/next-api/src/operation.rs b/crates/next-api/src/operation.rs index ad9c28667d015..14059bec682ae 100644 --- a/crates/next-api/src/operation.rs +++ b/crates/next-api/src/operation.rs @@ -2,8 +2,8 @@ use anyhow::Result; use serde::{Deserialize, Serialize}; use turbo_rcstr::RcStr; use turbo_tasks::{ - debug::ValueDebugFormat, get_effects, trace::TraceRawVcs, CollectiblesSource, FxIndexMap, - NonLocalValue, OperationValue, OperationVc, ResolvedVc, TaskInput, Vc, + CollectiblesSource, FxIndexMap, NonLocalValue, OperationValue, OperationVc, ResolvedVc, + TaskInput, Vc, debug::ValueDebugFormat, get_effects, trace::TraceRawVcs, }; use turbopack_core::{diagnostics::Diagnostic, issue::IssueDescriptionExt}; diff --git a/crates/next-api/src/pages.rs b/crates/next-api/src/pages.rs index 5cf4396739112..af58a40c573af 100644 --- a/crates/next-api/src/pages.rs +++ b/crates/next-api/src/pages.rs @@ -1,60 +1,59 @@ -use anyhow::{bail, Context, Result}; +use anyhow::{Context, Result, bail}; use futures::future::BoxFuture; use next_core::{ - all_assets_from_entries, create_page_loader_entry_module, get_asset_path_from_pathname, - get_edge_resolve_options_context, + PageLoaderAsset, all_assets_from_entries, create_page_loader_entry_module, + get_asset_path_from_pathname, get_edge_resolve_options_context, hmr_entry::HmrEntryModule, mode::NextMode, next_client::{ - get_client_module_options_context, get_client_resolve_options_context, - get_client_runtime_entries, ClientContextType, RuntimeEntries, + ClientContextType, RuntimeEntries, get_client_module_options_context, + get_client_resolve_options_context, get_client_runtime_entries, }, next_dynamic::NextDynamicTransition, next_edge::route_regex::get_named_middleware_regex, next_manifests::{ BuildManifest, EdgeFunctionDefinition, MiddlewareMatcher, MiddlewaresManifestV2, - PagesManifest, + PagesManifest, Regions, }, 
next_pages::create_page_ssr_entry_module, next_server::{ - get_server_module_options_context, get_server_resolve_options_context, - get_server_runtime_entries, ServerContextType, + ServerContextType, get_server_module_options_context, get_server_resolve_options_context, + get_server_runtime_entries, }, pages_structure::{ - find_pages_structure, PagesDirectoryStructure, PagesStructure, PagesStructureItem, + PagesDirectoryStructure, PagesStructure, PagesStructureItem, find_pages_structure, }, - util::{get_asset_prefix_from_pathname, parse_config_from_source, NextRuntime}, - PageLoaderAsset, + util::{NextRuntime, get_asset_prefix_from_pathname, parse_config_from_source}, }; use serde::{Deserialize, Serialize}; use tracing::Instrument; -use turbo_rcstr::RcStr; +use turbo_rcstr::{RcStr, rcstr}; use turbo_tasks::{ - fxindexmap, trace::TraceRawVcs, Completion, FxIndexMap, NonLocalValue, ResolvedVc, TaskInput, - Value, ValueToString, Vc, + Completion, FxIndexMap, NonLocalValue, ResolvedVc, TaskInput, ValueToString, Vc, fxindexmap, + fxindexset, trace::TraceRawVcs, }; use turbo_tasks_fs::{ self, File, FileContent, FileSystem, FileSystemPath, FileSystemPathOption, VirtualFileSystem, }; use turbopack::{ + ModuleAssetContext, module_options::ModuleOptionsContext, resolve_options_context::ResolveOptionsContext, - transition::{ContextTransition, TransitionOptions}, - ModuleAssetContext, + transition::{FullContextTransition, Transition, TransitionOptions}, }; use turbopack_core::{ asset::AssetContent, chunk::{ - availability_info::AvailabilityInfo, ChunkGroupResult, ChunkingContext, ChunkingContextExt, - EvaluatableAsset, EvaluatableAssets, + ChunkGroupResult, ChunkingContext, ChunkingContextExt, EvaluatableAsset, EvaluatableAssets, + availability_info::AvailabilityInfo, }, context::AssetContext, file_source::FileSource, - ident::AssetIdent, + ident::{AssetIdent, Layer}, module::Module, module_graph::{ - chunk_group_info::{ChunkGroup, ChunkGroupEntry}, GraphEntries, ModuleGraph, SingleModuleGraph, VisitedModules, + chunk_group_info::{ChunkGroup, ChunkGroupEntry}, }, output::{OptionOutputAsset, OutputAsset, OutputAssets}, reference_type::{EcmaScriptModulesReferenceSubType, EntryReferenceSubType, ReferenceType}, @@ -67,7 +66,7 @@ use turbopack_nodejs::NodeJsChunkingContext; use crate::{ dynamic_imports::{ - collect_next_dynamic_chunks, DynamicImportedChunks, NextDynamicChunkAvailability, + DynamicImportedChunks, NextDynamicChunkAvailability, collect_next_dynamic_chunks, }, font::create_font_manifest, loadable_manifest::create_react_loadable_manifest, @@ -87,11 +86,6 @@ pub struct PagesProject { project: ResolvedVc, } -#[turbo_tasks::function] -fn client_layer() -> Vc { - Vc::cell("client".into()) -} - #[turbo_tasks::value_impl] impl PagesProject { #[turbo_tasks::function] @@ -116,8 +110,8 @@ impl PagesProject { routes: &mut FxIndexMap, page: Vc, make_route: impl Fn( - Vc, - Vc, + RcStr, + RcStr, Vc, ) -> BoxFuture<'static, Result>, ) -> Result<()> { @@ -127,9 +121,8 @@ impl PagesProject { .. 
} = *page.await?; let pathname: RcStr = format!("/{}", next_router_path.await?.path).into(); - let pathname_vc = Vc::cell(pathname.clone()); - let original_name = Vc::cell(format!("/{}", original_path.await?.path).into()); - let route = make_route(pathname_vc, original_name, page).await?; + let original_name = format!("/{}", original_path.await?.path).into(); + let route = make_route(pathname.clone(), original_name, page).await?; routes.insert(pathname, route); Ok(()) } @@ -138,8 +131,8 @@ impl PagesProject { routes: &mut FxIndexMap, dir: Vc, make_route: impl Fn( - Vc, - Vc, + RcStr, + RcStr, Vc, ) -> BoxFuture<'static, Result>, ) -> Result<()> { @@ -183,15 +176,15 @@ impl PagesProject { .await?; } - let make_page_route = |pathname, original_name, page| -> BoxFuture<_> { + let make_page_route = |pathname: RcStr, original_name: RcStr, page| -> BoxFuture<_> { Box::pin(async move { Ok(Route::Page { html_endpoint: ResolvedVc::upcast( PageEndpoint::new( PageEndpointType::Html, self, - pathname, - original_name, + pathname.clone(), + original_name.clone(), page, pages_structure, ) @@ -233,12 +226,11 @@ impl PagesProject { .. } = *item.await?; let pathname: RcStr = format!("/{}", next_router_path.await?.path).into(); - let pathname_vc = Vc::cell(pathname.clone()); - let original_name = Vc::cell(format!("/{}", original_path.await?.path).into()); + let original_name = format!("/{}", original_path.await?.path).into(); let endpoint = Vc::upcast(PageEndpoint::new( ty, self, - pathname_vc, + pathname, original_name, item, self.pages_structure(), @@ -285,20 +277,40 @@ impl PagesProject { Ok(if let Some(pages) = self.pages_structure().await?.pages { pages.project_path() } else { - self.project().project_path().join("pages".into()) + self.project().project_path().join(rcstr!("pages")) }) } #[turbo_tasks::function] - async fn transitions(self: Vc) -> Result> { + async fn client_transitions(self: Vc) -> Result> { Ok(TransitionOptions { named_transitions: [ ( - "next-dynamic".into(), + rcstr!("next-dynamic"), ResolvedVc::upcast(NextDynamicTransition::new_marker().to_resolved().await?), ), ( - "next-dynamic-client".into(), + rcstr!("next-dynamic-client"), + ResolvedVc::upcast(NextDynamicTransition::new_marker().to_resolved().await?), + ), + ] + .into_iter() + .collect(), + ..Default::default() + } + .cell()) + } + + #[turbo_tasks::function] + async fn server_transitions(self: Vc) -> Result> { + Ok(TransitionOptions { + named_transitions: [ + ( + rcstr!("next-dynamic"), + ResolvedVc::upcast(NextDynamicTransition::new_marker().to_resolved().await?), + ), + ( + rcstr!("next-dynamic-client"), ResolvedVc::upcast( NextDynamicTransition::new_client(Vc::upcast(self.client_transition())) .to_resolved() @@ -314,13 +326,8 @@ impl PagesProject { } #[turbo_tasks::function] - fn client_transition(self: Vc) -> Vc { - ContextTransition::new( - self.project().client_compile_time_info(), - self.client_module_options_context(), - self.client_resolve_options_context(), - client_layer(), - ) + fn client_transition(self: Vc) -> Vc> { + Vc::upcast(FullContextTransition::new(self.client_module_context())) } #[turbo_tasks::function] @@ -329,9 +336,9 @@ impl PagesProject { self.project().project_path(), self.project().execution_context(), self.project().client_compile_time_info().environment(), - Value::new(ClientContextType::Pages { + ClientContextType::Pages { pages_dir: self.pages_dir().to_resolved().await?, - }), + }, self.project().next_mode(), self.project().next_config(), self.project().encryption_key(), @@ -343,9 +350,9 @@ 
impl PagesProject { async fn client_resolve_options_context(self: Vc) -> Result> { Ok(get_client_resolve_options_context( self.project().project_path(), - Value::new(ClientContextType::Pages { + ClientContextType::Pages { pages_dir: self.pages_dir().to_resolved().await?, - }), + }, self.project().next_mode(), self.project().next_config(), self.project().execution_context(), @@ -353,24 +360,24 @@ impl PagesProject { } #[turbo_tasks::function] - pub(super) fn client_module_context(self: Vc) -> Vc> { - Vc::upcast(ModuleAssetContext::new( - self.transitions(), + pub(super) fn client_module_context(self: Vc) -> Vc { + ModuleAssetContext::new( + self.client_transitions(), self.project().client_compile_time_info(), self.client_module_options_context(), self.client_resolve_options_context(), - client_layer(), - )) + Layer::new_with_user_friendly_name(rcstr!("client"), rcstr!("Browser")), + ) } #[turbo_tasks::function] pub(super) fn ssr_module_context(self: Vc) -> Vc { ModuleAssetContext::new( - self.transitions(), + self.server_transitions(), self.project().server_compile_time_info(), self.ssr_module_options_context(), self.ssr_resolve_options_context(), - Vc::cell("ssr".into()), + Layer::new_with_user_friendly_name(rcstr!("ssr"), rcstr!("SSR")), ) } @@ -379,22 +386,22 @@ impl PagesProject { #[turbo_tasks::function] pub(super) fn api_module_context(self: Vc) -> Vc { ModuleAssetContext::new( - self.transitions(), + self.server_transitions(), self.project().server_compile_time_info(), self.api_module_options_context(), self.ssr_resolve_options_context(), - Vc::cell("api".into()), + Layer::new_with_user_friendly_name(rcstr!("api"), rcstr!("Route")), ) } #[turbo_tasks::function] pub(super) fn ssr_data_module_context(self: Vc) -> Vc { ModuleAssetContext::new( - self.transitions(), + self.server_transitions(), self.project().server_compile_time_info(), self.ssr_data_module_options_context(), self.ssr_resolve_options_context(), - Vc::cell("ssr-data".into()), + Layer::new(rcstr!("ssr-data")), ) } @@ -405,7 +412,7 @@ impl PagesProject { self.project().edge_compile_time_info(), self.edge_ssr_module_options_context(), self.edge_ssr_resolve_options_context(), - Vc::cell("edge-ssr".into()), + Layer::new_with_user_friendly_name(rcstr!("edge-ssr"), rcstr!("Edge SSR")), ) } @@ -416,7 +423,7 @@ impl PagesProject { self.project().edge_compile_time_info(), self.edge_api_module_options_context(), self.edge_ssr_resolve_options_context(), - Vc::cell("edge-api".into()), + Layer::new_with_user_friendly_name(rcstr!("edge-api"), rcstr!("Edge Route")), ) } @@ -427,7 +434,7 @@ impl PagesProject { self.project().edge_compile_time_info(), self.edge_ssr_data_module_options_context(), self.edge_ssr_resolve_options_context(), - Vc::cell("edge-ssr-data".into()), + Layer::new(rcstr!("edge-ssr-data")), ) } @@ -436,13 +443,14 @@ impl PagesProject { Ok(get_server_module_options_context( self.project().project_path(), self.project().execution_context(), - Value::new(ServerContextType::Pages { + ServerContextType::Pages { pages_dir: self.pages_dir().to_resolved().await?, - }), + }, self.project().next_mode(), self.project().next_config(), NextRuntime::NodeJs, self.project().encryption_key(), + self.project().server_compile_time_info().environment(), )) } @@ -451,13 +459,14 @@ impl PagesProject { Ok(get_server_module_options_context( self.project().project_path(), self.project().execution_context(), - Value::new(ServerContextType::Pages { + ServerContextType::Pages { pages_dir: self.pages_dir().to_resolved().await?, - }), + }, 
self.project().next_mode(), self.project().next_config(), NextRuntime::Edge, self.project().encryption_key(), + self.project().edge_compile_time_info().environment(), )) } @@ -466,13 +475,14 @@ impl PagesProject { Ok(get_server_module_options_context( self.project().project_path(), self.project().execution_context(), - Value::new(ServerContextType::PagesApi { + ServerContextType::PagesApi { pages_dir: self.pages_dir().to_resolved().await?, - }), + }, self.project().next_mode(), self.project().next_config(), NextRuntime::NodeJs, self.project().encryption_key(), + self.project().server_compile_time_info().environment(), )) } @@ -481,13 +491,14 @@ impl PagesProject { Ok(get_server_module_options_context( self.project().project_path(), self.project().execution_context(), - Value::new(ServerContextType::PagesApi { + ServerContextType::PagesApi { pages_dir: self.pages_dir().to_resolved().await?, - }), + }, self.project().next_mode(), self.project().next_config(), NextRuntime::Edge, self.project().encryption_key(), + self.project().edge_compile_time_info().environment(), )) } @@ -496,13 +507,14 @@ impl PagesProject { Ok(get_server_module_options_context( self.project().project_path(), self.project().execution_context(), - Value::new(ServerContextType::PagesData { + ServerContextType::PagesData { pages_dir: self.pages_dir().to_resolved().await?, - }), + }, self.project().next_mode(), self.project().next_config(), NextRuntime::NodeJs, self.project().encryption_key(), + self.project().server_compile_time_info().environment(), )) } @@ -513,13 +525,14 @@ impl PagesProject { Ok(get_server_module_options_context( self.project().project_path(), self.project().execution_context(), - Value::new(ServerContextType::PagesData { + ServerContextType::PagesData { pages_dir: self.pages_dir().to_resolved().await?, - }), + }, self.project().next_mode(), self.project().next_config(), NextRuntime::Edge, self.project().encryption_key(), + self.project().edge_compile_time_info().environment(), )) } @@ -530,9 +543,9 @@ impl PagesProject { // NOTE(alexkirsz) This could be `PagesData` for the data endpoint, but it doesn't // matter (for now at least) because `get_server_resolve_options_context` doesn't // differentiate between the two. - Value::new(ServerContextType::Pages { + ServerContextType::Pages { pages_dir: self.pages_dir().to_resolved().await?, - }), + }, self.project().next_mode(), self.project().next_config(), self.project().execution_context(), @@ -546,9 +559,9 @@ impl PagesProject { // NOTE(alexkirsz) This could be `PagesData` for the data endpoint, but it doesn't // matter (for now at least) because `get_server_resolve_options_context` doesn't // differentiate between the two. 
- Value::new(ServerContextType::Pages { + ServerContextType::Pages { pages_dir: self.pages_dir().to_resolved().await?, - }), + }, self.project().next_mode(), self.project().next_config(), self.project().execution_context(), @@ -559,22 +572,22 @@ impl PagesProject { async fn client_runtime_entries(self: Vc) -> Result> { let client_runtime_entries = get_client_runtime_entries( self.project().project_path(), - Value::new(ClientContextType::Pages { + ClientContextType::Pages { pages_dir: self.pages_dir().to_resolved().await?, - }), + }, self.project().next_mode(), self.project().next_config(), self.project().execution_context(), ); - Ok(client_runtime_entries.resolve_entries(self.client_module_context())) + Ok(client_runtime_entries.resolve_entries(Vc::upcast(self.client_module_context()))) } #[turbo_tasks::function] async fn runtime_entries(self: Vc) -> Result> { Ok(get_server_runtime_entries( - Value::new(ServerContextType::Pages { + ServerContextType::Pages { pages_dir: self.pages_dir().to_resolved().await?, - }), + }, self.project().next_mode(), )) } @@ -582,9 +595,9 @@ impl PagesProject { #[turbo_tasks::function] async fn data_runtime_entries(self: Vc) -> Result> { Ok(get_server_runtime_entries( - Value::new(ServerContextType::PagesData { + ServerContextType::PagesData { pages_dir: self.pages_dir().to_resolved().await?, - }), + }, self.project().next_mode(), )) } @@ -615,21 +628,20 @@ impl PagesProject { #[turbo_tasks::function] pub async fn client_main_module(self: Vc) -> Result>> { - let client_module_context = self.client_module_context(); + let client_module_context = Vc::upcast(self.client_module_context()); let client_main_module = esm_resolve( Vc::upcast(PlainResolveOrigin::new( client_module_context, - self.project().project_path().join("_".into()), + self.project().project_path().join(rcstr!("_")), )), - Request::parse(Value::new(Pattern::Constant( + Request::parse(Pattern::Constant( match *self.project().next_mode().await? { - NextMode::Development => "next/dist/client/next-dev-turbopack.js", - NextMode::Build => "next/dist/client/next-turbopack.js", - } - .into(), - ))), - Value::new(EcmaScriptModulesReferenceSubType::Undefined), + NextMode::Development => rcstr!("next/dist/client/next-dev-turbopack.js"), + NextMode::Build => rcstr!("next/dist/client/next-turbopack.js"), + }, + )), + EcmaScriptModulesReferenceSubType::Undefined, false, None, ) @@ -646,8 +658,8 @@ impl PagesProject { struct PageEndpoint { ty: PageEndpointType, pages_project: ResolvedVc, - pathname: ResolvedVc, - original_name: ResolvedVc, + pathname: RcStr, + original_name: RcStr, page: ResolvedVc, pages_structure: ResolvedVc, } @@ -697,8 +709,8 @@ impl PageEndpoint { fn new( ty: PageEndpointType, pages_project: ResolvedVc, - pathname: ResolvedVc, - original_name: ResolvedVc, + pathname: RcStr, + original_name: RcStr, page: ResolvedVc, pages_structure: ResolvedVc, ) -> Vc { @@ -722,20 +734,19 @@ impl PageEndpoint { async fn client_module(self: Vc) -> Result>> { let this = self.await?; let page_loader = create_page_loader_entry_module( - this.pages_project.client_module_context(), + Vc::upcast(this.pages_project.client_module_context()), self.source(), - *this.pathname, + this.pathname.clone(), ); if matches!( *this.pages_project.project().next_mode().await?, NextMode::Development - ) { - if let Some(chunkable) = Vc::try_resolve_downcast(page_loader).await? 
{ - return Ok(Vc::upcast(HmrEntryModule::new( - AssetIdent::from_path(*this.page.await?.base_path), - chunkable, - ))); - } + ) && let Some(chunkable) = Vc::try_resolve_downcast(page_loader).await? + { + return Ok(Vc::upcast(HmrEntryModule::new( + AssetIdent::from_path(*this.page.await?.base_path), + chunkable, + ))); } Ok(page_loader) } @@ -780,6 +791,7 @@ impl PageEndpoint { let this = self.await?; let project = this.pages_project.project(); + let should_trace = project.next_mode().await?.is_production(); if *project.per_page_module_graph().await? { let ssr_chunk_module = self.internal_ssr_chunk_module().await?; // Implements layout segment optimization to compute a graph "chain" for document, app, @@ -796,6 +808,7 @@ impl PageEndpoint { let graph = SingleModuleGraph::new_with_entries_visited_intern( vec![ChunkGroupEntry::Shared(module)], visited_modules, + should_trace, ); graphs.push(graph); visited_modules = visited_modules.concatenate(graph); @@ -804,6 +817,7 @@ impl PageEndpoint { let graph = SingleModuleGraph::new_with_entries_visited_intern( vec![ChunkGroupEntry::Entry(vec![ssr_chunk_module.ssr_module])], visited_modules, + should_trace, ); graphs.push(graph); @@ -833,7 +847,7 @@ impl PageEndpoint { AssetIdent::from_path(*this.page.await?.base_path), ChunkGroup::Entry(evaluatable_assets), module_graph, - Value::new(AvailabilityInfo::Root), + AvailabilityInfo::Root, ); Ok(client_chunk_group) @@ -853,7 +867,7 @@ impl PageEndpoint { let client_relative_path = self.client_relative_path(); let page_loader = PageLoaderAsset::new( node_root, - *this.pathname, + this.pathname.clone(), client_relative_path, client_chunks, ); @@ -866,19 +880,19 @@ impl PageEndpoint { let (reference_type, project_root, module_context, edge_module_context) = match this.ty { PageEndpointType::Html | PageEndpointType::SsrOnly => ( - Value::new(ReferenceType::Entry(EntryReferenceSubType::Page)), + ReferenceType::Entry(EntryReferenceSubType::Page), this.pages_project.project().project_path(), this.pages_project.ssr_module_context(), this.pages_project.edge_ssr_module_context(), ), PageEndpointType::Data => ( - Value::new(ReferenceType::Entry(EntryReferenceSubType::Page)), + ReferenceType::Entry(EntryReferenceSubType::Page), this.pages_project.project().project_path(), this.pages_project.ssr_data_module_context(), this.pages_project.edge_ssr_data_module_context(), ), PageEndpointType::Api => ( - Value::new(ReferenceType::Entry(EntryReferenceSubType::PagesApi)), + ReferenceType::Entry(EntryReferenceSubType::PagesApi), this.pages_project.project().project_path(), this.pages_project.api_module_context(), this.pages_project.edge_api_module_context(), @@ -890,46 +904,50 @@ impl PageEndpoint { .module(); let config = parse_config_from_source(ssr_module, NextRuntime::default()).await?; - Ok(if config.runtime == NextRuntime::Edge { - let modules = create_page_ssr_entry_module( - *this.pathname, - reference_type, - project_root, - Vc::upcast(edge_module_context), - self.source(), - *this.original_name, - *this.pages_structure, - config.runtime, - this.pages_project.project().next_config(), - ) - .await?; - - InternalSsrChunkModule { - ssr_module: modules.ssr_module, - app_module: modules.app_module, - document_module: modules.document_module, - runtime: config.runtime, - } - } else { - let pathname = &**this.pathname.await?; + Ok( // `/_app` and `/_document` never get rendered directly so they don't need to be - // wrapped in the route module. 
- if pathname == "/_app" || pathname == "/_document" { + // wrapped in the route module, and don't need to be handled as edge runtime as the + // rendering for edge is part of the page bundle. + if this.pathname == "/_app" || this.pathname == "/_document" { InternalSsrChunkModule { ssr_module: ssr_module.to_resolved().await?, app_module: None, document_module: None, + // /_app and /_document are always rendered for Node.js for this case. For edge + // they're included in the page bundle. + runtime: NextRuntime::NodeJs, + regions: config.regions.clone(), + } + } else if config.runtime == NextRuntime::Edge { + let modules = create_page_ssr_entry_module( + this.pathname.clone(), + reference_type, + project_root, + Vc::upcast(edge_module_context), + self.source(), + this.original_name.clone(), + *this.pages_structure, + config.runtime, + this.pages_project.project().next_config(), + ) + .await?; + + InternalSsrChunkModule { + ssr_module: modules.ssr_module, + app_module: modules.app_module, + document_module: modules.document_module, runtime: config.runtime, + regions: config.regions.clone(), } } else { let modules = create_page_ssr_entry_module( - *this.pathname, + this.pathname.clone(), reference_type, project_root, Vc::upcast(module_context), self.source(), - *this.original_name, + this.original_name.clone(), *this.pages_structure, config.runtime, this.pages_project.project().next_config(), @@ -940,10 +958,11 @@ impl PageEndpoint { app_module: modules.app_module, document_module: modules.document_module, runtime: config.runtime, + regions: config.regions.clone(), } } - } - .cell()) + .cell(), + ) } #[turbo_tasks::function] @@ -964,6 +983,7 @@ impl PageEndpoint { app_module, document_module, runtime, + ref regions, } = *self.internal_ssr_chunk_module().await?; let project = this.pages_project.project(); @@ -981,6 +1001,33 @@ impl PageEndpoint { client_module_graph, *project.per_page_module_graph().await?, ); + + // We only validate the global css imports when there is not a `app` folder at the + // root of the project. + if project.app_project().await?.is_none() { + // We recreate the app_module here because the one provided from the + // `internal_ssr_chunk_module` is not the same as the one + // provided from the `client_module_graph`. There can be cases where + // the `app_module` is None, and we are processing the `pages/_app.js` file + // as a page rather than the app module. + let app_module = project + .pages_project() + .client_module_context() + .process( + Vc::upcast(FileSource::new( + this.pages_structure.await?.app.file_path(), + )), + ReferenceType::Entry(EntryReferenceSubType::Page), + ) + .to_resolved() + .await? 
+ .module(); + + reduced_graphs + .validate_pages_css_imports(self.client_module(), app_module) + .await?; + } + let next_dynamic_imports = reduced_graphs .get_next_dynamic_imports_for_endpoint(self.client_module()) .await?; @@ -1023,7 +1070,7 @@ impl PageEndpoint { layout.ident(), ChunkGroup::Shared(layout), ssr_module_graph, - Value::new(current_availability_info), + current_availability_info, ) .await?; @@ -1053,16 +1100,17 @@ impl PageEndpoint { ssr_module.ident(), ChunkGroup::Entry(evaluatable_assets.collect()), ssr_module_graph, - Value::new(current_availability_info), + current_availability_info, ); Ok(SsrChunk::Edge { files: current_chunks.concatenate(edge_files).to_resolved().await?, dynamic_import_entries, + regions: regions.clone(), } .cell()) } else { - let pathname = &**this.pathname.await?; + let pathname = &this.pathname; let asset_path = get_asset_path_from_pathname(pathname, ".js"); @@ -1074,7 +1122,7 @@ impl PageEndpoint { runtime_entries.with_entry(*ssr_module_evaluatable), ssr_module_graph, current_chunks, - Value::new(current_availability_info), + current_availability_info, ) .to_resolved() .await?; @@ -1092,6 +1140,7 @@ impl PageEndpoint { ResolvedVc::cell(Some(ResolvedVc::upcast( NftJsonAsset::new( project, + Some(this.original_name.clone()), *ssr_entry_chunk, loadable_manifest_output .await? @@ -1131,7 +1180,7 @@ impl PageEndpoint { this.pages_project .project() .node_root() - .join("server".into()), + .join(rcstr!("server")), project.server_chunking_context(true), project.edge_chunking_context(true), this.pages_project.ssr_runtime_entries(), @@ -1147,7 +1196,7 @@ impl PageEndpoint { this.pages_project .project() .node_root() - .join("server/data".into()), + .join(rcstr!("server/data")), this.pages_project.project().server_chunking_context(true), this.pages_project.project().edge_chunking_context(true), this.pages_project.ssr_data_runtime_entries(), @@ -1163,7 +1212,7 @@ impl PageEndpoint { this.pages_project .project() .node_root() - .join("server".into()), + .join(rcstr!("server")), this.pages_project.project().server_chunking_context(false), this.pages_project.project().edge_chunking_context(false), this.pages_project.ssr_runtime_entries(), @@ -1180,17 +1229,17 @@ impl PageEndpoint { let chunk_path = entry_chunk.path().await?; let asset_path = node_root - .join("server".into()) + .join(rcstr!("server")) .await? 
.get_path_to(&chunk_path) .context("ssr chunk entry path must be inside the node root")?; let pages_manifest = PagesManifest { - pages: [(self.pathname.owned().await?, asset_path.into())] + pages: [(self.pathname.clone(), asset_path.into())] .into_iter() .collect(), }; - let manifest_path_prefix = get_asset_prefix_from_pathname(&self.pathname.await?); + let manifest_path_prefix = get_asset_prefix_from_pathname(&self.pathname); let asset = Vc::upcast(VirtualOutputAsset::new( node_root .join(format!("server/pages{manifest_path_prefix}/pages-manifest.json",).into()), @@ -1200,14 +1249,14 @@ impl PageEndpoint { } #[turbo_tasks::function] - async fn react_loadable_manifest( + fn react_loadable_manifest( &self, dynamic_import_entries: Vc, runtime: NextRuntime, ) -> Result> { let node_root = self.pages_project.project().node_root(); let client_relative_path = self.pages_project.project().client_relative_path(); - let loadable_path_prefix = get_asset_prefix_from_pathname(&self.pathname.await?); + let loadable_path_prefix = get_asset_prefix_from_pathname(&self.pathname); Ok(create_react_loadable_manifest( dynamic_import_entries, client_relative_path, @@ -1225,10 +1274,10 @@ impl PageEndpoint { let node_root = self.pages_project.project().node_root(); let client_relative_path = self.pages_project.project().client_relative_path(); let build_manifest = BuildManifest { - pages: fxindexmap!(self.pathname.owned().await? => client_chunks), + pages: fxindexmap!(self.pathname.clone() => client_chunks), ..Default::default() }; - let manifest_path_prefix = get_asset_prefix_from_pathname(&self.pathname.await?); + let manifest_path_prefix = get_asset_prefix_from_pathname(&self.pathname); Ok(Vc::upcast( build_manifest .build_output( @@ -1264,12 +1313,12 @@ impl PageEndpoint { }; let emit_manifests = !matches!(this.ty, PageEndpointType::Data); - let pathname = this.pathname.owned().await?; - let original_name = &*this.original_name.await?; + let pathname = &this.pathname; + let original_name = &this.original_name; let client_assets = OutputAssets::new(client_assets).to_resolved().await?; - let manifest_path_prefix = get_asset_prefix_from_pathname(&pathname); + let manifest_path_prefix = get_asset_prefix_from_pathname(pathname); let node_root = this.pages_project.project().node_root(); let next_font_manifest_output = create_font_manifest( this.pages_project.project().client_root(), @@ -1277,7 +1326,7 @@ impl PageEndpoint { this.pages_project.pages_dir(), original_name, &manifest_path_prefix, - &pathname, + pathname, *client_assets, false, ) @@ -1333,6 +1382,7 @@ impl PageEndpoint { SsrChunk::Edge { files, dynamic_import_entries, + ref regions, } => { let node_root = this.pages_project.project().node_root(); if emit_manifests { @@ -1353,11 +1403,11 @@ impl PageEndpoint { // // they are created in `setup-dev-bundler.ts` let mut file_paths_from_root = vec![ - "server/server-reference-manifest.js".into(), - "server/middleware-build-manifest.js".into(), - "server/next-font-manifest.js".into(), + rcstr!("server/server-reference-manifest.js"), + rcstr!("server/middleware-build-manifest.js"), + rcstr!("server/next-font-manifest.js"), ]; - let mut wasm_paths_from_root = vec![]; + let mut wasm_paths_from_root = fxindexset![]; let node_root_value = node_root.await?; @@ -1377,20 +1427,31 @@ impl PageEndpoint { let all_assets = get_asset_paths_from_root(&node_root_value, &all_output_assets).await?; - let named_regex = get_named_middleware_regex(&pathname).into(); + let named_regex = get_named_middleware_regex(pathname).into(); 
let matchers = MiddlewareMatcher { regexp: Some(named_regex), original_source: pathname.clone(), ..Default::default() }; - let original_name = this.original_name.owned().await?; + let regions = if let Some(regions) = regions.as_ref() { + if regions.len() == 1 { + regions + .first() + .map(|region| Regions::Single(region.clone())) + } else { + Some(Regions::Multiple(regions.clone())) + } + } else { + None + }; + let edge_function_definition = EdgeFunctionDefinition { files: file_paths_from_root, - wasm: wasm_paths_to_bindings(wasm_paths_from_root), + wasm: wasm_paths_to_bindings(wasm_paths_from_root).await?, assets: paths_to_bindings(all_assets), name: pathname.clone(), - page: original_name.clone(), - regions: None, + page: this.original_name.clone(), + regions, matchers: vec![matchers], env: this.pages_project.project().edge_env().owned().await?, }; @@ -1401,8 +1462,7 @@ impl PageEndpoint { .collect(), ..Default::default() }; - let manifest_path_prefix = - get_asset_prefix_from_pathname(&this.pathname.await?); + let manifest_path_prefix = get_asset_prefix_from_pathname(&this.pathname); let middleware_manifest_v2 = VirtualOutputAsset::new( node_root.join( format!("server/pages{manifest_path_prefix}/middleware-manifest.json") @@ -1449,16 +1509,17 @@ pub struct InternalSsrChunkModule { pub app_module: Option>>, pub document_module: Option>>, pub runtime: NextRuntime, + pub regions: Option>, } #[turbo_tasks::value_impl] impl Endpoint for PageEndpoint { #[turbo_tasks::function] async fn output(self: ResolvedVc) -> Result> { - let this = self.await?; - let original_name = this.original_name.await?; + let this = &*self.await?; + let original_name = &this.original_name; let span = { - match this.ty { + match &this.ty { PageEndpointType::Html => { tracing::info_span!("page endpoint HTML", name = original_name.to_string()) } @@ -1638,5 +1699,6 @@ pub enum SsrChunk { Edge { files: ResolvedVc, dynamic_import_entries: ResolvedVc, + regions: Option>, }, } diff --git a/crates/next-api/src/paths.rs b/crates/next-api/src/paths.rs index de69f6ebffce0..fc0cc093a7959 100644 --- a/crates/next-api/src/paths.rs +++ b/crates/next-api/src/paths.rs @@ -3,12 +3,15 @@ use next_core::{all_assets_from_entries, next_manifests::AssetBinding}; use serde::{Deserialize, Serialize}; use tracing::Instrument; use turbo_rcstr::RcStr; -use turbo_tasks::{trace::TraceRawVcs, NonLocalValue, ResolvedVc, TryFlatJoinIterExt, Vc}; +use turbo_tasks::{ + NonLocalValue, ResolvedVc, TryFlatJoinIterExt, TryJoinIterExt, Vc, trace::TraceRawVcs, +}; use turbo_tasks_fs::FileSystemPath; use turbopack_core::{ asset::{Asset, AssetContent}, output::{OutputAsset, OutputAssets}, }; +use turbopack_wasm::wasm_edge_var_name; /// A reference to a server file with content hash for change detection #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, TraceRawVcs, NonLocalValue)] @@ -109,8 +112,23 @@ pub(crate) async fn get_js_paths_from_root( pub(crate) async fn get_wasm_paths_from_root( root: &FileSystemPath, output_assets: impl IntoIterator>>, -) -> Result> { - get_paths_from_root(root, output_assets, |path| path.ends_with(".wasm")).await +) -> Result>)>> { + output_assets + .into_iter() + .map(move |&file| async move { + let path = &*file.path().await?; + let Some(relative) = root.get_path_to(path) else { + return Ok(None); + }; + + Ok(if relative.ends_with(".wasm") { + Some((relative.into(), file)) + } else { + None + }) + }) + .try_flat_join() + .await } pub(crate) async fn get_asset_paths_from_root( @@ -137,42 +155,19 @@ pub(crate) 
async fn get_font_paths_from_root( .await } -fn get_file_stem(path: &str) -> &str { - let file_name = if let Some((_, file_name)) = path.rsplit_once('/') { - file_name - } else { - path - }; - - if let Some((stem, _)) = file_name.split_once('.') { - if stem.is_empty() { - file_name - } else { - stem - } - } else { - file_name - } -} - -pub(crate) fn wasm_paths_to_bindings(paths: Vec) -> Vec { +pub(crate) async fn wasm_paths_to_bindings( + paths: impl IntoIterator>)>, +) -> Result> { paths .into_iter() - .map(|path| { - let stem = get_file_stem(&path); - - // very simple escaping just replacing unsupported characters with `_` - let escaped = stem.replace( - |c: char| !c.is_ascii_alphanumeric() && c != '$' && c != '_', - "_", - ); - - AssetBinding { - name: format!("wasm_{}", escaped).into(), + .map(async |(path, asset)| { + Ok(AssetBinding { + name: wasm_edge_var_name(Vc::upcast(*asset)).owned().await?, file_path: path, - } + }) }) - .collect() + .try_join() + .await } pub(crate) fn paths_to_bindings(paths: Vec) -> Vec { diff --git a/crates/next-api/src/project.rs b/crates/next-api/src/project.rs index 50a1e392cb197..d8115e32e20bd 100644 --- a/crates/next-api/src/project.rs +++ b/crates/next-api/src/project.rs @@ -1,6 +1,6 @@ use std::{path::MAIN_SEPARATOR, time::Duration}; -use anyhow::{bail, Context, Result}; +use anyhow::{Context, Result, bail}; use indexmap::map::Entry; use next_core::{ all_assets_from_entries, @@ -13,64 +13,68 @@ use next_core::{ next_client::{get_client_chunking_context, get_client_compile_time_info}, next_config::{JsConfig, ModuleIds as ModuleIdStrategyConfig, NextConfig}, next_server::{ - get_server_chunking_context, get_server_chunking_context_with_client_assets, - get_server_compile_time_info, get_server_module_options_context, - get_server_resolve_options_context, ServerContextType, + ServerContextType, get_server_chunking_context, + get_server_chunking_context_with_client_assets, get_server_compile_time_info, + get_server_module_options_context, get_server_resolve_options_context, }, next_telemetry::NextFeatureTelemetry, - util::{parse_config_from_source, NextRuntime}, + util::{NextRuntime, parse_config_from_source}, }; use serde::{Deserialize, Serialize}; use tracing::Instrument; -use turbo_rcstr::RcStr; +use turbo_rcstr::{RcStr, rcstr}; use turbo_tasks::{ + Completion, Completions, FxIndexMap, IntoTraitRef, NonLocalValue, OperationValue, OperationVc, + ReadRef, ResolvedVc, State, TaskInput, TransientInstance, TryFlatJoinIterExt, Vc, debug::ValueDebugFormat, fxindexmap, graph::{AdjacencyMap, GraphTraversal}, mark_root, trace::TraceRawVcs, - Completion, Completions, FxIndexMap, IntoTraitRef, NonLocalValue, OperationValue, OperationVc, - ReadRef, ResolvedVc, State, TaskInput, TransientInstance, TryFlatJoinIterExt, Value, Vc, }; use turbo_tasks_env::{EnvMap, ProcessEnv}; -use turbo_tasks_fs::{DiskFileSystem, FileSystem, FileSystemPath, VirtualFileSystem}; +use turbo_tasks_fs::{ + DiskFileSystem, FileSystem, FileSystemPath, VirtualFileSystem, get_relative_path_to, + invalidation, +}; use turbopack::{ - evaluate_context::node_build_environment, global_module_ids::get_global_module_id_strategy, - transition::TransitionOptions, ModuleAssetContext, + ModuleAssetContext, evaluate_context::node_build_environment, + global_module_ids::get_global_module_id_strategy, transition::TransitionOptions, }; use turbopack_core::{ + PROJECT_FILESYSTEM_NAME, changed::content_changed, chunk::{ - module_id_strategies::{DevModuleIdStrategy, ModuleIdStrategy}, ChunkingContext, 
EvaluatableAssets, SourceMapsType, + module_id_strategies::{DevModuleIdStrategy, ModuleIdStrategy}, }, compile_time_info::CompileTimeInfo, context::AssetContext, diagnostics::DiagnosticExt, file_source::FileSource, + ident::Layer, issue::{ Issue, IssueDescriptionExt, IssueExt, IssueSeverity, IssueStage, OptionStyledString, StyledString, }, module::Module, module_graph::{ - chunk_group_info::ChunkGroupEntry, GraphEntries, ModuleGraph, SingleModuleGraph, - VisitedModules, + GraphEntries, ModuleGraph, SingleModuleGraph, VisitedModules, + chunk_group_info::ChunkGroupEntry, }, output::{OutputAsset, OutputAssets}, reference_type::{EntryReferenceSubType, ReferenceType}, - resolve::{find_context_file, FindContextFileResult}, + resolve::{FindContextFileResult, find_context_file}, source_map::OptionStringifiedSourceMap, version::{ NotFoundVersion, OptionVersionedContent, Update, Version, VersionState, VersionedContent, }, - PROJECT_FILESYSTEM_NAME, }; use turbopack_node::execution_context::ExecutionContext; use turbopack_nodejs::NodeJsChunkingContext; use crate::{ - app::{AppProject, OptionAppProject, ECMASCRIPT_CLIENT_TRANSITION_NAME}, + app::{AppProject, OptionAppProject}, empty::EmptyEndpoint, entrypoints::Entrypoints, instrumentation::InstrumentationEndpoint, @@ -266,7 +270,7 @@ pub struct ProjectContainer { #[turbo_tasks::value_impl] impl ProjectContainer { #[turbo_tasks::function] - pub async fn new(name: RcStr, dev: bool) -> Result> { + pub fn new(name: RcStr, dev: bool) -> Result> { Ok(ProjectContainer { name, // we only need to enable versioning in dev mode, since build @@ -308,12 +312,16 @@ impl ProjectContainer { .start_watching_with_invalidation_reason(watch.poll_interval) .await?; } else { - project_fs.invalidate_with_reason(); + project_fs.invalidate_with_reason(|path| invalidation::Initialize { + path: RcStr::from(path), + }); } let output_fs = output_fs_operation(project) .read_strongly_consistent() .await?; - output_fs.invalidate_with_reason(); + output_fs.invalidate_with_reason(|path| invalidation::Initialize { + path: RcStr::from(path), + }); Ok(()) } @@ -402,11 +410,15 @@ impl ProjectContainer { .start_watching_with_invalidation_reason(watch.poll_interval) .await?; } else { - project_fs.invalidate_with_reason(); + project_fs.invalidate_with_reason(|path| invalidation::Initialize { + path: RcStr::from(path), + }); } } if !ReadRef::ptr_eq(&prev_output_fs, &output_fs) { - prev_output_fs.invalidate_with_reason(); + prev_output_fs.invalidate_with_reason(|path| invalidation::Initialize { + path: RcStr::from(path), + }); } Ok(()) @@ -415,7 +427,7 @@ impl ProjectContainer { #[turbo_tasks::value_impl] impl ProjectContainer { - #[turbo_tasks::function] + #[turbo_tasks::function(invalidator)] pub async fn project(&self) -> Result> { let env_map: Vc; let next_config; @@ -459,7 +471,7 @@ impl ProjectContainer { .await? 
.dist_dir .as_ref() - .map_or_else(|| ".next".into(), |d| d.clone()); + .map_or_else(|| rcstr!(".next"), |d| d.clone()); Ok(Project { root_path, @@ -590,7 +602,7 @@ struct ConflictIssue { path: ResolvedVc, title: ResolvedVc, description: ResolvedVc, - severity: ResolvedVc, + severity: IssueSeverity, } #[turbo_tasks::value_impl] @@ -600,9 +612,8 @@ impl Issue for ConflictIssue { IssueStage::AppStructure.cell() } - #[turbo_tasks::function] - fn severity(&self) -> Vc { - *self.severity + fn severity(&self) -> IssueSeverity { + self.severity } #[turbo_tasks::function] @@ -649,13 +660,13 @@ impl Project { #[turbo_tasks::function] pub fn client_fs(self: Vc) -> Vc> { - let virtual_fs = VirtualFileSystem::new_with_name("client-fs".into()); + let virtual_fs = VirtualFileSystem::new_with_name(rcstr!("client-fs")); Vc::upcast(virtual_fs) } #[turbo_tasks::function] pub fn output_fs(&self) -> Vc { - DiskFileSystem::new("output".into(), self.project_path.clone(), vec![]) + DiskFileSystem::new(rcstr!("output"), self.root_path.clone(), vec![]) } #[turbo_tasks::function] @@ -666,7 +677,13 @@ impl Project { #[turbo_tasks::function] pub async fn node_root(self: Vc) -> Result> { let this = self.await?; - Ok(self.output_fs().root().join(this.dist_dir.clone())) + let relative_from_root_to_project_path = + get_relative_path_to(&this.root_path, &this.project_path); + Ok(self + .output_fs() + .root() + .join(relative_from_root_to_project_path.into()) + .join(this.dist_dir.clone())) } #[turbo_tasks::function] @@ -685,7 +702,7 @@ impl Project { Ok(self.client_root().join( format!( "{}/_next", - next_config.base_path.clone().unwrap_or_else(|| "".into()), + next_config.base_path.clone().unwrap_or_default(), ) .into(), )) @@ -730,6 +747,11 @@ impl Project { *self.mode } + #[turbo_tasks::function] + pub(super) fn is_watch_enabled(&self) -> Result> { + Ok(Vc::cell(self.watch.enable)) + } + #[turbo_tasks::function] pub(super) async fn per_page_module_graph(&self) -> Result> { Ok(Vc::cell(*self.mode.await? == NextMode::Development)) @@ -753,7 +775,7 @@ impl Project { #[turbo_tasks::function] pub(super) async fn should_create_webpack_stats(&self) -> Result> { Ok(Vc::cell( - self.env.read("TURBOPACK_STATS".into()).await?.is_some(), + self.env.read(rcstr!("TURBOPACK_STATS")).await?.is_some(), )) } @@ -766,10 +788,10 @@ impl Project { NodeJsChunkingContext::builder( self.project_root_path().to_resolved().await?, node_root, - self.node_root_to_root_path().to_resolved().await?, + self.node_root_to_root_path().owned().await?, node_root, - node_root.join("build/chunks".into()).to_resolved().await?, - node_root.join("build/assets".into()).to_resolved().await?, + node_root.join(rcstr!("build/chunks")).to_resolved().await?, + node_root.join(rcstr!("build/assets")).to_resolved().await?, node_build_environment().to_resolved().await?, next_mode.runtime_type(), ) @@ -888,7 +910,7 @@ impl Project { entry: ResolvedVc>, ) -> Result> { Ok(if *self.per_page_module_graph().await? 
{ - ModuleGraph::from_entry_module(*entry) + ModuleGraph::from_entry_module(*entry, self.next_mode().await?.is_production()) } else { *self.whole_app_module_graphs().await?.full }) @@ -906,7 +928,10 @@ impl Project { .copied() .map(ResolvedVc::upcast) .collect(); - ModuleGraph::from_modules(Vc::cell(vec![ChunkGroupEntry::Entry(entries)])) + ModuleGraph::from_modules( + Vc::cell(vec![ChunkGroupEntry::Entry(entries)]), + self.next_mode().await?.is_production(), + ) } else { *self.whole_app_module_graphs().await?.full }) @@ -918,7 +943,7 @@ impl Project { entries: Vc, ) -> Result> { Ok(if *self.per_page_module_graph().await? { - ModuleGraph::from_modules(entries) + ModuleGraph::from_modules(entries, self.next_mode().await?.is_production()) } else { *self.whole_app_module_graphs().await?.full }) @@ -933,7 +958,7 @@ impl Project { // At this point all modules have been computed and we can get rid of the node.js // process pools - if self.await?.watch.enable { + if *self.is_watch_enabled().await? { turbopack_node::evaluate::scale_down(); } else { turbopack_node::evaluate::scale_zero(); @@ -962,27 +987,30 @@ impl Project { Ok(get_edge_compile_time_info( self.project_path(), this.define_env.edge(), + self.env(), )) } #[turbo_tasks::function] pub(super) fn edge_env(&self) -> Vc { let edge_env = fxindexmap! { - "__NEXT_BUILD_ID".into() => self.build_id.clone(), - "NEXT_SERVER_ACTIONS_ENCRYPTION_KEY".into() => self.encryption_key.clone(), - "__NEXT_PREVIEW_MODE_ID".into() => self.preview_props.preview_mode_id.clone(), - "__NEXT_PREVIEW_MODE_ENCRYPTION_KEY".into() => self.preview_props.preview_mode_encryption_key.clone(), - "__NEXT_PREVIEW_MODE_SIGNING_KEY".into() => self.preview_props.preview_mode_signing_key.clone(), + rcstr!("__NEXT_BUILD_ID") => self.build_id.clone(), + rcstr!("NEXT_SERVER_ACTIONS_ENCRYPTION_KEY") => self.encryption_key.clone(), + rcstr!("__NEXT_PREVIEW_MODE_ID") => self.preview_props.preview_mode_id.clone(), + rcstr!("__NEXT_PREVIEW_MODE_ENCRYPTION_KEY") => self.preview_props.preview_mode_encryption_key.clone(), + rcstr!("__NEXT_PREVIEW_MODE_SIGNING_KEY") => self.preview_props.preview_mode_signing_key.clone(), }; Vc::cell(edge_env) } #[turbo_tasks::function] - pub(super) fn client_chunking_context(self: Vc) -> Vc> { - get_client_chunking_context( + pub(super) async fn client_chunking_context( + self: Vc, + ) -> Result>> { + Ok(get_client_chunking_context( self.project_root_path(), self.client_relative_path(), - Vc::cell("/ROOT".into()), + rcstr!("/ROOT"), self.next_config().computed_asset_prefix(), self.next_config().chunk_suffix_path(), self.client_compile_time_info().environment(), @@ -991,49 +1019,52 @@ impl Project { self.next_config().turbo_minify(self.next_mode()), self.next_config().client_source_maps(self.next_mode()), self.no_mangling(), - ) + self.next_config().turbo_scope_hoisting(self.next_mode()), + )) } #[turbo_tasks::function] - pub(super) fn server_chunking_context( + pub(super) async fn server_chunking_context( self: Vc, client_assets: bool, - ) -> Vc { - if client_assets { + ) -> Result> { + Ok(if client_assets { get_server_chunking_context_with_client_assets( self.next_mode(), self.project_root_path(), self.node_root(), - self.node_root_to_root_path(), + self.node_root_to_root_path().owned().await?, self.client_relative_path(), - self.next_config().computed_asset_prefix(), + self.next_config().computed_asset_prefix().owned().await?, self.server_compile_time_info().environment(), self.module_ids(), self.next_config().turbo_minify(self.next_mode()), 
self.next_config().server_source_maps(), self.no_mangling(), + self.next_config().turbo_scope_hoisting(self.next_mode()), ) } else { get_server_chunking_context( self.next_mode(), self.project_root_path(), self.node_root(), - self.node_root_to_root_path(), + self.node_root_to_root_path().owned().await?, self.server_compile_time_info().environment(), self.module_ids(), self.next_config().turbo_minify(self.next_mode()), self.next_config().server_source_maps(), self.no_mangling(), + self.next_config().turbo_scope_hoisting(self.next_mode()), ) - } + }) } #[turbo_tasks::function] - pub(super) fn edge_chunking_context( + pub(super) async fn edge_chunking_context( self: Vc, client_assets: bool, - ) -> Vc> { - if client_assets { + ) -> Result>> { + Ok(if client_assets { get_edge_chunking_context_with_client_assets( self.next_mode(), self.project_root_path(), @@ -1046,6 +1077,7 @@ impl Project { self.next_config().turbo_minify(self.next_mode()), self.next_config().server_source_maps(), self.no_mangling(), + self.next_config().turbo_scope_hoisting(self.next_mode()), ) } else { get_edge_chunking_context( @@ -1058,8 +1090,9 @@ impl Project { self.next_config().turbo_minify(self.next_mode()), self.next_config().server_source_maps(), self.no_mangling(), + self.next_config().turbo_scope_hoisting(self.next_mode()), ) - } + }) } #[turbo_tasks::function] @@ -1180,7 +1213,7 @@ impl Project { ConflictIssue { path: self.project_path().to_resolved().await?, title: StyledString::Text( - format!("App Router and Pages Router both match path: {}", pathname) + format!("App Router and Pages Router both match path: {pathname}") .into(), ) .resolved_cell(), @@ -1191,7 +1224,7 @@ impl Project { .into(), ) .resolved_cell(), - severity: IssueSeverity::Error.resolved_cell(), + severity: IssueSeverity::Error, } .resolved_cell() .emit(); @@ -1248,14 +1281,12 @@ impl Project { let app_dir = *find_app_dir(self.project_path()).await?; let app_project = *self.app_project().await?; - let ecmascript_client_reference_transition_name = match app_project { - Some(app_project) => Some(app_project.client_transition_name().to_resolved().await?), - None => None, - }; + let ecmascript_client_reference_transition_name = + app_project.map(|_| AppProject::client_transition_name()); if let Some(app_project) = app_project { transitions.push(( - ECMASCRIPT_CLIENT_TRANSITION_NAME.into(), + AppProject::client_transition_name(), app_project .edge_ecmascript_client_reference_transition() .to_resolved() @@ -1273,26 +1304,32 @@ impl Project { get_server_module_options_context( self.project_path(), self.execution_context(), - Value::new(ServerContextType::Middleware { + ServerContextType::Middleware { app_dir, - ecmascript_client_reference_transition_name, - }), + ecmascript_client_reference_transition_name: + ecmascript_client_reference_transition_name.clone(), + }, self.next_mode(), self.next_config(), NextRuntime::Edge, self.encryption_key(), + self.edge_compile_time_info().environment(), ), get_edge_resolve_options_context( self.project_path(), - Value::new(ServerContextType::Middleware { + ServerContextType::Middleware { app_dir, - ecmascript_client_reference_transition_name, - }), + ecmascript_client_reference_transition_name: + ecmascript_client_reference_transition_name.clone(), + }, self.next_mode(), self.next_config(), self.execution_context(), ), - Vc::cell("middleware-edge".into()), + Layer::new_with_user_friendly_name( + rcstr!("middleware-edge"), + rcstr!("Edge Middleware"), + ), ))) } @@ -1303,14 +1340,12 @@ impl Project { let app_dir = 
*find_app_dir(self.project_path()).await?; let app_project = *self.app_project().await?; - let ecmascript_client_reference_transition_name = match app_project { - Some(app_project) => Some(app_project.client_transition_name().to_resolved().await?), - None => None, - }; + let ecmascript_client_reference_transition_name = + app_project.map(|_| AppProject::client_transition_name()); if let Some(app_project) = app_project { transitions.push(( - ECMASCRIPT_CLIENT_TRANSITION_NAME.into(), + AppProject::client_transition_name(), app_project .edge_ecmascript_client_reference_transition() .to_resolved() @@ -1328,26 +1363,28 @@ impl Project { get_server_module_options_context( self.project_path(), self.execution_context(), - Value::new(ServerContextType::Middleware { + ServerContextType::Middleware { app_dir, - ecmascript_client_reference_transition_name, - }), + ecmascript_client_reference_transition_name: + ecmascript_client_reference_transition_name.clone(), + }, self.next_mode(), self.next_config(), NextRuntime::NodeJs, self.encryption_key(), + self.server_compile_time_info().environment(), ), get_server_resolve_options_context( self.project_path(), - Value::new(ServerContextType::Middleware { + ServerContextType::Middleware { app_dir, ecmascript_client_reference_transition_name, - }), + }, self.next_mode(), self.next_config(), self.execution_context(), ), - Vc::cell("middleware".into()), + Layer::new_with_user_friendly_name(rcstr!("middleware"), rcstr!("Middleware")), ))) } @@ -1364,7 +1401,7 @@ impl Project { let module = edge_module_context .process( source, - Value::new(ReferenceType::Entry(EntryReferenceSubType::Middleware)), + ReferenceType::Entry(EntryReferenceSubType::Middleware), ) .module(); @@ -1395,13 +1432,12 @@ impl Project { let app_dir = *find_app_dir(self.project_path()).await?; let ecmascript_client_reference_transition_name = (*self.app_project().await?) 
.as_ref() - .map(|app_project| app_project.client_transition_name()); + .map(|_| AppProject::client_transition_name()); let middleware_asset_context = self.middleware_context(); Ok(Vc::upcast(MiddlewareEndpoint::new( self, - self.await?.build_id.clone(), middleware_asset_context, source, app_dir.as_deref().copied(), @@ -1416,14 +1452,13 @@ impl Project { let app_dir = *find_app_dir(self.project_path()).await?; let app_project = &*self.app_project().await?; - let ecmascript_client_reference_transition_name = match app_project { - Some(app_project) => Some(app_project.client_transition_name().to_resolved().await?), - None => None, - }; + let ecmascript_client_reference_transition_name = app_project + .as_ref() + .map(|_| AppProject::client_transition_name()); if let Some(app_project) = app_project { transitions.push(( - ECMASCRIPT_CLIENT_TRANSITION_NAME.into(), + AppProject::client_transition_name(), app_project .ecmascript_client_reference_transition() .to_resolved() @@ -1441,26 +1476,31 @@ impl Project { get_server_module_options_context( self.project_path(), self.execution_context(), - Value::new(ServerContextType::Instrumentation { + ServerContextType::Instrumentation { app_dir, - ecmascript_client_reference_transition_name, - }), + ecmascript_client_reference_transition_name: + ecmascript_client_reference_transition_name.clone(), + }, self.next_mode(), self.next_config(), NextRuntime::NodeJs, self.encryption_key(), + self.server_compile_time_info().environment(), ), get_server_resolve_options_context( self.project_path(), - Value::new(ServerContextType::Instrumentation { + ServerContextType::Instrumentation { app_dir, ecmascript_client_reference_transition_name, - }), + }, self.next_mode(), self.next_config(), self.execution_context(), ), - Vc::cell("instrumentation".into()), + Layer::new_with_user_friendly_name( + rcstr!("instrumentation"), + rcstr!("Instrumentation"), + ), ))) } @@ -1471,14 +1511,13 @@ impl Project { let app_dir = *find_app_dir(self.project_path()).await?; let app_project = &*self.app_project().await?; - let ecmascript_client_reference_transition_name = match app_project { - Some(app_project) => Some(app_project.client_transition_name().to_resolved().await?), - None => None, - }; + let ecmascript_client_reference_transition_name = app_project + .as_ref() + .map(|_| AppProject::client_transition_name()); if let Some(app_project) = app_project { transitions.push(( - ECMASCRIPT_CLIENT_TRANSITION_NAME.into(), + AppProject::client_transition_name(), app_project .edge_ecmascript_client_reference_transition() .to_resolved() @@ -1496,26 +1535,31 @@ impl Project { get_server_module_options_context( self.project_path(), self.execution_context(), - Value::new(ServerContextType::Instrumentation { + ServerContextType::Instrumentation { app_dir, - ecmascript_client_reference_transition_name, - }), + ecmascript_client_reference_transition_name: + ecmascript_client_reference_transition_name.clone(), + }, self.next_mode(), self.next_config(), NextRuntime::Edge, self.encryption_key(), + self.edge_compile_time_info().environment(), ), get_edge_resolve_options_context( self.project_path(), - Value::new(ServerContextType::Instrumentation { + ServerContextType::Instrumentation { app_dir, ecmascript_client_reference_transition_name, - }), + }, self.next_mode(), self.next_config(), self.execution_context(), ), - Vc::cell("instrumentation-edge".into()), + Layer::new_with_user_friendly_name( + rcstr!("instrumentation"), + rcstr!("Edge Instrumentation"), + ), ))) } @@ -1540,7 +1584,7 @@ impl 
Project { let app_dir = *find_app_dir(self.project_path()).await?; let ecmascript_client_reference_transition_name = (*self.app_project().await?) .as_ref() - .map(|app_project| app_project.client_transition_name()); + .map(|_| AppProject::client_transition_name()); let instrumentation_asset_context = if is_edge { self.edge_instrumentation_context() @@ -1710,16 +1754,7 @@ impl Project { /// Gets the module id strategy for the project. #[turbo_tasks::function] pub async fn module_ids(self: Vc) -> Result>> { - let module_id_strategy = - if let Some(module_id_strategy) = &*self.next_config().module_ids().await? { - *module_id_strategy - } else { - match *self.next_mode().await? { - NextMode::Development => ModuleIdStrategyConfig::Named, - NextMode::Build => ModuleIdStrategyConfig::Deterministic, - } - }; - + let module_id_strategy = *self.next_config().module_ids(self.next_mode()).await?; match module_id_strategy { ModuleIdStrategyConfig::Named => Ok(Vc::upcast(DevModuleIdStrategy::new())), ModuleIdStrategyConfig::Deterministic => { @@ -1739,14 +1774,20 @@ async fn whole_app_module_graph_operation( project: ResolvedVc, ) -> Result> { mark_root(); - let base_single_module_graph = SingleModuleGraph::new_with_entries(project.get_all_entries()); + + let should_trace = project.next_mode().await?.is_production(); + let base_single_module_graph = + SingleModuleGraph::new_with_entries(project.get_all_entries(), should_trace); let base_visited_modules = VisitedModules::from_graph(base_single_module_graph); let base = ModuleGraph::from_single_graph(base_single_module_graph); let additional_entries = project.get_all_additional_entries(base); - let additional_module_graph = - SingleModuleGraph::new_with_entries_visited(additional_entries, base_visited_modules); + let additional_module_graph = SingleModuleGraph::new_with_entries_visited( + additional_entries, + base_visited_modules, + should_trace, + ); let full = ModuleGraph::from_graphs(vec![base_single_module_graph, additional_module_graph]); Ok(ModuleGraphs { @@ -1806,7 +1847,7 @@ async fn get_referenced_output_assets( } #[turbo_tasks::function(operation)] -async fn all_assets_from_entries_operation( +fn all_assets_from_entries_operation( operation: OperationVc, ) -> Result> { let assets = operation.connect(); diff --git a/crates/next-api/src/route.rs b/crates/next-api/src/route.rs index c6baa51efd465..dc55fa95066e5 100644 --- a/crates/next-api/src/route.rs +++ b/crates/next-api/src/route.rs @@ -2,8 +2,8 @@ use anyhow::Result; use serde::{Deserialize, Serialize}; use turbo_rcstr::RcStr; use turbo_tasks::{ - debug::ValueDebugFormat, trace::TraceRawVcs, Completion, FxIndexMap, NonLocalValue, - OperationVc, ResolvedVc, Vc, + Completion, FxIndexMap, NonLocalValue, OperationVc, ResolvedVc, Vc, debug::ValueDebugFormat, + trace::TraceRawVcs, }; use turbopack_core::{ module_graph::{GraphEntries, ModuleGraph}, @@ -49,14 +49,19 @@ pub enum Route { #[turbo_tasks::value_trait] pub trait Endpoint { + #[turbo_tasks::function] fn output(self: Vc) -> Vc; // fn write_to_disk(self: Vc) -> Vc; + #[turbo_tasks::function] fn server_changed(self: Vc) -> Vc; + #[turbo_tasks::function] fn client_changed(self: Vc) -> Vc; /// The entry modules for the modules graph. + #[turbo_tasks::function] fn entries(self: Vc) -> Vc; /// Additional entry modules for the module graph. /// This may read the module graph and return additional modules. 
+ #[turbo_tasks::function] fn additional_entries(self: Vc, _graph: Vc) -> Vc { GraphEntries::empty() } diff --git a/crates/next-api/src/server_actions.rs b/crates/next-api/src/server_actions.rs index 60762702402a7..e43219c8bd841 100644 --- a/crates/next-api/src/server_actions.rs +++ b/crates/next-api/src/server_actions.rs @@ -1,6 +1,6 @@ use std::{collections::BTreeMap, io::Write}; -use anyhow::{bail, Context, Result}; +use anyhow::{Context, Result, bail}; use next_core::{ next_manifests::{ ActionLayer, ActionManifestModuleId, ActionManifestWorkerEntry, ServerReferenceManifest, @@ -18,9 +18,9 @@ use swc_core::{ utils::find_pat_ids, }, }; -use turbo_rcstr::RcStr; -use turbo_tasks::{FxIndexMap, ResolvedVc, TryFlatJoinIterExt, Value, ValueToString, Vc}; -use turbo_tasks_fs::{self, rope::RopeBuilder, File, FileSystemPath}; +use turbo_rcstr::{RcStr, rcstr}; +use turbo_tasks::{FxIndexMap, ResolvedVc, TryFlatJoinIterExt, ValueToString, Vc}; +use turbo_tasks_fs::{self, File, FileSystemPath, rope::RopeBuilder}; use turbopack_core::{ asset::AssetContent, chunk::{ChunkItem, ChunkItemExt, ChunkableModule, ChunkingContext, EvaluatableAsset}, @@ -29,8 +29,8 @@ use turbopack_core::{ ident::AssetIdent, module::Module, module_graph::{ - async_module_info::AsyncModulesInfo, ModuleGraph, SingleModuleGraph, - SingleModuleGraphModuleNode, + ModuleGraph, SingleModuleGraph, SingleModuleGraphModuleNode, + async_module_info::AsyncModulesInfo, }, output::OutputAsset, reference_type::{EcmaScriptModulesReferenceSubType, ReferenceType}, @@ -39,8 +39,8 @@ use turbopack_core::{ virtual_source::VirtualSource, }; use turbopack_ecmascript::{ - chunk::EcmascriptChunkPlaceable, parse::ParseResult, - tree_shake::asset::EcmascriptModulePartAsset, EcmascriptParsable, + EcmascriptParsable, chunk::EcmascriptChunkPlaceable, parse::ParseResult, + tree_shake::asset::EcmascriptModulePartAsset, }; #[turbo_tasks::value] @@ -92,11 +92,6 @@ pub(crate) async fn create_server_actions_manifest( .cell()) } -#[turbo_tasks::function] -fn server_actions_loader_modifier() -> Vc { - Vc::cell("server actions loader".into()) -} - /// Builds the "action loader" entry point, which reexports every found action /// behind a lazy dynamic import. /// @@ -132,14 +127,14 @@ pub(crate) async fn build_server_actions_loader( let path = project_path.join(format!(".next-internal/server/app{page_name}/actions.js").into()); let file = File::from(contents.build()); let source = VirtualSource::new_with_ident( - AssetIdent::from_path(path).with_modifier(server_actions_loader_modifier()), + AssetIdent::from_path(path).with_modifier(rcstr!("server actions loader")), AssetContent::file(file.into()), ); let import_map = import_map.into_iter().map(|(k, v)| (v, k)).collect(); let module = asset_context .process( Vc::upcast(source), - Value::new(ReferenceType::Internal(ResolvedVc::cell(import_map))), + ReferenceType::Internal(ResolvedVc::cell(import_map)), ) .module(); @@ -213,14 +208,12 @@ pub async fn to_rsc_context( // module. let source = FileSource::new_with_query( client_module.ident().path().root().join(entry_path.into()), - Vc::cell(entry_query.into()), + entry_query.into(), ); let module = asset_context .process( Vc::upcast(source), - Value::new(ReferenceType::EcmaScriptModules( - EcmaScriptModulesReferenceSubType::Undefined, - )), + ReferenceType::EcmaScriptModules(EcmaScriptModulesReferenceSubType::Undefined), ) .module() .to_resolved() @@ -260,13 +253,12 @@ async fn parse_actions(module: Vc>) -> Result(module).await? 
- { - if matches!( + && matches!( module.await?.part, ModulePart::Evaluation | ModulePart::Facade - ) { - return Ok(Vc::cell(None)); - } + ) + { + return Ok(Vc::cell(None)); } let original_parsed = ecmascript_asset.parse_original().resolve().await?; @@ -435,14 +427,13 @@ pub async fn map_server_actions(graph: Vc) -> Result { + let layer = match module.ident().await?.layer.as_ref() { + Some(layer) if layer.name() == "app-rsc" || layer.name() == "app-edge-rsc" => { ActionLayer::Rsc } - Some(layer) if &**layer == "app-client" => ActionLayer::ActionBrowser, + Some(layer) if layer.name() == "app-client" => ActionLayer::ActionBrowser, // TODO really ignore SSR? _ => return Ok(None), }; diff --git a/crates/next-api/src/versioned_content_map.rs b/crates/next-api/src/versioned_content_map.rs index ba58fbbaafb88..d449786a0237d 100644 --- a/crates/next-api/src/versioned_content_map.rs +++ b/crates/next-api/src/versioned_content_map.rs @@ -1,12 +1,11 @@ -use anyhow::{bail, Result}; +use anyhow::{Result, bail}; use next_core::emit_assets; use rustc_hash::{FxHashMap, FxHashSet}; use serde::{Deserialize, Serialize}; use turbo_rcstr::RcStr; use turbo_tasks::{ - debug::ValueDebugFormat, trace::TraceRawVcs, FxIndexSet, NonLocalValue, OperationValue, - OperationVc, ResolvedVc, State, TryFlatJoinIterExt, TryJoinIterExt, ValueDefault, - ValueToString, Vc, + FxIndexSet, NonLocalValue, OperationValue, OperationVc, ResolvedVc, State, TryFlatJoinIterExt, + TryJoinIterExt, ValueDefault, ValueToString, Vc, debug::ValueDebugFormat, trace::TraceRawVcs, }; use turbo_tasks_fs::FileSystemPath; use turbopack_core::{ @@ -212,16 +211,15 @@ impl VersionedContentMap { assets_operation: _, path_to_asset, }) = &*result + && let Some(&asset) = path_to_asset.get(&path) { - if let Some(&asset) = path_to_asset.get(&path) { - return Ok(Vc::cell(Some(asset))); - } + return Ok(Vc::cell(Some(asset))); } Ok(Vc::cell(None)) } - #[turbo_tasks::function] + #[turbo_tasks::function(invalidator)] pub async fn keys_in_path(&self, root: Vc) -> Result>> { let keys = { let map = &self.map_path_to_op.get().0; @@ -236,7 +234,7 @@ impl VersionedContentMap { Ok(Vc::cell(keys)) } - #[turbo_tasks::function] + #[turbo_tasks::function(invalidator)] fn raw_get(&self, path: ResolvedVc) -> Vc { let assets = { let map = &self.map_path_to_op.get().0; diff --git a/crates/next-build-test/Cargo.toml b/crates/next-build-test/Cargo.toml index 9b2c6bf0899f8..02cfcb2f993e1 100644 --- a/crates/next-build-test/Cargo.toml +++ b/crates/next-build-test/Cargo.toml @@ -3,7 +3,7 @@ name = "next-build-test" version = "0.1.0" description = "TBD" license = "MIT" -edition = "2021" +edition = "2024" autobenches = false [lints] @@ -24,18 +24,7 @@ tracing-subscriber = "0.3" turbo-rcstr = { workspace = true } turbo-tasks = { workspace = true } turbo-tasks-backend = { workspace = true } -turbo-tasks-env = { workspace = true } -turbo-tasks-fs = { workspace = true } turbo-tasks-malloc = { workspace = true } -turbopack = { workspace = true } -turbopack-browser = { workspace = true } -turbopack-cli-utils = { workspace = true } -turbopack-core = { workspace = true } -turbopack-ecmascript = { workspace = true } -turbopack-ecmascript-runtime = { workspace = true } -turbopack-env = { workspace = true } -turbopack-node = { workspace = true } -turbopack-nodejs = { workspace = true } turbopack-trace-utils = { workspace = true } [build-dependencies] diff --git a/crates/next-build-test/src/lib.rs b/crates/next-build-test/src/lib.rs index 21e1c6995e94b..1efa99f2bfed5 100644 --- 
a/crates/next-build-test/src/lib.rs +++ b/crates/next-build-test/src/lib.rs @@ -9,10 +9,10 @@ use anyhow::{Context, Result}; use futures_util::{StreamExt, TryStreamExt}; use next_api::{ project::{ProjectContainer, ProjectOptions}, - route::{endpoint_write_to_disk, Endpoint, EndpointOutputPaths, Route}, + route::{Endpoint, EndpointOutputPaths, Route, endpoint_write_to_disk}, }; use turbo_rcstr::RcStr; -use turbo_tasks::{get_effects, ReadConsistency, ResolvedVc, TransientInstance, TurboTasks, Vc}; +use turbo_tasks::{ReadConsistency, ResolvedVc, TransientInstance, TurboTasks, Vc, get_effects}; use turbo_tasks_backend::{NoopBackingStorage, TurboTasksBackend}; use turbo_tasks_malloc::TurboMalloc; @@ -150,7 +150,7 @@ impl Strategy { } pub fn shuffle<'a, T: 'a>(items: impl Iterator) -> impl Iterator { - use rand::{seq::SliceRandom, SeedableRng}; + use rand::{SeedableRng, seq::SliceRandom}; let mut rng = rand::rngs::SmallRng::from_seed([0; 32]); let mut input = items.collect::>(); input.shuffle(&mut rng); diff --git a/crates/next-build-test/src/main.rs b/crates/next-build-test/src/main.rs index 1d200a3024133..a5b9813595f7b 100644 --- a/crates/next-build-test/src/main.rs +++ b/crates/next-build-test/src/main.rs @@ -1,14 +1,14 @@ use std::{convert::Infallible, str::FromStr, time::Instant}; use next_api::project::{DefineEnv, ProjectOptions}; -use next_build_test::{main_inner, Strategy}; +use next_build_test::{Strategy, main_inner}; use next_core::tracing_presets::{ - TRACING_NEXT_OVERVIEW_TARGETS, TRACING_NEXT_TARGETS, TRACING_NEXT_TURBOPACK_TARGETS, - TRACING_NEXT_TURBO_TASKS_TARGETS, + TRACING_NEXT_OVERVIEW_TARGETS, TRACING_NEXT_TARGETS, TRACING_NEXT_TURBO_TASKS_TARGETS, + TRACING_NEXT_TURBOPACK_TARGETS, }; -use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, Registry}; +use tracing_subscriber::{Registry, layer::SubscriberExt, util::SubscriberInitExt}; use turbo_tasks::TurboTasks; -use turbo_tasks_backend::{noop_backing_storage, BackendOptions, TurboTasksBackend}; +use turbo_tasks_backend::{BackendOptions, TurboTasksBackend, noop_backing_storage}; use turbo_tasks_malloc::TurboMalloc; use turbopack_trace_utils::{ exit::ExitGuard, filter_layer::FilterLayer, raw_trace::RawTraceLayer, trace_writer::TraceWriter, @@ -169,7 +169,7 @@ fn main() { }; let json = serde_json::to_string_pretty(&options).unwrap(); - println!("{}", json); + println!("{json}"); } } } diff --git a/crates/next-build/Cargo.toml b/crates/next-build/Cargo.toml index 7b57ede7fb424..f06b79992e0c5 100644 --- a/crates/next-build/Cargo.toml +++ b/crates/next-build/Cargo.toml @@ -3,7 +3,7 @@ name = "next-build" version = "0.1.0" description = "TBD" license = "MIT" -edition = "2021" +edition = "2024" autobenches = false [lib] @@ -15,6 +15,7 @@ workspace = true [dependencies] next-core = { workspace = true } turbopack-core = { workspace = true } +turbo-rcstr = { workspace = true } [build-dependencies] turbo-tasks-build = { workspace = true } diff --git a/crates/next-build/src/build_options.rs b/crates/next-build/src/build_options.rs index 6292d24f1355d..37c3395c6a0fa 100644 --- a/crates/next-build/src/build_options.rs +++ b/crates/next-build/src/build_options.rs @@ -1,6 +1,7 @@ use std::path::PathBuf; use next_core::next_config::Rewrites; +use turbo_rcstr::RcStr; use turbopack_core::issue::IssueSeverity; #[derive(Clone, Debug)] @@ -46,7 +47,7 @@ pub struct BuildContext { #[derive(Debug, Clone)] pub struct DefineEnv { - pub client: Vec<(String, String)>, - pub edge: Vec<(String, String)>, - pub nodejs: Vec<(String, String)>, 
+ pub client: Vec<(RcStr, RcStr)>, + pub edge: Vec<(RcStr, RcStr)>, + pub nodejs: Vec<(RcStr, RcStr)>, } diff --git a/crates/next-core/Cargo.toml b/crates/next-core/Cargo.toml index 1387bd74e7d5f..6393b89d765fa 100644 --- a/crates/next-core/Cargo.toml +++ b/crates/next-core/Cargo.toml @@ -3,7 +3,7 @@ name = "next-core" version = "0.1.0" description = "TBD" license = "MIT" -edition = "2021" +edition = "2024" [lib] bench = false @@ -28,19 +28,17 @@ mime_guess = "2.0.4" indoc = { workspace = true } allsorts = { workspace = true } futures = { workspace = true } -lazy_static = { workspace = true } thiserror = { workspace = true } tracing = { workspace = true } rustc-hash = { workspace = true } -react_remove_properties = "0.34.0" -remove_console = "0.35.0" +react_remove_properties = "0.43.0" +remove_console = "0.44.0" itertools = { workspace = true } -auto-hash-map = { workspace = true } percent-encoding = "2.3.1" +serde_path_to_error = { workspace = true } swc_core = { workspace = true, features = [ "base", - "cached", "common_concurrent", "ecma_ast", "ecma_loader_lru", @@ -57,9 +55,9 @@ swc_core = { workspace = true, features = [ "ecma_visit", ] } modularize_imports = { workspace = true } -swc_relay = { workspace = true } turbo-rcstr = { workspace = true } +turbo-esregex = { workspace = true } turbo-tasks = { workspace = true } turbo-tasks-bytes = { workspace = true } turbo-tasks-env = { workspace = true } @@ -70,14 +68,14 @@ turbopack = { workspace = true } turbopack-browser = { workspace = true } turbopack-core = { workspace = true } turbopack-ecmascript = { workspace = true } -turbopack-ecmascript-plugins = { workspace = true, features = ["transform_emotion"] } +turbopack-ecmascript-plugins = { workspace = true, features = [ + "transform_emotion", +] } turbopack-ecmascript-runtime = { workspace = true } -turbopack-env = { workspace = true } turbopack-image = { workspace = true } turbopack-node = { workspace = true } turbopack-nodejs = { workspace = true } turbopack-static = { workspace = true } -turbopack-trace-server = { workspace = true } turbopack-trace-utils = { workspace = true } [build-dependencies] diff --git a/crates/next-core/src/app_page_loader_tree.rs b/crates/next-core/src/app_page_loader_tree.rs index da9c992464c17..310b38afa087f 100644 --- a/crates/next-core/src/app_page_loader_tree.rs +++ b/crates/next-core/src/app_page_loader_tree.rs @@ -7,19 +7,19 @@ use anyhow::Result; use turbo_rcstr::RcStr; use turbo_tasks::{FxIndexMap, ResolvedVc, Vc}; use turbo_tasks_fs::FileSystemPath; -use turbopack::{transition::Transition, ModuleAssetContext}; +use turbopack::{ModuleAssetContext, transition::Transition}; use turbopack_core::{file_source::FileSource, module::Module}; use turbopack_ecmascript::{magic_identifier, text::TextContentFileSource, utils::StringifyJs}; use crate::{ app_structure::{ - get_metadata_route_name, AppDirModules, AppPageLoaderTree, GlobalMetadata, Metadata, - MetadataItem, MetadataWithAltItem, + AppDirModules, AppPageLoaderTree, GlobalMetadata, Metadata, MetadataItem, + MetadataWithAltItem, get_metadata_route_name, }, base_loader_tree::{AppDirModuleType, BaseLoaderTreeBuilder}, next_app::{ - metadata::{get_content_type, image::dynamic_image_metadata_source}, AppPage, + metadata::{get_content_type, image::dynamic_image_metadata_source}, }, next_image::module::{BlurPlaceholderMode, StructuredImageModuleType}, }; @@ -136,7 +136,7 @@ impl AppPageLoaderTreeBuilder { let metadata_manifest_route = get_metadata_route_name(manifest).await?; // prefix with base_path if it 
exists let manifest_route = if let Some(base_path) = &self.base_path { - format!("{}/{}", base_path, metadata_manifest_route) + format!("{base_path}/{metadata_manifest_route}") } else { metadata_manifest_route.to_string() }; @@ -250,7 +250,7 @@ impl AppPageLoaderTreeBuilder { let s = " "; writeln!(self.loader_tree_code, "{s}(async (props) => [{{")?; let pathname_prefix = if let Some(base_path) = &self.base_path { - format!("{}/{}", base_path, app_page) + format!("{base_path}/{app_page}") } else { app_page.to_string() }; @@ -268,10 +268,16 @@ impl AppPageLoaderTreeBuilder { writeln!(self.loader_tree_code, "{s} width: {identifier}.width,")?; writeln!(self.loader_tree_code, "{s} height: {identifier}.height,")?; } else { - writeln!( - self.loader_tree_code, - "{s} sizes: `${{{identifier}.width}}x${{{identifier}.height}}`," - )?; + let ext = &*path.extension().await?; + // For SVGs, skip sizes and use "any" so they scale automatically based on the viewport. + // For images that don't provide their size properly, use "any" as well. + // If the size is present, use the actual size for the image. + let sizes = if ext == "svg" { + "any".to_string() + } else { + format!("${{{identifier}.width}}x${{{identifier}.height}}") + }; + writeln!(self.loader_tree_code, "{s} sizes: `{sizes}`,")?; } let content_type = get_content_type(path).await?; @@ -329,6 +335,7 @@ impl AppPageLoaderTreeBuilder { default, error, global_error, + global_not_found, layout, loading, template, @@ -369,6 +376,8 @@ impl AppPageLoaderTreeBuilder { .await?; self.write_modules_entry(AppDirModuleType::GlobalError, *global_error) .await?; + self.write_modules_entry(AppDirModuleType::GlobalNotFound, *global_not_found) + .await?; let modules_code = replace(&mut self.loader_tree_code, temp_loader_tree_code); @@ -393,6 +402,7 @@ impl AppPageLoaderTreeBuilder { let loader_tree = &*loader_tree.await?; let modules = &loader_tree.modules; + // load global-error module if let Some(global_error) = modules.global_error { let module = self .base @@ -401,6 +411,17 @@ impl AppPageLoaderTreeBuilder { .await?; self.base.inner_assets.insert(GLOBAL_ERROR.into(), module); }; + // load global-not-found module + if let Some(global_not_found) = modules.global_not_found { + let module = self + .base + .process_source(Vc::upcast(FileSource::new(*global_not_found))) + .to_resolved() + .await?; + self.base + .inner_assets + .insert(GLOBAL_NOT_FOUND.into(), module); + }; self.walk_tree(loader_tree, true).await?; Ok(AppPageLoaderTreeModule { @@ -433,3 +454,4 @@ impl AppPageLoaderTreeModule { } pub const GLOBAL_ERROR: &str = "GLOBAL_ERROR_MODULE"; +pub const GLOBAL_NOT_FOUND: &str = "GLOBAL_NOT_FOUND_MODULE"; diff --git a/crates/next-core/src/app_segment_config.rs b/crates/next-core/src/app_segment_config.rs index dfc716fd041d7..3df7fc7d215ab 100644 --- a/crates/next-core/src/app_segment_config.rs +++ b/crates/next-core/src/app_segment_config.rs @@ -1,16 +1,16 @@ use std::{future::Future, ops::Deref}; -use anyhow::{bail, Result}; +use anyhow::{Result, bail}; use serde::{Deserialize, Serialize}; use serde_json::Value; use swc_core::{ - common::{source_map::SmallPos, Span, Spanned, GLOBALS}, + common::{GLOBALS, Span, Spanned, source_map::SmallPos}, ecma::ast::{Decl, Expr, FnExpr, Ident, Program}, }; -use turbo_rcstr::RcStr; +use turbo_rcstr::{RcStr, rcstr}; use turbo_tasks::{ - trace::TraceRawVcs, util::WrapFuture, NonLocalValue, ResolvedVc, TryJoinIterExt, ValueDefault, - Vc, + NonLocalValue, ResolvedVc, TryJoinIterExt, ValueDefault, Vc, trace::TraceRawVcs, + 
util::WrapFuture, }; use turbo_tasks_fs::FileSystemPath; use turbopack_core::{ @@ -23,9 +23,9 @@ use turbopack_core::{ source::Source, }; use turbopack_ecmascript::{ - analyzer::{graph::EvalContext, ConstantNumber, ConstantValue, JsValue}, - parse::{parse, ParseResult}, EcmascriptInputTransforms, EcmascriptModuleAssetType, + analyzer::{ConstantNumber, ConstantValue, JsValue, graph::EvalContext}, + parse::{ParseResult, parse}, }; use crate::{app_structure::AppPageLoaderTree, util::NextRuntime}; @@ -199,14 +199,16 @@ impl NextSegmentConfigParsingIssue { #[turbo_tasks::value_impl] impl Issue for NextSegmentConfigParsingIssue { - #[turbo_tasks::function] - fn severity(&self) -> Vc { - IssueSeverity::Warning.into() + fn severity(&self) -> IssueSeverity { + IssueSeverity::Warning } #[turbo_tasks::function] fn title(&self) -> Vc { - StyledString::Text("Unable to parse config export in source file".into()).cell() + StyledString::Text(rcstr!( + "Next.js can't recognize the exported `config` field in route" + )) + .cell() } #[turbo_tasks::function] @@ -222,11 +224,10 @@ impl Issue for NextSegmentConfigParsingIssue { #[turbo_tasks::function] fn description(&self) -> Vc { Vc::cell(Some( - StyledString::Text( + StyledString::Text(rcstr!( "The exported configuration object in a source file needs to have a very specific \ format from which some properties can be statically parsed at compiled-time." - .into(), - ) + )) .resolved_cell(), )) } @@ -238,10 +239,9 @@ impl Issue for NextSegmentConfigParsingIssue { #[turbo_tasks::function] fn documentation_link(&self) -> Vc { - Vc::cell( + Vc::cell(rcstr!( "https://nextjs.org/docs/app/api-reference/file-conventions/route-segment-config" - .into(), - ) + )) } #[turbo_tasks::function] @@ -271,7 +271,7 @@ pub async fn parse_segment_config_from_source( let result = &*parse( *source, - turbo_tasks::Value::new(if path.path.ends_with(".ts") { + if path.path.ends_with(".ts") { EcmascriptModuleAssetType::Typescript { tsx: false, analyze_types: false, @@ -283,7 +283,7 @@ pub async fn parse_segment_config_from_source( } } else { EcmascriptModuleAssetType::Ecmascript - }), + }, EcmascriptInputTransforms::empty(), ) .await?; @@ -394,7 +394,7 @@ async fn parse_config_value( invalid_config( source, span, - &format!("`dynamic` has an invalid value: {}", err), + &format!("`dynamic` has an invalid value: {err}"), &value, ) .await?; @@ -455,7 +455,7 @@ async fn parse_config_value( return invalid_config( source, span, - &format!("`fetchCache` has an invalid value: {}", err), + &format!("`fetchCache` has an invalid value: {err}"), &value, ) .await; @@ -480,7 +480,7 @@ async fn parse_config_value( return invalid_config( source, span, - &format!("`runtime` has an invalid value: {}", err), + &format!("`runtime` has an invalid value: {err}"), &value, ) .await; diff --git a/crates/next-core/src/app_structure.rs b/crates/next-core/src/app_structure.rs index dd2ed86854fc2..4da12f57800b5 100644 --- a/crates/next-core/src/app_structure.rs +++ b/crates/next-core/src/app_structure.rs @@ -1,14 +1,14 @@ use std::collections::BTreeMap; -use anyhow::{bail, Context, Result}; +use anyhow::{Context, Result, bail}; use indexmap::map::{Entry, OccupiedEntry}; use rustc_hash::FxHashMap; use serde::{Deserialize, Serialize}; use tracing::Instrument; -use turbo_rcstr::RcStr; +use turbo_rcstr::{RcStr, rcstr}; use turbo_tasks::{ - debug::ValueDebugFormat, fxindexmap, trace::TraceRawVcs, FxIndexMap, NonLocalValue, ResolvedVc, - TaskInput, TryJoinIterExt, ValueDefault, ValueToString, Vc, + FxIndexMap, 
NonLocalValue, ResolvedVc, TaskInput, TryJoinIterExt, ValueDefault, ValueToString, + Vc, debug::ValueDebugFormat, fxindexmap, trace::TraceRawVcs, }; use turbo_tasks_fs::{DirectoryContent, DirectoryEntry, FileSystemEntryType, FileSystemPath}; use turbopack_core::issue::{ @@ -17,15 +17,21 @@ use turbopack_core::issue::{ use crate::{ next_app::{ + AppPage, AppPath, PageSegment, PageType, metadata::{ - match_global_metadata_file, match_local_metadata_file, normalize_metadata_route, - GlobalMetadataFileMatch, MetadataFileMatch, + GlobalMetadataFileMatch, MetadataFileMatch, match_global_metadata_file, + match_local_metadata_file, normalize_metadata_route, }, - AppPage, AppPath, PageSegment, PageType, }, next_import_map::get_next_package, }; +// Next.js ignores underscores for routes but you can use %5f to still serve an underscored +// route. +fn normalize_underscore(string: &str) -> String { + string.replace("%5F", "_") +} + /// A final route in the app directory. #[turbo_tasks::value] #[derive(Default, Debug, Clone)] @@ -39,6 +45,8 @@ pub struct AppDirModules { #[serde(skip_serializing_if = "Option::is_none")] pub global_error: Option>, #[serde(skip_serializing_if = "Option::is_none")] + pub global_not_found: Option>, + #[serde(skip_serializing_if = "Option::is_none")] pub loading: Option>, #[serde(skip_serializing_if = "Option::is_none")] pub template: Option>, @@ -63,6 +71,7 @@ impl AppDirModules { layout: self.layout, error: self.error, global_error: self.global_error, + global_not_found: self.global_not_found, loading: self.loading, template: self.template, not_found: self.not_found, @@ -122,7 +131,7 @@ pub async fn get_metadata_route_name(meta: MetadataItem) -> Result> { }; match stem.as_str() { - "manifest" => Vc::cell("manifest.webmanifest".into()), + "manifest" => Vc::cell(rcstr!("manifest.webmanifest")), _ => Vc::cell(stem.clone()), } } @@ -254,8 +263,8 @@ pub struct OptionAppDir(Option>); /// Finds and returns the [DirectoryTree] of the app directory if existing. #[turbo_tasks::function] pub async fn find_app_dir(project_path: Vc) -> Result> { - let app = project_path.join("app".into()); - let src_app = project_path.join("src/app".into()); + let app = project_path.join(rcstr!("app")); + let src_app = project_path.join(rcstr!("src/app")); let app_dir = if *app.get_type().await? == FileSystemEntryType::Directory { app } else if *src_app.get_type().await? 
== FileSystemEntryType::Directory { @@ -315,22 +324,23 @@ async fn get_directory_tree_internal( if basename.ends_with(".d.ts") { continue; } - if let Some((stem, ext)) = basename.split_once('.') { - if page_extensions_value.iter().any(|e| e == ext) { - match stem { - "page" => modules.page = Some(file), - "layout" => modules.layout = Some(file), - "error" => modules.error = Some(file), - "global-error" => modules.global_error = Some(file), - "loading" => modules.loading = Some(file), - "template" => modules.template = Some(file), - "forbidden" => modules.forbidden = Some(file), - "unauthorized" => modules.unauthorized = Some(file), - "not-found" => modules.not_found = Some(file), - "default" => modules.default = Some(file), - "route" => modules.route = Some(file), - _ => {} - } + if let Some((stem, ext)) = basename.split_once('.') + && page_extensions_value.iter().any(|e| e == ext) + { + match stem { + "page" => modules.page = Some(file), + "layout" => modules.layout = Some(file), + "error" => modules.error = Some(file), + "global-error" => modules.global_error = Some(file), + "global-not-found" => modules.global_not_found = Some(file), + "loading" => modules.loading = Some(file), + "template" => modules.template = Some(file), + "forbidden" => modules.forbidden = Some(file), + "unauthorized" => modules.unauthorized = Some(file), + "not-found" => modules.not_found = Some(file), + "default" => modules.default = Some(file), + "route" => modules.route = Some(file), + _ => {} } } @@ -371,7 +381,7 @@ async fn get_directory_tree_internal( .map_or(file_name, |(basename, _)| basename); let alt_path = file .parent() - .join(format!("{}.alt.txt", basename).into()) + .join(format!("{basename}.alt.txt").into()) .to_resolved() .await?; let alt_path = matches!(&*alt_path.get_type().await?, FileSystemEntryType::File) @@ -564,9 +574,9 @@ fn conflict_issue( value_b: &AppPage, ) { let item_names = if a == b { - format!("{}s", a) + format!("{a}s") } else { - format!("{} and {}", a, b) + format!("{a} and {b}") }; DirectoryTreeIssue { @@ -580,7 +590,7 @@ fn conflict_issue( .into(), ) .resolved_cell(), - severity: IssueSeverity::Error.resolved_cell(), + severity: IssueSeverity::Error, } .resolved_cell() .emit(); @@ -730,11 +740,13 @@ fn add_app_metadata_route( pub fn get_entrypoints( app_dir: Vc, page_extensions: Vc>, + is_global_not_found_enabled: Vc, ) -> Vc { directory_tree_to_entrypoints( app_dir, get_directory_tree(app_dir, page_extensions), get_global_metadata(app_dir, page_extensions), + is_global_not_found_enabled, Default::default(), ) } @@ -744,12 +756,14 @@ fn directory_tree_to_entrypoints( app_dir: Vc, directory_tree: Vc, global_metadata: Vc, + is_global_not_found_enabled: Vc, root_layouts: Vc, ) -> Vc { directory_tree_to_entrypoints_internal( app_dir, global_metadata, - "".into(), + is_global_not_found_enabled, + rcstr!(""), directory_tree, AppPage::new(), root_layouts, @@ -759,6 +773,7 @@ fn directory_tree_to_entrypoints( #[turbo_tasks::value] struct DuplicateParallelRouteIssue { app_dir: ResolvedVc, + previously_inserted_page: AppPage, page: AppPage, } @@ -775,11 +790,17 @@ impl Issue for DuplicateParallelRouteIssue { } #[turbo_tasks::function] - fn title(self: Vc) -> Vc { - StyledString::Text( - "You cannot have two parallel pages that resolve to the same path.".into(), + async fn title(self: Vc) -> Result> { + let this = self.await?; + Ok(StyledString::Text( + format!( + "You cannot have two parallel pages that resolve to the same path. 
Please check \ + {} and {}.", + this.previously_inserted_page, this.page + ) + .into(), ) - .cell() + .cell()) } } @@ -813,17 +834,17 @@ async fn check_duplicate( ) -> Result<()> { let page_path = page_path_except_parallel(loader_tree); - if let Some(page_path) = page_path { - if let Some(prev) = duplicate.insert(AppPath::from(page_path.clone()), page_path.clone()) { - if prev != page_path { - DuplicateParallelRouteIssue { - app_dir: app_dir.to_resolved().await?, - page: loader_tree.page.clone(), - } - .resolved_cell() - .emit(); - } + if let Some(page_path) = page_path + && let Some(prev) = duplicate.insert(AppPath::from(page_path.clone()), page_path.clone()) + && prev != page_path + { + DuplicateParallelRouteIssue { + app_dir: app_dir.to_resolved().await?, + previously_inserted_page: prev.clone(), + page: loader_tree.page.clone(), } + .resolved_cell() + .emit(); } Ok(()) @@ -889,7 +910,7 @@ async fn directory_tree_to_loader_tree_internal( if modules.not_found.is_none() { modules.not_found = Some( get_next_package(app_dir) - .join("dist/client/components/not-found-error.js".into()) + .join(rcstr!("dist/client/components/builtin/not-found.js")) .to_resolved() .await?, ); @@ -897,7 +918,7 @@ async fn directory_tree_to_loader_tree_internal( if modules.forbidden.is_none() { modules.forbidden = Some( get_next_package(app_dir) - .join("dist/client/components/forbidden-error.js".into()) + .join(rcstr!("dist/client/components/builtin/forbidden.js")) .to_resolved() .await?, ); @@ -905,7 +926,7 @@ async fn directory_tree_to_loader_tree_internal( if modules.unauthorized.is_none() { modules.unauthorized = Some( get_next_package(app_dir) - .join("dist/client/components/unauthorized-error.js".into()) + .join(rcstr!("dist/client/components/builtin/unauthorized.js")) .to_resolved() .await?, ); @@ -923,7 +944,7 @@ async fn directory_tree_to_loader_tree_internal( let current_level_is_parallel_route = is_parallel_route(&directory_name); if current_level_is_parallel_route { - tree.segment = "children".into(); + tree.segment = rcstr!("children"); } if let Some(page) = (app_path == for_app_path || app_path.is_catchall()) @@ -931,10 +952,10 @@ async fn directory_tree_to_loader_tree_internal( .flatten() { tree.parallel_routes.insert( - "children".into(), + rcstr!("children"), AppPageLoaderTree { page: app_page.clone(), - segment: "__PAGE__".into(), + segment: rcstr!("__PAGE__"), parallel_routes: FxIndexMap::default(), modules: AppDirModules { page: Some(page), @@ -946,7 +967,7 @@ async fn directory_tree_to_loader_tree_internal( ); if current_level_is_parallel_route { - tree.segment = "page$".into(); + tree.segment = rcstr!("page$"); } } @@ -961,7 +982,7 @@ async fn directory_tree_to_loader_tree_internal( // When constructing the app_page fails (e. g. due to limitations of the order), // we only want to emit the error when there are actual pages below that // directory. 
- if let Err(e) = child_app_page.push_str(subdir_name) { + if let Err(e) = child_app_page.push_str(&normalize_underscore(subdir_name)) { illegal_path_error = Some(e); } @@ -1000,10 +1021,10 @@ async fn directory_tree_to_loader_tree_internal( || current_tree.get_specificity() < subtree.get_specificity()) { tree.parallel_routes - .insert("children".into(), subtree.clone()); + .insert(rcstr!("children"), subtree.clone()); } } else { - tree.parallel_routes.insert("children".into(), subtree); + tree.parallel_routes.insert(rcstr!("children"), subtree); } } else if let Some(key) = parallel_route_key { bail!( @@ -1030,7 +1051,7 @@ async fn directory_tree_to_loader_tree_internal( } for key in keys_to_replace { - let subdir_name: RcStr = format!("@{}", key).into(); + let subdir_name: RcStr = format!("@{key}").into(); let default = if key == "children" { modules.default @@ -1067,7 +1088,7 @@ async fn directory_tree_to_loader_tree_internal( } } else if tree.parallel_routes.get("children").is_none() { tree.parallel_routes.insert( - "children".into(), + rcstr!("children"), default_route_tree( app_dir, global_metadata, @@ -1097,7 +1118,7 @@ async fn default_route_tree( ) -> Result { Ok(AppPageLoaderTree { page: app_page.clone(), - segment: "__DEFAULT__".into(), + segment: rcstr!("__DEFAULT__"), parallel_routes: FxIndexMap::default(), modules: if let Some(default) = default_component { AppDirModules { @@ -1109,7 +1130,7 @@ async fn default_route_tree( AppDirModules { default: Some( get_next_package(app_dir) - .join("dist/client/components/parallel-route-default.js".into()) + .join(rcstr!("dist/client/components/builtin/default.js")) .to_resolved() .await?, ), @@ -1124,6 +1145,7 @@ async fn default_route_tree( async fn directory_tree_to_entrypoints_internal( app_dir: ResolvedVc, global_metadata: Vc, + is_global_not_found_enabled: Vc, directory_name: RcStr, directory_tree: Vc, app_page: AppPage, @@ -1133,6 +1155,7 @@ async fn directory_tree_to_entrypoints_internal( directory_tree_to_entrypoints_internal_untraced( app_dir, global_metadata, + is_global_not_found_enabled, directory_name, directory_tree, app_page, @@ -1145,6 +1168,7 @@ async fn directory_tree_to_entrypoints_internal( async fn directory_tree_to_entrypoints_internal_untraced( app_dir: ResolvedVc, global_metadata: Vc, + is_global_not_found_enabled: Vc, directory_name: RcStr, directory_tree: Vc, app_page: AppPage, @@ -1251,7 +1275,7 @@ async fn directory_tree_to_entrypoints_internal_untraced( if modules.layout.is_none() { modules.layout = Some( get_next_package(*app_dir) - .join("dist/client/components/default-layout.js".into()) + .join(rcstr!("dist/client/components/builtin/layout.js")) .to_resolved() .await?, ); @@ -1260,7 +1284,7 @@ async fn directory_tree_to_entrypoints_internal_untraced( if modules.not_found.is_none() { modules.not_found = Some( get_next_package(*app_dir) - .join("dist/client/components/not-found-error.js".into()) + .join(rcstr!("dist/client/components/builtin/not-found.js")) .to_resolved() .await?, ); @@ -1268,7 +1292,7 @@ async fn directory_tree_to_entrypoints_internal_untraced( if modules.forbidden.is_none() { modules.forbidden = Some( get_next_package(*app_dir) - .join("dist/client/components/forbidden-error.js".into()) + .join(rcstr!("dist/client/components/builtin/forbidden.js")) .to_resolved() .await?, ); @@ -1276,7 +1300,7 @@ async fn directory_tree_to_entrypoints_internal_untraced( if modules.unauthorized.is_none() { modules.unauthorized = Some( get_next_package(*app_dir) - 
.join("dist/client/components/unauthorized-error.js".into()) + .join(rcstr!("dist/client/components/builtin/unauthorized.js")) .to_resolved() .await?, ); @@ -1284,36 +1308,73 @@ async fn directory_tree_to_entrypoints_internal_untraced( // Next.js has this logic in "collect-app-paths", where the root not-found page // is considered as its own entry point. + + // Determine if we enable the global not-found feature. + let is_global_not_found_enabled = *is_global_not_found_enabled.await?; + let use_global_not_found = + is_global_not_found_enabled || modules.global_not_found.is_some(); + + let not_found_root_modules = modules.without_leafs(); let not_found_tree = AppPageLoaderTree { page: app_page.clone(), segment: directory_name.clone(), parallel_routes: fxindexmap! { - "children".into() => AppPageLoaderTree { + rcstr!("children") => AppPageLoaderTree { page: app_page.clone(), - segment: "/_not-found".into(), + segment: rcstr!("/_not-found"), parallel_routes: fxindexmap! { - "children".into() => AppPageLoaderTree { + rcstr!("children") => AppPageLoaderTree { page: app_page.clone(), - segment: "__PAGE__".into(), + segment: rcstr!("__PAGE__"), parallel_routes: FxIndexMap::default(), - modules: AppDirModules { - page: match modules.not_found { - Some(v) => Some(v), - None => Some(get_next_package(*app_dir) - .join("dist/client/components/not-found-error.js".into()) - .to_resolved() - .await?), - }, - ..Default::default() + modules: if use_global_not_found { + // if global-not-found.js is present: + // we use it for the page and no layout, since layout is included in global-not-found.js; + AppDirModules { + layout: None, + page: match modules.global_not_found { + Some(v) => Some(v), + None => Some(get_next_package(*app_dir) + .join(rcstr!("dist/client/components/builtin/global-not-found.js")) + .to_resolved() + .await?), + }, + ..Default::default() + } + } else { + // if global-not-found.js is not present: + // we search if we can compose root layout with the root not-found.js; + AppDirModules { + page: match modules.not_found { + Some(v) => Some(v), + None => Some(get_next_package(*app_dir) + .join(rcstr!("dist/client/components/builtin/not-found.js")) + .to_resolved() + .await?), + }, + ..Default::default() + } }, global_metadata: global_metadata.to_resolved().await?, } }, - modules: AppDirModules::default(), + modules: AppDirModules { + ..Default::default() + }, global_metadata: global_metadata.to_resolved().await?, }, }, - modules: modules.without_leafs(), + modules: AppDirModules { + // `global-not-found.js` does not need a layout since it's included. + // Skip it if it's present. + // Otherwise, we need to compose it with the root layout to compose with not-found.js boundary. + layout: if use_global_not_found { + None + } else { + modules.layout + }, + ..not_found_root_modules + }, global_metadata: global_metadata.to_resolved().await?, } .resolved_cell(); @@ -1338,13 +1399,14 @@ async fn directory_tree_to_entrypoints_internal_untraced( // When constructing the app_page fails (e. g. due to limitations of the order), // we only want to emit the error when there are actual pages below that // directory. 
- if let Err(e) = child_app_page.push_str(subdir_name) { + if let Err(e) = child_app_page.push_str(&normalize_underscore(subdir_name)) { illegal_path = Some(e); } let map = directory_tree_to_entrypoints_internal( *app_dir, global_metadata, + is_global_not_found_enabled, subdir_name.clone(), *subdirectory, child_app_page.clone(), @@ -1352,10 +1414,10 @@ async fn directory_tree_to_entrypoints_internal_untraced( ) .await?; - if let Some(illegal_path) = illegal_path { - if !map.is_empty() { - return Err(illegal_path); - } + if let Some(illegal_path) = illegal_path + && !map.is_empty() + { + return Err(illegal_path); } let mut loader_trees = Vec::new(); @@ -1467,21 +1529,20 @@ pub async fn get_global_metadata( #[turbo_tasks::value(shared)] struct DirectoryTreeIssue { - pub severity: ResolvedVc, + pub severity: IssueSeverity, pub app_dir: ResolvedVc, pub message: ResolvedVc, } #[turbo_tasks::value_impl] impl Issue for DirectoryTreeIssue { - #[turbo_tasks::function] - fn severity(&self) -> Vc { - *self.severity + fn severity(&self) -> IssueSeverity { + self.severity } #[turbo_tasks::function] fn title(&self) -> Vc { - StyledString::Text("An issue occurred while preparing your Next.js app".into()).cell() + StyledString::Text(rcstr!("An issue occurred while preparing your Next.js app")).cell() } #[turbo_tasks::function] diff --git a/crates/next-core/src/base_loader_tree.rs b/crates/next-core/src/base_loader_tree.rs index 20020a503ad72..e19b63a7ec99e 100644 --- a/crates/next-core/src/base_loader_tree.rs +++ b/crates/next-core/src/base_loader_tree.rs @@ -1,9 +1,9 @@ use anyhow::Result; use indoc::formatdoc; use turbo_rcstr::RcStr; -use turbo_tasks::{FxIndexMap, ResolvedVc, Value, ValueToString, Vc}; +use turbo_tasks::{FxIndexMap, ResolvedVc, ValueToString, Vc}; use turbo_tasks_fs::FileSystemPath; -use turbopack::{transition::Transition, ModuleAssetContext}; +use turbopack::{ModuleAssetContext, transition::Transition}; use turbopack_core::{ file_source::FileSource, module::Module, @@ -32,6 +32,7 @@ pub enum AppDirModuleType { Forbidden, Unauthorized, GlobalError, + GlobalNotFound, } impl AppDirModuleType { @@ -47,6 +48,7 @@ impl AppDirModuleType { AppDirModuleType::Forbidden => "forbidden", AppDirModuleType::Unauthorized => "unauthorized", AppDirModuleType::GlobalError => "global-error", + AppDirModuleType::GlobalNotFound => "global-not-found", } } } @@ -72,9 +74,8 @@ impl BaseLoaderTreeBuilder { } pub fn process_source(&self, source: Vc>) -> Vc> { - let reference_type = Value::new(ReferenceType::EcmaScriptModules( - EcmaScriptModulesReferenceSubType::Undefined, - )); + let reference_type = + ReferenceType::EcmaScriptModules(EcmaScriptModulesReferenceSubType::Undefined); self.server_component_transition .process(source, *self.module_asset_context, reference_type) diff --git a/crates/next-core/src/bootstrap.rs b/crates/next-core/src/bootstrap.rs index 3d1962e9ca58f..8f2f2d087e0e3 100644 --- a/crates/next-core/src/bootstrap.rs +++ b/crates/next-core/src/bootstrap.rs @@ -1,5 +1,5 @@ -use anyhow::{bail, Context, Result}; -use turbo_tasks::{FxIndexMap, ResolvedVc, Value, ValueToString, Vc}; +use anyhow::{Context, Result, bail}; +use turbo_tasks::{FxIndexMap, ResolvedVc, ValueToString, Vc}; use turbo_tasks_fs::{File, FileSystemPath}; use turbopack_core::{ asset::AssetContent, @@ -59,11 +59,7 @@ pub async fn bootstrap( ); }; let path = if let Some((name, ext)) = path.rsplit_once('.') { - if !ext.contains('/') { - name - } else { - path - } + if !ext.contains('/') { name } else { path } } else { path }; @@ 
-89,9 +85,7 @@ pub async fn bootstrap( .into(), ), )), - Value::new(ReferenceType::Internal( - InnerAssets::empty().to_resolved().await?, - )), + ReferenceType::Internal(InnerAssets::empty().to_resolved().await?), ) .module() .to_resolved() @@ -104,7 +98,7 @@ pub async fn bootstrap( let asset = asset_context .process( bootstrap_asset, - Value::new(ReferenceType::Internal(ResolvedVc::cell(inner_assets))), + ReferenceType::Internal(ResolvedVc::cell(inner_assets)), ) .module() .to_resolved() diff --git a/crates/next-core/src/emit.rs b/crates/next-core/src/emit.rs index cabd41970c62b..41525bbde7ca0 100644 --- a/crates/next-core/src/emit.rs +++ b/crates/next-core/src/emit.rs @@ -1,11 +1,10 @@ use anyhow::Result; -use rustc_hash::FxHashSet; use tracing::Instrument; use turbo_tasks::{ + FxIndexSet, ResolvedVc, TryFlatJoinIterExt, ValueToString, Vc, graph::{AdjacencyMap, GraphTraversal}, - ResolvedVc, TryFlatJoinIterExt, ValueToString, Vc, }; -use turbo_tasks_fs::{rebase, FileSystemPath}; +use turbo_tasks_fs::{FileSystemPath, rebase}; use turbopack_core::{ asset::Asset, output::{OutputAsset, OutputAssets}, @@ -113,7 +112,7 @@ pub async fn all_assets_from_entries(entries: Vc) -> Result>() + .collect::>() .into_iter() .collect(), )) diff --git a/crates/next-core/src/hmr_entry.rs b/crates/next-core/src/hmr_entry.rs index 1ee89fc4f5dcc..b7522643e9daf 100644 --- a/crates/next-core/src/hmr_entry.rs +++ b/crates/next-core/src/hmr_entry.rs @@ -1,9 +1,9 @@ use std::io::Write; use anyhow::Result; -use turbo_rcstr::RcStr; +use turbo_rcstr::{RcStr, rcstr}; use turbo_tasks::{ResolvedVc, ValueToString, Vc}; -use turbo_tasks_fs::{glob::Glob, rope::RopeBuilder}; +use turbo_tasks_fs::{FileSystem, VirtualFileSystem, glob::Glob, rope::RopeBuilder}; use turbopack_core::{ asset::{Asset, AssetContent}, chunk::{ @@ -25,9 +25,15 @@ use turbopack_ecmascript::{ utils::StringifyJs, }; +/// Each entry point in the HMR system has an ident with a different nested asset. 
+/// This produces the 'base' ident for the HMR entry point, which is then modified #[turbo_tasks::function] -fn modifier() -> Vc { - Vc::cell("hmr-entry".into()) +fn hmr_entry_point_base_ident() -> Vc { + AssetIdent::from_path( + VirtualFileSystem::new_with_name(rcstr!("hmr-entry")) + .root() + .join(rcstr!("hmr-entry.js")), + ) } #[turbo_tasks::value(shared)] @@ -51,7 +57,7 @@ impl HmrEntryModule { impl Module for HmrEntryModule { #[turbo_tasks::function] fn ident(&self) -> Vc { - self.ident.with_modifier(modifier()) + hmr_entry_point_base_ident().with_asset(rcstr!("ENTRY"), *self.ident) } #[turbo_tasks::function] @@ -124,7 +130,7 @@ impl HmrEntryModuleReference { impl ValueToString for HmrEntryModuleReference { #[turbo_tasks::function] fn to_string(&self) -> Vc { - Vc::cell("entry".into()) + Vc::cell(rcstr!("entry")) } } diff --git a/crates/next-core/src/lib.rs b/crates/next-core/src/lib.rs index de01df2ca1233..f8bdaa2230777 100644 --- a/crates/next-core/src/lib.rs +++ b/crates/next-core/src/lib.rs @@ -49,8 +49,8 @@ pub use next_edge::context::{ get_edge_compile_time_info, get_edge_resolve_options_context, }; pub use next_import_map::get_next_package; -pub use page_loader::{create_page_loader_entry_module, PageLoaderAsset}; -pub use util::{get_asset_path_from_pathname, pathname_for_path, PathType}; +pub use page_loader::{PageLoaderAsset, create_page_loader_entry_module}; +pub use util::{PathType, get_asset_path_from_pathname, pathname_for_path}; pub fn register() { turbo_tasks::register(); diff --git a/crates/next-core/src/middleware.rs b/crates/next-core/src/middleware.rs index afc13094ef46e..aafc7e9e09851 100644 --- a/crates/next-core/src/middleware.rs +++ b/crates/next-core/src/middleware.rs @@ -1,6 +1,6 @@ use anyhow::Result; use turbo_rcstr::RcStr; -use turbo_tasks::{fxindexmap, FxIndexMap, ResolvedVc, Value, Vc}; +use turbo_tasks::{FxIndexMap, ResolvedVc, Vc, fxindexmap}; use turbo_tasks_fs::FileSystemPath; use turbopack_core::{context::AssetContext, module::Module, reference_type::ReferenceType}; @@ -49,7 +49,7 @@ pub async fn get_middleware_module( let module = asset_context .process( source, - Value::new(ReferenceType::Internal(ResolvedVc::cell(inner_assets))), + ReferenceType::Internal(ResolvedVc::cell(inner_assets)), ) .module(); diff --git a/crates/next-core/src/next_app/app_client_references_chunks.rs b/crates/next-core/src/next_app/app_client_references_chunks.rs index 98cac23cd50f6..b26cadfec11b8 100644 --- a/crates/next-core/src/next_app/app_client_references_chunks.rs +++ b/crates/next-core/src/next_app/app_client_references_chunks.rs @@ -1,37 +1,25 @@ use anyhow::Result; use tracing::Instrument; -use turbo_rcstr::RcStr; -use turbo_tasks::{ - FxIndexMap, ResolvedVc, TryFlatJoinIterExt, TryJoinIterExt, Value, ValueToString, Vc, -}; +use turbo_rcstr::rcstr; +use turbo_tasks::{FxIndexMap, ResolvedVc, TryFlatJoinIterExt, TryJoinIterExt, ValueToString, Vc}; use turbopack_core::{ - chunk::{availability_info::AvailabilityInfo, ChunkingContext}, + chunk::{ChunkingContext, availability_info::AvailabilityInfo}, module::Module, - module_graph::{chunk_group_info::ChunkGroup, ModuleGraph}, + module_graph::{ModuleGraph, chunk_group_info::ChunkGroup}, output::OutputAssets, }; use crate::{ next_client_reference::{ + ClientReferenceType, ecmascript_client_reference::ecmascript_client_reference_module::{ - ECMASCRIPT_CLIENT_REFERENCE_MERGE_TAG_CLIENT, ECMASCRIPT_CLIENT_REFERENCE_MERGE_TAG_SSR, + ecmascript_client_reference_merge_tag, ecmascript_client_reference_merge_tag_ssr, }, 
visit_client_reference::ClientReferenceGraphResult, - ClientReferenceType, }, next_server_component::server_component_module::NextServerComponentModule, }; -#[turbo_tasks::function] -pub fn client_modules_modifier() -> Vc { - Vc::cell("client modules".into()) -} - -#[turbo_tasks::function] -pub fn ssr_modules_modifier() -> Vc { - Vc::cell("ssr modules".into()) -} - #[turbo_tasks::value] pub struct ClientReferencesChunks { pub client_component_client_chunks: @@ -51,7 +39,7 @@ pub async fn get_app_client_references_chunks( app_client_references: Vc, module_graph: Vc, client_chunking_context: Vc>, - client_availability_info: Value, + client_availability_info: AvailabilityInfo, ssr_chunking_context: Option>>, ) -> Result> { async move { @@ -171,7 +159,7 @@ pub async fn get_app_client_references_chunks( let chunk_group_info = module_graph.chunk_group_info(); - let mut current_client_availability_info = client_availability_info.into_value(); + let mut current_client_availability_info = client_availability_info; let mut current_client_chunks = OutputAssets::empty().to_resolved().await?; let mut current_ssr_availability_info = AvailabilityInfo::Root; let mut current_ssr_chunks = OutputAssets::empty().to_resolved().await?; @@ -224,14 +212,14 @@ pub async fn get_app_client_references_chunks( .entered(); ssr_chunking_context.chunk_group( - base_ident.with_modifier(ssr_modules_modifier()), + base_ident.with_modifier(rcstr!("ssr modules")), ChunkGroup::IsolatedMerged { parent: parent_chunk_group, - merge_tag: ECMASCRIPT_CLIENT_REFERENCE_MERGE_TAG_SSR.clone(), + merge_tag: ecmascript_client_reference_merge_tag_ssr(), entries: ssr_modules, }, module_graph, - Value::new(current_ssr_availability_info), + current_ssr_availability_info, ) }) } else { @@ -262,14 +250,14 @@ pub async fn get_app_client_references_chunks( .entered(); Some(client_chunking_context.chunk_group( - base_ident.with_modifier(client_modules_modifier()), + base_ident.with_modifier(rcstr!("client modules")), ChunkGroup::IsolatedMerged { parent: parent_chunk_group, - merge_tag: ECMASCRIPT_CLIENT_REFERENCE_MERGE_TAG_CLIENT.clone(), + merge_tag: ecmascript_client_reference_merge_tag(), entries: client_modules, }, module_graph, - Value::new(current_client_availability_info), + current_client_availability_info, )) } else { None diff --git a/crates/next-core/src/next_app/app_client_shared_chunks.rs b/crates/next-core/src/next_app/app_client_shared_chunks.rs index 364bc9b62fe25..13baaae6924ec 100644 --- a/crates/next-core/src/next_app/app_client_shared_chunks.rs +++ b/crates/next-core/src/next_app/app_client_shared_chunks.rs @@ -1,12 +1,12 @@ use anyhow::Result; use tracing::Instrument; -use turbo_tasks::{ResolvedVc, Value, Vc}; +use turbo_tasks::{ResolvedVc, Vc}; use turbopack_core::{ chunk::{ - availability_info::AvailabilityInfo, ChunkGroupResult, ChunkingContext, EvaluatableAssets, + ChunkGroupResult, ChunkingContext, EvaluatableAssets, availability_info::AvailabilityInfo, }, ident::AssetIdent, - module_graph::{chunk_group_info::ChunkGroup, ModuleGraph}, + module_graph::{ModuleGraph, chunk_group_info::ChunkGroup}, output::OutputAssets, }; @@ -38,7 +38,7 @@ pub async fn get_app_client_shared_chunk_group( .collect(), ), module_graph, - Value::new(AvailabilityInfo::Root), + AvailabilityInfo::Root, ) .resolve() .await diff --git a/crates/next-core/src/next_app/app_favicon_entry.rs b/crates/next-core/src/next_app/app_favicon_entry.rs deleted file mode 100644 index d1e0cf2fc5daa..0000000000000 --- 
a/crates/next-core/src/next_app/app_favicon_entry.rs +++ /dev/null @@ -1,97 +0,0 @@ -use std::io::Write; - -use anyhow::{bail, Result}; -use base64::{display::Base64Display, engine::general_purpose::STANDARD}; -use indoc::writedoc; -use turbo_tasks::{ValueToString, Vc}; -use turbo_tasks_fs::{self, DiskFileSystem, FileContent, FileSystem, FileSystemPath}; -use turbo_tasks_memory::MemoryBackend; -use turbopack_core::{ - asset::AssetContent, - diagnostics::PlainDiagnostic, - error::PrettyPrintError, - issue::PlainIssue, - source_map::Token, - version::{PartialUpdate, TotalUpdate, Update, VersionState}, - virtual_source::VirtualSource, -}; - -use super::app_route_entry::get_app_route_entry; -use crate::{ - app_structure::MetadataItem, - next_app::{AppEntry, AppPage, PageSegment}, -}; - -/// Computes the entry for a Next.js favicon file. -#[turbo_tasks::function] -pub async fn get_app_route_favicon_entry( - nodejs_context: Vc, - edge_context: Vc, - favicon: MetadataItem, - project_root: Vc, -) -> Result> { - let path = match favicon { - // TODO(alexkirsz) Is there a difference here? - MetadataItem::Static { path } => path, - MetadataItem::Dynamic { path: _ } => bail!("Dynamic metadata is not implemented yet"), - }; - - let mut code = RopeBuilder::default(); - - let content_type = mime_guess::from_ext(&path.extension().await?) - .first_or_octet_stream() - .to_string(); - let original_file_content = path.read().await?; - let original_file_content_b64 = match &*original_file_content { - FileContent::Content(content) => { - let content = content.content().to_bytes()?; - Base64Display::new(&content, &STANDARD).to_string() - } - FileContent::NotFound => { - bail!("favicon file not found: {}", &path.to_string().await?); - } - }; - // Specific to favicon - let cache_control = "public, max-age=0, must-revalidate"; - - // TODO(alexkirsz) Generalize this to any file. - writedoc! { - code, - r#" - import {{ NextResponse }} from 'next/server' - - const contentType = {content_type} - const cacheControl = {cache_control} - const buffer = Buffer.from({original_file_content_b64}, 'base64') - - export function GET() {{ - return new NextResponse(buffer, {{ - headers: {{ - 'Content-Type': contentType, - 'Cache-Control': cacheControl, - }}, - }}) - }} - - export const dynamic = 'force-static' - "#, - content_type = StringifyJs(&content_type), - cache_control = StringifyJs(&cache_control), - original_file_content_b64 = StringifyJs(&original_file_content_b64), - }?; - - let file = File::from(code.build()); - let source = - // TODO(alexkirsz) Figure out how to name this virtual source. - VirtualSource::new(project_root.join("favicon-entry.tsx".to_string()), AssetContent::file(file.into())); - - Ok(get_app_route_entry( - nodejs_context, - edge_context, - Vc::upcast(source), - // TODO(alexkirsz) Get this from the metadata? 
- AppPage(vec![PageSegment::Static("/favicon.ico".to_string())]), - project_root, - None, - )) -} diff --git a/crates/next-core/src/next_app/app_page_entry.rs b/crates/next-core/src/next_app/app_page_entry.rs index c8d889ec3a10e..1ca3670607572 100644 --- a/crates/next-core/src/next_app/app_page_entry.rs +++ b/crates/next-core/src/next_app/app_page_entry.rs @@ -2,8 +2,8 @@ use std::io::Write; use anyhow::Result; use turbo_rcstr::RcStr; -use turbo_tasks::{fxindexmap, ResolvedVc, TryJoinIterExt, Value, ValueToString, Vc}; -use turbo_tasks_fs::{self, rope::RopeBuilder, File, FileSystemPath}; +use turbo_tasks::{ResolvedVc, TryJoinIterExt, ValueToString, Vc, fxindexmap}; +use turbo_tasks_fs::{self, File, FileSystemPath, rope::RopeBuilder}; use turbopack::ModuleAssetContext; use turbopack_core::{ asset::{Asset, AssetContent}, @@ -27,7 +27,7 @@ use crate::{ next_edge::entry::wrap_edge_entry, next_server_component::NextServerComponentTransition, parse_segment_config_from_loader_tree, - util::{file_content_rope, load_next_js_template, NextRuntime}, + util::{NextRuntime, file_content_rope, load_next_js_template}, }; /// Computes the entry for a Next.js app page. @@ -88,7 +88,7 @@ pub async fn get_app_page_entry( "VAR_MODULE_GLOBAL_ERROR" => if inner_assets.contains_key(GLOBAL_ERROR) { GLOBAL_ERROR.into() } else { - "next/dist/client/components/error-boundary".into() + "next/dist/client/components/builtin/global-error".into() }, }, fxindexmap! { @@ -109,16 +109,14 @@ pub async fn get_app_page_entry( let file = File::from(result.build()); let source = VirtualSource::new_with_ident( - source - .ident() - .with_query(Vc::cell(format!("?{}", query).into())), + source.ident().with_query(RcStr::from(format!("?{query}"))), AssetContent::file(file.into()), ); let mut rsc_entry = module_asset_context .process( Vc::upcast(source), - Value::new(ReferenceType::Internal(ResolvedVc::cell(inner_assets))), + ReferenceType::Internal(ResolvedVc::cell(inner_assets)), ) .module(); @@ -198,7 +196,7 @@ async fn wrap_edge_page( let wrapped = asset_context .process( Vc::upcast(source), - Value::new(ReferenceType::Internal(ResolvedVc::cell(inner_assets))), + ReferenceType::Internal(ResolvedVc::cell(inner_assets)), ) .module(); diff --git a/crates/next-core/src/next_app/app_route_entry.rs b/crates/next-core/src/next_app/app_route_entry.rs index cdffe58ed2e93..085e75e8aa81e 100644 --- a/crates/next-core/src/next_app/app_route_entry.rs +++ b/crates/next-core/src/next_app/app_route_entry.rs @@ -1,6 +1,6 @@ use anyhow::Result; use turbo_rcstr::RcStr; -use turbo_tasks::{fxindexmap, ResolvedVc, Value, ValueToString, Vc}; +use turbo_tasks::{ResolvedVc, ValueToString, Vc, fxindexmap}; use turbo_tasks_fs::FileSystemPath; use turbopack::ModuleAssetContext; use turbopack_core::{ @@ -16,7 +16,7 @@ use crate::{ next_config::{NextConfig, OutputType}, next_edge::entry::wrap_edge_entry, parse_segment_config_from_source, - util::{load_next_js_template, NextRuntime}, + util::{NextRuntime, load_next_js_template}, }; /// Computes the entry for a Next.js app route. 
@@ -93,7 +93,7 @@ pub async fn get_app_route_entry( let userland_module = module_asset_context .process( source, - Value::new(ReferenceType::Entry(EntryReferenceSubType::AppRoute)), + ReferenceType::Entry(EntryReferenceSubType::AppRoute), ) .module() .to_resolved() @@ -106,7 +106,7 @@ pub async fn get_app_route_entry( let mut rsc_entry = module_asset_context .process( Vc::upcast(virtual_source), - Value::new(ReferenceType::Internal(ResolvedVc::cell(inner_assets))), + ReferenceType::Internal(ResolvedVc::cell(inner_assets)), ) .module(); @@ -162,7 +162,7 @@ async fn wrap_edge_route( let wrapped = asset_context .process( Vc::upcast(source), - Value::new(ReferenceType::Internal(ResolvedVc::cell(inner_assets))), + ReferenceType::Internal(ResolvedVc::cell(inner_assets)), ) .module(); diff --git a/crates/next-core/src/next_app/metadata/image.rs b/crates/next-core/src/next_app/metadata/image.rs index 7a41a3b6a1723..85cdab0196f4a 100644 --- a/crates/next-core/src/next_app/metadata/image.rs +++ b/crates/next-core/src/next_app/metadata/image.rs @@ -2,7 +2,7 @@ //! //! See `next/src/build/webpack/loaders/next-metadata-image-loader` -use anyhow::{bail, Result}; +use anyhow::{Result, bail}; use indoc::formatdoc; use turbo_rcstr::RcStr; use turbo_tasks::{ValueToString, Vc}; @@ -29,7 +29,7 @@ async fn hash_file_content(path: Vc) -> Result { Ok(match &*original_file_content { FileContent::Content(content) => { - let content = content.content().to_bytes()?; + let content = content.content().to_bytes(); hash_xxh3_hash64(&*content) } FileContent::NotFound => { @@ -53,18 +53,29 @@ pub async fn dynamic_image_metadata_source( let use_numeric_sizes = ty == "twitter" || ty == "openGraph"; let sizes = if use_numeric_sizes { - "data.width = size.width; data.height = size.height;" + "data.width = size.width; data.height = size.height;".to_string() } else { - "data.sizes = size.width + \"x\" + size.height;" + // Note: This case seemingly can never happen because this code runs for dynamic metadata + // which has e.g. a `.js` or `.ts` extension, not `.svg`. The branching code is still here to + // match the static implementation. + // + // For SVGs, skip sizes and use "any" to let them scale automatically based on the viewport. + // For images that don't provide the size properly, use "any" as well. + // If the size is present, use the actual size for the image.
+ let sizes = if ext == "svg" { + "any" + } else { + "${size.width}x${size.height}" + }; + + format!("data.sizes = `{sizes}`;") }; let source = Vc::upcast(FileSource::new(path)); let module = asset_context .process( source, - turbo_tasks::Value::new(ReferenceType::EcmaScriptModules( - EcmaScriptModulesReferenceSubType::Undefined, - )), + ReferenceType::EcmaScriptModules(EcmaScriptModulesReferenceSubType::Undefined), ) .module(); let exports = &*collect_direct_exports(module).await?; @@ -113,7 +124,7 @@ pub async fn dynamic_image_metadata_source( }} "#, exported_fields_excluding_default = exported_fields_excluding_default, - resource_path = StringifyJs(&format!("./{}.{}", stem, ext)), + resource_path = StringifyJs(&format!("./{stem}.{ext}")), pathname_prefix = StringifyJs(&page.to_string()), page_segment = StringifyJs(stem), sizes = sizes, diff --git a/crates/next-core/src/next_app/metadata/mod.rs b/crates/next-core/src/next_app/metadata/mod.rs index b9192ceceddd0..cf187c878dd61 100644 --- a/crates/next-core/src/next_app/metadata/mod.rs +++ b/crates/next-core/src/next_app/metadata/mod.rs @@ -247,7 +247,7 @@ pub fn is_metadata_route(mut route: &str) -> bool { let mut page = route.to_string(); if !page.starts_with('/') { - page = format!("/{}", page); + page = format!("/{page}"); } !page.ends_with("/page") && is_metadata_route_file(&page, &[], false) @@ -276,7 +276,12 @@ fn format_radix(mut x: u32, radix: u32) -> String { } result.reverse(); - result[..6].iter().collect() + + // We only need the first 6 characters of the hash but sometimes the hash is too short. + // In JavaScript, we use `toString(36).slice(0, 6)` to get the first 6 characters of the hash, + // but it will automatically take the minimum of the length of the hash and 6. Rust will panic. + let len = result.len().min(6); + result[..len].iter().collect() } /// If there's special convention like (...) or @ in the page path, @@ -360,7 +365,7 @@ pub fn normalize_metadata_route(mut page: AppPage) -> Result { #[cfg(test)] mod test { - use super::normalize_metadata_route; + use super::{djb2_hash, format_radix, normalize_metadata_route}; use crate::next_app::AppPage; #[test] @@ -385,4 +390,10 @@ mod test { assert_eq!(&normalized.to_string(), expected); } } + + #[test] + fn test_format_radix_doesnt_panic_with_result_less_than_6_characters() { + let hash = format_radix(djb2_hash("/lookup/[domain]/(dns)"), 36); + assert!(hash.len() < 6); + } } diff --git a/crates/next-core/src/next_app/metadata/route.rs b/crates/next-core/src/next_app/metadata/route.rs index 4186edf4fe842..62b10c509af54 100644 --- a/crates/next-core/src/next_app/metadata/route.rs +++ b/crates/next-core/src/next_app/metadata/route.rs @@ -2,7 +2,7 @@ //! //! 
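For context on the `format_radix` guard above, here is a minimal standalone sketch (plain Rust, hypothetical helper name, not part of the patch) of the behavior difference it works around: JavaScript's `toString(36).slice(0, 6)` clamps silently, while Rust range slicing panics past the end of the string.

```rust
// Hypothetical helper mirroring the clamp in `format_radix`: base-36 hashes of
// short routes can be fewer than 6 characters long.
fn hash_prefix(hash_radix36: &str) -> &str {
    // JS `toString(36).slice(0, 6)` clamps automatically; Rust's `[..6]` would
    // panic on a 3-character string, so clamp the length first.
    let len = hash_radix36.len().min(6);
    &hash_radix36[..len] // byte slicing is fine here: base-36 output is ASCII
}

fn main() {
    assert_eq!(hash_prefix("k7q"), "k7q"); // shorter than 6: returned unchanged
    assert_eq!(hash_prefix("k7q9x2ab"), "k7q9x2"); // clamped to the first 6 chars
}
```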
See `next/src/build/webpack/loaders/next-metadata-route-loader` -use anyhow::{bail, Ok, Result}; +use anyhow::{Ok, Result, bail}; use base64::{display::Base64Display, engine::general_purpose::STANDARD}; use indoc::{formatdoc, indoc}; use turbo_tasks::{ValueToString, Vc}; @@ -18,7 +18,7 @@ use crate::{ app_structure::MetadataItem, mode::NextMode, next_app::{ - app_entry::AppEntry, app_route_entry::get_app_route_entry, AppPage, PageSegment, PageType, + AppPage, PageSegment, PageType, app_entry::AppEntry, app_route_entry::get_app_route_entry, }, next_config::NextConfig, parse_segment_config_from_source, @@ -117,7 +117,7 @@ async fn get_base64_file_content(path: Vc) -> Result { Ok(match &*original_file_content { FileContent::Content(content) => { - let content = content.content().to_bytes()?; + let content = content.content().to_bytes(); Base64Display::new(&content, &STANDARD).to_string() } FileContent::NotFound => { @@ -239,8 +239,10 @@ async fn dynamic_text_route_source(path: Vc) -> Result) -> Result, ) -> Result> { - Ok(ImportMapping::Alternatives(vec![ImportMapping::External( - Some(format!("next/dist/compiled/{}", &*package_name.await?).into()), - ExternalType::CommonJs, - ExternalTraced::Traced, - ) - .resolved_cell()]) + Ok(ImportMapping::Alternatives(vec![ + ImportMapping::External( + Some(format!("next/dist/compiled/{}", &*package_name.await?).into()), + ExternalType::CommonJs, + ExternalTraced::Traced, + ) + .resolved_cell(), + ]) .cell()) } diff --git a/crates/next-core/src/next_client/context.rs b/crates/next-core/src/next_client/context.rs index 9e152fefc0497..a62528ba68677 100644 --- a/crates/next-core/src/next_client/context.rs +++ b/crates/next-core/src/next_client/context.rs @@ -1,26 +1,26 @@ use std::iter::once; use anyhow::Result; -use turbo_rcstr::RcStr; -use turbo_tasks::{FxIndexMap, OptionVcExt, ResolvedVc, Value, Vc}; +use turbo_rcstr::{RcStr, rcstr}; +use turbo_tasks::{FxIndexMap, OptionVcExt, ResolvedVc, TaskInput, Vc}; use turbo_tasks_env::EnvMap; use turbo_tasks_fs::FileSystemPath; use turbopack::{ css::chunk::CssChunkType, module_options::{ - module_options_context::ModuleOptionsContext, CssOptionsContext, EcmascriptOptionsContext, - JsxTransformOptions, ModuleRule, TypeofWindow, TypescriptTransformOptions, + CssOptionsContext, EcmascriptOptionsContext, JsxTransformOptions, ModuleRule, TypeofWindow, + TypescriptTransformOptions, module_options_context::ModuleOptionsContext, }, resolve_options_context::ResolveOptionsContext, }; use turbopack_browser::{ - react_refresh::assert_can_resolve_react_refresh, BrowserChunkingContext, ContentHashing, - CurrentChunkMethod, + BrowserChunkingContext, ContentHashing, CurrentChunkMethod, + react_refresh::assert_can_resolve_react_refresh, }; use turbopack_core::{ chunk::{ - module_id_strategies::ModuleIdStrategy, ChunkingConfig, ChunkingContext, MangleType, - MinifyType, SourceMapsType, + ChunkingConfig, ChunkingContext, MangleType, MinifyType, SourceMapsType, + module_id_strategies::ModuleIdStrategy, }, compile_time_info::{ CompileTimeDefineValue, CompileTimeDefines, CompileTimeInfo, DefineableNameSegment, @@ -49,8 +49,8 @@ use crate::{ }, next_shared::{ resolve::{ - get_invalid_server_only_resolve_plugin, ModuleFeatureReportResolvePlugin, - NextSharedRuntimeResolvePlugin, + ModuleFeatureReportResolvePlugin, NextSharedRuntimeResolvePlugin, + get_invalid_server_only_resolve_plugin, }, transforms::{ emotion::get_emotion_transform_rule, @@ -102,14 +102,14 @@ async fn next_client_free_vars(define_env: Vc) -> Result, ) -> Result> { 
CompileTimeInfo::builder( - Environment::new(Value::new(ExecutionEnvironment::Browser( + Environment::new(ExecutionEnvironment::Browser( BrowserEnvironment { dom: true, web_worker: false, @@ -129,7 +129,7 @@ pub async fn get_client_compile_time_info( browserslist_query: browserslist_query.to_owned(), } .resolved_cell(), - ))) + )) .to_resolved() .await?, ) @@ -139,8 +139,8 @@ pub async fn get_client_compile_time_info( .await } -#[turbo_tasks::value(shared, serialization = "auto_for_input")] -#[derive(Debug, Copy, Clone, Hash)] +#[turbo_tasks::value(shared)] +#[derive(Debug, Copy, Clone, Hash, TaskInput)] pub enum ClientContextType { Pages { pages_dir: ResolvedVc, @@ -155,13 +155,13 @@ pub enum ClientContextType { #[turbo_tasks::function] pub async fn get_client_resolve_options_context( project_path: ResolvedVc, - ty: Value, + ty: ClientContextType, mode: Vc, next_config: Vc, execution_context: Vc, ) -> Result> { let next_client_import_map = - get_next_client_import_map(*project_path, ty, next_config, execution_context) + get_next_client_import_map(*project_path, ty, next_config, mode, execution_context) .to_resolved() .await?; let next_client_fallback_import_map = get_next_client_fallback_import_map(ty) @@ -231,7 +231,7 @@ pub async fn get_client_module_options_context( project_path: ResolvedVc, execution_context: ResolvedVc, env: ResolvedVc, - ty: Value, + ty: ClientContextType, mode: Vc, next_config: Vc, encryption_key: ResolvedVc, @@ -265,7 +265,7 @@ pub async fn get_client_module_options_context( // foreign_code_context_condition. This allows to import codes from // node_modules that requires webpack loaders, which next-dev implicitly // does by default. - let conditions = vec!["browser".into(), mode.await?.condition().into()]; + let conditions = vec![rcstr!("browser"), mode.await?.condition().into()]; let foreign_enable_webpack_loaders = webpack_loader_options( project_path, next_config, @@ -273,7 +273,7 @@ pub async fn get_client_module_options_context( conditions .iter() .cloned() - .chain(once("foreign".into())) + .chain(once(rcstr!("foreign"))) .collect(), ) .await?; @@ -291,11 +291,9 @@ pub async fn get_client_module_options_context( let target_browsers = env.runtime_versions(); let mut next_client_rules = - get_next_client_transforms_rules(next_config, ty.into_value(), mode, false, encryption_key) - .await?; + get_next_client_transforms_rules(next_config, ty, mode, false, encryption_key).await?; let foreign_next_client_rules = - get_next_client_transforms_rules(next_config, ty.into_value(), mode, true, encryption_key) - .await?; + get_next_client_transforms_rules(next_config, ty, mode, true, encryption_key).await?; let additional_rules: Vec = vec![ get_swc_ecma_transform_plugin_rule(next_config, project_path).await?, get_relay_transform_rule(next_config, project_path).await?, @@ -344,12 +342,15 @@ pub async fn get_client_module_options_context( source_maps, ..Default::default() }, - preset_env_versions: Some(env), + environment: Some(env), execution_context: Some(execution_context), tree_shaking_mode: tree_shaking_mode_for_user_code, enable_postcss_transform, side_effect_free_packages: next_config.optimize_package_imports().owned().await?, keep_last_successful_parse: next_mode.is_development(), + remove_unused_exports: *next_config + .turbopack_remove_unused_exports(next_mode.is_development()) + .await?, ..Default::default() }; @@ -357,6 +358,8 @@ pub async fn get_client_module_options_context( let foreign_codes_options_context = ModuleOptionsContext { ecmascript: 
EcmascriptOptionsContext { enable_typeof_window_inlining: None, + // Ignore e.g. import(`${url}`) requests in node_modules. + ignore_dynamic_requests: true, ..module_options_context.ecmascript }, enable_webpack_loaders: foreign_enable_webpack_loaders, @@ -423,7 +426,7 @@ pub async fn get_client_module_options_context( pub async fn get_client_chunking_context( root_path: ResolvedVc, client_root: ResolvedVc, - client_root_to_root_path: ResolvedVc, + client_root_to_root_path: RcStr, asset_prefix: ResolvedVc>, chunk_suffix_path: ResolvedVc>, environment: ResolvedVc, @@ -432,22 +435,25 @@ pub async fn get_client_chunking_context( minify: Vc, source_maps: Vc, no_mangling: Vc, + scope_hoisting: Vc, ) -> Result>> { let next_mode = mode.await?; + let asset_prefix = asset_prefix.owned().await?; + let chunk_suffix_path = chunk_suffix_path.owned().await?; let mut builder = BrowserChunkingContext::builder( root_path, client_root, client_root_to_root_path, client_root, client_root - .join("static/chunks".into()) + .join(rcstr!("static/chunks")) .to_resolved() .await?, get_client_assets_path(*client_root).to_resolved().await?, environment, next_mode.runtime_type(), ) - .chunk_base_path(asset_prefix) + .chunk_base_path(asset_prefix.clone()) .chunk_suffix_path(chunk_suffix_path) .minify_type(if *minify.await? { MinifyType::Minify { @@ -468,23 +474,25 @@ pub async fn get_client_chunking_context( if next_mode.is_development() { builder = builder.hot_module_replacement().use_file_source_map_uris(); } else { - builder = builder.chunking_config( - Vc::::default().to_resolved().await?, - ChunkingConfig { - min_chunk_size: 50_000, - max_chunk_count_per_group: 40, - max_merge_chunk_size: 200_000, - ..Default::default() - }, - ); - builder = builder.chunking_config( - Vc::::default().to_resolved().await?, - ChunkingConfig { - max_merge_chunk_size: 100_000, - ..Default::default() - }, - ); - builder = builder.use_content_hashing(ContentHashing::Direct { length: 16 }) + builder = builder + .chunking_config( + Vc::::default().to_resolved().await?, + ChunkingConfig { + min_chunk_size: 50_000, + max_chunk_count_per_group: 40, + max_merge_chunk_size: 200_000, + ..Default::default() + }, + ) + .chunking_config( + Vc::::default().to_resolved().await?, + ChunkingConfig { + max_merge_chunk_size: 100_000, + ..Default::default() + }, + ) + .use_content_hashing(ContentHashing::Direct { length: 16 }) + .module_merging(*scope_hoisting.await?); } Ok(Vc::upcast(builder.build())) @@ -492,13 +500,13 @@ pub async fn get_client_chunking_context( #[turbo_tasks::function] pub fn get_client_assets_path(client_root: Vc) -> Vc { - client_root.join("static/media".into()) + client_root.join(rcstr!("static/media")) } #[turbo_tasks::function] pub async fn get_client_runtime_entries( project_root: Vc, - ty: Value, + ty: ClientContextType, mode: Vc, next_config: Vc, execution_context: Vc, @@ -520,22 +528,22 @@ pub async fn get_client_runtime_entries( runtime_entries.push( RuntimeEntry::Request( request.to_resolved().await?, - project_root.join("_".into()).to_resolved().await?, + project_root.join(rcstr!("_")).to_resolved().await?, ) .resolved_cell(), ) }; } - if matches!(*ty, ClientContextType::App { .. },) { + if matches!(ty, ClientContextType::App { .. 
},) { runtime_entries.push( RuntimeEntry::Request( - Request::parse(Value::new(Pattern::Constant( - "next/dist/client/app-next-turbopack.js".into(), + Request::parse(Pattern::Constant(rcstr!( + "next/dist/client/app-next-turbopack.js" ))) .to_resolved() .await?, - project_root.join("_".into()).to_resolved().await?, + project_root.join(rcstr!("_")).to_resolved().await?, ) .resolved_cell(), ); diff --git a/crates/next-core/src/next_client/mod.rs b/crates/next-core/src/next_client/mod.rs index 3291b7c685602..0224a7fdf6551 100644 --- a/crates/next-core/src/next_client/mod.rs +++ b/crates/next-core/src/next_client/mod.rs @@ -3,7 +3,8 @@ pub(crate) mod runtime_entry; pub(crate) mod transforms; pub use context::{ - get_client_chunking_context, get_client_compile_time_info, get_client_module_options_context, - get_client_resolve_options_context, get_client_runtime_entries, ClientContextType, + ClientContextType, get_client_chunking_context, get_client_compile_time_info, + get_client_module_options_context, get_client_resolve_options_context, + get_client_runtime_entries, }; pub use runtime_entry::{RuntimeEntries, RuntimeEntry}; diff --git a/crates/next-core/src/next_client/runtime_entry.rs b/crates/next-core/src/next_client/runtime_entry.rs index 048a26f92e13f..fd7ef125a9894 100644 --- a/crates/next-core/src/next_client/runtime_entry.rs +++ b/crates/next-core/src/next_client/runtime_entry.rs @@ -1,4 +1,4 @@ -use anyhow::{bail, Result}; +use anyhow::{Result, bail}; use turbo_tasks::{ResolvedVc, ValueToString, Vc}; use turbo_tasks_fs::FileSystemPath; use turbopack_core::{ diff --git a/crates/next-core/src/next_client_reference/css_client_reference/css_client_reference_module.rs b/crates/next-core/src/next_client_reference/css_client_reference/css_client_reference_module.rs index 55b594d073b71..55c0b79285abd 100644 --- a/crates/next-core/src/next_client_reference/css_client_reference/css_client_reference_module.rs +++ b/crates/next-core/src/next_client_reference/css_client_reference/css_client_reference_module.rs @@ -1,5 +1,5 @@ -use anyhow::{bail, Result}; -use turbo_rcstr::RcStr; +use anyhow::{Result, bail}; +use turbo_rcstr::{RcStr, rcstr}; use turbo_tasks::{ResolvedVc, ValueToString, Vc}; use turbopack::css::chunk::CssChunkPlaceable; use turbopack_core::{ @@ -31,18 +31,13 @@ impl CssClientReferenceModule { } } -#[turbo_tasks::function] -fn css_client_reference_modifier() -> Vc { - Vc::cell("css client reference".into()) -} - #[turbo_tasks::value_impl] impl Module for CssClientReferenceModule { #[turbo_tasks::function] fn ident(&self) -> Vc { self.client_module .ident() - .with_modifier(css_client_reference_modifier()) + .with_modifier(rcstr!("css client reference")) } #[turbo_tasks::function] @@ -85,7 +80,7 @@ impl ChunkableModuleReference for CssClientReference { fn chunking_type(&self) -> Vc { Vc::cell(Some(ChunkingType::Isolated { _ty: ChunkGroupType::Evaluated, - merge_tag: Some("client".into()), + merge_tag: Some(rcstr!("client")), })) } } @@ -102,6 +97,6 @@ impl ModuleReference for CssClientReference { impl ValueToString for CssClientReference { #[turbo_tasks::function] fn to_string(&self) -> Vc { - Vc::cell("css client reference to client".into()) + Vc::cell(rcstr!("css client reference to client")) } } diff --git a/crates/next-core/src/next_client_reference/css_client_reference/css_client_reference_transition.rs b/crates/next-core/src/next_client_reference/css_client_reference/css_client_reference_transition.rs index baf2cbc0cb40f..ec927de6cb51f 100644 --- 
a/crates/next-core/src/next_client_reference/css_client_reference/css_client_reference_transition.rs +++ b/crates/next-core/src/next_client_reference/css_client_reference/css_client_reference_transition.rs @@ -1,6 +1,6 @@ use anyhow::{Context, Result}; -use turbo_tasks::{ResolvedVc, Value, Vc}; -use turbopack::{css::chunk::CssChunkPlaceable, transition::Transition, ModuleAssetContext}; +use turbo_tasks::{ResolvedVc, Vc}; +use turbopack::{ModuleAssetContext, css::chunk::CssChunkPlaceable, transition::Transition}; use turbopack_core::{context::ProcessResult, reference_type::ReferenceType, source::Source}; use crate::next_client_reference::css_client_reference::css_client_reference_module::CssClientReferenceModule; @@ -25,7 +25,7 @@ impl Transition for NextCssClientReferenceTransition { self: Vc, source: Vc>, rsc_module_asset_context: Vc, - reference_type: Value, + reference_type: ReferenceType, ) -> Result> { let module = self.await? diff --git a/crates/next-core/src/next_client_reference/ecmascript_client_reference/ecmascript_client_reference_module.rs b/crates/next-core/src/next_client_reference/ecmascript_client_reference/ecmascript_client_reference_module.rs index 52f162adbd815..4a1c54d46e0ad 100644 --- a/crates/next-core/src/next_client_reference/ecmascript_client_reference/ecmascript_client_reference_module.rs +++ b/crates/next-core/src/next_client_reference/ecmascript_client_reference/ecmascript_client_reference_module.rs @@ -1,10 +1,9 @@ use std::{io::Write, iter::once}; -use anyhow::{bail, Context, Result}; +use anyhow::{Context, Result, bail}; use indoc::writedoc; -use once_cell::sync::Lazy; -use turbo_rcstr::RcStr; -use turbo_tasks::{ResolvedVc, Value, ValueToString, Vc}; +use turbo_rcstr::{RcStr, rcstr}; +use turbo_tasks::{IntoTraitRef, ResolvedVc, ValueToString, Vc}; use turbo_tasks_fs::File; use turbopack_core::{ asset::{Asset, AssetContent}, @@ -77,7 +76,7 @@ impl EcmascriptClientReferenceModule { // Adapted from https://github.com/facebook/react/blob/c5b9375767e2c4102d7e5559d383523736f1c902/packages/react-server-dom-webpack/src/ReactFlightWebpackNodeLoader.js#L323-L354 if let EcmascriptExports::EsmExports(exports) = &*self.client_module.get_exports().await? { is_esm = true; - let exports = exports.expand_exports().await?; + let exports = exports.expand_exports(None).await?; if !exports.dynamic_exports.is_empty() { // TODO: throw? warn? 
@@ -161,10 +160,7 @@ impl EcmascriptClientReferenceModule { let proxy_module = self .server_asset_context - .process( - Vc::upcast(proxy_source), - Value::new(ReferenceType::Undefined), - ) + .process(Vc::upcast(proxy_source), ReferenceType::Undefined) .module(); let Some(proxy_module) = @@ -177,30 +173,21 @@ impl EcmascriptClientReferenceModule { } } -#[turbo_tasks::function] -fn client_reference_modifier() -> Vc { - Vc::cell("client reference/proxy".into()) +pub fn ecmascript_client_reference_merge_tag() -> RcStr { + rcstr!("client") } - -#[turbo_tasks::function] -fn ecmascript_client_reference_client_ref_modifier() -> Vc { - Vc::cell("ecmascript client reference to client".into()) -} - -#[turbo_tasks::function] -fn ecmascript_client_reference_ssr_ref_modifier() -> Vc { - Vc::cell("ecmascript client reference to ssr".into()) +pub fn ecmascript_client_reference_merge_tag_ssr() -> RcStr { + rcstr!("ssr") } -pub static ECMASCRIPT_CLIENT_REFERENCE_MERGE_TAG_CLIENT: Lazy = - Lazy::new(|| "client".into()); -pub static ECMASCRIPT_CLIENT_REFERENCE_MERGE_TAG_SSR: Lazy = Lazy::new(|| "ssr".into()); - #[turbo_tasks::value_impl] impl Module for EcmascriptClientReferenceModule { #[turbo_tasks::function] - fn ident(&self) -> Vc { - self.server_ident.with_modifier(client_reference_modifier()) + async fn ident(&self) -> Result> { + Ok(self + .server_ident + .with_modifier(rcstr!("client reference proxy")) + .with_layer(self.server_asset_context.into_trait_ref().await?.layer())) } #[turbo_tasks::function] @@ -221,8 +208,8 @@ impl Module for EcmascriptClientReferenceModule { EcmascriptClientReference::new( *ResolvedVc::upcast(*client_module), ChunkGroupType::Evaluated, - Some(ECMASCRIPT_CLIENT_REFERENCE_MERGE_TAG_CLIENT.clone()), - ecmascript_client_reference_client_ref_modifier(), + Some(ecmascript_client_reference_merge_tag()), + rcstr!("ecmascript client reference to client"), ) .to_resolved() .await?, @@ -231,8 +218,8 @@ impl Module for EcmascriptClientReferenceModule { EcmascriptClientReference::new( *ResolvedVc::upcast(*ssr_module), ChunkGroupType::Entry, - Some(ECMASCRIPT_CLIENT_REFERENCE_MERGE_TAG_SSR.clone()), - ecmascript_client_reference_ssr_ref_modifier(), + Some(ecmascript_client_reference_merge_tag_ssr()), + rcstr!("ecmascript client reference to ssr"), ) .to_resolved() .await?, @@ -298,11 +285,6 @@ struct EcmascriptClientReferenceProxyChunkItem { chunking_context: ResolvedVc>, } -#[turbo_tasks::function] -fn client_reference_description() -> Vc { - Vc::cell("client references".into()) -} - #[turbo_tasks::value_impl] impl ChunkItem for EcmascriptClientReferenceProxyChunkItem { #[turbo_tasks::function] @@ -348,7 +330,7 @@ pub(crate) struct EcmascriptClientReference { module: ResolvedVc>, ty: ChunkGroupType, merge_tag: Option, - description: ResolvedVc, + description: RcStr, } #[turbo_tasks::value_impl] @@ -358,7 +340,7 @@ impl EcmascriptClientReference { module: ResolvedVc>, ty: ChunkGroupType, merge_tag: Option, - description: ResolvedVc, + description: RcStr, ) -> Vc { Self::cell(EcmascriptClientReference { module, @@ -392,6 +374,6 @@ impl ModuleReference for EcmascriptClientReference { impl ValueToString for EcmascriptClientReference { #[turbo_tasks::function] fn to_string(&self) -> Vc { - *self.description + Vc::cell(self.description.clone()) } } diff --git a/crates/next-core/src/next_client_reference/ecmascript_client_reference/ecmascript_client_reference_transition.rs b/crates/next-core/src/next_client_reference/ecmascript_client_reference/ecmascript_client_reference_transition.rs index 
6e8faee8f72d6..a9a5784a03a05 100644 --- a/crates/next-core/src/next_client_reference/ecmascript_client_reference/ecmascript_client_reference_transition.rs +++ b/crates/next-core/src/next_client_reference/ecmascript_client_reference/ecmascript_client_reference_transition.rs @@ -1,7 +1,6 @@ -use anyhow::{bail, Result}; -use turbo_rcstr::RcStr; -use turbo_tasks::{ResolvedVc, Value, Vc}; -use turbopack::{transition::Transition, ModuleAssetContext}; +use anyhow::{Result, bail}; +use turbo_tasks::{ResolvedVc, Vc}; +use turbopack::{ModuleAssetContext, transition::Transition}; use turbopack_core::{ context::ProcessResult, file_source::FileSource, @@ -35,19 +34,14 @@ impl NextEcmascriptClientReferenceTransition { #[turbo_tasks::value_impl] impl Transition for NextEcmascriptClientReferenceTransition { - #[turbo_tasks::function] - fn process_layer(self: Vc, layer: Vc) -> Vc { - layer - } - #[turbo_tasks::function] async fn process( self: Vc, source: Vc>, module_asset_context: Vc, - reference_type: Value, + reference_type: ReferenceType, ) -> Result> { - let part = match &*reference_type { + let part = match reference_type { ReferenceType::EcmaScriptModules(EcmaScriptModulesReferenceSubType::ImportPart( part, )) => Some(part), @@ -71,16 +65,18 @@ impl Transition for NextEcmascriptClientReferenceTransition { .replace("next/dist/esm/", "next/dist/") .into(), ); - Vc::upcast(FileSource::new_with_query(path, *ident_ref.query)) + Vc::upcast(FileSource::new_with_query_and_fragment( + path, + ident_ref.query.clone(), + ident_ref.fragment.clone(), + )) } else { source }; let client_module = this.client_transition.process( client_source, module_asset_context, - Value::new(ReferenceType::Entry( - EntryReferenceSubType::AppClientComponent, - )), + ReferenceType::Entry(EntryReferenceSubType::AppClientComponent), ); let ProcessResult::Module(client_module) = *client_module.await? else { return Ok(ProcessResult::Ignore.cell()); @@ -89,9 +85,7 @@ impl Transition for NextEcmascriptClientReferenceTransition { let ssr_module = this.ssr_transition.process( source, module_asset_context, - Value::new(ReferenceType::Entry( - EntryReferenceSubType::AppClientComponent, - )), + ReferenceType::Entry(EntryReferenceSubType::AppClientComponent), ); let ProcessResult::Module(ssr_module) = *ssr_module.await? 
else { @@ -118,7 +112,7 @@ impl Transition for NextEcmascriptClientReferenceTransition { *module_asset_context.compile_time_info, *module_asset_context.module_options_context, *module_asset_context.resolve_options_context, - *module_asset_context.layer, + module_asset_context.layer.clone(), ); Ok(ProcessResult::Module(ResolvedVc::upcast( diff --git a/crates/next-core/src/next_client_reference/mod.rs b/crates/next-core/src/next_client_reference/mod.rs index d3cc0742c5ccc..2fa9b10d30cba 100644 --- a/crates/next-core/src/next_client_reference/mod.rs +++ b/crates/next-core/src/next_client_reference/mod.rs @@ -11,6 +11,6 @@ pub use ecmascript_client_reference::{ ecmascript_client_reference_transition::NextEcmascriptClientReferenceTransition, }; pub use visit_client_reference::{ - find_server_entries, ClientReference, ClientReferenceGraphResult, ClientReferenceType, - ClientReferenceTypes, ServerEntries, VisitedClientReferenceGraphNodes, + ClientReference, ClientReferenceGraphResult, ClientReferenceType, ClientReferenceTypes, + ServerEntries, VisitedClientReferenceGraphNodes, find_server_entries, }; diff --git a/crates/next-core/src/next_client_reference/visit_client_reference.rs b/crates/next-core/src/next_client_reference/visit_client_reference.rs index e4c775285cd74..70adbca2b46ae 100644 --- a/crates/next-core/src/next_client_reference/visit_client_reference.rs +++ b/crates/next-core/src/next_client_reference/visit_client_reference.rs @@ -6,10 +6,10 @@ use serde::{Deserialize, Serialize}; use tracing::Instrument; use turbo_rcstr::RcStr; use turbo_tasks::{ + FxIndexMap, FxIndexSet, NonLocalValue, ReadRef, ResolvedVc, TryJoinIterExt, ValueToString, Vc, debug::ValueDebugFormat, graph::{AdjacencyMap, GraphTraversal, Visit, VisitControlFlow}, trace::TraceRawVcs, - FxIndexMap, FxIndexSet, NonLocalValue, ReadRef, ResolvedVc, TryJoinIterExt, ValueToString, Vc, }; use turbo_tasks_fs::FileSystemPath; use turbopack::css::chunk::CssChunkPlaceable; @@ -19,8 +19,8 @@ use turbopack_core::{ use crate::{ next_client_reference::{ - ecmascript_client_reference::ecmascript_client_reference_module::EcmascriptClientReferenceModule, CssClientReferenceModule, + ecmascript_client_reference::ecmascript_client_reference_module::EcmascriptClientReferenceModule, }, next_server_component::server_component_module::NextServerComponentModule, next_server_utility::server_utility_module::NextServerUtilityModule, @@ -151,7 +151,10 @@ pub struct ServerEntries { } #[turbo_tasks::function] -pub async fn find_server_entries(entry: ResolvedVc>) -> Result> { +pub async fn find_server_entries( + entry: ResolvedVc>, + include_traced: bool, +) -> Result> { async move { let entry_path = entry.ident().path().to_resolved().await?; let graph = AdjacencyMap::new() @@ -166,6 +169,7 @@ pub async fn find_server_entries(entry: ResolvedVc>) -> Result>) -> Result for VisitClientReference { fn edges(&mut self, node: &VisitClientReferenceNode) -> Self::EdgesFuture { let node = node.clone(); + let include_traced = self.include_traced; async move { let parent_module = match node.ty { // This should never occur since we always skip visiting these @@ -314,11 +321,12 @@ impl Visit for VisitClientReference { } }; - let referenced_modules = primary_chunkable_referenced_modules(*parent_module).await?; + let referenced_modules = + primary_chunkable_referenced_modules(*parent_module, include_traced).await?; let referenced_modules = referenced_modules .iter() - .flat_map(|(chunking_type, modules)| match chunking_type { + .flat_map(|(chunking_type, _, modules)| 
match chunking_type { ChunkingType::Traced => None, _ => Some(modules.iter()), }) diff --git a/crates/next-core/src/next_config.rs b/crates/next-core/src/next_config.rs index 3d5665d6943cc..26a350927d151 100644 --- a/crates/next-core/src/next_config.rs +++ b/crates/next-core/src/next_config.rs @@ -1,16 +1,18 @@ -use anyhow::{bail, Context, Result}; +use anyhow::{Context, Result, bail}; use rustc_hash::FxHashSet; use serde::{Deserialize, Deserializer, Serialize}; use serde_json::Value as JsonValue; -use turbo_rcstr::RcStr; +use turbo_esregex::EsRegex; +use turbo_rcstr::{RcStr, rcstr}; use turbo_tasks::{ - debug::ValueDebugFormat, trace::TraceRawVcs, FxIndexMap, NonLocalValue, OperationValue, - ResolvedVc, TaskInput, Vc, + FxIndexMap, NonLocalValue, OperationValue, ResolvedVc, TaskInput, Vc, debug::ValueDebugFormat, + trace::TraceRawVcs, }; use turbo_tasks_env::EnvMap; use turbo_tasks_fs::FileSystemPath; use turbopack::module_options::{ - module_options_context::MdxTransformOptions, LoaderRuleItem, OptionWebpackRules, + ConditionItem, ConditionPath, LoaderRuleItem, OptionWebpackRules, + module_options_context::{MdxTransformOptions, OptionWebpackConditions}, }; use turbopack_core::{ issue::{Issue, IssueSeverity, IssueStage, OptionStyledString, StyledString}, @@ -60,7 +62,7 @@ impl Default for CacheKinds { #[turbo_tasks::value(serialization = "custom", eq = "manual")] #[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize, OperationValue)] -#[serde(rename_all = "camelCase")] +#[serde(default, rename_all = "camelCase")] pub struct NextConfig { // TODO all fields should be private and access should be wrapped within a turbo-tasks function // Otherwise changing NextConfig will lead to invalidating all tasks accessing it. @@ -96,6 +98,10 @@ pub struct NextConfig { pub output: Option, pub turbopack: Option, production_browser_source_maps: bool, + output_file_tracing_includes: Option, + output_file_tracing_excludes: Option, + // TODO: This option is not respected, it uses Turbopack's root instead. + output_file_tracing_root: Option, /// Enables the bundling of node_modules packages (externals) for pages /// server-side bundles. @@ -541,11 +547,54 @@ pub struct TurbopackConfig { /// This option has been replaced by `rules`. 
pub loaders: Option, pub rules: Option>, + #[turbo_tasks(trace_ignore)] + pub conditions: Option>, pub resolve_alias: Option>, pub resolve_extensions: Option>, pub module_ids: Option, } +#[derive(Serialize, Deserialize, Clone, PartialEq, Eq, Debug)] +pub struct RegexComponents { + source: RcStr, + flags: RcStr, +} + +#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] +#[serde(tag = "type", content = "value", rename_all = "camelCase")] +pub enum ConfigConditionPath { + Glob(RcStr), + Regex(RegexComponents), +} + +impl TryInto for ConfigConditionPath { + fn try_into(self) -> Result { + Ok(match self { + ConfigConditionPath::Glob(path) => ConditionPath::Glob(path), + ConfigConditionPath::Regex(path) => { + ConditionPath::Regex(EsRegex::new(&path.source, &path.flags)?.resolved_cell()) + } + }) + } + + type Error = anyhow::Error; +} + +#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)] +pub struct ConfigConditionItem { + pub path: ConfigConditionPath, +} + +impl TryInto for ConfigConditionItem { + fn try_into(self) -> Result { + Ok(ConditionItem { + path: self.path.try_into()?, + }) + } + + type Error = anyhow::Error; +} + #[derive( Clone, Debug, PartialEq, Eq, Serialize, Deserialize, TraceRawVcs, NonLocalValue, OperationValue, )] @@ -718,14 +767,10 @@ pub struct ExperimentalConfig { /// Automatically apply the "modularize_imports" optimization to imports of /// the specified packages. optimize_package_imports: Option>, - output_file_tracing_ignores: Option>, - output_file_tracing_includes: Option, - output_file_tracing_root: Option, /// Using this feature will enable the `react@experimental` for the `app` /// directory. ppr: Option, taint: Option, - react_owner_stack: Option, #[serde(rename = "routerBFCache")] router_bfcache: Option, proxy_timeout: Option, @@ -751,6 +796,13 @@ pub struct ExperimentalConfig { turbopack_persistent_caching: Option, turbopack_source_maps: Option, turbopack_tree_shaking: Option, + turbopack_scope_hoisting: Option, + // Whether to enable the global-not-found convention + global_not_found: Option, + /// Defaults to false in development mode, true in production mode. + turbopack_remove_unused_exports: Option, + /// Devtool option for the segment explorer. 
+ devtool_segment_explorer: Option, } #[derive( @@ -1061,13 +1113,17 @@ pub struct OptionSubResourceIntegrity(Option); #[turbo_tasks::value(transparent)] pub struct OptionServerActions(Option); +#[turbo_tasks::value(transparent)] +pub struct OptionJsonValue(pub Option); + #[turbo_tasks::value_impl] impl NextConfig { #[turbo_tasks::function] pub async fn from_string(string: Vc) -> Result> { let string = string.await?; - let config: NextConfig = serde_json::from_str(&string) - .with_context(|| format!("failed to parse next.config.js: {}", string))?; + let mut jdeserializer = serde_json::Deserializer::from_str(&string); + let config: NextConfig = serde_path_to_error::deserialize(&mut jdeserializer) + .with_context(|| format!("failed to parse next.config.js: {string}"))?; Ok(config.cell()) } @@ -1140,6 +1196,11 @@ impl NextConfig { Vc::cell(self.page_extensions.clone()) } + #[turbo_tasks::function] + pub fn is_global_not_found_enabled(&self) -> Vc { + Vc::cell(self.experimental.global_not_found.unwrap_or_default()) + } + #[turbo_tasks::function] pub fn transpile_packages(&self) -> Vc> { Vc::cell(self.transpile_packages.clone().unwrap_or_default()) @@ -1228,6 +1289,24 @@ impl NextConfig { Vc::cell(Some(ResolvedVc::cell(rules))) } + #[turbo_tasks::function] + pub fn webpack_conditions(&self) -> Result> { + let Some(config_conditions) = self.turbopack.as_ref().and_then(|t| t.conditions.as_ref()) + else { + return Ok(Vc::cell(None)); + }; + + let conditions = config_conditions + .iter() + .map(|(k, v)| { + let item: Result = TryInto::::try_into((*v).clone()); + item.map(|item| (k.clone(), item)) + }) + .collect::>>()?; + + Ok(Vc::cell(Some(ResolvedVc::cell(conditions)))) + } + #[turbo_tasks::function] pub fn persistent_caching_enabled(&self) -> Result> { Ok(Vc::cell( @@ -1263,7 +1342,7 @@ impl NextConfig { } #[turbo_tasks::function] - pub async fn import_externals(&self) -> Result> { + pub fn import_externals(&self) -> Result> { Ok(Vc::cell(match self.experimental.esm_externals { Some(EsmExternals::Bool(b)) => b, Some(EsmExternals::Loose(_)) => bail!("esmExternals = \"loose\" is not supported"), @@ -1391,7 +1470,7 @@ impl NextConfig { let this = self.await?; match &this.deployment_id { - Some(deployment_id) => Ok(Vc::cell(Some(format!("?dpl={}", deployment_id).into()))), + Some(deployment_id) => Ok(Vc::cell(Some(format!("?dpl={deployment_id}").into()))), None => Ok(Vc::cell(None)), } } @@ -1417,11 +1496,6 @@ impl NextConfig { Vc::cell(self.experimental.taint.unwrap_or(false)) } - #[turbo_tasks::function] - pub fn enable_react_owner_stack(&self) -> Vc { - Vc::cell(self.experimental.react_owner_stack.unwrap_or(false)) - } - #[turbo_tasks::function] pub fn enable_router_bfcache(&self) -> Vc { Vc::cell(self.experimental.router_bfcache.unwrap_or(false)) @@ -1494,11 +1568,26 @@ impl NextConfig { } #[turbo_tasks::function] - pub fn module_ids(&self) -> Vc { - let Some(module_ids) = self.turbopack.as_ref().and_then(|t| t.module_ids) else { - return Vc::cell(None); - }; - Vc::cell(Some(module_ids)) + pub fn turbopack_remove_unused_exports(&self, is_development: bool) -> Vc { + Vc::cell( + self.experimental + .turbopack_remove_unused_exports + .unwrap_or(!is_development), + ) + } + + #[turbo_tasks::function] + pub async fn module_ids(&self, mode: Vc) -> Result> { + Ok(match *mode.await? 
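On the `NextConfig::from_string` change above (plain `serde_json::from_str` replaced by `serde_path_to_error::deserialize`), a minimal sketch of what the wrapper buys, assuming a made-up two-field config rather than the real `NextConfig`: the parse error now carries the path of the offending key instead of only a byte offset.

```rust
// Minimal sketch with a hypothetical config shape (not the real NextConfig),
// showing the key path that `serde_path_to_error` attaches to parse failures.
use serde::Deserialize;

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct Config {
    experimental: Experimental,
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct Experimental {
    ppr: bool,
}

fn main() {
    let json = r#"{ "experimental": { "ppr": "yes" } }"#;
    let mut de = serde_json::Deserializer::from_str(json);
    let err = serde_path_to_error::deserialize::<_, Config>(&mut de).unwrap_err();
    // Reports `experimental.ppr` plus the underlying serde error.
    eprintln!("failed to parse config at {}: {}", err.path(), err.inner());
}
```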
{ + // Ignore configuration in development mode, HMR only works with `named` + NextMode::Development => ModuleIds::Named.cell(), + NextMode::Build => self + .turbopack + .as_ref() + .and_then(|t| t.module_ids) + .unwrap_or(ModuleIds::Deterministic) + .cell(), + }) } #[turbo_tasks::function] @@ -1510,7 +1599,16 @@ impl NextConfig { } #[turbo_tasks::function] - pub async fn client_source_maps(&self, _mode: Vc) -> Result> { + pub async fn turbo_scope_hoisting(&self, mode: Vc) -> Result> { + Ok(Vc::cell(match *mode.await? { + // Ignore configuration in development mode to not break HMR + NextMode::Development => false, + NextMode::Build => self.experimental.turbopack_scope_hoisting.unwrap_or(true), + })) + } + + #[turbo_tasks::function] + pub fn client_source_maps(&self, _mode: Vc) -> Result> { // Temporarily always enable client source maps as tests regress. // TODO: Respect both `self.experimental.turbopack_source_maps` and // `self.production_browser_source_maps` @@ -1519,13 +1617,13 @@ impl NextConfig { } #[turbo_tasks::function] - pub async fn server_source_maps(&self) -> Result> { + pub fn server_source_maps(&self) -> Result> { let source_maps = self.experimental.turbopack_source_maps; Ok(Vc::cell(source_maps.unwrap_or(true))) } #[turbo_tasks::function] - pub async fn typescript_tsconfig_path(&self) -> Result>> { + pub fn typescript_tsconfig_path(&self) -> Result>> { Ok(Vc::cell( self.typescript .tsconfig_path @@ -1533,6 +1631,16 @@ impl NextConfig { .map(|path| path.to_owned().into()), )) } + + #[turbo_tasks::function] + pub fn output_file_tracing_includes(&self) -> Vc { + Vc::cell(self.output_file_tracing_includes.clone()) + } + + #[turbo_tasks::function] + pub fn output_file_tracing_excludes(&self) -> Vc { + Vc::cell(self.output_file_tracing_excludes.clone()) + } } /// A subset of ts/jsconfig that next.js implicitly @@ -1550,7 +1658,7 @@ impl JsConfig { pub async fn from_string(string: Vc) -> Result> { let string = string.await?; let config: JsConfig = serde_json::from_str(&string) - .with_context(|| format!("failed to parse next.config.js: {}", string))?; + .with_context(|| format!("failed to parse next.config.js: {string}"))?; Ok(config.cell()) } @@ -1571,9 +1679,8 @@ struct OutdatedConfigIssue { #[turbo_tasks::value_impl] impl Issue for OutdatedConfigIssue { - #[turbo_tasks::function] - fn severity(&self) -> Vc { - IssueSeverity::Error.into() + fn severity(&self) -> IssueSeverity { + IssueSeverity::Error } #[turbo_tasks::function] @@ -1590,7 +1697,7 @@ impl Issue for OutdatedConfigIssue { fn title(&self) -> Vc { StyledString::Line(vec![ StyledString::Code(self.old_name.clone()), - StyledString::Text(" has been replaced by ".into()), + StyledString::Text(rcstr!(" has been replaced by ")), StyledString::Code(self.new_name.clone()), ]) .cell() diff --git a/crates/next-core/src/next_dynamic/dynamic_module.rs b/crates/next-core/src/next_dynamic/dynamic_module.rs index f3988615af2f9..4b67c7f59c25c 100644 --- a/crates/next-core/src/next_dynamic/dynamic_module.rs +++ b/crates/next-core/src/next_dynamic/dynamic_module.rs @@ -1,8 +1,8 @@ use std::collections::BTreeMap; -use anyhow::{bail, Result}; +use anyhow::{Result, bail}; use indoc::formatdoc; -use turbo_rcstr::RcStr; +use turbo_rcstr::{RcStr, rcstr}; use turbo_tasks::{ResolvedVc, Vc}; use turbopack_core::{ asset::{Asset, AssetContent}, @@ -11,6 +11,7 @@ use turbopack_core::{ module::Module, module_graph::ModuleGraph, reference::{ModuleReferences, SingleChunkableModuleReference}, + resolve::ExportUsage, }; use turbopack_ecmascript::{ 
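One note on the `turbopack.conditions` plumbing added to next_config.rs above: given the `#[serde(tag = "type", content = "value", rename_all = "camelCase")]` attributes on `ConfigConditionPath`, the accepted JSON shapes should look roughly like the standalone analogue below (using `String` in place of `RcStr`; the glob value and regex flags are made-up examples).

```rust
// Standalone analogue of `ConfigConditionPath` (String instead of RcStr),
// showing the adjacently tagged JSON shapes such a config would deserialize.
use serde::Deserialize;

#[derive(Deserialize, Debug)]
#[serde(tag = "type", content = "value", rename_all = "camelCase")]
enum ConfigConditionPath {
    Glob(String),
    Regex { source: String, flags: String },
}

fn main() {
    let glob: ConfigConditionPath =
        serde_json::from_str(r#"{ "type": "glob", "value": "**/*.mdx" }"#).unwrap();
    let regex: ConfigConditionPath = serde_json::from_str(
        r#"{ "type": "regex", "value": { "source": "\\.mdx$", "flags": "i" } }"#,
    )
    .unwrap();
    println!("{glob:?}\n{regex:?}");
}
```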
chunk::{ @@ -22,11 +23,6 @@ use turbopack_ecmascript::{ utils::StringifyJs, }; -#[turbo_tasks::function] -fn modifier() -> Vc { - Vc::cell("next/dynamic entry".into()) -} - /// A [`NextDynamicEntryModule`] is a marker asset used to indicate which /// dynamic assets should appear in the dynamic manifest. #[turbo_tasks::value(shared)] @@ -42,16 +38,17 @@ impl NextDynamicEntryModule { } } -#[turbo_tasks::function] -fn dynamic_ref_description() -> Vc { - Vc::cell("next/dynamic reference".into()) +fn dynamic_ref_description() -> RcStr { + rcstr!("next/dynamic reference") } #[turbo_tasks::value_impl] impl Module for NextDynamicEntryModule { #[turbo_tasks::function] fn ident(&self) -> Vc { - self.module.ident().with_modifier(modifier()) + self.module + .ident() + .with_modifier(rcstr!("next/dynamic entry")) } #[turbo_tasks::function] @@ -60,6 +57,7 @@ impl Module for NextDynamicEntryModule { SingleChunkableModuleReference::new( Vc::upcast(*self.module), dynamic_ref_description(), + ExportUsage::all(), ) .to_resolved() .await?, @@ -71,7 +69,7 @@ impl Module for NextDynamicEntryModule { impl Asset for NextDynamicEntryModule { #[turbo_tasks::function] fn content(&self) -> Result> { - bail!("Next.js server component module has no content") + bail!("Next.js Server Component module has no content") } } @@ -102,15 +100,17 @@ impl EcmascriptChunkPlaceable for NextDynamicEntryModule { SingleChunkableModuleReference::new( Vc::upcast(*self.module), dynamic_ref_description(), + ExportUsage::all(), ) .to_resolved() .await?, ); let mut exports = BTreeMap::new(); + let default = rcstr!("default"); exports.insert( - "default".into(), - EsmExport::ImportedBinding(module_reference, "default".into(), false), + default.clone(), + EsmExport::ImportedBinding(module_reference, default, false), ); Ok(EcmascriptExports::EsmExports( diff --git a/crates/next-core/src/next_dynamic/dynamic_transition.rs b/crates/next-core/src/next_dynamic/dynamic_transition.rs index 603d6793e3446..9ee71b323a4aa 100644 --- a/crates/next-core/src/next_dynamic/dynamic_transition.rs +++ b/crates/next-core/src/next_dynamic/dynamic_transition.rs @@ -1,7 +1,6 @@ -use anyhow::{bail, Result}; -use turbo_rcstr::RcStr; -use turbo_tasks::{ResolvedVc, Value, Vc}; -use turbopack::{transition::Transition, ModuleAssetContext}; +use anyhow::{Result, bail}; +use turbo_tasks::{ResolvedVc, Vc}; +use turbopack::{ModuleAssetContext, transition::Transition}; use turbopack_core::{ context::{AssetContext, ProcessResult}, reference_type::ReferenceType, @@ -45,26 +44,19 @@ impl NextDynamicTransition { #[turbo_tasks::value_impl] impl Transition for NextDynamicTransition { - #[turbo_tasks::function] - fn process_layer(self: Vc, layer: Vc) -> Vc { - layer - } - #[turbo_tasks::function] async fn process( self: Vc, source: Vc>, module_asset_context: Vc, - _reference_type: Value, + _reference_type: ReferenceType, ) -> Result> { let module_asset_context = self.process_context(module_asset_context); let module = match self.await?.client_transition { - Some(client_transition) => client_transition.process( - source, - module_asset_context, - Value::new(ReferenceType::Undefined), - ), - None => module_asset_context.process(source, Value::new(ReferenceType::Undefined)), + Some(client_transition) => { + client_transition.process(source, module_asset_context, ReferenceType::Undefined) + } + None => module_asset_context.process(source, ReferenceType::Undefined), }; Ok(match &*module.try_into_module().await? 
{ diff --git a/crates/next-core/src/next_edge/context.rs b/crates/next-core/src/next_edge/context.rs index 71c4b97f2f46a..c957b761b9c9f 100644 --- a/crates/next-core/src/next_edge/context.rs +++ b/crates/next-core/src/next_edge/context.rs @@ -1,20 +1,20 @@ use anyhow::Result; -use turbo_rcstr::RcStr; -use turbo_tasks::{FxIndexMap, OptionVcExt, ResolvedVc, Value, Vc}; -use turbo_tasks_env::EnvMap; +use turbo_rcstr::{RcStr, rcstr}; +use turbo_tasks::{FxIndexMap, OptionVcExt, ResolvedVc, Vc}; +use turbo_tasks_env::{EnvMap, ProcessEnv}; use turbo_tasks_fs::FileSystemPath; use turbopack::{css::chunk::CssChunkType, resolve_options_context::ResolveOptionsContext}; use turbopack_browser::BrowserChunkingContext; use turbopack_core::{ chunk::{ - module_id_strategies::ModuleIdStrategy, ChunkingConfig, ChunkingContext, MangleType, - MinifyType, SourceMapsType, + ChunkingConfig, ChunkingContext, MangleType, MinifyType, SourceMapsType, + module_id_strategies::ModuleIdStrategy, }, compile_time_info::{ CompileTimeDefineValue, CompileTimeDefines, CompileTimeInfo, DefineableNameSegment, FreeVarReference, FreeVarReferences, }, - environment::{EdgeWorkerEnvironment, Environment, ExecutionEnvironment}, + environment::{EdgeWorkerEnvironment, Environment, ExecutionEnvironment, NodeJsVersion}, free_var_references, }; use turbopack_ecmascript::chunk::EcmascriptChunkType; @@ -27,10 +27,10 @@ use crate::{ next_import_map::get_next_edge_import_map, next_server::context::ServerContextType, next_shared::resolve::{ - get_invalid_client_only_resolve_plugin, get_invalid_styled_jsx_resolve_plugin, ModuleFeatureReportResolvePlugin, NextSharedRuntimeResolvePlugin, + get_invalid_client_only_resolve_plugin, get_invalid_styled_jsx_resolve_plugin, }, - util::{foreign_code_context_condition, NextRuntime}, + util::{NextRuntime, foreign_code_context_condition}, }; fn defines(define_env: &FxIndexMap) -> CompileTimeDefines { @@ -71,9 +71,9 @@ async fn next_edge_free_vars( Ok(free_var_references!( ..defines(&*define_env.await?).into_iter(), Buffer = FreeVarReference::EcmaScriptModule { - request: "buffer".into(), + request: rcstr!("buffer"), lookup_path: Some(project_path), - export: Some("Buffer".into()), + export: Some(rcstr!("Buffer")), }, ) .cell()) @@ -83,11 +83,17 @@ async fn next_edge_free_vars( pub async fn get_edge_compile_time_info( project_path: Vc, define_env: Vc, + process_env: Vc>, ) -> Result> { CompileTimeInfo::builder( - Environment::new(Value::new(ExecutionEnvironment::EdgeWorker( - EdgeWorkerEnvironment {}.resolved_cell(), - ))) + Environment::new(ExecutionEnvironment::EdgeWorker( + EdgeWorkerEnvironment { + node_version: NodeJsVersion::resolved_cell(NodeJsVersion::Current( + process_env.to_resolved().await?, + )), + } + .resolved_cell(), + )) .to_resolved() .await?, ) @@ -104,17 +110,20 @@ pub async fn get_edge_compile_time_info( #[turbo_tasks::function] pub async fn get_edge_resolve_options_context( project_path: ResolvedVc, - ty: Value, + ty: ServerContextType, mode: Vc, next_config: Vc, execution_context: Vc, ) -> Result> { - let next_edge_import_map = - get_next_edge_import_map(*project_path, ty, next_config, execution_context) - .to_resolved() - .await?; - - let ty: ServerContextType = ty.into_value(); + let next_edge_import_map = get_next_edge_import_map( + *project_path, + ty.clone(), + next_config, + mode, + execution_context, + ) + .to_resolved() + .await?; let mut before_resolve_plugins = vec![ResolvedVc::upcast( ModuleFeatureReportResolvePlugin::new(*project_path) @@ -171,7 +180,7 @@ pub async fn 
get_edge_resolve_options_context( ); if ty.supports_react_server() { - custom_conditions.push("react-server".into()); + custom_conditions.push(rcstr!("react-server")); }; let resolve_options_context = ResolveOptionsContext { @@ -222,23 +231,24 @@ pub async fn get_edge_chunking_context_with_client_assets( turbo_minify: Vc, turbo_source_maps: Vc, no_mangling: Vc, + scope_hoisting: Vc, ) -> Result>> { - let output_root = node_root.join("server/edge".into()).to_resolved().await?; + let output_root = node_root.join(rcstr!("server/edge")).to_resolved().await?; let next_mode = mode.await?; let mut builder = BrowserChunkingContext::builder( root_path, output_root, - output_root_to_root_path, + output_root_to_root_path.owned().await?, client_root, - output_root.join("chunks/ssr".into()).to_resolved().await?, + output_root.join(rcstr!("chunks/ssr")).to_resolved().await?, client_root - .join("static/media".into()) + .join(rcstr!("static/media")) .to_resolved() .await?, environment, next_mode.runtime_type(), ) - .asset_base_path(asset_prefix) + .asset_base_path(asset_prefix.owned().await?) .minify_type(if *turbo_minify.await? { MinifyType::Minify { // React needs deterministic function names to work correctly. @@ -255,20 +265,22 @@ pub async fn get_edge_chunking_context_with_client_assets( .module_id_strategy(module_id_strategy); if !next_mode.is_development() { - builder = builder.chunking_config( - Vc::::default().to_resolved().await?, - ChunkingConfig { - min_chunk_size: 20_000, - ..Default::default() - }, - ); - builder = builder.chunking_config( - Vc::::default().to_resolved().await?, - ChunkingConfig { - max_merge_chunk_size: 100_000, - ..Default::default() - }, - ); + builder = builder + .chunking_config( + Vc::::default().to_resolved().await?, + ChunkingConfig { + min_chunk_size: 20_000, + ..Default::default() + }, + ) + .chunking_config( + Vc::::default().to_resolved().await?, + ChunkingConfig { + max_merge_chunk_size: 100_000, + ..Default::default() + }, + ) + .module_merging(*scope_hoisting.await?); } Ok(Vc::upcast(builder.build())) @@ -285,16 +297,17 @@ pub async fn get_edge_chunking_context( turbo_minify: Vc, turbo_source_maps: Vc, no_mangling: Vc, + scope_hoisting: Vc, ) -> Result>> { - let output_root = node_root.join("server/edge".into()).to_resolved().await?; + let output_root = node_root.join(rcstr!("server/edge")).to_resolved().await?; let next_mode = mode.await?; let mut builder = BrowserChunkingContext::builder( root_path, output_root, - node_root_to_root_path, + node_root_to_root_path.owned().await?, output_root, - output_root.join("chunks".into()).to_resolved().await?, - output_root.join("assets".into()).to_resolved().await?, + output_root.join(rcstr!("chunks")).to_resolved().await?, + output_root.join(rcstr!("assets")).to_resolved().await?, environment, next_mode.runtime_type(), ) @@ -302,7 +315,7 @@ pub async fn get_edge_chunking_context( // instead. This special blob url is handled by the custom fetch // implementation in the edge sandbox. It will respond with the // asset from the output directory. - .asset_base_path(ResolvedVc::cell(Some("blob:server/edge/".into()))) + .asset_base_path(Some(rcstr!("blob:server/edge/"))) .minify_type(if *turbo_minify.await? 
{ MinifyType::Minify { mangle: (!*no_mangling.await?).then_some(MangleType::OptimalSize), @@ -318,20 +331,22 @@ pub async fn get_edge_chunking_context( .module_id_strategy(module_id_strategy); if !next_mode.is_development() { - builder = builder.chunking_config( - Vc::::default().to_resolved().await?, - ChunkingConfig { - min_chunk_size: 20_000, - ..Default::default() - }, - ); - builder = builder.chunking_config( - Vc::::default().to_resolved().await?, - ChunkingConfig { - max_merge_chunk_size: 100_000, - ..Default::default() - }, - ); + builder = builder + .chunking_config( + Vc::::default().to_resolved().await?, + ChunkingConfig { + min_chunk_size: 20_000, + ..Default::default() + }, + ) + .chunking_config( + Vc::::default().to_resolved().await?, + ChunkingConfig { + max_merge_chunk_size: 100_000, + ..Default::default() + }, + ) + .module_merging(*scope_hoisting.await?); } Ok(Vc::upcast(builder.build())) diff --git a/crates/next-core/src/next_edge/entry.rs b/crates/next-core/src/next_edge/entry.rs index de5b2433758e6..4d74d9fe013e7 100644 --- a/crates/next-core/src/next_edge/entry.rs +++ b/crates/next-core/src/next_edge/entry.rs @@ -1,6 +1,6 @@ use indoc::formatdoc; use turbo_rcstr::RcStr; -use turbo_tasks::{fxindexmap, ResolvedVc, Value, Vc}; +use turbo_tasks::{ResolvedVc, Vc, fxindexmap}; use turbo_tasks_fs::{File, FileSystemPath}; use turbopack_core::{ asset::AssetContent, context::AssetContext, module::Module, reference_type::ReferenceType, @@ -9,7 +9,7 @@ use turbopack_core::{ use turbopack_ecmascript::utils::StringifyJs; #[turbo_tasks::function] -pub async fn wrap_edge_entry( +pub fn wrap_edge_entry( asset_context: Vc>, project_root: Vc, entry: ResolvedVc>, @@ -39,7 +39,7 @@ pub async fn wrap_edge_entry( }}, }}); "#, - StringifyJs(&format_args!("middleware_{}", pathname)) + StringifyJs(&format_args!("middleware_{pathname}")) ); let file = File::from(source); @@ -56,7 +56,7 @@ pub async fn wrap_edge_entry( asset_context .process( Vc::upcast(virtual_source), - Value::new(ReferenceType::Internal(ResolvedVc::cell(inner_assets))), + ReferenceType::Internal(ResolvedVc::cell(inner_assets)), ) .module() } diff --git a/crates/next-core/src/next_edge/route_regex.rs b/crates/next-core/src/next_edge/route_regex.rs index e30ede371a2b2..eeb2202b94540 100644 --- a/crates/next-core/src/next_edge/route_regex.rs +++ b/crates/next-core/src/next_edge/route_regex.rs @@ -134,7 +134,7 @@ fn get_parametrized_route(route: &str) -> (String, FxHashMap) { pub fn get_route_regex(normalized_route: &str) -> RouteRegex { let (parameterized_route, groups) = get_parametrized_route(normalized_route); RouteRegex { - regex: format!("^{}(?:/)?$", parameterized_route), + regex: format!("^{parameterized_route}(?:/)?$"), groups, } } @@ -175,7 +175,7 @@ fn get_safe_key_from_segment( // the named regex let mut cleaned_key = key.replace(|c: char| !c.is_alphanumeric(), ""); if let Some(prefix) = key_prefix { - cleaned_key = format!("{}{}", prefix, cleaned_key); + cleaned_key = format!("{prefix}{cleaned_key}"); } let mut invalid_key = false; @@ -191,15 +191,15 @@ fn get_safe_key_from_segment( cleaned_key = get_safe_route_key(); } if let Some(prefix) = key_prefix { - route_keys.insert(cleaned_key.clone(), format!("{}{}", prefix, key)); + route_keys.insert(cleaned_key.clone(), format!("{prefix}{key}")); } else { route_keys.insert(cleaned_key.clone(), key); } match (repeat, optional) { - (true, true) => format!(r"(?:/(?P<{}>.+?))?", cleaned_key), - (true, false) => format!(r"/(?P<{}>.+?)", cleaned_key), - (false, true) => 
format!(r"(?:/(?P<{}>[^/]+?))?", cleaned_key), - (false, false) => format!(r"/(?P<{}>[^/]+?)", cleaned_key), + (true, true) => format!(r"(?:/(?P<{cleaned_key}>.+?))?"), + (true, false) => format!(r"/(?P<{cleaned_key}>.+?)"), + (false, true) => format!(r"(?:/(?P<{cleaned_key}>[^/]+?))?"), + (false, false) => format!(r"/(?P<{cleaned_key}>[^/]+?)"), } } @@ -254,7 +254,7 @@ pub fn get_named_route_regex(normalized_route: &str) -> NamedRouteRegex { let regex = get_route_regex(normalized_route); NamedRouteRegex { regex, - named_regex: format!("^{}(?:/)?$", parameterized_route), + named_regex: format!("^{parameterized_route}(?:/)?$"), route_keys, } } @@ -263,5 +263,5 @@ pub fn get_named_route_regex(normalized_route: &str) -> NamedRouteRegex { /// This is intended to be using for build time only. pub fn get_named_middleware_regex(normalized_route: &str) -> String { let (parameterized_route, _route_keys) = get_named_parametrized_route(normalized_route, true); - format!("^{}(?:/)?$", parameterized_route) + format!("^{parameterized_route}(?:/)?$") } diff --git a/crates/next-core/src/next_edge/unsupported.rs b/crates/next-core/src/next_edge/unsupported.rs index d67e571684a4d..0455bb23f9522 100644 --- a/crates/next-core/src/next_edge/unsupported.rs +++ b/crates/next-core/src/next_edge/unsupported.rs @@ -1,19 +1,20 @@ use anyhow::Result; use indoc::formatdoc; +use turbo_rcstr::RcStr; use turbo_tasks::{ResolvedVc, Vc}; use turbo_tasks_fs::{File, FileSystemPath}; use turbopack_core::{ asset::AssetContent, + ident::AssetIdent, resolve::{ + ResolveResult, options::{ImportMapResult, ImportMappingReplacement, ReplacedImportMapping}, parse::Request, pattern::Pattern, - ResolveResult, }, virtual_source::VirtualSource, }; use turbopack_ecmascript::runtime_functions::TURBOPACK_EXPORT_NAMESPACE; -use turbopack_node::execution_context::ExecutionContext; /// Intercepts requests for the given request to `unsupported` error messages /// by returning a VirtualSource proxies to any import request to raise a @@ -21,22 +22,13 @@ use turbopack_node::execution_context::ExecutionContext; /// /// This can be used by import map alias, refer `next_import_map` for the setup. #[turbo_tasks::value(shared)] -pub struct NextEdgeUnsupportedModuleReplacer { - project_path: ResolvedVc, - execution_context: ResolvedVc, -} +pub struct NextEdgeUnsupportedModuleReplacer {} #[turbo_tasks::value_impl] impl NextEdgeUnsupportedModuleReplacer { #[turbo_tasks::function] - pub fn new( - project_path: ResolvedVc, - execution_context: ResolvedVc, - ) -> Vc { - Self::cell(NextEdgeUnsupportedModuleReplacer { - project_path, - execution_context, - }) + pub fn new() -> Vc { + NextEdgeUnsupportedModuleReplacer {}.cell() } } @@ -50,25 +42,36 @@ impl ImportMappingReplacement for NextEdgeUnsupportedModuleReplacer { #[turbo_tasks::function] async fn result( &self, - root_path: Vc, + lookup_path: Vc, request: Vc, ) -> Result> { let request = &*request.await?; if let Request::Module { module, .. } = request { - // packages/next/src/server/web/globals.ts augments global with - // `__import_unsupported` and necessary functions. - let code = formatdoc! 
{ - r#" - {TURBOPACK_EXPORT_NAMESPACE}(__import_unsupported(`{module}`)); - "# - }; - let content = AssetContent::file(File::from(code).into()); - let source = VirtualSource::new(root_path, content).to_resolved().await?; - return Ok( - ImportMapResult::Result(ResolveResult::source(ResolvedVc::upcast(source))).cell(), - ); - }; - - Ok(ImportMapResult::NoEntry.cell()) + // Call out to separate `unsupported_module_source` to only have a single Source cell + // for requests with different subpaths: `fs` and `fs/promises`. + let source = unsupported_module_source(lookup_path.root(), module.clone()) + .to_resolved() + .await?; + Ok(ImportMapResult::Result(ResolveResult::source(ResolvedVc::upcast(source))).cell()) + } else { + Ok(ImportMapResult::NoEntry.cell()) + } } } + +#[turbo_tasks::function] +fn unsupported_module_source(root_path: Vc, module: RcStr) -> Vc { + // packages/next/src/server/web/globals.ts augments global with + // `__import_unsupported` and necessary functions. + let code = formatdoc! { + r#" + {TURBOPACK_EXPORT_NAMESPACE}(__import_unsupported(`{module}`)); + "# + }; + let content = AssetContent::file(File::from(code).into()); + VirtualSource::new_with_ident( + AssetIdent::from_path(root_path) + .with_modifier(format!("unsupported edge import {module}").into()), + content, + ) +} diff --git a/crates/next-core/src/next_font/font_fallback.rs b/crates/next-core/src/next_font/font_fallback.rs index bf027c684ef98..56f467a2b6e02 100644 --- a/crates/next-core/src/next_font/font_fallback.rs +++ b/crates/next-core/src/next_font/font_fallback.rs @@ -1,8 +1,8 @@ use anyhow::Result; use once_cell::sync::Lazy; use serde::{Deserialize, Serialize}; -use turbo_rcstr::RcStr; -use turbo_tasks::{trace::TraceRawVcs, NonLocalValue, ResolvedVc, Vc}; +use turbo_rcstr::{RcStr, rcstr}; +use turbo_tasks::{NonLocalValue, ResolvedVc, Vc, trace::TraceRawVcs}; pub(crate) struct DefaultFallbackFont { pub name: RcStr, @@ -14,16 +14,16 @@ pub(crate) struct DefaultFallbackFont { // From https://github.com/vercel/next.js/blob/a3893bf69c83fb08e88c87bf8a21d987a0448c8e/packages/font/src/utils.ts#L4 pub(crate) static DEFAULT_SANS_SERIF_FONT: Lazy = Lazy::new(|| DefaultFallbackFont { - name: "Arial".into(), - capsize_key: "arial".into(), + name: rcstr!("Arial"), + capsize_key: rcstr!("arial"), az_avg_width: 934.5116279069767, units_per_em: 2048, }); pub(crate) static DEFAULT_SERIF_FONT: Lazy = Lazy::new(|| DefaultFallbackFont { - name: "Times New Roman".into(), - capsize_key: "timesNewRoman".into(), + name: rcstr!("Times New Roman"), + capsize_key: rcstr!("timesNewRoman"), az_avg_width: 854.3953488372093, units_per_em: 2048, }); @@ -32,9 +32,9 @@ pub(crate) static DEFAULT_SERIF_FONT: Lazy = #[turbo_tasks::value(shared)] pub(crate) struct AutomaticFontFallback { /// e.g. 
`__Roboto_Fallback_c123b8` - pub scoped_font_family: ResolvedVc, + pub scoped_font_family: RcStr, /// The name of font locally, used in `src: local("{}")` - pub local_font_family: ResolvedVc, + pub local_font_family: RcStr, pub adjustment: Option, } @@ -60,7 +60,7 @@ impl FontFallback { } #[turbo_tasks::value(transparent)] -pub(crate) struct FontFallbacks(Vec>); +pub(crate) struct FontFallbacks(pub Vec>); #[turbo_tasks::value_impl] impl FontFallbacks { diff --git a/crates/next-core/src/next_font/google/font_fallback.rs b/crates/next-core/src/next_font/google/font_fallback.rs index 272e45aa26517..478d6e68f3da0 100644 --- a/crates/next-core/src/next_font/google/font_fallback.rs +++ b/crates/next-core/src/next_font/google/font_fallback.rs @@ -3,8 +3,8 @@ use once_cell::sync::Lazy; use regex::Regex; use rustc_hash::FxHashMap; use serde::{Deserialize, Serialize}; -use turbo_rcstr::RcStr; -use turbo_tasks::{trace::TraceRawVcs, NonLocalValue, ResolvedVc, Vc}; +use turbo_rcstr::{RcStr, rcstr}; +use turbo_tasks::{NonLocalValue, ResolvedVc, Vc, trace::TraceRawVcs}; use turbo_tasks_fs::FileSystemPath; use turbopack_core::issue::{IssueExt, IssueSeverity, StyledString}; @@ -12,11 +12,11 @@ use super::options::NextFontGoogleOptions; use crate::{ next_font::{ font_fallback::{ - AutomaticFontFallback, FontAdjustment, FontFallback, DEFAULT_SANS_SERIF_FONT, - DEFAULT_SERIF_FONT, + AutomaticFontFallback, DEFAULT_SANS_SERIF_FONT, DEFAULT_SERIF_FONT, FontAdjustment, + FontFallback, }, issue::NextFontIssue, - util::{get_scoped_font_family, FontFamilyType}, + util::{FontFamilyType, get_scoped_font_family}, }, util::load_next_js_templateon, }; @@ -53,7 +53,7 @@ pub(super) async fn get_font_fallback( None => { let metrics_json = load_next_js_templateon( lookup_path, - "dist/server/capsize-font-metrics.json".into(), + rcstr!("dist/server/capsize-font-metrics.json"), ) .await?; let fallback = lookup_fallback( @@ -65,12 +65,10 @@ pub(super) async fn get_font_fallback( match fallback { Ok(fallback) => FontFallback::Automatic(AutomaticFontFallback { scoped_font_family: get_scoped_font_family( - FontFamilyType::Fallback.cell(), - options_vc.font_family(), - ) - .to_resolved() - .await?, - local_font_family: ResolvedVc::cell(fallback.font_family), + FontFamilyType::Fallback, + options_vc.font_family().await?, + ), + local_font_family: fallback.font_family, adjustment: fallback.adjustment, }) .cell(), @@ -85,11 +83,11 @@ pub(super) async fn get_font_fallback( .into(), ) .resolved_cell(), - description: StyledString::Text( - "Skipping generating a fallback font.".into(), - ) + description: StyledString::Text(rcstr!( + "Skipping generating a fallback font." 
+ )) .resolved_cell(), - severity: IssueSeverity::Warning.resolved_cell(), + severity: IssueSeverity::Warning, } .resolved_cell() .emit(); @@ -172,10 +170,11 @@ fn lookup_fallback( #[cfg(test)] mod tests { use anyhow::Result; + use turbo_rcstr::rcstr; use turbo_tasks_fs::json::parse_json_with_source_context; use super::{FontAdjustment, FontMetricsMap}; - use crate::next_font::google::font_fallback::{lookup_fallback, Fallback}; + use crate::next_font::google::font_fallback::{Fallback, lookup_fallback}; #[test] fn test_fallback_from_metrics_sans_serif() -> Result<()> { @@ -211,7 +210,7 @@ mod tests { assert_eq!( lookup_fallback("Inter", font_metrics, true)?, Fallback { - font_family: "Arial".into(), + font_family: rcstr!("Arial"), adjustment: Some(FontAdjustment { ascent: 0.901_989_700_374_532, descent: -0.224_836_142_322_097_4, @@ -257,7 +256,7 @@ mod tests { assert_eq!( lookup_fallback("Roboto Slab", font_metrics, true)?, Fallback { - font_family: "Times New Roman".into(), + font_family: rcstr!("Times New Roman"), adjustment: Some(FontAdjustment { ascent: 0.885_645_438_273_993_8, descent: -0.229_046_234_036_377_7, diff --git a/crates/next-core/src/next_font/google/mod.rs b/crates/next-core/src/next_font/google/mod.rs index a8c5f58ca74ac..841c8d17b16e3 100644 --- a/crates/next-core/src/next_font/google/mod.rs +++ b/crates/next-core/src/next_font/google/mod.rs @@ -1,31 +1,32 @@ use std::path::Path; -use anyhow::{bail, Context, Result}; +use anyhow::{Context, Result, bail}; use futures::FutureExt; use indoc::formatdoc; use rustc_hash::FxHashMap; use serde::{Deserialize, Serialize}; -use turbo_rcstr::RcStr; -use turbo_tasks::{Completion, FxIndexMap, ResolvedVc, Value, Vc}; +use turbo_rcstr::{RcStr, rcstr}; +use turbo_tasks::{Completion, FxIndexMap, ResolvedVc, Vc}; use turbo_tasks_bytes::stream::SingleValue; use turbo_tasks_env::{CommandLineProcessEnv, ProcessEnv}; -use turbo_tasks_fetch::{fetch, HttpResponseBody}; +use turbo_tasks_fetch::{HttpResponseBody, fetch}; use turbo_tasks_fs::{ - json::parse_json_with_source_context, DiskFileSystem, File, FileContent, FileSystem, - FileSystemPath, + DiskFileSystem, File, FileContent, FileSystem, FileSystemPath, + json::parse_json_with_source_context, }; +use turbo_tasks_hash::hash_xxh3_hash64; use turbopack::evaluate_context::node_evaluate_asset_context; use turbopack_core::{ asset::AssetContent, context::AssetContext, - ident::AssetIdent, - issue::{IssueExt, IssueSeverity}, + ident::{AssetIdent, Layer}, + issue::{IssueExt, IssueSeverity, StyledString}, reference_type::{InnerAssets, ReferenceType}, resolve::{ + ResolveResult, options::{ImportMapResult, ImportMappingReplacement, ReplacedImportMapping}, parse::Request, pattern::Pattern, - ResolveResult, }, virtual_source::VirtualSource, }; @@ -35,19 +36,20 @@ use turbopack_node::{ use self::{ font_fallback::get_font_fallback, - options::{options_from_request, FontDataEntry, FontWeights, NextFontGoogleOptions}, + options::{FontDataEntry, FontWeights, NextFontGoogleOptions, options_from_request}, stylesheet::build_stylesheet, util::{get_font_axes, get_stylesheet_url}, }; use super::{ font_fallback::FontFallback, util::{ - can_use_next_font, get_request_hash, get_request_id, get_scoped_font_family, - FontCssProperties, FontFamilyType, + FontCssProperties, FontFamilyType, can_use_next_font, get_request_hash, get_request_id, + get_scoped_font_family, }, }; use crate::{ - embed_js::next_js_file_path, next_app::metadata::split_extension, util::load_next_js_templateon, + embed_js::next_js_file_path, 
mode::NextMode, next_app::metadata::split_extension, + next_font::issue::NextFontIssue, util::load_next_js_templateon, }; pub mod font_fallback; @@ -85,19 +87,17 @@ impl NextFontGoogleReplacer { #[turbo_tasks::function] async fn import_map_result(&self, query: RcStr) -> Result> { - let request_hash = get_request_hash(&query).await?; + let request_hash = get_request_hash(&query); let qstr = qstring::QString::from(query.as_str()); - let query_vc = Vc::cell(query); - let font_data = load_font_data(*self.project_path); - let options = font_options_from_query_map(query_vc, font_data); + let options = font_options_from_query_map(query, font_data); let fallback = get_font_fallback(*self.project_path, options); let properties = get_font_css_properties(options, fallback).await?; let js_asset = VirtualSource::new( - next_js_file_path("internal/font/google".into()) - .join(format!("{}.js", get_request_id(options.font_family(), request_hash).await?).into()), + next_js_file_path(rcstr!("internal/font/google")) + .join(format!("{}.js", get_request_id(options.font_family().await?, request_hash)).into()), AssetContent::file(FileContent::Content( formatdoc!( r#" @@ -123,13 +123,13 @@ impl NextFontGoogleReplacer { .weight .await? .as_ref() - .map(|w| format!("fontWeight: {},\n", w)) + .map(|w| format!("fontWeight: {w},\n")) .unwrap_or_else(|| "".to_owned()), properties .style .await? .as_ref() - .map(|s| format!("fontStyle: \"{}\",\n", s)) + .map(|s| format!("fontStyle: \"{s}\",\n")) .unwrap_or_else(|| "".to_owned()), ) .into(), @@ -168,8 +168,8 @@ impl ImportMappingReplacement for NextFontGoogleReplacer { }; let this = &*self.await?; - if can_use_next_font(*this.project_path, **query).await? { - Ok(self.import_map_result(query.await?.as_str().into())) + if can_use_next_font(*this.project_path, query).await? { + Ok(self.import_map_result(query.clone())) } else { Ok(ImportMapResult::NoEntry.into()) } @@ -180,6 +180,7 @@ impl ImportMappingReplacement for NextFontGoogleReplacer { pub struct NextFontGoogleCssModuleReplacer { project_path: ResolvedVc, execution_context: ResolvedVc, + next_mode: ResolvedVc, } #[turbo_tasks::value_impl] @@ -188,45 +189,46 @@ impl NextFontGoogleCssModuleReplacer { pub fn new( project_path: ResolvedVc, execution_context: ResolvedVc, + next_mode: ResolvedVc, ) -> Vc { Self::cell(NextFontGoogleCssModuleReplacer { project_path, execution_context, + next_mode, }) } #[turbo_tasks::function] async fn import_map_result(&self, query: RcStr) -> Result> { - let request_hash = get_request_hash(&query).await?; - let query_vc = Vc::cell(query); + let request_hash = get_request_hash(&query); let font_data = load_font_data(*self.project_path); - let options = font_options_from_query_map(query_vc, font_data); - let stylesheet_url = get_stylesheet_url_from_options(options, font_data); + let options = font_options_from_query_map(query, font_data); + let stylesheet_url = get_stylesheet_url_from_options(options, font_data) + .owned() + .await?; + let font_family = options.font_family().await?; let scoped_font_family = - get_scoped_font_family(FontFamilyType::WebFont.cell(), options.font_family()); - let css_virtual_path = next_js_file_path("internal/font/google".into()).join( - format!( - "/{}.module.css", - get_request_id(options.font_family(), request_hash).await? 
- ) - .into(), - ); + get_scoped_font_family(FontFamilyType::WebFont, font_family.clone()); + let css_virtual_path = next_js_file_path(rcstr!("internal/font/google")) + .join(format!("/{}.module.css", get_request_id(font_family, request_hash)).into()); // When running Next.js integration tests, use the mock data available in // process.env.NEXT_FONT_GOOGLE_MOCKED_RESPONSES instead of making real // requests to Google Fonts. let env = Vc::upcast::>(CommandLineProcessEnv::new()); - let mocked_responses_path = &*env.read("NEXT_FONT_GOOGLE_MOCKED_RESPONSES".into()).await?; + let mocked_responses_path = &*env + .read(rcstr!("NEXT_FONT_GOOGLE_MOCKED_RESPONSES")) + .await?; + let stylesheet_str = mocked_responses_path .as_ref() .map_or_else( - || fetch_real_stylesheet(stylesheet_url, css_virtual_path).boxed(), - |p| get_mock_stylesheet(stylesheet_url, p, *self.execution_context).boxed(), + || fetch_real_stylesheet(stylesheet_url.clone(), css_virtual_path).boxed(), + |p| get_mock_stylesheet(stylesheet_url.clone(), p, *self.execution_context).boxed(), ) .await?; let font_fallback = get_font_fallback(*self.project_path, options); - let stylesheet = match stylesheet_str { Some(s) => Some( update_google_stylesheet( @@ -239,10 +241,57 @@ impl NextFontGoogleCssModuleReplacer { .await?, ), None => { - println!( - "Failed to download `{}` from Google Fonts. Using fallback font instead.", - options.await?.font_family - ); + match *self.next_mode.await? { + // If we're in production mode, we want to fail the build to ensure proper font + // rendering. + NextMode::Build => { + NextFontIssue { + path: css_virtual_path.to_resolved().await?, + title: StyledString::Line(vec![ + StyledString::Code(rcstr!("next/font:")), + StyledString::Text(rcstr!(" error:")), + ]) + .resolved_cell(), + description: StyledString::Text( + format!( + "Failed to fetch `{}` from Google Fonts.", + options.await?.font_family + ) + .into(), + ) + .resolved_cell(), + severity: IssueSeverity::Error, + } + .resolved_cell() + .emit(); + } + // Inform the user of the failure to retrieve the stylesheet / font, but don't + // propagate this error. We don't want e.g. offline connections to prevent page + // renders during development. + NextMode::Development => { + NextFontIssue { + path: css_virtual_path.to_resolved().await?, + title: StyledString::Line(vec![ + StyledString::Code(rcstr!("next/font:")), + StyledString::Text(rcstr!(" warning:")), + ]) + .resolved_cell(), + description: StyledString::Text( + format!( + "Failed to download `{}` from Google Fonts.
Using fallback \ + font instead.", + options.await?.font_family + ) + .into(), + ) + .resolved_cell(), + severity: IssueSeverity::Warning, + } + .resolved_cell() + .emit(); + } + } + None } }; @@ -290,14 +339,14 @@ impl ImportMappingReplacement for NextFontGoogleCssModuleReplacer { let Request::Module { module: _, path: _, - query: query_vc, + query, fragment: _, } = request else { return Ok(ImportMapResult::NoEntry.cell()); }; - Ok(self.import_map_result(query_vc.owned().await?)) + Ok(self.import_map_result(query.clone())) } } @@ -341,7 +390,7 @@ impl ImportMappingReplacement for NextFontGoogleFontFileReplacer { let Request::Module { module: _, path: _, - query: query_vc, + query, fragment: _, } = request else { @@ -352,13 +401,13 @@ impl ImportMappingReplacement for NextFontGoogleFontFileReplacer { url, preload, has_size_adjust: size_adjust, - } = font_file_options_from_query_map(**query_vc).await?; + } = font_file_options_from_query_map(query)?; let (filename, ext) = split_extension(&url); let ext = ext.with_context(|| format!("font url {} is missing an extension", &url))?; // remove dashes and dots as they might be used for the markers below. - let mut name = filename.replace(['-', '.'], "_"); + let mut name = format!("{:016x}", hash_xxh3_hash64(filename.as_bytes())); if size_adjust { name.push_str("-s") } @@ -366,14 +415,12 @@ impl ImportMappingReplacement for NextFontGoogleFontFileReplacer { name.push_str(".p") } - let font_virtual_path = next_js_file_path("internal/font/google".into()) - .join(format!("/{}.{}", name, ext).into()) - .truncate_file_name_with_hash_vc(); + let font_virtual_path = + next_js_file_path(rcstr!("internal/font/google")).join(format!("/{name}.{ext}").into()); // doesn't seem ideal to download the font into a string, but probably doesn't // really matter either. - let Some(font) = fetch_from_google_fonts(Vc::cell(url.into()), font_virtual_path).await? - else { + let Some(font) = fetch_from_google_fonts(url.into(), font_virtual_path).await? else { return Ok(ImportMapResult::Result(ResolveResult::unresolvable()).cell()); }; @@ -392,7 +439,7 @@ impl ImportMappingReplacement for NextFontGoogleFontFileReplacer { async fn load_font_data(project_root: ResolvedVc) -> Result> { let data: FontData = load_next_js_templateon( project_root, - "dist/compiled/@next/font/dist/google/font-data.json".into(), + rcstr!("dist/compiled/@next/font/dist/google/font-data.json"), ) .await?; @@ -405,7 +452,7 @@ async fn load_font_data(project_root: ResolvedVc) -> Result, options: Vc, - scoped_font_family: Vc, + scoped_font_family: RcStr, has_size_adjust: Vc, ) -> Result> { let options = &*options.await?; @@ -414,7 +461,7 @@ async fn update_google_stylesheet( // TODO: Do this more resiliently, e.g. transforming an swc ast let mut stylesheet = stylesheet.await?.replace( &format!("font-family: '{}';", &options.font_family), - &format!("font-family: '{}';", &*scoped_font_family.await?), + &format!("font-family: '{scoped_font_family}';"), ); let font_files = find_font_files_in_css( @@ -438,7 +485,7 @@ async fn update_google_stylesheet( stylesheet = stylesheet.replace( &font_url, - &format!("{}?{}", GOOGLE_FONTS_INTERNAL_PREFIX, query_str), + &format!("{GOOGLE_FONTS_INTERNAL_PREFIX}?{query_str}"), ) } @@ -493,8 +540,8 @@ async fn get_stylesheet_url_from_options( use turbo_tasks_env::{CommandLineProcessEnv, ProcessEnv}; let env = CommandLineProcessEnv::new(); - if let Some(url) = &*env.read("TURBOPACK_TEST_ONLY_MOCK_SERVER".into()).await? 
{ - css_url = Some(format!("{}/css2", url)); + if let Some(url) = &*env.read(rcstr!("TURBOPACK_TEST_ONLY_MOCK_SERVER")).await? { + css_url = Some(format!("{url}/css2")); } } @@ -523,7 +570,7 @@ async fn get_font_css_properties( ) -> Result> { let options = &*options_vc.await?; let scoped_font_family = - &*get_scoped_font_family(FontFamilyType::WebFont.cell(), options_vc.font_family()).await?; + get_scoped_font_family(FontFamilyType::WebFont, options_vc.font_family().await?); let mut font_families = vec![format!("'{}'", scoped_font_family.clone()).into()]; let font_fallback = &*font_fallback.await?; @@ -532,7 +579,7 @@ async fn get_font_css_properties( font_families.extend_from_slice(fonts); } FontFallback::Automatic(fallback) => { - font_families.push(format!("'{}'", *fallback.scoped_font_family.await?).into()); + font_families.push(format!("'{}'", fallback.scoped_font_family).into()); } FontFallback::Error => {} } @@ -562,10 +609,10 @@ async fn get_font_css_properties( #[turbo_tasks::function] async fn font_options_from_query_map( - query: Vc, + query: RcStr, font_data: Vc, ) -> Result> { - let query_map = qstring::QString::from(&**query.await?); + let query_map = qstring::QString::from(query.as_str()); if query_map.len() != 1 { bail!("next/font/google queries must have exactly one entry"); @@ -577,13 +624,10 @@ async fn font_options_from_query_map( let options = options_from_request(&parse_json_with_source_context(&json)?, &*font_data.await?)?; - Ok(NextFontGoogleOptions::new(Value::new(options))) + Ok(NextFontGoogleOptions::new(options)) } - -async fn font_file_options_from_query_map( - query: Vc, -) -> Result { - let query_map = qstring::QString::from(&**query.await?); +fn font_file_options_from_query_map(query: &RcStr) -> Result { + let query_map = qstring::QString::from(query.as_str()); if query_map.len() != 1 { bail!("next/font/google queries have exactly one entry"); @@ -597,7 +641,7 @@ async fn font_file_options_from_query_map( } async fn fetch_real_stylesheet( - stylesheet_url: Vc, + stylesheet_url: RcStr, css_virtual_path: Vc, ) -> Result>> { let body = fetch_from_google_fonts(stylesheet_url, css_virtual_path).await?; @@ -606,27 +650,20 @@ async fn fetch_real_stylesheet( } async fn fetch_from_google_fonts( - url: Vc, + url: RcStr, virtual_path: Vc, ) -> Result>> { let result = fetch( url, - Vc::cell(Some(USER_AGENT_FOR_GOOGLE_FONTS.into())), + Some(rcstr!(USER_AGENT_FOR_GOOGLE_FONTS)), Vc::cell(None), ) .await?; - Ok(match &*result { + Ok(match *result { Ok(r) => Some(*r.await?.body), Err(err) => { - // Inform the user of the failure to retreive the stylesheet / font, but don't - // propagate this error. We don't want e.g. offline connections to prevent page - // renders during development. During production builds, however, this error - // should propagate. - // - // TODO(WEB-283): Use fallback in dev in this case - // TODO(WEB-293): Fail production builds (not dev) in this case - err.to_issue(IssueSeverity::Warning.into(), virtual_path) + err.to_issue(IssueSeverity::Warning, virtual_path) .to_resolved() .await? .emit(); @@ -637,13 +674,13 @@ async fn fetch_from_google_fonts( } async fn get_mock_stylesheet( - stylesheet_url: Vc, + stylesheet_url: RcStr, mocked_responses_path: &str, execution_context: Vc, ) -> Result>> { let response_path = Path::new(&mocked_responses_path); let mock_fs = Vc::upcast::>(DiskFileSystem::new( - "mock".into(), + rcstr!("mock"), response_path .parent() .context("Must be valid path")? 
@@ -658,9 +695,14 @@ async fn get_mock_stylesheet( project_path: _, chunking_context, } = *execution_context.await?; - let asset_context = - node_evaluate_asset_context(execution_context, None, None, "next_font".into(), false); - let loader_path = mock_fs.root().join("loader.js".into()); + let asset_context = node_evaluate_asset_context( + execution_context, + None, + None, + Layer::new(rcstr!("next_font")), + false, + ); + let loader_path = mock_fs.root().join(rcstr!("loader.js")); let mocked_response_asset = asset_context .process( Vc::upcast(VirtualSource::new( @@ -677,9 +719,7 @@ async fn get_mock_stylesheet( .into(), ), )), - Value::new(ReferenceType::Internal( - InnerAssets::empty().to_resolved().await?, - )), + ReferenceType::Internal(InnerAssets::empty().to_resolved().await?), ) .module(); @@ -703,7 +743,7 @@ async fn get_mock_stylesheet( let val: FxHashMap> = parse_json_with_source_context(val.to_str()?)?; Ok(val - .get(&*stylesheet_url.await?) + .get(&stylesheet_url) .context("url not found")? .clone() .map(Vc::cell)) diff --git a/crates/next-core/src/next_font/google/options.rs b/crates/next-core/src/next_font/google/options.rs index 1ee31c7617041..4efd8ae974f3b 100644 --- a/crates/next-core/src/next_font/google/options.rs +++ b/crates/next-core/src/next_font/google/options.rs @@ -1,8 +1,8 @@ use anyhow::{Context, Result}; use serde::{Deserialize, Serialize}; -use turbo_rcstr::RcStr; +use turbo_rcstr::{RcStr, rcstr}; use turbo_tasks::{ - fxindexset, trace::TraceRawVcs, FxIndexMap, FxIndexSet, NonLocalValue, Value, Vc, + FxIndexMap, FxIndexSet, NonLocalValue, TaskInput, Vc, fxindexset, trace::TraceRawVcs, }; use super::request::{NextFontRequest, OneOrManyStrings}; @@ -11,8 +11,8 @@ const ALLOWED_DISPLAY_VALUES: &[&str] = &["auto", "block", "swap", "fallback", " pub(super) type FontData = FxIndexMap; -#[turbo_tasks::value(serialization = "auto_for_input")] -#[derive(Clone, Debug, PartialOrd, Ord, Hash)] +#[turbo_tasks::value] +#[derive(Clone, Debug, PartialOrd, Ord, Hash, TaskInput)] pub(super) struct NextFontGoogleOptions { /// Name of the requested font from Google. Contains literal spaces. 
pub font_family: RcStr, @@ -29,16 +29,17 @@ pub(super) struct NextFontGoogleOptions { pub subsets: Option>, } -#[turbo_tasks::value_impl] impl NextFontGoogleOptions { - #[turbo_tasks::function] - pub fn new(options: Value) -> Vc { - Self::cell(options.into_value()) + pub async fn font_family(self: Vc) -> Result { + Ok(self.await?.font_family.clone()) } +} +#[turbo_tasks::value_impl] +impl NextFontGoogleOptions { #[turbo_tasks::function] - pub fn font_family(&self) -> Vc { - Vc::cell((*self.font_family).into()) + pub fn new(options: NextFontGoogleOptions) -> Vc { + Self::cell(options) } } @@ -54,6 +55,7 @@ impl NextFontGoogleOptions { Deserialize, TraceRawVcs, NonLocalValue, + TaskInput, )] pub(super) enum FontWeights { Variable, @@ -155,7 +157,7 @@ pub(super) fn options_from_request( if font_data.styles.len() == 1 { styles.push(font_data.styles[0].clone()); } else { - styles.push("normal".into()); + styles.push(rcstr!("normal")); } } @@ -170,7 +172,7 @@ pub(super) fn options_from_request( } } - let display = argument.display.unwrap_or_else(|| "swap".into()); + let display = argument.display.unwrap_or_else(|| rcstr!("swap")); if !ALLOWED_DISPLAY_VALUES.contains(&display.as_str()) { anyhow::bail!( @@ -181,18 +183,18 @@ pub(super) fn options_from_request( ) } - if let Some(axes) = argument.axes.as_ref() { - if !axes.is_empty() { - if !supports_variable_weight { - anyhow::bail!("Axes can only be defined for variable fonts.") - } + if let Some(axes) = argument.axes.as_ref() + && !axes.is_empty() + { + if !supports_variable_weight { + anyhow::bail!("Axes can only be defined for variable fonts.") + } - if weights != FontWeights::Variable { - anyhow::bail!( - "Axes can only be defined for variable fonts when the weight property is \ - nonexistent or set to `variable`." - ) - } + if weights != FontWeights::Variable { + anyhow::bail!( + "Axes can only be defined for variable fonts when the weight property is \ + nonexistent or set to `variable`." 
+ ) } } @@ -213,11 +215,11 @@ pub(super) fn options_from_request( #[cfg(test)] mod tests { use anyhow::Result; - use turbo_rcstr::RcStr; + use turbo_rcstr::{RcStr, rcstr}; use turbo_tasks::FxIndexMap; use turbo_tasks_fs::json::parse_json_with_source_context; - use super::{options_from_request, FontDataEntry, NextFontGoogleOptions}; + use super::{FontDataEntry, NextFontGoogleOptions, options_from_request}; use crate::next_font::google::{options::FontWeights, request::NextFontRequest}; #[test] @@ -280,10 +282,10 @@ mod tests { assert_eq!( options_from_request(&request, &data)?, NextFontGoogleOptions { - font_family: "ABeeZee".into(), + font_family: rcstr!("ABeeZee"), weights: FontWeights::Variable, - styles: vec!["normal".into()], - display: "swap".into(), + styles: vec![rcstr!("normal")], + display: rcstr!("swap"), preload: true, selected_variable_axes: None, fallback: None, @@ -436,7 +438,7 @@ mod tests { )?; let options = options_from_request(&request, &data)?; - assert_eq!(options.styles, vec![RcStr::from("italic")]); + assert_eq!(options.styles, vec![rcstr!("italic")]); Ok(()) } @@ -468,7 +470,7 @@ mod tests { )?; let options = options_from_request(&request, &data)?; - assert_eq!(options.styles, vec![RcStr::from("normal")]); + assert_eq!(options.styles, vec![rcstr!("normal")]); Ok(()) } diff --git a/crates/next-core/src/next_font/google/util.rs b/crates/next-core/src/next_font/google/util.rs index a3270235de817..1db384705027d 100644 --- a/crates/next-core/src/next_font/google/util.rs +++ b/crates/next-core/src/next_font/google/util.rs @@ -1,6 +1,6 @@ use std::{cmp::Ordering, collections::BTreeSet}; -use anyhow::{bail, Context, Result}; +use anyhow::{Context, Result, bail}; use turbo_rcstr::RcStr; use turbo_tasks::FxIndexSet; @@ -88,13 +88,13 @@ pub(super) fn get_font_axes( for axis in defineable_axes { if axis.tag == "wght" { weight_axis = Some(format!("{}..{}", axis.min, axis.max).into()); - } else if let Some(selected_variable_axes) = selected_variable_axes { - if selected_variable_axes.contains(&axis.tag) { - variable_axes.push(( - axis.tag.clone(), - format!("{}..{}", axis.min, axis.max).into(), - )); - } + } else if let Some(selected_variable_axes) = selected_variable_axes + && selected_variable_axes.contains(&axis.tag) + { + variable_axes.push(( + axis.tag.clone(), + format!("{}..{}", axis.min, axis.max).into(), + )); } } @@ -168,13 +168,13 @@ pub(super) fn get_stylesheet_url( if weights.is_empty() { let mut variant = vec![]; - if let Some(variable_axes) = &axes.variable_axes { - if !variable_axes.is_empty() { - for (key, val) in variable_axes { - variant.push((key.as_str(), VariantValue::String(val.clone()))); - } - variants.push(variant); + if let Some(variable_axes) = &axes.variable_axes + && !variable_axes.is_empty() + { + for (key, val) in variable_axes { + variant.push((key.as_str(), VariantValue::String(val.clone()))); } + variants.push(variant); } } else { for wght in &weights { @@ -296,9 +296,9 @@ mod tests { use super::get_font_axes; use crate::next_font::google::{ - options::{FontData, FontWeights}, - util::{get_stylesheet_url, FontAxes, FontAxesWeights, FontStyle}, GOOGLE_FONTS_STYLESHEET_URL, + options::{FontData, FontWeights}, + util::{FontAxes, FontAxesWeights, FontStyle, get_stylesheet_url}, }; #[test] diff --git a/crates/next-core/src/next_font/issue.rs b/crates/next-core/src/next_font/issue.rs index a07db2f353e61..f8822296597ec 100644 --- a/crates/next-core/src/next_font/issue.rs +++ b/crates/next-core/src/next_font/issue.rs @@ -7,7 +7,7 @@ pub(crate) struct 
NextFontIssue { pub(crate) path: ResolvedVc, pub(crate) title: ResolvedVc, pub(crate) description: ResolvedVc, - pub(crate) severity: ResolvedVc, + pub(crate) severity: IssueSeverity, } #[turbo_tasks::value_impl] @@ -17,9 +17,8 @@ impl Issue for NextFontIssue { IssueStage::CodeGen.into() } - #[turbo_tasks::function] - fn severity(&self) -> Vc { - *self.severity + fn severity(&self) -> IssueSeverity { + self.severity } #[turbo_tasks::function] diff --git a/crates/next-core/src/next_font/local/errors.rs b/crates/next-core/src/next_font/local/errors.rs index 3f09137e077cc..da8f442dacd93 100644 --- a/crates/next-core/src/next_font/local/errors.rs +++ b/crates/next-core/src/next_font/local/errors.rs @@ -1,8 +1,19 @@ -use thiserror::Error; +use std::fmt::Display; + +use serde::{Deserialize, Serialize}; use turbo_rcstr::RcStr; +use turbo_tasks::{NonLocalValue, trace::TraceRawVcs}; + +pub(crate) enum FontResult { + Ok(T), + FontFileNotFound(FontFileNotFound), +} + +#[derive(Debug, Eq, PartialEq, Serialize, Deserialize, NonLocalValue, TraceRawVcs)] +pub(crate) struct FontFileNotFound(pub RcStr); -#[derive(Debug, Error)] -pub enum FontError { - #[error("could not find font file")] - FontFileNotFound(RcStr), +impl Display for FontFileNotFound { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "Font file not found: Can't resolve '{}'", self.0) + } } diff --git a/crates/next-core/src/next_font/local/font_fallback.rs b/crates/next-core/src/next_font/local/font_fallback.rs index 964bc01cf9c0d..37cfb623c1708 100644 --- a/crates/next-core/src/next_font/local/font_fallback.rs +++ b/crates/next-core/src/next_font/local/font_fallback.rs @@ -1,24 +1,31 @@ use allsorts::{ - font_data::{DynamicFontTableProvider, FontData}, Font, + font_data::{DynamicFontTableProvider, FontData}, }; -use anyhow::{bail, Context, Result}; +use anyhow::{Context, Result, bail}; +use turbo_rcstr::rcstr; use turbo_tasks::{ResolvedVc, Vc}; use turbo_tasks_fs::{FileContent, FileSystemPath}; use super::{ + errors::{FontFileNotFound, FontResult}, options::{FontDescriptor, FontDescriptors, FontWeight, NextFontLocalOptions}, request::AdjustFontFallback, }; use crate::next_font::{ font_fallback::{ - AutomaticFontFallback, DefaultFallbackFont, FontAdjustment, FontFallback, FontFallbacks, - DEFAULT_SANS_SERIF_FONT, DEFAULT_SERIF_FONT, + AutomaticFontFallback, DEFAULT_SANS_SERIF_FONT, DEFAULT_SERIF_FONT, DefaultFallbackFont, + FontAdjustment, FontFallback, FontFallbacks, }, - local::errors::FontError, - util::{get_scoped_font_family, FontFamilyType}, + util::{FontFamilyType, get_scoped_font_family}, }; +#[turbo_tasks::value(shared)] +pub(crate) enum FontFallbackResult { + Ok(ResolvedVc), + FontFileNotFound(FontFileNotFound), +} + // From // https://github.com/vercel/next.js/blob/7457be0c74e64b4d0617943ed27f4d557cc916be/packages/font/src/local/get-fallback-metrics-from-font-file.ts#L34 static AVG_CHARACTERS: &str = "aaabcdeeeefghiijklmnnoopqrrssttuvwxyz "; @@ -29,33 +36,49 @@ static BOLD_WEIGHT: f64 = 700.0; pub(super) async fn get_font_fallbacks( lookup_path: Vc, options_vc: Vc, -) -> Result> { +) -> Result> { let options = &*options_vc.await?; - let mut font_fallbacks = vec![]; let scoped_font_family = - get_scoped_font_family(FontFamilyType::Fallback.cell(), options_vc.font_family()); + get_scoped_font_family(FontFamilyType::Fallback, options_vc.font_family().await?); + let mut font_fallbacks = vec![]; match options.adjust_font_fallback { - AdjustFontFallback::Arial => font_fallbacks.push( -
FontFallback::Automatic(AutomaticFontFallback { - scoped_font_family: scoped_font_family.to_resolved().await?, - local_font_family: ResolvedVc::cell("Arial".into()), - adjustment: Some( - get_font_adjustment(lookup_path, options_vc, &DEFAULT_SANS_SERIF_FONT).await?, + AdjustFontFallback::Arial => { + let adjustment = + get_font_adjustment(lookup_path, options_vc, &DEFAULT_SANS_SERIF_FONT).await?; + + match adjustment { + FontResult::Ok(adjustment) => font_fallbacks.push( + FontFallback::Automatic(AutomaticFontFallback { + scoped_font_family, + local_font_family: rcstr!("Arial"), + adjustment: Some(adjustment), + }) + .resolved_cell(), ), - }) - .resolved_cell(), - ), - AdjustFontFallback::TimesNewRoman => font_fallbacks.push( - FontFallback::Automatic(AutomaticFontFallback { - scoped_font_family: scoped_font_family.to_resolved().await?, - local_font_family: ResolvedVc::cell("Times New Roman".into()), - adjustment: Some( - get_font_adjustment(lookup_path, options_vc, &DEFAULT_SERIF_FONT).await?, + FontResult::FontFileNotFound(err) => { + return Ok(FontFallbackResult::FontFileNotFound(err).cell()); + } + }; + } + AdjustFontFallback::TimesNewRoman => { + let adjustment = + get_font_adjustment(lookup_path, options_vc, &DEFAULT_SERIF_FONT).await?; + + match adjustment { + FontResult::Ok(adjustment) => font_fallbacks.push( + FontFallback::Automatic(AutomaticFontFallback { + scoped_font_family, + local_font_family: rcstr!("Times New Roman"), + adjustment: Some(adjustment), + }) + .resolved_cell(), ), - }) - .resolved_cell(), - ), + FontResult::FontFileNotFound(err) => { + return Ok(FontFallbackResult::FontFileNotFound(err).cell()); + } + }; + } AdjustFontFallback::None => (), }; @@ -63,14 +86,14 @@ pub(super) async fn get_font_fallbacks( font_fallbacks.push(FontFallback::Manual(fallback.clone()).resolved_cell()); } - Ok(Vc::cell(font_fallbacks)) + Ok(FontFallbackResult::Ok(FontFallbacks(font_fallbacks).resolved_cell()).cell()) } async fn get_font_adjustment( lookup_path: Vc, options: Vc, fallback_font: &DefaultFallbackFont, -) -> Result { +) -> Result> { let options = &*options.await?; let main_descriptor = pick_font_for_fallback_generation(&options.fonts)?; let font_file = &*lookup_path @@ -78,11 +101,15 @@ async fn get_font_adjustment( .read() .await?; let font_file_rope = match font_file { - FileContent::NotFound => bail!(FontError::FontFileNotFound(main_descriptor.path.clone())), + FileContent::NotFound => { + return Ok(FontResult::FontFileNotFound(FontFileNotFound( + main_descriptor.path.clone(), + ))); + } FileContent::Content(file) => file.content(), }; - let font_file_binary = font_file_rope.to_bytes()?; + let font_file_binary = font_file_rope.to_bytes(); let scope = allsorts::binary::read::ReadScope::new(&font_file_binary); let mut font = Font::new(scope.read::()?.table_provider(0)?)?.context(format!( "Unable to read font metrics from font file at {}", @@ -106,12 +133,12 @@ async fn get_font_adjustment( None => 1.0, }; - Ok(FontAdjustment { + Ok(FontResult::Ok(FontAdjustment { ascent: font.hhea_table.ascender as f64 / (units_per_em * size_adjust), descent: font.hhea_table.descender as f64 / (units_per_em * size_adjust), line_gap: font.hhea_table.line_gap as f64 / (units_per_em * size_adjust), size_adjust, - }) + })) } fn calc_average_width(font: &mut Font) -> Option { @@ -167,7 +194,7 @@ fn pick_font_for_fallback_generation( // Prefer normal style if they have the same weight if used_font_distance == current_font_distance - && current_descriptor.style != Some("italic".into()) + && 
current_descriptor.style != Some(rcstr!("italic")) { used_descriptor = current_descriptor; continue; @@ -254,18 +281,18 @@ fn parse_weight_string(weight_str: &str) -> Result { #[cfg(test)] mod tests { use anyhow::Result; - use turbo_rcstr::RcStr; + use turbo_rcstr::{RcStr, rcstr}; use crate::next_font::local::{ font_fallback::pick_font_for_fallback_generation, options::{FontDescriptor, FontDescriptors, FontWeight}, }; - fn generate_font_descriptor(weight: &FontWeight, style: &Option) -> FontDescriptor { + fn generate_font_descriptor(weight: &FontWeight, style: Option) -> FontDescriptor { FontDescriptor { - ext: "ttf".into(), - path: "foo.ttf".into(), - style: style.clone().map(RcStr::from), + ext: rcstr!("ttf"), + path: rcstr!("foo.ttf"), + style, weight: Some(weight.clone()), } } @@ -274,34 +301,34 @@ mod tests { fn test_picks_weight_closest_to_400() -> Result<()> { assert_eq!( pick_font_for_fallback_generation(&FontDescriptors::Many(vec![ - generate_font_descriptor(&FontWeight::Fixed("300".into()), &None), - generate_font_descriptor(&FontWeight::Fixed("600".into()), &None) + generate_font_descriptor(&FontWeight::Fixed(rcstr!("300")), None), + generate_font_descriptor(&FontWeight::Fixed(rcstr!("600")), None) ]))?, - &generate_font_descriptor(&FontWeight::Fixed("300".into()), &None) + &generate_font_descriptor(&FontWeight::Fixed(rcstr!("300")), None) ); assert_eq!( pick_font_for_fallback_generation(&FontDescriptors::Many(vec![ - generate_font_descriptor(&FontWeight::Fixed("200".into()), &None), - generate_font_descriptor(&FontWeight::Fixed("500".into()), &None) + generate_font_descriptor(&FontWeight::Fixed(rcstr!("200")), None), + generate_font_descriptor(&FontWeight::Fixed(rcstr!("500")), None) ]))?, - &generate_font_descriptor(&FontWeight::Fixed("500".into()), &None) + &generate_font_descriptor(&FontWeight::Fixed(rcstr!("500")), None) ); assert_eq!( pick_font_for_fallback_generation(&FontDescriptors::Many(vec![ - generate_font_descriptor(&FontWeight::Fixed("normal".into()), &None), - generate_font_descriptor(&FontWeight::Fixed("700".into()), &None) + generate_font_descriptor(&FontWeight::Fixed(rcstr!("normal")), None), + generate_font_descriptor(&FontWeight::Fixed(rcstr!("700")), None) ]))?, - &generate_font_descriptor(&FontWeight::Fixed("normal".into()), &None) + &generate_font_descriptor(&FontWeight::Fixed(rcstr!("normal")), None) ); assert_eq!( pick_font_for_fallback_generation(&FontDescriptors::Many(vec![ - generate_font_descriptor(&FontWeight::Fixed("bold".into()), &None), - generate_font_descriptor(&FontWeight::Fixed("900".into()), &None) + generate_font_descriptor(&FontWeight::Fixed(rcstr!("bold")), None), + generate_font_descriptor(&FontWeight::Fixed(rcstr!("900")), None) ]))?, - &generate_font_descriptor(&FontWeight::Fixed("bold".into()), &None) + &generate_font_descriptor(&FontWeight::Fixed(rcstr!("bold")), None) ); Ok(()) @@ -311,10 +338,10 @@ mod tests { fn test_picks_thinner_weight_if_same_distance_to_400() -> Result<()> { assert_eq!( pick_font_for_fallback_generation(&FontDescriptors::Many(vec![ - generate_font_descriptor(&FontWeight::Fixed("300".into()), &None), - generate_font_descriptor(&FontWeight::Fixed("500".into()), &None) + generate_font_descriptor(&FontWeight::Fixed(rcstr!("300")), None), + generate_font_descriptor(&FontWeight::Fixed(rcstr!("500")), None) ]))?, - &generate_font_descriptor(&FontWeight::Fixed("300".into()), &None) + &generate_font_descriptor(&FontWeight::Fixed(rcstr!("300")), None) ); Ok(()) @@ -324,26 +351,26 @@ mod tests { fn 
test_picks_variable_closest_to_400() -> Result<()> { assert_eq!( pick_font_for_fallback_generation(&FontDescriptors::Many(vec![ - generate_font_descriptor(&FontWeight::Variable("100".into(), "300".into()), &None), - generate_font_descriptor(&FontWeight::Variable("600".into(), "900".into()), &None) + generate_font_descriptor(&FontWeight::Variable(rcstr!("100"), rcstr!("300")), None), + generate_font_descriptor(&FontWeight::Variable(rcstr!("600"), rcstr!("900")), None) ]))?, - &generate_font_descriptor(&FontWeight::Variable("100".into(), "300".into()), &None) + &generate_font_descriptor(&FontWeight::Variable(rcstr!("100"), rcstr!("300")), None) ); assert_eq!( pick_font_for_fallback_generation(&FontDescriptors::Many(vec![ - generate_font_descriptor(&FontWeight::Variable("100".into(), "200".into()), &None), - generate_font_descriptor(&FontWeight::Variable("500".into(), "800".into()), &None) + generate_font_descriptor(&FontWeight::Variable(rcstr!("100"), rcstr!("200")), None), + generate_font_descriptor(&FontWeight::Variable(rcstr!("500"), rcstr!("800")), None) ]))?, - &generate_font_descriptor(&FontWeight::Variable("500".into(), "800".into()), &None) + &generate_font_descriptor(&FontWeight::Variable(rcstr!("500"), rcstr!("800")), None) ); assert_eq!( pick_font_for_fallback_generation(&FontDescriptors::Many(vec![ - generate_font_descriptor(&FontWeight::Variable("100".into(), "900".into()), &None), - generate_font_descriptor(&FontWeight::Variable("300".into(), "399".into()), &None) + generate_font_descriptor(&FontWeight::Variable(rcstr!("100"), rcstr!("900")), None), + generate_font_descriptor(&FontWeight::Variable(rcstr!("300"), rcstr!("399")), None) ]))?, - &generate_font_descriptor(&FontWeight::Variable("100".into(), "900".into()), &None) + &generate_font_descriptor(&FontWeight::Variable(rcstr!("100"), rcstr!("900")), None) ); Ok(()) @@ -353,10 +380,10 @@ mod tests { fn test_prefer_normal_over_italic() -> Result<()> { assert_eq!( pick_font_for_fallback_generation(&FontDescriptors::Many(vec![ - generate_font_descriptor(&FontWeight::Fixed("400".into()), &Some("normal".into())), - generate_font_descriptor(&FontWeight::Fixed("400".into()), &Some("italic".into())) + generate_font_descriptor(&FontWeight::Fixed(rcstr!("400")), Some(rcstr!("normal"))), + generate_font_descriptor(&FontWeight::Fixed(rcstr!("400")), Some(rcstr!("italic"))) ]))?, - &generate_font_descriptor(&FontWeight::Fixed("400".into()), &Some("normal".into())) + &generate_font_descriptor(&FontWeight::Fixed(rcstr!("400")), Some(rcstr!("normal"))) ); Ok(()) @@ -365,10 +392,13 @@ mod tests { #[test] fn test_errors_on_invalid_weight() -> Result<()> { match pick_font_for_fallback_generation(&FontDescriptors::Many(vec![ - generate_font_descriptor(&FontWeight::Variable("normal".into(), "bold".into()), &None), - generate_font_descriptor(&FontWeight::Variable("400".into(), "bold".into()), &None), - generate_font_descriptor(&FontWeight::Variable("normal".into(), "700".into()), &None), - generate_font_descriptor(&FontWeight::Variable("100".into(), "abc".into()), &None), + generate_font_descriptor( + &FontWeight::Variable(rcstr!("normal"), rcstr!("bold")), + None, + ), + generate_font_descriptor(&FontWeight::Variable(rcstr!("400"), rcstr!("bold")), None), + generate_font_descriptor(&FontWeight::Variable(rcstr!("normal"), rcstr!("700")), None), + generate_font_descriptor(&FontWeight::Variable(rcstr!("100"), rcstr!("abc")), None), ])) { Ok(_) => panic!(), Err(err) => { diff --git a/crates/next-core/src/next_font/local/mod.rs 
b/crates/next-core/src/next_font/local/mod.rs index 97cd8e60b5dac..3f6065e6c9397 100644 --- a/crates/next-core/src/next_font/local/mod.rs +++ b/crates/next-core/src/next_font/local/mod.rs @@ -1,42 +1,43 @@ -use anyhow::{bail, Context, Result}; +use anyhow::{Context, Result, bail}; +use font_fallback::FontFallbackResult; use indoc::formatdoc; use serde::{Deserialize, Serialize}; -use turbo_rcstr::RcStr; -use turbo_tasks::{ResolvedVc, Value, Vc}; +use turbo_rcstr::{RcStr, rcstr}; +use turbo_tasks::{ResolvedVc, Vc}; use turbo_tasks_fs::{ - glob::Glob, json::parse_json_with_source_context, FileContent, FileSystemPath, + FileContent, FileSystemPath, glob::Glob, json::parse_json_with_source_context, }; use turbopack_core::{ asset::AssetContent, issue::{Issue, IssueExt, IssueSeverity, IssueStage, StyledString}, reference_type::ReferenceType, resolve::{ + ResolveResult, ResolveResultItem, ResolveResultOption, parse::Request, plugin::{BeforeResolvePlugin, BeforeResolvePluginCondition}, - ResolveResult, ResolveResultItem, ResolveResultOption, }, virtual_source::VirtualSource, }; use self::{ font_fallback::get_font_fallbacks, - options::{options_from_request, FontDescriptors, NextFontLocalOptions}, + options::{FontDescriptors, NextFontLocalOptions, options_from_request}, stylesheet::build_stylesheet, util::build_font_family_string, }; use super::{ font_fallback::FontFallbacks, - util::{can_use_next_font, FontCssProperties}, + util::{FontCssProperties, can_use_next_font}, }; use crate::{ next_app::metadata::split_extension, next_font::{ - local::{errors::FontError, options::FontWeight}, + local::options::FontWeight, util::{get_request_hash, get_request_id}, }, }; -mod errors; +pub mod errors; pub mod font_fallback; pub mod options; pub mod request; @@ -67,16 +68,16 @@ impl NextFontLocalResolvePlugin { impl BeforeResolvePlugin for NextFontLocalResolvePlugin { #[turbo_tasks::function] fn before_resolve_condition(&self) -> Vc { - BeforeResolvePluginCondition::from_request_glob(Glob::new( - "{next,@vercel/turbopack-next/internal}/font/local/*".into(), - )) + BeforeResolvePluginCondition::from_request_glob(Glob::new(rcstr!( + "{next,@vercel/turbopack-next/internal}/font/local/*" + ))) } #[turbo_tasks::function] async fn before_resolve( self: Vc, lookup_path: Vc, - _reference_type: Value, + _reference_type: ReferenceType, request_vc: Vc, ) -> Result> { let this = &*self.await?; @@ -89,7 +90,7 @@ impl BeforeResolvePlugin for NextFontLocalResolvePlugin { let Request::Module { module: _, path: _, - query: query_vc, + query, fragment: _, } = request else { @@ -98,41 +99,33 @@ impl BeforeResolvePlugin for NextFontLocalResolvePlugin { match request_key.as_str() { "next/font/local/target.css" => { - if !can_use_next_font(*this.root, **query_vc).await? { + if !can_use_next_font(*this.root, query).await? 
{ return Ok(ResolveResultOption::none()); } - let query = query_vc.await?.to_string(); - let request_hash = get_request_hash(&query).await?; + let request_hash = get_request_hash(query.as_str()); let qstr = qstring::QString::from(query.as_str()); - let options_vc = font_options_from_query_map(**query_vc); - let font_fallbacks = get_font_fallbacks(lookup_path, options_vc); - let properties = get_font_css_properties(options_vc, font_fallbacks).await; + let options_vc = font_options_from_query_map(query.clone()); + let font_fallbacks = &*get_font_fallbacks(lookup_path, options_vc).await?; let lookup_path = lookup_path.to_resolved().await?; - if let Err(e) = &properties { - for source_error in e.chain() { - if let Some(FontError::FontFileNotFound(font_path)) = - source_error.downcast_ref::() - { - FontResolvingIssue { - origin_path: lookup_path, - font_path: ResolvedVc::cell(font_path.clone()), - } - .resolved_cell() - .emit(); - - return Ok(ResolveResultOption::some(*ResolveResult::primary( - ResolveResultItem::Error(ResolvedVc::cell( - format!("Font file not found: Can't resolve {}'", font_path) - .into(), - )), - ))); + let font_fallbacks = match font_fallbacks { + FontFallbackResult::FontFileNotFound(err) => { + FontResolvingIssue { + origin_path: lookup_path, + font_path: ResolvedVc::cell(err.0.clone()), } + .resolved_cell() + .emit(); + + return Ok(ResolveResultOption::some(*ResolveResult::primary( + ResolveResultItem::Error(ResolvedVc::cell(err.to_string().into())), + ))); } - } + FontFallbackResult::Ok(font_fallbacks) => *font_fallbacks, + }; - let properties = properties?; + let properties = get_font_css_properties(options_vc, *font_fallbacks).await?; let file_content = formatdoc!( r#" import cssModule from "@vercel/turbopack-next/internal/font/local/cssmodule.module.css?{}"; @@ -157,20 +150,20 @@ impl BeforeResolvePlugin for NextFontLocalResolvePlugin { .weight .await? .as_ref() - .map(|w| format!("fontWeight: {},\n", w)) + .map(|w| format!("fontWeight: {w},\n")) .unwrap_or_else(|| "".to_owned()), properties .style .await? .as_ref() - .map(|s| format!("fontStyle: \"{}\",\n", s)) + .map(|s| format!("fontStyle: \"{s}\",\n")) .unwrap_or_else(|| "".to_owned()), ); let js_asset = VirtualSource::new( lookup_path.join( format!( "{}.js", - get_request_id(options_vc.font_family(), request_hash).await? + get_request_id(options_vc.font_family().await?, request_hash) ) .into(), ), @@ -184,20 +177,34 @@ impl BeforeResolvePlugin for NextFontLocalResolvePlugin { ))) } "@vercel/turbopack-next/internal/font/local/cssmodule.module.css" => { - let query = query_vc.await?.to_string(); - let request_hash = get_request_hash(&query).await?; - let options = font_options_from_query_map(**query_vc); + let request_hash = get_request_hash(query); + let options = font_options_from_query_map(query.clone()); let css_virtual_path = lookup_path.join( format!( "/{}.module.css", - get_request_id(options.font_family(), request_hash).await? 
+ get_request_id(options.font_family().await?, request_hash) ) .into(), ); - let fallback = get_font_fallbacks(lookup_path, options); + let fallback = &*get_font_fallbacks(lookup_path, options).await?; + let fallback = match fallback { + FontFallbackResult::FontFileNotFound(err) => { + FontResolvingIssue { + origin_path: lookup_path.to_resolved().await?, + font_path: ResolvedVc::cell(err.0.clone()), + } + .resolved_cell() + .emit(); + + return Ok(ResolveResultOption::some(*ResolveResult::primary( + ResolveResultItem::Error(ResolvedVc::cell(err.to_string().into())), + ))); + } + FontFallbackResult::Ok(font_fallbacks) => **font_fallbacks, + }; let stylesheet = build_stylesheet( - font_options_from_query_map(**query_vc), + font_options_from_query_map(query.clone()), fallback, get_font_css_properties(options, fallback), ) @@ -219,7 +226,7 @@ impl BeforeResolvePlugin for NextFontLocalResolvePlugin { path, preload, has_size_adjust: size_adjust, - } = font_file_options_from_query_map(**query_vc).await?; + } = font_file_options_from_query_map(query)?; let (filename, ext) = split_extension(&path); let ext = ext.with_context(|| format!("font {} needs an extension", &path))?; @@ -233,7 +240,7 @@ impl BeforeResolvePlugin for NextFontLocalResolvePlugin { name.push_str(".p") } - let font_virtual_path = lookup_path.join(format!("/{}.{}", name, ext).into()); + let font_virtual_path = lookup_path.join(format!("/{name}.{ext}").into()); let font_file = lookup_path.join(path.clone()).read(); @@ -285,8 +292,8 @@ async fn get_font_css_properties( } #[turbo_tasks::function] -async fn font_options_from_query_map(query: Vc) -> Result> { - let query_map = qstring::QString::from(&**query.await?); +fn font_options_from_query_map(query: RcStr) -> Result> { + let query_map = qstring::QString::from(query.as_str()); if query_map.len() != 1 { bail!("next/font/local queries have exactly one entry"); @@ -296,14 +303,11 @@ async fn font_options_from_query_map(query: Vc) -> Result, -) -> Result { - let query_map = qstring::QString::from(&**query.await?); +fn font_file_options_from_query_map(query: &RcStr) -> Result { + let query_map = qstring::QString::from(query.as_str()); if query_map.len() != 1 { bail!("next/font/local queries have exactly one entry"); @@ -324,9 +328,8 @@ struct FontResolvingIssue { #[turbo_tasks::value_impl] impl Issue for FontResolvingIssue { - #[turbo_tasks::function] - fn severity(&self) -> Vc { - IssueSeverity::Error.cell() + fn severity(&self) -> IssueSeverity { + IssueSeverity::Error } #[turbo_tasks::function] @@ -343,9 +346,9 @@ impl Issue for FontResolvingIssue { async fn title(self: Vc) -> Result> { let this = self.await?; Ok(StyledString::Line(vec![ - StyledString::Text("Font file not found: Can't resolve '".into()), + StyledString::Text(rcstr!("Font file not found: Can't resolve '")), StyledString::Code(this.font_path.owned().await?), - StyledString::Text("'".into()), + StyledString::Text(rcstr!("'")), ]) .cell()) } diff --git a/crates/next-core/src/next_font/local/options.rs b/crates/next-core/src/next_font/local/options.rs index 337e4a14a102e..92435d977a8f0 100644 --- a/crates/next-core/src/next_font/local/options.rs +++ b/crates/next-core/src/next_font/local/options.rs @@ -3,7 +3,7 @@ use std::{fmt::Display, str::FromStr}; use anyhow::{Context, Result}; use serde::{Deserialize, Serialize}; use turbo_rcstr::RcStr; -use turbo_tasks::{trace::TraceRawVcs, NonLocalValue, Value, Vc}; +use turbo_tasks::{NonLocalValue, TaskInput, Vc, trace::TraceRawVcs}; use super::request::{ AdjustFontFallback, 
NextFontLocalRequest, NextFontLocalRequestArguments, SrcDescriptor, @@ -12,8 +12,8 @@ use super::request::{ /// A normalized, Vc-friendly struct derived from validating and transforming /// [[NextFontLocalRequest]] -#[turbo_tasks::value(serialization = "auto_for_input")] -#[derive(Clone, Debug, PartialOrd, Ord, Hash)] +#[turbo_tasks::value] +#[derive(Clone, Debug, PartialOrd, Ord, Hash, TaskInput)] pub(super) struct NextFontLocalOptions { pub fonts: FontDescriptors, pub default_weight: Option, @@ -34,16 +34,17 @@ pub(super) struct NextFontLocalOptions { pub variable_name: RcStr, } -#[turbo_tasks::value_impl] impl NextFontLocalOptions { - #[turbo_tasks::function] - pub fn new(options: Value) -> Vc { - Self::cell(options.into_value()) + pub async fn font_family(self: Vc) -> Result { + Ok(self.await?.variable_name.clone()) } +} +#[turbo_tasks::value_impl] +impl NextFontLocalOptions { #[turbo_tasks::function] - pub fn font_family(&self) -> Vc { - Vc::cell(self.variable_name.clone()) + pub fn new(options: NextFontLocalOptions) -> Vc { + Self::cell(options) } } @@ -61,6 +62,7 @@ impl NextFontLocalOptions { Serialize, TraceRawVcs, NonLocalValue, + TaskInput, )] pub(super) struct FontDescriptor { pub weight: Option, @@ -102,6 +104,7 @@ impl FontDescriptor { Serialize, TraceRawVcs, NonLocalValue, + TaskInput, )] pub(super) enum FontDescriptors { /// `One` is a special case when the user did not provide a `src` field and @@ -124,6 +127,7 @@ pub(super) enum FontDescriptors { Hash, TraceRawVcs, NonLocalValue, + TaskInput, )] pub(super) enum FontWeight { Variable(RcStr, RcStr), @@ -149,7 +153,7 @@ impl Display for FontWeight { f, "{}", match self { - Self::Variable(start, end) => format!("{} {}", start, end), + Self::Variable(start, end) => format!("{start} {end}"), Self::Fixed(val) => val.to_string(), } ) @@ -204,9 +208,10 @@ pub(super) fn options_from_request(request: &NextFontLocalRequest) -> Result panic!("Expected failure, received {:?}", r), + Ok(r) => panic!("Expected failure, received {r:?}"), Err(err) => { - assert!(err - .to_string() - .contains("expected Expected string or `false`. Received `true`"),) + assert!( + err.to_string() + .contains("expected Expected string or `false`. 
Received `true`"), + ) } } @@ -359,19 +365,19 @@ mod tests { options_from_request(&request)?, NextFontLocalOptions { fonts: FontDescriptors::One(FontDescriptor { - path: "./Roboto-Regular.woff".into(), - weight: Some(FontWeight::Fixed("500".into())), - style: Some("italic".into()), - ext: "woff".into(), + path: rcstr!("./Roboto-Regular.woff"), + weight: Some(FontWeight::Fixed(rcstr!("500"))), + style: Some(rcstr!("italic")), + ext: rcstr!("woff"), }), - default_style: Some("italic".into()), - default_weight: Some(FontWeight::Fixed("500".into())), - display: "optional".into(), + default_style: Some(rcstr!("italic")), + default_weight: Some(FontWeight::Fixed(rcstr!("500"))), + display: rcstr!("optional"), preload: false, - fallback: Some(vec!["Fallback".into()]), + fallback: Some(vec![rcstr!("Fallback")]), adjust_font_fallback: AdjustFontFallback::TimesNewRoman, - variable: Some("myvar".into()), - variable_name: "myFont".into() + variable: Some(rcstr!("myvar")), + variable_name: rcstr!("myFont") }, ); diff --git a/crates/next-core/src/next_font/local/request.rs b/crates/next-core/src/next_font/local/request.rs index f044103981c02..7944e4e38c410 100644 --- a/crates/next-core/src/next_font/local/request.rs +++ b/crates/next-core/src/next_font/local/request.rs @@ -1,6 +1,6 @@ use serde::{Deserialize, Serialize}; use turbo_rcstr::RcStr; -use turbo_tasks::{trace::TraceRawVcs, NonLocalValue}; +use turbo_tasks::{NonLocalValue, TaskInput, trace::TraceRawVcs}; /// The top-most structure encoded into the query param in requests to /// `next/font/local` generated by the next/font swc transform. e.g. @@ -58,6 +58,7 @@ pub(super) struct SrcDescriptor { Serialize, TraceRawVcs, NonLocalValue, + TaskInput, )] pub(super) enum AdjustFontFallback { Arial, @@ -120,7 +121,7 @@ mod tests { use serde::Deserialize; use super::{ - default_adjust_font_fallback, deserialize_adjust_font_fallback, AdjustFontFallback, + AdjustFontFallback, default_adjust_font_fallback, deserialize_adjust_font_fallback, }; #[derive(Debug, Deserialize, PartialEq)] diff --git a/crates/next-core/src/next_font/local/stylesheet.rs b/crates/next-core/src/next_font/local/stylesheet.rs index 8589828c7841a..18661ff9de9f5 100644 --- a/crates/next-core/src/next_font/local/stylesheet.rs +++ b/crates/next-core/src/next_font/local/stylesheet.rs @@ -1,4 +1,4 @@ -use anyhow::{bail, Result}; +use anyhow::{Result, bail}; use indoc::formatdoc; use turbo_rcstr::RcStr; use turbo_tasks::Vc; @@ -8,7 +8,7 @@ use crate::next_font::{ font_fallback::FontFallbacks, local::NextFontLocalFontFileOptions, stylesheet::{build_fallback_definition, build_font_class_rules}, - util::{get_scoped_font_family, FontCssProperties, FontFamilyType}, + util::{FontCssProperties, FontFamilyType, get_scoped_font_family}, }; #[turbo_tasks::function] @@ -18,7 +18,7 @@ pub(super) async fn build_stylesheet( css_properties: Vc, ) -> Result> { let scoped_font_family = - get_scoped_font_family(FontFamilyType::WebFont.cell(), options.font_family()); + get_scoped_font_family(FontFamilyType::WebFont, options.font_family().await?); Ok(Vc::cell( formatdoc!( @@ -39,7 +39,7 @@ pub(super) async fn build_stylesheet( /// Builds a string of `@font-face` definitions for each local font file #[turbo_tasks::function] pub(super) async fn build_font_face_definitions( - scoped_font_family: Vc, + scoped_font_family: RcStr, options: Vc, has_size_adjust: Vc, ) -> Result> { @@ -70,7 +70,7 @@ pub(super) async fn build_font_face_definitions( {}{} }} "#, - *scoped_font_family.await?, + scoped_font_family, query_str, 
ext_to_format(&font.ext)?, options.display, @@ -78,12 +78,12 @@ pub(super) async fn build_font_face_definitions( .weight .as_ref() .or(options.default_weight.as_ref()) - .map_or_else(|| "".to_owned(), |w| format!("font-weight: {};", w)), + .map_or_else(|| "".to_owned(), |w| format!("font-weight: {w};")), &font .style .as_ref() .or(options.default_style.as_ref()) - .map_or_else(|| "".to_owned(), |s| format!("font-style: {};", s)), + .map_or_else(|| "".to_owned(), |s| format!("font-style: {s};")), )); } diff --git a/crates/next-core/src/next_font/local/util.rs b/crates/next-core/src/next_font/local/util.rs index c55df155fa734..50a6d0972c0f8 100644 --- a/crates/next-core/src/next_font/local/util.rs +++ b/crates/next-core/src/next_font/local/util.rs @@ -5,7 +5,7 @@ use turbo_tasks::Vc; use super::options::NextFontLocalOptions; use crate::next_font::{ font_fallback::{FontFallback, FontFallbacks}, - util::{get_scoped_font_family, FontFamilyType}, + util::{FontFamilyType, get_scoped_font_family}, }; /// Returns a string to be used as the `font-family` property in css. @@ -14,16 +14,18 @@ pub(super) async fn build_font_family_string( options: Vc, font_fallbacks: Vc, ) -> Result> { - let mut font_families = vec![format!( - "'{}'", - *get_scoped_font_family(FontFamilyType::WebFont.cell(), options.font_family(),).await? - ) - .into()]; + let mut font_families = vec![ + format!( + "'{}'", + get_scoped_font_family(FontFamilyType::WebFont, options.font_family().await?) + ) + .into(), + ]; for font_fallback in &*font_fallbacks.await? { match &*font_fallback.await? { FontFallback::Automatic(fallback) => { - font_families.push(format!("'{}'", *fallback.scoped_font_family.await?).into()); + font_families.push(format!("'{}'", fallback.scoped_font_family).into()); } FontFallback::Manual(fallbacks) => { font_families.extend_from_slice(fallbacks); diff --git a/crates/next-core/src/next_font/stylesheet.rs b/crates/next-core/src/next_font/stylesheet.rs index cd482b4de2bbe..149da57f5c4ec 100644 --- a/crates/next-core/src/next_font/stylesheet.rs +++ b/crates/next-core/src/next_font/stylesheet.rs @@ -38,8 +38,8 @@ pub(crate) async fn build_fallback_definition(fallbacks: Vc) -> R {} }} "#, - fallback.scoped_font_family.await?, - fallback.local_font_family.await?, + fallback.scoped_font_family, + fallback.local_font_family, override_properties )); } @@ -67,13 +67,13 @@ pub(super) async fn build_font_class_rules( .weight .await? .as_ref() - .map(|w| format!("font-weight: {};\n", w)) + .map(|w| format!("font-weight: {w};\n")) .unwrap_or_else(|| "".to_owned()), css_properties .style .await? .as_ref() - .map(|s| format!("font-style: {};\n", s)) + .map(|s| format!("font-style: {s};\n")) .unwrap_or_else(|| "".to_owned()), ); diff --git a/crates/next-core/src/next_font/util.rs b/crates/next-core/src/next_font/util.rs index a9bc9e2452eac..99baa00151937 100644 --- a/crates/next-core/src/next_font/util.rs +++ b/crates/next-core/src/next_font/util.rs @@ -1,8 +1,8 @@ use anyhow::{Context, Result}; use serde::Deserialize; -use turbo_rcstr::RcStr; +use turbo_rcstr::{RcStr, rcstr}; use turbo_tasks::{ResolvedVc, Vc}; -use turbo_tasks_fs::{json::parse_json_with_source_context, FileSystemPath}; +use turbo_tasks_fs::{FileSystemPath, json::parse_json_with_source_context}; use turbo_tasks_hash::hash_xxh3_hash64; use turbopack_core::issue::{IssueExt, IssueSeverity, StyledString}; @@ -22,19 +22,17 @@ pub(crate) struct FontCssProperties { /// A hash of the requested querymap derived from how the user invoked /// next/font. 
Used to uniquely identify font requests for generated filenames /// and scoped font family names. -pub(crate) async fn get_request_hash(query: &str) -> Result { +pub(crate) fn get_request_hash(query: &str) -> u32 { let query = qstring::QString::from(query); - let mut to_hash = vec![]; + let mut to_hash = Vec::with_capacity(query.len() * 2); for (k, v) in query { to_hash.push(k); to_hash.push(v); } - Ok( - // Truncate the hash to u32. These hashes are ultimately displayed as 6- or 8-character - // hexadecimal values. - hash_xxh3_hash64(to_hash) as u32, - ) + // Truncate the hash to u32. These hashes are ultimately displayed as 6- or 8-character + // hexadecimal values. + hash_xxh3_hash64(to_hash) as u32 } #[turbo_tasks::value(shared)] @@ -48,31 +46,21 @@ pub(crate) enum FontFamilyType { /// e.g. `__Roboto_Fallback_c123b8` /// * `font_family_name` - The font name to scope, e.g. `Roboto` /// * `request_hash` - The hash value of the font request -#[turbo_tasks::function] -pub(crate) async fn get_scoped_font_family( - ty: Vc, - font_family_name: Vc, -) -> Result> { - let font_family_base = font_family_name.await?.to_string(); - let font_family_name = match &*ty.await? { - FontFamilyType::WebFont => font_family_base, - FontFamilyType::Fallback => format!("{} Fallback", font_family_base), - }; - - Ok(Vc::cell(font_family_name.into())) +pub(crate) fn get_scoped_font_family(ty: FontFamilyType, font_family_name: RcStr) -> RcStr { + match ty { + FontFamilyType::WebFont => font_family_name, + FontFamilyType::Fallback => format!("{font_family_name} Fallback").into(), + } } -/// Returns a [Vc] for [String] uniquely identifying the request for the font. -#[turbo_tasks::function] -pub async fn get_request_id(font_family: Vc, request_hash: u32) -> Result> { - Ok(Vc::cell( - format!( - "{}_{:x?}", - font_family.await?.to_lowercase().replace(' ', "_"), - request_hash - ) - .into(), - )) +/// Returns a [RcStr] for [String] uniquely identifying the request for the font. 
+pub fn get_request_id(font_family: RcStr, request_hash: u32) -> RcStr { + format!( + "{}_{:x?}", + font_family.to_lowercase().replace(' ', "_"), + request_hash + ) + .into() } #[derive(Debug, Deserialize)] @@ -82,9 +70,9 @@ struct HasPath { pub(crate) async fn can_use_next_font( project_path: Vc, - query: Vc, + query: &RcStr, ) -> Result { - let query_map = qstring::QString::from(&**query.await?); + let query_map = qstring::QString::from(query.as_str()); let request: HasPath = parse_json_with_source_context( query_map .to_pairs() @@ -100,16 +88,16 @@ pub(crate) async fn can_use_next_font( NextFontIssue { path: path.to_resolved().await?, title: StyledString::Line(vec![ - StyledString::Code("next/font:".into()), - StyledString::Text(" error:".into()), + StyledString::Code(rcstr!("next/font:")), + StyledString::Text(rcstr!(" error:")), ]) .resolved_cell(), description: StyledString::Line(vec![ - StyledString::Text("Cannot be used within ".into()), + StyledString::Text(rcstr!("Cannot be used within ")), StyledString::Code(request.path), ]) .resolved_cell(), - severity: IssueSeverity::Error.resolved_cell(), + severity: IssueSeverity::Error, } .resolved_cell() .emit(); diff --git a/crates/next-core/src/next_image/module.rs b/crates/next-core/src/next_image/module.rs index ce86b7717d945..9c58747d4c04c 100644 --- a/crates/next-core/src/next_image/module.rs +++ b/crates/next-core/src/next_image/module.rs @@ -1,6 +1,8 @@ use anyhow::Result; -use turbo_tasks::{fxindexmap, ResolvedVc, TaskInput, Value, Vc}; -use turbopack::{module_options::CustomModuleType, ModuleAssetContext}; +use serde::{Deserialize, Serialize}; +use turbo_rcstr::rcstr; +use turbo_tasks::{NonLocalValue, ResolvedVc, TaskInput, Vc, fxindexmap, trace::TraceRawVcs}; +use turbopack::{ModuleAssetContext, module_options::CustomModuleType}; use turbopack_core::{ context::AssetContext, module::Module, reference_type::ReferenceType, resolve::ModulePart, source::Source, @@ -9,8 +11,21 @@ use turbopack_static::ecma::StaticUrlJsModule; use super::source_asset::StructuredImageFileSource; -#[turbo_tasks::value(serialization = "auto_for_input")] -#[derive(Clone, Copy, Debug, PartialOrd, Ord, Hash, TaskInput)] +#[derive( + Eq, + PartialEq, + Clone, + Copy, + Debug, + PartialOrd, + Ord, + Hash, + TaskInput, + TraceRawVcs, + NonLocalValue, + Serialize, + Deserialize, +)] pub enum BlurPlaceholderMode { /// Do not generate a blur placeholder at all. 
None, @@ -50,17 +65,17 @@ impl StructuredImageModuleType { } .cell(), ), - Value::new(ReferenceType::Internal(ResolvedVc::cell(fxindexmap!( - "IMAGE".into() => ResolvedVc::upcast(static_asset) - )))), + ReferenceType::Internal(ResolvedVc::cell(fxindexmap!( + rcstr!("IMAGE") => ResolvedVc::upcast(static_asset) + ))), ) .module()) } #[turbo_tasks::function] - pub fn new(blur_placeholder_mode: Value) -> Vc { + pub fn new(blur_placeholder_mode: BlurPlaceholderMode) -> Vc { StructuredImageModuleType::cell(StructuredImageModuleType { - blur_placeholder_mode: blur_placeholder_mode.into_value(), + blur_placeholder_mode, }) } } diff --git a/crates/next-core/src/next_image/source_asset.rs b/crates/next-core/src/next_image/source_asset.rs index 06ebcb9c59a36..5cd774d06f83a 100644 --- a/crates/next-core/src/next_image/source_asset.rs +++ b/crates/next-core/src/next_image/source_asset.rs @@ -1,23 +1,19 @@ use std::io::Write; -use anyhow::{bail, Result}; -use turbo_rcstr::RcStr; +use anyhow::{Result, bail}; +use turbo_rcstr::rcstr; use turbo_tasks::{ResolvedVc, Vc}; -use turbo_tasks_fs::{rope::RopeBuilder, FileContent}; +use turbo_tasks_fs::{FileContent, rope::RopeBuilder}; use turbopack_core::{ asset::{Asset, AssetContent}, ident::AssetIdent, source::Source, }; use turbopack_ecmascript::utils::StringifyJs; -use turbopack_image::process::{get_meta_data, BlurPlaceholderOptions}; +use turbopack_image::process::{BlurPlaceholderOptions, get_meta_data}; use super::module::BlurPlaceholderMode; -fn modifier() -> Vc { - Vc::cell("structured image object".into()) -} - #[turbo_tasks::function] fn blur_options() -> Vc { BlurPlaceholderOptions { @@ -41,7 +37,7 @@ impl Source for StructuredImageFileSource { fn ident(&self) -> Vc { self.image .ident() - .with_modifier(modifier()) + .with_modifier(rcstr!("structured image object")) .rename_as("*.mjs".into()) } } diff --git a/crates/next-core/src/next_import_map.rs b/crates/next-core/src/next_import_map.rs index 235e03b2ac7a9..bcd6920634b6e 100644 --- a/crates/next-core/src/next_import_map.rs +++ b/crates/next-core/src/next_import_map.rs @@ -2,31 +2,32 @@ use std::collections::BTreeMap; use anyhow::{Context, Result}; use rustc_hash::FxHashMap; -use turbo_rcstr::RcStr; -use turbo_tasks::{fxindexmap, FxIndexMap, ResolvedVc, Value, Vc}; +use turbo_rcstr::{RcStr, rcstr}; +use turbo_tasks::{FxIndexMap, ResolvedVc, Vc, fxindexmap}; use turbo_tasks_fs::{FileSystem, FileSystemPath}; use turbopack_core::{ reference_type::{CommonJsReferenceSubType, ReferenceType}, resolve::{ + AliasPattern, ExternalTraced, ExternalType, ResolveAliasMap, SubpathValue, node::node_cjs_resolve_options, options::{ConditionValue, ImportMap, ImportMapping, ResolvedMap}, parse::Request, pattern::Pattern, - resolve, AliasPattern, ExternalTraced, ExternalType, ResolveAliasMap, SubpathValue, + resolve, }, source::Source, }; use turbopack_node::execution_context::ExecutionContext; use crate::{ - embed_js::{next_js_fs, VIRTUAL_PACKAGE_NAME}, + embed_js::{VIRTUAL_PACKAGE_NAME, next_js_fs}, mode::NextMode, next_client::context::ClientContextType, next_config::NextConfig, next_edge::unsupported::NextEdgeUnsupportedModuleReplacer, next_font::google::{ - NextFontGoogleCssModuleReplacer, NextFontGoogleFontFileReplacer, NextFontGoogleReplacer, - GOOGLE_FONTS_INTERNAL_PREFIX, + GOOGLE_FONTS_INTERNAL_PREFIX, NextFontGoogleCssModuleReplacer, + NextFontGoogleFontFileReplacer, NextFontGoogleReplacer, }, next_server::context::ServerContextType, util::NextRuntime, @@ -90,8 +91,9 @@ const EDGE_UNSUPPORTED_NODE_INTERNALS: 
[&str; 44] = [ #[turbo_tasks::function] pub async fn get_next_client_import_map( project_path: ResolvedVc, - ty: Value, + ty: ClientContextType, next_config: Vc, + next_mode: Vc, execution_context: Vc, ) -> Result> { let mut import_map = ImportMap::empty(); @@ -101,6 +103,7 @@ pub async fn get_next_client_import_map( project_path, execution_context, next_config, + next_mode, false, ) .await?; @@ -115,12 +118,11 @@ pub async fn get_next_client_import_map( ) .await?; - match ty.into_value() { + match ty { ClientContextType::Pages { .. } => {} ClientContextType::App { app_dir } => { let react_flavor = if *next_config.enable_ppr().await? || *next_config.enable_taint().await? - || *next_config.enable_react_owner_stack().await? || *next_config.enable_view_transition().await? || *next_config.enable_router_bfcache().await? { @@ -197,19 +199,23 @@ pub async fn get_next_client_import_map( &format!("next/dist/compiled/react-server-dom-turbopack{react_flavor}/*"), ), ); - import_map.insert_exact_alias( + insert_exact_alias_or_js( + &mut import_map, "next/head", request_to_import_mapping(project_path, "next/dist/client/components/noop-head"), ); - import_map.insert_exact_alias( + insert_exact_alias_or_js( + &mut import_map, "next/dynamic", request_to_import_mapping(project_path, "next/dist/shared/lib/app-dynamic"), ); - import_map.insert_exact_alias( + insert_exact_alias_or_js( + &mut import_map, "next/link", request_to_import_mapping(project_path, "next/dist/client/app-dir/link"), ); - import_map.insert_exact_alias( + insert_exact_alias_or_js( + &mut import_map, "next/form", request_to_import_mapping(project_path, "next/dist/client/app-dir/form"), ); @@ -230,7 +236,7 @@ pub async fn get_next_client_import_map( }, ); - match ty.into_value() { + match ty { ClientContextType::Pages { .. } | ClientContextType::App { .. } | ClientContextType::Fallback => { @@ -253,12 +259,10 @@ pub async fn get_next_client_import_map( /// Computes the Next-specific client fallback import map, which provides /// polyfills to Node.js externals. #[turbo_tasks::function] -pub async fn get_next_client_fallback_import_map( - ty: Value, -) -> Result> { +pub async fn get_next_client_fallback_import_map(ty: ClientContextType) -> Result> { let mut import_map = ImportMap::empty(); - match ty.into_value() { + match ty { ClientContextType::Pages { pages_dir: context_dir, } @@ -283,8 +287,9 @@ pub async fn get_next_client_fallback_import_map( #[turbo_tasks::function] pub async fn get_next_server_import_map( project_path: ResolvedVc, - ty: Value, + ty: ServerContextType, next_config: Vc, + next_mode: Vc, execution_context: Vc, ) -> Result> { let mut import_map = ImportMap::empty(); @@ -294,6 +299,7 @@ pub async fn get_next_server_import_map( project_path, execution_context, next_config, + next_mode, false, ) .await?; @@ -306,8 +312,6 @@ pub async fn get_next_server_import_map( ) .await?; - let ty = ty.into_value(); - let external = ImportMapping::External(None, ExternalType::CommonJs, ExternalTraced::Traced) .resolved_cell(); @@ -325,7 +329,7 @@ pub async fn get_next_server_import_map( import_map.insert_exact_alias( "styled-jsx/style", ImportMapping::External( - Some("styled-jsx/style.js".into()), + Some(rcstr!("styled-jsx/style.js")), ExternalType::CommonJs, ExternalTraced::Traced, ) @@ -338,19 +342,23 @@ pub async fn get_next_server_import_map( ServerContextType::AppSSR { .. } | ServerContextType::AppRSC { .. } | ServerContextType::AppRoute { .. 
} => { - import_map.insert_exact_alias( + insert_exact_alias_or_js( + &mut import_map, "next/head", request_to_import_mapping(project_path, "next/dist/client/components/noop-head"), ); - import_map.insert_exact_alias( + insert_exact_alias_or_js( + &mut import_map, "next/dynamic", request_to_import_mapping(project_path, "next/dist/shared/lib/app-dynamic"), ); - import_map.insert_exact_alias( + insert_exact_alias_or_js( + &mut import_map, "next/link", request_to_import_mapping(project_path, "next/dist/client/app-dir/link"), ); - import_map.insert_exact_alias( + insert_exact_alias_or_js( + &mut import_map, "next/form", request_to_import_mapping(project_path, "next/dist/client/app-dir/form"), ); @@ -374,8 +382,9 @@ pub async fn get_next_server_import_map( #[turbo_tasks::function] pub async fn get_next_edge_import_map( project_path: ResolvedVc, - ty: Value, + ty: ServerContextType, next_config: Vc, + next_mode: Vc, execution_context: Vc, ) -> Result> { let mut import_map = ImportMap::empty(); @@ -427,6 +436,7 @@ pub async fn get_next_edge_import_map( project_path, execution_context, next_config, + next_mode, true, ) .await?; @@ -441,8 +451,7 @@ pub async fn get_next_edge_import_map( ) .await?; - let ty = ty.into_value(); - match ty { + match &ty { ServerContextType::Pages { .. } | ServerContextType::PagesData { .. } | ServerContextType::PagesApi { .. } @@ -451,25 +460,28 @@ pub async fn get_next_edge_import_map( ServerContextType::AppSSR { .. } | ServerContextType::AppRSC { .. } | ServerContextType::AppRoute { .. } => { - import_map.insert_exact_alias( + insert_exact_alias_or_js( + &mut import_map, "next/head", request_to_import_mapping(project_path, "next/dist/client/components/noop-head"), ); - import_map.insert_exact_alias( + insert_exact_alias_or_js( + &mut import_map, "next/dynamic", request_to_import_mapping(project_path, "next/dist/shared/lib/app-dynamic"), ); - import_map.insert_exact_alias( + insert_exact_alias_or_js( + &mut import_map, "next/link", request_to_import_mapping(project_path, "next/dist/client/app-dir/link"), - ) + ); } } insert_next_server_special_aliases( &mut import_map, project_path, - ty, + ty.clone(), NextRuntime::Edge, next_config, ) @@ -486,12 +498,7 @@ pub async fn get_next_edge_import_map( | ServerContextType::Pages { .. } | ServerContextType::PagesData { .. } | ServerContextType::PagesApi { .. } => { - insert_unsupported_node_internal_aliases( - &mut import_map, - *project_path, - execution_context, - ) - .await?; + insert_unsupported_node_internal_aliases(&mut import_map).await?; } } @@ -501,13 +508,9 @@ pub async fn get_next_edge_import_map( /// Insert default aliases for the node.js's internal to raise unsupported /// runtime errors. User may provide polyfills for their own by setting user /// config's alias. -async fn insert_unsupported_node_internal_aliases( - import_map: &mut ImportMap, - project_path: Vc, - execution_context: Vc, -) -> Result<()> { +async fn insert_unsupported_node_internal_aliases(import_map: &mut ImportMap) -> Result<()> { let unsupported_replacer = ImportMapping::Dynamic(ResolvedVc::upcast( - NextEdgeUnsupportedModuleReplacer::new(project_path, execution_context) + NextEdgeUnsupportedModuleReplacer::new() .to_resolved() .await?, )) @@ -599,14 +602,14 @@ async fn insert_next_server_special_aliases( .resolved_cell(), ); - match ty { + match &ty { ServerContextType::Pages { .. } | ServerContextType::PagesApi { .. } => {} ServerContextType::PagesData { .. 
} => {} // the logic closely follows the one in createRSCAliases in webpack-config.ts ServerContextType::AppSSR { app_dir } | ServerContextType::AppRSC { app_dir, .. } | ServerContextType::AppRoute { app_dir, .. } => { - let next_package = get_next_package(*app_dir).to_resolved().await?; + let next_package = get_next_package(**app_dir).to_resolved().await?; import_map.insert_exact_alias( "styled-jsx", request_to_import_mapping(next_package, "styled-jsx"), @@ -616,10 +619,10 @@ async fn insert_next_server_special_aliases( request_to_import_mapping(next_package, "styled-jsx/*"), ); - rsc_aliases(import_map, project_path, ty, runtime, next_config).await?; + rsc_aliases(import_map, project_path, ty.clone(), runtime, next_config).await?; } ServerContextType::Middleware { .. } | ServerContextType::Instrumentation { .. } => { - rsc_aliases(import_map, project_path, ty, runtime, next_config).await?; + rsc_aliases(import_map, project_path, ty.clone(), runtime, next_config).await?; } } @@ -628,7 +631,7 @@ async fn insert_next_server_special_aliases( // context, it'll resolve to the noop where it's allowed, or aliased into // the error which throws a runtime error. This works with in combination of // build-time error as well, refer https://github.com/vercel/next.js/blob/0060de1c4905593ea875fa7250d4b5d5ce10897d/packages/next-swc/crates/next-core/src/next_server/context.rs#L103 - match ty { + match &ty { ServerContextType::Pages { .. } => { insert_exact_alias_map( import_map, @@ -700,10 +703,9 @@ async fn rsc_aliases( ) -> Result<()> { let ppr = *next_config.enable_ppr().await?; let taint = *next_config.enable_taint().await?; - let react_owner_stack = *next_config.enable_react_owner_stack().await?; let router_bfcache = *next_config.enable_router_bfcache().await?; let view_transition = *next_config.enable_view_transition().await?; - let react_channel = if ppr || taint || react_owner_stack || view_transition || router_bfcache { + let react_channel = if ppr || taint || view_transition || router_bfcache { "-experimental" } else { "" @@ -844,6 +846,7 @@ async fn insert_next_shared_aliases( project_path: ResolvedVc, execution_context: Vc, next_config: Vc, + next_mode: Vc, is_runtime_edge: bool, ) -> Result<()> { let package_root = next_js_fs().root().to_resolved().await?; @@ -855,6 +858,7 @@ async fn insert_next_shared_aliases( request_to_import_mapping(project_path, "./mdx-components"), request_to_import_mapping(project_path, "./src/mdx-components"), request_to_import_mapping(project_path, "@mdx-js/react"), + request_to_import_mapping(project_path, "@next/mdx/mdx-components.js"), ], ); @@ -891,7 +895,7 @@ async fn insert_next_shared_aliases( import_map.insert_alias( AliasPattern::exact("@vercel/turbopack-next/internal/font/google/cssmodule.module.css"), ImportMapping::Dynamic(ResolvedVc::upcast( - NextFontGoogleCssModuleReplacer::new(*project_path, execution_context) + NextFontGoogleCssModuleReplacer::new(*project_path, execution_context, next_mode) .to_resolved() .await?, )) @@ -965,6 +969,13 @@ async fn insert_next_shared_aliases( "next/dist/build/webpack/loaders/next-flight-loader/cache-wrapper", ), ); + import_map.insert_exact_alias( + "private-next-rsc-track-dynamic-import", + request_to_import_mapping( + project_path, + "next/dist/build/webpack/loaders/next-flight-loader/track-dynamic-import", + ), + ); insert_turbopack_dev_alias(import_map).await?; insert_package_alias( @@ -998,8 +1009,8 @@ async fn insert_next_shared_aliases( pub async fn get_next_package(context_directory: Vc) -> Result> { let 
result = resolve( context_directory, - Value::new(ReferenceType::CommonJs(CommonJsReferenceSubType::Undefined)), - Request::parse(Value::new(Pattern::Constant("next/package.json".into()))), + ReferenceType::CommonJs(CommonJsReferenceSubType::Undefined), + Request::parse(Pattern::Constant(rcstr!("next/package.json"))), node_cjs_resolve_options(context_directory.root()), ); let source = result @@ -1098,7 +1109,7 @@ fn insert_package_alias( ) { import_map.insert_wildcard_alias( prefix, - ImportMapping::PrimaryAlternative("./*".into(), Some(package_root)).resolved_cell(), + ImportMapping::PrimaryAlternative(rcstr!("./*"), Some(package_root)).resolved_cell(), ); } @@ -1135,6 +1146,16 @@ async fn insert_instrumentation_client_alias( Ok(()) } +// To alias e.g. both `import "next/link"` and `import "next/link.js"` +fn insert_exact_alias_or_js( + import_map: &mut ImportMap, + pattern: &str, + mapping: ResolvedVc, +) { + import_map.insert_exact_alias(pattern, mapping); + import_map.insert_exact_alias(format!("{pattern}.js"), mapping); +} + /// Creates a direct import mapping to the result of resolving a request /// in a context. fn request_to_import_mapping( diff --git a/crates/next-core/src/next_manifests/client_reference_manifest.rs b/crates/next-core/src/next_manifests/client_reference_manifest.rs index 94785b7028c32..ea684e83af780 100644 --- a/crates/next-core/src/next_manifests/client_reference_manifest.rs +++ b/crates/next-core/src/next_manifests/client_reference_manifest.rs @@ -7,15 +7,15 @@ use serde::{Deserialize, Serialize}; use tracing::Instrument; use turbo_rcstr::RcStr; use turbo_tasks::{ - trace::TraceRawVcs, FxIndexSet, ReadRef, ResolvedVc, TaskInput, TryFlatJoinIterExt, - TryJoinIterExt, ValueToString, Vc, + FxIndexSet, ReadRef, ResolvedVc, TaskInput, TryFlatJoinIterExt, TryJoinIterExt, ValueToString, + Vc, trace::TraceRawVcs, }; use turbo_tasks_fs::{File, FileSystemPath}; use turbopack_core::{ asset::{Asset, AssetContent}, chunk::{ChunkingContext, ModuleChunkItemIdExt, ModuleId as TurbopackModuleId}, module_graph::async_module_info::AsyncModulesInfo, - output::{OutputAsset, OutputAssets}, + output::OutputAsset, virtual_output::VirtualOutputAsset, }; use turbopack_ecmascript::utils::StringifyJs; @@ -37,7 +37,6 @@ pub struct ClientReferenceManifestOptions { pub entry_name: RcStr, pub client_references: ResolvedVc, pub client_references_chunks: ResolvedVc, - pub rsc_app_entry_chunks: ResolvedVc, pub client_chunking_context: ResolvedVc>, pub ssr_chunking_context: Option>>, pub async_module_info: ResolvedVc, @@ -58,7 +57,6 @@ impl ClientReferenceManifest { entry_name, client_references, client_references_chunks, - rsc_app_entry_chunks, client_chunking_context, ssr_chunking_context, async_module_info, @@ -100,7 +98,6 @@ impl ClientReferenceManifest { } = &*client_references_chunks.await?; let client_relative_path = &*client_relative_path.await?; let node_root_ref = &*node_root.await?; - let rsc_app_entry_chunks = &*rsc_app_entry_chunks.await?; let client_references_ecmascript = client_references .await? 
@@ -167,10 +164,6 @@ impl ClientReferenceManifest { ResolvedVc>, ReadRef, > = FxHashMap::default(); - let mut rsc_chunk_path_cache: FxHashMap< - ResolvedVc>, - ReadRef, - > = FxHashMap::default(); for (client_reference_module, client_reference_module_ref) in client_references_ecmascript @@ -266,32 +259,10 @@ impl ClientReferenceManifest { (Vec::new(), false) }; - let (rsc_chunks_paths, rsc_is_async) = if runtime == NextRuntime::Edge { - // the chunks get added to the middleware-manifest.json instead - // of this file because the - // edge runtime doesn't support dynamically - // loading chunks. - (Vec::new(), false) + let rsc_is_async = if runtime == NextRuntime::Edge { + false } else { - let rsc_chunks_paths = cached_chunk_paths( - &mut rsc_chunk_path_cache, - rsc_app_entry_chunks.iter().copied(), - ) - .await?; - - let chunk_paths = rsc_chunks_paths - .filter_map(|(_, chunk_path)| { - node_root_ref - .get_path_to(&chunk_path) - .map(ToString::to_string) - }) - .map(RcStr::from) - .collect::>(); - - let is_async = - async_modules.contains(&ResolvedVc::upcast(client_reference_module)); - - (chunk_paths, is_async) + async_modules.contains(&ResolvedVc::upcast(client_reference_module)) }; entry_manifest.client_modules.module_exports.insert( @@ -326,7 +297,7 @@ impl ClientReferenceManifest { ManifestNodeEntry { name: "*".into(), id: (&*rsc_chunk_item_id).into(), - chunks: rsc_chunks_paths, + chunks: vec![], r#async: rsc_is_async, }, ); @@ -462,6 +433,6 @@ pub fn get_client_reference_module_key(server_path: &str, export_name: &str) -> if export_name == "*" { server_path.into() } else { - format!("{}#{}", server_path, export_name).into() + format!("{server_path}#{export_name}").into() } } diff --git a/crates/next-core/src/next_manifests/encode_uri_component.rs b/crates/next-core/src/next_manifests/encode_uri_component.rs index f2f2282dbac75..2c502146cd333 100644 --- a/crates/next-core/src/next_manifests/encode_uri_component.rs +++ b/crates/next-core/src/next_manifests/encode_uri_component.rs @@ -1,4 +1,4 @@ -use percent_encoding::{utf8_percent_encode, AsciiSet, CONTROLS}; +use percent_encoding::{AsciiSet, CONTROLS, utf8_percent_encode}; /// An `AsciiSet` that matches the behavior of JavaScript's `encodeURIComponent`. /// - It leaves `A-Z a-z 0-9 - _ . ~` unescaped. 
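A brief aside on the `AsciiSet` documented in the encode_uri_component.rs hunk above (the hunk itself only reorders the `percent_encoding` imports): as a rough sketch, not the code in this file, an `encodeURIComponent`-style set can be expressed with `percent_encoding` by starting from `NON_ALPHANUMERIC` and removing the characters the doc comment lists as unescaped. Note that JavaScript's `encodeURIComponent` additionally leaves `! * ' ( )` unescaped, so the actual set defined in this module may differ from this sketch.

use percent_encoding::{utf8_percent_encode, AsciiSet, NON_ALPHANUMERIC};

// Hypothetical sketch: percent-encode everything except A-Z a-z 0-9 - _ . ~
// (the characters the doc comment above calls out as unescaped).
const COMPONENT_SET: &AsciiSet = &NON_ALPHANUMERIC
    .remove(b'-')
    .remove(b'_')
    .remove(b'.')
    .remove(b'~');

fn encode_uri_component(input: &str) -> String {
    // `utf8_percent_encode` returns a lazy encoder that implements `Display`.
    utf8_percent_encode(input, COMPONENT_SET).to_string()
}

For example, `encode_uri_component("app/[slug]/page")` would yield `app%2F%5Bslug%5D%2Fpage`.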
diff --git a/crates/next-core/src/next_manifests/mod.rs b/crates/next-core/src/next_manifests/mod.rs index dbd274ff7397c..bfad238c8f3f7 100644 --- a/crates/next-core/src/next_manifests/mod.rs +++ b/crates/next-core/src/next_manifests/mod.rs @@ -7,8 +7,8 @@ use anyhow::{Context, Result}; use serde::{Deserialize, Serialize}; use turbo_rcstr::RcStr; use turbo_tasks::{ - trace::TraceRawVcs, FxIndexMap, FxIndexSet, NonLocalValue, ReadRef, ResolvedVc, TaskInput, - TryJoinIterExt, Vc, + FxIndexMap, FxIndexSet, NonLocalValue, ReadRef, ResolvedVc, TaskInput, TryJoinIterExt, Vc, + trace::TraceRawVcs, }; use turbo_tasks_fs::{File, FileSystemPath}; use turbopack_core::{ @@ -269,10 +269,10 @@ pub struct AppPathsManifest { // The manifest is in a format of: // { [`${origin} -> ${imported}`]: { id: `${origin} -> ${imported}`, files: // string[] } } -#[derive(Serialize, Default, Debug)] +#[derive(Serialize, Debug)] #[serde(rename_all = "camelCase")] pub struct LoadableManifest { - pub id: RcStr, + pub id: ModuleId, pub files: Vec, } diff --git a/crates/next-core/src/next_pages/page_entry.rs b/crates/next-core/src/next_pages/page_entry.rs index aecfe32d78dfc..9a1d26ca13b24 100644 --- a/crates/next-core/src/next_pages/page_entry.rs +++ b/crates/next-core/src/next_pages/page_entry.rs @@ -1,10 +1,10 @@ use std::io::Write; -use anyhow::{bail, Result}; +use anyhow::{Result, bail}; use serde::Serialize; -use turbo_rcstr::RcStr; -use turbo_tasks::{fxindexmap, FxIndexMap, ResolvedVc, Value, Vc}; -use turbo_tasks_fs::{rope::RopeBuilder, File, FileSystemPath}; +use turbo_rcstr::{RcStr, rcstr}; +use turbo_tasks::{FxIndexMap, ResolvedVc, Vc, fxindexmap}; +use turbo_tasks_fs::{File, FileSystemPath, rope::RopeBuilder}; use turbopack_core::{ asset::{Asset, AssetContent}, context::AssetContext, @@ -20,7 +20,7 @@ use crate::{ next_config::NextConfig, next_edge::entry::wrap_edge_entry, pages_structure::{PagesStructure, PagesStructureItem}, - util::{file_content_rope, load_next_js_template, NextRuntime}, + util::{NextRuntime, file_content_rope, load_next_js_template}, }; #[turbo_tasks::value] @@ -32,18 +32,18 @@ pub struct PageSsrEntryModule { #[turbo_tasks::function] pub async fn create_page_ssr_entry_module( - pathname: Vc, - reference_type: Value, + pathname: RcStr, + reference_type: ReferenceType, project_root: Vc, ssr_module_context: Vc>, source: Vc>, - next_original_name: Vc, + next_original_name: RcStr, pages_structure: Vc, runtime: NextRuntime, next_config: Vc, ) -> Result> { - let definition_page = &*next_original_name.await?; - let definition_pathname = &*pathname.await?; + let definition_page = next_original_name; + let definition_pathname = pathname; let ssr_module = ssr_module_context .process(source, reference_type.clone()) @@ -51,8 +51,6 @@ pub async fn create_page_ssr_entry_module( .to_resolved() .await?; - let reference_type = reference_type.into_value(); - let template_file = match (&reference_type, runtime) { (ReferenceType::Entry(EntryReferenceSubType::Page), _) => { // Load the Page entry file. @@ -69,20 +67,20 @@ pub async fn create_page_ssr_entry_module( _ => bail!("Invalid path type"), }; - const INNER: &str = "INNER_PAGE"; + let inner = rcstr!("INNER_PAGE"); - const INNER_DOCUMENT: &str = "INNER_DOCUMENT"; - const INNER_APP: &str = "INNER_APP"; + let inner_document = rcstr!("INNER_DOCUMENT"); + let inner_app = rcstr!("INNER_APP"); let mut replacements = fxindexmap! 
{ "VAR_DEFINITION_PAGE" => definition_page.clone(), "VAR_DEFINITION_PATHNAME" => definition_pathname.clone(), - "VAR_USERLAND" => INNER.into(), + "VAR_USERLAND" => inner.clone(), }; if reference_type == ReferenceType::Entry(EntryReferenceSubType::Page) { - replacements.insert("VAR_MODULE_DOCUMENT", INNER_DOCUMENT.into()); - replacements.insert("VAR_MODULE_APP", INNER_APP.into()); + replacements.insert("VAR_MODULE_DOCUMENT", inner_document.clone()); + replacements.insert("VAR_MODULE_APP", inner_app.clone()); } // Load the file from the next.js codebase. @@ -99,7 +97,7 @@ pub async fn create_page_ssr_entry_module( // instrumentation file conflicts with a page also labeled // /instrumentation) hoist the `register` method. if reference_type == ReferenceType::Entry(EntryReferenceSubType::Page) - && (*definition_page == "/instrumentation" || *definition_page == "/src/instrumentation") + && (definition_page == "/instrumentation" || definition_page == "/src/instrumentation") { let file = &*file_content_rope(source.content().file_content()).await?; @@ -120,7 +118,7 @@ pub async fn create_page_ssr_entry_module( } let mut inner_assets = fxindexmap! { - INNER.into() => ssr_module, + inner => ssr_module, }; let pages_structure_ref = pages_structure.await?; @@ -129,20 +127,20 @@ pub async fn create_page_ssr_entry_module( if reference_type == ReferenceType::Entry(EntryReferenceSubType::Page) { let document_module = process_global_item( *pages_structure_ref.document, - Value::new(reference_type.clone()), + reference_type.clone(), ssr_module_context, ) .to_resolved() .await?; let app_module = process_global_item( *pages_structure_ref.app, - Value::new(reference_type.clone()), + reference_type.clone(), ssr_module_context, ) .to_resolved() .await?; - inner_assets.insert(INNER_DOCUMENT.into(), document_module); - inner_assets.insert(INNER_APP.into(), app_module); + inner_assets.insert(inner_document, document_module); + inner_assets.insert(inner_app, app_module); (Some(app_module), Some(document_module)) } else { (None, None) @@ -151,7 +149,7 @@ pub async fn create_page_ssr_entry_module( let mut ssr_module = ssr_module_context .process( source, - Value::new(ReferenceType::Internal(ResolvedVc::cell(inner_assets))), + ReferenceType::Internal(ResolvedVc::cell(inner_assets)), ) .module(); @@ -163,7 +161,7 @@ pub async fn create_page_ssr_entry_module( ssr_module, definition_page.clone(), definition_pathname.clone(), - Value::new(reference_type), + reference_type, pages_structure, next_config, ); @@ -188,7 +186,7 @@ pub async fn create_page_ssr_entry_module( #[turbo_tasks::function] fn process_global_item( item: Vc, - reference_type: Value, + reference_type: ReferenceType, module_context: Vc>, ) -> Vc> { let source = Vc::upcast(FileSource::new(item.file_path())); @@ -202,7 +200,7 @@ async fn wrap_edge_page( entry: ResolvedVc>, page: RcStr, pathname: RcStr, - reference_type: Value, + reference_type: ReferenceType, pages_structure: Vc, next_config: Vc, ) -> Result>> { @@ -244,8 +242,8 @@ async fn wrap_edge_page( "nextConfig" => serde_json::to_string(next_config_val)?.into(), "dev" => serde_json::Value::Bool(dev).to_string().into(), "pageRouteModuleOptions" => serde_json::to_string(&get_route_module_options(page.clone(), pathname.clone()))?.into(), - "errorRouteModuleOptions" => serde_json::to_string(&get_route_module_options("/_error".into(), "/_error".into()))?.into(), - "user500RouteModuleOptions" => serde_json::to_string(&get_route_module_options("/500".into(), "/500".into()))?.into(), + "errorRouteModuleOptions" 
=> serde_json::to_string(&get_route_module_options(rcstr!("/_error"), rcstr!("/_error")))?.into(), + "user500RouteModuleOptions" => serde_json::to_string(&get_route_module_options(rcstr!("/500"), rcstr!("/500")))?.into(), }, fxindexmap! { // TODO @@ -288,7 +286,7 @@ async fn wrap_edge_page( let wrapped = asset_context .process( Vc::upcast(source), - Value::new(ReferenceType::Internal(ResolvedVc::cell(inner_assets))), + ReferenceType::Internal(ResolvedVc::cell(inner_assets)), ) .module(); @@ -322,12 +320,12 @@ struct RouteDefinition { fn get_route_module_options(page: RcStr, pathname: RcStr) -> PartialRouteModuleOptions { PartialRouteModuleOptions { definition: RouteDefinition { - kind: "PAGES".into(), + kind: rcstr!("PAGES"), page, pathname, // The following aren't used in production. - bundle_path: "".into(), - filename: "".into(), + bundle_path: rcstr!(""), + filename: rcstr!(""), }, } } diff --git a/crates/next-core/src/next_route_matcher/mod.rs b/crates/next-core/src/next_route_matcher/mod.rs index 61a864178a0fa..9593930ee43c0 100644 --- a/crates/next-core/src/next_route_matcher/mod.rs +++ b/crates/next-core/src/next_route_matcher/mod.rs @@ -1,4 +1,4 @@ -use anyhow::{bail, Result}; +use anyhow::{Result, bail}; use turbo_rcstr::RcStr; use turbo_tasks::{ResolvedVc, Vc}; use turbopack_node::route_matcher::{Params, RouteMatcher, RouteMatcherRef}; diff --git a/crates/next-core/src/next_server/context.rs b/crates/next-core/src/next_server/context.rs index 4d5758b2637a2..fd0da02a07ebc 100644 --- a/crates/next-core/src/next_server/context.rs +++ b/crates/next-core/src/next_server/context.rs @@ -1,8 +1,8 @@ use std::iter::once; -use anyhow::{bail, Result}; -use turbo_rcstr::RcStr; -use turbo_tasks::{FxIndexMap, OptionVcExt, ResolvedVc, Value, Vc}; +use anyhow::{Result, bail}; +use turbo_rcstr::{RcStr, rcstr}; +use turbo_tasks::{FxIndexMap, OptionVcExt, ResolvedVc, TaskInput, Vc}; use turbo_tasks_env::{EnvMap, ProcessEnv}; use turbo_tasks_fs::FileSystemPath; use turbopack::{ @@ -16,8 +16,8 @@ use turbopack::{ }; use turbopack_core::{ chunk::{ - module_id_strategies::ModuleIdStrategy, ChunkingConfig, MangleType, MinifyType, - SourceMapsType, + ChunkingConfig, MangleType, MinifyType, SourceMapsType, + module_id_strategies::ModuleIdStrategy, }, compile_time_info::{ CompileTimeDefineValue, CompileTimeDefines, CompileTimeInfo, DefineableNameSegment, @@ -53,9 +53,9 @@ use crate::{ next_server::resolve::ExternalPredicate, next_shared::{ resolve::{ - get_invalid_client_only_resolve_plugin, get_invalid_styled_jsx_resolve_plugin, ModuleFeatureReportResolvePlugin, NextExternalResolvePlugin, - NextNodeSharedRuntimeResolvePlugin, + NextNodeSharedRuntimeResolvePlugin, get_invalid_client_only_resolve_plugin, + get_invalid_styled_jsx_resolve_plugin, }, transforms::{ emotion::get_emotion_transform_rule, get_ecma_transform_rule, @@ -73,13 +73,13 @@ use crate::{ get_typescript_transform_options, }, util::{ - foreign_code_context_condition, get_transpiled_packages, internal_assets_conditions, - load_next_js_templateon, NextRuntime, + NextRuntime, foreign_code_context_condition, get_transpiled_packages, + internal_assets_conditions, load_next_js_templateon, }, }; -#[turbo_tasks::value(shared, serialization = "auto_for_input")] -#[derive(Debug, Copy, Clone, Hash)] +#[turbo_tasks::value(shared)] +#[derive(Debug, Clone, Hash, TaskInput)] pub enum ServerContextType { Pages { pages_dir: ResolvedVc, @@ -95,20 +95,20 @@ pub enum ServerContextType { }, AppRSC { app_dir: ResolvedVc, - 
ecmascript_client_reference_transition_name: Option>, + ecmascript_client_reference_transition_name: Option, client_transition: Option>>, }, AppRoute { app_dir: ResolvedVc, - ecmascript_client_reference_transition_name: Option>, + ecmascript_client_reference_transition_name: Option, }, Middleware { app_dir: Option>, - ecmascript_client_reference_transition_name: Option>, + ecmascript_client_reference_transition_name: Option, }, Instrumentation { app_dir: Option>, - ecmascript_client_reference_transition_name: Option>, + ecmascript_client_reference_transition_name: Option, }, } @@ -128,15 +128,20 @@ impl ServerContextType { #[turbo_tasks::function] pub async fn get_server_resolve_options_context( project_path: ResolvedVc, - ty: Value, + ty: ServerContextType, mode: Vc, next_config: Vc, execution_context: Vc, ) -> Result> { - let next_server_import_map = - get_next_server_import_map(*project_path, ty, next_config, execution_context) - .to_resolved() - .await?; + let next_server_import_map = get_next_server_import_map( + *project_path, + ty.clone(), + next_config, + mode, + execution_context, + ) + .to_resolved() + .await?; let foreign_code_context_condition = foreign_code_context_condition(next_config, project_path).await?; let root_dir = project_path.root().to_resolved().await?; @@ -154,7 +159,7 @@ pub async fn get_server_resolve_options_context( // Always load these predefined packages as external. let mut external_packages: Vec = load_next_js_templateon( project_path, - "dist/lib/server-external-packages.json".into(), + rcstr!("dist/lib/server-external-packages.json"), ) .await?; @@ -188,8 +193,6 @@ pub async fn get_server_resolve_options_context( external_packages.retain(|item| !transpiled_packages.contains(item)); - let ty = ty.into_value(); - let server_external_packages_plugin = ExternalCjsModulesResolvePlugin::new( *project_path, project_path.root(), @@ -209,7 +212,7 @@ pub async fn get_server_resolve_options_context( ); if ty.supports_react_server() { - custom_conditions.push("react-server".into()); + custom_conditions.push(rcstr!("react-server")); }; let external_cjs_modules_plugin = if *next_config.bundle_pages_router_dependencies().await? { @@ -229,11 +232,11 @@ pub async fn get_server_resolve_options_context( .to_resolved() .await?; let next_node_shared_runtime_plugin = - NextNodeSharedRuntimeResolvePlugin::new(*project_path, Value::new(ty)) + NextNodeSharedRuntimeResolvePlugin::new(*project_path, ty.clone()) .to_resolved() .await?; - let mut before_resolve_plugins = match ty { + let mut before_resolve_plugins = match &ty { ServerContextType::Pages { .. } | ServerContextType::AppSSR { .. } | ServerContextType::AppRSC { .. 
} => { @@ -383,7 +386,7 @@ pub async fn get_server_compile_time_info( cwd: RcStr, ) -> Result> { CompileTimeInfo::builder( - Environment::new(Value::new(ExecutionEnvironment::NodeJsLambda( + Environment::new(ExecutionEnvironment::NodeJsLambda( NodeJsEnvironment { compile_target: CompileTarget::current().to_resolved().await?, node_version: NodeJsVersion::resolved_cell(NodeJsVersion::Current( @@ -392,7 +395,7 @@ pub async fn get_server_compile_time_info( cwd: ResolvedVc::cell(Some(cwd)), } .resolved_cell(), - ))) + )) .to_resolved() .await?, ) @@ -406,16 +409,17 @@ pub async fn get_server_compile_time_info( pub async fn get_server_module_options_context( project_path: ResolvedVc, execution_context: ResolvedVc, - ty: Value, + ty: ServerContextType, mode: Vc, next_config: Vc, next_runtime: NextRuntime, encryption_key: ResolvedVc, + environment: ResolvedVc, ) -> Result> { let next_mode = mode.await?; let mut next_server_rules = get_next_server_transforms_rules( next_config, - ty.into_value(), + ty.clone(), mode, false, next_runtime, @@ -424,7 +428,7 @@ pub async fn get_server_module_options_context( .await?; let mut foreign_next_server_rules = get_next_server_transforms_rules( next_config, - ty.into_value(), + ty.clone(), mode, true, next_runtime, @@ -432,7 +436,7 @@ pub async fn get_server_module_options_context( ) .await?; let mut internal_custom_rules = get_next_server_internal_transforms_rules( - ty.into_value(), + ty.clone(), next_config.mdx_rs().await?.is_some(), ) .await?; @@ -478,7 +482,7 @@ pub async fn get_server_module_options_context( conditions .iter() .cloned() - .chain(once("foreign".into())) + .chain(once(rcstr!("foreign"))) .collect(), ) .await?; @@ -550,6 +554,7 @@ pub async fn get_server_module_options_context( ..Default::default() }, execution_context: Some(execution_context), + environment: Some(environment), css: CssOptionsContext { source_maps, ..Default::default() @@ -562,10 +567,12 @@ pub async fn get_server_module_options_context( None }, keep_last_successful_parse: next_mode.is_development(), + remove_unused_exports: *next_config + .turbopack_remove_unused_exports(next_mode.is_development()) + .await?, ..Default::default() }; - let ty = ty.into_value(); let module_options_context = match ty { ServerContextType::Pages { .. } | ServerContextType::PagesData { .. 
} @@ -971,7 +978,7 @@ pub async fn get_server_module_options_context( #[turbo_tasks::function] pub fn get_server_runtime_entries( - _ty: Value, + _ty: ServerContextType, _mode: Vc, ) -> Vc { let runtime_entries = vec![]; @@ -983,14 +990,15 @@ pub async fn get_server_chunking_context_with_client_assets( mode: Vc, root_path: ResolvedVc, node_root: ResolvedVc, - node_root_to_root_path: ResolvedVc, + node_root_to_root_path: RcStr, client_root: ResolvedVc, - asset_prefix: ResolvedVc>, + asset_prefix: Option, environment: ResolvedVc, module_id_strategy: ResolvedVc>, turbo_minify: Vc, turbo_source_maps: Vc, no_mangling: Vc, + scope_hoisting: Vc, ) -> Result> { let next_mode = mode.await?; // TODO(alexkirsz) This should return a trait that can be implemented by the @@ -1002,11 +1010,11 @@ pub async fn get_server_chunking_context_with_client_assets( node_root_to_root_path, client_root, node_root - .join("server/chunks/ssr".into()) + .join(rcstr!("server/chunks/ssr")) .to_resolved() .await?, client_root - .join("static/media".into()) + .join(rcstr!("static/media")) .to_resolved() .await?, environment, @@ -1032,22 +1040,24 @@ pub async fn get_server_chunking_context_with_client_assets( if next_mode.is_development() { builder = builder.use_file_source_map_uris(); } else { - builder = builder.chunking_config( - Vc::::default().to_resolved().await?, - ChunkingConfig { - min_chunk_size: 20_000, - max_chunk_count_per_group: 100, - max_merge_chunk_size: 100_000, - ..Default::default() - }, - ); - builder = builder.chunking_config( - Vc::::default().to_resolved().await?, - ChunkingConfig { - max_merge_chunk_size: 100_000, - ..Default::default() - }, - ); + builder = builder + .chunking_config( + Vc::::default().to_resolved().await?, + ChunkingConfig { + min_chunk_size: 20_000, + max_chunk_count_per_group: 100, + max_merge_chunk_size: 100_000, + ..Default::default() + }, + ) + .chunking_config( + Vc::::default().to_resolved().await?, + ChunkingConfig { + max_merge_chunk_size: 100_000, + ..Default::default() + }, + ) + .module_merging(*scope_hoisting.await?); } Ok(builder.build()) @@ -1058,12 +1068,13 @@ pub async fn get_server_chunking_context( mode: Vc, root_path: ResolvedVc, node_root: ResolvedVc, - node_root_to_root_path: ResolvedVc, + node_root_to_root_path: RcStr, environment: ResolvedVc, module_id_strategy: ResolvedVc>, turbo_minify: Vc, turbo_source_maps: Vc, no_mangling: Vc, + scope_hoisting: Vc, ) -> Result> { let next_mode = mode.await?; // TODO(alexkirsz) This should return a trait that can be implemented by the @@ -1074,8 +1085,14 @@ pub async fn get_server_chunking_context( node_root, node_root_to_root_path, node_root, - node_root.join("server/chunks".into()).to_resolved().await?, - node_root.join("server/assets".into()).to_resolved().await?, + node_root + .join(rcstr!("server/chunks")) + .to_resolved() + .await?, + node_root + .join(rcstr!("server/assets")) + .to_resolved() + .await?, environment, next_mode.runtime_type(), ) @@ -1097,22 +1114,24 @@ pub async fn get_server_chunking_context( if next_mode.is_development() { builder = builder.use_file_source_map_uris() } else { - builder = builder.chunking_config( - Vc::::default().to_resolved().await?, - ChunkingConfig { - min_chunk_size: 20_000, - max_chunk_count_per_group: 100, - max_merge_chunk_size: 100_000, - ..Default::default() - }, - ); - builder = builder.chunking_config( - Vc::::default().to_resolved().await?, - ChunkingConfig { - max_merge_chunk_size: 100_000, - ..Default::default() - }, - ); + builder = builder + .chunking_config( + 
Vc::::default().to_resolved().await?, + ChunkingConfig { + min_chunk_size: 20_000, + max_chunk_count_per_group: 100, + max_merge_chunk_size: 100_000, + ..Default::default() + }, + ) + .chunking_config( + Vc::::default().to_resolved().await?, + ChunkingConfig { + max_merge_chunk_size: 100_000, + ..Default::default() + }, + ) + .module_merging(*scope_hoisting.await?); } Ok(builder.build()) diff --git a/crates/next-core/src/next_server/mod.rs b/crates/next-core/src/next_server/mod.rs index 3703bc3272a69..6267375fbc5ed 100644 --- a/crates/next-core/src/next_server/mod.rs +++ b/crates/next-core/src/next_server/mod.rs @@ -3,7 +3,7 @@ pub(crate) mod resolve; pub(crate) mod transforms; pub use context::{ - get_server_chunking_context, get_server_chunking_context_with_client_assets, + ServerContextType, get_server_chunking_context, get_server_chunking_context_with_client_assets, get_server_compile_time_info, get_server_module_options_context, - get_server_resolve_options_context, get_server_runtime_entries, ServerContextType, + get_server_resolve_options_context, get_server_runtime_entries, }; diff --git a/crates/next-core/src/next_server/resolve.rs b/crates/next-core/src/next_server/resolve.rs index 954f6168652c0..9b060404c0378 100644 --- a/crates/next-core/src/next_server/resolve.rs +++ b/crates/next-core/src/next_server/resolve.rs @@ -1,20 +1,20 @@ use anyhow::Result; use serde::{Deserialize, Serialize}; use turbo_rcstr::RcStr; -use turbo_tasks::{trace::TraceRawVcs, NonLocalValue, ResolvedVc, Value, Vc}; -use turbo_tasks_fs::{self, glob::Glob, FileJsonContent, FileSystemPath}; +use turbo_tasks::{NonLocalValue, ResolvedVc, Vc, trace::TraceRawVcs}; +use turbo_tasks_fs::{self, FileJsonContent, FileSystemPath, glob::Glob}; use turbopack_core::{ issue::{Issue, IssueExt, IssueSeverity, IssueStage, OptionStyledString, StyledString}, reference_type::{EcmaScriptModulesReferenceSubType, ReferenceType}, resolve::{ - find_context_file, + ExternalTraced, ExternalType, FindContextFileResult, ResolveResult, ResolveResultItem, + ResolveResultOption, find_context_file, node::{node_cjs_resolve_options, node_esm_resolve_options}, package_json, parse::Request, pattern::Pattern, plugin::{AfterResolvePlugin, AfterResolvePluginCondition}, - resolve, ExternalTraced, ExternalType, FindContextFileResult, ResolveResult, - ResolveResultItem, ResolveResultOption, + resolve, }, source::Source, }; @@ -79,7 +79,7 @@ impl AfterResolvePlugin for ExternalCjsModulesResolvePlugin { &self, fs_path: ResolvedVc, lookup_path: ResolvedVc, - reference_type: Value, + reference_type: ReferenceType, request: ResolvedVc, ) -> Result> { let request_value = &*request.await?; @@ -119,8 +119,8 @@ impl AfterResolvePlugin for ExternalCjsModulesResolvePlugin { request_glob, }) = *exception_glob { - let path_match = path_glob.await?.execute(&raw_fs_path.path); - let request_match = request_glob.await?.execute(&request_str); + let path_match = path_glob.await?.matches(&raw_fs_path.path); + let request_match = request_glob.await?.matches(&request_str); if path_match || request_match { return Ok(ResolveResultOption::none()); } @@ -135,8 +135,8 @@ impl AfterResolvePlugin for ExternalCjsModulesResolvePlugin { request_glob, }) = *external_glob { - let path_match = path_glob.await?.execute(&raw_fs_path.path); - let request_match = request_glob.await?.execute(&request_str); + let path_match = path_glob.await?.matches(&raw_fs_path.path); + let request_match = request_glob.await?.matches(&request_str); if !path_match && !request_match { return 
Ok(ResolveResultOption::none()); @@ -459,13 +459,12 @@ struct ExternalizeIssue { #[turbo_tasks::value_impl] impl Issue for ExternalizeIssue { - #[turbo_tasks::function] - fn severity(&self) -> Vc { - IssueSeverity::Warning.cell() + fn severity(&self) -> IssueSeverity { + IssueSeverity::Warning } #[turbo_tasks::function] - async fn title(&self) -> Vc { + fn title(&self) -> Vc { StyledString::Line(vec![ StyledString::Text("Package ".into()), StyledString::Code(self.package.clone()), @@ -485,7 +484,7 @@ impl Issue for ExternalizeIssue { } #[turbo_tasks::function] - async fn description(&self) -> Result> { + fn description(&self) -> Result> { Ok(Vc::cell(Some( StyledString::Stack(vec![ StyledString::Line(vec![ diff --git a/crates/next-core/src/next_server/transforms.rs b/crates/next-core/src/next_server/transforms.rs index b680827278c09..163cbe1b629b0 100644 --- a/crates/next-core/src/next_server/transforms.rs +++ b/crates/next-core/src/next_server/transforms.rs @@ -12,8 +12,8 @@ use crate::{ next_shared::transforms::{ get_next_dynamic_transform_rule, get_next_font_transform_rule, get_next_image_rule, get_next_lint_transform_rule, get_next_modularize_imports_rule, - get_next_pages_transforms_rule, get_server_actions_transform_rule, - next_amp_attributes::get_next_amp_attr_rule, + get_next_pages_transforms_rule, get_next_track_dynamic_imports_transform_rule, + get_server_actions_transform_rule, next_amp_attributes::get_next_amp_attr_rule, next_cjs_optimizer::get_next_cjs_optimizer_rule, next_disallow_re_export_all_in_page::get_next_disallow_export_all_in_page_rule, next_edge_node_api_assert::next_edge_node_api_assert, @@ -54,16 +54,29 @@ pub async fn get_next_server_transforms_rules( // Ignore the internal ModuleCssAsset -> CssModuleAsset references // The CSS Module module itself is still needed for class names ModuleRule::new_internal( - RuleCondition::ResourcePathEndsWith(".module.css".into()), + RuleCondition::any(vec![ + RuleCondition::ResourcePathEndsWith(".module.css".into()), + RuleCondition::ContentTypeStartsWith("text/css+module".into()), + ]), vec![ModuleRuleEffect::Ignore], ), ]); rules.extend([ // Ignore all non-module CSS references ModuleRule::new( - RuleCondition::all(vec![ - RuleCondition::ResourcePathEndsWith(".css".into()), - RuleCondition::not(RuleCondition::ResourcePathEndsWith(".module.css".into())), + RuleCondition::any(vec![ + RuleCondition::all(vec![ + RuleCondition::ResourcePathEndsWith(".css".into()), + RuleCondition::not(RuleCondition::ResourcePathEndsWith( + ".module.css".into(), + )), + ]), + RuleCondition::all(vec![ + RuleCondition::ContentTypeStartsWith("text/css".into()), + RuleCondition::not(RuleCondition::ContentTypeStartsWith( + "text/css+module".into(), + )), + ]), ]), vec![ModuleRuleEffect::Ignore], ), @@ -73,7 +86,7 @@ pub async fn get_next_server_transforms_rules( if !foreign_code { rules.push(get_next_page_static_info_assert_rule( mdx_rs, - Some(context_ty), + Some(context_ty.clone()), None, )); } @@ -82,7 +95,7 @@ pub async fn get_next_server_transforms_rules( let cache_kinds = next_config.cache_kinds().to_resolved().await?; let mut is_app_dir = false; - let is_server_components = match context_ty { + let is_server_components = match &context_ty { ServerContextType::Pages { pages_dir } | ServerContextType::PagesApi { pages_dir } => { if !foreign_code { rules.push(get_next_disallow_export_all_in_page_rule( @@ -96,7 +109,7 @@ pub async fn get_next_server_transforms_rules( if !foreign_code { rules.push( get_next_pages_transforms_rule( - *pages_dir, + 
**pages_dir, ExportFilter::StripDefaultExport, mdx_rs, ) @@ -165,6 +178,15 @@ pub async fn get_next_server_transforms_rules( ServerContextType::Middleware { .. } | ServerContextType::Instrumentation { .. } => false, }; + if is_app_dir && + // `dynamicIO` is not supported in the edge runtime. + // (also, the code generated by the dynamic imports transform relies on `CacheSignal`, which uses nodejs-specific APIs) + next_runtime != NextRuntime::Edge && + *next_config.enable_dynamic_io().await? + { + rules.push(get_next_track_dynamic_imports_transform_rule(mdx_rs)); + } + if !foreign_code { rules.push( get_next_dynamic_transform_rule(true, is_server_components, is_app_dir, mode, mdx_rs) diff --git a/crates/next-core/src/next_server_component/server_component_module.rs b/crates/next-core/src/next_server_component/server_component_module.rs index a87e02959998e..c6df49ff0f1f5 100644 --- a/crates/next-core/src/next_server_component/server_component_module.rs +++ b/crates/next-core/src/next_server_component/server_component_module.rs @@ -1,8 +1,8 @@ use std::collections::BTreeMap; -use anyhow::{bail, Result}; +use anyhow::{Result, bail}; use indoc::formatdoc; -use turbo_rcstr::RcStr; +use turbo_rcstr::rcstr; use turbo_tasks::{ResolvedVc, Vc}; use turbo_tasks_fs::FileSystemPath; use turbopack_core::{ @@ -25,11 +25,6 @@ use turbopack_ecmascript::{ use super::server_component_reference::NextServerComponentModuleReference; -#[turbo_tasks::function] -fn modifier() -> Vc { - Vc::cell("Next.js server component".into()) -} - #[turbo_tasks::value(shared)] pub struct NextServerComponentModule { pub module: ResolvedVc>, @@ -52,7 +47,9 @@ impl NextServerComponentModule { impl Module for NextServerComponentModule { #[turbo_tasks::function] fn ident(&self) -> Vc { - self.module.ident().with_modifier(modifier()) + self.module + .ident() + .with_modifier(rcstr!("Next.js Server Component")) } #[turbo_tasks::function] @@ -69,7 +66,7 @@ impl Module for NextServerComponentModule { impl Asset for NextServerComponentModule { #[turbo_tasks::function] fn content(&self) -> Result> { - bail!("Next.js server component module has no content") + bail!("Next.js Server Component module has no content") } } diff --git a/crates/next-core/src/next_server_component/server_component_reference.rs b/crates/next-core/src/next_server_component/server_component_reference.rs index 14c89d3d01f01..8c4b915dd45cc 100644 --- a/crates/next-core/src/next_server_component/server_component_reference.rs +++ b/crates/next-core/src/next_server_component/server_component_reference.rs @@ -27,7 +27,7 @@ impl ValueToString for NextServerComponentModuleReference { async fn to_string(&self) -> Result> { Ok(Vc::cell( format!( - "Next.js server component {}", + "Next.js Server Component {}", self.asset.ident().to_string().await? 
) .into(), diff --git a/crates/next-core/src/next_server_component/server_component_transition.rs b/crates/next-core/src/next_server_component/server_component_transition.rs index d061cbee3f0d0..adc9c37a1b9dc 100644 --- a/crates/next-core/src/next_server_component/server_component_transition.rs +++ b/crates/next-core/src/next_server_component/server_component_transition.rs @@ -1,7 +1,6 @@ -use anyhow::{bail, Result}; -use turbo_rcstr::RcStr; +use anyhow::{Result, bail}; use turbo_tasks::Vc; -use turbopack::{transition::Transition, ModuleAssetContext}; +use turbopack::{ModuleAssetContext, transition::Transition}; use turbopack_core::module::Module; use turbopack_ecmascript::chunk::EcmascriptChunkPlaceable; @@ -27,11 +26,6 @@ impl NextServerComponentTransition { #[turbo_tasks::value_impl] impl Transition for NextServerComponentTransition { - #[turbo_tasks::function] - fn process_layer(self: Vc, layer: Vc) -> Vc { - layer - } - #[turbo_tasks::function] async fn process_module( self: Vc, diff --git a/crates/next-core/src/next_server_utility/server_utility_module.rs b/crates/next-core/src/next_server_utility/server_utility_module.rs index b833391f4de4d..7835ba49430ec 100644 --- a/crates/next-core/src/next_server_utility/server_utility_module.rs +++ b/crates/next-core/src/next_server_utility/server_utility_module.rs @@ -1,8 +1,8 @@ use std::collections::BTreeMap; -use anyhow::{bail, Result}; +use anyhow::{Result, bail}; use indoc::formatdoc; -use turbo_rcstr::RcStr; +use turbo_rcstr::rcstr; use turbo_tasks::{ResolvedVc, Vc}; use turbo_tasks_fs::FileSystemPath; use turbopack_core::{ @@ -25,11 +25,6 @@ use turbopack_ecmascript::{ use super::server_utility_reference::NextServerUtilityModuleReference; -#[turbo_tasks::function] -fn modifier() -> Vc { - Vc::cell("Next.js server utility".into()) -} - #[turbo_tasks::value(shared)] pub struct NextServerUtilityModule { pub module: ResolvedVc>, @@ -52,7 +47,9 @@ impl NextServerUtilityModule { impl Module for NextServerUtilityModule { #[turbo_tasks::function] fn ident(&self) -> Vc { - self.module.ident().with_modifier(modifier()) + self.module + .ident() + .with_modifier(rcstr!("Next.js server utility")) } #[turbo_tasks::function] diff --git a/crates/next-core/src/next_server_utility/server_utility_transition.rs b/crates/next-core/src/next_server_utility/server_utility_transition.rs index 654a07d17387a..949eed411454d 100644 --- a/crates/next-core/src/next_server_utility/server_utility_transition.rs +++ b/crates/next-core/src/next_server_utility/server_utility_transition.rs @@ -1,7 +1,6 @@ -use anyhow::{bail, Result}; -use turbo_rcstr::RcStr; +use anyhow::{Result, bail}; use turbo_tasks::Vc; -use turbopack::{transition::Transition, ModuleAssetContext}; +use turbopack::{ModuleAssetContext, transition::Transition}; use turbopack_core::module::Module; use turbopack_ecmascript::chunk::EcmascriptChunkPlaceable; @@ -27,11 +26,6 @@ impl NextServerUtilityTransition { #[turbo_tasks::value_impl] impl Transition for NextServerUtilityTransition { - #[turbo_tasks::function] - fn process_layer(self: Vc, layer: Vc) -> Vc { - layer - } - #[turbo_tasks::function] async fn process_module( self: Vc, diff --git a/crates/next-core/src/next_shared/resolve.rs b/crates/next-core/src/next_shared/resolve.rs index 8d950f4da44e4..4803b11c42105 100644 --- a/crates/next-core/src/next_shared/resolve.rs +++ b/crates/next-core/src/next_shared/resolve.rs @@ -1,44 +1,47 @@ +use std::sync::LazyLock; + use anyhow::Result; -use lazy_static::lazy_static; use rustc_hash::FxHashMap; -use 
turbo_rcstr::RcStr; -use turbo_tasks::{ResolvedVc, Value, Vc}; -use turbo_tasks_fs::{glob::Glob, FileSystemPath}; +use turbo_rcstr::{RcStr, rcstr}; +use turbo_tasks::{ResolvedVc, Vc}; +use turbo_tasks_fs::{FileSystemPath, glob::Glob}; use turbopack_core::{ diagnostics::DiagnosticExt, file_source::FileSource, issue::{Issue, IssueExt, IssueSeverity, IssueStage, OptionStyledString, StyledString}, reference_type::ReferenceType, resolve::{ + ExternalTraced, ExternalType, ResolveResult, ResolveResultItem, ResolveResultOption, parse::Request, plugin::{ AfterResolvePlugin, AfterResolvePluginCondition, BeforeResolvePlugin, BeforeResolvePluginCondition, }, - ExternalTraced, ExternalType, ResolveResult, ResolveResultItem, ResolveResultOption, }, }; use crate::{next_server::ServerContextType, next_telemetry::ModuleFeatureTelemetry}; -lazy_static! { - // Set of the features we want to track, following existing references in webpack/plugins/telemetry-plugin. - static ref FEATURE_MODULES: FxHashMap<&'static str, Vec<&'static str>> = FxHashMap::from_iter([ - ( - "next", - vec![ - "/image", - "/future/image", - "/legacy/image", - "/script", - "/dynamic", - "/font/google", - "/font/local" - ] - ), - ("@next", vec!["/font/google", "/font/local"]) - ]); -} +// Set of the features we want to track, following existing references in +// webpack/plugins/telemetry-plugin. +static FEATURE_MODULES: LazyLock>> = + LazyLock::new(|| { + FxHashMap::from_iter([ + ( + "next", + vec![ + "/image", + "/future/image", + "/legacy/image", + "/script", + "/dynamic", + "/font/google", + "/font/local", + ], + ), + ("@next", vec!["/font/google", "/font/local"]), + ]) + }); #[turbo_tasks::value(shared)] pub struct InvalidImportModuleIssue { @@ -49,9 +52,8 @@ pub struct InvalidImportModuleIssue { #[turbo_tasks::value_impl] impl Issue for InvalidImportModuleIssue { - #[turbo_tasks::function] - fn severity(&self) -> Vc { - IssueSeverity::Error.into() + fn severity(&self) -> IssueSeverity { + IssueSeverity::Error } #[turbo_tasks::function] @@ -61,7 +63,7 @@ impl Issue for InvalidImportModuleIssue { #[turbo_tasks::function] fn title(&self) -> Vc { - StyledString::Text("Invalid import".into()).cell() + StyledString::Text(rcstr!("Invalid import")).cell() } #[turbo_tasks::function] @@ -85,7 +87,7 @@ impl Issue for InvalidImportModuleIssue { StyledString::Line( messages .iter() - .map(|v| StyledString::Text(format!("{}\n", v).into())) + .map(|v| StyledString::Text(format!("{v}\n").into())) .collect::>(), ) .resolved_cell(), @@ -132,7 +134,7 @@ impl BeforeResolvePlugin for InvalidImportResolvePlugin { fn before_resolve( &self, lookup_path: ResolvedVc, - _reference_type: Value, + _reference_type: ReferenceType, _request: Vc, ) -> Vc { InvalidImportModuleIssue { @@ -231,7 +233,7 @@ impl AfterResolvePlugin for NextExternalResolvePlugin { &self, fs_path: Vc, _lookup_path: Vc, - _reference_type: Value, + _reference_type: ReferenceType, _request: Vc, ) -> Result> { let path = fs_path.await?.path.to_string(); @@ -263,9 +265,8 @@ impl NextNodeSharedRuntimeResolvePlugin { #[turbo_tasks::function] pub fn new( root: ResolvedVc, - server_context_type: Value, + server_context_type: ServerContextType, ) -> Vc { - let server_context_type = server_context_type.into_value(); NextNodeSharedRuntimeResolvePlugin { root, server_context_type, @@ -289,7 +290,7 @@ impl AfterResolvePlugin for NextNodeSharedRuntimeResolvePlugin { &self, fs_path: Vc, _lookup_path: Vc, - _reference_type: Value, + _reference_type: ReferenceType, _request: Vc, ) -> Result> { let stem = 
fs_path.file_stem().await?; @@ -357,7 +358,7 @@ impl BeforeResolvePlugin for ModuleFeatureReportResolvePlugin { async fn before_resolve( &self, _lookup_path: Vc, - _reference_type: Value, + _reference_type: ReferenceType, request: Vc, ) -> Result> { if let Request::Module { @@ -374,7 +375,7 @@ impl BeforeResolvePlugin for ModuleFeatureReportResolvePlugin { .find(|sub_path| path.is_match(sub_path)); if let Some(sub_path) = sub_path { - ModuleFeatureTelemetry::new(format!("{}{}", module, sub_path).into(), 1) + ModuleFeatureTelemetry::new(format!("{module}{sub_path}").into(), 1) .resolved_cell() .emit(); } @@ -413,7 +414,7 @@ impl AfterResolvePlugin for NextSharedRuntimeResolvePlugin { &self, fs_path: Vc, _lookup_path: Vc, - _reference_type: Value, + _reference_type: ReferenceType, _request: Vc, ) -> Result> { let raw_fs_path = &*fs_path.await?; diff --git a/crates/next-core/src/next_shared/transforms/mod.rs b/crates/next-core/src/next_shared/transforms/mod.rs index b4f085dec56a1..8eef5ef840c69 100644 --- a/crates/next-core/src/next_shared/transforms/mod.rs +++ b/crates/next-core/src/next_shared/transforms/mod.rs @@ -16,6 +16,7 @@ pub(crate) mod next_pure; pub(crate) mod next_react_server_components; pub(crate) mod next_shake_exports; pub(crate) mod next_strip_page_exports; +pub(crate) mod next_track_dynamic_imports; pub(crate) mod react_remove_properties; pub(crate) mod relay; pub(crate) mod remove_console; @@ -25,19 +26,20 @@ pub(crate) mod styled_jsx; pub(crate) mod swc_ecma_transform_plugins; use anyhow::Result; -pub use modularize_imports::{get_next_modularize_imports_rule, ModularizeImportPackageConfig}; +pub use modularize_imports::{ModularizeImportPackageConfig, get_next_modularize_imports_rule}; pub use next_dynamic::get_next_dynamic_transform_rule; pub use next_font::get_next_font_transform_rule; pub use next_lint::get_next_lint_transform_rule; pub use next_strip_page_exports::get_next_pages_transforms_rule; +pub use next_track_dynamic_imports::get_next_track_dynamic_imports_transform_rule; pub use server_actions::get_server_actions_transform_rule; -use turbo_tasks::{ReadRef, ResolvedVc, Value}; +use turbo_tasks::{ReadRef, ResolvedVc}; use turbo_tasks_fs::FileSystemPath; use turbopack::module_options::{ModuleRule, ModuleRuleEffect, ModuleType, RuleCondition}; use turbopack_core::reference_type::{ReferenceType, UrlReferenceSubType}; use turbopack_ecmascript::{CustomTransformer, EcmascriptInputTransform}; -use crate::next_image::{module::BlurPlaceholderMode, StructuredImageModuleType}; +use crate::next_image::{StructuredImageModuleType, module::BlurPlaceholderMode}; pub async fn get_next_image_rule() -> Result { Ok(ModuleRule::new( @@ -65,7 +67,7 @@ pub async fn get_next_image_rule() -> Result { ]), vec![ModuleRuleEffect::ModuleType(ModuleType::Custom( ResolvedVc::upcast( - StructuredImageModuleType::new(Value::new(BlurPlaceholderMode::DataUrl)) + StructuredImageModuleType::new(BlurPlaceholderMode::DataUrl) .to_resolved() .await?, ), diff --git a/crates/next-core/src/next_shared/transforms/modularize_imports.rs b/crates/next-core/src/next_shared/transforms/modularize_imports.rs index 719e326f534f1..d963108dfb707 100644 --- a/crates/next-core/src/next_shared/transforms/modularize_imports.rs +++ b/crates/next-core/src/next_shared/transforms/modularize_imports.rs @@ -2,10 +2,10 @@ use std::sync::Arc; use anyhow::Result; use async_trait::async_trait; -use modularize_imports::{modularize_imports, Config, PackageConfig}; +use modularize_imports::{Config, PackageConfig, modularize_imports}; 
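Note on the next_shared/resolve.rs hunk above: the FEATURE_MODULES telemetry table drops the lazy_static crate in favour of std::sync::LazyLock. A minimal standalone sketch of the same pattern, using a plain HashMap instead of the FxHashMap the real code keeps:

    use std::collections::HashMap;
    use std::sync::LazyLock;

    // Initialized by the closure on first access, then shared immutably afterwards.
    static FEATURES: LazyLock<HashMap<&'static str, Vec<&'static str>>> = LazyLock::new(|| {
        HashMap::from_iter([
            ("next", vec!["/image", "/script", "/dynamic"]),
            ("@next", vec!["/font/google", "/font/local"]),
        ])
    });

    fn main() {
        // Dereferencing the static runs the initializer exactly once.
        assert!(FEATURES.contains_key("next"));
    }

LazyLock has been stable since Rust 1.80, so the extra macro dependency is no longer needed.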
use serde::{Deserialize, Serialize}; use swc_core::ecma::ast::Program; -use turbo_tasks::{trace::TraceRawVcs, FxIndexMap, NonLocalValue, OperationValue, ResolvedVc}; +use turbo_tasks::{FxIndexMap, NonLocalValue, OperationValue, ResolvedVc, trace::TraceRawVcs}; use turbopack::module_options::{ModuleRule, ModuleRuleEffect}; use turbopack_ecmascript::{CustomTransformer, EcmascriptInputTransform, TransformContext}; @@ -92,7 +92,7 @@ impl ModularizeImportsTransformer { modularize_imports::Transform::Vec(v.clone()) } Transform::None => { - panic!("Missing transform value for package {}", k) + panic!("Missing transform value for package {k}") } }, prevent_full_import: v.prevent_full_import, diff --git a/crates/next-core/src/next_shared/transforms/next_cjs_optimizer.rs b/crates/next-core/src/next_shared/transforms/next_cjs_optimizer.rs index c110270382281..cf09fbcd7e1c5 100644 --- a/crates/next-core/src/next_shared/transforms/next_cjs_optimizer.rs +++ b/crates/next-core/src/next_shared/transforms/next_cjs_optimizer.rs @@ -1,6 +1,6 @@ use anyhow::Result; use async_trait::async_trait; -use next_custom_transforms::transforms::cjs_optimizer::{cjs_optimizer, Config, PackageConfig}; +use next_custom_transforms::transforms::cjs_optimizer::{Config, PackageConfig, cjs_optimizer}; use rustc_hash::FxHashMap; use swc_core::{ atoms::atom, diff --git a/crates/next-core/src/next_shared/transforms/next_dynamic.rs b/crates/next-core/src/next_shared/transforms/next_dynamic.rs index 3fab4e4821c99..0fb60d6bad27a 100644 --- a/crates/next-core/src/next_shared/transforms/next_dynamic.rs +++ b/crates/next-core/src/next_shared/transforms/next_dynamic.rs @@ -1,6 +1,6 @@ use anyhow::Result; use async_trait::async_trait; -use next_custom_transforms::transforms::dynamic::{next_dynamic, NextDynamicMode}; +use next_custom_transforms::transforms::dynamic::{NextDynamicMode, next_dynamic}; use swc_core::{atoms::atom, common::FileName, ecma::ast::Program}; use turbo_tasks::{ResolvedVc, Vc}; use turbopack::module_options::{ModuleRule, ModuleRuleEffect}; diff --git a/crates/next-core/src/next_shared/transforms/next_optimize_server_react.rs b/crates/next-core/src/next_shared/transforms/next_optimize_server_react.rs index 2fe243b00e461..acbe54ce12714 100644 --- a/crates/next-core/src/next_shared/transforms/next_optimize_server_react.rs +++ b/crates/next-core/src/next_shared/transforms/next_optimize_server_react.rs @@ -1,6 +1,6 @@ use anyhow::Result; use async_trait::async_trait; -use next_custom_transforms::transforms::optimize_server_react::{optimize_server_react, Config}; +use next_custom_transforms::transforms::optimize_server_react::{Config, optimize_server_react}; use swc_core::ecma::ast::*; use turbo_tasks::ResolvedVc; use turbopack::module_options::{ModuleRule, ModuleRuleEffect}; diff --git a/crates/next-core/src/next_shared/transforms/next_page_static_info.rs b/crates/next-core/src/next_shared/transforms/next_page_static_info.rs index 2e01dba6a465f..d88a4743d6ca3 100644 --- a/crates/next-core/src/next_shared/transforms/next_page_static_info.rs +++ b/crates/next-core/src/next_shared/transforms/next_page_static_info.rs @@ -1,10 +1,14 @@ use anyhow::Result; use async_trait::async_trait; use next_custom_transforms::transforms::page_static_info::{ - collect_exports, extract_exported_const_values, Const, + Const, collect_exported_const_visitor::GetMut, collect_exports, extract_exported_const_values, }; use serde_json::Value; -use swc_core::{atoms::atom, ecma::ast::Program}; +use swc_core::{ + atoms::{Atom, atom}, + 
ecma::ast::Program, +}; +use turbo_rcstr::rcstr; use turbo_tasks::{ResolvedVc, Vc}; use turbo_tasks_fs::FileSystemPath; use turbopack::module_options::{ModuleRule, ModuleRuleEffect}; @@ -45,15 +49,27 @@ struct NextPageStaticInfo { client_context: Option, } +#[derive(Default)] +struct PropertiesToExtract { + config: Option, +} +impl GetMut> for PropertiesToExtract { + fn get_mut(&mut self, key: &Atom) -> Option<&mut Option> { + if key == &atom!("config") { + Some(&mut self.config) + } else { + None + } + } +} #[async_trait] impl CustomTransformer for NextPageStaticInfo { #[tracing::instrument(level = tracing::Level::TRACE, name = "next_page_static_info", skip_all)] async fn transform(&self, program: &mut Program, ctx: &TransformContext<'_>) -> Result<()> { if let Some(collected_exports) = collect_exports(program)? { - let mut properties_to_extract = collected_exports.extra_properties.clone(); - properties_to_extract.insert(atom!("config")); + let mut properties_to_extract = PropertiesToExtract::default(); - let extracted = extract_exported_const_values(program, properties_to_extract); + extract_exported_const_values(program, &mut properties_to_extract); let is_server_layer_page = matches!( self.server_context, @@ -81,34 +97,32 @@ impl CustomTransformer for NextPageStaticInfo { } } - if is_app_page { - if let Some(Some(Const::Value(Value::Object(config_obj)))) = - extracted.get(&atom!("config")) - { - let mut messages = vec![format!( - "Page config in {} is deprecated. Replace `export const config=…` with \ - the following:", - ctx.file_path_str - )]; - - if let Some(runtime) = config_obj.get("runtime") { - messages.push(format!("- `export const runtime = {}`", runtime)); - } + if is_app_page + && let Some(Const::Value(Value::Object(config_obj))) = properties_to_extract.config + { + let mut messages = vec![format!( + "Page config in {} is deprecated. 
Replace `export const config=…` with the \ + following:", + ctx.file_path_str + )]; + + if let Some(runtime) = config_obj.get("runtime") { + messages.push(format!("- `export const runtime = {runtime}`")); + } - if let Some(regions) = config_obj.get("regions") { - messages.push(format!("- `export const preferredRegion = {}`", regions)); - } + if let Some(regions) = config_obj.get("regions") { + messages.push(format!("- `export const preferredRegion = {regions}`")); + } - messages.push("Visit https://nextjs.org/docs/app/api-reference/file-conventions/route-segment-config for more information.".to_string()); + messages.push("Visit https://nextjs.org/docs/app/api-reference/file-conventions/route-segment-config for more information.".to_string()); - PageStaticInfoIssue { - file_path: ctx.file_path, - messages, - severity: IssueSeverity::Warning, - } - .resolved_cell() - .emit(); + PageStaticInfoIssue { + file_path: ctx.file_path, + messages, + severity: IssueSeverity::Warning, } + .resolved_cell() + .emit(); } if collected_exports.directives.contains(&atom!("client")) @@ -138,9 +152,8 @@ pub struct PageStaticInfoIssue { #[turbo_tasks::value_impl] impl Issue for PageStaticInfoIssue { - #[turbo_tasks::function] - fn severity(&self) -> Vc { - self.severity.into() + fn severity(&self) -> IssueSeverity { + self.severity } #[turbo_tasks::function] @@ -150,7 +163,7 @@ impl Issue for PageStaticInfoIssue { #[turbo_tasks::function] fn title(&self) -> Vc { - StyledString::Text("Invalid page configuration".into()).cell() + StyledString::Text(rcstr!("Invalid page configuration")).cell() } #[turbo_tasks::function] @@ -164,7 +177,7 @@ impl Issue for PageStaticInfoIssue { StyledString::Line( self.messages .iter() - .map(|v| StyledString::Text(format!("{}\n", v).into())) + .map(|v| StyledString::Text(format!("{v}\n").into())) .collect::>(), ) .resolved_cell(), diff --git a/crates/next-core/src/next_shared/transforms/next_shake_exports.rs b/crates/next-core/src/next_shared/transforms/next_shake_exports.rs index 690420cb16220..d8cc453ff975b 100644 --- a/crates/next-core/src/next_shared/transforms/next_shake_exports.rs +++ b/crates/next-core/src/next_shared/transforms/next_shake_exports.rs @@ -1,6 +1,6 @@ use anyhow::Result; use async_trait::async_trait; -use next_custom_transforms::transforms::shake_exports::{shake_exports, Config}; +use next_custom_transforms::transforms::shake_exports::{Config, shake_exports}; use swc_core::ecma::ast::*; use turbo_tasks::ResolvedVc; use turbopack::module_options::{ModuleRule, ModuleRuleEffect}; diff --git a/crates/next-core/src/next_shared/transforms/next_strip_page_exports.rs b/crates/next-core/src/next_shared/transforms/next_strip_page_exports.rs index 8f8e2467dd1f0..c7a1dfa7c6c58 100644 --- a/crates/next-core/src/next_shared/transforms/next_strip_page_exports.rs +++ b/crates/next-core/src/next_shared/transforms/next_strip_page_exports.rs @@ -1,7 +1,7 @@ use anyhow::Result; use async_trait::async_trait; use next_custom_transforms::transforms::strip_page_exports::{ - next_transform_strip_page_exports, ExportFilter, + ExportFilter, next_transform_strip_page_exports, }; use swc_core::ecma::ast::Program; use turbo_tasks::{ResolvedVc, Vc}; diff --git a/crates/next-core/src/next_shared/transforms/next_track_dynamic_imports.rs b/crates/next-core/src/next_shared/transforms/next_track_dynamic_imports.rs new file mode 100644 index 0000000000000..653df152afce8 --- /dev/null +++ b/crates/next-core/src/next_shared/transforms/next_track_dynamic_imports.rs @@ -0,0 +1,24 @@ +use 
anyhow::Result; +use async_trait::async_trait; +use next_custom_transforms::transforms::track_dynamic_imports::*; +use swc_core::ecma::ast::Program; +use turbopack::module_options::ModuleRule; +use turbopack_ecmascript::{CustomTransformer, TransformContext}; + +use super::get_ecma_transform_rule; + +pub fn get_next_track_dynamic_imports_transform_rule(mdx_rs: bool) -> ModuleRule { + get_ecma_transform_rule(Box::new(NextTrackDynamicImports {}), mdx_rs, false) +} + +#[derive(Debug)] +struct NextTrackDynamicImports {} + +#[async_trait] +impl CustomTransformer for NextTrackDynamicImports { + #[tracing::instrument(level = tracing::Level::TRACE, name = "next_track_dynamic_imports", skip_all)] + async fn transform(&self, program: &mut Program, ctx: &TransformContext<'_>) -> Result<()> { + program.mutate(track_dynamic_imports(ctx.unresolved_mark)); + Ok(()) + } +} diff --git a/crates/next-core/src/next_shared/transforms/server_actions.rs b/crates/next-core/src/next_shared/transforms/server_actions.rs index 40453845d617f..93ece0814ab76 100644 --- a/crates/next-core/src/next_shared/transforms/server_actions.rs +++ b/crates/next-core/src/next_shared/transforms/server_actions.rs @@ -1,7 +1,7 @@ use anyhow::Result; use async_trait::async_trait; use next_custom_transforms::transforms::server_actions::{ - server_actions, Config, ServerActionsMode, + Config, ServerActionsMode, server_actions, }; use swc_core::{common::FileName, ecma::ast::Program}; use turbo_rcstr::RcStr; @@ -40,8 +40,8 @@ pub async fn get_server_actions_transform_rule( Ok(ModuleRule::new( module_rule_match_js_no_url(enable_mdx_rs), vec![ModuleRuleEffect::ExtendEcmascriptTransforms { - prepend: ResolvedVc::cell(vec![]), - append: ResolvedVc::cell(vec![transformer]), + prepend: ResolvedVc::cell(vec![transformer]), + append: ResolvedVc::cell(vec![]), }], )) } diff --git a/crates/next-core/src/next_shared/transforms/swc_ecma_transform_plugins.rs b/crates/next-core/src/next_shared/transforms/swc_ecma_transform_plugins.rs index f877ba4245bff..45ffeffd774ae 100644 --- a/crates/next-core/src/next_shared/transforms/swc_ecma_transform_plugins.rs +++ b/crates/next-core/src/next_shared/transforms/swc_ecma_transform_plugins.rs @@ -35,14 +35,14 @@ pub async fn get_swc_ecma_transform_rule_impl( plugin_configs: &[(RcStr, serde_json::Value)], enable_mdx_rs: bool, ) -> Result> { - use anyhow::{bail, Context}; - use turbo_tasks::{TryJoinIterExt, Value}; + use anyhow::bail; + use turbo_tasks::TryFlatJoinIterExt; use turbo_tasks_fs::FileContent; use turbopack::{resolve_options, resolve_options_context::ResolveOptionsContext}; use turbopack_core::{ asset::Asset, reference_type::{CommonJsReferenceSubType, ReferenceType}, - resolve::{handle_resolve_error, parse::Request, pattern::Pattern, resolve}, + resolve::{handle_resolve_error, parse::Request, resolve}, }; use turbopack_ecmascript_plugins::transform::swc_ecma_transform_plugins::{ SwcEcmaTransformPluginsTransformer, SwcPluginModule, @@ -58,7 +58,7 @@ pub async fn get_swc_ecma_transform_rule_impl( // one for implicit package name resolves to node_modules, // and one for explicit path to a .wasm binary. // Current resolve will fail with latter. 
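Note on the swc_ecma_transform_plugins.rs change that starts above and continues below: TryJoinIterExt is swapped for TryFlatJoinIterExt so that an SWC plugin which fails to resolve no longer aborts the whole task; the closure returns Ok(None) after handle_resolve_error has reported the problem, and the entry is simply dropped. A rough standalone illustration of that skip-on-None shape with plain iterators; try_flat_join itself is a turbo-tasks helper, and the names below are illustrative only:

    fn load_plugin(name: &str) -> anyhow::Result<Option<String>> {
        // Pretend plugins under an npm scope cannot be resolved; the caller
        // skips them instead of failing the whole collection.
        if name.starts_with('@') {
            return Ok(None);
        }
        Ok(Some(format!("compiled:{name}")))
    }

    fn main() -> anyhow::Result<()> {
        let requested = ["swc-plugin-a", "@broken/plugin", "swc-plugin-b"];
        // collect() into Result still propagates hard errors,
        // while flatten() silently discards the Ok(None) entries.
        let loaded: Vec<String> = requested
            .iter()
            .map(|name| load_plugin(name))
            .collect::<anyhow::Result<Vec<_>>>()?
            .into_iter()
            .flatten()
            .collect();
        assert_eq!(loaded, vec!["compiled:swc-plugin-a", "compiled:swc-plugin-b"]);
        Ok(())
    }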
- let request = Request::parse(Value::new(Pattern::Constant(name.as_str().into()))); + let request = Request::parse_string(name.clone()); let resolve_options = resolve_options( *project_path, ResolveOptionsContext { @@ -72,36 +72,40 @@ pub async fn get_swc_ecma_transform_rule_impl( let plugin_wasm_module_resolve_result = handle_resolve_error( resolve( *project_path, - Value::new(ReferenceType::CommonJs(CommonJsReferenceSubType::Undefined)), + ReferenceType::CommonJs(CommonJsReferenceSubType::Undefined), request, resolve_options, ) .as_raw_module_result(), - Value::new(ReferenceType::CommonJs(CommonJsReferenceSubType::Undefined)), + ReferenceType::CommonJs(CommonJsReferenceSubType::Undefined), + // TODO proper error location *project_path, request, resolve_options, false, + // TODO proper error location None, ) .await?; - let plugin_module = plugin_wasm_module_resolve_result - .first_module() - .await? - .context("Expected to find module")?; - let content = &*plugin_module.content().file_content().await?; + let Some(plugin_module) = &*plugin_wasm_module_resolve_result.first_module().await? + else { + // Ignore unresolveable plugin modules, handle_resolve_error has already emitted an + // issue. + return Ok(None); + }; + let content = &*plugin_module.content().file_content().await?; let FileContent::Content(file) = content else { bail!("Expected file content for plugin module"); }; - Ok(( - SwcPluginModule::new(name, file.content().to_bytes()?.to_vec()).resolved_cell(), + Ok(Some(( + SwcPluginModule::new(name, file.content().to_bytes().to_vec()).resolved_cell(), config.clone(), - )) + ))) }) - .try_join() + .try_flat_join() .await?; Ok(Some(get_ecma_transform_rule( diff --git a/crates/next-core/src/next_shared/webpack_rules/babel.rs b/crates/next-core/src/next_shared/webpack_rules/babel.rs index 50f62f6ccc31b..dcb7854553086 100644 --- a/crates/next-core/src/next_shared/webpack_rules/babel.rs +++ b/crates/next-core/src/next_shared/webpack_rules/babel.rs @@ -1,5 +1,6 @@ use anyhow::Result; -use turbo_tasks::{ResolvedVc, Value, Vc}; +use turbo_rcstr::rcstr; +use turbo_tasks::{ResolvedVc, Vc}; use turbo_tasks_fs::{self, FileSystemEntryType, FileSystemPath}; use turbopack::module_options::{LoaderRuleItem, OptionWebpackRules, WebpackRules}; use turbopack_core::{ @@ -66,15 +67,15 @@ pub async fn maybe_add_babel_loader( { BabelIssue { path: project_root.to_resolved().await?, - title: StyledString::Text( - "Unable to resolve babel-loader, but a babel config is present".into(), - ) + title: StyledString::Text(rcstr!( + "Unable to resolve babel-loader, but a babel config is present" + )) .resolved_cell(), - description: StyledString::Text( - "Make sure babel-loader is installed via your package manager.".into(), - ) + description: StyledString::Text(rcstr!( + "Make sure babel-loader is installed via your package manager." 
+ )) .resolved_cell(), - severity: IssueSeverity::Fatal.resolved_cell(), + severity: IssueSeverity::Fatal, } .resolved_cell() .emit(); @@ -83,7 +84,7 @@ pub async fn maybe_add_babel_loader( } let loader = WebpackLoaderItem { - loader: "babel-loader".into(), + loader: rcstr!("babel-loader"), options: Default::default(), }; if let Some(rule) = rule { @@ -95,7 +96,7 @@ pub async fn maybe_add_babel_loader( pattern.into(), LoaderRuleItem { loaders: ResolvedVc::cell(vec![loader]), - rename_as: Some("*".into()), + rename_as: Some(rcstr!("*")), }, ); } @@ -114,10 +115,8 @@ pub async fn maybe_add_babel_loader( pub async fn is_babel_loader_available(project_path: Vc) -> Result> { let result = resolve( project_path, - Value::new(ReferenceType::CommonJs(CommonJsReferenceSubType::Undefined)), - Request::parse(Value::new(Pattern::Constant( - "babel-loader/package.json".into(), - ))), + ReferenceType::CommonJs(CommonJsReferenceSubType::Undefined), + Request::parse(Pattern::Constant("babel-loader/package.json".into())), node_cjs_resolve_options(project_path), ); let assets = result.primary_sources().await?; @@ -129,7 +128,7 @@ struct BabelIssue { path: ResolvedVc, title: ResolvedVc, description: ResolvedVc, - severity: ResolvedVc, + severity: IssueSeverity, } #[turbo_tasks::value_impl] @@ -139,9 +138,8 @@ impl Issue for BabelIssue { IssueStage::Transform.into() } - #[turbo_tasks::function] - fn severity(&self) -> Vc { - *self.severity + fn severity(&self) -> IssueSeverity { + self.severity } #[turbo_tasks::function] diff --git a/crates/next-core/src/next_shared/webpack_rules/mod.rs b/crates/next-core/src/next_shared/webpack_rules/mod.rs index 9cb41113084b6..ef72d27f195cb 100644 --- a/crates/next-core/src/next_shared/webpack_rules/mod.rs +++ b/crates/next-core/src/next_shared/webpack_rules/mod.rs @@ -3,7 +3,7 @@ use turbo_rcstr::RcStr; use turbo_tasks::{ResolvedVc, Vc}; use turbo_tasks_fs::FileSystemPath; use turbopack::module_options::WebpackLoadersOptions; -use turbopack_core::resolve::{options::ImportMapping, ExternalTraced, ExternalType}; +use turbopack_core::resolve::{ExternalTraced, ExternalType, options::ImportMapping}; use self::{babel::maybe_add_babel_loader, sass::maybe_add_sass_loader}; use crate::next_config::NextConfig; @@ -15,19 +15,22 @@ pub async fn webpack_loader_options( project_path: ResolvedVc, next_config: Vc, foreign: bool, - conditions: Vec, + condition_strs: Vec, ) -> Result>> { - let rules = *next_config.webpack_rules(conditions).await?; + let rules = *next_config.webpack_rules(condition_strs).await?; let rules = *maybe_add_sass_loader(next_config.sass_config(), rules.map(|v| *v)).await?; let rules = if foreign { rules } else { *maybe_add_babel_loader(*project_path, rules.map(|v| *v)).await? 
}; + + let conditions = next_config.webpack_conditions().to_resolved().await?; Ok(if let Some(rules) = rules { Some( WebpackLoadersOptions { rules, + conditions, loader_runner_package: Some(loader_runner_package_mapping().to_resolved().await?), } .resolved_cell(), @@ -38,12 +41,14 @@ pub async fn webpack_loader_options( } #[turbo_tasks::function] -async fn loader_runner_package_mapping() -> Result> { - Ok(ImportMapping::Alternatives(vec![ImportMapping::External( - Some("next/dist/compiled/loader-runner".into()), - ExternalType::CommonJs, - ExternalTraced::Untraced, - ) - .resolved_cell()]) +fn loader_runner_package_mapping() -> Result> { + Ok(ImportMapping::Alternatives(vec![ + ImportMapping::External( + Some("next/dist/compiled/loader-runner".into()), + ExternalType::CommonJs, + ExternalTraced::Untraced, + ) + .resolved_cell(), + ]) .cell()) } diff --git a/crates/next-core/src/next_shared/webpack_rules/sass.rs b/crates/next-core/src/next_shared/webpack_rules/sass.rs index 5f2d01c20708c..b8bcf8ca0e34f 100644 --- a/crates/next-core/src/next_shared/webpack_rules/sass.rs +++ b/crates/next-core/src/next_shared/webpack_rules/sass.rs @@ -1,6 +1,6 @@ use std::mem::take; -use anyhow::{bail, Result}; +use anyhow::{Result, bail}; use serde_json::Value as JsonValue; use turbo_tasks::{ResolvedVc, Vc}; use turbopack::module_options::{LoaderRuleItem, OptionWebpackRules, WebpackRules}; diff --git a/crates/next-core/src/next_telemetry.rs b/crates/next-core/src/next_telemetry.rs index 7c80e60f895b9..872f031bc6140 100644 --- a/crates/next-core/src/next_telemetry.rs +++ b/crates/next-core/src/next_telemetry.rs @@ -1,5 +1,5 @@ -use turbo_rcstr::RcStr; -use turbo_tasks::{fxindexmap, Vc}; +use turbo_rcstr::{RcStr, rcstr}; +use turbo_tasks::{Vc, fxindexmap}; use turbopack_core::diagnostics::{Diagnostic, DiagnosticPayload}; /// A structure that keeps track of whether a particular Next.js feature is @@ -17,7 +17,7 @@ pub struct NextFeatureTelemetry { impl NextFeatureTelemetry { pub fn new(feature_name: RcStr, enabled: bool) -> Self { NextFeatureTelemetry { - event_name: "EVENT_BUILD_FEATURE_USAGE".into(), + event_name: rcstr!("EVENT_BUILD_FEATURE_USAGE"), feature_name, enabled, } @@ -28,7 +28,7 @@ impl NextFeatureTelemetry { impl Diagnostic for NextFeatureTelemetry { #[turbo_tasks::function] fn category(&self) -> Vc { - Vc::cell("NextFeatureTelemetry_category_tbd".into()) + Vc::cell(rcstr!("NextFeatureTelemetry_category_tbd")) } #[turbo_tasks::function] @@ -57,7 +57,7 @@ pub struct ModuleFeatureTelemetry { impl ModuleFeatureTelemetry { pub fn new(feature_name: RcStr, invocation_count: usize) -> Self { ModuleFeatureTelemetry { - event_name: "EVENT_BUILD_FEATURE_USAGE".into(), + event_name: rcstr!("EVENT_BUILD_FEATURE_USAGE"), feature_name, invocation_count, } @@ -68,7 +68,7 @@ impl ModuleFeatureTelemetry { impl Diagnostic for ModuleFeatureTelemetry { #[turbo_tasks::function] fn category(&self) -> Vc { - Vc::cell("ModuleFeatureTelemetry_category_tbd".into()) + Vc::cell(rcstr!("ModuleFeatureTelemetry_category_tbd")) } #[turbo_tasks::function] diff --git a/crates/next-core/src/page_loader.rs b/crates/next-core/src/page_loader.rs index 6e118530c81f7..42320b8c73037 100644 --- a/crates/next-core/src/page_loader.rs +++ b/crates/next-core/src/page_loader.rs @@ -1,10 +1,10 @@ use std::io::Write; -use anyhow::{bail, Result}; -use turbo_rcstr::RcStr; -use turbo_tasks::{fxindexmap, ResolvedVc, TryJoinIterExt, Value, Vc}; +use anyhow::{Result, bail}; +use turbo_rcstr::{RcStr, rcstr}; +use turbo_tasks::{ResolvedVc, 
TryJoinIterExt, Vc, fxindexmap}; use turbo_tasks_fs::{ - self, rope::RopeBuilder, File, FileContent, FileSystemPath, FileSystemPathOption, + self, File, FileContent, FileSystemPath, FileSystemPathOption, rope::RopeBuilder, }; use turbopack_core::{ asset::{Asset, AssetContent}, @@ -25,16 +25,12 @@ use crate::{embed_js::next_js_file_path, util::get_asset_path_from_pathname}; pub async fn create_page_loader_entry_module( client_context: Vc>, entry_asset: Vc>, - pathname: Vc, + pathname: RcStr, ) -> Result>> { let mut result = RopeBuilder::default(); - writeln!( - result, - "const PAGE_PATH = {};\n", - StringifyJs(&*pathname.await?) - )?; + writeln!(result, "const PAGE_PATH = {};\n", StringifyJs(&pathname))?; - let page_loader_path = next_js_file_path("entry/page-loader.ts".into()); + let page_loader_path = next_js_file_path(rcstr!("entry/page-loader.ts")); let base_code = page_loader_path.read(); if let FileContent::Content(base_file) = &*base_code.await? { result += base_file.content() @@ -52,7 +48,7 @@ pub async fn create_page_loader_entry_module( let module = client_context .process( entry_asset, - Value::new(ReferenceType::Entry(EntryReferenceSubType::Page)), + ReferenceType::Entry(EntryReferenceSubType::Page), ) .module() .to_resolved() @@ -61,9 +57,9 @@ pub async fn create_page_loader_entry_module( let module = client_context .process( virtual_source, - Value::new(ReferenceType::Internal(ResolvedVc::cell(fxindexmap! { - "PAGE".into() => module, - }))), + ReferenceType::Internal(ResolvedVc::cell(fxindexmap! { + rcstr!("PAGE") => module, + })), ) .module(); Ok(module) @@ -72,7 +68,7 @@ pub async fn create_page_loader_entry_module( #[turbo_tasks::value(shared)] pub struct PageLoaderAsset { pub server_root: ResolvedVc, - pub pathname: ResolvedVc, + pub pathname: RcStr, pub rebase_prefix_path: ResolvedVc, pub page_chunks: ResolvedVc, } @@ -82,7 +78,7 @@ impl PageLoaderAsset { #[turbo_tasks::function] pub fn new( server_root: ResolvedVc, - pathname: ResolvedVc, + pathname: RcStr, rebase_prefix_path: ResolvedVc, page_chunks: ResolvedVc, ) -> Vc { @@ -125,11 +121,6 @@ impl PageLoaderAsset { } } -#[turbo_tasks::function] -fn page_loader_chunk_reference_description() -> Vc { - Vc::cell("page loader chunk".into()) -} - #[turbo_tasks::value_impl] impl OutputAsset for PageLoaderAsset { #[turbo_tasks::function] @@ -141,7 +132,7 @@ impl OutputAsset for PageLoaderAsset { Ok(root.join( format!( "static/chunks/pages{}", - get_asset_path_from_pathname(&self.pathname.await?, ".js") + get_asset_path_from_pathname(&self.pathname, ".js") ) .into(), )) @@ -181,7 +172,7 @@ impl Asset for PageLoaderAsset { let content = format!( "__turbopack_load_page_chunks__({}, {:#})\n", - StringifyJs(&this.pathname.await?), + StringifyJs(&this.pathname), StringifyJs(&chunks_data) ); diff --git a/crates/next-core/src/tracing_presets.rs b/crates/next-core/src/tracing_presets.rs index b54edb689c375..906fac8ab3f2e 100644 --- a/crates/next-core/src/tracing_presets.rs +++ b/crates/next-core/src/tracing_presets.rs @@ -1,6 +1,6 @@ use once_cell::sync::Lazy; use turbopack_trace_utils::tracing_presets::{ - TRACING_OVERVIEW_TARGETS, TRACING_TURBOPACK_TARGETS, TRACING_TURBO_TASKS_TARGETS, + TRACING_OVERVIEW_TARGETS, TRACING_TURBO_TASKS_TARGETS, TRACING_TURBOPACK_TARGETS, }; pub static TRACING_NEXT_OVERVIEW_TARGETS: Lazy> = Lazy::new(|| { diff --git a/crates/next-core/src/transform_options.rs b/crates/next-core/src/transform_options.rs index 799a44e7fda27..1ea07c4155643 100644 --- a/crates/next-core/src/transform_options.rs +++ 
b/crates/next-core/src/transform_options.rs @@ -10,7 +10,7 @@ use turbopack::{ use turbopack_browser::react_refresh::assert_can_resolve_react_refresh; use turbopack_core::{ file_source::FileSource, - resolve::{find_context_file, node::node_cjs_resolve_options, FindContextFileResult}, + resolve::{FindContextFileResult, find_context_file, node::node_cjs_resolve_options}, source::Source, }; use turbopack_ecmascript::typescript::resolve::{read_from_tsconfigs, read_tsconfigs, tsconfig}; @@ -66,51 +66,62 @@ pub async fn get_decorators_transform_options( ) -> Result> { let tsconfig = get_typescript_options(project_path).await?; - let decorators_transform_options = if let Some(tsconfig) = tsconfig { - read_from_tsconfigs(&tsconfig, |json, _| { - let decorators_kind = if json["compilerOptions"]["experimentalDecorators"] - .as_bool() - .unwrap_or(false) - { - Some(DecoratorsKind::Legacy) - } else { - // ref: https://devblogs.microsoft.com/typescript/announcing-typescript-5-0-rc/#differences-with-experimental-legacy-decorators - // `without the flag, decorators will now be valid syntax for all new code. - // Outside of --experimentalDecorators, they will be type-checked and emitted - // differently with ts 5.0, new ecma decorators will be enabled - // if legacy decorators are not enabled - Some(DecoratorsKind::Ecma) - }; - - let emit_decorators_metadata = if let Some(decorators_kind) = &decorators_kind { - match decorators_kind { - DecoratorsKind::Legacy => { - // ref: This new decorators proposal is not compatible with - // --emitDecoratorMetadata, and it does not allow decorating parameters. - // Future ECMAScript proposals may be able to help bridge that gap - json["compilerOptions"]["emitDecoratorMetadata"] - .as_bool() - .unwrap_or(false) - } - DecoratorsKind::Ecma => false, - } - } else { - false - }; - - Some(DecoratorsOptions { - decorators_kind, - emit_decorators_metadata, - use_define_for_class_fields: json["compilerOptions"]["useDefineForClassFields"] - .as_bool() - .unwrap_or(false), - ..Default::default() - }) + let experimental_decorators = if let Some(ref tsconfig) = tsconfig { + read_from_tsconfigs(tsconfig, |json, _| { + json["compilerOptions"]["experimentalDecorators"].as_bool() }) .await? - .unwrap_or_default() + .unwrap_or(false) + } else { + false + }; + + let decorators_kind = if experimental_decorators { + Some(DecoratorsKind::Legacy) } else { - Default::default() + // ref: https://devblogs.microsoft.com/typescript/announcing-typescript-5-0-rc/#differences-with-experimental-legacy-decorators + // `without the flag, decorators will now be valid syntax for all new code. + // Outside of --experimentalDecorators, they will be type-checked and emitted + // differently with ts 5.0, new ecma decorators will be enabled + // if legacy decorators are not enabled + Some(DecoratorsKind::Ecma) + }; + + let emit_decorators_metadata = if let Some(ref tsconfig) = tsconfig { + read_from_tsconfigs(tsconfig, |json, _| { + json["compilerOptions"]["emitDecoratorMetadata"].as_bool() + }) + .await? + .unwrap_or(false) + } else { + false + }; + + let use_define_for_class_fields = if let Some(ref tsconfig) = tsconfig { + read_from_tsconfigs(tsconfig, |json, _| { + json["compilerOptions"]["useDefineForClassFields"].as_bool() + }) + .await? 
+ .unwrap_or(false) + } else { + false + }; + + let decorators_transform_options = DecoratorsOptions { + decorators_kind: decorators_kind.clone(), + emit_decorators_metadata: if let Some(ref decorators_kind) = decorators_kind { + match decorators_kind { + DecoratorsKind::Legacy => emit_decorators_metadata, + // ref: This new decorators proposal is not compatible with + // --emitDecoratorMetadata, and it does not allow decorating parameters. + // Future ECMAScript proposals may be able to help bridge that gap + DecoratorsKind::Ecma => false, + } + } else { + false + }, + use_define_for_class_fields, + ..Default::default() }; Ok(decorators_transform_options.cell()) diff --git a/crates/next-core/src/url_node.rs b/crates/next-core/src/url_node.rs index 60296cba7c954..cdd88ba44350a 100644 --- a/crates/next-core/src/url_node.rs +++ b/crates/next-core/src/url_node.rs @@ -454,10 +454,12 @@ mod tests { "/blog/[cid]".to_string(), ]); assert!(result.is_err()); - assert!(result - .unwrap_err() - .to_string() - .contains("different slug names")); + assert!( + result + .unwrap_err() + .to_string() + .contains("different slug names") + ); } #[test] @@ -469,10 +471,12 @@ mod tests { "/blog/[id]".to_string(), ]); assert!(result.is_err()); - assert!(result - .unwrap_err() - .to_string() - .contains("the same slug name")); + assert!( + result + .unwrap_err() + .to_string() + .contains("the same slug name") + ); } #[test] @@ -480,60 +484,72 @@ mod tests { let result = get_sorted_routes(&["/blog/[id]".to_string(), "/blog/[id]/[...id]".to_string()]); assert!(result.is_err()); - assert!(result - .unwrap_err() - .to_string() - .contains("the same slug name")); + assert!( + result + .unwrap_err() + .to_string() + .contains("the same slug name") + ); } #[test] fn catches_middle_catch_all_with_another_catch_all() { let result = get_sorted_routes(&["/blog/[...id]/[...id2]".to_string()]); assert!(result.is_err()); - assert!(result - .unwrap_err() - .to_string() - .contains("Catch-all must be the last part of the URL.")); + assert!( + result + .unwrap_err() + .to_string() + .contains("Catch-all must be the last part of the URL.") + ); } #[test] fn catches_middle_catch_all_with_fixed_route() { let result = get_sorted_routes(&["/blog/[...id]/abc".to_string()]); assert!(result.is_err()); - assert!(result - .unwrap_err() - .to_string() - .contains("Catch-all must be the last part of the URL.")); + assert!( + result + .unwrap_err() + .to_string() + .contains("Catch-all must be the last part of the URL.") + ); } #[test] fn catches_extra_dots_in_catch_all() { let result = get_sorted_routes(&["/blog/[....id]/abc".to_string()]); assert!(result.is_err()); - assert!(result - .unwrap_err() - .to_string() - .contains("Segment names may not start with erroneous periods")); + assert!( + result + .unwrap_err() + .to_string() + .contains("Segment names may not start with erroneous periods") + ); } #[test] fn catches_missing_dots_in_catch_all() { let result = get_sorted_routes(&["/blog/[..id]/abc".to_string()]); assert!(result.is_err()); - assert!(result - .unwrap_err() - .to_string() - .contains("Segment names may not start with erroneous periods")); + assert!( + result + .unwrap_err() + .to_string() + .contains("Segment names may not start with erroneous periods") + ); } #[test] fn catches_extra_brackets_for_optional_1() { let result = get_sorted_routes(&["/blog/[[...id]".to_string()]); assert!(result.is_err()); - assert!(result - .unwrap_err() - .to_string() - .contains("Segment names may not start or end with extra brackets")); + 
assert!( + result + .unwrap_err() + .to_string() + .contains("Segment names may not start or end with extra brackets") + ); } #[test] @@ -679,9 +695,11 @@ mod tests { "/blog/[helloworld]/[hello-world]".to_string(), ]); assert!(result.is_err()); - assert!(result - .unwrap_err() - .to_string() - .contains("differ only by non-word")); + assert!( + result + .unwrap_err() + .to_string() + .contains("differ only by non-word") + ); } } diff --git a/crates/next-core/src/util.rs b/crates/next-core/src/util.rs index 164c4415e4a08..253809e0d0cf5 100644 --- a/crates/next-core/src/util.rs +++ b/crates/next-core/src/util.rs @@ -1,19 +1,19 @@ use std::future::Future; -use anyhow::{bail, Context, Result}; -use serde::{de::DeserializeOwned, Deserialize, Serialize}; +use anyhow::{Context, Result, bail}; +use serde::{Deserialize, Serialize, de::DeserializeOwned}; use swc_core::{ common::GLOBALS, ecma::ast::{Expr, Lit, Program}, }; use turbo_rcstr::RcStr; use turbo_tasks::{ - trace::TraceRawVcs, util::WrapFuture, FxIndexMap, FxIndexSet, NonLocalValue, ResolvedVc, - TaskInput, ValueDefault, ValueToString, Vc, + FxIndexMap, FxIndexSet, NonLocalValue, ResolvedVc, TaskInput, ValueDefault, ValueToString, Vc, + trace::TraceRawVcs, util::WrapFuture, }; use turbo_tasks_fs::{ - self, json::parse_json_rope_with_source_context, rope::Rope, util::join_path, File, - FileContent, FileSystem, FileSystemPath, + self, File, FileContent, FileSystem, FileSystemPath, json::parse_json_rope_with_source_context, + rope::Rope, util::join_path, }; use turbopack_core::{ asset::AssetContent, @@ -25,10 +25,10 @@ use turbopack_core::{ virtual_source::VirtualSource, }; use turbopack_ecmascript::{ + EcmascriptParsable, analyzer::{ConstantValue, JsValue, ObjectPart}, parse::ParseResult, utils::StringifyJs, - EcmascriptParsable, }; use crate::{ @@ -71,7 +71,7 @@ pub async fn pathname_for_path( (PathType::Data, "") => "/index".into(), // `get_path_to` always strips the leading `/` from the path, so we need to add // it back here. - (_, path) => format!("/{}", path).into(), + (_, path) => format!("/{path}").into(), }; Ok(Vc::cell(path)) @@ -84,7 +84,7 @@ pub fn get_asset_prefix_from_pathname(pathname: &str) -> String { if pathname == "/" { "/index".to_string() } else if pathname == "/index" || pathname.starts_with("/index/") { - format!("/index{}", pathname) + format!("/index{pathname}") } else { pathname.to_string() } @@ -243,14 +243,14 @@ impl NextSourceConfigParsingIssue { #[turbo_tasks::value_impl] impl Issue for NextSourceConfigParsingIssue { - #[turbo_tasks::function] - fn severity(&self) -> Vc { - IssueSeverity::Warning.into() + fn severity(&self) -> IssueSeverity { + IssueSeverity::Warning } #[turbo_tasks::function] fn title(&self) -> Vc { - StyledString::Text("Unable to parse config export in source file".into()).cell() + StyledString::Text("Next.js can't recognize the exported `config` field in route".into()) + .cell() } #[turbo_tasks::function] @@ -384,6 +384,9 @@ async fn parse_route_matcher_from_js_value( matcher.original_source = value.into(); } } + Some("locale") => { + matcher.locale = value.as_bool().unwrap_or_default(); + } Some("missing") => { matcher.missing = Some(parse_matcher_kind_matcher(value)) } @@ -431,112 +434,106 @@ pub async fn parse_config_from_source( default_runtime: NextRuntime, ) -> Result> { if let Some(ecmascript_asset) = ResolvedVc::try_sidecast::>(module) - { - if let ParseResult::Ok { + && let ParseResult::Ok { program: Program::Module(module_ast), globals, eval_context, .. 
} = &*ecmascript_asset.parse_original().await? - { - for item in &module_ast.body { - if let Some(decl) = item - .as_module_decl() - .and_then(|mod_decl| mod_decl.as_export_decl()) - .and_then(|export_decl| export_decl.decl.as_var()) - { - for decl in &decl.decls { - let decl_ident = decl.name.as_ident(); - - // Check if there is exported config object `export const config = {...}` - // https://nextjs.org/docs/app/building-your-application/routing/middleware#matcher - if decl_ident - .map(|ident| &*ident.sym == "config") - .unwrap_or_default() - { - if let Some(init) = decl.init.as_ref() { - return WrapFuture::new( - async { - let value = eval_context.eval(init); - Ok(parse_config_from_js_value( - *module, - &value, - default_runtime, - ) + { + for item in &module_ast.body { + if let Some(decl) = item + .as_module_decl() + .and_then(|mod_decl| mod_decl.as_export_decl()) + .and_then(|export_decl| export_decl.decl.as_var()) + { + for decl in &decl.decls { + let decl_ident = decl.name.as_ident(); + + // Check if there is exported config object `export const config = {...}` + // https://nextjs.org/docs/app/building-your-application/routing/middleware#matcher + if decl_ident + .map(|ident| &*ident.sym == "config") + .unwrap_or_default() + { + if let Some(init) = decl.init.as_ref() { + return WrapFuture::new( + async { + let value = eval_context.eval(init); + Ok(parse_config_from_js_value(*module, &value, default_runtime) .await? .cell()) - }, - |f, ctx| GLOBALS.set(globals, || f.poll(ctx)), - ) - .await; - } else { - NextSourceConfigParsingIssue::new( - module.ident(), - StyledString::Text( - "The exported config object must contain an variable \ - initializer." - .into(), - ) - .cell(), - ) - .to_resolved() - .await? - .emit(); - } - } - // Or, check if there is segment runtime option - // https://nextjs.org/docs/app/building-your-application/rendering/edge-and-nodejs-runtimes#segment-runtime-Option - else if decl_ident - .map(|ident| &*ident.sym == "runtime") - .unwrap_or_default() - { - let runtime_value_issue = NextSourceConfigParsingIssue::new( + }, + |f, ctx| GLOBALS.set(globals, || f.poll(ctx)), + ) + .await; + } else { + NextSourceConfigParsingIssue::new( module.ident(), StyledString::Text( - "The runtime property must be either \"nodejs\" or \"edge\"." + "The exported config object must contain an variable \ + initializer." .into(), ) .cell(), ) .to_resolved() - .await?; - if let Some(init) = decl.init.as_ref() { - // skipping eval and directly read the expr's value, as we know it - // should be a const string - if let Expr::Lit(Lit::Str(str_value)) = &**init { - let mut config = NextSourceConfig::default(); - - let runtime = str_value.value.to_string(); - match runtime.as_str() { - "edge" | "experimental-edge" => { - config.runtime = NextRuntime::Edge; - } - "nodejs" => { - config.runtime = NextRuntime::NodeJs; - } - _ => { - runtime_value_issue.emit(); - } + .await? + .emit(); + } + } + // Or, check if there is segment runtime option + // https://nextjs.org/docs/app/building-your-application/rendering/edge-and-nodejs-runtimes#segment-runtime-Option + else if decl_ident + .map(|ident| &*ident.sym == "runtime") + .unwrap_or_default() + { + let runtime_value_issue = NextSourceConfigParsingIssue::new( + module.ident(), + StyledString::Text( + "The runtime property must be either \"nodejs\" or \"edge\"." 
+ .into(), + ) + .cell(), + ) + .to_resolved() + .await?; + if let Some(init) = decl.init.as_ref() { + // skipping eval and directly read the expr's value, as we know it + // should be a const string + if let Expr::Lit(Lit::Str(str_value)) = &**init { + let mut config = NextSourceConfig::default(); + + let runtime = str_value.value.to_string(); + match runtime.as_str() { + "edge" | "experimental-edge" => { + config.runtime = NextRuntime::Edge; + } + "nodejs" => { + config.runtime = NextRuntime::NodeJs; + } + _ => { + runtime_value_issue.emit(); } - - return Ok(config.cell()); - } else { - runtime_value_issue.emit(); } + + return Ok(config.cell()); } else { - NextSourceConfigParsingIssue::new( - module.ident(), - StyledString::Text( - "The exported segment runtime option must contain an \ - variable initializer." - .into(), - ) - .cell(), - ) - .to_resolved() - .await? - .emit(); + runtime_value_issue.emit(); } + } else { + NextSourceConfigParsingIssue::new( + module.ident(), + StyledString::Text( + "The exported segment runtime option must contain an variable \ + initializer." + .into(), + ) + .cell(), + ) + .to_resolved() + .await? + .emit(); } } } @@ -773,7 +770,7 @@ pub async fn load_next_js_template( // variable is missing, throw an error. let mut replaced = FxIndexSet::default(); for (key, replacement) in &replacements { - let full = format!("'{}'", key); + let full = format!("'{key}'"); if content.contains(&full) { replaced.insert(*key); @@ -815,12 +812,12 @@ pub async fn load_next_js_template( // Replace the injections. let mut injected = FxIndexSet::default(); for (key, injection) in &injections { - let full = format!("// INJECT:{}", key); + let full = format!("// INJECT:{key}"); if content.contains(&full) { // Track all the injections to ensure that we're not missing any. injected.insert(*key); - content = content.replace(&full, &format!("const {} = {}", key, injection)); + content = content.replace(&full, &format!("const {key} = {injection}")); } } @@ -858,9 +855,9 @@ pub async fn load_next_js_template( // Replace the optional imports. 
let mut imports_added = FxIndexSet::default(); for (key, import_path) in &imports { - let mut full = format!("// OPTIONAL_IMPORT:{}", key); + let mut full = format!("// OPTIONAL_IMPORT:{key}"); let namespace = if !content.contains(&full) { - full = format!("// OPTIONAL_IMPORT:* as {}", key); + full = format!("// OPTIONAL_IMPORT:* as {key}"); if content.contains(&full) { true } else { @@ -884,7 +881,7 @@ pub async fn load_next_js_template( ), ); } else { - content = content.replace(&full, &format!("const {} = null", key)); + content = content.replace(&full, &format!("const {key} = null")); } } diff --git a/crates/next-custom-transforms/Cargo.toml b/crates/next-custom-transforms/Cargo.toml index ee45de309eac0..636d2608de9a7 100644 --- a/crates/next-custom-transforms/Cargo.toml +++ b/crates/next-custom-transforms/Cargo.toml @@ -10,11 +10,19 @@ plugin = [ "turbopack-ecmascript-plugins/swc_ecma_transform_plugin", ] +[package.metadata.cargo-shear] +ignored = [ + # when using the `plugin` feature, we need to set a feature flag on `turbopack-ecmascript-plugins` + # so we must list it as a dependency even though we don't directly use it + "turbopack-ecmascript-plugins", +] + [lints] workspace = true [dependencies] base64 = "0.21.0" +bytes-str = { workspace = true } chrono = "0.4" easy-error = "1.0.0" either = "1" @@ -25,18 +33,14 @@ once_cell = { workspace = true } pathdiff = { workspace = true } regex = "1.5" rustc-hash = { workspace = true } -swc-rustc-hash = { workspace = true } serde = { workspace = true } serde_json = { workspace = true, features = ["preserve_order"] } sha1 = "0.10.1" tracing = { version = "0.1.37" } anyhow = { workspace = true } -lazy_static = { workspace = true } -dashmap = "6.1.0" swc_core = { workspace = true, features = [ "base", - "cached", "common_concurrent", "ecma_ast", "ecma_codegen", @@ -63,11 +67,10 @@ turbopack-ecmascript-plugins = { workspace = true, optional = true } turbo-rcstr = { workspace = true } urlencoding = { workspace = true } -react_remove_properties = "0.34.0" -remove_console = "0.35.0" -preset_env_base = "2.0.1" +react_remove_properties = "0.43.0" +remove_console = "0.44.0" +preset_env_base = "3.0.2" [dev-dependencies] -swc_core = { workspace = true, features = ["testing_transform"]} +swc_core = { workspace = true, features = ["testing_transform"] } testing = { workspace = true } -walkdir = "2.3.2" diff --git a/crates/next-custom-transforms/src/chain_transforms.rs b/crates/next-custom-transforms/src/chain_transforms.rs index 7424596f5f66e..1f303bcea0ffe 100644 --- a/crates/next-custom-transforms/src/chain_transforms.rs +++ b/crates/next-custom-transforms/src/chain_transforms.rs @@ -121,6 +121,9 @@ pub struct TransformOptions { #[serde(default)] pub css_env: Option, + + #[serde(default)] + pub track_dynamic_imports: bool, } pub fn custom_before_pass<'a, C>( @@ -165,7 +168,7 @@ where .css_env .as_ref() .map(|env| { - targets_to_versions(env.targets.clone()) + targets_to_versions(env.targets.clone(), None) .expect("failed to parse env.targets") }) .unwrap_or_default(); @@ -333,6 +336,14 @@ where )), None => Either::Right(noop_pass()), }, + match &opts.track_dynamic_imports { + true => Either::Left( + crate::transforms::track_dynamic_imports::track_dynamic_imports( + unresolved_mark, + ), + ), + false => Either::Right(noop_pass()), + }, match &opts.cjs_require_optimizer { Some(config) => Either::Left(visit_mut_pass( crate::transforms::cjs_optimizer::cjs_optimizer( diff --git a/crates/next-custom-transforms/src/lib.rs 
b/crates/next-custom-transforms/src/lib.rs index 0e2a59fd72833..258658e40c2db 100644 --- a/crates/next-custom-transforms/src/lib.rs +++ b/crates/next-custom-transforms/src/lib.rs @@ -39,6 +39,7 @@ use rustc_hash::FxHasher; pub mod chain_transforms; mod linter; +pub mod react_compiler; pub mod transforms; type FxIndexMap = IndexMap>; diff --git a/crates/next-custom-transforms/src/react_compiler.rs b/crates/next-custom-transforms/src/react_compiler.rs new file mode 100644 index 0000000000000..0227d439e4ff5 --- /dev/null +++ b/crates/next-custom-transforms/src/react_compiler.rs @@ -0,0 +1,283 @@ +use swc_core::ecma::{ + ast::{ + Callee, ExportDefaultDecl, ExportDefaultExpr, Expr, FnDecl, FnExpr, Pat, Program, Stmt, + VarDeclarator, + }, + visit::{Visit, VisitWith}, +}; + +pub fn is_required(program: &Program) -> bool { + let mut finder = Finder::default(); + finder.visit_program(program); + finder.found +} + +#[derive(Default)] +struct Finder { + found: bool, + + /// We are in a function that starts with a capital letter or it's a function that starts with + /// `use` + is_interested: bool, +} + +impl Visit for Finder { + fn visit_callee(&mut self, node: &Callee) { + if self.is_interested { + if let Callee::Expr(e) = node { + if let Expr::Ident(c) = &**e { + if c.sym.starts_with("use") { + self.found = true; + return; + } + } + } + } + + node.visit_children_with(self); + } + + fn visit_export_default_decl(&mut self, node: &ExportDefaultDecl) { + let old = self.is_interested; + + self.is_interested = true; + + node.visit_children_with(self); + + self.is_interested = old; + } + + fn visit_export_default_expr(&mut self, node: &ExportDefaultExpr) { + let old = self.is_interested; + + self.is_interested = true; + + node.visit_children_with(self); + + self.is_interested = old; + } + + fn visit_expr(&mut self, node: &Expr) { + if self.found { + return; + } + if self.is_interested + && matches!( + node, + Expr::JSXMember(..) + | Expr::JSXNamespacedName(..) + | Expr::JSXEmpty(..) + | Expr::JSXElement(..) + | Expr::JSXFragment(..) + ) + { + self.found = true; + return; + } + + node.visit_children_with(self); + } + + fn visit_fn_decl(&mut self, node: &FnDecl) { + let old = self.is_interested; + + self.is_interested = node.ident.sym.starts_with("use") + || node.ident.sym.starts_with(|c: char| c.is_ascii_uppercase()); + + node.visit_children_with(self); + + self.is_interested = old; + } + + fn visit_fn_expr(&mut self, node: &FnExpr) { + let old = self.is_interested; + + self.is_interested |= node.ident.as_ref().is_some_and(|ident| { + ident.sym.starts_with("use") || ident.sym.starts_with(|c: char| c.is_ascii_uppercase()) + }); + + node.visit_children_with(self); + + self.is_interested = old; + } + + fn visit_stmt(&mut self, node: &Stmt) { + if self.found { + return; + } + node.visit_children_with(self); + } + + fn visit_var_declarator(&mut self, node: &VarDeclarator) { + let old = self.is_interested; + + if matches!(node.init.as_deref(), Some(Expr::Fn(..) 
| Expr::Arrow(..))) { + if let Pat::Ident(ident) = &node.name { + self.is_interested = ident.sym.starts_with("use") + || ident.sym.starts_with(|c: char| c.is_ascii_uppercase()); + } else { + self.is_interested = false; + } + } + + node.visit_children_with(self); + + self.is_interested = old; + } +} + +#[cfg(test)] +mod tests { + use swc_core::{ + common::FileName, + ecma::parser::{parse_file_as_program, EsSyntax}, + }; + use testing::run_test2; + + use super::*; + + fn assert_required(code: &str, required: bool) { + run_test2(false, |cm, _| { + let fm = + cm.new_source_file(FileName::Custom("test.tsx".into()).into(), code.to_string()); + + let program = parse_file_as_program( + &fm, + swc_core::ecma::parser::Syntax::Es(EsSyntax { + jsx: true, + ..Default::default() + }), + Default::default(), + Default::default(), + &mut vec![], + ) + .unwrap(); + + assert_eq!(is_required(&program), required); + + Ok(()) + }) + .unwrap(); + } + + #[test] + fn lazy_return() { + assert_required( + " + function Foo() { + const a =
<div>Hello</div>
; + + return a + } + ", + true, + ); + + assert_required( + " + function Foo() { + ", + false, + ); + } + + #[test] + fn return_jsx() { + assert_required( + " + function Foo() { + return
<div>Hello</div>
; + } + ", + true, + ); + } + + #[test] + fn use_hooks() { + assert_required( + " + function Foo(props) { + const [a, b] = useState(0); + + return props.children; + } + ", + true, + ); + } + + #[test] + fn arrow_function() { + assert_required( + " + const Foo = () =>
<div>Hello</div>
; + ", + true, + ); + + assert_required( + " + const Foo = () => { + return
<div>Hello</div>
; + }; + ", + true, + ); + } + + #[test] + fn export_const_arrow_function() { + assert_required( + " + export const Foo = () =>
<div>Hello</div>
; + ", + true, + ); + + assert_required( + " + export const Foo = () => { + return
<div>Hello</div>
; + }; + ", + true, + ); + } + + #[test] + fn normal_arrow_function() { + assert_required( + " + const Foo = () => { + const a = 1; + console.log(a); + }; + ", + false, + ); + } + + #[test] + fn export_default_arrow_function() { + assert_required( + " + export default () =>
<div>Hello</div>
; + ", + true, + ); + } + + #[test] + fn not_required_arrow_function() { + assert_required( + " + export default () => { + const a = 1; + console.log(a); + }; + ", + false, + ); + } +} diff --git a/crates/next-custom-transforms/src/transforms/mod.rs b/crates/next-custom-transforms/src/transforms/mod.rs index 71618dd378b24..529a59180c134 100644 --- a/crates/next-custom-transforms/src/transforms/mod.rs +++ b/crates/next-custom-transforms/src/transforms/mod.rs @@ -18,6 +18,7 @@ pub mod react_server_components; pub mod server_actions; pub mod shake_exports; pub mod strip_page_exports; +pub mod track_dynamic_imports; pub mod warn_for_edge_runtime; //[TODO] PACK-1564: need to decide reuse vs. turbopack specific diff --git a/crates/next-custom-transforms/src/transforms/page_static_info/collect_exported_const_visitor.rs b/crates/next-custom-transforms/src/transforms/page_static_info/collect_exported_const_visitor.rs index 18230df31e3db..01c30f2947cd3 100644 --- a/crates/next-custom-transforms/src/transforms/page_static_info/collect_exported_const_visitor.rs +++ b/crates/next-custom-transforms/src/transforms/page_static_info/collect_exported_const_visitor.rs @@ -1,15 +1,13 @@ -use rustc_hash::{FxHashMap, FxHashSet}; use serde_json::{Map, Number, Value}; use swc_core::{ atoms::Atom, common::{Mark, SyntaxContext}, ecma::{ ast::{ - BindingIdent, Decl, ExportDecl, Expr, Lit, ModuleDecl, ModuleItem, Pat, Prop, PropName, - PropOrSpread, VarDecl, VarDeclKind, VarDeclarator, + BindingIdent, Decl, ExportDecl, Expr, Lit, Module, ModuleDecl, ModuleItem, Pat, + Program, Prop, PropName, PropOrSpread, VarDecl, VarDeclKind, VarDeclarator, }, utils::{ExprCtx, ExprExt}, - visit::{Visit, VisitWith}, }, }; @@ -21,18 +19,21 @@ pub enum Const { Unsupported(String), } -pub(crate) struct CollectExportedConstVisitor { - pub properties: FxHashMap>, +pub(crate) struct CollectExportedConstVisitor<'a, M> +where + M: GetMut>, +{ + pub properties: &'a mut M, expr_ctx: ExprCtx, } -impl CollectExportedConstVisitor { - pub fn new(properties_to_extract: FxHashSet) -> Self { +impl<'a, M> CollectExportedConstVisitor<'a, M> +where + M: GetMut>, +{ + pub fn new(properties: &'a mut M) -> Self { Self { - properties: properties_to_extract - .into_iter() - .map(|p| (p, None)) - .collect(), + properties, expr_ctx: ExprCtx { unresolved_ctxt: SyntaxContext::empty().apply_mark(Mark::new()), is_unresolved_ref_safe: false, @@ -41,11 +42,13 @@ impl CollectExportedConstVisitor { }, } } -} -impl Visit for CollectExportedConstVisitor { - fn visit_module_items(&mut self, module_items: &[ModuleItem]) { - for module_item in module_items { + pub fn check_program(&mut self, program: &Program) { + let Program::Module(Module { body, .. }) = program else { + return; + }; + + for module_item in body { if let ModuleItem::ModuleDecl(ModuleDecl::ExportDecl(ExportDecl { decl: Decl::Var(decl), .. @@ -69,11 +72,13 @@ impl Visit for CollectExportedConstVisitor { } } } - - module_items.visit_children_with(self); } } +pub trait GetMut { + fn get_mut(&mut self, key: &K) -> Option<&mut V>; +} + /// Coerece the actual value of the given ast node. 
fn extract_value(ctx: ExprCtx, init: &Expr, id: String) -> Option { match init { diff --git a/crates/next-custom-transforms/src/transforms/page_static_info/collect_exports_visitor.rs b/crates/next-custom-transforms/src/transforms/page_static_info/collect_exports_visitor.rs index 294f8f93c0403..f1cc24521a17b 100644 --- a/crates/next-custom-transforms/src/transforms/page_static_info/collect_exports_visitor.rs +++ b/crates/next-custom-transforms/src/transforms/page_static_info/collect_exports_visitor.rs @@ -1,6 +1,5 @@ -use std::iter::FromIterator; +use std::{iter::FromIterator, sync::LazyLock}; -use lazy_static::lazy_static; use rustc_hash::FxHashSet; use swc_core::{ atoms::atom, @@ -15,15 +14,15 @@ use swc_core::{ use super::{ExportInfo, ExportInfoWarning}; -lazy_static! { - static ref EXPORTS_SET: FxHashSet<&'static str> = FxHashSet::from_iter([ +static EXPORTS_SET: LazyLock> = LazyLock::new(|| { + FxHashSet::from_iter([ "getStaticProps", "getServerSideProps", "generateImageMetadata", "generateSitemaps", "generateStaticParams", - ]); -} + ]) +}); pub(crate) struct CollectExportsVisitor { pub export_info: Option, diff --git a/crates/next-custom-transforms/src/transforms/page_static_info/mod.rs b/crates/next-custom-transforms/src/transforms/page_static_info/mod.rs index becb7b4be6c97..863bd1b446512 100644 --- a/crates/next-custom-transforms/src/transforms/page_static_info/mod.rs +++ b/crates/next-custom-transforms/src/transforms/page_static_info/mod.rs @@ -3,7 +3,7 @@ pub use collect_exported_const_visitor::Const; use collect_exports_visitor::CollectExportsVisitor; use once_cell::sync::Lazy; use regex::Regex; -use rustc_hash::{FxHashMap, FxHashSet}; +use rustc_hash::FxHashSet; use serde::{Deserialize, Serialize}; use swc_core::{ atoms::Atom, @@ -12,6 +12,8 @@ use swc_core::{ ecma::{ast::Program, visit::VisitWith}, }; +use crate::transforms::page_static_info::collect_exported_const_visitor::GetMut; + pub mod collect_exported_const_visitor; pub mod collect_exports_visitor; @@ -209,15 +211,13 @@ pub fn collect_rsc_module_info( /// error. pub fn extract_exported_const_values( source_ast: &Program, - properties_to_extract: FxHashSet, -) -> FxHashMap> { + properties_to_extract: &mut impl GetMut>, +) { GLOBALS.set(&Default::default(), || { let mut visitor = collect_exported_const_visitor::CollectExportedConstVisitor::new(properties_to_extract); - source_ast.visit_with(&mut visitor); - - visitor.properties + visitor.check_program(source_ast); }) } @@ -286,6 +286,7 @@ mod tests { ) }, ) + .map_err(|e| e.to_pretty_error()) .map(|p| (p, comments)) }) } diff --git a/crates/next-custom-transforms/src/transforms/react_server_components.rs b/crates/next-custom-transforms/src/transforms/react_server_components.rs index 86db7ad4ca6c6..1f0f250429502 100644 --- a/crates/next-custom-transforms/src/transforms/react_server_components.rs +++ b/crates/next-custom-transforms/src/transforms/react_server_components.rs @@ -315,7 +315,7 @@ fn report_error(app_dir: &Option, filepath: &str, error_kind: RSCErrorK let msg = if source == "Component" { "You’re importing a class component. It only works in a Client Component but none of its parents are marked with \"use client\", so they're Server Components by default.\nLearn more: https://nextjs.org/docs/app/building-your-application/rendering/client-components\n\n".to_string() } else { - format!("You're importing a component that needs `{source}`. This React hook only works in a client component. 
To fix, mark the file (or its parent) with the `\"use client\"` directive.\n\n Learn more: https://nextjs.org/docs/app/api-reference/directives/use-client\n\n") + format!("You're importing a component that needs `{source}`. This React Hook only works in a Client Component. To fix, mark the file (or its parent) with the `\"use client\"` directive.\n\n Learn more: https://nextjs.org/docs/app/api-reference/directives/use-client\n\n") }; (msg, vec![span]) @@ -346,12 +346,12 @@ fn report_error(app_dir: &Option, filepath: &str, error_kind: RSCErrorK _ => (format!("\"{source}\" is deprecated."), vec![span]), }, RSCErrorKind::NextSsrDynamicFalseNotAllowed(span) => ( - "`ssr: false` is not allowed with `next/dynamic` in Server Components. Please move it into a client component." + "`ssr: false` is not allowed with `next/dynamic` in Server Components. Please move it into a Client Component." .to_string(), vec![span], ), RSCErrorKind::NextRscErrIncompatibleRouteSegmentConfig(span, segment, property) => ( - format!("Route segment config \"{}\" is not compatible with `nextConfig.{}`. Please remove it.", segment, property), + format!("Route segment config \"{segment}\" is not compatible with `nextConfig.{property}`. Please remove it."), vec![span], ), }; @@ -640,7 +640,7 @@ impl ReactServerComponentValidator { invalid_client_imports: vec![Atom::from("server-only"), Atom::from("next/headers")], invalid_client_lib_apis_mapping: FxHashMap::from_iter([ - ("next/server", vec!["after"]), + ("next/server", vec!["after", "unstable_rootParams"]), ( "next/cache", vec![ diff --git a/crates/next-custom-transforms/src/transforms/server_actions.rs b/crates/next-custom-transforms/src/transforms/server_actions.rs index 5dabf880215f5..e208ffa37d589 100644 --- a/crates/next-custom-transforms/src/transforms/server_actions.rs +++ b/crates/next-custom-transforms/src/transforms/server_actions.rs @@ -32,7 +32,7 @@ use swc_core::{ }, quote, }; -use turbo_rcstr::RcStr; +use turbo_rcstr::{rcstr, RcStr}; use crate::FxIndexMap; @@ -1011,6 +1011,12 @@ impl VisitMut for ServerActions { self.fn_decl_ident = old_fn_decl_ident; } + let mut child_names = take(&mut self.names); + + if self.should_track_names { + self.names = [old_names, child_names.clone()].concat(); + } + if let Some(directive) = directive { if !f.is_async { emit_error(ServerActionsErrorKind::InlineSyncFunction { @@ -1028,12 +1034,6 @@ impl VisitMut for ServerActions { return; } - let mut child_names = take(&mut self.names); - - if self.should_track_names { - self.names = [old_names, child_names.clone()].concat(); - } - if let Directive::UseCache { cache_kind } = directive { // Collect all the identifiers defined inside the closure and used // in the cache function. With deduplication. @@ -1181,6 +1181,12 @@ impl VisitMut for ServerActions { self.in_default_export_decl = old_in_default_export_decl; } + let mut child_names = take(&mut self.names); + + if self.should_track_names { + self.names = [old_names, child_names.clone()].concat(); + } + if let Some(directive) = directive { if !a.is_async { emit_error(ServerActionsErrorKind::InlineSyncFunction { @@ -1199,12 +1205,6 @@ impl VisitMut for ServerActions { return; } - let mut child_names = take(&mut self.names); - - if self.should_track_names { - self.names = [old_names, child_names.clone()].concat(); - } - // Collect all the identifiers defined inside the closure and used // in the action function. With deduplication. 
retain_names_from_declared_idents( @@ -1539,6 +1539,9 @@ impl VisitMut for ServerActions { } } } + Decl::TsInterface(_) => {} + Decl::TsTypeAlias(_) => {} + Decl::TsEnum(_) => {} _ => { disallowed_export_span = *span; } @@ -1661,6 +1664,7 @@ impl VisitMut for ServerActions { } } } + DefaultDecl::TsInterfaceDecl(_) => {} _ => { disallowed_export_span = *span; } @@ -2774,7 +2778,7 @@ impl DirectiveVisitor<'_> { if value == "use cache" { self.directive = Some(Directive::UseCache { - cache_kind: RcStr::from("default"), + cache_kind: rcstr!("default"), }); self.increment_cache_usage_counter("default"); } else { @@ -3257,7 +3261,7 @@ fn program_to_data_url( } } - let map = cm.build_source_map_with_config( + let map = cm.build_source_map( &mappings, None, InlineSourcesContentConfig { diff --git a/crates/next-custom-transforms/src/transforms/track_dynamic_imports.rs b/crates/next-custom-transforms/src/transforms/track_dynamic_imports.rs new file mode 100644 index 0000000000000..fc2ba0fb441fb --- /dev/null +++ b/crates/next-custom-transforms/src/transforms/track_dynamic_imports.rs @@ -0,0 +1,124 @@ +use swc_core::{ + common::{source_map::PURE_SP, util::take::Take, Mark, SyntaxContext}, + ecma::{ + ast::*, + utils::{prepend_stmt, private_ident, quote_ident, quote_str}, + visit::{noop_visit_mut_type, visit_mut_pass, VisitMut, VisitMutWith}, + }, + quote, +}; + +pub fn track_dynamic_imports(unresolved_mark: Mark) -> impl VisitMut + Pass { + visit_mut_pass(ImportReplacer::new(unresolved_mark)) +} + +struct ImportReplacer { + unresolved_ctxt: SyntaxContext, + has_dynamic_import: bool, + wrapper_function_local_ident: Ident, +} + +impl ImportReplacer { + pub fn new(unresolved_mark: Mark) -> Self { + ImportReplacer { + unresolved_ctxt: SyntaxContext::empty().apply_mark(unresolved_mark), + has_dynamic_import: false, + wrapper_function_local_ident: private_ident!("$$trackDynamicImport__"), + } + } +} + +impl VisitMut for ImportReplacer { + noop_visit_mut_type!(); + + fn visit_mut_program(&mut self, program: &mut Program) { + program.visit_mut_children_with(self); + // if we wrapped a dynamic import while visiting the children, we need to import the wrapper + + if self.has_dynamic_import { + let import_args = MakeNamedImportArgs { + original_ident: quote_ident!("trackDynamicImport").into(), + local_ident: self.wrapper_function_local_ident.clone(), + source: "private-next-rsc-track-dynamic-import", + unresolved_ctxt: self.unresolved_ctxt, + }; + match program { + Program::Module(module) => { + prepend_stmt(&mut module.body, make_named_import_esm(import_args)); + } + Program::Script(script) => { + // CJS modules can still use `import()`. for CJS, we have to inject the helper + // using `require` instead of `import` to avoid accidentally turning them + // into ESM modules. + prepend_stmt(&mut script.body, make_named_import_cjs(import_args)); + } + } + } + } + + fn visit_mut_expr(&mut self, expr: &mut Expr) { + expr.visit_mut_children_with(self); + + // before: `import(...)` + // after: `$$trackDynamicImport__(import(...))` + + if let Expr::Call(CallExpr { + callee: Callee::Import(_), + .. 
+ }) = expr + { + self.has_dynamic_import = true; + let replacement_expr = quote!( + "$wrapper_fn($expr)" as Expr, + wrapper_fn = self.wrapper_function_local_ident.clone(), + expr: Expr = expr.take() + ) + .with_span(PURE_SP); + *expr = replacement_expr + } + } +} + +struct MakeNamedImportArgs<'a> { + original_ident: Ident, + local_ident: Ident, + source: &'a str, + unresolved_ctxt: SyntaxContext, +} + +fn make_named_import_esm(args: MakeNamedImportArgs) -> ModuleItem { + let MakeNamedImportArgs { + original_ident, + local_ident, + source, + .. + } = args; + let mut item = quote!( + "import { $original_ident as $local_ident } from 'dummy'" as ModuleItem, + original_ident = original_ident, + local_ident = local_ident, + ); + // the import source cannot be parametrized in `quote!()`, so patch it manually + let decl = item.as_mut_module_decl().unwrap().as_mut_import().unwrap(); + decl.src = Box::new(source.into()); + item +} + +fn make_named_import_cjs(args: MakeNamedImportArgs) -> Stmt { + let MakeNamedImportArgs { + original_ident, + local_ident, + source, + unresolved_ctxt, + } = args; + quote!( + "const { [$original_name]: $local_ident } = $require($source)" as Stmt, + original_name: Expr = quote_str!(original_ident.sym).into(), + local_ident = local_ident, + source: Expr = quote_str!(source).into(), + // the builtin `require` is considered an unresolved identifier. + // we have to match that, or it won't be recognized as + // a proper `require()` call. + require = quote_ident!(unresolved_ctxt, "require") + ) +} diff --git a/crates/next-custom-transforms/src/transforms/warn_for_edge_runtime.rs b/crates/next-custom-transforms/src/transforms/warn_for_edge_runtime.rs index d320e1910fcd1..2f23c02d3f9d4 100644 --- a/crates/next-custom-transforms/src/transforms/warn_for_edge_runtime.rs +++ b/crates/next-custom-transforms/src/transforms/warn_for_edge_runtime.rs @@ -28,6 +28,42 @@ pub fn warn_for_edge_runtime( guarded_process_props: Default::default(), guarded_runtime: false, is_production, + emit_warn: |span: Span, msg: String| { + HANDLER.with(|h| { + h.struct_span_warn(span, &msg).emit(); + }); + }, + emit_error: |span: Span, msg: String| { + HANDLER.with(|h| { + h.struct_span_err(span, &msg).emit(); + }); + }, + } +} + +pub fn warn_for_edge_runtime_with_handlers( + cm: Arc, + ctx: ExprCtx, + should_error_for_node_apis: bool, + is_production: bool, + emit_warn: EmitWarn, + emit_error: EmitError, +) -> impl Visit +where + EmitWarn: Fn(Span, String), + EmitError: Fn(Span, String), +{ + WarnForEdgeRuntime { + cm, + ctx, + should_error_for_node_apis, + should_add_guards: false, + guarded_symbols: Default::default(), + guarded_process_props: Default::default(), + guarded_runtime: false, + is_production, + emit_warn, + emit_error, } } @@ -41,7 +77,7 @@ pub fn warn_for_edge_runtime( /// ```js /// if(typeof clearImmediate !== "function") clearImmediate(); /// ``` -struct WarnForEdgeRuntime { +struct WarnForEdgeRuntime { cm: Arc, ctx: ExprCtx, should_error_for_node_apis: bool, @@ -52,6 +88,8 @@ struct WarnForEdgeRuntime { // for process.env.NEXT_RUNTIME guarded_runtime: bool, is_production: bool, + emit_warn: EmitWarn, + emit_error: EmitError, } const EDGE_UNSUPPORTED_NODE_APIS: &[&str] = &[ @@ -144,7 +182,11 @@ const NODEJS_MODULE_NAMES: &[&str] = &[ "zlib", ]; -impl WarnForEdgeRuntime { +impl WarnForEdgeRuntime +where + EmitWarn: Fn(Span, String), + EmitError: Fn(Span, String), +{ fn warn_if_nodejs_module(&self, span: Span, module_specifier: &str) -> Option<()> { if self.guarded_runtime { return 
None; @@ -162,9 +204,7 @@ Learn More: https://nextjs.org/docs/messages/node-module-in-edge-runtime", loc.line + 1 ); - HANDLER.with(|h| { - h.struct_span_warn(span, &msg).emit(); - }); + (self.emit_warn)(span, msg); } None @@ -189,13 +229,11 @@ Learn more: https://nextjs.org/docs/api-reference/edge-runtime", loc.line + 1 ); - HANDLER.with(|h| { - if self.should_error_for_node_apis { - h.struct_span_err(span, &msg).emit(); - } else { - h.struct_span_warn(span, &msg).emit(); - } - }); + if self.should_error_for_node_apis { + (self.emit_error)(span, msg); + } else { + (self.emit_warn)(span, msg); + } None } @@ -266,9 +304,7 @@ Learn more: https://nextjs.org/docs/api-reference/edge-runtime", 'WebAssembly.compile') not allowed in Edge Runtime" .to_string(); - HANDLER.with(|h| { - h.struct_span_err(span, &msg).emit(); - }); + (self.emit_error)(span, msg); } } @@ -284,7 +320,11 @@ Learn more: https://nextjs.org/docs/api-reference/edge-runtime", } } -impl Visit for WarnForEdgeRuntime { +impl Visit for WarnForEdgeRuntime +where + EmitWarn: Fn(Span, String), + EmitError: Fn(Span, String), +{ fn visit_call_expr(&mut self, n: &CallExpr) { n.visit_children_with(self); diff --git a/crates/next-custom-transforms/tests/errors/react-server-components/client-graph/root-params/input.js b/crates/next-custom-transforms/tests/errors/react-server-components/client-graph/root-params/input.js new file mode 100644 index 0000000000000..90bb710baa1d3 --- /dev/null +++ b/crates/next-custom-transforms/tests/errors/react-server-components/client-graph/root-params/input.js @@ -0,0 +1,13 @@ +// This is a comment. + +'use strict' + +/** + * This is a comment. + */ + +import { unstable_rootParams } from 'next/server' + +export default function () { + return null +} diff --git a/crates/next-custom-transforms/tests/errors/react-server-components/client-graph/root-params/output.js b/crates/next-custom-transforms/tests/errors/react-server-components/client-graph/root-params/output.js new file mode 100644 index 0000000000000..ce04f4969bcce --- /dev/null +++ b/crates/next-custom-transforms/tests/errors/react-server-components/client-graph/root-params/output.js @@ -0,0 +1,8 @@ +// This is a comment. +'use strict'; +/** + * This is a comment. + */ import { unstable_rootParams } from 'next/server'; +export default function() { + return null; +} diff --git a/crates/next-custom-transforms/tests/errors/react-server-components/client-graph/root-params/output.stderr b/crates/next-custom-transforms/tests/errors/react-server-components/client-graph/root-params/output.stderr new file mode 100644 index 0000000000000..268d0f94149e5 --- /dev/null +++ b/crates/next-custom-transforms/tests/errors/react-server-components/client-graph/root-params/output.stderr @@ -0,0 +1,9 @@ + x You're importing a component that needs "unstable_rootParams". That only works in a Server Component which is not supported in the pages/ directory. 
Read more: https://nextjs.org/docs/app/ + | building-your-application/rendering/server-components + | + | + ,-[input.js:9:1] + 8 | + 9 | import { unstable_rootParams } from 'next/server' + : ^^^^^^^^^^^^^^^^^^^ + `---- diff --git a/crates/next-custom-transforms/tests/errors/react-server-components/server-graph/dynamic-ssr-false/output.stderr b/crates/next-custom-transforms/tests/errors/react-server-components/server-graph/dynamic-ssr-false/output.stderr index 7df6cc022eb2a..8f0d8d1467899 100644 --- a/crates/next-custom-transforms/tests/errors/react-server-components/server-graph/dynamic-ssr-false/output.stderr +++ b/crates/next-custom-transforms/tests/errors/react-server-components/server-graph/dynamic-ssr-false/output.stderr @@ -1,4 +1,4 @@ - x `ssr: false` is not allowed with `next/dynamic` in Server Components. Please move it into a client component. + x `ssr: false` is not allowed with `next/dynamic` in Server Components. Please move it into a Client Component. ,-[input.js:4:1] 3 | export default function () { 4 | return dynamic(() => import('client-only'), { ssr: false }) diff --git a/crates/next-custom-transforms/tests/errors/react-server-components/server-graph/react-api/output.stderr b/crates/next-custom-transforms/tests/errors/react-server-components/server-graph/react-api/output.stderr index a790809a5fc7c..18b46a82af488 100644 --- a/crates/next-custom-transforms/tests/errors/react-server-components/server-graph/react-api/output.stderr +++ b/crates/next-custom-transforms/tests/errors/react-server-components/server-graph/react-api/output.stderr @@ -1,4 +1,4 @@ - x You're importing a component that needs `useState`. This React hook only works in a client component. To fix, mark the file (or its parent) with the `"use client"` directive. + x You're importing a component that needs `useState`. This React Hook only works in a Client Component. To fix, mark the file (or its parent) with the `"use client"` directive. | | Learn more: https://nextjs.org/docs/app/api-reference/directives/use-client | @@ -7,7 +7,7 @@ 1 | import { useState } from 'react' : ^^^^^^^^ `---- - x You're importing a component that needs `createContext`. This React hook only works in a client component. To fix, mark the file (or its parent) with the `"use client"` directive. + x You're importing a component that needs `createContext`. This React Hook only works in a Client Component. To fix, mark the file (or its parent) with the `"use client"` directive. | | Learn more: https://nextjs.org/docs/app/api-reference/directives/use-client | @@ -17,7 +17,7 @@ 3 | import { createContext } from 'react' : ^^^^^^^^^^^^^ `---- - x You're importing a component that needs `useEffect`. This React hook only works in a client component. To fix, mark the file (or its parent) with the `"use client"` directive. + x You're importing a component that needs `useEffect`. This React Hook only works in a Client Component. To fix, mark the file (or its parent) with the `"use client"` directive. | | Learn more: https://nextjs.org/docs/app/api-reference/directives/use-client | @@ -27,7 +27,7 @@ 5 | import { useEffect, useImperativeHandle } from 'react' : ^^^^^^^^^ `---- - x You're importing a component that needs `useImperativeHandle`. This React hook only works in a client component. To fix, mark the file (or its parent) with the `"use client"` directive. + x You're importing a component that needs `useImperativeHandle`. This React Hook only works in a Client Component. To fix, mark the file (or its parent) with the `"use client"` directive. 
| | Learn more: https://nextjs.org/docs/app/api-reference/directives/use-client | @@ -47,7 +47,7 @@ : ^^^^^^^^^ 9 | createFactory, `---- - x You're importing a component that needs `createFactory`. This React hook only works in a client component. To fix, mark the file (or its parent) with the `"use client"` directive. + x You're importing a component that needs `createFactory`. This React Hook only works in a Client Component. To fix, mark the file (or its parent) with the `"use client"` directive. | | Learn more: https://nextjs.org/docs/app/api-reference/directives/use-client | @@ -58,7 +58,7 @@ : ^^^^^^^^^^^^^ 10 | PureComponent, `---- - x You're importing a component that needs `PureComponent`. This React hook only works in a client component. To fix, mark the file (or its parent) with the `"use client"` directive. + x You're importing a component that needs `PureComponent`. This React Hook only works in a Client Component. To fix, mark the file (or its parent) with the `"use client"` directive. | | Learn more: https://nextjs.org/docs/app/api-reference/directives/use-client | @@ -69,7 +69,7 @@ : ^^^^^^^^^^^^^ 11 | useDeferredValue, `---- - x You're importing a component that needs `useDeferredValue`. This React hook only works in a client component. To fix, mark the file (or its parent) with the `"use client"` directive. + x You're importing a component that needs `useDeferredValue`. This React Hook only works in a Client Component. To fix, mark the file (or its parent) with the `"use client"` directive. | | Learn more: https://nextjs.org/docs/app/api-reference/directives/use-client | @@ -80,7 +80,7 @@ : ^^^^^^^^^^^^^^^^ 12 | useInsertionEffect, `---- - x You're importing a component that needs `useInsertionEffect`. This React hook only works in a client component. To fix, mark the file (or its parent) with the `"use client"` directive. + x You're importing a component that needs `useInsertionEffect`. This React Hook only works in a Client Component. To fix, mark the file (or its parent) with the `"use client"` directive. | | Learn more: https://nextjs.org/docs/app/api-reference/directives/use-client | @@ -91,7 +91,7 @@ : ^^^^^^^^^^^^^^^^^^ 13 | useLayoutEffect, `---- - x You're importing a component that needs `useLayoutEffect`. This React hook only works in a client component. To fix, mark the file (or its parent) with the `"use client"` directive. + x You're importing a component that needs `useLayoutEffect`. This React Hook only works in a Client Component. To fix, mark the file (or its parent) with the `"use client"` directive. | | Learn more: https://nextjs.org/docs/app/api-reference/directives/use-client | @@ -102,7 +102,7 @@ : ^^^^^^^^^^^^^^^ 14 | useReducer, `---- - x You're importing a component that needs `useReducer`. This React hook only works in a client component. To fix, mark the file (or its parent) with the `"use client"` directive. + x You're importing a component that needs `useReducer`. This React Hook only works in a Client Component. To fix, mark the file (or its parent) with the `"use client"` directive. | | Learn more: https://nextjs.org/docs/app/api-reference/directives/use-client | @@ -113,7 +113,7 @@ : ^^^^^^^^^^ 15 | useRef, `---- - x You're importing a component that needs `useRef`. This React hook only works in a client component. To fix, mark the file (or its parent) with the `"use client"` directive. + x You're importing a component that needs `useRef`. This React Hook only works in a Client Component. 
To fix, mark the file (or its parent) with the `"use client"` directive. | | Learn more: https://nextjs.org/docs/app/api-reference/directives/use-client | @@ -124,7 +124,7 @@ : ^^^^^^ 16 | useSyncExternalStore, `---- - x You're importing a component that needs `useSyncExternalStore`. This React hook only works in a client component. To fix, mark the file (or its parent) with the `"use client"` directive. + x You're importing a component that needs `useSyncExternalStore`. This React Hook only works in a Client Component. To fix, mark the file (or its parent) with the `"use client"` directive. | | Learn more: https://nextjs.org/docs/app/api-reference/directives/use-client | @@ -135,7 +135,7 @@ : ^^^^^^^^^^^^^^^^^^^^ 17 | } from 'react' `---- - x You're importing a component that needs `experimental_useOptimistic`. This React hook only works in a client component. To fix, mark the file (or its parent) with the `"use client"` directive. + x You're importing a component that needs `experimental_useOptimistic`. This React Hook only works in a Client Component. To fix, mark the file (or its parent) with the `"use client"` directive. | | Learn more: https://nextjs.org/docs/app/api-reference/directives/use-client | diff --git a/crates/next-custom-transforms/tests/errors/react-server-components/server-graph/react-dom-api/output.stderr b/crates/next-custom-transforms/tests/errors/react-server-components/server-graph/react-dom-api/output.stderr index 7b94cb4d192f5..7a0f79c0052b1 100644 --- a/crates/next-custom-transforms/tests/errors/react-server-components/server-graph/react-dom-api/output.stderr +++ b/crates/next-custom-transforms/tests/errors/react-server-components/server-graph/react-dom-api/output.stderr @@ -1,4 +1,4 @@ - x You're importing a component that needs `flushSync`. This React hook only works in a client component. To fix, mark the file (or its parent) with the `"use client"` directive. + x You're importing a component that needs `flushSync`. This React Hook only works in a Client Component. To fix, mark the file (or its parent) with the `"use client"` directive. | | Learn more: https://nextjs.org/docs/app/api-reference/directives/use-client | @@ -7,7 +7,7 @@ 1 | import { flushSync, unstable_batchedUpdates } from 'react-dom' : ^^^^^^^^^ `---- - x You're importing a component that needs `unstable_batchedUpdates`. This React hook only works in a client component. To fix, mark the file (or its parent) with the `"use client"` directive. + x You're importing a component that needs `unstable_batchedUpdates`. This React Hook only works in a Client Component. To fix, mark the file (or its parent) with the `"use client"` directive. | | Learn more: https://nextjs.org/docs/app/api-reference/directives/use-client | @@ -16,7 +16,7 @@ 1 | import { flushSync, unstable_batchedUpdates } from 'react-dom' : ^^^^^^^^^^^^^^^^^^^^^^^ `---- - x You're importing a component that needs `useActionState`. This React hook only works in a client component. To fix, mark the file (or its parent) with the `"use client"` directive. + x You're importing a component that needs `useActionState`. This React Hook only works in a Client Component. To fix, mark the file (or its parent) with the `"use client"` directive. | | Learn more: https://nextjs.org/docs/app/api-reference/directives/use-client | @@ -26,7 +26,7 @@ 3 | import { useActionState } from 'react' : ^^^^^^^^^^^^^^ `---- - x You're importing a component that needs `useFormStatus`. This React hook only works in a client component. 
To fix, mark the file (or its parent) with the `"use client"` directive. + x You're importing a component that needs `useFormStatus`. This React Hook only works in a Client Component. To fix, mark the file (or its parent) with the `"use client"` directive. | | Learn more: https://nextjs.org/docs/app/api-reference/directives/use-client | @@ -36,7 +36,7 @@ 5 | import { useFormStatus, useFormState } from 'react-dom' : ^^^^^^^^^^^^^ `---- - x You're importing a component that needs `useFormState`. This React hook only works in a client component. To fix, mark the file (or its parent) with the `"use client"` directive. + x You're importing a component that needs `useFormState`. This React Hook only works in a Client Component. To fix, mark the file (or its parent) with the `"use client"` directive. | | Learn more: https://nextjs.org/docs/app/api-reference/directives/use-client | diff --git a/crates/next-custom-transforms/tests/fixture.rs b/crates/next-custom-transforms/tests/fixture.rs index 4463df7d3053a..dafe895d727d1 100644 --- a/crates/next-custom-transforms/tests/fixture.rs +++ b/crates/next-custom-transforms/tests/fixture.rs @@ -2,9 +2,9 @@ use std::{ env::current_dir, iter::FromIterator, path::{Path, PathBuf}, - sync::Arc, }; +use bytes_str::BytesStr; use next_custom_transforms::transforms::{ amp_attributes::amp_attributes, cjs_optimizer::cjs_optimizer, @@ -21,6 +21,7 @@ use next_custom_transforms::transforms::{ server_actions::{self, server_actions, ServerActionsMode}, shake_exports::{shake_exports, Config as ShakeExportsConfig}, strip_page_exports::{next_transform_strip_page_exports, ExportFilter}, + track_dynamic_imports::track_dynamic_imports, warn_for_edge_runtime::warn_for_edge_runtime, }; use rustc_hash::FxHashSet; @@ -365,8 +366,8 @@ fn next_ssg_fixture(input: PathBuf) { next: false.into(), runtime: None, import_source: Some("".into()), - pragma: Some(Arc::new("__jsx".into())), - pragma_frag: Some(Arc::new("__jsxFrag".into())), + pragma: Some(BytesStr::from_str_slice("__jsx")), + pragma_frag: Some(BytesStr::from_str_slice("__jsxFrag")), throw_if_namespace: false.into(), development: false.into(), refresh: Default::default(), @@ -537,18 +538,26 @@ fn next_font_loaders_fixture(input: PathBuf) { ); } -#[fixture("tests/fixture/server-actions/**/input.js")] +#[fixture("tests/fixture/server-actions/**/input.*")] fn server_actions_fixture(input: PathBuf) { - let output = input.parent().unwrap().join("output.js"); + let (input_syntax, extension) = if input.extension() == Some("ts".as_ref()) { + (Syntax::Typescript(Default::default()), "ts") + } else { + (syntax(), "js") + }; + + let output = input.parent().unwrap().join(format!("output.{extension}")); let is_react_server_layer = input.iter().any(|s| s.to_str() == Some("server-graph")); let is_development = input.iter().any(|s| s.to_str() == Some("development")); + let mode = if input.iter().any(|s| s.to_str() == Some("turbopack")) { ServerActionsMode::Turbopack } else { ServerActionsMode::Webpack }; + test_fixture( - syntax(), + input_syntax, &|tr| { ( resolver(Mark::new(), Mark::new(), false), @@ -793,8 +802,8 @@ fn run_stip_page_exports_test(input: &Path, output: &Path, mode: ExportFilter) { next: false.into(), runtime: None, import_source: Some("".into()), - pragma: Some(Arc::new("__jsx".into())), - pragma_frag: Some(Arc::new("__jsxFrag".into())), + pragma: Some(BytesStr::from_str_slice("__jsx")), + pragma_frag: Some(BytesStr::from_str_slice("__jsxFrag")), throw_if_namespace: false.into(), development: false.into(), 
..Default::default() @@ -930,6 +939,29 @@ fn test_source_maps(input: PathBuf) { ); } +#[fixture("tests/fixture/track-dynamic-imports/**/input.js")] +fn track_dynamic_imports_fixture(input: PathBuf) { + let output = input.parent().unwrap().join("output.js"); + test_fixture( + syntax(), + &|_tr| { + let unresolved_mark = Mark::new(); + let top_level_mark = Mark::new(); + ( + resolver(unresolved_mark, top_level_mark, false), + track_dynamic_imports(unresolved_mark), + ) + }, + &input, + &output, + FixtureTestConfig { + // auto detect script/module to test CJS handling + module: None, + ..Default::default() + }, + ); +} + fn lint_to_fold(r: R) -> impl Pass where R: Visit, diff --git a/crates/next-custom-transforms/tests/fixture/server-actions/modules.d.ts b/crates/next-custom-transforms/tests/fixture/server-actions/modules.d.ts index d4fdfac42e46a..f49bb27f64b98 100644 --- a/crates/next-custom-transforms/tests/fixture/server-actions/modules.d.ts +++ b/crates/next-custom-transforms/tests/fixture/server-actions/modules.d.ts @@ -49,3 +49,6 @@ declare module 'components' { declare module 'navigation' { export function redirect(href: string): void } + +// Some tests generate `data:text/javascript,...` imports +declare module 'data:text/*' diff --git a/crates/next-custom-transforms/tests/fixture/server-actions/server-graph/60/input.ts b/crates/next-custom-transforms/tests/fixture/server-actions/server-graph/60/input.ts new file mode 100644 index 0000000000000..12082a2a81908 --- /dev/null +++ b/crates/next-custom-transforms/tests/fixture/server-actions/server-graph/60/input.ts @@ -0,0 +1,10 @@ +'use cache' + +// Exported TypeScript nodes should be ignored when validating that all module +// exports are async functions. +export type T = {} +export interface I {} +export enum E {} +export default interface D {} + +export async function Page() {} diff --git a/crates/next-custom-transforms/tests/fixture/server-actions/server-graph/60/output.ts b/crates/next-custom-transforms/tests/fixture/server-actions/server-graph/60/output.ts new file mode 100644 index 0000000000000..eb23ba9b378d5 --- /dev/null +++ b/crates/next-custom-transforms/tests/fixture/server-actions/server-graph/60/output.ts @@ -0,0 +1,19 @@ +/* __next_internal_action_entry_do_not_use__ {"803128060c414d59f8552e4788b846c0d2b7f74743":"$$RSC_SERVER_CACHE_0"} */ import { registerServerReference } from "private-next-rsc-server-reference"; +import { encryptActionBoundArgs, decryptActionBoundArgs } from "private-next-rsc-action-encryption"; +import { cache as $$cache__ } from "private-next-rsc-cache-wrapper"; +// Exported TypeScript nodes should be ignored when validating that all module +// exports are async functions. 
+export type T = { +}; +export interface I { +} +export enum E { +} +export default interface D { +} +export var $$RSC_SERVER_CACHE_0 = $$cache__("default", "803128060c414d59f8552e4788b846c0d2b7f74743", 0, async function Page() {}); +Object["defineProperty"]($$RSC_SERVER_CACHE_0, "name", { + value: "Page", + writable: false +}); +export var Page = registerServerReference($$RSC_SERVER_CACHE_0, "803128060c414d59f8552e4788b846c0d2b7f74743", null); diff --git a/crates/next-custom-transforms/tests/fixture/server-actions/server-graph/61/input.js b/crates/next-custom-transforms/tests/fixture/server-actions/server-graph/61/input.js new file mode 100644 index 0000000000000..14367767c2903 --- /dev/null +++ b/crates/next-custom-transforms/tests/fixture/server-actions/server-graph/61/input.js @@ -0,0 +1,29 @@ +export function ComponentA({ list, y }) { + return ( +
{ + 'use server' + console.log(list.find((x) => x === y)) + }} + > + +
+ ) +} + +export function ComponentB({ list, y }) { + return ( +
{ + 'use server' + console.log( + list.find(function (x) { + return x === y + }) + ) + }} + > + +
+ ) +} diff --git a/crates/next-custom-transforms/tests/fixture/server-actions/server-graph/61/output.js b/crates/next-custom-transforms/tests/fixture/server-actions/server-graph/61/output.js new file mode 100644 index 0000000000000..10417e25b2c85 --- /dev/null +++ b/crates/next-custom-transforms/tests/fixture/server-actions/server-graph/61/output.js @@ -0,0 +1,22 @@ +/* __next_internal_action_entry_do_not_use__ {"406a88810ecce4a4e8b59d53b8327d7e98bbf251d7":"$$RSC_SERVER_ACTION_0","4090b5db271335765a4b0eab01f044b381b5ebd5cd":"$$RSC_SERVER_ACTION_1"} */ import { registerServerReference } from "private-next-rsc-server-reference"; +import { encryptActionBoundArgs, decryptActionBoundArgs } from "private-next-rsc-action-encryption"; +export const $$RSC_SERVER_ACTION_0 = async function action($$ACTION_CLOSURE_BOUND) { + var [$$ACTION_ARG_0, $$ACTION_ARG_1] = await decryptActionBoundArgs("406a88810ecce4a4e8b59d53b8327d7e98bbf251d7", $$ACTION_CLOSURE_BOUND); + console.log($$ACTION_ARG_0.find((x)=>x === $$ACTION_ARG_1)); +}; +export function ComponentA({ list, y }) { + return
+ +
; +} +export const $$RSC_SERVER_ACTION_1 = async function action($$ACTION_CLOSURE_BOUND) { + var [$$ACTION_ARG_0, $$ACTION_ARG_1] = await decryptActionBoundArgs("4090b5db271335765a4b0eab01f044b381b5ebd5cd", $$ACTION_CLOSURE_BOUND); + console.log($$ACTION_ARG_0.find(function(x) { + return x === $$ACTION_ARG_1; + })); +}; +export function ComponentB({ list, y }) { + return
+ +
; +} diff --git a/crates/next-custom-transforms/tests/fixture/server-actions/tsconfig.json b/crates/next-custom-transforms/tests/fixture/server-actions/tsconfig.json index 297f2bdda85f8..e0c85ba79964a 100644 --- a/crates/next-custom-transforms/tests/fixture/server-actions/tsconfig.json +++ b/crates/next-custom-transforms/tests/fixture/server-actions/tsconfig.json @@ -23,12 +23,14 @@ "moduleDetection": "force" }, "files": ["./index.ts"], // loads ambient declarations for modules used in tests - "include": ["./**/*/input.js", "./**/*/output.js"], + "include": ["./**/*/input.*", "./**/*/output.*"], "exclude": [ // FIXME: invalid transformation of hoisted functions (https://github.com/vercel/next.js/issues/57392) "./server-graph/25/output.js", "./server-graph/28/output.js", "./server-graph/30/output.js", + // FIXME: buggy renaming of anonymous functions + "./server-graph/51/output.js", // Excluded because of weird TS behavior around `action.bind(...)` making args optional // (but only if no JSDoc type annotations are present) "./server-graph/24/output.js" diff --git a/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/1/input.js b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/1/input.js new file mode 100644 index 0000000000000..f93d860c24e40 --- /dev/null +++ b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/1/input.js @@ -0,0 +1,4 @@ +export default async function Page() { + const { foo } = await import('some-module') + return foo() +} diff --git a/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/1/output.js b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/1/output.js new file mode 100644 index 0000000000000..bcf4d19be33bb --- /dev/null +++ b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/1/output.js @@ -0,0 +1,5 @@ +import { trackDynamicImport as $$trackDynamicImport__ } from "private-next-rsc-track-dynamic-import"; +export default async function Page() { + const { foo } = await /*#__PURE__*/ $$trackDynamicImport__(import('some-module')); + return foo(); +} diff --git a/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/2/input.js b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/2/input.js new file mode 100644 index 0000000000000..7d0bd4f650227 --- /dev/null +++ b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/2/input.js @@ -0,0 +1,4 @@ +export default async function Page() { + await import((await import('get-name')).default) + return null +} diff --git a/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/2/output.js b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/2/output.js new file mode 100644 index 0000000000000..20c99119ffd0f --- /dev/null +++ b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/2/output.js @@ -0,0 +1,5 @@ +import { trackDynamicImport as $$trackDynamicImport__ } from "private-next-rsc-track-dynamic-import"; +export default async function Page() { + await /*#__PURE__*/ $$trackDynamicImport__(import((await /*#__PURE__*/ $$trackDynamicImport__(import('get-name'))).default)); + return null; +} diff --git a/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/3/input.js b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/3/input.js new file mode 100644 index 0000000000000..cf5719d3390ff --- /dev/null +++ b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/3/input.js @@ -0,0 +1,8 @@ +export default async function Page() { + 
const { foo } = await import('some-module') + // name conflict + $$trackDynamicImport__() + return foo() +} + +export function $$trackDynamicImport__() {} diff --git a/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/3/output.js b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/3/output.js new file mode 100644 index 0000000000000..81f97fbe33fad --- /dev/null +++ b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/3/output.js @@ -0,0 +1,9 @@ +import { trackDynamicImport as $$trackDynamicImport__ } from "private-next-rsc-track-dynamic-import"; +export default async function Page() { + const { foo } = await /*#__PURE__*/ $$trackDynamicImport__(import('some-module')); + // name conflict + $$trackDynamicImport__1(); + return foo(); +} +function $$trackDynamicImport__1() {} +export { $$trackDynamicImport__1 as $$trackDynamicImport__ }; diff --git a/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/4/input.js b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/4/input.js new file mode 100644 index 0000000000000..8f7c41948fc83 --- /dev/null +++ b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/4/input.js @@ -0,0 +1,6 @@ +const promise = import('some-module') + +export default async function Page() { + const { foo } = await promise + return foo() +} diff --git a/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/4/output.js b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/4/output.js new file mode 100644 index 0000000000000..6bc4be721895e --- /dev/null +++ b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/4/output.js @@ -0,0 +1,6 @@ +import { trackDynamicImport as $$trackDynamicImport__ } from "private-next-rsc-track-dynamic-import"; +const promise = /*#__PURE__*/ $$trackDynamicImport__(import('some-module')); +export default async function Page() { + const { foo } = await promise; + return foo(); +} diff --git a/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/5/input.js b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/5/input.js new file mode 100644 index 0000000000000..a762d9c472280 --- /dev/null +++ b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/5/input.js @@ -0,0 +1,6 @@ +async function foo() { + const { foo } = await import('some-module') + return foo() +} + +exports.foo = foo diff --git a/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/5/output.js b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/5/output.js new file mode 100644 index 0000000000000..64811de98f9a3 --- /dev/null +++ b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/5/output.js @@ -0,0 +1,6 @@ +const { ["trackDynamicImport"]: $$trackDynamicImport__ } = require("private-next-rsc-track-dynamic-import"); +async function foo() { + const { foo } = await /*#__PURE__*/ $$trackDynamicImport__(import('some-module')); + return foo(); +} +exports.foo = foo; diff --git a/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/index.ts b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/index.ts new file mode 100644 index 0000000000000..20915445a3590 --- /dev/null +++ b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/index.ts @@ -0,0 +1,2 @@ +/// +/// diff --git a/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/modules.d.ts b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/modules.d.ts new file mode 
100644 index 0000000000000..fb65a5a2b6500 --- /dev/null +++ b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/modules.d.ts @@ -0,0 +1,7 @@ +declare module 'some-module' { + export function foo(): null +} +declare module 'get-name' { + const name: string + export default name +} diff --git a/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/next.d.ts b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/next.d.ts new file mode 100644 index 0000000000000..5178b71fb3cb8 --- /dev/null +++ b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/next.d.ts @@ -0,0 +1,3 @@ +declare module 'private-next-rsc-track-dynamic-import' { + export function trackDynamicImport(promise: Promise): Promise +} diff --git a/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/tsconfig.json b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/tsconfig.json new file mode 100644 index 0000000000000..5da6bee985c0c --- /dev/null +++ b/crates/next-custom-transforms/tests/fixture/track-dynamic-imports/tsconfig.json @@ -0,0 +1,23 @@ +{ + "compilerOptions": { + "noEmit": true, + "rootDir": ".", + + "allowJs": true, + "checkJs": true, + + "lib": ["ESNext", "DOM"], + "skipLibCheck": true, + + "strict": true, + "jsx": "preserve", + + "target": "ESNext", + "esModuleInterop": true, + "module": "Preserve", + "moduleResolution": "bundler", + "moduleDetection": "force" + }, + "files": ["./index.ts"], // loads ambient declarations for modules used in tests + "include": ["./**/*/input.js", "./**/*/output.js"] +} diff --git a/crates/next-custom-transforms/tests/full.rs b/crates/next-custom-transforms/tests/full.rs index cc535bcf60c60..0d6e3cd87a841 100644 --- a/crates/next-custom-transforms/tests/full.rs +++ b/crates/next-custom-transforms/tests/full.rs @@ -82,6 +82,7 @@ fn test(input: &Path, minify: bool) { prefer_esm: false, debug_function_name: false, css_env: None, + track_dynamic_imports: false, }; let unresolved_mark = Mark::new(); diff --git a/crates/next-error-code-swc-plugin/Cargo.lock b/crates/next-error-code-swc-plugin/Cargo.lock index b795f628f807e..661ab3ef1eabb 100644 --- a/crates/next-error-code-swc-plugin/Cargo.lock +++ b/crates/next-error-code-swc-plugin/Cargo.lock @@ -2,17 +2,6 @@ # It is not intended for manual editing. 
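For context on the `track-dynamic-imports` fixtures above (this note is not part of the changeset): the transform rewrites each `import()` expression into a call to a `trackDynamicImport` helper imported from `private-next-rsc-track-dynamic-import`, matching the declaration in the fixture's `next.d.ts` stub. A minimal sketch of that shape, assuming the helper only needs to observe the promise and pass it through, might look like the following; the real helper is internal to Next.js and may do more (for example, registering the pending import so it can be tracked during rendering).

```ts
// Hypothetical stand-in for the internal helper; not the real implementation.
// The transform wraps `import('mod')` as `trackDynamicImport(import('mod'))`,
// so the helper must accept a module promise and return the same promise type.
export function trackDynamicImport<TModule>(
  modulePromise: Promise<TModule>
): Promise<TModule> {
  // A real implementation could record the pending promise here so the
  // renderer knows a dynamic import is in flight; this sketch just forwards it.
  return modulePromise
}
```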
version = 4 -[[package]] -name = "ahash" -version = "0.7.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" -dependencies = [ - "getrandom", - "once_cell", - "version_check", -] - [[package]] name = "ahash" version = "0.8.11" @@ -55,16 +44,28 @@ version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7" +[[package]] +name = "arrayvec" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" + +[[package]] +name = "ascii" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d92bec98840b8f03a5ff5413de5293bfcd8bf96467cf5452609f939ec6f5de16" + [[package]] name = "ast_node" -version = "2.0.0" +version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94741d66bdda032fcbf33e621b4e3a888d7d11bd3ac4446d82c5593a136936ff" +checksum = "91fb5864e2f5bf9fd9797b94b2dfd1554d4c3092b535008b27d7e15c86675a2f" dependencies = [ "proc-macro2", "quote", "swc_macros_common", - "syn 2.0.90", + "syn", ] [[package]] @@ -75,9 +76,9 @@ checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "base64" -version = "0.21.7" +version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "base64-simd" @@ -135,24 +136,25 @@ dependencies = [ [[package]] name = "bytecheck" -version = "0.6.12" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23cdc57ce23ac53c931e88a43d06d070a6fd142f2617be5855eb75efc9beb1c2" +checksum = "50690fb3370fb9fe3550372746084c46f2ac8c9685c583d2be10eefd89d3d1a3" dependencies = [ "bytecheck_derive", "ptr_meta", + "rancor", "simdutf8", ] [[package]] name = "bytecheck_derive" -version = "0.6.12" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3db406d29fbcd95542e92559bed4d8ad92636d1ca8b3b72ede10b4bcc010e659" +checksum = "efb7846e0cb180355c2dec69e721edafa36919850f1a9f52ffba4ebc0393cb71" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn", ] [[package]] @@ -207,6 +209,15 @@ dependencies = [ "thiserror 2.0.6", ] +[[package]] +name = "castaway" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0abae9be0aaf9ea96a3b1b8b1b55c602ca751eba1b1500220cea4ecbafe7c0d5" +dependencies = [ + "rustversion", +] + [[package]] name = "cc" version = "1.2.3" @@ -222,6 +233,19 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "compact_str" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f86b9c4c00838774a6d902ef931eff7470720c51d90c2e32cfe15dc304737b3f" +dependencies = [ + "castaway", + "cfg-if", + "itoa", + "ryu", + "static_assertions", +] + [[package]] name = "cpufeatures" version = "0.2.16" @@ -262,7 +286,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.90", + "syn", ] [[package]] @@ -273,7 +297,7 @@ checksum = 
"d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.90", + "syn", ] [[package]] @@ -310,7 +334,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.90", + "syn", ] [[package]] @@ -320,7 +344,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" dependencies = [ "derive_builder_core", - "syn 2.0.90", + "syn", ] [[package]] @@ -353,7 +377,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn", ] [[package]] @@ -407,7 +431,7 @@ checksum = "8d7ccf961415e7aa17ef93dcb6c2441faaa8e768abe09e659b908089546f74c5" dependencies = [ "proc-macro2", "swc_macros_common", - "syn 2.0.90", + "syn", ] [[package]] @@ -426,39 +450,19 @@ dependencies = [ "version_check", ] -[[package]] -name = "getrandom" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" -dependencies = [ - "cfg-if", - "libc", - "wasi", -] - [[package]] name = "glob" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" -[[package]] -name = "hashbrown" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" -dependencies = [ - "ahash 0.7.8", -] - [[package]] name = "hashbrown" version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ - "ahash 0.8.11", + "ahash", "allocator-api2", ] @@ -488,15 +492,15 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "hstr" -version = "0.2.12" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dae404c0c5d4e95d4858876ab02eecd6a196bb8caa42050dfa809938833fc412" +checksum = "71399f53a92ef72ee336a4b30201c6e944827e14e0af23204c291aad9c24cc85" dependencies = [ "hashbrown 0.14.5", "new_debug_unreachable", "once_cell", "phf", - "rustc-hash 1.1.0", + "rustc-hash 2.1.0", "triomphe", ] @@ -615,7 +619,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn", ] [[package]] @@ -670,7 +674,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.90", + "syn", ] [[package]] @@ -762,7 +766,27 @@ checksum = "23c9b935fbe1d6cbd1dac857b54a688145e2d93f48db36010514d0f612d0ad67" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn", +] + +[[package]] +name = "munge" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e22e7961c873e8b305b176d2a4e1d41ce7ba31bc1c52d2a107a89568ec74c55" +dependencies = [ + "munge_macro", +] + +[[package]] +name = "munge_macro" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ac7d860b767c6398e88fe93db73ce53eb496057aa6895ffa4d60cb02e1d1c6b" +dependencies = [ + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -833,9 +857,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.20.2" +version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] name = "outref" @@ -855,6 +879,25 @@ version = "4.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fb37767f6569cd834a413442455e0f066d0d522de8630436e2a1761d9726ba56" +[[package]] +name = "par-core" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "757892557993c69e82f9de0f9051e87144278aa342f03bf53617bbf044554484" +dependencies = [ + "once_cell", +] + +[[package]] +name = "par-iter" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a5b20f31e9ba82bfcbbb54a67aa40be6cebec9f668ba5753be138f9523c531a" +dependencies = [ + "either", + "par-core", +] + [[package]] name = "parking_lot" version = "0.12.3" @@ -914,7 +957,7 @@ dependencies = [ "phf_shared", "proc-macro2", "quote", - "syn 2.0.90", + "syn", ] [[package]] @@ -962,22 +1005,22 @@ dependencies = [ [[package]] name = "ptr_meta" -version = "0.1.4" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0738ccf7ea06b608c10564b31debd4f5bc5e197fc8bfe088f68ae5ce81e7a4f1" +checksum = "fe9e76f66d3f9606f44e45598d155cb13ecf09f4a28199e48daf8c8fc937ea90" dependencies = [ "ptr_meta_derive", ] [[package]] name = "ptr_meta_derive" -version = "0.1.4" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac" +checksum = "ca414edb151b4c8d125c12566ab0d74dc9cdba36fb80eb7b848c15f495fd32d1" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn", ] [[package]] @@ -995,6 +1038,15 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" +[[package]] +name = "rancor" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "caf5f7161924b9d1cea0e4cabc97c372cea92b5f927fc13c6bca67157a0ad947" +dependencies = [ + "ptr_meta", +] + [[package]] name = "rand" version = "0.8.5" @@ -1071,40 +1123,41 @@ checksum = "ba39f3699c378cd8970968dcbff9c43159ea4cfbd88d43c00b22f2ef10a435d2" [[package]] name = "rend" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71fe3824f5629716b1589be05dacd749f6aa084c87e00e016714a8cdfccc997c" +checksum = "a35e8a6bf28cd121053a66aa2e6a2e3eaffad4a60012179f0e864aa5ffeff215" dependencies = [ "bytecheck", ] [[package]] name = "rkyv" -version = "0.7.45" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9008cd6385b9e161d8229e1f6549dd23c3d022f132a2ea37ac3a10ac4935779b" +checksum = "1e147371c75553e1e2fcdb483944a8540b8438c31426279553b9a8182a9b7b65" dependencies = [ - "bitvec", "bytecheck", "bytes", - "hashbrown 0.12.3", + "hashbrown 0.15.2", + "indexmap", + "munge", "ptr_meta", + "rancor", "rend", "rkyv_derive", - "seahash", "tinyvec", "uuid", ] [[package]] name = "rkyv_derive" -version = "0.7.45" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "503d1d27590a2b0a3a4ca4c94755aa2875657196ecbf401a42eff41d7de532c0" +checksum = "246b40ac189af6c675d124b802e8ef6d5246c53e17367ce9501f8f66a81abb7a" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn", ] [[package]] @@ -1171,12 +1224,6 @@ version = "1.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" -[[package]] -name = "seahash" -version = "4.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b" - [[package]] name = "semver" version = "0.9.0" @@ -1218,7 +1265,7 @@ checksum = "46f859dbbf73865c6627ed570e78961cd3ac92407a2d117204c49232485da55e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn", ] [[package]] @@ -1350,7 +1397,7 @@ dependencies = [ "proc-macro2", "quote", "swc_macros_common", - "syn 2.0.90", + "syn", ] [[package]] @@ -1361,36 +1408,38 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "swc_allocator" -version = "1.0.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52cacc28f0ada8e4e31a720dd849ff06864b10e6ab0a1aaa99c06456cfe046af" +checksum = "cc6b926f0d94bbb34031fe5449428cfa1268cdc0b31158d6ad9c97e0fc1e79dd" dependencies = [ + "allocator-api2", "bumpalo", "hashbrown 0.14.5", "ptr_meta", - "rustc-hash 1.1.0", + "rustc-hash 2.1.0", "triomphe", ] [[package]] name = "swc_atoms" -version = "2.0.0" +version = "5.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d7211e5c57ea972f32b8a104d7006c4a68d094ec30c6a73bcd20d4d6c473c7c" +checksum = "9d7077ba879f95406459bc0c81f3141c529b34580bc64d7ab7bd15e7118a0391" dependencies = [ "bytecheck", "hstr", "once_cell", + "rancor", "rkyv", - "rustc-hash 1.1.0", + "rustc-hash 2.1.0", "serde", ] [[package]] name = "swc_common" -version = "4.0.1" +version = "9.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f87a21612a324493fd065e9c6fea960b4031088a213db782e2ca71d2fabb3ec" +checksum = "5e36654ec9a8b089c329ab7522aa70eb39cc3e4e3dfd70f9176a74414bdec00e" dependencies = [ "anyhow", "ast_node", @@ -1403,8 +1452,9 @@ dependencies = [ "num-bigint", "once_cell", "parking_lot", + "rancor", "rkyv", - "rustc-hash 1.1.0", + "rustc-hash 2.1.0", "serde", "siphasher", "sourcemap", @@ -1420,9 +1470,9 @@ dependencies = [ [[package]] name = "swc_core" -version = "5.0.4" +version = "23.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92086975747587872715a20f78fc51e7047bac58f3a6a17d4ed5a9643f3fd0a2" +checksum = "6f806c64573a1b9e7ea5e263fc24a015bc18a1a8c2a56dd36860380ec3e80a71" dependencies = [ "once_cell", "swc_allocator", @@ -1435,21 +1485,25 @@ dependencies = [ "swc_plugin", "swc_plugin_macro", "swc_plugin_proxy", + "swc_transform_common", "vergen", ] [[package]] name = "swc_ecma_ast" -version = "4.0.1" +version = "9.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bdab7759509c1b37ec77bd9fc231f525b888d9609c2963ce71995da1b27357c" +checksum = "0613d84468a6bb6d45d13c5a3368b37bd21f3067a089f69adac630dcb462a018" dependencies = [ "bitflags", "bytecheck", "is-macro", "num-bigint", + "once_cell", "phf", + "rancor", "rkyv", + "rustc-hash 2.1.0", "scoped-tls", "string_enum", "swc_atoms", @@ -1460,14 +1514,17 @@ dependencies = [ [[package]] name = "swc_ecma_codegen" -version = "4.0.2" +version = "11.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e474f6c2671524dbb179b44a36425cb1a58928f0f7211c45043f0951a1842c5d" +checksum = "b01b3de365a86b8f982cc162f257c82f84bda31d61084174a3be37e8ab15c0f4" dependencies = [ + "ascii", + "compact_str", "memchr", "num-bigint", "once_cell", 
"regex", + "rustc-hash 2.1.0", "serde", "sourcemap", "swc_allocator", @@ -1480,27 +1537,55 @@ dependencies = [ [[package]] name = "swc_ecma_codegen_macros" -version = "1.0.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f9a42f479a6475647e248fa9750982c87cd985e19d1016a1fc18a70682305d1" +checksum = "e99e1931669a67c83e2c2b4375674f6901d1480994a76aa75b23f1389e6c5076" dependencies = [ "proc-macro2", "quote", "swc_macros_common", - "syn 2.0.90", + "syn", +] + +[[package]] +name = "swc_ecma_lexer" +version = "12.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d11c8e71901401b9aae2ece4946eeb7674b14b8301a53768afbbeeb0e48b599" +dependencies = [ + "arrayvec", + "bitflags", + "either", + "new_debug_unreachable", + "num-bigint", + "num-traits", + "phf", + "rustc-hash 2.1.0", + "serde", + "smallvec", + "smartstring", + "stacker", + "swc_atoms", + "swc_common", + "swc_ecma_ast", + "tracing", + "typed-arena", ] [[package]] name = "swc_ecma_parser" -version = "5.0.0" +version = "12.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54c5ab8bd4cc4a4956514699c84d1a25cdb5a33f5ec760ec64ce712e973019c9" +checksum = "250786944fbc05f6484eda9213df129ccfe17226ae9ad51b62fce2f72135dbee" dependencies = [ + "arrayvec", + "bitflags", "either", "new_debug_unreachable", "num-bigint", "num-traits", "phf", + "rustc-hash 2.1.0", "serde", "smallvec", "smartstring", @@ -1508,15 +1593,16 @@ dependencies = [ "swc_atoms", "swc_common", "swc_ecma_ast", + "swc_ecma_lexer", "tracing", "typed-arena", ] [[package]] name = "swc_ecma_testing" -version = "4.0.0" +version = "10.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d0397cdbbdcfec2048da1291f44e2d433471fab9bfb430f8f879a831242d636" +checksum = "977386a831e9464cc99e914d5682621efca49c443e5c737a00a2babd6d1589aa" dependencies = [ "anyhow", "hex", @@ -1527,16 +1613,17 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_base" -version = "5.0.1" +version = "13.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eb4000822f02b54af0be4f668649fa1e5555f1e3392479d17a277eb81a841f0" +checksum = "6856da3da598f4da001b7e4ce225ee8970bc9d5cbaafcaf580190cf0a6031ec5" dependencies = [ "better_scoped_tls", "bitflags", "indexmap", "once_cell", + "par-core", "phf", - "rustc-hash 1.1.0", + "rustc-hash 2.1.0", "serde", "smallvec", "swc_atoms", @@ -1550,9 +1637,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_testing" -version = "5.0.0" +version = "16.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21721599724e9f9c40467ff9cdd20f045f134c26e5fe794b1ee6708798c724ed" +checksum = "93a905befc831be30430ab1e4af5aa6f2052ea397f44e1747c28a4d3859f4f84" dependencies = [ "ansi_term", "anyhow", @@ -1562,6 +1649,7 @@ dependencies = [ "serde_json", "sha2", "sourcemap", + "swc_allocator", "swc_common", "swc_ecma_ast", "swc_ecma_codegen", @@ -1576,14 +1664,16 @@ dependencies = [ [[package]] name = "swc_ecma_utils" -version = "5.0.1" +version = "13.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eb9a28511d17d1e6c5dfcf209368a1da4a542270c450fba7f27faf22c34df22" +checksum = "bb6ecf7485a130df25c4ba4e27cfde0cc7bf45f453f40cf0c52eb69b3a4235d0" dependencies = [ "indexmap", "num_cpus", "once_cell", - "rustc-hash 1.1.0", + "par-core", + "par-iter", + "rustc-hash 2.1.0", "ryu-js", "swc_atoms", "swc_common", @@ -1595,9 +1685,9 @@ dependencies = [ [[package]] name = 
"swc_ecma_visit" -version = "4.0.1" +version = "9.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5af5332117aa0424e418556f74e9cee335dc47eb7ae35dddbd9fd65fc01452c" +checksum = "249dc9eede1a4ad59a038f9cfd61ce67845bd2c1392ade3586d714e7181f3c1a" dependencies = [ "new_debug_unreachable", "num-bigint", @@ -1616,19 +1706,22 @@ checksum = "e96e15288bf385ab85eb83cff7f9e2d834348da58d0a31b33bdb572e66ee413e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn", ] [[package]] name = "swc_error_reporters" -version = "5.0.0" +version = "11.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb4a3c124af5d297d98e6c18776ba04024087cde14602621017e8e9c6cd1c2d1" +checksum = "e3b5be5f151485ec9372c23bbb132c4a829c879632db8b790439779b873970be" dependencies = [ "anyhow", "miette", "once_cell", "parking_lot", + "serde", + "serde_derive", + "serde_json", "swc_common", ] @@ -1640,7 +1733,7 @@ checksum = "a509f56fca05b39ba6c15f3e58636c3924c78347d63853632ed2ffcb6f5a0ac7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn", ] [[package]] @@ -1654,23 +1747,26 @@ dependencies = [ [[package]] name = "swc_plugin_macro" -version = "1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0917ccfdcd3fa6cf41bdacef2388702a3b274f9ea708d930e1e8db37c7c3e1c6" +checksum = "ace467dfafbbdf3aecff786b8605b35db57d945e92fd88800569aa2cba0cdf61" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn", ] [[package]] name = "swc_plugin_proxy" -version = "4.0.0" +version = "9.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6749c4027aad79cf648ffce6633100ea01a7b0d6cf17299cfa68ce141897c26c" +checksum = "edbd6dddc6f98f7ded495b918c80bc59c78d9b297ed98081e22def0f27a117f9" dependencies = [ "better_scoped_tls", + "bytecheck", + "rancor", "rkyv", + "rustc-hash 2.1.0", "swc_common", "swc_ecma_ast", "swc_trace_macro", @@ -1679,34 +1775,37 @@ dependencies = [ [[package]] name = "swc_trace_macro" -version = "2.0.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c78717a841565df57f811376a3d19c9156091c55175e12d378f3a522de70cef" +checksum = "559185db338f1bcb50297aafd4f79c0956c84dc71a66da4cffb57acf9d93fd88" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn", ] [[package]] -name = "swc_visit" -version = "2.0.0" +name = "swc_transform_common" +version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9138b6a36bbe76dd6753c4c0794f7e26480ea757bee499738bedbbb3ae3ec5f3" +checksum = "6d73c21cecc518e0107f890012a747fa679cb0faf04f32fc8f5bd618040eb8fe" dependencies = [ - "either", - "new_debug_unreachable", + "better_scoped_tls", + "once_cell", + "rustc-hash 2.1.0", + "serde", + "serde_json", + "swc_common", ] [[package]] -name = "syn" -version = "1.0.109" +name = "swc_visit" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +checksum = "9138b6a36bbe76dd6753c4c0794f7e26480ea757bee499738bedbbb3ae3ec5f3" dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", + "either", + "new_debug_unreachable", ] [[package]] @@ -1728,7 +1827,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn", ] [[package]] @@ -1761,9 +1860,9 @@ dependencies = [ [[package]] name = "testing" -version = "4.0.0" 
+version = "10.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c6b200c27382caadd583563c79cdf870d854e14c4c078731d447ecbfe27c35f" +checksum = "987241734b96bd71228f0395ab38e05b71ec7c6ded958538c5d3a1b67f6465ce" dependencies = [ "ansi_term", "cargo_metadata 0.18.1", @@ -1771,6 +1870,7 @@ dependencies = [ "once_cell", "pretty_assertions", "regex", + "rustc-hash 2.1.0", "serde", "serde_json", "swc_common", @@ -1793,7 +1893,7 @@ dependencies = [ "quote", "regex", "relative-path", - "syn 2.0.90", + "syn", ] [[package]] @@ -1832,7 +1932,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn", ] [[package]] @@ -1843,7 +1943,7 @@ checksum = "d65750cab40f4ff1929fb1ba509e9914eb756131cef4210da8d5d700d26f6312" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn", ] [[package]] @@ -1900,7 +2000,7 @@ checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn", ] [[package]] @@ -2060,12 +2160,6 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" -[[package]] -name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" - [[package]] name = "winapi" version = "0.3.9" @@ -2217,7 +2311,7 @@ checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn", "synstructure", ] @@ -2238,7 +2332,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn", ] [[package]] @@ -2258,7 +2352,7 @@ checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn", "synstructure", ] @@ -2281,5 +2375,5 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn", ] diff --git a/crates/next-error-code-swc-plugin/Cargo.toml b/crates/next-error-code-swc-plugin/Cargo.toml index 8f2ef123802de..ec5a6de278726 100644 --- a/crates/next-error-code-swc-plugin/Cargo.toml +++ b/crates/next-error-code-swc-plugin/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "next-error-code-swc-plugin" version = "0.1.0" -edition = "2021" +edition = "2024" [lib] crate-type = ["cdylib"] @@ -11,4 +11,4 @@ md5 = "0.7" rustc-hash = "2.1.0" serde = "1.0" serde_json = "1.0" -swc_core = { version = "5.0.*", features = ["ecma_plugin_transform"] } +swc_core = { version = "23.0.*", features = ["ecma_plugin_transform"] } diff --git a/crates/wasm/src/lib.rs b/crates/wasm/src/lib.rs index d22a95081f527..5e3ee0ee21558 100644 --- a/crates/wasm/src/lib.rs +++ b/crates/wasm/src/lib.rs @@ -21,7 +21,7 @@ use wasm_bindgen_futures::future_to_promise; pub mod mdx; fn convert_err(err: Error) -> JsValue { - format!("{:?}", err).into() + format!("{err:?}").into() } #[wasm_bindgen(js_name = "minifySync")] @@ -40,7 +40,7 @@ pub fn minify_sync(s: JsString, opts: JsValue) -> Result { }, |handler| { GLOBALS.set(&Default::default(), || { - let fm = c.cm.new_source_file(FileName::Anon.into(), s.into()); + let fm = c.cm.new_source_file(FileName::Anon.into(), String::from(s)); let program = c .minify(fm, 
handler, &opts, Default::default()) .context("failed to minify file")?; @@ -49,6 +49,7 @@ pub fn minify_sync(s: JsString, opts: JsValue) -> Result { }) }, ) + .map_err(|e| e.to_pretty_error()) .map_err(convert_err)?; Ok(serde_wasm_bindgen::to_value(&value)?) @@ -88,7 +89,7 @@ pub fn transform_sync(s: JsValue, opts: JsValue) -> Result { } else { FileName::Real(opts.swc.filename.clone().into()).into() }, - s.into(), + String::from(s), ); let cm = c.cm.clone(); let file = fm.clone(); @@ -125,6 +126,7 @@ pub fn transform_sync(s: JsValue, opts: JsValue) -> Result { }) }, ) + .map_err(|e| e.to_pretty_error()) .map_err(convert_err)?; Ok(serde_wasm_bindgen::to_value(&out)?) @@ -152,7 +154,7 @@ pub fn parse_sync(s: JsString, opts: JsValue) -> Result { |handler| { c.run(|| { GLOBALS.set(&Default::default(), || { - let fm = c.cm.new_source_file(FileName::Anon.into(), s.into()); + let fm = c.cm.new_source_file(FileName::Anon.into(), String::from(s)); let cmts = c.comments().clone(); let comments = if opts.comments { @@ -178,6 +180,7 @@ pub fn parse_sync(s: JsString, opts: JsValue) -> Result { }) }, ) + .map_err(|e| e.to_pretty_error()) .map_err(convert_err) } diff --git a/docs/01-app/01-getting-started/01-installation.mdx b/docs/01-app/01-getting-started/01-installation.mdx index cb9b84d437a19..c893e3626c7f4 100644 --- a/docs/01-app/01-getting-started/01-installation.mdx +++ b/docs/01-app/01-getting-started/01-installation.mdx @@ -1,7 +1,6 @@ --- -title: How to set up a new Next.js project -nav_title: Installation -description: Create a new Next.js application with the `create-next-app` CLI, and set up TypeScript, ESLint, and Module Path Aliases. +title: Installation +description: Learn how to create a new Next.js application with the `create-next-app` CLI, and set up TypeScript, ESLint, and Module Path Aliases. --- {/* The content of this doc is shared between the app and pages router. You can use the `Content` component to add content that is specific to the Pages Router. Any shared content should not be wrapped in a component. */} @@ -41,8 +40,20 @@ After the prompts, [`create-next-app`](/docs/app/api-reference/cli/create-next-a To manually create a new Next.js app, install the required packages: -```bash filename="Terminal" -npm install next@latest react@latest react-dom@latest +```bash package="pnpm" +pnpm i next@latest react@latest react-dom@latest +``` + +```bash package="npm" +npm i next@latest react@latest react-dom@latest +``` + +```bash package="yarn" +yarn add next@latest react@latest react-dom@latest +``` + +```bash package="bun" +bun add next@latest react@latest react-dom@latest ``` Then, add the following scripts to your `package.json` file: @@ -71,7 +82,7 @@ These scripts refer to the different stages of developing an application: Next.js uses file-system routing, which means the routes in your application are determined by how you structure your files. -Create an `app` folder. Then, inside `app`, create a `layout.tsx` file. This file is the [root layout](/docs/app/api-reference/file-conventions/layout#root-layouts). It's required and must contain the `` and `` tags. +Create an `app` folder. Then, inside `app`, create a `layout.tsx` file. This file is the [root layout](/docs/app/api-reference/file-conventions/layout#root-layout). It's required and must contain the `` and `` tags. 
```tsx filename="app/layout.tsx" switcher export default function RootLayout({ @@ -124,7 +135,7 @@ Both `layout.tsx` and `page.tsx` will be rendered when the user visits the root > **Good to know**: > > - If you forget to create the root layout, Next.js will automatically create this file when running the development server with `next dev`. -> - You can optionally use a [`src` directory](/docs/app/building-your-application/configuring/src-directory) in the root of your project to separate your application's code from configuration files. +> - You can optionally use a [`src` folder](/docs/app/api-reference/file-conventions/src-folder) in the root of your project to separate your application's code from configuration files. @@ -202,7 +213,7 @@ export default function Document() { ### Create the `public` folder (optional) -Create a [`public` folder](/docs/app/building-your-application/optimizing/static-assets) at the root of your project to store static assets such as images, fonts, etc. Files inside `public` can then be referenced by your code starting from the base URL (`/`). +Create a [`public` folder](/docs/app/api-reference/file-conventions/public-folder) at the root of your project to store static assets such as images, fonts, etc. Files inside `public` can then be referenced by your code starting from the base URL (`/`). You can then reference these assets using the root path (`/`). For example, `public/profile.png` can be referenced as `/profile.png`: @@ -226,7 +237,7 @@ export default function Page() { 1. Run `npm run dev` to start the development server. 2. Visit `http://localhost:3000` to view your application. -3. Edit the`app/page.tsx` `pages/index.tsx` file and save it to see the updated result in your browser. +3. Edit the `app/page.tsx``pages/index.tsx` file and save it to see the updated result in your browser. ## Set up TypeScript diff --git a/docs/01-app/01-getting-started/02-project-structure.mdx b/docs/01-app/01-getting-started/02-project-structure.mdx index 091f6fce0169f..dbf7cc07d47f5 100644 --- a/docs/01-app/01-getting-started/02-project-structure.mdx +++ b/docs/01-app/01-getting-started/02-project-structure.mdx @@ -1,7 +1,7 @@ --- title: Project structure and organization nav_title: Project Structure -description: An overview of the folder and file conventions in Next.js, and how to organize your project. +description: Learn the folder and file conventions in Next.js, and how to organize your project. --- This page provides an overview of **all** the folder and file conventions in Next.js, and recommendations for organizing your project. 
@@ -20,33 +20,33 @@ Top-level folders are used to organize your application's code and static assets height="525" /> -| | | -| ------------------------------------------------------------------------ | ---------------------------------- | -| [`app`](/docs/app/building-your-application/routing) | App Router | -| [`pages`](/docs/pages/building-your-application/routing) | Pages Router | -| [`public`](/docs/app/building-your-application/optimizing/static-assets) | Static assets to be served | -| [`src`](/docs/app/building-your-application/configuring/src-directory) | Optional application source folder | +| | | +| ------------------------------------------------------------------ | ---------------------------------- | +| [`app`](/docs/app) | App Router | +| [`pages`](/docs/pages/building-your-application/routing) | Pages Router | +| [`public`](/docs/app/api-reference/file-conventions/public-folder) | Static assets to be served | +| [`src`](/docs/app/api-reference/file-conventions/src-folder) | Optional application source folder | ### Top-level files Top-level files are used to configure your application, manage dependencies, run middleware, integrate monitoring tools, and define environment variables. -| | | -| ------------------------------------------------------------------------------------------- | --------------------------------------- | -| **Next.js** | | -| [`next.config.js`](/docs/app/api-reference/config/next-config-js) | Configuration file for Next.js | -| [`package.json`](/docs/app/getting-started/installation#manual-installation) | Project dependencies and scripts | -| [`instrumentation.ts`](/docs/app/building-your-application/optimizing/instrumentation) | OpenTelemetry and Instrumentation file | -| [`middleware.ts`](/docs/app/building-your-application/routing/middleware) | Next.js request middleware | -| [`.env`](/docs/app/building-your-application/configuring/environment-variables) | Environment variables | -| [`.env.local`](/docs/app/building-your-application/configuring/environment-variables) | Local environment variables | -| [`.env.production`](/docs/app/building-your-application/configuring/environment-variables) | Production environment variables | -| [`.env.development`](/docs/app/building-your-application/configuring/environment-variables) | Development environment variables | -| [`.eslintrc.json`](/docs/app/api-reference/config/eslint) | Configuration file for ESLint | -| `.gitignore` | Git files and folders to ignore | -| `next-env.d.ts` | TypeScript declaration file for Next.js | -| `tsconfig.json` | Configuration file for TypeScript | -| `jsconfig.json` | Configuration file for JavaScript | +| | | +| ---------------------------------------------------------------------------- | --------------------------------------- | +| **Next.js** | | +| [`next.config.js`](/docs/app/api-reference/config/next-config-js) | Configuration file for Next.js | +| [`package.json`](/docs/app/getting-started/installation#manual-installation) | Project dependencies and scripts | +| [`instrumentation.ts`](/docs/app/guides/instrumentation) | OpenTelemetry and Instrumentation file | +| [`middleware.ts`](/docs/app/api-reference/file-conventions/middleware) | Next.js request middleware | +| [`.env`](/docs/app/guides/environment-variables) | Environment variables | +| [`.env.local`](/docs/app/guides/environment-variables) | Local environment variables | +| [`.env.production`](/docs/app/guides/environment-variables) | Production environment variables | +| 
[`.env.development`](/docs/app/guides/environment-variables) | Development environment variables | +| [`.eslintrc.json`](/docs/app/api-reference/config/eslint) | Configuration file for ESLint | +| `.gitignore` | Git files and folders to ignore | +| `next-env.d.ts` | TypeScript declaration file for Next.js | +| `tsconfig.json` | Configuration file for TypeScript | +| `jsconfig.json` | Configuration file for JavaScript | @@ -73,28 +73,28 @@ Top-level files are used to configure your application, manage dependencies, run ### Dynamic routes -| | | -| --------------------------------------------------------------------------------------------------------- | -------------------------------- | -| [`[folder]`](/docs/app/building-your-application/routing/dynamic-routes#convention) | Dynamic route segment | -| [`[...folder]`](/docs/app/building-your-application/routing/dynamic-routes#catch-all-segments) | Catch-all route segment | -| [`[[...folder]]`](/docs/app/building-your-application/routing/dynamic-routes#optional-catch-all-segments) | Optional catch-all route segment | +| | | +| ------------------------------------------------------------------------------------------------------ | -------------------------------- | +| [`[folder]`](/docs/app/api-reference/file-conventions/dynamic-routes#convention) | Dynamic route segment | +| [`[...folder]`](/docs/app/api-reference/file-conventions/dynamic-routes#catch-all-segments) | Catch-all route segment | +| [`[[...folder]]`](/docs/app/api-reference/file-conventions/dynamic-routes#optional-catch-all-segments) | Optional catch-all route segment | ### Route Groups and private folders -| | | -| --------------------------------------------------------------------------------- | ------------------------------------------------ | -| [`(folder)`](/docs/app/building-your-application/routing/route-groups#convention) | Group routes without affecting routing | -| [`_folder`](#private-folders) | Opt folder and all child segments out of routing | +| | | +| ------------------------------------------------------------------------------ | ------------------------------------------------ | +| [`(folder)`](/docs/app/api-reference/file-conventions/route-groups#convention) | Group routes without affecting routing | +| [`_folder`](#private-folders) | Opt folder and all child segments out of routing | ### Parallel and Intercepted Routes -| | | -| ---------------------------------------------------------------------------------------------- | -------------------------- | -| [`@folder`](/docs/app/building-your-application/routing/parallel-routes#slots) | Named slot | -| [`(.)folder`](/docs/app/building-your-application/routing/intercepting-routes#convention) | Intercept same level | -| [`(..)folder`](/docs/app/building-your-application/routing/intercepting-routes#convention) | Intercept one level above | -| [`(..)(..)folder`](/docs/app/building-your-application/routing/intercepting-routes#convention) | Intercept two levels above | -| [`(...)folder`](/docs/app/building-your-application/routing/intercepting-routes#convention) | Intercept from root | +| | | +| ------------------------------------------------------------------------------------------- | -------------------------- | +| [`@folder`](/docs/app/api-reference/file-conventions/parallel-routes#slots) | Named slot | +| [`(.)folder`](/docs/app/api-reference/file-conventions/intercepting-routes#convention) | Intercept same level | +| [`(..)folder`](/docs/app/api-reference/file-conventions/intercepting-routes#convention) | 
Intercept one level above | +| [`(..)(..)folder`](/docs/app/api-reference/file-conventions/intercepting-routes#convention) | Intercept two levels above | +| [`(...)folder`](/docs/app/api-reference/file-conventions/intercepting-routes#convention) | Intercept from root | ### Metadata file conventions @@ -285,25 +285,25 @@ Route groups are useful for: - [Creating multiple nested layouts in the same segment, including multiple root layouts](#creating-multiple-root-layouts) - [Adding a layout to a subset of routes in a common segment](#opting-specific-segments-into-a-layout) -### `src` directory +### `src` folder -Next.js supports storing application code (including `app`) inside an optional [`src` directory](/docs/app/building-your-application/configuring/src-directory). This separates application code from project configuration files which mostly live in the root of a project. +Next.js supports storing application code (including `app`) inside an optional [`src` folder](/docs/app/api-reference/file-conventions/src-folder). This separates application code from project configuration files which mostly live in the root of a project. An example folder structure with the `src` directory -### Examples +## Examples The following section lists a very high-level overview of common strategies. The simplest takeaway is to choose a strategy that works for you and your team and be consistent across the project. > **Good to know**: In our examples below, we're using `components` and `lib` folders as generalized placeholders, their naming has no special framework significance and your projects might use other folders like `ui`, `utils`, `hooks`, `styles`, etc. -#### Store project files outside of `app` +### Store project files outside of `app` This strategy stores all application code in shared folders in the **root of your project** and keeps the `app` directory purely for routing purposes. @@ -315,7 +315,7 @@ This strategy stores all application code in shared folders in the **root of you height="849" /> -#### Store project files in top-level folders inside of `app` +### Store project files in top-level folders inside of `app` This strategy stores all application code in shared folders in the **root of the `app` directory**. @@ -327,7 +327,7 @@ This strategy stores all application code in shared folders in the **root of the height="849" /> -#### Split project files by feature or route +### Split project files by feature or route This strategy stores globally shared application code in the root `app` directory and **splits** more specific application code into the route segments that use them. @@ -375,7 +375,7 @@ To opt specific routes into a layout, create a new route group (e.g. `(shop)`) a ### Opting for loading skeletons on a specific route -To apply a [loading skeleton](/docs/app/building-your-application/routing/loading-ui-and-streaming) via a `loading.js` file to a specific route, create a new route group (e.g., `/(overview)`) and then move your `loading.tsx` inside that route group. +To apply a [loading skeleton](/docs/app/api-reference/file-conventions/loading) via a `loading.js` file to a specific route, create a new route group (e.g., `/(overview)`) and then move your `loading.tsx` inside that route group. Folder structure showing a loading.tsx and a page.tsx inside the route group` and `` tags need to be added to each root layout. 
+To create multiple [root layouts](/docs/app/api-reference/file-conventions/layout#root-layout), remove the top-level `layout.js` file, and add a `layout.js` file inside each route group. This is useful for partitioning an application into sections that have a completely different UI or experience. The `` and `` tags need to be added to each root layout. Route Groups with Multiple Root Layouts +}) { + const { slug } = await params + const post = await getPost(slug) + + return ( +
+    <div>
+      <h1>{post.title}</h1>
+      <p>{post.content}</p>
+    </div>
+ ) +} +``` + +```jsx filename="app/blog/[slug]/page.js" switcher +export default async function BlogPostPage({ params }) { + const { slug } = await params + const post = await getPost(slug) + + return ( +
+    <div>
+      <h1>{post.title}</h1>
+      <p>{post.content}</p>
+    </div>
+ ) +} +``` + +Learn more about [Dynamic Segments](/docs/app/api-reference/file-conventions/dynamic-routes). + ## Linking between pages -You can use the [`` component](/docs/app/api-reference/components/link) to navigate between routes. `` is a built-in Next.js component that extends the HTML `` tag to provide [prefetching](/docs/app/building-your-application/routing/linking-and-navigating#2-prefetching) and [client-side navigation](/docs/app/building-your-application/routing/linking-and-navigating#5-soft-navigation). +You can use the [`` component](/docs/app/api-reference/components/link) to navigate between routes. `` is a built-in Next.js component that extends the HTML `` tag to provide [prefetching](/docs/app/getting-started/linking-and-navigating#prefetching) and [client-side navigation](/docs/app/getting-started/linking-and-navigating#client-side-transitions). For example, to generate a list of blog posts, import `` from `next/link` and pass a `href` prop to the component: @@ -248,4 +289,4 @@ export default async function Post({ post }) { } ``` -`` is the primary and recommended way to navigate between routes in your Next.js application. However, you can also use the [`useRouter` hook](/docs/app/api-reference/functions/use-router) for more advanced navigation. +> **Good to know**: `` is the primary way to navigate between routes in Next.js. You can also use the [`useRouter` hook](/docs/app/api-reference/functions/use-router) for more advanced navigation. diff --git a/docs/01-app/01-getting-started/04-images-and-fonts.mdx b/docs/01-app/01-getting-started/04-images-and-fonts.mdx deleted file mode 100644 index 9d4e33ad57fb7..0000000000000 --- a/docs/01-app/01-getting-started/04-images-and-fonts.mdx +++ /dev/null @@ -1,357 +0,0 @@ ---- -title: How to optimize images and fonts -nav_title: Images and Fonts -description: Learn how to optimize images and fonts. -related: - title: API Reference - description: Learn more about the features mentioned in this page by reading the API Reference. - links: - - app/api-reference/components/font - - app/api-reference/components/image ---- - -Next.js comes with automatic image and font optimization. This page will guide you through how to start using them. - -## Optimizing images - -The Next.js [``](/docs/app/building-your-application/optimizing/images) component extends the HTML `` element to provide: - -- **Size optimization:** Automatically serving correctly sized images for each device, using modern image formats like WebP. -- **Visual stability:** Preventing [layout shift](https://web.dev/articles/cls) automatically when images are loading. -- **Faster page loads:** Only loading images when they enter the viewport using native browser lazy loading, with optional blur-up placeholders. -- **Asset flexibility:** Resizing images on-demand, even images stored on remote servers. - -To start using ``, import it from `next/image` and render it within your component. - -```tsx filename="app/page.tsx" switcher -import Image from 'next/image' - -export default function Page() { - return -} -``` - -```jsx filename="app/page.js" switcher -import Image from 'next/image' - -export default function Page() { - return -} -``` - -The `src` property can be a [local](#local-images) or [remote](#remote-images) image. - -### Local images - -You can store static files, like images and fonts, under a folder called `public` in the root directory. Files inside `public` can then be referenced by your code starting from the base URL (`/`). 
- -Folder structure showing app and public folders - -```tsx filename="app/page.tsx" switcher -import Image from 'next/image' -import profilePic from './me.png' - -export default function Page() { - return ( - Picture of the author - ) -} -``` - -```jsx filename="app/page.js" switcher -import Image from 'next/image' -import profilePic from './me.png' - -export default function Page() { - return ( - Picture of the author - ) -} -``` - -Next.js will automatically determine the intrinsic [`width`](/docs/app/api-reference/components/image#width) and [`height`](/docs/app/api-reference/components/image#height) of your image based on the imported file. These values are used to determine the image ratio and prevent [Cumulative Layout Shift](https://web.dev/articles/cls) while your image is loading. - -### Remote images - -To use a remote image, you can provide a URL string for the `src` property. - -```tsx filename="app/page.tsx" switcher -import Image from 'next/image' - -export default function Page() { - return ( - Picture of the author - ) -} -``` - -```jsx filename="app/page.js" switcher -import Image from 'next/image' - -export default function Page() { - return ( - Picture of the author - ) -} -``` - -Since Next.js does not have access to remote files during the build process, you'll need to provide the [`width`](/docs/app/api-reference/components/image#width), [`height`](/docs/app/api-reference/components/image#height) and optional [`blurDataURL`](/docs/app/api-reference/components/image#blurdataurl) props manually. The `width` and `height` are used to infer the correct aspect ratio of image and avoid layout shift from the image loading in. - -To safely allow images from remote servers, you need to define a list of supported URL patterns in [`next.config.js`](/docs/app/api-reference/config/next-config-js). Be as specific as possible to prevent malicious usage. For example, the following configuration will only allow images from a specific AWS S3 bucket: - -```ts filename="next.config.ts" switcher -import { NextConfig } from 'next' - -const config: NextConfig = { - images: { - remotePatterns: [ - { - protocol: 'https', - hostname: 's3.amazonaws.com', - port: '', - pathname: '/my-bucket/**', - search: '', - }, - ], - }, -} - -export default config -``` - -```js filename="next.config.js" switcher -module.exports = { - images: { - remotePatterns: [ - { - protocol: 'https', - hostname: 's3.amazonaws.com', - port: '', - pathname: '/my-bucket/**', - search: '', - }, - ], - }, -} -``` - -## Optimizing fonts - -The [`next/font`](/docs/app/api-reference/components/font) module automatically optimizes your fonts and removes external network requests for improved privacy and performance. - -It includes **built-in self-hosting** for any font file. This means you can optimally load web fonts with no layout shift. - -To start using `next/font`, import it from [`next/font/local`](#local-fonts) or [`next/font/google`](#google-fonts), call it as a function with the appropriate options, and set the `className` of the element you want to apply the font to. 
For example: - -```tsx filename="app/layout.tsx" highlight={1,3-5,9} switcher -import { Geist } from 'next/font/google' - -const geist = Geist({ - subsets: ['latin'], -}) - -export default function Layout({ children }: { children: React.ReactNode }) { - return ( - - {children} - - ) -} -``` - -```jsx filename="app/layout.js" highlight={1,3-5,9} switcher -import { Geist } from 'next/font/google' - -const geist = Geist({ - subsets: ['latin'], -}) - -export default function Layout({ children }) { - return ( - - {children} - - ) -} -``` - -### Google fonts - -You can automatically self-host any Google Font. Fonts are included in the deployment and served from the same domain as your deployment, meaning no requests are sent to Google by the browser when the user visits your site. - -To start using a Google Font, import your chosen font from `next/font/google`: - -```tsx filename="app/layout.tsx" switcher -import { Geist } from 'next/font/google' - -const geist = Geist({ - subsets: ['latin'], -}) - -export default function RootLayout({ - children, -}: { - children: React.ReactNode -}) { - return ( - - {children} - - ) -} -``` - -```jsx filename="app/layout.js" switcher -import { Geist } from 'next/font/google' - -const geist = Geist({ - subsets: ['latin'], -}) - -export default function RootLayout({ children }) { - return ( - - {children} - - ) -} -``` - -We recommend using [variable fonts](https://fonts.google.com/variablefonts) for the best performance and flexibility. But if you can't use a variable font, you will need to specify a weight: - -```tsx filename="app/layout.tsx" highlight={4} switcher -import { Roboto } from 'next/font/google' - -const roboto = Roboto({ - weight: '400', - subsets: ['latin'], -}) - -export default function RootLayout({ - children, -}: { - children: React.ReactNode -}) { - return ( - - {children} - - ) -} -``` - -```jsx filename="app/layout.js" highlight={4} switcher -import { Roboto } from 'next/font/google' - -const roboto = Roboto({ - weight: '400', - subsets: ['latin'], -}) - -export default function RootLayout({ children }) { - return ( - - {children} - - ) -} -``` - -### Local fonts - -To use a local font, import your font from `next/font/local` and specify the [`src`](/docs/app/api-reference/components/font#src) of your local font file. 
- -```tsx filename="app/layout.tsx" switcher -import localFont from 'next/font/local' - -const myFont = localFont({ - src: './my-font.woff2', -}) - -export default function RootLayout({ - children, -}: { - children: React.ReactNode -}) { - return ( - - {children} - - ) -} -``` - -```jsx filename="app/layout.js" switcher -import localFont from 'next/font/local' - -const myFont = localFont({ - src: './my-font.woff2', -}) - -export default function RootLayout({ children }) { - return ( - - {children} - - ) -} -``` - -If you want to use multiple files for a single font family, `src` can be an array: - -```js -const roboto = localFont({ - src: [ - { - path: './Roboto-Regular.woff2', - weight: '400', - style: 'normal', - }, - { - path: './Roboto-Italic.woff2', - weight: '400', - style: 'italic', - }, - { - path: './Roboto-Bold.woff2', - weight: '700', - style: 'normal', - }, - { - path: './Roboto-BoldItalic.woff2', - weight: '700', - style: 'italic', - }, - ], -}) -``` diff --git a/docs/01-app/01-getting-started/04-linking-and-navigating.mdx b/docs/01-app/01-getting-started/04-linking-and-navigating.mdx new file mode 100644 index 0000000000000..b40833850611c --- /dev/null +++ b/docs/01-app/01-getting-started/04-linking-and-navigating.mdx @@ -0,0 +1,465 @@ +--- +title: Linking and Navigating +description: Learn how the built-in navigation optimizations work, including prefetching, prerendering, and client-side navigation, and how to optimize navigation for dynamic routes and slow networks. +related: + links: + - app/api-reference/components/link + - app/api-reference/file-conventions/loading + - app/guides/prefetching +--- + +In Next.js, routes are rendered on the server by default. This often means the client has to wait for a server response before a new route can be shown. Next.js comes with built-in [prefetching](#prefetching), [streaming](#streaming), and [client-side transitions](#client-side-transitions) ensuring navigation stays fast and responsive. + +This guide explains how navigation works in Next.js and how you can optimize it for [dynamic routes](#dynamic-routes-without-loadingtsx) and [slow networks](#slow-networks). + +## How navigation works + +To understand how navigation works in Next.js, it helps to be familiar with the following concepts: + +- [Server Rendering](#server-rendering) +- [Prefetching](#prefetching) +- [Streaming](#streaming) +- [Client-side transitions](#client-side-transitions) + +### Server Rendering + +In Next.js, [Layouts and Pages](/docs/app/getting-started/layouts-and-pages) are [React Server Components](https://react.dev/reference/rsc/server-components) by default. On initial and subsequent navigations, the [Server Component Payload](/docs/app/getting-started/server-and-client-components#how-do-server-and-client-components-work-in-nextjs) is generated on the server before being sent to the client. + +There are two types of server rendering, based on _when_ it happens: + +- **Static Rendering (or Prerendering)** happens at build time or during [revalidation](/docs/app/getting-started/caching-and-revalidating) and the result is cached. +- **Dynamic Rendering** happens at request time in response to a client request. + +The trade-off of server rendering is that the client must wait for the server to respond before the new route can be shown. Next.js addresses this delay by [prefetching](#prefetching) routes the user is likely to visit and performing [client-side transitions](#client-side-transitions). 
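To make the static/dynamic distinction above concrete, here is a minimal sketch (not part of this changeset) of a route that is rendered dynamically because it reads per-request data; a page with no request-time dependencies can instead be prerendered at build time:

```tsx
// Hypothetical example: reading cookies is a request-time API, so this route
// is rendered dynamically on each request instead of being prerendered.
import { cookies } from 'next/headers'

export default async function DashboardPage() {
  const cookieStore = await cookies()
  const theme = cookieStore.get('theme')?.value ?? 'light'
  return <main data-theme={theme}>Dashboard</main>
}
```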
+ +> **Good to know**: HTML is also generated for the initial visit. + +### Prefetching + +Prefetching is the process of loading a route in the background before the user navigates to it. This makes navigation between routes in your application feel instant, because by the time a user clicks on a link, the data to render the next route is already available client side. + +Next.js automatically prefetches routes linked with the [`` component](/docs/app/api-reference/components/link) when they enter the user's viewport. + +```tsx filename="app/layout.tsx" switcher +import Link from 'next/link' + +export default function Layout({ children }: { children: React.ReactNode }) { + return ( + + + + {children} + + + ) +} +``` + +```jsx filename="app/layout.js" switcher +import Link from 'next/link' + +export default function Layout() { + return ( + + + + {children} + + + ) +} +``` + +How much of the route is prefetched depends on whether it's static or dynamic: + +- **Static Route**: the full route is prefetched. +- **Dynamic Route**: prefetching is skipped, or the route is partially prefetched if [`loading.tsx`](/docs/app/api-reference/file-conventions/loading) is present. + +By skipping or partially prefetching dynamic routes, Next.js avoids unnecessary work on the server for routes the users may never visit. However, waiting for a server response before navigation can give the users the impression that the app is not responding. + +Server Rendering without Streaming + +To improve the navigation experience to dynamic routes, you can use [streaming](#streaming). + +### Streaming + +Streaming allows the server to send parts of a dynamic route to the client as soon as they're ready, rather than waiting for the entire route to be rendered. This means users see something sooner, even if parts of the page are still loading. + +For dynamic routes, it means they can be **partially prefetched**. That is, shared layouts and loading skeletons can be requested ahead of time. + +How Server Rendering with Streaming Works + +To use streaming, create a `loading.tsx` in your route folder: + +loading.js special file + +```tsx filename="app/dashboard/loading.tsx" switcher +export default function Loading() { + // Add fallback UI that will be shown while the route is loading. + return +} +``` + +```jsx filename="app/dashboard/loading.js" switcher +export default function Loading() { + // Add fallback UI that will be shown while the route is loading. + return +} +``` + +Behind the scenes, Next.js will automatically wrap the `page.tsx` contents in a `` boundary. The prefetched fallback UI will be shown while the route is loading, and swapped for the actual content once ready. + +> **Good to know**: You can also use [``](https://react.dev/reference/react/Suspense) to create loading UI for nested components. + +Benefits of `loading.tsx`: + +- Immediate navigation and visual feedback for the user. +- Shared layouts remain interactive and navigation is interruptible. +- Improved Core Web Vitals: [TTFB](https://web.dev/articles/ttfb), [FCP](https://web.dev/articles/fcp), and [TTI](https://web.dev/articles/tti). + +To further improve the navigation experience, Next.js performs a [client-side transition](#client-side-transitions) with the `` component. + +### Client-side transitions + +Traditionally, navigation to a server-rendered page triggers a full page load. This clears state, resets scroll position, and blocks interactivity. + +Next.js avoids this with client-side transitions using the `` component. 
Instead of reloading the page, it updates the content dynamically by: + +- Keeping any shared layouts and UI. +- Replacing the current page with the prefetched loading state or a new page if available. + +Client-side transitions are what makes a server-rendered apps _feel_ like client-rendered apps. And when paired with [prefetching](#prefetching) and [streaming](#streaming), it enables fast transitions, even for dynamic routes. + +## What can make transitions slow? + +These Next.js optimizations make navigation fast and responsive. However, under certain conditions, transitions can still _feel_ slow. Here are some common causes and how to improve the user experience: + +### Dynamic routes without `loading.tsx` + +When navigating to a dynamic route, the client must wait for the server response before showing the result. This can give the users the impression that the app is not responding. + +We recommend adding `loading.tsx` to dynamic routes to enable partial prefetching, trigger immediate navigation, and display a loading UI while the route renders. + +```tsx filename="app/blog/[slug]/loading.tsx" switcher +export default function Loading() { + return +} +``` + +```jsx filename="app/blog/[slug]/loading.js" switcher +export default function Loading() { + return +} +``` + +> **Good to know**: In development mode, you can use the Next.js Devtools to identify if the route is static or dynamic. See [`devIndicators`](/docs/app/api-reference/config/next-config-js/devIndicators) for more information. + +### Dynamic segments without `generateStaticParams` + +If a [dynamic segment](/docs/app/api-reference/file-conventions/dynamic-routes) could be prerendered but isn't because it's missing [`generateStaticParams`](/docs/app/api-reference/functions/generate-static-params), the route will fallback to dynamic rendering at request time. + +Ensure the route is statically generated at build time by adding `generateStaticParams`: + +```tsx filename="app/blog/[slug]/page.tsx" switcher +export async function generateStaticParams() { + const posts = await fetch('https://.../posts').then((res) => res.json()) + + return posts.map((post) => ({ + slug: post.slug, + })) +} + +export default async function Page({ + params, +}: { + params: Promise<{ slug: string }> +}) { + const { slug } = await params + // ... +} +``` + +```jsx filename="app/blog/[slug]/page.js" switcher +export async function generateStaticParams() { + const posts = await fetch('https://.../posts').then((res) => res.json()) + + return posts.map((post) => ({ + slug: post.slug, + })) + +export default async function Page({ params }) { + const { slug } = await params + // ... +} +``` + +### Slow networks + +On slow or unstable networks, prefetching may not finish before the user clicks a link. This can affect both static and dynamic routes. In these cases, the `loading.js` fallback may not appear immediately because it hasn't been prefetched yet. + +To improve perceived performance, you can use the [`useLinkStatus` hook](/docs/app/api-reference/functions/use-link-status) to show inline visual feedback to the user (like spinners or text glimmers on the link) while a transition is in progress. + +```tsx filename="app/ui/loading-indicator.tsx" switcher +'use client' + +import { useLinkStatus } from 'next/link' + +export default function LoadingIndicator() { + const { pending } = useLinkStatus() + return pending ? ( +
+ ) : null +} +``` + +```jsx filename="app/ui/loading-indicator.js" switcher +'use client' + +import { useLinkStatus } from 'next/link' + +export default function LoadingIndicator() { + const { pending } = useLinkStatus() + return pending ? ( +
+ ) : null +} +``` + +You can "debounce" the loading indicator by adding an initial animation delay (e.g. 100ms) and starting the animation as invisible (e.g. `opacity: 0`). This means the loading indicator will only be shown if the navigation takes longer than the specified delay. + +```css +.spinner { + /* ... */ + opacity: 0; + animation: + fadeIn 500ms 100ms forwards, + rotate 1s linear infinite; +} + +@keyframes fadeIn { + from { + opacity: 0; + } + to { + opacity: 1; + } +} + +@keyframes rotate { + to { + transform: rotate(360deg); + } +} +``` + +> **Good to know**: You can use other visual feedback patterns like a progress bar. View an example [here](https://github.com/vercel/react-transition-progress). + +### Disabling prefetching + +You can opt out of prefetching by setting the `prefetch` prop to `false` on the `` component. This is useful to avoid unnecessary usage of resources when rendering large lists of links (e.g. an infinite scroll table). + +```tsx + + Blog + +``` + +However, disabling prefetching comes with trade-offs: + +- **Static routes** will only be fetched when the user clicks the link. +- **Dynamic routes** will need to be rendered on the server first before the client can navigate to it. + +To reduce resource usage without fully disabling prefetch, you can prefetch only on hover. This limits prefetching to routes the user is more _likely_ to visit, rather than all links in the viewport. + +```tsx filename="app/ui/hover-prefetch-link.tsx" switcher +'use client' + +import Link from 'next/link' +import { useState } from 'react' + +function HoverPrefetchLink({ + href, + children, +}: { + href: string + children: React.ReactNode +}) { + const [active, setActive] = useState(false) + + return ( + setActive(true)} + > + {children} + + ) +} +``` + +```jsx filename="app/ui/hover-prefetch-link.js" switcher +'use client' + +import Link from 'next/link' +import { useState } from 'react' + +function HoverPrefetchLink({ href, children }) { + const [active, setActive] = useState(false) + + return ( + setActive(true)} + > + {children} + + ) +} +``` + +### Hydration not completed + +`` is a Client Component and must be hydrated before it can prefetch routes. On the initial visit, large JavaScript bundles can delay hydration, preventing prefetching from starting right away. + +React mitigates this with Selective Hydration and you can further improve this by: + +- Using the [`@next/bundle-analyzer`](/docs/app/guides/package-bundling#analyzing-javascript-bundles) plugin to identify and reduce bundle size by removing large dependencies. +- Moving logic from the client to the server where possible. See the [Server and Client Components](/docs/app/getting-started/server-and-client-components) docs for guidance. + +## Examples + +### Native History API + +Next.js allows you to use the native [`window.history.pushState`](https://developer.mozilla.org/en-US/docs/Web/API/History/pushState) and [`window.history.replaceState`](https://developer.mozilla.org/en-US/docs/Web/API/History/replaceState) methods to update the browser's history stack without reloading the page. + +`pushState` and `replaceState` calls integrate into the Next.js Router, allowing you to sync with [`usePathname`](/docs/app/api-reference/functions/use-pathname) and [`useSearchParams`](/docs/app/api-reference/functions/use-search-params). + +#### `window.history.pushState` + +Use it to add a new entry to the browser's history stack. The user can navigate back to the previous state. 
For example, to sort a list of products: + +```tsx fileName="app/ui/sort-products.tsx" switcher +'use client' + +import { useSearchParams } from 'next/navigation' + +export default function SortProducts() { + const searchParams = useSearchParams() + + function updateSorting(sortOrder: string) { + const params = new URLSearchParams(searchParams.toString()) + params.set('sort', sortOrder) + window.history.pushState(null, '', `?${params.toString()}`) + } + + return ( + <> + + + + ) +} +``` + +```jsx fileName="app/ui/sort-products.js" switcher +'use client' + +import { useSearchParams } from 'next/navigation' + +export default function SortProducts() { + const searchParams = useSearchParams() + + function updateSorting(sortOrder) { + const params = new URLSearchParams(searchParams.toString()) + params.set('sort', sortOrder) + window.history.pushState(null, '', `?${params.toString()}`) + } + + return ( + <> + + + + ) +} +``` + +#### `window.history.replaceState` + +Use it to replace the current entry on the browser's history stack. The user is not able to navigate back to the previous state. For example, to switch the application's locale: + +```tsx fileName="app/ui/locale-switcher.tsx" switcher +'use client' + +import { usePathname } from 'next/navigation' + +export function LocaleSwitcher() { + const pathname = usePathname() + + function switchLocale(locale: string) { + // e.g. '/en/about' or '/fr/contact' + const newPath = `/${locale}${pathname}` + window.history.replaceState(null, '', newPath) + } + + return ( + <> + + + + ) +} +``` + +```jsx fileName="app/ui/locale-switcher.js" switcher +'use client' + +import { usePathname } from 'next/navigation' + +export function LocaleSwitcher() { + const pathname = usePathname() + + function switchLocale(locale) { + // e.g. '/en/about' or '/fr/contact' + const newPath = `/${locale}${pathname}` + window.history.replaceState(null, '', newPath) + } + + return ( + <> + + + + ) +} +``` diff --git a/docs/01-app/01-getting-started/05-css.mdx b/docs/01-app/01-getting-started/05-css.mdx deleted file mode 100644 index b6d52b582ed39..0000000000000 --- a/docs/01-app/01-getting-started/05-css.mdx +++ /dev/null @@ -1,506 +0,0 @@ ---- -title: How to use CSS in your application -nav_title: CSS -description: Learn about the different ways to add CSS to your application, including CSS Modules, Global CSS, Tailwind CSS, and more. -related: - title: API Reference - description: Learn more about the features mentioned in this page by reading the API Reference. - links: - - app/api-reference/config/next-config-js/sassOptions - - architecture/nextjs-compiler ---- - -Next.js provides several ways to use CSS in your application, including: - -- [CSS Modules](#css-modules) -- [Global CSS](#global-css) -- [Tailwind CSS](#tailwind-css) -- [Sass](#sass) -- [CSS-in-JS](#css-in-js) -- [External Stylesheets](#external-stylesheets) - -This page will guide you through how to use each of these approaches. - -## CSS Modules - -CSS Modules locally scope CSS by generating unique class names. This allows you to use the same class in different files without worrying about naming collisions. - -To start using CSS Modules, create a new file with the extension `.module.css` and import it into any component inside the `app` directory: - -```css filename="app/blog/styles.module.css" -.blog { - padding: 24px; -} -``` - -```tsx filename="app/blog/page.tsx" switcher -import styles from './styles.module.css' - -export default function Page({ children }: { children: React.ReactNode }) { - return
<main className={styles.blog}>{children}</main>
-} -``` - -```jsx filename="app/blog/page.js" switcher -import styles from './styles.module.css' - -export default function Page({ children }) { - return
<main className={styles.blog}>{children}</main>
-} -``` - -## Global CSS - -You can use global CSS to apply styles across your application. - -To use global styles, create a `app/global.css` file and import it in the root layout to apply the styles to **every route** in your application: - -```css filename="app/global.css" -body { - padding: 20px 20px 60px; - max-width: 680px; - margin: 0 auto; -} -``` - -```tsx filename="app/layout.tsx" switcher -// These styles apply to every route in the application -import './global.css' - -export default function RootLayout({ - children, -}: { - children: React.ReactNode -}) { - return ( - - {children} - - ) -} -``` - -```jsx filename="app/layout.js" switcher -// These styles apply to every route in the application -import './global.css' - -export default function RootLayout({ children }) { - return ( - - {children} - - ) -} -``` - -> **Good to know:** Global styles can be imported into any layout, page, or component inside the `app` directory. However, since Next.js uses React's built-in support for stylesheets to integrate with Suspense, this currently does not remove stylesheets as you navigate between routes which can lead to conflicts. We recommend using global styles for _truly_ global CSS, and [CSS Modules](#css-modules) for scoped CSS. - -## Tailwind CSS - -[Tailwind CSS](https://tailwindcss.com/) is a utility-first CSS framework that integrates seamlessly with Next.js. - -### Installing Tailwind - -To start using Tailwind, install the necessary Tailwind CSS packages: - -```bash filename="Terminal" -npm install tailwindcss @tailwindcss/postcss postcss -``` - -### Configuring Tailwind - -Create a `postcss.config.mjs` file in the root of your project and add the `@tailwindcss/postcss` plugin to your PostCSS configuration: - -```js filename="postcss.config.mjs" highlight={4} -/** @type {import('tailwindcss').Config} */ -export default { - plugins: { - '@tailwindcss/postcss': {}, - }, -} -``` - -### Using Tailwind - -Add the [Tailwind directives](https://tailwindcss.com/docs/functions-and-directives#directives) to your [Global Stylesheet](#global-css): - -```css filename="app/globals.css" -@import 'tailwindcss'; -``` - -Then, import the styles in the [root layout](/docs/app/api-reference/file-conventions/layout#root-layouts): - -```tsx filename="app/layout.tsx" switcher -import type { Metadata } from 'next' -// These styles apply to every route in the application -import './globals.css' - -export const metadata: Metadata = { - title: 'Create Next App', - description: 'Generated by create next app', -} - -export default function RootLayout({ - children, -}: { - children: React.ReactNode -}) { - return ( - - {children} - - ) -} -``` - -```jsx filename="app/layout.js" switcher -// These styles apply to every route in the application -import './globals.css' - -export const metadata = { - title: 'Create Next App', - description: 'Generated by create next app', -} - -export default function RootLayout({ children }) { - return ( - - {children} - - ) -} -``` - -You can then start writing Tailwind's utility classes in your application. - -```tsx filename="app/page.tsx" switcher -export default function Page() { - return

<h1 className="text-3xl font-bold underline">Hello, Next.js!</h1>

-} -``` - -```jsx filename="app/page.js" switcher -export default function Page() { - return

<h1 className="text-3xl font-bold underline">Hello, Next.js!</h1>

-} -``` - -## Sass - -Next.js integrates with [Sass](https://sass-lang.com/) using both the [`.scss`](https://sass-lang.com/documentation/syntax/#scss) and [`.sass`](https://sass-lang.com/documentation/syntax#the-indented-syntax) extensions and syntax. - -You can also use component-level Sass via [CSS Modules](#css-modules) and the `.module.scss`or `.module.sass` extension. - -### Installing Sass - -To start using Sass, install the `sass` package: - -```bash filename="Terminal" -npm install --save-dev sass -``` - -### Customizing Sass options - -If you want to configure your Sass options, use the [`sassOptions`](/docs/app/api-reference/config/next-config-js/sassOptions) option in `next.config.js`. - -```ts filename="next.config.ts" switcher -import type { NextConfig } from 'next' - -const nextConfig: NextConfig = { - sassOptions: { - additionalData: `$var: red;`, - }, -} - -export default nextConfig -``` - -```js filename="next.config.mjs" switcher -/** @type {import('next').NextConfig} */ - -const nextConfig = { - sassOptions: { - additionalData: `$var: red;`, - }, -} - -export default nextConfig -``` - -## CSS-in-JS - -> **Warning:** CSS-in-JS libraries which require runtime JavaScript are not currently supported in React Server Components. Using CSS-in-JS with newer React features like Server Components and Streaming requires library authors to support the latest version of React. - -The following libraries are supported in **Client Components** in the `app` directory (alphabetical): - -- [`ant-design`](https://ant.design/docs/react/use-with-next#using-app-router) -- [`chakra-ui`](https://chakra-ui.com/docs/get-started/frameworks/next-app) -- [`@fluentui/react-components`](https://react.fluentui.dev/?path=/docs/concepts-developer-server-side-rendering-next-js-appdir-setup--page) -- [`kuma-ui`](https://kuma-ui.com) -- [`@mui/material`](https://mui.com/material-ui/guides/next-js-app-router/) -- [`@mui/joy`](https://mui.com/joy-ui/integrations/next-js-app-router/) -- [`pandacss`](https://panda-css.com) -- [`styled-jsx`](#styled-jsx) -- [`styled-components`](#styled-components) -- [`stylex`](https://stylexjs.com) -- [`tamagui`](https://tamagui.dev/docs/guides/next-js#server-components) -- [`tss-react`](https://tss-react.dev/) -- [`vanilla-extract`](https://vanilla-extract.style) - -The following are currently working on support: - -- [`emotion`](https://github.com/emotion-js/emotion/issues/2928) - -If you want to style Server Components, we recommend using [CSS Modules](#css-modules) or other solutions that output CSS files, like [Tailwind CSS](#tailwind-css). - -### Configuring CSS-in-JS - -To configure CSS-in-JS, you need to: - -1. Create a **style registry** to collect all CSS rules in a render. -2. Use the `useServerInsertedHTML` hook to inject rules before any content that might use them. -3. Create a Client Component that wraps your app with the style registry during initial server-side rendering. 
- -#### `styled-jsx` - -To configure `styled-jsx` for your application, create a new registry: - -```tsx filename="app/registry.tsx" switcher -'use client' - -import React, { useState } from 'react' -import { useServerInsertedHTML } from 'next/navigation' -import { StyleRegistry, createStyleRegistry } from 'styled-jsx' - -export default function StyledJsxRegistry({ - children, -}: { - children: React.ReactNode -}) { - // Only create stylesheet once with lazy initial state - // x-ref: https://reactjs.org/docs/hooks-reference.html#lazy-initial-state - const [jsxStyleRegistry] = useState(() => createStyleRegistry()) - - useServerInsertedHTML(() => { - const styles = jsxStyleRegistry.styles() - jsxStyleRegistry.flush() - return <>{styles} - }) - - return {children} -} -``` - -```jsx filename="app/registry.js" switcher -'use client' - -import React, { useState } from 'react' -import { useServerInsertedHTML } from 'next/navigation' -import { StyleRegistry, createStyleRegistry } from 'styled-jsx' - -export default function StyledJsxRegistry({ children }) { - // Only create stylesheet once with lazy initial state - // x-ref: https://reactjs.org/docs/hooks-reference.html#lazy-initial-state - const [jsxStyleRegistry] = useState(() => createStyleRegistry()) - - useServerInsertedHTML(() => { - const styles = jsxStyleRegistry.styles() - jsxStyleRegistry.flush() - return <>{styles} - }) - - return {children} -} -``` - -Then, wrap your [root layout](/docs/app/api-reference/file-conventions/layout#root-layouts) with the registry: - -```tsx filename="app/layout.tsx" switcher -import StyledJsxRegistry from './registry' - -export default function RootLayout({ - children, -}: { - children: React.ReactNode -}) { - return ( - - - {children} - - - ) -} -``` - -```jsx filename="app/layout.js" switcher -import StyledJsxRegistry from './registry' - -export default function RootLayout({ children }) { - return ( - - - {children} - - - ) -} -``` - -#### `styled-components` - -To use `styled-components`, enable it in `next.config.js`: - -```ts filename="next.config.ts" switcher -import type { NextConfig } from 'next' - -const nextConfig: NextConfig = { - compiler: { - styledComponents: true, - }, -} - -export default nextConfig -``` - -```js filename="next.config.mjs" switcher -/** @type {import('next').NextConfig} */ - -const nextConfig = { - compiler: { - styledComponents: true, - }, -} - -export default nextConfig -``` - -Then, use the `styled-components` API to create a global registry component to collect all CSS style rules generated during a render, and a function to return those rules. Then use the `useServerInsertedHTML` hook to inject the styles collected in the registry into the `` HTML tag in the root layout. 
- -```tsx filename="lib/registry.tsx" switcher -'use client' - -import React, { useState } from 'react' -import { useServerInsertedHTML } from 'next/navigation' -import { ServerStyleSheet, StyleSheetManager } from 'styled-components' - -export default function StyledComponentsRegistry({ - children, -}: { - children: React.ReactNode -}) { - // Only create stylesheet once with lazy initial state - // x-ref: https://reactjs.org/docs/hooks-reference.html#lazy-initial-state - const [styledComponentsStyleSheet] = useState(() => new ServerStyleSheet()) - - useServerInsertedHTML(() => { - const styles = styledComponentsStyleSheet.getStyleElement() - styledComponentsStyleSheet.instance.clearTag() - return <>{styles} - }) - - if (typeof window !== 'undefined') return <>{children} - - return ( - - {children} - - ) -} -``` - -```jsx filename="lib/registry.js" switcher -'use client' - -import React, { useState } from 'react' -import { useServerInsertedHTML } from 'next/navigation' -import { ServerStyleSheet, StyleSheetManager } from 'styled-components' - -export default function StyledComponentsRegistry({ children }) { - // Only create stylesheet once with lazy initial state - // x-ref: https://reactjs.org/docs/hooks-reference.html#lazy-initial-state - const [styledComponentsStyleSheet] = useState(() => new ServerStyleSheet()) - - useServerInsertedHTML(() => { - const styles = styledComponentsStyleSheet.getStyleElement() - styledComponentsStyleSheet.instance.clearTag() - return <>{styles} - }) - - if (typeof window !== 'undefined') return <>{children} - - return ( - - {children} - - ) -} -``` - -Wrap the `children` of the root layout with the style registry component: - -```tsx filename="app/layout.tsx" switcher -import StyledComponentsRegistry from './lib/registry' - -export default function RootLayout({ - children, -}: { - children: React.ReactNode -}) { - return ( - - - {children} - - - ) -} -``` - -```jsx filename="app/layout.js" switcher -import StyledComponentsRegistry from './lib/registry' - -export default function RootLayout({ children }) { - return ( - - - {children} - - - ) -} -``` - -## External stylesheets - -Stylesheets published by external packages can be imported anywhere in the `app` directory, including colocated components: - -```tsx filename="app/layout.tsx" switcher -import 'bootstrap/dist/css/bootstrap.css' - -export default function RootLayout({ - children, -}: { - children: React.ReactNode -}) { - return ( - - {children} - - ) -} -``` - -```jsx filename="app/layout.js" switcher -import 'bootstrap/dist/css/bootstrap.css' - -export default function RootLayout({ children }) { - return ( - - {children} - - ) -} -``` - -External stylesheets must be directly imported from an npm package or downloaded and colocated with your codebase. You cannot use ``. diff --git a/docs/01-app/01-getting-started/05-server-and-client-components.mdx b/docs/01-app/01-getting-started/05-server-and-client-components.mdx new file mode 100644 index 0000000000000..102934b34ff6b --- /dev/null +++ b/docs/01-app/01-getting-started/05-server-and-client-components.mdx @@ -0,0 +1,583 @@ +--- +title: Server and Client Components +description: Learn how you can use React Server and Client Components to render parts of your application on the server or the client. +related: + title: Next Steps + description: Learn more about the APIs mentioned in this page. 
+ links: + - app/api-reference/directives/use-client +--- + +By default, layouts and pages are [Server Components](https://react.dev/reference/rsc/server-components), which lets you fetch data and render parts of your UI on the server, optionally cache the result, and stream it to the client. When you need interactivity or browser APIs, you can use [Client Components](https://react.dev/reference/rsc/use-client) to layer in functionality. + +This page explains how Server and Client Components work in Next.js and when to use them, with examples of how to compose them together in your application. + +## When to use Server and Client Components? + +The client and server environments have different capabilities. Server and Client components allow you to run logic in each environment depending on your use case. + +Use **Client Components** when you need: + +- [State](https://react.dev/learn/managing-state) and [event handlers](https://react.dev/learn/responding-to-events). E.g. `onClick`, `onChange`. +- [Lifecycle logic](https://react.dev/learn/lifecycle-of-reactive-effects). E.g. `useEffect`. +- Browser-only APIs. E.g. `localStorage`, `window`, `Navigator.geolocation`, etc. +- [Custom hooks](https://react.dev/learn/reusing-logic-with-custom-hooks). + +Use **Server Components** when you need: + +- Fetch data from databases or APIs close to the source. +- Use API keys, tokens, and other secrets without exposing them to the client. +- Reduce the amount of JavaScript sent to the browser. +- Improve the [First Contentful Paint (FCP)](https://web.dev/fcp/), and stream content progressively to the client. + +For example, the `` component is a Server Component that fetches data about a post, and passes it as props to the `` which handles client-side interactivity. + +```tsx filename="app/[id]/page.tsx" highlight={1,12} switcher +import LikeButton from '@/app/ui/like-button' +import { getPost } from '@/lib/data' + +export default async function Page({ params }: { params: { id: string } }) { + const post = await getPost(params.id) + + return ( +
+    <div>
+      <main>
+        <h1>{post.title}</h1>
+        {/* ... */}
+        <LikeButton likes={post.likes} />
+      </main>
+    </div>
+ ) +} +``` + +```jsx filename="app/[id]/page.js" highlight={1,12} switcher +import LikeButton from '@/app/ui/like-button' +import { getPost } from '@/lib/data' + +export default async function Page({ params }) { + const post = await getPost(params.id) + + return ( +
+    <div>
+      <main>
+        <h1>{post.title}</h1>
+        {/* ... */}
+        <LikeButton likes={post.likes} />
+      </main>
+    </div>
+ ) +} +``` + +```tsx filename="app/ui/like-button.tsx" highlight={1} switcher +'use client' + +import { useState } from 'react' + +export default function LikeButton({ likes }: { likes: number }) { + // ... +} +``` + +```jsx filename="app/ui/like-button.js" highlight={1} switcher +'use client' + +import { useState } from 'react' + +export default function LikeButton({ likes }) { + // ... +} +``` + +## How do Server and Client Components work in Next.js? + +### On the server + +On the server, Next.js uses React's APIs to orchestrate rendering. The rendering work is split into chunks, by individual route segments ([layouts and pages](/docs/app/getting-started/layouts-and-pages)): + +- **Server Components** are rendered into a special data format called the React Server Component Payload (RSC Payload). +- **Client Components** and the RSC Payload are used to [prerender](/docs/app/getting-started/partial-prerendering#how-does-partial-prerendering-work) HTML. + +> **What is the React Server Component Payload (RSC)?** +> +> The RSC Payload is a compact binary representation of the rendered React Server Components tree. It's used by React on the client to update the browser's DOM. The RSC Payload contains: +> +> - The rendered result of Server Components +> - Placeholders for where Client Components should be rendered and references to their JavaScript files +> - Any props passed from a Server Component to a Client Component + +### On the client (first load) + +Then, on the client: + +1. **HTML** is used to immediately show a fast non-interactive preview of the route to the user. +2. **RSC Payload** is used to reconcile the Client and Server Component trees. +3. **JavaScript** is used to hydrate Client Components and make the application interactive. + +> **What is hydration?** +> +> Hydration is React's process for attaching [event handlers](https://react.dev/learn/responding-to-events) to the DOM, to make the static HTML interactive. + +### Subsequent Navigations + +On subsequent navigations: + +- The **RSC Payload** is prefetched and cached for instant navigation. +- **Client Components** are rendered entirely on the client, without the server-rendered HTML. + +## Examples + +### Using Client Components + +You can create a Client Component by adding the [`"use client"`](https://react.dev/reference/react/use-client) directive at the top of the file, above your imports. + +```tsx filename="app/ui/counter.tsx" highlight={1} switcher +'use client' + +import { useState } from 'react' + +export default function Counter() { + const [count, setCount] = useState(0) + + return ( +
+    <div>
+      <p>{count} likes</p>
+      <button onClick={() => setCount(count + 1)}>Click me</button>
+    </div>
+ ) +} +``` + +```jsx filename="app/ui/counter.tsx" highlight={1} switcher +'use client' + +import { useState } from 'react' + +export default function Counter() { + const [count, setCount] = useState(0) + + return ( +
+    <div>
+      <p>{count} likes</p>
+      <button onClick={() => setCount(count + 1)}>Click me</button>
+    </div>
+ ) +} +``` + +`"use client"` is used to declare a **boundary** between the Server and Client module graphs (trees). + +Once a file is marked with `"use client"`, **all its imports and child components are considered part of the client bundle**. This means you don't need to add the directive to every component that is intended for the client. + +### Reducing JS bundle size + +To reduce the size of your client JavaScript bundles, add `'use client'` to specific interactive components instead of marking large parts of your UI as Client Components. + +For example, the `` component contains mostly static elements like a logo and navigation links, but includes an interactive search bar. `` is interactive and needs to be a Client Component, however, the rest of the layout can remain a Server Component. + +```tsx filename="app/layout.tsx" highlight={12} switcher +// Client Component +import Search from './search' +// Server Component +import Logo from './logo' + +// Layout is a Server Component by default +export default function Layout({ children }: { children: React.ReactNode }) { + return ( + <> + +
<main>{children}</main>
+ + ) +} +``` + +```jsx filename="app/layout.js" highlight={12} switcher +// Client Component +import Search from './search' +// Server Component +import Logo from './logo' + +// Layout is a Server Component by default +export default function Layout({ children }) { + return ( + <> + +
<main>{children}</main>
+ + ) +} +``` + +```tsx filename="app/ui/search.tsx" highlight={1} switcher +'use client' + +export default function Search() { + // ... +} +``` + +```jsx filename="app/ui/search.js" highlight={1} switcher +'use client' + +export default function Search() { + // ... +} +``` + +### Passing data from Server to Client Components + +You can pass data from Server Components to Client Components using props. + +```tsx filename="app/[id]/page.tsx" highlight={1,7} switcher +import LikeButton from '@/app/ui/like-button' +import { getPost } from '@/lib/data' + +export default async function Page({ params }: { params: { id: string } }) { + const post = await getPost(params.id) + + return +} +``` + +```jsx filename="app/[id]/page.js" highlight={1,7} switcher +import LikeButton from '@/app/ui/like-button' +import { getPost } from '@/lib/data' + +export default async function Page({ params }) { + const post = await getPost(params.id) + + return +} +``` + +```tsx filename="app/ui/like-button.tsx" highlight={1} switcher +'use client' + +export default function LikeButton({ likes }: { likes: number }) { + // ... +} +``` + +```jsx filename="app/ui/like-button.js" highlight={1} switcher +'use client' + +export default function LikeButton({ likes }) { + // ... +} +``` + +Alternatively, you can stream data from a Server Component to a Client Component with the [`use` Hook](https://react.dev/reference/react/use). See an [example](/docs/app/getting-started/fetching-data#streaming-data-with-the-use-hook). + +> **Good to know**: Props passed to Client Components need to be [serializable](https://react.dev/reference/react/use-server#serializable-parameters-and-return-values) by React. + +### Interleaving Server and Client Components + +You can pass Server Components as a prop to a Client Component. This allows you to visually nest server-rendered UI within Client components. + +A common pattern is to use `children` to create a _slot_ in a ``. For example, a `` component that fetches data on the server, inside a `` component that uses client state to toggle visibility. + +```tsx filename="app/ui/modal.tsx" switcher +'use client' + +export default function Modal({ children }: { children: React.ReactNode }) { + return
<div className="modal">{children}</div>
+} +``` + +```jsx filename="app/ui/modal.js" switcher +'use client' + +export default function Modal({ children }) { + return
<div className="modal">{children}</div>
+} +``` + +Then, in a parent Server Component (e.g.``), you can pass a `` as the child of the ``: + +```tsx filename="app/page.tsx" highlight={7} switcher +import Modal from './ui/modal' +import Cart from './ui/cart' + +export default function Page() { + return ( + + + + ) +} +``` + +```jsx filename="app/page.js" highlight={7} switcher +import Modal from './ui/modal' +import Cart from './ui/cart' + +export default function Page() { + return ( + + + + ) +} +``` + +In this pattern, all Server Components will be rendered on the server ahead of time, including those as props. The resulting RSC payload will contain references of where Client Components should be rendered within the component tree. + +### Context providers + +[React context](https://react.dev/learn/passing-data-deeply-with-context) is commonly used to share global state like the current theme. However, React context is not supported in Server Components. + +To use context, create a Client Component that accepts `children`: + +```tsx filename="app/theme-provider.tsx" switcher +'use client' + +import { createContext } from 'react' + +export const ThemeContext = createContext({}) + +export default function ThemeProvider({ + children, +}: { + children: React.ReactNode +}) { + return {children} +} +``` + +```jsx filename="app/theme-provider.js" switcher +'use client' + +import { createContext } from 'react' + +export const ThemeContext = createContext({}) + +export default function ThemeProvider({ children }) { + return {children} +} +``` + +Then, import it into a Server Component (e.g. `layout`): + +```tsx filename="app/layout.tsx" switcher +import ThemeProvider from './theme-provider' + +export default function RootLayout({ + children, +}: { + children: React.ReactNode +}) { + return ( + + + {children} + + + ) +} +``` + +```jsx filename="app/layout.js" switcher +import ThemeProvider from './theme-provider' + +export default function RootLayout({ children }) { + return ( + + + {children} + + + ) +} +``` + +Your Server Component will now be able to directly render your provider, and all other Client Components throughout your app will be able to consume this context. + +> **Good to know**: You should render providers as deep as possible in the tree – notice how `ThemeProvider` only wraps `{children}` instead of the entire `` document. This makes it easier for Next.js to optimize the static parts of your Server Components. + +### Third-party components + +When using a third-party component that relies on client-only features, you can wrap it in a Client Component to ensure it works as expected. + +For example, the `` can be imported from the `acme-carousel` package. This component uses `useState`, but it doesn't yet have the `"use client"` directive. + +If you use `` within a Client Component, it will work as expected: + +```tsx filename="app/gallery.tsx" switcher +'use client' + +import { useState } from 'react' +import { Carousel } from 'acme-carousel' + +export default function Gallery() { + const [isOpen, setIsOpen] = useState(false) + + return ( +
+    <div>
+      <button onClick={() => setIsOpen(true)}>View pictures</button>
+      {/* Works, since Carousel is used within a Client Component */}
+      {isOpen && <Carousel />}
+    </div>
+ ) +} +``` + +```jsx filename="app/gallery.js" switcher +'use client' + +import { useState } from 'react' +import { Carousel } from 'acme-carousel' + +export default function Gallery() { + const [isOpen, setIsOpen] = useState(false) + + return ( +
+    <div>
+      <button onClick={() => setIsOpen(true)}>View pictures</button>
+      {/* Works, since Carousel is used within a Client Component */}
+      {isOpen && <Carousel />}
+    </div>
+ ) +} +``` + +However, if you try to use it directly within a Server Component, you'll see an error. This is because Next.js doesn't know `` is using client-only features. + +To fix this, you can wrap third-party components that rely on client-only features in your own Client Components: + +```tsx filename="app/carousel.tsx" switcher +'use client' + +import { Carousel } from 'acme-carousel' + +export default Carousel +``` + +```jsx filename="app/carousel.js" switcher +'use client' + +import { Carousel } from 'acme-carousel' + +export default Carousel +``` + +Now, you can use `` directly within a Server Component: + +```tsx filename="app/page.tsx" switcher +import Carousel from './carousel' + +export default function Page() { + return ( +
+    <div>
+      <p>View pictures</p>
+      {/* Works, since Carousel is a Client Component */}
+      <Carousel />
+    </div>
+ ) +} +``` + +```jsx filename="app/page.js" switcher +import Carousel from './carousel' + +export default function Page() { + return ( +
+    <div>
+      <p>View pictures</p>
+      {/* Works, since Carousel is a Client Component */}
+      <Carousel />
+    </div>
+ ) +} +``` + +> **Advice for Library Authors** +> +> If you’re building a component library, add the `"use client"` directive to entry points that rely on client-only features. This lets your users import components into Server Components without needing to create wrappers. +> +> It's worth noting some bundlers might strip out `"use client"` directives. You can find an example of how to configure esbuild to include the `"use client"` directive in the [React Wrap Balancer](https://github.com/shuding/react-wrap-balancer/blob/main/tsup.config.ts#L10-L13) and [Vercel Analytics](https://github.com/vercel/analytics/blob/main/packages/web/tsup.config.js#L26-L30) repositories. + +### Preventing environment poisoning + +JavaScript modules can be shared between both Server and Client Components modules. This means it's possible to accidentally import server-only code into the client. For example, consider the following function: + +```ts filename="lib/data.ts" switcher +export async function getData() { + const res = await fetch('https://external-service.com/data', { + headers: { + authorization: process.env.API_KEY, + }, + }) + + return res.json() +} +``` + +```js filename="lib/data.js" switcher +export async function getData() { + const res = await fetch('https://external-service.com/data', { + headers: { + authorization: process.env.API_KEY, + }, + }) + + return res.json() +} +``` + +This function contains an `API_KEY` that should never be exposed to the client. + +In Next.js, only environment variables prefixed with `NEXT_PUBLIC_` are included in the client bundle. If variables are not prefixed, Next.js replaces them with an empty string. + +As a result, even though `getData()` can be imported and executed on the client, it won't work as expected. + +To prevent accidental usage in Client Components, you can use the [`server-only` package](https://www.npmjs.com/package/server-only). + +Then, import the package into a file that contains server-only code: + +```js filename="lib/data.js" +import 'server-only' + +export async function getData() { + const res = await fetch('https://external-service.com/data', { + headers: { + authorization: process.env.API_KEY, + }, + }) + + return res.json() +} +``` + +Now, if you try to import the module into a Client Component, there will be a build-time error. + +The corresponding [`client-only` package](https://www.npmjs.com/package/client-only) can be used to mark modules that contain client-only logic like code that accesses the `window` object. + +In Next.js, installing `server-only` or `client-only` is **optional**. However, if your linting rules flag extraneous dependencies, you may install them to avoid issues. + +```bash package="npm" +npm install server-only +``` + +```bash package="yarn" +yarn add server-only +``` + +```bash package="pnpm" +pnpm add server-only +``` + +```bash package="bun" +bun add server-only +``` + +Next.js handles `server-only` and `client-only` imports internally to provide clearer error messages when a module is used in the wrong environment. The contents of these packages from NPM are not used by Next.js. + +Next.js also provides its own type declarations for `server-only` and `client-only`, for TypeScript configurations where [`noUncheckedSideEffectImports`](https://www.typescriptlang.org/tsconfig/#noUncheckedSideEffectImports) is active. 
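As a quick illustration of the `client-only` counterpart (a minimal sketch — the file name and helper below are hypothetical, not part of the examples above), a module that depends on browser APIs can import `client-only` so that accidentally importing it into a Server Component becomes a build-time error:

```ts filename="lib/viewport.ts"
import 'client-only'

// This module reads from `window`, so it can only run in the browser.
// The side-effect import of 'client-only' turns any import from a
// Server Component into a build-time error instead of a runtime crash.
export function getViewportWidth(): number {
  return window.innerWidth
}
```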
diff --git a/docs/01-app/01-getting-started/06-fetching-data.mdx b/docs/01-app/01-getting-started/06-fetching-data.mdx deleted file mode 100644 index c01577cfa2ed1..0000000000000 --- a/docs/01-app/01-getting-started/06-fetching-data.mdx +++ /dev/null @@ -1,354 +0,0 @@ ---- -title: How to fetch data and stream -nav_title: Fetching Data -description: Start fetching data and streaming content in your application. -related: - title: API Reference - description: Learn more about the features mentioned in this page by reading the API Reference. - links: - - app/api-reference/functions/fetch - - app/api-reference/file-conventions/loading ---- - -This page will walk you through how you can fetch data in [Server Components](#server-components) and [Client Components](#client-components). As well as how to [stream](#streaming) content that depends on data. - -## Fetching data - -### Server Components - -You can fetch data in Server Components using: - -1. The [`fetch` API](#with-the-fetch-api) -2. An [ORM or database](#with-an-orm-or-database) - -#### With the `fetch` API - -To fetch data with the `fetch` API, turn your component into an asynchronous function, and await the `fetch` call. For example: - -```tsx filename="app/blog/page.tsx" switcher -export default async function Page() { - const data = await fetch('https://api.vercel.app/blog') - const posts = await data.json() - return ( -
    - {posts.map((post) => ( -
  • {post.title}
  • - ))} -
- ) -} -``` - -```jsx filename="app/blog/page.js" switcher -export default async function Page() { - const data = await fetch('https://api.vercel.app/blog') - const posts = await data.json() - return ( -
    - {posts.map((post) => ( -
  • {post.title}
  • - ))} -
- ) -} -``` - -#### With an ORM or database - -Since Server Components are rendered on the server, you can safely make database queries using an ORM or database client. Turn your component into an asynchronous function, and await the call: - -```tsx filename="app/blog/page.tsx" switcher -import { db, posts } from '@/lib/db' - -export default async function Page() { - const allPosts = await db.select().from(posts) - return ( -
    - {allPosts.map((post) => ( -
  • {post.title}
  • - ))} -
- ) -} -``` - -```jsx filename="app/blog/page.js" switcher -import { db, posts } from '@/lib/db' - -export default async function Page() { - const allPosts = await db.select().from(posts) - return ( -
    - {allPosts.map((post) => ( -
  • {post.title}
  • - ))} -
- ) -} -``` - -### Client Components - -There are two ways to fetch data in Client Components, using: - -1. React's [`use` hook](https://react.dev/reference/react/use) -2. A community library like [SWR](https://swr.vercel.app/) or [React Query](https://tanstack.com/query/latest) - -#### With the `use` hook - -You can use React's [`use` hook](https://react.dev/reference/react/use) to [stream](#streaming) data from the server to client. Start by fetching data in your Server component, and pass the promise to your Client Component as prop: - -```tsx filename="app/blog/page.tsx" switcher -import Posts from '@/app/ui/posts -import { Suspense } from 'react' - -export default function Page() { - // Don't await the data fetching function - const posts = getPosts() - - return ( - Loading...
}> - - - ) -} -``` - -```jsx filename="app/blog/page.js" switcher -import Posts from '@/app/ui/posts -import { Suspense } from 'react' - -export default function Page() { - // Don't await the data fetching function - const posts = getPosts() - - return ( - Loading...
}> - -
- ) -} -``` - -Then, in your Client Component, use the `use` hook to read the promise: - -```tsx filename="app/ui/posts.tsx" switcher -'use client' -import { use } from 'react' - -export default function Posts({ - posts, -}: { - posts: Promise<{ id: string; title: string }[]> -}) { - const allPosts = use(posts) - - return ( -
    - {allPosts.map((post) => ( -
  • {post.title}
  • - ))} -
- ) -} -``` - -```jsx filename="app/ui/posts.js" switcher -'use client' -import { use } from 'react' - -export default function Posts({ posts }) { - const posts = use(posts) - - return ( -
    - {posts.map((post) => ( -
  • {post.title}
  • - ))} -
- ) -} -``` - -In the example above, you need to wrap the `` component in a [`` boundary](https://react.dev/reference/react/Suspense). This means the fallback will be shown while the promise is being resolved. Learn more about [streaming](#streaming). - -#### Community libraries - -You can use a community library like [SWR](https://swr.vercel.app/) or [React Query](https://tanstack.com/query/latest) to fetch data in Client Components. These libraries have their own semantics for caching, streaming, and other features. For example, with SWR: - -```tsx filename="app/blog/page.tsx" switcher -'use client' -import useSWR from 'swr' - -const fetcher = (url) => fetch(url).then((r) => r.json()) - -export default function BlogPage() { - const { data, error, isLoading } = useSWR( - 'https://api.vercel.app/blog', - fetcher - ) - - if (isLoading) return
Loading...
- if (error) return
Error: {error.message}
- - return ( -
    - {data.map((post: { id: string; title: string }) => ( -
  • {post.title}
  • - ))} -
- ) -} -``` - -```jsx filename="app/blog/page.js" switcher -'use client' -import useSWR from 'swr' - -const fetcher = (url) => fetch(url).then((r) => r.json()) - -export default function BlogPage() { - const { data, error, isLoading } = useSWR( - 'https://api.vercel.app/blog', - fetcher - ) - - if (isLoading) return
Loading...
- if (error) return
Error: {error.message}
- - return ( -
    - {data.map((post) => ( -
  • {post.title}
  • - ))} -
- ) -} -``` - -## Streaming - -> **Warning:** The content below assumes the [`dynamicIO` config option](/docs/app/api-reference/config/next-config-js/dynamicIO) is enabled in your application. The flag was introduced in Next.js 15 canary. - -When using `async/await` in Server Components, Next.js will opt into **dynamic rendering**. This means the data will be fetched and rendered on the server for every user request. If there are any slow data requests, the whole route will be blocked from rendering. - -To improve the initial load time and user experience, you can use streaming to break up the page's HTML into smaller chunks and progressively send those chunks from the server to the client. - -How Server Rendering with Streaming Works - -There are two ways you can implement streaming in your application: - -1. With the [`loading.js` file](#with-loadingjs) -2. With React's [`` component](#with-suspense) - -### With `loading.js` - -You can create a `loading.js` file in the same folder as your page to stream the **entire page** while the data is being fetched. For example, to stream `app/blog/page.js`, add the file inside the `app/blog` folder. - -Blog folder structure with loading.js file - -```tsx filename="app/blog/loading.tsx" switcher -export default function Loading() { - // Define the Loading UI here - return
Loading...
-} -``` - -```jsx filename="app/blog/loading.js" switcher -export default function Loading() { - // Define the Loading UI here - return
Loading...
-} -``` - -On navigation, the user will immediately see the layout and a [loading state](#creating-meaningful-loading-states) while the page is being rendered. The new content will then be automatically swapped in once rendering is complete. - -Loading UI - -Behind-the-scenes, `loading.js` will be nested inside `layout.js`, and will automatically wrap the `page.js` file and any children below in a `` boundary. - -loading.js overview - -This approach works well for route segments (layouts and pages), but for more granular streaming, you can use ``. - -### With `` - -`` allows you to be more granular about what parts of the page to stream. For example, you can immediately show any page content that falls outside of the `` boundary, and stream in the list of blog posts inside the boundary. - -```tsx filename="app/blog/page.tsx" switcher -import { Suspense } from 'react' -import BlogList from '@/components/BlogList' -import BlogListSkeleton from '@/components/BlogListSkeleton' - -export default function BlogPage() { - return ( -
- {/* This content will be sent to the client immediately */} -
-

Welcome to the Blog

-

Read the latest posts below.

-
-
- {/* Any content wrapped in a boundary will be streamed */} - }> - - -
-
- ) -} -``` - -```jsx filename="app/blog/page.js" switcher -import { Suspense } from 'react' -import BlogList from '@/components/BlogList' -import BlogListSkeleton from '@/components/BlogListSkeleton' - -export default function BlogPage() { - return ( -
- {/* This content will be sent to the client immediately */} -
-

Welcome to the Blog

-

Read the latest posts below.

-
-
- {/* Any content wrapped in a boundary will be streamed */} - }> - - -
-
- ) -} -``` - -### Creating meaningful loading states - -An instant loading state is fallback UI that is shown immediately to the user after navigation. For the best user experience, we recommend designing loading states that are meaningful and help users understand the app is responding. For example, you can use skeletons and spinners, or a small but meaningful part of future screens such as a cover photo, title, etc. - -In development, you can preview and inspect the loading state of your components using the [React Devtools](https://react.dev/learn/react-developer-tools). diff --git a/docs/01-app/01-getting-started/06-partial-prerendering.mdx b/docs/01-app/01-getting-started/06-partial-prerendering.mdx new file mode 100644 index 0000000000000..e6ca866d1f5ac --- /dev/null +++ b/docs/01-app/01-getting-started/06-partial-prerendering.mdx @@ -0,0 +1,280 @@ +--- +title: Partial Prerendering +description: Learn how to use Partial Prerendering and combine the benefits of static and dynamic rendering. +version: experimental +related: + title: Next Steps + description: Learn more about the config option for Partial Prerendering. + links: + - app/api-reference/config/next-config-js/ppr +--- + +Partial Prerendering (PPR) is a rendering strategy that allows you to combine static and dynamic content in the same route. This improves the initial page performance while still supporting personalized, dynamic data. + +Partially Prerendered Product Page showing static nav and product information, and dynamic cart and recommended products + +When a user visits a route: + +- The server sends a **shell** containing the static content, ensuring a fast initial load. +- The shell leaves **holes** for the dynamic content that will load in asynchronously. +- The dynamic holes are **streamed in parallel**, reducing the overall load time of the page. + +> **🎥 Watch:** Why PPR and how it works → [YouTube (10 minutes)](https://www.youtube.com/watch?v=MTcPrTIBkpA). + +## How does Partial Prerendering work? + +To understand Partial Prerendering, it helps to be familiar with the rendering strategies available in Next.js. + +### Static Rendering + +With Static Rendering, HTML is generated ahead of time—either at build time or through [revalidation](/docs/app/guides/incremental-static-regeneration). The result is cached and shared across users and requests. + +In Partial Prerendering, Next.js prerenders a **static shell** for a route. This can include the layout and any other components that don't depend on request-time data. + +### Dynamic Rendering + +With Dynamic Rendering, HTML is generated at **request time**. This allows you to serve personalized content based on request-time data. + +A component becomes dynamic if it uses the following APIs: + +- [`cookies`](/docs/app/api-reference/functions/cookies) +- [`headers`](/docs/app/api-reference/functions/headers) +- [`connection`](/docs/app/api-reference/functions/connection) +- [`draftMode`](/docs/app/api-reference/functions/draft-mode) +- [`searchParams` prop](/docs/app/api-reference/file-conventions/page#searchparams-optional) +- [`unstable_noStore`](/docs/app/api-reference/functions/unstable_noStore) +- [`fetch`](/docs/app/api-reference/functions/fetch) with `{ cache: 'no-store' }` + +In Partial Prerendering, using these APIs throws a special React error that informs Next.js the component cannot be statically rendered, causing a build error. You can use a [Suspense](#suspense) boundary to wrap your component to defer rendering until runtime. 
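As a small sketch of the idea (the endpoint and component name here are hypothetical), a component that opts out of caching with `fetch` becomes dynamic, so under Partial Prerendering it needs to be deferred behind a Suspense boundary, as described in the next section:

```tsx filename="app/ui/stock-price.tsx"
// Fetching with { cache: 'no-store' } reads data at request time,
// which makes this component dynamic under Partial Prerendering.
export async function StockPrice() {
  const res = await fetch('https://api.example.com/price', {
    cache: 'no-store',
  })
  const { price } = await res.json()

  // Render this component inside a <Suspense> boundary so the rest
  // of the page can still be part of the static shell.
  return <p>Current price: {price}</p>
}
```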
+ +### Suspense + +React [Suspense](https://react.dev/reference/react/Suspense) is used to defer rendering parts of your application until some condition is met. + +In Partial Prerendering, Suspense is used to mark **dynamic boundaries** in your component tree. + +At build time, Next.js prerenders the static content and the `fallback` UI. The dynamic content is **postponed** until the user requests the route. + +Wrapping a component in Suspense doesn't make the component itself dynamic (your API usage does), but rather Suspense is used as a boundary that encapsulates dynamic content and enable [streaming](#streaming) + +```jsx filename="app/page.js" +import { Suspense } from 'react' +import StaticComponent from './StaticComponent' +import DynamicComponent from './DynamicComponent' +import Fallback from './Fallback' + +export const experimental_ppr = true + +export default function Page() { + return ( + <> + + }> + + + + ) +} +``` + +### Streaming + +Streaming splits the route into chunks and progressively streams them to the client as they become ready. This allows the user to see parts of the page immediately, before the entire content has finished rendering. + +Diagram showing partially rendered page on the client, with loading UI for chunks that are being streamed. + +In Partial Prerendering, dynamic components wrapped in Suspense start streaming from the server in parallel. + +Diagram showing parallelization of route segments during streaming, showing data fetching, rendering, and hydration of individual chunks. + +To reduce network overhead, the full response—including static HTML and streamed dynamic parts—is sent in a **single HTTP request**. This avoids extra roundtrips and improves both initial load and overall performance. + +## Enabling Partial Prerendering + +You can enable PPR by adding the [`ppr`](https://rc.nextjs.org/docs/app/api-reference/next-config-js/ppr) option to your `next.config.ts` file: + +```ts filename="next.config.ts" highlight={5} switcher +import type { NextConfig } from 'next' + +const nextConfig: NextConfig = { + experimental: { + ppr: 'incremental', + }, +} + +export default nextConfig +``` + +```js filename="next.config.js" highlight={4} switcher +/** @type {import('next').NextConfig} */ +const nextConfig = { + experimental: { + ppr: 'incremental', + }, +} +``` + +The `'incremental'` value allows you to adopt PPR for specific routes: + +```tsx filename="/app/dashboard/layout.tsx" +export const experimental_ppr = true + +export default function Layout({ children }: { children: React.ReactNode }) { + // ... +} +``` + +```jsx filename="/app/dashboard/layout.js" +export const experimental_ppr = true + +export default function Layout({ children }) { + // ... +} +``` + +Routes that don't have `experimental_ppr` will default to `false` and will not be prerendered using PPR. You need to explicitly opt-in to PPR for each route. + +> **Good to know**: +> +> - `experimental_ppr` will apply to all children of the route segment, including nested layouts and pages. You don't have to add it to every file, only the top segment of a route. +> - To disable PPR for children segments, you can set `experimental_ppr` to `false` in the child segment. + +## Examples + +### Dynamic APIs + +When using Dynamic APIs that require looking at the incoming request, Next.js will opt into dynamic rendering for the route. To continue using PPR, wrap the component with Suspense. 
For example, the `` component is dynamic because it uses the `cookies` API: + +```jsx filename="app/user.js" switcher +import { cookies } from 'next/headers' + +export async function User() { + const session = (await cookies()).get('session')?.value + return '...' +} +``` + +```tsx filename="app/user.tsx" switcher +import { cookies } from 'next/headers' + +export async function User() { + const session = (await cookies()).get('session')?.value + return '...' +} +``` + +The `` component will be streamed while any other content inside `` will be prerendered and become part of the static shell. + +```tsx filename="app/page.tsx" switcher +import { Suspense } from 'react' +import { User, AvatarSkeleton } from './user' + +export const experimental_ppr = true + +export default function Page() { + return ( +
+    <section>
+      <h1>This will be prerendered</h1>
+      <Suspense fallback={<AvatarSkeleton />}>
+        <User />
+      </Suspense>
+    </section>
+ ) +} +``` + +```jsx filename="app/page.js" switcher +import { Suspense } from 'react' +import { User, AvatarSkeleton } from './user' + +export const experimental_ppr = true + +export default function Page() { + return ( +
+

This will be prerendered

+ }> + + +
  )
}
```

### Passing dynamic props

Components only opt into dynamic rendering when the value is accessed. For example, if you are reading `searchParams` from a `<Page />` component, you can forward this value to another component as a prop:

```tsx filename="app/page.tsx" switcher
import { Table, TableSkeleton } from './table'
import { Suspense } from 'react'

export default function Page({
  searchParams,
}: {
  searchParams: Promise<{ sort: string }>
}) {
  return (
    <section>
      <h1>This will be prerendered</h1>
      <Suspense fallback={<TableSkeleton />}>
        <Table searchParams={searchParams} />
      </Suspense>
    </section>
  )
}
```

```jsx filename="app/page.js" switcher
import { Table, TableSkeleton } from './table'
import { Suspense } from 'react'

export default function Page({ searchParams }) {
  return (
    <section>
      <h1>This will be prerendered</h1>
      <Suspense fallback={<TableSkeleton />}>
        <Table searchParams={searchParams} />
      </Suspense>
    </section>
  )
}
```
Inside of the table component, accessing the value from `searchParams` will make the component dynamic while the rest of the page will be prerendered.

```tsx filename="app/table.tsx" switcher
export async function Table({
  searchParams,
}: {
  searchParams: Promise<{ sort: string }>
}) {
  const sort = (await searchParams).sort === 'true'
  return '...'
}
```

```jsx filename="app/table.js" switcher
export async function Table({ searchParams }) {
  const sort = (await searchParams).sort === 'true'
  return '...'
}
```

diff --git a/docs/01-app/01-getting-started/07-fetching-data.mdx b/docs/01-app/01-getting-started/07-fetching-data.mdx
new file mode 100644
index 0000000000000..a0cba2fbaadb8
--- /dev/null
+++ b/docs/01-app/01-getting-started/07-fetching-data.mdx
@@ -0,0 +1,660 @@
---
title: Fetching Data
description: Learn how to fetch data and stream content that depends on data.
related:
  title: API Reference
  description: Learn more about the features mentioned in this page by reading the API Reference.
  links:
    - app/guides/data-security
    - app/api-reference/functions/fetch
    - app/api-reference/file-conventions/loading
    - app/api-reference/config/next-config-js/logging
    - app/api-reference/config/next-config-js/taint
---

This page will walk you through how you can fetch data in [Server and Client Components](/docs/app/getting-started/server-and-client-components), and how to [stream](#streaming) components that depend on data.

## Fetching data

### Server Components

You can fetch data in Server Components using:

1. The [`fetch` API](#with-the-fetch-api)
2. An [ORM or database](#with-an-orm-or-database)

#### With the `fetch` API

To fetch data with the `fetch` API, turn your component into an asynchronous function, and await the `fetch` call. For example:

```tsx filename="app/blog/page.tsx" switcher
export default async function Page() {
  const data = await fetch('https://api.vercel.app/blog')
  const posts = await data.json()
  return (
    <ul>
      {posts.map((post) => (
        <li key={post.id}>{post.title}</li>
      ))}
    </ul>
  )
}
```

```jsx filename="app/blog/page.js" switcher
export default async function Page() {
  const data = await fetch('https://api.vercel.app/blog')
  const posts = await data.json()
  return (
    <ul>
      {posts.map((post) => (
        <li key={post.id}>{post.title}</li>
      ))}
    </ul>
  )
}
```

> **Good to know:**
>
> - `fetch` responses are not cached by default. However, Next.js will [prerender](/docs/app/getting-started/partial-prerendering#static-rendering) the route and the output will be cached for improved performance. If you'd like to opt into [dynamic rendering](/docs/app/getting-started/partial-prerendering#dynamic-rendering), use the `{ cache: 'no-store' }` option (see the sketch below). See the [`fetch` API Reference](/docs/app/api-reference/functions/fetch).
> - During development, you can log `fetch` calls for better visibility and debugging. See the [`logging` API reference](/docs/app/api-reference/config/next-config-js/logging).
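As a quick illustration of the first point, a minimal sketch of opting a single request into dynamic rendering with `cache: 'no-store'` (the `getLatestPosts` helper name is made up; the blog API is the one used above):

```ts
// Sketch: opt this request out of the cached, prerendered output.
// `cache: 'no-store'` tells Next.js not to cache the response, so a route
// that awaits this call is rendered dynamically on every request.
export async function getLatestPosts() {
  const res = await fetch('https://api.vercel.app/blog', { cache: 'no-store' })
  return res.json()
}
```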
#### With an ORM or database

Since Server Components are rendered on the server, you can safely make database queries using an ORM or database client. Turn your component into an asynchronous function, and await the call:

```tsx filename="app/blog/page.tsx" switcher
import { db, posts } from '@/lib/db'

export default async function Page() {
  const allPosts = await db.select().from(posts)
  return (
    <ul>
      {allPosts.map((post) => (
        <li key={post.id}>{post.title}</li>
      ))}
    </ul>
  )
}
```

```jsx filename="app/blog/page.js" switcher
import { db, posts } from '@/lib/db'

export default async function Page() {
  const allPosts = await db.select().from(posts)
  return (
    <ul>
      {allPosts.map((post) => (
        <li key={post.id}>{post.title}</li>
      ))}
    </ul>
  )
}
```

### Client Components

There are two ways to fetch data in Client Components, using:

1. React's [`use` hook](https://react.dev/reference/react/use)
2. A community library like [SWR](https://swr.vercel.app/) or [React Query](https://tanstack.com/query/latest)

#### Streaming data with the `use` hook

You can use React's [`use` hook](https://react.dev/reference/react/use) to [stream](#streaming) data from the server to the client. Start by fetching data in your Server Component, and pass the promise to your Client Component as a prop:

```tsx filename="app/blog/page.tsx" switcher
import Posts from '@/app/ui/posts'
import { Suspense } from 'react'

export default function Page() {
  // Don't await the data fetching function
  const posts = getPosts()

  return (
    <Suspense fallback={<div>Loading...</div>}>
      <Posts posts={posts} />
    </Suspense>
  )
}
```

```jsx filename="app/blog/page.js" switcher
import Posts from '@/app/ui/posts'
import { Suspense } from 'react'

export default function Page() {
  // Don't await the data fetching function
  const posts = getPosts()

  return (
    <Suspense fallback={<div>Loading...</div>}>
      <Posts posts={posts} />
    </Suspense>
  )
}
```

Then, in your Client Component, use the `use` hook to read the promise:

```tsx filename="app/ui/posts.tsx" switcher
'use client'
import { use } from 'react'

export default function Posts({
  posts,
}: {
  posts: Promise<{ id: string; title: string }[]>
}) {
  const allPosts = use(posts)

  return (
    <ul>
      {allPosts.map((post) => (
        <li key={post.id}>{post.title}</li>
      ))}
    </ul>
  )
}
```

```jsx filename="app/ui/posts.js" switcher
'use client'
import { use } from 'react'

export default function Posts({ posts }) {
  const allPosts = use(posts)

  return (
    <ul>
      {allPosts.map((post) => (
        <li key={post.id}>{post.title}</li>
      ))}
    </ul>
  )
}
```

In the example above, the `<Posts />` component is wrapped in a [`<Suspense>` boundary](https://react.dev/reference/react/Suspense). This means the fallback will be shown while the promise is being resolved. Learn more about [streaming](#streaming).

#### Community libraries

You can use a community library like [SWR](https://swr.vercel.app/) or [React Query](https://tanstack.com/query/latest) to fetch data in Client Components. These libraries have their own semantics for caching, streaming, and other features. For example, with SWR:

```tsx filename="app/blog/page.tsx" switcher
'use client'
import useSWR from 'swr'

const fetcher = (url) => fetch(url).then((r) => r.json())

export default function BlogPage() {
  const { data, error, isLoading } = useSWR(
    'https://api.vercel.app/blog',
    fetcher
  )

  if (isLoading) return <div>Loading...</div>
  if (error) return <div>Error: {error.message}</div>

  return (
    <ul>
      {data.map((post: { id: string; title: string }) => (
        <li key={post.id}>{post.title}</li>
      ))}
    </ul>
  )
}
```

```jsx filename="app/blog/page.js" switcher
'use client'

import useSWR from 'swr'

const fetcher = (url) => fetch(url).then((r) => r.json())

export default function BlogPage() {
  const { data, error, isLoading } = useSWR(
    'https://api.vercel.app/blog',
    fetcher
  )

  if (isLoading) return <div>Loading...</div>
  if (error) return <div>Error: {error.message}</div>

  return (
    <ul>
      {data.map((post) => (
        <li key={post.id}>{post.title}</li>
      ))}
    </ul>
  )
}
```
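React Query follows a similar pattern. A minimal sketch using TanStack Query v5's `useQuery` (it assumes a `QueryClientProvider` is already set up higher in the tree, which is not shown here):

```tsx
'use client'
import { useQuery } from '@tanstack/react-query'

export default function BlogPage() {
  // Fetch and cache the posts under the 'posts' query key
  const { data, error, isLoading } = useQuery({
    queryKey: ['posts'],
    queryFn: () =>
      fetch('https://api.vercel.app/blog').then((res) => res.json()),
  })

  if (isLoading) return <div>Loading...</div>
  if (error) return <div>Error: {error.message}</div>

  return (
    <ul>
      {data.map((post: { id: string; title: string }) => (
        <li key={post.id}>{post.title}</li>
      ))}
    </ul>
  )
}
```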
## Deduplicate requests and cache data

One way to deduplicate `fetch` requests is with [request memoization](/docs/app/guides/caching#request-memoization). With this mechanism, `fetch` calls using `GET` or `HEAD` with the same URL and options in a single render pass are combined into one request. This happens automatically, and you can [opt out](/docs/app/guides/caching#opting-out) by passing an Abort signal to `fetch`.

Request memoization is scoped to the lifetime of a request.

You can also deduplicate `fetch` requests by using Next.js' [Data Cache](/docs/app/guides/caching#data-cache), for example by setting `cache: 'force-cache'` in your `fetch` options (see the sketch at the end of this section).

The Data Cache allows sharing data across the current render pass and incoming requests.

If you are _not_ using `fetch`, and instead using an ORM or database directly, you can wrap your data access with the [React `cache`](https://react.dev/reference/react/cache) function.

```tsx filename="app/lib/data.ts" switcher
import { cache } from 'react'
import { db, posts, eq } from '@/lib/db'

export const getPost = cache(async (id: string) => {
  const post = await db.query.posts.findFirst({
    where: eq(posts.id, parseInt(id)),
  })
  return post
})
```

```jsx filename="app/lib/data.js" switcher
import { cache } from 'react'
import { db, posts, eq } from '@/lib/db'

export const getPost = cache(async (id) => {
  const post = await db.query.posts.findFirst({
    where: eq(posts.id, parseInt(id)),
  })
  return post
})
```
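To make the two `fetch`-based options mentioned above concrete, here is a small sketch; the helper names are illustrative, and the blog API is the one used earlier on this page:

```ts
// Sketch: share this response across the render pass and incoming requests
// by storing it in the Data Cache.
export async function getPostsCached() {
  const res = await fetch('https://api.vercel.app/blog', {
    cache: 'force-cache',
  })
  return res.json()
}

// Sketch: passing an AbortSignal opts this call out of request memoization,
// so identical calls in the same render pass are not combined.
export async function getPostsUnmemoized(signal: AbortSignal) {
  const res = await fetch('https://api.vercel.app/blog', { signal })
  return res.json()
}
```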
## Streaming

> **Warning:** The content below assumes the [`dynamicIO` config option](/docs/app/api-reference/config/next-config-js/dynamicIO) is enabled in your application. The flag was introduced in Next.js 15 canary.

When using `async/await` in Server Components, Next.js will opt into [dynamic rendering](/docs/app/getting-started/partial-prerendering#dynamic-rendering). This means the data will be fetched and rendered on the server for every user request. If there are any slow data requests, the whole route will be blocked from rendering.

To improve the initial load time and user experience, you can use streaming to break up the page's HTML into smaller chunks and progressively send those chunks from the server to the client.

There are two ways you can implement streaming in your application:

1. Wrapping a page with a [`loading.js` file](#with-loadingjs)
2. Wrapping a component with [`<Suspense>`](#with-suspense)

### With `loading.js`

You can create a `loading.js` file in the same folder as your page to stream the **entire page** while the data is being fetched. For example, to stream `app/blog/page.js`, add the file inside the `app/blog` folder.

```tsx filename="app/blog/loading.tsx" switcher
export default function Loading() {
  // Define the Loading UI here
  return <div>Loading...</div>
}
```

```jsx filename="app/blog/loading.js" switcher
export default function Loading() {
  // Define the Loading UI here
  return <div>Loading...</div>
}
```

On navigation, the user will immediately see the layout and a [loading state](#creating-meaningful-loading-states) while the page is being rendered. The new content will then be automatically swapped in once rendering is complete.

Behind the scenes, `loading.js` will be nested inside `layout.js`, and will automatically wrap the `page.js` file and any children below in a `<Suspense>` boundary.

This approach works well for route segments (layouts and pages), but for more granular streaming, you can use `<Suspense>`.

### With `<Suspense>`

`<Suspense>` allows you to be more granular about what parts of the page to stream. For example, you can immediately show any page content that falls outside of the `<Suspense>` boundary, and stream in the list of blog posts inside the boundary.

```tsx filename="app/blog/page.tsx" switcher
import { Suspense } from 'react'
import BlogList from '@/components/BlogList'
import BlogListSkeleton from '@/components/BlogListSkeleton'

export default function BlogPage() {
  return (
    <div>
      {/* This content will be sent to the client immediately */}
      <header>
        <h1>Welcome to the Blog</h1>
        <p>Read the latest posts below.</p>
      </header>
      <main>
        {/* Any content wrapped in a <Suspense> boundary will be streamed */}
        <Suspense fallback={<BlogListSkeleton />}>
          <BlogList />
        </Suspense>
      </main>
    </div>
  )
}
```

```jsx filename="app/blog/page.js" switcher
import { Suspense } from 'react'
import BlogList from '@/components/BlogList'
import BlogListSkeleton from '@/components/BlogListSkeleton'

export default function BlogPage() {
  return (
    <div>
      {/* This content will be sent to the client immediately */}
      <header>
        <h1>Welcome to the Blog</h1>
        <p>Read the latest posts below.</p>
      </header>
      <main>
        {/* Any content wrapped in a <Suspense> boundary will be streamed */}
        <Suspense fallback={<BlogListSkeleton />}>
          <BlogList />
        </Suspense>
      </main>
    </div>
  )
}
```

### Creating meaningful loading states

An instant loading state is fallback UI that is shown immediately to the user after navigation. For the best user experience, we recommend designing loading states that are meaningful and help users understand the app is responding. For example, you can use skeletons and spinners, or a small but meaningful part of future screens such as a cover photo, title, etc.

In development, you can preview and inspect the loading state of your components using the [React Devtools](https://react.dev/learn/react-developer-tools).

## Examples

### Sequential data fetching

Sequential data fetching happens when nested components in a tree each fetch their own data and the requests are not [deduplicated](/docs/app/guides/caching#request-memoization), leading to longer response times.

There may be cases where you want this pattern because one fetch depends on the result of the other.

For example, the `<Playlists>` component will only start fetching data once the `<Artist>` component has finished fetching data because `<Playlists>` depends on the `artistID` prop:

```tsx filename="app/artist/[username]/page.tsx" switcher
import { Suspense } from 'react'

export default async function Page({
  params,
}: {
  params: Promise<{ username: string }>
}) {
  const { username } = await params
  // Get artist information
  const artist = await getArtist(username)

  return (
    <>
      <h1>{artist.name}</h1>
      {/* Show fallback UI while the Playlists component is loading */}
      <Suspense fallback={<div>Loading...</div>}>
        {/* Pass the artist ID to the Playlists component */}
        <Playlists artistID={artist.id} />
      </Suspense>
    </>
  )
}

async function Playlists({ artistID }: { artistID: string }) {
  // Use the artist ID to fetch playlists
  const playlists = await getArtistPlaylists(artistID)

  return (
    <ul>
      {playlists.map((playlist) => (
        <li key={playlist.id}>{playlist.name}</li>
      ))}
    </ul>
  )
}
```

```jsx filename="app/artist/[username]/page.js" switcher
import { Suspense } from 'react'

export default async function Page({ params }) {
  const { username } = await params
  // Get artist information
  const artist = await getArtist(username)

  return (
    <>
      <h1>{artist.name}</h1>
      {/* Show fallback UI while the Playlists component is loading */}
      <Suspense fallback={<div>Loading...</div>}>
        {/* Pass the artist ID to the Playlists component */}
        <Playlists artistID={artist.id} />
      </Suspense>
    </>
  )
}

async function Playlists({ artistID }) {
  // Use the artist ID to fetch playlists
  const playlists = await getArtistPlaylists(artistID)

  return (
    <ul>
      {playlists.map((playlist) => (
        <li key={playlist.id}>{playlist.name}</li>
      ))}
    </ul>
  )
}
```

To improve the user experience, you should use [React `<Suspense>`](/docs/app/getting-started/linking-and-navigating#streaming) to show a `fallback` while data is being fetched. This will enable [streaming](#streaming) and prevent the whole route from being blocked by the sequential data requests.

### Parallel data fetching

Parallel data fetching happens when data requests in a route are eagerly initiated and start at the same time.

By default, [layouts and pages](/docs/app/getting-started/layouts-and-pages) are rendered in parallel. So each segment starts fetching data as soon as possible.

However, within _any_ component, multiple `async`/`await` requests can still be sequential if placed one after the other. For example, `getAlbums` will be blocked until `getArtist` is resolved:

```tsx filename="app/artist/[username]/page.tsx" switcher
import { getArtist, getAlbums } from '@/app/lib/data'

export default async function Page({ params }) {
  // These requests will be sequential
  const { username } = await params
  const artist = await getArtist(username)
  const albums = await getAlbums(username)
  return <div>{artist.name}</div>
}
```

You can initiate requests in parallel by defining them outside the components that use the data, and resolving them together, for example, with [`Promise.all`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/all):

```tsx filename="app/artist/[username]/page.tsx" highlight={3,8,23} switcher
import Albums from './albums'

async function getArtist(username: string) {
  const res = await fetch(`https://api.example.com/artist/${username}`)
  return res.json()
}

async function getAlbums(username: string) {
  const res = await fetch(`https://api.example.com/artist/${username}/albums`)
  return res.json()
}

export default async function Page({
  params,
}: {
  params: Promise<{ username: string }>
}) {
  const { username } = await params
  const artistData = getArtist(username)
  const albumsData = getAlbums(username)

  // Initiate both requests in parallel
  const [artist, albums] = await Promise.all([artistData, albumsData])

  return (
    <>
      <h1>{artist.name}</h1>
      <Albums list={albums} />
    </>
  )
}
```

```jsx filename="app/artist/[username]/page.js" highlight={3,8,19} switcher
import Albums from './albums'

async function getArtist(username) {
  const res = await fetch(`https://api.example.com/artist/${username}`)
  return res.json()
}

async function getAlbums(username) {
  const res = await fetch(`https://api.example.com/artist/${username}/albums`)
  return res.json()
}

export default async function Page({ params }) {
  const { username } = await params
  const artistData = getArtist(username)
  const albumsData = getAlbums(username)

  // Initiate both requests in parallel
  const [artist, albums] = await Promise.all([artistData, albumsData])

  return (
    <>
      <h1>{artist.name}</h1>
      <Albums list={albums} />
    </>
  )
}
```

> **Good to know:** If one request fails when using `Promise.all`, the entire operation will fail. To handle this, you can use the [`Promise.allSettled`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/allSettled) method instead.
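A brief sketch of the `Promise.allSettled` variant, reusing the same `getArtist` and `getAlbums` helpers from the example above (the `getArtistPageData` wrapper is illustrative):

```ts
// Sketch: tolerate a single failed request instead of failing the whole page.
async function getArtistPageData(username: string) {
  const [artistResult, albumsResult] = await Promise.allSettled([
    getArtist(username),
    getAlbums(username),
  ])

  return {
    // Each result is { status: 'fulfilled', value } or { status: 'rejected', reason }
    artist: artistResult.status === 'fulfilled' ? artistResult.value : null,
    albums: albumsResult.status === 'fulfilled' ? albumsResult.value : [],
  }
}
```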
### Preloading data

You can preload data by creating a utility function that you eagerly call above blocking requests. `<Item>` conditionally renders based on the `checkIsAvailable()` function.

You can call `preload()` before `checkIsAvailable()` to eagerly initiate `<Item>`'s data dependencies. By the time `<Item>` is rendered, its data has already been fetched.

```tsx filename="app/item/[id]/page.tsx" switcher
import { getItem, checkIsAvailable } from '@/lib/data'

export default async function Page({
  params,
}: {
  params: Promise<{ id: string }>
}) {
  const { id } = await params
  // start loading item data
  preload(id)
  // perform another asynchronous task
  const isAvailable = await checkIsAvailable()

  return isAvailable ? <Item id={id} /> : null
}

export const preload = (id: string) => {
  // void evaluates the given expression and returns undefined
  // https://developer.mozilla.org/docs/Web/JavaScript/Reference/Operators/void
  void getItem(id)
}
export async function Item({ id }: { id: string }) {
  const result = await getItem(id)
  // ...
}
```

```jsx filename="app/item/[id]/page.js" switcher
import { getItem, checkIsAvailable } from '@/lib/data'

export default async function Page({ params }) {
  const { id } = await params
  // start loading item data
  preload(id)
  // perform another asynchronous task
  const isAvailable = await checkIsAvailable()

  return isAvailable ? <Item id={id} /> : null
}

export const preload = (id) => {
  // void evaluates the given expression and returns undefined
  // https://developer.mozilla.org/docs/Web/JavaScript/Reference/Operators/void
  void getItem(id)
}
export async function Item({ id }) {
  const result = await getItem(id)
  // ...
}
```

Additionally, you can use React's [`cache` function](https://react.dev/reference/react/cache) and the [`server-only` package](https://www.npmjs.com/package/server-only) to create a reusable utility function. This approach allows you to cache the data fetching function and ensure that it's only executed on the server.

```ts filename="utils/get-item.ts" switcher
import { cache } from 'react'
import 'server-only'

export const preload = (id: string) => {
  void getItem(id)
}

export const getItem = cache(async (id: string) => {
  // ...
})
```

```js filename="utils/get-item.js" switcher
import { cache } from 'react'
import 'server-only'

export const preload = (id) => {
  void getItem(id)
}

export const getItem = cache(async (id) => {
  // ...
})
```

diff --git a/docs/01-app/01-getting-started/07-updating-data.mdx b/docs/01-app/01-getting-started/07-updating-data.mdx
deleted file mode 100644
index 19c92b2749699..0000000000000
--- a/docs/01-app/01-getting-started/07-updating-data.mdx
+++ /dev/null
@@ -1,340 +0,0 @@
---
title: How to update data
nav_title: Updating Data
description: Learn how to update data in your Next.js application.
related:
  title: API Reference
  description: Learn more about the features mentioned in this page by reading the API Reference.
  links:
    - app/api-reference/functions/revalidatePath
    - app/api-reference/functions/revalidateTag
    - app/api-reference/functions/redirect
---

You can update data in Next.js using React's [Server Functions](https://react.dev/reference/rsc/server-functions). This page will go through how you can [create](#creating-server-functions) and [invoke](#invoking-server-functions) Server Functions.

## Creating Server Functions

A Server Function can be defined by using the [`use server`](https://react.dev/reference/rsc/use-server) directive. You can place the directive at the top of an **asynchronous** function to mark the function as a Server Function, or at the top of a separate file to mark all exports of that file.

```ts filename="app/lib/actions.ts" switcher
export async function createPost(formData: FormData) {
  'use server'
  const title = formData.get('title')
  const content = formData.get('content')

  // Update data
  // Revalidate cache
}

export async function deletePost(formData: FormData) {
  'use server'
  const id = formData.get('id')

  // Update data
  // Revalidate cache
}
```

```js filename="app/lib/actions.js" switcher
export async function createPost(formData) {
  'use server'
  const title = formData.get('title')
  const content = formData.get('content')

  // Update data
  // Revalidate cache
}

export async function deletePost(formData) {
  'use server'
  const id = formData.get('id')

  // Update data
  // Revalidate cache
}
```

### Server Components

Server Functions can be inlined in Server Components by adding the `"use server"` directive to the top of the function body:

```tsx filename="app/page.tsx" switcher
export default function Page() {
  // Server Action
  async function createPost(formData: FormData) {
    'use server'
    // ...
  }

  return <></>
}
```

```jsx filename="app/page.js" switcher
export default function Page() {
  // Server Action
  async function createPost(formData) {
    'use server'
    // ...
  }

  return <></>
}
```

### Client Components

It's not possible to define Server Functions in Client Components. However, you can invoke them in Client Components by importing them from a file that has the `"use server"` directive at the top of it:

```ts filename="app/actions.ts" switcher
'use server'

export async function createPost() {}
```

```js filename="app/actions.js" switcher
'use server'

export async function createPost() {}
```

```tsx filename="app/ui/button.tsx" switcher
'use client'

import { createPost } from '@/app/actions'

export function Button() {
  return <button formAction={createPost}>Create</button>
}
```

```jsx filename="app/ui/button.js" switcher
'use client'

import { createPost } from '@/app/actions'

export function Button() {
  return <button formAction={createPost}>Create</button>
}
```

## Invoking Server Functions

There are two main ways you can invoke a Server Function:

1. [Forms](#forms) in Server and Client Components
2. [Event Handlers](#event-handlers) in Client Components

### Forms

React extends the HTML [`<form>`](https://react.dev/reference/react-dom/components/form) element to allow Server Functions to be invoked with the HTML `action` prop.

When invoked in a form, the function automatically receives the [`FormData`](https://developer.mozilla.org/docs/Web/API/FormData/FormData) object. You can extract the data using the native [`FormData` methods](https://developer.mozilla.org/en-US/docs/Web/API/FormData#instance_methods):

```tsx filename="app/ui/form.tsx" switcher
import { createPost } from '@/app/actions'

export function Form() {
  return (
    <form action={createPost}>
      <input type="text" name="title" />
      <textarea name="content" />
      <button type="submit">Create</button>
    </form>
  )
}
```

```jsx filename="app/ui/form.js" switcher
import { createPost } from '@/app/actions'

export function Form() {
  return (
    <form action={createPost}>
      <input type="text" name="title" />
      <textarea name="content" />
      <button type="submit">Create</button>
    </form>
  )
}
```

```ts filename="app/actions.ts" switcher
'use server'

export async function createPost(formData: FormData) {
  const title = formData.get('title')
  const content = formData.get('content')

  // Update data
  // Revalidate cache
}
```

```js filename="app/actions.js" switcher
'use server'

export async function createPost(formData) {
  const title = formData.get('title')
  const content = formData.get('content')

  // Update data
  // Revalidate cache
}
```

> **Good to know:** When passed to the `action` prop, Server Functions are also known as _Server Actions_.

### Event Handlers

You can invoke a Server Function in a Client Component by using event handlers such as `onClick`.

```tsx filename="app/like-button.tsx" switcher
'use client'

import { incrementLike } from './actions'
import { useState } from 'react'

export default function LikeButton({ initialLikes }: { initialLikes: number }) {
  const [likes, setLikes] = useState(initialLikes)

  return (
    <>
      <p>Total Likes: {likes}</p>
      <button
        onClick={async () => {
          const updatedLikes = await incrementLike()
          setLikes(updatedLikes)
        }}
      >
        Like
      </button>
    </>
  )
}
```

```jsx filename="app/like-button.js" switcher
'use client'

import { incrementLike } from './actions'
import { useState } from 'react'

export default function LikeButton({ initialLikes }) {
  const [likes, setLikes] = useState(initialLikes)

  return (
    <>
      <p>Total Likes: {likes}</p>
      <button
        onClick={async () => {
          const updatedLikes = await incrementLike()
          setLikes(updatedLikes)
        }}
      >
        Like
      </button>
    </>
  )
}
```

## Examples

### Showing a pending state

While executing a Server Function, you can show a loading indicator with React's [`useActionState`](https://react.dev/reference/react/useActionState) hook. This hook returns a `pending` boolean:

```tsx filename="app/ui/button.tsx" switcher
'use client'

import { useActionState } from 'react'
import { createPost } from '@/app/actions'
import { LoadingSpinner } from '@/app/ui/loading-spinner'

export function Button() {
  const [state, action, pending] = useActionState(createPost, false)

  return (
    <button onClick={async () => action()}>
      {pending ? <LoadingSpinner /> : 'Create Post'}
    </button>
  )
}
```

```jsx filename="app/ui/button.js" switcher
'use client'

import { useActionState } from 'react'
import { createPost } from '@/app/actions'
import { LoadingSpinner } from '@/app/ui/loading-spinner'

export function Button() {
  const [state, action, pending] = useActionState(createPost, false)

  return (
    <button onClick={async () => action()}>
      {pending ? <LoadingSpinner /> : 'Create Post'}
    </button>
  )
}
```

### Revalidating the cache

After performing an update, you can revalidate the Next.js cache and show the updated data by calling [`revalidatePath`](/docs/app/api-reference/functions/revalidatePath) or [`revalidateTag`](/docs/app/api-reference/functions/revalidateTag) within the Server Function:

```ts filename="app/lib/actions.ts" switcher
import { revalidatePath } from 'next/cache'

export async function createPost(formData: FormData) {
  'use server'
  // Update data
  // ...

  revalidatePath('/posts')
}
```

```js filename="app/actions.js" switcher
import { revalidatePath } from 'next/cache'

export async function createPost(formData) {
  'use server'
  // Update data
  // ...
  revalidatePath('/posts')
}
```

### Redirecting

You may want to redirect the user to a different page after performing an update. You can do this by calling [`redirect`](/docs/app/api-reference/functions/redirect) within the Server Function:

```ts filename="app/lib/actions.ts" switcher
'use server'

import { redirect } from 'next/navigation'

export async function createPost(formData: FormData) {
  // Update data
  // ...

  redirect('/posts')
}
```

```js filename="app/actions.js" switcher
'use server'

import { redirect } from 'next/navigation'

export async function createPost(formData) {
  // Update data
  // ...

  redirect('/posts')
}
```

diff --git a/docs/01-app/01-getting-started/08-error-handling.mdx b/docs/01-app/01-getting-started/08-error-handling.mdx
deleted file mode 100644
index ec45ce5e8d664..0000000000000
--- a/docs/01-app/01-getting-started/08-error-handling.mdx
+++ /dev/null
@@ -1,315 +0,0 @@
---
title: How to handle errors
nav_title: Error Handling
description: Learn how to display expected errors and handle uncaught exceptions.
related:
  title: API Reference
  description: Learn more about the features mentioned in this page by reading the API Reference.
  links:
    - app/api-reference/functions/redirect
    - app/api-reference/file-conventions/error
    - app/api-reference/functions/not-found
    - app/api-reference/file-conventions/not-found
---

Errors can be divided into two categories: [expected errors](#handling-expected-errors) and [uncaught exceptions](#handling-uncaught-exceptions). This page will walk you through how you can handle these errors in your Next.js application.
## Handling expected errors

Expected errors are those that can occur during the normal operation of the application, such as those from [server-side form validation](/docs/app/building-your-application/data-fetching/server-actions-and-mutations#server-side-form-validation) or failed requests. These errors should be handled explicitly and returned to the client.

### Server Functions

You can use the [`useActionState`](https://react.dev/reference/react/useActionState) hook to handle expected errors in [Server Functions](https://react.dev/reference/rsc/server-functions).

For these errors, avoid using `try`/`catch` blocks and thrown errors. Instead, model expected errors as return values.

```ts filename="app/actions.ts" switcher
'use server'

export async function createPost(prevState: any, formData: FormData) {
  const title = formData.get('title')
  const content = formData.get('content')

  const res = await fetch('https://api.vercel.app/posts', {
    method: 'POST',
    body: JSON.stringify({ title, content }),
  })
  const json = await res.json()

  if (!res.ok) {
    return { message: 'Failed to create post' }
  }
}
```

```js filename="app/actions.js" switcher
'use server'

export async function createPost(prevState, formData) {
  const title = formData.get('title')
  const content = formData.get('content')

  const res = await fetch('https://api.vercel.app/posts', {
    method: 'POST',
    body: JSON.stringify({ title, content }),
  })
  const json = await res.json()

  if (!res.ok) {
    return { message: 'Failed to create post' }
  }
}
```

You can pass your action to the `useActionState` hook and use the returned `state` to display an error message.

```tsx filename="app/ui/form.tsx" highlight={11,19} switcher
'use client'

import { useActionState } from 'react'
import { createPost } from '@/app/actions'

const initialState = {
  message: '',
}

export function Form() {
  const [state, formAction, pending] = useActionState(createPost, initialState)

  return (
    <form action={formAction}>
      <label htmlFor="title">Title</label>
      <input type="text" id="title" name="title" required />
      <label htmlFor="content">Content</label>