diff --git a/.bundle/config b/.bundle/config
deleted file mode 100644
index 9bc01b4c32..0000000000
--- a/.bundle/config
+++ /dev/null
@@ -1,3 +0,0 @@
----
-BUNDLE_PATH: "vendor/bundle"
-BUNDLE_DISABLE_SHARED_GEMS: "true"
diff --git a/.drone.yml b/.drone.yml
deleted file mode 100644
index 7b74f75e3a..0000000000
--- a/.drone.yml
+++ /dev/null
@@ -1,5 +0,0 @@
-pipeline:
- build:
- image: scalacenter/scala-rvm-jvm-coursier:2.0
- commands:
- - ./scripts/ci.sh
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 0000000000..ce1bb0f48b
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1 @@
+/_ja @scala/docs-ja
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000000..f48b4ada51
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,11 @@
+version: 2
+updates:
+- package-ecosystem: bundler
+ directory: "/"
+ schedule:
+ interval: daily
+ open-pull-requests-limit: 10
+ ignore:
+ - dependency-name: html-proofer
+ versions:
+ - "> 3.15.3"
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
new file mode 100644
index 0000000000..e56f07a0ab
--- /dev/null
+++ b/.github/workflows/build.yml
@@ -0,0 +1,33 @@
+name: Build
+on: [push, pull_request]
+jobs:
+ build:
+ runs-on: ubuntu-22.04
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up Ruby
+ uses: ruby/setup-ruby@v1
+ with:
+ ruby-version: 3.2.6
+ bundler-cache: true
+ - name: Set up coursier
+ uses: coursier/setup-action@v1.3.5
+ with:
+ jvm: adopt:11
+ - name: Run mdoc
+ run: |
+ ./scripts/run-mdoc.sh
+ rm -r /tmp/mdoc-out/
+ - name: Jekyll build
+ run: bundle exec jekyll build
+ - name: HTMLProofer
+ run: |
+ # # Checking for docs.scala-lang/blob/main leads to a chicken and egg problem because of the edit links of new pages.
+ bundle exec htmlproofer ./_site/\
+ --only-4xx\
+ --ignore-status-codes "400,401,403,429"\
+ --ignore-empty-alt\
+ --allow-hash-href\
+ --no-enforce-https\
+ --ignore-urls '/https://github.com/scala/,/www.oracle.com/'
+
diff --git a/.gitignore b/.gitignore
index c73823b558..055aee462d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -8,5 +8,5 @@ _site
vendor/bundle
.idea/
/coursier
-/tut-tmp/
.sass-cache/
+.jekyll-cache/
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000000..b2bbc255f9
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,12 @@
+FROM ruby:3.2.6
+
+RUN gem install bundler:2.6.5
+
+WORKDIR /srv/jekyll
+
+COPY Gemfile .
+COPY Gemfile.lock .
+
+RUN echo -n "bundle version: " && bundle --version
+RUN chmod u+s /bin/chown
+RUN bundle install
diff --git a/Gemfile b/Gemfile
index a0b87e6bb8..31cb37fbea 100644
--- a/Gemfile
+++ b/Gemfile
@@ -1,14 +1,6 @@
source 'https://rubygems.org'
-gem 'jekyll-redirect-from'
-gem 'jekyll-scalafiddle'
+gem 'github-pages'
+gem 'webrick'
+#
gem 'html-proofer'
-# gem 'html-proofer' # link-checking: bundle exec htmlproofer ./_site/ --only-4xx --empty-alt-ignore --allow-hash-href
-
-# group :jekyll_plugins do
-# gem 'hawkins'
-# end
-
-# ^ Useful for live reloading the site in your
-# browser during development. To use, uncomment
-# and do:
-# bundle exec jekyll liveserve --incremental
+# gem 'html-proofer' # link-checking: bundle exec htmlproofer ./_site/ --only-4xx --ignore-empty-alt=true --allow-hash-href=true
diff --git a/Gemfile.lock b/Gemfile.lock
index b01d106dff..8088be3873 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -1,89 +1,333 @@
GEM
remote: https://rubygems.org/
specs:
- addressable (2.7.0)
- public_suffix (>= 2.0.2, < 5.0)
+ Ascii85 (2.0.1)
+ activesupport (8.0.1)
+ base64
+ benchmark (>= 0.3)
+ bigdecimal
+ concurrent-ruby (~> 1.0, >= 1.3.1)
+ connection_pool (>= 2.2.5)
+ drb
+ i18n (>= 1.6, < 2)
+ logger (>= 1.4.2)
+ minitest (>= 5.1)
+ securerandom (>= 0.3)
+ tzinfo (~> 2.0, >= 2.0.5)
+ uri (>= 0.13.1)
+ addressable (2.8.7)
+ public_suffix (>= 2.0.2, < 7.0)
+ afm (0.2.2)
+ async (2.23.0)
+ console (~> 1.29)
+ fiber-annotation
+ io-event (~> 1.9)
+ metrics (~> 0.12)
+ traces (~> 0.15)
+ base64 (0.2.0)
+ benchmark (0.4.0)
+ bigdecimal (3.1.9)
+ coffee-script (2.4.1)
+ coffee-script-source
+ execjs
+ coffee-script-source (1.12.2)
colorator (1.1.0)
- concurrent-ruby (1.1.5)
- em-websocket (0.5.1)
+ commonmarker (0.23.11)
+ concurrent-ruby (1.3.5)
+ connection_pool (2.5.0)
+ console (1.29.3)
+ fiber-annotation
+ fiber-local (~> 1.1)
+ json
+ csv (3.3.2)
+ dnsruby (1.72.3)
+ base64 (~> 0.2.0)
+ simpleidn (~> 0.2.1)
+ drb (2.2.1)
+ em-websocket (0.5.3)
eventmachine (>= 0.12.9)
- http_parser.rb (~> 0.6.0)
- ethon (0.12.0)
- ffi (>= 1.3.0)
+ http_parser.rb (~> 0)
+ ethon (0.16.0)
+ ffi (>= 1.15.0)
eventmachine (1.2.7)
- ffi (1.12.2)
+ execjs (2.10.0)
+ faraday (2.12.2)
+ faraday-net_http (>= 2.0, < 3.5)
+ json
+ logger
+ faraday-net_http (3.4.0)
+ net-http (>= 0.5.0)
+ ffi (1.17.1-arm64-darwin)
+ ffi (1.17.1-x64-mingw-ucrt)
+ ffi (1.17.1-x86_64-linux-gnu)
+ fiber-annotation (0.2.0)
+ fiber-local (1.1.0)
+ fiber-storage
+ fiber-storage (1.0.0)
forwardable-extended (2.6.0)
- html-proofer (3.15.3)
- addressable (~> 2.3)
+ gemoji (4.1.0)
+ github-pages (232)
+ github-pages-health-check (= 1.18.2)
+ jekyll (= 3.10.0)
+ jekyll-avatar (= 0.8.0)
+ jekyll-coffeescript (= 1.2.2)
+ jekyll-commonmark-ghpages (= 0.5.1)
+ jekyll-default-layout (= 0.1.5)
+ jekyll-feed (= 0.17.0)
+ jekyll-gist (= 1.5.0)
+ jekyll-github-metadata (= 2.16.1)
+ jekyll-include-cache (= 0.2.1)
+ jekyll-mentions (= 1.6.0)
+ jekyll-optional-front-matter (= 0.3.2)
+ jekyll-paginate (= 1.1.0)
+ jekyll-readme-index (= 0.3.0)
+ jekyll-redirect-from (= 0.16.0)
+ jekyll-relative-links (= 0.6.1)
+ jekyll-remote-theme (= 0.4.3)
+ jekyll-sass-converter (= 1.5.2)
+ jekyll-seo-tag (= 2.8.0)
+ jekyll-sitemap (= 1.4.0)
+ jekyll-swiss (= 1.0.0)
+ jekyll-theme-architect (= 0.2.0)
+ jekyll-theme-cayman (= 0.2.0)
+ jekyll-theme-dinky (= 0.2.0)
+ jekyll-theme-hacker (= 0.2.0)
+ jekyll-theme-leap-day (= 0.2.0)
+ jekyll-theme-merlot (= 0.2.0)
+ jekyll-theme-midnight (= 0.2.0)
+ jekyll-theme-minimal (= 0.2.0)
+ jekyll-theme-modernist (= 0.2.0)
+ jekyll-theme-primer (= 0.6.0)
+ jekyll-theme-slate (= 0.2.0)
+ jekyll-theme-tactile (= 0.2.0)
+ jekyll-theme-time-machine (= 0.2.0)
+ jekyll-titles-from-headings (= 0.5.3)
+ jemoji (= 0.13.0)
+ kramdown (= 2.4.0)
+ kramdown-parser-gfm (= 1.1.0)
+ liquid (= 4.0.4)
mercenary (~> 0.3)
- nokogumbo (~> 2.0)
- parallel (~> 1.3)
+ minima (= 2.5.1)
+ nokogiri (>= 1.16.2, < 2.0)
+ rouge (= 3.30.0)
+ terminal-table (~> 1.4)
+ webrick (~> 1.8)
+ github-pages-health-check (1.18.2)
+ addressable (~> 2.3)
+ dnsruby (~> 1.60)
+ octokit (>= 4, < 8)
+ public_suffix (>= 3.0, < 6.0)
+ typhoeus (~> 1.3)
+ hashery (2.1.2)
+ html-pipeline (2.14.3)
+ activesupport (>= 2)
+ nokogiri (>= 1.4)
+ html-proofer (5.0.10)
+ addressable (~> 2.3)
+ async (~> 2.1)
+ nokogiri (~> 1.13)
+ pdf-reader (~> 2.11)
rainbow (~> 3.0)
typhoeus (~> 1.3)
yell (~> 2.0)
- http_parser.rb (0.6.0)
- i18n (0.9.5)
+ zeitwerk (~> 2.5)
+ http_parser.rb (0.8.0)
+ i18n (1.14.7)
concurrent-ruby (~> 1.0)
- jekyll (3.8.6)
+ io-event (1.9.0)
+ jekyll (3.10.0)
addressable (~> 2.4)
colorator (~> 1.0)
+ csv (~> 3.0)
em-websocket (~> 0.5)
- i18n (~> 0.7)
+ i18n (>= 0.7, < 2)
jekyll-sass-converter (~> 1.0)
jekyll-watch (~> 2.0)
- kramdown (~> 1.14)
+ kramdown (>= 1.17, < 3)
liquid (~> 4.0)
mercenary (~> 0.3.3)
pathutil (~> 0.9)
rouge (>= 1.7, < 4)
safe_yaml (~> 1.0)
- jekyll-redirect-from (0.15.0)
+ webrick (>= 1.0)
+ jekyll-avatar (0.8.0)
+ jekyll (>= 3.0, < 5.0)
+ jekyll-coffeescript (1.2.2)
+ coffee-script (~> 2.2)
+ coffee-script-source (~> 1.12)
+ jekyll-commonmark (1.4.0)
+ commonmarker (~> 0.22)
+ jekyll-commonmark-ghpages (0.5.1)
+ commonmarker (>= 0.23.7, < 1.1.0)
+ jekyll (>= 3.9, < 4.0)
+ jekyll-commonmark (~> 1.4.0)
+ rouge (>= 2.0, < 5.0)
+ jekyll-default-layout (0.1.5)
+ jekyll (>= 3.0, < 5.0)
+ jekyll-feed (0.17.0)
+ jekyll (>= 3.7, < 5.0)
+ jekyll-gist (1.5.0)
+ octokit (~> 4.2)
+ jekyll-github-metadata (2.16.1)
+ jekyll (>= 3.4, < 5.0)
+ octokit (>= 4, < 7, != 4.4.0)
+ jekyll-include-cache (0.2.1)
+ jekyll (>= 3.7, < 5.0)
+ jekyll-mentions (1.6.0)
+ html-pipeline (~> 2.3)
+ jekyll (>= 3.7, < 5.0)
+ jekyll-optional-front-matter (0.3.2)
+ jekyll (>= 3.0, < 5.0)
+ jekyll-paginate (1.1.0)
+ jekyll-readme-index (0.3.0)
+ jekyll (>= 3.0, < 5.0)
+ jekyll-redirect-from (0.16.0)
+ jekyll (>= 3.3, < 5.0)
+ jekyll-relative-links (0.6.1)
jekyll (>= 3.3, < 5.0)
+ jekyll-remote-theme (0.4.3)
+ addressable (~> 2.0)
+ jekyll (>= 3.5, < 5.0)
+ jekyll-sass-converter (>= 1.0, <= 3.0.0, != 2.0.0)
+ rubyzip (>= 1.3.0, < 3.0)
jekyll-sass-converter (1.5.2)
sass (~> 3.4)
- jekyll-scalafiddle (1.0.1)
- jekyll (~> 3.0)
+ jekyll-seo-tag (2.8.0)
+ jekyll (>= 3.8, < 5.0)
+ jekyll-sitemap (1.4.0)
+ jekyll (>= 3.7, < 5.0)
+ jekyll-swiss (1.0.0)
+ jekyll-theme-architect (0.2.0)
+ jekyll (> 3.5, < 5.0)
+ jekyll-seo-tag (~> 2.0)
+ jekyll-theme-cayman (0.2.0)
+ jekyll (> 3.5, < 5.0)
+ jekyll-seo-tag (~> 2.0)
+ jekyll-theme-dinky (0.2.0)
+ jekyll (> 3.5, < 5.0)
+ jekyll-seo-tag (~> 2.0)
+ jekyll-theme-hacker (0.2.0)
+ jekyll (> 3.5, < 5.0)
+ jekyll-seo-tag (~> 2.0)
+ jekyll-theme-leap-day (0.2.0)
+ jekyll (> 3.5, < 5.0)
+ jekyll-seo-tag (~> 2.0)
+ jekyll-theme-merlot (0.2.0)
+ jekyll (> 3.5, < 5.0)
+ jekyll-seo-tag (~> 2.0)
+ jekyll-theme-midnight (0.2.0)
+ jekyll (> 3.5, < 5.0)
+ jekyll-seo-tag (~> 2.0)
+ jekyll-theme-minimal (0.2.0)
+ jekyll (> 3.5, < 5.0)
+ jekyll-seo-tag (~> 2.0)
+ jekyll-theme-modernist (0.2.0)
+ jekyll (> 3.5, < 5.0)
+ jekyll-seo-tag (~> 2.0)
+ jekyll-theme-primer (0.6.0)
+ jekyll (> 3.5, < 5.0)
+ jekyll-github-metadata (~> 2.9)
+ jekyll-seo-tag (~> 2.0)
+ jekyll-theme-slate (0.2.0)
+ jekyll (> 3.5, < 5.0)
+ jekyll-seo-tag (~> 2.0)
+ jekyll-theme-tactile (0.2.0)
+ jekyll (> 3.5, < 5.0)
+ jekyll-seo-tag (~> 2.0)
+ jekyll-theme-time-machine (0.2.0)
+ jekyll (> 3.5, < 5.0)
+ jekyll-seo-tag (~> 2.0)
+ jekyll-titles-from-headings (0.5.3)
+ jekyll (>= 3.3, < 5.0)
jekyll-watch (2.2.1)
listen (~> 3.0)
- kramdown (1.17.0)
- liquid (4.0.3)
- listen (3.1.5)
- rb-fsevent (~> 0.9, >= 0.9.4)
- rb-inotify (~> 0.9, >= 0.9.7)
- ruby_dep (~> 1.2)
+ jemoji (0.13.0)
+ gemoji (>= 3, < 5)
+ html-pipeline (~> 2.2)
+ jekyll (>= 3.0, < 5.0)
+ json (2.10.2)
+ kramdown (2.4.0)
+ rexml
+ kramdown-parser-gfm (1.1.0)
+ kramdown (~> 2.0)
+ liquid (4.0.4)
+ listen (3.9.0)
+ rb-fsevent (~> 0.10, >= 0.10.3)
+ rb-inotify (~> 0.9, >= 0.9.10)
+ logger (1.6.6)
mercenary (0.3.6)
- mini_portile2 (2.4.0)
- nokogiri (1.10.9)
- mini_portile2 (~> 2.4.0)
- nokogumbo (2.0.2)
- nokogiri (~> 1.8, >= 1.8.4)
- parallel (1.19.1)
+ metrics (0.12.1)
+ minima (2.5.1)
+ jekyll (>= 3.5, < 5.0)
+ jekyll-feed (~> 0.9)
+ jekyll-seo-tag (~> 2.1)
+ minitest (5.25.4)
+ net-http (0.6.0)
+ uri
+ nokogiri (1.18.8-arm64-darwin)
+ racc (~> 1.4)
+ nokogiri (1.18.8-x64-mingw-ucrt)
+ racc (~> 1.4)
+ nokogiri (1.18.8-x86_64-linux-gnu)
+ racc (~> 1.4)
+ octokit (4.25.1)
+ faraday (>= 1, < 3)
+ sawyer (~> 0.9)
pathutil (0.16.2)
forwardable-extended (~> 2.6)
- public_suffix (4.0.4)
- rainbow (3.0.0)
- rb-fsevent (0.10.3)
- rb-inotify (0.10.0)
+ pdf-reader (2.14.1)
+ Ascii85 (>= 1.0, < 3.0, != 2.0.0)
+ afm (~> 0.2.1)
+ hashery (~> 2.0)
+ ruby-rc4
+ ttfunk
+ public_suffix (5.1.1)
+ racc (1.8.1)
+ rainbow (3.1.1)
+ rb-fsevent (0.11.2)
+ rb-inotify (0.11.1)
ffi (~> 1.0)
- rouge (3.9.0)
- ruby_dep (1.5.0)
+ rexml (3.4.1)
+ rouge (3.30.0)
+ ruby-rc4 (0.1.5)
+ rubyzip (2.4.1)
safe_yaml (1.0.5)
sass (3.7.4)
sass-listen (~> 4.0.0)
sass-listen (4.0.0)
rb-fsevent (~> 0.9, >= 0.9.4)
rb-inotify (~> 0.9, >= 0.9.7)
- typhoeus (1.3.1)
+ sawyer (0.9.2)
+ addressable (>= 2.3.5)
+ faraday (>= 0.17.3, < 3)
+ securerandom (0.4.1)
+ simpleidn (0.2.3)
+ terminal-table (1.8.0)
+ unicode-display_width (~> 1.1, >= 1.1.1)
+ traces (0.15.2)
+ ttfunk (1.8.0)
+ bigdecimal (~> 3.1)
+ typhoeus (1.4.1)
ethon (>= 0.9.0)
+ tzinfo (2.0.6)
+ concurrent-ruby (~> 1.0)
+ unicode-display_width (1.8.0)
+ uri (1.0.3)
+ webrick (1.9.1)
yell (2.2.2)
+ zeitwerk (2.7.2)
PLATFORMS
- ruby
+ arm64-darwin-22
+ arm64-darwin-23
+ arm64-darwin-24
+ x64-mingw-ucrt
+ x86_64-linux
DEPENDENCIES
+ github-pages
html-proofer
- jekyll-redirect-from
- jekyll-scalafiddle
+ webrick
BUNDLED WITH
- 1.16.2
+ 2.6.5
diff --git a/README.md b/README.md
index eef762ce16..013a66267c 100644
--- a/README.md
+++ b/README.md
@@ -1,14 +1,17 @@
# Scala Documentation #
-[](https://platform-ci.scala-lang.org/scala/docs.scala-lang)
+[](https://github.com/scala/docs.scala-lang/actions/workflows/build.yml?query=branch%3Amain)
This repository contains the source for the Scala documentation website, as well as the source for "Scala Improvement Process" (SIP) documents.
+## Dependencies ##
+
+This site uses Jekyll, a Ruby framework. You'll need Ruby and Bundler installed; see [Jekyll installation instructions](https://jekyllrb.com/docs/installation/) for the details.
+
## Quickstart ##
To build and view the site locally:
- gem install bundler
bundle install
bundle exec jekyll serve -I
@@ -16,27 +19,44 @@ To build and view the site locally:
For more details, read on.
-## Quickstart with Docker ##
+## Quickstart with Docker Compose ##
+
+You need to have [Docker Engine](https://docs.docker.com/engine/) and [Docker Compose](https://docs.docker.com/compose/) installed on your machine.
+Under macOS (Intel or Apple silicon), instead of installing [Docker Desktop](https://docs.docker.com/desktop/), you can also use [Homebrew](https://brew.sh/) with [Colima](https://github.com/abiosoft/colima): `brew install colima docker docker-compose`.
+The UID and GID environment variables are needed to prevent Docker from writing files as root in your directory.
+By default, docker-compose uses the file `docker-compose.yml`, which builds the website and serves it on 0.0.0.0:4000.
+If you just need to build the website, add `-f docker-compose_build-only.yml`.
+
+```
+env UID="$(id -u)" GID="$(id -g)" docker-compose up
+```
+
+The generated site is available at `http://localhost:4000`.
-To build and view site with docker:
+When the website dependencies change (the content of the `Gemfile`),
+you have to re-build the Docker image:
- docker-compose up
+```
+env UID="$(id -u)" GID="$(id -g)" docker-compose up --build
+```
-It will incrementally build and serve site at `http://localhost:8080`
+If you have problems with the Docker image or want to force a rebuild of it:
+```
+env UID="$(id -u)" GID="$(id -g)" docker-compose build --no-cache
+```
+
+
+For more details on the Docker option, see also [this issue](https://github.com/scala/docs.scala-lang/issues/1286).
## Contributing ##
-Please have a look at [https://docs.scala-lang.org/contribute.html](https://docs.scala-lang.org/contribute.html) before making a contribution.
+Please have a look at [Add New Guides/Tutorials](https://docs.scala-lang.org/contribute/add-guides.html) before making a contribution.
This document gives an overview of the type of documentation contained within the Scala Documentation repository and the repository's structure.
Small changes, or corrected typos will generally be pulled in right away. Large changes, like the addition of new documents, or the rewriting of
existing documents will be thoroughly reviewed-- please keep in mind that, generally, new documents must be very well-polished, complete, and maintained
in order to be accepted.
-## Dependencies ##
-
-This site uses a Jekyll, a Ruby framework. You'll need Ruby and Bundler installed; see [Jekyll installation instructions](https://jekyllrb.com/docs/installation/) for the details.
-
## Building & Viewing ##
cd into the directory where you cloned this repository, then install the required gems with `bundle install`. This will automatically put the gems into `./vendor/bundle`.
@@ -62,9 +82,9 @@ The markdown used in this site uses [kramdown](https://kramdown.gettalong.org/)
### Markdown Editor for OSX ###
-There's a free markdown editor for OSX called [Mou](http://25.io/mou/). It's quite convenient to work with, and it generates the translated Markdown in real-time alongside of your editor window, as can be seen here:
+There's a free Markdown editor for OSX called [MacDown](https://github.com/MacDownApp/macdown). It's quite convenient to work with, and it generates the rendered Markdown in real-time alongside your editor window, as can be seen here:
-
+
## License ##
diff --git a/_ba/tour/abstract-type-members.md b/_ba/tour/abstract-type-members.md
index 9e577d7cda..b7034b6922 100644
--- a/_ba/tour/abstract-type-members.md
+++ b/_ba/tour/abstract-type-members.md
@@ -15,7 +15,7 @@ Trejtovi i apstraktne klase mogu imati apstraktne tipove kao članove.
To znači da konkretne implementacije definišu stvarni tip.
Slijedi primjer:
-```tut
+```scala mdoc
trait Buffer {
type T
val element: T
@@ -26,7 +26,7 @@ U gornjem primjeru smo definisali apstraktni tip `T`.
On se koristi za opis člana `element`.
Ovaj trejt možemo naslijediti u apstraktnoj klasi i dodati gornju granicu tipa za `T` da bi ga učinili preciznijim.
-```tut
+```scala mdoc
abstract class SeqBuffer extends Buffer {
type U
type T <: Seq[U]
@@ -40,7 +40,7 @@ mora biti podtip `Seq[U]` za neki novi apstraktni tip `U`.
Trejtovi ili [klase](classes.html) s apstraktnim tip-članovima se često koriste u kombinaciji s instanciranjem anonimnih klasa.
Radi ilustracije, pogledaćemo program koji radi s sekvencijalnim baferom koji sadrži listu integera:
-```tut
+```scala mdoc
abstract class IntSeqBuffer extends SeqBuffer {
type U = Int
}
@@ -61,7 +61,7 @@ Metoda `newIntSeqBuf` koristi anonimnu klasu kao implementaciju `IntSeqBuf` pos
Često je moguće pretvoriti apstraktni tip-član u tipski parametar klase i obrnuto.
Slijedi verzija gornjeg koda koji koristi tipske parametre:
-```tut
+```scala mdoc:nest
abstract class Buffer[+T] {
val element: T
}
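
Throughout the `_ba/tour` pages below, this change swaps the old `tut` fences for `scala mdoc` ones, so the snippets are compiled by the `run-mdoc.sh` step in CI. Roughly, `mdoc:fail` asserts a snippet does not compile, `mdoc:crash` asserts it throws at runtime, `mdoc:nest` opens a nested scope so names may shadow earlier definitions on the same page, and `mdoc:reset` clears previously defined symbols. A minimal illustration of `mdoc:fail` and `mdoc:nest`; the snippets are illustrative and not taken from the pages:

```scala mdoc:fail
// mdoc:fail — the build passes only if this snippet fails to compile.
val n: Int = "not an Int"
```

```scala mdoc:nest
// mdoc:nest — a nested scope, so this `n` may shadow one defined earlier on the page.
val n = 42
println(n) // 42
```
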
diff --git a/_ba/tour/annotations.md b/_ba/tour/annotations.md
index d39d970873..24b17a9012 100644
--- a/_ba/tour/annotations.md
+++ b/_ba/tour/annotations.md
@@ -5,7 +5,7 @@ language: ba
partof: scala-tour
num: 32
-next-page: default-parameter-values
+next-page: packages-and-imports
previous-page: by-name-parameters
---
@@ -30,7 +30,7 @@ Redoslijed anotacijskih klauza nije bitan.
Određene anotacije će uzrokovati pad kompajliranja ako određeni uslovi nisu ispunjeni.
Npr, anotacija `@tailrec` osigurava da je metoda [tail-rekurzivna](https://en.wikipedia.org/wiki/Tail_call). Tail-rekurzija može zadržati memorijske zahtjeve konstantnim.
Evo kako se koristi na metodi koja izračunava faktorijel:
-```tut
+```scala mdoc
import scala.annotation.tailrec
def factorial(x: Int): Int = {
diff --git a/_ba/tour/automatic-closures.md b/_ba/tour/automatic-closures.md
deleted file mode 100644
index 90f751ee2c..0000000000
--- a/_ba/tour/automatic-closures.md
+++ /dev/null
@@ -1,7 +0,0 @@
----
-layout: tour
-title: Automatic Type-Dependent Closure Construction
-partof: scala-tour
-
-language: ba
----
diff --git a/_ba/tour/basics.md b/_ba/tour/basics.md
index 6204faa74b..97956e6149 100644
--- a/_ba/tour/basics.md
+++ b/_ba/tour/basics.md
@@ -14,9 +14,9 @@ Na ovoj stranici ćemo objasniti osnove Scale.
## Probavanje Scale u browseru
-Scalu možete probati u Vašem browser sa ScalaFiddle aplikacijom.
+Scalu možete probati u Vašem browser sa Scastie aplikacijom.
-1. Idite na [https://scalafiddle.io](https://scalafiddle.io).
+1. Idite na [Scastie](https://scastie.scala-lang.org/).
2. Zalijepite `println("Hello, world!")` u lijevi panel.
3. Kliknite "Run" dugme. Izlaz će se pojaviti u desnom panelu.
@@ -30,7 +30,7 @@ Izrazi su izjave koje imaju vrijednost.
```
Rezultate izraza možete prikazati pomoću `println`.
-```tut
+```scala mdoc
println(1) // 1
println(1 + 1) // 2
println("Hello!") // Hello!
@@ -41,33 +41,33 @@ println("Hello," + " world!") // Hello, world!
Rezultatima možete dodijeliti naziv pomoću ključne riječi `val`.
-```tut
+```scala mdoc
val x = 1 + 1
println(x) // 2
```
-Imenovani rezultati, kao `x` ovdje, nazivaju se vrijednostima.
+Imenovani rezultati, kao `x` ovdje, nazivaju se vrijednostima.
Referenciranje vrijednosti ne okida njeno ponovno izračunavanje.
Vrijednosti se ne mogu mijenjati.
-```tut:fail
+```scala mdoc:fail
x = 3 // Ovo se ne kompajlira.
```
Tipovi vrijednosti mogu biti (automatski) zaključeni, ali možete i eksplicitno navesti tip:
-```tut
+```scala mdoc:nest
val x: Int = 1 + 1
```
-Primijetite da deklaracija tipa `Int` dolazi nakon identifikatora `x`. Također morate dodati i `:`.
+Primijetite da deklaracija tipa `Int` dolazi nakon identifikatora `x`. Također morate dodati i `:`.
### Varijable
Varijable su kao vrijednosti, osim što ih možete promijeniti. Varijable se definišu ključnom riječju `var`.
-```tut
+```scala mdoc:nest
var x = 1 + 1
x = 3 // Ovo se kompajlira jer je "x" deklarisano s "var" ključnom riječju.
println(x * x) // 9
@@ -75,7 +75,7 @@ println(x * x) // 9
Kao i s vrijednostima, tip možete eksplicitno navesti ako želite:
-```tut
+```scala mdoc:nest
var x: Int = 1 + 1
```
@@ -86,7 +86,7 @@ Izraze možete kombinovati okružujući ih s `{}`. Ovo se naziva blok.
Rezultat zadnjeg izraza u bloku je rezultat cijelog bloka, također.
-```tut
+```scala mdoc
println({
val x = 1 + 1
x + 1
@@ -99,7 +99,7 @@ Funkcije su izrazi koji primaju parametre.
Možete definisati anonimnu funkciju (bez imena) koja vraća cijeli broj plus jedan:
-```tut
+```scala mdoc
(x: Int) => x + 1
```
@@ -107,21 +107,21 @@ Na lijevoj strani `=>` je lista parametara. Na desnoj strani je izraz koji koris
Funkcije možete i imenovati.
-```tut
+```scala mdoc
val addOne = (x: Int) => x + 1
println(addOne(1)) // 2
```
Funkcije mogu imati više parametara.
-```tut
+```scala mdoc
val add = (x: Int, y: Int) => x + y
println(add(1, 2)) // 3
```
Ili bez parametara.
-```tut
+```scala mdoc
val getTheAnswer = () => 42
println(getTheAnswer()) // 42
```
@@ -132,7 +132,7 @@ Metode izgledaju i ponašaju se vrlo slično funkcijama, ali postoji nekoliko ra
Metode se definišu ključnom riječju `def`. Nakon `def` slijedi naziv, lista parametara, povratni tip, i tijelo.
-```tut
+```scala mdoc:nest
def add(x: Int, y: Int): Int = x + y
println(add(1, 2)) // 3
```
@@ -141,14 +141,14 @@ Primijetite da je povratni tip deklarisan _nakon_ liste parametara i dvotačke `
Metode mogu imati više listi parametara.
-```tut
+```scala mdoc
def addThenMultiply(x: Int, y: Int)(multiplier: Int): Int = (x + y) * multiplier
println(addThenMultiply(1, 2)(3)) // 9
```
Ili bez listi parametara ikako.
-```tut
+```scala mdoc
def name: String = System.getProperty("name")
println("Hello, " + name + "!")
```
@@ -157,15 +157,13 @@ Postoje i neke druge razlike, ali zasad, možete misliti o njima kao nečemu sli
Metode mogu imati višelinijske izraze također.
-{% scalafiddle %}
-```tut
+```scala mdoc
def getSquareString(input: Double): String = {
val square = input * input
square.toString
}
println(getSquareString(2.5)) // 6.25
```
-{% endscalafiddle %}
Zadnjo izraz u tijelu metode je povratna vrijednost metode. (Scala ima ključnu riječ `return`, ali se rijetko koristi.)
@@ -173,20 +171,20 @@ Zadnjo izraz u tijelu metode je povratna vrijednost metode. (Scala ima ključnu
Klasu možete definisati ključnom riječju `class` praćenom imenom i parametrima konstruktora.
-```tut
+```scala mdoc
class Greeter(prefix: String, suffix: String) {
def greet(name: String): Unit =
println(prefix + name + suffix)
}
```
-Povratni tip metode `greet` je `Unit`, koji kaže da metoda ne vraća ništa značajno.
-Koristi se slično kao `void` u Javi ili C-u.
-(Razlika je u tome što svaki Scalin izraz mora imati neku vrijednost, postoji singlton vrijednost tipa `Unit`, piše se `()`.
+Povratni tip metode `greet` je `Unit`, koji kaže da metoda ne vraća ništa značajno.
+Koristi se slično kao `void` u Javi ili C-u.
+(Razlika je u tome što svaki Scalin izraz mora imati neku vrijednost, postoji singlton vrijednost tipa `Unit`, piše se `()`.
Ne prenosi nikakvu korisnu informaciju.)
Instancu klase možete kreirati pomoću ključne riječi `new`.
-```tut
+```scala mdoc
val greeter = new Greeter("Hello, ", "!")
greeter.greet("Scala developer") // Hello, Scala developer!
```
@@ -195,16 +193,16 @@ Detaljniji pregled klasa biće dat [kasnije](classes.html).
## Case klase
-Scala ima poseban tip klase koji se zove "case" klasa.
+Scala ima poseban tip klase koji se zove "case" klasa.
Po defaultu, case klase su nepromjenjive i porede se po vrijednosti. Možete ih definisati s `case class` ključnim riječima.
-```tut
+```scala mdoc
case class Point(x: Int, y: Int)
```
Instancu case klase možete kreirati i bez ključne riječi `new`.
-```tut
+```scala mdoc
val point = Point(1, 2)
val anotherPoint = Point(1, 2)
val yetAnotherPoint = Point(2, 2)
@@ -212,17 +210,17 @@ val yetAnotherPoint = Point(2, 2)
I porede se po vrijednosti.
-```tut
+```scala mdoc
if (point == anotherPoint) {
- println(point + " and " + anotherPoint + " are the same.")
+ println(s"$point and $anotherPoint are the same.")
} else {
- println(point + " and " + anotherPoint + " are different.")
+ println(s"$point and $anotherPoint are different.")
} // Point(1,2) i Point(1,2) su iste.
if (point == yetAnotherPoint) {
- println(point + " and " + yetAnotherPoint + " are the same.")
+ println(s"$point and $yetAnotherPoint are the same.")
} else {
- println(point + " and " + yetAnotherPoint + " are different.")
+ println(s"$point and $yetAnotherPoint are different.")
} // Point(1,2) su Point(2,2) različite.
```
@@ -235,7 +233,7 @@ Objasnićemo ih u dubinu [kasnije](case-classes.html).
Objekti su jedine instance svojih definicija. Možete misliti o njima kao singltonima svoje vlastite klase.
Objekte možete definisati ključnom riječju `object`.
-```tut
+```scala mdoc
object IdFactory {
private var counter = 0
def create(): Int = {
@@ -247,7 +245,7 @@ object IdFactory {
Objektima možete pristupati referenciranjem njihovog imena.
-```tut
+```scala mdoc
val newId: Int = IdFactory.create()
println(newId) // 1
val newerId: Int = IdFactory.create()
@@ -262,7 +260,7 @@ Trejtovi su tipovi koji sadrže polja i metode. Više trejtova se može kombino
Definišu se pomoću `trait` ključne riječi.
-```tut
+```scala mdoc:nest
trait Greeter {
def greet(name: String): Unit
}
@@ -270,7 +268,7 @@ trait Greeter {
Metode trejtova mogu imati defaultnu implementaciju.
-```tut
+```scala mdoc:reset
trait Greeter {
def greet(name: String): Unit =
println("Hello, " + name + "!")
@@ -279,7 +277,7 @@ trait Greeter {
Možete naslijediti trejtove s `extends` ključnom riječi i redefinisati (override) implementacije s `override` ključnom riječi.
-```tut
+```scala mdoc
class DefaultGreeter extends Greeter
class CustomizableGreeter(prefix: String, postfix: String) extends Greeter {
@@ -301,12 +299,12 @@ Trejtove ćemo pokriti u dubinu [kasnije](traits.html).
## Glavna metoda
-Glavna metoda je ulazna tačka programa.
+Glavna metoda je ulazna tačka programa.
Java Virtuelna Mašina traži da se glavna metoda zove `main` i da prima jedan argument, niz stringova.
Koristeći objekt, možete definisati glavnu metodu ovako:
-```tut
+```scala mdoc
object Main {
def main(args: Array[String]): Unit =
println("Hello, Scala developer!")
diff --git a/_ba/tour/by-name-parameters.md b/_ba/tour/by-name-parameters.md
index e166f5242f..5cfc42f8cb 100644
--- a/_ba/tour/by-name-parameters.md
+++ b/_ba/tour/by-name-parameters.md
@@ -13,7 +13,7 @@ previous-page: operators
_By-name parametri_ (u slobodnom prevodu "po-imenu parametri") se izračunavaju samo kada se koriste.
Oni su kontrastu sa _by-value parametrima_ ("po-vrijednosti parametri").
Da bi parametar bio pozivan by-name, dodajte `=>` prije njegovog tipa.
-```tut
+```scala mdoc
def calculate(input: => Int) = input * 37
```
By-name parametri imaju prednost da se ne izračunavaju ako se ne koriste u tijelu funkcije.
@@ -21,7 +21,7 @@ U drugu ruku, by-value parametri imaju prednost da se izračunavaju samo jednom.
Ovo je primjer kako bi mogli implementirati while petlju:
-```tut
+```scala mdoc
def whileLoop(condition: => Boolean)(body: => Unit): Unit =
if (condition) {
body
diff --git a/_ba/tour/case-classes.md b/_ba/tour/case-classes.md
index a4cac6fad7..0c6de6d7b4 100644
--- a/_ba/tour/case-classes.md
+++ b/_ba/tour/case-classes.md
@@ -17,7 +17,7 @@ U sljedećem koraku turneje, vidjećemo kako su korisne u [podudaranju uzoraka (
## Definisanje case klase
Minimalna case klasa se sastoji iz ključnih riječi `case class`, identifikatora, i liste parametara (koja može biti prazna):
-```tut
+```scala mdoc
case class Book(isbn: String)
val frankenstein = Book("978-0486282114")
diff --git a/_ba/tour/classes.md b/_ba/tour/classes.md
index 534b1c6f27..29f170dca0 100644
--- a/_ba/tour/classes.md
+++ b/_ba/tour/classes.md
@@ -18,7 +18,7 @@ Tipovi, objekti i trejtovi biće pokriveni kasnije.
## Definisanje klase
Minimalna definicija klase sastoji se od riječi `class` i identifikatora. Imena klasa bi trebala počinjati velikim slovom.
-```tut
+```scala mdoc
class User
val user1 = new User
@@ -28,7 +28,7 @@ Ključna riječ `new` koristi se za kreiranje instance klase.
Međutim, često ćete imati konstruktor i tijelo klase.
Slijedi definicija klase `Point` (en. tačka):
-```tut
+```scala mdoc
class Point(var x: Int, var y: Int) {
def move(dx: Int, dy: Int): Unit = {
@@ -56,7 +56,7 @@ Pošto `toString` prebrisava metodu `toString` iz [`AnyRef`](unified-types.html)
Konstruktori mogu imati opcione parametre koristeći podrazumijevane vrijednosti:
-```tut
+```scala mdoc:nest
class Point(var x: Int = 0, var y: Int = 0)
val origin = new Point // x and y are both set to 0
@@ -67,7 +67,7 @@ println(point1.x) // prints 1
U ovoj verziji klase `Point`, `x` i `y` imaju podrazumijevanu vrijednost `0` tako da ne morate proslijediti argumente.
Međutim, pošto se argumenti konstruktora čitaju s lijeva na desno, ako želite proslijediti samo `y` vrijednost, morate imenovati parametar.
-```
+```scala mdoc:nest
class Point(var x: Int = 0, var y: Int = 0)
val point2 = new Point(y=2)
println(point2.y) // prints 2
@@ -78,7 +78,7 @@ Ovo je također dobra praksa zbog poboljšanja čitljivosti.
## Privatni članovi i sintaksa getera/setera
Članovi su javni (`public`) po defaultu.
Koristite `private` modifikator pristupa da sakrijete članove klase.
-```tut
+```scala mdoc:nest
class Point {
private var _x = 0
private var _y = 0
@@ -108,14 +108,14 @@ Primijetite specijalnu sintaksu za setere: metoda ima `_=` nadodano na identifik
Parametri primarnog konstruktora s `val` i `var` su javni.
Međutim, pošto su `val` nepromjenjivi, ne možete napisati sljedeće.
-```
+```scala mdoc:fail
class Point(val x: Int, val y: Int)
val point = new Point(1, 2)
point.x = 3 // <-- does not compile
```
Parametri bez `val` ili `var` su privatne vrijednosti, vidljive samo unutar klase.
-```
+```scala mdoc:fail
class Point(x: Int, y: Int)
val point = new Point(1, 2)
point.x // <-- does not compile
diff --git a/_ba/tour/compound-types.md b/_ba/tour/compound-types.md
index 316d874c85..3781e866e3 100644
--- a/_ba/tour/compound-types.md
+++ b/_ba/tour/compound-types.md
@@ -15,7 +15,7 @@ U Scali ovo može biti izraženo pomoću *složenih tipova*, koji su presjeci ti
Pretpostavimo da imamo dva trejta: `Cloneable` i `Resetable`:
-```tut
+```scala mdoc
trait Cloneable extends java.lang.Cloneable {
override def clone(): Cloneable = {
super.clone().asInstanceOf[Cloneable]
diff --git a/_ba/tour/default-parameter-values.md b/_ba/tour/default-parameter-values.md
index 38be16bcc1..f4fc257900 100644
--- a/_ba/tour/default-parameter-values.md
+++ b/_ba/tour/default-parameter-values.md
@@ -13,7 +13,7 @@ prerequisite-knowledge: named-arguments, function syntax
Scala omogućuje davanje podrazumijevanih vrijednosti parametrima koje dozvoljavaju korisniku metode da izostavi te parametre.
-```tut
+```scala mdoc
def log(message: String, level: String = "INFO") = println(s"$level: $message")
log("System starting") // prints INFO: System starting
@@ -22,7 +22,7 @@ log("User not found", "WARNING") // prints WARNING: User not found
Parametar `level` ima podrazumijevanu vrijednost tako da je opcioni. Na zadnjoj liniji, argument `"WARNING"` prebrisava podrazumijevani argument `"INFO"`. Gdje biste koristili overloadane metode u Javi, možete koristiti metode s opcionim parametrima da biste postigli isti efekat. Međutim, ako korisnik izostavi argument, bilo koji sljedeći argumenti moraju biti imenovani.
-```tut
+```scala mdoc
class Point(val x: Double = 0, val y: Double = 0)
val point1 = new Point(y = 1)
@@ -31,7 +31,7 @@ Ovdje moramo reći `y = 1`.
Podrazumijevani parametri u Scali nisu opcioni kada se koriste iz Java koda:
-```tut
+```scala mdoc:reset
// Point.scala
class Point(val x: Double = 0, val y: Double = 0)
```
diff --git a/_ba/tour/extractor-objects.md b/_ba/tour/extractor-objects.md
index 979f2d4250..0d0618aa00 100644
--- a/_ba/tour/extractor-objects.md
+++ b/_ba/tour/extractor-objects.md
@@ -11,15 +11,15 @@ previous-page: regular-expression-patterns
---
Ekstraktor objekat je objekat koji ima `unapply` metodu.
-Dok je `apply` metoda kao konstruktor koji uzima argumente i kreira objekat, `unapply` metoda prima objekat i pokušava vratiti argumente.
+Dok je `apply` metoda kao konstruktor koji uzima argumente i kreira objekat, `unapply` metoda prima objekat i pokušava vratiti argumente.
Ovo se najčešće koristi u podudaranju uzoraka i parcijalnim funkcijama.
-```tut
+```scala mdoc
import scala.util.Random
object CustomerID {
- def apply(name: String) = s"$name--${Random.nextLong}"
+ def apply(name: String) = s"$name--${Random.nextLong()}"
def unapply(customerID: String): Option[String] = {
val name = customerID.split("--").head
@@ -34,14 +34,14 @@ customer1ID match {
}
```
-Metoda `apply` kreira `CustomerID` string od argumenta `name`.
-Metoda `unapply` radi suprotno da dobije `name` nazad.
-Kada pozovemo `CustomerID("Sukyoung")`, to je skraćena sintaksa za `CustomerID.apply("Sukyoung")`.
+Metoda `apply` kreira `CustomerID` string od argumenta `name`.
+Metoda `unapply` radi suprotno da dobije `name` nazad.
+Kada pozovemo `CustomerID("Sukyoung")`, to je skraćena sintaksa za `CustomerID.apply("Sukyoung")`.
Kada pozovemo `case CustomerID(name) => customer1ID`, ustvari pozivamo `unapply` metodu.
Metoda `unapply` se može koristiti i za dodjelu vrijednosti.
-```tut
+```scala mdoc
val customer2ID = CustomerID("Nico")
val CustomerID(name) = customer2ID
println(name) // prints Nico
@@ -49,7 +49,7 @@ println(name) // prints Nico
Ovo je ekvivalentno `val name = CustomerID.unapply(customer2ID).get`. Ako se uzorak ne podudari, baciće se `scala.MatchError` izuzetak:
-```tut:fail
+```scala mdoc:crash
val CustomerID(name2) = "--asdfasdfasdf"
```
diff --git a/_ba/tour/for-comprehensions.md b/_ba/tour/for-comprehensions.md
index efdeb57dc8..7d5d0f9166 100644
--- a/_ba/tour/for-comprehensions.md
+++ b/_ba/tour/for-comprehensions.md
@@ -21,7 +21,7 @@ Komprehensija evaluira tijelo `e` za svako vezivanje varijable generisano od str
Slijedi primjer:
-```tut
+```scala mdoc
case class User(name: String, age: Int)
val userBase = List(User("Travis", 28),
@@ -38,7 +38,7 @@ twentySomethings.foreach(name => println(name)) // prints Travis Dennis
Slijedi malo komplikovaniji primjer koji s dva generatora. Izračunava sve parove brojeva između `0` i `n-1` čija je suma jednaka vrijednosti `v`:
-```tut
+```scala mdoc
def foo(n: Int, v: Int) =
for (i <- 0 until n;
j <- i until n if i + j == v)
@@ -54,5 +54,5 @@ Ovdje je `n == 10` i `v == 10`. U prvoj iteraciji, `i == 0` i `j == 0` tako da `
Bez `if` čuvara, ovo bi ispisalo sljedeće:
```
-(0, 0) (0, 1) (0, 2) (0, 3) (0, 4) (0, 5) (0, 6) (0, 7) (0, 8) (0, 9) (1, 1) ...
+(0, 0) (0, 1) (0, 2) (0, 3) (0, 4) (0, 5) (0, 6) (0, 7) (0, 8) (0, 9) (1, 0) ...
```
diff --git a/_ba/tour/generic-classes.md b/_ba/tour/generic-classes.md
index c86d8f3f8f..6b52e9ccd8 100644
--- a/_ba/tour/generic-classes.md
+++ b/_ba/tour/generic-classes.md
@@ -19,10 +19,11 @@ Vrlo su korisne za implementiranje kolekcija.
Generičke klase primaju tip kao parametar u uglastim zagradama `[]`.
Konvencija je da se koristi slovo `A` kao identifikator tipa, mada se može koristiti bilo koje ime.
-```tut
+```scala mdoc
class Stack[A] {
private var elements: List[A] = Nil
- def push(x: A) { elements = x :: elements }
+ def push(x: A): Unit =
+ elements = x :: elements
def peek: A = elements.head
def pop(): A = {
val currentTop = peek
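
This hunk (and similar ones in the inner-classes and mixin pages below) also replaces old procedure syntax with an explicit `Unit` result type, which is the form current compilers expect. A minimal before/after sketch, using an illustrative class that is not part of the page:

```scala
class Counter {
  private var n = 0

  // Old procedure syntax (no '=', no result type) is deprecated:
  //   def increment() { n += 1 }

  // Preferred form: explicit Unit result type and '='.
  def increment(): Unit =
    n += 1

  def current: Int = n
}
```
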
diff --git a/_ba/tour/higher-order-functions.md b/_ba/tour/higher-order-functions.md
index e7ac3103aa..56f1c1807a 100644
--- a/_ba/tour/higher-order-functions.md
+++ b/_ba/tour/higher-order-functions.md
@@ -14,15 +14,15 @@ Scala dozvoljava definisanje funkcija višeg reda.
To su funkcije koje _primaju druge funkcije kao parametre_, ili čiji je _rezultat funkcija_.
Ovo je funkcija `apply` koja uzima drugu funkciju `f` i vrijednost `v` i primjenjuje funkciju `f` na `v`:
-```tut
+```scala mdoc
def apply(f: Int => String, v: Int) = f(v)
```
_Napomena: metode se automatski pretvaraju u funkcije ako to kontekst zahtijeva._
Ovo je još jedan primjer:
-
-```tut
+
+```scala mdoc
class Decorator(left: String, right: String) {
def layout[A](x: A) = left + x.toString() + right
}
@@ -33,7 +33,7 @@ object FunTest extends App {
println(apply(decorator.layout, 7))
}
```
-
+
Izvršavanjem se dobije izlaz:
```
diff --git a/_ba/tour/implicit-conversions.md b/_ba/tour/implicit-conversions.md
index 090dc03323..5a1ea3b9fa 100644
--- a/_ba/tour/implicit-conversions.md
+++ b/_ba/tour/implicit-conversions.md
@@ -43,11 +43,11 @@ Implicitno importovani objekt `scala.Predef` deklariše nekoliko predefinisanih
Naprimjer, kada se pozivaju Javine metode koje očekuju `java.lang.Integer`, možete proslijediti `scala.Int`.
Možete, zato što `Predef` uključuje slj. implicitnu konverziju:
-```tut
+```scala mdoc
import scala.language.implicitConversions
-implicit def int2Integer(x: Int) =
- java.lang.Integer.valueOf(x)
+implicit def int2Integer(x: Int): Integer =
+ Integer.valueOf(x)
```
Pošto su implicitne konverzije opasne ako se koriste pogrešno, kompajler upozorava kada kompajlira definiciju implicitne konverzije.
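
The conversion above now carries an explicit result type, so the target type of the implicit conversion is visible without inferring it from the body. A small usage sketch of the behaviour the page describes; the value name is illustrative:

```scala
import scala.language.implicitConversions

implicit def int2Integer(x: Int): Integer =
  Integer.valueOf(x)

// With the conversion in scope, a plain Int can be used where
// java.lang.Integer is expected, e.g. when calling Java APIs.
val boxed: Integer = 42
println(boxed) // 42
```
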
diff --git a/_ba/tour/implicit-parameters.md b/_ba/tour/implicit-parameters.md
index 5b50d8f5e9..c39d87d626 100644
--- a/_ba/tour/implicit-parameters.md
+++ b/_ba/tour/implicit-parameters.md
@@ -22,7 +22,7 @@ Argumenti koji se mogu proslijediti kao implicitni parametri spadaju u dvije kat
U sljedećem primjeru definisaćemo metodu `sum` koja izračunava sumu liste elemenata koristeći `add` i `unit` operacije monoida.
Molimo primijetite da implicitne vrijednosti ne mogu biti top-level, već moraju biti članovi templejta.
-```tut
+```scala mdoc
abstract class SemiGroup[A] {
def add(x: A, y: A): A
}
diff --git a/_ba/tour/inner-classes.md b/_ba/tour/inner-classes.md
index 41dd73ed91..ef72aa8929 100644
--- a/_ba/tour/inner-classes.md
+++ b/_ba/tour/inner-classes.md
@@ -17,11 +17,11 @@ Pretpostavimo da želimo da nas kompejler spriječi da pomiješamo koji čvorovi
Radi ilustracije razlike, prikazaćemo implementaciju klase grafa:
-```tut
+```scala mdoc
class Graph {
class Node {
var connectedNodes: List[Node] = Nil
- def connectTo(node: Node) {
+ def connectTo(node: Node): Unit = {
if (!connectedNodes.exists(node.equals)) {
connectedNodes = node :: connectedNodes
}
@@ -35,11 +35,11 @@ class Graph {
}
}
```
-
+
U našem programu, grafovi su predstavljeni listom čvorova (`List[Node]`).
Svaki čvor ima listu drugih čvorova s kojima je povezan (`connectedNodes`). Klasa `Node` je _path-dependent tip_ jer je ugniježdena u klasi `Graph`. Stoga, svi čvorovi u `connectedNodes` moraju biti kreirani koristeći `newNode` iz iste instance klase `Graph`.
-```tut
+```scala mdoc
val graph1: Graph = new Graph
val node1: graph1.Node = graph1.newNode
val node2: graph1.Node = graph1.newNode
@@ -47,14 +47,14 @@ val node3: graph1.Node = graph1.newNode
node1.connectTo(node2)
node3.connectTo(node1)
```
-
+
Eksplicitno smo deklarisali tip `node1`, `node2`, i `node3` kao `graph1.Node` zbog jasnosti ali ga je kompajler mogao sam zaključiti. Pošto kada pozivamo `graph1.newNode` koja poziva `new Node`, metoda koristi instancu `Node` specifičnu instanci `graph1`.
Da imamo dva grafa, sistem tipova Scale ne dozvoljava miješanje čvorova definisanih u različitim grafovima,
jer čvorovi različitih grafova imaju različit tip.
Ovo je primjer netačnog programa:
-
-```
+
+```scala mdoc:fail
val graph1: Graph = new Graph
val node1: graph1.Node = graph1.newNode
val node2: graph1.Node = graph1.newNode
@@ -69,12 +69,12 @@ U Javi bi zadnja linija prethodnog primjera bila tačna.
Za čvorove oba grafa, Java bi dodijelila isti tip `Graph.Node`; npr. `Node` bi imala prefiks klase `Graph`.
U Scali takav tip je također moguće izraziti, piše se kao `Graph#Node`.
Ako želimo povezati čvorove različitih grafova, moramo promijeniti definiciju naše inicijalne implementacije grafa:
-
-```tut
+
+```scala mdoc:nest
class Graph {
class Node {
var connectedNodes: List[Graph#Node] = Nil
- def connectTo(node: Graph#Node) {
+ def connectTo(node: Graph#Node): Unit = {
if (!connectedNodes.exists(node.equals)) {
connectedNodes = node :: connectedNodes
}
@@ -88,6 +88,6 @@ class Graph {
}
}
```
-
+
> Primijetite da ovaj program ne dozvoljava da dodamo čvor u dva različita grafa.
Ako bi htjeli ukloniti i ovo ograničenje, moramo promijeniti tipski parametar `nodes` u `Graph#Node`.
diff --git a/_ba/tour/lower-type-bounds.md b/_ba/tour/lower-type-bounds.md
index 8261a2e77c..85dd54a401 100644
--- a/_ba/tour/lower-type-bounds.md
+++ b/_ba/tour/lower-type-bounds.md
@@ -17,7 +17,7 @@ Izraz `B >: A` izražava tipski parametar `B` ili apstraktni tip `B` koji je nad
Kroz sljedeći primjer vidjećemo zašto je ovo korisno:
-```tut:fail
+```scala mdoc:fail
trait Node[+B] {
def prepend(elem: B): Node[B]
}
@@ -43,7 +43,7 @@ Ovo ne radi jer su funkcije *kontra*varijantne u svojim tipovima parametara i *k
Da bismo popravili ovo, moramo zamijeniti varijansu tipskog parametra `elem` u `prepend`.
Ovo radimo uvođenjem novog tipskog parametra `U` koji ima `B` kao svoju donju granicu tipa.
-```tut
+```scala mdoc
trait Node[+B] {
def prepend[U >: B](elem: U): Node[U]
}
@@ -60,7 +60,7 @@ case class Nil[+B]() extends Node[B] {
```
Sada možemo uraditi sljedeće:
-```tut
+```scala mdoc
trait Bird
case class AfricanSwallow() extends Bird
case class EuropeanSwallow() extends Bird
diff --git a/_ba/tour/mixin-class-composition.md b/_ba/tour/mixin-class-composition.md
index 66ad5f623e..a8216abfb6 100644
--- a/_ba/tour/mixin-class-composition.md
+++ b/_ba/tour/mixin-class-composition.md
@@ -13,7 +13,7 @@ prerequisite-knowledge: inheritance, traits, abstract-classes, unified-types
Mixini su trejtovi koji se koriste za kompoziciju klase.
-```tut
+```scala mdoc
abstract class A {
val message: String
}
@@ -29,23 +29,23 @@ val d = new D
d.message // I'm an instance of class B
d.loudMessage // I'M AN INSTANCE OF CLASS B
```
-Klasa `D` je nadklasa od `B` i mixina `C`.
+Klasa `D` je nadklasa od `B` i mixina `C`.
Klase mogu imati samo jednu nadklasu alid mogu imati više mixina (koristeći ključne riječi `extends` i `with` respektivno). Mixini i nadklasa mogu imati isti nadtip.
Pogledajmo sada zanimljiviji primjer počevši od apstraktne klase:
-
-```tut
+
+```scala mdoc
abstract class AbsIterator {
type T
def hasNext: Boolean
def next(): T
}
```
-
+
Klasa ima apstraktni tip `T` i standardne metode iteratora.
Dalje, implementiraćemo konkretnu klasu (svi apstraktni članovi `T`, `hasNext`, i `next` imaju implementacije):
-```tut
+```scala mdoc
class StringIterator(s: String) extends AbsIterator {
type T = Char
private var i = 0
@@ -59,14 +59,14 @@ class StringIterator(s: String) extends AbsIterator {
```
`StringIterator` prima `String` i može se koristiti za iteraciju nad `String`om (npr. da vidimo da li sadrži određeni karakter).
-
+
trait RichIterator extends AbsIterator {
- def foreach(f: T => Unit) { while (hasNext) f(next()) }
+ def foreach(f: T => Unit): Unit = { while (hasNext) f(next()) }
}
Kreirajmo sada trejt koji također nasljeđuje `AbsIterator`.
-```tut
+```scala mdoc
trait RichIterator extends AbsIterator {
def foreach(f: T => Unit): Unit = while (hasNext) f(next())
}
@@ -74,16 +74,16 @@ trait RichIterator extends AbsIterator {
Pošto je `RichIterator` trejt, on ne mora implementirati apstraktne članove `AbsIterator`a.
-Željeli bismo iskombinirati funkcionalnosti `StringIterator`a i `RichIterator`a u jednoj klasi.
+Željeli bismo iskombinirati funkcionalnosti `StringIterator`a i `RichIterator`a u jednoj klasi.
-```tut
+```scala mdoc
object StringIteratorTest extends App {
class Iter extends StringIterator("Scala") with RichIterator
val iter = new Iter
iter foreach println
}
```
-
+
Nova klasa `Iter` ima `StringIterator` kao nadklasu i `RichIterator` kao mixin.
S jednostrukim nasljeđivanjem ne bismo mogli postići ovaj nivo fleksibilnosti.
diff --git a/_ba/tour/multiple-parameter-lists.md b/_ba/tour/multiple-parameter-lists.md
index ed9f22c1b1..68230aa12b 100644
--- a/_ba/tour/multiple-parameter-lists.md
+++ b/_ba/tour/multiple-parameter-lists.md
@@ -16,7 +16,7 @@ onda će to vratiti funkciju koja prima preostale liste parametara kao argumente
Primjer:
-```tut
+```scala mdoc
object CurryTest extends App {
def filter(xs: List[Int], p: Int => Boolean): List[Int] =
diff --git a/_ba/tour/named-arguments.md b/_ba/tour/named-arguments.md
index abe434c3b4..6656b02da2 100644
--- a/_ba/tour/named-arguments.md
+++ b/_ba/tour/named-arguments.md
@@ -12,7 +12,7 @@ prerequisite-knowledge: function-syntax
Kada se pozivaju metode, možete koristiti imena varijabli eksplicitno pri pozivu:
-```tut
+```scala mdoc
def printName(first: String, last: String): Unit = {
println(first + " " + last)
}
diff --git a/_ba/tour/nested-functions.md b/_ba/tour/nested-functions.md
index 7c4adc75b2..1a00eaba59 100644
--- a/_ba/tour/nested-functions.md
+++ b/_ba/tour/nested-functions.md
@@ -13,7 +13,7 @@ previous-page: higher-order-functions
U Scali je moguće ugnježdavati definicije metode.
Sljedeći objekt sadrži metodu `factorial` za računanje faktorijela datog broja:
-```tut
+```scala mdoc
def factorial(x: Int): Int = {
def fact(x: Int, accumulator: Int): Int = {
if (x <= 1) accumulator
diff --git a/_ba/tour/operators.md b/_ba/tour/operators.md
index 259fdca8fe..f1e8f3da07 100644
--- a/_ba/tour/operators.md
+++ b/_ba/tour/operators.md
@@ -25,7 +25,7 @@ Međutim, lakše je čitati kada se napiše kao infiksni operator:
## Definisanje i korištenje operatora
Možete koristiti bilo koji legalni identifikator kao operator.
To uključuje i imena kao `add` ili simbole kao `+`.
-```tut
+```scala mdoc
case class Vec(x: Double, y: Double) {
def +(that: Vec) = Vec(this.x + that.x, this.y + that.y)
}
@@ -42,7 +42,7 @@ Koristeći zagrade, možete pisati kompleksne izraze s čitljivom sintaksom.
Slijedi definicija klase `MyBool` koja definiše tri metode `and`, `or`, i `negate`.
-```tut
+```scala mdoc
case class MyBool(x: Boolean) {
def and(that: MyBool): MyBool = if (x) that else this
def or(that: MyBool): MyBool = if (x) this else that
@@ -52,7 +52,7 @@ case class MyBool(x: Boolean) {
Sada je moguće koristiti `and` i `or` kao infiksne operatore:
-```tut
+```scala mdoc
def not(x: MyBool) = x.negate
def xor(x: MyBool, y: MyBool) = (x or y) and not(x and y)
```
@@ -71,7 +71,7 @@ Kada izraz koristi više operatora, operatori se primjenjuju bazirano na priorit
&
^
|
-(sva slova)
+(sva slova, $, _)
```
Ovo se odnosi na metode koje definišete. Npr, sljedeći izraz:
```
diff --git a/_ba/tour/pattern-matching.md b/_ba/tour/pattern-matching.md
index 6c902033a2..e303e05d63 100644
--- a/_ba/tour/pattern-matching.md
+++ b/_ba/tour/pattern-matching.md
@@ -16,7 +16,7 @@ Podudaranje uzoraka je mehanizam za provjeranje da li vrijednost odgovara uzroku
## Sintaksa
Izraz za podudaranje ima vrijednost, `match` ključnu riječ, i bar jednu `case` klauzu.
-```tut
+```scala mdoc
import scala.util.Random
val x: Int = Random.nextInt(10)
@@ -34,7 +34,7 @@ Zadnji slučaj, `_`, je "uhvati sve" slučaj za brojeve veće od 2.
Slučajevi se još zovu i _alternative_.
Izrazi za podudaranje imaju vrijednost.
-```tut
+```scala mdoc
def matchTest(x: Int): String = x match {
case 1 => "one"
case 2 => "two"
@@ -50,7 +50,7 @@ Stoga, metoda `matchTest` vraća `String`.
Case klase su posebno korisne za podudaranje uzoraka.
-```tut
+```scala mdoc
abstract class Notification
case class Email(sender: String, title: String, body: String) extends Notification
@@ -118,7 +118,7 @@ U `case Email(email, _, _) if importantPeopleInfo.contains(email)`, uzorak se po
## Podudaranje samo tipa
Možete podudarati samo tip ovako:
-```tut
+```scala mdoc
abstract class Device
case class Phone(model: String) extends Device {
def screenOff = "Turning screen off"
@@ -140,7 +140,7 @@ Konvencija je da se koristi prvo slovo tipa kao identifikator (`p` i `c` ovdje).
Trejtovi i klase mogu biti `sealed` što znači da svi podtipovi moraju biti reklarisani u istom fajlu.
Ovo osigurava da su svi podtipovi poznati.
-```tut
+```scala mdoc
sealed abstract class Furniture
case class Couch() extends Furniture
case class Chair() extends Furniture
diff --git a/_ba/tour/polymorphic-methods.md b/_ba/tour/polymorphic-methods.md
index 2fcb672428..a5ad6e27f4 100644
--- a/_ba/tour/polymorphic-methods.md
+++ b/_ba/tour/polymorphic-methods.md
@@ -18,7 +18,7 @@ Vrijednosni parameteri ("obični") su ograđeni parom zagrada, dok su tipski par
Slijedi primjer:
-```tut
+```scala mdoc
def listOfDuplicates[A](x: A, length: Int): List[A] = {
if (length < 1)
Nil
diff --git a/_ba/tour/regular-expression-patterns.md b/_ba/tour/regular-expression-patterns.md
index 6936f74013..558b039a24 100644
--- a/_ba/tour/regular-expression-patterns.md
+++ b/_ba/tour/regular-expression-patterns.md
@@ -14,7 +14,7 @@ previous-page: singleton-objects
Regularni izrazi su stringovi koji se mogu koristiti za traženje uzoraka u podacima.
Bilo koji string se može pretvoriti u regularni izraz pozivom `.r` metode.
-```tut
+```scala mdoc
import scala.util.matching.Regex
val numberPattern: Regex = "[0-9]".r
@@ -30,7 +30,7 @@ U gornjem primjeru, `numberPattern` je `Regex`
Također, možete tražiti grupe regularnih izraza koristeći zagrade.
-```tut
+```scala mdoc
import scala.util.matching.Regex
val keyValPattern: Regex = "([0-9a-zA-Z-#() ]+): ([0-9a-zA-Z-#() ]+)".r
diff --git a/_ba/tour/self-types.md b/_ba/tour/self-types.md
index da47a626bc..9b0ccd95a2 100644
--- a/_ba/tour/self-types.md
+++ b/_ba/tour/self-types.md
@@ -17,7 +17,7 @@ Self-tip je način da se suzi tip `this` ili drugi identifikator koji je alijas
Sintaksa izgleda kao obična funkcija ali znači nešto sasvim drugačije.
Da bi koristili self-tip u trejtu, napišite identifikator, tip drugog trejta za umiksavanje, i `=>` (tj. `someIdentifier: SomeOtherTrait =>`).
-```tut
+```scala mdoc
trait User {
def username: String
}
diff --git a/_ba/tour/singleton-objects.md b/_ba/tour/singleton-objects.md
index 7c74f5318e..7f8ac84ba4 100644
--- a/_ba/tour/singleton-objects.md
+++ b/_ba/tour/singleton-objects.md
@@ -44,7 +44,7 @@ ako krug s velikim “C” ili “O” ima savijenu ivicu (kao papir), možete k
Klasa i njen kompanjon objekt, ako ga ima, moraju biti definisani u istom izvornom fajlu:
-```tut
+```scala mdoc
class IntPair(val x: Int, val y: Int)
object IntPair {
diff --git a/_ba/tour/traits.md b/_ba/tour/traits.md
index 6d0356cda0..8f7a82cc9e 100644
--- a/_ba/tour/traits.md
+++ b/_ba/tour/traits.md
@@ -18,12 +18,12 @@ Klase i objekti mogu naslijediti trejtove ali trejtovi ne mogu biti instancirani
## Definisanje trejta
Minimalni trejt je samo ključna riječ `trait` i identifikator:
-```tut
+```scala mdoc
trait HairColor
```
Trejtovi su vrlo korisni s generičkim tipovima i apstraktnim metodama.
-```tut
+```scala mdoc
trait Iterator[A] {
def hasNext: Boolean
def next(): A
@@ -34,7 +34,7 @@ Nasljeđivanje `trait Iterator[A]` traži tip `A` i implementacije metoda `hasNe
## Korištenje trejtova
Koristite `extends` za nasljeđivanje trejta. Zatim implementirajte njegove apstraktne članove koristeći `override` ključnu riječ:
-```tut
+```scala mdoc:nest
trait Iterator[A] {
def hasNext: Boolean
def next(): A
@@ -62,7 +62,7 @@ Ona nasljeđuje `Iterator[Int]` što znači da `next` mora vraćati `Int`.
## Podtipovi
Podtipovi trejtova mogu se koristiti gdje se trejt traži.
-```tut
+```scala mdoc
import scala.collection.mutable.ArrayBuffer
trait Pet {
diff --git a/_ba/tour/tuples.md b/_ba/tour/tuples.md
index 34bceaa3d1..99f49e7247 100644
--- a/_ba/tour/tuples.md
+++ b/_ba/tour/tuples.md
@@ -10,4 +10,4 @@ previous-page: traits
---
(this section of the tour has not been translated yet. pull request
-with translation welcome!)
\ No newline at end of file
+with translation welcome!)
diff --git a/_ba/tour/type-inference.md b/_ba/tour/type-inference.md
index 3ff6ab6ce6..d3b7eb1867 100644
--- a/_ba/tour/type-inference.md
+++ b/_ba/tour/type-inference.md
@@ -16,7 +16,7 @@ Povratni tipovi metoda također mogu biti izostavljeni jer oni odgovaraju tipu t
Slijedi jedan primjer:
-```tut
+```scala mdoc
object InferenceTest1 extends App {
val x = 1 + 2 * 3 // the type of x is Int
val y = x.toString() // the type of y is String
@@ -27,7 +27,7 @@ object InferenceTest1 extends App {
Za rekurzivne metode, kompajler nije u mogućnosti da zaključi tip rezultata.
Ovo je program koji se ne može kompajlirati iz ovog razloga:
-```tut:fail
+```scala mdoc:fail
object InferenceTest2 {
def fac(n: Int) = if (n == 0) 1 else n * fac(n - 1)
}
@@ -58,7 +58,7 @@ val y: Int = id[Int](1)
U nekim situacijama može biti vrlo opasno osloniti se na Scalin mehanizam zaključivanja tipova:
-```tut:fail
+```scala mdoc:fail
object InferenceTest4 {
var obj = null
obj = new Object()
diff --git a/_ba/tour/unified-types.md b/_ba/tour/unified-types.md
index 7f9787e5b9..92c1e2a61e 100644
--- a/_ba/tour/unified-types.md
+++ b/_ba/tour/unified-types.md
@@ -18,14 +18,14 @@ Dijagram ispod prikazuje hijerarhiju Scala klasa.
## Hijerarhija tipova u Scali ##
-[`Any`](https://www.scala-lang.org/api/2.12.1/scala/Any.html) je nadtip svih tipova, zove se još i vrh-tip.
+[`Any`](https://www.scala-lang.org/api/2.12.1/scala/Any.html) je nadtip svih tipova, zove se još i vrh-tip.
Definiše određene univerzalne metode kao što su `equals`, `hashCode` i `toString`.
`Any` ima dvije direktne podklase, `AnyVal` i `AnyRef`.
-`AnyVal` predstavlja vrijednosne tipove. Postoji devet predefinisanih vrijednosnih tipova i oni ne mogu biti `null`:
+`AnyVal` predstavlja vrijednosne tipove. Postoji devet predefinisanih vrijednosnih tipova i oni ne mogu biti `null`:
`Double`, `Float`, `Long`, `Int`, `Short`, `Byte`, `Char`, `Unit` i `Boolean`.
-`Unit` je vrijednosni tip koji ne nosi značajnu informaciju. Postoji tačno jedna instanca tipa `Unit` koja se piše `()`.
+`Unit` je vrijednosni tip koji ne nosi značajnu informaciju. Postoji tačno jedna instanca tipa `Unit` koja se piše `()`.
Sve funkcije moraju vratiti nešto tako da je `Unit` ponekad koristan povratni tip.
`AnyRef` predstavlja referencne tipove. Svi nevrijednosni tipovi definišu se kao referencni.
@@ -34,7 +34,7 @@ Ako se Scala koristi u kontekstu JRE, onda `AnyRef` odgovara klasi `java.lang.Ob
Slijedi primjer koji demonstrira da su stringovi, integeri, karakteri, booleani i funkcije svi objekti kao bilo koji drugi:
-```tut
+```scala mdoc
val list: List[Any] = List(
"a string",
732, // an integer
@@ -64,9 +64,9 @@ Vrijednosni tipovi mogu biti kastovani na sljedeći način:
Npr:
-```tut
+```scala mdoc
val x: Long = 987654321
-val y: Float = x // 9.8765434E8 (određena doza preciznosti se gubi ovdje)
+val y: Float = x.toFloat // 9.8765434E8 (određena doza preciznosti se gubi ovdje)
val face: Char = '☺'
val number: Int = face // 9786
@@ -76,17 +76,17 @@ Kastovanje je jednosmjerno. Ovo se ne kompajlira:
```
val x: Long = 987654321
-val y: Float = x // 9.8765434E8
+val y: Float = x.toFloat // 9.8765434E8
val z: Long = y // Does not conform
```
Također možete kastovati i referencni tip u podtip. Ovo će biti pokriveno kasnije.
## Nothing i Null
-`Nothing` je podtip svih tipova, također se zove i donji tip (en. bottom type). Ne postoji vrijednost koja ima tip `Nothing`.
+`Nothing` je podtip svih tipova, također se zove i donji tip (en. bottom type). Ne postoji vrijednost koja ima tip `Nothing`.
Česta upotreba ovog tipa je signalizacija neterminacije kao što je bacanje izuzetka, izlaz iz programa, ili beskonačna petlja (tj. tip izraza koji se ne izračunava u vrijednost, ili metoda koja se ne završava normalno).
-`Null` je podtip svih referencnih tipova (tj. bilo kog podtipa `AnyRef`).
-Ima jednu vrijednost koja se piše literalom `null`.
-`Null` se uglavnom koristi radi interoperabilnosti s ostalim JVM jezicima i skoro nikad se ne koristi u Scala kodu.
+`Null` je podtip svih referencnih tipova (tj. bilo kog podtipa `AnyRef`).
+Ima jednu vrijednost koja se piše literalom `null`.
+`Null` se uglavnom koristi radi interoperabilnosti s ostalim JVM jezicima i skoro nikad se ne koristi u Scala kodu.
Alternative za `null` obradićemo kasnije.
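
The page now calls `.toFloat` explicitly rather than relying on the automatic `Long` to `Float` widening, which loses precision (and which newer compilers warn about). A short sketch of the explicit conversions, not copied from the page:

```scala
val x: Long = 987654321L

// Explicit, lossy conversion: Float cannot represent every Long exactly.
val y: Float = x.toFloat // 9.8765434E8

// The reverse direction also requires an explicit conversion.
val z: Long = y.toLong
```
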
diff --git a/_ba/tour/upper-type-bounds.md b/_ba/tour/upper-type-bounds.md
index ce4f4e198e..e91d904d5d 100644
--- a/_ba/tour/upper-type-bounds.md
+++ b/_ba/tour/upper-type-bounds.md
@@ -15,7 +15,7 @@ Takve granice tipa ograničavaju konkretne vrijednosti tipskih varijabli i ponek
_Gornja granica tipa_ `T <: A` kaže da se tipska varijabla `T` odnosi na podtip tipa `A`.
Slijedi primjer koji demonstrira gornju granicu tipa za tipski parametar klase `PetContainer`:
-```tut
+```scala mdoc
abstract class Animal {
def name: String
}
@@ -42,7 +42,7 @@ val dogContainer = new PetContainer[Dog](new Dog)
val catContainer = new PetContainer[Cat](new Cat)
```
-```tut:fail
+```scala mdoc:fail
val lionContainer = new PetContainer[Lion](new Lion) // this would not compile
```
Klasa `PetContainer` prima tipski parametar `P` koji mora biti podtip od `Pet`.
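As a compact companion to the `PetContainer` example above, here is a hedged sketch (the names `Pet`, `Cat` and `greet` are illustrative, not from the page) of the same upper-bound idea applied to a method type parameter:

```scala
abstract class Pet { def name: String }
class Cat extends Pet { val name: String = "Whiskers" }

// The upper bound P <: Pet restricts P to subtypes of Pet,
// so p.name is guaranteed to exist.
def greet[P <: Pet](p: P): String = s"Hello, ${p.name}"

greet(new Cat)         // compiles
// greet("not a pet")  // would not compile: String is not a subtype of Pet
```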
diff --git a/_ba/tour/variances.md b/_ba/tour/variances.md
index 0540982844..2920e00dac 100644
--- a/_ba/tour/variances.md
+++ b/_ba/tour/variances.md
@@ -14,7 +14,7 @@ Varijansa je korelacija podtipskih veza kompleksnih tipova i podtipskih veza nji
Scala podržava anotacije varijanse tipskih parametara [generičkih klasa](generic-classes.html), dozvoljavajući im da budu kovarijantni, kontravarijantni, ili invarijantni ako se anotacije ne koriste.
Korištenje varijanse u sistemu tipova dozvoljava pravljenje intuitivnijih veza među kompleksnim tipovima, a nedostatak varijanse može ograničiti ponovno iskorištenje klasne apstrakcije.
-```tut
+```scala mdoc
class Foo[+A] // kovarijantna klasa
class Bar[-A] // kontravarijantna klasa
class Baz[A] // invarijantna klasa
@@ -28,7 +28,7 @@ Ovo dozvoljava pravljenje vrlo intuitivnih podtipskih veza koristeći generiku.
Razmotrite sljedeću strukturu klasa:
-```tut
+```scala mdoc
abstract class Animal {
def name: String
}
@@ -44,7 +44,7 @@ Intuitivno, ima smisla da su lista mačaka i lista pasa također liste životinj
U sljedećem primjeru, metoda `printAnimalNames` prima listu životinja kao argument i ispisuje njihova imena, svako na idućoj liniji.
Da `List[A]` nije kovarijantna, zadnja dva poziva metode se ne bi kompajlirali, što bi značajno ograničilo korisnost `printAnimalNames` metode.
-```tut
+```scala mdoc
object CovarianceTest extends App {
def printAnimalNames(animals: List[Animal]): Unit = {
animals.foreach { animal =>
@@ -73,7 +73,7 @@ To jest, za neku `class Writer[-A]`, kontravarijantno `A` znači da za dva tipa
Razmotrimo `Cat`, `Dog`, i `Animal` klase u sljedećem primjeru:
-```tut
+```scala mdoc
abstract class Printer[-A] {
def print(value: A): Unit
}
@@ -81,7 +81,7 @@ abstract class Printer[-A] {
`Printer[A]` je jednostavna klasa koja zna ispisati neki tip `A`. Definišimo neke podklase za specifične tipove:
-```tut
+```scala mdoc
class AnimalPrinter extends Printer[Animal] {
def print(animal: Animal): Unit =
println("The animal's name is: " + animal.name)
@@ -99,7 +99,7 @@ Inverzna veza ne vrijedi, jer `Printer[Cat]` ne zna kako da ispiše bilo koju `A
Stoga, terbali bismo moći zamijeniti `Printer[Animal]` za `Printer[Cat]`, ako želimo, i praveći `Printer[A]` kontravarijantnim nam to dozvoljava.
-```tut
+```scala mdoc
object ContravarianceTest extends App {
val myCat: Cat = Cat("Boots")
@@ -129,7 +129,7 @@ Ovo znač da nisu ni kovarijantne ni kontravarijantne.
U kontekstu sljedećeg primjera, `Container` klasa je invarijantna.
`Container[Cat]` _nije_ `Container[Animal]`, niti obrnuto.
-```tut
+```scala mdoc
class Container[A](value: A) {
private var _value: A = value
def getValue: A = _value
@@ -162,7 +162,7 @@ Za ovaj primjer koristićemo literal notaciju `A => B` za predstavljanje `Functi
Pretpostavimo da imamo sličnu hijerarhiju klasa `Cat`, `Dog`, `Animal` otprije, plus sljedeće:
-```tut
+```scala mdoc
class SmallAnimal
class Mouse extends SmallAnimal
```
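A small sketch (assumed, not taken from the translated page) of where this example is heading: `Function1[-A, +R]` is contravariant in its argument and covariant in its result, so a function from `Animal` to `Mouse` can stand in for a function from `Cat` to `SmallAnimal`:

```scala
class Animal { def name: String = "animal" }
class Cat extends Animal { override def name: String = "cat" }
class SmallAnimal
class Mouse extends SmallAnimal

// Animal => Mouse is a subtype of Cat => SmallAnimal because
// Function1 is contravariant in its argument and covariant in its result.
val huntedPrey: Animal => Mouse = _ => new Mouse
val catHunts: Cat => SmallAnimal = huntedPrey

catHunts(new Cat)  // returns a Mouse, which is a SmallAnimal
```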
diff --git a/_books/1-programming-in-scala-4th.md b/_books/1-programming-in-scala-5th.md
similarity index 52%
rename from _books/1-programming-in-scala-4th.md
rename to _books/1-programming-in-scala-5th.md
index cb43ef35f1..826e5361df 100644
--- a/_books/1-programming-in-scala-4th.md
+++ b/_books/1-programming-in-scala-5th.md
@@ -1,12 +1,11 @@
---
-title: "Programming in Scala, 4th ed"
-link: https://booksites.artima.com/programming_in_scala_4ed
-image: /resources/img/books/ProgrammingInScala.gif
-status: Updated for Scala 2.13
+title: "Programming in Scala, 5th ed"
+link: https://www.artima.com/shop/programming_in_scala_5ed
+image: /resources/img/books/ProgrammingInScala.png
+status: Updated for Scala 3
authors: ["Martin Odersky", "Lex Spoon", "Bill Venners"]
-publisher:
+publisher: Artima
+publisherLink: https://www.artima.com/books
---
-(First edition [available for free online reading](https://www.artima.com/pins1ed/))
-
-This book is co-authored by the language's designer, Martin Odersky. It provides depth and clarity on the diverse features of the language. The book provides both an authoritative reference for Scala and a systematic tutorial covering all the features in the language. Once you are familiar with the basics of Scala you will appreciate having this source of invaluable examples and precise explanations of Scala on hand. The book is available from [Artima](https://booksites.artima.com/programming_in_scala_4ed). Award winning book - [Jolt Productivity award](https://www.drdobbs.com/joltawards/232601431) for Technical Books.
+This book is co-authored by the language's designer, Martin Odersky. It provides depth and clarity on the diverse features of the language, serving both as an authoritative reference for Scala and as a systematic tutorial covering all the features in the language. Once you are familiar with the basics of Scala, you will appreciate having this source of invaluable examples and precise explanations of Scala on hand. The book is available from [Artima](https://www.artima.com/shop/programming_in_scala_5ed). Winner of the [Jolt Productivity award](https://www.drdobbs.com/joltawards/232601431) for Technical Books.
diff --git a/_books/2-programming-scala.md b/_books/2-programming-scala.md
index 27a1e9ce61..8fc729169f 100644
--- a/_books/2-programming-scala.md
+++ b/_books/2-programming-scala.md
@@ -1,11 +1,11 @@
---
title: "Programming Scala"
-link: https://shop.oreilly.com/product/0636920033073.do
+link: http://programming-scala.com
image: /resources/img/books/ProgrammingScala-final-border.gif
-status: Updated for Scala 2.12
-authors: ["Alex Payne", "Dean Wampler"]
+status: Updated for Scala 3
+authors: ["Dean Wampler"]
publisher: O’Reilly
publisherLink: https://www.oreilly.com/
---
-Both are industry experts, Alex Payne being the lead API programmer at Twitter, a social networking service based on Scala. O’Reilly, the publisher, writes: "Learn how to be more productive with Scala, a new multi-paradigm language for the Java Virtual Machine (JVM) that integrates features of both object-oriented and functional programming. With this book, you'll discover why Scala is ideal for highly scalable, component-based applications that support concurrency and distribution. You'll also learn how to leverage the wealth of Java class libraries to meet the practical needs of enterprise and Internet projects more easily."
+Dean is a well-known member of the Scala community who has recently used Scala for streaming data systems at Lightbend and now at Domino Data Lab. This edition covers the new features of Scala 3, with comparisons to Scala 2, both to explain why the changes were made and how they improve Scala, and to help developers working in mixed Scala 2 and 3 code bases work effectively. The book is aimed at professional programmers who want a comprehensive, in-depth, yet pragmatic tour of Scala and best practices for using it.
diff --git a/_books/3-scala-for-the-impatient.md b/_books/3-scala-for-the-impatient.md
index e01bcea6c0..72c7c01f6d 100644
--- a/_books/3-scala-for-the-impatient.md
+++ b/_books/3-scala-for-the-impatient.md
@@ -1,15 +1,17 @@
---
title: "Scala for the Impatient"
-link: https://www.horstmann.com/scala/index.html
-image: /resources/img/books/scala_for_the_impatient.png
-status: Available Now
-authors: ["Cay S. Horstmann"]
-publisher: Addison-Wesley
+link: https://horstmann.com/scala/
+image: /resources/img/books/scala_for_the_impatient.jpg
+status: Updated for Scala 3
+authors: ["Cay Horstmann"]
+publisher: Addison-Wesley Professional
+publisherLink: https://www.oreilly.com/publisher/addison-wesley-professional/
---
What you get:
-* A rapid introduction to Scala for programmers who are competent in Java, C#, or C++
+* Up-to-date coverage of Scala 3
+* A rapid introduction to Scala for programmers who are competent in another language such as Java, C#, Python, JavaScript, or C++
* Blog-length chunks of information that you can digest quickly
* An organization that you'll find useful as a quick reference
diff --git a/_books/4-hands-on-scala.md b/_books/4-hands-on-scala.md
new file mode 100644
index 0000000000..ba60fbf9b6
--- /dev/null
+++ b/_books/4-hands-on-scala.md
@@ -0,0 +1,11 @@
+---
+title: "Hands-on Scala Programming"
+link: https://www.handsonscala.com/
+image: /resources/img/books/HandsOnScala.jpg
+status: Covers Scala 2.13
+authors: ["Li Haoyi"]
+publisher: Li Haoyi
+publisherLink: http://www.lihaoyi.com
+---
+
+"Hands-on Scala teaches you how to use the Scala programming language in a practical, project-based fashion. This book is designed to quickly teach an existing programmer everything needed to go from "hello world" to building production applications like interactive websites, parallel web crawlers, and distributed systems in Scala. In the process you will learn how to use the Scala language to solve challenging problems in an elegant and intuitive manner."
diff --git a/_books/5-get-programming.md b/_books/5-get-programming.md
new file mode 100644
index 0000000000..5d6803860d
--- /dev/null
+++ b/_books/5-get-programming.md
@@ -0,0 +1,11 @@
+---
+title: "Get Programming with Scala"
+link: https://www.manning.com/books/get-programming-with-scala
+image: /resources/img/books/get-programming-book.png
+status: Covers Scala 2 and 3
+authors: ["Daniela Sfregola"]
+publisher: Manning
+publisherLink: https://www.manning.com/
+---
+
+"The perfect starting point for your journey into Scala and functional programming. Scala is a multi-style programming language for the JVM that supports both object-oriented and functional programming. Master Scala, and you'll be well-equipped to match your programming approach to the type of problem you're dealing with. Packed with examples and exercises, _Get Programming with Scala_ is the perfect starting point for developers with some OO knowledge who want to learn Scala and pick up a few FP skills along the way."
diff --git a/_books/5-scala-puzzlers.md b/_books/5-scala-puzzlers.md
deleted file mode 100644
index a60753e483..0000000000
--- a/_books/5-scala-puzzlers.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-title: "Scala Puzzlers"
-link: https://www.artima.com/shop/scala_puzzlers
-image: /resources/img/books/scala-puzzlers-book.jpg
-status: Available now
-authors: ["Andrew Phillips", "Nermin Šerifović"]
-publisher: Artima Press
-publisherLink: https://www.artima.com/index.jsp
----
-
-"Getting code to do what we want it to do is perhaps the essence of our purpose as developers. So there are few things more intriguing or important than code that we think we understand, but that behaves rather contrary to our expectations. Scala Puzzlers is a collection of such examples in Scala. It is not only an entertaining and instructive way of understanding this highly expressive language better. It will also help you recognize many counter-intuitive traps and pitfalls and prevent them from inflicting further production bug hunt stress on Scala developers."
diff --git a/_books/6-creative-scala.md b/_books/6-creative-scala.md
new file mode 100644
index 0000000000..bd2007679a
--- /dev/null
+++ b/_books/6-creative-scala.md
@@ -0,0 +1,11 @@
+---
+title: "Creative Scala"
+link: https://www.creativescala.org
+image: /resources/img/books/CreativeScala.png
+status: Free online book
+authors: ["Dave Gurnell", "Noel Welsh"]
+publisher: Underscore
+publisherLink: https://underscore.io
+---
+
+"The book for new developers who want to learn Scala and have fun. Creative Scala is aimed at developers who have no prior experience in Scala. It is designed to give you a fun introduction to functional programming. We assume you have some very basic familiarity with another programming language but little or no experience with Scala or other functional languages. We've chosen what we hope is a fun method to explore functional programming and Scala: computer graphics."
diff --git a/_books/4-functional-programming-in-scala.md b/_books/7-functional-programming-in-scala.md
similarity index 72%
rename from _books/4-functional-programming-in-scala.md
rename to _books/7-functional-programming-in-scala.md
index e90d36e92d..0b878c6b15 100644
--- a/_books/4-functional-programming-in-scala.md
+++ b/_books/7-functional-programming-in-scala.md
@@ -1,11 +1,13 @@
---
title: "Functional Programming in Scala"
-link: https://www.manning.com/books/functional-programming-in-scala
-image: /resources/img/books/FPiS_93x116.png
-status: Available now
-authors: ["Paul Chiusano", "Rúnar Bjarnason"]
+link: https://www.manning.com/books/functional-programming-in-scala-second-edition
+image: /resources/img/books/FPiS_93x116.jpg
+status: Updated for Scala 3
+authors: ["Michael Pilquist", "Paul Chiusano", "Rúnar Bjarnason"]
publisher: Manning
publisherLink: https://www.manning.com/
---
-"Functional programming (FP) is a style of software development emphasizing functions that don't depend on program state... Functional Programming in Scala is a serious tutorial for programmers looking to learn FP and apply it to the everyday business of coding. The book guides readers from basic techniques to advanced topics in a logical, concise, and clear progression. In it, you'll find concrete examples and exercises that open up the world of functional programming."
\ No newline at end of file
+"Functional programming (FP) is a style of software development emphasizing functions that don't depend on program state... Functional Programming in Scala is a serious tutorial for programmers looking to learn FP and apply it to the everyday business of coding. The book guides readers from basic techniques to advanced topics in a logical, concise, and clear progression. In it, you'll find concrete examples and exercises that open up the world of functional programming."
+
+Forewords by Daniel Spiewak and Martin Odersky.
diff --git a/_cheatsheets/index.md b/_cheatsheets/index.md
index 2b4cc489ec..679e4ed242 100644
--- a/_cheatsheets/index.md
+++ b/_cheatsheets/index.md
@@ -7,7 +7,7 @@ partof: cheatsheet
by: Brendan O'Connor
about: Thanks to Brendan O'Connor, this cheatsheet aims to be a quick reference of Scala syntactic constructions. Licensed by Brendan O'Connor under a CC-BY-SA 3.0 license.
-languages: [ba, fr, ja, pl, pt-br, zh-cn, th, ru]
+languages: [ba, fr, ja, pl, pt-br, zh-cn, th, ru, uk]
---
@@ -220,7 +220,6 @@ languages: [ba, fr, ja, pl, pt-br, zh-cn, th, ru]
import scala.util.control.Breaks._
-
breakable {
for (x <- xs) {
if (Math.random < 0.1)
@@ -332,7 +331,7 @@ breakable {
var y = x
val readonly = 5
private var secret = 1
- def this = this(42)
+ def this() = this(42)
}
Constructor is class body. Declare a public member. Declare a gettable but not settable member. Declare a private member. Alternative constructor.
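The correction above (`def this = this(42)` to `def this() = this(42)`) matters because an auxiliary constructor is declared with an explicit, possibly empty, parameter list and must first call another constructor. A minimal sketch with an illustrative `Counter` class:

```scala
class Counter(val start: Int) {   // the primary constructor is the class body
  def this() = this(42)           // auxiliary zero-argument constructor;
                                  // `def this = this(42)` does not parse
}

val a = new Counter(7)   // uses the primary constructor
val b = new Counter()    // uses the auxiliary constructor; b.start == 42
```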
diff --git a/_config.yml b/_config.yml
index 3757290d82..e1ed8d682e 100644
--- a/_config.yml
+++ b/_config.yml
@@ -15,8 +15,9 @@ keywords:
- Document
- Guide
-scala-version: 2.13.2
-scala-212-version: 2.12.11
+scala-version: 2.13.16
+scala-212-version: 2.12.20
+scala-3-version: 3.7.1
collections:
style:
@@ -40,9 +41,6 @@ collections:
permalink: /:collection/:path.html
books:
output: false
- getting-started:
- output: true
- permalink: /:collection/:path.html
ja: # Japanese translations
output: true
permalink: /:collection/:path.html
@@ -82,6 +80,9 @@ collections:
th: # Thai translations
output: true
permalink: /:collection/:path.html
+ uk: # Ukrainian translations
+ output: true
+ permalink: /:collection/:path.html
defaults:
-
@@ -90,11 +91,126 @@ defaults:
type: "tour"
values:
overview-name: "Tour of Scala"
+ -
+ scope:
+ path: "_overviews/getting-started"
+ values:
+ permalink: "/:path.html"
+ -
+ scope:
+ path: "_overviews/macros"
+ values:
+ scala2: true
+ versionSpecific: true
+ -
+ scope:
+ path: "_overviews/reflection"
+ values:
+ scala2: true
+ versionSpecific: true
+ -
+ scope:
+ path: "_overviews/quasiquotes"
+ values:
+ scala2: true
+ versionSpecific: true
+ -
+ scope:
+ path: "_overviews/repl"
+ values:
+ scala2: true
+ versionSpecific: true
+ -
+ scope:
+ path: "_overviews/plugins"
+ values:
+ scala2: true
+ versionSpecific: true
+ -
+ scope:
+ path: "_overviews/compiler-options"
+ values:
+ scala2: true
+ versionSpecific: true
+ -
+ scope:
+ path: "_overviews/scala3-book"
+ values:
+ scala3: true
+ # num: 99 # to list them in the TOC, should be overwritten individually
+ partof: scala3-book
+ type: section
+ overview-name: "Scala 3 — Book"
+ layout: multipage-overview
+ permalink: "/scala3/book/:title.html"
+ -
+ scope:
+ path: "_overviews/contribute"
+ values:
+ partof: scala-contribution
+ overview-name: Contributing to Scala's OSS Ecosystem
+ layout: multipage-overview
+ permalink: "/contribute/:title.html"
+ -
+ scope:
+ path: "_overviews/scala3-migration"
+ values:
+ scala3: true
+ # num: 99 # to list them in the TOC, should be overwritten individually
+ partof: scala3-migration
+ type: section
+ overview-name: "Scala 3 Migration Guide"
+ layout: multipage-overview
+ permalink: "/scala3/guides/migration/:title.html"
+ -
+ scope:
+ path: "_overviews/scala3-contribution"
+ values:
+ scala3: true
+ partof: scala3-contribution
+ type: section
+ overview-name: "Guide to Scala 3 Compiler Contribution"
+ layout: multipage-overview
+ permalink: "/scala3/guides/contribution/:title.html"
+ -
+ scope:
+ path: "_overviews/scala3-macros"
+ values:
+ scala3: true
+ versionSpecific: true
+ partof: scala3-macros
+ overview-name: "Macros in Scala 3"
+ layout: multipage-overview
+ permalink: "/scala3/guides/macros/:title.html"
+ -
+ scope:
+ path: "_overviews/scala3-scaladoc"
+ values:
+ scala3: true
+ versionSpecific: true
+ partof: scala3-scaladoc
+ overview-name: "Scaladoc"
+ layout: multipage-overview
+ permalink: "/scala3/guides/scaladoc/:title.html"
+ -
+ scope:
+ path: "_overviews/toolkit"
+ values:
+ partof: toolkit
+ overview-name: "The Scala Toolkit"
+ layout: multipage-overview
+ permalink: "/toolkit/:title.html"
+ -
+ scope:
+ path: "scala3"
+ values:
+ scala3: true
+
highlighter: rouge
permalink: /:categories/:title.html:output_ext
baseurl:
-exclude: ["vendor"]
+scala3ref: "https://docs.scala-lang.org/scala3/reference"
+exclude: ["vendor", ".metals"]
plugins:
- jekyll-redirect-from
- - jekyll-scalafiddle
diff --git a/_data/compiler-options.yml b/_data/compiler-options.yml
index fb67497392..f4cced5028 100644
--- a/_data/compiler-options.yml
+++ b/_data/compiler-options.yml
@@ -260,7 +260,7 @@
type: "String"
arg: "release"
default:
- description: "Compile for a specific version of the Java platform. Supported targets: 6, 7, 8, 9"
+ description: "Compile for a specific version of the Java platform. Supported targets: 8, 11, or any higher version listed at https://docs.scala-lang.org/overviews/jdk-compatibility/overview.html"
abbreviations:
- "--release"
- option: "-sourcepath"
@@ -379,8 +379,6 @@
description: "Warn when nullary methods return Unit."
- choice: "inaccessible"
description: "Warn about inaccessible types in method signatures."
- - choice: "nullary-override"
- description: "Warn when non-nullary `def f()` overrides nullary `def f`."
- choice: "infer-any"
description: "Warn when a type argument is inferred to be `Any`."
- choice: "missing-interpolator"
@@ -469,10 +467,6 @@
schema:
type: "Boolean"
description: "Don't perform exhaustivity/unreachability analysis. Also, ignore @switch annotation."
- - option: "-Xno-uescape"
- schema:
- type: "Boolean"
- description: "Disable handling of \\u unicode escapes."
- option: "-Xnojline"
schema:
type: "Boolean"
@@ -611,9 +605,22 @@
- option: "-Vimplicits"
schema:
type: "Boolean"
- description: "Show more detail on why some implicits are not applicable."
+ description: "Print dependent missing implicits."
abbreviations:
- "-Xlog-implicits"
+ - option: "-Vimplicits-verbose-tree"
+ schema:
+ type: "Boolean"
+ description: "Display all intermediate implicits in a chain."
+ - option: "-Vimplicits-max-refined"
+ schema:
+ type: "Int"
+ default: "0"
+ description: "max chars for printing refined types, abbreviate to `F {...}`"
+ - option: "-Vtype-diffs"
+ schema:
+ type: "Boolean"
+ description: "Print found/required error messages as colored diffs."
- option: "-Vinline"
schema:
type: "String"
@@ -1126,8 +1133,6 @@
description: "Warn when nullary methods return Unit."
- choice: "inaccessible"
description: "Warn about inaccessible types in method signatures."
- - choice: "nullary-override"
- description: "Warn when non-nullary `def f()` overrides nullary `def f`."
- choice: "infer-any"
description: "Warn when a type argument is inferred to be `Any`."
- choice: "missing-interpolator"
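To illustrate the newly documented `-Vimplicits` family of options above, here is a hedged sketch (the `Show` type class and all names are assumptions for illustration) of code whose failed implicit search those flags are meant to explain in more detail:

```scala
// Compiling with `scalac -Vimplicits` (optionally combined with
// -Vimplicits-verbose-tree) reports which implicit in the chain is
// actually missing, here a Show[Boolean] needed by listShow.
trait Show[A] { def show(a: A): String }

object Show {
  implicit val intShow: Show[Int] = i => i.toString
  implicit def listShow[A](implicit ev: Show[A]): Show[List[A]] =
    xs => xs.map(ev.show).mkString("[", ", ", "]")
}

object Demo {
  def render[A](a: A)(implicit s: Show[A]): String = s.show(a)

  val ok: String = render(List(1, 2, 3))   // compiles: Show[Int] is in scope
  // render(List(true))                    // error; -Vimplicits points at the missing Show[Boolean]
}
```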
diff --git a/_data/doc-nav-header.yml b/_data/doc-nav-header.yml
index 5f9e5b9b0c..772da79703 100644
--- a/_data/doc-nav-header.yml
+++ b/_data/doc-nav-header.yml
@@ -1,25 +1,47 @@
-- title: API
+- title: Getting Started
url: "#"
submenu:
- - title: Current
- url: https://www.scala-lang.org/api/current/
- - title: Nightly
- url: https://www.scala-lang.org/files/archive/nightly/2.13.x/api/2.13.x/
- - title: All Versions
- url: "/api/all.html"
+ - title: Install Scala
+ url: "/getting-started/install-scala.html"
+ - title: Scala IDEs
+ url: "/getting-started/scala-ides.html"
- title: Learn
url: "#"
submenu:
- - title: Getting Started
- url: "/getting-started/index.html"
- title: Tour of Scala
url: "/tour/tour-of-scala.html"
- - title: Scala Book
+ - title: Scala 3 Book
+ url: "/scala3/book/introduction.html"
+ - title: Scala 2 Book
url: "/overviews/scala-book/introduction.html"
+ - title: Online Courses
+ url: "/online-courses.html"
+- title: Scala 3 Migration
+ url: "#"
+ submenu:
+ - title: What's New?
+ url: "/scala3/new-in-scala3.html"
+ - title: Migrating From Scala 2
+ url: "/scala3/guides/migration/compatibility-intro.html"
+ - title: New Features for Scaladoc
+ url: "/scala3/scaladoc.html"
+ - title: Videos and Talks
+ url: "/scala3/talks.html"
+- title: Tutorials
+ url: "#"
+ submenu:
+ - title: Getting Started with Scala in IntelliJ
+ url: "/getting-started/intellij-track/getting-started-with-scala-in-intellij.html"
+ - title: Getting Started with Scala and sbt
+ url: "/getting-started/sbt-track/getting-started-with-scala-and-sbt-on-the-command-line.html"
- title: Scala for Java Programmers
url: "/tutorials/scala-for-java-programmers.html"
- - title: Online Resources
- url: "/learn.html"
+ - title: Scala on Android
+ url: "/tutorials/scala-on-android.html"
+ - title: Scala with Maven
+ url: "/tutorials/scala-with-maven.html"
+ - title: Using the Scala Toolkit
+ url: "/toolkit/introduction.html"
- title: Reference
url: "#"
submenu:
@@ -27,15 +49,23 @@
url: "/overviews/index.html"
- title: Books
url: "/books.html"
- - title: Scala FAQs
+ - title: Scala FAQ
url: "/tutorials/FAQ/index.html"
- - title: Language Spec
+ - title: Scala 2 Language Specification
url: http://scala-lang.org/files/archive/spec/2.13/
-- title: Style Guide
- url: "/style/index.html"
-- title: Cheatsheet
- url: "/cheatsheets/index.html"
-- title: Glossary
- url: "/glossary/index.html"
+ - title: Scala 3 Language Specification
+ url: http://scala-lang.org/files/archive/spec/3.4/
+ - title: Scala 3 Language Reference
+ url: "https://docs.scala-lang.org/scala3/reference"
+ - title: Scala Contribution Guide
+ url: "/contribute/"
+ - title: Style Guide
+ url: "/style/index.html"
+ - title: Cheatsheet
+ url: "/cheatsheets/index.html"
+ - title: Glossary
+ url: "/glossary/index.html"
+- title: API
+ url: "/api/all.html"
- title: SIPs
url: "/sips/index.html"
diff --git a/_data/footer.yml b/_data/footer.yml
index b810181f92..789017f8b9 100644
--- a/_data/footer.yml
+++ b/_data/footer.yml
@@ -21,9 +21,11 @@
links:
- title: Community
url: "http://scala-lang.org/community/"
- - title: Mailing Lists
- url: "http://scala-lang.org/community/index.html#mailing-lists"
- - title: Chat Rooms & More
+ - title: Scala Ambassadors
+ url: "http://scala-lang.org/ambassadors/"
+ - title: Forums
+ url: "http://scala-lang.org/community/index.html#forums"
+ - title: Chat
url: "http://scala-lang.org/community/index.html#chat-rooms"
- title: Libraries and Tools
url: "http://scala-lang.org/community/index.html#community-libraries-and-tools"
@@ -39,16 +41,28 @@
- title: Scala
class: scala
links:
+ - title: Governance
+ url: "http://scala-lang.org/governance/"
- title: Blog
url: "http://scala-lang.org/blog/"
- title: Code of Conduct
url: "http://scala-lang.org/conduct/"
- title: License
url: "http://scala-lang.org/license/"
+ - title: Security Policy
+ url: "http://scala-lang.org/security/"
- title: Social
class: social
links:
- title: GitHub
url: "https://github.com/scala/scala"
- - title: Twitter
- url: "https://twitter.com/scala_lang"
+ - title: Mastodon
+ url: "https://fosstodon.org/@scala_lang"
+ - title: Bluesky
+ url: "https://bsky.app/profile/scala-lang.org"
+ - title: X
+ url: "https://x.com/scala_lang"
+ - title: Discord
+ url: "https://discord.com/invite/scala"
+ - title: LinkedIn
+ url: "https://www.linkedin.com/company/scala-center/"
\ No newline at end of file
diff --git a/_data/messages.yml b/_data/messages.yml
new file mode 100644
index 0000000000..642a7ac557
--- /dev/null
+++ b/_data/messages.yml
@@ -0,0 +1 @@
+scam-banner: "**⚠️ Beware of Scams**: since Feb 2024, scammers have been using [fake Scala websites to sell courses](https://www.scala-lang.org/blog/2024/03/01/fake-scala-courses.html); please check that you are using an official source."
diff --git a/_data/nav-header.yml b/_data/nav-header.yml
index 26bd280020..792c68fc1e 100644
--- a/_data/nav-header.yml
+++ b/_data/nav-header.yml
@@ -1,12 +1,14 @@
-- title: Documentation
+- title: Learn
url: "/"
-- title: Download
+- title: Install
url: https://www.scala-lang.org/download/
+- title: Playground
+ url: https://scastie.scala-lang.org
+- title: Find A Library
+ url: https://index.scala-lang.org
- title: Community
url: https://www.scala-lang.org/community/
-- title: Libraries
- url: https://index.scala-lang.org
-- title: Contribute
- url: https://www.scala-lang.org/contribute/
+- title: Governance
+ url: https://www.scala-lang.org/governance/
- title: Blog
url: https://www.scala-lang.org/blog/
diff --git a/_data/overviews-ja.yml b/_data/overviews-ja.yml
index f6cbeb34cd..60277bd90c 100644
--- a/_data/overviews-ja.yml
+++ b/_data/overviews-ja.yml
@@ -58,8 +58,32 @@
- category: 言語
description: "Scala 言語の機能をカバーするガイドと概要"
overviews:
+ - title: "Scala 2 から Scala 3 への移行"
+ icon: suitcase
+ root: "scala3/guides/"
+ url: "migration/compatibility-intro.html"
+ description: "Scala 3 との互換性と移行について知っておくべきことすべて"
+ - title: "Scala 3 マクロ"
+ by: Nicolas Stucki
+ icon: magic
+ root: "scala3/guides/"
+ url: "macros"
+ description: "Scala 3 のマクロの書き方に関係する全ての機能をカバーする詳しいチュートリアル"
+ label-text: new in Scala 3
+ - title: 値クラスと汎用トレイト
+ by: Mark Harrah
+ description: "値クラスは Scala で実行時のオブジェクトアロケーションを避ける新しい仕組みだ。これは新しい AnyVal サブクラスを定義することで達成できる。"
+ icon: gem
+ url: "core/value-classes.html"
+ - title: TASTyの概要
+ by: Alvin Alexander
+ icon: birthday-cake
+ root: "scala3/guides/"
+ url: "tasty-overview.html"
+ description: "Scala のエンドユーザー向けの TASTy のフォーマットの概要"
+ label-text: new in Scala 3
- title: 文字列補間
- icon: usd
+ icon: dollar-sign
url: "core/string-interpolation.html"
description: >
文字列補間は、ユーザーが加工文字列リテラル(processed string literal)に変数参照を直接埋め込めるようにしてくれる。以下例。
@@ -70,11 +94,6 @@
by: Josh Suereth
description: "Scala 2.10 は暗黙クラス(implicit class)と呼ばれる新しい機能を導入した。暗黙クラスは implicit キーワードでマークされたクラスだ。このキーワードはそのクラスがスコープ内にあるとき、そのプライマリコンストラクターが暗黙変換に利用可能にする。"
url: "core/implicit-classes.html"
- - title: 値クラスと汎用トレイト
- by: Mark Harrah
- description: "値クラスは Scala で実行時のオブジェクトアロケーションを避ける新しい仕組みだ。これは新しい AnyVal サブクラスを定義することで達成できる。"
- icon: diamond
- url: "core/value-classes.html"
- category: ライブラリの作成
description: "Scala エコシステム向けのオープンソースライブラリの貢献方法のガイド"
@@ -258,18 +277,6 @@
- category: レガシー
description: "最近の Scala バージョン(2.12以上)には関係なくなった機能をカバーするガイド。"
overviews:
- - title: Scala アクター移行ガイド
- by: Vojin Jovanovic and Philipp Haller
- icon: truck
- url: "core/actors-migration-guide.html"
- description: "Scala アクターから Akka への移行を容易にするため、Actor Migration Kit(AMK)を用意した。AMK は、Scala アクターの拡張から構成され、プロジェクトのクラスパスに scala-actors-migration.jar を含めることで有効になる。加えて、Akka 2.1 はアクター DSL シングルトンのような機能を導入し、Scala アクターを使ったコードを Akka へ容易に変換することを可能にしている。このドキュメントの目的はユーザーに移行プロセスを案内し、AMK の使い方を説明することだ。"
- - title: Scala アクター API
- by: Philipp Haller and Stephen Tu
- icon: users
- url: "core/actors.html"
- description: "このガイドは Scala 2.8/2.9 の scala.actors パッケージの API を説明する。組織は論理的に同類の型のグループに従う。トレイト階層は個々のセクションを構造化することを考慮している。既存の Scaladoc ベースの API ドキュメントを補間するため、これらトレイトが定義する様々なメソッドの実行時の挙動にフォーカスする。"
- label-color: "#899295"
- label-text: deprecated
- title: Scala 2.8 から 2.12 までのコレクション
by: Martin Odersky
icon: sitemap
diff --git a/_data/overviews-ru.yml b/_data/overviews-ru.yml
index 8387b36946..f4a652a032 100644
--- a/_data/overviews-ru.yml
+++ b/_data/overviews-ru.yml
@@ -60,7 +60,7 @@
description: "Руководства и обзоры, охватывающие функционал языка Scala."
overviews:
- title: Строковая интерполяция
- icon: usd
+ icon: dollar-sign
url: "core/string-interpolation.html"
description: >
Строковая интерполяция позволяет пользователям встраивать данные из переменных непосредственно в обрабатываемые строковые литералы. Вот пример:
@@ -74,7 +74,7 @@
- title: Вычислительные Классы и Универсальные Трейты
by: Mark Harrah
description: "Вычислительные-Классы - это новый механизм в Scala, позволяющий избежать создания объектов во время исполнения, которое достигается за счет объявления класса в качестве подкласса AnyVal."
- icon: diamond
+ icon: gem
url: "core/value-classes.html"
- category: Создание своих библиотек
@@ -258,18 +258,6 @@
- category: Наследие
description: "Руководство по функционалу, которые больше не соответствуют последним версиям Scala (2.12+)."
overviews:
- - title: Руководство по миграции Scala Акторов
- by: Vojin Jovanovic и Philipp Haller
- icon: truck
- url: "core/actors-migration-guide.html"
- description: "Для облегчения миграции со Скала Актеров на Акка мы предоставили Миграционный Комплект для Актеров (МКА). МКА состоит из расширения Scala Акторов, которое позволяет включить scala-actors-migration.jar в пространство классов проекта. Кроме того, Akka 2.1 включает в себя такие функции, как ActorDSL singleton, которые позволяют осуществлять простое преобразование кода с использованием Scala Actors в Akka. Цель этого документа - помочь пользователям пройти через процесс миграции и объяснить, как использовать МКА."
- - title: API Scala Акторов
- by: Philipp Haller и Stephen Tu
- icon: users
- url: "core/actors.html"
- description: "В данном руководстве описывается API пакета scala.actors версии 2.8/2.9. Сгруппированы по типам, которые логически принадлежат друг другу. Иерархия трейтов учитывается при структурировании отдельных разделов. Основное внимание уделяется поведению во время исполнения различных методов, которое дополняет существующую документацию по API на основе Scaladoc."
- label-color: "#899295"
- label-text: устарело
- title: Scala коллекции с 2.8 по 2.12
by: Martin Odersky
icon: sitemap
diff --git a/_data/overviews-uk.yml b/_data/overviews-uk.yml
new file mode 100644
index 0000000000..02be500922
--- /dev/null
+++ b/_data/overviews-uk.yml
@@ -0,0 +1,348 @@
+- category: Стандартна бібліотека
+ description: "Посібники та огляди стандартної бібліотеки Scala."
+ overviews:
+ - title: Scala колекції
+ by: Martin Odersky та Julien Richard-Foy
+ icon: sitemap
+ url: "collections-2.13/introduction.html"
+ description: "Бібліотека колекцій Scala."
+ subdocs:
+ - title: Вступ
+ url: "collections-2.13/introduction.html"
+ - title: Змінювані та незмінювані колекції
+ url: "collections-2.13/overview.html"
+ - title: Трейт Iterable
+ url: "collections-2.13/trait-iterable.html"
+ - title: Трейти послідовностей. Seq, IndexedSeq та LinearSeq
+ url: "collections-2.13/seqs.html"
+ - title: Реалізація незмінюваних колекцій
+ url: "collections-2.13/concrete-immutable-collection-classes.html"
+ - title: Реалізація змінюваних колекцій
+ url: "collections-2.13/concrete-mutable-collection-classes.html"
+ - title: Масиви
+ url: "collections-2.13/arrays.html"
+ - title: Рядки
+ url: "collections-2.13/strings.html"
+ - title: Показники продуктивності
+ url: "collections-2.13/performance-characteristics.html"
+ - title: Рівність
+ url: "collections-2.13/equality.html"
+ - title: Відображення
+ url: "collections-2.13/views.html"
+ - title: Ітератори
+ url: "collections-2.13/iterators.html"
+ - title: Створення колекцій з нуля
+ url: "collections-2.13/creating-collections-from-scratch.html"
+ - title: Перетворення між колекціями Java та Scala
+ url: "collections-2.13/conversions-between-java-and-scala-collections.html"
+ - title: Міграція проєкту до колекцій Scala 2.13
+ icon: sitemap
+ url: "core/collections-migration-213.html"
+ description: "Ця сторінка описує основні зміни в колекціях для користувачів, які переходять на Scala 2.13. Також, розглянуто варіанти побудови проєкти з перехресною сумісністю для Scala 2.11/2.12 і 2.13."
+ - title: Архітектура колекцій Scala
+ icon: sitemap
+ url: "core/architecture-of-scala-213-collections.html"
+ by: Julien Richard-Foy
+ description: "Ці сторінки описують архітектуру фреймворку колекцій, представленого в Scala 2.13. У порівнянні з Collections API ви дізнаєтеся більше про внутрішню роботу фреймворка."
+ - title: Реалізація користувацьких колекцій
+ icon: building
+ url: "core/custom-collections.html"
+ by: Martin Odersky, Lex Spoon та Julien Richard-Foy
+ description: "У цьому документі ви дізнаєтеся, як фреймворк колекцій допомагає вам визначати власні колекції за допомогою кількох рядків коду, повторно використовуючи переважну частину функцій колекції з фреймворку."
+ - title: Додавання спеціальних операцій до колекцій
+ icon: building
+ url: "core/custom-collection-operations.html"
+ by: Julien Richard-Foy
+ description: "У цьому посібнику показано, як писати перетворення, що застосовуються до всіх типів колекцій і повертати той самий тип колекції. Також, як писати операції, які параметризуються типом колекції."
+
+- category: Мова
+ description: "Посібники та огляди, що охоплюють функції на мові Scala."
+ overviews:
+ - title: Міграція зі Scala 2 на Scala 3
+ by: Adrien Piquerez
+ icon: suitcase
+ root: "scala3/guides/"
+ url: "migration/compatibility-intro.html"
+ description: "Все, що потрібно знати про сумісність і міграцію на Scala 3."
+ - title: Макроси Scala 3
+ by: Nicolas Stucki
+ icon: magic
+ root: "scala3/guides/"
+ url: "macros"
+ description: "Детальний підручник, який охоплює всі можливості, пов'язані з написанням макросів у Scala 3."
+ label-text: нове в Scala 3
+ - title: Класи значень та універсальні трейти
+ by: Mark Harrah
+ description: "Класи значень – це новий механізм у Scala, що дозволяє уникнути виділення об'єктів під час виконання. Це досягається за допомогою визначення нових підкласів AnyVal."
+ icon: gem
+ url: "core/value-classes.html"
+ - title: Огляд TASTy
+ by: Alvin Alexander
+ icon: birthday-cake
+ label-text: нове в Scala 3
+ root: "scala3/guides/"
+ url: "tasty-overview.html"
+ description: "Огляд формату TASTy, призначеного для користувачів мови Scala."
+ - title: Інтерполяція рядків
+ icon: dollar-sign
+ url: "core/string-interpolation.html"
+ description: >
+ Інтерполяція рядків дозволяє користувачам вбудовувати посилання на змінні безпосередньо в оброблені рядкові літерали. Ось приклад:
+
+ val name = "James"
+ println(s"Hello, $name") // Hello, James
+ Літерал s"Hello, $name" є рядковим літералом, який буде додатково оброблено. Це означає, що компілятор виконує додаткову роботу над цим літералом. Оброблений рядковий літерал позначається набором символів, що передують ". Інтерполяція рядків була введена в SIP-11.
+ - title: Неявні класи
+ by: Josh Suereth
+ description: "Scala 2.10 представила нову функцію під назвою неявні класи. Неявний клас — це клас, позначений ключовим словом implicit. Це ключове слово робить основний конструктор класу доступним для неявних перетворень, коли клас знаходиться в області видимості."
+ url: "core/implicit-classes.html"
+
+- category: Створення бібліотек
+ description: "Посібники щодо розробки бібліотек з відкритим кодом для екосистеми Scala."
+ overviews:
+ - title: Посібник для авторів бібліотек
+ by: Julien Richard-Foy
+ icon: tasks
+ url: "contributors/index.html"
+ description: "Перелічує всі інструменти, які автори бібліотек мають налаштувати для публікації та документування своїх бібліотек."
+
+- category: Паралельне та конкурентне програмування
+ description: "Повні посібники, що охоплюють деякі бібліотеки Scala для паралельного та конкурентного програмування."
+ overviews:
+ - title: Future та Promise
+ by: Philipp Haller, Aleksandar Prokopec, Heather Miller, Viktor Klang, Roland Kuhn та Vojin Jovanovic
+ icon: tasks
+ url: "core/futures.html"
+ description: "Ф'ючери дають можливість міркувати про паралельне виконання багатьох операцій – ефективним і не блокуючим способом. Ф'ючер — це об’єкт-заповнювач для значення, яке може ще не існувати. Як правило, вартість Ф'ючеру надається одночасно і може згодом використовуватися. Складання одночасних завдань таким чином, як правило, призводить до швидшого, асинхронного, не блокувального паралельного коду."
+ - title: Паралельні колекції
+ by: Aleksandar Prokopec та Heather Miller
+ icon: rocket
+ url: "parallel-collections/overview.html"
+ description: "Бібліотека паралельних колекцій Scala."
+ subdocs:
+ - title: Огляд
+ url: "parallel-collections/overview.html"
+ - title: Реалізація паралельних колекцій
+ url: "parallel-collections/concrete-parallel-collections.html"
+ - title: Перетворення паралельних колекцій
+ url: "parallel-collections/conversions.html"
+ - title: Конкурентні Try
+ url: "parallel-collections/ctries.html"
+ - title: Архітектура бібліотеки паралельних колекцій
+ url: "parallel-collections/architecture.html"
+ - title: Створення користувацьких паралельних колекцій
+ url: "parallel-collections/custom-parallel-collections.html"
+ - title: Конфігурація паралельних колекцій
+ url: "parallel-collections/configuration.html"
+ - title: Вимірювання продуктивності
+ url: "parallel-collections/performance.html"
+
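The Futures and Promises guide listed above describes a `Future` as a placeholder for a value that may not exist yet and is completed concurrently. A minimal, assumed sketch (not from the guide) of that behaviour:

```scala
import scala.concurrent.{Future, Promise}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success}

object FuturesDemo extends App {
  // The value of the Future is computed on another thread.
  val parsed: Future[Int] = Future {
    Thread.sleep(100)
    "42".toInt
  }

  // Consume the result later without blocking.
  parsed.onComplete {
    case Success(n)  => println(s"got $n")
    case Failure(ex) => println(s"failed: $ex")
  }

  // A Promise is the writable side of a Future.
  val p = Promise[String]()
  p.success("done")
  p.future.foreach(println)

  Thread.sleep(500)  // keep the JVM alive long enough for the callbacks (demo only)
}
```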
+- category: Сумісність
+ description: "Що з чим працює (чи ні)."
+ overviews:
+ - title: Сумісність версій JDK
+ description: "Які версії Scala працюють на яких версіях JDK"
+ icon: coffee
+ url: "jdk-compatibility/overview.html"
+ - title: Бінарна сумісність релізів Scala
+ description: "Якщо дві версії Scala бінарно сумісні, можна безпечно скомпілювати свій проєкт на одній версії Scala та зв'язати з іншою версією Scala під час виконання. Безпечне зв'язування під час виконання (тільки!) означає, що JVM не генерує (підклас) LinkageError під час виконання вашої програми у змішаному сценарії, припускаючи, що вона не виникає при компіляції та запуску в одній версії Scala. Конкретно це означає, що ви можете мати зовнішні залежності від вашого шляху до класу під час виконання, які використовують іншу версію Scala, ніж та, з якою ви компілюєте, за умови, що вони сумісні з бінарними файлами. Іншими словами, окрема компіляція в різних версіях, сумісних з бінарними файлами, не створює проблем у порівнянні з компіляцією та запуском всього в одній версії Scala."
+ icon: puzzle-piece
+ url: "core/binary-compatibility-of-scala-releases.html"
+ - title: Бінарна сумісність для авторів бібліотек
+ description: "Різноманітний і повний набір бібліотек важливий для будь-якої продуктивної екосистеми програмного забезпечення. Хоча розробляти та розповсюджувати бібліотеки Scala легко, добре авторство бібліотеки виходить за рамки простого написання коду та його публікації. У цьому посібнику ми розглянемо важливу тему бінарної сумісності."
+ icon: puzzle-piece
+ url: "core/binary-compatibility-for-library-authors.html"
+
+- category: Інструменти
+ description: "Довідковий матеріал про основні інструменти Scala, такі як покоління Scala REPL і Scaladoc."
+ overviews:
+ - title: Scala 2 REPL
+ icon: terminal
+ url: "repl/overview.html"
+ description: |
+ Scala REPL це інструмент (scala) для виконання виразів в Scala.
+
+ Команда scala виконає скрипт шляхом обгортання його в шаблон, а потім компіляції та виконання отриманої програми
+ - title: Scaladoc для Scala 3
+ by: Krzysztof Romanowski, Aleksander Boruch-Gruszecki, Andrzej Ratajczak, Kacper Korban, Filip Zybała
+ icon: book
+ root: "scala3/guides/"
+ url: "scaladoc"
+ label-text: оновлено
+ description: "Оновлення в Scala 3 для інструменту генерації документації API."
+ - title: Scaladoc
+ url: "scaladoc/overview.html"
+ icon: book
+ description: "Інструмент Scala для генерації документації для API."
+ subdocs:
+ - title: Огляд
+ url: "scaladoc/overview.html"
+ - title: Scaladoc для авторів бібліотек
+ url: "scaladoc/for-library-authors.html"
+ - title: Використання інтерфейсу Scaladoc
+ url: "scaladoc/interface.html"
+
+- category: Компілятор
+ description: "Посібники та огляди компілятора Scala: плагіни компілятора, інструменти рефлексії та метапрограмування, такі як макроси."
+ overviews:
+ - title: "Посібник з внесення змін у Scala 3"
+ by: Jamie Thompson, Anatolii Kmetiuk
+ icon: cogs
+ root: "scala3/guides/"
+ url: "contribution/contribution-intro.html"
+ description: "Посібник з компілятора Scala 3 та вирішення проблем."
+ - title: Рефлексія в Scala 2
+ by: Heather Miller, Eugene Burmako та Philipp Haller
+ icon: binoculars
+ url: "reflection/overview.html"
+ description: Фреймворк Scala для рефлексії під час виконання/компіляції.
+ label-text: відсутнє в Scala 3
+ subdocs:
+ - title: Огляд
+ url: "reflection/overview.html"
+ - title: Environment, Universe та Mirror
+ url: "reflection/environment-universes-mirrors.html"
+ - title: Symbol, Tree та Type
+ url: "reflection/symbols-trees-types.html"
+ - title: Annotation, Name, Scope та More
+ url: "reflection/annotations-names-scopes.html"
+ - title: TypeTag та Manifest
+ url: "reflection/typetags-manifests.html"
+ - title: Безпека потоків
+ url: "reflection/thread-safety.html"
+ - title: Зміни в Scala 2.11
+ url: "reflection/changelog211.html"
+ - title: Макроси в Scala 2
+ by: Eugene Burmako
+ icon: magic
+ url: "macros/usecases.html"
+ description: "Фреймворк метапрограмування Scala."
+ label-text: відсутнє в Scala 3
+ subdocs:
+ - title: Випадки використання
+ url: "macros/usecases.html"
+ - title: Blackbox проти Whitebox
+ url: "macros/blackbox-whitebox.html"
+ - title: Макроси Def
+ url: "macros/overview.html"
+ - title: Квазіцитати
+ url: "quasiquotes/intro.html"
+ - title: Пакети макросів
+ url: "macros/bundles.html"
+ - title: Неявні макроси
+ url: "macros/implicits.html"
+ - title: Макроси-екстрактори
+ url: "macros/extractors.html"
+ - title: Провайдери типів
+ url: "macros/typeproviders.html"
+ - title: Анотації макросів
+ url: "macros/annotations.html"
+ - title: Макрос Paradise
+ url: "macros/paradise.html"
+ - title: Дорожня карта
+ url: "macros/roadmap.html"
+ - title: Зміни в 2.11
+ url: "macros/changelog211.html"
+ - title: Квазіцитати в Scala 2
+ by: Denys Shabalin
+ icon: quote-left
+ url: "quasiquotes/setup.html"
+ description: "Квазіцитати — це зручний спосіб маніпулювати синтаксичними деревами Scala."
+ label-text: відсутнє в Scala 3
+ subdocs:
+ - title: Залежності та налаштування
+ url: "quasiquotes/setup.html"
+ - title: Вступ
+ url: "quasiquotes/intro.html"
+ - title: Підіймання
+ url: "quasiquotes/lifting.html"
+ - title: Опускання
+ url: "quasiquotes/unlifting.html"
+ - title: Гігієна
+ url: "quasiquotes/hygiene.html"
+ - title: Випадки використання
+ url: "quasiquotes/usecases.html"
+ - title: Резюме синтаксису
+ url: "quasiquotes/syntax-summary.html"
+ - title: Деталі виразів
+ url: "quasiquotes/expression-details.html"
+ - title: Деталі типів
+ url: "quasiquotes/type-details.html"
+ - title: Деталі патернів
+ url: "quasiquotes/pattern-details.html"
+ - title: Деталі визначення та імпорту
+ url: "quasiquotes/definition-details.html"
+ - title: Резюме термінології
+ url: "quasiquotes/terminology.html"
+ - title: Майбутні перспективи
+ url: "quasiquotes/future.html"
+ - title: Плагіни компілятора
+ by: Lex Spoon та Seth Tisue
+ icon: puzzle-piece
+ url: "plugins/index.html"
+ description: "Плагіни компілятора дозволяють налаштовувати та розширювати компілятор Scala. У цьому підручнику описується функція плагіну та пояснюється, як створити простий плагін."
+ - title: Параметри компілятора
+ by: Community
+ icon: cog
+ url: "compiler-options/index.html"
+ description: "Різні параметри того як scalac компілює ваш код."
+ - title: Форматування помилок
+ by: Torsten Schmits
+ icon: cog
+ url: "compiler-options/errors.html"
+ description: "Новий механізм для більш зручних повідомлень про помилки, друку ланцюжків залежних неявних параметрів та кольорових відмінностей знайдених/потрібних типів."
+ - title: Оптимізатор
+ by: Lukas Rytz та Andrew Marki
+ icon: cog
+ url: "compiler-options/optimizer.html"
+ description: "Компілятор може виконувати різні оптимізації."
+
+- category: Спадщина (legacy)
+ description: "Посібники, що охоплюють функції, які більше не стосуються останніх версій Scala (2.12+)."
+ overviews:
+ - title: Колекції Scala з 2.8 до 2.12
+ by: Martin Odersky
+ icon: sitemap
+ url: "collections/introduction.html"
+ description: "Бібліотека колекцій Scala."
+ subdocs:
+ - title: Вступ
+ url: "collections/introduction.html"
+ - title: Змінювані та незмінювані колекції
+ url: "collections/overview.html"
+ - title: Трейт Traversable
+ url: "collections/trait-traversable.html"
+ - title: Трейт Iterable
+ url: "collections/trait-iterable.html"
+ - title: Трейти послідовностей. Seq, IndexedSeq та LinearSeq
+ url: "collections/seqs.html"
+ - title: Множини
+ url: "collections/sets.html"
+ - title: Асоціативні масиви
+ url: "collections/maps.html"
+ - title: Реалізація незмінюваних колекцій
+ url: "collections/concrete-immutable-collection-classes.html"
+ - title: Реалізація змінюваних колекцій
+ url: "collections/concrete-mutable-collection-classes.html"
+ - title: Масиви
+ url: "collections/arrays.html"
+ - title: Рядки
+ url: "collections/strings.html"
+ - title: Показники продуктивності
+ url: "collections/performance-characteristics.html"
+ - title: Рівність
+ url: "collections/equality.html"
+ - title: Відображення
+ url: "collections/views.html"
+ - title: Ітератори
+ url: "collections/iterators.html"
+ - title: Створення колекцій з нуля
+ url: "collections/creating-collections-from-scratch.html"
+ - title: Перетворення між колекціями Java та Scala
+ url: "collections/conversions-between-java-and-scala-collections.html"
+ - title: Міграція з версії Scala 2.7
+ url: "collections/migrating-from-scala-27.html"
+ - title: Архітектура колекцій Scala з 2.8 до 2.12
+ icon: building
+ url: "core/architecture-of-scala-collections.html"
+ by: Martin Odersky та Lex Spoon
+ description: "На цих сторінках детально описується архітектура фреймворку колекцій Scala. У порівнянні з Collections API ви дізнаєтеся більше про внутрішню роботу фреймворку. Ви також дізнаєтеся, як ця архітектура допомагає вам визначати власні колекції за допомогою кількох рядків коду, повторно використовуючи переважну частину функцій колекції з фреймворку."
diff --git a/_data/overviews-zh-cn.yml b/_data/overviews-zh-cn.yml
new file mode 100644
index 0000000000..1c48218eef
--- /dev/null
+++ b/_data/overviews-zh-cn.yml
@@ -0,0 +1,312 @@
+- category: 标准库
+ description: "涵盖 Scala 标准库的参考与概览"
+ overviews:
+ - title: Scala 容器
+ by: Martin Odersky and Julien Richard-Foy
+ icon: sitemap
+ url: "collections-2.13/introduction.html"
+ description: "Scala 的容器库"
+ subdocs:
+ - title: 简介
+ url: "collections-2.13/introduction.html"
+ - title: 可变与不可变容器
+ url: "collections-2.13/overview.html"
+ - title: Iterable 特质
+ url: "collections-2.13/trait-iterable.html"
+ - title: 序列特质 Seq, IndexedSeq, 和 LinearSeq
+ url: "collections-2.13/seqs.html"
+ - title: 具体不可变容器类
+ url: "collections-2.13/concrete-immutable-collection-classes.html"
+ - title: 具体可变容器类
+ url: "collections-2.13/concrete-mutable-collection-classes.html"
+ - title: 数组
+ url: "collections-2.13/arrays.html"
+ - title: 字符串
+ url: "collections-2.13/strings.html"
+ - title: 性能特点
+ url: "collections-2.13/performance-characteristics.html"
+ - title: 相等性
+ url: "collections-2.13/equality.html"
+ - title: 视图
+ url: "collections-2.13/views.html"
+ - title: 迭代器
+ url: "collections-2.13/iterators.html"
+ - title: 从头开始创建容器
+ url: "collections-2.13/creating-collections-from-scratch.html"
+ - title: Java 与 Scala 间的容器转换
+ url: "collections-2.13/conversions-between-java-and-scala-collections.html"
+ - title: 迁移项目容器至 Scala 2.13 的容器
+ icon: sitemap
+ url: "core/collections-migration-213.html"
+ description: "本篇向欲迁移至 Scala 2.13 的容器用户介绍了主要变更并展示了如何通过 Scala 2.11,2.12 和 2.13 进行交叉编译"
+ - title: Scala 容器架构
+ icon: sitemap
+ url: "core/architecture-of-scala-213-collections.html"
+ by: Julien Richard-Foy
+ description: "这几篇介绍了引进到 Scala 2.13 中的容器框架的架构,对照容器API就能知晓更多框架内部工作机制"
+ - title: 实现定制容器
+ icon: building
+ url: "core/custom-collections.html"
+ by: Martin Odersky, Lex Spoon and Julien Richard-Foy
+ description: "从本篇中你会了解到如何利用容器框架通过几行代码来定义自己的容器,来重用来自框架的绝大部分容器功能。"
+ - title: 新增定制的容器操作
+ icon: building
+ url: "core/custom-collection-operations.html"
+ by: Julien Richard-Foy
+ description: "本篇展示了如何定制可应用于任意容器类型并返回相同类型的操作,以及如何定制带有欲编译容器类型参数的操作"
+
+- category: 语言
+ description: "涵盖 Scala 语言特性的参考与概览"
+ overviews:
+ - title: 字符串内插
+ icon: dollar-sign
+ url: "core/string-interpolation.html"
+ description: >
+ 字符串内插允许用户在字符串字面插值中直接嵌入变量引用。这里有个例子:
+
+ val name = "James"
+ println(s"Hello, $name") // Hello, James
+ 字面量 s"Hello, $name" 是一个被加工的字符串字面量,这意味着编译器需要对该字面量做一些额外的工作。被加工的字符串字面量由 " 之前的一组字符标示。字符串内插由 SIP-11 引入。
+ - title: Scaladoc
+ url: "scaladoc/overview.html"
+ icon: book
+ description: "Scala 的 API 文档生成工具"
+ subdocs:
+ - title: 概览
+ url: "scaladoc/overview.html"
+ - title: 针对库作者的 Scaladoc
+ url: "scaladoc/for-library-authors.html"
+ - title: 使用 Scaladoc 接口
+ url: "scaladoc/interface.html"
+
+- category: 编译器
+ description: "涵盖 Scala 编译器的参考和概览:编译器插件,反射,以及元编程工具比如宏"
+ overviews:
+ - title: 反射
+ by: Heather Miller, Eugene Burmako, and Philipp Haller
+ icon: binoculars
+ url: "reflection/overview.html"
+ description: Scala 的运行时和编译期的反射框架
+ label-text: 实验
+ subdocs:
+ - title: 概览
+ url: "reflection/overview.html"
+ - title: 环境,通用和镜像(Environment, Universes, and Mirrors)
+ url: "reflection/environment-universes-mirrors.html"
+ - title: 符号,树和类型(Symbols, Trees, and Types)
+ url: "reflection/symbols-trees-types.html"
+ - title: 标号,名称,作用域及其他(Annotations, Names, Scopes, and More)
+ url: "reflection/annotations-names-scopes.html"
+ - title: TypeTags 和 Manifests
+ url: "reflection/typetags-manifests.html"
+ - title: 线程安全
+ url: "reflection/thread-safety.html"
+ - title: Scala 2.11 中的变化
+ url: "reflection/changelog211.html"
+ - title: 宏
+ by: Eugene Burmako
+ icon: magic
+ url: "macros/usecases.html"
+ description: "Scala 的元编程框架"
+ label-text: 实验
+ subdocs:
+ - title: 用例
+ url: "macros/usecases.html"
+ - title: 黑盒与白盒
+ url: "macros/blackbox-whitebox.html"
+ - title: Def 宏
+ url: "macros/overview.html"
+ - title: 拟引号(Quasiquotes)
+ url: "quasiquotes/intro.html"
+ - title: 宏绑定
+ url: "macros/bundles.html"
+ - title: 隐式宏
+ url: "macros/implicits.html"
+ - title: Extractor 宏
+ url: "macros/extractors.html"
+ - title: 类型 Providers
+ url: "macros/typeproviders.html"
+ - title: 宏标号
+ url: "macros/annotations.html"
+ - title: 宏乐园
+ url: "macros/paradise.html"
+ - title: 路线图
+ url: "macros/roadmap.html"
+ - title: 2.11 中的变化
+ url: "macros/changelog211.html"
+ - title: 拟引号
+ by: Denys Shabalin
+ icon: quote-left
+ url: "quasiquotes/setup.html"
+ description: "拟引号是操作 Scala 语法树的便捷方式"
+ label-text: 实验
+ subdocs:
+ - title: 依赖和设置
+ url: "quasiquotes/setup.html"
+ - title: 简介
+ url: "quasiquotes/intro.html"
+ - title: 提升(Lifting)
+ url: "quasiquotes/lifting.html"
+ - title: 拉降(Unlifting)
+ url: "quasiquotes/unlifting.html"
+ - title: 卫生(Hygiene)
+ url: "quasiquotes/hygiene.html"
+ - title: 用例
+ url: "quasiquotes/usecases.html"
+ - title: 语法总结
+ url: "quasiquotes/syntax-summary.html"
+ - title: 表达式细节
+ url: "quasiquotes/expression-details.html"
+ - title: 类型细节
+ url: "quasiquotes/type-details.html"
+ - title: 模式细节
+ url: "quasiquotes/pattern-details.html"
+ - title: 定义和引用细节
+ url: "quasiquotes/definition-details.html"
+ - title: 术语总结
+ url: "quasiquotes/terminology.html"
+ - title: 未来展望
+ url: "quasiquotes/future.html"
+ - title: 编译器插件
+ by: Lex Spoon and Seth Tisue
+ icon: puzzle-piece
+ url: "plugins/index.html"
+ description: "编译器插件允许定制和扩展 Scala 编译器。本篇导引描述了插件设施并带你领略如何创作一个简单插件"
+ - title: 编译器选项
+ by: Community
+ icon: cog
+ url: "compiler-options/index.html"
+ description: "控制 scalac 如何编译代码的各种选项"
+ - title: 错误格式
+ by: Torsten Schmits
+ icon: cog
+ url: "compiler-options/errors.html"
+ description: "一个新的用户友好的错误消息引擎,可以打印依赖的隐式链,颜色区分找到的和所需的类型差异"
+
+
+- category: 遗留问题
+ description: "涵盖一些与最近的 Scala 版本(2.12+)不再相关的特性的参考"
+ overviews:
+ - title: Scala 2.8 到 2.12 的容器
+ by: Martin Odersky
+ icon: sitemap
+ url: "collections/introduction.html"
+ description: "Scala 的容器库"
+ subdocs:
+ - title: 简介
+ url: "collections/introduction.html"
+ - title: 可变和不可变容器
+ url: "collections/overview.html"
+ - title: Traversable 特质
+ url: "collections/trait-traversable.html"
+ - title: Iterable 特质
+ url: "collections/trait-iterable.html"
+ - title: 序列特质 Seq, IndexedSeq, 和 LinearSeq
+ url: "collections/seqs.html"
+ - title: 集合(Sets)
+ url: "collections/sets.html"
+ - title: 映射(Maps)
+ url: "collections/maps.html"
+ - title: 具体的不可变容器类
+ url: "collections/concrete-immutable-collection-classes.html"
+ - title: 具体的可变容器类
+ url: "collections/concrete-mutable-collection-classes.html"
+ - title: 数组
+ url: "collections/arrays.html"
+ - title: 字符串
+ url: "collections/strings.html"
+ - title: 性能特点
+ url: "collections/performance-characteristics.html"
+ - title: 相等性
+ url: "collections/equality.html"
+ - title: 视图
+ url: "collections/views.html"
+ - title: 迭代器
+ url: "collections/iterators.html"
+ - title: 从头开始创建容器
+ url: "collections/creating-collections-from-scratch.html"
+ - title: Java 和 Scala 间容器转换
+ url: "collections/conversions-between-java-and-scala-collections.html"
+ - title: 从 Scala 2.7 迁移
+ url: "collections/migrating-from-scala-27.html"
+ - title: Scala 2.8 到 2.12 的容器架构
+ icon: building
+ url: "core/architecture-of-scala-collections.html"
+ by: Martin Odersky and Lex Spoon
+ description: "本篇细致地描述了 Scala 容器框架的架构,对比容器 API 你会发现更多框架的内部工作机制。你也会学到该架构如何帮你通过几行代码定义自己的容器,来重用来自框架的绝大部分容器功能。"
diff --git a/_data/overviews.yml b/_data/overviews.yml
index bd12fb5844..5756db5e3e 100644
--- a/_data/overviews.yml
+++ b/_data/overviews.yml
@@ -1,4 +1,3 @@
-
- category: Standard Library
description: "Guides and overviews covering the Scala standard library."
overviews:
@@ -60,8 +59,33 @@
- category: Language
description: "Guides and overviews covering features in the Scala language."
overviews:
+ - title: "Migration from Scala 2 to Scala 3"
+ by: Adrien Piquerez
+ icon: suitcase
+ root: "scala3/guides/"
+ url: "migration/compatibility-intro.html"
+ description: "Everything you need to know about compatibility and migration to Scala 3."
+ - title: Scala 3 Macros
+ by: Nicolas Stucki
+ icon: magic
+ root: "scala3/guides/"
+ url: "macros"
+ description: "A detailed tutorial to cover all the features involved in writing macros in Scala 3."
+ label-text: new in Scala 3
+ - title: Value Classes and Universal Traits
+ by: Mark Harrah
+ description: "Value classes are a new mechanism in Scala to avoid allocating runtime objects. This is accomplished through the definition of new AnyVal subclasses."
+ icon: gem
+ url: "core/value-classes.html"
+ - title: An Overview of TASTy
+ by: Alvin Alexander
+ icon: birthday-cake
+ label-text: new in Scala 3
+ root: "scala3/guides/"
+ url: "tasty-overview.html"
+ description: "An overview over the TASTy format aimed at end-users of the Scala language."
- title: String Interpolation
- icon: usd
+ icon: dollar-sign
url: "core/string-interpolation.html"
description: >
String Interpolation allows users to embed variable references directly in processed string literals. Here’s an example:
@@ -72,11 +96,15 @@
by: Josh Suereth
description: "Scala 2.10 introduced a new feature called implicit classes. An implicit class is a class marked with the implicit keyword. This keyword makes the class’ primary constructor available for implicit conversions when the class is in scope."
url: "core/implicit-classes.html"
- - title: Value Classes and Universal Traits
- by: Mark Harrah
- description: "Value classes are a new mechanism in Scala to avoid allocating runtime objects. This is accomplished through the definition of new AnyVal subclasses."
- icon: diamond
- url: "core/value-classes.html"
+ - title: The Scala Book
+ by: Alvin Alexander
+ icon: book
+ label-color: "#899295"
+ label-text: archived
+ url: "scala-book/introduction.html"
+ description: >
+ A light introduction to the Scala language, focused on Scala 2.
+ It has since been updated for Scala 3, and we are in the process of merging the two.
- category: Authoring Libraries
description: "Guides for contributing open source libraries to the Scala ecosystem."
@@ -133,17 +161,27 @@
description: "A diverse and comprehensive set of libraries is important to any productive software ecosystem. While it is easy to develop and distribute Scala libraries, good library authorship goes beyond just writing code and publishing it. In this guide, we cover the important topic of Binary Compatibility."
icon: puzzle-piece
url: "core/binary-compatibility-for-library-authors.html"
+ - title: Nightly Versions of Scala
+ description: "We regularly publish 'nightlies' of both Scala 3 and Scala 2 so that users can preview and test the contents of upcoming releases. Here's how to find and use these versions."
+ url: "core/nightlies.html"
- category: "Tools"
description: "Reference material on core Scala tools like the Scala REPL and Scaladoc generation."
overviews:
- - title: Scala REPL
+ - title: Scala 2 REPL
icon: terminal
url: "repl/overview.html"
description: |
The Scala REPL is a tool (scala) for evaluating expressions in Scala.
The scala command will execute a source script by wrapping it in a template and then compiling and executing the resulting program
+ - title: Scaladoc For Scala 3
+ by: Krzysztof Romanowski, Aleksander Boruch-Gruszecki, Andrzej Ratajczak, Kacper Korban, Filip Zybała
+ icon: book
+ root: "scala3/guides/"
+ url: "scaladoc"
+ description: "Updates in Scala 3 to Scala’s API documentation generation tool."
+ label-text: updated
- title: Scaladoc
url: "scaladoc/overview.html"
icon: book
@@ -159,12 +197,12 @@
- category: Compiler
description: "Guides and overviews covering the Scala compiler: compiler plugins, reflection, and metaprogramming tools such as macros."
overviews:
- - title: Reflection
+ - title: Scala 2 Reflection
by: Heather Miller, Eugene Burmako, and Philipp Haller
icon: binoculars
url: "reflection/overview.html"
description: Scala's runtime/compile-time reflection framework.
- label-text: experimental
+ label-text: removed in Scala 3
subdocs:
- title: Overview
url: "reflection/overview.html"
@@ -180,12 +218,12 @@
url: "reflection/thread-safety.html"
- title: Changes in Scala 2.11
url: "reflection/changelog211.html"
- - title: Macros
+ - title: Scala 2 Macros
by: Eugene Burmako
icon: magic
url: "macros/usecases.html"
description: "Scala's metaprogramming framework."
- label-text: experimental
+ label-text: removed in Scala 3
subdocs:
- title: Use Cases
url: "macros/usecases.html"
@@ -211,12 +249,12 @@
url: "macros/roadmap.html"
- title: Changes in 2.11
url: "macros/changelog211.html"
- - title: Quasiquotes
+ - title: Quasiquotes in Scala 2
by: Denys Shabalin
icon: quote-left
url: "quasiquotes/setup.html"
description: "Quasiquotes are a convenient way to manipulate Scala syntax trees."
- label-text: experimental
+ label-text: removed in Scala 3
subdocs:
- title: Dependencies and setup
url: "quasiquotes/setup.html"
@@ -244,33 +282,30 @@
url: "quasiquotes/terminology.html"
- title: Future prospects
url: "quasiquotes/future.html"
- - title: Compiler Plugins
+ - title: Scala 2 Compiler Plugins
by: Lex Spoon and Seth Tisue
icon: puzzle-piece
url: "plugins/index.html"
description: "Compiler plugins permit customizing and extending the Scala compiler. This tutorial describes the plugin facility and walks you through how to create a simple plugin."
- - title: Compiler Options
+ - title: Scala 2 Compiler Options
by: Community
icon: cog
url: "compiler-options/index.html"
description: "Various options to control how scalac compiles your code."
-
+ - title: Error Formatting
+ by: Torsten Schmits
+ icon: cog
+ url: "compiler-options/errors.html"
+ description: "A new engine for more user-friendly error messages, printing chains of dependent implicits and colored found/required type diffs."
+ - title: Optimizer
+ by: Lukas Rytz and Andrew Marki
+ icon: cog
+ url: "compiler-options/optimizer.html"
+ description: "The compiler can perform various optimizations."
- category: Legacy
description: "Guides covering features no longer relevant to recent Scala versions (2.12+)."
overviews:
- - title: The Scala Actors Migration Guide
- by: Vojin Jovanovic and Philipp Haller
- icon: truck
- url: "core/actors-migration-guide.html"
- description: "To ease the migration from Scala Actors to Akka we have provided the Actor Migration Kit (AMK). The AMK consists of an extension to Scala Actors which is enabled by including the scala-actors-migration.jar on a project’s classpath. In addition, Akka 2.1 includes features, such as the ActorDSL singleton, which enable a simpler conversion of code using Scala Actors to Akka. The purpose of this document is to guide users through the migration process and explain how to use the AMK."
- - title: The Scala Actors API
- by: Philipp Haller and Stephen Tu
- icon: users
- url: "core/actors.html"
- description: "This guide describes the API of the scala.actors package of Scala 2.8/2.9. The organization follows groups of types that logically belong together. The trait hierarchy is taken into account to structure the individual sections. The focus is on the run-time behavior of the various methods that these traits define, thereby complementing the existing Scaladoc-based API documentation."
- label-color: "#899295"
- label-text: deprecated
- title: Scala 2.8 to 2.12’s Collections
by: Martin Odersky
icon: sitemap
diff --git a/_data/setup-scala.yml b/_data/setup-scala.yml
new file mode 100644
index 0000000000..cda4c2361b
--- /dev/null
+++ b/_data/setup-scala.yml
@@ -0,0 +1,6 @@
+linux-x86-64: curl -fL https://github.com/coursier/coursier/releases/latest/download/cs-x86_64-pc-linux.gz | gzip -d > cs && chmod +x cs && ./cs setup
+linux-arm64: curl -fL https://github.com/VirtusLab/coursier-m1/releases/latest/download/cs-aarch64-pc-linux.gz | gzip -d > cs && chmod +x cs && ./cs setup
+macOS-x86-64: curl -fL https://github.com/coursier/coursier/releases/latest/download/cs-x86_64-apple-darwin.gz | gzip -d > cs && chmod +x cs && (xattr -d com.apple.quarantine cs || true) && ./cs setup
+macOS-arm64: curl -fL https://github.com/VirtusLab/coursier-m1/releases/latest/download/cs-aarch64-apple-darwin.gz | gzip -d > cs && chmod +x cs && (xattr -d com.apple.quarantine cs || true) && ./cs setup
+macOS-brew: brew install coursier && coursier setup
+windows-link: https://github.com/coursier/coursier/releases/latest/download/cs-x86_64-pc-win32.zip
diff --git a/_data/sip-data.yml b/_data/sip-data.yml
index 6a6b19fe68..0a351b24da 100644
--- a/_data/sip-data.yml
+++ b/_data/sip-data.yml
@@ -1,27 +1,47 @@
+design:
+ color: "#839496"
+ text: "Design"
+
+implementation:
+ color: "#839496"
+ text: "Implementation"
+
+submitted:
+ color: "#2aa198"
+ text: "Submitted"
+
under-review:
color: "#b58900"
text: "Under Review"
-pending:
+vote-requested:
color: "#b58900"
- text: "Pending"
-
-dormant:
- color: "#839496"
- text: "Dormant"
+ text: "Vote Requested"
-under-revision:
- color: "#2aa198"
- text: "Under Revision"
+waiting-for-implementation:
+ color: "#b58900"
+ text: "Waiting for Implementation"
accepted:
color: "#859900"
text: "Accepted"
-complete:
+shipped:
color: "#859900"
- text: "Complete"
+ text: "Shipped"
rejected:
color: "#dc322f"
text: "Rejected"
+
+withdrawn:
+ color: "#839496"
+ text: "Withdrawn"
+
+accept:
+ color: "#859900"
+ text: "Accept"
+
+reject:
+ color: "#dc322f"
+ text: "Reject"
diff --git a/_data/translations.yml b/_data/translations.yml
index 56d0fd59cf..80ab5afc1c 100644
--- a/_data/translations.yml
+++ b/_data/translations.yml
@@ -1,2 +1,2 @@
tour:
- languages: [ba, es, ko, pt-br, pl, zh-cn, th, ru, ja]
+ languages: [ba, es, fr, ko, pt-br, pl, zh-cn, th, ru, ja]
diff --git a/_de/tutorials/scala-for-java-programmers.md b/_de/tutorials/scala-for-java-programmers.md
index e4d64108b7..9055d7caea 100644
--- a/_de/tutorials/scala-for-java-programmers.md
+++ b/_de/tutorials/scala-for-java-programmers.md
@@ -23,7 +23,7 @@ einfach ist, eignet es sich sehr gut, Scalas Funktionsweise zu demonstrieren, oh
über die Sprache wissen muss.
object HalloWelt {
- def main(args: Array[String]) {
+ def main(args: Array[String]): Unit = {
println("Hallo, Welt!")
}
}
@@ -93,7 +93,7 @@ Klassen der Java-Pakete importieren:
import java.text.DateFormat._
object FrenchDate {
- def main(args: Array[String]) {
+ def main(args: Array[String]): Unit = {
val now = new Date
val df = getDateInstance(LONG, Locale.FRANCE)
println(df format now)
@@ -183,7 +183,7 @@ einmal pro Sekunde aus.
println("Die Zeit vergeht wie im Flug.")
}
- def main(args: Array[String]) {
+ def main(args: Array[String]): Unit = {
oncePerSecond(timeFlies)
}
}
@@ -209,7 +209,7 @@ Variante des obigen Timer-Programmes verwendet eine anonyme Funktion anstatt der
}
}
- def main(args: Array[String]) {
+ def main(args: Array[String]): Unit = {
oncePerSecond(() => println("Die Zeit vergeht wie im Flug."))
}
}
@@ -256,7 +256,7 @@ Ein Problem der obigen Methoden `re` und `im` ist, dass man, um sie zu verwenden
Klammerpaar hinter ihren Namen anhängen muss:
object ComplexNumbers {
- def main(args: Array[String]) {
+ def main(args: Array[String]): Unit = {
val c = new Complex(1.2, 3.4)
println("imaginary part: " + c.im())
}
@@ -433,7 +433,7 @@ noch aus. Zu diesem Zweck soll eine `main`-Methode dienen, die den Ausdruck `(x+
Beispiel verwendet: zuerst wird der Wert in der Umgebung `{ x -> 5, y -> 7 }` berechnet und darauf
die beiden partiellen Ableitungen gebildet:
- def main(args: Array[String]) {
+ def main(args: Array[String]): Unit = {
val exp: Tree = Sum(Sum(Var("x"),Var("x")),Sum(Const(7),Var("y")))
val env: Environment = {
case "x" => 5
@@ -597,7 +597,7 @@ Um diese Referenz-Klasse zu verwenden, muss der generische Typ bei der Erzeugung
angegeben werden. Für einen Ganzzahl-Container soll folgendes Beispiel dienen:
object IntegerReference {
- def main(args: Array[String]) {
+ def main(args: Array[String]): Unit = {
val cell = new Reference[Int]
cell.set(13)
println("Reference contains the half of " + (cell.get * 2))
diff --git a/_es/overviews/core/actors.md b/_es/overviews/core/actors.md
deleted file mode 100644
index ef49675e1e..0000000000
--- a/_es/overviews/core/actors.md
+++ /dev/null
@@ -1,495 +0,0 @@
----
-layout: singlepage-overview
-title: API de actores en Scala
-
-partof: actors
-
-language: es
----
-
-**Philipp Haller and Stephen Tu**
-
-**Traducción e interpretación: Miguel Ángel Pastor Olivar**
-
-## Introducción
-
-La presente guía describe el API del paquete `scala.actors` de Scala 2.8/2.9. El documento se estructura en diferentes grupos lógicos. La jerarquía de "traits" es tenida en cuenta para llevar a cabo la estructuración de las secciones individuales. La atención se centra en el comportamiento exhibido en tiempo de ejecución por varios de los métodos presentes en los traits anteriores, complementando la documentación existente en el Scaladoc API.
-
-## Traits de actores: Reactor, ReplyReactor, y Actor
-
-### The Reactor trait
-
-`Reactor` es el padre de todos los traits relacionados con los actores. Heredando de este trait podremos definir actores con una funcionalidad básica de envío y recepción de mensajes.
-
-El comportamiento de un `Reactor` se define mediante la implementación de su método `act`. Este método es ejecutado una vez el `Reactor` haya sido iniciado mediante la invocación del método `start`, retornando el `Reactor`. El método `start`es *idempotente*, lo cual significa que la invocación del mismo sobre un actor que ya ha sido iniciado no surte ningún efecto.
-
-El trait `Reactor` tiene un parámetro de tipo `Msg` el cual determina el tipo de mensajes que un actor es capaz de recibir.
-
-La invocación del método `!` de un `Reactor` envía un mensaje al receptor. La operación de envío de un mensaje mediante el operador `!` es asíncrona por lo que el actor que envía el mensaje no se bloquea esperando a que el mensaje sea recibido sino que su ejecución continua de manera inmediata. Por ejemplo, `a ! msg` envia `msg` a `a`. Todos los actores disponen de un *buzón* encargado de regular los mensajes entrantes hasta que son procesados.
-
-El trait `Reactor` trait también define el método `forward`. Este método es heredado de `OutputChannel` y tiene el mismo efecto que el método `!`. Aquellos traits que hereden de `Reactor`, en particular el trait `ReplyActor`, sobreescriben este método para habilitar lo que comunmente se conocen como *"implicit reply destinations"* (ver a continuación)
-
-Un `Reactor` recibe mensajes utilizando el método `react`. Este método espera un argumento de tipo `PartialFunction[Msg, Unit]` el cual define cómo los mensajes de tipo `Msg` son tratados una vez llegan al buzón de un actor. En el siguiente ejemplo, el actor espera recibir la cadena "Hello", para posteriomente imprimir un saludo:
-
- react {
- case "Hello" => println("Hi there")
- }
-
-La invocación del método `react` nunca retorna. Por tanto, cualquier código que deba ejecutarse tras la recepción de un mensaje deberá ser incluido dentro de la función parcial pasada al método `react`. Por ejemplo, dos mensajes pueden ser recibidos secuencialmente mediante la anidación de dos llamadas a `react`:
-
- react {
- case Get(from) =>
- react {
- case Put(x) => from ! x
- }
- }
-
-El trait `Reactor` también ofrece una serie de estructuras de control que facilitan la programación utilizando el mecanismo de `react`.
-
-#### Terminación y estados de ejecución
-
-La ejecución de un `Reactor` finaliza cuando el cuerpo del método `act` ha sido completado. Un `Reactor` también pueden terminarse a si mismo de manera explícita mediante el uso del método `exit`. El tipo de retorno de `exit` es `Nothing`, dado que `exit` siempre dispara una excepción. Esta excepción únicamente se utiliza de manera interna y nunca debería ser capturada.
-
-Un `Reactor` finalizado pueden ser reiniciado mediante la invocación de su método `restart`. La invocación del método anterior sobre un `Reactor` que no ha terminado su ejecución lanza una excepción de tipo `IllegalStateException`. El reinicio de un actor que ya ha terminado provoca que el método `act` se ejecute nuevamente.
-
-El tipo `Reactor` define el método `getState`, el cual retorna, como un miembro de la enumeración `Actor.State`, el estado actual de la ejecución del actor. Un actor que todavía no ha sido iniciado se encuentra en el estado `Actor.State.New`. Si el actor se está ejecutando pero no está esperando por ningún mensaje su estado será `Actor.State.Runnable`. En caso de que el actor haya sido suspendido mientras espera por un mensaje estará en el estado `Actor.State.Suspended`. Por último, un actor ya terminado se encontrará en el estado `Actor.State.Terminated`.
-
-#### Manejo de excepciones
-
-El miembro `exceptionHandler` permite llevar a cabo la definición de un manejador de excepciones que estará habilitado durante toda la vida del `Reactor`:
-
- def exceptionHandler: PartialFunction[Exception, Unit]
-
-Este manejador de excepciones (`exceptionHandler`) retorna una función parcial que se utiliza para gestionar excepciones que no hayan sido tratadas de ninguna otra manera. Siempre que una excepción se propague fuera del método `act` de un `Reactor` el manejador anterior será aplicado a dicha excepción, permitiendo al actor ejecutar código de limpieza antes de que se termine. Nótese que la visibilidad de `exceptionHandler` es `protected`.
-
-El manejo de excepciones mediante el uso de `exceptionHandler` encaja a la perfección con las estructuras de control utilizadas para programas con el método `react`. Siempre que una excepción es manejada por la función parcial retornada por `excepctionHandler`, la ejecución continua con la "closure" actual:
-
- loop {
- react {
- case Msg(data) =>
- if (cond) // process data
- else throw new Exception("cannot process data")
- }
- }
-
-Assumiendo que `Reactor` sobreescribe el atributo `exceptionHandler`, tras el lanzamiento de una excepción en el cuerpo del método `react`, y una vez ésta ha sido gestionada, la ejecución continua con la siguiente iteración del bucle.
-
-### The ReplyReactor trait
-
-El trait `ReplyReactor` extiende `Reactor[Any]` y sobrescribe y/o añade los siguientes métodos:
-
-- El método `!` es sobrescrito para obtener una referencia al actor
- actual (el emisor). Junto al mensaje actual, la referencia a dicho
- emisor es enviada al buzón del actor receptor. Este último dispone de
- acceso al emisor del mensaje mediante el uso del método `sender` (véase más abajo).
-
-- El método `forward` es sobrescrito para obtener una referencia al emisor
- del mensaje que actualmente está siendo procesado. Junto con el mensaje
- actual, esta referencia es enviada como el emisor del mensaje actual.
- Como consuencia de este hecho, `forward` nos permite reenviar mensajes
- en nombre de actores diferentes al actual.
-
-- El método (añadido) `sender` retorna el emisor del mensaje que está siendo
- actualmente procesado. Puesto que un mensaje puede haber sido reenviado,
- `sender` podría retornar un actor diferente al que realmente envió el mensaje.
-
-- El método (añadido) `reply` envía una respuesta al emisor del último mensaje.
- `reply` también es utilizado para responder a mensajes síncronos o a mensajes
- que han sido enviados mediante un "future" (ver más adelante).
-
-- El método (añadido) `!?` ofrece un *mecanismo síncrono de envío de mensajes*.
- La invocación de `!?` provoca que el actor emisor del mensaje se bloquee hasta
- que se recibe una respuesta, momento en el cual retorna dicha respuesta. Existen
- dos variantes sobrecargadas. La versión con dos parámetros recibe un argumento
- adicional que representa el tiempo de espera (medido en milisegundos) y su tipo
- de retorno es `Option[Any]` en lugar de `Any`. En caso de que el emisor no
- reciba una respuesta en el periodo de espera establecido, el método `!?` retornará
- `None`; en otro caso retornará la respuesta recibida recubierta con `Some`.
-
-- Los métodos (añadidos) `!!` son similares al envío síncrono de mensajes en el sentido de
- que el receptor puede enviar una respuesta al emisor del mensaje. Sin embargo, en lugar
- de bloquear el actor emisor hasta que una respuesta es recibida, retornan una instancia de
- `Future`. Esta última puede ser utilizada para recuperar la respuesta del receptor una
- vez se encuentre disponible; asimismo puede ser utilizada para comprobar si la respuesta
- está disponible sin la necesidad de bloquear el emisor. Existen dos versiones sobrecargadas.
- La versión que acepta dos parámetros recibe un argumento adicional de tipo
- `PartialFunction[Any, A]`. Esta función parcial es utilizada para realizar el post-procesado de
- la respuesta del receptor. Básicamente, `!!` retorna un "future" que aplicará la anterior
- función parcial a la repuesta (una vez recibida). El resultado del "future" es el resultado
- de este post-procesado.
-
-- El método (añadido) `reactWithin` permite llevar a cabo la recepción de mensajes en un periodo
- determinado de tiempo. En comparación con el método `react`, recibe un parámetro adicional,
- `msec`, el cual representa el periodo de tiempo, expresado en milisegundos, hasta que el patrón `TIMEOUT`
- es satisfecho (`TIMEOUT` es un "case object" presente en el paquete `scala.actors`). Ejemplo:
-
- reactWithin(2000) {
- case Answer(text) => // process text
- case TIMEOUT => println("no answer within 2 seconds")
- }
-
-- El método `reactWithin` también permite realizar accesos no bloqueantes al buzón. Si
- especificamos un tiempo de espera de 0 milisegundos, primeramente el buzón será escaneado
- en busca de un mensaje que concuerde. En caso de que no exista ningún mensaje concordante
- tras el primer escaneo, el patrón `TIMEOUT` será satisfecho. Por ejemplo, esto nos permite
- recibir determinado tipo de mensajes donde unos tienen una prioridad mayor que otros:
-
- reactWithin(0) {
- case HighPriorityMsg => // ...
- case TIMEOUT =>
- react {
- case LowPriorityMsg => // ...
- }
- }
-
- En el ejemplo anterior, el actor procesa en primer lugar los mensajes `HighPriorityMsg` aunque
- exista un mensaje `LowPriorityMsg` más antiguo en el buzón. El actor sólo procesará mensajes
- `LowPriorityMsg` en primer lugar en aquella situación donde no exista ningún `HighProrityMsg`
- en el buzón.
-
-Adicionalmente, el tipo `ReplyActor` añade el estado de ejecución `Actor.State.TimedSuspended`. Un actor suspendido, esperando la recepción de un mensaje mediante el uso de `reactWithin` se encuentra en dicho estado.
-
-### El trait Actor
-
-El trait `Actor` extiende de `ReplyReactor` añadiendo y/o sobrescribiendo los siguientes miembros:
-
-- El método (añadido) `receive` se comporta del mismo modo que `react`, con la excepción
- de que puede retornar un resultado. Este hecho se ve reflejado en la definición del tipo,
- que es polimórfico en el tipo del resultado: `def receive[R](f: PartialFunction[Any, R]): R`.
- Sin embargo, la utilización de `receive` hace que el uso del actor
- sea más pesado, puesto que el hilo subyacente es bloqueado mientras
- el actor está esperando por la respuesta. El hilo bloqueado no está
- disponible para ejecutar otros actores hasta que la invocación del
- método `receive` haya retornado.
-
-- El método (añadido) `link` permite a un actor enlazarse y desenlazarse de otro
- actor respectivamente. El proceso de enlazado puede utilizarse para monitorizar
- y responder a la terminación de un actor. En particular, el proceso de enlazado
- afecta al comportamiento mostrado en la ejecución del método `exit` tal y como
- se escribe en el la documentación del API del trait `Actor`.
-
-- El atributo `trapExit` permite responder a la terminación de un actor enlazado,
- independientemente de los motivos de su terminación (es decir, carece de importancia
- si la terminación del actor es normal o no). Si `trapExit` toma el valor cierto en
- un actor, este nunca terminará por culpa de los actores enlazados. En cambio, siempre
- y cuando uno de sus actores enlazados finalice, recibirá un mensaje de tipo `Exit`.
- `Exit` es una "case class" que presenta dos atributos: `from` referenciando al actor
- que termina y `reason` conteniendo los motivos de la terminación.
-
-#### Terminación y estados de ejecución
-
-Cuando la ejecución de un actor finaliza, el motivo de dicha terminación puede ser
-establecida de manera explícita mediante la invocación de la siguiente variante
-del método `exit`:
-
- def exit(reason: AnyRef): Nothing
-
-Un actor cuyo estado de terminación es diferente del símbolo `'normal` propaga
-los motivos de su terminación a todos aquellos actores que se encuentren enlazados
-a él. Si el motivo de la terminación es una excepción no controlada, el motivo de
-finalización será una instancia de la "case class" `UncaughtException`.
-
-El trait `Actor` incluye dos nuevos estados de ejecución. Un actor que se encuentra
-esperando la recepción de un mensaje mediante la utilización del método `receive` se
-encuentra en el método `Actor.State.Blocked`. Un actor esperado la recepción de un
-mensaje mediante la utilización del método `receiveWithin` se encuentra en el estado
-`Actor.State.TimeBlocked`.
-
-## Estructuras de control
-
-El trait `Reactor` define una serie de estructuras de control que simplifican el mecanismo
-de programación con la función sin retorno `react`. Normalmente, una invocación al método
-`react` no retorna nunca. Si el actor necesita ejecutar código a continuación de la invocación
-anterior, tendrá que pasar, de manera explícita, dicho código al método `react` o utilizar
-algunas de las estructuras que encapsulan este comportamiento.
-
-La estructura de control más basica es `andThen`. Permite registrar una `closure` que será
-ejecutada una vez el actor haya terminado la ejecución de todo lo demas.
-
- actor {
- {
- react {
- case "hello" => // processing "hello"
- }: Unit
- } andThen {
- println("hi there")
- }
- }
-
-Por ejemplo, el actor anterior imprime un saludo tras realizar el procesado
-del mensaje `hello`. Aunque la invocación del método `react` no retorna,
-podemos utilizar `andThen` para registrar el código encargado de imprimir
-el saludo a continuación de la ejecución del actor.
-
-Nótese que existe una *atribución de tipo* a continuación de la invocación
-de `react` (`:Unit`). Básicamente, nos permite tratar el resultado de
-`react` como si fuese de tipo `Unit`, lo cual es legal, puesto que el resultado
-de una expresión siempre se puede eliminar. Es necesario llevar a cabo esta operación
-dado que `andThen` no puede ser un miembro del tipo `Unit`, que es el tipo del resultado
-retornado por `react`. Tratando el tipo de resultado retornado por `react` como
-`Unit` permite llevar a cabo la aplicación de una conversión implícita la cual
-hace que el miembro `andThen` esté disponible.
-
-El API ofrece unas cuantas estructuras de control adicionales:
-
-- `loop { ... }`. Itera de manera indefinidia, ejecutando el código entre
-las llaves en cada una de las iteraciones. La invocación de `react` en el
-cuerpo del bucle provoca que el actor se comporte de manera habitual ante
-la llegada de un nuevo mensaje. Posteriormente a la recepción del mensaje,
-la ejecución continua con la siguiente iteración del bucle actual.
-
-- `loopWhile (c) { ... }`. Ejecuta el código entre las llaves mientras la
-condición `c` tome el valor `true`. La invocación de `react` en el cuerpo
-del bucle ocasiona el mismo efecto que en el caso de `loop`.
-
-- `continue`. Continua con la ejecución de la closure actual. La invocación
-de `continue` en el cuerpo de un `loop`o `loopWhile` ocasionará que el actor
-termine la iteración en curso y continue con la siguiente. Si la iteración en
-curso ha sido registrada utilizando `andThen`, la ejecución continua con la
-segunda "closure" pasada como segundo argumento a `andThen`.
-
-Las estructuras de control pueden ser utilizadas en cualquier parte del cuerpo
-del método `act` y en los cuerpos de los métodos que, transitivamente, son
-llamados por `act`. Aquellos actores creados utilizando la sintáxis `actor { ... }`
-pueden importar las estructuras de control desde el objeto `Actor`.
-
-#### Futures
-
-Los traits `RepyActor` y `Actor` soportan operaciones de envío de mensajes
-(métodos `!!`) que, de manera inmediata, retornan un *future*. Un *future*,
-es una instancia del trait `Future` y actúa como un manejador que puede
-ser utilizado para recuperar la respuesta a un mensaje "send-with-future".
-
-El emisor de un mensaje "send-with-future" puede esperar por la respuesta del
-future *aplicando* dicha future. Por ejemplo, el envío de un mensaje mediante
-`val fut = a !! msg` permite al emisor esperar por el resultado del future
-del siguiente modo: `val res = fut()`.
-
-Adicionalmente, utilizando el método `isSet`, un `Future` puede ser consultado
-de manera no bloqueante para comprobar si el resultado está disponible.
-
-Un mensaje "send-with-future" no es el único modo de obtener una referencia a
-un future. Estos pueden ser creados utilizando el método `future`. En el siguiente
-ejemplo, `body` se ejecuta de manera concurrente, retornando un future como
-resultado.
-
- val fut = Future { body }
- // ...
- fut() // wait for future
-
-Lo que hace especial a los futures en el contexto de los actores es la posibilidad
-de recuperar su resultado utilizando las operaciones estándar de actores de
-recepción de mensajes como `receive`, etc. Además, es posible utilizar las operaciones
-basadas en eventos `react`y `reactWithin`. Esto permite a un actor esperar por el
-resultado de un future sin la necesidad de bloquear el hilo subyacente.
-
-Las operaciones de recepción basadas en actores están disponibles a través del
-atributo `inputChannel` del future. Dado un future de tipo `Future[T]`, el tipo
-de `inputChannel` es `InputChannel[T]`. Por ejemplo:
-
- val fut = a !! msg
- // ...
- fut.inputChannel.react {
- case Response => // ...
- }
-
-## Canales
-
-Los canales pueden ser utilizados para simplificar el manejo de mensajes
-que presentan tipos diferentes pero que son enviados al mismo actor. La
-jerarquía de canales se divide en `OutputChannel` e `InputChannel`.
-
-Los `OutputChannel` pueden ser utilizados para enviar mensajes. Un
-`OutputChannel` `out` soporta las siguientes operaciones:
-
-- `out ! msg`. Envía el mensaje `msg` a `out` de manera asíncrona. Cuando `msg`
- es enviado directamente a un actor se incluye un referencia al actor emisor
- del mensaje.
-
-- `out forward msg`. Reenvía el mensaje `msg` a `out` de manera asíncrona.
- El actor emisor se determina en el caso en el que `msg` es reenviado a
- un actor.
-
-- `out.receiver`. Retorna el único actor que está recibiendo mensajes que están
- siendo enviados al canal `out`.
-
-- `out.send(msg, from)`. Envía el mensaje `msg` a `out` de manera asíncrona,
- proporcionando a `from` como el emisor del mensaje.
-
-Nótese que el trait `OutputChannel` tiene un parámetro de tipo que especifica el
-tipo de los mensajes que pueden ser enviados al canal (utilizando `!`, `forward`,
-y `send`). Este parámetro de tipo es contra-variante:
-
- trait OutputChannel[-Msg]
-
-Los actores pueden recibir mensajes de un `InputChannel`. Del mismo modo que
-`OutputChannel`, el trait `InputChannel` presenta un parámetro de tipo que
-especifica el tipo de mensajes que pueden ser recibidos por el canal. En este caso,
-el parámetro de tipo es covariante:
-
- trait InputChannel[+Msg]
-
-Un `InputChannel[Msg]` `in` soportal las siguientes operaciones.
-
-- `in.receive { case Pat1 => ... ; case Patn => ... }` (y de manera similar,
- `in.receiveWithin`) recibe un mensaje proveniente de `in`. La invocación
- del método `receive` en un canal de entrada presenta la misma semántica
- que la operación estándar de actores `receive`. La única diferencia es que
- la función parcial pasada como argumento tiene tipo `PartialFunction[Msg, R]`
- donde `R` es el tipo de retorno de `receive`.
-
-- `in.react { case Pat1 => ... ; case Patn => ... }` (y de manera similar,
- `in.reactWithin`). Recibe un mensaje de `in` utilizando la operación basada en
- eventos `react`. Del mismo modo que la operación `react` en actores, el tipo
- de retorno es `Nothing`, indicando que las invocaciones de este método nunca
- retornan. Al igual que la operación `receive` anterior, la función parcial
- que se pasa como argumento presenta un tipo más específico:
-
- PartialFunction[Msg, Unit]
-
-### Creando y compartiendo canales
-
-Los canales son creados utilizando la clase concreta `Channel`. Esta clase extiende
-de `InputChannel` y `OutputChannel`. Un canal pueden ser compartido haciendo dicho
-canal visible en el ámbito de múltiples actores o enviándolo como mensaje.
-
-El siguiente ejemplo muestra la compartición mediante publicación en ámbitos:
-
- actor {
- var out: OutputChannel[String] = null
- val child = actor {
- react {
- case "go" => out ! "hello"
- }
- }
- val channel = new Channel[String]
- out = channel
- child ! "go"
- channel.receive {
- case msg => println(msg.length)
- }
- }
-
-La ejecución de este ejemplo imprime la cadena "5" en la consola. Nótese que el
-actor `child` únicamente tiene acceso a `out`, que es un `OutputChannel[String]`.
-La referencia al canal, la cual puede ser utilizada para llevar a cabo la recepción
-de mensajes, se encuentra oculta. Sin embargo, se deben tomar precauciones y
-asegurarse que el canal de salida es inicializado con un canal concreto antes de que
-`child` le envíe ningún mensaje. En el ejemplo que nos ocupa, esto es llevado a cabo
-mediante el mensaje "go". Cuando se está recibiendo de `channel` utilizando el método
-`channel.receive` podemos hacer uso del hecho que `msg` es de tipo `String`, y por
-lo tanto tiene un miembro `length`.
-
-Una alternativa a la compartición de canales es enviarlos a través de mensajes.
-El siguiente fragmento de código muestra un sencillo ejemplo de aplicación:
-
- case class ReplyTo(out: OutputChannel[String])
-
- val child = actor {
- react {
- case ReplyTo(out) => out ! "hello"
- }
- }
-
- actor {
- val channel = new Channel[String]
- child ! ReplyTo(channel)
- channel.receive {
- case msg => println(msg.length)
- }
- }
-
-La "case class" `ReplyTo` es un tipo de mensajes que utilizamos para distribuir
-una referencia a un `OutputChannel[String]`. Cuando el actor `child` recibe un
-mensaje de tipo `ReplyTo` éste envía una cadena a su canal de salida. El segundo
-actor recibe en el canal del mismo modo que anteriormente.
-
-## Planificadores
-
-Un `Reactor`(o una instancia de uno de sus subtipos) es ejecutado utilizando un
-*planificador*. El trait `Reactor` incluye el miembro `scheduler` el cual retorna el
-planificador utilizado para ejecutar sus instancias:
-
- def scheduler: IScheduler
-
-La plataforma de ejecución ejecuta los actores enviando tareas al planificador mediante
-el uso de los métodos `execute` definidos en el trait `IScheduler`. La mayor parte
-del resto de métodos definidos en este trait únicamente adquieren cierto protagonismo
-cuando se necesita implementar un nuevo planificador desde cero; algo que no es necesario
-en muchas ocasiones.
-
-Los planificadores por defecto utilizados para ejecutar instancias de `Reactor` y
-`Actor` detectan cuando los actores han finalizado su ejecución. En el momento que esto
-ocurre, el planificador se termina a si mismo (terminando con cualquier hilo que estuviera
-en uso por parte del planificador). Sin embargo, algunos planificadores como el
-`SingleThreadedScheduler` (definido en el paquete `scheduler`) necesita ser terminado de
-manera explícita mediante la invocación de su método `shutdown`).
-
-La manera más sencilla de crear un planificador personalizado consisten en extender la clase
-`SchedulerAdapter`, implementando el siguiente método abstracto:
-
- def execute(fun: => Unit): Unit
-
-Por norma general, una implementación concreata utilizaría un pool de hilos para llevar a cabo
-la ejecución del argumento por nombre `fun`.
-
-## Actores remotos
-
-Esta sección describe el API de los actores remotos. Su principal interfaz es el objecto
-[`RemoteActor`](https://www.scala-lang.org/api/2.9.1/scala/actors/remote/RemoteActor$.html) definido
-en el paquete `scala.actors.remote`. Este objeto facilita el conjunto de métodos necesarios para crear
-y establecer conexiones a instancias de actores remotos. En los fragmentos de código que se muestran a
-continuación se asume que todos los miembros de `RemoteActor` han sido importados; la lista completa
-de importaciones utilizadas es la siguiente:
-
- import scala.actors._
- import scala.actors.Actor._
- import scala.actors.remote._
- import scala.actors.remote.RemoteActor._
-
-### Iniciando actores remotos
-
-Un actore remot es identificado de manera unívoca por un
-[`Symbol`](https://www.scala-lang.org/api/2.9.1/scala/Symbol.html). Este símbolo es único para la instancia
-de la máquina virual en la que se está ejecutando un actor. Un actor remoto identificado con el nombre
-`myActor` puede ser creado del siguiente modo.
-
- class MyActor extends Actor {
- def act() {
- alive(9000)
- register('myActor, self)
- // ...
- }
- }
-
-Nótese que el nombre únicamente puede ser registrado con un único actor al mismo tiempo.
-Por ejemplo, para registrar el actor *A* como `'myActor` y posteriormente registrar otro
-actor *B* como `'myActor`, debería esperar hasta que *A* haya finalizado. Este requisito
-aplica a lo largo de todos los puertos, por lo que registrando a *B* en un puerto diferente
-no sería suficiente.
-
-### Connecting to remote actors
-
-Establecer la conexión con un actor remoto es un proceso simple. Para obtener una referencia remota
-a un actor remoto que está ejecutándose en la máquina `myMachine` en el puerto 8000 con el nombre
-`'anActor`, tendremos que utilizar `select`del siguiente modo:
-
- val myRemoteActor = select(Node("myMachine", 8000), 'anActor)
-
-El actor retornado por `select` es de tipo `AbstractActor`, que proporciona esencialmente el mismo
-interfaz que un actor normal, y por lo tanto es compatible con las habituales operaciones de envío
-de mensajes:
-
- myRemoteActor ! "Hello!"
- receive {
- case response => println("Response: " + response)
- }
- myRemoteActor !? "What is the meaning of life?" match {
- case 42 => println("Success")
- case oops => println("Failed: " + oops)
- }
- val future = myRemoteActor !! "What is the last digit of PI?"
-
-Nótese que la operación `select` es perezosa; no inicializa ninguna conexión de red. Simplemente crea
-una nueva instancia de `AbstractActor` que está preparada para iniciar una nueva conexión de red en el
-momento en que sea necesario (por ejemplo cuando el método '!' es invocado).
diff --git a/_es/overviews/core/string-interpolation.md b/_es/overviews/core/string-interpolation.md
index df9aede7f8..a85787d382 100644
--- a/_es/overviews/core/string-interpolation.md
+++ b/_es/overviews/core/string-interpolation.md
@@ -113,10 +113,10 @@ De este modo, el método `json` tiene acceso a las diferentes partes de las cade
def json(args: Any*): JSONObject = {
val strings = sc.parts.iterator
val expressions = args.iterator
- var buf = new StringBuffer(strings.next)
+ var buf = new StringBuilder(strings.next)
while(strings.hasNext) {
- buf append expressions.next
- buf append strings.next
+ buf.append(expressions.next())
+ buf.append(strings.next())
}
parseJson(buf)
}
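For readers skimming the patch, the hunk above only swaps `StringBuffer` for `StringBuilder` inside the guide's custom `json` interpolator. Below is a minimal, self-contained sketch of the same `StringContext` pattern; the `debug` interpolator name and its plain-`String` result are illustrative choices for this sketch, not part of the guide's `json` method.

```scala
object DebugInterpolation {
  // Extension on StringContext: stitches the literal parts and the
  // argument values together with a StringBuilder, as in the edited guide code.
  implicit class DebugHelper(sc: StringContext) {
    def debug(args: Any*): String = {
      val strings     = sc.parts.iterator
      val expressions = args.iterator
      val buf         = new StringBuilder(strings.next())
      while (strings.hasNext) {
        buf.append(expressions.next())
        buf.append(strings.next())
      }
      buf.toString
    }
  }

  def main(args: Array[String]): Unit = {
    val x = 42
    println(debug"x is $x") // prints: x is 42
  }
}
```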
diff --git a/_es/overviews/parallel-collections/architecture.md b/_es/overviews/parallel-collections/architecture.md
index 8e60e87a59..138a5dee08 100644
--- a/_es/overviews/parallel-collections/architecture.md
+++ b/_es/overviews/parallel-collections/architecture.md
@@ -87,7 +87,7 @@ de la librería de colecciones secuenciales -- de hecho, "replican" los correspo
traits presentes en el framework de colecciones secuenciales, tal y como se muestra
a continuación.
-[]({{ site.baseurl }}/resources/images/parallel-collections-hierarchy.png)
+[]({{ site.baseurl }}/resources/images/parallel-collections-hierarchy.png)
Jerarquía de clases de las librerías de colecciones secuenciales y paralelas de Scala
diff --git a/_es/tour/abstract-type-members.md b/_es/tour/abstract-type-members.md
index ffcf7fd482..1e9afc50d7 100644
--- a/_es/tour/abstract-type-members.md
+++ b/_es/tour/abstract-type-members.md
@@ -14,56 +14,60 @@ previous-page: tour-of-scala
En Scala, las cases son parametrizadas con valores (los parámetros de construcción) y con tipos (si las clases son [genéricas](generic-classes.html)). Por razones de consistencia, no es posible tener solo valores como miembros de objetos; tanto los tipos como los valores son miembros de objetos. Además, ambos tipos de miembros pueden ser concretos y abstractos.
A continuación un ejemplo el cual define de forma conjunta una asignación de valor tardía y un tipo abstracto como miembros del [trait](traits.html) `Buffer`.
- trait Buffer {
- type T
- val element: T
- }
+```scala mdoc
+trait Buffer {
+ type T
+ val element: T
+}
+```
Los *tipos abstractos* son tipos los cuales su identidad no es precisamente conocida. En el ejemplo anterior, lo único que sabemos es que cada objeto de la clase `Buffer` tiene un miembro de tipo `T`, pero la definición de la clase `Buffer` no revela qué tipo concreto se corresponde con el tipo `T`. Tal como las definiciones de valores, es posible sobrescribir las definiciones de tipos en subclases. Esto permite revelar más información acerca de un tipo abstracto al acotar el tipo ligado (el cual describe las posibles instancias concretas del tipo abstracto).
En el siguiente programa derivamos la clase `SeqBuffer` la cual nos permite almacenar solamente sequencias en el buffer al estipular que el tipo `T` tiene que ser un subtipo de `Seq[U]` para un nuevo tipo abstracto `U`:
- abstract class SeqBuffer extends Buffer {
- type U
- type T <: Seq[U]
- def length = element.length
- }
+```scala mdoc
+abstract class SeqBuffer extends Buffer {
+ type U
+ type T <: Seq[U]
+ def length = element.length
+}
+```
Traits o [clases](classes.html) con miembros de tipos abstractos son generalmente usados en combinación con instancias de clases anónimas. Para ilustrar este concepto veremos un programa el cual trata con un buffer de sequencia que se remite a una lista de enteros.
- abstract class IntSeqBuffer extends SeqBuffer {
- type U = Int
- }
+```scala mdoc
+abstract class IntSeqBuffer extends SeqBuffer {
+ type U = Int
+}
- object AbstractTypeTest1 extends App {
- def newIntSeqBuf(elem1: Int, elem2: Int): IntSeqBuffer =
- new IntSeqBuffer {
- type T = List[U]
- val element = List(elem1, elem2)
- }
- val buf = newIntSeqBuf(7, 8)
- println("length = " + buf.length)
- println("content = " + buf.element)
- }
+def newIntSeqBuf(elem1: Int, elem2: Int): IntSeqBuffer =
+ new IntSeqBuffer {
+ type T = List[U]
+ val element = List(elem1, elem2)
+ }
+val buf = newIntSeqBuf(7, 8)
+println("length = " + buf.length)
+println("content = " + buf.element)
+```
El tipo retornado por el método `newIntSeqBuf` está ligado a la especialización del trait `Buffer` en el cual el tipo `U` es ahora equivalente a `Int`. Existe un tipo alias similar en la instancia de la clase anónima dentro del cuerpo del método `newIntSeqBuf`. En ese lugar se crea una nueva instancia de `IntSeqBuffer` en la cual el tipo `T` está ligado a `List[Int]`.
Es necesario notar que generalmente es posible transformar un tipo abstracto en un tipo paramétrico de una clase y viceversa. A continuación se muestra una versión del código anterior el cual solo usa tipos paramétricos.
- abstract class Buffer[+T] {
- val element: T
- }
- abstract class SeqBuffer[U, +T <: Seq[U]] extends Buffer[T] {
- def length = element.length
- }
- object AbstractTypeTest2 extends App {
- def newIntSeqBuf(e1: Int, e2: Int): SeqBuffer[Int, Seq[Int]] =
- new SeqBuffer[Int, List[Int]] {
- val element = List(e1, e2)
- }
- val buf = newIntSeqBuf(7, 8)
- println("length = " + buf.length)
- println("content = " + buf.element)
- }
+```scala mdoc:reset
+abstract class Buffer[+T] {
+ val element: T
+}
+abstract class SeqBuffer[U, +T <: Seq[U]] extends Buffer[T] {
+ def length = element.length
+}
+def newIntSeqBuf(e1: Int, e2: Int): SeqBuffer[Int, Seq[Int]] =
+ new SeqBuffer[Int, List[Int]] {
+ val element = List(e1, e2)
+ }
+val buf = newIntSeqBuf(7, 8)
+println("length = " + buf.length)
+println("content = " + buf.element)
+```
Nótese que es necesario usar [variance annotations](variances.html) aquí; de otra manera no sería posible ocultar el tipo implementado por la secuencia concreta del objeto retornado por `newIntSeqBuf`. Además, existen casos en los cuales no es posible remplazar tipos abstractos con tipos parametrizados.
diff --git a/_es/tour/annotations.md b/_es/tour/annotations.md
index c49cbe3e8c..37dd912f97 100644
--- a/_es/tour/annotations.md
+++ b/_es/tour/annotations.md
@@ -6,7 +6,7 @@ partof: scala-tour
num: 3
language: es
-next-page: classes
+next-page: packages-and-imports
previous-page: abstract-type-members
---
diff --git a/_es/tour/automatic-closures.md b/_es/tour/automatic-closures.md
deleted file mode 100644
index bb26c5a665..0000000000
--- a/_es/tour/automatic-closures.md
+++ /dev/null
@@ -1,65 +0,0 @@
----
-layout: tour
-title: Construcción de closures automáticas
-partof: scala-tour
-
-num: 16
-language: es
-
-next-page: operators
-previous-page: multiple-parameter-lists
----
-
-Scala permite pasar funciones sin parámetros como parámetros de un método. Cuando un método así es invocado, los parámetros reales de la función enviada sin parámetros no son evaluados y una función "nularia" (de aridad cero, 0-aria, o sin parámetros) es pasada en su lugar. Esta función encapsula el comportamiento del parámetro correspondiente (comunmente conocido como "llamada por nombre").
-
-Para aclarar un poco esto aquí se muestra un ejemplo:
-
- object TargetTest1 extends App {
- def whileLoop(cond: => Boolean)(body: => Unit): Unit =
- if (cond) {
- body
- whileLoop(cond)(body)
- }
- var i = 10
- whileLoop (i > 0) {
- println(i)
- i -= 1
- }
- }
-
-La función `whileLoop` recibe dos parámetros `cond` y `body`. Cuando la función es llamada, los parámetros reales no son evaluados en ese momento. Pero cuando los parámetros son utilizados en el cuerpo de la función `whileLoop`, las funciones nularias creadas implícitamente serán evaluadas en su lugar. Así, nuestro método `whileLoop` implementa un bucle tipo Java mediante una implementación recursiva.
-
-Es posible combinar el uso de [operadores de infijo y postfijo (infix/postfix)](operators.html) con este mecanismo para crear declaraciones más complejas (con una sintaxis agradadable).
-
-Aquí mostramos la implementación de una declaración tipo repetir-a-menos-que (repetir el bucle a no ser que se cumpla X condición):
-
- object TargetTest2 extends App {
- def loop(body: => Unit): LoopUnlessCond =
- new LoopUnlessCond(body)
- protected class LoopUnlessCond(body: => Unit) {
- def unless(cond: => Boolean) {
- body
- if (!cond) unless(cond)
- }
- }
- var i = 10
- loop {
- println("i = " + i)
- i -= 1
- } unless (i == 0)
- }
-
-La función `loop` solo acepta el cuerpo de un bucle y retorna una instancia de la clase `LoopUnlessCond` (la cual encapsula el cuerpo del objeto). Es importante notar que en este punto el cuerpo del bucle no ha sido evaluado aún. La clase `LoopUnlessCond` tiene un método `unless` el cual puede ser usado como un *operador de infijo (infix)*. De esta manera podemos lograr una sintaxis muy natural para nuestro nuevo bucle `repetir { a_menos_que ( )`.
-
-A continuación se expone el resultado de la ejecución de `TargetTest2`:
-
- i = 10
- i = 9
- i = 8
- i = 7
- i = 6
- i = 5
- i = 4
- i = 3
- i = 2
- i = 1
diff --git a/_es/tour/basics.md b/_es/tour/basics.md
index 998ddbe544..484470a508 100644
--- a/_es/tour/basics.md
+++ b/_es/tour/basics.md
@@ -13,40 +13,36 @@ En esta página, practicaremos conceptos básicos de Scala.
## Probando Scala en el navegador
-Puedes ejecutar Scala en tu navegador con ScalaFiddle.
+Puedes ejecutar Scala en tu navegador con Scastie.
-1. Ve a [https://scalafiddle.io](https://scalafiddle.io).
+1. Ve a [Scastie](https://scastie.scala-lang.org/).
2. Escribe `println("Hello, world!")` en el panel a la izquierda.
3. Presiona el botón "Run". En el panel de la derecha aparecerá el resultado.
Así, de manera fácil y sin preparación, puedes probar fragmentos de código Scala.
-Muchos ejemplos de código en esta documentación están integrados con ScalaFiddle, y así puedes probarlos directamente solo con pulsar el botón "Run".
-
## Expresiones
Las expresiones son sentencias computables.
-```tut
+```scala mdoc
1 + 1
```
Se puede ver el resultado de evaluar expresiones usando `println`.
-{% scalafiddle %}
-```tut
+```scala mdoc
println(1) // 1
println(1 + 1) // 2
println("Hello!") // Hello!
println("Hello," + " world!") // Hello, world!
```
-{% endscalafiddle %}
## Valores
Se puede dar un nombre al resultado de una expresión usando la palabra reservada `val`.
-```tut
+```scala mdoc
val x = 1 + 1
println(x) // 2
```
@@ -55,13 +51,13 @@ Los resultados con nombre, como `x` en el ejemplo, son llamados valores. Referen
Los valores no pueden ser reasignados.
-```tut:fail
+```scala mdoc:fail
x = 3 // This does not compile.
```
Scala es capaz de inferir el tipo de un valor. Aun así, también se puede indicar el tipo usando una anotación:
-```tut
+```scala mdoc:nest
val x: Int = 1 + 1
```
@@ -71,7 +67,7 @@ Nótese que la anotación del tipo `Int` sigue al identificador `x` de la variab
Una variable es como un valor, excepto que a una variable se le puede re-asignar un valor después de declararla. Una variable se declara con la palabra reservada `var`.
-```tut
+```scala mdoc:nest
var x = 1 + 1
x = 3 // This compiles because "x" is declared with the "var" keyword.
println(x * x) // 9
@@ -79,7 +75,7 @@ println(x * x) // 9
Como con los valores, si se quiere se puede especificar el tipo de una variable mutable:
-```tut
+```scala mdoc:nest
var x: Int = 1 + 1
```
@@ -89,7 +85,7 @@ Se pueden combinar expresiones rodeándolas con `{}` . A esto le llamamos un blo
El resultado de la última expresión del bloque es también el resultado total del bloque.
-```tut
+```scala mdoc
println({
val x = 1 + 1
x + 1
@@ -102,7 +98,7 @@ Una función es una expresión que acepta parámetros.
Una función se puede declarar anónima, sin nombre. Por ejemplo, ésta es una función que acepta un número entero `x`, y devuelve el resultado de incrementarlo:
-```tut
+```scala mdoc
(x: Int) => x + 1
```
@@ -110,56 +106,48 @@ La lista de parámetros de la función está a la izquierda de la flecha `=>`, y
También podemos asignarle un nombre a la función.
-{% scalafiddle %}
-```tut
+```scala mdoc
val addOne = (x: Int) => x + 1
println(addOne(1)) // 2
```
-{% endscalafiddle %}
Las funciones pueden tomar varios parámetros.
-{% scalafiddle %}
-```tut
+```scala mdoc
val add = (x: Int, y: Int) => x + y
println(add(1, 2)) // 3
```
-{% endscalafiddle %}
O ninguno.
-```tut
+```scala mdoc
val getTheAnswer = () => 42
println(getTheAnswer()) // 42
```
## Métodos
-Los métodos se parecen y comportan casi como a las funciones, pero se diferencian en dos aspectos clave:
+Los métodos se parecen y comportan casi como a las funciones, pero se diferencian en dos aspectos clave:
Un método se define con la palabra reservada `def`, seguida por el nombre del método, la lista de parámetros, el tipo de valores que el método devuelve, y el cuerpo del método.
-{% scalafiddle %}
-```tut
+```scala mdoc:nest
def add(x: Int, y: Int): Int = x + y
println(add(1, 2)) // 3
```
-{% endscalafiddle %}
Observe que el tipo de retorno se declara _después_ de la lista de parámetros, y separado con dos puntos, p.ej. `: Int`.
Un método puede tener varias listas de parámetros.
-{% scalafiddle %}
-```tut
+```scala mdoc
def addThenMultiply(x: Int, y: Int)(multiplier: Int): Int = (x + y) * multiplier
println(addThenMultiply(1, 2)(3)) // 9
```
-{% endscalafiddle %}
O ninguna lista de parámetros.
-```tut
+```scala mdoc
def name: String = System.getProperty("user.name")
println("Hello, " + name + "!")
```
@@ -168,15 +156,13 @@ Hay otras diferencias, pero para simplificar, podemos pensar que son similares a
Los métodos también pueden tener expresiones de varias lineas.
-{% scalafiddle %}
-```tut
+```scala mdoc
def getSquareString(input: Double): String = {
val square = input * input
square.toString
}
println(getSquareString(2.5)) // 6.25
```
-{% endscalafiddle %}
La ultima expresión en el cuerpo del método es el valor de retorno del mismo.
(Scala tiene una palabra reservada `return`, pero se usa raramente y no se aconseja usarla)
@@ -185,7 +171,7 @@ La ultima expresión en el cuerpo del método es el valor de retorno del mismo.
Una clase se define con la palabra reservada `class`, seguida del nombre, y la lista de parámetros del constructor.
-```tut
+```scala mdoc
class Greeter(prefix: String, suffix: String) {
def greet(name: String): Unit =
println(prefix + name + suffix)
@@ -196,7 +182,7 @@ El método `greet` tiene un tipo de retorno `Unit`, que indica que el método no
Se puede crear una instancia de una clase con la palabra reservada *new*.
-```tut
+```scala mdoc
val greeter = new Greeter("Hello, ", "!")
greeter.greet("Scala developer") // Hello, Scala developer!
```
@@ -208,13 +194,13 @@ Las clases se tratan en profundidad [más adelante](classes.html).
Hay un tipo especial de clases en Scala, las llamadas "case" classes. Por defecto, las instancias de una case class son inmutables, y se comparan con otras solo por los valores que contienen en cada campo.
Una case class se define con las palabras reservadas `case class`:
-```tut
+```scala mdoc
case class Point(x: Int, y: Int)
```
-Se puede crear una instancia de una `case class`, sin usar la palabra reservada `new`.
+Se puede crear una instancia de una `case class`, sin usar la palabra reservada `new`.
-```tut
+```scala mdoc
val point = Point(1, 2)
val anotherPoint = Point(1, 2)
val yetAnotherPoint = Point(2, 2)
@@ -222,17 +208,17 @@ val yetAnotherPoint = Point(2, 2)
Y son comparadas por valor.
-```tut
+```scala mdoc
if (point == anotherPoint) {
- println(point + " and " + anotherPoint + " are the same.")
+ println(s"$point and $anotherPoint are the same.")
} else {
- println(point + " and " + anotherPoint + " are different.")
+ println(s"$point and $anotherPoint are different.")
} // Point(1,2) and Point(1,2) are the same.
if (point == yetAnotherPoint) {
- println(point + " and " + yetAnotherPoint + " are the same.")
+ println(s"$point and $yetAnotherPoint are the same.")
} else {
- println(point + " and " + yetAnotherPoint + " are different.")
+ println(s"$point and $yetAnotherPoint are different.")
} // Point(1,2) and Point(2,2) are different.
```
@@ -244,7 +230,7 @@ Los objetos son instancias de una sola clase de su propia definición. Puedes pe
Un objeto se define usando la palabra reservada `object`.
-```tut
+```scala mdoc
object IdFactory {
private var counter = 0
def create(): Int = {
@@ -256,7 +242,7 @@ object IdFactory {
Para acceder al objeto, lo referencias por su nombre.
-```tut
+```scala mdoc
val newId: Int = IdFactory.create()
println(newId) // 1
val newerId: Int = IdFactory.create()
@@ -271,7 +257,7 @@ Los traits son tipos que contienen campos y métodos. Se pueden combinar múltip
Un trait se define usando la palabra reservada `trait`.
-```tut
+```scala mdoc:nest
trait Greeter {
def greet(name: String): Unit
}
@@ -279,8 +265,7 @@ trait Greeter {
Un `trait` también puede definir un método, o un valor, con una implementación por defecto.
-{% scalafiddle %}
-```tut
+```scala mdoc:reset
trait Greeter {
def greet(name: String): Unit =
println("Hello, " + name + "!")
@@ -289,7 +274,7 @@ trait Greeter {
Un `trait` también puede extender otros traits, usando la palabra clave `extends`. Asimismo, en un `trait` se puede redefinir la implementación de un método heredado, usando la palabra reservada `override`.
-```tut
+```scala mdoc
class DefaultGreeter extends Greeter
class CustomizableGreeter(prefix: String, postfix: String) extends Greeter {
@@ -304,7 +289,6 @@ greeter.greet("Scala developer") // Hello, Scala developer!
val customGreeter = new CustomizableGreeter("How are you, ", "?")
customGreeter.greet("Scala developer") // How are you, Scala developer?
```
-{% endscalafiddle %}
Aquí, `DefaultGreeter` extiende un solo trait, pero puede extender múltiples traits.
@@ -316,7 +300,7 @@ El método principal (main) es el punto donde comienza la ejecución de un progr
Usando un objeto, puedes definir el método principal de la siguiente forma:
-```tut
+```scala mdoc
object Main {
def main(args: Array[String]): Unit =
println("Hello, Scala developer!")
diff --git a/_es/tour/case-classes.md b/_es/tour/case-classes.md
index c47a3b9428..7a4989bde5 100644
--- a/_es/tour/case-classes.md
+++ b/_es/tour/case-classes.md
@@ -19,7 +19,7 @@ A continuación se muestra un ejemplo para una jerarquía de clases la cual cons
case class Fun(arg: String, body: Term) extends Term
case class App(f: Term, v: Term) extends Term
-Esta jerarquía de clases puede ser usada para representar términos de [cálculo lambda no tipado](https://www.ezresult.com/article/Lambda_calculus). Para facilitar la construcción de instancias de clases Case, Scala no requiere que se utilice la primitiva `new`. Simplemente es posible utilizar el nombre de la clase como una llamada a una función.
+Esta jerarquía de clases puede ser usada para representar términos de [cálculo lambda no tipado](https://es.wikipedia.org/wiki/C%C3%A1lculo_lambda). Para facilitar la construcción de instancias de clases Case, Scala no requiere que se utilice la primitiva `new`. Simplemente es posible utilizar el nombre de la clase como una llamada a una función.
Aquí un ejemplo:
diff --git a/_es/tour/classes.md b/_es/tour/classes.md
index 90bd399be0..3f3939b3bc 100644
--- a/_es/tour/classes.md
+++ b/_es/tour/classes.md
@@ -29,7 +29,7 @@ Las clases en Scala son parametrizadas con argumentos constructores (inicializad
Para instanciar una clase es necesario usar la primitiva `new`, como se muestra en el siguiente ejemplo:
object Classes {
- def main(args: Array[String]) {
+ def main(args: Array[String]): Unit = {
val pt = new Point(1, 2)
println(pt)
pt.move(10, 10)
diff --git a/_es/tour/generic-classes.md b/_es/tour/generic-classes.md
index 60d7626cab..b89b603ae3 100644
--- a/_es/tour/generic-classes.md
+++ b/_es/tour/generic-classes.md
@@ -14,12 +14,15 @@ Tal como en Java 5, Scala provee soporte nativo para clases parametrizados con t
A continuación se muestra un ejemplo:
- class Stack[T] {
- var elems: List[T] = Nil
- def push(x: T) { elems = x :: elems }
- def top: T = elems.head
- def pop() { elems = elems.tail }
- }
+```scala mdoc
+class Stack[T] {
+ var elems: List[T] = Nil
+ def push(x: T): Unit =
+ elems = x :: elems
+ def top: T = elems.head
+ def pop(): Unit = { elems = elems.tail }
+}
+```
La clase `Stack` modela una pila mutable que contiene elementos de un tipo arbitrario `T` (se dice, "una pila de elementos `T`"). Los parámetros de tipos nos aseguran que solo elementos legales (o sea, del tipo `T`) sean insertados en la pila (apilados). De forma similar, con los parámetros de tipo podemos expresar que el método `top` solo devolverá elementos de un tipo dado (en este caso `T`).
diff --git a/_es/tour/inner-classes.md b/_es/tour/inner-classes.md
index 461c72eeb1..9b04862d27 100644
--- a/_es/tour/inner-classes.md
+++ b/_es/tour/inner-classes.md
@@ -12,44 +12,50 @@ previous-page: implicit-parameters
En Scala es posible que las clases tengan como miembro otras clases. A diferencia de lenguajes similares a Java donde ese tipo de clases internas son miembros de las clases que las envuelven, en Scala esas clases internas están ligadas al objeto externo. Para ilustrar esta diferencia, vamos a mostrar rápidamente una implementación del tipo grafo:
- class Graph {
- class Node {
- var connectedNodes: List[Node] = Nil
- def connectTo(node: Node) {
- if (!connectedNodes.exists(node.equals)) {
- connectedNodes = node :: connectedNodes
- }
- }
- }
- var nodes: List[Node] = Nil
- def newNode: Node = {
- val res = new Node
- nodes = res :: nodes
- res
+```scala mdoc
+class Graph {
+ class Node {
+ var connectedNodes: List[Node] = Nil
+ def connectTo(node: Node): Unit = {
+ if (!connectedNodes.exists(node.equals)) {
+ connectedNodes = node :: connectedNodes
}
}
+ }
+ var nodes: List[Node] = Nil
+ def newNode: Node = {
+ val res = new Node
+ nodes = res :: nodes
+ res
+ }
+}
+```
En nuestro programa, los grafos son representados mediante una lista de nodos. Estos nodos son objetos de la clase interna `Node`. Cada nodo tiene una lista de vecinos que se almacena en la lista `connectedNodes`. Ahora podemos crear un grafo con algunos nodos y conectarlos incrementalmente:
- object GraphTest extends App {
- val g = new Graph
- val n1 = g.newNode
- val n2 = g.newNode
- val n3 = g.newNode
- n1.connectTo(n2)
- n3.connectTo(n1)
- }
+```scala mdoc:nest
+def graphTest: Unit = {
+ val g = new Graph
+ val n1 = g.newNode
+ val n2 = g.newNode
+ val n3 = g.newNode
+ n1.connectTo(n2)
+ n3.connectTo(n1)
+}
+```
Ahora vamos a completar el ejemplo con información relacionada al tipado para definir explícitamente de qué tipo son las entidades anteriormente definidas:
- object GraphTest extends App {
- val g: Graph = new Graph
- val n1: g.Node = g.newNode
- val n2: g.Node = g.newNode
- val n3: g.Node = g.newNode
- n1.connectTo(n2)
- n3.connectTo(n1)
- }
+```scala mdoc:nest
+def graphTest: Unit = {
+ val g: Graph = new Graph
+ val n1: g.Node = g.newNode
+ val n2: g.Node = g.newNode
+ val n3: g.Node = g.newNode
+ n1.connectTo(n2)
+ n3.connectTo(n1)
+}
+```
El código anterior muestra que al tipo del nodo le es prefijado con la instancia superior (que en nuestro ejemplo es `g`). Si ahora tenemos dos grafos, el sistema de tipado de Scala no nos permite mezclar nodos definidos en un grafo con nodos definidos en otro, ya que los nodos del otro grafo tienen un tipo diferente.
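+
+Como boceto de esta restricción (usando la clase `Graph` definida arriba), el compilador rechaza mezclar nodos de dos instancias distintas:
+
+```scala
+val g1 = new Graph
+val g2 = new Graph
+val n1 = g1.newNode
+n1.connectTo(g1.newNode) // correcto: ambos nodos pertenecen a g1
+// n1.connectTo(g2.newNode) // error de compilación: se espera un g1.Node, no un g2.Node
+```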
@@ -70,7 +76,7 @@ Por favor note que en Java la última linea del ejemplo anterior hubiese sido co
class Graph {
class Node {
var connectedNodes: List[Graph#Node] = Nil // Graph#Node en lugar de Node
- def connectTo(node: Graph#Node) {
+ def connectTo(node: Graph#Node): Unit = {
if (!connectedNodes.exists(node.equals)) {
connectedNodes = node :: connectedNodes
}
diff --git a/_es/tour/mixin-class-composition.md b/_es/tour/mixin-class-composition.md
index 9221859891..bd53274158 100644
--- a/_es/tour/mixin-class-composition.md
+++ b/_es/tour/mixin-class-composition.md
@@ -22,7 +22,7 @@ A diferencia de lenguajes que solo soportan _herencia simple_, Scala tiene una n
A continuación, considere una clase mezcla la cual extiende `AbsIterator` con un método `foreach` el cual aplica una función dada a cada elemento retornado por el iterador. Para definir una clase que puede usarse como una clase mezcla usamos la palabra clave `trait`.
trait RichIterator extends AbsIterator {
- def foreach(f: T => Unit) { while (hasNext) f(next()) }
+ def foreach(f: T => Unit): Unit = { while (hasNext) f(next()) }
}
Aquí se muestra una clase iterador concreta, la cual retorna caracteres sucesivos de una cadena de caracteres dada:
@@ -37,7 +37,7 @@ Aquí se muestra una clase iterador concreta, la cual retorna caracteres sucesiv
Nos gustaría combinar la funcionalidad de `StringIterator` y `RichIterator` en una sola clase. Solo con herencia simple e interfaces esto es imposible, ya que ambas clases contienen implementaciones para sus miembros. Scala nos ayuda con sus _composiciones de clases mezcladas_. Permite a los programadores reutilizar el delta de la definición de una clase, esto es, todas las nuevas definiciones que no son heredadas. Este mecanismo hace posible combinar `StringIterator` con `RichIterator`, como es hecho en el siguiente programa, el cual imprime una columna de todos los caracteres de una cadena de caracteres dada.
object StringIteratorTest {
- def main(args: Array[String]) {
+ def main(args: Array[String]): Unit = {
class Iter extends StringIterator("Scala") with RichIterator
val iter = new Iter
iter foreach println
diff --git a/_es/tour/multiple-parameter-lists.md b/_es/tour/multiple-parameter-lists.md
index 79d2318e3e..83b7218c0b 100644
--- a/_es/tour/multiple-parameter-lists.md
+++ b/_es/tour/multiple-parameter-lists.md
@@ -6,7 +6,7 @@ partof: scala-tour
num: 15
language: es
-next-page: automatic-closures
+next-page: operators
previous-page: nested-functions
---
@@ -18,7 +18,7 @@ Los métodos pueden definir múltiples listas de parámetros. Cuando un método
A continuación hay un ejemplo, tal y como se define en el trait `TraversableOnce` en el API de colecciones de Scala:
-```
+```scala mdoc:fail
def foldLeft[B](z: B)(op: (B, A) => B): B
```
@@ -26,19 +26,16 @@ def foldLeft[B](z: B)(op: (B, A) => B): B
Comenzando con un valor inicial 0, `foldLeft` aplica la función `(m, n) => m + n` a cada uno de los elementos de la lista y al valor acumulado previo.
-{% scalafiddle %}
-```tut
+```scala mdoc
val numbers = List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
val res = numbers.foldLeft(0)((m, n) => m + n)
println(res) // 55
```
-{% endscalafiddle %}
A continuación se muestra otro ejemplo:
-{% scalafiddle %}
-```tut
+```scala mdoc
object CurryTest extends App {
def filter(xs: List[Int], p: Int => Boolean): List[Int] =
@@ -53,15 +50,15 @@ A continuación se muestra otro ejemplo:
println(filter(nums, modN(3)))
}
```
-{% endscalafiddle %}
_Nota: el método `modN` está parcialmente aplicado en las dos llamadas a `filter`; esto significa que solo su primer argumento es realmente aplicado. El término `modN(2)` devuelve una función de tipo `Int => Boolean` y es por eso un posible candidato para el segundo argumento de la función `filter`._
Aquí se muestra la salida del programa anterior:
- List(2,4,6,8)
- List(3,6)
-
+```scala mdoc
+List(2,4,6,8)
+List(3,6)
+```
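+
+Como boceto de la nota anterior sobre la aplicación parcial, asumiendo una definición de `modN` con dos listas de parámetros como la del ejemplo:
+
+```scala
+def modN(n: Int)(x: Int): Boolean = (x % n) == 0
+
+// Solo se aplica la primera lista de parámetros; el resultado es una función Int => Boolean
+val esPar: Int => Boolean = modN(2)
+esPar(4) // true
+esPar(5) // false
+```
+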
### Casos de uso
@@ -72,19 +69,19 @@ Casos de uso sugeridos para múltiples listas de parámetros incluyen:
En Scala, la inferencia de tipos se realiza parámetro a parámetro.
Suponer que se dispone del siguiente método:
-```tut
+```scala mdoc
def foldLeft1[A, B](as: List[A], b0: B, op: (B, A) => B) = ???
```
Si se invoca de la siguiente manera, se puede comprobar que no compila correctamente:
-```tut:fail
+```scala mdoc:fail
def notPossible = foldLeft1(numbers, 0, _ + _)
```
Debes invocarlo de alguna de las maneras propuestas a continuación:
-```tut
+```scala mdoc
def firstWay = foldLeft1[Int, Int](numbers, 0, _ + _)
def secondWay = foldLeft1(numbers, 0, (a: Int, b: Int) => a + b)
```
@@ -93,7 +90,7 @@ Esto se debe a que Scala no será capaz de inferir el tipo de la función `_ + _
Moviéndo el parámetro `op` a su propia lista de parámetros, los tipos de `A` y `B` son inferidos en la primera lista de parámetros.
Una vez se han inferido sus tipos, estos están disponibles para la segunda lista de parámetros y `_ + _ ` podrá casar con los tipos inferidos `(Int, Int) => Int`
-```tut
+```scala mdoc
def foldLeft2[A, B](as: List[A], b0: B)(op: (B, A) => B) = ???
def possible = foldLeft2(numbers, 0)(_ + _)
```
@@ -107,7 +104,7 @@ Para especificar solamente ciertos parámetros como [`implicit`](https://docs.sc
Un ejemplo de esto se muestra a continuación:
-```
+```scala mdoc
def execute(arg: Int)(implicit ec: scala.concurrent.ExecutionContext) = ???
```
@@ -117,7 +114,7 @@ Cuando un método es invocado con menos parámetros que los que están declarado
Por ejemplo,
-```tut
+```scala mdoc:nest
val numbers = List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
val numberFunc = numbers.foldLeft(List[Int]()) _
diff --git a/_es/tour/named-arguments.md b/_es/tour/named-arguments.md
index 38dd0574d4..fe38fe15d4 100644
--- a/_es/tour/named-arguments.md
+++ b/_es/tour/named-arguments.md
@@ -11,24 +11,24 @@ previous-page: default-parameter-values
En la invocación de métodos y funciones se puede usar el nombre de las variables explícitamente en la llamada, de la siguiente manera:
- def imprimirNombre(nombre:String, apellido:String) = {
+ def imprimirNombre(nombre: String, apellido: String) = {
println(nombre + " " + apellido)
}
imprimirNombre("John","Smith")
// Imprime "John Smith"
- imprimirNombre(first = "John",last = "Smith")
+ imprimirNombre(nombre = "John", apellido = "Smith")
// Imprime "John Smith"
- imprimirNombre(last = "Smith",first = "John")
+ imprimirNombre(apellido = "Smith", nombre = "John")
// Imprime "John Smith"
Note que una vez que se utilizan parámetros nombrados en la llamada, el orden no importa, mientras todos los parámetros sean nombrados. Esta característica funciona bien en conjunción con valores de parámetros por defecto:
- def imprimirNombre(nombre:String = "John", apellido:String = "Smith") = {
+ def imprimirNombre(nombre: String = "John", apellido: String = "Smith") = {
println(nombre + " " + apellido)
}
- printName(apellido = "Jones")
+ imprimirNombre(apellido = "Jones")
// Imprime "John Jones"
language: es
diff --git a/_es/tour/operators.md b/_es/tour/operators.md
index a2d3b5e4be..6aeb98e046 100644
--- a/_es/tour/operators.md
+++ b/_es/tour/operators.md
@@ -7,7 +7,7 @@ num: 17
language: es
next-page: higher-order-functions
-previous-page: automatic-closures
+previous-page: multiple-parameter-lists
---
En Scala, cualquier método el cual reciba un solo parámetro puede ser usado como un *operador de infijo (infix)*. Aquí se muestra la definición de la clase `MyBool`, la cual define tres métodos `and`, `or`, y `negate`.
diff --git a/_es/tour/self-types.md b/_es/tour/self-types.md
index 79714212a7..df02b7dc0a 100644
--- a/_es/tour/self-types.md
+++ b/_es/tour/self-types.md
@@ -91,7 +91,7 @@ Por favor nótese que en esta clase nos es posible instanciar `NodoImpl` porque
Aquí hay un ejemplo de uso de la clase `GrafoDirigidoConcreto`:
- object GraphTest extends App {
+ def graphTest: Unit = {
val g: Grafo = new GrafoDirigidoConcreto
val n1 = g.agregarNodo
val n2 = g.agregarNodo
diff --git a/_es/tour/singleton-objects.md b/_es/tour/singleton-objects.md
index 83aa22ef9b..dceed2d7ad 100644
--- a/_es/tour/singleton-objects.md
+++ b/_es/tour/singleton-objects.md
@@ -26,7 +26,7 @@ Un objeto singleton puede extender clases y _traits_. De hecho, una [clase Case]
## Acompañantes ##
-La mayoría de los objetos singleton no están solos, sino que en realidad están asociados con clases del mismo nombre. El "objeto singleton del mismo nombre" de una case Case, mencionada anteriormente es un ejemplo de esto. Cuando esto sucede, el objeto singleton es llamado el *objeto acompañante* de la clase, y la clase es a su vez llamada la *clase acompañante* del objeto.
+La mayoría de los objetos singleton no están solos, sino que en realidad están asociados con clases del mismo nombre. El "objeto singleton del mismo nombre" de una clase Case, mencionada anteriormente, es un ejemplo de esto. Cuando esto sucede, el objeto singleton es llamado el *objeto acompañante* de la clase, y la clase es a su vez llamada la *clase acompañante* del objeto.
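+
+Un boceto mínimo (la clase `Circulo` es hipotética) de una clase con su objeto acompañante:
+
+```scala
+class Circulo(val radio: Double) {
+  // La clase puede acceder a los miembros privados de su acompañante
+  def area: Double = Circulo.Pi * radio * radio
+}
+
+object Circulo {
+  private val Pi = 3.14159
+  // Método de fábrica definido en el objeto acompañante
+  def unitario: Circulo = new Circulo(1.0)
+}
+```
+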
[Scaladoc](/style/scaladoc.html) proporciona un soporte especial para ir y venir entre una clase y su acompañante: Si el gran círculo conteniendo la “C” o la “O” tiene su borde inferior doblado hacia adentro, es posible hacer click en el círculo para ir a su acompañante.
diff --git a/_es/tour/tour-of-scala.md b/_es/tour/tour-of-scala.md
index 19b4f60af8..b742b271ab 100644
--- a/_es/tour/tour-of-scala.md
+++ b/_es/tour/tour-of-scala.md
@@ -37,7 +37,6 @@ El [mecanismo de inferencia de tipos locales](type-inference.html) se encarga de
En la práctica, el desarrollo de aplicaciones específicas para un dominio generalmente requiere de "Lenguajes de dominio específico" (DSL). Scala provee una única combinación de mecanismos del lenguaje que simplifican la creación de construcciones propias del lenguaje en forma de bibliotecas:
* cualquier método puede ser usado como un operador de [infijo o postfijo](operators.html)
-* [las closures son construidas automáticamente dependiendo del tipo esperado](automatic-closures.html) (tipos objetivo).
El uso conjunto de ambas características facilita la definición de nuevas sentencias sin tener que extender la sintaxis y sin usar facilidades de meta-programación como las macros.
diff --git a/_es/tour/tuples.md b/_es/tour/tuples.md
index 004b06beac..27ba0d9819 100644
--- a/_es/tour/tuples.md
+++ b/_es/tour/tuples.md
@@ -18,7 +18,7 @@ un método.
Una tupla con dos elementos puede ser creada del siguiente modo:
-```tut
+```scala mdoc
val ingredient = ("Sugar", 25)
```
@@ -37,7 +37,7 @@ Cada clase tiene tantos parámetros como número de elementos.
Una forma de acceder a los elementos de una tupla es por posición.
Los elementos concretos se llaman `_1`, `_2`, y así sucesivamente.
-```tut
+```scala mdoc
println(ingredient._1) // Sugar
println(ingredient._2) // 25
```
@@ -46,7 +46,7 @@ println(ingredient._2) // 25
Una tupla también puede ser dividida/expandida usando reconocimiento de patrones (pattern matching):
-```tut
+```scala mdoc
val (name, quantity) = ingredient
println(name) // Sugar
println(quantity) // 25
@@ -57,7 +57,7 @@ En esta ocasión el tipo de `name` es inferido como `String` y el de
A continuación otro ejemplo de reconocimiento de patrones con tuplas:
-```tut
+```scala mdoc
val planets =
List(("Mercury", 57.9), ("Venus", 108.2), ("Earth", 149.6),
("Mars", 227.9), ("Jupiter", 778.3))
@@ -70,7 +70,7 @@ planets.foreach{
O en compresión de bucles `for`:
-```tut
+```scala mdoc
val numPairs = List((2, 5), (3, -7), (20, 56))
for ((a, b) <- numPairs) {
println(a * b)
diff --git a/_es/tour/unified-types.md b/_es/tour/unified-types.md
index 5f37f7b47d..3a1db1e651 100644
--- a/_es/tour/unified-types.md
+++ b/_es/tour/unified-types.md
@@ -17,7 +17,7 @@ A diferencia de Java, todos los valores en Scala son objetos (incluyendo valores
## Jerarquía de clases en Scala ##
La superclase de todas las clases, `scala.Any`, tiene dos subclases directas, `scala.AnyVal` y `scala.AnyRef` que representan dos mundos de clases muy distintos: clases para valores y clases para referencias. Todas las clases para valores están predefinidas; se corresponden con los tipos primitivos de los lenguajes tipo Java. Todas las otras clases definen tipos referenciables. Las clases definidas por el usuario son definidas como tipos referenciables por defecto, es decir, siempre (indirectamente) extienden de `scala.AnyRef`. Toda clase definida por usuario en Scala extiende implicitamente el trait `scala.ScalaObject`. Clases pertenecientes a la infraestructura en la cual Scala esté corriendo (ejemplo, el ambiente de ejecución de Java) no extienden de `scala.ScalaObject`. Si Scala es usado en el contexto de un ambiente de ejecución de Java, entonces `scala.AnyRef` corresponde a `java.lang.Object`.
-Por favor note que el diagrama superior también muestra conversiones implícitas llamadas viestas entre las clases para valores.
+Por favor note que el diagrama superior también muestra conversiones implícitas llamadas vistas entre las clases para valores.
Aquí se muestra un ejemplo que demuestra que tanto valores numéricos, de caracteres, buleanos y funciones son objetos, tal como cualquier otro objeto:
diff --git a/_es/tour/variances.md b/_es/tour/variances.md
index feedee8fbb..eb961061a8 100644
--- a/_es/tour/variances.md
+++ b/_es/tour/variances.md
@@ -14,7 +14,7 @@ Scala soporta anotaciones de varianza para parámetros de tipo para [clases gen
En el artículo sobre clases genéricas dimos un ejemplo de una pila mutable. Explicamos que el tipo definido por la clase `Stack[T]` es objeto de subtipos invariantes con respecto al parámetro de tipo. Esto puede restringir el reuso de la abstracción (la clase). Ahora derivaremos una implementación funcional (es decir, inmutable) para pilas que no tienen esta restricción. Nótese que este es un ejemplo avanzado que combina el uso de [métodos polimórficos](polymorphic-methods.html), [límites de tipado inferiores](lower-type-bounds.html), y anotaciones de parámetros de tipo covariante de una forma no trivial. Además hacemos uso de [clases internas](inner-classes.html) para encadenar los elementos de la pila sin enlaces explícitos.
-```tut
+```scala mdoc
class Stack[+T] {
def push[S >: T](elem: S): Stack[S] = new Stack[S] {
override def top: S = elem
diff --git a/_es/tutorials/scala-for-java-programmers.md b/_es/tutorials/scala-for-java-programmers.md
index f4cc568f84..120d93d316 100644
--- a/_es/tutorials/scala-for-java-programmers.md
+++ b/_es/tutorials/scala-for-java-programmers.md
@@ -18,7 +18,7 @@ Este documento provee una rápida introducción al lenguaje Scala como también
Como primer ejemplo, usaremos el programa *Hola mundo* estándar. No es muy fascinante, pero de esta manera resulta fácil demostrar el uso de herramientas de Scala sin saber demasiado acerca del lenguaje. Veamos como luce:
object HolaMundo {
- def main(args: Array[String]) {
+ def main(args: Array[String]): Unit = {
println("¡Hola, mundo!")
}
}
@@ -59,7 +59,7 @@ Las librerías de clases de Java definen clases de utilería poderosas, como `Da
import java.text.DateFormat._
object FrenchDate {
- def main(args: Array[String]) {
+ def main(args: Array[String]): Unit = {
val ahora = new Date
val df = getDateInstance(LONG, Locale.FRANCE)
println(df format ahora)
@@ -116,7 +116,7 @@ En el siguiente programa, la función del temporizador se llama `unaVezPorSegund
def tiempoVuela() {
println("El tiempo vuela como una flecha...")
}
- def main(args: Array[String]) {
+ def main(args: Array[String]): Unit = {
unaVezPorSegundo(tiempoVuela)
}
}
@@ -134,7 +134,7 @@ El programa anterior es fácil de entender, pero puede ser refinado aún más. P
Thread sleep 1000
}
}
- def main(args: Array[String]) {
+ def main(args: Array[String]): Unit = {
unaVezPorSegundo(
() => println("El tiempo vuela como una flecha...")
)
@@ -167,7 +167,7 @@ El compilador no es siempre capaz de inferir los tipos como lo hace aquí, y des
Un pequeño problema de los métodos `re` e `im` es que para poder llamarlos es necesario agregar un par de paréntesis vacíos después de sus nombres, como muestra el siguiente ejemplo:
object NumerosComplejos {
- def main(args: Array[String]) {
+ def main(args: Array[String]): Unit = {
val c = new Complejo(1.2, 3.4)
println("Parte imaginaria: " + c.im())
}
@@ -282,7 +282,7 @@ Esta función introduce dos nuevos conceptos relacionados al pattern matching. P
No hemos explorado el completo poder del pattern matching aún, pero nos detendremos aquí para mantener este documento corto. Todavía nos queda pendiente ver cómo funcionan las dos funciones de arriba en un ejemplo real. Para ese propósito, escribamos una función main simple que realice algunas operaciones sobre la expresión `(x+x)+(7+y)`: primero computa su valor en el entorno `{ x -> 5, y -> 7 }` y después computa su derivada con respecto a `x` y después a `y`.
- def main(args: Array[String]) {
+ def main(args: Array[String]): Unit = {
val exp: Arbol = Sum(Sum(Var("x"),Var("x")),Sum(Const(7),Var("y")))
val ent: Entorno = { case "x" => 5 case "y" => 7 }
println("Expresión: " + exp)
@@ -386,7 +386,7 @@ El ejemplo anterior introduce a las variables en Scala, que no deberían requeri
Para utilizar esta clase `Referencia`, uno necesita especificar qué tipo utilizar por el parámetro `T`, es decir, el tipo del elemento contenido por la referencia. Por ejemplo, para crear y utilizar una referencia que contenga un entero, podríamos escribir lo siguiente:
object ReferenciaEntero {
- def main(args: Array[String]) {
+ def main(args: Array[String]): Unit = {
val ref = new Referencia[Int]
ref.set(13)
println("La referencia tiene la mitad de " + (ref.get * 2))
diff --git a/_fr/getting-started/install-scala.md b/_fr/getting-started/install-scala.md
new file mode 100644
index 0000000000..76f7c537f9
--- /dev/null
+++ b/_fr/getting-started/install-scala.md
@@ -0,0 +1,198 @@
+---
+layout: singlepage-overview
+title: Démarrage
+partof: getting-started
+language: fr
+includeTOC: true
+---
+
+Les instructions ci-dessous couvrent à la fois Scala 2 et Scala 3.
+
+## Essayer Scala sans installation
+
+Pour commencer à expérimenter Scala sans plus attendre, utilisez “Scastie” dans votre navigateur. _Scastie_ est un environnement "bac à sable" en ligne, où vous pouvez tester Scala, afin de comprendre comment fonctionne le langage et avec un accès à tous les compilateurs Scala et les librairies publiées.
+
+> Scastie supporte à la fois Scala 2 et Scala 3, en proposant Scala 3 par défaut.
+> Si vous cherchez à tester un morceau de code avec Scala 2
+> [cliquez ici](https://scastie.scala-lang.org/MHc7C9iiTbGfeSAvg8CKAA).
+
+## Installer Scala sur votre ordinateur
+
+Installer Scala veut dire installer différents outils en ligne de commande, comme le compilateur Scala et les outils de build.
+Nous recommandons l'utilisation de l'outil d'installation "Coursier" qui va automatiquement installer toutes les dépendances, mais vous pouvez aussi installer chaque outil à la main.
+
+### Utilisation de l'installateur Scala (recommandé)
+
+L'installateur Scala est un outil nommé [Coursier](https://get-coursier.io/docs/cli-overview), la commande principale de l'outil est `cs`.
+Il s'assure que la JVM et les outils standards de Scala sont installés sur votre système.
+Installez-le sur votre système avec les instructions suivantes.
+
+
+{% tabs install-cs-setup-tabs class=platform-os-options %}
+
+
+{% tab macOS for=install-cs-setup-tabs %}
+{% include code-snippet.html language='bash' codeSnippet=site.data.setup-scala.macOS-brew %}
+{% altDetails cs-setup-macos-nobrew "Alternativement, si vous n'utilisez pas Homebrew:" %}
+ {% include code-snippet.html language='bash' codeSnippet=site.data.setup-scala.macOS-x86-64 %}
+{% endaltDetails %}
+{% endtab %}
+
+
+
+{% tab Linux for=install-cs-setup-tabs %}
+ {% include code-snippet.html language='bash' codeSnippet=site.data.setup-scala.linux-x86-64 %}
+{% endtab %}
+
+
+
+{% tab Windows for=install-cs-setup-tabs %}
+ Téléchargez et exécutez [l'installateur Scala pour Windows]({{site.data.setup-scala.windows-link}}) basé sur Coursier.
+{% endtab %}
+
+
+
+{% tab Other for=install-cs-setup-tabs defaultTab %}
+
+ Suivez
+ [les instructions pour installer la commande `cs`](https://get-coursier.io/docs/cli-installation)
+ puis exécutez `./cs setup`.
+{% endtab %}
+
+
+{% endtabs %}
+
+
+En plus de gérer les JVMs, `cs setup` installe aussi des utilitaires en ligne de commande :
+
+- Un JDK (si vous n'en avez pas déjà un)
+- L'outil de construction de package [sbt](https://www.scala-sbt.org/)
+- [Ammonite](https://ammonite.io/), un REPL amélioré
+- [scalafmt](https://scalameta.org/scalafmt/), le formateur de code Scala
+- `scalac` (le compilateur Scala 2)
+- `scala` (le REPL et le lanceur de script Scala 2).
+
+Pour plus d'informations à propos de `cs`, vous pouvez lire la page suivante :
+[coursier-cli documentation](https://get-coursier.io/docs/cli-overview).
+
+> Actuellement, `cs setup` installe le compilateur Scala 2 et le lanceur
+> (les commandes `scalac` et `scala` respectivement). Ce n'est pas un problème,
+> car la plupart des projets utilisent un outil de construction
+> de package qui fonctionne à la fois pour Scala 2 et Scala 3.
+> Cependant, vous pouvez installer le compilateur et le lanceur Scala 3 en ligne de commande,
+> en exécutant les commandes suivantes :
+> ```
+> $ cs install scala3-compiler
+> $ cs install scala3
+> ```
+
+### ...ou manuellement
+
+Vous avez seulement besoin de deux outils pour compiler, lancer, tester et packager un projet Scala: Java 8 ou 11, et sbt.
+Pour les installer manuellement :
+
+1. Si vous n'avez pas Java 8 ou 11 installé, téléchargez
+ Java depuis [Oracle Java 8](https://www.oracle.com/java/technologies/javase-jdk8-downloads.html), [Oracle Java 11](https://www.oracle.com/java/technologies/javase-jdk11-downloads.html),
+ ou [AdoptOpenJDK 8/11](https://adoptopenjdk.net/). Référez-vous à la page [JDK Compatibility](/overviews/jdk-compatibility/overview.html) pour les détails de compatibilité entre Java et Scala.
+1. Installez [sbt](https://www.scala-sbt.org/download.html)
+
+## Créer un projet "Hello World" avec sbt
+
+Une fois que vous avez installé sbt, vous pouvez créer un projet Scala, comme expliqué dans la section suivante.
+
+Pour créer un projet, vous pouvez soit utiliser la ligne de commande, soit un IDE.
+Si vous êtes habitué à la ligne de commande, nous recommandons cette approche.
+
+### Utiliser la ligne de commande
+
+sbt est un outil de construction de package pour Scala : il compile, lance et teste votre code Scala.
+(Il peut aussi publier les librairies et faire beaucoup d'autres tâches.)
+
+Pour créer un nouveau projet Scala avec sbt :
+
+1. `cd` dans un répertoire vide.
+1. Lancez la commande `sbt new scala/scala3.g8` pour créer un projet Scala 3, ou `sbt new scala/hello-world.g8` pour créer un projet Scala 2.
+ Cela va télécharger un projet modèle depuis GitHub.
+ Cela va aussi créer un dossier `target`, que vous pouvez ignorer.
+1. Quand cela vous est demandé, nommez votre application `hello-world`. Cela va créer un projet appelé "hello-world".
+1. Voyons ce que nous venons de générer :
+
+```
+- hello-world
+ - project (sbt utilise ce dossier pour ses propres fichiers)
+ - build.properties
+ - build.sbt (fichier de définition de la construction du package pour sbt)
+ - src
+ - main
+ - scala (tout votre code Scala doit être placé ici)
+ - Main.scala (Point d'entrée du programme) <-- c'est tout ce dont nous avons besoin pour le moment
+```
+
+Vous pouvez trouver plus de documentation à propos de sbt dans le [Scala Book](/scala3/book/tools-sbt.html) ([Lien](/overviews/scala-book/scala-build-tool-sbt.html) vers la version Scala 2) et sur la [documentation](https://www.scala-sbt.org/1.x/docs/index.html) officielle de sbt.
+
+### Avec un IDE
+
+Vous pouvez ignorer le reste de cette page et aller directement sur [Building a Scala Project with IntelliJ and sbt](/getting-started/intellij-track/building-a-scala-project-with-intellij-and-sbt.html).
+
+
+## Ouvrir le projet hello-world
+
+Utilisons un IDE pour ouvrir le projet. Les plus populaires sont IntelliJ et VSCode.
+Ils proposent tous deux des fonctionnalités avancées. D'[autres éditeurs](https://scalameta.org/metals/docs/editors/overview.html) sont également disponibles.
+
+### Avec IntelliJ
+
+1. Téléchargez et installez [IntelliJ Community Edition](https://www.jetbrains.com/idea/download/)
+1. Installez l'extension Scala en suivant [les instructions IntelliJ pour installer des extensions](https://www.jetbrains.com/help/idea/managing-plugins.html)
+1. Ouvrez le fichier `build.sbt` puis choisissez *Open as a project*
+
+### Avec VSCode et metals
+
+1. Téléchargez [VSCode](https://code.visualstudio.com/Download)
+1. Installez l'extension Metals depuis [la marketplace](https://marketplace.visualstudio.com/items?itemName=scalameta.metals)
+1. Ensuite, ouvrez le répertoire contenant le fichier `build.sbt` (cela doit être le dossier `hello-world` si vous avez suivi les instructions précédentes). Choisissez *Import build* lorsque cela vous est demandé.
+
+> [Metals](https://scalameta.org/metals) est un "Serveur de langage Scala" qui fournit une aide pour écrire du code Scala dans VSCode et d'autres éditeurs [Atom, Sublime Text, autres ...](https://scalameta.org/metals/docs/editors/overview.html), en utilisant le [Language Server Protocol (LSP)](https://microsoft.github.io/language-server-protocol/).
+> En arrière plan, Metals communique avec l'outil de construction de package en utilisant
+> le [Build Server Protocol (BSP)](https://build-server-protocol.github.io/).
+> Pour plus de détails sur le fonctionnement de Metals, suivez [“Write Scala in VS Code, Vim, Emacs, Atom and Sublime Text with Metals”](https://www.scala-lang.org/2019/04/16/metals.html).
+
+### Essayer avec le code source
+
+Ouvrez ces deux fichiers dans votre IDE :
+
+- _build.sbt_
+- _src/main/scala/Main.scala_
+
+Quand vous lancerez votre projet à l'étape suivante, la configuration dans _build.sbt_ sera utilisée pour lancer le code dans _src/main/scala/Main.scala_.
+
+## Lancer Hello World
+
+Si vous êtes habitué à votre IDE, vous pouvez lancer le code dans _Main.scala_ depuis celui-ci.
+
+Sinon, vous pouvez lancer l'application depuis le terminal avec ces étapes :
+
+1. `cd` vers `hello-world`.
+1. Lancez `sbt`. Cela va ouvrir la console sbt.
+1. Écrivez `~run`. Le symbole `~` est optionnel, il va relancer l'application à chaque sauvegarde de fichier.
+ Cela permet un cycle rapide de modification/relance/debug. sbt va aussi générer un dossier `target` que vous pouvez ignorer.
+
+Quand vous avez fini d'expérimenter avec ce projet, appuyez sur `[Entrée]` pour interrompre la commande `run`.
+Puis saisissez `exit` ou appuyez sur `[Ctrl+D]` pour quitter sbt et revenir à votre invite de commande.
+
+## Prochaines étapes
+
+Une fois que vous avez terminé le tutoriel ci-dessus, vous pouvez consulter :
+
+* [The Scala Book](/scala3/book/introduction.html) ([Lien](/overviews/scala-book/introduction.html) vers la version Scala 2), qui fournit un ensemble de courtes leçons et introduit les fonctionnalités principales de Scala.
+* [The Tour of Scala](/tour/tour-of-scala.html) pour une introduction des fonctionnalités Scala.
+* [Learning Courses](/online-courses.html), qui contient des tutoriels et des cours interactifs.
+* [Our list of some popular Scala books](/books.html).
+* [The migration guide](/scala3/guides/migration/compatibility-intro.html) pour vous aider à migrer votre code Scala 2 vers Scala 3.
+
+## Obtenir de l'aide
+Il y a plusieurs listes de diffusion et canaux de discussions instantanés si vous souhaitez rencontrer rapidement d'autres utilisateurs de Scala. Allez faire un tour sur notre page [community](https://scala-lang.org/community/) pour consulter la liste de ces ressources et obtenir de l'aide.
+
+Traduction par Antoine Pointeau.
diff --git a/_fr/tour/abstract-type-members.md b/_fr/tour/abstract-type-members.md
new file mode 100644
index 0000000000..68f1cdfd1e
--- /dev/null
+++ b/_fr/tour/abstract-type-members.md
@@ -0,0 +1,76 @@
+---
+layout: tour
+title: Abstract Type Members
+partof: scala-tour
+num: 25
+language: fr
+next-page: compound-types
+previous-page: inner-classes
+topics: abstract type members
+prerequisite-knowledge: variance, upper-type-bound
+---
+
+Les types abstraits, tels que les traits et les classes abstraites, peuvent avoir des membres type abstrait.
+Cela signifie que les implémentations concrètes définissent les types réels.
+Voici un exemple :
+
+```scala mdoc
+trait Buffer {
+ type T
+ val element: T
+}
+```
+
+Ici, nous avons défini un `type T` abstrait. Il est utilisé pour décrire le type de `element`. Nous pouvons étendre ce trait dans une classe abstraite, en ajoutant une borne de type supérieure à `T` pour le rendre plus spécifique.
+
+```scala mdoc
+abstract class SeqBuffer extends Buffer {
+ type U
+ type T <: Seq[U]
+ def length = element.length
+}
+```
+
+Remarquez comment nous pouvons utiliser un autre type abstrait `U` dans la spécification d'une borne supérieure pour `T`. Cette `class SeqBuffer` nous permet de stocker uniquement des séquences dans le tampon en indiquant que le type `T` doit être un sous-type de `Seq[U]` pour un nouveau type abstrait `U`.
+
+Les traits ou [classes](classes.html) avec des membres type abstrait sont souvent utilisés en combinaison avec des instanciations de classes anonymes. Pour illustrer cela, regardons maintenant un programme qui traite un "sequence buffer" qui fait référence à une liste d'entiers :
+
+```scala mdoc
+abstract class IntSeqBuffer extends SeqBuffer {
+ type U = Int
+}
+
+
+def newIntSeqBuf(elem1: Int, elem2: Int): IntSeqBuffer =
+ new IntSeqBuffer {
+ type T = List[U]
+ val element = List(elem1, elem2)
+ }
+val buf = newIntSeqBuf(7, 8)
+println("length = " + buf.length)
+println("content = " + buf.element)
+```
+
+Ici, la factory `newIntSeqBuf` utilise une implémentation de classe anonyme de `IntSeqBuffer` (c'est-à-dire `new IntSeqBuffer`) pour définir le type abstrait `T` comme étant le type concret `List[Int]`.
+
+Il est également possible de transformer des membres type abstrait en paramètres de type de classes et *vice versa*. Voici une version du code ci-dessus qui n'utilise que des paramètres de type :
+
+```scala mdoc:nest
+abstract class Buffer[+T] {
+ val element: T
+}
+abstract class SeqBuffer[U, +T <: Seq[U]] extends Buffer[T] {
+ def length = element.length
+}
+
+def newIntSeqBuf(e1: Int, e2: Int): SeqBuffer[Int, Seq[Int]] =
+ new SeqBuffer[Int, List[Int]] {
+ val element = List(e1, e2)
+ }
+
+val buf = newIntSeqBuf(7, 8)
+println("length = " + buf.length)
+println("content = " + buf.element)
+```
+
+Notez que nous devons utiliser ici [les annotations de variance](variances.html) (`+T <: Seq[U]`) afin de masquer le type concret d'implémentation de séquence dans l'objet renvoyé par la méthode `newIntSeqBuf`. De plus, il existe des cas où il n'est pas possible de remplacer les membres de type abstrait par des paramètres de type.
diff --git a/_fr/tour/annotations.md b/_fr/tour/annotations.md
new file mode 100644
index 0000000000..5f2b4cbf55
--- /dev/null
+++ b/_fr/tour/annotations.md
@@ -0,0 +1,12 @@
+---
+layout: tour
+title: Annotations
+partof: scala-tour
+
+num: 30
+
+language: fr
+
+next-page: packages-and-imports
+previous-page: by-name-parameters
+---
diff --git a/_fr/tour/basics.md b/_fr/tour/basics.md
new file mode 100644
index 0000000000..a16e3c7970
--- /dev/null
+++ b/_fr/tour/basics.md
@@ -0,0 +1,11 @@
+---
+layout: tour
+title: Basics
+partof: scala-tour
+
+num: 2
+language: fr
+
+next-page: unified-types
+previous-page: tour-of-scala
+---
diff --git a/_fr/tour/by-name-parameters.md b/_fr/tour/by-name-parameters.md
new file mode 100644
index 0000000000..917e78aede
--- /dev/null
+++ b/_fr/tour/by-name-parameters.md
@@ -0,0 +1,12 @@
+---
+layout: tour
+title: By-name Parameters
+partof: scala-tour
+
+num: 29
+
+language: fr
+
+next-page: annotations
+previous-page: operators
+---
diff --git a/_fr/tour/case-classes.md b/_fr/tour/case-classes.md
new file mode 100644
index 0000000000..66debb53f4
--- /dev/null
+++ b/_fr/tour/case-classes.md
@@ -0,0 +1,73 @@
+---
+layout: tour
+title: Case Classes
+partof: scala-tour
+
+num: 13
+
+language: fr
+
+next-page: pattern-matching
+previous-page: multiple-parameter-lists
+---
+
+Les classes de cas sont comme les autres classes avec quelques différences que nous allons présenter. Les classes de cas sont pratiques pour modéliser des données immuables. Dans la prochaine étape du tour, nous verrons comment elles peuvent être utilisées avec le [pattern matching](pattern-matching.html).
+
+## Définir une classe de cas
+
+Une classe de cas requiert au minimum le mot clef `case class`, un identifiant, et une liste de paramètres (qui peut être vide) :
+
+```scala mdoc
+case class Book(isbn: String)
+
+val frankenstein = Book("978-0486282114")
+```
+
+Notez que le mot clef `new` n'a pas été utilisé pour instancier la classe de cas `Book`. C'est parce que la classe de cas a une méthode `apply` par défaut qui prend en charge la construction de l'objet.
+
+Quand vous créez une classe de cas avec des paramètres, les paramètres sont des `val` publiques.
+
+```
+case class Message(sender: String, recipient: String, body: String)
+val message1 = Message("guillaume@quebec.ca", "jorge@catalonia.es", "Ça va ?")
+
+println(message1.sender) // prints guillaume@quebec.ca
+message1.sender = "travis@washington.us" // cette ligne ne compile pas
+```
+
+Vous ne pouvez pas réaffecter `message1.sender` parce que c'est une `val` (càd. une valeur immuable). Il est possible d'utiliser des `var` dans les classes de cas mais ce n'est pas recommandé.
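+
+Esquisse minimale (la classe `Compteur` est hypothétique) montrant qu'un paramètre déclaré en `var` reste réaffectable :
+
+```scala
+case class Compteur(var valeur: Int)
+
+val c = Compteur(0)
+c.valeur = 1      // compile, car `valeur` est une `var`
+println(c.valeur) // 1
+```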
+
+## Comparaison
+
+Les instances des classes de cas sont comparées structurellement et non par référence :
+
+```scala mdoc
+case class Message(sender: String, recipient: String, body: String)
+
+val message2 = Message("jorge@catalonia.es", "guillaume@quebec.ca", "Com va?")
+val message3 = Message("jorge@catalonia.es", "guillaume@quebec.ca", "Com va?")
+val messagesAreTheSame = message2 == message3 // true
+```
+
+Même si `message2` et `message3` font référence à des objets différents, les valeurs de chaque objet sont égales.
+
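+Pour contraster, une esquisse qui compare l'égalité structurelle `==` avec l'égalité de référence `eq` (définie sur `AnyRef`) :
+
+```scala
+case class Point(x: Int, y: Int)
+
+val p1 = Point(1, 2)
+val p2 = Point(1, 2)
+p1 == p2 // true : les valeurs sont comparées structurellement
+p1 eq p2 // false : ce sont deux instances distinctes en mémoire
+```
+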
+## Copier
+
+Vous pouvez créer une copie (superficielle) d'une instance de classe de cas simplement en utilisant la méthode `copy`. Vous pouvez optionnellement changer les arguments du constructeur.
+
+```scala mdoc:nest
+case class Message(sender: String, recipient: String, body: String)
+val message4 = Message("julien@bretagne.fr", "travis@washington.us", "Me zo o komz gant ma amezeg")
+val message5 = message4.copy(sender = message4.recipient, recipient = "claire@bourgogne.fr")
+message5.sender // travis@washington.us
+message5.recipient // claire@bourgogne.fr
+message5.body // "Me zo o komz gant ma amezeg"
+```
+
+Le destinataire (recipient) de `message4` est utilisé comme expéditeur (sender) du message `message5` mais le `body` du `message4` a été directement copié.
+
+## Plus d'informations
+
+* Apprenez-en plus sur les classes de cas dans [Scala Book](/overviews/scala-book/case-classes.html)
+
+Traduit par Antoine Pointeau.
diff --git a/_fr/tour/classes.md b/_fr/tour/classes.md
new file mode 100644
index 0000000000..40f56d0513
--- /dev/null
+++ b/_fr/tour/classes.md
@@ -0,0 +1,12 @@
+---
+layout: tour
+title: Classes
+partof: scala-tour
+
+num: 4
+
+language: fr
+
+next-page: traits
+previous-page: unified-types
+---
diff --git a/_fr/tour/compound-types.md b/_fr/tour/compound-types.md
new file mode 100644
index 0000000000..db813518b1
--- /dev/null
+++ b/_fr/tour/compound-types.md
@@ -0,0 +1,12 @@
+---
+layout: tour
+title: Compound Types
+partof: scala-tour
+
+num: 22
+
+language: fr
+
+next-page: self-types
+previous-page: abstract-type-members
+---
diff --git a/_fr/tour/default-parameter-values.md b/_fr/tour/default-parameter-values.md
new file mode 100644
index 0000000000..0f73ab1653
--- /dev/null
+++ b/_fr/tour/default-parameter-values.md
@@ -0,0 +1,12 @@
+---
+layout: tour
+title: Default Parameter Values
+partof: scala-tour
+
+num: 31
+
+language: fr
+
+next-page: named-arguments
+previous-page: annotations
+---
diff --git a/_fr/tour/extractor-objects.md b/_fr/tour/extractor-objects.md
new file mode 100644
index 0000000000..1f864b7f39
--- /dev/null
+++ b/_fr/tour/extractor-objects.md
@@ -0,0 +1,66 @@
+---
+layout: tour
+title: Extractor Objects
+partof: scala-tour
+
+num: 18
+
+language: fr
+
+next-page: for-comprehensions
+previous-page: regular-expression-patterns
+---
+
+Un objet extracteur est un objet avec une méthode `unapply`. Tandis que la méthode `apply` ressemble à un constructeur qui prend des arguments et crée un objet, `unapply` prend un objet et essaye de retourner ses arguments. Il est utilisé le plus souvent en filtrage par motif (*pattern matching*) ou avec les fonctions partielles.
+
+```scala mdoc
+import scala.util.Random
+
+object CustomerID {
+
+ def apply(name: String) = s"$name--${Random.nextLong()}"
+
+ def unapply(customerID: String): Option[String] = {
+ val stringArray: Array[String] = customerID.split("--")
+ if (stringArray.tail.nonEmpty) Some(stringArray.head) else None
+ }
+}
+
+val customer1ID = CustomerID("Sukyoung") // Sukyoung--23098234908
+customer1ID match {
+ case CustomerID(name) => println(name) // prints Sukyoung
+ case _ => println("Could not extract a CustomerID")
+}
+```
+
+La méthode `apply` crée une chaîne de caractères `CustomerID` depuis `name`. La méthode `unapply` fait l'inverse pour retrouver le `name`. Lorsqu'on appelle `CustomerID("Sukyoung")`, c'est un raccourci pour `CustomerID.apply("Sukyoung")`. Lorsqu'on appelle `case CustomerID(name) => println(name)`, on appelle la méthode `unapply` avec `CustomerID.unapply(customer1ID)`.
+
+Sachant qu'une définition de valeur peut utiliser une décomposition pour introduire une nouvelle variable, un extracteur peut être utilisé pour initialiser la variable, avec la méthode `unapply` pour fournir la valeur.
+
+```scala mdoc
+val customer2ID = CustomerID("Nico")
+val CustomerID(name) = customer2ID
+println(name) // prints Nico
+```
+
+C'est équivalent à `val name = CustomerID.unapply(customer2ID).get`.
+
+```scala mdoc
+val CustomerID(name2) = "--asdfasdfasdf"
+```
+
+S'il n'y a pas de correspondance, une `scala.MatchError` est levée :
+
+```scala
+val CustomerID(name3) = "-asdfasdfasdf"
+```
+
+Le type de retour de `unapply` doit être choisi comme suit :
+
+* Si c'est juste un test, retourner un `Boolean`. Par exemple, `case even()`.
+* Si cela retourne une seule sous-valeur de type T, retourner un `Option[T]`.
+* Si vous souhaitez retourner plusieurs sous-valeurs `T1,...,Tn`, groupez-les dans un tuple optionnel `Option[(T1,...,Tn)]`.
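+
+À titre d'esquisse (les objets `LongueurPaire` et `Domaine` sont hypothétiques), les formes `Boolean` et `Option[(T1,...,Tn)]` peuvent s'écrire ainsi :
+
+```scala
+object LongueurPaire {
+  // Forme "test" : unapply retourne un Boolean
+  def unapply(s: String): Boolean = s.length % 2 == 0
+}
+
+object Domaine {
+  // Forme "plusieurs sous-valeurs" : unapply retourne un Option[(String, String)]
+  def unapply(email: String): Option[(String, String)] =
+    email.split("@") match {
+      case Array(utilisateur, domaine) => Some((utilisateur, domaine))
+      case _                           => None
+    }
+}
+
+"abcd" match {
+  case LongueurPaire() => println("longueur paire")
+  case _               => println("longueur impaire")
+}
+
+"jorge@catalonia.es" match {
+  case Domaine(utilisateur, domaine) => println(domaine) // catalonia.es
+  case _                             => println("adresse invalide")
+}
+```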
+
+Parfois, le nombre de valeurs à extraire n'est pas fixe et on souhaiterait retourner un nombre arbitraire de valeurs, en fonction des données d'entrée. Pour ce cas, vous pouvez définir des extracteurs avec la méthode `unapplySeq` qui retourne un `Option[Seq[T]]`. Un exemple commun d'utilisation est la déconstruction d'une liste en utilisant `case List(x, y, z) =>`. Un autre est la décomposition d'une `String` en utilisant une expression régulière `Regex`, comme `case r(name, remainingFields @ _*) =>`.
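+
+Esquisse d'un extracteur à nombre variable de sous-valeurs (l'objet `Mots` est hypothétique) :
+
+```scala
+object Mots {
+  // Retourne toutes les "sous-valeurs" : ici, les mots de la phrase
+  def unapplySeq(phrase: String): Option[Seq[String]] =
+    Some(phrase.trim.split("\\s+").toSeq)
+}
+
+"le tour de Scala" match {
+  case Mots(premier, reste @ _*) =>
+    println(premier)             // le
+    println(reste.mkString(" ")) // tour de Scala
+  case _ =>
+    println("aucun mot")
+}
+```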
+
+Traduit par Antoine Pointeau.
diff --git a/_fr/tour/for-comprehensions.md b/_fr/tour/for-comprehensions.md
new file mode 100644
index 0000000000..ea4649ad39
--- /dev/null
+++ b/_fr/tour/for-comprehensions.md
@@ -0,0 +1,12 @@
+---
+layout: tour
+title: For Comprehensions
+partof: scala-tour
+
+num: 15
+
+language: fr
+
+next-page: generic-classes
+previous-page: extractor-objects
+---
diff --git a/_fr/tour/generic-classes.md b/_fr/tour/generic-classes.md
new file mode 100644
index 0000000000..6eeb2e8fea
--- /dev/null
+++ b/_fr/tour/generic-classes.md
@@ -0,0 +1,12 @@
+---
+layout: tour
+title: Generic Classes
+partof: scala-tour
+
+num: 16
+
+language: fr
+
+next-page: variances
+previous-page: extractor-objects
+---
diff --git a/_fr/tour/higher-order-functions.md b/_fr/tour/higher-order-functions.md
new file mode 100644
index 0000000000..513f6b619f
--- /dev/null
+++ b/_fr/tour/higher-order-functions.md
@@ -0,0 +1,123 @@
+---
+layout: tour
+title: Higher-order Functions
+partof: scala-tour
+
+num: 10
+
+language: fr
+
+next-page: nested-functions
+previous-page: mixin-class-composition
+---
+
+Les fonctions d'ordre supérieur prennent d'autres fonctions en paramètres ou retournent une fonction en résultat.
+C'est possible car les fonctions sont des valeurs de première classe en Scala.
+La terminologie peut devenir un peu confuse à ce point, et nous utilisons l'expression "fonction d'ordre supérieur" à la fois pour les méthodes et les fonctions qui prennent d'autres fonctions en paramètres ou retournent une fonction en résultat.
+
+Dans le monde du pur orienté objet, une bonne pratique est d'éviter d'exposer des méthodes paramétrées avec des fonctions qui pourraient exposer l'état interne de l'objet. Le fait d’exposer l'état interne de l'objet pourrait casser les invariants de l'objet lui-même ce qui violerait l'encapsulation.
+
+Un des exemples les plus communs est la fonction d'ordre supérieur `map` qui est disponible pour les collections en Scala.
+
+```scala mdoc
+val salaries = Seq(20000, 70000, 40000)
+val doubleSalary = (x: Int) => x * 2
+val newSalaries = salaries.map(doubleSalary) // List(40000, 140000, 80000)
+```
+
+`doubleSalary` est une fonction qui prend un seul entier, `x`, et retourne `x * 2`. La partie à gauche de la flèche `=>` est la liste de paramètres, et la valeur de l'expression à droite est ce qui est retourné. Sur la ligne 3, la fonction `doubleSalary` est appliquée à chaque élément dans la liste des salaires.
+
+Pour réduire le code, nous pouvons faire une fonction anonyme et la passer directement en argument de `map` :
+
+```scala mdoc:nest
+val salaries = Seq(20000, 70000, 40000)
+val newSalaries = salaries.map(x => x * 2) // List(40000, 140000, 80000)
+```
+
+Notez que `x` n'est pas déclaré comme un `Int` dans l'exemple ci-dessus. C'est parce que le compilateur peut inférer le type en se basant sur le type que la méthode `map` attend (voir [Currying](/tour/multiple-parameter-lists.html)). Une autre façon d'écrire le même morceau de code encore plus idiomatique serait :
+
+```scala mdoc:nest
+val salaries = Seq(20000, 70000, 40000)
+val newSalaries = salaries.map(_ * 2)
+```
+
+Sachant que le compilateur Scala sait déjà quel est le type des paramètres (un seul `Int`), vous pouvez fournir uniquement la partie de droite de la fonction.
+La seule contrepartie c'est que vous devez utiliser `_` à la place du nom du paramètre (c'était `x` dans l'exemple précédent).
+
+## Convertir les méthodes en fonctions
+
+Il est aussi possible de passer des méthodes comme arguments aux fonctions d'ordre supérieur, parce que le compilateur Scala va convertir la méthode en fonction.
+
+```scala mdoc
+case class WeeklyWeatherForecast(temperatures: Seq[Double]) {
+
+ private def convertCtoF(temp: Double) = temp * 1.8 + 32
+
+ def forecastInFahrenheit: Seq[Double] = temperatures.map(convertCtoF) // <-- passing the method convertCtoF
+}
+```
+
+Ici la méthode `convertCtoF` est passée à la fonction d'ordre supérieur `map`. C'est possible car le compilateur convertit `convertCtoF` vers la fonction `x => convertCtoF(x)` (note : `x` sera un nom généré qui sera garanti d'être unique dans le scope).
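+
+Par exemple, une utilisation possible de la classe ci-dessus (les températures choisies sont arbitraires) :
+
+```scala
+val previsions = WeeklyWeatherForecast(Seq(0.0, 10.0, 100.0))
+println(previsions.forecastInFahrenheit) // List(32.0, 50.0, 212.0)
+```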
+
+## Les fonctions qui acceptent des fonctions
+
+Une raison d'utiliser les fonctions d'ordre supérieur est de réduire le code redondant. Supposons que vous souhaitiez des méthodes qui augmentent le salaire de quelqu'un en fonction de différents facteurs. Sans créer de fonction d'ordre supérieur, cela ressemblerait à ça :
+
+```scala mdoc
+object SalaryRaiser {
+
+ def smallPromotion(salaries: List[Double]): List[Double] =
+ salaries.map(salary => salary * 1.1)
+
+ def greatPromotion(salaries: List[Double]): List[Double] =
+ salaries.map(salary => salary * math.log(salary))
+
+ def hugePromotion(salaries: List[Double]): List[Double] =
+ salaries.map(salary => salary * salary)
+}
+```
+
+Notez comment chacune de ces trois méthodes ne change que par le facteur de multiplication.
+Pour simplifier, vous pouvez extraire le code répété dans une fonction d'ordre supérieur comme ceci :
+
+```scala mdoc:nest
+object SalaryRaiser {
+
+ private def promotion(salaries: List[Double], promotionFunction: Double => Double): List[Double] =
+ salaries.map(promotionFunction)
+
+ def smallPromotion(salaries: List[Double]): List[Double] =
+ promotion(salaries, salary => salary * 1.1)
+
+ def greatPromotion(salaries: List[Double]): List[Double] =
+ promotion(salaries, salary => salary * math.log(salary))
+
+ def hugePromotion(salaries: List[Double]): List[Double] =
+ promotion(salaries, salary => salary * salary)
+}
+```
+
+La nouvelle méthode, `promotion`, prend les salaires plus une fonction du type `Double => Double` (càd. une fonction qui prend un Double et retourne un Double) et retourne le produit.
+
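+Par exemple, un appel possible (les montants sont arbitraires) :
+
+```scala
+val salaires = List(30000.0, 40000.0)
+SalaryRaiser.smallPromotion(salaires) // chaque salaire est multiplié par 1.1
+```
+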
+Les méthodes et les fonctions expriment généralement des comportements ou des transformations de données, donc avoir des fonctions qui composent en se basant sur d'autres fonctions peut aider à construire des mécanismes génériques. Ces opérations génériques reportent le verrouillage de l'intégralité du comportement de l'opération, donnant aux clients un moyen de contrôler ou de personnaliser davantage certaines parties de l'opération elle-même.
+
+## Les fonctions qui retournent des fonctions
+
+Il y a certains cas où vous voulez générer une fonction. Voici un exemple de méthode qui retourne une fonction.
+
+```scala mdoc
+def urlBuilder(ssl: Boolean, domainName: String): (String, String) => String = {
+ val schema = if (ssl) "https://" else "http://"
+ (endpoint: String, query: String) => s"$schema$domainName/$endpoint?$query"
+}
+
+val domainName = "www.example.com"
+def getURL = urlBuilder(ssl=true, domainName)
+val endpoint = "users"
+val query = "id=1"
+val url = getURL(endpoint, query) // "https://www.example.com/users?id=1": String
+```
+
+Notez le type de retour de urlBuilder `(String, String) => String`. Cela veut dire que la fonction anonyme retournée prend deux Strings et retourne une String. Dans ce cas, la fonction anonyme retournée est `(endpoint: String, query: String) => s"https://www.example.com/$endpoint?$query"`
+
+Traduit par Antoine Pointeau.
\ No newline at end of file
diff --git a/_fr/tour/implicit-conversions.md b/_fr/tour/implicit-conversions.md
new file mode 100644
index 0000000000..1030827e4b
--- /dev/null
+++ b/_fr/tour/implicit-conversions.md
@@ -0,0 +1,12 @@
+---
+layout: tour
+title: Implicit Conversions
+partof: scala-tour
+
+num: 25
+
+language: fr
+
+next-page: polymorphic-methods
+previous-page: implicit-parameters
+---
diff --git a/_fr/tour/implicit-parameters.md b/_fr/tour/implicit-parameters.md
new file mode 100644
index 0000000000..236dd136f5
--- /dev/null
+++ b/_fr/tour/implicit-parameters.md
@@ -0,0 +1,12 @@
+---
+layout: tour
+title: Implicit Parameters
+partof: scala-tour
+
+num: 24
+
+language: fr
+
+next-page: implicit-conversions
+previous-page: self-types
+---
diff --git a/_fr/tour/inner-classes.md b/_fr/tour/inner-classes.md
new file mode 100644
index 0000000000..a5df305ce5
--- /dev/null
+++ b/_fr/tour/inner-classes.md
@@ -0,0 +1,12 @@
+---
+layout: tour
+title: Inner Classes
+partof: scala-tour
+
+num: 20
+
+language: fr
+
+next-page: abstract-type-members
+previous-page: lower-type-bounds
+---
diff --git a/_fr/tour/lower-type-bounds.md b/_fr/tour/lower-type-bounds.md
new file mode 100644
index 0000000000..eb6ffb785c
--- /dev/null
+++ b/_fr/tour/lower-type-bounds.md
@@ -0,0 +1,12 @@
+---
+layout: tour
+title: Lower Type Bounds
+partof: scala-tour
+
+num: 19
+
+language: fr
+
+next-page: inner-classes
+previous-page: upper-type-bounds
+---
diff --git a/_fr/tour/mixin-class-composition.md b/_fr/tour/mixin-class-composition.md
new file mode 100644
index 0000000000..8d1b823c11
--- /dev/null
+++ b/_fr/tour/mixin-class-composition.md
@@ -0,0 +1,12 @@
+---
+layout: tour
+title: Class Composition with Mixins
+partof: scala-tour
+
+num: 6
+
+language: fr
+
+next-page: higher-order-functions
+previous-page: tuples
+---
diff --git a/_fr/tour/multiple-parameter-lists.md b/_fr/tour/multiple-parameter-lists.md
new file mode 100644
index 0000000000..476e918cc1
--- /dev/null
+++ b/_fr/tour/multiple-parameter-lists.md
@@ -0,0 +1,12 @@
+---
+layout: tour
+title: Multiple Parameter Lists (Currying)
+partof: scala-tour
+
+num: 9
+
+language: fr
+
+next-page: case-classes
+previous-page: nested-functions
+---
diff --git a/_fr/tour/named-arguments.md b/_fr/tour/named-arguments.md
new file mode 100644
index 0000000000..fec11428a3
--- /dev/null
+++ b/_fr/tour/named-arguments.md
@@ -0,0 +1,34 @@
+---
+layout: tour
+title: Named Arguments
+partof: scala-tour
+
+num: 6
+
+language: fr
+
+next-page: traits
+previous-page: default-parameter-values
+---
+
+En appelant des méthodes, vous pouvez nommer leurs arguments comme ceci :
+
+```scala mdoc
+def printName(first: String, last: String): Unit = {
+ println(first + " " + last)
+}
+
+printName("John", "Smith") // Prints "John Smith"
+printName(first = "John", last = "Smith") // Prints "John Smith"
+printName(last = "Smith", first = "John") // Prints "John Smith"
+```
+
+Notez comment l'ordre des arguments nommés peut être réarrangé. Cependant, si certains arguments sont nommés et d'autres non, les arguments non nommés doivent venir en premier et suivre l'ordre des paramètres dans la signature de la méthode.
+
+```scala mdoc:fail
+printName(last = "Smith", "john") // erreur: argument positionnel après un argument nommé
+```
+
+Les arguments nommés fonctionnent avec les appels de méthodes Java, mais seulement si la librairie Java en question a été compilée avec `-parameters`.
+
+Traduction par Antoine Pointeau.
\ No newline at end of file
diff --git a/_fr/tour/nested-functions.md b/_fr/tour/nested-functions.md
new file mode 100644
index 0000000000..f92045364f
--- /dev/null
+++ b/_fr/tour/nested-functions.md
@@ -0,0 +1,12 @@
+---
+layout: tour
+title: Nested Methods
+partof: scala-tour
+
+num: 8
+
+language: fr
+
+next-page: multiple-parameter-lists
+previous-page: higher-order-functions
+---
diff --git a/_fr/tour/operators.md b/_fr/tour/operators.md
new file mode 100644
index 0000000000..59c697727e
--- /dev/null
+++ b/_fr/tour/operators.md
@@ -0,0 +1,12 @@
+---
+layout: tour
+title: Operators
+partof: scala-tour
+
+num: 28
+
+language: fr
+
+next-page: by-name-parameters
+previous-page: type-inference
+---
diff --git a/_fr/tour/package-objects.md b/_fr/tour/package-objects.md
new file mode 100644
index 0000000000..80cfb5e055
--- /dev/null
+++ b/_fr/tour/package-objects.md
@@ -0,0 +1,9 @@
+---
+layout: tour
+title: Package Objects
+language: fr
+partof: scala-tour
+
+num: 36
+previous-page: packages-and-imports
+---
diff --git a/_fr/tour/packages-and-imports.md b/_fr/tour/packages-and-imports.md
new file mode 100644
index 0000000000..8edac3b01c
--- /dev/null
+++ b/_fr/tour/packages-and-imports.md
@@ -0,0 +1,12 @@
+---
+layout: tour
+title: Packages and Imports
+partof: scala-tour
+
+num: 33
+
+language: fr
+
+previous-page: named-arguments
+next-page: package-objects
+---
diff --git a/_fr/tour/pattern-matching.md b/_fr/tour/pattern-matching.md
new file mode 100644
index 0000000000..1cd3731b9a
--- /dev/null
+++ b/_fr/tour/pattern-matching.md
@@ -0,0 +1,12 @@
+---
+layout: tour
+title: Pattern Matching
+partof: scala-tour
+
+num: 11
+
+language: fr
+
+next-page: singleton-objects
+previous-page: case-classes
+---
diff --git a/_fr/tour/polymorphic-methods.md b/_fr/tour/polymorphic-methods.md
new file mode 100644
index 0000000000..6375d54957
--- /dev/null
+++ b/_fr/tour/polymorphic-methods.md
@@ -0,0 +1,12 @@
+---
+layout: tour
+title: Polymorphic Methods
+partof: scala-tour
+
+num: 26
+
+language: fr
+
+next-page: type-inference
+previous-page: implicit-conversions
+---
diff --git a/_fr/tour/regular-expression-patterns.md b/_fr/tour/regular-expression-patterns.md
new file mode 100644
index 0000000000..253da8efa6
--- /dev/null
+++ b/_fr/tour/regular-expression-patterns.md
@@ -0,0 +1,63 @@
+---
+layout: tour
+title: Regular Expression Patterns
+partof: scala-tour
+
+num: 17
+
+language: fr
+
+next-page: extractor-objects
+previous-page: singleton-objects
+---
+
+Les expressions régulières sont des chaînes de caractères qui peuvent être utilisées pour trouver des motifs (ou l'absence de motif) dans un texte. Toutes les chaînes de caractères peuvent être converties en expressions régulières en utilisant la méthode `.r`.
+
+```scala mdoc
+import scala.util.matching.Regex
+
+val numberPattern: Regex = "[0-9]".r
+
+numberPattern.findFirstMatchIn("awesomepassword") match {
+ case Some(_) => println("Password OK")
+ case None => println("Password must contain a number")
+}
+```
+
+Dans l'exemple ci-dessus, `numberPattern` est une `Regex` (EXpression REGulière) que nous utilisons pour vérifier que le mot de passe contient un nombre.
+
+Vous pouvez aussi faire des recherches de groupes d'expressions régulières en utilisant les parenthèses.
+
+```scala mdoc
+import scala.util.matching.Regex
+
+val keyValPattern: Regex = "([0-9a-zA-Z- ]+): ([0-9a-zA-Z-#()/. ]+)".r
+
+val input: String =
+ """background-color: #A03300;
+ |background-image: url(img/header100.png);
+ |background-position: top center;
+ |background-repeat: repeat-x;
+ |background-size: 2160px 108px;
+ |margin: 0;
+ |height: 108px;
+ |width: 100%;""".stripMargin
+
+for (patternMatch <- keyValPattern.findAllMatchIn(input))
+ println(s"key: ${patternMatch.group(1)} value: ${patternMatch.group(2)}")
+```
+
+Ici, nous analysons les clefs et les valeurs d'une chaîne de caractères. Chaque correspondance a un groupe de sous-correspondances. Voici le résultat :
+
+```
+key: background-color value: #A03300
+key: background-image value: url(img/header100.png)
+key: background-position value: top center
+key: background-repeat value: repeat-x
+key: background-size value: 2160px 108px
+key: margin value: 0
+key: height value: 108px
+key: width value: 100
+```
+
+Traduit par Antoine Pointeau.
\ No newline at end of file
diff --git a/_fr/tour/self-types.md b/_fr/tour/self-types.md
new file mode 100644
index 0000000000..9d82783417
--- /dev/null
+++ b/_fr/tour/self-types.md
@@ -0,0 +1,12 @@
+---
+layout: tour
+title: Self-types
+partof: scala-tour
+
+num: 23
+
+language: fr
+
+next-page: implicit-parameters
+previous-page: compound-types
+---
diff --git a/_fr/tour/singleton-objects.md b/_fr/tour/singleton-objects.md
new file mode 100644
index 0000000000..073dfaf5ec
--- /dev/null
+++ b/_fr/tour/singleton-objects.md
@@ -0,0 +1,120 @@
+---
+layout: tour
+title: Singleton Objects
+partof: scala-tour
+
+num: 15
+
+language: fr
+
+next-page: regular-expression-patterns
+previous-page: pattern-matching
+---
+
+Un objet est une classe qui a exactement une instance. Il est créé de façon paresseuse au moment où il est référencé, comme une valeur paresseuse `lazy val`.
+
+En tant que valeur de premier niveau, un objet est un singleton.
+
+En tant que membre d'une classe englobante ou en tant que valeur locale, il se comporte exactement comme une `lazy val`.
+
+## Définir un objet singleton
+
+Un objet est une valeur. La définition d'un objet ressemble à une classe, mais utilise le mot clef `object` :
+
+```scala mdoc
+object Box
+```
+
+Voici un exemple d'un objet avec une méthode :
+
+```
+package logging
+
+object Logger {
+ def info(message: String): Unit = println(s"INFO: $message")
+}
+```
+
+La méthode `info` peut être importée depuis n'importe où dans le programme. Créer des méthodes utilitaires, comme celle-ci, est un cas d'usage commun pour les objets singleton.
+
+Regardons comment utiliser `info` dans un autre package :
+
+```
+import logging.Logger.info
+
+class Project(name: String, daysToComplete: Int)
+
+class Test {
+ val project1 = new Project("TPS Reports", 1)
+ val project2 = new Project("Website redesign", 5)
+ info("Created projects") // Prints "INFO: Created projects"
+}
+```
+
+La méthode `info` est visible grâce à l'import, `import logging.Logger.info`. Les imports ont besoin d'un chemin d'accès stable aux ressources, et un objet est un chemin stable.
+
+Note : Si un `objet` est encapsulé dans une autre classe ou un autre objet, alors l'objet est dépendant du chemin d'accès, comme les autres membres. Cela veut dire, par exemple, que si on prend 2 types de boissons, `class Milk` et `class OrangeJuice`, un membre de classe `object NutritionInfo` est dépendant de son instance d'encapsulation. `milk.NutritionInfo` est complètement différent de `oj.NutritionInfo`.
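+
+À titre d'illustration, voici une petite esquisse (les champs et les valeurs nutritionnelles sont des hypothèses ; seules les classes `Milk` et `OrangeJuice` viennent de la note ci-dessus) :
+
+```scala mdoc
+class Milk {
+  object NutritionInfo {
+    val kilocaloriesPour100ml: Int = 64 // valeur hypothétique
+  }
+}
+
+class OrangeJuice {
+  object NutritionInfo {
+    val kilocaloriesPour100ml: Int = 45 // valeur hypothétique
+  }
+}
+
+val milk = new Milk
+val oj = new OrangeJuice
+
+// milk.NutritionInfo et oj.NutritionInfo sont deux objets distincts,
+// chacun dépendant de son instance englobante.
+println(milk.NutritionInfo.kilocaloriesPour100ml) // 64
+println(oj.NutritionInfo.kilocaloriesPour100ml)   // 45
+```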
+
+## Les objets compagnons
+
+Un objet avec le même nom qu'une classe est appelé un _objet compagnon_. Inversement, la classe se nomme la _classe compagnon_ de l'objet. Une classe ou un objet compagnon peut accéder aux membres privés de son compagnon. L'objet compagnon est utile pour les méthodes et les valeurs qui ne sont pas spécifiques aux instances de la classe compagnon.
+
+```
+import scala.math._
+
+case class Circle(radius: Double) {
+ import Circle._
+ def area: Double = calculateArea(radius)
+}
+
+object Circle {
+ private def calculateArea(radius: Double): Double = Pi * pow(radius, 2.0)
+}
+
+val circle1 = Circle(5.0)
+
+circle1.area
+```
+
+La classe `class Circle` a un membre `area` qui est spécifique à chaque instance, et un singleton `object Circle` qui a une méthode `calculateArea` qui est disponible pour chaque instance.
+
+L'objet compagnon peut aussi contenir des méthodes de fabrique (_factory_) :
+
+```scala mdoc
+class Email(val username: String, val domainName: String)
+
+object Email {
+ def fromString(emailString: String): Option[Email] = {
+ emailString.split('@') match {
+ case Array(a, b) => Some(new Email(a, b))
+ case _ => None
+ }
+ }
+}
+
+val scalaCenterEmail = Email.fromString("scala.center@epfl.ch")
+scalaCenterEmail match {
+ case Some(email) => println(
+ s"""Registered an email
+ |Username: ${email.username}
+ |Domain name: ${email.domainName}
+ """.stripMargin)
+ case None => println("Error: could not parse email")
+}
+```
+
+L'objet `object Email` contient une méthode de fabrique `fromString` qui crée une instance de `Email` depuis une chaîne de caractères. L'instance est retournée en tant que `Option[Email]` pour gérer le cas des erreurs de syntaxe.
+
+Note : Si une classe ou un objet a un compagnon, tous deux doivent être définis dans le même fichier. Pour définir des compagnons dans le REPL, tous deux doivent être définis sur la même ligne ou définis en mode `:paste`.
+
+## Notes pour les programmeurs Java
+
+Les membres `static` en Java sont modélisés comme des membres ordinaires d'un objet compagnon en Scala.
+
+Lorsqu'on utilise un objet compagnon depuis du code Java, ses membres sont définis dans la classe compagnon avec le modificateur `static`. Cela s'appelle le _static forwarding_. Cela se produit même si vous n'avez pas défini de classe compagnon vous-même.
+
+## Plus d'informations
+
+* Apprenez-en plus sur les objets compagnons dans le [Scala Book](/overviews/scala-book/companion-objects.html)
+
+Traduit par Antoine Pointeau.
diff --git a/_fr/tour/tour-of-scala.md b/_fr/tour/tour-of-scala.md
new file mode 100644
index 0000000000..f5d0f5d20a
--- /dev/null
+++ b/_fr/tour/tour-of-scala.md
@@ -0,0 +1,90 @@
+---
+layout: tour
+title: Introduction
+partof: scala-tour
+
+num: 1
+language: fr
+next-page: basics
+
+---
+
+## Bienvenue au tour
+Ce tour contient une introduction morceau par morceau aux fonctionnalités les plus fréquemment
+utilisées en Scala. Il s'adresse aux novices de Scala.
+
+Ceci est un bref tour du langage, et non un tutoriel complet.
+Si vous recherchez un guide plus détaillé, il est préférable d'opter pour [un livre](/books.html) ou de suivre
+[un cours en ligne](/online-courses.html).
+
+## Qu'est-ce que Scala ?
+Scala est un langage de programmation à multiples paradigmes, conçu pour exprimer des motifs de programmation communs de
+façon concise, élégante et robuste. Il intègre sans problème les fonctionnalités des langages orientés objet et des
+langages fonctionnels.
+
+## Scala est orienté objet ##
+Scala est un langage purement orienté objet dans le sens où [toute valeur est un objet](unified-types.html).
+Les types et les comportements de ces objets sont décrits par des [classes](classes.html) et des [traits](traits.html).
+Les classes peuvent être étendues à travers des sous-classes et grâce à un système flexible de [composition de classes](mixin-class-composition.html).
+
+## Scala est fonctionnel ##
+Scala est également un langage fonctionnel dans le sens où [toute fonction est une valeur](unified-types.html).
+Scala propose une [syntaxe légère](basics.html) pour définir des fonctions anonymes, supporte des
+[fonctions d'ordre supérieur](higher-order-functions.html), autorise les fonctions [imbriquées](nested-functions.html) et
+supporte le [currying](multiple-parameter-lists.html).
+Les [case class](case-classes.html) de Scala et leur système intégré de [reconnaissance de motifs](pattern-matching.html)
+permettent de modéliser des types algébriques utilisés dans de nombreux langages de programmation fonctionnelle.
+Les [objets singleton](singleton-objects.html) fournissent une façon pratique de regrouper des fonctions qui ne sont pas
+membres d'une classe.
+
+De plus, la notion de reconnaissance de motifs de Scala s'étend naturellement au
+[traitement des données XML](https://github.com/scala/scala-xml/wiki/XML-Processing) avec l'aide des
+[patrons d'expressions régulières](regular-expression-patterns.html), grâce à une extension générale via des
+[objets extracteurs](extractor-objects.html). Dans ce contexte, les [for comprehensions](for-comprehensions.html) sont
+utiles pour formuler des requêtes. Ces fonctionnalités font de Scala un langage idéal pour développer des applications
+comme des services Web.
+
+## Scala est fortement typé ##
+A la compilation, le système de type expressif de Scala renforce l'utilisation des abstractions d'une manière
+sécurisée et cohérente. En particulier, ce système de type supporte :
+
+* Les [classes génériques](generic-classes.html)
+* Les [annotations de variance](variances.html)
+* Les limites de type [supérieures](upper-type-bounds.html) et [inférieures](lower-type-bounds.html)
+* Les [classes internes](inner-classes.html) et les membres d'objets de [types abstraits](abstract-type-members.html)
+* Les [types composés](compound-types.html)
+* Les [auto-références explicitement typées](self-types.html)
+* Les [paramètres](implicit-parameters.html) et les [conversions](implicit-conversions.html) implicites
+* Les [méthodes polymorphiques](polymorphic-methods.html)
+
+L'[inférence de type](type-inference.html) signifie que l'utilisateur n'est pas obligé d'annoter son code avec des
+informations redondantes. Rassemblées, toutes ces fonctionnalités fournissent une base solide pour la ré-utilisation
+sécurisée d'abstractions de programmation et pour une extension sûre au niveau des types de programme.
+
+## Scala est extensible ##
+
+En pratique, le développement d'applications dans un domaine spécifique demande souvent des extensions de langage propres
+à ce domaine. Scala fournit une combinaison unique de mécanismes de langage qui rend simple l'ajout de nouvelles
+constructions au langage sous la forme de librairies à importer.
+
+Dans beaucoup de cas, cela peut être fait sans utiliser des outils de méta-programmation, comme les macros.
+En voici quelques exemples :
+
+* Les [classes implicites](/overviews/core/implicit-classes.html) permettent d'ajouter des méthodes supplémentaires à des types existants.
+* L'[interpolation de String](/overviews/core/string-interpolation.html) est extensible par l'utilisateur avec des interpolateurs personnalisés.
+
+## Scala interagit ##
+
+Scala est conçu pour interagir proprement avec le populaire Java Runtime Environment (JRE). En particulier, l'interaction
+avec le langage de programmation orienté objet le plus populaire du moment, Java, est la plus transparente possible.
+Les nouvelles fonctionnalités Java comme les SAMs, les [lambdas](higher-order-functions.html), les [annotations](annotations.html),
+et les [classes génériques](generic-classes.html) ont des équivalents directs en Scala.
+
+Il existe des fonctionnalités Scala sans équivalent Java, comme les [valeurs par défaut](default-parameter-values.html) et les
+[paramètres nommés](named-arguments.html), qui se compilent d'une façon aussi proche de Java que possible. Scala possède le
+même modèle de compilation que Java (compilation séparée, chargement dynamique des classes) et permet d'avoir accès à des
+milliers de librairies de haute qualité.
+
+## Bon tour !
+
+Veuillez passer à la [page suivante](basics.html) pour en savoir plus.
diff --git a/_fr/tour/traits.md b/_fr/tour/traits.md
new file mode 100644
index 0000000000..069648bb53
--- /dev/null
+++ b/_fr/tour/traits.md
@@ -0,0 +1,12 @@
+---
+layout: tour
+title: Traits
+partof: scala-tour
+
+num: 5
+
+language: fr
+
+next-page: tuples
+previous-page: classes
+---
diff --git a/_fr/tour/tuples.md b/_fr/tour/tuples.md
new file mode 100644
index 0000000000..edef97a6ca
--- /dev/null
+++ b/_fr/tour/tuples.md
@@ -0,0 +1,82 @@
+---
+layout: tour
+title: Tuples
+partof: scala-tour
+
+num: 8
+
+language: fr
+
+next-page: mixin-class-composition
+previous-page: traits
+---
+
+En Scala, un tuple est une valeur qui contient un nombre fixe d'éléments, chacun avec son propre type. Les tuples sont immuables.
+
+Les tuples sont notamment utiles pour retourner plusieurs valeurs depuis une méthode.
+
+Un tuple avec deux éléments peut être créé de la façon suivante :
+
+```scala mdoc
+val ingredient = ("Sugar" , 25)
+```
+
+Cela crée un tuple contenant un élément de type `String` et un élément de type `Int`.
+
+Le type inféré de `ingredient` est `(String, Int)`, qui est un raccourci pour `Tuple2[String, Int]`.
+
+Pour représenter les tuples, Scala utilise une série de classes : `Tuple2`, `Tuple3`, etc., jusqu'à `Tuple22`.
+Chaque classe a autant de paramètres de types qu'elle a d'éléments.
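+
+Par exemple, le tuple `("Sugar", 25)` défini plus haut peut aussi s'écrire sous sa forme explicite (esquisse ; le nom `ingredient2` est arbitraire) :
+
+```scala mdoc
+val ingredient2: Tuple2[String, Int] = Tuple2("Sugar", 25)
+```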
+
+## Accéder aux éléments
+
+Une des façons d'accéder aux éléments d'un tuple est d'utiliser leur position. Les éléments sont nommés individuellement `_1`, `_2`, et ainsi de suite.
+
+```scala mdoc
+println(ingredient._1) // Sugar
+println(ingredient._2) // 25
+```
+
+## Pattern matching sur les tuples
+
+Un tuple peut aussi être décomposé en utilisant le pattern matching :
+
+```scala mdoc
+val (name, quantity) = ingredient
+println(name) // Sugar
+println(quantity) // 25
+```
+
+Ici le type inféré de `name` est `String` et le type inféré de `quantity` est `Int`.
+
+Voici un autre exemple de pattern-matching sur un tuple :
+
+```scala mdoc
+val planets =
+ List(("Mercury", 57.9), ("Venus", 108.2), ("Earth", 149.6),
+ ("Mars", 227.9), ("Jupiter", 778.3))
+planets.foreach {
+ case ("Earth", distance) =>
+ println(s"Our planet is $distance million kilometers from the sun")
+ case _ =>
+}
+```
+
+Ou, en décomposition dans un `for` :
+
+```scala mdoc
+val numPairs = List((2, 5), (3, -7), (20, 56))
+for ((a, b) <- numPairs) {
+ println(a * b)
+}
+```
+
+## Les tuples et les classes de cas
+
+Les utilisateurs trouvent parfois qu'il est difficile de choisir entre les tuples et les classes de cas. Les classes de cas ont des éléments nommés, ce qui peut améliorer la lisibilité de certains codes. Dans l'exemple ci-dessus avec les planètes, nous pourrions définir `case class Planet(name: String, distance: Double)` plutôt que d'utiliser les tuples.
+
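+À titre d'illustration, voici une esquisse de l'exemple des planètes réécrit avec cette classe de cas (le nom `planetsAsCaseClasses` est arbitraire) :
+
+```scala mdoc
+case class Planet(name: String, distance: Double)
+
+val planetsAsCaseClasses =
+  List(Planet("Mercury", 57.9), Planet("Venus", 108.2), Planet("Earth", 149.6))
+
+planetsAsCaseClasses.foreach {
+  case Planet("Earth", distance) =>
+    println(s"Our planet is $distance million kilometers from the sun")
+  case _ =>
+}
+```
+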
+## Plus d'informations
+
+* Apprenez-en davantage sur les tuples dans [Scala Book](/overviews/scala-book/tuples.html)
+
+Traduction par Antoine Pointeau.
\ No newline at end of file
diff --git a/_fr/tour/type-inference.md b/_fr/tour/type-inference.md
new file mode 100644
index 0000000000..019ed21ef5
--- /dev/null
+++ b/_fr/tour/type-inference.md
@@ -0,0 +1,12 @@
+---
+layout: tour
+title: Type Inference
+partof: scala-tour
+
+num: 27
+
+language: fr
+
+next-page: operators
+previous-page: polymorphic-methods
+---
diff --git a/_fr/tour/unified-types.md b/_fr/tour/unified-types.md
new file mode 100644
index 0000000000..6ecf013319
--- /dev/null
+++ b/_fr/tour/unified-types.md
@@ -0,0 +1,12 @@
+---
+layout: tour
+title: Unified Types
+partof: scala-tour
+
+num: 3
+
+language: fr
+
+next-page: classes
+previous-page: basics
+---
diff --git a/_fr/tour/upper-type-bounds.md b/_fr/tour/upper-type-bounds.md
new file mode 100644
index 0000000000..f47c6a4e30
--- /dev/null
+++ b/_fr/tour/upper-type-bounds.md
@@ -0,0 +1,12 @@
+---
+layout: tour
+title: Upper Type Bounds
+partof: scala-tour
+
+num: 18
+
+language: fr
+
+next-page: lower-type-bounds
+previous-page: variances
+---
diff --git a/_fr/tour/variances.md b/_fr/tour/variances.md
new file mode 100644
index 0000000000..5f535d303b
--- /dev/null
+++ b/_fr/tour/variances.md
@@ -0,0 +1,12 @@
+---
+layout: tour
+title: Variance
+partof: scala-tour
+
+num: 17
+
+language: fr
+
+next-page: upper-type-bounds
+previous-page: generic-classes
+---
diff --git a/_fr/tutorials/scala-for-java-programmers.md b/_fr/tutorials/scala-for-java-programmers.md
new file mode 100644
index 0000000000..0b9d3ffa63
--- /dev/null
+++ b/_fr/tutorials/scala-for-java-programmers.md
@@ -0,0 +1,667 @@
+---
+layout: singlepage-overview
+title: Tutoriel Scala pour développeurs Java
+
+partof: scala-for-java-programmers
+language: fr
+---
+
+Par Michel Schinz and Philipp Haller.
+
+Traduction et arrangements par Agnès Maury.
+
+## Introduction
+
+Ce document présente une introduction rapide au langage Scala et à son compilateur.
+Il est destiné aux personnes ayant une expérience de programmation et qui souhaitent
+un aperçu de ce qu'elles peuvent faire avec Scala. On part du principe que le lecteur possède
+des connaissances de base sur la programmation orientée objet, particulièrement sur Java.
+
+
+## Un premier exemple
+
+Commençons par écrire le célèbre programme *Hello world*.
+Bien que simple, il permet de découvrir plusieurs fonctionnalités du langage
+avec peu de connaissance préalable de Scala. Voilà à quoi il ressemble :
+
+ object HelloWorld {
+ def main(args: Array[String]): Unit = {
+ println("Hello, world!")
+ }
+ }
+
+La structure de ce programme devrait être familière pour les développeurs Java :
+il consiste en une méthode appelée `main` qui prend les arguments de la ligne de commande,
+un tableau de chaînes de caractères (`Array[String]`), comme paramètre ; le corps de cette méthode consiste en un simple appel de la méthode
+prédéfinie `println` avec le salut amical comme argument. Cette méthode `main` ne retourne pas de valeur.
+Pourtant, son type de retour est déclaré comme `Unit`.
+
+Ce qui est moins familier pour les développeurs Java est la déclaration `object` qui contient la méthode
+`main`. Une telle déclaration introduit ce qui est communément connu comme un *objet singleton*, qui est une classe
+avec une seule instance. La déclaration ci-dessus déclare à la fois une classe nommée `HelloWorld`
+et une instance de cette classe, aussi nommée `HelloWorld`. Cette instance est créée sur demande, c'est-à-dire,
+la première fois qu'elle est utilisée.
+
+Le lecteur avisé a pu remarquer que la méthode `main` n'est pas déclarée en tant que `static`.
+C'est parce que les membres statiques (méthodes ou champs) n'existent pas en Scala. Plutôt que de définir des
+membres statiques, le développeur Scala déclare ces membres dans un objet singleton.
+
+### Compiler l'exemple
+
+Pour compiler cet exemple, nous utilisons `scalac`, le compilateur Scala.
+`scalac` fonctionne comme la plupart des compilateurs : il prend comme argument un fichier source,
+potentiellement certaines options, et produit un ou plusieurs fichiers objets.
+Les fichiers objets produits sont des fichiers de classes Java standard.
+
+Si nous sauvegardons le programme ci-dessus dans un fichier appelé `HelloWorld.scala`,
+nous pouvons le compiler en exécutant la commande suivante (le symbole supérieur `>` représente
+l'invite de commandes et ne doit pas être écrit) :
+
+ > scalac HelloWorld.scala
+
+Cette commande va générer un certain nombre de fichiers class dans le répertoire courant.
+L'un d'entre eux s'appellera `HelloWorld.class` et contiendra une classe qui pourra être directement exécutée
+en utilisant la commande `scala`, comme décrit dans la section suivante.
+
+### Exécuter l'exemple
+
+Une fois compilé, le programme Scala peut être exécuté en utilisant la commande `scala`.
+Son utilisation est très similaire à la commande `java` utilisée pour exécuter les programmes Java,
+et qui accepte les mêmes options. L'exemple ci-dessus peut être exécuté en utilisant la commande suivante,
+ce qui produit le résultat attendu :
+
+ > scala -classpath . HelloWorld
+
+ Hello, world!
+
+## Interaction avec Java
+
+L'une des forces du Scala est qu'il rend très facile l'interaction avec le code Java.
+Toutes les classes du paquet `java.lang` sont importées par défaut, alors que les autres
+doivent être importées explicitement.
+
+Prenons l'exemple suivant. Nous voulons obtenir et formater la date actuelle
+par rapport aux conventions utilisées dans un pays spécifique, par exemple la France.
+
+Les librairies de classes Java définissent des classes utilitaires très puissantes, comme `Date`
+et `DateFormat`. Comme Scala interagit avec Java, il n'y a pas besoin de ré-implémenter ces classes en Scala
+--nous pouvons simplement importer les classes des paquets correspondants de Java :
+
+ import java.util.{Date, Locale}
+ import java.text.DateFormat._
+
+ object DateFrancaise {
+ def main(args: Array[String]): Unit = {
+ val maintenant = new Date
+ val df = getDateInstance(LONG, Locale.FRANCE)
+ println(df format maintenant)
+ }
+ }
+
+Les déclarations d'import de Scala sont très similaires à celles de Java ; cependant,
+elles sont bien plus puissantes. Plusieurs classes peuvent être importées du même paquet en les plaçant
+dans des accolades comme démontré dans la première ligne. Une autre différence notable est de pouvoir
+importer tous les noms d'un paquet ou d'une classe en utilisant le symbole underscore (`_`) au lieu de
+l'astérisque (`*`). C'est parce que l'astérisque est un identifiant valide en Scala (par exemple pour
+un nom de méthode), comme nous le verrons plus tard.
+
+Par conséquent, la déclaration d'importation dans la seconde ligne importe tous les membres de la classe
+`DateFormat`. Cela rend la méthode statique `getDateInstance` et le champ statique `LONG`
+directement visibles.
+
+Dans la méthode `main`, nous avons tout d'abord créé une instance de la classe Java `Date`
+qui contient par défaut la date actuelle. Ensuite, nous définissons un format de date en utilisant la
+méthode statique `getDateInstance` que nous avons importée précédemment. Enfin, nous imprimons
+la date actuelle selon l'instance de `DateFormat` localisée. Cette dernière ligne montre une
+propriété intéressante de la syntaxe Scala. Les méthodes qui ne prennent en entrée qu'un seul argument
+peuvent être utilisées avec une syntaxe infixe. C'est-à-dire que l'expression
+
+ df format maintenant
+
+est juste une autre façon moins verbeuse d'écrire l'expression
+
+ df.format(maintenant)
+
+Cela peut paraître comme un détail syntaxique mineur, mais il entraîne des conséquences importantes,
+dont l'une va être explorée dans la section suivante.
+
+Pour conclure cette section sur l'intégration avec Java, il faut noter qu'il est possible
+d'hériter de classes Java et d'implémenter des interfaces Java directement en Scala.
+
+## Tout est objet
+
+Scala est un langage purement orienté objet dans le sens où *tout* est un objet,
+y compris les nombres ou les fonctions. Cela diffère du Java dans cet aspect, car Java
+distingue les types primitifs (comme `boolean` et `int`) des types référentiels.
+
+### Les nombres sont des objets
+
+Étant donné que les nombres sont des objets, ils ont aussi des méthodes.
+De fait, une expression arithmétique comme la suivante :
+
+ 1 + 2 * 3 / x
+
+consiste exclusivement en des appels de méthodes, parce qu'elle est équivalente à l'expression
+suivante, comme nous l'avons vu dans la section précédente :
+
+    (1).+(((2).*(3))./(x))
+
+Cela veut aussi dire que `+`, `*`, etc. sont des identifiants valides en Scala.
+
+### Les fonctions sont des objets
+
+Les fonctions sont aussi des objets en Scala. C'est pourquoi il est possible de passer
+des fonctions en arguments, de les stocker dans des variables et de les retourner depuis d'autres
+fonctions. Cette capacité à manipuler les fonctions en tant que valeurs est l'une des
+pierres angulaires d'un paradigme de programmation très intéressant nommé *programmation fonctionnelle*.
+
+Pour illustrer à quel point il peut être utile d'utiliser des fonctions en tant que valeurs,
+considérons une fonction minuteur qui doit effectuer une action toutes les secondes. Comment faire
+pour passer au minuteur l'action à effectuer ? En toute logique, comme une fonction. Ce concept de
+passer une fonction devrait être familier à beaucoup de développeurs : il est souvent utilisé dans
+le code d'interface utilisateur pour enregistrer des fonctions de rappel qui sont invoquées lorsque
+certains évènements se produisent.
+
+Dans le programme suivant, la fonction minuteur est appelée `uneFoisParSeconde` et prend comme argument
+une fonction de rappel. Le type de cette fonction est écrit `() => Unit`. C'est le type de toutes les
+fonctions qui ne prennent aucun argument et ne renvoient rien (le type `Unit` est similaire à `void` en C/C++).
+La principale fonction de ce programme est d'appeler la fonction minuteur avec une fonction de rappel
+qui imprime une phrase dans le terminal. Dans d'autres termes, ce programme imprime à l'infini la phrase
+"le temps passe comme une flèche".
+
+ object Minuteur {
+ def uneFoisParSeconde(retour: () => Unit): Unit = {
+ while (true) {
+ retour()
+ Thread sleep 1000
+ }
+ }
+
+ def leTempsPasse(): Unit = {
+ println("le temps passe comme une flèche")
+ }
+
+ def main(args: Array[String]): Unit = {
+ uneFoisParSeconde(leTempsPasse)
+ }
+ }
+
+Notez que pour imprimer la String, nous utilisons la méthode prédéfinie `println` au lieu
+d'utiliser celle de `System.out`.
+
+#### Fonctions anonymes
+
+Bien que ce programme soit facile à comprendre, il peut être affiné un peu plus.
+Premièrement, notez que la fonction `leTempsPasse` est définie uniquement dans le but d'être
+passée plus tard dans la fonction `uneFoisParSeconde`. Devoir nommer cette fonction qui ne va
+être utilisée qu'une fois peut sembler superflu et il serait plus agréable de pouvoir construire
+cette fonction juste au moment où elle est passée à `uneFoisParSeconde`. C'est possible en Scala
+en utilisant des *fonctions anonymes*, ce qui correspond exactement à ça : des fonctions sans nom.
+La version revisitée de notre programme minuteur en utilisant une fonction anonyme à la place de
+*leTempsPasse* ressemble à ça :
+
+ object MinuteurAnonyme {
+ def uneFoisParSeconde(retour: () => Unit): Unit = {
+ while (true) {
+ retour()
+ Thread sleep 1000
+ }
+ }
+
+ def main(args: Array[String]): Unit = {
+ uneFoisParSeconde(
+ () => println("le temps passe comme une flèche")
+ )
+ }
+ }
+
+La présence d'une fonction anonyme dans cet exemple est reconnaissable par la flèche pointant à droite
+`=>` qui sépare la liste des arguments de la fonction de son corps. Dans cet exemple, la liste des
+arguments est vide, comme en témoigne la paire de parenthèses vide à gauche de la flèche. Le corps
+de cette fonction est le même que celui de `leTempsPasse` décrit plus haut.
+
+## Classes
+
+Comme nous l'avons vu plus tôt, Scala est un langage orienté objet et de ce fait, possède le concept de classe
+(pour être plus exact, il existe certains langages orientés objet qui ne possèdent pas le concept de classe
+mais Scala n'en fait pas partie). Les classes en Scala sont déclarées en utilisant une syntaxe proche de
+celle de Java. Une différence notable est que les classes en Scala peuvent avoir des paramètres.
+Ceci est illustré dans la définition suivante des nombres complexes.
+
+ class Complexe(reel: Double, imaginaire: Double) {
+ def re() = reel
+ def im() = imaginaire
+ }
+
+La classe `Complexe` prend en entrée deux arguments : la partie réelle et la partie imaginaire du
+nombre complexe. Ces arguments peuvent être passés lors de la création d'une instance de `Complexe` comme
+ceci :
+
+ new Complexe(1.5, 2.3)
+
+La classe contient deux méthodes, appelées `re` et `im` qui donnent accès à ces deux parties.
+
+Il faut noter que le type de retour de ces méthodes n'est pas explicitement donné. Il sera inféré
+automatiquement par le compilateur, qui regarde la partie droite de ces méthodes et en déduit que chacune
+de ces fonctions renvoie une valeur de type `Double`.
+
+Le compilateur n'est pas toujours capable d'inférer des types comme il le fait ici et il n'y a
+malheureusement aucune règle simple pour savoir dans quel cas il est capable de le faire. En pratique,
+ce n'est généralement pas un problème, car le compilateur se plaint quand il n'est pas capable d'inférer
+un type qui n'a pas été donné explicitement. Une règle simple que les développeurs débutant en Scala
+devraient suivre est d'essayer d'omettre les déclarations de type qui semblent être faciles à
+déduire et voir si le compilateur ne renvoie pas d'erreur. Après quelque temps, le développeur devrait
+avoir une bonne idée de quand il peut omettre les types et quand il faut les spécifier explicitement.
+
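+Par exemple (esquisse ; les noms de valeurs sont arbitraires) :
+
+    val entier = 12          // le type Int est inféré
+    val texte = "Bonjour"    // le type String est inféré
+    val prix: Double = 12    // type annoté explicitement
+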
+### Les méthodes sans arguments
+
+Un petit problème des méthodes `re` et `im` est qu'il faut mettre une paire de parenthèses vides après
+leur nom pour les appeler, comme démontré dans l'exemple suivant :
+
+ object NombresComplexes {
+ def main(args: Array[String]): Unit = {
+ val c = new Complexe(1.2, 3.4)
+ println("partie imaginaire : " + c.im())
+ }
+ }
+
+Il serait plus agréable de pouvoir accéder à la partie réelle et imaginaire comme si elles étaient des
+champs, sans ajouter une paire de parenthèses vides. C'est parfaitement faisable en Scala, simplement en
+les définissant comme des méthodes *sans argument*. De telles méthodes diffèrent des méthodes avec
+aucun argument : elles n'ont pas de parenthèses après leur nom, que ce soit dans leur déclaration
+ou lors de leur utilisation. Notre classe `Complexe` peut être réécrite de cette façon :
+
+ class Complexe(reel: Double, imaginaire: Double) {
+ def re = reel
+ def im = imaginaire
+ }
+
+
+### Héritage et redéfinition
+
+Toutes les classes en Scala héritent d'une super classe. Quand aucune super classe n'est spécifiée,
+comme dans l'exemple `Complexe` de la section précédente, la classe `scala.AnyRef` est utilisée
+implicitement.
+
+Il est possible de redéfinir les méthodes héritées d'une super classe en Scala. Cependant, il est
+obligatoire de spécifier explicitement qu'une méthode en redéfinit une autre en utilisant le
+modificateur `override` dans le but d'éviter les redéfinitions accidentelles. Dans notre exemple,
+la classe `Complexe` peut être enrichie avec une redéfinition de la méthode `toString` héritée
+de la classe `Object`.
+
+ class Complexe(reel: Double, imaginaire: Double) {
+ def re() = reel
+ def im() = imaginaire
+      override def toString() = "" + re() + (if (im() >= 0) "+" else "") + im() + "i"
+ }
+
+Nous pouvons alors appeler la méthode `toString` redéfinie comme ci-dessus.
+
+ object NombresComplexes {
+ def main(args: Array[String]): Unit = {
+ val c = new Complexe(1.2, 3.4)
+ println("toString() redéfinie : " + c.toString)
+ }
+ }
+
+## Les case class et le pattern matching
+
+L'arbre est un type de structure de données qui revient souvent.
+Par exemple, les interpréteurs et les compilateurs représentent généralement en interne les programmes
+comme des arbres ; les documents XML sont des arbres ; et beaucoup de conteneurs sont basés sur des
+arbres, comme les arbres bicolores.
+
+Nous allons maintenant examiner comment de tels arbres sont représentés et manipulés en Scala au travers
+d'un petit programme de calculatrice. Le but de ce programme est de manipuler des expressions arithmétiques
+simples composées de sommes, de constantes numériques et de variables. Deux exemples de telles expressions
+sont `1+2` et `(x+x)+(7+y)`.
+
+Nous devons d'abord décider d'une représentation pour de telles expressions.
+La manière la plus naturelle est un arbre où chaque nœud représente une opération (ici, une addition) et
+chaque feuille est une valeur (ici des constantes ou variables).
+
+En Java, un tel arbre serait représenté par une super classe abstraite pour les arbres et une
+sous classe concrète pour chaque nœud et feuille. Dans un langage de programmation fonctionnelle,
+on utiliserait plutôt un type de donnée algébrique pour faire la même chose. Scala fournit le concept de
+*case class* qui est quelque part entre ces deux concepts. Voici comment elles peuvent être utilisées pour
+définir le type des arbres pour notre exemple :
+
+ abstract class Arbre
+ case class Somme(l: Arbre, r: Arbre) extends Arbre
+ case class Var(n: String) extends Arbre
+ case class Const(v: Int) extends Arbre
+
+Le fait que les classes `Somme`, `Var` et `Const` soient définies en tant que case class signifie qu'elles
+diffèrent des classes traditionnelles sur plusieurs points (illustrés par l'esquisse qui suit cette liste) :
+
+- le mot clé `new` n'est pas obligatoire lors de la création d'instances de ces classes (c'est-à-dire qu'on
+ peut écrire `Const(5)` à la place de `new Const(5)`) ;
+- les fonctions accesseurs sont automatiquement définies pour les paramètres du constructeur
+ (c'est-à-dire qu'il est possible de récupérer la valeur du paramètre du constructeur `v` pour une instance `c` de
+ la classe `Const` en écrivant tout simplement `c.v`) ;
+- une définition par défaut des méthodes `equals` et `hashCode` est fournie, qui se base sur la
+ *structure* des instances et non pas leur identité ;
+- une définition par défaut de la méthode `toString` est fournie et imprime la valeur "à la source"
+ (par exemple, l'arbre pour l'expression `x+1` s'imprime comme `Somme(Var(x),Const(1))`) ;
+- les instances de ces classes peuvent être décomposées avec un *pattern matching* (filtrage par motif)
+ comme nous le verrons plus bas.
+
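+Voici une petite esquisse illustrant ces propriétés (en supposant les définitions de `Arbre`, `Somme`,
+`Var` et `Const` ci-dessus ; les noms `e1` et `e2` sont arbitraires) :
+
+    val e1 = Somme(Var("x"), Const(1))   // pas besoin de `new`
+    val e2 = Somme(Var("x"), Const(1))
+    println(e1 == e2)   // true : égalité structurelle fournie par `equals`
+    println(e1)         // Somme(Var(x),Const(1)) : `toString` par défaut
+    println(e1.l)       // Var(x) : accesseur généré pour le paramètre `l`
+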
+Maintenant que nous avons défini le type de données pour représenter nos expressions arithmétiques,
+il est temps de définir des opérations pour les manipuler. Nous allons commencer par une fonction
+pour évaluer une expression dans un certain *environnement*. Le but de cet environnement est de
+donner des valeurs aux variables. Par exemple, l'expression `x+1` évaluée dans un environnement qui
+associe la valeur `5` à la variable `x`, écrit `{ x -> 5 }`, donne comme résultat `6`.
+
+Il faut donc trouver un moyen de représenter ces environnements. Nous pouvons certes utiliser
+une sorte de structure de données associative comme une table de hachage, mais nous pouvons aussi
+utiliser directement des fonctions ! Un environnement n'est ni plus ni moins qu'une fonction qui associe
+une valeur à une variable. L'environnement `{ x -> 5 }` décrit plus tôt peut être écrit simplement comme
+ceci en Scala :
+
+ { case "x" => 5 }
+
+Cette notation définit une fonction qui, quand on lui donne la chaîne de caractères `"x"` en entrée, retourne l'entier
+`5`, et lève une exception dans les autres cas.
+
+Avant d'écrire la fonction d'évaluation, donnons un nom au type de ces environnements.
+Nous pouvons toujours utiliser le type `String => Int` pour ces environnements, mais introduire
+un nom pour ce type simplifie le programme et rendra les modifications futures plus simples.
+En Scala, on le réalise avec la notation suivante :
+
+ type Environnement = String => Int
+
+À partir de maintenant, le type `Environnement` peut être utilisé comme un alias pour
+le type des fonctions de `String` vers `Int`.
+
+Maintenant, nous pouvons donner la définition de la fonction d'évaluation.
+Théoriquement, c'est très simple : la valeur d'une somme de deux expressions
+est tout simplement la somme des valeurs de ces expressions ; la valeur d'une
+variable est obtenue directement à partir de l'environnement ; la valeur d'une
+constante est la constante elle-même. Pour l'exprimer en Scala, ce n'est pas plus
+compliqué :
+
+ def eval(a: Arbre, env: Environnement): Int = a match {
+ case Somme(l, r) => eval(l, env) + eval(r, env)
+ case Var(n) => env(n)
+ case Const(v) => v
+ }
+
+Cette fonction d'évaluation fonctionne en effectuant un pattern matching
+sur l'arbre `a`. De façon intuitive, la signification de la définition ci-dessus
+devrait être claire :
+
+1. Tout d'abord, il vérifie si l'arbre `a` est une `Somme`. Si c'est le cas,
+   il relie le sous arbre de gauche à une nouvelle variable appelée `l` et
+   le sous arbre de droite à une variable appelée `r`. Ensuite, il traite
+ l'expression à droite de la flèche : cette expression peut
+ utiliser (dans notre exemple, c'est le cas) les deux variables `l` et `r` extraites dans le
+ motif décrit à gauche de la flèche ;
+2. Si la première vérification échoue, c'est-à-dire que l'arbre n'est pas une `Somme`,
+ on continue et on vérifie si `a` est une `Var`. Si c'est le cas,
+ il relie le nom contenu dans le nœud `Var` à une variable `n` et
+ il traite l'expression à droite de la flèche ;
+3. Si la deuxième vérification échoue, c'est-à-dire que l'arbre n'est ni
+ une `Somme` ni une `Var`, on vérifie si l'arbre est un `Const`. Si
+ c'est le cas, il relie la valeur contenue dans le nœud `Const` à une
+ variable `v` et il traite l'expression à droite de la flèche ;
+4. Enfin, si toutes les vérifications échouent, une exception est levée pour signaler
+   l'échec de l'expression de pattern matching. Dans notre cas, cela pourrait arriver si
+ d'autres sous classes de `Arbre` étaient déclarées.
+
+Nous observons que l'idée basique du pattern matching est de faire correspondre
+une valeur à une série de motifs et dès qu'un motif correspond, extraire
+et nommer les différentes parties de la valeur pour enfin évaluer du
+code qui, généralement, utilise ces parties nommées.
+
+Un développeur orienté objet chevronné pourrait se demander pourquoi nous n'avions pas
+défini `eval` comme une *méthode* de la classe `Arbre` et de ses
+sous classes. En effet, nous aurions pu le faire, étant donné que Scala autorise
+la définition de méthodes dans les case class tout comme dans les classes normales.
+Décider d'utiliser un pattern matching ou des méthodes est donc une question de
+goût mais a aussi des implications importantes sur l'extensibilité :
+
+- quand on utilise des méthodes, il est facile d'ajouter un nouveau type de nœud en même temps
+ qu'une nouvelle sous classe de `Arbre` est définie. Par contre,
+ ajouter une nouvelle opération pour manipuler un arbre est
+ fastidieux car il demande de modifier toutes les sous classes de `Arbre` ;
+- quand on utilise un pattern matching, la situation est inversée : ajouter un
+ nouveau type de nœud demande la modification de toutes les fonctions qui effectuent
+ un pattern matching sur un arbre pour prendre en compte le nouveau nœud.
+ Par contre, ajouter une nouvelle opération est facile en la définissant
+ en tant que fonction indépendante.
+
+Pour explorer plus loin dans le pattern matching, définissons une autre opération
+sur les expressions arithmétiques : la dérivée de fonction. Le lecteur doit
+garder à l'esprit les règles suivantes par rapport à cette opération :
+
+1. la dérivée d'une somme est la somme des dérivées ;
+2. la dérivée d'une variable `v` est 1 si `v` est égale à la
+ variable utilisée pour la dérivation et zéro sinon ;
+3. la dérivée d'une constante est zéro.
+
+Ces règles peuvent presque être traduites littéralement en code Scala
+pour obtenir la définition suivante :
+
+    def derivee(a: Arbre, v: String): Arbre = a match {
+ case Somme(l, r) => Somme(derivee(l, v), derivee(r, v))
+ case Var(n) if (v == n) => Const(1)
+ case _ => Const(0)
+ }
+
+Cette fonction introduit deux nouveaux concepts reliés au pattern matching.
+
+Premièrement, une expression `case` peut être accompagnée d'une *garde*, c'est-à-dire d'une expression qui suit le mot clé `if`.
+Cette garde empêche le pattern matching de réussir à moins que l'expression ne soit vraie. Ici, elle est utilisée
+pour s'assurer qu'on retourne la constante `1` uniquement si le nom de
+la variable à dériver est le même que celui de la variable de dérivation
+`v`. La seconde nouvelle fonctionnalité du pattern matching utilisée ici est
+le motif *joker*, représenté par `_`, qui est un motif correspondant à
+n'importe quelle valeur sans lui donner un nom.
+
+Nous n'avons pas encore exploré l'étendue du pouvoir du pattern matching, mais nous
+nous arrêterons ici afin de garder ce document court. Nous voulons toujours
+voir comment les deux fonctions ci-dessus fonctionnent dans un exemple réel. Pour ce
+faire, écrivons une fonction `main` simple qui effectue plusieurs opérations sur l'expression
+`(x+x)+(7+y)` : elle évalue tout d'abord sa valeur dans l'environnement
+`{ x -> 5, y -> 7 }` puis on la dérive par rapport à `x` et par rapport à `y`.
+
+ def main(args: Array[String]): Unit = {
+ val exp: Arbre = Somme(Somme(Var("x"),Var("x")),Somme(Const(7),Var("y")))
+ val env: Environnement = { case "x" => 5 case "y" => 7 }
+ println("Expression : " + exp)
+ println("Évaluation avec x=5, y=7 : " + eval(exp, env))
+ println("Dérivée par rapport à x :\n " + derivee(exp, "x"))
+ println("Dérivée par rapport à y :\n " + derivee(exp, "y"))
+ }
+
+Vous devrez envelopper le type `Environnement` et les méthodes `eval`, `derivee` et `main`
+dans un objet `Calc` avant de compiler. En exécutant ce programme, on obtient le résultat attendu :
+
+ Expression : Somme(Somme(Var(x),Var(x)),Somme(Const(7),Var(y)))
+ Évaluation avec x=5, y=7 : 24
+ Dérivée par rapport à x :
+ Somme(Somme(Const(1),Const(1)),Somme(Const(0),Const(0)))
+ Dérivée par rapport à y :
+ Somme(Somme(Const(0),Const(0)),Somme(Const(0),Const(1)))
+
+En examinant la sortie, on voit que le résultat de la dérivée devrait être simplifié avant
+d'être présentée à l'utilisateur. Définir une simplification basique en utilisant
+un pattern matching est un problème intéressant (mais curieusement délicat), laissé
+comme exercice pour le lecteur.
+
+## Traits
+
+Hormis le fait d'hériter du code d'une super classe, une classe Scala peut aussi
+importer du code d'un ou de plusieurs *traits*.
+
+Peut-être que le moyen le plus simple pour un développeur Java de comprendre les traits
+est de les voir comme des interfaces qui peuvent aussi contenir du code. En
+Scala, quand une classe hérite d'un trait, elle implémente son interface et
+hérite de tout le code contenu dans ce trait.
+
+Notez que depuis Java 8, les interfaces Java peuvent aussi contenir du code, soit
+en utilisant le mot clé `default` soit avec des méthodes statiques.
+
+Pour s'apercevoir de l'utilité des traits, regardons un exemple classique :
+les objets ordonnés. Il est souvent utile de pouvoir comparer des objets
+d'une même classe, par exemple pour les trier. En Java,
+les objets qui sont comparables implémentent l'interface `Comparable`.
+En Scala, on peut faire un peu mieux qu'en Java en définissant
+notre équivalent de `Comparable` en tant que trait, qu'on appellera
+`Ord`.
+
+Quand on compare des objets, six différents prédicats peuvent être utiles :
+plus petit, plus petit ou égal, égal, inégal, plus grand, plus grand ou égal.
+Cependant, tous les définir est fastidieux, surtout que quatre de ces six
+prédicats peuvent être exprimés en utilisant les deux restants. En effet,
+en utilisant les prédicats égal et plus petit (par exemple), on peut
+exprimer les autres. En Scala, toutes ces observations peuvent être
+capturées dans la déclaration de trait suivante :
+
+ trait Ord {
+ def < (that: Any): Boolean
+ def <=(that: Any): Boolean = (this < that) || (this == that)
+ def > (that: Any): Boolean = !(this <= that)
+ def >=(that: Any): Boolean = !(this < that)
+ }
+
+Cette définition crée à la fois un nouveau type appelé `Ord`,
+qui joue un rôle similaire à l'interface Java `Comparable`, et
+des implémentations par défaut de trois prédicats par rapport à un
+quatrième prédicat abstrait. Les prédicats d'égalité et d'inégalité n'apparaissent pas
+ici vu qu'ils sont présents par défaut dans tous les objets.
+
+Le type `Any`, qui est utilisé plus haut, est le
+super type de tous les autres types en Scala. Il peut être vu comme une
+version plus générale du type Java `Object`, puisqu'il est aussi un
+super type de types de base comme `Int`, `Float`, etc.
+
+Pour rendre les objets d'une classe comparables, il suffit alors de
+définir les prédicats qui testent l'égalité et l'infériorité, puis d'incorporer (mixer)
+la classe `Ord` ci-dessus. Comme exemple, définissons une
+classe `Date` qui représente les dates dans le calendrier grégorien. Elles
+sont composées d'un jour, un mois et une année, que nous allons
+représenter avec des entiers. Nous commençons donc la définition de la
+classe `Date` comme ceci :
+
+ class Date(a: Int, m: Int, j: Int) extends Ord {
+ def annee = a
+ def mois = m
+ def jour = j
+ override def toString(): String = annee + "-" + mois + "-" + jour
+
+La partie importante ici est la déclaration `extends Ord` qui
+suit le nom de la classe et ses paramètres. Cela veut dire que la
+classe `Date` hérite du trait `Ord`.
+
+Ensuite, nous redéfinissons la méthode `equals`, héritée de `Object`,
+pour comparer correctement les dates en comparant leurs
+champs individuels. L'implémentation par défaut de `equals` n'est pas
+utilisable car, comme en Java, elle compare physiquement les objets. On arrive
+à la définition suivante :
+
+ override def equals(that: Any): Boolean =
+ that.isInstanceOf[Date] && {
+ val d = that.asInstanceOf[Date]
+ d.jour == jour && d.mois == mois && d.annee == annee
+ }
+
+Cette méthode utilise les méthodes prédéfinies `isInstanceOf` et
+`asInstanceOf`. La première méthode, `isInstanceOf` correspond à l'opérateur
+Java `instanceof` et retourne true si et seulement si l'objet
+sur lequel elle est appliquée est une instance du type donné.
+La deuxième, `asInstanceOf`, correspond à l'opérateur de conversion de type :
+si l'objet est une instance du type donné, il est vu en tant que tel,
+sinon une `ClassCastException` est levée.
+
+Enfin, la dernière méthode à définir est le prédicat qui teste l'infériorité
+comme suit. Elle utilise une autre méthode,
+`error` du paquet `scala.sys`, qui lève une exception avec le message d'erreur donné.
+
+ def <(that: Any): Boolean = {
+ if (!that.isInstanceOf[Date])
+ sys.error("on ne peut pas comparer " + that + " et une Date")
+
+ val d = that.asInstanceOf[Date]
+ (annee < d.annee) ||
+ (annee == d.annee && (mois < d.mois ||
+ (mois == d.mois && jour < d.jour)))
+ }
+
+Cela complète la définition de la classe `Date`. Les instances de
+cette classe peuvent être vues soit comme des dates, soit comme des objets comparables.
+De plus, elles définissent les six prédicats de comparaison mentionnés
+ci-dessus : `equals` et `<` car elles apparaissent directement dans
+la définition de la classe `Date`, ainsi que les autres qui sont directement héritées du trait `Ord`.
+
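+Par exemple, en supposant les définitions de `Ord` et de `Date` ci-dessus (l'objet `TestDate` et les
+valeurs choisies sont de simples hypothèses d'illustration), on peut écrire :
+
+    object TestDate {
+      def main(args: Array[String]): Unit = {
+        val d1 = new Date(2011, 12, 1)
+        val d2 = new Date(2012, 1, 2)
+        println(d1)        // 2011-12-1
+        println(d1 < d2)   // true
+        println(d1 <= d1)  // true : `<=` vient du trait `Ord` et utilise `equals`
+        println(d1 > d2)   // false
+      }
+    }
+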
+Bien sûr, les traits sont utiles dans d'autres situations que celle décrite ici,
+mais discuter de leurs applications plus amplement est hors de la
+portée de ce document.
+
+## Généricité
+
+La dernière caractéristique de Scala que nous allons explorer dans ce tutoriel est
+la généricité. Les développeurs Java devraient être conscients des problèmes
+posés par le manque de généricité dans leur langage, une lacune qui
+a été compensée avec Java 1.5.
+
+La généricité est la capacité d'écrire du code paramétrisé par des types. Par
+exemple, un développeur qui écrit une librairie pour des listes chaînées fait face au
+problème de décider quel type donner aux éléments de la liste.
+Comme cette liste est destinée à être utilisée dans divers contextes, il n'est
+pas possible de décider quel type doivent avoir les éléments de la liste, par exemple,
+`Int`. Ce serait complètement arbitraire et excessivement restrictif.
+
+Les développeurs Java se retrouvent à utiliser `Object`, le super type de
+tous les objets. Cependant, cette solution est loin d'être
+idéale, puisqu'elle ne marche pas pour les types basiques (`int`,
+`long`, `float`, etc.) et cela implique que le développeur
+devra faire un certain nombre de conversions de types.
+
+Scala rend possible la définition de classes (et de méthodes) génériques pour
+résoudre ce problème. Examinons ceci au travers de l'exemple de la
+classe conteneur la plus simple possible : une référence, qui peut être
+vide ou pointer vers un objet typé.
+
+ class Reference[T] {
+ private var contenu: T = _
+      def set(valeur: T): Unit = { contenu = valeur }
+ def get: T = contenu
+ }
+
+La classe `Reference` est paramétrée par un type appelé `T`,
+qui est le type de son élément. Ce type est utilisé dans le corps de la
+classe comme type de la variable `contenu`, type de l'argument de la méthode
+`set` et type de retour de la méthode `get`.
+
+L'échantillon de code ci-dessus introduit les variables en Scala, ce qui ne devrait pas
+demander plus d'explications. Cependant, il est intéressant de voir que
+la valeur initiale donnée à la variable est `_` qui représente
+une valeur par défaut. Cette valeur par défaut est 0 pour les types numériques,
+`false` pour le type `Boolean`, `()` pour le type `Unit`
+et `null` pour tous les types d'objet.
+
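+Par exemple (esquisse hypothétique ; le nom de la classe est arbitraire) :
+
+    class ValeursParDefaut {
+      var n: Int = _       // 0
+      var b: Boolean = _   // false
+      var u: Unit = _      // ()
+      var s: String = _    // null
+    }
+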
+Pour utiliser cette classe `Reference`, il faut spécifier quel type utiliser
+pour le paramètre de type `T`, c'est-à-dire le type de l'élément contenu dans la cellule.
+Par exemple, pour créer et utiliser une cellule contenant
+un entier, on peut écrire :
+
+ object ReferenceEntier {
+ def main(args: Array[String]): Unit = {
+ val cellule = new Reference[Int]
+ cellule.set(13)
+ println("La référence contient la moitié de " + (cellule.get * 2))
+ }
+ }
+
+Comme on peut le voir dans l'exemple, il n'est pas nécessaire de convertir la valeur
+retournée par la méthode `get` avant de pouvoir l'utiliser en tant qu'entier. Il
+n'est pas possible de stocker autre chose qu'un entier dans cette
+cellule particulière, puisqu'elle a été déclarée comme contenant un entier.
+
+## Conclusion
+
+Ce document donne un rapide aperçu du langage Scala et
+présente quelques exemples basiques. Le développeur intéressé peut poursuivre sa lecture,
+par exemple, en lisant le *[Tour of Scala](https://docs.scala-lang.org/tour/tour-of-scala.html)*
+(document en anglais) et en consultant la *spécification du langage Scala* si nécessaire.
diff --git a/_getting-started/index.md b/_getting-started/index.md
deleted file mode 100644
index 77147fc08c..0000000000
--- a/_getting-started/index.md
+++ /dev/null
@@ -1,61 +0,0 @@
----
-layout: singlepage-overview
-title: Getting Started
-partof: getting-started
-languages: [ja]
-includeTOC: true
-
-redirect_from: "/getting-started.html"
----
-
-There are __two__ main ways people prefer to work in Scala:
-
-* Using an IDE.
-* Using the command line.
-
-The following tutorials will walk you through the setup process for whichever way
-you prefer.
-
-However, if you just want to jump directly into Scala without installing anything, skip the guides on this page and check out:
-
-* [Our interactive introduction to Scala on scala-exercises.com](https://www.scala-exercises.org/scala_tutorial/terms_and_types), or
-* [Scastie](https://scastie.scala-lang.org/), Scala in the browser, with access to all Scala compilers and all published libraries!
-
-## Setting up and getting started with Scala
-
-### If you prefer working in an IDE...
-
-IntelliJ is the most commonly-used IDE by Scala developers. In this tutorial,
-we'll walk you through downloading and setting up IntelliJ with the Scala
-plugin, and we'll get you started with your first Scala project, complete with
-unit tests!
-
-* [Getting Started with Scala in IntelliJ](/getting-started/intellij-track/getting-started-with-scala-in-intellij.html)
-* [Building a Scala Project with IntelliJ and sbt](/getting-started/intellij-track/building-a-scala-project-with-intellij-and-sbt.html)
-* [Testing Scala in IntelliJ with ScalaTest](/getting-started/intellij-track/testing-scala-in-intellij-with-scalatest.html)
-
-
-### If you prefer working on the command line...
-
-If you prefer using a text editor like emacs, Vim, Atom, or Sublime Text, then
-the best way to compile, test, and run Scala code is by using _sbt_, Scala's build
-tool.
-
-* [Getting Started with Scala and sbt on the Command Line](/getting-started/sbt-track/getting-started-with-scala-and-sbt-on-the-command-line.html)
-* [Testing Scala with sbt and ScalaTest on the Command Line](/getting-started/sbt-track/testing-scala-with-sbt-on-the-command-line.html)
-
-
-
-## Next Steps
-Once you've finished these tutorials, check out:
-
-* [The Tour of Scala](/tour/tour-of-scala.html) for bite-sized introductions to Scala's features.
-* [The Scala Book](/overviews/scala-book/introduction.html), which provides a set of short lessons introducing Scala’s main features.
-* [Learning Resources](/learn.html), which includes online interactive tutorials and courses.
-* [Our list of some popular Scala books](/books.html).
-
-## Getting Help
-There are a multitude of mailing lists and real-time chat channels in case you want to quickly connect with other Scala users. Check out our [community](https://scala-lang.org/community/) page for a list of these resources, and for where to reach out for help.
diff --git a/_glossary/index.md b/_glossary/index.md
index 0778d19cd6..9d4d490c65 100644
--- a/_glossary/index.md
+++ b/_glossary/index.md
@@ -16,380 +16,380 @@ languages: [zh-cn]
-* #### algebraic data type
+* ### algebraic data type
A type defined by providing several alternatives, each of which comes with its own constructor. It usually comes with a way to decompose the type through pattern matching. The concept is found in specification languages and functional programming languages. Algebraic data types can be emulated in Scala with case classes.
-* #### alternative
+* ### alternative
A branch of a match expression. It has the form “`case` _pattern_ => _expression_.” Another name for alternative is _case_.
-* #### annotation
+* ### annotation
An annotation appears in source code and is attached to some part of the syntax. Annotations are computer processable, so you can use them to effectively add an extension to Scala.
-* #### anonymous class
+* ### anonymous class
An anonymous class is a synthetic subclass generated by the Scala compiler from a new expression in which the class or trait name is followed by curly braces. The curly braces contains the body of the anonymous subclass, which may be empty. However, if the name following new refers to a trait or class that contains abstract members, these must be made concrete inside the curly braces that define the body of the anonymous subclass.
-* #### anonymous function
+* ### anonymous function
Another name for [function literal](#function-literal).
-* #### apply
+* ### apply
You can apply a method, function, or closure to arguments, which means you invoke it on those arguments.
-* #### argument
+* ### argument
When a function is invoked, an argument is passed for each parameter of that function. The parameter is the variable that refers to the argument. The argument is the object passed at invocation time. In addition, applications can take (command line) arguments that show up in the `Array[String]` passed to main methods of singleton objects.
-* #### assign
+* ### assign
You can assign an object to a variable. Afterwards, the variable will refer to the object.
-* #### auxiliary constructor
+* ### auxiliary constructor
Extra constructors defined inside the curly braces of the class definition, which look like method definitions named `this`, but with no result type.
-* #### block
-One or more expressions and declarations surrounded by curly braces. When the block evaluates, all of its expressions and declarations are processed in order, and then the block returns the value of the last expression as its own value. Blocks are commonly used as the bodies of functions, [for expressions](#for-expression), `while` loops, and any other place where you want to group a number of statements together. More formally, a block is an encapsulation construct for which you can only see side effects and a result value. The curly braces in which you define a class or object do not, therefore, form a block, because fields and methods (which are defined inside those curly braces) are visible from the out- side. Such curly braces form a template.
+* ### block
+One or more expressions and declarations surrounded by curly braces. When the block evaluates, all of its expressions and declarations are processed in order, and then the block returns the value of the last expression as its own value. Blocks are commonly used as the bodies of functions, [for expressions](#for-expression), `while` loops, and any other place where you want to group a number of statements together. More formally, a block is an encapsulation construct for which you can only see side effects and a result value. The curly braces in which you define a class or object do not, therefore, form a block, because fields and methods (which are defined inside those curly braces) are visible from the outside. Such curly braces form a template.
-* #### bound variable
+* ### bound variable
A bound variable of an expression is a variable that’s both used and defined inside the expression. For instance, in the function literal expression `(x: Int) => (x, y)`, both variables `x` and `y` are used, but only `x` is bound, because it is defined in the expression as an `Int` and the sole argument to the function described by the expression.
-* #### by-name parameter
+* ### by-name parameter
A parameter that is marked with a `=>` in front of the parameter type, e.g., `(x: => Int)`. The argument corresponding to a by-name parameter is evaluated not before the method is invoked, but each time the parameter is referenced by name inside the method. If a parameter is not by-name, it is by-value.
-* #### by-value parameter
+* ### by-value parameter
A parameter that is not marked with a `=>` in front of the parameter type, e.g., `(x: Int)`. The argument corresponding to a by-value parameter is evaluated before the method is invoked. By-value parameters contrast with by-name parameters.
-* #### class
+* ### class
Defined with the `class` keyword, a _class_ may either be abstract or concrete, and may be parameterized with types and values when instantiated. In `new Array[String](2)`, the class being instantiated is `Array` and the type of the value that results is `Array[String]`. A class that takes type parameters is called a _type constructor_. A type can be said to have a class as well, as in: the class of type `Array[String]` is `Array`.
-* #### closure
+* ### closure
A function object that captures free variables, and is said to be “closed” over the variables visible at the time it is created.
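+For example (a minimal sketch), the function value below captures the free variable `more`, so it is a closure:
+```scala
+var more = 1
+val addMore = (x: Int) => x + more // closes over `more`
+addMore(10) // 11
+more = 100
+addMore(10) // 110, because the closure sees the current value of `more`
+```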
-* #### companion class
+* ### companion class
A class that shares the same name with a singleton object defined in the same source file. The class is the singleton object’s companion class.
-* #### companion object
+* ### companion object
A singleton object that shares the same name with a class defined in the same source file. Companion objects and classes have access to each other’s private members. In addition, any implicit conversions defined in the companion object will be in scope anywhere the class is used.
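+A minimal sketch (the name `Rocket` is invented for this example): the companion object can call the companion class’s private constructor:
+```scala
+class Rocket private (val name: String) // private primary constructor
+
+object Rocket {
+  def apply(name: String): Rocket = new Rocket(name) // allowed: companions see each other’s private members
+}
+
+val ariane = Rocket("Ariane 5") // expands to Rocket.apply("Ariane 5")
+```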
-* #### contravariant
+* ### contravariant
A _contravariant_ annotation can be applied to a type parameter of a class or trait by putting a minus sign (-) before the type parameter. The class or trait then subtypes contravariantly with—in the opposite direction as—the type annotated parameter. For example, `Function1` is contravariant in its first type parameter, and so `Function1[Any, Any]` is a subtype of `Function1[String, Any]`.
-* #### covariant
+* ### covariant
A _covariant_ annotation can be applied to a type parameter of a class or trait by putting a plus sign (+) before the type parameter. The class or trait then subtypes covariantly with—in the same direction as—the type annotated parameter. For example, `List` is covariant in its type parameter, so `List[String]` is a subtype of `List[Any]`.
-* #### currying
+* ### currying
A way to write functions with multiple parameter lists. For instance `def f(x: Int)(y: Int)` is a curried function with two parameter lists. A curried function is applied by passing several argument lists, as in: `f(3)(4)`. However, it is also possible to write a _partial application_ of a curried function, such as `f(3)`.
-* #### declare
+* ### declare
You can _declare_ an abstract field, method, or type, which gives an entity a name but not an implementation. The key difference between declarations and definitions is that definitions establish an implementation for the named entity, declarations do not.
-* #### define
+* ### define
To _define_ something in a Scala program is to give it a name and an implementation. You can define classes, traits, singleton objects, fields, methods, local functions, local variables, _etc_. Because definitions always involve some kind of implementation, abstract members are declared not defined.
-* #### direct subclass
+* ### direct subclass
A class is a _direct subclass_ of its direct superclass.
-* #### direct superclass
+* ### direct superclass
The class from which a class or trait is immediately derived, the nearest class above it in its inheritance hierarchy. If a class `Parent` is mentioned in a class `Child`’s optional extends clause, then `Parent` is the direct superclass of `Child`. If a trait is mentioned in `Child`’s extends clause, the trait’s direct superclass is `Child`’s direct superclass. If `Child` has no extends clause, then `AnyRef` is the direct superclass of `Child`. If a class’s direct superclass takes type parameters, for example class `Child` extends `Parent[String]`, the direct superclass of `Child` is still `Parent`, not `Parent[String]`. On the other hand, `Parent[String]` would be the direct supertype of `Child`. See [supertype](#supertype) for more discussion of the distinction between class and type.
-* #### equality
+* ### equality
When used without qualification, _equality_ is the relation between values expressed by `==`. See also [reference equality](#reference-equality).
-* #### existential type
+* ### existential type
An existential type includes references to type variables that are unknown. For example, `Array[T] forSome { type T }` is an existential type. It is an array of `T`, where `T` is some completely unknown type. All that is assumed about `T` is that it exists at all. This assumption is weak, but it means at least that an `Array[T] forSome { type T }` is indeed an array and not a banana.
-* #### expression
+* ### expression
Any bit of Scala code that yields a result. You can also say that an expression _evaluates_ to a result or _results_ in a value.
-* #### filter
+* ### filter
An `if` followed by a boolean expression in a [for expression](#for-expression). In `for(i <- 1 to 10; if i % 2 == 0)`, the filter is “`if i % 2 == 0`”. The value to the right of the `if` is the [filter expression](#filter-expression). Also known as a guard.
-* #### filter expression
+* ### filter expression
+A _filter expression_ is the boolean expression following an `if` in a [for expression](#for-expression). In `for(i <- 1 to 10; if i % 2 == 0)`, the filter expression is “`i % 2 == 0`”.
-* #### first-class function
+* ### first-class function
Scala supports _first-class functions_, which means you can express functions in function literal syntax, i.e., `(x: Int) => x + 1`, and that functions can be represented by objects, which are called [function values](#function-value).
-* #### for comprehension
+* ### for comprehension
A _for comprehension_ is a type of [for expression](#for-expression) that creates a new collection. For each iteration of the `for` comprehension, the [yield](#yield) clause defines an element of the new collection. For example, `for (i <- (0 until 2); j <- (2 until 4)) yield (i, j)` returns the collection `Vector((0,2), (0,3), (1,2), (1,3))`.
-* #### for expression
+* ### for expression
A _for expression_ is either a [for loop](#for-loop), which iterates over one or more collections, or a [for comprehension](#for-comprehension), which builds a new collection from the elements of one or more collections. A `for` expression is built up of [generators](#generator), [filters](#filter), variable definitions, and (in the case of [for comprehensions](#for-comprehension)) a [yield](#yield) clause.
-* #### for loop
+* ### for loop
A _for loop_ is a type of [for expression](#for-expression) that loops over one or more collections. Since `for` loops return unit, they usually produce side-effects. For example, `for (i <- 0 until 100) println(i)` prints the numbers 0 through 99.
-* #### free variable
+* ### free variable
A _free variable_ of an expression is a variable that’s used inside the expression but not defined inside the expression. For instance, in the function literal expression `(x: Int) => (x, y)`, both variables `x` and `y` are used, but only `y` is a free variable, because it is not defined inside the expression.
-* #### function
+* ### function
A _function_ can be [invoked](#invoke) with a list of arguments to produce a result. A function has a parameter list, a body, and a result type. Functions that are members of a class, trait, or singleton object are called [methods](#method). Functions defined inside other functions are called [local functions](#local-function). Functions with the result type of `Unit` are called [procedures](#procedure). Anonymous functions in source code are called [function literals](#function-literal). At run time, function literals are instantiated into objects called [function values](#function-value).
-* #### function literal
-A function with no name in Scala source code, specified with function literal syntax. For example, `(x: Int, y: Int) => x + y`.
+* ### function literal
+A function with no name in Scala source code, specified with _function literal_ syntax. For example, `(x: Int, y: Int) => x + y`.
-* #### function value
-A function object that can be invoked just like any other function. A function value’s class extends one of the `FunctionN` traits (e.g., `Function0`, `Function1`) from package `scala`, and is usually expressed in source code via [function literal](#function-literal) syntax. A function value is “invoked” when its apply method is called. A function value that captures free variables is a [closure](#closure).
+* ### function value
+A function object that can be invoked just like any other function. A _function value_’s class extends one of the `FunctionN` traits (e.g., `Function0`, `Function1`) from package `scala`, and is usually expressed in source code via [function literal](#function-literal) syntax. A function value is “invoked” when its apply method is called. A function value that captures free variables is a [closure](#closure).
-* #### functional style
+* ### functional style
The _functional style_ of programming emphasizes functions and evaluation results and deemphasizes the order in which operations occur. The style is characterized by passing function values into looping methods, immutable data, and methods with no side effects. It is the dominant paradigm of languages such as Haskell and Erlang, and contrasts with the [imperative style](#imperative-style).
-* #### generator
-A generator defines a named val and assigns to it a series of values in a [for expression](#for-expression). For example, in `for(i <- 1 to 10)`, the generator is “`i <- 1 to 10`”. The value to the right of the `<-` is the [generator expression](#generator-expression).
+* ### generator
+A _generator_ defines a named val and assigns to it a series of values in a [for expression](#for-expression). For example, in `for(i <- 1 to 10)`, the generator is “`i <- 1 to 10`”. The value to the right of the `<-` is the [generator expression](#generator-expression).
-* #### generator expression
-A generator expression generates a series of values in a [for expression](#for-expression). For example, in `for(i <- 1 to 10)`, the generator expression is “`1 to 10`”.
+* ### generator expression
+A _generator expression_ generates a series of values in a [for expression](#for-expression). For example, in `for(i <- 1 to 10)`, the generator expression is “`1 to 10`”.
-* #### generic class
-A class that takes type parameters. For example, because `scala.List` takes a type parameter, `scala.List` is a generic class.
+* ### generic class
+A class that takes type parameters. For example, because `scala.List` takes a type parameter, `scala.List` is a _generic class_.
-* #### generic trait
-A trait that takes type parameters. For example, because trait `scala.collection.Set` takes a type parameter, it is a generic trait.
+* ### generic trait
+A trait that takes type parameters. For example, because trait `scala.collection.Set` takes a type parameter, it is a _generic trait_.
-* #### guard
+* ### guard
See [filter](#filter).
-* #### helper function
+* ### helper function
A function whose purpose is to provide a service to one or more other functions nearby. Helper functions are often implemented as local functions.
-* #### helper method
+* ### helper method
A [helper function](#helper-function) that’s a member of a class. Helper methods are often private.
-* #### immutable
+* ### immutable
An object is _immutable_ if its value cannot be changed after it is created in any way visible to clients. Objects may or may not be immutable.
-* #### imperative style
+* ### imperative style
The _imperative style_ of programming emphasizes careful sequencing of operations so that their effects happen in the right order. The style is characterized by iteration with loops, mutating data in place, and methods with side effects. It is the dominant paradigm of languages such as C, C++, C# and Java, and contrasts with the [functional style](#functional-style).
-* #### initialize
-When a variable is defined in Scala source code, you must initialize it with an object.
+* ### initialize
+When a variable is defined in Scala source code, you must _initialize_ it with an object.
-* #### instance
+* ### instance
An _instance_, or class instance, is an object, a concept that exists only at run time.
-* #### instantiate
+* ### instantiate
To _instantiate_ a class is to make a new object from the class, an action that happens only at run time.
-* #### invariant
+* ### invariant
_Invariant_ is used in two ways. It can mean a property that always holds true when a data structure is well-formed. For example, it is an invariant of a sorted binary tree that each node is ordered before its right subnode, if it has a right subnode. Invariant is also sometimes used as a synonym for nonvariant: “class `Array` is invariant in its type parameter.”
-* #### invoke
+* ### invoke
You can _invoke_ a method, function, or closure _on_ arguments, meaning its body will be executed with the specified arguments.
-* #### JVM
+* ### JVM
The _JVM_ is the Java Virtual Machine, or [runtime](#runtime), that hosts a running Scala program.
-* #### literal
+* ### literal
`1`, `"One"`, and `(x: Int) => x + 1` are examples of _literals_. A literal is a shorthand way to describe an object, where the shorthand exactly mirrors the structure of the created object.
-* #### local function
+* ### local function
A _local function_ is a `def` defined inside a block. To contrast, a `def` defined as a member of a class, trait, or singleton object is called a [method](#method).
-* #### local variable
+* ### local variable
A _local variable_ is a `val` or `var` defined inside a block. Although similar to [local variables](#local-variable), parameters to functions are not referred to as local variables, but simply as parameters or “variables” without the “local.”
-* #### member
+* ### member
A _member_ is any named element of the template of a class, trait, or singleton object. A member may be accessed with the name of its owner, a dot, and its simple name. For example, top-level fields and methods defined in a class are members of that class. A trait defined inside a class is a member of its enclosing class. A type defined with the `type` keyword in a class is a member of that class. A class is a member of the package in which it is defined. By contrast, a local variable or local function is not a member of its surrounding block.
-* #### message
+* ### message
Actors communicate with each other by sending each other _messages_. Sending a message does not interrupt what the receiver is doing. The receiver can wait until it has finished its current activity and its invariants have been reestablished.
-* #### meta-programming
+* ### meta-programming
Meta-programming software is software whose input is itself software. Compilers are meta-programs, as are tools like `scaladoc`. Meta-programming software is required in order to do anything with an annotation.
-* #### method
+* ### method
A _method_ is a function that is a member of some class, trait, or singleton object.
-* #### mixin
+* ### mixin
_Mixin_ is what a trait is called when it is being used in a mixin composition. In other words, in “`trait Hat`,” `Hat` is just a trait, but in “`new Cat extends AnyRef with Hat`,” `Hat` can be called a mixin. When used as a verb, “mix in” is two words. For example, you can _mix_ traits _in_ to classes or other traits.
-* #### mixin composition
+* ### mixin composition
The process of mixing traits into classes or other traits. _Mixin composition_ differs from traditional multiple inheritance in that the type of the super reference is not known at the point the trait is defined, but rather is determined anew each time the trait is mixed into a class or other trait.
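+A small sketch (the trait and class names are invented): the same trait is mixed into two unrelated classes:
+```scala
+trait Greeter {
+  def name: String                      // abstract member supplied by the mixing class
+  def greet(): String = s"Hello, $name"
+}
+
+class Person(val name: String) extends Greeter
+class Robot extends AnyRef with Greeter { val name = "R2" }
+
+new Person("Ada").greet() // "Hello, Ada"
+new Robot().greet()       // "Hello, R2"
+```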
-* #### modifier
+* ### modifier
A keyword that qualifies a class, trait, field, or method definition in some way. For example, the `private` modifier indicates that a class, trait, field, or method being defined is private.
-* #### multiple definitions
+* ### multiple definitions
The same expression can be assigned in _multiple definitions_ if you use the syntax `val v1, v2, v3 = exp`.
-* #### nonvariant
+* ### nonvariant
A type parameter of a class or trait is by default _nonvariant_. The class or trait then does not subtype when that parameter changes. For example, because class `Array` is nonvariant in its type parameter, `Array[String]` is neither a subtype nor a supertype of `Array[Any]`.
-* #### operation
+* ### operation
In Scala, every _operation_ is a method call. Methods may be invoked in _operator notation_, such as `b + 2`, and when in that notation, `+` is an _operator_.
-* #### parameter
+* ### parameter
Functions may take zero to many _parameters_. Each parameter has a name and a type. The distinction between parameters and arguments is that arguments refer to the actual objects passed when a function is invoked. Parameters are the variables that refer to those passed arguments.
-* #### parameterless function
+* ### parameterless function
A function that takes no parameters, which is defined without any empty parentheses. Invocations of parameterless functions may not supply parentheses. This supports the [uniform access principle](#uniform-access-principle), which enables the `def` to be changed into a `val` without requiring a change to client code.
-* #### parameterless method
+* ### parameterless method
A _parameterless method_ is a parameterless function that is a member of a class, trait, or singleton object.
-* #### parametric field
+* ### parametric field
A field defined as a class parameter.
-* #### partially applied function
+* ### partially applied function
A function that’s used in an expression and that is missing some of its arguments. For instance, if function `f` has type `Int => Int => Int`, then `f` and `f(1)` are _partially applied functions_.
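+Continuing that example as a small sketch:
+```scala
+val f: Int => Int => Int = x => y => x + y
+
+val g = f(1) // partially applied: still awaiting the final argument
+g(2)         // 3
+```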
-* #### path-dependent type
+* ### path-dependent type
A type like `swiss.cow.Food`. The `swiss.cow` part is a path that forms a reference to an object. The meaning of the type is sensitive to the path you use to access it. The types `swiss.cow.Food` and `fish.Food`, for example, are different types.
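+A minimal sketch of the idea (the types and names below are invented for illustration):
+```scala
+class Food
+class Grass extends Food
+
+abstract class Animal {
+  type SuitableFood <: Food
+  def eat(food: SuitableFood): Unit
+}
+
+class Cow extends Animal {
+  type SuitableFood = Grass
+  def eat(food: Grass): Unit = ()
+}
+
+val bessy = new Cow
+val meal: bessy.SuitableFood = new Grass // `bessy.SuitableFood` depends on the path `bessy`
+```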
-* #### pattern
+* ### pattern
In a `match` expression alternative, a _pattern_ follows each `case` keyword and precedes either a _pattern guard_ or the `=>` symbol.
-* #### pattern guard
+* ### pattern guard
In a `match` expression alternative, a _pattern guard_ can follow a [pattern](#pattern). For example, in “`case x if x % 2 == 0 => x + 1`”, the pattern guard is “`if x % 2 == 0`”. A case with a pattern guard will only be selected if the pattern matches and the pattern guard yields true.
-* #### predicate
+* ### predicate
A _predicate_ is a function with a `Boolean` result type.
-* #### primary constructor
+* ### primary constructor
The main constructor of a class, which invokes a superclass constructor, if necessary, initializes fields to passed values, and executes any top-level code defined between the curly braces of the class. Fields are initialized only for value parameters not passed to the superclass constructor, except for any that are not used in the body of the class and can therefore be optimized away.
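+A small sketch (the names are invented): the primary constructor’s parameters and the top-level code between the braces run when the class is instantiated:
+```scala
+class Account(val owner: String, initial: Int) { // primary constructor parameters
+  println(s"opening account for $owner")         // top-level code, runs at instantiation
+  private var balance = initial                  // field initialized from a parameter
+}
+
+new Account("Ada", 100) // prints "opening account for Ada"
+```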
-* #### procedure
+* ### procedure
A _procedure_ is a function with result type of `Unit`, which is therefore executed solely for its side effects.
-* #### reassignable
+* ### reassignable
A variable may or may not be _reassignable_. A `var` is reassignable while a `val` is not.
-* #### recursive
+* ### recursive
A function is _recursive_ if it calls itself. If the only place the function calls itself is the last expression of the function, then the function is [tail recursive](#tail-recursive).
-* #### reference
+* ### reference
A _reference_ is the Java abstraction of a pointer, which uniquely identifies an object that resides on the JVM’s heap. Reference type variables hold references to objects, because reference types (instances of `AnyRef`) are implemented as Java objects that reside on the JVM’s heap. Value type variables, by contrast, may sometimes hold a reference (to a boxed wrapper type) and sometimes not (when the object is being represented as a primitive value). Speaking generally, a Scala variable [refers](#refers) to an object. The term “refers” is more abstract than “holds a reference.” If a variable of type `scala.Int` is currently represented as a primitive Java `int` value, then that variable still refers to the `Int` object, but no reference is involved.
-* #### reference equality
+* ### reference equality
_Reference equality_ means that two references identify the very same Java object. Reference equality can be determined, for reference types only, by calling `eq` in `AnyRef`. (In Java programs, reference equality can be determined using `==` on Java [reference types](#reference-type).)
-* #### reference type
+* ### reference type
A _reference type_ is a subclass of `AnyRef`. Instances of reference types always reside on the JVM’s heap at run time.
-* #### referential transparency
+* ### referential transparency
A property of functions that are independent of temporal context and have no side effects. For a particular input, an invocation of a referentially transparent function can be replaced by its result without changing the program semantics.
-* #### refers
+* ### refers
A variable in a running Scala program always _refers_ to some object. Even if that variable is assigned to `null`, it conceptually refers to the `Null` object. At run time, an object may be implemented by a Java object or a value of a primitive type, but Scala allows programmers to think at a higher level of abstraction about their code as they imagine it running. See also [reference](#reference).
-* #### refinement type
+* ### refinement type
A type formed by supplying a base type with a number of members inside curly braces. The members in the curly braces refine the types that are present in the base type. For example, the type of “animal that eats grass” is `Animal { type SuitableFood = Grass }`.
-* #### result
+* ### result
An expression in a Scala program yields a _result_. The result of every expression in Scala is an object.
-* #### result type
+* ### result type
A method’s _result type_ is the type of the value that results from calling the method. (In Java, this concept is called the return type.)
-* #### return
+* ### return
A function in a Scala program _returns_ a value. You can call this value the [result](#result) of the function. You can also say the function _results in_ the value. The result of every function in Scala is an object.
-* #### runtime
+* ### runtime
The Java Virtual Machine, or [JVM](#jvm), that hosts a running Scala program. Runtime encompasses both the virtual machine, as defined by the Java Virtual Machine Specification, and the runtime libraries of the Java API and the standard Scala API. The phrase at run time (with a space between run and time) means when the program is running, and contrasts with compile time.
-* #### runtime type
+* ### runtime type
The type of an object at run time. To contrast, a [static type](#static-type) is the type of an expression at compile time. Most runtime types are simply bare classes with no type parameters. For example, the runtime type of `"Hi"` is `String`, and the runtime type of `(x: Int) => x + 1` is `Function1`. Runtime types can be tested with `isInstanceOf`.
-* #### script
+* ### script
A file containing top level definitions and statements, which can be run directly with `scala` without explicitly compiling. A script must end in an expression, not a definition.
-* #### selector
+* ### selector
The value being matched on in a `match` expression. For example, in “`s match { case _ => }`”, the selector is `s`.
-* #### self type
+* ### self type
A _self type_ of a trait is the assumed type of `this`, the receiver, to be used within the trait. Any concrete class that mixes in the trait must ensure that its type conforms to the trait’s self type. The most common use of self types is for dividing a large class into several traits (as described in Chapter 29 of [Programming in Scala](https://www.artima.com/shop/programming_in_scala)).
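+A minimal sketch (the trait names are invented): `Logging` declares a self type, so it can only be mixed into something that is also a `Service`:
+```scala
+trait Service {
+  def name: String
+}
+
+trait Logging { this: Service => // self type: `this` is assumed to be a Service
+  def log(msg: String): Unit = println(s"[$name] $msg")
+}
+
+class PaymentService extends Service with Logging {
+  val name = "payments"
+}
+```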
-* #### semi-structured data
+* ### semi-structured data
XML data is semi-structured. It is more structured than a flat binary file or text file, but it does not have the full structure of a programming language’s data structures.
-* #### serialization
-You can _serialize_ an object into a byte stream which can then be saved to files or transmitted over the network. You can later _deserialize_ the byte stream, even on different computer, and obtain an object that is the same as the original serialized object.
+* ### serialization
+You can _serialize_ an object into a byte stream which can then be saved to a file or transmitted over the network. You can later _deserialize_ the byte stream, even on a different computer, and obtain an object that is the same as the original serialized object.
-* #### shadow
+* ### shadow
A new declaration of a local variable _shadows_ one of the same name in an enclosing scope.
-* #### signature
+* ### signature
_Signature_ is short for [type signature](#type-signature).
-* #### singleton object
+* ### singleton object
An object defined with the `object` keyword. Each singleton object has one and only one instance. A singleton object that shares its name with a class, and is defined in the same source file as that class, is that class’s [companion object](#companion-object). The class is its [companion class](#companion-class). A singleton object that doesn’t have a companion class is a [standalone object](#standalone-object).
-* #### standalone object
+* ### standalone object
A [singleton object](#singleton-object) that has no [companion class](#companion-class).
-* #### statement
+* ### statement
An expression, definition, or import, _i.e._, things that can go into a template or a block in Scala source code.
-* #### static type
+* ### static type
See [type](#type).
-* #### structural type
+* ### structural type
A [refinement type](#refinement-type) where the refinements are for members not in the base type. For example, `{ def close(): Unit }` is a structural type, because the base type is `AnyRef`, and `AnyRef` does not have a member named `close`.
-* #### subclass
+* ### subclass
A class is a _subclass_ of all of its [superclasses](#superclass) and [supertraits](#supertrait).
-* #### subtrait
+* ### subtrait
A trait is a _subtrait_ of all of its [supertraits](#supertrait).
-* #### subtype
+* ### subtype
The Scala compiler will allow any of a type’s _subtypes_ to be used as a substitute wherever that type is required. For classes and traits that take no type parameters, the subtype relationship mirrors the subclass relationship. For example, if class `Cat` is a subclass of abstract class `Animal`, and neither takes type parameters, type `Cat` is a subtype of type `Animal`. Likewise, if trait `Apple` is a subtrait of trait `Fruit`, and neither takes type parameters, type `Apple` is a subtype of type `Fruit`. For classes and traits that take type parameters, however, variance comes into play. For example, because abstract class `List` is declared to be covariant in its lone type parameter (i.e., `List` is declared `List[+A]`), `List[Cat]` is a subtype of `List[Animal]`, and `List[Apple]` a subtype of `List[Fruit]`. These subtype relationships exist even though the class of each of these types is `List`. By contrast, because `Set` is not declared to be covariant in its type parameter (i.e., `Set` is declared `Set[A]` with no plus sign), `Set[Cat]` is not a subtype of `Set[Animal]`. A subtype should correctly implement the contracts of its supertypes, so that the Liskov Substitution Principle applies, but the compiler only verifies this property at the level of type checking.
-* #### superclass
+* ### superclass
A class’s _superclasses_ include its direct superclass, its direct superclass’s direct superclass, and so on, all the way up to `Any`.
-* #### supertrait
+* ### supertrait
A class’s or trait’s _supertraits_, if any, include all traits directly mixed into the class or trait or any of its superclasses, plus any supertraits of those traits.
-* #### supertype
+* ### supertype
A type is a _supertype_ of all of its subtypes.
-* #### synthetic class
+* ### synthetic class
A synthetic class is generated automatically by the compiler rather than being written by hand by the programmer.
-* #### tail recursive
+* ### tail recursive
A function is _tail recursive_ if the only place the function calls itself is the last operation of the function.
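+For example (a small sketch), the `scala.annotation.tailrec` annotation asks the compiler to verify that a function is tail recursive:
+```scala
+import scala.annotation.tailrec
+
+@tailrec
+def gcd(a: Int, b: Int): Int =
+  if (b == 0) a else gcd(b, a % b) // the recursive call is the last operation
+```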
-* #### target typing
+* ### target typing
_Target typing_ is a form of type inference that takes into account the type that’s expected. In `nums.filter((x) => x > 0)`, for example, the Scala compiler infers the type of `x` to be the element type of `nums`, because the `filter` method invokes the function on each element of `nums`.
-* #### template
+* ### template
A _template_ is the body of a class, trait, or singleton object definition. It defines the type signature, behavior and initial state of the class, trait, or object.
-* #### trait
+* ### trait
A _trait_, which is defined with the `trait` keyword, is like an abstract class that cannot take any value parameters and can be “mixed into” classes or other traits via the process known as [mixin composition](#mixin-composition). When a trait is being mixed into a class or trait, it is called a [mixin](#mixin). A trait may be parameterized with one or more types. When parameterized with types, the trait constructs a type. For example, `Set` is a trait that takes a single type parameter, whereas `Set[Int]` is a type. Also, `Set` is said to be “the trait of” type `Set[Int]`.
-* #### type
+* ### type
Every variable and expression in a Scala program has a _type_ that is known at compile time. A type restricts the possible values to which a variable can refer, or an expression can produce, at run time. A variable or expression’s type can also be referred to as a _static type_ if necessary to differentiate it from an object’s [runtime type](#runtime-type). In other words, “type” by itself means static type. Type is distinct from class because a class that takes type parameters can construct many types. For example, `List` is a class, but not a type. `List[T]` is a type with a free type parameter. `List[Int]` and `List[String]` are also types (called ground types because they have no free type parameters). A type can have a “[class](#class)” or “[trait](#trait).” For example, the class of type `List[Int]` is `List`. The trait of type `Set[String]` is `Set`.
-* #### type constraint
+* ### type constraint
Some [annotations](#annotation) are _type constraints_, meaning that they add additional limits, or constraints, on what values the type includes. For example, `@positive` could be a type constraint on the type `Int`, limiting the type of 32-bit integers down to those that are positive. Type constraints are not checked by the standard Scala compiler, but must instead be checked by an extra tool or by a compiler plugin.
-* #### type constructor
+* ### type constructor
A class or trait that takes type parameters.
-* #### type parameter
+* ### type parameter
A parameter to a generic class or generic method that must be filled in by a type. For example, class `List` is defined as “`class List[T] { . . . `”, and method `identity`, a member of object `Predef`, is defined as “`def identity[T](x:T) = x`”. The `T` in both cases is a type parameter.
-* #### type signature
+* ### type signature
A method’s _type signature_ comprises its name, the number, order, and types of its parameters, if any, and its result type. The type signature of a class, trait, or singleton object comprises its name, the type signatures of all of its members and constructors, and its declared inheritance and mixin relations.
-* #### uniform access principle
+* ### uniform access principle
The _uniform access principle_ states that variables and parameterless functions should be accessed using the same syntax. Scala supports this principle by not allowing parentheses to be placed at call sites of parameterless functions. As a result, a parameterless function definition can be changed to a `val`, or _vice versa_, without affecting client code.
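+A small sketch (the names are invented): client code calls `temperature` the same way whether it is a `def` or a `val`:
+```scala
+class Sensor {
+  def temperature: Double = 21.0 // could later become `val temperature = 21.0` without breaking callers
+}
+
+val s = new Sensor
+s.temperature // no parentheses at the call site
+```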
-* #### unreachable
+* ### unreachable
At the Scala level, objects can become _unreachable_, at which point the memory they occupy may be reclaimed by the runtime. Unreachable does not necessarily mean unreferenced. Reference types (instances of `AnyRef`) are implemented as objects that reside on the JVM’s heap. When an instance of a reference type becomes unreachable, it indeed becomes unreferenced, and is available for garbage collection. Value types (instances of `AnyVal`) are implemented as both primitive type values and as instances of Java wrapper types (such as `java.lang.Integer`), which reside on the heap. Value type instances can be boxed (converted from a primitive value to a wrapper object) and unboxed (converted from a wrapper object to a primitive value) throughout the lifetime of the variables that refer to them. If a value type instance currently represented as a wrapper object on the JVM’s heap becomes unreachable, it indeed becomes unreferenced, and is available for garbage collection. But if a value type currently represented as a primitive value becomes unreachable, then it does not become unreferenced, because it does not exist as an object on the JVM’s heap at that point in time. The runtime may reclaim memory occupied by unreachable objects, but if an `Int`, for example, is implemented at run time by a primitive Java `int` that occupies some memory in the stack frame of an executing method, then the memory for that object is “reclaimed” when the stack frame is popped as the method completes. Memory for reference types, such as `String`, may be reclaimed by the JVM’s garbage collector after they become unreachable.
-* #### unreferenced
+* ### unreferenced
See [unreachable](#unreachable).
-* #### value
+* ### value
The result of any computation or expression in Scala is a _value_, and in Scala, every value is an object. The term value essentially means the image of an object in memory (on the JVM’s heap or stack).
-* #### value type
+* ### value type
A _value type_ is any subclass of `AnyVal`, such as `Int`, `Double`, or `Unit`. This term has meaning at the level of Scala source code. At run time, instances of value types that correspond to Java primitive types may be implemented in terms of primitive type values or instances of wrapper types, such as `java.lang.Integer`. Over the lifetime of a value type instance, the runtime may transform it back and forth between primitive and wrapper types (_i.e._, to box and unbox it).
-* #### variable
+* ### variable
A named entity that refers to an object. A variable is either a `val` or a `var`. Both `val`s and `var`s must be initialized when defined, but only `var`s can be later reassigned to refer to a different object.
-* #### variance
+* ### variance
A type parameter of a class or trait can be marked with a _variance_ annotation, either [covariant](#covariant) (+) or [contravariant](#contravariant) (-). Such variance annotations indicate how subtyping works for a generic class or trait. For example, the generic class `List` is covariant in its type parameter, and thus `List[String]` is a subtype of `List[Any]`. By default, _i.e._, absent a `+` or `-` annotation, type parameters are [nonvariant](#nonvariant).
-* #### yield
+* ### yield
An expression can _yield_ a result. The `yield` keyword designates the result of a [for comprehension](#for-comprehension).
diff --git a/_includes/_markdown/_ru/install-cask.md b/_includes/_markdown/_ru/install-cask.md
new file mode 100644
index 0000000000..1cac104c20
--- /dev/null
+++ b/_includes/_markdown/_ru/install-cask.md
@@ -0,0 +1,45 @@
+{% altDetails require-info-box 'Установка Cask' %}
+
+{% tabs cask-install class=tabs-build-tool %}
+
+{% tab 'Scala CLI' %}
+
+Вы можете объявить зависимость от Cask с помощью следующей директивы `using`:
+
+```scala
+//> using dep com.lihaoyi::cask::0.10.2
+```
+
+{% endtab %}
+
+{% tab 'sbt' %}
+
+В файле `build.sbt` вы можете добавить зависимость от Cask:
+
+```scala
+lazy val example = project.in(file("example"))
+ .settings(
+ scalaVersion := "3.4.2",
+ libraryDependencies += "com.lihaoyi" %% "cask" % "0.10.2",
+ fork := true
+ )
+```
+
+{% endtab %}
+
+{% tab 'Mill' %}
+
+В файле `build.sc` вы можете добавить зависимость от Cask:
+
+```scala
+object example extends RootModule with ScalaModule {
+ def scalaVersion = "3.4.2"
+ def ivyDeps = Agg(
+ ivy"com.lihaoyi::cask::0.10.2"
+ )
+}
+```
+{% endtab %}
+
+{% endtabs %}
+{% endaltDetails %}
diff --git a/_includes/_markdown/_ru/install-munit.md b/_includes/_markdown/_ru/install-munit.md
new file mode 100644
index 0000000000..aa15142558
--- /dev/null
+++ b/_includes/_markdown/_ru/install-munit.md
@@ -0,0 +1,68 @@
+{% altDetails install-info-box 'Установка MUnit' %}
+
+{% tabs munit-unit-test-1 class=tabs-build-tool %}
+{% tab 'Scala CLI' %}
+
+Вы можете запросить весь набор инструментов одной командой:
+
+```scala
+//> using toolkit latest
+```
+
+MUnit, будучи тестовым фреймворком, доступен только в тестовых файлах:
+файлах в каталоге `test` или тех, которые имеют расширение `.test.scala`.
+Подробнее о тестовой области (test scope) см. [в документации Scala CLI](https://scala-cli.virtuslab.org/docs/commands/test/).
+
+В качестве альтернативы вы можете запросить только определенную версию MUnit:
+
+```scala
+//> using dep org.scalameta::munit:1.1.0
+```
+
+{% endtab %}
+
+{% tab 'sbt' %}
+
+В файле `build.sbt` вы можете добавить зависимость от toolkit-test:
+
+```scala
+lazy val example = project.in(file("."))
+ .settings(
+ scalaVersion := "3.4.2",
+ libraryDependencies += "org.scala-lang" %% "toolkit-test" % "0.7.0" % Test
+ )
+```
+
+Здесь конфигурация `Test` означает, что зависимость используется только исходными файлами в `src/test`.
+
+В качестве альтернативы вы можете запросить только определенную версию MUnit:
+
+```scala
+libraryDependencies += "org.scalameta" %% "munit" % "1.1.0" % Test
+```
+{% endtab %}
+
+{% tab 'Mill' %}
+
+В файле `build.sc` вы можете добавить объект `test`, расширяющий `Tests` и `TestModule.Munit`:
+
+```scala
+object example extends ScalaModule {
+ def scalaVersion = "3.4.2"
+ object test extends Tests with TestModule.Munit {
+ def ivyDeps =
+ Agg(
+ ivy"org.scala-lang::toolkit-test:0.7.0"
+ )
+ }
+}
+```
+
+В качестве альтернативы вы можете запросить только определенную версию MUnit:
+
+```scala
+ivy"org.scalameta::munit:1.1.0"
+```
+{% endtab %}
+{% endtabs %}
+{% endaltDetails %}
\ No newline at end of file
diff --git a/_includes/_markdown/_ru/install-os-lib.md b/_includes/_markdown/_ru/install-os-lib.md
new file mode 100644
index 0000000000..f010d1f7fd
--- /dev/null
+++ b/_includes/_markdown/_ru/install-os-lib.md
@@ -0,0 +1,64 @@
+{% altDetails require-info-box 'Установка OS-Lib' %}
+
+{% tabs oslib-install class=tabs-build-tool %}
+
+{% tab 'Scala CLI' %}
+
+Вы можете запросить весь набор инструментов одной командой:
+
+```scala
+//> using toolkit latest
+```
+
+В качестве альтернативы вы можете запросить только определенную версию OS-Lib:
+
+```scala
+//> using dep com.lihaoyi::os-lib:0.11.3
+```
+
+{% endtab %}
+
+{% tab 'sbt' %}
+
+В файле `build.sbt` вы можете добавить зависимость от `toolkit`:
+
+```scala
+lazy val example = project.in(file("."))
+ .settings(
+ scalaVersion := "3.4.2",
+ libraryDependencies += "org.scala-lang" %% "toolkit" % "0.7.0"
+ )
+```
+
+В качестве альтернативы вы можете запросить только определенную версию OS-Lib:
+
+```scala
+libraryDependencies += "com.lihaoyi" %% "os-lib" % "0.11.3"
+```
+
+{% endtab %}
+
+{% tab 'Mill' %}
+
+В файле `build.sc` вы можете добавить зависимость от `toolkit`:
+
+```scala
+object example extends ScalaModule {
+ def scalaVersion = "3.4.2"
+ def ivyDeps =
+ Agg(
+ ivy"org.scala-lang::toolkit:0.7.0"
+ )
+}
+```
+
+В качестве альтернативы вы можете запросить только определенную версию OS-Lib:
+
+```scala
+ivy"com.lihaoyi::os-lib:0.11.3"
+```
+
+{% endtab %}
+
+{% endtabs %}
+{% endaltDetails %}
\ No newline at end of file
diff --git a/_includes/_markdown/_ru/install-sttp.md b/_includes/_markdown/_ru/install-sttp.md
new file mode 100644
index 0000000000..fec7938cea
--- /dev/null
+++ b/_includes/_markdown/_ru/install-sttp.md
@@ -0,0 +1,64 @@
+
+{% altDetails install-info-box 'Установка sttp' %}
+
+{% tabs sttp-install-methods class=tabs-build-tool%}
+
+{% tab 'Scala CLI' %}
+
+Вы можете запросить весь набор инструментов одной командой:
+
+```scala
+//> using toolkit latest
+```
+
+В качестве альтернативы вы можете запросить только определенную версию sttp:
+
+```scala
+//> using dep com.softwaremill.sttp.client4::core:4.0.0-RC1
+```
+
+{% endtab %}
+
+{% tab 'sbt' %}
+
+В файле `build.sbt` вы можете добавить зависимость от `toolkit`:
+
+```scala
+lazy val example = project.in(file("."))
+ .settings(
+ scalaVersion := "3.4.2",
+ libraryDependencies += "org.scala-lang" %% "toolkit" % "0.7.0"
+ )
+```
+
+В качестве альтернативы вы можете запросить только определенную версию sttp:
+
+```scala
+libraryDependencies += "com.softwaremill.sttp.client4" %% "core" % "4.0.0-RC1"
+```
+
+{% endtab %}
+
+{% tab 'Mill' %}
+
+В файле `build.sc` вы можете добавить зависимость от `toolkit`:
+
+```scala
+object example extends ScalaModule {
+ def scalaVersion = "3.4.2"
+ def ivyDeps =
+ Agg(
+ ivy"org.scala-lang::toolkit:0.7.0"
+ )
+}
+```
+
+В качестве альтернативы вы можете запросить только определенную версию sttp:
+
+```scala
+ivy"com.softwaremill.sttp.client4::core:4.0.0-RC1"
+```
+
+{% endtab %}
+{% endtabs %}
+{% endaltDetails %}
diff --git a/_includes/_markdown/_ru/install-upickle.md b/_includes/_markdown/_ru/install-upickle.md
new file mode 100644
index 0000000000..83880a91a8
--- /dev/null
+++ b/_includes/_markdown/_ru/install-upickle.md
@@ -0,0 +1,64 @@
+
+{% altDetails install-info-box 'Установка upickle' %}
+
+{% tabs upickle-install-methods class=tabs-build-tool %}
+
+{% tab 'Scala CLI' %}
+
+Вы можете запросить весь набор инструментов одной командой:
+
+```scala
+//> using toolkit latest
+```
+
+В качестве альтернативы вы можете запросить только определенную версию UPickle:
+
+```scala
+//> using dep com.lihaoyi::upickle:4.1.0
+```
+
+{% endtab %}
+
+{% tab 'sbt' %}
+
+В файле `build.sbt` вы можете добавить зависимость от `toolkit`:
+
+```scala
+lazy val example = project.in(file("."))
+ .settings(
+ scalaVersion := "3.4.2",
+ libraryDependencies += "org.scala-lang" %% "toolkit" % "0.7.0"
+ )
+```
+
+В качестве альтернативы вы можете запросить только определенную версию UPickle:
+
+```scala
+libraryDependencies += "com.lihaoyi" %% "upickle" % "4.1.0"
+```
+
+{% endtab %}
+
+{% tab 'Mill' %}
+
+В файле `build.sc` вы можете добавить зависимость от `toolkit`:
+
+```scala
+object example extends ScalaModule {
+ def scalaVersion = "3.4.2"
+ def ivyDeps =
+ Agg(
+ ivy"org.scala-lang::toolkit:0.7.0"
+ )
+}
+```
+
+В качестве альтернативы вы можете запросить только определенную версию UPickle:
+
+```scala
+ivy"com.lihaoyi::upickle:4.1.0"
+```
+
+{% endtab %}
+{% endtabs %}
+{% endaltDetails %}
diff --git a/_includes/_markdown/courses-coursera.md b/_includes/_markdown/courses-coursera.md
new file mode 100644
index 0000000000..403c5e3100
--- /dev/null
+++ b/_includes/_markdown/courses-coursera.md
@@ -0,0 +1,18 @@
+## Scala Courses on Coursera by EPFL
+
+The [Scala Center](https://scala.epfl.ch) at EPFL offers free online courses of various levels, from beginner to advanced.
+
+For beginners:
+
+- [Effective Programming in Scala](https://www.coursera.org/learn/effective-scala): a practical introduction to Scala for professional developers
+- [Functional Programming Principles in Scala](https://www.coursera.org/learn/scala-functional-programming): the foundational course by Martin Odersky, Scala's creator
+
+More advanced topics:
+
+- [Functional Program Design in Scala](https://www.coursera.org/learn/scala-functional-program-design): builds on functional principles with more advanced concepts
+- [Parallel Programming](https://www.coursera.org/learn/scala-parallel-programming)
+- [Big Data Analysis with Scala and Spark](https://www.coursera.org/learn/scala-spark-big-data)
+- [Programming Reactive Systems](https://www.coursera.org/learn/scala-akka-reactive): introduces Akka, actors and reactive streams
+
+All courses are free to audit, with an option to pay for a certificate to showcase your skills on your resume or LinkedIn.
+For more on Scala Center's online courses, visit [this page](https://docs.scala-lang.org/online-courses.html#learning-platforms).
diff --git a/_includes/_markdown/courses-extension-school.md b/_includes/_markdown/courses-extension-school.md
new file mode 100644
index 0000000000..003c42a4f2
--- /dev/null
+++ b/_includes/_markdown/courses-extension-school.md
@@ -0,0 +1,9 @@
+## EPFL Extension School: Effective Programming in Scala
+
+Subscribing to [Effective programming in Scala](https://www.epfl.ch/education/continuing-education/effective-programming-in-scala/) on the EPFL Extension School offers:
+
+- Regular Q&A sessions and code reviews with experts from the Scala team
+- An [Extension School certificate](https://www.epfl.ch/education/continuing-education/certifications/) upon completion
+
+This course combines video lessons, written content, and hands-on exercises focused on practical aspects, including business domain modeling, error handling, data manipulation, and task parallelization.
+For more on Scala Center's online courses, visit [this page](https://docs.scala-lang.org/online-courses.html#learning-platforms).
diff --git a/_includes/_markdown/courses-rock-the-jvm.md b/_includes/_markdown/courses-rock-the-jvm.md
new file mode 100644
index 0000000000..0b0db4f9f1
--- /dev/null
+++ b/_includes/_markdown/courses-rock-the-jvm.md
@@ -0,0 +1,17 @@
+## Rock the JVM Courses
+
+_As part of a partnership with the Scala Center, Rock the JVM donates 30% of the revenue from any courses purchased through the links in this section to support the Scala Center._
+
+[Rock the JVM](https://rockthejvm.com?affcode=256201_r93i1xuv) is a learning platform with free and premium courses on the Scala language, and all major libraries and tools in the Scala ecosystem: Typelevel, ZIO, Akka/Pekko, Spark, and others.
+Its main Scala courses are:
+
+- [Scala at Light Speed](https://rockthejvm.com/courses/scala-at-light-speed?affcode=256201_r93i1xuv) (free)
+- [Scala & Functional Programming Essentials](https://rockthejvm.com/courses/scala-essentials?affcode=256201_r93i1xuv) (premium)
+- [Advanced Scala and Functional Programming](https://rockthejvm.com/courses/advanced-scala?affcode=256201_r93i1xuv) (premium)
+- [Scala Macros & Metaprogramming](https://rockthejvm.com/courses/scala-macros-and-metaprogramming?affcode=256201_r93i1xuv) (premium)
+
+Other courses teach how to build full-stack Scala applications, using [Typelevel](https://rockthejvm.com/courses/typelevel-rite-of-passage?affcode=256201_r93i1xuv) or [ZIO](https://rockthejvm.com/courses/zio-rite-of-passage?affcode=256201_r93i1xuv) ecosystems.
+
+
+
+Explore more premium [courses](https://rockthejvm.com/courses?affcode=256201_r93i1xuv) or check out [free video tutorials](https://youtube.com/rockthejvm?affcode=256201_r93i1xuv) and [free articles](https://rockthejvm.com/articles?affcode=256201_r93i1xuv).
diff --git a/_includes/_markdown/install-cask.md b/_includes/_markdown/install-cask.md
new file mode 100644
index 0000000000..3637ddfac9
--- /dev/null
+++ b/_includes/_markdown/install-cask.md
@@ -0,0 +1,37 @@
+{% altDetails require-info-box 'Getting Cask' %}
+
+{% tabs cask-install class=tabs-build-tool %}
+
+{% tab 'Scala CLI' %}
+You can declare a dependency on Cask with the following `using` directive:
+```scala
+//> using dep com.lihaoyi::cask::0.10.2
+```
+{% endtab %}
+
+{% tab 'sbt' %}
+In your `build.sbt`, you can add a dependency on Cask:
+```scala
+lazy val example = project.in(file("example"))
+ .settings(
+ scalaVersion := "3.4.2",
+ libraryDependencies += "com.lihaoyi" %% "cask" % "0.10.2",
+ fork := true
+ )
+```
+{% endtab %}
+
+{% tab 'Mill' %}
+In your `build.sc`, you can add a dependency on Cask:
+```scala
+object example extends RootModule with ScalaModule {
+ def scalaVersion = "3.4.2"
+ def ivyDeps = Agg(
+ ivy"com.lihaoyi::cask::0.10.2"
+ )
+}
+```
+{% endtab %}
+
+{% endtabs %}
+{% endaltDetails %}
diff --git a/_includes/_markdown/install-munit.md b/_includes/_markdown/install-munit.md
new file mode 100644
index 0000000000..47eeb1509f
--- /dev/null
+++ b/_includes/_markdown/install-munit.md
@@ -0,0 +1,53 @@
+{% altDetails install-info-box 'Getting MUnit' %}
+
+{% tabs munit-unit-test-1 class=tabs-build-tool %}
+{% tab 'Scala CLI' %}
+You can require the entire toolkit in a single line:
+```scala
+//> using toolkit latest
+```
+MUnit, being a testing framework, is only available in test files: files in a `test` directory or ones that have the `.test.scala` extension. Refer to the [Scala CLI documentation](https://scala-cli.virtuslab.org/docs/commands/test/) to learn more about the test scope.
+
+Alternatively, you can require just a specific version of MUnit:
+```scala
+//> using dep org.scalameta::munit:1.1.0
+```
+{% endtab %}
+{% tab 'sbt' %}
+In your build.sbt file, you can add the dependency on toolkit-test:
+```scala
+lazy val example = project.in(file("."))
+ .settings(
+ scalaVersion := "3.4.2",
+ libraryDependencies += "org.scala-lang" %% "toolkit-test" % "0.7.0" % Test
+ )
+```
+
+Here the `Test` configuration means that the dependency is only used by the source files in `src/test`.
+
+Alternatively, you can require just a specific version of MUnit:
+```scala
+libraryDependencies += "org.scalameta" %% "munit" % "1.1.0" % Test
+```
+{% endtab %}
+{% tab 'Mill' %}
+In your build.sc file, you can add a `test` object extending `Tests` and `TestModule.Munit`:
+```scala
+object example extends ScalaModule {
+ def scalaVersion = "3.4.2"
+ object test extends Tests with TestModule.Munit {
+ def ivyDeps =
+ Agg(
+ ivy"org.scala-lang::toolkit-test:0.7.0"
+ )
+ }
+}
+```
+
+Alternatively, you can require just a specific version of MUnit:
+```scala
+ivy"org.scalameta::munit:1.1.0"
+```
+{% endtab %}
+{% endtabs %}
+{% endaltDetails %}
diff --git a/_includes/_markdown/install-os-lib.md b/_includes/_markdown/install-os-lib.md
new file mode 100644
index 0000000000..ae254d9d71
--- /dev/null
+++ b/_includes/_markdown/install-os-lib.md
@@ -0,0 +1,46 @@
+{% altDetails require-info-box 'Getting OS-Lib' %}
+
+{% tabs oslib-install class=tabs-build-tool %}
+{% tab 'Scala CLI' %}
+You can require the entire toolkit in a single line:
+```scala
+//> using toolkit latest
+```
+
+Alternatively, you can require just a specific version of OS-Lib:
+```scala
+//> using dep com.lihaoyi::os-lib:0.11.3
+```
+{% endtab %}
+{% tab 'sbt' %}
+In your `build.sbt`, you can add a dependency on the toolkit:
+```scala
+lazy val example = project.in(file("."))
+ .settings(
+ scalaVersion := "3.4.2",
+ libraryDependencies += "org.scala-lang" %% "toolkit" % "0.7.0"
+ )
+```
+Alternatively, you can require just a specific version of OS-Lib:
+```scala
+libraryDependencies += "com.lihaoyi" %% "os-lib" % "0.11.3"
+```
+{% endtab %}
+{% tab 'Mill' %}
+In your `build.sc` file, you can add a dependency on the Toolkit:
+```scala
+object example extends ScalaModule {
+ def scalaVersion = "3.4.2"
+ def ivyDeps =
+ Agg(
+ ivy"org.scala-lang::toolkit:0.7.0"
+ )
+}
+```
+Alternatively, you can require just a specific version of OS-Lib:
+```scala
+ivy"com.lihaoyi::os-lib:0.11.3"
+```
+{% endtab %}
+{% endtabs %}
+{% endaltDetails %}
diff --git a/_includes/_markdown/install-sttp.md b/_includes/_markdown/install-sttp.md
new file mode 100644
index 0000000000..0173ec47e1
--- /dev/null
+++ b/_includes/_markdown/install-sttp.md
@@ -0,0 +1,47 @@
+{% altDetails install-info-box 'Getting sttp' %}
+
+{% tabs sttp-install-methods class=tabs-build-tool%}
+{% tab 'Scala CLI' %}
+You can require the entire toolkit in a single line:
+```scala
+//> using toolkit latest
+```
+
+Alternatively, you can require just a specific version of sttp:
+```scala
+//> using dep com.softwaremill.sttp.client4::core:4.0.0-RC1
+```
+{% endtab %}
+{% tab 'sbt' %}
+In your build.sbt file, you can add a dependency on the Toolkit:
+```scala
+lazy val example = project.in(file("."))
+ .settings(
+ scalaVersion := "3.4.2",
+ libraryDependencies += "org.scala-lang" %% "toolkit" % "0.7.0"
+ )
+```
+
+Alternatively, you can require just a specific version of sttp:
+```scala
+libraryDependencies += "com.softwaremill.sttp.client4" %% "core" % "4.0.0-RC1"
+```
+{% endtab %}
+{% tab 'Mill' %}
+In your build.sc file, you can add a dependency on the Toolkit:
+```scala
+object example extends ScalaModule {
+ def scalaVersion = "3.4.2"
+ def ivyDeps =
+ Agg(
+ ivy"org.scala-lang::toolkit:0.7.0"
+ )
+}
+```
+Alternatively, you can require just a specific version of sttp:
+```scala
+ivy"com.softwaremill.sttp.client4::core:4.0.0-RC1"
+```
+{% endtab %}
+{% endtabs %}
+{% endaltDetails %}
diff --git a/_includes/_markdown/install-upickle.md b/_includes/_markdown/install-upickle.md
new file mode 100644
index 0000000000..9f9cff8a62
--- /dev/null
+++ b/_includes/_markdown/install-upickle.md
@@ -0,0 +1,46 @@
+{% altDetails install-info-box 'Getting upickle' %}
+
+{% tabs upickle-install-methods class=tabs-build-tool %}
+{% tab 'Scala CLI' %}
+Using Scala CLI, you can require the entire toolkit in a single line:
+```scala
+//> using toolkit latest
+```
+
+Alternatively, you can require just a specific version of UPickle:
+```scala
+//> using dep com.lihaoyi::upickle:4.1.0
+```
+{% endtab %}
+{% tab 'sbt' %}
+In your build.sbt file, you can add the dependency on the Toolkit:
+```scala
+lazy val example = project.in(file("."))
+ .settings(
+ scalaVersion := "3.4.2",
+ libraryDependencies += "org.scala-lang" %% "toolkit" % "0.7.0"
+ )
+```
+Alternatively, you can require just a specific version of UPickle:
+```scala
+libraryDependencies += "com.lihaoyi" %% "upickle" % "4.1.0"
+```
+{% endtab %}
+{% tab 'Mill' %}
+In your build.sc file, you can add a dependency on the Toolkit (which includes UPickle):
+```scala
+object example extends ScalaModule {
+ def scalaVersion = "3.4.2"
+ def ivyDeps =
+ Agg(
+ ivy"org.scala-lang::toolkit:0.7.0"
+ )
+}
+```
+Alternatively, you can require just a specific version of UPickle:
+```scala
+ivy"com.lihaoyi::upickle:4.1.0"
+```
+{% endtab %}
+{% endtabs %}
+{% endaltDetails %}
diff --git a/_includes/alert-banner.html b/_includes/alert-banner.html
new file mode 100644
index 0000000000..94c5ac1273
--- /dev/null
+++ b/_includes/alert-banner.html
@@ -0,0 +1,10 @@
+{% comment %}use the variable 'message' to include markdown text to display in the alert.{% endcomment %}
+
+{% unless include.message_id == 'disabled' %}
+
+
diff --git a/_includes/column-list-of-items.html b/_includes/column-list-of-items.html
deleted file mode 100644
index eb9e1600be..0000000000
--- a/_includes/column-list-of-items.html
+++ /dev/null
@@ -1,18 +0,0 @@
-{% comment %}
- Layouts using this include should pass an include variable called 'collection' referencing a collection carrying the data (i.e.: contribute_community_tickets, contribute_resources...)
-{% endcomment %}
-
\ No newline at end of file
diff --git a/_includes/documentation-sections.html b/_includes/documentation-sections.html
new file mode 100644
index 0000000000..cac3c2d21b
--- /dev/null
+++ b/_includes/documentation-sections.html
@@ -0,0 +1,18 @@
+
-
\ No newline at end of file
+
diff --git a/_includes/version-specific-notice.html b/_includes/version-specific-notice.html
new file mode 100644
index 0000000000..4a92f84a6d
--- /dev/null
+++ b/_includes/version-specific-notice.html
@@ -0,0 +1,31 @@
+{% if include.language %}
+
+
+ {% if include.language == 'scala3' %}
+ {% if include.page-language == 'ru' %}
+ Эта страница документа относится к Scala 3 и
+ может охватывать новые концепции, недоступные в Scala 2.
+ Если не указано явно, все примеры кода на этой странице
+ предполагают, что вы используете Scala 3.
+ {% else %}
+ This doc page is specific to Scala 3,
+ and may cover new concepts not available in Scala 2. Unless
+ otherwise stated, all the code examples in this page assume
+ you are using Scala 3.
+ {% endif %}
+ {% else if include.language == 'scala2' %}
+ {% if include.page-language == 'ru' %}
+ Эта страница документа относится к функциям, представленным в Scala 2,
+ которые либо были удалены в Scala 3, либо заменены альтернативными.
+ Если не указано явно, все примеры кода на этой странице предполагают,
+ что вы используете Scala 2.
+ {% else %}
+ This doc page is specific to features shipped in Scala 2,
+ which have either been removed in Scala 3 or replaced by an
+ alternative. Unless otherwise stated, all the code examples
+ in this page assume you are using Scala 2.
+ {% endif %}
+ {% endif %}
+
+
+{% endif %}
diff --git a/_it/getting-started/sbt-track/getting-started-with-scala-and-sbt-on-the-command-line.md b/_it/getting-started/sbt-track/getting-started-with-scala-and-sbt-on-the-command-line.md
new file mode 100644
index 0000000000..0de0347ca5
--- /dev/null
+++ b/_it/getting-started/sbt-track/getting-started-with-scala-and-sbt-on-the-command-line.md
@@ -0,0 +1,79 @@
+---
+title: Primi passi su scala e sbt con la linea di comando
+layout: singlepage-overview
+partof: getting-started-with-scala-and-sbt-on-the-command-line
+language: it
+disqus: true
+next-page: /it/testing-scala-with-sbt-on-the-command-line
+---
+
+In questo tutorial si vedrà come creare un progetto Scala a partire da un template, che può essere usato come punto di partenza anche per progetti personali.
+Lo strumento utilizzato per tale scopo è [sbt](https://www.scala-sbt.org/1.x/docs/index.html), che è lo standard di build per Scala.
+sbt permette di compilare, eseguire e testare i tuoi progetti, ma permette di svolgere anche altri compiti.
+Si presuppone una conoscenza dell'uso della linea di comando.
+
+## Installazione
+1. Assicurarsi di avere la Java 8 JDK (conosciuta anche come 1.8) installata
+ * Per verificarlo, eseguire `javac -version` da linea di comando e controllare che nell'output sia riportato
+ `javac 1.8.___`
+ * Se non si possiede la versione 1.8 o superiore, installarla seguendo [queste indicazioni](https://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html)
+1. Installare sbt
+ * [Mac](https://www.scala-sbt.org/1.x/docs/Installing-sbt-on-Mac.html)
+ * [Windows](https://www.scala-sbt.org/1.x/docs/Installing-sbt-on-Windows.html)
+ * [Linux](https://www.scala-sbt.org/1.x/docs/Installing-sbt-on-Linux.html)
+
+## Creare il progetto
+1. Eseguire il comando `cd` specificando una cartella vuota per spostarsi in essa.
+1. Eseguire il comando `sbt new scala/hello-world.g8`. Questo scaricherà il template 'hello-world' da GitHub.
+ Si occuperà inoltre di creare la cartella `target`, che per ora può essere ignorata.
+1. Quando verrà richiesto il nome dell'applicazione, indicare `hello-world`. In questo modo verrà creato un progetto chiamato "hello-world".
+1. Osserviamo cosa è stato generato una volta eseguiti i passaggi sopra riportati:
+
+```
+- hello-world
+ - project (sbt usa questa cartella per installare e gestire plugins e dipendenze)
+ - build.properties
+ - src
+ - main
+ - scala (Tutto il codice scala che viene scritto dovrà andare qui)
+ - Main.scala (Entry point dell'applicazione) <-- per ora è tutto ciò che ci servirà
+ - build.sbt (il file di definizione della build interpretato da sbt)
+```
+
+Una volta che verrà buildato il progetto, sbt creerà diverse cartelle `target` per i file generati. Possono essere ignorate per lo scopo di questo tutorial.
+
+## Eseguire il progetto
+1. `cd` nella cartella `hello-world`.
+1. Lanciare il comando `sbt`. Questo aprirà la console di sbt.
+1. Eseguire `~run`. Il carattere `~` è opzionale: indica a sbt di rieseguire il comando ad ogni salvataggio di un file, permettendo un ciclo di modifica, esecuzione e debug più veloce. sbt genererà anche una cartella chiamata `target`, che può essere ignorata.
+
+## Modificare il codice
+1. Aprire il file `src/main/scala/Main.scala` in un qualsiasi editor di testo.
+1. Modificare "Hello, World!" in "Hello, New York!"
+1. Se non è stato interrotto il comando sbt, dovrebbe ora apparire "Hello, New York!" sulla console.
+1. Si può continuare a modificare il file, e le modifiche dovrebbero apparire a schermo se non vengono riportati errori.
+
+## Aggiungere una dipendenza
+Vediamo ora come utilizzare librerie pubblicate da terzi per aggiungere ulteriori funzionalità alle nostre applicazioni.
+
+1. Aprire il file `build.sbt` con un qualsiasi editor di testo e aggiungere la seguente riga:
+
+```
+libraryDependencies += "org.scala-lang.modules" %% "scala-parser-combinators" % "1.1.2"
+```
+`libraryDependencies` è un set (un tipo di collection in Scala) e, utilizzando il simbolo `+=`,
+si sta aggiungendo la dipendenza [scala-parser-combinators](https://github.com/scala/scala-parser-combinators) al set di dipendenze che sbt scaricherà quando verrà inizializzato.
+Una volta eseguito questo passaggio, sarà possibile importare classi, object ed altro da scala-parser-combinators tramite una semplice istruzione di import.
+
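+Ad esempio, una volta ricaricato il progetto, la libreria può essere usata con un semplice import. Quello che segue è solo uno schizzo minimale a scopo illustrativo:
+
+```
+import scala.util.parsing.combinator._
+
+// un parser minimale che riconosce una parola in minuscolo
+object SimpleParser extends RegexParsers {
+  def word: Parser[String] = """[a-z]+""".r
+}
+
+println(SimpleParser.parseAll(SimpleParser.word, "hello")) // stampa un ParseResult di successo
+```
+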
+Ulteriori librerie pubblicate possono essere trovate sul sito
+[Scaladex](https://index.scala-lang.org/), dove è possibile copiare le informazioni delle dipendenze cercate nel file `build.sbt`.
+
+## Next steps
+
+Si consiglia di proseguire con il tutorial successivo della serie _getting started with sbt_, ed imparare a [testare il codice Scala con sbt tramite linea di comando](testing-scala-with-sbt-on-the-command-line.html).
+
+**oppure**
+
+- Continuare ad imparare Scala online e in maniera interattiva su
+ [Scala Exercises](https://www.scala-exercises.org/scala_tutorial).
+- Imparare le feature di Scala tramite articoli più concisi su [Tour of Scala]({{ site.baseurl }}/tour/tour-of-scala.html).
\ No newline at end of file
diff --git a/_it/getting-started/sbt-track/testing-scala-with-sbt-on-the-command-line.md b/_it/getting-started/sbt-track/testing-scala-with-sbt-on-the-command-line.md
new file mode 100644
index 0000000000..cac6f0953a
--- /dev/null
+++ b/_it/getting-started/sbt-track/testing-scala-with-sbt-on-the-command-line.md
@@ -0,0 +1,101 @@
+---
+title: Testare scala con sbt da linea di comando
+layout: singlepage-overview
+partof: testing-scala-with-sbt-on-the-command-line
+language: it
+disqus: true
+previous-page: /it/getting-started-with-scala-and-sbt-on-the-command-line
+---
+
+Ci sono diverse librerie e modalità per testare il codice Scala, ma in questo tutorial verrà mostrato come eseguire il testing usando [AnyFunSuite](https://www.scalatest.org/scaladoc/3.2.2/org/scalatest/funsuite/AnyFunSuite.html) del framework ScalaTest.
+Si assume che si sappia [creare un progetto Scala con sbt](getting-started-with-scala-and-sbt-on-the-command-line.html).
+
+## Setup
+1. Da linea di comando, creare una nuova directory in una posizione a propria scelta.
+1. `cd` nella cartella appena creata ed eseguire `sbt new scala/scalatest-example.g8`
+1. Quando richiesto, rinominare il progetto come `ScalaTestTutorial`.
+1. Il progetto include già la libreria ScalaTest come dipendenza, indicata nel file `build.sbt`.
+1. `cd` nel progetto ed eseguire `sbt test`. Questo eseguirà la test suite
+`CubeCalculatorTest` con un unico test chiamato `CubeCalculator.cube`.
+
+```
+sbt test
+[info] Loading global plugins from /Users/username/.sbt/0.13/plugins
+[info] Loading project definition from /Users/username/workspace/sandbox/my-something-project/project
+[info] Set current project to scalatest-example (in build file:/Users/username/workspace/sandbox/my-something-project/)
+[info] CubeCalculatorTest:
+[info] - CubeCalculator.cube
+[info] Run completed in 267 milliseconds.
+[info] Total number of tests run: 1
+[info] Suites: completed 1, aborted 0
+[info] Tests: succeeded 1, failed 0, canceled 0, ignored 0, pending 0
+[info] All tests passed.
+[success] Total time: 1 s, completed Feb 2, 2017 7:37:31 PM
+```
+
+## Comprendere i test
+1. In qualsiasi editor di testo aprire i seguenti due file:
+ * `src/main/scala/CubeCalculator.scala`
+ * `src/test/scala/CubeCalculatorTest.scala`
+1. Nel file `CubeCalculator.scala`, è riportata la definizione della funzione `cube`.
+1. Nel file `CubeCalculatorTest.scala`, è presente una classe chiamata allo stesso modo dell'oggetto che stiamo testando.
+
+```
+ import org.scalatest.funsuite.AnyFunSuite
+
+ class CubeCalculatorTest extends AnyFunSuite {
+ test("CubeCalculator.cube") {
+ assert(CubeCalculator.cube(3) === 27)
+ }
+ }
+```
+
+Analizziamo ogni riga di codice.
+
+* `class CubeCalculatorTest` significa che stiamo testando l'oggetto `CubeCalculator`
+* `extends AnyFunSuite` ci permette di utilizzare la funzionalità della classe AnyFunSuite, come ad esempio la funzione `test`
+* `test` è una funzione proveniente da AnyFunSuite che raccoglie i risultati delle asserzioni all'interno del corpo della funzione.
+* `"CubeCalculator.cube"` è il nome del test. Può essere chiamato in qualsiasi modo, ma la convenzione è "NomeClasse.nomeMetodo".
+* `assert` prende una condizione booleana e stabilisce se il test è superato o no.
+* `CubeCalculator.cube(3) === 27` controlla se l'output della funzione `cube` sia realmente 27.
+Il simbolo `===` è parte di ScalaTest e restituisce messaggi di errore comprensibili.
+
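+A titolo di esempio, l'oggetto `CubeCalculator` sotto test potrebbe assomigliare allo schizzo seguente (la definizione esatta dipende dal template, quindi è riportata solo a scopo indicativo):
+
+```
+object CubeCalculator {
+  // calcola il cubo di un intero
+  def cube(x: Int): Int = x * x * x
+}
+```
+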
+## Aggiungere un altro test case
+1. Aggiungere un altro blocco di test contenente una propria istruzione `assert` che verifichi il cubo di `0`.
+
+ ```
+ import org.scalatest.funsuite.AnyFunSuite
+
+ class CubeCalculatorTest extends AnyFunSuite {
+ test("CubeCalculator.cube 3 should be 27") {
+ assert(CubeCalculator.cube(3) === 27)
+ }
+
+ test("CubeCalculator.cube 0 should be 0") {
+ assert(CubeCalculator.cube(0) === 0)
+ }
+ }
+ ```
+
+1. Lanciare `sbt test` nuovamente e controllare i risultati.
+
+ ```
+ sbt test
+ [info] Loading project definition from C:\projects\scalaPlayground\scalatestpractice\project
+ [info] Loading settings for project root from build.sbt ...
+ [info] Set current project to scalatest-example (in build file:/C:/projects/scalaPlayground/scalatestpractice/)
+ [info] Compiling 1 Scala source to C:\projects\scalaPlayground\scalatestpractice\target\scala-2.13\test-classes ...
+ [info] CubeCalculatorTest:
+ [info] - CubeCalculator.cube 3 should be 27
+ [info] - CubeCalculator.cube 0 should be 0
+ [info] Run completed in 257 milliseconds.
+ [info] Total number of tests run: 2
+ [info] Suites: completed 1, aborted 0
+ [info] Tests: succeeded 2, failed 0, canceled 0, ignored 0, pending 0
+ [info] All tests passed.
+ [success] Total time: 3 s, completed Dec 4, 2019 10:34:04 PM
+ ```
+
+## Conclusioni
+In questo tutorial è stata mostrata una delle modalità per testare il codice Scala. Per saperne di più su FunSuite si può consultare [il sito ufficiale](https://www.scalatest.org/getting_started_with_fun_suite).
+Si possono anche consultare altri framework di testing come [ScalaCheck](https://www.scalacheck.org/) e [Specs2](https://etorreborre.github.io/specs2/).
diff --git a/_it/tutorials/scala-for-java-programmers.md b/_it/tutorials/scala-for-java-programmers.md
index fa4327bbeb..180e5795bb 100644
--- a/_it/tutorials/scala-for-java-programmers.md
+++ b/_it/tutorials/scala-for-java-programmers.md
@@ -26,7 +26,7 @@ Scala senza richiedere troppe conoscenze del linguaggio stesso.
Ecco come appeare il codice:
object HelloWorld {
- def main(args: Array[String]) {
+ def main(args: Array[String]): Unit = {
println("Hello, world!")
}
}
@@ -73,7 +73,7 @@ avrà il nome `HelloWorld.class` e conterrà una classe che può essere
direttamente eseguita con il comando `scala`, come mostra la seguente
sezione.
-### Eseguimo l'esempio
+### Eseguiamo l'esempio
Una volta compilato il programma può esser facilmente eseguito con il
comando scala. L'uso è molto simile al comando java ed accetta le stesse
@@ -91,13 +91,13 @@ codice Java. Tutte le classi del package `java.lang` sono importate di
default mentre le altre richiedono l’esplicito import.
Osserviamo un esempio che lo dimostra. Vogliamo ottenere la data
-corrente e formattarla in accordo con la convezione usata in uno
+corrente e formattarla in accordo con la convenzione usata in uno
specifico paese del mondo, diciamo la Francia. (Altre regioni, come la parte
di lingua francese della Svizzera, utilizzano le stesse convenzioni.)
Le librerie delle classi Java definiscono potenti classi di utilità come
`Date` e `DateFormat`. Poiché Scala interagisce direttamente con Java, non
-esistono le classi equivalenti nella libreria delle classi di Scala--possiamo
+esistono le classi equivalenti nella libreria delle classi di Scala; possiamo
semplicemente importare le classi dei corrispondenti package Java:
import java.util.{Date, Locale}
@@ -105,14 +105,14 @@ semplicemente importare le classi dei corrispondenti package Java:
import java.text.DateFormat._
object FrenchDate {
- def main(args: Array[String]) {
+ def main(args: Array[String]): Unit = {
val now = new Date
val df = getDateInstance(LONG, Locale.FRANCE)
println(df format now)
}
}
-L’istruzione import di Scala è molto simile all’equivalente in Java
+L’istruzione `import` di Scala è molto simile all’equivalente in Java
tuttavia, risulta essere più potente. Più classi possono essere importate
dallo stesso package includendole in parentesi graffe come nella prima riga
di codice precedentemente riportato. Un’altra differenza è evidente
@@ -130,7 +130,7 @@ Java che di default contiene la data corrente. Successivamente, definiamo il
formato della data usando il metodo statico `getDateInstance` importato
precedentemente. Infine, stampiamo la data corrente, formattata secondo la
localizzazione scelta, con l’istanza `DateFormat`; quest’ultima linea mostra
-un’importante proprietà di Scala.I metodi che prendono un argomento possono
+un’importante proprietà di Scala. I metodi che prendono un argomento (ed uno soltanto) possono
essere usati con una sintassi non fissa. Questa forma dell’espressione
df format now
@@ -204,7 +204,7 @@ frase “time flies like an arrow” ogni secondo.
def timeFlies() {
println("time flies like an arrow...")
}
- def main(args: Array[String]) {
+ def main(args: Array[String]): Unit = {
oncePerSecond(timeFlies)
}
}
@@ -228,7 +228,7 @@ invece di *timeFlies* e appare come di seguito:
def oncePerSecond(callback: () => Unit) {
while (true) { callback(); Thread sleep 1000 }
}
- def main(args: Array[String]) {
+ def main(args: Array[String]): Unit = {
oncePerSecond(() =>
println("time flies like an arrow..."))
}
@@ -262,7 +262,7 @@ modo: `new Complex(1.5, 2.3)`. La classe ha due metodi, `re` e `im` che
danno l’accesso rispettivamente alla parte reale e a quella immaginaria
del numero complesso.
-Da notare che il tipo di ritorno dei due metodi non è specificato esplicitamante.
+Da notare che il tipo di ritorno dei due metodi non è specificato esplicitamente.
Sarà il compilatore che lo dedurrà automaticamente osservando la parte a destra
del segno uguale dei metodi e deducendo che per entrambi si tratta di
valori di tipo `Double`.
@@ -283,7 +283,7 @@ necessario far seguire il nome del metodo da una coppia di parentesi tonde
vuote, come mostrato nel codice seguente:
object ComplexNumbers {
- def main(args: Array[String]) {
+ def main(args: Array[String]): Unit = {
val c = new Complex(1.2, 3.4)
println("imaginary part: " + c.im())
}
@@ -419,7 +419,7 @@ detto in Scala non è difficile:
}
Questa funzione di valutazione lavora effettuando un *pattern matching*
-sull’albero `t`. Intuitivamente il significato della definizione precendente
+sull’albero `t`. Intuitivamente il significato della definizione precedente
dovrebbe esser chiaro:
1. prima controlla se l’albero `t` è un `Sum`; se lo è, esegue il bind del
@@ -499,7 +499,7 @@ sull’espressione `(x+x)+(7+y)`: prima calcola il suo valore
nell’environment `{ x -> 5, y -> 7 }`, dopo calcola la
derivata relativa ad `x` e poi ad `y`.
- def main(args: Array[String]) {
+ def main(args: Array[String]): Unit = {
val exp: Tree = Sum(Sum(Var("x"),Var("x")),Sum(Const(7),Var("y")))
val env: Environment = { case "x" => 5 case "y" => 7 }
println("Expression: " + exp)
@@ -557,7 +557,7 @@ dichiarazione di un trait:
}
Questa definizione crea un nuovo tipo chiamato `Ord` che ha lo stesso
-ruolo dell’interfaccia `Comparable` in Java e, fornisce l’implementazione
+ruolo dell’interfaccia `Comparable` in Java e fornisce l’implementazione
di default di tre predicati in termini del quarto astraendone uno.
I predicati di uguaglianza e disuguaglianza non sono presenti in questa
dichiarazione poichè sono presenti di default in tutti gli oggetti.
@@ -578,11 +578,11 @@ definendo la classe `Date` come segue:
def year = y
def month = m
def day = d
- override def toString(): String = year + "-" + month + "-" + day
+ override def toString(): String = s"$year-$month-$day"
La parte importante qui è la dichiarazione `extends Ord` che segue il nome
della classe e dei parametri. Dichiara che la classe `Date` eredita il
-codice dal trait `extends Ord`.
+codice dal trait `Ord`.
Successivamente ridefiniamo il metodo `equals`, ereditato da `Object`,
in modo tale che possa confrontare in modo corretto le date confrontando
@@ -646,7 +646,7 @@ restrittivo.
I programmatori Java hanno fatto ricorso all’uso di `Object`, che è il
super-tipo di tutti gli oggetti. Questa soluzione è in ogni caso ben lontana
dall’esser ideale perché non funziona per i tipi base (`int`, `long`, `float`,
-ecc.) ed implica che molto type casts dinamico deve esser fatto dal
+ecc.) ed implica che molti type cast dinamici debbano esser fatti dal
programmatore.
Scala rende possibile la definizione delle classi generiche (e metodi) per
@@ -678,7 +678,7 @@ per creare ed usare una cella che contiene un intero si potrebbe scrivere il
seguente codice:
object IntegerReference {
- def main(args: Array[String]) {
+ def main(args: Array[String]): Unit = {
val cell = new Reference[Int]
cell.set(13)
println("Reference contains the half of " + (cell.get * 2))
@@ -694,6 +694,6 @@ poiché è stata dichiarata per memorizzare un intero.
Questo documento ha fornito una veloce introduzione del linguaggio Scala e
presentato alcuni esempi di base. Il lettore interessato può continuare, per
-esempio, leggendo il documento *Scala By Example* che contiene esempi molti più
+esempio, leggendo il documento [*Tour of Scala*](https://docs.scala-lang.org/tour/tour-of-scala.html) che contiene esempi molto più
avanzati e consultare al bisogno la documentazione
*Scala Language Specification*.
diff --git a/_ja/cheatsheets/index.md b/_ja/cheatsheets/index.md
index 7304890aff..ac3551736c 100644
--- a/_ja/cheatsheets/index.md
+++ b/_ja/cheatsheets/index.md
@@ -5,7 +5,7 @@ title: Scala Cheatsheet
partof: cheatsheet
by: Kenji Ohtsuka
-about: Thanks to Brendan O'Connor. このチートシートは Scala 構文 のクイックリファレンスとして作成されました。 Licensed by Brendan O'Connor under a CC-BY-SA 3.0 license.
+about: Thanks to Brendan O'Connor. このチートシートは Scala 構文 のクイックリファレンスとして作成された。 Licensed by Brendan O'Connor under a CC-BY-SA 3.0 license.
language: ja
---
@@ -13,78 +13,610 @@ language: ja
###### Contributed by {{ page.by }}
{{ page.about }}
+
+
+
+
+
+
+
+
変数
+
+
+
+
var x = 5
Good
x = 6
+
変数
+
+
+
val x = 5
Bad
x = 6
+
定数
+
+
+
var x: Double = 5
+
明示的な型
+
+
+
関数
+
+
+
+
Good
def f(x: Int) = { x * x }
Bad
def f(x: Int) { x * x }
+
関数定義 落とし穴: = を書かないと Unit を返す手続きになり、大惨事の原因になります。 Scala 2.13 より非推奨です。
+
+
+
Good
def f(x: Any) = println(x)
Bad
def f(x) = println(x)
+
関数定義 シンタックスエラー: すべての引数に型指定が必要です。
+
+
+
type R = Double
+
型エイリアス
+
+
+
def f(x: R)
vs.
def f(x: => R)
+
値渡し
名前渡し(遅延評価パラメータ)
+
+
+
(x: R) => x * x
+
無名関数
+
+
+
(1 to 5).map(_ * 2)
vs.
(1 to 5).reduceLeft(_ + _)
+
無名関数: アンダースコアは位置に応じて引数が代入されます。
+
+
+
(1 to 5).map(x => x * x)
+
無名関数: 引数を2回使用する場合は名前をつけます。
+
+
+
(1 to 5).map { x =>
+ val y = x * 2
+ println(y)
+ y
+}
+
+## インターフェース、トレイト、継承
+
+Java 8以降に慣れていれば、ScalaのtraitはJavaのインターフェースに良く似ていることに気づくと思います。
+Pythonのインターフェース(プロトコル)や抽象クラスがあまり使われないのに対して、Scalaではトレイトが常に使われています。
+したがって、この例では両者を比較するのではなく、Scalaのトレイトを使って数学のちょっとした問題を解く方法を紹介します:
+
+```scala
+trait Adder:
+ def add(a: Int, b: Int) = a + b
+
+trait Multiplier:
+ def multiply(a: Int, b: Int) = a * b
+
+// create a class from the traits
+class SimpleMath extends Adder, Multiplier
+val sm = new SimpleMath
+sm.add(1,1) // 2
+sm.multiply(2,2) // 4
+```
+
+クラスやオブジェクトでtraitを使う方法は他にも[たくさんあります][modeling-intro]。
+しかし、この例からは、概念を論理的な動作のグループに整理し、必要に応じてそれらを組み合わせて完全な解決策を作るうえで、トレイトをどのように活用できるかのちょっとしたアイデアが得られます。
+
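+たとえば、トレイトはインスタンス化の時点で(無名クラスとして)合成することもできます。以下は、上で定義した `Adder` と `Multiplier` を前提にした簡単なスケッチです:
+
+```scala
+// インスタンス化の時点で複数のトレイトを合成する
+val quickMath = new Adder with Multiplier {}
+quickMath.add(2, 3)      // 5
+quickMath.multiply(2, 3) // 6
+```
+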
+## 制御構文
+
+ここではPythonとScalaの[制御構文][control-structures]を比較します。
+どちらの言語にも `if`/`else`, `while`, `for` ループ、 `try` といった構文があります。
+加えて、Scala には `match` 式があります。
+
+### `if` 文, 1行
+
+
+
+
+
+ if x == 1: print(x)
+
+
+
+
+ if x == 1 then println(x)
+
+
+
+
+
+### `if` 文, 複数行
+
+
+
+
+
+ if x == 1:
+ print("x is 1, as you can see:")
+ print(x)
+
+
+
+
+ if x == 1 then
+ println("x is 1, as you can see:")
+ println(x)
+
+
+
+
+
+### if, else if, else:
+
+
+
+
+
+ if x < 0:
+ print("negative")
+ elif x == 0:
+ print("zero")
+ else:
+ print("positive")
+
+
+
+
+ if x < 0 then
+ println("negative")
+ else if x == 0 then
+ println("zero")
+ else
+ println("positive")
+
+
+
+
+
+### `if` 文からの戻り値
+
+
+
+
+
+ min_val = a if a < b else b
+
+
+
+
+ val minValue = if a < b then a else b
+
+
+
+
+
+### メソッドの本体としての`if`
+
+
+
+
+
+ def min(a, b):
+ return a if a < b else b
+
+
+
+
+ def min(a: Int, b: Int): Int =
+ if a < b then a else b
+
+
+
+
+
+### `while` ループ
+
+
+
+
+
+ i = 1
+ while i < 3:
+ print(i)
+ i += 1
+
+
+
+
+ var i = 1
+ while i < 3 do
+ println(i)
+ i += 1
+
+
+
+
+
+### rangeを指定した`for` ループ
+
+
+
+
+
+ for i in range(0,3):
+ print(i)
+
+
+
+
+ // preferred
+ for i <- 0 until 3 do println(i)
+
+ // also available
+ for (i <- 0 until 3) println(i)
+
+ // multiline syntax
+ for
+ i <- 0 until 3
+ do
+ println(i)
+
+
+
+
+
+### リスト範囲内の`for` ループ
+
+
+
+
+
+ for i in ints: print(i)
+
+ for i in ints:
+ print(i)
+
+
+
+
+ for i <- ints do println(i)
+
+
+
+
+
+### 複数行での`for` ループ
+
+
+
+
+
+ for i in ints:
+ x = i * 2
+ print(f"i = {i}, x = {x}")
+
+
+
+
+ for
+ i <- ints
+ do
+ val x = i * 2
+ println(s"i = $i, x = $x")
+
+
+
+
+
+### 複数の “range” ジェネレータ
+
+
+
+
+
+ for i in range(1,3):
+ for j in range(4,6):
+ for k in range(1,10,3):
+ print(f"i = {i}, j = {j}, k = {k}")
+
+
+
+
+ for
+ i <- 1 to 2
+ j <- 4 to 5
+ k <- 1 until 10 by 3
+ do
+ println(s"i = $i, j = $j, k = $k")
+
+
+
+
+
+### ガード付きジェネレータ (`if` 式)
+
+
+
+
+
+ for i in range(1,11):
+ if i % 2 == 0:
+ if i < 5:
+ print(i)
+
+
+
+
+ for
+ i <- 1 to 10
+ if i % 2 == 0
+ if i < 5
+ do
+ println(i)
+
+
+
+
+
+### 行ごとに複数の`if`条件
+
+
+
+
+
+ for i in range(1,11):
+ if i % 2 == 0 and i < 5:
+ print(i)
+
+
+
+
+ for
+ i <- 1 to 10
+ if i % 2 == 0 && i < 5
+ do
+ println(i)
+
+
+
+
+
+### 内包表記
+
+
+
+
+
+ xs = [i * 10 for i in range(1, 4)]
+ # xs: [10,20,30]
+
+
+
+
+ val xs = for i <- 1 to 3 yield i * 10
+ // xs: Vector(10, 20, 30)
+
+
+
+
+
+### `match` 条件式
+
+
+
+
+
+ # From 3.10, Python supports structural pattern matching
+ # You can also use dictionaries for basic “switch” functionality
+ match month:
+ case 1:
+ monthAsString = "January"
+ case 2:
+ monthAsString = "February"
+ case _:
+ monthAsString = "Other"
+
+
+
+
+ val monthAsString = month match
+ case 1 => "January"
+ case 2 => "February"
+ case _ => "Other"
+
+
+
+
+
+### switch/match
+
+
+
+
+
+ # Only from Python 3.10
+ match i:
+ case 1 | 3 | 5 | 7 | 9:
+ numAsString = "odd"
+ case 2 | 4 | 6 | 8 | 10:
+ numAsString = "even"
+ case _:
+ numAsString = "too big"
+
+
+
+
+ val numAsString = i match
+ case 1 | 3 | 5 | 7 | 9 => "odd"
+ case 2 | 4 | 6 | 8 | 10 => "even"
+ case _ => "too big"
+
Scala is unusual because it is usually installed for each of your Scala projects rather than being installed system-wide. Both of the above options manage a specific Scala version per Scala project you create.
-
-
Release Notes
-
For important changes, please consult the release notes.
{% endfor %}
diff --git a/_layouts/inner-page-parent-dropdown.html b/_layouts/root-content-layout.html
similarity index 67%
rename from _layouts/inner-page-parent-dropdown.html
rename to _layouts/root-content-layout.html
index 5b89d3ef47..b45513d346 100644
--- a/_layouts/inner-page-parent-dropdown.html
+++ b/_layouts/root-content-layout.html
@@ -1,13 +1,13 @@
{% include headertop.html %}
{% include headerbottom.html %}
-{% if page.new-version %}This page has a new version.{% endif %}
+{% include alert-banner.html message_id='disabled' message=site.data.messages.scam-banner %}
+
{% include navbar-inner.html %}
-
@@ -22,11 +22,19 @@
{% endif %}
{{ page.title }}
+
+
+
+
+
+
+
Language
-
+
+
diff --git a/_layouts/root-index-layout.html b/_layouts/root-index-layout.html
new file mode 100644
index 0000000000..c236bb7b2f
--- /dev/null
+++ b/_layouts/root-index-layout.html
@@ -0,0 +1,33 @@
+{% include headertop.html %} {% include headerbottom.html %}
+
+
+
+{% include alert-banner.html message_id='disabled' message=site.data.messages.scam-banner %}
+
+{% include navbar-inner.html %}
+
+
+
+
+
+
- {% if page.vote-text %}{{ page.vote-text }}{% endif %}
-
- {% endif %}
+
Status
+ {% if page.stage == "implementation" %}
+
+ This proposal has been accepted by the committee.
+ {% if page.status == "waiting-for-implementation" %}
+ An implementation is welcome in the compiler.
+ {% else %}
+ It might be available as an experimental feature in the latest version of the compiler.
+ {% endif %}
+
+ {% elsif page.stage == "completed" %}
+
+ This proposal has been implemented,
+ {% if page.status == "accepted" %}
+ it will be available in the next minor release of the compiler.
+ {% elsif page.status == "shipped" %}
+ it is available in the latest version of the compiler.
+ {% endif %}
+
-
- {% include paginator.html urlPath="training" %}
-
\ No newline at end of file
diff --git a/_overviews/FAQ/breakout.md b/_overviews/FAQ/breakout.md
deleted file mode 100644
index dfb41380ab..0000000000
--- a/_overviews/FAQ/breakout.md
+++ /dev/null
@@ -1,233 +0,0 @@
----
-layout: multipage-overview
-title: What is breakOut, and how does it work?
-overview-name: FAQ
-partof: FAQ
-
-num: 5
-permalink: /tutorials/FAQ/:title.html
----
-You might have encountered some code like the one below, and wonder what is
-`breakOut`, and why is it being passed as parameter?
-
- import scala.collection.breakOut
- val map : Map[Int,String] = List("London", "France").map(x => (x.length, x))(breakOut)
-
-
-The answer is found on the definition of `map`:
-
- def map[B, That](f : (A) => B)(implicit bf : CanBuildFrom[Repr, B, That]) : That
-
-Note that it has two parameters. The first is your function and the second is
-an implicit. If you do not provide that implicit, Scala will choose the most
-_specific_ one available.
-
-### About breakOut
-
-So, what's the purpose of `breakOut`? Consider the example given at the
-beginning , You take a list of strings, transform each string into a tuple
-`(Int, String)`, and then produce a `Map` out of it. The most obvious way to do
-that would produce an intermediary `List[(Int, String)]` collection, and then
-convert it.
-
-Given that `map` uses a `Builder` to produce the resulting collection, wouldn't
-it be possible to skip the intermediary `List` and collect the results directly
-into a `Map`? Evidently, yes, it is. To do so, however, we need to pass a
-proper `CanBuildFrom` to `map`, and that is exactly what `breakOut` does.
-
-Let's look, then, at the definition of `breakOut`:
-
- def breakOut[From, T, To](implicit b : CanBuildFrom[Nothing, T, To]) =
- new CanBuildFrom[From, T, To] {
- def apply(from: From) = b.apply() ; def apply() = b.apply()
- }
-
-Note that `breakOut` is parameterized, and that it returns an instance of
-`CanBuildFrom`. As it happens, the types `From`, `T` and `To` have already been
-inferred, because we know that `map` is expecting `CanBuildFrom[List[String],
-(Int, String), Map[Int, String]]`. Therefore:
-
- From = List[String]
- T = (Int, String)
- To = Map[Int, String]
-
-To conclude let's examine the implicit received by `breakOut` itself. It is of
-type `CanBuildFrom[Nothing,T,To]`. We already know all these types, so we can
-determine that we need an implicit of type
-`CanBuildFrom[Nothing,(Int,String),Map[Int,String]]`. But is there such a
-definition?
-
-Let's look at `CanBuildFrom`'s definition:
-
- trait CanBuildFrom[-From, -Elem, +To]
- extends AnyRef
-
-So `CanBuildFrom` is contra-variant on its first type parameter. Because
-`Nothing` is a bottom class (ie, it is a subclass of everything), that means
-*any* class can be used in place of `Nothing`.
-
-Since such a builder exists, Scala can use it to produce the desired output.
-
-### About Builders
-
-A lot of methods from Scala's collections library consists of taking the
-original collection, processing it somehow (in the case of `map`, transforming
-each element), and storing the results in a new collection.
-
-To maximize code reuse, this storing of results is done through a _builder_
-(`scala.collection.mutable.Builder`), which basically supports two operations:
-appending elements, and returning the resulting collection. The type of this
-resulting collection will depend on the type of the builder. Thus, a `List`
-builder will return a `List`, a `Map` builder will return a `Map`, and so on.
-The implementation of the `map` method need not concern itself with the type of
-the result: the builder takes care of it.
-
-On the other hand, that means that `map` needs to receive this builder somehow.
-The problem faced when designing Scala 2.8 Collections was how to choose the
-best builder possible. For example, if I were to write `Map('a' ->
-1).map(_.swap)`, I'd like to get a `Map(1 -> 'a')` back. On the other hand, a
-`Map('a' -> 1).map(_._1)` can't return a `Map` (it returns an `Iterable`).
-
-The magic of producing the best possible `Builder` from the known types of the
-expression is performed through this `CanBuildFrom` implicit.
-
-### About CanBuildFrom
-
-To better explain what's going on, I'll give an example where the collection
-being mapped is a `Map` instead of a `List`. I'll go back to `List` later. For
-now, consider these two expressions:
-
- Map(1 -> "one", 2 -> "two") map Function.tupled(_ -> _.length)
- Map(1 -> "one", 2 -> "two") map (_._2)
-
-The first returns a `Map` and the second returns an `Iterable`. The magic of
-returning a fitting collection is the work of `CanBuildFrom`. Let's consider
-the definition of `map` again to understand it.
-
-The method `map` is inherited from `TraversableLike`. It is parameterized on
-`B` and `That`, and makes use of the type parameters `A` and `Repr`, which
-parameterize the class. Let's see both definitions together:
-
-The class `TraversableLike` is defined as:
-
- trait TraversableLike[+A, +Repr]
- extends HasNewBuilder[A, Repr] with AnyRef
-
- def map[B, That](f : (A) => B)(implicit bf : CanBuildFrom[Repr, B, That]) : That
-
-
-To understand where `A` and `Repr` come from, let's consider the definition of
-`Map` itself:
-
- trait Map[A, +B]
- extends Iterable[(A, B)] with Map[A, B] with MapLike[A, B, Map[A, B]]
-
-Because `TraversableLike` is inherited by all traits which extend `Map`, `A`
-and `Repr` could be inherited from any of them. The last one gets the
-preference, though. So, following the definition of the immutable `Map` and all
-the traits that connect it to `TraversableLike`, we have:
-
- trait Map[A, +B]
- extends Iterable[(A, B)] with Map[A, B] with MapLike[A, B, Map[A, B]]
-
- trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
- extends MapLike[A, B, This]
-
- trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
- extends PartialFunction[A, B] with IterableLike[(A, B), This] with Subtractable[A, This]
-
- trait IterableLike[+A, +Repr]
- extends Equals with TraversableLike[A, Repr]
-
- trait TraversableLike[+A, +Repr]
- extends HasNewBuilder[A, Repr] with AnyRef
-
-If you pass the type parameters of `Map[Int, String]` all the way down the
-chain, we find that the types passed to `TraversableLike`, and, thus, used by
-`map`, are:
-
- A = (Int,String)
- Repr = Map[Int, String]
-
-Going back to the example, the first map is receiving a function of type
-`((Int, String)) => (Int, Int)` and the second map is receiving a function of
-type `((Int, String)) => String`. I use the double parenthesis to emphasize it is
-a tuple being received, as that's the type of `A` as we saw.
-
-With that information, let's consider the other types.
-
- map Function.tupled(_ -> _.length):
- B = (Int, Int)
-
- map (_._2):
- B = String
-
-We can see that the type returned by the first `map` is `Map[Int,Int]`, and the
-second is `Iterable[String]`. Looking at `map`'s definition, it is easy to see
-that these are the values of `That`. But where do they come from?
-
-If we look inside the companion objects of the classes involved, we see some
-implicit declarations providing them. On object `Map`:
-
- implicit def canBuildFrom [A, B] : CanBuildFrom[Map, (A, B), Map[A, B]]
-
-And on object `Iterable`, whose class is extended by `Map`:
-
- implicit def canBuildFrom [A] : CanBuildFrom[Iterable, A, Iterable[A]]
-
-These definitions provide factories for parameterized `CanBuildFrom`.
-
-Scala will choose the most specific implicit available. In the first case, it
-was the first `CanBuildFrom`. In the second case, as the first did not match,
-it chose the second `CanBuildFrom`.
-
-### Back to the first example
-
-Let's see the first example, `List`'s and `map`'s definition (again) to
-see how the types are inferred:
-
- val map : Map[Int,String] = List("London", "France").map(x => (x.length, x))(breakOut)
-
- sealed abstract class List[+A]
- extends LinearSeq[A] with Product with GenericTraversableTemplate[A, List] with LinearSeqLike[A, List[A]]
-
- trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]]
- extends SeqLike[A, Repr]
-
- trait SeqLike[+A, +Repr]
- extends IterableLike[A, Repr]
-
- trait IterableLike[+A, +Repr]
- extends Equals with TraversableLike[A, Repr]
-
- trait TraversableLike[+A, +Repr]
- extends HasNewBuilder[A, Repr] with AnyRef
-
- def map[B, That](f : (A) => B)(implicit bf : CanBuildFrom[Repr, B, That]) : That
-
-The type of `List("London", "France")` is `List[String]`, so the types `A` and
-`Repr` defined on `TraversableLike` are:
-
- A = String
- Repr = List[String]
-
-The type for `(x => (x.length, x))` is `(String) => (Int, String)`, so the type
-of `B` is:
-
- B = (Int, String)
-
-The last unknown type, `That` is the type of the result of `map`, and we
-already have that as well:
-
- val map : Map[Int,String] =
-
-So,
-
- That = Map[Int, String]
-
-That means `breakOut` must, necessarily, return a type or subtype of
-`CanBuildFrom[List[String], (Int, String), Map[Int, String]]`.
-
-This answer was originally submitted in response to [this question on Stack Overflow][1].
-
- [1]: https://stackoverflow.com/q/1715681/53013
diff --git a/_overviews/FAQ/chaining-implicits.md b/_overviews/FAQ/chaining-implicits.md
deleted file mode 100644
index cf8f513174..0000000000
--- a/_overviews/FAQ/chaining-implicits.md
+++ /dev/null
@@ -1,109 +0,0 @@
----
-layout: multipage-overview
-title: How can I chain/nest implicit conversions?
-overview-name: FAQ
-partof: FAQ
-
-num: 6
-permalink: /tutorials/FAQ/:title.html
----
-
-The enrich-my-library pattern allows one to seemingly add a method to a class by
-making available an implicit conversion from that class to one that implements
-the method.
-
-Scala does not allow two such implicit conversions taking place, however, so
-one cannot got from `A` to `C` using an implicit `A` to `B` and another
-implicit `B` to `C`. Is there a way around this restriction?
-
-Scala has a restriction on automatic conversions to add a method, which is that
-it won't apply more than one conversion in trying to find methods. For example:
-
- class A(val n: Int)
- class B(val m: Int, val n: Int)
- class C(val m: Int, val n: Int, val o: Int) {
- def total = m + n + o
- }
-
- import scala.language.implicitConversions
-
- // This demonstrates implicit conversion chaining restrictions
- object T1 { // to make it easy to test on REPL
- implicit def toA(n: Int): A = new A(n)
- implicit def aToB(a: A): B = new B(a.n, a.n)
- implicit def bToC(b: B): C = new C(b.m, b.n, b.m + b.n)
-
- // won't work
- println(5.total)
- println(new A(5).total)
-
- // works
- println(new B(5, 5).total)
- println(new C(5, 5, 10).total)
- }
-
-However, if an implicit definition requires an implicit parameter itself, Scala
-_will_ look for additional implicit values for as long as needed. Continuing from
-the last example:
-
- object T2 {
- implicit def toA(n: Int): A = new A(n)
- implicit def aToB[A1](a: A1)(implicit f: A1 => A): B =
- new B(a.n, a.n)
- implicit def bToC[B1](b: B1)(implicit f: B1 => B): C =
- new C(b.m, b.n, b.m + b.n)
-
- // works
- println(5.total)
- println(new A(5).total)
- println(new B(5, 5).total)
- println(new C(5, 5, 10).total)
- }
-
-_"Magic!"_, you might say. Not so. Here is how the compiler would translate each
-one:
-
- object T1Translated {
- implicit def toA(n: Int): A = new A(n)
- implicit def aToB(a: A): B = new B(a.n, a.n)
- implicit def bToC(b: B): C = new C(b.m, b.n, b.m + b.n)
-
- // Scala won't do this
- println(bToC(aToB(toA(5))).total)
- println(bToC(aToB(new A(5))).total)
-
- // Just this
- println(bToC(new B(5, 5)).total)
-
- // No implicits required
- println(new C(5, 5, 10).total)
- }
-
- object T2Translated {
- implicit def toA(n: Int): A = new A(n)
- implicit def aToB[A1](a: A1)(implicit f: A1 => A): B =
- new B(a.n, a.n)
- implicit def bToC[B1](b: B1)(implicit f: B1 => B): C =
- new C(b.m, b.n, b.m + b.n)
-
- // Scala does this
- println(bToC(5)(x => aToB(x)(y => toA(y))).total)
- println(bToC(new A(5))(x => aToB(x)(identity)).total)
- println(bToC(new B(5, 5))(identity).total)
-
- // no implicits required
- println(new C(5, 5, 10).total)
- }
-
-So, while `bToC` is being used as an implicit conversion, `aToB` and `toA` are
-being passed as _implicit parameters_, instead of being chained as implicit
-conversions.
-
-See also:
-
-* [Context bounds](context-bounds.html)
-* [A discussion on types, origin and precedence of implicits](finding-implicits.html)
-
-This question and answer were originally submitted on [Stack Overflow][1].
-
- [1]: https://stackoverflow.com/questions/5332801/how-can-i-chain-implicits-in-scala/5332804
diff --git a/_overviews/FAQ/collections.md b/_overviews/FAQ/collections.md
deleted file mode 100644
index 6c0ad3355d..0000000000
--- a/_overviews/FAQ/collections.md
+++ /dev/null
@@ -1,382 +0,0 @@
----
-layout: multipage-overview
-title: How are the collections structured? Which one should I choose?
-overview-name: FAQ
-partof: FAQ
-
-num: 8
-permalink: /tutorials/FAQ/:title.html
----
-## Foreword
-
-There's a [2.8 collection walk-through][1] by Martin Odersky which should
-probably be your first reference. It has been supplemented as well with
-[architectural notes][2], which will be of particular interest to those who
-want to design their own collections.
-
-The rest of this answer was written way before any such thing existed (in fact,
-before 2.8.0 itself was released).
-
-You can find a paper about it as [Scala SID #3][3]. Other papers in that area
-should be interesting as well to people interested in the differences between
-Scala 2.7 and 2.8.
-
-I'll quote from the paper, selectively, and complement with some thoughts of
-mine. There are also some images, generated by Matthias at decodified.com, and
-the original SVG files can be found [here][4].
-
-## The collection classes/traits themselves
-
-There are actually three hierarchies of traits for the collections: one for
-mutable collections, one for immutable collections, and one which doesn't make
-any assumptions about the collections.
-
-There's also a distinction between parallel, serial and maybe-parallel
-collections, which was introduced with Scala 2.9. I'll talk about them in the
-next section. The hierarchy described in this section refers _exclusively to
-non-parallel collections_.
-
-The following image shows the non-specific hierarchy as of Scala 2.10:
-
-[![General collection hierarchy][5]][5]
-
-All elements shown are traits. In the other two hierarchies there are also
-classes directly inheriting the traits as well as classes which can be _viewed
-as_ belonging in that hierarchy through implicit conversion to wrapper classes.
-The legend for these graphs can be found after them.
-
-Graph for immutable hierarchy:
-
-[![Immutable collection hierarchy][10]][10]
-
-Graph for mutable hierarchy:
-
-[![Mutable collection hierarchy][11]][11]
-
-Legend:
-
-[![Graph legend][8]][8]
-
-Here's an abbreviated ASCII depiction of the collection hierarchy, for those who can't see the images.
-
- Traversable
- |
- |
- Iterable
- |
- +------------------+--------------------+
- Map Set Seq
- | | |
- | | +------+-------+
- SortedMap SortedSet Buffer Vector LinearSeq
- |
- |
- BitSet
-
-## Parallel Collections
-
-When Scala 2.9 introduced parallel collections, one of the design goals was to
-make their use as seamless as possible. In the simplest terms, one can replace
-a non-parallel (serial) collection with a parallel one, and instantly reap the
-benefits.
-
-However, since all collections until then were serial, many algorithms using
-them assumed and depended on the fact that they _were_ serial. Parallel
-collections fed to the methods with such assumptions would fail. For this
-reason, all the hierarchy described in the previous section _mandates serial
-processing_.
-
-Two new hierarchies were created to support the parallel collections.
-
-The parallel collections hierarchy has the same names for traits, but preceded
-with `Par`: `ParIterable`, `ParSeq`, `ParMap` and `ParSet`. Note that there is
-no `ParTraversable`, since any collection supporting parallel access is capable
-of supporting the stronger `ParIterable` trait. It doesn't have some of the
-more specialized traits present in the serial hierarchy either. This whole
-hierarchy is found under the directory `scala.collection.parallel`.
-
-The classes implementing parallel collections also differ, with `ParHashMap`
-and `ParHashSet` for both mutable and immutable parallel collections, plus
-`ParRange` and `ParVector` implementing `immutable.ParSeq` and `ParArray`
-implementing `mutable.ParSeq`.
-
-Another hierarchy also exists that mirrors the traits of serial and parallel
-collections, but with a prefix `Gen`: `GenTraversable`, `GenIterable`,
-`GenSeq`, `GenMap` and `GenSet`. These traits are _parents_ to both parallel
-and serial collections. This means that a method taking a `Seq` cannot receive
-a parallel collection, but a method taking a `GenSeq` is expected to work with
-both serial and parallel collections.
-
-Given the way these hierarchies were structured, code written for Scala 2.8 was
-fully compatible with Scala 2.9, and demanded serial behavior. Without being
-rewritten, it cannot take advantage of parallel collections, but the changes
-required are very small.
-
-### Using Parallel Collections
-
-Any collection can be converted into a parallel one by calling the method `par`
-on it. Likewise, any collection can be converted into a serial one by calling
-the method `seq` on it.
-
-If the collection was already of the type requested (parallel or serial), no
-conversion will take place. If one calls `seq` on a parallel collection or
-`par` on a serial collection, however, a new collection with the requested
-characteristic will be generated.
-
-Do not confuse `seq`, which turns a collection into a non-parallel collection,
-with `toSeq`, which returns a `Seq` created from the elements of the
-collection. Calling `toSeq` on a parallel collection will return a `ParSeq`,
-not a serial collection.
-
-## The Main Traits
-
-While there are many implementing classes and subtraits, there are some basic
-traits in the hierarchy, each of which providing more methods or more specific
-guarantees, but reducing the number of classes that could implement them.
-
-In the following subsections, I'll give a brief description of the main traits
-and the idea behind them.
-
-### Trait TraversableOnce
-
-This trait is pretty much like trait `Traversable` described below, but with
-the limitation that you can only use it _once_. That is, any methods called on
-a `TraversableOnce` _may_ render it unusable.
-
-This limitation makes it possible for the same methods to be shared between the
-collections and `Iterator`. This makes it possible for a method that works with
-an `Iterator` but not using `Iterator`-specific methods to actually be able to
-work with any collection at all, plus iterators, if rewritten to accept
-`TraversableOnce`.
-
-Because `TraversableOnce` unifies collections and iterators, and iterators are
-not considered collections, it does not appear in the previous graphs, which
-concern themselves only with collections.
-
-### Trait Traversable
-
-At the top of the _collection_ hierarchy is trait `Traversable`. Its only
-abstract operation is
-
- def foreach[U](f: Elem => U)
-
-The operation is meant to traverse all elements of the collection, and apply
-the given operation f to each element. The application is done for its side
-effect only; in fact any function result of f is discarded by foreach.
-
-Traversable objects can be finite or infinite. An example of an infinite
-traversable object is the stream of natural numbers `Stream.from(0)`. The
-method `hasDefiniteSize` indicates whether a collection is possibly infinite.
-If `hasDefiniteSize` returns true, the collection is certainly finite. If it
-returns false, the collection has not been fully elaborated yet, so it might
-be infinite or finite.
-
-This class defines methods which can be efficiently implemented in terms of
-`foreach` (over 40 of them).
-
-### Trait Iterable
-
-This trait declares an abstract method `iterator` that returns an iterator that
-yields all the collection’s elements one by one. The `foreach` method in
-`Iterable` is implemented in terms of `iterator`. Subclasses of `Iterable`
-often override foreach with a direct implementation for efficiency.
-
-Class `Iterable` also adds some less-often used methods to `Traversable`, which
-can be implemented efficiently only if an `iterator` is available. They are
-summarized below.
-
- xs.iterator An iterator that yields every element in xs, in the same order as foreach traverses elements.
- xs takeRight n A collection consisting of the last n elements of xs (or, some arbitrary n elements, if no order is defined).
- xs dropRight n The rest of the collection except xs takeRight n.
- xs sameElements ys A test whether xs and ys contain the same elements in the same order
-
-### Seq, Set and Map
-
-After `Iterable` there come three base traits which inherit from it: `Seq`,
-`Set`, and `Map`. All three have an `apply` method and all three implement the
-`PartialFunction` trait, but the meaning of `apply` is different in each case.
-
-I trust the meaning of `Seq`, `Set` and `Map` is intuitive. After them, the
-classes break up in specific implementations that offer particular guarantees
-with regards to performance, and the methods it makes available as a result of
-it. Also available are some traits with further refinements, such as
-`LinearSeq`, `IndexedSeq` and `SortedSet`.
-
-## Complete Overview
-
-### Base Classes and Traits
-
-* `TraversableOnce` -- All methods and behavior common to collections and iterators.
-
- * `Traversable` -- Basic collection class. Can be implemented just with `foreach`.
-
- * `TraversableProxy` -- Proxy for a `Traversable`. Just point `self` to the real collection.
- * `TraversableView` -- A Traversable with some non-strict methods.
- * `TraversableForwarder` -- Forwards most methods to `underlying`, except `toString`, `hashCode`, `equals`, `stringPrefix`, `newBuilder`, `view` and all calls creating a new iterable object of the same kind.
- * `mutable.Traversable` and `immutable.Traversable` -- same thing as `Traversable`, but restricting the collection type.
- * Other special-cases `Iterable` classes, such as `MetaData`, exists.
- * `Iterable` -- A collection for which an `Iterator` can be created (through `iterator`).
- * `IterableProxy`, `IterableView`, `mutable` and `immutable.Iterable`.
-
- * `Iterator` -- A trait which is not descendant of `Traversable`. Define `next` and `hasNext`.
- * `CountedIterator` -- An `Iterator` defining `count`, which returns the elements seen so far.
- * `BufferedIterator` -- Defines `head`, which returns the next element without consuming it.
- * Other special-cases `Iterator` classes, such as `Source`, exists.
-
-### The Sequences
-
-* `Seq` -- A sequence of elements. One assumes a well-defined size and element repetition. Extends `PartialFunction` as well.
-
- * `IndexedSeq` -- Sequences that support O(1) element access and O(1) length computation.
- * `IndexedSeqView`
- * `immutable.PagedSeq` -- An implementation of `IndexedSeq` where the elements are produced on-demand by a function passed through the constructor.
- * `immutable.IndexedSeq`
-
- * `immutable.Range` -- A delimited sequence of integers, closed on the lower end, open on the high end, and with a step.
- * `immutable.Range.Inclusive` -- A `Range` closed on the high end as well.
- * `immutable.NumericRange` -- A more generic version of `Range` which works with any `Integral`.
- * `immutable.NumericRange.Inclusive`, `immutable.NumericRange.Exclusive`.
- * `immutable.WrappedString`, `immutable.RichString` -- Wrappers which enables seeing a `String` as a `Seq[Char]`, while still preserving the `String` methods. I'm not sure what the difference between them is.
-
- * `mutable.IndexedSeq`
- * `mutable.GenericArray` -- An `Seq`-based array-like structure. Note that the "class" `Array` is Java's `Array`, which is more of a memory storage method than a class.
- * `mutable.ResizableArray` -- Internal class used by classes based on resizable arrays.
- * `mutable.PriorityQueue`, `mutable.SynchronizedPriorityQueue` -- Classes implementing prioritized queues -- queues where the elements are dequeued according to an `Ordering` first, and order of queueing last.
- * `mutable.PriorityQueueProxy` -- an abstract `Proxy` for a `PriorityQueue`.
-
- * `LinearSeq` -- A trait for linear sequences, with efficient time for `isEmpty`, `head` and `tail`.
-
- * `immutable.LinearSeq`
- * `immutable.List` -- An immutable, singly-linked, list implementation.
- * `immutable.Stream` -- A lazy-list. Its elements are only computed on-demand, but memoized (kept in memory) afterwards. It can be theoretically infinite.
- * `mutable.LinearSeq`
- * `mutable.DoublyLinkedList` -- A list with mutable `prev`, `head` (`elem`) and `tail` (`next`).
- * `mutable.LinkedList` -- A list with mutable `head` (`elem`) and `tail` (`next`).
- * `mutable.MutableList` -- A class used internally to implement classes based on mutable lists.
- * `mutable.Queue`, `mutable.QueueProxy` -- A data structure optimized for FIFO (First-In, First-Out) operations.
- * `mutable.QueueProxy` -- A `Proxy` for a `mutable.Queue`.
-
- * `SeqProxy`, `SeqView`, `SeqForwarder`
-
- * `immutable.Seq`
-
- * `immutable.Queue` -- A class implementing a FIFO-optimized (First-In, First-Out) data structure. There is no common superclass of both `mutable` and `immutable` queues.
- * `immutable.Stack` -- A class implementing a LIFO-optimized (Last-In, First-Out) data structure. There is no common superclass of both `mutable` `immutable` stacks.
- * `immutable.Vector` -- ?
- * `scala.xml.NodeSeq` -- A specialized XML class which extends `immutable.Seq`.
- * `immutable.IndexedSeq` -- As seen above.
- * `immutable.LinearSeq` -- As seen above.
-
- * `mutable.ArrayStack` -- A class implementing a LIFO-optimized data structure using arrays. Supposedly significantly faster than a normal stack.
- * `mutable.Stack`, `mutable.SynchronizedStack` -- Classes implementing a LIFO-optimized data structure.
- * `mutable.StackProxy` -- A `Proxy` for a `mutable.Stack`..
- * `mutable.Seq`
-
- * `mutable.Buffer` -- Sequence of elements which can be changed by appending, prepending or inserting new members.
- * `mutable.ArrayBuffer` -- An implementation of the `mutable.Buffer` class, with constant amortized time for the append, update and random access operations. It has some specialized subclasses, such as `NodeBuffer`.
- * `mutable.BufferProxy`, `mutable.SynchronizedBuffer`.
- * `mutable.ListBuffer` -- A buffer backed by a list. It provides constant time append and prepend, with most other operations being linear.
- * `mutable.ObservableBuffer` -- A *mixin* trait which, when mixed to a `Buffer`, provides notification events through a `Publisher` interfaces.
- * `mutable.IndexedSeq` -- As seen above.
- * `mutable.LinearSeq` -- As seen above.
-
-### The Sets
-
-* `Set` -- A set is a collection that includes at most one of any object.
-
- * `SortedSet` -- A set whose elements are ordered.
- * `immutable.SortedSet`
- * `immutable.BitSet` -- A set of integers stored as a bitset.
- * `immutable.TreeSet` -- An implementation of a `SortedSet` based on a tree.
- * `mutable.SortedSet`
- * `mutable.BitSet` -- A set of integers stored as a bitset.
-
- * `SetProxy` -- A `Proxy` for a `Set`.
-
- * `immutable.Set`
- * `immutable.HashSet` -- An implementation of `Set` based on element hashing.
- * `immutable.ListSet` -- An implementation of `Set` based on lists.
- * Additional set classes exists to provide optimized implementations for sets from 0 to 4 elements.
- * `immutable.SetProxy` -- A `Proxy` for an immutable `Set`.
-
- * `mutable.Set`
- * `mutable.HashSet` -- An implementation of `Set` based on element hashing.
- * `mutable.ImmutableSetAdaptor` -- A class implementing a mutable `Set` from an immutable `Set`.
- * `LinkedHashSet` -- An implementation of `Set` based on lists.
- * `ObservableSet` -- A *mixin* trait which, when mixed with a `Set`, provides notification events through a `Publisher` interface.
- * `SetProxy` -- A `Proxy` for a `Set`.
- * `SynchronizedSet` -- A *mixin* trait which, when mixed with a `Set`, provides notification events through a `Publisher` interface.
-
-### The Maps
-
-* `Map` -- An `Iterable` of `Tuple2`, which also provides methods for retrieving a value (the second element of the tuple) given a key (the first element of the tuple). Extends `PartialFunction` as well.
- * `MapProxy` -- A `Proxy` for a `Map`.
- * `DefaultMap` -- A trait implementing some of `Map`'s abstract methods.
- * `SortedMap` -- A `Map` whose keys are sorted.
- * `immutable.SortMap`
- * `immutable.TreeMap` -- A class implementing `immutable.SortedMap`.
- * `immutable.Map`
- * `immutable.MapProxy`
- * `immutable.HashMap` -- A class implementing `immutable.Map` through key hashing.
- * `immutable.IntMap` -- A class implementing `immutable.Map` specialized for `Int` keys. Uses a tree based on the binary digits of the keys.
- * `immutable.ListMap` -- A class implementing `immutable.Map` through lists.
- * `immutable.LongMap` -- A class implementing `immutable.Map` specialized for `Long` keys. See `IntMap`.
- * There are additional classes optimized for an specific number of elements.
- * `mutable.Map`
- * `mutable.HashMap` -- A class implementing `mutable.Map` through key hashing.
- * `mutable.ImmutableMapAdaptor` -- A class implementing a `mutable.Map` from an existing `immutable.Map`.
- * `mutable.LinkedHashMap` -- ?
- * `mutable.ListMap` -- A class implementing `mutable.Map` through lists.
- * `mutable.MultiMap` -- A class accepting more than one distinct value for each key.
- * `mutable.ObservableMap` -- A *mixin* which, when mixed with a `Map`, publishes events to observers through a `Publisher` interface.
- * `mutable.OpenHashMap` -- A class based on an open hashing algorithm.
- * `mutable.SynchronizedMap` -- A *mixin* which should be mixed with a `Map` to provide a version of it with synchronized methods.
- * `mutable.MapProxy`.
-
-## Bonus Questions
-
-* Why the Like classes exist (e.g. TraversableLike)?
-
-This was done to achieve maximum code reuse. The concrete *generic*
-implementation for classes with a certain structure (a traversable, a map, etc)
-is done in the Like classes. The classes intended for general consumption,
-then, override selected methods that can be optimized.
-
-* What the companion methods are for (e.g. List.companion)?
-
-The builder for the classes, ie, the object which knows how to create instances
-of that class in a way that can be used by methods like `map`, is created by a
-method in the companion object. So, in order to build an object of type X, I
-need to get that builder from the companion object of X. Unfortunately, there
-is no way, in Scala, to get from class X to object X. Because of that, there is
-a method defined in each instance of X, `companion`, which returns the
-companion object of class X.
-
-While there might be some use for such method in normal programs, its target is
-enabling code reuse in the collection library.
-
-* How I know what implicit objects are in scope at a given point?
-
-You aren't supposed to care about that. They are implicit precisely so that you
-don't need to figure out how to make it work.
-
-These implicits exist to enable the methods on the collections to be defined
-on parent classes but still return a collection of the same type. For example,
-the `map` method is defined on `TraversableLike`, but if you use it on a `List`
-you'll get a `List` back.
-
-This answer was originally submitted in response to [this question][9] on Stack
-Overflow.
-
-
- [1]: https://docs.scala-lang.org/overviews/collections/introduction.html
- [2]: https://docs.scala-lang.org/overviews/core/architecture-of-scala-collections.html
- [3]: https://www.scala-lang.org/sid/3
- [4]: https://github.com/sirthias/scala-collections-charts/downloads
- [5]: /resources/images/tour/collections-diagram.svg
- [6]: https://i.stack.imgur.com/2fjoA.png
- [7]: https://i.stack.imgur.com/Dsptl.png
- [8]: /resources/images/tour/collections-legend-diagram.svg
- [9]: https://stackoverflow.com/q/1722137/53013
- [10]: /resources/images/tour/collections-immutable-diagram.svg
- [11]: /resources/images/tour/collections-mutable-diagram.svg
diff --git a/_overviews/FAQ/context-bounds.md b/_overviews/FAQ/context-bounds.md
deleted file mode 100644
index 8efd4174da..0000000000
--- a/_overviews/FAQ/context-bounds.md
+++ /dev/null
@@ -1,104 +0,0 @@
----
-layout: multipage-overview
-title: What are Scala context bounds?
-overview-name: FAQ
-partof: FAQ
-
-num: 3
-permalink: /tutorials/FAQ/:title.html
----
-
-What is a Context Bound?
-------------------------
-
-Context bounds were introduced in Scala 2.8.0, and are typically used with the
-so-called _type class pattern_, a pattern of code that emulates the
-functionality provided by Haskell type classes, though in a more verbose
-manner.
-
-A context bound requires a _parameterized type_, such as `Ordered[A]`,
-as opposed to a plain type like `String`.
-
-A context bound describes an implicit _value_. It is used to declare that for
-some type `A`, there is an
-implicit value of type `B[A]` available. The syntax goes like this:
-
- def f[A : B](a: A) = g(a) // where g requires an implicit value of type B[A]
-
-The common example of usage in Scala is this:
-
- def f[A : ClassTag](n: Int) = new Array[A](n)
-
-An `Array` initialization on a parameterized type requires a `ClassTag` to
-be available, for arcane reasons related to type erasure and the non-erasure
-nature of arrays.
-
-Another very common example in the library is a bit more complex:
-
- def f[A : Ordering](a: A, b: A) = implicitly[Ordering[A]].compare(a, b)
-
-Here, `implicitly` is used to retrieve the implicit value we want, one of type
-`Ordering[A]`, whose class defines the method `compare(a: A, b: A): Int`.
-
-We'll see another way of doing this below.
-
-How are Context Bounds implemented?
----------------------------------------------------
-
-It shouldn't be surprising that context bounds are
-implemented with implicit parameters, given their definition. Actually, the
-syntax shown above is syntactic sugar for what really happens. See below how
-it de-sugars:
-
- def g[A : B](a: A) = h(a)
- def g[A](a: A)(implicit ev: B[A]) = h(a)
-
-So, naturally, one can write them in their full syntax, which is especially
-useful for context bounds:
-
- def f[A](a: A, b: A)(implicit ord: Ordering[A]) = ord.compare(a, b)
-
-What are Context Bounds used for?
----------------------------------
-
-Context bounds are mainly used in what has become known as the _type class pattern_,
-a reference to Haskell's type classes. Basically, this pattern implements an
-alternative to inheritance by making functionality available through a sort of
-implicit adapter pattern.
-
-The classic example is Scala 2.8's `Ordering`. The usage is:
-
- def f[A : Ordering](a: A, b: A) = if (implicitly[Ordering[A]].lt(a, b)) a else b
-
-Though you'll usually see that written like this:
-
- def f[A](a: A, b: A)(implicit ord: Ordering[A]) = {
- import ord._
- if (a < b) a else b
- }
-
-This takes advantage of some implicit conversions inside `Ordering` that enable
-the traditional operator style. Another example in Scala 2.8 is `Numeric`:
-
- def f[A : Numeric](a: A, b: A) = implicitly[Numeric[A]].plus(a, b)
-
-A more complex example is the new collection usage of `CanBuildFrom`, but
-there's already a very long answer about that, so I'll avoid it here. And, as
-mentioned before, there's the `ClassTag` usage, which is required to
-initialize new arrays without concrete types.
-
-Though it had been possible for a long time, the use of context bounds really
-took off in 2010, and is now found to some degree in most of Scala's
-most important libraries and frameworks. The most extreme example of its usage,
-though, is the Scalaz library, which brings a lot of the power of Haskell to
-Scala. I recommend reading up on type class patterns to get more acquainted with
-all the ways in which they can be used.
-
-Related questions of interest:
-
-* [A discussion on types, origin and precedence of implicits](finding-implicits.html)
-* [Chaining implicits](chaining-implicits.html)
-
-This answer was originally submitted in response to [this question on Stack Overflow][1].
-
- [1]: https://stackoverflow.com/q/4465948/53013
diff --git a/_overviews/FAQ/finding-implicits.md b/_overviews/FAQ/finding-implicits.md
deleted file mode 100644
index 5be593d931..0000000000
--- a/_overviews/FAQ/finding-implicits.md
+++ /dev/null
@@ -1,415 +0,0 @@
----
-layout: multipage-overview
-title: Where does Scala look for implicits?
-overview-name: FAQ
-partof: FAQ
-
-num: 7
-permalink: /tutorials/FAQ/:title.html
----
-
-Newcomers to Scala often ask: Where does the compiler look for implicits?
-
-For example, where do the values for `integral` below come from?
-
- scala> import scala.math._
- import scala.math._
-
- scala> def foo[T](t: T)(implicit integral: Integral[T]): Unit = {
- println(integral)
- }
- foo: [T](t: T)(implicit integral: scala.math.Integral[T])Unit
-
- scala> foo(0)
- scala.math.Numeric$IntIsIntegral$@3dbea611
-
- scala> foo(0L)
- scala.math.Numeric$LongIsIntegral$@48c610af
-
-The natural continuation of this line of inquiry leads to a second question: How
-does the compiler choose which implicit to use, in certain situations of apparent
-ambiguity (but that compile anyway)?
-
-For instance, `scala.Predef` defines two conversions from `String`: one to
-`WrappedString` and another to `StringOps`. Both classes, however, share a lot
-of methods, so why doesn't Scala complain about ambiguity when, say, calling
-`map`?
-
-**Note:** this question was inspired by [this other question on Stack
-Overflow][4], but states the problem in more general terms. The example was
-copied from there, because it is referred to in the answer.
-
-## Types of Implicits
-
-Implicits in Scala refer to either a value that can be passed "automatically",
-so to speak, or a conversion from one type to another that is made
-automatically.
-
-### Implicit Conversion
-
-Speaking very briefly about the latter type, if one calls a method `m` on an
-object `o` of a class `C`, and that class does not support method `m`, then
-Scala will look for an implicit conversion from `C` to something that _does_
-support `m`. A simple example would be the method `map` on `String`:
-
- "abc".map(_.toInt)
-
-`String` does not support the method `map`, but `StringOps` does, and there's
-an implicit conversion from `String` to `StringOps` available (see `implicit
-def augmentString` on `Predef`).
-
-### Implicit Parameters
-
-The other kind of implicit is the implicit _parameter_. These are passed to
-method calls like any other parameter, but the compiler tries to fill them in
-automatically. If it can't, it will complain. One _can_ pass these parameters
-explicitly, which is how one uses `breakOut`, for example (see question about
-`breakOut`, on a day you are feeling up for a challenge).
-
-In this case, one has to declare the need for an implicit, such as the `foo`
-method declaration:
-
- def foo[T](t: T)(implicit integral: Integral[T]): Unit = {
- println(integral)
- }
-
-### Implicit conversions as implicit parameters
-
-There's one situation where an implicit is both an implicit conversion and an
-implicit parameter. For example:
-
- def getIndex[T, CC](seq: CC, value: T)(implicit conv: CC => Seq[T]) = seq.indexOf(value)
-
- getIndex("abc", 'a')
-
-The method `getIndex` can receive any object, as long as there is an implicit
-conversion available from its class to `Seq[T]`. Because of that, a `String` can be
-passed to `getIndex`, and it will work.
-
-Behind the scenes, the compiler changes `seq.indexOf(value)` to
-`conv(seq).indexOf(value)`.
-
-### Context Bounds
-
-Another common pattern in implicit parameters is the _type class pattern_. This
-pattern enables the provision of common interfaces to classes which did not
-declare them. It can both serve as a bridge pattern -- gaining separation of
-concerns -- and as an adapter pattern.
-
-The `Integral` class mentioned above is a classic example of type class pattern.
-Another example on Scala's standard library is `Ordering`. Scalaz is a library
-that makes heavy use of this pattern.
-
-This is an example of its use:
-
- def sum[T](list: List[T])(implicit integral: Integral[T]): T = {
- import integral._ // get the implicits in question into scope
- list.foldLeft(integral.zero)(_ + _)
- }
-
-There is also a syntactic sugar for it, called a _context bound_, which is made
-less useful by the need to refer to the implicit. A straight conversion of that
-method looks like this:
-
- def sum[T : Integral](list: List[T]): T = {
- val integral = implicitly[Integral[T]]
- import integral._ // get the implicits in question into scope
- list.foldLeft(integral.zero)(_ + _)
- }
-
-Context bounds are more useful when you just need to _pass_ them to other
-methods that use them. For example, the method `sorted` on `Seq` needs an
-implicit `Ordering`. To create a method `reverseSort`, one could write:
-
- def reverseSort[T : Ordering](seq: Seq[T]) = seq.sorted.reverse
-
-Because `Ordering[T]` was implicitly passed to `reverseSort`, it can then pass
-it implicitly to `sorted`.
-
-## Where do Implicits Come From?
-
-As described above, there are several contexts in which an implicit value may be required
-for an expression to typecheck. The required implicit type is what determines
-which value is selected. That value is found either in lexical scope or,
-failing that, in what is called implicit scope.
-
-### Implicits Defined in Lexical Scope
-
-When a value of a certain name is required, lexical scope is searched for
-a value with that name. Similarly, when an implicit value of a certain type is required,
-lexical scope is searched for a value with that type.
-
-Any such value which can be referenced with its "simple" name, without
-selecting from another value using dotted syntax, is an eligible implicit value.
-
-For example, here is a function that takes an implicit scaling factor.
-The function requires a parameter of type `Int`, and there is a value
-of that type in scope. The variable name `n` does not matter in this
-case.
-
- implicit val n: Int = 5
- def scale(x: Int)(implicit y: Int) = x * y
- scale(5) // takes n from the current scope, with the result 25
-
-The invocation can be rewritten `scale(5)(n)`. If `n` can be referenced
-using its simple name, as shown here, it is eligible as an implicit value.
-
-An implicit value can be introduced into scope by an import statement:
-
- import scala.collection.JavaConverters._
- def env = System.getenv().asScala // extension method enabled by imported implicit
- val term = env("TERM") // it's a Scala Map
-
-There may be more than one such value because they have different names.
-
-In that case, overload resolution is used to pick one of them. The algorithm
-for overload resolution is the same used to choose the reference for a
-given name, when more than one term in scope has that name. For example,
-`println` is overloaded, and each overload takes a different parameter type.
-An invocation of `println` requires selecting the correct overloaded method.
-
-In implicit search, overload resolution chooses a value among more than one
-that have the same required type. Usually this entails selecting a narrower
-type or a value defined in a subclass relative to other eligible values.
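-
-A small sketch of that preference -- the eligible value with the narrower type wins:
-
-    class Animal
-    class Dog extends Animal
-    implicit val a: Animal = new Animal
-    implicit val d: Dog = new Dog
-    implicitly[Animal]   // selects d, the value with the more specific type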
-
-The rule that the value must be accessible using its simple name means
-that the normal rules for name binding apply.
-
-In summary, a definition for `x` shadows a definition in
-an enclosing scope. But a binding for `x` can also be introduced by
-local imports. Imported symbols can't override definitions of the same
-name in an enclosing scope. Similarly, wildcard imports can't override
-an import of a specific name, and names in the current package that are
-visible from other source files can't override imports or local definitions.
-
-These are the normal rules for deciding what `x` means in a given context,
-and also determine which value `x` is accessible by its simple name and
-is eligible as an implicit.
-
-This means that an implicit in scope can be disabled by shadowing it with
-a term of the same name.
-
-For example, here, `X.f` is supplied the imported `X.s`: `X.f(s)`.
-The body of `f` uses an implicit `Int`, from the immediate scope,
-which shadows the `n` from `Y`, which is therefore not an eligible
-implicit value. The parameter `s` shadows the member `s`.
-
-The method `g` does not compile because the implicit `t` is shadowed
-by a `t` that is not implicit, so no implicit `T` is in scope.
-
- object Y {
- implicit val n: Int = 17
- trait T {
- implicit val i: Int = 17
- implicit def t: T = ???
- }
- object X extends T {
- implicit val n: Int = 42
- implicit val s: String = "hello, world\n"
- def f(implicit s: String) = implicitly[String] * implicitly[Int]
- override def t: T = ???
- def g = implicitly[T]
- }
- }
- import Y.X._
- f
-
-The invocation of `f` was enabled by importing from `Y.X.`. But it is
-not convenient to require an import to access implicit values
-provided by a package.
-
-If an implicit value is not found in lexical scope, implicit search
-continues in implicit scope.
-
-### Implicits Defined in Implicit Scope
-
-Implicit syntax can avoid the [import tax][1], which of course is a "sin tax,"
-by leveraging "implicit scope", which depends on the type of the implicit
-instead of imports in lexical scope.
-
-When an implicit of type `T` is required, implicit scope includes
-the companion object of `T`:
-
- trait T
- object T { implicit val t: T = new T { } }
-
-When an `F[T]` is required, implicit scope includes both the companion
-of `F` and the companion of the type argument, e.g., `object C` for `F[C]`.
-
-In addition, implicit scope includes the companions of the base classes
-of `F` and `C`, including package objects, such as `p` for `p.F`.
-
-### Companion Objects of a Type
-
-There are two object companions of note here. First, the object companion of
-the "source" type is looked into. For instance, inside the object `Option`
-there is an implicit conversion to `Iterable`, so one can call `Iterable`
-methods on `Option`, or pass `Option` to something expecting an `Iterable`. For
-example:
-
- for {
- x <- List(1, 2, 3)
- y <- Some('x')
- } yield (x, y)
-
-That expression is translated by the compiler into
-
- List(1, 2, 3).flatMap(x => Some('x').map(y => (x, y)))
-
-However, `List.flatMap` expects a `TraversableOnce`, which `Option` is not. The
-compiler then looks inside `Option`'s object companion and finds the conversion
-to `Iterable`, which is a `TraversableOnce`, making this expression correct.
-
-Second, the companion object of the expected type:
-
- List(1, 2, 3).sorted
-
-The method `sorted` takes an implicit `Ordering`. In this case, it looks inside
-the object `Ordering`, companion to the class `Ordering`, and finds an implicit
-`Ordering[Int]` there.
-
-Note that companion objects of super classes are also looked into. For example:
-
- class A(val n: Int)
- object A {
- implicit def str(a: A) = "A: %d" format a.n
- }
- class B(val x: Int, y: Int) extends A(y)
- val b = new B(5, 2)
- val s: String = b // s == "A: 2"
-
-This is how Scala found the implicit `Numeric[Int]` and `Numeric[Long]` in the
-opening example, by the way, as they are found inside `Numeric`, not `Integral`.
-
-### Implicit scope of an argument's type
-
-If you have a method with an argument type `A`, then the implicit scope of type
-`A` will also be considered. Here "implicit scope" means all these rules
-will be applied recursively -- for example, the companion object of `A` will be
-searched for implicits, as per the rule above.
-
-Note that this does not mean the implicit scope of `A` will be searched for
-conversions of that parameter alone, but of the whole expression. For example:
-
- class A(val n: Int) {
- def +(other: A) = new A(n + other.n)
- }
- object A {
- implicit def fromInt(n: Int) = new A(n)
- }
-
- // This becomes possible:
- 1 + new A(1)
- // because it is converted into this:
- A.fromInt(1) + new A(1)
-
-### Implicit scope of type arguments
-
-This is required to make the type class pattern really work. Consider
-`Ordering`, for instance... it comes with some implicits in its companion
-object, but you can't add stuff to it. So how can you make an `Ordering` for
-your own class that is automatically found?
-
-Let's start with the implementation:
-
- class A(val n: Int)
- object A {
- implicit val ord: Ordering[A] = new Ordering[A] {
- def compare(x: A, y: A) = implicitly[Ordering[Int]].compare(x.n, y.n)
- }
- }
-
-So, consider what happens when you call
-
- List(new A(5), new A(2)).sorted
-
-As we saw, the method `sorted` expects an `Ordering[A]` (actually, it expects
-an `Ordering[B]`, where `B >: A`). There isn't any such thing inside
-`Ordering`, and there is no "source" type on which to look. Obviously, it is
-finding it inside `A`, which is a _type argument_ of `Ordering`.
-
-This is also how various collection methods expecting `CanBuildFrom` work: the
-implicits are found inside companion objects to the type parameters of
-`CanBuildFrom`.
-
-**Note**: `Ordering` is defined as `trait Ordering[T]`, where `T` is a type
-parameter. The implicit looked for above is `Ordering[A]`, where
-`A` is an actual type, not type parameter: it is a _type argument_ to
-`Ordering`. See section 7.2 of the [Scala Specification][6].
-
-### Outer Objects for Nested Types
-
-The principle is simple:
-
- class A(val n: Int) {
- class B(val m: Int) { require(m < n) }
- }
- object A {
- implicit def bToString(b: A#B) = "B: %d" format b.m
- }
- val a = new A(5)
- val b = new a.B(3)
- val s: String = b // s == "B: 3"
-
-A real world example of this would be welcome. Please share your example!
-
-### Package Objects Can Contribute Implicit Values
-
-An implicit value in a package object can be made available either
-in lexical scope or in implicit scope.
-
-To be available in lexical scope, the packages must be declared as nested packages:
-
- package object p { implicit val s: String = "hello, world" }
- package p {
- package q {
- object X { def f = implicitly[String] }
- }
- }
-
-This is sensitive to name binding rules. The following example compiles
-only if the package object is in a separate file, in which case the import is used:
-
- package object p { implicit val s: String = "hello, world" }
- package p {
- package q {
- object Y {
- implicit val s: String = "bye"
- }
- object X {
- import Y._
- def f = implicitly[String]
- }
- }
- }
-
-A package object can also offer implicit values of types in subpackages:
-
- package object p { implicit val c: q.C = new q.C }
- package p.q {
- class C
- object X { def f = implicitly[C] }
- }
-
-Here, the implicit is supplied in implicit scope of `C`.
-
-### Call To Action
-
-Avoid taking this question as being the final arbiter of what is happening.
-If you do notice it has become out-of-date, do [open a ticket about it][7], or, if
-you know how to correct it, please fix it.
-
-Related questions of interest:
-
-* [Context bounds](context-bounds.html)
-* [Chaining implicits](chaining-implicits.html)
-
-This question and answer were originally submitted on [Stack Overflow][3].
-
- [1]: https://jsuereth.com/scala/2011/02/18/2011-implicits-without-tax.html
- [2]: https://issues.scala-lang.org/browse/SI-4427
- [3]: https://stackoverflow.com/q/5598085/53013
- [4]: https://stackoverflow.com/questions/5512397/passing-scala-math-integral-as-implicit-parameter
- [5]: https://scala-lang.org/files/archive/spec/2.11/06-expressions.html
- [6]: https://scala-lang.org/files/archive/spec/2.11/07-implicits.html
- [7]: https://github.com/scala/docs.scala-lang/issues
diff --git a/_overviews/FAQ/finding-symbols.md b/_overviews/FAQ/finding-symbols.md
deleted file mode 100644
index c9737400c0..0000000000
--- a/_overviews/FAQ/finding-symbols.md
+++ /dev/null
@@ -1,205 +0,0 @@
----
-layout: multipage-overview
-title: How do I find what some symbol means or does?
-overview-name: FAQ
-partof: FAQ
-
-num: 1
-
-permalink: /tutorials/FAQ/:title.html
----
-We can divide the operators in Scala, for the purpose of teaching, into four categories:
-
-* Keywords/reserved symbols
-* Normal methods or values
-* Methods provided by implicit conversion
-* Syntactic sugars/composition
-
-And let's see some arbitrary examples:
-
- <- // Keyword
- -> // Method provided by implicit conversion
- <= // Common method
- ++= // Can be a common method or syntactic sugar involving ++ method
- :: // Common method or object
- _+_ // Not really a single operator; it's parsed as _ + _
-
-The exact meaning of most of these methods depends on the class they are defined
-on. For example, `<=` on `Int` means _"less than or equal to"_, but it might
-mean something else in another class. `::` in an expression is probably the method of the class
-`List` but it can also refer to the object of the same name (and in a pattern it
-definitely does).
-
-So, let's discuss these categories.
-
-Keywords/reserved symbols
--------------------------
-
-There are a few symbols in Scala that are special and cannot be defined or used as method names.
-Two of them are considered proper keywords, while others are just "reserved". They are:
-
- // Keywords
- <- // Used on for-comprehensions, to separate pattern from generator
- => // Used for function types, function literals and import renaming
-
- // Reserved
- ( ) // Delimit expressions and parameters
- [ ] // Delimit type parameters
- { } // Delimit blocks
- . // Method call and path separator
- // /* */ // Comments
- # // Used in type notations
- : // Type ascription or context bounds
- <: >: // Upper and lower bounds
- <% // View bounds (deprecated)
- " """ // Strings
- ' // Indicate symbols and characters
- @ // Annotations and variable binding on pattern matching
- ` // Denote constant or enable arbitrary identifiers
- , // Parameter separator
- ; // Statement separator
- _* // vararg expansion
- _ // Many different meanings
-
-These are all _part of the language_, and, as such, can be found in any text
-that properly describes the language, such as the [Scala Specification][1] (PDF)
-itself.
-
-The last one, the underscore, deserves a special description, because it is
-widely used and has different meanings depending on the context. Here's a sample:
-
- import scala._ // Wild card -- all of Scala is imported
- import scala.{ Predef => _, _ } // Exclusion, everything except Predef
- def f[M[_]] // Higher kinded type parameter
- def f(m: M[_]) // Existential type
- _ + _ // Anonymous function placeholder parameter
- m _ // Eta expansion of method into method value
- m(_) // Partial function application
- _ => 5 // Discarded parameter
- case _ => // Wild card pattern -- matches anything
- f(xs: _*) // Sequence xs is passed as multiple parameters to f(ys: T*)
- case Seq(xs @ _*) // Identifier xs is bound to the whole matched sequence
-
-Common methods
---------------
-
-Many symbols are simply methods of a class, a trait, or an object. For instance, if you do
-
- List(1, 2) ++ List(3, 4)
-
-You'll find the method `++` right on the Scaladoc for [List][5]. However,
-there's one convention that you must be aware of when searching for methods.
-Methods ending in colon (`:`) bind _to the right_ instead of the left. In other
-words, while the above method call is equivalent to:
-
- List(1, 2).++(List(3, 4))
-
-If I had, instead, `1 :: List(2, 3)`, that would be equivalent to:
-
- List(2, 3).::(1)
-
-So you need to look at the type found _on the right_ when looking for methods
-ending in colon. Consider, for instance:
-
- 1 +: List(2, 3) :+ 4
-
-The first method (`+:`) binds to the right, and is found on `List`. The second
-method (`:+`) is just a normal method, and binds to the left -- again, on
-`List`.
-
-If the name ends in `=`, look for the method with the same name minus the `=`
-and read the last section.
-
-If you aren't sure what the type of the receiver is, you can look up the symbol
-on the Scaladoc [index page for identifiers not starting with letters][2] (for
-standard Scala library; of course, third-party libraries can add their own
-symbolic methods, for which you should look at the corresponding page of _their_
-Scaladoc).
-
-Types and objects can also have symbolic names; in particular, it should be mentioned
-that for types with two type parameters the name can be written _between_ parameters,
-so that e.g. `Int <:< Any` is the same as `<:<[Int, Any]`.
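-
-For example, both of the following declare the same evidence parameter, once in
-infix and once in prefix notation:
-
-    def f[A, B](implicit ev: A <:< B) = ???     // infix
-    def g[A, B](implicit ev: <:<[A, B]) = ???   // prefix, the same type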
-
-Methods provided by implicit conversion
----------------------------------------
-
-If you did not find the symbol you are looking for in the list of reserved symbols, then
-it must be a method, or part of one. But, often, you'll see some symbol and the
-documentation for the class will not have that method. When this happens,
-either you are looking at a composition of one or more methods with something
-else, or the method has been imported into scope, or is available through an
-imported implicit conversion.
-
-These can also be found in Scaladoc's [index][2], as mentioned above.
-
-All Scala code has three automatic imports:
-
- // Not necessarily in this order
- import java.lang._
- import scala._
- import scala.Predef._
-
-The first two only make classes and singleton objects available, none of which
-look like operators. [`Predef`][3] is the only interesting one for this post.
-
-Looking inside `Predef` shows some symbolic names:
-
- class <:<
- class =:=
- object =:=
- object <%< // removed in Scala 2.10
- def ???
-
-There is also `::`, which doesn't appear in the Scaladoc, but is mentioned in the comments.
-In addition, `Predef` makes some methods available through _implicit conversions_. Just
-look at the methods and classes with `implicit` modifier that receive, as parameter, an
-object of type that is receiving the method. For example, consider `"a" -> 1`. We need
-to look for an implicit which works on `"a"`, and so it can take `String`, one of its
-supertypes (`AnyRef` or `Any`) or a type parameter. In this case, we find
-`implicit final class ArrowAssoc[A](private val self: A)` which makes this implicit
-available on all types.
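-
-The same mechanism is available to user code. As a sketch with made-up names, an
-implicit class can add a symbolic method to an existing type:
-
-    implicit class TimesOps(n: Int) {
-      def ***(m: Int): Int = n * m   // made-up symbolic method
-    }
-    3 *** 4   // 12, via the implicit conversion to TimesOps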
-
-Other implicit conversions may be visible in your scope depending on imports, extended types or
-self-type annotations. See [Finding implicits](finding-implicits.html) for details.
-
-Syntactic sugars/composition
------------------------------
-
-So, here are a few syntactic sugars that may hide a method:
-
- class Example(arr: Array[Int] = Array.fill(5)(0)) {
- def apply(n: Int) = arr(n)
- def update(n: Int, v: Int) = arr(n) = v
- def a = arr(0); def a_=(v: Int) = arr(0) = v
- def b = arr(1); def b_=(v: Int) = arr(1) = v
- def c = arr(2); def c_=(v: Int) = arr(2) = v
- def d = arr(3); def d_=(v: Int) = arr(3) = v
- def e = arr(4); def e_=(v: Int) = arr(4) = v
- def +(v: Int) = new Example(arr map (_ + v))
- def unapply(n: Int) = if (arr.indices contains n) Some(arr(n)) else None
- }
-
- val ex = new Example
- println(ex(0)) // means ex.apply(0)
- ex(0) = 2 // means ex.update(0, 2)
- ex.b = 3 // means ex.b_=(3)
- val ex(c) = 2 // calls ex.unapply(2) and assigns result to c, if it's Some; throws MatchError if it's None
- ex += 1 // means ex = ex + 1; if Example had a += method, it would be used instead
-
-The last one is interesting, because *any* symbolic method can be combined with `=` in that way.
-
-And, of course, all of the above can be combined in various combinations, e.g.
-
- (_+_) // An expression, or parameter, that is an anonymous function with
- // two parameters, used exactly where the underscores appear, and
- // which calls the "+" method on the first parameter passing the
- // second parameter as argument.
-
-This answer was originally submitted in response to [this question on Stack Overflow][6].
-
- [1]: https://scala-lang.org/files/archive/spec/2.11/
- [2]: https://www.scala-lang.org/api/current/index.html#index.index-_
- [3]: https://www.scala-lang.org/api/current/scala/Predef$.html
- [4]: https://www.scala-lang.org/api/current/scala/Predef$$ArrowAssoc.html
- [5]: https://www.scala-lang.org/api/current/scala/collection/immutable/List.html
- [6]: https://stackoverflow.com/q/7888944/53013
diff --git a/_overviews/FAQ/index.md b/_overviews/FAQ/index.md
index 91ab76367b..a3aa167c98 100644
--- a/_overviews/FAQ/index.md
+++ b/_overviews/FAQ/index.md
@@ -1,19 +1,374 @@
---
layout: singlepage-overview
-title: Scala FAQs
+title: Scala FAQ
permalink: /tutorials/FAQ/index.html
+redirect_from:
+ - "/tutorials/FAQ/breakout.html"
+ - "/tutorials/FAQ/chaining-implicits.html"
+ - "/tutorials/FAQ/collections.html"
+ - "/tutorials/FAQ/context-bounds.html"
+ - "/tutorials/FAQ/finding-implicits.html"
+ - "/tutorials/FAQ/finding-symbols.html"
+ - "/tutorials/FAQ/stream-view.html"
+ - "/tutorials/FAQ/yield.html"
---
-A collection of frequently asked questions and their answers! Graciously
-provided by Daniel Sobral, adapted from his StackOverflow posts.
+Frequently asked questions, with _brief_ answers and/or links to
+longer answers.
-## FAQs
+This list only includes questions that _actually_ come up over and
+over again in Scala chat rooms and forums.
-{% assign overviews = site.overviews | sort: 'num' %}
-
-{% for overview in overviews %}
- {% if overview.partof == "FAQ" %}
-
+## General questions
+
+### Where can I ask Scala questions?
+
+See our [Community page](https://scala-lang.org/community/).
+
+### What's a good book about Scala?
+
+Our [Books page](https://docs.scala-lang.org/books.html) lists a few
+especially popular, well-known books.
+
+We don't have a list of all the Scala books that
+are out there; there are many.
+
+You can go on the \#scala-users room [on
+Discord](https://discord.com/invite/scala) or another community forum and
+ask for book recommendations. You'll get more helpful
+answers if you provide some information about your background and your
+reasons for wanting to learn Scala.
+
+### Should I learn Scala 2, or Scala 3?
+
+Don't sweat the decision too much. You can't go far wrong either
+way. It isn't that hard to switch later, in either direction.
+
+Regardless, you should choose Scala 3 unless you have a specific reason
+to need 2. Scala 3 is the future, and it's the best version for
+falling in love with the language and everything it has to offer.
+Scala 3 has plenty of books, plenty of libraries, and high quality
+tooling.
+
+That said, many Scala jobs are still Scala 2 jobs. In most cases, the
+cause of that is simply inertia, especially at large shops. (But it can
+sometimes be due to availability of specific libraries.)
+
+### Where are Scala jobs advertised?
+
+This is addressed on our [Community page](https://scala-lang.org/community/#scala-jobs).
+
+In short, the only officially sanctioned place is the \#jobs channel
+[on Discord](https://discord.com/invite/scala).
+
+### Who's behind Scala?
+
+This is answered [on the Governance page](https://www.scala-lang.org/governance/).
+
+### Can I use the Scala logo?
+
+See [scala/scala-lang#1040](https://github.com/scala/scala-lang/issues/1040).
+
+## Technical questions
+
+### What IDEs are available for Scala?
+
+See [this doc page](https://docs.scala-lang.org/getting-started/scala-ides.html).
+
+### What compiler flags are recommended?
+
+The list of available options is
+[here](https://docs.scala-lang.org/overviews/compiler-options/index.html).
+
+What flags people choose varies widely from shop to shop and from
+individual to individual. `-Xlint` is valuable to enable. Some brave
+people enable `-Werror` (formerly `-Xfatal-warnings`) to make warnings
+fatal.
+
+[sbt-tpolecat](https://github.com/typelevel/sbt-tpolecat) is an
+opinionated sbt plugin that sets many options automatically, depending
+on Scala version; you can see
+[here](https://github.com/typelevel/sbt-tpolecat/blob/main/plugin/src/main/scala/io/github/davidgregory084/TpolecatPlugin.scala)
+what it sets. Some choices it makes are oriented towards
+pure-functional programmers.
+
+### How do I find what some symbol means or does?
+
+A [Stack Overflow answer](https://stackoverflow.com/a/7890032) lays
+out what the different kinds of symbol in Scala are and explains the
+most commonly used symbols.
+
+Scala allows symbolic method names. So if you see a random-looking
+operator like `>=@=>` in Scala code, it might simply be a method in
+some library, rather than having any special meaning in the language
+itself.
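+
+For instance, this sketch (with a made-up operator name) defines `+|+` as an
+ordinary method:
+
+    class Vec(val x: Int, val y: Int) {
+      // nothing special about the name; it is just a method
+      def +|+(that: Vec): Vec = new Vec(x + that.x, y + that.y)
+    }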
+
+You can search for symbols on Google. For example, if you want to
+know what `<:<` means, searching for `scala <:<` works fine. If you
+get poor results, try surrounding the symbol with double quotes.
+
+### I want Scala 2.13 (or some other version); why does sbt say it's using Scala 2.12?
+
+sbt 1.x always uses Scala 2.12 to compile build definitions.
+Your sbt 1.x build definition is always a Scala 2.12 program.
+
+Regardless, in your `build.sbt`, you can set `scalaVersion` to whichever
+available distribution you want and your program code will be compiled with that version.
+
+### I want Scala 3. Why does `versionNumberString` say I'm on 2.13?
+
+To aid migration, Scala 3 currently uses the Scala 2.13 library as-is,
+with only minor supplements. That's why `versionString` and
+`versionNumberString` report that Scala 2 is in use:
+
+```
+Welcome to Scala 3.3.4 (17.0.3, Java OpenJDK 64-Bit Server VM).
+Type in expressions for evaluation. Or try :help.
+
+scala> util.Properties.versionNumberString
+val res0: String = 2.13.15
+```
+
+Note that even the latest Scala 3 version might not use the very
+latest Scala 2 standard library, since the 3 and 2 release schedules
+aren't coordinated.
+
+So how do you ask for the Scala 3 version number? Scala 3 offers
+`dotty.tools.dotc.config.Properties.versionNumberString`, but only if
+you have scala3-compiler on the classpath. So that works in the Scala 3
+REPL, but won't work in typical Scala 3 application code.
+
+For an alternative way to detect the Scala 3 version, see
+[this gist](https://gist.github.com/romanowski/de14691cab7340134e197419bc48919a).
+
+There is a proposal to provide something easier at [scala/scala3#22144](https://github.com/scala/scala3/issues/22144).
+
+### Why is my (abstract or overridden) `val` null?
+
+
+
+See [this]({{ site.baseurl }}/tutorials/FAQ/initialization-order.html).
+
+### Which type of collection should I choose?
+
+See the [Scala 2.13 Collections Guide](https://docs.scala-lang.org/overviews/collections-2.13/introduction.html).
+
+### What are context bounds?
+
+It's syntactic sugar for a context parameter (an `implicit` parameter in Scala 2, or a `using` parameter in Scala 3).
+
+More details in this [section of the Scala 3 Book](https://docs.scala-lang.org/scala3/book/ca-context-bounds.html) and this [Stack Overflow answer](https://stackoverflow.com/a/4467012).
+
+### How does `for / yield` work?
+
+It is syntactic sugar for nested `map`, `flatMap`, and `withFilter` calls.
+
+For an in-depth explanation
+see this [Stack Overflow answer](https://stackoverflow.com/a/1059501).
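+
+Roughly speaking, a comprehension like the following desugars as shown in the
+comment:
+
+    for (x <- xs; y <- ys if y > 0) yield x + y
+    // xs.flatMap(x => ys.withFilter(y => y > 0).map(y => x + y))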
+
+### What is the difference between view, stream and iterator?
+
+[Answer on Stack Overflow](https://stackoverflow.com/a/5159356).
+
+### What does `_` mean?
+
+Many things really, depending on the context.
+[This answer on Stack Overflow](https://stackoverflow.com/a/8001065/4111404)
+has a good summary of all the meanings it has.
+
+Note that, even if the specific meaning differs according to the situation,
+it usually means _"anything"_.
+
+### Why doesn't my function literal with `_` in it work?
+
+Not all function literals (aka lambdas) can be expressed with the `_`
+syntax.
+
+Every occurrence of `_` introduces a new variable. So `_ + _` means
+`(x, y) => x + y`, not `x => x + x`. The latter function cannot be
+written using the `_` syntax.
+
+Also, the scope of `_` is always the smallest enclosing expression.
+The scope is determined purely syntactically, during parsing, without
+regard to types. So for example, `foo(_ + 1)` always means `foo(x =>
+x + 1)`; it never means `x => foo(x + 1)`. The latter function cannot
+be written using the `_` syntax.
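+
+A quick sketch of both points (the method `inc` is made up for illustration):
+
+    val add = (_: Int) + (_: Int)   // means (x, y) => x + y, not x => x + x
+    def inc(f: Int => Int) = f(1)
+    inc(_ + 1)                      // parses as inc(x => x + 1)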
+
+See also [SLS 6.23.2](https://scala-lang.org/files/archive/spec/2.13/06-expressions.html#placeholder-syntax-for-anonymous-functions).
+
+### Why couldn't Scala infer the correct type in my code?
+
+It is difficult to generalize about type inference, because various features of the language
+affect how your code is construed. There may be several ways to rewrite your code to make
+the types fall out naturally.
+
+The most straightforward workaround is to supply explicit types in your code.
+
+That may involve specifying an explicit type to a definition, or a type argument to a method.
+
+Type inference is greatly improved in Scala 3. If Scala 2 doesn't compile your code, it's worth trying with Scala 3.
+
+Sometimes, using multiple parameter lists helps inference, as explained in [this section of the language tour](https://docs.scala-lang.org/tour/multiple-parameter-lists.html#drive-type-inference).
+
+For common questions about type inference involving `toSet`, see the discussions on [this ticket](https://github.com/scala/bug/issues/7743) and a related [Q&A](https://stackoverflow.com/questions/5544536/in-scala-2-type-inference-fails-on-set-made-with-toset).
+
+### Can I chain or nest implicit conversions?
+
+Not really, but you can [make it work](https://stackoverflow.com/a/5332804).
+
+However, note that implicit conversions are, in general,
+[discouraged](https://contributors.scala-lang.org/t/can-we-wean-scala-off-implicit-conversions/4388).
+
+### Where does Scala look for implicits?
+
+See this [answer on Stack Overflow](https://stackoverflow.com/a/5598107).
+
+### Why do primitive type parameters erase to `Object`?
+
+So for example, a `List[Int]` in Scala code will appear to Java as a
+`List[Object]`. The Java type system doesn't allow primitive types to
+appear as type parameters, but couldn't they appear as their boxed
+equivalents, such as `List[java.lang.Integer]`?
+
+One would hope so, but doing it that way was tried, and it proved impossible.
+[This SO question](https://stackoverflow.com/questions/11167430/why-are-primitive-types-such-as-int-erased-to-object-in-scala)
+sadly lacks a concise explanation, but it does link to past discussions.
+
+### What's the difference between methods and functions?
+
+For example, how does a method such as:
+
+ def square(x: Int): Int = x * x
+
+differ from a function value such as:
+
+ val square: Int => Int = x => x * x
+
+For **Scala 2**, there is a [complete answer on Stack Overflow](https://stackoverflow.com/a/2530007/4111404)
+and a [summary with practical differences](https://tpolecat.github.io/2014/06/09/methods-functions.html).
+
+In **Scala 3**, the differences are fewer.
+[Context functions]({{ site.scala3ref }}/contextual/context-functions.html)
+accept given parameters and
+[polymorphic functions]({{ site.scala3ref }}/new-types/polymorphic-function-types.html)
+have type parameters.
+
+It's standard to use methods most of the time,
+except when a function value is actually needed.
+[Eta-expansion](https://stackoverflow.com/questions/39445018/what-is-the-eta-expansion-in-scala)
+converts methods to functions when needed.
+For example, a method such as `map` expects a function,
+but even if you `def square` as shown above, you can
+still `xs.map(square)`.
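+
+Continuing the `square` example, eta-expansion might look like this:
+
+    val xs = List(1, 2, 3)
+    xs.map(square)              // square is eta-expanded to a function value
+    val f: Int => Int = square  // the expected type drives the expansion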
+
+### What's the difference between types and classes?
+
+Types are primarily a compile-time concept. At compile time,
+every expression is assigned a type by the compiler.
+
+Classes are primarily a runtime concept and are platform-dependent.
+At runtime on the JVM, every value is either a primitive value
+or an instance of exactly one class.
+
+Some type information exists only at compile time,
+for multiple reasons, most notoriously
+[type erasure](https://en.wikipedia.org/wiki/Type_erasure).
+
+For an in-depth treatment of types vs. classes, see the blog post
+["There are more types than classes"](https://typelevel.org/blog/2017/02/13/more-types-than-classes.html).
+
+### Should I declare my parameterless method with or without parentheses?
+
+In other words, should one write `def foo()` or just `def foo`?
+
+Answer: by convention, the former is used to indicate that a method
+has side effects.
+
+For more details, see the Scala Style Guide, [here](https://docs.scala-lang.org/style/naming-conventions.html#parentheses).
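+
+For example (illustrative names only):
+
+    def readLine(): String = scala.io.StdIn.readLine()   // side-effecting, so declared with ()
+    def greeting: String = "hello"                       // pure accessor, so declared without ()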
+
+### How can a method in a superclass return a value of the “current” type?
+
+Using `this.type` will only work if you are returning `this` itself.
+`this.type` means "the singleton type of this instance". Only `this`
+itself has the type `this.type`; other instances of the same class do
+not.
+
+What does work for returning other values of the same type?
+
+Possible solutions include F-bounded polymorphism _(familiar to Java
+programmers)_, type members, and the [typeclass
+pattern](http://tpolecat.github.io/2013/10/12/typeclass.html).
+
+This [blog post](http://tpolecat.github.io/2015/04/29/f-bounds.html)
+argues against F-bounds and in favor of typeclasses;
+see also [this Stack Overflow post](https://stackoverflow.com/questions/59813323/advantages-of-f-bounded-polymorphism-over-typeclass-for-return-current-type-prob) for some counterpoint.
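+
+A minimal sketch of two of those approaches (illustrative names only):
+
+    // F-bounded polymorphism
+    trait Pet[A <: Pet[A]] { this: A =>
+      def renamed(name: String): A
+    }
+
+    // type class
+    trait Renamer[A] { def renamed(a: A, name: String): A }
+    def rename[A](a: A, name: String)(implicit r: Renamer[A]): A = r.renamed(a, name)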
+
+### What does `<:<` mean?
+
+It's a "type constraint", and it comes from the standard library,
+not from the language itself.
+See [this blog post](https://blog.bruchez.name/2015/11/generalized-type-constraints-in-scala.html).
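+
+For instance, a method can demand evidence that its type parameter is a subtype
+of some other type (a small sketch):
+
+    // compiles only where A is known to be a subtype of Int
+    def onlyInts[A](as: List[A])(implicit ev: A <:< Int): Int = as.map(ev).sum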
+
+### I dislike requiring callers to wrap optional arguments in `Some(...)`; is there a better way?
+
+Not really. See [this answer on Stack Overflow](https://stackoverflow.com/a/65256691/4111404).
+
+### Why is `implicit val` usually recommended over `implicit object`?
+
+The latter has a singleton type, which is too specific.
+See [answer on Stack Overflow](https://stackoverflow.com/a/65258340/4111404).
+
+### I got a `StackOverflowError` while compiling my code. Is it a compiler bug?
+
+It might be.
+
+To find out, try giving the compiler more stack and see if the
+error goes away.
+
+It's possible for the compiler to run out of stack when compiling some
+kinds of heavily nested code. The JVM's default stack size is rather
+small, so this can happen sooner than you might expect.
+
+The stack size can be changed by passing `-Xss...` at JVM startup, for
+example `-Xss16M`. How to do this depends on what IDE and/or build
+tool you are using. For sbt, add it to `.jvmopts`.
+
+If the stack overflow doesn't go away no matter how much stack you
+give the compiler, then it's a compiler bug. Please report it on the
+[Scala 2 bug tracker](https://github.com/scala/bug/issues) or [Scala 3
+bug tracker](https://github.com/scala/scala3/issues), but check
+first if it's a duplicate of an existing ticket.
+
+### I set a setting in sbt but nothing happened. Why?
+
+There could be a lot of reasons. An extremely common one, that
+almost everyone runs into sooner or later, is that you have a bare
+setting in a multi-project build.
+
+For example, if you add this to your `build.sbt`:
+
+ scalaVersion := "2.13.16"
+
+that's a "bare" setting, and you might expect it to apply build-wide.
+But it doesn't. _It only applies to the root project._
+
+In many cases one should instead write:
+
+ ThisBuild / scalaVersion := "2.13.16"
+
+Other possibilities include:
+
+* the common settings pattern, where you put shared settings
+ in a `val`, typically named `commonSettings`, and then add
+ `.settings(commonSettings)` to every project you want to
+ apply them to.
+* in interactive usage only, `set every`
+
+Here's some further reading:
+
+* [documentation on multi-project builds](https://www.scala-sbt.org/1.x/docs/Multi-Project.html#ThisBuild)
+* [issue about bare settings](https://github.com/sbt/sbt/issues/6217)
diff --git a/_overviews/FAQ/initialization-order.md b/_overviews/FAQ/initialization-order.md
index 21d24a7ba4..ebe07308c6 100644
--- a/_overviews/FAQ/initialization-order.md
+++ b/_overviews/FAQ/initialization-order.md
@@ -2,75 +2,112 @@
layout: multipage-overview
title: Why is my abstract or overridden val null?
overview-name: FAQ
-partof: FAQ
-
-num: 9
permalink: /tutorials/FAQ/:title.html
---
## Example
-To understand the problem, let's pick the following concrete example.
+
+The following example illustrates how classes in a subclass relation
+witness the initialization of two fields which are inherited from
+their top-most parent. The values are printed during the constructor
+of each class, that is, when an instance is initialized.
abstract class A {
val x1: String
val x2: String = "mom"
- println("A: " + x1 + ", " + x2)
+ println(s"A: $x1, $x2")
}
class B extends A {
val x1: String = "hello"
- println("B: " + x1 + ", " + x2)
+ println(s"B: $x1, $x2")
}
class C extends B {
override val x2: String = "dad"
- println("C: " + x1 + ", " + x2)
+ println(s"C: $x1, $x2")
}
-Let's observe the initialization order through the Scala REPL:
+In the Scala REPL we observe:
scala> new C
A: null, null
B: hello, null
C: hello, dad
-Only when we get to the constructor of `C` are both `x1` and `x2` initialized. Therefore, constructors of `A` and `B` risk running into `NullPointerException`s.
+Only when we get to the constructor of `C` are both `x1` and `x2` properly initialized.
+Therefore, constructors of `A` and `B` risk running into `NullPointerException`s,
+since fields are null-valued until set by a constructor.
## Explanation
-A 'strict' or 'eager' val is one which is not marked lazy.
-In the absence of "early definitions" (see below), initialization of strict vals is done in the following order.
+A "strict" or "eager" val is a `val` which is not a `lazy val`.
+Initialization of strict vals is done in the following order:
1. Superclasses are fully initialized before subclasses.
-2. Otherwise, in declaration order.
-
-Naturally when a val is overridden, it is not initialized more than once. So though x2 in the above example is seemingly defined at every point, this is not the case: an overridden val will appear to be null during the construction of superclasses, as will an abstract val.
-
-There is a compiler flag which can be useful for identifying this situation:
+2. Within the body or "template" of a class, vals are initialized in declaration order,
+ the order in which they are written in source.
+
+When a `val` is overridden, it's more precise to say that its accessor method (the "getter") is overridden.
+So the access to `x2` in class `A` invokes the overridden getter in class `C`.
+That getter reads the underlying field `C.x2`.
+This field is not yet initialized during the construction of `A`.
+
+## Mitigation
+
+The [`-Wsafe-init` compiler flag](https://docs.scala-lang.org/scala3/reference/other-new-features/safe-initialization.html)
+in Scala 3 enables a compile-time warning for accesses to uninitialized fields:
+
+ -- Warning: Test.scala:8:6 -----------------------------------------------------
+ 8 | val x1: String = "hello"
+ | ^
+ | Access non-initialized value x1. Calling trace:
+ | ├── class B extends A { [ Test.scala:7 ]
+ | │ ^
+ | ├── abstract class A { [ Test.scala:1 ]
+ | │ ^
+ | └── println(s"A: $x1, $x2") [ Test.scala:5 ]
+ | ^^
+
+In Scala 2, the `-Xcheckinit` flag adds runtime checks in the generated bytecode to identify accesses of uninitialized fields.
+That code throws an exception when an uninitialized field is referenced
+that would otherwise be used as a `null` value (or `0` or `false` in the case of primitive types).
+Note that these runtime checks only report code that is actually executed at runtime.
+Although these checks can be helpful to find accesses to uninitialized fields during development,
+it is never advisable to enable them in production code due to the performance cost.
+
+## Solutions
-**-Xcheckinit**: Add runtime check to field accessors.
+Approaches for avoiding null values include:
-It is inadvisable to use this flag outside of testing. It adds significantly to the code size by putting a wrapper around all potentially uninitialized field accesses: the wrapper will throw an exception rather than allow a null (or 0/false in the case of primitive types) to silently appear. Note also that this adds a *runtime* check: it can only tell you anything about code paths which you exercise with it in place.
+### Use class / trait parameters
-Using it on the opening example:
+ abstract class A(val x1: String, val x2: String = "mom") {
+ println("A: " + x1 + ", " + x2)
+ }
+ class B(x1: String = "hello", x2: String = "mom") extends A(x1, x2) {
+ println("B: " + x1 + ", " + x2)
+ }
+ class C(x2: String = "dad") extends B(x2 = x2) {
+ println("C: " + x1 + ", " + x2)
+ }
+ // scala> new C
+ // A: hello, dad
+ // B: hello, dad
+ // C: hello, dad
- % scalac -Xcheckinit a.scala
- % scala -e 'new C'
- scala.UninitializedFieldError: Uninitialized field: a.scala: 13
- at C.x2(a.scala:13)
- at A.(a.scala:5)
- at B.(a.scala:7)
- at C.(a.scala:12)
+Values passed as parameters to the superclass constructor are available in its body.
-### Solutions ###
+Scala 3 also [supports trait parameters](https://docs.scala-lang.org/scala3/reference/other-new-features/trait-parameters.html).
-Approaches for avoiding null values include:
+Note that overriding a `val` class parameter is deprecated / disallowed in Scala 3.
+Doing so in Scala 2 can lead to surprising behavior.
-#### Use lazy vals ####
+### Use lazy vals
abstract class A {
- val x1: String
+ lazy val x1: String
lazy val x2: String = "mom"
println("A: " + x1 + ", " + x2)
@@ -90,75 +127,54 @@ Approaches for avoiding null values include:
// B: hello, dad
// C: hello, dad
-Usually the best answer. Unfortunately you cannot declare an abstract lazy val. If that is what you're after, your options include:
+Note that abstract `lazy val`s are supported in Scala 3, but not in Scala 2.
+In Scala 2, you can define an abstract `val` or `def` instead.
-1. Declare an abstract strict val, and hope subclasses will implement it as a lazy val or with an early definition. If they do not, it will appear to be uninitialized at some points during construction.
-2. Declare an abstract def, and hope subclasses will implement it as a lazy val. If they do not, it will be re-evaluated on every access.
-3. Declare a concrete lazy val which throws an exception, and hope subclasses override it. If they do not, it will... throw an exception.
+An exception during initialization of a lazy val will cause the right-hand side to be re-evaluated on the next access; see SLS 5.2.
-An exception during initialization of a lazy val will cause the right hand side to be re-evaluated on the next access: see SLS 5.2.
+Note that using multiple lazy vals incurs a new risk: cycles among lazy vals can result in a stack overflow on first access.
+When lazy vals are annotated as thread-safe in Scala 3, they risk deadlock.
-Note that using multiple lazy vals creates a new risk: cycles among lazy vals can result in a stack overflow on first access.
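+
+A tiny sketch of such a cycle:
+
+    object Cycle {
+      lazy val a: Int = b + 1
+      lazy val b: Int = a + 1
+    }
+    // Cycle.a  // overflows the stack in Scala 2; thread-safe lazy vals may deadlock instead
+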
+### Use a nested object
-#### Use early definitions ####
- abstract class A {
- val x1: String
- val x2: String = "mom"
+For purposes of initialization, an object that is not top-level is the same as a lazy val.
- println("A: " + x1 + ", " + x2)
- }
- class B extends {
- val x1: String = "hello"
- } with A {
- println("B: " + x1 + ", " + x2)
- }
- class C extends {
- override val x2: String = "dad"
- } with B {
- println("C: " + x1 + ", " + x2)
- }
- // scala> new C
- // A: hello, dad
- // B: hello, dad
- // C: hello, dad
-
-Early definitions are a bit unwieldy, there are limitations as to what can appear and what can be referenced in an early definitions block, and they don't compose as well as lazy vals: but if a lazy val is undesirable, they present another option. They are specified in SLS 5.1.6.
+There may be reasons to prefer a lazy val, for example to specify the type of an implicit value,
+or to prefer an object, for example when it is the companion of a class.
+Otherwise, whichever syntax is most convenient may be preferred.
-#### Use constant value definitions ####
- abstract class A {
- val x1: String
- val x2: String = "mom"
+As an example, uninitialized state in a subclass may be accessed during construction of a superclass:
- println("A: " + x1 + ", " + x2)
+ class Adder {
+ var sum = 0
+ def add(x: Int): Unit = sum += x
+ add(1) // in LogAdder, the `added` set is not initialized yet
}
- class B extends A {
- val x1: String = "hello"
- final val x3 = "goodbye"
-
- println("B: " + x1 + ", " + x2)
+ class LogAdder extends Adder {
+ private var added: Set[Int] = Set.empty
+ override def add(x: Int): Unit = { added += x; super.add(x) }
}
- class C extends B {
- override val x2: String = "dad"
- println("C: " + x1 + ", " + x2)
- }
- abstract class D {
- val c: C
- val x3 = c.x3 // no exceptions!
- println("D: " + c + " but " + x3)
+In this case, the state can be initialized on demand by wrapping it in a local object:
+
+ class Adder {
+ var sum = 0
+ def add(x: Int): Unit = sum += x
+ add(1)
}
- class E extends D {
- val c = new C
- println(s"E: ${c.x1}, ${c.x2}, and $x3...")
+ class LogAdder extends Adder {
+ private object state {
+ var added: Set[Int] = Set.empty
+ }
+ import state._
+ override def add(x: Int): Unit = { added += x; super.add(x) }
}
- //scala> new E
- //D: null but goodbye
- //A: null, null
- //B: hello, null
- //C: hello, dad
- //E: hello, dad, and goodbye...
-Sometimes all you need from an interface is a compile-time constant.
+### Early definitions: deprecated
+
+Scala 2 supports early definitions, but they are deprecated in Scala 2.13 and unsupported in Scala 3.
+See the [migration guide](https://docs.scala-lang.org/scala3/guides/migration/incompat-dropped-features.html#early-initializer) for more information.
+
+Constant value definitions (specified in SLS 4.1 and available in Scala 2)
+and inlined definitions (in Scala 3) can work around initialization order issues
+because they can supply constant values without evaluating an instance that is not yet initialized.
-Constant values are stricter than strict and earlier than early definitions and have even more limitations,
-as they must be constants. They are specified in SLS 4.1.
diff --git a/_overviews/FAQ/stream-view-iterator.md b/_overviews/FAQ/stream-view-iterator.md
deleted file mode 100644
index 5942312a4f..0000000000
--- a/_overviews/FAQ/stream-view-iterator.md
+++ /dev/null
@@ -1,45 +0,0 @@
----
-layout: multipage-overview
-title: What is the difference between view, stream and iterator?
-overview-name: FAQ
-partof: FAQ
-
-num: 4
-permalink: /tutorials/FAQ/:title.html
----
-First, they are all _non-strict_. That has a particular mathematical meaning
-related to functions, but, basically, means they are computed on-demand instead
-of in advance.
-
-`Stream` is a lazy list indeed. In fact, in Scala, a `Stream` is a `List` whose
-`tail` is a `lazy val`. Once computed, a value stays computed and is reused.
-Or, as you say, the values are cached.
-
-An `Iterator` can only be used once because it is a _traversal pointer_ into a
-collection, and not a collection in itself. What makes it special in Scala is
-the fact that you can apply transformation such as `map` and `filter` and
-simply get a new `Iterator` which will only apply these transformations when
-you ask for the next element.
-
-Scala used to provide iterators which could be reset, but that is very hard to
-support in a general manner, and they didn't make it into version 2.8.0.
-
-Views are meant to be viewed much like a database view. It is a series of
-transformations which one applies to a collection to produce a "virtual"
-collection. As you said, all transformations are re-applied each time you need
-to fetch elements from it.
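-
-A small illustration of that re-application:
-
-    val v = (1 to 3).view.map { x => println(s"mapping $x"); x * 2 }
-    v.head   // prints "mapping 1": the transformation runs on demand
-    v.head   // prints "mapping 1" again: it is re-applied on each access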
-
-Both `Iterator` and views have excellent memory characteristics. `Stream` is
-nice, but, in Scala, its main benefit is writing infinite sequences
-(particularly sequences recursively defined). One _can_ avoid keeping all of
-the `Stream` in memory, though, by making sure you don't keep a reference to
-its `head` (for example, by using `def` instead of `val` to define the
-`Stream`).
-
-Because of the penalties incurred by views, one should usually `force` it after
-applying the transformations, or keep it as a view if only few elements are
-expected to ever be fetched, compared to the total size of the view.
-
-This answer was originally submitted in response to [this question on Stack Overflow][1].
-
- [1]: https://stackoverflow.com/q/5159000/53013
diff --git a/_overviews/FAQ/yield.md b/_overviews/FAQ/yield.md
deleted file mode 100644
index 638312bd61..0000000000
--- a/_overviews/FAQ/yield.md
+++ /dev/null
@@ -1,156 +0,0 @@
----
-layout: multipage-overview
-title: How does yield work?
-partof: FAQ
-num: 2
-permalink: /tutorials/FAQ/:title.html
----
-Though there's a `yield` in other languages such as Python and Ruby, Scala's
-`yield` does something very different from them. In Scala, `yield` is part
-of for comprehensions -- a generalization of Ruby and Python's list-comprehensions.
-
-Scala's "for comprehensions" are equivalent to Haskell's "do" notation, and it
-is nothing more than a syntactic sugar for composition of multiple monadic
-operations. As this statement will most likely not help anyone who needs help,
-let's try again...
-
-Translating for-comprehensions
-------------------------------
-
-Scala's "for comprehensions" are syntactic sugar for composition of multiple
-operations with `foreach`, `map`, `flatMap`, `filter` or `withFilter`.
-Scala actually translates a for-expression into calls to those methods,
-so any class providing them, or a subset of them, can be used with for comprehensions.
-
-First, let's talk about the translations. There are very simple rules:
-
-#### Example 1
-
- for(x <- c1; y <- c2; z <-c3) {...}
-
-is translated into
-
- c1.foreach(x => c2.foreach(y => c3.foreach(z => {...})))
-
-#### Example 2
-
- for(x <- c1; y <- c2; z <- c3) yield {...}
-
-is translated into
-
- c1.flatMap(x => c2.flatMap(y => c3.map(z => {...})))
-
-#### Example 3
-
- for(x <- c; if cond) yield {...}
-
-is translated into
-
- c.withFilter(x => cond).map(x => {...})
-
-with a fallback into
-
- c.filter(x => cond).map(x => {...})
-
-if method `withFilter` is not available but `filter` is.
-The next chapter has more information on this.
-
-#### Example 4
-
- for(x <- c; y = ...) yield {...}
-
-is translated into
-
- c.map(x => (x, ...)).map((x,y) => {...})
-
-
-When you look at very simple for comprehensions, the map/foreach alternatives
-look, indeed, better. Once you start composing them, though, you can easily get
-lost in parenthesis and nesting levels. When that happens, for comprehensions
-are usually much clearer.
-
-I'll show one simple example, and intentionally omit any explanation. You can
-decide which syntax is easier to understand.
-
- l.flatMap(sl => sl.filter(el => el > 0).map(el => el.toString.length))
-
-or
-
- for{
- sl <- l
- el <- sl
- if el > 0
- } yield el.toString.length
-
-
-About withFilter, and strictness
-----------------------------------
-
-Scala 2.8 introduced a method called `withFilter`, whose main difference is
-that, instead of returning a new, filtered, collection, it filters on-demand.
-The `filter` method has its behavior defined based on the strictness of the
-collection. To understand this better, let's take a look at some Scala 2.7 with
-`List` (strict) and `Stream` (non-strict):
-
- scala> var found = false
- found: Boolean = false
-
- scala> List.range(1,10).filter(_ % 2 == 1 && !found).foreach(x => if (x == 5) found = true else println(x))
- 1
- 3
- 7
- 9
-
- scala> found = false
- found: Boolean = false
-
- scala> Stream.range(1,10).filter(_ % 2 == 1 && !found).foreach(x => if (x == 5) found = true else println(x))
- 1
- 3
-
-The difference happens because filter is immediately applied with `List`,
-returning a list of odds -- since `found` is `false`. Only then `foreach` is
-executed, but, by this time, changing `found` is meaningless, as `filter` has
-already executed.
-
-In the case of `Stream`, the condition is not immediately applied. Instead, as
-each element is requested by `foreach`, `filter` tests the condition, which
-enables `foreach` to influence it through `found`. Just to make it clear, here
-is the equivalent for-comprehension code:
-
- for (x <- List.range(1, 10); if x % 2 == 1 && !found)
- if (x == 5) found = true else println(x)
-
- for (x <- Stream.range(1, 10); if x % 2 == 1 && !found)
- if (x == 5) found = true else println(x)
-
-This caused many problems, because people expected the `if` to be considered
-on-demand, instead of being applied to the whole collection beforehand.
-
-Scala 2.8 introduced `withFilter`, which is _always_ non-strict, no matter the
-strictness of the collection. The following example shows `List` with both
-methods on Scala 2.8:
-
- scala> var found = false
- found: Boolean = false
-
- scala> List.range(1,10).filter(_ % 2 == 1 && !found).foreach(x => if (x == 5) found = true else println(x))
- 1
- 3
- 7
- 9
-
- scala> found = false
- found: Boolean = false
-
- scala> List.range(1,10).withFilter(_ % 2 == 1 && !found).foreach(x => if (x == 5) found = true else println(x))
- 1
- 3
-
-This produces the result most people expect, without changing how `filter`
-behaves. As a side note, `Range` was changed from non-strict to strict between
-Scala 2.7 and Scala 2.8.
-
-This answer was originally submitted in response to [this question on Stack Overflow][1].
-
- [1]: https://stackoverflow.com/questions/1052476/can-someone-explain-scalas-yield/1052510#1052510
diff --git a/_overviews/collections-2.13/arrays.md b/_overviews/collections-2.13/arrays.md
index 64d96a95db..32f9fb0584 100644
--- a/_overviews/collections-2.13/arrays.md
+++ b/_overviews/collections-2.13/arrays.md
@@ -14,23 +14,40 @@ permalink: /overviews/collections-2.13/:title.html
[Array](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/Array.html) is a special kind of collection in Scala. On the one hand, Scala arrays correspond one-to-one to Java arrays. That is, a Scala array `Array[Int]` is represented as a Java `int[]`, an `Array[Double]` is represented as a Java `double[]` and an `Array[String]` is represented as a Java `String[]`. But at the same time, Scala arrays offer much more than their Java analogues. First, Scala arrays can be _generic_. That is, you can have an `Array[T]`, where `T` is a type parameter or abstract type. Second, Scala arrays are compatible with Scala sequences - you can pass an `Array[T]` where a `Seq[T]` is required. Finally, Scala arrays also support all sequence operations. Here's an example of this in action:
- scala> val a1 = Array(1, 2, 3)
- a1: Array[Int] = Array(1, 2, 3)
- scala> val a2 = a1 map (_ * 3)
- a2: Array[Int] = Array(3, 6, 9)
- scala> val a3 = a2 filter (_ % 2 != 0)
- a3: Array[Int] = Array(3, 9)
- scala> a3.reverse
- res0: Array[Int] = Array(9, 3)
+{% tabs arrays_1 %}
+{% tab 'Scala 2 and 3' for=arrays_1 %}
+```scala
+scala> val a1 = Array(1, 2, 3)
+val a1: Array[Int] = Array(1, 2, 3)
+
+scala> val a2 = a1.map(_ * 3)
+val a2: Array[Int] = Array(3, 6, 9)
+
+scala> val a3 = a2.filter(_ % 2 != 0)
+val a3: Array[Int] = Array(3, 9)
+
+scala> a3.reverse
+val res0: Array[Int] = Array(9, 3)
+```
+{% endtab %}
+{% endtabs %}
Given that Scala arrays are represented just like Java arrays, how can these additional features be supported in Scala? The Scala array implementation makes systematic use of implicit conversions. In Scala, an array does not pretend to _be_ a sequence. It can't really be that because the data type representation of a native array is not a subtype of `Seq`. Instead there is an implicit "wrapping" conversion between arrays and instances of class `scala.collection.mutable.ArraySeq`, which is a subclass of `Seq`. Here you see it in action:
- scala> val seq: collection.Seq[Int] = a1
- seq: scala.collection.Seq[Int] = ArraySeq(1, 2, 3)
- scala> val a4: Array[Int] = seq.toArray
- a4: Array[Int] = Array(1, 2, 3)
- scala> a1 eq a4
- res1: Boolean = false
+{% tabs arrays_2 %}
+{% tab 'Scala 2 and 3' for=arrays_2 %}
+```scala
+scala> val seq: collection.Seq[Int] = a1
+val seq: scala.collection.Seq[Int] = ArraySeq(1, 2, 3)
+
+scala> val a4: Array[Int] = seq.toArray
+val a4: Array[Int] = Array(1, 2, 3)
+
+scala> a1 eq a4
+val res1: Boolean = false
+```
+{% endtab %}
+{% endtabs %}
The interaction above demonstrates that arrays are compatible with sequences, because there's an implicit conversion from arrays to `ArraySeq`s. To go the other way, from an `ArraySeq` to an `Array`, you can use the `toArray` method defined in `Iterable`. The last REPL line above shows that wrapping and then unwrapping with `toArray` produces a copy of the original array.
@@ -38,82 +55,188 @@ There is yet another implicit conversion that gets applied to arrays. This conve
The difference between the two implicit conversions on arrays is shown in the next REPL dialogue:
- scala> val seq: collection.Seq[Int] = a1
- seq: scala.collection.Seq[Int] = ArraySeq(1, 2, 3)
- scala> seq.reverse
- res2: scala.collection.Seq[Int] = ArraySeq(3, 2, 1)
- scala> val ops: collection.ArrayOps[Int] = a1
- ops: scala.collection.ArrayOps[Int] = scala.collection.ArrayOps@2d7df55
- scala> ops.reverse
- res3: Array[Int] = Array(3, 2, 1)
-
-You see that calling reverse on `seq`, which is an `ArraySeq`, will give again a `ArraySeq`. That's logical, because arrayseqs are `Seqs`, and calling reverse on any `Seq` will give again a `Seq`. On the other hand, calling reverse on the ops value of class `ArrayOps` will give an `Array`, not a `Seq`.
-
-The `ArrayOps` example above was quite artificial, intended only to show the difference to `ArraySeq`. Normally, you'd never define a value of class `ArrayOps`. You'd just call a `Seq` method on an array:
+{% tabs arrays_3 %}
+{% tab 'Scala 2 and 3' for=arrays_3 %}
+```scala
+scala> val seq: collection.Seq[Int] = a1
+val seq: scala.collection.Seq[Int] = ArraySeq(1, 2, 3)
- scala> a1.reverse
- res4: Array[Int] = Array(3, 2, 1)
+scala> seq.reverse
+val res2: scala.collection.Seq[Int] = ArraySeq(3, 2, 1)
-The `ArrayOps` object gets inserted automatically by the implicit conversion. So the line above is equivalent to
+scala> val ops: collection.ArrayOps[Int] = a1
+val ops: scala.collection.ArrayOps[Int] = scala.collection.ArrayOps@2d7df55
- scala> intArrayOps(a1).reverse
- res5: Array[Int] = Array(3, 2, 1)
+scala> ops.reverse
+val res3: Array[Int] = Array(3, 2, 1)
+```
+{% endtab %}
+{% endtabs %}
-where `intArrayOps` is the implicit conversion that was inserted previously. This raises the question how the compiler picked `intArrayOps` over the other implicit conversion to `ArraySeq` in the line above. After all, both conversions map an array to a type that supports a reverse method, which is what the input specified. The answer to that question is that the two implicit conversions are prioritized. The `ArrayOps` conversion has a higher priority than the `ArraySeq` conversion. The first is defined in the `Predef` object whereas the second is defined in a class `scala.LowPriorityImplicits`, which is inherited by `Predef`. Implicits in subclasses and subobjects take precedence over implicits in base classes. So if both conversions are applicable, the one in `Predef` is chosen. A very similar scheme works for strings.
+You see that calling `reverse` on `seq`, which is an `ArraySeq`, again gives an `ArraySeq`. That's logical, because `ArraySeq`s are `Seq`s, and calling `reverse` on any `Seq` again gives a `Seq`. On the other hand, calling `reverse` on the `ops` value of class `ArrayOps` gives an `Array`, not a `Seq`.
-So now you know how arrays can be compatible with sequences and how they can support all sequence operations. What about genericity? In Java you cannot write a `T[]` where `T` is a type parameter. How then is Scala's `Array[T]` represented? In fact a generic array like `Array[T]` could be at run-time any of Java's eight primitive array types `byte[]`, `short[]`, `char[]`, `int[]`, `long[]`, `float[]`, `double[]`, `boolean[]`, or it could be an array of objects. The only common run-time type encompassing all of these types is `AnyRef` (or, equivalently `java.lang.Object`), so that's the type to which the Scala compiler maps `Array[T]`. At run-time, when an element of an array of type `Array[T]` is accessed or updated there is a sequence of type tests that determine the actual array type, followed by the correct array operation on the Java array. These type tests slow down array operations somewhat. You can expect accesses to generic arrays to be three to four times slower than accesses to primitive or object arrays. This means that if you need maximal performance, you should prefer concrete over generic arrays. Representing the generic array type is not enough, however, there must also be a way to create generic arrays. This is an even harder problem, which requires a little bit of help from you. To illustrate the problem, consider the following attempt to write a generic method that creates an array.
+The `ArrayOps` example above was quite artificial, intended only to show the difference to `ArraySeq`. Normally, you'd never define a value of class `ArrayOps`. You'd just call a `Seq` method on an array:
- // this is wrong!
- def evenElems[T](xs: Vector[T]): Array[T] = {
- val arr = new Array[T]((xs.length + 1) / 2)
- for (i <- 0 until xs.length by 2)
- arr(i / 2) = xs(i)
- arr
- }
+{% tabs arrays_4 %}
+{% tab 'Scala 2 and 3' for=arrays_4 %}
+```scala
+scala> a1.reverse
+val res4: Array[Int] = Array(3, 2, 1)
+```
+{% endtab %}
+{% endtabs %}
-The `evenElems` method returns a new array that consist of all elements of the argument vector `xs` which are at even positions in the vector. The first line of the body of `evenElems` creates the result array, which has the same element type as the argument. So depending on the actual type parameter for `T`, this could be an `Array[Int]`, or an `Array[Boolean]`, or an array of some of the other primitive types in Java, or an array of some reference type. But these types have all different runtime representations, so how is the Scala runtime going to pick the correct one? In fact, it can't do that based on the information it is given, because the actual type that corresponds to the type parameter `T` is erased at runtime. That's why you will get the following error message if you compile the code above:
+The `ArrayOps` object gets inserted automatically by the implicit conversion. So the line above is equivalent to
- error: cannot find class manifest for element type T
- val arr = new Array[T]((arr.length + 1) / 2)
- ^
+{% tabs arrays_5 %}
+{% tab 'Scala 2 and 3' for=arrays_5 %}
+```scala
+scala> intArrayOps(a1).reverse
+val res5: Array[Int] = Array(3, 2, 1)
+```
+{% endtab %}
+{% endtabs %}
+
+where `intArrayOps` is the implicit conversion that was inserted previously. This raises the question of how the compiler picked `intArrayOps` over the other implicit conversion to `ArraySeq` in the line above. After all, both conversions map an array to a type that supports a reverse method, which is what the input specified. The answer to that question is that the two implicit conversions are prioritized. The `ArrayOps` conversion has a higher priority than the `ArraySeq` conversion. The first is defined in the `Predef` object whereas the second is defined in a class `scala.LowPriorityImplicits`, which is inherited by `Predef`. Implicits in subclasses and subobjects take precedence over implicits in base classes. So if both conversions are applicable, the one in `Predef` is chosen. A very similar scheme works for strings.
+
+So now you know how arrays can be compatible with sequences and how they can support all sequence operations. What about genericity? In Java, you cannot write a `T[]` where `T` is a type parameter. How then is Scala's `Array[T]` represented? In fact, a generic array like `Array[T]` could be at run-time any of Java's eight primitive array types `byte[]`, `short[]`, `char[]`, `int[]`, `long[]`, `float[]`, `double[]`, `boolean[]`, or it could be an array of objects. The only common run-time type encompassing all of these types is `AnyRef` (or, equivalently, `java.lang.Object`), so that's the type to which the Scala compiler maps `Array[T]`. At run-time, when an element of an array of type `Array[T]` is accessed or updated, there is a sequence of type tests that determine the actual array type, followed by the correct array operation on the Java array. These type tests slow down array operations somewhat. You can expect accesses to generic arrays to be three to four times slower than accesses to primitive or object arrays. This means that if you need maximal performance, you should prefer concrete to generic arrays. Representing the generic array type is not enough, however; there must also be a way to create generic arrays. This is an even harder problem, which requires a little help from you. To illustrate the issue, consider the following attempt to write a generic method that creates an array.
+
+{% tabs arrays_6 class=tabs-scala-version %}
+{% tab 'Scala 2' for=arrays_6 %}
+```scala mdoc:fail
+// this is wrong!
+def evenElems[T](xs: Vector[T]): Array[T] = {
+ val arr = new Array[T]((xs.length + 1) / 2)
+ for (i <- 0 until xs.length by 2)
+ arr(i / 2) = xs(i)
+ arr
+}
+```
+{% endtab %}
+{% tab 'Scala 3' for=arrays_6 %}
+```scala
+// this is wrong!
+def evenElems[T](xs: Vector[T]): Array[T] =
+ val arr = new Array[T]((xs.length + 1) / 2)
+ for i <- 0 until xs.length by 2 do
+ arr(i / 2) = xs(i)
+ arr
+```
+{% endtab %}
+{% endtabs %}
+
+The `evenElems` method returns a new array that consists of all elements of the argument vector `xs` which are at even positions in the vector. The first line of the body of `evenElems` creates the result array, which has the same element type as the argument. So depending on the actual type parameter for `T`, this could be an `Array[Int]`, or an `Array[Boolean]`, or an array of one of the other primitive types in Java, or an array of some reference type. But these types all have different runtime representations, so how is the Scala runtime going to pick the correct one? In fact, it can't do that based on the information it is given, because the actual type that corresponds to the type parameter `T` is erased at runtime. That's why you will get the following error message if you compile the code above:
+
+{% tabs arrays_7 class=tabs-scala-version %}
+{% tab 'Scala 2' for=arrays_7 %}
+```scala
+error: cannot find class manifest for element type T
+ val arr = new Array[T]((arr.length + 1) / 2)
+ ^
+```
+{% endtab %}
+{% tab 'Scala 3' for=arrays_7 %}
+```scala
+-- Error: ----------------------------------------------------------------------
+3 | val arr = new Array[T]((xs.length + 1) / 2)
+ | ^
+ | No ClassTag available for T
+```
+{% endtab %}
+{% endtabs %}
What's required here is that you help the compiler out by providing a runtime hint about what the actual type parameter of `evenElems` is. This runtime hint takes the form of a class manifest of type `scala.reflect.ClassTag`. A class manifest is a type descriptor object which describes what the top-level class of a type is. As an alternative to class manifests, there are also full manifests of type `scala.reflect.Manifest`, which describe all aspects of a type. But for array creation, only class manifests are needed.
The Scala compiler will construct class manifests automatically if you instruct it to do so. "Instructing" means that you demand a class manifest as an implicit parameter, like this:
- def evenElems[T](xs: Vector[T])(implicit m: ClassTag[T]): Array[T] = ...
+{% tabs arrays_8 class=tabs-scala-version %}
+{% tab 'Scala 2' for=arrays_8 %}
+```scala
+def evenElems[T](xs: Vector[T])(implicit m: ClassTag[T]): Array[T] = ...
+```
+{% endtab %}
+{% tab 'Scala 3' for=arrays_8 %}
+```scala
+def evenElems[T](xs: Vector[T])(using m: ClassTag[T]): Array[T] = ...
+```
+{% endtab %}
+{% endtabs %}
Using an alternative and shorter syntax, you can also demand that the type comes with a class manifest by using a context bound. This means following the type with a colon and the class name `ClassTag`, like this:
- import scala.reflect.ClassTag
- // this works
- def evenElems[T: ClassTag](xs: Vector[T]): Array[T] = {
- val arr = new Array[T]((xs.length + 1) / 2)
- for (i <- 0 until xs.length by 2)
- arr(i / 2) = xs(i)
- arr
- }
+{% tabs arrays_9 class=tabs-scala-version %}
+{% tab 'Scala 2' for=arrays_9 %}
+```scala
+import scala.reflect.ClassTag
+// this works
+def evenElems[T: ClassTag](xs: Vector[T]): Array[T] = {
+ val arr = new Array[T]((xs.length + 1) / 2)
+ for (i <- 0 until xs.length by 2)
+ arr(i / 2) = xs(i)
+ arr
+}
+```
+{% endtab %}
+{% tab 'Scala 3' for=arrays_9 %}
+```scala
+import scala.reflect.ClassTag
+// this works
+def evenElems[T: ClassTag](xs: Vector[T]): Array[T] =
+ val arr = new Array[T]((xs.length + 1) / 2)
+ for i <- 0 until xs.length by 2 do
+ arr(i / 2) = xs(i)
+ arr
+```
+{% endtab %}
+{% endtabs %}
The two revised versions of `evenElems` mean exactly the same. What happens in either case is that when the `Array[T]` is constructed, the compiler will look for a class manifest for the type parameter T, that is, it will look for an implicit value of type `ClassTag[T]`. If such a value is found, the manifest is used to construct the right kind of array. Otherwise, you'll see an error message like the one above.
Here is some REPL interaction that uses the `evenElems` method.
- scala> evenElems(Vector(1, 2, 3, 4, 5))
- res6: Array[Int] = Array(1, 3, 5)
- scala> evenElems(Vector("this", "is", "a", "test", "run"))
- res7: Array[java.lang.String] = Array(this, a, run)
+{% tabs arrays_10 %}
+{% tab 'Scala 2 and 3' for=arrays_10 %}
+```scala
+scala> evenElems(Vector(1, 2, 3, 4, 5))
+val res6: Array[Int] = Array(1, 3, 5)
+
+scala> evenElems(Vector("this", "is", "a", "test", "run"))
+val res7: Array[java.lang.String] = Array(this, a, run)
+```
+{% endtab %}
+{% endtabs %}
In both cases, the Scala compiler automatically constructed a class manifest for the element type (first, `Int`, then `String`) and passed it to the implicit parameter of the `evenElems` method. The compiler can do that for all concrete types, but not if the argument is itself another type parameter without its class manifest. For instance, the following fails:
- scala> def wrap[U](xs: Vector[U]) = evenElems(xs)
- :6: error: No ClassTag available for U.
- def wrap[U](xs: Vector[U]) = evenElems(xs)
- ^
+{% tabs arrays_11 class=tabs-scala-version %}
+{% tab 'Scala 2' for=arrays_11 %}
+```scala
+scala> def wrap[U](xs: Vector[U]) = evenElems(xs)
+:6: error: No ClassTag available for U.
+ def wrap[U](xs: Vector[U]) = evenElems(xs)
+ ^
+```
+{% endtab %}
+{% tab 'Scala 3' for=arrays_11 %}
+```scala
+-- Error: ----------------------------------------------------------------------
+6 |def wrap[U](xs: Vector[U]) = evenElems(xs)
+ | ^
+ | No ClassTag available for U
+```
+{% endtab %}
+{% endtabs %}
What happened here is that the `evenElems` demands a class manifest for the type parameter `U`, but none was found. The solution in this case is, of course, to demand another implicit class manifest for `U`. So the following works:
- scala> def wrap[U: ClassTag](xs: Vector[U]) = evenElems(xs)
- wrap: [U](xs: Vector[U])(implicit evidence$1: scala.reflect.ClassTag[U])Array[U]
+{% tabs arrays_12 %}
+{% tab 'Scala 2 and 3' for=arrays_12 %}
+```scala
+scala> def wrap[U: ClassTag](xs: Vector[U]) = evenElems(xs)
+def wrap[U](xs: Vector[U])(implicit evidence$1: scala.reflect.ClassTag[U]): Array[U]
+```
+{% endtab %}
+{% endtabs %}
This example also shows that the context bound in the definition of `U` is just a shorthand for an implicit parameter named here `evidence$1` of type `ClassTag[U]`.
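+
+For illustration, here is a hand-written equivalent of the context-bound version (a sketch: the compiler picks its own parameter name, such as `evidence$1` above, whereas `ct` here is just an arbitrary choice):
+
+```scala
+import scala.reflect.ClassTag
+
+// roughly what `def wrap[U: ClassTag](xs: Vector[U])` expands to
+def wrap[U](xs: Vector[U])(implicit ct: ClassTag[U]): Array[U] = evenElems(xs)
+```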
diff --git a/_overviews/collections-2.13/concrete-immutable-collection-classes.md b/_overviews/collections-2.13/concrete-immutable-collection-classes.md
index 166f3e280d..f4d746de58 100644
--- a/_overviews/collections-2.13/concrete-immutable-collection-classes.md
+++ b/_overviews/collections-2.13/concrete-immutable-collection-classes.md
@@ -24,25 +24,42 @@ A [LazyList](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/colle
Whereas lists are constructed with the `::` operator, lazy lists are constructed with the similar-looking `#::`. Here is a simple example of a lazy list containing the integers 1, 2, and 3:
- scala> val lazyList = 1 #:: 2 #:: 3 #:: LazyList.empty
- lazyList: scala.collection.immutable.LazyList[Int] = LazyList()
+{% tabs LazyList_1 %}
+{% tab 'Scala 2 and 3' for=LazyList_1 %}
+~~~scala
+scala> val lazyList = 1 #:: 2 #:: 3 #:: LazyList.empty
+lazyList: scala.collection.immutable.LazyList[Int] = LazyList()
+~~~
+{% endtab %}
+{% endtabs %}
The head of this lazy list is 1, and the tail of it has 2 and 3. None of the elements are printed here, though, because the list
hasn’t been computed yet! Lazy lists are specified to compute lazily, and the `toString` method of a lazy list is careful not to force any extra evaluation.
Below is a more complex example. It computes a lazy list that contains a Fibonacci sequence starting with the given two numbers. A Fibonacci sequence is one where each element is the sum of the previous two elements in the series.
-
- scala> def fibFrom(a: Int, b: Int): LazyList[Int] = a #:: fibFrom(b, a + b)
- fibFrom: (a: Int,b: Int)LazyList[Int]
+{% tabs LazyList_2 %}
+{% tab 'Scala 2 and 3' for=LazyList_2 %}
+~~~scala
+scala> def fibFrom(a: Int, b: Int): LazyList[Int] = a #:: fibFrom(b, a + b)
+fibFrom: (a: Int,b: Int)LazyList[Int]
+~~~
+{% endtab %}
+{% endtabs %}
This function is deceptively simple. The first element of the sequence is clearly `a`, and the rest of the sequence is the Fibonacci sequence starting with `b` followed by `a + b`. The tricky part is computing this sequence without causing an infinite recursion. If the function used `::` instead of `#::`, then every call to the function would result in another call, thus causing an infinite recursion. Since it uses `#::`, though, the right-hand side is not evaluated until it is requested.
Here are the first few elements of the Fibonacci sequence starting with two ones:
- scala> val fibs = fibFrom(1, 1).take(7)
- fibs: scala.collection.immutable.LazyList[Int] = LazyList()
- scala> fibs.toList
- res9: List[Int] = List(1, 1, 2, 3, 5, 8, 13)
+{% tabs LazyList_3 %}
+{% tab 'Scala 2 and 3' for=LazyList_3 %}
+~~~scala
+scala> val fibs = fibFrom(1, 1).take(7)
+fibs: scala.collection.immutable.LazyList[Int] = LazyList()
+scala> fibs.toList
+res9: List[Int] = List(1, 1, 2, 3, 5, 8, 13)
+~~~
+{% endtab %}
+{% endtabs %}
## Immutable ArraySeqs
@@ -56,7 +73,9 @@ and thus they can be much more convenient to write.
ArraySeqs are built and updated just like any other sequence.
-~~~
+{% tabs ArraySeq_1 %}
+{% tab 'Scala 2 and 3' for=ArraySeq_1 %}
+~~~scala
scala> val arr = scala.collection.immutable.ArraySeq(1, 2, 3)
arr: scala.collection.immutable.ArraySeq[Int] = ArraySeq(1, 2, 3)
scala> val arr2 = arr :+ 4
@@ -64,43 +83,55 @@ arr2: scala.collection.immutable.ArraySeq[Int] = ArraySeq(1, 2, 3, 4)
scala> arr2(0)
res22: Int = 1
~~~
+{% endtab %}
+{% endtabs %}
ArraySeqs are immutable, so you cannot change an element in place. However, the `updated`, `appended` and `prepended`
operations create new ArraySeqs that differ from a given ArraySeq only in a single element:
-~~~
+{% tabs ArraySeq_2 %}
+{% tab 'Scala 2 and 3' for=ArraySeq_2 %}
+~~~scala
scala> arr.updated(2, 4)
res26: scala.collection.immutable.ArraySeq[Int] = ArraySeq(1, 2, 4)
scala> arr
res27: scala.collection.immutable.ArraySeq[Int] = ArraySeq(1, 2, 3)
~~~
+{% endtab %}
+{% endtabs %}
As the last line above shows, a call to `updated` has no effect on the original ArraySeq `arr`.
-ArraySeqs store their elements in a private [Array](arrays.html). This is a compact representation that supports fast
+ArraySeqs store their elements in a private [Array]({% link _overviews/collections-2.13/arrays.md %}). This is a compact representation that supports fast
indexed access, but updating or adding one element is linear since it requires creating another array and copying all
the original array’s elements.
## Vectors
We have seen in the previous sections that `List` and `ArraySeq` are efficient data structures in some specific
-use cases but they are also inefficient in other use cases: for instance, prepending an element is constant for `List`,
+use cases, but they are also inefficient in other use cases: for instance, prepending an element is constant for `List`,
but linear for `ArraySeq`, and, conversely, indexed access is constant for `ArraySeq` but linear for `List`.
[Vector](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/immutable/Vector.html) is a collection type that provides good performance for all its operations. Vectors allow accessing any element of the sequence in "effectively" constant time. It's a larger constant than for access to the head of a List or for reading an element of an ArraySeq, but it's a constant nonetheless. As a result, algorithms using vectors do not have to be careful about accessing just the head of the sequence. They can access and modify elements at arbitrary locations, and thus they can be much more convenient to write.
Vectors are built and modified just like any other sequence.
- scala> val vec = scala.collection.immutable.Vector.empty
- vec: scala.collection.immutable.Vector[Nothing] = Vector()
- scala> val vec2 = vec :+ 1 :+ 2
- vec2: scala.collection.immutable.Vector[Int] = Vector(1, 2)
- scala> val vec3 = 100 +: vec2
- vec3: scala.collection.immutable.Vector[Int] = Vector(100, 1, 2)
- scala> vec3(0)
- res1: Int = 100
+{% tabs Vector_1 %}
+{% tab 'Scala 2 and 3' for=Vector_1 %}
+~~~scala
+scala> val vec = scala.collection.immutable.Vector.empty
+vec: scala.collection.immutable.Vector[Nothing] = Vector()
+scala> val vec2 = vec :+ 1 :+ 2
+vec2: scala.collection.immutable.Vector[Int] = Vector(1, 2)
+scala> val vec3 = 100 +: vec2
+vec3: scala.collection.immutable.Vector[Int] = Vector(100, 1, 2)
+scala> vec3(0)
+res1: Int = 100
+~~~
+{% endtab %}
+{% endtabs %}
-Vectors are represented as trees with a high branching factor. (The branching factor of a tree or a graph is the number of children at each node.) The details of how this is accomplished [changed](https://github.com/scala/scala/pull/8534) in Scala 2.13.2, but the basic idea remains the same, as follows.
+Vectors are represented as trees with a high branching factor (the branching factor of a tree or a graph is the number of children at each node). The details of how this is accomplished [changed](https://github.com/scala/scala/pull/8534) in Scala 2.13.2, but the basic idea remains the same, as follows.
Every tree node contains up to 32 elements of the vector or contains up to 32 other tree nodes. Vectors with up to 32 elements can be represented in a single node. Vectors with up to `32 * 32 = 1024` elements can be represented with a single indirection. Two hops from the root of the tree to the final element node are sufficient for vectors with up to 2<sup>15</sup> elements, three hops for vectors with 2<sup>20</sup>, four hops for vectors with 2<sup>25</sup> elements and five hops for vectors with up to 2<sup>30</sup> elements. So for all vectors of reasonable size, an element selection involves up to 5 primitive array selections. This is what we meant when we wrote that element access is "effectively constant time".
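+
+As a small illustration (a sketch, with made-up values), both element access and functional updates touch only a handful of tree nodes, and an updated vector shares almost all of its structure with the original:
+
+~~~scala
+val v1 = Vector.tabulate(1000)(identity)
+val v2 = v1.updated(500, -1) // a new vector; nearly all tree nodes are shared with v1
+v1(500)                      // 500 -- the original is unchanged
+v2(500)                      // -1
+~~~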
@@ -108,8 +139,14 @@ Like selection, functional vector updates are also "effectively constant time".
Because vectors strike a good balance between fast random selections and fast random functional updates, they are currently the default implementation of immutable indexed sequences:
- scala> collection.immutable.IndexedSeq(1, 2, 3)
- res2: scala.collection.immutable.IndexedSeq[Int] = Vector(1, 2, 3)
+{% tabs Vector_2 %}
+{% tab 'Scala 2 and 3' for=Vector_2 %}
+~~~scala
+scala> collection.immutable.IndexedSeq(1, 2, 3)
+res2: scala.collection.immutable.IndexedSeq[Int] = Vector(1, 2, 3)
+~~~
+{% endtab %}
+{% endtabs %}
## Immutable Queues
@@ -117,25 +154,49 @@ A [Queue](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collecti
Here's how you can create an empty immutable queue:
- scala> val empty = scala.collection.immutable.Queue[Int]()
- empty: scala.collection.immutable.Queue[Int] = Queue()
+{% tabs Queue_1 %}
+{% tab 'Scala 2 and 3' for=Queue_1 %}
+~~~scala
+scala> val empty = scala.collection.immutable.Queue[Int]()
+empty: scala.collection.immutable.Queue[Int] = Queue()
+~~~
+{% endtab %}
+{% endtabs %}
You can append an element to an immutable queue with `enqueue`:
- scala> val has1 = empty.enqueue(1)
- has1: scala.collection.immutable.Queue[Int] = Queue(1)
+{% tabs Queue_2 %}
+{% tab 'Scala 2 and 3' for=Queue_2 %}
+~~~scala
+scala> val has1 = empty.enqueue(1)
+has1: scala.collection.immutable.Queue[Int] = Queue(1)
+~~~
+{% endtab %}
+{% endtabs %}
To append multiple elements to a queue, call `enqueueAll` with a collection as its argument:
- scala> val has123 = has1.enqueueAll(List(2, 3))
- has123: scala.collection.immutable.Queue[Int]
- = Queue(1, 2, 3)
+{% tabs Queue_3 %}
+{% tab 'Scala 2 and 3' for=Queue_3 %}
+~~~scala
+scala> val has123 = has1.enqueueAll(List(2, 3))
+has123: scala.collection.immutable.Queue[Int]
+ = Queue(1, 2, 3)
+~~~
+{% endtab %}
+{% endtabs %}
To remove an element from the head of the queue, you use `dequeue`:
- scala> val (element, has23) = has123.dequeue
- element: Int = 1
- has23: scala.collection.immutable.Queue[Int] = Queue(2, 3)
+{% tabs Queue_4 %}
+{% tab 'Scala 2 and 3' for=Queue_4 %}
+~~~scala
+scala> val (element, has23) = has123.dequeue
+element: Int = 1
+has23: scala.collection.immutable.Queue[Int] = Queue(2, 3)
+~~~
+{% endtab %}
+{% endtabs %}
Note that `dequeue` returns a pair consisting of the element removed and the rest of the queue.
@@ -143,15 +204,27 @@ Note that `dequeue` returns a pair consisting of the element removed and the res
A [Range](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/immutable/Range.html) is an ordered sequence of integers that are equally spaced apart. For example, "1, 2, 3," is a range, as is "5, 8, 11, 14." To create a range in Scala, use the predefined methods `to` and `by`.
- scala> 1 to 3
- res2: scala.collection.immutable.Range.Inclusive = Range(1, 2, 3)
- scala> 5 to 14 by 3
- res3: scala.collection.immutable.Range = Range(5, 8, 11, 14)
+{% tabs Range_1 %}
+{% tab 'Scala 2 and 3' for=Range_1 %}
+~~~scala
+scala> 1 to 3
+res2: scala.collection.immutable.Range.Inclusive = Range(1, 2, 3)
+scala> 5 to 14 by 3
+res3: scala.collection.immutable.Range = Range(5, 8, 11, 14)
+~~~
+{% endtab %}
+{% endtabs %}
If you want to create a range that is exclusive of its upper limit, then use the convenience method `until` instead of `to`:
- scala> 1 until 3
- res2: scala.collection.immutable.Range = Range(1, 2)
+{% tabs Range_2 %}
+{% tab 'Scala 2 and 3' for=Range_2 %}
+~~~scala
+scala> 1 until 3
+res2: scala.collection.immutable.Range = Range(1, 2)
+~~~
+{% endtab %}
+{% endtabs %}
Ranges are represented in constant space, because they can be defined by just three numbers: their start, their end, and the stepping value. Because of this representation, most operations on ranges are extremely fast.
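+
+For example (a sketch), even a very large range is cheap to create, and operations such as `size` and `contains` are computed arithmetically from those three numbers:
+
+~~~scala
+val r = 1 to 1000000000 by 3 // no billion-element collection is materialized
+r.size                       // 333333334, computed from start, end and step
+r.contains(7)                // true, also computed without iterating
+~~~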
@@ -159,7 +232,7 @@ Ranges are represented in constant space, because they can be defined by just th
Hash tries are a standard way to implement immutable sets and maps efficiently. [Compressed Hash-Array Mapped Prefix-trees](https://github.com/msteindorfer/oopsla15-artifact/) are a design for hash tries on the JVM which improves locality and makes sure the trees remain in a canonical and compact representation. They are supported by class [immutable.HashMap](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/immutable/HashMap.html). Their representation is similar to vectors in that they are also trees where every node has 32 elements or 32 subtrees. But the selection of these keys is now done based on hash code. For instance, to find a given key in a map, one first takes the hash code of the key. Then, the lowest 5 bits of the hash code are used to select the first subtree, followed by the next 5 bits and so on. The selection stops once all elements stored in a node have hash codes that differ from each other in the bits that are selected up to this level.
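+
+As a rough sketch (the names and code here are illustrative only, not the actual implementation), the 5-bit slices of a hash code select one of 32 children at each level:
+
+~~~scala
+// which of the 32 children to follow at a given depth of the trie
+def childIndex(hash: Int, level: Int): Int =
+  (hash >>> (level * 5)) & 0x1f // keep 5 bits, so the result is between 0 and 31
+
+val h = "example".hashCode
+childIndex(h, 0) // index of the subtree chosen at the root
+childIndex(h, 1) // index chosen one level further down
+~~~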
-Hash tries strike a nice balance between reasonably fast lookups and reasonably efficient functional insertions (`+`) and deletions (`-`). That's why they underly Scala's default implementations of immutable maps and sets. In fact, Scala has a further optimization for immutable sets and maps that contain less than five elements. Sets and maps with one to four elements are stored as single objects that just contain the elements (or key/value pairs in the case of a map) as fields. The empty immutable set and the empty immutable map is in each case a single object - there's no need to duplicate storage for those because an empty immutable set or map will always stay empty.
+Hash tries strike a nice balance between reasonably fast lookups and reasonably efficient functional insertions (`+`) and deletions (`-`). That's why they underlie Scala's default implementations of immutable maps and sets. In fact, Scala has a further optimization for immutable sets and maps that contain fewer than five elements. Sets and maps with one to four elements are stored as single objects that just contain the elements (or key/value pairs in the case of a map) as fields. The empty immutable set and the empty immutable map are each a single object - there's no need to duplicate storage for those because an empty immutable set or map will always stay empty.
## Red-Black Trees
@@ -167,11 +240,16 @@ Red-black trees are a form of balanced binary tree where some nodes are designat
Scala provides implementations of immutable sets and maps that use a red-black tree internally. Access them under the names [TreeSet](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/immutable/TreeSet.html) and [TreeMap](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/immutable/TreeMap.html).
-
- scala> scala.collection.immutable.TreeSet.empty[Int]
- res11: scala.collection.immutable.TreeSet[Int] = TreeSet()
- scala> res11 + 1 + 3 + 3
- res12: scala.collection.immutable.TreeSet[Int] = TreeSet(1, 3)
+{% tabs Red-Black_1 %}
+{% tab 'Scala 2 and 3' for=Red-Black_1 %}
+~~~scala
+scala> scala.collection.immutable.TreeSet.empty[Int]
+res11: scala.collection.immutable.TreeSet[Int] = TreeSet()
+scala> res11 + 1 + 3 + 3
+res12: scala.collection.immutable.TreeSet[Int] = TreeSet(1, 3)
+~~~
+{% endtab %}
+{% endtabs %}
Red-black trees are the standard implementation of `SortedSet` in Scala, because they provide an efficient iterator that returns all elements in sorted order.
@@ -183,14 +261,20 @@ Internally, bit sets use an array of 64-bit `Long`s. The first `Long` in the arr
Operations on bit sets are very fast. Testing for inclusion takes constant time. Adding an item to the set takes time proportional to the number of `Long`s in the bit set's array, which is typically a small number. Here are some simple examples of the use of a bit set:
- scala> val bits = scala.collection.immutable.BitSet.empty
- bits: scala.collection.immutable.BitSet = BitSet()
- scala> val moreBits = bits + 3 + 4 + 4
- moreBits: scala.collection.immutable.BitSet = BitSet(3, 4)
- scala> moreBits(3)
- res26: Boolean = true
- scala> moreBits(0)
- res27: Boolean = false
+{% tabs BitSet_1 %}
+{% tab 'Scala 2 and 3' for=BitSet_1 %}
+~~~scala
+scala> val bits = scala.collection.immutable.BitSet.empty
+bits: scala.collection.immutable.BitSet = BitSet()
+scala> val moreBits = bits + 3 + 4 + 4
+moreBits: scala.collection.immutable.BitSet = BitSet(3, 4)
+scala> moreBits(3)
+res26: Boolean = true
+scala> moreBits(0)
+res27: Boolean = false
+~~~
+{% endtab %}
+{% endtabs %}
## VectorMaps
@@ -198,7 +282,9 @@ A [VectorMap](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/coll
a map using both a `Vector` of keys and a `HashMap`. It provides an iterator that returns all the entries in their
insertion order.
-~~~
+{% tabs VectorMap_1 %}
+{% tab 'Scala 2 and 3' for=VectorMap_1 %}
+~~~scala
scala> val vm = scala.collection.immutable.VectorMap.empty[Int, String]
vm: scala.collection.immutable.VectorMap[Int,String] =
VectorMap()
@@ -211,6 +297,8 @@ vm2: scala.collection.immutable.VectorMap[Int,String] =
scala> vm2 == Map(2 -> "two", 1 -> "one")
res29: Boolean = true
~~~
+{% endtab %}
+{% endtabs %}
The first lines show that the content of the `VectorMap` keeps the insertion order, and the last line
shows that `VectorMap`s are comparable with other `Map`s and that this comparison does not take the
@@ -220,8 +308,14 @@ order of elements into account.
A [ListMap](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/immutable/ListMap.html) represents a map as a linked list of key-value pairs. In general, operations on a list map might have to iterate through the entire list. Thus, operations on a list map take time linear in the size of the map. In fact there is little usage for list maps in Scala because standard immutable maps are almost always faster. The only possible exception to this is if the map is for some reason constructed in such a way that the first elements in the list are selected much more often than the other elements.
- scala> val map = scala.collection.immutable.ListMap(1->"one", 2->"two")
- map: scala.collection.immutable.ListMap[Int,java.lang.String] =
- Map(1 -> one, 2 -> two)
- scala> map(2)
- res30: String = "two"
+{% tabs ListMap_1 %}
+{% tab 'Scala 2 and 3' for=ListMap_1 %}
+~~~scala
+scala> val map = scala.collection.immutable.ListMap(1->"one", 2->"two")
+map: scala.collection.immutable.ListMap[Int,java.lang.String] =
+ Map(1 -> one, 2 -> two)
+scala> map(2)
+res30: String = "two"
+~~~
+{% endtab %}
+{% endtabs %}
diff --git a/_overviews/collections-2.13/concrete-mutable-collection-classes.md b/_overviews/collections-2.13/concrete-mutable-collection-classes.md
index 883d1978ca..0de0bb1996 100644
--- a/_overviews/collections-2.13/concrete-mutable-collection-classes.md
+++ b/_overviews/collections-2.13/concrete-mutable-collection-classes.md
@@ -16,42 +16,72 @@ You've now seen the most commonly used immutable collection classes that Scala p
## Array Buffers
-An [ArrayBuffer](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/mutable/ArrayBuffer.html) buffer holds an array and a size. Most operations on an array buffer have the same speed as for an array, because the operations simply access and modify the underlying array. Additionally, array buffers can have data efficiently added to the end. Appending an item to an array buffer takes amortized constant time. Thus, array buffers are useful for efficiently building up a large collection whenever the new items are always added to the end.
-
- scala> val buf = scala.collection.mutable.ArrayBuffer.empty[Int]
- buf: scala.collection.mutable.ArrayBuffer[Int] = ArrayBuffer()
- scala> buf += 1
- res32: buf.type = ArrayBuffer(1)
- scala> buf += 10
- res33: buf.type = ArrayBuffer(1, 10)
- scala> buf.toArray
- res34: Array[Int] = Array(1, 10)
+An [ArrayBuffer](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/mutable/ArrayBuffer.html) holds an array and a size. Most operations on an array buffer have the same speed as for an array, because the operations simply access and modify the underlying array. Additionally, array buffers can have data efficiently added to the end. Appending an item to an array buffer takes amortized constant time. Thus, array buffers are useful for efficiently building up a large collection whenever the new items are always added to the end.
+
+{% tabs ArrayBuffer_1 %}
+{% tab 'Scala 2 and 3' for=ArrayBuffer_1 %}
+~~~scala
+scala> val buf = scala.collection.mutable.ArrayBuffer.empty[Int]
+buf: scala.collection.mutable.ArrayBuffer[Int] = ArrayBuffer()
+scala> buf += 1
+res32: buf.type = ArrayBuffer(1)
+scala> buf += 10
+res33: buf.type = ArrayBuffer(1, 10)
+scala> buf.toArray
+res34: Array[Int] = Array(1, 10)
+~~~
+{% endtab %}
+{% endtabs %}
## List Buffers
A [ListBuffer](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/mutable/ListBuffer.html) is like an array buffer except that it uses a linked list internally instead of an array. If you plan to convert the buffer to a list once it is built up, use a list buffer instead of an array buffer.
- scala> val buf = scala.collection.mutable.ListBuffer.empty[Int]
- buf: scala.collection.mutable.ListBuffer[Int] = ListBuffer()
- scala> buf += 1
- res35: buf.type = ListBuffer(1)
- scala> buf += 10
- res36: buf.type = ListBuffer(1, 10)
- scala> buf.toList
- res37: List[Int] = List(1, 10)
+{% tabs ListBuffer_1 %}
+{% tab 'Scala 2 and 3' for=ListBuffer_1 %}
+~~~scala
+scala> val buf = scala.collection.mutable.ListBuffer.empty[Int]
+buf: scala.collection.mutable.ListBuffer[Int] = ListBuffer()
+scala> buf += 1
+res35: buf.type = ListBuffer(1)
+scala> buf += 10
+res36: buf.type = ListBuffer(1, 10)
+scala> buf.to(List)
+res37: List[Int] = List(1, 10)
+~~~
+{% endtab %}
+{% endtabs %}
## StringBuilders
Just like an array buffer is useful for building arrays, and a list buffer is useful for building lists, a [StringBuilder](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/mutable/StringBuilder.html) is useful for building strings. String builders are so commonly used that they are already imported into the default namespace. Create them with a simple `new StringBuilder`, like this:
- scala> val buf = new StringBuilder
- buf: StringBuilder =
- scala> buf += 'a'
- res38: buf.type = a
- scala> buf ++= "bcdef"
- res39: buf.type = abcdef
- scala> buf.toString
- res41: String = abcdef
+{% tabs StringBuilders_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=StringBuilders_1 %}
+~~~scala
+scala> val buf = new StringBuilder
+buf: StringBuilder =
+scala> buf += 'a'
+res38: buf.type = a
+scala> buf ++= "bcdef"
+res39: buf.type = abcdef
+scala> buf.toString
+res41: String = abcdef
+~~~
+{% endtab %}
+{% tab 'Scala 3' for=StringBuilders_1 %}
+~~~scala
+scala> val buf = StringBuilder()
+buf: StringBuilder =
+scala> buf += 'a'
+res38: buf.type = a
+scala> buf ++= "bcdef"
+res39: buf.type = abcdef
+scala> buf.toString
+res41: String = abcdef
+~~~
+{% endtab %}
+{% endtabs %}
## ArrayDeque
@@ -66,48 +96,98 @@ an `ArrayBuffer`.
Scala provides mutable queues in addition to immutable ones. You use a mutable `Queue` similarly to how you use an immutable one, but instead of `enqueue`, you use the `+=` and `++=` operators to append. Also, on a mutable queue, the `dequeue` method will just remove the head element from the queue and return it. Here's an example:
- scala> val queue = new scala.collection.mutable.Queue[String]
- queue: scala.collection.mutable.Queue[String] = Queue()
- scala> queue += "a"
- res10: queue.type = Queue(a)
- scala> queue ++= List("b", "c")
- res11: queue.type = Queue(a, b, c)
- scala> queue
- res12: scala.collection.mutable.Queue[String] = Queue(a, b, c)
- scala> queue.dequeue
- res13: String = a
- scala> queue
- res14: scala.collection.mutable.Queue[String] = Queue(b, c)
+{% tabs Queues_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=Queues_1 %}
+~~~scala
+scala> val queue = new scala.collection.mutable.Queue[String]
+queue: scala.collection.mutable.Queue[String] = Queue()
+scala> queue += "a"
+res10: queue.type = Queue(a)
+scala> queue ++= List("b", "c")
+res11: queue.type = Queue(a, b, c)
+scala> queue
+res12: scala.collection.mutable.Queue[String] = Queue(a, b, c)
+scala> queue.dequeue
+res13: String = a
+scala> queue
+res14: scala.collection.mutable.Queue[String] = Queue(b, c)
+~~~
+{% endtab %}
+{% tab 'Scala 3' for=Queues_1 %}
+~~~scala
+scala> val queue = scala.collection.mutable.Queue[String]()
+queue: scala.collection.mutable.Queue[String] = Queue()
+scala> queue += "a"
+res10: queue.type = Queue(a)
+scala> queue ++= List("b", "c")
+res11: queue.type = Queue(a, b, c)
+scala> queue
+res12: scala.collection.mutable.Queue[String] = Queue(a, b, c)
+scala> queue.dequeue()
+res13: String = a
+scala> queue
+res14: scala.collection.mutable.Queue[String] = Queue(b, c)
+~~~
+{% endtab %}
+{% endtabs %}
## Stacks
A stack implements a data structure which allows you to store and retrieve objects in a last-in-first-out (LIFO) fashion.
It is supported by class [mutable.Stack](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/mutable/Stack.html).
- scala> val stack = new scala.collection.mutable.Stack[Int]
- stack: scala.collection.mutable.Stack[Int] = Stack()
- scala> stack.push(1)
- res0: stack.type = Stack(1)
- scala> stack
- res1: scala.collection.mutable.Stack[Int] = Stack(1)
- scala> stack.push(2)
- res0: stack.type = Stack(1, 2)
- scala> stack
- res3: scala.collection.mutable.Stack[Int] = Stack(1, 2)
- scala> stack.top
- res8: Int = 2
- scala> stack
- res9: scala.collection.mutable.Stack[Int] = Stack(1, 2)
- scala> stack.pop
- res10: Int = 2
- scala> stack
- res11: scala.collection.mutable.Stack[Int] = Stack(1)
+{% tabs Stacks_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=Stacks_1 %}
+~~~scala
+scala> val stack = new scala.collection.mutable.Stack[Int]
+stack: scala.collection.mutable.Stack[Int] = Stack()
+scala> stack.push(1)
+res0: stack.type = Stack(1)
+scala> stack
+res1: scala.collection.mutable.Stack[Int] = Stack(1)
+scala> stack.push(2)
+res0: stack.type = Stack(2, 1)
+scala> stack
+res3: scala.collection.mutable.Stack[Int] = Stack(2, 1)
+scala> stack.top
+res8: Int = 2
+scala> stack
+res9: scala.collection.mutable.Stack[Int] = Stack(2, 1)
+scala> stack.pop
+res10: Int = 2
+scala> stack
+res11: scala.collection.mutable.Stack[Int] = Stack(1)
+~~~
+{% endtab %}
+{% tab 'Scala 3' for=Stacks_1 %}
+~~~scala
+scala> val stack = scala.collection.mutable.Stack[Int]()
+stack: scala.collection.mutable.Stack[Int] = Stack()
+scala> stack.push(1)
+res0: stack.type = Stack(1)
+scala> stack
+res1: scala.collection.mutable.Stack[Int] = Stack(1)
+scala> stack.push(2)
+res0: stack.type = Stack(2, 1)
+scala> stack
+res3: scala.collection.mutable.Stack[Int] = Stack(2, 1)
+scala> stack.top
+res8: Int = 2
+scala> stack
+res9: scala.collection.mutable.Stack[Int] = Stack(2, 1)
+scala> stack.pop()
+res10: Int = 2
+scala> stack
+res11: scala.collection.mutable.Stack[Int] = Stack(1)
+~~~
+{% endtab %}
+{% endtabs %}
## Mutable ArraySeqs
Array sequences are mutable sequences of fixed size which store their elements internally in an `Array[Object]`. They are implemented in Scala by class [ArraySeq](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/mutable/ArraySeq.html).
-You would typically use an `ArraySeq` if you want an array for its performance characteristics, but you also want to create generic instances of the sequence where you do not know the type of the elements and you do not have a `ClassTag` to provide it at run-time. These issues are explained in the section on [arrays]({{ site.baseurl }}/overviews/collections/arrays.html).
+You would typically use an `ArraySeq` if you want an array for its performance characteristics, but you also want to create generic instances of the sequence where you do not know the type of the elements, and you do not have a `ClassTag` to provide it at run-time. These issues are explained in the section on [arrays]({% link _overviews/collections-2.13/arrays.md %}).
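+
+For example (a sketch), a mutable `ArraySeq` can be updated in place but cannot grow or shrink:
+
+~~~scala
+val xs = scala.collection.mutable.ArraySeq(1, 2, 3)
+xs(0) = 10 // update in place
+xs         // ArraySeq(10, 2, 3) -- there is no append, because the size is fixed
+~~~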
## Hash Tables
@@ -115,16 +195,22 @@ A hash table stores its elements in an underlying array, placing each item at a
Hash sets and maps are used just like any other set or map. Here are some simple examples:
- scala> val map = scala.collection.mutable.HashMap.empty[Int,String]
- map: scala.collection.mutable.HashMap[Int,String] = Map()
- scala> map += (1 -> "make a web site")
- res42: map.type = Map(1 -> make a web site)
- scala> map += (3 -> "profit!")
- res43: map.type = Map(1 -> make a web site, 3 -> profit!)
- scala> map(1)
- res44: String = make a web site
- scala> map contains 2
- res46: Boolean = false
+{% tabs Hash-Tables_1 %}
+{% tab 'Scala 2 and 3' for=Hash-Tables_1 %}
+~~~scala
+scala> val map = scala.collection.mutable.HashMap.empty[Int,String]
+map: scala.collection.mutable.HashMap[Int,String] = Map()
+scala> map += (1 -> "make a web site")
+res42: map.type = Map(1 -> make a web site)
+scala> map += (3 -> "profit!")
+res43: map.type = Map(1 -> make a web site, 3 -> profit!)
+scala> map(1)
+res44: String = make a web site
+scala> map contains 2
+res46: Boolean = false
+~~~
+{% endtab %}
+{% endtabs %}
Iteration over a hash table is not guaranteed to occur in any particular order. Iteration simply proceeds through the underlying array in whichever order it happens to be in. To get a guaranteed iteration order, use a _linked_ hash map or set instead of a regular one. A linked hash map or set is just like a regular hash map or set except that it also includes a linked list of the elements in the order they were added. Iteration over such a collection is always in the same order that the elements were initially added.
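+
+For example (a sketch), a linked hash map iterates in the order the keys were added, not in hash order:
+
+~~~scala
+val m = scala.collection.mutable.LinkedHashMap.empty[String, Int]
+m += ("one" -> 1)
+m += ("three" -> 3)
+m += ("two" -> 2)
+m.keys.toList // List(one, three, two) -- insertion order is preserved
+~~~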
@@ -145,17 +231,23 @@ A concurrent map can be accessed by several threads at once. In addition to the
| `m.replace(k, old, new)` |Replaces the value associated with key `k` with `new`, if it was previously bound to `old`. |
| `m.replace(k, v)`        |Replaces the value associated with key `k` with `v`, if it was previously bound to some value.|
-`concurrent.Map` is a trait in the Scala collections library. Currently, it has two implementations. The first one is Java's `java.util.concurrent.ConcurrentMap`, which can be converted automatically into a Scala map using the [standard Java/Scala collection conversions]({{ site.baseurl }}/overviews/collections/conversions-between-java-and-scala-collections.html). The second implementation is [TrieMap](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/concurrent/TrieMap.html), which is a lock-free implementation of a hash array mapped trie.
+`concurrent.Map` is a trait in the Scala collections library. Currently, it has two implementations. The first one is Java's `java.util.concurrent.ConcurrentMap`, which can be converted automatically into a Scala map using the [standard Java/Scala collection conversions]({% link _overviews/collections-2.13/conversions-between-java-and-scala-collections.md %}). The second implementation is [TrieMap](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/concurrent/TrieMap.html), which is a lock-free implementation of a hash array mapped trie.
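+
+For illustration, a sketch of the atomic operations from the table above, using `TrieMap` (the key and values are made up):
+
+~~~scala
+import scala.collection.concurrent.TrieMap
+
+val cache = TrieMap.empty[String, Int]
+cache.putIfAbsent("answer", 42) // None: the key was not bound before
+cache.putIfAbsent("answer", 0)  // Some(42): the existing binding is kept
+cache.replace("answer", 42, 43) // true: replaced, because the old value matched
+cache.get("answer")             // Some(43)
+~~~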
## Mutable Bitsets
A mutable bit set of type [mutable.BitSet](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/mutable/BitSet.html) is just like an immutable one, except that it is modified in place. Mutable bit sets are slightly more efficient at updating than immutable ones, because they don't have to copy around `Long`s that haven't changed.
- scala> val bits = scala.collection.mutable.BitSet.empty
- bits: scala.collection.mutable.BitSet = BitSet()
- scala> bits += 1
- res49: bits.type = BitSet(1)
- scala> bits += 3
- res50: bits.type = BitSet(1, 3)
- scala> bits
- res51: scala.collection.mutable.BitSet = BitSet(1, 3)
+{% tabs BitSet_1 %}
+{% tab 'Scala 2 and 3' for=BitSet_1 %}
+~~~scala
+scala> val bits = scala.collection.mutable.BitSet.empty
+bits: scala.collection.mutable.BitSet = BitSet()
+scala> bits += 1
+res49: bits.type = BitSet(1)
+scala> bits += 3
+res50: bits.type = BitSet(1, 3)
+scala> bits
+res51: scala.collection.mutable.BitSet = BitSet(1, 3)
+~~~
+{% endtab %}
+{% endtabs %}
diff --git a/_overviews/collections-2.13/conversion-between-option-and-the-collections.md b/_overviews/collections-2.13/conversion-between-option-and-the-collections.md
new file mode 100644
index 0000000000..d1b2e771cf
--- /dev/null
+++ b/_overviews/collections-2.13/conversion-between-option-and-the-collections.md
@@ -0,0 +1,81 @@
+---
+layout: multipage-overview
+title: Conversion Between Option and the Collections
+partof: collections-213
+overview-name: Collections
+
+num: 18
+previous-page: conversions-between-java-and-scala-collections
+
+permalink: /overviews/collections-2.13/:title.html
+---
+`Option` can be seen as a collection that has zero or exactly one element, and it provides a degree of interoperability with the collection types found in the package `scala.collection`. In particular, it implements the interface `IterableOnce`, which models the simplest form of collections: something that can be iterated over, at least once. However, `Option` does not implement the more comprehensive interface of `Iterable`. Indeed, we cannot provide a sensible implementation for the operation [`fromSpecific`](https://github.com/scala/scala/blob/6c68c2825e893bb71d6dc78465ac8c6f415cbd93/src/library/scala/collection/Iterable.scala#L173), which is supposed to create an `Option` from a collection of possibly more than one element. Starting from [Scala 2.13](https://github.com/scala/scala/pull/8038), `Option` was made an `IterableOnce` but not an `Iterable`.
+
+Hence `Option` can be used everywhere an `IterableOnce` is expected, for example, when calling `flatMap` on a collection (or inside a for-comprehension)
+
+{% tabs options_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=options_1 %}
+```scala mdoc
+for {
+ a <- Set(1)
+ b <- Option(41)
+} yield (a + b)
+// : Set[Int] = Set(42)
+```
+{% endtab %}
+{% tab 'Scala 3' for=options_1 %}
+```scala
+for
+ a <- Set(1)
+ b <- Option(41)
+yield (a + b)
+// : Set[Int] = Set(42)
+```
+{% endtab %}
+{% endtabs %}
+
+since the operation `flatMap` on the type `Set[Int]` takes a function returning an `IterableOnce`:
+
+{% tabs options_2 %}
+{% tab 'Scala 2 and 3' for=options_2 %}
+```scala
+def flatMap[B](f: Int => IterableOnce[B]): Set[B]
+```
+{% endtab %}
+{% endtabs %}
+
+Although `Option` does not extend `Iterable`, there exists an [implicit conversion](https://github.com/scala/scala/blob/6c68c2825e893bb71d6dc78465ac8c6f415cbd93/src/library/scala/Option.scala#L19) between `Option` and `Iterable`
+
+{% tabs options_3 %}
+{% tab 'Scala 2 and 3' for=options_3 %}
+```scala
+implicit def option2Iterable[A](xo: Option[A]): Iterable[A]
+```
+{% endtab %}
+{% endtabs %}
+
+so although `Option[A]` is not a full collection it can be _viewed_ as one. For example,
+
+{% tabs options_4 %}
+{% tab 'Scala 2 and 3' for=options_4 %}
+```scala mdoc
+Some(42).drop(1)
+// : Iterable[Int] = List()
+```
+{% endtab %}
+{% endtabs %}
+
+expands to
+
+{% tabs options_5 %}
+{% tab 'Scala 2 and 3' for=options_5 %}
+```scala mdoc
+Option.option2Iterable(Some(42)).drop(1)
+// : Iterable[Int] = List()
+```
+{% endtab %}
+{% endtabs %}
+
+because `drop` is not defined on `Option`. A downside of the above implicit conversion is that instead of getting back an `Option[A]` we are left with an `Iterable[A]`. For this reason, `Option`’s documentation carries the following note:
+
+> Many of the methods in `Option` are duplicative with those in the `Iterable` hierarchy, but they are duplicated for a reason: the implicit conversion tends to leave one with an `Iterable` in situations where one could have retained an `Option`.
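+
+As a small illustration of that note (a sketch, using the same annotation style as the examples above): calling one of `Option`'s own methods keeps the `Option`, whereas a method that exists only on `Iterable` goes through the implicit conversion and yields an `Iterable`:
+
+{% tabs options_retain_sketch %}
+{% tab 'Scala 2 and 3' for=options_retain_sketch %}
+```scala
+Some(42).filter(_ > 0)  // : Option[Int] = Some(42), Option defines filter itself
+Some(42).take(1)        // : Iterable[Int] = List(42), take comes from option2Iterable
+```
+{% endtab %}
+{% endtabs %}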
diff --git a/_overviews/collections-2.13/conversions-between-java-and-scala-collections.md b/_overviews/collections-2.13/conversions-between-java-and-scala-collections.md
index 86070db314..d66183d84a 100644
--- a/_overviews/collections-2.13/conversions-between-java-and-scala-collections.md
+++ b/_overviews/collections-2.13/conversions-between-java-and-scala-collections.md
@@ -6,6 +6,7 @@ overview-name: Collections
num: 17
previous-page: creating-collections-from-scratch
+next-page: conversion-between-option-and-the-collections
languages: [ru]
permalink: /overviews/collections-2.13/:title.html
@@ -15,49 +16,101 @@ Like Scala, Java also has a rich collections library. There are many similaritie
Sometimes you might need to pass from one collection framework to the other. For instance, you might want to access an existing Java collection as if it were a Scala collection. Or you might want to pass one of Scala's collections to a Java method that expects its Java counterpart. It is quite easy to do this, because Scala offers implicit conversions between all the major collection types in the [CollectionConverters](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/jdk/CollectionConverters$.html) object. In particular, you will find bidirectional conversions between the following types.
+```
+Iterator <=> java.util.Iterator
+Iterator <=> java.util.Enumeration
+Iterable <=> java.lang.Iterable
+Iterable <=> java.util.Collection
+mutable.Buffer <=> java.util.List
+mutable.Set <=> java.util.Set
+mutable.Map <=> java.util.Map
+mutable.ConcurrentMap <=> java.util.concurrent.ConcurrentMap
+```
- Iterator <=> java.util.Iterator
- Iterator <=> java.util.Enumeration
- Iterable <=> java.lang.Iterable
- Iterable <=> java.util.Collection
- mutable.Buffer <=> java.util.List
- mutable.Set <=> java.util.Set
- mutable.Map <=> java.util.Map
- mutable.ConcurrentMap <=> java.util.concurrent.ConcurrentMap
+To enable these conversions, import them from the [CollectionConverters](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/jdk/CollectionConverters$.html) object:
-To enable these conversions, simply import them from the [CollectionConverters](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/jdk/CollectionConverters$.html) object:
+{% tabs java_scala_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=java_scala_1 %}
- scala> import scala.jdk.CollectionConverters._
- import scala.jdk.CollectionConverters._
+```scala
+scala> import scala.jdk.CollectionConverters._
+import scala.jdk.CollectionConverters._
+```
+
+{% endtab %}
+{% tab 'Scala 3' for=java_scala_1 %}
+
+```scala
+scala> import scala.jdk.CollectionConverters.*
+import scala.jdk.CollectionConverters.*
+```
+
+{% endtab %}
+{% endtabs %}
This enables conversions between Scala collections and their corresponding Java collections by way of extension methods called `asScala` and `asJava`:
- scala> import collection.mutable._
- import collection.mutable._
+{% tabs java_scala_2 class=tabs-scala-version %}
+{% tab 'Scala 2' for=java_scala_2 %}
+
+```scala
+scala> import collection.mutable._
+import collection.mutable._
+
+scala> val jul: java.util.List[Int] = ArrayBuffer(1, 2, 3).asJava
+val jul: java.util.List[Int] = [1, 2, 3]
+
+scala> val buf: Seq[Int] = jul.asScala
+val buf: scala.collection.mutable.Seq[Int] = ArrayBuffer(1, 2, 3)
- scala> val jul: java.util.List[Int] = ArrayBuffer(1, 2, 3).asJava
- jul: java.util.List[Int] = [1, 2, 3]
+scala> val m: java.util.Map[String, Int] = HashMap("abc" -> 1, "hello" -> 2).asJava
+val m: java.util.Map[String,Int] = {abc=1, hello=2}
+```
- scala> val buf: Seq[Int] = jul.asScala
- buf: scala.collection.mutable.Seq[Int] = ArrayBuffer(1, 2, 3)
+{% endtab %}
+{% tab 'Scala 3' for=java_scala_2 %}
- scala> val m: java.util.Map[String, Int] = HashMap("abc" -> 1, "hello" -> 2).asJava
- m: java.util.Map[String,Int] = {abc=1, hello=2}
+```scala
+scala> import collection.mutable.*
+import collection.mutable.*
+
+scala> val jul: java.util.List[Int] = ArrayBuffer(1, 2, 3).asJava
+val jul: java.util.List[Int] = [1, 2, 3]
+
+scala> val buf: Seq[Int] = jul.asScala
+val buf: scala.collection.mutable.Seq[Int] = ArrayBuffer(1, 2, 3)
+
+scala> val m: java.util.Map[String, Int] = HashMap("abc" -> 1, "hello" -> 2).asJava
+val m: java.util.Map[String,Int] = {abc=1, hello=2}
+```
+
+{% endtab %}
+{% endtabs %}
Internally, these conversions work by setting up a "wrapper" object that forwards all operations to the underlying collection object. So collections are never copied when converting between Java and Scala. An interesting property is that if you do a round-trip conversion from, say, a Java type to its corresponding Scala type, and back to the same Java type, you end up with the identical collection object you started with.
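+
+A quick sketch of that round-trip property (the value names are illustrative):
+
+{% tabs java_scala_roundtrip_sketch %}
+{% tab 'Scala 2 and 3' for=java_scala_roundtrip_sketch %}
+```scala
+import scala.jdk.CollectionConverters._
+
+val jul = new java.util.ArrayList[Int]
+val roundTrip: java.util.List[Int] = jul.asScala.asJava
+roundTrip eq jul  // true: the very same object comes back, nothing is copied
+```
+{% endtab %}
+{% endtabs %}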
Certain other Scala collections can also be converted to Java, but do not have a conversion back to the original Scala type:
- Seq => java.util.List
- mutable.Seq => java.util.List
- Set => java.util.Set
- Map => java.util.Map
+```
+Seq => java.util.List
+mutable.Seq => java.util.List
+Set => java.util.Set
+Map => java.util.Map
+```
Because Java does not distinguish between mutable and immutable collections in their type, a conversion from, say, `scala.immutable.List` will yield a `java.util.List`, where all mutation operations throw an "UnsupportedOperationException". Here's an example:
- scala> val jul = List(1, 2, 3).asJava
- jul: java.util.List[Int] = [1, 2, 3]
+{% tabs java_scala_3 %}
+{% tab 'Scala 2 and 3' for=java_scala_3 %}
+
+```scala
+scala> val jul = List(1, 2, 3).asJava
+val jul: java.util.List[Int] = [1, 2, 3]
+
+scala> jul.add(7)
+java.lang.UnsupportedOperationException
+ at java.util.AbstractList.add(AbstractList.java:148)
+```
- scala> jul.add(7)
- java.lang.UnsupportedOperationException
- at java.util.AbstractList.add(AbstractList.java:148)
+{% endtab %}
+{% endtabs %}
diff --git a/_overviews/collections-2.13/creating-collections-from-scratch.md b/_overviews/collections-2.13/creating-collections-from-scratch.md
index 729b3008f9..9f10410750 100644
--- a/_overviews/collections-2.13/creating-collections-from-scratch.md
+++ b/_overviews/collections-2.13/creating-collections-from-scratch.md
@@ -14,34 +14,60 @@ permalink: /overviews/collections-2.13/:title.html
You can use the syntax `List(1, 2, 3)` to create a list of three integers and `Map('A' -> 1, 'C' -> 2)` to create a map with two bindings. This is actually a universal feature of Scala collections: you can take any collection name and follow it by a list of elements in parentheses, and the result will be a new collection with the given elements. Here are some more examples:
- Iterable() // An empty collection
- List() // The empty list
- List(1.0, 2.0) // A list with elements 1.0, 2.0
- Vector(1.0, 2.0) // A vector with elements 1.0, 2.0
- Iterator(1, 2, 3) // An iterator returning three integers.
- Set(dog, cat, bird) // A set of three animals
- HashSet(dog, cat, bird) // A hash set of the same animals
- Map('a' -> 7, 'b' -> 0) // A map from characters to integers
+{% tabs creating_1 %}
+{% tab 'Scala 2 and 3' for=creating_1 %}
+
+```scala
+val a = Iterable() // An empty collection
+val b = List() // The empty list
+val c = List(1.0, 2.0) // A list with elements 1.0, 2.0
+val d = Vector(1.0, 2.0) // A vector with elements 1.0, 2.0
+val e = Iterator(1, 2, 3) // An iterator returning three integers.
+val f = Set(dog, cat, bird) // A set of three animals
+val g = HashSet(dog, cat, bird) // A hash set of the same animals
+val h = Map('a' -> 7, 'b' -> 0) // A map from characters to integers
+```
+
+{% endtab %}
+{% endtabs %}
"Under the covers" each of the above lines is a call to the `apply` method of some object. For instance, the third line above expands to
- List.apply(1.0, 2.0)
+{% tabs creating_2 %}
+{% tab 'Scala 2 and 3' for=creating_2 %}
+
+```scala
+val c = List.apply(1.0, 2.0)
+```
+
+{% endtab %}
+{% endtabs %}
So this is a call to the `apply` method of the companion object of the `List` class. That method takes an arbitrary number of arguments and constructs a list from them. Every collection class in the Scala library has a companion object with such an `apply` method. It does not matter whether the collection class represents a concrete implementation, like `List`, `LazyList` or `Vector`, or whether it is an abstract base class such as `Seq`, `Set` or `Iterable`. In the latter case, calling `apply` will produce some default implementation of the abstract base class. Examples:
- scala> List(1, 2, 3)
- res17: List[Int] = List(1, 2, 3)
- scala> Iterable(1, 2, 3)
- res18: Iterable[Int] = List(1, 2, 3)
- scala> mutable.Iterable(1, 2, 3)
- res19: scala.collection.mutable.Iterable[Int] = ArrayBuffer(1, 2, 3)
+{% tabs creating_3 %}
+{% tab 'Scala 2 and 3' for=creating_3 %}
+
+```scala
+scala> List(1, 2, 3)
+val res17: List[Int] = List(1, 2, 3)
+
+scala> Iterable(1, 2, 3)
+val res18: Iterable[Int] = List(1, 2, 3)
+
+scala> mutable.Iterable(1, 2, 3)
+val res19: scala.collection.mutable.Iterable[Int] = ArrayBuffer(1, 2, 3)
+```
+
+{% endtab %}
+{% endtabs %}
Besides `apply`, every collection companion object also defines a member `empty`, which returns an empty collection. So instead of `List()` you could write `List.empty`, instead of `Map()`, `Map.empty`, and so on.
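+
+For instance (a brief sketch, with results shown as comments):
+
+{% tabs creating_empty_sketch %}
+{% tab 'Scala 2 and 3' for=creating_empty_sketch %}
+```scala
+List.empty[Int]         // List()
+Map.empty[String, Int]  // Map()
+Vector.empty[Double]    // Vector()
+```
+{% endtab %}
+{% endtabs %}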
The operations provided by collection companion objects are summarized in the following table. In short, there's
* `concat`, which concatenates an arbitrary number of collections together,
-* `fill` and `tabulate`, which generate single or multi-dimensional collections of given dimensions initialized by some expression or tabulating function,
+* `fill` and `tabulate`, which generate single or multidimensional collections of given dimensions initialized by some expression or tabulating function,
* `range`, which generates integer collections with some constant step length, and
* `iterate` and `unfold`, which generate the collection resulting from repeated application of a function to a start element or state (see the short sketch after this list).
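+
+As a rough sketch of these factory methods (result values shown as comments):
+
+{% tabs creating_factory_sketch %}
+{% tab 'Scala 2 and 3' for=creating_factory_sketch %}
+```scala
+List.concat(List(1), Vector(2))  // List(1, 2)
+Seq.fill(3)("a")                 // List(a, a, a)
+Seq.tabulate(3)(i => i * i)      // List(0, 1, 4)
+List.range(0, 10, 2)             // List(0, 2, 4, 6, 8)
+List.iterate(1, 4)(_ * 2)        // List(1, 2, 4, 8)
+List.unfold(3)(n => if (n > 0) Some((n, n - 1)) else None)  // List(3, 2, 1)
+```
+{% endtab %}
+{% endtabs %}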
diff --git a/_overviews/collections-2.13/equality.md b/_overviews/collections-2.13/equality.md
index 3ce85f4815..7fa334c8d9 100644
--- a/_overviews/collections-2.13/equality.md
+++ b/_overviews/collections-2.13/equality.md
@@ -14,21 +14,34 @@ permalink: /overviews/collections-2.13/:title.html
The collection libraries have a uniform approach to equality and hashing. The idea is, first, to divide collections into sets, maps, and sequences. Collections in different categories are always unequal. For instance, `Set(1, 2, 3)` is unequal to `List(1, 2, 3)` even though they contain the same elements. On the other hand, within the same category, collections are equal if and only if they have the same elements (for sequences: the same elements in the same order). For example, `List(1, 2, 3) == Vector(1, 2, 3)`, and `HashSet(1, 2) == TreeSet(2, 1)`.
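+
+A quick check of these rules in code (a small sketch):
+
+{% tabs equality_categories_sketch %}
+{% tab 'Scala 2 and 3' for=equality_categories_sketch %}
+```scala
+import scala.collection.immutable.{HashSet, TreeSet}
+
+Set(1, 2, 3) == List(1, 2, 3)     // false: a set is never equal to a sequence
+List(1, 2, 3) == Vector(1, 2, 3)  // true: same category, same elements in the same order
+HashSet(1, 2) == TreeSet(2, 1)    // true: same category, same elements
+```
+{% endtab %}
+{% endtabs %}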
-It does not matter for the equality check whether a collection is mutable or immutable. For a mutable collection one simply considers its current elements at the time the equality test is performed. This means that a mutable collection might be equal to different collections at different times, depending what elements are added or removed. This is a potential trap when using a mutable collection as a key in a hashmap. Example:
-
- scala> import collection.mutable.{HashMap, ArrayBuffer}
- import collection.mutable.{HashMap, ArrayBuffer}
- scala> val buf = ArrayBuffer(1, 2, 3)
- buf: scala.collection.mutable.ArrayBuffer[Int] =
- ArrayBuffer(1, 2, 3)
- scala> val map = HashMap(buf -> 3)
- map: scala.collection.mutable.HashMap[scala.collection.
- mutable.ArrayBuffer[Int],Int] = Map((ArrayBuffer(1, 2, 3),3))
- scala> map(buf)
- res13: Int = 3
- scala> buf(0) += 1
- scala> map(buf)
- java.util.NoSuchElementException: key not found:
+It does not matter for the equality check whether a collection is mutable or immutable. For a mutable collection one simply considers its current elements at the time the equality test is performed. This means that a mutable collection might be equal to different collections at different times, depending on what elements are added or removed. This is a potential trap when using a mutable collection as a key in a hashmap. Example:
+
+{% tabs equality_1 %}
+{% tab 'Scala 2 and 3' for=equality_1 %}
+
+```scala
+scala> import collection.mutable.{HashMap, ArrayBuffer}
+import collection.mutable.{HashMap, ArrayBuffer}
+
+scala> val buf = ArrayBuffer(1, 2, 3)
+val buf: scala.collection.mutable.ArrayBuffer[Int] =
+ ArrayBuffer(1, 2, 3)
+
+scala> val map = HashMap(buf -> 3)
+val map: scala.collection.mutable.HashMap[scala.collection.
+ mutable.ArrayBuffer[Int],Int] = Map((ArrayBuffer(1, 2, 3),3))
+
+scala> map(buf)
+val res13: Int = 3
+
+scala> buf(0) += 1
+
+scala> map(buf)
+ java.util.NoSuchElementException: key not found:
ArrayBuffer(2, 2, 3)
+```
+
+{% endtab %}
+{% endtabs %}
In this example, the selection in the last line will most likely fail because the hash code of the array buffer `buf` has changed in the second-to-last line. Therefore, the hash-code-based lookup will look at a different place than the one where `buf` was stored.
diff --git a/_overviews/collections-2.13/introduction.md b/_overviews/collections-2.13/introduction.md
index 477f8ffb10..2e4d5f8abb 100644
--- a/_overviews/collections-2.13/introduction.md
+++ b/_overviews/collections-2.13/introduction.md
@@ -12,7 +12,7 @@ permalink: /overviews/collections-2.13/:title.html
---
The collections framework is the heart of the Scala 2.13 standard
-library. It provides a common, uniform, and all-encompassing
+library, also used in Scala 3.x. It provides a common, uniform, and all-encompassing
framework for collection types. This framework enables you to work
with data in memory at a high level, with the basic building blocks of
a program being whole collections, instead of individual elements.
@@ -48,7 +48,7 @@ lines run at first try.
**Fast:** Collection operations are tuned and optimized in the
libraries. As a result, using collections is typically quite
-efficient. You might be able to do a little bit better with carefully
+efficient. You might be able to do a little better with carefully
hand-tuned data structures and operations, but you might also do a lot
worse by making some suboptimal implementation decisions along the
way.
@@ -70,12 +70,18 @@ for arrays.
**Example:** Here's one line of code that demonstrates many of the
advantages of Scala's collections.
- val (minors, adults) = people partition (_.age < 18)
+{% tabs introduction_1 %}
+{% tab 'Scala 2 and 3' for=introduction_1 %}
+```scala
+val (minors, adults) = people partition (_.age < 18)
+```
+{% endtab %}
+{% endtabs %}
It's immediately clear what this operation does: It partitions a
collection of `people` into `minors` and `adults` depending on
their age. Because the `partition` method is defined in the root
-collection type `TraversableLike`, this code works for any kind of
+collection type `IterableOps`, this code works for any kind of
collection, including arrays. The resulting `minors` and `adults`
collections will be of the same type as the `people` collection.
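+
+As a small sketch (the `Person` class here is made up purely for illustration):
+
+{% tabs introduction_partition_sketch %}
+{% tab 'Scala 2 and 3' for=introduction_partition_sketch %}
+```scala
+case class Person(name: String, age: Int)
+
+val people = List(Person("Ann", 12), Person("Bob", 40))
+val (minors, adults) = people.partition(_.age < 18)
+// minors: List[Person], adults: List[Person]
+
+val peopleArray = Array(Person("Ann", 12), Person("Bob", 40))
+val (minorsArr, adultsArr) = peopleArray.partition(_.age < 18)
+// minorsArr: Array[Person], adultsArr: Array[Person]
+```
+{% endtab %}
+{% endtabs %}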
diff --git a/_overviews/collections-2.13/iterators.md b/_overviews/collections-2.13/iterators.md
index 0f88475625..a72716740d 100644
--- a/_overviews/collections-2.13/iterators.md
+++ b/_overviews/collections-2.13/iterators.md
@@ -16,46 +16,99 @@ An iterator is not a collection, but rather a way to access the elements of a co
The most straightforward way to "step through" all the elements returned by an iterator `it` uses a while-loop:
- while (it.hasNext)
- println(it.next())
+{% tabs iterators_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=iterators_1 %}
+```scala
+while (it.hasNext)
+ println(it.next())
+```
+{% endtab %}
+{% tab 'Scala 3' for=iterators_1 %}
+```scala
+while it.hasNext do
+ println(it.next())
+```
+{% endtab %}
+{% endtabs %}
+
+Iterators in Scala also provide analogues of most of the methods that you find in the `Iterable` and `Seq` classes. For instance, they provide a `foreach` method which executes a given procedure on each element returned by an iterator. Using `foreach`, the loop above could be abbreviated to:
+
+{% tabs iterators_2 %}
+{% tab 'Scala 2 and 3' for=iterators_2 %}
+
+```scala
+it.foreach(println)
+```
+
+{% endtab %}
+{% endtabs %}
-Iterators in Scala also provide analogues of most of the methods that you find in the `Traversable`, `Iterable` and `Seq` classes. For instance, they provide a `foreach` method which executes a given procedure on each element returned by an iterator. Using `foreach`, the loop above could be abbreviated to:
+As always, for-expressions can be used as an alternate syntax for expressions involving `foreach`, `map`, `withFilter`, and `flatMap`, so yet another way to print all elements returned by an iterator would be:
- it foreach println
+{% tabs iterators_3 class=tabs-scala-version %}
+{% tab 'Scala 2' for=iterators_3 %}
+```scala
+for (elem <- it) println(elem)
+```
+{% endtab %}
+{% tab 'Scala 3' for=iterators_3 %}
+```scala
+for elem <- it do println(elem)
+```
+{% endtab %}
+{% endtabs %}
+
+There's an important difference between the `foreach` method on iterators and the same method on iterable collections: When called on an iterator, `foreach` will leave the iterator at its end when it is done. So calling `next()` again on the same iterator will fail with a `NoSuchElementException`. By contrast, when called on a collection, `foreach` leaves the number of elements in the collection unchanged (unless the passed function adds or removes elements, but this is discouraged, because it may lead to surprising results).
-As always, for-expressions can be used as an alternate syntax for expressions involving `foreach`, `map`, `withFilter`, and `flatMap`, so yet another way to print all elements returned by an iterator would be:
+The other operations that `Iterator` has in common with `Iterable` have the same property. For instance, iterators provide a `map` method, which returns a new iterator:
- for (elem <- it) println(elem)
+{% tabs iterators_4 %}
+{% tab 'Scala 2 and 3' for=iterators_4 %}
-There's an important difference between the foreach method on iterators and the same method on traversable collections: When called on an iterator, `foreach` will leave the iterator at its end when it is done. So calling `next` again on the same iterator will fail with a `NoSuchElementException`. By contrast, when called on a collection, `foreach` leaves the number of elements in the collection unchanged (unless the passed function adds or removes elements, but this is discouraged, because it may lead to surprising results).
+```scala
+scala> val it = Iterator("a", "number", "of", "words")
+val it: Iterator[java.lang.String] =
-The other operations that `Iterator` has in common with `Iterable` have the same property. For instance, iterators provide a `map` method, which returns a new iterator:
+scala> it.map(_.length)
+val res1: Iterator[Int] =
+
+scala> it.hasNext
+val res2: Boolean = true
+
+scala> res1.foreach(println)
+1
+6
+2
+5
- scala> val it = Iterator("a", "number", "of", "words")
- it: Iterator[java.lang.String] =
- scala> it.map(_.length)
- res1: Iterator[Int] =
- scala> it.hasNext
- res2: Boolean = true
- scala> res1 foreach println
- 1
- 6
- 2
- 5
- scala> it.hasNext
- res4: Boolean = false
+scala> it.hasNext
+val res4: Boolean = false
+```
+
+{% endtab %}
+{% endtabs %}
As you can see, after the call to `it.map`, the `it` iterator hasn’t advanced to its end, but traversing `res1` (the iterator returned by `it.map`) with `foreach` also traverses `it` and advances it to its end.
Another example is the `dropWhile` method, which can be used to find the first elements of an iterator that has a certain property. For instance, to find the first word in the iterator above that has at least two characters you could write:
- scala> val it = Iterator("a", "number", "of", "words")
- it: Iterator[java.lang.String] =
- scala> it dropWhile (_.length < 2)
- res4: Iterator[java.lang.String] =
- scala> res4.next()
- res5: java.lang.String = number
+{% tabs iterators_5 %}
+{% tab 'Scala 2 and 3' for=iterators_5 %}
+
+```scala
+scala> val it = Iterator("a", "number", "of", "words")
+val it: Iterator[java.lang.String] =
+
+scala> it.dropWhile(_.length < 2)
+val res4: Iterator[java.lang.String] =
+
+scala> res4.next()
+val res5: java.lang.String = number
+```
+
+{% endtab %}
+{% endtabs %}
Note again that `it` was changed by the call to `dropWhile`: it now points to the second word "number" in the list.
In fact, `it` and the result `res4` returned by `dropWhile` will return exactly the same sequence of elements.
@@ -63,15 +116,23 @@ In fact, `it` and the result `res4` returned by `dropWhile` will return exactly
One way to circumvent this behavior is to `duplicate` the underlying iterator instead of calling methods on it directly.
The _two_ iterators that result will each return exactly the same elements as the underlying iterator `it`:
- scala> val (words, ns) = Iterator("a", "number", "of", "words").duplicate
- words: Iterator[String] =
- ns: Iterator[String] =
+{% tabs iterators_6 %}
+{% tab 'Scala 2 and 3' for=iterators_6 %}
+
+```scala
+scala> val (words, ns) = Iterator("a", "number", "of", "words").duplicate
+val words: Iterator[String] =
+val ns: Iterator[String] =
- scala> val shorts = words.filter(_.length < 3).toList
- shorts: List[String] = List(a, of)
+scala> val shorts = words.filter(_.length < 3).toList
+val shorts: List[String] = List(a, of)
- scala> val count = ns.map(_.length).sum
- count: Int = 14
+scala> val count = ns.map(_.length).sum
+val count: Int = 14
+```
+
+{% endtab %}
+{% endtabs %}
The two iterators work independently: advancing one does not affect the other, so that each can be
destructively modified by invoking arbitrary methods. This creates the illusion of iterating over
@@ -87,31 +148,31 @@ All operations on iterators are summarized below.
| WHAT IT IS | WHAT IT DOES |
| ------ | ------ |
| **Abstract Methods:** | |
-| `it.next()` | Returns next element on iterator and advances past it. |
-| `it.hasNext` | Returns `true` if `it` can return another element. |
+| `it.next()` | Returns next element on iterator and advances past it. |
+| `it.hasNext` | Returns `true` if `it` can return another element. |
| **Variations:** | |
| `it.buffered` | A buffered iterator returning all elements of `it`. |
-| `it grouped size` | An iterator that yields the elements returned by `it` in fixed-sized sequence "chunks". |
-| `it sliding size` | An iterator that yields the elements returned by `it` in sequences representing a sliding fixed-sized window. |
+| `it.grouped(size)` | An iterator that yields the elements returned by `it` in fixed-sized sequence "chunks". |
+| `it.sliding(size)` | An iterator that yields the elements returned by `it` in sequences representing a sliding fixed-sized window. |
| **Duplication:** | |
| `it.duplicate` | A pair of iterators that each independently return all elements of `it`. |
| **Additions:** | |
-| `it concat jt` or `it ++ jt` | An iterator returning all elements returned by iterator `it`, followed by all elements returned by iterator `jt`. |
-| `it.padTo(len, x)` | The iterator that first returns all elements of `it` and then follows that by copies of `x` until length `len` elements are returned overall. |
+| `it.concat(jt)` or `it ++ jt` | An iterator returning all elements returned by iterator `it`, followed by all elements returned by iterator `jt`. |
+| `it.padTo(len, x)` | The iterator that first returns all elements of `it` and then follows that by copies of `x` until length `len` elements are returned overall. |
| **Maps:** | |
-| `it map f` | The iterator obtained from applying the function `f` to every element returned from `it`. |
-| `it flatMap f` | The iterator obtained from applying the iterator-valued function `f` to every element in `it` and appending the results. |
-| `it collect f` | The iterator obtained from applying the partial function `f` to every element in `it` for which it is defined and collecting the results. |
+| `it.map(f)` | The iterator obtained from applying the function `f` to every element returned from `it`. |
+| `it.flatMap(f)` | The iterator obtained from applying the iterator-valued function `f` to every element in `it` and appending the results. |
+| `it.collect(f)` | The iterator obtained from applying the partial function `f` to every element in `it` for which it is defined and collecting the results. |
| **Conversions:** | |
| `it.toArray` | Collects the elements returned by `it` in an array. |
| `it.toList` | Collects the elements returned by `it` in a list. |
| `it.toIterable` | Collects the elements returned by `it` in an iterable. |
| `it.toSeq` | Collects the elements returned by `it` in a sequence. |
| `it.toIndexedSeq` | Collects the elements returned by `it` in an indexed sequence. |
-| `it.toLazyList` | Collects the elements returned by `it` in a lazy list. |
+| `it.toLazyList` | Collects the elements returned by `it` in a lazy list. |
| `it.toSet` | Collects the elements returned by `it` in a set. |
| `it.toMap` | Collects the key/value pairs returned by `it` in a map. |
-| **Copying:** | |
+| **Copying:** | |
| `it.copyToArray(arr, s, n)`| Copies at most `n` elements returned by `it` to array `arr` starting at index `s`. The last two arguments are optional. |
| **Size Info:** | |
| `it.isEmpty` | Test whether the iterator is empty (opposite of `hasNext`). |
@@ -120,44 +181,44 @@ All operations on iterators are summarized below.
| `it.length` | Same as `it.size`. |
| `it.knownSize` |The number of elements, if this one is known without modifying the iterator’s state, otherwise `-1`. |
| **Element Retrieval Index Search:**| |
-| `it find p` | An option containing the first element returned by `it` that satisfies `p`, or `None` is no element qualifies. Note: The iterator advances to after the element, or, if none is found, to the end. |
-| `it indexOf x` | The index of the first element returned by `it` that equals `x`. Note: The iterator advances past the position of this element. |
-| `it indexWhere p` | The index of the first element returned by `it` that satisfies `p`. Note: The iterator advances past the position of this element. |
+| `it.find(p)` | An option containing the first element returned by `it` that satisfies `p`, or `None` if no element qualifies. Note: The iterator advances to after the element, or, if none is found, to the end. |
+| `it.indexOf(x)` | The index of the first element returned by `it` that equals `x`. Note: The iterator advances past the position of this element. |
+| `it.indexWhere(p)` | The index of the first element returned by `it` that satisfies `p`. Note: The iterator advances past the position of this element. |
| **Subiterators:** | |
-| `it take n` | An iterator returning of the first `n` elements of `it`. Note: it will advance to the position after the `n`'th element, or to its end, if it contains less than `n` elements. |
-| `it drop n` | The iterator that starts with the `(n+1)`'th element of `it`. Note: `it` will advance to the same position. |
-| `it.slice(m,n)` | The iterator that returns a slice of the elements returned from it, starting with the `m`'th element and ending before the `n`'th element. |
-| `it takeWhile p` | An iterator returning elements from `it` as long as condition `p` is true. |
-| `it dropWhile p` | An iterator skipping elements from `it` as long as condition `p` is `true`, and returning the remainder. |
-| `it filter p` | An iterator returning all elements from `it` that satisfy the condition `p`. |
-| `it withFilter p` | Same as `it` filter `p`. Needed so that iterators can be used in for-expressions. |
-| `it filterNot p` | An iterator returning all elements from `it` that do not satisfy the condition `p`. |
+| `it.take(n)` | An iterator returning the first `n` elements of `it`. Note: `it` will advance to the position after the `n`'th element, or to its end, if it contains fewer than `n` elements. |
+| `it.drop(n)` | The iterator that starts with the `(n+1)`'th element of `it`. Note: `it` will advance to the same position. |
+| `it.slice(m,n)` | The iterator that returns a slice of the elements returned from `it`, starting with the `m`'th element and ending before the `n`'th element. |
+| `it.takeWhile(p)` | An iterator returning elements from `it` as long as condition `p` is true. |
+| `it.dropWhile(p)` | An iterator skipping elements from `it` as long as condition `p` is `true`, and returning the remainder. |
+| `it.filter(p)` | An iterator returning all elements from `it` that satisfy the condition `p`. |
+| `it.withFilter(p)` | Same as `it.filter(p)`. Needed so that iterators can be used in for-expressions. |
+| `it.filterNot(p)` | An iterator returning all elements from `it` that do not satisfy the condition `p`. |
| `it.distinct` | An iterator returning the elements from `it` without duplicates. |
| **Subdivisions:** | |
-| `it partition p` | Splits `it` into a pair of two iterators: one returning all elements from `it` that satisfy the predicate `p`, the other returning all elements from `it` that do not. |
-| `it span p` | Splits `it` into a pair of two iterators: one returning all elements of the prefix of `it` that satisfy the predicate `p`, the other returning all remaining elements of `it`. |
+| `it.partition(p)` | Splits `it` into a pair of two iterators: one returning all elements from `it` that satisfy the predicate `p`, the other returning all elements from `it` that do not. |
+| `it.span(p)` | Splits `it` into a pair of two iterators: one returning all elements of the prefix of `it` that satisfy the predicate `p`, the other returning all remaining elements of `it`. |
| **Element Conditions:** | |
-| `it forall p` | A boolean indicating whether the predicate p holds for all elements returned by `it`. |
-| `it exists p` | A boolean indicating whether the predicate p holds for some element in `it`. |
-| `it count p` | The number of elements in `it` that satisfy the predicate `p`. |
+| `it.forall(p)` | A boolean indicating whether the predicate p holds for all elements returned by `it`. |
+| `it.exists(p)` | A boolean indicating whether the predicate p holds for some element in `it`. |
+| `it.count(p)` | The number of elements in `it` that satisfy the predicate `p`. |
| **Folds:** | |
| `it.foldLeft(z)(op)` | Apply binary operation `op` between successive elements returned by `it`, going left to right and starting with `z`. |
| `it.foldRight(z)(op)` | Apply binary operation `op` between successive elements returned by `it`, going right to left and starting with `z`. |
-| `it reduceLeft op` | Apply binary operation `op` between successive elements returned by non-empty iterator `it`, going left to right. |
-| `it reduceRight op` | Apply binary operation `op` between successive elements returned by non-empty iterator `it`, going right to left. |
+| `it.reduceLeft(op)` | Apply binary operation `op` between successive elements returned by non-empty iterator `it`, going left to right. |
+| `it.reduceRight(op)` | Apply binary operation `op` between successive elements returned by non-empty iterator `it`, going right to left. |
| **Specific Folds:** | |
| `it.sum` | The sum of the numeric element values returned by iterator `it`. |
| `it.product` | The product of the numeric element values returned by iterator `it`. |
| `it.min` | The minimum of the ordered element values returned by iterator `it`. |
| `it.max` | The maximum of the ordered element values returned by iterator `it`. |
| **Zippers:** | |
-| `it zip jt` | An iterator of pairs of corresponding elements returned from iterators `it` and `jt`. |
-| `it.zipAll(jt, x, y)` | An iterator of pairs of corresponding elements returned from iterators `it` and `jt`, where the shorter iterator is extended to match the longer one by appending elements `x` or `y`. |
+| `it.zip(jt)` | An iterator of pairs of corresponding elements returned from iterators `it` and `jt`. |
+| `it.zipAll(jt, x, y)` | An iterator of pairs of corresponding elements returned from iterators `it` and `jt`, where the shorter iterator is extended to match the longer one by appending elements `x` or `y`. |
| `it.zipWithIndex` | An iterator of pairs of elements returned from `it` with their indices. |
| **Update:** | |
-| `it.patch(i, jt, r)` | The iterator resulting from `it` by replacing `r` elements starting with `i` by the patch iterator `jt`. |
+| `it.patch(i, jt, r)` | The iterator resulting from `it` by replacing `r` elements starting with `i` by the patch iterator `jt`. |
| **Comparison:** | |
-| `it sameElements jt` | A test whether iterators `it` and `jt` return the same elements in the same order. Note: Using the iterators after this operation is undefined and subject to change. |
+| `it.sameElements(jt)` | A test whether iterators `it` and `jt` return the same elements in the same order. Note: Using the iterators after this operation is undefined and subject to change. |
| **Strings:** | |
| `it.addString(b, start, sep, end)`| Adds a string to `StringBuilder` `b` which shows all elements returned by `it` between separators `sep` enclosed in strings `start` and `end`. `start`, `sep`, `end` are all optional. |
| `it.mkString(start, sep, end)` | Converts the collection to a string which shows all elements returned by `it` between separators `sep` enclosed in strings `start` and `end`. `start`, `sep`, `end` are all optional. |
@@ -170,44 +231,88 @@ A lazy operation does not immediately compute all of its results. Instead, it co
So the expression `(1 to 10).iterator.map(println)` would not print anything to the screen. The `map` method in this case doesn't apply its argument function to the values in the range, it returns a new `Iterator` that will do this as each one is requested. Adding `.toList` to the end of that expression will actually print the elements.
-A consequence of this is that a method like `map` or `filter` won't necessarily apply its argument function to all of the input elements. The expression `(1 to 10).iterator.map(println).take(5).toList` would only print the values `1` to `5`, for instance, since those are only ones that will be requested from the `Iterator` returned by `map`.
+A consequence of this is that a method like `map` or `filter` won't necessarily apply its argument function to all the input elements. The expression `(1 to 10).iterator.map(println).take(5).toList` would only print the values `1` to `5`, for instance, since those are the only ones that will be requested from the `Iterator` returned by `map`.
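+
+For instance, a sketch of roughly what a REPL run of that expression shows:
+
+{% tabs iterators_lazy_sketch %}
+{% tab 'Scala 2 and 3' for=iterators_lazy_sketch %}
+```scala
+scala> (1 to 10).iterator.map(println).take(5).toList
+1
+2
+3
+4
+5
+val res0: List[Unit] = List((), (), (), (), ())
+```
+{% endtab %}
+{% endtabs %}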
This is one of the reasons why it's important to only use pure functions as arguments to `map`, `filter`, `fold` and similar methods. Remember, a pure function has no side-effects, so one would not normally use `println` in a `map`. `println` is used to demonstrate laziness as it's not normally visible with pure functions.
Laziness is still valuable, despite often not being visible, as it can prevent unneeded computations from happening, and can allow for working with infinite sequences, like so:
- def zipWithIndex[A](i: Iterator[A]): Iterator[(Int, A)] =
- Iterator.from(0).zip(i)
+{% tabs iterators_7 %}
+{% tab 'Scala 2 and 3' for=iterators_7 %}
+
+```scala
+def zipWithIndex[A](i: Iterator[A]): Iterator[(Int, A)] =
+ Iterator.from(0).zip(i)
+```
+
+{% endtab %}
+{% endtabs %}
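+
+For example, applied to an infinite iterator this stays lazy; only the requested elements are ever produced (a sketch):
+
+{% tabs iterators_zipwithindex_sketch %}
+{% tab 'Scala 2 and 3' for=iterators_zipwithindex_sketch %}
+```scala
+zipWithIndex(Iterator.continually("x")).take(3).toList
+// List((0,x), (1,x), (2,x))
+```
+{% endtab %}
+{% endtabs %}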
### Buffered iterators
Sometimes you want an iterator that can "look ahead", so that you can inspect the next element to be returned without advancing past that element. Consider, for instance, the task of skipping leading empty strings from an iterator that returns a sequence of strings. You might be tempted to write the following
-
- def skipEmptyWordsNOT(it: Iterator[String]) =
- while (it.next().isEmpty) {}
+{% tabs iterators_8 class=tabs-scala-version %}
+{% tab 'Scala 2' for=iterators_8 %}
+```scala mdoc
+def skipEmptyWordsNOT(it: Iterator[String]) =
+ while (it.next().isEmpty) {}
+```
+{% endtab %}
+{% tab 'Scala 3' for=iterators_8 %}
+```scala
+def skipEmptyWordsNOT(it: Iterator[String]) =
+ while it.next().isEmpty do ()
+```
+{% endtab %}
+{% endtabs %}
But looking at this code more closely, it's clear that this is wrong: The code will indeed skip leading empty strings, but it will also advance `it` past the first non-empty string!
The solution to this problem is to use a buffered iterator. Class [BufferedIterator](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/BufferedIterator.html) is a subclass of [Iterator](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/Iterator.html), which provides one extra method, `head`. Calling `head` on a buffered iterator will return its first element but will not advance the iterator. Using a buffered iterator, skipping empty words can be written as follows.
- def skipEmptyWords(it: BufferedIterator[String]) =
- while (it.head.isEmpty) { it.next() }
+{% tabs iterators_9 class=tabs-scala-version %}
+{% tab 'Scala 2' for=iterators_9 %}
+```scala
+def skipEmptyWords(it: BufferedIterator[String]) =
+ while (it.head.isEmpty) { it.next() }
+```
+{% endtab %}
+{% tab 'Scala 3' for=iterators_9 %}
+```scala
+def skipEmptyWords(it: BufferedIterator[String]) =
+ while it.head.isEmpty do it.next()
+```
+{% endtab %}
+{% endtabs %}
Every iterator can be converted to a buffered iterator by calling its `buffered` method. Here's an example:
- scala> val it = Iterator(1, 2, 3, 4)
- it: Iterator[Int] =
- scala> val bit = it.buffered
- bit: scala.collection.BufferedIterator[Int] =
- scala> bit.head
- res10: Int = 1
- scala> bit.next()
- res11: Int = 1
- scala> bit.next()
- res12: Int = 2
- scala> bit.headOption
- res13: Option[Int] = Some(3)
+{% tabs iterators_10 %}
+{% tab 'Scala 2 and 3' for=iterators_10 %}
+
+```scala
+scala> val it = Iterator(1, 2, 3, 4)
+val it: Iterator[Int] =
+
+scala> val bit = it.buffered
+val bit: scala.collection.BufferedIterator[Int] =
+
+scala> bit.head
+val res10: Int = 1
+
+scala> bit.next()
+val res11: Int = 1
+
+scala> bit.next()
+val res12: Int = 2
+
+scala> bit.headOption
+val res13: Option[Int] = Some(3)
+```
+
+{% endtab %}
+{% endtabs %}
Note that calling `head` on the buffered iterator `bit` does not advance it. Therefore, the subsequent call `bit.next()` returns the same value as `bit.head`.
@@ -217,21 +322,50 @@ The buffered iterator only buffers the next element when `head` is invoked. Othe
such as those produced by `duplicate` and `partition`, may buffer arbitrary subsequences of the
underlying iterator. But iterators can be efficiently joined by adding them together with `++`:
- scala> def collapse(it: Iterator[Int]) = if (!it.hasNext) Iterator.empty else {
- | var head = it.next
- | val rest = if (head == 0) it.dropWhile(_ == 0) else it
- | Iterator.single(head) ++ rest
- | }
- collapse: (it: Iterator[Int])Iterator[Int]
-
- scala> def collapse(it: Iterator[Int]) = {
- | val (zeros, rest) = it.span(_ == 0)
- | zeros.take(1) ++ rest
- | }
- collapse: (it: Iterator[Int])Iterator[Int]
-
- scala> collapse(Iterator(0, 0, 0, 1, 2, 3, 4)).toList
- res14: List[Int] = List(0, 1, 2, 3, 4)
+{% tabs iterators_11 class=tabs-scala-version %}
+{% tab 'Scala 2' for=iterators_11 %}
+
+```scala
+scala> def collapse(it: Iterator[Int]) = if (!it.hasNext) Iterator.empty else {
+  | var head = it.next()
+ | val rest = if (head == 0) it.dropWhile(_ == 0) else it
+ | Iterator.single(head) ++ rest
+ |}
+def collapse(it: Iterator[Int]): Iterator[Int]
+
+scala> def collapse(it: Iterator[Int]) = {
+ | val (zeros, rest) = it.span(_ == 0)
+ | zeros.take(1) ++ rest
+ |}
+def collapse(it: Iterator[Int]): Iterator[Int]
+
+scala> collapse(Iterator(0, 0, 0, 1, 2, 3, 4)).toList
+val res14: List[Int] = List(0, 1, 2, 3, 4)
+```
+
+{% endtab %}
+{% tab 'Scala 3' for=iterators_11 %}
+
+```scala
+scala> def collapse(it: Iterator[Int]) = if !it.hasNext then Iterator.empty else
+  | var head = it.next()
+ | val rest = if head == 0 then it.dropWhile(_ == 0) else it
+ | Iterator.single(head) ++ rest
+ |
+def collapse(it: Iterator[Int]): Iterator[Int]
+
+scala> def collapse(it: Iterator[Int]) =
+ | val (zeros, rest) = it.span(_ == 0)
+ | zeros.take(1) ++ rest
+ |
+def collapse(it: Iterator[Int]): Iterator[Int]
+
+scala> collapse(Iterator(0, 0, 0, 1, 2, 3, 4)).toList
+val res14: List[Int] = List(0, 1, 2, 3, 4)
+```
+
+{% endtab %}
+{% endtabs %}
In the second version of `collapse`, the unconsumed zeros are buffered internally.
In the first version, any leading zeros are dropped and the desired result constructed
diff --git a/_overviews/collections-2.13/maps.md b/_overviews/collections-2.13/maps.md
index 3a27586614..34d9696f19 100644
--- a/_overviews/collections-2.13/maps.md
+++ b/_overviews/collections-2.13/maps.md
@@ -16,7 +16,7 @@ A [Map](https://www.scala-lang.org/api/current/scala/collection/Map.html) is an
The fundamental operations on maps are similar to those on sets. They are summarized in the following table and fall into the following categories:
-* **Lookup** operations `apply`, `get`, `getOrElse`, `contains`, and `isDefinedAt`. These turn maps into partial functions from keys to values. The fundamental lookup method for a map is: `def get(key): Option[Value]`. The operation "`m get key`" tests whether the map contains an association for the given `key`. If so, it returns the associated value in a `Some`. If no key is defined in the map, `get` returns `None`. Maps also define an `apply` method that returns the value associated with a given key directly, without wrapping it in an `Option`. If the key is not defined in the map, an exception is raised.
+* **Lookup** operations `apply`, `get`, `getOrElse`, `contains`, and `isDefinedAt`. These turn maps into partial functions from keys to values. The fundamental lookup method for a map is: `def get(key): Option[Value]`. The operation `m.get(key)` tests whether the map contains an association for the given `key`. If so, it returns the associated value in a `Some`. If the key is not defined in the map, `get` returns `None`. Maps also define an `apply` method that returns the value associated with a given key directly, without wrapping it in an `Option`. If the key is not defined in the map, an exception is raised.
* **Additions and updates** `+`, `++`, `updated`, which let you add new bindings to a map or change existing bindings.
* **Removals** `-`, `--`, which remove bindings from a map.
* **Subcollection producers** `keys`, `keySet`, `keysIterator`, `values`, `valuesIterator`, which return a map's keys and values separately in various forms.
@@ -24,90 +24,140 @@ The fundamental operations on maps are similar to those on sets. They are summar
### Operations in Class Map ###
-| WHAT IT IS | WHAT IT DOES |
-| ------ | ------ |
-| **Lookups:** | |
-| `ms get k` |The value associated with key `k` in map `ms` as an option, `None` if not found.|
-| `ms(k)` |(or, written out, `ms apply k`) The value associated with key `k` in map `ms`, or exception if not found.|
-| `ms getOrElse (k, d)` |The value associated with key `k` in map `ms`, or the default value `d` if not found.|
-| `ms contains k` |Tests whether `ms` contains a mapping for key `k`.|
-| `ms isDefinedAt k` |Same as `contains`. |
-| **Subcollections:** | |
+| WHAT IT IS | WHAT IT DOES |
+| ------ | ------ |
+| **Lookups:** | |
+| `ms.get(k)` |The value associated with key `k` in map `ms` as an option, `None` if not found.|
+| `ms(k)` |(or, written out, `ms.apply(k)`) The value associated with key `k` in map `ms`, or exception if not found.|
+| `ms.getOrElse(k, d)` |The value associated with key `k` in map `ms`, or the default value `d` if not found.|
+| `ms.contains(k)` |Tests whether `ms` contains a mapping for key `k`.|
+| `ms.isDefinedAt(k)` |Same as `contains`. |
+| **Subcollections:** | |
| `ms.keys` |An iterable containing each key in `ms`. |
| `ms.keySet` |A set containing each key in `ms`. |
| `ms.keysIterator` |An iterator yielding each key in `ms`. |
| `ms.values` |An iterable containing each value associated with a key in `ms`.|
| `ms.valuesIterator` |An iterator yielding each value associated with a key in `ms`.|
-| **Transformation:** | |
-| `ms.view filterKeys p` |A map view containing only those mappings in `ms` where the key satisfies predicate `p`.|
-| `ms.view mapValues f` |A map view resulting from applying function `f` to each value associated with a key in `ms`.|
+| **Transformation:** | |
+| `ms.view.filterKeys(p)` |A map view containing only those mappings in `ms` where the key satisfies predicate `p`.|
+| `ms.view.mapValues(f)` |A map view resulting from applying function `f` to each value associated with a key in `ms`.|
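+
+For instance, a quick sketch of some of these lookup and transformation operations on a small map (results shown as comments):
+
+{% tabs maps_lookup_sketch %}
+{% tab 'Scala 2 and 3' for=maps_lookup_sketch %}
+```scala
+val ms = Map("a" -> 1, "b" -> 2)
+ms.get("a")                          // Some(1)
+ms.get("z")                          // None
+ms.getOrElse("z", 0)                 // 0
+ms.contains("a")                     // true
+ms.view.filterKeys(_ == "a").toMap   // Map(a -> 1)
+ms.view.mapValues(_ * 10).toMap      // Map(a -> 10, b -> 20)
+```
+{% endtab %}
+{% endtabs %}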
Immutable maps additionally support operations to add and remove mappings by returning new `Map`s, as summarized in the following table.
### Operations in Class immutable.Map ###
-| WHAT IT IS | WHAT IT DOES |
-| ------ | ------ |
-| **Additions and Updates:**| |
+| WHAT IT IS | WHAT IT DOES |
+| ------ | ------ |
+| **Additions and Updates:**| |
| `ms.updated(k, v)` or `ms + (k -> v)` |The map containing all mappings of `ms` as well as the mapping `k -> v` from key `k` to value `v`.|
-| **Removals:** | |
-| `ms remove k` or `ms - k` |The map containing all mappings of `ms` except for any mapping of key `k`.|
-| `ms removeAll ks` or `ms -- ks` |The map containing all mappings of `ms` except for any mapping with a key in `ks`.|
+| **Removals:** | |
+| `ms.removed(k)` or `ms - k` |The map containing all mappings of `ms` except for any mapping of key `k`.|
+| `ms.removedAll(ks)` or `ms -- ks` |The map containing all mappings of `ms` except for any mapping with a key in `ks`.|
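+
+For example (a brief sketch showing that each operation returns a new map and leaves the original untouched):
+
+{% tabs maps_immutable_sketch %}
+{% tab 'Scala 2 and 3' for=maps_immutable_sketch %}
+```scala
+val ms = Map(1 -> "a")
+val added = ms.updated(2, "b")   // Map(1 -> a, 2 -> b)
+val removed = added - 1          // Map(2 -> b)
+ms                               // still Map(1 -> a): the original is unchanged
+```
+{% endtab %}
+{% endtabs %}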
Mutable maps additionally support the operations summarized in the following table.
### Operations in Class mutable.Map ###
-| WHAT IT IS | WHAT IT DOES |
-| ------ | ------ |
-| **Additions and Updates:**| |
-| `ms(k) = v` |(Or, written out, `ms.update(x, v)`). Adds mapping from key `k` to value `v` to map ms as a side effect, overwriting any previous mapping of `k`.|
-| `ms.addOne(k -> v)` or `ms += (k -> v)` |Adds mapping from key `k` to value `v` to map `ms` as a side effect and returns `ms` itself.|
-| `ms addAll xvs` or `ms ++= kvs` |Adds all mappings in `kvs` to `ms` as a side effect and returns `ms` itself.|
-| `ms.put(k, v)` |Adds mapping from key `k` to value `v` to `ms` and returns any value previously associated with `k` as an option.|
-| `ms getOrElseUpdate (k, d)`|If key `k` is defined in map `ms`, return its associated value. Otherwise, update `ms` with the mapping `k -> d` and return `d`.|
-| **Removals:**| |
-| `ms subtractOne k` or `ms -= k` |Removes mapping with key `k` from ms as a side effect and returns `ms` itself.|
-| `ms subtractAll ks` or `ms --= ks` |Removes all keys in `ks` from `ms` as a side effect and returns `ms` itself.|
-| `ms remove k` |Removes any mapping with key `k` from `ms` and returns any value previously associated with `k` as an option.|
-| `ms filterInPlace p` |Keeps only those mappings in `ms` that have a key satisfying predicate `p`.|
-| `ms.clear()` |Removes all mappings from `ms`. |
-| **Transformation:** | |
-| `ms mapValuesInPlace f` |Transforms all associated values in map `ms` with function `f`.|
-| **Cloning:** | |
-| `ms.clone` |Returns a new mutable map with the same mappings as `ms`.|
-
-The addition and removal operations for maps mirror those for sets. A mutable map `m` is usually updated "in place", using the two variants `m(key) = value` or `m += (key -> value)`. There is also the variant `m.put(key, value)`, which returns an `Option` value that contains the value previously associated with `key`, or `None` if the `key` did not exist in the map before.
+| WHAT IT IS | WHAT IT DOES |
+| ------ | ------ |
+| **Additions and Updates:** | |
+| `ms(k) = v` |(Or, written out, `ms.update(k, v)`). Adds mapping from key `k` to value `v` to map `ms` as a side effect, overwriting any previous mapping of `k`.|
+| `ms.addOne(k -> v)` or `ms += (k -> v)` |Adds mapping from key `k` to value `v` to map `ms` as a side effect and returns `ms` itself.|
+| `ms.addAll(kvs)` or `ms ++= kvs` |Adds all mappings in `kvs` to `ms` as a side effect and returns `ms` itself.|
+| `ms.put(k, v)` |Adds mapping from key `k` to value `v` to `ms` and returns any value previously associated with `k` as an option.|
+| `ms.getOrElseUpdate(k, d)` |If key `k` is defined in map `ms`, return its associated value. Otherwise, update `ms` with the mapping `k -> d` and return `d`.|
+| **Removals:** | |
+| `ms.subtractOne(k)` or `ms -= k` |Removes mapping with key `k` from `ms` as a side effect and returns `ms` itself.|
+| `ms.subtractAll(ks)` or `ms --= ks` |Removes all keys in `ks` from `ms` as a side effect and returns `ms` itself.|
+| `ms.remove(k)` |Removes any mapping with key `k` from `ms` and returns any value previously associated with `k` as an option.|
+| `ms.filterInPlace(p)` |Keeps only those mappings in `ms` that have a key satisfying predicate `p`.|
+| `ms.clear()` |Removes all mappings from `ms`. |
+| **Transformation:** | |
+| `ms.mapValuesInPlace(f)` |Transforms all associated values in map `ms` with function `f`.|
+| **Cloning:** | |
+| `ms.clone` |Returns a new mutable map with the same mappings as `ms`.|
+
+The addition and removal operations for maps mirror those for sets. A mutable map `m` is usually updated in place, using the two variants `m(key) = value` or `m += (key -> value)`. There is also the variant `m.put(key, value)`, which returns an `Option` value that contains the value previously associated with `key`, or `None` if the `key` did not exist in the map before.
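+
+For instance, a small sketch of these update variants (results shown as comments):
+
+{% tabs maps_mutable_sketch %}
+{% tab 'Scala 2 and 3' for=maps_mutable_sketch %}
+```scala
+val m = collection.mutable.Map("a" -> 1)
+m("b") = 2        // update in place
+m += ("c" -> 3)   // add a binding, returning m itself
+m.put("a", 10)    // Some(1): the value previously bound to "a"
+m.remove("z")     // None: there was no binding for "z"
+```
+{% endtab %}
+{% endtabs %}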
The operation `getOrElseUpdate` is useful for accessing maps that act as caches. Say you have an expensive computation triggered by invoking a function `f`:
- scala> def f(x: String) = {
- println("taking my time."); sleep(100)
- x.reverse }
- f: (x: String)String
+{% tabs expensive-computation-reverse class=tabs-scala-version %}
+
+{% tab 'Scala 2' for=expensive-computation-reverse %}
+```scala
+scala> def f(x: String): String = {
+ println("taking my time."); Thread.sleep(100)
+ x.reverse
+ }
+def f(x: String): String
+```
+{% endtab %}
+
+{% tab 'Scala 3' for=expensive-computation-reverse %}
+```scala
+scala> def f(x: String): String =
+ println("taking my time."); Thread.sleep(100)
+ x.reverse
+
+def f(x: String): String
+```
+{% endtab %}
+
+{% endtabs %}
Assume further that `f` has no side-effects, so invoking it again with the same argument will always yield the same result. In that case you could save time by storing previously computed bindings of arguments and results of `f` in a map, and only computing the result of `f` if the result for an argument was not found there. One could say the map is a _cache_ for the computations of the function `f`.
- scala> val cache = collection.mutable.Map[String, String]()
- cache: scala.collection.mutable.Map[String,String] = Map()
+{% tabs cache-creation %}
+{% tab 'Scala 2 and 3' for=cache-creation %}
+```scala
+scala> val cache = collection.mutable.Map[String, String]()
+val cache: scala.collection.mutable.Map[String,String] = HashMap()
+```
+{% endtab %}
+{% endtabs %}
You can now create a more efficient caching version of the `f` function:
- scala> def cachedF(s: String) = cache.getOrElseUpdate(s, f(s))
- cachedF: (s: String)String
- scala> cachedF("abc")
- taking my time.
- res3: String = cba
- scala> cachedF("abc")
- res4: String = cba
-
-Note that the second argument to `getOrElseUpdate` is "by-name", so the computation of `f("abc")` above is only performed if `getOrElseUpdate` requires the value of its second argument, which is precisely if its first argument is not found in the `cache` map. You could also have implemented `cachedF` directly, using just basic map operations, but it would take more code to do so:
-
- def cachedF(arg: String) = cache get arg match {
- case Some(result) => result
- case None =>
- val result = f(x)
- cache(arg) = result
- result
- }
+{% tabs cache-usage %}
+{% tab 'Scala 2 and 3' for=cache-usage %}
+```scala
+scala> def cachedF(s: String): String = cache.getOrElseUpdate(s, f(s))
+def cachedF(s: String): String
+
+scala> cachedF("abc")
+taking my time.
+val res3: String = cba
+
+scala> cachedF("abc")
+val res4: String = cba
+```
+{% endtab %}
+{% endtabs %}
+
+Note that the second argument to `getOrElseUpdate` is by-name, so the computation of `f("abc")` above is only performed if `getOrElseUpdate` requires the value of its second argument, which is precisely when its first argument is not found in the `cache` map. You could also have implemented `cachedF` directly, using just basic map operations, but it would take more code to do so:
+
+{% tabs cacheF class=tabs-scala-version %}
+
+{% tab 'Scala 2' for=cacheF %}
+```scala
+def cachedF(arg: String): String = cache.get(arg) match {
+  case Some(result) => result
+  case None =>
+    val result = f(arg)
+    cache(arg) = result
+    result
+}
+```
+{% endtab %}
+
+{% tab 'Scala 3' for=cacheF %}
+```scala
+def cachedF(arg: String): String = cache.get(arg) match
+  case Some(result) => result
+  case None =>
+    val result = f(arg)
+    cache(arg) = result
+    result
+{% endtab %}
+
+{% endtabs %}
diff --git a/_overviews/collections-2.13/overview.md b/_overviews/collections-2.13/overview.md
index 4d739156ac..5ef0c9b0f3 100644
--- a/_overviews/collections-2.13/overview.md
+++ b/_overviews/collections-2.13/overview.md
@@ -13,10 +13,10 @@ permalink: /overviews/collections-2.13/:title.html
---
Scala collections systematically distinguish between mutable and
-immutable collections. A _mutable_ collection can be updated or
+immutable collections. A _mutable_ collection can be updated, reduced, or
extended in place. This means you can change, add, or remove elements
of a collection as a side effect. _Immutable_ collections, by
-contrast, never change. You have still operations that simulate
+contrast, never change. You still have operations that simulate
additions, removals, or updates, but those operations will in each
case return a new collection and leave the old collection unchanged.
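+
+For example, a quick sketch of the difference:
+
+{% tabs overview_mutability_sketch %}
+{% tab 'Scala 2 and 3' for=overview_mutability_sketch %}
+```scala
+val xs = List(1, 2, 3)    // immutable
+val ys = 0 :: xs          // a new list; xs is unchanged
+
+val buf = scala.collection.mutable.ArrayBuffer(1, 2, 3)
+buf += 4                  // buf itself now contains 1, 2, 3, 4
+```
+{% endtab %}
+{% endtabs %}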
@@ -36,14 +36,14 @@ always yield a collection with the same elements.
A collection in package `scala.collection.mutable` is known to have
some operations that change the collection in place. So dealing with
-mutable collection means you need to understand which code changes
+a mutable collection means you need to understand which code changes
which collection when.
A collection in package `scala.collection` can be either mutable or
immutable. For instance, [collection.IndexedSeq\[T\]](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/IndexedSeq.html)
is a superclass of both [collection.immutable.IndexedSeq\[T\]](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/immutable/IndexedSeq.html)
and
-[collection.mutable.IndexedSeq\[T\]](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/mutable/IndexedSeq.html)
+[collection.mutable.IndexedSeq\[T\]](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/mutable/IndexedSeq.html).
Generally, the root collections in
package `scala.collection` support transformation operations
affecting the whole collection, the immutable
@@ -73,7 +73,13 @@ A useful convention if you want to use both mutable and immutable
versions of collections is to import just the package
`collection.mutable`.
- import scala.collection.mutable
+{% tabs overview_1 %}
+{% tab 'Scala 2 and 3' for=overview_1 %}
+```scala mdoc
+import scala.collection.mutable
+```
+{% endtab %}
+{% endtabs %}
Then a word like `Set` without a prefix still refers to an immutable collection,
whereas `mutable.Set` refers to the mutable counterpart.
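+
+For example, with that single import in scope, a short sketch of the two would look like this:
+
+{% tabs overview_mutable_import_sketch %}
+{% tab 'Scala 2 and 3' for=overview_mutable_import_sketch %}
+```scala
+import scala.collection.mutable
+
+val s1 = Set(1, 2, 3)           // immutable: scala.collection.immutable.Set
+val s2 = mutable.Set(1, 2, 3)   // the mutable counterpart
+s2 += 4                         // changes s2 in place; an immutable set would instead return a new set
+```
+{% endtab %}
+{% endtabs %}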
@@ -86,10 +92,16 @@ aliases in the `scala` package, so you can use them by their simple
names without needing an import. An example is the `List` type, which
can be accessed alternatively as
- scala.collection.immutable.List // that's where it is defined
- scala.List // via the alias in the scala package
- List // because scala._
- // is always automatically imported
+{% tabs overview_2 %}
+{% tab 'Scala 2 and 3' for=overview_2 %}
+```scala mdoc
+scala.collection.immutable.List // that's where it is defined
+scala.List // via the alias in the scala package
+List // because scala._
+ // is always automatically imported
+```
+{% endtab %}
+{% endtabs %}
Other types aliased are
[Iterable](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/Iterable.html), [Seq](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/immutable/Seq.html), [IndexedSeq](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/immutable/IndexedSeq.html), [Iterator](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/Iterator.html), [LazyList](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/immutable/LazyList.html), [Vector](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/immutable/Vector.html), [StringBuilder](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/mutable/StringBuilder.html), and [Range](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/immutable/Range.html).
@@ -116,27 +128,45 @@ Legend:
The most important collection classes are shown in the figures above. There is quite a bit of commonality shared by all these classes. For instance, every kind of collection can be created by the same uniform syntax, writing the collection class name followed by its elements:
- Iterable("x", "y", "z")
- Map("x" -> 24, "y" -> 25, "z" -> 26)
- Set(Color.red, Color.green, Color.blue)
- SortedSet("hello", "world")
- Buffer(x, y, z)
- IndexedSeq(1.0, 2.0)
- LinearSeq(a, b, c)
+{% tabs overview_3 %}
+{% tab 'Scala 2 and 3' for=overview_3 %}
+```scala
+Iterable("x", "y", "z")
+Map("x" -> 24, "y" -> 25, "z" -> 26)
+Set(Color.red, Color.green, Color.blue)
+SortedSet("hello", "world")
+Buffer(x, y, z)
+IndexedSeq(1.0, 2.0)
+LinearSeq(a, b, c)
+```
+{% endtab %}
+{% endtabs %}
The same principle also applies for specific collection implementations, such as:
- List(1, 2, 3)
- HashMap("x" -> 24, "y" -> 25, "z" -> 26)
+{% tabs overview_4 %}
+{% tab 'Scala 2 and 3' for=overview_4 %}
+```scala
+List(1, 2, 3)
+HashMap("x" -> 24, "y" -> 25, "z" -> 26)
+```
+{% endtab %}
+{% endtabs %}
All these collections get displayed with `toString` in the same way they are written above.
All collections support the API provided by `Iterable`, but specialize types wherever this makes sense. For instance the `map` method in class `Iterable` returns another `Iterable` as its result. But this result type is overridden in subclasses. For instance, calling `map` on a `List` yields again a `List`, calling it on a `Set` yields again a `Set` and so on.
- scala> List(1, 2, 3) map (_ + 1)
- res0: List[Int] = List(2, 3, 4)
- scala> Set(1, 2, 3) map (_ * 2)
- res0: Set[Int] = Set(2, 4, 6)
+{% tabs overview_5 %}
+{% tab 'Scala 2 and 3' for=overview_5 %}
+```
+scala> List(1, 2, 3) map (_ + 1)
+res0: List[Int] = List(2, 3, 4)
+scala> Set(1, 2, 3) map (_ * 2)
+res0: Set[Int] = Set(2, 4, 6)
+```
+{% endtab %}
+{% endtabs %}
This behavior which is implemented everywhere in the collections libraries is called the _uniform return type principle_.
diff --git a/_overviews/collections-2.13/performance-characteristics.md b/_overviews/collections-2.13/performance-characteristics.md
index b40deb1e45..ed1885017a 100644
--- a/_overviews/collections-2.13/performance-characteristics.md
+++ b/_overviews/collections-2.13/performance-characteristics.md
@@ -74,7 +74,7 @@ The first table treats sequence types--both immutable and mutable--with the foll
| **apply** | Indexing. |
| **update** | Functional update (with `updated`) for immutable sequences, side-effecting update (with `update` for mutable sequences). |
| **prepend**| Adding an element to the front of the sequence. For immutable sequences, this produces a new sequence. For mutable sequences it modifies the existing sequence. |
-| **append** | Adding an element and the end of the sequence. For immutable sequences, this produces a new sequence. For mutable sequences it modifies the existing sequence. |
+| **append** | Adding an element to the end of the sequence. For immutable sequences, this produces a new sequence. For mutable sequences it modifies the existing sequence. |
| **insert** | Inserting an element at an arbitrary position in the sequence. This is only supported directly for mutable sequences. |
The second table treats mutable and immutable sets and maps with the following operations:
diff --git a/_overviews/collections-2.13/seqs.md b/_overviews/collections-2.13/seqs.md
index 1cd8ebfc7b..cabd0b8a0a 100644
--- a/_overviews/collections-2.13/seqs.md
+++ b/_overviews/collections-2.13/seqs.md
@@ -16,7 +16,7 @@ The [Seq](https://www.scala-lang.org/api/current/scala/collection/Seq.html) trai
The operations on sequences, summarized in the table below, fall into the following categories:
-* **Indexing and length** operations `apply`, `isDefinedAt`, `length`, `indices`, and `lengthCompare`. For a `Seq`, the `apply` operation means indexing; hence a sequence of type `Seq[T]` is a partial function that takes an `Int` argument (an index) and which yields a sequence element of type `T`. In other words `Seq[T]` extends `PartialFunction[Int, T]`. The elements of a sequence are indexed from zero up to the `length` of the sequence minus one. The `length` method on sequences is an alias of the `size` method of general collections. The `lengthCompare` method allows you to compare the lengths of a sequences with an Int even if the sequences has infinite length.
+* **Indexing and length** operations `apply`, `isDefinedAt`, `length`, `indices`, and `lengthCompare`. For a `Seq`, the `apply` operation means indexing; hence a sequence of type `Seq[T]` is a partial function that takes an `Int` argument (an index) and yields a sequence element of type `T`. In other words, `Seq[T]` extends `PartialFunction[Int, T]`. The elements of a sequence are indexed from zero up to the `length` of the sequence minus one. The `length` method on sequences is an alias of the `size` method of general collections. The `lengthCompare` method allows you to compare the length of a sequence with an `Int` or with an `Iterable` even if the sequence has infinite length.
* **Index search operations** `indexOf`, `lastIndexOf`, `indexOfSlice`, `lastIndexOfSlice`, `indexWhere`, `lastIndexWhere`, `segmentLength`, which return the index of an element equal to a given value or matching some predicate.
* **Addition operations** `prepended`, `prependedAll`, `appended`, `appendedAll`, `padTo`, which return new sequences obtained by adding elements at the front or the end of a sequence.
* **Update operations** `updated`, `patch`, which return a new sequence obtained by replacing some elements of the original sequence.
@@ -32,17 +32,17 @@ If a sequence is mutable, it offers in addition a side-effecting `update` method
| WHAT IT IS | WHAT IT DOES |
| ------ | ------ |
| **Indexing and Length:** | |
-| `xs(i)` |(or, written out, `xs apply i`). The element of `xs` at index `i`.|
-| `xs isDefinedAt i` |Tests whether `i` is contained in `xs.indices`.|
+| `xs(i)` |(or, written out, `xs.apply(i)`). The element of `xs` at index `i`.|
+| `xs.isDefinedAt(i)` |Tests whether `i` is contained in `xs.indices`.|
| `xs.length` |The length of the sequence (same as `size`).|
-| `xs lengthCompare n` |Returns `-1` if `xs` is shorter than `n`, `+1` if it is longer, and `0` if it is of length `n`. Works even if the sequence is infinite, for example `LazyList.from(1) lengthCompare 42` returns a positive value.|
+| `xs.lengthCompare(n)` |Returns `-1` if `xs` is shorter than `n`, `+1` if it is longer, and `0` if it is of length `n`. Works even if the sequence is infinite, for example `LazyList.from(1).lengthCompare(42)` returns a positive value.|
| `xs.indices` |The index range of `xs`, extending from `0` to `xs.length - 1`.|
| **Index Search:** | |
-| `xs indexOf x` |The index of the first element in `xs` equal to `x` (several variants exist).|
-| `xs lastIndexOf x` |The index of the last element in `xs` equal to `x` (several variants exist).|
-| `xs indexOfSlice ys` |The first index of `xs` such that successive elements starting from that index form the sequence `ys`.|
-| `xs lastIndexOfSlice ys` |The last index of `xs` such that successive elements starting from that index form the sequence `ys`.|
-| `xs indexWhere p` |The index of the first element in xs that satisfies `p` (several variants exist).|
+| `xs.indexOf(x)` |The index of the first element in `xs` equal to `x` (several variants exist).|
+| `xs.lastIndexOf(x)` |The index of the last element in `xs` equal to `x` (several variants exist).|
+| `xs.indexOfSlice(ys)` |The first index of `xs` such that successive elements starting from that index form the sequence `ys`.|
+| `xs.lastIndexOfSlice(ys)` |The last index of `xs` such that successive elements starting from that index form the sequence `ys`.|
+| `xs.indexWhere(p)` |The index of the first element in `xs` that satisfies `p` (several variants exist).|
| `xs.segmentLength(p, i)`|The length of the longest uninterrupted segment of elements in `xs`, starting with `xs(i)`, that all satisfy the predicate `p`.|
| **Additions:** | |
| `xs.prepended(x)` or `x +: xs` |A new sequence that consists of `x` prepended to `xs`.|
@@ -56,26 +56,26 @@ If a sequence is mutable, it offers in addition a side-effecting `update` method
| `xs(i) = x` |(or, written out, `xs.update(i, x)`, only available for `mutable.Seq`s). Changes the element of `xs` at index `i` to `x`.|
| **Sorting:** | |
| `xs.sorted` |A new sequence obtained by sorting the elements of `xs` using the standard ordering of the element type of `xs`.|
-| `xs sortWith lt` |A new sequence obtained by sorting the elements of `xs` using `lt` as comparison operation.|
-| `xs sortBy f` |A new sequence obtained by sorting the elements of `xs`. Comparison between two elements proceeds by mapping the function `f` over both and comparing the results.|
+| `xs.sortWith(lt)` |A new sequence obtained by sorting the elements of `xs` using `lt` as comparison operation.|
+| `xs.sortBy(f)` |A new sequence obtained by sorting the elements of `xs`. Comparison between two elements proceeds by mapping the function `f` over both and comparing the results.|
| **Reversals:** | |
| `xs.reverse` |A sequence with the elements of `xs` in reverse order.|
| `xs.reverseIterator` |An iterator yielding all the elements of `xs` in reverse order.|
| **Comparisons:** | |
-| `xs sameElements ys` |A test whether `xs` and `ys` contain the same elements in the same order|
-| `xs startsWith ys` |Tests whether `xs` starts with sequence `ys` (several variants exist).|
-| `xs endsWith ys` |Tests whether `xs` ends with sequence `ys` (several variants exist).|
-| `xs contains x` |Tests whether `xs` has an element equal to `x`.|
-| `xs search x` |Tests whether a sorted sequence `xs` has an element equal to `x`, possibly in a more efficient way than `xs contains x`.|
-| `xs containsSlice ys` |Tests whether `xs` has a contiguous subsequence equal to `ys`.|
-| `(xs corresponds ys)(p)` |Tests whether corresponding elements of `xs` and `ys` satisfy the binary predicate `p`.|
+| `xs.sameElements(ys)` |A test whether `xs` and `ys` contain the same elements in the same order.|
+| `xs.startsWith(ys)` |Tests whether `xs` starts with sequence `ys` (several variants exist).|
+| `xs.endsWith(ys)` |Tests whether `xs` ends with sequence `ys` (several variants exist).|
+| `xs.contains(x)` |Tests whether `xs` has an element equal to `x`.|
+| `xs.search(x)` |Tests whether a sorted sequence `xs` has an element equal to `x`, possibly in a more efficient way than `xs.contains(x)`.|
+| `xs.containsSlice(ys)` |Tests whether `xs` has a contiguous subsequence equal to `ys`.|
+| `xs.corresponds(ys)(p)` |Tests whether corresponding elements of `xs` and `ys` satisfy the binary predicate `p`.|
| **Multiset Operations:** | |
-| `xs intersect ys` |The multi-set intersection of sequences `xs` and `ys` that preserves the order of elements in `xs`.|
-| `xs diff ys` |The multi-set difference of sequences `xs` and `ys` that preserves the order of elements in `xs`.|
+| `xs.intersect(ys)` |The multi-set intersection of sequences `xs` and `ys` that preserves the order of elements in `xs`.|
+| `xs.diff(ys)` |The multi-set difference of sequences `xs` and `ys` that preserves the order of elements in `xs`.|
| `xs.distinct` |A subsequence of `xs` that contains no duplicated element.|
-| `xs distinctBy f` |A subsequence of `xs` that contains no duplicated element after applying the transforming function `f`. For instance, `List("foo", "bar", "quux").distinctBy(_.length) == List("foo", "quux")`|
+| `xs.distinctBy(f)` |A subsequence of `xs` that contains no duplicated element after applying the transforming function `f`. For instance, `List("foo", "bar", "quux").distinctBy(_.length) == List("foo", "quux")`|
-Trait [Seq](https://www.scala-lang.org/api/current/scala/collection/Seq.html) has two subtraits [LinearSeq](https://www.scala-lang.org/api/current/scala/collection/LinearSeq.html), and [IndexedSeq](https://www.scala-lang.org/api/current/scala/collection/IndexedSeq.html). These do not add any new operations to the immutable branch, but each offers different performance characteristics: A linear sequence has efficient `head` and `tail` operations, whereas an indexed sequence has efficient `apply`, `length`, and (if mutable) `update` operations. Frequently used linear sequences are `scala.collection.immutable.List` and `scala.collection.immutable.LazyList`. Frequently used indexed sequences are `scala.Array` and `scala.collection.mutable.ArrayBuffer`. The `Vector` class provides an interesting compromise between indexed and linear access. It has both effectively constant time indexing overhead and constant time linear access overhead. Because of this, vectors are a good foundation for mixed access patterns where both indexed and linear accesses are used. You'll learn more on vectors [later](concrete-immutable-collection-classes.html).
+Trait [Seq](https://www.scala-lang.org/api/current/scala/collection/Seq.html) has two subtraits [LinearSeq](https://www.scala-lang.org/api/current/scala/collection/LinearSeq.html), and [IndexedSeq](https://www.scala-lang.org/api/current/scala/collection/IndexedSeq.html). These do not add any new operations to the immutable branch, but each offers different performance characteristics: A linear sequence has efficient `head` and `tail` operations, whereas an indexed sequence has efficient `apply`, `length`, and (if mutable) `update` operations. Frequently used linear sequences are `scala.collection.immutable.List` and `scala.collection.immutable.LazyList`. Frequently used indexed sequences are `scala.Array` and `scala.collection.mutable.ArrayBuffer`. The `Vector` class provides an interesting compromise between indexed and linear access. It has both effectively constant time indexing overhead and constant time linear access overhead. Because of this, vectors are a good foundation for mixed access patterns where both indexed and linear accesses are used. You'll learn more on vectors [later]({% link _overviews/collections-2.13/concrete-immutable-collection-classes.md %}).
On the mutable branch, `IndexedSeq` adds operations for transforming its elements in place (by contrast with
transformation operations such as `map` and `sort`, available on the root `Seq`, which return a new collection
@@ -102,20 +102,20 @@ Two often used implementations of buffers are `ListBuffer` and `ArrayBuffer`. A
| WHAT IT IS | WHAT IT DOES|
| ------ | ------ |
| **Additions:** | |
-| `buf append x` or `buf += x` |Appends element `x` to buffer, and returns `buf` itself as result.|
-| `buf appendAll xs` or`buf ++= xs` |Appends all elements in `xs` to buffer.|
-| `buf prepend x` or `x +=: buf` |Prepends element `x` to buffer.|
-| `buf prependAll xs` or `xs ++=: buf` |Prepends all elements in `xs` to buffer.|
+| `buf.append(x)` or `buf += x` |Appends element `x` to buffer, and returns `buf` itself as result.|
+| `buf.appendAll(xs)` or `buf ++= xs` |Appends all elements in `xs` to buffer.|
+| `buf.prepend(x)` or `x +=: buf` |Prepends element `x` to buffer.|
+| `buf.prependAll(xs)` or `xs ++=: buf` |Prepends all elements in `xs` to buffer.|
| `buf.insert(i, x)` |Inserts element `x` at index `i` in buffer.|
| `buf.insertAll(i, xs)` |Inserts all elements in `xs` at index `i` in buffer.|
| `buf.padToInPlace(n, x)` |Appends element `x` to buffer until it has `n` elements in total.|
| **Removals:** | |
-| `buf subtractOne x` or `buf -= x` |Removes element `x` from buffer.|
-| `buf subtractAll xs` or `buf --= xs` |Removes elements in `xs` from buffer.|
-| `buf remove i` |Removes element at index `i` from buffer.|
+| `buf.subtractOne(x)` or `buf -= x` |Removes element `x` from buffer.|
+| `buf.subtractAll(xs)` or `buf --= xs` |Removes elements in `xs` from buffer.|
+| `buf.remove(i)` |Removes element at index `i` from buffer.|
| `buf.remove(i, n)` |Removes `n` elements starting at index `i` from buffer.|
-| `buf trimStart n` |Removes first `n` elements from buffer.|
-| `buf trimEnd n` |Removes last `n` elements from buffer.|
+| `buf.trimStart(n)` |Removes first `n` elements from buffer.|
+| `buf.trimEnd(n)` |Removes last `n` elements from buffer.|
| `buf.clear()` |Removes all elements from buffer.|
| **Replacement:** | |
| `buf.patchInPlace(i, xs, n)` |Replaces (at most) `n` elements of buffer by elements in `xs`, starting from index `i` in buffer.|
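+
+As a small sketch of how these buffer operations compose (the element values are arbitrary):
+
+{% tabs seqs_buffer_sketch %}
+{% tab 'Scala 2 and 3' for=seqs_buffer_sketch %}
+```scala
+import scala.collection.mutable.ArrayBuffer
+
+val buf = ArrayBuffer(1, 2, 3)
+buf += 4                 // ArrayBuffer(1, 2, 3, 4)
+buf.prepend(0)           // ArrayBuffer(0, 1, 2, 3, 4)
+buf.insert(2, 42)        // ArrayBuffer(0, 1, 42, 2, 3, 4)
+buf.remove(2)            // returns 42; the buffer is ArrayBuffer(0, 1, 2, 3, 4) again
+buf.clear()              // removes all elements
+```
+{% endtab %}
+{% endtabs %}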
diff --git a/_overviews/collections-2.13/sets.md b/_overviews/collections-2.13/sets.md
index 96984d34f3..a57814ffd1 100644
--- a/_overviews/collections-2.13/sets.md
+++ b/_overviews/collections-2.13/sets.md
@@ -18,14 +18,18 @@ permalink: /overviews/collections-2.13/:title.html
For example:
-
- scala> val fruit = Set("apple", "orange", "peach", "banana")
- fruit: scala.collection.immutable.Set[java.lang.String] = Set(apple, orange, peach, banana)
- scala> fruit("peach")
- res0: Boolean = true
- scala> fruit("potato")
- res1: Boolean = false
-
+{% tabs sets_1 %}
+{% tab 'Scala 2 and 3' for=sets_1 %}
+```scala
+scala> val fruit = Set("apple", "orange", "peach", "banana")
+fruit: scala.collection.immutable.Set[java.lang.String] = Set(apple, orange, peach, banana)
+scala> fruit("peach")
+res0: Boolean = true
+scala> fruit("potato")
+res1: Boolean = false
+```
+{% endtab %}
+{% endtabs %}
* **Additions** `incl` and `concat` (or `+` and `++`, respectively), which add one or more elements to a set, yielding a new set.
* **Removals** `excl` and `removedAll` (or `-` and `--`, respectively), which remove one or more elements from a set, yielding a new set.
@@ -85,22 +89,33 @@ The operation `s += elem` adds `elem` to the set `s` as a side effect, and retur
The choice of the method names `+=` and `-=` means that very similar code can work with either mutable or immutable sets. Consider first the following REPL dialogue which uses an immutable set `s`:
- scala> var s = Set(1, 2, 3)
- s: scala.collection.immutable.Set[Int] = Set(1, 2, 3)
- scala> s += 4
- scala> s -= 2
- scala> s
- res2: scala.collection.immutable.Set[Int] = Set(1, 3, 4)
+{% tabs sets_2 %}
+{% tab 'Scala 2 and 3' for=sets_2 %}
+```scala
+scala> var s = Set(1, 2, 3)
+s: scala.collection.immutable.Set[Int] = Set(1, 2, 3)
+scala> s += 4
+scala> s -= 2
+scala> s
+res2: scala.collection.immutable.Set[Int] = Set(1, 3, 4)
+```
+{% endtab %}
+{% endtabs %}
We used `+=` and `-=` on a `var` of type `immutable.Set`. A statement such as `s += 4` is an abbreviation for `s = s + 4`. So this invokes the addition method `+` on the set `s` and then assigns the result back to the `s` variable. Consider now an analogous interaction with a mutable set.
-
- scala> val s = collection.mutable.Set(1, 2, 3)
- s: scala.collection.mutable.Set[Int] = Set(1, 2, 3)
- scala> s += 4
- res3: s.type = Set(1, 4, 2, 3)
- scala> s -= 2
- res4: s.type = Set(1, 4, 3)
+{% tabs sets_3 %}
+{% tab 'Scala 2 and 3' for=sets_3 %}
+```scala
+scala> val s = collection.mutable.Set(1, 2, 3)
+s: scala.collection.mutable.Set[Int] = Set(1, 2, 3)
+scala> s += 4
+res3: s.type = Set(1, 4, 2, 3)
+scala> s -= 2
+res4: s.type = Set(1, 4, 3)
+```
+{% endtab %}
+{% endtabs %}
The end effect is very similar to the previous interaction; we start with a `Set(1, 2, 3)` and end up with a `Set(1, 3, 4)`. However, even though the statements look the same as before, they do something different. `s += 4` now invokes the `+=` method on the mutable set value `s`, changing the set in place. Likewise, `s -= 2` now invokes the `-=` method on the same set.
@@ -108,7 +123,7 @@ Comparing the two interactions shows an important principle. You often can repla
Mutable sets also provide `add` and `remove` as variants of `+=` and `-=`. The difference is that `add` and `remove` return a Boolean result indicating whether the operation had an effect on the set.
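+
+A quick sketch of that difference, using an arbitrary example set:
+
+{% tabs sets_add_remove_sketch %}
+{% tab 'Scala 2 and 3' for=sets_add_remove_sketch %}
+```scala
+val s = scala.collection.mutable.Set(1, 2, 3)
+s.add(4)      // true: the set did change
+s.add(4)      // false: 4 was already present
+s.remove(2)   // true: 2 was removed
+s.remove(2)   // false: 2 was no longer there
+```
+{% endtab %}
+{% endtabs %}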
-The current default implementation of a mutable set uses a hashtable to store the set's elements. The default implementation of an immutable set uses a representation that adapts to the number of elements of the set. An empty set is represented by just a singleton object. Sets of sizes up to four are represented by a single object that stores all elements as fields. Beyond that size, immutable sets are implemented as [Compressed Hash-Array Mapped Prefix-tree](concrete-immutable-collection-classes.html).
+The current default implementation of a mutable set uses a hashtable to store the set's elements. The default implementation of an immutable set uses a representation that adapts to the number of elements of the set. An empty set is represented by just a singleton object. Sets of sizes up to four are represented by a single object that stores all elements as fields. Beyond that size, immutable sets are implemented as a [Compressed Hash-Array Mapped Prefix-tree]({% link _overviews/collections-2.13/concrete-immutable-collection-classes.md %}).
A consequence of these representation choices is that, for sets of small sizes (say up to 4), immutable sets are usually more compact and also more efficient than mutable sets. So, if you expect the size of a set to be small, try making it immutable.
@@ -120,34 +135,63 @@ A [SortedSet](https://www.scala-lang.org/api/current/scala/collection/SortedSet.
To create an empty [TreeSet](https://www.scala-lang.org/api/current/scala/collection/immutable/TreeSet.html), you could first specify the desired ordering:
- scala> val myOrdering = Ordering.fromLessThan[String](_ > _)
- myOrdering: scala.math.Ordering[String] = ...
+{% tabs sorted-sets_1 %}
+{% tab 'Scala 2 and 3' for=sorted-sets_1 %}
+```scala
+scala> val myOrdering = Ordering.fromLessThan[String](_ > _)
+myOrdering: scala.math.Ordering[String] = ...
+```
+{% endtab %}
+{% endtabs %}
Then, to create an empty tree set with that ordering, use:
- scala> TreeSet.empty(myOrdering)
- res1: scala.collection.immutable.TreeSet[String] = TreeSet()
-
-Or you can leave out the ordering argument but give an element type or the empty set. In that case, the default ordering on the element type will be used.
-
- scala> TreeSet.empty[String]
- res2: scala.collection.immutable.TreeSet[String] = TreeSet()
+{% tabs sorted-sets_2 %}
+{% tab 'Scala 2 and 3' for=sorted-sets_2 %}
+```scala
+scala> TreeSet.empty(myOrdering)
+res1: scala.collection.immutable.TreeSet[String] = TreeSet()
+```
+{% endtab %}
+{% endtabs %}
+
+Or you can leave out the ordering argument but give an element type for the empty set. In that case, the default ordering on the element type will be used.
+
+{% tabs sorted-sets_3 %}
+{% tab 'Scala 2 and 3' for=sorted-sets_3 %}
+```scala
+scala> TreeSet.empty[String]
+res2: scala.collection.immutable.TreeSet[String] = TreeSet()
+```
+{% endtab %}
+{% endtabs %}
If you create new sets from a tree-set (for instance by concatenation or filtering) they will keep the same ordering as the original set. For instance,
- scala> res2 + "one" + "two" + "three" + "four"
- res3: scala.collection.immutable.TreeSet[String] = TreeSet(four, one, three, two)
+{% tabs sorted-sets_4 %}
+{% tab 'Scala 2 and 3' for=sorted-sets_4 %}
+```scala
+scala> res2 + "one" + "two" + "three" + "four"
+res3: scala.collection.immutable.TreeSet[String] = TreeSet(four, one, three, two)
+```
+{% endtab %}
+{% endtabs %}
Sorted sets also support ranges of elements. For instance, the `range` method returns all elements from a starting element up to, but excluding, an end element. Or, the `rangeFrom` method returns all elements greater than or equal to a starting element in the set's ordering. The result of calls to both methods is again a sorted set. Examples:
- scala> res3.range("one", "two")
- res4: scala.collection.immutable.TreeSet[String] = TreeSet(one, three)
- scala> res3 rangeFrom "three"
- res5: scala.collection.immutable.TreeSet[String] = TreeSet(three, two)
-
+{% tabs sorted-sets_5 %}
+{% tab 'Scala 2 and 3' for=sorted-sets_5 %}
+```scala
+scala> res3.range("one", "two")
+res4: scala.collection.immutable.TreeSet[String] = TreeSet(one, three)
+scala> res3.rangeFrom("three")
+res5: scala.collection.immutable.TreeSet[String] = TreeSet(three, two)
+```
+{% endtab %}
+{% endtabs %}
### Bitsets ###
-Bitsets are sets of non-negative integer elements that are implemented in one or more words of packed bits. The internal representation of a [BitSet](https://www.scala-lang.org/api/current/scala/collection/BitSet.html) uses an array of `Long`s. The first `Long` covers elements from 0 to 63, the second from 64 to 127, and so on (Immutable bitsets of elements in the range of 0 to 127 optimize the array away and store the bits directly in a one or two `Long` fields.) For every `Long`, each of its 64 bits is set to 1 if the corresponding element is contained in the set, and is unset otherwise. It follows that the size of a bitset depends on the largest integer that's stored in it. If `N` is that largest integer, then the size of the set is `N/64` `Long` words, or `N/8` bytes, plus a small number of extra bytes for status information.
+Bitsets are sets of non-negative integer elements that are implemented in one or more words of packed bits. The internal representation of a [BitSet](https://www.scala-lang.org/api/current/scala/collection/BitSet.html) uses an array of `Long`s. The first `Long` covers elements from 0 to 63, the second from 64 to 127, and so on (immutable bitsets of elements in the range of 0 to 127 optimize the array away and store the bits directly in one or two `Long` fields). For every `Long`, each of its 64 bits is set to 1 if the corresponding element is contained in the set, and is unset otherwise. It follows that the size of a bitset depends on the largest integer that's stored in it. If `N` is that largest integer, then the size of the set is `N/64` `Long` words, or `N/8` bytes, plus a small number of extra bytes for status information.
Bitsets are hence more compact than other sets if they contain many small elements. Another advantage of bitsets is that operations such as membership test with `contains`, or element addition and removal with `+=` and `-=` are all extremely efficient.
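+
+For instance, a small illustrative sketch:
+
+{% tabs sets_bitset_sketch %}
+{% tab 'Scala 2 and 3' for=sets_bitset_sketch %}
+```scala
+import scala.collection.immutable.BitSet
+
+val bits = BitSet(1, 3, 64)   // the elements are packed into Long words
+bits.contains(3)              // true: a cheap bit test
+bits + 200                    // a new BitSet; its footprint grows with the largest element stored
+```
+{% endtab %}
+{% endtabs %}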
diff --git a/_overviews/collections-2.13/strings.md b/_overviews/collections-2.13/strings.md
index 485410df49..aebe244304 100644
--- a/_overviews/collections-2.13/strings.md
+++ b/_overviews/collections-2.13/strings.md
@@ -14,17 +14,30 @@ permalink: /overviews/collections-2.13/:title.html
Like arrays, strings are not directly sequences, but they can be converted to them, and they also support all sequence operations on strings. Here are some examples of operations you can invoke on strings.
- scala> val str = "hello"
- str: java.lang.String = hello
- scala> str.reverse
- res6: String = olleh
- scala> str.map(_.toUpper)
- res7: String = HELLO
- scala> str drop 3
- res8: String = lo
- scala> str.slice(1, 4)
- res9: String = ell
- scala> val s: Seq[Char] = str
- s: Seq[Char] = hello
+{% tabs strings_1 %}
+{% tab 'Scala 2 and 3' for=strings_1 %}
+
+```scala
+scala> val str = "hello"
+val str: java.lang.String = hello
+
+scala> str.reverse
+val res6: String = olleh
+
+scala> str.map(_.toUpper)
+val res7: String = HELLO
+
+scala> str.drop(3)
+val res8: String = lo
+
+scala> str.slice(1, 4)
+val res9: String = ell
+
+scala> val s: Seq[Char] = str
+val s: Seq[Char] = hello
+```
+
+{% endtab %}
+{% endtabs %}
These operations are supported by two implicit conversions. The first, low-priority conversion maps a `String` to a `WrappedString`, which is a subclass of `immutable.IndexedSeq`. This conversion got applied in the last line above, where a string got converted into a `Seq`. The other, high-priority conversion maps a string to a `StringOps` object, which adds all methods on immutable sequences to strings. This conversion was implicitly inserted in the method calls of `reverse`, `map`, `drop`, and `slice` in the example above.
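+
+To see the two conversions at work, you can also invoke them explicitly; in the Scala 2.13 standard library they live in `Predef` (a sketch, assuming that library):
+
+{% tabs strings_conversions_sketch %}
+{% tab 'Scala 2 and 3' for=strings_conversions_sketch %}
+```scala
+import scala.collection.immutable.WrappedString
+
+// normally the compiler inserts these conversions for you
+val ops = Predef.augmentString("hello")              // a StringOps with all the sequence methods
+val seq: WrappedString = Predef.wrapString("hello")  // an immutable.IndexedSeq[Char] view of the string
+```
+{% endtab %}
+{% endtabs %}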
diff --git a/_overviews/collections-2.13/trait-iterable.md b/_overviews/collections-2.13/trait-iterable.md
index edc2ef2b1f..21b28e2282 100644
--- a/_overviews/collections-2.13/trait-iterable.md
+++ b/_overviews/collections-2.13/trait-iterable.md
@@ -14,7 +14,13 @@ permalink: /overviews/collections-2.13/:title.html
At the top of the collection hierarchy is trait `Iterable`. All methods in this trait are defined in terms of an abstract method, `iterator`, which yields the collection's elements one by one.
- def iterator: Iterator[A]
+{% tabs trait-iterable_1 %}
+{% tab 'Scala 2 and 3' for=trait-iterable_1 %}
+```scala
+def iterator: Iterator[A]
+```
+{% endtab %}
+{% endtabs %}
Collection classes that implement `Iterable` just need to define this method; all other methods can be inherited from `Iterable`.
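+
+For instance, a minimal sketch of such a class (the name `Digits` is made up for illustration):
+
+{% tabs trait-iterable_custom_sketch %}
+{% tab 'Scala 2 and 3' for=trait-iterable_custom_sketch %}
+```scala
+// only `iterator` is defined; map, filter, foreach, toList, ... are inherited
+class Digits extends Iterable[Int] {
+  def iterator: Iterator[Int] = Iterator.range(0, 10)
+}
+
+val ds = new Digits
+ds.filter(_ % 2 == 0).toList   // List(0, 2, 4, 6, 8)
+```
+{% endtab %}
+{% endtabs %}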
@@ -31,119 +37,124 @@ Collection classes that implement `Iterable` just need to define this method; al
* **Element tests** `exists`, `forall`, `count` which test collection elements with a given predicate.
* **Folds** `foldLeft`, `foldRight`, `reduceLeft`, `reduceRight` which apply a binary operation to successive elements.
* **Specific folds** `sum`, `product`, `min`, `max`, which work on collections of specific types (numeric or comparable).
-* **String** operations `mkString`, `addString`, `className`, which give alternative ways of converting a collection to a string.
-* **View** operation: A view is a collection that's evaluated lazily. You'll learn more about views in [later](views.html).
+* **String** operations `mkString` and `addString`, which give alternative ways of converting a collection to a string.
+* **View** operation: A view is a collection that's evaluated lazily. You'll learn more about views [later]({% link _overviews/collections-2.13/views.md %}).
Two more methods exist in `Iterable` that return iterators: `grouped` and `sliding`. These iterators, however, do not return single elements but whole subsequences of elements of the original collection. The maximal size of these subsequences is given as an argument to these methods. The `grouped` method returns its elements in "chunked" increments, where `sliding` yields a sliding "window" over the elements. The difference between the two should become clear by looking at the following REPL interaction:
- scala> val xs = List(1, 2, 3, 4, 5)
- xs: List[Int] = List(1, 2, 3, 4, 5)
- scala> val git = xs grouped 3
- git: Iterator[List[Int]] = non-empty iterator
- scala> git.next()
- res3: List[Int] = List(1, 2, 3)
- scala> git.next()
- res4: List[Int] = List(4, 5)
- scala> val sit = xs sliding 3
- sit: Iterator[List[Int]] = non-empty iterator
- scala> sit.next()
- res5: List[Int] = List(1, 2, 3)
- scala> sit.next()
- res6: List[Int] = List(2, 3, 4)
- scala> sit.next()
- res7: List[Int] = List(3, 4, 5)
+{% tabs trait-iterable_2 %}
+{% tab 'Scala 2 and 3' for=trait-iterable_2 %}
+```
+scala> val xs = List(1, 2, 3, 4, 5)
+xs: List[Int] = List(1, 2, 3, 4, 5)
+scala> val git = xs.grouped(3)
+git: Iterator[List[Int]] = non-empty iterator
+scala> git.next()
+res3: List[Int] = List(1, 2, 3)
+scala> git.next()
+res4: List[Int] = List(4, 5)
+scala> val sit = xs.sliding(3)
+sit: Iterator[List[Int]] = non-empty iterator
+scala> sit.next()
+res5: List[Int] = List(1, 2, 3)
+scala> sit.next()
+res6: List[Int] = List(2, 3, 4)
+scala> sit.next()
+res7: List[Int] = List(3, 4, 5)
+```
+{% endtab %}
+{% endtabs %}
### Operations in Class Iterable ###
-| WHAT IT IS | WHAT IT DOES |
-| ------ | ------ |
-| **Abstract Method:** | |
-| `xs.iterator` |An `iterator` that yields every element in `xs`.|
-| **Other Iterators:** | |
-| `xs foreach f` |Executes function `f` for every element of `xs`.|
-| `xs grouped size` |An iterator that yields fixed-sized "chunks" of this collection.|
-| `xs sliding size` |An iterator that yields a sliding fixed-sized window of elements in this collection.|
-| **Addition:** | |
-| `xs concat ys` (or `xs ++ ys`) |A collection consisting of the elements of both `xs` and `ys`. `ys` is a [IterableOnce](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/IterableOnce.html) collection, i.e., either an [Iterable](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/Iterable.html) or an [Iterator](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/Iterator.html).|
-| **Maps:** | |
-| `xs map f` |The collection obtained from applying the function f to every element in `xs`.|
-| `xs flatMap f` |The collection obtained from applying the collection-valued function `f` to every element in `xs` and concatenating the results.|
-| `xs collect f` |The collection obtained from applying the partial function `f` to every element in `xs` for which it is defined and collecting the results.|
-| **Conversions:** | |
-| `xs.to(SortedSet)` | Generic conversion operation that takes a collection factory as parameter. |
-| `xs.toList` |Converts the collection to a list. |
-| `xs.toVector` |Converts the collection to a vector. |
-| `xs.toMap` |Converts the collection of key/value pairs to a map. If the collection does not have pairs as elements, calling this operation results in a static type error.|
-| `xs.toSet` |Converts the collection to a set. |
-| `xs.toSeq` |Converts the collection to a sequence. |
-| `xs.toIndexedSeq` |Converts the collection to an indexed sequence. |
-| `xs.toBuffer` |Converts the collection to a buffer. |
-| `xs.toArray` |Converts the collection to an array. |
-| **Copying:** | |
-| `xs copyToArray(arr, s, n)`|Copies at most `n` elements of the collection to array `arr` starting at index `s`. The last two arguments are optional.|
-| **Size info:** | |
-| `xs.isEmpty` |Tests whether the collection is empty. |
-| `xs.nonEmpty` |Tests whether the collection contains elements. |
-| `xs.size` |The number of elements in the collection. |
-| `xs.knownSize` |The number of elements, if this one takes constant time to compute, otherwise `-1`. |
-| `xs.sizeCompare(ys)` |Returns a negative value if `xs` is shorter than the `ys` collection, a positive value if it is longer, and `0` if they have the same size. Works even if the collection is infinite, for example `LazyList.from(1) sizeCompare List(1, 2)` returns a positive value. |
-| `xs.sizeCompare(n)` |Returns a negative value if `xs` is shorter than `n`, a positive value if it is longer, and `0` if it is of size `n`. Works even if the collection is infinite, for example `LazyList.from(1) sizeCompare 42` returns a positive value. |
-| `xs.sizeIs < 42`, `xs.sizeIs != 42`, etc. |Provides a more convenient syntax for `xs.sizeCompare(42) < 0`, `xs.sizeCompare(42) != 0`, etc., respectively.|
-| **Element Retrieval:** | |
-| `xs.head` |The first element of the collection (or, some element, if no order is defined).|
-| `xs.headOption` |The first element of `xs` in an option value, or None if `xs` is empty.|
-| `xs.last` |The last element of the collection (or, some element, if no order is defined).|
-| `xs.lastOption` |The last element of `xs` in an option value, or None if `xs` is empty.|
-| `xs find p` |An option containing the first element in `xs` that satisfies `p`, or `None` if no element qualifies.|
-| **Subcollections:** | |
-| `xs.tail` |The rest of the collection except `xs.head`. |
-| `xs.init` |The rest of the collection except `xs.last`. |
-| `xs.slice(from, to)` |A collection consisting of elements in some index range of `xs` (from `from` up to, and excluding `to`).|
-| `xs take n` |A collection consisting of the first `n` elements of `xs` (or, some arbitrary `n` elements, if no order is defined).|
-| `xs drop n` |The rest of the collection except `xs take n`.|
-| `xs takeWhile p` |The longest prefix of elements in the collection that all satisfy `p`.|
-| `xs dropWhile p` |The collection without the longest prefix of elements that all satisfy `p`.|
-| `xs takeRight n` |A collection consisting of the last `n` elements of `xs` (or, some arbitrary `n` elements, if no order is defined).|
-| `xs dropRight n` |The rest of the collection except `xs takeRight n`.|
-| `xs filter p` |The collection consisting of those elements of xs that satisfy the predicate `p`.|
-| `xs withFilter p` |A non-strict filter of this collection. Subsequent calls to `map`, `flatMap`, `foreach`, and `withFilter` will only apply to those elements of `xs` for which the condition `p` is true.|
-| `xs filterNot p` |The collection consisting of those elements of `xs` that do not satisfy the predicate `p`.|
-| **Subdivisions:** | |
-| `xs splitAt n` |Split `xs` at a position, giving the pair of collections `(xs take n, xs drop n)`.|
-| `xs span p` |Split `xs` according to a predicate, giving the pair of collections `(xs takeWhile p, xs.dropWhile p)`.|
-| `xs partition p` |Split `xs` into a pair of collections; one with elements that satisfy the predicate `p`, the other with elements that do not, giving the pair of collections `(xs filter p, xs.filterNot p)`|
-| `xs groupBy f` |Partition `xs` into a map of collections according to a discriminator function `f`.|
-| `xs.groupMap(f)(g)`|Partition `xs` into a map of collections according to a discriminator function `f`, and applies the transformation function `g` to each element in a group.|
-| `xs.groupMapReduce(f)(g)(h)`|Partition `xs` according to a discriminator function `f`, and then combine the results of applying the function `g` to each element in a group using the `h` function.|
-| **Element Conditions:** | |
-| `xs forall p` |A boolean indicating whether the predicate `p` holds for all elements of `xs`.|
-| `xs exists p` |A boolean indicating whether the predicate `p` holds for some element in `xs`.|
-| `xs count p` |The number of elements in `xs` that satisfy the predicate `p`.|
-| **Folds:** | |
-| `xs.foldLeft(z)(op)` |Apply binary operation `op` between successive elements of `xs`, going left to right and starting with `z`.|
-| `xs.foldRight(z)(op)` |Apply binary operation `op` between successive elements of `xs`, going right to left and ending with `z`.|
-| `xs reduceLeft op` |Apply binary operation `op` between successive elements of non-empty collection `xs`, going left to right.|
-| `xs reduceRight op` |Apply binary operation `op` between successive elements of non-empty collection `xs`, going right to left.|
-| **Specific Folds:** | |
-| `xs.sum` |The sum of the numeric element values of collection `xs`.|
-| `xs.product` |The product of the numeric element values of collection `xs`.|
-| `xs.min` |The minimum of the ordered element values of collection `xs`.|
-| `xs.max` |The maximum of the ordered element values of collection `xs`.|
-| `xs.minOption` |Like `min` but returns `None` if `xs` is empty.|
-| `xs.maxOption` |Like `max` but returns `None` if `xs` is empty.|
-| **Strings:** | |
-| `xs.addString(b, start, sep, end)`|Adds a string to `StringBuilder` `b` that shows all elements of `xs` between separators `sep` enclosed in strings `start` and `end`. `start`, `sep`, `end` are all optional.|
-| `xs.mkString(start, sep, end)`|Converts the collection to a string that shows all elements of `xs` between separators `sep` enclosed in strings `start` and `end`. `start`, `sep`, `end` are all optional.|
-| `xs.stringPrefix` |The collection name at the beginning of the string returned from `xs.toString`.|
-| **Zippers:** | |
-| `xs zip ys` |A collection of pairs of corresponding elements from `xs` and `ys`.|
-| `xs.zipAll(ys, x, y)` |A collection of pairs of corresponding elements from `xs` and `ys`, where the shorter sequence is extended to match the longer one by appending elements `x` or `y`.|
-| `xs.zipWithIndex` |An collection of pairs of elements from `xs` with their indices.|
-| **Views:** | |
-| `xs.view` |Produces a view over `xs`.|
+| WHAT IT IS | WHAT IT DOES |
+|-------------------------------------------| ------ |
+| **Abstract Method:** | |
+| `xs.iterator` |An `iterator` that yields every element in `xs`.|
+| **Other Iterators:** | |
+| `xs.foreach(f)` |Executes function `f` for every element of `xs`.|
+| `xs.grouped(size)` |An iterator that yields fixed-sized "chunks" of this collection.|
+| `xs.sliding(size)` |An iterator that yields a sliding fixed-sized window of elements in this collection.|
+| **Addition:** | |
+| `xs.concat(ys)` (or `xs ++ ys`) |A collection consisting of the elements of both `xs` and `ys`. `ys` is a [IterableOnce](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/IterableOnce.html) collection, i.e., either an [Iterable](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/Iterable.html) or an [Iterator](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/Iterator.html).|
+| **Maps:** | |
+| `xs.map(f)`                               |The collection obtained from applying the function `f` to every element in `xs`.|
+| `xs.flatMap(f)` |The collection obtained from applying the collection-valued function `f` to every element in `xs` and concatenating the results.|
+| `xs.collect(f)` |The collection obtained from applying the partial function `f` to every element in `xs` for which it is defined and collecting the results.|
+| **Conversions:** | |
+| `xs.to(SortedSet)` | Generic conversion operation that takes a collection factory as parameter. |
+| `xs.toList` |Converts the collection to a list. |
+| `xs.toVector` |Converts the collection to a vector. |
+| `xs.toMap` |Converts the collection of key/value pairs to a map. If the collection does not have pairs as elements, calling this operation results in a static type error.|
+| `xs.toSet` |Converts the collection to a set. |
+| `xs.toSeq` |Converts the collection to a sequence. |
+| `xs.toIndexedSeq` |Converts the collection to an indexed sequence. |
+| `xs.toBuffer` |Converts the collection to a buffer. |
+| `xs.toArray` |Converts the collection to an array. |
+| **Copying:** | |
+| `xs.copyToArray(arr, s, n)`               |Copies at most `n` elements of the collection to array `arr` starting at index `s`. The last two arguments are optional.|
+| **Size info:** | |
+| `xs.isEmpty` |Tests whether the collection is empty. |
+| `xs.nonEmpty` |Tests whether the collection contains elements. |
+| `xs.size` |The number of elements in the collection. |
+| `xs.knownSize` |The number of elements, if this one takes constant time to compute, otherwise `-1`. |
+| `xs.sizeCompare(ys)`                      |Returns a negative value if `xs` is shorter than the `ys` collection, a positive value if it is longer, and `0` if they have the same size. Works even if the collection is infinite, for example `LazyList.from(1).sizeCompare(List(1, 2))` returns a positive value. |
+| `xs.sizeCompare(n)`                       |Returns a negative value if `xs` is shorter than `n`, a positive value if it is longer, and `0` if it is of size `n`. Works even if the collection is infinite, for example `LazyList.from(1).sizeCompare(42)` returns a positive value. |
+| `xs.sizeIs < 42`, `xs.sizeIs != 42`, etc. |Provides a more convenient syntax for `xs.sizeCompare(42) < 0`, `xs.sizeCompare(42) != 0`, etc., respectively.|
+| **Element Retrieval:** | |
+| `xs.head` |The first element of the collection (or, some element, if no order is defined).|
+| `xs.headOption` |The first element of `xs` in an option value, or None if `xs` is empty.|
+| `xs.last` |The last element of the collection (or, some element, if no order is defined).|
+| `xs.lastOption` |The last element of `xs` in an option value, or None if `xs` is empty.|
+| `xs.find(p)` |An option containing the first element in `xs` that satisfies `p`, or `None` if no element qualifies.|
+| **Subcollections:** | |
+| `xs.tail` |The rest of the collection except `xs.head`. |
+| `xs.init` |The rest of the collection except `xs.last`. |
+| `xs.slice(from, to)` |A collection consisting of elements in some index range of `xs` (from `from` up to, and excluding `to`).|
+| `xs.take(n)` |A collection consisting of the first `n` elements of `xs` (or, some arbitrary `n` elements, if no order is defined).|
+| `xs.drop(n)` |The rest of the collection except `xs.take(n)`.|
+| `xs.takeWhile(p)` |The longest prefix of elements in the collection that all satisfy `p`.|
+| `xs.dropWhile(p)` |The collection without the longest prefix of elements that all satisfy `p`.|
+| `xs.takeRight(n)` |A collection consisting of the last `n` elements of `xs` (or, some arbitrary `n` elements, if no order is defined).|
+| `xs.dropRight(n)` |The rest of the collection except `xs.takeRight(n)`.|
+| `xs.filter(p)`                            |The collection consisting of those elements of `xs` that satisfy the predicate `p`.|
+| `xs.withFilter(p)` |A non-strict filter of this collection. Subsequent calls to `map`, `flatMap`, `foreach`, and `withFilter` will only apply to those elements of `xs` for which the condition `p` is true.|
+| `xs.filterNot(p)` |The collection consisting of those elements of `xs` that do not satisfy the predicate `p`.|
+| **Subdivisions:** | |
+| `xs.splitAt(n)`                           |Split `xs` at a position, giving the pair of collections `(xs.take(n), xs.drop(n))`.|
+| `xs.span(p)`                              |Split `xs` according to a predicate, giving the pair of collections `(xs.takeWhile(p), xs.dropWhile(p))`.|
+| `xs.partition(p)`                         |Split `xs` into a pair of collections; one with elements that satisfy the predicate `p`, the other with elements that do not, giving the pair of collections `(xs.filter(p), xs.filterNot(p))`.|
+| `xs.groupBy(f)` |Partition `xs` into a map of collections according to a discriminator function `f`.|
+| `xs.groupMap(f)(g)` |Partition `xs` into a map of collections according to a discriminator function `f`, and applies the transformation function `g` to each element in a group.|
+| `xs.groupMapReduce(f)(g)(h)` |Partition `xs` according to a discriminator function `f`, and then combine the results of applying the function `g` to each element in a group using the `h` function.|
+| **Element Conditions:** | |
+| `xs.forall(p)` |A boolean indicating whether the predicate `p` holds for all elements of `xs`.|
+| `xs.exists(p)` |A boolean indicating whether the predicate `p` holds for some element in `xs`.|
+| `xs.count(p)` |The number of elements in `xs` that satisfy the predicate `p`.|
+| **Folds:** | |
+| `xs.foldLeft(z)(op)` |Apply binary operation `op` between successive elements of `xs`, going left to right and starting with `z`.|
+| `xs.foldRight(z)(op)` |Apply binary operation `op` between successive elements of `xs`, going right to left and starting with `z`.|
+| `xs.reduceLeft(op)` |Apply binary operation `op` between successive elements of non-empty collection `xs`, going left to right.|
+| `xs.reduceRight(op)` |Apply binary operation `op` between successive elements of non-empty collection `xs`, going right to left.|
+| **Specific Folds:** | |
+| `xs.sum` |The sum of the numeric element values of collection `xs`.|
+| `xs.product` |The product of the numeric element values of collection `xs`.|
+| `xs.min` |The minimum of the ordered element values of collection `xs`.|
+| `xs.max` |The maximum of the ordered element values of collection `xs`.|
+| `xs.minOption` |Like `min` but returns `None` if `xs` is empty.|
+| `xs.maxOption` |Like `max` but returns `None` if `xs` is empty.|
+| **Strings:** | |
+| `xs.addString(b, start, sep, end)` |Adds a string to `StringBuilder` `b` that shows all elements of `xs` between separators `sep` enclosed in strings `start` and `end`. `start`, `sep`, `end` are all optional.|
+| `xs.mkString(start, sep, end)` |Converts the collection to a string that shows all elements of `xs` between separators `sep` enclosed in strings `start` and `end`. `start`, `sep`, `end` are all optional.|
+| **Zippers:** | |
+| `xs.zip(ys)` |A collection of pairs of corresponding elements from `xs` and `ys`.|
+| `xs.zipAll(ys, x, y)` |A collection of pairs of corresponding elements from `xs` and `ys`, where the shorter sequence is extended to match the longer one by appending elements `x` or `y`.|
+| `xs.zipWithIndex`                         |A collection of pairs of elements from `xs` with their indices.|
+| **Views:** | |
+| `xs.view` |Produces a view over `xs`.|
In the inheritance hierarchy below `Iterable` you find three traits: [Seq](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/Seq.html), [Set](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/Set.html), and [Map](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/Map.html). `Seq` and `Map` implement the [PartialFunction](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/PartialFunction.html) trait with its `apply` and `isDefinedAt` methods, each implemented differently. `Set` gets its `apply` method from [SetOps](https://www.scala-lang.org/api/{{ site.scala-version }}/scala/collection/SetOps.html).
-For sequences, `apply` is positional indexing, where elements are always numbered from `0`. That is, `Seq(1, 2, 3)(1)` gives `2`. For sets, `apply` is a membership test. For instance, `Set('a', 'b', 'c')('b')` gives `true` whereas `Set()('a')` gives `false`. Finally for maps, `apply` is a selection. For instance, `Map('a' -> 1, 'b' -> 10, 'c' -> 100)('b')` gives `10`.
+For sequences, `apply` is positional indexing, where elements are always numbered from `0`. That is, `Seq(1, 2, 3)(1)` gives `2`. For sets, `apply` is a membership test. For instance, `Set('a', 'b', 'c')('b')` gives `true` whereas `Set()('a')` gives `false`. Finally, for maps, `apply` is a selection. For instance, `Map('a' -> 1, 'b' -> 10, 'c' -> 100)('b')` gives `10`.
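+
+Put together as code, those three uses of `apply` from the examples above read:
+
+{% tabs trait-iterable_apply_sketch %}
+{% tab 'Scala 2 and 3' for=trait-iterable_apply_sketch %}
+```scala
+Seq(1, 2, 3)(1)                            // 2: positional indexing
+Set('a', 'b', 'c')('b')                    // true: membership test
+Map('a' -> 1, 'b' -> 10, 'c' -> 100)('b')  // 10: selecting the value for a key
+```
+{% endtab %}
+{% endtabs %}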
In the following, we will explain each of the three kinds of collections in more detail.
diff --git a/_overviews/collections-2.13/views.md b/_overviews/collections-2.13/views.md
index 0c094bb38e..6b0052c5e5 100644
--- a/_overviews/collections-2.13/views.md
+++ b/_overviews/collections-2.13/views.md
@@ -18,11 +18,23 @@ There are two principal ways to implement transformers. One is _strict_, that is
As an example of a non-strict transformer consider the following implementation of a lazy map operation:
- def lazyMap[T, U](coll: Iterable[T], f: T => U) = new Iterable[U] {
- def iterator = coll.iterator map f
- }
-
-Note that `lazyMap` constructs a new `Iterable` without stepping through all elements of the given collection `coll`. The given function `f` is instead applied to the elements of the new collection's `iterator` as they are demanded.
+{% tabs views_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=views_1 %}
+```scala mdoc
+def lazyMap[T, U](iter: Iterable[T], f: T => U) = new Iterable[U] {
+ def iterator = iter.iterator.map(f)
+}
+```
+{% endtab %}
+{% tab 'Scala 3' for=views_1 %}
+```scala
+def lazyMap[T, U](iter: Iterable[T], f: T => U) = new Iterable[U]:
+ def iterator = iter.iterator.map(f)
+```
+{% endtab %}
+{% endtabs %}
+
+Note that `lazyMap` constructs a new `Iterable` without stepping through all elements of the given collection `iter`. The given function `f` is instead applied to the elements of the new collection's `iterator` as they are demanded.
Scala collections are by default strict in all their transformers, except for `LazyList`, which implements all its transformer methods lazily. However, there is a systematic way to turn every collection into a lazy one and _vice versa_, which is based on collection views. A _view_ is a special kind of collection that represents some base collection, but implements all transformers lazily.
@@ -30,42 +42,103 @@ To go from a collection to its view, you can use the `view` method on the collec
Let's see an example. Say you have a vector of Ints over which you want to map two functions in succession:
- scala> val v = Vector(1 to 10: _*)
- v: scala.collection.immutable.Vector[Int] =
- Vector(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
- scala> v map (_ + 1) map (_ * 2)
- res5: scala.collection.immutable.Vector[Int] =
- Vector(4, 6, 8, 10, 12, 14, 16, 18, 20, 22)
+{% tabs views_2 class=tabs-scala-version %}
+{% tab 'Scala 2' for=views_2 %}
+
+```scala
+scala> val v = Vector(1 to 10: _*)
+val v: scala.collection.immutable.Vector[Int] =
+ Vector(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
+
+scala> v.map(_ + 1).map(_ * 2)
+val res5: scala.collection.immutable.Vector[Int] =
+ Vector(4, 6, 8, 10, 12, 14, 16, 18, 20, 22)
+```
+
+{% endtab %}
+{% tab 'Scala 3' for=views_2 %}
+
+```scala
+scala> val v = Vector((1 to 10)*)
+val v: scala.collection.immutable.Vector[Int] =
+ Vector(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
+
+scala> v.map(_ + 1).map(_ * 2)
+val res5: scala.collection.immutable.Vector[Int] =
+ Vector(4, 6, 8, 10, 12, 14, 16, 18, 20, 22)
+```
+
+{% endtab %}
+{% endtabs %}
In the last statement, the expression `v.map(_ + 1)` constructs a new vector which is then transformed into a third vector by the second call, `.map(_ * 2)`. In many situations, constructing the intermediate result from the first call to `map` is a bit wasteful. In the example above, it would be faster to do a single map with the composition of the two functions `(_ + 1)` and `(_ * 2)`. If you have the two functions available in the same place you can do this by hand. But quite often, successive transformations of a data structure are done in different program modules. Fusing those transformations would then undermine modularity. A more general way to avoid the intermediate results is by turning the vector first into a view, then applying all transformations to the view, and finally forcing the view to a vector:
- scala> (v.view map (_ + 1) map (_ * 2)).to(Vector)
- res12: scala.collection.immutable.Vector[Int] =
- Vector(4, 6, 8, 10, 12, 14, 16, 18, 20, 22)
+{% tabs views_3 %}
+{% tab 'Scala 2 and 3' for=views_3 %}
+
+```scala
+scala> val w = v.view.map(_ + 1).map(_ * 2).to(Vector)
+val w: scala.collection.immutable.Vector[Int] =
+ Vector(4, 6, 8, 10, 12, 14, 16, 18, 20, 22)
+```
+
+{% endtab %}
+{% endtabs %}
Let's do this sequence of operations again, one by one:
- scala> val vv = v.view
- vv: scala.collection.IndexedSeqView[Int] = IndexedSeqView()
+{% tabs views_4 %}
+{% tab 'Scala 2 and 3' for=views_4 %}
+
+```scala
+scala> val vv = v.view
+val vv: scala.collection.IndexedSeqView[Int] = IndexedSeqView()
+```
+
+{% endtab %}
+{% endtabs %}
The application `v.view` gives you an `IndexedSeqView[Int]`, i.e. a lazily evaluated `IndexedSeq[Int]`. Like with `LazyList`,
the `toString` operation of views does not force the view elements, that’s why the content of `vv` is shown as `IndexedSeqView()`.
Applying the first `map` to the view gives:
- scala> vv map (_ + 1)
- res13: scala.collection.IndexedSeqView[Int] = IndexedSeqView()
+{% tabs views_5 %}
+{% tab 'Scala 2 and 3' for=views_5 %}
+
+```scala
+scala> vv.map(_ + 1)
+val res13: scala.collection.IndexedSeqView[Int] = IndexedSeqView()
+```
+{% endtab %}
+{% endtabs %}
The result of the `map` is another `IndexedSeqView[Int]` value. This is in essence a wrapper that *records* the fact that a `map` with function `(_ + 1)` needs to be applied on the vector `v`. It does not apply that map until the view is forced, however. Let's now apply the second `map` to the last result.
- scala> res13 map (_ * 2)
- res14: scala.collection.IndexedSeqView[Int] = IndexedSeqView()
+{% tabs views_6 %}
+{% tab 'Scala 2 and 3' for=views_6 %}
+
+```scala
+scala> res13.map(_ * 2)
+val res14: scala.collection.IndexedSeqView[Int] = IndexedSeqView()
+```
+
+{% endtab %}
+{% endtabs %}
Finally, forcing the last result gives:
- scala> res14.to(Vector)
- res15: scala.collection.immutable.Vector[Int] =
- Vector(4, 6, 8, 10, 12, 14, 16, 18, 20, 22)
+{% tabs views_7 %}
+{% tab 'Scala 2 and 3' for=views_7 %}
+
+```scala
+scala> res14.to(Vector)
+val res15: scala.collection.immutable.Vector[Int] =
+ Vector(4, 6, 8, 10, 12, 14, 16, 18, 20, 22)
+```
+
+{% endtab %}
+{% endtabs %}
Both stored functions get applied as part of the execution of the `to` operation and a new vector is constructed. That way, no intermediate data structure is needed.
@@ -84,16 +157,36 @@ These operations are documented as “always forcing the collection elements”.
The main reason for using views is performance. You have seen that by switching a collection to a view the construction of intermediate results can be avoided. These savings can be quite important. As another example, consider the problem of finding the first palindrome in a list of words. A palindrome is a word which reads backwards the same as forwards. Here are the necessary definitions:
- def isPalindrome(x: String) = x == x.reverse
- def findPalindrome(s: Seq[String]) = s find isPalindrome
+{% tabs views_8 %}
+{% tab 'Scala 2 and 3' for=views_8 %}
+
+```scala
+def isPalindrome(x: String) = x == x.reverse
+def findPalindrome(s: Seq[String]) = s.find(isPalindrome)
+```
+
+{% endtab %}
+{% endtabs %}
-Now, assume you have a very long sequence words and you want to find a palindrome in the first million words of that sequence. Can you re-use the definition of `findPalindrome`? Of course, you could write:
+Now, assume you have a very long sequence of words, and you want to find a palindrome in the first million words of that sequence. Can you re-use the definition of `findPalindrome`? Of course, you could write:
- findPalindrome(words take 1000000)
+{% tabs views_9 %}
+{% tab 'Scala 2 and 3' for=views_9 %}
+```scala
+val palindromes = findPalindrome(words.take(1000000))
+```
+{% endtab %}
+{% endtabs %}
This nicely separates the two aspects of taking the first million words of a sequence and finding a palindrome in it. But the downside is that it always constructs an intermediary sequence consisting of one million words, even if the first word of that sequence is already a palindrome. So potentially, 999'999 words are copied into the intermediary result without being inspected at all afterwards. Many programmers would give up here and write their own specialized version of finding palindromes in some given prefix of an argument sequence. But with views, you don't have to. Simply write:
- findPalindrome(words.view take 1000000)
+{% tabs views_10 %}
+{% tab 'Scala 2 and 3' for=views_10 %}
+```scala
+val palindromes = findPalindrome(words.view.take(1000000))
+```
+{% endtab %}
+{% endtabs %}
This has the same nice separation of concerns, but instead of a sequence of a million elements it will only construct a single lightweight view object. This way, you do not need to choose between performance and modularity.
@@ -101,16 +194,50 @@ After having seen all these nifty uses of views you might wonder why have strict
Here's an example which bit a few users of versions of Scala before 2.8. In these versions the `Range` type was lazy, so it behaved in effect like a view. People were trying to create a number of actors like this:
- val actors = for (i <- 1 to 10) yield actor { ... }
+{% tabs views_11 class=tabs-scala-version %}
+{% tab 'Scala 2' for=views_11 %}
+```scala
+val actors = for (i <- 1 to 10) yield actor { ... }
+```
+{% endtab %}
+{% tab 'Scala 3' for=views_11 %}
+```scala
+val actors = for i <- 1 to 10 yield actor { ... }
+```
+{% endtab %}
+{% endtabs %}
They were surprised that none of the actors was executing afterwards, even though the actor method should create and start an actor from the code that's enclosed in the braces following it. To explain why nothing happened, remember that the for expression above is equivalent to an application of map:
- val actors = (1 to 10) map (i => actor { ... })
+{% tabs views_12 %}
+{% tab 'Scala 2 and 3' for=views_12 %}
+
+```scala
+val actors = (1 to 10).map(i => actor { ... })
+```
+
+{% endtab %}
+{% endtabs %}
Since previously the range produced by `(1 to 10)` behaved like a view, the result of the map was again a view. That is, no element was computed, and, consequently, no actor was created! Actors would have been created by forcing the range of the whole expression, but it's far from obvious that this is what was required to make the actors do their work.
To avoid surprises like this, the current Scala collections library has more regular rules. All collections except lazy lists and views are strict. The only way to go from a strict to a lazy collection is via the `view` method. The only way to go back is via `to`. So the `actors` definition above would now behave as expected in that it would create and start 10 actors. To get back the surprising previous behavior, you'd have to add an explicit `view` method call:
- val actors = for (i <- (1 to 10).view) yield actor { ... }
+{% tabs views_13 class=tabs-scala-version %}
+{% tab 'Scala 2' for=views_13 %}
+
+```scala
+val actors = for (i <- (1 to 10).view) yield actor { ... }
+```
+
+{% endtab %}
+{% tab 'Scala 3' for=views_13 %}
+
+```scala
+val actors = for i <- (1 to 10).view yield actor { ... }
+```
+
+{% endtab %}
+{% endtabs %}
In summary, views are a powerful tool to reconcile concerns of efficiency with concerns of modularity. But in order not to be entangled in aspects of delayed evaluation, you should restrict views to purely functional code where collection transformations do not have side effects. What's best avoided is a mixture of views and operations that create new collections while also having side effects.
diff --git a/_overviews/collections/arrays.md b/_overviews/collections/arrays.md
index 019ac91248..637806b014 100644
--- a/_overviews/collections/arrays.md
+++ b/_overviews/collections/arrays.md
@@ -24,7 +24,7 @@ permalink: /overviews/collections/:title.html
Given that Scala arrays are represented just like Java arrays, how can these additional features be supported in Scala? In fact, the answer to this question differs between Scala 2.8 and earlier versions. Previously, the Scala compiler somewhat "magically" wrapped and unwrapped arrays to and from `Seq` objects when required in a process called boxing and unboxing. The details of this were quite complicated, in particular when one created a new array of generic type `Array[T]`. There were some puzzling corner cases and the performance of array operations was not all that predictable.
-The Scala 2.8 design is much simpler. Almost all compiler magic is gone. Instead the Scala 2.8 array implementation makes systematic use of implicit conversions. In Scala 2.8 an array does not pretend to _be_ a sequence. It can't really be that because the data type representation of a native array is not a subtype of `Seq`. Instead there is an implicit "wrapping" conversion between arrays and instances of class `scala.collection.mutable.WrappedArray`, which is a subclass of `Seq`. Here you see it in action:
+The Scala 2.8 design is much simpler. Almost all compiler magic is gone. Instead, the Scala 2.8 array implementation makes systematic use of implicit conversions. In Scala 2.8 an array does not pretend to _be_ a sequence. It can't really be that because the data type representation of a native array is not a subtype of `Seq`. Instead, there is an implicit "wrapping" conversion between arrays and instances of class `scala.collection.mutable.WrappedArray`, which is a subclass of `Seq`. Here you see it in action:
scala> val seq: Seq[Int] = a1
seq: Seq[Int] = WrappedArray(1, 2, 3)
@@ -60,9 +60,9 @@ The `ArrayOps` object gets inserted automatically by the implicit conversion. So
scala> intArrayOps(a1).reverse
res5: Array[Int] = Array(3, 2, 1)
-where `intArrayOps` is the implicit conversion that was inserted previously. This raises the question how the compiler picked `intArrayOps` over the other implicit conversion to `WrappedArray` in the line above. After all, both conversions map an array to a type that supports a reverse method, which is what the input specified. The answer to that question is that the two implicit conversions are prioritized. The `ArrayOps` conversion has a higher priority than the `WrappedArray` conversion. The first is defined in the `Predef` object whereas the second is defined in a class `scala.LowPriorityImplicits`, which is inherited by `Predef`. Implicits in subclasses and subobjects take precedence over implicits in base classes. So if both conversions are applicable, the one in `Predef` is chosen. A very similar scheme works for strings.
+where `intArrayOps` is the implicit conversion that was inserted previously. This raises the question of how the compiler picked `intArrayOps` over the other implicit conversion to `WrappedArray` in the line above. After all, both conversions map an array to a type that supports a reverse method, which is what the input specified. The answer to that question is that the two implicit conversions are prioritized. The `ArrayOps` conversion has a higher priority than the `WrappedArray` conversion. The first is defined in the `Predef` object whereas the second is defined in a class `scala.LowPriorityImplicits`, which is inherited by `Predef`. Implicits in subclasses and subobjects take precedence over implicits in base classes. So if both conversions are applicable, the one in `Predef` is chosen. A very similar scheme works for strings.
-So now you know how arrays can be compatible with sequences and how they can support all sequence operations. What about genericity? In Java you cannot write a `T[]` where `T` is a type parameter. How then is Scala's `Array[T]` represented? In fact a generic array like `Array[T]` could be at run-time any of Java's eight primitive array types `byte[]`, `short[]`, `char[]`, `int[]`, `long[]`, `float[]`, `double[]`, `boolean[]`, or it could be an array of objects. The only common run-time type encompassing all of these types is `AnyRef` (or, equivalently `java.lang.Object`), so that's the type to which the Scala compiler maps `Array[T]`. At run-time, when an element of an array of type `Array[T]` is accessed or updated there is a sequence of type tests that determine the actual array type, followed by the correct array operation on the Java array. These type tests slow down array operations somewhat. You can expect accesses to generic arrays to be three to four times slower than accesses to primitive or object arrays. This means that if you need maximal performance, you should prefer concrete over generic arrays. Representing the generic array type is not enough, however, there must also be a way to create generic arrays. This is an even harder problem, which requires a little bit of help from you. To illustrate the problem, consider the following attempt to write a generic method that creates an array.
+So now you know how arrays can be compatible with sequences and how they can support all sequence operations. What about genericity? In Java, you cannot write a `T[]` where `T` is a type parameter. How then is Scala's `Array[T]` represented? In fact a generic array like `Array[T]` could be at run-time any of Java's eight primitive array types `byte[]`, `short[]`, `char[]`, `int[]`, `long[]`, `float[]`, `double[]`, `boolean[]`, or it could be an array of objects. The only common run-time type encompassing all of these types is `AnyRef` (or, equivalently `java.lang.Object`), so that's the type to which the Scala compiler maps `Array[T]`. At run-time, when an element of an array of type `Array[T]` is accessed or updated there is a sequence of type tests that determine the actual array type, followed by the correct array operation on the Java array. These type tests slow down array operations somewhat. You can expect accesses to generic arrays to be three to four times slower than accesses to primitive or object arrays. This means that if you need maximal performance, you should prefer concrete to generic arrays. Representing the generic array type is not enough, however; there must also be a way to create generic arrays. This is an even harder problem, which requires a little help from you. To illustrate the issue, consider the following attempt to write a generic method that creates an array.
// this is wrong!
def evenElems[T](xs: Vector[T]): Array[T] = {
@@ -72,7 +72,7 @@ So now you know how arrays can be compatible with sequences and how they can sup
arr
}
-The `evenElems` method returns a new array that consist of all elements of the argument vector `xs` which are at even positions in the vector. The first line of the body of `evenElems` creates the result array, which has the same element type as the argument. So depending on the actual type parameter for `T`, this could be an `Array[Int]`, or an `Array[Boolean]`, or an array of some of the other primitive types in Java, or an array of some reference type. But these types have all different runtime representations, so how is the Scala runtime going to pick the correct one? In fact, it can't do that based on the information it is given, because the actual type that corresponds to the type parameter `T` is erased at runtime. That's why you will get the following error message if you compile the code above:
+The `evenElems` method returns a new array that consists of all elements of the argument vector `xs` which are at even positions in the vector. The first line of the body of `evenElems` creates the result array, which has the same element type as the argument. So depending on the actual type parameter for `T`, this could be an `Array[Int]`, or an `Array[Boolean]`, or an array of some other primitive types in Java, or an array of some reference type. But these types all have different runtime representations, so how is the Scala runtime going to pick the correct one? In fact, it can't do that based on the information it is given, because the actual type that corresponds to the type parameter `T` is erased at runtime. That's why you will get the following error message if you compile the code above:
error: cannot find class manifest for element type T
val arr = new Array[T]((arr.length + 1) / 2)
diff --git a/_overviews/collections/concrete-immutable-collection-classes.md b/_overviews/collections/concrete-immutable-collection-classes.md
index 95a76570d1..6324128e48 100644
--- a/_overviews/collections/concrete-immutable-collection-classes.md
+++ b/_overviews/collections/concrete-immutable-collection-classes.md
@@ -19,7 +19,7 @@ A [List](https://www.scala-lang.org/api/{{ site.scala-212-version }}/scala/colle
Lists have always been the workhorse for Scala programming, so not much needs to be said about them here. The major change in 2.8 is that the `List` class together with its subclass `::` and its subobject `Nil` is now defined in package `scala.collection.immutable`, where it logically belongs. There are still aliases for `List`, `Nil`, and `::` in the `scala` package, so from a user perspective, lists can be accessed as before.
-Another change is that lists now integrate more closely into the collections framework, and are less of a special case than before. For instance all of the numerous methods that originally lived in the `List` companion object have been deprecated. They are replaced by the [uniform creation methods]({{ site.baseurl }}/overviews/collections/creating-collections-from-scratch.html) inherited by every collection.
+Another change is that lists now integrate more closely into the collections framework, and are less of a special case than before. For instance, all the numerous methods that originally lived in the `List` companion object have been deprecated. They are replaced by the [uniform creation methods]({{ site.baseurl }}/overviews/collections/creating-collections-from-scratch.html) inherited by every collection.
## Streams
diff --git a/_overviews/collections/concrete-mutable-collection-classes.md b/_overviews/collections/concrete-mutable-collection-classes.md
index bc7bf02567..108b531c9a 100644
--- a/_overviews/collections/concrete-mutable-collection-classes.md
+++ b/_overviews/collections/concrete-mutable-collection-classes.md
@@ -54,7 +54,7 @@ Just like an array buffer is useful for building arrays, and a list buffer is us
## Linked Lists
-Linked lists are mutable sequences that consist of nodes which are linked with next pointers. They are supported by class [LinkedList](https://www.scala-lang.org/api/{{ site.scala-212-version }}/scala/collection/mutable/LinkedList.html). In most languages `null` would be picked as the empty linked list. That does not work for Scala collections, because even empty sequences must support all sequence methods. In particular `LinkedList.empty.isEmpty` should return `true` and not throw a `NullPointerException`. Empty linked lists are encoded instead in a special way: Their `next` field points back to the node itself. Like their immutable cousins, linked lists are best traversed sequentially. In addition linked lists make it easy to insert an element or linked list into another linked list.
+Linked lists are mutable sequences that consist of nodes which are linked with next pointers. They are supported by class [LinkedList](https://www.scala-lang.org/api/{{ site.scala-212-version }}/scala/collection/mutable/LinkedList.html). In most languages `null` would be picked as the empty linked list. That does not work for Scala collections, because even empty sequences must support all sequence methods. In particular, `LinkedList.empty.isEmpty` should return `true` and not throw a `NullPointerException`. Empty linked lists are encoded instead in a special way: Their `next` field points back to the node itself. Like their immutable cousins, linked lists are best traversed sequentially. In addition, linked lists make it easy to insert an element or linked list into another linked list.
## Double Linked Lists
@@ -85,7 +85,7 @@ Scala provides mutable queues in addition to immutable ones. You use a `mQueue`
Array sequences are mutable sequences of fixed size which store their elements internally in an `Array[Object]`. They are implemented in Scala by class [ArraySeq](https://www.scala-lang.org/api/{{ site.scala-212-version }}/scala/collection/mutable/ArraySeq.html).
-You would typically use an `ArraySeq` if you want an array for its performance characteristics, but you also want to create generic instances of the sequence where you do not know the type of the elements and you do not have a `ClassTag` to provide it at run-time. These issues are explained in the section on [arrays]({{ site.baseurl }}/overviews/collections/arrays.html).
+You would typically use an `ArraySeq` if you want an array for its performance characteristics, but you also want to create generic instances of the sequence where you do not know the type of the elements, and you do not have a `ClassTag` to provide it at run-time. These issues are explained in the section on [arrays]({{ site.baseurl }}/overviews/collections/arrays.html).
## Stacks
diff --git a/_overviews/collections/creating-collections-from-scratch.md b/_overviews/collections/creating-collections-from-scratch.md
index a7c1a7ff5b..2468bf9e27 100644
--- a/_overviews/collections/creating-collections-from-scratch.md
+++ b/_overviews/collections/creating-collections-from-scratch.md
@@ -40,7 +40,7 @@ Besides `apply`, every collection companion object also defines a member `empty`
Descendants of `Seq` classes also provide other factory operations in their companion objects. These are summarized in the following table. In short, there's
* `concat`, which concatenates an arbitrary number of traversables together,
-* `fill` and `tabulate`, which generate single or multi-dimensional sequences of given dimensions initialized by some expression or tabulating function,
+* `fill` and `tabulate`, which generate single or multidimensional sequences of given dimensions initialized by some expression or tabulating function,
* `range`, which generates integer sequences with some constant step length, and
* `iterate`, which generates the sequence resulting from repeated application of a function to a start element.
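+
+As a quick illustration, here is a small sketch of these factory methods in action (the values are made up for the example):
+
+```scala
+Seq.fill(3)("a")              // List(a, a, a)
+Seq.tabulate(4)(n => n * n)   // List(0, 1, 4, 9)
+Seq.range(0, 10, 2)           // List(0, 2, 4, 6, 8)
+Seq.iterate(1, 5)(_ * 2)      // List(1, 2, 4, 8, 16)
+Seq.concat(Seq(1, 2), Seq(3)) // List(1, 2, 3)
+```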
diff --git a/_overviews/collections/equality.md b/_overviews/collections/equality.md
index c949d7aac5..bb9abc6f06 100644
--- a/_overviews/collections/equality.md
+++ b/_overviews/collections/equality.md
@@ -13,7 +13,7 @@ permalink: /overviews/collections/:title.html
The collection libraries have a uniform approach to equality and hashing. The idea is, first, to divide collections into sets, maps, and sequences. Collections in different categories are always unequal. For instance, `Set(1, 2, 3)` is unequal to `List(1, 2, 3)` even though they contain the same elements. On the other hand, within the same category, collections are equal if and only if they have the same elements (for sequences: the same elements in the same order). For example, `List(1, 2, 3) == Vector(1, 2, 3)`, and `HashSet(1, 2) == TreeSet(2, 1)`.
-It does not matter for the equality check whether a collection is mutable or immutable. For a mutable collection one simply considers its current elements at the time the equality test is performed. This means that a mutable collection might be equal to different collections at different times, depending what elements are added or removed. This is a potential trap when using a mutable collection as a key in a hashmap. Example:
+It does not matter for the equality check whether a collection is mutable or immutable. For a mutable collection one simply considers its current elements at the time the equality test is performed. This means that a mutable collection might be equal to different collections at different times, depending on what elements are added or removed. This is a potential trap when using a mutable collection as a key in a hashmap. Example:
scala> import collection.mutable.{HashMap, ArrayBuffer}
import collection.mutable.{HashMap, ArrayBuffer}
diff --git a/_overviews/collections/introduction.md b/_overviews/collections/introduction.md
index d61806d127..5fc2e3f301 100644
--- a/_overviews/collections/introduction.md
+++ b/_overviews/collections/introduction.md
@@ -55,7 +55,7 @@ lines run at first try.
**Fast:** Collection operations are tuned and optimized in the
libraries. As a result, using collections is typically quite
-efficient. You might be able to do a little bit better with carefully
+efficient. You might be able to do a little better with carefully
hand-tuned data structures and operations, but you might also do a lot
worse by making some suboptimal implementation decisions along the
way. What's more, collections have been recently adapted to parallel
diff --git a/_overviews/collections/iterators.md b/_overviews/collections/iterators.md
index f08e65d5a3..78dfcc69f0 100644
--- a/_overviews/collections/iterators.md
+++ b/_overviews/collections/iterators.md
@@ -26,7 +26,7 @@ As always, for-expressions can be used as an alternate syntax for expressions in
for (elem <- it) println(elem)
-There's an important difference between the foreach method on iterators and the same method on traversable collections: When called on an iterator, `foreach` will leave the iterator at its end when it is done. So calling `next` again on the same iterator will fail with a `NoSuchElementException`. By contrast, when called on a collection, `foreach` leaves the number of elements in the collection unchanged (unless the passed function adds to removes elements, but this is discouraged, because it may lead to surprising results).
+There's an important difference between the foreach method on iterators and the same method on traversable collections: When called on an iterator, `foreach` will leave the iterator at its end when it is done. So calling `next` again on the same iterator will fail with a `NoSuchElementException`. By contrast, when called on a collection, `foreach` leaves the number of elements in the collection unchanged (unless the passed function adds or removes elements, but this is discouraged, because it may lead to surprising results).
The other operations that Iterator has in common with `Traversable` have the same property. For instance, iterators provide a `map` method, which returns a new iterator:
@@ -166,7 +166,7 @@ A lazy operation does not immediately compute all of its results. Instead, it co
So the expression `(1 to 10).iterator.map(println)` would not print anything to the screen. The `map` method in this case doesn't apply its argument function to the values in the range, it returns a new `Iterator` that will do this as each one is requested. Adding `.toList` to the end of that expression will actually print the elements.
-A consequence of this is that a method like `map` or `filter` won't necessarily apply its argument function to all of the input elements. The expression `(1 to 10).iterator.map(println).take(5).toList` would only print the values `1` to `5`, for instance, since those are only ones that will be requested from the `Iterator` returned by `map`.
+A consequence of this is that a method like `map` or `filter` won't necessarily apply its argument function to all the input elements. The expression `(1 to 10).iterator.map(println).take(5).toList` would only print the values `1` to `5`, for instance, since those are the only ones that will be requested from the `Iterator` returned by `map`.
This is one of the reasons why it's important to only use pure functions as arguments to `map`, `filter`, `fold` and similar methods. Remember, a pure function has no side-effects, so one would not normally use `println` in a `map`. `println` is used to demonstrate laziness as it's not normally visible with pure functions.
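+
+The laziness described above can be observed directly; this is just a sketch restating the expressions from this section:
+
+```scala
+val it = (1 to 10).iterator.map(println) // prints nothing yet: map on an iterator is lazy
+it.take(5).toList                        // prints 1 to 5, since only those elements are requested
+```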
diff --git a/_overviews/collections/migrating-from-scala-27.md b/_overviews/collections/migrating-from-scala-27.md
index d621c78899..5e1efc7822 100644
--- a/_overviews/collections/migrating-from-scala-27.md
+++ b/_overviews/collections/migrating-from-scala-27.md
@@ -12,7 +12,7 @@ permalink: /overviews/collections/:title.html
Porting your existing Scala applications to use the new collections should be almost automatic. There are only a couple of possible issues to take care of.
-Generally, the old functionality of Scala 2.7 collections has been left in place. Some features have been deprecated, which means they will removed in some future release. You will get a _deprecation warning_ when you compile code that makes use of these features in Scala 2.8. In a few places deprecation was unfeasible, because the operation in question was retained in 2.8, but changed in meaning or performance characteristics. These cases will be flagged with _migration warnings_ when compiled under 2.8. To get full deprecation and migration warnings with suggestions how to change your code, pass the `-deprecation` and `-Xmigration` flags to `scalac` (note that `-Xmigration` is an extended option, so it starts with an `X`). You can also pass the same options to the `scala` REPL to get the warnings in an interactive session. Example:
+Generally, the old functionality of Scala 2.7 collections has been left in place. Some features have been deprecated, which means they will be removed in some future release. You will get a _deprecation warning_ when you compile code that makes use of these features in Scala 2.8. In a few places deprecation was unfeasible, because the operation in question was retained in 2.8, but changed in meaning or performance characteristics. These cases will be flagged with _migration warnings_ when compiled under 2.8. To get full deprecation and migration warnings with suggestions on how to change your code, pass the `-deprecation` and `-Xmigration` flags to `scalac` (note that `-Xmigration` is an extended option, so it starts with an `X`). You can also pass the same options to the `scala` REPL to get the warnings in an interactive session. Example:
>scala -deprecation -Xmigration
Welcome to Scala version 2.8.0.final
@@ -38,7 +38,7 @@ Generally, the old functionality of Scala 2.7 collections has been left in place
There are two parts of the old libraries which have been replaced wholesale, and for which deprecation warnings were not feasible.
-1. The previous `scala.collection.jcl` package is gone. This package tried to mimick some of the Java collection library design in Scala, but in doing so broke many symmetries. Most people who wanted Java collections bypassed `jcl` and used `java.util` directly. Scala 2.8 offers automatic conversion mechanisms between both collection libraries in the [JavaConversions]({{ site.baseurl }}/overviews/collections/conversions-between-java-and-scala-collections.html) object which replaces the `jcl` package.
+1. The previous `scala.collection.jcl` package is gone. This package tried to mimic aspects of the Java collection library design in Scala, but in doing so broke many symmetries. Most people who wanted Java collections bypassed `jcl` and used `java.util` directly. Scala 2.8 offers automatic conversion mechanisms between both collection libraries in the [JavaConversions]({{ site.baseurl }}/overviews/collections/conversions-between-java-and-scala-collections.html) object which replaces the `jcl` package.
2. Projections have been generalized and cleaned up and are now available as views. It seems that projections were used rarely, so not much code should be affected by this change.
So, if your code uses either `jcl` or projections there might be some minor rewriting to do.
diff --git a/_overviews/collections/trait-iterable.md b/_overviews/collections/trait-iterable.md
index abc8051703..ac72783f41 100644
--- a/_overviews/collections/trait-iterable.md
+++ b/_overviews/collections/trait-iterable.md
@@ -62,6 +62,6 @@ Trait `Iterable` also adds some other methods to `Traversable` that can be imple
In the inheritance hierarchy below Iterable you find three traits: [Seq](https://www.scala-lang.org/api/{{ site.scala-212-version }}/scala/collection/Seq.html), [Set](https://www.scala-lang.org/api/{{ site.scala-212-version }}/scala/collection/Set.html), and [Map](https://www.scala-lang.org/api/{{ site.scala-212-version }}/scala/collection/Map.html). `Seq` and `Map` implement the [PartialFunction](https://www.scala-lang.org/api/{{ site.scala-212-version }}/scala/PartialFunction.html) trait with its `apply` and `isDefinedAt` methods, each implemented differently. `Set` gets its `apply` method from [GenSetLike](https://www.scala-lang.org/api/{{ site.scala-212-version }}/scala/collection/GenSetLike.html).
-For sequences, `apply` is positional indexing, where elements are always numbered from `0`. That is, `Seq(1, 2, 3)(1)` gives `2`. For sets, `apply` is a membership test. For instance, `Set('a', 'b', 'c')('b')` gives `true` whereas `Set()('a')` gives `false`. Finally for maps, `apply` is a selection. For instance, `Map('a' -> 1, 'b' -> 10, 'c' -> 100)('b')` gives `10`.
+For sequences, `apply` is positional indexing, where elements are always numbered from `0`. That is, `Seq(1, 2, 3)(1)` gives `2`. For sets, `apply` is a membership test. For instance, `Set('a', 'b', 'c')('b')` gives `true` whereas `Set()('a')` gives `false`. Finally, for maps, `apply` is a selection. For instance, `Map('a' -> 1, 'b' -> 10, 'c' -> 100)('b')` gives `10`.
In the following, we will explain each of the three kinds of collections in more detail.
diff --git a/_overviews/collections/trait-traversable.md b/_overviews/collections/trait-traversable.md
index 11aaa6b349..d2173cb789 100644
--- a/_overviews/collections/trait-traversable.md
+++ b/_overviews/collections/trait-traversable.md
@@ -25,7 +25,7 @@ The `foreach` method is meant to traverse all elements of the collection, and ap
* **Conversions** `toArray`, `toList`, `toIterable`, `toSeq`, `toIndexedSeq`, `toStream`, `toSet`, `toMap`, which turn a `Traversable` collection into something more specific. All these conversions return their receiver argument unchanged if the run-time type of the collection already matches the demanded collection type. For instance, applying `toList` to a list will yield the list itself.
* **Copying operations** `copyToBuffer` and `copyToArray`. As their names imply, these copy collection elements to a buffer or array, respectively.
* **Size info** operations `isEmpty`, `nonEmpty`, `size`, and `hasDefiniteSize`: Traversable collections can be finite or infinite. An example of an infinite traversable collection is the stream of natural numbers `Stream.from(0)`. The method `hasDefiniteSize` indicates whether a collection is possibly infinite. If `hasDefiniteSize` returns true, the collection is certainly finite. If it returns false, the collection has not been fully elaborated yet, so it might be infinite or finite.
-* **Element retrieval** operations `head`, `last`, `headOption`, `lastOption`, and `find`. These select the first or last element of a collection, or else the first element matching a condition. Note, however, that not all collections have a well-defined meaning of what "first" and "last" means. For instance, a hash set might store elements according to their hash keys, which might change from run to run. In that case, the "first" element of a hash set could also be different for every run of a program. A collection is _ordered_ if it always yields its elements in the same order. Most collections are ordered, but some (_e.g._ hash sets) are not-- dropping the ordering gives a little bit of extra efficiency. Ordering is often essential to give reproducible tests and to help in debugging. That's why Scala collections give ordered alternatives for all collection types. For instance, the ordered alternative for `HashSet` is `LinkedHashSet`.
+* **Element retrieval** operations `head`, `last`, `headOption`, `lastOption`, and `find`. These select the first or last element of a collection, or else the first element matching a condition. Note, however, that not all collections have a well-defined meaning of what "first" and "last" mean. For instance, a hash set might store elements according to their hash keys, which might change from run to run. In that case, the "first" element of a hash set could also be different for every run of a program. A collection is _ordered_ if it always yields its elements in the same order. Most collections are ordered, but some (_e.g._ hash sets) are not; dropping the ordering gives a little extra efficiency. Ordering is often essential to give reproducible tests and to help in debugging. That's why Scala collections give ordered alternatives for all collection types. For instance, the ordered alternative for `HashSet` is `LinkedHashSet`.
* **Sub-collection retrieval operations** `tail`, `init`, `slice`, `take`, `drop`, `takeWhile`, `dropWhile`, `filter`, `filterNot`, `withFilter`. These all return some sub-collection identified by an index range or some predicate.
* **Subdivision operations** `splitAt`, `span`, `partition`, `groupBy`, which split the elements of this collection into several sub-collections.
* **Element tests** `exists`, `forall`, `count` which test collection elements with a given predicate.
diff --git a/_overviews/collections/views.md b/_overviews/collections/views.md
index dd3c128657..1798d77cf4 100644
--- a/_overviews/collections/views.md
+++ b/_overviews/collections/views.md
@@ -73,7 +73,7 @@ There are two reasons why you might want to consider using views. The first is p
def isPalindrome(x: String) = x == x.reverse
def findPalindrome(s: Seq[String]) = s find isPalindrome
-Now, assume you have a very long sequence words and you want to find a palindrome in the first million words of that sequence. Can you re-use the definition of `findPalindrome`? Of course, you could write:
+Now, assume you have a very long sequence of words, and you want to find a palindrome in the first million words of that sequence. Can you re-use the definition of `findPalindrome`? Of course, you could write:
findPalindrome(words take 1000000)
diff --git a/_overviews/compiler-options/errors.md b/_overviews/compiler-options/errors.md
new file mode 100644
index 0000000000..8128ef96ae
--- /dev/null
+++ b/_overviews/compiler-options/errors.md
@@ -0,0 +1,110 @@
+---
+layout: singlepage-overview
+title: Error Formatting
+---
+
+# Introduction
+
+An advanced mechanism for formatting type errors and inspecting missing
+implicits has been introduced in Scala 2.13.6.
+It is based on the compiler plugin [splain](https://github.com/tek/splain).
+
+This tool abstracts several classes of compiler errors with simple data types
+that can be processed by a few built-in routines as well as
+[user-provided analyzer plugins](/overviews/plugins/index.html).
+
+The most significant feature is the illustration of chains of implicit instances
+that allows a user to determine the root cause of an implicit error:
+
+
+
+# Basic Configuration
+
+* `-Vimplicits` enables printing of implicit chains
+* `-Vtype-diffs` enables colored diffs for found/required errors
+
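+For example, in an sbt build these flags can be added to `scalacOptions`. This is a minimal sketch; the comments simply paraphrase the descriptions above.
+
+```scala
+// build.sbt (sketch): enable the enhanced error formatting (Scala 2.13.6+)
+scalacOptions ++= Seq(
+  "-Vimplicits",  // print chains of implicit instances on implicit errors
+  "-Vtype-diffs"  // show found/required errors as a colored diff
+)
+```
+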
+## Additional Configuration
+
+`-Vimplicits-verbose-tree` shows the implicits between the error site and the
+root cause; see [Implicit Resolution Chains](#implicit-resolution-chains).
+
+`-Vimplicits-max-refined` reduces the verbosity of refined types; see
+[Truncating Refined Types](#truncating-refined-types).
+
+# Features
+
+The error formatting engine provides the following enhancements:
+
+## Infix Types
+
+Instead of `shapeless.::[A, HNil]`, prints `A :: HNil`.
+
+## Found/Required Types
+
+Rather than printing up to four types, only the dealiased types are shown as a colored diff:
+
+
+
+## Implicit Resolution Chains
+
+When an implicit is not found, only the outermost error at the invocation point is printed by the regular error
+reporter.
+Previously, the flag `-Xlog-implicits` caused the compiler to print all information about processed implicits, but the
+output was highly verbose and contained all invalid implicits for parameters that have been resolved successfully.
+The flag has been renamed to `-Vimplicits` and prints a compact list of all involved implicit instances.
+`-Xlog-implicits` will continue to work as a deprecated alias.
+
+
+
+Here, `!I` stands for *could not find implicit value*, the name of the implicit
+parameter is in yellow, and its type in green.
+
+If the parameter `-Vimplicits-verbose-tree` is given, all intermediate implicits will be
+printed, potentially spanning tens of lines.
+An example of this is the circe error at the top of the page.
+
+For comparison, this is the regular compiler output for this case:
+
+```
+[error] /path/Example.scala:20:5: could not find implicit value for parameter a: io.circe.Decoder[A]
+[error] A.fun
+[error] ^
+```
+
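+A message of this shape can be reproduced with a small self-contained sketch like the following; the `Decoder` trait and the names `A` and `fun` are illustrative stand-ins (not the circe API), chosen so that no instance is in scope:
+
+```scala
+trait Decoder[T] // no instance of Decoder[A] is defined anywhere
+
+class A
+
+object A {
+  def fun(implicit a: Decoder[A]): Unit = ()
+}
+
+object Example {
+  A.fun // error: could not find implicit value for parameter a: Decoder[A]
+}
+```
+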
+## Infix Type and Type Argument Line Breaking
+
+Types longer than 79 characters will be split into multiple lines:
+
+```
+implicit error;
+!I e: String
+f invalid because
+!I impPar4: List[
+ (
+ VeryLongTypeName ::::
+ VeryLongTypeName ::::
+ VeryLongTypeName ::::
+ VeryLongTypeName
+ )
+ ::::
+ (Short :::: Short) ::::
+ (
+ VeryLongTypeName ::::
+ VeryLongTypeName ::::
+ VeryLongTypeName ::::
+ VeryLongTypeName
+ )
+ ::::
+ VeryLongTypeName ::::
+ VeryLongTypeName ::::
+ VeryLongTypeName ::::
+ VeryLongTypeName
+]
+```
+
+## Truncating Refined Types
+
+Refined types, like `T { type A = X; type B = Y }`, can get rather long and clutter up error messages.
+The option `-Vimplicits-max-refined` controls how many characters the refinement may take up before it gets displayed as
+`T {...}`.
+The default is to display the unabridged type.
diff --git a/_overviews/compiler-options/index.md b/_overviews/compiler-options/index.md
index 34c1817b29..c4fd52f010 100644
--- a/_overviews/compiler-options/index.md
+++ b/_overviews/compiler-options/index.md
@@ -25,17 +25,13 @@ title: Scala Compiler Options
## Introduction
-Scala compiler `scalac` offers various **compiler options**, also referred to as **compiler flags**, to change how to compile your program.
+The Scala compiler `scalac` offers various **compiler options**, or **flags**, that change the compiler's default behavior. Some options just generate more compiler output in the form of diagnostics or warnings, while others change the result of compilation.
-Nowadays, most people are not running `scalac` from the command line.
-Instead, they use sbt, an IDE, and other tools as their interface to the compiler.
-Therefore they may not even have `scalac` installed, and won't think to do `man scalac`.
+The Scala command `scala`, which runs scripts or compiled code, accepts the same options as the `scalac` compiler, plus a few more that determine how to run a program.
-This page comes to the rescue for the people to find…
-
-* What compiler options `scalac` offers
-* How to use compiler options
+Options may be specified on the command line to `scalac` or in the configuration of a build tool or IDE.
+The Scala distribution includes a `man` page. If Scala is installed as a system command, that documentation may be available from `man scalac`.
## How to use compiler options
@@ -44,34 +40,47 @@ This page comes to the rescue for the people to find…
```bash
scalac [ <options> ] <source files>
```
+Boolean flags are specified in the usual way:
+
+`scalac -Werror -Xlint Hello.scala`
+
+Options that require arguments use "colon" syntax:
+
+`scalac -Vprint:parser,typer`
-E.g. `scalac -encoding utf8 -Xfatal-warnings Hello.scala`
+Options that take just a single argument accept traditional syntax:
-Default paths can be listed by running a command line tool:
+`scalac -d /tmp`
+
+Conventionally, options have a prefix `-V` if they show "verbose" output;
+`-W` to manage warnings; `-X` for extended options that modify tool behavior;
+`-Y` for private options with limited support, where `Y` may suggest forking behavior.
+Several options have historical aliases, such as `-Xfatal-warnings` for `-Werror`.
+
+In Scala 2, default paths can be listed by running a tool in the distribution:
```
scala scala.tools.util.PathResolver [ <options> ]
```
-
-
+That can help debug errors in options such as `--classpath`.
### Use compiler options with sbt
-
+Here is a typical configuration of the `scalacOptions` setting in `sbt`:
```scala
-scalacOptions ++= Seq(
- "-encoding", "utf8", // Option and arguments on same line
- "-Xfatal-warnings", // New lines for each options
- "-deprecation",
- "-unchecked",
+scalacOptions ++= Seq( // use ++= to add to existing options
+ "-encoding", "utf8", // if an option takes an arg, supply it on the same line
+ "-feature", // then put the next option on a new line for easy editing
"-language:implicitConversions",
- "-language:higherKinds",
"-language:existentials",
- "-language:postfixOps"
-)
+ "-unchecked",
+ "-Werror",
+ "-Xlint", // exploit "trailing comma" syntax so you can add an option without editing this line
+) // for "trailing comma", the closing paren must be on the next line
```
+The convention is always to append to the setting with `++=` and to supply one option per line.
-
+Normally the last option will have a trailing comma so that `git diff` is a bit cleaner when options are added.
{% for category in site.data.compiler-options %}
{{ category.category }}
@@ -116,73 +125,95 @@ scalacOptions ++= Seq(
{% endfor %}
+### Targeting a version of the JVM
+
+Applications or libraries targeting the JVM may wish to specify a target version.
+
+The `-release` option specifies the target version, such as "8" or "18".
+
+Like the option for `javac`, it allows building against an earlier version of the JDK. It will compile against the API for that version and also output class files for that version.
+
+The deprecated option `-target` does not compile against the desired API, but only specifies a target class file format.
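+
+For instance, in an sbt build the option and its argument can be supplied as two entries, in the same style as the `-encoding` example above (the target "8" here is just an example):
+
+```scala
+// target the JDK 8 API and class file format
+scalacOptions ++= Seq("-release", "8")
+```
+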
## Additional resources
### Compilation Phases
diff --git a/_overviews/compiler-options/optimizer.md b/_overviews/compiler-options/optimizer.md
new file mode 100644
index 0000000000..5f35867bb5
--- /dev/null
+++ b/_overviews/compiler-options/optimizer.md
@@ -0,0 +1,223 @@
+---
+layout: singlepage-overview
+title: Optimizer
+---
+
+**[Lukas Rytz](https://github.com/lrytz) (2018)**
+
+**[Andrew Marki](https://github.com/som-snytt) (2022)**
+
+# The Scala 2.12 / 2.13 Inliner and Optimizer
+
+## In Brief
+
+- The Scala compiler has a compile-time optimizer that is available in versions 2.12 and 2.13, but not yet in Scala 3.
+- Don't enable the optimizer during development: it breaks incremental compilation, and it makes the compiler slower. Only enable it for testing, on CI, and to build releases.
+- Enable method-local optimizations with `-opt:local`. This option is safe for binary compatibility, but typically doesn't improve performance on its own.
+- Enable inlining in addition to method-local optimizations with `-opt:inline:[PATTERN]`.
+ - Don't inline from your dependencies when publishing a library, it breaks binary compatibility. Use `-opt:inline:my.package.**` to only inline from packages within your library.
+ - When compiling an application with global inlining (`-opt:inline:**`), ensure that the run-time classpath is **exactly the same** as the compile-time classpath.
+- The `@inline` annotation only has an effect if the inliner is enabled. It tells the inliner to always try to inline the annotated method or callsite.
+- Without the `@inline` annotation, the inliner generally inlines higher-order methods and forwarder methods. The main goal is to eliminate megamorphic callsites due to functions passed as argument, and to eliminate value boxing. Other optimizations are delegated to the JVM.
+
+Read on for the details.
+
+## Intro
+
+The Scala compiler has included an inliner since version 2.0. Closure elimination and dead code elimination were added in 2.1. That was the first Scala optimizer, written and maintained by [Iulian Dragos](https://github.com/dragos). He continued to improve these features over time and consolidated them under the `-optimise` flag (later Americanized to `-optimize`), which remained available through Scala 2.11.
+
+The optimizer was re-written for Scala 2.12 to become more reliable and powerful – and to side-step the spelling issue by calling the new flag `-opt`. This post describes how to use the optimizer in Scala 2.12 and 2.13: what it does, how it works, and what its limitations are.
+
+The options were simplified for 2.13.9. This page uses the simplified forms.
+
+## Motivation
+
+Why does the Scala compiler even have a JVM bytecode optimizer? The JVM is a highly optimized runtime with a just-in-time (JIT) compiler that benefits from over two decades of tuning. It's because there are certain well-known code patterns that the JVM fails to optimize properly. These patterns are common in functional languages such as Scala. (Increasingly, Java code with lambdas is catching up and showing the same performance issues at run-time.)
+
+The two most important such patterns are "megamorphic dispatch" (also called "the inlining problem") and value boxing. If you'd like to learn more about these problems in the context of Scala, you could watch the part of [my Scala Days 2015 talk (starting at 26:13)](https://youtu.be/Ic4vQJcYwsU?t=1573).
+
+The goal of the Scala optimizer is to produce bytecode that the JVM can execute fast. It is also a goal to avoid performing any optimizations that the JVM can already do well.
+
+This means that the Scala optimizer may become obsolete in the future, if the JIT compiler is improved to handle these patterns better. In fact, with the arrival of GraalVM, that future might be nearer than you think! But for now, we dive into some details about the Scala optimizer.
+
+## Constraints and assumptions
+
+The Scala optimizer has to make its improvements within fairly narrow constraints:
+
+- The optimizer only changes method bodies, but never signatures of classes or methods. The generated bytecode has the same (binary) interface, whether or not the optimizer is enabled.
+- We don't assume the whole program (all user code plus all of its dependencies, that together make up an application) is known when running the optimizer. There may be classes on the run-time classpath that we don't see at compile-time: we may be compiling a library, or only a component of an application. This means that:
+ - Every non-final method can potentially be overridden, even if at compile-time there are no classes that define such an override
+ - Consequently, we can only inline methods that can be resolved at compile-time: final methods, methods in `object`s, and methods where the receiver's type is precisely known (for example, in `(new A).f`, the receiver is known to be exactly `A`, not a subtype of `A`).
+- The optimizer does not break applications that use reflection. This follows from the two points above: changes to classes could be observed by reflection, and additional classes could be loaded and instantiated dynamically.
+
+However, even when staying within these constraints, some changes performed by the optimizer can be observed at run-time:
+
+- Inlined methods disappear from call stacks.
+
+ - This can lead to unexpected behaviors when using a debugger.
+ - Related: line numbers (stored in bytecode) are discarded when a method is inlined into a different classfile, which also impacts debugging experience. (This [could be improved](https://github.com/scala/scala-dev/issues/3) and is expected to [progress](https://github.com/scala/scala3/pull/11492).)
+
+- Inlining a method can delay class loading of the class where the method is defined.
+
+- The optimizer assumes that modules (singletons like `object O`) are never `null`.
+ - This assumption can be false if the module is loaded in its superclass. The following example throws a `NullPointerException` when compiled normally, but prints `0` when compiled with the optimizer enabled:
+
+ ```scala
+ class A {
+ println(Test.f)
+ }
+ object Test extends A {
+ @inline def f = 0
+ def main(args: Array[String]): Unit = ()
+ }
+ ```
+
+ - This assumption can be disabled with `-opt:-assume-modules-non-null`, which results in additional null checks in optimized code.
+
+- The optimizer removes unnecessary loads of certain built-in modules, for example `scala.Predef` and `scala.runtime.ScalaRunTime`. This means that initialization (construction) of these modules can be skipped or delayed.
+
+ - For example, in `def f = 1 -> ""`, the method `Predef.->` is inlined and the access to `Predef` is eliminated. The resulting code is `def f = new Tuple2(1, "")`.
+ - This assumption can be disabled with `-opt:-allow-skip-core-module-init`
+
+- The optimizer eliminates unused `C.getClass` calls, which may delay class loading. This can be disabled with `-opt:-allow-skip-class-loading`.
+
+## Binary compatibility
+
+Scala minor releases are binary compatible with each other, for example, 2.12.6 and 2.12.7. The same is true for many libraries in the Scala ecosystem. These binary compatibility promises are the main reason not to enable the Scala optimizer everywhere.
+
+The reason is that inlining a method from one class into another changes the (binary) interface that is accessed:
+
+```scala
+class C {
+ private[this] var x = 0
+ @inline final def inc(): Int = { x += 1; x }
+}
+```
+
+When inlining a callsite `c.inc()`, the resulting code no longer calls `inc`, but instead accesses the field `x` directly. Since that field is private (also in bytecode), inlining `inc` is only allowed within the class `C` itself. Trying to access `x` from any other class would cause an `IllegalAccessError` at run-time.
+
+However, there are many cases where implementation details in Scala source code become public in bytecode:
+
+```scala
+class C {
+ private def x = 0
+ @inline final def m: Int = x
+}
+object C {
+ def t(c: C) = c.x
+}
+```
+
+Scala allows accessing the private method `x` in the companion object `C`. In bytecode, however, the classfile for the companion `C$` is not allowed to access a private method of `C`. For that reason, the Scala compiler "mangles" the name of `x` to `C$$x` and makes the method public.
+
+This means that `m` can be inlined into classes other than `C`, since the resulting code invokes `C.C$$x` instead of `C.m`. Unfortunately this breaks Scala's binary compatibility promise: the fact that the public method `m` calls a private method `x` is considered to be an implementation detail that can change in a minor release of the library defining `C`.
+
+Even more trivially, assume that method `m` was buggy and is changed to `def m = if (fullMoon) 1 else x` in a minor release. Normally, it would be enough for a user to put the new version on the classpath. However, if the old version of `c.m` was inlined at compile-time, having the new version of C on the run-time classpath would not fix the bug.
+
+In order to safely use the Scala optimizer, users need to make sure that the compile-time and run-time classpaths are identical. This has a far-reaching consequence for library developers: **libraries that are published to be consumed by other projects should not inline code from the classpath**. The inliner can be configured to inline code from the library itself using `-opt:inline:my.package.**`.
+
+The reason for this restriction is that dependency management tools like sbt will often pick newer versions of transitive dependencies. For example, if library `A` depends on `core-1.1.1`, `B` depends on `core-1.1.2` and the application depends on both `A` and `B`, the build tool will put `core-1.1.2` on the classpath. If code from `core-1.1.1` was inlined into `A` at compile-time, it might break at run-time due to a binary incompatibility.
+
+## Using and interacting with the optimizer
+
+The compiler flag for enabling the optimizer is `-opt`. Running `scalac -opt:help` shows how to use the flag.
+
+By default (without any compiler flags, or with `-opt:default`), the Scala compiler eliminates unreachable code, but does not run any other optimizations.
+
+`-opt:local` enables all method-local optimizations, for example:
+
+- Elimination of code that loads unused values
+- Rewriting of null and `isInstanceOf` checks whose result is known at compile-time
+- Elimination of value boxes like `java.lang.Integer` or `scala.runtime.DoubleRef` that are created within a method and don't escape it
+
+Individual optimizations can be disabled. For example, `-opt:local,-nullness-tracking` disables nullness optimizations.
+
+Method-local optimizations alone typically don't have any positive effect on performance, because source code usually doesn't have unnecessary boxing or null checks. However, local optimizations can often be applied after inlining, so it's really the combination of inlining and local optimizations that can improve program performance.
+
+`-opt:inline` enables inlining in addition to method-local optimizations. However, to avoid unexpected binary compatibility issues, we also need to tell the compiler which code it is allowed to inline. This is done by specifying a pattern after the option to select packages, classes, and methods for inlining. Examples:
+
+- `-opt:inline:my.library.**` enables inlining from any class defined in package `my.library`, or in any of its sub-packages. Inlining within a library is safe for binary compatibility, so the resulting binary can be published. It will still work correctly even if one of its dependencies is updated to a newer minor version in the run-time classpath.
+- `-opt:inline:<sources>`, where the pattern is the literal string `<sources>`, enables inlining from the set of source files being compiled in the current compiler invocation. This option can also be used for compiling libraries. If the source files of a library are split up across multiple sbt projects, inlining is only done within each project. Note that in an incremental compilation, inlining would only happen within the sources being re-compiled – but in any case, it is recommended to only enable the optimizer in CI and release builds (and to run `clean` before building).
+- `-opt:inline:**` allows inlining from every class, including the JDK. This option enables full optimization when compiling an application. To avoid binary incompatibilities, it is mandatory to ensure that the run-time classpath is identical to the compile-time classpath, including the Java standard library.
+
+Running `scalac -opt:help` explains how to use the compiler flag.
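+
+In a build, these flags go into the project's Scala compiler options. For example, a minimal sbt sketch (the package pattern `my.library.**` is illustrative) that enables inlining only from the library's own packages might look like this:
+
+```scala
+// build.sbt -- enable the optimizer, restricted to this library's own packages
+Compile / scalacOptions ++= Seq(
+  "-opt:inline:my.library.**", // inline only from the library itself
+  "-Wopt"                      // surface inliner warnings during compilation
+)
+```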
+
+### Inliner heuristics and `@inline`
+
+When the inliner is enabled, it automatically selects callsites for inlining according to a heuristic.
+
+As mentioned in the introduction, the main goal of the Scala optimizer is to eliminate megamorphic dispatch and value boxing. In order to keep this post from growing too long, a followup post will include the analysis of concrete examples that motivate which callsites are selected by the inliner heuristic.
+
+Nevertheless, it is useful to have an intuition of how the heuristic works, so here is an overview:
+
+- Methods or callsites annotated [`@noinline`](https://www.scala-lang.org/api/current/scala/noinline.html) are not inlined.
+- The inliner doesn't inline *into* forwarder methods.
+- Methods or callsites annotated [`@inline`](https://www.scala-lang.org/api/current/scala/inline.html) are inlined.
+- Higher-order methods with a function literal as argument are inlined.
+- Higher-order methods where a parameter function of the callsite method is forwarded to the callee are inlined.
+- Methods with an `IntRef` / `DoubleRef` / ... parameter are inlined. When nested methods update variables of the outer method, those variables are boxed into `XRef` objects. These boxes can often be eliminated after inlining the nested method.
+- Forwarders, factory methods and trivial methods are inlined. Examples include simple closure bodies like `_ + 1` and synthetic methods (potentially with boxing / unboxing adaptations) such as bridges.
+
+To prevent methods from exceeding the JVM's method size limit, the inliner has size limits. Inlining into a method stops when the number of instructions exceeds a certain threshold.
+
+As you can see in the list above, the `@inline` and `@noinline` annotations are the only way for programmers to influence inlining decisions. In general, our recommendation is to avoid using these annotations. If you observe issues with the inliner heuristic that can be fixed by annotating methods, we are very keen to hear about them, for example in the form of a [bug report](https://github.com/scala/bug/issues).
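+
+For reference, here is a minimal sketch (with made-up names) showing how the annotations are written, both on a method definition and on an individual callsite:
+
+```scala
+class Hot {
+  @inline final def fastPath(x: Int): Int = x + 1 // request inlining at callsites of this method
+
+  @noinline def slowPath(x: Int): Int = x - 1     // ask the inliner to never inline this method
+
+  def run(x: Int): Int =
+    fastPath(x) + (fastPath(x): @noinline)        // the second call opts out at this callsite only
+}
+```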
+
+A related anecdote: in the Scala compiler and standard library (which are built with the optimizer enabled), there are roughly 330 `@inline`-annotated methods. Removing all of these annotations and re-building the project has no effect on the compiler's performance. So the annotations are well-intended and benign, but in reality unnecessary.
+
+For expert users, `@inline` annotations can be used to hand-tune performance critical code without reducing abstraction. If you have a project that falls into this category, please [let us know](https://contributors.scala-lang.org), we're interested to learn more!
+
+Finally, note that the `@inline` annotation only has an effect when the inliner is enabled, which is not the case by default. The reason is to avoid introducing accidental binary incompatibilities, as [explained above](#binary-compatibility).
+
+### Inliner warnings
+
+The inliner can issue warnings when callsites cannot be inlined. By default, these warnings are not issued individually, but only as a summary at the end of compilation (similar to deprecation warnings).
+
+```
+$> scalac Test.scala '-opt:inline:**'
+warning: there was one inliner warning; re-run enabling -Wopt for details, or try -help
+one warning found
+
+$> scalac Test.scala '-opt:inline:**' -Wopt
+Test.scala:3: warning: C::f()I is annotated @inline but could not be inlined:
+The method is not final and may be overridden.
+ def t = f
+ ^
+one warning found
+```
+
+By default, the inliner issues warnings for invocations of methods annotated `@inline` that cannot be inlined. Here is the source code that was compiled in the commands above:
+
+```scala
+class C {
+ @inline def f = 1
+ def t = f // cannot inline: C.f is not final
+}
+object T extends C {
+ override def t = f // can inline: T.f is final
+}
+```
+
+The `-Wopt` flag has more configurations. With `-Wopt:_`, a warning is issued for every callsite that is selected by the heuristic but cannot be inlined. See also `-Wopt:help`.
+
+### Inliner log
+
+If you're curious (or maybe even skeptical) about what the inliner is doing to your code, you can use the `-Vinline` verbose flag to produce a trace of the inliner's work:
+
+```scala
+package my.project
+class C {
+ def f(a: Array[Int]) = a.map(_ + 1)
+}
+```
+
+```
+$> scalac Test.scala '-opt:inline:**' -Vinline my/project/C.f
+Inlining into my/project/C.f
+ inlined scala/Predef$.intArrayOps (the callee is annotated `@inline`). Before: 15 ins, after: 30 ins.
+ inlined scala/collection/ArrayOps$.map$extension (the callee is a higher-order method, the argument for parameter (evidence$6: Function1) is a function literal). Before: 30 ins, after: 94 ins.
+ inlined scala/runtime/ScalaRunTime$.array_length (the callee is annotated `@inline`). Before: 94 ins, after: 110 ins.
+ [...]
+ rewrote invocations of closure allocated in my/project/C.f with body $anonfun$f$1: INVOKEINTERFACE scala/Function1.apply (Ljava/lang/Object;)Ljava/lang/Object; (itf)
+ inlined my/project/C.$anonfun$f$1 (the callee is a synthetic forwarder method). Before: 654 ins, after: 666 ins.
+ inlined scala/runtime/BoxesRunTime.boxToInteger (the callee is a forwarder method with boxing adaptation). Before: 666 ins, after: 674 ins.
+```
diff --git a/_overviews/contribute/add-guides.md b/_overviews/contribute/add-guides.md
new file mode 100644
index 0000000000..4840739cda
--- /dev/null
+++ b/_overviews/contribute/add-guides.md
@@ -0,0 +1,373 @@
+---
+title: Add New Guides/Tutorials
+num: 7
+---
+
+## Why Contribute New Learning Material?
+
+As [Heather Miller writes][why-contribute], contributing to [docs.scala-lang.org][home] is
+critical to making Scala accessible to newcomers, experienced programmers, and anyone who is curious.
+It is also a fantastic way to contribute for anyone who is comfortable using Scala, but maybe does not want to get
+involved with complex tools like the compiler.
+
+## Architecture
+
+This documentation website is backed by an open-source [GitHub repository](https://github.com/scala/docs.scala-lang),
+and is always contribution-ready.
+
+### Content
+
+Currently, the _types_ of documentation supported in this repository are:
+
+- **Guides/Overviews/Books**: Definitive guides/overviews of specific language features. Often long, detailed documents,
+ often produced by members of the Scala team. An example is the [Collections][collections-overview] overview.
+- **References**: The canonical reference for language features, written by members of the Scala team.
+ These provide the exact specification to understand more subtle aspects of the language. An example is the
+ [Scala 3 reference][scala-3-reference].
+- **Tutorials**: Bite-size, example-rich, and concise articles meant to get a developer up to speed quickly.
+- **Cheatsheets**: Quick reference of Scala syntax and behaviors.
+
+### Implementation
+
+The website is statically generated from [Markdown](https://en.wikipedia.org/wiki/Markdown) source using
+[Jekyll](https://github.com/mojombo/jekyll), and hosted on [GitHub Pages](https://pages.github.com/).
+This workflow was chosen to help contributors focus on writing helpful content, rather than on configuration and
+boilerplate. It also makes it easy to publish the static site to a central location.
+
+The Markdown syntax being used supports [Maruku](https://github.com/bhollis/maruku) extensions, and has automatic
+syntax highlighting, without the need for any tags.
+
+Additionally, [mdoc](https://github.com/scalameta/mdoc) is used during pull requests to validate Scala code blocks.
+To use this feature you must use the backtick notation as documented by mdoc,
+[see here](#code-blocks) for an example.
+
+**Note:** only validation of code is done by mdoc, and no extra output is generated.
+
+## Submitting Docs
+
+To contribute a new document, you should first
+[fork](https://help.github.com/articles/fork-a-repo/) the
+[repo](https://github.com/scala/docs.scala-lang), then write your article in
+[Markdown](https://daringfireball.net/projects/markdown/syntax) (example below), and finally submit a pull request.
+Likely after some edits and discussion, your document will be made live
+on [docs.scala-lang.org][home].
+
+ ---
+ layout: singlepage-overview
+ title: My Awesome Title
+ ---
+
+ ## An h2 Header in Markdown
+
+ And a paragraph, with a [link](https://www.scala-lang.org).
+
+Tables of contents will be automatically generated in a sidebar for your document, and syntax highlighting
+is provided.
+
+### Criteria for Docs to be Accepted
+
+The goal of this documentation repository is to be highly curated, unlike the approach of other community-driven
+documentation platforms, such as wikis. Therefore, to be included on [docs.scala-lang.org][home], a document must:
+
+- **"fit in"** to the repository (_i.e.,_ it should not be a complete duplicate of another article),
+- **be polished**, i.e. it must be thorough, complete, correct, and organized; written as an article to be understood
+ by many users.
+- **be maintained**, if the document might require revisions from time to time, be prepared to keep it up to date, or
+nominate someone to take ownership.
+
+If you have something you're thinking about contributing, or that you're thinking about writing in order to contribute
+-- we'd love to consider it! Please don't hesitate to use GitHub issues and pull requests and the
+`#scala-contributors` room [on Discord](https://discord.com/invite/scala) for any questions, concerns,
+clarifications, etc.
+
+## Code blocks
+
+It's common for various kinds of documents to require code examples.
+You can contribute code in a Markdown document by either
+- writing it in-line, by putting backticks around it,
+- surrounding it with triple backticks,
+- or indenting it by 4 spaces, e.g.:
+
+~~~
+inline example: `val x = 23`
+
+block example:
+
+```scala
+println("hello")
+```
+
+indented example:
+
+ case class Foo(x: Int)
+~~~
+
+### Scala 2 vs Scala 3
+
+Our goal is to have a unified documentation that covers both Scala 2 and Scala 3. In many cases, the
+code examples are the same in both Scala 2 and Scala 3, but sometimes there are some syntactic
+differences. In some less common cases, a page may explain a concept that is new in Scala 3 and has
+no equivalent in Scala 2, or a concept that has been removed in Scala 3. In all the cases, the
+documentation should clearly "label" the code examples so that the readers know in which versions
+of Scala they are valid.
+
+The following sections explain how to properly "label" the code examples.
+
+#### Labelling the code snippets of a page documenting a concept available in both Scala 2 and Scala 3
+
+When the content of a page is not specific to Scala 2 or Scala 3, like for example our
+[Hello World][hello-world] chapter of the Scala Book, the code snippets should show both the
+Scala 2 and Scala 3 syntax. We achieve this by labelling the code snippets in tabs according
+to the following rules:
+
+- if the idiomatic syntax is different in Scala 2 and Scala 3, we create two tabs,
+ “Scala 2” and “Scala 3”, showing the corresponding syntax
+- if the code snippet is idiomatic in both Scala 2 and Scala 3, we create a single tab,
+ “Scala 2 and 3”
+- if the code snippet is valid only in Scala 2 or Scala 3, we create a single tab,
+ “Scala 2 Only” or “Scala 3 Only”
+
+Here is an example of how you
+can generate such tabs in Markdown with the `tabs` directive and class `tabs-scala-version`:
+
+
+~~~liquid
+{% tabs hello-world-demo class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+object hello extends App {
+ println("Hello, World!")
+}
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+@main def hello() = println("Hello, World!")
+```
+{% endtab %}
+
+{% endtabs %}
+~~~
+
+
+It is crucial that you use the `tabs-scala-version` class to benefit from some cool user interactions:
+- whenever one tab is changed, all other Scala version tabs on the same page also switch to the same version.
+- the tab picked will be remembered across the site, and when the user returns to the page after some time.
+
+For code snippets that are valid in both Scala 2 and Scala 3, please use a single tab labelled
+`'Scala 2 and 3'` (please note that the `tabs-scala-version` class is also dropped):
+
+
+~~~liquid
+{% tabs scala-2-and-3-demo %}
+{% tab 'Scala 2 and 3' %}
+```scala
+List(1, 2, 3).map(x => x + 1).sum
+```
+{% endtab %}
+{% endtabs %}
+~~~
+
+
+For examples that only apply to either one of Scala 2 or 3, use the tabs `'Scala 2 Only'` and `'Scala 3 Only'`.
+
+If you have a particularly long tab, for readability you can indicate which tab group it belongs to with
+a parameter `for=tab-group` as in this example:
+
+~~~liquid
+{% tabs my-tab-group class=tabs-scala-version %}
+...
+{% tab 'Scala 3' for=my-tab-group %}
+...
+~~~
+
+
+#### Labelling an entire page documenting a concept that is specific to a Scala version
+
+When the content of a page explains a concept that is new in Scala 3 and has no
+equivalent in Scala 2 (e.g. [TASTy]({% link scala3/guides/tasty-overview.md %})),
+or a concept that has been removed in Scala 3, we label the entire page instead
+of labelling each code example.
+
+We achieve this by setting a couple of attributes in the [YAML front
+matter](https://jekyllrb.com/docs/front-matter/) of the Markdown file. For
+instance, for a page that is specific to Scala 3:
+
+~~~ yaml
+scala3: true
+versionSpecific: true
+~~~
+
+Or, for a page that is specific to Scala 2:
+
+~~~ yaml
+scala2: true
+versionSpecific: true
+~~~
+
+Please note that when the entire page is labelled, its code examples do not
+need to have tabs.
+
+### Typechecked Examples
+
+The site build process uses [mdoc](https://scalameta.org/mdoc/) to typecheck
+code snippets in markdown. This is a great way to ensure the code snippets that
+you're including typecheck and are valid. Here are a few quick tips to get
+started:
+
+First, add `mdoc` after `scala` when you are creating a
+code block. The `mdoc` modifier here will make sure that `mdoc` runs the code
+snippet and ensures that it's valid.
+
+
+
+
+ ```scala mdoc
+val a = 1
+```
+
+If you have a snippet that you expect to fail, you can also account for this by
+using `mdoc:fail` for a compile error or `mdoc:crash` for a runtime error.
+
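+For instance, a snippet like the following (a made-up example) is expected to produce a compile error, and the `mdoc:fail` modifier makes the build verify exactly that:
+
+```scala mdoc:fail
+val b: String = 3 // does not typecheck, which is what mdoc:fail checks for
+```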
+
+
+Keep in mind that all the snippets in a single file are compiled as a single unit, so you can't
+redefine a variable that was defined above in another code snippet. _However_,
+there are a couple of ways to get around this. Firstly, you can use the `mdoc:nest`
+modifier, which will wrap the snippet in a `scala.Predef.locally{...}`. This will
+essentially "hide" the snippet from the others. Another way around this is to
+use the `mdoc:reset` modifier, which _resets_ and forgets about everything defined
+above. Here is an example using the various modifiers.
+
+
+```scala mdoc:nest
+case class Foo(a: Int) // conflicts with Foo above, but it's nested so it's fine
+```
+
+
+```scala mdoc
+val a = s"The time is ${now()}" // still have access to the now method from above
+```
+
+
+```scala mdoc:reset
+case class Foo(a: String) // forget the previous Foo's and start fresh
+```
+
+
+```scala mdoc
+val myFoo = Foo("hi") // now we only have access to the last Foo
+```
+
+## Document Templates
+
+### Guides/Overviews
+
+A guide or an overview that can be logically placed on **one** markdown page should be placed in the directory
+`_overviews/RELEVANT-CATEGORY/`. It should have the header:
+
+ ---
+ layout: singlepage-overview
+ title: YOUR TITLE
+ ---
+
+The rest of the document will be written in [Markdown](https://en.wikipedia.org/wiki/Markdown) syntax.
+
+You may substitute `RELEVANT-CATEGORY` for any directory that is related, or create a new one if one is not suitable.
+
+If your guide/overview consists of **multiple** pages, like the [Collections][collections-overview] overview,
+an ordering must be specified, by numbering documents in their logical order with the `num` tag in the header,
+and a name must be assigned to the collection of pages using the `partof` tag.
+For example, the following header might be used for a document in the collections overview:
+
+ ---
+ layout: multipage-overview
+ title: YOUR TITLE
+
+ partof: collections
+ num: 10
+ ---
+
+**At least one** document in the collection must contain a tag in the header, `outof`, that indicates the total number
+of documents in the large overview. Putting it on the last page in the overview is often best:
+
+ ---
+ layout: multipage-overview
+ title: YOUR TITLE
+
+ partof: collections
+ num: 15
+ outof: 15
+ ---
+
+Index pages, such as [docs.scala-lang.org/overviews/index.html][overviews-index] are
+generated by reading data from a configuration file, such as `_data/overviews.yml`, so your overview should be
+placed into a category there.
+
+### Tutorials
+
+Tutorials are different from guides: they should be written in a much more concise, task-oriented style,
+usually on a single page.
+
+Similar to guides, tutorials also use the same markdown header.
+
+Once the tutorial is written, to aid user navigation, a link to it must be added to
+the metadata of `/tutorials.md`. For example, it could look like:
+
+ ---
+ layout: root-index-layout
+ title: Tutorials
+
+ tutorials:
+ ...
+ - title: My New Tutorial
+ url: "/tutorials/my-new-tutorial.html"
+ description: "Learn How To Do This Specific Task"
+ icon: code
+ ---
+
+You must also add the tutorial to the drop-down list in the navigation bar. To do this, add an extra entry to
+`_data/doc-nav-header.yml`, e.g.:
+
+ ---
+ - title: Getting Started
+ url: "/getting-started/install-scala.html"
+ - title: Learn
+ ...
+ - title: Tutorials
+ url: "#"
+ submenu:
+ ...
+ - title: My New Tutorial
+ url: "/tutorials/my-new-tutorial.html"
+ ...
+ ---
+
+### Cheatsheets
+
+Cheatsheets have a special layout, and the content is expected to be a Markdown table. To contribute a cheatsheet,
+you should use the following format:
+
+ ---
+ layout: cheatsheet
+ title: YOUR TITLE
+ by: YOUR NAME
+ about: SOME TEXT ABOUT THE CHEAT SHEET.
+ ---
+ | Title A | Title B |
+ |---------|---------|
+ | content | more |
+
+[collections-overview]: {% link _overviews/collections-2.13/introduction.md %}
+[why-contribute]: {% link contribute.md %}
+[home]: {% link index.md %}
+[overviews-index]: {% link _overviews/index.md %}
+[scala-3-reference]: {{ site.scala3ref }}
+[hello-world]: {% link _overviews/scala3-book/taste-hello-world.md %}
diff --git a/_overviews/contribute/bug-reporting-guide.md b/_overviews/contribute/bug-reporting-guide.md
new file mode 100644
index 0000000000..20dd04546c
--- /dev/null
+++ b/_overviews/contribute/bug-reporting-guide.md
@@ -0,0 +1,90 @@
+---
+title: Bug Reporting Guide
+num: 8
+---
+
+The Scala compiler and standard library bug tracker is located at [https://github.com/scala/bug](https://github.com/scala/bug), and for Scala 3, it is located at [github.com/scala/scala3](https://github.com/scala/scala3/issues). Before you submit a bug report, make sure that it is certainly a bug by following the instructions
+in [Is it a Bug?](#is-it-a-bug).
+
+## Is it a Bug?
+
+The first step in reporting a bug is to identify which component of the Scala distribution is affected. First, ensure that your issue falls into one of the following categories:
+
+ - **Library** bugs typically manifest themselves as run-time exceptions, or as *unexpected*/*unintuitive* behavior of Scala Standard Library methods.
+ - **Compiler** errors are manifested as compile time exceptions, unexpected behavior of your code at run time, or invalid behavior of the type system.
+ - **Reflection** bugs are those that appear in the `scala.reflect` package. For *reflection* bugs, the same rules apply as for *library* bugs.
+ - **Scaladoc** bugs are manifested as logical problems in the information it presents (that is, the displayed information is incorrect, such as an incorrect subclassing relationship), or incorrect behavior of the user interface. If you'd like to suggest a change in the content of the documentation, please submit a pull request (possible to do in the browser using GitHub, which is easier and faster than filing a bug). Please file a bug about the content of documentation only if you cannot provide a suggestion for its fix.
+
+If your issue is related to any of the following external projects, make sure to use its appropriate issue tracker:
+
+ - [Akka](https://doc.akka.io/docs/akka/current/project/issue-tracking.html)
+ - [Play!](https://github.com/playframework/Play20/issues)
+ - [Slick](https://github.com/slick/slick/issues)
+ - [sbt](https://github.com/sbt/sbt/issues)
+
+The following are generally considered to be bugs:
+
+- **Scala Compiler Crash** If the Scala compiler is crashing with an internal error (compile time exception) you have certainly found a bug, and can move on to the next section of this document on reporting confirmed bugs.
+- **Regressions** If some code snippet worked in a previous Scala release, but now no longer compiles or results in an exception, it is probably a regression.
+- **Verify Errors** happen when the compiled Scala program is loaded into the Java Virtual Machine. If you're getting a *Verify Error*, you've usually found a bug. First make sure that your project is not using stale `.class` files before reporting a new issue.
+
+If you have a code snippet that is resulting in bytecode which you believe is behaving incorrectly, you may or may not have found a bug. Before reporting your issue, please attempt the following:
+
+* Make sure you minimize your problem. To correctly minimize the problem, follow these instructions:
+
+ 1. Gradually remove parts from the original failing code snippet until you believe you have the simplest representation of your problem.
+
+ 2. Ensure that you have decoupled your code snippet from any library that could be introducing the incorrect behavior. One way to achieve this is to try to recompile the offending code snippet in isolation, outside the context of any complex build environment. If your code depends on some strictly Java library and source code is available for it, make sure that the latter is also minimized.
+
+ 3. Make sure you are compiling your project from a clean slate. Your problem could be related to separate compilation, which is difficult to detect without a clean build with new `.class` files.
+
+ 4. If you have encountered a bug while building your code in the IDE, then please reproduce it on the command line. The same rule applies for build tools like **sbt** or **Mill**.
+
+ 5. If you want to file an improvement in the issue tracker, please discuss it first on one of the mailing lists. They offer a much bigger audience than the issue tracker, which is not suitable for long discussions.
+
+* Keep in mind that the behavior you are witnessing could be intended. Good formal resources for verifying whether the language behavior is intended are the [Scala Improvement Proposal Documents][sips] and the [Scala Language Specification](https://www.scala-lang.org/files/archive/spec/2.13/). If in doubt, you may always ask on the [Community Category](https://contributors.scala-lang.org/c/community) or [Stack Overflow](https://stackoverflow.com/questions/tagged/scala).
+
+In general, if you find yourself stuck on any of these steps, asking on [Scala Contributors](https://contributors.scala-lang.org/) can be helpful:
+
+ - For unexpected behavior use the [Community Category](https://contributors.scala-lang.org/c/community).
+ - For compiler bugs use the [Compiler Category](https://contributors.scala-lang.org/c/compiler).
+
+* Examples of exceptions reported by the compiler which usually are not bugs:
+ 1. `StackOverflowError` is typically not a bug unless the stacktrace involves the internal packages of the compiler (like `scala.tools.nsc...`, or `dotty.tools.dotc...`). Try increasing the Java stack size (`-Xss`); in most cases this helps.
+ 2. `AbstractMethodError` can occur when you did not recompile all the necessary Scala files (build tools, like `sbt`, can prevent that from happening) or you are mixing external libraries compiled for different Scala versions (for example one uses `2.10.x` and the other `2.11.x`).
+
+## Please Check Before Reporting a Bug
+
+Before reporting your bug, make sure to check the issue tracker for other similar bugs. The exception name or a compiler phase are the best keywords to search for. If you are experiencing unexpected behavior, search for the method/class names where it happens. Your issue might already be reported, and a workaround might already be available for you to take advantage of. If your issue *is* reported, be sure to add your test case as a comment if it is different from any of the existing ones.
+
+**Note:** reporting a bug that already exists creates an additional overhead for you, the Scala Team, and all people that search the issue database. To avoid this inconvenience make sure that you thoroughly search for an existing issue.
+
+If you cannot find your issue in the issue tracker, create a new bug. The details about creating a bug report are in the following section.
+
+## Creating a Bug Report
+
+Please make sure to fill in as many fields as possible. Make sure you've indicated the following:
+
+ 1. **Exact Scala version** that you are using. For example, `2.13.16` or `3.3.4`. If the bug happens in multiple versions indicate all of them.
+ 2. **The component** that is affected by the bug. For example, the Standard Library, Scaladoc, etc.
+ 3. **Labels** related to your issue. For example, if you think your issue is related to the typechecker, and if you have successfully minimized your issue, label your bug as "typechecker" and "minimized". The issue tracker will suggest existing labels as you type, so try not to create duplicates.
+ 4. **Running environment**. Are you running on Linux? Windows? What JVM version are you using?
+
+In order for us to quickly triage the bug that you've found, it's important that the code snippet which produces the observed issue is as minimized as possible. For advice on minimizing your code snippet, please see the appropriate subsection of the above ([Is it a Bug?](#is-it-a-bug)).
+
+### Description
+
+In the description of your issue, be as detailed as you can. Bug reports which have the following information included are typically understood, triaged, and fixed very quickly:
+1. Include a test case (minimized if possible) enabling us to reproduce the problematic behavior. Include your test
+case (and output) in properly formatted code blocks:
+~~~
+```scala
+List(1, 2, 3).map(x => x + 1)
+```
+~~~
+2. The expected output.
+3. The actual output, including the stacktrace.
+4. Related discussion on the mailing lists, if applicable.
+5. If you have already looked into the issue provide interesting insights or proposals for fixing the issue.
+
+[sips]: {% link _sips/index.md %}
diff --git a/_overviews/contribute/codereviews.md b/_overviews/contribute/codereviews.md
new file mode 100644
index 0000000000..cb49220627
--- /dev/null
+++ b/_overviews/contribute/codereviews.md
@@ -0,0 +1,60 @@
+---
+title: Code Review Contributions
+num: 3
+---
+## Code Review Contributions
+
+In addition to [bug fixing][bug-fixing], you can help us review
+[waiting pull requests](#pull-requests-awaiting-comment).
+This is also a good (and recommended) way to get to know the feel of
+the bug-fixing and submissions process before jumping in with your
+own pull requests.
+
+
+### Review Guidelines
+
+[Code of Conduct reminder](https://scala-lang.org/conduct.html)
+
+* Keep comments on-topic, concise and precise.
+* Attach comments to particular lines or regions they pertain to whenever possible.
+* Short code examples are often more descriptive than prose.
+* If you have thoroughly reviewed the PR and thought through all angles, LGTM (Looks Good To Me) is the preferred acceptance response.
+* Additional reviews should try to offer additional insights: "I also thought about it from this angle, and it still looks good."
+* Above all, remember that the people you are reviewing might be reviewing your PRs one day too.
+* If you are receiving the review, consider that the advice is being given to make you, and Scala, better rather than as a negative critique. Assume the best, rather than the worst.
+
+## Pull Requests Awaiting Comment
+
+
+For other PRs, follow the scala project from here.
+
+
+
+
+Also note that the [Tools contributions][tools] page has more projects that will generate pull requests.
+
+[bug-fixing]: {% link _overviews/contribute/guide.md %}
+[tools]: {% link _overviews/contribute/tools.md %}
diff --git a/_overviews/contribute/corelibs.md b/_overviews/contribute/corelibs.md
new file mode 100644
index 0000000000..4fcab907a2
--- /dev/null
+++ b/_overviews/contribute/corelibs.md
@@ -0,0 +1,21 @@
+---
+title: Core Library Contributions
+num: 4
+---
+## Core Library Contributions
+
+There are several options for contributing to Scala's core libraries. You can:
+
+* Help with [Documentation][documentation].
+* [Report Bugs or Issues][bug-reporting-guide] against the core libraries.
+* [Fix Bugs or Issues][guide] against the
+ [reported library bugs/issues](https://github.com/scala/bug).
+
+### Significant changes
+
+Before proposing significant new functionality or a whole new API for inclusion in the core Scala distribution,
+please take into account [scala/scala-dev#661](https://github.com/scala/scala-dev/issues/661).
+
+[documentation]: {% link _overviews/contribute/documentation.md %}
+[bug-reporting-guide]: {% link _overviews/contribute/bug-reporting-guide.md %}
+[guide]: {% link _overviews/contribute/guide.md %}
diff --git a/_overviews/contribute/documentation.md b/_overviews/contribute/documentation.md
new file mode 100644
index 0000000000..469396e40c
--- /dev/null
+++ b/_overviews/contribute/documentation.md
@@ -0,0 +1,60 @@
+---
+title: Documentation Contributions
+num: 5
+---
+## Contributing Documentation to the Scala project
+
+There are several ways you can help out with the improvement of Scala documentation. These include:
+
+* API Documentation in Scaladoc
+* Guides, Overviews, Tutorials, Cheat Sheets and more on the [docs.scala-lang.org][home] site
+* Updating [scala-lang.org](https://scala-lang.org)
+
+Please read this page, and the pages linked from this one, fully before contributing documentation. Many frequently asked questions will be answered in these resources. If you have a question that isn't answered, feel free to ask on the [Scala Contributors](https://contributors.scala-lang.org/) forum and then, please, submit a pull request with updated documentation reflecting that answer.
+
+**General requirements** for documentation submissions include spell-checking all written language, ensuring code samples compile and run correctly, correct grammar, and clean formatting/layout of the documentation.
+
+Thanks
+
+### API Documentation (Scaladoc)
+
+The Scala API documentation lives with the scala project source code. There are many ways you can help with improving Scaladoc, including:
+
+* [Log issues for missing scaladoc documentation][report-api-doc-bugs] -
+Please *follow the issue submission process closely* to help prevent duplicate issues being created.
+* [Claim Scaladoc Issues and Provide Documentation][scala-standard-library-api-documentation] - please claim issues prior to working on a specific scaladoc task to prevent duplication of effort. If you sit on an issue for too long without submitting a pull request, it will revert to unassigned, and you will need to re-claim it.
+* You can also just
+[submit new Scaladoc][scala-standard-library-api-documentation]
+without creating an issue, but please look to see if there is an issue already submitted for your task and claim it if there is. If not, please post your intention to work on a specific scaladoc task on [Scala Contributors](https://contributors.scala-lang.org/) so that people know what you are doing.
+
+### The Main Scala Documentation Site
+
+[docs.scala-lang.org][home] houses the primary source of written, non-API documentation for Scala. It's a GitHub project that you can fork and submit pull requests from. It includes:
+
+* Overviews
+* Tutorials
+* Conversion Guides from Other Languages
+* Cheat Sheets
+* A Glossary
+* The Scala Style Guide
+* The Scala Language Specification
+* SIP (Scala Improvement Process) Proposals
+and more
+
+Please read through [Add New Guides/Tutorials][add-guides] before embarking on changes. The site uses
+the [Jekyll](https://jekyllrb.com/) Markdown engine, so you will need to follow the instructions to get that running as well.
+
+### Updating scala-lang.org
+
+Additional high-level documentation (including documentation on contributing
+to Scala and related projects) is provided on the main
+[Scala Language site](https://scala-lang.org), and is also kept in the
+[scala-lang GitHub project](https://github.com/scala/scala-lang) which may be forked to create pull requests.
+
+Please read both the
+[Add New Guides/Tutorials][add-guides] document and the [scala-lang.org GitHub README](https://github.com/scala/scala-lang#scala-langorg) before embarking on any changes to the Scala language site, as it uses the same Jekyll markdown tool and many of the same conventions as the Scala documentation site.
+
+[report-api-doc-bugs]: {% link _overviews/contribute/scala-standard-library-api-documentation.md %}#contribute-api-documentation-bug-reports
+[scala-standard-library-api-documentation]: {% link _overviews/contribute/scala-standard-library-api-documentation.md %}
+[home]: {% link index.md %}
+[add-guides]: {% link _overviews/contribute/add-guides.md %}
diff --git a/_overviews/contribute/guide.md b/_overviews/contribute/guide.md
new file mode 100644
index 0000000000..f5307a325a
--- /dev/null
+++ b/_overviews/contribute/guide.md
@@ -0,0 +1,84 @@
+---
+title: Contributing guide
+num: 10
+---
+
+| **Shortcut** | **Description** |
+|----------------------------------------|-----------------|
+| [Scala Contributors][contrib-forum] | Get a peek into the internals of the Scala compiler. |
+| [Report an Issue][bug-reporting-guide] | File a bug report or a feature request. |
+| [Community Issues][community-tickets] | Get cracking on some easy to approach issues. |
+| [Scala 2 Hacker's Guide][hackers] | Learn to write good code and improve your chances of contributing to the Scala galaxy. |
+| [Scala 3 Contributing Guide][scala3-hackers] | A walkthrough of contributing to the Scala 3 compiler, along with a guide to compiler internals. |
+
+
+
+### Why contribute a patch to Scala?
+
+Just to name a few common reasons:
+
+* contributing a patch is the best way to make sure your desired changes will be available in the next Scala version
+* Scala is written in Scala, so going through the source code and patching it will improve your knowledge of Scala.
+* last but not least, it only takes a few accepted commits to make it into the [Scala Contributor Hall of Fame](https://github.com/scala/scala/contributors).
+
+The main Scala project consists of the standard Scala library, the Scala reflection and macros library,
+the Scala compiler and the Scaladoc tool. This means there's plenty to choose from when deciding what to work on.
+Typically, the Scaladoc tool provides a low barrier to entry for new committers, so it is a good place to start contributing.
+
+On the [Scala bug tracker](https://github.com/scala/bug) you will find bugs that you could pick up. Once you have decided on a ticket to look at, see the next section on how to proceed.
+
+If you are interested in contributing code, we ask you to sign the
+[Scala Contributor License Agreement](https://www.lightbend.com/contribute/cla/scala),
+which allows us to ensure that all code submitted to the project is
+unencumbered by copyrights or patents.
+
+### Bug-fix Check List
+> Originally these steps cover the [Scala 2 compiler](https://github.com/scala/scala), but they also are relevant to
+> the [Scala 3 compiler](https://github.com/scala/scala3).
+
+This is the impatient developer's checklist for the steps to submit a bug-fix pull request to the Scala project. For more information, description, and justification for each step, follow the links in that step. Further specific instructions for the release of Scala you are targeting can be found in the `CONTRIBUTING.md` file for that [GitHub branch](https://github.com/scala/scala).
+
+1. [Select a bug to fix from GitHub][community-tickets], or if you found the bug yourself and want to fix it, [create a GitHub issue][bug-reporting-guide] (but please
+[make sure it's not a duplicate][bug-report-check-dupes]).
+2. Optional ([but recommended][why-its-a-good-idea]): announce your intention to work on the bug on [Scala Contributors](https://contributors.scala-lang.org/). After all, don't you want to work on a team with
+[these friendly people][hackers-connect]? It's one of the perks of contributing.
+3. [Fork the Scala repository][hackers-fork] and clone your fork (if you haven't already).
+4. [Create a feature branch][hackers-branch] to work on: use the branch name `issue/NNNN` where NNNN is the GitHub issue number.
+5. [Fix the bug, or implement the new small feature][hackers-implement], include new tests (yes, for bug fixes too).
+6. [Test, rinse][hackers-test] and [test some more][partest-guide] until [all the tests pass][hackers-verify].
+7. [Commit your changes][hackers-commit] to your feature branch in your fork. Please choose your commit message based on the [Git Hygiene](https://github.com/scala/scala#user-content-git-hygiene) section of the Scala project README.
+8. If necessary [re-write git history](https://git-scm.com/book/en/v2/Git-Branching-Rebasing) so that [commits are organized by major steps to the fix/feature](
+https://github.com/scala/scala#git-hygiene). For bug fixes, a single commit is requested; for features, several commits may be desirable (but each separate commit must compile and pass all tests).
+9. [Submit a pull request][hackers-submit].
+10. [Work with a reviewer](https://github.com/scala/scala#reviewing) to [get your pull request merged in][hackers-review].
+11. Celebrate!
+
+Need more information or a little more hand-holding for the first one? We've got you covered: take a read through the entire [Hacker Guide][hackers] (or the [equivalent Scala 3 Contributing Guide][scala3-hackers]) for an example of implementing a new feature (some steps can be skipped for bug fixes, as will be obvious from reading it, but many of the steps here will help with bug fixes too).
+
+### Larger Changes, New Features
+
+For larger, more ambitious changes (e.g. new language features), the first step to making a change is to discuss it with the community at large, to make sure everyone agrees on the idea
+and on the implementation plan. Announce the change
+on the [Scala Contributors](https://contributors.scala-lang.org/) mailing list and get developer feedback. For really complex changes, a [Scala Improvement Process (SIP)][sips] document might be required, but the first step is always a discussion on the mailing list, where the need for a SIP will also be determined.
+
+Contributions, big or small, simple or complex, controversial or undisputed, need to materialize as patches against
+the Scala project source tree. The hacker's guides ([Scala 2][hackers], or [Scala 3][scala3-hackers]) will explain how to materialize your idea into a full-fledged pull request against the Scala code base.
+
+[hackers]: {% link _overviews/contribute/hacker-guide.md %}
+[community-tickets]: {% link _overviews/contribute/index.md %}#community-tickets
+[bug-reporting-guide]: {% link _overviews/contribute/bug-reporting-guide.md %}
+[bug-report-check-dupes]: {% link _overviews/contribute/bug-reporting-guide.md %}#please-check-before-reporting-a-bug
+[scala3-hackers]: {% link _overviews/contribute/scala3.md %}
+[contrib-forum]: https://contributors.scala-lang.org/
+[why-its-a-good-idea]: {% link _overviews/contribute/scala-internals.md %}#why-its-a-good-idea
+[hackers-connect]: {% link _overviews/contribute/hacker-guide.md %}#1-connect
+[hackers-fork]: {% link _overviews/contribute/hacker-guide.md %}#fork
+[hackers-branch]: {% link _overviews/contribute/hacker-guide.md %}#branch
+[hackers-implement]: {% link _overviews/contribute/hacker-guide.md %}#implement
+[hackers-test]: {% link _overviews/contribute/hacker-guide.md %}#test
+[hackers-verify]: {% link _overviews/contribute/hacker-guide.md %}#verify
+[hackers-commit]: {% link _overviews/contribute/hacker-guide.md %}#commit
+[hackers-submit]: {% link _overviews/contribute/hacker-guide.md %}#submit
+[hackers-review]: {% link _overviews/contribute/hacker-guide.md %}#review
+[partest-guide]: {% link _overviews/contribute/partest-guide.md %}
+[sips]: {% link _sips/index.md %}
diff --git a/_overviews/contribute/hacker-guide.md b/_overviews/contribute/hacker-guide.md
new file mode 100644
index 0000000000..ea77feee0d
--- /dev/null
+++ b/_overviews/contribute/hacker-guide.md
@@ -0,0 +1,387 @@
+---
+title: Scala 2 Hacker's Guide
+by: Eugene Burmako
+num: 12
+---
+
+This guide is intended to help you get from an idea for a bug fix or a new feature into a nightly Scala build and, ultimately, into a production release of Scala incorporating your idea.
+
+This guide covers the entire process, from the conception of your idea or bugfix to the point where it is merged into Scala. Throughout, we will use a running example of an idea or bugfix one might wish to contribute.
+
+Other good starting points for first-time contributors include the [Scala README](https://github.com/scala/scala#get-in-touch) and [contributor's guidelines](https://github.com/scala/scala/blob/2.13.x/CONTRIBUTING.md).
+
+## The Running Example
+
+Let's say that you particularly enjoy the new string interpolation language feature introduced in Scala 2.10.0, and you use it quite heavily.
+
+However, there's an annoying issue
+that you occasionally stumble upon: the formatting string interpolator `f` [does not support](https://github.com/scala/bug/issues/6725)
+newline tokens (`%n`).
+
+One approach would be to go to the [Scala 2 bug tracker](https://github.com/scala/bug), request that the bug be fixed, and then wait indefinitely for the fix to arrive. Another approach would be to instead patch Scala yourself, and to submit the fix to the Scala repository in hopes that it might make it into a subsequent release.
+
+**_Of note_**: There are several types of releases/builds. Nightly builds are produced every night at a fixed time. Minor releases happen once every few months. Major releases typically happen once per year.
+
+## 1. Connect
+
+Sometimes it's appealing to hack alone and not to have to interact with others. However, in the context of a big project such as Scala, there might be better ways. There are people in the Scala community who have spent years accumulating knowledge about Scala libraries and internals. They might provide
+unique insights and, what's even better, direct assistance in their areas, so it is not only advantageous, but recommended to communicate with the community about your new patch.
+
+Typically, bug fixes and new features start out as an idea or an experiment posted on one of [our forums](https://scala-lang.org/community/index.html#forums) to find out how people feel
+about things you want to implement. People proficient in certain areas of Scala usually monitor forums and discussion rooms, so you'll often get some help by posting a message.
+But the most efficient way to connect is to mention in your message one of the people responsible for maintaining the aspect of Scala which you wish to contribute to.
+
+A list of language features/libraries along with their maintainer's full names and GitHub usernames is [in the Scala repo README](https://github.com/scala/scala#get-in-touch).
+
+In our running example, since Martin is the person who submitted the string interpolation Scala Improvement Proposal and implemented this language feature for Scala 2.10.0, he might be interested in learning of new bugfixes to that feature.
+
+As alluded to earlier, one must also choose an appropriate avenue to discuss the issue. Typically, one would use the [Scala Contributor's Forum][contrib-forum], as there are post categories devoted to discussions about the core internal design and implementation of the Scala system.
+
+In this example, the issue had previously been discussed on
+[the (now unused) scala-user mailing list](https://groups.google.com/group/scala-user); at the time, we would have posted there about our issue:
+
+
+
+
+Now that we have the approval of the feature's author, we can get to work!
+
+## 2. Set up
+
+Hacking Scala begins with creating a branch for your work item. To develop Scala we use [Git](https://git-scm.com/)
+and [GitHub](https://github.com/). This section of the guide provides a short walkthrough, but if you are new to Git,
+it probably makes sense to familiarize yourself with Git first. We recommend
+
+* the [Git Pro](https://git-scm.com/book/en/v2) online book.
+* the help page on [Forking a Git Repository](https://help.github.com/articles/fork-a-repo).
+* this great training tool, [LearnGitBranching](https://pcottle.github.io/learnGitBranching/). One hour of hands-on training helps more than 1000 hours of reading.
+
+### Fork
+
+Log into [GitHub](https://github.com/), go to [https://github.com/scala/scala](https://github.com/scala/scala) and click the `Fork`
+button in the top right corner of the page. This will create your own copy of our repository that will serve as a scratchpad for your work.
+
+If you're new to Git, don't be afraid of messing up-- there is no way you can corrupt our repository.
+
+
+
+### Clone
+
+If everything went okay, you will be redirected to your own fork at `https://github.com/username/scala`, where `username`
+is your GitHub username. You might find it helpful to read [https://help.github.com/fork-a-repo/](https://help.github.com/fork-a-repo/),
+which covers some things that will follow below. Then, _clone_ your repository (i.e. pull a copy from GitHub to your local machine) by running the following on the command line:
+
+ 16:35 ~/Projects$ git clone https://github.com/xeno-by/scala
+ Cloning into 'scala'...
+ remote: Counting objects: 258564, done.
+ remote: Compressing objects: 100% (58239/58239), done.
+ remote: Total 258564 (delta 182155), reused 254094 (delta 178356)
+ Receiving objects: 100% (258564/258564), 46.91 MiB | 700 KiB/s, done.
+ Resolving deltas: 100% (182155/182155), done.
+
+This will create a local directory called `scala`, which contains a clone of your own copy of our repository. The changes that you make
+in this directory can be propagated back to your copy hosted on GitHub and, ultimately, pushed into Scala when your patch is ready.
+
+### Branch
+
+Before you start making changes, always create your own branch. Never work on the `master` branch. Think of a name that describes
+the changes you plan on making. Use a prefix that describes the nature of your change. There are essentially two kinds of changes:
+bug fixes and new features.
+
+* For bug fixes, use `issue/NNNN` or `ticket/NNNN` for bug `NNNN` from the [Scala bug tracker](https://github.com/scala/bug).
+* For new features, use `topic/XXX` for feature `XXX`. Use feature names that make sense in the context of the whole Scala project and not just to you personally. For example, if you work on diagrams in Scaladoc, `topic/scaladoc-diagrams` rather than just `topic/diagrams` would be a good branch name.
+
+Since in our example, we're going to fix an existing bug
+[scala/bug#6725](https://github.com/scala/bug/issues/6725), we'll create a branch named `ticket/6725`.
+
+ 16:39 ~/Projects/scala (master)$ git checkout -b ticket/6725
+ Switched to a new branch 'ticket/6725'
+
+If you are new to Git and branching, read the [Branching Chapter](https://git-scm.com/book/en/v2/Git-Branching-Branches-in-a-Nutshell) in the Git Pro book.
+
+### Build
+
+The next step after cloning your fork is setting up your machine to build Scala.
+
+You need the following tools:
+
+* A Java JDK. The baseline version is `8` for 2.13.x and higher. It's possible to use a higher JDK version for local development, but the continuous integration builds will verify against the baseline version.
+* `sbt`, an interactive build tool commonly used in Scala projects. Acquiring sbt manually is not necessary -- the recommended approach is to download the [sbt-extras runner script](https://github.com/paulp/sbt-extras/blob/master/sbt) and use it in place of `sbt`. The script will download and run the correct version of sbt when run from the Scala repository's root directory.
+* `curl` -- the build uses `curl` in the `pull-binary-libs.sh` script to download bootstrap libs.
+
+macOS and Linux builds should work. Windows is supported, but it might have issues. Please report to the [Scala 2 bug tracker](https://github.com/scala/bug) if you encounter any.
+
+Building Scala can be done with a single command `sbt dist/mkPack`, from the root of your cloned repository. In general, it's much more efficient to enter the `sbt` shell once and run the various tasks from there, instead of running each task by launching `sbt some-task` on your command prompt.
+
+Be prepared to wait for a while -- a full "clean" build takes 5+ minutes depending on your machine (longer on older machines with less memory). On a recent laptop, incremental builds usually complete within 10-30 seconds.
+
+### IDE
+
+There's no single editor of choice for working with Scala sources, as there are trade-offs associated with each available tool.
+
+IntelliJ IDEA has a Scala plugin, which is known to work with our codebase. Alternatively you can use Visual Studio Code with the [Metals IDE extension](https://marketplace.visualstudio.com/items?itemName=scalameta.metals).
+Both of these Scala IDE solutions provide navigation, refactoring, error reporting functionality, and integrated debugging.
+See [the Scala README](https://github.com/scala/scala#ide-setup) for instructions on using either IntelliJ IDEA or Metals with the Scala repository.
+
+Other alternative editors exist, such as Atom, Emacs, Sublime Text or jEdit. These are faster and much less memory/compute-intensive to run, but lack semantic services and debugging.
+
+We recognise that everyone has their own preferences for IDEs and editors, so ultimately the choice is up to your personal preference.
+
+## 3. Hack
+
+When hacking on your topic of choice, you'll be modifying Scala, compiling it and testing it on relevant input files.
+Typically, you would want to first make sure that your changes work on a small example and afterwards verify that nothing breaks
+by running a comprehensive test suite.
+
+We'll start by creating a `sandbox` directory (`./sandbox` is listed in the .gitignore of the Scala repository), which will hold a single test file and its compilation results. First, let's make sure that
+[the bug](https://github.com/scala/bug/issues/6725) is indeed reproducible by putting together a simple test and compiling and running it with the Scala compiler that we built using `sbt`. The Scala compiler that we just built is located in `build/pack/bin`.
+
+ 17:25 ~/Projects/scala (ticket/6725)$ mkdir sandbox
+ 17:26 ~/Projects/scala (ticket/6725)$ cd sandbox
+ 17:26 ~/Projects/scala/sandbox (ticket/6725)$ edit Test.scala
+ 17:26 ~/Projects/scala/sandbox (ticket/6725)$ cat Test.scala
+ object Test extends App {
+ val a = 1
+ val s = f"$a%s%n$a%s"
+ println(s)
+ }
+ 17:27 ~/Projects/scala/sandbox (ticket/6725)$ ../build/pack/bin/scalac Test.scala
+ 17:28 ~/Projects/scala/sandbox (ticket/6725)$ ../build/pack/bin/scala Test
+ 1%n1 // %n should've been replaced by a newline here
+
+### Implement
+
+Now, implement your bugfix or new feature!
+
+Here are also some tips & tricks that have proven useful in Scala development:
+
+* After building your working copy with the `compile` sbt task, there's no need to leave the comfort of your sbt shell to try it out: the REPL is available as the `scala` task, and you can also run the compiler using the `scalac` task. If you prefer to run the REPL outside sbt, you can generate the scripts in `build/quick/bin` using the `dist/mkQuick` task.
+* The sbt workflow is also great for debugging, as you can create a remote debugging session in your favorite IDE, and then activate the JVM options for the next time you run the `scala` or `scalac` tasks using:
+
+```
+> set javaOptions in compiler := List("-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=8002")
+> scalac test.scala
+[info] Running scala.tools.nsc.Main -usejavacp test.scala
+Listening for transport dt_socket at address: 8002
+```
+
+* Also see [the Scala README](https://github.com/scala/scala#incremental-compilation) for tips on speeding up compile times.
+* If after introducing changes or updating your clone, you get `AbstractMethodError` or other linkage exceptions, try the `clean` task and building again.
+* Don't underestimate the power of using `println` to print debug information. When starting with Scala, I spent a lot of time in the debugger trying to figure out how
+ things work. However, later I found out that print-based debugging is often more effective than jumping around. It's also useful to print stack traces to understand the flow of execution, for example what code executed before some action occurred. When working with `Trees`, you might want to use `showRaw` to get the `AST` representation.
+* You can publish your newly-built scala version locally using the `publishLocal` task in sbt.
+* It's convenient to enable the following local settings to speed up your workflow (put these in `local.sbt` in your working copy):
+
+```
+// skip docs for local publishing
+publishArtifact in (Compile, packageDoc) in ThisBuild := false
+// set version based on current sha, so that you can easily consume this build from another sbt project
+baseVersionSuffix := s"local-${Process("tools/get-scala-commit-sha").lines.head.substring(0, 7)}"
+// show more logging during a partest run
+testOptions in IntegrationTest in LocalProject("test") ++= Seq(Tests.Argument("--show-log"), Tests.Argument("--show-diff"))
+// if incremental compilation is compiling too much (should be fine under sbt 0.13.13)
+// antStyle := true
+```
+
+* Adding a macro to the `Predef` object is a pretty involved task: because of bootstrapping, the process has several steps. It could be useful to replicate the way `StringContext.f` itself is added. In short, you need to define your macro under `src/compiler/scala/tools/reflect/` and provide no implementation in `Predef` (it will look like `def fn = macro ???`). Now you have to set up the wiring. Add the name of your macro to `src/reflect/scala/reflect/internal/StdNames.scala`, add the needed links to it to `src/reflect/scala/reflect/internal/Definitions.scala`, and finally specify the bindings in `src/compiler/scala/tools/reflect/FastTrack.scala`. [Here's](https://github.com/folone/scala/commit/59536ea833ca16c985339727baed5d70e577b0fe) an example of adding a macro.
+
+### Where to Find Documentation
+
+The separate projects under Scala have varying amounts of documentation:
+
+##### The Scala Library
+
+Contributing to the Scala standard library is about the same as working on one of your own libraries.
+
+If documentation is necessary for some trait/class/object/method/etc in the Scala standard library, typically maintainers will include inline comments describing their design decisions or rationale for implementing things the way they have, if it is not straightforward.
+
+The Scala collections framework, part of the Scala standard library, is more complex. You should become familiar
+with its architecture, which is documented in [the Architecture of Scala Collections][collections-arch].
+The [Scala Collections Guide][collections-intro] is more general, covering the synchronous portion of collections. For parallel collections, there also exists a detailed [Scala Parallel Collections Guide][collections-par].
+
+##### The Scala Compiler
+
+Documentation about the internal workings of the Scala compiler is scarce, and most of the knowledge is passed around by forum ([Scala Contributors](https://contributors.scala-lang.org/) forum), chat-rooms (see `#scala-contributors` on [Discord][discord-contrib]), ticket, or word of mouth. However, the situation is steadily improving. Here are the resources that might help:
+
+* [Compiler internals videos by Martin Odersky](https://www.scala-lang.org/old/node/598.html) are quite dated, but still very useful. In this three-video
+ series Martin explains the general architecture of the compiler, and the basics of the front-end, which later became the `scala-reflect` module's API.
+* [Reflection documentation][reflect-overview] describes fundamental data structures (like `Tree`s, `Symbol`s, and `Type`s) that
+  are used to represent Scala programs and the operations defined on them. Since much of the compiler has been factored out and made accessible via the `scala-reflect` module, all the fundamentals needed for reflection are the same for the compiler.
+* [Scala compiler corner](https://lampwww.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/) contains extensive documentation about
+ most of the post-typer phases (i.e. the backend) in the Scala compiler.
+* [Scala Contributors](https://contributors.scala-lang.org/), a forum which hosts discussions about the core
+ internal design and implementation of the Scala system.
+
+##### Other Projects
+
+Tools like Scaladoc also welcome contributions. Unfortunately these smaller projects have less developer documentation. In these cases, the best thing to do is to directly explore the code base (which often contains documentation as inline comments) or to write to the appropriate maintainers for pointers.
+
+### Interlude
+
+To fix [the bug we're interested in](https://github.com/scala/bug/issues/6725) we've tracked the `StringContext.f` interpolator
+down to a macro implemented in `MacroImplementations.scala`. There we notice that the interpolator only processes conversions,
+but not tokens like `%n`. Looks like an easy fix.
+
+ 18:44 ~/Projects/scala/sandbox (ticket/6725)$ git diff
+ diff --git a/src/compiler/scala/tools/reflect/MacroImplementations.scala b/src/compiler/scala/tools/
+ index 002a3fce82..4e8f02084d 100644
+ --- a/src/compiler/scala/tools/reflect/MacroImplementations.scala
+ +++ b/src/compiler/scala/tools/reflect/MacroImplementations.scala
+ @@ -117,7 +117,8 @@ abstract class MacroImplementations {
+ if (!strIsEmpty) {
+ val len = str.length
+ while (idx < len) {
+ - if (str(idx) == '%') {
+ + def notPercentN = str(idx) != '%' || (idx + 1 < len && str(idx + 1) != 'n')
+ + if (str(idx) == '%' && notPercentN) {
+ bldr append (str substring (start, idx)) append "%%"
+ start = idx + 1
+ }
+
+After applying the fix and running `sbt compile`, our simple test case in `sandbox/Test.scala` started working!
+
+ 18:51 ~/Projects/scala/sandbox (ticket/6725)$ cd ..
+ 18:51 ~/Projects/scala (ticket/6725)$ sbt compile
+ ...
+ [success] Total time: 18 s, completed Jun 6, 2016 9:03:02 PM
+ Total time: 18 seconds
+
+ 18:51 ~/Projects/scala (ticket/6725)$ cd sandbox
+ 18:51 ~/Projects/scala/sandbox (ticket/6725)$ ../build/pack/bin/scalac Test.scala
+ 18:51 ~/Projects/scala/sandbox (ticket/6725)$ ../build/pack/bin/scala Test
+ 1
+ 1 // no longer getting the %n here - it got transformed into a newline
+
+### Test
+
+To guard your change against accidental breakage in the future, it is important to add tests.
+I have already written one test earlier, so that's a good start but not enough! Apart from obvious usages of our new functionality, we need to cover corner-cases as well.
+
+Adding tests to the test suite is as easy as moving them to the appropriate directory:
+
+* Code which should compile successfully, but doesn't need to be executed, needs to go into the [“pos” directory](https://github.com/scala/scala/tree/2.12.x/test/files/pos).
+* Code which should not compile needs to go into the [“neg” directory](https://github.com/scala/scala/tree/2.12.x/test/files/neg).
+* Code which should compile and get executed by the test suite needs to go into the [“run” directory](https://github.com/scala/scala/tree/2.12.x/test/files/run) and have a corresponding `.check` file with the expected output; a minimal sketch is shown after this list. You will get test failures if the content of a `.check` file is different from what the test produces while running. If the change in the output is an expected product of your work, you probably don't want to edit the `.check` file by hand; to make partest update it for you, run it with the `--update-check` flag, like so: `./test/partest --update-check path/to/test.scala`. For more information on partest, please refer to its [documentation](https://github.com/scala/scala-partest).
+* Everything that can be unit-tested should go into the ["junit" directory](https://github.com/scala/scala/tree/2.12.x/test/junit).
+* Property-based tests go into the ["scalacheck" directory](https://github.com/scala/scala/tree/2.12.x/test/scalacheck).
+
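+For illustration, a minimal `run` test for the `%n` fix could look like the following sketch (the file name is hypothetical; real tests usually follow the `tNNNN` ticket-number naming convention):
+
+```scala
+// test/files/run/t6725-example.scala (hypothetical name)
+object Test extends App {
+  // After the fix, %n is passed through to the formatter and becomes a platform line break.
+  print(f"1%n1%n")
+}
+```
+
+The corresponding `test/files/run/t6725-example.check` file would then contain the two expected output lines, `1` and `1`.
+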
+Here are some more testing tips:
+
+* If you have several tests and want to run only those whose names match some regular expression, you can use `partest-ack` in the `tools` directory: `./tools/partest-ack "dottype"`. Note, however, that `partest-ack` was removed in 2.12.
+* If you want to run all scalacheck tests from sbt use `scalacheck/testOnly`
+* To run a scalacheck test by name when in sbt, pass its fully qualified name to `scalacheck/testOnly`, for example `scalacheck/testOnly scala.tools.nsc.scaladoc.HtmlFactoryTest`
+* If your tests fail in the following way:
+
+ test.bc:
+ [echo] Checking backward binary compatibility for scala-library (against 2.11.0)
+ [mima] Found 2 binary incompatibiities
+ [mima] ================================
+ [mima] * synthetic method
+ [mima] scala$package$Class$method(java.lang.String)Unit in trait
+ [mima] scala.package.Class does not have a correspondent in old version
+ [mima] * synthetic method
+ [mima] scala$package$AnotherClass$anotherMethod(java.lang.String)Unit in trait
+ [mima] scala.package.AnotherClass does not have a correspondent in old version
+ [mima] Generated filter config definition
+ [mima] ==================================
+ [mima]
+ [mima] filter {
+ [mima] problems=[
+ [mima] {
+ [mima] matchName="scala.package.Class$method"
+ [mima] problemName=MissingMethodProblem
+ [mima] },
+ [mima] {
+ [mima] matchName="scala.package.AnotherClass$anotherMethod"
+ [mima] problemName=MissingMethodProblem
+ [mima] }
+ [mima] ]
+ [mima] }
+ [mima]
+
+ ...
+ Finished: FAILURE
+
+This means your change is backward or forward binary incompatible with the specified version (the check is performed by the [migration manager](https://github.com/typesafehub/migration-manager), MiMa). The error message tells you what you need to add to `project/MimaFilters.scala` to make the error go away. If you are getting this on an internal or experimental API, it should be safe to add the suggested exclusions to the filter configuration. Otherwise, you might want to target a newer version of Scala for this change.
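+
+As a hedged sketch (not the exact contents of `project/MimaFilters.scala`, which may organize its filters differently), the exclusions for the two incompatibilities reported in the example log above could look like this, using MiMa's `ProblemFilters` API:
+
+```scala
+import com.typesafe.tools.mima.core._
+
+// Match names are taken from the generated filter definition in the example log.
+val exampleMimaFilters: Seq[ProblemFilter] = Seq(
+  ProblemFilters.exclude[MissingMethodProblem]("scala.package.Class$method"),
+  ProblemFilters.exclude[MissingMethodProblem]("scala.package.AnotherClass$anotherMethod")
+)
+```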
+
+### Verify
+
+Now, to make sure that my fix doesn't break anything, I need to run the test suite. The Scala test suite uses [JUnit](https://junit.org/junit4/) and [partest][partest-guide], a tool we wrote for testing Scala.
+Run `sbt test` and `sbt partest` to run all the JUnit and partest tests, respectively.
+`partest` (not `sbt partest`) also allows you to run a subset of the tests using wildcards:
+
+ 18:52 ~/Projects/scala/sandbox (ticket/6725)$ cd ../test
+ 18:56 ~/Projects/scala/test (ticket/6725)$ partest files/run/*interpol*
+ Testing individual files
+ testing: [...]/files/run/interpolationArgs.scala [ OK ]
+ testing: [...]/files/run/interpolationMultiline1.scala [ OK ]
+ testing: [...]/files/run/interpolationMultiline2.scala [ OK ]
+ testing: [...]/files/run/sm-interpolator.scala [ OK ]
+ testing: [...]/files/run/interpolation.scala [ OK ]
+ testing: [...]/files/run/stringinterpolation_macro-run.scala [ OK ]
+ All of 6 tests were successful (elapsed time: 00:00:08)
+
+## 4. Publish
+
+After development is finished, it's time to publish the code and submit your patch for discussion and potential inclusion into Scala.
+In a nutshell, this involves:
+
+1. making sure that your code and commit messages are of high quality,
+2. clicking a few buttons in the GitHub interface,
+3. assigning one or more reviewers who will look through your pull request.
+
+Let's go into each of these points in more detail.
+
+### Commit
+
+The [Git Basics](https://git-scm.com/book/en/v2/Git-Basics-Getting-a-Git-Repository) chapter in the Git online book covers most of the basic workflow during this stage.
+There are two things you should know here:
+
+1. Commit messages are often the only way to understand the intentions of authors of code written a few years ago. Thus, writing a quality commit message is of utmost importance. The more context you provide for the change you've introduced, the larger the chance that some future maintainer will understand your intentions. Consult [the pull request policies](https://github.com/scala/scala/blob/2.12.x/CONTRIBUTING.md) for more information about the desired style of your commits.
+2. Keeping Scala's git history clean is also important. Therefore we won't accept pull requests for bug fixes that have more than one commit. For features, it is okay to have several commits, but all tests need to pass after every single commit. To clean up your commit structure, you want to [rewrite history](https://git-scm.com/book/en/v2/Git-Branching-Rebasing) using `git rebase` so that your commits are against the latest revision of `master`.
+
+Once you are satisfied with your work, have synced with `master`, and have cleaned up your commits, you are ready to submit a patch to the central Scala repository. Before proceeding, make sure you have pushed all of your local changes to your fork on GitHub.
+
+ 19:22 ~/Projects/scala/test (ticket/6725)$ git add ../src/compiler/scala/tools/reflect/MacroImplementations.scala
+ 19:22 ~/Projects/scala/test (ticket/6725)$ git commit
+ [ticket/6725 3c3098693b] SI-6725 `f` interpolator now supports %n tokens
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+ 19:34 ~/Projects/scala/test (ticket/6725)$ git push origin ticket/6725
+ Username for 'https://github.com': xeno-by
+ Password for 'https://xeno-by@github.com':
+ Counting objects: 15, done.
+ Delta compression using up to 8 threads.
+ Compressing objects: 100% (8/8), done.
+ Writing objects: 100% (8/8), 1.00 KiB, done.
+ Total 8 (delta 5), reused 0 (delta 0)
+ To https://github.com/xeno-by/scala
+ * [new branch] ticket/6725 -> ticket/6725
+
+### Submit
+
+Now we submit our proposed patch. Navigate to your branch in GitHub (for me, it was `https://github.com/xeno-by/scala/tree/ticket/6725`)
+and click the pull request button to submit your patch as a pull request to Scala. If you've never submitted patches to Scala, you will
+need to sign the contributor license agreement, which [can be done online](https://www.lightbend.com/contribute/cla/scala) within a few minutes.
+
+
+
+### Review
+
+After the pull request has been submitted, you need to pick a reviewer (usually the person you've contacted in the beginning of your
+workflow) and be ready to elaborate and adjust your patch if necessary. In this example, we picked Martin, because we had such a nice chat on the mailing list:
+
+
+
+### Merge
+
+After your reviewer is happy with your code (usually signaled by a LGTM — “Looks good to me”), your job is done.
+Note that there can be a gap between a successful review and the merge, because not every reviewer has merge rights. In that case, someone else from the team will pick up your pull request and merge it.
+So don't be confused if your reviewer says “LGTM”, but your code doesn't get merged immediately.
+
+[collections-arch]: {% link _overviews/core/architecture-of-scala-collections.md %}
+[collections-intro]: {% link _overviews/collections-2.13/introduction.md %}
+[collections-par]: {% link _overviews/parallel-collections/overview.md %}
+[reflect-overview]: {% link _overviews/reflection/overview.md %}
+[partest-guide]: {% link _overviews/contribute/partest-guide.md %}
+[documentation]: {% link _overviews/contribute/documentation.md %}
+[contrib-forum]: https://contributors.scala-lang.org/
+[discord-contrib]: https://discord.com/invite/scala
diff --git a/_overviews/contribute/inclusive-language-guide.md b/_overviews/contribute/inclusive-language-guide.md
new file mode 100644
index 0000000000..d32b5144a8
--- /dev/null
+++ b/_overviews/contribute/inclusive-language-guide.md
@@ -0,0 +1,136 @@
+---
+title: Inclusive Language Guide
+num: 2
+---
+
+We are committed to providing a friendly, safe and welcoming environment for
+all, regardless of age, body size, disability, ethnicity, sex characteristics,
+gender identity and expression, level of experience, education, socio-economic
+status, nationality, personal appearance, race, religion, sexual identity
+and orientation, or other such characteristics.
+
+Language is a powerful vehicle of ideas and representations, and as such, can highlight, accentuate, or blur certain characteristics of the world.
+Language -- in its use and structure -- may bias our perception of the world, sometimes to the disadvantage of some people.
+Different language strategies have therefore been suggested to promote more inclusive forms of language, echoing the need for more equal treatment for all.
+
+This inclusive language guide is therefore intended to help us adopt a more inclusive way of communicating.
+Although the present guide does not exhaustively cover all issues pertaining to non-inclusive language, it covers the most important issues we are currently aware of.
+
+Contributions made to the core Scala projects and their documentation -- including to this website -- should follow this guide.
+
+## Non-gendered language
+
+The use of *He*, *Him*, *His*, *Man* and *Men* should be avoided.
+Although these terms are intended to refer to any gender (male, female, other, unknown, or irrelevant), they imply that the subject is male and therefore exclude all other genders.
+Instead, use the singular *they*, as already used by famous authors like Jane Austen.
+
+Example of the use of singular they:
+
+> When a developer wants to contribute to a project, they open a pull request.
+
+Although *they* refers to a single person, we conjugate the verb with the plural form.
+This is similar to the polite form of pronouns in certain languages, such as "Sie" in German or "vous" in French.
+
+When possible, avoid (combined) words that refer to a specific gender, and use gender-neutral alternatives instead.
+For example:
+
+* *man* or *woman* -> *person*
+* *chairman* -> *chairperson*
+
+## The words easy, simple, quick, and trivial
+
+What might be easy for you might not be easy for others.
+The same applies to other words like *quick* or *simple*.
+When these words are used in the positive or superlative forms, try eliminating them from the sentence, because usually the same meaning can be conveyed without them.
+
+Example of a positive form:
+
+> You can then simply execute the program with the `run` command.
+
+can be replaced with
+
+> You can then execute the program with the `run` command.
+
+without changing the meaning of the sentence.
+
+Example of a superlative form:
+
+> The foobar method is the easiest way to get started with our library.
+
+can be replaced with
+
+> We show here how to use the foobar method to get started with our library.
+
+However, the comparative form of these adjectives and adverbs can be used when relevant.
+
+Example of a comparative form:
+
+> The foobar method is quicker to get started with than the baz method.
+
+Similarly, the word *just* is usually redundant and can be removed without altering the meaning.
+
+Example:
+
+> You can just add these settings to your build.
+
+can be replaced with
+
+> You can add these settings to your build.
+
+Of course, every situation is different, and there may be cases where using "the easy words" is still the best thing to do.
+In that case, it should be a deliberate decision to use them, taking the above considerations into account.
+
+## Specific loaded words
+
+Some words may have a derogatory connotation and/or have clear oppressive origins.
+Avoid these words to the greatest extent possible, and use neutral alternatives instead.
+Currently, the following words, used for common computer science concepts, are discouraged.
+This list is neither comprehensive nor definitive, and it can evolve over time.
+
+* **blacklist/whitelist** \
+ While the etymology of these words has no relation to racism, their use suggests an association between the color black and some form of badness or exclusion, and between the color white and some form of goodness or inclusion.
+ Prefer alternatives when possible.
+ Several alternatives have been proposed but none sticks as "the one". We suggest using the pair *denylist*/*allowlist* or the pair *excludelist*/*includelist*, as these are generic enough to replace most uses of *blacklist*/*whitelist*.
+* **master/slave** \
+ Never use *slave*.
+ Never use *master* in conjunction with *slave*.
+ Depending on the specific architecture, use one of the following alternatives instead: *controller*/*worker*, *primary*/*secondary*, *leader*/*follower*, etc.
+ When in doubt, if you cannot choose, *primary*/*secondary* is always a decent fallback. \
+ When used with the meaning of *teacher*, *expert*, *guide*, or *reference*, the word *master* is not specifically discouraged.
+ For example, the term *Master of the arts* is acceptable. \
+ Note: there exists a broader movement of using `main` instead of `master` as the default git branch, led by GitHub and the git project themselves, and which we encourage people to follow as well.
+* **sanity check** \
+ Prefer *confidence check*.
+* **segregated** \
+ Computer science concepts like the *interface segregation principle* and *segregated networks* present segregation as being desirable, instead of bad.
+ Prefer alternatives like *separation of concerns* and *segmented networks*.
+* **guru** \
+ While a *guru* initially refers to a respected spiritual leader, it also designates the chief of a sect.
+ Both are of a spiritual nature and are ambiguous.
+ If possible, use a more precise term such as *teacher* or *expert*.
+
+A good source with explainers and references can be found at [https://github.com/dialpad/inclusive-language](https://github.com/dialpad/inclusive-language).
+
+Keep in mind that your particular application domain may contain its own share of domain-specific loaded words.
+We encourage you to research inclusive language guidelines applicable to your domain.
+
+You may want to use automated software like [In Solidarity](https://github.com/apps/in-solidarity) to steer contributors away from loaded words.
+
+## Dysphemism
+
+Dysphemisms, the opposite of euphemisms, can be disturbingly violent if you are not used to them.
+Examples include the English expressions "pull the trigger" (act on a decision) and "bite the bullet" (endure hardship).
+Prefer the direct meaning instead.
+
+## Backward compatibility
+
+Sometimes, we have existing code, APIs or commands that do not follow the above recommendations.
+It is generally advisable to perform renaming to address the issue, but that should not be done to the detriment of backward compatibility (in particular, backward binary compatibility of libraries).
+Deprecated aliases should be retained when possible.
+
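+For example, a rename with a deprecated forwarding alias might look like this minimal sketch (the names are hypothetical):
+
+```scala
+trait JobScheduler {
+  /** Runs only the jobs whose ids are in the given allowlist. */
+  def setAllowlist(ids: Set[String]): Unit
+
+  /** Retained for backward compatibility; prefer `setAllowlist`. */
+  @deprecated("Use setAllowlist instead", since = "2.0.0")
+  final def setWhitelist(ids: Set[String]): Unit = setAllowlist(ids)
+}
+```
+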
+Sometimes it is not possible to preserve backward compatibility through renaming, for example for methods intended to be overridden by user-defined subclasses.
+In those cases, we recommend keeping the old names, but documenting (e.g., in Scaladoc comments) that they are named as they are for historical reasons and to preserve compatibility, and what their intended name would be.
+
+## See also
+
+* Our [code of conduct](https://scala-lang.org/conduct/).
diff --git a/_overviews/contribute/index.md b/_overviews/contribute/index.md
new file mode 100644
index 0000000000..1daa8cc13b
--- /dev/null
+++ b/_overviews/contribute/index.md
@@ -0,0 +1,287 @@
+---
+title: Becoming a Scala OSS Contributor
+num: 1
+
+explore_resources:
+ - title: Who can contribute?
+ description: "Open source is for everyone! If you are reading this you are already a contributor..."
+ icon: "fa fa-hand-sparkles"
+ link: "#who-can-contribute-to-open-source"
+ - title: Why should I contribute?
+ description: "Giving back to the community has many benefits..."
+ icon: "fa fa-circle-question"
+ link: "#why-should-i-contribute-to-open-source"
+ - title: How can I contribute?
+ description: "From friendly documentation to coding a bug-fix, there is lots to do..."
+ icon: "fa fa-clipboard-list"
+ link: "#how-can-i-contribute-to-open-source"
+ - title: Where should I contribute?
+ description: "If you are already using OSS, or are curious about projects, you can begin right away..."
+ icon: "fa fa-check-to-slot"
+ link: "#how-do-i-choose-where-to-contribute"
+
+compiler_resources:
+ - title: "Join the Compiler Issue Spree"
+ description: "A tri-weekly event where you can get mentored on the compiler. Register for participation here."
+ icon: "fa fa-clipboard-user"
+ link: https://airtable.com/app94nwzow5R6W1O6/pagvjIzxYnqTTlhwY/form
+ - title: "Compiler Academy videos"
+ description: "In-depth tours of the Scala 3 compiler's internals, aimed to help you get started."
+ icon: "fa fa-circle-play"
+ link: https://www.youtube.com/channel/UCIH0OgqE54-KEvYDg4LRhKQ
+ - title: "Scala 3 contributing guide"
+ description: "Guide to the Scala 3 Compiler and fixing an issue"
+ icon: "fa fa-code-merge"
+ link: https://dotty.epfl.ch/docs/contributing/index.html
+
+spree_resources:
+ - title: "Scala open source sprees"
+ description: "Learn about the next upcoming community spree"
+ icon: "fa fa-hand-holding-heart"
+ link: "https://github.com/scalacenter/sprees"
+ - title: "Upcoming conferences"
+ description: "See upcoming Scala conferences where you can meet open source maintainers."
+ icon: "fa fa-calendar-check"
+ link: "https://www.scala-lang.org/events/"
+
+scala_resources:
+ - title: Documentation
+ description: "Library API docs, new guides on docs.scala-lang.org, and help with scala-lang.org."
+ icon: fa fa-book
+ link: /contribute/documentation.html
+ - title: Bug fixes
+ description: "Issues with the tools, core libraries and compiler. Also, you can help us by reporting bugs."
+ icon: fa fa-bug
+ link: /contribute/guide.html
+ - title: Code Reviews
+ description: "Review pull requests against scala/scala, scala/scala3, scala/scala-lang, scala/docs.scala-lang, and others."
+ icon: fa fa-eye
+ link: /contribute/codereviews.html
+ - title: Core Libraries
+ description: "Update and expand the capabilities of the core (and associated) Scala libraries."
+ icon: fa fa-clipboard
+ link: /contribute/corelibs.html
+ - title: IDE and Build Tools
+ description: "Enhance the Scala tools with features for build tools, IDE plug-ins and other related projects."
+ icon: fa fa-terminal
+ link: /contribute/tools.html
+ - title: Compiler/Language
+ description: "Larger language features and compiler enhancements including language specification and SIPs."
+ icon: fa fa-cogs
+ link: /contribute/guide.html#larger-changes-new-features
+
+library_resources:
+ - title: Library Authors Guide
+ description: "Lists all the tools that library authors should setup to publish and document their libraries."
+ icon: "fa fa-book"
+ link: "/overviews/contributors/index.html"
+ - title: Make Projects more Inclusive
+ description: "How you can write code and documentation that welcomes all"
+ icon: "fa fa-door-open"
+ link: "inclusive-language-guide.html"
+ - title: Create a Welcoming Community
+    description: "Our code of conduct is a practical agreement for a healthy community"
+ icon: "fa fa-handshake-simple"
+ link: "https://scala-lang.org/conduct"
+  - title: Binary Compatibility Guide
+ description: "Evolve your library over time, giving users the confidence to upgrade safely."
+ icon: "fa fa-puzzle-piece"
+ link: "/overviews/core/binary-compatibility-for-library-authors.html"
+---
+
+Welcome to the guide on contributing to all parts of Scala's open-source ecosystem!
+
+## Newcomers' FAQ
+
+If you are reading this page, we welcome you, regardless of your background, to begin contributing to Scala's
+open-source ecosystem. We have answered some common questions for you below:
+
+{% include inner-documentation-sections.html links=page.explore_resources %}
+
+## Ways to start today
+
+### Join the nearest open source spree
+
+The [Scala Center](https://scala.epfl.ch) hosts open source sprees, colocated with other Scala events.
+In the spree, regular project maintainers will mentor you to create your first contribution to the project.
+
+{% include inner-documentation-sections.html links=page.spree_resources %}
+
+### So you want to improve the Scala 3 compiler...
+
+The [Scala 3 compiler](https://github.com/scala/scala3) is an open source project.
+If you are curious about contributing but don't know how to begin, the [Scala Center](https://scala.epfl.ch)
+runs the **Scala Compiler Academy** project to onboard and educate new people to the project. You can join the regular
+**Compiler Issue Spree**, watch in-depth videos, and read the contributing guide:
+
+{% include inner-documentation-sections.html links=page.compiler_resources %}
+
+#### Which areas are perfect for newcomers?
+- Adding new linting options, which help enforce cleaner code.
+- Improving the clarity of error messages, so that the user understands better what went wrong.
+- Adding IDE quick-fix actions to error messages, e.g. PR [scala/scala3#18314](https://github.com/scala/scala3/pull/18314).
+
+### So you want to write a library...
+
+Read these guides if you are a maintainer of a library, or are thinking of starting a new project:
+
+{% include inner-documentation-sections.html links=page.library_resources %}
+
+### Want to improve Scala itself?
+The Scala programming language is an open source project with a very
+diverse community, where people from all over the world contribute their work,
+with everyone benefiting from friendly help and advice, and
+kindly helping others in return.
+
+Read on to learn how to join the Scala community and help
+everyone make things better.
+
+## Contributing to the Scala project
+
+**What Can I Do?**
+That depends on what you want to contribute. Below are some getting started resources for different contribution domains. Please read all the documentation and follow all the links from the topic pages below before attempting to contribute, as many of the questions you have will already be answered.
+
+### Reporting bugs
+
+See our [bug reporting guide][bug-reporting-guide] to learn
+how to efficiently report a bug.
+
+### Contribute
+
+Coordination of contribution efforts takes place on
+[Scala Contributors](https://contributors.scala-lang.org/).
+
+{% include inner-documentation-sections.html links=page.scala_resources %}
+
+### Guidelines
+
+When contributing, please follow:
+
+* The [Scala Code of Conduct](https://scala-lang.org/conduct/)
+* The [Inclusive Language Guide][inclusive-language-guide]
+
+### Community tickets
+
+All issues can be found in the [Scala bug tracker](https://github.com/scala/bug), or the [Scala 3 issue tracker](https://github.com/scala/scala3/issues). Most issues are labeled
+to make it easier to find issues you are interested in.
+
+### Tools and libraries
+
+The Scala ecosystem includes a great many diverse open-source projects
+with their own maintainers and community of contributors. Helping out
+one of these projects is another way to help Scala. Consider lending
+a hand on a project you're already using. Or, to find out about
+other projects, see the
+[Libraries and Tools section](https://scala-lang.org/community/#community-libraries-and-tools)
+on our Community page.
+
+### Scala community build
+
+The Scala community build enables the Scala compiler team
+to build and test a corpus of
+Scala open source projects
+against development versions of the Scala compiler and standard
+library in order to discover regressions prior to releases.
+The build uses Lightbend's
+[dbuild](https://github.com/typesafehub/dbuild) tool,
+which leverages [sbt](https://www.scala-sbt.org).
+
+If you're the maintainer -- or just an interested user! -- of an
+open-source Scala library or tool, please visit the
+[community build documentation](https://github.com/scala/community-build/wiki)
+for guidelines on what projects are suitable for the community build
+and how projects can be added.
+
+## Your questions, answered
+
+{% capture backButton %}
+
+{% endcapture %}
+
+### Who can contribute to open source?
+{{backButton}}
+- **Everyone:** No matter your skills or background, non-technical or otherwise, there is always
+ [some way](#how-can-i-contribute-to-open-source) you can contribute to a project.
+- **Community organisers:** Communities often form around open source projects, perhaps you would like to help grow a
+ community.
+- **Scala learners:** If you are at the start of your Scala journey, once you have a basic understanding of everyday
+ Scala programming, becoming familiar with open source code will show you new techniques, helping you to improve
+ your expertise.
+- **Got a cool idea?** Perhaps you have gained confidence in your skills and are looking to give back to the community,
+  start a new project that fills that perfect niche, or maybe build the life-changing tool that no one knew they needed.
+
+### Why should I contribute to open source?
+{{backButton}}
+- **The world is built on OSS:**
+ Open Source Software (OSS) libraries are the flesh on top of the bone structure of the core language itself.
+  They power the vast majority of commercial and non-commercial projects out there alike.
+- **Become more visible:**
+ Contributing is a great way to strengthen your CV. It's also good from the community standpoint: if you do it
+  consistently, with time, you get to know people, and people get to know you. Such networking can lead to all
+ sorts of opportunities.
+- **Learn by doing something practical:** Contributing to open source libraries is a great way to learn Scala.
+ A standard practice in open source software is code review – which means you are going to get expert feedback
+ about your code. Learning together with feedback from competent people is much faster than making all the
+ mistakes and figuring them out alone.
+- **Have fun and help out:** Finally, by contributing you improve the projects you are using yourself. Being a part of
+ a maintainer team can be a source of personal satisfaction, and working on an innovative library can be a lot of fun.
+
+The above benefits are something good to achieve regardless of your level of experience.
+
+### How can I contribute to open source?
+{{backButton}}
+- **Documentation:** Often it is outdated, incomplete, or contains mistakes. If you see a way to improve the
+ documentation for a project you are using, you should consider if the project is accepting contributions,
+ in which case you can submit a pull request to include your changes.
+- **Building community:** All projects have users, and users come together to form communities. Managing and growing
+ communities takes coordination and effort.
+- **Issue minimization:** Many of the reported issues found on a project's issue tracker are hard to reproduce and the
+ reproduction involves a lot of code. However, it is very frequently the case that only a tiny fraction of the
+ reported setup and code is necessary to reproduce the issue. More reproduction code means more work for the
+ maintainer to fix an issue. You can help them considerably by investigating already reported issues in an attempt
+ to make their reproduction as small as possible.
+- **Issue reproduction:** Some reported issues lack reproduction instructions at all! If a maintainer can't
+ reproduce it, they won't be able to fix it. Pinning down exact conditions that make an issue manifest is another
+ way to contribute.
+- **Fixing a bug:** If you are comfortable with reproducing an issue, perhaps you would like to trace its
+ origin in code, and even try to build a solution that prevents the issue from occurring.
+- **Adding a feature:** Sometimes projects maintain lists of planned or requested features, and you could assist
+  in bringing those ideas to reality. Beware, though: you should only do this if the core maintainers
+  have already approved the idea for the feature; they are not obligated to accept your additions!
+- **Feel free to ask for help:** While implementing or fixing the feature, it is important to ask for help early
+ when you feel stuck. Even if your code doesn't work, don't hesitate to submit a pull request while stating clearly
+  that you need help. You can find more information about the guidelines of good contribution in the
+ [talk by Seth Tisue](https://youtu.be/DTUpSTrnI-0) on how to be a good contributor.
+- **Open-source your own project:** Do you have a pet project you are working on? Is there anything you're working
+  on at work, parts of which are generic enough that you can share them online? Open-sourcing your work is a way to
+ solve a problem for other programmers who may also have it. If you are interested in going open-source, the
+ [Library Author's Guide](https://docs.scala-lang.org/overviews/contributors/index.html) is an
+ excellent resource on how to get started.
+
+### How do I choose where to contribute?
+{{backButton}}
+- **Ask yourself, what am I using?** The best project to contribute to is the one that you are using yourself.
+ Take an inventory of your work and hobby projects: what OSS libraries do they use? Have you ever encountered bugs in
+ them? Or have you ever wanted a certain feature implemented? Pick a bug and a feature and commit to fixing or
+ implementing it. Clone the project you are trying to improve, figure out how the tests are written and run there.
+ Write a test for your feature or bug.
+- **Try out an awesome library:** [Scaladex](https://index.scala-lang.org/awesome) is a great place to find new
+ libraries. If you are passionate about contributing but don't see any attractive opportunities to contribute
+ to projects you are already using, try learning a new Scala library, push it to its limits and see where it can
+ be improved. For best results, spend a lot of time with the library to get a feel of what's important
+ and what can improve.
+- **Lookout for announcements:** You may want to keep an eye on the Scala Center
+ [LinkedIn](https://www.linkedin.com/company/scala-center/) and [Bluesky](https://bsky.app/profile/scala-lang.org) or [X](https://x.com/scala_lang) to stay up-to-date with the possible contribution opportunities. For example, every year, the Scala Center participates
+ in the Google Summer of Code program where you are paid to work on open source Scala projects over the course
+ of summer.
+{{backButton}}
+
+
+
+[bug-reporting-guide]: {% link _overviews/contribute/bug-reporting-guide.md %}
+[inclusive-language-guide]: {% link _overviews/contribute/inclusive-language-guide.md %}
diff --git a/_overviews/contribute/partest-guide.md b/_overviews/contribute/partest-guide.md
new file mode 100644
index 0000000000..c8eb5cbf02
--- /dev/null
+++ b/_overviews/contribute/partest-guide.md
@@ -0,0 +1,92 @@
+---
+title: Running the Test Suite
+num: 13
+---
+
+Partest is a custom parallel testing tool that we use to run the test suite for the Scala compiler and library. Go to the Scala project folder in your local checkout and run it via `sbt`, `ant`, or standalone, as follows.
+
+## Using sbt
+
+The test suite can be run from the sbt console with:
+
+```
+sbt:root> partest
+```
+
+You can get a summary of the usage by running `partest --help`.
+
+If you would like to run particular tests, pass the test paths as arguments:
+
+```
+sbt:root> partest test/files/pos/bounds.scala test/scaladoc/run/diagrams-base.scala
+```
+
+To run only the Scaladoc tests, use `--srcpath` with the location of the tests:
+
+```
+sbt:root> partest --srcpath scaladoc
+```
+
+## Using ant
+
+> Please note that support for ant was removed on the 2.12 branch.
+
+The test suite can be run by using ant from the command line:
+
+ $ ant test.suite
+
+## Standalone
+
+Please note that the standalone scripts mentioned below were removed in 2.12.2; sbt is the preferred way to run the test suite.
+
+There are launch scripts `partest` and `partest.bat` in the `test` folder of the scala project. To have partest run failing tests only and print details about test failures to the console, you can use
+
+ ./test/partest --show-diff --show-log --failed
+
+You can get a summary of the usage by running partest without arguments.
+
+* Most commonly you want to invoke partest with an option that tells it which part of the tests to run. For example `--all`, `--pos`, `--neg` or `--run`.
+* You can test individual files by specifying individual test files (`.scala` files) as options. Several files can be tested if they are from the same category, e.g., `pos`.
+* You can enable output of the log and diff using the `--show-log` and `--show-diff` options.
+* If you get into real trouble, and want to find out what partest does, you can run it with option `--verbose`. This info is useful as part of bug reports.
+* Set a custom path from where to load classes: `-classpath <path>` and `-buildpath <path>`.
+* You can use the `SCALAC_OPTS` environment variable to pass command line options to the compiler.
+* You can use the `JAVA_OPTS` environment variable to pass command line options to the runner (e.g., for `run/jvm` tests).
+* The launch scripts run partest as follows:
+
+      scala -cp <path to partest classes> scala.tools.partest.nest.NestRunner
+
+ Partest classes from a `quick` build, e.g., can be found in `./build/quick/classes/partest/`.
+
+ Partest will tell you where it loads compiler/library classes from by adding the `partest.debug` property:
+
+      scala -Dpartest.debug=true -cp <path to partest classes> scala.tools.partest.nest.NestRunner
+
+
+
+## ScalaCheck tests
+
+Tests that depend on [ScalaCheck](https://github.com/rickynils/scalacheck) can be added under folder `./test/files/scalacheck`. A sample test:
+
+ import org.scalacheck._
+ import Prop._
+
+ object Test {
+      val prop_ConcatLists = forAll { (l1: List[Int], l2: List[Int]) =>
+ l1.size + l2.size == (l1 ::: l2).size
+ }
+
+ val tests = List(("prop_ConcatLists", prop_ConcatLists))
+ }
+
+## Troubleshooting
+
+### Windows
+
+Some tests might fail because line endings in the `.check` files and the produced results do not match. In that case, set either
+
+ git config core.autocrlf false
+
+or
+
+ git config core.autocrlf input
diff --git a/_overviews/contribute/scala-internals.md b/_overviews/contribute/scala-internals.md
new file mode 100644
index 0000000000..738746f9d3
--- /dev/null
+++ b/_overviews/contribute/scala-internals.md
@@ -0,0 +1,60 @@
+---
+title: Scala Contributors Forum
+num: 9
+---
+
+The [Scala Contributors Forum][scala-contributors] is where discussions about the Scala ecosystem
+occur, from the perspectives of core compiler, documentation and library contributors. It features updates from the
+Scala Center, along with technical and logistical discussions concerning bugs, bug fixes, documentation, improvements,
+new features and other contributor related topics.
+
+> The now legacy [scala-internals mailing list](https://groups.google.com/d/forum/scala-internals) used to fulfil this
+> purpose, but has since expanded to encompass more topics in the new [forum][scala-contributors].
+
+## Coordinating on Scala Contributors
+
+Prior to commencing on contribution work on larger changes to the Scala project, it is recommended (but not required)
+that you make a post on [Scala Contributors][scala-contributors] announcing your intention.
+It's a great time to invite any help, advice or ask any questions you might have. It's also a great place to meet peers,
+one of whom will probably be reviewing your contribution at some point.
+For smaller bug fixes or documentation changes where the risk of effort duplication is minimal, you can skip this post.
+
+To help users sort through the posts, please apply one of the following categories when you start a
+new post:
+
+| Category | Topics |
+|-----------------------------|---------------------------------------------------------------------|
+| `Documentation` | Documentation, e.g. docs.scala-lang.org, API (scaladoc), etc. |
+| `Compiler` | Bug reporting/fixing, Scala compiler discussions/issues |
+| `Tooling` | Tools including sbt, IDE plugins, testing, scaladoc generator, etc. |
+| `Scala Standard Library` | Core libraries |
+| `Scala Platform` | Extension libraries |
+| `Language Design` | Scala language feature discussions / informal proposals |
+| `Scala Improvement Process` | Scala language feature formal proposals |
+| `Meta Discourse` | Administrative/coordination topics |
+| `Community` | Discussions about events, community organising |
+
+### Why It's a Good Idea
+
+While it is optional to announce your intentions/work items on [Scala Contributors][scala-contributors] before starting, it is a recommended thing to do for a number of reasons:
+
+* To attempt to cut down on duplicate effort (i.e. to avoid two people working on the same bug at the same time without coordinating effort).
+* Related to the above: to allow the compiler team and core committers to warn of or smooth over potential merge conflicts between separate bugs that might affect the same code.
+* Potentially someone has already thought about or even worked on that issue or a related one, and has valuable insight
+that might save you time (including warnings about what you might find and may want to avoid - perhaps one option
+already tried led to no benefit).
+* You might find a group of impassioned individuals who want to volunteer and help you. You will have the momentum since
+you posted first, so then it's up to you to decide if you want their help or not.
+* Posting could start a dialog with a potential reviewer, smoothing the later stages of your contribution before
+merging your changes.
+* There are a lot of nice people waiting to talk to you on [Scala Contributors][scala-contributors], you might be
+surprised how valuable and pleasant you find the experience of talking to them.
+
+Even if you do not wish to post on [Scala Contributors][scala-contributors], please feel welcome to make contributions
+anyway, as posting to the forum is *not* a requirement for them to be accepted. For smaller, self-contained bug fixes a
+post is less important, but larger issues and features take more time to evaluate and accept. For large contributions
+we therefore strongly recommend that you do post to announce your intention before you spend a long time implementing
+it: doing so will help you determine whether there is broad community support for your change, making it more likely
+that your contribution will be accepted.
+
+[scala-contributors]: https://contributors.scala-lang.org
diff --git a/_overviews/contribute/scala-standard-library-api-documentation.md b/_overviews/contribute/scala-standard-library-api-documentation.md
new file mode 100644
index 0000000000..27f2093d93
--- /dev/null
+++ b/_overviews/contribute/scala-standard-library-api-documentation.md
@@ -0,0 +1,126 @@
+---
+title: Contribute to API Documentation
+num: 6
+---
+
+This page is specific to API documentation contributions – that is, API
+documentation for
+[Scala's standard library](https://scala-lang.org/api/current/#package) –
+sometimes referred to as Scaladoc contributions.
+
+For contributions to tutorial and guide-style documentation on
+[docs.scala-lang.org][home],
+see [Add New Guides/Tutorials][add-guides].
+
+*Please note, these instructions cover documentation contributions to the Scala core
+libraries only. For other Scala projects, please check those projects for their
+contribution steps and guidelines. Thank you.*
+
+## Overview
+
+Since API documentation is located in Scala source code files, the
+process for contributing API documentation is similar to that of contributing bug-fixes
+to the Scala code base, but without the requirement that there be an issue filed on GitHub
+first. When forking/branching, it would help to use a `scaladoc/xxxx` branch name, where `xxxx` is a
+descriptive, but short branch name (e.g. `scaladoc/future-object`).
+However, if an issue *does* exist, please use `issue/NNNN`, where `NNNN` is the ticket number,
+instead.
+
+If you would like to assist us, you can
+[report missing/incorrect API documentation](#contribute-api-documentation-bug-reports), or
+[contribute new API documentation](#contribute-new-api-documentation).
+
+## Contribute API Documentation Bug Reports
+
+One good way to contribute is by helping us to identify missing documentation. To do
+this, [browse the current API documentation](https://www.scala-lang.org/api/current/)
+and identify missing, incorrect or inadequate documentation. A good place to start is
+package objects for important packages (these often get overlooked for documentation
+and are a good place for API overviews).
+
+If you find an issue, please log it in the [Scala bug tracker](https://github.com/scala/bug),
+(or else the [Scala 3 issue tracker](https://github.com/scala/scala3/issues) for Scala 3 library additions)
+**after making sure it is not already logged as an issue**. To help with
+disambiguation, please use the following format for issue title:
+
+* Use an action describing the work required, e.g. **Add**, **Document**, **Correct**, **Remove**.
+* Use the full package, class/trait/object/enum name (or state package object if
+ that is the case).
+* Extremely short description of what to do.
+* More detail can (and should) go into the issue description, including a short
+ justification for the issue if it provides additional detail.
+
+Here is an example title and description for an API documentation issue:
+
+`Document scala.concurrent.Future object, include code examples`
+
+(note the explicit companion object called out in the title)
+
+and the description:
+
+> The methods on the `Future` companion object are critical
+> for using Futures effectively without blocking. Provide code
+> examples of how methods like `sequence`, `transform`, `fold` and
+> `firstCompletedOf` should be used.
+
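+For reference, the kind of Scaladoc that would resolve such an issue might look like the following sketch (a hypothetical object and method, not the actual `Future` source):
+
+```scala
+import scala.concurrent.{ExecutionContext, Future}
+
+object FutureDocsExample {
+
+  /** Runs the given computations concurrently and collects their results.
+    *
+    * Example (assuming an implicit `ExecutionContext` is in scope):
+    * {{{
+    * val sum = FutureDocsExample.collect(List(Future(1), Future(2), Future(3))).map(_.sum)
+    * }}}
+    */
+  def collect[A](fs: List[Future[A]])(implicit ec: ExecutionContext): Future[List[A]] =
+    Future.sequence(fs)
+}
+```
+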
+In addition to following these conventions, please add `documentation` and
+`community` labels to the issue, and put them in the `Documentation and API`
+component so that they show up in the correct issue filters.
+
+## Contribute New API Documentation
+
+### Required Reading
+
+Please familiarize yourself with the following before contributing
+new API documentation to save time, effort, mistakes and repetition.
+
+* [Forking the Repo][hackers-setup] - follow the setup steps through
+ the Branch section. If providing new documentation related to an existing GitHub issue, use `issue/NNNN`
+ or `ticket/NNNN` as the guide states. If providing API documentation with no associated
+ GitHub issue, use `scaladoc/xxxx` instead.
+* [Scaladoc for library authors][scaladoc-lib-authors]
+ covers the use of scaladoc tags, markdown and other features.
+* [Scaladoc's interface][scaladoc-interface]
+ covers all the features of Scaladoc's interface, e.g. switching between
+ companions, browsing package object documentation, searching, token searches
+ and so on.
+* Prior to commit, be sure to read
+ [A note about git commit messages](https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html) and the [Scala Project & Developer Guidelines](https://github.com/scala/scala/blob/2.11.x/CONTRIBUTING.md).
+ Some of this latter document will clearly not apply (like the sections on providing tests,
+ however see below for some special requirements for documentation). Do still read
+ the whole document though, and pay close attention to the title and commit
+ message formats, noting *present tense*, *length limits* and that it must merge
+ cleanly. Remember that the title of the pull request will become the commit
+  message when merged. **Also**, be sure to assign one or more reviewers to the PR; if this is
+ not possible for you, you could mention a user **in the pull request comments**.
+
+### Extra Requirements for Scaladoc Documentation Commits
+
+Although some requirements for bug fix pull requests are not needed for
+API documentation commits, here are the step by step requirements to ensure your API documentation
+PR is merged in smoothly:
+
+* Any and all code examples provided should *be correct, compile and run* as
+ expected (ensure this in the REPL or your IDE).
+* Spelling must be checked for all written language *and* code examples where
+ possible. Most editors have some spell checking feature available. Scala code
+ itself is permitted to not pass a spell-checker, however any written language
+ should be checked. If you can also use a grammar checker, it will help. We
+ *will* ask for spelling and grammar to be corrected before acceptance.
+* You **must** also run `sbt doc`, fix any problems and check the formatting and
+ layout of your changes. Again, corrections will be required if formatting or
+ layout are inadequate. After running `sbt doc` the generated documents can be
+ found under the `build/scaladoc/` folders (probably in the `library` subdirectory
+ but maybe under the others depending on what section of the Scala source you
+ are working on).
+* All of these steps are required to save time for both the reviewers and
+ contributors. It benefits everyone to ensure that the PR to merge process is
+ as smooth and streamlined as possible.
+
+Thanks for helping us improve the Scaladoc API documentation!
+
+[home]: {% link index.md %}
+[add-guides]: {% link _overviews/contribute/add-guides.md %}
+[hackers-setup]: {% link _overviews/contribute/hacker-guide.md %}#2-set-up
+[scaladoc-lib-authors]: {% link _overviews/scaladoc/for-library-authors.md %}
+[scaladoc-interface]: {% link _overviews/scaladoc/interface.md %}
diff --git a/_overviews/contribute/scala3.md b/_overviews/contribute/scala3.md
new file mode 100644
index 0000000000..2501012a1e
--- /dev/null
+++ b/_overviews/contribute/scala3.md
@@ -0,0 +1,13 @@
+---
+title: Contribute to Scala 3
+description: This page describes the format of the contribution guide for the Scala 3 compiler.
+num: 14
+redirect_from: /scala3/guides/contribution/contribution-intro.html
+---
+Thank you for wanting to contribute to Scala 3!
+
+Dotty is an open-source project, and as such, we welcome contributions from the community to help us make it even better.
+
+If you are interested in contributing to Scala 3, please visit the project [developer website](https://dotty.epfl.ch/docs/contributing/index.html), where you will find all the information you need to get started. We encourage everyone, regardless of their level of expertise, to contribute to Scala 3, as there are many ways to help, from fixing bugs and implementing new features to improving documentation and testing.
+
+If you have any questions, please feel free to ask them on the [Contributors Forum](https://contributors.scala-lang.org/c/scala-3/scala-3-contributors/9).
diff --git a/_overviews/contribute/tools.md b/_overviews/contribute/tools.md
new file mode 100644
index 0000000000..77115d03ab
--- /dev/null
+++ b/_overviews/contribute/tools.md
@@ -0,0 +1,80 @@
+---
+title: IDE and Build Tool Contributions
+num: 11
+
+# Projects list:
+projects:
+ - title: sbt
+ description: The interactive build tool.
+ icon: https://www.scala-sbt.org/assets/sbt-logo.svg
+ link: https://github.com/sbt/sbt
+ homeLink: https://www.scala-sbt.org/
+ issuesLink: https://github.com/sbt/sbt#issues-and-pull-requests
+ readmeLink: https://github.com/sbt/sbt/blob/0.13/README.md
+ contributingLink: https://github.com/sbt/sbt/blob/0.13/CONTRIBUTING.md
+ - title: Scaladoc Tool
+ description: (Contribute through scala/scala)
+ icon: https://avatars1.githubusercontent.com/u/57059?v=3&s=200
+ link: https://github.com/scala/scala
+ homeLink: https://www.scala-lang.org/api
+ issuesLink: https://github.com/scala/bug/labels/scaladoc
+ readmeLink: https://github.com/scala/scala#welcome
+ contributingLink: /contribute/guide.html
+ - title: Partest
+ description: Scala Compiler/Library Testing (Contribute through scala/scala)
+ icon: https://avatars1.githubusercontent.com/u/57059?v=3&s=200
+ link: https://github.com/scala/scala
+ homeLink: https://github.com/scala/scala
+ issuesLink: https://github.com/scala/scala/issues
+ readmeLink: https://github.com/scala/scala/blob/2.13.x/CONTRIBUTING.md#partest
+ contributingLink:
+
+projectsInNeed:
+ - title: Scoverage
+ description: Scala code coverage tool
+ icon: https://avatars1.githubusercontent.com/u/5998302?v=3&s=200
+ link: https://github.com/scoverage/scalac-scoverage-plugin
+ homeLink: http://scoverage.org/
+ issuesLink: https://github.com/scoverage/scalac-scoverage-plugin/issues
+ readmeLink: https://github.com/scoverage/scalac-scoverage-plugin/blob/master/README.md
+ contributingLink: https://groups.google.com/forum/#!forum/scala-code-coverage-tool
+---
+## Contributing to IDE and Build Tools
+
+The links below are to a number of Scala build and IDE related projects that are important in the larger Scala space, and which welcome contributions.
+
+Since these tools are in separate projects, they may (and likely will) have their own rules and guidelines for contributing. You should also check the `README.md` and (if it's present) `CONTRIBUTING.md` files from the actual projects before contributing to them.
+
+Typically, issues for these projects will be reported and kept in the GitHub project issue tracker for that project rather than in the Scala bug tracker.
+
+Many of these projects have a chat room on Discord or Gitter (usually linked from their `README.md` or `CONTRIBUTING.md` files) which is a great place to discuss proposed work before starting.
+
+There are some projects in this section that are in
+[particular need](#projects-in-particular-need) so please check those out
+if you would like to help revive them.
+
+### Broken Links?
+
+Stuff changes. Found a broken link or something that needs updating on this page? Please, consider [submitting a documentation pull request](/contribute/documentation.html#updating-scala-langorg) to fix it.
+
+### Projects
+
+{% if page.projects.size > 0 %}
+{% include contributions-projects-list.html collection=page.projects %}
+{% else %}
+There are no projects.
+{% endif %}
+
+### Projects in Particular Need
+
+{% if page.projectsInNeed.size > 0 %}
+
+The following projects are important to the Scala community but are particularly in need of contributors to continue their development.
+
+{% include contributions-projects-list.html collection=page.projectsInNeed %}
+
+{% else %}
+
+There are no projects in particular need.
+
+{% endif %}
diff --git a/_overviews/contributors/index.md b/_overviews/contributors/index.md
index ddd2868a36..c482c6f8dc 100644
--- a/_overviews/contributors/index.md
+++ b/_overviews/contributors/index.md
@@ -22,7 +22,7 @@ that the license and copyright notices are preserved. For the record, Scala itse
Once you have chosen a license, *apply* it to your project by creating a `LICENSE` file in the root directory
of your project with the license contents or a link to it. This file usually indicates who owns the copyright.
-In our example of [LICENSE file](https://github.com/scalacenter/library-example/blob/master/LICENSE), we have
+In our example of [LICENSE file](https://github.com/scalacenter/library-example/blob/main/LICENSE), we have
written that all the contributors (as per the Git log) own the copyright.
## Host the Source Code
@@ -31,82 +31,80 @@ We recommend sharing the source code of your library by hosting it on a public [
hosting site such as [GitHub](https://github.com), [Bitbucket](https://bitbucket.org) or [GitLab](https://gitlab.com).
In our example, we use GitHub.
-Your project should include a [README](https://github.com/scalacenter/library-example/blob/master/README.md) file
+Your project should include a [README](https://github.com/scalacenter/library-example/blob/main/README.md) file
including a description of what the library does and some documentation (or links to the documentation).
You should take care of putting only source files under version control. For instance, artifacts generated by the
build system should *not* be versioned. You can instruct Git to ignore such files by adding them to a
-[.gitignore](https://github.com/scalacenter/library-example/blob/master/.gitignore) file.
+[.gitignore](https://github.com/scalacenter/library-example/blob/main/.gitignore) file.
In case you are using sbt, make sure your repository has a
-[project/build.properties](https://github.com/scalacenter/library-example/blob/master/project/build.properties)
+[project/build.properties](https://github.com/scalacenter/library-example/blob/main/project/build.properties)
file indicating the sbt version to use, so that people (or tools) working on your repository will automatically
use the correct sbt version.
## Setup Continuous Integration
The first reason for setting up a continuous integration (CI) server is to systematically run tests on pull requests.
-Examples of CI servers that are free for open source projects are [Travis CI](https://travis-ci.org),
-[Drone](https://drone.io) or [AppVeyor](https://appveyor.com).
+Examples of CI servers that are free for open source projects are [GitHub Actions](https://github.com/features/actions),
+[Travis CI](https://travis-ci.com), [Drone](https://drone.io) or [AppVeyor](https://appveyor.com).
-Our example uses Travis CI. To enable Travis CI on your project, go to [travis-ci.org](https://travis-ci.org/),
-sign up using your GitHub account, and enable your project repository. Then, add a `.travis.yml` file to your
-repository with the following content:
+Our example uses GitHub Actions. This feature is enabled by default on GitHub repositories. You can verify if that is
+the case in the *Actions* section of the *Settings* tab of the repository.
+If *Disable all actions* is checked, then Actions are not enabled, and you can activate them
+by selecting *Allow all actions*, *Allow local actions only* or *Allow select actions*.
-~~~ yaml
-language: scala
-~~~
-
-Push your changes and check that Travis CI triggers a build for your repository.
-
-Travis CI tries to guess which build tool your project uses and executes a default command to run the project tests.
-For instance, if your repository contains a `build.sbt` file in the root directory, Travis CI executes the
-`sbt ++$TRAVIS_SCALA_VERSION test` command, where the `TRAVIS_SCALA_VERSION` variable is, by default, set to an
-arbitrary Scala version (`2.12.8`, at the time these lines are written), which could be inconsistent with the
-`scalaVersion` defined in your `build.sbt` file.
-
-To avoid this potential inconsistency, you want to use one Scala version definition as a single source of truth.
-For instance, the [sbt-travisci](https://github.com/dwijnand/sbt-travisci) plugin lets you define the Scala version
-in the `.travis.yml` file, and then forwards this version to your sbt build definition. Alternatively, you can
-override the default command run by Travis CI to use the Scala version defined by the `scalaVersion` settings of
-your build.
-
-The latter approach is the one used in this guide. Override the command run by Travis CI by adding the folliwng
-lines to your `.travis.yml` file:
-
-~~~ yaml
-jobs:
- include:
- - stage: test
- script: sbt test
-~~~
+With Actions enabled, you can create a *workflow definition file*. A **workflow** is an automated procedure,
+composed of one or more jobs. A **job** is a set of sequential steps that are executed on the same runner.
+A **step** is an individual task that can run commands; a step can be either an *action* or a shell command.
+An **action** is the smallest building block of a workflow; it is possible to reuse community actions or to
+define new ones.
-Travis CI will now execute the `sbt test` command, which uses the Scala version from the build definition.
-
-Last, an important thing to setup is caching, to avoid the CI server to re-download your project dependencies each
-time it runs. For instance, in case you use sbt, you can instruct Travis CI to save the content of the `~/.ivy2/`
-and `~/.sbt/` directories across builds by adding the following lines to your `.travis.yml` file:
+To create a workflow, create a *yaml* file in the directory `.github/workflows/` in the repository, for example
+`.github/workflows/ci.yml` with the following content:
~~~ yaml
-# These directories are cached at the end of the build
-cache:
- directories:
- - $HOME/.ivy2/cache
- - $HOME/.sbt
-before_cache:
- # Cleanup the cached directories to avoid unnecessary cache updates
- - rm -fv $HOME/.ivy2/.sbt.ivy.lock
- - find $HOME/.ivy2/cache -name "ivydata-*.properties" -print -delete
- - find $HOME/.sbt -name "*.lock" -print -delete
-~~~
+name: Continuous integration
+on: push
-For reference, here is our complete
-[.travis.yml example file](https://github.com/scalacenter/library-example/blob/master/.travis.yml).
+jobs:
+ ci:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3 # Retrieve the content of the repository
+ - uses: actions/setup-java@v3 # Set up a jdk
+ with:
+ distribution: temurin
+ java-version: 8
+ cache: sbt # Cache the artifacts downloaded by sbt across CI runs
+ - name: unit tests # Custom step consisting of a shell command
+ run: sbt +test
+~~~
+
+This workflow is called *Continuous integration*, and it runs every time one
+or more commits are pushed to the repository. It contains a single job called
+*ci*, which runs on an Ubuntu runner and is composed of three
+steps. The `setup-java` action installs a JDK and caches the library dependencies
+downloaded by sbt so that they are not downloaded again every time the CI runs.
+
+Then, the job runs `sbt +test`, which loads the sbt version specified in
+`project/build.properties`, and runs the project tests using the Scala version
+defined in the file `build.sbt`.
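+
+For reference, `project/build.properties` is a one-line file pinning the sbt version. A minimal
+sketch (the version number below is only an example; use the sbt version you actually build with):
+
+~~~
+sbt.version=1.9.7
+~~~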
+
+The workflow above runs on any push to any branch of the repository. You
+can restrict it to specific branches or add more triggers such as pull requests, releases,
+tags or schedules. More information about workflow triggers is available
+[in the GitHub documentation](https://docs.github.com/en/actions/reference/events-that-trigger-workflows).
+The `checkout` action is hosted in the [actions/checkout](https://github.com/actions/checkout)
+repository, while the `setup-java` action is hosted [in this
+repository](https://github.com/actions/setup-java).
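+
+As an illustration of restricting the trigger, a minimal sketch that runs only on pushes to a
+`main` branch and on pull requests (the branch name is an assumption; adapt it to your repository) is:
+
+~~~ yaml
+# Hypothetical trigger section; replace `main` with your default branch
+on:
+  push:
+    branches: [main]
+  pull_request:
+~~~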
+
+For reference, here is our complete [workflow example
+file](https://github.com/scalacenter/library-example/blob/main/.github/workflows/ci.yml).
## Publish a Release
Most build tools resolve third-party dependencies by looking them up on public repositories such as
-[Maven Central](https://search.maven.org/) or [Bintray](https://bintray.com/). These repositories host
+[Maven Central](https://search.maven.org/). These repositories host
the library binaries as well as additional information such as the library authors, the open source
license, and the dependencies of the library itself. Each release of a library is identified by
a `groupId`, an `artifactId`, and a `version` number. For instance, consider the following dependency
@@ -125,7 +123,8 @@ sign the binaries.
### Create a Sonatype Account and Project
Follow the instructions given on the [OSSRH Guide](https://central.sonatype.org/pages/ossrh-guide.html#initial-setup)
-to create a new Sonatype account (unless you already have one) and to create a new project ticket. This latter
+to create a new Sonatype account (unless you already have one) and to
+[create a new project ticket](https://issues.sonatype.org/secure/CreateIssue.jspa?issuetype=21&pid=10134). This latter
step is where you define the `groupId` that you will release to. You can use a domain name that you already own,
otherwise a common practice is to use `io.github.(username)` (where `(username)` is replaced with your GitHub
username).
@@ -134,24 +133,24 @@ This step has to be performed only once per `groupId` you want to have.
### Create a PGP Key Pair
-Sonatype [requires](https://central.sonatype.org/pages/requirements.html) that you sign the published files
-with PGP. Follow the instructions [here](https://central.sonatype.org/pages/working-with-pgp-signatures.html)
+Sonatype [requires](https://central.sonatype.org/publish/requirements) that you sign the published files
+with PGP. Follow the instructions [here](https://central.sonatype.org/publish/requirements/gpg)
to generate a key pair and to distribute your public key to a key server.
This step has to be performed only once per person.
### Setup Your Project
-In case you use sbt, we recommend using the [sbt-sonatype](https://github.com/xerial/sbt-sonatype)
+In case you use sbt, we recommend using the [sbt-sonatype](https://github.com/xerial/sbt-sonatype)
and [sbt-pgp](https://www.scala-sbt.org/sbt-pgp/) plugins to publish your artifacts. Add the following
dependencies to your `project/plugins.sbt` file:
~~~ scala
-addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "2.4")
-addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.0")
+addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.21")
+addSbtPlugin("com.github.sbt" % "sbt-pgp" % "2.2.1")
~~~
-And make sure your build fulfills the [Sonatype requirements](https://central.sonatype.org/pages/requirements.html)
+And make sure your build fulfills the [Sonatype requirements](https://central.sonatype.org/publish/requirements)
by defining the following settings:
~~~ scala
@@ -170,7 +169,7 @@ import xerial.sbt.Sonatype._
sonatypeProjectHosting := Some(GitHubHosting("scalacenter", "library-example", "julien.richard-foy@epfl.ch"))
// publish to the sonatype repository
-publishTo := sonatypePublishTo.value
+publishTo := sonatypePublishToBundle.value
~~~
Put your Sonatype credentials in a `$HOME/.sbt/1.0/sonatype.sbt` file:
@@ -182,7 +181,7 @@ credentials += Credentials("Sonatype Nexus Repository Manager",
"(Sonatype password)")
~~~
-(Put your actual user name and password in place of `(Sonatype user name)` and `(Sonatype password)`)
+(Put your actual username and password in place of `(Sonatype user name)` and `(Sonatype password)`)
**Never** check this file into version control.
@@ -190,7 +189,7 @@ Last, we recommend using the [sbt-dynver](https://github.com/dwijnand/sbt-dynver
of your releases. Add the following dependency to your `project/plugins.sbt` file:
~~~ scala
-addSbtPlugin("com.dwijnand" % "sbt-dynver" % "3.1.0")
+addSbtPlugin("com.github.sbt" % "sbt-dynver" % "5.0.1")
~~~
And make sure your build does **not** define the `version` setting.
@@ -200,7 +199,7 @@ And make sure your build does **not** define the `version` setting.
With this setup, the process for cutting a release is the following.
Create a Git tag whose name begins with a lowercase `v` followed by the version number:
-
+
~~~ bash
$ git tag v0.1.0
~~~
@@ -215,7 +214,7 @@ $ sbt publishSigned
`sbt-sonatype` will package your project and ask your PGP passphrase to sign the files with your PGP key.
It will then upload the files to Sonatype using your account credentials. When the task is finished, you can
-check the artifacts in the [Nexus Repository Manager](https://oss.sonatype.org) (under “Staging Repositories”).
+check the artifacts in the [Nexus Repository Manager](https://oss.sonatype.org) (under “Staging Repositories” in the side menu; if you do not see it, make sure you are logged in).
Finally, perform the release with the `sonatypeRelease` sbt task:
@@ -235,9 +234,17 @@ Continuous publication addresses these issues by delegating the publication proc
follows: any contributor with write access to the repository can cut a release by pushing a Git tag, the CI server
first checks that the tests pass and then runs the publication commands.
-The remaining sections show how to setup Travis CI for continuous publication on Sonatype. You can find instructions
-for other CI servers and repositories in the [sbt-release-early](https://github.com/scalacenter/sbt-release-early)
-plugin documentation.
+We achieve this by replacing the plugins `sbt-pgp`, `sbt-sonatype`, and `sbt-dynver` with `sbt-ci-release`, in the file `project/plugins.sbt`:
+
+{% highlight diff %}
+- addSbtPlugin("com.github.sbt" % "sbt-pgp" % "2.2.1")
+- addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.21")
+- addSbtPlugin("com.github.sbt" % "sbt-dynver" % "5.0.1")
++ addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.12")
+{% endhighlight %}
+
+The remaining sections show how to set up GitHub Actions for continuous publication on Sonatype. You can find instructions
+for Travis CI in the [sbt-ci-release](https://github.com/olafurpg/sbt-ci-release) plugin documentation.
### Setup the CI Server
@@ -246,28 +253,14 @@ it is possible to securely give this information by using the secret management
#### Export Your Sonatype Account Credentials
-The `SONATYPE_USERNAME` and `SONATYPE_PASSWORD` environment variables are recognized by the `sbt-sonatype`
-plugin, as documented [here](https://github.com/xerial/sbt-sonatype#homesbtsbt-version-013-or-10sonatypesbt-1).
-
-With Travis CI, you will have to install the [Travis CLI](https://github.com/travis-ci/travis.rb#installation).
-
-Then, run the following commands from your project root directory to add your Sonatype credentials as
-environment variables to your `.travis.yml` file in an encrypted form:
-
-~~~ bash
-$ travis encrypt SONATYPE_USERNAME="(Sonatype user name)" --add
-$ travis encrypt SONATYPE_PASSWORD="(Sonatype password)" --add
-~~~
-
-(Put your actual user name and password in place of `(Sonatype user name)` and `(Sonatype password)`)
+Create two [GitHub Encrypted secrets](https://docs.github.com/en/actions/reference/encrypted-secrets)
+for your Sonatype account credentials: `SONATYPE_USERNAME` and `SONATYPE_PASSWORD`.
+To do so, go to the *Settings* tab of the repository and select *Secrets* on the left panel.
+You can then use the button *New repository secret* to open the secret creation menu where you will enter
+the name of the secret and its content.
-The `--add` option updates your `.travis.yml` file with entries like the following:
-
-~~~ yaml
-env:
- global:
- - secure: "dllL1w+pZT6yTBYwy5hX07t8r0JL5Cqer6YgYnXJ+q3OhSGUs7ul2fDUiqVxGIgUpTij1cGwBmoJOTbRk2V/be4+3Ua4ZNrAxjNF2ehqUcV5KdC3ufTTTXX0ZoL9MqEIb+GKzKtPqbzR4uly/5q5NbV7J1GeZRhummnx87POl6yH4kmXTpahig7vvnwN5dLanMshRb2Z8tO8kF4SnC31QuNBDQLnS89PEajHQu+LRAJloYvcikm+NeUj79m64CYg9JZdrHvZpIYKOMY1twT+lYoerqzG+asiNE1WrDs/We1RFVgcrKLpEThcvuIxuuPKhu24+0KteAX+7z/ulT0lndyBRfuuDjHV844LrNbjhnTB64V1uF7aEdaEZRLTsFQnFZqzpoqYqxzgfow9LN/kU5CMJX1R4wwf3YgR1VC9ZfjZnu0Pbt24g48I+72ZDNk3oRZoPsN9AtovwdZcg7TgU/iPcHNKSNhEZRP6ryhv/9aX3URLkfhnDaJmTXAnC3YCYt5cGo0FBUHARA+AHcas14Dx95bFSbH7EBivb2LiDmi44goRCWR4p+vNSBJ6Ak1NZz/+paai0pXDG6S/VdqwGSmmfjn7m9H3L5c8X5xNich9qtZbWz0fj2baZGq/doA8KE91JCzX11p/9fKNzbVivQZdsw3C3ZWDjkMZM+hl++0="
-~~~
+Repository Secrets allow us to safely store confidential information and to expose
+it to Actions workflows without the risk of committing it to the Git history.
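+
+If you prefer the command line, the same secrets can be created with the GitHub CLI
+(this assumes `gh` is installed, authenticated, and run from a clone of the repository;
+the values shown are placeholders):
+
+~~~ bash
+# Placeholders: substitute your actual Sonatype credentials
+$ gh secret set SONATYPE_USERNAME --body "(Sonatype user name)"
+$ gh secret set SONATYPE_PASSWORD --body "(Sonatype password)"
+~~~
+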
#### Export Your PGP Key Pair
@@ -284,72 +277,69 @@ uid Julien Richard-Foy
In my case, I have one key pair, whose ID is `BE614499`.
-Export your public and private keys into files, in a `ci` directory:
-~~~ bash
-$ mkdir ci
-$ gpg -a --export (key ID) > ci/pubring.asc
-$ gpg -a --export-secret-keys (key ID) > ci/secring.asc
-~~~
+Then:
+ 1. Create a new Secret named `PGP_PASSPHRASE` containing the passphrase of your PGP key.
+ 2. Create a new Secret named `PGP_SECRET` containing the base64-encoded export of your private key. The encoded secret can be obtained by running:
+```
+# macOS
+gpg --armor --export-secret-keys $LONG_ID | base64
+# Ubuntu (assuming GNU base64)
+gpg --armor --export-secret-keys $LONG_ID | base64 -w0
+# Arch
+gpg --armor --export-secret-keys $LONG_ID | base64 | sed -z 's;\n;;g'
+# FreeBSD (assuming BSD base64)
+gpg --armor --export-secret-keys $LONG_ID | base64
+# Windows
+gpg --armor --export-secret-keys %LONG_ID% | openssl base64
+```
+ 3. Publish your public key to a public key server, for example [http://keyserver.ubuntu.com:11371](http://keyserver.ubuntu.com:11371/); a command-line upload sketch follows this list.
+ You can obtain the ASCII-armored public key by running:
+```
+# macOS and linux
+gpg --armor --export $LONG_ID
+# Windows
+gpg --armor --export %LONG_ID%
+```
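+
+Alternatively, a sketch of uploading the public key straight to the Ubuntu key server from the
+command line (assuming a recent GnuPG on the PATH; `$LONG_ID` is your key ID as above):
+```
+gpg --keyserver keyserver.ubuntu.com --send-keys $LONG_ID
+```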
(Replace `(key ID)` with **your** key ID)
-Add the `ci/pubring.asc` file (which contains your public key) to your repository. The `secring.asc` file
-(which contains your private key) should **not** be added as it is to the repository, so make sure it will
-be ignored by Git by adding it to the `.gitignore` file:
-
-~~~
-ci/secring.asc
-~~~
-
-Encrypt it with the `travis` tool:
-
-~~~ bash
-$ travis encrypt-file ci/secring.asc ci/secring.asc.enc --add
-~~~
-
-As advised in the command output, make sure to add the `secring.asc.enc` to the git repository.
-
-The `--add` option above adds a line like the following to your `.travis.yml` file:
-
-~~~ diff
-before_install:
- - openssl aes-256-cbc -K $encrypted_602f530300eb_key -iv $encrypted_602f530300eb_iv -in ci/secring.asc.enc -out ci/secring.asc -d
-~~~
-
-Finally, add export your PGP passphrase to the `.travis.yml` file:
-
-~~~
-$ travis encrypt PGP_PASSPHRASE="(your passphrase)" --add
-~~~
-
-(Replace `(your passphrase)` with your actual passphrase)
#### Publish From the CI Server
-On Travis CI, you can define a
-[conditional stage](https://docs.travis-ci.com/user/build-stages/#specifying-stage-order-and-conditions)
-publishing the library when a tag is pushed:
+On GitHub Actions, you can define a workflow to publish the library when a tag starting with “v” is pushed:
-~~~ yaml
-jobs:
- include:
- - stage: test
- script: sbt test
- - stage: deploy
- if: tag =~ ^v
- script: sbt publishSigned sonatypeRelease
-~~~
+{% highlight yaml %}
+{% raw %}
+# .github/workflows/publish.yml
+name: Continuous publication
+on:
+ push:
+ tags: [v*]
-The last step is to tell your build definition how to retrieve the PGP passphrase from the `PGP_PASSPHRASE`
-environment variable and to use the `pubring.asc` and `secring.asc` files as the PGP key pair.
-Include the following settings in your `build.sbt` file:
+jobs:
+ release:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ fetch-depth: 0 # fetch all tags, required to compute the release version
+ - uses: actions/setup-java@v3
+ with:
+ distribution: temurin
+ java-version: 8
+ cache: sbt
+ - run: sbt ci-release
+ env:
+ PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }}
+ PGP_SECRET: ${{ secrets.PGP_SECRET }}
+ SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
+ SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }}
+{% endraw %}
+{% endhighlight %}
-~~~ scala
-pgpPublicRing := file("ci/pubring.asc")
-pgpSecretRing := file("ci/secring.asc")
-pgpPassphrase := sys.env.get("PGP_PASSPHRASE").map(_.toArray)
-~~~
+The `env` statement exposes the secrets you defined earlier to the publication process through
+environment variables.
### Cut a Release
@@ -360,38 +350,28 @@ $ git tag v0.2.0
$ git push origin v0.2.0
~~~
+This will trigger the workflow, which ultimately invokes `sbt ci-release`, performing a `publishSigned` followed by a `sonatypeRelease`.
+
## Cross-Publish
-If you have written a library, you probably want it to be usable from several Scala major versions (e.g., 2.11.x,
-2.12.x, 2.13.x, etc.).
+If you have written a library, you probably want it to be usable from several Scala major versions (e.g.,
+2.12.x, 2.13.x, 3.x, etc.).
Define the versions you want to support in the `crossScalaVersions` setting, in your `build.sbt` file:
~~~ scala
-crossScalaVersions := Seq("2.12.8", "2.11.12")
+crossScalaVersions := Seq("3.3.0", "2.13.12", "2.12.18")
scalaVersion := crossScalaVersions.value.head
~~~
The second line makes sbt use by default the first Scala version of the `crossScalaVersions`.
-
-Modify the CI jobs to use all the Scala versions of your build definition by using the `+` prefix,
-when appropriate:
-
-~~~ yaml
-jobs:
- include:
- - stage: test
- script: sbt +test
- - stage: deploy
- if: tag =~ ^v
- script: sbt +publishSigned sonatypeRelease
-~~~
+The CI jobs will use all the Scala versions of your build definition (the `+` prefix in `sbt +test` runs the tests on every version listed in `crossScalaVersions`).
## Publish Online Documentation
An important property of documentation is that the code examples should compile and behave as they
are presented. There are various ways to ensure that this property holds. One way, supported by
-[tut](https://github.com/tpolecat/tut) and [mdoc](https://github.com/olafurpg/mdoc), is to actually
+[mdoc](https://github.com/scalameta/mdoc), is to actually
evaluate code examples and write the result of their evaluation in the produced documentation.
Another way consists in embedding snippets of source code coming from a real module or example.
@@ -406,26 +386,24 @@ an sbt-site to GitHub Pages.
### Create the Documentation Site
-In this example we choose to use [Paradox](https://developer.lightbend.com/docs/paradox/current/index.html)
-because it runs on the JVM and thus doesn’t require setting up another VM on your system (in contrast with
+In this example we choose to use [Paradox](https://github.com/lightbend/paradox)
+because it runs on the JVM and thus doesn't require setting up another VM on your system (in contrast with
most other documentation generators, which are based on Ruby, Node.js or Python).
To install Paradox and sbt-site, add the following lines to your `project/plugins.sbt` file:
~~~ scala
-addSbtPlugin("com.typesafe.sbt" % "sbt-site" % "1.3.2")
-addSbtPlugin("com.lightbend.paradox" % "sbt-paradox" % "0.4.4")
+addSbtPlugin("com.github.sbt" % "sbt-site-paradox" % "1.5.0")
~~~
And then add the following configuration to your `build.sbt` file:
{% highlight scala %}
-enablePlugins(ParadoxPlugin, ParadoxSitePlugin)
+enablePlugins(ParadoxSitePlugin, SitePreviewPlugin)
Paradox / sourceDirectory := sourceDirectory.value / "documentation"
{% endhighlight %}
-The `ParadoxPlugin` is responsible of generating the website, and the `ParadoxSitePlugin` provides
-integration with `sbt-site`.
+The `ParadoxSitePlugin` provides a task `makeSite` that generates a website using [Paradox](https://github.com/lightbend/paradox), and the `SitePreviewPlugin` provides handy tasks for previewing the result in your browser while you work on the website content.
The second line is optional, it defines the location of the website source files. In our case, in
`src/documentation`.
@@ -434,6 +412,7 @@ uses the library name as title, shows a short sentence describing the purpose of
snippet for adding the library to a build definition:
{% highlight markdown %}
+{% raw %}
# Library Example
A library that does nothing.
@@ -452,6 +431,7 @@ libraryDependencies += "ch.epfl.scala" %% "library-example" % "$project.version$
* [Getting Started](getting-started.md)
* [Reference](reference.md)
@@@
+{% endraw %}
{% endhighlight %}
Note that in our case we rely on a variable substitution mechanism to inject the correct version number
@@ -483,76 +463,18 @@ and behave as they are presented.
#### Using a Markdown Preprocessor
-One approach consists in using a Markdown preprocessor, such as [tut](https://github.com/tpolecat/tut) or
-[mdoc](https://github.com/olafurpg/mdoc). These tools read your Markdown source files, search for code fences,
+One approach is to use a Markdown preprocessor such as
+[mdoc](https://github.com/scalameta/mdoc). Such tools read your Markdown source files, search for code fences,
evaluate them (throwing an error if they don’t compile), and produce a copy of your Markdown files where
code fences have been updated to also include the result of evaluating the Scala expressions.
-For instance, given the following `src/documentation/getting-started.md` file:
-
-{% highlight markdown %}
-# Getting Started
-
-First, start with the following import:
-
-```tut
-import ch.epfl.scala.Example
-```
-
-Then, do nothing with something:
-
-```tut
-Example.doNothing(42)
-```
-{% endhighlight %}
-
-The tut tool will produce the following Markdown file:
-
-{% highlight markdown %}
-# Getting Started
-
-First, start with the following import:
-
-```scala
-scala> import ch.epfl.scala.Example
-import ch.epfl.scala.Example
-```
-
-Then, do nothing with something:
-
-```scala
-scala> Example.doNothing(42)
-res0: Int = 42
-```
-{% endhighlight %}
-
-You can see that `tut` code fences have been replaced with `scala` code fences, and the result of
-evaluating their content is shown, as it would look like from a REPL.
-
-To enable tut, add the following line to your `project/plugins.sbt` file:
-
-~~~ scala
-addSbtPlugin("org.tpolecat" % "tut-plugin" % "0.6.10")
-~~~
-
-And apply the following changes to your `build.sbt` file:
-
-{% highlight diff %}
-+enablePlugins(TutPlugin)
--Paradox / sourceDirectory := sourceDirectory.value / "documentation"
-+tutSourceDirectory := sourceDirectory.value / "documentation"
-+Paradox / sourceDirectory := tutTargetDirectory.value
-+makeSite := makeSite.dependsOn(tut).value
-{% endhighlight %}
-
-These changes add the `TutPlugin`, configure it to read sources from the `src/documentation` directory,
-configure Paradox to read the output of tut, and make sure tut is run before the site is built.
-
#### Embedding Snippets
Another approach consists in embedding fragments of Scala source files that are part of a module which
is compiled by your build. For instance, given the following test in file `src/test/ch/epfl/scala/Usage.scala`:
+{% tabs usage-definition class=tabs-scala-version %}
+{% tab 'Scala 2' %}
~~~ scala
package ch.epfl.scala
@@ -569,16 +491,37 @@ object Usage extends Scalaprops {
}
~~~
+{% endtab %}
+{% tab 'Scala 3' %}
+~~~ scala
+package ch.epfl.scala
+
+import scalaprops.{Property, Scalaprops}
+
+object Usage extends Scalaprops:
+
+ val testDoNothing =
+// #do-nothing
+ Property.forAll: (x: Int) =>
+ Example.doNothing(x) == x
+// #do-nothing
+
+end Usage
+~~~
+{% endtab %}
+{% endtabs %}
You can embed the fragment surrounded by the `#do-nothing` identifiers with the `@@snip` Paradox directive,
as shown in the `src/documentation/reference.md` file:
{% highlight markdown %}
+{% raw %}
# Reference
The `doNothing` function takes anything as parameter and returns it unchanged:
@@snip [Usage.scala]($root$/src/test/scala/ch/epfl/scala/Usage.scala) { #do-nothing }
+{% endraw %}
{% endhighlight %}
The resulting documentation looks like the following:
@@ -615,7 +558,7 @@ The `@scaladoc` directive will produce a link to the `/api/ch/epfl/scala/Example
Add the `sbt-ghpages` plugin to your `project/plugins.sbt`:
~~~ scala
-addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3")
+addSbtPlugin("com.github.sbt" % "sbt-ghpages" % "0.8.0")
~~~
And add the following configuration to your `build.sbt`:
@@ -648,78 +591,34 @@ can browse it at [https://scalacenter.github.io/library-example/](https://scalac
### Continuous Publication
-You need to grant the CI job write access to the Git repository hosting the documentation. This can be achieved
-by creating an SSH key that the CI job can use to push the website to GitHub.
-
-Create an SSH key:
-
-~~~ bash
-$ ssh-keygen -t rsa -b 4096 -C "sbt-site@travis" -f ci/travis-key
-~~~
-
-Make sure to **not** define a passphrase (just leave it empty and press enter), and to add the private
-key (the `ci/travis-key` file) to your `.gitignore`:
-
-~~~
-ci/secring.asc
-ci/travis-key
-~~~
-
-Add the public key, `ci/travis-key.pub`, in the Deploy Keys section of your GitHub project’s settings page:
+You can extend `.github/workflows/publish.yml` to automatically publish documentation to GitHub Pages.
+To do so, add another job:
-
+```yaml
+# .github/workflows/publish.yml
+name: Continuous publication
-Make sure you “allow write access” by checking the box.
-
-The private key has to be added to the repository, like we did with the PGP private key. Unfortunately, due
-to a limitation of Travis CI, you can not add several encrypted files. The
-[workaround](https://docs.travis-ci.com/user/encrypting-files/#encrypting-multiple-files) consists in
-creating an archive containing all the files to encrypt. In your case, you want to encrypt the PGP
-key and the SSH key into a single `ci/secrets.tar` file:
-
-~~~ bash
-$ tar cvf ci/secrets.tar ci/secring.asc ci/travis-key
-$ travis encrypt-file ci/secrets.tar ci/secrets.tar.enc --add
-~~~
-
-Make sure to add the `ci/secrets.tar` file to your `.gitignore`:
-
-~~~
-ci/secring.asc
-ci/travis-key
-ci/secrets.tar
-~~~
-
-Finally, update the `.travis.yml` file to unpack the archive and push the documentation website
-on releases:
-
-~~~ yaml
jobs:
- include:
- - stage: test
- # Run tests for all Scala versions
- script: sbt +test
- name: "Tests"
- # Check that the documentation can be built
- - script: sbt makeSite
- name: "Documentation"
-
- - stage: deploy
- if: tag =~ ^v
- script:
- # decrypt PGP secret key and GitHub SSH key
- - openssl aes-256-cbc -K $encrypted_602f530300eb_key -iv $encrypted_602f530300eb_iv -in ci/secrets.tar.enc -out ci/secrets.tar -d
- - tar xvf ci/secrets.tar
- # load the key in the ssh-agent
- - chmod 600 ci/travis-key
- - eval "$(ssh-agent -s)"
- - ssh-add ci/travis-key
- # perform deployment
- - sbt makeSite +publishSigned sonatypeRelease ghpagesPushSite
-~~~
+ release: # The release job is unchanged; you can find it above
+ publishSite:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ fetch-depth: 0
+ - uses: actions/setup-java@v3
+ with:
+ distribution: temurin
+ java-version: 8
+ cache: sbt
+ - name: Generate site
+ run: sbt makeSite
+ - uses: JamesIves/github-pages-deploy-action@4.1.3
+ with:
+ branch: gh-pages
+ folder: target/site
-(Replace the `$encrypted_602f530300eb_key` and `$encrypted_602f530300eb_iv` variables with the ones produced by the
-`travis encrypt-file` command)
+```
As usual, cut a release by pushing a Git tag. The CI server will run the tests, publish the binaries and update the
online documentation.
@@ -734,7 +633,7 @@ Add a `CONTRIBUTING.md` file to your repository, answering the following questio
What are the coding practices to follow? Where are the tests and how to run them?
For reference, you can read our minimal example of
-[`CONTRIBUTING.md` file](https://github.com/scalacenter/library-example/blob/master/CONTRIBUTING.md).
+[`CONTRIBUTING.md` file](https://github.com/scalacenter/library-example/blob/main/CONTRIBUTING.md).
### Issue Labels
@@ -752,20 +651,25 @@ For instance, to use [scalafmt](https://scalameta.org/scalafmt/), add the follow
file:
~~~ scala
-addSbtPlugin("com.geirsson" % "sbt-scalafmt" % "1.5.1")
+addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2")
~~~
In the `CONTRIBUTING.md` file, mention that you use that code formatter and encourage users to use the “format
on save” feature of their editor.
-In your `.travis.yml` file, add a first stage checking that the code has been properly formatted:
+In your `.github/workflows/ci.yml` file, add a step checking that the code has been properly formatted:
~~~ yaml
+# .github/workflows/ci.yml
+# The three periods `...` indicate the parts of the file that do not change
+# from the snippets above; they are omitted for brevity
jobs:
- include:
-
- - stage: style
- script: sbt scalafmtCheck
+ ci:
+ # ...
+ steps:
+ # ...
+ - name: Code style
+ run: sbt scalafmtCheck
~~~
## Evolve
@@ -773,35 +677,45 @@ jobs:
From the user point of view, upgrading to a new version of a library should be a smooth process. Possibly,
it should even be a “non-event”.
-Breaking changes and migration steps should be thoroughly documented, and a we recommend following the
+Breaking changes and migration steps should be thoroughly documented, and we recommend following the
[semantic versioning](/overviews/core/binary-compatibility-for-library-authors.html#versioning-scheme---communicating-compatibility-breakages)
policy.
-The [MiMa](https://github.com/lightbend/migration-manager) tool can help you checking that you don’t
+The [MiMa](https://github.com/lightbend/migration-manager) tool can help you to check that you don't
break this versioning policy. Add the `sbt-mima-plugin` to your build with the following, in your
`project/plugins.sbt` file:
~~~ scala
-addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.3.0")
+addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.2")
~~~
-Configure it as follow, in `build.sbt`:
+Configure it as follows, in `build.sbt`:
~~~ scala
mimaPreviousArtifacts := previousStableVersion.value.map(organization.value %% name.value % _).toSet
~~~
-Last, add the following job to the “test” stage, in the `.travis.yml` file:
+Last, add the following step to the job `ci` of the `Continuous integration` workflow, in the `.github/workflows/ci.yml` file:
~~~ yaml
- - script: sbt mimaReportBinaryIssues
- name: "Binary compatibility"
+# .github/workflows/ci.yml
+# The three periods `...` indicate the parts of the file that do not change
+# from the snippets above; they are omitted for brevity
+
+# ...
+jobs:
+ ci:
+ # ...
+ steps:
+ # ...
+ - name: Binary compatibility
+ run: sbt mimaReportBinaryIssues
~~~
This will check that pull requests don’t make changes that are binary incompatible with the
previous stable version.
-We suggest working with the following Git workflow: the `master` branch always receives pull requests
+We suggest working with the following Git workflow: the `main` branch always receives pull requests
for the next major version (so, binary compatibility checks are disabled, by setting the `mimaPreviousArtifacts`
value to `Set.empty`), and each major version `N` has a corresponding `N.x` branch (e.g., `1.x`, `2.x`, etc.) branch
where the binary compatibility checks are enabled.
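+
+For instance, a minimal sketch of what this means on the `main` branch is the setting below in
+`build.sbt` (re-enable the check on the `N.x` maintenance branches by restoring the setting shown earlier):
+
+~~~ scala
+// main branch only: targeting the next major version, so no previous artifact to compare against
+mimaPreviousArtifacts := Set.empty
+~~~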
diff --git a/_overviews/core/actors-migration-guide.md b/_overviews/core/actors-migration-guide.md
deleted file mode 100644
index c82aecc3e5..0000000000
--- a/_overviews/core/actors-migration-guide.md
+++ /dev/null
@@ -1,583 +0,0 @@
----
-layout: singlepage-overview
-title: The Scala Actors Migration Guide
-
-partof: actor-migration
-
-languages: [zh-cn]
-
-permalink: /overviews/core/:title.html
----
-
-**Vojin Jovanovic and Philipp Haller**
-
-## Introduction
-
-Starting with Scala 2.11.0, the Scala
-[Actors](actors.html)
-library is deprecated. Already in Scala 2.10.0 the default actor library is
-[Akka](https://akka.io).
-
-To ease the migration from Scala Actors to Akka we are providing the
-Actor Migration Kit (AMK). The AMK consists of an extension to Scala
-Actors which is enabled by including the `scala-actors-migration.jar`
-on a project's classpath. In addition, Akka 2.1 includes features,
-such as the `ActorDSL` singleton, which enable a simpler conversion of
-code using Scala Actors to Akka. The purpose of this document is to
-guide users through the migration process and explain how to use the
-AMK.
-
-This guide has the following structure. In Section "Limitations of the
-Migration Kit" we outline the main limitations of the migration
-kit. In Section "Migration Overview" we describe the migration process
-and talk about changes in the [Scala
-distribution](https://www.scala-lang.org/downloads) that make the
-migration possible. Finally, in Section "Step by Step Guide for
-Migrating to Akka" we show individual steps, with working examples,
-that are recommended when migrating from Scala Actors to Akka's
-actors.
-
-A disclaimer: concurrent code is notorious for bugs that are hard to
-debug and fix. Due to differences between the two actor
-implementations it is possible that errors appear. It is recommended
-to thoroughly test the code after each step of the migration process.
-
-## Limitations of the Migration Kit
-
-Due to differences in Akka and Scala actor models the complete functionality can not be migrated smoothly. The following list explains parts of the behavior that are hard to migrate:
-
-1. Relying on termination reason and bidirectional behavior with `link` method - Scala and Akka actors have different fault-handling and actor monitoring models.
-In Scala linked actors terminate if one of the linked parties terminates abnormally. If termination is tracked explicitly (by `self.trapExit`) the actor receives
-the termination reason from the failed actor. This functionality can not be migrated to Akka with the AMK. The AMK allows migration only for the
-[Akka monitoring](https://doc.akka.io/docs/akka/2.1.0/general/supervision.html#What_Lifecycle_Monitoring_Means)
-mechanism. Monitoring is different than linking because it is unidirectional and the termination reason is now known. If monitoring support is not enough, the migration
-of `link` must be postponed until the last possible moment (Step 5 of migration).
-Then, when moving to Akka, users must create an [supervision hierarchy](https://doc.akka.io/docs/akka/2.1.0/general/supervision.html) that will handle faults.
-
-2. Usage of the `restart` method - Akka does not provide explicit restart of actors so we can not provide the smooth migration for this use-case.
-The user must change the system so there are no usages of the `restart` method.
-
-3. Usage of method `getState` - Akka actors do not have explicit state so this functionality can not be migrated. The user code must not
-have `getState` invocations.
-
-4. Not starting actors right after instantiation - Akka actors are automatically started when instantiated. Users will have to
-reshape their system so it starts all the actors right after their instantiation.
-
-5. Method `mailboxSize` does not exist in Akka and therefore can not be migrated. This method is seldom used and can easily be removed.
-
-
-## Migration Overview
-
-### Migration Kit
-In Scala 2.10.0 actors reside inside the [Scala distribution](https://www.scala-lang.org/downloads) as a separate jar ( *scala-actors.jar* ), and
-the their interface is deprecated. The distribution also includes Akka actors in the *akka-actor.jar*.
-The AMK resides both in the Scala actors and in the *akka-actor.jar*. Future major releases of Scala will not contain Scala actors and the AMK.
-
-To start the migration user needs to add the *scala-actors.jar* and the *scala-actors-migration.jar* to the build of their projects.
-Addition of *scala-actors.jar* and *scala-actors-migration.jar* enables the usage of the AMK described below.
-
-### Step by Step Migration
-Actor Migration Kit should be used in 5 steps. Each step is designed to introduce minimal changes
-to the code base and allows users to run all system tests after it. In the first four steps of the migration
-the code will use the Scala actors implementation. However, the methods and class signatures will be transformed to closely resemble Akka.
-The migration kit on the Scala side introduces a new actor type (`ActWithStash`) and enforces access to actors through the `ActorRef` interface.
-
-It also enforces creation of actors through special methods on the `ActorDSL` object. In these steps it will be possible to migrate one
-actor at a time. This reduces the possibility of complex errors that are caused by several bugs introduced at the same time.
-
-After the migration on the Scala side is complete the user should change import statements and change
-the library used to Akka. On the Akka side, the `ActorDSL` and the `ActWithStash` allow
- modeling the `react` construct of Scala Actors and their life cycle. This step migrates all actors to the Akka back-end and could introduce bugs in the system. Once code is migrated to Akka, users will be able to use all the features of Akka.
-
-## Step by Step Guide for Migrating to Akka
-
-In this chapter we will go through 5 steps of the actor migration. After each step the code can be tested for possible errors. In the first 4
- steps one can migrate one actor at a time and test the functionality. However, the last step migrates all actors to Akka and it can be tested
-only as a whole. After this step the system should have the same functionality as before, however it will use the Akka actor library.
-
-### Step 1 - Everything as an Actor
-The Scala actors library provides public access to multiple types of actors. They are organized in the class hierarchy and each subclass
-provides slightly richer functionality. To make further steps of the migration easier we will first change each actor in the system to be of type `Actor`.
-This migration step is straightforward since the `Actor` class is located at the bottom of the hierarchy and provides the broadest functionality.
-
-The Actors from the Scala library should be migrated according to the following rules:
-
-1. `class MyServ extends Reactor[T]` -> `class MyServ extends Actor`
-
- Note that `Reactor` provides an additional type parameter which represents the type of the messages received. If user code uses
-that information then one needs to: _i)_ apply pattern matching with explicit type, or _ii)_ do the downcast of a message from
-`Any` to the type `T`.
-
-2. `class MyServ extends ReplyReactor` -> `class MyServ extends Actor`
-
-3. `class MyServ extends DaemonActor` -> `class MyServ extends Actor`
-
- To pair the functionality of the `DaemonActor` add the following line to the class definition.
-
- override def scheduler: IScheduler = DaemonScheduler
-
-### Step 2 - Instantiations
-
-In Akka, actors can be accessed only through the narrow interface called `ActorRef`. Instances of `ActorRef` can be acquired either
-by invoking an `actor` method on the `ActorDSL` object or through the `actorOf` method on an instance of an `ActorRefFactory`.
-In the Scala side of AMK we provide a subset of the Akka `ActorRef` and the `ActorDSL` which is the actual singleton object in the Akka library.
-
-This step of the migration makes all accesses to actors through `ActorRef`s. First, we show how to migrate common patterns for instantiating
-Scala `Actor`s. Then we show how to overcome issues with the different interfaces of `ActorRef` and `Actor`, respectively.
-
-#### Actor Instantiation
-
-The translation rules for actor instantiation (the following rules require importing `scala.actors.migration._`):
-
-1. Constructor Call Instantiation
-
- val myActor = new MyActor(arg1, arg2)
- myActor.start()
-
- should be replaced with
-
- ActorDSL.actor(new MyActor(arg1, arg2))
-
-2. DSL for Creating Actors
-
- val myActor = actor {
- // actor definition
- }
-
- should be replaced with
-
- val myActor = ActorDSL.actor(new Actor {
- def act() {
- // actor definition
- }
- })
-
-3. Object Extended from the `Actor` Trait
-
- object MyActor extends Actor {
- // MyActor definition
- }
- MyActor.start()
-
- should be replaced with
-
- class MyActor extends Actor {
- // MyActor definition
- }
-
- object MyActor {
- val ref = ActorDSL.actor(new MyActor)
- }
-
- All accesses to the object `MyActor` should be replaced with accesses to `MyActor.ref`.
-
-Note that Akka actors are always started on instantiation. In case actors in the migrated
- system are created and started at different locations, and changing this can affect the behavior of the system,
-users need to change the code so actors are started right after instantiation.
-
-Remote actors also need to be fetched as `ActorRef`s. To get an `ActorRef` of an remote actor use the method `selectActorRef`.
-
-#### Different Method Signatures
-
-At this point we have changed all the actor instantiations to return `ActorRef`s, however, we are not done yet.
-There are differences in the interface of `ActorRef`s and `Actor`s so we need to change the methods invoked on each migrated instance.
-Unfortunately, some of the methods that Scala `Actor`s provide can not be migrated. For the following methods users need to find a workaround:
-
-1. `getState()` - actors in Akka are managed by their supervising actors and are restarted by default.
-In that scenario state of an actor is not relevant.
-
-2. `restart()` - explicitly restarts a Scala actor. There is no corresponding functionality in Akka.
-
-All other `Actor` methods need to be translated to two methods that exist on the ActorRef. The translation is achieved by the rules described below.
-Note that all the rules require the following imports:
-
- import scala.concurrent.duration._
- import scala.actors.migration.pattern.ask
- import scala.actors.migration._
- import scala.concurrent._
-
-Additionally rules 1-3 require an implicit `Timeout` with infinite duration defined in the scope. However, since Akka does not allow for infinite timeouts, we will use
-100 years. For example:
-
- implicit val timeout = Timeout(36500 days)
-
-Rules:
-
-1. `!!(msg: Any): Future[Any]` gets replaced with `?`. This rule will change a return type to the `scala.concurrent.Future` which might not type check.
-Since `scala.concurrent.Future` has broader functionality than the previously returned one, this type error can be easily fixed with local changes:
-
- actor !! message -> respActor ? message
-
-2. `!![A] (msg: Any, handler: PartialFunction[Any, A]): Future[A]` gets replaced with `?`. The handler can be extracted as a separate
-function and then applied to the generated future result. The result of a handle should yield another future like
-in the following example:
-
- val handler: PartialFunction[Any, T] = ... // handler
- actor !! (message, handler) -> (respActor ? message) map handler
-
-3. `!? (msg: Any): Any` gets replaced with `?` and explicit blocking on the returned future:
-
- actor !? message ->
- Await.result(respActor ? message, Duration.Inf)
-
-4. `!? (msec: Long, msg: Any): Option[Any]` gets replaced with `?` and explicit blocking on the future:
-
- actor !? (dur, message) ->
- val res = respActor.?(message)(Timeout(dur milliseconds))
- val optFut = res map (Some(_)) recover { case _ => None }
- Await.result(optFut, Duration.Inf)
-
-Public methods that are not mentioned here are declared public for purposes of the actors DSL. They can be used only
-inside the actor definition so their migration is not relevant in this step.
-
-### Step 3 - `Actor`s become `ActWithStash`s
-
-At this point all actors inherit the `Actor` trait, we instantiate actors through special factory methods,
-and all actors are accessed through the `ActorRef` interface.
-Now we need to change all actors to the `ActWithStash` class from the AMK. This class behaves exactly the same like Scala `Actor`
-but, additionally, provides methods that correspond to methods in Akka's `Actor` trait. This allows easy, step by step, migration to the Akka behavior.
-
-To achieve this all classes that extend `Actor` should extend the `ActWithStash`. Apply the
-following rule:
-
- class MyActor extends Actor -> class MyActor extends ActWithStash
-
-After this change code might not compile. The `receive` method exists in `ActWithStash` and can not be used in the body of the `act` as is. To redirect the compiler to the previous method
-add the type parameter to all `receive` calls in your system. For example:
-
- receive { case x: Int => "Number" } ->
- receive[String] { case x: Int => "Number" }
-
-Additionally, to make the code compile, users must add the `override` keyword before the `act` method, and to create
-the empty `receive` method in the code. Method `act` needs to be overridden since its implementation in `ActWithStash`
-mimics the message processing loop of Akka. The changes are shown in the following example:
-
- class MyActor extends ActWithStash {
-
- // dummy receive method (not used for now)
- def receive = {case _ => }
-
- override def act() {
- // old code with methods receive changed to react.
- }
- }
-
-
-`ActWithStash` instances have variable `trapExit` set to `true` by default. If that is not desired set it to `false` in the initializer of the class.
-
-The remote actors will not work with `ActWithStash` out of the box. The method `register('name, this)` needs to be replaced with:
-
- registerActorRef('name, self)
-
-In later steps of the migration, calls to `registerActorRef` and `alive` should be treated like any other calls.
-
-After this point user can run the test suite and the whole system should behave as before. The `ActWithStash` and `Actor` use the same infrastructure so the system
-should behave exactly the same.
-
-### Step 4 - Removing the `act` Method
-
-In this section we describe how to remove the `act` method from `ActWithStash`s and how to
-change the methods used in the `ActWithStash` to resemble Akka. Since this step can be complex, it is recommended
-to do changes one actor at a time. In Scala, an actor's behavior is defined by implementing the `act` method. Logically, an actor is a concurrent process
-which executes the body of its `act` method, and then terminates. In Akka, the behavior is defined by using a global message
-handler which processes the messages in the actor's mailbox one by one. The message handler is a partial function, returned by the `receive` method,
-which gets applied to each message.
-
-Since the behavior of Akka methods in the `ActWithStash` depends on the removal of the `act` method we have to do that first. Then we will give the translation
-rules for translating individual methods of the `scala.actors.Actor` trait.
-
-#### Removal of `act`
-
-In the following list we present the translation rules for common message processing patterns. This list is not
-exhaustive and it covers only some common patterns. However, users can migrate more complex `act` methods to Akka by looking
- at existing translation rules and extending them for more complex situations.
-
-A note about nested `react`/`reactWithin` calls: the message handling
-partial function needs to be expanded with additional constructs that
-bring it closer to the Akka model. Although these changes can be
-complicated, migration is possible for an arbitrary level of
-nesting. See below for examples.
-
-A note about using `receive`/`receiveWithin` with complex control
-flow: migration can be complicated since it requires refactoring the
-`act` method. A `receive` call can be modeled using `react` and
-`andThen` on the message processing partial function. Again, simple
-examples are shown below.
-
-1. If there is any code in the `act` method that is being executed before the first `loop` with `react` that code
-should be moved to the `preStart` method.
-
- def act() {
- // initialization code here
- loop {
- react { ... }
- }
- }
-
- should be replaced with
-
- override def preStart() {
- // initialization code here
- }
-
- def act() {
- loop {
- react{ ... }
- }
- }
-
- This rule should be used in other patterns as well if there is code before the first react.
-
-2. When `act` is in the form of a simple `loop` with a nested `react` use the following pattern.
-
- def act() = {
- loop {
- react {
- // body
- }
- }
- }
-
- should be replaced with
-
- def receive = {
- // body
- }
-
-3. When `act` contains a `loopWhile` construct use the following translation.
-
- def act() = {
- loopWhile(c) {
- react {
- case x: Int =>
- // do task
- if (x == 42) {
- c = false
- }
- }
- }
- }
-
- should be replaced with
-
- def receive = {
- case x: Int =>
- // do task
- if (x == 42) {
- context.stop(self)
- }
- }
-
-4. When `act` contains nested `react`s use the following rule:
-
- def act() = {
- var c = true
- loopWhile(c) {
- react {
- case x: Int =>
- // do task
- if (x == 42) {
- c = false
- } else {
- react {
- case y: String =>
- // do nested task
- }
- }
- }
- }
- }
-
- should be replaced with
-
- def receive = {
- case x: Int =>
- // do task
- if (x == 42) {
- context.stop(self)
- } else {
- context.become(({
- case y: String =>
- // do nested task
- }: Receive).andThen(x => {
- unstashAll()
- context.unbecome()
- }).orElse { case x => stash(x) })
- }
- }
-
-5. For `reactWithin` method use the following translation rule:
-
- loop {
- reactWithin(t) {
- case TIMEOUT => // timeout processing code
- case msg => // message processing code
- }
- }
-
- should be replaced with
-
- import scala.concurrent.duration._
-
- context.setReceiveTimeout(t millisecond)
- def receive = {
- case ReceiveTimeout => // timeout processing code
- case msg => // message processing code
- }
-
-6. Exception handling is done in a different way in Akka. To mimic Scala actors behavior apply the following rule
-
- def act() = {
- loop {
- react {
- case msg =>
- // work that can fail
- }
- }
- }
-
- override def exceptionHandler = {
- case x: Exception => println("got exception")
- }
-
- should be replaced with
-
- def receive = PFCatch({
- case msg =>
- // work that can fail
- }, { case x: Exception => println("got exception") })
-
- where `PFCatch` is defined as
-
- class PFCatch(f: PartialFunction[Any, Unit],
- handler: PartialFunction[Exception, Unit])
- extends PartialFunction[Any, Unit] {
-
- def apply(x: Any) = {
- try {
- f(x)
- } catch {
- case e: Exception if handler.isDefinedAt(e) =>
- handler(e)
- }
- }
-
- def isDefinedAt(x: Any) = f.isDefinedAt(x)
- }
-
- object PFCatch {
- def apply(f: PartialFunction[Any, Unit],
- handler: PartialFunction[Exception, Unit]) =
- new PFCatch(f, handler)
- }
-
- `PFCatch` is not included in the AMK as it can stay as the permanent feature in the migrated code
- and the AMK will be removed with the next major release. Once the whole migration is complete fault-handling
- can also be converted to the Akka [supervision](https://doc.akka.io/docs/akka/2.1.0/general/supervision.html#What_Supervision_Means).
-
-
-
-#### Changing `Actor` Methods
-
-After we have removed the `act` method we should rename the methods that do not exist in Akka but have similar functionality. In the following list we present
-the list of differences and their translation:
-
-1. `exit()`/`exit(reason)` - should be replaced with `context.stop(self)`
-
-2. `receiver` - should be replaced with `self`
-
-3. `reply(msg)` - should be replaced with `sender ! msg`
-
-4. `link(actor)` - In Akka, linking of actors is done partially by [supervision](https://doc.akka.io/docs/akka/2.1.0/general/supervision.html#What_Supervision_Means)
-and partially by [actor monitoring](https://doc.akka.io/docs/akka/2.1.0/general/supervision.html#What_Lifecycle_Monitoring_Means). In the AMK we support
-only the monitoring method so the complete Scala functionality can not be migrated.
-
- The difference between linking and watching is that watching actors always receive the termination notification.
-However, instead of matching on the Scala `Exit` message that contains the reason of termination the Akka watching
-returns the `Terminated(a: ActorRef)` message that contains only the `ActorRef`. The functionality of getting the reason
- for termination is not supported by the migration. It can be done in Akka, after the Step 4, by organizing the actors in a [supervision hierarchy](https://doc.akka.io/docs/akka/2.1.0/general/supervision.html).
-
- If the actor that is watching does not match the `Terminated` message, and this message arrives, it will be terminated with the `DeathPactException`.
-Note that this will happen even when the watched actor terminated normally. In Scala linked actors terminate, with the same termination reason, only if
-one of the actors terminates abnormally.
-
- If the system can not be migrated solely with `watch` the user should leave invocations to `link` and `exit(reason)` as is. However since `act()` overrides the `Exit` message the following transformation
-needs to be applied:
-
- case Exit(actor, reason) =>
- println("sorry about your " + reason)
- ...
-
- should be replaced with
-
- case t @ Terminated(actorRef) =>
- println("sorry about your " + t.reason)
- ...
-
- NOTE: There is another subtle difference between Scala and Akka actors. In Scala, `link`/`watch` to the already dead actor will not have affect.
-In Akka, watching the already dead actor will result in sending the `Terminated` message. This can give unexpected behavior in the Step 5 of the migration guide.
-
-### Step 5 - Moving to the Akka Back-end
-
-At this point user code is ready to operate on Akka actors. Now we can switch the actors library from Scala to
-Akka actors. To do this configure the build to exclude the `scala-actors.jar` and the `scala-actors-migration.jar`,
- and to include *akka-actor.jar* and *typesafe-config.jar*. The AMK is built to work only with Akka actors version 2.1 which are included in the [Scala distribution](https://www.scala-lang.org/downloads)
- and can be configured by these [instructions](https://doc.akka.io/docs/akka/2.1.0/intro/getting-started.html#Using_a_build_tool).
-
-After this change the compilation will fail due to different package names and slight differences in the API. We will have to change each imported actor
-from scala to Akka. Following is the non-exhaustive list of package names that need to be changed:
-
- scala.actors._ -> akka.actor._
- scala.actors.migration.ActWithStash -> akka.actor.ActorDSL._
- scala.actors.migration.pattern.ask -> akka.pattern.ask
- scala.actors.migration.Timeout -> akka.util.Timeout
-
-Also, method declarations `def receive =` in `ActWithStash` should be prepended with `override`.
-
-In Scala actors the `stash` method needs a message as a parameter. For example:
-
- def receive = {
- ...
- case x => stash(x)
- }
-
-In Akka only the currently processed message can be stashed. Therefore replace the above example with:
-
- def receive = {
- ...
- case x => stash()
- }
-
-#### Adding Actor Systems
-
-The Akka actors are organized in [Actor systems](https://doc.akka.io/docs/akka/2.1.0/general/actor-systems.html).
- Each actor that is instantiated must belong to one `ActorSystem`. To achieve this add an `ActorSystem` instance to each actor instantiation call as a first argument. The following example shows the transformation.
-
-To achieve this transformation you need to have an actor system instantiated. The actor system is usually instantiated in Scala objects or configuration classes that are global to your system. For example:
-
- val system = ActorSystem("migration-system")
-
-Then apply the following transformation:
-
- ActorDSL.actor(...) -> ActorDSL.actor(system)(...)
-
-If many calls to `actor` use the same `ActorSystem` it can be passed as an implicit parameter. For example:
-
- ActorDSL.actor(...) ->
- import project.implicitActorSystem
- ActorDSL.actor(...)
-
-Finally, Scala programs terminate when all the non-daemon threads and actors finish. With Akka, the program ends when all the non-daemon threads finish and all actor systems are shut down.
-Actor systems need to be explicitly terminated before the program can exit. This is achieved by invoking the `shutdown` method on an actor system.
-
-#### Remote Actors
-
-Once the code base is moved to Akka, remoting will no longer work. The methods `registerActorFor` and `alive` need to be removed. In Akka, remoting is done solely by configuration;
-for further details refer to the [Akka remoting documentation](https://doc.akka.io/docs/akka/2.1.0/scala/remoting.html).
-
-#### Examples and Issues
-All of the code snippets presented in this document can be found in the [Actors Migration test suite](https://github.com/scala/actors-migration/tree/master/src/test/) as test files with the prefix `actmig`.
-
-This document and the Actor Migration Kit were designed and implemented by: [Vojin Jovanovic](https://people.epfl.ch/vojin.jovanovic) and [Philipp Haller](https://lampwww.epfl.ch/~phaller/)
-
-If you find any issues or rough edges please report them at the [Scala Bugtracker](https://github.com/scala/actors-migration/issues).
diff --git a/_overviews/core/actors.md b/_overviews/core/actors.md
deleted file mode 100644
index cd1309e2ae..0000000000
--- a/_overviews/core/actors.md
+++ /dev/null
@@ -1,506 +0,0 @@
----
-layout: singlepage-overview
-title: The Scala Actors API
-
-partof: actors
-
-languages: [zh-cn, es]
-
-permalink: /overviews/core/:title.html
----
-
-**Philipp Haller and Stephen Tu**
-
-## Introduction
-
-This guide describes the API of the `scala.actors` package of Scala 2.8/2.9. The organization follows groups of types that logically belong together. The trait hierarchy is taken into account to structure the individual sections. The focus is on the run-time behavior of the various methods that these traits define, thereby complementing the existing Scaladoc-based API documentation.
-
-NOTE: In Scala 2.10 the Actors library is deprecated and will be removed in future Scala releases. Users should use [Akka](https://akka.io) actors from the package `akka.actor`. For migration from Scala actors to Akka refer to the [Actors Migration Guide](actors-migration-guide.html).
-
-## The actor traits Reactor, ReplyReactor, and Actor
-
-### The Reactor trait
-
-`Reactor` is the super trait of all actor traits. Extending this trait allows defining actors with basic capabilities to send and receive messages.
-
-The behavior of a `Reactor` is defined by implementing its `act` method. The `act` method is executed once the `Reactor` is started by invoking `start`, which also returns the `Reactor`. The `start` method is *idempotent* which means that invoking it on an actor that has already been started has no effect.
-
-The `Reactor` trait has a type parameter `Msg` which indicates the type of messages that the actor can receive.
-
-Invoking the `Reactor`'s `!` method sends a message to the receiver. Sending a message using `!` is asynchronous which means that the sending actor does not wait until the message is received; its execution continues immediately. For example, `a ! msg` sends `msg` to `a`. All actors have a *mailbox* which buffers incoming messages until they are processed.
-
-The `Reactor` trait also defines a `forward` method. This method is inherited from `OutputChannel`. It has the same effect as the `!` method. Subtraits of `Reactor`, in particular the `ReplyReactor` trait, override this method to enable implicit reply destinations (see below).
-
-A `Reactor` receives messages using the `react` method. `react` expects an argument of type `PartialFunction[Msg, Unit]` which defines how messages of type `Msg` are handled once they arrive in the actor's mailbox. In the following example, the current actor waits to receive the string "Hello", and then prints a greeting:
-
- react {
- case "Hello" => println("Hi there")
- }
-
-Invoking `react` never returns. Therefore, any code that should run after a message has been received must be contained inside the partial function that is passed to `react`. For example, two messages can be received in sequence by nesting two invocations of `react`:
-
- react {
- case Get(from) =>
- react {
- case Put(x) => from ! x
- }
- }
-
-The `Reactor` trait also provides control structures which simplify programming with `react`.
-
-#### Termination and execution states
-
-The execution of a `Reactor` terminates when the body of its `act` method has run to completion. A `Reactor` can also terminate itself explicitly using the `exit` method. The return type of `exit` is `Nothing`, because `exit` always throws an exception. This exception is only used internally, and should never be caught.
-
-A terminated `Reactor` can be restarted by invoking its `restart` method. Invoking `restart` on a `Reactor` that has not yet terminated throws an `IllegalStateException`. Restarting a terminated actor causes its `act` method to be rerun.
-
-`Reactor` defines a method `getState` which returns the actor's current execution state as a member of the `Actor.State` enumeration. An actor that has not yet been started is in state `Actor.State.New`. An actor that can run without waiting for a message is in state `Actor.State.Runnable`. An actor that is suspended, waiting for a message, is in state `Actor.State.Suspended`. A terminated actor is in state `Actor.State.Terminated`.
-
-#### Exception handling
-
-The `exceptionHandler` member allows defining an exception handler that is enabled throughout the entire lifetime of a `Reactor`:
-
- def exceptionHandler: PartialFunction[Exception, Unit]
-
-`exceptionHandler` returns a partial function which is used to handle exceptions that are not otherwise handled: whenever an exception propagates out of the body of a `Reactor`'s `act` method, the partial function is applied to that exception, allowing the actor to run clean-up code before it terminates. Note that the visibility of `exceptionHandler` is `protected`.
-
-Handling exceptions using `exceptionHandler` works well together with the control structures for programming with `react`. Whenever an exception has been handled using the partial function returned by `exceptionHandler`, execution continues with the current continuation closure. Example:
-
- loop {
- react {
- case Msg(data) =>
- if (cond) // process data
- else throw new Exception("cannot process data")
- }
- }
-
-Assuming that the `Reactor` overrides `exceptionHandler`, after an exception thrown inside the body of `react` is handled, execution continues with the next loop iteration.
-
-### The ReplyReactor trait
-
-The `ReplyReactor` trait extends `Reactor[Any]` and adds or overrides the following methods:
-
-- The `!` method is overridden to obtain a reference to the current
- actor (the sender); together with the actual message, the sender
- reference is transferred to the mailbox of the receiving actor. The
- receiver has access to the sender of a message through its `sender`
- method (see below).
-
-- The `forward` method is overridden to obtain a reference to the
- `sender` of the message that is currently being processed. Together
- with the actual message, this reference is transferred as the sender
- of the current message. As a consequence, `forward` allows
- forwarding messages on behalf of actors different from the current
- actor.
-
-- The added `sender` method returns the sender of the message that is
- currently being processed. Given the fact that a message might have
- been forwarded, `sender` may not return the actor that actually sent
- the message.
-
-- The added `reply` method sends a message back to the sender of the
- last message. `reply` is also used to reply to a synchronous message
- send or a message send with future (see below).
-
-- The added `!?` methods provide *synchronous message sends*. Invoking
- `!?` causes the sending actor to wait until a response is received
- which is then returned. There are two overloaded variants. The
- two-parameter variant takes in addition a timeout argument (in
- milliseconds), and its return type is `Option[Any]` instead of
- `Any`. If the sender does not receive a response within the
- specified timeout period, `!?` returns `None`, otherwise it returns
- the response wrapped in `Some`.
-
-- The added `!!` methods are similar to synchronous message sends in
- that they allow transferring a response from the receiver. However,
- instead of blocking the sending actor until a response is received,
- they return `Future` instances. A `Future` can be used to retrieve
- the response of the receiver once it is available; it can also be
- used to find out whether the response is already available without
- blocking the sender. There are two overloaded variants. The
- two-parameter variant takes in addition an argument of type
- `PartialFunction[Any, A]`. This partial function is used for
- post-processing the receiver's response. Essentially, `!!` returns a
- future which applies the partial function to the response once it is
- received. The result of the future is the result of this
- post-processing.
-
-- The added `reactWithin` method allows receiving messages within a
- given period of time. Compared to `react` it takes an additional
- parameter `msec` which indicates the time period in milliseconds
- until the special `TIMEOUT` pattern matches (`TIMEOUT` is a case
- object in package `scala.actors`). Example:
-
- reactWithin(2000) {
- case Answer(text) => // process text
- case TIMEOUT => println("no answer within 2 seconds")
- }
-
- The `reactWithin` method also allows non-blocking access to the
- mailbox. When specifying a time period of 0 milliseconds, the
- mailbox is first scanned to find a matching message. If there is no
- matching message after the first scan, the `TIMEOUT` pattern
- matches. For example, this enables receiving certain messages with a
- higher priority than others:
-
- reactWithin(0) {
- case HighPriorityMsg => // ...
- case TIMEOUT =>
- react {
- case LowPriorityMsg => // ...
- }
- }
-
- In the above example, the actor first processes the next
- `HighPriorityMsg`, even if there is a `LowPriorityMsg` that arrived
- earlier in its mailbox. The actor only processes a `LowPriorityMsg`
- *first* if there is no `HighPriorityMsg` in its mailbox.
-
-In addition, `ReplyReactor` adds the `Actor.State.TimedSuspended` execution state. A suspended actor, waiting to receive a message using `reactWithin` is in state `Actor.State.TimedSuspended`.
-
-### The Actor trait
-
-The `Actor` trait extends `ReplyReactor` and adds or overrides the following members:
-
-- The added `receive` method behaves like `react` except that it may
- return a result. This is reflected in its type, which is polymorphic
- in its result: `def receive[R](f: PartialFunction[Any, R]): R`.
- However, using `receive` makes the actor more heavyweight, since
- `receive` blocks the underlying thread while the actor is suspended
- waiting for a message. The blocked thread is unavailable to execute
- other actors until the invocation of `receive` returns.
-
-- The added `link` and `unlink` methods allow an actor to link and unlink
- itself to and from another actor, respectively. Linking can be used
- for monitoring and reacting to the termination of another actor. In
- particular, linking affects the behavior of invoking `exit` as
- explained in the API documentation of the `Actor` trait.
-
-- The `trapExit` member allows reacting to the termination of linked
- actors independently of the exit reason (that is, it does not matter
- whether the exit reason is `'normal` or not). If an actor's `trapExit`
- member is set to `true`, this actor will never terminate because of
- linked actors. Instead, whenever one of its linked actors terminates
- it will receive a message of type `Exit`. The `Exit` case class has two
- members: `from` refers to the actor that terminated; `reason` refers to
- the exit reason.
-
-#### Termination and execution states
-
-When terminating the execution of an actor, the exit reason can be set
-explicitly by invoking the following variant of `exit`:
-
- def exit(reason: AnyRef): Nothing
-
-An actor that terminates with an exit reason different from the symbol
-`'normal` propagates its exit reason to all actors linked to it. If an
-actor terminates because of an uncaught exception, its exit reason is
-an instance of the `UncaughtException` case class.
-
-The `Actor` trait adds two new execution states. An actor waiting to
-receive a message using `receive` is in state
-`Actor.State.Blocked`. An actor waiting to receive a message using
-`receiveWithin` is in state `Actor.State.TimedBlocked`.
-
-## Control structures
-
-The `Reactor` trait defines control structures that simplify programming
-with the non-returning `react` operation. Normally, an invocation of
-`react` does not return. If the actor should execute code subsequently,
-then one can either pass the actor's continuation code explicitly to
-`react`, or one can use one of the following control structures which
-hide these continuations.
-
-The most basic control structure is `andThen`. It allows registering a
-closure that is executed once the actor has finished executing
-everything else.
-
- actor {
- {
- react {
- case "hello" => // processing "hello"
- }: Unit
- } andThen {
- println("hi there")
- }
- }
-
-For example, the above actor prints a greeting after it has processed
-the `"hello"` message. Even though the invocation of `react` does not
-return, we can use `andThen` to register the code which prints the
-greeting as the actor's continuation.
-
-Note that there is a *type ascription* that follows the `react`
-invocation (`: Unit`). Basically, it lets you treat the result of
-`react` as having type `Unit`, which is legal, since the result of an
-expression can always be dropped. This is necessary to do here, since
-`andThen` cannot be a member of type `Nothing` which is the result
-type of `react`. Treating the result type of `react` as `Unit` allows
-the application of an implicit conversion which makes the `andThen`
-member available.
-
-The API provides a few more control structures:
-
-- `loop { ... }`. Loops indefinitely, executing the code in braces in
- each iteration. Invoking `react` inside the loop body causes the
- actor to react to a message as usual. Subsequently, execution
- continues with the next iteration of the same loop.
-
-- `loopWhile (c) { ... }`. Executes the code in braces while the
- condition `c` returns `true`. Invoking `react` in the loop body has
- the same effect as in the case of `loop`.
-
-- `continue`. Continues with the execution of the current continuation
- closure. Invoking `continue` inside the body of a `loop` or
- `loopWhile` will cause the actor to finish the current iteration and
- continue with the next iteration. If the current continuation has
- been registered using `andThen`, execution continues with the
- closure passed as the second argument to `andThen`.
-
-The control structures can be used anywhere in the body of a `Reactor`'s
-`act` method and in the bodies of methods (transitively) called by
-`act`. For actors created using the `actor { ... }` shorthand the control
-structures can be imported from the `Actor` object.
-
-#### Futures
-
-The `ReplyReactor` and `Actor` traits support result-bearing message
-send operations (the `!!` methods) that immediately return a
-*future*. A future, that is, an instance of the `Future` trait, is a
-handle that can be used to retrieve the response to such a message
-send-with-future.
-
-The sender of a message send-with-future can wait for the future's
-response by *applying* the future. For example, sending a message using
-`val fut = a !! msg` allows the sender to wait for the result of the
-future as follows: `val res = fut()`.
-
-In addition, a `Future` can be queried to find out whether its result
-is available without blocking using the `isSet` method.
-
-A message send-with-future is not the only way to obtain a
-future. Futures can also be created from computations directly.
-In the following example, the computation body is started to
-run concurrently, returning a future for its result:
-
- val fut = Future { body }
- // ...
- fut() // wait for future
-
-What makes futures special in the context of actors is the possibility
-to retrieve their result using the standard actor-based receive
-operations, such as `receive` etc. Moreover, it is possible to use the
-event-based operations `react` and `reactWithin`. This enables an actor to
-wait for the result of a future without blocking its underlying
-thread.
-
-The actor-based receive operations are made available through the
-future's `inputChannel`. For a future of type `Future[T]`, its type is
-`InputChannel[T]`. Example:
-
- val fut = a !! msg
- // ...
- fut.inputChannel.react {
- case Response => // ...
- }
-
-## Channels
-
-Channels can be used to simplify the handling of messages that have
-different types but that are sent to the same actor. The hierarchy of
-channels is divided into `OutputChannel`s and `InputChannel`s.
-
-`OutputChannel`s can be sent messages. An `OutputChannel` `out`
-supports the following operations.
-
-- `out ! msg`. Asynchronously sends `msg` to `out`. A reference to the
- sending actor is transferred as in the case where `msg` is sent
- directly to an actor.
-
-- `out forward msg`. Asynchronously forwards `msg` to `out`. The
- sending actor is determined as in the case where `msg` is forwarded
- directly to an actor.
-
-- `out.receiver`. Returns the unique actor that is receiving messages
- sent to the `out` channel.
-
-- `out.send(msg, from)`. Asynchronously sends `msg` to `out` supplying
- `from` as the sender of the message.
-
-Note that the `OutputChannel` trait has a type parameter that specifies
-the type of messages that can be sent to the channel (using `!`,
-`forward`, and `send`). The type parameter is contravariant:
-
- trait OutputChannel[-Msg]
-
-Actors can receive messages from `InputChannel`s. Like `OutputChannel`,
-the `InputChannel` trait has a type parameter that specifies the type of
-messages that can be received from the channel. The type parameter is
-covariant:
-
- trait InputChannel[+Msg]
-
-An `InputChannel[Msg]` `in` supports the following operations.
-
-- `in.receive { case Pat1 => ... ; case Patn => ... }` (and similarly,
- `in.receiveWithin`). Receives a message from `in`. Invoking
- `receive` on an input channel has the same semantics as the standard
- `receive` operation for actors. The only difference is that the
- partial function passed as an argument has type
- `PartialFunction[Msg, R]` where `R` is the return type of `receive`.
-
-- `in.react { case Pat1 => ... ; case Patn => ... }` (and similarly,
- `in.reactWithin`). Receives a message from `in` using the
- event-based `react` operation. Like `react` for actors, the return
- type is `Nothing`, indicating that invocations of this method never
- return. Like the `receive` operation above, the partial function
- passed as an argument has a more specific type:
-
- PartialFunction[Msg, Unit]
-
-### Creating and sharing channels
-
-Channels are created using the concrete `Channel` class. It extends both
-`InputChannel` and `OutputChannel`. A channel can be shared either by
-making the channel visible in the scopes of multiple actors, or by
-sending it in a message.
-
-The following example demonstrates scope-based sharing.
-
- actor {
- var out: OutputChannel[String] = null
- val child = actor {
- react {
- case "go" => out ! "hello"
- }
- }
- val channel = new Channel[String]
- out = channel
- child ! "go"
- channel.receive {
- case msg => println(msg.length)
- }
- }
-
-Running this example prints the string `"5"` to the console. Note that
-the `child` actor has only access to `out` which is an
-`OutputChannel[String]`. The `channel` reference, which can also be used
-to receive messages, is hidden. However, care must be taken to ensure
-the output channel is initialized to a concrete channel before the
-`child` sends messages to it. This is done using the `"go"` message. When
-receiving from `channel` using `channel.receive` we can make use of the
-fact that `msg` is of type `String`; therefore, it provides a `length`
-member.
-
-An alternative way to share channels is by sending them in
-messages. The following example demonstrates this.
-
- case class ReplyTo(out: OutputChannel[String])
-
- val child = actor {
- react {
- case ReplyTo(out) => out ! "hello"
- }
- }
-
- actor {
- val channel = new Channel[String]
- child ! ReplyTo(channel)
- channel.receive {
- case msg => println(msg.length)
- }
- }
-
-The `ReplyTo` case class is a message type that we use to distribute a
-reference to an `OutputChannel[String]`. When the `child` actor receives a
-`ReplyTo` message it sends a string to its output channel. The second
-actor receives a message on that channel as before.
-
-## Schedulers
-
-A `Reactor` (or an instance of a subtype) is executed using a
-*scheduler*. The `Reactor` trait introduces the `scheduler` member which
-returns the scheduler used to execute its instances:
-
- def scheduler: IScheduler
-
-The run-time system executes actors by submitting tasks to the
-scheduler using one of the `execute` methods defined in the `IScheduler`
-trait. Most of the trait's other methods are only relevant when
-implementing a new scheduler from scratch, which is rarely necessary.
-
-The default schedulers used to execute instances of `Reactor` and `Actor`
-detect the situation when all actors have finished their
-execution. When this happens, the scheduler shuts itself down
-(terminating any threads used by the scheduler). However, some
-schedulers, such as the `SingleThreadedScheduler` (in package `scheduler`)
-have to be shut down explicitly by invoking their `shutdown` method.
-
-The easiest way to create a custom scheduler is by extending
-`SchedulerAdapter`, implementing the following abstract member:
-
- def execute(fun: => Unit): Unit
-
-Typically, a concrete implementation would use a thread pool to
-execute its by-name argument `fun`.
-
-## Remote Actors
-
-This section describes the remote actors API. Its main interface is
-the [`RemoteActor`](https://www.scala-lang.org/api/2.9.1/scala/actors/remote/RemoteActor$.html) object in package `scala.actors.remote`. This object
-provides methods to create and connect to remote actor instances. In
-the code snippets shown below we assume that all members of
-`RemoteActor` have been imported; the full list of imports that we use
-is as follows:
-
- import scala.actors._
- import scala.actors.Actor._
- import scala.actors.remote._
- import scala.actors.remote.RemoteActor._
-
-### Starting remote actors
-
-A remote actor is uniquely identified by a [`Symbol`](https://www.scala-lang.org/api/current/scala/Symbol.html). This symbol is
-unique to the JVM instance on which the remote actor is executed. A
-remote actor identified with name `'myActor` can be created as follows.
-
- class MyActor extends Actor {
- def act() {
- alive(9000)
- register('myActor, self)
- // ...
- }
- }
-
-Note that a name can only be registered with a single (alive) actor at
-a time. For example, to register an actor *A* as `'myActor`, and then
-register another actor *B* as `'myActor`, one would first have to wait
-until *A* has terminated. This requirement applies across all ports, so
-simply registering *B* on a different port than *A* is not sufficient.
-
-### Connecting to remote actors
-
-Connecting to a remote actor is just as simple. To obtain a remote
-reference to a remote actor running on machine `myMachine`, on port
-8000, with name `'anActor`, use `select` in the following manner:
-
- val myRemoteActor = select(Node("myMachine", 8000), 'anActor)
-
-The actor returned from `select` has type `AbstractActor` which provides
-essentially the same interface as a regular actor, and thus supports
-the usual message send operations:
-
- myRemoteActor ! "Hello!"
- receive {
- case response => println("Response: " + response)
- }
- myRemoteActor !? "What is the meaning of life?" match {
- case 42 => println("Success")
- case oops => println("Failed: " + oops)
- }
- val future = myRemoteActor !! "What is the last digit of PI?"
-
-Note that `select` is lazy; it does not actually initiate any network
-connections. It simply creates a new `AbstractActor` instance which is
-ready to initiate a new network connection when needed (for instance,
-when `!` is invoked).
diff --git a/_overviews/core/architecture-of-scala-213-collections.md b/_overviews/core/architecture-of-scala-213-collections.md
index 4574abab9f..1d8da0859d 100644
--- a/_overviews/core/architecture-of-scala-213-collections.md
+++ b/_overviews/core/architecture-of-scala-213-collections.md
@@ -9,13 +9,13 @@ permalink: /overviews/core/:title.html
This document describes the architecture of the Scala collections
framework in detail. Compared to
-[the Collections Introduction]({{ site.baseurl }}/overviews/collections/introduction.html) you
+[the Collections Introduction]({{ site.baseurl }}/overviews/collections-2.13/introduction.html) you
will find out more about the internal workings of the framework. You
will also learn how this architecture helps you define your own
collections in a few lines of code, while reusing the overwhelming
part of collection functionality from the framework.
-[The Collections API]({{ site.baseurl }}/overviews/collections/introduction.html)
+[The Collections API]({{ site.baseurl }}/overviews/collections-2.13/trait-iterable.html)
contains a large number of collection
operations, which exist uniformly on many different collection
implementations. Implementing every collection operation anew for
@@ -70,6 +70,8 @@ because we want them to return collection types that are unknown yet.
For instance, consider the signature of the `map` operation on `List[A]`
and `Vector[A]`:
+{% tabs factoring_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=factoring_1 %}
~~~ scala
trait List[A] {
def map[B](f: A => B): List[B]
@@ -79,6 +81,17 @@ trait Vector[A] {
def map[B](f: A => B): Vector[B]
}
~~~
+{% endtab %}
+{% tab 'Scala 3' for=factoring_1 %}
+~~~ scala
+trait List[A]:
+ def map[B](f: A => B): List[B]
+
+trait Vector[A]:
+ def map[B](f: A => B): Vector[B]
+~~~
+{% endtab %}
+{% endtabs %}
To generalize the type signature of `map` we have to abstract over
the resulting *collection type constructor*.
@@ -86,6 +99,8 @@ the resulting *collection type constructor*.
A slightly different example is `filter`. Consider its type signature on
`List[A]` and `Map[K, V]`:
+{% tabs factoring_2 class=tabs-scala-version %}
+{% tab 'Scala 2' for=factoring_2 %}
~~~ scala
trait List[A] {
def filter(p: A => Boolean): List[A]
@@ -95,6 +110,17 @@ trait Map[K, V] {
def filter(p: ((K, V)) => Boolean): Map[K, V]
}
~~~
+{% endtab %}
+{% tab 'Scala 3' for=factoring_2 %}
+~~~ scala
+trait List[A]:
+ def filter(p: A => Boolean): List[A]
+
+trait Map[K, V]:
+ def filter(p: ((K, V)) => Boolean): Map[K, V]
+~~~
+{% endtab %}
+{% endtabs %}
To generalize the type signature of `filter` we have to abstract
over the resulting *collection type*.
@@ -112,9 +138,13 @@ on the `Iterable[A]` collection type.
Here is the header of trait `IterableOps`:
+{% tabs abstracting_1 %}
+{% tab 'Scala 2 and 3' for=abstracting_1 %}
~~~ scala
trait IterableOps[+A, +CC[_], +C] { … }
~~~
+{% endtab %}
+{% endtabs %}
The type parameter `A` stands for the element type of the iterable,
the type parameter `CC` stands for the collection type constructor
@@ -123,21 +153,36 @@ and the type parameter `C` stands for the collection type.
This allows us to define the signature of `filter` and `map` like
so:
+{% tabs abstracting_2 class=tabs-scala-version %}
+{% tab 'Scala 2' for=abstracting_2 %}
~~~ scala
trait IterableOps[+A, +CC[_], +C] {
def filter(p: A => Boolean): C = …
def map[B](f: A => B): CC[B] = …
}
~~~
+{% endtab %}
+{% tab 'Scala 3' for=abstracting_2 %}
+~~~ scala
+trait IterableOps[+A, +CC[_], +C]:
+ def filter(p: A => Boolean): C = …
+ def map[B](f: A => B): CC[B] = …
+~~~
+{% endtab %}
+{% endtabs %}
Leaf collection types appropriately instantiate the type
parameters. For instance, in the case of `List[A]` we want `CC` to
be `List` and `C` to be `List[A]`:
+{% tabs abstracting_3 %}
+{% tab 'Scala 2 and 3' for=abstracting_3 %}
~~~ scala
trait List[+A] extends Iterable[A]
with IterableOps[A, List, List[A]]
~~~
+{% endtab %}
+{% endtabs %}
## Four branches of templates traits ##
@@ -149,19 +194,33 @@ parameter whereas `Map[K, V]` takes two type parameters.
To support collection types constructors with two types parameters
we have another template trait named `MapOps`:
+{% tabs fourBranches_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=fourBranches_1 %}
~~~ scala
trait MapOps[K, +V, +CC[_, _], +C] extends IterableOps[(K, V), Iterable, C] {
def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = …
}
-~~~
+~~~
+{% endtab %}
+{% tab 'Scala 3' for=fourBranches_1 %}
+~~~ scala
+trait MapOps[K, +V, +CC[_, _], +C] extends IterableOps[(K, V), Iterable, C]:
+ def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = …
+~~~
+{% endtab %}
+{% endtabs %}
And then `Map[K, V]` can extend this trait and appropriately instantiate its
type parameters:
+{% tabs fourBranches_2 %}
+{% tab 'Scala 2 and 3' for=fourBranches_2 %}
~~~ scala
trait Map[K, V] extends Iterable[(K, V)]
with MapOps[K, V, Map, Map[K, V]]
~~~
+{% endtab %}
+{% endtabs %}
Note that the `MapOps` trait inherits from `IterableOps` so that operations
defined in `IterableOps` are also available in `MapOps`. Also note that
@@ -169,6 +228,8 @@ the collection type constructor passed to the `IterableOps` trait is
`Iterable`. This means that `Map[K, V]` inherits two overloads of the `map`
operation:
+{% tabs fourBranches_3 %}
+{% tab 'Scala 2 and 3' for=fourBranches_3 %}
~~~ scala
// from MapOps
def map[K2, V2](f: ((K, V)) => (K2, V2)): Map[K2, V2]
@@ -176,6 +237,8 @@ def map[K2, V2](f: ((K, V)) => (K2, V2)): Map[K2, V2]
// from IterableOps
def map[B](f: ((K, V)) => B): Iterable[B]
~~~
+{% endtab %}
+{% endtabs %}
At use-site, when you call the `map` operation, the compiler selects one of
the two overloads. If the function passed as argument to `map` returns a pair,
@@ -196,9 +259,18 @@ operations defined in `IterableOps` don’t match the type signature of a
more concrete collection type: `SortedSet[A]`. In that case the type
signature of the `map` operation is the following:
+{% tabs fourBranches_4 class=tabs-scala-version %}
+{% tab 'Scala 2' for=fourBranches_4 %}
~~~ scala
def map[B](f: A => B)(implicit ord: Ordering[B]): SortedSet[B]
~~~
+{% endtab %}
+{% tab 'Scala 3' for=fourBranches_4 %}
+~~~ scala
+def map[B](f: A => B)(using ord: Ordering[B]): SortedSet[B]
+~~~
+{% endtab %}
+{% endtabs %}
The difference with the signature we have in `IterableOps` is that here
we need an implicit `Ordering` instance for the type of elements.
@@ -206,24 +278,36 @@ we need an implicit `Ordering` instance for the type of elements.
Like for `Map`, `SortedSet` needs a specialized template trait with
overloads for transformation operations:
+{% tabs fourBranches_5 class=tabs-scala-version %}
+{% tab 'Scala 2' for=fourBranches_5 %}
~~~ scala
trait SortedSetOps[A, +CC[_], +C] extends IterableOps[A, Set, C] {
-
def map[B](f: A => B)(implicit ord: Ordering[B]): CC[B] = …
-
}
~~~
+{% endtab %}
+{% tab 'Scala 3' for=fourBranches_5 %}
+~~~ scala
+trait SortedSetOps[A, +CC[_], +C] extends IterableOps[A, Set, C]:
+ def map[B](f: A => B)(using ord: Ordering[B]): CC[B] = …
+~~~
+{% endtab %}
+{% endtabs %}
And then collection types that inherit the `SortedSetOps` trait appropriately
instantiate its type parameters:
+{% tabs fourBranches_6 %}
+{% tab 'Scala 2 and 3' for=fourBranches_6 %}
~~~ scala
trait SortedSet[A] extends SortedSetOps[A, SortedSet, SortedSet[A]]
~~~
+{% endtab %}
+{% endtabs %}
Last, there is a fourth kind of collection that requires a specialized template
trait: `SortedMap[K, V]`. This type of collection has two type parameters and
-needs an implicit ordering instance on the type of keys. Therefore we have a
+needs an implicit ordering instance on the type of keys. Therefore, we have a
`SortedMapOps` template trait that provides the appropriate overloads.
In total, we’ve seen that we have four branches of template traits:
@@ -260,11 +344,21 @@ non-strict `View`. For the record, a `View` “describes” an operation applied
to a collection but does not evaluate its result until the `View` is
effectively traversed. Here is the (simplified) definition of `View`:
+{% tabs nonStrict_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=nonStrict_1 %}
~~~ scala
trait View[+A] extends Iterable[A] with IterableOps[A, View, View[A]] {
def iterator: Iterator[A]
}
~~~
+{% endtab %}
+{% tab 'Scala 3' for=nonStrict_1 %}
+~~~ scala
+trait View[+A] extends Iterable[A], IterableOps[A, View, View[A]]:
+ def iterator: Iterator[A]
+~~~
+{% endtab %}
+{% endtabs %}
A `View` is an `Iterable` that has only one abstract method returning
an `Iterator` for traversing its elements. The `View` elements are
@@ -276,6 +370,8 @@ Now that we are more familiar with the hierarchy of the template traits, we can
a look at the actual implementation of some operations. Consider for instance the
implementations of `filter` and `map`:
+{% tabs operations_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=operations_1 %}
~~~ scala
trait IterableOps[+A, +CC[_], +C] {
@@ -289,6 +385,22 @@ trait IterableOps[+A, +CC[_], +C] {
protected def from[E](it: IterableOnce[E]): CC[E]
}
~~~
+{% endtab %}
+{% tab 'Scala 3' for=operations_1 %}
+~~~ scala
+trait IterableOps[+A, +CC[_], +C]:
+
+ def filter(pred: A => Boolean): C =
+ fromSpecific(View.Filter(this, pred))
+
+ def map[B](f: A => B): CC[B] =
+ from(View.Map(this, f))
+
+ protected def fromSpecific(coll: IterableOnce[A]): C
+ protected def from[E](it: IterableOnce[E]): CC[E]
+~~~
+{% endtab %}
+{% endtabs %}
Let’s detail the implementation of `filter`, step by step:
@@ -299,13 +411,15 @@ Let’s detail the implementation of `filter`, step by step:
concrete collections: they can decide to evaluate in a strict or non-strict way
the elements resulting from the operation.
-The implementation of `map` is similar, excepted that instead of using
+The implementation of `map` is similar, except that instead of using
`fromSpecific` it uses `from` which takes as parameter an
iterable whose element type `E` is arbitrary.
Actually, the `from` operation is not defined directly in `IterableOps` but is accessed via
an (abstract) `iterableFactory` member:
+{% tabs operations_2 class=tabs-scala-version %}
+{% tab 'Scala 2' for=operations_2 %}
~~~ scala
trait IterableOps[+A, +CC[_], +C] {
@@ -313,24 +427,47 @@ trait IterableOps[+A, +CC[_], +C] {
def map[B](f: A => B): CC[B] =
iterableFactory.from(new View.Map(this, f))
-
}
~~~
+{% endtab %}
+{% tab 'Scala 3' for=operations_2 %}
+~~~ scala
+trait IterableOps[+A, +CC[_], +C]:
+
+ def iterableFactory: IterableFactory[CC]
+
+ def map[B](f: A => B): CC[B] =
+ iterableFactory.from(View.Map(this, f))
+~~~
+{% endtab %}
+{% endtabs %}
This `iterableFactory` member is implemented by concrete collections and typically
refers to their companion object, which provides factory methods to create concrete
collection instances. Here is an excerpt of the definition of `IterableFactory`:
+{% tabs operations_3 class=tabs-scala-version %}
+{% tab 'Scala 2' for=operations_3 %}
~~~ scala
trait IterableFactory[+CC[_]] {
def from[A](source: IterableOnce[A]): CC[A]
}
~~~
+{% endtab %}
+{% tab 'Scala 3' for=operations_3 %}
+~~~ scala
+trait IterableFactory[+CC[_]]:
+ def from[A](source: IterableOnce[A]): CC[A]
+~~~
+{% endtab %}
+{% endtabs %}
Last but not least, as explained in the above sections, since we have four branches
of template traits, we have four corresponding branches of factories. For instance,
-here are the relevant parts of code of the `map` operation implementation in `Map`:
+here are the relevant parts of code of the `map` operation implementation in `MapOps`:
+{% tabs operations_4 class=tabs-scala-version %}
+{% tab 'Scala 2' for=operations_4 %}
~~~ scala
trait MapOps[K, +V, +CC[_, _], +C]
extends IterableOps[(K, V), Iterable, C] {
@@ -347,11 +484,28 @@ trait MapFactory[+CC[_, _]] {
def from[K, V](it: IterableOnce[(K, V)]): CC[K, V]
}
~~~
+{% endtab %}
+{% tab 'Scala 3' for=operations_4 %}
+~~~ scala
+trait MapOps[K, +V, +CC[_, _], +C]
+ extends IterableOps[(K, V), Iterable, C]:
+
+ def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] =
+ mapFactory.from(View.Map(this, f))
+
+ // Similar to iterableFactory, but for Map collection types
+ def mapFactory: MapFactory[CC]
+
+trait MapFactory[+CC[_, _]]:
+ def from[K, V](it: IterableOnce[(K, V)]): CC[K, V]
+~~~
+{% endtab %}
+{% endtabs %}
## When a strict evaluation is preferable (or unavoidable) ##
In the previous sections we explained that the “strictness” of concrete collections
-should be preserved by default operation implementations. However in some cases this
+should be preserved by default operation implementations. However, in some cases this
leads to less efficient implementations. For instance, `partition` has to perform
two traversals of the underlying collection. In some other case (e.g. `groupBy`) it
is simply not possible to implement the operation without evaluating the collection
@@ -361,6 +515,8 @@ For those cases, we also provide ways to implement operations in a strict mode.
The pattern is different: instead of being based on a `View`, it is based on a
`Builder`. Here is an outline of the `Builder` trait:
+{% tabs builders_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=builders_1 %}
~~~ scala
package scala.collection.mutable
@@ -369,6 +525,17 @@ trait Builder[-A, +C] {
def result(): C
}
~~~
+{% endtab %}
+{% tab 'Scala 3' for=builders_1 %}
+~~~ scala
+package scala.collection.mutable
+
+trait Builder[-A, +C]:
+ def addOne(elem: A): this.type
+ def result(): C
+~~~
+{% endtab %}
+{% endtabs %}
Builders are generic in both the element type `A` and the type of collection they
return, `C`.
@@ -381,6 +548,8 @@ to get a builder resulting in a collection of the same type but with a different
type of elements. The following code shows the relevant parts of `IterableOps` and
`IterableFactory` to build collections in both strict and non-strict modes:
+{% tabs builders_2 class=tabs-scala-version %}
+{% tab 'Scala 2' for=builders_2 %}
~~~ scala
trait IterableOps[+A, +CC[_], +C] {
def iterableFactory: IterableFactory[CC]
@@ -393,8 +562,22 @@ trait IterableFactory[+CC[_]] {
def newBuilder[A]: Builder[A, CC[A]]
}
~~~
+{% endtab %}
+{% tab 'Scala 3' for=builders_2 %}
+~~~ scala
+trait IterableOps[+A, +CC[_], +C]:
+ def iterableFactory: IterableFactory[CC]
+ protected def fromSpecific(coll: IterableOnce[A]): C
+ protected def newSpecificBuilder: Builder[A, C]
+
+trait IterableFactory[+CC[_]]:
+ def from[A](source: IterableOnce[A]): CC[A]
+ def newBuilder[A]: Builder[A, CC[A]]
+~~~
+{% endtab %}
+{% endtabs %}
-Note that, in general, an operation that doesn’t *have to* be strict should
+Note that, in general, an operation that doesn't *have to* be strict should
be implemented in a non-strict mode, otherwise it would lead to surprising
behaviour when used on a non-strict concrete collection (you can read more
about that statement in
diff --git a/_overviews/core/architecture-of-scala-collections.md b/_overviews/core/architecture-of-scala-collections.md
index 76bcde648f..74f2bd1b98 100644
--- a/_overviews/core/architecture-of-scala-collections.md
+++ b/_overviews/core/architecture-of-scala-collections.md
@@ -217,7 +217,7 @@ maps the key/value pair to an integer, namely its value component. In
that case, we cannot form a `Map` from the results, but we can still
form an `Iterable`, a supertrait of `Map`.
-You might ask, why not restrict `map` so that it can always return the
+You might ask why not restrict `map` so that it can always return the
same kind of collection? For instance, on bit sets `map` could accept
only `Int`-to-`Int` functions and on `Map`s it could only accept
pair-to-pair functions. Not only are such restrictions undesirable
@@ -270,7 +270,7 @@ construct another `BitSet` provided the element type of the collection to build
is `Int`. If this is not the case, the compiler will check the superclasses, and
fall back to the implicit builder factory defined in
`mutable.Set`'s companion object. The type of this more general builder
-factory, where `A` is a generic type parameter, is:
+factory, where `A` is a type parameter, is:
CanBuildFrom[Set[_], A, Set[A]]
@@ -646,7 +646,7 @@ function, which is also the element type of the new collection. The
`That` appears as the result type of `map`, so it represents the type of
the new collection that gets created.
-How is the `That` type determined? In fact it is linked to the other
+How is the `That` type determined? In fact, it is linked to the other
types by an implicit parameter `cbf`, of type `CanBuildFrom[Repr, B, That]`.
These `CanBuildFrom` implicits are defined by the individual
collection classes. Recall that an implicit value of type
@@ -747,7 +747,7 @@ ignoring its argument.
That is it. The final [`RNA` class](#final-version-of-rna-strands-class)
implements all collection methods at
-their expected types. Its implementation requires a little bit of
+their expected types. Its implementation requires a bit of
protocol. In essence, you need to know where to put the `newBuilder`
factories and the `canBuildFrom` implicits. On the plus side, with
relatively little code you get a large number of methods automatically
@@ -789,7 +789,7 @@ storing the strings "abc", "abd", "al", "all" and "xy" would look
like this:
A sample patricia trie:
-
+
To find the node corresponding to the string "abc" in this trie,
simply follow the subtree labeled "a", proceed from there to the
@@ -979,14 +979,14 @@ provided by the `empty` method, which is the last method defined in
}
}
-We'll now turn to the companion object `PrefixMap`. In fact it is not
+We'll now turn to the companion object `PrefixMap`. In fact, it is not
strictly necessary to define this companion object, as class `PrefixMap`
can stand well on its own. The main purpose of object `PrefixMap` is to
define some convenience factory methods. It also defines a
`CanBuildFrom` implicit to make typing work out better.
The two convenience methods are `empty` and `apply`. The same methods are
-present for all other collections in Scala's collection framework so
+present for all other collections in Scala's collection framework, so
it makes sense to define them here, too. With the two methods, you can
write `PrefixMap` literals like you do for any other collection:
diff --git a/_overviews/core/binary-compatibility-of-scala-releases.md b/_overviews/core/binary-compatibility-of-scala-releases.md
index cadac052e8..f72d3979fd 100644
--- a/_overviews/core/binary-compatibility-of-scala-releases.md
+++ b/_overviews/core/binary-compatibility-of-scala-releases.md
@@ -7,31 +7,80 @@ partof: binary-compatibility
permalink: /overviews/core/:title.html
---
-When two versions of Scala are binary compatible, it is safe to compile your project on one Scala version and link against another Scala version at run time. Safe run-time linkage (only!) means that the JVM does not throw a (subclass of) [`LinkageError`](https://docs.oracle.com/javase/7/docs/api/java/lang/LinkageError.html) when executing your program in the mixed scenario, assuming that none arise when compiling and running on the same version of Scala. Concretely, this means you may have external dependencies on your run-time classpath that use a different version of Scala than the one you're compiling with, as long as they're binary compatible. In other words, separate compilation on different binary compatible versions does not introduce problems compared to compiling and running everything on the same version of Scala.
+When two versions of Scala are binary compatible, it is safe to compile your project on one Scala version and link against another Scala version at run time. Safe run-time linkage (only!) means that the JVM does not throw a (subclass of) [`LinkageError`](https://docs.oracle.com/javase/8/docs/api/java/lang/LinkageError.html) when executing your program in the mixed scenario, assuming that none arise when compiling and running on the same version of Scala. Concretely, this means you may have external dependencies on your run-time classpath that use a different version of Scala than the one you're compiling with, as long as they're binary compatible. In other words, separate compilation on different binary compatible versions does not introduce problems compared to compiling and running everything on the same version of Scala.
-We check binary compatibility automatically with [MiMa](https://github.com/lightbend/migration-manager). We strive to maintain a similar invariant for the `behavior` (as opposed to just linkage) of the standard library, but this is not checked mechanically (Scala is not a proof assistant so this is out of reach for its type system).
+We check binary compatibility automatically with [MiMa](https://github.com/lightbend/mima). We strive to maintain a similar invariant for the `behavior` (as opposed to just linkage) of the standard library, but this is not checked mechanically (Scala is not a proof assistant so this is out of reach for its type system).
+
+Note that for Scala.js and Scala Native, binary compatibility issues result in errors at build time, as opposed to run-time exceptions.
+They happen during their respective "linking" phases: `{fast,full}LinkJS` for Scala.js and `nativeLink` for Scala Native.
#### Forward and Back
-We distinguish forward and backward compatibility (think of these as properties of a sequence of versions, not of an individual version). Maintaining backwards compatibility means code compiled on an older version will link with code compiled with newer ones. Forward compatibility allows you to compile on new versions and run on older ones.
+We distinguish forward and backward compatibility (think of these as properties of a sequence of versions, not of an individual version). Maintaining backward compatibility means code compiled on an older version will link with code compiled with newer ones. Forward compatibility allows you to compile on new versions and run on older ones.
+
+Thus, backward compatibility precludes the removal of (non-private) methods, as older versions could call them, not knowing they would be removed, whereas forward compatibility disallows adding new (non-private) methods, because newer programs may come to depend on them, which would prevent them from running on older versions (private methods are exempted here as well, as their definition and call sites must be in the same source file).
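+
+As a hedged illustration with a made-up library class (not from any real library), adding a public method in a newer release preserves backward compatibility but gives up forward compatibility:
+
+~~~ scala
+// Version 1.1.0 of a hypothetical library class; version 1.0.0 had only `hello`.
+class Greeter {
+  def hello(name: String): String = s"Hello, $name"
+
+  // Added in 1.1.0: code compiled against 1.0.0 still links against this jar
+  // (backward compatible), but code compiled against 1.1.0 that calls `bye`
+  // cannot link against the 1.0.0 jar (no forward compatibility).
+  def bye(name: String): String = s"Bye, $name"
+}
+~~~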
+
+#### Guarantees and Versioning
+For Scala 2, the *minor* version is the *third* number in a version, e.g., 16 in v2.13.16.
+The major version is the second number, which is 13 in our example.
+
+Scala 2 up to 2.13.16 guarantees both backward and forward compatibility across *minor* releases within a single major release.
+This is about to change now that [SIP-51 has been accepted](https://docs.scala-lang.org/sips/drop-stdlib-forwards-bin-compat.html): future Scala 2.13 releases may be backward compatible only.
+
+For Scala 3, the minor version is the *second* number in a version, e.g., 2 in v3.2.1.
+The third number is the *patch* version.
+The major version is always 3.
+
+Scala 3 guarantees both backward and forward compatibility across *patch* releases within a single minor release (enforcing forward binary compatibility is helpful to maintain source compatibility).
+In particular, this applies within an entire [Long-Term-Support (LTS) series](https://www.scala-lang.org/blog/2022/08/17/long-term-compatibility-plans.html) such as Scala 3.3.x.
+
+Scala 3 also guarantees *backward* compatibility across *minor* releases in the entire 3.x series, but not forward compatibility.
+This means that libraries compiled with any Scala 3.x version can be used in projects compiled with any Scala 3.y version with y >= x.
-Thus, backwards compatibility precludes the removal of (non-private) methods, as older versions could call them, not knowing they would be removed, whereas forwards compatibility disallows adding new (non-private) methods, because newer programs may come to depend on them, which would prevent them from running on older versions (private methods are exempted here as well, as their definition and call sites must be in the same compilation unit).
+In addition, Scala 3.x provides backward binary compatibility with respect to Scala 2.13.y.
+Libraries compiled with Scala 2.13.y can be used in projects using Scala 3.x.
+This policy does not apply to experimental Scala 2 features, which notably includes *macros*.
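+
+As a sketch of what this looks like in practice (the `"com.example" %% "somelib"` coordinates are made up), an sbt build for a Scala 3 project can depend on a Scala 2.13 artifact via sbt's `CrossVersion.for3Use2_13`:
+
+~~~ scala
+// build.sbt of a Scala 3 project depending on a library published only for Scala 2.13
+scalaVersion := "3.3.4"
+
+libraryDependencies +=
+  ("com.example" %% "somelib" % "1.0.0").cross(CrossVersion.for3Use2_13)
+~~~
+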
-These are strict constraints, but they have worked well for us since Scala 2.10.x. They didn't stop us from fixing large numbers of issues in minor releases. The advantages are clear, so we will maintain this policy for future Scala major releases.
+In general, none of those guarantees apply to *experimental* features and APIs.
-#### Meta
-Note that so far we've only talked about the jars generated by scalac for the standard library and reflection.
-Our policies do not extend to the meta-issue: ensuring binary compatibility for bytecode generated from identical sources, by different version of scalac? (The same problem exists for compiling on different JDKs.) While we strive to achieve this, it's not something we can test in general. Notable examples where we know meta-binary compatibility is hard to achieve: specialisation and the optimizer.
+#### Checking
+For the Scala library artifacts (`scala-library`, `scala-reflect` and `scala3-library`), these guarantees are mechanically checked with [MiMa](https://github.com/lightbend/mima).
-In short, we recommend that library authors use [MiMa](https://github.com/lightbend/migration-manager) to verify compatibility of minor versions before releasing.
-Compiling identical sources with different versions of the scala compiler (or on different JVM versions!) could result in binary incompatible bytecode. This is rare, and we try to avoid it, but we can't guarantee it will never happen.
+The *policies* above extend to libraries compiled by particular Scala compiler versions.
+Every effort is made to preserve the binary compatibility of artifacts produced by the compiler.
+*However*, that cannot be mechanically checked.
+It is therefore possible, due to bugs or unforeseen consequences, that recompiling a library with a different compiler version affects its binary API.
+We cannot *guarantee* that it will never happen.
+
+We recommend that library authors use [MiMa](https://github.com/lightbend/mima) themselves to verify compatibility of minor versions before releasing.
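+
+A minimal sbt setup might look like the following sketch (the plugin version and the `"com.example" %% "mylib"` coordinates are placeholders to adapt):
+
+~~~ scala
+// project/plugins.sbt
+addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.3")
+
+// build.sbt: the previously released artifact to compare against
+mimaPreviousArtifacts := Set("com.example" %% "mylib" % "1.2.0")
+~~~
+
+Running `sbt mimaReportBinaryIssues` then reports any backward-incompatible changes against the declared previous artifacts.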
+
+#### TASTy and Pickle Compatibility
+*Binary* compatibility is a concept relevant at link time of the target platform (JVM, Scala.js or Scala Native).
+TASTy and Pickle compatibility are similar but apply at *compile* time for the Scala compiler.
+TASTy applies to Scala 3, Pickle to Scala 2.
+
+If a library was compiled with an older version of the compiler, we say that the library is backward TASTy/Pickle compatible if it can be used within an application compiled with a newer compiler version.
+Likewise, forward TASTy/Pickle compatibility goes in the other direction.
+
+The same policies as for binary compatibility apply to TASTy/Pickle compatibility, although they are not mechanically checked.
+
+Library authors may automatically check TASTy/Pickle backward compatibility for their libraries using [TASTy-MiMa](https://github.com/scalacenter/tasty-mima).
+Disclaimer: TASTy-MiMa is a young project.
+At this point, you are likely going to run into bugs.
+Please report issues you find to its issue tracker.
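+
+A sketch of the sbt setup, assuming the plugin coordinates and setting name below match the TASTy-MiMa documentation (double-check them; the plugin version and library coordinates are placeholders):
+
+~~~ scala
+// project/plugins.sbt
+addSbtPlugin("ch.epfl.scala" % "sbt-tasty-mima" % "1.0.0")
+
+// build.sbt: previously released artifacts to check TASTy compatibility against
+tastyMiMaPreviousArtifacts := Set("com.example" %% "mylib" % "1.2.0")
+~~~
+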
#### Concretely
-We guarantee forwards and backwards compatibility of the `"org.scala-lang" % "scala-library" % "2.N.x"` and `"org.scala-lang" % "scala-reflect" % "2.N.x"` artifacts, except for
-- the `scala.reflect.internal` and `scala.reflect.io` packages, as scala-reflect is still experimental, and
+We guarantee backward compatibility of the `"org.scala-lang" % "scala-library" % "2.N.x"` and `"org.scala-lang" % "scala-reflect" % "2.N.x"` artifacts, except for
+- the `scala.reflect.internal` and `scala.reflect.io` packages, as scala-reflect is experimental, and
- the `scala.runtime` package, which contains classes used by generated code at runtime.
We also strongly discourage relying on the stability of `scala.concurrent.impl`, `scala.sys.process.*Impl`, and `scala.reflect.runtime`, though we will only break compatibility for severe bugs here.
-Note that we will only enforce *backwards* binary compatibility for modules (artifacts under the groupId `org.scala-lang.modules`). As they are opt-in, it's less of a burden to require having the latest version on the classpath. (Without forward compatibility, the latest version of the artifact must be on the run-time classpath to avoid linkage errors.)
+We guarantee backward compatibility of the `"org.scala-lang" % "scala3-library_3" % "3.x.y"` artifact.
+Forward compatibility is only guaranteed for `3.N.y` within a given `N`.
+
+We enforce *backward* (but not forward) binary compatibility for *modules* (artifacts under the groupId `org.scala-lang.modules`). As they are opt-in, it's less of a burden to require having the latest version on the classpath. (Without forward compatibility, the latest version of the artifact must be on the run-time classpath to avoid linkage errors.)
-Finally, from Scala 2.11 until Scala 2.13.0-M1, `scala-library-all` aggregates all modules that constitute a Scala release. Note that this means it does not provide forward binary compatibility, whereas the core `scala-library` artifact does. We consider the versions of the modules that `"scala-library-all" % "2.N.x"` depends on to be the canonical ones, that are part of the official Scala distribution. (The distribution itself is defined by the `scala-dist` maven artifact.)
+#### Build Tools
+Build tools like sbt and mill have assumptions about backward binary compatibility built in.
+They build a graph of a project's dependencies and, for each dependency, select the most recent version that is required.
+To learn more, see the page on [library dependencies](https://www.scala-sbt.org/1.x/docs/Library-Dependencies.html) in the sbt documentation.
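+
+As an illustration (the module names are made up), when two dependencies pull in different versions of the same library, sbt keeps only the newest one and relies on its backward binary compatibility:
+
+~~~ scala
+// build.sbt
+libraryDependencies ++= Seq(
+  "com.example" %% "service-a" % "1.0.0", // transitively depends on core-lib 1.1.0
+  "com.example" %% "service-b" % "1.0.0"  // transitively depends on core-lib 1.3.0
+)
+// sbt evicts core-lib 1.1.0 in favour of 1.3.0; the `evicted` task shows such decisions
+~~~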
diff --git a/_overviews/core/collections-migration-213.md b/_overviews/core/collections-migration-213.md
index 2d3df08410..76cd202cd3 100644
--- a/_overviews/core/collections-migration-213.md
+++ b/_overviews/core/collections-migration-213.md
@@ -15,6 +15,8 @@ The most important changes in the Scala 2.13 collections library are:
- Transformation methods no longer have an implicit `CanBuildFrom` parameter. This makes the library easier to understand (in source code, Scaladoc, and IDE code completion). It also makes compiling user code more efficient.
- The type hierarchy is simplified. `Traversable` no longer exists, only `Iterable`.
- The `to[Collection]` method was replaced by the `to(Collection)` method.
+ - The `toC` methods are strict by convention and yield the default collection type where applicable. For example, `Iterator.continually(42).take(10).toSeq` produces a `List[Int]`; without the `take(10)` limit it would never terminate, because `toSeq` is now strict.
+ - `toIterable` is deprecated wherever defined. For `Iterator`, in particular, prefer `to(LazyList)`.
- Views have been vastly simplified and work reliably now. They no longer extend their corresponding collection type, for example, an `IndexedSeqView` no longer extends `IndexedSeq`.
- `collection.breakOut` no longer exists, use `.view` and `.to(Collection)` instead.
- Immutable hash sets and hash maps have a new implementation (`ChampHashSet` and `ChampHashMap`, based on the ["CHAMP" encoding](https://michael.steindorfer.name/publications/oopsla15.pdf)).
@@ -27,7 +29,7 @@ The most important changes in the Scala 2.13 collections library are:
## Tools for migrating and cross-building
-The [scala-collection-compat](https://github.com/scala/scala-collection-compat) is a library released for 2.11, 2.12 and 2.13 that provides some of the new APIs from Scala 2.13 for the older versions. This simplifies cross-building projects.
+The [scala-collection-compat](https://github.com/scala/scala-collection-compat) library is released for 2.11, 2.12 and 2.13 and provides some new APIs from Scala 2.13 for the older versions. This simplifies cross-building projects.
The module also provides [migration rules](https://github.com/scala/scala-collection-compat#migration-tool) for [scalafix](https://scalacenter.github.io/scalafix/docs/users/installation.html) that can update a project's source code to work with the 2.13 collections library.
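+
+To use it, you would typically add the dependency to your build and import the compatibility shims in cross-built sources (the version below is a placeholder; check the project's README for the current release):
+
+~~~ scala
+// build.sbt
+libraryDependencies += "org.scala-lang.modules" %% "scala-collection-compat" % "<version>"
+~~~
+
+~~~ scala
+// in sources cross-built for 2.11/2.12: brings the 2.13-style APIs into scope
+import scala.collection.compat._
+~~~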
@@ -40,7 +42,7 @@ a method such as `orderFood(xs: _*)` the varargs parameter `xs` must be an immut
[SLS 6.6]: https://www.scala-lang.org/files/archive/spec/2.12/06-expressions.html#function-applications
-Therefore any method signature in Scala 2.13 which includes `scala.Seq`, varargs, or `scala.IndexedSeq` is going
+Therefore, any method signature in Scala 2.13 which includes `scala.Seq`, varargs, or `scala.IndexedSeq` is going
to have a breaking change in API semantics (as the immutable sequence types require more — immutability — than the
not-immutable types). For example, users of a method like `def orderFood(order: Seq[Order]): Seq[Food]` would
previously have been able to pass in an `ArrayBuffer` of `Order`, but cannot in 2.13.
@@ -66,7 +68,7 @@ We recommend using `import scala.collection`/`import scala.collection.immutable`
`collection.Seq`/`immutable.Seq`.
We recommend against using `import scala.collection.Seq`, which shadows the automatically imported `scala.Seq`,
-because even if it's a oneline change it causes name confusion. For code generation or macros the safest option
+because even if it's a one-line change it causes name confusion. For code generation or macros the safest option
is using the fully-qualified `_root_.scala.collection.Seq`.
As an example, the migration would look something like this:
@@ -79,7 +81,7 @@ object FoodToGo {
}
~~~
-However users of this code in Scala 2.13 would also have to migrate, as the result type is source-incompatible
+However, users of this code in Scala 2.13 would also have to migrate, as the result type is source-incompatible
with any `scala.Seq` (or just `Seq`) usage in their code:
~~~ scala
@@ -231,7 +233,7 @@ Other notable changes are:
You can make this conversion explicit by writing `f _` or `f(_)` instead of `f`.
scala> Map(1 -> "a").map(f _)
res10: scala.collection.immutable.Map[Int,String] = ChampHashMap(2 -> a)
- - `View`s have been completely redesigned and we expect their usage to have a more predictable evaluation model.
+ - `View`s have been completely redesigned, and we expect their usage to have a more predictable evaluation model.
You can read more about the new design [here](https://scala-lang.org/blog/2017/11/28/view-based-collections.html).
- `mutable.ArraySeq` (which wraps an `Array[AnyRef]` in 2.12, meaning that primitives were boxed in the array) can now wrap boxed and unboxed arrays. `mutable.ArraySeq` in 2.13 is in fact equivalent to `WrappedArray` in 2.12, there are specialized subclasses for primitive arrays. Note that a `mutable.ArraySeq` can be used either way for primitive arrays (TODO: document how). `WrappedArray` is deprecated.
- There is no "default" `Factory` (previously known as `[A, C] => CanBuildFrom[Nothing, A, C]`): use `Factory[A, Vector[A]]` explicitly instead.
@@ -267,7 +269,7 @@ The following table lists the changes that continue to work with a deprecation w
## Deprecated things in 2.12 that have been removed in 2.13
- `collection.JavaConversions`. Use `scala.jdk.CollectionConverters` instead. Previous advice was to use `collection.JavaConverters` which is now deprecated ;
-- `collection.mutable.MutableList` (was not deprecated in 2.12 but was considered to be an implementation detail for implementing other collections). Use an `ArrayDeque` instead, or a `List` and a `var` ;
+- `collection.mutable.MutableList` (was not deprecated in 2.12 but was considered to be an implementation detail for implementing other collections). Use an `ArrayDeque` or `mutable.ListBuffer` instead, or a `List` and a `var` ;
- `collection.immutable.Stack`. Use a `List` instead ;
- `StackProxy`, `MapProxy`, `SetProxy`, `SeqProxy`, etc. No replacement ;
- `SynchronizedMap`, `SynchronizedBuffer`, etc. Use `java.util.concurrent` instead ;
diff --git a/_overviews/core/custom-collection-operations.md b/_overviews/core/custom-collection-operations.md
index 87b2f863ae..f6d4f08d34 100644
--- a/_overviews/core/custom-collection-operations.md
+++ b/_overviews/core/custom-collection-operations.md
@@ -29,6 +29,8 @@ as parameter, or an `Iterable[A]` if you need more than one traversal.
For instance, say we want to implement a `sumBy` operation that sums the elements of a
collection after they have been transformed by a function:
+{% tabs sumBy_1 %}
+{% tab 'Scala 2 and 3' for=sumBy_1 %}
~~~ scala
case class User(name: String, age: Int)
@@ -36,10 +38,14 @@ val users = Seq(User("Alice", 22), User("Bob", 20))
println(users.sumBy(_.age)) // “42”
~~~
+{% endtab %}
+{% endtabs %}
+
+{% tabs sumBy_2 class=tabs-scala-version %}
+{% tab 'Scala 2' for=sumBy_2 %}
We can define the `sumBy` operation as an extension method, using an
[implicit class](/overviews/core/implicit-classes.html), so that it can be called like a method:
-
~~~ scala
import scala.collection.IterableOnce
@@ -54,15 +60,35 @@ implicit class SumByOperation[A](coll: IterableOnce[A]) {
}
}
~~~
-
 Unfortunately, this extension method works neither with values of type `String` nor
 with `Array`. This is because these types are not part of the Scala collections
 hierarchy. They can be converted to proper collection types, but the extension
 method will not work directly on `String` and `Array` because that would require
 applying two implicit conversions in a row.
+{% endtab %}
+{% tab 'Scala 3' for=sumBy_2 %}
+
+We can define the `sumBy` operation as an extension method:
+~~~ scala
+import scala.collection.IterableOnce
+
+extension [A](coll: IterableOnce[A])
+ def sumBy[B: Numeric](f: A => B): B =
+ val it = coll.iterator
+ var result = f(it.next())
+ while it.hasNext do
+ result = summon[Numeric[B]].plus(result, f(it.next()))
+ result
+~~~
+{% endtab %}
+{% endtabs %}
+
### Consuming any type that is *like* a collection
+{% tabs sumBy_3 class=tabs-scala-version %}
+{% tab 'Scala 2' for=sumBy_3 %}
+
If we want the `sumBy` to work on any type that is *like* a collection, such as `String`
and `Array`, we have to add another indirection level:
@@ -81,11 +107,34 @@ The type `IsIterable[Repr]` has implicit instances for all types `Repr` that can
to `IterableOps[A, Iterable, C]` (for some element type `A` and some collection type `C`). There are
instances for actual collection types and also for `String` and `Array`.
+{% endtab %}
+{% tab 'Scala 3' for=sumBy_3 %}
+
+We expect the `sumBy` to work on any type that is *like* a collection, such as `String`
+and `Array`. Fortunately, the type `IsIterable[Repr]` has implicit instances for all types `Repr` that can be converted
+to `IterableOps[A, Iterable, C]` (for some element type `A` and some collection type `C`). There are
+instances for actual collection types, and also for `String` and `Array`.
+
+~~~ scala
+import scala.collection.generic.IsIterable
+
+extension [Repr](repr: Repr)(using iter: IsIterable[Repr])
+ def sumBy[B: Numeric](f: iter.A => B): B =
+ val coll = iter(repr)
+ ... // same as before
+~~~
+
+{% endtab %}
+{% endtabs %}
+
### Consuming a more specific collection than `Iterable`
In some cases we want (or need) the receiver of the operation to be more specific than `Iterable`.
For instance, some operations make sense only on `Seq` but not on `Set`.
+{% tabs sumBy_4 class=tabs-scala-version %}
+{% tab 'Scala 2' for=sumBy_4 %}
+
In such a case, again, the most straightforward solution would be to take as parameter a `Seq` instead
of an `Iterable` or an `IterableOnce`, but this would work only with *actual* `Seq` values. If you want
to support `String` and `Array` values you have to use `IsSeq` instead. `IsSeq` is similar to
@@ -95,6 +144,20 @@ Using `IsSeq` is also required to make your operation work on `SeqView` values,
does not extend `Seq`. Similarly, there is an `IsMap` type that makes operations work with
both `Map` and `MapView` values.
+{% endtab %}
+{% tab 'Scala 3' for=sumBy_4 %}
+
+In such a case, again, the most straightforward solution would be to take as parameter a `Seq` instead
+of an `Iterable` or an `IterableOnce`, but this would work only with *actual* `Seq` values. To also support
+`String` and `Array` values, use `IsSeq` instead: similarly to `IsIterable`, it provides a
+conversion to `SeqOps[A, Iterable, C]` (for some types `A` and `C`).
+
+`IsSeq` also makes your operation work on `SeqView` values, because `SeqView`
+does not extend `Seq`. Similarly, there is an `IsMap` type that makes operations work with
+both `Map` and `MapView` values.
+
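+For example, here is a sketch of a small operation that only makes sense on sequences
+(the `penultimate` extension method is made up for illustration; it only relies on the
+`apply` and `length` members provided by the `SeqOps` conversion):
+
+~~~ scala
+import scala.collection.generic.IsSeq
+
+extension [Repr](repr: Repr)(using seq: IsSeq[Repr])
+  /** The second-to-last element (assumes at least two elements). */
+  def penultimate: seq.A =
+    val seqOps = seq(repr)
+    seqOps(seqOps.length - 2)
+
+// List(1, 2, 3).penultimate  // 2
+// "hello".penultimate        // 'l'
+~~~
+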
+{% endtab %}
+{% endtabs %}
+
## Producing any collection
This situation happens when a library provides an operation that produces a collection while leaving the
@@ -105,6 +168,8 @@ Such a type class is typically used to create arbitrary test data.
Our goal is to define a `collection` operation that generates arbitrary collections containing arbitrary
values. Here is an example of use of `collection`:
+{% tabs Gen_1 %}
+{% tab 'Scala 2 and 3' for=Gen_1 %}
~~~
scala> collection[List, Int].get
res0: List[Int] = List(606179450, -1479909815, 2107368132, 332900044, 1833159330, -406467525, 646515139, -575698977, -784473478, -1663770602)
@@ -115,19 +180,34 @@ res1: LazyList[Boolean] = LazyList(_, ?)
scala> collection[Set, Int].get
res2: Set[Int] = HashSet(-1775377531, -1376640531, -1009522404, 526943297, 1431886606, -1486861391)
~~~
+{% endtab %}
+{% endtabs %}
A very basic definition of `Gen[A]` could be the following:
-```tut
+{% tabs Gen_2 class=tabs-scala-version %}
+{% tab 'Scala 2' for=Gen_2 %}
+```scala mdoc
trait Gen[A] {
/** Get a generated value of type `A` */
def get: A
}
```
+{% endtab %}
+{% tab 'Scala 3' for=Gen_2 %}
+```scala
+trait Gen[A]:
+ /** Get a generated value of type `A` */
+ def get: A
+```
+{% endtab %}
+{% endtabs %}
And the following instances can be defined:
-```tut
+{% tabs Gen_3 class=tabs-scala-version %}
+{% tab 'Scala 2' for=Gen_3 %}
+```scala mdoc
import scala.util.Random
object Gen {
@@ -150,6 +230,29 @@ object Gen {
}
```
+{% endtab %}
+{% tab 'Scala 3' for=Gen_3 %}
+```scala
+import scala.util.Random
+
+object Gen:
+
+ /** Generator of `Int` values */
+ given Gen[Int] with
+ def get: Int = Random.nextInt()
+
+ /** Generator of `Boolean` values */
+ given Gen[Boolean] with
+ def get: Boolean = Random.nextBoolean()
+
+ /** Given a generator of `A` values, provides a generator of `List[A]` values */
+  given [A: Gen]: Gen[List[A]] with
+ def get: List[A] =
+ if Random.nextInt(100) < 10 then Nil
+ else summon[Gen[A]].get :: get
+```
+{% endtab %}
+{% endtabs %}
The last definition (`list`) generates a value of type `List[A]` given a generator
of values of type `A`. We could implement a generator of `Vector[A]` or `Set[A]` as
@@ -160,6 +263,8 @@ can decide which collection type they want to produce.
To achieve that we have to use `scala.collection.Factory`:
+{% tabs Gen_4 class=tabs-scala-version %}
+{% tab 'Scala 2' for=Gen_4 %}
~~~ scala
trait Factory[-A, +C] {
@@ -177,6 +282,27 @@ trait Factory[-A, +C] {
def newBuilder: Builder[A, C]
}
~~~
+{% endtab %}
+{% tab 'Scala 3' for=Gen_4 %}
+~~~ scala
+trait Factory[-A, +C]:
+
+ /** @return A collection of type `C` containing the same elements
+ * as the source collection `it`.
+ * @param it Source collection
+ */
+ def fromSpecific(it: IterableOnce[A]): C
+
+ /** Get a Builder for the collection. For non-strict collection
+ * types this will use an intermediate buffer.
+ * Building collections with `fromSpecific` is preferred
+ * because it can be lazy for lazy collections.
+ */
+ def newBuilder: Builder[A, C]
+end Factory
+~~~
+{% endtab %}
+{% endtabs %}
The `Factory[A, C]` trait provides two ways of building a collection `C` from
elements of type `A`:
@@ -193,6 +319,8 @@ In practice, it is recommended to [not eagerly evaluate the elements of the coll
Finally, here is how we can implement a generator of arbitrary collection types:
+{% tabs Gen_5 class=tabs-scala-version %}
+{% tab 'Scala 2' for=Gen_5 %}
~~~ scala
import scala.collection.Factory
@@ -211,6 +339,22 @@ implicit def collection[CC[_], A](implicit
}
}
~~~
+{% endtab %}
+{% tab 'Scala 3' for=Gen_5 %}
+~~~ scala
+import scala.collection.Factory
+
+given [CC[_], A: Gen](using Factory[A, CC[A]]): Gen[CC[A]] with
+ def get: CC[A] =
+ val lazyElements =
+ LazyList.unfold(()) { _ =>
+ if Random.nextInt(100) < 10 then None
+ else Some((summon[Gen[A]].get, ()))
+ }
+ summon[Factory[A, CC[A]]].fromSpecific(lazyElements)
+~~~
+{% endtab %}
+{% endtabs %}
The implementation uses a lazy source collection of a random size (`lazyElements`).
Then it calls the `fromSpecific` method of the `Factory` to build the collection
@@ -225,10 +369,14 @@ For instance, we want to implement an `intersperse` operation that can be applie
any sequence and returns a sequence with a new element inserted between each element of the
source sequence:
+{% tabs intersperse_1 %}
+{% tab 'Scala 2 and 3' for=intersperse_1 %}
~~~ scala
List(1, 2, 3).intersperse(0) == List(1, 0, 2, 0, 3)
"foo".intersperse(' ') == "f o o"
~~~
+{% endtab %}
+{% endtabs %}
When we call it on a `List`, we want to get back another `List`, and when we call it on
a `String` we want to get back another `String`, and so on.
@@ -236,12 +384,15 @@ a `String` we want to get back another `String`, and so on.
Building on what we’ve learned from the previous sections, we can start defining an extension method
using `IsSeq` and producing a collection by using an implicit `Factory`:
+{% tabs intersperse_2 class=tabs-scala-version %}
+{% tab 'Scala 2' for=intersperse_2 %}
~~~ scala
-import scala.collection.{ AbstractIterator, AbstractView, Factory, SeqOps }
+import scala.collection.{ AbstractIterator, AbstractView, Factory }
import scala.collection.generic.IsSeq
-class IntersperseOperation[A](seqOps: SeqOps[A, Iterable, _]) {
- def intersperse[B >: A, That](sep: B)(implicit factory: Factory[B, That]): That =
+class IntersperseOperation[Repr](coll: Repr, seq: IsSeq[Repr]) {
+ def intersperse[B >: seq.A, That](sep: B)(implicit factory: Factory[B, That]): That = {
+ val seqOps = seq(coll)
factory.fromSpecific(new AbstractView[B] {
def iterator = new AbstractIterator[B] {
val it = seqOps.iterator
@@ -254,18 +405,45 @@ class IntersperseOperation[A](seqOps: SeqOps[A, Iterable, _]) {
}
}
})
+ }
}
-implicit def IntersperseOperation[Repr](coll: Repr)(implicit seq: IsSeq[Repr]): IntersperseOperation[seq.A] =
- new IntersperseOperation(seq(coll))
+implicit def IntersperseOperation[Repr](coll: Repr)(implicit seq: IsSeq[Repr]): IntersperseOperation[Repr] =
+ new IntersperseOperation(coll, seq)
+~~~
+{% endtab %}
+{% tab 'Scala 3' for=intersperse_2 %}
+~~~ scala
+import scala.collection.{ AbstractIterator, AbstractView, Factory }
+import scala.collection.generic.IsSeq
+
+extension [Repr](coll: Repr)(using seq: IsSeq[Repr])
+ def intersperse[B >: seq.A, That](sep: B)(using factory: Factory[B, That]): That =
+ val seqOps = seq(coll)
+ factory.fromSpecific(new AbstractView[B]:
+ def iterator = new AbstractIterator[B]:
+ val it = seqOps.iterator
+ var intersperseNext = false
+ def hasNext = intersperseNext || it.hasNext
+ def next() =
+ val elem = if intersperseNext then sep else it.next()
+ intersperseNext = !intersperseNext && it.hasNext
+ elem
+ )
~~~
+{% endtab %}
+{% endtabs %}
However, if we try it we get the following behaviour:
+{% tabs intersperse_3 %}
+{% tab 'Scala 2 and 3' for=intersperse_3 %}
~~~
scala> List(1, 2, 3).intersperse(0)
res0: Array[Int] = Array(1, 0, 2, 0, 3)
~~~
+{% endtab %}
+{% endtabs %}
We get back an `Array` although the source collection was a `List`! Indeed, there is
nothing that constrains the result type of `intersperse` to depend on the receiver type.
@@ -274,6 +452,8 @@ To produce a collection whose type depends on a source collection, we have to us
`scala.collection.BuildFrom` (formerly known as `CanBuildFrom`) instead of `Factory`.
`BuildFrom` is defined as follows:
+{% tabs intersperse_4 class=tabs-scala-version %}
+{% tab 'Scala 2' for=intersperse_4 %}
~~~ scala
trait BuildFrom[-From, -A, +C] {
/** @return a collection of type `C` containing the same elements
@@ -287,11 +467,29 @@ trait BuildFrom[-From, -A, +C] {
def newBuilder(from: From): Builder[A, C]
}
~~~
+{% endtab %}
+{% tab 'Scala 3' for=intersperse_4 %}
+~~~ scala
+trait BuildFrom[-From, -A, +C]:
+ /** @return a collection of type `C` containing the same elements
+ * (of type `A`) as the source collection `it`.
+ */
+ def fromSpecific(from: From)(it: IterableOnce[A]): C
+
+ /** @return a Builder for the collection type `C`, containing
+ * elements of type `A`.
+ */
+ def newBuilder(from: From): Builder[A, C]
+~~~
+{% endtab %}
+{% endtabs %}
`BuildFrom` has similar operations to `Factory`, but they take an additional `from`
parameter. Before explaining how implicit instances of `BuildFrom` are resolved, let’s first have
a look at how you can use it. Here is the implementation of `intersperse` based on `BuildFrom`:
+{% tabs intersperse_5 class=tabs-scala-version %}
+{% tab 'Scala 2' for=intersperse_5 %}
~~~ scala
import scala.collection.{ AbstractView, BuildFrom }
import scala.collection.generic.IsSeq
@@ -308,13 +506,32 @@ class IntersperseOperation[Repr, S <: IsSeq[Repr]](coll: Repr, seq: S) {
implicit def IntersperseOperation[Repr](coll: Repr)(implicit seq: IsSeq[Repr]): IntersperseOperation[Repr, seq.type] =
new IntersperseOperation(coll, seq)
~~~
+{% endtab %}
+{% tab 'Scala 3' for=intersperse_5 %}
+~~~ scala
+import scala.collection.{ AbstractIterator, AbstractView, BuildFrom }
+import scala.collection.generic.IsSeq
+
+extension [Repr](coll: Repr)(using seq: IsSeq[Repr])
+ def intersperse[B >: seq.A, That](sep: B)(using bf: BuildFrom[Repr, B, That]): That =
+ val seqOps = seq(coll)
+ bf.fromSpecific(coll)(new AbstractView[B]:
+ // same as before
+ )
+~~~
+{% endtab %}
+{% endtabs %}
Note that we track the type of the receiver collection `Repr` in the `IntersperseOperation`
class. Now, consider what happens when we write the following expression:
+{% tabs intersperse_6 %}
+{% tab 'Scala 2 and 3' for=intersperse_6 %}
~~~ scala
List(1, 2, 3).intersperse(0)
~~~
+{% endtab %}
+{% endtabs %}
An implicit parameter of type `BuildFrom[Repr, B, That]` has to be resolved by the compiler.
The type `Repr` is constrained by the receiver type (here, `List[Int]`) and the type `B` is
@@ -329,5 +546,5 @@ be `List[Int]`.
as parameter,
- To also support `String`, `Array` and `View`, use `IsIterable`,
- To produce a collection given its type, use a `Factory`,
-- To produce a collection based on the type of a source collection and the type of elements of the collection
- to produce, use `BuildFrom`.
\ No newline at end of file
+- To produce a collection based on the type of the source collection and the type of elements of the collection
+ to produce, use `BuildFrom`.
diff --git a/_overviews/core/custom-collections.md b/_overviews/core/custom-collections.md
index 6720bcf5de..6164ec3af2 100644
--- a/_overviews/core/custom-collections.md
+++ b/_overviews/core/custom-collections.md
@@ -27,15 +27,21 @@ to choose `Seq` because our collection can contain duplicates and
iteration order is determined by insertion order. However, some
[properties of `Seq`](/overviews/collections/seqs.html) are not satisfied:
+{% tabs notCapped_1 %}
+{% tab 'Scala 2 and 3' for=notCapped_1 %}
~~~ scala
(xs ++ ys).size == xs.size + ys.size
~~~
+{% endtab %}
+{% endtabs %}
Consequently, the only sensible choice as a base collection type
is `collection.immutable.Iterable`.
### First version of `Capped` class ###
+{% tabs capped1_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=capped1_1 %}
~~~ scala
import scala.collection._
@@ -72,11 +78,54 @@ class Capped1[A] private (val capacity: Int, val length: Int, offset: Int, elems
elem
}
}
-
+
override def className = "Capped1"
}
~~~
+{% endtab %}
+{% tab 'Scala 3' for=capped1_1 %}
+~~~scala
+import scala.collection.*
+
+class Capped1[A] private (val capacity: Int, val length: Int, offset: Int, elems: Array[Any])
+ extends immutable.Iterable[A]:
+ self =>
+
+ def this(capacity: Int) =
+ this(capacity, length = 0, offset = 0, elems = Array.ofDim(capacity))
+
+ def appended[B >: A](elem: B): Capped1[B] =
+ val newElems = Array.ofDim[Any](capacity)
+ Array.copy(elems, 0, newElems, 0, capacity)
+ val (newOffset, newLength) =
+ if length == capacity then
+ newElems(offset) = elem
+ ((offset + 1) % capacity, length)
+ else
+ newElems(length) = elem
+ (offset, length + 1)
+ Capped1[B](capacity, newLength, newOffset, newElems)
+ end appended
+
+ inline def :+ [B >: A](elem: B): Capped1[B] = appended(elem)
+
+ def apply(i: Int): A = elems((i + offset) % capacity).asInstanceOf[A]
+
+ def iterator: Iterator[A] = new AbstractIterator[A]:
+ private var current = 0
+ def hasNext = current < self.length
+ def next(): A =
+ val elem = self(current)
+ current += 1
+ elem
+ end iterator
+
+ override def className = "Capped1"
+end Capped1
+~~~
+{% endtab %}
+{% endtabs %}
The above listing presents the first version of our capped collection
implementation. It will be refined later. The class `Capped1` has a
@@ -100,33 +149,37 @@ the `offset`.
These two methods, `appended` and `apply`, implement the specific
behavior of the `Capped1` collection type. In addition to them, we have
to implement `iterator` to make the generic collection operations
-(such as `foldLeft`, `count`, etc.) work on `Capped` collections.
+(such as `foldLeft`, `count`, etc.) work on `Capped1` collections.
Here we implement it by using indexed access.
Last, we override `className` to return the name of the collection,
-“Capped1”. This name is used by the `toString` operation.
+`"Capped1"`. This name is used by the `toString` operation.
Here are some interactions with the `Capped1` collection:
+{% tabs capped1_2 %}
+{% tab 'Scala 2 and 3' for=capped1_2 %}
~~~ scala
-scala> new Capped1(capacity = 4)
-res0: Capped1[Nothing] = Capped1()
+scala> val c0 = new Capped1(capacity = 4)
+val c0: Capped1[Nothing] = Capped1()
-scala> res0 :+ 1 :+ 2 :+ 3
-res1: Capped1[Int] = Capped1(1, 2, 3)
+scala> val c1 = c0 :+ 1 :+ 2 :+ 3
+val c1: Capped1[Int] = Capped1(1, 2, 3)
-scala> res1.length
-res2: Int = 3
+scala> c1.length
+val res2: Int = 3
-scala> res1.lastOption
-res3: Option[Int] = Some(3)
+scala> c1.lastOption
+val res3: Option[Int] = Some(3)
-scala> res1 :+ 4 :+ 5 :+ 6
-res4: Capped1[Int] = Capped1(3, 4, 5, 6)
+scala> val c2 = c1 :+ 4 :+ 5 :+ 6
+val c2: Capped1[Int] = Capped1(3, 4, 5, 6)
-scala> res4.take(3)
-res5: collection.immutable.Iterable[Int] = List(3, 4, 5)
+scala> val c3 = c2.take(3)
+val c3: collection.immutable.Iterable[Int] = List(3, 4, 5)
~~~
+{% endtab %}
+{% endtabs %}
You can see that if we try to grow the collection with more than four
 elements, the first elements are dropped (see `c2`). The operations
@@ -144,7 +197,13 @@ question should be what needs to be done to change them? One way to do
this would be to override the `take` method in class `Capped1`, maybe like
this:
- def take(count: Int): Capped1 = …
+{% tabs take_signature %}
+{% tab 'Scala 2 and 3' for=take_signature %}
+```scala
+def take(count: Int): Capped1 = …
+```
+{% endtab %}
+{% endtabs %}
This would do the job for `take`. But what about `drop`, or `filter`, or
`init`? In fact there are over fifty methods on collections that return
@@ -155,6 +214,8 @@ effect, as shown in the next section.
### Second version of `Capped` class ###
+{% tabs capped2_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=capped2_1 %}
~~~ scala
import scala.collection._
@@ -191,6 +252,44 @@ class Capped2Factory(capacity: Int) extends IterableFactory[Capped2] {
}
}
~~~
+{% endtab %}
+{% tab 'Scala 3' for=capped2_1 %}
+~~~ scala
+class Capped2[A] private(val capacity: Int, val length: Int, offset: Int, elems: Array[Any])
+ extends immutable.Iterable[A],
+ IterableOps[A, Capped2, Capped2[A]]:
+ self =>
+
+ def this(capacity: Int) = // as before
+
+ def appended[B >: A](elem: B): Capped2[B] = // as before
+ inline def :+[B >: A](elem: B): Capped2[B] = // as before
+ def apply(i: Int): A = // as before
+
+ def iterator: Iterator[A] = // as before
+
+ override def className = "Capped2"
+ override val iterableFactory: IterableFactory[Capped2] = Capped2Factory(capacity)
+ override protected def fromSpecific(coll: IterableOnce[A]): Capped2[A] = iterableFactory.from(coll)
+ override protected def newSpecificBuilder: mutable.Builder[A, Capped2[A]] = iterableFactory.newBuilder
+ override def empty: Capped2[A] = iterableFactory.empty
+end Capped2
+
+class Capped2Factory(capacity: Int) extends IterableFactory[Capped2]:
+
+ def from[A](source: IterableOnce[A]): Capped2[A] =
+ (newBuilder[A] ++= source).result()
+
+ def empty[A]: Capped2[A] = Capped2[A](capacity)
+
+ def newBuilder[A]: mutable.Builder[A, Capped2[A]] =
+ new mutable.ImmutableBuilder[A, Capped2[A]](empty):
+ def addOne(elem: A): this.type =
+ elems = elems :+ elem; this
+end Capped2Factory
+~~~
+{% endtab %}
+{% endtabs %}
The Capped class needs to inherit not only from `Iterable`, but also
from its implementation trait `IterableOps`. This is shown in the
@@ -229,31 +328,35 @@ With the refined implementation of the [`Capped2` class](#second-version-of-capp
the transformation operations work now as expected, and the
`Capped2Factory` class provides seamless conversions from other collections:
+{% tabs capped2_2 %}
+{% tab 'Scala 2 and 3' for=capped2_2 %}
~~~ scala
scala> object Capped extends Capped2Factory(capacity = 4)
defined object Capped
scala> Capped(1, 2, 3)
-res0: Capped2[Int] = Capped2(1, 2, 3)
+val res0: Capped2[Int] = Capped2(1, 2, 3)
scala> res0.take(2)
-res1: Capped2[Int] = Capped2(1, 2)
+val res1: Capped2[Int] = Capped2(1, 2)
scala> res0.filter(x => x % 2 == 1)
-res2: Capped2[Int] = Capped2(1, 3)
+val res2: Capped2[Int] = Capped2(1, 3)
scala> res0.map(x => x * x)
-res3: Capped2[Int] = Capped2(1, 4, 9)
+val res3: Capped2[Int] = Capped2(1, 4, 9)
scala> List(1, 2, 3, 4, 5).to(Capped)
-res4: Capped2[Int] = Capped2(2, 3, 4, 5)
+val res4: Capped2[Int] = Capped2(2, 3, 4, 5)
~~~
+{% endtab %}
+{% endtabs %}
This implementation now behaves correctly, but we can still improve
a few things:
- since our collection is strict, we can take advantage
- of the better performance offered by
+ of the better performance offered by
strict implementations of transformation operations,
- since our `fromSpecific`, `newSpecificBuilder` and `empty`
operation just forward to the `iterableFactory` member,
@@ -262,6 +365,8 @@ a few things:
### Final version of `Capped` class ###
+{% tabs capped_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=capped_1 %}
~~~ scala
import scala.collection._
@@ -324,6 +429,69 @@ class CappedFactory(capacity: Int) extends IterableFactory[Capped] {
}
~~~
+{% endtab %}
+{% tab 'Scala 3' for=capped_1 %}
+~~~ scala
+import scala.collection.*
+
+final class Capped[A] private (val capacity: Int, val length: Int, offset: Int, elems: Array[Any])
+ extends immutable.Iterable[A],
+ IterableOps[A, Capped, Capped[A]],
+ IterableFactoryDefaults[A, Capped],
+ StrictOptimizedIterableOps[A, Capped, Capped[A]]:
+ self =>
+
+ def this(capacity: Int) =
+ this(capacity, length = 0, offset = 0, elems = Array.ofDim(capacity))
+
+ def appended[B >: A](elem: B): Capped[B] =
+ val newElems = Array.ofDim[Any](capacity)
+ Array.copy(elems, 0, newElems, 0, capacity)
+ val (newOffset, newLength) =
+ if length == capacity then
+ newElems(offset) = elem
+ ((offset + 1) % capacity, length)
+ else
+ newElems(length) = elem
+ (offset, length + 1)
+ Capped[B](capacity, newLength, newOffset, newElems)
+ end appended
+
+ inline def :+ [B >: A](elem: B): Capped[B] = appended(elem)
+
+ def apply(i: Int): A = elems((i + offset) % capacity).asInstanceOf[A]
+
+ def iterator: Iterator[A] = view.iterator
+
+ override def view: IndexedSeqView[A] = new IndexedSeqView[A]:
+ def length: Int = self.length
+ def apply(i: Int): A = self(i)
+
+ override def knownSize: Int = length
+
+ override def className = "Capped"
+
+ override val iterableFactory: IterableFactory[Capped] = new CappedFactory(capacity)
+
+end Capped
+
+class CappedFactory(capacity: Int) extends IterableFactory[Capped]:
+
+ def from[A](source: IterableOnce[A]): Capped[A] =
+ source match
+ case capped: Capped[?] if capped.capacity == capacity => capped.asInstanceOf[Capped[A]]
+ case _ => (newBuilder[A] ++= source).result()
+
+ def empty[A]: Capped[A] = Capped[A](capacity)
+
+ def newBuilder[A]: mutable.Builder[A, Capped[A]] =
+ new mutable.ImmutableBuilder[A, Capped[A]](empty):
+ def addOne(elem: A): this.type = { elems = elems :+ elem; this }
+
+end CappedFactory
+~~~
+{% endtab %}
+{% endtabs %}
That is it. The final [`Capped` class](#final-version-of-capped-class):
@@ -345,33 +513,58 @@ methods (such as `iterator` in our case), if any.
## RNA sequences ##
-To start with the second example, we define the four RNA Bases:
-
- abstract class Base
- case object A extends Base
- case object U extends Base
- case object G extends Base
- case object C extends Base
+To start with the second example, say you want to create a new immutable sequence type for RNA strands.
+These are sequences of bases A (adenine), U (uracil), G (guanine), and C
+(cytosine). The definitions for bases are set up as shown in the
+listing of RNA bases below:
- object Base {
- val fromInt: Int => Base = Array(A, U, G, C)
- val toInt: Base => Int = Map(A -> 0, U -> 1, G -> 2, C -> 3)
- }
-
-Say you want to create a new immutable sequence type for RNA strands, which are
-sequences of bases A (adenine), U (uracil), G (guanine), and C
-(cytosine). The definitions for bases are easily set up as shown in the
-listing of RNA bases above.
+{% tabs Base_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=Base_1 %}
+~~~ scala
+abstract class Base
+case object A extends Base
+case object U extends Base
+case object G extends Base
+case object C extends Base
+
+object Base {
+ val fromInt: Int => Base = Array(A, U, G, C)
+ val toInt: Base => Int = Map(A -> 0, U -> 1, G -> 2, C -> 3)
+}
+~~~
Every base is defined as a case object that inherits from a common
abstract class `Base`. The `Base` class has a companion object that
defines two functions that map between bases and the integers 0 to 3.
-You can see in the examples two different ways to use collections
+
+You can see in the above example two different ways to use collections
to implement these functions. The `toInt` function is implemented as a
`Map` from `Base` values to integers. The reverse function, `fromInt`, is
implemented as an array. This makes use of the fact that both maps and
arrays *are* functions because they inherit from the `Function1` trait.
+{% endtab %}
+{% tab 'Scala 3' for=Base_1 %}
+~~~ scala
+enum Base:
+ case A, U, G, C
+
+object Base:
+ val fromInt: Int => Base = values
+ val toInt: Base => Int = _.ordinal
+~~~
+
+Every base is defined as a case of the `Base` enum. `Base` has a companion object
+that defines two functions that map between bases and the integers 0 to 3.
+
+The `toInt` function is implemented by delegating to the `ordinal` method of `Base`,
+which is automatically generated because `Base` is an enum. Each enum case has a unique `ordinal` value.
+The reverse function, `fromInt`, is implemented as an array. This makes use of the fact that
+arrays *are* functions because they inherit from the `Function1` trait.
+
+{% endtab %}
+{% endtabs %}
+
The next task is to define a class for strands of RNA. Conceptually, a
strand of RNA is simply a `Seq[Base]`. However, RNA strands can get
quite long, so it makes sense to invest some work in a compact
@@ -383,51 +576,104 @@ representation.
### First version of RNA strands class ###
- import collection.mutable
- import collection.immutable.{ IndexedSeq, IndexedSeqOps }
+{% tabs RNA1_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=RNA1_1 %}
+~~~ scala
+import collection.mutable
+import collection.immutable.{ IndexedSeq, IndexedSeqOps }
- final class RNA1 private (
- val groups: Array[Int],
- val length: Int
- ) extends IndexedSeq[Base]
- with IndexedSeqOps[Base, IndexedSeq, RNA1] {
+final class RNA1 private (
+ val groups: Array[Int],
+ val length: Int
+) extends IndexedSeq[Base]
+ with IndexedSeqOps[Base, IndexedSeq, RNA1] {
- import RNA1._
+ import RNA1._
- def apply(idx: Int): Base = {
- if (idx < 0 || length <= idx)
- throw new IndexOutOfBoundsException
- Base.fromInt(groups(idx / N) >> (idx % N * S) & M)
- }
+ def apply(idx: Int): Base = {
+ if (idx < 0 || length <= idx)
+ throw new IndexOutOfBoundsException
+ Base.fromInt(groups(idx / N) >> (idx % N * S) & M)
+ }
- override protected def fromSpecific(coll: IterableOnce[Base]): RNA1 =
- fromSeq(coll.iterator.toSeq)
- override protected def newSpecificBuilder: mutable.Builder[Base, RNA1] =
- iterableFactory.newBuilder[Base].mapResult(fromSeq)
- override def empty: RNA1 = fromSeq(Seq.empty)
- override def className = "RNA1"
- }
+ override protected def fromSpecific(coll: IterableOnce[Base]): RNA1 =
+ fromSeq(coll.iterator.toSeq)
+ override protected def newSpecificBuilder: mutable.Builder[Base, RNA1] =
+ iterableFactory.newBuilder[Base].mapResult(fromSeq)
+ override def empty: RNA1 = fromSeq(Seq.empty)
+ override def className = "RNA1"
+}
- object RNA1 {
+object RNA1 {
- // Number of bits necessary to represent group
- private val S = 2
+ // Number of bits necessary to represent group
+ private val S = 2
- // Number of groups that fit in an Int
- private val N = 32 / S
+ // Number of groups that fit in an Int
+ private val N = 32 / S
- // Bitmask to isolate a group
- private val M = (1 << S) - 1
+ // Bitmask to isolate a group
+ private val M = (1 << S) - 1
- def fromSeq(buf: collection.Seq[Base]): RNA1 = {
- val groups = new Array[Int]((buf.length + N - 1) / N)
- for (i <- 0 until buf.length)
- groups(i / N) |= Base.toInt(buf(i)) << (i % N * S)
- new RNA1(groups, buf.length)
- }
+ def fromSeq(buf: collection.Seq[Base]): RNA1 = {
+ val groups = new Array[Int]((buf.length + N - 1) / N)
+ for (i <- 0 until buf.length)
+ groups(i / N) |= Base.toInt(buf(i)) << (i % N * S)
+ new RNA1(groups, buf.length)
+ }
- def apply(bases: Base*) = fromSeq(bases)
- }
+ def apply(bases: Base*) = fromSeq(bases)
+}
+~~~
+{% endtab %}
+{% tab 'Scala 3' for=RNA1_1 %}
+~~~ scala
+import collection.mutable
+import collection.immutable.{ IndexedSeq, IndexedSeqOps }
+
+final class RNA1 private
+( val groups: Array[Int],
+ val length: Int
+) extends IndexedSeq[Base],
+ IndexedSeqOps[Base, IndexedSeq, RNA1]:
+
+ import RNA1.*
+
+ def apply(idx: Int): Base =
+ if idx < 0 || length <= idx then
+ throw IndexOutOfBoundsException()
+ Base.fromInt(groups(idx / N) >> (idx % N * S) & M)
+
+ override protected def fromSpecific(coll: IterableOnce[Base]): RNA1 =
+ fromSeq(coll.iterator.toSeq)
+ override protected def newSpecificBuilder: mutable.Builder[Base, RNA1] =
+ iterableFactory.newBuilder[Base].mapResult(fromSeq)
+ override def empty: RNA1 = fromSeq(Seq.empty)
+ override def className = "RNA1"
+end RNA1
+
+object RNA1:
+
+ // Number of bits necessary to represent group
+ private val S = 2
+
+ // Number of groups that fit in an Int
+ private val N = 32 / S
+
+ // Bitmask to isolate a group
+ private val M = (1 << S) - 1
+
+ def fromSeq(buf: collection.Seq[Base]): RNA1 =
+ val groups = new Array[Int]((buf.length + N - 1) / N)
+ for i <- 0 until buf.length do
+ groups(i / N) |= Base.toInt(buf(i)) << (i % N * S)
+ new RNA1(groups, buf.length)
+
+ def apply(bases: Base*) = fromSeq(bases)
+end RNA1
+~~~
+{% endtab %}
+{% endtabs %}
The [RNA strands class listing](#first-version-of-rna-strands-class) above
presents the first version of this
@@ -484,14 +730,22 @@ in the `RNA1` object. It takes a variable number of `Base` arguments and
simply forwards them as a sequence to `fromSeq`. Here are the two
creation schemes in action:
- scala> val xs = List(A, G, U, A)
- xs: List[Base] = List(A, G, U, A)
+{% tabs RNA1_2 %}
+{% tab 'Scala 2 and 3' for=RNA1_2 %}
+
+```scala
+scala> val xs = List(A, G, U, A)
+val xs: List[Base] = List(A, G, U, A)
+
+scala> RNA1.fromSeq(xs)
+val res1: RNA1 = RNA1(A, G, U, A)
- scala> RNA1.fromSeq(xs)
- res1: RNA1 = RNA1(A, G, U, A)
+scala> val rna1 = RNA1(A, U, G, G, C)
+val rna1: RNA1 = RNA1(A, U, G, G, C)
+```
- scala> val rna1 = RNA1(A, U, G, G, C)
- rna1: RNA1 = RNA1(A, U, G, G, C)
+{% endtab %}
+{% endtabs %}
Also note that the type parameters of the `IndexedSeqOps` trait that
we inherit from are: `Base`, `IndexedSeq` and `RNA1`. The first one
@@ -507,11 +761,19 @@ third one is `RNA1`. This means that operations like `map` or
Here is an example showing the usage of `take` and `filter`:
- scala> rna1.take(3)
- res5: RNA1 = RNA1(A, U, G)
+{% tabs RNA1_3 %}
+{% tab 'Scala 2 and 3' for=RNA1_3 %}
+
+```scala
+scala> val rna1_2 = rna1.take(3)
+val rna1_2: RNA1 = RNA1(A, U, G)
+
+scala> val rna1_3 = rna1.filter(_ != U)
+val rna1_3: RNA1 = RNA1(A, G, G, C)
+```
- scala> rna1.filter(_ != U)
- res6: RNA1 = RNA1(A, G, G, C)
+{% endtab %}
+{% endtabs %}
### Dealing with map and friends ###
@@ -523,14 +785,22 @@ methods be adapted to RNA strands? The desired behavior would be to get
back an RNA strand when mapping bases to bases or appending two RNA strands
with `++`:
- scala> val rna = RNA(A, U, G, G, C)
- rna: RNA = RNA(A, U, G, G, C)
+{% tabs RNA1_4 %}
+{% tab 'Scala 2 and 3' for=RNA1_4 %}
- scala> rna map { case A => U case b => b }
- res7: RNA = RNA(U, U, G, G, C)
+```scala
+scala> val rna = RNA(A, U, G, G, C)
+val rna: RNA = RNA(A, U, G, G, C)
- scala> rna ++ rna
- res8: RNA = RNA(A, U, G, G, C, A, U, G, G, C)
+scala> rna.map { case A => U case b => b }
+val res7: RNA = RNA(U, U, G, G, C)
+
+scala> rna ++ rna
+val res8: RNA = RNA(A, U, G, G, C, A, U, G, G, C)
+```
+
+{% endtab %}
+{% endtabs %}
On the other hand, mapping bases to some other type over an RNA strand
cannot yield another RNA strand because the new elements have the
@@ -538,26 +808,42 @@ wrong type. It has to yield a sequence instead. In the same vein
appending elements that are not of type `Base` to an RNA strand can
yield a general sequence, but it cannot yield another RNA strand.
- scala> rna map Base.toInt
- res2: IndexedSeq[Int] = Vector(0, 1, 2, 2, 3)
+{% tabs RNA1_5 %}
+{% tab 'Scala 2 and 3' for=RNA1_5 %}
+
+```scala
+scala> rna.map(Base.toInt)
+val res2: IndexedSeq[Int] = Vector(0, 1, 2, 2, 3)
+
+scala> rna ++ List("missing", "data")
+val res3: IndexedSeq[java.lang.Object] =
+ Vector(A, U, G, G, C, missing, data)
+```
- scala> rna ++ List("missing", "data")
- res3: IndexedSeq[java.lang.Object] =
- Vector(A, U, G, G, C, missing, data)
+{% endtab %}
+{% endtabs %}
This is what you'd expect in the ideal case. But this is not what the
[`RNA1` class](#first-version-of-rna-strands-class) provides. In fact, all
examples will return instances of `Vector`, not just the last two. If you run
the first three commands above with instances of this class you obtain:
- scala> val rna1 = RNA1(A, U, G, G, C)
- rna1: RNA1 = RNA1(A, U, G, G, C)
+{% tabs RNA1_6 %}
+{% tab 'Scala 2 and 3' for=RNA1_6 %}
- scala> rna1 map { case A => U case b => b }
- res0: IndexedSeq[Base] = Vector(U, U, G, G, C)
+```scala
+scala> val rna1 = RNA1(A, U, G, G, C)
+val rna1: RNA1 = RNA1(A, U, G, G, C)
- scala> rna1 ++ rna1
- res1: IndexedSeq[Base] = Vector(A, U, G, G, C, A, U, G, G, C)
+scala> rna1.map { case A => U case b => b }
+val res0: IndexedSeq[Base] = Vector(U, U, G, G, C)
+
+scala> rna1 ++ rna1
+val res1: IndexedSeq[Base] = Vector(A, U, G, G, C, A, U, G, G, C)
+```
+
+{% endtab %}
+{% endtabs %}
So the result of `map` and `++` is never an RNA strand, even if the
element type of the generated collection is `Base`. To see how to do
@@ -566,7 +852,13 @@ method (or of `++`, which has a similar signature). The `map` method is
originally defined in class `scala.collection.IterableOps` with the
following signature:
- def map[B](f: A => B): CC[B]
+{% tabs map_signature %}
+{% tab 'Scala 2 and 3' for=map_signature %}
+```scala
+def map[B](f: A => B): CC[B]
+```
+{% endtab %}
+{% endtabs %}
Here `A` is the type of elements of the collection, and `CC` is the type
constructor passed as a second parameter to the `IterableOps` trait.
@@ -576,38 +868,84 @@ this is why we always get a `Vector` as a result.
### Second version of RNA strands class ###
- import scala.collection.{ View, mutable }
- import scala.collection.immutable.{ IndexedSeq, IndexedSeqOps }
-
- final class RNA2 private (val groups: Array[Int], val length: Int)
- extends IndexedSeq[Base] with IndexedSeqOps[Base, IndexedSeq, RNA2] {
-
- import RNA2._
-
- def apply(idx: Int): Base = // as before
- override protected def fromSpecific(coll: IterableOnce[Base]): RNA2 = // as before
- override protected def newSpecificBuilder: mutable.Builder[Base, RNA2] = // as before
-
- // Overloading of `appended`, `prepended`, `appendedAll`,
- // `prependedAll`, `map`, `flatMap` and `concat` to return an `RNA2`
- // when possible
- def concat(suffix: IterableOnce[Base]): RNA2 =
- fromSpecific(iterator ++ suffix.iterator)
- // symbolic alias for `concat`
- @inline final def ++ (suffix: IterableOnce[Base]): RNA2 = concat(suffix)
- def appended(base: Base): RNA2 =
- fromSpecific(new View.Append(this, base))
- def appendedAll(suffix: IterableOnce[Base]): RNA2 =
- concat(suffix)
- def prepended(base: Base): RNA2 =
- fromSpecific(new View.Prepend(base, this))
- def prependedAll(prefix: IterableOnce[Base]): RNA2 =
- fromSpecific(prefix.iterator ++ iterator)
- def map(f: Base => Base): RNA2 =
- fromSpecific(new View.Map(this, f))
- def flatMap(f: Base => IterableOnce[Base]): RNA2 =
- fromSpecific(new View.FlatMap(this, f))
- }
+{% tabs RNA2_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=RNA2_1 %}
+~~~ scala
+import scala.collection.{ View, mutable }
+import scala.collection.immutable.{ IndexedSeq, IndexedSeqOps }
+
+final class RNA2 private (val groups: Array[Int], val length: Int)
+ extends IndexedSeq[Base] with IndexedSeqOps[Base, IndexedSeq, RNA2] {
+
+ import RNA2._
+
+ def apply(idx: Int): Base = // as before
+ override protected def fromSpecific(coll: IterableOnce[Base]): RNA2 = // as before
+ override protected def newSpecificBuilder: mutable.Builder[Base, RNA2] = // as before
+ override def empty: RNA2 = // as before
+ override def className = "RNA2"
+
+ // Overloading of `appended`, `prepended`, `appendedAll`,
+ // `prependedAll`, `map`, `flatMap` and `concat` to return an `RNA2`
+ // when possible
+ def concat(suffix: IterableOnce[Base]): RNA2 =
+ fromSpecific(iterator ++ suffix.iterator)
+ // symbolic alias for `concat`
+ @inline final def ++ (suffix: IterableOnce[Base]): RNA2 = concat(suffix)
+ def appended(base: Base): RNA2 =
+ fromSpecific(new View.Appended(this, base))
+ def appendedAll(suffix: IterableOnce[Base]): RNA2 =
+ concat(suffix)
+ def prepended(base: Base): RNA2 =
+ fromSpecific(new View.Prepended(base, this))
+ def prependedAll(prefix: IterableOnce[Base]): RNA2 =
+ fromSpecific(prefix.iterator ++ iterator)
+ def map(f: Base => Base): RNA2 =
+ fromSpecific(new View.Map(this, f))
+ def flatMap(f: Base => IterableOnce[Base]): RNA2 =
+ fromSpecific(new View.FlatMap(this, f))
+}
+~~~
+{% endtab %}
+{% tab 'Scala 3' for=RNA2_1 %}
+~~~ scala
+import scala.collection.{ View, mutable }
+import scala.collection.immutable.{ IndexedSeq, IndexedSeqOps }
+
+final class RNA2 private (val groups: Array[Int], val length: Int)
+ extends IndexedSeq[Base], IndexedSeqOps[Base, IndexedSeq, RNA2]:
+
+ import RNA2.*
+
+ def apply(idx: Int): Base = // as before
+ override protected def fromSpecific(coll: IterableOnce[Base]): RNA2 = // as before
+ override protected def newSpecificBuilder: mutable.Builder[Base, RNA2] = // as before
+ override def empty: RNA2 = // as before
+ override def className = "RNA2"
+
+ // Overloading of `appended`, `prepended`, `appendedAll`,
+ // `prependedAll`, `map`, `flatMap` and `concat` to return an `RNA2`
+ // when possible
+ def concat(suffix: IterableOnce[Base]): RNA2 =
+ fromSpecific(iterator ++ suffix.iterator)
+ // symbolic alias for `concat`
+ inline final def ++ (suffix: IterableOnce[Base]): RNA2 = concat(suffix)
+ def appended(base: Base): RNA2 =
+ fromSpecific(View.Appended(this, base))
+ def appendedAll(suffix: IterableOnce[Base]): RNA2 =
+ concat(suffix)
+ def prepended(base: Base): RNA2 =
+ fromSpecific(View.Prepended(base, this))
+ def prependedAll(prefix: IterableOnce[Base]): RNA2 =
+ fromSpecific(prefix.iterator ++ iterator)
+ def map(f: Base => Base): RNA2 =
+ fromSpecific(View.Map(this, f))
+ def flatMap(f: Base => IterableOnce[Base]): RNA2 =
+ fromSpecific(View.FlatMap(this, f))
+end RNA2
+~~~
+{% endtab %}
+{% endtabs %}
To address this shortcoming, you need to overload the methods that
return an `IndexedSeq[B]` for the case where `B` is known to be `Base`,
@@ -622,9 +960,11 @@ collection is strict, we could take advantage of the better performance offered
in transformation operations.
Also, if we try to convert an `Iterable[Base]` into an `RNA2` it fails:
-~~~
+{% tabs RNA2_2 class=tabs-scala-version %}
+{% tab 'Scala 2' for=RNA2_2 %}
+~~~scala
scala> val bases: Iterable[Base] = List(A, U, C, C)
-bases: Iterable[Base] = List(A, U, C, C)
+val bases: Iterable[Base] = List(A, U, C, C)
scala> bases.to(RNA2)
^
@@ -632,9 +972,28 @@ scala> bases.to(RNA2)
found : RNA2.type
required: scala.collection.Factory[Base,?]
~~~
+{% endtab %}
+{% tab 'Scala 3' for=RNA2_2 %}
+~~~scala
+scala> val bases: Iterable[Base] = List(A, U, C, C)
+val bases: Iterable[Base] = List(A, U, C, C)
+
+scala> bases.to(RNA2)
+-- [E007] Type Mismatch Error: -------------------------------------------------
+1 |bases.to(RNA2)
+ | ^^^^
+ | Found: RNA2.type
+ | Required: scala.collection.Factory[Base, Any]
+ |
+ | longer explanation available when compiling with `-explain`
+~~~
+{% endtab %}
+{% endtabs %}
### Final version of RNA strands class ###
+{% tabs RNA_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=RNA_1 %}
~~~ scala
import scala.collection.{ AbstractIterator, SpecificIterableFactory, StrictOptimizedSeqOps, View, mutable }
import scala.collection.immutable.{ IndexedSeq, IndexedSeqOps }
@@ -723,6 +1082,94 @@ object RNA extends SpecificIterableFactory[Base, RNA] {
}
}
~~~
+{% endtab %}
+{% tab 'Scala 3' for=RNA_1 %}
+~~~ scala
+import scala.collection.{ AbstractIterator, SpecificIterableFactory, StrictOptimizedSeqOps, View, mutable }
+import scala.collection.immutable.{ IndexedSeq, IndexedSeqOps }
+
+final class RNA private
+( val groups: Array[Int],
+ val length: Int
+) extends IndexedSeq[Base],
+ IndexedSeqOps[Base, IndexedSeq, RNA],
+ StrictOptimizedSeqOps[Base, IndexedSeq, RNA]:
+ rna =>
+
+ import RNA.*
+
+ // Mandatory implementation of `apply` in `IndexedSeqOps`
+ def apply(idx: Int): Base =
+ if idx < 0 || length <= idx then
+ throw new IndexOutOfBoundsException
+ Base.fromInt(groups(idx / N) >> (idx % N * S) & M)
+
+ // Mandatory overrides of `fromSpecific`, `newSpecificBuilder`,
+ // and `empty`, from `IterableOps`
+ override protected def fromSpecific(coll: IterableOnce[Base]): RNA =
+ RNA.fromSpecific(coll)
+ override protected def newSpecificBuilder: mutable.Builder[Base, RNA] =
+ RNA.newBuilder
+ override def empty: RNA = RNA.empty
+
+ // Overloading of `appended`, `prepended`, `appendedAll`, `prependedAll`,
+ // `map`, `flatMap` and `concat` to return an `RNA` when possible
+ def concat(suffix: IterableOnce[Base]): RNA =
+ strictOptimizedConcat(suffix, newSpecificBuilder)
+ inline final def ++ (suffix: IterableOnce[Base]): RNA = concat(suffix)
+ def appended(base: Base): RNA =
+ (newSpecificBuilder ++= this += base).result()
+ def appendedAll(suffix: Iterable[Base]): RNA =
+ strictOptimizedConcat(suffix, newSpecificBuilder)
+ def prepended(base: Base): RNA =
+ (newSpecificBuilder += base ++= this).result()
+ def prependedAll(prefix: Iterable[Base]): RNA =
+ (newSpecificBuilder ++= prefix ++= this).result()
+ def map(f: Base => Base): RNA =
+ strictOptimizedMap(newSpecificBuilder, f)
+ def flatMap(f: Base => IterableOnce[Base]): RNA =
+ strictOptimizedFlatMap(newSpecificBuilder, f)
+
+ // Optional re-implementation of iterator,
+ // to make it more efficient.
+ override def iterator: Iterator[Base] = new AbstractIterator[Base]:
+ private var i = 0
+ private var b = 0
+ def hasNext: Boolean = i < rna.length
+ def next(): Base =
+ b = if i % N == 0 then groups(i / N) else b >>> S
+ i += 1
+ Base.fromInt(b & M)
+
+ override def className = "RNA"
+end RNA
+
+object RNA extends SpecificIterableFactory[Base, RNA]:
+
+ private val S = 2 // number of bits in group
+ private val M = (1 << S) - 1 // bitmask to isolate a group
+ private val N = 32 / S // number of groups in an Int
+
+ def fromSeq(buf: collection.Seq[Base]): RNA =
+ val groups = new Array[Int]((buf.length + N - 1) / N)
+ for i <- 0 until buf.length do
+ groups(i / N) |= Base.toInt(buf(i)) << (i % N * S)
+ new RNA(groups, buf.length)
+
+ // Mandatory factory methods: `empty`, `newBuilder`
+ // and `fromSpecific`
+ def empty: RNA = fromSeq(Seq.empty)
+
+ def newBuilder: mutable.Builder[Base, RNA] =
+ mutable.ArrayBuffer.newBuilder[Base].mapResult(fromSeq)
+
+ def fromSpecific(it: IterableOnce[Base]): RNA = it match
+ case seq: collection.Seq[Base] => fromSeq(seq)
+ case _ => fromSeq(mutable.ArrayBuffer.from(it))
+end RNA
+~~~
+{% endtab %}
+{% endtabs %}
The final [`RNA` class](#final-version-of-rna-strands-class):
@@ -771,7 +1218,7 @@ storing the strings "abc", "abd", "al", "all" and "xy" would look
like this:
A sample patricia trie:
-
+
To find the node corresponding to the string "abc" in this trie,
simply follow the subtree labeled "a", proceed from there to the
@@ -793,17 +1240,35 @@ of a map that's implemented as a Patricia trie. We call the map a
selects a submap of all keys starting with a given prefix. We'll first
define a prefix map with the keys shown in the running example:
- scala> val m = PrefixMap("abc" -> 0, "abd" -> 1, "al" -> 2,
- "all" -> 3, "xy" -> 4)
- m: PrefixMap[Int] = PrefixMap((abc,0), (abd,1), (al,2), (all,3), (xy,4))
+{% tabs prefixMap_1 %}
+{% tab 'Scala 2 and 3' for=prefixMap_1 %}
+
+```scala
+scala> val m = PrefixMap("abc" -> 0, "abd" -> 1, "al" -> 2,
+ "all" -> 3, "xy" -> 4)
+val m: PrefixMap[Int] = PrefixMap((abc,0), (abd,1), (al,2), (all,3), (xy,4))
+```
+
+{% endtab %}
+{% endtabs %}
Then calling `withPrefix` on `m` will yield another prefix map:
- scala> m withPrefix "a"
- res14: PrefixMap[Int] = PrefixMap((bc,0), (bd,1), (l,2), (ll,3))
+{% tabs prefixMap_2 %}
+{% tab 'Scala 2 and 3' for=prefixMap_2 %}
+
+```scala
+scala> m.withPrefix("a")
+val res14: PrefixMap[Int] = PrefixMap((bc,0), (bd,1), (l,2), (ll,3))
+```
+
+{% endtab %}
+{% endtabs %}
### Patricia trie implementation ###
+{% tabs prefixMap_3 class=tabs-scala-version %}
+{% tab 'Scala 2' for=prefixMap_3 %}
~~~ scala
import scala.collection._
import scala.collection.mutable.{ GrowableBuilder, Builder }
@@ -818,18 +1283,18 @@ class PrefixMap[A]
def get(s: String): Option[A] =
if (s.isEmpty) value
- else suffixes get (s(0)) flatMap (_.get(s substring 1))
+ else suffixes.get(s(0)).flatMap(_.get(s.substring(1)))
def withPrefix(s: String): PrefixMap[A] =
if (s.isEmpty) this
else {
val leading = s(0)
- suffixes get leading match {
+ suffixes.get(leading) match {
case None =>
suffixes = suffixes + (leading -> empty)
case _ =>
}
- suffixes(leading) withPrefix (s substring 1)
+ suffixes(leading).withPrefix(s.substring(1))
}
def iterator: Iterator[(String, A)] =
@@ -844,7 +1309,7 @@ class PrefixMap[A]
def subtractOne(s: String): this.type = {
if (s.isEmpty) { val prev = value; value = None; prev }
- else suffixes get (s(0)) flatMap (_.remove(s substring 1))
+ else suffixes.get(s(0)).flatMap(_.remove(s.substring(1)))
this
}
@@ -864,7 +1329,7 @@ class PrefixMap[A]
// Members declared in scala.collection.IterableOps
override protected def fromSpecific(coll: IterableOnce[(String, A)]): PrefixMap[A] = PrefixMap.from(coll)
override protected def newSpecificBuilder: mutable.Builder[(String, A), PrefixMap[A]] = PrefixMap.newBuilder
-
+
override def className = "PrefixMap"
}
@@ -892,6 +1357,91 @@ object PrefixMap {
}
~~~
+{% endtab %}
+{% tab 'Scala 3' for=prefixMap_3 %}
+~~~ scala
+import scala.collection.*
+import scala.collection.mutable.{ GrowableBuilder, Builder }
+
+class PrefixMap[A]
+ extends mutable.Map[String, A],
+ mutable.MapOps[String, A, mutable.Map, PrefixMap[A]],
+ StrictOptimizedIterableOps[(String, A), mutable.Iterable, PrefixMap[A]]:
+
+ private var suffixes: immutable.Map[Char, PrefixMap[A]] = immutable.Map.empty
+ private var value: Option[A] = None
+
+ def get(s: String): Option[A] =
+ if s.isEmpty then value
+ else suffixes.get(s(0)).flatMap(_.get(s.substring(1)))
+
+ def withPrefix(s: String): PrefixMap[A] =
+ if s.isEmpty then this
+ else
+ val leading = s(0)
+ suffixes.get(leading) match
+ case None =>
+ suffixes = suffixes + (leading -> empty)
+ case _ =>
+ suffixes(leading).withPrefix(s.substring(1))
+
+ def iterator: Iterator[(String, A)] =
+ (for v <- value.iterator yield ("", v)) ++
+ (for (chr, m) <- suffixes.iterator
+ (s, v) <- m.iterator yield (chr +: s, v))
+
+ def addOne(kv: (String, A)): this.type =
+ withPrefix(kv._1).value = Some(kv._2)
+ this
+
+ def subtractOne(s: String): this.type =
+ if s.isEmpty then { val prev = value; value = None; prev }
+ else suffixes.get(s(0)).flatMap(_.remove(s.substring(1)))
+ this
+
+ // Overloading of transformation methods that should return a PrefixMap
+ def map[B](f: ((String, A)) => (String, B)): PrefixMap[B] =
+ strictOptimizedMap(PrefixMap.newBuilder, f)
+ def flatMap[B](f: ((String, A)) => IterableOnce[(String, B)]): PrefixMap[B] =
+ strictOptimizedFlatMap(PrefixMap.newBuilder, f)
+
+ // Override `concat` and `empty` methods to refine their return type
+ override def concat[B >: A](suffix: IterableOnce[(String, B)]): PrefixMap[B] =
+ strictOptimizedConcat(suffix, PrefixMap.newBuilder)
+ override def empty: PrefixMap[A] = PrefixMap()
+
+ // Members declared in scala.collection.mutable.Clearable
+ override def clear(): Unit = suffixes = immutable.Map.empty
+ // Members declared in scala.collection.IterableOps
+ override protected def fromSpecific(coll: IterableOnce[(String, A)]): PrefixMap[A] = PrefixMap.from(coll)
+ override protected def newSpecificBuilder: mutable.Builder[(String, A), PrefixMap[A]] = PrefixMap.newBuilder
+
+ override def className = "PrefixMap"
+end PrefixMap
+
+object PrefixMap:
+ def empty[A] = new PrefixMap[A]
+
+ def from[A](source: IterableOnce[(String, A)]): PrefixMap[A] =
+ source match
+ case pm: PrefixMap[A @unchecked] => pm
+ case _ => (newBuilder ++= source).result()
+
+ def apply[A](kvs: (String, A)*): PrefixMap[A] = from(kvs)
+
+ def newBuilder[A]: mutable.Builder[(String, A), PrefixMap[A]] =
+ mutable.GrowableBuilder[(String, A), PrefixMap[A]](empty)
+
+ import scala.language.implicitConversions
+
+ implicit def toFactory[A](self: this.type): Factory[(String, A), PrefixMap[A]] =
+ new Factory[(String, A), PrefixMap[A]]:
+ def fromSpecific(it: IterableOnce[(String, A)]): PrefixMap[A] = self.from(it)
+ def newBuilder: mutable.Builder[(String, A), PrefixMap[A]] = self.newBuilder
+end PrefixMap
+~~~
+{% endtab %}
+{% endtabs %}
The previous listing shows the definition of `PrefixMap`. The map has
keys of type `String` and the values are of parametric type `A`. It extends
@@ -968,7 +1518,7 @@ However, in all these cases, to build the right kind of collection
you need to start with an empty collection of that kind. This is
provided by the `empty` method, which simply returns a fresh `PrefixMap`.
-We'll now turn to the companion object `PrefixMap`. In fact it is not
+We'll now turn to the companion object `PrefixMap`. In fact, it is not
strictly necessary to define this companion object, as class `PrefixMap`
can stand well on its own. The main purpose of object `PrefixMap` is to
define some convenience factory methods. It also defines an implicit
@@ -980,15 +1530,23 @@ can not because a `Factory` fixes the type of collection elements,
whereas `PrefixMap` has a polymorphic type of values).
The two convenience methods are `empty` and `apply`. The same methods are
-present for all other collections in Scala's collection framework so
+present for all other collections in Scala's collection framework, so
it makes sense to define them here, too. With the two methods, you can
write `PrefixMap` literals like you do for any other collection:
- scala> PrefixMap("hello" -> 5, "hi" -> 2)
- res0: PrefixMap[Int] = PrefixMap(hello -> 5, hi -> 2)
+{% tabs prefixMap_4 %}
+{% tab 'Scala 2 and 3' for=prefixMap_4 %}
+
+```scala
+scala> PrefixMap("hello" -> 5, "hi" -> 2)
+val res0: PrefixMap[Int] = PrefixMap(hello -> 5, hi -> 2)
+
+scala> res0 += "foo" -> 3
+val res1: res0.type = PrefixMap(hello -> 5, hi -> 2, foo -> 3)
+```
- scala> res0 += "foo" -> 3
- res1: res0.type = PrefixMap(hello -> 5, hi -> 2, foo -> 3)
+{% endtab %}
+{% endtabs %}
## Summary ##
diff --git a/_overviews/core/futures.md b/_overviews/core/futures.md
index c8ceef7257..9f01a43710 100644
--- a/_overviews/core/futures.md
+++ b/_overviews/core/futures.md
@@ -14,7 +14,8 @@ permalink: /overviews/core/:title.html
## Introduction
Futures provide a way to reason about performing many operations
-in parallel-- in an efficient and non-blocking way.
+in parallel -- in an efficient and non-blocking way.
+
A [`Future`](https://www.scala-lang.org/api/current/scala/concurrent/Future.html)
is a placeholder object for a value that may not yet exist.
Generally, the value of the Future is supplied concurrently and can subsequently be used.
@@ -40,18 +41,34 @@ environment to resize itself if necessary to guarantee progress.
A typical future looks like this:
+{% tabs futures-00 %}
+{% tab 'Scala 2 and 3' for=futures-00 %}
val inverseFuture: Future[Matrix] = Future {
fatMatrix.inverse() // non-blocking long lasting computation
}(executionContext)
+{% endtab %}
+{% endtabs %}
Or with the more idiomatic:
+{% tabs futures-01 class=tabs-scala-version %}
+
+{% tab 'Scala 2' for=futures-01 %}
implicit val ec: ExecutionContext = ...
val inverseFuture : Future[Matrix] = Future {
fatMatrix.inverse()
} // ec is implicitly passed
+{% endtab %}
+
+{% tab 'Scala 3' for=futures-01 %}
+ given ExecutionContext = ...
+ val inverseFuture : Future[Matrix] = Future {
+ fatMatrix.inverse()
+ } // execution context is implicitly passed
+{% endtab %}
+{% endtabs %}
Both code snippets delegate the execution of `fatMatrix.inverse()` to an `ExecutionContext` and embody the result of the computation in `inverseFuture`.
@@ -80,11 +97,11 @@ only if each blocking call is wrapped inside a `blocking` call (more on that bel
Otherwise, there is a risk that the thread pool in the global execution context is starved,
and no computation can proceed.
-By default the `ExecutionContext.global` sets the parallelism level of its underlying fork-join pool to the number of available processors
+By default, the `ExecutionContext.global` sets the parallelism level of its underlying fork-join pool to the number of available processors
([Runtime.availableProcessors](https://docs.oracle.com/javase/7/docs/api/java/lang/Runtime.html#availableProcessors%28%29)).
This configuration can be overridden by setting one (or more) of the following VM attributes:
- * scala.concurrent.context.minThreads - defaults to `Runtime.availableProcessors`
+ * scala.concurrent.context.minThreads - defaults to `1`
* scala.concurrent.context.numThreads - can be a number or a multiplier (N) in the form 'xN' ; defaults to `Runtime.availableProcessors`
* scala.concurrent.context.maxThreads - defaults to `Runtime.availableProcessors`
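+
+These attributes are normally passed to the JVM as `-D` options. As a rough illustration (not part of the original text), they can also be set programmatically, provided this happens before `ExecutionContext.global` is first accessed:
+
+    // a sketch: this must run before `ExecutionContext.global` is first accessed,
+    // because the properties are read when the global pool is lazily created
+    System.setProperty("scala.concurrent.context.numThreads", "x2")
+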
@@ -93,7 +110,10 @@ The parallelism level will be set to `numThreads` as long as it remains within `
As stated above, the `ForkJoinPool` can increase the number of threads beyond its `parallelismLevel` in the presence of blocking computation.
As explained in the `ForkJoinPool` API, this is only possible if the pool is explicitly notified:
- import scala.concurrent.Future
+{% tabs futures-02 class=tabs-scala-version %}
+
+{% tab 'Scala 2' for=futures-02 %}
+ import scala.concurrent.{ Future, ExecutionContext }
import scala.concurrent.forkjoin._
// the following is equivalent to `implicit val ec = ExecutionContext.global`
@@ -118,10 +138,40 @@ As explained in the `ForkJoinPool` API, this is only possible if the pool is exp
}
)
}
+{% endtab %}
+{% tab 'Scala 3' for=futures-02 %}
+ import scala.concurrent.{ Future, ExecutionContext }
+ import scala.concurrent.forkjoin.*
+
+ // the following is equivalent to `given ExecutionContext = ExecutionContext.global`
+ import ExecutionContext.Implicits.global
+
+ Future {
+ ForkJoinPool.managedBlock(
+ new ManagedBlocker {
+ var done = false
+
+ def block(): Boolean =
+ try
+ myLock.lock()
+ // ...
+ finally
+ done = true
+ true
+
+ def isReleasable: Boolean = done
+ }
+ )
+ }
+{% endtab %}
+
+{% endtabs %}
Fortunately, the concurrent package provides a convenient way of doing so:
+{% tabs blocking %}
+{% tab 'Scala 2 and 3' for=blocking %}
import scala.concurrent.Future
import scala.concurrent.blocking
@@ -131,26 +181,43 @@ Fortunately the concurrent package provides a convenient way for doing so:
// ...
}
}
+{% endtab %}
+{% endtabs %}
Note that `blocking` is a general construct that will be discussed in more depth [below](#blocking-inside-a-future).
-Last but not least, you must remember that the `ForkJoinPool` is not designed for long lasting blocking operations.
+Last but not least, you must remember that the `ForkJoinPool` is not designed for long-lasting blocking operations.
Even when notified with `blocking`, the pool might not spawn new workers as you would expect,
and when new workers are created, there can be as many as 32767 of them.
To give you an idea, the following code will use 32000 threads:
+{% tabs futures-03 class=tabs-scala-version %}
+
+{% tab 'Scala 2' for=futures-03 %}
implicit val ec = ExecutionContext.global
- for( i <- 1 to 32000 ) {
+ for (i <- 1 to 32000) {
Future {
blocking {
Thread.sleep(999999)
}
}
}
+{% endtab %}
+{% tab 'Scala 3' for=futures-03 %}
+ given ExecutionContext = ExecutionContext.global
+ for i <- 1 to 32000 do
+ Future {
+ blocking {
+ Thread.sleep(999999)
+ }
+ }
+{% endtab %}
+
+{% endtabs %}
-If you need to wrap long lasting blocking operations we recommend using a dedicated `ExecutionContext`, for instance by wrapping a Java `Executor`.
+If you need to wrap long-lasting blocking operations we recommend using a dedicated `ExecutionContext`, for instance by wrapping a Java `Executor`.
### Adapting a Java Executor
@@ -158,26 +225,43 @@ If you need to wrap long lasting blocking operations we recommend using a dedica
Using the `ExecutionContext.fromExecutor` method you can wrap a Java `Executor` into an `ExecutionContext`.
For instance:
+{% tabs executor class=tabs-scala-version %}
+
+{% tab 'Scala 2' for=executor %}
ExecutionContext.fromExecutor(new ThreadPoolExecutor( /* your configuration */ ))
+{% endtab %}
+{% tab 'Scala 3' for=executor %}
+ ExecutionContext.fromExecutor(ThreadPoolExecutor( /* your configuration */ ))
+{% endtab %}
+
+{% endtabs %}
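+
+For example, a minimal sketch (not from the original text) of a dedicated execution context for long-lasting blocking work, backed by a fixed-size pool:
+
+    import java.util.concurrent.Executors
+    import scala.concurrent.ExecutionContext
+
+    // a pool reserved for blocking calls, separate from the global fork-join pool
+    val blockingEc: ExecutionContext =
+      ExecutionContext.fromExecutor(Executors.newFixedThreadPool(16))
+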
### Synchronous Execution Context
One might be tempted to have an `ExecutionContext` that runs computations within the current thread:
+{% tabs bad-example %}
+{% tab 'Scala 2 and 3' for=bad-example %}
val currentThreadExecutionContext = ExecutionContext.fromExecutor(
new Executor {
// Do not do this!
- def execute(runnable: Runnable) { runnable.run() }
+ def execute(runnable: Runnable) = runnable.run()
})
+{% endtab %}
+{% endtabs %}
This should be avoided as it introduces non-determinism in the execution of your future.
+{% tabs bad-example-2 %}
+{% tab 'Scala 2 and 3' for=bad-example-2 %}
Future {
doSomething
}(ExecutionContext.global).map {
doSomethingElse
}(currentThreadExecutionContext)
+{% endtab %}
+{% endtabs %}
The `doSomethingElse` call might either execute in `doSomething`'s thread or in the main thread, and therefore be either asynchronous or synchronous.
As explained [here](https://blog.ometer.com/2011/07/24/callbacks-synchronous-and-asynchronous/) a callback should not be both.
@@ -200,7 +284,7 @@ Completion can take one of two forms:
A `Future` has an important property that it may only be assigned
once.
Once a `Future` object is given a value or an exception, it becomes
-in effect immutable-- it can never be overwritten.
+in effect immutable -- it can never be overwritten.
The simplest way to create a future object is to invoke the `Future.apply`
method which starts an asynchronous computation and returns a
@@ -219,6 +303,9 @@ popular social network to obtain a list of friends for a given user.
We will open a new session and then send
a request to obtain a list of friends of a particular user:
+{% tabs futures-04 class=tabs-scala-version %}
+
+{% tab 'Scala 2' for=futures-04 %}
import scala.concurrent._
import ExecutionContext.Implicits.global
@@ -226,6 +313,17 @@ a request to obtain a list of friends of a particular user:
val f: Future[List[Friend]] = Future {
session.getFriends()
}
+{% endtab %}
+{% tab 'Scala 3' for=futures-04 %}
+ import scala.concurrent.*
+ import ExecutionContext.Implicits.global
+
+ val session = socialNetwork.createSessionFor("user", credentials)
+ val f: Future[List[Friend]] = Future {
+ session.getFriends()
+ }
+{% endtab %}
+{% endtabs %}
Above, we first import the contents of the `scala.concurrent` package
to make the type `Future` visible.
@@ -238,8 +336,8 @@ To obtain the list of friends of a user, a request
has to be sent over a network, which can take a long time.
This is illustrated with the call to the method `getFriends` that returns `List[Friend]`.
To better utilize the CPU until the response arrives, we should not
-block the rest of the program-- this computation should be scheduled
-asynchronously. The `Future.apply` method does exactly that-- it performs
+block the rest of the program -- this computation should be scheduled
+asynchronously. The `Future.apply` method does exactly that -- it performs
the specified computation block concurrently, in this case sending
a request to the server and waiting for a response.
@@ -251,10 +349,14 @@ the following example, the `session` value is incorrectly
initialized, so the computation in the `Future` block will throw a `NullPointerException`.
This future `f` is then failed with this exception instead of being completed successfully:
+{% tabs futures-04b %}
+{% tab 'Scala 2 and 3' for=futures-04b %}
val session = null
val f: Future[List[Friend]] = Future {
- session.getFriends
+ session.getFriends()
}
+{% endtab %}
+{% endtabs %}
The line `import ExecutionContext.Implicits.global` above imports
the default global execution context.
@@ -270,16 +372,20 @@ Our example was based on a hypothetical social network API where
the computation consists of sending a network request and waiting
for a response.
It is fair to offer an example involving an asynchronous computation
-which you can try out of the box. Assume you have a text file and
+which you can try out of the box. Assume you have a text file, and
you want to find the position of the first occurrence of a particular keyword.
This computation may involve blocking while the file contents
are being retrieved from the disk, so it makes sense to perform it
concurrently with the rest of the computation.
+{% tabs futures-04c %}
+{% tab 'Scala 2 and 3' for=futures-04c %}
val firstOccurrence: Future[Int] = Future {
val source = scala.io.Source.fromFile("myText.txt")
source.toSeq.indexOfSlice("myKeyword")
}
+{% endtab %}
+{% endtabs %}
### Callbacks
@@ -291,7 +397,7 @@ We are often interested in the result of the computation, not just its
side-effects.
In many future implementations, once the client of the future becomes interested
-in its result, it has to block its own computation and wait until the future is completed--
+in its result, it has to block its own computation and wait until the future is completed --
only then can it use the value of the future to continue its own computation.
Although this is allowed by the Scala `Future` API as we will show later,
from a performance point of view a better way to do it is in a completely
@@ -323,32 +429,63 @@ value is a `Throwable`.
Coming back to our social network example, let's assume we want to
fetch a list of our own recent posts and render them to the screen.
We do so by calling a method `getRecentPosts` which returns
-a `List[String]`-- a list of recent textual posts:
+a `List[String]` -- a list of recent textual posts:
+{% tabs futures-05 class=tabs-scala-version %}
+{% tab 'Scala 2' for=futures-05 %}
import scala.util.{Success, Failure}
val f: Future[List[String]] = Future {
- session.getRecentPosts
+ session.getRecentPosts()
}
- f onComplete {
+ f.onComplete {
case Success(posts) => for (post <- posts) println(post)
case Failure(t) => println("An error has occurred: " + t.getMessage)
}
+{% endtab %}
+{% tab 'Scala 3' for=futures-05 %}
+ import scala.util.{Success, Failure}
+
+ val f: Future[List[String]] = Future {
+ session.getRecentPosts()
+ }
+
+ f.onComplete {
+ case Success(posts) => for post <- posts do println(post)
+ case Failure(t) => println("An error has occurred: " + t.getMessage)
+ }
+{% endtab %}
+{% endtabs %}
The `onComplete` method is general in the sense that it allows the
client to handle the result of both failed and successful future
computations. In the case where only successful results need to be
handled, the `foreach` callback can be used:
+{% tabs futures-06 class=tabs-scala-version %}
+{% tab 'Scala 2' for=futures-06 %}
val f: Future[List[String]] = Future {
- session.getRecentPosts
+ session.getRecentPosts()
}
- f foreach { posts =>
- for (post <- posts) println(post)
+ for {
+ posts <- f
+ post <- posts
+ } println(post)
+{% endtab %}
+{% tab 'Scala 3' for=futures-06 %}
+ val f: Future[List[String]] = Future {
+ session.getRecentPosts()
}
+ for
+ posts <- f
+ post <- posts
+ do println(post)
+{% endtab %}
+{% endtabs %}
+
`Future`s provide a clean way of handling only failed results using
the `failed` projection which converts a `Failure[Throwable]` to a
`Success[Throwable]`. An example of doing this is provided in the
@@ -358,15 +495,19 @@ Coming back to the previous example with searching for the first
occurrence of a keyword, you might want to print the position
of the keyword to the screen:
+{% tabs futures-oncomplete %}
+{% tab 'Scala 2 and 3' for=futures-oncomplete %}
val firstOccurrence: Future[Int] = Future {
val source = scala.io.Source.fromFile("myText.txt")
source.toSeq.indexOfSlice("myKeyword")
}
- firstOccurrence onComplete {
+ firstOccurrence.onComplete {
case Success(idx) => println("The keyword first appears at position: " + idx)
case Failure(t) => println("Could not process file: " + t.getMessage)
}
+{% endtab %}
+{% endtabs %}
The `onComplete` and `foreach` methods both have result type `Unit`, which
@@ -393,19 +534,23 @@ This means that in the following example the variable `totalA` may not be set
to the correct number of lower case and upper case `a` characters from the computed
text.
+{% tabs volatile %}
+{% tab 'Scala 2 and 3' for=volatile %}
@volatile var totalA = 0
val text = Future {
"na" * 16 + "BATMAN!!!"
}
- text foreach { txt =>
+ text.foreach { txt =>
totalA += txt.count(_ == 'a')
}
- text foreach { txt =>
+ text.foreach { txt =>
totalA += txt.count(_ == 'A')
}
+{% endtab %}
+{% endtabs %}
Above, the two callbacks may execute one after the other, in
which case the variable `totalA` holds the expected value `18`.
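+
+A sketch (not part of the original example) of how to avoid the shared mutable variable altogether is to compute the count inside a transformation of the future and only print the final result:
+
+    text.map(txt => txt.count(c => c.toLower == 'a')) // counts both 'a' and 'A'
+        .foreach(total => println(total))             // prints 18
+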
@@ -434,10 +579,10 @@ callbacks may be executed concurrently with one another.
However, a particular `ExecutionContext` implementation may result
in a well-defined order.
-5. In the event that some of the callbacks throw an exception, the
+5. In the event that some callbacks throw an exception, the
other callbacks are executed regardless.
-6. In the event that some of the callbacks never complete (e.g. the
+6. In the event that some callbacks never complete (e.g. the
callback contains an infinite loop), the other callbacks may not be
executed at all. In these cases, a potentially blocking callback must
use the `blocking` construct (see below).
@@ -456,25 +601,42 @@ interfacing with a currency trading service. Suppose we want to buy US
dollars, but only when it's profitable. We first show how this could
be done using callbacks:
+{% tabs futures-07 class=tabs-scala-version %}
+{% tab 'Scala 2' for=futures-07 %}
val rateQuote = Future {
connection.getCurrentValue(USD)
}
- rateQuote foreach { quote =>
+ for (quote <- rateQuote) {
val purchase = Future {
if (isProfitable(quote)) connection.buy(amount, quote)
else throw new Exception("not profitable")
}
- purchase foreach { amount =>
+ for (amount <- purchase)
println("Purchased " + amount + " USD")
- }
}
+{% endtab %}
+{% tab 'Scala 3' for=futures-07 %}
+ val rateQuote = Future {
+ connection.getCurrentValue(USD)
+ }
+
+ for quote <- rateQuote do
+ val purchase = Future {
+ if isProfitable(quote) then connection.buy(amount, quote)
+ else throw Exception("not profitable")
+ }
+
+ for amount <- purchase do
+ println("Purchased " + amount + " USD")
+{% endtab %}
+{% endtabs %}
We start by creating a future `rateQuote` which gets the current exchange
rate.
After this value is obtained from the server and the future successfully
-completed, the computation proceeds in the `foreach` callback and we are
+completed, the computation proceeds in the `foreach` callback, and we are
ready to decide whether to buy or not.
We therefore create another future `purchase` which makes a decision to buy only if it's profitable
to do so, and then sends a request.
@@ -489,7 +651,7 @@ some other currency. We would have to repeat this pattern within the
to reason about.
Second, the `purchase` future is not in the scope with the rest of
-the code-- it can only be acted upon from within the `foreach`
+the code -- it can only be acted upon from within the `foreach`
callback. This means that other parts of the application do not
see the `purchase` future and cannot register another `foreach`
callback to it, for example, to sell some other currency.
@@ -504,18 +666,36 @@ about mapping collections.
Let's rewrite the previous example using the `map` combinator:
+{% tabs futures-08 class=tabs-scala-version %}
+{% tab 'Scala 2' for=futures-08 %}
val rateQuote = Future {
connection.getCurrentValue(USD)
}
- val purchase = rateQuote map { quote =>
+ val purchase = rateQuote.map { quote =>
if (isProfitable(quote)) connection.buy(amount, quote)
else throw new Exception("not profitable")
}
- purchase foreach { amount =>
+ purchase.foreach { amount =>
println("Purchased " + amount + " USD")
}
+{% endtab %}
+{% tab 'Scala 3' for=futures-08 %}
+ val rateQuote = Future {
+ connection.getCurrentValue(USD)
+ }
+
+ val purchase = rateQuote.map { quote =>
+ if isProfitable(quote) then connection.buy(amount, quote)
+ else throw Exception("not profitable")
+ }
+
+ purchase.foreach { amount =>
+ println("Purchased " + amount + " USD")
+ }
+{% endtab %}
+{% endtabs %}
By using `map` on `rateQuote` we have eliminated one `foreach` callback and,
more importantly, the nesting.
@@ -544,11 +724,13 @@ combinators. The `flatMap` method takes a function that maps the value
to a new future `g`, and then returns a future which is completed once
`g` is completed.
-Lets assume that we want to exchange US dollars for Swiss francs
+Let's assume that we want to exchange US dollars for Swiss francs
(CHF). We have to fetch quotes for both currencies, and then decide on
buying based on both quotes.
Here is an example of `flatMap` and `withFilter` usage within for-comprehensions:
+{% tabs futures-09 class=tabs-scala-version %}
+{% tab 'Scala 2' for=futures-09 %}
val usdQuote = Future { connection.getCurrentValue(USD) }
val chfQuote = Future { connection.getCurrentValue(CHF) }
@@ -561,20 +743,40 @@ Here is an example of `flatMap` and `withFilter` usage within for-comprehensions
purchase foreach { amount =>
println("Purchased " + amount + " CHF")
}
+{% endtab %}
+{% tab 'Scala 3' for=futures-09 %}
+ val usdQuote = Future { connection.getCurrentValue(USD) }
+ val chfQuote = Future { connection.getCurrentValue(CHF) }
+
+ val purchase = for
+ usd <- usdQuote
+ chf <- chfQuote
+ if isProfitable(usd, chf)
+ yield connection.buy(amount, chf)
+
+ purchase.foreach { amount =>
+ println("Purchased " + amount + " CHF")
+ }
+{% endtab %}
+{% endtabs %}
The `purchase` future is completed only once both `usdQuote`
-and `chfQuote` are completed-- it depends on the values
+and `chfQuote` are completed -- it depends on the values
of both these futures so its own computation cannot begin
earlier.
The for-comprehension above is translated into:
- val purchase = usdQuote flatMap {
+{% tabs for-translation %}
+{% tab 'Scala 2 and 3' for=for-translation %}
+ val purchase = usdQuote.flatMap {
usd =>
- chfQuote
- .withFilter(chf => isProfitable(usd, chf))
- .map(chf => connection.buy(amount, chf))
+ chfQuote
+ .withFilter(chf => isProfitable(usd, chf))
+ .map(chf => connection.buy(amount, chf))
}
+{% endtab %}
+{% endtabs %}
which is a bit harder to grasp than the for-comprehension, but
we analyze it to better understand the `flatMap` operation.
@@ -611,11 +813,15 @@ amount. The `connection.buy` method takes an `amount` to buy and the expected
future to contain `0` instead of the exception, we use the `recover`
combinator:
- val purchase: Future[Int] = rateQuote map {
+{% tabs recover %}
+{% tab 'Scala 2 and 3' for=recover %}
+ val purchase: Future[Int] = rateQuote.map {
quote => connection.buy(amount, quote)
- } recover {
+ }.recover {
case QuoteChangedException() => 0
}
+{% endtab %}
+{% endtabs %}
The `recover` combinator creates a new future which holds the same
result as the original future if it completed successfully. If it did
@@ -640,20 +846,24 @@ the exception from this future, as in the following example which
tries to print US dollar value, but prints the Swiss franc value in
the case it fails to obtain the dollar value:
+{% tabs fallback-to %}
+{% tab 'Scala 2 and 3' for=fallback-to %}
val usdQuote = Future {
connection.getCurrentValue(USD)
- } map {
+ }.map {
usd => "Value: " + usd + "$"
}
val chfQuote = Future {
connection.getCurrentValue(CHF)
- } map {
+ }.map {
chf => "Value: " + chf + "CHF"
}
- val anyQuote = usdQuote fallbackTo chfQuote
+ val anyQuote = usdQuote.fallbackTo(chfQuote)
- anyQuote foreach { println(_) }
+ anyQuote.foreach { println(_) }
+{% endtab %}
+{% endtabs %}
The `andThen` combinator is used purely for side-effecting purposes.
It returns a new future with exactly the same result as the current
@@ -665,17 +875,34 @@ multiple `andThen` calls are ordered, as in the following example
which stores the recent posts from a social network to a mutable set
and then renders all the posts to the screen:
- val allPosts = mutable.Set[String]()
+{% tabs futures-10 class=tabs-scala-version %}
+{% tab 'Scala 2' for=futures-10 %}
+ val allPosts = mutable.Set[String]()
- Future {
- session.getRecentPosts
- } andThen {
- case Success(posts) => allPosts ++= posts
- } andThen {
- case _ =>
- clearAll()
- for (post <- allPosts) render(post)
- }
+ Future {
+ session.getRecentPosts()
+ }.andThen {
+ case Success(posts) => allPosts ++= posts
+ }.andThen {
+ case _ =>
+ clearAll()
+ for (post <- allPosts) render(post)
+ }
+{% endtab %}
+{% tab 'Scala 3' for=futures-10 %}
+ val allPosts = mutable.Set[String]()
+
+ Future {
+ session.getRecentPosts()
+ }.andThen {
+ case Success(posts) => allPosts ++= posts
+ }.andThen {
+ case _ =>
+ clearAll()
+ for post <- allPosts do render(post)
+ }
+{% endtab %}
+{% endtabs %}
In summary, the combinators on futures are purely functional.
Every combinator returns a new future which is related to the
@@ -691,10 +918,20 @@ futures also have projections. If the original future fails, the
fails with a `NoSuchElementException`. The following is an example
which prints the exception to the screen:
+{% tabs futures-11 class=tabs-scala-version %}
+{% tab 'Scala 2' for=futures-11 %}
val f = Future {
2 / 0
}
for (exc <- f.failed) println(exc)
+{% endtab %}
+{% tab 'Scala 3' for=futures-11 %}
+ val f = Future {
+ 2 / 0
+ }
+ for exc <- f.failed do println(exc)
+{% endtab %}
+{% endtabs %}
The for-comprehension in this example is translated to:
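+
+Roughly, this is the standard desugaring of a single-generator `for` loop (shown here as a sketch):
+
+    f.failed.foreach(exc => println(exc))
+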
@@ -704,10 +941,20 @@ Because `f` is unsuccessful here, the closure is registered to
the `foreach` callback on a newly-successful `Future[Throwable]`.
The following example does not print anything to the screen:
+{% tabs futures-12 class=tabs-scala-version %}
+{% tab 'Scala 2' for=futures-12 %}
val g = Future {
4 / 2
}
for (exc <- g.failed) println(exc)
+{% endtab %}
+{% tab 'Scala 3' for=futures-12 %}
+ val g = Future {
+ 4 / 2
+ }
+ for exc <- g.failed do println(exc)
+{% endtab %}
+{% endtabs %}
+{% tabs install-cs-setup-tabs class=platform-os-options %}
+
+
+{% tab macOS for=install-cs-setup-tabs %}
+Run the following command in your terminal, following the on-screen instructions:
+{% include code-snippet.html language='bash' codeSnippet=site.data.setup-scala.macOS-brew %}
+{% altDetails cs-setup-macos-nobrew "Alternatively, if you don't use Homebrew:" %}
+ On the Apple Silicon (M1, M2, …) architecture:
+ {% include code-snippet.html language='bash' codeSnippet=site.data.setup-scala.macOS-arm64 %}
+ Otherwise, on the x86-64 architecture:
+ {% include code-snippet.html language='bash' codeSnippet=site.data.setup-scala.macOS-x86-64 %}
+{% endaltDetails %}
+{% endtab %}
+
+
+
+{% tab Linux for=install-cs-setup-tabs %}
+ Run the following command in your terminal, following the on-screen instructions.
+
+ On the x86-64 architecture:
+ {% include code-snippet.html language='bash' codeSnippet=site.data.setup-scala.linux-x86-64 %}
+ Otherwise, on the ARM64 architecture:
+ {% include code-snippet.html language='bash' codeSnippet=site.data.setup-scala.linux-arm64 %}
+{% endtab %}
+
+
+
+{% tab Windows for=install-cs-setup-tabs %}
+ Download and execute [the Scala installer for Windows]({{site.data.setup-scala.windows-link}})
+ based on Coursier, and follow the on-screen instructions.
+{% endtab %}
+
+
+
+{% tab Other for=install-cs-setup-tabs defaultTab %}
+
+ Follow the documentation from Coursier on
+ [how to install and run `cs setup`](https://get-coursier.io/docs/cli-installation).
+{% endtab %}
+
+
+{% endtabs %}
+
+
+> You may need to restart your terminal, log out,
+> or reboot in order for the changes to take effect.
+{: .help-info}
+
+
+{% altDetails testing-your-setup 'Testing your setup' %}
+Check your setup with the command `scala -version`, which should output:
+```bash
+$ scala -version
+Scala code runner version: 1.4.3
+Scala version (default): {{site.scala-3-version}}
+```
+{% endaltDetails %}
+
+
+
+Along with managing JVMs, `cs setup` also installs useful command-line tools:
+
+| Commands | Description |
+|----------|-------------|
+| `scalac` | the Scala compiler |
+| `scala`, `scala-cli` | [Scala CLI](https://scala-cli.virtuslab.org), the interactive toolkit for Scala |
+| `sbt`, `sbtn` | the [sbt](https://www.scala-sbt.org/) build tool |
+| `amm` | [Ammonite](https://ammonite.io/), an enhanced REPL |
+| `scalafmt` | [Scalafmt](https://scalameta.org/scalafmt/), the Scala code formatter |
+
+For more information about `cs`, read
+[coursier-cli documentation](https://get-coursier.io/docs/cli-overview).
+
+> `cs setup` installs the Scala 3 compiler and runner by default (the `scalac` and
+> `scala` commands, respectively). Whether you intend to use Scala 2 or 3,
+> this is usually not an issue because most projects use a build tool that will
+> use the correct version of Scala irrespective of the one installed "globally".
+> Nevertheless, you can always launch a specific version of Scala using
+> ```
+> $ cs launch scala:{{ site.scala-version }}
+> $ cs launch scalac:{{ site.scala-version }}
+> ```
+> If you prefer Scala 2 to be run by default, you can force that version to be installed with:
+> ```
+> $ cs install scala:{{ site.scala-version }} scalac:{{ site.scala-version }}
+> ```
+
+### ...or manually
+
+You only need two tools to compile, run, test, and package a Scala project: Java 8 or 11,
+and Scala CLI.
+To install them manually:
+
+1. If you don't have Java 8 or 11 installed, download
+ Java from [Oracle Java 8](https://www.oracle.com/java/technologies/javase-jdk8-downloads.html), [Oracle Java 11](https://www.oracle.com/java/technologies/javase-jdk11-downloads.html),
+ or [AdoptOpenJDK 8/11](https://adoptopenjdk.net/). Refer to [JDK Compatibility](/overviews/jdk-compatibility/overview.html) for Scala/Java compatibility details.
+1. Install [Scala CLI](https://scala-cli.virtuslab.org/install)
+
+## Using the Scala CLI
+
+In a directory of your choice, which we will call `<project-dir>`, create a file named `hello.scala` with the following code:
+```scala
+//> using scala {{site.scala-3-version}}
+
+@main
+def hello(): Unit =
+ println("Hello, World!")
+```
+
+You can define a method with the `def` keyword and mark it as a "main" method with the `@main` annotation, designating it as
+the entry point of program execution. The method's return type is `Unit`, which means it does not return a value. `Unit`
+can be thought of as an analogue to the `void` keyword found in other languages. The `println` method will print the `"Hello, World!"`
+string to standard output.
+
+To run the program, execute the `scala run hello.scala` command from a terminal, within the `<project-dir>` directory. The file will be compiled and executed, with console output
+similar to the following:
+```
+$ scala run hello.scala
+Compiling project (Scala {{site.scala-3-version}}, JVM (20))
+Compiled project (Scala {{site.scala-3-version}}, JVM (20))
+Hello, World!
+```
+
+### Handling command-line arguments
+
+Rewrite the `hello.scala` file so that the program greets the person running it.
+```scala
+//> using scala {{site.scala-3-version}}
+
+@main
+def hello(name: String): Unit =
+ println(s"Hello, $name!")
+```
+
+The `name` argument is expected to be provided when executing the program; if it is missing, the execution will fail.
+The `println` method receives an interpolated string, as indicated by the `s` prefix before the opening quote. `$name` will be replaced with
+the content of the `name` argument.
+
+To pass the arguments when executing the program, put them after `--`:
+```
+$ scala run hello.scala -- Gabriel
+Compiling project (Scala {{site.scala-3-version}}, JVM (20))
+Compiled project (Scala {{site.scala-3-version}}, JVM (20))
+Hello, Gabriel!
+```
+
+You can read more about [main methods](/scala3/book/methods-main-methods.html) and [string interpolation](/scala3/book/string-interpolation.html) in the Scala Book.
+
+### Adding dependencies
+
+We now write a program that will count the files and directories present in its working directory.
+We use the [os-lib](https://github.com/com-lihaoyi/os-lib) library from the [Scala toolkit](/toolkit/introduction.html)
+for that purpose. A dependency on the library can be added with the `//> using` directive. Put the following code in `counter.scala`.
+```scala
+//> using scala {{site.scala-3-version}}
+//> using dep "com.lihaoyi::os-lib:0.11.4"
+
+@main
+def countFiles(): Unit =
+ val paths = os.list(os.pwd)
+ println(paths.length)
+```
+
+In the code above, `os.pwd` returns the current working directory. We pass it to `os.list`, which returns a sequence
+of paths directly within the directory passed as an argument. We use a `val` to declare an immutable value, in this example storing the
+sequence of paths.
+
+Execute the program. The dependency will be downloaded automatically. The execution should produce output similar to the following:
+```
+$ scala run counter.scala
+Compiling project (Scala {{site.scala-3-version}}, JVM (20))
+Compiled project (Scala {{site.scala-3-version}}, JVM (20))
+4
+```
+The printed number should be 4: `hello.scala`, `counter.scala`, and two hidden directories created automatically when a program is executed:
+`.bsp`, which contains information about the project used by IDEs, and `.scala-build`, which contains the results of compilation.
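+
+As a side note, here is a sketch (not part of the tutorial) showing how the same `os-lib` calls can skip those hidden entries, so that only `hello.scala` and `counter.scala` are counted:
+```scala
+//> using scala {{site.scala-3-version}}
+//> using dep "com.lihaoyi::os-lib:0.11.4"
+
+@main
+def countVisibleFiles(): Unit =
+  // `_.last` is the final path segment; skip names starting with a dot
+  val visible = os.list(os.pwd).filterNot(_.last.startsWith("."))
+  println(visible.length)
+```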
+
+As it turns out, the `os-lib` library is part of the Scala Toolkit, a collection of libraries recommended for tasks like testing,
+operating system interaction, or handling JSON. You can read more about the libraries included in the toolkit [here](/toolkit/introduction.html).
+To include the toolkit libraries, use the `//> using toolkit 0.5.0` directive:
+```scala
+//> using scala {{site.scala-3-version}}
+//> using toolkit 0.5.0
+
+@main
+def countFiles(): Unit =
+ val paths = os.list(os.pwd)
+ println(paths.length)
+```
+
+This program is identical to the one above. However, other toolkit libraries will also be available to use, should you need them.
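+
+For instance, a small sketch (not part of the original tutorial) that uses uPickle, another toolkit library, to print the count as JSON:
+```scala
+//> using scala {{site.scala-3-version}}
+//> using toolkit 0.5.0
+
+@main
+def countFilesAsJson(): Unit =
+  val paths = os.list(os.pwd)
+  // prints something like {"count":4}
+  println(upickle.default.write(Map("count" -> paths.length)))
+```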
+
+### Using the REPL
+
+You can execute code interactively using the REPL provided by the `scala` command. Execute `scala` in the console without any arguments.
+```
+$ scala
+Welcome to Scala {{site.scala-3-version}} (20-ea, Java OpenJDK 64-Bit Server VM).
+Type in expressions for evaluation. Or try :help.
+
+scala>
+```
+
+Write a line of code to be executed and press enter.
+```
+scala> println("Hello, World!")
+Hello, World!
+
+scala>
+```
+
+The result will be printed immediately after executing the line. You can declare values:
+```
+scala> val i = 1
+val i: Int = 1
+
+scala>
+```
+
+A new value of type `Int` has been created. If you provide an expression that can be evaluated, its result will be stored in an automatically created value.
+```
+scala> i + 3
+val res0: Int = 4
+
+scala>
+```
+You can exit the REPL with `:exit`.
+
+## Using an IDE
+
+> You can read a short summary of Scala IDEs on [a dedicated page](/getting-started/scala-ides.html).
+
+Let's use an IDE to open the code we wrote above. The most popular ones are [IntelliJ](https://www.jetbrains.com/idea/) and
+[VSCode](https://scalameta.org/metals/docs/editors/vscode).
+They both offer rich IDE features, but you can still use [many other editors](https://scalameta.org/metals/docs/editors/overview.html).
+
+### Prepare the project
+
+First, remove all the using directives, and put them in a single file `project.scala` in the `<project-dir>` directory.
+This makes it easier to import as a project in an IDE:
+
+```scala
+//> using scala {{site.scala-3-version}}
+//> using toolkit 0.5.0
+```
+
+> Optionally, you can re-initialise the necessary IDE files from within the `<project-dir>` directory with the command `scala setup-ide .`, but these files will already exist if you have previously run the project with the Scala CLI `run` command.
+
+### Using IntelliJ
+
+1. Download and install [IntelliJ Community Edition](https://www.jetbrains.com/help/idea/installation-guide.html)
+1. Install the Scala plugin by following [the instructions on how to install IntelliJ plugins](https://www.jetbrains.com/help/idea/discover-intellij-idea-for-scala.html)
+1. Open the `<project-dir>` directory, which should be imported automatically as a BSP project.
+
+### Using VSCode with Metals
+
+1. Download [VSCode](https://code.visualstudio.com/Download)
+1. Install the Metals extension from [the Marketplace](https://marketplace.visualstudio.com/items?itemName=scalameta.metals)
+1. Next, open the `<project-dir>` directory in VSCode. Metals should activate and begin importing the project automatically.
+
+### Play with the source code
+
+View these three files in your IDE:
+
+- _project.scala_
+- _hello.scala_
+- _counter.scala_
+
+You should notice the benefits of an IDE, such as syntax highlighting and smart code interactions.
+For example, you can place the cursor over any part of the code, such as `os.pwd` in _counter.scala_, and documentation for the method will appear.
+
+When you run your project in the next step, the configuration in _project.scala_ will be used to run the code in the other source files.
+
+### Run the code
+
+If you’re comfortable using your IDE, you can run the code in _counter.scala_ from your IDE.
+Attached to the `countFiles` method should be a prompt button. Click it to run the method. This should run without issue.
+The `hello` method in _hello.scala_ needs arguments, however, so it will require extra configuration via the IDE to provide the argument.
+
+Otherwise, you can run either application from the IDE's built-in terminal, as described in the sections above.
+
+## Next steps
+
+Now that you have tasted a little bit of Scala, you can further explore the language itself. Consider checking out:
+
+* [The Scala Book](/scala3/book/introduction.html) (see the Scala 2 version [here](/overviews/scala-book/introduction.html)), which provides a set of short lessons introducing Scala’s main features.
+* [The Tour of Scala](/tour/tour-of-scala.html) for bite-sized introductions to Scala's features.
+* [Learning Courses](/online-courses.html), which includes online interactive tutorials and courses.
+* [Our list of some popular Scala books](/books.html).
+
+There are also tutorials for other build tools you can use with Scala:
+* [Getting Started with Scala and sbt](/getting-started/sbt-track/getting-started-with-scala-and-sbt-on-the-command-line.html)
+* [Using Scala and Maven](/tutorials/scala-with-maven.html)
+
+## Getting Help
+There are a multitude of mailing lists and real-time chat rooms in case you want to quickly connect with other Scala users. Check out our [community](https://scala-lang.org/community/) page for a list of these resources, and for where to reach out for help.
diff --git a/_getting-started/intellij-track/building-a-scala-project-with-intellij-and-sbt.md b/_overviews/getting-started/intellij-track/building-a-scala-project-with-intellij-and-sbt.md
similarity index 89%
rename from _getting-started/intellij-track/building-a-scala-project-with-intellij-and-sbt.md
rename to _overviews/getting-started/intellij-track/building-a-scala-project-with-intellij-and-sbt.md
index d1eab7d4ae..6dc397f089 100644
--- a/_getting-started/intellij-track/building-a-scala-project-with-intellij-and-sbt.md
+++ b/_overviews/getting-started/intellij-track/building-a-scala-project-with-intellij-and-sbt.md
@@ -2,7 +2,7 @@
title: Building a Scala Project with IntelliJ and sbt
layout: singlepage-overview
partof: building-a-scala-project-with-intellij-and-sbt
-languages: [ja]
+languages: [ja, ru, uk]
disqus: true
previous-page: getting-started/intellij-track/getting-started-with-scala-in-intellij
next-page: testing-scala-in-intellij-with-scalatest
@@ -54,16 +54,15 @@ but here's a glance at what everything is for:
1. On the **Project** panel on the left, expand `SbtExampleProject` => `src`
=> `main`
1. Right-click `scala` and select **New** => **Package**
-1. Name the package `example` and click **OK**.
-1. Right-click the package `example` and select **New** => **Scala class**.
-1. Name the class `Main` and change the **Kind** to `object`.
+1. Name the package `example` and click **OK** (or just press the Enter or Return key).
+1. Right-click the package `example` and select **New** => **Scala class** (if you don't see this option, right-click the `SbtExampleProject`, click **Add Framework Support...**, select **Scala**, and proceed).
+1. Name the class `Main` and change the **Kind** to `Object`.
1. Change the code in the class to the following:
```
-object Main extends App {
+@main def run() =
val ages = Seq(42, 75, 29, 64)
println(s"The oldest person is ${ages.max}")
-}
```
Note: IntelliJ has its own implementation of the Scala compiler, and sometimes your
@@ -105,6 +104,7 @@ Continue to the next tutorial in the _getting started with IntelliJ_ series, and
**or**
+* [The Scala Book](/scala3/book/introduction.html), which provides a set of short lessons introducing Scala’s main features.
+* [The Tour of Scala](/tour/tour-of-scala.html) for bite-sized introductions to Scala's features.
- Continue learning Scala interactively online on
[Scala Exercises](https://www.scala-exercises.org/scala_tutorial).
-- Learn about Scala's features in bite-sized pieces by stepping through our [Tour of Scala]({{ site.baseurl }}/tour/tour-of-scala.html).
diff --git a/_getting-started/intellij-track/getting-started-with-scala-in-intellij.md b/_overviews/getting-started/intellij-track/getting-started-with-scala-in-intellij.md
similarity index 61%
rename from _getting-started/intellij-track/getting-started-with-scala-in-intellij.md
rename to _overviews/getting-started/intellij-track/getting-started-with-scala-in-intellij.md
index 2e24d76826..8bbd163a00 100644
--- a/_getting-started/intellij-track/getting-started-with-scala-in-intellij.md
+++ b/_overviews/getting-started/intellij-track/getting-started-with-scala-in-intellij.md
@@ -2,7 +2,7 @@
title: Getting Started with Scala in IntelliJ
layout: singlepage-overview
partof: getting-started-with-scala-in-intellij
-languages: [ja]
+languages: [ja, ru, uk]
disqus: true
next-page: building-a-scala-project-with-intellij-and-sbt
@@ -13,36 +13,40 @@ In this tutorial, we'll see how to build a minimal Scala project using IntelliJ
IDE with the Scala plugin. In this guide, IntelliJ will download Scala for you.
## Installation
-1. Make sure you have the Java 8 JDK (also known as 1.8)
- * Run `javac -version` on the command line and make sure you see
- `javac 1.8.___`
- * If you don't have version 1.8 or higher, [install the JDK](https://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html)
-1. Next, download and install [IntelliJ Community Edition](https://www.jetbrains.com/idea/download/)
+1. Make sure you have the Java 8 JDK (also known as 1.8) or newer:
+ * run `javac -version` on the command line to check the Java version,
+ * if you don't have version 1.8 or higher, [install the JDK](https://www.oracle.com/java/technologies/downloads/).
+1. Next, download and install [IntelliJ Community Edition](https://www.jetbrains.com/idea/download/).
1. Then, after starting up IntelliJ, you can download and install the Scala plugin by following the instructions on
-[how to install IntelliJ plugins](https://www.jetbrains.com/help/idea/installing-updating-and-uninstalling-repository-plugins.html) (search for "Scala" in the plugins menu.)
+[how to install IntelliJ plugins](https://www.jetbrains.com/help/idea/managing-plugins.html) (search for "Scala" in the plugins menu.)
When we create the project, we'll install the latest version of Scala.
Note: If you want to open an existing Scala project, you can click **Open**
when you start IntelliJ.
## Creating the Project
-1. Open up IntelliJ and click **File** => **New** => **Project**
-1. On the left panel, select Scala. On the right panel, select IDEA.
-1. Name the project **HelloWorld**
+1. Open up IntelliJ and click **File** => **New** => **Project**.
+1. Name the project **HelloWorld**.
+1. Select **Scala** from the **Language** list.
+1. Select **IntelliJ** from the **Build system** list.
1. Assuming this is your first time creating a Scala project with IntelliJ,
you'll need to install a Scala SDK. To the right of the Scala SDK field,
click the **Create** button.
1. Select the highest version number (e.g. {{ site.scala-version }}) and click **Download**. This might
take a few minutes but subsequent projects can use the same SDK.
-1. Once the SDK is created and you're back to the "New Project" window click **Finish**.
+1. Once the SDK is created, and you're back to the "New Project" window, click **Create**.
## Writing code
1. On the **Project** pane on the left, right-click `src` and select
-**New** => **Scala class**. If you don't see **Scala class**, right-click on **HelloWorld** and click on **Add Framework Support...**, select **Scala** and proceed. If you see **Error: library is not specified**, you can either click download button, or select the library path manually.
+**New** => **Scala class**. If you don't see **Scala class**, right-click on **HelloWorld** and click on **Add Framework Support...**, select **Scala** and proceed. If you see **Error: library is not specified**, you can either click download button, or select the library path manually. If you only see **Scala Worksheet** try expanding the `src` folder and its `main` subfolder, and right-click on the `scala` folder.
1. Name the class `Hello` and change the **Kind** to `object`.
-1. Change the code in the class to the following:
+1. Change the code in the file to the following:
+
+{% tabs hello-world-entry-point class=tabs-scala-version %}
+
+{% tab 'Scala 2' for=hello-world-entry-point %}
```
object Hello extends App {
@@ -50,10 +54,42 @@ object Hello extends App {
}
```
+{% endtab %}
+
+{% tab 'Scala 3' for=hello-world-entry-point %}
+
+```
+@main def hello(): Unit =
+ println("Hello, World!")
+```
+
+In Scala 3, you can remove the object `Hello` and define a top-level method
+`hello` instead, which you annotate with `@main`.
+
+{% endtab %}
+
+{% endtabs %}
+
## Running it
+
+{% tabs hello-world-run class=tabs-scala-version %}
+
+{% tab 'Scala 2' for=hello-world-run %}
+
* Right click on `Hello` in your code and select **Run 'Hello'**.
* You're done!
+{% endtab %}
+
+{% tab 'Scala 3' for=hello-world-run %}
+
+* Right click on `hello` in your code and select **Run 'hello'**.
+* You're done!
+
+{% endtab %}
+
+{% endtabs %}
+
## Experimenting with Scala
A good way to try out code samples is with Scala Worksheets
@@ -62,14 +98,18 @@ A good way to try out code samples is with Scala Worksheets
2. Name your new Scala worksheet "Mathematician".
3. Enter the following code into the worksheet:
+{% tabs square %}
+{% tab 'Scala 2 and 3' for=square %}
```
-def square(x: Int) = x * x
+def square(x: Int): Int = x * x
square(2)
```
+{% endtab %}
+{% endtabs %}
As you change your code, you'll notice that it gets evaluated
-in the right pane. If you do not see a right pane, right click on your Scala worksheet in the Project pane, and click on Evaluate Worksheet.
+in the right pane. If you do not see a right pane, right-click on your Scala worksheet in the Project pane, and click on Evaluate Worksheet.
## Next Steps
diff --git a/_getting-started/intellij-track/testing-scala-in-intellij-with-scalatest.md b/_overviews/getting-started/intellij-track/testing-scala-in-intellij-with-scalatest.md
similarity index 83%
rename from _getting-started/intellij-track/testing-scala-in-intellij-with-scalatest.md
rename to _overviews/getting-started/intellij-track/testing-scala-in-intellij-with-scalatest.md
index 77d0b3341a..8a51eca2e0 100644
--- a/_getting-started/intellij-track/testing-scala-in-intellij-with-scalatest.md
+++ b/_overviews/getting-started/intellij-track/testing-scala-in-intellij-with-scalatest.md
@@ -2,7 +2,7 @@
title: Testing Scala in IntelliJ with ScalaTest
layout: singlepage-overview
partof: testing-scala-in-intellij-with-scalatest
-languages: [ja]
+languages: [ja, ru, uk]
disqus: true
previous-page: building-a-scala-project-with-intellij-and-sbt
@@ -20,37 +20,34 @@ This assumes you know [how to build a project in IntelliJ](building-a-scala-proj
1. Add the ScalaTest dependency:
1. Add the ScalaTest dependency to your `build.sbt` file:
```
- libraryDependencies += "org.scalatest" %% "scalatest" % "3.0.8" % Test
+ libraryDependencies += "org.scalatest" %% "scalatest" % "3.2.19" % Test
```
1. If you get a notification "build.sbt was changed", select **auto-import**.
1. These two actions will cause `sbt` to download the ScalaTest library.
- 1. Wait for the `sbt` sync to finish; otherwise, `FunSuite` and `test()` will be
+ 1. Wait for the `sbt` sync to finish; otherwise, `AnyFunSuite` and `test()` will be
unrecognized.
1. On the project pane on the left, expand `src` => `main`.
1. Right-click on `scala` and select **New** => **Scala class**.
-1. Call it `CubeCalculator`, change the **Kind** to `object`, and click **OK**.
+1. Call it `CubeCalculator`, change the **Kind** to `object`, and hit enter or double-click on `object`.
1. Replace the code with the following:
```
- object CubeCalculator extends App {
- def cube(x: Int) = {
+ object CubeCalculator:
+ def cube(x: Int) =
x * x * x
- }
- }
```
## Creating a test
1. On the project pane on the left, expand `src` => `test`.
1. Right-click on `scala` and select **New** => **Scala class**.
-1. Name the class `CubeCalculatorTest` and click **OK**.
+1. Name the class `CubeCalculatorTest` and hit enter or double-click on `class`.
1. Replace the code with the following:
```
- import org.scalatest.FunSuite
+ import org.scalatest.funsuite.AnyFunSuite
- class CubeCalculatorTest extends FunSuite {
+ class CubeCalculatorTest extends AnyFunSuite:
test("CubeCalculator.cube") {
assert(CubeCalculator.cube(3) === 27)
}
- }
```
1. In the source code, right-click `CubeCalculatorTest` and select
**Run 'CubeCalculatorTest'**.
@@ -60,9 +57,9 @@ This assumes you know [how to build a project in IntelliJ](building-a-scala-proj
Let's go over this line by line:
* `class CubeCalculatorTest` means we are testing the object `CubeCalculator`
-* `extends FunSuite` lets us use functionality of ScalaTest's FunSuite class
+* `extends AnyFunSuite` lets us use functionality of ScalaTest's AnyFunSuite class
such as the `test` function
-* `test` is function that comes from the FunSuite library that collects
+* `test` is a function from AnyFunSuite that collects
results from assertions within the function body.
* `"CubeCalculator.cube"` is a name for the test. You can call it anything but
one convention is "ClassName.methodName".
diff --git a/_getting-started/sbt-track/getting-started-with-scala-and-sbt-on-the-command-line.md b/_overviews/getting-started/sbt-track/getting-started-with-scala-and-sbt-on-the-command-line.md
similarity index 78%
rename from _getting-started/sbt-track/getting-started-with-scala-and-sbt-on-the-command-line.md
rename to _overviews/getting-started/sbt-track/getting-started-with-scala-and-sbt-on-the-command-line.md
index 5c7bc37325..11c90825ea 100644
--- a/_getting-started/sbt-track/getting-started-with-scala-and-sbt-on-the-command-line.md
+++ b/_overviews/getting-started/sbt-track/getting-started-with-scala-and-sbt-on-the-command-line.md
@@ -2,7 +2,7 @@
title: Getting Started with Scala and sbt on the Command Line
layout: singlepage-overview
partof: getting-started-with-scala-and-sbt-on-the-command-line
-languages: [ja]
+languages: [ja, ru, uk]
disqus: true
next-page: testing-scala-with-sbt-on-the-command-line
@@ -26,6 +26,10 @@ We assume you know how to use a terminal.
* [Linux](https://www.scala-sbt.org/1.x/docs/Installing-sbt-on-Linux.html)
## Create the project
+
+{% tabs sbt-welcome-1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=sbt-welcome-1 %}
+
1. `cd` to an empty folder.
1. Run the following command `sbt new scala/hello-world.g8`.
This pulls the 'hello-world' template from GitHub.
@@ -34,6 +38,21 @@ It will also create a `target` folder, which you can ignore.
create a project called "hello-world".
1. Let's take a look at what just got generated:
+{% endtab %}
+{% tab 'Scala 3' for=sbt-welcome-1 %}
+
+1. `cd` to an empty folder.
+1. Run the following command `sbt new scala/scala3.g8`.
+This pulls the 'scala3' template from GitHub.
+It will also create a `target` folder, which you can ignore.
+1. When prompted, name the application `hello-world`. This will
+create a project called "hello-world".
+1. Let's take a look at what just got generated:
+
+{% endtab %}
+{% endtabs %}
+
+
```
- hello-world
- project (sbt uses this to install and manage plugins and dependencies)
@@ -69,18 +88,22 @@ extra functionality to our apps.
1. Open up `build.sbt` and add the following line:
```
-libraryDependencies += "org.scala-lang.modules" %% "scala-parser-combinators" % "1.1.2"
+libraryDependencies += "org.scala-lang.modules" %% "scala-parser-combinators" % "2.1.1"
```
Here, `libraryDependencies` is a set of dependencies, and by using `+=`,
we're adding the [scala-parser-combinators](https://github.com/scala/scala-parser-combinators) dependency to the set of dependencies that sbt will go
and fetch when it starts up. Now, in any Scala file, you can import classes,
-objects, etc, from scala-parser-combinators with a regular import.
+objects, etc, from `scala-parser-combinators` with a regular import.
You can find more published libraries on
[Scaladex](https://index.scala-lang.org/), the Scala library index, where you
can also copy the above dependency information for pasting into your `build.sbt`
file.
+> **Note for Java Libraries:** For a regular Java library, you should only use one percent (`%`) between the
+> organization name and artifact name. Double percent (`%%`) is a specialisation for Scala libraries.
+> You can learn more about the reason for this in the [sbt documentation][sbt-docs-lib-dependencies].
+
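+For illustration (using a well-known Java library purely as an example; it is not something this tutorial depends on), a Java dependency would be declared with a single `%`:
+
+```
+libraryDependencies += "com.google.code.gson" % "gson" % "2.10.1"
+```
+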
## Next steps
Continue to the next tutorial in the _getting started with sbt_ series, and learn about [testing Scala code with sbt in the command line](testing-scala-with-sbt-on-the-command-line.html).
@@ -90,3 +113,5 @@ Continue to the next tutorial in the _getting started with sbt_ series, and lear
- Continue learning Scala interactively online on
[Scala Exercises](https://www.scala-exercises.org/scala_tutorial).
- Learn about Scala's features in bite-sized pieces by stepping through our [Tour of Scala]({{ site.baseurl }}/tour/tour-of-scala.html).
+
+[sbt-docs-lib-dependencies]: https://www.scala-sbt.org/1.x/docs/Library-Dependencies.html#Getting+the+right+Scala+version+with
diff --git a/_getting-started/sbt-track/testing-scala-with-sbt-on-the-command-line.md b/_overviews/getting-started/sbt-track/testing-scala-with-sbt-on-the-command-line.md
similarity index 90%
rename from _getting-started/sbt-track/testing-scala-with-sbt-on-the-command-line.md
rename to _overviews/getting-started/sbt-track/testing-scala-with-sbt-on-the-command-line.md
index 345324c990..9a446b1c76 100644
--- a/_getting-started/sbt-track/testing-scala-with-sbt-on-the-command-line.md
+++ b/_overviews/getting-started/sbt-track/testing-scala-with-sbt-on-the-command-line.md
@@ -2,7 +2,7 @@
title: Testing Scala with sbt and ScalaTest on the Command Line
layout: singlepage-overview
partof: testing-scala-with-sbt-on-the-command-line
-languages: [ja]
+languages: [ja, ru, uk]
disqus: true
previous-page: getting-started-with-scala-and-sbt-on-the-command-line
@@ -11,7 +11,7 @@ redirect_from: "/getting-started-sbt-track/testing-scala-with-sbt-on-the-command
There are multiple libraries and testing methodologies for Scala,
but in this tutorial, we'll demonstrate one popular option from the ScalaTest framework
-called [FunSuite](https://www.scalatest.org/getting_started_with_fun_suite).
+called [AnyFunSuite](https://www.scalatest.org/scaladoc/3.2.2/org/scalatest/funsuite/AnyFunSuite.html).
We assume you know [how to create a Scala project with sbt](getting-started-with-scala-and-sbt-on-the-command-line.html).
## Setup
@@ -46,9 +46,9 @@ sbt test
named after the object we're testing.
```
- import org.scalatest.FunSuite
+ import org.scalatest.funsuite.AnyFunSuite
- class CubeCalculatorTest extends FunSuite {
+ class CubeCalculatorTest extends AnyFunSuite {
test("CubeCalculator.cube") {
assert(CubeCalculator.cube(3) === 27)
}
@@ -58,9 +58,9 @@ named after the object we're testing.
Let's go over this line by line.
* `class CubeCalculatorTest` means we are testing the object `CubeCalculator`
-* `extends FunSuite` lets us use functionality of ScalaTest's FunSuite class
+* `extends AnyFunSuite` lets us use functionality of ScalaTest's AnyFunSuite class
such as the `test` function
-* `test` is function that comes from FunSuite that collects
+* `test` is a function from AnyFunSuite that collects
results from assertions within the function body.
* `"CubeCalculator.cube"` is a name for the test. You can call it anything but
one convention is "ClassName.methodName".
@@ -72,9 +72,9 @@ indeed 27. The `===` is part of ScalaTest and provides clean error messages.
1. Add another test block with its own `assert` statement that checks for the cube of `0`.
```
- import org.scalatest.FunSuite
+ import org.scalatest.funsuite.AnyFunSuite
- class CubeCalculatorTest extends FunSuite {
+ class CubeCalculatorTest extends AnyFunSuite {
test("CubeCalculator.cube 3 should be 27") {
assert(CubeCalculator.cube(3) === 27)
}
diff --git a/_overviews/getting-started/scala-ides.md b/_overviews/getting-started/scala-ides.md
new file mode 100644
index 0000000000..9f210d4b1e
--- /dev/null
+++ b/_overviews/getting-started/scala-ides.md
@@ -0,0 +1,55 @@
+---
+layout: singlepage-overview
+title: Scala IDEs
+
+partof: scala-ides
+
+permalink: /getting-started/:title.html
+
+keywords:
+- Scala
+- IDE
+- JetBrains
+- IntelliJ
+- VSCode
+- Metals
+---
+
+It's of course possible to write Scala code in any editor and compile and run the code from the command line. But most developers prefer to use an IDE (Integrated Development Environment), especially for coding anything beyond simple exercises.
+
+The following IDEs are available for Scala:
+
+## IntelliJ IDEA + Scala plugin
+
+[https://jetbrains.com/scala](https://jetbrains.com/scala)
+
+
+
+IntelliJ IDEA is a cross-platform IDE developed by JetBrains that provides a consistent experience for a wide range of programming languages and technologies. It also supports Scala through the IntelliJ Scala Plugin, which is developed at JetBrains. First, install IntelliJ IDEA Community Edition (unless you already use the Ultimate edition), and then add the IntelliJ Scala Plugin.
+
+IntelliJ IDEA and Scala Plugin will assist you in virtually every part of a Scala software developer's work. Use it if you like a solid integrated experience, sane default settings, and tested solutions.
+
+For more information, check out our tutorial [Getting Started with Scala in IntelliJ](/getting-started/intellij-track/building-a-scala-project-with-intellij-and-sbt.html).
+
+## Visual Studio Code + Metals
+
+[https://scalameta.org/metals](https://scalameta.org/metals)
+
+
+
+Visual Studio Code, commonly called VS Code, is a source code editor from Microsoft. To add Scala support, you install an extension called Metals.
+
+(Why "Metals"? Because the underlying technologies are Scalameta and LSP ([Language Server Protocol](https://microsoft.github.io/language-server-protocol/)), and "Meta" + "LS" equals "Metals".)
+
+In contrast to IntelliJ IDEA + Scala Plugin, VS Code + Metals is aimed at people who like to get feedback and code intelligence straight from the compiler, which enables them to also try out experimental Scala features.
+
+## Your favorite editor + Metals
+
+Metals is most commonly used with VS Code, but it's also available for the following popular editors:
+
+* Emacs
+* Vim
+* Sublime Text
+* Helix
+
+as documented [here](https://scalameta.org/metals/docs/#editor-support).
diff --git a/_overviews/index.md b/_overviews/index.md
index 32a2fcbb7f..53ad207975 100644
--- a/_overviews/index.md
+++ b/_overviews/index.md
@@ -2,8 +2,11 @@
layout: overviews
partof: overviews
title: Guides and Overviews
-languages: [ja, zh-cn, ru]
+languages: [ja, zh-cn, ru, uk]
permalink: /overviews/:title.html
+redirect_from:
+ - /scala3/guides.html
+ - /guides.html
---
diff --git a/_overviews/jdk-compatibility/overview.md b/_overviews/jdk-compatibility/overview.md
index 932a4016a0..c42ee96090 100644
--- a/_overviews/jdk-compatibility/overview.md
+++ b/_overviews/jdk-compatibility/overview.md
@@ -4,41 +4,67 @@ title: JDK Compatibility
permalink: /overviews/jdk-compatibility/overview.html
---
-Scala's primary platform is the Java Virtual Machine (JVM). (Other supported platforms: [Scala.js](https://www.scala-js.org/), [Scala Native](https://scala-native.readthedocs.io/).)
+Scala's primary platform is the Java Virtual Machine (JVM). (Other supported platforms: [Scala.js](https://www.scala-js.org/), [Scala Native](https://scala-native.org/).)
Sometimes new JVM and JDK (Java Development Kit) versions require us to update Scala to remain compatible.
-## Version compatibility table
+## Scala compatibility table
-| JDK version | Minimum Scala versions | Recommended Scala versions |
-|:-----------:|:---------------------------------|:-----------------------------------------------------------|
-| 13, 14 | 2.13.2, 2.12.11 | 2.13.2, 2.12.11 |
-| 12 | 2.13.1, 2.12.9 | 2.13.2, 2.12.11 |
-| 11 | 2.13.0, 2.12.4, 2.11.12 | 2.13.2, 2.12.11, 2.11.12 |
-| 8 | 2.13.0, 2.12.0, 2.11.0, 2.10.2 | 2.13.2, 2.12.11, 2.11.12, 2.10.7 |
-| 6, 7 | 2.11.0, 2.10.0 | 2.11.12, 2.10.7 |
+Minimum Scala versions:
-Even when a version combination isn't listed as supported, most features may still work. (But Scala 2.12+ definitely doesn't work at all on JDK 6 or 7.)
+| JDK | 3 | 3 LTS | 2.13 | 2.12 | 2.11 |
+|:-----------:|:--------:|:--------:|:---------:|:---------:|:----------:|
+| 25 (ea) | 3.7.1 | 3.3.6 | 2.13.17* | 2.12.21* | |
+| 24 | 3.6.4 | 3.3.6 | 2.13.16 | 2.12.21* | |
+| 23 | 3.6.2 | 3.3.5 | 2.13.15 | 2.12.20 | |
+| 22 | 3.4.0 | 3.3.4 | 2.13.13 | 2.12.19 | |
+| 21 (LTS) | 3.4.0 | 3.3.1 | 2.13.11 | 2.12.18 | |
+| 17 (LTS) | 3.0.0 | 3.3.0 | 2.13.6 | 2.12.15 | |
+| 11 (LTS) | 3.0.0 | 3.3.0 | 2.13.0 | 2.12.4 | 2.11.12 |
+| 8 (LTS) | 3.0.0 | 3.3.0 | 2.13.0 | 2.12.0 | 2.11.0 |
-In general, Scala works on JDK 11+, including GraalVM, but it might not take advantage of features that were added after JDK 8. See [below](#jdk-11-compatibility-notes).
+\* = forthcoming; support available in [nightly builds](https://stackoverflow.com/q/40622878/86485)
-Lightbend offers [commercial support](https://www.lightbend.com/lightbend-platform-subscription) for Scala. The linked page includes contact information for inquiring about supported and recommended versions.
+Even when a version combination isn't listed as supported, most features might still work.
+
+Using the latest patch version of your chosen Scala version line is always recommended.
+
+Akka offers [commercial support](https://akka.io/pricing) for Scala 2. The linked page includes contact information for inquiring about supported and recommended versions.
+
+## Tooling compatibility table
+
+Minimum working versions:
+
+| JDK | scala-cli | sbt | mill |
+|:-----------:|:----------:|:---------:|:----------:|
+| 23 | 1.4.1 | 1.9.0 | 0.11.8 |
+| 21 (LTS) | 1.0.0 | 1.9.0 | 0.11.5 |
+| 17 (LTS) | 1.0.0 | 1.6.0 | 0.7.0 |
+| 11 (LTS) | 1.0.0 | 1.1.0 | 0.1.5 |
+| 8 (LTS) | 1.0.0 | 1.0.0 | 0.1.0 |
+
+Even when a version combination isn't listed as supported, most features might still work.
+
+Using a different build tool, such as Gradle or Maven? We invite pull
+requests adding additional columns to this table.
## Running versus compiling
-We generally recommend JDK 8 or 11 for *compiling* Scala code. Since the JVM tends to be backward compatible, it is usually safe to use a newer JVM for *running* your code, especially if you are not using JVM features designated "experimental" or "unsafe".
+JDK 8, 11, 17, and 21 are all reasonable choices both for *compiling* and *running* Scala code.
+
+Since the JVM is normally backwards compatible, it is usually safe to use a newer JVM for *running* your code than the one it was compiled on, especially if you are not using JVM features designated "experimental" or "unsafe".
-If you compile on JDK 11+ but have users on JDK 8, additional care is needed to avoid using APIs and features that don't exist in 8. Therefore, compiling on 8 may be the safer choice. Some Scala developers use JDK 11+ for their daily work but do release builds on JDK 8.
+JDK 8 remains in use at some shops (as of 2023), but usage is declining and some projects are dropping support. If you compile on JDK 11+ but want to allow your users to stay on 8, additional care is needed to avoid using APIs and features that don't exist in 8. (For this reason, some Scala developers use a newer JDK for their daily work but do release builds on JDK 8.)
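+
+One common safeguard, shown here as an illustrative sketch (the exact option spelling varies across Scala versions, and Scala 3 also offers `-java-output-version`), is to pass scalac's `-release` option, which mirrors javac's `--release` and restricts compilation to the JDK 8 API and class-file format:
+
+```scala
+// build.sbt -- illustrative sketch
+scalacOptions ++= Seq("-release", "8")  // compile against the JDK 8 API, even when building on a newer JDK
+```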
## Long Term Support (LTS) versions
After Java 8, Oracle introduced the concept of LTS versions of the JDK. These versions will remain supported (by Oracle, and likely by the rest of the ecosystem, including Scala) for longer than the versions in between. See .
-JDK 8 and 11 are LTS versions. The next LTS version will be JDK 17, planned for September 2021.
+JDK 8, 11, 17, and 21 are LTS versions. (The next LTS version will be 25.)
-Scala provides experimental support for running the Scala compiler on non-LTS versions of the JDK. The current LTS versions are normally tested in our CI matrix and by the Scala community build. We may also test non-LTS versions, but any issues found there are considered lower priority, and will not be considered release blockers. (Lightbend may be able to offer faster resolution of issues like this under commercial support.)
+Scala provides experimental support for running the Scala compiler on non-LTS versions of the JDK. The current LTS versions are normally tested in our CI matrix and by the Scala community build. We may also test non-LTS versions, but any issues found there are considered lower priority, and will not be considered release blockers. (The Scala team at Akka may be able to offer faster resolution of issues like this under commercial support.)
-As already mentioned, Scala code compiled on JDK 8 should run without problems in later JVMs. We will give higher priority to bugs that break this property. (For example, in the 2.13.x series we intend to provide support for JPMS module access checks, to ensure your code won't incur `LinkageErrors` due to module access violations.)
+As already mentioned, Scala code compiled on JDK 8 should run without problems in later JVMs. We will give higher priority to bugs that break this property. (For example, in 2.13.x we might eventually provide support for JPMS module access checks, to ensure your code won't incur `LinkageErrors` due to module access violations.)
## JDK vendors and distributions
@@ -46,37 +72,88 @@ In almost every case, you're free to use the JDK and JVM of your choice.
JDK 8 users typically use the Oracle JDK or some flavor of OpenJDK.
-Most JDK 11+ users are using either OpenJDK or GraalVM.
+Most JDK 11+ users are using OpenJDK, or GraalVM, which runs in the context of OpenJDK. GraalVM performs well on the Scala benchmarks, benefiting from the optimizations in the GraalVM runtime.
-OpenJDK comes in various flavors, offered by different providers. We build and test Scala using [AdoptOpenJDK](https://adoptopenjdk.net) in particular, but the differences are unlikely to matter to most users.
+OpenJDK comes in various flavors, offered by different providers. We build and test Scala using [Temurin](https://adoptium.net) primarily, but the differences are unlikely to matter to most users.
## JDK 11 compatibility notes
-Although the table above jumps from 8 to 11, JDK 9 and 10 will probably also work wherever 11 does. But unlike 9 and 10, 11 is an LTS release, so 11 is what we actually test on and recommend.
+The Scala test suite and Scala community build are green on JDK 11.
-The Scala compiler does not enforce the restrictions of the Java Platform Module System, which means that code that typechecks may incur linkage errors at runtime. Scala 2.13.x will eventually provide [rudimentary support](https://github.com/scala/scala/pull/7218) for this (perhaps only in nightlies built on JDK 11).
+In general, Scala works on JDK 11+, including GraalVM, but may not take special advantage of features that were added after JDK 8.
-For sbt users, JDK 11 support requires minimum sbt version 1.1.0. sbt 1.3.9 or newer is recommended. (If you are still on the 0.13.x series, use 0.13.18.)
+For example, the Scala compiler does not enforce the restrictions of the Java Platform Module System, which means that code that typechecks may incur linkage errors at runtime. Scala 2.13.x will eventually provide [rudimentary support](https://github.com/scala/scala/pull/7218) for this (perhaps only in nightlies built on JDK 11).
-To track progress on JDK 11 related issues, watch:
+To track progress on JDK 11 related issues in Scala, watch:
* the ["Support JDK 11"](https://github.com/scala/scala-dev/issues/139 "scala/scala-dev #139") issue
* the [jdk11 label](https://github.com/scala/bug/labels/jdk11) in scala/bug
-To help with testing on JDK 11, see [scala/scala-dev#559](https://github.com/scala/scala-dev/issues/559).
+## JDK 17 compatibility notes
+
+JDK 17 is an LTS release.
+
+Scala 2.13.6+ and 2.12.15+ support JDK 17.
+
+The Scala test suite and Scala community build are green on JDK 17.
+
+For sbt users, sbt 1.6.0-RC1 is the first version to support JDK 17, but in practice sbt 1.5.5 may also work. (It will print a warning on startup about `TrapExit` that you can ignore.)
+
+For possible Scala issues, see the [jdk11](https://github.com/scala/bug/labels/jdk11) and [jdk17](https://github.com/scala/bug/labels/jdk17) labels in the Scala 2 bug tracker.
+
+## JDK 21 compatibility notes
+
+JDK 21 is an LTS release.
+
+Scala 3.3.1+, 2.13.11+, and 2.12.18+ support JDK 21.
+
+The Scala test suite and Scala 2.13 community build are green on JDK 21.
+
+For sbt users, sbt 1.9.0 is the first version to support JDK 21.
+
+For possible Scala issues, see the [jdk11](https://github.com/scala/bug/labels/jdk11), [jdk17](https://github.com/scala/bug/labels/jdk17), and [jdk21](https://github.com/scala/bug/labels/jdk21) labels in the Scala 2 bug tracker.
+
+## JDK 22 compatibility notes
+
+JDK 22 is non-LTS.
+
+Scala 2.13.13+, 2.12.19+, 3.3.4+, and 3.6.2+ support JDK 22.
+
+For possible Scala 2 issues, see the [jdk11](https://github.com/scala/bug/labels/jdk11), [jdk17](https://github.com/scala/bug/labels/jdk17), and [jdk21](https://github.com/scala/bug/labels/jdk21) labels in the Scala 2 bug tracker.
+
+## JDK 23 compatibility notes
+
+JDK 23 is non-LTS.
+
+Scala 2.13.15+, Scala 2.12.20+, and Scala 3.6.2+ support JDK 23.
+
+We are working on adding JDK 23 support to Scala 3.3.x.
+(Support may be available in nightly builds and/or release candidates.)
+
+For possible Scala 2 issues, see the [jdk11](https://github.com/scala/bug/labels/jdk11), [jdk17](https://github.com/scala/bug/labels/jdk17), and [jdk21](https://github.com/scala/bug/labels/jdk21) labels in the Scala 2 bug tracker.
+
+## JDK 24 compatibility notes
+
+JDK 24 will be non-LTS.
+
+Scala 2.13.16+ supports, and Scala 2.12.21 (forthcoming) will support, JDK 24. We are also working on adding JDK 24 support to Scala 3. (Support may be available in nightly builds and/or release candidates.)
-## JDK 12, 13, and 14 compatibility notes
+For possible Scala 2 issues, see the [jdk11](https://github.com/scala/bug/labels/jdk11), [jdk17](https://github.com/scala/bug/labels/jdk17), and [jdk21](https://github.com/scala/bug/labels/jdk21) labels in the Scala 2 bug tracker.
-JDK 14 was released in March 2020. But 12, 13, and 14 are not LTS releases, so the remarks above about non-LTS releases apply.
+## GraalVM Native Image compatibility notes
-JDK 12, 13, and 14 are expected to work wherever JDK 11 does. The Scala community build now runs on JDK 14 (as well as 11 and 8).
+There are several reports of successfully using Scala with [GraalVM](https://www.graalvm.org) Native Image (i.e., its ahead-of-time compiler) to produce directly executable binaries.
+Beware that, even when using only the Scala standard library, Native Image compilation has heavy requirements in terms of [reflective access](https://www.graalvm.org/reference-manual/native-image/metadata/), and it very likely requires additional configuration steps.
-As of March 2020, the [jdk12](https://github.com/scala/bug/labels/jdk12) and [jdk13](https://github.com/scala/bug/labels/jdk13) labels in scala/bug have no open bugs. New issues will likely be reported against the newer non-LTS [jdk14 label](https://github.com/scala/bug/labels/jdk14) or the LTS [jdk11 label](https://github.com/scala/bug/labels/jdk11).
+A few sbt plugins offer support for GraalVM Native Image compilation (see the sketch after this list):
-As far as we know, 12, 13, and 14 are similar to 11 with respect to Scala compatibility.
+- [sbt-native-packager](https://www.scala-sbt.org/sbt-native-packager/formats/graalvm-native-image.html)
+- [sbt-native-image](https://github.com/scalameta/sbt-native-image)
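+
+As a rough sketch of what this looks like with sbt-native-image (the plugin version, main class, and options below are illustrative; see the plugin's documentation for current values):
+
+```scala
+// project/plugins.sbt
+addSbtPlugin("org.scalameta" % "sbt-native-image" % "0.3.4")
+
+// build.sbt
+lazy val app = (project in file("."))
+  .enablePlugins(NativeImagePlugin)
+  .settings(
+    Compile / mainClass := Some("example.Main"),  // hypothetical entry point
+    nativeImageOptions += "--no-fallback"         // fail instead of falling back to a JVM launcher
+  )
+```
+
+Running `sbt nativeImage` then builds the native binary.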
## Scala 3
-Scala 3.0.x will still support JDK 8. It is also expected to work on 11 and beyond.
+At present, both Scala 3 LTS and Scala Next support JDK 8, as well as 11 and beyond.
-As Scala and the JVM continue to evolve, some eventual Scala 3.x version may drop support for JDK 8, in order to better take advantage of new JVM features. It isn't clear yet what the new minimum supported version might become.
+As per [this blog post](https://www.scala-lang.org/news/next-scala-lts.html),
+a forthcoming Scala 3 LTS version will drop JDK 8 support and may drop
+11 as well. Stay tuned.
diff --git a/_overviews/macros/annotations.md b/_overviews/macros/annotations.md
index 3fbf9203cc..7300704010 100644
--- a/_overviews/macros/annotations.md
+++ b/_overviews/macros/annotations.md
@@ -35,10 +35,9 @@ As a first step, we define an annotation that inherits `StaticAnnotation` and de
(the name `macroTransform` and the signature `annottees: Any*` of that macro are important as they tell the macro engine
that the enclosing annotation is a macro annotation).
- import scala.reflect.macros.Context
+ import scala.annotation.{StaticAnnotation, compileTimeOnly}
import scala.language.experimental.macros
- import scala.annotation.StaticAnnotation
- import scala.annotation.compileTimeOnly
+ import scala.reflect.macros.whitebox
@compileTimeOnly("enable macro paradise to expand macro annotations")
class identity extends StaticAnnotation {
@@ -58,8 +57,8 @@ results have to be wrapped in a `Block` for the lack of better notion in the ref
At this point you might be wondering. A single annottee and a single result is understandable, but what is the many-to-many
mapping supposed to mean? There are several rules guiding the process:
-1. If a class is annotated and it has a companion, then both are passed into the macro. (But not vice versa - if an object
- is annotated and it has a companion class, only the object itself is expanded).
+1. If a class is annotated, and it has a companion, then both are passed into the macro. (But not vice versa - if an object
+ is annotated, and it has a companion class, only the object itself is expanded).
1. If a parameter of a class, method or type member is annotated, then it expands its owner. First comes the annottee,
then the owner and then its companion as specified by the previous rule.
1. Annottees can expand into whatever number of trees of any flavor, and the compiler will then transparently
@@ -76,8 +75,17 @@ but we haven't encapsulated this boilerplate in a helper, because compiler plugi
(By the way, this boilerplate can be abstracted away by a suitable annotation macro, and we'll probably provide such a macro
at a later point in the future).
+ import scala.annotation.{StaticAnnotation, compileTimeOnly}
+ import scala.language.experimental.macros
+ import scala.reflect.macros.whitebox
+
+ @compileTimeOnly("enable macro paradise to expand macro annotations")
+ class identity extends StaticAnnotation {
+ def macroTransform(annottees: Any*): Any = macro identityMacro.impl
+ }
+
object identityMacro {
- def impl(c: Context)(annottees: c.Expr[Any]*): c.Expr[Any] = {
+ def impl(c: whitebox.Context)(annottees: c.Expr[Any]*): c.Expr[Any] = {
import c.universe._
val inputs = annottees.map(_.tree).toList
val (annottee, expandees) = inputs match {
@@ -101,8 +109,8 @@ at a later point in the future).
In the spirit of Scala macros, macro annotations are as untyped as possible to stay flexible and
as typed as possible to remain useful. On the one hand, macro annottees are untyped, so that we can change their signatures (e.g. lists
of class members). But on the other hand, the thing about all flavors of Scala macros is integration with the typechecker, and
-macro annotations are not an exceptions. During expansion we can have all the type information that's possible to have
-(e.g. we can reflect against the surrounding program or perform type checks / implicit lookups in the enclosing context).
+macro annotations are no exception. During expansion, we can have all the type information that's possible to have
+(e.g. we can reflect against the surrounding program or perform type checks / implicit lookup in the enclosing context).
## Blackbox vs whitebox
diff --git a/_overviews/macros/blackbox-whitebox.md b/_overviews/macros/blackbox-whitebox.md
index 07c13f2aa2..d29cd6b16d 100644
--- a/_overviews/macros/blackbox-whitebox.md
+++ b/_overviews/macros/blackbox-whitebox.md
@@ -19,7 +19,7 @@ Separation of macros into blackbox ones and whitebox ones is a feature of Scala
With macros becoming a part of the official Scala 2.10 release, programmers in research and industry have found creative ways of using macros to address all sorts of problems, far extending our original expectations.
-In fact, macros became an important part of our ecosystem so quickly that just a couple months after the release of Scala 2.10, when macros were introduced in experimental capacity, we had a Scala language team meeting and decided to standardize macros and make them a full-fledged feature of Scala by 2.12.
+In fact, macros became an important part of our ecosystem so quickly that just a couple of months after the release of Scala 2.10, when macros were introduced in experimental capacity, we had a Scala language team meeting and decided to standardize macros and make them a full-fledged feature of Scala by 2.12.
UPDATE It turned out that it was not that simple to stabilize macros by Scala 2.12. Our research into that has resulted in establishing a new metaprogramming foundation for Scala, called [scala.meta](https://scalameta.org), whose first beta is expected to be released simultaneously with Scala 2.12 and might later be included in future versions of Scala. In the meanwhile, Scala 2.12 is not going to see any changes to reflection and macros - everything is going to stay experimental as it was in Scala 2.10 and Scala 2.11, and no features are going to be removed. However, even though circumstances under which this document has been written have changed, the information still remains relevant, so please continue reading.
@@ -30,13 +30,13 @@ comprehensibility.
## Blackbox and whitebox macros
-However sometimes def macros transcend the notion of "just a regular method". For example, it is possible for a macro expansion to yield an expression of a type that is more specific than the return type of a macro. In Scala 2.10, such expansion will retain its precise type as highlighted in the ["Static return type of Scala macros"](https://stackoverflow.com/questions/13669974/static-return-type-of-scala-macros) article at Stack Overflow.
+However, sometimes def macros transcend the notion of "just a regular method". For example, it is possible for a macro expansion to yield an expression of a type that is more specific than the return type of the macro. In Scala 2.10, such an expansion will retain its precise type as highlighted in the ["Static return type of Scala macros"](https://stackoverflow.com/questions/13669974/static-return-type-of-scala-macros) article at Stack Overflow.
-This curious feature provides additional flexibility, enabling [fake type providers](https://meta.plasm.us/posts/2013/07/11/fake-type-providers-part-2/), [extended vanilla materialization](/sips/source-locations.html), [fundep materialization]({{ site.baseurl }}/overviews/macros/implicits.html#fundep-materialization) and [extractor macros](https://github.com/scala/scala/commit/84a335916556cb0fe939d1c51f27d80d9cf980dc), but it also sacrifices clarity - both for humans and for machines.
+This curious feature provides additional flexibility, enabling [fake type providers](https://meta.plasm.us/posts/2013/07/11/fake-type-providers-part-2/), [extended vanilla materialization](https://github.com/scala/improvement-proposals/pull/18), [fundep materialization]({{ site.baseurl }}/overviews/macros/implicits.html#fundep-materialization) and [extractor macros](https://github.com/scala/scala/commit/84a335916556cb0fe939d1c51f27d80d9cf980dc), but it also sacrifices clarity - both for humans and for machines.
To concretize the crucial distinction between macros that behave just like normal methods and macros that refine their return types, we introduce the notions of blackbox macros and whitebox macros. Macros that faithfully follow their type signatures are called **blackbox macros** as their implementations are irrelevant to understanding their behaviour (could be treated as black boxes). Macros that can't have precise signatures in Scala's type system are called **whitebox macros** (whitebox def macros do have signatures, but these signatures are only approximations).
-We recognize the importance of both blackbox and whitebox macros, however we feel more confidence in blackbox macros, because they are easier to explain, specify and support. Therefore our plans to standardize macros only include blackbox macros. Later on, we might also include whitebox macros into our plans, but it's too early to tell.
+We recognize the importance of both blackbox and whitebox macros; however, we have more confidence in blackbox macros, because they are easier to explain, specify and support. Therefore, our plans to standardize macros only include blackbox macros. Later on, we might also include whitebox macros in our plans, but it's too early to tell.
## Codifying the distinction
@@ -48,7 +48,7 @@ Blackbox def macros are treated differently from def macros of Scala 2.10. The f
1. When an application of a blackbox macro expands into tree `x`, the expansion is wrapped into a type ascription `(x: T)`, where `T` is the declared return type of the blackbox macro with type arguments and path dependencies applied in consistency with the particular macro application being expanded. This invalidates blackbox macros as an implementation vehicle of [type providers](https://meta.plasm.us/posts/2013/07/11/fake-type-providers-part-2/).
1. When an application of a blackbox macro still has undetermined type parameters after Scala's type inference algorithm has finished working, these type parameters are inferred forcedly, in exactly the same manner as type inference happens for normal methods. This makes it impossible for blackbox macros to influence type inference, prohibiting [fundep materialization]({{ site.baseurl }}/overviews/macros/implicits.html#fundep-materialization).
-1. When an application of a blackbox macro is used as an implicit candidate, no expansion is performed until the macro is selected as the result of the implicit search. This makes it impossible to [dynamically calculate availability of implicit macros]({{ site.baseurl }}/sips/source-locations.html).
+1. When an application of a blackbox macro is used as an implicit candidate, no expansion is performed until the macro is selected as the result of the implicit search. This makes it impossible to [dynamically calculate availability of implicit macros](https://github.com/scala/improvement-proposals/pull/18).
1. When an application of a blackbox macro is used as an extractor in a pattern match, it triggers an unconditional compiler error, preventing customizations of pattern matching implemented with macros.
Whitebox def macros work exactly like def macros used to work in Scala 2.10. No restrictions of any kind get applied, so everything that could be done with macros in 2.10 should be possible in 2.11 and 2.12.
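+
+To make the distinction concrete, here is a minimal sketch of both flavors as written in Scala 2.11+ (the macro names are made up for illustration): a blackbox implementation takes a `blackbox.Context`, a whitebox implementation takes a `whitebox.Context`, and that choice alone determines whether the restrictions above apply.
+
+    import scala.language.experimental.macros
+    import scala.reflect.macros.{blackbox, whitebox}
+
+    object Macros {
+      // Blackbox: the expansion is ascribed to the declared return type (Int).
+      def twice(x: Int): Int = macro twiceImpl
+      def twiceImpl(c: blackbox.Context)(x: c.Expr[Int]): c.Expr[Int] = {
+        import c.universe._
+        reify(x.splice + x.splice)
+      }
+
+      // Whitebox: the declared return type (Any) is only an approximation;
+      // the expansion keeps its more precise type at the call site.
+      def twiceRefined(x: Int): Any = macro twiceRefinedImpl
+      def twiceRefinedImpl(c: whitebox.Context)(x: c.Tree): c.Tree = {
+        import c.universe._
+        q"$x + $x"
+      }
+    }
+
+With these definitions compiled separately, `val n: Int = Macros.twiceRefined(21)` typechecks only because the macro is whitebox; the blackbox `twice` behaves exactly like an ordinary method from `Int` to `Int`.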
diff --git a/_overviews/macros/bundles.md b/_overviews/macros/bundles.md
index 57f380b7f6..255b504391 100644
--- a/_overviews/macros/bundles.md
+++ b/_overviews/macros/bundles.md
@@ -18,7 +18,7 @@ Macro bundles are a feature of Scala 2.11.x and Scala 2.12.x. Macro bundles are
## Macro bundles
In Scala 2.10.x, macro implementations are represented with functions. Once the compiler sees an application of a macro definition,
-it calls the macro implementation - as simple as that. However practice shows that just functions are often not enough due to the
+it calls the macro implementation - as simple as that. However, practice shows that just functions are often not enough due to the
following reasons:
1. Being limited to functions makes modularizing complex macros awkward. It's quite typical to see macro logic concentrate in helper
diff --git a/_overviews/macros/implicits.md b/_overviews/macros/implicits.md
index 1f660d6ec9..04852d0f2d 100644
--- a/_overviews/macros/implicits.md
+++ b/_overviews/macros/implicits.md
@@ -140,7 +140,7 @@ macro, which synthesizes `Iso[C, L]`, scalac will helpfully infer `L` as `Nothin
As demonstrated by [https://github.com/scala/scala/pull/2499](https://github.com/scala/scala/pull/2499), the solution to the outlined
problem is extremely simple and elegant.
-In 2.10 we don't allow macro applications to expand until all their type arguments are inferred. However we don't have to do that.
+In 2.10 we don't allow macro applications to expand until all their type arguments are inferred. However, we don't have to do that.
The typechecker can infer as much as it possibly can (e.g. in the running example `C` will be inferred to `Foo` and
`L` will remain uninferred) and then stop. After that we expand the macro and then proceed with type inference using the type of the
expansion to help the typechecker with previously undetermined type arguments. This is how it's implemented in Scala 2.11.0.
diff --git a/_overviews/macros/overview.md b/_overviews/macros/overview.md
index 87cf64ee8b..c66b1c6d48 100644
--- a/_overviews/macros/overview.md
+++ b/_overviews/macros/overview.md
@@ -223,15 +223,15 @@ The walkthrough in this guide uses the simplest possible command-line compilatio
* Macros needs scala-reflect.jar in library dependencies.
* The separate compilation restriction requires macros to be placed in a separate project.
-### Using macros with Scala IDE or Intellij IDEA
+### Using macros with Intellij IDEA
-Both in Scala IDE and in Intellij IDEA macros are known to work fine, given they are moved to a separate project.
+In Intellij IDEA, macros are known to work fine, given they are moved to a separate project.
### Debugging macros
Debugging macros (i.e. the logic that drives macro expansion) is fairly straightforward. Since macros are expanded within the compiler, all that you need is to run the compiler under a debugger. To do that, you need to: 1) add all (!) the libraries from the lib directory in your Scala home (which include such jar files as `scala-library.jar`, `scala-reflect.jar` and `scala-compiler.jar`) to the classpath of your debug configuration, 2) set `scala.tools.nsc.Main` as an entry point, 3) provide the `-Dscala.usejavacp=true` system property for the JVM (very important!), 4) set command-line arguments for the compiler as `-cp Test.scala`, where `Test.scala` stands for a test file containing macro invocations to be expanded. After all that is done, you should be able to put a breakpoint inside your macro implementation and launch the debugger.
-What really requires special support in tools is debugging the results of macro expansion (i.e. the code that is generated by a macro). Since this code is never written out manually, you cannot set breakpoints there, and you won't be able to step through it. Scala IDE and Intellij IDEA teams will probably add support for this in their debuggers at some point, but for now the only way to debug macro expansions are diagnostic prints: `-Ymacro-debug-lite` (as described below), which prints out the code emitted by macros, and println to trace the execution of the generated code.
+What really requires special support in tools is debugging the results of macro expansion (i.e. the code that is generated by a macro). Since this code is never written out manually, you cannot set breakpoints there, and you won't be able to step through it. The Intellij IDEA team will probably add support for this in their debugger at some point, but for now the only way to debug macro expansions is diagnostic prints: `-Ymacro-debug-lite` (as described below), which prints out the code emitted by macros, and `println` to trace the execution of the generated code.
### Inspecting generated code
diff --git a/_overviews/macros/paradise.md b/_overviews/macros/paradise.md
index 14e61dd9a5..72637b0854 100644
--- a/_overviews/macros/paradise.md
+++ b/_overviews/macros/paradise.md
@@ -20,7 +20,7 @@ Macro paradise is a plugin for several versions of Scala compilers.
It is designed to reliably work with production releases of scalac,
making latest macro developments available way before they end up in future versions Scala.
Refer to the roadmap for [the list of supported features and versions](roadmap.html)
-and visit [the paradise announcement](https://scalamacros.org/news/2013/08/07/roadmap-for-macro-paradise.html)
+and visit [the paradise announcement](https://github.com/scalamacros/scalamacros.github.com/blob/5904f7ef88a439c668204b4bf262835e89fb13cb/news/_posts/2013-08-07-roadmap-for-macro-paradise.html)
to learn more about our support guarantees.
~/210x $ scalac -Xplugin:paradise_*.jar -Xshow-phases
@@ -35,7 +35,7 @@ to learn more about our support guarantees.
Some features in macro paradise bring a compile-time dependency on the macro paradise plugin,
some features do not, however none of those features need macro paradise at runtime.
-Proceed to the [the feature list](roadmap.html) document for more information.
+Proceed to [the feature list](roadmap.html) document for more information.
Consult [https://github.com/scalamacros/sbt-example-paradise](https://github.com/scalamacros/sbt-example-paradise)
for an end-to-end example, but in a nutshell working with macro paradise is as easy as adding the following two lines
diff --git a/_overviews/macros/typemacros.md b/_overviews/macros/typemacros.md
index 691b2f5e83..773819fa6d 100644
--- a/_overviews/macros/typemacros.md
+++ b/_overviews/macros/typemacros.md
@@ -12,7 +12,7 @@ permalink: /overviews/macros/:title.html
Type macros used to be available in previous versions of ["Macro Paradise"](paradise.html),
but are not supported anymore in macro paradise 2.0.
-Visit [the paradise 2.0 announcement](https://scalamacros.org/news/2013/08/05/macro-paradise-2.0.0-snapshot.html)
+Visit [the paradise 2.0 announcement](https://github.com/scalamacros/scalamacros.github.com/blob/5904f7ef88a439c668204b4bf262835e89fb13cb/news/_posts/2013-08-05-macro-paradise-2.0.0-snapshot.html)
for an explanation and suggested migration strategy.
## Intuition
@@ -84,7 +84,7 @@ In Scala programs type macros can appear in one of five possible roles: type rol
To put it in a nutshell, expansion of a type macro replace the usage of a type macro with a tree it returns. To find out whether an expansion makes sense, mentally replace some usage of a macro with its expansion and check whether the resulting program is correct.
-For example, a type macro used as `TM(2)(3)` in `class C extends TM(2)(3)` can expand into `Apply(Ident(TypeName("B")), List(Literal(Constant(2))))`, because that would result in `class C extends B(2)`. However the same expansion wouldn't make sense if `TM(2)(3)` was used as a type in `def x: TM(2)(3) = ???`, because `def x: B(2) = ???` (given that `B` itself is not a type macro; if it is, it will be recursively expanded and the result of the expansion will determine validity of the program).
+For example, a type macro used as `TM(2)(3)` in `class C extends TM(2)(3)` can expand into `Apply(Ident(TypeName("B")), List(Literal(Constant(2))))`, because that would result in `class C extends B(2)`. However, the same expansion wouldn't make sense if `TM(2)(3)` was used as a type in `def x: TM(2)(3) = ???`, because `def x: B(2) = ???` would be ill-formed (given that `B` itself is not a type macro; if it is, it will be recursively expanded and the result of the expansion will determine validity of the program).
## Tips and tricks
diff --git a/_overviews/macros/typeproviders.md b/_overviews/macros/typeproviders.md
index 175126eab1..1e90c17003 100644
--- a/_overviews/macros/typeproviders.md
+++ b/_overviews/macros/typeproviders.md
@@ -85,7 +85,7 @@ captures the essence of the generated classes, providing a statically typed inte
This approach to type providers is quite neat, because it can be used with production versions of Scala, however
it has performance problems caused by the fact that Scala emits reflective calls when compiling accesses to members
-of structural types. There are several strategies of dealing with that, but this margin is too narrow to contain them
+of structural types. There are several strategies of dealing with that, but this margin is too narrow to contain them,
so I refer you to an amazing blog series by Travis Brown for details: [post 1](https://meta.plasm.us/posts/2013/06/19/macro-supported-dsls-for-schema-bindings/), [post 2](https://meta.plasm.us/posts/2013/07/11/fake-type-providers-part-2/), [post 3](https://meta.plasm.us/posts/2013/07/12/vampire-methods-for-structural-types/).
## Public type providers
diff --git a/_overviews/macros/untypedmacros.md b/_overviews/macros/untypedmacros.md
index cfceefb78c..cccb85729b 100644
--- a/_overviews/macros/untypedmacros.md
+++ b/_overviews/macros/untypedmacros.md
@@ -12,13 +12,13 @@ permalink: /overviews/macros/:title.html
Untyped macros used to be available in previous versions of ["Macro Paradise"](paradise.html),
but are not supported anymore in macro paradise 2.0.
-Visit [the paradise 2.0 announcement](https://scalamacros.org/news/2013/08/05/macro-paradise-2.0.0-snapshot.html)
+Visit [the paradise 2.0 announcement](https://github.com/scalamacros/scalamacros.github.com/blob/5904f7ef88a439c668204b4bf262835e89fb13cb/news/_posts/2013-08-05-macro-paradise-2.0.0-snapshot.html)
for an explanation and suggested migration strategy.
## Intuition
Being statically typed is great, but sometimes that is too much of a burden. Take for example, the latest experiment of Alois Cochard with
-implementing enums using type macros - the so called [Enum Paradise](https://github.com/aloiscochard/enum-paradise). Here's how Alois has
+implementing enums using type macros - the so-called [Enum Paradise](https://github.com/aloiscochard/enum-paradise). Here's how Alois has
to write his type macro, which synthesizes an enumeration module from a lightweight spec:
object Days extends Enum('Monday, 'Tuesday, 'Wednesday...)
@@ -56,9 +56,9 @@ of the linked JIRA issue. Untyped macros make the full power of textual abstract
unit test provides details on this matter.
If a macro has one or more untyped parameters, then when typing its expansions, the typechecker will do nothing to its arguments
-and will pass them to the macro untyped. Even if some of the parameters do have type annotations, they will currently be ignored. This
+and will pass them to the macro untyped. Even if some parameters do have type annotations, they will currently be ignored. This
is something we plan on improving: [SI-6971](https://issues.scala-lang.org/browse/SI-6971). Since arguments aren't typechecked, you
-also won't having implicits resolved and type arguments inferred (however, you can do both with `c.typeCheck` and `c.inferImplicitValue`).
+also won't have implicits resolved and type arguments inferred (however, you can do both with `c.typeCheck` and `c.inferImplicitValue`).
Explicitly provided type arguments will be passed to the macro as is. If type arguments aren't provided, they will be inferred as much as
possible without typechecking the value arguments and passed to the macro in that state. Note that type arguments still get typechecked, but
@@ -69,6 +69,6 @@ the first typecheck of a def macro expansion is performed against the return typ
against the expected type of the expandee. More information can be found at Stack Overflow: [Static return type of Scala macros](https://stackoverflow.com/questions/13669974/static-return-type-of-scala-macros). Type macros never underwent the first typecheck, so
nothing changes for them (and you won't be able to specify any return type for a type macro to begin with).
-Finally the untyped macros patch enables using `c.Tree` instead of `c.Expr[T]` everywhere in signatures of macro implementations.
+Finally, the untyped macros patch enables using `c.Tree` instead of `c.Expr[T]` everywhere in signatures of macro implementations.
Both for parameters and return types, all four combinations of untyped/typed in macro def and tree/expr in macro impl are supported.
Check our unit tests for more information: test/files/run/macro-untyped-conformance.
diff --git a/_overviews/macros/usecases.md b/_overviews/macros/usecases.md
index 335d3f6bd5..eed399f3b1 100644
--- a/_overviews/macros/usecases.md
+++ b/_overviews/macros/usecases.md
@@ -19,12 +19,12 @@ to the realm of possible. Both commercial and research users of Scala use macros
At EPFL we are leveraging macros to power our research. Lightbend also employs macros in a number of projects.
Macros are also popular in the community and have already given rise to a number of interesting applications.
-The recent talk ["What Are Macros Good For?"](https://scalamacros.org/paperstalks/2014-02-04-WhatAreMacrosGoodFor.pdf)
+The recent talk ["What Are Macros Good For?"](https://github.com/scalamacros/scalamacros.github.com/blob/5904f7ef88a439c668204b4bf262835e89fb13cb/paperstalks/2014-02-04-WhatAreMacrosGoodFor.pdf)
describes and systemizes uses that macros found among Scala 2.10 users. The thesis of the talk is that macros are good for
code generation, static checking and DSLs, illustrated with a number of examples from research and industry.
We have also published a paper in the Scala'13 workshop,
-["Scala Macros: Let Our Powers Combine!"](https://scalamacros.org/paperstalks/2013-04-22-LetOurPowersCombine.pdf),
+["Scala Macros: Let Our Powers Combine!"](https://github.com/scalamacros/scalamacros.github.com/blob/5904f7ef88a439c668204b4bf262835e89fb13cb/paperstalks/2013-04-22-LetOurPowersCombine.pdf),
covering the state of the art of macrology in Scala 2.10 from a more academic point of view.
In the paper we show how the rich syntax and static types of Scala synergize with macros and
explore how macros enable new and unique ways to use pre-existing language features.
diff --git a/_overviews/parallel-collections/architecture.md b/_overviews/parallel-collections/architecture.md
index 2b64486f63..f98b628210 100644
--- a/_overviews/parallel-collections/architecture.md
+++ b/_overviews/parallel-collections/architecture.md
@@ -87,13 +87,13 @@ Scala's parallel collection's draws much inspiration from the design of
Scala's (sequential) collections library-- as a matter of fact, it mirrors the
regular collections framework's corresponding traits, as shown below.
-[]({{ site.baseurl }}/resources/images/parallel-collections-hierarchy.png)
+[]({{ site.baseurl }}/resources/images/parallel-collections-hierarchy.png)
Hierarchy of Scala's Collections and Parallel Collections Libraries
The goal is of course to integrate parallel collections as tightly as possible
-with sequential collections, so as to allow for straightforward substitution
+with sequential collections, to allow for straightforward substitution
of sequential and parallel collections.
In order to be able to have a reference to a collection which may be either
diff --git a/_overviews/parallel-collections/concrete-parallel-collections.md b/_overviews/parallel-collections/concrete-parallel-collections.md
index 2885e72bc9..428f142918 100644
--- a/_overviews/parallel-collections/concrete-parallel-collections.md
+++ b/_overviews/parallel-collections/concrete-parallel-collections.md
@@ -84,10 +84,10 @@ is an ordered sequence of elements equally spaced apart. A parallel range is
created in a similar way as the sequential
[Range](https://www.scala-lang.org/api/{{ site.scala-212-version }}/scala/collection/immutable/Range.html):
- scala> 1 to 3 par
+ scala> (1 to 3).par
res0: scala.collection.parallel.immutable.ParRange = ParRange(1, 2, 3)
- scala> 15 to 5 by -2 par
+ scala> (15 to 5 by -2).par
res1: scala.collection.parallel.immutable.ParRange = ParRange(15, 13, 11, 9, 7, 5)
Just as sequential ranges have no builders, parallel ranges have no
@@ -146,7 +146,7 @@ and
scala> val phs = scala.collection.parallel.immutable.ParHashSet(1 until 1000: _*)
phs: scala.collection.parallel.immutable.ParHashSet[Int] = ParSet(645, 892, 69, 809, 629, 365, 138, 760, 101, 479,...
- scala> phs map { x => x * x } sum
+ scala> phs.map(x => x * x).sum
res0: Int = 332833500
Similar to parallel hash tables, parallel hash trie
diff --git a/_overviews/parallel-collections/custom-parallel-collections.md b/_overviews/parallel-collections/custom-parallel-collections.md
index 7ea4330c62..88307d3910 100644
--- a/_overviews/parallel-collections/custom-parallel-collections.md
+++ b/_overviews/parallel-collections/custom-parallel-collections.md
@@ -72,10 +72,10 @@ Finally, methods `split` and `psplit` are used to create splitters which
traverse subsets of the elements of the current splitter. Method `split` has
the contract that it returns a sequence of splitters which traverse disjoint,
non-overlapping subsets of elements that the current splitter traverses, none
-of which is empty. If the current splitter has 1 or less elements, then
+of which is empty. If the current splitter has 1 or fewer elements, then
`split` just returns a sequence of this splitter. Method `psplit` has to
return a sequence of splitters which traverse exactly as many elements as
-specified by the `sizes` parameter. If the `sizes` parameter specifies less
+specified by the `sizes` parameter. If the `sizes` parameter specifies fewer
elements than the current splitter, then an additional splitter with the rest
of the elements is appended at the end. If the `sizes` parameter requires more
elements than there are remaining in the current splitter, it will append an
@@ -112,9 +112,9 @@ may be suboptimal - producing a string again from the vector after filtering may
## Parallel collections with combiners
-Lets say we want to `filter` the characters of the parallel string, to get rid
+Let's say we want to `filter` the characters of the parallel string, to get rid
of commas for example. As noted above, calling `filter` produces a parallel
-vector and we want to obtain a parallel string (since some interface in the
+vector, and we want to obtain a parallel string (since some interface in the
API might require a sequential string).
To avoid this, we have to write a combiner for the parallel string collection.
@@ -134,7 +134,7 @@ is internally used by `filter`.
protected[this] override def newCombiner: Combiner[Char, ParString] = new ParStringCombiner
Next we define the `ParStringCombiner` class. Combiners are subtypes of
-builders and they introduce an additional method called `combine`, which takes
+builders, and they introduce an additional method called `combine`, which takes
another combiner as an argument and returns a new combiner which contains the
elements of both the current and the argument combiner. The current and the
argument combiner are invalidated after calling `combine`. If the argument is
@@ -195,7 +195,7 @@ live with this sequential bottleneck.
There are no predefined recipes-- it depends on the data-structure at
hand, and usually requires a bit of ingenuity on the implementer's
-part. However there are a few approaches usually taken:
+part. However, there are a few approaches usually taken:
1. Concatenation and merge. Some data-structures have efficient
implementations (usually logarithmic) of these operations.
diff --git a/_overviews/parallel-collections/overview.md b/_overviews/parallel-collections/overview.md
index 11fab887a6..1ced205636 100644
--- a/_overviews/parallel-collections/overview.md
+++ b/_overviews/parallel-collections/overview.md
@@ -12,10 +12,12 @@ permalink: /overviews/parallel-collections/:title.html
**Aleksandar Prokopec, Heather Miller**
+If you're using Scala 2.13+ and want to use Scala's parallel collections, you'll have to add a separate module as a dependency, as described [here](https://github.com/scala/scala-parallel-collections).
+
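+For example, a minimal sketch with sbt (the version number below is illustrative; check the linked repository for the current one):
+
+    // build.sbt
+    libraryDependencies +=
+      "org.scala-lang.modules" %% "scala-parallel-collections" % "1.0.4"
+
+    // in your code: this import brings back the .par extension on the standard collections
+    import scala.collection.parallel.CollectionConverters._
+
+    val squares = (1 to 1000).par.map(x => x * x)
+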
## Motivation
Amidst the shift in recent years by processor manufacturers from single to
-multi-core architectures, academia and industry alike have conceded that
+multicore architectures, academia and industry alike have conceded that
_Popular Parallel Programming_ remains a formidable challenge.
Parallel collections were included in the Scala standard library in an effort
@@ -63,7 +65,7 @@ from Scala's (sequential) collection library, including:
In addition to a common architecture, Scala's parallel collections library
additionally shares _extensibility_ with the sequential collections library.
That is, like normal sequential collections, users can integrate their own
-collection types and automatically inherit all of the predefined (parallel)
+collection types and automatically inherit all the predefined (parallel)
operations available on the other parallel collections in the standard
library.
@@ -153,13 +155,13 @@ sections of this guide.
While the parallel collections abstraction feels very much the same as normal
sequential collections, it's important to note that its semantics differs,
-especially with regards to side-effects and non-associative operations.
+especially in regard to side-effects and non-associative operations.
In order to see how this is the case, first, we visualize _how_ operations are
performed in parallel. Conceptually, Scala's parallel collections framework
parallelizes an operation on a parallel collection by recursively "splitting"
a given collection, applying an operation on each partition of the collection
-in parallel, and re-"combining" all of the results that were completed in
+in parallel, and re-"combining" all the results that were completed in
parallel.
These concurrent, and "out-of-order" semantics of parallel collections lead to
@@ -174,7 +176,7 @@ Given the _concurrent_ execution semantics of the parallel collections
framework, operations performed on a collection which cause side-effects
should generally be avoided, in order to maintain determinism. A simple
example is by using an accessor method, like `foreach` to increment a `var`
-declared outside of the closure which is passed to `foreach`.
+declared outside the closure which is passed to `foreach`.
scala> var sum = 0
sum: Int = 0
diff --git a/_overviews/parallel-collections/performance.md b/_overviews/parallel-collections/performance.md
index b64b95d7d7..2f7aa27f2f 100644
--- a/_overviews/parallel-collections/performance.md
+++ b/_overviews/parallel-collections/performance.md
@@ -24,7 +24,7 @@ little optimization. On most modern JVMs, once the program bytecode is run, it
is converted into machine code for the computer architecture on which it is
being run. This is called the just-in-time compilation. The level of code
optimization is, however, low with just-in-time compilation, since it has to
-be fast. To avoid recompiling, the so called HotSpot compiler only optimizes
+be fast. To avoid recompiling, the so-called HotSpot compiler only optimizes
parts of the code which are executed frequently. What this means for the
benchmark writer is that a program might have different performance each time
it is run. Executing the same piece of code (e.g. a method) multiple times in
@@ -45,7 +45,7 @@ garbage collections.
One common cause of a performance deterioration is also boxing and unboxing
that happens implicitly when passing a primitive type as an argument to a
generic method. At runtime, primitive types are converted to objects which
-represent them, so that they could be passed to a method with a generic type
+represent them, so that they could be passed to a method with a type
parameter. This induces extra allocations and is slower, also producing
additional garbage on the heap.
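+
+As a tiny illustration (the method names are made up), passing an `Int` to a generic method boxes the argument, whereas a monomorphic method takes the primitive directly:
+
+    def describe[T](x: T): String = x.toString     // T erases to Object...
+    describe(42)                                    // ...so the Int 42 is boxed to java.lang.Integer here
+
+    def describeInt(x: Int): String = x.toString   // monomorphic variant: the primitive is passed as-is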
@@ -81,13 +81,13 @@ For proper benchmark examples, you can see the source code inside [Scala library
This is a question commonly asked. The answer is somewhat involved.
-The size of the collection at which the parallelization pays of really
+The size of the collection at which the parallelization pays off really
depends on many factors. Some of them, but not all, include:
- Machine architecture. Different CPU types have different
performance and scalability characteristics. Orthogonal to that,
whether the machine is multicore or has multiple processors
- communicating via motherboard.
+ communicating via the motherboard.
- JVM vendor and version. Different VMs apply different
optimizations to the code at runtime. They implement different memory
management and synchronization techniques. Some do not support
@@ -122,6 +122,6 @@ depends on many factors. Some of them, but not all, include:
2. [Dynamic compilation and performance measurement, Brian Goetz][2]
3. [Scala library benchmarks][3]
- [1]: https://www.ibm.com/developerworks/java/library/j-jtp02225/index.html "flawed-benchmark"
- [2]: https://www.ibm.com/developerworks/library/j-jtp12214/ "dynamic-compilation"
+ [1]: https://web.archive.org/web/20210305174819/https://www.ibm.com/developerworks/java/library/j-jtp02225/index.html "flawed-benchmark"
+ [2]: https://web.archive.org/web/20210228055617/http://www.ibm.com/developerworks/library/j-jtp12214/ "dynamic-compilation"
[3]: https://github.com/scala/scala/tree/2.12.x/test/benchmarks
diff --git a/_overviews/plugins/index.md b/_overviews/plugins/index.md
index 13f1666a2d..0b1ea54d55 100644
--- a/_overviews/plugins/index.md
+++ b/_overviews/plugins/index.md
@@ -17,6 +17,13 @@ compiler. It does not go into depth on how to make your plugin
actually do something useful, but just shows the basics needed to
write a plugin and hook it into the Scala compiler.
+## You can read, but you can also watch TV
+
+The content of this guide overlaps substantially with Seth Tisue's
+talk "Scala Compiler Plugins 101" ([32 minute video](https://www.youtube.com/watch?v=h5NZjuxS5Qo)).
+Although the talk is from April 2018, nearly all of the information
+in it still applies (as of November 2020).
+
## When to write a plugin
Plugins let you modify the behavior of the Scala compiler without
@@ -28,25 +35,23 @@ You should not actually need to modify the Scala compiler very
frequently, because Scala's light, flexible syntax will frequently
allow you to provide a better solution using a clever library.
-There are some times, though, where a compiler modification is the
+There are some cases, though, where a compiler modification is the
best choice even for Scala. Popular compiler plugins (as of 2018)
include:
- Alternate compiler back ends such as [Scala.js](https://www.scala-js.org), [Scala Native](http://scala-native.org), and
[Fortify SCA for Scala](https://developer.lightbend.com/docs/fortify/current/).
- Linters such as [Wartremover](https://www.wartremover.org) and [Scapegoat](https://github.com/sksamuel/scapegoat).
-- Plugins that support reformatting and other changes
- to source code, such as [scalafix](https://scalacenter.github.io/scalafix/) and [scalafmt](https://scalameta.org/scalafmt/) (which are
- built on the [semanticdb](https://scalameta.org/docs/semanticdb/guide.html) and [scalahost](https://github.com/Duhemm/scalahost) compiler plugins).
- Plugins that alter Scala's syntax, such as [kind-projector](https://github.com/typelevel/kind-projector).
- Plugins that alter Scala's behavior around errors and warnings,
- such as [silencer](https://github.com/ghik/silencer).
+ such as [silencer](https://github.com/ghik/silencer), [splain](https://github.com/tek/splain) and [clippy](https://scala-clippy.org/).
- Plugins that analyze the structure of source code, such as
- [Sculpt](https://github.com/lightbend/scala-sculpt) and [acyclic](https://github.com/lihaoyi/acyclic).
+ [Sculpt](https://github.com/lightbend/scala-sculpt), [acyclic](https://github.com/lihaoyi/acyclic) and [graph-buddy](https://github.com/VirtusLab/graphbuddy).
- Plugins that instrument user code to collect information,
such as the code coverage tool [scoverage](https://github.com/scoverage/scalac-scoverage-plugin).
-- Plugins that add metaprogramming facilities to Scala,
- such as [Macro Paradise](https://github.com/scalamacros/paradise).
+- Plugins that enable tooling. One such plugin is [semanticdb](https://scalameta.org/docs/semanticdb/guide.html), which enables [scalafix](https://scalacenter.github.io/scalafix/) (a well-known refactoring and linting tool) to do its work. Another one is [Macro Paradise](https://github.com/scalamacros/paradise) (only needed for Scala 2.12).
+- Plugins that modify existing Scala constructs in user code,
+ such as [better-monadic-for](https://github.com/oleg-py/better-monadic-for) and [better-tostring](https://github.com/polyvariant/better-tostring).
- Plugins that add entirely new constructs to Scala by
restructuring user code, such as [scala-continuations](https://github.com/scala/scala-continuations).
@@ -68,6 +73,7 @@ All of this is then packaged in a JAR file.
To use the plugin, a user adds the JAR file to their compile-time
classpath and enables it by invoking `scalac` with `-Xplugin:...`.
+(Some build tools provide shortcuts for this; see below.)
All of this will be described in more detail below.
@@ -147,6 +153,12 @@ aspects of note.
desire on the given compilation unit. Usually this involves
examining the trees within the unit and doing some transformation on
the tree.
+- The pattern match inside the body of `apply` shows one way of
+ detecting certain tree shapes in user code.
+ (Quasiquotes are another way.) `Apply` denotes a method call,
+ and `Select` denotes the "selection" of a member, such as `a.b`.
+ The details of tree processing are out of scope for this document,
+ but see "Going further", below, for links to further documentation.
The `runsAfter` method gives the plugin author control over when the
phase is executed. As seen above, it is expected to return a list of
@@ -187,6 +199,11 @@ with that file plus your compiled code:
cp scalac-plugin.xml classes
(cd classes; jar cf ../divbyzero.jar .)
+That's how it works with no build tool. If you are using sbt to build
+your plugin, then the XML file goes in `src/main/resources`.
+
+## Using a plugin with scalac
+
Now you can use your plugin with `scalac` by adding the `-Xplugin:`
option:
@@ -196,23 +213,76 @@ option:
^
one error found
+## Publishing your plugin
+
When you are happy with how the plugin behaves, you may wish to
publish the JAR to a Maven or Ivy repository where it can be resolved
-by a build tool.
+by a build tool. (For testing purposes, you can also publish it to
+your local machine only. In sbt, this is accomplished with
+`publishLocal`.)
-sbt, for example, provides an `addCompilerPlugin` method you can
+In most respects, compiler plugins are ordinary Scala libraries,
+so publishing a plugin is like publishing any library.
+See the [Library Author Guide]({{site.baseurl}}/overviews/contributors/index.html)
+and/or your build tool's documentation on publishing.
+
+## Using a plugin from sbt
+
+To make it convenient for end users to use your plugin once it has
+been published, sbt provides an `addCompilerPlugin` method you can
call in your build definition, e.g.:
- addCompilerPlugin("org.divbyzero" % "divbyzero" % "1.0")
+ addCompilerPlugin("org.divbyzero" %% "divbyzero" % "1.0")
+
+`addCompilerPlugin` performs multiple actions. It adds the JAR to the
+classpath (the compilation classpath only, not the runtime classpath)
+via `libraryDependencies`, and it also customizes `scalacOptions` to
+enable the plugin using `-Xplugin`.
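+
+As a rough sketch of what this amounts to (sbt's `compilerPlugin` helper
+marks the dependency as a compiler plugin so that the corresponding
+`-Xplugin` flag is passed to scalac; consult the sbt manual linked
+below for the authoritative details):
+
+    // build.sbt: approximately what addCompilerPlugin does for you
+    libraryDependencies += compilerPlugin("org.divbyzero" %% "divbyzero" % "1.0")
+
+    // plugin-specific options can then be passed explicitly, e.g.
+    scalacOptions += "-P:divbyzero:verbose:true"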
+
+For more details, see [Compiler Plugin
+Support](https://www.scala-sbt.org/1.x/docs/Compiler-Plugins.html) in
+the sbt manual.
+
+## Using your plugin in Mill
-Note however that `addCompilerPlugin` only adds the JAR to the
-compilation classpath; it doesn't actually enable the plugin. To
-do that, you must customize `scalacOptions` to include the appropriate
-`-Xplugin` call. To shield users from having to know this, it's
-relatively common for compiler plugin authors to also write an
-accompanying sbt plugin that takes of customizing the classpath and
-compiler options appropriately. Then using your plugin only requires
-adding an `addSbtPlugin(...)` call to `project/plugins.sbt`.
+To use a scalac compiler plugin in your Mill project, you can override
+the `scalacPluginIvyDeps` target to add your plugin's dependency coordinates.
+
+Plugin options can be specified in `scalacOptions`.
+
+Example:
+
+```scala
+// build.sc
+import mill._, mill.scalalib._
+
+object foo extends ScalaModule {
+ // Add the compiler plugin divbyzero in version 1.0
+ def scalacPluginIvyDeps = Agg(ivy"org.divbyzero:::divbyzero:1.0")
+ // Enable the `verbose` option of the divbyzero plugin
+ def scalacOptions = Seq("-P:divbyzero:verbose:true")
+ // other settings
+ // ...
+}
+```
+
+Note that compiler plugins are typically bound to the full version of
+the compiler, so you have to use `:::` (instead of the normal `::`)
+between the organization and the artifact name when declaring the
+dependency.
+
+For more information about plugin usage in Mill, please refer to the
+[Mill documentation for Scala compiler plugins](https://mill-build.org/mill/Scala_Module_Config.html#_scala_compiler_plugins).
+
+## Developing compiler plugins with an IDE
+
+The Scala compiler's internal use of path-dependent types may
+confuse some IDEs, such as IntelliJ. Correct plugin code may
+sometimes be highlighted as erroneous. The IDE is usually still
+useful under these circumstances, but remember to take its feedback
+with a grain of salt. If the error highlighting is distracting,
+the IDE may have a setting where you can disable it.
## Useful compiler options
@@ -317,9 +387,12 @@ behavior other than to print out its option.
## Going further
For the details on how to make your plugin accomplish some task, you
-must consult other documentation on compiler internals (such as the
-documentation on [Symbols, Trees, and Types]({{site.baseurl
-}}/overviews/reflection/symbols-trees-types.html).
+must consult other documentation on compiler internals. Relevant
+documents include:
+
+* [Symbols, Trees, and Types]({{site.baseurl}}/overviews/reflection/symbols-trees-types.html) is the single most important reference about the data structures used inside the compiler.
+* [Quasiquotes]({{site.baseurl}}/overviews/quasiquotes/intro.html) are useful for pattern matching on ASTs.
+ * The [syntax summary]({{site.baseurl}}/overviews/quasiquotes/syntax-summary.html) in the quasiquotes guide is a useful concordance between user-level syntax and AST node types.
It's also useful to look at other plugins and to study existing phases
within the compiler source code.
diff --git a/_overviews/quasiquotes/expression-details.md b/_overviews/quasiquotes/expression-details.md
index 62e810697d..6ef424fac1 100644
--- a/_overviews/quasiquotes/expression-details.md
+++ b/_overviews/quasiquotes/expression-details.md
@@ -16,7 +16,7 @@ permalink: /overviews/quasiquotes/:title.html
1. `Val`s, `Var`s and `Def`s without the right-hand side have it set to `q""`.
2. Abstract type definitions without bounds have them set to `q""`.
-3. `Try` expressions without a finally clause have it set to `q""`.
+3. `Try` expressions without a `finally` clause have it set to `q""`.
4. `Case` clauses without guards have them set to `q""`.
The default `toString` formats `q""` as ``.
@@ -58,13 +58,13 @@ During deconstruction you can use [unlifting]({{ site.baseurl }}/overviews/quasi
scala> val q"${x: Int}" = q"1"
x: Int = 1
-Similarly it would work with all the literal types except `Null`. (see [standard unliftables]({{ site.baseurl }}/overviews/quasiquotes/unlifting.html#standard-unliftables))
+Similarly, it would work with all the literal types except `Null` (see [standard unliftables]({{ site.baseurl }}/overviews/quasiquotes/unlifting.html#standard-unliftables)).
## Identifier and Selection
Identifiers and member selections are two fundamental primitives that let you refer to other definitions. A combination of two of them is also known as a `RefTree`.
-Each term identifier is defined by its name and whether or not it is backquoted:
+Each term identifier is defined by its name and whether it is backquoted:
scala> val name = TermName("Foo")
name: universe.TermName = Foo
@@ -90,7 +90,7 @@ Apart from matching on identifiers with a given name, you can also extract their
Name ascription is important here because without it you'll get a pattern that is equivalent to regular pattern variable binding.
-Similarly you can create and extract member selections:
+Similarly, you can create and extract member selections:
scala> val member = TermName("bar")
member: universe.TermName = bar
@@ -112,7 +112,7 @@ This tree supports following variations:
So an unqualified `q"this"` is equivalent to `q"${tpnme.EMPTY}.this"`.
-Similarly for `super` we have:
+Similarly, for `super` we have:
scala> val q"$name.super[$qual].$field" = q"super.foo"
name: universe.TypeName =
@@ -145,7 +145,7 @@ This can be accomplished with the following:
type arguments: List(Int), value arguments: List(1, 2)
type arguments: List(), value arguments: List(scala.Symbol("a"), scala.Symbol("b"))
-As you can see, we were able to match both calls regardless as to whether or not a specific type application exists. This happens because the type application matcher extracts the empty list of type arguments if the tree is not an actual type application, making it possible to handle both situations uniformly.
+As you can see, we were able to match both calls regardless of whether a specific type application exists. This happens because the type application matcher extracts the empty list of type arguments if the tree is not an actual type application, making it possible to handle both situations uniformly.
It is recommended to always include type applications when you match on a function with type arguments, as they will be inserted by the compiler during type checking, even if the user didn't write them explicitly:
@@ -175,7 +175,7 @@ Here we might get one, or two subsequent value applications:
scala> val q"g(...$argss)" = q"g"
argss: List[List[universe.Tree]] = List()
-Therefore it's recommended to use more specific patterns that check that ensure the extracted `argss` is not empty.
+Therefore, it's recommended to use more specific patterns that ensure the extracted `argss` is not empty.
Similarly to type arguments, implicit value arguments are automatically inferred during type checking:
@@ -244,7 +244,7 @@ The *throw* expression is used to throw a throwable:
## Ascription
-Ascriptions let users annotate the type of an intermediate expression:
+Ascriptions let users annotate the type of an intermediate expression:
scala> val ascribed = q"(1 + 1): Int"
ascribed: universe.Typed = (1.$plus(1): Int)
@@ -469,7 +469,7 @@ There are three ways to create anonymous function:
scala> val f3 = q"(a: Int) => a + 1"
anon3: universe.Function = ((a: Int) => a.$plus(1))
-The first one uses the placeholder syntax. The second one names the function parameter but still relies on type inference to infer its type. An the last one explicitly defines the function parameter. Due to an implementation restriction, the second notation can only be used in parentheses or inside another expression. If you leave them out the you must specify the parameter types.
+The first one uses the placeholder syntax. The second one names the function parameter but still relies on type inference to infer its type. And the last one explicitly defines the function parameter. Due to an implementation restriction, the second notation can only be used in parentheses or inside another expression. If you leave them out, then you must specify the parameter types.
Parameters are represented as [Vals]({{ site.baseurl }}/overviews/quasiquotes/definition-details.html#val-and-var-definitions). If you want to programmatically create a `val` that should have its type inferred you need to use the [empty type]({{ site.baseurl }}/overviews/quasiquotes/type-details.html#empty-type):
@@ -576,7 +576,7 @@ Each enumerator in the comprehension can be expressed with the `fq"..."` interpo
scala> val `for-yield` = q"for (..$enums) yield y"
for-yield: universe.Tree
-Similarly one can deconstruct the `for-yield` back into a list of enumerators and body:
+Similarly, one can deconstruct the `for-yield` back into a list of enumerators and body:
scala> val q"for (..$enums) yield $body" = `for-yield`
enums: List[universe.Tree] = List(`<-`((x @ _), xs), `if`(x.$greater(0)), (y @ _) = x.$times(2))
@@ -609,10 +609,10 @@ Selectors are extracted as pattern trees that are syntactically similar to selec
1. Simple identifier selectors are represented as pattern bindings: `pq"bar"`
2. Renaming selectors are represented as thin arrow patterns: `pq"baz -> boo"`
-3. Unimport selectors are represented as thin arrows with a wildcard right hand side: `pq"poison -> _"`
+3. Unimport selectors are represented as thin arrows with a wildcard right-hand side: `pq"poison -> _"`
4. The wildcard selector is represented as a wildcard pattern: `pq"_"`
-Similarly one construct imports back from a programmatically created list of selectors:
+Similarly, one can construct imports back from a programmatically created list of selectors:
scala> val ref = q"a.b"
scala> val sels = List(pq"foo -> _", pq"_")
diff --git a/_overviews/quasiquotes/hygiene.md b/_overviews/quasiquotes/hygiene.md
index 1523655696..f08a9145de 100644
--- a/_overviews/quasiquotes/hygiene.md
+++ b/_overviews/quasiquotes/hygiene.md
@@ -12,7 +12,7 @@ permalink: /overviews/quasiquotes/:title.html
The notion of hygiene has been widely popularized by macro research in Scheme. A code generator is called hygienic if it ensures the absence of name clashes between regular and generated code, preventing accidental capture of identifiers. As numerous experience reports show, hygiene is of great importance to code generation, because name binding problems are often non-obvious and lack of hygiene might manifest itself in subtle ways.
-Sophisticated macro systems such as Racket's have mechanisms that make macros hygienic without any effort from macro writers. In Scala we don't have automatic hygiene - both of our codegen facilities (compile-time codegen with macros and runtime codegen with toolboxes) require programmers to handle hygiene manually. You must know how to work around the absence of hygiene, which is what this section is about.
+Sophisticated macro systems such as Racket's have mechanisms that make macros hygienic without any effort from macro writers. In Scala, we don't have automatic hygiene - both of our codegen facilities (compile-time codegen with macros and runtime codegen with toolboxes) require programmers to handle hygiene manually. You must know how to work around the absence of hygiene, which is what this section is about.
Preventing name clashes between regular and generated code means two things. First, we must ensure that, regardless of the context in which we put generated code, its meaning will not change (*referential transparency*). Second, we must make certain that regardless of the context in which we splice regular code, its meaning will not change (often called *hygiene in the narrow sense*). Let's see what can be done to this end on a series of examples.
@@ -56,7 +56,7 @@ Here we can see that the unqualified reference to `Map` does not respect our cus
MyMacro(2)
}
-If we compile both the macro and it's usage, we'll see that `println` will not be called when the application runs. This will happen because, after macro expansion, `Test.scala` will look like:
+If we compile both the macro and its usage, we'll see that `println` will not be called when the application runs. This will happen because, after macro expansion, `Test.scala` will look like:
// Expanded Test.scala
package example
diff --git a/_overviews/quasiquotes/intro.md b/_overviews/quasiquotes/intro.md
index 4ffba9e912..de31e4f162 100644
--- a/_overviews/quasiquotes/intro.md
+++ b/_overviews/quasiquotes/intro.md
@@ -90,7 +90,7 @@ Similarly, patterns and expressions are also not equivalent:
It's extremely important to use the right interpolator for the job in order to construct a valid syntax tree.
-Additionally there are two auxiliary interpolators that let you work with minor areas of scala syntax:
+Additionally, there are two auxiliary interpolators that let you work with minor areas of Scala syntax:
| Used for
----|-------------------------------------
diff --git a/_overviews/quasiquotes/lifting.md b/_overviews/quasiquotes/lifting.md
index b0f2f54910..e218eca1cf 100644
--- a/_overviews/quasiquotes/lifting.md
+++ b/_overviews/quasiquotes/lifting.md
@@ -24,7 +24,7 @@ This code runs successfully because `Int` is considered to be `Liftable` by defa
def apply(value: T): Tree
}
-Whenever there is an implicit value of `Liftable[T]` available, one can unquote `T` in quasiquotes. This design pattern is known as a *type class*. You can read more about it in ["Type Classes as Objects and Implicits"](https://ropas.snu.ac.kr/~bruno/papers/TypeClasses.pdf).
+Whenever there is an implicit value of `Liftable[T]` available, one can unquote `T` in quasiquotes. This design pattern is known as a *type class*. You can read more about it in ["Type Classes as Objects and Implicits"](https://infoscience.epfl.ch/record/150280/files/TypeClasses.pdf).
A number of data types that are supported natively by quasiquotes will never trigger the usage of a `Liftable` representation, even if it\'s available: subtypes of `Tree`, `Symbol`, `Name`, `Modifiers` and `FlagSet`.
diff --git a/_overviews/quasiquotes/setup.md b/_overviews/quasiquotes/setup.md
index b121d666d6..155ee8a32b 100644
--- a/_overviews/quasiquotes/setup.md
+++ b/_overviews/quasiquotes/setup.md
@@ -18,9 +18,9 @@ All examples and code snippets in this guide are run under in 2.11 REPL with one
scala> val universe: scala.reflect.runtime.universe.type = scala.reflect.runtime.universe
scala> import universe._
-A wildcard import from a universe (be it a runtime reflection universe like here or a compile-time universe provided in macros) is all that's needed to use quasiquotes. All of the examples will assume that import.
+A wildcard import from a universe (be it a runtime reflection universe like here or a compile-time universe provided in macros) is all that's needed to use quasiquotes. All the examples will assume that import.
-Additionally some examples that use `ToolBox` API will need a few more lines to get things rolling:
+Additionally, some examples that use the `ToolBox` API will need a few more lines to get things rolling:
scala> import scala.reflect.runtime.currentMirror
scala> import scala.tools.reflect.ToolBox
diff --git a/_overviews/quasiquotes/syntax-summary.md b/_overviews/quasiquotes/syntax-summary.md
index f38d08bf8c..2fd706a83a 100644
--- a/_overviews/quasiquotes/syntax-summary.md
+++ b/_overviews/quasiquotes/syntax-summary.md
@@ -120,7 +120,7 @@ permalink: /overviews/quasiquotes/:title.html
| Quasiquote | Type
------------------------------|-----------------------------------------------------------------------------------------------------------------------------|-----------
[Val][401] | `q"$mods val $tname: $tpt = $expr"` or `q"$mods val $pat = $expr"` | ValDef
- [Var][401] | `q"$mods var $tname: $tpt = $expr"` or `q"$mods val $pat = $expr"` | ValDef
+ [Var][401] | `q"$mods var $tname: $tpt = $expr"` or `q"$mods var $pat = $expr"` | ValDef
[Val Pattern][403] | `q"$mods val $pat: $tpt = $expr"` | Tree
[Var Pattern][404] | `q"$mods var $pat: $tpt = $expr"` | Tree
[Method][403] | `q"$mods def $tname[..$tparams](...$paramss): $tpt = $expr"` | DefDef
diff --git a/_overviews/quasiquotes/terminology.md b/_overviews/quasiquotes/terminology.md
index c68d1828ad..ce5cf7eded 100644
--- a/_overviews/quasiquotes/terminology.md
+++ b/_overviews/quasiquotes/terminology.md
@@ -10,7 +10,7 @@ permalink: /overviews/quasiquotes/:title.html
---
EXPERIMENTAL
-* **Quasiquote** (not quasi-quote) can refer to either the quasiquote library or any usage of one its [interpolators](intro.html#interpolators). The name is not hyphenated for the sake of consistency with implementations of the same concept in other languages (e.g. [Scheme and Racket](https://docs.racket-lang.org/reference/quasiquote.html), [Haskell](https://www.haskell.org/haskellwiki/Quasiquotation))
+* **Quasiquote** (not quasi-quote) can refer to either the quasiquote library or any usage of one of its [interpolators](intro.html#interpolators). The name is not hyphenated for the sake of consistency with implementations of the same concept in other languages (e.g. [Scheme and Racket](https://docs.racket-lang.org/reference/quasiquote.html), [Haskell](https://wiki.haskell.org/Quasiquotation))
* **Tree** or **AST** (Abstract Syntax Tree) is a representation of a Scala program or a part of it through means of the Scala reflection API's Tree type.
* **Tree construction** refers to usages of quasiquotes as expressions to represent creation of new tree values.
* **Tree deconstruction** refers to usages of quasiquotes as patterns to structurally tear apart trees.
diff --git a/_overviews/quasiquotes/type-details.md b/_overviews/quasiquotes/type-details.md
index f67cd4e563..a3cd254d24 100644
--- a/_overviews/quasiquotes/type-details.md
+++ b/_overviews/quasiquotes/type-details.md
@@ -37,7 +37,7 @@ It is recommended to always ascribe the name as `TypeName` when you work with ty
## Singleton Type
-A singleton type is a way to express a type of a term definition that is being referenced:
+A singleton type is a way to express the type of a term definition that is being referenced:
scala> val singleton = tq"foo.bar.type".sr
singleton: String = SingletonTypeTree(Select(Ident(TermName("foo")), TermName("bar")))
@@ -124,7 +124,7 @@ A compound type lets users express a combination of a number of types with an op
parents: List[universe.Tree] = List(A, B, C)
defns: List[universe.Tree] = List()
-Braces after parents are required to signal that this type is a compound type, even if there are no refinements and we just want to extract a sequence of types combined with the `with` keyword.
+Braces after parents are required to signal that this type is a compound type, even when there are no refinements and we just want to extract a sequence of types combined with the `with` keyword.
On the other side of the spectrum are pure refinements without explicit parents (a.k.a. structural types):
diff --git a/_overviews/quasiquotes/unlifting.md b/_overviews/quasiquotes/unlifting.md
index e23f2d7152..adb8d4ed41 100644
--- a/_overviews/quasiquotes/unlifting.md
+++ b/_overviews/quasiquotes/unlifting.md
@@ -65,7 +65,7 @@ Here one must pay attention to a few nuances:
1. Similarly to `Liftable`, `Unliftable` defines a helper `apply` function in
the companion object to simplify the creation of `Unliftable` instances. It
- take a type parameter `T` as well as a partial function `PartialFunction[Tree, T]`
+ takes a type parameter `T` as well as a partial function `PartialFunction[Tree, T]`
and returns an `Unliftable[T]`. At all inputs where a partial function is defined
it is expected to return an instance of `T` unconditionally.
diff --git a/_overviews/reflection/annotations-names-scopes.md b/_overviews/reflection/annotations-names-scopes.md
index 797a2bcb0e..a4d1bbcce0 100644
--- a/_overviews/reflection/annotations-names-scopes.md
+++ b/_overviews/reflection/annotations-names-scopes.md
@@ -58,7 +58,7 @@ represent different kinds of Java annotation arguments:
## Names
Names are simple wrappers for strings.
-[Name](https://www.scala-lang.org/api/current/scala-reflect/scala/reflect/api/Names$NameApi.html)
+[Name](https://www.scala-lang.org/api/2.x/scala-reflect/scala/reflect/api/Names$NameApi.html)
has two subtypes `TermName` and `TypeName` which distinguish names of terms (like
objects or members) and types (like classes, traits, and type members). A term
and a type of the same name can co-exist in the same object. In other words,
@@ -86,11 +86,7 @@ the `map` method (which is a term) declared in the `List` class, one can do:
res1: scala.reflect.runtime.universe.Symbol = method map
To search for a type member, one can follow the same procedure, using
-`TypeName` instead. It is also possible to rely on implicit conversions to
-convert between strings and term or type names:
-
- scala> listTpe.member("map": TermName)
- res2: scala.reflect.runtime.universe.Symbol = method map
+`TypeName` instead.
### Standard Names
@@ -108,32 +104,32 @@ There are both
Some names, such as "package", exist both as a type name and a term name.
Standard names are made available through the `termNames` and `typeNames` members of
class `Universe`. For a complete specification of all standard names, see the
-[API documentation](https://www.scala-lang.org/api/current/scala-reflect/scala/reflect/api/StandardNames.html).
+[API documentation](https://www.scala-lang.org/api/2.x/scala-reflect/scala/reflect/api/StandardNames.html).
## Scopes
A scope object generally maps names to symbols available in a corresponding
lexical scope. Scopes can be nested. The base type exposed in the reflection
API, however, only exposes a minimal interface, representing a scope as an
-iterable of [Symbol](https://www.scala-lang.org/api/current/scala-reflect/scala/reflect/api/Symbols$Symbol.html)s.
+iterable of [Symbol](https://www.scala-lang.org/api/2.x/scala-reflect/scala/reflect/api/Symbols$Symbol.html)s.
Additional functionality is exposed in *member scopes* that are returned by
`members` and `decls` defined in
-[scala.reflect.api.Types#TypeApi](https://www.scala-lang.org/api/current/scala-reflect/scala/reflect/api/Types$TypeApi.html).
-[scala.reflect.api.Scopes#MemberScope](https://www.scala-lang.org/api/current/scala-reflect/scala/reflect/api/Scopes$MemberScope.html)
+[scala.reflect.api.Types#TypeApi](https://www.scala-lang.org/api/2.x/scala-reflect/scala/reflect/api/Types$TypeApi.html).
+[scala.reflect.api.Scopes#MemberScope](https://www.scala-lang.org/api/2.x/scala-reflect/scala/reflect/api/Scopes$MemberScope.html)
supports the `sorted` method, which sorts members *in declaration order*.
-The following example returns a list of the symbols of all overridden members
+The following example returns a list of the symbols of all final members
of the `List` class, in declaration order:
- scala> val overridden = listTpe.decls.sorted.filter(_.isOverride)
- overridden: List[scala.reflect.runtime.universe.Symbol] = List(method companion, method ++, method +:, method toList, method take, method drop, method slice, method takeRight, method splitAt, method takeWhile, method dropWhile, method span, method reverse, method stringPrefix, method toStream, method foreach)
+ scala> val finals = listTpe.decls.sorted.filter(_.isFinal)
+    finals: List[scala.reflect.runtime.universe.Symbol] = List(method isEmpty, method map, method collect, method flatMap, method takeWhile, method span, method foreach, method reverse, method foldRight, method length, method lengthCompare, method forall, method exists, method contains, method find, method mapConserve, method toList)
## Exprs
In addition to type `scala.reflect.api.Trees#Tree`, the base type of abstract
syntax trees, typed trees can also be represented as instances of type
-[`scala.reflect.api.Exprs#Expr`](https://www.scala-lang.org/api/current/scala-reflect/scala/reflect/api/Exprs$Expr.html).
+[`scala.reflect.api.Exprs#Expr`](https://www.scala-lang.org/api/2.x/scala-reflect/scala/reflect/api/Exprs$Expr.html).
An `Expr` wraps
an abstract syntax tree and an internal type tag to provide access to the type
of the tree. `Expr`s are mainly used to simply and conveniently create typed
@@ -193,9 +189,9 @@ expressions are compile-time constants (see [section 6.24 of the Scala language
2. String literals - represented as instances of the string.
-3. References to classes, typically constructed with [scala.Predef#classOf](https://www.scala-lang.org/api/current/index.html#scala.Predef$@classOf[T]:Class[T]) - represented as [types](https://www.scala-lang.org/api/current/scala-reflect/scala/reflect/api/Types$Type.html).
+3. References to classes, typically constructed with [scala.Predef#classOf](https://www.scala-lang.org/api/current/index.html#scala.Predef$@classOf[T]:Class[T]) - represented as [types](https://www.scala-lang.org/api/2.x/scala-reflect/scala/reflect/api/Types$Type.html).
-4. References to Java enumeration values - represented as [symbols](https://www.scala-lang.org/api/current/scala-reflect/scala/reflect/api/Symbols$Symbol.html).
+4. References to Java enumeration values - represented as [symbols](https://www.scala-lang.org/api/2.x/scala-reflect/scala/reflect/api/Symbols$Symbol.html).
Constant expressions are used to represent
@@ -204,7 +200,8 @@ Constant expressions are used to represent
Example:
- Literal(Constant(5))
+ scala> Literal(Constant(5))
+ val res6: reflect.runtime.universe.Literal = 5
The above expression creates an AST representing the integer literal `5` in
Scala source code.
@@ -290,8 +287,8 @@ Example:
## Printers
Utilities for nicely printing
-[`Trees`](https://www.scala-lang.org/api/current/scala-reflect/scala/reflect/api/Trees.html) and
-[`Types`](https://www.scala-lang.org/api/current/scala-reflect/scala/reflect/api/Types.html).
+[`Trees`](https://www.scala-lang.org/api/2.x/scala-reflect/scala/reflect/api/Trees.html) and
+[`Types`](https://www.scala-lang.org/api/2.x/scala-reflect/scala/reflect/api/Types.html).
### Printing Trees
@@ -411,7 +408,7 @@ additionally shows the unique identifiers of symbols, as well as their kind
## Positions
Positions (instances of the
-[Position](https://www.scala-lang.org/api/current/scala-reflect/scala/reflect/api/Position.html) trait)
+[Position](https://www.scala-lang.org/api/2.x/scala-reflect/scala/reflect/api/Position.html) trait)
are used to track the origin of symbols and tree nodes. They are commonly used when
displaying warnings and errors, to indicate the incorrect point in the
program. Positions indicate a column and line in a source file (the offset
@@ -424,7 +421,7 @@ Positions can refer either to only a single character in a source file, or to
a *range*. In the latter case, a *range position* is used (positions that are
not range positions are also called *offset positions*). Range positions have
in addition `start` and `end` offsets. The `start` and `end` offsets can be
-"focussed" on using the `focusStart` and `focusEnd` methods which return
+"focused" on using the `focusStart` and `focusEnd` methods which return
positions (when called on a position which is not a range position, they just
return `this`).
diff --git a/_overviews/reflection/overview.md b/_overviews/reflection/overview.md
index a5fa5b77f8..d388e4016e 100644
--- a/_overviews/reflection/overview.md
+++ b/_overviews/reflection/overview.md
@@ -7,7 +7,7 @@ overview-name: Reflection
num: 1
-languages: [ja]
+languages: [ja, zh-cn]
permalink: /overviews/reflection/:title.html
---
@@ -21,7 +21,7 @@ and logic programming paradigms.
While some languages are built around reflection as a guiding principle, many
languages progressively evolve their reflection abilities over time.
-Reflection involves the ability to **reify** (ie. make explicit) otherwise-implicit
+Reflection involves the ability to **reify** (i.e. make explicit) otherwise-implicit
elements of a program. These elements can be either static program elements
like classes, methods, or expressions, or dynamic elements like the current
continuation or execution events such as method invocations and field accesses.
@@ -130,7 +130,7 @@ available that are loaded by the current classloader, including class
The second step involves obtaining a `ClassMirror` for class `Person` using
the `reflectClass` method. The `ClassMirror` provides access to the
-constructor of class `Person`.
+constructor of class `Person`. (If this step causes an exception, an easy workaround is to start the REPL with `scala -Yrepl-class-based:false`.)
scala> val ctor = ru.typeOf[Person].decl(ru.termNames.CONSTRUCTOR).asMethod
ctor: scala.reflect.runtime.universe.MethodSymbol = constructor Person
@@ -262,7 +262,7 @@ precise runtime _types_ of these Scala objects. Scala runtime types carry
along all type info from compile-time, avoiding these types mismatches between
compile-time and run-time.
-Below, we use define a method which uses Scala reflection to get the runtime
+Below, we define a method which uses Scala reflection to get the runtime
types of its arguments, and then checks the subtyping relationship between the
two. If its first argument's type is a subtype of its second argument's type,
it returns `true`.
@@ -325,7 +325,7 @@ reflection, such as `Types`, `Trees`, and `Annotations`. For more details, see
the section of this guide on
[Universes]({{ site.baseurl}}/overviews/reflection/environment-universes-mirrors.html),
or the
-[Universes API docs](https://www.scala-lang.org/api/current/scala-reflect/scala/reflect/api/Universe.html)
+[Universes API docs](https://www.scala-lang.org/api/2.x/scala-reflect/scala/reflect/api/Universe.html)
in package `scala.reflect.api`.
To use most aspects of Scala reflection, including most code examples provided
@@ -345,5 +345,5 @@ different flavors of mirrors must be used.
For more details, see the section of this guide on
[Mirrors]({{ site.baseurl}}/overviews/reflection/environment-universes-mirrors.html),
or the
-[Mirrors API docs](https://www.scala-lang.org/api/current/scala-reflect/scala/reflect/api/Mirrors.html)
+[Mirrors API docs](https://www.scala-lang.org/api/2.x/scala-reflect/scala/reflect/api/Mirrors.html)
in package `scala.reflect.api`.
diff --git a/_overviews/reflection/symbols-trees-types.md b/_overviews/reflection/symbols-trees-types.md
index 76bfde2008..4fba8ca28e 100644
--- a/_overviews/reflection/symbols-trees-types.md
+++ b/_overviews/reflection/symbols-trees-types.md
@@ -350,14 +350,14 @@ For example, to look up the `map` method of `List`, one can do:
scala> import scala.reflect.runtime.universe._
import scala.reflect.runtime.universe._
- scala> typeOf[List[_]].member("map": TermName)
+ scala> typeOf[List[_]].member(TermName("map"))
res0: scala.reflect.runtime.universe.Symbol = method map
Note that we pass method `member` a `TermName`, since we're looking up a
method. If we were to look up a type member, such as `List`'s self type, `Self`, we
would pass a `TypeName`:
- scala> typeOf[List[_]].member("Self": TypeName)
+ scala> typeOf[List[_]].member(TypeName("Self"))
res1: scala.reflect.runtime.universe.Symbol = type Self
We can also query all members or declarations on a type in interesting ways.
@@ -694,25 +694,29 @@ section:
It's important to note that, unlike `reify`, toolboxes aren't limited by the
typeability requirement-- although this flexibility is achieved by sacrificing
-robustness. That is, here we can see that `parse`, unlike `reify`, doesn’t
+robustness. That is, here we can see that `parse`, unlike `reify`, doesn't
reflect the fact that `println` should be bound to the standard `println`
method.
-_Note:_ when using macros, one shouldn’t use `ToolBox.parse`. This is because
+_Note:_ when using macros, one shouldn't use `ToolBox.parse`. This is because
there’s already a `parse` method built into the macro context. For example:
+ bash$ scala -Yrepl-class-based:false
+
scala> import scala.language.experimental.macros
import scala.language.experimental.macros
- scala> def impl(c: scala.reflect.macros.Context) = c.Expr[Unit](c.parse("println(2)"))
- impl: (c: scala.reflect.macros.Context)c.Expr[Unit]
+ scala> def impl(c: scala.reflect.macros.whitebox.Context) = c.Expr[Unit](c.parse("println(2)"))
+ def impl(c: scala.reflect.macros.whitebox.Context): c.Expr[Unit]
- scala> def test = macro impl
- test: Unit
+ scala> def test: Unit = macro impl
+ def test: Unit
scala> test
2
+You can find out more about the two `Context`s in [this Macros article]({{ site.baseurl }}/overviews/macros/blackbox-whitebox.html).
+
##### Typechecking with ToolBoxes
As earlier alluded to, `ToolBox`es enable one to do more than just
@@ -722,7 +726,7 @@ and execute trees.
In addition to outlining the structure of the program, trees also hold
important information about the semantics of the program encoded in `symbol`
(a symbol assigned to trees that introduce or reference definitions), and
-`tpe` (the type of the tree). By default these fields are empty, but
+`tpe` (the type of the tree). By default, these fields are empty, but
typechecking fills them in.
When using the runtime reflection framework, typechecking is implemented by
diff --git a/_overviews/reflection/thread-safety.md b/_overviews/reflection/thread-safety.md
index 5ea8071637..6c5aaa2e11 100644
--- a/_overviews/reflection/thread-safety.md
+++ b/_overviews/reflection/thread-safety.md
@@ -6,7 +6,7 @@ overview-name: Reflection
num: 6
-languages: [ja]
+languages: [ja, zh-cn]
permalink: /overviews/reflection/:title.html
---
@@ -20,7 +20,7 @@ and to look up technical details, and here's a concise summary of the state of t
NEW Thread safety issues have been fixed in Scala 2.11.0-RC1, but we are going to keep this document available for now, since the problem still remains in the Scala 2.10.x series, and we currently don't have concrete plans on when the fix is going to be backported.
-Currently we know about two kinds of races associated with reflection. First of all, reflection initialization (the code that is called
+Currently, we know about two kinds of races associated with reflection. First of all, reflection initialization (the code that is called
when `scala.reflect.runtime.universe` is accessed for the first time) cannot be safely called from multiple threads. Secondly, symbol
initialization (the code that is called when symbol's flags or type signature are accessed for the first time) isn't safe as well.
Here's a typical manifestation:
diff --git a/_overviews/reflection/typetags-manifests.md b/_overviews/reflection/typetags-manifests.md
index e1c32a1c8f..6b6febff89 100644
--- a/_overviews/reflection/typetags-manifests.md
+++ b/_overviews/reflection/typetags-manifests.md
@@ -6,7 +6,7 @@ overview-name: Reflection
num: 5
-languages: [ja]
+languages: [ja, zh-cn]
permalink: /overviews/reflection/:title.html
---
diff --git a/_overviews/repl/overview.md b/_overviews/repl/overview.md
index 38d5008dd6..c462643399 100644
--- a/_overviews/repl/overview.md
+++ b/_overviews/repl/overview.md
@@ -79,4 +79,4 @@ Its facilities can be witnessed using `:imports` or `-Xprint:parser`.
### Contributing to Scala REPL
The REPL source is part of the Scala project. Issues are tracked by the standard
-mechanism for the project and pull requests are accepted at [the github repository](https://github.com/scala/scala).
+mechanism for the project and pull requests are accepted at [the GitHub repository](https://github.com/scala/scala).
diff --git a/_overviews/scala-book/abstract-classes.md b/_overviews/scala-book/abstract-classes.md
index a5ec3b96fa..88c496945c 100644
--- a/_overviews/scala-book/abstract-classes.md
+++ b/_overviews/scala-book/abstract-classes.md
@@ -5,11 +5,11 @@ title: Abstract Classes
description: This page shows how to use abstract classes, including when and why you should use abstract classes.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 27
outof: 54
previous-page: traits-abstract-mixins
next-page: collections-101
+new-version: /scala3/book/domain-modeling-tools.html#abstract-classes
---
@@ -107,11 +107,3 @@ d.speak
```
We encourage you to copy and paste that code into the REPL to be sure that it works as expected, and then experiment with it as desired.
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/anonymous-functions.md b/_overviews/scala-book/anonymous-functions.md
index bbd7bc8d8d..619d8854a7 100644
--- a/_overviews/scala-book/anonymous-functions.md
+++ b/_overviews/scala-book/anonymous-functions.md
@@ -5,11 +5,11 @@ title: Anonymous Functions
description: This page shows how to use anonymous functions in Scala, including examples with the List class 'map' and 'filter' functions.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 34
outof: 54
previous-page: set-class
next-page: collections-methods
+new-version: /scala3/book/fun-anonymous-functions.html
---
@@ -201,16 +201,3 @@ is the same as this example:
```scala
val y = ints.filter(_ < 5)
```
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/arraybuffer-examples.md b/_overviews/scala-book/arraybuffer-examples.md
index fba905bc89..06bd6d1af2 100644
--- a/_overviews/scala-book/arraybuffer-examples.md
+++ b/_overviews/scala-book/arraybuffer-examples.md
@@ -5,11 +5,11 @@ title: The ArrayBuffer Class
description: This page provides examples of how to use the Scala ArrayBuffer class, including adding and removing elements.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 29
outof: 54
previous-page: collections-101
next-page: list-class
+new-version: /scala3/book/collections-classes.html#arraybuffer
---
@@ -46,7 +46,7 @@ scala> ints += 2
res1: ints.type = ArrayBuffer(1, 2)
```
-That’s just one way create an `ArrayBuffer` and add elements to it. You can also create an `ArrayBuffer` with initial elements like this:
+That’s just one way to create an `ArrayBuffer` and add elements to it. You can also create an `ArrayBuffer` with initial elements like this:
```scala
val nums = ArrayBuffer(1, 2, 3)
@@ -114,31 +114,20 @@ As a brief overview, here are several methods you can use with an `ArrayBuffer`:
```scala
val a = ArrayBuffer(1, 2, 3) // ArrayBuffer(1, 2, 3)
a.append(4) // ArrayBuffer(1, 2, 3, 4)
-a.append(5, 6) // ArrayBuffer(1, 2, 3, 4, 5, 6)
-a.appendAll(Seq(7,8)) // ArrayBuffer(1, 2, 3, 4, 5, 6, 7, 8)
+a.appendAll(Seq(5, 6)) // ArrayBuffer(1, 2, 3, 4, 5, 6)
a.clear // ArrayBuffer()
val a = ArrayBuffer(9, 10) // ArrayBuffer(9, 10)
a.insert(0, 8) // ArrayBuffer(8, 9, 10)
a.insertAll(0, Vector(4, 5, 6, 7)) // ArrayBuffer(4, 5, 6, 7, 8, 9, 10)
a.prepend(3) // ArrayBuffer(3, 4, 5, 6, 7, 8, 9, 10)
-a.prepend(1, 2) // ArrayBuffer(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
-a.prependAll(Array(0)) // ArrayBuffer(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
+a.prependAll(Array(0, 1, 2)) // ArrayBuffer(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
val a = ArrayBuffer.range('a', 'h') // ArrayBuffer(a, b, c, d, e, f, g)
a.remove(0) // ArrayBuffer(b, c, d, e, f, g)
a.remove(2, 3) // ArrayBuffer(b, c, g)
val a = ArrayBuffer.range('a', 'h') // ArrayBuffer(a, b, c, d, e, f, g)
-a.trimStart(2) // ArrayBuffer(c, d, e, f, g)
-a.trimEnd(2) // ArrayBuffer(c, d, e)
+a.dropInPlace(2) // ArrayBuffer(c, d, e, f, g)
+a.dropRightInPlace(2) // ArrayBuffer(c, d, e)
```
-
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/built-in-types.md b/_overviews/scala-book/built-in-types.md
index 209b1b8f0b..c251b9a4f1 100644
--- a/_overviews/scala-book/built-in-types.md
+++ b/_overviews/scala-book/built-in-types.md
@@ -5,11 +5,11 @@ title: A Few Built-In Types
description: A brief introduction to Scala's built-in types.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 10
outof: 54
previous-page: type-is-optional
next-page: two-notes-about-strings
+new-version: /scala3/book/first-look-at-types.html#scalas-value-types
---
@@ -106,10 +106,3 @@ val c: Char = 'a'
```
As shown, enclose strings in double-quotes and a character in single-quotes.
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/case-classes.md b/_overviews/scala-book/case-classes.md
index a497e1e33a..9ffae6db23 100644
--- a/_overviews/scala-book/case-classes.md
+++ b/_overviews/scala-book/case-classes.md
@@ -1,14 +1,15 @@
---
+type: chapter
layout: multipage-overview
title: Case Classes
description: This lesson provides an introduction to 'case classes' in Scala.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 49
outof: 54
previous-page: companion-objects
next-page: case-objects
+new-version: /scala3/book/domain-modeling-tools.html#case-classes
---
@@ -184,5 +185,3 @@ res0: Person = Person(Christina,niece)
## The biggest advantage
While all of these features are great benefits to functional programming, as they write in the book, [Programming in Scala](https://www.amazon.com/Programming-Scala-Updated-2-12/dp/0981531687/) (Odersky, Spoon, and Venners), “the biggest advantage of case classes is that they support pattern matching.” Pattern matching is a major feature of FP languages, and Scala’s case classes provide a simple way to implement pattern matching in match expressions and other areas.
-
-
diff --git a/_overviews/scala-book/case-objects.md b/_overviews/scala-book/case-objects.md
index 1b7426f12a..9bb17d2ec7 100644
--- a/_overviews/scala-book/case-objects.md
+++ b/_overviews/scala-book/case-objects.md
@@ -5,11 +5,11 @@ title: Case Objects
description: This lesson introduces Scala 'case objects', which are used to create singletons with a few additional features.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 50
outof: 54
previous-page: case-classes
next-page: functional-error-handling
+new-version: /scala3/book/domain-modeling-tools.html#case-objects
---
@@ -123,11 +123,3 @@ class Speak extends Actor {
```
This is a good, safe way to pass messages around in Scala applications.
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/classes-aux-constructors.md b/_overviews/scala-book/classes-aux-constructors.md
index 0daf140e88..8bca7dc8cf 100644
--- a/_overviews/scala-book/classes-aux-constructors.md
+++ b/_overviews/scala-book/classes-aux-constructors.md
@@ -5,11 +5,11 @@ title: Auxiliary Class Constructors
description: This page shows how to write auxiliary Scala class constructors, including several examples of the syntax.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 20
outof: 54
previous-page: classes
next-page: constructors-default-values
+new-version: /scala3/book/domain-modeling-tools.html#auxiliary-constructors
---
@@ -28,17 +28,17 @@ val DefaultCrustType = "THIN"
class Pizza (var crustSize: Int, var crustType: String) {
// one-arg auxiliary constructor
- def this(crustSize: Int) {
+ def this(crustSize: Int) = {
this(crustSize, DefaultCrustType)
}
// one-arg auxiliary constructor
- def this(crustType: String) {
+ def this(crustType: String) = {
this(DefaultCrustSize, crustType)
}
// zero-arg auxiliary constructor
- def this() {
+ def this() = {
this(DefaultCrustSize, DefaultCrustType)
}
@@ -72,11 +72,3 @@ class Pizza(
var crustType: String = DefaultCrustType
)
```
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/classes.md b/_overviews/scala-book/classes.md
index bc4fe65b66..bc7928eea0 100644
--- a/_overviews/scala-book/classes.md
+++ b/_overviews/scala-book/classes.md
@@ -5,11 +5,11 @@ title: Scala Classes
description: This page shows examples of how to create Scala classes, including the basic Scala class constructor.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 19
outof: 54
previous-page: try-catch-finally
next-page: classes-aux-constructors
+new-version: /scala3/book/domain-modeling-tools.html#classes
---
@@ -209,14 +209,3 @@ class Address (
var state: String
)
```
-
-
-
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/collections-101.md b/_overviews/scala-book/collections-101.md
index 6df136b528..995c20520b 100644
--- a/_overviews/scala-book/collections-101.md
+++ b/_overviews/scala-book/collections-101.md
@@ -5,11 +5,11 @@ title: Scala Collections
description: This page provides an introduction to the Scala collections classes, including Vector, List, ArrayBuffer, Map, Set, and more.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 28
outof: 54
previous-page: abstract-classes
next-page: arraybuffer-examples
+new-version: /scala3/book/collections-intro.html
---
@@ -34,6 +34,3 @@ The main Scala collections classes you’ll use on a regular basis are:
We’ll demonstrate the basics of these classes in the following lessons.
>In the following lessons on Scala collections classes, whenever we use the word *immutable*, it’s safe to assume that the class is intended for use in a *functional programming* (FP) style. With these classes you don’t modify the collection; you apply functional methods to the collection to create a new result. You’ll see what this means in the examples that follow.
-
-
-
diff --git a/_overviews/scala-book/collections-maps.md b/_overviews/scala-book/collections-maps.md
index 22c0a3100e..0abc9da611 100644
--- a/_overviews/scala-book/collections-maps.md
+++ b/_overviews/scala-book/collections-maps.md
@@ -5,11 +5,11 @@ title: Common Map Methods
description: This page shows examples of the most common methods that are available on Scala Maps.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 36
outof: 54
previous-page: collections-methods
next-page: misc
+new-version: /scala3/book/collections-methods.html
---
@@ -46,7 +46,7 @@ keys: Iterable[Int] = Set(1, 2, 3, 4)
scala> val values = m.values
val values: Iterable[String] = MapLike.DefaultValuesIterable(a, b, c, d)
-// how to test if a Map contains a value
+// how to test if a Map contains a key
scala> val contains3 = m.contains(3)
contains3: Boolean = true
@@ -83,18 +83,18 @@ Here are some things you can do with a mutable `Map`:
```scala
// add elements with +=
states += ("AZ" -> "Arizona")
-states += ("CO" -> "Colorado", "KY" -> "Kentucky")
+states ++= Map("CO" -> "Colorado", "KY" -> "Kentucky")
// remove elements with -=
states -= "KY"
-states -= ("AZ", "CO")
+states --= List("AZ", "CO")
// update elements by reassigning them
states("AK") = "Alaska, The Big State"
-// retain elements by supplying a function that operates on
+// filter elements by supplying a function that operates on
// the keys and/or values
-states.retain((k,v) => k == "AK")
+states.filterInPlace((k,v) => k == "AK")
```
@@ -102,13 +102,3 @@ states.retain((k,v) => k == "AK")
## See also
There are many more things you can do with maps. See the [Map class documentation]({{site.baseurl}}/overviews/collections-2.13/maps.html) for more details and examples.
-
-
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/collections-methods.md b/_overviews/scala-book/collections-methods.md
index 527922b628..e6620ec6cc 100644
--- a/_overviews/scala-book/collections-methods.md
+++ b/_overviews/scala-book/collections-methods.md
@@ -5,11 +5,11 @@ title: Common Sequence Methods
description: This page shows examples of the most common methods that are available on the Scala sequences (collections classes).
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 35
outof: 54
previous-page: anonymous-functions
next-page: collections-maps
+new-version: /scala3/book/collections-methods.html
---
@@ -24,8 +24,7 @@ Because there are so many methods available to you, they won’t all be shown he
- `tail`
- `take`, `takeWhile`
- `drop`, `dropWhile`
-- `find`
-- `reduce`, `fold`
+- `reduce`
The following methods will work on all of the collections “sequence” classes, including `Array`, `ArrayBuffer`, `List`, `Vector`, etc., but these examples will use a `List` unless otherwise specified.
@@ -321,8 +320,3 @@ That might be a little mind-blowing if you’ve never seen it before, but after
## Even more!
There are literally dozens of additional methods on the Scala sequence classes that will keep you from ever needing to write another `for` loop. However, because this is a simple introduction book they won’t all be covered here. For more information, see [the collections overview of sequence traits]({{site.baseurl}}/overviews/collections-2.13/seqs.html).
-
-
-
-
-
diff --git a/_overviews/scala-book/command-line-io.md b/_overviews/scala-book/command-line-io.md
index ffb35f698e..b3ea6ca64c 100644
--- a/_overviews/scala-book/command-line-io.md
+++ b/_overviews/scala-book/command-line-io.md
@@ -5,11 +5,11 @@ title: Command-Line I/O
description: An introduction to command-line I/O in Scala.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 12
outof: 54
previous-page: two-notes-about-strings
next-page: control-structures
+new-version: /scala3/book/taste-hello-world.html#ask-for-user-input
---
@@ -98,12 +98,3 @@ import scala.io.StdIn.readLine
```
That import statement brings the `readLine` method into the current scope so you can use it in the application.
-
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/companion-objects.md b/_overviews/scala-book/companion-objects.md
index 6babb21eb9..dc8cb8b1d3 100644
--- a/_overviews/scala-book/companion-objects.md
+++ b/_overviews/scala-book/companion-objects.md
@@ -5,11 +5,11 @@ title: Companion Objects
description: This lesson provides an introduction to 'companion objects' in Scala, including writing 'apply' and 'unapply' methods.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 48
outof: 54
previous-page: no-null-values
next-page: case-classes
+new-version: /scala3/book/domain-modeling-tools.html#companion-objects
---
@@ -271,13 +271,3 @@ The key points of this lesson are:
- A companion object and its class can access each other’s private members
- A companion object’s `apply` method lets you create new instances of a class without using the `new` keyword
- A companion object’s `unapply` method lets you de-construct an instance of a class into its individual components
-
-
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/concurrency-signpost.md b/_overviews/scala-book/concurrency-signpost.md
index 6f609013b5..49ab2cd094 100644
--- a/_overviews/scala-book/concurrency-signpost.md
+++ b/_overviews/scala-book/concurrency-signpost.md
@@ -1,17 +1,16 @@
---
-type: section
+type: chapter
layout: multipage-overview
title: Concurrency
description: An introduction to concurrency in Scala.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 52
outof: 54
previous-page: functional-error-handling
next-page: futures
+new-version: /scala3/book/concurrency.html
---
In the next lesson you’ll see a primary tool for writing parallel and concurrent applications, the Scala `Future`.
-
diff --git a/_overviews/scala-book/constructors-default-values.md b/_overviews/scala-book/constructors-default-values.md
index aa4429305e..952fe3fd46 100644
--- a/_overviews/scala-book/constructors-default-values.md
+++ b/_overviews/scala-book/constructors-default-values.md
@@ -5,11 +5,11 @@ title: Supplying Default Values for Constructor Parameters
description: This page shows how to provide default values for Scala constructor parameters, with several examples.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 21
outof: 54
previous-page: classes-aux-constructors
next-page: methods-first-look
+new-version: /scala3/book/domain-modeling-tools.html#default-parameter-values
---
Scala lets you supply default values for constructor parameters. For example, in previous lessons we showed that you can define a `Socket` class like this:
@@ -88,12 +88,3 @@ is more readable than this code:
```scala
val s = new Socket(2000, 3000)
```
-
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/control-structures.md b/_overviews/scala-book/control-structures.md
index 813a05face..8724ba7050 100644
--- a/_overviews/scala-book/control-structures.md
+++ b/_overviews/scala-book/control-structures.md
@@ -5,11 +5,11 @@ title: Control Structures
description: This page provides an introduction to Scala's control structures, including if/then/else, for loops, try/catch/finally, etc.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 13
outof: 54
previous-page: command-line-io
next-page: if-then-else-construct
+new-version: /scala3/book/control-structures.html
---
@@ -25,10 +25,3 @@ It also has a few unique constructs, including:
- `for` expressions
We’ll demonstrate these in the following lessons.
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/enumerations-pizza-class.md b/_overviews/scala-book/enumerations-pizza-class.md
index 31625129f0..abe76d8b07 100644
--- a/_overviews/scala-book/enumerations-pizza-class.md
+++ b/_overviews/scala-book/enumerations-pizza-class.md
@@ -5,11 +5,11 @@ title: Enumerations (and a Complete Pizza Class)
description: This page introduces Scala enumerations, and further shows how to create a complete OOP 'Pizza' class that uses those enumerations.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 23
outof: 54
previous-page: methods-first-look
next-page: traits-intro
+new-version: /scala3/book/domain-modeling-fp.html#modeling-the-data
---
@@ -186,7 +186,3 @@ Toppings: ArrayBuffer(Cheese, Pepperoni)
That code combines several different concepts — including two things we haven’t discussed yet in the `import` statement and the `ArrayBuffer` — but if you have experience with Java and other languages, hopefully it’s not too much to throw at you at one time.
At this point we encourage you to work with that code as desired. Make changes to the code, and try using the `removeTopping` and `removeAllToppings` methods to make sure they work the way you expect them to work.
-
-
-
-
diff --git a/_overviews/scala-book/for-expressions.md b/_overviews/scala-book/for-expressions.md
index 7977777872..e7e5c0a90a 100644
--- a/_overviews/scala-book/for-expressions.md
+++ b/_overviews/scala-book/for-expressions.md
@@ -5,11 +5,11 @@ title: for Expressions
description: This page shows how to use Scala 'for' expressions (also known as 'for-expressions'), including examples of how to use it with the 'yield' keyword.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 16
outof: 54
previous-page: for-loops
next-page: match-expressions
+new-version: /scala3/book/control-structures.html#for-expressions
---
@@ -125,11 +125,3 @@ You can also put curly braces around the algorithm, if you prefer:
```scala
val capNames = for (name <- names) yield { name.drop(1).capitalize }
```
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/for-loops.md b/_overviews/scala-book/for-loops.md
index 5eef6cc279..b462c4d289 100644
--- a/_overviews/scala-book/for-loops.md
+++ b/_overviews/scala-book/for-loops.md
@@ -5,11 +5,11 @@ title: for Loops
description: This page provides an introduction to the Scala 'for' loop, including how to iterate over Scala collections.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 15
outof: 54
previous-page: if-then-else-construct
next-page: for-expressions
+new-version: /scala3/book/control-structures.html#for-loops
---
@@ -107,7 +107,3 @@ ratings.foreach {
case(movie, rating) => println(s"key: $movie, value: $rating")
}
```
-
-
-
-
diff --git a/_overviews/scala-book/functional-error-handling.md b/_overviews/scala-book/functional-error-handling.md
index 84d4cd2145..bdbcc2f228 100644
--- a/_overviews/scala-book/functional-error-handling.md
+++ b/_overviews/scala-book/functional-error-handling.md
@@ -5,11 +5,11 @@ title: Functional Error Handling in Scala
description: This lesson takes a look at error handling with functional programming in Scala.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 51
outof: 54
previous-page: case-objects
next-page: concurrency-signpost
+new-version: /scala3/book/fp-functional-error-handling.html
---
@@ -55,7 +55,7 @@ These approaches were discussed in the “No Null Values” lesson, so we won’
Another trio of classes named `Try`, `Success`, and `Failure` work just like `Option`, `Some`, and `None`, but with two nice features:
- `Try` makes it very simple to catch exceptions
-- `Failure` contains the exception message
+- `Failure` contains the exception
Here’s the `toInt` method re-written to use these classes. First, import the classes into the current scope:
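For reference, here is a minimal sketch of a `Try`-based `toInt` (a sketch only; the lesson’s exact code may differ slightly):

```scala
import scala.util.{Try, Success, Failure}

// Success(n) if the string parses, Failure(exception) if it doesn't
def toInt(s: String): Try[Int] = Try(s.trim.toInt)
```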
@@ -93,7 +93,7 @@ scala> val b = toInt("boo")
b: scala.util.Try[Int] = Failure(java.lang.NumberFormatException: For input string: "boo")
```
-As that output shows, the `Failure` that’s returned by `toInt` contains the reason for the failure, i.e., the exception message.
+As that output shows, the `Failure` that’s returned by `toInt` contains the reason for the failure, i.e., the exception.
There are quite a few ways to work with the results of a `Try` — including the ability to “recover” from the failure — but common approaches still involve using `match` and `for` expressions:
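For example, a small `match`-based sketch (using the `toInt` sketched above) might look like this:

```scala
toInt("boo") match {
  case Success(n) => println(s"Converted to the Int $n")
  case Failure(e) => println(s"Conversion failed: ${e.getMessage}")
}
```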
@@ -129,15 +129,3 @@ scala.util.Try[Int] = Failure(java.lang.NumberFormatException: For input string:
There are other classes that work in a similar manner, including Either/Left/Right in the Scala library and similar classes in third-party libraries, but Option/Some/None and Try/Success/Failure are commonly used, and good to learn first.
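As a hedged illustration of the Either/Left/Right approach mentioned above, a variant of `toInt` can return `Either[String, Int]`, with `Left` describing the error and `Right` carrying the value (the name `toIntEither` is made up for this sketch):

```scala
def toIntEither(s: String): Either[String, Int] =
  try {
    Right(s.trim.toInt)
  } catch {
    case _: NumberFormatException => Left(s"'$s' can't be converted to an Int")
  }
```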
You can use whatever you like, but Try/Success/Failure is generally used when dealing with code that can throw exceptions — because you almost always want to understand the exception — and Option/Some/None is used in other places, such as to avoid using null values.
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/functional-programming.md b/_overviews/scala-book/functional-programming.md
index aa0581cf87..806697f189 100644
--- a/_overviews/scala-book/functional-programming.md
+++ b/_overviews/scala-book/functional-programming.md
@@ -5,11 +5,11 @@ title: Functional Programming
description: This lesson begins a section on 'An introduction to functional programming in Scala'.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 44
outof: 54
previous-page: sbt-scalatest-bdd
next-page: pure-functions
+new-version: /scala3/book/fp-intro.html
---
@@ -19,13 +19,3 @@ Scala lets you write code in an object-oriented programming (OOP) style, a funct
*Functional programming* is a style of programming that emphasizes writing applications using only pure functions and immutable values. As Alvin Alexander wrote in [Functional Programming, Simplified](https://alvinalexander.com/scala/functional-programming-simplified-book), rather than using that description, it can be helpful to say that functional programmers have an extremely strong desire to see their code as math — to see the combination of their functions as a series of algebraic equations. In that regard, you could say that functional programmers like to think of themselves as mathematicians. That’s the driving desire that leads them to use *only* pure functions and immutable values, because that’s what you use in algebra and other forms of math.
Functional programming is a large topic, and there’s no simple way to condense the entire topic into this little book, but in the following lessons we’ll give you a taste of FP, and show some of the tools Scala provides for developers to write functional code.
-
-
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/futures.md b/_overviews/scala-book/futures.md
index 9324b0ddf9..8493ed1931 100644
--- a/_overviews/scala-book/futures.md
+++ b/_overviews/scala-book/futures.md
@@ -5,11 +5,11 @@ title: Scala Futures
description: This page provides an introduction to Futures in Scala, including Future callback methods.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 53
outof: 54
previous-page: concurrency-signpost
next-page: where-next
+new-version: /scala3/book/concurrency.html
---
When you want to write parallel and concurrent applications in Scala, you *could* still use the native Java `Thread` — but the Scala [Future](https://www.scala-lang.org/api/current/scala/concurrent/Future$.html) makes parallel/concurrent programming much simpler, and it’s preferred.
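As a tiny, hedged sketch of the idea (not the lesson’s own example), a `Future` runs a block of code off the main thread and eventually completes with its result:

```scala
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

// runs on another thread and completes with 42 after a short delay
val f: Future[Int] = Future {
  Thread.sleep(500)
  42
}

// prints the result once the future completes successfully
f.foreach(result => println(s"result: $result"))
```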
@@ -348,12 +348,3 @@ While this was a short introduction, hopefully those examples give you an idea o
- A small demo GUI application named *Future Board* was written to accompany this lesson. It works a little like [Flipboard](https://flipboard.com), updating a group of news sources simultaneously. You can find the source code for Future Board in [this GitHub repository](https://github.com/alvinj/FPFutures).
- While futures are intended for one-shot, relatively short-lived concurrent processes, [Akka](https://akka.io) is an “actor model” library for Scala, and provides a terrific way to implement long-running parallel processes. (If this term is new to you, an *actor* is a long-running process that runs in parallel to the main application thread, and responds to messages that are sent to it.)
-
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/hello-world-1.md b/_overviews/scala-book/hello-world-1.md
index 6f161bdaaf..d9f9ddc0c6 100644
--- a/_overviews/scala-book/hello-world-1.md
+++ b/_overviews/scala-book/hello-world-1.md
@@ -5,11 +5,11 @@ title: Hello, World
description: This page shares a Scala 'Hello, world' example.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 5
outof: 54
previous-page: scala-features
next-page: hello-world-2
+new-version: /scala3/book/taste-hello-world.html
---
Since the release of the book *The C Programming Language*, most programming books have begun with a simple “Hello, world” example, and in keeping with tradition, here’s the source code for a Scala “Hello, world” example:
@@ -68,7 +68,7 @@ That Scala code is pretty much the same as this Java code:
```java
public class Hello {
public static void main(String[] args) {
- System.out.println("Hello, world")
+ System.out.println("Hello, world");
}
}
```
@@ -87,7 +87,3 @@ public final class Hello {
````
As that output shows, the `javap` command reads that *.class* file just as if it were created from Java source code. Scala code runs on the JVM and can use existing Java libraries — and both are terrific benefits for Scala programmers.
-
-
-
-
diff --git a/_overviews/scala-book/hello-world-2.md b/_overviews/scala-book/hello-world-2.md
index d07b7da00f..ac2f61cfe2 100644
--- a/_overviews/scala-book/hello-world-2.md
+++ b/_overviews/scala-book/hello-world-2.md
@@ -5,11 +5,11 @@ title: Hello, World - Version 2
description: This is a second Scala 'Hello, World' example.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 6
outof: 54
previous-page: hello-world-1
next-page: scala-repl
+new-version: /scala3/book/taste-hello-world.html
---
While that first “Hello, World” example works just fine, Scala provides a way to write applications more conveniently. Rather than including a `main` method, your `object` can just extend the `App` trait, like this:
@@ -62,15 +62,3 @@ This shows:
- Command-line arguments are automatically made available to you in a variable named `args`.
- You determine the number of elements in `args` with `args.size` (or `args.length`, if you prefer).
- `args` is an `Array`, and you access `Array` elements as `args(0)`, `args(1)`, etc. Because `args` is an object, you access the array elements with parentheses (not `[]` or any other special syntax).
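Putting those points together, a minimal sketch of an `App` that uses `args` might look like this (illustrative code, not necessarily the lesson’s exact example):

```scala
object HelloYou extends App {
  if (args.size == 0)
    println("Hello, you")
  else
    println("Hello, " + args(0))
}
```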
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/if-then-else-construct.md b/_overviews/scala-book/if-then-else-construct.md
index 6fd09ef879..7087c3340c 100644
--- a/_overviews/scala-book/if-then-else-construct.md
+++ b/_overviews/scala-book/if-then-else-construct.md
@@ -5,11 +5,11 @@ title: The if/then/else Construct
description: This page demonstrates Scala's if/then/else construct, including several examples you can try in the REPL.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 14
outof: 54
previous-page: control-structures
next-page: for-loops
+new-version: /scala3/book/control-structures.html#the-ifthenelse-construct
---
@@ -79,10 +79,3 @@ println("Hello")
```
The first example runs the `doSomething` method as a side effect when `a` is equal to `b`. The second example is used for the side effect of writing a string to STDOUT. As you learn more about Scala you’ll find yourself writing more *expressions* and fewer *statements*. The differences between expressions and statements will also become more apparent.
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/introduction.md b/_overviews/scala-book/introduction.md
index 7b417388fb..42bfe49502 100644
--- a/_overviews/scala-book/introduction.md
+++ b/_overviews/scala-book/introduction.md
@@ -8,6 +8,7 @@ overview-name: Scala Book
num: 1
outof: 54
next-page: prelude-taste-of-scala
+new-version: /scala3/book/introduction.html
---
In these pages, *Scala Book* provides a quick introduction and overview of the Scala programming language. The book is written in an informal style, and consists of more than 50 small lessons. Each lesson is long enough to give you an idea of how the language features in that lesson work, but short enough that you can read it in fifteen minutes or less.
@@ -17,10 +18,3 @@ One note before beginning:
- Regarding programming style, most Scala programmers indent their code with two spaces, but we use four spaces because we think it makes the code easier to read, especially in a book format.
To begin reading, click the “next” link, or select the *Prelude: A Taste of Scala* lesson in the table of contents.
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/list-class.md b/_overviews/scala-book/list-class.md
index 1a606afc98..568463f1f7 100644
--- a/_overviews/scala-book/list-class.md
+++ b/_overviews/scala-book/list-class.md
@@ -5,11 +5,11 @@ title: The List Class
description: This page provides examples of the Scala List class, including how to add and remove elements from a List.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 30
outof: 54
previous-page: arraybuffer-examples
next-page: vector-class
+new-version: /scala3/book/collections-classes.html#list
---
[The List class](https://www.scala-lang.org/api/current/scala/collection/immutable/List.html) is a linear, immutable sequence. All this means is that it’s a linked-list that you can’t modify. Any time you want to add or remove `List` elements, you create a new `List` from an existing `List`.
@@ -140,15 +140,3 @@ list: List[Int] = List(1, 2, 3)
```
This works because a `List` is a singly-linked list that ends with the `Nil` element.
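As a brief, hedged illustration of that point, new lists are created from existing ones rather than modified in place:

```scala
// a List built by prepending elements onto Nil
val list = 1 :: 2 :: 3 :: Nil    // List(1, 2, 3)

// "adding" an element creates a new List; `list` itself is unchanged
val bigger = 0 :: list           // List(0, 1, 2, 3)
```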
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/map-class.md b/_overviews/scala-book/map-class.md
index 4a7d48db98..88efb3eec8 100644
--- a/_overviews/scala-book/map-class.md
+++ b/_overviews/scala-book/map-class.md
@@ -5,11 +5,11 @@ title: The Map Class
description: This page provides examples of the Scala 'Map' class, including how to add and remove elements from a Map, and iterate over Map elements.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 32
outof: 54
previous-page: vector-class
next-page: set-class
+new-version: /scala3/book/collections-classes.html#maps
---
@@ -158,12 +158,3 @@ ratings.foreach {
## See also
There are other ways to work with Scala Maps, and a nice collection of Map classes for different needs. See the [Map class documentation]({{site.baseurl}}/overviews/collections-2.13/maps.html) for more information and examples.
-
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/match-expressions.md b/_overviews/scala-book/match-expressions.md
index 4b54dfb35c..1c19d09c07 100644
--- a/_overviews/scala-book/match-expressions.md
+++ b/_overviews/scala-book/match-expressions.md
@@ -5,11 +5,11 @@ title: match Expressions
description: This page shows examples of the Scala 'match' expression, including how to write match/case expressions.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 17
outof: 54
previous-page: for-expressions
next-page: try-catch-finally
+new-version: /scala3/book/control-structures.html#match-expressions
---
@@ -247,11 +247,3 @@ stock match {
## Even more
`match` expressions are very powerful, and there are even more things you can do with them, but hopefully these examples provide a good start towards using them.
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/methods-first-look.md b/_overviews/scala-book/methods-first-look.md
index 7e10bdfd77..7a8d8bb71e 100644
--- a/_overviews/scala-book/methods-first-look.md
+++ b/_overviews/scala-book/methods-first-look.md
@@ -5,11 +5,11 @@ title: A First Look at Scala Methods
description: This page provides a first look at how to write Scala methods, including how to test them in the REPL.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 22
outof: 54
previous-page: constructors-default-values
next-page: enumerations-pizza-class
+new-version: /scala3/book/methods-intro.html
---
@@ -104,9 +104,3 @@ If you paste that code into the REPL, you’ll see that it works just like the p
scala> addThenDouble(1, 1)
res0: Int = 4
```
-
-
-
-
-
-
diff --git a/_overviews/scala-book/misc.md b/_overviews/scala-book/misc.md
index 61c19bd1b2..d7c7b77c89 100644
--- a/_overviews/scala-book/misc.md
+++ b/_overviews/scala-book/misc.md
@@ -5,11 +5,11 @@ title: A Few Miscellaneous Items
description: A few miscellaneous items about Scala
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 37
outof: 54
previous-page: collections-maps
next-page: tuples
+new-version: /scala3/book/introduction.html
---
@@ -17,5 +17,3 @@ In this section we’ll cover a few miscellaneous items about Scala:
- Tuples
- A Scala OOP example of a pizza restaurant order-entry system
-
-
diff --git a/_overviews/scala-book/no-null-values.md b/_overviews/scala-book/no-null-values.md
index dc77febe52..66771927f0 100644
--- a/_overviews/scala-book/no-null-values.md
+++ b/_overviews/scala-book/no-null-values.md
@@ -5,11 +5,11 @@ title: No Null Values
description: This lesson demonstrates the Scala Option, Some, and None classes, including how to use them instead of null values.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 47
outof: 54
previous-page: passing-functions-around
next-page: companion-objects
+new-version: /scala3/book/fp-functional-error-handling.html
---
@@ -120,7 +120,7 @@ val y = for {
When that expression finishes running, `y` will be one of two things:
- If all three strings convert to integers, `y` will be a `Some[Int]`, i.e., an integer wrapped inside a `Some`
-- If any of the three strings can’t be converted to an inside, `y` will be a `None`
+- If any of the three strings can’t be converted to an integer, `y` will be a `None`
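As a self-contained sketch of those two outcomes (written here with an `Option`-returning `toInt` of the kind this lesson is built around):

```scala
def toInt(s: String): Option[Int] =
  try {
    Some(s.trim.toInt)
  } catch {
    case _: NumberFormatException => None
  }

val allGood = for {
  a <- toInt("1")
  b <- toInt("2")
  c <- toInt("3")
} yield a + b + c    // Some(6)

val oneBad = for {
  a <- toInt("1")
  b <- toInt("yikes")
  c <- toInt("3")
} yield a + b + c    // None
```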
You can test this for yourself in the Scala REPL. First, paste these three string variables into the REPL:
@@ -301,10 +301,3 @@ This lesson was a little longer than the others, so here’s a quick review of t
## See also
- Tony Hoare invented the null reference in 1965, and refers to it as his “[billion dollar mistake](https://en.wikipedia.org/wiki/Tony_Hoare#Apologies_and_retractions).”
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/oop-pizza-example.md b/_overviews/scala-book/oop-pizza-example.md
index 7d50686e71..a7d11f9ff5 100644
--- a/_overviews/scala-book/oop-pizza-example.md
+++ b/_overviews/scala-book/oop-pizza-example.md
@@ -5,11 +5,11 @@ title: An OOP Example
description: This lesson shares an example of some OOP-style classes for a pizza restaurant order entry system, including Pizza, Topping, and Order classes.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 39
outof: 54
previous-page: tuples
next-page: sbt-scalatest-intro
+new-version: /scala3/book/domain-modeling-oop.html
---
@@ -216,9 +216,4 @@ To experiment with this on your own, please see the *PizzaOopExample* project in
- [github.com/alvinj/HelloScalaExamples](https://github.com/alvinj/HelloScalaExamples)
-To compile this project it will help to either (a) use IntelliJ IDEA or Eclipse, or (b) know how to use the [Scala Build Tool](http://www.scala-sbt.org).
-
-
-
-
-
+To compile this project it will help to either (a) use IntelliJ IDEA or Metals, or (b) know how to use the [Scala Build Tool](http://www.scala-sbt.org).
diff --git a/_overviews/scala-book/passing-functions-around.md b/_overviews/scala-book/passing-functions-around.md
index 2700ea06c7..91ca50d198 100644
--- a/_overviews/scala-book/passing-functions-around.md
+++ b/_overviews/scala-book/passing-functions-around.md
@@ -5,11 +5,11 @@ title: Passing Functions Around
description: Like a good functional programming language, Scala lets you use functions just like other variables, including passing them into other functions.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 46
outof: 54
previous-page: pure-functions
next-page: no-null-values
+new-version: /scala3/book/fp-functions-are-values.html
---
@@ -104,11 +104,3 @@ Those examples that use a “regular” function are equivalent to these anonymo
List("foo", "bar").map(s => s.toUpperCase)
List("foo", "bar").map(_.toUpperCase)
```
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/preliminaries.md b/_overviews/scala-book/preliminaries.md
index e8057e37d9..8308f59818 100644
--- a/_overviews/scala-book/preliminaries.md
+++ b/_overviews/scala-book/preliminaries.md
@@ -5,11 +5,11 @@ title: Preliminaries
description: A few things to know about getting started with Scala.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 3
outof: 54
previous-page: prelude-taste-of-scala
next-page: scala-features
+new-version: /scala3/book/taste-intro.html#setting-up-scala
---
@@ -21,7 +21,7 @@ That being said, there are a few good things to know before you read this book.
## Installing Scala
-First, to run the examples in this book you’ll need to install Scala on your computer. See our general [Getting Started]({{site.baseurl}}/getting-started/index.html) page for details on how to use Scala (a) in an IDE and (b) from the command line.
+First, to run the examples in this book you’ll need to install Scala on your computer. See our general [Getting Started]({{site.baseurl}}/getting-started/install-scala.html) page for details on how to use Scala (a) in an IDE and (b) from the command line.
@@ -45,11 +45,10 @@ One good thing to know up front is that comments in Scala are just like comments
## IDEs
-The three main IDEs (integrated development environments) for Scala are:
+The two main IDEs (integrated development environments) for Scala are:
- [IntelliJ IDEA](https://www.jetbrains.com/idea/download)
- [Visual Studio Code](https://code.visualstudio.com)
-- [Scala IDE for Eclipse](http://scala-ide.org)
@@ -60,12 +59,3 @@ Another good thing to know is that Scala naming conventions follow the same “c
- Class names: `Person`, `StoreEmployee`
- Variable names: `name`, `firstName`
- Method names: `convertToInt`, `toUpper`
-
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/prelude-taste-of-scala.md b/_overviews/scala-book/prelude-taste-of-scala.md
index 1564032440..970631acf6 100644
--- a/_overviews/scala-book/prelude-taste-of-scala.md
+++ b/_overviews/scala-book/prelude-taste-of-scala.md
@@ -5,16 +5,16 @@ title: Prelude꞉ A Taste of Scala
description: This page shares a Taste Of Scala example, quickly covering Scala's main features.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 2
outof: 54
previous-page: introduction
next-page: preliminaries
+new-version: /scala3/book/taste-intro.html
---
Our hope in this book is to demonstrate that [Scala](http://scala-lang.org) is a beautiful, modern, expressive programming language. To help demonstrate that, in this first chapter we’ll jump right in and provide a whirlwind tour of Scala’s main features. After this tour, the book begins with a more traditional “Getting Started” chapter.
->In this book we assume that you’ve used a language like Java before, and are ready to see a series of Scala examples to get a feel for what the language looks like. Although it’s not 100% necessary, it will also help if you’ve already [downloaded and installed Scala](https://www.scala-lang.org/download) so you can test the examples as you go along. You can also test these examples online with [ScalaFiddle.io](https://scalafiddle.io).
+>In this book we assume that you’ve used a language like Java before, and are ready to see a series of Scala examples to get a feel for what the language looks like. Although it’s not 100% necessary, it will also help if you’ve already [downloaded and installed Scala](https://www.scala-lang.org/download) so you can test the examples as you go along. You can also test these examples online with [Scastie](https://scastie.scala-lang.org/).
@@ -462,19 +462,30 @@ There are many (many!) more methods available to Scala collections classes, and
## Tuples
-Tuples let you put a heterogenous collection of elements in a little container. Tuples can contain between two and 22 values, and they can all be different types. For example, given a `Person` class like this:
+Tuples let you put a heterogeneous collection of elements in a little container. A tuple can contain between two and 22 values, and all of the values can have different types. For example, this is a tuple that holds three different types, an `Int`, a `Double`, and a `String`:
```scala
-class Person(var name: String)
+(11, 11.0, "Eleven")
```
-You can create a tuple that contains three different types like this:
+This is known as a `Tuple3`, because it contains three elements.
+
+Tuples are convenient in many places, such as where you might use an ad-hoc class in other languages. For instance, you can return a tuple from a method instead of returning a class:
```scala
-val t = (11, "Eleven", new Person("Eleven"))
+def getAaplInfo(): (String, BigDecimal, Long) = {
+ // get the stock symbol, price, and volume
+ ("AAPL", BigDecimal(123.45), 101202303L)
+}
```
-You can access the tuple values by number:
+Then you can assign the result of the method to a variable:
+
+```scala
+val t = getAaplInfo()
+```
+
+Once you have a tuple variable, you can access its values by number, preceded by an underscore:
```scala
t._1
@@ -482,13 +493,39 @@ t._2
t._3
```
-Or assign the tuple fields to variables:
+The REPL demonstrates the results of accessing those fields:
+
+```scala
+scala> t._1
+res0: String = AAPL
+
+scala> t._2
+res1: scala.math.BigDecimal = 123.45
+
+scala> t._3
+res2: Long = 101202303
+```
+
+The values of a tuple can also be extracted using pattern matching. In this next example, the fields inside the tuple are assigned to the variables `symbol`, `price`, and `volume`:
+
+```scala
+val (symbol, price, volume) = getAaplInfo()
+```
+
+Once again, the REPL shows the result:
```scala
-val (num, string, person) = (11, "Eleven", new Person("Eleven"))
+scala> val (symbol, price, volume) = getAaplInfo()
+symbol: String = AAPL
+price: scala.math.BigDecimal = 123.45
+volume: Long = 101202303
```
-Tuples are nice for those times when you need to put a little “bag” of things together for a little while.
+Tuples are nice for those times when you want to quickly (and temporarily) group some things together.
+If you notice that you are using the same tuple type in several places, it can be useful to declare a dedicated case class, such as:
+```scala
+case class StockInfo(symbol: String, price: BigDecimal, volume: Long)
+```
@@ -516,13 +553,3 @@ If you like what you’ve seen so far, we hope you’ll like the rest of the boo
## A bit of background
Scala was created by [Martin Odersky](https://en.wikipedia.org/wiki/Martin_Odersky), who studied under [Niklaus Wirth](https://en.wikipedia.org/wiki/Niklaus_Wirth), who created Pascal and several other languages. Mr. Odersky is one of the co-designers of Generic Java, and is also known as the “father” of the `javac` compiler.
-
-
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/pure-functions.md b/_overviews/scala-book/pure-functions.md
index e753d67ce4..35597bd01a 100644
--- a/_overviews/scala-book/pure-functions.md
+++ b/_overviews/scala-book/pure-functions.md
@@ -5,11 +5,11 @@ title: Pure Functions
description: This lesson provides an introduction to writing pure functions in Scala.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 45
outof: 54
previous-page: functional-programming
next-page: passing-functions-around
+new-version: /scala3/book/fp-pure-functions.html
---
@@ -49,7 +49,7 @@ Conversely, the following functions are *impure* because they violate the defini
The `foreach` method on collections classes is impure because it’s only used for its side effects, such as printing to STDOUT.
->A great hint that `foreach` is impure is that it’s method signature declares that it returns the type `Unit`. Because it returns nothing, logically the only reason you ever call it is to achieve some side effect. Similarly, *any* method that returns `Unit` is going to be an impure function.
+>A great hint that `foreach` is impure is that its method signature declares that it returns the type `Unit`. Because it returns nothing, logically the only reason you ever call it is to achieve some side effect. Similarly, *any* method that returns `Unit` is going to be an impure function.
Date and time related methods like `getDayOfWeek`, `getHour`, and `getMinute` are all impure because their output depends on something other than their input parameters. Their results rely on some form of hidden I/O, *hidden input* in these examples.
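To make those two kinds of impurity concrete, here is a small, hedged sketch (the names are illustrative, not from the lesson):

```scala
// impure: returns Unit, so it's only useful for its side effect
def logGreeting(name: String): Unit = println(s"Hello, $name")

// impure: the result depends on hidden input (the system clock), not just on its parameters
def currentHour(): Int = java.time.LocalTime.now().getHour

// pure: the output depends only on the declared input and the internal algorithm
def double(i: Int): Int = i * 2
```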
@@ -100,19 +100,3 @@ The first key point of this lesson is the definition of a pure function:
>A *pure function* is a function that depends only on its declared inputs and its internal algorithm to produce its output. It does not read any other values from “the outside world” — the world outside of the function’s scope — and it does not modify any values in the outside world.
A second key point is that real-world applications consist of a combination of pure and impure functions. A common recommendation is to write the core of your application using pure functions, and then to use impure functions to communicate with the outside world.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/sbt-scalatest-bdd.md b/_overviews/scala-book/sbt-scalatest-bdd.md
index fe933cce41..29ba5e1eb6 100644
--- a/_overviews/scala-book/sbt-scalatest-bdd.md
+++ b/_overviews/scala-book/sbt-scalatest-bdd.md
@@ -1,22 +1,22 @@
---
type: section
layout: multipage-overview
-title: Writing BDD Style Tests with ScalaTest and SBT
-description: This lesson shows how to write ScalaTest unit tests with SBT in a behavior-driven development (TDD) style.
+title: Writing BDD Style Tests with ScalaTest and sbt
+description: This lesson shows how to write ScalaTest unit tests with sbt in a behavior-driven development (BDD) style.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 43
outof: 54
previous-page: sbt-scalatest-tdd
next-page: functional-programming
+new-version: /scala3/book/tools-sbt.html#using-sbt-with-scalatest
---
In the previous lesson you saw how to write Test-Driven Development (TDD) tests with [ScalaTest](http://www.scalatest.org). ScalaTest also supports a [Behavior-Driven Development (BDD)](https://dannorth.net/introducing-bdd/) style of testing, which we’ll demonstrate next.
->This lesson uses the same SBT project as the previous lesson, so you don’t have to go through the initial setup work again.
+>This lesson uses the same sbt project as the previous lesson, so you don’t have to go through the initial setup work again.
@@ -45,9 +45,9 @@ Next, create a file named *MathUtilsTests.scala* in the *src/test/scala/simplete
```scala
package simpletest
-import org.scalatest.FunSpec
+import org.scalatest.funspec.AnyFunSpec
-class MathUtilsSpec extends FunSpec {
+class MathUtilsSpec extends AnyFunSpec {
describe("MathUtils::double") {
@@ -70,7 +70,7 @@ class MathUtilsSpec extends FunSpec {
As you can see, this is a very different-looking style than the TDD tests in the previous lesson. If you’ve never used a BDD style of testing before, a main idea is that the tests should be relatively easy to read for one of the “domain experts” who work with the programmers to create the application. A few notes about this code:
-- It uses the `FunSpec` class where the TDD tests used `FunSuite`
+- It uses the `AnyFunSpec` class where the TDD tests used `AnyFunSuite`
- A set of tests begins with `describe`
- Each test begins with `it`. The idea is that the test should read like, “It should do XYZ...,” where “it” is the `double` function
- This example also shows how to mark a test as “pending”
@@ -96,7 +96,7 @@ With those files in place you can again run `sbt test`. The important part of th
[info] Suites: completed 2, aborted 0
[info] Tests: succeeded 4, failed 0, canceled 0, ignored 0, pending 1
[info] All tests passed.
-[success] Total time: 4 s, completed Jan 6, 2018 4:58:23 PM
+[success] Total time: 4 s
````
A few notes about that output:
@@ -111,15 +111,7 @@ If you want to have a little fun with this, change one or more of the tests so t
## Where to go from here
-For more information about SBT and ScalaTest, see the following resources:
-
-- [The main SBT documentation](http://www.scala-sbt.org/documentation.html)
-- [The ScalaTest documentation](http://www.scalatest.org/user_guide)
-
-
-
-
-
-
-
+For more information about sbt and ScalaTest, see the following resources:
+- [The main sbt documentation](https://www.scala-sbt.org/1.x/docs/)
+- [The ScalaTest documentation](https://www.scalatest.org/user_guide)
diff --git a/_overviews/scala-book/sbt-scalatest-intro.md b/_overviews/scala-book/sbt-scalatest-intro.md
index c2f92cf32b..2c80d06799 100644
--- a/_overviews/scala-book/sbt-scalatest-intro.md
+++ b/_overviews/scala-book/sbt-scalatest-intro.md
@@ -1,23 +1,21 @@
---
type: chapter
layout: multipage-overview
-title: SBT and ScalaTest
-description: In this lesson we'll start to introduce SBT and ScalaTest, two tools commonly used on Scala projects.
+title: sbt and ScalaTest
+description: In this lesson we'll start to introduce sbt and ScalaTest, two tools commonly used on Scala projects.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 40
outof: 54
previous-page: oop-pizza-example
next-page: scala-build-tool-sbt
+new-version: /scala3/book/tools-sbt.html
---
In the next few lessons you’ll see a couple of tools that are commonly used in Scala projects:
-- The [SBT build tool](http://www.scala-sbt.org)
+- The [sbt build tool](http://www.scala-sbt.org)
- [ScalaTest](http://www.scalatest.org), a code testing framework
-We’ll start by showing how to use SBT, and then you’ll see how to use ScalaTest and SBT together to build and test your Scala projects.
-
-
+We’ll start by showing how to use sbt, and then you’ll see how to use ScalaTest and sbt together to build and test your Scala projects.
diff --git a/_overviews/scala-book/sbt-scalatest-tdd.md b/_overviews/scala-book/sbt-scalatest-tdd.md
index a02b268bd1..dbdbeeb53c 100644
--- a/_overviews/scala-book/sbt-scalatest-tdd.md
+++ b/_overviews/scala-book/sbt-scalatest-tdd.md
@@ -1,24 +1,24 @@
---
type: section
layout: multipage-overview
-title: Using ScalaTest with SBT
-description: This lesson shows how to write ScalaTest unit tests with SBT in a test-driven development (TDD) style.
+title: Using ScalaTest with sbt
+description: This lesson shows how to write ScalaTest unit tests with sbt in a test-driven development (TDD) style.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 42
outof: 54
previous-page: scala-build-tool-sbt
next-page: sbt-scalatest-bdd
+new-version: /scala3/book/tools-sbt.html#using-sbt-with-scalatest
---
-[ScalaTest](http://www.scalatest.org) is one of the main testing libraries for Scala projects, and in this lesson you’ll see how to create a Scala project that uses ScalaTest. You’ll also be able to compile, test, and run the project with SBT.
+[ScalaTest](http://www.scalatest.org) is one of the main testing libraries for Scala projects, and in this lesson you’ll see how to create a Scala project that uses ScalaTest. You’ll also be able to compile, test, and run the project with sbt.
## Creating the project directory structure
-As with the previous lesson, create an SBT project directory structure for a project named *HelloScalaTest* with the following commands:
+As with the previous lesson, create an sbt project directory structure for a project named *HelloScalaTest* with the following commands:
```sh
mkdir HelloScalaTest
@@ -39,15 +39,15 @@ version := "1.0"
scalaVersion := "{{site.scala-version}}"
libraryDependencies +=
- "org.scalatest" %% "scalatest" % "3.0.8" % Test
-)
+ "org.scalatest" %% "scalatest" % "3.2.19" % Test
+
```
-The first three lines of this file are essentially the same as the first example, and the `libraryDependencies` lines tell SBT to include the dependencies (jar files) that are needed to run ScalaTest:
+The first three lines of this file are essentially the same as the first example, and the `libraryDependencies` lines tell sbt to include the dependencies (jar files) that are needed to run ScalaTest:
```scala
libraryDependencies +=
- "org.scalatest" %% "scalatest" % "3.0.8" % Test
+ "org.scalatest" %% "scalatest" % "3.2.19" % Test
```
>The ScalaTest documentation has always been good, and you can always find the up to date information on what those lines should look like on the [Installing ScalaTest](http://www.scalatest.org/install) page.
@@ -85,8 +85,8 @@ There isn’t much that can go wrong with that source code, but it provides a si
[warn] consider launching sbt without any commands, or explicitly passing 'shell'
...
...
-[info] Compiling 1 Scala source to /Users/al/Projects/Scala/HelloScalaTest/target/scala-2.12/classes...
-[info] Running simpletest.Hello
+[info] compiling 1 Scala source to /Users/al/Projects/Scala/HelloScalaTest/target/scala-2.13/classes...
+[info] running simpletest.Hello
Hello Alvin Alexander
[success] Total time: 4 s
````
@@ -108,9 +108,9 @@ Next, create a file named *HelloTests.scala* in that directory with the followin
```scala
package simpletest
-import org.scalatest.FunSuite
+import org.scalatest.funsuite.AnyFunSuite
-class HelloTests extends FunSuite {
+class HelloTests extends AnyFunSuite {
// test 1
test("the name is set correctly in constructor") {
@@ -130,7 +130,7 @@ class HelloTests extends FunSuite {
This file demonstrates the ScalaTest `FunSuite` approach. A few important points:
-- Your class should extend `FunSuite`
+- Your class should extend `AnyFunSuite`
- You create tests as shown, by giving each `test` a unique name
- At the end of each test you should call `assert` to test that a condition has been satisfied
@@ -140,7 +140,7 @@ Now you can run these tests with the `sbt test` command. Skipping the first few
````
> sbt test
-[info] Set current project to HelloScalaTest (in build file:/Users/al/Projects/Scala/HelloScalaTest/)
+[info] set current project to HelloScalaTest (in build file:/Users/al/Projects/Scala/HelloScalaTest/)
[info] HelloTests:
[info] - the name is set correctly in constructor
[info] - a Person's name can be changed
@@ -156,14 +156,6 @@ Now you can run these tests with the `sbt test` command. Skipping the first few
## TDD tests
-This example demonstrates a *Test-Driven Development* (TDD) style of testing with ScalaTest. In the next lesson you’ll see how to write *Behavior-Driven Development* (BDD) tests with ScalaTest and SBT.
+This example demonstrates a *Test-Driven Development* (TDD) style of testing with ScalaTest. In the next lesson you’ll see how to write *Behavior-Driven Development* (BDD) tests with ScalaTest and sbt.
>Keep the project you just created. You’ll use it again in the next lesson.
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/scala-build-tool-sbt.md b/_overviews/scala-book/scala-build-tool-sbt.md
index 91c34cbd41..c329d06aa4 100644
--- a/_overviews/scala-book/scala-build-tool-sbt.md
+++ b/_overviews/scala-book/scala-build-tool-sbt.md
@@ -1,30 +1,30 @@
---
type: section
layout: multipage-overview
-title: The Scala Build Tool (SBT)
-description: This page provides an introduction to the Scala Build Tool, SBT, including a simple 'Hello, world' project.
+title: The most used Scala build tool (sbt)
+description: This page provides an introduction to the Scala Build Tool, sbt, including a simple 'Hello, world' project.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 41
outof: 54
previous-page: sbt-scalatest-intro
next-page: sbt-scalatest-tdd
+new-version: /scala3/book/tools-sbt.html#building-scala-projects-with-sbt
---
-You can use several different tools to build your Scala projects, including Ant, Maven, Gradle, and more. But a tool named [SBT](http://www.scala-sbt.org) was the first build tool that was specifically created for Scala, and these days it’s supported by [Lightbend](https://www.lightbend.com), the company that was co-founded by Scala creator Martin Odersky that also maintains Akka, the Play web framework, and more.
+You can use several different tools to build your Scala projects, including Ant, Maven, Gradle, and more. But a tool named [sbt](http://www.scala-sbt.org) was the first build tool that was specifically created for Scala, and these days it’s supported by [Lightbend](https://www.lightbend.com), the company co-founded by Scala creator Martin Odersky, which also maintains Akka, the Play web framework, and more.
->If you haven’t already installed SBT, here’s a link to [its download page](http://www.scala-sbt.org/download.html).
+>If you haven’t already installed sbt, here’s a link to [its download page](http://www.scala-sbt.org/download.html).
-## The SBT directory structure
+## The sbt directory structure
-Like Maven, SBT uses a standard project directory structure. If you use that standard directory structure you’ll find that it’s relatively simple to build your first projects.
+Like Maven, sbt uses a standard project directory structure. If you use that standard directory structure you’ll find that it’s relatively simple to build your first projects.
-The first thing to know is that underneath your main project directory, SBT expects a directory structure that looks like this:
+The first thing to know is that underneath your main project directory, sbt expects a directory structure that looks like this:
```bash
build.sbt
@@ -42,9 +42,9 @@ target/
```
-## Creating a “Hello, world” SBT project directory structure
+## Creating a “Hello, world” sbt project directory structure
-Creating this directory structure is pretty simple, and you can use a shell script like [sbtmkdirs](https://alvinalexander.com/sbtmkdirs) to create new projects. But you don’t have to use that script; assuming that you’re using a Unix/Linux system, you can just use these commands to create your first SBT project directory structure:
+Creating this directory structure is pretty simple, and you can use a shell script like [sbtmkdirs](https://alvinalexander.com/sbtmkdirs) to create new projects. But you don’t have to use that script; assuming that you’re using a Unix/Linux system, you can just use these commands to create your first sbt project directory structure:
```bash
mkdir HelloWorld
@@ -73,7 +73,7 @@ $ find .
If you see that, you’re in great shape for the next step.
->There are other ways to create the files and directories for an SBT project. One way is to use the `sbt new` command, [which is documented here on scala-sbt.org](http://www.scala-sbt.org/1.x/docs/Hello.html). That approach isn’t shown here because some of the files it creates are more complicated than necessary for an introduction like this.
+>There are other ways to create the files and directories for an sbt project. One way is to use the `sbt new` command, [which is documented here on scala-sbt.org](http://www.scala-sbt.org/1.x/docs/Hello.html). That approach isn’t shown here because some of the files it creates are more complicated than necessary for an introduction like this.
@@ -82,7 +82,7 @@ If you see that, you’re in great shape for the next step.
At this point you only need two more things to run a “Hello, world” project:
- A *build.sbt* file
-- A *Hello.scala* file
+- A *HelloWorld.scala* file
For a little project like this, the *build.sbt* file only needs to contain a few lines, like this:
@@ -92,7 +92,7 @@ version := "1.0"
scalaVersion := "{{ site.scala-version }}"
```
-Because SBT projects use a standard directory structure, SBT already knows everything else it needs to know.
+Because sbt projects use a standard directory structure, sbt already knows everything else it needs to know.
Now you just need to add a little “Hello, world” program.
@@ -108,7 +108,7 @@ object HelloWorld extends App {
}
```
-Now you can use SBT to compile your project, where in this example, your project consists of that one file. Use the `sbt run` command to compile and run your project. When you do so, you’ll see output that looks like this:
+Now you can use sbt to compile your project, where in this example, your project consists of that one file. Use the `sbt run` command to compile and run your project. When you do so, you’ll see output that looks like this:
````
$ sbt run
@@ -131,7 +131,7 @@ Hello, world
[success] Total time: 4 s
````
-The first time you run `sbt` it needs to download some things and can take a while to run, but after that it gets much faster. As the first comment in that output shows, it’s also faster to run SBT interactively. To do that, first run the `sbt` command by itself:
+The first time you run `sbt` it needs to download some things and can take a while to run, but after that it gets much faster. As the first comment in that output shows, it’s also faster to run sbt interactively. To do that, first run the `sbt` command by itself:
````
> sbt
@@ -150,25 +150,16 @@ Hello, world
There, that’s much faster.
-If you type `help` at the SBT command prompt you’ll see a bunch of other commands you can run. But for now, just type `exit` to leave the SBT shell. You can also press `CTRL-D` instead of typing `exit`.
+If you type `help` at the sbt command prompt you’ll see a bunch of other commands you can run. But for now, just type `exit` to leave the sbt shell. You can also press `CTRL-D` instead of typing `exit`.
## See also
-Here’s a list of other build tools you can use to build Scala projects are:
+Here’s a list of other build tools you can use to build Scala projects:
- [Ant](http://ant.apache.org/)
- [Gradle](https://gradle.org/)
- [Maven](https://maven.apache.org/)
-- [Fury](https://fury.build)
-- [Mill](http://www.lihaoyi.com/mill)
-
-
-
-
-
-
-
-
-
+- [Fury](https://github.com/propensive/fury)
+- [Mill](https://com-lihaoyi.github.io/mill/)
diff --git a/_overviews/scala-book/scala-features.md b/_overviews/scala-book/scala-features.md
index eee55bd089..5973f1ea1a 100644
--- a/_overviews/scala-book/scala-features.md
+++ b/_overviews/scala-book/scala-features.md
@@ -5,19 +5,19 @@ title: Scala Features
description: TODO
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 4
outof: 54
previous-page: preliminaries
next-page: hello-world-1
+new-version: /scala3/book/scala-features.html
---
-The name *Scala* comes from the word *scalable*, and true to that name, it’s used to power the busiest websites in the world, including Twitter, Netflix, Tumblr, LinkedIn, Foursquare, and many more.
+The name *Scala* comes from the word *scalable*, and true to that name, it’s used to power the busiest websites in the world, including X, Netflix, Tumblr, LinkedIn, Foursquare, and many more.
Here are a few more nuggets about Scala:
-- It’s a modern programming language created by [Martin Odersky](https://twitter.com/odersky?lang=en) (the father of `javac`), and influenced by Java, Ruby, Smalltalk, ML, Haskell, Erlang, and others.
+- It’s a modern programming language created by [Martin Odersky](https://x.com/odersky?lang=en) (the father of `javac`), and influenced by Java, Ruby, Smalltalk, ML, Haskell, Erlang, and others.
- It’s a high-level language.
- It’s statically typed.
- It has a sophisticated type inference system.
@@ -28,6 +28,3 @@ Here are a few more nuggets about Scala:
- Scala also works extremely well with the thousands of Java libraries that have been developed over the years.
- A great thing about Scala is that you can be productive with it on Day 1, but it’s also a deep language, so as you go along you’ll keep learning, and finding newer, better ways to write code. Some people say that Scala will change the way you think about programming (and that’s a good thing).
- A great Scala benefit is that it lets you write concise, readable code. The time a programmer spends reading code compared to the time spent writing code is said to be at least a 10:1 ratio, so writing code that’s *concise and readable* is a big deal. Because Scala has these attributes, programmers say that it’s *expressive*.
-
-
-
diff --git a/_overviews/scala-book/scala-repl.md b/_overviews/scala-book/scala-repl.md
index c1ced1f219..d3227b15b1 100644
--- a/_overviews/scala-book/scala-repl.md
+++ b/_overviews/scala-book/scala-repl.md
@@ -5,11 +5,11 @@ title: The Scala REPL
description: This page shares an introduction to the Scala REPL.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 7
outof: 54
previous-page: hello-world-2
next-page: two-types-variables
+new-version: /scala3/book/taste-repl.html
---
@@ -72,14 +72,5 @@ In addition to the REPL there are a couple of other, similar tools you can use:
- [Scastie](https://scastie.scala-lang.org) is “an interactive playground for Scala” with several nice features, including being able to control build settings and share code snippets
- IntelliJ IDEA has a Worksheet plugin that lets you do the same things inside your IDE
-- The Scala IDE for Eclipse also has a Worksheet plugin
-- [scalafiddle.io](https://scalafiddle.io) lets you run similar experiments in a web browser
For more information on the Scala REPL, see the [Scala REPL overview]({{site.baseurl}}/overviews/repl/overview.html)
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/set-class.md b/_overviews/scala-book/set-class.md
index 55fefb31d0..6123650f6f 100644
--- a/_overviews/scala-book/set-class.md
+++ b/_overviews/scala-book/set-class.md
@@ -5,11 +5,11 @@ title: The Set Class
description: This page provides examples of the Scala 'Set' class, including how to add and remove elements from a Set, and iterate over Set elements.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 33
outof: 54
previous-page: map-class
next-page: anonymous-functions
+new-version: /scala3/book/collections-classes.html#working-with-sets
---
@@ -122,11 +122,3 @@ res3: Boolean = false
## More Sets
Scala has several more `Set` classes, including `SortedSet`, `LinkedHashSet`, and more. Please see the [Set class documentation]({{site.baseurl}}/overviews/collections-2.13/sets.html) for more details on those classes.
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/traits-abstract-mixins.md b/_overviews/scala-book/traits-abstract-mixins.md
index 265e95f684..1bcbb87936 100644
--- a/_overviews/scala-book/traits-abstract-mixins.md
+++ b/_overviews/scala-book/traits-abstract-mixins.md
@@ -5,11 +5,11 @@ title: Using Scala Traits Like Abstract Classes
description: This page shows how to use Scala traits just like abstract classes in Java, with examples of concrete and abstract methods.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 26
outof: 54
previous-page: traits-interfaces
next-page: abstract-classes
+new-version: /scala3/book/domain-modeling-tools.html#traits
---
@@ -28,7 +28,7 @@ trait Pet {
}
```
-When a class extends a trait, each defined method must be implemented, so here’s a class that extends `Pet` and defines `comeToMaster`:
+When a class extends a trait, each abstract method must be implemented, so here’s a class that extends `Pet` and defines `comeToMaster`:
```scala
class Dog(name: String) extends Pet {
@@ -195,12 +195,3 @@ I'm running
```
This example works because all of the methods in the `TailWagger` and `Runner` traits are defined (they’re not abstract).
-
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/traits-interfaces.md b/_overviews/scala-book/traits-interfaces.md
index a10ed18a61..1aab8ee4e8 100644
--- a/_overviews/scala-book/traits-interfaces.md
+++ b/_overviews/scala-book/traits-interfaces.md
@@ -5,11 +5,11 @@ title: Using Scala Traits as Interfaces
description: This page shows how to use Scala traits just like Java interfaces, including several examples.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 25
outof: 54
previous-page: traits-intro
next-page: traits-abstract-mixins
+new-version: /scala3/book/domain-modeling-tools.html#traits
---
## Using Scala Traits as Interfaces
@@ -146,10 +146,3 @@ Key points of this code:
- Use `with` to extend subsequent traits
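A compact, hedged sketch of those key points (the trait and class names here are illustrative):

```scala
trait TailWagger {
  def startTail(): Unit
  def stopTail(): Unit
}

trait Runner {
  def startRunning(): Unit
  def stopRunning(): Unit
}

// the first trait is mixed in with `extends`, each additional trait with `with`
class Dog extends TailWagger with Runner {
  def startTail(): Unit = println("tail is wagging")
  def stopTail(): Unit = println("tail is stopped")
  def startRunning(): Unit = println("I'm running")
  def stopRunning(): Unit = println("Stopped running")
}
```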
From what you’ve seen so far, Scala traits work just like Java interfaces. But there’s more ...
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/traits-intro.md b/_overviews/scala-book/traits-intro.md
index a6d1db0f26..66c7cf99d6 100644
--- a/_overviews/scala-book/traits-intro.md
+++ b/_overviews/scala-book/traits-intro.md
@@ -5,19 +5,14 @@ title: Scala Traits and Abstract Classes
description: An introduction to Scala traits and abstract classes.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 24
outof: 54
previous-page: enumerations-pizza-class
next-page: traits-interfaces
+new-version: /scala3/book/domain-modeling-tools.html#traits
---
Scala traits are a great feature of the language. As you’ll see in the following lessons, you can use them just like a Java interface, and you can also use them like abstract classes that have real methods. Scala classes can also extend and “mix in” multiple traits.
Scala also has the concept of an abstract class, and we’ll show when you should use an abstract class instead of a trait.
-
-
-
-
-
diff --git a/_overviews/scala-book/try-catch-finally.md b/_overviews/scala-book/try-catch-finally.md
index 5dee7890a4..a9e855cce1 100644
--- a/_overviews/scala-book/try-catch-finally.md
+++ b/_overviews/scala-book/try-catch-finally.md
@@ -5,11 +5,11 @@ title: try/catch/finally Expressions
description: This page shows how to use Scala's try/catch/finally construct, including several complete examples.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 18
outof: 54
previous-page: match-expressions
next-page: classes
+new-version: /scala3/book/control-structures.html#trycatchfinally
---
@@ -58,9 +58,3 @@ catch {
## More later
We’ll cover more details about Scala’s try/catch/finally syntax in later lessons, such as in the “Functional Error Handling” lessons, but these examples demonstrate how the syntax works. A great thing about the syntax is that it’s consistent with the `match` expression syntax. This makes your code consistent and easier to read, and you don’t have to remember a special/different syntax.
-
-
-
-
-
-
diff --git a/_overviews/scala-book/tuples.md b/_overviews/scala-book/tuples.md
index c2eb8e4225..dab29195c8 100644
--- a/_overviews/scala-book/tuples.md
+++ b/_overviews/scala-book/tuples.md
@@ -5,11 +5,11 @@ title: Tuples
description: This page is an introduction to the Scala 'tuple' data type, showing examples of how to use tuples in your Scala code.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 38
outof: 54
previous-page: misc
next-page: oop-pizza-example
+new-version: /scala3/book/taste-collections.html#tuples
---
@@ -115,9 +115,3 @@ For cases like this where it feels like overkill to create a class for the metho
## Tuples aren’t collections
Technically, Scala 2.x tuples aren’t collections classes, they’re just a convenient little container. Because they aren’t a collection, they don’t have methods like `map`, `filter`, etc.
-
-
-
-
-
-
diff --git a/_overviews/scala-book/two-notes-about-strings.md b/_overviews/scala-book/two-notes-about-strings.md
index fdcbd7a29a..31a097f758 100644
--- a/_overviews/scala-book/two-notes-about-strings.md
+++ b/_overviews/scala-book/two-notes-about-strings.md
@@ -5,11 +5,11 @@ title: Two Notes About Strings
description: This page shares two important notes about strings in Scala.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 11
outof: 54
previous-page: built-in-types
next-page: command-line-io
+new-version: /scala3/book/first-look-at-types.html#strings
---
@@ -110,6 +110,3 @@ our fathers ...
```
Because this is what you generally want, this is a common way to create multiline strings.
-
-
-
diff --git a/_overviews/scala-book/two-types-variables.md b/_overviews/scala-book/two-types-variables.md
index 678c89dc3d..3ce00a0e54 100644
--- a/_overviews/scala-book/two-types-variables.md
+++ b/_overviews/scala-book/two-types-variables.md
@@ -5,11 +5,11 @@ title: Two Types of Variables
description: Scala has two types of variables, val and var.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 8
outof: 54
previous-page: scala-repl
next-page: type-is-optional
+new-version: /scala3/book/taste-vars-data-types.html
---
@@ -94,8 +94,7 @@ object Hello3 extends App {
As before:
- Save that code in a file named *Hello3.scala*
-- Compile it with `scalac Hello3.scala`
-- Run it with `scala Hello3`
+- Compile and run it with `scala run Hello3.scala`
@@ -112,12 +111,3 @@ age: Int = 19
```
`val` fields can’t be redefined like that in the real world, but they can be redefined in the REPL playground.
-
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/type-is-optional.md b/_overviews/scala-book/type-is-optional.md
index 3b21654433..6a49d6b751 100644
--- a/_overviews/scala-book/type-is-optional.md
+++ b/_overviews/scala-book/type-is-optional.md
@@ -5,11 +5,11 @@ title: The Type is Optional
description: A note about explicit and implicit data type declarations in Scala.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 9
outof: 54
previous-page: two-types-variables
next-page: built-in-types
+new-version: /scala3/book/taste-vars-data-types.html#declaring-variable-types
---
@@ -56,13 +56,3 @@ val p: Person = new Person("Candy") // unnecessarily verbose
## Use the explicit form when you need to be clear
One place where you’ll want to show the data type is when you want to be clear about what you’re creating. That is, if you don’t explicitly declare the data type, the compiler may make a wrong assumption about what you want to create. Some examples of this are when you want to create numbers with specific data types. We show this in the next lesson.
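For instance, here is a hedged sketch of the kind of declarations that sentence refers to:

```scala
val a = 1            // inferred as an Int
val b: Long = 1      // explicitly a Long
val c: Byte = 1      // explicitly a Byte
val d: Double = 1    // explicitly a Double
```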
-
-
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/vector-class.md b/_overviews/scala-book/vector-class.md
index 610aa2e41f..7da81e3125 100644
--- a/_overviews/scala-book/vector-class.md
+++ b/_overviews/scala-book/vector-class.md
@@ -5,11 +5,11 @@ title: The Vector Class
description: This page provides examples of the Scala 'Vector' class, including how to add and remove elements from a Vector.
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 31
outof: 54
previous-page: list-class
next-page: map-class
+new-version: /scala3/book/collections-classes.html#vector
---
[The Vector class](https://www.scala-lang.org/api/current/scala/collection/immutable/Vector.html) is an indexed, immutable sequence. The “indexed” part of the description means that you can access `Vector` elements very rapidly by their index value, such as accessing `listOfPeople(999999)`.
@@ -52,13 +52,13 @@ The REPL shows how this works:
```scala
scala> val a = Vector(1,2,3)
-a: Vector[Int] = List(1, 2, 3)
+a: Vector[Int] = Vector(1, 2, 3)
scala> val b = a :+ 4
-b: Vector[Int] = List(1, 2, 3, 4)
+b: Vector[Int] = Vector(1, 2, 3, 4)
scala> val b = a ++ Vector(4, 5)
-b: Vector[Int] = List(1, 2, 3, 4, 5)
+b: Vector[Int] = Vector(1, 2, 3, 4, 5)
```
You can also *prepend* elements like this:
@@ -77,10 +77,10 @@ Once again the REPL shows how this works:
```scala
scala> val b = 0 +: a
-b: Vector[Int] = List(0, 1, 2, 3)
+b: Vector[Int] = Vector(0, 1, 2, 3)
scala> val b = Vector(-1, 0) ++: a
-b: Vector[Int] = List(-1, 0, 1, 2, 3)
+b: Vector[Int] = Vector(-1, 0, 1, 2, 3)
```
Because `Vector` is not a linked-list (like `List`), you can prepend and append elements to it, and the speed of both approaches should be similar.
@@ -96,11 +96,3 @@ Joel
Chris
Ed
```
-
-
-
-
-
-
-
-
diff --git a/_overviews/scala-book/where-next.md b/_overviews/scala-book/where-next.md
index 4b045d7182..9210e690e7 100644
--- a/_overviews/scala-book/where-next.md
+++ b/_overviews/scala-book/where-next.md
@@ -5,14 +5,12 @@ title: Where To Go Next
description: Where to go next after reading the Scala Book
partof: scala_book
overview-name: Scala Book
-discourse: true
num: 54
outof: 54
previous-page: futures
+new-version: /scala3/book/where-next.html
---
We hope you enjoyed this introduction to the Scala programming language, and we also hope we were able to share some of the beauty of the language.
As you continue working with Scala, you can find many more details at the [Guides and Overviews section]({{site.baseurl}}/overviews/index.html) of our website.
-
-
diff --git a/_overviews/scala3-book/ca-context-bounds.md b/_overviews/scala3-book/ca-context-bounds.md
new file mode 100644
index 0000000000..d4346ed94c
--- /dev/null
+++ b/_overviews/scala3-book/ca-context-bounds.md
@@ -0,0 +1,123 @@
+---
+title: Context Bounds
+type: section
+description: This page demonstrates Context Bounds in Scala.
+languages: [ru, zh-cn]
+num: 63
+previous-page: ca-context-parameters
+next-page: ca-given-imports
+---
+
+In many situations the name of a [context parameter]({% link _overviews/scala3-book/ca-context-parameters.md %}#context-parameters) does not have to be mentioned explicitly, since it is only used by the compiler in synthesized arguments for other context parameters.
+In that case you don’t have to define a parameter name, and can just provide the parameter type.
+
+
+## Background
+
+For example, consider a method `maxElement` that returns the maximum value in a collection:
+
+{% tabs context-bounds-max-named-param class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+def maxElement[A](as: List[A])(implicit ord: Ord[A]): A =
+ as.reduceLeft(max(_, _)(ord))
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+def maxElement[A](as: List[A])(using ord: Ord[A]): A =
+ as.reduceLeft(max(_, _)(using ord))
+```
+{% endtab %}
+
+{% endtabs %}
+
+The method `maxElement` takes a _context parameter_ of type `Ord[A]` only to pass it on as an argument to the method
+`max`.
+
+For the sake of completeness, here are the definitions of `max` and `Ord` (note that in practice we would use the
+existing method `max` on `List`, but we made up this example for illustration purposes):
+
+{% tabs context-bounds-max-ord class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+/** Defines how to compare values of type `A` */
+trait Ord[A] {
+ def greaterThan(a1: A, a2: A): Boolean
+}
+
+/** Returns the maximum of two values */
+def max[A](a1: A, a2: A)(implicit ord: Ord[A]): A =
+ if (ord.greaterThan(a1, a2)) a1 else a2
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+/** Defines how to compare values of type `A` */
+trait Ord[A]:
+ def greaterThan(a1: A, a2: A): Boolean
+
+/** Returns the maximum of two values */
+def max[A](a1: A, a2: A)(using ord: Ord[A]): A =
+ if ord.greaterThan(a1, a2) then a1 else a2
+```
+{% endtab %}
+
+{% endtabs %}
+
+Note that the method `max` takes a context parameter of type `Ord[A]`, like the method `maxElement`.
+
+## Omitting context arguments
+
+Since `ord` is a context parameter in the method `max`, the compiler can supply it for us in the implementation of `maxElement`,
+when we call the method `max`:
+
+{% tabs context-bounds-context class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+def maxElement[A](as: List[A])(implicit ord: Ord[A]): A =
+ as.reduceLeft(max(_, _))
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+def maxElement[A](as: List[A])(using Ord[A]): A =
+ as.reduceLeft(max(_, _))
+```
+
+Note that, because we don’t need to explicitly pass it to the method `max`, we can leave out its name in the definition
+of the method `maxElement`. This is an _anonymous context parameter_.
+{% endtab %}
+
+{% endtabs %}
+
+## Context bounds
+
+Given that background, a _context bound_ is a shorthand syntax for expressing the pattern of, “a context parameter applied to a type parameter.”
+
+Using a context bound, the `maxElement` method can be written like this:
+
+{% tabs context-bounds-max-rewritten %}
+
+{% tab 'Scala 2 and 3' %}
+
+```scala
+def maxElement[A: Ord](as: List[A]): A =
+ as.reduceLeft(max(_, _))
+```
+
+{% endtab %}
+
+{% endtabs %}
+
+
+A bound like `: Ord` on a type parameter `A` of a method or class indicates a context parameter with type `Ord[A]`.
+Under the hood, the compiler transforms this syntax into the one shown in the Background section.
+
+For more information about context bounds, see the [“What are context bounds?”]({% link _overviews/FAQ/index.md %}#what-are-context-bounds) section of the Scala FAQ.
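+
+For example, with an instance of `Ord[Int]` in scope, the context bound on `maxElement` is satisfied automatically. A small sketch, building on the `Ord`, `max`, and `maxElement` definitions above:
+
+{% tabs context-bounds-usage class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+implicit val intOrd: Ord[Int] = new Ord[Int] {
+  def greaterThan(a1: Int, a2: Int): Boolean = a1 > a2
+}
+
+maxElement(List(3, 7, 2)) // 7
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+given Ord[Int] with
+  def greaterThan(a1: Int, a2: Int): Boolean = a1 > a2
+
+maxElement(List(3, 7, 2)) // 7
+```
+{% endtab %}
+
+{% endtabs %}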
diff --git a/_overviews/scala3-book/ca-context-parameters.md b/_overviews/scala3-book/ca-context-parameters.md
new file mode 100644
index 0000000000..3da62d4b3b
--- /dev/null
+++ b/_overviews/scala3-book/ca-context-parameters.md
@@ -0,0 +1,157 @@
+---
+title: Context Parameters
+type: section
+description: This page demonstrates how to declare context parameters, and how the compiler infers them at call-site.
+languages: [ru, zh-cn]
+num: 62
+previous-page: ca-extension-methods
+next-page: ca-context-bounds
+redirect_from: /scala3/book/ca-given-using-clauses.html
+---
+
+Scala offers two important features for contextual abstraction:
+
+- **Context Parameters** allow you to specify parameters that, at the call-site, can be omitted by the programmer and should be automatically provided by the context.
+- **Given Instances** (in Scala 3) or **Implicit Definitions** (in Scala 2) are terms that can be used by the Scala compiler to fill in the missing arguments.
+
+## Context Parameters
+
+When designing a system, often context information like _configuration_ or settings need to be provided to the different components of your system.
+One common way to achieve this is by passing the configuration as an additional argument (or arguments) to your methods.
+
+In the following example, we define a case class `Config` to model some website configuration and pass it around in the different methods.
+
+{% tabs example %}
+{% tab 'Scala 2 and 3' %}
+```scala
+case class Config(port: Int, baseUrl: String)
+
+def renderWebsite(path: String, config: Config): String =
+  "<html>" + renderWidget(List("cart"), config) + "</html>"
+
+def renderWidget(items: List[String], config: Config): String = ???
+
+val config = Config(8080, "docs.scala-lang.org")
+renderWebsite("/home", config)
+```
+{% endtab %}
+{% endtabs %}
+
+Let us assume that the configuration does not change throughout most of our code base.
+Passing `config` to each and every method call (like `renderWidget`) becomes very tedious and makes our program more difficult to read, since we need to ignore the `config` argument.
+
+### Marking parameters as contextual
+
+We can mark some parameters of our methods as _contextual_.
+
+{% tabs 'contextual-parameters' class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+def renderWebsite(path: String)(implicit config: Config): String =
+  "<html>" + renderWidget(List("cart")) + "</html>"
+ // ^
+ // no argument config required anymore
+
+def renderWidget(items: List[String])(implicit config: Config): String = ???
+```
+{% endtab %}
+{% tab 'Scala 3' %}
+```scala
+def renderWebsite(path: String)(using config: Config): String =
+  "<html>" + renderWidget(List("cart")) + "</html>"
+ // ^
+ // no argument config required anymore
+
+def renderWidget(items: List[String])(using config: Config): String = ???
+```
+{% endtab %}
+{% endtabs %}
+
+By starting a parameter section with the keyword `using` in Scala 3 or `implicit` in Scala 2, we tell the compiler that at the call-site it should automatically find an argument with the correct type.
+The Scala compiler thus performs **term inference**.
+
+In our call to `renderWidget(List("cart"))` the Scala compiler will see that there is a term of type `Config` in scope (the `config`) and automatically provide it to `renderWidget`.
+So the program is equivalent to the one above.
+
+In fact, since we do not need to refer to `config` in our implementation of `renderWebsite` anymore, we can even omit its name in the signature in Scala 3:
+
+{% tabs 'anonymous' %}
+{% tab 'Scala 3 Only' %}
+```scala
+// no need to come up with a parameter name
+// vvvvvvvvvvvvv
+def renderWebsite(path: String)(using Config): String =
+  "<html>" + renderWidget(List("cart")) + "</html>"
+```
+{% endtab %}
+{% endtabs %}
+
+In Scala 2, the name of implicit parameters is still mandatory.
+
+### Explicitly providing contextual arguments
+
+We have seen how to _abstract_ over contextual parameters and that the Scala compiler can provide arguments automatically for us.
+But how can we specify which configuration to use for our call to `renderWebsite`?
+
+{% tabs 'explicit' class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+We explicitly supply the argument value as if it were a regular argument:
+```scala
+renderWebsite("/home")(config)
+```
+{% endtab %}
+{% tab 'Scala 3' %}
+Just as we marked the parameter section with `using`, we can also explicitly provide contextual arguments with `using`:
+```scala
+renderWebsite("/home")(using config)
+```
+{% endtab %}
+{% endtabs %}
+
+Explicitly providing contextual arguments can be useful if multiple valid values are in scope and we want to make sure that the correct one is passed to the function.
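+
+For example, if both a staging and a production configuration were in scope (the two values below are made up for illustration), we could pick one explicitly:
+
+{% tabs 'explicit-choice' class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+val stagingConfig = Config(8080, "staging.docs.scala-lang.org")
+val productionConfig = Config(8080, "docs.scala-lang.org")
+
+renderWebsite("/home")(stagingConfig)
+```
+{% endtab %}
+{% tab 'Scala 3' %}
+```scala
+val stagingConfig = Config(8080, "staging.docs.scala-lang.org")
+val productionConfig = Config(8080, "docs.scala-lang.org")
+
+renderWebsite("/home")(using stagingConfig)
+```
+{% endtab %}
+{% endtabs %}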
+
+For all other cases, as we will see in the next section, there is also another way to bring contextual values into scope.
+
+## Given Instances (Implicit Definitions in Scala 2)
+
+We have seen that we can explicitly pass arguments as contextual parameters.
+However, if there is _a single canonical value_ for a particular type, there is another preferred way to make it available to the Scala compiler: by marking it as `given` in Scala 3 or `implicit` in Scala 2.
+
+{% tabs 'instances' class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+implicit val config: Config = Config(8080, "docs.scala-lang.org")
+// ^^^^^^
+// this is the value the Scala compiler will infer
+// as argument to contextual parameters of type Config
+```
+{% endtab %}
+{% tab 'Scala 3' %}
+```scala
+val config = Config(8080, "docs.scala-lang.org")
+
+// this is the type that we want to provide the
+// canonical value for
+// vvvvvv
+given Config = config
+// ^^^^^^
+// this is the value the Scala compiler will infer
+// as argument to contextual parameters of type Config
+```
+{% endtab %}
+{% endtabs %}
+
+In the above example we specify that whenever a contextual parameter of type `Config` is omitted in the current scope, the compiler should infer `config` as an argument.
+
+Having defined a canonical value for the type `Config`, we can call `renderWebsite` as follows:
+
+```scala
+renderWebsite("/home")
+// ^
+// again no argument
+```
+
+A detailed guide to where Scala looks for canonical values can be found in [the FAQ]({% link _overviews/FAQ/index.md %}#where-does-scala-look-for-implicits).
+
+[reference]: {{ site.scala3ref }}/overview.html
+[blog-post]: /2020/11/06/explicit-term-inference-in-scala-3.html
diff --git a/_overviews/scala3-book/ca-contextual-abstractions-intro.md b/_overviews/scala3-book/ca-contextual-abstractions-intro.md
new file mode 100644
index 0000000000..8f7f5f79af
--- /dev/null
+++ b/_overviews/scala3-book/ca-contextual-abstractions-intro.md
@@ -0,0 +1,87 @@
+---
+title: Contextual Abstractions
+type: chapter
+description: This chapter provides an introduction to the Scala 3 concept of Contextual Abstractions.
+languages: [ru, zh-cn]
+num: 60
+previous-page: types-others
+next-page: ca-extension-methods
+---
+
+
+## Background
+
+Contextual abstractions are a way to abstract over context.
+They represent a unified paradigm with a great variety of use cases, among them:
+
+- Implementing type classes
+- Establishing context
+- Dependency injection
+- Expressing capabilities
+- Computing new types, and proving relationships between them
+
+Other languages have been influenced by Scala in this regard, e.g., Rust’s traits and Swift’s protocol extensions.
+Design proposals are also on the table for Kotlin as compile-time dependency resolution, for C# as Shapes and Extensions, and for F# as Traits.
+Contextual abstractions are also a common feature of theorem provers such as Coq or Agda.
+
+Even though these designs use different terminology, they’re all variants of the core idea of **term inference**: given a type, the compiler synthesizes a “canonical” term that has that type.
+
+## Scala 3 Redesign
+
+In Scala 2, contextual abstractions are supported by marking definitions (methods and values) or parameters as `implicit` (see [Context Parameters]({% link _overviews/scala3-book/ca-context-parameters.md %})).
+
+Scala 3 includes a redesign of contextual abstractions.
+While these concepts were gradually “discovered” in Scala 2, they’re now well known and understood, and the redesign takes advantage of that knowledge.
+
+The design of Scala 3 focuses on **intent** rather than **mechanism**.
+Instead of offering one very powerful feature of implicits, Scala 3 offers several use-case oriented features:
+
+- **Retroactively extending classes**.
+ In Scala 2, extension methods are encoded by using [implicit conversions][implicit-conversions] or [implicit classes]({% link _overviews/core/implicit-classes.md %}).
+ In contrast, in Scala 3 [extension methods][extension-methods] are now directly built into the language, leading to better error messages and improved type inference.
+
+- **Abstracting over contextual information**.
+ [Using clauses][givens] allow programmers to abstract over information that is available in the calling context and should be passed implicitly.
+ As an improvement over Scala 2 implicits, using clauses can be specified by type, freeing function signatures from term variable names that are never explicitly referred to.
+
+- **Providing Type-class instances**.
+ [Given instances][givens] allow programmers to define the _canonical value_ of a certain type.
+ This makes programming with [type-classes][type-classes] more straightforward without leaking implementation details.
+
+- **Viewing one type as another**.
+ Implicit conversions have been [redesigned][implicit-conversions] from the ground up as instances of a type-class `Conversion`.
+
+- **Higher-order contextual abstractions**.
+ The _all-new_ feature of [context functions][contextual-functions] makes contextual abstractions a first-class citizen.
+  They are an important tool for library authors and allow them to express concise domain-specific languages.
+
+- **Actionable feedback from the compiler**.
+  If an implicit parameter cannot be resolved by the compiler, it now provides you with [import suggestions](https://www.scala-lang.org/blog/2020/05/05/scala-3-import-suggestions.html) that may fix the problem.
+
+
+## Benefits
+
+These changes in Scala 3 achieve a better separation of term inference from the rest of the language:
+
+- There’s a single way to define givens
+- There’s a single way to introduce implicit parameters and arguments
+- There’s a separate way to [import givens][given-imports] that does not allow them to hide in a sea of normal imports
+- There’s a single way to define an [implicit conversion][implicit-conversions], which is clearly marked as such, and does not require special syntax
+
+Benefits of these changes include:
+
+- The new design thus avoids feature interactions and makes the language more consistent
+- It makes implicits easier to learn and harder to abuse
+- It greatly improves the clarity of the 95% of Scala programs that use implicits
+- It has the potential to enable term inference in a principled way that is also accessible and friendly
+
+This chapter introduces many of these new features in the following sections.
+
+[givens]: {% link _overviews/scala3-book/ca-context-parameters.md %}
+[given-imports]: {% link _overviews/scala3-book/ca-given-imports.md %}
+[implicit-conversions]: {% link _overviews/scala3-book/ca-implicit-conversions.md %}
+[extension-methods]: {% link _overviews/scala3-book/ca-extension-methods.md %}
+[context-bounds]: {% link _overviews/scala3-book/ca-context-bounds.md %}
+[type-classes]: {% link _overviews/scala3-book/ca-type-classes.md %}
+[equality]: {% link _overviews/scala3-book/ca-multiversal-equality.md %}
+[contextual-functions]: {{ site.scala3ref }}/contextual/context-functions.html
diff --git a/_overviews/scala3-book/ca-extension-methods.md b/_overviews/scala3-book/ca-extension-methods.md
new file mode 100644
index 0000000000..49f07b45be
--- /dev/null
+++ b/_overviews/scala3-book/ca-extension-methods.md
@@ -0,0 +1,126 @@
+---
+title: Extension Methods
+type: section
+description: This page demonstrates how Extension Methods work in Scala 3.
+languages: [ru, zh-cn]
+num: 61
+previous-page: ca-contextual-abstractions-intro
+next-page: ca-context-parameters
+scala3: true
+versionSpecific: true
+---
+
+In Scala 2, a similar result could be achieved with [implicit classes]({% link _overviews/core/implicit-classes.md %}).
+
+---
+
+Extension methods let you add methods to a type after the type is defined, i.e., they let you add new methods to closed classes.
+For example, imagine that someone else has created a `Circle` class:
+
+{% tabs ext1 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+case class Circle(x: Double, y: Double, radius: Double)
+```
+{% endtab %}
+{% endtabs %}
+
+Now imagine that you need a `circumference` method, but you can’t modify their source code.
+Before the concept of term inference was introduced into programming languages, the only thing you could do was write a method in a separate class or object like this:
+
+{% tabs ext2 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+object CircleHelpers {
+ def circumference(c: Circle): Double = c.radius * math.Pi * 2
+}
+```
+{% endtab %}
+{% tab 'Scala 3' %}
+```scala
+object CircleHelpers:
+ def circumference(c: Circle): Double = c.radius * math.Pi * 2
+```
+{% endtab %}
+{% endtabs %}
+
+Then you’d use that method like this:
+
+{% tabs ext3 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val aCircle = Circle(2, 3, 5)
+
+// without extension methods
+CircleHelpers.circumference(aCircle)
+```
+{% endtab %}
+{% endtabs %}
+
+But with extension methods you can create a `circumference` method to work on `Circle` instances:
+
+{% tabs ext4 %}
+{% tab 'Scala 3 Only' %}
+```scala
+extension (c: Circle)
+ def circumference: Double = c.radius * math.Pi * 2
+```
+{% endtab %}
+{% endtabs %}
+
+In this code:
+
+- `Circle` is the type that the extension method `circumference` will be added to
+- The `c: Circle` syntax lets you reference the variable `c` in your extension method(s)
+
+Then in your code you use `circumference` just as though it were originally defined in the `Circle` class:
+
+{% tabs ext5 %}
+{% tab 'Scala 3 Only' %}
+```scala
+aCircle.circumference
+```
+{% endtab %}
+{% endtabs %}
+
+### Import extension method
+
+Imagine that `circumference` is defined in the package `lib`; you can import it like this:
+
+{% tabs ext6 %}
+{% tab 'Scala 3 Only' %}
+```scala
+import lib.circumference
+
+aCircle.circumference
+```
+{% endtab %}
+{% endtabs %}
+
+If the import is missing, the compiler helps you by showing a detailed error message such as the following:
+
+```text
+value circumference is not a member of Circle, but could be made available as an extension method.
+
+The following import might fix the problem:
+
+ import lib.circumference
+```
+
+## Discussion
+
+The `extension` keyword declares that you’re about to define one or more extension methods on the type that’s put in parentheses.
+To define multiple extension methods on a type, use this syntax:
+
+{% tabs ext7 %}
+{% tab 'Scala 3 Only' %}
+```scala
+extension (c: Circle)
+ def circumference: Double = c.radius * math.Pi * 2
+ def diameter: Double = c.radius * 2
+ def area: Double = math.Pi * c.radius * c.radius
+```
+{% endtab %}
+{% endtabs %}
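+
+With those definitions in scope, the new methods can be called just like regular members. A short usage sketch, reusing the `aCircle` value from above:
+
+{% tabs ext8 %}
+{% tab 'Scala 3 Only' %}
+```scala
+aCircle.circumference // 31.41592653589793
+aCircle.diameter      // 10.0
+aCircle.area          // 78.53981633974483
+```
+{% endtab %}
+{% endtabs %}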
+
+
diff --git a/_overviews/scala3-book/ca-given-imports.md b/_overviews/scala3-book/ca-given-imports.md
new file mode 100644
index 0000000000..bc7c0754f4
--- /dev/null
+++ b/_overviews/scala3-book/ca-given-imports.md
@@ -0,0 +1,51 @@
+---
+title: Given Imports
+type: section
+description: This page demonstrates how 'given' import statements work in Scala 3.
+languages: [ru, zh-cn]
+num: 64
+previous-page: ca-context-bounds
+next-page: ca-type-classes
+scala3: true
+versionSpecific: true
+---
+
+
+To make it clearer where givens in the current scope are coming from, a special form of the `import` statement is used to import `given` instances.
+The basic form is shown in this example:
+
+```scala
+object A:
+ class TC
+ given tc: TC = ???
+ def f(using TC) = ???
+
+object B:
+ import A.* // import all non-given members
+ import A.given // import the given instance
+```
+
+In this code the `import A.*` clause of object `B` imports all members of `A` *except* the `given` instance, `tc`.
+Conversely, the second import, `import A.given`, imports *only* that `given` instance.
+The two `import` clauses can also be merged into one:
+
+```scala
+object B:
+ import A.{given, *}
+```
+
+## Discussion
+
+The wildcard selector `*` brings all definitions other than givens or extensions into scope, whereas a `given` selector brings all *givens*---including those resulting from extensions---into scope.
+
+These rules have two main benefits:
+
+- It’s clearer where givens in the current scope are coming from.
+ In particular, it’s not possible to hide imported givens in a long list of other wildcard imports.
+- It enables importing all givens without importing anything else.
+  This is important because givens can be anonymous, so the usual use of named imports is not practical (see the sketch below).
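+
+When a given is anonymous, it can still be imported by its type. A minimal sketch, building on the object `A` defined above:
+
+```scala
+object C:
+  import A.*          // imports f (and other non-given members)
+  import A.{given TC} // imports only the givens in A that are instances of TC
+
+  def g = f           // ok: a given instance of TC is in scope
+```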
+
+More examples of the “import given” syntax are shown in the [Packaging and Imports chapter][imports].
+
+
+[imports]: {% link _overviews/scala3-book/packaging-imports.md %}
diff --git a/_overviews/scala3-book/ca-implicit-conversions.md b/_overviews/scala3-book/ca-implicit-conversions.md
new file mode 100644
index 0000000000..2c2884aa56
--- /dev/null
+++ b/_overviews/scala3-book/ca-implicit-conversions.md
@@ -0,0 +1,223 @@
+---
+title: Implicit Conversions
+type: section
+description: This page demonstrates how to implement Implicit Conversions in Scala.
+languages: [ru, zh-cn]
+num: 67
+previous-page: ca-multiversal-equality
+next-page: ca-summary
+---
+
+Implicit conversions are a powerful Scala feature that allows users to supply an argument
+of one type as if it were another type, to avoid boilerplate.
+
+> Note that in Scala 2, implicit conversions were also used to provide additional members
+> to closed classes (see [Implicit Classes]({% link _overviews/core/implicit-classes.md %})).
+> In Scala 3, we recommend addressing this use case by defining [extension methods] instead
+> of implicit conversions (although the standard library still relies on implicit conversions
+> for historical reasons).
+
+## Example
+
+Consider for instance a method `findUserById` that takes a parameter of type `Long`:
+
+{% tabs implicit-conversions-1 %}
+{% tab 'Scala 2 and 3' %}
+~~~ scala
+def findUserById(id: Long): Option[User]
+~~~
+{% endtab %}
+{% endtabs %}
+
+We omit the definition of the type `User` for the sake of brevity; it does not matter for
+our example.
+
+In Scala, it is possible to call the method `findUserById` with an argument of type `Int`
+instead of the expected type `Long`, because the argument will be implicitly converted
+into the type `Long`:
+
+{% tabs implicit-conversions-2 %}
+{% tab 'Scala 2 and 3' %}
+~~~ scala
+val id: Int = 42
+findUserById(id) // OK
+~~~
+{% endtab %}
+{% endtabs %}
+
+This code does not fail to compile with an error like “type mismatch: expected `Long`,
+found `Int`” because there is an implicit conversion that converts the argument `id`
+to a value of type `Long`.
+
+## Detailed Explanation
+
+This section describes how to define and use implicit conversions.
+
+### Defining an Implicit Conversion
+
+{% tabs implicit-conversions-3 class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+In Scala 2, an implicit conversion from type `S` to type `T` is defined by an
+[implicit class]({% link _overviews/core/implicit-classes.md %}) `T` that takes
+a single constructor parameter of type `S`, an
+[implicit value]({% link _overviews/scala3-book/ca-context-parameters.md %}) of
+function type `S => T`, or by an implicit method convertible to a value of that type.
+
+For example, the following code defines an implicit conversion from `Int` to `Long`:
+
+~~~ scala
+import scala.language.implicitConversions
+
+implicit def int2long(x: Int): Long = x.toLong
+~~~
+
+This is an implicit method convertible to a value of type `Int => Long`.
+
+See the section “Beware the Power of Implicit Conversions” below for an
+explanation of the clause `import scala.language.implicitConversions`
+at the beginning.
+{% endtab %}
+
+{% tab 'Scala 3' %}
+In Scala 3, an implicit conversion from type `S` to type `T` is defined by a
+[`given` instance]({% link _overviews/scala3-book/ca-context-parameters.md %})
+of type `scala.Conversion[S, T]`. For compatibility with Scala 2, it can also
+be defined by an implicit method (read more in the Scala 2 tab).
+
+For example, this code defines an implicit conversion from `Int` to `Long`:
+
+```scala
+given int2long: Conversion[Int, Long] with
+ def apply(x: Int): Long = x.toLong
+```
+
+Like other given definitions, implicit conversions can be anonymous:
+
+~~~ scala
+given Conversion[Int, Long] with
+ def apply(x: Int): Long = x.toLong
+~~~
+
+Using an alias, this can be expressed more concisely as:
+
+```scala
+given Conversion[Int, Long] = (x: Int) => x.toLong
+```
+{% endtab %}
+
+{% endtabs %}
+
+### Using an Implicit Conversion
+
+Implicit conversions are applied in two situations:
+
+1. If an expression `e` is of type `S`, and `S` does not conform to the expression's expected type `T`.
+2. In a selection `e.m` with `e` of type `S`, if the selector `m` does not denote a member of `S`
+ (to support Scala-2-style [extension methods]).
+
+In the first case, a conversion `c` is searched for, which is applicable to `e` and whose result type conforms to `T`.
+
+In our example above, when we pass the argument `id` of type `Int` to the method `findUserById`,
+the implicit conversion `int2long(id)` is inserted.
+
+In the second case, a conversion `c` is searched for, which is applicable to `e` and whose result contains a member named `m`.
+
+An example is to compare two strings `"foo" < "bar"`. In this case, `String` has no member `<`, so the implicit conversion `Predef.augmentString("foo") < "bar"` is inserted. (`scala.Predef` is automatically imported into all Scala programs.)
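+
+As another sketch of the second case, a conversion whose result type has the selected member makes the following compile (the `User` class and its `greet` member are made up here for illustration):
+
+{% tabs implicit-conversions-selection class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+~~~ scala
+import scala.language.implicitConversions
+
+case class User(name: String) {
+  def greet: String = s"Hello, $name"
+}
+
+implicit def stringToUser(name: String): User = User(name)
+
+"martin".greet // "Hello, martin": stringToUser("martin").greet is inserted
+~~~
+{% endtab %}
+{% tab 'Scala 3' %}
+~~~ scala
+import scala.language.implicitConversions
+
+case class User(name: String):
+  def greet: String = s"Hello, $name"
+
+given Conversion[String, User] = (name: String) => User(name)
+
+"martin".greet // "Hello, martin": the conversion to User is inserted
+~~~
+{% endtab %}
+{% endtabs %}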
+
+### How Are Implicit Conversions Brought Into Scope?
+
+When the compiler searches for applicable conversions:
+
+- first, it looks into the current lexical scope
+ - implicit conversions defined in the current scope or the outer scopes
+ - imported implicit conversions
+ - implicit conversions imported by a wildcard import (Scala 2 only)
+- then, it looks into the [companion objects] _associated_ with the argument
+ type `S` or the expected type `T`. The companion objects associated with
+ a type `X` are:
+ - the companion object `X` itself
+ - the companion objects associated with any of `X`’s inherited types
+ - the companion objects associated with any type argument in `X`
+ - if `X` is an inner class, the outer objects in which it is embedded
+
+For instance, consider an implicit conversion `fromStringToUser` defined in an
+object `Conversions`:
+
+{% tabs implicit-conversions-4 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+~~~ scala
+import scala.language.implicitConversions
+
+object Conversions {
+ implicit def fromStringToUser(name: String): User = User(name)
+}
+~~~
+{% endtab %}
+{% tab 'Scala 3' %}
+~~~ scala
+object Conversions:
+ given fromStringToUser: Conversion[String, User] = (name: String) => User(name)
+~~~
+{% endtab %}
+{% endtabs %}
+
+The following imports would equivalently bring the conversion into scope:
+
+{% tabs implicit-conversions-5 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+~~~ scala
+import Conversions.fromStringToUser
+// or
+import Conversions._
+~~~
+{% endtab %}
+{% tab 'Scala 3' %}
+~~~ scala
+import Conversions.fromStringToUser
+// or
+import Conversions.given
+// or
+import Conversions.{given Conversion[String, User]}
+~~~
+
+Note that in Scala 3, a wildcard import (i.e., `import Conversions.*`) does not import given
+definitions.
+{% endtab %}
+{% endtabs %}
+
+In the introductory example, the conversion from `Int` to `Long` does not require an import
+because it is defined in the object `Int`, which is the companion object of the type `Int`.
+
+Further reading:
+[Where does Scala look for implicits? (on Stackoverflow)](https://stackoverflow.com/a/5598107).
+
+### Beware the Power of Implicit Conversions
+
+{% tabs implicit-conversions-6 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+Because implicit conversions can have pitfalls if used indiscriminately, the compiler warns when compiling the implicit conversion definition.
+
+To turn off the warnings take either of these actions:
+
+* Import `scala.language.implicitConversions` into the scope of the implicit conversion definition
+* Invoke the compiler with `-language:implicitConversions`
+
+No warning is emitted when the conversion is applied by the compiler.
+{% endtab %}
+{% tab 'Scala 3' %}
+Because implicit conversions can have pitfalls if used indiscriminately, the compiler warns in two situations:
+- when compiling a Scala 2 style implicit conversion definition.
+- at the call site where a given instance of `scala.Conversion` is inserted as a conversion.
+
+To turn off the warnings take either of these actions:
+
+- Import `scala.language.implicitConversions` into the scope of:
+ - a Scala 2 style implicit conversion definition
+ - call sites where a given instance of `scala.Conversion` is inserted as a conversion.
+- Invoke the compiler with `-language:implicitConversions`
+{% endtab %}
+{% endtabs %}
+
+[extension methods]: {% link _overviews/scala3-book/ca-extension-methods.md %}
+[companion objects]: {% link _overviews/scala3-book/domain-modeling-tools.md %}#companion-objects
diff --git a/_overviews/scala3-book/ca-multiversal-equality.md b/_overviews/scala3-book/ca-multiversal-equality.md
new file mode 100644
index 0000000000..dfc6b4cdb0
--- /dev/null
+++ b/_overviews/scala3-book/ca-multiversal-equality.md
@@ -0,0 +1,201 @@
+---
+title: Multiversal Equality
+type: section
+description: This page demonstrates how to implement Multiversal Equality in Scala 3.
+languages: [ru, zh-cn]
+num: 66
+previous-page: ca-type-classes
+next-page: ca-implicit-conversions
+scala3: true
+versionSpecific: true
+---
+
+Previously, Scala had *universal equality*: Two values of any types could be compared with each other using `==` and `!=`.
+This came from the fact that `==` and `!=` are implemented in terms of Java’s `equals` method, which can also compare values of any two reference types.
+
+Universal equality is convenient, but it’s also dangerous since it undermines type safety.
+For instance, let’s assume that after some refactoring, you’re left with an erroneous program where a value `y` has type `S` instead of the correct type `T`:
+
+```scala
+val x = ... // of type T
+val y = ... // of type S, but should be T
+x == y // typechecks, will always yield false
+```
+
+If `y` gets compared to other values of type `T`, the program will still typecheck, since values of all types can be compared with each other.
+But it will probably give unexpected results and fail at runtime.
+
+A type-safe programming language can do better, and multiversal equality is an opt-in way to make universal equality safer.
+It uses the binary type class `CanEqual` to indicate that values of two given types can be compared with each other.
+
+
+## Allowing the comparison of class instances
+
+By default, in Scala 3 you can still create an equality comparison like this:
+
+```scala
+case class Cat(name: String)
+case class Dog(name: String)
+val d = Dog("Fido")
+val c = Cat("Morris")
+
+d == c // false, but it compiles
+```
+
+But with Scala 3 you can disable such comparisons.
+By (a) importing `scala.language.strictEquality` or (b) using the `-language:strictEquality` compiler flag, this comparison no longer compiles:
+
+```scala
+import scala.language.strictEquality
+
+val rover = Dog("Rover")
+val fido = Dog("Fido")
+println(rover == fido) // compiler error
+
+// compiler error message:
+// Values of types Dog and Dog cannot be compared with == or !=
+```
+
+
+## Enabling comparisons
+
+There are two ways to enable this comparison using the Scala 3 `CanEqual` type class.
+For simple cases like this, your class can *derive* the `CanEqual` class:
+
+```scala
+// Option 1
+case class Dog(name: String) derives CanEqual
+```
+
+As you’ll see in a few moments, when you need more flexibility you can also use this syntax:
+
+```scala
+// Option 2
+case class Dog(name: String)
+given CanEqual[Dog, Dog] = CanEqual.derived
+```
+
+Either of those two approaches now lets `Dog` instances be compared to each other.
+
+
+## A more real-world example
+
+For a more real-world example, imagine you have an online bookstore and want to allow or disallow the comparison of physical printed books and audiobooks.
+With Scala 3 you start by enabling multiversal equality as shown in the previous example:
+
+```scala
+// [1] add this import, or this command line flag: -language:strictEquality
+import scala.language.strictEquality
+```
+
+Then create your domain objects as usual:
+
+```scala
+// [2] create your class hierarchy
+trait Book:
+ def author: String
+ def title: String
+ def year: Int
+
+case class PrintedBook(
+ author: String,
+ title: String,
+ year: Int,
+ pages: Int
+) extends Book
+
+case class AudioBook(
+ author: String,
+ title: String,
+ year: Int,
+ lengthInMinutes: Int
+) extends Book
+```
+
+Finally, use `CanEqual` to define which comparisons you want to allow:
+
+```scala
+// [3] create type class instances to define the allowed comparisons.
+// allow `PrintedBook == PrintedBook`
+// allow `AudioBook == AudioBook`
+given CanEqual[PrintedBook, PrintedBook] = CanEqual.derived
+given CanEqual[AudioBook, AudioBook] = CanEqual.derived
+
+// [4a] comparing two printed books works as desired
+val p1 = PrintedBook("George Orwell", "1984", 1961, 328)
+val p2 = PrintedBook("George Orwell", "1984", 1961, 328)
+println(p1 == p2) // true
+
+// [4b] you can’t compare a printed book and an audiobook
+val pBook = PrintedBook("George Orwell", "1984", 1961, 328)
+val aBook = AudioBook("George Orwell", "1984", 2006, 682)
+println(pBook == aBook) // compiler error
+```
+
+The last line of code results in this compiler error message:
+
+````
+Values of types PrintedBook and AudioBook cannot be compared with == or !=
+````
+
+This is how multiversal equality catches illegal type comparisons at compile time.
+
+
+### Enabling “PrintedBook == AudioBook”
+
+That works as desired, but in some situations you may want to allow the comparison of physical books to audiobooks.
+When you want this, create these two additional equality comparisons:
+
+```scala
+// allow `PrintedBook == AudioBook`, and `AudioBook == PrintedBook`
+given CanEqual[PrintedBook, AudioBook] = CanEqual.derived
+given CanEqual[AudioBook, PrintedBook] = CanEqual.derived
+```
+
+Now you can compare physical books to audiobooks without a compiler error:
+
+```scala
+println(pBook == aBook) // false
+println(aBook == pBook) // false
+```
+
+#### Implement “equals” to make them really work
+
+While these comparisons are now allowed, they will always be `false` because their `equals` methods don’t know how to make these comparisons.
+Therefore, the solution is to override the `equals` methods for each class.
+For instance, when you override the `equals` method for `AudioBook`:
+
+```scala
+case class AudioBook(
+ author: String,
+ title: String,
+ year: Int,
+ lengthInMinutes: Int
+) extends Book:
+ // override to allow AudioBook to be compared to PrintedBook
+ override def equals(that: Any): Boolean = that match
+ case a: AudioBook =>
+ this.author == a.author
+ && this.title == a.title
+ && this.year == a.year
+ && this.lengthInMinutes == a.lengthInMinutes
+ case p: PrintedBook =>
+ this.author == p.author && this.title == p.title
+ case _ =>
+ false
+```
+
+You can now compare an `AudioBook` to a `PrintedBook`:
+
+```scala
+println(aBook == pBook) // true (works because of `equals` in `AudioBook`)
+println(pBook == aBook) // false
+```
+
+Currently, the `PrintedBook` class doesn’t override `equals`, so the second comparison returns `false`.
+To enable that comparison, just override the `equals` method in `PrintedBook`.
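+
+A matching override for `PrintedBook` might look like this (a sketch that mirrors the `AudioBook` version above):
+
+```scala
+case class PrintedBook(
+  author: String,
+  title: String,
+  year: Int,
+  pages: Int
+) extends Book:
+  // override to allow PrintedBook to be compared to AudioBook
+  override def equals(that: Any): Boolean = that match
+    case p: PrintedBook =>
+      this.author == p.author
+      && this.title == p.title
+      && this.year == p.year
+      && this.pages == p.pages
+    case a: AudioBook =>
+      this.author == a.author && this.title == a.title
+    case _ =>
+      false
+```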
+
+You can find additional information on [multiversal equality][ref-equal] in the reference documentation.
+
+
+[ref-equal]: {{ site.scala3ref }}/contextual/multiversal-equality.html
diff --git a/_overviews/scala3-book/ca-summary.md b/_overviews/scala3-book/ca-summary.md
new file mode 100644
index 0000000000..bdd8c58537
--- /dev/null
+++ b/_overviews/scala3-book/ca-summary.md
@@ -0,0 +1,34 @@
+---
+title: Summary
+type: section
+description: This page provides a summary of the Contextual Abstractions lessons.
+languages: [ru, zh-cn]
+num: 68
+previous-page: ca-implicit-conversions
+next-page: concurrency
+---
+
+This chapter provides an introduction to most Contextual Abstractions topics, including:
+
+- [Extension Methods]({% link _overviews/scala3-book/ca-extension-methods.md %})
+- [Given Instances and Using Clauses]({% link _overviews/scala3-book/ca-context-parameters.md %})
+- [Context Bounds]({% link _overviews/scala3-book/ca-context-bounds.md %})
+- [Given Imports]({% link _overviews/scala3-book/ca-given-imports.md %})
+- [Type Classes]({% link _overviews/scala3-book/ca-type-classes.md %})
+- [Multiversal Equality]({% link _overviews/scala3-book/ca-multiversal-equality.md %})
+- [Implicit Conversions]({% link _overviews/scala3-book/ca-implicit-conversions.md %})
+
+These features are all variants of the core idea of **term inference**: given a type, the compiler synthesizes a “canonical” term that has that type.
+
+A few more advanced topics aren’t covered here, including:
+
+- Conditional Given Instances
+- Type Class Derivation
+- Context Functions
+- By-Name Context Parameters
+- Relationship with Scala 2 Implicits
+
+Those topics are discussed in detail in the [Reference documentation][ref].
+
+
+[ref]: {{ site.scala3ref }}/contextual
diff --git a/_overviews/scala3-book/ca-type-classes.md b/_overviews/scala3-book/ca-type-classes.md
new file mode 100644
index 0000000000..2a56a5de47
--- /dev/null
+++ b/_overviews/scala3-book/ca-type-classes.md
@@ -0,0 +1,188 @@
+---
+title: Type Classes
+type: section
+description: This page demonstrates how to create and use type classes.
+languages: [ru, zh-cn]
+num: 65
+previous-page: ca-given-imports
+next-page: ca-multiversal-equality
+redirect_from: /scala3/book/types-type-classes.html
+---
+
+A _type class_ is an abstract, parameterized type that lets you add new behavior to any closed data type without using sub-typing.
+If you are coming from Java, you can think of type classes as something like [`java.util.Comparator[T]`][comparator].
+
+> The paper [“Type Classes as Objects and Implicits”][typeclasses-paper] (2010) by Oliveira et al. discusses the basic ideas behind type classes in Scala.
+> Even though the paper uses an older version of Scala, the ideas still hold to the current day.
+
+A type class is useful in multiple use-cases, for example:
+
+- Expressing how a type you don’t own---from the standard library or a third-party library---conforms to such behavior
+- Expressing such a behavior for multiple types without involving sub-typing relationships between those types
+
+Type classes are traits with one or more parameters whose implementations are provided as `given` instances in Scala 3 or `implicit` values in Scala 2.
+
+## Example
+
+For example, `Show` is a well-known type class in Haskell, and the following code shows one way to implement it in Scala.
+If you imagine that Scala classes don’t have a `toString` method, you can define a `Show` type class to add this behavior to any type that you want to be able to convert to a custom string.
+
+### The type class
+
+The first step in creating a type class is to declare a parameterized trait that has one or more abstract methods.
+Because `Showable` only has one method named `show`, it’s written like this:
+
+{% tabs 'definition' class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+// a type class
+trait Showable[A] {
+ def show(a: A): String
+}
+```
+{% endtab %}
+{% tab 'Scala 3' %}
+```scala
+// a type class
+trait Showable[A]:
+ extension (a: A) def show: String
+```
+{% endtab %}
+{% endtabs %}
+
+Notice that this approach is close to the usual object-oriented approach, where you would typically define a trait `Show` as follows:
+
+{% tabs 'trait' class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+// a trait
+trait Show {
+ def show: String
+}
+```
+{% endtab %}
+{% tab 'Scala 3' %}
+```scala
+// a trait
+trait Show:
+ def show: String
+```
+{% endtab %}
+{% endtabs %}
+
+There are a few important things to point out:
+
+1. Type-classes like `Showable` take a type parameter `A` to say which type we provide the implementation of `show` for; in contrast, classic traits like `Show` do not.
+2. To add the show functionality to a certain type `A`, the classic trait requires that `A extends Show`, while for type-classes we only require an implementation of `Showable[A]` to be in scope.
+3. In Scala 3, to let `Showable` mimic the method-call syntax of `Show`, we define `Showable.show` as an extension method.
+
+### Implement concrete instances
+
+The next step is to determine what classes in your application `Showable` should work for, and then implement that behavior for them.
+For instance, to implement `Showable` for this `Person` class:
+
+{% tabs 'person' %}
+{% tab 'Scala 2 and 3' %}
+```scala
+case class Person(firstName: String, lastName: String)
+```
+{% endtab %}
+{% endtabs %}
+
+you’ll define a single _canonical value_ of type `Showable[Person]`, i.e., an instance of `Showable` for the type `Person`, as the following code example demonstrates:
+
+{% tabs 'instance' class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+implicit val showablePerson: Showable[Person] = new Showable[Person] {
+ def show(p: Person): String =
+ s"${p.firstName} ${p.lastName}"
+}
+```
+{% endtab %}
+{% tab 'Scala 3' %}
+```scala
+given Showable[Person] with
+ extension (p: Person) def show: String =
+ s"${p.firstName} ${p.lastName}"
+```
+{% endtab %}
+{% endtabs %}
+
+### Using the type class
+
+Now you can use this type class like this:
+
+{% tabs 'usage' class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+val person = Person("John", "Doe")
+println(showablePerson.show(person))
+```
+
+Note that in practice, type classes are typically used with values whose type is unknown, unlike the type `Person`, as shown in the next section.
+{% endtab %}
+{% tab 'Scala 3' %}
+```scala
+val person = Person("John", "Doe")
+println(person.show)
+```
+{% endtab %}
+{% endtabs %}
+
+Again, if Scala didn’t have a `toString` method available to every class, you could use this technique to add `Showable` behavior to any class that you want to be able to convert to a `String`.
+
+### Writing methods that use the type class
+
+As with inheritance, you can define methods that use `Showable` as a type parameter:
+
+{% tabs 'method' class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+def showAll[A](as: List[A])(implicit showable: Showable[A]): Unit =
+ as.foreach(a => println(showable.show(a)))
+
+showAll(List(Person("Jane", "Jackson"), Person("Mary", "Moore")))
+```
+{% endtab %}
+{% tab 'Scala 3' %}
+```scala
+def showAll[A: Showable](as: List[A]): Unit =
+ as.foreach(a => println(a.show))
+
+showAll(List(Person("Jane", "Jackson"), Person("Mary", "Moore")))
+```
+{% endtab %}
+{% endtabs %}
+
+### A type class with multiple methods
+
+Note that if you want to create a type class that has multiple methods, the initial syntax looks like this:
+
+{% tabs 'multiple-methods' class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+trait HasLegs[A] {
+ def walk(a: A): Unit
+ def run(a: A): Unit
+}
+```
+{% endtab %}
+{% tab 'Scala 3' %}
+```scala
+trait HasLegs[A]:
+ extension (a: A)
+ def walk(): Unit
+ def run(): Unit
+```
+{% endtab %}
+{% endtabs %}
+
+### A real-world example
+
+For a real-world example of how type classes are used in Scala 3, see the `CanEqual` discussion in the [Multiversal Equality section][multiversal].
+
+[typeclasses-paper]: https://infoscience.epfl.ch/record/150280/files/TypeClasses.pdf
+[typeclasses-chapter]: {% link _overviews/scala3-book/ca-type-classes.md %}
+[comparator]: https://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html
+[multiversal]: {% link _overviews/scala3-book/ca-multiversal-equality.md %}
diff --git a/_overviews/scala3-book/collections-classes.md b/_overviews/scala3-book/collections-classes.md
new file mode 100644
index 0000000000..acf3a7ff87
--- /dev/null
+++ b/_overviews/scala3-book/collections-classes.md
@@ -0,0 +1,975 @@
+---
+title: Collections Types
+type: section
+description: This page introduces the common Scala 3 collections types and some of their methods.
+languages: [ru, zh-cn]
+num: 39
+previous-page: collections-intro
+next-page: collections-methods
+---
+
+
+{% comment %}
+TODO: mention Array, ArrayDeque, ListBuffer, Queue, Stack, StringBuilder?
+LATER: note that methods like `+`, `++`, etc., are aliases for other methods
+LATER: add links to the Scaladoc for the major types shown here
+{% endcomment %}
+
+
+This page demonstrates the common Scala 3 collections and their accompanying methods.
+Scala comes with a wealth of collection types, but you can go a long way by starting with just a few of them, and later using the others as needed.
+Similarly, each collection type has dozens of methods to make your life easier, but you can achieve a lot by starting with just a handful of them.
+
+Therefore, this section introduces and demonstrates the most common types and methods that you’ll need to get started.
+When you need more flexibility, see the pages linked at the end of this section for more details.
+
+
+
+## Three main categories of collections
+
+Looking at Scala collections from a high level, there are three main categories to choose from:
+
+- **Sequences** are sequential collections of elements and may be _indexed_ (like an array) or _linear_ (like a linked list)
+- **Maps** contain a collection of key/value pairs, like a Java `Map`, Python dictionary, or Ruby `Hash`
+- **Sets** are unordered collections of unique elements
+
+All of those are basic types, and have subtypes for specific purposes, such as concurrency, caching, and streaming.
+In addition to those three main categories, there are other useful collection types, including ranges, stacks, and queues.
+
+
+### Collections hierarchy
+
+As a brief overview, the next three figures show the hierarchy of classes and traits in the Scala collections.
+
+This first figure shows the collections types in package
+_scala.collection_.
+These are all high-level abstract classes or traits, which
+generally have _immutable_ and _mutable_ implementations.
+
+![General collection hierarchy][collections1]
+
+This figure shows all collections in package _scala.collection.immutable_:
+
+![Immutable collection hierarchy][collections2]
+
+And this figure shows all collections in package _scala.collection.mutable_:
+
+![Mutable collection hierarchy][collections3]
+
+Having seen that detailed view of all the collections types, the following sections introduce some common types you’ll use on a regular basis.
+
+{% comment %}
+NOTE: those images come from this page: https://docs.scala-lang.org/overviews/collections-2.13/overview.html
+{% endcomment %}
+
+
+
+## Common collections
+
+The main collections you’ll use on a regular basis are:
+
+| Collection Type | Immutable | Mutable | Description |
+| ------------- | --------- | ------- | ----------- |
+| `List` | ✓ | | A linear (linked list), immutable sequence |
+| `Vector` | ✓ | | An indexed, immutable sequence |
+| `LazyList` | ✓ | | A lazy, immutable linked list; its elements are computed only when they’re needed. Good for large or infinite sequences. |
+| `ArrayBuffer` | | ✓ | The go-to type for a mutable, indexed sequence |
+| `ListBuffer` | | ✓ | Used when you want a mutable `List`; typically converted to a `List` |
+| `Map` | ✓ | ✓ | An iterable collection that consists of pairs of keys and values. |
+| `Set` | ✓ | ✓ | An iterable collection with no duplicate elements |
+
+As shown, `Map` and `Set` come in both immutable and mutable versions.
+
+The basics of each type are demonstrated in the following sections.
+
+> In Scala, a _buffer_---such as `ArrayBuffer` and `ListBuffer`---is a sequence that can grow and shrink.
+
+
+### A note about immutable collections
+
+In the sections that follow, whenever the word _immutable_ is used, it’s safe to assume that the type is intended for use in a _functional programming_ (FP) style.
+With these types you don’t modify the collection; you apply functional methods to the collection to create a new result.
+
+
+
+## Choosing a sequence
+
+When choosing a _sequence_---a sequential collection of elements---you have two main decisions:
+
+- Should the sequence be indexed (like an array), allowing rapid access to any element, or should it be implemented as a linear linked list?
+- Do you want a mutable or immutable collection?
+
+The recommended, general-purpose, “go to” sequential collections for the combinations of mutable/immutable and indexed/linear are shown here:
+
+| Type/Category | Immutable | Mutable |
+| --------------------- | --------- | ------------ |
+| Indexed | `Vector` |`ArrayBuffer` |
+| Linear (Linked lists) | `List` |`ListBuffer` |
+
+For example, if you need an immutable, indexed collection, in general you should use a `Vector`.
+Conversely, if you need a mutable, indexed collection, use an `ArrayBuffer`.
+
+> `List` and `Vector` are often used when writing code in a functional style.
+> `ArrayBuffer` is commonly used when writing code in an imperative style.
+> `ListBuffer` is used when you’re mixing styles, such as building a list.
+
+The next several sections briefly demonstrate the `List`, `Vector`, and `ArrayBuffer` types.
+
+
+
+## `List`
+
+[The List type](https://www.scala-lang.org/api/current/scala/collection/immutable/List.html) is a linear, immutable sequence.
+This just means that it’s a linked-list that you can’t modify.
+Any time you want to add or remove `List` elements, you create a new `List` from an existing `List`.
+
+### Creating Lists
+
+This is how you create an initial `List`:
+
+{% tabs list-creation %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val ints = List(1, 2, 3)
+val names = List("Joel", "Chris", "Ed")
+
+// another way to construct a List
+val namesAgain = "Joel" :: "Chris" :: "Ed" :: Nil
+```
+{% endtab %}
+
+{% endtabs %}
+
+
+You can also declare the `List`’s type, if you prefer, though it generally isn’t necessary:
+
+{% tabs list-type %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val ints: List[Int] = List(1, 2, 3)
+val names: List[String] = List("Joel", "Chris", "Ed")
+```
+{% endtab %}
+
+{% endtabs %}
+
+
+One exception is when you have mixed types in a collection; in that case you may want to explicitly specify its type:
+
+{% tabs list-mixed-types class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+val things: List[Any] = List(1, "two", 3.0)
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+val things: List[String | Int | Double] = List(1, "two", 3.0) // with union types
+val thingsAny: List[Any] = List(1, "two", 3.0) // with any
+```
+{% endtab %}
+
+{% endtabs %}
+
+### Adding elements to a List
+
+Because `List` is immutable, you can’t add new elements to it.
+Instead, you create a new list by prepending or appending elements to an existing `List`.
+For instance, given this `List`:
+
+{% tabs adding-elements-init %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val a = List(1, 2, 3)
+```
+{% endtab %}
+
+{% endtabs %}
+
+When working with a `List`, _prepend_ one element with `::`, and prepend another `List` with `:::`, as shown here:
+
+{% tabs adding-elements-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val b = 0 :: a // List(0, 1, 2, 3)
+val c = List(-1, 0) ::: a // List(-1, 0, 1, 2, 3)
+```
+{% endtab %}
+
+{% endtabs %}
+
+You can also _append_ elements to a `List`, but because `List` is a singly-linked list, you should generally only prepend elements to it;
+appending elements to it is a relatively slow operation, especially when you work with large sequences.
+
+> Tip: If you want to prepend and append elements to an immutable sequence, use `Vector` instead.
+
+Because `List` is a linked-list, you shouldn’t try to access the elements of large lists by their index value.
+For instance, if you have a `List` with one million elements in it, accessing an element like `myList(999_999)` will take a relatively long time, because that request has to traverse all those elements.
+If you have a large collection and want to access elements by their index, use a `Vector` or `ArrayBuffer` instead.
+
+### How to remember the method names
+
+These days IDEs help us out tremendously, but one way to remember those method names is to think that the `:` character represents the side that the sequence is on, so when you use `+:` you know that the list needs to be on the right, like this:
+
+{% tabs list-prepending %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+0 +: a
+```
+{% endtab %}
+
+{% endtabs %}
+
+Similarly, when you use `:+` you know the list needs to be on the left:
+
+{% tabs list-appending %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+a :+ 4
+```
+{% endtab %}
+
+{% endtabs %}
+
+There are more technical ways to think about this, but this can be a helpful way to remember the method names.
+
+{% comment %}
+LATER: Add a discussion of `:` on method names, right-associativity, and infix operators.
+{% endcomment %}
+
+Also, a good thing about these symbolic method names is that they’re consistent.
+The same method names are used with other immutable sequences, such as `Seq` and `Vector`.
+You can also use non-symbolic method names to append and prepend elements, if you prefer.
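+
+For example, these named methods do the same thing as the symbolic ones shown above, reusing the list `a` from the earlier examples:
+
+{% tabs list-named-methods %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+a.prepended(0)               // List(0, 1, 2, 3)
+a.appended(4)                // List(1, 2, 3, 4)
+a.prependedAll(List(-1, 0))  // List(-1, 0, 1, 2, 3)
+a.appendedAll(List(4, 5))    // List(1, 2, 3, 4, 5)
+```
+{% endtab %}
+
+{% endtabs %}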
+
+### How to loop over lists
+
+Given a `List` of names:
+
+{% tabs list-loop-init %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val names = List("Joel", "Chris", "Ed")
+```
+{% endtab %}
+
+{% endtabs %}
+
+you can print each string like this:
+
+{% tabs list-loop-example class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+for (name <- names) println(name)
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+for name <- names do println(name)
+```
+{% endtab %}
+
+{% endtabs %}
+
+This is what it looks like in the REPL:
+
+{% tabs list-loop-repl class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+scala> for (name <- names) println(name)
+Joel
+Chris
+Ed
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+scala> for name <- names do println(name)
+Joel
+Chris
+Ed
+```
+{% endtab %}
+
+{% endtabs %}
+
+
+A great thing about using `for` loops with collections is that Scala is consistent, and the same approach works with all sequences, including `Array`, `ArrayBuffer`, `List`, `Seq`, `Vector`, `Map`, `Set`, etc.
+
+### A little bit of history
+
+For those interested in a little bit of history, the Scala `List` is similar to the `List` from [the Lisp programming language](https://en.wikipedia.org/wiki/Lisp_(programming_language)), which was originally specified in 1958.
+Indeed, in addition to creating a `List` like this:
+
+{% tabs list-history-init %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val ints = List(1, 2, 3)
+```
+{% endtab %}
+
+{% endtabs %}
+
+you can also create the exact same list this way:
+
+{% tabs list-history-init2 %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val list = 1 :: 2 :: 3 :: Nil
+```
+{% endtab %}
+
+{% endtabs %}
+
+The REPL shows how this works:
+
+{% tabs list-history-repl %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+scala> val list = 1 :: 2 :: 3 :: Nil
+list: List[Int] = List(1, 2, 3)
+```
+{% endtab %}
+
+{% endtabs %}
+
+This works because a `List` is a singly-linked list that ends with the `Nil` element, and `::` is a `List` method that works like Lisp’s “cons” operator.
+
+
+### Aside: The LazyList
+
+The Scala collections also include a [LazyList](https://www.scala-lang.org/api/current/scala/collection/immutable/LazyList.html), which is a _lazy_ immutable linked list.
+It’s called “lazy”---or non-strict---because it computes its elements only when they are needed.
+
+You can see how lazy a `LazyList` is in the REPL:
+
+{% tabs lazylist-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val x = LazyList.range(1, Int.MaxValue)
+x.take(1) // LazyList(<not computed>)
+x.take(5) // LazyList(<not computed>)
+x.map(_ + 1) // LazyList(<not computed>)
+```
+{% endtab %}
+
+{% endtabs %}
+
+In all of those examples, nothing happens.
+Indeed, nothing will happen until you force it to happen, such as by calling its `foreach` method:
+
+{% tabs lazylist-evaluation-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+scala> x.take(1).foreach(println)
+1
+```
+{% endtab %}
+
+{% endtabs %}
+
+For more information on the uses, benefits, and drawbacks of strict and non-strict (lazy) collections, see the “strict” and “non-strict” discussions on the [The Architecture of Scala 2.13’s Collections][strict] page.
+
+
+
+
+
+## Vector
+
+[Vector](https://www.scala-lang.org/api/current/scala/collection/immutable/Vector.html) is an indexed, immutable sequence.
+The “indexed” part of the description means that it provides random access and update in effectively constant time, so you can access `Vector` elements rapidly by their index value, such as accessing `listOfPeople(123_456_789)`.
+
+In general, the two types work the same apart from two differences: (a) `Vector` is indexed and `List` is not, and (b) `List` has the `::` method. So we’ll quickly run through the following examples.
+
+Here are a few ways you can create a `Vector`:
+
+{% tabs vector-creation %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val nums = Vector(1, 2, 3, 4, 5)
+
+val strings = Vector("one", "two")
+
+case class Person(name: String)
+val people = Vector(
+ Person("Bert"),
+ Person("Ernie"),
+ Person("Grover")
+)
+```
+{% endtab %}
+
+{% endtabs %}
+
+Because `Vector` is immutable, you can’t add new elements to it.
+Instead, you create a new sequence by appending or prepending elements to an existing `Vector`.
+These examples show how to _append_ elements to a `Vector`:
+
+{% tabs vector-appending %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val a = Vector(1, 2, 3)       // Vector(1, 2, 3)
+val b = a :+ 4                // Vector(1, 2, 3, 4)
+val c = a ++ Vector(4, 5)     // Vector(1, 2, 3, 4, 5)
+```
+{% endtab %}
+
+{% endtabs %}
+
+This is how you _prepend_ elements:
+
+{% tabs vector-prepending %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val a = Vector(1, 2, 3)       // Vector(1, 2, 3)
+val b = 0 +: a                // Vector(0, 1, 2, 3)
+val c = Vector(-1, 0) ++: a   // Vector(-1, 0, 1, 2, 3)
+```
+{% endtab %}
+
+{% endtabs %}
+
+In addition to fast random access and updates, `Vector` provides fast append and prepend times, so you can use these features as desired.
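+
+Because `Vector` is indexed, you can also read an element by its index and create an updated copy with the `updated` method. Here is a small sketch of both; the index and values shown are arbitrary:
+
+{% tabs vector-access-update %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val v = Vector(10, 20, 30)   // Vector(10, 20, 30)
+v(1)                         // 20
+v.updated(1, 99)             // Vector(10, 99, 30) (v itself is unchanged)
+```
+{% endtab %}
+
+{% endtabs %}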
+
+> See the [Collections Performance Characteristics](https://docs.scala-lang.org/overviews/collections-2.13/performance-characteristics.html) for performance details about `Vector` and other collections.
+
+Finally, you use a `Vector` in a `for` loop just like a `List`, `ArrayBuffer`, or any other sequence:
+
+{% tabs vector-loop class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+scala> val names = Vector("Joel", "Chris", "Ed")
+val names: Vector[String] = Vector(Joel, Chris, Ed)
+
+scala> for (name <- names) println(name)
+Joel
+Chris
+Ed
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+scala> val names = Vector("Joel", "Chris", "Ed")
+val names: Vector[String] = Vector(Joel, Chris, Ed)
+
+scala> for name <- names do println(name)
+Joel
+Chris
+Ed
+```
+{% endtab %}
+
+{% endtabs %}
+
+
+## ArrayBuffer
+
+Use `ArrayBuffer` when you need a general-purpose, mutable indexed sequence in your Scala applications.
+It’s mutable, so you can change its elements, and also resize it.
+Because it’s indexed, random access of elements is fast.
+
+### Creating an ArrayBuffer
+
+To use an `ArrayBuffer`, first import it:
+
+{% tabs arraybuffer-import %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+import scala.collection.mutable.ArrayBuffer
+```
+{% endtab %}
+
+{% endtabs %}
+
+If you need to start with an empty `ArrayBuffer`, just specify its type:
+
+{% tabs arraybuffer-creation %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+var strings = ArrayBuffer[String]()
+var ints = ArrayBuffer[Int]()
+var people = ArrayBuffer[Person]()
+```
+{% endtab %}
+
+{% endtabs %}
+
+If you know the approximate size your `ArrayBuffer` eventually needs to be, you can create it with an initial size:
+
+{% tabs list-creation-with-size %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+// ready to hold 100,000 ints
+val buf = new ArrayBuffer[Int](100_000)
+```
+{% endtab %}
+
+{% endtabs %}
+
+To create an `ArrayBuffer` with initial elements, specify them just as you would for a `List` or `Vector`:
+
+{% tabs arraybuffer-init %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val nums = ArrayBuffer(1, 2, 3)
+val people = ArrayBuffer(
+ Person("Bert"),
+ Person("Ernie"),
+ Person("Grover")
+)
+```
+{% endtab %}
+
+{% endtabs %}
+
+### Adding elements to an ArrayBuffer
+
+Append new elements to an `ArrayBuffer` with the `+=` and `++=` methods.
+If you prefer methods with textual names, you can also use `append`, `appendAll`, `insert`, `insertAll`, `prepend`, and `prependAll`; a sketch of a few of those follows the `+=` examples below.
+
+Here are some examples of `+=` and `++=`:
+
+{% tabs arraybuffer-add %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val nums = ArrayBuffer(1, 2, 3) // ArrayBuffer(1, 2, 3)
+nums += 4 // ArrayBuffer(1, 2, 3, 4)
+nums ++= List(5, 6) // ArrayBuffer(1, 2, 3, 4, 5, 6)
+```
+{% endtab %}
+
+{% endtabs %}
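+
+And here is a sketch of a few of the textual-name methods mentioned above; unlike the immutable `List` and `Vector` examples, these modify the buffer in place:
+
+{% tabs arraybuffer-add-textual %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val b = ArrayBuffer(1, 2, 3)   // ArrayBuffer(1, 2, 3)
+b.append(4)                    // ArrayBuffer(1, 2, 3, 4)
+b.appendAll(List(5, 6))        // ArrayBuffer(1, 2, 3, 4, 5, 6)
+b.prepend(0)                   // ArrayBuffer(0, 1, 2, 3, 4, 5, 6)
+b.insert(1, 99)                // ArrayBuffer(0, 99, 1, 2, 3, 4, 5, 6)
+```
+{% endtab %}
+
+{% endtabs %}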
+
+### Removing elements from an ArrayBuffer
+
+`ArrayBuffer` is mutable, so it has methods like `-=`, `--=`, `clear`, `remove`, and more.
+These examples demonstrate the `-=` and `--=` methods, with a sketch of `remove` and `clear` after them:
+
+{% tabs arraybuffer-remove %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val a = ArrayBuffer.range('a', 'h') // ArrayBuffer(a, b, c, d, e, f, g)
+a -= 'a' // ArrayBuffer(b, c, d, e, f, g)
+a --= Seq('b', 'c') // ArrayBuffer(d, e, f, g)
+a --= Set('d', 'e') // ArrayBuffer(f, g)
+```
+{% endtab %}
+
+{% endtabs %}
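+
+And here is a short sketch of `remove`, which takes an index (and optionally a count of elements to remove), and `clear`:
+
+{% tabs arraybuffer-remove-clear %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val b = ArrayBuffer(1, 2, 3, 4, 5)   // ArrayBuffer(1, 2, 3, 4, 5)
+b.remove(0)       // returns 1; b is now ArrayBuffer(2, 3, 4, 5)
+b.remove(1, 2)    // removes two elements starting at index 1; b is now ArrayBuffer(2, 5)
+b.clear()         // b is now ArrayBuffer()
+```
+{% endtab %}
+
+{% endtabs %}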
+
+### Updating ArrayBuffer elements
+
+Update elements in an `ArrayBuffer` either by reassigning the desired element or by using the `update` method:
+
+{% tabs arraybuffer-update %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val a = ArrayBuffer.range(1,5) // ArrayBuffer(1, 2, 3, 4)
+a(2) = 50 // ArrayBuffer(1, 2, 50, 4)
+a.update(0, 10) // ArrayBuffer(10, 2, 50, 4)
+```
+{% endtab %}
+
+{% endtabs %}
+
+
+
+## Maps
+
+A `Map` is an iterable collection that consists of pairs of keys and values.
+Scala has both mutable and immutable `Map` types, and this section demonstrates how to use the _immutable_ `Map`.
+
+### Creating an immutable Map
+
+Create an immutable `Map` like this:
+
+{% tabs map-init %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val states = Map(
+ "AK" -> "Alaska",
+ "AL" -> "Alabama",
+ "AZ" -> "Arizona"
+)
+```
+{% endtab %}
+
+{% endtabs %}
+
+Once you have a `Map` you can traverse its elements in a `for` loop like this:
+
+{% tabs map-loop class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+for ((k, v) <- states) println(s"key: $k, value: $v")
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+for (k, v) <- states do println(s"key: $k, value: $v")
+```
+{% endtab %}
+
+{% endtabs %}
+
+The REPL shows how this works:
+
+{% tabs map-repl class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+scala> for ((k, v) <- states) println(s"key: $k, value: $v")
+key: AK, value: Alaska
+key: AL, value: Alabama
+key: AZ, value: Arizona
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+scala> for (k, v) <- states do println(s"key: $k, value: $v")
+key: AK, value: Alaska
+key: AL, value: Alabama
+key: AZ, value: Arizona
+```
+{% endtab %}
+
+{% endtabs %}
+
+### Accessing Map elements
+
+Access map elements by specifying the desired key in parentheses:
+
+{% tabs map-access-element %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val ak = states("AK") // ak: String = Alaska
+val al = states("AL") // al: String = Alabama
+```
+{% endtab %}
+
+{% endtabs %}
+
+In practice, you’ll also use methods like `keys`, `keySet`, `keysIterator`, `for` loops, and higher-order functions like `map` to work with `Map` keys and values.
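+
+For instance, here is a brief sketch of a few of those methods applied to the `states` map shown above; the comments describe what each call yields rather than exact REPL output:
+
+{% tabs map-key-value-methods %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+states.keys            // the keys: "AK", "AL", "AZ"
+states.keySet          // the keys as a Set
+states.keysIterator    // an Iterator over the keys
+states.map { case (k, v) => s"$k: $v" }   // "AK: Alaska", "AL: Alabama", "AZ: Arizona"
+```
+{% endtab %}
+
+{% endtabs %}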
+
+### Adding elements to a Map
+
+Add elements to an immutable map using `+` and `++`, remembering to assign the result to a new variable:
+
+{% tabs map-add-element %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val a = Map(1 -> "one") // a: Map(1 -> one)
+val b = a + (2 -> "two") // b: Map(1 -> one, 2 -> two)
+val c = b ++ Seq(
+ 3 -> "three",
+ 4 -> "four"
+)
+// c: Map(1 -> one, 2 -> two, 3 -> three, 4 -> four)
+```
+{% endtab %}
+
+{% endtabs %}
+
+### Removing elements from a Map
+
+Remove elements from an immutable map using `-` or `--` and the key values to remove, remembering to assign the result to a new variable:
+
+{% tabs map-remove-element %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val a = Map(
+ 1 -> "one",
+ 2 -> "two",
+ 3 -> "three",
+ 4 -> "four"
+)
+
+val b = a - 4 // b: Map(1 -> one, 2 -> two, 3 -> three)
+val c = a - 4 - 3 // c: Map(1 -> one, 2 -> two)
+```
+{% endtab %}
+
+{% endtabs %}
+
+### Updating Map elements
+
+To update elements in an immutable map, use the `updated` method (or the `+` operator) while assigning the result to a new variable:
+
+{% tabs map-update-element %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val a = Map(
+ 1 -> "one",
+ 2 -> "two",
+ 3 -> "three"
+)
+
+val b = a.updated(3, "THREE!") // b: Map(1 -> one, 2 -> two, 3 -> THREE!)
+val c = a + (2 -> "TWO...") // c: Map(1 -> one, 2 -> TWO..., 3 -> three)
+```
+{% endtab %}
+
+{% endtabs %}
+
+### Traversing a Map
+
+As shown earlier, this is a common way to manually traverse elements in a map using a `for` loop:
+
+
+{% tabs map-traverse class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+val states = Map(
+ "AK" -> "Alaska",
+ "AL" -> "Alabama",
+ "AZ" -> "Arizona"
+)
+
+for ((k, v) <- states) println(s"key: $k, value: $v")
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+val states = Map(
+ "AK" -> "Alaska",
+ "AL" -> "Alabama",
+ "AZ" -> "Arizona"
+)
+
+for (k, v) <- states do println(s"key: $k, value: $v")
+```
+{% endtab %}
+
+{% endtabs %}
+
+That being said, there are _many_ ways to work with the keys and values in a map.
+Common `Map` methods include `foreach`, `map`, `keys`, and `values`.
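+
+For instance, `foreach` gives you a compact alternative to the `for` loop shown above, producing the same output:
+
+{% tabs map-foreach-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+states.foreach {
+  case (k, v) => println(s"key: $k, value: $v")
+}
+```
+{% endtab %}
+
+{% endtabs %}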
+
+Scala has many more specialized `Map` types, including `CollisionProofHashMap`, `HashMap`, `LinkedHashMap`, `ListMap`, `SortedMap`, `TreeMap`, `WeakHashMap`, and more.
+
+
+
+## Working with Sets
+
+The Scala [Set]({{site.baseurl}}/overviews/collections-2.13/sets.html) is an iterable collection with no duplicate elements.
+
+Scala has both mutable and immutable `Set` types.
+This section demonstrates the _immutable_ `Set`.
+
+
+### Creating a Set
+
+Create new empty sets like this:
+
+{% tabs set-creation %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val nums = Set[Int]()
+val letters = Set[Char]()
+```
+{% endtab %}
+
+{% endtabs %}
+
+Create sets with initial data like this:
+
+{% tabs set-init %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val nums = Set(1, 2, 3, 3, 3) // Set(1, 2, 3)
+val letters = Set('a', 'b', 'c', 'c') // Set('a', 'b', 'c')
+```
+{% endtab %}
+
+{% endtabs %}
+
+
+### Adding elements to a Set
+
+Add elements to an immutable `Set` using `+` and `++`, remembering to assign the result to a new variable:
+
+{% tabs set-add-element %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val a = Set(1, 2) // Set(1, 2)
+val b = a + 3 // Set(1, 2, 3)
+val c = b ++ Seq(4, 1, 5, 5) // HashSet(5, 1, 2, 3, 4)
+```
+{% endtab %}
+
+{% endtabs %}
+
+Notice that when you attempt to add duplicate elements, they’re quietly dropped.
+
+Also notice that the order of iteration of the elements is arbitrary.
+
+
+### Deleting elements from a Set
+
+Remove elements from an immutable set using `-` and `--`, again assigning the result to a new variable:
+
+{% tabs set-remove-element %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val a = Set(1, 2, 3, 4, 5) // HashSet(5, 1, 2, 3, 4)
+val b = a - 5 // HashSet(1, 2, 3, 4)
+val c = b -- Seq(3, 4) // HashSet(1, 2)
+```
+{% endtab %}
+
+{% endtabs %}
+
+
+
+## Range
+
+The Scala `Range` is often used to populate data structures and to iterate over `for` loops.
+These REPL examples demonstrate how to create ranges:
+
+{% comment %}
+LATER: the dotty repl currently shows results differently
+{% endcomment %}
+
+{% tabs range-init %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+1 to 5 // Range(1, 2, 3, 4, 5)
+1 until 5 // Range(1, 2, 3, 4)
+1 to 10 by 2 // Range(1, 3, 5, 7, 9)
+'a' to 'c' // NumericRange(a, b, c)
+```
+{% endtab %}
+
+{% endtabs %}
+
+You can use ranges to populate collections:
+
+{% tabs range-conversion %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val x = (1 to 5).toList     // List(1, 2, 3, 4, 5)
+val y = (1 to 5).toBuffer   // ArrayBuffer(1, 2, 3, 4, 5)
+```
+{% endtab %}
+
+{% endtabs %}
+
+They’re also used in `for` loops:
+
+{% tabs range-iteration class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+scala> for (i <- 1 to 3) println(i)
+1
+2
+3
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+scala> for i <- 1 to 3 do println(i)
+1
+2
+3
+```
+{% endtab %}
+
+{% endtabs %}
+
+
+There are also `range` methods on the companion objects of many collection types:
+
+{% tabs range-methods %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+Vector.range(1, 5) // Vector(1, 2, 3, 4)
+List.range(1, 10, 2) // List(1, 3, 5, 7, 9)
+Set.range(1, 10) // HashSet(5, 1, 6, 9, 2, 7, 3, 8, 4)
+```
+{% endtab %}
+
+{% endtabs %}
+
+When you’re running tests, ranges are also useful for generating test collections:
+
+{% tabs range-tests %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val evens = (0 to 10 by 2).toList // List(0, 2, 4, 6, 8, 10)
+val odds = (1 to 10 by 2).toList // List(1, 3, 5, 7, 9)
+val doubles = (1 to 5).map(_ * 2.0) // Vector(2.0, 4.0, 6.0, 8.0, 10.0)
+
+// create a Map
+val map = (1 to 3).map(e => (e,s"$e")).toMap
+ // map: Map[Int, String] = Map(1 -> "1", 2 -> "2", 3 -> "3")
+```
+{% endtab %}
+
+{% endtabs %}
+
+
+## More details
+
+When you need more information about specialized collections, see the following resources:
+
+- [Concrete Immutable Collection Classes](https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html)
+- [Concrete Mutable Collection Classes](https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html)
+- [How are the collections structured? Which one should I choose?](https://docs.scala-lang.org/tutorials/FAQ/collections.html)
+
+
+
+[strict]: {% link _overviews/core/architecture-of-scala-213-collections.md %}
+[collections1]: /resources/images/tour/collections-diagram-213.svg
+[collections2]: /resources/images/tour/collections-immutable-diagram-213.svg
+[collections3]: /resources/images/tour/collections-mutable-diagram-213.svg
diff --git a/_overviews/scala3-book/collections-intro.md b/_overviews/scala3-book/collections-intro.md
new file mode 100644
index 0000000000..e953b95302
--- /dev/null
+++ b/_overviews/scala3-book/collections-intro.md
@@ -0,0 +1,25 @@
+---
+title: Scala Collections
+type: chapter
+description: This page provides an introduction to the common collections classes and their methods in Scala 3.
+languages: [ru, zh-cn]
+num: 38
+previous-page: packaging-imports
+next-page: collections-classes
+---
+
+This chapter introduces the most common Scala 3 collections and their accompanying methods.
+Scala comes with a wealth of collections types, but you can go a long way by starting with just a few of them, and later using the others as needed.
+Similarly, each type has dozens of methods to make your life easier, but you can achieve a lot by starting with just a handful of them.
+
+Therefore, this section introduces and demonstrates the most common collections types and methods that you’ll need to get started.
+
+
+{% comment %}
+LATER: Use more of the content from this page:
+ https://docs.scala-lang.org/overviews/index.html
+{% endcomment %}
+
+
+
+
diff --git a/_overviews/scala3-book/collections-methods.md b/_overviews/scala3-book/collections-methods.md
new file mode 100644
index 0000000000..6a56814b5c
--- /dev/null
+++ b/_overviews/scala3-book/collections-methods.md
@@ -0,0 +1,644 @@
+---
+title: Collections Methods
+type: section
+description: This page demonstrates the common methods on the Scala 3 collections classes.
+languages: [ru, zh-cn]
+num: 40
+previous-page: collections-classes
+next-page: collections-summary
+---
+
+
+
+A great strength of Scala collections is that they come with dozens of methods out of the box, and those methods are consistently available across the immutable and mutable collections types.
+The benefit is that you no longer need to write custom `for` loops every time you need to work with a collection, and when you move from one project to another you’ll find these same methods used, rather than more custom `for` loops.
+
+There are *dozens* of methods available to you, so they aren’t all shown here.
+Instead, only some of the most commonly-used methods are shown, including:
+
+- `map`
+- `filter`
+- `foreach`
+- `head`
+- `tail`
+- `take`, `takeWhile`
+- `drop`, `dropWhile`
+- `reduce`
+
+The following methods work on all of the sequence types, including `List`, `Vector`, `ArrayBuffer`, etc., but these examples use a `List` unless otherwise specified.
+
+> As a very important note, none of the methods on `List` mutate the list.
+> They all work in a functional style, meaning that they return a new collection with the modified results.
+
+
+
+## Examples of common methods
+
+To give you an overview of what you’ll see in the following sections, these examples show some of the most commonly used collections methods.
+First, here are some methods that don’t use lambdas:
+
+{% tabs common-method-examples %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val a = List(10, 20, 30, 40, 10) // List(10, 20, 30, 40, 10)
+
+a.distinct // List(10, 20, 30, 40)
+a.drop(2) // List(30, 40, 10)
+a.dropRight(2) // List(10, 20, 30)
+a.head // 10
+a.headOption // Some(10)
+a.init // List(10, 20, 30, 40)
+a.intersect(List(19,20,21)) // List(20)
+a.last // 10
+a.lastOption // Some(10)
+a.slice(2,4) // List(30, 40)
+a.tail // List(20, 30, 40, 10)
+a.take(3) // List(10, 20, 30)
+a.takeRight(2) // List(40, 10)
+```
+{% endtab %}
+
+{% endtabs %}
+
+
+### Higher-order functions and lambdas
+
+Next, we’ll show some commonly used higher-order functions (HOFs) that accept lambdas (anonymous functions).
+To get started, here are several variations of the lambda syntax, starting with the longest form, working in steps towards the most concise form:
+
+{% tabs higher-order-functions-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+// these functions are all equivalent and return
+// the same data: List(10, 20, 10)
+
+a.filter((i: Int) => i < 25) // 1. most explicit form
+a.filter((i) => i < 25) // 2. `Int` is not required
+a.filter(i => i < 25) // 3. the parens are not required
+a.filter(_ < 25) // 4. `i` is not required
+```
+{% endtab %}
+
+{% endtabs %}
+
+In those numbered examples:
+
+1. The first example shows the longest form.
+ This much verbosity is _rarely_ required, and only needed in the most complex usages.
+2. The compiler knows that `a` contains `Int`, so it’s not necessary to restate that here.
+3. Parentheses aren’t needed when you have only one parameter, such as `i`.
+4. When you have a single parameter, and it appears only once in your anonymous function, you can replace the parameter with `_`.
+
+The [Anonymous Functions][lambdas] page provides more details and examples of the rules for shortening lambda expressions.
+
+Now that you’ve seen the concise form, here are examples of other HOFs that use the short-form lambda syntax:
+
+{% tabs anonymous-functions-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+a.dropWhile(_ < 25) // List(30, 40, 10)
+a.filter(_ > 100) // List()
+a.filterNot(_ < 25) // List(30, 40)
+a.find(_ > 20) // Some(30)
+a.takeWhile(_ < 30) // List(10, 20)
+```
+{% endtab %}
+
+{% endtabs %}
+
+It’s important to note that HOFs also accept methods and functions as parameters---not just lambda expressions.
+Here are some examples of the `map` HOF that use a method named `double`.
+Several variations of the lambda syntax are shown again:
+
+{% tabs method-as-parameter-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+def double(i: Int) = i * 2
+
+// these all return `List(20, 40, 60, 80, 20)`
+a.map(i => double(i))
+a.map(double(_))
+a.map(double)
+```
+{% endtab %}
+
+{% endtabs %}
+
+In the last example, when an anonymous function consists of one function call that takes a single argument, you don’t have to name the argument, so even `_` isn’t required.
+
+Finally, you can combine HOFs as desired to solve problems:
+
+{% tabs higher-order-functions-combination-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+// yields `List(100, 200)`
+a.filter(_ < 40)
+ .takeWhile(_ < 30)
+ .map(_ * 10)
+```
+{% endtab %}
+
+{% endtabs %}
+
+
+
+## Sample data
+
+The examples in the following sections use these lists:
+
+{% tabs sample-data %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val oneToTen = (1 to 10).toList
+val names = List("adam", "brandy", "chris", "david")
+```
+{% endtab %}
+
+{% endtabs %}
+
+
+
+## `map`
+
+The `map` method steps through each element in the existing list, applying the function you supply to each element, one at a time;
+it then returns a new list with all of the modified elements.
+
+Here’s an example of the `map` method being applied to the `oneToTen` list:
+
+{% tabs map-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+scala> val doubles = oneToTen.map(_ * 2)
+doubles: List[Int] = List(2, 4, 6, 8, 10, 12, 14, 16, 18, 20)
+```
+{% endtab %}
+
+{% endtabs %}
+
+You can also write anonymous functions using a long form, like this:
+
+{% tabs map-example-anonymous %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+scala> val doubles = oneToTen.map(i => i * 2)
+doubles: List[Int] = List(2, 4, 6, 8, 10, 12, 14, 16, 18, 20)
+```
+{% endtab %}
+
+{% endtabs %}
+
+However, in this lesson we’ll always use the first, shorter form.
+
+Here are a few more examples of the `map` method being applied to the `oneToTen` and `names` lists:
+
+{% tabs few-more-examples %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+scala> val capNames = names.map(_.capitalize)
+capNames: List[String] = List(Adam, Brandy, Chris, David)
+
+scala> val nameLengthsMap = names.map(s => (s, s.length)).toMap
+nameLengthsMap: Map[String, Int] = Map(adam -> 4, brandy -> 6, chris -> 5, david -> 5)
+
+scala> val isLessThanFive = oneToTen.map(_ < 5)
+isLessThanFive: List[Boolean] = List(true, true, true, true, false, false, false, false, false, false)
+```
+{% endtab %}
+
+{% endtabs %}
+
+As shown in the last two examples, it’s perfectly legal (and common) to use `map` to return a collection that has a different type than the original type.
+
+
+
+## `filter`
+
+The `filter` method creates a new list containing only the elements that satisfy the provided predicate.
+A predicate, or condition, is a function that returns a `Boolean` (`true` or `false`).
+Here are a few examples:
+
+{% tabs filter-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+scala> val lessThanFive = oneToTen.filter(_ < 5)
+lessThanFive: List[Int] = List(1, 2, 3, 4)
+
+scala> val evens = oneToTen.filter(_ % 2 == 0)
+evens: List[Int] = List(2, 4, 6, 8, 10)
+
+scala> val shortNames = names.filter(_.length <= 4)
+shortNames: List[String] = List(adam)
+```
+{% endtab %}
+
+{% endtabs %}
+
+A great thing about the functional methods on collections is that you can chain them together to solve problems.
+For instance, this example shows how to chain `filter` and `map`:
+
+{% tabs filter-example-anonymous %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+oneToTen.filter(_ < 4).map(_ * 10)
+```
+{% endtab %}
+
+{% endtabs %}
+
+The REPL shows the result:
+
+{% tabs filter-example-anonymous-repl %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+scala> oneToTen.filter(_ < 4).map(_ * 10)
+val res1: List[Int] = List(10, 20, 30)
+```
+{% endtab %}
+
+{% endtabs %}
+
+
+
+## `foreach`
+
+The `foreach` method is used to loop over all elements in a collection.
+Note that `foreach` is used for side-effects, such as printing information.
+Here’s an example with the `names` list:
+
+{% tabs foreach-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+scala> names.foreach(println)
+adam
+brandy
+chris
+david
+```
+{% endtab %}
+
+{% endtabs %}
+
+
+
+## `head`
+
+The `head` method comes from Lisp and other earlier functional programming languages.
+It’s used to access the first element (the head element) of a list:
+
+{% tabs head-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+oneToTen.head // 1
+names.head // adam
+```
+{% endtab %}
+
+{% endtabs %}
+
+Because a `String` can be seen as a sequence of characters, you can also treat it like a list.
+This is how `head` works on these strings:
+
+{% tabs string-head-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+"foo".head // 'f'
+"bar".head // 'b'
+```
+{% endtab %}
+
+{% endtabs %}
+
+`head` is a great method to work with, but as a word of caution it can also throw an exception when called on an empty collection:
+
+{% tabs head-error-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val emptyList = List[Int]() // emptyList: List[Int] = List()
+emptyList.head // java.util.NoSuchElementException: head of empty list
+```
+{% endtab %}
+
+{% endtabs %}
+
+Because of this you may want to use `headOption` instead of `head`, especially when programming in a functional style:
+
+{% tabs head-option-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+emptyList.headOption // None
+```
+{% endtab %}
+
+{% endtabs %}
+
+As shown, it doesn’t throw an exception; it simply returns an `Option` with the value `None`.
+You can learn more about this programming style in the [Functional Programming][fp-intro] chapter.
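+
+For example, one common way to handle the resulting `Option` is to supply a default value with `getOrElse`; the `0` used here is just an arbitrary default for this sketch:
+
+{% tabs head-option-default-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+emptyList.headOption.getOrElse(0)   // 0
+oneToTen.headOption.getOrElse(0)    // 1
+```
+{% endtab %}
+
+{% endtabs %}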
+
+
+
+## `tail`
+
+The `tail` method also comes from Lisp, and it’s used to return every element in a list after the head element.
+A few examples demonstrate this:
+
+{% tabs tail-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+oneToTen.head // 1
+oneToTen.tail // List(2, 3, 4, 5, 6, 7, 8, 9, 10)
+
+names.head // adam
+names.tail // List(brandy, chris, david)
+```
+{% endtab %}
+
+{% endtabs %}
+
+Just like `head`, `tail` also works on strings:
+
+{% tabs string-tail-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+"foo".tail // "oo"
+"bar".tail // "ar"
+```
+{% endtab %}
+
+{% endtabs %}
+
+`tail` throws a _java.lang.UnsupportedOperationException_ if the list is empty, so just like `head` and `headOption`, there’s also a `tailOption` method, which is preferred in functional programming.
+
+A list can also be matched, so you can write expressions like this:
+
+{% tabs tail-match-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val x :: xs = names
+```
+{% endtab %}
+
+{% endtabs %}
+
+Putting that code in the REPL shows that `x` is assigned to the head of the list, and `xs` is assigned to the tail:
+
+{% tabs tail-match-example-repl %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+scala> val x :: xs = names
+val x: String = adam
+val xs: List[String] = List(brandy, chris, david)
+```
+{% endtab %}
+
+{% endtabs %}
+
+Pattern matching like this is useful in many situations, such as writing a `sum` method using recursion:
+
+{% tabs tail-match-sum-example class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+def sum(list: List[Int]): Int = list match {
+ case Nil => 0
+ case x :: xs => x + sum(xs)
+}
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+def sum(list: List[Int]): Int = list match
+ case Nil => 0
+ case x :: xs => x + sum(xs)
+```
+{% endtab %}
+
+{% endtabs %}
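+
+For example, calling that method on the sample list defined earlier (and on an empty list) shows how the recursion bottoms out at the `Nil` case:
+
+{% tabs tail-match-sum-usage %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+sum(oneToTen)   // 55
+sum(Nil)        // 0
+```
+{% endtab %}
+
+{% endtabs %}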
+
+
+
+## `take`, `takeRight`, `takeWhile`
+
+The `take`, `takeRight`, and `takeWhile` methods give you a nice way of “taking” the elements from a list that you want to use to create a new list.
+This is `take` and `takeRight`:
+
+{% tabs take-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+oneToTen.take(1) // List(1)
+oneToTen.take(2) // List(1, 2)
+
+oneToTen.takeRight(1) // List(10)
+oneToTen.takeRight(2) // List(9, 10)
+```
+{% endtab %}
+
+{% endtabs %}
+
+Notice how these methods work with “edge” cases, where we ask for more elements than are in the sequence, or ask for zero elements:
+
+{% tabs take-edge-cases-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+oneToTen.take(Int.MaxValue) // List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
+oneToTen.takeRight(Int.MaxValue) // List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
+oneToTen.take(0) // List()
+oneToTen.takeRight(0) // List()
+```
+{% endtab %}
+
+{% endtabs %}
+
+And this is `takeWhile`, which works with a predicate function:
+
+{% tabs take-while-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+oneToTen.takeWhile(_ < 5) // List(1, 2, 3, 4)
+names.takeWhile(_.length < 5) // List(adam)
+```
+{% endtab %}
+
+{% endtabs %}
+
+
+## `drop`, `dropRight`, `dropWhile`
+
+`drop`, `dropRight`, and `dropWhile` are essentially the opposite of their “take” counterparts, dropping elements from a list.
+Here are some examples:
+
+{% tabs drop-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+oneToTen.drop(1) // List(2, 3, 4, 5, 6, 7, 8, 9, 10)
+oneToTen.drop(5) // List(6, 7, 8, 9, 10)
+
+oneToTen.dropRight(8) // List(1, 2)
+oneToTen.dropRight(7) // List(1, 2, 3)
+```
+{% endtab %}
+
+{% endtabs %}
+
+Again notice how these methods work with edge cases:
+
+{% tabs drop-edge-cases-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+oneToTen.drop(Int.MaxValue) // List()
+oneToTen.dropRight(Int.MaxValue) // List()
+oneToTen.drop(0) // List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
+oneToTen.dropRight(0) // List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
+```
+{% endtab %}
+
+{% endtabs %}
+
+And this is `dropWhile`, which works with a predicate function:
+
+{% tabs drop-while-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+oneToTen.dropWhile(_ < 5) // List(5, 6, 7, 8, 9, 10)
+names.dropWhile(_ != "chris") // List(chris, david)
+```
+{% endtab %}
+
+{% endtabs %}
+
+
+
+## `reduce`
+
+When you hear the term “map reduce,” the “reduce” part refers to methods like `reduce`.
+It takes a function (or anonymous function) and applies that function to successive elements in the list.
+
+The best way to explain `reduce` is to create a little helper method you can pass into it.
+For example, this is an `add` method that adds two integers together, and also provides some nice debug output:
+
+{% tabs reduce-example class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+def add(x: Int, y: Int): Int = {
+ val theSum = x + y
+ println(s"received $x and $y, their sum is $theSum")
+ theSum
+}
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+def add(x: Int, y: Int): Int =
+ val theSum = x + y
+ println(s"received $x and $y, their sum is $theSum")
+ theSum
+```
+{% endtab %}
+
+{% endtabs %}
+
+Given that method and this list:
+
+{% tabs reduce-example-init %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val a = List(1, 2, 3, 4)
+```
+{% endtab %}
+
+{% endtabs %}
+
+this is what happens when you pass the `add` method into `reduce`:
+
+{% tabs reduce-example-evaluation %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+scala> a.reduce(add)
+received 1 and 2, their sum is 3
+received 3 and 3, their sum is 6
+received 6 and 4, their sum is 10
+res0: Int = 10
+```
+{% endtab %}
+
+{% endtabs %}
+
+As that result shows, `reduce` uses `add` to reduce the list `a` into a single value, in this case, the sum of the integers in the list.
+
+Once you get used to `reduce`, you’ll write a “sum” algorithm like this:
+
+{% tabs reduce-example-sum %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+scala> a.reduce(_ + _)
+res0: Int = 10
+```
+{% endtab %}
+
+{% endtabs %}
+
+Similarly, a “product” algorithm looks like this:
+
+{% tabs reduce-example-multiply %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+scala> a.reduce(_ * _)
+res1: Int = 24
+```
+{% endtab %}
+
+{% endtabs %}
+
+> An important concept to know about `reduce` is that---as its name implies---it’s used to _reduce_ a collection down to a single value.
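+
+The single value doesn’t have to be a number; for instance, this small sketch reduces the `names` list from the sample data into one comma-separated `String`:
+
+{% tabs reduce-example-string %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+names.reduce(_ + ", " + _)   // "adam, brandy, chris, david"
+```
+{% endtab %}
+
+{% endtabs %}
+
+Also note that, like `head`, `reduce` throws an exception if you call it on an empty collection, so take care when the collection might be empty.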
+
+
+
+## Even more
+
+There are literally dozens of additional methods on the Scala collections types that will keep you from ever needing to write another `for` loop. See [Mutable and Immutable Collections][mut-immut-colls] and [The Architecture of Scala Collections][architecture] for many more details on the Scala collections.
+
+> As a final note, if you’re using Java code in a Scala project, you can convert Java collections to Scala collections.
+> By doing this you can use those collections in `for` expressions, and can also take advantage of Scala’s functional collections methods.
+> See the [Interacting with Java][interacting] section for more details.
+
+
+
+[interacting]: {% link _overviews/scala3-book/interacting-with-java.md %}
+[lambdas]: {% link _overviews/scala3-book/fun-anonymous-functions.md %}
+[fp-intro]: {% link _overviews/scala3-book/fp-intro.md %}
+[mut-immut-colls]: {% link _overviews/collections-2.13/overview.md %}
+[architecture]: {% link _overviews/core/architecture-of-scala-213-collections.md %}
+
diff --git a/_overviews/scala3-book/collections-summary.md b/_overviews/scala3-book/collections-summary.md
new file mode 100644
index 0000000000..4a7aa1c385
--- /dev/null
+++ b/_overviews/scala3-book/collections-summary.md
@@ -0,0 +1,31 @@
+---
+title: Summary
+type: section
+description: This page provides a summary of the Collections chapter.
+languages: [ru, zh-cn]
+num: 41
+previous-page: collections-methods
+next-page: fp-intro
+---
+
+This chapter provides a summary of the common Scala 3 collections and their accompanying methods.
+As shown, Scala comes with a wealth of collections and methods.
+
+When you need to see more details about the collections types shown in this chapter, see their Scaladoc pages:
+
+- [List](https://www.scala-lang.org/api/current/scala/collection/immutable/List.html)
+- [Vector](https://www.scala-lang.org/api/current/scala/collection/immutable/Vector.html)
+- [ArrayBuffer](https://www.scala-lang.org/api/current/scala/collection/mutable/ArrayBuffer.html)
+- [Range](https://www.scala-lang.org/api/current/scala/collection/immutable/Range.html)
+
+Also mentioned are the immutable `Map` and `Set`:
+
+- [Map](https://www.scala-lang.org/api/current/scala/collection/immutable/Map.html)
+- [Set](https://www.scala-lang.org/api/current/scala/collection/immutable/Set.html)
+
+and the mutable `Map` and `Set`:
+
+- [Map](https://www.scala-lang.org/api/current/scala/collection/mutable/Map.html)
+- [Set](https://www.scala-lang.org/api/current/scala/collection/mutable/Set.html)
+
+
diff --git a/_overviews/scala3-book/concurrency.md b/_overviews/scala3-book/concurrency.md
new file mode 100644
index 0000000000..4364239bd8
--- /dev/null
+++ b/_overviews/scala3-book/concurrency.md
@@ -0,0 +1,325 @@
+---
+title: Concurrency
+type: chapter
+description: This page discusses how Scala concurrency works, with an emphasis on Scala Futures.
+languages: [ru, zh-cn]
+num: 69
+previous-page: ca-summary
+next-page: scala-tools
+---
+
+
+When you want to write parallel and concurrent applications in Scala, you _can_ use the native Java `Thread`---but the Scala [Future](https://www.scala-lang.org/api/current/scala/concurrent/Future$.html) offers a higher-level and more idiomatic approach, so it’s preferred, and covered in this chapter.
+
+
+
+## Introduction
+
+Here’s a description of the Scala `Future` from its Scaladoc:
+
+> “A `Future` represents a value which may or may not _currently_ be available, but will be available at some point, or an exception if that value could not be made available.”
+
+To demonstrate what that means, let’s first look at single-threaded programming.
+In the single-threaded world you bind the result of a method call to a variable like this:
+
+```scala
+def aShortRunningTask(): Int = 42
+val x = aShortRunningTask()
+```
+
+In this code, the value `42` is immediately bound to `x`.
+
+When you’re working with a `Future`, the assignment process looks similar:
+
+```scala
+def aLongRunningTask(): Future[Int] = ???
+val x = aLongRunningTask()
+```
+
+But the main difference in this case is that because `aLongRunningTask` takes an indeterminate amount of time to return, the value in `x` may or may not be _currently_ available, but it will be available at some point---in the future.
+
+Another way to look at this is in terms of blocking.
+In this single-threaded example, the `println` statement isn’t printed until `aShortRunningTask` completes:
+
+```scala
+def aShortRunningTask(): Int =
+ Thread.sleep(500)
+ 42
+val x = aShortRunningTask()
+println("Here")
+```
+
+Conversely, if `aShortRunningTask` is created as a `Future`, the `println` statement is printed almost immediately because `aShortRunningTask` is spawned off on some other thread---it doesn't block.
+
+In this chapter you’ll see how to use futures, including how to run multiple futures in parallel and combine their results in a `for` expression.
+You’ll also see examples of methods that are used to handle the value in a future once it returns.
+
+> When you think about futures, it’s important to know that they’re intended as a one-shot, “Handle this relatively slow computation on some other thread, and call me back with a result when you’re done” construct.
+> As a point of contrast, [Akka](https://akka.io) actors are intended to run for a long time and respond to many requests during their lifetime.
+> While an actor may live forever, a future eventually contains the result
+> of a computation that ran only once.
+
+
+
+## An example in the REPL
+
+A future is used to create a temporary pocket of concurrency.
+For instance, you use a future when you need to call an algorithm that runs for an indeterminate amount of time---such as calling a remote microservice---so you want to run it off of the main thread.
+
+To demonstrate how this works, let’s start with a `Future` example in the REPL.
+First, paste in these required `import` statements:
+
+```scala
+import scala.concurrent.Future
+import scala.concurrent.ExecutionContext.Implicits.global
+import scala.util.{Failure, Success}
+```
+
+Now you’re ready to create a future.
+For this example, first define a long-running, single-threaded algorithm:
+
+```scala
+def longRunningAlgorithm() =
+ Thread.sleep(10_000)
+ 42
+```
+
+That fancy algorithm returns the integer value `42` after a ten-second delay.
+Now call that algorithm by wrapping it into the `Future` constructor, and assigning the result to a variable:
+
+```scala
+scala> val eventualInt = Future(longRunningAlgorithm())
+eventualInt: scala.concurrent.Future[Int] = Future()
+```
+
+Right away, your computation---the call to `longRunningAlgorithm()`---begins running.
+If you immediately check the value of the variable `eventualInt`, you see that the future hasn't been completed yet:
+
+```scala
+scala> eventualInt
+val res1: scala.concurrent.Future[Int] = Future()
+```
+
+But if you check again after ten seconds, you’ll see that it is completed successfully:
+
+```scala
+scala> eventualInt
+val res2: scala.concurrent.Future[Int] = Future(Success(42))
+```
+
+While that’s a relatively simple example, it shows the basic approach: Just construct a new `Future` with your long-running algorithm.
+
+One thing to notice is that the `42` you expected is wrapped in a `Success`, which is further wrapped in a `Future`.
+This is a key concept to understand: the value in a `Future` is always an instance of one of the `scala.util.Try` types: `Success` or `Failure`.
+Therefore, when you work with the result of a future, you use the usual `Try`-handling techniques.
+
+
+### Using `map` with futures
+
+`Future` has a `map` method, which you use just like the `map` method on collections.
+This is what the result looks like when you call `map` right after creating the variable `a`:
+
+```scala
+scala> val a = Future(longRunningAlgorithm()).map(_ * 2)
+a: scala.concurrent.Future[Int] = Future()
+```
+
+As shown, for the future that was created with the `longRunningAlgorithm`, the initial output shows `Future()`.
+But when you check `a`’s value after ten seconds you’ll see that it contains the expected result of `84`:
+
+```scala
+scala> a
+res1: scala.concurrent.Future[Int] = Future(Success(84))
+```
+
+Once again, the successful result is wrapped inside a `Success` and a `Future`.
+
+
+### Using callback methods with futures
+
+In addition to higher-order functions like `map`, you can also use callback methods with futures.
+One commonly used callback method is `onComplete`, which takes a *partial function* in which you handle the `Success` and `Failure` cases:
+
+```scala
+Future(longRunningAlgorithm()).onComplete {
+ case Success(value) => println(s"Got the callback, value = $value")
+ case Failure(e) => e.printStackTrace
+}
+```
+
+When you paste that code in the REPL you’ll eventually see the result:
+
+```scala
+Got the callback, value = 42
+```
+
+
+
+## Other Future methods
+
+The `Future` class has other methods you can use.
+It has some methods that you find on Scala collections classes, including:
+
+- `filter`
+- `flatMap`
+- `map`
+
+Its callback methods are:
+
+- `onComplete`
+- `andThen`
+- `foreach`
+
+Other transformation methods include:
+
+- `fallbackTo`
+- `recover`
+- `recoverWith`
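+
+As a small sketch of how one of these methods can be used, `recover` lets you map certain failures back to a successful value. The `parseIntAsync` method here is made up purely for illustration:
+
+```scala
+import scala.concurrent.Future
+import scala.concurrent.ExecutionContext.Implicits.global
+
+// a hypothetical method, used only for illustration: the future fails
+// with a NumberFormatException when the input isn’t a number
+def parseIntAsync(s: String): Future[Int] = Future(s.toInt)
+
+// map that specific failure back to a successful default value
+val safeResult: Future[Int] = parseIntAsync("not a number").recover {
+  case _: NumberFormatException => 0
+}
+```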
+
+See the [Futures and Promises][futures] page for a discussion of additional methods available to futures.
+
+
+
+## Running multiple futures and joining their results
+
+To run multiple computations in parallel and join their results when all of the futures have been completed, use a `for` expression.
+
+The correct approach is:
+
+1. Start the computations that return `Future` results
+2. Merge their results in a `for` expression
+3. Extract the merged result using `onComplete` or a similar technique
+
+
+### An example
+
+The three steps of the correct approach are shown in the following example.
+A key is that you first start the computations that return futures, and then join them in the `for` expression:
+
+```scala
+import scala.concurrent.Future
+import scala.concurrent.ExecutionContext.Implicits.global
+import scala.util.{Failure, Success}
+
+val startTime = System.currentTimeMillis()
+def delta() = System.currentTimeMillis() - startTime
+def sleep(millis: Long) = Thread.sleep(millis)
+
+@main def multipleFutures1 =
+
+ println(s"creating the futures: ${delta()}")
+
+ // (1) start the computations that return futures
+ val f1 = Future { sleep(800); 1 } // eventually returns 1
+ val f2 = Future { sleep(200); 2 } // eventually returns 2
+ val f3 = Future { sleep(400); 3 } // eventually returns 3
+
+ // (2) join the futures in a `for` expression
+ val result =
+ for
+ r1 <- f1
+ r2 <- f2
+ r3 <- f3
+ yield
+ println(s"in the 'yield': ${delta()}")
+ (r1 + r2 + r3)
+
+ // (3) process the result
+ result.onComplete {
+ case Success(x) =>
+ println(s"in the Success case: ${delta()}")
+ println(s"result = $x")
+ case Failure(e) =>
+ e.printStackTrace
+ }
+
+ println(s"before the 'sleep(3000)': ${delta()}")
+
+ // important for a little parallel demo: keep the jvm alive
+ sleep(3000)
+```
+
+When you run that application, you see output that looks like this:
+
+````
+creating the futures: 1
+before the 'sleep(3000)': 2
+in the 'yield': 806
+in the Success case: 806
+result = 6
+````
+
+As that output shows, the futures are created very rapidly: within two milliseconds, execution reaches the print statement just before the `sleep(3000)` call at the end of the method.
+All of that code is run on the JVM’s main thread.
+Then, at 806 ms, the three futures complete and the code in the `yield` block is run.
+Then the code immediately goes to the `Success` case in the `onComplete` method.
+
+The 806 ms output is a key to seeing that the three computations are run in parallel.
+If they were run sequentially, the total time would be about 1,400 ms---the sum of the sleep times of the three computations.
+But because they’re run in parallel, the total time is just slightly longer than the longest-running computation: `f1`, which is 800 ms.
+
+> Notice that if the computations were run within the `for` expression, they
+> would be executed sequentially, not in parallel:
+> ~~~
+> // Sequential execution (no parallelism!)
+> for
+> r1 <- Future { sleep(800); 1 }
+> r2 <- Future { sleep(200); 2 }
+> r3 <- Future { sleep(400); 3 }
+> yield
+> r1 + r2 + r3
+> ~~~
+> So, if you want the computations to be possibly run in parallel, remember
+> to run them outside the `for` expression.
+
+### A method that returns a future
+
+So far you’ve seen how to pass a single-threaded algorithm into a `Future` constructor.
+You can use the same technique to create a method that returns a `Future`:
+
+```scala
+// simulate a slow-running method
+def slowlyDouble(x: Int, delay: Long): Future[Int] = Future {
+ sleep(delay)
+ x * 2
+}
+```
+
+As with the previous examples, just assign the result of the method call to a new variable.
+Then when you check the result right away you’ll see that it’s not completed, but after the delay time the future will have a result:
+
+````
+scala> val f = slowlyDouble(2, 5_000L)
+val f: concurrent.Future[Int] = Future()
+
+scala> f
+val res0: concurrent.Future[Int] = Future()
+
+scala> f
+val res1: concurrent.Future[Int] = Future(Success(4))
+````
+
+
+
+## Key points about futures
+
+Hopefully those examples give you an idea of how Scala futures work.
+To summarize, a few key points about futures are:
+
+- You construct futures to run tasks off of the main thread
+- Futures are intended for one-shot, potentially long-running concurrent tasks that *eventually* return a value; they create a temporary pocket of concurrency
+- A future starts running as soon as you construct it
+- A benefit of futures over threads is that they work with `for` expressions, and come with a variety of callback methods that simplify the process of working with concurrent threads
+- When you work with futures you don’t have to concern yourself with the low-level details of thread management
+- You handle the result of a future with callback methods like `onComplete` and `andThen`, or transformation methods like `filter`, `map`, etc.
+- The value inside a `Future` is always an instance of one of the `Try` types: `Success` or `Failure`
+- If you’re using multiple futures to yield a single result, combine them in a `for` expression
+
+Also, as you saw with the `import` statements in these examples, the Scala `Future` depends on an `ExecutionContext`.
+
+For more details about futures, see [Futures and Promises][futures], an article that discusses futures, promises, and execution contexts.
+It also provides a discussion of how a `for` expression is translated into a `flatMap` operation.
+
+
+
+[futures]: {% link _overviews/core/futures.md %}
diff --git a/_overviews/scala3-book/control-structures.md b/_overviews/scala3-book/control-structures.md
new file mode 100644
index 0000000000..9d44db59cb
--- /dev/null
+++ b/_overviews/scala3-book/control-structures.md
@@ -0,0 +1,1097 @@
+---
+title: Control Structures
+type: chapter
+description: This page provides an introduction to Scala's control structures, including if/then/else, 'for' loops, 'for' expressions, 'match' expressions, try/catch/finally, and 'while' loops.
+languages: [ru, zh-cn]
+num: 19
+previous-page: string-interpolation
+next-page: domain-modeling-intro
+---
+
+
+Scala has the control structures you expect to find in a programming language, including:
+
+- `if`/`then`/`else`
+- `for` loops
+- `while` loops
+- `try`/`catch`/`finally`
+
+It also has two other powerful constructs that you may not have seen before, depending on your programming background:
+
+- `for` expressions (also known as _`for` comprehensions_)
+- `match` expressions
+
+These are all demonstrated in the following sections.
+
+## The if/then/else construct
+
+A one-line Scala `if` statement looks like this:
+
+{% tabs control-structures-1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-1 %}
+```scala
+if (x == 1) println(x)
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-1 %}
+```scala
+if x == 1 then println(x)
+```
+{% endtab %}
+{% endtabs %}
+
+When an `if` condition requires multiple lines of code, use this syntax:
+
+{% tabs control-structures-2 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-2 %}
+```scala
+if (x == 1) {
+ println("x is 1, as you can see:")
+ println(x)
+}
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-2 %}
+```scala
+if x == 1 then
+ println("x is 1, as you can see:")
+ println(x)
+```
+{% endtab %}
+{% endtabs %}
+
+The `if`/`else` syntax looks like this:
+
+{% tabs control-structures-3 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-3 %}
+```scala
+if (x == 1) {
+ println("x is 1, as you can see:")
+ println(x)
+} else {
+ println("x was not 1")
+}
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-3 %}
+```scala
+if x == 1 then
+ println("x is 1, as you can see:")
+ println(x)
+else
+ println("x was not 1")
+```
+{% endtab %}
+{% endtabs %}
+
+And this is the `if`/`else if`/`else` syntax:
+
+{% tabs control-structures-4 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-4 %}
+```scala
+if (x < 0)
+ println("negative")
+else if (x == 0)
+ println("zero")
+else
+ println("positive")
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-4 %}
+```scala
+if x < 0 then
+ println("negative")
+else if x == 0 then
+ println("zero")
+else
+ println("positive")
+```
+{% endtab %}
+{% endtabs %}
+
+### `end if` statement
+
+
+> This is new in Scala 3, and not supported in Scala 2.
+
+
+You can optionally include an `end if` statement at the end of each expression, if you prefer:
+
+{% tabs control-structures-5 %}
+{% tab 'Scala 3 Only' %}
+
+```scala
+if x == 1 then
+ println("x is 1, as you can see:")
+ println(x)
+end if
+```
+
+{% endtab %}
+{% endtabs %}
+
+### `if`/`else` expressions always return a result
+
+Note that `if`/`else` comparisons form _expressions_, meaning that they return a value which you can assign to a variable.
+Because of this, there’s no need for a special ternary operator:
+
+{% tabs control-structures-6 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-6 %}
+```scala
+val minValue = if (a < b) a else b
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-6 %}
+```scala
+val minValue = if a < b then a else b
+```
+{% endtab %}
+{% endtabs %}
+
+Because they return a value, you can use `if`/`else` expressions as the body of a method:
+
+{% tabs control-structures-7 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-7 %}
+```scala
+def compare(a: Int, b: Int): Int =
+ if (a < b)
+ -1
+ else if (a == b)
+ 0
+ else
+ 1
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-7 %}
+```scala
+def compare(a: Int, b: Int): Int =
+ if a < b then
+ -1
+ else if a == b then
+ 0
+ else
+ 1
+```
+{% endtab %}
+{% endtabs %}
+
+### Aside: Expression-oriented programming
+
+As a brief note about programming in general, when every expression you write returns a value, that style is referred to as _expression-oriented programming_, or EOP.
+For example, this is an _expression_:
+
+{% tabs control-structures-8 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-8 %}
+```scala
+val minValue = if (a < b) a else b
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-8 %}
+```scala
+val minValue = if a < b then a else b
+```
+{% endtab %}
+{% endtabs %}
+
+Conversely, lines of code that don’t return values are called _statements_, and they’re used for their _side-effects_.
+For example, these lines of code don’t return values, so they’re used for their side effects:
+
+{% tabs control-structures-9 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-9 %}
+```scala
+if (a == b) action()
+println("Hello")
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-9 %}
+```scala
+if a == b then action()
+println("Hello")
+```
+{% endtab %}
+{% endtabs %}
+
+The first example runs the `action` method as a side effect when `a` is equal to `b`.
+The second example is used for the side effect of printing a string to STDOUT.
+As you learn more about Scala you’ll find yourself writing more _expressions_ and fewer _statements_.
+
+## `for` loops
+
+In its most simple use, a Scala `for` loop can be used to iterate over the elements in a collection.
+For example, given a sequence of integers, you can loop over its elements and print their values like this:
+
+{% tabs control-structures-10 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-10 %}
+```scala
+val ints = Seq(1, 2, 3)
+for (i <- ints) println(i)
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-10 %}
+```scala
+val ints = Seq(1, 2, 3)
+for i <- ints do println(i)
+```
+{% endtab %}
+{% endtabs %}
+
+
+The code `i <- ints` is referred to as a _generator_. In any generator `p <- e`, the expression `e` can generate zero or many bindings to the pattern `p`.
+
+This is what the result looks like in the Scala REPL:
+
+{% tabs control-structures-11 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-11 %}
+````
+scala> val ints = Seq(1,2,3)
+ints: Seq[Int] = List(1, 2, 3)
+
+scala> for (i <- ints) println(i)
+1
+2
+3
+````
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-11 %}
+````
+scala> val ints = Seq(1,2,3)
+ints: Seq[Int] = List(1, 2, 3)
+
+scala> for i <- ints do println(i)
+1
+2
+3
+````
+{% endtab %}
+{% endtabs %}
+
+
+When you need a multiline block of code following the `for` generator, use the following syntax:
+
+{% tabs control-structures-12 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-12 %}
+```scala
+for (i <- ints) {
+ val x = i * 2
+ println(s"i = $i, x = $x")
+}
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-12 %}
+```scala
+for i <- ints
+do
+ val x = i * 2
+ println(s"i = $i, x = $x")
+```
+{% endtab %}
+{% endtabs %}
+
+
+### Multiple generators
+
+`for` loops can have multiple generators, as shown in this example:
+
+{% tabs control-structures-13 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-13 %}
+```scala
+for {
+ i <- 1 to 2
+ j <- 'a' to 'b'
+ k <- 1 to 10 by 5
+} {
+ println(s"i = $i, j = $j, k = $k")
+}
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-13 %}
+```scala
+for
+ i <- 1 to 2
+ j <- 'a' to 'b'
+ k <- 1 to 10 by 5
+do
+ println(s"i = $i, j = $j, k = $k")
+```
+{% endtab %}
+{% endtabs %}
+
+
+That expression prints this output:
+
+````
+i = 1, j = a, k = 1
+i = 1, j = a, k = 6
+i = 1, j = b, k = 1
+i = 1, j = b, k = 6
+i = 2, j = a, k = 1
+i = 2, j = a, k = 6
+i = 2, j = b, k = 1
+i = 2, j = b, k = 6
+````
+
+### Guards
+
+`for` loops can also contain `if` statements, which are known as _guards_:
+
+{% tabs control-structures-14 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-14 %}
+```scala
+for {
+ i <- 1 to 5
+ if i % 2 == 0
+} {
+ println(i)
+}
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-14 %}
+```scala
+for
+ i <- 1 to 5
+ if i % 2 == 0
+do
+ println(i)
+```
+{% endtab %}
+{% endtabs %}
+
+
+The output of that loop is:
+
+````
+2
+4
+````
+
+A `for` loop can have as many guards as needed.
+This example shows one way to print the number `4`:
+
+{% tabs control-structures-15 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-15 %}
+```scala
+for {
+ i <- 1 to 10
+ if i > 3
+ if i < 6
+ if i % 2 == 0
+} {
+ println(i)
+}
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-15 %}
+```scala
+for
+ i <- 1 to 10
+ if i > 3
+ if i < 6
+ if i % 2 == 0
+do
+ println(i)
+```
+{% endtab %}
+{% endtabs %}
+
+### Using `for` with Maps
+
+You can also use `for` loops with a `Map`.
+For example, given this `Map` of state abbreviations and their full names:
+
+{% tabs map %}
+{% tab 'Scala 2 and 3' for=map %}
+```scala
+val states = Map(
+ "AK" -> "Alaska",
+ "AL" -> "Alabama",
+ "AR" -> "Arizona"
+)
+```
+{% endtab %}
+{% endtabs %}
+
+You can print the keys and values using `for`, like this:
+
+{% tabs control-structures-16 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-16 %}
+```scala
+for ((abbrev, fullName) <- states) println(s"$abbrev: $fullName")
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-16 %}
+```scala
+for (abbrev, fullName) <- states do println(s"$abbrev: $fullName")
+```
+{% endtab %}
+{% endtabs %}
+
+Here’s what that looks like in the REPL:
+
+{% tabs control-structures-17 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-17 %}
+```scala
+scala> for ((abbrev, fullName) <- states) println(s"$abbrev: $fullName")
+AK: Alaska
+AL: Alabama
+AR: Arkansas
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-17 %}
+```scala
+scala> for (abbrev, fullName) <- states do println(s"$abbrev: $fullName")
+AK: Alaska
+AL: Alabama
+AR: Arkansas
+```
+{% endtab %}
+{% endtabs %}
+
+As the `for` loop iterates over the map, each key/value pair is bound to the variables `abbrev` and `fullName`, which are in a tuple:
+
+```scala
+(abbrev, fullName) <- states
+```
+
+As the loop runs, the variable `abbrev` is assigned to the current _key_ in the map, and the variable `fullName` is assigned to the current map _value_.
+
+## `for` expressions
+
+In the previous `for` loop examples, those loops were all used for _side effects_, specifically to print those values to STDOUT using `println`.
+
+It’s important to know that you can also create `for` _expressions_ that return values.
+You create a `for` expression by adding the `yield` keyword and an expression to return, like this:
+
+{% tabs control-structures-18 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-18 %}
+```scala
+val list =
+ for (i <- 10 to 12)
+ yield i * 2
+
+// list: IndexedSeq[Int] = Vector(20, 22, 24)
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-18 %}
+```scala
+val list =
+ for i <- 10 to 12
+ yield i * 2
+
+// list: IndexedSeq[Int] = Vector(20, 22, 24)
+```
+{% endtab %}
+{% endtabs %}
+
+
+After that `for` expression runs, the variable `list` is a `Vector` that contains the values shown.
+This is how the expression works:
+
+1. The `for` expression starts to iterate over the values in the range `(10, 11, 12)`.
+ It first works on the value `10`, multiplies it by `2`, then _yields_ that result, the value `20`.
+2. Next, it works on the `11`---the second value in the range.
+ It multiplies it by `2`, then yields the value `22`.
+ You can think of these yielded values as accumulating in a temporary holding place.
+3. Finally, the loop gets the number `12` from the range, multiplies it by `2`, yielding the number `24`.
+ The loop completes at this point and yields the final result, the `Vector(20, 22, 24)`.
+
+{% comment %}
+NOTE: This is a place where it would be great to have a TIP or NOTE block:
+{% endcomment %}
+
+While the intent of this section is to demonstrate `for` expressions, it can help to know that the `for` expression shown is equivalent to this `map` method call:
+
+{% tabs map-call %}
+{% tab 'Scala 2 and 3' for=map-call %}
+```scala
+val list = (10 to 12).map(i => i * 2)
+```
+{% endtab %}
+{% endtabs %}
+
+`for` expressions can be used any time you need to traverse all the elements in a collection and apply an algorithm to those elements to create a new list.
+
+Here’s an example that shows how to use a block of code after the `yield`:
+
+{% tabs control-structures-19 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-19 %}
+```scala
+val names = List("_olivia", "_walter", "_peter")
+
+val capNames = for (name <- names) yield {
+ val nameWithoutUnderscore = name.drop(1)
+ val capName = nameWithoutUnderscore.capitalize
+ capName
+}
+
+// capNames: List[String] = List(Olivia, Walter, Peter)
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-19 %}
+```scala
+val names = List("_olivia", "_walter", "_peter")
+
+val capNames = for name <- names yield
+ val nameWithoutUnderscore = name.drop(1)
+ val capName = nameWithoutUnderscore.capitalize
+ capName
+
+// capNames: List[String] = List(Olivia, Walter, Peter)
+```
+{% endtab %}
+{% endtabs %}
+
+### Using a `for` expression as the body of a method
+
+Because a `for` expression yields a result, it can be used as the body of a method that returns a useful value.
+This method returns all the values in a given list of integers that are between `3` and `10`:
+
+{% tabs control-structures-20 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-20 %}
+```scala
+def between3and10(xs: List[Int]): List[Int] =
+ for {
+ x <- xs
+ if x >= 3
+ if x <= 10
+ } yield x
+
+between3and10(List(1, 3, 7, 11)) // : List[Int] = List(3, 7)
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-20 %}
+```scala
+def between3and10(xs: List[Int]): List[Int] =
+ for
+ x <- xs
+ if x >= 3
+ if x <= 10
+ yield x
+
+between3and10(List(1, 3, 7, 11)) // : List[Int] = List(3, 7)
+```
+{% endtab %}
+{% endtabs %}
+
+## `while` loops
+
+Scala `while` loop syntax looks like this:
+
+{% tabs control-structures-21 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-21 %}
+```scala
+var i = 0
+
+while (i < 3) {
+ println(i)
+ i += 1
+}
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-21 %}
+```scala
+var i = 0
+
+while i < 3 do
+ println(i)
+ i += 1
+```
+{% endtab %}
+{% endtabs %}
+
+## `match` expressions
+
+Pattern matching is a major feature of functional programming languages, and Scala includes a `match` expression that has many capabilities.
+
+In the simplest case, you can use a `match` expression like a Java `switch` statement, matching cases based on an integer value.
+Notice that this really is an expression, as it evaluates to a result:
+
+{% tabs control-structures-22 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-22 %}
+```scala
+// `i` is an integer
+val day = i match {
+ case 0 => "Sunday"
+ case 1 => "Monday"
+ case 2 => "Tuesday"
+ case 3 => "Wednesday"
+ case 4 => "Thursday"
+ case 5 => "Friday"
+ case 6 => "Saturday"
+ case _ => "invalid day" // the default, catch-all
+}
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-22 %}
+```scala
+// `i` is an integer
+val day = i match
+ case 0 => "Sunday"
+ case 1 => "Monday"
+ case 2 => "Tuesday"
+ case 3 => "Wednesday"
+ case 4 => "Thursday"
+ case 5 => "Friday"
+ case 6 => "Saturday"
+ case _ => "invalid day" // the default, catch-all
+```
+{% endtab %}
+{% endtabs %}
+
+In this example, the variable `i` is tested against the cases shown.
+If it’s between `0` and `6`, `day` is bound to the string that represents that day of the week.
+Otherwise, it matches the catch-all case represented by the character `_`, and `day` is bound to the string `"invalid day"`.
+
+Since the cases are considered in the order they are written, and the first matching case is used, the default case, which matches any value, must come last. The compiler warns about any cases placed after the catch-all, because they are unreachable.
+
+> When writing simple `match` expressions like this, it’s recommended to use the `@switch` annotation on the variable `i`.
+> This annotation provides a compile-time warning if the switch can’t be compiled to a `tableswitch` or `lookupswitch`, which are better for performance.
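+
+For example, here’s a minimal sketch of how that annotation can be applied (Scala 3 syntax, assuming `i` is an `Int` as in the example above):
+
+```scala
+import scala.annotation.switch
+
+// annotating the scrutinee asks the compiler to warn us if this
+// match can’t be compiled to a tableswitch or lookupswitch
+val day = (i: @switch) match
+  case 0 => "Sunday"
+  case 1 => "Monday"
+  case 2 => "Tuesday"
+  case 3 => "Wednesday"
+  case 4 => "Thursday"
+  case 5 => "Friday"
+  case 6 => "Saturday"
+  case _ => "invalid day"
+```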
+
+### Using the default value
+
+When you need to access the catch-all, default value in a `match` expression, just provide a variable name on the left side of the `case` statement instead of `_`, and then use that variable name on the right side of the statement as needed:
+
+{% tabs control-structures-23 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-23 %}
+```scala
+i match {
+ case 0 => println("1")
+ case 1 => println("2")
+ case what => println(s"You gave me: $what")
+}
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-23 %}
+```scala
+i match
+ case 0 => println("1")
+ case 1 => println("2")
+ case what => println(s"You gave me: $what")
+```
+{% endtab %}
+{% endtabs %}
+
+The name used in the pattern must begin with a lowercase letter.
+A name beginning with an uppercase letter does not introduce a variable, but matches a value in scope:
+
+{% tabs control-structures-24 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-24 %}
+```scala
+val N = 42
+i match {
+ case 0 => println("1")
+ case 1 => println("2")
+ case N => println("42")
+ case n => println(s"You gave me: $n" )
+}
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-24 %}
+```scala
+val N = 42
+i match
+ case 0 => println("1")
+ case 1 => println("2")
+ case N => println("42")
+ case n => println(s"You gave me: $n" )
+```
+{% endtab %}
+{% endtabs %}
+
+If `i` is equal to `42`, then `case N` will match, and it will print the string `"42"`. It won't reach the default case.
+
+### Handling multiple possible matches on one line
+
+As mentioned, `match` expressions have many capabilities.
+This example shows how to use multiple possible pattern matches in each `case` statement:
+
+{% tabs control-structures-25 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-25 %}
+```scala
+val evenOrOdd = i match {
+ case 1 | 3 | 5 | 7 | 9 => println("odd")
+ case 2 | 4 | 6 | 8 | 10 => println("even")
+ case _ => println("some other number")
+}
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-25 %}
+```scala
+val evenOrOdd = i match
+ case 1 | 3 | 5 | 7 | 9 => println("odd")
+ case 2 | 4 | 6 | 8 | 10 => println("even")
+ case _ => println("some other number")
+```
+{% endtab %}
+{% endtabs %}
+
+### Using `if` guards in `case` clauses
+
+You can also use guards in the `case`s of a match expression.
+In this example the second and third `case` both use guards to match multiple integer values:
+
+{% tabs control-structures-26 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-26 %}
+```scala
+i match {
+ case 1 => println("one, a lonely number")
+ case x if x == 2 || x == 3 => println("two’s company, three’s a crowd")
+ case x if x > 3 => println("4+, that’s a party")
+ case _ => println("i’m guessing your number is zero or less")
+}
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-26 %}
+```scala
+i match
+ case 1 => println("one, a lonely number")
+ case x if x == 2 || x == 3 => println("two’s company, three’s a crowd")
+ case x if x > 3 => println("4+, that’s a party")
+ case _ => println("i’m guessing your number is zero or less")
+```
+{% endtab %}
+{% endtabs %}
+
+Here’s another example, which shows how to match a given value against ranges of numbers:
+
+{% tabs control-structures-27 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-27 %}
+```scala
+i match {
+ case a if 0 to 9 contains a => println(s"0-9 range: $a")
+ case b if 10 to 19 contains b => println(s"10-19 range: $b")
+ case c if 20 to 29 contains c => println(s"20-29 range: $c")
+ case _ => println("Hmmm...")
+}
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-27 %}
+```scala
+i match
+ case a if 0 to 9 contains a => println(s"0-9 range: $a")
+ case b if 10 to 19 contains b => println(s"10-19 range: $b")
+ case c if 20 to 29 contains c => println(s"20-29 range: $c")
+ case _ => println("Hmmm...")
+```
+{% endtab %}
+{% endtabs %}
+
+#### Case classes and match expressions
+
+You can also extract fields from `case` classes---and classes that have properly written `apply`/`unapply` methods---and use those in your guard conditions.
+Here’s an example using a simple `Person` case class:
+
+{% tabs control-structures-28 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-28 %}
+```scala
+case class Person(name: String)
+
+def speak(p: Person) = p match {
+ case Person(name) if name == "Fred" => println(s"$name says, Yubba dubba doo")
+ case Person(name) if name == "Bam Bam" => println(s"$name says, Bam bam!")
+ case _ => println("Watch the Flintstones!")
+}
+
+speak(Person("Fred")) // "Fred says, Yubba dubba doo"
+speak(Person("Bam Bam")) // "Bam Bam says, Bam bam!"
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-28 %}
+```scala
+case class Person(name: String)
+
+def speak(p: Person) = p match
+ case Person(name) if name == "Fred" => println(s"$name says, Yubba dubba doo")
+ case Person(name) if name == "Bam Bam" => println(s"$name says, Bam bam!")
+ case _ => println("Watch the Flintstones!")
+
+speak(Person("Fred")) // "Fred says, Yubba dubba doo"
+speak(Person("Bam Bam")) // "Bam Bam says, Bam bam!"
+```
+{% endtab %}
+{% endtabs %}
+
+#### Binding matched patterns to variables
+
+You can bind the matched pattern to a variable to use type-specific behavior:
+
+{% tabs pattern-binding class=tabs-scala-version %}
+{% tab 'Scala 2' for=pattern-binding %}
+```scala
+trait Animal {
+ val name: String
+}
+case class Cat(name: String) extends Animal {
+ def meow: String = "Meow"
+}
+case class Dog(name: String) extends Animal {
+ def bark: String = "Bark"
+}
+
+def speak(animal: Animal) = animal match {
+ case c @ Cat(name) if name == "Felix" => println(s"$name says, ${c.meow}!")
+ case d @ Dog(name) if name == "Rex" => println(s"$name says, ${d.bark}!")
+ case _ => println("I don't know you!")
+}
+
+speak(Cat("Felix")) // "Felix says, Meow!"
+speak(Dog("Rex")) // "Rex says, Bark!"
+```
+{% endtab %}
+{% tab 'Scala 3' for=pattern-binding %}
+```scala
+trait Animal:
+ val name: String
+case class Cat(name: String) extends Animal:
+ def meow: String = "Meow"
+case class Dog(name: String) extends Animal:
+ def bark: String = "Bark"
+
+def speak(animal: Animal) = animal match
+ case c @ Cat(name) if name == "Felix" => println(s"$name says, ${c.meow}!")
+ case d @ Dog(name) if name == "Rex" => println(s"$name says, ${d.bark}!")
+ case _ => println("I don't know you!")
+
+speak(Cat("Felix")) // "Felix says, Meow!"
+speak(Dog("Rex")) // "Rex says, Bark!"
+```
+{% endtab %}
+{% endtabs %}
+
+### Using a `match` expression as the body of a method
+
+Because `match` expressions return a value, they can be used as the body of a method.
+This method takes a `Matchable` value as an input parameter, and returns a `Boolean`, based on the result of the `match` expression:
+
+{% tabs control-structures-29 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-29 %}
+```scala
+def isTruthy(a: Matchable) = a match {
+ case 0 | "" | false => false
+ case _ => true
+}
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-29 %}
+```scala
+def isTruthy(a: Matchable) = a match
+ case 0 | "" | false => false
+ case _ => true
+```
+{% endtab %}
+{% endtabs %}
+
+The input parameter `a` is defined to be the [`Matchable` type][matchable]---which is the root of all Scala types that pattern matching can be performed on.
+The method is implemented by matching on the input, providing two cases:
+The first one checks whether the given value is the integer `0`, an empty string, or `false`, and returns `false` in this case.
+In the default case, we return `true` for any other value.
+These examples show how this method works:
+
+{% tabs is-truthy-call %}
+{% tab 'Scala 2 and 3' for=is-truthy-call %}
+```scala
+isTruthy(0) // false
+isTruthy(false) // false
+isTruthy("") // false
+isTruthy(1) // true
+isTruthy(" ") // true
+isTruthy(2F) // true
+```
+{% endtab %}
+{% endtabs %}
+
+Using a `match` expression as the body of a method is very common.
+
+#### Match expressions support many different types of patterns
+
+There are many different forms of patterns that can be used to write `match` expressions.
+Examples include:
+
+- Constant patterns (such as `case 3 => `)
+- Sequence patterns (such as `case List(els : _*) =>`)
+- Tuple patterns (such as `case (x, y) =>`)
+- Constructor patterns (such as `case Person(first, last) =>`)
+- Type test patterns (such as `case p: Person =>`)
+
+All of these kinds of patterns are shown in the following `pattern` method, which takes an input parameter of type `Matchable` and returns a `String`:
+
+{% tabs control-structures-30 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-30 %}
+```scala
+def pattern(x: Matchable): String = x match {
+
+ // constant patterns
+ case 0 => "zero"
+ case true => "true"
+ case "hello" => "you said 'hello'"
+ case Nil => "an empty List"
+
+ // sequence patterns
+ case List(0, _, _) => "a 3-element list with 0 as the first element"
+ case List(1, _*) => "list, starts with 1, has any number of elements"
+ case Vector(1, _*) => "vector, starts w/ 1, has any number of elements"
+
+ // tuple patterns
+ case (a, b) => s"got $a and $b"
+ case (a, b, c) => s"got $a, $b, and $c"
+
+ // constructor patterns
+ case Person(first, "Alexander") => s"Alexander, first name = $first"
+ case Dog("Zeus") => "found a dog named Zeus"
+
+ // type test patterns
+ case s: String => s"got a string: $s"
+ case i: Int => s"got an int: $i"
+ case f: Float => s"got a float: $f"
+ case a: Array[Int] => s"array of int: ${a.mkString(",")}"
+ case as: Array[String] => s"string array: ${as.mkString(",")}"
+ case d: Dog => s"dog: ${d.name}"
+ case list: List[?] => s"got a List: $list"
+ case m: Map[?, ?] => m.toString
+
+ // the default wildcard pattern
+ case _ => "Unknown"
+}
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-30 %}
+```scala
+def pattern(x: Matchable): String = x match
+
+ // constant patterns
+ case 0 => "zero"
+ case true => "true"
+ case "hello" => "you said 'hello'"
+ case Nil => "an empty List"
+
+ // sequence patterns
+ case List(0, _, _) => "a 3-element list with 0 as the first element"
+ case List(1, _*) => "list, starts with 1, has any number of elements"
+ case Vector(1, _*) => "vector, starts w/ 1, has any number of elements"
+
+ // tuple patterns
+ case (a, b) => s"got $a and $b"
+ case (a, b, c) => s"got $a, $b, and $c"
+
+ // constructor patterns
+ case Person(first, "Alexander") => s"Alexander, first name = $first"
+ case Dog("Zeus") => "found a dog named Zeus"
+
+ // type test patterns
+ case s: String => s"got a string: $s"
+ case i: Int => s"got an int: $i"
+ case f: Float => s"got a float: $f"
+ case a: Array[Int] => s"array of int: ${a.mkString(",")}"
+ case as: Array[String] => s"string array: ${as.mkString(",")}"
+ case d: Dog => s"dog: ${d.name}"
+ case list: List[?] => s"got a List: $list"
+ case m: Map[?, ?] => m.toString
+
+ // the default wildcard pattern
+ case _ => "Unknown"
+```
+{% endtab %}
+{% endtabs %}
+
+You can also write the code on the right side of the `=>` on multiple lines if you think it is easier to read. Here is one example:
+
+{% tabs control-structures-31 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-31 %}
+```scala
+count match {
+ case 1 =>
+ println("one, a lonely number")
+ case x if x == 2 || x == 3 =>
+ println("two's company, three's a crowd")
+ case x if x > 3 =>
+ println("4+, that's a party")
+ case _ =>
+ println("i'm guessing your number is zero or less")
+}
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-31 %}
+```scala
+count match
+ case 1 =>
+ println("one, a lonely number")
+ case x if x == 2 || x == 3 =>
+ println("two's company, three's a crowd")
+ case x if x > 3 =>
+ println("4+, that's a party")
+ case _ =>
+ println("i'm guessing your number is zero or less")
+```
+{% endtab %}
+{% endtabs %}
+
+In Scala 3, `match` expressions can be chained:
+
+{% tabs 'control-structures-32' %}
+{% tab 'Scala 3 Only' %}
+```scala
+i match
+ case odd: Int if odd % 2 == 1 => "odd"
+ case even: Int if even % 2 == 0 => "even"
+ case _ => "not an integer"
+match
+ case "even" => true
+ case _ => false
+```
+{% endtab %}
+{% endtabs %}
+
+The `match` expression can also follow a period, which simplifies matching on results returned by chained method calls:
+
+{% tabs 'control-structures-33' %}
+{% tab 'Scala 3 Only' %}
+```scala
+List(1, 2, 3)
+ .map(_ * 2)
+ .headOption
+ .match
+ case Some(value) => println(s"The head is: $value")
+ case None => println("The list is empty")
+```
+{% endtab %}
+{% endtabs %}
+
+## try/catch/finally
+
+Like Java, Scala has a `try`/`catch`/`finally` construct to let you catch and manage exceptions.
+For consistency, Scala uses the same syntax that `match` expressions use and supports pattern matching on the different possible exceptions that can occur.
+
+In the following example, `openAndReadAFile` is a method that does what its name implies: it opens a file and reads the text in it, assigning the result to the mutable variable `text`:
+
+{% tabs control-structures-34 class=tabs-scala-version %}
+{% tab 'Scala 2' for=control-structures-34 %}
+```scala
+var text = ""
+try {
+ text = openAndReadAFile(filename)
+} catch {
+ case fnf: FileNotFoundException => fnf.printStackTrace()
+ case ioe: IOException => ioe.printStackTrace()
+} finally {
+ // close your resources here
+ println("Came to the 'finally' clause.")
+}
+```
+{% endtab %}
+{% tab 'Scala 3' for=control-structures-34 %}
+```scala
+var text = ""
+try
+ text = openAndReadAFile(filename)
+catch
+ case fnf: FileNotFoundException => fnf.printStackTrace()
+ case ioe: IOException => ioe.printStackTrace()
+finally
+ // close your resources here
+ println("Came to the 'finally' clause.")
+```
+{% endtab %}
+{% endtabs %}
+
+Assuming that the `openAndReadAFile` method uses the Java `java.io.*` classes to read a file and doesn't catch its exceptions, attempting to open and read a file can result in both a `FileNotFoundException` and an `IOException`, and those two exceptions are caught in the `catch` block of this example.
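+
+For reference, here’s one minimal sketch of what such a method might look like (Scala 3 syntax; the use of `scala.io.Source` is an assumption, only the method name comes from the example above):
+
+```scala
+import scala.io.Source
+
+// a hypothetical implementation: Source.fromFile can throw a
+// FileNotFoundException, and reading can throw an IOException,
+// both of which are handled by the caller’s catch clauses
+def openAndReadAFile(filename: String): String =
+  val source = Source.fromFile(filename)
+  try source.mkString
+  finally source.close()
+```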
+
+[matchable]: {{ site.scala3ref }}/other-new-features/matchable.html
diff --git a/_overviews/scala3-book/domain-modeling-fp.md b/_overviews/scala3-book/domain-modeling-fp.md
new file mode 100644
index 0000000000..bc08f034c2
--- /dev/null
+++ b/_overviews/scala3-book/domain-modeling-fp.md
@@ -0,0 +1,818 @@
+---
+title: FP Modeling
+type: section
+description: This chapter provides an introduction to FP domain modeling with Scala 3.
+languages: [ru, zh-cn]
+num: 23
+previous-page: domain-modeling-oop
+next-page: methods-intro
+---
+
+
+This chapter provides an introduction to domain modeling using functional programming (FP) in Scala 3.
+When modeling the world around us with FP, you typically use these Scala constructs:
+
+- Enumerations
+- Case classes
+- Traits
+
+> If you’re not familiar with algebraic data types (ADTs) and their generalized version (GADTs), you may want to read the [Algebraic Data Types][adts] section before reading this section.
+
+## Introduction
+
+In FP, the *data* and the *operations on that data* are two separate things; you aren’t forced to encapsulate them together like you do with OOP.
+
+The concept is similar to numerical algebra.
+When you think about whole numbers whose values are greater than or equal to zero, you have a *set* of possible values that looks like this:
+
+````
+0, 1, 2 ... Int.MaxValue
+````
+
+Ignoring the division of whole numbers, the possible *operations* on those values are:
+
+````
++, -, *
+````
+
+In FP, business domains are modeled in a similar way:
+
+- You describe your set of values (your data)
+- You describe operations that work on those values (your functions)
+
+> As we will see, reasoning about programs in this style is quite different from object-oriented programming.
+> Data in FP simply **is**:
+> Separating functionality from your data lets you inspect your data without having to worry about behavior.
+
+In this chapter we’ll model the data and operations for a “pizza” in a pizza store.
+You’ll see how to implement the “data” portion of the Scala/FP model, and then you’ll see several different ways you can organize the operations on that data.
+
+## Modeling the Data
+
+In Scala, describing the data model of a programming problem is simple:
+
+- If you want to model data with different alternatives, use the `enum` construct (or `case object`s in Scala 2).
+- If you only want to group things (or need more fine-grained control), use `case` classes.
+
+### Describing Alternatives
+
+Data that simply consists of different alternatives, like crust size, crust type, and toppings, is precisely modeled
+in Scala by an enumeration.
+
+{% tabs data_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=data_1 %}
+
+In Scala 2, enumerations are expressed with a combination of a `sealed` class and several `case object`s that extend the class:
+
+```scala
+sealed abstract class CrustSize
+object CrustSize {
+ case object Small extends CrustSize
+ case object Medium extends CrustSize
+ case object Large extends CrustSize
+}
+
+sealed abstract class CrustType
+object CrustType {
+ case object Thin extends CrustType
+ case object Thick extends CrustType
+ case object Regular extends CrustType
+}
+
+sealed abstract class Topping
+object Topping {
+ case object Cheese extends Topping
+ case object Pepperoni extends Topping
+ case object BlackOlives extends Topping
+ case object GreenOlives extends Topping
+ case object Onions extends Topping
+}
+```
+
+{% endtab %}
+{% tab 'Scala 3' for=data_1 %}
+
+In Scala 3 enumerations are concisely expressed with the `enum` construct:
+
+```scala
+enum CrustSize:
+ case Small, Medium, Large
+
+enum CrustType:
+ case Thin, Thick, Regular
+
+enum Topping:
+ case Cheese, Pepperoni, BlackOlives, GreenOlives, Onions
+```
+
+{% endtab %}
+{% endtabs %}
+
+> Data types that describe different alternatives (like `CrustSize`) are also sometimes referred to as _sum types_.
+
+### Describing Compound Data
+
+A pizza can be thought of as a _compound_ container of the different attributes above.
+We can use a `case` class to describe that a `Pizza` consists of a `crustSize`, `crustType`, and potentially multiple `toppings`:
+
+{% tabs data_2 class=tabs-scala-version %}
+{% tab 'Scala 2' for=data_2 %}
+
+```scala
+import CrustSize._
+import CrustType._
+import Topping._
+
+case class Pizza(
+ crustSize: CrustSize,
+ crustType: CrustType,
+ toppings: Seq[Topping]
+)
+```
+
+{% endtab %}
+{% tab 'Scala 3' for=data_2 %}
+
+```scala
+import CrustSize.*
+import CrustType.*
+import Topping.*
+
+case class Pizza(
+ crustSize: CrustSize,
+ crustType: CrustType,
+ toppings: Seq[Topping]
+)
+```
+
+{% endtab %}
+{% endtabs %}
+
+> Data Types that aggregate multiple components (like `Pizza`) are also sometimes referred to as _product types_.
+
+And that’s it.
+That’s the data model for an FP-style pizza system.
+This solution is very concise because it doesn’t require the operations on a pizza to be combined with the data model.
+The data model is easy to read, like declaring the design for a relational database.
+It is also very easy to create values of our data model and inspect them:
+
+{% tabs data_3 %}
+{% tab 'Scala 2 and 3' for=data_3 %}
+
+```scala
+val myFavPizza = Pizza(Small, Regular, Seq(Cheese, Pepperoni))
+println(myFavPizza.crustType) // prints Regular
+```
+
+{% endtab %}
+{% endtabs %}
+
+#### More of the data model
+
+We might go on in the same way to model the entire pizza-ordering system.
+Here are a few other `case` classes that are used to model such a system:
+
+{% tabs data_4 %}
+{% tab 'Scala 2 and 3' for=data_4 %}
+
+```scala
+case class Address(
+ street1: String,
+ street2: Option[String],
+ city: String,
+ state: String,
+ zipCode: String
+)
+
+case class Customer(
+ name: String,
+ phone: String,
+ address: Address
+)
+
+case class Order(
+ pizzas: Seq[Pizza],
+ customer: Customer
+)
+```
+
+{% endtab %}
+{% endtabs %}
+
+#### “Skinny domain objects”
+
+In his book, *Functional and Reactive Domain Modeling*, Debasish Ghosh states that where OOP practitioners describe their classes as “rich domain models” that encapsulate data and behaviors, FP data models can be thought of as “skinny domain objects.”
+This is because---as this lesson shows---the data models are defined as `case` classes with attributes, but no behaviors, resulting in short and concise data structures.
+
+## Modeling the Operations
+
+This leads to an interesting question: Because FP separates the data from the operations on that data, how do you implement those operations in Scala?
+
+The answer is actually quite simple: you write functions (or methods) that operate on values of the data we just modeled.
+For instance, we can define a function that computes the price of a pizza.
+
+{% tabs data_5 class=tabs-scala-version %}
+{% tab 'Scala 2' for=data_5 %}
+
+```scala
+def pizzaPrice(p: Pizza): Double = p match {
+ case Pizza(crustSize, crustType, toppings) => {
+ val base = 6.00
+ val crust = crustPrice(crustSize, crustType)
+ val tops = toppings.map(toppingPrice).sum
+ base + crust + tops
+ }
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=data_5 %}
+
+```scala
+def pizzaPrice(p: Pizza): Double = p match
+ case Pizza(crustSize, crustType, toppings) =>
+ val base = 6.00
+ val crust = crustPrice(crustSize, crustType)
+ val tops = toppings.map(toppingPrice).sum
+ base + crust + tops
+```
+
+{% endtab %}
+{% endtabs %}
+
+Notice how the implementation of the function simply follows the shape of the data: since `Pizza` is a case class, we use pattern matching to extract the components and call helper functions to compute the individual prices.
+
+{% tabs data_6 class=tabs-scala-version %}
+{% tab 'Scala 2' for=data_6 %}
+
+```scala
+def toppingPrice(t: Topping): Double = t match {
+ case Cheese | Onions => 0.5
+ case Pepperoni | BlackOlives | GreenOlives => 0.75
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=data_6 %}
+
+```scala
+def toppingPrice(t: Topping): Double = t match
+ case Cheese | Onions => 0.5
+ case Pepperoni | BlackOlives | GreenOlives => 0.75
+```
+
+{% endtab %}
+{% endtabs %}
+
+Similarly, since `Topping` is an enumeration, we use pattern matching to distinguish between the different variants.
+Cheese and onions are priced at 50 cents, while the other toppings are priced at 75 cents each.
+
+{% tabs data_7 class=tabs-scala-version %}
+{% tab 'Scala 2' for=data_7 %}
+
+```scala
+def crustPrice(s: CrustSize, t: CrustType): Double =
+ (s, t) match {
+ // if the crust size is small or medium,
+ // the type is not important
+ case (Small | Medium, _) => 0.25
+ case (Large, Thin) => 0.50
+ case (Large, Regular) => 0.75
+ case (Large, Thick) => 1.00
+ }
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=data_7 %}
+
+```scala
+def crustPrice(s: CrustSize, t: CrustType): Double =
+ (s, t) match
+ // if the crust size is small or medium,
+ // the type is not important
+ case (Small | Medium, _) => 0.25
+ case (Large, Thin) => 0.50
+ case (Large, Regular) => 0.75
+ case (Large, Thick) => 1.00
+```
+
+{% endtab %}
+{% endtabs %}
+
+To compute the price of the crust we simultaneously pattern match on both the size and the type of the crust.
+
+> An important point about all functions shown above is that they are *pure functions*: they do not mutate any data or have other side-effects (like throwing exceptions or writing to a file).
+> All they do is simply receive values and compute the result.
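+
+To see these functions in action, here’s a small usage sketch; the resulting price follows from the definitions above (a base price of `6.00`, plus `0.25` for a small crust, plus `0.5` for cheese and `0.75` for pepperoni):
+
+```scala
+val myPizza = Pizza(Small, Thin, Seq(Cheese, Pepperoni))
+pizzaPrice(myPizza) // 6.00 + 0.25 + 0.5 + 0.75 = 7.5
+```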
+
+{% comment %}
+I’ve added this comment per [this GitHub comment](https://github.com/scalacenter/docs.scala-lang/pull/3#discussion_r543372428).
+To that point, I’ve added these definitions here from our Slack conversation, in case anyone wants to update the “pure function” definition. If not, please delete this comment.
+
+Sébastien:
+----------
+A function `f` is pure if, given the same input `x`, it will always return the same output `f(x)`, and it never modifies any state outside it (therefore potentially causing other functions to behave differently in the future).
+
+Jonathan:
+---------
+We say a function is 'pure' if it does not depend on or modify the context it is called in.
+
+Wikipedia
+---------
+The function always evaluates to the same result value given the same argument value(s). It cannot depend on any hidden state or value, and it cannot depend on any I/O.
+Evaluation of the result does not cause any semantically observable side effect or output, such as mutation of mutable objects or output to I/O devices.
+
+Mine (Alvin, now modified, from fp-pure-functions.md):
+------------------------------------------------------
+- A function `f` is pure if, given the same input `x`, it always returns the same output `f(x)`
+- The function’s output depends *only* on its input variables and its internal algorithm
+- It doesn’t modify its input parameters
+- It doesn’t mutate any hidden state
+- It doesn’t have any “back doors”: It doesn’t read data from the outside world (including the console, web services, databases, files, etc.), or write data to the outside world
+{% endcomment %}
+
+## How to Organize Functionality
+
+When implementing the `pizzaPrice` function above, we did not say _where_ we would define it.
+Scala gives you many great tools to organize your logic in different namespaces and modules.
+
+There are several different ways to implement and organize behaviors:
+
+- Define your functions in companion objects
+- Use a modular programming style
+- Use a “functional objects” approach
+- Define the functionality in extension methods
+
+These different solutions are shown in the remainder of this section.
+
+### Companion Object
+
+A first approach is to define the behavior---the functions---in a companion object.
+
+> As discussed in the Domain Modeling [Tools section][modeling-tools], a _companion object_ is an `object` that has the same name as a class, and is declared in the same file as the class.
+
+With this approach, in addition to the enumeration or case class you also define an equally named companion object that contains the behavior.
+
+{% tabs org_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=org_1 %}
+
+```scala
+case class Pizza(
+ crustSize: CrustSize,
+ crustType: CrustType,
+ toppings: Seq[Topping]
+)
+
+// the companion object of case class Pizza
+object Pizza {
+ // the implementation of `pizzaPrice` from above
+ def price(p: Pizza): Double = ...
+}
+
+sealed abstract class Topping
+
+// the companion object of enumeration Topping
+object Topping {
+ case object Cheese extends Topping
+ case object Pepperoni extends Topping
+ case object BlackOlives extends Topping
+ case object GreenOlives extends Topping
+ case object Onions extends Topping
+
+ // the implementation of `toppingPrice` above
+ def price(t: Topping): Double = ...
+}
+```
+
+{% endtab %}
+{% tab 'Scala 3' for=org_1 %}
+
+```scala
+case class Pizza(
+ crustSize: CrustSize,
+ crustType: CrustType,
+ toppings: Seq[Topping]
+)
+
+// the companion object of case class Pizza
+object Pizza:
+ // the implementation of `pizzaPrice` from above
+ def price(p: Pizza): Double = ...
+
+enum Topping:
+ case Cheese, Pepperoni, BlackOlives, GreenOlives, Onions
+
+// the companion object of enumeration Topping
+object Topping:
+ // the implementation of `toppingPrice` above
+ def price(t: Topping): Double = ...
+```
+
+{% endtab %}
+{% endtabs %}
+
+With this approach you can create a `Pizza` and compute its price like this:
+
+{% tabs org_2 %}
+{% tab 'Scala 2 and 3' for=org_2 %}
+
+```scala
+val pizza1 = Pizza(Small, Thin, Seq(Cheese, Onions))
+Pizza.price(pizza1)
+```
+
+{% endtab %}
+{% endtabs %}
+
+Grouping functionality this way has a few advantages:
+
+- It associates functionality with data and makes it easier to find for programmers (and the compiler).
+- It creates a namespace and, for instance, lets us use `price` as a method name without having to rely on overloading.
+- The implementation of `Topping.price` can access enumeration values like `Cheese` without having to import them.
+
+However, there are also a few tradeoffs that should be considered:
+
+- It tightly couples the functionality to your data model.
+ In particular, the companion object needs to be defined in the same file as your `case` class.
+- It might be unclear where to define functions like `crustPrice` that could equally well be placed in a companion object of `CrustSize` or `CrustType`.
+
+## Modules
+
+A second way to organize behavior is to use a “modular” approach.
+The book, *Programming in Scala*, defines a *module* as, “a ‘smaller program piece’ with a well-defined interface and a hidden implementation.”
+Let’s look at what this means.
+
+### Creating a `PizzaService` interface
+
+The first thing to think about is the behaviors of a `Pizza`.
+When doing this, you sketch a `PizzaServiceInterface` trait like this:
+
+{% tabs module_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=module_1 %}
+
+```scala
+trait PizzaServiceInterface {
+
+ def price(p: Pizza): Double
+
+ def addTopping(p: Pizza, t: Topping): Pizza
+ def removeAllToppings(p: Pizza): Pizza
+
+ def updateCrustSize(p: Pizza, cs: CrustSize): Pizza
+ def updateCrustType(p: Pizza, ct: CrustType): Pizza
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=module_1 %}
+
+```scala
+trait PizzaServiceInterface:
+
+ def price(p: Pizza): Double
+
+ def addTopping(p: Pizza, t: Topping): Pizza
+ def removeAllToppings(p: Pizza): Pizza
+
+ def updateCrustSize(p: Pizza, cs: CrustSize): Pizza
+ def updateCrustType(p: Pizza, ct: CrustType): Pizza
+```
+
+{% endtab %}
+{% endtabs %}
+
+As shown, each method takes a `Pizza` as an input parameter---along with other parameters---and then returns a `Pizza` instance as a result.
+
+When you write a pure interface like this, you can think of it as a contract that states, “all non-abstract classes that extend this trait *must* provide an implementation of these services.”
+
+What you might also do at this point is imagine that you’re the consumer of this API.
+When you do that, it helps to sketch out some sample “consumer” code to make sure the API looks like what you want:
+
+{% tabs module_2 %}
+{% tab 'Scala 2 and 3' for=module_2 %}
+
+```scala
+val p = Pizza(Small, Thin, Seq(Cheese))
+
+// how you want to use the methods in PizzaServiceInterface
+val p1 = addTopping(p, Pepperoni)
+val p2 = addTopping(p1, Onions)
+val p3 = updateCrustType(p2, Thick)
+val p4 = updateCrustSize(p3, Large)
+```
+
+{% endtab %}
+{% endtabs %}
+
+If that code seems okay, you’ll typically start sketching another API---such as an API for orders---but since we’re only looking at pizzas right now, we’ll stop thinking about interfaces and create a concrete implementation of this interface.
+
+> Notice that this is usually a two-step process.
+> In the first step, you sketch the contract of your API as an *interface*.
+> In the second step you create a concrete *implementation* of that interface.
+> In some cases you’ll end up creating multiple concrete implementations of the base interface.
+
+### Creating a concrete implementation
+
+Now that you know what the `PizzaServiceInterface` looks like, you can create a concrete implementation of it by writing the body for all of the methods you defined in the interface:
+
+{% tabs module_3 class=tabs-scala-version %}
+{% tab 'Scala 2' for=module_3 %}
+
+```scala
+object PizzaService extends PizzaServiceInterface {
+
+ def price(p: Pizza): Double =
+ ... // implementation from above
+
+ def addTopping(p: Pizza, t: Topping): Pizza =
+ p.copy(toppings = p.toppings :+ t)
+
+ def removeAllToppings(p: Pizza): Pizza =
+ p.copy(toppings = Seq.empty)
+
+ def updateCrustSize(p: Pizza, cs: CrustSize): Pizza =
+ p.copy(crustSize = cs)
+
+ def updateCrustType(p: Pizza, ct: CrustType): Pizza =
+ p.copy(crustType = ct)
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=module_3 %}
+
+```scala
+object PizzaService extends PizzaServiceInterface:
+
+ def price(p: Pizza): Double =
+ ... // implementation from above
+
+ def addTopping(p: Pizza, t: Topping): Pizza =
+ p.copy(toppings = p.toppings :+ t)
+
+ def removeAllToppings(p: Pizza): Pizza =
+ p.copy(toppings = Seq.empty)
+
+ def updateCrustSize(p: Pizza, cs: CrustSize): Pizza =
+ p.copy(crustSize = cs)
+
+ def updateCrustType(p: Pizza, ct: CrustType): Pizza =
+ p.copy(crustType = ct)
+
+end PizzaService
+```
+
+{% endtab %}
+{% endtabs %}
+
+While this two-step process of creating an interface followed by an implementation isn’t always necessary, explicitly thinking about the API and its use is a good approach.
+
+With everything in place you can use your `Pizza` class and `PizzaService`:
+
+{% tabs module_4 class=tabs-scala-version %}
+{% tab 'Scala 2' for=module_4 %}
+
+```scala
+import PizzaService._
+
+val p = Pizza(Small, Thin, Seq(Cheese))
+
+// use the PizzaService methods
+val p1 = addTopping(p, Pepperoni)
+val p2 = addTopping(p1, Onions)
+val p3 = updateCrustType(p2, Thick)
+val p4 = updateCrustSize(p3, Large)
+
+println(price(p4)) // prints 8.75
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=module_4 %}
+
+```scala
+import PizzaService.*
+
+val p = Pizza(Small, Thin, Seq(Cheese))
+
+// use the PizzaService methods
+val p1 = addTopping(p, Pepperoni)
+val p2 = addTopping(p1, Onions)
+val p3 = updateCrustType(p2, Thick)
+val p4 = updateCrustSize(p3, Large)
+
+println(price(p4)) // prints 8.75
+```
+
+{% endtab %}
+{% endtabs %}
+
+### Functional Objects
+
+In the book *Programming in Scala*, the authors define the term “Functional Objects” as “objects that do not have any mutable state”.
+This is also the case for types in `scala.collection.immutable`.
+For example, methods on `List` do not mutate the internal state, but instead create a copy of the `List` as a result.
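+
+For instance, adding an element to a `List` returns a new list and leaves the original unchanged:
+
+```scala
+val xs = List(1, 2, 3)
+val ys = xs :+ 4 // ys is List(1, 2, 3, 4)
+// xs is still List(1, 2, 3)
+```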
+
+You can think of this approach as a “hybrid FP/OOP design” because you:
+
+- Model the data using immutable `case` classes.
+- Define the behaviors (methods) in the _same type_ as the data.
+- Implement the behavior as pure functions: They don’t mutate any internal state; rather, they return a copy.
+
+> This really is a hybrid approach: like in an **OOP design**, the methods are encapsulated in the class with the data, but, as is typical for an **FP design**, methods are implemented as pure functions that don’t mutate the data.
+
+#### Example
+
+Using this approach, you can directly implement the functionality on pizzas in the case class:
+
+{% tabs module_5 class=tabs-scala-version %}
+{% tab 'Scala 2' for=module_5 %}
+
+```scala
+case class Pizza(
+ crustSize: CrustSize,
+ crustType: CrustType,
+ toppings: Seq[Topping]
+) {
+
+ // the operations on the data model
+ def price: Double =
+ pizzaPrice(this) // implementation from above
+
+ def addTopping(t: Topping): Pizza =
+ this.copy(toppings = this.toppings :+ t)
+
+ def removeAllToppings: Pizza =
+ this.copy(toppings = Seq.empty)
+
+ def updateCrustSize(cs: CrustSize): Pizza =
+ this.copy(crustSize = cs)
+
+ def updateCrustType(ct: CrustType): Pizza =
+ this.copy(crustType = ct)
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=module_5 %}
+
+```scala
+case class Pizza(
+ crustSize: CrustSize,
+ crustType: CrustType,
+ toppings: Seq[Topping]
+):
+
+ // the operations on the data model
+ def price: Double =
+ pizzaPrice(this) // implementation from above
+
+ def addTopping(t: Topping): Pizza =
+ this.copy(toppings = this.toppings :+ t)
+
+ def removeAllToppings: Pizza =
+ this.copy(toppings = Seq.empty)
+
+ def updateCrustSize(cs: CrustSize): Pizza =
+ this.copy(crustSize = cs)
+
+ def updateCrustType(ct: CrustType): Pizza =
+ this.copy(crustType = ct)
+```
+
+{% endtab %}
+{% endtabs %}
+
+Notice that unlike the previous approaches, because these are methods on the `Pizza` class, they don’t take a `Pizza` reference as an input parameter.
+Instead, they have their own reference to the current pizza instance as `this`.
+
+Now you can use this new design like this:
+
+{% tabs module_6 %}
+{% tab 'Scala 2 and 3' for=module_6 %}
+
+```scala
+Pizza(Small, Thin, Seq(Cheese))
+ .addTopping(Pepperoni)
+ .updateCrustType(Thick)
+ .price
+```
+
+{% endtab %}
+{% endtabs %}
+
+### Extension Methods
+
+Finally, we show an approach that lies between the first one (defining functions in the companion object) and the last one (defining functions as methods on the type itself).
+
+Extension methods let us create an API that is like the one of the functional-objects approach, without having to define functions as methods on the type itself.
+This can have multiple advantages:
+
+- Our data model is again _very concise_ and does not mention any behavior.
+- We can equip types with additional methods _retroactively_ without having to change the original definition.
+- Unlike companion objects or methods defined directly on the type, extension methods can be defined _externally_ in another file.
+
+Let us revisit our example once more.
+
+{% tabs module_7 class=tabs-scala-version %}
+{% tab 'Scala 2' for=module_7 %}
+
+```scala
+case class Pizza(
+ crustSize: CrustSize,
+ crustType: CrustType,
+ toppings: Seq[Topping]
+)
+
+implicit class PizzaOps(p: Pizza) {
+ def price: Double =
+ pizzaPrice(p) // implementation from above
+
+ def addTopping(t: Topping): Pizza =
+ p.copy(toppings = p.toppings :+ t)
+
+ def removeAllToppings: Pizza =
+ p.copy(toppings = Seq.empty)
+
+ def updateCrustSize(cs: CrustSize): Pizza =
+ p.copy(crustSize = cs)
+
+ def updateCrustType(ct: CrustType): Pizza =
+ p.copy(crustType = ct)
+}
+```
+In the above code, we define the different methods on pizzas as methods of an _implicit class_.
+With `implicit class PizzaOps(p: Pizza)`, wherever `PizzaOps` is imported its methods become available on
+instances of `Pizza`. The receiver in this case is `p`.
+
+{% endtab %}
+{% tab 'Scala 3' for=module_7 %}
+
+```scala
+case class Pizza(
+ crustSize: CrustSize,
+ crustType: CrustType,
+ toppings: Seq[Topping]
+)
+
+extension (p: Pizza)
+ def price: Double =
+ pizzaPrice(p) // implementation from above
+
+ def addTopping(t: Topping): Pizza =
+ p.copy(toppings = p.toppings :+ t)
+
+ def removeAllToppings: Pizza =
+ p.copy(toppings = Seq.empty)
+
+ def updateCrustSize(cs: CrustSize): Pizza =
+ p.copy(crustSize = cs)
+
+ def updateCrustType(ct: CrustType): Pizza =
+ p.copy(crustType = ct)
+```
+In the above code, we define the different methods on pizzas as _extension methods_.
+With `extension (p: Pizza)` we say that we want to make the methods available on instances of `Pizza`. The receiver
+in this case is `p`.
+
+{% endtab %}
+{% endtabs %}
+
+Using our extension methods, we can obtain the same API as before:
+
+{% tabs module_8 %}
+{% tab 'Scala 2 and 3' for=module_8 %}
+
+```scala
+Pizza(Small, Thin, Seq(Cheese))
+ .addTopping(Pepperoni)
+ .updateCrustType(Thick)
+ .price
+```
+
+{% endtab %}
+{% endtabs %}
+
+while being able to define extensions in any other module.
+Typically, if you are the designer of the data model, you will define your extension methods in the companion object.
+This way, they are already available to all users.
+Otherwise, extension methods need to be imported explicitly to be usable.
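+
+As a rough sketch of that setup (Scala 3 syntax; this variation isn’t spelled out above), the extension can live in the companion object like this:
+
+```scala
+case class Pizza(
+ crustSize: CrustSize,
+ crustType: CrustType,
+ toppings: Seq[Topping]
+)
+
+object Pizza:
+ // found via the implicit scope of Pizza, so no explicit import is needed
+ extension (p: Pizza)
+  def price: Double = pizzaPrice(p) // implementation from above
+```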
+
+## Summary of this Approach
+
+Defining a data model in Scala/FP tends to be simple: Just model variants of the data with enumerations and compound data with `case` classes.
+Then, to model the behavior, define functions that operate on values of your data model.
+We have seen different ways to organize your functions:
+
+- You can put your methods in companion objects
+- You can use a modular programming style, separating interface and implementation
+- You can use a “functional objects” approach and store the methods on the defined data type
+- You can use extension methods to equip your data model with functionality
+
+[adts]: {% link _overviews/scala3-book/types-adts-gadts.md %}
+[modeling-tools]: {% link _overviews/scala3-book/domain-modeling-tools.md %}
diff --git a/_overviews/scala3-book/domain-modeling-intro.md b/_overviews/scala3-book/domain-modeling-intro.md
new file mode 100644
index 0000000000..fada05d5f3
--- /dev/null
+++ b/_overviews/scala3-book/domain-modeling-intro.md
@@ -0,0 +1,15 @@
+---
+title: Domain Modeling
+type: chapter
+description: This chapter provides an introduction to domain modeling in Scala 3.
+languages: [ru, zh-cn]
+num: 20
+previous-page: control-structures
+next-page: domain-modeling-tools
+---
+
+This chapter shows how you can model the world around you with Scala 3:
+
+- The Tools section introduces the tools that are available to you, including classes, traits, enums, and more
+- The OOP Modeling section looks at modeling attributes and behaviors in an object-oriented programming (OOP) style
+- The FP Modeling section looks at domain modeling in a functional programming (FP) style
diff --git a/_overviews/scala3-book/domain-modeling-oop.md b/_overviews/scala3-book/domain-modeling-oop.md
new file mode 100644
index 0000000000..948504139e
--- /dev/null
+++ b/_overviews/scala3-book/domain-modeling-oop.md
@@ -0,0 +1,593 @@
+---
+title: OOP Modeling
+type: section
+description: This chapter provides an introduction to OOP domain modeling with Scala 3.
+languages: [ru, zh-cn]
+num: 22
+previous-page: domain-modeling-tools
+next-page: domain-modeling-fp
+---
+
+
+This chapter provides an introduction to domain modeling using object-oriented programming (OOP) in Scala 3.
+
+## Introduction
+
+Scala provides all the necessary tools for object-oriented design:
+
+- **Traits** let you specify (abstract) interfaces, as well as concrete implementations.
+- **Mixin Composition** gives you the tools to compose components from smaller parts.
+- **Classes** can implement the interfaces specified by traits.
+- **Instances** of classes can have their own private state.
+- **Subtyping** lets you use an instance of one class where an instance of a superclass is expected.
+- **Access modifiers** let you control which members of a class can be accessed by which part of the code.
+
+## Traits
+
+Perhaps unlike other languages with support for OOP, such as Java, the primary tool of decomposition in Scala is not classes, but traits.
+They can serve to describe abstract interfaces like:
+
+{% tabs traits_1 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+trait Showable {
+ def show: String
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+trait Showable:
+ def show: String
+```
+{% endtab %}
+{% endtabs %}
+
+and can also contain concrete implementations:
+
+{% tabs traits_2 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+trait Showable {
+ def show: String
+ def showHtml = "<p>" + show + "</p>"
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+trait Showable:
+ def show: String
+ def showHtml = "<p>" + show + "</p>"
+```
+{% endtab %}
+{% endtabs %}
+
+You can see that we define the method `showHtml` _in terms_ of the abstract method `show`.
+
+[Odersky and Zenger][scalable] present the _service-oriented component model_ and view:
+
+- **abstract members** as _required_ services: they still need to be implemented by a subclass.
+- **concrete members** as _provided_ services: they are provided to the subclass.
+
+We can already see this with our example of `Showable`: defining a class `Document` that extends `Showable`, we still have to define `show`, but are provided with `showHtml`:
+
+{% tabs traits_3 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+class Document(text: String) extends Showable {
+ def show = text
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+class Document(text: String) extends Showable:
+ def show = text
+```
+
+{% endtab %}
+{% endtabs %}
+
+#### Abstract Members
+
+Abstract methods are not the only thing that can be left abstract in a trait.
+A trait can contain:
+
+- abstract methods (`def m(): T`)
+- abstract value definitions (`val x: T`)
+- abstract type members (`type T`), potentially with bounds (`type T <: S`)
+- abstract givens (`given t: T`) (Scala 3 only)
+
+Each of the above features can be used to specify some form of requirement on the implementor of the trait.
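+
+As a rough sketch (Scala 3 syntax; the trait and its members are made up for illustration), a single trait can state all of these requirements:
+
+```scala
+trait Store:
+ def put(item: String): Unit      // abstract method
+ val capacity: Int                // abstract value definition
+ type Key <: String               // abstract type member with an upper bound
+ given keyOrdering: Ordering[Key] // abstract given (Scala 3 only)
+```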
+
+## Mixin Composition
+
+Not only can traits contain abstract and concrete definitions, Scala also provides a powerful way to compose multiple traits: a feature which is often referred to as _mixin composition_.
+
+Let us assume the following two (potentially independently defined) traits:
+
+{% tabs traits_4 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+trait GreetingService {
+ def translate(text: String): String
+ def sayHello = translate("Hello")
+}
+
+trait TranslationService {
+ def translate(text: String): String = "..."
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+trait GreetingService:
+ def translate(text: String): String
+ def sayHello = translate("Hello")
+
+trait TranslationService:
+ def translate(text: String): String = "..."
+```
+
+{% endtab %}
+{% endtabs %}
+
+To compose the two services, we can simply create a new trait extending them:
+
+{% tabs traits_5 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+trait ComposedService extends GreetingService with TranslationService
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+trait ComposedService extends GreetingService, TranslationService
+```
+
+{% endtab %}
+{% endtabs %}
+
+Abstract members in one trait (such as `translate` in `GreetingService`) are automatically matched with concrete members in another trait.
+This not only works with methods as in this example, but also with all the other abstract members mentioned above (that is, types, value definitions, etc.).
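+
+For example, assuming the definitions above, `ComposedService` has no remaining abstract members, so it can be instantiated directly (here as an anonymous instance):
+
+```scala
+val service = new ComposedService {}
+service.sayHello // uses sayHello from GreetingService and translate from TranslationService
+```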
+
+## Classes
+
+Traits are great for modularizing components and describing interfaces (required and provided).
+But at some point we’ll want to create instances of them.
+When designing software in Scala, it’s often helpful to only consider using classes at the leafs of your inheritance model:
+
+{% comment %}
+NOTE: I think “leaves” may technically be the correct word to use, but I prefer “leafs.”
+{% endcomment %}
+
+{% tabs table-traits-cls-summary class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+| Traits | `T1`, `T2`, `T3`
+| Composed traits | `S1 extends T1 with T2`, `S2 extends T2 with T3`
+| Classes | `C extends S1 with T3`
+| Instances | `new C()`
+{% endtab %}
+{% tab 'Scala 3' %}
+| Traits | `T1`, `T2`, `T3`
+| Composed traits | `S1 extends T1, T2`, `S2 extends T2, T3`
+| Classes | `C extends S1, T3`
+| Instances | `C()`
+{% endtab %}
+{% endtabs %}
+
+This is even more the case in Scala 3, where traits can now also take parameters, further eliminating the need for classes.
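+
+For instance, here’s a small sketch of a parameterized trait (Scala 3 only; the names are illustrative):
+
+```scala
+trait Greeting(val prefix: String):
+ def greet(name: String): String = s"$prefix, $name"
+
+// a class (or object) supplies the trait’s parameter
+class EnglishGreeting extends Greeting("Hello")
+```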
+
+#### Defining Classes
+
+Like traits, classes can extend multiple traits (but only one superclass):
+
+{% tabs class_1 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+class MyService(name: String) extends ComposedService with Showable {
+ def show = s"$name says $sayHello"
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+class MyService(name: String) extends ComposedService, Showable:
+ def show = s"$name says $sayHello"
+```
+
+{% endtab %}
+{% endtabs %}
+
+#### Subtyping
+
+We can create an instance of `MyService` as follows:
+
+{% tabs class_2 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+val s1: MyService = new MyService("Service 1")
+```
+
+{% endtab %}
+{% tab 'Scala 3' %}
+
+```scala
+val s1: MyService = MyService("Service 1")
+```
+
+{% endtab %}
+{% endtabs %}
+
+Through the means of subtyping, our instance `s1` can be used everywhere that any of the extended traits is expected:
+
+{% tabs class_3 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+val s2: GreetingService = s1
+val s3: TranslationService = s1
+val s4: Showable = s1
+// ... and so on ...
+```
+{% endtab %}
+{% endtabs %}
+
+#### Planning for Extension
+
+As mentioned before, it is possible to extend another class:
+
+{% tabs class_4 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+class Person(name: String)
+class SoftwareDeveloper(name: String, favoriteLang: String)
+ extends Person(name)
+```
+
+{% endtab %}
+{% endtabs %}
+
+However, since _traits_ are designed as the primary means of decomposition,
+it is not recommended to extend a class that is defined in one file from another file.
+
+
+#### Open Classes (Scala 3 only)
+
+In Scala 3 extending non-abstract classes in other files is restricted. In order to allow this, the base class needs to
+be marked as `open`:
+
+{% tabs class_5 %}
+{% tab 'Scala 3 Only' %}
+
+```scala
+open class Person(name: String)
+```
+{% endtab %}
+{% endtabs %}
+
+Marking classes with [`open`][open] is a new feature of Scala 3. Having to explicitly mark classes as open avoids many common pitfalls in OO design.
+In particular, it requires library designers to explicitly plan for extension and, for instance, document the classes that are marked as open with additional extension contracts.
+
+{% comment %}
+NOTE/FWIW: In his book, “Effective Java,” Joshua Bloch describes this as “Item 19: Design and document for inheritance or else prohibit it.”
+Unfortunately I can’t find any good links to this on the internet.
+I only mention this because I think that book and phrase is pretty well known in the Java world.
+{% endcomment %}
+
+## Instances and Private Mutable State
+
+Like in other languages with support for OOP, traits and classes in Scala can define mutable fields:
+
+{% tabs instance_6 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+class Counter {
+ // can only be observed by the method `count`
+ private var currentCount = 0
+
+ def tick(): Unit = currentCount += 1
+ def count: Int = currentCount
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+class Counter:
+ // can only be observed by the method `count`
+ private var currentCount = 0
+
+ def tick(): Unit = currentCount += 1
+ def count: Int = currentCount
+```
+
+{% endtab %}
+{% endtabs %}
+
+Every instance of the class `Counter` has its own private state that can only be observed through the method `count`, as the following interaction illustrates:
+
+{% tabs instance_7 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+val c1 = new Counter()
+c1.count // 0
+c1.tick()
+c1.tick()
+c1.count // 2
+```
+
+{% endtab %}
+{% tab 'Scala 3' %}
+
+```scala
+val c1 = Counter()
+c1.count // 0
+c1.tick()
+c1.tick()
+c1.count // 2
+```
+
+{% endtab %}
+{% endtabs %}
+
+#### Access Modifiers
+
+By default, all member definitions in Scala are publicly visible.
+To hide implementation details, it’s possible to define members (methods, fields, types, etc.) to be `private` or `protected`.
+This way you can control how they are accessed or overridden.
+Private members are only visible to the class/trait itself and to its companion object.
+Protected members are also visible to subclasses of the class.
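+
+Here’s a brief sketch showing both modifiers (Scala 3 syntax; the class is made up for illustration):
+
+```scala
+class BankAccount:
+ // visible only within BankAccount (and its companion object)
+ private var balance = 0.0
+ // also visible in subclasses of BankAccount
+ protected def audit(): Unit = ()
+
+ def deposit(amount: Double): Unit =
+  balance += amount
+  audit()
+```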
+
+## Advanced Example: Service Oriented Design
+
+In the following, we illustrate some advanced features of Scala and show how they can be used to structure larger software components.
+The examples are adapted from the paper ["Scalable Component Abstractions"][scalable] by Martin Odersky and Matthias Zenger.
+Don’t worry if you don’t understand all the details of the example; it’s primarily intended to demonstrate how to use several type features to construct larger components.
+
+Our goal is to define a software component with a _family of types_ that can be refined later in implementations of the component.
+Concretely, the following code defines the component `SubjectObserver` as a trait with two abstract type members, `S` (for subjects) and `O` (for observers):
+
+{% tabs example_1 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+trait SubjectObserver {
+
+ type S <: Subject
+ type O <: Observer
+
+ trait Subject { self: S =>
+ private var observers: List[O] = List()
+ def subscribe(obs: O): Unit = {
+ observers = obs :: observers
+ }
+ def publish() = {
+ for ( obs <- observers ) obs.notify(this)
+ }
+ }
+
+ trait Observer {
+ def notify(sub: S): Unit
+ }
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+trait SubjectObserver:
+
+ type S <: Subject
+ type O <: Observer
+
+ trait Subject:
+ self: S =>
+ private var observers: List[O] = List()
+ def subscribe(obs: O): Unit =
+ observers = obs :: observers
+ def publish() =
+ for obs <- observers do obs.notify(this)
+
+ trait Observer:
+ def notify(sub: S): Unit
+```
+
+{% endtab %}
+{% endtabs %}
+
+There are a few things that need explanation.
+
+#### Abstract Type Members
+
+The declaration `type S <: Subject` says that within the trait `SubjectObserver` we can refer to some _unknown_ (that is, abstract) type that we call `S`.
+However, the type is not completely unknown: we know at least that it is _some subtype_ of the trait `Subject`.
+All traits and classes extending `SubjectObserver` are free to choose any type for `S` as long as the chosen type is a subtype of `Subject`.
+The `<: Subject` part of the declaration is also referred to as an _upper bound on `S`_.
+
+#### Nested Traits
+
+_Within_ trait `SubjectObserver`, we define two other traits.
+Let us begin with trait `Observer`, which only defines one abstract method `notify` that takes an argument of type `S`.
+As we will see momentarily, it is important that the argument has type `S` and not type `Subject`.
+
+The second trait, `Subject`, defines one private field `observers` to store all observers that subscribed to this particular subject.
+Subscribing to a subject simply stores the object into this list.
+Again, the type of parameter `obs` is `O`, not `Observer`.
+
+#### Self-type Annotations
+
+Finally, you might have wondered what the `self: S =>` on trait `Subject` is supposed to mean.
+This is called a _self-type annotation_.
+It requires subtypes of `Subject` to also be subtypes of `S`.
+This is necessary to be able to call `obs.notify` with `this` as an argument, since it requires a value of type `S`.
+If `S` was a _concrete_ type, the self-type annotation could be replaced by `trait Subject extends S`.
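+
+As a standalone sketch (with illustrative names that are not part of the paper's example), a self-type annotation requires anything that mixes in the trait to also provide the required type:
+
+{% tabs self_type_sketch %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+trait Greeter {
+  def greet(): Unit
+}
+
+trait LoudGreeter { self: Greeter =>      // whoever mixes in LoudGreeter must also be a Greeter
+  def greetLoudly(): Unit = {
+    greet()                               // Greeter's methods are available thanks to the self-type
+    println("!!!")
+  }
+}
+
+// OK: EnglishGreeter is both a Greeter and a LoudGreeter
+class EnglishGreeter extends Greeter with LoudGreeter {
+  def greet(): Unit = println("Hello")
+}
+
+// class OnlyLoud extends LoudGreeter {}  // error: illegal inheritance, self-type does not conform to Greeter
+```
+
+{% endtab %}
+{% endtabs %}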
+
+### Implementing the Component
+
+We can now implement the above component and define the abstract type members to be concrete types:
+
+{% tabs example_2 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+object SensorReader extends SubjectObserver {
+ type S = Sensor
+ type O = Display
+
+ class Sensor(val label: String) extends Subject {
+ private var currentValue = 0.0
+ def value = currentValue
+ def changeValue(v: Double) = {
+ currentValue = v
+ publish()
+ }
+ }
+
+ class Display extends Observer {
+ def notify(sub: Sensor) =
+ println(s"${sub.label} has value ${sub.value}")
+ }
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+object SensorReader extends SubjectObserver:
+ type S = Sensor
+ type O = Display
+
+ class Sensor(val label: String) extends Subject:
+ private var currentValue = 0.0
+ def value = currentValue
+ def changeValue(v: Double) =
+ currentValue = v
+ publish()
+
+ class Display extends Observer:
+ def notify(sub: Sensor) =
+ println(s"${sub.label} has value ${sub.value}")
+```
+
+{% endtab %}
+{% endtabs %}
+
+Specifically, we define a _singleton_ object `SensorReader` that extends `SubjectObserver`.
+In the implementation of `SensorReader`, we say that type `S` is now defined as type `Sensor`, and type `O` is defined to be equal to type `Display`.
+Both `Sensor` and `Display` are defined as nested classes within `SensorReader`, implementing the traits `Subject` and `Observer`, correspondingly.
+
+Besides being an example of a service-oriented design, this code also highlights many aspects of object-oriented programming:
+
+- The class `Sensor` introduces its own private state (`currentValue`) and encapsulates modification of the state behind the method `changeValue`.
+- The implementation of `changeValue` uses the method `publish` defined in the extended trait.
+- The class `Display` extends the trait `Observer`, and implements the missing method `notify`.
+{% comment %}
+NOTE: You might say “the abstract method `notify`” in that last sentence, but I like “missing.”
+{% endcomment %}
+
+It is important to point out that the implementation of `notify` can only safely access the label and value of `sub`, since we originally declared the parameter to be of type `S`.
+
+### Using the Component
+
+Finally, the following code illustrates how to use our `SensorReader` component:
+
+{% tabs example_3 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+import SensorReader._
+
+// setting up a network
+val s1 = new Sensor("sensor1")
+val s2 = new Sensor("sensor2")
+val d1 = new Display()
+val d2 = new Display()
+s1.subscribe(d1)
+s1.subscribe(d2)
+s2.subscribe(d1)
+
+// propagating updates through the network
+s1.changeValue(2)
+s2.changeValue(3)
+
+// prints:
+// sensor1 has value 2.0
+// sensor1 has value 2.0
+// sensor2 has value 3.0
+
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+import SensorReader.*
+
+// setting up a network
+val s1 = Sensor("sensor1")
+val s2 = Sensor("sensor2")
+val d1 = Display()
+val d2 = Display()
+s1.subscribe(d1)
+s1.subscribe(d2)
+s2.subscribe(d1)
+
+// propagating updates through the network
+s1.changeValue(2)
+s2.changeValue(3)
+
+// prints:
+// sensor1 has value 2.0
+// sensor1 has value 2.0
+// sensor2 has value 3.0
+```
+
+{% endtab %}
+{% endtabs %}
+
+With all the object-oriented programming utilities under our belt, in the next section we will demonstrate how to design programs in a functional style.
+
+{% comment %}
+NOTE: One thing I occasionally do is flip things like this around, so I first show how to use a component, and then show how to implement that component. I don’t have a rule of thumb about when to do this, but sometimes it’s motivational to see the use first, and then see how to create the code to make that work.
+{% endcomment %}
+
+[scalable]: https://doi.org/10.1145/1094811.1094815
+[open]: {{ site.scala3ref }}/other-new-features/open-classes.html
+[trait-params]: {{ site.scala3ref }}/other-new-features/trait-parameters.html
diff --git a/_overviews/scala3-book/domain-modeling-tools.md b/_overviews/scala3-book/domain-modeling-tools.md
new file mode 100644
index 0000000000..c1475ce161
--- /dev/null
+++ b/_overviews/scala3-book/domain-modeling-tools.md
@@ -0,0 +1,1359 @@
+---
+title: Tools
+type: section
+description: This chapter provides an introduction to the available domain modeling tools in Scala 3, including classes, traits, enums, and more.
+languages: [ru, zh-cn]
+num: 21
+previous-page: domain-modeling-intro
+next-page: domain-modeling-oop
+---
+
+
+Scala provides many different constructs so we can model the world around us:
+
+- Classes
+- Objects
+- Companion objects
+- Traits
+- Abstract classes
+- Enums (Scala 3 only)
+- Case classes
+- Case objects
+
+This section briefly introduces each of these language features.
+
+## Classes
+
+As with other languages, a _class_ in Scala is a template for the creation of object instances.
+Here are some examples of classes:
+
+{% tabs class_1 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+class Person(var name: String, var vocation: String)
+class Book(var title: String, var author: String, var year: Int)
+class Movie(var name: String, var director: String, var year: Int)
+```
+
+{% endtab %}
+{% endtabs %}
+
+These examples show that Scala has a very lightweight way to declare classes.
+
+All the parameters of our example classes are defined as `var` fields, which means they are mutable: you can read them, and also modify them.
+If you want them to be immutable---read only---create them as `val` fields instead, or use a case class.
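+
+For example, a quick sketch (not part of the original example classes) of a read-only field:
+
+{% tabs class_val_fields %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+class Pet(val name: String)   // `val` makes the field read-only
+
+val fido = new Pet("Fido")
+fido.name                     // "Fido"
+// fido.name = "Rex"          // error: reassignment to val
+```
+
+{% endtab %}
+{% endtabs %}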
+
+Prior to Scala 3, you used the `new` keyword to create a new instance of a class:
+
+{% tabs class_2 %}
+{% tab 'Scala 2 Only' %}
+
+```scala
+val p = new Person("Robert Allen Zimmerman", "Harmonica Player")
+```
+
+{% endtab %}
+{% endtabs %}
+
+However, with [universal apply methods][creator] this isn’t required in Scala 3:
+
+{% tabs class_3 %}
+{% tab 'Scala 3 Only' %}
+
+```scala
+val p = Person("Robert Allen Zimmerman", "Harmonica Player")
+```
+
+{% endtab %}
+{% endtabs %}
+
+Once you have an instance of a class such as `p`, you can access its fields, which in this example are all constructor parameters:
+
+{% tabs class_4 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+p.name // "Robert Allen Zimmerman"
+p.vocation // "Harmonica Player"
+```
+
+{% endtab %}
+{% endtabs %}
+
+As mentioned, all of these parameters were created as `var` fields, so you can also mutate them:
+
+{% tabs class_5 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+p.name = "Bob Dylan"
+p.vocation = "Musician"
+```
+
+{% endtab %}
+{% endtabs %}
+
+### Fields and methods
+
+Classes can also have methods and additional fields that are not part of constructors.
+They are defined in the body of the class.
+The body is initialized as part of the default constructor:
+
+{% tabs method class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+class Person(var firstName: String, var lastName: String) {
+
+ println("initialization begins")
+ val fullName = firstName + " " + lastName
+
+ // a class method
+ def printFullName: Unit =
+ // access the `fullName` field, which is created above
+ println(fullName)
+
+ printFullName
+ println("initialization ends")
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+class Person(var firstName: String, var lastName: String):
+
+ println("initialization begins")
+ val fullName = firstName + " " + lastName
+
+ // a class method
+ def printFullName: Unit =
+ // access the `fullName` field, which is created above
+ println(fullName)
+
+ printFullName
+ println("initialization ends")
+```
+
+{% endtab %}
+{% endtabs %}
+
+The following REPL session shows how to create a new `Person` instance with this class:
+
+{% tabs demo-person class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+````scala
+scala> val john = new Person("John", "Doe")
+initialization begins
+John Doe
+initialization ends
+val john: Person = Person@55d8f6bb
+
+scala> john.printFullName
+John Doe
+````
+{% endtab %}
+{% tab 'Scala 3' %}
+````scala
+scala> val john = Person("John", "Doe")
+initialization begins
+John Doe
+initialization ends
+val john: Person = Person@55d8f6bb
+
+scala> john.printFullName
+John Doe
+````
+{% endtab %}
+{% endtabs %}
+
+Classes can also extend traits and abstract classes, which we cover in dedicated sections below.
+
+### Default parameter values
+
+As a quick look at a few other features, class constructor parameters can also have default values:
+
+{% tabs default-values_1 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+class Socket(val timeout: Int = 5_000, val linger: Int = 5_000) {
+ override def toString = s"timeout: $timeout, linger: $linger"
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+class Socket(val timeout: Int = 5_000, val linger: Int = 5_000):
+ override def toString = s"timeout: $timeout, linger: $linger"
+```
+
+{% endtab %}
+{% endtabs %}
+
+A great thing about this feature is that it lets consumers of your code create classes in a variety of different ways, as though the class had alternate constructors:
+
+{% tabs default-values_2 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+val s = new Socket() // timeout: 5000, linger: 5000
+val s = new Socket(2_500) // timeout: 2500, linger: 5000
+val s = new Socket(10_000, 10_000) // timeout: 10000, linger: 10000
+val s = new Socket(timeout = 10_000) // timeout: 10000, linger: 5000
+val s = new Socket(linger = 10_000) // timeout: 5000, linger: 10000
+```
+
+{% endtab %}
+{% tab 'Scala 3' %}
+
+```scala
+val s = Socket() // timeout: 5000, linger: 5000
+val s = Socket(2_500) // timeout: 2500, linger: 5000
+val s = Socket(10_000, 10_000) // timeout: 10000, linger: 10000
+val s = Socket(timeout = 10_000) // timeout: 10000, linger: 5000
+val s = Socket(linger = 10_000) // timeout: 5000, linger: 10000
+```
+
+{% endtab %}
+{% endtabs %}
+
+When creating a new instance of a class, you can also use named parameters.
+This is particularly helpful when many of the parameters have the same type, as shown in this comparison:
+
+{% tabs default-values_3 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+// option 1
+val s = new Socket(10_000, 10_000)
+
+// option 2
+val s = new Socket(
+ timeout = 10_000,
+ linger = 10_000
+)
+```
+
+{% endtab %}
+{% tab 'Scala 3' %}
+
+```scala
+// option 1
+val s = Socket(10_000, 10_000)
+
+// option 2
+val s = Socket(
+ timeout = 10_000,
+ linger = 10_000
+)
+```
+
+{% endtab %}
+{% endtabs %}
+
+### Auxiliary constructors
+
+You can define a class to have multiple constructors so consumers of your class can build it in different ways.
+For example, let’s assume that you need to write some code to model students in a college admission system.
+While analyzing the requirements you’ve seen that you need to be able to construct a `Student` instance in three ways:
+
+- With a name and government ID, for when they first start the admissions process
+- With a name, government ID, and an additional application date, for when they submit their application
+- With a name, government ID, and their student ID, for after they’ve been admitted
+
+One way to handle this situation in an OOP style is with this code:
+
+{% tabs structor_1 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+import java.time._
+
+// [1] the primary constructor
+class Student(
+ var name: String,
+ var govtId: String
+) {
+ private var _applicationDate: Option[LocalDate] = None
+ private var _studentId: Int = 0
+
+ // [2] a constructor for when the student has completed
+ // their application
+ def this(
+ name: String,
+ govtId: String,
+ applicationDate: LocalDate
+ ) = {
+ this(name, govtId)
+ _applicationDate = Some(applicationDate)
+ }
+
+ // [3] a constructor for when the student is approved
+ // and now has a student id
+ def this(
+ name: String,
+ govtId: String,
+ studentId: Int
+ ) = {
+ this(name, govtId)
+ _studentId = studentId
+ }
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+import java.time.*
+
+// [1] the primary constructor
+class Student(
+ var name: String,
+ var govtId: String
+):
+ private var _applicationDate: Option[LocalDate] = None
+ private var _studentId: Int = 0
+
+ // [2] a constructor for when the student has completed
+ // their application
+ def this(
+ name: String,
+ govtId: String,
+ applicationDate: LocalDate
+ ) =
+ this(name, govtId)
+ _applicationDate = Some(applicationDate)
+
+ // [3] a constructor for when the student is approved
+ // and now has a student id
+ def this(
+ name: String,
+ govtId: String,
+ studentId: Int
+ ) =
+ this(name, govtId)
+ _studentId = studentId
+```
+
+{% endtab %}
+{% endtabs %}
+
+{% comment %}
+// for testing that code
+override def toString = s"""
+|Name: $name
+|GovtId: $govtId
+|StudentId: $_studentId
+|Date Applied: $_applicationDate
+""".trim.stripMargin
+{% endcomment %}
+
+The class has three constructors, given by the numbered comments in the code:
+
+1. The primary constructor, given by the `name` and `govtId` in the class definition
+2. An auxiliary constructor with the parameters `name`, `govtId`, and `applicationDate`
+3. Another auxiliary constructor with the parameters `name`, `govtId`, and `studentId`
+
+Those constructors can be called like this:
+
+{% tabs structor_2 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+val s1 = new Student("Mary", "123")
+val s2 = new Student("Mary", "123", LocalDate.now())
+val s3 = new Student("Mary", "123", 456)
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+val s1 = Student("Mary", "123")
+val s2 = Student("Mary", "123", LocalDate.now())
+val s3 = Student("Mary", "123", 456)
+```
+
+{% endtab %}
+{% endtabs %}
+
+While this technique can be used, bear in mind that constructor parameters can also have default values, which make it seem that a class has multiple constructors.
+This is shown in the previous `Socket` example.
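+
+For comparison, here is a rough sketch (not part of the original lesson) of the same `Student` modeled with default parameter values instead of auxiliary constructors:
+
+{% tabs structor_3 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+import java.time.LocalDate
+
+class Student(
+  var name: String,
+  var govtId: String,
+  var applicationDate: Option[LocalDate] = None,
+  var studentId: Int = 0
+)
+
+val s1 = new Student("Mary", "123")
+val s2 = new Student("Mary", "123", applicationDate = Some(LocalDate.now()))
+val s3 = new Student("Mary", "123", studentId = 456)
+```
+
+{% endtab %}
+{% endtabs %}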
+
+## Objects
+
+An object is a class that has exactly one instance.
+It’s initialized lazily when its members are referenced, similar to a `lazy val`.
+Objects in Scala allow grouping methods and fields under one namespace, similar to how you use `static` members on a class in Java, JavaScript (ES6), or `@staticmethod` in Python.
+
+Declaring an `object` is similar to declaring a `class`.
+Here’s an example of a “string utilities” object that contains a set of methods for working with strings:
+
+{% tabs object_1 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+object StringUtils {
+ def truncate(s: String, length: Int): String = s.take(length)
+ def containsWhitespace(s: String): Boolean = s.matches(".*\\s.*")
+ def isNullOrEmpty(s: String): Boolean = s == null || s.trim.isEmpty
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+object StringUtils:
+ def truncate(s: String, length: Int): String = s.take(length)
+ def containsWhitespace(s: String): Boolean = s.matches(".*\\s.*")
+ def isNullOrEmpty(s: String): Boolean = s == null || s.trim.isEmpty
+```
+
+{% endtab %}
+{% endtabs %}
+
+We can use the object as follows:
+
+{% tabs object_2 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+StringUtils.truncate("Chuck Bartowski", 5) // "Chuck"
+```
+
+{% endtab %}
+{% endtabs %}
+
+Importing in Scala is very flexible, and allows us to import _all_ members of an object:
+
+{% tabs object_3 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+import StringUtils._
+truncate("Chuck Bartowski", 5) // "Chuck"
+containsWhitespace("Sarah Walker") // true
+isNullOrEmpty("John Casey") // false
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+import StringUtils.*
+truncate("Chuck Bartowski", 5) // "Chuck"
+containsWhitespace("Sarah Walker") // true
+isNullOrEmpty("John Casey") // false
+```
+
+{% endtab %}
+{% endtabs %}
+
+or just _some_ members:
+
+{% tabs object_4 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+import StringUtils.{truncate, containsWhitespace}
+truncate("Charles Carmichael", 7) // "Charles"
+containsWhitespace("Captain Awesome") // true
+isNullOrEmpty("Morgan Grimes") // Not found: isNullOrEmpty (error)
+```
+
+{% endtab %}
+{% endtabs %}
+
+Objects can also contain fields, which are also accessed like static members:
+
+{% tabs object_5 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+object MathConstants {
+ val PI = 3.14159
+ val E = 2.71828
+}
+
+println(MathConstants.PI) // 3.14159
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+object MathConstants:
+ val PI = 3.14159
+ val E = 2.71828
+
+println(MathConstants.PI) // 3.14159
+```
+
+{% endtab %}
+{% endtabs %}
+
+## Companion objects
+
+An `object` that has the same name as a class, and is declared in the same file as the class, is called a _companion object_.
+Similarly, the corresponding class is called the object’s companion class.
+A companion class or object can access the private members of its companion.
+
+Companion objects are used for methods and values that are not specific to instances of the companion class.
+For instance, in the following example the class `Circle` has a member named `area` which is specific to each instance, and its companion object has a method named `calculateArea` that’s (a) not specific to an instance, and (b) is available to every instance:
+
+{% tabs companion class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+import scala.math._
+
+class Circle(val radius: Double) {
+ def area: Double = Circle.calculateArea(radius)
+}
+
+object Circle {
+ private def calculateArea(radius: Double): Double = Pi * pow(radius, 2.0)
+}
+
+val circle1 = new Circle(5.0)
+circle1.area
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+import scala.math.*
+
+class Circle(val radius: Double):
+ def area: Double = Circle.calculateArea(radius)
+
+object Circle:
+ private def calculateArea(radius: Double): Double = Pi * pow(radius, 2.0)
+
+val circle1 = Circle(5.0)
+circle1.area
+```
+
+{% endtab %}
+{% endtabs %}
+
+In this example the `area` method that’s available to each instance uses the `calculateArea` method that’s defined in the companion object.
+Once again, `calculateArea` is similar to a static method in Java.
+Also, because `calculateArea` is private, it can’t be accessed by other code, but as shown, it can be seen by instances of the `Circle` class.
+
+### Other uses
+
+Companion objects can be used for several purposes:
+
+- As shown, they can be used to group “static” methods under a namespace
+ - These methods can be public or private
+ - If `calculateArea` was public, it would be accessed as `Circle.calculateArea`
+- They can contain `apply` methods, which---thanks to some syntactic sugar---work as factory methods to construct new instances
+- They can contain `unapply` methods, which are used to deconstruct objects, such as with pattern matching
+
+Here’s a quick look at how `apply` methods can be used as factory methods to create new objects:
+
+{% tabs companion-use class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+class Person {
+ var name = ""
+ var age = 0
+ override def toString = s"$name is $age years old"
+}
+
+object Person {
+ // a one-arg factory method
+ def apply(name: String): Person = {
+    val p = new Person
+ p.name = name
+ p
+ }
+
+ // a two-arg factory method
+ def apply(name: String, age: Int): Person = {
+    val p = new Person
+ p.name = name
+ p.age = age
+ p
+ }
+}
+
+val joe = Person("Joe")
+val fred = Person("Fred", 29)
+
+//val joe: Person = Joe is 0 years old
+//val fred: Person = Fred is 29 years old
+```
+
+The `unapply` method isn’t covered here, but it’s covered in the [Language Specification](https://scala-lang.org/files/archive/spec/2.13/08-pattern-matching.html#extractor-patterns).
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+class Person:
+ var name = ""
+ var age = 0
+ override def toString = s"$name is $age years old"
+
+object Person:
+
+ // a one-arg factory method
+ def apply(name: String): Person =
+    val p = new Person
+ p.name = name
+ p
+
+ // a two-arg factory method
+ def apply(name: String, age: Int): Person =
+    val p = new Person
+ p.name = name
+ p.age = age
+ p
+
+end Person
+
+val joe = Person("Joe")
+val fred = Person("Fred", 29)
+
+//val joe: Person = Joe is 0 years old
+//val fred: Person = Fred is 29 years old
+```
+
+The `unapply` method isn’t covered here, but it’s covered in the [Reference documentation]({{ site.scala3ref }}/changed-features/pattern-matching.html).
+
+{% endtab %}
+{% endtabs %}
+
+## Traits
+
+If you’re familiar with Java, a Scala trait is similar to an interface in Java 8+. Traits can contain:
+
+- Abstract methods and fields
+- Concrete methods and fields
+
+In a basic use, a trait can be used as an interface, defining only abstract members that will be implemented by other classes:
+
+{% tabs traits_1 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+trait Employee {
+ def id: Int
+ def firstName: String
+ def lastName: String
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+trait Employee:
+ def id: Int
+ def firstName: String
+ def lastName: String
+```
+
+{% endtab %}
+{% endtabs %}
+
+However, traits can also contain concrete members.
+For instance, the following trait defines two abstract members---`numLegs` and `walk()`---and also has a concrete implementation of a `stop()` method:
+
+{% tabs traits_2 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+trait HasLegs {
+ def numLegs: Int
+ def walk(): Unit
+ def stop() = println("Stopped walking")
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+trait HasLegs:
+ def numLegs: Int
+ def walk(): Unit
+ def stop() = println("Stopped walking")
+```
+
+{% endtab %}
+{% endtabs %}
+
+Here’s another trait with an abstract member and two concrete implementations:
+
+{% tabs traits_3 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+trait HasTail {
+ def tailColor: String
+ def wagTail() = println("Tail is wagging")
+ def stopTail() = println("Tail is stopped")
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+trait HasTail:
+ def tailColor: String
+ def wagTail() = println("Tail is wagging")
+ def stopTail() = println("Tail is stopped")
+```
+
+{% endtab %}
+{% endtabs %}
+
+Notice how each trait only handles very specific attributes and behaviors: `HasLegs` deals only with legs, and `HasTail` deals only with tail-related functionality.
+Traits let you build small modules like this.
+
+Later in your code, classes can mix multiple traits to build larger components:
+
+{% tabs traits_4 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+class IrishSetter(name: String) extends HasLegs with HasTail {
+ val numLegs = 4
+ val tailColor = "Red"
+ def walk() = println("I’m walking")
+ override def toString = s"$name is a Dog"
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+class IrishSetter(name: String) extends HasLegs, HasTail:
+ val numLegs = 4
+ val tailColor = "Red"
+ def walk() = println("I’m walking")
+ override def toString = s"$name is a Dog"
+```
+
+{% endtab %}
+{% endtabs %}
+
+Notice that the `IrishSetter` class implements the abstract members that are defined in `HasLegs` and `HasTail`.
+Now you can create new `IrishSetter` instances:
+
+{% tabs traits_5 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+val d = new IrishSetter("Big Red") // "Big Red is a Dog"
+```
+
+{% endtab %}
+{% tab 'Scala 3' %}
+
+```scala
+val d = IrishSetter("Big Red") // "Big Red is a Dog"
+```
+
+{% endtab %}
+{% endtabs %}
+
+This is just a taste of what you can accomplish with traits.
+For more details, see the remainder of these modeling lessons.
+
+## Abstract classes
+
+{% comment %}
+LATER: If anyone wants to update this section, our comments about abstract classes and traits are on Slack. The biggest points seem to be:
+
+- The `super` of a trait is dynamic
+- At the use site, people can mix in traits but not classes
+- It remains easier to extend a class than a trait from Java, if the trait has at least a field
+- Similarly, in Scala.js, a class can be imported from or exported to JavaScript. A trait cannot
+- There are also some point that unrelated classes can’t be mixed together, and this can be a modeling advantage
+{% endcomment %}
+
+When you want to write a class, but you know it will have abstract members, you can either create a trait or an abstract class.
+In most situations you’ll use traits, but historically there have been two situations where it’s better to use an abstract class than a trait:
+
+- You want to create a base class that takes constructor arguments
+- The code will be called from Java code
+
+### A base class that takes constructor arguments
+
+Prior to Scala 3, when a base class needed to take constructor arguments, you’d declare it as an `abstract class`:
+
+{% tabs abstract_1 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+abstract class Pet(name: String) {
+ def greeting: String
+ def age: Int
+ override def toString = s"My name is $name, I say $greeting, and I’m $age"
+}
+
+class Dog(name: String, var age: Int) extends Pet(name) {
+ val greeting = "Woof"
+}
+
+val d = new Dog("Fido", 1)
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+abstract class Pet(name: String):
+ def greeting: String
+ def age: Int
+ override def toString = s"My name is $name, I say $greeting, and I’m $age"
+
+class Dog(name: String, var age: Int) extends Pet(name):
+ val greeting = "Woof"
+
+val d = Dog("Fido", 1)
+```
+
+{% endtab %}
+{% endtabs %}
+
+
+### Trait Parameters (Scala 3 only)
+
+However, with Scala 3, traits can now have [parameters][trait-params], so you can now use traits in the same situation:
+
+{% tabs abstract_2 %}
+
+{% tab 'Scala 3 Only' %}
+
+```scala
+trait Pet(name: String):
+ def greeting: String
+ def age: Int
+ override def toString = s"My name is $name, I say $greeting, and I’m $age"
+
+class Dog(name: String, var age: Int) extends Pet(name):
+ val greeting = "Woof"
+
+val d = Dog("Fido", 1)
+```
+
+{% endtab %}
+{% endtabs %}
+
+Traits are more flexible to compose---you can mix in multiple traits, but only extend one class---and should be preferred to classes and abstract classes most of the time.
+The rule of thumb is to use classes whenever you want to create instances of a particular type, and traits when you want to decompose and reuse behaviour.
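+
+As a tiny sketch of that rule of thumb (the names here are illustrative):
+
+{% tabs abstract_3 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+// behaviour you want to reuse and compose lives in traits ...
+trait Walks {
+  def walk(): Unit = println("walking")
+}
+trait Swims {
+  def swim(): Unit = println("swimming")
+}
+
+// ... while the concrete things you instantiate are classes that mix them in
+class Duck extends Walks with Swims
+
+val d = new Duck
+d.walk()   // "walking"
+d.swim()   // "swimming"
+```
+
+{% endtab %}
+{% endtabs %}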
+
+
+## Enums (Scala 3 only)
+
+An enumeration can be used to define a type that consists of a finite set of named values (in the section on [FP modeling][fp-modeling], we will see that enums are much more flexible than this).
+Basic enumerations are used to define sets of constants, like the months in a year, the days in a week, directions like north/south/east/west, and more.
+
+As an example, these enumerations define sets of attributes related to pizzas:
+
+{% tabs enum_1 %}
+{% tab 'Scala 3 Only' %}
+
+```scala
+enum CrustSize:
+ case Small, Medium, Large
+
+enum CrustType:
+ case Thin, Thick, Regular
+
+enum Topping:
+ case Cheese, Pepperoni, BlackOlives, GreenOlives, Onions
+```
+
+{% endtab %}
+{% endtabs %}
+
+To use them in other code, first import them, and then use them:
+
+{% tabs enum_2 %}
+{% tab 'Scala 3 Only' %}
+
+```scala
+import CrustSize.*
+val currentCrustSize = Small
+```
+
+{% endtab %}
+{% endtabs %}
+
+Enum values can be compared using equals (`==`), and also matched on:
+
+{% tabs enum_3 %}
+{% tab 'Scala 3 Only' %}
+
+```scala
+// if/then
+if currentCrustSize == Large then
+ println("You get a prize!")
+
+// match
+currentCrustSize match
+ case Small => println("small")
+ case Medium => println("medium")
+ case Large => println("large")
+```
+
+{% endtab %}
+{% endtabs %}
+
+### Additional Enum Features
+
+Enumerations can also be parameterized:
+
+{% tabs enum_4 %}
+{% tab 'Scala 3 Only' %}
+
+```scala
+enum Color(val rgb: Int):
+ case Red extends Color(0xFF0000)
+ case Green extends Color(0x00FF00)
+ case Blue extends Color(0x0000FF)
+```
+
+{% endtab %}
+{% endtabs %}
+
+And they can also have members (like fields and methods):
+
+{% tabs enum_5 %}
+{% tab 'Scala 3 Only' %}
+
+```scala
+enum Planet(mass: Double, radius: Double):
+ private final val G = 6.67300E-11
+ def surfaceGravity = G * mass / (radius * radius)
+ def surfaceWeight(otherMass: Double) =
+ otherMass * surfaceGravity
+
+ case Mercury extends Planet(3.303e+23, 2.4397e6)
+ case Earth extends Planet(5.976e+24, 6.37814e6)
+ // more planets here ...
+```
+
+{% endtab %}
+{% endtabs %}
+
+### Compatibility with Java Enums
+
+If you want to use Scala-defined enums as Java enums, you can do so by extending the class `java.lang.Enum` (which is imported by default) as follows:
+
+{% tabs enum_6 %}
+{% tab 'Scala 3 Only' %}
+
+```scala
+enum Color extends Enum[Color] { case Red, Green, Blue }
+```
+
+{% endtab %}
+{% endtabs %}
+
+The type parameter comes from the Java `enum` definition, and should be the same as the type of the enum.
+There’s no need to provide constructor arguments (as defined in the Java API docs) to `java.lang.Enum` when extending it---the compiler generates them automatically.
+
+After defining `Color` like that, you can use it like you would a Java enum:
+
+````
+scala> Color.Red.compareTo(Color.Green)
+val res0: Int = -1
+````
+
+The section on [algebraic datatypes][adts] and the [reference documentation][ref-enums] cover enumerations in more detail.
+
+## Case classes
+
+Case classes are used to model immutable data structures.
+Take the following example:
+
+{% tabs case-classes_1 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+case class Person(name: String, relation: String)
+```
+
+{% endtab %}
+{% endtabs %}
+
+Since we declare `Person` as a case class, the fields `name` and `relation` are public and immutable by default.
+We can create instances of case classes as follows:
+
+{% tabs case-classes_2 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+val christina = Person("Christina", "niece")
+```
+
+{% endtab %}
+{% endtabs %}
+
+Note that the fields can’t be mutated:
+
+{% tabs case-classes_3 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+christina.name = "Fred" // error: reassignment to val
+```
+
+{% endtab %}
+{% endtabs %}
+
+Since the fields of a case class are assumed to be immutable, the Scala compiler can generate many helpful methods for you:
+
+- An `unapply` method is generated, which allows you to perform pattern matching on a case class (that is, `case Person(n, r) => ...`).
+- A `copy` method is generated in the class, which is very useful to create modified copies of an instance.
+- `equals` and `hashCode` methods using structural equality are generated, allowing you to use instances of case classes in `Map`s.
+- A default `toString` method is generated, which is helpful for debugging.
+
+These additional features are demonstrated in the below example:
+
+{% tabs case-classes_4 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+// Case classes can be used as patterns
+christina match {
+ case Person(n, r) => println("name is " + n)
+}
+
+// `equals` and `hashCode` methods generated for you
+val hannah = Person("Hannah", "niece")
+christina == hannah // false
+
+// `toString` method
+println(christina) // Person(Christina,niece)
+
+// built-in `copy` method
+case class BaseballTeam(name: String, lastWorldSeriesWin: Int)
+val cubs1908 = BaseballTeam("Chicago Cubs", 1908)
+val cubs2016 = cubs1908.copy(lastWorldSeriesWin = 2016)
+// result:
+// cubs2016: BaseballTeam = BaseballTeam(Chicago Cubs,2016)
+
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+// Case classes can be used as patterns
+christina match
+ case Person(n, r) => println("name is " + n)
+
+// `equals` and `hashCode` methods generated for you
+val hannah = Person("Hannah", "niece")
+christina == hannah // false
+
+// `toString` method
+println(christina) // Person(Christina,niece)
+
+// built-in `copy` method
+case class BaseballTeam(name: String, lastWorldSeriesWin: Int)
+val cubs1908 = BaseballTeam("Chicago Cubs", 1908)
+val cubs2016 = cubs1908.copy(lastWorldSeriesWin = 2016)
+// result:
+// cubs2016: BaseballTeam = BaseballTeam(Chicago Cubs,2016)
+```
+
+{% endtab %}
+{% endtabs %}
+
+### Support for functional programming
+
+As mentioned, case classes support functional programming (FP):
+
+- In FP, you try to avoid mutating data structures.
+ It thus makes sense that constructor fields default to `val`.
+ Since instances of case classes can’t be changed, they can easily be shared without fearing mutation or race conditions.
+- Instead of mutating an instance, you can use the `copy` method as a template to create a new (potentially changed) instance.
+ This process can be referred to as “update as you copy.”
+- Having an `unapply` method auto-generated for you also lets case classes be used in advanced ways with pattern matching.
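+
+For instance (an illustrative aside that reuses `christina` and `hannah` from above), the generated extractor also lets you destructure case class instances outside of `match` expressions:
+
+{% tabs case-classes-destructuring %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+// destructuring in a `val` definition
+val Person(name, relation) = christina   // name: "Christina", relation: "niece"
+
+// destructuring in a `for` expression
+for (Person(n, _) <- List(christina, hannah)) println(n)
+// prints:
+// Christina
+// Hannah
+```
+
+{% endtab %}
+{% endtabs %}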
+
+{% comment %}
+NOTE: We can use this following text, if desired. If it’s used, it needs to be updated a little bit.
+
+### An `unapply` method
+
+A great thing about a case class is that it automatically generates an `unapply` method for your class, so you don’t have to write one.
+
+To demonstrate this, imagine that you have this trait:
+
+{% tabs case-classes_5 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+trait Person {
+ def name: String
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+trait Person:
+ def name: String
+```
+
+{% endtab %}
+{% endtabs %}
+
+Then, create these case classes to extend that trait:
+
+{% tabs case-classes_6 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+case class Student(name: String, year: Int) extends Person
+case class Teacher(name: String, specialty: String) extends Person
+```
+
+{% endtab %}
+{% endtabs %}
+
+Because those are defined as case classes---and they have built-in `unapply` methods---you can write a match expression like this:
+
+{% tabs case-classes_7 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+def getPrintableString(p: Person): String = p match {
+ case Student(name, year) =>
+ s"$name is a student in Year $year."
+ case Teacher(name, whatTheyTeach) =>
+ s"$name teaches $whatTheyTeach."
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+def getPrintableString(p: Person): String = p match
+ case Student(name, year) =>
+ s"$name is a student in Year $year."
+ case Teacher(name, whatTheyTeach) =>
+ s"$name teaches $whatTheyTeach."
+```
+
+{% endtab %}
+{% endtabs %}
+
+Notice these two patterns in the `case` statements:
+
+{% tabs case-classes_8 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+case Student(name, year) =>
+case Teacher(name, whatTheyTeach) =>
+```
+
+{% endtab %}
+{% endtabs %}
+
+Those patterns work because `Student` and `Teacher` are defined as case classes that have `unapply` methods whose type signature conforms to a certain standard.
+Technically, the specific type of pattern matching shown in these examples is known as a _constructor pattern_.
+
+> The Scala standard is that an `unapply` method returns the case class constructor fields in a tuple that’s wrapped in an `Option`.
+> The “tuple” part of the solution was shown in the previous lesson.
+
+To show how that code works, create an instance of `Student` and `Teacher`:
+
+{% tabs case-classes_9 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+val s = new Student("Al", 1)
+val t = new Teacher("Bob Donnan", "Mathematics")
+```
+
+{% endtab %}
+{% tab 'Scala 3' %}
+
+```scala
+val s = Student("Al", 1)
+val t = Teacher("Bob Donnan", "Mathematics")
+```
+
+{% endtab %}
+{% endtabs %}
+
+Next, this is what the output looks like in the REPL when you call `getPrintableString` with those two instances:
+
+{% tabs case-classes_10 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+scala> getPrintableString(s)
+res0: String = Al is a student in Year 1.
+
+scala> getPrintableString(t)
+res1: String = Bob Donnan teaches Mathematics.
+```
+
+{% endtab %}
+{% endtabs %}
+
+> All of this content on `unapply` methods and extractors is a little advanced for an introductory book like this, but because case classes are an important FP topic, it seems better to cover them, rather than skipping over them.
+
+#### Add pattern matching to any type with unapply
+
+A great Scala feature is that you can add pattern matching to any type by writing your own `unapply` method.
+As an example, this class defines an `unapply` method in its companion object:
+
+{% tabs case-classes_11 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+class Person(var name: String, var age: Int)
+object Person {
+ def unapply(p: Person): Tuple2[String, Int] = (p.name, p.age)
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+class Person(var name: String, var age: Int)
+object Person:
+ def unapply(p: Person): Tuple2[String, Int] = (p.name, p.age)
+```
+
+{% endtab %}
+{% endtabs %}
+
+Because it defines an `unapply` method, and because that method returns a tuple, you can now use `Person` with a `match` expression:
+
+{% tabs case-classes_12 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+val p = new Person("Astrid", 33)
+
+p match {
+ case Person(n,a) => println(s"name: $n, age: $a")
+ case null => println("No match")
+}
+
+// that code prints: "name: Astrid, age: 33"
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+val p = Person("Astrid", 33)
+
+p match
+ case Person(n,a) => println(s"name: $n, age: $a")
+ case null => println("No match")
+
+// that code prints: "name: Astrid, age: 33"
+```
+
+{% endtab %}
+{% endtabs %}
+
+{% endcomment %}
+
+## Case objects
+
+Case objects are to objects what case classes are to classes: they provide a number of automatically generated methods to make them more powerful.
+They’re particularly useful whenever you need a singleton object with a little extra functionality, such as being used with pattern matching in `match` expressions.
+
+Case objects are useful when you need to pass immutable messages around.
+For instance, if you’re working on a music player project, you might create a set of commands or messages like this:
+
+{% tabs case-objects_1 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+sealed trait Message
+case class PlaySong(name: String) extends Message
+case class IncreaseVolume(amount: Int) extends Message
+case class DecreaseVolume(amount: Int) extends Message
+case object StopPlaying extends Message
+```
+
+{% endtab %}
+{% endtabs %}
+
+Then in other parts of your code, you can write methods like this, which use pattern matching to handle the incoming message (assuming the methods `playSong`, `changeVolume`, and `stopPlayingSong` are defined somewhere else):
+
+{% tabs case-objects_2 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+def handleMessages(message: Message): Unit = message match {
+ case PlaySong(name) => playSong(name)
+ case IncreaseVolume(amount) => changeVolume(amount)
+ case DecreaseVolume(amount) => changeVolume(-amount)
+ case StopPlaying => stopPlayingSong()
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+def handleMessages(message: Message): Unit = message match
+ case PlaySong(name) => playSong(name)
+ case IncreaseVolume(amount) => changeVolume(amount)
+ case DecreaseVolume(amount) => changeVolume(-amount)
+ case StopPlaying => stopPlayingSong()
+```
+
+{% endtab %}
+{% endtabs %}
+
+[ref-enums]: {{ site.scala3ref }}/enums/enums.html
+[adts]: {% link _overviews/scala3-book/types-adts-gadts.md %}
+[fp-modeling]: {% link _overviews/scala3-book/domain-modeling-fp.md %}
+[creator]: {{ site.scala3ref }}/other-new-features/creator-applications.html
+[unapply]: {{ site.scala3ref }}/changed-features/pattern-matching.html
+[trait-params]: {{ site.scala3ref }}/other-new-features/trait-parameters.html
diff --git a/_overviews/scala3-book/first-look-at-types.md b/_overviews/scala3-book/first-look-at-types.md
new file mode 100644
index 0000000000..5cdb32e57f
--- /dev/null
+++ b/_overviews/scala3-book/first-look-at-types.md
@@ -0,0 +1,303 @@
+---
+title: A First Look at Types
+type: chapter
+description: This page provides a brief introduction to Scala's built-in data types, including Int, Double, String, Long, Any, AnyRef, Nothing, and Null.
+languages: [ru, zh-cn]
+num: 17
+previous-page: taste-summary
+next-page: string-interpolation
+---
+
+
+## All values have a type
+
+In Scala, all values have a type, including numerical values and functions.
+The diagram below illustrates a subset of the type hierarchy.
+
+
+
+## Scala type hierarchy
+
+`Any` is the supertype of all types, also called the **top type**.
+It defines certain universal methods such as `equals`, `hashCode`, and `toString`.
+
+The top-type `Any` has a subtype [`Matchable`][matchable], which is used to mark all types that we can perform pattern matching on. This restriction is important to guarantee a property called _parametricity_.
+We will not go into details here, but in summary, it means that we cannot pattern match on values of type `Any`, but only on values that are a subtype of `Matchable`.
+The [reference documentation][matchable] contains more information about `Matchable`.
+
+`Matchable` has two important subtypes: `AnyVal` and `AnyRef`.
+
+*`AnyVal`* represents value types.
+There are a couple of predefined value types, and they are non-nullable: `Double`, `Float`, `Long`, `Int`, `Short`, `Byte`, `Char`, `Unit`, and `Boolean`.
+`Unit` is a value type which carries no meaningful information.
+There is exactly one instance of `Unit` which we can refer to as: `()`.
+
+*`AnyRef`* represents reference types.
+All non-value types are defined as reference types.
+Every user-defined type in Scala is a subtype of `AnyRef`.
+If Scala is used in the context of a Java runtime environment, `AnyRef` corresponds to `java.lang.Object`.
+
+In statement-based languages, `void` is used for methods that don’t return anything.
+If you write methods in Scala that have no return value, such as the following method, `Unit` is used for the same purpose:
+
+{% tabs unit %}
+{% tab 'Scala 2 and 3' for=unit %}
+```scala
+def printIt(a: Any): Unit = println(a)
+```
+{% endtab %}
+{% endtabs %}
+
+Here’s an example that demonstrates that strings, integers, characters, boolean values, and functions are all instances of `Any` and can be treated just like every other object:
+
+{% tabs any %}
+{% tab 'Scala 2 and 3' for=any %}
+```scala
+val list: List[Any] = List(
+ "a string",
+ 732, // an integer
+ 'c', // a character
+ '\'', // a character with a backslash escape
+ true, // a boolean value
+ () => "an anonymous function returning a string"
+)
+
+list.foreach(element => println(element))
+```
+{% endtab %}
+{% endtabs %}
+
+The code defines a value `list` of type `List[Any]`.
+The list is initialized with elements of various types, but each is an instance of `scala.Any`, so we can add them to the list.
+
+Here’s the output of the program:
+
+```
+a string
+732
+c
+'
+true
+
+```
+
+## Scala’s “value types”
+
+As shown above, Scala’s numeric types extend `AnyVal`, and they’re all full-blown objects.
+These examples show how to declare variables of these numeric types:
+
+{% tabs anyval %}
+{% tab 'Scala 2 and 3' for=anyval %}
+```scala
+val b: Byte = 1
+val i: Int = 1
+val l: Long = 1
+val s: Short = 1
+val d: Double = 2.0
+val f: Float = 3.0
+```
+{% endtab %}
+{% endtabs %}
+
+In the first four examples, if you don’t explicitly specify a type, the number `1` will default to an `Int`, so if you want one of the other data types---`Byte`, `Long`, or `Short`---you need to explicitly declare those types, as shown.
+Numbers with a decimal (like 2.0) will default to a `Double`, so if you want a `Float` you need to declare a `Float`, as shown in the last example.
+
+Because `Int` and `Double` are the default numeric types, you typically create them without explicitly declaring the data type:
+
+{% tabs anynum %}
+{% tab 'Scala 2 and 3' for=anynum %}
+```scala
+val i = 123 // defaults to Int
+val x = 1.0 // defaults to Double
+```
+{% endtab %}
+{% endtabs %}
+
+In your code you can also append the characters `L`, `D`, and `F` (and their lowercase equivalents) to numbers to specify that they are `Long`, `Double`, or `Float` values:
+
+{% tabs type-post %}
+{% tab 'Scala 2 and 3' for=type-post %}
+```scala
+val x = 1_000L // val x: Long = 1000
+val y = 2.2D // val y: Double = 2.2
+val z = -3.3F // val z: Float = -3.3
+```
+
+You may also use hexadecimal notation to format integer numbers (normally `Int`, though they also support the
+`L` suffix to specify that they are `Long`):
+
+```scala
+val a = 0xACE // val a: Int = 2766
+val b = 0xfd_3aL // val b: Long = 64826
+```
+
+Scala supports many different ways to format the same floating point number, e.g.
+```scala
+val q = .25 // val q: Double = 0.25
+val r = 2.5e-1 // val r: Double = 0.25
+val s = .0025e2F // val s: Float = 0.25
+```
+{% endtab %}
+{% endtabs %}
+
+Scala also has `String` and `Char` types, which you can generally declare with the implicit form:
+
+{% tabs type-string %}
+{% tab 'Scala 2 and 3' for=type-string %}
+```scala
+val s = "Bill"
+val c = 'a'
+```
+{% endtab %}
+{% endtabs %}
+
+As shown, enclose strings in double-quotes---or triple-quotes for multiline strings---and enclose a character in single-quotes.
+
+Those data types and their ranges are:
+
+| Data Type | Possible Values |
+|-----------|--------------------------------------------------------------------------------------------------|
+| Boolean   | `true` or `false` |
+| Byte      | 8-bit signed two’s complement integer (-2^7 to 2^7-1, inclusive); -128 to 127 |
+| Short     | 16-bit signed two’s complement integer (-2^15 to 2^15-1, inclusive); -32,768 to 32,767 |
+| Int       | 32-bit signed two’s complement integer (-2^31 to 2^31-1, inclusive); -2,147,483,648 to 2,147,483,647 |
+| Long      | 64-bit signed two’s complement integer (-2^63 to 2^63-1, inclusive); -9,223,372,036,854,775,808 to 9,223,372,036,854,775,807 |
+| Float     | 32-bit IEEE 754 single-precision float; 1.40129846432481707e-45 to 3.40282346638528860e+38 |
+| Double    | 64-bit IEEE 754 double-precision float; 4.94065645841246544e-324 to 1.79769313486231570e+308 |
+| Char      | 16-bit unsigned Unicode character (0 to 2^16-1, inclusive); 0 to 65,535 |
+| String    | a sequence of `Char` |
+
+## Strings
+
+Scala strings are similar to Java strings, though unlike Java (at least before Java 15),
+it's easy to create multiline strings with triple quotes:
+
+{% tabs string-mlines1 %}
+{% tab 'Scala 2 and 3' for=string-mlines1 %}
+```scala
+val quote = """The essence of Scala:
+ Fusion of functional and object-oriented
+ programming in a typed setting."""
+```
+{% endtab %}
+{% endtabs %}
+
+One drawback of this basic approach is that the lines after the first line are indented, and look like this:
+
+{% tabs string-mlines2 %}
+{% tab 'Scala 2 and 3' for=string-mlines2 %}
+```scala
+"The essence of Scala:
+ Fusion of functional and object-oriented
+ programming in a typed setting."
+```
+{% endtab %}
+{% endtabs %}
+
+When spacing is important, put a `|` symbol in front of all lines after the first line, and call the `stripMargin` method after the string:
+
+{% tabs string-mlines3 %}
+{% tab 'Scala 2 and 3' for=string-mlines3 %}
+```scala
+val quote = """The essence of Scala:
+ |Fusion of functional and object-oriented
+ |programming in a typed setting.""".stripMargin
+```
+{% endtab %}
+{% endtabs %}
+
+Now all of the lines are left-justified inside the string:
+
+{% tabs string-mlines4 %}
+{% tab 'Scala 2 and 3' for=string-mlines4 %}
+```scala
+"The essence of Scala:
+Fusion of functional and object-oriented
+programming in a typed setting."
+```
+{% endtab %}
+{% endtabs %}
+
+Scala strings also support powerful string interpolation methods, which we'll talk about
+in the [next chapter][string-interpolation].
+
+## `BigInt` and `BigDecimal`
+
+When you need really large numbers, use the `BigInt` and `BigDecimal` types:
+
+{% tabs type-bigint %}
+{% tab 'Scala 2 and 3' for=type-bigint %}
+```scala
+val a = BigInt(1_234_567_890_987_654_321L)
+val b = BigDecimal(123_456.789)
+```
+{% endtab %}
+{% endtabs %}
+
+Where `Double` and `Float` are approximate decimal numbers, `BigDecimal` is used for precise arithmetic, such as when working with currency.
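+
+For instance, a quick illustrative comparison (not part of the original text):
+
+{% tabs type-bigdecimal %}
+{% tab 'Scala 2 and 3' for=type-bigdecimal %}
+```scala
+0.1 + 0.2                               // Double = 0.30000000000000004
+BigDecimal("0.1") + BigDecimal("0.2")   // BigDecimal = 0.3
+```
+{% endtab %}
+{% endtabs %}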
+
+A great thing about `BigInt` and `BigDecimal` is that they support all the operators you’re used to using with numeric types:
+
+{% tabs type-bigint2 %}
+{% tab 'Scala 2 and 3' for=type-bigint2 %}
+```scala
+val b = BigInt(1234567890) // scala.math.BigInt = 1234567890
+val c = b + b // scala.math.BigInt = 2469135780
+val d = b * b // scala.math.BigInt = 1524157875019052100
+```
+{% endtab %}
+{% endtabs %}
+
+## Type casting
+
+Value types can be cast in the following way:
+
+`Byte` → `Short` → `Int` → `Long` → `Float` → `Double`, and separately `Char` → `Int`
+
+For example:
+
+{% tabs cast1 %}
+{% tab 'Scala 2 and 3' for=cast1 %}
+```scala
+val b: Byte = 127
+val i: Int = b // 127
+
+val face: Char = '☺'
+val number: Int = face // 9786
+```
+{% endtab %}
+{% endtabs %}
+
+You can only cast to a type if there is no loss of information. Otherwise, you need to be explicit about the cast:
+
+{% tabs cast2 %}
+{% tab 'Scala 2 and 3' for=cast2 %}
+```scala
+val x: Long = 987654321
+val y: Float = x.toFloat // 9.8765434E8 (note that `.toFloat` is required because the cast results in precision loss)
+val z: Long = y // Error
+```
+{% endtab %}
+{% endtabs %}
+
+You can also cast a reference type to a subtype.
+This will be covered later in the book.
+
+## `Nothing` and `null`
+
+`Nothing` is a subtype of all types, also called the **bottom type**.
+There is no value that has the type `Nothing`.
+A common use is to signal non-termination, such as a thrown exception, program exit, or an infinite loop---i.e., it is the type of an expression which does not evaluate to a value, or a method that does not return normally.
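+
+As a small illustration (not in the original text), a method that never returns normally can be given the result type `Nothing`, and because `Nothing` is a subtype of every type, such a method can be used wherever any other type is expected:
+
+{% tabs type-nothing %}
+{% tab 'Scala 2 and 3' for=type-nothing %}
+```scala
+def fail(message: String): Nothing =
+  throw new IllegalArgumentException(message)
+
+// `fail` can appear where an Int is expected, because Nothing is a subtype of Int
+def divide(a: Int, b: Int): Int =
+  if (b != 0) a / b else fail("division by zero")
+```
+{% endtab %}
+{% endtabs %}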
+
+`Null` is a subtype of all reference types (i.e. any subtype of `AnyRef`).
+It has a single value identified by the keyword literal `null`.
+Currently, the usage of `null` is considered bad practice. It should be used mostly for interoperability with other JVM languages. An opt-in compiler option changes the status of `Null` to fix the caveats related to its usage. This option might become the default in a future version of Scala. You can learn more about it [here][safe-null].
+
+In the meantime, `null` should almost never be used in Scala code.
+Alternatives to `null` are discussed in the [Functional Programming chapter][fp] of this book, and the [API documentation][option-api].
+
+[reference]: {{ site.scala3ref }}/overview.html
+[matchable]: {{ site.scala3ref }}/other-new-features/matchable.html
+[fp]: {% link _overviews/scala3-book/fp-intro.md %}
+[string-interpolation]: {% link _overviews/scala3-book/string-interpolation.md %}
+[option-api]: https://scala-lang.org/api/3.x/scala/Option.html
+[safe-null]: {{ site.scala3ref }}/experimental/explicit-nulls.html
diff --git a/_overviews/scala3-book/fp-functional-error-handling.md b/_overviews/scala3-book/fp-functional-error-handling.md
new file mode 100644
index 0000000000..e22fc2b4bb
--- /dev/null
+++ b/_overviews/scala3-book/fp-functional-error-handling.md
@@ -0,0 +1,542 @@
+---
+title: Functional Error Handling
+type: section
+description: This section provides an introduction to functional error handling in Scala 3.
+languages: [ru, zh-cn]
+num: 46
+previous-page: fp-functions-are-values
+next-page: fp-summary
+---
+
+
+
+Functional programming is like writing a series of algebraic equations, and because algebra doesn’t have null values or throw exceptions, you don’t use these features in FP.
+This brings up an interesting question: In the situations where you might normally use a null value or exception in OOP code, what do you do?
+
+Scala’s solution is to use constructs like the `Option`/`Some`/`None` classes.
+This lesson provides an introduction to using these techniques.
+
+Two notes before we jump in:
+
+- The `Some` and `None` classes are subclasses of `Option`.
+- Instead of repeatedly saying “`Option`/`Some`/`None`,” the following text generally just refers to “`Option`” or “the `Option` classes.”
+
+
+
+## A first example
+
+While this first example doesn’t deal with null values, it’s a good way to introduce the `Option` classes, so we’ll start with it.
+
+Imagine that you want to write a method that makes it easy to convert strings to integer values, and you want an elegant way to handle the exception that’s thrown when your method gets a string like `"Hello"` instead of `"1"`.
+A first guess at such a method might look like this:
+
+
+{% tabs fp-java-try class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+def makeInt(s: String): Int =
+ try {
+ Integer.parseInt(s.trim)
+ } catch {
+ case e: Exception => 0
+ }
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+def makeInt(s: String): Int =
+ try
+ Integer.parseInt(s.trim)
+ catch
+ case e: Exception => 0
+```
+{% endtab %}
+
+{% endtabs %}
+
+If the conversion works, this method returns the correct `Int` value, but if it fails, the method returns `0`.
+This might be okay for some purposes, but it’s not really accurate.
+For instance, the method might have received `"0"`, but it may have also received `"foo"`, `"bar"`, or an infinite number of other strings that will throw an exception.
+This is a real problem: How do you know when the method really received a `"0"`, or when it received something else?
+The answer is that with this approach, there’s no way to know.
+
+
+
+## Using Option/Some/None
+
+A common solution to this problem in Scala is to use a trio of classes known as `Option`, `Some`, and `None`.
+The `Some` and `None` classes are subclasses of `Option`, so the solution works like this:
+
+- You declare that `makeInt` returns an `Option` type
+- If `makeInt` receives a string it *can* convert to an `Int`, the answer is wrapped inside a `Some`
+- If `makeInt` receives a string it *can’t* convert, it returns a `None`
+
+Here’s the revised version of `makeInt`:
+
+
+{% tabs fp--try-option class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+def makeInt(s: String): Option[Int] =
+ try {
+ Some(Integer.parseInt(s.trim))
+ } catch {
+ case e: Exception => None
+ }
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+def makeInt(s: String): Option[Int] =
+ try
+ Some(Integer.parseInt(s.trim))
+ catch
+ case e: Exception => None
+```
+{% endtab %}
+
+{% endtabs %}
+
+This code can be read as, “When the given string converts to an integer, return the `Int` wrapped inside a `Some`, such as `Some(1)`.
+When the string can’t be converted to an integer, an exception is thrown and caught, and the method returns a `None` value.”
+
+These examples show how `makeInt` works:
+
+{% tabs fp-try-option-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val a = makeInt("1") // Some(1)
+val b = makeInt("one") // None
+```
+{% endtab %}
+
+{% endtabs %}
+
+As shown, the string `"1"` results in a `Some(1)`, and the string `"one"` results in a `None`.
+This is the essence of the `Option` approach to error handling.
+As shown, this technique is used so methods can return *values* instead of *exceptions*.
+In other situations, `Option` values are also used to replace `null` values.
+
+Two notes:
+
+- You’ll find this approach used throughout Scala library classes, and in third-party Scala libraries.
+- A key point of this example is that functional methods don’t throw exceptions; instead they return values like `Option`.
+
+
+
+## Being a consumer of makeInt
+
+Now imagine that you’re a consumer of the `makeInt` method.
+You know that it returns a subclass of `Option[Int]`, so the question becomes, how do you work with these return types?
+
+There are two common answers, depending on your needs:
+
+- Use a `match` expression
+- Use a `for` expression
+
+## Using a `match` expression
+
+One possible solution is to use a `match` expression:
+
+{% tabs fp-option-match class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+makeInt(x) match {
+ case Some(i) => println(i)
+ case None => println("That didn’t work.")
+}
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+makeInt(x) match
+ case Some(i) => println(i)
+ case None => println("That didn’t work.")
+```
+{% endtab %}
+
+{% endtabs %}
+
+In this example, if `x` can be converted to an `Int`, the expression on the right-hand side of the first `case` clause is evaluated; if `x` can’t be converted to an `Int`, the expression on the right-hand side of the second `case` clause is evaluated.
+
+
+
+## Using a `for` expression
+
+Another common solution is to use a `for` expression---i.e., the `for`/`yield` combination that was shown earlier in this book.
+For instance, imagine that you want to convert three strings to integer values, and then add them together.
+This is how you do that with a `for` expression and `makeInt`:
+
+
+{% tabs fp-for-comprehension class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+val y = for {
+ a <- makeInt(stringA)
+ b <- makeInt(stringB)
+ c <- makeInt(stringC)
+} yield {
+ a + b + c
+}
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+val y = for
+ a <- makeInt(stringA)
+ b <- makeInt(stringB)
+ c <- makeInt(stringC)
+yield
+ a + b + c
+```
+{% endtab %}
+
+{% endtabs %}
+
+After that expression runs, `y` will be one of two things:
+
+- If *all* three strings convert to `Int` values, `y` will be a `Some[Int]`, i.e., an integer wrapped inside a `Some`
+- If *any* of the three strings can’t be converted to an `Int`, `y` will be a `None`
+
+You can test this for yourself:
+
+{% tabs fp-for-comprehension-evaluation class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+val stringA = "1"
+val stringB = "2"
+val stringC = "3"
+
+val y = for {
+ a <- makeInt(stringA)
+ b <- makeInt(stringB)
+ c <- makeInt(stringC)
+} yield {
+ a + b + c
+}
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+val stringA = "1"
+val stringB = "2"
+val stringC = "3"
+
+val y = for
+ a <- makeInt(stringA)
+ b <- makeInt(stringB)
+ c <- makeInt(stringC)
+yield
+ a + b + c
+```
+{% endtab %}
+
+{% endtabs %}
+
+With that sample data, the variable `y` will have the value `Some(6)`.
+
+To see the failure case, change any of those strings to something that won’t convert to an integer.
+When you do that, you’ll see that `y` is a `None`:
+
+{% tabs fp-for-comprehension-failure-result %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+y: Option[Int] = None
+```
+{% endtab %}
+
+{% endtabs %}
+
+
+## Thinking of Option as a container
+
+Mental models can often help us understand new situations, so if you’re not familiar with the `Option` classes, one way to think about them is as a *container*:
+
+- `Some` is a container with one item in it
+- `None` is a container, but it has nothing in it
+
+If you prefer to think of the `Option` classes as being like a box, `None` is like an empty box.
+It could have had something in it, but it doesn’t.
+
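+To make the container analogy concrete, here’s a small sketch; `isEmpty` and `getOrElse` are standard `Option` methods, and `getOrElse` returns the contained value, or a default when the “box” is empty:
+
+{% tabs fp-option-container-sketch %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val full  = Option(42)           // Some(42): a box with one item in it
+val empty = Option.empty[Int]    // None: an empty box
+
+full.isEmpty        // false
+empty.isEmpty       // true
+
+full.getOrElse(0)   // 42
+empty.getOrElse(0)  // 0
+```
+{% endtab %}
+
+{% endtabs %}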
+
+{% comment %}
+NOTE: I commented-out this subsection because it continues to explain Some and None, and I thought it was probably too much for this book.
+
+
+
+## Using `foreach` with `Option`
+
+Because `Some` and `None` can be thought of as containers, they’re also like collections classes.
+They have many of the methods you’d expect from a collection class, including `map`, `filter`, `foreach`, etc.
+
+This raises an interesting question: What will these two values print, if anything?
+
+{% tabs fp-option-methods-evaluation %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+makeInt("1").foreach(println)
+makeInt("x").foreach(println)
+```
+{% endtab %}
+
+{% endtabs %}
+
+Answer: The first example prints the number `1`, and the second example doesn’t print anything.
+The first example prints `1` because:
+
+- `makeInt("1")` evaluates to `Some(1)`
+- The expression becomes `Some(1).foreach(println)`
+- The `foreach` method on the `Some` class knows how to reach inside the `Some` container and extract the value (`1`) that’s inside it, so it passes that value to `println`
+
+Similarly, the second example prints nothing because:
+
+- `makeInt("x")` evaluates to `None`
+- The `foreach` method on the `None` class knows that `None` doesn’t contain anything, so it does nothing
+
+In this regard, `None` is similar to an empty `List`.
+
+
+### The happy and unhappy paths
+
+Somewhere in Scala’s history, someone noted that the first example (the `Some`) represents the “Happy Path” of the `Option` approach, and the second example (the `None`) represents the “Unhappy Path.”
+*But* despite having two different possible outcomes, the great thing with `Option` is that there’s really just one path: The code you write to handle the `Some` and `None` possibilities is the same in both cases.
+The `foreach` examples look like this:
+
+{% tabs fp-another-option-method-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+makeInt(aString).foreach(println)
+```
+{% endtab %}
+
+{% endtabs %}
+
+And the `for` expression looks like this:
+
+{% tabs fp-another-for-comprehension-example class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+val y = for {
+ a <- makeInt(stringA)
+ b <- makeInt(stringB)
+ c <- makeInt(stringC)
+} yield {
+ a + b + c
+}
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+val y = for
+ a <- makeInt(stringA)
+ b <- makeInt(stringB)
+ c <- makeInt(stringC)
+yield
+ a + b + c
+```
+{% endtab %}
+
+{% endtabs %}
+
+With exceptions you have to worry about handling branching logic, but because `makeInt` returns a value, you only have to write one piece of code to handle both the Happy and Unhappy Paths, and that simplifies your code.
+
+Indeed, the only time you have to think about whether the `Option` is a `Some` or a `None` is when you handle the result value, such as in a `match` expression:
+
+{% tabs fp-option-match-handle class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+makeInt(x) match {
+ case Some(i) => println(i)
+ case None => println("That didn't work.")
+}
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+makeInt(x) match
+ case Some(i) => println(i)
+ case None => println("That didn't work.")
+```
+{% endtab %}
+
+{% endtabs %}
+
+> There are several other ways to handle `Option` values.
+> See the reference documentation for more details.
+{% endcomment %}
+
+
+
+## Using `Option` to replace `null`
+
+Getting back to `null` values, a place where a `null` value can silently creep into your code is with a class like this:
+
+{% tabs fp-case-class-nulls %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+class Address(
+ var street1: String,
+ var street2: String,
+ var city: String,
+ var state: String,
+ var zip: String
+)
+```
+{% endtab %}
+
+{% endtabs %}
+
+While every address on Earth has a `street1` value, the `street2` value is optional.
+As a result, the `street2` field can be assigned a `null` value:
+
+
+{% tabs fp-case-class-nulls-example class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+val santa = new Address(
+ "1 Main Street",
+ null, // <-- D’oh! A null value!
+ "North Pole",
+ "Alaska",
+ "99705"
+)
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+val santa = Address(
+ "1 Main Street",
+ null, // <-- D’oh! A null value!
+ "North Pole",
+ "Alaska",
+ "99705"
+)
+```
+{% endtab %}
+
+{% endtabs %}
+
+Historically, developers have used blank strings and null values in this situation, both of which are hacks to work around the root problem: `street2` is an *optional* field.
+In Scala---and other modern languages---the correct solution is to declare up front that `street2` is optional:
+
+
+{% tabs fp-case-class-with-options %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+class Address(
+ var street1: String,
+ var street2: Option[String], // an optional value
+ var city: String,
+ var state: String,
+ var zip: String
+)
+```
+{% endtab %}
+
+{% endtabs %}
+
+Now developers can write more accurate code like this:
+
+{% tabs fp-case-class-with-options-example-none class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+val santa = new Address(
+ "1 Main Street",
+ None, // 'street2' has no value
+ "North Pole",
+ "Alaska",
+ "99705"
+)
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+val santa = Address(
+ "1 Main Street",
+ None, // 'street2' has no value
+ "North Pole",
+ "Alaska",
+ "99705"
+)
+```
+{% endtab %}
+
+{% endtabs %}
+
+or this:
+
+{% tabs fp-case-class-with-options-example-some class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+val santa = new Address(
+ "123 Main Street",
+ Some("Apt. 2B"),
+ "Talkeetna",
+ "Alaska",
+ "99676"
+)
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+val santa = Address(
+ "123 Main Street",
+ Some("Apt. 2B"),
+ "Talkeetna",
+ "Alaska",
+ "99676"
+)
+```
+{% endtab %}
+
+{% endtabs %}
+
+
+
+## `Option` isn’t the only solution
+
+While this section focuses on the `Option` classes, Scala has a few other alternatives.
+
+For example, a trio of classes known as `Try`/`Success`/`Failure` works in the same manner, but (a) you primarily use these classes when your code can throw exceptions, and (b) the `Failure` class is useful because it gives you access to the exception that was thrown.
+For example, these `Try` classes are commonly used when writing methods that interact with files, databases, and internet services, as those functions can easily throw exceptions.
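+
+As a rough sketch for comparison only, here’s what the earlier `makeInt` might look like if it returned a `Try` instead of an `Option`; the `Failure` case carries the exception that was thrown:
+
+{% tabs fp-try-sketch %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+import scala.util.{Try, Success, Failure}
+
+def makeInt(s: String): Try[Int] = Try(Integer.parseInt(s.trim))
+
+makeInt("1")     // Success(1)
+makeInt("one")   // Failure(java.lang.NumberFormatException: For input string: "one")
+
+makeInt("one") match {
+  case Success(i) => println(i)
+  case Failure(e) => println(s"Failed: ${e.getMessage}")
+}
+```
+{% endtab %}
+
+{% endtabs %}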
+
+
+
+## A quick review
+
+This section was long, so let’s give it a quick review:
+
+- Functional programmers don’t use `null` values
+- A main replacement for `null` values is to use the `Option` classes
+- Functional methods don’t throw exceptions; instead they return values like `Option`, `Try`, or `Either`
+- Common ways to work with `Option` values are `match` and `for` expressions
+- Options can be thought of as containers of one item (`Some`) and no items (`None`)
+- Options can also be used for optional constructor or method parameters
+
+
diff --git a/_overviews/scala3-book/fp-functions-are-values.md b/_overviews/scala3-book/fp-functions-are-values.md
new file mode 100644
index 0000000000..e656d3c9f9
--- /dev/null
+++ b/_overviews/scala3-book/fp-functions-are-values.md
@@ -0,0 +1,145 @@
+---
+title: Functions Are Values
+type: section
+description: This section looks at the use of functions as values in functional programming.
+languages: [ru, zh-cn]
+num: 45
+previous-page: fp-pure-functions
+next-page: fp-functional-error-handling
+---
+
+
+While every programming language ever created probably lets you write pure functions, a second important Scala FP feature is that *you can create functions as values*, just like you create `String` and `Int` values.
+
+This feature has many benefits, the most common of which are (a) you can define methods to accept function parameters, and (b) you can pass functions as parameters into methods.
+You’ve seen this in multiple places in this book, whenever methods like `map` and `filter` are demonstrated:
+
+{% tabs fp-function-as-values-anonymous %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val nums = (1 to 10).toList
+
+val doubles = nums.map(_ * 2) // double each value
+val lessThanFive = nums.filter(_ < 5) // List(1,2,3,4)
+```
+{% endtab %}
+
+{% endtabs %}
+
+In those examples, anonymous functions are passed into `map` and `filter`.
+
+> Anonymous functions are also known as *lambdas*.
+
+In addition to passing anonymous functions into `filter` and `map`, you can also supply them with *methods*:
+
+{% tabs fp-function-as-values-defined %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+// two methods
+def double(i: Int): Int = i * 2
+def underFive(i: Int): Boolean = i < 5
+
+// pass those methods into filter and map
+val doubles = nums.filter(underFive).map(double)
+```
+{% endtab %}
+
+{% endtabs %}
+
+This ability to treat methods and functions as values is a powerful feature that functional programming languages provide.
+
+> Technically, a function that takes another function as an input parameter is known as a *Higher-Order Function*.
+> (If you like humor, as someone once wrote, that’s like saying that a class that takes an instance of another class as a constructor parameter is a Higher-Order Class.)
+
+
+
+## Functions, anonymous functions, and methods
+
+As you saw in those examples, this is an anonymous function:
+
+{% tabs fp-anonymous-function-short %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+_ * 2
+```
+{% endtab %}
+
+{% endtabs %}
+
+As shown in the [higher-order functions][hofs] discussion, that’s a shorthand version of this syntax:
+
+{% tabs fp-anonymous-function-full %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+(i: Int) => i * 2
+```
+{% endtab %}
+
+{% endtabs %}
+
+Functions like these are called “anonymous” because they don’t have names.
+If you want to give one a name, just assign it to a variable:
+
+{% tabs fp-function-assignement %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val double = (i: Int) => i * 2
+```
+{% endtab %}
+
+{% endtabs %}
+
+Now you have a named function, one that’s assigned to a variable.
+You can use this function just like you use a method:
+
+{% tabs fp-function-used-like-method %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+double(2) // 4
+```
+{% endtab %}
+
+{% endtabs %}
+
+In most scenarios it doesn’t matter if `double` is a function or a method; Scala lets you treat them the same way.
+Behind the scenes, the Scala technology that lets you treat methods just like functions is known as [Eta Expansion][eta].
+
+This ability to seamlessly pass functions around as variables is a distinguishing feature of functional programming languages like Scala.
+And as you’ve seen in the `map` and `filter` examples throughout this book, the ability to pass functions into other functions helps you create code that is concise and still readable---*expressive*.
+
+If you’re not comfortable with the process of passing functions as parameters into other functions, here are a few more examples you can experiment with:
+
+{% tabs fp-function-as-values-example %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+List("bob", "joe").map(_.toUpperCase) // List(BOB, JOE)
+List("bob", "joe").map(_.capitalize) // List(Bob, Joe)
+List("plum", "banana").map(_.length) // List(4, 6)
+
+val fruits = List("apple", "pear")
+fruits.map(_.toUpperCase) // List(APPLE, PEAR)
+fruits.flatMap(_.toUpperCase) // List(A, P, P, L, E, P, E, A, R)
+
+val nums = List(5, 1, 3, 11, 7)
+nums.map(_ * 2) // List(10, 2, 6, 22, 14)
+nums.filter(_ > 3) // List(5, 11, 7)
+nums.takeWhile(_ < 6) // List(5, 1, 3)
+nums.sortWith(_ < _) // List(1, 3, 5, 7, 11)
+nums.sortWith(_ > _) // List(11, 7, 5, 3, 1)
+
+nums.takeWhile(_ < 6).sortWith(_ < _) // List(1, 3, 5)
+```
+{% endtab %}
+
+{% endtabs %}
+
+
+[hofs]: {% link _overviews/scala3-book/fun-hofs.md %}
+[eta]: {% link _overviews/scala3-book/fun-eta-expansion.md %}
diff --git a/_overviews/scala3-book/fp-immutable-values.md b/_overviews/scala3-book/fp-immutable-values.md
new file mode 100644
index 0000000000..2226ceac95
--- /dev/null
+++ b/_overviews/scala3-book/fp-immutable-values.md
@@ -0,0 +1,103 @@
+---
+title: Immutable Values
+type: section
+description: This section looks at the use of immutable values in functional programming.
+languages: [ru, zh-cn]
+num: 43
+previous-page: fp-what-is-fp
+next-page: fp-pure-functions
+---
+
+
+In pure functional programming, only immutable values are used.
+In Scala this means:
+
+- All variables are created as `val` fields
+- Only immutable collections classes are used, such as `List`, `Vector`, and the immutable `Map` and `Set` classes
+
+Using only immutable variables raises an interesting question: If everything is immutable, how does anything ever change?
+
+When it comes to using collections, one answer is that you don’t mutate an existing collection; instead, you apply a function to an existing collection to create a new collection.
+This is where higher-order functions like `map` and `filter` come in.
+
+For example, imagine that you have a list of names---a `List[String]`---that are all in lowercase, and you want to find all the names that begin with the letter `"j"`, and then you want to capitalize those names.
+In FP you write this code:
+
+{% tabs fp-list %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val a = List("jane", "jon", "mary", "joe")
+val b = a.filter(_.startsWith("j"))
+ .map(_.capitalize)
+```
+{% endtab %}
+
+{% endtabs %}
+
+As shown, you don’t mutate the original list `a`.
+Instead, you apply filtering and transformation functions to `a` to create a new collection, and assign that result to the new immutable variable `b`.
+
+Similarly, in FP you don’t create classes with mutable `var` constructor parameters.
+That is, you don’t write this:
+
+{% tabs fp--class-variables %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+// don’t do this in FP
+class Person(var firstName: String, var lastName: String)
+             ---                    ---
+```
+{% endtab %}
+
+{% endtabs %}
+
+Instead, you typically create `case` classes, whose constructor parameters are `val` by default:
+
+{% tabs fp-immutable-case-class %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+case class Person(firstName: String, lastName: String)
+```
+{% endtab %}
+
+{% endtabs %}
+
+Now you create a `Person` instance as a `val` field:
+
+{% tabs fp-case-class-creation %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val reginald = Person("Reginald", "Dwight")
+```
+{% endtab %}
+
+{% endtabs %}
+
+Then, when you need to make a change to the data, you use the `copy` method that comes with a `case` class to “update the data as you make a copy,” like this:
+
+
+{% tabs fp-case-class-copy %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val elton = reginald.copy(
+ firstName = "Elton", // update the first name
+ lastName = "John" // update the last name
+)
+```
+{% endtab %}
+
+{% endtabs %}
+
+There are other techniques for working with immutable collections and variables, but hopefully these examples give you a taste of the techniques.
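+
+For example, here’s a small sketch of the same copy-on-update idea applied to immutable collections; methods like `updated` and `+` return new collections and leave the originals untouched:
+
+{% tabs fp-immutable-collections-sketch %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val xs = Vector(1, 2, 3)
+val ys = xs.updated(0, 10)    // Vector(10, 2, 3); xs is unchanged
+
+val m1 = Map("a" -> 1)
+val m2 = m1 + ("b" -> 2)      // Map(a -> 1, b -> 2); m1 is unchanged
+```
+{% endtab %}
+
+{% endtabs %}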
+
+> Depending on your needs, you may create enums, traits, or classes instead of `case` classes.
+> See the [Data Modeling][modeling] chapter for more details.
+
+
+
+[modeling]: {% link _overviews/scala3-book/domain-modeling-intro.md %}
diff --git a/_overviews/scala3-book/fp-intro.md b/_overviews/scala3-book/fp-intro.md
new file mode 100644
index 0000000000..99f02ca759
--- /dev/null
+++ b/_overviews/scala3-book/fp-intro.md
@@ -0,0 +1,26 @@
+---
+title: Functional Programming
+type: chapter
+description: This chapter provides an introduction to functional programming in Scala 3.
+languages: [ru, zh-cn]
+num: 41
+previous-page: collections-summary
+next-page: fp-what-is-fp
+---
+
+
+Scala lets you write code in an object-oriented programming (OOP) style, a functional programming (FP) style, and also in a hybrid style---using both approaches in combination.
+As stated by Martin Odersky, the creator of Scala, the essence of Scala is a fusion of functional and object-oriented programming in a typed setting:
+
+- Functions for the logic
+- Objects for the modularity
+
+This chapter assumes that you’re comfortable with OOP and less comfortable with FP, so it provides a gentle introduction to several main functional programming concepts:
+
+- What is functional programming?
+- Immutable values
+- Pure functions
+- Functions are values
+- Functional error handling
+
+
diff --git a/_overviews/scala3-book/fp-pure-functions.md b/_overviews/scala3-book/fp-pure-functions.md
new file mode 100644
index 0000000000..641eee59ce
--- /dev/null
+++ b/_overviews/scala3-book/fp-pure-functions.md
@@ -0,0 +1,140 @@
+---
+title: Pure Functions
+type: section
+description: This section looks at the use of pure functions in functional programming.
+languages: [ru, zh-cn]
+num: 44
+previous-page: fp-immutable-values
+next-page: fp-functions-are-values
+---
+
+
+Another feature that Scala offers to help you write functional code is the ability to write pure functions.
+A _pure function_ can be defined like this:
+
+- A function `f` is pure if, given the same input `x`, it always returns the same output `f(x)`
+- The function’s output depends _only_ on its input variables and its implementation
+- It only computes the output and does not modify the world around it
+
+This implies:
+- It doesn’t modify its input parameters
+- It doesn’t mutate any hidden state
+- It doesn’t have any “back doors”: It doesn’t read data from the outside world (including the console, web services, databases, files, etc.), or write data to the outside world
+
+As a result of this definition, any time you call a pure function with the same input value(s), you’ll always get the same result.
+For example, you can call a `double` function an infinite number of times with the input value `2`, and you’ll always get the result `4`.
+
+
+
+## Examples of pure functions
+
+Given that definition, as you can imagine, methods like these in the *scala.math._* package are pure functions:
+
+- `abs`
+- `ceil`
+- `max`
+
+These `String` methods are also pure functions:
+
+- `isEmpty`
+- `length`
+- `substring`
+
+Most methods on the Scala collections classes also work as pure functions, including `drop`, `filter`, `map`, and many more.
+
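+For example, these calls always return the same results for the same inputs, and they never modify the list they’re called on (a quick sketch):
+
+{% tabs fp-pure-collections-sketch %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val xs = List(1, 2, 3, 4, 5)
+
+xs.drop(2)         // List(3, 4, 5)
+xs.filter(_ > 2)   // List(3, 4, 5)
+xs.map(_ * 10)     // List(10, 20, 30, 40, 50)
+// xs is still List(1, 2, 3, 4, 5)
+```
+{% endtab %}
+
+{% endtabs %}
+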
+> In Scala, _functions_ and _methods_ are almost completely interchangeable, so even though we use the common industry term “pure function,” this term can be used to describe both functions and methods.
+> If you’re interested in how methods can be used like functions, see the [Eta Expansion][eta] discussion.
+
+
+
+## Examples of impure functions
+
+Conversely, the following functions are _impure_ because they violate the definition.
+
+- `println` -- methods that interact with the console, files, databases, web services, sensors, etc., are all impure.
+- `currentTimeMillis` -- date- and time-related methods are all impure, because their output depends on something other than their input parameters.
+- `sys.error` -- exception-throwing methods are impure, because they do not simply return a result.
+
+Impure functions often do one or more of these things:
+
+- Read from hidden state, i.e., they access variables and data not explicitly passed into the function as input parameters
+- Write to hidden state
+- Mutate the parameters they’re given, or mutate hidden variables, such as fields in their containing class
+- Perform some sort of I/O with the outside world
+
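+For instance, this small sketch (the names are made up for illustration) is impure because it reads and writes hidden state, the `count` variable outside the function, and also prints to the console:
+
+{% tabs fp-impure-sketch %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+var count = 0   // hidden, mutable state
+
+// impure: reads and writes `count`, and prints to the console
+def addAndLog(i: Int): Int = {
+  count += i
+  println(s"count is now $count")
+  count
+}
+```
+{% endtab %}
+
+{% endtabs %}
+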
+> In general, you should watch out for functions with a return type of `Unit`.
+> Because those functions do not return anything, logically the only reason to call them is to achieve some side effect.
+> As a consequence, such functions are often impure.
+
+
+## But impure functions are needed ...
+
+Of course an application isn’t very useful if it can’t read or write to the outside world, so people make this recommendation:
+
+> Write the core of your application using pure functions, and then write an impure “wrapper” around that core to interact with the outside world.
+> As someone once said, this is like putting a layer of impure icing on top of a pure cake.
+
+It’s important to note that there are ways to make impure interactions with the outside world feel more pure.
+For instance, you’ll hear about using an `IO` Monad to deal with input and output.
+These topics are beyond the scope of this document, so to keep things simple it can help to think that FP applications have a core of pure functions that are wrapped with other functions to interact with the outside world.
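+
+Here’s a minimal sketch of that idea (the names are made up for illustration): the core logic is a pure function, and a thin impure wrapper handles the console I/O around it:
+
+{% tabs fp-pure-core-sketch %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+// pure core: the same input always produces the same output
+def makeGreeting(name: String): String = s"Hello, $name"
+
+// impure wrapper: reads from and writes to the outside world
+def greetFromConsole(): Unit = {
+  val name = scala.io.StdIn.readLine("Your name? ")
+  println(makeGreeting(name))
+}
+```
+{% endtab %}
+
+{% endtabs %}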
+
+
+
+## Writing pure functions
+
+**Note**: In this section the common industry term “pure function” is often used to refer to Scala methods.
+
+To write pure functions in Scala, just write them using Scala’s method syntax (though you can also use Scala’s function syntax).
+For instance, here’s a pure function that doubles the input value it’s given:
+
+
+{% tabs fp-pure-function %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+def double(i: Int): Int = i * 2
+```
+{% endtab %}
+
+{% endtabs %}
+
+If you’re comfortable with recursion, here’s a pure function that calculates the sum of a list of integers:
+
+{% tabs fp-pure-recursive-function class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+def sum(xs: List[Int]): Int = xs match {
+ case Nil => 0
+ case head :: tail => head + sum(tail)
+}
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+def sum(xs: List[Int]): Int = xs match
+ case Nil => 0
+ case head :: tail => head + sum(tail)
+```
+{% endtab %}
+
+{% endtabs %}
+
+If you understand that code, you’ll see that it meets the pure function definition.
+
+
+
+## Key points
+
+The first key point of this section is the definition of a pure function:
+
+> A _pure function_ is a function that depends only on its declared inputs and its implementation to produce its output.
+> It only computes its output and does not depend on or modify the outside world.
+
+A second key point is that every real-world application interacts with the outside world.
+Therefore, a simplified way to think about functional programs is that they consist of a core of pure functions that are wrapped with other functions that interact with the outside world.
+
+
+
+[eta]: {% link _overviews/scala3-book/fun-eta-expansion.md %}
diff --git a/_overviews/scala3-book/fp-summary.md b/_overviews/scala3-book/fp-summary.md
new file mode 100644
index 0000000000..7695293e9d
--- /dev/null
+++ b/_overviews/scala3-book/fp-summary.md
@@ -0,0 +1,27 @@
+---
+title: Summary
+type: section
+description: This section summarizes the previous functional programming sections.
+languages: [ru, zh-cn]
+num: 47
+previous-page: fp-functional-error-handling
+next-page: types-introduction
+---
+
+
+This chapter provides a high-level introduction to functional programming in Scala.
+The topics covered are:
+
+- What is functional programming?
+- Immutable values
+- Pure functions
+- Functions are values
+- Functional error handling
+
+As mentioned, functional programming is a big topic, so all we can do in this book is to touch on these introductory concepts.
+See the [Reference documentation][reference] for more details.
+
+
+
+[reference]: {{ site.scala3ref }}/overview.html
+
diff --git a/_overviews/scala3-book/fp-what-is-fp.md b/_overviews/scala3-book/fp-what-is-fp.md
new file mode 100644
index 0000000000..2eca848e60
--- /dev/null
+++ b/_overviews/scala3-book/fp-what-is-fp.md
@@ -0,0 +1,31 @@
+---
+title: What is Functional Programming?
+type: section
+description: This section provides an answer to the question, what is functional programming?
+languages: [ru, zh-cn]
+num: 42
+previous-page: fp-intro
+next-page: fp-immutable-values
+---
+
+
+
+[Wikipedia defines _functional programming_](https://en.wikipedia.org/wiki/Functional_programming) like this:
+
+
+
+> Functional programming is a programming paradigm where programs are constructed by applying and composing functions.
+> It is a declarative programming paradigm in which function definitions are trees of expressions that each return a value, rather than a sequence of imperative statements which change the state of the program.
+>
+> In functional programming, functions are treated as first-class citizens, meaning that they can be bound to names (including local identifiers), passed as arguments, and returned from other functions, just as any other data type can.
+> This allows programs to be written in a declarative and composable style, where small functions are combined in a modular manner.
+
+
+It can also be helpful to know that experienced functional programmers have a strong desire to see their code as math: to them, combining pure functions is like combining a series of algebraic equations.
+
+When you write functional code you feel like a mathematician, and once you understand the paradigm, you want to write pure functions that always return _values_---not exceptions or null values---so you can combine (compose) them to create solutions.
+That feeling of writing math-like equations (expressions) is what leads you to use _only_ pure functions and immutable values, because that’s what you use in algebra and other forms of math.
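+
+As a tiny sketch of that “combining equations” idea, pure functions compose directly, much like substituting one equation into another (`andThen` is a standard method on Scala function values):
+
+{% tabs fp-compose-sketch %}
+
+{% tab 'Scala 2 and 3' %}
+```scala
+val double = (i: Int) => i * 2
+val addOne = (i: Int) => i + 1
+
+val doubleThenAddOne = double.andThen(addOne)
+doubleThenAddOne(10)   // 21
+```
+{% endtab %}
+
+{% endtabs %}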
+
+Functional programming is a large topic, and there’s no simple way to condense the entire topic into one chapter, but hopefully the following sections will provide an overview of the main topics, and show some of the tools Scala provides for writing functional code.
+
+
+
diff --git a/_overviews/scala3-book/fun-anonymous-functions.md b/_overviews/scala3-book/fun-anonymous-functions.md
new file mode 100644
index 0000000000..428186b968
--- /dev/null
+++ b/_overviews/scala3-book/fun-anonymous-functions.md
@@ -0,0 +1,196 @@
+---
+title: Anonymous Functions
+type: section
+description: This page shows how to use anonymous functions in Scala, including examples with the List class 'map' and 'filter' functions.
+languages: [ru, zh-cn]
+num: 29
+previous-page: fun-intro
+next-page: fun-function-variables
+---
+
+An anonymous function---also referred to as a *lambda*---is a block of code that’s passed as an argument to a higher-order function.
+Wikipedia defines an [anonymous function](https://en.wikipedia.org/wiki/Anonymous_function) as, “a function definition that is not bound to an identifier.”
+
+For example, given a list like this:
+
+{% tabs fun-anonymous-1 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val ints = List(1, 2, 3)
+```
+{% endtab %}
+{% endtabs %}
+
+You can create a new list by doubling each element in `ints`, using the `List` class `map` method and your custom anonymous function:
+
+{% tabs fun-anonymous-2 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val doubledInts = ints.map(_ * 2) // List(2, 4, 6)
+```
+{% endtab %}
+{% endtabs %}
+
+As the comment shows, `doubledInts` contains the list, `List(2, 4, 6)`.
+In that example, this portion of the code is an anonymous function:
+
+{% tabs fun-anonymous-3 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+_ * 2
+```
+{% endtab %}
+{% endtabs %}
+
+This is a shorthand way of saying, “Multiply a given element by 2.”
+
+## Longer forms
+
+Once you’re comfortable with Scala, you’ll use that form all the time to write anonymous functions that use one variable at one spot in the function.
+But if you prefer, you can also write them using longer forms, so in addition to writing this code:
+
+{% tabs fun-anonymous-4 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val doubledInts = ints.map(_ * 2)
+```
+{% endtab %}
+{% endtabs %}
+
+you can also write it using these forms:
+
+{% tabs fun-anonymous-5 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val doubledInts = ints.map((i: Int) => i * 2)
+val doubledInts = ints.map((i) => i * 2)
+val doubledInts = ints.map(i => i * 2)
+```
+{% endtab %}
+{% endtabs %}
+
+All of these lines have the exact same meaning: Double each element in `ints` to create a new list, `doubledInts`.
+(The syntax of each form is explained in a few moments.)
+
+If you’re familiar with Java, it may help to know that those `map` examples are the equivalent of this Java code:
+
+{% tabs fun-anonymous-5-b %}
+{% tab 'Java' %}
+```java
+List<Integer> ints = List.of(1, 2, 3);
+List<Integer> doubledInts = ints.stream()
+    .map(i -> i * 2)
+    .collect(Collectors.toList());
+```
+{% endtab %}
+{% endtabs %}
+
+## Shortening anonymous functions
+
+When you want to be explicit, you can write an anonymous function using this long form:
+
+{% tabs fun-anonymous-6 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val doubledInts = ints.map((i: Int) => i * 2)
+```
+{% endtab %}
+{% endtabs %}
+
+The anonymous function in that expression is this:
+
+{% tabs fun-anonymous-7 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+(i: Int) => i * 2
+```
+{% endtab %}
+{% endtabs %}
+
+If you’re not familiar with this syntax, it helps to think of the `=>` symbol as a transformer, because the expression *transforms* the parameter list on the left side of the symbol (an `Int` variable named `i`) into a new result using the algorithm on the right side of the `=>` symbol (in this case, an expression that doubles the `Int`).
+
+### Shortening that expression
+
+This long form can be shortened, as will be shown in the following steps.
+First, here’s that longest and most explicit form again:
+
+{% tabs fun-anonymous-8 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val doubledInts = ints.map((i: Int) => i * 2)
+```
+{% endtab %}
+{% endtabs %}
+
+Because the Scala compiler can infer from the data in `ints` that `i` is an `Int`, the `Int` declaration can be removed:
+
+{% tabs fun-anonymous-9 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val doubledInts = ints.map((i) => i * 2)
+```
+{% endtab %}
+{% endtabs %}
+
+Because there’s only one argument, the parentheses around the parameter `i` aren’t needed:
+
+{% tabs fun-anonymous-10 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val doubledInts = ints.map(i => i * 2)
+```
+{% endtab %}
+{% endtabs %}
+
+Because Scala lets you use the `_` symbol instead of a variable name when the parameter appears only once in your function, the code can be simplified even more:
+
+{% tabs fun-anonymous-11 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val doubledInts = ints.map(_ * 2)
+```
+{% endtab %}
+{% endtabs %}
+
+### Going even shorter
+
+In other examples, you can simplify your anonymous functions further.
+For instance, beginning with the most explicit form, you can print each element in `ints` using this anonymous function with the `List` class `foreach` method:
+
+{% tabs fun-anonymous-12 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+ints.foreach((i: Int) => println(i))
+```
+{% endtab %}
+{% endtabs %}
+
+As before, the `Int` declaration isn’t required, and because there’s only one argument, the parentheses around `i` aren’t needed:
+
+{% tabs fun-anonymous-13 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+ints.foreach(i => println(i))
+```
+{% endtab %}
+{% endtabs %}
+
+Because `i` is used only once in the body of the function, the expression can be further simplified with the `_` symbol:
+
+{% tabs fun-anonymous-14 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+ints.foreach(println(_))
+```
+{% endtab %}
+{% endtabs %}
+
+Finally, if an anonymous function consists of one method call that takes a single argument, you don’t need to explicitly name and specify the argument, so you can write only the name of the method (here, `println`):
+
+{% tabs fun-anonymous-15 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+ints.foreach(println)
+```
+{% endtab %}
+{% endtabs %}
diff --git a/_overviews/scala3-book/fun-eta-expansion.md b/_overviews/scala3-book/fun-eta-expansion.md
new file mode 100644
index 0000000000..a435a4284b
--- /dev/null
+++ b/_overviews/scala3-book/fun-eta-expansion.md
@@ -0,0 +1,134 @@
+---
+title: Eta-Expansion
+type: section
+description: This page discusses Eta-Expansion, the Scala technology that automatically and transparently converts methods into functions.
+languages: [ru, zh-cn]
+num: 32
+previous-page: fun-partial-functions
+next-page: fun-hofs
+---
+
+
+When you look at the Scaladoc for the `map` method on Scala collections classes, you see that it’s defined to accept a _function_ value:
+
+{% tabs fun_1 %}
+{% tab 'Scala 2 and 3' for=fun_1 %}
+
+```scala
+def map[B](f: A => B): List[B]
+// ^^^^^^ function type from `A` to `B`
+```
+
+{% endtab %}
+{% endtabs %}
+
+Indeed, the Scaladoc clearly states, “`f` is the _function_ to apply to each element.”
+But despite that, somehow you can pass a _method_ into `map`, and it still works:
+
+{% tabs fun_2 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+def times10(i: Int) = i * 10 // a method
+List(1, 2, 3).map(times10) // List(10,20,30)
+```
+
+{% endtab %}
+{% endtabs %}
+
+Why does this work? The process behind this is known as _eta-expansion_.
+It converts an expression of _method type_ to an equivalent expression of _function type_, and it does so seamlessly and quietly.
+
+## The differences between methods and functions
+
+The key difference between methods and functions is that _a function is an object_, i.e. it is an instance of a class, and in turn has its own methods (e.g. try `f.apply` on a function `f`).
+
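+For example, a function value is an object with its own `apply` method, so these two calls are equivalent (a quick sketch):
+
+{% tabs fun_apply_example %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+val f = (i: Int) => i * 10   // f is a function object of type Int => Int
+f(5)         // 50
+f.apply(5)   // 50: calling the function’s apply method explicitly
+```
+
+{% endtab %}
+{% endtabs %}
+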
+_Methods_ are not values that can be passed around, i.e. they can only be called via method application (e.g. `foo(arg1, arg2, ...)`). Methods can be _converted_ to a value by creating a function value that will call the method when supplied with the required arguments. This is known as eta-expansion.
+
+More concretely: with automatic eta-expansion, the compiler automatically converts any _method reference_, without supplied arguments, to an equivalent _anonymous function_ that will call the method. For example, the reference to `times10` in the code above gets rewritten to `x => times10(x)`, as seen here:
+
+{% tabs fun_2_expanded %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+def times10(i: Int) = i * 10
+List(1, 2, 3).map(x => times10(x)) // eta expansion of `.map(times10)`
+```
+
+{% endtab %}
+{% endtabs %}
+
+> For the curious, the term eta-expansion has its origins in the [Lambda Calculus](https://en.wikipedia.org/wiki/Lambda_calculus).
+
+## When does eta-expansion happen?
+
+Automatic eta-expansion is a desugaring that is context-dependent (i.e. the expansion activates conditionally, depending on the code surrounding the method reference).
+
+{% tabs fun_5 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+In Scala 2 eta-expansion only occurs automatically when the expected type is a function type.
+For example, the following will fail:
+```scala
+def isLessThan(x: Int, y: Int): Boolean = x < y
+
+val methods = List(isLessThan)
+// ^^^^^^^^^^
+// error: missing argument list for method isLessThan
+// Unapplied methods are only converted to functions when a function type is expected.
+// You can make this conversion explicit by writing `isLessThan _` or `isLessThan(_,_)` instead of `isLessThan`.
+```
+
+See [below](#manual-eta-expansion) for how to solve this issue with manual eta-expansion.
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+New to Scala 3, method references can be used everywhere as values; they are automatically converted to a function object with a matching type, e.g.
+
+```scala
+def isLessThan(x: Int, y: Int): Boolean = x < y
+
+val methods = List(isLessThan) // works
+```
+
+{% endtab %}
+{% endtabs %}
+
+## Manual eta-expansion
+
+You can always manually eta-expand a method into a function value; here are some examples of how to do that:
+
+{% tabs fun_6 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+
+```scala
+val methodsA = List(isLessThan _) // way 1: expand all parameters
+val methodsB = List(isLessThan(_, _)) // way 2: wildcard application
+val methodsC = List((x, y) => isLessThan(x, y)) // way 3: anonymous function
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' %}
+
+```scala
+val methodsA = List(isLessThan(_, _)) // way 1: wildcard application
+val methodsB = List((x, y) => isLessThan(x, y)) // way 2: anonymous function
+```
+
+{% endtab %}
+{% endtabs %}
+
+## Summary
+
+For the purpose of this introductory book, the important things to know are:
+
+- eta-expansion is a helpful desugaring that lets you use methods just like functions,
+- the automatic eta-expansion has been improved in Scala 3 to be almost completely seamless.
+
+For more details on how this works, see the [Eta Expansion page][eta_expansion] in the Reference documentation.
+
+[eta_expansion]: {{ site.scala3ref }}/changed-features/eta-expansion.html
+[extension]: {% link _overviews/scala3-book/ca-extension-methods.md %}
+[toplevel]: {% link _overviews/scala3-book/taste-toplevel-definitions.md %}
diff --git a/_overviews/scala3-book/fun-function-variables.md b/_overviews/scala3-book/fun-function-variables.md
new file mode 100644
index 0000000000..248a334edf
--- /dev/null
+++ b/_overviews/scala3-book/fun-function-variables.md
@@ -0,0 +1,167 @@
+---
+title: Function Variables
+type: section
+description: This page shows how to use function variables in Scala.
+languages: [ru, zh-cn]
+num: 30
+previous-page: fun-anonymous-functions
+next-page: fun-partial-functions
+---
+
+
+
+Going back to this example from the previous section:
+
+{% tabs fun-function-variables-1 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val doubledInts = ints.map((i: Int) => i * 2)
+```
+{% endtab %}
+{% endtabs %}
+
+We noted that this part of the expression is an anonymous function:
+
+{% tabs fun-function-variables-2 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+(i: Int) => i * 2
+```
+{% endtab %}
+{% endtabs %}
+
+The reason it’s called *anonymous* is that it’s not assigned to a variable, and therefore doesn’t have a name.
+
+However, an anonymous function---also known as a *function literal*---can be assigned to a variable to create a *function variable*:
+
+{% tabs fun-function-variables-3 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val double = (i: Int) => i * 2
+```
+{% endtab %}
+{% endtabs %}
+
+This creates a function variable named `double`.
+In this expression, the original function literal is on the right side of the `=` symbol:
+
+{% tabs fun-function-variables-4 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val double = (i: Int) => i * 2
+             -----------------
+```
+{% endtab %}
+{% endtabs %}
+
+the new variable name is on the left side:
+
+{% tabs fun-function-variables-5 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val double = (i: Int) => i * 2
+    ------
+```
+{% endtab %}
+{% endtabs %}
+
+and the function’s parameter list is underlined here:
+
+{% tabs fun-function-variables-6 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val double = (i: Int) => i * 2
+             --------
+```
+{% endtab %}
+{% endtabs %}
+
+Like the parameter list for a method, this means that the `double` function takes one parameter, an `Int` named `i`.
+You can see in the REPL that `double` has the type `Int => Int`, meaning that it takes a single `Int` parameter and returns an `Int`:
+
+{% tabs fun-function-variables-7 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+scala> val double = (i: Int) => i * 2
+val double: Int => Int = ...
+```
+{% endtab %}
+{% endtabs %}
+
+
+### Invoking the function
+
+Now you can call the `double` function like this:
+
+{% tabs fun-function-variables-8 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val x = double(2) // 4
+```
+{% endtab %}
+{% endtabs %}
+
+You can also pass `double` into a `map` call:
+
+{% tabs fun-function-variables-9 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+List(1, 2, 3).map(double) // List(2, 4, 6)
+```
+{% endtab %}
+{% endtabs %}
+
+Furthermore, when you have other functions of the `Int => Int` type:
+
+{% tabs fun-function-variables-10 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val triple = (i: Int) => i * 3
+```
+{% endtab %}
+{% endtabs %}
+
+you can store them in a `List` or `Map`:
+
+{% tabs fun-function-variables-11 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val functionList = List(double, triple)
+
+val functionMap = Map(
+ "2x" -> double,
+ "3x" -> triple
+)
+```
+{% endtab %}
+{% endtabs %}
+
+If you paste those expressions into the REPL, you’ll see that they have these types:
+
+{% tabs fun-function-variables-12 %}
+{% tab 'Scala 2 and 3' %}
+````
+// a List that contains functions of the type `Int => Int`
+functionList: List[Int => Int]
+
+// a Map whose keys have the type `String`, and whose
+// values have the type `Int => Int`
+functionMap: Map[String, Int => Int]
+````
+{% endtab %}
+{% endtabs %}
+
+
+
+## Key points
+
+The important parts here are:
+
+- To create a function variable, just assign a variable name to a function literal
+- Once you have a function, you can treat it like any other variable, i.e., like a `String` or `Int` variable
+
+And thanks to the improved [Eta Expansion][eta_expansion] functionality in Scala 3, you can treat *methods* in the same way.
+
+
+
+[eta_expansion]: {% link _overviews/scala3-book/fun-eta-expansion.md %}
diff --git a/_overviews/scala3-book/fun-hofs.md b/_overviews/scala3-book/fun-hofs.md
new file mode 100644
index 0000000000..943845cfc6
--- /dev/null
+++ b/_overviews/scala3-book/fun-hofs.md
@@ -0,0 +1,381 @@
+---
+title: Higher-Order Functions
+type: section
+description: This page demonstrates how to create and use higher-order functions in Scala.
+languages: [ru, zh-cn]
+num: 33
+previous-page: fun-eta-expansion
+next-page: fun-write-map-function
+---
+
+
+A higher-order function (HOF) is often defined as a function that (a) takes other functions as input parameters or (b) returns a function as a result.
+In Scala, HOFs are possible because functions are first-class values.
+
+As an important note, while we use the common industry term “higher-order function” in this document, in Scala this phrase applies to both *methods* and *functions*.
+Thanks to Scala’s [Eta Expansion technology][eta_expansion], they can generally be used in the same places.
+
+## From consumer to creator
+
+In the examples so far in this book you’ve seen how to be a *consumer* of methods that take other functions as input parameters, such as using HOFs like `map` and `filter`.
+In the next few sections you’ll see how to be a *creator* of HOFs, including:
+
+- How to write methods that take functions as input parameters
+- How to return a function from a method
+
+In the process you’ll see:
+
+- The syntax you use to define function input parameters
+- How to call a function once you have a reference to it
+
+As a beneficial side effect of this discussion, once you’re comfortable with this syntax, you’ll use it to define function parameters, anonymous functions, and function variables, and it also becomes easier to read the Scaladoc for higher-order functions.
+
+## Understanding filter’s Scaladoc
+
+To understand how higher-order functions work, it helps to dig into an example.
+For instance, you can understand the type of functions `filter` accepts by looking at its Scaladoc.
+Here’s the `filter` definition in the `List[A]` class:
+
+{% tabs filter-definition %}
+{% tab 'Scala 2 and 3' %}
+```scala
+def filter(p: A => Boolean): List[A]
+```
+{% endtab %}
+{% endtabs %}
+
+This states that `filter` is a method that takes a function parameter named `p`.
+By convention, `p` stands for a *predicate*, which is just a function that returns a `Boolean` value.
+So `filter` takes a predicate `p` as an input parameter, and returns a `List[A]`, where `A` is the type held in the list; if you call `filter` on a `List[Int]`, `A` is the type `Int`.
+
+At this point, if you don’t know the purpose of the `filter` method, all you’d know is that its algorithm somehow uses the predicate `p` to create and return the `List[A]`.
+
+Looking specifically at the function parameter `p`, this part of `filter`’s description:
+
+{% tabs filter-definition_1 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+p: A => Boolean
+```
+{% endtab %}
+{% endtabs %}
+
+means that whatever function you pass in must take the type `A` as an input parameter and return a `Boolean`.
+So if your list is a `List[Int]`, you can replace the type parameter `A` with `Int`, and read that signature like this:
+
+{% tabs filter-definition_2 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+p: Int => Boolean
+```
+{% endtab %}
+{% endtabs %}
+
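+For instance, imagine a simple `isEven` predicate; it isn’t defined elsewhere on this page, so here’s a minimal sketch of what such a method might look like:
+
+{% tabs isEven-definition %}
+{% tab 'Scala 2 and 3' %}
+```scala
+def isEven(i: Int): Boolean = i % 2 == 0
+
+List(1, 2, 3, 4).filter(isEven)   // List(2, 4)
+```
+{% endtab %}
+{% endtabs %}
+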
+Because `isEven` has this type---it transforms an input `Int` into a resulting `Boolean`---it can be used with `filter`.
+
+{% comment %}
+NOTE: (A low-priority issue): The next several sections can be condensed.
+{% endcomment %}
+
+## Writing methods that take function parameters
+
+Given that background, let’s start writing methods that take functions as input parameters.
+
+**Note:** To make the following discussion clear, we’ll refer to the code you’re writing as a *method*, and the code you’re accepting as an input parameter as a *function*.
+
+
+### A first example
+
+To create a method that takes a function parameter, all you have to do is:
+
+1. In your method’s parameter list, define the signature of the function you want to accept
+2. Use that function inside your method
+
+To demonstrate this, here’s a method that takes an input parameter named `f`, where `f` is a function:
+
+{% tabs sayHello-definition %}
+{% tab 'Scala 2 and 3' %}
+```scala
+def sayHello(f: () => Unit): Unit = f()
+```
+{% endtab %}
+{% endtabs %}
+
+This portion of the code---the *type signature*---states that `f` is a function, and defines the types of functions the `sayHello` method will accept:
+
+{% tabs sayHello-definition_1 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+f: () => Unit
+```
+{% endtab %}
+{% endtabs %}
+
+Here’s how this works:
+
+- `f` is the name of the function input parameter.
+ It’s just like naming a `String` parameter `s` or an `Int` parameter `i`.
+- The type signature of `f` specifies the *type* of the functions this method will accept.
+- The `()` portion of `f`’s signature (on the left side of the `=>` symbol) states that `f` takes no input parameters.
+- The `Unit` portion of the signature (on the right side of the `=>` symbol) indicates that `f` should not return a meaningful result.
+- Looking back at the body of the `sayHello` method (on the right side of the `=` symbol), the `f()` statement there invokes the function that’s passed in.
+
+Now that we’ve defined `sayHello`, let’s create a function to match `f`’s signature so we can test it.
+The following function takes no input parameters and returns nothing, so it matches `f`’s type signature:
+
+{% tabs helloJoe-definition %}
+{% tab 'Scala 2 and 3' %}
+```scala
+def helloJoe(): Unit = println("Hello, Joe")
+```
+{% endtab %}
+{% endtabs %}
+
+Because the type signatures match, you can pass `helloJoe` into `sayHello`:
+
+{% tabs sayHello-usage %}
+{% tab 'Scala 2 and 3' %}
+```scala
+sayHello(helloJoe) // prints "Hello, Joe"
+```
+{% endtab %}
+{% endtabs %}
+
+If you’ve never done this before, congratulations:
+You just defined a method named `sayHello` that takes a function as an input parameter, and then invokes that function in its method body.
+
+### sayHello can take many functions
+
+It’s important to know that the beauty of this approach is not that `sayHello` can take *one* function as an input parameter; the beauty is that it can take *any* function that matches `f`’s signature.
+For instance, because this next function takes no input parameters and returns nothing, it also works with `sayHello`:
+
+{% tabs bonjourJulien-definition %}
+{% tab 'Scala 2 and 3' %}
+```scala
+def bonjourJulien(): Unit = println("Bonjour, Julien")
+```
+{% endtab %}
+{% endtabs %}
+
+Here it is in the REPL:
+
+{% tabs bonjourJulien-usage %}
+{% tab 'Scala 2 and 3' %}
+````
+scala> sayHello(bonjourJulien)
+Bonjour, Julien
+````
+{% endtab %}
+{% endtabs %}
+
+This is a good start.
+The only thing to do now is see a few more examples of how to define different type signatures for function parameters.
+
+## The general syntax for defining function input parameters
+
+In this method:
+
+{% tabs sayHello-definition-2 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+def sayHello(f: () => Unit): Unit
+```
+{% endtab %}
+{% endtabs %}
+
+We noted that the type signature for `f` is:
+
+{% tabs sayHello-definition-2_1 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+() => Unit
+```
+{% endtab %}
+{% endtabs %}
+
+We know that this means, “a function that takes no input parameters and returns nothing meaningful (given by `Unit`).”
+
+To demonstrate more type signature examples, here’s a function that takes a `String` parameter and returns an `Int`:
+
+{% tabs sayHello-definition-2_2 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+f: String => Int
+```
+{% endtab %}
+{% endtabs %}
+
+What kinds of functions take a string and return an integer?
+Functions like “string length” and checksum are two examples.
+
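+For example, a “string length” function with that shape can be sketched like this:
+
+{% tabs string-length-example %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val strLength: String => Int = s => s.length
+
+strLength("hello")   // 5
+```
+{% endtab %}
+{% endtabs %}
+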
+Similarly, this function takes two `Int` parameters and returns an `Int`:
+
+{% tabs sayHello-definition-2_3 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+f: (Int, Int) => Int
+```
+{% endtab %}
+{% endtabs %}
+
+Can you imagine what sort of functions match that signature?
+
+The answer is that any function that takes two `Int` input parameters and returns an `Int` matches that signature, so all of these “functions” (methods, really) are a match:
+
+{% tabs add-sub-mul-definitions %}
+{% tab 'Scala 2 and 3' %}
+```scala
+def add(a: Int, b: Int): Int = a + b
+def subtract(a: Int, b: Int): Int = a - b
+def multiply(a: Int, b: Int): Int = a * b
+```
+{% endtab %}
+{% endtabs %}
+
+As you can infer from these examples, the general syntax for defining function parameter type signatures is:
+
+{% tabs add-sub-mul-definitions_1 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+variableName: (parameterTypes ...) => returnType
+```
+{% endtab %}
+{% endtabs %}
+
+> Because functional programming is like creating and combining a series of algebraic equations, it’s common to think about types a *lot* when designing functions and applications.
+> You might say that you “think in types.”
+
+## Taking a function parameter along with other parameters
+
+For HOFs to be really useful, they also need some data to work on.
+For a class like `List`, its `map` method already has data to work on: the data in the `List`.
+But for a standalone HOF that doesn’t have its own data, it should also accept data as other input parameters.
+
+For instance, here’s a method named `executeNTimes` that has two input parameters: a function, and an `Int`:
+
+{% tabs executeNTimes-definition class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+def executeNTimes(f: () => Unit, n: Int): Unit =
+ for (i <- 1 to n) f()
+```
+{% endtab %}
+{% tab 'Scala 3' %}
+```scala
+def executeNTimes(f: () => Unit, n: Int): Unit =
+ for i <- 1 to n do f()
+```
+{% endtab %}
+{% endtabs %}
+
+As the code shows, `executeNTimes` executes the `f` function `n` times.
+Because a simple `for` loop like this has no return value, `executeNTimes` returns `Unit`.
+
+To test `executeNTimes`, define a method that matches `f`’s signature:
+
+{% tabs helloWorld-definition %}
+{% tab 'Scala 2 and 3' %}
+```scala
+// a method of type `() => Unit`
+def helloWorld(): Unit = println("Hello, world")
+```
+{% endtab %}
+{% endtabs %}
+
+Then pass that method into `executeNTimes` along with an `Int`:
+
+{% tabs helloWorld-usage %}
+{% tab 'Scala 2 and 3' %}
+```
+scala> executeNTimes(helloWorld, 3)
+Hello, world
+Hello, world
+Hello, world
+```
+{% endtab %}
+{% endtabs %}
+
+Excellent.
+The `executeNTimes` method executes the `helloWorld` function three times.
+
+### As many parameters as needed
+
+Your methods can continue to get as complicated as necessary.
+For example, this method takes a function of type `(Int, Int) => Int`, along with two input parameters:
+
+{% tabs executeAndPrint-definition %}
+{% tab 'Scala 2 and 3' %}
+```scala
+def executeAndPrint(f: (Int, Int) => Int, i: Int, j: Int): Unit =
+ println(f(i, j))
+```
+{% endtab %}
+{% endtabs %}
+
+Because these `sum` and `multiply` methods match that type signature, they can be passed into `executeAndPrint` along with two `Int` values:
+
+{% tabs executeAndPrint-usage %}
+{% tab 'Scala 2 and 3' %}
+```scala
+def sum(x: Int, y: Int) = x + y
+def multiply(x: Int, y: Int) = x * y
+
+executeAndPrint(sum, 3, 11) // prints 14
+executeAndPrint(multiply, 3, 9) // prints 27
+```
+{% endtab %}
+{% endtabs %}
+
+## Function type signature consistency
+
+A great thing about learning about Scala’s function type signatures is that the syntax you use to define function input parameters is the same syntax you use to write function literals.
+
+For instance, if you were to write a function that calculates the sum of two integers, you’d write it like this:
+
+{% tabs f-val-definition %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val f: (Int, Int) => Int = (a, b) => a + b
+```
+{% endtab %}
+{% endtabs %}
+
+That code consists of the type signature:
+
+````
+val f: (Int, Int) => Int = (a, b) => a + b
+ -----------------
+````
+
+The input parameters:
+
+````
+val f: (Int, Int) => Int = (a, b) => a + b
+ ------
+````
+
+and the body of the function:
+
+````
+val f: (Int, Int) => Int = (a, b) => a + b
+ -----
+````
+
+Scala’s consistency is shown here, where this function type:
+
+````
+val f: (Int, Int) => Int = (a, b) => a + b
+ -----------------
+````
+
+is the same as the type signature you use to define a function input parameter:
+
+````
+def executeAndPrint(f: (Int, Int) => Int, ...
+ -----------------
+````
+
+Once you’re comfortable with this syntax, you’ll use it to define function parameters, anonymous functions, and function variables, and it becomes easier to read the Scaladoc for higher-order functions.
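+
+As a small illustration of that consistency, here’s a sketch (the names are made up for this example) that uses the same `(Int, Int) => Int` type in all three roles:
+
+{% tabs consistency-sketch %}
+{% tab 'Scala 2 and 3' %}
+```scala
+// 1) as the type of a function input parameter
+def apply2(f: (Int, Int) => Int, a: Int, b: Int): Int = f(a, b)
+
+// 2) as an anonymous function passed to that parameter
+apply2((x, y) => x * y, 3, 4)    // 12
+
+// 3) as the type of a function variable
+val maxFn: (Int, Int) => Int = (a, b) => if (a > b) a else b
+maxFn(3, 4)                      // 4
+```
+{% endtab %}
+{% endtabs %}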
+
+
+
+[eta_expansion]: {% link _overviews/scala3-book/fun-eta-expansion.md %}
diff --git a/_overviews/scala3-book/fun-intro.md b/_overviews/scala3-book/fun-intro.md
new file mode 100644
index 0000000000..66cb6bad81
--- /dev/null
+++ b/_overviews/scala3-book/fun-intro.md
@@ -0,0 +1,14 @@
+---
+title: Functions
+type: chapter
+description: This chapter looks at all topics related to functions in Scala 3.
+languages: [ru, zh-cn]
+num: 28
+previous-page: methods-summary
+next-page: fun-anonymous-functions
+---
+
+
+Where the previous chapter introduced Scala *methods*, this chapter digs into *functions*.
+The topics that are covered include anonymous functions, partial functions, function variables, and higher-order functions (HOFs), including how to create your own HOFs.
+
diff --git a/_overviews/scala3-book/fun-partial-functions.md b/_overviews/scala3-book/fun-partial-functions.md
new file mode 100644
index 0000000000..fe8aaa50eb
--- /dev/null
+++ b/_overviews/scala3-book/fun-partial-functions.md
@@ -0,0 +1,81 @@
+---
+title: Partial Functions
+type: section
+description: This page shows how to use partial functions in Scala.
+num: 31
+previous-page: fun-function-variables
+next-page: fun-eta-expansion
+---
+
+A partial function is a function that may not be defined for all values of its argument type. In Scala, partial functions
+are unary functions implementing the `PartialFunction[A, B]` trait, where `A` is the argument type and `B` the result type.
+
+To define a partial function, use a `case` clause identical to those used in `match` expressions:
+
+{% tabs fun-partial-1 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val doubledOdds: PartialFunction[Int, Int] = {
+ case i if i % 2 == 1 => i * 2
+}
+```
+{% endtab %}
+{% endtabs %}
+
+To check if a partial function is defined for an argument, use the `isDefinedAt` method:
+
+{% tabs fun-partial-2 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+doubledOdds.isDefinedAt(3) // true
+doubledOdds.isDefinedAt(4) // false
+```
+{% endtab %}
+{% endtabs %}
+
+Trying to apply a partial function to an argument that doesn’t belong to its domain results in a `MatchError`:
+
+{% tabs fun-partial-3 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+doubledOdds(4) // Exception in thread "main" scala.MatchError: 4
+```
+{% endtab %}
+{% endtabs %}
+
+### Using partial functions
+
+A partial function can be passed as an argument to a method:
+
+{% tabs fun-partial-4 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val res = List(1, 2, 3).collect({ case i if i % 2 == 1 => i * 2 }) // List(2, 6)
+```
+{% endtab %}
+{% endtabs %}
+
+You can provide a default value for arguments outside the domain with `applyOrElse`:
+
+{% tabs fun-partial-5 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+doubledOdds.applyOrElse(4, _ + 1) // 5
+```
+{% endtab %}
+{% endtabs %}
+
+Two partial functions can be composed with `orElse`: the second function is applied to arguments for which the first
+one is not defined:
+
+{% tabs fun-partial-6 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val incrementedEvens: PartialFunction[Int, Int] = {
+ case i if i % 2 == 0 => i + 1
+}
+
+val res2 = List(1, 2, 3).collect(doubledOdds.orElse(incrementedEvens)) // List(2, 3, 6)
+```
+{% endtab %}
+{% endtabs %}
\ No newline at end of file
diff --git a/_overviews/scala3-book/fun-summary.md b/_overviews/scala3-book/fun-summary.md
new file mode 100644
index 0000000000..50eb480c27
--- /dev/null
+++ b/_overviews/scala3-book/fun-summary.md
@@ -0,0 +1,36 @@
+---
+title: Summary
+type: section
+description: This page provides a summary of the previous 'Functions' sections.
+languages: [ru, zh-cn]
+num: 36
+previous-page: fun-write-method-returns-function
+next-page: packaging-imports
+---
+
+This was a long chapter, so let’s review the key points that were covered.
+
+A higher-order function (HOF) is often defined as a function that takes other functions as input parameters or returns a function as its value.
+In Scala this is possible because functions are first-class values.
+
+Moving through the sections, first you saw:
+
+- You can write anonymous functions as small code fragments
+- You can pass them into the dozens of HOFs (methods) on the collections classes, e.g., methods like `filter` and `map`
+- With these small code fragments and powerful HOFs, you create a lot of functionality with just a little code
+
+After looking at anonymous functions and HOFs, you saw:
+
+- Function variables are simply anonymous functions that have been bound to a variable
+
+After seeing how to be a *consumer* of HOFs, you then saw how to be a *creator* of HOFs.
+Specifically, you saw:
+
+- How to write methods that take functions as input parameters
+- How to return a function from a method
+
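+As a quick recap of those last two points, here’s a minimal sketch (the names are made up for this example) of a method that takes a function, and a method that returns one:
+
+{% tabs fun-summary-recap %}
+{% tab 'Scala 2 and 3' %}
+```scala
+// a method that takes a function as an input parameter
+def applyTwice(f: Int => Int, x: Int): Int = f(f(x))
+applyTwice(_ + 1, 10)   // 12
+
+// a method that returns a function
+def adder(n: Int): Int => Int = (x: Int) => x + n
+val addFive = adder(5)
+addFive(10)             // 15
+```
+{% endtab %}
+{% endtabs %}
+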
+A beneficial side effect of this chapter is that you saw many examples of how to declare type signatures for functions.
+The benefit is that you use that same syntax to define function parameters, anonymous functions, and function variables, and it also becomes easier to read the Scaladoc for higher-order functions like `map`, `filter`, and others.
+
+
+
diff --git a/_overviews/scala3-book/fun-write-map-function.md b/_overviews/scala3-book/fun-write-map-function.md
new file mode 100644
index 0000000000..85fd13b248
--- /dev/null
+++ b/_overviews/scala3-book/fun-write-map-function.md
@@ -0,0 +1,136 @@
+---
+title: Write Your Own map Method
+type: section
+description: This page demonstrates how to create and use higher-order functions in Scala.
+languages: [ru, zh-cn]
+num: 34
+previous-page: fun-hofs
+next-page: fun-write-method-returns-function
+---
+
+
+Now that you’ve seen how to write your own higher-order functions, let’s take a quick look at a more real-world example.
+
+Imagine for a moment that the `List` class doesn’t have its own `map` method, and you want to write your own.
+A good first step when creating functions is to accurately state the problem.
+Focusing only on a `List[Int]`, you state:
+
+> I want to write a `map` method that can be used to apply a function to each element in a `List[Int]` that it’s given, returning the transformed elements as a new list.
+
+Given that statement, you start to write the method signature.
+First, you know that you want to accept a function as a parameter, and that function should transform an `Int` into some type `A`, so you write:
+
+{% tabs map-accept-func-definition %}
+{% tab 'Scala 2 and 3' %}
+```scala
+def map(f: (Int) => A)
+```
+{% endtab %}
+{% endtabs %}
+
+The syntax for using a type parameter requires declaring it in square brackets `[]` before the parameter list, so you add that:
+
+{% tabs map-type-symbol-definition %}
+{% tab 'Scala 2 and 3' %}
+```scala
+def map[A](f: (Int) => A)
+```
+{% endtab %}
+{% endtabs %}
+
+Next, you know that `map` should also accept a `List[Int]`:
+
+{% tabs map-list-int-param-definition %}
+{% tab 'Scala 2 and 3' %}
+```scala
+def map[A](f: (Int) => A, xs: List[Int])
+```
+{% endtab %}
+{% endtabs %}
+
+Finally, you also know that `map` returns a transformed `List` that contains elements of the type `A`:
+
+{% tabs map-with-return-type-definition %}
+{% tab 'Scala 2 and 3' %}
+```scala
+def map[A](f: (Int) => A, xs: List[Int]): List[A] = ???
+```
+{% endtab %}
+{% endtabs %}
+
+That takes care of the method signature.
+Now all you have to do is write the method body.
+A `map` method applies a given function to every element of a given list, producing a new, transformed list.
+One way to do this is with a `for` expression:
+
+{% tabs for-definition class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+for (x <- xs) yield f(x)
+```
+{% endtab %}
+{% tab 'Scala 3' %}
+```scala
+for x <- xs yield f(x)
+```
+{% endtab %}
+{% endtabs %}
+
+`for` expressions often make code surprisingly simple, and for our purposes, that ends up being the entire method body.
+
+Putting it together with the method signature, you now have a standalone `map` method that works with a `List[Int]`:
+
+{% tabs map-function class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+def map[A](f: (Int) => A, xs: List[Int]): List[A] =
+ for (x <- xs) yield f(x)
+```
+{% endtab %}
+{% tab 'Scala 3' %}
+```scala
+def map[A](f: (Int) => A, xs: List[Int]): List[A] =
+ for x <- xs yield f(x)
+```
+{% endtab %}
+{% endtabs %}
+
+
+### Make it generic
+
+As a bonus, notice that the `for` expression doesn’t do anything that depends on the type inside the `List` being `Int`.
+Therefore, you can replace `Int` in the type signature with the type parameter `B`:
+
+{% tabs map-function-full-generic class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+def map[A, B](f: (B) => A, xs: List[B]): List[A] =
+ for (x <- xs) yield f(x)
+```
+{% endtab %}
+{% tab 'Scala 3' %}
+```scala
+def map[A, B](f: (B) => A, xs: List[B]): List[A] =
+ for x <- xs yield f(x)
+```
+{% endtab %}
+{% endtabs %}
+
+Now you have a `map` method that works with any `List`.
+
+These examples demonstrate that `map` works as desired:
+
+{% tabs map-use-example %}
+{% tab 'Scala 2 and 3' %}
+```scala
+def double(i: Int): Int = i * 2
+map(double, List(1, 2, 3)) // List(2, 4, 6)
+
+def strlen(s: String): Int = s.length
+map(strlen, List("a", "bb", "ccc")) // List(1, 2, 3)
+```
+{% endtab %}
+{% endtabs %}
+
+Now that you’ve seen how to write methods that accept functions as input parameters, let’s look at methods that return functions.
+
+
diff --git a/_overviews/scala3-book/fun-write-method-returns-function.md b/_overviews/scala3-book/fun-write-method-returns-function.md
new file mode 100644
index 0000000000..28c05b9cf2
--- /dev/null
+++ b/_overviews/scala3-book/fun-write-method-returns-function.md
@@ -0,0 +1,245 @@
+---
+title: Creating a Method That Returns a Function
+type: section
+description: This page demonstrates how to create and use higher-order functions in Scala.
+languages: [ru, zh-cn]
+num: 35
+previous-page: fun-write-map-function
+next-page: fun-summary
+---
+
+
+Thanks to Scala’s consistency, writing a method that returns a function is similar to everything you’ve seen in the previous sections.
+For example, imagine that you want to write a `greet` method that returns a function.
+Once again we start with a problem statement:
+
+> I want to create a `greet` method that returns a function.
+> That function will take a string parameter and print it using `println`.
+> To simplify this first example, `greet` won’t take any input parameters; it will just build a function and return it.
+
+Given that statement, you can start building `greet`.
+You know it’s going to be a method:
+
+{% tabs fun-write-method-returns-function-1 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+def greet()
+```
+{% endtab %}
+{% endtabs %}
+
+You also know this method will return a function that (a) takes a `String` parameter, and (b) prints that string using `println`.
+Therefore that function has the type `String => Unit`:
+
+{% tabs fun-write-method-returns-function-2 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+def greet(): String => Unit = ???
+ ----------------
+```
+{% endtab %}
+{% endtabs %}
+
+Now you just need a method body.
+You know that the method needs to return a function, and that function takes a `String` and prints it.
+This anonymous function matches that description:
+
+{% tabs fun-write-method-returns-function-3 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+(name: String) => println(s"Hello, $name")
+```
+{% endtab %}
+{% endtabs %}
+
+Now you just return that function from the method:
+
+{% tabs fun-write-method-returns-function-4 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+// a method that returns a function
+def greet(): String => Unit =
+ (name: String) => println(s"Hello, $name")
+```
+{% endtab %}
+{% endtabs %}
+
+Because this method returns a function, you get the function by calling `greet()`.
+This is a good step to do in the REPL because it verifies the type of the new function:
+
+{% tabs fun-write-method-returns-function-5 %}
+{% tab 'Scala 2 and 3' %}
+````
+scala> val greetFunction = greet()
+val greetFunction: String => Unit = Lambda....
+ -----------------------------
+````
+{% endtab %}
+{% endtabs %}
+
+Now you can call `greetFunction`:
+
+{% tabs fun-write-method-returns-function-6 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+greetFunction("Joe") // prints "Hello, Joe"
+```
+{% endtab %}
+{% endtabs %}
+
+Congratulations, you just created a method that returns a function, and then executed that function.
+
+
+
+## Improving the method
+
+Our method would be more useful if you could pass in a greeting, so let’s do that.
+All you have to do is pass the greeting in as a parameter to the `greet` method, and use it in the string inside `println`:
+
+{% tabs fun-write-method-returns-function-7 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+def greet(theGreeting: String): String => Unit =
+ (name: String) => println(s"$theGreeting, $name")
+```
+{% endtab %}
+{% endtabs %}
+
+Now when you call your method, the process is more flexible because you can change the greeting.
+This is what it looks like when you create a function from this method:
+
+{% tabs fun-write-method-returns-function-8 %}
+{% tab 'Scala 2 and 3' %}
+````
+scala> val sayHello = greet("Hello")
+val sayHello: String => Unit = Lambda.....
+ ------------------------
+````
+{% endtab %}
+{% endtabs %}
+
+The REPL type signature output shows that `sayHello` is a function that takes a `String` input parameter and returns `Unit` (nothing).
+So now when you give `sayHello` a `String`, it prints the greeting:
+
+{% tabs fun-write-method-returns-function-9 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+sayHello("Joe") // prints "Hello, Joe"
+```
+{% endtab %}
+{% endtabs %}
+
+You can also change the greeting to create new functions, as desired:
+
+{% tabs fun-write-method-returns-function-10 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val sayCiao = greet("Ciao")
+val sayHola = greet("Hola")
+
+sayCiao("Isabella") // prints "Ciao, Isabella"
+sayHola("Carlos") // prints "Hola, Carlos"
+```
+{% endtab %}
+{% endtabs %}
+
+
+
+## A more real-world example
+
+This technique can be even more useful when your method returns one of many possible functions, like a factory that returns custom-built functions.
+
+For instance, imagine that you want to write a method that returns functions that greet people in different languages.
+We’ll limit this to functions that greet in English or French, depending on a parameter that’s passed into the method.
+
+The first thing you know is that you want to create a method that (a) takes a “desired language” as an input, and (b) returns a function as its result.
+Furthermore, because that function prints a string that it’s given, you know it has the type `String => Unit`.
+With that information you write the method signature:
+
+{% tabs fun-write-method-returns-function-11 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+def createGreetingFunction(desiredLanguage: String): String => Unit = ???
+```
+{% endtab %}
+{% endtabs %}
+
+Next, because you know that the possible functions you’ll return take a string and print it, you can write two anonymous functions for the English and French languages:
+
+{% tabs fun-write-method-returns-function-12 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+(name: String) => println(s"Hello, $name")
+(name: String) => println(s"Bonjour, $name")
+```
+{% endtab %}
+{% endtabs %}
+
+Inside a method it might be a little more readable if you give those anonymous functions some names, so let’s assign them to two variables:
+
+{% tabs fun-write-method-returns-function-13 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val englishGreeting = (name: String) => println(s"Hello, $name")
+val frenchGreeting = (name: String) => println(s"Bonjour, $name")
+```
+{% endtab %}
+{% endtabs %}
+
+Now all you need to do is (a) return `englishGreeting` if the `desiredLanguage` is English, and (b) return `frenchGreeting` if the `desiredLanguage` is French.
+One way to do that is with a `match` expression:
+
+{% tabs fun-write-method-returns-function-14 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+def createGreetingFunction(desiredLanguage: String): String => Unit = {
+ val englishGreeting = (name: String) => println(s"Hello, $name")
+ val frenchGreeting = (name: String) => println(s"Bonjour, $name")
+ desiredLanguage match {
+ case "english" => englishGreeting
+ case "french" => frenchGreeting
+ }
+}
+```
+{% endtab %}
+{% tab 'Scala 3' %}
+```scala
+def createGreetingFunction(desiredLanguage: String): String => Unit =
+ val englishGreeting = (name: String) => println(s"Hello, $name")
+ val frenchGreeting = (name: String) => println(s"Bonjour, $name")
+ desiredLanguage match
+ case "english" => englishGreeting
+ case "french" => frenchGreeting
+```
+{% endtab %}
+{% endtabs %}
+
+And that’s the final method.
+Notice that returning a function value from a method is no different than returning a string or integer value.
+
+This is how `createGreetingFunction` builds a French-greeting function:
+
+{% tabs fun-write-method-returns-function-15 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val greetInFrench = createGreetingFunction("french")
+greetInFrench("Jonathan") // prints "Bonjour, Jonathan"
+```
+{% endtab %}
+{% endtabs %}
+
+And this is how it builds an English-greeting function:
+
+{% tabs fun-write-method-returns-function-16 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val greetInEnglish = createGreetingFunction("english")
+greetInEnglish("Joe") // prints "Hello, Joe"
+```
+{% endtab %}
+{% endtabs %}
+
+If you’re comfortable with that code---congratulations---you now know how to write methods that return functions.
+
+
+
diff --git a/_overviews/scala3-book/interacting-with-java.md b/_overviews/scala3-book/interacting-with-java.md
new file mode 100644
index 0000000000..00a3c5aa8a
--- /dev/null
+++ b/_overviews/scala3-book/interacting-with-java.md
@@ -0,0 +1,483 @@
+---
+title: Interacting with Java
+type: chapter
+description: This page demonstrates how Scala code can interact with Java, and how Java code can interact with Scala code.
+languages: [ru, zh-cn]
+num: 73
+previous-page: tools-worksheets
+next-page: scala-for-java-devs
+---
+
+
+## Introduction
+
+This section looks at how to use Java code in Scala, and the opposite, how to use Scala code in Java.
+
+In general, using Java code in Scala is pretty seamless.
+There are only a few points where you’ll want to use Scala utilities to convert Java concepts to Scala, including:
+
+- Java collections classes
+- The Java `Optional` class
+
+Similarly, if you’re writing Java code and want to use Scala concepts, you’ll want to convert Scala collections and the Scala `Option` class.
+
+The following sections demonstrate the most common conversions you’ll need:
+
+- How to use Java collections in Scala
+- How to use Java `Optional` in Scala
+- Extending Java interfaces in Scala
+- How to use Scala collections in Java
+- How to use Scala `Option` in Java
+- How to use Scala traits in Java
+- How to handle Scala methods that throw exceptions in Java code
+- How to use Scala varargs parameters in Java
+- Create alternate names to use Scala methods in Java
+
+Note that the Java examples in this section assume that you’re using Java 11 or newer.
+
+
+
+## How to use Java collections in Scala
+
+When you’re writing Scala code and an API either requires or produces a Java collection class (from the `java.util` package), it is valid to directly use or create the collection as you would in Java.
+
+However, for idiomatic usage in Scala, such as `for` loops over the collection, or to apply higher-order functions such as `map` and `filter`, you can create a proxy that behaves like a Scala collection.
+
+Here’s an example of how this works.
+Given this API that returns `java.util.List[String]`:
+
+{% tabs foo-definition %}
+{% tab Java %}
+```java
+public interface Foo {
+  static java.util.List<String> getStrings() {
+    return java.util.List.of("a", "b", "c");
+  }
+}
+```
+{% endtab %}
+{% endtabs %}
+
+You can convert that Java list to a Scala `Seq`, using the conversion utilities in the Scala `scala.jdk.CollectionConverters` object:
+
+
+{% tabs foo-usage class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+import scala.jdk.CollectionConverters._
+import scala.collection.mutable
+
+def testList() = {
+ println("Using a Java List in Scala")
+ val javaList: java.util.List[String] = Foo.getStrings()
+ val scalaSeq: mutable.Seq[String] = javaList.asScala
+ for (s <- scalaSeq) println(s)
+}
+```
+{% endtab %}
+{% tab 'Scala 3' %}
+```scala
+import scala.jdk.CollectionConverters.*
+import scala.collection.mutable
+
+def testList() =
+ println("Using a Java List in Scala")
+ val javaList: java.util.List[String] = Foo.getStrings()
+ val scalaSeq: mutable.Seq[String] = javaList.asScala
+ for s <- scalaSeq do println(s)
+```
+{% endtab %}
+{% endtabs %}
+
+In the above code `javaList.asScala` creates a wrapper that adapts a `java.util.List` to Scala's `mutable.Seq` collection.
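+
+If you prefer an immutable Scala collection instead of that mutable wrapper, one option (a sketch that reuses the `javaList` value and the imports from the example above) is to copy the elements with `toList`:
+
+{% tabs foo-usage-immutable %}
+{% tab 'Scala 2 and 3' %}
+```scala
+// copies the elements out of the Java list into an immutable Scala List
+val scalaList: List[String] = javaList.asScala.toList
+```
+{% endtab %}
+{% endtabs %}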
+
+
+## How to use Java `Optional` in Scala
+
+When you are interacting with an API that uses the `java.util.Optional` class in your Scala code, it is fine to construct and use it as in Java.
+
+However, for idiomatic usage in Scala, such as use with `for`, you can convert it to a Scala `Option`.
+
+To demonstrate this, here’s a Java API that returns an `Optional[String]` value:
+
+{% tabs bar-definition %}
+{% tab Java %}
+```java
+public interface Bar {
+  static java.util.Optional<String> optionalString() {
+    return java.util.Optional.of("hello");
+  }
+}
+```
+{% endtab %}
+{% endtabs %}
+
+First import all members from the `scala.jdk.OptionConverters` object, and then use the `toScala` method to convert the `Optional` value to a Scala `Option`:
+
+{% tabs bar-usage class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+import java.util.Optional
+import scala.jdk.OptionConverters._
+
+val javaOptString: Optional[String] = Bar.optionalString
+val scalaOptString: Option[String] = javaOptString.toScala
+```
+{% endtab %}
+{% tab 'Scala 3' %}
+```scala
+import java.util.Optional
+import scala.jdk.OptionConverters.*
+
+val javaOptString: Optional[String] = Bar.optionalString
+val scalaOptString: Option[String] = javaOptString.toScala
+```
+{% endtab %}
+{% endtabs %}
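+
+Once converted, the value can be used like any other Scala `Option`. For instance, here’s a small sketch that reuses the `scalaOptString` value from above:
+
+{% tabs bar-usage-2 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val shout: String = scalaOptString.map(_.toUpperCase).getOrElse("(no value)")
+// shout: "HELLO"
+```
+{% endtab %}
+{% endtabs %}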
+
+## Extending Java interfaces in Scala
+
+If you need to use Java interfaces in your Scala code, extend them just as though they are Scala traits.
+For example, given these three Java interfaces:
+
+{% tabs animal-definition %}
+{% tab Java %}
+```java
+public interface Animal {
+ void speak();
+}
+
+public interface Wagging {
+ void wag();
+}
+
+public interface Running {
+ // an implemented method
+ default void run() {
+ System.out.println("I’m running");
+ }
+}
+```
+{% endtab %}
+{% endtabs %}
+
+you can create a `Dog` class in Scala just as though you were using traits.
+Because `run` has a default implementation, you only need to implement the `speak` and `wag` methods:
+
+{% tabs animal-usage class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+class Dog extends Animal with Wagging with Running {
+ def speak = println("Woof")
+ def wag = println("Tail is wagging")
+}
+
+def useJavaInterfaceInScala = {
+ val d = new Dog()
+ d.speak
+ d.wag
+ d.run
+}
+```
+{% endtab %}
+{% tab 'Scala 3' %}
+```scala
+class Dog extends Animal, Wagging, Running:
+ def speak = println("Woof")
+ def wag = println("Tail is wagging")
+
+def useJavaInterfaceInScala =
+ val d = Dog()
+ d.speak
+ d.wag
+ d.run
+```
+{% endtab %}
+{% endtabs %}
+
+Also notice that in Scala, a Java method defined with an empty parameter list can be called either as in Java, `.wag()`, or without the parentheses, `.wag`.
+
+## How to use Scala collections in Java
+
+When you need to use a Scala collection class in your Java code, use the methods of Scala’s `scala.jdk.javaapi.CollectionConverters` object in your Java code to make the conversions work.
+
+For example, suppose that a Scala API returns a `List[String]` like this:
+
+{% tabs baz-definition class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+object Baz {
+ val strings: List[String] = List("a", "b", "c")
+}
+```
+{% endtab %}
+{% tab 'Scala 3' %}
+```scala
+object Baz:
+ val strings: List[String] = List("a", "b", "c")
+```
+{% endtab %}
+{% endtabs %}
+
+You can access that Scala `List` in your Java code like this:
+
+{% tabs baz-usage %}
+{% tab Java %}
+```java
+import scala.jdk.javaapi.CollectionConverters;
+
+// access the `strings` method with `Baz.strings()`
+scala.collection.immutable.List<String> xs = Baz.strings();
+
+java.util.List<String> listOfStrings = CollectionConverters.asJava(xs);
+
+for (String s: listOfStrings) {
+ System.out.println(s);
+}
+```
+{% endtab %}
+{% endtabs %}
+
+That code can be shortened, but the full steps are shown to demonstrate how the process works.
+Be sure to notice that while `Baz` has a field named `strings`, from Java the field appears as a method, so must be called with parentheses `.strings()`.
+
+
+## How to use Scala `Option` in Java
+
+When you need to use a Scala `Option` in your Java code, you can convert the `Option` to a Java `Optional` value using the `toJava` method of the Scala `scala.jdk.javaapi.OptionConverters` object.
+
+For example, suppose that a Scala API returns an `Option[String]` like this:
+
+{% tabs qux-definition class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+object Qux {
+ val optString: Option[String] = Option("hello")
+}
+```
+{% endtab %}
+{% tab 'Scala 3' %}
+```scala
+object Qux:
+ val optString: Option[String] = Option("hello")
+```
+{% endtab %}
+{% endtabs %}
+
+Then you can access that Scala `Option` in your Java code like this:
+
+{% tabs qux-usage %}
+{% tab Java %}
+```java
+import java.util.Optional;
+import scala.Option;
+import scala.jdk.javaapi.OptionConverters;
+
+Option<String> scalaOptString = Qux.optString();
+Optional<String> javaOptString = OptionConverters.toJava(scalaOptString);
+```
+{% endtab %}
+{% endtabs %}
+
+That code can be shortened, but the full steps are shown to demonstrate how the process works.
+Be sure to notice that while `Qux` has a field named `optString`, from Java the field appears as a method, so must be called with parentheses `.optString()`.
+
+## How to use Scala traits in Java
+
+From Java 8 you can use a Scala trait just like a Java interface, even if the trait has implemented methods.
+For example, given these two Scala traits, one with an implemented method and one with only an interface:
+
+{% tabs scala-trait-definition class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+trait ScalaAddTrait {
+ def sum(x: Int, y: Int) = x + y // implemented
+}
+
+trait ScalaMultiplyTrait {
+ def multiply(x: Int, y: Int): Int // abstract
+}
+```
+{% endtab %}
+{% tab 'Scala 3' %}
+```scala
+trait ScalaAddTrait:
+ def sum(x: Int, y: Int) = x + y // implemented
+
+trait ScalaMultiplyTrait:
+ def multiply(x: Int, y: Int): Int // abstract
+```
+{% endtab %}
+{% endtabs %}
+
+A Java class can implement both of those interfaces, and define the `multiply` method:
+
+{% tabs scala-trait-usage %}
+{% tab Java %}
+```java
+class JavaMath implements ScalaAddTrait, ScalaMultiplyTrait {
+ public int multiply(int a, int b) {
+ return a * b;
+ }
+}
+
+JavaMath jm = new JavaMath();
+System.out.println(jm.sum(3,4)); // 7
+System.out.println(jm.multiply(3,4)); // 12
+```
+{% endtab %}
+{% endtabs %}
+
+
+
+## How to handle Scala methods that throw exceptions in Java code
+
+When you’re writing Scala code using Scala programming idioms, you’ll never write a method that throws an exception.
+But if for some reason you have a Scala method that does throw an exception, and you want Java developers to be able to use that method, add the `@throws` annotation to your Scala method so Java consumers will know the exceptions it can throw.
+
+For example, this Scala `exceptionThrower` method is annotated to declare that it throws an `Exception`:
+
+{% tabs except-throw-definition class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+object SExceptionThrower {
+ @throws[Exception]
+ def exceptionThrower =
+ throw new Exception("Idiomatic Scala methods don’t throw exceptions")
+}
+```
+{% endtab %}
+{% tab 'Scala 3' %}
+```scala
+object SExceptionThrower:
+ @throws[Exception]
+ def exceptionThrower =
+ throw Exception("Idiomatic Scala methods don’t throw exceptions")
+```
+{% endtab %}
+{% endtabs %}
+
+As a result, you’ll need to handle the exception in your Java code.
+For instance, this code won’t compile because it doesn’t handle the exception:
+
+{% tabs except-throw-usage %}
+{% tab Java %}
+```java
+// won’t compile because the exception isn’t handled
+public class ScalaExceptionsInJava {
+ public static void main(String[] args) {
+ SExceptionThrower.exceptionThrower();
+ }
+}
+```
+{% endtab %}
+{% endtabs %}
+
+The compiler gives this error:
+
+````plain
+[error] ScalaExceptionsInJava: unreported exception java.lang.Exception;
+ must be caught or declared to be thrown
+[error] SExceptionThrower.exceptionThrower()
+````
+
+This is good---it’s what you want: the annotation tells the Java compiler that `exceptionThrower` can throw an exception.
+Now when you’re writing Java code you must handle the exception with a `try` block or declare that your Java method throws an exception.
+
+Conversely, if you leave the annotation off of the Scala `exceptionThrower` method, the Java code _will compile_.
+This is probably not what you want, because the Java code may not account for the Scala method throwing the exception.
+
+
+
+## How to use Scala varargs parameters in Java
+
+When a Scala method has a varargs parameter and you want to use that method in Java, mark the Scala method with the `@varargs` annotation.
+For example, the `printAll` method in this Scala object declares a `String*` varargs parameter:
+
+{% tabs vararg-definition class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+import scala.annotation.varargs
+
+object VarargsPrinter {
+ @varargs def printAll(args: String*): Unit = args.foreach(println)
+}
+```
+{% endtab %}
+{% tab 'Scala 3' %}
+```scala
+import scala.annotation.varargs
+
+object VarargsPrinter:
+ @varargs def printAll(args: String*): Unit = args.foreach(println)
+```
+{% endtab %}
+{% endtabs %}
+
+Because `printAll` is declared with the `@varargs` annotation, it can be called from a Java program with a variable number of parameters, as shown in this example:
+
+{% tabs vararg-usage %}
+{% tab Java %}
+```java
+public class JVarargs {
+ public static void main(String[] args) {
+ VarargsPrinter.printAll("Hello", "world");
+ }
+}
+```
+{% endtab %}
+{% endtabs %}
+
+When this code is run, it results in the following output:
+
+````plain
+Hello
+world
+````
+
+
+
+## Create alternate names to use Scala methods in Java
+
+In Scala you might want to create a method name using a symbolic character:
+
+{% tabs add-definition %}
+{% tab 'Scala 2 and 3' %}
+```scala
+def +(a: Int, b: Int) = a + b
+```
+{% endtab %}
+{% endtabs %}
+
+That method name won’t work well in Java, but what you can do in Scala is provide an “alternate” name for the method with the `targetName` annotation, which will be the name of the method when used from Java:
+
+{% tabs add-2-definition class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+import scala.annotation.targetName
+
+object Adder {
+ @targetName("add") def +(a: Int, b: Int) = a + b
+}
+```
+{% endtab %}
+{% tab 'Scala 3' %}
+```scala
+import scala.annotation.targetName
+
+object Adder:
+ @targetName("add") def +(a: Int, b: Int) = a + b
+```
+{% endtab %}
+{% endtabs %}
+
+Now in your Java code you can use the aliased method name `add`:
+
+{% tabs add-2-usage %}
+{% tab Java %}
+```java
+int x = Adder.add(1,1);
+System.out.printf("x = %d\n", x);
+```
+{% endtab %}
+{% endtabs %}
diff --git a/_overviews/scala3-book/introduction.md b/_overviews/scala3-book/introduction.md
new file mode 100644
index 0000000000..b3798aeabb
--- /dev/null
+++ b/_overviews/scala3-book/introduction.md
@@ -0,0 +1,33 @@
+---
+title: Introduction
+type: chapter
+description: This page begins the overview documentation of the Scala 3 language.
+languages: [ru, zh-cn]
+num: 1
+previous-page:
+next-page: scala-features
+---
+
+Welcome to the Scala 3 Book.
+The goal of this book is to provide an informal introduction to the Scala language.
+It touches on all Scala topics, in a relatively light manner.
+If at any time while you’re reading this book you want more information on a specific feature, you’ll find links to our [_Reference_ documentation][reference], which covers many new features of the Scala language in more detail.
+
+
+> If you are interested in the archived Scala 2 edition of the book, you can still access it online.
+> We are currently in the process of merging the two books, and you can help us.
+
+
+Over the course of this book, we hope to demonstrate that Scala is a beautiful, expressive programming language, with a clean, modern syntax, which supports functional programming (FP) and object-oriented programming (OOP), and that provides a safe static type system.
+Scala’s syntax, grammar, and features have been re-thought, debated in an open process, and updated in 2020 to be clearer and easier to understand than ever before.
+
+The book begins with a whirlwind tour of many of Scala’s features in the [“A Taste of Scala” section][taste].
+After that tour, the sections that follow it provide more details on those language features.
+
+## A bit of background
+
+Scala was created by [Martin Odersky](https://en.wikipedia.org/wiki/Martin_Odersky), who studied under [Niklaus Wirth](https://en.wikipedia.org/wiki/Niklaus_Wirth), who created Pascal and several other languages. Mr. Odersky is one of the co-designers of Generic Java, and is also known as the “father” of the `javac` compiler.
+
+[reference]: {{ site.scala3ref }}/overview.html
+[taste]: {% link _overviews/scala3-book/taste-intro.md %}
diff --git a/_overviews/scala3-book/methods-intro.md b/_overviews/scala3-book/methods-intro.md
new file mode 100644
index 0000000000..59e91c3c6c
--- /dev/null
+++ b/_overviews/scala3-book/methods-intro.md
@@ -0,0 +1,20 @@
+---
+title: Methods
+type: chapter
+description: This section introduces methods in Scala 3.
+languages: [ru, zh-cn]
+num: 24
+previous-page: domain-modeling-fp
+next-page: methods-most
+---
+
+
+In Scala 2, _methods_ can be defined inside classes, traits, objects, case classes, and case objects.
+But it gets better: In Scala 3 they can also be defined _outside_ any of those constructs; we say that they are "top-level" definitions, since they are not nested in another definition.
+In short, methods can now be defined anywhere.
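+
+For example, in Scala 3 the following is a complete, valid source file, with both methods defined at the top level (a small sketch; the names are made up):
+
+{% tabs methods-intro-toplevel %}
+{% tab 'Scala 3 Only' %}
+```scala
+// no wrapping object or class is needed
+def square(n: Int): Int = n * n
+
+def describeSquare(n: Int): String = s"$n squared is ${square(n)}"
+```
+{% endtab %}
+{% endtabs %}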
+
+Many features of methods are demonstrated in the next section.
+Because `main` methods require a little more explanation, they’re described in the separate section that follows.
+
+
+
diff --git a/_overviews/scala3-book/methods-main-methods.md b/_overviews/scala3-book/methods-main-methods.md
new file mode 100644
index 0000000000..78071efb49
--- /dev/null
+++ b/_overviews/scala3-book/methods-main-methods.md
@@ -0,0 +1,186 @@
+---
+title: Main Methods in Scala 3
+type: section
+description: This page describes how 'main' methods and the '@main' annotation work in Scala 3.
+languages: [ru, zh-cn]
+num: 26
+previous-page: methods-most
+next-page: methods-summary
+scala3: true
+versionSpecific: true
+---
+
+
+## Writing one-line programs
+
+Scala 3 offers a new way to define programs that can be invoked from the command line: adding a `@main` annotation to a method turns it into the entry point of an executable program:
+
+{% tabs method_1 %}
+{% tab 'Scala 3 Only' for=method_1 %}
+
+```scala
+@main def hello() = println("Hello, World")
+```
+
+{% endtab %}
+{% endtabs %}
+
+To run this program, save the line of code in a file, e.g. *Hello.scala*---the filename doesn’t have to match the method name---and run it with `scala`:
+
+```bash
+$ scala run Hello.scala
+Hello, World
+```
+
+A `@main` annotated method can be written either at the top level (as shown), or inside a statically accessible object.
+In either case, the name of the program is the name of the method, without any object prefixes.
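+
+For example, in this sketch (the object and method names are made up) the entry point lives inside an object, and the resulting program is still named `sayHi`:
+
+{% tabs method_1b %}
+{% tab 'Scala 3 Only' for=method_1b %}
+
+```scala
+object wrapper:
+  @main def sayHi() = println("Hi!")
+```
+
+{% endtab %}
+{% endtabs %}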
+
+Learn more about the `@main` annotation by reading the following sections.
+
+
+
+
+
+### Command line arguments
+
+With this approach your `@main` method can handle command line arguments, and those arguments can have different types.
+For example, given this `@main` method that takes an `Int`, a `String`, and a varargs `String*` parameter:
+
+{% tabs method_2 %}
+{% tab 'Scala 3 Only' for=method_2 %}
+
+```scala
+@main def happyBirthday(age: Int, name: String, others: String*) =
+  val suffix = (age % 100) match
+    case 11 | 12 | 13 => "th"
+    case _ => (age % 10) match
+      case 1 => "st"
+      case 2 => "nd"
+      case 3 => "rd"
+      case _ => "th"
+
+  val sb = StringBuilder(s"Happy $age$suffix birthday, $name")
+  for other <- others do sb.append(" and ").append(other)
+  println(sb.toString)
+```
+
+{% endtab %}
+{% endtabs %}
+
+Pass the arguments after `--`:
+
+```
+$ scala run happyBirthday.scala -- 23 Lisa Peter
+Happy 23rd birthday, Lisa and Peter
+```
+
+As shown, the `@main` method can have an arbitrary number of parameters.
+For each parameter type there must be a [given instance]({% link _overviews/scala3-book/ca-context-parameters.md %}) of the `scala.util.CommandLineParser.FromString` type class that converts an argument `String` to the required parameter type.
+Also as shown, a main method’s parameter list can end in a repeated parameter like `String*` that takes all remaining arguments given on the command line.
+
+The program implemented from an `@main` method checks that there are enough arguments on the command line to fill in all parameters, and that the argument strings can be converted to the required types.
+If a check fails, the program is terminated with an error message:
+
+```
+$ scala run happyBirthday.scala -- 22
+Illegal command line after first argument: more arguments expected
+
+$ scala run happyBirthday.scala -- sixty Fred
+Illegal command line: java.lang.NumberFormatException: For input string: "sixty"
+```
+
+## User-defined types as parameters
+
+As mentioned above, the compiler looks for a given instance of the
+`scala.util.CommandLineParser.FromString` type class for the type of the
+argument. For example, let's say you have a custom `Color` type that you want to
+use as a parameter. You could do that as shown below:
+
+{% tabs method_3 %}
+{% tab 'Scala 3 Only' for=method_3 %}
+
+```scala
+import scala.util.CommandLineParser
+
+enum Color:
+  case Red, Green, Blue
+
+given CommandLineParser.FromString[Color] with
+  def fromString(value: String): Color = Color.valueOf(value)
+
+@main def run(color: Color): Unit =
+  println(s"The color is ${color.toString}")
+```
+
+{% endtab %}
+{% endtabs %}
+
+This works the same way for your own types defined in your program, as well as for
+types you might be using from another library.
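+
+For example, if the code above were saved in a file called *color.scala* (the filename is arbitrary), you could run it like this:
+
+```
+$ scala run color.scala -- Red
+The color is Red
+```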
+
+## The details
+
+The Scala compiler generates a program from an `@main` method `f` as follows:
+
+- It creates a class named `f` in the package where the `@main` method was found.
+- The class has a static method `main` with the usual signature of a Java `main` method: it takes an `Array[String]` as argument and returns `Unit`.
+- The generated `main` method calls method `f` with arguments converted using methods in the `scala.util.CommandLineParser.FromString` object.
+
+For instance, the `happyBirthday` method above generates additional code equivalent to the following class:
+
+{% tabs method_4 %}
+{% tab 'Scala 3 Only' for=method_4 %}
+
+```scala
+final class happyBirthday {
+  import scala.util.{CommandLineParser as CLP}
+  <static> def main(args: Array[String]): Unit =
+    try
+      happyBirthday(
+        CLP.parseArgument[Int](args, 0),
+        CLP.parseArgument[String](args, 1),
+        CLP.parseRemainingArguments[String](args, 2)*)
+    catch {
+      case error: CLP.ParseError => CLP.showError(error)
+    }
+}
+```
+
+> **Note**: In this generated code, the `<static>` modifier expresses that the `main` method is generated as a static method of class `happyBirthday`.
+> This feature is not available for user programs in Scala.
+> Regular “static” members are generated in Scala using objects instead.
+
+{% endtab %}
+{% endtabs %}
+
+## Backwards Compatibility with Scala 2
+
+`@main` methods are the recommended way to generate programs that can be invoked from the command line in Scala 3.
+They replace the previous approach in Scala 2, which was to create an `object` that extends the `App` class.
+
+The previous functionality of `App`, which relied on the “magic” `DelayedInit` trait, is no longer available.
+`App` still exists in limited form for now, but it doesn’t support command line arguments and will be deprecated in the future.
+
+If programs need to cross-build between Scala 2 and Scala 3, it’s recommended to use an `object` with an explicit `main` method and a single `Array[String]` argument instead:
+
+{% tabs method_5 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+object happyBirthday {
+ private def happyBirthday(age: Int, name: String, others: String*) = {
+ ... // same as before
+ }
+ def main(args: Array[String]): Unit =
+ happyBirthday(args(0).toInt, args(1), args.drop(2).toIndexedSeq:_*)
+}
+```
+
+> Note that here we use `:_*` to pass a vararg argument, which remains supported in Scala 3 for backwards compatibility.
+
+{% endtab %}
+{% endtabs %}
+
+If you place that code in a file named *happyBirthday.scala*, you can then compile and run it with `scala`, as shown previously:
+
+```bash
+$ scala run happyBirthday.scala -- 23 Lisa Peter
+Happy 23rd birthday, Lisa and Peter
+```
diff --git a/_overviews/scala3-book/methods-most.md b/_overviews/scala3-book/methods-most.md
new file mode 100644
index 0000000000..2a282cdf28
--- /dev/null
+++ b/_overviews/scala3-book/methods-most.md
@@ -0,0 +1,702 @@
+---
+title: Method Features
+type: section
+description: This section introduces Scala 3 methods, including main methods, extension methods, and more.
+languages: [ru, zh-cn]
+num: 25
+previous-page: methods-intro
+next-page: methods-main-methods
+---
+
+This section introduces the various aspects of how to define and call methods in Scala 3.
+
+## Defining Methods
+
+Scala methods have many features, including these:
+
+- Type parameters
+- Default parameter values
+- Multiple parameter groups
+- Context-provided parameters
+- By-name parameters
+- and more...
+
+Some of these features are demonstrated in this section, but when you’re defining a “simple” method that doesn’t use those features, the syntax looks like this:
+
+{% tabs method_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=method_1 %}
+
+```scala
+def methodName(param1: Type1, param2: Type2): ReturnType = {
+ // the method body
+ // goes here
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=method_1 %}
+
+```scala
+def methodName(param1: Type1, param2: Type2): ReturnType =
+ // the method body
+ // goes here
+end methodName // this is optional
+```
+
+{% endtab %}
+{% endtabs %}
+
+In that syntax:
+
+- The keyword `def` is used to define a method
+- The Scala standard is to name methods using the camel case convention
+- Method parameters are always defined with their type
+- Declaring the method return type is optional
+- Methods can consist of many lines, or just one line
+- Providing the `end methodName` portion after the method body is also optional, and is only recommended for long methods
+
+Here are two examples of a one-line method named `add` that takes two `Int` input parameters.
+The first version explicitly shows the method’s `Int` return type, and the second does not:
+
+{% tabs method_2 %}
+{% tab 'Scala 2 and 3' for=method_2 %}
+
+```scala
+def add(a: Int, b: Int): Int = a + b
+def add(a: Int, b: Int) = a + b
+```
+
+{% endtab %}
+{% endtabs %}
+
+It is recommended to annotate publicly visible methods with their return type.
+Declaring the return type makes the code easier to understand when you revisit it months or years later, or when you read another person’s code.
+
+## Calling methods
+
+Invoking a method is straightforward:
+
+{% tabs method_3 %}
+{% tab 'Scala 2 and 3' for=method_3 %}
+
+```scala
+val x = add(1, 2) // 3
+```
+
+{% endtab %}
+{% endtabs %}
+
+The Scala collections classes have dozens of built-in methods.
+These examples show how to call them:
+
+{% tabs method_4 %}
+{% tab 'Scala 2 and 3' for=method_4 %}
+
+```scala
+val x = List(1, 2, 3)
+
+x.size // 3
+x.contains(1) // true
+x.map(_ * 10) // List(10, 20, 30)
+```
+
+{% endtab %}
+{% endtabs %}
+
+Notice:
+
+- `size` takes no arguments, and returns the number of elements in the list
+- The `contains` method takes one argument, the value to search for
+- `map` takes one argument, a function; in this case an anonymous function is passed into it
+
+## Multiline methods
+
+When a method is longer than one line, start the method body on the second line, indented to the right:
+
+{% tabs method_5 class=tabs-scala-version %}
+{% tab 'Scala 2' for=method_5 %}
+
+```scala
+def addThenDouble(a: Int, b: Int): Int = {
+ // imagine that this body requires multiple lines
+ val sum = a + b
+ sum * 2
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=method_5 %}
+
+```scala
+def addThenDouble(a: Int, b: Int): Int =
+ // imagine that this body requires multiple lines
+ val sum = a + b
+ sum * 2
+```
+
+{% endtab %}
+{% endtabs %}
+
+In that method:
+
+- `sum` is an immutable local variable; it can’t be accessed outside of the method
+- The last line doubles the value of `sum`; this value is returned from the method
+
+When you paste that code into the REPL, you’ll see that it works as desired:
+
+{% tabs method_6 %}
+{% tab 'Scala 2 and 3' for=method_6 %}
+
+```scala
+scala> addThenDouble(1, 1)
+res0: Int = 4
+```
+
+{% endtab %}
+{% endtabs %}
+
+Notice that there’s no need for a `return` statement at the end of the method.
+Because almost everything in Scala is an _expression_---meaning that each line of code returns (or _evaluates to_) a value---there’s no need to use `return`.
+
+This becomes more clear when you condense that method and write it on one line:
+
+{% tabs method_7 %}
+{% tab 'Scala 2 and 3' for=method_7 %}
+
+```scala
+def addThenDouble(a: Int, b: Int): Int = (a + b) * 2
+```
+
+{% endtab %}
+{% endtabs %}
+
+The body of a method can use all the different features of the language:
+
+- `if`/`else` expressions
+- `match` expressions
+- `while` loops
+- `for` loops and `for` expressions
+- Variable assignments
+- Calls to other methods
+- Definitions of other methods
+
+As an example of a real-world multiline method, this `getStackTraceAsString` method converts its `Throwable` input parameter into a well-formatted `String`:
+
+{% tabs method_8 class=tabs-scala-version %}
+{% tab 'Scala 2' for=method_8 %}
+
+```scala
+import java.io.{PrintWriter, StringWriter}
+
+def getStackTraceAsString(t: Throwable): String = {
+  val sw = new StringWriter()
+  t.printStackTrace(new PrintWriter(sw))
+  sw.toString
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=method_8 %}
+
+```scala
+import java.io.{PrintWriter, StringWriter}
+
+def getStackTraceAsString(t: Throwable): String =
+  val sw = StringWriter()
+  t.printStackTrace(PrintWriter(sw))
+  sw.toString
+```
+
+{% endtab %}
+{% endtabs %}
+
+In that method:
+
+- The first line assigns a new instance of `StringWriter` to the value `sw`
+- The second line stores the stack trace content into the `StringWriter`
+- The third line yields the `String` representation of the stack trace
+
+## Default parameter values
+
+Method parameters can have default values.
+In this example, default values are given for both the `timeout` and `protocol` parameters:
+
+{% tabs method_9 class=tabs-scala-version %}
+{% tab 'Scala 2' for=method_9 %}
+
+```scala
+def makeConnection(timeout: Int = 5_000, protocol: String = "http") = {
+ println(f"timeout = ${timeout}%d, protocol = ${protocol}%s")
+ // more code here ...
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=method_9 %}
+
+```scala
+def makeConnection(timeout: Int = 5_000, protocol: String = "http") =
+ println(f"timeout = ${timeout}%d, protocol = ${protocol}%s")
+ // more code here ...
+```
+
+{% endtab %}
+{% endtabs %}
+
+Because the parameters have default values, the method can be called in these ways:
+
+{% tabs method_10 %}
+{% tab 'Scala 2 and 3' for=method_10 %}
+
+```scala
+makeConnection() // timeout = 5000, protocol = http
+makeConnection(2_000) // timeout = 2000, protocol = http
+makeConnection(3_000, "https") // timeout = 3000, protocol = https
+```
+
+{% endtab %}
+{% endtabs %}
+
+Here are a few key points about those examples:
+
+- In the first example no arguments are provided, so the method uses the default parameter values of `5_000` and `http`
+- In the second example, `2_000` is supplied for the `timeout` value, so it’s used, along with the default value for the `protocol`
+- In the third example, values are provided for both parameters, so they’re both used
+
+Notice that by using default parameter values, it appears to the consumer that they can use three different overloaded methods.
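+
+Conceptually, it’s as if the consumer could choose between three overloads like the ones sketched below (the `makeConnection2` name is made up here to avoid clashing with the definition above, and Scala doesn’t literally generate these overloads):
+
+{% tabs method_overloads_sketch %}
+{% tab 'Scala 2 and 3' for=method_overloads_sketch %}
+
+```scala
+def makeConnection2(timeout: Int, protocol: String): Unit =
+  println(f"timeout = ${timeout}%d, protocol = ${protocol}%s")
+def makeConnection2(timeout: Int): Unit = makeConnection2(timeout, "http")
+def makeConnection2(): Unit = makeConnection2(5_000)
+```
+
+{% endtab %}
+{% endtabs %}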
+
+## Named parameters
+
+If you prefer, you can also use the names of the method parameters when calling a method.
+For instance, `makeConnection` can also be called in these ways:
+
+{% tabs method_11 %}
+{% tab 'Scala 2 and 3' for=method_11 %}
+
+```scala
+makeConnection(timeout=10_000)
+makeConnection(protocol="https")
+makeConnection(timeout=10_000, protocol="https")
+makeConnection(protocol="https", timeout=10_000)
+```
+
+{% endtab %}
+{% endtabs %}
+
+In some frameworks named parameters are heavily used.
+They’re also very useful when multiple method parameters have the same type:
+
+{% tabs method_12 %}
+{% tab 'Scala 2 and 3' for=method_12 %}
+
+```scala
+engage(true, true, true, false)
+```
+
+{% endtab %}
+{% endtabs %}
+
+Without help from an IDE that code can be hard to read, but this code is much more clear and obvious:
+
+{% tabs method_13 %}
+{% tab 'Scala 2 and 3' for=method_13 %}
+
+```scala
+engage(
+ speedIsSet = true,
+ directionIsSet = true,
+ picardSaidMakeItSo = true,
+ turnedOffParkingBrake = false
+)
+```
+
+{% endtab %}
+{% endtabs %}
+
+## A suggestion about methods that take no parameters
+
+When a method takes no parameters, it’s said to have an _arity_ level of _arity-0_.
+Similarly, when a method takes one parameter it’s an _arity-1_ method.
+When you create arity-0 methods:
+
+- If the method performs side effects, such as calling `println`, declare the method with empty parentheses
+- If the method does not perform side effects---such as getting the size of a collection, which is similar to accessing a field on the collection---leave the parentheses off
+
+For example, this method performs a side effect, so it’s declared with empty parentheses:
+
+{% tabs method_14 %}
+{% tab 'Scala 2 and 3' for=method_14 %}
+
+```scala
+def speak() = println("hi")
+```
+
+{% endtab %}
+{% endtabs %}
+
+Doing this requires callers of the method to use open parentheses when calling the method:
+
+{% tabs method_15 %}
+{% tab 'Scala 2 and 3' for=method_15 %}
+
+```scala
+speak // error: "method speak must be called with () argument"
+speak() // prints "hi"
+```
+
+{% endtab %}
+{% endtabs %}
+
+While this is just a convention, following it dramatically improves code readability: It makes it easier to understand at a glance that an arity-0 method performs side effects.
+
+{% comment %}
+Some of that wording comes from this page: https://docs.scala-lang.org/style/method-invocation.html
+{% endcomment %}
+
+## Using `if` as a method body
+
+Because `if`/`else` expressions return a value, they can be used as the body of a method.
+Here’s a method named `isTruthy` that implements the Perl definitions of `true` and `false`:
+
+{% tabs method_16 class=tabs-scala-version %}
+{% tab 'Scala 2' for=method_16 %}
+
+```scala
+def isTruthy(a: Any) = {
+ if (a == 0 || a == "" || a == false)
+ false
+ else
+ true
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=method_16 %}
+
+```scala
+def isTruthy(a: Any) =
+ if a == 0 || a == "" || a == false then
+ false
+ else
+ true
+```
+
+{% endtab %}
+{% endtabs %}
+
+These examples show how that method works:
+
+{% tabs method_17 %}
+{% tab 'Scala 2 and 3' for=method_17 %}
+
+```scala
+isTruthy(0) // false
+isTruthy("") // false
+isTruthy("hi") // true
+isTruthy(1.0) // true
+```
+
+{% endtab %}
+{% endtabs %}
+
+## Using `match` as a method body
+
+A `match` expression can also be used as the entire method body, and often is.
+Here’s another version of `isTruthy`, written with a `match` expression:
+
+{% tabs method_18 class=tabs-scala-version %}
+{% tab 'Scala 2' for=method_18 %}
+
+```scala
+def isTruthy(a: Any) = a match {
+ case 0 | "" | false => false
+ case _ => true
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=method_18 %}
+
+```scala
+def isTruthy(a: Matchable) = a match
+ case 0 | "" | false => false
+ case _ => true
+```
+
+> This method works just like the previous method that used an `if`/`else` expression. We use `Matchable` instead of `Any` as the parameter's type to accept any value that supports pattern matching.
+
+> For more details on the `Matchable` trait, see the [Reference documentation][reference_matchable].
+
+[reference_matchable]: {{ site.scala3ref }}/other-new-features/matchable.html
+{% endtab %}
+{% endtabs %}
+
+## Controlling visibility in classes
+
+In classes, objects, traits, and enums, Scala methods are public by default, so the `Dog` instance created here can access the `speak` method:
+
+{% tabs method_19 class=tabs-scala-version %}
+{% tab 'Scala 2' for=method_19 %}
+
+```scala
+class Dog {
+ def speak() = println("Woof")
+}
+
+val d = new Dog
+d.speak() // prints "Woof"
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=method_19 %}
+
+```scala
+class Dog:
+ def speak() = println("Woof")
+
+val d = new Dog
+d.speak() // prints "Woof"
+```
+
+{% endtab %}
+{% endtabs %}
+
+Methods can also be marked as `private`.
+This makes them private to the current class, so they can’t be called or overridden in subclasses:
+
+{% tabs method_20 class=tabs-scala-version %}
+{% tab 'Scala 2' for=method_20 %}
+
+```scala
+class Animal {
+ private def breathe() = println("I’m breathing")
+}
+
+class Cat extends Animal {
+ // this method won’t compile
+ override def breathe() = println("Yo, I’m totally breathing")
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=method_20 %}
+
+```scala
+class Animal:
+ private def breathe() = println("I’m breathing")
+
+class Cat extends Animal:
+ // this method won’t compile
+ override def breathe() = println("Yo, I’m totally breathing")
+```
+
+{% endtab %}
+{% endtabs %}
+
+If you want to make a method private to the current class and also allow subclasses to call it or override it, mark the method as `protected`, as shown with the `speak` method in this example:
+
+{% tabs method_21 class=tabs-scala-version %}
+{% tab 'Scala 2' for=method_21 %}
+
+```scala
+class Animal {
+ private def breathe() = println("I’m breathing")
+ def walk() = {
+ breathe()
+ println("I’m walking")
+ }
+ protected def speak() = println("Hello?")
+}
+
+class Cat extends Animal {
+ override def speak() = println("Meow")
+}
+
+val cat = new Cat
+cat.walk()
+cat.speak()
+cat.breathe() // won’t compile because it’s private
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=method_21 %}
+
+```scala
+class Animal:
+ private def breathe() = println("I’m breathing")
+ def walk() =
+ breathe()
+ println("I’m walking")
+ protected def speak() = println("Hello?")
+
+class Cat extends Animal:
+ override def speak() = println("Meow")
+
+val cat = new Cat
+cat.walk()
+cat.speak()
+cat.breathe() // won’t compile because it’s private
+```
+
+{% endtab %}
+{% endtabs %}
+
+The `protected` setting means:
+
+- The method (or field) can be accessed by other instances of the same class
+- It is not visible to other code in the current package
+- It is available to subclasses (as the short sketch below shows)
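+
+As a minimal sketch of these rules (reusing the `Animal` example above; not code from the original page), a subclass can call a `protected` method, but unrelated code cannot:
+
+{% tabs method_protected_sketch %}
+{% tab 'Scala 3 only' %}
+```scala
+class Animal:
+  protected def speak() = println("Hello?")
+
+class Cat extends Animal:
+  def greet() = speak() // OK: a subclass can call the protected method
+
+val a = Animal()
+// a.speak()            // does not compile: `speak` is not accessible here
+```
+{% endtab %}
+{% endtabs %}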
+
+## Objects can contain methods
+
+Earlier you saw that traits and classes can have methods.
+The Scala `object` keyword is used to create a singleton class, and an object can also contain methods.
+This is a nice way to group a set of “utility” methods.
+For instance, this object contains a collection of methods that work on strings:
+
+{% tabs method_22 class=tabs-scala-version %}
+{% tab 'Scala 2' for=method_22 %}
+
+```scala
+object StringUtils {
+
+  /**
+   * Returns a string that is the same as the input string, but
+   * truncated to the specified length.
+   */
+  def truncate(s: String, length: Int): String = s.take(length)
+
+  /**
+   * Returns true if the string contains only letters and numbers.
+   */
+  def lettersAndNumbersOnly_?(s: String): Boolean =
+    s.matches("[a-zA-Z0-9]+")
+
+  /**
+   * Returns true if the given string contains any whitespace
+   * at all. Assumes that `s` is not null.
+   */
+  def containsWhitespace(s: String): Boolean =
+    s.matches(".*\\s.*")
+
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=method_22 %}
+
+```scala
+object StringUtils:
+
+  /**
+   * Returns a string that is the same as the input string, but
+   * truncated to the specified length.
+   */
+  def truncate(s: String, length: Int): String = s.take(length)
+
+  /**
+   * Returns true if the string contains only letters and numbers.
+   */
+  def lettersAndNumbersOnly_?(s: String): Boolean =
+    s.matches("[a-zA-Z0-9]+")
+
+  /**
+   * Returns true if the given string contains any whitespace
+   * at all. Assumes that `s` is not null.
+   */
+  def containsWhitespace(s: String): Boolean =
+    s.matches(".*\\s.*")
+
+end StringUtils
+```
+
+{% endtab %}
+{% endtabs %}
+
+## Extension methods
+
+There are many situations where you would like to add functionality to closed classes.
+For example, imagine that you have a `Circle` class, but you can’t change its source code.
+It could be defined like this in a third-party library:
+
+{% tabs method_23 %}
+{% tab 'Scala 2 and 3' for=method_23 %}
+
+```scala
+case class Circle(x: Double, y: Double, radius: Double)
+```
+
+{% endtab %}
+{% endtabs %}
+
+When you want to add methods to this class, you can define them as extension methods, like this:
+
+{% tabs method_24 class=tabs-scala-version %}
+{% tab 'Scala 2' for=method_24 %}
+
+```scala
+implicit class CircleOps(c: Circle) {
+  def circumference: Double = c.radius * math.Pi * 2
+  def diameter: Double = c.radius * 2
+  def area: Double = math.Pi * c.radius * c.radius
+}
+```
+
+In Scala 2, use an `implicit class`; you can find more details [here](/overviews/core/implicit-classes.html).
+
+{% endtab %}
+{% tab 'Scala 3' for=method_24 %}
+
+```scala
+extension (c: Circle)
+  def circumference: Double = c.radius * math.Pi * 2
+  def diameter: Double = c.radius * 2
+  def area: Double = math.Pi * c.radius * c.radius
+```
+
+In Scala 3, use the new `extension` construct. For more details, see the [extension methods chapter of this book][extension] or the [Scala 3 reference][reference-ext].
+
+[reference-ext]: {{ site.scala3ref }}/contextual/extension-methods.html
+[extension]: {% link _overviews/scala3-book/ca-extension-methods.md %}
+{% endtab %}
+{% endtabs %}
+
+Now when you have a `Circle` instance named `aCircle`, you can call those methods like this:
+
+{% tabs method_25 %}
+{% tab 'Scala 2 and 3' for=method_25 %}
+
+```scala
+aCircle.circumference
+aCircle.diameter
+aCircle.area
+```
+
+{% endtab %}
+{% endtabs %}
+
+## Even more
+
+There’s even more to know about methods, including how to:
+
+- Call methods on superclasses
+- Define and use by-name parameters
+- Write a method that takes a function parameter
+- Create inline methods
+- Handle exceptions
+- Use vararg input parameters
+- Write methods that have multiple parameter groups (partially-applied functions)
+- Create methods that have type parameters
+
+{% comment %}
+Jamie: there really needs to be better linking here - previously it was to the Scala 3 Reference, which doesn't cover any
+of this
+{% endcomment %}
+See the other chapters in this book for more details on these features.
+
+[reference_extension_methods]: {{ site.scala3ref }}/contextual/extension-methods.html
+[reference_matchable]: {{ site.scala3ref }}/other-new-features/matchable.html
diff --git a/_overviews/scala3-book/methods-summary.md b/_overviews/scala3-book/methods-summary.md
new file mode 100644
index 0000000000..eafac85889
--- /dev/null
+++ b/_overviews/scala3-book/methods-summary.md
@@ -0,0 +1,27 @@
+---
+title: Summary
+type: section
+description: This section summarizes the previous sections on Scala 3 methods.
+languages: [ru, zh-cn]
+num: 27
+previous-page: methods-main-methods
+next-page: fun-intro
+---
+
+
+There’s even more to know about methods, including how to:
+
+- Call methods on superclasses
+- Define and use by-name parameters
+- Write a method that takes a function parameter
+- Create inline methods
+- Handle exceptions
+- Use vararg input parameters
+- Write methods that have multiple parameter groups (partially-applied functions)
+- Create methods that have type parameters
+
+See the [Reference documentation][reference] for more details on these features.
+
+
+
+[reference]: {{ site.scala3ref }}/overview.html
diff --git a/_overviews/scala3-book/packaging-imports.md b/_overviews/scala3-book/packaging-imports.md
new file mode 100644
index 0000000000..f5665c28fa
--- /dev/null
+++ b/_overviews/scala3-book/packaging-imports.md
@@ -0,0 +1,629 @@
+---
+title: Packaging and Imports
+type: chapter
+description: A discussion of using packages and imports to organize your code, build related modules of code, control scope, and help prevent namespace collisions.
+languages: [ru, zh-cn]
+num: 37
+previous-page: fun-summary
+next-page: collections-intro
+---
+
+
+Scala uses *packages* to create namespaces that let you modularize programs and help prevent namespace collisions.
+Scala supports the package-naming style used by Java, and also the “curly brace” namespace notation used by languages like C++ and C#.
+
+The Scala approach to importing members is also similar to Java, and more flexible.
+With Scala you can:
+
+- Import packages, classes, objects, traits, and methods
+- Place import statements anywhere
+- Hide and rename members when you import them
+
+These features are demonstrated in the following examples.
+
+## Creating a package
+
+Packages are created by declaring one or more package names at the top of a Scala file.
+For example, when your domain name is _acme.com_ and you’re working in the _model_ package of an application named _myapp_, your package declaration looks like this:
+
+{% tabs packaging-imports-1 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+package com.acme.myapp.model
+
+class Person ...
+```
+{% endtab %}
+{% endtabs %}
+
+By convention, package names should be all lower case, and the formal naming convention is *\<top-level-domain\>.\<domain-name\>.\<project-name\>.\<module-name\>*.
+
+Although it’s not required, package names typically follow directory structure names, so if you follow this convention, a `Person` class in this project will be found in a *MyApp/src/main/scala/com/acme/myapp/model/Person.scala* file.
+
+### Using multiple packages in the same file
+
+The syntax shown above applies to the entire source file: all the definitions in the file
+`Person.scala` belong to package `com.acme.myapp.model`, according to the package clause
+at the beginning of the file.
+
+Alternatively, it is possible to write package clauses that apply only to the definitions
+they contain:
+
+{% tabs packaging-imports-0 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+package users {
+
+  package administrators {   // the full name of this package is users.administrators
+    class AdminUser           // the full name of this class is users.administrators.AdminUser
+  }
+  package normalusers {       // the full name of this package is users.normalusers
+    class NormalUser          // the full name of this class is users.normalusers.NormalUser
+  }
+}
+```
+
+{% endtab %}
+{% tab 'Scala 3' %}
+
+```scala
+package users:
+
+  package administrators:   // the full name of this package is users.administrators
+    class AdminUser          // the full name of this class is users.administrators.AdminUser
+
+  package normalusers:       // the full name of this package is users.normalusers
+    class NormalUser         // the full name of this class is users.normalusers.NormalUser
+```
+{% endtab %}
+{% endtabs %}
+
+Note that the package names are followed by a colon, and that the definitions within
+a package are indented.
+
+The advantages of this approach are that it allows for package nesting, and provides more obvious control of scope and encapsulation, especially within the same file.
+
+## Import statements, Part 1
+
+Import statements are used to access entities in other packages.
+Import statements fall into two main categories:
+
+- Importing classes, traits, objects, functions, and methods
+- Importing `given` clauses
+
+If you’re used to a language like Java, the first class of import statements is similar to what Java uses, with a slightly different syntax that allows for more flexibility.
+These examples demonstrate some of that flexibility:
+
+{% tabs packaging-imports-2 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+import users._ // import everything from the `users` package
+import users.User // import only the `User` class
+import users.{User, UserPreferences} // import only two selected members
+import users.{UserPreferences => UPrefs} // rename a member as you import it
+```
+
+{% endtab %}
+{% tab 'Scala 3' for=packaging-imports-2 %}
+
+```scala
+import users.* // import everything from the `users` package
+import users.User // import only the `User` class
+import users.{User, UserPreferences} // import only two selected members
+import users.{UserPreferences as UPrefs} // rename a member as you import it
+```
+
+{% endtab %}
+{% endtabs %}
+
+Those examples are meant to give you a taste of how the first class of `import` statements works.
+They’re explained more in the subsections that follow.
+
+Import statements are also used to import `given` instances into scope.
+Those are discussed at the end of this chapter.
+
+A note before moving on:
+
+> Import clauses are not required for accessing members of the same package.
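+
+For example (a small sketch, not from the original page), two classes declared in the same package can refer to each other without any import:
+
+{% tabs packaging-imports-same-package %}
+{% tab 'Scala 3 only' %}
+```scala
+package com.acme.myapp.model
+
+class Person(val name: String)
+
+class Team:
+  // no import needed: Person is in the same package
+  val lead = Person("Kim")
+```
+{% endtab %}
+{% endtabs %}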
+
+### Importing one or more members
+
+In Scala you can import one member from a package like this:
+
+{% tabs packaging-imports-3 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+import scala.concurrent.Future
+```
+{% endtab %}
+{% endtabs %}
+
+and multiple members like this:
+
+{% tabs packaging-imports-4 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+import scala.concurrent.Future
+import scala.concurrent.Promise
+import scala.concurrent.blocking
+```
+{% endtab %}
+{% endtabs %}
+
+When importing multiple members, you can import them more concisely like this:
+
+{% tabs packaging-imports-5 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+import scala.concurrent.{Future, Promise, blocking}
+```
+{% endtab %}
+{% endtabs %}
+
+When you want to import everything from the *scala.concurrent* package, use this syntax:
+
+{% tabs packaging-imports-6 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+import scala.concurrent._
+```
+
+{% endtab %}
+{% tab 'Scala 3' for=packaging-imports-6 %}
+
+```scala
+import scala.concurrent.*
+```
+
+{% endtab %}
+{% endtabs %}
+
+### Renaming members on import
+
+Sometimes it can help to rename entities when you import them to avoid name collisions.
+For instance, if you want to use the Scala `List` class and also the *java.util.List* class at the same time, you can rename the *java.util.List* class when you import it:
+
+{% tabs packaging-imports-7 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+import java.util.{List => JavaList}
+```
+
+{% endtab %}
+{% tab 'Scala 3' for=packaging-imports-7 %}
+
+```scala
+import java.util.{List as JavaList}
+```
+{% endtab %}
+{% endtabs %}
+
+Now you use the name `JavaList` to refer to that class, and use `List` to refer to the Scala list class.
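+
+For instance, this small sketch (not from the original page, and assuming the rename import shown above is in scope) uses both types side by side:
+
+{% tabs packaging-imports-rename-use %}
+{% tab 'Scala 2 and 3' %}
+```scala
+// `JavaList` is the renamed java.util.List; `List` is still the Scala List
+val jList: JavaList[String] = new java.util.ArrayList[String]()
+val sList: List[String] = List("a", "b", "c")
+```
+{% endtab %}
+{% endtabs %}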
+
+You can also rename multiple members at one time using this syntax:
+
+{% tabs packaging-imports-8 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+import java.util.{Date => JDate, HashMap => JHashMap, _}
+```
+
+{% endtab %}
+{% tab 'Scala 3' for=packaging-imports-8 %}
+
+```scala
+import java.util.{Date as JDate, HashMap as JHashMap, *}
+```
+
+{% endtab %}
+{% endtabs %}
+
+That line of code says, “Rename the `Date` and `HashMap` classes as shown, and import everything else in the _java.util_ package without renaming any other members.”
+
+### Hiding members on import
+
+You can also *hide* members during the import process.
+This `import` statement hides the *java.util.Random* class, while importing everything else in the *java.util* package:
+
+{% tabs packaging-imports-9 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+import java.util.{Random => _, _}
+```
+
+{% endtab %}
+{% tab 'Scala 3' for=packaging-imports-9 %}
+
+```scala
+import java.util.{Random as _, *}
+```
+{% endtab %}
+{% endtabs %}
+
+If you try to access the `Random` class it won’t work, but you can access all other members from that package:
+
+{% tabs packaging-imports-10 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val r = new Random // won’t compile
+new ArrayList // works
+```
+{% endtab %}
+{% endtabs %}
+
+#### Hiding multiple members
+
+To hide multiple members during the import process, list them before using the final wildcard import:
+
+{% tabs packaging-imports-11 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+import java.util.{List => _, Map => _, Set => _, _}
+```
+
+{% endtab %}
+{% tab 'Scala 3' for=packaging-imports-11 %}
+
+```scala
+import java.util.{List as _, Map as _, Set as _, *}
+```
+{% endtab %}
+{% endtabs %}
+
+Once again those classes are hidden, but you can use all other classes in *java.util*:
+
+{% tabs packaging-imports-12 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+scala> new ArrayList[String]
+val res0: java.util.ArrayList[String] = []
+```
+{% endtab %}
+{% endtabs %}
+
+Because those Java classes are hidden, you can also use the Scala `List`, `Set`, and `Map` classes without having a naming collision:
+
+{% tabs packaging-imports-13 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+scala> val a = List(1, 2, 3)
+val a: List[Int] = List(1, 2, 3)
+
+scala> val b = Set(1, 2, 3)
+val b: Set[Int] = Set(1, 2, 3)
+
+scala> val c = Map(1 -> 1, 2 -> 2)
+val c: Map[Int, Int] = Map(1 -> 1, 2 -> 2)
+```
+{% endtab %}
+{% endtabs %}
+
+### Use imports anywhere
+
+In Scala, `import` statements can be anywhere.
+They can be used at the top of a source code file:
+
+{% tabs packaging-imports-14 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+package foo
+
+import scala.util.Random
+
+class ClassA {
+  def printRandom(): Unit = {
+    val r = new Random // use the imported class
+    // more code here...
+  }
+}
+```
+
+{% endtab %}
+{% tab 'Scala 3' for=packaging-imports-14 %}
+
+```scala
+package foo
+
+import scala.util.Random
+
+class ClassA:
+  def printRandom(): Unit =
+    val r = new Random // use the imported class
+    // more code here...
+```
+{% endtab %}
+{% endtabs %}
+
+You can also use `import` statements closer to the point where they are needed, if you prefer:
+
+{% tabs packaging-imports-15 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+package foo
+
+class ClassA {
+  import scala.util.Random // inside ClassA
+  def printRandom(): Unit = {
+    val r = new Random
+    // more code here...
+  }
+}
+
+class ClassB {
+  // the Random class is not visible here
+  val r = new Random // this code will not compile
+}
+```
+
+{% endtab %}
+{% tab 'Scala 3' for=packaging-imports-15 %}
+
+```scala
+package foo
+
+class ClassA:
+  import scala.util.Random // inside ClassA
+  def printRandom(): Unit =
+    val r = new Random
+    // more code here...
+
+class ClassB:
+  // the Random class is not visible here
+  val r = new Random // this code will not compile
+```
+
+{% endtab %}
+{% endtabs %}
+
+### “Static” imports
+
+When you want to import members in a way similar to the Java “static import” approach---so you can refer to the member names directly, without having to prefix them with their class name---use the following syntax.
+
+Use this syntax to import all static members of the Java `Math` class:
+
+{% tabs packaging-imports-16 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+import java.lang.Math._
+```
+
+{% endtab %}
+{% tab 'Scala 3' for=packaging-imports-16 %}
+
+```scala
+import java.lang.Math.*
+```
+{% endtab %}
+{% endtabs %}
+
+Now you can access static `Math` class methods like `sin` and `cos` without having to precede them with the class name:
+
+{% tabs packaging-imports-17 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+import java.lang.Math._
+
+val a = sin(0) // 0.0
+val b = cos(PI) // -1.0
+```
+
+{% endtab %}
+{% tab 'Scala 3' for=packaging-imports-17 %}
+
+```scala
+import java.lang.Math.*
+
+val a = sin(0) // 0.0
+val b = cos(PI) // -1.0
+```
+{% endtab %}
+{% endtabs %}
+
+### Packages imported by default
+
+Two packages are implicitly imported into the scope of all of your source code files:
+
+- `java.lang.*`
+- `scala.*`
+
+The members of the Scala object `Predef` are also imported by default.
+
+> If you ever wondered why you can use classes like `List`, `Vector`, `Map`, etc., without importing them, they’re available because of definitions in the `Predef` object.
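+
+For example, this small sketch (not from the original page) compiles without a single `import` statement:
+
+{% tabs packaging-imports-default-sketch %}
+{% tab 'Scala 2 and 3' %}
+```scala
+// no imports needed: these come from java.lang, scala, and scala.Predef
+val xs = List(1, 2, 3)     // scala.collection.immutable.List
+val s = String.valueOf(42) // java.lang.String
+val total = xs.sum         // standard collection methods, no import required
+```
+{% endtab %}
+{% endtabs %}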
+
+### Handling naming conflicts
+
+In the rare event there’s a naming conflict and you need to import something from the root of the project, prefix the package name with `_root_`:
+
+{% tabs packaging-imports-18 class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+package accounts
+
+import _root_.accounts._
+```
+
+{% endtab %}
+{% tab 'Scala 3' for=packaging-imports-18 %}
+
+```scala
+package accounts
+
+import _root_.accounts.*
+```
+{% endtab %}
+{% endtabs %}
+
+## Importing `given` instances
+
+As you’ll see in the [Contextual Abstractions][contextual] chapter, in Scala 3 a special form of the `import` statement is used to import `given` instances.
+The basic form is shown in this example:
+
+{% tabs packaging-imports-19 %}
+{% tab 'Scala 3 only' %}
+```scala
+object A:
+  class TC
+  given tc: TC = ???
+  def f(using TC) = ???
+
+object B:
+  import A.*      // import all non-given members
+  import A.given  // import the given instance
+```
+{% endtab %}
+{% endtabs %}
+
+In this code, the `import A.*` clause of object `B` imports all members of `A` *except* the `given` instance `tc`.
+Conversely, the second import, `import A.given`, imports *only* that `given` instance.
+The two `import` clauses can also be merged into one:
+
+{% tabs packaging-imports-20 %}
+{% tab 'Scala 3 only' %}
+```scala
+object B:
+  import A.{given, *}
+```
+{% endtab %}
+{% endtabs %}
+
+In Scala 2, that style of import does not exist; implicit definitions are always imported by a wildcard import.
+
+### Discussion
+
+The wildcard selector `*` brings all definitions other than givens or extensions into scope, whereas a `given` selector brings all *givens*---including those resulting from extensions---into scope.
+
+These rules have two main benefits:
+
+- It’s more clear where givens in scope are coming from.
+ In particular, it’s not possible to hide imported givens in a long list of other wildcard imports.
+- It enables importing all givens without importing anything else.
+ This is particularly important since givens can be anonymous, so the usual use of named imports is not practical.
+
+### By-type imports
+
+Since givens can be anonymous, it’s not always practical to import them by their name, and wildcard imports are typically used instead.
+*By-type imports* provide a more specific alternative to wildcard imports, which makes it more clear what is imported:
+
+{% tabs packaging-imports-21 %}
+{% tab 'Scala 3 only' %}
+```scala
+import A.{given TC}
+```
+{% endtab %}
+{% endtabs %}
+
+This imports any `given` in `A` that has a type which conforms to `TC`.
+Importing givens of several types `T1,...,Tn` is expressed by multiple `given` selectors:
+
+{% tabs packaging-imports-22 %}
+{% tab 'Scala 3 only' %}
+```scala
+import A.{given T1, ..., given Tn}
+```
+{% endtab %}
+{% endtabs %}
+
+Importing all `given` instances of a parameterized type is expressed by wildcard arguments.
+For example, when you have this `object`:
+
+{% tabs packaging-imports-23 %}
+{% tab 'Scala 3 only' %}
+```scala
+object Instances:
+  given intOrd: Ordering[Int] = ...
+  given listOrd[T: Ordering]: Ordering[List[T]] = ...
+  given ec: ExecutionContext = ...
+  given im: Monoid[Int] = ...
+```
+{% endtab %}
+{% endtabs %}
+
+This import statement imports the `intOrd`, `listOrd`, and `ec` instances, but leaves out the `im` instance because it doesn’t fit any of the specified bounds:
+
+{% tabs packaging-imports-24 %}
+{% tab 'Scala 3 only' %}
+```scala
+import Instances.{given Ordering[?], given ExecutionContext}
+```
+{% endtab %}
+{% endtabs %}
+
+By-type imports can be mixed with by-name imports.
+If both are present in an import clause, by-type imports come last.
+For instance, this import clause imports `im`, `intOrd`, and `listOrd`, but leaves out `ec`:
+
+{% tabs packaging-imports-25 %}
+{% tab 'Scala 3 only' %}
+```scala
+import Instances.{im, given Ordering[?]}
+```
+{% endtab %}
+{% endtabs %}
+
+### An example
+
+As a concrete example, imagine that you have this `MonthConversions` object that contains two `given` definitions:
+
+{% tabs packaging-imports-26 %}
+{% tab 'Scala 3 only' %}
+
+```scala
+object MonthConversions:
+  trait MonthConverter[A]:
+    def convert(a: A): String
+
+  given intMonthConverter: MonthConverter[Int] with
+    def convert(i: Int): String =
+      i match
+        case 1 => "January"
+        case 2 => "February"
+        // more cases here ...
+
+  given stringMonthConverter: MonthConverter[String] with
+    def convert(s: String): String =
+      s match
+        case "jan" => "January"
+        case "feb" => "February"
+        // more cases here ...
+```
+{% endtab %}
+{% endtabs %}
+
+To import those givens into the current scope, use these two `import` statements:
+
+{% tabs packaging-imports-27 %}
+{% tab 'Scala 3 only' %}
+
+```scala
+import MonthConversions.*
+import MonthConversions.{given MonthConverter[?]}
+```
+{% endtab %}
+{% endtabs %}
+
+Now you can create a method that uses those `given` instances:
+
+{% tabs packaging-imports-28 %}
+{% tab 'Scala 3 only' %}
+
+```scala
+def genericMonthConverter[A](a: A)(using monthConverter: MonthConverter[A]): String =
+  monthConverter.convert(a)
+```
+{% endtab %}
+{% endtabs %}
+
+Then you can use that method in your application:
+
+{% tabs packaging-imports-29 %}
+{% tab 'Scala 3 only' %}
+
+```scala
+@main def main =
+  println(genericMonthConverter(1)) // January
+  println(genericMonthConverter("jan")) // January
+```
+{% endtab %}
+{% endtabs %}
+
+As mentioned, one of the key design benefits of the “import given” syntax is to make it clear where givens in scope come from, and it’s clear in these `import` statements that the givens come from the `MonthConversions` object.
+
+[contextual]: {% link _overviews/scala3-book/ca-contextual-abstractions-intro.md %}
diff --git a/_overviews/scala3-book/scala-features.md b/_overviews/scala3-book/scala-features.md
new file mode 100644
index 0000000000..c1d1ca834c
--- /dev/null
+++ b/_overviews/scala3-book/scala-features.md
@@ -0,0 +1,577 @@
+---
+title: Scala Features
+type: chapter
+description: This page discusses the main features of the Scala programming language.
+languages: [ru, zh-cn]
+num: 2
+previous-page: introduction
+next-page: why-scala-3
+---
+
+
+The name _Scala_ comes from the word _scalable_, and true to that name, the Scala language is used to power busy websites and analyze huge data sets.
+This section introduces the features that make Scala a scalable language.
+These features are split into three sections:
+
+- High-level language features
+- Lower-level language features
+- Scala ecosystem features
+
+
+
+{% comment %}
+I think of this section as being like an “elevator pitch.”
+{% endcomment %}
+
+## High-level features
+
+Looking at Scala from the proverbial “30,000 foot view,” you can make the following statements about it:
+
+- It’s a high-level programming language
+- It has a concise, readable syntax
+- It’s statically-typed (but feels dynamic)
+- It has an expressive type system
+- It’s a functional programming (FP) language
+- It’s an object-oriented programming (OOP) language
+- It supports the fusion of FP and OOP
+- Contextual abstractions provide a clear way to implement _term inference_
+- It runs on the JVM (and in the browser)
+- It interacts seamlessly with Java code
+- It’s used for server-side applications (including microservices), big data applications, and can also be used in the browser with Scala.js
+
+The following sections take a quick look at these features.
+
+
+### A high-level language
+
+Scala is considered a high-level language in at least two ways.
+First, like Java and many other modern languages, you don’t deal with low-level concepts like pointers and memory management.
+
+Second, with the use of lambdas and higher-order functions, you write your code at a very high level.
+As the functional programming saying goes, in Scala you write _what_ you want, not _how_ to achieve it.
+That is, we don’t write imperative code like this:
+
+{% tabs scala-features-1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=scala-features-1 %}
+```scala
+import scala.collection.mutable.ListBuffer
+
+def double(ints: List[Int]): List[Int] = {
+  val buffer = new ListBuffer[Int]()
+  for (i <- ints) {
+    buffer += i * 2
+  }
+  buffer.toList
+}
+
+val oldNumbers = List(1, 2, 3)
+val newNumbers = double(oldNumbers)
+```
+{% endtab %}
+{% tab 'Scala 3' for=scala-features-1 %}
+```scala
+import scala.collection.mutable.ListBuffer
+
+def double(ints: List[Int]): List[Int] =
+  val buffer = new ListBuffer[Int]()
+  for i <- ints do
+    buffer += i * 2
+  buffer.toList
+
+val oldNumbers = List(1, 2, 3)
+val newNumbers = double(oldNumbers)
+```
+{% endtab %}
+{% endtabs %}
+
+That code instructs the compiler what to do on a step-by-step basis.
+Instead, we write high-level, functional code using higher-order functions and lambdas like this to compute the same result:
+
+{% tabs scala-features-2 %}
+{% tab 'Scala 2 and 3' for=scala-features-2 %}
+```scala
+val newNumbers = oldNumbers.map(_ * 2)
+```
+{% endtab %}
+{% endtabs %}
+
+
+As you can see, that code is much more concise, easier to read, and easier to maintain.
+
+
+### Concise syntax
+
+Scala has a concise, readable syntax.
+For instance, variables are created concisely, and their types are clear:
+
+{% tabs scala-features-3 %}
+{% tab 'Scala 2 and 3' for=scala-features-3 %}
+```scala
+val nums = List(1,2,3)
+val p = Person("Martin", "Odersky")
+```
+{% endtab %}
+{% endtabs %}
+
+
+Higher-order functions and lambdas make for concise code that’s readable:
+
+{% tabs scala-features-4 %}
+{% tab 'Scala 2 and 3' for=scala-features-4 %}
+```scala
+nums.map(i => i * 2) // long form
+nums.map(_ * 2) // short form
+
+nums.filter(i => i > 1)
+nums.filter(_ > 1)
+```
+{% endtab %}
+{% endtabs %}
+
+Traits, classes, and methods are defined with a clean, light syntax:
+
+{% tabs scala-features-5 class=tabs-scala-version %}
+{% tab 'Scala 2' for=scala-features-5 %}
+```scala mdoc
+trait Animal {
+  def speak(): Unit
+}
+
+trait HasTail {
+  def wagTail(): Unit
+}
+
+class Dog extends Animal with HasTail {
+  def speak(): Unit = println("Woof")
+  def wagTail(): Unit = println("⎞⎜⎛ ⎞⎜⎛")
+}
+```
+{% endtab %}
+{% tab 'Scala 3' for=scala-features-5 %}
+```scala
+trait Animal:
+  def speak(): Unit
+
+trait HasTail:
+  def wagTail(): Unit
+
+class Dog extends Animal, HasTail:
+  def speak(): Unit = println("Woof")
+  def wagTail(): Unit = println("⎞⎜⎛ ⎞⎜⎛")
+```
+{% endtab %}
+{% endtabs %}
+
+
+Studies have shown that the ratio of time developers spend _reading_ code to _writing_ code is at least 10:1, so writing code that is concise _and_ readable is important.
+
+
+### A dynamic feel
+
+Scala is a statically-typed language, but thanks to its type inference capabilities it feels dynamic.
+All of these expressions look like a dynamically-typed language like Python or Ruby, but they’re all Scala:
+
+{% tabs scala-features-6 class=tabs-scala-version %}
+{% tab 'Scala 2' for=scala-features-6 %}
+```scala
+val s = "Hello"
+val p = Person("Al", "Pacino")
+val sum = nums.reduceLeft(_ + _)
+val y = for (i <- nums) yield i * 2
+val z = nums
+ .filter(_ > 100)
+ .filter(_ < 10_000)
+ .map(_ * 2)
+```
+{% endtab %}
+{% tab 'Scala 3' for=scala-features-6 %}
+```scala
+val s = "Hello"
+val p = Person("Al", "Pacino")
+val sum = nums.reduceLeft(_ + _)
+val y = for i <- nums yield i * 2
+val z = nums
+ .filter(_ > 100)
+ .filter(_ < 10_000)
+ .map(_ * 2)
+```
+{% endtab %}
+{% endtabs %}
+
+
+As Heather Miller states, Scala is considered to be a [strong, statically-typed language](https://heather.miller.am/blog/types-in-scala.html), and you get all the benefits of static types:
+
+- Correctness: you catch most errors at compile-time
+- Great IDE support
+ - Reliable code completion
+ - Catching errors at compile-time means catching mistakes as you type
+ - Easy and reliable refactoring
+- You can refactor your code with confidence
+- Method type declarations tell readers what the method does, and help serve as documentation
+- Scalability and maintainability: types help ensure correctness across arbitrarily large applications and development teams
+- Strong typing in combination with excellent inference enables mechanisms like [contextual abstraction]({{ site.scala3ref }}/contextual) that allow you to omit boilerplate code (see the short sketch below). Often, this boilerplate code can be inferred by the compiler, based on type definitions and a given context.
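+
+For example (a small sketch, not from the original page), declaring an `Ordering` once lets the compiler supply it wherever it’s needed:
+
+{% tabs scala-features-inference-sketch %}
+{% tab 'Scala 3 only' %}
+```scala
+case class Person(name: String, age: Int)
+
+// declare once how people are ordered ...
+given Ordering[Person] = Ordering.by(_.age)
+
+// ... and the compiler supplies that ordering wherever it’s needed
+val sorted = List(Person("Ada", 36), Person("Alan", 41)).sorted
+```
+{% endtab %}
+{% endtabs %}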
+
+{% comment %}
+In that list:
+- 'Correctness' and 'Scalability' come from Heather Miller’s page
+- the IDE-related quotes in this section come from the Scala.js website:
+ - catch most errors in the IDE
+ - Easy and reliable refactoring
+ - Reliable code completion
+{% endcomment %}
+
+
+### Expressive type system
+
+{% comment %}
+- this text comes from the current [ScalaTour](https://docs.scala-lang.org/tour/tour-of-scala.html).
+- TODO: all of the URLs will have to be updated
+
+- i removed these items until we can replace them:
+* [Compound types](/tour/compound-types.html)
+* [conversions](/tour/implicit-conversions.html)
+* [Explicitly typed self references](/tour/self-types.html)
+{% endcomment %}
+
+Scala’s type system enforces, at compile-time, that abstractions are used in a safe and coherent manner.
+In particular, the type system supports:
+
+- [Inferred types]({% link _overviews/scala3-book/types-inferred.md %})
+- [Generic classes]({% link _overviews/scala3-book/types-generics.md %})
+- [Variance annotations]({% link _overviews/scala3-book/types-variance.md %})
+- [Upper](/tour/upper-type-bounds.html) and [lower](/tour/lower-type-bounds.html) type bounds
+- [Polymorphic methods](/tour/polymorphic-methods.html)
+- [Intersection types]({% link _overviews/scala3-book/types-intersection.md %})
+- [Union types]({% link _overviews/scala3-book/types-union.md %})
+- [Type lambdas]({{ site.scala3ref }}/new-types/type-lambdas.html)
+- [`given` instances and `using` clauses]({% link _overviews/scala3-book/ca-context-parameters.md %})
+- [Extension methods]({% link _overviews/scala3-book/ca-extension-methods.md %})
+- [Type classes]({% link _overviews/scala3-book/ca-type-classes.md %})
+- [Multiversal equality]({% link _overviews/scala3-book/ca-multiversal-equality.md %})
+- [Opaque type aliases]({% link _overviews/scala3-book/types-opaque-types.md %})
+- [Open classes]({{ site.scala3ref }}/other-new-features/open-classes.html)
+- [Match types]({{ site.scala3ref }}/new-types/match-types.html)
+- [Dependent function types]({{ site.scala3ref }}/new-types/dependent-function-types.html)
+- [Polymorphic function types]({{ site.scala3ref }}/new-types/polymorphic-function-types.html)
+- [Context bounds]({{ site.scala3ref }}/contextual/context-bounds.html)
+- [Context functions]({{ site.scala3ref }}/contextual/context-functions.html)
+- [Inner classes](/tour/inner-classes.html) and [abstract type members](/tour/abstract-type-members.html) as object members
+
+In combination, these features provide a powerful basis for the safe reuse of programming abstractions and for the type-safe extension of software.
+
+
+### A functional programming language
+
+Scala is a functional programming (FP) language, meaning:
+
+- Functions are values, and can be passed around like any other value
+- Higher-order functions are directly supported
+- Lambdas are built in
+- Everything in Scala is an expression that returns a value
+- Syntactically it’s easy to use immutable variables, and their use is encouraged
+- It has a wealth of immutable collection classes in the standard library
+- Those collection classes come with dozens of functional methods: they don’t mutate the collection, but instead return an updated copy of the data (a short sketch follows this list)
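+
+Here is a brief sketch (not from the original page) that touches on several of those points:
+
+{% tabs scala-features-fp-sketch %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val nums = List(1, 2, 3, 4)
+
+// functions are values, and can be passed to higher-order functions
+val isEven: Int => Boolean = _ % 2 == 0
+val evens = nums.filter(isEven)   // List(2, 4)
+
+// the collection is immutable: `map` returns a new list, `nums` is unchanged
+val doubled = nums.map(_ * 2)     // List(2, 4, 6, 8)
+
+// everything is an expression, including `if`
+val label = if (doubled.sum > 10) "big" else "small"   // "big"
+```
+{% endtab %}
+{% endtabs %}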
+
+
+### An object-oriented language
+
+Scala is an object-oriented programming (OOP) language.
+Every value is an instance of a class and every “operator” is a method.
+
+In Scala, all types inherit from a top-level class `Any`, whose immediate children are `AnyVal` (_value types_, such as `Int` and `Boolean`) and `AnyRef` (_reference types_, as in Java).
+This means that the Java distinction between primitive types and boxed types (e.g. `int` vs. `Integer`) isn’t present in Scala.
+Boxing and unboxing is completely transparent to the user.
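+
+As a quick sketch (not from the original page) of those points:
+
+{% tabs scala-features-oop-sketch %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val x = 2 + 3            // the same as 2.+(3): `+` is a method on Int
+val anything: Any = 42   // every value is an instance of a class
+val v: AnyVal = 3.14     // value types such as Double extend AnyVal
+val r: AnyRef = "hi"     // reference types such as String extend AnyRef
+```
+{% endtab %}
+{% endtabs %}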
+
+{% comment %}
+- AnyRef above is wrong in case of strict null checking, no? On the other hand, maybe too much information to state this here
+- probably not worth to mention (too advanced at this point) there is AnyKind
+- Add the “types hierarchy” image here?
+{% endcomment %}
+
+
+### Supports FP/OOP fusion
+
+{% comment %}
+NOTE: This text in the first line comes from this slide: https://x.com/alexelcu/status/996408359514525696
+{% endcomment %}
+
+The essence of Scala is the fusion of functional programming and object-oriented programming in a typed setting:
+
+- Functions for the logic
+- Objects for the modularity
+
+As [Martin Odersky has stated](https://jaxenter.com/current-state-scala-odersky-interview-129495.html), “Scala was designed to show that a fusion of functional and object-oriented programming is possible and practical.”
+
+
+### Term inference, made clearer
+
+Following Haskell, Scala was the second popular language to have some form of _implicits_.
+In Scala 3 these concepts have been completely re-thought and more clearly implemented.
+
+The core idea is _term inference_: Given a type, the compiler synthesizes a “canonical” term that has that type.
+In Scala, a context parameter directly leads to an inferred argument term that could also be written down explicitly.
+
+Use cases for this concept include implementing [type classes]({% link _overviews/scala3-book/ca-type-classes.md %}), establishing context, dependency injection, expressing capabilities, computing new types, and proving relationships between them.
+
+Scala 3 makes this process more clear than ever before.
+Read about contextual abstractions in the [Reference documentation]({{ site.scala3ref }}/contextual).
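+
+Here is a minimal sketch of term inference with `given` and `using` (not from the original page; the `Config` type is hypothetical):
+
+{% tabs scala-features-term-inference-sketch %}
+{% tab 'Scala 3 only' %}
+```scala
+// a hypothetical configuration type, used only for illustration
+case class Config(verbose: Boolean)
+
+def log(msg: String)(using cfg: Config): Unit =
+  if cfg.verbose then println(msg)
+
+given Config = Config(verbose = true)
+
+@main def demo() = log("starting up") // the compiler infers and passes the given Config
+```
+{% endtab %}
+{% endtabs %}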
+
+
+### Client & server
+
+Scala code runs on the Java Virtual Machine (JVM), so you get all of its benefits:
+
+- Security
+- Performance
+- Memory management
+- Portability and platform independence
+- The ability to use the wealth of existing Java and JVM libraries
+
+In addition to running on the JVM, Scala also runs in the browser with Scala.js (and open source third-party tools to integrate popular JavaScript libraries), and native executables can be built with Scala Native and GraalVM.
+
+
+### Seamless Java interaction
+
+You can use Java classes and libraries in your Scala applications, and you can use Scala code in your Java applications.
+In regards to the second point, large libraries like [Akka](https://akka.io) and the [Play Framework](https://www.playframework.com) are written in Scala, and can be used in Java applications.
+
+In regards to the first point, Java classes and libraries are used in Scala applications every day.
+For instance, in Scala you can read files with a Java `BufferedReader` and `FileReader`:
+
+{% tabs scala-features-7 %}
+{% tab 'Scala 2 and 3' for=scala-features-7 %}
+```scala
+import java.io._
+val br = new BufferedReader(new FileReader(filename))
+// read the file with `br` ...
+```
+{% endtab %}
+{% endtabs %}
+
+Using Java code in Scala is generally seamless.
+
+Java collections can also be used in Scala, and if you want to use Scala’s rich collection class methods with them, you can convert them with just a few lines of code:
+
+{% tabs scala-features-8 %}
+{% tab 'Scala 2 and 3' for=scala-features-8 %}
+```scala
+import scala.jdk.CollectionConverters._
+val scalaList: Seq[Integer] = JavaClass.getJavaList().asScala.toSeq
+```
+{% endtab %}
+{% endtabs %}
+
+
+### Wealth of libraries
+
+As you’ll see in the third section of this page, Scala libraries and frameworks like these have been written to power busy websites and work with huge datasets:
+
+1. The [Play Framework](https://www.playframework.com) is a lightweight, stateless, developer-friendly, web-friendly architecture for creating highly-scalable applications
+2. [Apache Spark](https://spark.apache.org) is a unified analytics engine for big data processing, with built-in modules for streaming, SQL, machine learning and graph processing
+
+The [Awesome Scala list](https://github.com/lauris/awesome-scala) shows dozens of additional open source tools that developers have created to build Scala applications.
+
+In addition to server-side programming, [Scala.js](https://www.scala-js.org) is a strongly-typed replacement for writing JavaScript, with open source third-party libraries that include tools to integrate with Facebook’s React library, jQuery, and more.
+
+
+
+{% comment %}
+The Lower-Level Features section is like the second part of an elevator pitch.
+Assuming you told someone about the previous high-level features and then they say, “Tell me more,” this is what you might tell them.
+{% endcomment %}
+
+## Lower-level language features
+
+Where the previous section covered high-level features of Scala, it’s interesting to note that at a high level you can make the same statements about both Scala 2 and Scala 3.
+A decade ago Scala started with a strong foundation of desirable features, and as you’ll see in this section, those benefits have been improved with Scala 3.
+
+At a “sea level” view of the details---i.e., the language features programmers use every day---Scala 3 has significant advantages over Scala 2:
+
+- The ability to create algebraic data types (ADTs) more concisely with enums
+- An even more concise and readable syntax:
+  - The “quiet” control structure syntax is easier to read
+  - Optional braces
+  - Fewer symbols in the code create less visual noise, making it easier to read
+  - The `new` keyword is generally no longer needed when creating class instances
+  - The formality of package objects has been dropped in favor of simpler “top level” definitions
+- A grammar that’s more clear:
+  - Multiple different uses of the `implicit` keyword have been removed; those uses are replaced by more obvious keywords like `given`, `using`, and `extension`, focusing on intent over mechanism (see the [Givens][givens] section for details)
+  - [Extension methods][extension] replace implicit classes with a clearer and simpler mechanism
+  - The addition of the `open` modifier for classes requires the developer to intentionally declare that a class is open for modification, thereby limiting ad-hoc extensions to a code base
+  - [Multiversal equality][multiversal] rules out nonsensical comparisons with `==` and `!=` (i.e., attempting to compare a `Person` to a `Planet`)
+  - Macros are implemented much more easily
+  - Union and intersection types offer a flexible way to model types
+  - Trait parameters replace and simplify early initializers
+  - [Opaque type aliases][opaque_types] replace most uses of value classes, while guaranteeing the absence of boxing
+  - Export clauses provide a simple and general way to express aggregation, which can replace the previous facade pattern of package objects inheriting from classes
+  - The procedure syntax has been dropped, and the varargs syntax has been changed, both to make the language more consistent
+  - The `infix` modifier makes it obvious how you want a method to be applied
+  - The [`@targetName`]({{ site.scala3ref }}/other-new-features/targetName.html) method annotation defines an alternate name for the method, improving Java interoperability, and letting you provide aliases for symbolic operators
+
+It would take too much space to demonstrate all of those features here, but follow the links in the items above to see those features in action.
+All of these features are discussed in detail in the *New*, *Changed*, and *Dropped* features pages in the [Overview documentation][reference].
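+
+As one small taste of the first item in that list, here is a sketch (not from the original page) of an algebraic data type written as a Scala 3 enum:
+
+{% tabs scala-features-adt-sketch %}
+{% tab 'Scala 3 only' %}
+```scala
+enum Shape:
+  case Circle(radius: Double)
+  case Rectangle(width: Double, height: Double)
+
+def area(s: Shape): Double = s match
+  case Shape.Circle(r)       => math.Pi * r * r
+  case Shape.Rectangle(w, h) => w * h
+```
+{% endtab %}
+{% endtabs %}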
+
+
+
+{% comment %}
+CHECKLIST OF ALL ADDED, UPDATED, AND REMOVED FEATURES
+=====================================================
+
+New Features
+------------
+- trait parameters
+- super traits
+- creator applications
+- export clauses
+- opaque type aliases
+- open classes
+- parameter untupling
+- kind polymorphism
+- tupled function
+- threadUnsafe annotation
+- new control syntax
+- optional braces (experimental)
+- explicit nulls
+- safe initialization
+
+CHANGED FEATURES
+----------------
+- numeric literals
+- structural types
+- operators
+- wildcard types
+- type checking
+- type inference
+- implicit resolution
+- implicit conversions
+- overload resolution
+- match expressions
+- vararg patterns
+- pattern bindings
+- pattern matching
+- eta expansion
+- compiler plugins
+- lazy vals initialization
+- main functions
+
+DROPPED FEATURES
+----------------
+- DelayedInit
+- macros
+- existential types
+- type projection
+- do/while syntax
+- procedure syntax
+- package objects
+- early initializers
+- class shadowing
+- limit 22
+- XML literals
+- symbol literals
+- auto-application
+- weak conformance
+- nonlocal returns
+- [this] qualifier
+ - private[this] and protected[this] access modifiers are deprecated
+ and will be phased out
+{% endcomment %}
+
+
+
+
+## Scala ecosystem
+
+{% comment %}
+TODO: I didn’t put much work into this section because I don’t know if you want
+ to add many tools because (a) that can be seen as an endorsement and
+ (b) it creates a section that can need more maintenance than average
+ since tool popularity can wax and wane. One way to avoid the first
+ point is to base the lists on Github stars and activity.
+{% endcomment %}
+
+Scala has a vibrant ecosystem, with libraries and frameworks for every need.
+The [“Awesome Scala” list](https://github.com/lauris/awesome-scala) provides a list of hundreds of open source projects that are available to Scala developers, and the [Scaladex](https://index.scala-lang.org) provides a searchable index of Scala libraries.
+Some of the more notable libraries are listed below.
+
+
+
+### Web development
+
+- The [Play Framework](https://www.playframework.com) followed the Ruby on Rails model to become a lightweight, stateless, developer-friendly, web-friendly architecture for highly-scalable applications
+- [Scalatra](https://scalatra.org) is a tiny, high-performance, async web framework, inspired by Sinatra
+- [Finatra](https://twitter.github.io/finatra) is a framework for building Scala services, created at X (formerly Twitter)
+- [Scala.js](https://www.scala-js.org) is a strongly-typed replacement for JavaScript that provides a safer way to build robust front-end web applications
+- [ScalaJs-React](https://github.com/japgolly/scalajs-react) lifts Facebook’s React library into Scala.js, and endeavours to make it as type-safe and Scala-friendly as possible
+
+HTTP(S) libraries:
+
+- [Akka-http](https://akka.io)
+- [Finch](https://github.com/finagle/finch)
+- [Http4s](https://github.com/http4s/http4s)
+- [Sttp](https://github.com/softwaremill/sttp)
+
+JSON libraries:
+
+- [Argonaut](https://github.com/argonaut-io/argonaut)
+- [Circe](https://github.com/circe/circe)
+- [Json4s](https://github.com/json4s/json4s)
+- [Play-JSON](https://github.com/playframework/play-json)
+
+Serialization:
+
+- [ScalaPB](https://github.com/scalapb/ScalaPB)
+
+### Science and data analysis
+
+- [Algebird](https://github.com/twitter/algebird)
+- [Spire](https://github.com/typelevel/spire)
+- [Squants](https://github.com/typelevel/squants)
+
+
+### Big data
+
+- [Apache Spark](https://github.com/apache/spark)
+- [Apache Flink](https://github.com/apache/flink)
+
+
+### AI, machine learning
+
+- [BigDL](https://github.com/intel-analytics/BigDL) (Distributed Deep Learning Framework for Apache Spark)
+- [TensorFlow Scala](https://github.com/eaplatanios/tensorflow_scala)
+
+
+### Functional Programming & Functional Reactive Programming
+
+FP:
+
+- [Cats](https://github.com/typelevel/cats)
+- [Zio](https://github.com/zio/zio)
+
+Functional reactive programming (FRP):
+
+- [fs2](https://github.com/typelevel/fs2)
+- [monix](https://github.com/monix/monix)
+
+
+### Build tools
+
+- [sbt](https://www.scala-sbt.org)
+- [Gradle](https://gradle.org)
+- [Mill](https://github.com/lihaoyi/mill)
+
+
+
+## Summary
+
+As this page shows, Scala has many terrific programming language features at a high level, at an everyday programming level, and through its developer ecosystem.
+
+
+
+[reference]: {{ site.scala3ref }}/overview.html
+[multiversal]: {% link _overviews/scala3-book/ca-multiversal-equality.md %}
+[extension]: {% link _overviews/scala3-book/ca-extension-methods.md %}
+[givens]: {% link _overviews/scala3-book/ca-context-parameters.md %}
+[opaque_types]: {% link _overviews/scala3-book/types-opaque-types.md %}
+
+
+
diff --git a/_overviews/scala3-book/scala-for-java-devs.md b/_overviews/scala3-book/scala-for-java-devs.md
new file mode 100644
index 0000000000..0ba8361778
--- /dev/null
+++ b/_overviews/scala3-book/scala-for-java-devs.md
@@ -0,0 +1,1332 @@
+---
+title: Scala for Java Developers
+type: chapter
+description: This page is for Java developers who are interested in learning about Scala 3.
+languages: [zh-cn]
+num: 74
+previous-page: interacting-with-java
+next-page: scala-for-javascript-devs
+---
+
+{% include_relative scala4x.css %}
+
+
+
+This page provides a comparison between the Java and Scala programming languages by sharing side-by-side examples of each language.
+It’s intended for programmers who know Java and want to learn about Scala, specifically by seeing how Scala features compare to Java.
+
+
+
+## Overview
+
+Before getting into the examples, this first section provides a relatively brief introduction and summary of the sections that follow.
+It presents the similarities and differences between Java and Scala at a high level, and then introduces the differences you’ll experience every day as you write code.
+
+### High level similarities
+
+At a high level, Scala shares these similarities with Java:
+
+- Scala code is compiled to _.class_ files, packaged in JAR files, and runs on the JVM
+- It’s an [object-oriented programming][modeling-oop] (OOP) language
+- It’s statically typed
+- Both languages have support for lambdas and [higher-order functions][hofs]
+- They can both be used with IDEs like IntelliJ IDEA and Microsoft VS Code
+- Projects can be built with build tools like Gradle, Ant, and Maven
+- It has terrific libraries and frameworks for building server-side, network-intensive applications, including web server applications, microservices, machine learning, and more (see the [“Awesome Scala” list](https://github.com/lauris/awesome-scala))
+- Both Java and Scala can use Scala libraries:
+ - They can use the [Akka actors library](https://akka.io) to build actor-based concurrent systems, and Apache Spark to build data-intensive applications
+ - They can use the [Play Framework](https://www.playframework.com) to develop server-side applications
+- You can use [GraalVM](https://www.graalvm.org) to compile your projects into native executables
+- Scala can seamlessly use the wealth of libraries that have been developed for Java
+
+### High level differences
+
+Also at a high level, the differences between Java and Scala are:
+
+- Scala has a concise but readable syntax; we call it _expressive_
+- Though it’s statically typed, Scala often feels like a dynamic language
+- Scala is a pure OOP language, so every object is an instance of a class, and symbols like `+` and `+=` that look like operators are really methods; this means that you can create your own operators
+- In addition to being a pure OOP language, Scala is also a pure FP language; in fact, it encourages a fusion of OOP and FP, with functions for the logic and objects for modularity
+- Scala has a full suite of immutable collections, including `List`, `Vector`, and immutable `Map` and `Set` implementations
+- Everything in Scala is an _expression_: constructs like `if` statements, `for` loops, `match` expressions, and even `try`/`catch` expressions all have return values (see the short sketch after this list)
+- Scala idioms favor immutability by default: you’re encouraged to use immutable (`final`) variables and immutable collections
+- Idiomatic Scala code does not use `null`, and thus does not suffer from `NullPointerException`
+- The Scala ecosystem has its own [build tools][tools], including sbt, Mill, and others
+- In addition to running on the JVM, the [Scala.js](https://www.scala-js.org) project lets you use Scala as a JavaScript replacement
+- The [Scala Native](http://www.scala-native.org) project adds low-level constructs to let you write “systems” level code, and also compiles to native executables
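+
+Here is a short sketch (not from the original page) of that expression-oriented style:
+
+```scala
+// control structures are expressions: they produce values you can assign
+val (a, b, x) = (3, 5, 0)
+val max = if a > b then a else b   // 5
+val label = x match                // "zero"
+  case 0 => "zero"
+  case _ => "nonzero"
+```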
+
+{% comment %}
+These are several notes that came up early in the writing process, and I (Alvin) can’t really address them:
+TODO: Need a good, simple way to state that Scala has a sound type system
+TODO: Points to make about Scala’s consistency?
+TODO: Add a point about how the type system lets you express details as desired
+{% endcomment %}
+
+
+### Programming level differences
+
+Finally, these are some of the differences you’ll see every day when writing code:
+
+- Scala’s syntax is extremely consistent
+- Variables and parameters are defined as `val` (immutable, like `final` in Java) or `var` (mutable)
+- _Type inference_ makes your code feel dynamically typed, and helps to keep your code brief
+- In addition to simple `for` loops, Scala has powerful `for` comprehensions that yield results based on your algorithms
+- Pattern matching and `match` expressions will change the way you write code
+- Writing immutable code by default leads to writing _expressions_ rather than _statements_; in time you see that writing expressions simplifies your code (and your tests)
+- [Toplevel definitions][toplevel] let you put method, field, and other definitions anywhere, also leading to concise, expressive code
+- You can create _mixins_ by “mixing” multiple traits into classes and objects (traits are similar to interfaces in Java 8 and newer)
+- Classes are closed by default, supporting Joshua Bloch’s _Effective Java_ idiom, “Design and document for inheritance or else forbid it”
+- Scala’s [contextual abstractions][contextual] and _term inference_ provide a collection of features:
+ - [Extension methods][extension-methods] let you add new functionality to closed classes
+ - [_Given_ instances][givens] let you define terms that the compiler can synthesize at _using_ points, making your code less verbose and essentially letting the compiler write code for you
+ - [Multiversal equality][multiversal] lets you limit equality comparisons---at compile time---to only those comparisons that make sense
+- Scala has state of the art, third-party, open source functional programming libraries
+- Scala case classes are like records in Java 14; they help you model data when writing FP code, with built-in support for concepts like pattern matching and cloning
+- Thanks to features like by-name parameters, infix notation, optional parentheses, extension methods, and [higher-order functions][hofs], you can create your own “control structures” and DSLs
+- Scala files do not have to be named according to the classes or traits they contain
+- Many other goodies: companion classes and objects, macros, [union][union-types] and [intersection][intersection-types], numeric literals, multiple parameter lists, default values for parameters, named arguments, and more
+
+### Features compared with examples
+
+Given that introduction, the following sections provide side-by-side comparisons of Java and Scala programming language features.
+
+
+
+## OOP style classes and methods
+
+This section provides comparisons of features related to OOP-style classes and methods.
+
+### Comments:
+
+Java:
+
+```java
+//
+/* ... */
+/** ... */
+```
+
+Scala:
+
+```scala
+//
+/* ... */
+/** ... */
+```
+
+### OOP style class, primary constructor:
+
+Scala doesn’t follow the JavaBeans standard, so instead of showing Java
+code written in the JavaBeans style, here we show Java code that is
+equivalent to the Scala code that follows it.
+
+Java:
+
+```java
+class Person {
+  public String firstName;
+  public String lastName;
+  public int age;
+
+  public Person(
+    String firstName,
+    String lastName,
+    int age
+  ) {
+    this.firstName = firstName;
+    this.lastName = lastName;
+    this.age = age;
+  }
+
+  public String toString() {
+    return String.format("%s %s is %d years old.", firstName, lastName, age);
+  }
+}
+```
+
+Scala:
+
+```scala
+class Person(
+  var firstName: String,
+  var lastName: String,
+  var age: Int
+):
+  override def toString = s"$firstName $lastName is $age years old."
+```
+
+
+### Auxiliary constructors:
+
+Java:
+
+```java
+public class Person {
+  public String firstName;
+  public String lastName;
+  public int age;
+
+  // primary constructor
+  public Person(
+    String firstName,
+    String lastName,
+    int age
+  ) {
+    this.firstName = firstName;
+    this.lastName = lastName;
+    this.age = age;
+  }
+
+  // zero-arg constructor
+  public Person() {
+    this("", "", 0);
+  }
+
+  // one-arg constructor
+  public Person(String firstName) {
+    this(firstName, "", 0);
+  }
+
+  // two-arg constructor
+  public Person(
+    String firstName,
+    String lastName
+  ) {
+    this(firstName, lastName, 0);
+  }
+}
+```
+
+Scala:
+
+```scala
+class Person(
+  var firstName: String,
+  var lastName: String,
+  var age: Int
+):
+  // zero-arg auxiliary constructor
+  def this() = this("", "", 0)
+
+  // one-arg auxiliary constructor
+  def this(firstName: String) =
+    this(firstName, "", 0)
+
+  // two-arg auxiliary constructor
+  def this(
+    firstName: String,
+    lastName: String
+  ) =
+    this(firstName, lastName, 0)
+
+end Person
+```
+
+
+### Classes closed by default:
+
+“Plan for inheritance or else forbid it.”
+
+Java:
+
+```java
+final class Person
+```
+
+Scala:
+
+```scala
+class Person
+```
+
+
+### A class that’s open for extension:
+
+Java:
+
+```java
+class Person
+```
+
+Scala:
+
+```scala
+open class Person
+```
+
+
+### One-line method:
+
+Java:
+
+```java
+public int add(int a, int b) {
+  return a + b;
+}
+```
+
+
+
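+A corresponding Scala one-liner (a minimal sketch, assuming the same signature):
+
+```scala
+def add(a: Int, b: Int): Int = a + b
+```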
+
+## Interfaces, traits, and inheritance
+
+This section compares Java interfaces to Scala traits, including how classes extend interfaces and traits.
+
+
+### Interfaces/traits:
+
+Java:
+
+```java
+public interface Marker {}
+```
+
+Scala:
+
+```scala
+trait Marker
+```
+
+
+### Simple interface:
+
+Java:
+
+```java
+public interface Adder {
+  public int add(int a, int b);
+}
+```
+
+Scala:
+
+```scala
+trait Adder:
+  def add(a: Int, b: Int): Int
+```
+
+
+### Interface with a concrete method:
+
+Java:
+
+```java
+public interface Adder {
+  int add(int a, int b);
+
+  default int multiply(int a, int b) {
+    return a * b;
+  }
+}
+```
+
+Scala:
+
+```scala
+trait Adder:
+  def add(a: Int, b: Int): Int
+  def multiply(a: Int, b: Int): Int =
+    a * b
+```
+
+
+### Inheritance:
+
+Java:
+
+```java
+class Dog extends Animal implements HasLegs, HasTail
+```
+
+Scala:
+
+```scala
+class Dog extends Animal, HasLegs, HasTail
+```
+
+
+### Extend multiple interfaces
+
+These interfaces and traits have concrete, implemented methods (default methods):
+
+Java:
+
+```java
+interface Adder {
+  default int add(int a, int b) {
+    return a + b;
+  }
+}
+
+interface Multiplier {
+  default int multiply(int a, int b) {
+    return a * b;
+  }
+}
+
+public class JavaMath implements Adder, Multiplier {}
+
+JavaMath jm = new JavaMath();
+jm.add(1,1);
+jm.multiply(2,2);
+```
+
+Scala:
+
+```scala
+trait Adder:
+  def add(a: Int, b: Int) = a + b
+
+trait Multiplier:
+  def multiply(a: Int, b: Int) = a * b
+
+class ScalaMath extends Adder, Multiplier
+
+val sm = new ScalaMath
+sm.add(1,1)
+sm.multiply(2,2)
+```
+
+
+### Mixins:
+
+
+
+
+
+ N/A
+
+
+
+
+ class DavidBanner
+
+ trait Angry:
+ def beAngry() =
+ println("You won’t like me ...")
+
+ trait Big:
+ println("I’m big")
+
+ trait Green:
+ println("I’m green")
+
+ // mix in the traits as DavidBanner
+ // is created
+ val hulk = new DavidBanner with Big with Angry with Green
+
+
+
+
+
+
+
+
+## Control structures
+
+This section compares [control structures][control] in Java and Scala.
+
+### `if` statement, one line:
+
+
+
+
+
+ if (x == 1) { System.out.println(1); }
+
+
+
+
+ if x == 1 then println(x)
+
+
+
+
+
+
+### `if` statement, multiline:
+
+
+
+
+
+ if (x == 1) {
+      System.out.println("x is 1, as you can see:");
+      System.out.println(x);
+ }
+
+
+
+
+ if x == 1 then
+ println("x is 1, as you can see:")
+ println(x)
+
+
+### `try`/`catch`/`finally`:
+
+
+ try
+ writeTextToFile(text)
+ catch
+ case ioe: IOException =>
+ println(ioe.getMessage)
+ case nfe: NumberFormatException =>
+ println(nfe.getMessage)
+ finally
+ println("Clean up resources here.")
+
+
+
+
+
+
+
+## Collections classes
+
+This section compares the [collections classes][collections-classes] that are available in Java and Scala.
+
+
+### Immutable collections classes
+
+Examples of how to create instances of immutable collections.
+
+
+### Sequences:
+
+
+
+
+
+ List strings = List.of("a", "b", "c");
+
+
+
+
+ val strings = List("a", "b", "c")
+ val strings = Vector("a", "b", "c")
+
+
+### Maps:
+
+
+ val map = Map(
+ "a" -> 1,
+ "b" -> 2,
+ "c" -> 3
+ )
+
+
+
+
+
+
+### Mutable collections classes
+
+Scala has mutable collections classes like `ArrayBuffer`, `Map`, and `Set` in its _scala.collection.mutable_ package.
+After [importing them][imports] into the current scope, they’re created just like the immutable `List`, `Vector`, `Map`, and `Set` examples just shown.
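+
+For example, here’s a brief sketch of importing and creating a few mutable collections (the values are just for illustration):
+
+```scala
+import scala.collection.mutable
+
+val fruits = mutable.ArrayBuffer("apple", "banana") // mutable sequence
+val inventory = mutable.Map("apple" -> 3)           // mutable map
+val tags = mutable.Set("new", "sale")               // mutable set
+
+// mutate them in place
+fruits += "cherry"
+inventory("banana") = 5
+tags -= "new"
+```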
+
+Scala also has an `Array` class, which you can think of as a wrapper around the native Java array type.
+A Scala `Array[A]` maps to a Java `A[]`, so you can think of this Scala `Array[String]`:
+
+```scala
+val a = Array("a", "b")
+```
+
+as being backed by this Java `String[]`:
+
+```java
+String[] a = {"a", "b"};
+```
+
+However, a Scala `Array` also has all of the functional methods you expect in a Scala collection, including `map` and `filter`:
+
+```scala
+val nums = Array(1, 2, 3, 4, 5)
+val doubledNums = nums.map(_ * 2)
+val filteredNums = nums.filter(_ > 2)
+```
+
+Because the Scala `Array` is represented in the same way as the Java `array`, you can easily use Java methods that return arrays in your Scala code.
+
+> Despite that discussion of `Array`, bear in mind that often in Scala there are alternatives to `Array` that might be better suited.
+> Arrays are useful for interoperating with other languages (Java, JavaScript) and they may also be useful when writing low-level code that needs to squeeze maximum performance out of the underlying platform. But in general, when you need to use a sequence, the Scala idiom is to prefer immutable sequences like `Vector` and `List`, and then use `ArrayBuffer` if and when you really need a mutable sequence.
+
+You can also convert between Java and Scala collections classes with the Scala `CollectionConverters` objects.
+There are two objects in different packages, one for converting from Java to Scala, and another for converting from Scala to Java.
+This table shows the possible conversions:
+
+
+
+
+
+| Java                                 | Scala                                    |
+|--------------------------------------|------------------------------------------|
+| `java.util.Collection`               | `scala.collection.Iterable`              |
+| `java.util.List`                     | `scala.collection.mutable.Buffer`        |
+| `java.util.Set`                      | `scala.collection.mutable.Set`           |
+| `java.util.Map`                      | `scala.collection.mutable.Map`           |
+| `java.util.concurrent.ConcurrentMap` | `scala.collection.mutable.ConcurrentMap` |
+| `java.util.Dictionary`               | `scala.collection.mutable.Map`           |
+
+
+
+
+
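+For example, here’s a minimal sketch of using the converters from Scala code (assuming Scala 2.13+ or Scala 3, where they live in `scala.jdk.CollectionConverters`):
+
+```scala
+import scala.jdk.CollectionConverters.*
+
+// Java to Scala: view a java.util.List as a mutable Scala Buffer
+val javaList = new java.util.ArrayList[String]()
+javaList.add("a")
+javaList.add("b")
+val scalaBuffer: scala.collection.mutable.Buffer[String] = javaList.asScala
+
+// Scala to Java: view a Scala List as a java.util.List
+val javaView: java.util.List[Int] = List(1, 2, 3).asJava
+```
+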
+
+## Methods on collections classes
+
+With the ability to treat Java collections as streams, Java and Scala now have many of the same common functional methods available to them:
+
+- `map`
+- `filter`
+- `forEach`/`foreach`
+- `findFirst`/`find`
+- `reduce`
+
+If you’re used to using these methods with lambda expressions in Java, you’ll find it easy to use the same methods on Scala’s [collection classes][collections-classes].
+
+Scala also has _dozens_ of other [collections methods][collections-methods], including `head`, `tail`, `drop`, `take`, `distinct`, `flatten`, and many more.
+At first you may wonder why there are so many methods, but after working with Scala you’ll realize that _because_ of these methods, you rarely ever need to write custom `for` loops any more.
+
+(This also means that you rarely need to _read_ custom `for` loops, as well.
+Because developers tend to spend on the order of ten times as much time _reading_ code as _writing_ code, this is significant.)
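+
+As a small illustration, here are a few of those methods applied to a simple list (the names are only illustrative; results are shown in comments):
+
+```scala
+val names = List("Adam", "Brandy", "Chris", "David")
+
+names.map(_.toUpperCase)        // List(ADAM, BRANDY, CHRIS, DAVID)
+names.filter(_.startsWith("C")) // List(Chris)
+names.find(_.length > 5)        // Some(Brandy)
+names.head                      // Adam
+names.drop(2)                   // List(Chris, David)
+names.distinct                  // List(Adam, Brandy, Chris, David)
+```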
+
+
+
+## Tuples
+
+Java doesn’t have a built-in tuple type, but libraries such as _javatuples_ provide one. With that library, Java tuples are created like this:
+
+```java
+Pair pair =
+ new Pair("Eleven", 11);
+
+Triplet triplet =
+ Triplet.with("Eleven", 11, 11.0);
+Quartet quartet =
+ Quartet.with("Eleven", 11, 11.0, new Person("Eleven"));
+```
+
+That library also provides larger tuple classes, named Quintet, Sextet, Septet, Octet, Ennead, and Decade.
+
+Tuples of any size in Scala are created by putting the values inside parentheses, like this:
+
+```scala
+val a = ("eleven")
+val b = ("eleven", 11)
+val c = ("eleven", 11, 11.0)
+val d = ("eleven", 11, 11.0, Person("Eleven"))
+```
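+
+Once you have a tuple like `d`, you can access its elements by (zero-based) index, or destructure it into named values:
+
+```scala
+d(0)   // "eleven"
+d(1)   // 11
+
+// destructure the tuple into named values
+val (name, number, weight, person) = d
+```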
+
+
+
+## Enums
+
+This section compares enumerations in Java and Scala.
+
+
+### Basic enum:
+
+
+ enum Planet(
+ mass: Double,
+ radius: Double
+ ):
+ case Mercury extends Planet(3.303e+23, 2.4397e6)
+ case Venus extends Planet(4.869e+24, 6.0518e6)
+ case Earth extends Planet(5.976e+24, 6.37814e6)
+ // more planets ...
+
+ private final val G = 6.67300E-11
+
+ def surfaceGravity = G * mass / (radius * radius)
+
+  def surfaceWeight(otherMass: Double) =
+    otherMass * surfaceGravity
+
+
+
+
+
+
+
+## Exceptions and error handling
+
+This section covers the differences between exception handling in Java and Scala.
+
+### Java uses checked exceptions
+
+Java uses checked exceptions, so in Java code you have historically written `try`/`catch`/`finally` blocks, along with `throws` clauses on methods:
+
+```java
+public int makeInt(String s)
+throws NumberFormatException {
+ // code here to convert a String to an int
+}
+```
+
+### Scala doesn’t use checked exceptions
+
+The Scala idiom is to _not_ use checked exceptions like this.
+When working with code that can throw exceptions, you can use `try`/`catch`/`finally` blocks to catch exceptions from code that throws them, but how you proceed from there is different.
+
+The best way to explain this is that Scala code consists of _expressions_, which return values.
+As a result, you end up writing your code as a series of algebraic expressions:
+
+```scala
+val a = f(x)
+val b = g(a,z)
+val c = h(b,y)
+```
+
+This is nice, it’s just algebra.
+You create equations to solve small problems, and then combine equations to solve larger problems.
+
+And very importantly---as you remember from algebra courses---algebraic expressions don’t short circuit---they don’t throw exceptions that blow up a series of equations.
+
+Therefore, in Scala our methods don’t throw exceptions.
+Instead, they return types like `Option`.
+For example, this `makeInt` method catches a possible exception and returns an `Option` value:
+
+```scala
+def makeInt(s: String): Option[Int] =
+ try
+ Some(s.toInt)
+ catch
+ case e: NumberFormatException => None
+```
+
+The Scala `Option` is similar to the Java `Optional` class.
+As shown, if the string-to-int conversion succeeds, the `Int` is returned inside a `Some` value, and if it fails, a `None` value is returned.
+`Some` and `None` are subtypes of `Option`, so the method is declared to return the `Option[Int]` type.
+
+When you have an `Option` value, such as the one returned by `makeInt`, there are many ways to work with it, depending on your needs.
+This code shows one possible approach:
+
+```scala
+makeInt(aString) match
+ case Some(i) => println(s"Int i = $i")
+ case None => println(s"Could not convert $aString to an Int.")
+```
+
+`Option` is commonly used in Scala, and it’s built into many classes in the standard library.
+Other similar sets of classes like Try/Success/Failure and Either/Left/Right offer even more flexibility.
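+
+For instance, here’s a brief sketch of `makeInt` written with `Try`, which also carries the exception in the `Failure` case:
+
+```scala
+import scala.util.{Try, Success, Failure}
+
+def makeInt(s: String): Try[Int] = Try(s.toInt)
+
+makeInt("1") match
+  case Success(i) => println(s"Int i = $i")
+  case Failure(e) => println(s"Conversion failed: ${e.getMessage}")
+```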
+
+For more information on dealing with errors and exceptions in Scala, see the [Functional Error Handling][error-handling] section.
+
+
+
+## Concepts that are unique to Scala
+
+That concludes our comparison of the Java and Scala languages.
+
+There are other concepts in Scala which currently have no equivalent in Java 11.
+These include:
+
+- Everything related to Scala’s [contextual abstractions][contextual]
+- Several Scala method features:
+ - Multiple parameter lists
+ - Default parameter values
+ - Using named arguments when calling methods
+- Case classes (like “records” in Java 14), case objects, and companion classes and objects (see the [Domain Modeling][modeling-intro] chapter)
+- The ability to create your own control structures and DSLs
+- [Toplevel definitions][toplevel]
+- Pattern matching
+- Advanced features of `match` expressions
+- Type lambdas
+- Trait parameters
+- [Opaque type aliases][opaque]
+- [Multiversal equality][equality]
+- [Type classes][type-classes]
+- Infix methods
+- Macros and metaprogramming
+
+
+[collections-classes]: {% link _overviews/scala3-book/collections-classes.md %}
+[collections-methods]: {% link _overviews/scala3-book/collections-methods.md %}
+[control]: {% link _overviews/scala3-book/control-structures.md %}
+[equality]: {% link _overviews/scala3-book/ca-multiversal-equality.md %}
+[error-handling]: {% link _overviews/scala3-book/fp-functional-error-handling.md %}
+[extension-methods]: {% link _overviews/scala3-book/ca-extension-methods.md %}
+[givens]: {% link _overviews/scala3-book/ca-context-parameters.md %}
+[hofs]: {% link _overviews/scala3-book/fun-hofs.md %}
+[imports]: {% link _overviews/scala3-book/packaging-imports.md %}
+[modeling-intro]: {% link _overviews/scala3-book/domain-modeling-intro.md %}
+[modeling-oop]: {% link _overviews/scala3-book/domain-modeling-oop.md %}
+[opaque]: {% link _overviews/scala3-book/types-opaque-types.md %}
+[tools]: {% link _overviews/scala3-book/scala-tools.md %}
+[toplevel]: {% link _overviews/scala3-book/taste-toplevel-definitions.md %}
+[type-classes]: {% link _overviews/scala3-book/ca-type-classes.md %}
+
+
+
+
+
+[concurrency]: {% link _overviews/scala3-book/concurrency.md %}
+[contextual]: {% link _overviews/scala3-book/ca-contextual-abstractions-intro.md %}
+[control]: {% link _overviews/scala3-book/control-structures.md %}
+[fp-intro]: {% link _overviews/scala3-book/fp-intro.md %}
+[intersection-types]: {% link _overviews/scala3-book/types-intersection.md %}
+[modeling-fp]: {% link _overviews/scala3-book/domain-modeling-fp.md %}
+[multiversal]: {% link _overviews/scala3-book/ca-multiversal-equality.md %}
+[union-types]: {% link _overviews/scala3-book/types-union.md %}
+
+
diff --git a/_overviews/scala3-book/scala-for-javascript-devs.md b/_overviews/scala3-book/scala-for-javascript-devs.md
new file mode 100644
index 0000000000..26c672ae99
--- /dev/null
+++ b/_overviews/scala3-book/scala-for-javascript-devs.md
@@ -0,0 +1,1377 @@
+---
+title: Scala for JavaScript Developers
+type: chapter
+description: This chapter provides an introduction to Scala 3 for JavaScript developers
+languages: [zh-cn]
+num: 75
+previous-page: scala-for-java-devs
+next-page: scala-for-python-devs
+---
+
+{% include_relative scala4x.css %}
+
+
+
+This page provides a comparison between the JavaScript and Scala programming languages.
+It’s intended for programmers who know JavaScript and want to learn about Scala, specifically by seeing examples of how JavaScript language features compare to Scala.
+
+
+
+## Overview
+
+This section provides a relatively brief introduction and summary of the sections that follow.
+It presents the similarities and differences between JavaScript and Scala at a high level, and then introduces the differences you’ll experience every day as you write code.
+
+### High-level similarities
+
+At a high level, Scala shares these similarities with JavaScript:
+
+- Both are considered high-level programming languages, where you don’t have to concern yourself with low-level concepts like pointers and manual memory management
+- Both have a relatively simple, concise syntax
+- Both support a C/C++/Java style curly-brace syntax for writing methods and other blocks of code
+- Both include features (like classes) for object-oriented programming (OOP)
+- Both include features (like lambdas) for [functional programming][fp-intro] (FP)
+- JavaScript runs in the browser and other environments like Node.js.
+  The [Scala.js](https://www.scala-js.org) flavor of Scala targets JavaScript, so Scala programs can also run in those same environments.
+- Developers write server-side applications in JavaScript and Scala using [Node.js](https://nodejs.org) (in Scala’s case, via Scala.js); projects like the [Play Framework](https://www.playframework.com/) also let you write server-side applications in Scala
+- Both languages have similar `if` statements, `while` loops, and `for` loops
+- Starting [at this Scala.js page](https://www.scala-js.org/libraries/index.html), you’ll find dozens of libraries that provide support for React, Angular, jQuery, and many other JavaScript libraries
+- JavaScript objects are mutable; Scala objects _can_ be mutable when writing in an imperative style
+- Both JavaScript and Scala support _promises_ as a way of handling the result of asynchronous computations ([Scala concurrency][concurrency] uses futures and promises)
+
+### High-level differences
+
+Also at a high level, some of the differences between JavaScript and Scala are:
+
+- JavaScript is dynamically typed, and Scala is statically typed
+ - Although Scala is statically typed, features like type inference make it feel like a dynamic language (as you’ll see in the examples that follow)
+- Scala idioms favor immutability by default: you’re encouraged to use immutable variables and immutable collections
+- Scala has a concise but readable syntax; we call it _expressive_
+- Scala is a pure OOP language, so every object is an instance of a class, and symbols like `+` and `+=` that look like operators are really methods; this means that you can create your own methods that work as operators
+- As a pure OOP language and a pure FP language, Scala encourages a fusion of OOP and FP, with functions for the logic and immutable objects for modularity
+- Scala has state of the art, third-party, open source functional programming libraries
+- Everything in Scala is an _expression_: constructs like `if` statements, `for` loops, `match` expressions, and even `try`/`catch` expressions all have return values
+- The [Scala Native](https://scala-native.org/) project lets you write “systems” level code, and also compiles to native executables
+
+### Programming level differences
+
+At a lower level, these are some of the differences you’ll see every day when writing code:
+
+- Scala variables and parameters are defined with `val` (immutable, like a JavaScript `const`) or `var` (mutable, like a JavaScript `var` or `let`)
+- Scala does not use semi-colons at the end of lines
+- Scala is statically-typed, though in many situations you don’t need to declare the type
+- Scala uses traits as interfaces and to create _mixins_
+- In addition to simple `for` loops, Scala has powerful `for` comprehensions that yield results based on your algorithms
+- Pattern matching and `match` expressions will change the way you write code
+- Scala’s [contextual abstractions][contextual] and _term inference_ provide a collection of features:
+  - [Extension methods][extension-methods] let you add new functionality to closed classes without breaking modularity, by being available only in specific scopes (as opposed to monkey-patching, which can pollute other areas of the code); a small sketch follows this list
+ - [Given instances][givens] let you define terms that the compiler can use to synthesize code for you
+ - Type safety and [multiversal equality][multiversal] let you limit equality comparisons---at compile time---to only those comparisons that make sense
+- Thanks to features like by-name parameters, infix notation, optional parentheses, extension methods, and [higher-order functions][hofs], you can create your own “control structures” and DSLs
+- Many other goodies that you can read about throughout this book: case classes, companion classes and objects, macros, [union][union-types] and [intersection][intersection-types] types, multiple parameter lists, named arguments, and more
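+
+As a minimal sketch of the extension-method idea mentioned above (`isEven` is just an illustrative name, not a standard library method):
+
+```scala
+// make an isEven method available on Int values,
+// but only where this extension is in scope
+extension (i: Int)
+  def isEven: Boolean = i % 2 == 0
+
+4.isEven // true
+7.isEven // false
+```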
+
+
+
+## Variables and Types
+
+### Comments
+
+
+
+
+
+ //
+ /* ... */
+ /** ... */
+
+
+
+
+ //
+ /* ... */
+ /** ... */
+
+
+
+
+
+
+### Mutable variables
+
+
+
+
+
+ let // now preferred for mutable
+ var // old mutable style
+
+
+
+
+ var // used for mutable variables
+
+
+
+
+
+
+### Immutable values
+
+
+
+
+
+ const
+
+
+
+
+ val
+
+
+
+
+
+The rule of thumb in Scala is to declare variables using `val`, unless there’s a specific reason you need a mutable variable.
+
+
+
+## Naming standards
+
+JavaScript and Scala generally use the same _CamelCase_ naming standards.
+Variables are named like `myVariableName`, methods are named like `lastIndexOf`, and classes and objects are named like `Animal` and `PrintedBook`.
+
+
+
+## Strings
+
+Many uses of strings are similar in JavaScript and Scala, though Scala only uses double-quotes for simple strings, and triple-quotes for multiline strings.
+
+
+### String basics
+
+
+
+
+
+ // use single- or double-quotes
+ let msg = 'Hello, world';
+ let msg = "Hello, world";
+
+
+
+
+ // use only double-quotes
+ val msg = "Hello, world"
+
+
+
+
+
+
+### Interpolation
+
+
+
+
+
+ let name = 'Joe';
+
+ // JavaScript uses backticks
+ let msg = `Hello, ${name}`;
+
+
+
+
+ val name = "Joe"
+ val age = 42
+ val weight = 180.5
+
+ // use `s` before a string for simple interpolation
+ println(s"Hi, $name") // "Hi, Joe"
+ println(s"${1 + 1}") // "2"
+
+ // `f` before a string allows printf-style formatting.
+ // this example prints:
+ // "Joe is 42 years old, and weighs"
+ // "180.5 pounds."
+ println(f"$name is $age years old, and weighs $weight%.1f pounds.")
+
+
+
+
+
+
+### Multiline strings with interpolation
+
+
+
+
+
+ let name = "joe";
+ let str = `
+ Hello, ${name}.
+ This is a multiline string.
+ `;
+
+
+
+
+
+ val name = "Martin Odersky"
+
+ val quote = s"""
+ |$name says
+ |Scala is a fusion of
+ |OOP and FP.
+ """.stripMargin.replaceAll("\n", " ").trim
+
+ // result:
+ // "Martin Odersky says Scala is a fusion of OOP and FP."
+
+
+
+
+
+
+JavaScript and Scala also have similar methods to work with strings, including `charAt`, `concat`, `indexOf`, and many more.
+Escape characters like `\n`, `\f`, `\t` are also the same in both languages.
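+
+For instance, here are a few of those shared methods called on a Scala string (results shown in comments):
+
+```scala
+val s = "Hello, world"
+
+s.charAt(0)        // 'H'
+s.indexOf("world") // 7
+s.concat("!")      // "Hello, world!"
+s.toUpperCase      // "HELLO, WORLD"
+```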
+
+
+
+## Numbers and arithmetic
+
+Numeric operators are similar between JavaScript and Scala.
+The biggest difference is that Scala doesn’t offer `++` and `--` operators.
+
+
+### Numeric operators:
+
+
+
+
+
+ let x = 1;
+ let y = 2.0;
+
+ let a = 1 + 1;
+ let b = 2 - 1;
+ let c = 2 * 2;
+ let d = 4 / 2;
+ let e = 5 % 2;
+
+
+
+
+
+ val x = 1
+ val y = 2.0
+
+ val a = 1 + 1
+ val b = 2 - 1
+ val c = 2 * 2
+ val d = 4 / 2
+ val e = 5 % 2
+
+
+
+
+
+
+
+### Increment and decrement:
+
+
+
+
+
+ i++;
+ i += 1;
+
+ i--;
+ i -= 1;
+
+
+
+
+    i += 1
+    i -= 1
+
+
+
+
+
+Perhaps the biggest difference is that “operators” like `+` and `-` are really _methods_ in Scala, not operators.
+Scala numbers also have these related methods:
+
+```scala
+var a = 2
+a *= 2 // 4
+a /= 2 // 2
+```
+
+Scala's `Double` type most closely corresponds to JavaScript’s default `number` type,
+`Int` represents signed 32-bit integer values, and `BigInt` corresponds to JavaScript's `bigint`.
+
+These are Scala `Int` and `Double` values.
+Notice that the type doesn’t have to be explicitly declared:
+
+```scala
+val i = 1 // Int
+val d = 1.1 // Double
+```
+
+You can also use other numeric types as needed:
+
+```scala
+val a: Byte = 0 // Byte = 0
+val a: Double = 0 // Double = 0.0
+val a: Float = 0 // Float = 0.0
+val a: Int = 0 // Int = 0
+val a: Long = 0 // Long = 0
+val a: Short = 0 // Short = 0
+
+val x = BigInt(1_234_456_789)
+val y = BigDecimal(1_234_456.890)
+```
+
+
+### Boolean values
+
+Both languages use `true` and `false` for boolean values:
+
+
+
+
+
+ let a = true;
+ let b = false;
+
+
+
+
+ val a = true
+ val b = false
+
+
+
+
+
+
+
+## Dates
+
+Dates are another commonly used type in both languages.
+
+### Get the current date:
+
+
+
+
+
+ let d = new Date();
+ // result:
+ // Sun Nov 29 2020 18:47:57 GMT-0700 (Mountain Standard Time)
+
+
+
+
+
+ // different ways to get the current date and time
+ import java.time.*
+
+ val a = LocalDate.now
+ // 2020-11-29
+ val b = LocalTime.now
+ // 18:46:38.563737
+ val c = LocalDateTime.now
+ // 2020-11-29T18:46:38.563750
+ val d = Instant.now
+ // 2020-11-30T01:46:38.563759Z
+
+
+
+
+
+
+### Specify a different date:
+
+
+
+
+
+    let d = new Date(2020, 1, 21, 1, 0, 0, 0);
+    let d = new Date(2020, 1, 21, 1, 0, 0);
+    let d = new Date(2020, 1, 21, 1, 0);
+    let d = new Date(2020, 1, 21, 1);
+    let d = new Date(2020, 1, 21);
+
+
+
+
+ val d = LocalDate.of(2020, 1, 21)
+ val d = LocalDate.of(2020, Month.JANUARY, 21)
+ val d = LocalDate.of(2020, 1, 1).plusDays(20)
+
+
+
+
+
+
+In this case, Scala uses the date and time classes that come with Java.
+Many date/time methods are similar between JavaScript and Scala.
+See [the _java.time_ package](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/time/package-summary.html) for more details.
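+
+As a small sketch of calling a few _java.time_ methods from Scala (results shown in comments):
+
+```scala
+import java.time.LocalDate
+
+val d = LocalDate.of(2020, 1, 21)
+
+d.plusDays(10)   // 2020-01-31
+d.minusMonths(1) // 2019-12-21
+d.getDayOfWeek   // TUESDAY
+d.isLeapYear     // true
+```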
+
+
+
+## Functions
+
+In both JavaScript and Scala, functions are objects, so their functionality is similar, but their syntax and terminology are a little different.
+
+### Named functions, one line:
+
+
+
+
+
+ function add(a, b) {
+ return a + b;
+ }
+ add(2, 2); // 4
+
+
+
+
+ // technically this is a method, not a function
+ def add(a: Int, b: Int) = a + b
+ add(2, 2) // 4
+
+
+
+
+
+### Named functions, multiline:
+
+
+
+
+
+ function addAndDouble(a, b) {
+ // imagine this requires
+ // multiple lines
+ return (a + b) * 2
+ }
+
+
+
+
+ def addAndDouble(a: Int, b: Int): Int =
+ // imagine this requires
+ // multiple lines
+ (a + b) * 2
+
+
+
+
+
+In Scala, showing the `Int` return type is optional.
+It’s _not_ shown in the `add` example and _is_ shown in the `addAndDouble` example, so you can see both approaches.
+
+
+
+## Anonymous functions
+
+Both JavaScript and Scala let you define anonymous functions, which you can pass into other functions and methods.
+
+### Arrow and anonymous functions
+
+
+
+
+
+ // arrow function
+ let log = (s) => console.log(s)
+
+ // anonymous function
+ let log = function(s) {
+ console.log(s);
+ }
+
+ // use either of those functions here
+ function printA(a, log) {
+ log(a);
+ }
+
+
+
+
+ // a function (an anonymous function assigned to a variable)
+ val log = (s: String) => console.log(s)
+
+ // a scala method. methods tend to be used much more often,
+ // probably because they’re easier to read.
+ def log(a: Any) = console.log(a)
+
+ // a function or a method can be passed into another
+ // function or method
+    def printA(a: Any, log: Any => Unit) = log(a)
+
+
+
+
+
+
+In Scala you rarely define a function using the first syntax shown.
+Instead, you often define anonymous functions right at the point of use.
+Many collections methods are [higher-order functions][hofs] and accept function parameters, so you write code like this:
+
+```scala
+// map method, long form
+List(1,2,3).map(i => i * 10) // List(10,20,30)
+
+// map, short form (which is more commonly used)
+List(1,2,3).map(_ * 10) // List(10,20,30)
+
+// filter, short form
+List(1,2,3).filter(_ < 3) // List(1,2)
+
+// filter and then map
+List(1,2,3,4,5).filter(_ < 3).map(_ * 10) // List(10, 20)
+```
+
+
+
+## Classes
+
+Scala has both classes and case classes.
+A _class_ is similar to a JavaScript class, and is generally intended for use in [OOP style applications][modeling-oop] (though they can also be used in FP code), and _case classes_ have additional features that make them very useful in [FP style applications][modeling-fp].
+
+The following example shows how to create several types as enumerations, and then defines an OOP-style `Pizza` class.
+At the end, a `Pizza` instance is created and used:
+
+```scala
+// create some enumerations that the Pizza class will use
+enum CrustSize:
+ case Small, Medium, Large
+
+enum CrustType:
+ case Thin, Thick, Regular
+
+enum Topping:
+ case Cheese, Pepperoni, BlackOlives, GreenOlives, Onions
+
+// import those enumerations and the ArrayBuffer,
+// so the Pizza class can use them
+import CrustSize.*
+import CrustType.*
+import Topping.*
+import scala.collection.mutable.ArrayBuffer
+
+// define an OOP style Pizza class
+class Pizza(
+ var crustSize: CrustSize,
+ var crustType: CrustType
+):
+
+ private val toppings = ArrayBuffer[Topping]()
+
+ def addTopping(t: Topping): Unit =
+ toppings += t
+
+ def removeTopping(t: Topping): Unit =
+ toppings -= t
+
+ def removeAllToppings(): Unit =
+ toppings.clear()
+
+ override def toString(): String =
+ s"""
+ |Pizza:
+ | Crust Size: ${crustSize}
+ | Crust Type: ${crustType}
+ | Toppings: ${toppings}
+ """.stripMargin
+
+end Pizza
+
+// create a Pizza instance
+val p = Pizza(Small, Thin)
+
+// change the crust
+p.crustSize = Large
+p.crustType = Thick
+
+// add and remove toppings
+p.addTopping(Cheese)
+p.addTopping(Pepperoni)
+p.addTopping(BlackOlives)
+p.removeTopping(Pepperoni)
+
+// print the pizza, which uses its `toString` method
+println(p)
+```
+
+
+
+## Interfaces, traits, and inheritance
+
+Scala uses traits as interfaces, and also to create mixins.
+Traits can have both abstract and concrete members, including methods and fields.
+
+This example shows how to define two traits, create a class that extends and implements those traits, and then create and use an instance of that class:
+
+```scala
+trait HasLegs:
+ def numLegs: Int
+ def walk(): Unit
+ def stop() = println("Stopped walking")
+
+trait HasTail:
+ def wagTail(): Unit
+ def stopTail(): Unit
+
+class Dog(var name: String) extends HasLegs, HasTail:
+ val numLegs = 4
+ def walk() = println("I’m walking")
+ def wagTail() = println("⎞⎜⎛ ⎞⎜⎛")
+ def stopTail() = println("Tail is stopped")
+ override def toString = s"$name is a Dog"
+
+// create a Dog instance
+val d = Dog("Rover")
+
+// use the class’s attributes and behaviors
+println(d.numLegs) // 4
+d.wagTail() // "⎞⎜⎛ ⎞⎜⎛"
+d.walk() // "I’m walking"
+```
+
+
+
+## Control Structures
+
+Except for the use of `===` and `!==` in JavaScript, comparison and logical operators are almost identical in JavaScript and Scala.
+
+{% comment %}
+TODO: Sébastien mentioned that `===` is closest to `eql` in Scala. Update this area.
+{% endcomment %}
+
+### Comparison operators
+
+
+
+
+
+| JavaScript | Scala |
+|------------|-------|
+| `==`       | `==`  |
+| `===`      | `==`  |
+| `!=`       | `!=`  |
+| `!==`      | `!=`  |
+| `>`        | `>`   |
+| `<`        | `<`   |
+| `>=`       | `>=`  |
+| `<=`       | `<=`  |
+
+
+
+
+### Logical operators
+
+
+
+
+
+Both languages use the same logical operators: `&&`, `||`, and `!`.
+
+
+
+
+
+
+
+## if/then/else expressions
+
+JavaScript and Scala if/then/else statements are similar.
+In Scala 2 they were almost identical, but with Scala 3, curly braces are no longer necessary (though they can still be used).
+
+### `if` statement, one line:
+
+
+
+
+
+ if (x == 1) { console.log(1); }
+
+
+
+
+ if x == 1 then println(x)
+
+
+
+
+
+### `if` statement, multiline:
+
+
+
+
+
+ if (x == 1) {
+ console.log("x is 1, as you can see:")
+ console.log(x)
+ }
+
+
+
+
+ if x == 1 then
+ println("x is 1, as you can see:")
+ println(x)
+
+
+### `if`, `else if`, `else`:
+
+
+
+
+   if x < 0 then
+     println("negative")
+   else if x == 0 then
+     println("zero")
+   else
+     println("positive")
+
+
+
+
+
+### Returning a value from `if`:
+
+JavaScript uses a ternary operator, and Scala uses its `if` expression as usual:
+
+
+
+
+
+ let minVal = a < b ? a : b;
+
+
+
+
+ val minValue = if a < b then a else b
+
+
+
+
+
+### `if` as the body of a method:
+
+Scala methods tend to be very short, and you can easily use `if` as the body of a method:
+
+
+
+
+
+ function min(a, b) {
+ return (a < b) ? a : b;
+ }
+
+
+
+
+ def min(a: Int, b: Int): Int =
+ if a < b then a else b
+
+
+
+
+
+In Scala 3 you can still use the “curly brace” style, if you prefer.
+For instance, you can write an if/else-if/else expression like this:
+
+```scala
+if (i == 0) {
+ println(0)
+} else if (i == 1) {
+ println(1)
+} else {
+ println("other")
+}
+```
+
+
+
+## Loops
+
+Both JavaScript and Scala have `while` loops and `for` loops.
+Scala used to have do/while loops, but they have been removed from the language.
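+
+Scala 3 dropped do/while, but you can get the same run-at-least-once behavior by putting the loop body inside the condition block; a minimal sketch:
+
+```scala
+var i = 0
+
+// the body runs at least once; the last expression in the
+// block (the condition) decides whether to loop again
+while ({
+  println(i)
+  i += 1
+  i < 3
+}) ()
+```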
+
+### `while` loop:
+
+
+
+
+
+ let i = 0;
+ while (i < 3) {
+ console.log(i);
+ i++;
+ }
+
+
+
+
+   var i = 0
+ while i < 3 do
+ println(i)
+ i += 1
+
+
+
+
+
+The Scala code can also be written like this, if you prefer:
+
+```scala
+var i = 0
+while (i < 3) {
+ println(i)
+ i += 1
+}
+```
+
+The following examples show `for` loops in JavaScript and Scala.
+They assume that you have these collections to work with:
+
+```scala
+// JavaScript
+let nums = [1, 2, 3];
+
+// Scala
+val nums = List(1, 2, 3)
+```
+
+### `for` loop, single line
+
+
+
+
+
+ // newer syntax
+ for (let i of nums) {
+ console.log(i);
+ }
+
+ // older
+   for (var i = 0; i < nums.length; i++) {
+ console.log(nums[i]);
+ }
+
+
+
+
+ // preferred
+ for i <- nums do println(i)
+
+ // also available
+ for (i <- nums) println(i)
+
+
+
+
+
+### `for` loop, multiple lines in the body
+
+
+
+
+
+ // preferred
+ for (let i of nums) {
+ let j = i * 2;
+ console.log(j);
+ }
+
+ // also available
+   for (let i = 0; i < nums.length; i++) {
+ let j = nums[i] * 2;
+ console.log(j);
+ }
+
+
+
+
+ // preferred
+ for i <- nums do
+ val j = i * 2
+ println(j)
+
+ // also available
+ for (i <- nums) {
+ val j = i * 2
+ println(j)
+ }
+
+
+
+
+
+### Multiple generators in a `for` loop
+
+
+
+
+
+ let str = "ab";
+ for (let i = 1; i < 3; i++) {
+ for (var j = 0; j < str.length; j++) {
+ for (let k = 1; k < 11; k += 5) {
+ let c = str.charAt(j);
+ console.log(`i: ${i} j: ${c} k: ${k}`);
+ }
+ }
+ }
+
+
+
+
+ for
+ i <- 1 to 2
+ j <- 'a' to 'b'
+ k <- 1 to 10 by 5
+ do
+ println(s"i: $i, j: $j, k: $k")
+
+
+
+
+
+### Generator with guards
+
+A _guard_ is a name for an `if` expression inside a `for` expression.
+
+
+
+
+
+ for (let i = 0; i < 10; i++) {
+ if (i % 2 == 0 && i < 5) {
+ console.log(i);
+ }
+ }
+
+
+
+
+ for
+ i <- 1 to 10
+ if i % 2 == 0
+ if i < 5
+ do
+ println(i)
+
+
+
+
+
+### `for` comprehension
+
+A `for` comprehension is a `for` loop that uses `yield` to return (yield) a value. They’re used often in Scala.
+
+
+
+
+
+ N/A
+
+
+
+
+ val list =
+ for
+ i <- 1 to 3
+ yield
+ i * 10
+ // result: Vector(10, 20, 30)
+
+
+
+
+
+
+
+## switch & match
+
+Where JavaScript has `switch` statements, Scala has `match` expressions.
+Like everything else in Scala, these truly are _expressions_, meaning they return a result:
+
+```scala
+val day = 1
+
+// later in the code ...
+val monthAsString = day match
+ case 1 => "January"
+ case 2 => "February"
+ case _ => "Other"
+```
+
+`match` expressions can handle multiple matches in each `case` statement:
+
+```scala
+val numAsString = i match
+ case 1 | 3 | 5 | 7 | 9 => "odd"
+ case 2 | 4 | 6 | 8 | 10 => "even"
+ case _ => "too big"
+```
+
+They can also be used as the body of a method:
+
+```scala
+def isTruthy(a: Matchable) = a match
+ case 0 | "" => false
+ case _ => true
+
+def isPerson(x: Matchable): Boolean = x match
+ case p: Person => true
+ case _ => false
+```
+
+`match` expressions have many other pattern-matching options.
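+For example, patterns can also bind variables, include `if` guards, and destructure tuples and case classes (this sketch assumes the one-field `case class Person` shown in it):
+
+```scala
+case class Person(name: String)
+
+def describe(x: Matchable): String = x match
+  case 0                 => "zero"
+  case i: Int if i > 100 => s"a big number: $i"
+  case (a, b)            => s"a tuple: ($a, $b)"
+  case Person(name)      => s"a person named $name"
+  case _                 => "something else"
+```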
+
+
+
+## Collections classes
+
+Scala has different [collections classes][collections-classes] for different needs.
+
+Common _immutable_ sequences are:
+
+- `List`
+- `Vector`
+
+Common _mutable_ sequences are:
+
+- `Array`
+- `ArrayBuffer`
+
+Scala also has mutable and immutable Maps and Sets.
+
+This is how you create the common Scala collection types:
+
+```scala
+val strings = List("a", "b", "c")
+val strings = Vector("a", "b", "c")
+val strings = ArrayBuffer("a", "b", "c")
+
+val set = Set("a", "b", "a") // result: Set("a", "b")
+val map = Map(
+ "a" -> 1,
+ "b" -> 2,
+ "c" -> 3
+)
+```
+
+### Methods on collections
+
+The following examples show many different ways to work with Scala collections.
+
+### Populating lists:
+
+```scala
+// to, until
+(1 to 5).toList // List(1, 2, 3, 4, 5)
+(1 until 5).toList // List(1, 2, 3, 4)
+
+(1 to 10 by 2).toList // List(1, 3, 5, 7, 9)
+(1 until 10 by 2).toList // List(1, 3, 5, 7, 9)
+(1 to 10).by(2).toList // List(1, 3, 5, 7, 9)
+
+('d' to 'h').toList // List(d, e, f, g, h)
+('d' until 'h').toList // List(d, e, f, g)
+('a' to 'f').by(2).toList // List(a, c, e)
+
+// range method
+List.range(1, 3) // List(1, 2)
+List.range(1, 6, 2) // List(1, 3, 5)
+
+List.fill(3)("foo") // List(foo, foo, foo)
+List.tabulate(3)(n => n * n) // List(0, 1, 4)
+List.tabulate(4)(n => n * n) // List(0, 1, 4, 9)
+```
+
+### Functional methods on sequences:
+
+```scala
+// these examples use a List, but they’re the same with Vector
+val a = List(10, 20, 30, 40, 10) // List(10, 20, 30, 40, 10)
+a.contains(20) // true
+a.distinct // List(10, 20, 30, 40)
+a.drop(2) // List(30, 40, 10)
+a.dropRight(2) // List(10, 20, 30)
+a.dropWhile(_ < 25) // List(30, 40, 10)
+a.filter(_ < 25) // List(10, 20, 10)
+a.filter(_ > 100) // List()
+a.find(_ > 20) // Some(30)
+a.head // 10
+a.headOption // Some(10)
+a.init // List(10, 20, 30, 40)
+a.last // 10
+a.lastOption // Some(10)
+a.slice(2,4) // List(30, 40)
+a.tail // List(20, 30, 40, 10)
+a.take(3) // List(10, 20, 30)
+a.takeRight(2) // List(40, 10)
+a.takeWhile(_ < 30) // List(10, 20)
+
+// map, flatMap
+val fruits = List("apple", "pear")
+fruits.map(_.toUpperCase) // List(APPLE, PEAR)
+fruits.flatMap(_.toUpperCase) // List(A, P, P, L, E, P, E, A, R)
+
+val nums = List(10, 5, 8, 1, 7)
+nums.sorted // List(1, 5, 7, 8, 10)
+nums.sortWith(_ < _) // List(1, 5, 7, 8, 10)
+nums.sortWith(_ > _) // List(10, 8, 7, 5, 1)
+
+List(1,2,3).updated(0,10) // List(10, 2, 3)
+List(2,4).union(List(1,3)) // List(2, 4, 1, 3)
+
+// zip
+val women = List("Wilma", "Betty") // List(Wilma, Betty)
+val men = List("Fred", "Barney") // List(Fred, Barney)
+val couples = women.zip(men) // List((Wilma,Fred), (Betty,Barney))
+```
+
+Scala has _many_ more methods that are available to you.
+The benefits of all these methods are:
+
+- You don’t have to write custom `for` loops to solve problems
+- When you read someone else’s code, you won’t have to read their custom `for` loops; you’ll just find common methods like these, so it’s easier to read code from different projects
+
+
+### Tuples
+
+When you want to put multiple data types in the same list, JavaScript lets you do this:
+
+```javascript
+let stuff = ["Joe", 42, 1.0];
+```
+
+In Scala you do this:
+
+```scala
+val a = ("eleven")
+val b = ("eleven", 11)
+val c = ("eleven", 11, 11.0)
+val d = ("eleven", 11, 11.0, Person("Eleven"))
+```
+
+In Scala these types are called tuples, and as shown, they can contain one or more elements, and the elements can have different types.
+You access their elements just like you access elements of a `List`, `Vector`, or `Array`:
+
+```scala
+d(0) // "eleven"
+d(1) // 11
+```
+
+### Enumerations
+
+JavaScript doesn’t have enumerations, but you can do this:
+
+```javascript
+let Color = {
+ RED: 1,
+ GREEN: 2,
+ BLUE: 3
+};
+Object.freeze(Color);
+```
+
+In Scala 3 you can do quite a few things with enumerations.
+You can create an equivalent of that code:
+
+```scala
+enum Color:
+ case Red, Green, Blue
+```
+
+You can create a parameterized enum:
+
+```scala
+enum Color(val rgb: Int):
+ case Red extends Color(0xFF0000)
+ case Green extends Color(0x00FF00)
+ case Blue extends Color(0x0000FF)
+```
+
+You can also create user-defined enum members:
+
+```scala
+enum Planet(mass: Double, radius: Double):
+ case Mercury extends Planet(3.303e+23, 2.4397e6)
+ case Venus extends Planet(4.869e+24,6.0518e6)
+ case Earth extends Planet(5.976e+24,6.37814e6)
+ // more planets here ...
+
+ private final val G = 6.67300E-11
+ def surfaceGravity = G * mass / (radius * radius)
+ def surfaceWeight(otherMass: Double) = otherMass * surfaceGravity
+```
+
+
+
+## Scala.js DOM Code
+
+Scala.js lets you write Scala code that is compiled to JavaScript code that can then be used in the browser.
+The approach is similar to TypeScript, ReScript, and other languages that are compiled to JavaScript.
+
+Once you include the necessary libraries, and import the necessary packages in your project, writing Scala.js code looks very similar to writing JavaScript code:
+
+```scala
+// show an alert dialog on a button click
+jQuery("#hello-button").click{() =>
+ dom.window.alert("Hello, world")
+}
+
+// define a button and what should happen when it’s clicked
+val btn = button(
+ "Click me",
+ onclick := { () =>
+ dom.window.alert("Hello, world")
+ })
+
+// create two divs with css classes, an h2 element, and the button
+val content =
+ div(cls := "foo",
+ div(cls := "bar",
+ h2("Hello"),
+ btn
+ )
+ )
+
+// add the content to the DOM
+val root = dom.document.getElementById("root")
+root.innerHTML = ""
+root.appendChild(content.render)
+```
+
+Note that although Scala is a type-safe language, no types are declared in the above code.
+Scala’s strong type inference capabilities often make Scala code look like it’s dynamically typed.
+But it is type-safe, so you catch many classes of errors early in the development cycle.
+
+
+
+## Other Scala.js resources
+
+The Scala.js website has an excellent collection of tutorials for JavaScript developers interested in using Scala.js.
+Here are some of their initial tutorials:
+
+- [Basic tutorial (creating a first Scala.js project)](https://www.scala-js.org/doc/tutorial/basic/)
+- [Scala.js for JavaScript developers](https://www.scala-js.org/doc/sjs-for-js/)
+- [From ES6 to Scala: Basics](https://www.scala-js.org/doc/sjs-for-js/es6-to-scala-part1.html)
+- [From ES6 to Scala: Collections](https://www.scala-js.org/doc/sjs-for-js/es6-to-scala-part2.html)
+- [From ES6 to Scala: Advanced](https://www.scala-js.org/doc/sjs-for-js/es6-to-scala-part3.html)
+
+
+
+## Concepts that are unique to Scala
+
+There are other concepts in Scala which currently have no equivalent in JavaScript:
+
+- Almost everything related to [contextual abstractions][contextual]
+- Method features:
+ - Multiple parameter lists
+ - Using named arguments when calling methods
+- Using traits as interfaces
+- Case classes
+- Companion classes and objects
+- The ability to create your own [control structures][control] and DSLs
+- Advanced features of `match` expressions and pattern matching
+- `for` comprehensions
+- Infix methods
+- Macros and metaprogramming
+- More ...
+
+
+[collections-classes]: {% link _overviews/scala3-book/collections-classes.md %}
+[concurrency]: {% link _overviews/scala3-book/concurrency.md %}
+[contextual]: {% link _overviews/scala3-book/ca-contextual-abstractions-intro.md %}
+[control]: {% link _overviews/scala3-book/control-structures.md %}
+[extension-methods]: {% link _overviews/scala3-book/ca-extension-methods.md %}
+[fp-intro]: {% link _overviews/scala3-book/fp-intro.md %}
+[givens]: {% link _overviews/scala3-book/ca-context-parameters.md %}
+[hofs]: {% link _overviews/scala3-book/fun-hofs.md %}
+[intersection-types]: {% link _overviews/scala3-book/types-intersection.md %}
+[modeling-fp]: {% link _overviews/scala3-book/domain-modeling-fp.md %}
+[modeling-oop]: {% link _overviews/scala3-book/domain-modeling-oop.md %}
+[multiversal]: {% link _overviews/scala3-book/ca-multiversal-equality.md %}
+[union-types]: {% link _overviews/scala3-book/types-union.md %}
+
+
diff --git a/_overviews/scala3-book/scala-for-python-devs.md b/_overviews/scala3-book/scala-for-python-devs.md
new file mode 100644
index 0000000000..147f5977f7
--- /dev/null
+++ b/_overviews/scala3-book/scala-for-python-devs.md
@@ -0,0 +1,1391 @@
+---
+title: Scala for Python Developers
+type: chapter
+description: This page is for Python developers who are interested in learning about Scala 3.
+languages: [zh-cn]
+num: 76
+previous-page: scala-for-javascript-devs
+next-page: where-next
+---
+
+{% include_relative scala4x.css %}
+
+
+
+{% comment %}
+
+NOTE: Hopefully someone with more Python experience can give this a thorough review.
+
+NOTE: On this page (https://contributors.scala-lang.org/t/feedback-sought-optional-braces/4702/10), Li Haoyi comments: “Python’s success also speaks for itself; beginners certainly don’t pick Python because of performance, ease of installation, packaging, IDE support, or simplicity of the language’s runtime semantics!” I’m not a Python expert, so these points are good to know, though I don’t want to go negative in any comparisons.
+It’s more like thinking, “Python developers will appreciate Scala’s performance, ease of installation, packaging, IDE support, etc.”
+{% endcomment %}
+
+{% comment %}
+TODO: We should probably go through this document and add links to our other detail pages, when time permits.
+{% endcomment %}
+
+This section provides a comparison between the Python and Scala programming languages.
+It’s intended for programmers who know Python and want to learn about Scala, specifically by seeing examples of how Python language features compare to Scala.
+
+## Introduction
+
+Before getting into the examples, this first section provides a relatively brief introduction and summary of the sections that follow.
+The two languages are first compared at a high level, and then at an everyday programming level.
+
+### High level similarities
+
+At a high level, Scala shares these *similarities* with Python:
+
+- Both are high-level programming languages, where you don’t have to concern yourself with low-level concepts like pointers and manual memory management
+- Both have a relatively simple, concise syntax
+- Both support a [functional style of programming][fp-intro]
+- Both are object-oriented programming (OOP) languages
+- Both have comprehensions: Python has list comprehensions and Scala has `for` comprehensions
+- Both languages have support for lambdas and [higher-order functions][hofs]
+- Both can be used with [Apache Spark](https://spark.apache.org) for big data processing
+- Both have a wealth of terrific libraries
+
+### High level differences
+
+Also at a high level, the _differences_ between Python and Scala are:
+
+- Python is dynamically typed, and Scala is statically typed
+ - Though it's dynamically typed, Python supports "gradual typing" with type hints, which are checked by static type checkers, like `mypy`
+ - Though it’s statically typed, Scala features like type inference make it feel like a dynamic language
+- Python is interpreted, and Scala code is compiled to _.class_ files, and runs on the Java Virtual Machine (JVM)
+- In addition to running on the JVM, the [Scala.js](https://www.scala-js.org) project lets you use Scala as a JavaScript replacement
+- The [Scala Native](https://scala-native.org/) project lets you write “systems” level code, and compiles to native executables
+- Everything in Scala is an _expression_: constructs like `if` statements, `for` loops, `match` expressions, and even `try`/`catch` expressions all have return values
+- Scala idioms favor immutability by default: you’re encouraged to use immutable variables and immutable collections
+- Scala has excellent support for [concurrent and parallel programming][concurrency]
+
+### Programming level similarities
+
+This section looks at the similarities you’ll see between Python and Scala when you write code on an everyday basis:
+
+- Scala’s type inference often makes it feel like a dynamically typed language
+- Neither language uses semicolons to end expressions
+- Both languages support the use of significant indentation rather than braces and parentheses
+- The syntax for defining methods is similar
+- Both have lists, dictionaries (maps), sets, and tuples
+- Both have comprehensions for mapping and filtering
+- Both have terrific IDE support
+- With Scala 3’s [toplevel definitions][toplevel] you can put method, field, and other definitions anywhere
+  - One difference is that Python can operate without even declaring a single method, while Scala 3 can’t do _everything_ at the toplevel; for instance, a [main method][main-method] (`@main def`) is required to start a Scala application, as shown in the small example after this list
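+
+For example, this is a complete Scala 3 program; the method name `hello` is arbitrary:
+
+```scala
+@main def hello() = println("Hello, world")
+```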
+
+### Programming level differences
+
+Also at a programming level, these are some of the differences you’ll see every day when writing code:
+
+- Programming in Scala feels very consistent:
+ - `val` and `var` fields are used consistently to define fields and parameters
+ - Lists, maps, sets, and tuples are all created and accessed similarly; for instance, parentheses are used to create all types---`List(1,2,3)`, `Set(1,2,3)`, `Map(1->"one")`---just like creating any other Scala class
+ - [Collections classes][collections-classes] generally have most of the same higher-order functions
+ - Pattern matching is used consistently throughout the language
+ - The syntax that’s used to define functions that are passed into methods is the same syntax that’s used to define anonymous functions
+- Scala variables and parameters are defined with the `val` (immutable) or `var` (mutable) keywords
+- Scala idioms prefer immutable data structures
+- Comments: Python uses `#` for comments; Scala uses the C, C++, and Java style: `//`, `/*...*/`, and `/**...*/`
+- Naming conventions: The Python standard is to use underscores like `my_list`; Scala uses `myList`
+- Scala is statically typed, so you declare types for method parameters, method return values, and in other places
+- Pattern matching and `match` expressions are used extensively in Scala (and will change the way you write code)
+- Traits are used heavily in Scala; interfaces and abstract classes are used less often in Python
+- Scala’s [contextual abstractions][contextual] and _term inference_ provide a collection of different features:
+ - [Extension methods][extension-methods] let you easily add new functionality to classes using a clear syntax
+ - [Multiversal equality][multiversal] lets you limit equality comparisons---at compile time---to only those comparisons that make sense
+- Scala has state-of-the-art open source functional programming libraries (see the [“Awesome Scala” list](https://github.com/lauris/awesome-scala))
+- You can create your own “control structures” and DSLs, thanks to features like objects, by-name parameters, infix notation, optional parentheses, extension methods, higher-order functions, and more
+- Scala code can run in the JVM and even be compiled to native images (using [Scala Native](https://github.com/scala-native/scala-native) and [GraalVM](https://www.graalvm.org)) for high performance
+- Many other goodies: companion classes and objects, macros, numeric literals, multiple parameter lists, [intersection][intersection-types] types, type-level programming, and more
+
+### Features compared with examples
+
+Given that introduction, the following sections provide side-by-side comparisons of Python and Scala programming language features.
+
+{% comment %}
+TODO: Update the Python examples to use four spaces. I started to do this, but then thought it would be better to do that in a separate PR.
+{% endcomment %}
+
+## Comments
+
+Python uses `#` for comments, while the Scala comment syntax is the same as languages like C, C++, and Java:
+
+
+
+
+
+ # a comment
+
+
+
+
+ // a comment
+ /* ... */
+ /** ... */
+
+
+
+
+
+## Variable assignment
+
+These examples demonstrate how to create variables in Python and Scala.
+
+### Create integer and string variables:
+
+
+
+
+
+ x = 1
+ x = "Hi"
+ y = """foo
+ bar
+ baz"""
+
+
+
+
+ val x = 1
+ val x = "Hi"
+ val y = """foo
+ bar
+ baz"""
+
+
+If a Scala field is going to be mutable, use `var` instead of `val` for variable definition:
+
+```scala
+var x = 1
+x += 1
+```
+
+However, the rule of thumb in Scala is to always use `val` unless the variable specifically needs to be mutated.
+
+## FP style records
+
+Scala case classes are similar to Python frozen dataclasses.
+
+### Constructor definition:
+
+
+
+
+
+ from dataclasses import dataclass, replace
+
+ @dataclass(frozen=True)
+ class Person:
+ name: str
+ age: int
+
+
+
+
+ case class Person(name: String, age: Int)
+
+
+
+
+
+### Create and use an instance:
+
+
+
+
+
+ p = Person("Alice", 42)
+ p.name # Alice
+ p2 = replace(p, age=43)
+
+
+
+
+ val p = Person("Alice", 42)
+ p.name // Alice
+ val p2 = p.copy(age = 43)
+
+
+
+
+
+## OOP style classes and methods
+
+This section provides comparisons of features related to OOP-style classes and methods.
+
+### OOP style class, primary constructor:
+
+
+
+
+
+ class Person(object):
+ def __init__(self, name):
+ self.name = name
+
+ def speak(self):
+ print(f'Hello, my name is {self.name}')
+
+
+
+
+ class Person (var name: String):
+ def speak() = println(s"Hello, my name is $name")
+
+
+
+
+
+### Create and use an instance:
+
+
+
+
+
+ p = Person("John")
+ p.name # John
+ p.name = 'Fred'
+ p.name # Fred
+ p.speak()
+
+
+
+
+ val p = Person("John")
+ p.name // John
+ p.name = "Fred"
+ p.name // Fred
+ p.speak()
+
+
+## Interfaces, traits, and inheritance
+
+If you’re familiar with Java 8 and newer, Scala traits are similar to those Java interfaces.
+Traits are used all the time in Scala, while Python interfaces (Protocols) and abstract classes are used much less often.
+Therefore, rather than attempt to compare the two, this example shows how to use Scala traits to build a small solution to a simulated math problem:
+
+```scala
+trait Adder:
+ def add(a: Int, b: Int) = a + b
+
+trait Multiplier:
+ def multiply(a: Int, b: Int) = a * b
+
+// create a class from the traits
+class SimpleMath extends Adder, Multiplier
+val sm = new SimpleMath
+sm.add(1,1) // 2
+sm.multiply(2,2) // 4
+```
+
+There are [many other ways to use traits with classes and objects][modeling-intro], but this gives you a little idea of how they can be used to organize concepts into logical groups of behavior, and then merge them as needed to create a complete solution.
+
+## Control structures
+
+This section compares [control structures][control-structures] in Python and Scala.
+Both languages have constructs like `if`/`else`, `while`, `for` loops, and `try`.
+Scala also has `match` expressions.
+
+### `if` statement, one line:
+
+
+
+
+
+ if x == 1: print(x)
+
+
+
+
+ if x == 1 then println(x)
+
+
+
+
+
+### `if` statement, multiline:
+
+
+
+
+
+ if x == 1:
+ print("x is 1, as you can see:")
+ print(x)
+
+
+
+
+ if x == 1 then
+ println("x is 1, as you can see:")
+ println(x)
+
+
+
+
+
+### if, else if, else:
+
+
+
+
+
+ if x < 0:
+ print("negative")
+ elif x == 0:
+ print("zero")
+ else:
+ print("positive")
+
+
+
+
+ if x < 0 then
+ println("negative")
+ else if x == 0 then
+ println("zero")
+ else
+ println("positive")
+
+
+
+
+
+### Returning a value from `if`:
+
+
+
+
+
+ min_val = a if a < b else b
+
+
+
+
+ val minValue = if a < b then a else b
+
+
+
+
+
+### `if` as the body of a method:
+
+
+
+
+
+ def min(a, b):
+ return a if a < b else b
+
+
+
+
+ def min(a: Int, b: Int): Int =
+ if a < b then a else b
+
+
+
+
+
+### `while` loop:
+
+
+
+
+
+ i = 1
+ while i < 3:
+ print(i)
+ i += 1
+
+
+
+
+ var i = 1
+ while i < 3 do
+ println(i)
+ i += 1
+
+
+
+
+
+### `for` loop with range:
+
+
+
+
+
+ for i in range(0,3):
+ print(i)
+
+
+
+
+ // preferred
+ for i <- 0 until 3 do println(i)
+
+ // also available
+ for (i <- 0 until 3) println(i)
+
+ // multiline syntax
+ for
+ i <- 0 until 3
+ do
+ println(i)
+
+
+
+
+
+### `for` loop with a list:
+
+
+
+
+
+ for i in ints: print(i)
+
+ for i in ints:
+ print(i)
+
+
+
+
+ for i <- ints do println(i)
+
+
+
+
+
+### `for` loop, multiple lines:
+
+
+
+
+
+ for i in ints:
+ x = i * 2
+ print(f"i = {i}, x = {x}")
+
+
+
+
+ for
+ i <- ints
+ do
+ val x = i * 2
+ println(s"i = $i, x = $x")
+
+
+
+
+
+### Multiple “range” generators:
+
+
+
+
+
+ for i in range(1,3):
+ for j in range(4,6):
+ for k in range(1,10,3):
+ print(f"i = {i}, j = {j}, k = {k}")
+
+
+
+
+ for
+ i <- 1 to 2
+ j <- 4 to 5
+ k <- 1 until 10 by 3
+ do
+ println(s"i = $i, j = $j, k = $k")
+
+
+
+
+
+### Generator with guards (`if` expressions):
+
+
+
+
+
+ for i in range(1,11):
+ if i % 2 == 0:
+ if i < 5:
+ print(i)
+
+
+
+
+ for
+ i <- 1 to 10
+ if i % 2 == 0
+ if i < 5
+ do
+ println(i)
+
+
+
+
+
+### Multiple `if` conditions per line:
+
+
+
+
+
+ for i in range(1,11):
+ if i % 2 == 0 and i < 5:
+ print(i)
+
+
+
+
+ for
+ i <- 1 to 10
+ if i % 2 == 0 && i < 5
+ do
+ println(i)
+
+
+
+
+
+### Comprehensions:
+
+
+
+
+
+ xs = [i * 10 for i in range(1, 4)]
+ # xs: [10,20,30]
+
+
+
+
+ val xs = for i <- 1 to 3 yield i * 10
+ // xs: Vector(10, 20, 30)
+
+
+
+
+
+### `match` expressions:
+
+
+
+
+
+ # From 3.10, Python supports structural pattern matching
+ # You can also use dictionaries for basic “switch” functionality
+ match month:
+ case 1:
+ monthAsString = "January"
+ case 2:
+ monthAsString = "February"
+ case _:
+ monthAsString = "Other"
+
+
+
+
+ val monthAsString = month match
+ case 1 => "January"
+ case 2 => "February"
+    case _ => "Other"
+
+
+
+
+
+### switch/match:
+
+
+
+
+
+ # Only from Python 3.10
+ match i:
+ case 1 | 3 | 5 | 7 | 9:
+ numAsString = "odd"
+ case 2 | 4 | 6 | 8 | 10:
+ numAsString = "even"
+ case _:
+ numAsString = "too big"
+
+
+
+
+ val numAsString = i match
+ case 1 | 3 | 5 | 7 | 9 => "odd"
+ case 2 | 4 | 6 | 8 | 10 => "even"
+ case _ => "too big"
+
+
+Match expressions and pattern matching are a big part of the Scala programming experience, but only a few `match` expression features are shown here. See the [Control Structures][control-structures] page for many more examples.
+
+## Collections classes
+
+This section compares the [collections classes][collections-classes] that are available in Python and Scala, including lists, dictionaries/maps, sets, and tuples.
+
+### Lists
+
+Where Python has its list, Scala has several different specialized mutable and immutable sequence classes, depending on your needs.
+Because the Python list is mutable, it most directly compares to Scala’s `ArrayBuffer`.
+
+### Python list & Scala sequences:
+
+
+
+
+
+ a = [1,2,3]
+
+
+
+
+ // use different sequence classes
+ // as needed
+ val a = List(1,2,3)
+ val a = Vector(1,2,3)
+ val a = ArrayBuffer(1,2,3)
+
+ for i in ints: print(i)
+
+ for i in ints:
+ print(i)
+
+
+
+
+ // preferred
+ for i <- ints do println(i)
+
+ // also available
+ for (i <- ints) println(i)
+
+
+
+
+
+Scala’s main sequence classes are `List`, `Vector`, and `ArrayBuffer`.
+`List` and `Vector` are the main classes to use when you want an immutable sequence, and `ArrayBuffer` is the main class to use when you want a mutable sequence.
+(A “buffer” in Scala is a sequence that can grow and shrink.)
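+
+As a brief sketch of that “grow and shrink” behavior (results shown in comments):
+
+```scala
+import scala.collection.mutable.ArrayBuffer
+
+val buf = ArrayBuffer(1, 2, 3)
+
+buf += 4           // ArrayBuffer(1, 2, 3, 4)
+buf ++= List(5, 6) // ArrayBuffer(1, 2, 3, 4, 5, 6)
+buf.remove(0)      // removes and returns 1
+buf.clear()        // ArrayBuffer()
+```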
+
+### Dictionary/Map
+
+The Python dictionary is like the _mutable_ Scala `Map` class.
+However, the default Scala map is _immutable_, and has a number of transformation methods to let you easily create new maps.
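+
+For example, “updating” an immutable Scala map returns a new map and leaves the original untouched:
+
+```scala
+val m = Map("a" -> 1, "b" -> 2)
+
+val m2 = m + ("c" -> 3)     // Map(a -> 1, b -> 2, c -> 3)
+val m3 = m - "a"            // Map(b -> 2)
+val m4 = m.updated("a", 10) // Map(a -> 10, b -> 2)
+// m is still Map(a -> 1, b -> 2)
+```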
+
+#### Dictionary/Map creation:
+
+
+
+
+
+ my_dict = {
+ 'a': 1,
+ 'b': 2,
+ 'c': 3
+ }
+
+
+
+
+ val myMap = Map(
+ "a" -> 1,
+ "b" -> 2,
+ "c" -> 3
+ )
+
+
+
+
+
+#### Accessing dictionary/map elements:
+
+
+
+
+
+ my_dict['a'] # 1
+
+
+
+
+ myMap("a") // 1
+
+
+
+
+
+#### Dictionary/Map with a `for` loop:
+
+
+
+
+
+ for key, value in my_dict.items():
+ print(key)
+ print(value)
+
+
+
+
+ for (key,value) <- myMap do
+ println(key)
+ println(value)
+
+
+
+
+
+Scala has other specialized `Map` classes for different needs.
+
+### Sets
+
+The Python set is similar to the _mutable_ Scala `Set` class.
+
+#### Set creation:
+
+
+
+
+
+ set = {"a", "b", "c"}
+
+
+
+
+    val set = Set("a", "b", "c")
+
+
+
+
+
+#### Duplicate elements:
+
+
+
+
+
+ set = {1,2,1}
+ # set: {1,2}
+
+
+
+
+ val set = Set(1,2,1)
+ // set: Set(1,2)
+
+
+
+
+
+Scala has other specialized `Set` classes for different needs.
+
+### Tuples
+
+Python and Scala tuples are also similar.
+
+#### Tuple creation:
+
+
+
+
+
+ t = (11, 11.0, "Eleven")
+
+
+
+
+ val t = (11, 11.0, "Eleven")
+
+
+
+
+
+#### Accessing tuple elements:
+
+
+
+
+
+ t[0] # 11
+ t[1] # 11.0
+
+
+
+
+ t(0) // 11
+ t(1) // 11.0
+
+
+
+
+
+## Methods on collections classes
+
+Python and Scala have several of the same common functional methods available to them:
+
+- `map`
+- `filter`
+- `reduce`
+
+If you’re used to using these methods with lambda expressions in Python, you’ll see that Scala has a similar approach with methods on its collections classes.
+To demonstrate this functionality, here are two sample lists:
+
+```scala
+numbers = [1,2,3] // python
+val numbers = List(1,2,3) // scala
+```
+
+Those lists are used in the following examples, which show how to apply mapping and filtering algorithms to them.
+
+### Mapping with a comprehension:
+
+
+
+
+
+ x = [i * 10 for i in numbers]
+
+
+
+
+ val x = for i <- numbers yield i * 10
+
+
+
+
+
+### Filtering with a comprehension:
+
+
+
+
+
+ evens = [i for i in numbers if i % 2 == 0]
+
+
+
+
+ val evens = numbers.filter(_ % 2 == 0)
+ // or
+ val evens = for i <- numbers if i % 2 == 0 yield i
+
+
+
+
+
+### Mapping & filtering with a comprehension:
+
+
+
+
+
+ x = [i * 10 for i in numbers if i % 2 == 0]
+
+
+
+
+ val x = numbers.filter(_ % 2 == 0).map(_ * 10)
+ // or
+ val x = for i <- numbers if i % 2 == 0 yield i * 10
+
+
+
+
+
+### Mapping:
+
+
+
+
+
+ x = map(lambda x: x * 10, numbers)
+
+
+
+
+ val x = numbers.map(_ * 10)
+
+
+
+
+
+### Filtering:
+
+
+
+
+
+ f = lambda x: x > 1
+ x = filter(f, numbers)
+
+
+
+
+ val x = numbers.filter(_ > 1)
+
+
+
+
+
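+Though `reduce` isn’t shown in the comparisons above, it’s just as direct in Scala. Here’s a quick sketch that reuses the `numbers` list defined earlier (the Python counterpart is `functools.reduce`):
+
+```scala
+// roughly equivalent to Python’s functools.reduce(lambda a, b: a + b, numbers)
+val total = numbers.reduce(_ + _) // 6
+```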
+
+### Scala collections methods
+
+Scala collections classes have over 100 functional methods to simplify your code.
+In Python, some of these functions are available in the `itertools` module.
+In addition to `map`, `filter`, and `reduce`, other commonly-used methods in Scala are listed below.
+In those method examples:
+
+- `c` refers to a collection
+- `p` is a predicate
+- `f` is a function, anonymous function, or method
+- `n` refers to an integer value
+
+These are some of the filtering methods that are available:
+
+| Method | Description |
+| -------------- | ------------- |
+| `c1.diff(c2)` | Returns the difference of the elements in `c1` and `c2`. |
+| `c.distinct` | Returns the unique elements in `c`. |
+| `c.drop(n)` | Returns all elements in the collection except the first `n` elements. |
+| `c.filter(p)` | Returns all elements from the collection for which the predicate is `true`. |
+| `c.head` | Returns the first element of the collection. (Throws a `NoSuchElementException` if the collection is empty.) |
+| `c.tail` | Returns all elements from the collection except the first element. (Throws an `UnsupportedOperationException` if the collection is empty.) |
+| `c.take(n)` | Returns the first `n` elements of the collection `c`. |
+
+Here are a few transformer methods:
+
+| Method | Description |
+| --------------- | ------------- |
+| `c.flatten` | Converts a collection of collections (such as a list of lists) to a single collection (single list). |
+| `c.flatMap(f)` | Returns a new collection by applying `f` to all elements of the collection `c` (like `map`), and then flattening the elements of the resulting collections. |
+| `c.map(f)` | Creates a new collection by applying `f` to all elements of the collection `c`. |
+| `c.reduce(f)` | Applies the “reduction” function `f` to successive elements in `c` to yield a single value. |
+| `c.sortWith(f)` | Returns a version of `c` that’s sorted by the comparison function `f`. |
+
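+To give a feel for the transformer methods, here’s a quick sketch (the sample lists and results are only for illustration):
+
+```scala
+val a = List(3, 1, 2)
+a.map(_ * 10)     // List(30, 10, 20)
+a.sortWith(_ < _) // List(1, 2, 3)
+a.reduce(_ + _)   // 6
+
+val b = List(List(1, 2), List(3, 4))
+b.flatten                       // List(1, 2, 3, 4)
+b.flatMap(xs => xs.map(_ * 10)) // List(10, 20, 30, 40)
+```
+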
+Some common grouping methods:
+
+| Method | Description |
+| ---------------- | ------------- |
+| `c.groupBy(f)` | Partitions the collection into a `Map` of collections according to `f`. |
+| `c.partition(p)` | Returns two collections according to the predicate `p`. |
+| `c.span(p)` | Returns a collection of two collections, the first created by `c.takeWhile(p)`, and the second created by `c.dropWhile(p)`. |
+| `c.splitAt(n)` | Returns a collection of two collections by splitting the collection `c` at element `n`. |
+
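+For example, here’s a sketch of how the grouping methods split a list (results shown for illustration):
+
+```scala
+val nums = List(1, 2, 3, 4, 5)
+nums.groupBy(_ % 2 == 0) // Map(false -> List(1, 3, 5), true -> List(2, 4))
+nums.partition(_ > 2)    // (List(3, 4, 5), List(1, 2))
+nums.span(_ < 3)         // (List(1, 2), List(3, 4, 5))
+nums.splitAt(2)          // (List(1, 2), List(3, 4, 5))
+```
+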
+Some informational and mathematical methods:
+
+| Method | Description |
+| -------------- | ------------- |
+| `c1.containsSlice(c2)` | Returns `true` if `c1` contains the sequence `c2`. |
+| `c.count(p)` | Counts the number of elements in `c` where `p` is `true`. |
+| `c.distinct` | Returns the unique elements in `c`. |
+| `c.exists(p)` | Returns `true` if `p` is `true` for any element in the collection. |
+| `c.find(p)` | Returns the first element that matches `p`. The element is returned as `Option[A]`. |
+| `c.min` | Returns the smallest element from the collection. (Can throw _java.lang.UnsupportedOperationException_.) |
+| `c.max` | Returns the largest element from the collection. (Can throw _java.lang.UnsupportedOperationException_.) |
+| `c.slice(from, to)` | Returns the interval of elements beginning at index `from` and ending just before index `to`. |
+| `c.sum` | Returns the sum of all elements in the collection. (Requires a `Numeric` to be defined for the elements in the collection.) |
+
+Here are a few examples that demonstrate how these methods work on a list:
+
+```scala
+val a = List(10, 20, 30, 40, 10) // List(10, 20, 30, 40, 10)
+a.distinct // List(10, 20, 30, 40)
+a.drop(2) // List(30, 40, 10)
+a.dropRight(2) // List(10, 20, 30)
+a.dropWhile(_ < 25) // List(30, 40, 10)
+a.filter(_ < 25) // List(10, 20, 10)
+a.filter(_ > 100) // List()
+a.find(_ > 20) // Some(30)
+a.head // 10
+a.headOption // Some(10)
+a.init // List(10, 20, 30, 40)
+a.intersect(List(19,20,21)) // List(20)
+a.last // 10
+a.lastOption // Some(10)
+a.slice(2,4) // List(30, 40)
+a.tail // List(20, 30, 40, 10)
+a.take(3) // List(10, 20, 30)
+a.takeRight(2) // List(40, 10)
+a.takeWhile(_ < 30) // List(10, 20)
+```
+
+These methods show a common pattern in Scala: Functional methods that are available on objects.
+None of these methods mutate the initial list `a`; instead, they all return the new data shown in the comments.
+
+There are many more methods available, but hopefully these descriptions and examples give you a taste of the power that’s available in the pre-built collections methods.
+
+## Enums
+
+This section compares enums (enumerations) in Python and Scala 3.
+
+### Creating enums:
+
+
+
+
+
+ from enum import Enum, auto
+ class Color(Enum):
+ RED = auto()
+ GREEN = auto()
+ BLUE = auto()
+
+
+
+
+ enum Color:
+ case Red, Green, Blue
+
+
+
+
+
+### Values and comparison:
+
+
+
+
+
+ Color.RED == Color.BLUE # False
+
+
+
+
+ Color.Red == Color.Blue // false
+
+
+
+
+
+### Parameterized enums:
+
+
+
+
+
+ N/A
+
+
+
+
+ enum Color(val rgb: Int):
+ case Red extends Color(0xFF0000)
+ case Green extends Color(0x00FF00)
+ case Blue extends Color(0x0000FF)
+
+
+
+
+
+### User-defined enum members:
+
+
+
+
+
+ N/A
+
+
+
+
+ enum Planet(
+ mass: Double,
+ radius: Double
+ ):
+ case Mercury extends
+ Planet(3.303e+23, 2.4397e6)
+ case Venus extends
+ Planet(4.869e+24, 6.0518e6)
+ case Earth extends
+ Planet(5.976e+24, 6.37814e6)
+ // more planets ...
+
+ // fields and methods
+ private final val G = 6.67300E-11
+ def surfaceGravity = G * mass /
+ (radius * radius)
+ def surfaceWeight(otherMass: Double)
+ = otherMass * surfaceGravity
+
+
+
+
+
+## Concepts that are unique to Scala
+
+There are other concepts in Scala which currently don’t have equivalent functionality in Python.
+Follow the links below for more details:
+
+- Most concepts related to [contextual abstractions][contextual], such as [extension methods][extension-methods], [type classes][type-classes], and implicit values
+- Scala allows multiple parameter lists, which enables features like partially-applied functions, and the ability to create your own DSLs
+- The ability to create your own control structures and DSLs
+- [Multiversal equality][multiversal]: the ability to control at compile time what equality comparisons make sense
+- Infix methods
+- Macros
+
+## Scala and virtual environments
+
+In Scala, there is no need to explicitly set up the equivalent of a Python virtual environment. By default, Scala build tools manage project dependencies such that users do not have to think about manual package installation. For example, using the `sbt` build tool, we specify dependencies inside the `build.sbt` file under the `libraryDependencies` setting, and then executing
+
+```
+cd myapp
+sbt compile
+```
+
+automatically resolves all dependencies for that particular project. The location of downloaded dependencies is largely an implementation detail of the build tool, and users do not have to interact with these downloaded dependencies directly. For example, if we delete the whole sbt dependencies cache, on the next compilation of the project, sbt simply resolves and downloads all the required dependencies again, automatically.
+
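+For illustration, a minimal `build.sbt` might look like the following sketch (the version numbers and the dependency are just examples):
+
+```scala
+// build.sbt (a minimal sketch)
+scalaVersion := "3.3.1"
+
+// dependencies are listed here; sbt resolves and downloads them automatically
+libraryDependencies += "org.scalameta" %% "munit" % "0.7.29" % Test
+```
+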
+This differs from Python, where by default dependencies are installed in system-wide or user-wide directories, so to obtain an isolated environment on a per-project basis one has to create a corresponding virtual environment. For example, using the `venv` module, we might create one for a particular project like so
+
+```
+cd myapp
+python3 -m venv myapp-env
+source myapp-env/bin/activate
+pip install -r requirements.txt
+```
+
+This installs all the dependencies under the project's `myapp/myapp-env` directory and alters the shell environment variable `PATH` to look up dependencies from `myapp-env`.
+None of this manual process is necessary in Scala.
+
+
+[collections-classes]: {% link _overviews/scala3-book/collections-classes.md %}
+[concurrency]: {% link _overviews/scala3-book/concurrency.md %}
+[contextual]: {% link _overviews/scala3-book/ca-contextual-abstractions-intro.md %}
+[control-structures]: {% link _overviews/scala3-book/control-structures.md %}
+[extension-methods]: {% link _overviews/scala3-book/ca-extension-methods.md %}
+[fp-intro]: {% link _overviews/scala3-book/fp-intro.md %}
+[hofs]: {% link _overviews/scala3-book/fun-hofs.md %}
+[intersection-types]: {% link _overviews/scala3-book/types-intersection.md %}
+[main-method]: {% link _overviews/scala3-book/methods-main-methods.md %}
+[modeling-intro]: {% link _overviews/scala3-book/domain-modeling-intro.md %}
+[multiversal]: {% link _overviews/scala3-book/ca-multiversal-equality.md %}
+[toplevel]: {% link _overviews/scala3-book/taste-toplevel-definitions.md %}
+[type-classes]: {% link _overviews/scala3-book/ca-type-classes.md %}
+[union-types]: {% link _overviews/scala3-book/types-union.md %}
+
diff --git a/_overviews/scala3-book/scala-tools.md b/_overviews/scala3-book/scala-tools.md
new file mode 100644
index 0000000000..4469a7283d
--- /dev/null
+++ b/_overviews/scala3-book/scala-tools.md
@@ -0,0 +1,14 @@
+---
+title: Scala Tools
+type: chapter
+description: This chapter looks at two commonly-used Scala tools, sbt and ScalaTest.
+languages: [ru, zh-cn]
+num: 70
+previous-page: concurrency
+next-page: tools-sbt
+---
+
+This chapter introduces two ways to write and run Scala programs:
+
+- by creating Scala projects, possibly containing multiple files, and defining a program entry point,
+- by interacting with a worksheet, which is a program defined in a single file, executed line by line.
diff --git a/_overviews/scala3-book/scala4x.css b/_overviews/scala3-book/scala4x.css
new file mode 100644
index 0000000000..1772c03ac8
--- /dev/null
+++ b/_overviews/scala3-book/scala4x.css
@@ -0,0 +1,54 @@
+
+
+
diff --git a/_overviews/scala3-book/string-interpolation.md b/_overviews/scala3-book/string-interpolation.md
new file mode 100644
index 0000000000..1ba335e3b7
--- /dev/null
+++ b/_overviews/scala3-book/string-interpolation.md
@@ -0,0 +1,370 @@
+---
+title: String Interpolation
+type: chapter
+description: This page provides more information about creating strings and using string interpolation.
+languages: [ru, zh-cn]
+num: 18
+previous-page: first-look-at-types
+next-page: control-structures
+redirect_from:
+ - /overviews/core/string-interpolation.html
+---
+
+## Introduction
+
+String interpolation provides a way to use variables inside strings.
+For instance:
+
+{% tabs example-1 %}
+{% tab 'Scala 2 and 3' for=example-1 %}
+```scala
+val name = "James"
+val age = 30
+println(s"$name is $age years old") // "James is 30 years old"
+```
+{% endtab %}
+{% endtabs %}
+
+Using string interpolation consists of putting an `s` in front of your string
+quotes, and prefixing any variable names with a `$` symbol.
+
+## String Interpolators
+
+The `s` that you place before the string is just one possible interpolator that Scala
+provides.
+
+Scala provides three string interpolation methods out of the box: `s`, `f` and `raw`.
+Further, a string interpolator is just a special method, so it is possible to define your
+own. For instance, some database libraries define a `sql` interpolator that returns a
+database query.
+
+### The `s` Interpolator (`s`-Strings)
+
+Prepending `s` to any string literal allows the usage of variables directly in the string. You've already seen an example here:
+
+{% tabs example-2 %}
+{% tab 'Scala 2 and 3' for=example-2 %}
+```scala
+val name = "James"
+val age = 30
+println(s"$name is $age years old") // "James is 30 years old"
+```
+{% endtab %}
+{% endtabs %}
+
+Here, the `$name` and `$age` placeholders in the string are replaced by the results of
+calling `name.toString` and `age.toString`, respectively. The `s`-String will have
+access to all variables that are currently in scope.
+
+While it may seem obvious, it's important to note here that string interpolation will _not_ happen in normal string literals:
+
+{% tabs example-3 %}
+{% tab 'Scala 2 and 3' for=example-3 %}
+```scala
+val name = "James"
+val age = 30
+println("$name is $age years old") // "$name is $age years old"
+```
+{% endtab %}
+{% endtabs %}
+
+String interpolators can also take arbitrary expressions. For example:
+
+{% tabs example-4 %}
+{% tab 'Scala 2 and 3' for=example-4 %}
+```scala
+println(s"2 + 2 = ${2 + 2}") // "2 + 2 = 4"
+val x = -1
+println(s"x.abs = ${x.abs}") // "x.abs = 1"
+```
+{% endtab %}
+{% endtabs %}
+
+Any arbitrary expression can be embedded in `${}`.
+
+For some special characters, it is necessary to escape them when embedded within a string.
+To represent an actual dollar sign you can double it `$$`, like here:
+
+{% tabs example-5 %}
+{% tab 'Scala 2 and 3' for=example-5 %}
+```scala
+println(s"New offers starting at $$14.99") // "New offers starting at $14.99"
+```
+{% endtab %}
+{% endtabs %}
+
+Double quotes also need to be escaped. This can be done by using triple quotes as shown:
+
+{% tabs example-6 %}
+{% tab 'Scala 2 and 3' for=example-6 %}
+```scala
+println(s"""{"name":"James"}""") // `{"name":"James"}`
+```
+{% endtab %}
+{% endtabs %}
+
+Finally, all multi-line string literals can also be interpolated
+
+{% tabs example-7 %}
+{% tab 'Scala 2 and 3' for=example-7 %}
+```scala
+println(s"""name: "$name",
+ |age: $age""".stripMargin)
+```
+
+This will print as follows:
+
+```
+name: "James",
+age: 30
+```
+{% endtab %}
+{% endtabs %}
+
+### The `f` Interpolator (`f`-Strings)
+
+Prepending `f` to any string literal allows the creation of simple formatted strings, similar to `printf` in other languages. When using the `f`
+interpolator, all variable references should be followed by a `printf`-style format string, like `%d`. Let's look at an example:
+
+{% tabs example-8 %}
+{% tab 'Scala 2 and 3' for=example-8 %}
+```scala
+val height = 1.9d
+val name = "James"
+println(f"$name%s is $height%2.2f meters tall") // "James is 1.90 meters tall"
+```
+{% endtab %}
+{% endtabs %}
+
+The `f` interpolator is typesafe. If you try to pass a format string that only works for integers but pass a double, the compiler will issue an
+error. For example:
+
+{% tabs f-interpolator-error class=tabs-scala-version %}
+
+{% tab 'Scala 2' for=f-interpolator-error %}
+```scala
+val height: Double = 1.9d
+
+scala> f"$height%4d"
+:9: error: type mismatch;
+ found : Double
+ required: Int
+ f"$height%4d"
+ ^
+```
+{% endtab %}
+
+{% tab 'Scala 3' for=f-interpolator-error %}
+```scala
+val height: Double = 1.9d
+
+scala> f"$height%4d"
+-- Error: ----------------------------------------------------------------------
+1 |f"$height%4d"
+ | ^^^^^^
+ | Found: (height : Double), Required: Int, Long, Byte, Short, BigInt
+1 error found
+
+```
+{% endtab %}
+{% endtabs %}
+
+The `f` interpolator makes use of the string format utilities available from Java. The formats allowed after the `%` character are outlined in the
+[Formatter javadoc][java-format-docs]. If there is no `%` character after a variable
+reference, a formatter of `%s` (`String`) is assumed.
+
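+A quick sketch of that default behavior:
+
+{% tabs f-default-format %}
+{% tab 'Scala 2 and 3' for=f-default-format %}
+```scala
+val name = "James"
+val height = 1.9d
+println(f"$name is $height meters tall") // "James is 1.9 meters tall"
+```
+{% endtab %}
+{% endtabs %}
+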
+Finally, as in Java, use `%%` to get a literal `%` character in the output string:
+
+{% tabs literal-percent %}
+{% tab 'Scala 2 and 3' for=literal-percent %}
+```scala
+println(f"3/19 is less than 20%%") // "3/19 is less than 20%"
+```
+{% endtab %}
+{% endtabs %}
+
+### The `raw` Interpolator
+
+The raw interpolator is similar to the `s` interpolator except that it performs no escaping of literals within the string. Here's an example processed string:
+
+{% tabs example-9 %}
+{% tab 'Scala 2 and 3' for=example-9 %}
+```scala
+scala> s"a\nb"
+res0: String =
+a
+b
+```
+{% endtab %}
+{% endtabs %}
+
+Here the `s` string interpolator replaced the characters `\n` with a return character. The `raw` interpolator will not do that.
+
+{% tabs example-10 %}
+{% tab 'Scala 2 and 3' for=example-10 %}
+```scala
+scala> raw"a\nb"
+res1: String = a\nb
+```
+{% endtab %}
+{% endtabs %}
+
+The raw interpolator is useful when you want to avoid having expressions like `\n` turn into a return character.
+
+Furthermore, the raw interpolator allows the usage of variables, which are replaced with their values, just like the `s` interpolator.
+
+{% tabs example-11 %}
+{% tab 'Scala 2 and 3' for=example-11 %}
+```scala
+scala> val foo = 42
+scala> raw"a\n$foo"
+res1: String = a\n42
+```
+{% endtab %}
+{% endtabs %}
+
+## Advanced Usage
+
+In addition to the three default string interpolators, users can define their own.
+
+The literal `s"Hi $name"` is parsed by Scala as a _processed_ string literal.
+This means that the compiler does some additional work to this literal. The specifics
+of processed strings and string interpolation are described in [SIP-11][sip-11], but
+here's a quick example to help illustrate how they work.
+
+### Custom Interpolators
+
+In Scala, all processed string literals are simple code transformations. Anytime the compiler encounters a processed string literal of the form:
+
+{% tabs example-12 %}
+{% tab 'Scala 2 and 3' for=example-12 %}
+```scala
+id"string content"
+```
+{% endtab %}
+{% endtabs %}
+
+it transforms it into a method call (`id`) on an instance of [StringContext](https://www.scala-lang.org/api/current/scala/StringContext.html).
+This method can also be made available via implicit scope.
+To define our own string interpolation, we need to create an implicit class (Scala 2) or an `extension` method (Scala 3) that adds a new method to `StringContext`.
+
+As a trivial example, let's assume we have a simple `Point` class and want to create a custom interpolator that turns `p"a,b"` into a `Point` object.
+
+{% tabs custom-interpolator-1 %}
+{% tab 'Scala 2 and 3' for=custom-interpolator-1 %}
+```scala
+case class Point(x: Double, y: Double)
+
+val pt = p"1,-2" // Point(1.0,-2.0)
+```
+{% endtab %}
+{% endtabs %}
+
+We'd create a custom `p`-interpolator by first implementing a `StringContext` extension
+with something like:
+
+{% tabs custom-interpolator-2 class=tabs-scala-version %}
+
+{% tab 'Scala 2' for=custom-interpolator-2 %}
+```scala
+implicit class PointHelper(val sc: StringContext) extends AnyVal {
+ def p(args: Any*): Point = ???
+}
+```
+
+**Note:** It's important to extend `AnyVal` in Scala 2.x to prevent runtime instantiation on each interpolation. See the [value class]({% link _overviews/core/value-classes.md %}) documentation for more.
+
+{% endtab %}
+
+{% tab 'Scala 3' for=custom-interpolator-2 %}
+```scala
+extension (sc: StringContext)
+ def p(args: Any*): Point = ???
+```
+{% endtab %}
+
+{% endtabs %}
+
+Once this extension is in scope and the Scala compiler encounters `p"some string"`, it
+will process `some string` to turn it into String tokens and expression arguments for
+each embedded variable in the string.
+
+For example, `p"1, $someVar"` would turn into:
+
+{% tabs extension-desugaring class=tabs-scala-version %}
+
+{% tab 'Scala 2' for=extension-desugaring %}
+```scala
+new StringContext("1, ", "").p(someVar)
+```
+
+The implicit class is then used to rewrite it to the following:
+
+```scala
+new PointHelper(new StringContext("1, ", "")).p(someVar)
+```
+{% endtab %}
+
+{% tab 'Scala 3' for=extension-desugaring %}
+```scala
+StringContext("1, ","").p(someVar)
+```
+{% endtab %}
+
+{% endtabs %}
+
+As a result, each of the fragments of the processed string is exposed in the
+`StringContext.parts` member, while any expression values in the string are passed in
+to the method's `args` parameter.
+
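+For instance, here’s a rough sketch of what that looks like for `p"1, $someVar"`:
+
+{% tabs parts-args-sketch %}
+{% tab 'Scala 2 and 3' for=parts-args-sketch %}
+```scala
+val someVar = 42
+val sc = StringContext("1, ", "")
+sc.parts // Seq("1, ", "")
+// the expression values are passed to the interpolator method,
+// so `args` would be Seq(someVar), i.e. Seq(42)
+```
+{% endtab %}
+{% endtabs %}
+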
+### Example Implementation
+
+A naive implementation of our Point interpolator method might look something like the code below,
+though a more sophisticated method may choose to have more precise control over the
+processing of the string `parts` and expression `args` instead of reusing the
+`s`-Interpolator.
+
+{% tabs naive-implementation class=tabs-scala-version %}
+
+{% tab 'Scala 2' for=naive-implementation %}
+```scala
+implicit class PointHelper(val sc: StringContext) extends AnyVal {
+ def p(args: Double*): Point = {
+ // reuse the `s`-interpolator and then split on ','
+ val pts = sc.s(args: _*).split(",", 2).map { _.toDoubleOption.getOrElse(0.0) }
+ Point(pts(0), pts(1))
+ }
+}
+
+val x = 12.0
+
+p"1, -2" // Point(1.0, -2.0)
+p"${x/5}, $x" // Point(2.4, 12.0)
+```
+{% endtab %}
+
+{% tab 'Scala 3' for=naive-implementation %}
+```scala
+extension (sc: StringContext)
+ def p(args: Double*): Point = {
+ // reuse the `s`-interpolator and then split on ','
+ val pts = sc.s(args: _*).split(",", 2).map { _.toDoubleOption.getOrElse(0.0) }
+ Point(pts(0), pts(1))
+ }
+
+val x = 12.0
+
+p"1, -2" // Point(1.0, -2.0)
+p"${x/5}, $x" // Point(2.4, 12.0)
+```
+{% endtab %}
+{% endtabs %}
+
+While string interpolators were originally used to create some form of a String, the use
+of custom interpolators as above can allow for powerful syntactic shorthand, and the
+community has already made swift use of this syntax for things like ANSI terminal color
+expansion, executing SQL queries, magic `$"identifier"` representations, and many others.
+
+[java-format-docs]: https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/util/Formatter.html#detail
+[value-class]: {% link _overviews/core/value-classes.md %}
+[sip-11]: {% link _sips/sips/string-interpolation.md %}
diff --git a/_overviews/scala3-book/taste-collections.md b/_overviews/scala3-book/taste-collections.md
new file mode 100644
index 0000000000..773f823ce4
--- /dev/null
+++ b/_overviews/scala3-book/taste-collections.md
@@ -0,0 +1,151 @@
+---
+title: Collections
+type: section
+description: This page provides a high-level overview of the main collections classes and their methods in Scala 3.
+languages: [ru, zh-cn]
+num: 13
+previous-page: taste-objects
+next-page: taste-contextual-abstractions
+---
+
+
+The Scala library has a rich set of collection classes, and those classes have a rich set of methods.
+Collections classes are available in both immutable and mutable forms.
+
+## Creating lists
+
+To give you a taste of how these work, here are some examples that use the `List` class, which is an immutable, linked-list class.
+These examples show different ways to create a populated `List`:
+
+{% tabs collection_1 %}
+{% tab 'Scala 2 and 3' for=collection_1 %}
+
+```scala
+val a = List(1, 2, 3) // a: List[Int] = List(1, 2, 3)
+
+// Range methods
+val b = (1 to 5).toList // b: List[Int] = List(1, 2, 3, 4, 5)
+val c = (1 to 10 by 2).toList // c: List[Int] = List(1, 3, 5, 7, 9)
+val e = (1 until 5).toList // e: List[Int] = List(1, 2, 3, 4)
+val f = List.range(1, 5) // f: List[Int] = List(1, 2, 3, 4)
+val g = List.range(1, 10, 3) // g: List[Int] = List(1, 4, 7)
+```
+
+{% endtab %}
+{% endtabs %}
+
+## `List` methods
+
+Once you have a populated list, the following examples show some of the methods you can call on it.
+Notice that these are all functional methods, meaning that they don’t mutate the collection they’re called on, but instead return a new collection with the updated elements.
+The result that’s returned by each expression is shown in the comment on each line:
+
+{% tabs collection_2 %}
+{% tab 'Scala 2 and 3' for=collection_2 %}
+
+```scala
+// a sample list
+val a = List(10, 20, 30, 40, 10) // List(10, 20, 30, 40, 10)
+
+a.drop(2) // List(30, 40, 10)
+a.dropWhile(_ < 25) // List(30, 40, 10)
+a.filter(_ < 25) // List(10, 20, 10)
+a.slice(2,4) // List(30, 40)
+a.tail // List(20, 30, 40, 10)
+a.take(3) // List(10, 20, 30)
+a.takeWhile(_ < 30) // List(10, 20)
+
+// flatten
+val b = List(List(1,2), List(3,4))
+b.flatten                   // List(1, 2, 3, 4)
+
+// map, flatMap
+val nums = List("one", "two")
+nums.map(_.toUpperCase) // List("ONE", "TWO")
+nums.flatMap(_.toUpperCase) // List('O', 'N', 'E', 'T', 'W', 'O')
+```
+
+{% endtab %}
+{% endtabs %}
+
+These examples show how the “foldLeft” and “reduceLeft” methods are used to sum the values in a sequence of integers:
+
+{% tabs collection_3 %}
+{% tab 'Scala 2 and 3' for=collection_3 %}
+
+```scala
+val firstTen = (1 to 10).toList // List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
+
+firstTen.reduceLeft(_ + _) // 55
+firstTen.foldLeft(100)(_ + _) // 155 (100 is a “seed” value)
+```
+
+{% endtab %}
+{% endtabs %}
+
+There are many more methods available to Scala collections classes, and they’re demonstrated in the [Collections chapter][collections], and in the [API Documentation][api].
+
+## Tuples
+
+The Scala _tuple_ is a type that lets you easily put a collection of different types in the same container.
+For example, given this `Person` case class:
+
+{% tabs collection_4 %}
+{% tab 'Scala 2 and 3' for=collection_4 %}
+
+```scala
+case class Person(name: String)
+```
+
+{% endtab %}
+{% endtabs %}
+
+This is how you create a tuple that contains an `Int`, a `String`, and a custom `Person` value:
+
+{% tabs collection_5 %}
+{% tab 'Scala 2 and 3' for=collection_5 %}
+
+```scala
+val t = (11, "eleven", Person("Eleven"))
+```
+
+{% endtab %}
+{% endtabs %}
+
+Once you have a tuple, you can access its values by binding them to variables, or access them by number:
+
+{% tabs collection_6 %}
+{% tab 'Scala 2 and 3' for=collection_6 %}
+
+```scala
+t(0) // 11
+t(1) // "eleven"
+t(2) // Person("Eleven")
+```
+
+{% endtab %}
+{% endtabs %}
+
+You can also use this _extractor_ approach to assign the tuple fields to variable names:
+
+{% tabs collection_7 %}
+{% tab 'Scala 2 and 3' for=collection_7 %}
+
+```scala
+val (num, str, person) = t
+
+// result:
+// val num: Int = 11
+// val str: String = eleven
+// val person: Person = Person(Eleven)
+```
+
+{% endtab %}
+{% endtabs %}
+
+Tuples are nice for those times when you want to put a collection of heterogeneous types in a little collection-like structure.
+See the [Reference documentation][reference] for more tuple details.
+
+[collections]: {% link _overviews/scala3-book/collections-intro.md %}
+[api]: https://scala-lang.org/api/3.x/
+[reference]: {{ site.scala3ref }}/overview.html
diff --git a/_overviews/scala3-book/taste-contextual-abstractions.md b/_overviews/scala3-book/taste-contextual-abstractions.md
new file mode 100644
index 0000000000..60d21d1643
--- /dev/null
+++ b/_overviews/scala3-book/taste-contextual-abstractions.md
@@ -0,0 +1,76 @@
+---
+title: Contextual Abstractions
+type: section
+description: This section provides an introduction to Contextual Abstractions in Scala 3.
+languages: [ru, zh-cn]
+num: 14
+previous-page: taste-collections
+next-page: taste-toplevel-definitions
+---
+
+
+{% comment %}
+TODO: Now that this is a separate section, it needs a little more content.
+{% endcomment %}
+
+Under certain circumstances, you can omit some parameters of method calls that are considered repetitive.
+
+Those parameters are called _Context Parameters_ because they are inferred by the compiler from the context surrounding the method call.
+
+For instance, consider a program that sorts a list of addresses by two criteria: the city name and then street name.
+
+{% tabs contextual_1 %}
+{% tab 'Scala 2 and 3' for=contextual_1 %}
+
+```scala
+val addresses: List[Address] = ...
+
+addresses.sortBy(address => (address.city, address.street))
+```
+
+{% endtab %}
+{% endtabs %}
+
+The `sortBy` method takes a function that returns, for every address, the value to compare it with the other addresses.
+In this case, we pass a function that returns a pair containing the city name and the street name.
+
+Note that we only indicate _what_ to compare, but not _how_ to perform the comparison.
+How does the sorting algorithm know how to compare pairs of `String`?
+
+Actually, the `sortBy` method takes a second parameter---a context parameter---that is inferred by the compiler.
+It does not appear in the above example because it is supplied by the compiler.
+
+This second parameter implements the _how_ to compare.
+It is convenient to omit it because we know `String`s are generally compared using the lexicographic order.
+
+However, it is also possible to pass it explicitly:
+
+{% tabs contextual_2 class=tabs-scala-version %}
+{% tab 'Scala 2' for=contextual_2 %}
+
+```scala
+addresses.sortBy(address => (address.city, address.street))(Ordering.Tuple2(Ordering.String, Ordering.String))
+```
+
+{% endtab %}
+{% tab 'Scala 3' for=contextual_2 %}
+
+```scala
+addresses.sortBy(address => (address.city, address.street))(using Ordering.Tuple2(Ordering.String, Ordering.String))
+```
+
+In Scala 3, writing `using` in the argument list to `sortBy` signals that the context parameter is passed explicitly, avoiding ambiguity.
+
+{% endtab %}
+{% endtabs %}
+
+In this case, the `Ordering.Tuple2(Ordering.String, Ordering.String)` instance is exactly the one that is otherwise inferred by the compiler.
+In other words, both examples produce the same program.
+
+_Contextual Abstractions_ are used to avoid repetition of code.
+They help developers write pieces of code that are extensible and concise at the same time.
+
+For more details, see the [Contextual Abstractions chapter][contextual] of this book, and also the [Reference documentation][reference].
+
+[contextual]: {% link _overviews/scala3-book/ca-contextual-abstractions-intro.md %}
+[reference]: {{ site.scala3ref }}/overview.html
diff --git a/_overviews/scala3-book/taste-control-structures.md b/_overviews/scala3-book/taste-control-structures.md
new file mode 100644
index 0000000000..4b58abbf00
--- /dev/null
+++ b/_overviews/scala3-book/taste-control-structures.md
@@ -0,0 +1,541 @@
+---
+title: Control Structures
+type: section
+description: This section demonstrates Scala 3 control structures.
+languages: [ru, zh-cn]
+num: 8
+previous-page: taste-vars-data-types
+next-page: taste-modeling
+---
+
+
+Scala has the control structures you find in other programming languages, and also has powerful `for` expressions and `match` expressions:
+
+- `if`/`else`
+- `for` loops and expressions
+- `match` expressions
+- `while` loops
+- `try`/`catch`
+
+These structures are demonstrated in the following examples.
+
+## `if`/`else`
+
+Scala’s `if`/`else` control structure looks similar to other languages.
+
+{% tabs if-else class=tabs-scala-version %}
+{% tab 'Scala 2' for=if-else %}
+
+```scala
+if (x < 0) {
+ println("negative")
+} else if (x == 0) {
+ println("zero")
+} else {
+ println("positive")
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=if-else %}
+
+```scala
+if x < 0 then
+ println("negative")
+else if x == 0 then
+ println("zero")
+else
+ println("positive")
+```
+
+{% endtab %}
+{% endtabs %}
+
+Note that this really is an _expression_---not a _statement_.
+This means that it returns a value, so you can assign the result to a variable:
+
+{% tabs if-else-expression class=tabs-scala-version %}
+{% tab 'Scala 2' for=if-else-expression %}
+
+```scala
+val x = if (a < b) { a } else { b }
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=if-else-expression %}
+
+```scala
+val x = if a < b then a else b
+```
+
+{% endtab %}
+{% endtabs %}
+
+As you’ll see throughout this book, _all_ Scala control structures can be used as expressions.
+
+> An expression returns a result, while a statement does not.
+> Statements are typically used for their side-effects, such as using `println` to print to the console.
+
+## `for` loops and expressions
+
+The `for` keyword is used to create a `for` loop.
+This example shows how to print every element in a `List`:
+
+{% tabs for-loop class=tabs-scala-version %}
+{% tab 'Scala 2' for=for-loop %}
+
+```scala
+val ints = List(1, 2, 3, 4, 5)
+
+for (i <- ints) println(i)
+```
+
+> The code `i <- ints` is referred to as a _generator_. In any generator `p <- e`, the expression `e` can generate zero or many bindings to the pattern `p`.
+> The code that follows the closing parentheses of the generator is the _body_ of the loop.
+
+{% endtab %}
+
+{% tab 'Scala 3' for=for-loop %}
+
+```scala
+val ints = List(1, 2, 3, 4, 5)
+
+for i <- ints do println(i)
+```
+
+> The code `i <- ints` is referred to as a _generator_, and the code that follows the `do` keyword is the _body_ of the loop.
+
+{% endtab %}
+{% endtabs %}
+
+### Guards
+
+You can also use one or more `if` expressions inside a `for` loop.
+These are referred to as _guards_.
+This example prints all of the numbers in `ints` that are greater than `2`:
+
+{% tabs for-guards class=tabs-scala-version %}
+{% tab 'Scala 2' for=for-guards %}
+
+```scala
+for (i <- ints if i > 2)
+ println(i)
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=for-guards %}
+
+```scala
+for
+ i <- ints
+ if i > 2
+do
+ println(i)
+```
+
+{% endtab %}
+{% endtabs %}
+
+You can use multiple generators and guards.
+This loop iterates over the numbers `1` to `3`, and for each number it also iterates over the characters `a` to `c`.
+However, it also has two guards, so the only time the print statement is called is when `i` has the value `2` and `j` is the character `b`:
+
+{% tabs for-guards-multi class=tabs-scala-version %}
+{% tab 'Scala 2' for=for-guards-multi %}
+
+```scala
+for {
+ i <- 1 to 3
+ j <- 'a' to 'c'
+ if i == 2
+ if j == 'b'
+} {
+ println(s"i = $i, j = $j") // prints: "i = 2, j = b"
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=for-guards-multi %}
+
+```scala
+for
+ i <- 1 to 3
+ j <- 'a' to 'c'
+ if i == 2
+ if j == 'b'
+do
+ println(s"i = $i, j = $j") // prints: "i = 2, j = b"
+```
+
+{% endtab %}
+{% endtabs %}
+
+### `for` expressions
+
+The `for` keyword has even more power: When you use the `yield` keyword instead of `do`, you create `for` _expressions_ which are used to calculate and yield results.
+
+A few examples demonstrate this.
+Using the same `ints` list as the previous example, this code creates a new list, where the value of each element in the new list is twice the value of the elements in the original list:
+
+{% tabs for-expression_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=for-expression_1 %}
+
+````
+scala> val doubles = for (i <- ints) yield i * 2
+val doubles: List[Int] = List(2, 4, 6, 8, 10)
+````
+
+{% endtab %}
+
+{% tab 'Scala 3' for=for-expression_1 %}
+
+````
+scala> val doubles = for i <- ints yield i * 2
+val doubles: List[Int] = List(2, 4, 6, 8, 10)
+````
+
+{% endtab %}
+{% endtabs %}
+
+Scala’s control structure syntax is flexible, and that `for` expression can be written in several other ways, depending on your preference:
+
+{% tabs for-expressioni_2 class=tabs-scala-version %}
+{% tab 'Scala 2' for=for-expressioni_2 %}
+
+```scala
+val doubles = for (i <- ints) yield i * 2
+val doubles = for (i <- ints) yield (i * 2)
+val doubles = for { i <- ints } yield (i * 2)
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=for-expressioni_2 %}
+
+```scala
+val doubles = for i <- ints yield i * 2 // style shown above
+val doubles = for (i <- ints) yield i * 2
+val doubles = for (i <- ints) yield (i * 2)
+val doubles = for { i <- ints } yield (i * 2)
+```
+
+{% endtab %}
+{% endtabs %}
+
+This example shows how to capitalize the first character in each string in the list:
+
+{% tabs for-expressioni_3 class=tabs-scala-version %}
+{% tab 'Scala 2' for=for-expressioni_3 %}
+
+```scala
+val names = List("chris", "ed", "maurice")
+val capNames = for (name <- names) yield name.capitalize
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=for-expressioni_3 %}
+
+```scala
+val names = List("chris", "ed", "maurice")
+val capNames = for name <- names yield name.capitalize
+```
+
+{% endtab %}
+{% endtabs %}
+
+Finally, this `for` expression iterates over a list of strings, and returns the length of each string, but only if that length is greater than `4`:
+
+{% tabs for-expressioni_4 class=tabs-scala-version %}
+{% tab 'Scala 2' for=for-expressioni_4 %}
+
+```scala
+val fruits = List("apple", "banana", "lime", "orange")
+
+val fruitLengths =
+ for (f <- fruits if f.length > 4) yield f.length
+
+// fruitLengths: List[Int] = List(5, 6, 6)
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=for-expressioni_4 %}
+
+```scala
+val fruits = List("apple", "banana", "lime", "orange")
+
+val fruitLengths = for
+ f <- fruits
+ if f.length > 4
+yield
+ // you can use multiple lines
+ // of code here
+ f.length
+
+// fruitLengths: List[Int] = List(5, 6, 6)
+```
+
+{% endtab %}
+{% endtabs %}
+
+`for` loops and expressions are covered in more detail in the [Control Structures sections][control] of this book, and in the [Reference documentation]({{ site.scala3ref }}/other-new-features/control-syntax.html).
+
+## `match` expressions
+
+Scala has a `match` expression, which in its most basic use is like a Java `switch` statement:
+
+{% tabs match class=tabs-scala-version %}
+{% tab 'Scala 2' for=match %}
+
+```scala
+val i = 1
+
+// later in the code ...
+i match {
+ case 1 => println("one")
+ case 2 => println("two")
+ case _ => println("other")
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=match %}
+
+```scala
+val i = 1
+
+// later in the code ...
+i match
+ case 1 => println("one")
+ case 2 => println("two")
+ case _ => println("other")
+```
+
+{% endtab %}
+{% endtabs %}
+
+However, `match` really is an expression, meaning that it returns a result based on the pattern match, which you can bind to a variable:
+
+{% tabs match-expression_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=match-expression_1 %}
+
+```scala
+val result = i match {
+ case 1 => "one"
+ case 2 => "two"
+ case _ => "other"
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=match-expression_1 %}
+
+```scala
+val result = i match
+ case 1 => "one"
+ case 2 => "two"
+ case _ => "other"
+```
+
+{% endtab %}
+{% endtabs %}
+
+`match` isn’t limited to working with just integer values; it can be used with any data type:
+
+{% tabs match-expression_2 class=tabs-scala-version %}
+{% tab 'Scala 2' for=match-expression_2 %}
+
+```scala
+val p = Person("Fred")
+
+// later in the code
+p match {
+ case Person(name) if name == "Fred" =>
+ println(s"$name says, Yubba dubba doo")
+
+ case Person(name) if name == "Bam Bam" =>
+ println(s"$name says, Bam bam!")
+
+ case _ => println("Watch the Flintstones!")
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=match-expression_2 %}
+
+```scala
+val p = Person("Fred")
+
+// later in the code
+p match
+ case Person(name) if name == "Fred" =>
+ println(s"$name says, Yubba dubba doo")
+
+ case Person(name) if name == "Bam Bam" =>
+ println(s"$name says, Bam bam!")
+
+ case _ => println("Watch the Flintstones!")
+```
+
+{% endtab %}
+{% endtabs %}
+
+In fact, a `match` expression can be used to test a variable against many different types of patterns.
+This example shows (a) how to use a `match` expression as the body of a method, and (b) how to match all the different types shown:
+
+{% tabs match-expression_3 class=tabs-scala-version %}
+{% tab 'Scala 2' for=match-expression_3 %}
+
+```scala
+// getClassAsString is a method that takes a single argument of any type.
+def getClassAsString(x: Any): String = x match {
+ case s: String => s"'$s' is a String"
+ case i: Int => "Int"
+ case d: Double => "Double"
+ case l: List[_] => "List"
+ case _ => "Unknown"
+}
+
+// examples
+getClassAsString(1) // Int
+getClassAsString("hello") // 'hello' is a String
+getClassAsString(List(1, 2, 3)) // List
+```
+
+Because the method `getClassAsString` takes a parameter value of type `Any`, it can be decomposed by any kind of
+pattern.
+
+{% endtab %}
+{% tab 'Scala 3' for=match-expression_3 %}
+
+```scala
+// getClassAsString is a method that takes a single argument of any type.
+def getClassAsString(x: Matchable): String = x match
+ case s: String => s"'$s' is a String"
+ case i: Int => "Int"
+ case d: Double => "Double"
+ case l: List[?] => "List"
+ case _ => "Unknown"
+
+// examples
+getClassAsString(1) // Int
+getClassAsString("hello") // 'hello' is a String
+getClassAsString(List(1, 2, 3)) // List
+```
+
+The method `getClassAsString` takes as a parameter a value of type [Matchable]({{ site.scala3ref }}/other-new-features/matchable.html), which can be
+any type supporting pattern matching (some types don’t support pattern matching because this could
+break encapsulation).
+
+{% endtab %}
+{% endtabs %}
+
+There’s _much_ more to pattern matching in Scala.
+Patterns can be nested, results of patterns can be bound, and pattern matching can even be user-defined.
+See the pattern matching examples in the [Control Structures chapter][control] for more details.
+
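+In the meantime, here’s a quick sketch showing a value bound to a name with `@` and nested patterns inside a `List` match:
+
+{% tabs match-bind-sketch %}
+{% tab 'Scala 2 and 3' for=match-bind-sketch %}
+```scala
+def describe(nums: List[Int]): String = nums match {
+  case Nil                 => "an empty list"
+  case all @ List(1, _, _) => s"three elements starting with 1: $all" // `all` is bound to the whole list
+  case List(_, second, _*) => s"the second element is $second"        // nested positional patterns
+  case _                   => "something else"
+}
+
+describe(List(1, 2, 3))   // "three elements starting with 1: List(1, 2, 3)"
+describe(List(5, 10, 15)) // "the second element is 10"
+```
+{% endtab %}
+{% endtabs %}
+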
+## `try`/`catch`/`finally`
+
+Scala’s `try`/`catch`/`finally` control structure lets you catch exceptions.
+It’s similar to Java, but its syntax is consistent with `match` expressions:
+
+{% tabs try class=tabs-scala-version %}
+{% tab 'Scala 2' for=try %}
+
+```scala
+try {
+ writeTextToFile(text)
+} catch {
+ case ioe: IOException => println("Got an IOException.")
+ case nfe: NumberFormatException => println("Got a NumberFormatException.")
+} finally {
+ println("Clean up your resources here.")
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=try %}
+
+```scala
+try
+ writeTextToFile(text)
+catch
+ case ioe: IOException => println("Got an IOException.")
+ case nfe: NumberFormatException => println("Got a NumberFormatException.")
+finally
+ println("Clean up your resources here.")
+```
+
+{% endtab %}
+{% endtabs %}
+
+## `while` loops
+
+Scala also has a `while` loop construct.
+Its one-line syntax looks like this:
+
+{% tabs while_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=while_1 %}
+
+```scala
+while (x >= 0) { x = f(x) }
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=while_1 %}
+
+```scala
+while x >= 0 do x = f(x)
+```
+Scala 3 still supports the Scala 2 syntax for the sake of compatibility.
+
+{% endtab %}
+{% endtabs %}
+
+The `while` loop multiline syntax looks like this:
+
+{% tabs while_2 class=tabs-scala-version %}
+{% tab 'Scala 2' for=while_2 %}
+
+```scala
+var x = 1
+
+while (x < 3) {
+ println(x)
+ x += 1
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=while_2 %}
+
+```scala
+var x = 1
+
+while
+ x < 3
+do
+ println(x)
+ x += 1
+```
+
+{% endtab %}
+{% endtabs %}
+
+## Custom control structures
+
+Thanks to features like by-name parameters, infix notation, fluent interfaces, optional parentheses, extension methods, and higher-order functions, you can also create your own code that works just like a control structure.
+You’ll learn more about this in the [Control Structures][control] section.
+
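+As a small taste of the idea, here’s a sketch of a custom `repeat` control structure, defined here just for illustration, that uses a by-name parameter:
+
+{% tabs custom-control-sketch %}
+{% tab 'Scala 2 and 3' for=custom-control-sketch %}
+```scala
+// `blockOfCode` is a by-name parameter, so it is evaluated each time it is used
+def repeat(times: Int)(blockOfCode: => Unit): Unit =
+  for (_ <- 1 to times) blockOfCode
+
+// reads like a built-in control structure
+repeat(2) {
+  println("hello")
+}
+// prints "hello" twice
+```
+{% endtab %}
+{% endtabs %}
+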
+[control]: {% link _overviews/scala3-book/control-structures.md %}
diff --git a/_overviews/scala3-book/taste-functions.md b/_overviews/scala3-book/taste-functions.md
new file mode 100644
index 0000000000..e73024bca0
--- /dev/null
+++ b/_overviews/scala3-book/taste-functions.md
@@ -0,0 +1,78 @@
+---
+title: First-Class Functions
+type: section
+description: This page provides an introduction to functions in Scala 3.
+languages: [ru, zh-cn]
+num: 11
+previous-page: taste-methods
+next-page: taste-objects
+---
+
+
+Scala has most features you’d expect in a functional programming language, including:
+
+- Lambdas (anonymous functions)
+- Higher-order functions (HOFs)
+- Immutable collections in the standard library
+
+Lambdas, also known as _anonymous functions_, are a big part of keeping your code concise but readable.
+
+The `map` method of the `List` class is a typical example of a higher-order function---a function that takes a function as parameter.
+
+These two examples are equivalent, and show how to multiply each number in a list by `2` by passing a lambda into the `map` method:
+
+
+{% tabs function_1 %}
+{% tab 'Scala 2 and 3' for=function_1 %}
+```scala
+val a = List(1, 2, 3).map(i => i * 2) // List(2,4,6)
+val b = List(1, 2, 3).map(_ * 2) // List(2,4,6)
+```
+{% endtab %}
+{% endtabs %}
+
+Those examples are also equivalent to the following code, which uses a `double` method instead of a lambda:
+
+
+{% tabs function_2 %}
+{% tab 'Scala 2 and 3' for=function_2 %}
+```scala
+def double(i: Int): Int = i * 2
+
+val a = List(1, 2, 3).map(i => double(i)) // List(2,4,6)
+val b = List(1, 2, 3).map(double) // List(2,4,6)
+```
+{% endtab %}
+{% endtabs %}
+
+> If you haven’t seen the `map` method before, it applies a given function to every element in a list, yielding a new list that contains the resulting values.
+
+Passing lambdas to higher-order functions on collections classes (like `List`) is a part of the Scala experience, something you’ll do every day.
+
+## Immutable collections
+
+When you work with immutable collections like `List`, `Vector`, and the immutable `Map` and `Set` classes, it’s important to know that these functions don’t mutate the collection they’re called on; instead, they return a new collection with the updated data.
+As a result, it’s also common to chain them together in a “fluent” style to solve problems.
+
+For instance, this example shows how to filter a collection twice, and then multiply each element in the remaining collection:
+
+
+{% tabs function_3 %}
+{% tab 'Scala 2 and 3' for=function_3 %}
+```scala
+// a sample list
+val nums = (1 to 10).toList // List(1,2,3,4,5,6,7,8,9,10)
+
+// methods can be chained together as needed
+val x = nums.filter(_ > 3)
+ .filter(_ < 7)
+ .map(_ * 10)
+
+// result: x == List(40, 50, 60)
+```
+{% endtab %}
+{% endtabs %}
+
+In addition to higher-order functions being used throughout the standard library, you can also [create your own][higher-order].
+
+[higher-order]: {% link _overviews/scala3-book/fun-hofs.md %}
diff --git a/_overviews/scala3-book/taste-hello-world.md b/_overviews/scala3-book/taste-hello-world.md
new file mode 100644
index 0000000000..52fc532e5e
--- /dev/null
+++ b/_overviews/scala3-book/taste-hello-world.md
@@ -0,0 +1,165 @@
+---
+title: Hello, World!
+type: section
+description: This section demonstrates a Scala 3 'Hello, World!' example.
+languages: [ru, zh-cn]
+num: 5
+previous-page: taste-intro
+next-page: taste-repl
+---
+
+> **Hint**: in the following examples try picking your preferred Scala version.
+
+## Your First Scala Program
+
+
+A Scala “Hello, World!” example goes as follows.
+First, put this code in a file named _hello.scala_:
+
+
+
+{% tabs hello-world-demo class=tabs-scala-version %}
+
+{% tab 'Scala 2' for=hello-world-demo %}
+```scala
+object hello {
+ def main(args: Array[String]) = {
+ println("Hello, World!")
+ }
+}
+```
+> In this code, we defined a method named `main`, inside a Scala `object` named `hello`.
+> An `object` in Scala is similar to a `class`, but defines a singleton instance that you can pass around.
+> `main` takes an input parameter named `args` that must be typed as `Array[String]` (ignore `args` for now).
+
+{% endtab %}
+
+{% tab 'Scala 3' for=hello-world-demo %}
+```scala
+@main def hello() = println("Hello, World!")
+```
+> In this code, `hello` is a method.
+> It’s defined with `def`, and declared to be a “main” method with the `@main` annotation.
+> It prints the `"Hello, World!"` string to standard output (STDOUT) using the `println` method.
+
+{% endtab %}
+
+{% endtabs %}
+
+
+Next, compile and run the code with `scala`:
+
+```bash
+$ scala run hello.scala
+```
+
+The command should produce an output similar to:
+```
+Compiling project (Scala {{site.scala-3-version}}, JVM (20))
+Compiled project (Scala {{site.scala-3-version}}, JVM (20))
+Hello, World!
+```
+
+Assuming that worked, congratulations, you just compiled and ran your first Scala application.
+
+> More information about sbt and other tools that make Scala development easier can be found in the [Scala Tools][scala_tools] chapter.
+> The Scala CLI documentation can be found [here](https://scala-cli.virtuslab.org/).
+
+## Ask For User Input
+
+In our next example let's ask for the user's name before we greet them!
+
+There are several ways to read input from the command line, but a simple way is to use the
+`readLine` method in the _scala.io.StdIn_ object. To use it, you need to first import it, like this:
+
+{% tabs import-readline %}
+{% tab 'Scala 2 and 3' for=import-readline %}
+```scala
+import scala.io.StdIn.readLine
+```
+{% endtab %}
+{% endtabs %}
+
+To demonstrate how this works, let’s create a little example. Put this source code in a file named _helloInteractive.scala_:
+
+
+{% tabs hello-world-interactive class=tabs-scala-version %}
+
+{% tab 'Scala 2' for=hello-world-interactive %}
+```scala
+import scala.io.StdIn.readLine
+
+object helloInteractive {
+
+ def main(args: Array[String]) = {
+ println("Please enter your name:")
+ val name = readLine()
+
+ println("Hello, " + name + "!")
+ }
+
+}
+```
+{% endtab %}
+
+{% tab 'Scala 3' for=hello-world-interactive %}
+```scala
+import scala.io.StdIn.readLine
+
+@main def helloInteractive() =
+ println("Please enter your name:")
+ val name = readLine()
+
+ println("Hello, " + name + "!")
+```
+{% endtab %}
+
+{% endtabs %}
+
+
+In this code we save the result of `readLine` to a variable called `name`, and then we
+use the `+` operator on strings to join `"Hello, "` with `name` and `"!"`, making one single string value.
+
+> You can learn more about using `val` by reading [Variables and Data Types](/scala3/book/taste-vars-data-types.html).
+
+Then run the code with `scala`. This time the program will pause after asking for your name,
+and wait until you type a name and press return on the keyboard, looking like this:
+
+```bash
+$ scala run helloInteractive.scala
+Compiling project (Scala {{site.scala-3-version}}, JVM (20))
+Compiled project (Scala {{site.scala-3-version}}, JVM (20))
+Please enter your name:
+▌
+```
+
+When you enter your name at the prompt, the final interaction should look like this:
+
+```bash
+$ scala run helloInteractive.scala
+Compiling project (Scala {{site.scala-3-version}}, JVM (20))
+Compiled project (Scala {{site.scala-3-version}}, JVM (20))
+Please enter your name:
+Alvin Alexander
+Hello, Alvin Alexander!
+```
+
+### A Note about Imports
+
+As you saw in this application, sometimes certain methods, or other kinds of definitions that we'll see later,
+are not available unless you use an `import` clause like so:
+
+{% tabs import-readline-2 %}
+{% tab 'Scala 2 and 3' for=import-readline-2 %}
+```scala
+import scala.io.StdIn.readLine
+```
+{% endtab %}
+{% endtabs %}
+
+Imports help you write code in a few ways:
+ - you can put code in multiple files, to help avoid clutter, and to help navigate large projects.
+ - you can use a code library, perhaps written by someone else, that has useful functionality.
+ - you can know where a certain definition comes from (especially if it was not written in the current file).
+
+[scala_tools]: {% link _overviews/scala3-book/scala-tools.md %}
diff --git a/_overviews/scala3-book/taste-intro.md b/_overviews/scala3-book/taste-intro.md
new file mode 100644
index 0000000000..9d93b317cf
--- /dev/null
+++ b/_overviews/scala3-book/taste-intro.md
@@ -0,0 +1,62 @@
+---
+title: A Taste of Scala
+type: chapter
+description: This chapter provides a high-level overview of the main features of the Scala 3 programming language.
+languages: [ru, zh-cn]
+num: 4
+previous-page: why-scala-3
+next-page: taste-hello-world
+---
+
+
+This chapter provides a whirlwind tour of the main features of the Scala 3 programming language.
+After this initial tour, the rest of the book provides more details on these features, and the [Reference documentation][reference] provides _many_ more details.
+
+## Setting Up Scala
+
+Throughout this chapter, and the rest of the book, we encourage you to try out the examples by either copying
+them or typing them out manually. The tools necessary to follow along with the examples on your own computer
+can be installed by following our [getting started guide][get-started].
+
+> Alternatively you can run the examples in a web browser with [Scastie](https://scastie.scala-lang.org), a
+> fully online editor and code-runner for Scala.
+
+## Comments
+
+One good thing to know up front is that comments in Scala are just like comments in Java (and many other languages):
+
+{% tabs comments %}
+{% tab 'Scala 2 and 3' for=comments %}
+```scala
+// a single line comment
+
+/*
+ * a multiline comment
+ */
+
+/**
+ * also a multiline comment
+ */
+```
+{% endtab %}
+{% endtabs %}
+
+## IDEs
+
+The two main IDEs (integrated development environments) for Scala are:
+
+- [IntelliJ IDEA](/getting-started/intellij-track/building-a-scala-project-with-intellij-and-sbt.html)
+- [Visual Studio Code](https://scalameta.org/metals/docs/editors/vscode/)
+
+## Naming conventions
+
+Another good thing to know is that Scala naming conventions follow the same “camel case” style as Java:
+
+- Class names: `Person`, `StoreEmployee`
+- Variable names: `name`, `firstName`
+- Method names: `convertToInt`, `toUpper`
+
+More on conventions used while writing Scala code can be found in the [Style Guide](/style/index.html).
+
+[reference]: {{ site.scala3ref }}/overview.html
+[get-started]: {% link _overviews/getting-started/install-scala.md %}
diff --git a/_overviews/scala3-book/taste-methods.md b/_overviews/scala3-book/taste-methods.md
new file mode 100644
index 0000000000..6c54818805
--- /dev/null
+++ b/_overviews/scala3-book/taste-methods.md
@@ -0,0 +1,159 @@
+---
+title: Methods
+type: section
+description: This section provides an introduction to defining and using methods in Scala 3.
+languages: [ru, zh-cn]
+num: 10
+previous-page: taste-modeling
+next-page: taste-functions
+---
+
+
+## Scala methods
+
+Scala classes, case classes, traits, enums, and objects can all contain methods.
+The syntax of a simple method looks like this:
+
+{% tabs method_1 %}
+{% tab 'Scala 2 and 3' for=method_1 %}
+```scala
+def methodName(param1: Type1, param2: Type2): ReturnType =
+ // the method body
+ // goes here
+```
+{% endtab %}
+{% endtabs %}
+
+Here are a few examples:
+
+{% tabs method_2 %}
+{% tab 'Scala 2 and 3' for=method_2 %}
+```scala
+def sum(a: Int, b: Int): Int = a + b
+def concatenate(s1: String, s2: String): String = s1 + s2
+```
+{% endtab %}
+{% endtabs %}
+
+You don’t have to declare a method’s return type, so you can write those methods like this, if you prefer:
+
+{% tabs method_3 %}
+{% tab 'Scala 2 and 3' for=method_3 %}
+```scala
+def sum(a: Int, b: Int) = a + b
+def concatenate(s1: String, s2: String) = s1 + s2
+```
+{% endtab %}
+{% endtabs %}
+
+This is how you call those methods:
+
+{% tabs method_4 %}
+{% tab 'Scala 2 and 3' for=method_4 %}
+```scala
+val x = sum(1, 2)
+val y = concatenate("foo", "bar")
+```
+{% endtab %}
+{% endtabs %}
+
+Here’s an example of a multiline method:
+
+{% tabs method_5 class=tabs-scala-version %}
+{% tab 'Scala 2' for=method_5 %}
+```scala
+def getStackTraceAsString(t: Throwable): String = {
+ val sw = new StringWriter
+ t.printStackTrace(new PrintWriter(sw))
+ sw.toString
+}
+```
+{% endtab %}
+
+{% tab 'Scala 3' for=method_5 %}
+```scala
+def getStackTraceAsString(t: Throwable): String =
+ val sw = new StringWriter
+ t.printStackTrace(new PrintWriter(sw))
+ sw.toString
+```
+{% endtab %}
+{% endtabs %}
+
+Method parameters can also have default values.
+In this example, the `timeout` parameter has a default value of `5000`:
+
+{% tabs method_6 %}
+{% tab 'Scala 2 and 3' for=method_6 %}
+```scala
+def makeConnection(url: String, timeout: Int = 5000): Unit =
+ println(s"url=$url, timeout=$timeout")
+```
+{% endtab %}
+{% endtabs %}
+
+Because a default `timeout` value is supplied in the method declaration, the method can be called in these two ways:
+
+{% tabs method_7 %}
+{% tab 'Scala 2 and 3' for=method_7 %}
+```scala
+makeConnection("https://localhost")         // url=https://localhost, timeout=5000
+makeConnection("https://localhost", 2500)   // url=https://localhost, timeout=2500
+```
+{% endtab %}
+{% endtabs %}
+
+Scala also supports the use of _named parameters_ when calling a method, so you can also call that method like this, if you prefer:
+
+{% tabs method_8 %}
+{% tab 'Scala 2 and 3' for=method_8 %}
+```scala
+makeConnection(
+ url = "https://localhost",
+ timeout = 2500
+)
+```
+{% endtab %}
+{% endtabs %}
+
+Named parameters are particularly useful when multiple method parameters have the same type.
+At a glance, with this method you may wonder which parameters are set to `true` or `false`:
+
+{% tabs method_9 %}
+{% tab 'Scala 2 and 3' for=method_9 %}
+
+```scala
+engage(true, true, true, false)
+```
+
+{% endtab %}
+{% endtabs %}
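+
+In contrast, named parameters make the call self-documenting.
+(The parameter names for `engage` are assumptions for illustration---they aren’t shown in the call above.)
+
+{% tabs method_named %}
+{% tab 'Scala 2 and 3' for=method_named %}
+
+```scala
+engage(
+  speedIsSet = true,
+  directionIsSet = true,
+  picardSaidMakeItSo = true,
+  turnedOffParkingBrake = false
+)
+```
+
+{% endtab %}
+{% endtabs %}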
+
+## Extension methods
+
+The `extension` keyword declares that you’re about to define one or more _extension methods_ on the parameter that’s put in parentheses.
+As shown in the following example, the parameter `s` of type `String` can then be used in the body of your extension methods.
+
+This example shows how to add a `makeInt` method to the `String` class.
+Here, `makeInt` takes a parameter named `radix`.
+The code doesn’t account for possible string-to-integer conversion errors, but skipping that detail, it shows how extension methods work:
+
+{% tabs extension %}
+{% tab 'Scala 3 Only' %}
+
+```scala
+extension (s: String)
+ def makeInt(radix: Int): Int = Integer.parseInt(s, radix)
+
+"1".makeInt(2) // Int = 1
+"10".makeInt(2) // Int = 2
+"100".makeInt(2) // Int = 4
+```
+
+{% endtab %}
+{% endtabs %}
+
+## See also
+
+Scala methods can be much more powerful: they can take type parameters and context parameters.
+They are covered in detail in the [Domain Modeling][data-1] section.
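+
+For instance, here’s a small sketch (not from the original text) of a method that takes a type parameter, so it works with lists of any element type:
+
+{% tabs method_type_param %}
+{% tab 'Scala 2 and 3' for=method_type_param %}
+
+```scala
+// A is a type parameter, so this method works for any element type
+def firstOrDefault[A](xs: List[A], default: A): A =
+  if (xs.isEmpty) default else xs.head
+
+firstOrDefault(List(1, 2, 3), 0)          // 1
+firstOrDefault(List.empty[String], "n/a") // "n/a"
+```
+
+{% endtab %}
+{% endtabs %}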
+
+[data-1]: {% link _overviews/scala3-book/domain-modeling-tools.md %}
diff --git a/_overviews/scala3-book/taste-modeling.md b/_overviews/scala3-book/taste-modeling.md
new file mode 100644
index 0000000000..3e391d745a
--- /dev/null
+++ b/_overviews/scala3-book/taste-modeling.md
@@ -0,0 +1,422 @@
+---
+title: Domain Modeling
+type: section
+description: This section provides an introduction to data modeling in Scala 3.
+languages: [ru, zh-cn]
+num: 9
+previous-page: taste-control-structures
+next-page: taste-methods
+---
+
+
+{% comment %}
+NOTE: I kept the OOP section first, assuming that most readers will be coming from an OOP background.
+{% endcomment %}
+
+Scala supports both functional programming (FP) and object-oriented programming (OOP), as well as a fusion of the two paradigms.
+This section provides a quick overview of data modeling in OOP and FP.
+
+## OOP Domain Modeling
+
+When writing code in an OOP style, your two main tools for data encapsulation are _traits_ and _classes_.
+
+{% comment %}
+NOTE: Julien had a comment, “in OOP we don’t really model data.
+It’s more about modeling operations, imho.”
+
+How to resolve? Is there a good DDD term to use here?
+{% endcomment %}
+
+### Traits
+
+Scala traits can be used as simple interfaces, but they can also contain abstract and concrete methods and fields, and they can have parameters, just like classes.
+They provide a great way for you to organize behaviors into small, modular units.
+Later, when you want to create concrete implementations of attributes and behaviors, classes and objects can extend traits, mixing in as many traits as needed to achieve the desired behavior.
+
+As an example of how to use traits as interfaces, here are three traits that define well-organized and modular behaviors for animals like dogs and cats:
+
+{% tabs traits class=tabs-scala-version %}
+{% tab 'Scala 2' for=traits %}
+
+```scala
+trait Speaker {
+ def speak(): String // has no body, so it’s abstract
+}
+
+trait TailWagger {
+ def startTail(): Unit = println("tail is wagging")
+ def stopTail(): Unit = println("tail is stopped")
+}
+
+trait Runner {
+ def startRunning(): Unit = println("I’m running")
+ def stopRunning(): Unit = println("Stopped running")
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=traits %}
+
+```scala
+trait Speaker:
+ def speak(): String // has no body, so it’s abstract
+
+trait TailWagger:
+ def startTail(): Unit = println("tail is wagging")
+ def stopTail(): Unit = println("tail is stopped")
+
+trait Runner:
+ def startRunning(): Unit = println("I’m running")
+ def stopRunning(): Unit = println("Stopped running")
+```
+
+{% endtab %}
+{% endtabs %}
+
+Given those traits, here’s a `Dog` class that extends all of those traits while providing a behavior for the abstract `speak` method:
+
+{% tabs traits-class class=tabs-scala-version %}
+{% tab 'Scala 2' for=traits-class %}
+
+```scala
+class Dog(name: String) extends Speaker with TailWagger with Runner {
+ def speak(): String = "Woof!"
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=traits-class %}
+
+```scala
+class Dog(name: String) extends Speaker, TailWagger, Runner:
+ def speak(): String = "Woof!"
+```
+
+{% endtab %}
+{% endtabs %}
+
+Notice how the class extends the traits with the `extends` keyword.
+
+Similarly, here’s a `Cat` class that implements those same traits while also overriding two of the concrete methods it inherits:
+
+{% tabs traits-override class=tabs-scala-version %}
+{% tab 'Scala 2' for=traits-override %}
+
+```scala
+class Cat(name: String) extends Speaker with TailWagger with Runner {
+ def speak(): String = "Meow"
+ override def startRunning(): Unit = println("Yeah ... I don’t run")
+ override def stopRunning(): Unit = println("No need to stop")
+}
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=traits-override %}
+
+```scala
+class Cat(name: String) extends Speaker, TailWagger, Runner:
+ def speak(): String = "Meow"
+ override def startRunning(): Unit = println("Yeah ... I don’t run")
+ override def stopRunning(): Unit = println("No need to stop")
+```
+
+{% endtab %}
+{% endtabs %}
+
+These examples show how those classes are used:
+
+{% tabs traits-use class=tabs-scala-version %}
+{% tab 'Scala 2' for=traits-use %}
+
+```scala
+val d = new Dog("Rover")
+println(d.speak()) // prints "Woof!"
+
+val c = new Cat("Morris")
+println(c.speak()) // "Meow"
+c.startRunning() // "Yeah ... I don’t run"
+c.stopRunning() // "No need to stop"
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=traits-use %}
+
+```scala
+val d = Dog("Rover")
+println(d.speak()) // prints "Woof!"
+
+val c = Cat("Morris")
+println(c.speak()) // "Meow"
+c.startRunning() // "Yeah ... I don’t run"
+c.stopRunning() // "No need to stop"
+```
+
+{% endtab %}
+{% endtabs %}
+
+If that code makes sense---great, you’re comfortable with traits as interfaces.
+If not, don’t worry, they’re explained in more detail in the [Domain Modeling][data-1] chapter.
+
+### Classes
+
+Scala _classes_ are used in OOP-style programming.
+Here’s an example of a class that models a “person.” In OOP, fields are typically mutable, so `firstName` and `lastName` are both declared as `var` parameters:
+
+{% tabs class_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=class_1 %}
+
+```scala
+class Person(var firstName: String, var lastName: String) {
+ def printFullName() = println(s"$firstName $lastName")
+}
+
+val p = new Person("John", "Stephens")
+println(p.firstName) // "John"
+p.lastName = "Legend"
+p.printFullName() // "John Legend"
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=class_1 %}
+
+```scala
+class Person(var firstName: String, var lastName: String):
+ def printFullName() = println(s"$firstName $lastName")
+
+val p = Person("John", "Stephens")
+println(p.firstName) // "John"
+p.lastName = "Legend"
+p.printFullName() // "John Legend"
+```
+
+{% endtab %}
+{% endtabs %}
+
+Notice that the class declaration creates a constructor:
+
+{% tabs class_2 class=tabs-scala-version %}
+{% tab 'Scala 2' for=class_2 %}
+
+```scala
+// this code uses that constructor
+val p = new Person("John", "Stephens")
+```
+
+{% endtab %}
+
+{% tab 'Scala 3' for=class_2 %}
+
+```scala
+// this code uses that constructor
+val p = Person("John", "Stephens")
+```
+
+{% endtab %}
+{% endtabs %}
+
+Constructors and other class-related topics are covered in the [Domain Modeling][data-1] chapter.
+
+## FP Domain Modeling
+
+{% comment %}
+NOTE: Julien had a note about expecting to see sealed traits here.
+I didn’t include that because I didn’t know if enums are intended
+to replace the Scala2 “sealed trait + case class” pattern. How to resolve?
+{% endcomment %}
+
+When writing code in an FP style, you’ll use these concepts:
+
+- Algebraic Data Types to define the data
+- Traits for functionality on the data
+
+### Enumerations and Sum Types
+
+Sum types are one way to model algebraic data types (ADTs) in Scala.
+
+They are used when data can be represented with different choices.
+
+For instance, a pizza has three main attributes:
+
+- Crust size
+- Crust type
+- Toppings
+
+These are concisely modeled with enumerations, which are sum types that only contain singleton values:
+
+{% tabs enum_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=enum_1 %}
+
+In Scala 2 `sealed` classes and `case object` are combined to define an enumeration:
+
+```scala
+sealed abstract class CrustSize
+object CrustSize {
+ case object Small extends CrustSize
+ case object Medium extends CrustSize
+ case object Large extends CrustSize
+}
+
+sealed abstract class CrustType
+object CrustType {
+ case object Thin extends CrustType
+ case object Thick extends CrustType
+ case object Regular extends CrustType
+}
+
+sealed abstract class Topping
+object Topping {
+ case object Cheese extends Topping
+ case object Pepperoni extends Topping
+ case object BlackOlives extends Topping
+ case object GreenOlives extends Topping
+ case object Onions extends Topping
+}
+```
+
+{% endtab %}
+{% tab 'Scala 3' for=enum_1 %}
+
+Scala 3 offers the `enum` construct for defining enumerations:
+
+```scala
+enum CrustSize:
+ case Small, Medium, Large
+
+enum CrustType:
+ case Thin, Thick, Regular
+
+enum Topping:
+ case Cheese, Pepperoni, BlackOlives, GreenOlives, Onions
+```
+
+{% endtab %}
+{% endtabs %}
+
+Once you have an enumeration you can import its members as ordinary values:
+
+{% tabs enum_2 class=tabs-scala-version %}
+{% tab 'Scala 2' for=enum_2 %}
+
+```scala
+import CrustSize._
+val currentCrustSize = Small
+
+// enums in a `match` expression
+currentCrustSize match {
+ case Small => println("Small crust size")
+ case Medium => println("Medium crust size")
+ case Large => println("Large crust size")
+}
+
+// enums in an `if` statement
+if (currentCrustSize == Small) println("Small crust size")
+```
+
+{% endtab %}
+{% tab 'Scala 3' for=enum_2 %}
+
+```scala
+import CrustSize.*
+val currentCrustSize = Small
+
+// enums in a `match` expression
+currentCrustSize match
+ case Small => println("Small crust size")
+ case Medium => println("Medium crust size")
+ case Large => println("Large crust size")
+
+// enums in an `if` statement
+if currentCrustSize == Small then println("Small crust size")
+```
+
+{% endtab %}
+{% endtabs %}
+
+Here’s another example of how to create a sum type with Scala. This would not be called an enumeration, because the `Succ` case has parameters:
+
+{% tabs enum_3 class=tabs-scala-version %}
+{% tab 'Scala 2' for=enum_3 %}
+
+```scala
+sealed abstract class Nat
+object Nat {
+ case object Zero extends Nat
+ case class Succ(pred: Nat) extends Nat
+}
+```
+
+Sum Types are covered in detail in the [Domain Modeling]({% link _overviews/scala3-book/domain-modeling-tools.md %}) section of this book.
+
+{% endtab %}
+{% tab 'Scala 3' for=enum_3 %}
+
+```scala
+enum Nat:
+ case Zero
+ case Succ(pred: Nat)
+```
+
+Enums are covered in detail in the [Domain Modeling]({% link _overviews/scala3-book/domain-modeling-tools.md %}) section of this book, and in the [Reference documentation]({{ site.scala3ref }}/enums/enums.html).
+
+{% endtab %}
+{% endtabs %}
+
+### Product Types
+
+A product type is an algebraic data type (ADT) that only has one shape, for example a singleton object, represented in Scala by a `case` object; or an immutable structure with accessible fields, represented by a `case` class.
+
+A `case` class has all of the functionality of a `class`, and also has additional features baked in that make it useful for functional programming.
+When the compiler sees the `case` keyword in front of a `class`, it has these effects and benefits:
+
+- Case class constructor parameters are public `val` fields by default, so the fields are immutable, and accessor methods are generated for each parameter.
+- An `unapply` method is generated, which lets you use case classes in more ways in `match` expressions.
+- A `copy` method is generated in the class.
+ This provides a way to create updated copies of the object without changing the original object.
+- `equals` and `hashCode` methods are generated to implement structural equality.
+- A default `toString` method is generated, which is helpful for debugging.
+
+{% comment %}
+NOTE: Julien had a comment about how he decides when to use case classes vs classes. Add something here?
+{% endcomment %}
+
+You _can_ manually add all of those methods to a class yourself, but since those features are so commonly used in functional programming, using a `case` class is much more convenient.
+
+This code demonstrates several `case` class features:
+
+{% tabs case-class %}
+{% tab 'Scala 2 and 3' for=case-class %}
+
+```scala
+// define a case class
+case class Person(
+ name: String,
+ vocation: String
+)
+
+// create an instance of the case class
+val p = Person("Reginald Kenneth Dwight", "Singer")
+
+// a good default toString method
+p // : Person = Person(Reginald Kenneth Dwight,Singer)
+
+// can access its fields, which are immutable
+p.name // "Reginald Kenneth Dwight"
+p.name = "Joe" // error: can’t reassign a val field
+
+// when you need to make a change, use the `copy` method
+// to “update as you copy”
+val p2 = p.copy(name = "Elton John")
+p2 // : Person = Person(Elton John,Singer)
+```
+
+{% endtab %}
+{% endtabs %}
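+
+As a further sketch (not in the original text), the generated `unapply` and `equals` methods let you pattern match on `Person` and compare instances structurally:
+
+{% tabs case-class-sketch %}
+{% tab 'Scala 2 and 3' for=case-class-sketch %}
+
+```scala
+// `unapply` lets you pattern match on a case class
+p2 match {
+  case Person(name, vocation) => println(s"$name is a $vocation")
+}
+
+// `equals` and `hashCode` implement structural equality
+val p3 = Person("Elton John", "Singer")
+p2 == p3   // true, because their fields are equal
+```
+
+{% endtab %}
+{% endtabs %}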
+
+See the [Domain Modeling][data-1] sections for many more details on `case` classes.
+
+[data-1]: {% link _overviews/scala3-book/domain-modeling-tools.md %}
diff --git a/_overviews/scala3-book/taste-objects.md b/_overviews/scala3-book/taste-objects.md
new file mode 100644
index 0000000000..479182bfa2
--- /dev/null
+++ b/_overviews/scala3-book/taste-objects.md
@@ -0,0 +1,155 @@
+---
+title: Singleton Objects
+type: section
+description: This section provides an introduction to the use of singleton objects in Scala 3.
+languages: [ru, zh-cn]
+num: 12
+previous-page: taste-functions
+next-page: taste-collections
+---
+
+
+In Scala, the `object` keyword creates a singleton object.
+Put another way, an object defines a class that has exactly one instance.
+
+Objects have several uses:
+
+- They are used to create collections of utility methods.
+- A _companion object_ is an object that has the same name as the class it shares a file with.
+ In this situation, that class is also called a _companion class_.
+- They’re used to implement traits to create _modules_.
+
+## “Utility” methods
+
+Because an `object` is a singleton, its methods can be accessed like `static` methods in a Java class.
+For example, this `StringUtils` object contains a small collection of string-related methods:
+
+
+{% tabs object_1 class=tabs-scala-version %}
+{% tab 'Scala 2' for=object_1 %}
+```scala
+object StringUtils {
+ def isNullOrEmpty(s: String): Boolean = s == null || s.trim.isEmpty
+ def leftTrim(s: String): String = s.replaceAll("^\\s+", "")
+ def rightTrim(s: String): String = s.replaceAll("\\s+$", "")
+}
+```
+{% endtab %}
+
+{% tab 'Scala 3' for=object_1 %}
+```scala
+object StringUtils:
+ def isNullOrEmpty(s: String): Boolean = s == null || s.trim.isEmpty
+ def leftTrim(s: String): String = s.replaceAll("^\\s+", "")
+ def rightTrim(s: String): String = s.replaceAll("\\s+$", "")
+```
+{% endtab %}
+{% endtabs %}
+
+Because `StringUtils` is a singleton, its methods can be called directly on the object:
+
+{% tabs object_2 %}
+{% tab 'Scala 2 and 3' for=object_2 %}
+```scala
+val x = StringUtils.isNullOrEmpty("")    // true
+val y = StringUtils.isNullOrEmpty("a")   // false
+```
+{% endtab %}
+{% endtabs %}
+
+## Companion objects
+
+A companion class or object can access the private members of its companion.
+Use a companion object for methods and values which aren’t specific to instances of the companion class.
+
+This example demonstrates how the `area` method in the companion class can access the private `calculateArea` method in its companion object:
+
+{% tabs object_3 class=tabs-scala-version %}
+{% tab 'Scala 2' for=object_3 %}
+```scala
+import scala.math._
+
+class Circle(radius: Double) {
+ import Circle._
+ def area: Double = calculateArea(radius)
+}
+
+object Circle {
+ private def calculateArea(radius: Double): Double =
+ Pi * pow(radius, 2.0)
+}
+
+val circle1 = new Circle(5.0)
+circle1.area // Double = 78.53981633974483
+```
+{% endtab %}
+
+{% tab 'Scala 3' for=object_3 %}
+```scala
+import scala.math.*
+
+class Circle(radius: Double):
+ import Circle.*
+ def area: Double = calculateArea(radius)
+
+object Circle:
+ private def calculateArea(radius: Double): Double =
+ Pi * pow(radius, 2.0)
+
+val circle1 = Circle(5.0)
+circle1.area // Double = 78.53981633974483
+```
+{% endtab %}
+{% endtabs %}
+
+## Creating modules from traits
+
+Objects can also be used to implement traits to create modules.
+This technique takes two traits and combines them to create a concrete `object`:
+
+{% tabs object_4 class=tabs-scala-version %}
+{% tab 'Scala 2' for=object_4 %}
+```scala
+trait AddService {
+ def add(a: Int, b: Int) = a + b
+}
+
+trait MultiplyService {
+ def multiply(a: Int, b: Int) = a * b
+}
+
+// implement those traits as a concrete object
+object MathService extends AddService with MultiplyService
+
+// use the object
+import MathService._
+println(add(1,1)) // 2
+println(multiply(2,2)) // 4
+```
+{% endtab %}
+
+{% tab 'Scala 3' for=object_4 %}
+```scala
+trait AddService:
+ def add(a: Int, b: Int) = a + b
+
+trait MultiplyService:
+ def multiply(a: Int, b: Int) = a * b
+
+// implement those traits as a concrete object
+object MathService extends AddService, MultiplyService
+
+// use the object
+import MathService.*
+println(add(1,1)) // 2
+println(multiply(2,2)) // 4
+```
+{% endtab %}
+{% endtabs %}
+
+{% comment %}
+NOTE: I don’t know if this is worth keeping, but I’m leaving it here as a comment for now.
+
+> You may read that objects are used to _reify_ traits into modules.
+> _Reify_ means, “to take an abstract concept and turn it into something concrete.” This is what happens in these examples, but “implement” is a more familiar word for most people than “reify.”
+{% endcomment %}
diff --git a/_overviews/scala3-book/taste-repl.md b/_overviews/scala3-book/taste-repl.md
new file mode 100644
index 0000000000..784eaca131
--- /dev/null
+++ b/_overviews/scala3-book/taste-repl.md
@@ -0,0 +1,88 @@
+---
+title: The REPL
+type: section
+description: This section provides an introduction to the Scala REPL.
+languages: [ru, zh-cn]
+num: 6
+previous-page: taste-hello-world
+next-page: taste-vars-data-types
+---
+
+
+The Scala REPL (“Read-Evaluate-Print-Loop”) is a command-line interpreter that you use as a “playground” area to test your Scala code.
+You start a REPL session by running the `scala` or `scala3` command (depending on your installation) at your operating system command line, where you’ll see a “welcome” prompt like this:
+
+
+{% tabs command-line class=tabs-scala-version %}
+
+{% tab 'Scala 2' for=command-line %}
+```bash
+$ scala
+Welcome to Scala {{site.scala-version}} (OpenJDK 64-Bit Server VM, Java 1.8.0_342).
+Type in expressions for evaluation. Or try :help.
+
+scala> _
+```
+{% endtab %}
+
+{% tab 'Scala 3' for=command-line %}
+```bash
+$ scala
+Welcome to Scala {{site.scala-3-version}} (1.8.0_322, Java OpenJDK 64-Bit Server VM).
+Type in expressions for evaluation. Or try :help.
+
+scala> _
+```
+{% endtab %}
+
+{% endtabs %}
+
+The REPL is a command-line interpreter, so it sits there waiting for you to type something.
+Now you can type Scala expressions to see how they work:
+
+{% tabs expression-one %}
+{% tab 'Scala 2 and 3' for=expression-one %}
+````
+scala> 1 + 1
+val res0: Int = 2
+
+scala> 2 + 2
+val res1: Int = 4
+````
+{% endtab %}
+{% endtabs %}
+
+As shown in the output, if you don’t assign a variable to the result of an expression, the REPL creates variables named `res0`, `res1`, etc., for you.
+You can use these variable names in subsequent expressions:
+
+{% tabs expression-two %}
+{% tab 'Scala 2 and 3' for=expression-two %}
+````
+scala> val x = res0 * 10
+val x: Int = 20
+````
+{% endtab %}
+{% endtabs %}
+
+Notice that the REPL output also shows the result of your expressions.
+
+You can run all sorts of experiments in the REPL.
+This example shows how to create and then call a `sum` method:
+
+{% tabs expression-three %}
+{% tab 'Scala 2 and 3' for=expression-three %}
+````
+scala> def sum(a: Int, b: Int): Int = a + b
+def sum(a: Int, b: Int): Int
+
+scala> sum(2, 2)
+val res2: Int = 4
+````
+{% endtab %}
+{% endtabs %}
+
+If you prefer a browser-based playground environment, you can also use [scastie.scala-lang.org](https://scastie.scala-lang.org).
+
+If you prefer writing your code in a text editor instead of at the console prompt, you can use a [worksheet].
+
+[worksheet]: {% link _overviews/scala3-book/tools-worksheets.md %}
diff --git a/_overviews/scala3-book/taste-summary.md b/_overviews/scala3-book/taste-summary.md
new file mode 100644
index 0000000000..96c95089c3
--- /dev/null
+++ b/_overviews/scala3-book/taste-summary.md
@@ -0,0 +1,32 @@
+---
+title: Summary
+type: section
+description: This page provides a summary of the previous 'Taste of Scala' sections.
+languages: [ru, zh-cn]
+num: 16
+previous-page: taste-toplevel-definitions
+next-page: first-look-at-types
+---
+
+
+In the previous sections you saw:
+
+- How to use the Scala REPL
+- How to create variables with `val` and `var`
+- Some common data types
+- Control structures
+- How to model the real world using OOP and FP styles
+- How to create and use methods
+- How to use lambdas (anonymous functions) and higher-order functions
+- How to use objects for several purposes
+- An introduction to [contextual abstraction][contextual]
+
+We also mentioned that if you prefer using a browser-based playground environment instead of the Scala REPL, you can also use [Scastie](https://scastie.scala-lang.org/).
+
+Scala has even more features that aren’t covered in this whirlwind tour.
+See the remainder of this book and the [Reference documentation][reference] for many more details.
+
+
+
+[reference]: {{ site.scala3ref }}/overview.html
+[contextual]: {% link _overviews/scala3-book/ca-contextual-abstractions-intro.md %}
diff --git a/_overviews/scala3-book/taste-toplevel-definitions.md b/_overviews/scala3-book/taste-toplevel-definitions.md
new file mode 100644
index 0000000000..b56273945f
--- /dev/null
+++ b/_overviews/scala3-book/taste-toplevel-definitions.md
@@ -0,0 +1,71 @@
+---
+title: Toplevel Definitions
+type: section
+description: This page provides an introduction to top-level definitions in Scala 3
+languages: [ru, zh-cn]
+num: 15
+previous-page: taste-contextual-abstractions
+next-page: taste-summary
+---
+
+
+In Scala 3, all kinds of definitions can be written at the “top level” of your source code files.
+For instance, you can create a file named _MyCoolApp.scala_ and put these contents into it:
+
+{% tabs toplevel_1 %}
+{% tab 'Scala 3 only' for=toplevel_1 %}
+```scala
+import scala.collection.mutable.ArrayBuffer
+
+enum Topping:
+ case Cheese, Pepperoni, Mushrooms
+
+import Topping.*
+class Pizza:
+ val toppings = ArrayBuffer[Topping]()
+
+val p = Pizza()
+
+extension (s: String)
+ def capitalizeAllWords = s.split(" ").map(_.capitalize).mkString(" ")
+
+val hwUpper = "hello, world".capitalizeAllWords
+
+type Money = BigDecimal
+
+// more definitions here as desired ...
+
+@main def myApp =
+ p.toppings += Cheese
+ println("show me the code".capitalizeAllWords)
+```
+{% endtab %}
+{% endtabs %}
+
+As shown, there’s no need to put those definitions inside a `package`, `class`, or other construct.
+
+## Replaces package objects
+
+If you’re familiar with Scala 2, this approach replaces _package objects_.
+Though top-level definitions are much easier to use, they work similarly: when you place a definition in a package named _foo_, you can then access that definition in all other packages under _foo_, such as in the _foo.bar_ package in this example:
+
+{% tabs toplevel_2 %}
+{% tab 'Scala 3 only' for=toplevel_2 %}
+```scala
+package foo {
+ def double(i: Int) = i * 2
+}
+
+package foo {
+ package bar {
+ @main def fooBarMain =
+ println(s"${double(1)}") // this works
+ }
+}
+```
+{% endtab %}
+{% endtabs %}
+
+Curly braces are used in this example to emphasize the package nesting.
+
+The benefit of this approach is that you can place definitions under a package named _com.acme.myapp_, and then those definitions can be referenced within _com.acme.myapp.model_, _com.acme.myapp.controller_, etc.
diff --git a/_overviews/scala3-book/taste-vars-data-types.md b/_overviews/scala3-book/taste-vars-data-types.md
new file mode 100644
index 0000000000..194e2d7f40
--- /dev/null
+++ b/_overviews/scala3-book/taste-vars-data-types.md
@@ -0,0 +1,273 @@
+---
+title: Variables and Data Types
+type: section
+description: This section demonstrates val and var variables, and some common Scala data types.
+languages: [ru, zh-cn]
+num: 7
+previous-page: taste-repl
+next-page: taste-control-structures
+---
+
+
+This section provides a look at Scala variables and data types.
+
+## Two types of variables
+
+When you create a new variable in Scala, you declare whether the variable is immutable or mutable:
+
+| Variable type | Description |
+| ------------- | ----------- |
+| `val` | Creates an _immutable_ variable---like `final` in Java. You should always create a variable with `val`, unless there’s a reason you need a mutable variable. |
+| `var` | Creates a _mutable_ variable, and should only be used when a variable’s contents will change over time. |
+
+These examples show how to create `val` and `var` variables:
+
+{% tabs var-express-1 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+// immutable
+val a = 0
+
+// mutable
+var b = 1
+```
+{% endtab %}
+{% endtabs %}
+
+In an application, a `val` can’t be reassigned.
+You’ll cause a compiler error if you try to reassign one:
+
+{% tabs var-express-2 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+val msg = "Hello, world"
+msg = "Aloha" // "reassignment to val" error; this won’t compile
+```
+{% endtab %}
+{% endtabs %}
+
+Conversely, a `var` can be reassigned:
+
+{% tabs var-express-3 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+var msg = "Hello, world"
+msg = "Aloha" // this compiles because a var can be reassigned
+```
+{% endtab %}
+{% endtabs %}
+
+## Declaring variable types
+
+When you create a variable you can explicitly declare its type, or let the compiler infer the type:
+
+{% tabs var-express-4 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+val x: Int = 1 // explicit
+val x = 1 // implicit; the compiler infers the type
+```
+{% endtab %}
+{% endtabs %}
+
+The second form is known as _type inference_, and it’s a great way to help keep this type of code concise.
+The Scala compiler can usually infer the data type for you, as shown in the output of these REPL examples:
+
+{% tabs var-express-5 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+scala> val x = 1
+val x: Int = 1
+
+scala> val s = "a string"
+val s: String = a string
+
+scala> val nums = List(1, 2, 3)
+val nums: List[Int] = List(1, 2, 3)
+```
+{% endtab %}
+{% endtabs %}
+
+You can always explicitly declare a variable’s type if you prefer, but in simple assignments like these it isn’t necessary:
+
+{% tabs var-express-6 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+val x: Int = 1
+val s: String = "a string"
+val p: Person = Person("Richard")
+```
+{% endtab %}
+{% endtabs %}
+
+Notice that with this approach, the code feels more verbose than necessary.
+
+{% comment %}
+TODO: Jonathan had an early comment on the text below: “While it might feel like this, I would be afraid that people automatically assume from this statement that everything is always boxed.” Suggestion on how to change this?
+{% endcomment %}
+
+## Built-in data types
+
+Scala comes with the standard numeric data types you’d expect, and they’re all full-blown instances of classes.
+In Scala, everything is an object.
+
+These examples show how to declare variables of the numeric types:
+
+{% tabs var-express-7 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+val b: Byte = 1
+val i: Int = 1
+val l: Long = 1
+val s: Short = 1
+val d: Double = 2.0
+val f: Float = 3.0
+```
+{% endtab %}
+{% endtabs %}
+
+Because `Int` and `Double` are the default numeric types, you typically create them without explicitly declaring the data type:
+
+{% tabs var-express-8 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+val i = 123 // defaults to Int
+val j = 1.0 // defaults to Double
+```
+{% endtab %}
+{% endtabs %}
+
+In your code you can also append the characters `L`, `D`, and `F` (and their lowercase equivalents) to numbers to specify that they are `Long`, `Double`, or `Float` values:
+
+{% tabs var-express-9 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+val x = 1_000L // val x: Long = 1000
+val y = 2.2D // val y: Double = 2.2
+val z = 3.3F // val z: Float = 3.3
+```
+{% endtab %}
+{% endtabs %}
+
+When you need really large numbers, use the `BigInt` and `BigDecimal` types:
+
+{% tabs var-express-10 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+var a = BigInt(1_234_567_890_987_654_321L)
+var b = BigDecimal(123_456.789)
+```
+{% endtab %}
+{% endtabs %}
+
+Where `Double` and `Float` are approximate decimal numbers, `BigDecimal` is used for precise arithmetic.
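+
+For instance, here’s a quick sketch (not from the original text) that contrasts the two:
+
+{% tabs var-express-bigdecimal %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+0.1 + 0.2                          // Double = 0.30000000000000004
+BigDecimal(0.1) + BigDecimal(0.2)  // BigDecimal = 0.3
+```
+{% endtab %}
+{% endtabs %}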
+
+Scala also has `String` and `Char` data types:
+
+{% tabs var-express-11 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+val name = "Bill" // String
+val c = 'a' // Char
+```
+{% endtab %}
+{% endtabs %}
+
+### Strings
+
+Scala strings are similar to Java strings, but they have two great additional features:
+
+- They support string interpolation
+- It’s easy to create multiline strings
+
+#### String interpolation
+
+String interpolation provides a very readable way to use variables inside strings.
+For instance, given these three variables:
+
+{% tabs var-express-12 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+val firstName = "John"
+val mi = 'C'
+val lastName = "Doe"
+```
+{% endtab %}
+{% endtabs %}
+
+You can combine those variables in a string like this:
+
+{% tabs var-express-13 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+println(s"Name: $firstName $mi $lastName") // "Name: John C Doe"
+```
+{% endtab %}
+{% endtabs %}
+
+Just precede the string with the letter `s`, and then put a `$` symbol before your variable names inside the string.
+
+To embed arbitrary expressions inside a string, enclose them in curly braces:
+
+{% tabs var-express-14 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+println(s"2 + 2 = ${2 + 2}") // prints "2 + 2 = 4"
+
+val x = -1
+println(s"x.abs = ${x.abs}") // prints "x.abs = 1"
+```
+{% endtab %}
+{% endtabs %}
+
+The `s` that you place before the string is just one possible interpolator.
+If you use an `f` instead of an `s`, you can use `printf`-style formatting syntax in the string.
+Furthermore, a string interpolator is just a special method and it is possible to define your own.
+For instance, some database libraries define the very powerful `sql` interpolator.
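+
+For example, here’s a small sketch (not from the original text) of the `f` interpolator:
+
+{% tabs var-express-f-interp %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+val temperature = 98.6
+println(f"Body temperature: $temperature%.1f")   // "Body temperature: 98.6"
+```
+{% endtab %}
+{% endtabs %}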
+
+#### Multiline strings
+
+Multiline strings are created by including the string inside three double-quotes:
+
+{% tabs var-express-15 %}
+{% tab 'Scala 2 and 3' %}
+
+```scala
+val quote = """The essence of Scala:
+ Fusion of functional and object-oriented
+ programming in a typed setting."""
+```
+{% endtab %}
+{% endtabs %}
+
+> For more details on string interpolators and multiline strings, see the [“First Look at Types” chapter][first-look].
+
+[first-look]: {% link _overviews/scala3-book/first-look-at-types.md %}
diff --git a/_overviews/scala3-book/tools-sbt.md b/_overviews/scala3-book/tools-sbt.md
new file mode 100644
index 0000000000..c17820ecf5
--- /dev/null
+++ b/_overviews/scala3-book/tools-sbt.md
@@ -0,0 +1,511 @@
+---
+title: Building and Testing Scala Projects with sbt
+type: section
+description: This section looks at a commonly-used build tool, sbt, and a testing library, ScalaTest.
+languages: [ru, zh-cn]
+num: 71
+previous-page: scala-tools
+next-page: tools-worksheets
+---
+
+In this section you’ll see two tools that are commonly used in Scala projects:
+
+- The [sbt](https://www.scala-sbt.org) build tool
+- [ScalaTest](https://www.scalatest.org), a source code testing framework
+
+We’ll start by showing how to use sbt to build your Scala projects, and then we’ll show how to use sbt and ScalaTest together to test your Scala projects.
+
+> If you want to learn about tools to help you migrate your Scala 2 code to Scala 3, see our [Scala 3 Migration Guide](/scala3/guides/migration/compatibility-intro.html).
+
+
+
+## Building Scala projects with sbt
+
+You can use several different tools to build your Scala projects, including Ant, Maven, Gradle, Mill, and more.
+But a tool named _sbt_ was the first build tool that was specifically created for Scala.
+
+> To install sbt, see [its download page](https://www.scala-sbt.org/download.html) or our [Getting Started][getting_started] page.
+
+
+
+### Creating a “Hello, world” project
+
+You can create an sbt “Hello, world” project in just a few steps.
+First, create a directory to work in, and move into that directory:
+
+```bash
+$ mkdir hello
+$ cd hello
+```
+
+In the directory `hello`, create a subdirectory `project`:
+
+```bash
+$ mkdir project
+```
+
+Create a file named _build.properties_ in the directory `project`, with
+the following content:
+
+```text
+sbt.version=1.10.11
+```
+
+Then create a file named _build.sbt_ in the project root directory that contains this line:
+
+```scala
+scalaVersion := "{{ site.scala-3-version }}"
+```
+
+Now create a file named something like _Hello.scala_---the first part of the name doesn’t matter---with this line:
+
+```scala
+@main def helloWorld = println("Hello, world")
+```
+
+That’s all you have to do.
+
+You should have a project structure like the following:
+
+~~~ bash
+$ tree
+.
+├── build.sbt
+├── Hello.scala
+└── project
+    └── build.properties
+~~~
+
+Now run the project with this `sbt` command:
+
+```bash
+$ sbt run
+```
+
+You should see output similar to the following (version numbers may vary), including the `"Hello, world"` from your program:
+
+```bash
+$ sbt run
+[info] welcome to sbt 1.5.4 (AdoptOpenJDK Java 11.x)
+[info] loading project definition from project ...
+[info] loading settings for project from build.sbt ...
+[info] compiling 1 Scala source to target/scala-3.0.0/classes ...
+[info] running helloWorld
+Hello, world
+[success] Total time: 2 s
+```
+
+The sbt launcher---the `sbt` command-line tool---loads the version of sbt set in the file _project/build.properties_, which loads the version of the Scala compiler set in the file _build.sbt_, compiles the code in the file _Hello.scala_, and runs the resulting bytecode.
+
+When you look at your directory, you’ll see that sbt has created a directory named _target_ (and another one under _project_).
+These are working directories that sbt uses.
+
+As you can see, creating and running a little Scala project with sbt takes just a few simple steps.
+
+### Using sbt with larger projects
+
+For a little project, that’s all that sbt requires to run.
+For larger projects that require many source code files, dependencies, or sbt plugins, you’ll want to create an organized directory structure.
+The rest of this section demonstrates the structure that sbt uses.
+
+
+### The sbt directory structure
+
+Like Maven, sbt uses a standard project directory structure.
+A nice benefit is that once you’re comfortable with its structure, it’s easy to work on other Scala/sbt projects.
+
+The first thing to know is that underneath the root directory of your project, sbt expects a directory structure that looks like this:
+
+```text
+.
+├── build.sbt
+├── project/
+│   └── build.properties
+├── src/
+│   ├── main/
+│   │   ├── java/
+│   │   ├── resources/
+│   │   └── scala/
+│   └── test/
+│       ├── java/
+│       ├── resources/
+│       └── scala/
+└── target/
+```
+
+You can also add a _lib_ directory under the root directory if you want to add unmanaged dependencies---JAR files---to your project.
+
+If you’re going to create a project that has Scala source code files and tests, but won’t be using any Java source code files, and doesn’t need any “resources”---such as embedded images, configuration files, etc.---this is all you really need under the _src_ directory:
+
+```text
+.
+└── src/
+    ├── main/
+    │   └── scala/
+    └── test/
+        └── scala/
+```
+
+
+### “Hello, world” with an sbt directory structure
+
+{% comment %}
+LATER: using something like `sbt new scala/scala3.g8` may eventually
+ be preferable, but that seems to have a few bugs atm (creates
+ a 'target' directory above the root; renames the root dir;
+ uses 'dottyVersion'; 'name' doesn’t match the supplied name;
+ config syntax is a little hard for beginners.)
+{% endcomment %}
+
+Creating this directory structure is simple.
+There are tools to do this for you, but assuming that you’re using a Unix/Linux system, you can use these commands to create your first sbt project directory structure:
+
+```bash
+$ mkdir HelloWorld
+$ cd HelloWorld
+$ mkdir -p src/{main,test}/scala
+$ mkdir project target
+```
+
+When you run a `find .` command after running those commands, you should see this result:
+
+```bash
+$ find .
+.
+./project
+./src
+./src/main
+./src/main/scala
+./src/test
+./src/test/scala
+./target
+```
+
+If you see that, you’re in great shape for the next step.
+
+> There are other ways to create the files and directories for an sbt project.
+> One way is to use the `sbt new` command, [which is documented here on scala-sbt.org](https://www.scala-sbt.org/1.x/docs/Hello.html).
+> That approach isn’t shown here because some of the files it creates are more complicated than necessary for an introduction like this.
+
+
+### Creating a first build.sbt file
+
+At this point you only need two more things to run a “Hello, world” project:
+
+- A _build.sbt_ file
+- A _Hello.scala_ file
+
+For a little project like this, the _build.sbt_ file only needs a `scalaVersion` entry, but we’ll add three lines that you commonly see:
+
+```scala
+name := "HelloWorld"
+version := "0.1"
+scalaVersion := "{{ site.scala-3-version }}"
+```
+
+Because sbt projects use a standard directory structure, sbt can find everything else it needs.
+
+Now you just need to add a little “Hello, world” program.
+
+
+### A “Hello, world” program
+
+In large projects, all of your Scala source code files will go under the _src/main/scala_ and _src/test/scala_ directories, but for a little sample project like this, you can put your source code file in the root directory of your project.
+Therefore, create a file named _HelloWorld.scala_ in the root directory with these contents:
+
+```scala
+@main def helloWorld = println("Hello, world")
+```
+
+That code defines a Scala 3 “main” method that prints the `"Hello, world"` when it’s run.
+
+Now, use the `sbt run` command to compile and run your project:
+
+```bash
+$ sbt run
+
+[info] welcome to sbt
+[info] loading settings for project ...
+[info] loading project definition
+[info] loading settings for project root from build.sbt ...
+[info] Compiling 1 Scala source ...
+[info] running helloWorld
+Hello, world
+[success] Total time: 4 s
+```
+
+The first time you run `sbt` it downloads everything it needs, and that can take a few moments to run, but after that it gets much faster.
+
+Also, once you get this first step working, you’ll find that it’s much faster to run sbt interactively.
+To do that, first run the `sbt` command by itself:
+
+```bash
+$ sbt
+
+[info] welcome to sbt
+[info] loading settings for project ...
+[info] loading project definition ...
+[info] loading settings for project root from build.sbt ...
+[info] sbt server started at
+ local:///${HOME}/.sbt/1.0/server/7d26bae822c36a31071c/sock
+sbt:hello-world> _
+```
+
+Then inside this sbt shell, execute its `run` command:
+
+````
+sbt:hello-world> run
+
+[info] running helloWorld
+Hello, world
+[success] Total time: 0 s
+````
+
+There, that’s much faster.
+
+If you type `help` at the sbt command prompt you’ll see a list of other commands you can run.
+But for now, just type `exit` (or press `CTRL-D`) to leave the sbt shell.
+
+### Using project templates
+
+Manually creating the project structure can be tedious. Thankfully, sbt can create it for you,
+based on a template.
+
+To create a Scala 3 project from a template, run the following command in a shell:
+
+~~~
+$ sbt new scala/scala3.g8
+~~~
+
+sbt will load the template, ask some questions, and create the project files in a subdirectory:
+
+~~~
+$ tree scala-3-project-template
+scala-3-project-template
+├── build.sbt
+├── project
+│   └── build.properties
+├── README.md
+└── src
+    ├── main
+    │   └── scala
+    │       └── Main.scala
+    └── test
+        └── scala
+            └── Test1.scala
+~~~
+
+> If you want to create a Scala 3 project that cross-compiles with Scala 2, use the template `scala/scala3-cross.g8`:
+>
+> ~~~
+> $ sbt new scala/scala3-cross.g8
+> ~~~
+
+Learn more about `sbt new` and project templates in the [documentation of sbt](https://www.scala-sbt.org/1.x/docs/sbt-new-and-Templates.html#sbt+new+and+Templates).
+
+### Other build tools for Scala
+
+While sbt is widely used, there are other tools you can use to build Scala projects:
+
+- [Ant](https://ant.apache.org/)
+- [Gradle](https://gradle.org/)
+- [Maven](https://maven.apache.org/)
+- [Mill](https://com-lihaoyi.github.io/mill/)
+
+#### Coursier
+
+On a related note, [Coursier](https://get-coursier.io/docs/overview) is a “dependency resolver,” similar in function to Maven and Ivy.
+It’s written from scratch in Scala, “embraces functional programming principles,” and downloads artifacts in parallel for rapid downloads.
+sbt uses it to handle most dependency resolution, and, as a command-line tool, it can also be used to easily install tools like sbt, Java, and Scala on your system, as shown on our [Getting Started][getting_started] page.
+
+This example from the `launch` web page shows that the `cs launch` command can be used to launch applications from dependencies:
+
+```bash
+$ cs launch org.scalameta::scalafmt-cli:2.4.2 -- --help
+scalafmt 2.4.2
+Usage: scalafmt [options] [...]
+
+ -h, --help prints this usage text
+ -v, --version print version
+ more ...
+```
+
+See Coursier’s [launch page](https://get-coursier.io/docs/cli-launch) for more details.
+
+
+
+## Using sbt with ScalaTest
+
+[ScalaTest](https://www.scalatest.org) is one of the main testing libraries for Scala projects.
+In this section you’ll see the steps necessary to create a Scala/sbt project that uses ScalaTest.
+
+
+### 1) Create the project directory structure
+
+As with the previous lesson, create an sbt project directory structure for a project named _HelloScalaTest_ with the following commands:
+
+```bash
+$ mkdir HelloScalaTest
+$ cd HelloScalaTest
+$ mkdir -p src/{main,test}/scala
+$ mkdir project
+```
+
+
+### 2) Create the build.properties and build.sbt files
+
+Next, create a _build.properties_ file in the _project/_ subdirectory of your project
+with this line:
+
+```text
+sbt.version=1.10.11
+```
+
+Next, create a _build.sbt_ file in the root directory of your project with these contents:
+
+```scala
+name := "HelloScalaTest"
+version := "0.1"
+scalaVersion := "{{site.scala-3-version}}"
+
+libraryDependencies ++= Seq(
+ "org.scalatest" %% "scalatest" % "3.2.19" % Test
+)
+```
+
+The first three lines of this file are essentially the same as in the first example.
+The `libraryDependencies` setting tells sbt to include the dependencies (JAR files) that are needed to use ScalaTest.
+
+> The ScalaTest documentation is very good, and you can always find up-to-date information on what those lines should look like on the [Installing ScalaTest](https://www.scalatest.org/install) page.
+
+
+### 3) Create a Scala source code file
+
+Next, create a Scala program that you can use to demonstrate ScalaTest.
+First, create a directory under _src/main/scala_ named _math_:
+
+```bash
+$ mkdir src/main/scala/math
+                       ----
+```
+
+Then, inside that directory, create a file named _MathUtils.scala_ with these contents:
+
+```scala
+package math
+
+object MathUtils:
+ def double(i: Int) = i * 2
+```
+
+That method provides a simple way to demonstrate ScalaTest.
+
+
+{% comment %}
+Because this project doesn’t have a `main` method, we don’t try to run it with `sbt run`; we just compile it with `sbt compile`:
+
+````
+$ sbt compile
+
+[info] welcome to sbt
+[info] loading settings for project ...
+[info] loading project definition ...
+[info] loading settings for project ...
+[info] Executing in batch mode. For better performance use sbt's shell
+[success] Total time: 1 s
+````
+
+With that compiled, let’s create a ScalaTest file to test the `double` method.
+{% endcomment %}
+
+
+### 4) Create your first ScalaTest tests
+
+ScalaTest is very flexible, and offers several different ways to write tests.
+A simple way to get started is to write tests using the ScalaTest `AnyFunSuite`.
+To get started, create a directory named _math_ under the _src/test/scala_ directory:
+
+```bash
+$ mkdir src/test/scala/math
+                       ----
+```
+
+Next, create a file named _MathUtilsTests.scala_ in that directory with the following contents:
+
+```scala
+package math
+
+import org.scalatest.funsuite.AnyFunSuite
+
+class MathUtilsTests extends AnyFunSuite:
+
+ // test 1
+ test("'double' should handle 0") {
+ val result = MathUtils.double(0)
+ assert(result == 0)
+ }
+
+ // test 2
+ test("'double' should handle 1") {
+ val result = MathUtils.double(1)
+ assert(result == 2)
+ }
+
+ test("test with Int.MaxValue") (pending)
+
+end MathUtilsTests
+```
+
+This code demonstrates the ScalaTest `AnyFunSuite` approach.
+A few important points:
+
+- Your test class should extend `AnyFunSuite`
+- You create tests as shown, by giving each `test` a unique name
+- At the end of each test you should call `assert` to test that a condition has been satisfied
+- When you know you want to write a test, but you don’t want to write it right now, create the test as “pending,” with the syntax shown
+
+Using ScalaTest like this is similar to using JUnit, so if you’re coming to Scala from Java, hopefully this looks familiar.
+
+Now you can run these tests with the `sbt test` command.
+Skipping the first few lines of output, the result looks like this:
+
+````
+sbt:HelloScalaTest> test
+
+[info] Compiling 1 Scala source ...
+[info] MathUtilsTests:
+[info] - 'double' should handle 0
+[info] - 'double' should handle 1
+[info] - test with Int.MaxValue (pending)
+[info] Total number of tests run: 2
+[info] Suites: completed 1, aborted 0
+[info] Tests: succeeded 2, failed 0, canceled 0, ignored 0, pending 1
+[info] All tests passed.
+[success] Total time: 1 s
+````
+
+If everything works well, you’ll see output that looks like that.
+Welcome to the world of testing Scala applications with sbt and ScalaTest.
+
+
+### Support for many types of tests
+
+This example demonstrates a style of testing that’s similar to xUnit _Test-Driven Development_ (TDD) style testing, with a few benefits of the _Behavior-Driven Development_ (BDD) style.
+
+As mentioned, ScalaTest is flexible and you can also write tests using other styles, such as a style similar to Ruby’s RSpec.
+You can also use mock objects and property-based testing, and you can use ScalaTest to test Scala.js code.
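+
+For example, here’s a minimal sketch (not from the original text) of the earlier `double` test written in ScalaTest’s RSpec-like `AnyFlatSpec` style:
+
+```scala
+package math
+
+import org.scalatest.flatspec.AnyFlatSpec
+import org.scalatest.matchers.should.Matchers
+
+class MathUtilsSpec extends AnyFlatSpec with Matchers:
+  "double" should "double its input" in {
+    MathUtils.double(2) shouldBe 4
+  }
+```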
+
+See the User Guide on the [ScalaTest website](https://www.scalatest.org) for more details on the different testing styles that are available.
+
+
+
+## Where to go from here
+
+For more information about sbt and ScalaTest, see the following resources:
+
+- [The sbt documentation](https://www.scala-sbt.org/1.x/docs/)
+- [The ScalaTest website](https://www.scalatest.org/)
+
+
+
+[getting_started]: {{ site.baseurl }}/scala3/getting-started.html
diff --git a/_overviews/scala3-book/tools-worksheets.md b/_overviews/scala3-book/tools-worksheets.md
new file mode 100644
index 0000000000..cf14935e46
--- /dev/null
+++ b/_overviews/scala3-book/tools-worksheets.md
@@ -0,0 +1,57 @@
+---
+title: Worksheets
+type: section
+description: This section looks at worksheets, an alternative to Scala projects.
+languages: [ru, zh-cn]
+num: 72
+previous-page: tools-sbt
+next-page: interacting-with-java
+---
+
+A worksheet is a Scala file that is evaluated on save, and the result of each expression is shown
+in a column to the right of your program. Worksheets are like a [REPL session] on steroids, and
+enjoy first-class editor support: completion, hyperlinking, interactive errors-as-you-type, etc.
+Worksheets use the extension `.worksheet.sc`.
+
+In the following, we show how to use worksheets in IntelliJ, and in VS Code (with the Metals extension).
+
+1. Open a Scala project, or create one.
+ - To create a project in IntelliJ, select “File” -> “New” -> “Project…”, select “Scala”
+ in the left column, and click “Next” to set the project name and location.
+ - To create a project in VS Code, run the command “Metals: New Scala project”, select the
+ seed `scala/scala3.g8`, set the project location, open it in a new VS Code window, and
+ import its build.
+1. Create a file named `hello.worksheet.sc` in the directory `src/main/scala/`.
+ - In IntelliJ, right-click on the directory `src/main/scala/`, and select “New”, and
+ then “File”.
+ - In VS Code, right-click on the directory `src/main/scala/`, and select “New File”.
+1. Paste the following content in the editor:
+ ~~~
+ println("Hello, world!")
+
+ val x = 1
+ x + x
+ ~~~
+1. Evaluate the worksheet.
+ - In IntelliJ, click on the green arrow at the top of the editor to evaluate the worksheet.
+ - In VS Code, save the file.
+
+ You should see the result of the evaluation of every line on the right panel (IntelliJ), or
+ as comments (VS Code).
+
+
+A worksheet evaluated in IntelliJ.
+
+A worksheet evaluated in VS Code (with the Metals extension).
+
+Note that the worksheet will use the Scala version defined by your project (typically set by the `scalaVersion` key in your `build.sbt` file).
+
+Also note that worksheets don’t have a [program entry point]. Instead, top-level statements and expressions
+are evaluated from top to bottom.
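+
+For example, a worksheet like this sketch (not from the original) is evaluated line by line, and each result is displayed next to the expression that produced it (shown here as comments):
+
+~~~
+println("Hello, worksheets!")   // Hello, worksheets!
+val x = 21                      // x: Int = 21
+x * 2                           // res0: Int = 42
+~~~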
+
+[REPL session]: {% link _overviews/scala3-book/taste-repl.md %}
+[program entry point]: {% link _overviews/scala3-book/methods-main-methods.md %}
diff --git a/_overviews/scala3-book/types-adts-gadts.md b/_overviews/scala3-book/types-adts-gadts.md
new file mode 100644
index 0000000000..356d01c16d
--- /dev/null
+++ b/_overviews/scala3-book/types-adts-gadts.md
@@ -0,0 +1,195 @@
+---
+title: Algebraic Data Types
+type: section
+description: This section introduces and demonstrates algebraic data types (ADTs) in Scala 3.
+languages: [ru, zh-cn]
+num: 54
+previous-page: types-union
+next-page: types-variance
+scala3: true
+versionSpecific: true
+---
+
+
+Algebraic Data Types (ADTs) can be created with the `enum` construct, so we’ll briefly review enumerations before looking at ADTs.
+
+## Enumerations
+
+An _enumeration_ is used to define a type consisting of a set of named values:
+
+```scala
+enum Color:
+ case Red, Green, Blue
+```
+which can be seen as a shorthand for:
+```scala
+enum Color:
+ case Red extends Color
+ case Green extends Color
+ case Blue extends Color
+```
+#### Parameters
+Enums can be parameterized:
+
+```scala
+enum Color(val rgb: Int):
+ case Red extends Color(0xFF0000)
+ case Green extends Color(0x00FF00)
+ case Blue extends Color(0x0000FF)
+```
+This way, each of the different variants has a value member `rgb` which is assigned the corresponding value:
+```scala
+println(Color.Green.rgb) // prints 65280
+```
+
+#### Custom Definitions
+Enums can also have custom definitions:
+
+```scala
+enum Planet(mass: Double, radius: Double):
+
+ private final val G = 6.67300E-11
+ def surfaceGravity = G * mass / (radius * radius)
+ def surfaceWeight(otherMass: Double) = otherMass * surfaceGravity
+
+ case Mercury extends Planet(3.303e+23, 2.4397e6)
+ case Venus extends Planet(4.869e+24, 6.0518e6)
+ case Earth extends Planet(5.976e+24, 6.37814e6)
+ // 5 or 6 more planets ...
+```
+
+Like classes and `case` classes, you can also define a companion object for an enum:
+
+```scala
+object Planet:
+ def main(args: Array[String]) =
+ val earthWeight = args(0).toDouble
+ val mass = earthWeight / Earth.surfaceGravity
+ for (p <- values)
+ println(s"Your weight on $p is ${p.surfaceWeight(mass)}")
+```
+
+## Algebraic Datatypes (ADTs)
+
+The `enum` concept is general enough to also support _algebraic data types_ (ADTs) and their generalized version (GADTs).
+Here’s an example that shows how an `Option` type can be represented as an ADT:
+
+```scala
+enum Option[+T]:
+ case Some(x: T)
+ case None
+```
+
+This example creates an `Option` enum with a covariant type parameter `T` consisting of two cases, `Some` and `None`.
+`Some` is _parameterized_ with a value parameter `x`; this is a shorthand for writing a `case` class that extends `Option`.
+Since `None` is not parameterized, it’s treated as a normal `enum` value.
+
+The `extends` clauses that were omitted in the previous example can also be given explicitly:
+
+```scala
+enum Option[+T]:
+ case Some(x: T) extends Option[T]
+ case None extends Option[Nothing]
+```
+
+As with normal `enum` values, the cases of an `enum` are defined in the `enum`’s companion object, so they’re referred to as `Option.Some` and `Option.None` (unless the definitions are “pulled out” with an import):
+
+```scala
+scala> Option.Some("hello")
+val res1: t2.Option[String] = Some(hello)
+
+scala> Option.None
+val res2: t2.Option[Nothing] = None
+```
+
+As with other enumeration uses, ADTs can define additional methods.
+For instance, here’s `Option` again, with an `isDefined` method and an `Option(...)` constructor in its companion object:
+
+```scala
+enum Option[+T]:
+ case Some(x: T)
+ case None
+
+ def isDefined: Boolean = this match
+ case None => false
+ case Some(_) => true
+
+object Option:
+ def apply[T >: Null](x: T): Option[T] =
+ if (x == null) None else Some(x)
+```
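+
+A quick usage sketch (not from the original text), assuming the custom `Option` above is in scope:
+
+```scala
+Option("hello")             // Some(hello)
+Option("hello").isDefined   // true
+Option(null).isDefined      // false
+```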
+
+Enumerations and ADTs share the same syntactic construct, so they can
+be seen simply as two ends of a spectrum, and it’s perfectly possible
+to construct hybrids.
+For instance, the code below gives an
+implementation of `Color`, either with three enum values or with a
+parameterized case that takes an RGB value:
+
+```scala
+enum Color(val rgb: Int):
+ case Red extends Color(0xFF0000)
+ case Green extends Color(0x00FF00)
+ case Blue extends Color(0x0000FF)
+ case Mix(mix: Int) extends Color(mix)
+```
+
+#### Recursive Enumerations
+So far all the enumerations that we defined consisted of different variants of values or case classes.
+Enumerations can also be recursive, as illustrated in the following example, which encodes natural numbers:
+```scala
+enum Nat:
+ case Zero
+ case Succ(n: Nat)
+```
+For example, the value `Succ(Succ(Zero))` represents the number `2` in a unary encoding.
+Lists can be defined in a very similar way:
+
+```scala
+enum List[+A]:
+ case Nil
+ case Cons(head: A, tail: List[A])
+```
+
+## Generalized Algebraic Datatypes (GADTs)
+The above notation for enumerations is very concise and serves as the perfect starting point for modeling your data types.
+Since we can always be more explicit, it is also possible to express types that are much more powerful: generalized algebraic datatypes (GADTs).
+
+Here is an example of a GADT where the type parameter (`T`) specifies the contents stored in the box:
+```scala
+enum Box[T](contents: T):
+ case IntBox(n: Int) extends Box[Int](n)
+ case BoolBox(b: Boolean) extends Box[Boolean](b)
+```
+Pattern matching on the particular constructor (`IntBox` or `BoolBox`) recovers the type information:
+```scala
+def extract[T](b: Box[T]): T = b match
+ case IntBox(n) => n + 1
+ case BoolBox(b) => !b
+```
+It is only safe to return an `Int` in the first case, since we know from pattern matching that the input was an `IntBox`.
+
+
+## Desugaring Enumerations
+_Conceptually_, enums can be thought of as defining a sealed class together with its companion object.
+Let’s look at the desugaring of our `Color` enum above:
+```scala
+sealed abstract class Color(val rgb: Int) extends scala.reflect.Enum
+object Color:
+ case object Red extends Color(0xFF0000) { def ordinal = 0 }
+ case object Green extends Color(0x00FF00) { def ordinal = 1 }
+ case object Blue extends Color(0x0000FF) { def ordinal = 2 }
+ case class Mix(mix: Int) extends Color(mix) { def ordinal = 3 }
+
+ def fromOrdinal(ordinal: Int): Color = ordinal match
+ case 0 => Red
+ case 1 => Green
+ case 2 => Blue
+ case _ => throw new NoSuchElementException(ordinal.toString)
+```
+Note that the above desugaring is simplified and we purposefully leave out [some details][desugar-enums].
+
+While enums could be manually encoded using other constructs, using enumerations is more concise and also comes with a few additional utilities (such as the `fromOrdinal` method).
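+
+For example (a small sketch, not from the original text), with the `Color` enum above:
+
+```scala
+Color.Red.ordinal      // 0
+Color.fromOrdinal(1)   // Color.Green
+```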
+
+
+[desugar-enums]: {{ site.scala3ref }}/enums/desugarEnums.html
diff --git a/_overviews/scala3-book/types-dependent-function.md b/_overviews/scala3-book/types-dependent-function.md
new file mode 100644
index 0000000000..cf86880fa6
--- /dev/null
+++ b/_overviews/scala3-book/types-dependent-function.md
@@ -0,0 +1,149 @@
+---
+title: Dependent Function Types
+type: section
+description: This section introduces and demonstrates dependent function types in Scala 3.
+languages: [ru, zh-cn]
+num: 58
+previous-page: types-structural
+next-page: types-others
+scala3: true
+versionSpecific: true
+---
+
+A *dependent function type* is a function type whose result type may depend on the function’s parameter values.
+The concept of dependent types, and of dependent function types, is fairly advanced; you would typically only come across it when designing your own libraries or using advanced libraries.
+
+## Dependent Method Types
+Let's consider the following example of a heterogeneous database that can store values of different types.
+Each key carries the information about the type of the corresponding value:
+
+```scala
+trait Key { type Value }
+
+trait DB {
+ def get(k: Key): Option[k.Value] // a dependent method
+}
+```
+Given a key, the method `get` lets us access the database and potentially returns the stored value of type `k.Value`.
+We can read this _path-dependent type_ as: "depending on the concrete type of the argument `k`, we return a matching value".
+
+For example, we could have the following keys:
+```scala
+object Name extends Key { type Value = String }
+object Age extends Key { type Value = Int }
+```
+The following calls to method `get` would now type check:
+```scala
+val db: DB = ...
+val res1: Option[String] = db.get(Name)
+val res2: Option[Int] = db.get(Age)
+```
+Calling the method `db.get(Name)` returns a value of type `Option[String]`, while calling `db.get(Age)` returns a value of type `Option[Int]`.
+The return type _depends_ on the concrete type of the argument passed to `get`---hence the name _dependent type_.
+
+## Dependent Function Types
+As seen above, Scala 2 already had support for dependent method types.
+However, creating values of type `DB` is quite cumbersome:
+```scala
+// a user of a DB
+def user(db: DB): Unit =
+ db.get(Name) ... db.get(Age)
+
+// creating an instance of the DB and passing it to `user`
+user(new DB {
+ def get(k: Key): Option[k.Value] = ... // implementation of DB
+})
+```
+We need to manually create an anonymous class that implements `DB` and its `get` method.
+For code that relies on creating many different instances of `DB`, this is very tedious.
+
+The trait `DB` only has a single abstract method `get`.
+Wouldn't it be nice if we could use lambda syntax instead?
+```scala
+user { k =>
+ ... // implementation of DB
+}
+```
+In fact, this is now possible in Scala 3! We can define `DB` as a _dependent function type_:
+```scala
+type DB = (k: Key) => Option[k.Value]
+// ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+// A dependent function type
+```
+Given this definition of `DB`, the above call to `user` type checks as is.
+
+You can read more about the internals of dependent function types in the [reference documentation][ref].
+
+## Case Study: Numerical Expressions
+Let us assume we want to define a module that abstracts over the internal representation of numbers.
+This can be useful, for instance, to implement libraries for automatic differentiation.
+
+We start by defining our module for numbers:
+```scala
+trait Nums:
+ // the type of numbers is left abstract
+ type Num
+
+ // some operations on numbers
+ def lit(d: Double): Num
+ def add(l: Num, r: Num): Num
+ def mul(l: Num, r: Num): Num
+```
+> We omit the concrete implementation of `Nums` here, but as an exercise you could implement it by setting `type Num = Double` and implementing the methods accordingly.
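+
+If you want to compare with a solution, here is a minimal sketch of such an implementation (the object name `DoubleNums` is made up for this example):
+
+```scala
+object DoubleNums extends Nums:
+  type Num = Double
+
+  def lit(d: Double): Num = d
+  def add(l: Num, r: Num): Num = l + r
+  def mul(l: Num, r: Num): Num = l * r
+```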
+
+A program that uses our number abstraction now has the following type:
+
+```scala
+type Prog = (n: Nums) => n.Num => n.Num
+
+val ex: Prog = nums => x => nums.add(nums.lit(0.8), x)
+```
+The signature of a function that computes the derivative of programs like `ex` is:
+```scala
+def derivative(input: Prog): Double
+```
+Thanks to dependent function types, calling this function with different programs is very convenient:
+```scala
+derivative { nums => x => x }
+derivative { nums => x => nums.add(nums.lit(0.8), x) }
+// ...
+```
+
+For comparison, the same programs written with the explicit anonymous-class encoding shown earlier would be:
+```scala
+derivative(new Prog {
+ def apply(nums: Nums)(x: nums.Num): nums.Num = x
+})
+derivative(new Prog {
+ def apply(nums: Nums)(x: nums.Num): nums.Num = nums.add(nums.lit(0.8), x)
+})
+// ...
+```
+
+#### Combination with Context Functions
+The combination of extension methods, [context functions][ctx-fun], and dependent functions provides a powerful tool for library designers.
+For instance, we can refine our library from above as follows:
+```scala
+trait NumsDSL extends Nums:
+ extension (x: Num)
+ def +(y: Num) = add(x, y)
+ def *(y: Num) = mul(x, y)
+
+def const(d: Double)(using n: Nums): n.Num = n.lit(d)
+
+type Prog = (n: NumsDSL) ?=> n.Num => n.Num
+// ^^^
+// prog is now a context function that implicitly
+// assumes a NumsDSL in the calling context
+
+def derivative(input: Prog): Double = ...
+
+// notice that we do not need to mention Nums in the examples below
+derivative { x => const(1.0) + x }
+derivative { x => x * x + const(2.0) }
+// ...
+```
+
+
+[ref]: {{ site.scala3ref }}/new-types/dependent-function-types.html
+[ctx-fun]: {{ site.scala3ref }}/contextual/context-functions.html
diff --git a/_overviews/scala3-book/types-generics.md b/_overviews/scala3-book/types-generics.md
new file mode 100644
index 0000000000..84ddd4599e
--- /dev/null
+++ b/_overviews/scala3-book/types-generics.md
@@ -0,0 +1,89 @@
+---
+title: Generics
+type: section
+description: This section introduces and demonstrates generics in Scala 3.
+languages: [ru, zh-cn]
+num: 51
+previous-page: types-inferred
+next-page: types-intersection
+---
+
+
+Generic classes (or traits) take a type as _a parameter_ within square brackets `[...]`.
+The Scala convention is to use a single letter (like `A`) to name those type parameters.
+The type can then be used inside the class as needed, for instance variables, method parameters, or return types:
+
+{% tabs stack class=tabs-scala-version %}
+
+{% tab 'Scala 2' %}
+```scala
+// here we declare the type parameter A
+// v
+class Stack[A] {
+ private var elements: List[A] = Nil
+ // ^
+ // Here we refer to the type parameter
+ // v
+ def push(x: A): Unit =
+ elements = elements.prepended(x)
+ def peek: A = elements.head
+ def pop(): A = {
+ val currentTop = peek
+ elements = elements.tail
+ currentTop
+ }
+}
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+// here we declare the type parameter A
+// v
+class Stack[A]:
+ private var elements: List[A] = Nil
+ // ^
+ // Here we refer to the type parameter
+ // v
+ def push(x: A): Unit =
+ elements = elements.prepended(x)
+ def peek: A = elements.head
+ def pop(): A =
+ val currentTop = peek
+ elements = elements.tail
+ currentTop
+```
+{% endtab %}
+{% endtabs %}
+
+This implementation of a `Stack` class takes any type as a parameter.
+The beauty of generics is that you can now create a `Stack[Int]`, `Stack[String]`, and so on, allowing you to reuse your implementation of a `Stack` for arbitrary element types.
+
+This is how you create and use a `Stack[Int]`:
+
+{% tabs stack-usage class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+val stack = new Stack[Int]
+stack.push(1)
+stack.push(2)
+println(stack.pop()) // prints 2
+println(stack.pop()) // prints 1
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+val stack = Stack[Int]
+stack.push(1)
+stack.push(2)
+println(stack.pop()) // prints 2
+println(stack.pop()) // prints 1
+```
+{% endtab %}
+{% endtabs %}
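+
+Because the implementation is generic, the very same `Stack` class can be reused for other element types, such as `String`:
+
+{% tabs stack-usage-string class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+val names = new Stack[String]
+names.push("Snow")
+names.push("Sand")
+println(names.pop()) // prints Sand
+println(names.pop()) // prints Snow
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+val names = Stack[String]
+names.push("Snow")
+names.push("Sand")
+println(names.pop()) // prints Sand
+println(names.pop()) // prints Snow
+```
+{% endtab %}
+{% endtabs %}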
+
+> See the [Variance section][variance] for details on how to express variance with generic types.
+
+
+[variance]: {% link _overviews/scala3-book/types-variance.md %}
diff --git a/_overviews/scala3-book/types-inferred.md b/_overviews/scala3-book/types-inferred.md
new file mode 100644
index 0000000000..92333b3735
--- /dev/null
+++ b/_overviews/scala3-book/types-inferred.md
@@ -0,0 +1,53 @@
+---
+title: Inferred Types
+type: section
+description: This section introduces and demonstrates inferred types in Scala 3
+languages: [ru, zh-cn]
+num: 50
+previous-page: types-introduction
+next-page: types-generics
+---
+
+
+As with other statically typed programming languages, in Scala you can _declare_ a type when creating a new variable:
+
+{% tabs xy %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val x: Int = 1
+val y: Double = 1
+```
+{% endtab %}
+{% endtabs %}
+
+In those examples the types are _explicitly_ declared to be `Int` and `Double`, respectively.
+However, in Scala you generally don’t have to declare the type when defining value binders:
+
+{% tabs abm %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val a = 1
+val b = List(1, 2, 3)
+val m = Map(1 -> "one", 2 -> "two")
+```
+{% endtab %}
+{% endtabs %}
+
+When you do this, Scala _infers_ the types, as shown in the following REPL interaction:
+
+{% tabs abm2 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+scala> val a = 1
+val a: Int = 1
+
+scala> val b = List(1, 2, 3)
+val b: List[Int] = List(1, 2, 3)
+
+scala> val m = Map(1 -> "one", 2 -> "two")
+val m: Map[Int, String] = Map(1 -> one, 2 -> two)
+```
+{% endtab %}
+{% endtabs %}
+
+Indeed, most variables are defined this way, and Scala’s ability to automatically infer types is one feature that makes it _feel_ like a dynamically typed language.
diff --git a/_overviews/scala3-book/types-intersection.md b/_overviews/scala3-book/types-intersection.md
new file mode 100644
index 0000000000..2c533ffd09
--- /dev/null
+++ b/_overviews/scala3-book/types-intersection.md
@@ -0,0 +1,64 @@
+---
+title: Intersection Types
+type: section
+description: This section introduces and demonstrates intersection types in Scala 3.
+languages: [ru, zh-cn]
+num: 52
+previous-page: types-generics
+next-page: types-union
+scala3: true
+versionSpecific: true
+---
+
+Used on types, the `&` operator creates a so-called _intersection type_.
+The type `A & B` represents values that are **both** of the type `A` and of the type `B` at the same time.
+For instance, the following example uses the intersection type `Resettable & Growable[String]`:
+
+{% tabs intersection-reset-grow %}
+
+{% tab 'Scala 3 Only' %}
+
+```scala
+trait Resettable:
+ def reset(): Unit
+
+trait Growable[A]:
+ def add(a: A): Unit
+
+def f(x: Resettable & Growable[String]): Unit =
+ x.reset()
+ x.add("first")
+```
+
+{% endtab %}
+
+{% endtabs %}
+
+In the method `f` in this example, the parameter `x` is required to be *both* a `Resettable` and a `Growable[String]`.
+
+The _members_ of an intersection type `A & B` are all the members of `A` and all the members of `B`.
+Therefore, as shown, `Resettable & Growable[String]` has member methods `reset` and `add`.
+
+Intersection types can be useful to describe requirements _structurally_.
+That is, in our example `f`, we directly express that we are happy with any value for `x` as long as it’s a subtype of both `Resettable` and `Growable`.
+We **did not** have to create a _nominal_ helper trait like the following:
+
+{% tabs normal-trait class=tabs-scala-version %}
+{% tab 'Scala 2' %}
+```scala
+trait Both[A] extends Resettable with Growable[A]
+def f(x: Both[String]): Unit
+```
+{% endtab %}
+
+{% tab 'Scala 3' %}
+```scala
+trait Both[A] extends Resettable, Growable[A]
+def f(x: Both[String]): Unit
+```
+{% endtab %}
+{% endtabs %}
+
+There is an important difference between the two alternatives of defining `f`: While both allow `f` to be called with instances of `Both`, only the former allows passing instances that are subtypes of `Resettable` and `Growable[String]`, but _not of_ `Both[String]`.
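+
+To make this concrete, here is a small sketch: the class `StringBag` below is made up for this example, and we assume the nominal variant of `f` is renamed to `fBoth`.
+A type that mixes in both traits is accepted by the intersection-typed `f`, but not by the variant that requires `Both[String]`:
+
+{% tabs intersection-structural-example %}
+
+{% tab 'Scala 3 Only' %}
+
+```scala
+class StringBag extends Resettable, Growable[String]:
+  private var elems = List.empty[String]
+  def reset(): Unit = elems = Nil
+  def add(a: String): Unit = elems = a :: elems
+
+f(new StringBag)        // OK: StringBag is a subtype of Resettable & Growable[String]
+// fBoth(new StringBag) // error: StringBag is not a subtype of Both[String]
+```
+
+{% endtab %}
+
+{% endtabs %}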
+
+> Note that `&` is _commutative_: `A & B` is the same type as `B & A`.
diff --git a/_overviews/scala3-book/types-introduction.md b/_overviews/scala3-book/types-introduction.md
new file mode 100644
index 0000000000..77a79a0844
--- /dev/null
+++ b/_overviews/scala3-book/types-introduction.md
@@ -0,0 +1,58 @@
+---
+title: Types and the Type System
+type: chapter
+description: This chapter provides an introduction to Scala 3 types and the type system.
+languages: [ru, zh-cn]
+num: 49
+previous-page: fp-summary
+next-page: types-inferred
+---
+
+
+Scala is a unique language in that it’s statically typed, but often _feels_ flexible and dynamic.
+For instance, thanks to type inference you can write code like this without explicitly specifying the variable types:
+
+{% tabs hi %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val a = 1
+val b = 2.0
+val c = "Hi!"
+```
+{% endtab %}
+{% endtabs %}
+
+That makes the code feel dynamically typed.
+And thanks to new features, like [union types][union-types] in Scala 3, you can also write code like the following that expresses very concisely which values are expected as arguments and which types are returned:
+
+{% tabs union-example %}
+{% tab 'Scala 3 Only' %}
+```scala
+def isTruthy(a: Boolean | Int | String): Boolean = ???
+def dogCatOrWhatever(): Dog | Plant | Car | Sun = ???
+```
+{% endtab %}
+{% endtabs %}
+
+As the example suggests, when using union types, the types don’t have to share a common hierarchy, and you can still accept them as arguments or return them from a method.
+
+If you’re an application developer, you’ll use features like type inference every day and generics every week.
+When you read the Scaladoc for classes and methods, you’ll also need to have some understanding of _variance_.
+Hopefully you’ll see that using types can be relatively simple and also offers a lot of expressive power, flexibility, and control for library developers.
+
+
+## Benefits of types
+
+Statically-typed programming languages offer a number of benefits, including:
+
+- Helping to provide strong IDE support
+- Eliminating many classes of potential errors at compile time
+- Assisting in refactoring
+- Providing strong documentation that cannot be outdated since it is type checked
+
+
+## Introducing features of Scala’s type system
+
+Given that brief introduction, the following sections provide an overview of the features of Scala’s type system.
+
+[union-types]: {% link _overviews/scala3-book/types-union.md %}
diff --git a/_overviews/scala3-book/types-opaque-types.md b/_overviews/scala3-book/types-opaque-types.md
new file mode 100644
index 0000000000..4076749050
--- /dev/null
+++ b/_overviews/scala3-book/types-opaque-types.md
@@ -0,0 +1,148 @@
+---
+title: Opaque Types
+type: section
+description: This section introduces and demonstrates opaque types in Scala 3.
+languages: [ru, zh-cn]
+num: 56
+previous-page: types-variance
+next-page: types-structural
+scala3: true
+versionSpecific: true
+---
+
+_Opaque type aliases_ provide type abstraction without any **overhead**.
+In Scala 2, a similar result could be achieved with [value classes][value-classes].
+
+## Abstraction Overhead
+
+Let us assume we want to define a module that offers arithmetic on numbers, which are represented by their logarithm.
+This can be useful to improve precision when the numerical values involved tend to be very large, or close to zero.
+
+Since it is important to distinguish “regular” double values from numbers stored as their logarithm, we introduce a class `Logarithm`:
+
+```scala
+class Logarithm(protected val underlying: Double):
+ def toDouble: Double = math.exp(underlying)
+ def + (that: Logarithm): Logarithm =
+ // here we use the apply method on the companion
+ Logarithm(this.toDouble + that.toDouble)
+ def * (that: Logarithm): Logarithm =
+ new Logarithm(this.underlying + that.underlying)
+
+object Logarithm:
+ def apply(d: Double): Logarithm = new Logarithm(math.log(d))
+```
+The apply method on the companion object lets us create values of type `Logarithm` which we can use as follows:
+```scala
+val l2 = Logarithm(2.0)
+val l3 = Logarithm(3.0)
+println((l2 * l3).toDouble) // prints 6.0
+println((l2 + l3).toDouble) // prints 4.999...
+```
+While the class `Logarithm` offers a nice abstraction for `Double` values that are stored in this particular logarithmic form, it imposes severe performance overhead: For every single mathematical operation, we need to extract the underlying value and then wrap it again in a new instance of `Logarithm`.
+
+
+## Module Abstractions
+Let us consider another approach to implement the same library.
+This time instead of defining `Logarithm` as a class, we define it using a _type alias_.
+First, we define an abstract interface of our module:
+
+```scala
+trait Logarithms:
+
+ type Logarithm
+
+ // operations on Logarithm
+ def add(x: Logarithm, y: Logarithm): Logarithm
+ def mul(x: Logarithm, y: Logarithm): Logarithm
+
+ // functions to convert between Double and Logarithm
+ def make(d: Double): Logarithm
+ def extract(x: Logarithm): Double
+
+ // extension methods to use `add` and `mul` as "methods" on Logarithm
+ extension (x: Logarithm)
+ def toDouble: Double = extract(x)
+ def + (y: Logarithm): Logarithm = add(x, y)
+ def * (y: Logarithm): Logarithm = mul(x, y)
+```
+Now, let us implement this abstract interface by saying type `Logarithm` is equal to `Double`:
+```scala
+object LogarithmsImpl extends Logarithms:
+
+ type Logarithm = Double
+
+ // operations on Logarithm
+ def add(x: Logarithm, y: Logarithm): Logarithm = make(x.toDouble + y.toDouble)
+ def mul(x: Logarithm, y: Logarithm): Logarithm = x + y
+
+ // functions to convert between Double and Logarithm
+ def make(d: Double): Logarithm = math.log(d)
+ def extract(x: Logarithm): Double = math.exp(x)
+```
+Within the implementation of `LogarithmsImpl`, the equation `Logarithm = Double` allows us to implement the various methods.
+
+#### Leaky Abstractions
+However, this abstraction is slightly leaky.
+We have to make sure to _only_ ever program against the abstract interface `Logarithms` and never directly use `LogarithmsImpl`.
+Directly using `LogarithmsImpl` would make the equality `Logarithm = Double` visible for the user, who might accidentally use a `Double` where a logarithmic double is expected.
+For example:
+
+```scala
+import LogarithmsImpl.*
+val l: Logarithm = make(1.0)
+val d: Double = l // type checks AND leaks the equality!
+```
+
+Having to separate the module into an abstract interface and implementation can be useful, but is also a lot of effort, just to hide the implementation detail of `Logarithm`.
+Programming against the abstract module `Logarithms` can be very tedious and often requires the use of advanced features like path-dependent types, as in the following example:
+
+```scala
+def someComputation(L: Logarithms)(init: L.Logarithm): L.Logarithm = ...
+```
+
+#### Boxing Overhead
+Type abstractions, such as `type Logarithm`, [erase](https://www.scala-lang.org/files/archive/spec/2.13/03-types.html#type-erasure) to their bound (which is `Any` in our case).
+That is, although we do not need to manually wrap and unwrap the `Double` value, there will still be some overhead related to boxing the primitive type `Double`.
+
+## Opaque Types
+Instead of manually splitting our `Logarithms` component into an abstract part and into a concrete implementation, we can simply use opaque types in Scala 3 to achieve a similar effect:
+
+```scala
+object Logarithms:
+//vvvvvv this is the important difference!
+ opaque type Logarithm = Double
+
+ object Logarithm:
+ def apply(d: Double): Logarithm = math.log(d)
+
+ extension (x: Logarithm)
+ def toDouble: Double = math.exp(x)
+ def + (y: Logarithm): Logarithm = Logarithm(math.exp(x) + math.exp(y))
+ def * (y: Logarithm): Logarithm = x + y
+```
+The fact that `Logarithm` is the same as `Double` is only known in the scope where `Logarithm` is defined, which in the above example corresponds to the object `Logarithms`.
+The type equality `Logarithm = Double` can be used to implement the methods (like `*` and `toDouble`).
+
+However, outside of the module the type `Logarithm` is completely encapsulated, or “opaque.” To users of `Logarithm` it is not possible to discover that `Logarithm` is actually implemented as a `Double`:
+
+```scala
+import Logarithms.*
+val log2 = Logarithm(2.0)
+val log3 = Logarithm(3.0)
+println((log2 * log3).toDouble) // prints 6.0
+println((log2 + log3).toDouble) // prints 4.999...
+
+val d: Double = log2 // ERROR: Found Logarithm required Double
+```
+
+Even though we abstracted over `Logarithm`, the abstraction comes for free:
+Since there is only one implementation, at runtime there will be _no boxing overhead_ for primitive types like `Double`.
+
+### Summary of Opaque Types
+Opaque types offer a sound abstraction over implementation details, without imposing performance overhead.
+As illustrated above, opaque types are convenient to use, and integrate very well with the [Extension Methods][extension] feature.
+
+
+[extension]: {% link _overviews/scala3-book/ca-extension-methods.md %}
+[value-classes]: {% link _overviews/core/value-classes.md %}
diff --git a/_overviews/scala3-book/types-others.md b/_overviews/scala3-book/types-others.md
new file mode 100644
index 0000000000..9419073f95
--- /dev/null
+++ b/_overviews/scala3-book/types-others.md
@@ -0,0 +1,31 @@
+---
+title: Other Types
+type: section
+description: This section mentions other advanced types in Scala 3.
+languages: [ru, zh-cn]
+num: 59
+previous-page: types-dependent-function
+next-page: ca-contextual-abstractions-intro
+scala3: true
+versionSpecific: true
+---
+
+
+Scala has several other advanced types that are not shown in this book, including:
+
+- Type lambdas
+- Match types
+- Existential types
+- Higher-kinded types
+- Singleton types
+- Refinement types
+- Kind polymorphism
+
+For more details on most of these types, refer to the [Scala 3 Reference documentation][reference].
+For singleton types see the [literal types](https://scala-lang.org/files/archive/spec/3.4/03-types.html#literal-types) section of the Scala 3 spec,
+and for refinement types, see the [refined types](https://scala-lang.org/files/archive/spec/3.4/03-types.html) section.
+
+
+
+
+[reference]: {{ site.scala3ref }}/overview.html
diff --git a/_overviews/scala3-book/types-structural.md b/_overviews/scala3-book/types-structural.md
new file mode 100644
index 0000000000..afa74fe340
--- /dev/null
+++ b/_overviews/scala3-book/types-structural.md
@@ -0,0 +1,109 @@
+---
+title: Structural Types
+type: section
+description: This section introduces and demonstrates structural types in Scala 3.
+languages: [ru, zh-cn]
+num: 57
+previous-page: types-opaque-types
+next-page: types-dependent-function
+scala3: true
+versionSpecific: true
+---
+
+{% comment %}
+NOTE: It would be nice to simplify this more.
+{% endcomment %}
+
+_Scala 2 has a weaker form of structural types based on Java reflection, achieved with `import scala.language.reflectiveCalls`_.
+
+## Introduction
+
+Some use cases, such as modeling database access, are more awkward in statically typed languages than in dynamically typed languages.
+With dynamically typed languages, it’s natural to model a row as a record or object, and to select entries with simple dot notation, e.g. `row.columnName`.
+
+Achieving the same experience in a statically typed language requires defining a class for every possible row arising from database manipulation---including rows arising from joins and projections---and setting up a scheme to map between a row and the class representing it.
+
+This requires a large amount of boilerplate, which leads developers to trade the advantages of static typing for simpler schemes where column names are represented as strings and passed to other operators, e.g. `row.select("columnName")`.
+This approach forgoes the advantages of static typing, and is still not as natural as the dynamically typed version.
+
+Structural types help in situations where you’d like to support simple dot notation in dynamic contexts without losing the advantages of static typing.
+They allow developers to use dot notation and configure how fields and methods should be resolved.
+
+## Example
+
+Here’s an example of a structural type `Person`:
+
+```scala
+class Record(elems: (String, Any)*) extends Selectable:
+ private val fields = elems.toMap
+ def selectDynamic(name: String): Any = fields(name)
+
+type Person = Record {
+ val name: String
+ val age: Int
+}
+```
+
+The `Person` type adds a _refinement_ to its parent type `Record` that defines `name` and `age` fields.
+We say the refinement is _structural_ since `name` and `age` are not defined in the parent type.
+But they exist nevertheless as members of class `Person`.
+For instance, the following program would print `"Emma is 42 years old."`:
+
+```scala
+val person = Record(
+ "name" -> "Emma",
+ "age" -> 42
+).asInstanceOf[Person]
+
+println(s"${person.name} is ${person.age} years old.")
+```
+
+The parent type `Record` in this example is a generic class that can represent arbitrary records in its `elems` argument.
+This argument is a sequence of pairs of labels of type `String` and values of type `Any`.
+When you create a `Person` as a `Record` you have to assert with a typecast that the record defines the right fields of the right types.
+`Record` itself is too weakly typed, so the compiler cannot know this without help from the user.
+In practice, the connection between a structural type and its underlying generic representation would most likely be done by a database layer, and therefore would not be a concern of the end user.
+
+`Record` extends the marker trait `scala.Selectable` and defines a method `selectDynamic`, which maps a field name to its value.
+Selecting a structural type member is done by calling this method.
+The `person.name` and `person.age` selections are translated by the Scala compiler to:
+
+```scala
+person.selectDynamic("name").asInstanceOf[String]
+person.selectDynamic("age").asInstanceOf[Int]
+```
+
+## A second example
+
+To reinforce what you just saw, here’s another structural type named `Book` that represents a book that you might read from a database:
+
+```scala
+type Book = Record {
+ val title: String
+ val author: String
+ val year: Int
+ val rating: Double
+}
+```
+
+As with `Person`, this is how you create a `Book` instance:
+
+```scala
+val book = Record(
+ "title" -> "The Catcher in the Rye",
+ "author" -> "J. D. Salinger",
+ "year" -> 1951,
+ "rating" -> 4.5
+).asInstanceOf[Book]
+```
+
+## Selectable class
+
+Besides `selectDynamic`, a `Selectable` class sometimes also defines a method `applyDynamic`.
+This can then be used to translate function calls of structural members.
+So, if `a` is an instance of `Selectable`, a structural call like `a.f(b, c)` translates to:
+
+```scala
+a.applyDynamic("f")(b, c)
+```
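+
+For instance, here is a minimal sketch of a `Selectable` class that dispatches such calls through a map of functions (the names `Dispatcher` and `Greeter` are made up for this example):
+
+```scala
+class Dispatcher(methods: Map[String, Seq[Any] => Any]) extends Selectable:
+  def applyDynamic(name: String)(args: Any*): Any = methods(name)(args)
+
+type Greeter = Dispatcher {
+  def greet(name: String): String
+}
+
+val greeter = Dispatcher(
+  Map("greet" -> ((args: Seq[Any]) => s"Hello, ${args.head}!"))
+).asInstanceOf[Greeter]
+
+greeter.greet("Scala") // expands to greeter.applyDynamic("greet")("Scala")
+```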
+
diff --git a/_overviews/scala3-book/types-union.md b/_overviews/scala3-book/types-union.md
new file mode 100644
index 0000000000..e685646608
--- /dev/null
+++ b/_overviews/scala3-book/types-union.md
@@ -0,0 +1,95 @@
+---
+title: Union Types
+type: section
+description: This section introduces and demonstrates union types in Scala 3.
+languages: [ru, zh-cn]
+num: 53
+previous-page: types-intersection
+next-page: types-adts-gadts
+scala3: true
+versionSpecific: true
+---
+
+Used on types, the `|` operator creates a so-called _union type_.
+The type `A | B` represents values that are **either** of the type `A` **or** of the type `B`.
+
+In the following example, the `help` method accepts a parameter named `id` of the union type `Username | Password`, which can be either a `Username` or a `Password`:
+
+```scala
+case class Username(name: String)
+case class Password(hash: Hash)
+
+def help(id: Username | Password) =
+ val user = id match
+ case Username(name) => lookupName(name)
+ case Password(hash) => lookupPassword(hash)
+ // more code here ...
+```
+We implement the method `help` by distinguishing between the two alternatives using pattern matching.
+
+This code is a flexible and type-safe solution.
+If you attempt to pass in a type other than a `Username` or `Password`, the compiler flags it as an error:
+
+```scala
+help("hi") // error: Found: ("hi" : String)
+ // Required: Username | Password
+```
+
+You’ll also get an error if you attempt to add a `case` to the `match` expression that doesn’t match the `Username` or `Password` types:
+
+```scala
+case 1.0 => ??? // ERROR: this line won’t compile
+```
+
+### Alternative to Union Types
+As shown, union types can be used to represent alternatives of several different types, without requiring those types to be part of a custom-crafted class hierarchy, or requiring explicit wrapping.
+
+#### Pre-planning the Class Hierarchy
+Without union types, this would require pre-planning of the class hierarchy, as the following example illustrates:
+
+```scala
+trait UsernameOrPassword
+case class Username(name: String) extends UsernameOrPassword
+case class Password(hash: Hash) extends UsernameOrPassword
+def help(id: UsernameOrPassword) = ...
+```
+
+Pre-planning does not scale very well since, for example, requirements of API users might not be foreseeable.
+Additionally, cluttering the type hierarchy with marker traits like `UsernameOrPassword` also makes the code more difficult to read.
+
+#### Tagged Unions
+Another alternative is to define a separate enumeration type like:
+
+```scala
+enum UsernameOrPassword:
+ case IsUsername(u: Username)
+ case IsPassword(p: Password)
+```
+The enumeration `UsernameOrPassword` represents a _tagged_ union of `Username` and `Password`.
+However, this way of modeling the union requires _explicit wrapping and unwrapping_ and, for instance, `Username` is **not** a subtype of `UsernameOrPassword`.
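+
+For illustration, here is a sketch of a variant of `help` written against the tagged union (named `helpTagged` here, and reusing the `lookupName` and `lookupPassword` helpers assumed above); note the wrapping required at the call site:
+
+```scala
+import UsernameOrPassword.*
+
+def helpTagged(id: UsernameOrPassword) = id match
+  case IsUsername(Username(name)) => lookupName(name)
+  case IsPassword(Password(hash)) => lookupPassword(hash)
+
+helpTagged(IsUsername(Username("Eve"))) // explicit wrapping required
+help(Username("Eve"))                   // the union version needs no wrapping
+```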
+
+### Inference of Union Types
+The compiler assigns a union type to an expression _only if_ such a type is explicitly given.
+For instance, given these values:
+
+```scala
+val name = Username("Eve") // name: Username = Username(Eve)
+val password = Password(123) // password: Password = Password(123)
+```
+
+This REPL example shows how a union type can be used when binding a variable to the result of an `if`/`else` expression:
+
+````
+scala> val a = if true then name else password
+val a: Object = Username(Eve)
+
+scala> val b: Password | Username = if true then name else password
+val b: Password | Username = Username(Eve)
+````
+
+The type of `a` is `Object`, which is a supertype of `Username` and `Password`, but not the *least* supertype, `Password | Username`.
+If you want the least supertype you have to give it explicitly, as is done for `b`.
+
+> Union types are duals of intersection types.
+> And like `&` with intersection types, `|` is also commutative: `A | B` is the same type as `B | A`.
+
diff --git a/_overviews/scala3-book/types-variance.md b/_overviews/scala3-book/types-variance.md
new file mode 100644
index 0000000000..f2b8e3d931
--- /dev/null
+++ b/_overviews/scala3-book/types-variance.md
@@ -0,0 +1,235 @@
+---
+title: Variance
+type: section
+description: This section introduces and demonstrates variance in Scala 3.
+languages: [ru, zh-cn]
+num: 55
+previous-page: types-adts-gadts
+next-page: types-opaque-types
+---
+
+Type parameter _variance_ controls the subtyping of parameterized types (like classes or traits).
+
+To explain variance, let us assume the following type definitions:
+
+{% tabs types-variance-1 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+trait Item { def productNumber: String }
+trait Buyable extends Item { def price: Int }
+trait Book extends Buyable { def isbn: String }
+```
+{% endtab %}
+{% endtabs %}
+
+Let us also assume the following parameterized types:
+
+{% tabs types-variance-2 class=tabs-scala-version %}
+{% tab 'Scala 2' for=types-variance-2 %}
+```scala
+// an example of an invariant type
+trait Pipeline[T] {
+ def process(t: T): T
+}
+
+// an example of a covariant type
+trait Producer[+T] {
+ def make: T
+}
+
+// an example of a contravariant type
+trait Consumer[-T] {
+ def take(t: T): Unit
+}
+```
+{% endtab %}
+
+{% tab 'Scala 3' for=types-variance-2 %}
+```scala
+// an example of an invariant type
+trait Pipeline[T]:
+ def process(t: T): T
+
+// an example of a covariant type
+trait Producer[+T]:
+ def make: T
+
+// an example of a contravariant type
+trait Consumer[-T]:
+ def take(t: T): Unit
+```
+{% endtab %}
+{% endtabs %}
+
+In general there are three modes of variance:
+
+- **invariant**---the default, written like `Pipeline[T]`
+- **covariant**---annotated with a `+`, such as `Producer[+T]`
+- **contravariant**---annotated with a `-`, like in `Consumer[-T]`
+
+We will now go into detail on what this annotation means and why we use it.
+
+### Invariant Types
+By default, types like `Pipeline` are invariant in their type argument (`T` in this case).
+This means that types like `Pipeline[Item]`, `Pipeline[Buyable]`, and `Pipeline[Book]` are in _no subtyping relationship_ to each other.
+
+And rightfully so! Assume the following method that consumes two values of type `Pipeline[Buyable]`, passes its argument `b` to both of them, and returns the cheaper of the two results:
+
+{% tabs types-variance-3 class=tabs-scala-version %}
+{% tab 'Scala 2' for=types-variance-3 %}
+```scala
+def oneOf(
+ p1: Pipeline[Buyable],
+ p2: Pipeline[Buyable],
+ b: Buyable
+): Buyable = {
+ val b1 = p1.process(b)
+ val b2 = p2.process(b)
+ if (b1.price < b2.price) b1 else b2
+}
+```
+{% endtab %}
+
+{% tab 'Scala 3' for=types-variance-3 %}
+```scala
+def oneOf(
+ p1: Pipeline[Buyable],
+ p2: Pipeline[Buyable],
+ b: Buyable
+): Buyable =
+ val b1 = p1.process(b)
+ val b2 = p2.process(b)
+ if b1.price < b2.price then b1 else b2
+```
+{% endtab %}
+{% endtabs %}
+
+Now, recall that we have the following _subtyping relationship_ between our types:
+
+{% tabs types-variance-4 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+Book <: Buyable <: Item
+```
+{% endtab %}
+{% endtabs %}
+
+We cannot pass a `Pipeline[Book]` to the method `oneOf` because in its implementation, we call `p1` and `p2` with a value of type `Buyable`.
+A `Pipeline[Book]` expects a `Book`, which can potentially cause a runtime error.
+
+We cannot pass a `Pipeline[Item]` because calling `process` on it only promises to return an `Item`; however, we are supposed to return a `Buyable`.
+
+#### Why Invariant?
+In fact, type `Pipeline` needs to be invariant since it uses its type parameter `T` _both_ as an argument _and_ as a return type.
+For the same reason, some types in the Scala collection library---like `Array` or `Set`---are also _invariant_.
+
+### Covariant Types
+In contrast to `Pipeline`, which is invariant, the type `Producer` is marked as **covariant** by prefixing the type parameter with a `+`.
+This is valid, since the type parameter is only used in a _return position_.
+
+Marking it as covariant means that we can pass (or return) a `Producer[Book]` where a `Producer[Buyable]` is expected.
+And in fact, this is sound. The type of `Producer[Buyable].make` only promises to _return_ a `Buyable`.
+As a caller of `make`, we will be happy to also accept a `Book`, which is a subtype of `Buyable`---that is, it is _at least_ a `Buyable`.
+
+This is illustrated by the following example, where the function `makeTwo` expects a `Producer[Buyable]`:
+
+{% tabs types-variance-5 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+def makeTwo(p: Producer[Buyable]): Int =
+ p.make.price + p.make.price
+```
+{% endtab %}
+{% endtabs %}
+
+It is perfectly fine to pass a producer for books:
+
+{% tabs types-variance-6 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val bookProducer: Producer[Book] = ???
+makeTwo(bookProducer)
+```
+{% endtab %}
+{% endtabs %}
+
+The call to `price` within `makeTwo` is still valid for books as well.
+
+#### Covariant Types for Immutable Containers
+You will encounter covariant types a lot when dealing with immutable containers, like those that can be found in the standard library (such as `List`, `Seq`, `Vector`, etc.).
+
+For example, `List` and `Vector` are approximately defined as:
+
+{% tabs types-variance-7 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+class List[+A] ...
+class Vector[+A] ...
+```
+{% endtab %}
+{% endtabs %}
+
+This way, you can use a `List[Book]` where a `List[Buyable]` is expected.
+This also intuitively makes sense: If you are expecting a collection of things that can be bought, it should be fine to give you a collection of books.
+Books have an additional `isbn` method in our example, but you are free to ignore this additional capability.
+
+### Contravariant Types
+In contrast to the type `Producer`, which is marked as covariant, the type `Consumer` is marked as **contravariant** by prefixing the type parameter with a `-`.
+This is valid, since the type parameter is only used in an _argument position_.
+
+Marking it as contravariant means that we can pass (or return) a `Consumer[Item]` where a `Consumer[Buyable]` is expected.
+That is, we have the subtyping relationship `Consumer[Item] <: Consumer[Buyable]`.
+Remember, for type `Producer`, it was the other way around, and we had `Producer[Buyable] <: Producer[Item]`.
+
+And in fact, this is sound. The method `Consumer[Item].take` accepts an `Item`.
+As a caller of `take`, we can also supply a `Buyable`, which will be happily accepted by the `Consumer[Item]` since `Buyable` is a subtype of `Item`---that is, it is _at least_ an `Item`.
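+
+For example, a method that only needs to feed books into a consumer can accept a consumer of any supertype of `Book`, as the following sketch (with placeholder values) illustrates:
+
+{% tabs types-variance-consumer %}
+{% tab 'Scala 2 and 3' %}
+```scala
+def feed(consumer: Consumer[Book], book: Book): Unit =
+  consumer.take(book)
+
+val itemConsumer: Consumer[Item] = ???
+val someBook: Book = ???
+
+// OK: Consumer[Item] <: Consumer[Book]
+feed(itemConsumer, someBook)
+```
+{% endtab %}
+{% endtabs %}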
+
+#### Contravariant Types for Consumers
+Contravariant types are much less common than covariant types.
+As in our example, you can think of them as “consumers.” The most important contravariant type that you might come across is that of functions:
+
+{% tabs types-variance-8 class=tabs-scala-version %}
+{% tab 'Scala 2' for=types-variance-8 %}
+```scala
+trait Function[-A, +B] {
+ def apply(a: A): B
+}
+```
+{% endtab %}
+
+{% tab 'Scala 3' for=types-variance-8 %}
+```scala
+trait Function[-A, +B]:
+ def apply(a: A): B
+```
+{% endtab %}
+{% endtabs %}
+
+Its argument type `A` is marked as contravariant---the function consumes values of type `A`.
+In contrast, its result type `B` is marked as covariant---it produces values of type `B`.
+
+Here are some examples that illustrate the subtyping relationships induced by variance annotations on functions:
+
+{% tabs types-variance-9 %}
+{% tab 'Scala 2 and 3' %}
+```scala
+val f: Function[Buyable, Buyable] = b => b
+
+// OK to return a Buyable where an Item is expected
+val g: Function[Buyable, Item] = f
+
+// OK to provide a Book where a Buyable is expected
+val h: Function[Book, Buyable] = f
+```
+{% endtab %}
+{% endtabs %}
+
+## Summary
+In this section, we have encountered three different kinds of variance:
+
+- **Producers** are typically covariant, and mark their type parameter with `+`.
+ This also holds for immutable collections.
+- **Consumers** are typically contravariant, and mark their type parameter with `-`.
+- Types that are **both** producers and consumers have to be invariant, and do not require any marking on their type parameter.
+ Mutable collections like `Array` fall into this category.
diff --git a/_overviews/scala3-book/where-next.md b/_overviews/scala3-book/where-next.md
new file mode 100644
index 0000000000..8eed7a163f
--- /dev/null
+++ b/_overviews/scala3-book/where-next.md
@@ -0,0 +1,16 @@
+---
+title: Where To Go Next
+type: chapter
+description: Where to go next after reading the Scala Book
+languages: [zh-cn]
+num: 77
+previous-page: scala-for-python-devs
+next-page:
+---
+
+We hope you enjoyed this introduction to the Scala programming language, and we also hope we were able to share some of the beauty of the language.
+
+As you continue working with Scala, you can find many more details at the
+[Guides and Overviews section][overviews] of our website.
+
+[overviews]: {% link _overviews/index.md %}
diff --git a/_overviews/scala3-book/why-scala-3.md b/_overviews/scala3-book/why-scala-3.md
new file mode 100644
index 0000000000..639c04691e
--- /dev/null
+++ b/_overviews/scala3-book/why-scala-3.md
@@ -0,0 +1,501 @@
+---
+title: Why Scala 3?
+type: chapter
+description: This page describes the benefits of the Scala 3 programming language.
+languages: [ru, zh-cn]
+num: 3
+previous-page: scala-features
+next-page: taste-intro
+---
+
+{% comment %}
+TODO: Is “Scala 3 Benefits” a better title?
+NOTE: Could mention “grammar” as a way of showing that Scala isn’t a large language; see this slide: https://www.slideshare.net/Odersky/preparing-for-scala-3#13
+{% endcomment %}
+
+There are many benefits to using Scala, and Scala 3 in particular.
+It’s hard to list every benefit of Scala, but a “Top Ten” list might look like this:
+
+1. Scala embraces a fusion of functional programming (FP) and object-oriented programming (OOP)
+2. Scala is statically typed, but often feels like a dynamically typed language
+3. Scala’s syntax is concise, but still readable; it’s often referred to as _expressive_
+4. _Implicits_ in Scala 2 were a defining feature, and they have been improved and simplified in Scala 3
+5. Scala integrates seamlessly with Java, so you can create projects with mixed Scala and Java code, and Scala code easily uses the thousands of existing Java libraries
+6. Scala can be used on the server, and also in the browser with [Scala.js](https://www.scala-js.org)
+7. The Scala standard library has dozens of pre-built, functional methods to save you time, and greatly reduce the need to write custom `for` loops and algorithms
+8. “Best practices” are built into Scala, which favors immutability, anonymous functions, higher-order functions, pattern matching, classes that cannot be extended by default, and more
+9. The Scala ecosystem offers the most modern FP libraries in the world
+10. Strong type system
+
+## 1) FP/OOP fusion
+
+More than any other language, Scala supports a fusion of the FP and OOP paradigms.
+As Martin Odersky has stated, the essence of Scala is a fusion of functional and object-oriented programming in a typed setting, with:
+
+- Functions for the logic, and
+- Objects for the modularity
+
+Possibly some of the best examples of modularity are the classes in the standard library.
+For instance, a `List` is defined as a class---technically it’s an abstract class---and a new instance is created like this:
+
+{% tabs list %}
+{% tab 'Scala 2 and 3' for=list %}
+```scala
+val x = List(1, 2, 3)
+```
+{% endtab %}
+{% endtabs %}
+
+However, what appears to the programmer to be a simple `List` is actually built from a combination of several specialized types, including traits named `Iterable`, `Seq`, and `LinearSeq`.
+Those types are similarly composed of other small, modular units of code.
+
+In addition to building a type like `List` from a series of modular traits, the `List` API also consists of dozens of other methods, many of which are higher-order functions:
+
+{% tabs list-methods %}
+{% tab 'Scala 2 and 3' for=list-methods %}
+```scala
+val xs = List(1, 2, 3, 4, 5)
+
+xs.map(_ + 1) // List(2, 3, 4, 5, 6)
+xs.filter(_ < 3) // List(1, 2)
+xs.find(_ > 3) // Some(4)
+xs.takeWhile(_ < 3) // List(1, 2)
+```
+{% endtab %}
+{% endtabs %}
+
+In those examples, the values in the list can’t be modified.
+The `List` class is immutable, so all of those methods return new values, as shown by the data in each comment.
+
+## 2) A dynamic feel
+
+Scala’s _type inference_ often makes the language feel dynamically typed, even though it’s statically typed.
+This is true with variable declaration:
+
+{% tabs dynamic %}
+{% tab 'Scala 2 and 3' for=dynamic %}
+```scala
+val a = 1
+val b = "Hello, world"
+val c = List(1,2,3,4,5)
+val stuff = ("fish", 42, 1_234.5)
+```
+{% endtab %}
+{% endtabs %}
+
+It’s also true when passing anonymous functions to higher-order functions:
+
+{% tabs dynamic-hof %}
+{% tab 'Scala 2 and 3' for=dynamic-hof %}
+```scala
+list.filter(_ < 4)
+list.map(_ * 2)
+list.filter(_ < 4)
+ .map(_ * 2)
+```
+{% endtab %}
+{% endtabs %}
+
+and when defining methods:
+
+{% tabs dynamic-method %}
+{% tab 'Scala 2 and 3' for=dynamic-method %}
+```scala
+def add(a: Int, b: Int) = a + b
+```
+{% endtab %}
+{% endtabs %}
+
+This is more true than ever in Scala 3, such as when using [union types][union-types]:
+
+{% tabs union %}
+{% tab 'Scala 3 Only' for=union %}
+```scala
+// union type parameter
+def help(id: Username | Password) =
+ val user = id match
+ case Username(name) => lookupName(name)
+ case Password(hash) => lookupPassword(hash)
+ // more code here ...
+
+// union type value
+val b: Password | Username = if (true) name else password
+```
+{% endtab %}
+{% endtabs %}
+
+## 3) Concise syntax
+
+Scala is a low ceremony, “concise but still readable” language. For instance, variable declaration is concise:
+
+{% tabs concise %}
+{% tab 'Scala 2 and 3' for=concise %}
+```scala
+val a = 1
+val b = "Hello, world"
+val c = List(1,2,3)
+```
+{% endtab %}
+{% endtabs %}
+
+Creating types like traits, classes, and enumerations is concise:
+
+{% tabs enum %}
+{% tab 'Scala 3 Only' for=enum %}
+```scala
+trait Tail:
+ def wagTail(): Unit
+ def stopTail(): Unit
+
+enum Topping:
+ case Cheese, Pepperoni, Sausage, Mushrooms, Onions
+
+class Dog extends Animal, Tail, Legs, RubberyNose
+
+case class Person(
+ firstName: String,
+ lastName: String,
+ age: Int
+)
+```
+{% endtab %}
+{% endtabs %}
+
+Higher-order functions are concise:
+
+{% tabs list-hof %}
+{% tab 'Scala 2 and 3' for=list-hof %}
+
+```scala
+list.filter(_ < 4)
+list.map(_ * 2)
+```
+{% endtab %}
+{% endtabs %}
+
+All of these expressions and many more are concise, and still very readable: what we call _expressive_.
+
+## 4) Implicits, simplified
+
+Implicits in Scala 2 were a major distinguishing design feature.
+They represented _the_ fundamental way to abstract over context, with a unified paradigm that served a great variety of use cases, among them:
+
+- Implementing [type classes]({% link _overviews/scala3-book/ca-type-classes.md %})
+- Establishing context
+- Dependency injection
+- Expressing capabilities
+
+Since then, other languages have adopted similar concepts, all of which are variants of the core idea of _term inference_: Given a type, the compiler synthesizes a “canonical” term that has that type.
+
+While implicits were a defining feature in Scala 2, their design has been greatly improved in Scala 3:
+
+- There’s a single way to define “given” values
+- There’s a single way to introduce implicit parameters and arguments
+- There’s a separate way to import givens that does not allow them to hide in a sea of normal imports
+- There’s a single way to define an implicit conversion, which is clearly marked as such, and does not require special syntax
+
+Benefits of these changes include:
+
+- The new design avoids feature interactions and makes the language more consistent
+- It makes implicits easier to learn and harder to abuse
+- It greatly improves the clarity of the 95% of Scala programs that use implicits
+- It has the potential to enable term inference in a principled way that’s also accessible and friendly
+
+These capabilities are described in detail in other sections, so see the [Contextual Abstraction introduction][contextual], and the section on [`given` and `using` clauses][given] for more details.
+
+## 5) Seamless Java integration
+
+Scala/Java interaction is seamless in many ways.
+For instance:
+
+- You can use all of the thousands of Java libraries that are available in your Scala projects
+- A Scala `String` is essentially a Java `String`, with additional capabilities added to it
+- Scala seamlessly uses the date/time classes in Java's `java.time` package
+
+You can also use Java collections classes in Scala, and to give them more functionality, Scala includes methods so you can transform them into Scala collections.
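+
+For example, here’s a brief sketch of converting a Java collection into a Scala collection with the standard `scala.jdk.CollectionConverters` import (the variable names are made up):
+
+{% tabs java-collections-conversion %}
+{% tab 'Scala 2 and 3' for=java-collections-conversion %}
+```scala
+import java.util.{ArrayList => JavaList}
+import scala.jdk.CollectionConverters._
+
+val javaList = new JavaList[Int]()
+javaList.add(1)
+javaList.add(2)
+
+// view the Java list as a Scala Buffer, gaining the Scala collection methods
+val scalaBuffer = javaList.asScala
+scalaBuffer.map(_ * 2)   // ArrayBuffer(2, 4)
+```
+{% endtab %}
+{% endtabs %}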
+
+While almost every interaction is seamless, the [“Interacting with Java” chapter][java] demonstrates how to use some features together better, including how to use:
+
+- Java collections in Scala
+- Java `Optional` in Scala
+- Java interfaces in Scala
+- Scala collections in Java
+- Scala `Option` in Java
+- Scala traits in Java
+- Scala methods that throw exceptions in Java code
+- Scala varargs parameters in Java
+
+See that chapter for more details on these features.
+
+## 6) Client & server
+
+Scala can be used on the server side with terrific frameworks:
+
+- The [Play Framework](https://www.playframework.com) lets you build highly scalable server-side applications and microservices
+- [Akka Actors](https://akka.io) let you use the actor model to greatly simplify distributed and concurrent software applications
+
+Scala can also be used in the browser with the [Scala.js project](https://www.scala-js.org), which is a type-safe replacement for JavaScript.
+The Scala.js ecosystem [has dozens of libraries](https://www.scala-js.org/libraries) to let you use React, Angular, jQuery, and many other JavaScript and Scala libraries in the browser.
+
+In addition to those tools, the [Scala Native](https://github.com/scala-native/scala-native) project “is an optimizing ahead-of-time compiler and lightweight managed runtime designed specifically for Scala.” It lets you build “systems” style binary executable applications with plain Scala code, and also lets you use lower-level primitives.
+
+## 7) Standard library methods
+
+You will rarely ever need to write a custom `for` loop again, because the dozens of pre-built functional methods in the Scala standard library will both save you time, and help make code more consistent across different applications.
+
+The following examples show some of the built-in collections methods, and there are many in addition to these.
+While these all use the `List` class, the same methods work with other collections classes like `Seq`, `Vector`, `LazyList`, `Set`, `Map`, `Array`, and `ArrayBuffer`.
+
+Here are some examples:
+
+{% tabs list-more %}
+{% tab 'Scala 2 and 3' for=list-more %}
+```scala
+List.range(1, 3) // List(1, 2)
+List.range(start = 1, end = 6, step = 2) // List(1, 3, 5)
+List.fill(3)("foo") // List(foo, foo, foo)
+List.tabulate(3)(n => n * n) // List(0, 1, 4)
+List.tabulate(4)(n => n * n) // List(0, 1, 4, 9)
+
+val a = List(10, 20, 30, 40, 10) // List(10, 20, 30, 40, 10)
+a.distinct // List(10, 20, 30, 40)
+a.drop(2) // List(30, 40, 10)
+a.dropRight(2) // List(10, 20, 30)
+a.dropWhile(_ < 25) // List(30, 40, 10)
+a.filter(_ < 25) // List(10, 20, 10)
+a.filter(_ > 100) // List()
+a.find(_ > 20) // Some(30)
+a.head // 10
+a.headOption // Some(10)
+a.init // List(10, 20, 30, 40)
+a.intersect(List(19,20,21)) // List(20)
+a.last // 10
+a.lastOption // Some(10)
+a.map(_ * 2) // List(20, 40, 60, 80, 20)
+a.slice(2, 4) // List(30, 40)
+a.tail // List(20, 30, 40, 10)
+a.take(3) // List(10, 20, 30)
+a.takeRight(2) // List(40, 10)
+a.takeWhile(_ < 30) // List(10, 20)
+a.filter(_ < 30).map(_ * 10) // List(100, 200, 100)
+
+val fruits = List("apple", "pear")
+fruits.map(_.toUpperCase) // List(APPLE, PEAR)
+fruits.flatMap(_.toUpperCase) // List(A, P, P, L, E, P, E, A, R)
+
+val nums = List(10, 5, 8, 1, 7)
+nums.sorted // List(1, 5, 7, 8, 10)
+nums.sortWith(_ < _) // List(1, 5, 7, 8, 10)
+nums.sortWith(_ > _) // List(10, 8, 7, 5, 1)
+```
+{% endtab %}
+{% endtabs %}
+
+## 8) Built-in best practices
+
+Scala idioms encourage best practices in many ways.
+For immutability, you’re encouraged to create immutable `val` declarations:
+
+{% tabs val %}
+{% tab 'Scala 2 and 3' for=val %}
+```scala
+val a = 1 // immutable variable
+```
+{% endtab %}
+{% endtabs %}
+
+You’re also encouraged to use immutable collections classes like `List` and `Map`:
+
+{% tabs list-map %}
+{% tab 'Scala 2 and 3' for=list-map %}
+```scala
+val b = List(1,2,3) // List is immutable
+val c = Map(1 -> "one") // Map is immutable
+```
+{% endtab %}
+{% endtabs %}
+
+Case classes are primarily intended for use in [domain modeling]({% link _overviews/scala3-book/domain-modeling-intro.md %}), and their parameters are immutable:
+
+{% tabs case-class %}
+{% tab 'Scala 2 and 3' for=case-class %}
+```scala
+case class Person(name: String)
+val p = Person("Michael Scott")
+p.name // Michael Scott
+p.name = "Joe" // compiler error (reassignment to val name)
+```
+{% endtab %}
+{% endtabs %}
+
+As shown in the previous section, Scala collections classes support higher-order functions, and you can pass methods (not shown) and anonymous functions into them:
+
+{% tabs higher-order %}
+{% tab 'Scala 2 and 3' for=higher-order %}
+```scala
+a.dropWhile(_ < 25)
+a.filter(_ < 25)
+a.takeWhile(_ < 30)
+a.filter(_ < 30).map(_ * 10)
+nums.sortWith(_ < _)
+nums.sortWith(_ > _)
+```
+{% endtab %}
+{% endtabs %}
+
+`match` expressions let you use pattern matching, and they truly are _expressions_ that return values:
+
+{% tabs match class=tabs-scala-version %}
+{% tab 'Scala 2' for=match %}
+```scala
+val numAsString = i match {
+ case 1 | 3 | 5 | 7 | 9 => "odd"
+ case 2 | 4 | 6 | 8 | 10 => "even"
+ case _ => "too big"
+}
+```
+{% endtab %}
+
+{% tab 'Scala 3' for=match %}
+```scala
+val numAsString = i match
+ case 1 | 3 | 5 | 7 | 9 => "odd"
+ case 2 | 4 | 6 | 8 | 10 => "even"
+ case _ => "too big"
+```
+{% endtab %}
+{% endtabs %}
+
+Because they can return values, they’re often used as the body of a method:
+
+{% tabs match-body class=tabs-scala-version %}
+{% tab 'Scala 2' for=match-body %}
+```scala
+def isTruthy(a: Any) = a match {
+ case 0 | "" => false
+ case _ => true
+}
+```
+{% endtab %}
+
+{% tab 'Scala 3' for=match-body %}
+```scala
+def isTruthy(a: Matchable) = a match
+ case 0 | "" => false
+ case _ => true
+```
+{% endtab %}
+{% endtabs %}
+
+## 9) Ecosystem libraries
+
+Scala libraries for functional programming like [Cats](https://typelevel.org/cats) and [Zio](https://zio.dev) are leading-edge libraries in the FP community.
+All of the buzzwords like high-performance, type safe, concurrent, asynchronous, resource-safe, testable, functional, modular, binary-compatible, efficient, effects/effectful, and more, can be said about these libraries.
+
+We could list hundreds of libraries here, but fortunately they’re all listed in another location: For those details, see the [“Awesome Scala” list](https://github.com/lauris/awesome-scala).
+
+## 10) Strong type system
+
+Scala has a strong type system, and it’s been improved even more in Scala 3.
+Scala 3’s goals were defined early on, and those related to the type system include:
+
+- Simplification
+- Eliminate inconsistencies
+- Safety
+- Ergonomics
+- Performance
+
+_Simplification_ comes about through dozens of changed and dropped features.
+For instance, the changes from the overloaded `implicit` keyword in Scala 2 to the terms `given` and `using` in Scala 3 make the language more clear, especially for beginning developers.
+
+_Eliminating inconsistencies_ is related to the dozens of [dropped features][dropped], [changed features][changed], and [added features][added] in Scala 3.
+Some of the most important features in this category are:
+
+- Intersection types
+- Union types
+- Implicit function types
+- Dependent function types
+- Trait parameters
+- Generic tuples
+
+{% comment %}
+A list of types from the Dotty documentation:
+
+- Inferred types
+- Generics
+- Intersection types
+- Union types
+- Structural types
+- Dependent function types
+- Type classes
+- Opaque types
+- Variance
+- Algebraic Data Types
+- Wildcard arguments in types: ? replacing _
+- Type lambdas
+- Match types
+- Existential types
+- Higher-kinded types
+- Singleton types
+- Refinement types
+- Kind polymorphism
+- Abstract type members and path-dependent types
+- Dependent function types
+- Bounds
+{% endcomment %}
+
+_Safety_ is related to several new and changed features:
+
+- Multiversal equality
+- Restricting implicit conversions
+- Null safety
+- Safe initialization
+
+Good examples of _ergonomics_ are enumerations and extension methods, which have been added to Scala 3 in a very readable manner:
+
+{% tabs extension %}
+{% tab 'Scala 3 Only' for=extension %}
+```scala
+// enumeration
+enum Color:
+ case Red, Green, Blue
+
+// extension methods
+extension (c: Circle)
+ def circumference: Double = c.radius * math.Pi * 2
+ def diameter: Double = c.radius * 2
+ def area: Double = math.Pi * c.radius * c.radius
+```
+{% endtab %}
+{% endtabs %}
+
+_Performance_ relates to several areas.
+One of those is [opaque types][opaque-types].
+In Scala 2 there were several attempts to create solutions in keeping with the Domain-Driven Design (DDD) practice of giving values more meaningful types.
+These attempts included:
+
+- Type aliases
+- Value classes
+- Case classes
+
+Unfortunately all of these approaches had weaknesses, as described in the [_Opaque Types_ SIP](https://docs.scala-lang.org/sips/opaque-types.html).
+Conversely, the goal of opaque types, as described in that SIP, is that “operations on these wrapper types must not create any extra overhead at runtime while still providing a type safe use at compile time.”
+
+For more type system details, see the [Reference documentation][reference].
+
+## Other great features
+
+Scala has many great features, and choosing a Top 10 list can be subjective.
+Several surveys have shown that different groups of developers love different features.
+Hopefully you’ll discover more great Scala features as you use the language.
+
+[java]: {% link _overviews/scala3-book/interacting-with-java.md %}
+[given]: {% link _overviews/scala3-book/ca-context-parameters.md %}
+[contextual]: {% link _overviews/scala3-book/ca-contextual-abstractions-intro.md %}
+[reference]: {{ site.scala3ref }}
+[dropped]: {{ site.scala3ref }}/dropped-features
+[changed]: {{ site.scala3ref }}/changed-features
+[added]: {{ site.scala3ref }}/other-new-features
+
+[union-types]: {% link _overviews/scala3-book/types-union.md %}
+[opaque-types]: {% link _overviews/scala3-book/types-opaque-types.md %}
diff --git a/_overviews/scala3-contribution/arch-context.md b/_overviews/scala3-contribution/arch-context.md
new file mode 100644
index 0000000000..cbf342703f
--- /dev/null
+++ b/_overviews/scala3-contribution/arch-context.md
@@ -0,0 +1,5 @@
+---
+title: Contexts
+description: This page describes contexts in the Scala 3 compiler.
+redirect_to: https://dotty.epfl.ch/docs/contributing/architecture/context.html
+---
\ No newline at end of file
diff --git a/_overviews/scala3-contribution/arch-intro.md b/_overviews/scala3-contribution/arch-intro.md
new file mode 100644
index 0000000000..8b306a4e5c
--- /dev/null
+++ b/_overviews/scala3-contribution/arch-intro.md
@@ -0,0 +1,5 @@
+---
+title: High Level Architecture
+description: This page introduces the high level architecture of the Scala 3 compiler.
+redirect_to: https://dotty.epfl.ch/docs/contributing/architecture/index.html
+---
\ No newline at end of file
diff --git a/_overviews/scala3-contribution/arch-lifecycle.md b/_overviews/scala3-contribution/arch-lifecycle.md
new file mode 100644
index 0000000000..917e5a7824
--- /dev/null
+++ b/_overviews/scala3-contribution/arch-lifecycle.md
@@ -0,0 +1,5 @@
+---
+title: Compiler Overview
+description: This page describes the lifecycle for the Scala 3 compiler.
+redirect_to: https://dotty.epfl.ch/docs/contributing/architecture/lifecycle.html
+---
\ No newline at end of file
diff --git a/_overviews/scala3-contribution/arch-phases.md b/_overviews/scala3-contribution/arch-phases.md
new file mode 100644
index 0000000000..25db11e6a3
--- /dev/null
+++ b/_overviews/scala3-contribution/arch-phases.md
@@ -0,0 +1,5 @@
+---
+title: Compiler Phases
+description: This page describes the phases for the Scala 3 compiler.
+redirect_to: https://dotty.epfl.ch/docs/contributing/architecture/phases.html
+---
\ No newline at end of file
diff --git a/_overviews/scala3-contribution/arch-symbols.md b/_overviews/scala3-contribution/arch-symbols.md
new file mode 100644
index 0000000000..5ec3408b51
--- /dev/null
+++ b/_overviews/scala3-contribution/arch-symbols.md
@@ -0,0 +1,5 @@
+---
+title: Symbols
+description: This page describes symbols in the Scala 3 compiler.
+redirect_to: https://dotty.epfl.ch/docs/contributing/architecture/symbols.html
+---
\ No newline at end of file
diff --git a/_overviews/scala3-contribution/arch-time.md b/_overviews/scala3-contribution/arch-time.md
new file mode 100644
index 0000000000..a56fed21a5
--- /dev/null
+++ b/_overviews/scala3-contribution/arch-time.md
@@ -0,0 +1,5 @@
+---
+title: Time in the Compiler
+description: This page describes the concepts of time in the Scala 3 compiler.
+redirect_to: https://dotty.epfl.ch/docs/contributing/architecture/time.html
+---
\ No newline at end of file
diff --git a/_overviews/scala3-contribution/arch-types.md b/_overviews/scala3-contribution/arch-types.md
new file mode 100644
index 0000000000..cadcee16f2
--- /dev/null
+++ b/_overviews/scala3-contribution/arch-types.md
@@ -0,0 +1,5 @@
+---
+title: Compiler Types
+description: This page discusses the representation of types in the compiler
+redirect_to: https://dotty.epfl.ch/docs/contributing/architecture/types.html
+---
\ No newline at end of file
diff --git a/_overviews/scala3-contribution/contribution-intro.md b/_overviews/scala3-contribution/contribution-intro.md
new file mode 100644
index 0000000000..1708decf17
--- /dev/null
+++ b/_overviews/scala3-contribution/contribution-intro.md
@@ -0,0 +1,5 @@
+---
+title: Contribute to Scala 3
+description: This page describes the format of the contribution guide for the Scala 3 compiler.
+redirect_to: https://dotty.epfl.ch/docs/contributing/index.html
+---
diff --git a/_overviews/scala3-contribution/procedures-areas.md b/_overviews/scala3-contribution/procedures-areas.md
new file mode 100644
index 0000000000..74d593b4ac
--- /dev/null
+++ b/_overviews/scala3-contribution/procedures-areas.md
@@ -0,0 +1,5 @@
+---
+title: Common Issue Locations
+description: This page describes common areas of issues around the Scala 3 compiler.
+redirect_to: https://dotty.epfl.ch/docs/contributing/workflow/areas.html
+---
\ No newline at end of file
diff --git a/_overviews/scala3-contribution/procedures-cheatsheet.md b/_overviews/scala3-contribution/procedures-cheatsheet.md
new file mode 100644
index 0000000000..fdbf2a2435
--- /dev/null
+++ b/_overviews/scala3-contribution/procedures-cheatsheet.md
@@ -0,0 +1,5 @@
+---
+title: Cheatsheets
+description: This page describes a cheatsheet for working with the Scala 3 compiler.
+redirect_to: https://dotty.epfl.ch/docs/contributing/cheatsheet.html
+---
\ No newline at end of file
diff --git a/_overviews/scala3-contribution/procedures-checklist.md b/_overviews/scala3-contribution/procedures-checklist.md
new file mode 100644
index 0000000000..6908332d2d
--- /dev/null
+++ b/_overviews/scala3-contribution/procedures-checklist.md
@@ -0,0 +1,5 @@
+---
+title: Pull Request Checklist
+description: This page describes a checklist before opening a Pull Request to the Scala 3 compiler.
+redirect_to: https://dotty.epfl.ch/docs/contributing/workflow/checklist.html
+---
\ No newline at end of file
diff --git a/_overviews/scala3-contribution/procedures-debugging.md b/_overviews/scala3-contribution/procedures-debugging.md
new file mode 100644
index 0000000000..6fe158614d
--- /dev/null
+++ b/_overviews/scala3-contribution/procedures-debugging.md
@@ -0,0 +1,5 @@
+---
+title: Debugging the Compiler
+description: This page describes navigating around the Scala 3 compiler.
+redirect_to: https://dotty.epfl.ch/docs/contributing/workflow/debugging.html
+---
diff --git a/_overviews/scala3-contribution/procedures-inspection.md b/_overviews/scala3-contribution/procedures-inspection.md
new file mode 100644
index 0000000000..40ec4e2f92
--- /dev/null
+++ b/_overviews/scala3-contribution/procedures-inspection.md
@@ -0,0 +1,5 @@
+---
+title: How to Inspect Values
+description: This page describes inspecting semantic values in the Scala 3 compiler.
+redirect_to: https://dotty.epfl.ch/docs/contributing/workflow/inspection.html
+---
\ No newline at end of file
diff --git a/_overviews/scala3-contribution/procedures-intro.md b/_overviews/scala3-contribution/procedures-intro.md
new file mode 100644
index 0000000000..2cb292caf4
--- /dev/null
+++ b/_overviews/scala3-contribution/procedures-intro.md
@@ -0,0 +1,5 @@
+---
+title: Contributing to Scala 3
+description: This page introduces the compiler procedures for the Scala 3 compiler.
+redirect_to: https://dotty.epfl.ch/docs/contributing/procedures/index.html
+---
\ No newline at end of file
diff --git a/_overviews/scala3-contribution/procedures-navigation.md b/_overviews/scala3-contribution/procedures-navigation.md
new file mode 100644
index 0000000000..a0e869970c
--- /dev/null
+++ b/_overviews/scala3-contribution/procedures-navigation.md
@@ -0,0 +1,5 @@
+---
+title: Finding the Cause of an Issue
+description: This page describes navigating around the Scala 3 compiler.
+redirect_to: https://dotty.epfl.ch/docs/contributing/workflow/cause.html
+---
diff --git a/_overviews/scala3-contribution/procedures-reproduce.md b/_overviews/scala3-contribution/procedures-reproduce.md
new file mode 100644
index 0000000000..aa31ecedde
--- /dev/null
+++ b/_overviews/scala3-contribution/procedures-reproduce.md
@@ -0,0 +1,5 @@
+---
+title: Reproducing an Issue
+description: This page describes reproducing an issue in the Scala 3 compiler.
+redirect_to: https://dotty.epfl.ch/docs/contributing/workflow/reproduce.html
+---
\ No newline at end of file
diff --git a/_overviews/scala3-contribution/procedures-testing.md b/_overviews/scala3-contribution/procedures-testing.md
new file mode 100644
index 0000000000..7c68dc18af
--- /dev/null
+++ b/_overviews/scala3-contribution/procedures-testing.md
@@ -0,0 +1,5 @@
+---
+title: Testing Your Changes
+description: This page describes test procedures in the Scala 3 compiler.
+redirect_to: https://dotty.epfl.ch/docs/contributing/workflow/testing.html
+---
diff --git a/_overviews/scala3-contribution/start-intro.md b/_overviews/scala3-contribution/start-intro.md
new file mode 100644
index 0000000000..48e6100fbd
--- /dev/null
+++ b/_overviews/scala3-contribution/start-intro.md
@@ -0,0 +1,5 @@
+---
+title: Getting Started
+description: This page describes how to get started with contributing to the Scala 3 compiler.
+redirect_to: https://dotty.epfl.ch/docs/contributing/getting-started.html
+---
diff --git a/_overviews/scala3-macros/best-practices.md b/_overviews/scala3-macros/best-practices.md
new file mode 100644
index 0000000000..1122c98620
--- /dev/null
+++ b/_overviews/scala3-macros/best-practices.md
@@ -0,0 +1,155 @@
+---
+type: chapter
+title: Best Practices
+num: 8
+---
+## Inline
+
+### Be careful when inlining for performance
+To take full advantage of the JVM JIT optimisations, avoid generating large methods.
+
+
+## Macros
+**Coming soon**
+
+
+## Quoted code
+
+### Keep quotes readable
+* Try to avoid `${...}` with arbitrary expressions inside
+ * Use `$someExpr`
+ * Use `${ someExprFrom('localExpr) }`
+
+To illustrate, consider the following example:
+```scala
+val sc: StringContext = ...
+'{ StringContext(${Varargs(sc.parts.map(Expr(_)))}: _*) }
+```
+Instead, we can write the following:
+
+```scala
+val sc: StringContext = ...
+val partExprs = sc.parts.map(Expr(_))
+val partsExpr = Varargs(partExprs)
+'{ StringContext($partsExpr: _*) }
+```
+The contents of the quote are much more clear in the second example.
+
+### Avoid nested contexts
+
+Consider the following code:
+
+```scala
+val y: Expr[Int] = ...
+def body(x: Expr[Int])(using quotes.Nested) = '{ $x + $y }
+'{ (x: Int) => ${ body('x) } }
+```
+
+Instead, use a normal context and pass all needed expressions.
+This has also the advantage of allowing the function to not be defined locally.
+```scala
+def body(x: Expr[Int], y: Expr[Int])(using Quotes) =
+ '{ $x + $y }
+
+val y: Expr[Int] = ...
+'{ (x: Int) => ${ body('x, y) } }
+```
+
+## Quotes Reflect
+
+For this section, consider the following setup:
+
+```scala
+object Box:
+ sealed trait Base
+ case class Leaf(x: Int) extends Base
+
+// Quotes in contextual scope
+val boxTpe : TypeRepr = TypeRepr.of[Box.type]
+val baseTpe: TypeRepr = TypeRepr.of[Box.Base]
+val baseSym: Symbol = baseTpe.typeSymbol
+val leafTpe: TypeRepr = TypeRepr.of[Box.Leaf]
+val leafSym: Symbol = leafTpe.typeSymbol
+```
+
+### Avoid `Symbol.tree`
+
+On an object `sym: Symbol`, `sym.tree` returns the `Tree` associated with the symbol.
+Be careful when using this method, as the tree for a symbol might not be defined.
+If the code associated with the symbol was compiled in a different run than the code accessing it, and the `-Yretain-trees` compilation option was not used, then the `tree` of the symbol will not be available.
+Symbols originating from Java code do not have an associated `tree`.
+
+### Obtaining a `TypeRepr` from a `Symbol`
+
+In the previous section, we saw that `Symbol.tree` should be avoided, and that you should therefore not use `sym.tree.tpe` on `sym: Symbol`.
+Thus, to obtain the `TypeRepr` corresponding to a `Symbol`, it is recommended to use `tpe.memberType` on `tpe: TypeRepr` objects.
+
+We can obtain the `TypeRepr` of `Leaf` in two ways:
+ 1. `TypeRepr.of[Box.Leaf]`
+ 2. `boxTpe.memberType(leafSym)`
+(In other words, we request the `TypeRepr` of the member of `Box` whose symbol is `leafSym`.)
+
+While the two approaches are equivalent, the first is only possible if you already know that you are looking for the type `Box.Leaf`.
+The second approach allows you to explore an unknown API.
+
+### Use `Symbol`s to compare definitions
+
+Read more about Symbols [here][symbol].
+
+Symbols allow you to compare definitions using `==`:
+```scala
+leafSym == baseSym.children.head // Is true
+```
+
+However, `==` on `TypeRepr`s does not produce the same result:
+```scala
+boxTpe.memberType(baseSym.children.head) == leafTpe // Is false
+```
+
+### Obtaining a Symbol for a type
+
+There is a handy shortcut to get the symbol for the definition of `T`.
+Instead of
+
+```scala
+TypeTree.of[T].tpe.typeSymbol
+```
+you can use
+
+```scala
+TypeRepr.of[T].typeSymbol
+```
+
+### Pattern match your way into the API
+
+Pattern matching is a very ergonomic approach to the API. Always have a look at
+the `unapply` method defined in `*Module` objects.
+
+### Search the contextual scope in your macros
+
+You can search for given instances using `Implicits.search`.
+
+For example:
+
+```scala
+def summonOrFail[T: Type]: Expr[T] =
+ val tpe = TypeRepr.of[T]
+ Implicits.search(tpe) match
+ case success: ImplicitSearchSuccess =>
+ val implicitTerm = success.tree
+ implicitTerm.asExprOf[T]
+ case failure: ImplicitSearchFailure =>
+ reflect.report.throwError("Could not find an implicit for " + Type.show[T])
+```
+
+If you are writing a macro and prefer to handle `Expr`s, `Expr.summon` is a
+convenient wrapper around `Implicits.search`:
+
+```scala
+def summonOrFail[T: Type]: Expr[T] =
+ Expr.summon[T] match
+ case Some(imp) => imp
+ case None => reflect.report.throwError("Could not find an implicit for " + Type.show[T])
+```
+
+[symbol]: {% link _overviews/scala3-macros/tutorial/reflection.md %}
diff --git a/_overviews/scala3-macros/faq.md b/_overviews/scala3-macros/faq.md
new file mode 100644
index 0000000000..7a809cdd60
--- /dev/null
+++ b/_overviews/scala3-macros/faq.md
@@ -0,0 +1,76 @@
+---
+type: chapter
+title: FAQ
+num: 7
+---
+
+## Which should I use, `Expr(...)` or `'{...}`?
+If you can write your code using `Expr(...)`, you will evaluate more at compile time.
+Only use `'{...}` if you really need to evaluate the code later at runtime, usually because it depends on runtime values.
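+
+For instance, here is a small sketch (the method names `now` and `later` are only illustrative):
+
+```scala
+import scala.quoted.*
+
+def now(using Quotes): Expr[Int] = Expr(1 + 1)  // the sum is computed while the macro runs
+def later(using Quotes): Expr[Int] = '{ scala.io.StdIn.readInt() }  // depends on a runtime value
+```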
+
+## Which is better, `Expr(true)` or `'{true}`?
+All quotes containing a value of a primitive type are optimised to an `Expr.apply`.
+Choose one notation and stick with it throughout your project to avoid confusion.
+
+## How do I get a value out of an `Expr`?
+If the expression represents a value, you can use `.value`, `.valueOrAbort`, or `Expr.unapply`.
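+
+For example, a minimal sketch (the helper `twice` is only illustrative):
+
+```scala
+import scala.quoted.*
+
+def twice(x: Expr[Int])(using Quotes): Expr[Int] =
+  x.value match
+    case Some(i) => Expr(i * 2)  // the value was statically known
+    case None    => '{ $x * 2 }  // otherwise, generate code that doubles it at runtime
+```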
+
+## How can I get the precise type of an `Expr`?
+We can get the precise type (`Type`) of an `Expr` using the following pattern match:
+```scala
+val x: Expr[X] = ...
+x match
+ case '{ $x: t } =>
+ // `x: Expr[X & t]` where `t` is the precise type of `x`
+```
+
+## How do I summon all types of a tuple type?
+If I have a type `(T1, T2, ...)`, how do I generate the term for `(summon[T1], summon[T2], ...)`, or get the individual expressions with the summoned values?
+
+Depending on your use case, the way you summon them will vary.
+In particular, the code you need depends on the kind of output you want (`Expr[Tuple]`, `List[Expr[Any]]`, or something else) and how you need errors to be reported.
+Here are two examples that should give you the basic skeleton for two different variants of this code.
+
+```scala
+ def summonAllInList[T](using Type[T])(using Quotes): List[Expr[Any]] = {
+ Type.of[T] match
+ case '[ head *: tail ] =>
+ Expr.summon[head] match
+ case Some(headExpr) => headExpr :: summonAllInList[tail]
+ case _ => quotes.reflect.report.throwError(s"Could not summon ${Type.show[head]}")
+ case '[ EmptyTuple ] => Nil
+ case _ => quotes.reflect.report.throwError(s"Could not `summonAllInList` of tuple with unknown size: ${Type.show[T]}")
+ }
+```
+
+```scala
+ def summonAll[T](using Type[T])(using Quotes): Option[Expr[Tuple]] = {
+   Type.of[T] match
+     case '[ head *: tail ] =>
+       for
+         headExpr <- Expr.summon[head]
+         tailExpr <- summonAll[tail]
+       yield '{ $headExpr *: $tailExpr }
+     case '[ EmptyTuple ] => Some('{ EmptyTuple })
+     case _ => None
+ }
+```
+
+## How do I summon an expression for statically unknown types?
+
+You can summon an expression from either a `TypeRepr` or a `Type` as shown below.
+
+If you have a `TypeRepr` use:
+```scala
+val tpe: TypeRepr = ...
+Implicits.search(tpe) match
+ case result: ImplicitSearchSuccess => result.tree
+ case _ =>
+```
+
+If instead you have a `Type[_]`, use:
+```scala
+val tpe: Type[_] = ...
+tpe match
+ // (1) Use `a` as the name of the unknown type and (2) bring a given `Type[a]` into scope
+ case '[a] => Expr.summon[a]
+```
diff --git a/_overviews/scala3-macros/other-resources.md b/_overviews/scala3-macros/other-resources.md
new file mode 100644
index 0000000000..a50aefa23e
--- /dev/null
+++ b/_overviews/scala3-macros/other-resources.md
@@ -0,0 +1,27 @@
+---
+type: chapter
+title: Other Resources
+num: 9
+---
+
+## Scala 2 migration
+ * [Scala 2 migration and cross-compilation][migration]
+ * [Migration status][migration-status]
+
+## Dotty documentation
+- [Dotty Documentation]({{ site.scala3ref }}/metaprogramming)
+- [Macros: The Plan For Scala 3](https://www.scala-lang.org/blog/2018/04/30/in-a-nutshell.html)
+- [Examples](https://github.com/lampepfl/dotty-macro-examples) - a repository with small, self-contained examples of various tasks done with Dotty macros.
+
+## Talks
+* [Scala Days - Metaprogramming in Dotty](https://www.youtube.com/watch?v=ZfDS_gJyPTc)
+
+## Projects and examples
+* [dotty-macro-examples](https://github.com/lampepfl/dotty-macro-examples)
+* [XML Interpolator](https://github.com/dotty-staging/xml-interpolator/tree/master)
+* [Shapeless 3](https://github.com/dotty-staging/shapeless/tree/shapeless-3)
+* *More Coming soon*
+
+
+[migration]: {% link _overviews/scala3-migration/tutorial-macro-cross-building.md %}
+[migration-status]: https://scalacenter.github.io/scala-3-migration-guide/docs/macros/macro-libraries.html#macro-libraries
diff --git a/_overviews/scala3-macros/tutorial/compiletime.md b/_overviews/scala3-macros/tutorial/compiletime.md
new file mode 100644
index 0000000000..0204efa4c4
--- /dev/null
+++ b/_overviews/scala3-macros/tutorial/compiletime.md
@@ -0,0 +1,75 @@
+---
+type: section
+title: Scala Compile-time Operations
+num: 3
+
+previous-page: inline
+next-page: macros
+---
+
+Operations in [scala.compiletime][compiletime-api] are metaprogramming operations that can be used within an `inline` method.
+These operations cover some common use cases of macros without requiring you to define a macro.
+
+## Reporting
+
+It is possible to emit error messages when inlining code.
+
+```scala
+import scala.compiletime.error
+
+inline def doSomething(inline mode: Boolean): Unit =
+ if mode then ...
+ else if !mode then ...
+ else error("Mode must be a known value")
+
+doSomething(true)
+doSomething(false)
+val bool: Boolean = ...
+doSomething(bool) // error: Mode must be a known value
+```
+
+If `error` is called outside an inline method, the error will be emitted when compiling that call.
+If the `error` is written inside an inline method, the error will be emitted only if, after inlining the call, it is not removed as part of a dead branch.
+In the previous example, if the value of `mode` were known at compile time, we would only keep one of the first two branches.
+
+If we want to include part of the source code of the arguments in the error message, we can use the `codeOf` method.
+
+```scala
+import scala.compiletime.{codeOf, error}
+
+inline def doSomething(inline mode: Boolean): Unit =
+ if mode then ...
+ else if !mode then ...
+ else error("Mode must be a known value but got: " + codeOf(mode))
+
+val bool: Boolean = ...
+doSomething(bool) // error: Mode must be a known value but got: bool
+```
+
+## Summoning
+
+There are two ways to summon values in inline methods: the first is with a `using` parameter, and the second is with one of `summonInline`, `summonAll`, or `summonFrom`.
+`using` will summon the value at the call site before inlining, as if the method were not `inline`.
+On the other hand, `summonInline` will summon after inlining if the call is not eliminated from a dead branch.
+`summonAll` provides a way to summon multiple values at the same time from a tuple type.
+`summonFrom` provides a way to try several implicit searches.
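+
+As an illustration, here is a minimal sketch of `summonInline` with a small, hypothetical `Show` type class:
+
+```scala
+import scala.compiletime.summonInline
+
+trait Show[T]:
+  def show(t: T): String
+
+given Show[Int] with
+  def show(t: Int): String = t.toString
+
+inline def display[T](value: T): String =
+  summonInline[Show[T]].show(value)  // the instance is summoned after inlining, at each call site
+
+val s = display(42)  // "42"
+```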
+
+## Values
+* `constValue`, `constValueOpt` and `constValueTuple`
+* `S`
+*Coming soon*
+
+## Testing
+* `testing.typeChecks` and `testing.typeCheckErrors`
+
+## Assertions
+* `byName`
+
+*Coming soon*
+
+## Inline Matching
+* `erasedValue`
+
+*Coming soon*
+
+## Ops (scala.compiletime.ops)
+*Coming soon*
+
+
+[compiletime-api]: https://scala-lang.org/api/3.x/scala/compiletime.html
diff --git a/_overviews/scala3-macros/tutorial/index.md b/_overviews/scala3-macros/tutorial/index.md
new file mode 100644
index 0000000000..e70c39ef45
--- /dev/null
+++ b/_overviews/scala3-macros/tutorial/index.md
@@ -0,0 +1,36 @@
+---
+type: chapter
+title: Tutorial
+description: A tutorial to cover all the features involved in writing macros in Scala 3.
+num: 1
+
+next-page: inline
+---
+
+This tutorial covers all the features involved in writing macros in Scala 3.
+
+The metaprogramming API of Scala 3 is designed in layers to gradually
+support different levels of use-cases. Each successive layer exposes additional
+abstractions and offers more fine-grained control.
+
+- As a starting point, the new [`inline` feature][inline] allows some abstractions (values and methods) to be marked as statically reducible.
+ It provides the entry point for macros and other metaprogramming utilities.
+
+- [Compile-time operations][compiletime] offer additional metaprogramming utilities that can be used within `inline` methods (for example to improve error reporting), without having to define a macro.
+
+- Starting from `inline` methods, [macros][macros] are programs that explicitly operate on programs.
+
+  - Macros can be defined in terms of a _high-level_ API of [quoted expressions][quotes], which admits simple construction and deconstruction of program expressions.
+
+ - Macros can also be defined in terms of a more _low-level_ API of [Reflection][reflection], that allows detailed inspection of programs.
+
+> The tutorial uses the API of Scala 3.0.0-RC3. The API had many small changes in this revision.
+
+> 🚧 We are still in the process of writing the tutorial. You can [help us][contributing] 🚧
+
+[contributing]: {% link scala3/contribute-to-docs.md %}
+[compiletime]: {% link _overviews/scala3-macros/tutorial/compiletime.md %}
+[inline]: {% link _overviews/scala3-macros/tutorial/inline.md %}
+[macros]: {% link _overviews/scala3-macros/tutorial/macros.md %}
+[quotes]: {% link _overviews/scala3-macros/tutorial/quotes.md %}
+[reflection]: {% link _overviews/scala3-macros/tutorial/reflection.md %}
diff --git a/_overviews/scala3-macros/tutorial/inline.md b/_overviews/scala3-macros/tutorial/inline.md
new file mode 100644
index 0000000000..0fe620f162
--- /dev/null
+++ b/_overviews/scala3-macros/tutorial/inline.md
@@ -0,0 +1,512 @@
+---
+type: section
+title: Inline
+num: 2
+
+previous-page: index
+next-page: compiletime
+---
+
+Inlining is a common compile-time metaprogramming technique, typically used to achieve performance optimizations. As we will see, in Scala 3 the concept of inlining provides us with an entry point to programming with macros.
+
+1. It introduces `inline` as a [soft keyword][soft-modifier].
+2. It guarantees that inlining actually happens instead of being best-effort.
+3. It introduces operations that are guaranteed to evaluate at compile-time.
+
+## Inline Constants
+
+The simplest form of inlining is to inline constants in programs:
+
+
+```scala
+inline val pi = 3.141592653589793
+inline val pie = "🥧"
+```
+
+The usage of the keyword `inline` in the _inline value definitions_ above *guarantees* that all references to `pi` and `pie` are inlined:
+
+```scala
+val pi2 = pi + pi // val pi2 = 6.283185307179586
+val pie2 = pie + pie // val pie2 = "🥧🥧"
+```
+
+In the code above, the references `pi` and `pie` are inlined.
+Then an optimization called "constant folding" is applied by the compiler, which computes the resulting value `pi2` and `pie2` at _compile-time_.
+
+> ##### Inline (Scala 3) vs. final (Scala 2)
+> In Scala 2, we would have used the modifier `final` on a definition without an explicit return type:
+>
+> ```scala
+> final val pi = 3.141592653589793
+> final val pie = "🥧"
+> ```
+>
+> The `final` modifier will ensure that `pi` and `pie` will take a _literal type_.
+> Then the constant propagation optimization in the compiler can perform inlining for such definitions.
+> However, this form of constant propagation is _best-effort_ and not guaranteed.
+> Scala 3.0 also supports `final val`-inlining as _best-effort_ inlining for migration purposes.
+
+Currently, only constant expressions may appear on the right-hand side of an inline value definition.
+Therefore, the following code is invalid, though the compiler knows that the right-hand side is a compile-time constant value:
+
+```Scala
+val pi = 3.141592653589793
+inline val pi2 = pi + pi // error
+```
+Note that if we define `pi` as an `inline val` (as we did at the start), the addition can be computed at compile time.
+This resolves the above error, and `pi2` receives the literal type `6.283185307179586d`.
+
+## Inline Methods
+
+We can also use the modifier `inline` to define a method that should be inlined at the call-site:
+
+```scala
+inline def logged[T](level: Int, message: => String)(inline op: T): T =
+ println(s"[$level]Computing $message")
+ val res = op
+ println(s"[$level]Result of $message: $res")
+ res
+```
+
+When an inline method like `logged` is called, its body will be expanded at the call-site at compile time!
+That is, the call to `logged` will be replaced by the body of the method.
+The provided arguments are statically substituted for the parameters of `logged`, correspondingly.
+Therefore, the compiler inlines the following call
+
+```scala
+logged(logLevel, getMessage()) {
+ computeSomething()
+}
+```
+
+and rewrites it to:
+
+```Scala
+val level = logLevel
+def message = getMessage()
+
+println(s"[$level]Computing $message")
+val res = computeSomething()
+println(s"[$level]Result of $message: $res")
+res
+```
+
+### Semantics of Inline Methods
+Our example method `logged` uses three different kinds of parameters, illustrating
+that inlining handles those parameters differently:
+
+1. __By-value parameters__. The compiler generates a `val` binding for *by-value* parameters. This way, the argument expression is evaluated only once before the method body is reduced.
+
+ This can be seen in the parameter `level` from the example.
+ In some cases, when the arguments are pure constant values, the binding is omitted and the value is inlined directly.
+
+2. __By-Name parameters__. The compiler generates a `def` binding for *by-name* parameters. This way, the argument expression is evaluated every time it is used, but the code is shared.
+
+ This can be seen in the parameter `message` from the example.
+
+3. __Inline parameters__. Inline parameters do not create bindings and are simply inlined. This way, their code is duplicated everywhere they are used.
+
+ This can be seen in the parameter `op` from the example.
+
+The way the different parameters are translated guarantees that inlining a call **will not change** its semantics.
+This implies that the initial elaboration (overloading resolution, implicit search, ...), performed while typing the body of the inline method, will not change when inlined.
+
+For example, consider the following code:
+
+```scala
+class Logger:
+ def log(x: Any): Unit = println(x)
+
+class RefinedLogger extends Logger:
+ override def log(x: Any): Unit = println("Any: " + x)
+ def log(x: String): Unit = println("String: " + x)
+
+inline def logged[T](logger: Logger, x: T): Unit =
+ logger.log(x)
+```
+
+The separate type checking of `logger.log(x)` will resolve the call to the method `Logger.log` which takes an argument of the type `Any`.
+Now, given the following code:
+
+```scala
+logged(new RefinedLogger, "✔️")
+```
+
+It expands to:
+
+```
+val logger = new RefinedLogger
+val x = "✔️"
+logger.log(x)
+```
+Even though we now know that `x` is a `String`, the call `logger.log(x)` still resolves to the method `Logger.log`, which takes an argument of the type `Any`. Note that because of late binding, the actual method called at runtime will be the overridden method `RefinedLogger.log`.
+
+> ##### Inlining preserves semantics
+> Regardless of whether `logged` is defined as a `def` or `inline def`, it performs the same operations with only some differences in performance.
+
+### Inline Parameters
+
+One important application of inlining is to enable constant folding optimisation across method boundaries.
+Inline parameters do not create bindings and their code is duplicated everywhere they are used.
+
+```scala
+inline def perimeter(inline radius: Double): Double =
+ 2.0 * pi * radius
+```
+In the above example, we expect that if the `radius` is statically known then the whole computation can be performed at compile-time.
+The following call
+
+```scala
+perimeter(5.0)
+```
+
+is rewritten to:
+
+```Scala
+2.0 * pi * 5.0
+```
+
+Then `pi` is inlined (we assume the `inline val` definition from the start):
+
+```Scala
+2.0 * 3.141592653589793 * 5.0
+```
+
+Finally, it is constant folded to
+
+```
+31.41592653589793
+```
+
+> ##### Inline parameters should be used only once
+> We need to be careful when using an inline parameter **more than once**.
+> Consider the following code:
+>
+> ```scala
+> inline def printPerimeter(inline radius: Double): Double =
+> println(s"Perimeter (r = $radius) = ${perimeter(radius)}")
+> ```
+> It works perfectly fine when a constant or reference to a val is passed to it.
+> ```scala
+> printPerimeter(5.0)
+> // inlined as
+> println(s"Perimeter (r = ${5.0}) = ${31.4159265359}")
+> ```
+>
+> But if a larger expression (possibly with side-effects) is passed, we might accidentally duplicate work.
+>
+> ```scala
+> printPerimeter(longComputation())
+> // inlined as
+> println(s"Perimeter (r = ${longComputation()}) = ${6.283185307179586 * longComputation()}")
+> ```
+
+A useful application of inline parameters is to avoid the creation of _closures_, incurred by the use of by-name parameters.
+
+```scala
+inline def assert1(cond: Boolean, msg: => String) =
+ if !cond then
+ throw new Exception(msg)
+
+assert1(x, "error1")
+// is inlined as
+val cond = x
+def msg = "error1"
+if !cond then
+ throw new Exception(msg)
+```
+In the above example, we can see that the use of a by-name parameter leads to a local definition `msg`, which allocates a closure before the condition is checked.
+
+If we use an inline parameter instead, we can guarantee that the condition is checked before any of the code that handles the exception is reached.
+In the case of an assertion, this code should never be reached.
+```scala
+inline def assert2(cond: Boolean, inline msg: String) =
+ if !cond then
+ throw new Exception(msg)
+
+assert2(x, "error2")
+// is inlined as
+val cond = x
+if !cond then
+ throw new Exception("error2")
+```
+
+### Inline Conditionals
+If the condition of an `if` is a known constant (`true` or `false`), possibly after inlining and constant folding, then the conditional is partially evaluated and only one branch will be kept.
+
+For example, the following `power` method contains some `if`s that will potentially unroll the recursion and remove all method calls.
+
+```scala
+inline def power(x: Double, inline n: Int): Double =
+ if (n == 0) 1.0
+ else if (n % 2 == 1) x * power(x, n - 1)
+ else power(x * x, n / 2)
+```
+Calling `power` with statically known constants results in the following code:
+ ```scala
+ power(2, 2)
+ // first inlines as
+ val x = 2
+ if (2 == 0) 1.0 // dead branch
+ else if (2 % 2 == 1) x * power(x, 2 - 1) // dead branch
+ else power(x * x, 2 / 2)
+ // partially evaluated to
+ val x = 2
+ power(x * x, 1)
+ ```
+
+{::options parse_block_html="true" /}
+
+<details>
+  <summary>See rest of inlining steps</summary>
+
+```scala
+// then inlined as
+val x = 2
+val x2 = x * x
+if (1 == 0) 1.0 // dead branch
+else if (1 % 2 == 1) x2 * power(x2, 1 - 1)
+else power(x2 * x2, 1 / 2) // dead branch
+// partially evaluated to
+val x = 2
+val x2 = x * x
+x2 * power(x2, 0)
+// then inlined as
+val x = 2
+val x2 = x * x
+x2 * {
+ if (0 == 0) 1.0
+ else if (0 % 2 == 1) x2 * power(x2, 0 - 1) // dead branch
+ else power(x2 * x2, 0 / 2) // dead branch
+}
+// partially evaluated to
+val x = 2
+val x2 = x * x
+x2 * 1.0
+```
+
+</details>
+
+{::options parse_block_html="false" /}
+
+In contrast, let us imagine we do not know the value of `n`:
+
+```scala
+power(2, unknownNumber)
+```
+Driven by the `inline` annotation on the parameter, the compiler will try to unroll the recursion, but without success, since the parameter is not statically known.
+
+{::options parse_block_html="true" /}
+
+<details>
+  <summary>See inlining steps</summary>
+
+```scala
+// first inlines as
+val x = 2
+if (unknownNumber == 0) 1.0
+else if (unknownNumber % 2 == 1) x * power(x, unknownNumber - 1)
+else power(x * x, unknownNumber / 2)
+// then inlined as
+val x = 2
+if (unknownNumber == 0) 1.0
+else if (unknownNumber % 2 == 1) x * {
+ if (unknownNumber - 1 == 0) 1.0
+ else if ((unknownNumber - 1) % 2 == 1) x2 * power(x2, unknownNumber - 1 - 1)
+ else power(x2 * x2, (unknownNumber - 1) / 2)
+}
+else {
+ val x2 = x * x
+ if (unknownNumber / 2 == 0) 1.0
+ else if ((unknownNumber / 2) % 2 == 1) x2 * power(x2, unknownNumber / 2 - 1)
+ else power(x2 * x2, unknownNumber / 2 / 2)
+}
+// Oops this will never finish compiling
+...
+```
+
+</details>
+
+{::options parse_block_html="false" /}
+
+To guarantee that the branching can indeed be performed at compile-time, we can use the `inline if` variant of `if`.
+Annotating a conditional with `inline` guarantees that the conditional can be reduced at compile-time, and an error is emitted if the condition is not a statically known constant.
+
+```scala
+inline def power(x: Double, inline n: Int): Double =
+ inline if (n == 0) 1.0
+ else inline if (n % 2 == 1) x * power(x, n - 1)
+ else power(x * x, n / 2)
+```
+
+```scala
+power(2, 2) // Ok
+power(2, unknownNumber) // error
+```
+
+We will come back to this example later and see how we can get more control on how code is generated.
+
+
+### Inline Method Overriding
+
+To ensure correct behavior when combining the static feature of `inline def` with the dynamic feature of interfaces and overriding, some restrictions have to be imposed.
+
+#### Effectively final
+Firstly, all inline methods are _effectively final_.
+This ensures that the overload resolution at compile-time behaves the same as the one at runtime.
+
+#### Signature preservation
+Secondly, overrides must have the _exact same signature_ as the overridden method including the inline parameters.
+This ensures that the call semantics are the same for both methods.
+
+#### Retained inline methods
+It is possible to implement or override a normal method with an inline method.
+
+Consider the following example:
+
+```scala
+trait Logger:
+ def log(x: Any): Unit
+
+class PrintLogger extends Logger:
+ inline def log(x: Any): Unit = println(x)
+```
+However, calling the `log` method directly on `PrintLogger` will inline the code, while calling it on `Logger` will not.
+To also admit the latter, the code of `log` must exist at runtime.
+We call this a _retained inline_ method.
+
+For any non-retained inline `def` or `val` the code can always be fully inlined at all call sites.
+Hence, those methods will not be needed at runtime and can be erased from the bytecode.
+However, retained inline methods must be compatible with the case that they are not inlined.
+In particular, retained inline methods cannot take any inline parameters.
+Furthermore, an `inline if` (as in the `power` example) will not work, since the `if` cannot be constant folded in the retained case.
+Other examples involve metaprogramming constructs that only have meaning when inlined.
+
+#### Abstract inline methods
+It is also possible to create _abstract inline definitions_.
+
+```scala
+trait InlineLogger:
+ inline def log(inline x: Any): Unit
+
+class PrintLogger extends InlineLogger:
+ inline def log(inline x: Any): Unit = println(x)
+```
+
+This forces the implementation of `log` to be an inline method and also allows `inline` parameters.
+Counterintuitively, `log` cannot be called directly on the interface `InlineLogger`: the method implementation is not statically known, so we do not know what to inline.
+Calling an abstract inline method thus results in an error.
+The usefulness of abstract inline methods becomes apparent when used in another inline method:
+
+```scala
+inline def logged(logger: InlineLogger, x: Any) =
+ logger.log(x)
+```
+Let us assume a call to `logged` on a concrete instance of `PrintLogger`:
+```scala
+logged(new PrintLogger, "🥧")
+// inlined as
+val logger = new PrintLogger
+val x = "🥧"
+logger.log(x)
+```
+After inlining, the call to `log` is de-virtualized and known to be on `PrintLogger`.
+Therefore, the code of `log` can also be inlined.
+
+#### Summary of inline methods
+* All `inline` methods are final.
+* Abstract `inline` methods can only be implemented by inline methods.
+* If an inline method overrides/implements a normal method then it must be retained and retained methods cannot have inline parameters.
+* Abstract `inline` methods cannot be called directly (except in inline code).
+
+## Transparent Inline Methods
+Transparent inlines are a simple, yet powerful, extension to `inline` methods and unlock many metaprogramming use cases.
+A call to a transparent inline method allows the return type to be refined based on the precise type of the inlined expression.
+In Scala 2 parlance, transparent inlines capture the essence of _whitebox macros_.
+
+
+```scala
+transparent inline def default(inline name: String): Any =
+ inline if name == "Int" then 0
+ else inline if name == "String" then ""
+ else ...
+```
+
+```scala
+val n0: Int = default("Int")
+val s0: String = default("String")
+```
+
+Note that even if the return type of `default` is `Any`, the first call is typed as an `Int` and the second as a `String`.
+The return type represents the upper bound of the type within the inlined term.
+We could also have been more precise and have written instead
+```scala
+transparent inline def default(inline name: String): 0 | "" = ...
+```
+While in this example it seems the return type is not necessary, it is important when the inline method is recursive.
+There, it should be precise enough for the recursion to typecheck, but it will become more precise after inlining.
+
+> ##### Transparents affect binary compatibility
+> It is important to note that changing the body of a `transparent inline def` will change how the call site is typed.
+> This implies that the body plays a part in the binary and source compatibility of this interface.
+
+
+## Compiletime Operations
+
+We also provide some operations that evaluate at compiletime.
+
+### Inline Matches
+Like inline `if`, inline matches guarantee that the pattern matching can be statically reduced at compile time and only one branch is kept.
+
+In the following example, the scrutinee, `x`, is an inline parameter that we can pattern match on at compile time.
+
+```scala
+inline def half(x: Any): Any =
+ inline x match
+ case x: Int => x / 2
+ case x: String => x.substring(0, x.length / 2)
+
+half(6)
+// expands to:
+// val x = 6
+// x / 2
+
+half("hello world")
+// expands to:
+// val x = "hello world"
+// x.substring(0, x.length / 2)
+```
+This illustrates that inline matches provide a way to match on the static type of some expression.
+As we match on the _static_ type of an expression, the following code would fail to compile.
+
+```scala
+val n: Any = 3
+half(n) // error: n is not statically known to be an Int or a String
+```
+Notably, the value `n` is not marked as `inline`; in consequence, at compile time
+there is not enough information about the scrutinee to decide which branch to take.
+
+### scala.compiletime
+The package `scala.compiletime` provides useful metaprogramming abstractions that can be used within `inline` methods to provide custom semantics.
+
+## Macros
+Inlining is also the core mechanism used to write macros.
+Macros provide a way to control the code generation and analysis after the call is inlined.
+
+
+```scala
+inline def power(x: Double, inline n: Int) =
+ ${ powerCode('x, 'n) }
+
+def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = ...
+```
+
+
+[soft-modifier]: {{ site.scala3ref }}/soft-modifier.html
+
+[contributing]: {% link scala3/contribute-to-docs.md %}
+[best-practices]: {% link _overviews/scala3-macros/best-practices.md %}
+[compiletime]: {% link _overviews/scala3-macros/tutorial/compiletime.md %}
+[faq]: {% link _overviews/scala3-macros/faq.md %}
+[inline]: {% link _overviews/scala3-macros/tutorial/inline.md %}
+[macros]: {% link _overviews/scala3-macros/tutorial/macros.md %}
+[quotes]: {% link _overviews/scala3-macros/tutorial/quotes.md %}
+[tasty]: {% link _overviews/scala3-macros/tutorial/reflection.md %}
diff --git a/_overviews/scala3-macros/tutorial/macros.md b/_overviews/scala3-macros/tutorial/macros.md
new file mode 100644
index 0000000000..1c90c15928
--- /dev/null
+++ b/_overviews/scala3-macros/tutorial/macros.md
@@ -0,0 +1,304 @@
+---
+type: section
+title: Scala 3 Macros
+num: 4
+
+previous-page: compiletime
+next-page: quotes
+---
+
+[Inline methods][inline] provide us with an elegant technique for metaprogramming by performing some operations at compile time.
+However, sometimes inlining is not enough and we need more powerful ways to analyze and synthesize programs at compile time.
+Macros enable us to do exactly this: treat **programs as data** and manipulate them.
+
+
+## Macros Treat Programs as Values
+With a macro, we can treat programs as values, which allows us to analyze and generate them at compile time.
+
+A Scala expression with type `T` is represented by an instance of the type `scala.quoted.Expr[T]`.
+
+We will dig into the details of the type `Expr[T]`, as well as the different ways of analyzing and constructing instances, when talking about [Quoted Code][quotes] and [Reflection][tasty].
+For now, it suffices to know that macros are metaprograms that manipulate expressions of type `Expr[T]`.
+
+The following macro implementation prints the expression of the provided argument at compile-time to the standard output of the compiler process:
+```scala
+import scala.quoted.* // imports Quotes, Expr
+
+def inspectCode(x: Expr[Any])(using Quotes): Expr[Any] =
+ println(x.show)
+ x
+```
+After printing the argument expression, we return the original argument as a Scala expression of type `Expr[Any]`.
+
+As foreshadowed in the section on [Inline][inline], inline methods provide the entry point for macro definitions:
+
+```scala
+inline def inspect(inline x: Any): Any = ${ inspectCode('x) }
+```
+All macros are defined with an `inline def`.
+The implementation of this entry point always has the same shape:
+
+- it contains only a single [splice][quotes] `${ ... }`;
+- the splice contains a single call to the method that implements the macro (for example, `inspectCode`);
+- the call to the macro implementation receives the _quoted_ parameters (that is, `'x` instead of `x`) and a contextual `Quotes`.
+
+We will dig deeper into these concepts later in this and the following sections.
+
+Calling our `inspect` macro with `inspect(sys error "abort")` prints a string representation of the argument expression at compile time:
+```
+scala.sys.error("abort")
+```
+
+
+### Macros and Type Parameters
+
+If the macro has type parameters, the implementation will also need to know about them.
+Just like `scala.quoted.Expr[T]` represents a Scala expression of type `T`, we use `scala.quoted.Type[T]` to represent the Scala type `T`.
+
+```scala
+inline def logged[T](inline x: T): T = ${ loggedCode('x) }
+
+def loggedCode[T](x: Expr[T])(using Type[T], Quotes): Expr[T] = ...
+```
+Both the instance of `Type[T]` and the contextual `Quotes` are automatically provided by the splice in the corresponding inline method (that is, `logged`) and can be used by the macro implementation.
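+
+For illustration, here is one possible sketch of such a `loggedCode` implementation (not the only way to write it):
+
+```scala
+import scala.quoted.*
+
+def loggedCode[T](x: Expr[T])(using Type[T], Quotes): Expr[T] =
+  '{
+    val result: T = $x  // T is usable inside the quote thanks to the contextual Type[T]
+    println("logged: " + result)
+    result
+  }
+```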
+
+
+### Defining and Using Macros
+
+A key difference between inlining and macros is the way they are evaluated.
+Inlining works by rewriting code and performing optimisations based on rules the compiler knows.
+On the other hand, a macro executes user-written code that generates the code that the macro expands to.
+
+Technically, compiling the inlined code `${ inspectCode('x) }` calls the method `inspectCode` _at compile time_ (through Java reflection), and the method `inspectCode` then executes as normal code.
+
+To be able to execute `inspectCode`, we need to compile its source code first.
+As a technical consequence, we cannot define and use a macro in the **same class/file**.
+However, it is possible to have the macro definition and its call in the **same project** as long as the implementation of the macro can be compiled first.
+
+> ##### Suspended Files
+> To allow defining and using macros in the same project, only those calls to macros that have already been compiled are expanded.
+> For all other (unknown) macro calls, the compilation of the file is _suspended_.
+> Suspended files are only compiled after all non-suspended files have been successfully compiled.
+> In some cases, you will have _cyclic dependencies_ that will block the completion of the compilation.
+> To get more information on which files are suspended you can use the `-Xprint-suspension` compiler flag.
+
+
+### Example: Statically Evaluating `power` with Macros
+
+Let us recall our definition of `power` from the section on [Inline][inline] that specialized the computation of `xⁿ` for statically known values of `n`.
+```scala
+inline def power(x: Double, inline n: Int): Double =
+ inline if n == 0 then 1.0
+ else inline if n % 2 == 1 then x * power(x, n - 1)
+ else power(x * x, n / 2)
+```
+In the remainder of this section, we will define a macro that computes `xⁿ` for statically known values `x` and `n`.
+While this is also possible purely with `inline`, implementing it with macros will illustrate a few things.
+
+```scala
+inline def power(inline x: Double, inline n: Int) =
+ ${ powerCode('x, 'n) }
+
+def powerCode(
+ x: Expr[Double],
+ n: Expr[Int]
+)(using Quotes): Expr[Double] = ...
+```
+
+## Simple Expressions
+
+We could implement `powerCode` as follows:
+```scala
+def pow(x: Double, n: Int): Double =
+ if n == 0 then 1 else x * pow(x, n - 1)
+
+def powerCode(
+ x: Expr[Double],
+ n: Expr[Int]
+)(using Quotes): Expr[Double] =
+ val value: Double = pow(x.valueOrAbort, n.valueOrAbort)
+ Expr(value)
+```
+Here, the `pow` operation is a simple Scala function that computes the value of `xⁿ`.
+The interesting part is how we create and look into the `Expr`s.
+
+
+### Creating Expression From Values
+
+Let's first look at `Expr.apply(value)`. Given a value of type `T`, this call will return an expression containing the code representing the given value (that is, of type `Expr[T]`).
+The argument to `Expr.apply` is computed at compile-time; at runtime, we only need to instantiate this value.
+
+Creating expressions from values works for all _primitive types_, _tuples_ of any arity, `Class`, `Array`, `Seq`, `Set`, `List`, `Map`, `Option`, `Either`, `BigInt`, `BigDecimal`, and `StringContext`.
+Other types can also work if a `ToExpr` is implemented for them; we will [see this later][quotes].
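+
+For example, two illustrative one-liners:
+
+```scala
+import scala.quoted.*
+
+def greeting(using Quotes): Expr[String] = Expr("Hello, " + "world")  // code for the literal "Hello, world"
+def names(using Quotes): Expr[List[String]] = Expr(List("a", "b"))    // uses the ToExpr instance for List
+```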
+
+
+### Extracting Values from Expressions
+
+The second method we use in the implementation of `powerCode` is `Expr[T].valueOrAbort`, which has an effect opposite to `Expr.apply`.
+It attempts to extract a value of type `T` from an expression of type `Expr[T]`.
+This can only succeed if the expression directly contains the code of a value; otherwise, it will throw an exception that stops the macro expansion and reports that the expression did not correspond to a value.
+
+Instead of `valueOrAbort`, we could also use the `value` operation, which will return an `Option`.
+This way we can report the error with a custom error message.
+
+#### Reporting Custom Error Messages
+
+The contextual `Quotes` parameter provides a `report` object that we can use to report a custom error message.
+Within a macro implementation method, you can access the contextual `Quotes` parameter with the `quotes` method
+(imported with `import scala.quoted.*`), then import the `report` object by `import quotes.reflect.report`.
+
+#### Providing the Custom Error
+
+We will provide the custom error message by calling `errorAndAbort` on the `report` object as follows:
+```scala
+def powerCode(
+ x: Expr[Double],
+ n: Expr[Int]
+)(using Quotes): Expr[Double] =
+ import quotes.reflect.report
+ (x.value, n.value) match
+ case (Some(base), Some(exponent)) =>
+ val value: Double = pow(base, exponent)
+ Expr(value)
+ case (Some(_), _) =>
+ report.errorAndAbort("Expected a known value for the exponent, but was " + n.show, n)
+ case _ =>
+ report.errorAndAbort("Expected a known value for the base, but was " + x.show, x)
+```
+
+Alternatively, we can also use the `Expr.unapply` extractor
+
+```scala
+ ...
+ (x, n) match
+ case (Expr(base), Expr(exponent)) =>
+ val value: Double = pow(base, exponent)
+ Expr(value)
+ case (Expr(_), _) => ...
+ case _ => ...
+```
+The operations `value`, `valueOrAbort`, and `Expr.unapply` will work for all _primitive types_, _tuples_ of any arity, `Option`, `Seq`, `Set`, `Map`, `Either`, and `StringContext`.
+Other types can also work if a `FromExpr` is implemented for them; we will [see this later][quotes].
+
+
+### Showing Expressions
+
+In the implementation of `inspectCode`, we have already seen how to convert expressions to the string representation of their _source code_ using the `.show` method.
+This can be useful to perform debugging on macro implementations:
+
+
+```scala
+def debugPowerCode(
+ x: Expr[Double],
+ n: Expr[Int]
+)(using Quotes): Expr[Double] =
+ println(
+ s"powerCode \n" +
+ s" x := ${x.show}\n" +
+ s" n := ${n.show}")
+ val code = powerCode(x, n)
+ println(s" code := ${code.show}")
+ code
+```
+
+
+### Working with Varargs
+
+Varargs in Scala are represented with `Seq`, hence when we write a macro with a _vararg_, it will be passed as an `Expr[Seq[T]]`.
+It is possible to recover each individual argument (of type `Expr[T]`) using the `scala.quoted.Varargs` extractor.
+
+```scala
+import scala.quoted.* // imports `Varargs`, `Quotes`, etc.
+
+inline def sumNow(inline nums: Int*): Int =
+ ${ sumCode('nums) }
+
+def sumCode(nums: Expr[Seq[Int]])(using Quotes): Expr[Int] =
+ import quotes.reflect.report
+ nums match
+ case Varargs(numberExprs) => // numberExprs: Seq[Expr[Int]]
+ val numbers: Seq[Int] = numberExprs.map(_.valueOrAbort)
+ Expr(numbers.sum)
+ case _ => report.errorAndAbort(
+ "Expected explicit varargs sequence. " +
+ "Notation `args*` is not supported.", nums)
+```
+
+The extractor will match a call to `sumNow(1, 2, 3)` and extract a `Seq[Expr[Int]]` containing the code of each parameter.
+But, if we try to match the argument of the call `sumNow(nums*)`, the extractor will not match.
+
+`Varargs` can also be used as a constructor. `Varargs(Expr(1), Expr(2), Expr(3))` will return an `Expr[Seq[Int]]`.
+We will see how this can be useful later.
+
+
+## Complex Expressions
+So far, we have only seen how to construct and destruct expressions that correspond to simple values.
+In order to work with more complex expressions, Scala 3 offers different metaprogramming facilities ranging from
+
+- additional constructors like `Expr.apply`,
+- through [quoted pattern matching][quotes],
+- to a full [reflection API][tasty];
+
+each increasing in complexity and potentially losing safety guarantees.
+It is generally recommended to prefer simple APIs over more advanced ones.
+In the remainder of this section, we introduce some additional constructors and destructors,
+while subsequent chapters introduce the more advanced APIs.
+
+### Collections
+
+We have seen how to convert a `List[Int]` into an `Expr[List[Int]]` using `Expr.apply`.
+How about converting a `List[Expr[Int]]` into an `Expr[List[Int]]`?
+We mentioned that `Varargs.apply` can do this for sequences; likewise, for other collection types, corresponding methods are available:
+
+* `Expr.ofList`: Transform a `List[Expr[T]]` into `Expr[List[T]]`
+* `Expr.ofSeq`: Transform a `Seq[Expr[T]]` into `Expr[Seq[T]]` (just like `Varargs`)
+* `Expr.ofTupleFromSeq`: Transform a `Seq[Expr[T]]` into `Expr[Tuple]`
+* `Expr.ofTuple`: Transform a `(Expr[T1], ..., Expr[Tn])` into `Expr[(T1, ..., Tn)]`
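+
+For instance, a small sketch using `Expr.ofList` (the helper `sumAll` is only illustrative):
+
+```scala
+import scala.quoted.*
+
+def sumAll(xs: List[Expr[Int]])(using Quotes): Expr[Int] =
+  val listExpr: Expr[List[Int]] = Expr.ofList(xs)  // List[Expr[Int]] => Expr[List[Int]]
+  '{ $listExpr.sum }                               // generate code that sums the list at runtime
+```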
+
+### Simple Blocks
+
+The constructor `Expr.block` provides a simple way to create a block of code `{ stat1; ...; statn; expr }`.
+Its first argument is a list of all the statements, and the second argument is the expression at the end of the block.
+
+```scala
+inline def test(inline ignore: Boolean, computation: => Unit): Boolean =
+ ${ testCode('ignore, 'computation) }
+
+def testCode(ignore: Expr[Boolean], computation: Expr[Unit])(using Quotes) =
+ if ignore.valueOrAbort then Expr(false)
+ else Expr.block(List(computation), Expr(true))
+```
+
+The `Expr.block` constructor is useful when we want to generate code containing several side effects.
+The macro call `test(false, EXPRESSION)` will generate `{ EXPRESSION; true }`, while the call `test(true, EXPRESSION)` will result in `false`.
+
+### Simple Matching
+
+The method `Expr.matches` can be used to check if one expression is equal to another.
+With this method we could implement a `value` operation for `Expr[Boolean]` as follows.
+
+```scala
+def value(boolExpr: Expr[Boolean]): Option[Boolean] =
+ if boolExpr.matches(Expr(true)) then Some(true)
+ else if boolExpr.matches(Expr(false)) then Some(false)
+ else None
+```
+
+It may also be used to compare two user-written expressions.
+Note that `matches` only performs a limited amount of normalization: while, for instance, the Scala expression `2` matches the expression `{ 2 }`, this is _not the case_ for the expression `{ val x: Int = 2; x }`.
+
+### Arbitrary Expressions
+
+Last but not least, it is possible to create an `Expr[T]` from arbitrary Scala code by enclosing it in [quotes][quotes].
+For example, `'{ ${expr}; true }` will generate an `Expr[Boolean]` equivalent to `Expr.block(List(expr), Expr(true))`.
+The subsequent section on [Quoted Code][quotes] presents quotes in more detail.
+
+[contributing]: {% link scala3/contribute-to-docs.md %}
+[best-practices]: {% link _overviews/scala3-macros/best-practices.md %}
+[compiletime]: {% link _overviews/scala3-macros/tutorial/compiletime.md %}
+[migration]: https://scalacenter.github.io/scala-3-migration-guide/docs/macros/macro-libraries.html
+[faq]: {% link _overviews/scala3-macros/faq.md %}
+[inline]: {% link _overviews/scala3-macros/tutorial/inline.md %}
+[macros]: {% link _overviews/scala3-macros/tutorial/macros.md %}
+[quotes]: {% link _overviews/scala3-macros/tutorial/quotes.md %}
+[tasty]: {% link _overviews/scala3-macros/tutorial/reflection.md %}
diff --git a/_overviews/scala3-macros/tutorial/quotes.md b/_overviews/scala3-macros/tutorial/quotes.md
new file mode 100644
index 0000000000..b94d4bb6ab
--- /dev/null
+++ b/_overviews/scala3-macros/tutorial/quotes.md
@@ -0,0 +1,605 @@
+---
+type: section
+title: Quoted Code
+num: 5
+
+previous-page: macros
+next-page: reflection
+---
+
+## Code blocks
+A quoted code block `'{ ... }` is syntactically similar to a string quote `" ... "`, with the difference that the former contains typed code.
+To insert code into other code, we can use the syntax `$expr` or `${ expr }`, where `expr` is of type `Expr[T]`.
+Intuitively, the code directly within the quote (`'{ ... }`) is not executed now, while the code within the splice (`${ ... }`) is evaluated and the results spliced into the surrounding expression.
+
+```scala
+val msg = Expr("Hello")
+val printHello = '{ print($msg) }
+println(printHello.show) // print("Hello")
+```
+
+In general, the quote delays the execution while the splice makes it happen before the surrounding code.
+This generalisation allows us to also give meaning to a `${ ... }` that is not within a quote. This evaluates the code within the splice at compile-time and places the result in the generated code.
+Due to some technical considerations, top-level splices are only allowed directly within `inline` definitions, which we call [macros][macros].
+
+It is possible to write a quote within a quote, but this pattern is not common when writing macros.
+
+## Level consistency
+One cannot simply write any arbitrary code within quotes and within splices, as one part of the program will live at compile-time and the other will live at runtime.
+Consider the following ill-constructed code:
+
+```scala
+def myBadCounter1(using Quotes): Expr[Int] = {
+ var x = 0
+ '{ x += 1; x }
+}
+```
+The problem with this code is that `x` exists during compilation, but then we try to use it after the compiler has finished (maybe even in another machine).
+Clearly, it would be impossible to access its value and update it.
+
+Now consider the dual version, where we define the variable at runtime and try to access it at compile-time:
+```scala
+def myBadCounter2(using Quotes): Expr[Int] = '{
+ var x = 0
+ ${ x += 1; 'x }
+}
+```
+Clearly, this should not work as the variable does not exist yet.
+
+To make sure you cannot write programs that contain these kinds of problems, we restrict the kinds of references allowed in quote environments.
+
+We introduce _levels_ as a count of the number of quotes minus the number of splices surrounding an expression or definition.
+
+```scala
+// level 0
+'{ // level 1
+ var x = 0
+ ${ // level 0
+ x += 1
+ 'x // level 1
+ }
+}
+```
+
+The system will allow references to global definitions such as `println` at any level, but will restrict references to local definitions.
+A local definition can only be accessed if it is defined at the same level as its reference.
+This will catch the errors in `myBadCounter1` and `myBadCounter2`.
+
+Even though we cannot refer to a variable inside of a quote, we can still pass its current value through a quote by lifting the value to an expression using `Expr.apply`.
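+
+For example, a corrected counter could lift the compile-time value into the quote (a sketch):
+
+```scala
+def myCounter(using Quotes): Expr[Int] =
+  var x = 0
+  x += 1                // level 0: runs while the macro expands
+  '{ ${ Expr(x) } + 1 } // the current value of x is lifted and spliced in as a constant
+```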
+
+
+## Generics
+
+When using type parameters or other kinds of abstract types with quoted code, we will need to keep track of some of these types explicitly.
+Scala uses erased-types semantics for its generics.
+This implies that types are erased during compilation, so that not all of them need to be tracked at runtime.
+
+Consider the following code:
+```scala
+def evalAndUse[T](x: Expr[T])(using Quotes) = '{
+ val x2: T = $x // error
+ ... // use x2
+}
+```
+
+Here, we will get an error telling us that we are missing a contextual `Type[T]`.
+Therefore, we can easily fix it by writing:
+```scala
+def evalAndUse[T](x: Expr[T])(using Type[T])(using Quotes) = '{
+ val x2: T = $x
+ ... // use x2
+}
+```
+This code will be equivalent to this more verbose version:
+```scala
+def evalAndUse[T](x: Expr[T])(using t: Type[T])(using Quotes) = '{
+ val x2: t.Underlying = $x
+ ... // use x2
+}
+```
+Note that `Type` has a type member called `Underlying` that refers to the type held within the `Type`; in this case, `t.Underlying` is `T`.
+Even if we only use the `Type` implicitly, it is generally better to keep it contextual, as some changes inside the quote may require it.
+The less verbose version is usually the best way to write the types, as it is much simpler to read.
+In some cases, we will not statically know the type within the `Type` and will need to use `t.Underlying` to refer to it.
+
+When do we need this extra `Type` parameter?
+* When a type is abstract and it is used at a level that is higher than the current level.
+
+When you add a `Type` contextual parameter to a method, you will either get it from another context parameter or implicitly with a call to `Type.of`:
+```scala
+evalAndUse(Expr(3))
+// is equivalent to
+evalAndUse[Int](Expr(3))(using Type.of[Int])
+```
+As you may have guessed, not every type can be used as a parameter to `Type.of[..]` out of the box.
+For example, we cannot recover abstract types that have already been erased:
+```scala
+def evalAndUse[T](x: Expr[T])(using Quotes) =
+ given Type[T] = Type.of[T] // error
+ '{
+ val x2: T = $x
+ ... // use x2
+ }
+```
+
+But we can write more complex types that depend on these abstract types.
+For example, if we look for or explicitly construct a `Type[List[T]]`, then the system will require a `Type[T]` in the current context to compile.
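+
+A small sketch of this, assuming nothing beyond the API described above:
+
+```scala
+def listType[T](using Type[T])(using Quotes): Type[List[T]] =
+  Type.of[List[T]] // compiles only because a Type[T] is available in scope
+```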
+
+Good code should only add `Type`s to the context parameters and never use them explicitly.
+However, explicit use is useful while debugging, though it comes at the cost of conciseness and clarity.
+
+
+## ToExpr
+The `Expr.apply` method uses instances of `ToExpr` to generate an expression that will create a copy of the value.
+```scala
+object Expr:
+ def apply[T](x: T)(using Quotes, ToExpr[T]): Expr[T] =
+ summon[ToExpr[T]].apply(x)
+```
+
+`ToExpr` is defined as follows:
+```scala
+trait ToExpr[T]:
+ def apply(x: T)(using Quotes): Expr[T]
+```
+
+The `ToExpr.apply` method will take a value `T` and generate code that will construct a copy of this value at runtime.
+
+We can define our own `ToExpr`s like:
+```scala
+given ToExpr[Boolean] with {
+ def apply(x: Boolean)(using Quotes) =
+ if x then '{true}
+ else '{false}
+}
+
+given ToExpr[StringContext] with {
+ def apply(stringContext: StringContext)(using Quotes) =
+ val parts = Varargs(stringContext.parts.map(Expr(_)))
+ '{ StringContext($parts*) }
+}
+```
+The `Varargs` constructor just creates an `Expr[Seq[T]]` which we can efficiently splice as a varargs.
+In general, any sequence can be spliced with `$mySeq*` to splice it as a varargs.
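+
+With instances like these in scope, `Expr.apply` can lift plain values directly (a sketch, assuming `import scala.quoted.*`):
+
+```scala
+def lifted(using Quotes): (Expr[Boolean], Expr[StringContext]) =
+  (Expr(true), Expr(StringContext("Hello, ", "!")))
+```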
+
+## Quoted patterns
+Quotes can also be used to check if an expression is equivalent to another or to deconstruct an expression into its parts.
+
+
+### Matching exact expression
+
+The simplest thing we can do is to check if an expression matches another known expression.
+Below, we show how we can match some expressions using `case '{...} =>`.
+
+```scala
+def valueOfBoolean(x: Expr[Boolean])(using Quotes): Option[Boolean] =
+ x match
+ case '{ true } => Some(true)
+ case '{ false } => Some(false)
+ case _ => None
+
+def valueOfBooleanOption(x: Expr[Option[Boolean]])(using Quotes): Option[Option[Boolean]] =
+ x match
+ case '{ Some(true) } => Some(Some(true))
+ case '{ Some(false) } => Some(Some(false))
+ case '{ None } => Some(None)
+ case _ => None
+```
+
+### Matching partial expression
+
+To make things more compact, we can also match a part of the expression using a splice (`$`) to match arbitrary code and extract it.
+
+```scala
+def valueOfBooleanOption(x: Expr[Option[Boolean]])(using Quotes): Option[Option[Boolean]] =
+ x match
+ case '{ Some($boolExpr) } => Some(valueOfBoolean(boolExpr))
+ case '{ None } => Some(None)
+ case _ => None
+```
+
+### Matching types of expression
+
+We can also match against code of an arbitrary type `T`.
+Below, we match against `$x` of type `T` and we get out an `x` of type `Expr[T]`.
+
+```scala
+def exprOfOption[T: Type](x: Expr[Option[T]])(using Quotes): Option[Expr[T]] =
+ x match
+ case '{ Some($x) } => Some(x) // x: Expr[T]
+ case '{ None } => Some(None)
+ case _ => None
+```
+
+We can also check for the type of an expression:
+
+```scala
+def valueOf(x: Expr[Any])(using Quotes): Option[Any] =
+ x match
+ case '{ $x: Boolean } => valueOfBoolean(x) // x: Expr[Boolean]
+ case '{ $x: Option[Boolean] } => valueOfBooleanOption(x) // x: Expr[Option[Boolean]]
+ case _ => None
+```
+Or similarly for a partial expression:
+
+```scala
+case '{ Some($x: Boolean) } => // x: Expr[Boolean]
+```
+
+### Matching receiver of methods
+
+When we want to match the receiver of a method, we need to explicitly state its type:
+
+```scala
+case '{ ($ls: List[Int]).sum } =>
+```
+
+If we had written `$ls.sum`, we would not have been able to determine the type of `ls`, nor which `sum` method is being called.
+
+Another common case where we need type annotations is for infix operations:
+```scala
+case '{ ($x: Int) + ($y: Int) } =>
+case '{ ($x: Double) + ($y: Double) } =>
+case ...
+```
+
+### Matching function expressions
+
+Let's start with the most straightforward example, matching an identity function expression:
+
+```scala
+def matchIdentityFunction[A: Type](func: Expr[A => A])(using Quotes): Unit =
+ func match
+ case '{ (arg: A) => arg } =>
+```
+The above matches function expressions that just return their arguments, like:
+
+```scala
+(value: Int) => value
+```
+
+We can also match more complex expressions, like method call chains:
+
+```scala
+def matchMethodCallChain(func: Expr[String => String])(using Quotes) =
+ func match
+ case '{ (arg: String) => arg.toLowerCase.strip.trim } =>
+```
+
+But what about cases where we want more flexibility (e.g. we know the subset of methods that will be called, but not necessarily their order)?
+
+#### Iterative deconstruction of a function expression
+
+Let's imagine we need a macro that collects names of methods used in an expression of type `FieldName => FieldName`, for a definition of `FieldName`:
+
+```scala
+trait FieldName:
+ def uppercase: FieldName
+ def lowercase: FieldName
+```
+
+The implementation itself would look like this:
+
+```scala
+def collectUsedMethods(func: Expr[FieldName => FieldName])(using Quotes): List[String] =
+ def recurse(current: Expr[FieldName => FieldName], acc: List[String])(using Quotes): List[String] =
+ current match
+ // $body is the next tree with the '.lowercase' call stripped away
+ case '{ (arg: FieldName) => ($body(arg): FieldName).lowercase } =>
+ recurse(body, "lowercase" :: acc) // body: Expr[FieldName => FieldName]
+
+ // $body is the next tree with the '.uppercase' call stripped away
+ case '{ (arg: FieldName) => ($body(arg): FieldName).uppercase } =>
+ recurse(body, "uppercase" :: acc) // body: Expr[FieldName => FieldName]
+
+ // this matches an identity function, i.e. the end of our loop
+ case '{ (arg: FieldName) => arg } => acc
+ end recurse
+
+ recurse(func, Nil)
+```
+
+For more details on how patterns like `$body(arg)` work, please refer to the documentation section on [HOAS patterns](https://dotty.epfl.ch/docs/reference/metaprogramming/macros.html#hoas-patterns-1).
+
+If we were to use this on an expression like this one:
+```scala
+(name: FieldName) => name.lowercase.uppercase.lowercase
+```
+the result would evaluate to `List("lowercase", "uppercase", "lowercase")`.
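+
+To run the collector at compile time, it could be wired into a macro along these lines (the entry-point names below are illustrative, not part of the original example):
+
+```scala
+inline def usedMethods(inline func: FieldName => FieldName): List[String] =
+  ${ usedMethodsImpl('func) }
+
+def usedMethodsImpl(func: Expr[FieldName => FieldName])(using Quotes): Expr[List[String]] =
+  Expr(collectUsedMethods(func)) // the ToExpr[List[String]] instance lifts the result back into code
+```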
+
+### Matching types
+
+So far, we assumed that the types within quote patterns would be statically known.
+Quote patterns also allow for type parameters, which we will see in this section.
+
+#### Type parameters in patterns
+
+Consider the function `exprOfOption` that we have already seen:
+```scala
+def exprOfOption[T: Type](x: Expr[Option[T]])(using Quotes): Option[Expr[T]] =
+ x match
+ case '{ Some($x: T) } => Some(x) // x: Expr[T]
+ // ^^^ type ascription with type T
+ ...
+```
+
+Note that this time we have added the `T` explicitly in the pattern, even though it could be inferred.
+By referring to the type parameter `T` in the pattern, we are required to have a given `Type[T]` in scope.
+This implies that `$x: T` will only match if `x` is of type `Expr[T]`.
+In this particular case, this condition will always be true.
+
+Now consider the following variant where `x` is an optional value with a (statically) unknown element type:
+
+```scala
+def exprOfOptionOf[T: Type](x: Expr[Option[Any]])(using Quotes): Option[Expr[T]] =
+ x match
+ case '{ Some($x: T) } => Some(x) // x: Expr[T]
+ case _ => None
+```
+This time, the pattern `Some($x: T)` will only match if the type of the `Option` is `Some[T]`.
+
+```scala
+exprOfOptionOf[Int]('{ Some(3) }) // Some('{3})
+exprOfOptionOf[Int]('{ Some("a") }) // None
+```
+
+#### Type variables in quoted patterns
+
+Quoted code may contain types that are not known outside of the quote.
+We can match on them using pattern type variables.
+Just as in a normal pattern, the type variables are written using lower case names.
+
+```scala
+def exprOptionToList(x: Expr[Option[Any]])(using Quotes): Option[Expr[List[Any]]] =
+ x match
+ case '{ Some($x: t) } =>
+ // ^^^ this binds the type `t` in the body of the case
+ Some('{ List[t]($x) }) // x: Expr[List[t]]
+ case '{ None } =>
+ Some('{ Nil })
+ case _ => None
+```
+
+The pattern `$x: t` will match an expression of any type and `t` will be bound to the type of the pattern.
+This type variable is only valid in the right-hand side of the `case`.
+In this example, we use it to construct the list `List[t]($x)` (`List($x)` would also work).
+As this is a type that is not statically known, we need a given `Type[t]` in scope.
+Luckily, the quoted pattern will automatically provide this for us.
+
+The simple pattern `case '{ $expr: tpe } =>` is very useful if we want to know the precise type of the expression.
+```scala
+val expr: Expr[Option[Int]] = ...
+expr match
+ case '{ $expr: tpe } =>
+ Type.show[tpe] // could be: Option[Int], Some[Int], None, Option[1], Option[2], ...
+ '{ val x: tpe = $expr; x } // binds the value without widening the type
+ ...
+```
+
+In some cases we need to define a pattern variable that is referenced several times or has some type bounds.
+To achieve this, it is possible to create pattern variables at the start of the pattern using `type t` with a type pattern variable.
+
+```scala
+/**
+ * Use: Converts a redundant `list.map(f).map(g)` to only use one call
+ * to `map`: `list.map(y => g(f(y)))`.
+ */
+def fuseMap[T: Type](x: Expr[List[T]])(using Quotes): Expr[List[T]] = x match {
+ case '{
+ type u
+ type v
+ ($ls: List[`u`])
+ .map($f: `u` => `v`)
+ .map($g: `v` => T)
+ } =>
+ '{ $ls.map(y => $g($f(y))) }
+ case _ => x
+}
+```
+
+Here, we define two type variables `u` and `v` and then refer to them using `` `u` `` and `` `v` ``.
+We do not refer to them using `u` or `v` (without backticks) because those would be interpreted as new type variables with the same variable name.
+This notation follows the normal [stable identifier patterns](https://www.scala-lang.org/files/archive/spec/2.13/08-pattern-matching.html#stable-identifier-patterns) syntax.
+Furthermore, if the type variable needs to be constrained, we can add bounds directly on the type definition: `case '{ type u <: AnyRef; ... } =>`.
+
+Note that the previous case could also be written as `case '{ ($ls: List[u]).map[v]($f).map[T]($g) } =>`.
+
+#### Quoted type patterns
+
+Types represented with `Type[T]` can be matched on using the pattern `case '[...] =>`.
+
+```scala
+inline def mirrorFields[T]: List[String] = ${mirrorFieldsImpl[T]}
+
+def mirrorFieldsImpl[T: Type](using Quotes): Expr[List[String]] =
+
+ def rec[A : Type]: List[String] = Type.of[A] match
+ case '[field *: fields] =>
+ Type.show[field] :: rec[fields]
+ case '[EmptyTuple] =>
+ Nil
+ case _ =>
+ quotes.reflect.report.errorAndAbort("Expected known tuple but got: " + Type.show[A])
+
+ Expr(rec)
+```
+```scala
+mirrorFields[EmptyTuple] // Nil
+mirrorFields[(Int, String, Int)] // List("scala.Int", "java.lang.String", "scala.Int")
+mirrorFields[Tuple] // error: Expected known tuple but got: Tuple
+```
+
+As with expression quote patterns, type variables are represented using lower case names.
+
+## FromExpr
+
+The `Expr.value`, `Expr.valueOrAbort`, and `Expr.unapply` methods use instances of `FromExpr` to extract the value if possible.
+```scala
+extension [T](expr: Expr[T]):
+ def value(using Quotes)(using fromExpr: FromExpr[T]): Option[T] =
+ fromExpr.unapply(expr)
+
+  def valueOrAbort(using Quotes)(using fromExpr: FromExpr[T]): T =
+    fromExpr.unapply(expr).getOrElse(quotes.reflect.report.errorAndAbort("...", expr))
+end extension
+
+object Expr:
+ def unapply[T](expr: Expr[T])(using Quotes)(using fromExpr: FromExpr[T]): Option[T] =
+ fromExpr.unapply(expr)
+```
+
+`FromExpr` is defined as follows:
+```scala
+trait FromExpr[T]:
+ def unapply(x: Expr[T])(using Quotes): Option[T]
+```
+
+The `FromExpr.unapply` method takes an expression `x` and attempts to recover the value it represents, returning `None` if it cannot.
+
+We can define our own `FromExpr`s like so:
+```scala
+given FromExpr[Boolean] with {
+ def unapply(x: Expr[Boolean])(using Quotes): Option[Boolean] =
+ x match
+ case '{ true } => Some(true)
+ case '{ false } => Some(false)
+ case _ => None
+}
+
+given FromExpr[StringContext] with {
+ def unapply(x: Expr[StringContext])(using Quotes): Option[StringContext] = x match {
+ case '{ new StringContext(${Varargs(Exprs(args))}*) } => Some(StringContext(args*))
+ case '{ StringContext(${Varargs(Exprs(args))}*) } => Some(StringContext(args*))
+ case _ => None
+ }
+}
+```
+Note that we handled two cases for `StringContext`.
+As it is a `case class`, it can be created with `new StringContext` or with `StringContext.apply` from the companion object.
+We also used the `Varargs` extractor to match the arguments of type `Expr[Seq[String]]` into a `Seq[Expr[String]]`.
+Then we used the `Exprs` to match known constants in the `Seq[Expr[String]]` to get a `Seq[String]`.
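+
+With these instances in scope, `Expr.value` and the `Expr` extractor become available on the corresponding expressions (a sketch):
+
+```scala
+def describe(x: Expr[Boolean])(using Quotes): String =
+  x.value match
+    case Some(b) => s"statically known boolean: $b"
+    case None    => "not a constant"
+```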
+
+
+## The Quotes
+`Quotes` is the main entry point for the creation of all quotes.
+This context is usually just passed around through contextual abstractions (`using` and `?=>`).
+Each quote scope will have its own `Quotes`.
+New scopes are introduced each time a splice is introduced (`${ ... }`).
+Though it looks like a splice takes an expression as argument, it actually takes a `Quotes ?=> Expr[T]`.
+Therefore, we could actually write it explicitly as `${ (using q) => ... }`.
+This might be useful when debugging to avoid generated names for these scopes.
+
+The method `scala.quoted.quotes` provides a simple way to use the current `Quotes` without naming it.
+It is usually imported along with the `Quotes` using `import scala.quoted.*`.
+
+```scala
+${ (using q1) => body(using q1) }
+// equivalent to
+${ body(using quotes) }
+```
+Warning: if you explicitly name a `Quotes` parameter `quotes`, you will shadow this definition.
+
+When we write a top-level splice in a macro, we are calling something similar to the following definition.
+This splice will provide the initial `Quotes` associated with the macro expansion.
+```scala
+def $[T](x: Quotes ?=> Expr[T]): T = ...
+```
+
+When we have a splice within a quote, the inner quote context will depend on the outer one.
+This link is represented using the `Quotes.Nested` type.
+Users of quotes will almost never need to use `Quotes.Nested`.
+These details are only useful for advanced macros that will inspect code and may encounter details of quotes and splices.
+
+```scala
+def f(using q1: Quotes) = '{
+  ${ (using q2: q1.Nested) =>
+ ...
+ }
+}
+```
+
+We can imagine that a nested splice is like the following method, where `ctx` is the context received by the surrounding quote.
+```scala
+def $[T](using q: Quotes)(x: q.Nested ?=> Expr[T]): T = ...
+```
+
+## β-reduction
+When we have a lambda applied to an argument in a quote `'{ ((x: Int) => x + x)(y) }`, we do not reduce it within the quote; the code is kept as-is.
+There is an optimisation that will β-reduce all lambdas directly applied to parameters to avoid the creation of a closure.
+This will not be visible from the quote's perspective.
+
+Sometimes it is useful to perform this β-reduction on the quotes directly.
+We provide the function `Expr.betaReduce[T]` that receives an `Expr[T]` and β-reduces if it contains a directly-applied lambda.
+
+```scala
+Expr.betaReduce('{ ((x: Int) => x + x)(y) }) // returns '{ val x = y; x + x }
+```
+
+
+## Summon values
+There are two ways to summon values in a macro.
+The first is to have a `using` parameter in the inline method that is passed explicitly to the macro implementation.
+
+```scala
+inline def setOf[T](using ord: Ordering[T]): Set[T] =
+ ${ setOfCode[T]('ord) }
+
+def setOfCode[T: Type](ord: Expr[Ordering[T]])(using Quotes): Expr[Set[T]] =
+ '{ TreeSet.empty[T](using $ord) }
+```
+
+In this scenario, the context parameter is found before the macro is expanded.
+If not found, the macro will not be expanded.
+
+The second way is using `Expr.summon`.
+This allows us to programmatically search for given instances in the implicit scope.
+The following example is similar to the previous example:
+
+```scala
+inline def setOf[T]: Set[T] =
+ ${ setOfCode[T] }
+
+def setOfCode[T: Type](using Quotes): Expr[Set[T]] =
+ Expr.summon[Ordering[T]] match
+ case Some(ord) => '{ TreeSet.empty[T](using $ord) }
+ case _ => '{ HashSet.empty[T] }
+```
+
+The difference is that, in the second scenario, we expand the macro before the implicit search is performed. We can therefore write arbitrary code to handle the case when an `Ordering[T]` is not found.
+Here, we used `HashSet` instead of `TreeSet` because the former does not need an `Ordering`.
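+
+At the call site, both outcomes are possible (a sketch, in a file separate from the macro; `Box` is just an illustrative class with no `Ordering`):
+
+```scala
+class Box
+
+val ints: Set[Int]  = setOf[Int] // an Ordering[Int] is found: expands to TreeSet.empty[Int](using ...)
+val boxes: Set[Box] = setOf[Box] // no Ordering[Box] is found: expands to HashSet.empty[Box]
+```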
+
+## Quoted Type Classes
+
+In the previous example, we showed how to use the `Expr[Ordering[T]]` type class explicitly by leveraging the `using` argument clause. This is perfectly fine, but it is not very convenient if we need to use the type class multiple times. To show this, we will
+use a `powerCode` function that can be used with any numeric type.
+
+First, it can be useful to make the `Expr[Numeric[Num]]` a contextual (given) parameter. To do this, we need to pass it explicitly in `power` when calling `powerMacro`, because there we have a given `Numeric[Num]` but require an `Expr[Numeric[Num]]`. But then we can ignore it in `powerMacro` and any other place that only passes it around.
+
+```scala
+inline def power[Num](x: Num, inline n: Int)(using num: Numeric[Num]) =
+ ${ powerMacro('x, 'n)(using 'num) }
+
+def powerMacro[Num: Type](x: Expr[Num], n: Expr[Int])(using Expr[Numeric[Num]])(using Quotes): Expr[Num] =
+ powerCode(x, n.valueOrAbort)
+```
+
+To use this type class we need a given `Numeric[Num]`, but we only have an `Expr[Numeric[Num]]`; therefore we need to splice this expression into the generated code. To make it available, we can simply splice it into a given definition.
+
+```scala
+def powerCode[Num: Type](x: Expr[Num], n: Int)(using num: Expr[Numeric[Num]])(using Quotes): Expr[Num] =
+ if (n == 0) '{ $num.one }
+ else if (n % 2 == 0) '{
+ given Numeric[Num] = $num
+ val y = $x * $x
+ ${ powerCode('y, n / 2) }
+ }
+ else '{
+ given Numeric[Num] = $num
+ $x * ${ powerCode(x, n - 1) }
+ }
+```
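+
+A call site could then look like this (a sketch; the expansion shown in the comment is approximate):
+
+```scala
+val result = power(2.0, 5) // roughly: 2.0 * { val y = 2.0 * 2.0; val y2 = y * y; y2 * num.one }
+```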
+
+
+
+[macros]: {% link _overviews/scala3-macros/tutorial/macros.md %}
+[quotes]: {% link _overviews/scala3-macros/tutorial/quotes.md %}
diff --git a/_overviews/scala3-macros/tutorial/reflection.md b/_overviews/scala3-macros/tutorial/reflection.md
new file mode 100644
index 0000000000..46618a1d4f
--- /dev/null
+++ b/_overviews/scala3-macros/tutorial/reflection.md
@@ -0,0 +1,235 @@
+---
+type: section
+title: Reflection
+num: 6
+
+previous-page: quotes
+---
+
+The reflection API provides a more complex and comprehensive view on the structure of the code.
+It provides a view of *Typed Abstract Syntax Trees* and their properties such as types, symbols, positions and comments.
+
+The API can be used in macros as well as for [inspecting TASTy files][tasty inspection].
+
+## How to use the API
+
+The reflection API is defined in the type `Quotes` as `reflect`.
+The actual instance depends on the current scope, in which quotes or quoted pattern matching is used.
+Hence, every macro method receives `Quotes` as an additional argument.
+Since `Quotes` is contextual, to access its members we either need to name the parameter or summon it.
+The following definition from the standard library details the canonical way of accessing it:
+
+```scala
+package scala.quoted
+
+transparent inline def quotes(using inline q: Quotes): q.type = q
+```
+
+We can use `scala.quoted.quotes` to import the current `Quotes` in scope:
+
+```scala
+import scala.quoted.* // Import `quotes`, `Quotes`, and `Expr`
+
+def f(x: Expr[Int])(using Quotes): Expr[Int] =
+ import quotes.reflect.* // Import `Tree`, `TypeRepr`, `Symbol`, `Position`, .....
+ val tree: Tree = ...
+ ...
+```
+
+This will import all the types and modules (with extension methods) of the API.
+
+## How to navigate the API
+
+The full API can be found in the [API documentation for `scala.quoted.Quotes.reflectModule`][reflection doc].
+Unfortunately, at this stage, this automatically-generated documentation is not very easy to navigate.
+
+The most important element on the page is the hierarchy tree which provides a synthetic overview of the subtyping relationships of
+the types in the API. For each type `Foo` in the tree:
+
+ - the trait `FooMethods` contains the methods available on the type `Foo`
+ - the trait `FooModule` contains the static methods available on the object `Foo`.
+Most notably, constructors (`apply/copy`) and the `unapply` method which provides the extractor(s) required for pattern matching are found here
+ - For all types `Upper` such that `Foo <: Upper`, the methods defined in `UpperMethods` are also available on `Foo`
+
+For example, [`TypeBounds`](https://scala-lang.org/api/3.x/scala/quoted/Quotes$reflectModule.html#TypeBounds-0), a subtype of `TypeRepr`, represents a type tree of the form `T >: L <: U`: a type `T` which is a super type of `L`
+and a subtype of `U`. In [`TypeBoundsMethods`](https://scala-lang.org/api/3.x/scala/quoted/Quotes$reflectModule$TypeBoundsMethods.html), you will find the methods `low` and `hi`, which allow you to access the
+representations of `L` and `U`. In [`TypeBoundsModule`](https://scala-lang.org/api/3.x/scala/quoted/Quotes$reflectModule$TypeBoundsModule.html), you will find the `unapply` method, which allows you to write:
+
+```scala
+def f(tpe: TypeRepr) =
+ tpe match
+ case TypeBounds(l, u) =>
+```
+
+Because `TypeBounds <: TypeRepr`, all the methods defined in `TypeReprMethods` are available on `TypeBounds` values:
+
+```scala
+def f(tpe: TypeRepr) =
+ tpe match
+ case tpe: TypeBounds =>
+ val low = tpe.low
+ val hi = tpe.hi
+```
+
+## Relation with Expr/Type
+
+### Expr and Term
+
+Expressions (`Expr[T]`) can be seen as wrappers around a `Term`, where `T` is the statically-known type of the term.
+Below, we use the extension method `asTerm` to transform an expression into a term.
+This extension method is only available after importing `quotes.reflect.asTerm`.
+Then we use `asExprOf[Int]` to transform the term back into `Expr[Int]`.
+This operation will fail if the term does not have the provided type (in this case, `Int`) or if the term is not a valid expression.
+For example, an `Ident(fn)` is an invalid term if the method `fn` takes type parameters, in which case we would need an `Apply(Ident(fn), args)`.
+
+```scala
+def f(x: Expr[Int])(using Quotes): Expr[Int] =
+ import quotes.reflect.*
+ val tree: Term = x.asTerm
+ val expr: Expr[Int] = tree.asExprOf[Int]
+ expr
+```
+
+### Type and TypeRepr
+
+Similarly, we can also see `Type[T]` as a wrapper over `TypeRepr`, with `T` being the statically-known type.
+To get a `TypeRepr`, we use `TypeRepr.of[T]`, which expects a given `Type[T]` in scope (similar to `Type.of[T]`).
+We can also transform it back into a `Type[?]` using the `asType` method.
+As the type of `Type[?]` is not statically known, we need to name it with an existential type to use it. This can be achieved using the `'[t]` pattern.
+
+```scala
+def g[T: Type](using Quotes) =
+ import quotes.reflect.*
+ val tpe: TypeRepr = TypeRepr.of[T]
+ tpe.asType match
+ case '[t] => '{ val x: t = ${...} }
+ ...
+```
+
+## Symbols
+
+The APIs of `Term` and `TypeRepr` are relatively *closed* in the sense that methods produce and accept values whose types are defined in the API.
+However, you might notice the presence of `Symbol`s which identify definitions.
+
+Both `Term`s and `TypeRepr`s (and therefore `Expr`s and `Type`s) have an associated symbol.
+`Symbol`s make it possible to compare two definitions using `==` to know if they are the same.
+In addition, `Symbol` exposes and is used by many useful methods. For example:
+
+ - `declaredFields` and `declaredMethods` allow you to iterate on the fields and members defined inside a symbol
+ - `flags` allows you to check multiple properties of a symbol
+ - `companionClass` and `companionModule` provide a way to jump to and from the companion object/class
+ - `TypeRepr.baseClasses` returns the list of symbols of classes extended by a type
+ - `Symbol.pos` gives you access to the position where the symbol is defined, the source code of the definition, and even the filename where the symbol is defined
+ - many others that you can find in [`SymbolMethods`](https://scala-lang.org/api/3.x/scala/quoted/Quotes$reflectModule$SymbolMethods.html)
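+
+For instance, a minimal sketch (inside a macro implementation) that uses a couple of these members to list the field names of a type:
+
+```scala
+def fieldNames[T: Type](using Quotes): List[String] =
+  import quotes.reflect.*
+  TypeRepr.of[T].typeSymbol.declaredFields.map(_.name)
+```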
+
+### To Symbol and back
+
+Consider an instance of the type `TypeRepr` named `val tpe: TypeRepr = ...`. Then:
+
+ - `tpe.typeSymbol` returns the symbol of the type represented by `TypeRepr`. The recommended way to obtain a `Symbol` given a `Type[T]` is `TypeRepr.of[T].typeSymbol`
+ - For a singleton type, `tpe.termSymbol` returns the symbol of the underlying object or value
+ - `tpe.memberType(symbol)` returns the `TypeRepr` of the provided symbol
+ - On objects `t: Tree`, `t.symbol` returns the symbol associated with a tree.
+ Given that `Term <: Tree`, `Expr.asTerm.symbol` is the best way to obtain the symbol associated with an `Expr[T]`
+ - On objects `sym: Symbol`, `sym.tree` returns the `Tree` associated to the symbol.
+Be careful when using this method as the tree for a symbol might not be defined.
+Read more on the [best practices page][best practices]
+
+## Macro API design
+
+It will often be useful to create helper methods or extractors that perform some common logic of your macros.
+
+The simplest methods will be those that only mention `Expr`, `Type`, and `Quotes` in their signature.
+Internally, they may use reflection, but this will not be seen at the use site of the method.
+
+```scala
+def f(x: Expr[Int])(using Quotes): Expr[Int] =
+ import quotes.reflect.*
+ ...
+```
+
+In some cases, it may be inevitable that some methods will expect or return `Tree`s or other types in `quotes.reflect`.
+For these cases, the best practice is to follow the following method signature examples:
+
+A method that takes a `quotes.reflect.Term` parameter
+```scala
+def f(using Quotes)(term: quotes.reflect.Term): String =
+ import quotes.reflect.*
+ ...
+```
+
+An extension method for a `quotes.reflect.Term` returning a `quotes.reflect.Tree`
+```scala
+extension (using Quotes)(term: quotes.reflect.Term)
+ def g: quotes.reflect.Tree = ...
+```
+
+An extractor that matches on `quotes.reflect.Term`s
+```scala
+object MyExtractor:
+ def unapply(using Quotes)(x: quotes.reflect.Term) =
+ ...
+ Some(y)
+```
+
+> **Avoid saving the `Quotes` context in a field.**
+> Storing `Quotes` in a field inevitably makes it harder to use, by causing errors involving `Quotes` instances with different paths.
+>
+> Usually, these patterns have been seen in code that uses the Scala 2 ways to define extension methods or contextual unapplies.
+> Now that we have `given` parameters that can be added before other parameters, all these old workarounds are not needed anymore.
+> The new abstractions make it simpler both at the definition site and at the use site.
+
+## Debugging
+
+### Runtime checks
+
+The reflection API provides fewer static guarantees than `Expr` and `Type`: it is possible to build trees that are ill-formed or ill-typed.
+Hence, these checks will be done at runtime (i.e. at compile-time, when the macro expands).
+
+It is recommended to enable the `-Xcheck-macros` flag while developing macros or on the tests for the macro.
+This flag will enable extra runtime checks that will try to find ill-formed trees or types as soon as they are created.
+
+There is also the `-Ycheck:all` flag that checks all compiler invariants for tree well-formedness.
+These checks will usually fail with an assertion error.
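+
+In an sbt build, the flags could be enabled like this (a sketch; only the flag names come from the text above):
+
+```scala
+// build.sbt
+Compile / scalacOptions += "-Xcheck-macros"
+// and, for the heavier compiler-invariant checks:
+// Compile / scalacOptions += "-Ycheck:all"
+```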
+
+### Printing the trees
+
+The `toString` methods on types in the `quotes.reflect` package are not great for debugging as they show the internal representation rather than the `quotes.reflect` representation.
+In many cases these are similar, but they may sometimes lead the debugging process astray, so they shouldn't be relied on.
+
+Instead, `quotes.reflect.Printers` provides a set of useful printers for debugging.
+Notably, the `TreeStructure`, `TypeReprStructure`, and `ConstantStructure` printers can be quite useful.
+These will print the tree structure following loosely the extractors that would be needed to match it.
+
+```scala
+val tree: Tree = ...
+println(tree.show(using Printer.TreeStructure))
+```
+
+One of the most useful places where this can be added is at the end of a pattern match on a `Tree`.
+
+```scala
+tree match
+ case Ident(_) =>
+ case Select(_, _) =>
+ ...
+ case _ =>
+ throw new MatchError(tree.show(using Printer.TreeStructure))
+```
+This way, if a case is missed the error will report a familiar structure that can be copy-pasted to start fixing the issue.
+
+You can make this printer the default if desired:
+```scala
+ import quotes.reflect.*
+ given Printer[Tree] = Printer.TreeStructure
+ ...
+ println(tree.show)
+```
+
+## More
+*Coming soon*
+
+[tasty inspection]: {{ site.scala3ref }}/metaprogramming/tasty-inspect.html
+[reflection doc]: https://scala-lang.org/api/3.x/scala/quoted/Quotes$reflectModule.html
+
+[best practices]: {% link _overviews/scala3-macros/best-practices.md %}
diff --git a/_overviews/scala3-migration/compatibility-classpath.md b/_overviews/scala3-migration/compatibility-classpath.md
new file mode 100644
index 0000000000..6b25280994
--- /dev/null
+++ b/_overviews/scala3-migration/compatibility-classpath.md
@@ -0,0 +1,141 @@
+---
+title: Classpath Level
+type: section
+description: This section describes the compatibility between Scala 2.13 and Scala 3 class files.
+num: 3
+previous-page: compatibility-source
+next-page: compatibility-runtime
+---
+
+In your code you can use public types and terms, and call public methods that are defined in a different module or library.
+It works well as long as the type checker, which is the compiler phase that validates the semantic consistency of the code, is able to read the signatures of those types, terms and methods, from the class files containing them.
+
+In Scala 2 the signatures are stored in a dedicated format called the Pickle format.
+In Scala 3, the story is a bit different, because it relies on the TASTy format, which contains a lot more than just the signatures.
+But, for the purpose of moving from Scala 2.13 to Scala 3, only the signatures are useful.
+
+## The Scala 3 Unpickler
+
+The first piece of good news is that the Scala 3 compiler is able to read the Scala 2.13 Pickle format and thus it can type check code that depends on modules or libraries compiled with Scala 2.13.
+
+The Scala 3 unpickler has been extensively tested in the community build for many years now. It is safe to use.
+
+### A Scala 3 module can depend on a Scala 2.13 artifact
+
+
+
+As an sbt build, it looks like this:
+
+```scala
+// build.sbt (sbt 1.5 or higher)
+lazy val foo = project.in(file("foo"))
+ .settings(scalaVersion := "3.3.1")
+ .dependsOn(bar)
+
+lazy val bar = project.in(file("bar"))
+ .settings(scalaVersion := "2.13.11")
+```
+
+Or, in case bar is a published Scala 2.13 library, we can have:
+
+```scala
+lazy val foo = project.in(file("foo"))
+ .settings(
+ scalaVersion := "3.3.1",
+ libraryDependencies += ("org.bar" %% "bar" % "1.0.0").cross(CrossVersion.for3Use2_13)
+ )
+```
+
+We use `CrossVersion.for3Use2_13` in sbt to resolve `bar_2.13` instead of `bar_3`.
+
+### The Standard Library
+
+One notable example is the Scala 2.13 library.
+We have indeed decided that the Scala 2.13 library is the official standard library for Scala 3.
+
+Note that the standard library is automatically provided by the build tool; you should not need to configure it manually.
+
+## The Scala 2.13 TASTy Reader
+
+The second piece of good news is that Scala 2.13 can consume Scala 3 libraries with `-Ytasty-reader`.
+
+### Supported Features
+
+The TASTy reader supports all the traditional language features as well as the following Scala 3 features:
+- [Enumerations]({{ site.scala3ref }}/enums/enums.html)
+- [Intersection Types]({{ site.scala3ref }}/new-types/intersection-types.html)
+- [Opaque Type Aliases]({{ site.scala3ref }}/other-new-features/opaques.html)
+- [Type Lambdas]({{ site.scala3ref }}/new-types/type-lambdas.html)
+- [Contextual Abstractions]({{ site.scala3ref }}/contextual) (new syntax)
+- [Open Classes]({{ site.scala3ref }}/other-new-features/open-classes.html) (and inheritance of super traits)
+- [Export Clauses]({{ site.scala3ref }}/other-new-features/export.html)
+
+It partially supports:
+- [Top-Level Definitions]({{ site.scala3ref }}/dropped-features/package-objects.html)
+- [Extension Methods]({{ site.scala3ref }}/contextual/extension-methods.html)
+
+It does not support the more advanced features:
+- [Context Functions]({{ site.scala3ref }}/contextual/context-functions.html)
+- [Polymorphic Function Types]({{ site.scala3ref }}/new-types/polymorphic-function-types.html)
+- [Trait Parameters]({{ site.scala3ref }}/other-new-features/trait-parameters.html)
+- `@static` Annotation
+- `@alpha` Annotation
+- [Functions and Tuples larger than 22 parameters]({{ site.scala3ref }}/dropped-features/limit22.html)
+- [Match Types]({{ site.scala3ref }}/new-types/match-types.html)
+- [Union Types]({{ site.scala3ref }}/new-types/union-types.html)
+- [Multiversal Equality]({{ site.scala3ref }}/contextual/multiversal-equality.html) (unless explicit)
+- [Inline]({{ site.scala3ref }}/metaprogramming/inline.html) (including Scala 3 macros)
+- [Kind Polymorphism]({{ site.scala3ref }}/other-new-features/kind-polymorphism.html) (the `scala.AnyKind` upper bound)
+
+### A Scala 2.13 module can depend on a Scala 3 artifact
+
+By enabling the TASTy reader with `-Ytasty-reader`, a Scala 2.13 module can depend on a Scala 3 artifact.
+
+
+
+As an sbt build, it looks like this:
+
+```scala
+// build.sbt (sbt 1.5 or higher)
+lazy val foo = project.in(file("foo"))
+ .settings(
+ scalaVersion := "2.13.11",
+ scalacOptions += "-Ytasty-reader"
+ )
+ .dependsOn(bar)
+
+lazy val bar = project.in(file("bar"))
+ .settings(scalaVersion := "3.3.1")
+```
+
+Or, in case `bar` is a published Scala 3 library:
+
+```scala
+lazy val foo = project.in(file("foo"))
+ .settings(
+ scalaVersion := "2.13.11",
+ scalacOptions += "-Ytasty-reader",
+ libraryDependencies += ("org.bar" %% "bar" % "1.0.0").cross(CrossVersion.for2_13Use3)
+ )
+```
+
+Similarly to `CrossVersion.for3Use2_13`, we use `CrossVersion.for2_13Use3` in sbt to resolve `bar_3` instead of `bar_2.13`.
+
+## Interoperability Overview
+
+In short, we have backward and forward compatibility and so **migration can happen gradually**.
+
+You can port a big Scala application one module at a time, even if its library dependencies have not yet been ported (except for macro libraries).
+
+During the transition period, you can have a Scala 3 module layered in between two 2.13 modules.
+
+
+
+This is permitted as long as all libraries are resolved to a single binary version: you can have `lib-foo_3` and `lib-bar_2.13` in the same classpath, but you cannot have `lib-foo_3` and `lib-foo_2.13`.
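+
+As an sbt sketch of such a sandwich (the module names are hypothetical):
+
+```scala
+lazy val bottom = project.in(file("bottom"))
+  .settings(scalaVersion := "2.13.11")
+
+lazy val middle = project.in(file("middle"))
+  .settings(scalaVersion := "3.3.1")
+  .dependsOn(bottom)
+
+lazy val top = project.in(file("top"))
+  .settings(
+    scalaVersion := "2.13.11",
+    scalacOptions += "-Ytasty-reader"
+  )
+  .dependsOn(middle)
+```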
+
+The inverted pattern, with a 2.13 module in the middle, is also possible.
+
+> #### Disclaimer for library maintainers
+>
+> Unless you know exactly what you are doing, it is discouraged to publish a Scala 3 library that depends on a Scala 2.13 library (the scala-library being excluded) or vice versa.
+> The reason is to prevent library users from ending up with two conflicting versions `foo_2.13` and `foo_3` of the same foo library in their classpath, this problem being unsolvable in some cases.
diff --git a/_overviews/scala3-migration/compatibility-intro.md b/_overviews/scala3-migration/compatibility-intro.md
new file mode 100644
index 0000000000..b511567a2b
--- /dev/null
+++ b/_overviews/scala3-migration/compatibility-intro.md
@@ -0,0 +1,36 @@
+---
+title: Compatibility Reference
+type: chapter
+description: This chapter describes the compatibility between Scala 2.13 and Scala 3.
+num: 1
+previous-page:
+next-page: compatibility-source
+---
+
+Scala 3 is a game changer in terms of compatibility in the Scala ecosystem that will greatly improve the day-to-day experience of every Scala programmer.
+This new compatibility era starts with the migration.
+
+Moving from Scala 2 to Scala 3 is a big leap forward.
+Scala 3 is a shiny new compiler, built upon a complete redesign of the core foundations of the language.
+Yet we claim the migration will not be harder than before, when we moved from Scala 2.12 to Scala 2.13.
+
+It will even be simpler in some respects, thanks to the interoperability between Scala 2.13 and Scala 3.
+
+This chapter details the level of compatibility between the two versions at the different stages of the program.
+This is where you will find answers to the following questions:
+
+**[Source Level](compatibility-source.html)**
+- Is Scala 3 a different language?
+- How hard is it to translate a Scala 2.13 project into Scala 3?
+
+**[Classpath Level](compatibility-classpath.html)**
+- Can we use a Scala 2.13 library in Scala 3?
+- Inversely, can we use a Scala 3 library in Scala 2.13?
+
+**[Runtime](compatibility-runtime.html)**
+- Is it safe to deploy a Scala 3 program in a production environment?
+- How fast are Scala 3 programs compared to Scala 2.13?
+
+**[Metaprogramming](compatibility-metaprogramming.html)**
+- Will my Scala 2.13 project be affected by the replacement of the Scala 2 macro feature?
+- How can I port my Scala 2.13 macro library to Scala 3?
diff --git a/_overviews/scala3-migration/compatibility-metaprogramming.md b/_overviews/scala3-migration/compatibility-metaprogramming.md
new file mode 100644
index 0000000000..675f5fc4a3
--- /dev/null
+++ b/_overviews/scala3-migration/compatibility-metaprogramming.md
@@ -0,0 +1,89 @@
+---
+title: Metaprogramming
+type: section
+description: This section discusses the metaprogramming transition
+num: 5
+previous-page: compatibility-runtime
+next-page: tooling-tour
+---
+
+A call to a macro method is executed during the compiler phase called macro expansion to generate a part of the program---an abstract syntax tree.
+
+The Scala 2.13 macro API is closely tied to the Scala 2.13 compiler internals.
+Therefore it is not possible for the Scala 3 compiler to expand any Scala 2.13 macro.
+
+In contrast, Scala 3 introduces a new principled approach of metaprogramming that is designed for stability.
+Scala 3 macros, and inline methods in general, will be compatible with future versions of the Scala 3 compiler.
+While this is an uncontested improvement, it also means that all Scala 2.13 macros have to be rewritten from the ground up, using the new metaprogramming features.
+
+## Macro Dependencies
+
+A Scala 3 module can depend on a Scala 2.13 artifact even if it contains a macro definition but the compiler will not be able to expand its macros.
+When you try to, it simply returns an error.
+
+{% highlight text %}
+ -- Error: /src/main/scala/example/Example.scala:10:45
+ 10 | val documentFormat = Json.format[Document]
+ | ^
+ |Scala 2 macro cannot be used in Scala 3. See https://dotty.epfl.ch/docs/reference/dropped-features/macros.html
+ |To turn this error into a warning, pass -Xignore-scala2-macros to the compiler
+{% endhighlight %}
+
+Note that using `-Xignore-scala2-macros` is helpful to type check the code, but it produces incomplete class files.
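+
+In sbt, the flag can be enabled like this (a sketch; only the flag name comes from the text above):
+
+```scala
+// build.sbt -- only while exploring the migration
+Compile / scalacOptions += "-Xignore-scala2-macros"
+```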
+
+When this error appears in your project, you ultimately have no other choice than to upgrade to a Scala 3-compiled version of the macro artifact.
+
+## Porting the Macro Ecosystem
+
+Even though it was experimental, the Scala 2 macro feature was widely adopted by the community, in multiple ways: code generation, optimizations, ergonomic DSLs, and more.
+
+A large part of the ecosystem now depends on Scala 2.13 macros defined in external libraries.
+Identifying and porting those libraries is key to moving the ecosystem forward.
+
+The migration status of many open-source macro libraries is available on [this page](https://scalacenter.github.io/scala-3-migration-guide/docs/macros/macro-libraries.html).
+
+## Rewriting a Macro
+
+The new metaprogramming features are completely different from Scala 2.
+They consist of:
+- [Inline Methods][inline]
+- [Compile-time operations][compiletime]
+- [Macros][macros]
+- [Quoted code][quotes]
+- [Reflection over Abstract Syntax Trees (AST)][reflection]
+
+Before getting deep into reimplementing a macro you should ask yourself:
+- Can I use `inline` and the `scala.compiletime` operations to reimplement my logic?
+- Can I use the simpler and safer expression-based macros?
+- Do I really need to access the AST?
+- Can I use a [match type]({{ site.scala3ref }}/new-types/match-types.html) as return type?
+
+You can learn all the new metaprogramming concepts by reading the [Macros in Scala 3][scala3-macros] tutorial.
+
+## Cross-building a Macro Library
+
+You have written a wonderful macro library and you would like it to be available in Scala 2.13 and Scala 3.
+There are two different approaches, the traditional cross-building technique and the more flexible macro mixing technique.
+
+The benefit of macro mixing is that consumers who take advantage of the `-Ytasty-reader` option can still use your macros.
+
+You can learn about them by reading these tutorials:
+- [Cross-Building a Macro Library](tutorial-macro-cross-building.html)
+- [Mixing Scala 2.13 and Scala 3 Macros](tutorial-macro-mixing.html)
+
+## Additional Resources
+
+Blog posts and talks:
+- [Macros: The Plan For Scala 3](https://www.scala-lang.org/blog/2018/04/30/in-a-nutshell.html)
+- [Scala Days - Metaprogramming in Dotty](https://www.youtube.com/watch?v=ZfDS_gJyPTc)
+
+Early-adopter projects:
+- [XML Interpolator](https://github.com/dotty-staging/xml-interpolator/tree/master)
+- [Shapeless 3](https://github.com/dotty-staging/shapeless/tree/shapeless-3)
+
+[inline]: {% link _overviews/scala3-macros/tutorial/inline.md %}
+[compiletime]: {% link _overviews/scala3-macros/tutorial/compiletime.md %}
+[macros]: {% link _overviews/scala3-macros/tutorial/macros.md %}
+[quotes]: {% link _overviews/scala3-macros/tutorial/quotes.md %}
+[reflection]: {% link _overviews/scala3-macros/tutorial/reflection.md %}
+[scala3-macros]: {% link _overviews/scala3-macros/tutorial/index.md %}
diff --git a/_overviews/scala3-migration/compatibility-runtime.md b/_overviews/scala3-migration/compatibility-runtime.md
new file mode 100644
index 0000000000..729faae7aa
--- /dev/null
+++ b/_overviews/scala3-migration/compatibility-runtime.md
@@ -0,0 +1,28 @@
+---
+title: Runtime
+type: section
+description: This section describes the run-time characteristics of a Scala 3 program.
+num: 4
+previous-page: compatibility-classpath
+next-page: compatibility-metaprogramming
+---
+
+Scala 2.13 and Scala 3 share the same Application Binary Interface (ABI).
+
+> The ABI is the representation of Scala code in bytecode or Scala.js IR.
+> It determines the run-time behavior of Scala programs.
+
+Compiling the same source code with Scala 2.13 and Scala 3 produces very similar bytecode.
+The differences come from features whose implementation has changed; for instance, the initialization of lazy vals has been improved.
+
+Sharing the ABI also ensures that Scala 2.13 and Scala 3 class files can be loaded by the same JVM class loader.
+Similarly, it ensures that Scala 2.13 and Scala 3 `sjsir` files can be linked together by the Scala.js linker.
+
+Furthermore, it spares us from surprising behaviors at runtime.
+It makes the migration from Scala 2.13 to Scala 3 very safe in terms of run-time crashes and performance.
+
+At first sight, the run-time characteristics of a Scala program are neither better nor worse in Scala 3 compared to Scala 2.13.
+However some new features will help you optimize your program:
+- [Opaque Type Aliases](http://dotty.epfl.ch/docs/reference/other-new-features/opaques.html)
+- [Inline Methods](http://dotty.epfl.ch/docs/reference/metaprogramming/inline.html)
+- [@threadUnsafe annotation](http://dotty.epfl.ch/docs/reference/other-new-features/threadUnsafe-annotation.html)
diff --git a/_overviews/scala3-migration/compatibility-source.md b/_overviews/scala3-migration/compatibility-source.md
new file mode 100644
index 0000000000..b3e4ad5c41
--- /dev/null
+++ b/_overviews/scala3-migration/compatibility-source.md
@@ -0,0 +1,28 @@
+---
+title: Source Level
+type: section
+description: This section describes the level of compatibility between Scala 2.13 and Scala 3 sources.
+num: 2
+previous-page: compatibility-intro
+next-page: compatibility-classpath
+---
+
+Scala 3 is an improved version of the Scala 2 language.
+
+Despite the new syntax, a very large subset of the Scala 2.13 language is still valid.
+Not all of it, though: some constructs have been simplified, restricted, or dropped altogether.
+However, those decisions were made for good reasons, and care was taken to ensure that a good workaround is possible.
+
+In general there is a straightforward cross-compiling solution to every incompatibility, so that the migration from Scala 2.13 to Scala 3 is easy and smooth.
+You can find a corpus of incompatibilities in the [Incompatibility Table](incompatibility-table.html).
+
+There is an exception though, which is the new metaprogramming framework that replaces the Scala 2 experimental macros.
+Further explanations are given at the end of this chapter in the [Metaprogramming](compatibility-metaprogramming.html) section.
+
+Metaprogramming aside, Scala 2.13 source code can rather easily be ported to Scala 3.
+Once done, you will be able to use the new powerful features of Scala 3, which have no equivalent in Scala 2.
+The downside is those sources cannot be compiled with Scala 2.13 anymore.
+But amazingly, this new Scala 3 artifact can be consumed as a dependency in Scala 2.13.
+
+As we will see in more detail, it permits backward and forward compatibility.
+This is a breakthrough in the history of the Scala programming language.
diff --git a/_overviews/scala3-migration/external-resources.md b/_overviews/scala3-migration/external-resources.md
new file mode 100644
index 0000000000..1055f4bc95
--- /dev/null
+++ b/_overviews/scala3-migration/external-resources.md
@@ -0,0 +1,34 @@
+---
+title: External Resources
+type: chapter
+description: This section lists external resources about the migration to Scala 3.
+num: 29
+previous-page: plugin-kind-projector
+next-page:
+---
+
+## Courses
+
+### Lunatech's [_Moving from Scala 2 to Scala 3_](https://github.com/lunatech-labs/lunatech-scala-2-to-scala3-course)
+
+If you're a Scala 2 application developer who's looking at getting up-to-speed on Scala 3 or who's considering a migration of an existing Scala 2 application to Scala 3, Lunatech's [_"Moving from Scala 2 to Scala 3"_](https://github.com/lunatech-labs/lunatech-scala-2-to-scala3-course) course is a good way to get started.
+
+This course guides you through a migration of a single-module Akka Typed Sudoku solver in a series of about 10 steps. It covers the practical application of the following Scala 3 features:
+
+- New Control Structure syntax
+- Indentation Based syntax
+- Syntax rewriting by the Scala 3 compiler
+- Top Level definitions
+- Parameter untupling
+- Contextual Abstractions:
+ - Extension methods new syntax
+ - Given instances and Using clauses
+- Enumerations and Export clauses
+- Intersection and Union Types
+- Opaque Type Aliases
+- Multiversal Equality
+
+## Talks
+
+- [Scala 3: Python 3 or Easiest Upgrade Ever?](https://www.youtube.com/watch?v=jWJ5A1irH_E) by Daniel Spiewak (Weehawken-Lang)
+- [Taste the difference with Scala 3: Migrating the ecosystem and more](https://www.youtube.com/watch?v=YQmVrUdx8TU) by Jamie Thompson (f(by) 2020)
diff --git a/_overviews/scala3-migration/incompat-contextual-abstractions.md b/_overviews/scala3-migration/incompat-contextual-abstractions.md
new file mode 100644
index 0000000000..ea5947f2e4
--- /dev/null
+++ b/_overviews/scala3-migration/incompat-contextual-abstractions.md
@@ -0,0 +1,150 @@
+---
+title: Contextual Abstractions
+type: section
+description: This chapter details all incompatibilities caused by the redesign of contextual abstractions
+num: 19
+previous-page: incompat-dropped-features
+next-page: incompat-other-changes
+---
+
+The redesign of [contextual abstractions]({{ site.scala3ref }}/contextual) brings some incompatibilities.
+
+|Incompatibility|Scala 2.13|Scala 3 Migration Rewrite|Scalafix Rule|Runtime Incompatibility|
+|--- |--- |--- |--- |--- |
+|[Type of implicit def](#type-of-implicit-definition)|||[✅](https://scalacenter.github.io/scalafix/docs/rules/ExplicitResultTypes.html)||
+|[Implicit views](#implicit-views)||||**Possible**|
+|[View bounds](#view-bounds)|Deprecation||||
+|[Ambiguous conversion on `A` and `=> A`](#ambiguous-conversion-on-a-and--a)|||||
+
+## Type Of Implicit Definition
+
+The type of implicit definitions (`val` or `def`) needs to be given explicitly in Scala 3.
+They cannot be inferred anymore.
+
+The Scalafix rule named [ExplicitResultTypes](https://scalacenter.github.io/scalafix/docs/rules/ExplicitResultTypes.html) can write the missing type annotations automatically.
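+
+For instance, a hypothetical implicit definition would be annotated like this:
+
+{% highlight diff %}
+-implicit val defaultOrdering = Ordering.Int
++implicit val defaultOrdering: Ordering[Int] = Ordering.Int
+{% endhighlight %}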
+
+## Implicit Views
+
+Scala 3 does not support implicit conversions defined as implicit function values, of the form `implicit val ev: A => B`.
+
+{% tabs scala-2-implicit_1 %}
+{% tab 'Scala 2 Only' %}
+
+The following piece of code is now invalid in Scala 3:
+~~~ scala
+trait Pretty {
+ val print: String
+}
+
+def pretty[A](a: A)(implicit ev: A => Pretty): String =
+ a.print // In Scala 3, Error: value print is not a member of A
+~~~
+{% endtab %}
+{% endtabs %}
+
+The [Scala 3 migration compilation](tooling-migration-mode.html) can warn you about those cases, but it does not try to fix it.
+
+Be aware that this incompatibility can produce a runtime incompatibility and break your program.
+Indeed the compiler can find another implicit conversion from a broader scope, which would eventually cause an undesired behavior at runtime.
+
+{% tabs shared-implicit_2 %}
+{% tab 'Scala 2 and 3' %}
+
+This example illustrates the case:
+~~~ scala
+trait Pretty {
+ val print: String
+}
+
+implicit def anyPretty(any: Any): Pretty = new Pretty { val print = "any" }
+
+def pretty[A](a: A)(implicit ev: A => Pretty): String =
+ a.print // always print "any"
+~~~
+{% endtab %}
+{% endtabs %}
+
+The resolved conversion depends on the compiler mode:
+ - `-source:3.0-migration`: the compiler performs the `ev` conversion
+ - `-source:3.0`: the compiler cannot perform the `ev` conversion but it can perform the `anyPretty`, which is undesired
+
+In Scala 3, one simple fix is to supply the right conversion explicitly:
+
+{% highlight diff %}
+def pretty[A](a: A)(implicit ev: A => Pretty): String =
+- a.print
++ ev(a).print
+{% endhighlight %}
+
+## View Bounds
+
+View bounds have been deprecated for a long time but they are still supported in Scala 2.13.
+They cannot be compiled with Scala 3 anymore.
+
+{% tabs scala-2-bounds_1 %}
+{% tab 'Scala 2 Only' %}
+~~~ scala
+def foo[A <% Long](a: A): Long = a
+~~~
+{% endtab %}
+{% endtabs %}
+
+In this example, in Scala 3, we get this following error message:
+
+{% highlight text %}
+-- Error: src/main/scala/view-bound.scala:2:12
+2 | def foo[A <% Long](a: A): Long = a
+ | ^
+ | view bounds `<%' are deprecated, use a context bound `:' instead
+{% endhighlight %}
+
+The message suggests using a context bound instead of a view bound, but that would change the signature of the method.
+It is probably easier and safer to preserve binary compatibility.
+To do so, the implicit conversion must be declared and called explicitly.
+
+Be careful not to fall into the runtime incompatibility described above, in [Implicit Views](#implicit-views).
+
+{% highlight diff %}
+-def foo[A <% Long](a: A): Long = a
++def foo[A](a: A)(implicit ev: A => Long): Long = ev(a)
+{% endhighlight %}
+
+## Ambiguous Conversion On `A` And `=> A`
+
+In Scala 2.13 the implicit conversion on `A` wins over the implicit conversion on `=> A`.
+It is not the case in Scala 3 anymore, and leads to an ambiguous conversion.
+
+For instance, in this example:
+
+{% tabs scala-2-ambiguous_1 %}
+{% tab 'Scala 2 Only' %}
+~~~ scala
+implicit def boolFoo(bool: Boolean): Foo = ???
+implicit def lazyBoolFoo(lazyBool: => Boolean): Foo = ???
+
+true.foo()
+~~~
+{% endtab %}
+{% endtabs %}
+
+The Scala 2.13 compiler chooses the `boolFoo` conversion but the Scala 3 compiler fails to compile.
+
+{% highlight text %}
+-- Error: src/main/scala/ambiguous-conversion.scala:4:19
+9 | true.foo()
+ | ^^^^
+ |Found: (true : Boolean)
+ |Required: ?{ foo: ? }
+ |Note that implicit extension methods cannot be applied because they are ambiguous;
+ |both method boolFoo in object Foo and method lazyBoolFoo in object Foo provide an extension method `foo` on (true : Boolean)
+{% endhighlight %}
+
+A temporary solution is to write the conversion explicitly.
+
+{% highlight diff %}
+implicit def boolFoo(bool: Boolean): Foo = ???
+implicit def lazyBoolFoo(lazyBool: => Boolean): Foo = ???
+
+-true.foo()
++boolFoo(true).foo()
+{% endhighlight %}
diff --git a/_overviews/scala3-migration/incompat-dropped-features.md b/_overviews/scala3-migration/incompat-dropped-features.md
new file mode 100644
index 0000000000..845a58b143
--- /dev/null
+++ b/_overviews/scala3-migration/incompat-dropped-features.md
@@ -0,0 +1,308 @@
+---
+title: Dropped Features
+type: section
+description: This chapter details all the dropped features
+num: 18
+previous-page: incompat-syntactic
+next-page: incompat-contextual-abstractions
+---
+
+Some features are dropped to simplify the language.
+Most of these changes can be handled automatically during the [Scala 3 migration compilation](tooling-migration-mode.html).
+
+|Incompatibility|Scala 2.13|Scala 3 Migration Rewrite|Scalafix Rule|
+|--- |--- |--- |--- |
+|[Symbol literals](#symbol-literals)|Deprecation|✅||
+|[`do`-`while` construct](#do-while-construct)||✅||
+|[Auto-application](#auto-application)|Deprecation|✅|[✅](https://github.com/scala/scala-rewrites/blob/main/rewrites/src/main/scala/fix/scala213/ExplicitNonNullaryApply.scala)|
+|[Value eta-expansion](#value-eta-expansion)|Deprecation|✅|[✅](https://github.com/scala/scala-rewrites/blob/main/rewrites/src/main/scala/fix/scala213/ExplicitNullaryEtaExpansion.scala)|
+|[`any2stringadd` conversion](#any2stringadd-conversion)|Deprecation||[✅](https://github.com/scala/scala-rewrites/blob/main/rewrites/src/main/scala/fix/scala213/Any2StringAdd.scala)|
+|[Early initializer](#early-initializer)|Deprecation|||
+|[Existential type](#existential-type)|Feature warning|||
+|[@specialized](#specialized)|Deprecation|||
+
+## Symbol literals
+
+The Symbol literal syntax is deprecated in Scala 2.13 and dropped in Scala 3.
+But the `scala.Symbol` class still exists, so that each symbol literal can be safely replaced by an application of `Symbol`.
+
+This piece of code cannot be compiled with Scala 3:
+
+{% tabs scala-2-literals_1 %}
+{% tab 'Scala 2 Only' %}
+~~~ scala
+val values: Map[Symbol, Int] = Map('abc -> 1)
+
+val abc = values('abc) // In Scala 3, Migration Warning: symbol literal 'abc is no longer supported
+~~~
+{% endtab %}
+{% endtabs %}
+
+The [Scala 3 migration compilation](tooling-migration-mode.html) rewrites the code into:
+{% highlight diff %}
+val values: Map[Symbol, Int] = Map(Symbol("abc") -> 1)
+
+-val abc = values('abc)
++val abc = values(Symbol("abc"))
+{% endhighlight %}
+
+Although the `Symbol` class is useful during the transition, beware that it is deprecated and will be removed from the `scala-library` in a future version.
+As a second step, you are advised to replace every use of `Symbol` with a plain string literal `"abc"` or a custom dedicated class.
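+
+For instance, a sketch of that second step could look like the following, where the `Key` class is hypothetical and introduced only for illustration:
+
+{% tabs shared-literals_2 %}
+{% tab 'Scala 2 and 3' %}
+~~~ scala
+// a dedicated class instead of Symbol
+final case class Key(name: String)
+
+val values: Map[Key, Int] = Map(Key("abc") -> 1)
+
+val abc = values(Key("abc"))
+~~~
+{% endtab %}
+{% endtabs %}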
+
+## `do`-`while` construct
+
+The `do` keyword has acquired a different meaning in the [New Control Syntax]({{ site.scala3ref }}/other-new-features/control-syntax.html).
+
+To avoid confusion, the traditional `do <body> while (<cond>)` construct is dropped.
+It is recommended to use the equivalent `while ({ <body>; <cond> }) ()` that can be cross-compiled, or the new Scala 3 syntax `while { <body>; <cond> } do ()`.
+
+The following piece of code cannot be compiled with Scala 3.
+
+{% tabs scala-2-do_while_1 %}
+{% tab 'Scala 2 Only' %}
+~~~ scala
+do { // In Scala 3, Migration Warning: `do <body> while <cond>` is no longer supported
+ i += 1
+} while (f(i) == 0)
+~~~
+{% endtab %}
+{% endtabs %}
+
+The [Scala 3 migration compilation](tooling-migration-mode.html) rewrites it into:
+{% tabs scala-3-do_while_2 %}
+{% tab 'Scala 3 Only' %}
+~~~ scala
+while ({ {
+ i += 1
+} ; f(i) == 0}) ()
+~~~
+{% endtab %}
+{% endtabs %}
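+
+Alternatively, with the new Scala 3 control syntax mentioned above, the same loop can be written as:
+
+{% tabs scala-3-do_while_3 %}
+{% tab 'Scala 3 Only' %}
+~~~ scala
+while {
+  i += 1
+  f(i) == 0
+} do ()
+~~~
+{% endtab %}
+{% endtabs %}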
+
+## Auto-application
+
+Auto-application is the syntax of calling an empty-paren method such as `def toInt(): Int` without passing an empty argument list.
+It is deprecated in Scala 2.13 and dropped in Scala 3.
+
+The following code is invalid in Scala 3:
+
+{% tabs scala-2-auto_application_1 %}
+{% tab 'Scala 2 Only' %}
+~~~ scala
+object Hello {
+ def message(): String = "Hello"
+}
+
+println(Hello.message) // In Scala 3, Migration Warning: method message must be called with () argument
+~~~
+{% endtab %}
+{% endtabs %}
+
+The [Scala 3 migration compilation](tooling-migration-mode.html) rewrites it into:
+{% highlight diff %}
+object Hello {
+ def message(): String = "Hello"
+}
+
+-println(Hello.message)
++println(Hello.message())
+{% endhighlight %}
+
+Auto-application is covered in detail in [this page]({{ site.scala3ref }}/dropped-features/auto-apply.html) of the Scala 3 reference documentation.
+
+## Value eta-expansion
+
+Scala 3 introduces [Automatic Eta-Expansion]({{ site.scala3ref }}/changed-features/eta-expansion-spec.html), which deprecates the method-to-value syntax `m _`.
+Furthermore, Scala 3 no longer allows eta-expansion of values to nullary functions.
+
+Thus, this piece of code is invalid in Scala 3:
+
+{% tabs scala-2-eta_expansion_1 %}
+{% tab 'Scala 2 Only' %}
+~~~ scala
+val x = 1
+val f: () => Int = x _ // In Scala 3, Migration Warning: The syntax `<function> _` is no longer supported;
+~~~
+{% endtab %}
+{% endtabs %}
+
+The [Scala 3 migration compilation](tooling-migration-mode.html) rewrites it into:
+{% highlight diff %}
+val x = 1
+-val f: () => Int = x _
++val f: () => Int = (() => x)
+{% endhighlight %}
+
+## `any2stringadd` conversion
+
+The implicit `Predef.any2stringadd` conversion is deprecated in Scala 2.13 and dropped in Scala 3.
+
+This piece of code does not compile anymore in Scala 3.
+
+{% tabs scala-2-any2stringadd_1 %}
+{% tab 'Scala 2 Only' %}
+~~~ scala
+val str = new AnyRef + "foo" // In Scala 3, Error: value + is not a member of Object
+~~~
+{% endtab %}
+{% endtabs %}
+
+The conversion to `String` must be applied explicitly, for instance with `String.valueOf`.
+{% highlight diff %}
+-val str = new AnyRef + "foo"
++val str = String.valueOf(new AnyRef) + "foo"
+{% endhighlight %}
+
+This rewrite can be applied by the `fix.scala213.Any2StringAdd` Scalafix rule in [`scala/scala-rewrites`](https://index.scala-lang.org/scala/scala-rewrites/scala-rewrites/0.1.2?target=_2.13).
+
+## Early Initializer
+
+Early initializers are deprecated in Scala 2.13 and dropped in Scala 3.
+They were rarely used, and mostly to compensate for the lack of [Trait parameters]({{ site.scala3ref }}/other-new-features/trait-parameters.html) which are now supported in Scala 3.
+
+That is why the following piece of code does not compile anymore in Scala 3.
+
+{% tabs scala-2-initializer_1 %}
+{% tab 'Scala 2 Only' %}
+~~~ scala
+trait Bar {
+ val name: String
+ val size: Int = name.size
+}
+
+object Foo extends {
+ val name = "Foo"
+} with Bar
+~~~
+{% endtab %}
+{% endtabs %}
+
+The Scala 3 compiler produces two error messages:
+
+{% highlight text %}
+-- Error: src/main/scala/early-initializer.scala:6:19
+6 |object Foo extends {
+ | ^
+ | `extends` must be followed by at least one parent
+{% endhighlight %}
+{% highlight text %}
+-- [E009] Syntax Error: src/main/scala/early-initializer.scala:8:2
+8 |} with Bar
+ | ^^^^
+ | Early definitions are not supported; use trait parameters instead
+{% endhighlight %}
+
+It suggests using trait parameters, which would give us:
+
+{% tabs scala-3-initializer_2 %}
+{% tab 'Scala 3 Only' %}
+~~~ scala
+trait Bar(name: String) {
+ val size: Int = name.size
+}
+
+object Foo extends Bar("Foo")
+~~~
+{% endtab %}
+{% endtabs %}
+
+Since trait parameters are not available in Scala 2.13, this solution does not cross-compile.
+If you need a cross-compiling solution, you can use an intermediate class that carries the early-initialized `val`s and `var`s as constructor parameters.
+
+{% tabs shared-initializer_4 %}
+{% tab 'Scala 2 and 3' %}
+~~~ scala
+abstract class BarEarlyInit(val name: String) extends Bar
+
+object Foo extends BarEarlyInit("Foo")
+~~~
+
+In the case of a class, it is also possible to use a secondary constructor with a fixed value, as shown below:
+~~~ scala
+class Fizz private (val name: String) extends Bar {
+ def this() = this("Fizz")
+}
+~~~
+{% endtab %}
+{% endtabs %}
+
+Another use case for early initializers in Scala 2 is private state in the subclass that is accessed (through an overridden method) by the constructor of the superclass:
+
+{% tabs scala-2-initializer_5 %}
+{% tab 'Scala 2 Only' %}
+~~~ scala
+class Adder {
+ var sum = 0
+ def add(x: Int): Unit = sum += x
+ add(1)
+}
+class LogAdder extends {
+ private var added: Set[Int] = Set.empty
+} with Adder {
+ override def add(x: Int): Unit = { added += x; super.add(x) }
+}
+~~~
+{% endtab %}
+{% endtabs %}
+
+This case can be refactored by moving the private state into a nested `object`, which is initialized on demand:
+
+{% tabs shared-initializer_6 %}
+{% tab 'Scala 2 and 3' %}
+~~~ scala
+class Adder {
+ var sum = 0
+ def add(x: Int): Unit = sum += x
+ add(1)
+}
+class LogAdder extends Adder {
+ private object state {
+ var added: Set[Int] = Set.empty
+ }
+ import state._
+ override def add(x: Int): Unit = { added += x; super.add(x) }
+}
+~~~
+{% endtab %}
+{% endtabs %}
+
+## Existential Type
+
+Existential types are a [dropped feature]({{ site.scala3ref }}/dropped-features/existential-types.html), which makes the following code invalid.
+
+{% tabs scala-2-existential_1 %}
+{% tab 'Scala 2 Only' %}
+~~~ scala
+def foo: List[Class[T]] forSome { type T } // In Scala 3, Error: Existential types are no longer supported
+~~~
+{% endtab %}
+{% endtabs %}
+
+> In Scala 2.13, existential types are a language feature that must be enabled explicitly, either with the import `scala.language.existentials` or with the `-language:existentials` compiler flag.
+
+In Scala 3, the proposed solution is to introduce an enclosing type that carries the dependent type:
+
+{% tabs shared-existential_1 %}
+{% tab 'Scala 2 and 3' %}
+~~~ scala
+trait Bar {
+ type T
+ val value: List[Class[T]]
+}
+
+def foo: Bar
+~~~
+{% endtab %}
+{% endtabs %}
+
+Note that using a wildcard argument, `_` or `?`, is often simpler but is not always possible.
+For instance you could replace `List[T] forSome { type T }` by `List[?]`.
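+
+For example, with a hypothetical `bar` method:
+
+{% highlight diff %}
+-def bar: List[T] forSome { type T }
++def bar: List[?]
+{% endhighlight %}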
+
+## Specialized
+
+The `@specialized` annotation from Scala 2 is ignored in Scala 3.
+
+However, there is limited support for specialized `Function` and `Tuple`.
+
+Similar benefits can often be derived from `inline` declarations, as illustrated below.
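+
+For example, the following Scala 3 snippet (the `combine` method is hypothetical, introduced only to illustrate the idea) lets the compiler expand the operation at each call site instead of going through a generic `Function2` value:
+
+{% tabs scala-3-specialized_1 %}
+{% tab 'Scala 3 Only' %}
+~~~ scala
+// Both the method and its `op` parameter are inline,
+// so the call and the passed lambda are expanded at the call site.
+inline def combine[A](x: A, y: A)(inline op: (A, A) => A): A =
+  op(x, y)
+
+val sum = combine(1, 2)(_ + _)      // expands to roughly 1 + 2
+val text = combine("a", "b")(_ + _) // expands to roughly "a" + "b"
+~~~
+{% endtab %}
+{% endtabs %}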
+
diff --git a/_overviews/scala3-migration/incompat-other-changes.md b/_overviews/scala3-migration/incompat-other-changes.md
new file mode 100644
index 0000000000..a7a003d8ec
--- /dev/null
+++ b/_overviews/scala3-migration/incompat-other-changes.md
@@ -0,0 +1,328 @@
+---
+title: Other Changed Features
+type: section
+description: This chapter details all incompatibilities caused by changed features
+num: 20
+previous-page: incompat-contextual-abstractions
+next-page: incompat-type-checker
+---
+
+Some other features are simplified or restricted to make the language easier, safer or more consistent.
+
+|Incompatibility|Scala 3 Migration Rewrite|
+|--- |--- |
+|[Inheritance shadowing](#inheritance-shadowing)|✅|
+|[Non-private constructor in private class](#non-private-constructor-in-private-class)|Migration Warning|
+|[Abstract override](#abstract-override)||
+|[Case class companion](#case-class-companion)||
+|[Explicit call to unapply](#explicit-call-to-unapply)||
+|[Invisible bean property](#invisible-bean-property)||
+|[`=>T` as type argument](#-t-as-type-argument)||
+|[Wildcard type argument](#wildcard-type-argument)||
+
+## Inheritance Shadowing
+
+An inherited member, from a parent trait or class, can shadow an identifier defined in an outer scope.
+That pattern is called inheritance shadowing.
+
+{% tabs shared-inheritance_1 %}
+{% tab 'Scala 2 and 3' %}
+~~~ scala
+object B {
+ val x = 1
+ class C extends A {
+ println(x)
+ }
+}
+~~~
+{% endtab %}
+{% endtabs %}
+
+In the preceding piece of code, the term `x` in `C` can refer either to the `x` member defined in the enclosing object `B` or to an `x` member of the parent class `A`.
+You cannot know which one until you look at the definition of `A`.
+
+This is known to be error-prone.
+
+That is why, in Scala 3, the compiler requires disambiguation if the parent class `A` actually has a member `x`.
+
+It prevents the following piece of code from compiling.
+{% tabs scala-2-inheritance_2 %}
+{% tab 'Scala 2 Only' %}
+~~~ scala
+class A {
+ val x = 2
+}
+
+object B {
+ val x = 1
+ class C extends A {
+ println(x)
+ }
+}
+~~~
+{% endtab %}
+{% endtabs %}
+
+If you try to compile it with Scala 3, you should see an error like the following:
+{% highlight text %}
+-- [E049] Reference Error: src/main/scala/inheritance-shadowing.scala:9:14
+9 | println(x)
+ | ^
+ | Reference to x is ambiguous,
+ | it is both defined in object B
+ | and inherited subsequently in class C
+{% endhighlight %}
+
+The [Scala 3 migration compilation](tooling-migration-mode.html) can automatically disambiguate the code by replacing `println(x)` with `println(this.x)`.
+
+## Non-private Constructor In Private Class
+
+The Scala 3 compiler requires the constructor of private classes to be private.
+
+For instance, in the example:
+{% tabs scala-2-constructor_1 %}
+{% tab 'Scala 2 Only' %}
+~~~ scala
+package foo
+
+private class Bar private[foo] () {}
+~~~
+{% endtab %}
+{% endtabs %}
+
+If you try to compile it with Scala 3, you should get the following error message:
+{% highlight text %}
+-- Error: /home/piquerez/scalacenter/scala-3-migration-guide/incompat/access-modifier/src/main/scala-2.13/access-modifier.scala:4:19
+4 | private class Bar private[foo] ()
+ | ^
+ | non-private constructor Bar in class Bar refers to private class Bar
+ | in its type signature (): foo.Foo.Bar
+{% endhighlight %}
+
+The [Scala 3 migration compilation](tooling-migration-mode.html) warns about this but no automatic rewrite is provided.
+
+The solution is to make the constructor private, since the class is private.
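+
+Applied to the example above, a possible fix is:
+
+{% highlight diff %}
+-private class Bar private[foo] () {}
++private class Bar private () {}
+{% endhighlight %}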
+
+## Abstract Override
+
+In Scala 3, overriding a concrete def with an abstract def causes subclasses to consider the def abstract, whereas in Scala 2 it was considered concrete.
+
+In the following piece of code, the `bar` method in `C` is considered concrete by the Scala 2.13 compiler but abstract by the Scala 3 compiler, causing the following error.
+{% tabs scala-2-abstract_1 %}
+{% tab 'Scala 2 Only' %}
+~~~ scala
+trait A {
+ def bar(x: Int): Int = x + 3
+}
+
+trait B extends A {
+ def bar(x: Int): Int
+}
+
+class C extends B // In Scala 3, Error: class C needs to be abstract, since def bar(x: Int): Int is not defined
+~~~
+{% endtab %}
+{% endtabs %}
+
+This behavior was decided in [Dotty issue #4770](https://github.com/scala/scala3/issues/4770).
+
+An easy fix is simply to remove the abstract def, since in practice it had no effect in Scala 2.
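+
+Applied to the example above, a possible fix looks like this:
+
+{% highlight diff %}
+ trait B extends A {
+-  def bar(x: Int): Int
+ }
+{% endhighlight %}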
+
+## Case Class Companion
+
+The companion object of a case class does not extend any of the `Function{0-23}` traits anymore.
+In particular, it does not inherit their methods: `tupled`, `curried`, `andThen`, `compose`...
+
+For instance, this is not permitted anymore:
+{% tabs scala-2-companion_1 %}
+{% tab 'Scala 2 Only' %}
+~~~ scala
+case class Foo(x: Int, b: Boolean)
+
+Foo.curried(1)(true)
+Foo.tupled((2, false))
+~~~
+{% endtab %}
+{% endtabs %}
+
+A cross-compiling solution is to explicitly eta-expand the method `Foo.apply`.
+{% highlight diff %}
+
+-Foo.curried(1)(true)
++(Foo.apply _).curried(1)(true)
+
+-Foo.tupled((2, false))
++(Foo.apply _).tupled((2, false))
+{% endhighlight %}
+
+Or, for performance reasons, you can introduce an intermediate function value.
+{% tabs scala-3-companion_2 %}
+{% tab 'Scala 2 and 3' %}
+~~~ scala
+val fooCtr: (Int, Boolean) => Foo = (x, b) => Foo(x, b)
+
+fooCtr.curried(1)(true)
+fooCtr.tupled((2, false))
+~~~
+{% endtab %}
+{% endtabs %}
+
+## Explicit Call to `unapply`
+
+In Scala, case classes have an auto-generated extractor method, called `unapply`, in their companion object.
+Its signature has changed between Scala 2.13 and Scala 3.
+
+The new signature is option-less (see the new [Pattern Matching]({{ site.scala3ref }}/changed-features/pattern-matching.html) reference), which causes an incompatibility when `unapply` is called explicitly.
+
+Note that this problem does not affect user-defined extractors, whose signature stays the same across Scala versions.
+
+Given the following case class definition:
+{% tabs shared-unapply_1 %}
+{% tab 'Scala 2 and 3' %}
+~~~ scala
+case class Location(lat: Double, long: Double)
+~~~
+{% endtab %}
+{% endtabs %}
+
+The Scala 2.13 compiler generates the following `unapply` method:
+{% tabs scala-2-unapply_2 %}
+{% tab 'Scala 2 Only' %}
+~~~ scala
+object Location {
+ def unapply(location: Location): Option[(Double, Double)] = Some((location.lat, location.long))
+}
+~~~
+{% endtab %}
+{% endtabs %}
+
+Whereas the Scala 3 compiler generates:
+{% tabs scala-3-unapply_2 %}
+{% tab 'Scala 3 Only' %}
+~~~ scala
+object Location {
+ def unapply(location: Location): Location = location
+}
+~~~
+{% endtab %}
+{% endtabs %}
+
+Consequently the following code does not compile anymore in Scala 3.
+{% tabs scala-2-unapply_3 %}
+{% tab 'Scala 2 Only' %}
+~~~ scala
+def tuple(location: Location): (Double, Double) = {
+ Location.unapply(location).get // [E008] In Scala 3, Not Found Error: value get is not a member of Location
+}
+~~~
+{% endtab %}
+{% endtabs %}
+
+A possible solution, in Scala 3, is to use pattern binding:
+
+{% highlight diff %}
+def tuple(location: Location): (Double, Double) = {
+- Location.unapply(location).get
++ val Location(lat, lon) = location
++ (lat, lon)
+}
+{% endhighlight %}
+
+## Invisible Bean Property
+
+The getter and setter methods generated by the `BeanProperty` annotation are now invisible in Scala 3 because their primary use case is interoperability with Java frameworks.
+
+For instance, the below Scala 2 code would fail to compile in Scala 3:
+{% tabs scala-2-bean_1 %}
+{% tab 'Scala 2 Only' %}
+~~~ scala
+import scala.beans.BeanProperty
+
+class Pojo() {
+ @BeanProperty var fooBar: String = ""
+}
+
+val pojo = new Pojo()
+
+pojo.setFooBar("hello") // [E008] In Scala 3, Not Found Error: value setFooBar is not a member of Pojo
+
+println(pojo.getFooBar()) // [E008] In Scala 3, Not Found Error: value getFooBar is not a member of Pojo
+~~~
+{% endtab %}
+{% endtabs %}
+
+In Scala 3, the solution is to call the more idiomatic `pojo.fooBar` getter and setter.
+
+{% highlight diff %}
+val pojo = new Pojo()
+
+-pojo.setFooBar("hello")
++pojo.fooBar = "hello"
+
+-println(pojo.getFooBar())
++println(pojo.fooBar)
+{% endhighlight %}
+
+## `=> T` as Type Argument
+
+A type of the form `=> T` cannot be used as an argument to a type parameter anymore.
+
+This decision is explained in [this comment](https://github.com/scala/scala3/blob/0f1a23e008148f76fd0a1c2991b991e1dad600e8/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala#L144-L152) of the Scala 3 source code.
+
+For instance, it is not allowed to pass a function of type `Int => (=> Int) => Int` to the `uncurried` method since it would assign `=> Int` to the type parameter `T2`.
+
+{% highlight text %}
+-- [E134] Type Mismatch Error: src/main/scala/by-name-param-type-infer.scala:3:41
+3 | val g: (Int, => Int) => Int = Function.uncurried(f)
+ | ^^^^^^^^^^^^^^^^^^
+ |None of the overloaded alternatives of method uncurried in object Function with types
+ | [T1, T2, T3, T4, T5, R]
+ | (f: T1 => T2 => T3 => T4 => T5 => R): (T1, T2, T3, T4, T5) => R
+ | [T1, T2, T3, T4, R](f: T1 => T2 => T3 => T4 => R): (T1, T2, T3, T4) => R
+ | [T1, T2, T3, R](f: T1 => T2 => T3 => R): (T1, T2, T3) => R
+ | [T1, T2, R](f: T1 => T2 => R): (T1, T2) => R
+ |match arguments ((Test.f : Int => (=> Int) => Int))
+{% endhighlight %}
+
+The solution depends on the situation. In the given example, you can either:
+ - define your own `uncurried` method with the appropriate signature
+ - inline the implementation of `uncurried` locally
+
+## Wildcard Type Argument
+
+Scala 3 cannot reduce the application of a higher-kinded abstract type member to the wildcard argument.
+
+For instance, the below Scala 2 code would fail to compile in Scala 3:
+{% tabs scala-2-wildcard_1 %}
+{% tab 'Scala 2 Only' %}
+~~~ scala
+trait Example {
+ type Foo[A]
+
+ def f(foo: Foo[_]): Unit // [E043] In Scala 3, Type Error: unreducible application of higher-kinded type Example.this.Foo to wildcard arguments
+}
+~~~
+{% endtab %}
+{% endtabs %}
+
+We can fix this by using a type parameter:
+
+{% highlight diff %}
+-def f(foo: Foo[_]): Unit
++def f[A](foo: Foo[A]): Unit
+{% endhighlight %}
+
+But this simple solution does not work when `Foo` is itself used as a type argument.
+{% tabs scala-2-wildcard_2 %}
+{% tab 'Scala 2 Only' %}
+~~~ scala
+def g(foos: Seq[Foo[_]]): Unit
+~~~
+{% endtab %}
+{% endtabs %}
+
+In such a case, we can use a wrapper class around `Foo`:
+
+{% highlight diff %}
++class FooWrapper[A](foo: Foo[A])
+
+-def g(foos: Seq[Foo[_]]): Unit
++def g(foos: Seq[FooWrapper[_]]): Unit
+{% endhighlight %}
\ No newline at end of file
diff --git a/_overviews/scala3-migration/incompat-syntactic.md b/_overviews/scala3-migration/incompat-syntactic.md
new file mode 100644
index 0000000000..0e88e2b034
--- /dev/null
+++ b/_overviews/scala3-migration/incompat-syntactic.md
@@ -0,0 +1,239 @@
+---
+title: Syntactic Changes
+type: section
+description: This chapter details all the incompatibilities caused by syntactic changes
+num: 17
+previous-page: incompatibility-table
+next-page: incompat-dropped-features
+---
+
+Scala 3 introduces the optional-braces syntax and the new control structure syntax.
+This comes at the cost of some minimal restrictions in the preexisting syntax.
+
+Other syntactic changes are intended to make the syntax less surprising and more consistent.
+
+It is worth noting that most of the changes can be automatically handled during the [Scala 3 migration compilation](tooling-migration-mode.html).
+
+|Incompatibility|Scala 2.13|Scala 3 Migration Rewrite|Scalafix Rule|
+|--- |--- |--- |--- |
+|[Restricted keywords](#restricted-keywords)||✅||
+|[Procedure syntax](#procedure-syntax)|Deprecation|✅|[✅](https://scalacenter.github.io/scalafix/docs/rules/ProcedureSyntax.html)|
+|[Parentheses around lambda parameter](#parentheses-around-lambda-parameter)||✅||
+|[Open brace indentation for passing an argument](#open-brace-indentation-for-passing-an-argument)||✅||
+|[Wrong indentation](#wrong-indentation)||||
+|[`_` as a type parameter](#_-as-a-type-parameter)||||
+|[`+` and `-` as type parameters](#-and---as-type-parameters)||||
+
+## Restricted Keywords
+
+The list of Scala 3 keywords can be found [here](https://dotty.epfl.ch/docs/internals/syntax.html#keywords).
+_Regular_ keywords cannot be used as identifiers, whereas _soft_ keywords are not restricted.
+
+When migrating from Scala 2.13 to Scala 3, only the new _regular_ keywords are problematic.
+They are:
+- `enum`
+- `export`
+- `given`
+- `then`
+- `=>>`
+- `?=>`
+
+{% tabs scala-2-keywords_1 %}
+{% tab 'Scala 2 Only' %}
+
+For instance, the following piece of code can be compiled with Scala 2.13 but not with Scala 3.
+~~~ scala
+object given { // In Scala 3, Error: given is now a keyword.
+ val enum = ??? // In Scala 3, Error: enum is now a keyword.
+
+ println(enum) // In Scala 3, Error: enum is now a keyword.
+}
+~~~
+{% endtab %}
+{% endtabs %}
+
+The [Scala 3 migration compilation](tooling-migration-mode.html) rewrites the code into:
+{% highlight diff %}
+-object given {
++object `given` {
+- val enum = ???
++ val `enum` = ???
+
+- println(enum)
++ println(`enum`)
+ }
+{% endhighlight %}
+
+## Procedure Syntax
+
+Procedure syntax has been deprecated for a while and it is dropped in Scala 3.
+
+{% tabs scala-2-procedure_1 %}
+{% tab 'Scala 2 Only' %}
+
+The following piece of code is now illegal:
+~~~ scala
+object Bar {
+ def print() { // In Scala 3, Error: Procedure syntax no longer supported; `: Unit =` should be inserted here.
+ println("bar")
+ }
+}
+~~~
+{% endtab %}
+{% endtabs %}
+
+The [Scala 3 migration compilation](tooling-migration-mode.html) rewrites the code into:
+{% highlight diff %}
+ object Bar {
+- def print() {
++ def print(): Unit = {
+ println("bar")
+ }
+ }
+{% endhighlight %}
+
+## Parentheses Around Lambda Parameter
+
+When followed by its type, the parameter of a lambda is now required to be enclosed in parentheses.
+The following piece of code is invalid.
+
+{% tabs scala-2-lambda_1 %}
+{% tab 'Scala 2 Only' %}
+~~~ scala
+val f = { x: Int => x * x } // In Scala 3, Error: parentheses are required around the parameter of a lambda.
+~~~
+{% endtab %}
+{% endtabs %}
+
+The [Scala 3 migration compilation](tooling-migration-mode.html) rewrites the code into:
+{% highlight diff %}
+-val f = { x: Int => x * x }
++val f = { (x: Int) => x * x }
+{% endhighlight %}
+
+## Open Brace Indentation For Passing An Argument
+
+In Scala 2 it is possible to pass an argument on a new line by enclosing it in braces.
+Although valid, this style of coding is not encouraged by the [Scala style guide](https://docs.scala-lang.org/style) and is no longer supported in Scala 3.
+
+{% tabs scala-2-brace_1 %}
+{% tab 'Scala 2 Only' %}
+~~~ scala
+test("my test")
+{ // In Scala 3, Error: This opening brace will start a new statement.
+ assert(1 == 1)
+}
+~~~
+{% endtab %}
+{% endtabs %}
+
+The [Scala 3 migration compiler](tooling-migration-mode.html) indents the first line of the block.
+{% highlight diff %}
+ test("my test")
+-{
++ {
+ assert(1 == 1)
+ }
+{% endhighlight %}
+
+This migration rule applies to other patterns as well, such as refining a type after a new line.
+
+{% highlight diff %}
+ type Bar = Foo
+-{
++ {
+ def bar(): Int
+ }
+{% endhighlight %}
+
+A preferable solution is to write:
+{% highlight diff %}
+-test("my test")
+-{
++test("my test") {
+ assert(1 == 1)
+ }
+{% endhighlight %}
+
+## Wrong indentation
+
+The Scala 3 compiler now requires correct indentation.
+The following piece of code, which compiled with Scala 2.13, no longer compiles because of its indentation.
+
+{% tabs scala-2-indentation_1 %}
+{% tab 'Scala 2 Only' %}
+
+~~~ scala
+def bar: (Int, Int) = {
+ val foo = 1.0
+ val bar = foo // [E050] In Scala 3, type Error: value foo does not take parameters.
+ (1, 1)
+} // [E007] In Scala 3, type Mismatch Error: Found Unit, Required (Int, Int).
+~~~
+{% endtab %}
+{% endtabs %}
+
+The indentation must be fixed.
+{% highlight diff %}
+ def bar: (Int, Int) = {
+ val foo = 1.0
+ val bar = foo
+- (1, 1)
++ (1, 1)
+ }
+{% endhighlight %}
+
+These errors can be prevented by using a Scala formatting tool such as [scalafmt](https://scalameta.org/scalafmt/) or the [IntelliJ Scala formatter](https://www.jetbrains.com/help/idea/reformat-and-rearrange-code.html).
+Beware that these tools may change the entire code style of your project.
+
+## `_` As A Type Parameter
+
+Using the `_` identifier as a type parameter is permitted in Scala 2.13, even though it is never mentioned in the Scala 2 specification.
+It is used in the API of [fastparse](https://index.scala-lang.org/lihaoyi/fastparse), in combination with a context bound, to declare an implicit parameter.
+
+{% tabs scala-2-identifier_1 %}
+{% tab 'Scala 2 Only' %}
+~~~ scala
+def foo[_: Foo]: Unit = ???
+~~~
+{% endtab %}
+{% endtabs %}
+
+Here, the method `foo` takes a type parameter `_` and an implicit parameter of type `Foo[_]` where `_` refers to the type parameter, not the wildcard symbol.
+
+Martin Odersky described this pattern as a "clever exploit of a scalac compiler bug" ([source](https://www.reddit.com/r/scala/comments/fczcvo/mysterious_context_bounds_in_fastparse_2/fjecokn/)).
+
+The Scala 3 compiler does not permit this pattern anymore:
+
+{% highlight text %}
+-- [E040] Syntax Error: src/main/scala/anonymous-type-param.scala:4:10
+4 | def foo[_: Foo]: Unit = ()
+ | ^
+ | an identifier expected, but '_' found
+{% endhighlight %}
+
+The solution is to give the parameter a valid identifier name, for instance `T`.
+This does not break binary compatibility.
+
+{% highlight diff %}
+-def foo[_: Foo]: Unit = ???
++def foo[T: Foo]: Unit = ???
+{% endhighlight %}
+
+## `+` And `-` As Type Parameters
+
+`+` and `-` are not valid identifiers for type parameters in Scala 3, since they are reserved for variance annotations.
+
+You cannot write `def foo[+]` or `def foo[-]` anymore.
+
+{% highlight text %}
+-- Error: src/main/scala/type-param-identifier.scala:2:10
+2 | def foo[+]: +
+ | ^
+ | no `+/-` variance annotation allowed here
+{% endhighlight %}
+
+The solution is to choose another valid identifier, for instance `T`.
+
+However, `+` and `-` are still valid type identifiers in general.
+You can write `type +`.
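+
+For instance, this minimal sketch (the `+` type alias is hypothetical, defined only for illustration) works in both Scala 2.13 and Scala 3:
+
+{% tabs shared-type-param-identifier_2 %}
+{% tab 'Scala 2 and 3' %}
+~~~ scala
+type +[A, B] = Either[A, B]
+
+val result: +[Int, String] = Right("ok")
+~~~
+{% endtab %}
+{% endtabs %}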
diff --git a/_overviews/scala3-migration/incompat-type-checker.md b/_overviews/scala3-migration/incompat-type-checker.md
new file mode 100644
index 0000000000..41afc5ebc7
--- /dev/null
+++ b/_overviews/scala3-migration/incompat-type-checker.md
@@ -0,0 +1,130 @@
+---
+title: Type Checker
+type: section
+description: This chapter details the unsoundness fixes in the type checker
+num: 21
+previous-page: incompat-other-changes
+next-page: incompat-type-inference
+---
+
+The Scala 2.13 type checker is unsound in some specific cases.
+This can lead to surprising runtime errors in places we would not expect.
+Since Scala 3 is based on stronger theoretical foundations, these unsoundness bugs in the type checker are now fixed.
+
+## Unsoundness Fixes in Variance checks
+
+In Scala 2, default parameters and inner classes are not subject to variance checks.
+This is unsound and might cause runtime failures, as demonstrated by this [test](https://github.com/scala/scala3/blob/10526a7d0aa8910729b6036ee51942e05b71abf6/tests/neg/variances.scala) in the Scala 3 repository.
+
+The Scala 3 compiler does not permit this anymore.
+
+{% tabs scala-2-unsound_vc_1 %}
+{% tab 'Scala 2 Only' %}
+~~~ scala
+class Foo[-A](x: List[A]) {
+ def f[B](y: List[B] = x): Unit = ???
+}
+
+class Outer[+A](x: A) {
+ class Inner(y: A)
+}
+~~~
+{% endtab %}
+{% endtabs %}
+
+If you compile this with Scala 3, you will get the following error:
+{% highlight text %}
+-- Error: src/main/scala/variance.scala:2:8
+2 | def f[B](y: List[B] = x): Unit = y
+ | ^^^^^^^^^^^^^^^^^
+ |contravariant type A occurs in covariant position in type [B] => List[A] of method f$default$1
+-- Error: src/main/scala/variance.scala:6:14
+6 | class Inner(y: A)
+ | ^^^^
+ |covariant type A occurs in contravariant position in type A of parameter y
+{% endhighlight %}
+
+Each problem of this kind needs specific care.
+You can try the following options on a case-by-case basis:
+- Make type `A` invariant
+- Add a lower or an upper bound on a type parameter `B`
+- Add a new method overload
+
+In our example, we can opt for these two solutions:
+
+{% highlight diff %}
+class Foo[-A](x: List[A]) {
+- def f[B](y: List[B] = x): Unit = ???
++ def f[B](y: List[B]): Unit = ???
++ def f(): Unit = f(x)
+}
+
+class Outer[+A](x: A) {
+- class Inner(y: A)
++ class Inner[B >: A](y: B)
+}
+{% endhighlight %}
+
+Or, as a temporary solution, you can use the `uncheckedVariance` annotation (from `scala.annotation.unchecked`):
+
+{% highlight diff %}
+class Outer[+A](x: A) {
+- class Inner(y: A)
++ class Inner(y: A @uncheckedVariance)
+}
+{% endhighlight %}
+
+## Unsoundness Fixes in Pattern Matching
+
+Scala 3 fixes some unsoundness bugs in pattern matching, preventing some semantically wrong match expressions from type checking.
+
+For instance, the match expression in `combineReq` can be compiled with Scala 2.13 but not with Scala 3.
+
+{% tabs scala-2-unsound_pm_1 %}
+{% tab 'Scala 2 Only' %}
+~~~ scala
+trait Request
+case class Fetch[A](ids: Set[A]) extends Request
+
+object Request {
+ def combineFetch[A](x: Fetch[A], y: Fetch[A]): Fetch[A] = Fetch(x.ids ++ y.ids)
+
+ def combineReq(x: Request, y: Request): Request = {
+ (x, y) match {
+ case (x @ Fetch(_), y @ Fetch(_)) => combineFetch(x, y)
+ }
+ }
+}
+~~~
+{% endtab %}
+{% endtabs %}
+
+In Scala 3, the error message is:
+
+{% highlight text %}
+-- [E007] Type Mismatch Error: src/main/scala/pattern-match.scala:9:59
+9 | case (x @ Fetch(_), y @ Fetch(_)) => combineFetch(x, y)
+ | ^
+ | Found: (y : Fetch[A$2])
+ | Required: Fetch[A$1]
+{% endhighlight %}
+
+
+The compiler is right: there is no proof that `x` and `y` have the same type parameter `A`.
+
+Coming from Scala 2, this is clearly an improvement to help us locate mistakes in our code.
+To solve this incompatibility it is better to find a solution that can be checked by the compiler.
+It is not always easy, and sometimes it is not even possible, in which case the code is likely to fail at runtime.
+
+In this example, we can relax the constraint on `x` and `y` by stating that `A` is a common ancestor of both type arguments.
+This makes the compiler type-check the code successfully.
+{% tabs shared-unsound_pm_2 %}
+{% tab 'Scala 2 and 3' %}
+~~~ scala
+def combineFetch[A](x: Fetch[_ <: A], y: Fetch[_ <: A]): Fetch[A] = Fetch(x.ids ++ y.ids)
+~~~
+{% endtab %}
+{% endtabs %}
+
+Alternatively, a general but unsafe solution is to cast.
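+
+For instance, a sketch of the cast-based version of `combineReq`, reusing the `Request`, `Fetch`, and `combineFetch` definitions from the first example (it compiles in both Scala 2.13 and Scala 3, but gives up the type guarantee):
+
+{% tabs shared-unsound_pm_3 %}
+{% tab 'Scala 2 and 3' %}
+~~~ scala
+def combineReq(x: Request, y: Request): Request =
+  (x, y) match {
+    case (x @ Fetch(_), y @ Fetch(_)) =>
+      // unsafe: we assert that both Fetch instances use the same id type
+      combineFetch(x.asInstanceOf[Fetch[Any]], y.asInstanceOf[Fetch[Any]])
+  }
+~~~
+{% endtab %}
+{% endtabs %}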
+
diff --git a/_overviews/scala3-migration/incompat-type-inference.md b/_overviews/scala3-migration/incompat-type-inference.md
new file mode 100644
index 0000000000..bb6fc3052a
--- /dev/null
+++ b/_overviews/scala3-migration/incompat-type-inference.md
@@ -0,0 +1,107 @@
+---
+title: Type Inference
+type: section
+description: This chapter details the incompatibilities caused by the new type inference algorithm
+num: 22
+previous-page: incompat-type-checker
+next-page: options-intro
+---
+
+The two incompatibilities described on this page are intentional changes in the type inference rules.
+
+Other incompatibilities could be caused by the replacement of the type inference algorithm.
+The new algorithm is better than the old one, but sometimes it can fail where Scala 2.13 would succeed:
+
+> It is always good practice to write the result types of all public values and methods explicitly.
+> It prevents the public API of your library from changing with the Scala version, because of different inferred types.
+>
+> This can be done prior to the Scala 3 migration by using the [ExplicitResultTypes](https://scalacenter.github.io/scalafix/docs/rules/ExplicitResultTypes.html) rule in Scalafix.
+
+## Return Type of an Override Method
+
+In Scala 3, the return type of an override method is inferred by inheritance from the base method, whereas in Scala 2.13 it is inferred from the right-hand side of the override method.
+
+{% tabs define_parent_child %}
+{% tab 'Scala 2 and 3' %}
+```scala
+class Foo
+
+class RichFoo(foo: Foo) extends Foo {
+ def show: String = ""
+}
+
+class Parent {
+ def foo: Foo = new Foo
+}
+
+class Child extends Parent {
+ override def foo = new RichFoo(super.foo)
+}
+```
+{% endtab %}
+{% endtabs %}
+
+In this example, the inferred return type of `Child#foo` is `RichFoo` in Scala 2.13 but `Foo` in Scala 3.
+It can lead to compiler errors as demonstrated below.
+
+{% tabs extend_parent_child %}
+{% tab 'Scala 3 Only' %}
+```scala
+(new Child).foo.show // Scala 3 error: value show is not a member of Foo
+```
+{% endtab %}
+{% endtabs %}
+
+In some rare cases involving implicit conversions and runtime casting it could even cause a runtime failure.
+
+The solution is to make the return type of the override method explicit so that it matches what is inferred in 2.13:
+
+{% highlight diff %}
+class Child extends Parent {
+- override def foo = new RichFoo(super.foo)
++ override def foo: RichFoo = new RichFoo(super.foo)
+}
+{% endhighlight %}
+
+## Reflective Type
+
+Scala 2 reflective calls are dropped and replaced by the broader [Programmatic Structural Types]({{ site.scala3ref }}/changed-features/structural-types.html).
+
+Scala 3 can imitate Scala 2 reflective calls by making `scala.reflect.Selectable.reflectiveSelectable` available wherever `scala.language.reflectiveCalls` is imported.
+
+{% tabs define_structural %}
+{% tab 'Scala 2 and 3' %}
+```scala
+import scala.language.reflectiveCalls
+
+val foo = new {
+ def bar: Unit = ???
+}
+```
+{% endtab %}
+{% endtabs %}
+
+However, the Scala 3 compiler does not infer structural types by default.
+It infers the type `Object` for `foo` instead of `{ def bar: Unit }`.
+Therefore, the following structural selection fails to compile:
+
+{% tabs use_structural %}
+{% tab 'Scala 3 Only' %}
+```scala
+foo.bar // Error: value bar is not a member of Object
+```
+{% endtab %}
+{% endtabs %}
+
+The straightforward solution is to explicitly write down the structural type.
+
+{% highlight diff %}
+import scala.language.reflectiveCalls
+
+- val foo = new {
++ val foo: { def bar: Unit } = new {
+ def bar: Unit = ???
+}
+
+foo.bar
+{% endhighlight %}
diff --git a/_overviews/scala3-migration/incompatibility-table.md b/_overviews/scala3-migration/incompatibility-table.md
new file mode 100644
index 0000000000..9fc9f8bf18
--- /dev/null
+++ b/_overviews/scala3-migration/incompatibility-table.md
@@ -0,0 +1,126 @@
+---
+title: Incompatibility Table
+type: chapter
+description: This chapter list all the known incompatibilities between Scala 2.13 and Scala 3
+num: 16
+previous-page: tooling-syntax-rewriting
+next-page: incompat-syntactic
+---
+
+An incompatibility is a piece of code that can be compiled with Scala 2.13 but not with Scala 3.
+Migrating a codebase involves finding and fixing all the incompatibilities of the source code.
+On rare occasions we can also have a runtime incompatibility: a piece of code that behaves differently at runtime.
+
+On this page we propose a classification of the known incompatibilities.
+Each incompatibility is described by:
+ - Its short name with a link towards the detailed description and proposed solutions
+ - Whether the Scala 2.13 compiler emits a deprecation or a feature warning
+ - The existence of a [Scala 3 migration](tooling-migration-mode.html) rule for it
+ - The existence of a Scalafix rule that can fix it
+
+> #### Scala 2.13 deprecations and feature warnings
+> Run the 2.13 compilation with `-Xsource:3` to locate those incompatibilities in the code.
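+> For instance, with sbt (assuming sbt is your build tool):
+> ~~~ scala
+> // build.sbt, while still on Scala 2.13
+> scalacOptions += "-Xsource:3"
+> ~~~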
+
+> #### Scala 3 migration versus Scalafix rewrites
+> The Scala 3 migration mode comes out-of-the-box.
+> In contrast, Scalafix is a tool that must be installed and configured manually.
+> However Scalafix has its own advantages:
+> - It runs on Scala 2.13.
+> - It is composed of individual rules that you can apply one at a time.
+> - It is easily extensible by adding custom rules.
+
+### Syntactic Changes
+
+Some of the old syntax is not supported anymore.
+
+|Incompatibility|Scala 2.13|Scala 3 Migration Rewrite|Scalafix Rule|
+|--- |--- |--- |--- |
+|[Restricted keywords](incompat-syntactic.html#restricted-keywords)||✅||
+|[Procedure syntax](incompat-syntactic.html#procedure-syntax)|Deprecation|✅|[✅](https://scalacenter.github.io/scalafix/docs/rules/ProcedureSyntax.html)|
+|[Parentheses around lambda parameter](incompat-syntactic.html#parentheses-around-lambda-parameter)||✅||
+|[Open brace indentation for passing an argument](incompat-syntactic.html#open-brace-indentation-for-passing-an-argument)||✅||
+|[Wrong indentation](incompat-syntactic.html#wrong-indentation)||||
+|[`_` as a type parameter](incompat-syntactic.html#_-as-a-type-parameter)||||
+|[`+` and `-` as type parameters](incompat-syntactic.html#-and---as-type-parameters)||||
+
+### Dropped Features
+
+Some features are dropped to simplify the language.
+
+|Incompatibility|Scala 2.13|Scala 3 Migration Rewrite|Scalafix Rule|
+|--- |--- |--- |--- |
+|[Symbol literals](incompat-dropped-features.html#symbol-literals)|Deprecation|✅||
+|[`do`-`while` construct](incompat-dropped-features.html#do-while-construct)||✅||
+|[Auto-application](incompat-dropped-features.html#auto-application)|Deprecation|✅|[✅](https://github.com/scala/scala-rewrites/blob/main/rewrites/src/main/scala/fix/scala213/ExplicitNonNullaryApply.scala)|
+|[Value eta-expansion](incompat-dropped-features.html#value-eta-expansion)|Deprecation|✅|[✅](https://github.com/scala/scala-rewrites/blob/main/rewrites/src/main/scala/fix/scala213/ExplicitNullaryEtaExpansion.scala)|
+|[`any2stringadd` conversion](incompat-dropped-features.html#any2stringadd-conversion)|Deprecation||[✅](https://github.com/scala/scala-rewrites/blob/main/rewrites/src/main/scala/fix/scala213/Any2StringAdd.scala)|
+|[Early initializer](incompat-dropped-features.html#early-initializer)|Deprecation|||
+|[Existential type](incompat-dropped-features.html#existential-type)|Feature warning|||
+
+### Contextual Abstractions
+
+The redesign of [contextual abstractions]({{ site.scala3ref }}/contextual) brings some well defined incompatibilities.
+
+|Incompatibility|Scala 2.13|Scala 3 Migration Rewrite|Scalafix Rule|Runtime Incompatibility|
+|--- |--- |--- |--- |--- |
+|[Type of implicit def](incompat-contextual-abstractions.html#type-of-implicit-definition)|||[✅](https://scalacenter.github.io/scalafix/docs/rules/ExplicitResultTypes.html)||
+|[Implicit views](incompat-contextual-abstractions.html#implicit-views)||||**Possible**|
+|[View bounds](incompat-contextual-abstractions.html#view-bounds)|Deprecation||||
+|[Ambiguous conversion on `A` and `=> A`](incompat-contextual-abstractions.html#ambiguous-conversion-on-a-and--a)|||||
+
+Furthermore we have changed the implicit resolution rules so that they are more useful and less surprising.
+The new rules are described [here]({{ site.scala3ref }}/changed-features/implicit-resolution.html).
+
+Because of these changes, the Scala 3 compiler may fail to resolve some implicit parameters of existing Scala 2.13 code.
+
+### Other Changed Features
+
+Some other features are simplified or restricted to make the language easier, safer or more consistent.
+
+|Incompatibility|Scala 3 Migration Rewrite|
+|--- |--- |
+|[Inheritance shadowing](incompat-other-changes.html#inheritance-shadowing)|✅|
+|[Non-private constructor in private class](incompat-other-changes.html#non-private-constructor-in-private-class)|Migration Warning|
+|[Abstract override](incompat-other-changes.html#abstract-override)||
+|[Case class companion](incompat-other-changes.html#case-class-companion)||
+|[Explicit call to unapply](incompat-other-changes.html#explicit-call-to-unapply)||
+|[Invisible bean property](incompat-other-changes.html#invisible-bean-property)||
+|[`=>T` as type argument](incompat-other-changes.html#-t-as-type-argument)||
+|[Wildcard type argument](incompat-other-changes.html#wildcard-type-argument)||
+
+### Type Checker
+
+The Scala 2.13 type checker is unsound in some specific cases.
+This can lead to surprising runtime errors in places we would not expect.
+Since Scala 3 is based on stronger theoretical foundations, these unsoundness bugs in the type checker are now fixed.
+
+|Incompatibility|
+|--- |
+|[Variance checks](incompat-type-checker.html#unsoundness-fixes-in-variance-checks)|
+|[Pattern matching](incompat-type-checker.html#unsoundness-fixes-in-pattern-matching)|
+
+### Type Inference
+
+Some specific type inference rules have changed between Scala 2.13 and Scala 3.
+
+|Incompatibility|
+|--- |
+|[Return type of override method](incompat-type-inference.html#return-type-of-an-override-method)|
+|[Reflective type](incompat-type-inference.html#reflective-type)|
+
+We have also improved the type inference algorithm by redesigning it entirely.
+This fundamental change leads to a few incompatibilities:
+- A different type can be inferred
+- A new type-checking error can appear
+
+> It is always good practice to write the result types of all public values and methods explicitly.
+> It prevents the public API of your library from changing with the Scala version, because of different inferred types.
+>
+> This can be done prior to the Scala 3 migration by using the [ExplicitResultTypes](https://scalacenter.github.io/scalafix/docs/rules/ExplicitResultTypes.html) rule in Scalafix.
+
+### Macros
+
+The Scala 3 compiler cannot expand Scala 2.13 macros.
+Such macros must be re-implemented using the new Scala 3 metaprogramming features.
+
+You can go back to the [Metaprogramming](compatibility-metaprogramming.html) page to learn about the new metaprogramming features.
diff --git a/_overviews/scala3-migration/options-intro.md b/_overviews/scala3-migration/options-intro.md
new file mode 100644
index 0000000000..9fc2d04d48
--- /dev/null
+++ b/_overviews/scala3-migration/options-intro.md
@@ -0,0 +1,21 @@
+---
+title: Compiler Options
+type: chapter
+description: This chapter shows the difference between Scala 2.13 and Scala 3 compiler options
+num: 23
+previous-page: incompat-type-inference
+next-page: options-lookup
+---
+
+The Scala 3 compiler has been rewritten from the ground up and consequently it does not offer the same options as the Scala 2.13 compiler.
+Some options are available under a different name; others have simply not been implemented yet.
+
+When porting a Scala 2.13 project to Scala 3, you will need to adapt the list of compiler options.
+To do so you can refer to the [Lookup Table](options-lookup.html).
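+
+For instance, with sbt (assuming sbt is your build tool; the option lists below are only illustrative), the options can be selected per Scala version:
+
+{% highlight scala %}
+// build.sbt: pick compiler options depending on the Scala version
+scalacOptions ++= {
+  CrossVersion.partialVersion(scalaVersion.value) match {
+    case Some((3, _)) => Seq("-explain-types")
+    case _            => Seq("-explaintypes")
+  }
+}
+{% endhighlight %}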
+
+> Passing an unavailable option to the Scala 3 compiler does not make it fail.
+> It just prints a warning and ignores the option.
+
+You can also discover the new Scala 3 compiler options, which have no equivalent in Scala 2.13, in the [New Compiler Options](options-new.html) page.
+
+For the Scaladoc settings reference and their compatibility with the Scala 2 Scaladoc, read the [Scaladoc settings compatibility between Scala2 and Scala3](scaladoc-settings-compatibility.html) page.
diff --git a/_overviews/scala3-migration/options-lookup.md b/_overviews/scala3-migration/options-lookup.md
new file mode 100644
index 0000000000..36db00ad8e
--- /dev/null
+++ b/_overviews/scala3-migration/options-lookup.md
@@ -0,0 +1,265 @@
+---
+title: Compiler Options Lookup Table
+type: section
+description: This section contains the compiler options lookup tables
+num: 24
+previous-page: options-intro
+next-page: options-new
+---
+
+This table lists the Scala 2.13 compiler options with their equivalent in Scala 3.
+Some options have cross-version support, such as `-Vprint`.
+Others have a close equivalent with a different name. A number of Scala 2 options
+have no equivalent in Scala 3, such as options for debugging Scala 2 macros.
+
+The compiler options are shown as displayed by the help output `scalac -help`, `scalac -X`, etc.
+A few aliases are shown here, but most older aliases, such as `-Xprint` for `-Vprint`,
+or `-Ytyper-debug` for `-Vtyper`, are listed by the latest name.
+
+The option groups `-V` and `-W` were introduced in Scala 2.13, for "verbose" options that
+request additional diagnostic output and "warnings" that request additional checks which
+may or may not indicate errors in code. `-Werror` elevates warnings to errors, and `-Wconf`
+allows precise control over warnings by either ignoring them or taking them as errors.
+The configuration string for `-Wconf` will likely require adjustment when migrating to Scala 3,
+since the configuration syntax and the error messages it matches are different.
+
+| Status | Meaning |
+|-|-|
+| | It is available in Scala 3. |
+| `<new-option>` | It has been renamed to `<new-option>`. |
+| | It is not yet available but could be added later. |
+
+> The current comparison is based on Scala 2.13.10 and 3.3.0.
+
+## Standard Settings
+
+| 2.13.x | 3.3.x |
+|-|-|
+| `-Dproperty=value` | |
+| `-J<flag>` | |
+| `-P:<plugin>:<opt>` ||
+| `-V` | |
+| `-W` | |
+| `-X` ||
+| `-Y` ||
+| `-bootclasspath` ||
+| `-classpath` ||
+| `-d` ||
+| `-dependencyfile` | |
+| `-deprecation` ||
+| `-encoding` ||
+| `-explaintypes` | `-explain-types` |
+| `-extdirs` ||
+| `-feature` ||
+| `-g` | |
+| `-help` ||
+| `-javabootclasspath` ||
+| `-javaextdirs` ||
+| `-language` ||
+| `-no-specialization` | |
+| `-nobootcp` | |
+| `-nowarn` ||
+| `-opt` | |
+| `-opt-inline-from` | |
+| `-opt-warnings` | |
+| `-optimize` | |
+| `-print` ||
+| `-release` ||
+| `-rootdir` | |
+| `-sourcepath` ||
+| `-target` | `-Xtarget` |
+| `-toolcp` | |
+| `-unchecked` ||
+| `-uniqid` ||
+| `-usejavacp` ||
+| `-usemanifestcp` | |
+| `-verbose` ||
+| `-version` ||
+
+## Verbose Settings
+
+| 2.13.x | 3.3.x |
+|-|-|
+| `-Vbrowse:<phases>` | |
+| `-Vclasspath` | `-Ylog-classpath` |
+| `-Vdebug` | `-Ydebug` |
+| `-Vdebug-tasty` | |
+| `-Vdebug-type-error` | |
+| `-Vdoc` | |
+| `-Vfree-terms` | |
+| `-Vfree-types` | |
+| `-Vhot-statistics`| |
+| `-Vide`| |
+| `-Vimplicit-conversions`| |
+| `-Vimplicits`| |
+| `-Vimplicits-max-refined`| |
+| `-Vimplicits-verbose-tree`| |
+| `-Vinline <package/Class.method>` | |
+| `-Vlog:<phases>` | `-Ylog:<phases>`|
+| `-Vmacro` | |
+| `-Vmacro-lite` | |
+| `-Vopt <package/Class.method>` | |
+| `-Vpatmat` | |
+| `-Vphases` | |
+| `-Vpos`| |
+| `-Vprint:<phases>` | |
+| `-Vprint-args <file>` | |
+| `-Vprint-pos` | `-Yprint-pos` |
+| `-Vprint-types` | `-Xprint-types` |
+| `-Vquasiquote` | |
+| `-Vreflective-calls` | |
+| `-Vreify` | |
+| `-Vshow:<phases>` | |
+| `-Vshow-class <class>` | |
+| `-Vshow-member-pos