Merge branch 'upstream-main' into liangan1/flex_attention · pytorch/pytorch@962ee1b · GitHub

Commit 962ee1b

Merge branch 'upstream-main' into liangan1/flex_attention
2 parents 800bed4 + 129a297 commit 962ee1b

File tree

196 files changed · +2710 −1867 lines changed


.ci/caffe2/README.md

Lines changed: 0 additions & 2 deletions
@@ -10,5 +10,3 @@ example: `py2-cuda9.0-cudnn7-ubuntu16.04`. The Docker images that are
 built on Jenkins and are used in triggered builds already have this
 environment variable set in their manifest. Also see
 `./docker/jenkins/*/Dockerfile` and search for `BUILD_ENVIRONMENT`.
-
-Our Jenkins installation is located at https://ci.pytorch.org/jenkins/.

.ci/docker/build.sh

Lines changed: 4 additions & 4 deletions
@@ -251,19 +251,19 @@ case "$tag" in
     UCC_COMMIT=${_UCC_COMMIT}
     INDUCTOR_BENCHMARKS=yes
     ;;
-  pytorch-linux-jammy-xpu-2024.0-py3)
+  pytorch-linux-jammy-xpu-2025.0-py3)
     ANACONDA_PYTHON_VERSION=3.9
     GCC_VERSION=11
     VISION=yes
-    XPU_VERSION=0.5
+    XPU_VERSION=2025.0
     NINJA_VERSION=1.9.0
     TRITON=yes
     ;;
-  pytorch-linux-jammy-xpu-2025.0-py3)
+  pytorch-linux-jammy-xpu-2025.1-py3)
     ANACONDA_PYTHON_VERSION=3.9
     GCC_VERSION=11
     VISION=yes
-    XPU_VERSION=2025.0
+    XPU_VERSION=2025.1
     NINJA_VERSION=1.9.0
     TRITON=yes
     ;;
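For context, these tags are the image names matched by the case statement in the CI Docker build script. A minimal sketch of building the new XPU image locally, assuming the script is invoked with the image tag as its first argument (that calling convention is not shown in this diff), would be:

    cd .ci/docker
    # Hypothetical invocation; CI wrappers and registry arguments may differ.
    ./build.sh pytorch-linux-jammy-xpu-2025.1-py3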

.ci/docker/centos-rocm/Dockerfile

Lines changed: 2 additions & 3 deletions
@@ -17,9 +17,8 @@ RUN bash ./install_base.sh && rm install_base.sh
 # Update CentOS git version
 RUN yum -y remove git
 RUN yum -y remove git-*
-RUN yum -y install https://packages.endpoint.com/rhel/7/os/x86_64/endpoint-repo-1.9-1.x86_64.rpm || \
-    (yum -y install https://packages.endpointdev.com/rhel/7/os/x86_64/endpoint-repo-1.9-1.x86_64.rpm && \
-    sed -i "s/packages.endpoint/packages.endpointdev/" /etc/yum.repos.d/endpoint.repo)
+RUN yum -y install https://packages.endpointdev.com/rhel/7/os/x86_64/endpoint-repo-1.9-1.x86_64.rpm && \
+    sed -i 's/packages.endpoint/packages.endpointdev/' /etc/yum.repos.d/endpoint.repo
 RUN yum install -y git

 # Install devtoolset

.ci/docker/common/install_conda.sh

Lines changed: 1 addition & 1 deletion
@@ -7,7 +7,7 @@ if [ -n "$ANACONDA_PYTHON_VERSION" ]; then
   BASE_URL="https://repo.anaconda.com/miniconda"
   CONDA_FILE="Miniconda3-latest-Linux-x86_64.sh"
   if [[ $(uname -m) == "aarch64" ]] || [[ "$BUILD_ENVIRONMENT" == *xpu* ]]; then
-    BASE_URL="https://github.com/conda-forge/miniforge/releases/latest/download"
+    BASE_URL="https://github.com/conda-forge/miniforge/releases/latest/download" # @lint-ignore
     CONDA_FILE="Miniforge3-Linux-$(uname -m).sh"
   fi

.ci/docker/common/install_cpython.sh

Lines changed: 1 addition & 1 deletion
@@ -3,7 +3,7 @@
 set -uex -o pipefail

 PYTHON_DOWNLOAD_URL=https://www.python.org/ftp/python
-PYTHON_DOWNLOAD_GITHUB_BRANCH=https://github.com/python/cpython/archive/refs/heads
+PYTHON_DOWNLOAD_GITHUB_BRANCH=https://github.com/python/cpython/archive/refs/heads # @lint-ignore
 GET_PIP_URL=https://bootstrap.pypa.io/get-pip.py

 # Python versions to be installed in /opt/$VERSION_NO

.ci/docker/common/install_xpu.sh

Lines changed: 7 additions & 7 deletions
@@ -26,7 +26,7 @@ function install_ubuntu() {
     wget -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB \
         | gpg --dearmor > /usr/share/keyrings/oneapi-archive-keyring.gpg.gpg
     echo "deb [signed-by=/usr/share/keyrings/oneapi-archive-keyring.gpg.gpg] \
-        https://apt.repos.intel.com/${XPU_REPO_NAME} all main" \
+        https://apt.repos.intel.com/oneapi all main" \
         | tee /etc/apt/sources.list.d/oneAPI.list

     # Update the packages list and repository index
@@ -74,7 +74,7 @@ function install_rhel() {
     tee > /etc/yum.repos.d/oneAPI.repo << EOF
 [oneAPI]
 name=Intel for Pytorch GPU dev repository
-baseurl=https://yum.repos.intel.com/${XPU_REPO_NAME}
+baseurl=https://yum.repos.intel.com/oneapi
 enabled=1
 gpgcheck=1
 repo_gpgcheck=1
@@ -118,7 +118,7 @@ function install_sles() {
         https://repositories.intel.com/gpu/sles/${VERSION_SP}${XPU_DRIVER_VERSION}/unified/intel-gpu-${VERSION_SP}.repo
     rpm --import https://repositories.intel.com/gpu/intel-graphics.key
     # To add the online network network package repository for the Intel Support Packages
-    zypper addrepo https://yum.repos.intel.com/${XPU_REPO_NAME} oneAPI
+    zypper addrepo https://yum.repos.intel.com/oneapi oneAPI
     rpm --import https://yum.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB

     # The xpu-smi packages
@@ -141,10 +141,10 @@ if [[ "${XPU_DRIVER_TYPE,,}" == "rolling" ]]; then
     XPU_DRIVER_VERSION=""
 fi

-XPU_REPO_NAME="intel-for-pytorch-gpu-dev"
-XPU_PACKAGES="intel-for-pytorch-gpu-dev-0.5 intel-pti-dev-0.9"
-if [[ "$XPU_VERSION" == "2025.0" ]]; then
-    XPU_REPO_NAME="oneapi"
+# Default use Intel® oneAPI Deep Learning Essentials 2025.0
+if [[ "$XPU_VERSION" == "2025.1" ]]; then
+    XPU_PACKAGES="intel-deep-learning-essentials-2025.1"
+else
     XPU_PACKAGES="intel-deep-learning-essentials-2025.0"
 fi

.ci/docker/manywheel/Dockerfile_2_28

Lines changed: 1 addition & 1 deletion
@@ -174,6 +174,6 @@ ENV XPU_DRIVER_TYPE ROLLING
 RUN python3 -m pip install --upgrade pip && \
     python3 -mpip install cmake==3.28.4
 ADD ./common/install_xpu.sh install_xpu.sh
-ENV XPU_VERSION 2025.0
+ENV XPU_VERSION 2025.1
 RUN bash ./install_xpu.sh && rm install_xpu.sh
 RUN pushd /opt/_internal && tar -xJf static-libs-for-embedding-only.tar.xz && popd

.ci/docker/manywheel/build_scripts/build.sh

Lines changed: 1 addition & 1 deletion
@@ -97,7 +97,7 @@ find /opt/_internal -type f -print0 \
     | xargs -0 -n1 strip --strip-unneeded 2>/dev/null || true
 # We do not need the Python test suites, or indeed the precompiled .pyc and
 # .pyo files. Partially cribbed from:
-#     https://github.com/docker-library/python/blob/master/3.4/slim/Dockerfile
+#     https://github.com/docker-library/python/blob/master/3.4/slim/Dockerfile # @lint-ignore
 find /opt/_internal \
     \( -type d -a -name test -o -name tests \) \
     -o \( -type f -a -name '*.pyc' -o -name '*.pyo' \) \

.ci/docker/manywheel/build_scripts/build_utils.sh

Lines changed: 1 addition & 1 deletion
@@ -2,7 +2,7 @@
 # Helper utilities for build
 # Script used only in CD pipeline

-OPENSSL_DOWNLOAD_URL=https://www.openssl.org/source/old/1.1.1/
+OPENSSL_DOWNLOAD_URL=https://www.openssl.org/source/old/1.1.1/ # @lint-ignore
 CURL_DOWNLOAD_URL=https://curl.se/download

 AUTOCONF_DOWNLOAD_URL=https://ftp.gnu.org/gnu/autoconf

.ci/manywheel/build_xpu.sh

Lines changed: 4 additions & 0 deletions
@@ -20,7 +20,11 @@ fi
 source /opt/intel/oneapi/compiler/latest/env/vars.sh
 source /opt/intel/oneapi/pti/latest/env/vars.sh
 source /opt/intel/oneapi/umf/latest/env/vars.sh
+source /opt/intel/oneapi/ccl/latest/env/vars.sh
+source /opt/intel/oneapi/mpi/latest/env/vars.sh
 export USE_STATIC_MKL=1
+export USE_ONEMKL=1
+export USE_XCCL=1

 WHEELHOUSE_DIR="wheelhousexpu"
 LIBTORCH_HOUSE_DIR="libtorch_housexpu"
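Once a wheel is built with USE_ONEMKL=1 and USE_XCCL=1, a rough sanity check from the installed package is possible; the exact strings emitted by torch.__config__.show() are build-dependent, so treat this as a sketch rather than an authoritative test:

    # Look for MKL/XCCL markers in the build configuration string.
    python -c "import torch; print(torch.__config__.show())" | grep -iE 'mkl|xccl' || true
    # Returns True only with an Intel GPU and its runtime present.
    python -c "import torch; print(torch.xpu.is_available())"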

0 commit comments
