Update · pytorch/pytorch@0669c68 · GitHub

Commit 0669c68

Update
[ghstack-poisoned]
2 parents c55a2cb + 0f8763f commit 0669c68

File tree

191 files changed: +5209 -1654 lines changed


.ci/docker/build.sh

Lines changed: 1 addition & 1 deletion
@@ -308,7 +308,7 @@ case "$image" in
     TRITON=yes
     ;;
   pytorch-linux-jammy-xpu-2024.0-py3)
-    ANACONDA_PYTHON_VERSION=3.8
+    ANACONDA_PYTHON_VERSION=3.9
     GCC_VERSION=11
     PROTOBUF=yes
     DB=yes

.ci/docker/common/install_conda.sh

Lines changed: 6 additions & 5 deletions
@@ -78,19 +78,20 @@ fi
   CONDA_COMMON_DEPS="astunparse pyyaml setuptools openblas==0.3.25=*openmp* ninja==1.11.1 scons==4.5.2"

   if [ "$ANACONDA_PYTHON_VERSION" = "3.8" ]; then
-    conda_install numpy=1.24.4 ${CONDA_COMMON_DEPS}
+    NUMPY_VERSION=1.24.4
   else
-    conda_install numpy=1.26.2 ${CONDA_COMMON_DEPS}
+    NUMPY_VERSION=1.26.2
   fi
 else
   CONDA_COMMON_DEPS="astunparse pyyaml mkl=2021.4.0 mkl-include=2021.4.0 setuptools"

   if [ "$ANACONDA_PYTHON_VERSION" = "3.11" ] || [ "$ANACONDA_PYTHON_VERSION" = "3.12" ] || [ "$ANACONDA_PYTHON_VERSION" = "3.13" ]; then
-    conda_install numpy=1.26.0 ${CONDA_COMMON_DEPS}
+    NUMPY_VERSION=1.26.0
   else
-    conda_install numpy=1.21.2 ${CONDA_COMMON_DEPS}
+    NUMPY_VERSION=1.21.2
   fi
 fi
+conda_install ${CONDA_COMMON_DEPS}

 # Install llvm-8 as it is required to compile llvmlite-0.30.0 from source
 # and libpython-static for torch deploy
@@ -112,7 +113,7 @@ fi

 # Install some other packages, including those needed for Python test reporting
 pip_install -r /opt/conda/requirements-ci.txt
-
+pip_install numpy=="$NUMPY_VERSION"
 pip_install -U scikit-learn

 if [ -n "$DOCS" ]; then
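Net effect of the install_conda.sh change: numpy is no longer pinned through conda_install; the chosen version is recorded in NUMPY_VERSION and installed from pip after the common conda dependencies. A minimal sketch of the resulting flow, assuming the conda_install and pip_install helpers already used by this script:

# Sketch only: summarizes the post-change flow; CONDA_COMMON_DEPS no longer includes numpy.
if [ "$ANACONDA_PYTHON_VERSION" = "3.8" ]; then
  NUMPY_VERSION=1.24.4          # 1.24.x is the last numpy series supporting Python 3.8
else
  NUMPY_VERSION=1.26.2
fi
conda_install ${CONDA_COMMON_DEPS}    # conda installs everything except numpy
pip_install numpy=="$NUMPY_VERSION"   # numpy now comes from pip at the recorded version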

.ci/docker/common/install_cuda.sh

Lines changed: 12 additions & 1 deletion
@@ -27,6 +27,17 @@ function install_cusparselt_052 {
     rm -rf tmp_cusparselt
 }

+function install_cusparselt_062 {
+    # cuSparseLt license: https://docs.nvidia.com/cuda/cusparselt/license.html
+    mkdir tmp_cusparselt && pushd tmp_cusparselt
+    wget -q https://developer.download.nvidia.com/compute/cusparselt/redist/libcusparse_lt/linux-x86_64/libcusparse_lt-linux-x86_64-0.6.2.3-archive.tar.xz
+    tar xf libcusparse_lt-linux-x86_64-0.6.2.3-archive.tar.xz
+    cp -a libcusparse_lt-linux-x86_64-0.6.2.3-archive/include/* /usr/local/cuda/include/
+    cp -a libcusparse_lt-linux-x86_64-0.6.2.3-archive/lib/* /usr/local/cuda/lib64/
+    popd
+    rm -rf tmp_cusparselt
+}
+
 function install_118 {
     echo "Installing CUDA 11.8 and cuDNN ${CUDNN_VERSION} and NCCL ${NCCL_VERSION} and cuSparseLt-0.4.0"
     rm -rf /usr/local/cuda-11.8 /usr/local/cuda
@@ -121,7 +132,7 @@ function install_124 {
     cd ..
     rm -rf nccl

-    install_cusparselt_052
+    install_cusparselt_062

     ldconfig
 }

.ci/docker/common/install_cusparselt.sh

Lines changed: 9 additions & 1 deletion
@@ -5,7 +5,15 @@ set -ex
 # cuSPARSELt license: https://docs.nvidia.com/cuda/cusparselt/license.html
 mkdir tmp_cusparselt && cd tmp_cusparselt

-if [[ ${CUDA_VERSION:0:4} =~ ^12\.[1-4]$ ]]; then
+if [[ ${CUDA_VERSION:0:4} =~ ^12\.[2-4]$ ]]; then
+    arch_path='sbsa'
+    export TARGETARCH=${TARGETARCH:-$(uname -m)}
+    if [ ${TARGETARCH} = 'amd64' ] || [ "${TARGETARCH}" = 'x86_64' ]; then
+        arch_path='x86_64'
+    fi
+    CUSPARSELT_NAME="libcusparse_lt-linux-${arch_path}-0.6.2.3-archive"
+    curl --retry 3 -OLs https://developer.download.nvidia.com/compute/cusparselt/redist/libcusparse_lt/linux-${arch_path}/${CUSPARSELT_NAME}.tar.xz
+elif [[ ${CUDA_VERSION:0:4} == "12.1" ]]; then
     arch_path='sbsa'
     export TARGETARCH=${TARGETARCH:-$(uname -m)}
     if [ ${TARGETARCH} = 'amd64' ] || [ "${TARGETARCH}" = 'x86_64' ]; then
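Because only the first four characters of CUDA_VERSION are compared, the tightened regex routes CUDA 12.2 through 12.4 to the 0.6.2.3 archive, while 12.1 now falls through to the elif branch below. A small sketch of that dispatch, using a hypothetical pick_cusparselt helper purely for illustration:

# Illustration only: pick_cusparselt is hypothetical, not part of install_cusparselt.sh.
pick_cusparselt() {
  local CUDA_VERSION=$1
  if [[ ${CUDA_VERSION:0:4} =~ ^12\.[2-4]$ ]]; then
    echo "libcusparse_lt 0.6.2.3"
  elif [[ ${CUDA_VERSION:0:4} == "12.1" ]]; then
    echo "libcusparse_lt branch for CUDA 12.1"
  else
    echo "older cuSPARSELt branch"
  fi
}
pick_cusparselt 12.4.1   # -> libcusparse_lt 0.6.2.3 (only "12.4" is compared)
pick_cusparselt 12.1.1   # -> libcusparse_lt branch for CUDA 12.1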

.circleci/scripts/binary_linux_test.sh

Lines changed: 0 additions & 6 deletions
@@ -116,12 +116,6 @@ if [[ "$PACKAGE_TYPE" == libtorch ]]; then
   cd /tmp/libtorch
 fi

-if [[ "$GPU_ARCH_TYPE" == xpu ]]; then
-  # Refer https://www.intel.com/content/www/us/en/developer/articles/tool/pytorch-prerequisites-for-intel-gpu/2-5.html
-  source /opt/intel/oneapi/pytorch-gpu-dev-0.5/oneapi-vars.sh
-  source /opt/intel/oneapi/pti/latest/env/vars.sh
-fi
-
 # Test the package
 /builder/check_binary.sh

.github/actionlint.yaml

Lines changed: 1 addition & 0 deletions
@@ -14,6 +14,7 @@ self-hosted-runner:
   - linux.12xlarge
   - linux.12xlarge.ephemeral
   - linux.24xlarge
+  - linux.24xlarge.ephemeral
   - linux.arm64.2xlarge
   - linux.arm64.m7g.4xlarge
   - linux.4xlarge.nvidia.gpu

.github/lf-canary-scale-config.yml

Lines changed: 36 additions & 0 deletions
@@ -121,6 +121,18 @@ runner_types:
         ami: al2023-ami-2023.5.20240701.0-kernel-6.1-x86_64
       am2:
         ami: amzn2-ami-hvm-2.0.20240306.2-x86_64-ebs
+  lf.c.linux.24xlarge.ephemeral:
+    disk_size: 150
+    instance_type: c5.24xlarge
+    is_ephemeral: true
+    max_available: 200
+    os: linux
+    ami: al2023-ami-2023.5.20240701.0-kernel-6.1-x86_64
+    variants:
+      amz2023:
+        ami: al2023-ami-2023.5.20240701.0-kernel-6.1-x86_64
+      am2:
+        ami: amzn2-ami-hvm-2.0.20240306.2-x86_64-ebs
   lf.c.linux.2xlarge:
     disk_size: 150
     instance_type: c5.2xlarge
@@ -277,6 +289,30 @@ runner_types:
         ami: al2023-ami-2023.5.20240701.0-kernel-6.1-arm64
       am2:
         ami: amzn2-ami-hvm-2.0.20240306.2-arm64-gp2
+  lf.c.linux.arm64.2xlarge.ephemeral:
+    disk_size: 256
+    instance_type: t4g.2xlarge
+    is_ephemeral: true
+    max_available: 200
+    os: linux
+    ami: al2023-ami-2023.5.20240701.0-kernel-6.1-arm64
+    variants:
+      amz2023:
+        ami: al2023-ami-2023.5.20240701.0-kernel-6.1-arm64
+      am2:
+        ami: amzn2-ami-hvm-2.0.20240306.2-arm64-gp2
+  lf.c.linux.arm64.m7g.4xlarge.ephemeral:
+    disk_size: 256
+    instance_type: m7g.4xlarge
+    is_ephemeral: true
+    max_available: 200
+    os: linux
+    ami: al2023-ami-2023.5.20240701.0-kernel-6.1-arm64
+    variants:
+      amz2023:
+        ami: al2023-ami-2023.5.20240701.0-kernel-6.1-arm64
+      am2:
+        ami: amzn2-ami-hvm-2.0.20240306.2-arm64-gp2
   lf.c.linux.arm64.m7g.metal:
     disk_size: 256
     instance_type: m7g.metal

.github/lf-scale-config.yml

Lines changed: 36 additions & 0 deletions
@@ -121,6 +121,18 @@ runner_types:
         ami: al2023-ami-2023.5.20240701.0-kernel-6.1-x86_64
       am2:
         ami: amzn2-ami-hvm-2.0.20240306.2-x86_64-ebs
+  lf.linux.24xlarge.ephemeral:
+    disk_size: 150
+    instance_type: c5.24xlarge
+    is_ephemeral: true
+    max_available: 200
+    os: linux
+    ami: al2023-ami-2023.5.20240701.0-kernel-6.1-x86_64
+    variants:
+      amz2023:
+        ami: al2023-ami-2023.5.20240701.0-kernel-6.1-x86_64
+      am2:
+        ami: amzn2-ami-hvm-2.0.20240306.2-x86_64-ebs
   lf.linux.2xlarge:
     disk_size: 150
     instance_type: c5.2xlarge
@@ -277,6 +289,30 @@ runner_types:
         ami: al2023-ami-2023.5.20240701.0-kernel-6.1-arm64
       am2:
         ami: amzn2-ami-hvm-2.0.20240306.2-arm64-gp2
+  lf.linux.arm64.2xlarge.ephemeral:
+    disk_size: 256
+    instance_type: t4g.2xlarge
+    is_ephemeral: true
+    max_available: 200
+    os: linux
+    ami: al2023-ami-2023.5.20240701.0-kernel-6.1-arm64
+    variants:
+      amz2023:
+        ami: al2023-ami-2023.5.20240701.0-kernel-6.1-arm64
+      am2:
+        ami: amzn2-ami-hvm-2.0.20240306.2-arm64-gp2
+  lf.linux.arm64.m7g.4xlarge.ephemeral:
+    disk_size: 256
+    instance_type: m7g.4xlarge
+    is_ephemeral: true
+    max_available: 200
+    os: linux
+    ami: al2023-ami-2023.5.20240701.0-kernel-6.1-arm64
+    variants:
+      amz2023:
+        ami: al2023-ami-2023.5.20240701.0-kernel-6.1-arm64
+      am2:
+        ami: amzn2-ami-hvm-2.0.20240306.2-arm64-gp2
   lf.linux.arm64.m7g.metal:
     disk_size: 256
     instance_type: m7g.metal

.github/templates/linux_binary_build_workflow.yml.j2

Lines changed: 1 addition & 1 deletion
@@ -76,7 +76,7 @@ jobs:
       ALPINE_IMAGE: "docker.io/s390x/alpine"
 {%- elif "conda" in build_environment and config["gpu_arch_type"] == "cuda" %}
       runner_prefix: "${{ needs.get-label-type.outputs.label-type }}amz2023."
-      runs_on: linux.24xlarge
+      runs_on: linux.24xlarge.ephemeral
 {%- else %}
       runner_prefix: "${{ needs.get-label-type.outputs.label-type }}amz2023."
 {%- endif %}

.github/templates/macos_binary_build_workflow.yml.j2

Lines changed: 0 additions & 3 deletions
@@ -64,9 +64,6 @@ jobs:
 {%- if config.pytorch_extra_install_requirements is defined and config.pytorch_extra_install_requirements|d('')|length > 0 %}
       PYTORCH_EXTRA_INSTALL_REQUIREMENTS: !{{ config.pytorch_extra_install_requirements }}
 {%- endif %}
-      # For sccache access (only on non-forked PRs)
-      AWS_ACCESS_KEY_ID: ${{ secrets.MACOS_SCCACHE_S3_ACCESS_KEY_ID }}
-      AWS_SECRET_ACCESS_KEY: ${{ secrets.MACOS_SCCACHE_S3_SECRET_ACCESS_KEY }}
     steps:
       !{{ set_runner_specific_vars() }}
       - name: Install conda and dependencies

0 commit comments
