xref: /aosp_15_r20/external/pytorch/.circleci/scripts/binary_populate_env.sh (revision da0073e96a02ea20f0ac840b70461e3646d07c45)
1#!/bin/bash
2set -eux -o pipefail
3export TZ=UTC
4
# Print the git tag the pytorch checkout is exactly on (e.g. v1.6.0-rc1).
# Globals:   workdir (read)
# Outputs:   the tag on stdout when HEAD is exactly on a vX.Y.Z* tag
# Returns:   1 when not on a tag; kills the whole script if the git dir
#            is missing ('kill $$' aborts the parent even when this runs
#            inside a command-substitution subshell, unlike 'exit').
tagged_version() {
  GIT_DIR="${workdir}/pytorch/.git"
  # Keep the command as an array so the --match pattern is passed as one
  # quoted argument instead of being subject to word splitting and
  # pathname expansion (SC2086).
  local -a git_describe=(git --git-dir "${GIT_DIR}" describe --tags --match "v[0-9]*.[0-9]*.[0-9]*")
  if [[ ! -d "${GIT_DIR}" ]]; then
    echo "Abort, abort! Git dir ${GIT_DIR} does not exists!"
    kill $$
  elif "${git_describe[@]}" --exact >/dev/null; then
    "${git_describe[@]}"
  else
    return 1
  fi
}
17
# File the generated environment is written to; overridable by the caller.
envfile=${BINARY_ENV_FILE:-/tmp/env}
# Use ${PYTORCH_ROOT:-} so an unset PYTORCH_ROOT does not trip `set -u`:
# the docker executor for binary builds legitimately leaves it unset and
# is meant to fall through to the workdir="/" branch below.
if [[ -n "${PYTORCH_ROOT:-}"  ]]; then
  workdir=$(dirname "${PYTORCH_ROOT}")
else
  # docker executor (binary builds)
  workdir="/"
fi
25
# libtorch packages ship no Python bindings.
case "$PACKAGE_TYPE" in
  libtorch) export BUILD_PYTHONLESS=1 ;;
esac
29
# Pick docker image: honor a caller-provided DOCKER_IMAGE, otherwise
# derive one from the package type and CUDA flavor.
export DOCKER_IMAGE=${DOCKER_IMAGE:-}
if [[ -z "$DOCKER_IMAGE" ]]; then
  case "$PACKAGE_TYPE" in
    conda)
      export DOCKER_IMAGE="pytorch/conda-cuda"
      ;;
    *)
      if [[ "$DESIRED_CUDA" == cpu ]]; then
        export DOCKER_IMAGE="pytorch/manylinux-cpu"
      else
        # ${DESIRED_CUDA:2} drops the leading "cu", e.g. cu118 -> 118.
        export DOCKER_IMAGE="pytorch/manylinux-cuda${DESIRED_CUDA:2}"
      fi
      ;;
  esac
fi
41
# GOLD linker can not be used if CUPTI is statically linked into PyTorch, see https://github.com/pytorch/pytorch/issues/57744
if [[ ${DESIRED_CUDA} == "cpu" ]]; then
  USE_GOLD_LINKER="ON"
else
  USE_GOLD_LINKER="OFF"
fi
47
48
# Default to nightly, since that's where this normally uploads to
PIP_UPLOAD_FOLDER='nightly/'
# We put this here so that OVERRIDE_PACKAGE_VERSION below can read from it
export DATE="$(date -u +%Y%m%d)"
# version.txt holds something like "2.1.0a0"; `cut -da -f1` keeps everything
# before the first 'a'.  Read the file directly (no useless cat) and quote
# the path in case PYTORCH_ROOT contains spaces.
BASE_BUILD_VERSION="$(cut -da -f1 "${PYTORCH_ROOT}/version.txt").dev${DATE}"
54
# Change BASE_BUILD_VERSION to git tag when on a git tag.
# Call tagged_version once and reuse its output instead of invoking git
# twice (the original ran `tagged_version` in both the condition and the
# assignment).
if GIT_TAG="$(tagged_version)"; then
  # Switch upload folder to 'test/' if we are on a tag
  PIP_UPLOAD_FOLDER='test/'
  # Grab git tag, remove prefixed v and remove everything after -
  # Used to clean up tags that are for release candidates like v1.6.0-rc1
  # Turns tag v1.6.0-rc1 -> v1.6.0
  GIT_TAG="${GIT_TAG#v}"
  BASE_BUILD_VERSION="${GIT_TAG%%-*}"
fi
# macOS and conda builds do not embed the CUDA flavor in the version; all
# other (linux wheel/libtorch) builds get a "+cuXYZ"/"+cpu" local suffix.
if [[ "$(uname)" == 'Darwin' ]] || [[ "$PACKAGE_TYPE" == conda ]]; then
  export PYTORCH_BUILD_VERSION="${BASE_BUILD_VERSION}"
else
  export PYTORCH_BUILD_VERSION="${BASE_BUILD_VERSION}+$DESIRED_CUDA"
fi

export PYTORCH_BUILD_NUMBER=1
73
# Set triton version as part of PYTORCH_EXTRA_INSTALL_REQUIREMENTS
TRITON_VERSION=$(cat "${PYTORCH_ROOT}/.ci/docker/triton_version.txt")

# Here PYTORCH_EXTRA_INSTALL_REQUIREMENTS is already set for all the wheel builds, hence append TRITON_CONSTRAINT
TRITON_CONSTRAINT="platform_system == 'Linux' and platform_machine == 'x86_64' and python_version < '3.13'"
if [[ "$PACKAGE_TYPE" =~ .*wheel.* &&  -n "${PYTORCH_EXTRA_INSTALL_REQUIREMENTS:-}" ]]; then
  # Release builds pin the public triton package at the matching version.
  TRITON_REQUIREMENT="triton==${TRITON_VERSION}; ${TRITON_CONSTRAINT}"
  # Nightly (.dev) builds pin pytorch-triton at the committed short hash.
  if [[ -n "$PYTORCH_BUILD_VERSION" && "$PYTORCH_BUILD_VERSION" =~ .*dev.* ]]; then
      TRITON_SHORTHASH=$(cut -c1-10 "${PYTORCH_ROOT}/.ci/docker/ci_commit_pins/triton.txt")
      TRITON_REQUIREMENT="pytorch-triton==${TRITON_VERSION}+${TRITON_SHORTHASH}; ${TRITON_CONSTRAINT}"
  fi
  # NOTE(review): ' | ' acts as a requirement separator here -- presumably
  # split again by a downstream consumer; confirm before changing it.
  export PYTORCH_EXTRA_INSTALL_REQUIREMENTS="${PYTORCH_EXTRA_INSTALL_REQUIREMENTS} | ${TRITON_REQUIREMENT}"
fi
87
# Set triton via PYTORCH_EXTRA_INSTALL_REQUIREMENTS for triton rocm package
if [[ "$PACKAGE_TYPE" =~ .*wheel.* && -n "$PYTORCH_BUILD_VERSION" && "$PYTORCH_BUILD_VERSION" =~ .*rocm.* && $(uname) == "Linux" ]]; then
    TRITON_REQUIREMENT="pytorch-triton-rocm==${TRITON_VERSION}; ${TRITON_CONSTRAINT}"
    # Nightly (.dev) builds pin the rocm triton wheel at the committed hash.
    if [[ -n "$PYTORCH_BUILD_VERSION" && "$PYTORCH_BUILD_VERSION" =~ .*dev.* ]]; then
        TRITON_SHORTHASH=$(cut -c1-10 "${PYTORCH_ROOT}/.ci/docker/ci_commit_pins/triton-rocm.txt")
        TRITON_REQUIREMENT="pytorch-triton-rocm==${TRITON_VERSION}+${TRITON_SHORTHASH}; ${TRITON_CONSTRAINT}"
    fi
    # Start the list if empty, otherwise append with the ' | ' separator
    # used elsewhere in this script.
    if [[ -z "${PYTORCH_EXTRA_INSTALL_REQUIREMENTS:-}" ]]; then
        export PYTORCH_EXTRA_INSTALL_REQUIREMENTS="${TRITON_REQUIREMENT}"
    else
        export PYTORCH_EXTRA_INSTALL_REQUIREMENTS="${PYTORCH_EXTRA_INSTALL_REQUIREMENTS} | ${TRITON_REQUIREMENT}"
    fi
fi
101
JAVA_HOME=
BUILD_JNI=OFF
# libtorch builds optionally bundle JNI bindings; locate a JDK that ships
# include/jni.h.
if [[ "$PACKAGE_TYPE" == libtorch ]]; then
  # Candidate JDK locations, in preference order.
  POSSIBLE_JAVA_HOMES=()
  POSSIBLE_JAVA_HOMES+=(/usr/local)
  POSSIBLE_JAVA_HOMES+=(/usr/lib/jvm/java-8-openjdk-amd64)
  POSSIBLE_JAVA_HOMES+=(/Library/Java/JavaVirtualMachines/*.jdk/Contents/Home)
  # Add the Windows-specific JNI path
  POSSIBLE_JAVA_HOMES+=("$PWD/pytorch/.circleci/windows-jni/")
  for JH in "${POSSIBLE_JAVA_HOMES[@]}" ; do
    if [[ -e "$JH/include/jni.h" ]] ; then
      # The Windows JNI stub only applies under msys; skip it elsewhere
      # and keep scanning the remaining candidates.  (The original used
      # 'break' here, which only worked because this entry happens to be
      # last in the list.)
      if [[ "$JH" == "$PWD/pytorch/.circleci/windows-jni/" && "$OSTYPE" != "msys" ]] ; then
        continue
      fi
      echo "Found jni.h under $JH"
      JAVA_HOME="$JH"
      BUILD_JNI=ON
      break
    fi
  done
  if [ -z "$JAVA_HOME" ]; then
    echo "Did not find jni.h"
  fi
fi
127
128cat >"$envfile" <<EOL
129# =================== The following code will be executed inside Docker container ===================
130export TZ=UTC
131echo "Running on $(uname -a) at $(date)"
132
133export PACKAGE_TYPE="$PACKAGE_TYPE"
134export DESIRED_PYTHON="${DESIRED_PYTHON:-}"
135export DESIRED_CUDA="$DESIRED_CUDA"
136export LIBTORCH_VARIANT="${LIBTORCH_VARIANT:-}"
137export BUILD_PYTHONLESS="${BUILD_PYTHONLESS:-}"
138if [[ "${OSTYPE}" == "msys" ]]; then
139  export LIBTORCH_CONFIG="${LIBTORCH_CONFIG:-}"
140  if [[ "${LIBTORCH_CONFIG:-}" == 'debug' ]]; then
141    export DEBUG=1
142  fi
143  export DESIRED_DEVTOOLSET=""
144else
145  export DESIRED_DEVTOOLSET="${DESIRED_DEVTOOLSET:-}"
146fi
147
148export DATE="$DATE"
149export NIGHTLIES_DATE_PREAMBLE=1.14.0.dev
150export PYTORCH_BUILD_VERSION="$PYTORCH_BUILD_VERSION"
151export PYTORCH_BUILD_NUMBER="$PYTORCH_BUILD_NUMBER"
152export OVERRIDE_PACKAGE_VERSION="$PYTORCH_BUILD_VERSION"
153export PYTORCH_EXTRA_INSTALL_REQUIREMENTS="${PYTORCH_EXTRA_INSTALL_REQUIREMENTS:-}"
154
155# TODO: We don't need this anymore IIUC
156export TORCH_PACKAGE_NAME='torch'
157export TORCH_CONDA_BUILD_FOLDER='pytorch-nightly'
158export ANACONDA_USER='pytorch'
159
160export USE_FBGEMM=1
161export JAVA_HOME=$JAVA_HOME
162export BUILD_JNI=$BUILD_JNI
163export PIP_UPLOAD_FOLDER="$PIP_UPLOAD_FOLDER"
164export DOCKER_IMAGE="$DOCKER_IMAGE"
165
166
167export USE_GOLD_LINKER="${USE_GOLD_LINKER}"
168export USE_GLOO_WITH_OPENSSL="ON"
169# =================== The above code will be executed inside Docker container ===================
170EOL
171
# nproc doesn't exist on darwin
if [[ "$(uname)" != Darwin ]]; then
  # This was lowered from 18 to 12 to avoid OOMs when compiling FlashAttentionV2
  MEMORY_LIMIT_MAX_JOBS=12
  NUM_CPUS=$(( $(nproc) - 2 ))
  # Guard against small machines: with nproc <= 2 the subtraction yields
  # 0 (or negative), which would set MAX_JOBS=0 and stall the build.
  if (( NUM_CPUS < 1 )); then
    NUM_CPUS=1
  fi

  # Defaults here for **binary** linux builds so they can be changed in one place
  export MAX_JOBS=${MAX_JOBS:-$(( NUM_CPUS > MEMORY_LIMIT_MAX_JOBS ? MEMORY_LIMIT_MAX_JOBS : NUM_CPUS ))}

  cat >>"$envfile" <<EOL
  export MAX_JOBS="${MAX_JOBS}"
EOL
fi
185
# Append a retry helper for use inside the container.  The quoted 'EOF'
# delimiter keeps $* literal so it expands at call time, not here; the
# emitted bytes are identical to the previous echo-per-line version.
cat >>"$envfile" <<'EOF'
retry () {
    $*  || (sleep 1 && $*) || (sleep 2 && $*) || (sleep 4 && $*) || (sleep 8 && $*)
}
export -f retry
EOF

# Dump the generated file so it shows up in the CI logs.
cat "$envfile"
192