diff --git a/.github/workflows/build-manywheel-images.yml b/.github/workflows/build-manywheel-images.yml
index cf7d82828..a599635f8 100644
--- a/.github/workflows/build-manywheel-images.yml
+++ b/.github/workflows/build-manywheel-images.yml
@@ -137,18 +137,3 @@ jobs:
       - name: Build Docker Image
         run: |
           manywheel/build_docker.sh
-  build-docker-cpu-s390x:
-    runs-on: linux.s390x
-    env:
-      GPU_ARCH_TYPE: cpu-s390x
-    steps:
-      - name: Checkout PyTorch
-        uses: actions/checkout@v3
-      - name: Authenticate if WITH_PUSH
-        run: |
-          if [[ "${WITH_PUSH}" == true ]]; then
-            echo "${DOCKER_TOKEN}" | docker login -u "${DOCKER_ID}" --password-stdin
-          fi
-      - name: Build Docker Image
-        run: |
-          manywheel/build_docker.sh
diff --git a/check_binary.sh b/check_binary.sh
index 98a5267eb..be2b5252b 100755
--- a/check_binary.sh
+++ b/check_binary.sh
@@ -330,7 +330,7 @@ fi
 if [[ "$PACKAGE_TYPE" == 'libtorch' ]]; then
   echo "Checking that MKL is available"
   build_and_run_example_cpp check-torch-mkl
-elif [[ "$(uname -m)" != "arm64" ]]; then
+elif [[ "$(uname -m)" != "arm64" && "$(uname -m)" != "s390x" ]]; then
   if [[ "$(uname)" != 'Darwin' || "$PACKAGE_TYPE" != *wheel ]]; then
     if [[ "$(uname -m)" == "aarch64" ]]; then
       echo "Checking that MKLDNN is available on aarch64"
@@ -354,7 +354,7 @@ if [[ "$PACKAGE_TYPE" == 'libtorch' ]]; then
   echo "Checking that XNNPACK is available"
   build_and_run_example_cpp check-torch-xnnpack
 else
-  if [[ "$(uname)" != 'Darwin' || "$PACKAGE_TYPE" != *wheel ]]; then
+  if [[ "$(uname)" != 'Darwin' || "$PACKAGE_TYPE" != *wheel ]] && [[ "$(uname -m)" != "s390x" ]]; then
     echo "Checking that XNNPACK is available"
     pushd /tmp
     python -c 'import torch.backends.xnnpack; exit(0 if torch.backends.xnnpack.enabled else 1)'
@@ -375,7 +375,7 @@ if [[ "$OSTYPE" == "msys" ]]; then
 fi
 
 # Test that CUDA builds are setup correctly
-if [[ "$DESIRED_CUDA" != 'cpu' && "$DESIRED_CUDA" != 'cpu-cxx11-abi' && "$DESIRED_CUDA" != *"rocm"* ]]; then
+if [[ "$DESIRED_CUDA" != 'cpu' && "$DESIRED_CUDA" != 'cpu-cxx11-abi' && "$DESIRED_CUDA" != *"rocm"* && "$(uname -m)" != "s390x" ]]; then
   if [[ "$PACKAGE_TYPE" == 'libtorch' ]]; then
     build_and_run_example_cpp check-torch-cuda
   else
diff --git a/manywheel/Dockerfile_s390x b/manywheel/Dockerfile_s390x
index e30d0bea5..caa5d00bd 100644
--- a/manywheel/Dockerfile_s390x
+++ b/manywheel/Dockerfile_s390x
@@ -1,18 +1,15 @@
-FROM --platform=linux/s390x docker.io/redhat/ubi9 as base
+FROM --platform=linux/s390x docker.io/ubuntu:24.04 as base
 
-# earliest available version in ubi9
-ARG DEVTOOLSET_VERSION=12
-
-# Language variabes
-ENV LC_ALL=en_US.UTF-8
-ENV LANG=en_US.UTF-8
-ENV LANGUAGE=en_US.UTF-8
+# Language variables
+ENV LC_ALL=C.UTF-8
+ENV LANG=C.UTF-8
+ENV LANGUAGE=C.UTF-8
 
 # Installed needed OS packages. This is to support all
 # the binary builds (torch, vision, audio, text, data)
-RUN dnf -y install redhat-release
-RUN dnf -y update
-RUN dnf install -y --allowerasing \
+RUN apt update ; apt upgrade -y
+RUN apt install -y \
+  build-essential \
   autoconf \
   automake \
   bzip2 \
@@ -27,20 +24,19 @@ RUN dnf install -y --allowerasing \
   util-linux \
   wget \
   which \
-  xz \
+  xz-utils \
   less \
   zstd \
-  libgomp \
   cmake \
-  gcc-toolset-${DEVTOOLSET_VERSION}-gcc \
-  gcc-toolset-${DEVTOOLSET_VERSION}-gcc-c++ \
-  gcc-toolset-${DEVTOOLSET_VERSION}-gcc-gfortran \
-  gcc-toolset-${DEVTOOLSET_VERSION}-binutils
-
-# Ensure the expected gcc-toolset is used
-ENV PATH=/opt/rh/gcc-toolset-${DEVTOOLSET_VERSION}/root/usr/bin:$PATH
-ENV LD_LIBRARY_PATH=/opt/rh/gcc-toolset-${DEVTOOLSET_VERSION}/root/usr/lib64:/opt/rh/gcc-toolset-${DEVTOOLSET_VERSION}/root/usr/lib:$LD_LIBRARY_PATH
-
+  python3 \
+  python3-dev \
+  python3-setuptools \
+  python3-yaml \
+  python3-typing-extensions \
+  libblas-dev \
+  libopenblas-dev \
+  liblapack-dev \
+  libatlas-base-dev
 
 # git236+ would refuse to run git commands in repos owned by other users
 # Which causes version check to fail, as pytorch repo is bind-mounted into the image
@@ -57,9 +53,21 @@ ADD ./common/install_openssl.sh install_openssl.sh
 RUN bash ./install_openssl.sh && rm install_openssl.sh
 ENV SSL_CERT_FILE=/opt/_internal/certs.pem
 
+# EPEL for cmake
+FROM base as patchelf
+# Install patchelf
+ADD ./common/install_patchelf.sh install_patchelf.sh
+RUN bash ./install_patchelf.sh && rm install_patchelf.sh
+RUN cp $(which patchelf) /patchelf
+
+FROM patchelf as python
+# build python
+COPY manywheel/build_scripts /build_scripts
+ADD ./common/install_cpython.sh /build_scripts/install_cpython.sh
+RUN bash build_scripts/build.sh && rm -r build_scripts
+
 FROM openssl as final
-# remove unncessary python versions
-RUN rm -rf /opt/python/cp26-cp26m /opt/_internal/cpython-2.6.9-ucs2
-RUN rm -rf /opt/python/cp26-cp26mu /opt/_internal/cpython-2.6.9-ucs4
-RUN rm -rf /opt/python/cp33-cp33m /opt/_internal/cpython-3.3.6
-RUN rm -rf /opt/python/cp34-cp34m /opt/_internal/cpython-3.4.6
+COPY --from=python /opt/python /opt/python
+COPY --from=python /opt/_internal /opt/_internal
+COPY --from=python /opt/python/cp37-cp37m/bin/auditwheel /usr/local/bin/auditwheel
+COPY --from=patchelf /usr/local/bin/patchelf /usr/local/bin/patchelf
diff --git a/manywheel/build.sh b/manywheel/build.sh
index 43725615d..a04d05869 100755
--- a/manywheel/build.sh
+++ b/manywheel/build.sh
@@ -15,7 +15,7 @@ case "${GPU_ARCH_TYPE:-BLANK}" in
     rocm)
         bash "${SCRIPTPATH}/build_rocm.sh"
         ;;
-    cpu | cpu-cxx11-abi)
+    cpu | cpu-cxx11-abi | cpu-s390x)
         bash "${SCRIPTPATH}/build_cpu.sh"
         ;;
     *)
diff --git a/manywheel/build_cpu.sh b/manywheel/build_cpu.sh
index 4669c8a3c..24c95f14e 100755
--- a/manywheel/build_cpu.sh
+++ b/manywheel/build_cpu.sh
@@ -32,7 +32,11 @@ if [[ "$OS_NAME" == *"CentOS Linux"* ]]; then
 elif [[ "$OS_NAME" == *"Red Hat Enterprise Linux"* ]]; then
     LIBGOMP_PATH="/usr/lib64/libgomp.so.1"
 elif [[ "$OS_NAME" == *"Ubuntu"* ]]; then
-    LIBGOMP_PATH="/usr/lib/x86_64-linux-gnu/libgomp.so.1"
+    if [[ "$(uname -m)" == "s390x" ]]; then
+        LIBGOMP_PATH="/usr/lib/s390x-linux-gnu/libgomp.so.1"
+    else
+        LIBGOMP_PATH="/usr/lib/x86_64-linux-gnu/libgomp.so.1"
+    fi
 fi
 
 DEPS_LIST=(
diff --git a/manywheel/build_scripts/build.sh b/manywheel/build_scripts/build.sh
index d139abcb6..c545ca967 100644
--- a/manywheel/build_scripts/build.sh
+++ b/manywheel/build_scripts/build.sh
@@ -15,22 +15,37 @@ CURL_HASH=cf34fe0b07b800f1c01a499a6e8b2af548f6d0e044dca4a29d88a4bee146d131
 AUTOCONF_ROOT=autoconf-2.69
 AUTOCONF_HASH=954bd69b391edc12d6a4a51a2dd1476543da5c6bbf05a95b59dc0dd6fd4c2969
 
-# Dependencies for compiling Python that we want to remove from
-# the final image after compiling Python
-PYTHON_COMPILE_DEPS="zlib-devel bzip2-devel ncurses-devel sqlite-devel readline-devel tk-devel gdbm-devel db4-devel libpcap-devel xz-devel libffi-devel"
-
-# Libraries that are allowed as part of the manylinux1 profile
-MANYLINUX1_DEPS="glibc-devel libstdc++-devel glib2-devel libX11-devel libXext-devel libXrender-devel mesa-libGL-devel libICE-devel libSM-devel ncurses-devel"
-
 # Get build utilities
 MY_DIR=$(dirname "${BASH_SOURCE[0]}")
 source $MY_DIR/build_utils.sh
 
-# Development tools and libraries
-yum -y install bzip2 make git patch unzip bison yasm diffutils \
-    automake which file cmake28 \
-    kernel-devel-`uname -r` \
-    ${PYTHON_COMPILE_DEPS}
+if [ "$(uname -m)" != "s390x" ] ; then
+    # Dependencies for compiling Python that we want to remove from
+    # the final image after compiling Python
+    PYTHON_COMPILE_DEPS="zlib-devel bzip2-devel ncurses-devel sqlite-devel readline-devel tk-devel gdbm-devel db4-devel libpcap-devel xz-devel libffi-devel"
+
+    # Libraries that are allowed as part of the manylinux1 profile
+    MANYLINUX1_DEPS="glibc-devel libstdc++-devel glib2-devel libX11-devel libXext-devel libXrender-devel mesa-libGL-devel libICE-devel libSM-devel ncurses-devel"
+
+    # Development tools and libraries
+    yum -y install bzip2 make git patch unzip bison yasm diffutils \
+        automake which file cmake28 \
+        kernel-devel-`uname -r` \
+        ${PYTHON_COMPILE_DEPS}
+else
+    # Dependencies for compiling Python that we want to remove from
+    # the final image after compiling Python
+    PYTHON_COMPILE_DEPS="zlib1g-dev libbz2-dev libncurses-dev libsqlite3-dev libdb-dev libpcap-dev liblzma-dev libffi-dev"
+
+    # Libraries that are allowed as part of the manylinux1 profile
+    MANYLINUX1_DEPS="libglib2.0-dev libX11-dev libncurses-dev"
+
+    # Development tools and libraries
+    apt install -y bzip2 make git patch unzip diffutils \
+        automake which file cmake \
+        linux-headers-virtual \
+        ${PYTHON_COMPILE_DEPS}
+fi
 
 # Install newest autoconf
 build_autoconf $AUTOCONF_ROOT $AUTOCONF_HASH
@@ -76,12 +91,16 @@ ln -s $PY37_BIN/auditwheel /usr/local/bin/auditwheel
 
 # Clean up development headers and other unnecessary stuff for
 # final image
-yum -y erase wireless-tools gtk2 libX11 hicolor-icon-theme \
-    avahi freetype bitstream-vera-fonts \
-    ${PYTHON_COMPILE_DEPS} || true > /dev/null 2>&1
-yum -y install ${MANYLINUX1_DEPS}
-yum -y clean all > /dev/null 2>&1
-yum list installed
+if [ "$(uname -m)" != "s390x" ] ; then
+    yum -y erase wireless-tools gtk2 libX11 hicolor-icon-theme \
+        avahi freetype bitstream-vera-fonts \
+        ${PYTHON_COMPILE_DEPS} || true > /dev/null 2>&1
+    yum -y install ${MANYLINUX1_DEPS}
+    yum -y clean all > /dev/null 2>&1
+    yum list installed
+else
+    apt purge -y ${PYTHON_COMPILE_DEPS} || true > /dev/null 2>&1
+fi
 # we don't need libpython*.a, and they're many megabytes
 find /opt/_internal -name '*.a' -print0 | xargs -0 rm -f
 # Strip what we can -- and ignore errors, because this just attempts to strip
diff --git a/manywheel/build_scripts/manylinux1-check.py b/manywheel/build_scripts/manylinux1-check.py
index fa77ef43a..7cb62e0c0 100644
--- a/manywheel/build_scripts/manylinux1-check.py
+++ b/manywheel/build_scripts/manylinux1-check.py
@@ -3,7 +3,7 @@
 def is_manylinux1_compatible():
     # Only Linux, and only x86-64 / i686
     from distutils.util import get_platform
-    if get_platform() not in ["linux-x86_64", "linux-i686"]:
+    if get_platform() not in ["linux-x86_64", "linux-i686", "linux-s390x"]:
         return False
 
     # Check for presence of _manylinux module
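
Below is a minimal, hedged sanity-check sketch for the s390x path this diff touches. It is not part of the change itself: it assumes a shell inside the rebuilt Ubuntu-based s390x image with libgomp and python3 present, and it uses sysconfig.get_platform() only as a stand-in for the distutils call made by manylinux1-check.py.

    #!/bin/bash
    # Assumed environment: container built from the new Dockerfile_s390x on an
    # s390x host; none of these paths are guaranteed by the diff itself.
    set -e

    # build_cpu.sh and check_binary.sh key their new branches on this value
    uname -m                                      # expected: s390x

    # multiarch libgomp location hard-coded for Ubuntu/s390x in build_cpu.sh
    ls -l /usr/lib/s390x-linux-gnu/libgomp.so.1

    # platform tag that manylinux1-check.py now accepts (expected: linux-s390x);
    # sysconfig mirrors distutils.util.get_platform used in that script
    python3 -c 'import sysconfig; print(sysconfig.get_platform())'

    # GPU_ARCH_TYPE=cpu-s390x is the value manywheel/build.sh now dispatches to
    # build_cpu.sh; running it requires the usual wheel-build environment, e.g.:
    # GPU_ARCH_TYPE=cpu-s390x bash manywheel/build.sh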