From 06813bdbbd9040bb42303a6d8f57a029b7619e04 Mon Sep 17 00:00:00 2001 From: Juan Carlos Graciosa Date: Wed, 22 Apr 2026 19:56:17 +1000 Subject: [PATCH] Add Gadi Singularity container build files for UW3 --- docs/developer/gadi_singularity/README.md | 73 ++++++ docs/developer/gadi_singularity/petsc.rhel | 197 ++++++++++++++++ .../gadi_singularity/underworld3.rhel | 210 ++++++++++++++++++ 3 files changed, 480 insertions(+) create mode 100644 docs/developer/gadi_singularity/README.md create mode 100644 docs/developer/gadi_singularity/petsc.rhel create mode 100644 docs/developer/gadi_singularity/underworld3.rhel diff --git a/docs/developer/gadi_singularity/README.md b/docs/developer/gadi_singularity/README.md new file mode 100644 index 00000000..6b579a45 --- /dev/null +++ b/docs/developer/gadi_singularity/README.md @@ -0,0 +1,73 @@ +# Building Underworld3 for Gadi (NCI) + +This directory contains two Containerfiles to build the Underworld3 (UW3) Singularity image for Gadi (nci.org.au). + +Both use Rocky Linux 8.10 to match Gadi's OS for ABI compatibility. + +## Build Order + +Build commands must be run from the top-level `underworld3/` directory (the build context). +Builds targeting Gadi must use `--platform linux/amd64`. + +### 1. Build PETSc layer + +```bash +podman build . \ + --platform linux/amd64 \ + --format docker \ + -t ghcr.io//petsc:3.25.0-ompi \ + -f ./docs/developer/gadi_singularity/petsc.rhel +``` + +### 2. Push PETSc image to registry + +```bash +podman push ghcr.io//petsc:3.25.0-ompi +``` + +### 3. Build Underworld3 + +```bash +podman build . \ + --platform linux/amd64 \ + --format docker \ + --build-arg PETSC_IMAGE=ghcr.io//petsc:3.25.0-ompi \ + --build-arg UW3_BRANCH=development \ + -t ghcr.io//underworld3-gadi:latest \ + -f ./docs/developer/gadi_singularity/underworld3.rhel +``` + +### 4. 
Push Underworld3 image + +```bash +podman push ghcr.io/<username>/underworld3-gadi:latest +``` + +## What Each File Does + +- **petsc.rhel** — Builds PETSc 3.25.0 with full AMR support (petsc4py, slepc4py, mmg, parmmg, etc.) +- **underworld3.rhel** — Builds Underworld3 on top of the PETSc image + +## Running on Gadi + +Pull the image on Gadi (redirect cache to scratch to avoid home quota issues): + +```bash +export SINGULARITY_CACHEDIR=/scratch/<project>/<username>/.singularity +module load singularity +singularity pull docker://ghcr.io/<username>/underworld3-gadi:latest +``` + +Run a script with MPI: + +```bash +module load singularity +module load openmpi/4.1.7 +mpiexec -n <num_processes> singularity exec underworld3-gadi_latest.sif python3 <script.py> +``` + +## Notes + +- OpenFabrics (mlx5_0) warnings in the job error log are harmless +- PostHog telemetry failures on compute nodes are harmless (no outbound internet) +- The ghcr.io images must be set to **public** for Singularity to pull without authentication diff --git a/docs/developer/gadi_singularity/petsc.rhel b/docs/developer/gadi_singularity/petsc.rhel new file mode 100644 index 00000000..760e35b8 --- /dev/null +++ b/docs/developer/gadi_singularity/petsc.rhel @@ -0,0 +1,197 @@ +##################################################################### +# UW3 PETSc container +# Multi stage Containerfile based on UW2 version +# This builds PETSc according to the pixi amr-dev environment +# see https://docs.docker.com/get-started/docker-concepts/building-images/multi-stage-builds/ +# +# Stages: +# 1. 'runtime' +# The runtime environment (packages, permissions, ENV vars.) +# is consistent across all stages of this Containerfile. +# +# 2. 'builder' +# The builder layer takes the runtime layer and adds compiling / building software +# that is added to /usr/local and /opt/venv +# +# 3. 'final' == runtime + min. builder +# The final image is a composite of the runtime layer and the +# minimal sections of the builder layer's final software stack. 
+# +# To build use podman from the top level underworld directory. i.e. +# $ podman build . \ +# --platform linux/amd64 \ +# --format docker \ +# -t new_image_name \ +# -f ./docs/developer/gadi_singularity/petsc.rhel +##################################################################### + +# The following are passed in via --build-arg +# Must go before the 1st FROM see +# https://docs.docker.com/engine/reference/builder/#understand-how-arg-and-from-interact +ARG PYTHON_VERSION="3.12" +ARG PETSC_VERSION="3.25.0" +ARG BASE_IMAGE="quay.io/rockylinux/rockylinux:8.10" + +# 1. Stage 1: 'runtime' +FROM ${BASE_IMAGE} as runtime +LABEL maintainer="https://github.com/underworldcode/" + +# need to repeat ARGS after every FROM +ARG PYTHON_VERSION + +#### Containerfile ENV vars - for all image stages +ENV LANG=C.UTF-8 +ENV PYVER=${PYTHON_VERSION} + +# gadi-specific settings +ENV OPENBLAS_NUM_THREADS=1 +ENV OMPI_MCA_io=ompio + +# add user jovyan +ENV NB_USER jovyan +ENV NB_HOME /home/$NB_USER +RUN useradd -m -s /bin/bash -N $NB_USER + +RUN yum update -y \ +&& yum install -y \ + bash-completion \ + openssh \ + openblas \ + python${PYVER}-pip \ + python${PYVER}-devel \ + openmpi \ + findutils \ +&& yum clean all \ +&& rm -rf /var/cache/yum + +# add system openmpi to $PATH and $LD_LIBRARY_PATH +ENV PATH=/usr/lib64/openmpi/bin:$PATH +ENV LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH + +ENV PYOPT=/opt/venv +# build and set open permissions on virtual environment +RUN python${PYVER} -m venv $PYOPT \ +&& chmod ugo+rwx $PYOPT + +# define python env vars. +# prepending on PATH means all pip install will go to the PYOPT +ENV PATH=$PYOPT/bin:$PATH +ENV PYTHONPATH=$PYTHONPATH:$PYOPT/lib/python${PYVER}/site-packages + +# runtime python requirements +RUN python${PYVER} -m pip install wheel \ + "numpy<2" + +# 2. 
Define the builder layer +FROM runtime as builder + +ARG PETSC_VERSION +ARG PYTHON_VERSION + +RUN yum install -y \ + ca-certificates \ + wget \ + make \ + gcc \ + gcc-gfortran \ + gcc-c++ \ + cmake \ + patch \ + openblas \ + zlib-devel \ + openmpi-devel \ + findutils \ + git \ + flex \ + bison \ +&& yum clean all \ +&& rm -rf /var/cache/yum +# NOTE flex and bison are needed to build + +RUN python${PYVER} -m pip install "cython>=3.1" \ + "setuptools>=75" \ + "meson" \ + "meson-python" \ + "ninja" \ +&& python${PYVER} -m pip install --no-cache-dir --no-binary=mpi4py "mpi4py>=4,<5" +# no-binary for mpi4py to force build against openmpi, rather than mpich (default) + +# copy patches into builder +COPY petsc-custom/patches/scotch-7.0.10-c23-fix.tar.gz /tmp/scotch.tar.gz +COPY petsc-custom/patches/plexfem-internal-boundary-ownership-fix.patch /tmp/ + +# get petsc +RUN mkdir -p /tmp/src +WORKDIR /tmp/src +RUN wget https://web.cels.anl.gov/projects/petsc/download/release-snapshots/petsc-lite-${PETSC_VERSION}.tar.gz --no-check-certificate \ +&& tar -zxf petsc-lite-${PETSC_VERSION}.tar.gz +WORKDIR /tmp/src/petsc-${PETSC_VERSION} + +# apply patch then configure +# patch may already be included in newer PETSc versions - skip gracefully if it doesn't apply +RUN if patch -p1 --dry-run < /tmp/plexfem-internal-boundary-ownership-fix.patch 2>/dev/null; then \ + patch -p1 < /tmp/plexfem-internal-boundary-ownership-fix.patch; \ + echo "plexfem patch applied successfully"; \ + else \ + echo "plexfem patch not applicable (may already be in this PETSc version), skipping"; \ + fi +RUN python${PYVER} ./configure \ + --with-debugging=0 \ + --prefix=/usr/local \ + --with-shared-libraries=1 \ + --with-cxx-dialect=C++11 \ + "--COPTFLAGS=-g -O3" "--CXXOPTFLAGS=-g -O3" "--FOPTFLAGS=-g -O3" \ + --useThreads=0 \ + --with-x=0 \ + --with-pragmatic=1 \ + --with-petsc4py=1 \ + --with-slepc4py=1 \ + --download-eigen=1 \ + --download-metis=1 \ + --download-parmetis=1 \ + --download-mumps=1 \ + 
--download-scalapack=1 \ + --download-hypre=1 \ + --download-superlu=1 \ + --download-superlu_dist=1 \ + --download-mmg=1 \ + "--download-mmg-cmake-arguments=-DMMG_INSTALL_PRIVATE_HEADERS=ON -DUSE_SCOTCH=OFF" \ + --download-parmmg=1 \ + --download-pragmatic=1 \ + "--download-ptscotch=/tmp/scotch.tar.gz" \ + --download-slepc=1 \ + --download-hdf5=1 \ + --download-fblaslapack=1 \ + --download-zlib=1 \ + --download-ctetgen=1 \ + --download-triangle=1 \ + --with-make-np=2 +RUN make PETSC_DIR=`pwd` PETSC_ARCH=arch-linux-c-opt all +RUN make PETSC_DIR=`pwd` PETSC_ARCH=arch-linux-c-opt install \ + || (echo "=== petsc4py build log ===" && \ + cat arch-linux-c-opt/lib/petsc/conf/petsc4py.build.log 2>/dev/null && \ + echo "=== slepc4py build log ===" && \ + cat arch-linux-c-opt/lib/petsc/conf/slepc4py.build.log 2>/dev/null && \ + exit 1) +RUN rm -rf /usr/local/share/petsc + +# record builder stage packages used +RUN python${PYVER} -m pip freeze > /opt/requirements.txt \ +&& dnf history userinstalled > /opt/packages.txt + +# Stage 3: 'final' +FROM runtime as final + +COPY --from=builder /opt /opt +COPY --from=builder /usr/local /usr/local + +# MUST set PETSc environment variables +ENV PETSC_DIR=/usr/local +ENV PYTHONPATH=$PYTHONPATH:$PETSC_DIR/lib + +# switch to not-root user and workspace +USER $NB_USER +WORKDIR $NB_HOME + +# default command is to run jupyter lab +CMD ["jupyter-lab", "--no-browser", "--ip='0.0.0.0'"] \ No newline at end of file diff --git a/docs/developer/gadi_singularity/underworld3.rhel b/docs/developer/gadi_singularity/underworld3.rhel new file mode 100644 index 00000000..4bbb9673 --- /dev/null +++ b/docs/developer/gadi_singularity/underworld3.rhel @@ -0,0 +1,210 @@ +##################################################################### +# Multi stage Containerfile for Underworld 3 +# UW3 container based on UW2 version +# This UW3 container is based on pixi amr-dev environment +# see 
https://docs.docker.com/get-started/docker-concepts/building-images/multi-stage-builds/ +# +# Stages: +# 1. 'runtime' +# The runtime environment (packages, permissions, ENV vars.) +# is consistent across all stages of this Containerfile. +# +# 2. 'builder' +# The builder layer takes the runtime layer and adds compiling / building software +# that is added to /usr/local and /opt/venv +# +# 3. 'final' == runtime + min. builder +# The final image is a composite of the runtime layer and the +# minimal sections of the builder layer's final software stack. +# +# To build use podman from the top level underworld directory. i.e. +# $ podman build . \ +# --platform linux/amd64 \ +# --format docker \ +# --build-arg UW3_BRANCH=xxx \ +# -t new_image_name \ +# -f ./docs/developer/gadi_singularity/underworld3.rhel +##################################################################### + +# The following are passed in via --build-arg +# Must go before the 1st FROM see +# https://docs.docker.com/engine/reference/builder/#understand-how-arg-and-from-interact +ARG PYTHON_VERSION="3.12" +ARG BASE_IMAGE="quay.io/rockylinux/rockylinux:8.10" +ARG PETSC_IMAGE="ghcr.io/jcgraciosa/petsc:3.25.0-ompi" +ARG UW3_BRANCH="development" + +# 'petsc-image' will be used later on in builder stage COPY command +FROM ${PETSC_IMAGE} as petsc-image + +#################### +# Stage 1: 'runtime' +#################### +FROM ${BASE_IMAGE} as runtime +LABEL maintainer="https://github.com/underworldcode/" + +# need to repeat ARGS after every FROM +ARG PYTHON_VERSION + +#### Containerfile ENV vars - for all image stages +ENV LANG=C.UTF-8 +ENV PYVER=${PYTHON_VERSION} + +# add user jovyan +ENV NB_USER jovyan +ENV NB_HOME /home/$NB_USER +RUN useradd -m -s /bin/bash -N $NB_USER + +# runtime packages - [vim, git] are optional +RUN yum update -y \ +&& yum install -y \ + ca-certificates \ + bash-completion \ + openssh \ + openblas \ + openmpi \ + python${PYVER}-pip \ + python${PYVER}-devel \ + zlib \ + mesa-libGL \ 
+ mesa-libGLU \ + mesa-libOSMesa \ + libX11 \ + libXrender \ + libXext \ + libXfixes \ + libXcursor \ + libXinerama \ + libXrandr \ + libXft \ + fontconfig \ +&& yum clean all \ +&& rm -rf /var/cache/yum + +# add system openmpi to $PATH and $LD_LIBRARY_PATH +ENV PATH=/usr/lib64/openmpi/bin:$PATH +ENV LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH + +ENV PYOPT=/opt/venv +# build and set open permissions on virtual environment +RUN python${PYVER} -m venv $PYOPT \ +&& chmod ugo+rwx $PYOPT + +# define python env vars. +# prepappending on PATH means all pip install will goto the PYOPT +ENV PATH=$PYOPT/bin:$PATH +ENV PYTHONPATH=$PYTHONPATH:$PYOPT/lib/python${PYVER}/site-packages +# taken from PETSC images - will be needed in the final image +ENV PETSC_DIR=/usr/local +ENV PYTHONPATH=$PYTHONPATH:$PETSC_DIR/lib + +#################### +# Stage 2: 'builder' +#################### +FROM petsc-image as builder +ARG PYTHON_VERSION +ARG UW3_BRANCH + +# root to install with yum +USER root +RUN yum install -y\ + cmake \ + make \ + git \ + gcc \ + gcc-c++ \ + gcc-gfortran \ + findutils \ + openmpi-devel \ +&& yum clean all \ +&& rm -rf /var/cache/yum +# NOTE: underworld2 does not yum install gcc + +# install python build time and runtime requirements here +RUN python${PYVER} -m pip install --no-cache-dir \ + "setuptools>=75" \ + "cython>=3.1" \ + "scipy>=1.15" \ + "numpy<2" \ + matplotlib \ + pint \ + pytest \ + jupytext \ + sympy \ + "pydantic>=2" \ + pyyaml \ + psutil \ + typing_extensions \ + xxhash \ + pykdtree \ + trimesh \ + ipython \ + jupyterlab \ + "ipywidgets<9.0.0" \ + jupyter-server-proxy \ + "trame>=2.5.2" \ + "trame-vtk>=2.5.8" \ + "trame-vuetify>=2.3.1" \ + cmocean \ + colorcet \ + imageio \ + imageio-ffmpeg \ + "rich<14" \ + meshio \ + pyvista +# gmsh wheels are only available for x86_64; skip gracefully on aarch64 (local Apple Silicon builds) +RUN pip install --no-cache-dir gmsh pygmsh || echo "gmsh/pygmsh not available on this platform, skipping" + +# H5PY with 
mpi - match pixi version constraint! +RUN CC=mpicc HDF5_MPI="ON" HDF5_DIR=${PETSC_DIR} \ + python${PYVER} -m pip install \ + --no-binary=h5py \ + --no-cache-dir \ + "h5py>=3.12" + +# separate so if this fails, don't need to reinstall everything again with layer caching +# vtk-osmesa wheels only available for x86_64; skip gracefully on aarch64 (local Apple Silicon builds) +RUN pip install --no-cache-dir --extra-index-url https://wheels.vtk.org vtk-osmesa \ + || echo "vtk-osmesa not available on this platform, skipping" + +# Clone and install uw3 +RUN git clone --depth 1 --branch ${UW3_BRANCH} \ + https://github.com/underworldcode/underworld3.git /tmp/underworld3 +WORKDIR /tmp/underworld3 +RUN pip install --no-build-isolation --no-cache-dir . + +# record 'builder' stage packages used +RUN python${PYVER} -m pip freeze >/opt/requirements.txt \ +&& dnf history userinstalled >/opt/installed.txt + +#################### +# Stage 3: 'final', a combination of 'runtime' and 'builder' stages +#################### + +FROM runtime as final + +COPY --from=builder --chown=$NB_USER:users /opt /opt +COPY --from=builder --chown=$NB_USER:users /usr/local /usr/local + +# must make directory before COPY into it for permissions to work () +# set default viewer to a notebook see https://jupytext.readthedocs.io/en/latest/text-notebooks.html#with-a-double-click +RUN mkdir -p $NB_HOME/workspace $NB_HOME/Underworld3/ \ +&& chown $NB_USER:users -R $NB_HOME \ +&& jupyter-server extension enable --sys-prefix jupyter_server_proxy \ +&& jupytext-config set-default-viewer + +# confirm if these are the ones to put into the container +# copy examples, tests, etc. 
+COPY --chown=$NB_USER:users ./docs/examples $NB_HOME/Underworld3/examples +COPY --chown=$NB_USER:users ./docs/beginner $NB_HOME/Underworld3/beginner +COPY --chown=$NB_USER:users ./docs/advanced $NB_HOME/Underworld3/advanced + +EXPOSE 8888 +WORKDIR $NB_HOME +USER $NB_USER + +# Declare a volume space +VOLUME $NB_HOME/workspace + +CMD ["jupyter-lab", "--no-browser", "--ip='0.0.0.0'"] +