diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml
index 313966f..0796a27 100644
--- a/.github/workflows/docker-build.yml
+++ b/.github/workflows/docker-build.yml
@@ -1,4 +1,4 @@
-name: Docker Build
+name: Docker Build - Python Next-Gen
 
 on:
   workflow_dispatch:
@@ -6,7 +6,8 @@ on:
   #  branches: [ "main" ]
 
 env:
-  UBUNTU_VERSION: 22.04
+  # Sets the Ubuntu version for the NVIDIA and CPU builds. The AMD build is pinned to 22.04.
+  UBUNTU_VERSION: 24.04
   BUILDX_NO_DEFAULT_ATTESTATIONS: 1
 
 jobs:
@@ -16,156 +17,115 @@ jobs:
       fail-fast: false
       matrix:
         build:
-          - {latest: "false", python: "3.10", venv: "python_310"}
+          - {latest: "false", python: "3.12", venv: "python_312"}
     steps:
-      -
-        name: Free Space
+      - name: Free Space
         run: |
           df -h
-          sudo rm -rf /usr/share/dotnet
-          sudo rm -rf /opt/ghc
-          sudo rm -rf /usr/local/.ghcup
-          sudo rm -rf /usr/local/share/boost
-          sudo rm -rf /usr/local/lib/android
-          sudo rm -rf "$AGENT_TOOLSDIRECTORY"
+          sudo rm -rf /usr/share/dotnet /opt/ghc /usr/local/.ghcup /usr/local/share/boost /usr/local/lib/android "$AGENT_TOOLSDIRECTORY"
           df -h
-      -
-        name: Env Setter
+      - name: Env Setter
        run: |
           REPO=${GITHUB_REPOSITORY,,}
           echo "REPO_NAMESPACE=${REPO%%/*}" >> ${GITHUB_ENV}
           echo "REPO_NAME=${REPO#*/}" >> ${GITHUB_ENV}
-      -
-        name: Checkout
-        uses: actions/checkout@v3
-      -
-        name: Permissions fixes
-        run: |
-          target="${HOME}/work/${{ env.REPO_NAME }}/${{ env.REPO_NAME }}/build/COPY*"
-          chmod -R ug+rwX ${target}
-      -
-        name: Login to DockerHub
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Login to DockerHub
         uses: docker/login-action@v3
         with:
-          username: ${{ vars.DOCKERHUB_USER }}
+          username: ${{ secrets.DOCKERHUB_USER }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
-      -
-        name: Login to GitHub Container Registry
+      - name: Login to GitHub Container Registry
         uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}
-      -
-        name: Set tags
+      - name: Set tags
         run: |
+          img_path_dhub="${{ secrets.DOCKERHUB_USER }}/${{ env.REPO_NAME }}"
           img_path_ghcr="ghcr.io/${{ env.REPO_NAMESPACE }}/${{ env.REPO_NAME }}"
-          img_path_dhub="${{ secrets.DOCKERHUB_USER }}/${{ env.REPO_NAME }}-cpu"
-          base_tag="${{ matrix.build.python }}-v2-cpu-${{ env.UBUNTU_VERSION }}"
+          # The new, clean versioning for the CPU image
+          base_tag="v1-python${{ matrix.build.python }}-cpu-ubuntu${{ env.UBUNTU_VERSION }}"
 
           if [[ ${{ matrix.build.latest }} == "true" ]]; then
               echo "Marking latest"
-              # GHCR.io Tags
-              TAGS="${img_path_ghcr}:${base_tag}, ${img_path_ghcr}:latest-cpu"
-              # Docker.io tags
-              TAGS="${TAGS}, ${img_path_dhub}:${base_tag}, ${img_path_dhub}:latest"
+              TAGS="${img_path_ghcr}:${base_tag}, ${img_path_ghcr}:latest-cpu, ${img_path_dhub}:${base_tag}, ${img_path_dhub}:latest-cpu"
           else
               TAGS="${img_path_ghcr}:${base_tag}, ${img_path_dhub}:${base_tag}"
           fi
           echo "TAGS=${TAGS}" >> ${GITHUB_ENV}
-      -
-        name: Build and push
-        uses: docker/build-push-action@v4
+      - name: Build and push
+        uses: docker/build-push-action@v5
         with:
           context: build
           build-args: |
-            IMAGE_BASE=ghcr.io/ai-dock/base-image:v2-cpu-${{ env.UBUNTU_VERSION }}
+            # This correctly points to YOUR CPU base image from Phase 1
+            IMAGE_BASE=${{ secrets.DOCKERHUB_USER }}/base-image:v1-cpu-ubuntu${{ env.UBUNTU_VERSION }}
             PYTHON_VERSION=${{ matrix.build.python }}
             PYTHON_VENV_NAME=${{ matrix.build.venv }}
           push: true
-          # Avoids unknown/unknown architecture and extra metadata
           provenance: false
           tags: ${{ env.TAGS }}
-
+
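The `Env Setter` step above (repeated in each job) relies on bash parameter expansion to split the repository slug. As a quick illustration only, here is a minimal standalone sketch with a hypothetical repository name substituted for the value GitHub Actions injects:

```bash
#!/usr/bin/env bash
# Hypothetical stand-in for the GITHUB_REPOSITORY value provided by Actions.
GITHUB_REPOSITORY="PhoneHomePhone/python"

REPO=${GITHUB_REPOSITORY,,}         # lower-case the whole slug      -> "phonehomephone/python"
echo "REPO_NAMESPACE=${REPO%%/*}"   # strip longest "/..." suffix    -> "phonehomephone"
echo "REPO_NAME=${REPO#*/}"         # strip shortest ".../" prefix   -> "python"
```

The slug is lower-cased first because Docker image repository names must be lower case.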
   nvidia-base:
     runs-on: ubuntu-latest
     strategy:
       fail-fast: false
       matrix:
+        # The new, modernized matrix for NVIDIA builds
         build:
-          - {latest: "false", python: "3.10", venv: "python_310", cuda: "12.4.1-base"}
-          - {latest: "false", python: "3.10", venv: "python_310", cuda: "12.4.1-cudnn-runtime"}
-          - {latest: "false", python: "3.10", venv: "python_310", cuda: "12.4.1-cudnn-devel"}
-          - {latest: "false", python: "3.10", venv: "python_310", cuda: "12.1.1-base"}
-          - {latest: "false", python: "3.10", venv: "python_310", cuda: "12.1.1-cudnn8-runtime"}
-          - {latest: "false", python: "3.10", venv: "python_310", cuda: "12.1.1-cudnn8-devel"}
-          - {latest: "false", python: "3.10", venv: "python_310", cuda: "11.8.0-base"}
-          - {latest: "false", python: "3.10", venv: "python_310", cuda: "11.8.0-cudnn8-runtime"}
-          - {latest: "false", python: "3.10", venv: "python_310", cuda: "11.8.0-cudnn8-devel"}
+          - {latest: "true", python: "3.12", venv: "python_312", cuda: "12.8.1-runtime"}
+          - {latest: "false", python: "3.12", venv: "python_312", cuda: "12.8.1-base"}
+          - {latest: "false", python: "3.12", venv: "python_312", cuda: "12.5.1-runtime"}
+          - {latest: "false", python: "3.12", venv: "python_312", cuda: "12.5.1-base"}
     steps:
-      -
-        name: Free Space
+      - name: Free Space
         run: |
           df -h
-          sudo rm -rf /usr/share/dotnet
-          sudo rm -rf /opt/ghc
-          sudo rm -rf /usr/local/.ghcup
-          sudo rm -rf /usr/local/share/boost
-          sudo rm -rf /usr/local/lib/android
-          sudo rm -rf "$AGENT_TOOLSDIRECTORY"
+          sudo rm -rf /usr/share/dotnet /opt/ghc /usr/local/.ghcup /usr/local/share/boost /usr/local/lib/android "$AGENT_TOOLSDIRECTORY"
           df -h
-      -
-        name: Env Setter
+      - name: Env Setter
         run: |
           REPO=${GITHUB_REPOSITORY,,}
           echo "REPO_NAMESPACE=${REPO%%/*}" >> ${GITHUB_ENV}
           echo "REPO_NAME=${REPO#*/}" >> ${GITHUB_ENV}
-      -
-        name: Checkout
-        uses: actions/checkout@v3
-      -
-        name: Permissions fixes
-        run: |
-          target="${HOME}/work/${{ env.REPO_NAME }}/${{ env.REPO_NAME }}/build/COPY*"
-          chmod -R ug+rwX ${target}
-      -
-        name: Login to DockerHub
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Login to DockerHub
         uses: docker/login-action@v3
         with:
-          username: ${{ vars.DOCKERHUB_USER }}
+          username: ${{ secrets.DOCKERHUB_USER }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
-      -
-        name: Login to GitHub Container Registry
+      - name: Login to GitHub Container Registry
         uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}
-      -
-        name: Set tags
+      - name: Set tags
         run: |
+          img_path_dhub="${{ secrets.DOCKERHUB_USER }}/${{ env.REPO_NAME }}"
           img_path_ghcr="ghcr.io/${{ env.REPO_NAMESPACE }}/${{ env.REPO_NAME }}"
-          img_path_dhub="${{ secrets.DOCKERHUB_USER }}/${{ env.REPO_NAME }}-cuda"
-          base_tag="${{ matrix.build.python }}-v2-cuda-${{ matrix.build.cuda }}-${{ env.UBUNTU_VERSION }}"
+          # The new, clean versioning scheme for this image
+          base_tag="v1-python${{ matrix.build.python }}-cuda-${{ matrix.build.cuda }}-ubuntu${{ env.UBUNTU_VERSION }}"
 
           if [[ ${{ matrix.build.latest }} == "true" ]]; then
               echo "Marking latest"
-              # GHCR.io Tags
-              TAGS="${img_path_ghcr}:${base_tag}, ${img_path_ghcr}:latest-cuda, ${img_path_ghcr}:latest"
-              # Docker.io Tags
-              TAGS="${TAGS}, ${img_path_dhub}:${base_tag}, ${img_path_dhub}:latest"
+              TAGS="${img_path_ghcr}:${base_tag}, ${img_path_ghcr}:latest, ${img_path_dhub}:${base_tag}, ${img_path_dhub}:latest"
           else
               TAGS="${img_path_ghcr}:${base_tag}, ${img_path_dhub}:${base_tag}"
           fi
           echo "TAGS=${TAGS}" >> ${GITHUB_ENV}
-      -
-        name: Build and push
-        uses: docker/build-push-action@v4
+      - name: Build and push
+        uses: docker/build-push-action@v5
         with:
           context: build
           build-args: |
-            IMAGE_BASE=ghcr.io/ai-dock/base-image:v2-cuda-${{ matrix.build.cuda }}-${{ env.UBUNTU_VERSION }}
+            # This correctly points to YOUR base image from Phase 1
+            IMAGE_BASE=${{ secrets.DOCKERHUB_USER }}/base-image:v1-cuda-${{ matrix.build.cuda }}-ubuntu${{ env.UBUNTU_VERSION }}
             PYTHON_VERSION=${{ matrix.build.python }}
             PYTHON_VENV_NAME=${{ matrix.build.venv }}
           push: true
@@ -177,75 +137,50 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
+        # The new, modernized matrix for AMD builds
         build:
-          - {latest: "false", python: "3.10", venv: "python_310", rocm: "6.0-core"}
-          - {latest: "false", python: "3.10", venv: "python_310", rocm: "6.0-runtime"}
-          - {latest: "false", python: "3.10", venv: "python_310", rocm: "6.0-devel"}
+          - {latest: "false", python: "3.12", venv: "python_312", rocm: "6.2-runtime"}
     steps:
-      -
-        name: Free Space
+      - name: Free Space
         run: |
           df -h
-          sudo rm -rf /usr/share/dotnet
-          sudo rm -rf /opt/ghc
-          sudo rm -rf /usr/local/.ghcup
-          sudo rm -rf /usr/local/share/boost
-          sudo rm -rf /usr/local/lib/android
-          sudo rm -rf "$AGENT_TOOLSDIRECTORY"
+          sudo rm -rf /usr/share/dotnet /opt/ghc /usr/local/.ghcup /usr/local/share/boost /usr/local/lib/android "$AGENT_TOOLSDIRECTORY"
           df -h
-      -
-        name: Env Setter
+      - name: Env Setter
         run: |
           REPO=${GITHUB_REPOSITORY,,}
           echo "REPO_NAMESPACE=${REPO%%/*}" >> ${GITHUB_ENV}
           echo "REPO_NAME=${REPO#*/}" >> ${GITHUB_ENV}
-      -
-        name: Checkout
-        uses: actions/checkout@v3
-      -
-        name: Permissions fixes
-        run: |
-          target="${HOME}/work/${{ env.REPO_NAME }}/${{ env.REPO_NAME }}/build/COPY*"
-          chmod -R ug+rwX ${target}
-      -
-        name: Login to DockerHub
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Login to DockerHub
         uses: docker/login-action@v3
         with:
-          username: ${{ vars.DOCKERHUB_USER }}
+          username: ${{ secrets.DOCKERHUB_USER }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
-      -
-        name: Login to GitHub Container Registry
+      - name: Login to GitHub Container Registry
         uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}
-      -
-        name: Set tags
+      - name: Set tags
         run: |
+          img_path_dhub="${{ secrets.DOCKERHUB_USER }}/${{ env.REPO_NAME }}"
           img_path_ghcr="ghcr.io/${{ env.REPO_NAMESPACE }}/${{ env.REPO_NAME }}"
-          img_path_dhub="${{ secrets.DOCKERHUB_USER }}/${{ env.REPO_NAME }}-rocm"
-          base_tag="${{ matrix.build.python }}-v2-rocm-${{ matrix.build.rocm }}-${{ env.UBUNTU_VERSION }}"
-
-          if [[ ${{ matrix.build.latest }} == "true" ]]; then
-              echo "Marking latest"
-              # GHCR.io Tags
-              TAGS="${img_path_ghcr}:${base_tag}, ${img_path_ghcr}:latest-rocm"
-              # Docker.io Tags
-              TAGS="${TAGS}, ${img_path_dhub}:${base_tag}, ${img_path_dhub}:latest"
-          else
-              TAGS="${img_path_ghcr}:${base_tag}, ${img_path_dhub}:${base_tag}"
-          fi
+          # The tag explicitly states ubuntu22.04 for the AMD build
+          base_tag="v1-python${{ matrix.build.python }}-rocm-${{ matrix.build.rocm }}-ubuntu22.04"
+          TAGS="${img_path_ghcr}:${base_tag}, ${img_path_dhub}:${base_tag}"
           echo "TAGS=${TAGS}" >> ${GITHUB_ENV}
-      -
-        name: Build and push
-        uses: docker/build-push-action@v4
+      - name: Build and push
+        uses: docker/build-push-action@v5
         with:
           context: build
           build-args: |
-            IMAGE_BASE=ghcr.io/ai-dock/base-image:v2-rocm-${{ matrix.build.rocm }}-${{ env.UBUNTU_VERSION }}
+            # This correctly points to YOUR AMD base image from Phase 1
+            IMAGE_BASE=${{ secrets.DOCKERHUB_USER }}/base-image:v1-rocm-${{ matrix.build.rocm }}-ubuntu22.04
             PYTHON_VERSION=${{ matrix.build.python}}
             PYTHON_VENV_NAME=${{ matrix.build.venv }}
           push: true
           provenance: false
-          tags: ${{ env.TAGS }}
+          tags: ${{ env.TAGS }}
\ No newline at end of file
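To make the new tagging scheme concrete, the following sketch shows roughly what the `Set tags` step evaluates to for the single `latest: "true"` NVIDIA matrix entry. The Docker Hub namespace is an assumption (the workflow reads it from the `DOCKERHUB_USER` secret):

```bash
#!/usr/bin/env bash
# Assumed inputs mirroring the 12.8.1-runtime matrix entry and this PR's naming.
img_path_dhub="phonehomephone/python"          # ${DOCKERHUB_USER}/${REPO_NAME} (assumed)
img_path_ghcr="ghcr.io/phonehomephone/python"  # ghcr.io/${REPO_NAMESPACE}/${REPO_NAME}
base_tag="v1-python3.12-cuda-12.8.1-runtime-ubuntu24.04"
latest="true"

if [[ $latest == "true" ]]; then
    TAGS="${img_path_ghcr}:${base_tag}, ${img_path_ghcr}:latest, ${img_path_dhub}:${base_tag}, ${img_path_dhub}:latest"
else
    TAGS="${img_path_ghcr}:${base_tag}, ${img_path_dhub}:${base_tag}"
fi
echo "$TAGS"
```

Every other matrix entry takes the `else` branch and receives only its explicit version tag on both registries.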
diff --git a/README.md b/README.md
index 1c9b761..4fe385c 100644
--- a/README.md
+++ b/README.md
@@ -1,73 +1,48 @@
-[![Docker Build](https://github.com/ai-dock/python/actions/workflows/docker-build.yml/badge.svg)](https://github.com/ai-dock/python/actions/workflows/docker-build.yml)
+[![Docker Build](https://github.com/PhoneHomePhone/python/actions/workflows/docker-build.yml/badge.svg)](https://github.com/PhoneHomePhone/python/actions/workflows/docker-build.yml)
 
-# AI-Dock + Python
+# Modernized AI-Dock + Python
 
-Run python in a cloud-first AI-Dock container. Nothing is added to the installed python environment(s) - You'll have python & pip.
-
-This image provides a great starting point for python development when used standalone but its also a solid foundation for extending upon.
+This is a modernized fork of the original `ai-dock/python` project, updated to provide a clean, cloud-first Python 3.12 environment on an Ubuntu 24.04 base.
 
+This image provides a great starting point for Python development when used standalone, but it's also a solid foundation for extending with other applications like ComfyUI.
 
 ## Documentation
 
-All AI-Dock containers share a common base which is designed to make running on cloud services such as [vast.ai](https://link.ai-dock.org/vast.ai) and [runpod.io](https://link.ai-dock.org/runpod.io) as straightforward and user friendly as possible.
-
-Common features and options are documented in the [base wiki](https://github.com/ai-dock/base-image/wiki) but any additional features unique to this image will be detailed below.
-
+This project is part of a modernized stack. The common features for the underlying base image are documented in the **[base image repository wiki](https://github.com/PhoneHomePhone/base-image/wiki)**.
 
 ## Version Tags
 
-The `:latest` tag points to `:latest-cuda`
+The `:latest` tag points to the latest stable CUDA runtime build (`:v1-python3.12-cuda-12.8.1-runtime-ubuntu24.04`).
 
-Tags follow these patterns:
+Tags follow a clear and consistent pattern:
+`v1-python[python-version]-[platform-and-version]-ubuntu[ubuntu-version]`
 
 ##### _CUDA_
-- `:[python-version]-v2-cuda-[x.x.x]-[base|runtime|devel]-[ubuntu-version]`
-
-- `:latest-cuda` → `:3.10-v2-cuda-11.8.0-cudnn8-runtime-22.04`
+* **Example:** `:v1-python3.12-cuda-12.8.1-runtime-ubuntu24.04`
+* **Latest Tag:** `:latest`
 
 ##### _ROCm_
-- `:[python-version]-v2-rocm-[x.x.x]-[core|runtime]-[ubuntu-version]`
-
-- `:latest-rocm` → `:3.10-v2-rocm-6.0-runtime-22.04`
-
-ROCm builds are experimental. Please give feedback.
+* **Example:** `:v1-python3.12-rocm-6.2-runtime-ubuntu22.04`
+* **Note:** ROCm builds currently use an Ubuntu 22.04 base pending official driver support for 24.04.
 
 ##### _CPU_
-- `:[python-version]-v2-cpu-[ubuntu-version]`
-
-- `:latest-cpu` → `:3.10-v2-cpu-22.04`
-
-Browse [here](https://github.com/ai-dock/python/pkgs/container/python) for an image suitable for your target environment.
-
-Supported Python versions: `3.10`
-
-Supported Platforms: `NVIDIA CUDA`, `AMD ROCm`, `CPU`
+* **Example:** `:v1-python3.12-cpu-ubuntu24.04`
+* **Latest Tag:** `:latest-cpu`
 
->[!NOTE]
->Recent builds include `v2` in their image tag. These images use `venv` rather than `micromamba` for environment management.
-
-
-
-## Pre-Configured Templates
-
-**Vast.​ai**
-
-[python:latest-cuda](https://link.ai-dock.org/template-vast-python) (CUDA)
-
-[python:latest-rocm](https://link.ai-dock.org/template-vast-python-rocm) (ROCm)
+~~Browse the available image tags on **[Docker Hub](https://hub.docker.com/r/phonehomephone/python/tags)** or on the **[GitHub Packages](https://github.com/PhoneHomePhone/python/pkgs/container/python)** page for this repository.~~
 
 ---
 
-**Runpod.​io**
-
-[python:latest](https://link.ai-dock.org/template-runpod-python)
+### Supported Versions
+* **Python (Default):** `3.12`
+* **Additional Kernels:** `3.10`, `3.11`
+* **Platforms:** `NVIDIA CUDA`, `AMD ROCm`, `CPU`
+* **OS:** `Ubuntu 24.04` (NVIDIA/CPU), `Ubuntu 22.04` (AMD)
 
 ---
 
->[!NOTE]
->These templates are configured to use the `latest` tag but you are free to change to any of the available Python CUDA tags listed [here](https://github.com/ai-dock/python/pkgs/container/python)
-
----
+### Credits and Acknowledgements
 
-_The author ([@robballantyne](https://github.com/robballantyne)) may be compensated if you sign up to services linked in this document. Testing multiple variants of GPU images in many different environments is both costly and time-consuming; This along with [sponsorships](https://github.com/sponsors/ai-dock) helps to offset costs and further the development of the project_
+This project is a direct fork and modernization of the original, excellent work done by **[ai-dock](https://github.com/ai-dock)**. All credit for the foundational architecture and scripts belongs to the original author, [@robballantyne](https://github.com/robballantyne).
+This fork is maintained by [@PhoneHomePhone](https://github.com/PhoneHomePhone).
\ No newline at end of file
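As a usage note for the tag scheme documented above, pulling and running one of the images might look like the sketch below. The repository name is assumed from the compose file later in this diff, and the port mapping is illustrative; the actual service ports are documented in the base image wiki.

```bash
# Pull the CUDA runtime build referenced in the README (image name assumed from this PR).
docker pull phonehomephone/python:v1-python3.12-cuda-12.8.1-runtime-ubuntu24.04

# Run it with GPU access; adjust the port mapping to whichever services you expose.
docker run --rm -it --gpus all -p 8888:8888 \
    phonehomephone/python:v1-python3.12-cuda-12.8.1-runtime-ubuntu24.04
```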
diff --git a/build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/common.sh b/build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/common.sh
index 79b0a15..bd1ea72 100755
--- a/build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/common.sh
+++ b/build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/common.sh
@@ -3,9 +3,14 @@ source /opt/ai-dock/etc/environment.sh
 
 build_common_main() {
-    apt update
-    build_common_install_python
+    apt-get update
+    # Add the deadsnakes PPA to get access to older python versions
+    add-apt-repository ppa:deadsnakes/ppa -y
+    apt-get update
+    build_common_install_jupyter
+    build_common_install_python_kernels  # Renamed for clarity
+
 }
 
 build_common_do_install_python_venv() {
@@ -17,38 +22,47 @@ build_common_do_install_python_venv() {
     venv="${VENV_DIR}/${1}"
     "python${2}" -m venv "$venv"
 
-    "$venv/bin/pip" install --no-cache-dir \
+    "$venv/bin/pip" install --no-cache-dir --upgrade \
+        pip \
         ipykernel \
         ipywidgets
 
     "$venv/bin/python" -m ipykernel install \
         --name="$1" \
-        --display-name="Python$2 ($1)"
+        --display-name="Python ${2} (${1})"
 }
 
-build_common_install_python() {
+build_common_install_python_kernels() {
     if [[ $PYTHON_VERSION != "all" ]]; then
         build_common_do_install_python_venv "${PYTHON_VENV_NAME}" "${PYTHON_VERSION}"
     else
-        # Multi Python
+        # Install multiple Python versions as selectable Jupyter kernels
+        echo "Installing additional Python kernels..."
         build_common_do_install_python_venv "python_310" "3.10"
         build_common_do_install_python_venv "python_311" "3.11"
-        build_common_do_install_python_venv "python_312" "3.12"
+        #build_common_do_install_python_venv "python_312" "3.12"
     fi
}
 
 build_common_install_jupyter() {
     $APT_INSTALL \
-        python3.10-full \
-        python3.10-dev \
-        python3.10-venv
-    python3.10 -m venv "$JUPYTER_VENV"
+        python3.12-full \
+        python3.12-dev \
+        python3.12-venv
+    python3.12 -m venv "$JUPYTER_VENV"
+
     source /opt/nvm/nvm.sh
     nvm use default
 
-    "$JUPYTER_VENV_PIP" install --no-cache-dir \
+    "$JUPYTER_VENV_PIP" install --no-cache-dir --upgrade \
+        pip \
         jupyterlab \
-        notebook
+        notebook \
+        ipykernel \
+        ipywidgets
 
-    printf "Removing default ipython kernel...\n"
+    "$JUPYTER_VENV_PYTHON" -m ipykernel install \
+        --name="pytorch_312" \
+        --display-name="PyTorch (Python 3.12)"
+
+    printf "Removing default ipython kernel from Jupyter venv...\n"
     rm -rf "$JUPYTER_VENV/share/jupyter/kernels/python3"
 }
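The `common.sh` changes register each venv as a selectable Jupyter kernel via `ipykernel install`. A quick way to confirm the result inside a built image is a sketch like this (the `jupyter` entry point is provided by the JupyterLab install into `$JUPYTER_VENV`):

```bash
# List the kernelspecs registered by build_common_install_jupyter and
# build_common_install_python_kernels; expect pytorch_312, plus python_310 and
# python_311 when the image was built with PYTHON_VERSION=all.
"$JUPYTER_VENV/bin/jupyter" kernelspec list
```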
diff --git a/build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/nvidia.sh b/build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/nvidia.sh
index b25bec6..15ff078 100755
--- a/build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/nvidia.sh
+++ b/build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/nvidia.sh
@@ -1,3 +1,32 @@
 #!/bin/false
 
-# For CUDA specific logic
+# This script installs PyTorch and related libraries for NVIDIA GPUs.
+
+# Source the environment to get access to variables like JUPYTER_VENV_PIP
+source /opt/ai-dock/etc/environment.sh
+
+# Check if CUDA_VERSION is set (it should be inherited from the base image)
+if [[ -z $CUDA_VERSION ]]; then
+    printf "ERROR: No CUDA_VERSION specified. Cannot install PyTorch.\n" >&2
+    exit 1
+fi
+
+# Construct the PyTorch index URL based on the CUDA version.
+# Example: CUDA_VERSION=12.8.1 -> cu_version_string="cu128"
+# PyTorch URLs don't use the patch version (the last digit).
+# We also need to remove the dot.
+cu_major_minor=$(echo "$CUDA_VERSION" | cut -d. -f1,2)
+cu_version_string="cu${cu_major_minor//.}"
+index_url="https://download.pytorch.org/whl/${cu_version_string}"
+
+# Install PyTorch, torchvision, and torchaudio using the correct index URL
+printf "Installing PyTorch for CUDA %s using index %s...\n" "$CUDA_VERSION" "$index_url"
+"$JUPYTER_VENV_PIP" install --no-cache-dir \
+    torch \
+    torchvision \
+    torchaudio \
+    --index-url "${index_url}"
+
+# Install xformers, which is critical for performance
+printf "Installing xformers...\n"
+"$JUPYTER_VENV_PIP" install --no-cache-dir xformers
\ No newline at end of file
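The version-string handling in `nvidia.sh` can be checked in isolation. This sketch applies the same two expansions to the CUDA versions used in the workflow matrix:

```bash
#!/usr/bin/env bash
# Reproduce the CUDA_VERSION -> PyTorch wheel index mapping from nvidia.sh.
for CUDA_VERSION in 12.8.1 12.5.1 11.8.0; do
    cu_major_minor=$(echo "$CUDA_VERSION" | cut -d. -f1,2)   # 12.8.1 -> 12.8
    cu_version_string="cu${cu_major_minor//.}"               # 12.8   -> cu128
    echo "${CUDA_VERSION} -> https://download.pytorch.org/whl/${cu_version_string}"
done
```

Note that PyTorch does not necessarily publish a wheel index for every CUDA minor release, so the derived URL is worth verifying for each matrix entry before relying on it.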
diff --git a/build/Dockerfile b/build/Dockerfile
index f5ffc6f..67e1082 100644
--- a/build/Dockerfile
+++ b/build/Dockerfile
@@ -1,12 +1,10 @@
 # For build automation - Allows building from any ai-dock base image
-ARG IMAGE_BASE="ghcr.io/ai-dock/base-image:v2-cuda-12.1.1-base-22.04"
+ARG IMAGE_BASE="phonehomephone/base-image:v1-cuda-12.8.1-base-ubuntu24.04"
 FROM ${IMAGE_BASE}
 
-LABEL org.opencontainers.image.source https://github.com/ai-dock/python
-
-LABEL org.opencontainers.image.description "Python development environment in a cloud-first docker container"
-
-LABEL maintainer="Rob Ballantyne "
+LABEL org.opencontainers.image.source="https://github.com/PhoneHomePhone/python"
+LABEL org.opencontainers.image.description="ai-dock/python upgraded to ubuntu 24.04 with Python 3.12"
+LABEL maintainer="https://github.com/PhoneHomePhone"
 
 ENV JUPYTER_VENV=$VENV_DIR/jupyter
 ENV JUPYTER_VENV_PYTHON=$JUPYTER_VENV/bin/python
@@ -17,13 +15,14 @@ COPY --chown=0:1111 ./COPY_ROOT_0/ /
 
 # Define the startup environment for interactive sessions.
 # ENV for inheritance
-ARG PYTHON_VERSION=all
+ARG PYTHON_VERSION=3.12
 ENV PYTHON_VERSION=${PYTHON_VERSION}
-ARG PYTHON_VENV_NAME=python_310
+ARG PYTHON_VENV_NAME=python_312
 ENV PYTHON_VENV_NAME=${PYTHON_VENV_NAME}
-ENV PYTHON_DEFAULT_VENV=${PYTHON_VENV_NAME}
+ENV PYTHON_DEFAULT_VENV=jupyter
 ENV OPT_SYNC=$OPT_SYNC
 ENV IMAGE_SLUG="python"
+
 
 # Use build scripts to ensure we can build all targets from one Dockerfile in a single layer.
 # Don't put anything heavy in here - We can use multi-stage building above if necessary.
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 81bb604..0956a0d 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -1,17 +1,16 @@
-version: "3.8"
 # Compose file build variables set in .env
 services:
   supervisor:
     build:
       context: ./build
       args:
-        IMAGE_BASE: ${IMAGE_BASE:-ghcr.io/ai-dock/base-image:v2-cuda-11.8.0-runtime-22.04}
+        IMAGE_BASE: ${IMAGE_BASE:-phonehomephone/base-image:v1-cuda-12.8.1-base-ubuntu24.04}
         PYTHON_VERSION: ${PYTHON_VERSION:-all}
         PYTHON_VENV_NAME: ${PYTHON_VENV_NAME:-python_312}
-      tags:
-        - "ghcr.io/ai-dock/python:${IMAGE_TAG:-all-v2-cuda-11.8.0-runtime-22.04}"
+      #tags:
+      #  - "ghcr.io/ai-dock/python:${IMAGE_TAG:-all-v2-cuda-11.8.0-runtime-22.04}"
 
-    image: ghcr.io/ai-dock/python:${IMAGE_TAG:-all-v2-cuda-11.8.0-runtime-22.04}
+    image: phonehomephone/python:${IMAGE_TAG:-v1-python3.12-cuda-12.8.1-ubuntu24.04}
 
     ## For Nvidia GPU's - You probably want to uncomment this
     #deploy:
@@ -68,4 +67,4 @@ services:
       - SYNCTHING_UI_PORT_HOST=${SYNCTHING_UI_PORT_HOST:-8384}
       - SYNCTHING_TRANSPORT_PORT_HOST=${SYNCTHING_TRANSPORT_PORT_HOST:-22999}
      - SYNCTHING_URL=${SYNCTHING_URL:-}
-      #- PROVISIONING_SCRIPT=https://raw.githubusercontent.com/ai-dock/python/main/config/provisioning/default.sh
+      #- PROVISIONING_SCRIPT=https://raw.githubusercontent.com/ai-dock/python/main/config/provisioning/default.sh
\ No newline at end of file
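Finally, because the compose file reads its build variables from `.env`, overriding them for a local build might look like this (the variable names are the ones referenced in `docker-compose.yaml`; the values are illustrative):

```bash
# .env (illustrative values)
IMAGE_BASE=phonehomephone/base-image:v1-cuda-12.8.1-base-ubuntu24.04
PYTHON_VERSION=3.12
PYTHON_VENV_NAME=python_312
IMAGE_TAG=v1-python3.12-cuda-12.8.1-ubuntu24.04
```

With that in place, `docker compose build supervisor` should tag the result as `phonehomephone/python:v1-python3.12-cuda-12.8.1-ubuntu24.04`, matching the compose file's default `image:` value.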