#
# Build Python 3.11 for use in later stages
#
FROM ubuntu:oracular AS python-build

SHELL [ "/bin/bash", "-c" ]

# Instructions adapted into this Dockerfile from:
#
# https://github.com/pytorch/pytorch
#
# and
#
# https://pytorch.org/docs/stable/notes/get_start_xpu.html
# https://www.intel.com/content/www/us/en/developer/articles/tool/pytorch-prerequisites-for-intel-gpu/2-6.html
#
#
RUN apt-get update \
    && DEBIAN_FRONTEND=noninteractive apt-get install -y \
        gpg \
        wget \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/{apt,dpkg,cache,log}

# ipex only supports python 3.11, so use 3.11 instead of latest oracular (3.12)

RUN apt-get update \
    && DEBIAN_FRONTEND=noninteractive apt-get install -y \
        build-essential \
        ca-certificates \
        ccache \
        cmake \
        curl \
        git \
        gpg-agent \
        less \
        libbz2-dev \
        libffi-dev \
        libjpeg-dev \
        libpng-dev \
        libreadline-dev \
        libssl-dev \
        libsqlite3-dev \
        llvm \
        nano \
        wget \
        zlib1g-dev \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/{apt,dpkg,cache,log}

# python3 \
# python3-pip \
# python3-venv \
# python3-dev \

RUN /usr/sbin/update-ccache-symlinks
RUN mkdir /opt/ccache && ccache --set-config=cache_dir=/opt/ccache

# Build Python in /opt/..., install it locally, then remove the build tree,
# all collapsed into a single docker layer.
WORKDIR /opt
ENV PYTHON_VERSION=3.11.9

RUN wget -q -O - https://www.python.org/ftp/python/${PYTHON_VERSION}/Python-${PYTHON_VERSION}.tgz | tar -xz \
    && cd Python-${PYTHON_VERSION} \
    && ./configure --prefix=/opt/python --enable-optimizations \
    && make -j$(nproc) \
    && make install \
    && cd /opt \
    && rm -rf Python-${PYTHON_VERSION}
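
# Example (illustrative, not part of the build): this stage can be built and
# sanity-checked on its own; the image tag below is a placeholder and assumes
# this file is the project's Dockerfile:
#
#   docker build --target python-build -t backstory-python-build .
#   docker run --rm backstory-python-build /opt/python/bin/python3.11 --version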

FROM ubuntu:oracular AS ze-monitor
# From https://github.com/jketreno/ze-monitor
RUN apt-get update \
    && DEBIAN_FRONTEND=noninteractive apt-get install -y \
        build-essential \
        debhelper \
        devscripts \
        cmake \
        git \
        libfmt-dev \
        libncurses-dev \
        rpm \
        rpm2cpio \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/{apt,dpkg,cache,log}

RUN apt-get install -y \
        software-properties-common \
    && add-apt-repository -y ppa:kobuk-team/intel-graphics \
    && apt-get update \
    && apt-get install -y \
        libze-dev \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/{apt,dpkg,cache,log}

RUN git clone --depth 1 --branch v0.4.0-1 https://github.com/jketreno/ze-monitor /opt/ze-monitor
WORKDIR /opt/ze-monitor/build
RUN cmake .. \
    && make \
    && cpack
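
# Example (illustrative): build just this stage and list the .deb that cpack
# produced; the image tag is a placeholder:
#
#   docker build --target ze-monitor -t ze-monitor-build .
#   docker run --rm ze-monitor-build ls /opt/ze-monitor/build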

#
# Build the ipex-llm wheel for use in later stages
#
FROM python-build AS ipex-llm-src

RUN update-alternatives --install /usr/bin/python3 python3 /opt/python/bin/python3.11 2

RUN git clone --branch main --depth 1 https://github.com/intel/ipex-llm.git /opt/ipex-llm \
    && cd /opt/ipex-llm \
    && git fetch --depth 1 origin cb3c4b26ad058c156591816aa37eec4acfcbf765 \
    && git checkout cb3c4b26ad058c156591816aa37eec4acfcbf765

WORKDIR /opt/ipex-llm

RUN python3 -m venv --system-site-packages /opt/ipex-llm/venv
RUN { \
        echo '#!/bin/bash' ; \
        echo 'update-alternatives --set python3 /opt/python/bin/python3.11' ; \
        echo 'source /opt/ipex-llm/venv/bin/activate' ; \
        echo 'if [[ "${1}" != "" ]]; then bash -c "${@}"; else bash; fi' ; \
    } > /opt/ipex-llm/shell ; \
    chmod +x /opt/ipex-llm/shell

SHELL [ "/opt/ipex-llm/shell" ]

RUN pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/xpu

WORKDIR /opt/ipex-llm/python/llm
RUN pip install requests wheel
RUN python setup.py clean --all bdist_wheel --linux
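
# Example (illustrative): the wheel built above lands in
# /opt/ipex-llm/python/llm/dist/ and is what the llm-base stage copies in below;
# to inspect it, build this stage alone (placeholder tag):
#
#   docker build --target ipex-llm-src -t ipex-llm-src .
#   docker run --rm ipex-llm-src ls /opt/ipex-llm/python/llm/dist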

#
# The main backstory image:
# * python 3.11
# * pytorch xpu w/ ipex-llm
# * ollama-ipex-llm
# * src/server.py - model server supporting RAG and fine-tuned models
#
FROM ubuntu:oracular AS llm-base

COPY --from=python-build /opt/python /opt/python

# Get a couple of prerequisites
RUN apt-get update \
    && DEBIAN_FRONTEND=noninteractive apt-get install -y \
        gpg \
        # python3 \
        # python3-pip \
        # python3-venv \
        wget \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/{apt,dpkg,cache,log}

# The client frontend is built using React Expo to allow
# easy creation of an Android app as well as a web app
RUN apt-get update \
    && DEBIAN_FRONTEND=noninteractive apt-get install -y \
        nodejs \
        npm \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/{apt,dpkg,cache,log}

# Install Intel graphics runtimes
RUN apt-get update \
    && DEBIAN_FRONTEND=noninteractive apt-get install -y software-properties-common \
    && add-apt-repository -y ppa:kobuk-team/intel-graphics \
    && apt-get update \
    && DEBIAN_FRONTEND=noninteractive apt-get install -y \
        libze-intel-gpu1 \
        libze1 \
        intel-ocloc \
        intel-opencl-icd \
        xpu-smi \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/{apt,dpkg,cache,log}

RUN update-alternatives --install /usr/bin/python3 python3 /opt/python/bin/python3.11 2

RUN apt-get update \
    && DEBIAN_FRONTEND=noninteractive apt-get install -y \
        libncurses6 \
        rsync \
        jq \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/{apt,dpkg,cache,log}

COPY --from=ze-monitor /opt/ze-monitor/build/ze-monitor-*deb /opt/
RUN dpkg -i /opt/ze-monitor-*deb && rm /opt/ze-monitor-*deb
RUN usermod -aG ze-monitor root

WORKDIR /opt/backstory

# Setup the backstory python virtual environment
RUN python3 -m venv --system-site-packages /opt/backstory/venv

# Setup the docker pip shell
RUN { \
        echo '#!/bin/bash' ; \
        echo 'update-alternatives --set python3 /opt/python/bin/python3.11' ; \
        echo 'if [[ -e /opt/intel/oneapi/setvars.sh ]]; then source /opt/intel/oneapi/setvars.sh; fi' ; \
        echo 'source /opt/backstory/venv/bin/activate' ; \
        echo 'if [[ "${1}" != "" ]]; then bash -c "${@}"; else bash; fi' ; \
    } > /opt/backstory/shell ; \
    chmod +x /opt/backstory/shell

# Activate the pip environment on all shell calls
SHELL [ "/opt/backstory/shell" ]

# From https://pytorch-extension.intel.com/installation?platform=gpu&version=v2.6.10%2Bxpu&os=linux%2Fwsl2&package=pip
RUN pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/xpu
RUN pip install intel-extension-for-pytorch==2.6.10+xpu oneccl_bind_pt==2.6.0+xpu --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
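
# Sanity check (illustrative; run inside the built image rather than at build
# time, and the exact output is not guaranteed): confirm the XPU build of torch
# and IPEX can see the GPU:
#
#   python3 -c "import torch; import intel_extension_for_pytorch as ipex; print(torch.__version__, ipex.__version__, torch.xpu.is_available())"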

# From https://huggingface.co/docs/bitsandbytes/main/en/installation?backend=Intel+CPU+%2B+GPU#multi-backend
RUN pip install "transformers>=4.45.1"
RUN pip install 'https://github.com/bitsandbytes-foundation/bitsandbytes/releases/download/continuous-release_multi-backend-refactor/bitsandbytes-0.44.1.dev0-py3-none-manylinux_2_24_x86_64.whl'

# Install ollama python module
RUN pip install ollama langchain-ollama

RUN pip install setuptools --upgrade
RUN pip install ollama langchain-ollama
RUN pip install feedparser bs4 chromadb
RUN pip install tiktoken
RUN pip install flask flask_cors flask_sock
RUN pip install peft datasets

COPY --from=ipex-llm-src /opt/ipex-llm/python/llm/dist/*.whl /opt/wheels/
RUN for pkg in /opt/wheels/ipex_llm*.whl; do pip install $pkg; done
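
# Quick check (illustrative): the wheel(s) installed by the loop above should be
# importable from the venv, e.g.
#
#   python3 -c "import ipex_llm"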

# mistral fails with cache_position errors with transformers>4.40 (or at least it fails with the latest)
# as well as MistralSpda* and QwenSpda* things missing (needed when loading models with )
RUN pip install "sentence_transformers<3.4.1"
# "transformers==4.40.0" ""
#RUN pip install sentence_transformers "transformers==4.40.0" "trl<0.12.0"
#RUN pip install transformers==4.45.0 "trl<0.12.0"
# trl.core doesn't have what is needed with the default 'pip install trl' version
#RUN pip install git+https://github.com/huggingface/trl.git@7630f877f91c556d9e5a3baa4b6e2894d90ff84c

# To get xe_linear and other Xe methods
# NOTE: As of 2025-03-10, these are only available for Python 3.11, hence
# why we build python from source
RUN pip3 install 'bigdl-core-xe-all>=2.6.0b'

# NOTE: IPEX includes the oneAPI components... not sure if they still need to be installed separately with a oneAPI env
RUN pip install einops diffusers # Required for IPEX optimize(), which is required to convert from Params4bit

# Needed by src/utils/chroma.py
RUN pip install watchdog

# Install packages needed for stock.py
RUN pip install yfinance pytz geopy PyHyphen nltk

FROM llm-base AS backstory

COPY /src/requirements.txt /opt/backstory/src/requirements.txt
RUN pip install -r /opt/backstory/src/requirements.txt

SHELL [ "/bin/bash", "-c" ]

RUN { \
        echo '#!/bin/bash'; \
        echo 'echo "Container: backstory"'; \
        echo 'set -e'; \
        echo 'echo "Setting pip environment to /opt/backstory"'; \
        echo 'update-alternatives --set python3 /opt/python/bin/python3.11' ; \
        echo 'if [[ -e /opt/intel/oneapi/setvars.sh ]]; then source /opt/intel/oneapi/setvars.sh; fi' ; \
        echo 'source /opt/backstory/venv/bin/activate'; \
        echo ''; \
        echo 'if [[ "${1}" == "/bin/bash" ]] || [[ "${1}" =~ ^(/opt/backstory/)?shell$ ]]; then'; \
        echo '  echo "Dropping to shell"'; \
        echo '  shift' ; \
        echo '  echo "Running: ${@}"' ; \
        echo '  if [[ "${1}" != "" ]]; then' ; \
        echo '    exec ${@}'; \
        echo '  else' ; \
        echo '    exec /bin/bash'; \
        echo '  fi' ; \
        echo 'else'; \
        echo '  if [[ "${PRODUCTION}" -eq 0 ]]; then'; \
        echo '    while true; do'; \
        echo '      cd /opt/backstory/frontend'; \
        echo '      echo "Launching Backstory React Frontend..."'; \
        echo '      npm start "${@}" || echo "Backstory frontend died. Restarting in 3 seconds."'; \
        echo '      sleep 3'; \
        echo '    done &' ; \
        echo '  fi' ; \
        echo '  if [[ ! -e src/cert.pem ]]; then' ; \
        echo '    echo "Generating self-signed certificate for HTTPS"'; \
        echo '    openssl req -x509 -nodes -days 365 -newkey rsa:2048 -keyout src/key.pem -out src/cert.pem -subj "/C=US/ST=OR/L=Portland/O=Development/CN=localhost"'; \
        echo '  fi' ; \
        echo '  while true; do'; \
        echo '    echo "Launching Backstory server..."'; \
        echo '    python src/server.py "${@}" || echo "Backstory server died. Restarting in 3 seconds."'; \
        echo '    sleep 3'; \
        echo '  done' ; \
        echo 'fi'; \
    } > /entrypoint.sh \
    && chmod +x /entrypoint.sh

# From
ENV USE_XETLA=OFF
ENV SYCL_PI_LEVEL_ZERO_USE_IMMEDIATE_COMMANDLISTS=1
ENV SYCL_CACHE_PERSISTENT=1
ENV PATH=/opt/backstory:$PATH

COPY /src/ /opt/backstory/src/

ENTRYPOINT [ "/entrypoint.sh" ]

FROM ubuntu:oracular AS ollama

# Get a couple of prerequisites
RUN apt-get update \
    && DEBIAN_FRONTEND=noninteractive apt-get install -y \
        gpg \
        wget \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/{apt,dpkg,cache,log}

# Install Intel graphics runtimes
RUN apt-get update \
    && DEBIAN_FRONTEND=noninteractive apt-get install -y software-properties-common \
    && add-apt-repository -y ppa:kobuk-team/intel-graphics \
    && apt-get update \
    && DEBIAN_FRONTEND=noninteractive apt-get install -y \
        libze-intel-gpu1 \
        libze1 \
        intel-ocloc \
        intel-opencl-icd \
        xpu-smi \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/{apt,dpkg,cache,log}

WORKDIR /opt/ollama

# Download the ollama release built by ipex-llm
#ENV OLLAMA_VERSION=https://github.com/intel/ipex-llm/releases/download/v2.2.0/ollama-ipex-llm-2.2.0-ubuntu.tgz
#ENV OLLAMA_VERSION=https://github.com/intel/ipex-llm/releases/download/v2.3.0-nightly/ollama-ipex-llm-2.3.0b20250415-ubuntu.tgz

# NOTE: no longer at github.com/intel -- now at ipex-llm
ENV OLLAMA_VERSION=https://github.com/ipex-llm/ipex-llm/releases/download/v2.2.0/ollama-ipex-llm-2.2.0-ubuntu.tgz
RUN wget -qO - ${OLLAMA_VERSION} | \
    tar --strip-components=1 -C . -xzv

# Install Python from Oracular (ollama works with 3.12)
RUN apt-get update \
    && DEBIAN_FRONTEND=noninteractive apt-get install -y \
        gpg \
        python3 \
        python3-pip \
        python3-venv \
        wget \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/{apt,dpkg,cache,log}

# Setup the ollama python virtual environment
RUN python3 -m venv --system-site-packages /opt/ollama/venv

# Setup the docker pip shell
# (this stage uses the distro python 3.12, so there is no /opt/python to select here)
RUN { \
        echo '#!/bin/bash' ; \
        echo 'source /opt/ollama/venv/bin/activate' ; \
        echo 'if [[ "${1}" != "" ]]; then bash -c "${@}"; else bash; fi' ; \
    } > /opt/ollama/shell ; \
    chmod +x /opt/ollama/shell

# Activate the pip environment on all shell calls
SHELL [ "/opt/ollama/shell" ]

# Install ollama python module
RUN pip install ollama langchain-ollama

SHELL [ "/bin/bash", "-c" ]

RUN { \
        echo '#!/bin/bash'; \
        echo 'echo "Container: ollama"'; \
        echo 'set -e'; \
        echo 'echo "Setting pip environment to /opt/ollama"'; \
        echo 'source /opt/ollama/venv/bin/activate'; \
        echo 'export OLLAMA_NUM_GPU=999'; \
        echo 'export ZES_ENABLE_SYSMAN=1'; \
        echo 'export SYCL_CACHE_PERSISTENT=1'; \
        echo 'export OLLAMA_KEEP_ALIVE=-1'; \
        echo 'export SYCL_PI_LEVEL_ZERO_USE_IMMEDIATE_COMMANDLISTS=1'; \
        echo ''; \
        echo 'if [[ "${1}" == "/bin/bash" ]] || [[ "${1}" =~ ^(/opt/ollama/)?shell$ ]]; then'; \
        echo '  echo "Dropping to shell"'; \
        echo '  shift'; \
        echo '  if [[ "${1}" != "" ]]; then cmd="/opt/ollama/shell ${@}"; echo "Running: ${cmd}"; exec ${cmd}; else /opt/ollama/shell; fi'; \
        echo 'else'; \
        echo '  echo "Launching Ollama server..."'; \
        echo '  exec ollama serve'; \
        echo 'fi'; \
    } > /entrypoint.sh \
    && chmod +x /entrypoint.sh

RUN { \
        echo '#!/bin/bash'; \
        echo 'echo "Container: ollama"'; \
        echo 'set -e'; \
        echo 'echo "Setting pip environment to /opt/ollama"'; \
        echo 'source /opt/ollama/venv/bin/activate'; \
        echo 'ollama pull qwen2.5:7b' ; \
        echo 'ollama pull llama3.2' ; \
        echo 'ollama pull mxbai-embed-large' ; \
        echo 'ollama pull deepseek-r1:7b' ; \
        echo 'ollama pull mistral:7b' ; \
    } > /fetch-models.sh \
    && chmod +x /fetch-models.sh

ENV PYTHONUNBUFFERED=1

# Enable ext_intel_free_memory
ENV ZES_ENABLE_SYSMAN=1

# Use all GPUs
ENV OLLAMA_NUM_GPU=999

# Use immediate command lists
ENV SYCL_PI_LEVEL_ZERO_USE_IMMEDIATE_COMMANDLISTS=1

# Use persistent cache
ENV SYCL_CACHE_PERSISTENT=1
VOLUME [" /root/.ollama" ]
|
|
|
|
ENV PATH=/opt/ollama:${PATH}
|
|
|
|
ENTRYPOINT [ "/entrypoint.sh" ]

FROM llm-base AS jupyter

SHELL [ "/opt/backstory/shell" ]

# BEGIN setup Jupyter
RUN pip install \
        jupyterlab \
        dash[jupyterlab] \
    && jupyter lab build --dev-build=False --minimize=False
# END setup Jupyter

COPY /src/requirements.txt /opt/backstory/src/requirements.txt

RUN pip install -r /opt/backstory/src/requirements.txt

SHELL [ "/bin/bash", "-c" ]

RUN { \
        echo '#!/bin/bash' ; \
        echo 'echo "Container: backstory jupyter"' ; \
        echo 'if [[ ! -e "/root/.cache/hub/token" ]]; then' ; \
        echo '  if [[ "${HF_ACCESS_TOKEN}" == "" ]]; then' ; \
        echo '    echo "Set your HF access token in .env as: HF_ACCESS_TOKEN=<token>" >&2' ; \
        echo '    exit 1' ; \
        echo '  else' ; \
        echo '    if [[ ! -d "/root/.cache/hub" ]]; then mkdir -p /root/.cache/hub; fi' ; \
        echo '    echo "${HF_ACCESS_TOKEN}" > /root/.cache/hub/token' ; \
        echo '  fi' ; \
        echo 'fi' ; \
        echo 'update-alternatives --set python3 /opt/python/bin/python3.11' ; \
        echo 'if [[ -e /opt/intel/oneapi/setvars.sh ]]; then source /opt/intel/oneapi/setvars.sh; fi' ; \
        echo 'source /opt/backstory/venv/bin/activate' ; \
        echo 'if [[ "${1}" == "shell" ]]; then echo "Dropping to shell"; /bin/bash; exit $?; fi' ; \
        echo 'while true; do' ; \
        echo '  echo "Launching jupyter lab"' ; \
        echo '  jupyter lab \' ; \
        echo '    --notebook-dir=/opt/jupyter \' ; \
        echo '    --port 8888 \' ; \
        echo '    --ip 0.0.0.0 \' ; \
        echo '    --allow-root \' ; \
        echo '    --ServerApp.token= \' ; \
        echo '    --ServerApp.password= \' ; \
        echo '    --ServerApp.allow_origin=* \' ; \
        echo '    --ServerApp.base_url="/jupyter" \' ; \
        echo '    "${@}" \' ; \
        echo '    2>&1 | tee -a "/root/.cache/jupyter.log"' ; \
        echo '  echo "jupyter notebook died ($?). Restarting."' ; \
        echo '  sleep 5' ; \
        echo 'done' ; \
    } > /entrypoint-jupyter.sh \
    && chmod +x /entrypoint-jupyter.sh

# echo '    --no-browser \' ; \

ENTRYPOINT [ "/entrypoint-jupyter.sh" ]

FROM ubuntu:oracular AS miniircd

COPY --from=python-build /opt/python /opt/python

# Get a couple of prerequisites
RUN apt-get update \
    && DEBIAN_FRONTEND=noninteractive apt-get install -y \
        gpg \
        wget \
        nano \
        irssi \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/{apt,dpkg,cache,log}

WORKDIR /opt/miniircd

RUN update-alternatives --install /usr/bin/python3 python3 /opt/python/bin/python3.11 2

# Setup the miniircd python virtual environment
RUN python3 -m venv --system-site-packages /opt/miniircd/venv

# Setup the docker pip shell
RUN { \
        echo '#!/bin/bash' ; \
        echo 'update-alternatives --set python3 /opt/python/bin/python3.11' ; \
        echo 'source /opt/miniircd/venv/bin/activate' ; \
        echo 'if [[ "${1}" != "" ]]; then bash -c "${@}"; else bash; fi' ; \
    } > /opt/miniircd/shell ; \
    chmod +x /opt/miniircd/shell

# Activate the pip environment on all shell calls
SHELL [ "/opt/miniircd/shell" ]

RUN pip install miniircd

SHELL [ "/bin/bash", "-c" ]

RUN { \
        echo '#!/bin/bash'; \
        echo 'echo "Container: miniircd"'; \
        echo 'set -e'; \
        echo 'echo "Setting pip environment to /opt/miniircd"'; \
        echo 'update-alternatives --set python3 /opt/python/bin/python3.11' ; \
        echo 'source /opt/miniircd/venv/bin/activate'; \
        echo ''; \
        echo 'if [[ "${1}" == "/bin/bash" ]] || [[ "${1}" =~ ^(/opt/miniircd/)?shell$ ]]; then'; \
        echo '  echo "Dropping to shell"'; \
        echo '  shift' ; \
        echo '  echo "Running: ${@}"' ; \
        echo '  if [[ "${1}" != "" ]]; then' ; \
        echo '    exec ${@}'; \
        echo '  else' ; \
        echo '    exec /bin/bash'; \
        echo '  fi' ; \
        echo 'else'; \
        echo '  echo "Launching IRC server..."'; \
        echo '  miniircd --setuid root "${@}"' ; \
        echo 'fi'; \
    } > /entrypoint.sh \
    && chmod +x /entrypoint.sh

ENTRYPOINT [ "/entrypoint.sh" ]