Docker improvements, remove CUDA default (#14)
* change default to no cuda
* update Dockerfiles
1 parent 45e3fcf, commit 62197dd

Showing 7 changed files with 201 additions and 127 deletions.
@@ -0,0 +1,65 @@
FROM python:3.11-slim-bookworm

# Metadata as per https://github.com/opencontainers/image-spec/blob/master/annotations.md
LABEL org.opencontainers.image.authors="Bram Vanroy"
LABEL org.opencontainers.image.title="MAchine Translation Evaluation Online - Demo"

# Avoid prompts from apt
ENV DEBIAN_FRONTEND=noninteractive

# Install dependencies in a single RUN command to reduce image layers
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    curl \
    git \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

# Create a non-root user
RUN useradd -m -u 1000 mateo_user
USER mateo_user
ENV HOME="/home/mateo_user"

# Environment variables
ENV PORT=7860 \
    SERVER="localhost" \
    BASE="" \
    DEMO_MODE="" \
    HF_HUB_ENABLE_HF_TRANSFER=1 \
    PATH="${HOME}/.local/bin:${PATH}" \
    USE_CUDA=false

WORKDIR ${HOME}/mateo

# Clone the repository
RUN git clone https://github.com/BramVanroy/mateo-demo.git
WORKDIR mateo-demo

# Install Python dependencies with conditional torch installation
RUN python -m pip install --no-cache-dir --upgrade pip wheel setuptools \
    && python -m pip install --no-cache-dir torch==2.2.1+cpu -f https://download.pytorch.org/whl/torch \
    && python -m pip install --no-cache-dir --upgrade .

# Pre-download default models
RUN huggingface-cli download bert-base-multilingual-cased model.safetensors tokenizer.json vocab.txt; \
    huggingface-cli download facebook/nllb-200-distilled-600M pytorch_model.bin sentencepiece.bpe.model tokenizer.json; \
    python -c "import comet; from comet import download_model; download_model('Unbabel/wmt22-comet-da')"; \
    python -c "import evaluate; evaluate.load('bleurt', 'BLEURT-20')"

# Expose the port the app runs on
EXPOSE $PORT

# Healthcheck to ensure the service is running
HEALTHCHECK CMD curl --fail http://$SERVER:$PORT$BASE/_stcore/health || exit 1

# Set the working directory to the Streamlit app
WORKDIR src/mateo_st

# Simplify the CMD script with conditional --use_cuda flag
CMD streamlit run 01_🎈_MATEO.py \
    --server.port $PORT \
    --server.address $(if [ "$SERVER" = "localhost" ]; then echo "0.0.0.0"; else echo $SERVER; fi) \
    $(if [ -n "$BASE" ]; then echo "--server.baseUrlPath $BASE"; fi) \
    $(if [ "$DEMO_MODE" = "true" ]; then echo "--server.maxUploadSize 1"; fi) \
    -- \
    $(if [ "$DEMO_MODE" = "true" ]; then echo "--demo_mode"; fi)
This file was deleted.
@@ -0,0 +1,69 @@
# includes python3.10 (but we use `python-is-python3` to set `python` cmd to `python3` for ease-of-use)
FROM nvidia/cuda:12.1.1-runtime-ubuntu22.04

# Metadata as per https://github.com/opencontainers/image-spec/blob/master/annotations.md
LABEL org.opencontainers.image.authors="Bram Vanroy"
LABEL org.opencontainers.image.title="MAchine Translation Evaluation Online - Demo"

# Avoid prompts from apt
ENV DEBIAN_FRONTEND=noninteractive

# Install dependencies in a single RUN command to reduce image layers
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    curl \
    git \
    python3-pip \
    python-is-python3 \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

# Create a non-root user
RUN useradd -m -u 1000 mateo_user
USER mateo_user
ENV HOME="/home/mateo_user"

# Environment variables
ENV PORT=7860 \
    SERVER="localhost" \
    BASE="" \
    DEMO_MODE="" \
    HF_HUB_ENABLE_HF_TRANSFER=1 \
    PATH="${HOME}/.local/bin:${PATH}" \
    USE_CUDA=true

WORKDIR /mateo

# Clone the repository
RUN git clone https://github.com/BramVanroy/mateo-demo.git
WORKDIR mateo-demo

# Install Python dependencies with conditional torch installation
RUN python -m pip install --no-cache-dir --upgrade pip wheel setuptools \
    && python -m pip install --no-cache-dir torch==2.2.1 --index-url https://download.pytorch.org/whl/cu121 \
    && python -m pip install --no-cache-dir --upgrade .

# Pre-download default models
RUN huggingface-cli download bert-base-multilingual-cased model.safetensors tokenizer.json vocab.txt \
    && huggingface-cli download facebook/nllb-200-distilled-600M pytorch_model.bin sentencepiece.bpe.model tokenizer.json \
    && python -c "import comet; from comet import download_model; download_model('Unbabel/wmt22-comet-da')" \
    && python -c "import evaluate; evaluate.load('bleurt', 'BLEURT-20')"

# Expose the port the app runs on
EXPOSE $PORT

# Healthcheck to ensure the service is running
HEALTHCHECK CMD curl --fail http://$SERVER:$PORT$BASE/_stcore/health || exit 1

# Set the working directory to the Streamlit app
WORKDIR src/mateo_st

# Simplify the CMD script with conditional --use_cuda flag
CMD streamlit run 01_🎈_MATEO.py \
    --server.port $PORT \
    --server.address $(if [ "$SERVER" = "localhost" ]; then echo "0.0.0.0"; else echo $SERVER; fi) \
    $(if [ -n "$BASE" ]; then echo "--server.baseUrlPath $BASE"; fi) \
    $(if [ "$DEMO_MODE" = "true" ]; then echo "--server.maxUploadSize 1"; fi) \
    -- \
    --use_cuda \
    $(if [ "$DEMO_MODE" = "true" ]; then echo "--demo_mode"; fi)
@@ -1,37 +1,57 @@
-FROM ubuntu:latest
-LABEL authors="Bram Vanroy"
+FROM python:3.11-slim-bookworm

+# Metadata as per https://github.com/opencontainers/image-spec/blob/master/annotations.md
+LABEL org.opencontainers.image.authors="Bram Vanroy"
+LABEL org.opencontainers.image.title="MAchine Translation Evaluation Online - Demo"
+
+# Avoid prompts from apt
 ENV DEBIAN_FRONTEND=noninteractive
-RUN apt-get -y update \
-    && apt-get -y install build-essential curl git software-properties-common
-
-RUN add-apt-repository ppa:deadsnakes/ppa \
-    && apt-get -y update \
-    && apt-get -y install python3.10 python3.10-dev python3-pip python3.10-distutils \
-    && ln -s /usr/bin/python3.10 /usr/bin/python \
+
+# Install dependencies in a single RUN command to reduce image layers
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    build-essential \
+    curl \
+    git \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/*

-RUN useradd -m -u 1000 user
-USER user
-ENV HOME /home/user
-ENV PATH $HOME/.local/bin:$PATH
-ENV HF_HUB_ENABLE_HF_TRANSFER=1
+# Create a non-root user
+RUN useradd -m -u 1000 mateo_user
+USER mateo_user
+ENV HOME="/home/mateo_user"
+
+# Environment variables
+ENV PORT=7860 \
+    SERVER="localhost" \
+    HF_HUB_ENABLE_HF_TRANSFER=1 \
+    PATH="${HOME}/.local/bin:${PATH}" \
+    USE_CUDA=false

-WORKDIR $HOME
+WORKDIR ${HOME}/mateo

+# Clone the repository
 RUN git clone https://github.com/BramVanroy/mateo-demo.git
-WORKDIR $HOME/mateo-demo
+WORKDIR mateo-demo

-RUN python -m pip install --no-cache-dir --upgrade pip && python -m pip install --no-cache-dir --upgrade .
+# Install Python dependencies with conditional torch installation
+RUN python -m pip install --no-cache-dir --upgrade pip wheel setuptools \
+    && python -m pip install --no-cache-dir torch==2.2.1+cpu -f https://download.pytorch.org/whl/torch \
+    && python -m pip install --no-cache-dir --upgrade .

 # Pre-download default models
-RUN python -c "import comet; from comet import download_model; download_model('Unbabel/wmt22-comet-da')"
-RUN python -c "import evaluate; evaluate.load('bleurt', 'BLEURT-20')"
-RUN huggingface-cli download bert-base-multilingual-cased model.safetensors tokenizer.json vocab.txt
-RUN huggingface-cli download facebook/nllb-200-distilled-600M pytorch_model.bin sentencepiece.bpe.model tokenizer.json
+RUN huggingface-cli download bert-base-multilingual-cased model.safetensors tokenizer.json vocab.txt; \
+    huggingface-cli download facebook/nllb-200-distilled-600M pytorch_model.bin sentencepiece.bpe.model tokenizer.json; \
+    python -c "import comet; from comet import download_model; download_model('Unbabel/wmt22-comet-da')"; \
+    python -c "import evaluate; evaluate.load('bleurt', 'BLEURT-20')"
+
+# Expose the port the app runs on
+EXPOSE $PORT

-EXPOSE 7860
-HEALTHCHECK CMD curl --fail http://localhost:7860/_stcore/health
+# Healthcheck to ensure the service is running
+HEALTHCHECK CMD curl --fail http://$SERVER:$PORT/_stcore/health || exit 1

-WORKDIR $HOME/mateo-demo/src/mateo_st
+# Set the working directory to the Streamlit app
+WORKDIR src/mateo_st

-CMD ["streamlit", "run", "01_🎈_MATEO.py", "--server.port", "7860", "--server.enableXsrfProtection", "false", "--", "--no_cuda"]
+# Launch app
+CMD streamlit run 01_🎈_MATEO.py --server.port $PORT --server.enableXsrfProtection false -- --no_cuda
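
Since each Dockerfile declares a HEALTHCHECK against Streamlit's /_stcore/health endpoint, the reported health state can be checked from the host once a container is running (the container name and image tag here are placeholders carried over from the sketches above):

# Start detached, then query the health status Docker records from the curl probe.
docker run -d --name mateo -p 7860:7860 mateo-demo-cpu
docker inspect --format '{{.State.Health.Status}}' mateo   # expected to move from "starting" to "healthy"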