Use gunicorn as WSGI server in ML image
This commit is contained in:
@@ -6,7 +6,7 @@ ENV PYTHONDONTWRITEBYTECODE=1 \
|
|||||||
|
|
||||||
RUN python -m venv /opt/venv && \
|
RUN python -m venv /opt/venv && \
|
||||||
/opt/venv/bin/pip install --pre torch -f https://download.pytorch.org/whl/nightly/cpu/torch_nightly.html && \
|
/opt/venv/bin/pip install --pre torch -f https://download.pytorch.org/whl/nightly/cpu/torch_nightly.html && \
|
||||||
/opt/venv/bin/pip install transformers tqdm numpy scikit-learn scipy nltk sentencepiece flask Pillow && \
|
/opt/venv/bin/pip install transformers tqdm numpy scikit-learn scipy nltk sentencepiece flask Pillow gunicorn && \
|
||||||
/opt/venv/bin/pip install --no-deps sentence-transformers
|
/opt/venv/bin/pip install --no-deps sentence-transformers
|
||||||
|
|
||||||
FROM python:3.10-slim
|
FROM python:3.10-slim
|
||||||
@@ -22,4 +22,4 @@ WORKDIR /usr/src/app
|
|||||||
|
|
||||||
COPY . .
|
COPY . .
|
||||||
|
|
||||||
CMD ["python", "src/main.py"]
|
CMD ["gunicorn", "src.main:server"]
|
||||||
|
|||||||
@@ -0,0 +1,13 @@
|
|||||||
|
"""
Gunicorn configuration options.

https://docs.gunicorn.org/en/stable/settings.html#config-file
"""
import os

# Resolve the listen port from the environment, defaulting to 3003.
# `or` (rather than a getenv default) also covers the case where the
# variable is set but empty: "" is falsy, so we still fall back to "3003".
server_port = os.getenv('MACHINE_LEARNING_PORT') or "3003"

# Bind on all interfaces: this server runs inside a container (it is
# launched by the image's gunicorn CMD), and binding to 127.0.0.1 would
# make the published container port unreachable from outside the container.
bind = f"0.0.0.0:{server_port}"

# Load the Flask app (and the ML models it pulls in) in the master process
# before forking workers, so that work happens once instead of per-worker.
preload_app = True
|
||||||
Reference in New Issue
Block a user