I'm looking to deploy a simple application that uses Django and Celery.
docker-compose.yml:
version: "3.8"
services:
  django:
    build: .
    container_name: django
    command: python manage.py runserver 0.0.0.0:8000
    volumes:
      - .:/usr/src/app/
    ports:
      - "8000:8000"
    environment:
      - DEBUG=1
      - CELERY_BROKER=redis://redis:6379/0
      - CELERY_BACKEND=djcelery.backends.database:DatabaseBackend
    depends_on:
      - redis
  celery:
    build: .
    command: celery -A core worker -l INFO
    volumes:
      - .:/usr/src/app
    environment:
      - DEBUG=1
      - CELERY_BROKER=redis://redis:6379/0
      - CELERY_BACKEND=djcelery.backends.database:DatabaseBackend
    depends_on:
      - django
      - redis
  redis:
    image: "redis:alpine"
volumes:
  pgdata:
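For context, the celery service assumes the Django project is named core (to match celery -A core worker) and that the Celery app picks up the CELERY_BROKER variable set above. A minimal sketch of that wiring, where the settings module path is an assumption on my part, is:
core/celery.py:
import os
from celery import Celery

# Point Celery at the Django settings before the app is created
# ("core.settings" is assumed from the -A core flag).
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "core.settings")

# The broker URL comes from the CELERY_BROKER variable set in
# docker-compose.yml, falling back to the same Redis URL used there.
app = Celery("core", broker=os.environ.get("CELERY_BROKER", "redis://redis:6379/0"))

# Load any CELERY_* settings from Django settings and discover
# tasks.py modules in the installed apps.
app.config_from_object("django.conf:settings", namespace="CELERY")
app.autodiscover_tasks()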
Dockerfile:
FROM python:3.7
WORKDIR /app
ADD . /app
# Install dependencies for PyODBC
RUN apt-get update \
&& apt-get install unixodbc -y \
&& apt-get install unixodbc-dev -y \
&& apt-get install tdsodbc -y \
&& apt-get clean -y
# install ODBC driver in docker image
RUN apt-get update \
&& curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add - \
&& curl https://packages.microsoft.com/config/debian/10/prod.list > /etc/apt/sources.list.d/mssql-release.list \
&& apt-get update \
&& ACCEPT_EULA=Y apt-get install --yes --no-install-recommends msodbcsql17 \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& rm -rf /tmp/*
# install requirements
RUN pip install --trusted-host pypi.python.org -r requirements.txt
EXPOSE 5000
ENV NAME OpentoAll
CMD ["python", "app.py"]
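The unixodbc and msodbcsql17 packages above are only there so that pyodbc can talk to SQL Server from inside the container; a connection using that driver looks roughly like this (server, database, and credentials are placeholders):
import pyodbc

# "ODBC Driver 17 for SQL Server" is the driver installed by the
# msodbcsql17 package above; the connection details are placeholders.
conn = pyodbc.connect(
    "DRIVER={ODBC Driver 17 for SQL Server};"
    "SERVER=my-sql-server.example.com;"
    "DATABASE=mydb;UID=myuser;PWD=mypassword"
)
cursor = conn.cursor()
cursor.execute("SELECT 1")
print(cursor.fetchone())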
When I run docker-compose up locally, the Celery worker starts and I can go to localhost:8000 to reach the API, which dispatches asynchronous requests to a Celery task.
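For reference, the view/task pair behind that is roughly of this shape (the names here are made up):
# tasks.py -- an illustrative task
from celery import shared_task

@shared_task
def add(x, y):
    return x + y

# views.py -- the API endpoint queues the task instead of running it inline
from django.http import JsonResponse
from .tasks import add

def enqueue_add(request):
    result = add.delay(2, 3)  # returns immediately with an AsyncResult
    return JsonResponse({"task_id": result.id})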
Now I'm wondering: how can I deploy this to a cloud environment? Which image(s) would I need to build and deploy? Thanks
Comment: I'd suggest removing the volumes: mounts that overwrite the content in the image and testing this setup locally before trying to deploy to somewhere more involved. - David Maze