version: "3"
services:
haystack-api:
build:
context: .
dockerfile: Dockerfile
image: "deepset/haystack-cpu:latest"
ports:
- 8000:8000
volumes:
# Optional: mount your own models from disk into the container
- "./models:/home/user/models"
    environment:
      # See rest_api/config.py for more variables that you can configure here.
      - DB_HOST=elasticsearch
      - USE_GPU=False
      - TOP_K_PER_SAMPLE=3 # maximum number of answers that may come from the same small passage (reduce this for more varied answers)
      # Load a model into the FARMReader, either from the Hugging Face model hub or from a local path.
      - READER_MODEL_PATH=deepset/roberta-base-squad2
      # - READER_MODEL_PATH=/home/user/models/roberta-base-squad2
      # Alternative: use the TransformersReader instead (e.g. for loading a local model in transformers format):
      # - READER_TYPE=TransformersReader
      # - READER_MODEL_PATH=/home/user/models/roberta-base-squad2
      # - READER_TOKENIZER=/home/user/models/roberta-base-squad2
    restart: always
    depends_on:
      - elasticsearch
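    # The sleep in the command below gives Elasticsearch time to come up before the API
    # starts; depends_on only controls start order, it does not wait for readiness.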
command: "/bin/bash -c 'sleep 15 && gunicorn rest_api.application:app -b 0.0.0.0 -k uvicorn.workers.UvicornWorker --workers 1 --timeout 180 --preload'"
  elasticsearch:
    # Uncomment this to start an empty Elasticsearch instance (you then have to index your own documents):
    # image: "elasticsearch:7.9.2"
    # The default below is a demo image that is "ready-to-query", with some Game of Thrones articles already indexed:
    image: "deepset/elasticsearch-game-of-thrones"
    ports:
      - 9200:9200
    environment:
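      # Run Elasticsearch as a single node (no cluster discovery), suitable for local development.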
      - discovery.type=single-node
  ui:
    image: "deepset/haystack-streamlit-ui"
    ports:
      - 8501:8501
    environment:
      # The UI reaches the API via the compose service name on the internal network.
      - API_ENDPOINT=http://haystack-api:8000
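
# To bring the stack up, run `docker-compose up -d` from this directory, then open the
# Streamlit UI at http://localhost:8501 or query the Haystack REST API directly on
# http://localhost:8000 (see rest_api/config.py and rest_api/application.py for details).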