# docker-compose.yml
# Runs Scrapy and MongoDB together in separate containers via
# docker-compose: https://docs.docker.com/compose/.
# For usage details, see README.md.
version: '3.5'
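
# Typical invocation (a sketch; assumes this file sits at the repository
# root, next to the schools/ build context):
#   docker-compose up --build -d
#   docker-compose logs -f crawler_api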
services:
  crawler_api:
    build: schools  # Build the Scrapy Dockerfile.
    depends_on:
      - mongodb_container
    logging:
      options:
        max-size: "100m"  # Cap log file size during scraping.
    ports:
      - "5000"  # Expose container port 5000 on an ephemeral host port.
    volumes:
      - crawler_api:/code/schools/spiders
    # links:
    #   - "mongodb_container:mongodb_container"  # Allow connection to MongoDB via
    #     the container name (unnecessary on the default compose network, where
    #     services already resolve each other by service name).
  # credit: https://dev.to/sonyarianto/how-to-spin-mongodb-server-with-docker-and-docker-compose-2lef
  # Note: root credentials are set via the environment variables below, so
  # clients must authenticate with them.
  mongodb_container:
    container_name: mongodb_container
    image: mongo:latest
    restart: always
    logging:
      options:
        max-size: "100m"  # Limit the size of log files during scraping.
    ports:
      - "27000:27017"  # Map host port 27000 to MongoDB's default port.
    environment:
      MONGO_INITDB_ROOT_USERNAME: admin
      MONGO_INITDB_ROOT_PASSWORD: mdipass
    volumes:
      - /vol_b/data/mongodata:/data/db/  # Host bind mount for data persistence.
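  # Example connection strings (a sketch, built from the port mapping and
  # credentials above):
  #   from the host:           mongodb://admin:mdipass@localhost:27000
  #   from a compose service:  mongodb://admin:mdipass@mongodb_container:27017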
  redis:
    image: redis:latest
    ports:
      - "7480:6379"  # Map host port 7480 to Redis's default port.
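  # Reaching Redis (a sketch): host clients use localhost:7480, while other
  # compose services use redis:6379, e.g. redis-cli -h redis -p 6379.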
  redis_worker:
    build: schools  # Same image as crawler_api; runs the RQ worker instead.
    depends_on:
      - mongodb_container
      - redis
    command: rq worker crawling-tasks --path /code/schools/
    volumes:
      - crawler_api:/code/schools/spiders
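  # The worker consumes jobs from the 'crawling-tasks' RQ queue. A minimal
  # enqueue sketch (run from the crawler_api container, where the compose
  # network resolves the 'redis' hostname; run_spider is a hypothetical job
  # function):
  #   from redis import Redis
  #   from rq import Queue
  #   Queue('crawling-tasks', connection=Redis(host='redis')).enqueue(run_spider)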
volumes:
  mongo_data_container:  # Declared but currently unused: mongodb_container
                         # mounts a host directory (/vol_b/data/mongodata) instead.
  crawler_api: