-
Notifications
You must be signed in to change notification settings - Fork 2
/
Makefile
144 lines (115 loc) · 4.07 KB
/
Makefile
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
# Makefile Readme
# ----------------
#
# down: shut down docker containers (database and api containers).
# buildnocache: build new docker image without using cache.
# up: start docker containers (database and api containers).
# dbbackup: create a backup of your local database and push to S3. NOTE: Local backups are used by all devs during database restore.
# dbrestore: restore a backup from s3 to your local database.
# migrate: apply any database migrations to local database.
# freshinstall: helper command that wraps server commands to set up the API, database and data locally.
# runserver: Start the API web server; runs on http://localhost:8080/
# shell: Starts bash terminal inside the API docker container.
#
#
# To get started run `make freshinstall`
#
# Name of the docker-compose service running the API container.
# Simple (:=) expansion, no embedded quotes: with `=` the quotes became part
# of the make-level value (the shell stripped them again, but the value was
# only usable in shell context). Unquoted + := is predictable everywhere.
API_SERVICE := api_service
# These are command targets, not files — declare them phony so a file named
# `down`/`stop` can never shadow them.
.PHONY: down downnocache stop

# Stop and remove the docker containers (database and api).
down:
	@docker-compose down

# Like `down`, but also remove named volumes (-v) — this DESTROYS the
# local database data.
downnocache:
	@docker-compose down -v

# Alias for `down`. Use $(MAKE), not `make`, so flags/jobserver propagate.
stop:
	@$(MAKE) down
.PHONY: build buildnocache write-version

# Record the current git short SHA into src/VERSION.txt so the built image
# carries the revision it was created from, then echo it for visibility.
# (Shared helper: both build flavours previously duplicated this recipe via
# $(eval …$(shell …)); plain shell redirection does the same with less magic.)
write-version:
	@git rev-parse --short HEAD > src/VERSION.txt
	@cat src/VERSION.txt

# Build a new docker image from scratch: no layer cache, re-pull base images.
buildnocache: write-version
	@docker-compose build --no-cache --pull

# Build the docker image using the layer cache.
build: write-version
	@docker-compose build
.PHONY: up start

# Start the docker containers (database and api) detached in the background.
up:
	docker-compose up -d

# Alias for `up`. $(MAKE) instead of bare `make` propagates flags correctly.
start:
	@$(MAKE) up
.PHONY: logs

# Follow the API container's log output (Ctrl-C to stop following).
logs:
	@docker-compose logs -f $(API_SERVICE)
.PHONY: dbbackup dbrestore migrate

# Create a backup of the local database and push it to S3.
# NOTE: local backups are shared — other devs restore from them.
dbbackup:
	@docker-compose exec $(API_SERVICE) python manage.py dbbackup local

# Restore a backup from S3 into the local database.
# WARNING: overwrites whatever is currently in the local database.
dbrestore:
	@docker-compose exec $(API_SERVICE) python manage.py dbrestore local

# Apply any pending database migrations to the local database.
migrate:
	@docker-compose exec $(API_SERVICE) python manage.py migrate
.PHONY: install

# Rebuild and restart the stack WITHOUT wiping caches or data, then apply
# database migrations. Use `freshinstall` for a from-scratch setup.
install:
	@printf '\n--- Shutting down existing stack ---\n\n'
	@$(MAKE) down
	@printf '\n--- Building new docker image ---\n\n'
	@$(MAKE) build
	@printf '\n--- Spinning up new stack ---\n\n'
	@$(MAKE) up
	@sleep 20  # crude wait for the database to accept connections — TODO: poll readiness instead
	@printf '\n--- Applying MERMAID database migrations ---\n\n'
	@$(MAKE) migrate
.PHONY: freshinstall

# From-scratch local setup: wipe containers AND volumes, rebuild the image
# without cache, start the stack, restore the database from S3, and migrate.
# WARNING: destroys any existing local database data.
freshinstall:
	@printf '\n--- Shutting down existing stack ---\n\n'
	@$(MAKE) downnocache
	@printf '\n--- Building new docker image ---\n\n'
	@$(MAKE) buildnocache
	@printf '\n--- Spinning up new stack ---\n\n'
	@$(MAKE) up
	@sleep 20  # crude wait for the database to accept connections — TODO: poll readiness instead
	@printf '\n--- Restoring MERMAID database ---\n\n'
	@$(MAKE) dbrestore
	@printf '\n--- Applying MERMAID database migrations ---\n\n'
	@$(MAKE) migrate
.PHONY: runserver

# Start the development web server inside the API container;
# serves on http://localhost:8080/
runserver:
	@docker-compose exec $(API_SERVICE) python manage.py runserver 0.0.0.0:8080
.PHONY: worker

# Start the background queue worker (simpleq) inside the API container.
worker:
	@docker-compose exec $(API_SERVICE) python manage.py simpleq_worker
.PHONY: runserverplus

# Serve the API with gunicorn on port 8081 (closer to a production setup than
# `runserver`): 2 gthread workers x 4 threads, access/error logs to stdout,
# and /dev/shm as worker tmp dir so heartbeat files never block on disk.
runserverplus:
	@docker-compose exec $(API_SERVICE) gunicorn app.wsgi \
		--bind 0.0.0.0:8081 \
		--timeout 120 \
		--workers 2 \
		--threads 4 \
		--worker-class gthread \
		--access-logfile "-" \
		--error-logfile "-" \
		--worker-tmp-dir /dev/shm
.PHONY: simpleq

# Backwards-compatible alias: identical to `worker` (previously a verbatim
# copy of its recipe); delegate so the command lives in one place.
simpleq:
	@$(MAKE) worker
.PHONY: shellplus shell shellroot shellplusroot

# Open a shell_plus session (models auto-imported) in the API container.
shellplus:
	@docker-compose exec $(API_SERVICE) python manage.py shell_plus

# Open a bash shell in the API container as the default container user.
shell:
	@docker-compose exec $(API_SERVICE) /bin/bash

# Open a bash shell in the API container as root.
shellroot:
	@docker-compose exec --user=root $(API_SERVICE) /bin/bash

# Open a shell_plus session in the API container as root.
shellplusroot:
	@docker-compose exec --user=root $(API_SERVICE) python manage.py shell_plus
# Declared phony so a file or directory named `test` can never make this
# target appear "up to date" and silently skip the suite.
.PHONY: test

# Run the API test suite with pytest (migrations skipped for speed).
test:
	@docker-compose exec $(API_SERVICE) pytest -v --no-migrations --rich api/tests
# -----------------
# Fargate Maintenance (docker exec)
# -----------------
# Assumes the local AWS profile name in ~/.aws/config is `mermaid`
.PHONY: cloudshell

# Open an interactive bash shell in the running Fargate task via ECS exec.
# Requires MERMAID_CLUSTER and MERMAID_SERVICE to be set, plus a `mermaid`
# profile in ~/.aws/config. Fail fast with a clear message when unset,
# instead of an opaque aws-cli error.
cloudshell:
	@: $(if $(strip $(MERMAID_CLUSTER)),,$(error MERMAID_CLUSTER must be set))
	@: $(if $(strip $(MERMAID_SERVICE)),,$(error MERMAID_SERVICE must be set))
	$(eval taskid=$(shell aws ecs list-tasks --profile mermaid --cluster $(MERMAID_CLUSTER) --service-name $(MERMAID_SERVICE) --output text | awk -F'/' '{print $$3}'))
	aws ecs execute-command \
		--profile mermaid \
		--cluster $(MERMAID_CLUSTER) \
		--task $(taskid) \
		--container MermaidAPI \
		--command "/bin/bash" \
		--interactive
.PHONY: cloudtunnel

# Port-forward the remote database through the Fargate task via SSM:
# local port 5444 -> $(MERMAID_DBHOST):$(MERMAID_DBPORT).
# Requires MERMAID_CLUSTER, MERMAID_SERVICE, MERMAID_DBHOST, MERMAID_DBPORT.
# NOTE: `grep -oP` and `head -c-2` below are GNU-specific (Linux only).
cloudtunnel:
	@: $(if $(strip $(MERMAID_CLUSTER)),,$(error MERMAID_CLUSTER must be set))
	@: $(if $(strip $(MERMAID_SERVICE)),,$(error MERMAID_SERVICE must be set))
	$(eval taskid=$(shell aws ecs list-tasks --profile mermaid --cluster $(MERMAID_CLUSTER) --service-name $(MERMAID_SERVICE) --output text | awk -F'/' '{print $$3}'))
	$(eval runtimeid=$(shell aws ecs describe-tasks --profile mermaid --cluster $(MERMAID_CLUSTER) --tasks $(taskid) | grep -oP '"runtimeId": "\K.+"' | head -c-2))
	$(eval localport=5444)
	aws ssm start-session \
		--profile mermaid \
		--target ecs:$(MERMAID_CLUSTER)_$(taskid)_$(runtimeid) \
		--document-name AWS-StartPortForwardingSessionToRemoteHost \
		--parameters '{"host":["$(MERMAID_DBHOST)"], "portNumber":["$(MERMAID_DBPORT)"], "localPortNumber":["$(localport)"]}'