Skip to content

Commit

Permalink
Merge branch 'dev' into 656-FRONT-continuousDeployment
Browse files Browse the repository at this point in the history
  • Loading branch information
jmensch1 authored Jun 17, 2020
2 parents 06f8001 + 4348cfd commit 20c5830
Show file tree
Hide file tree
Showing 8 changed files with 99 additions and 61 deletions.
26 changes: 26 additions & 0 deletions .github/workflows/Continuous_Deployment_Backend_Dev.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
# Continuous deployment for the backend: redeploys the API on the AWS dev
# host whenever a push to `dev` touches anything under server/.
name: Deploy_Backend_Dev

on:
  push:
    branches:
      - dev
    paths:
      - 'server/**'

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: run deploy script on AWS dev
        # NOTE(review): @master is a moving target — consider pinning
        # appleboy/ssh-action to a tagged release or commit SHA.
        uses: appleboy/ssh-action@master
        with:
          username: ec2-user
          host: ${{ secrets.AWS_DEV_HOST }}
          key: ${{ secrets.AWS_DEV_PRIVATE_KEY }}
          # Runs on the EC2 box: pull latest code, record the deployed
          # commit SHA in .env, then rebuild and restart only the api
          # service (--no-deps leaves other containers running).
          script: |
            set -e
            cd 311-data/server
            git pull
            echo GITHUB_SHA=${{ github.sha }} >> .env
            docker-compose build api
            docker-compose up --no-deps -d api
20 changes: 20 additions & 0 deletions .github/workflows/Daily_Update_Backend_Dev.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
# Scheduled maintenance: refreshes the dev backend's database once a day.
# Cron fires at 00:00 UTC.
name: Daily_Update_Backend_Dev

on:
  schedule:
    - cron: '0 0 * * *'

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: run update script on AWS dev
        # NOTE(review): @master is unpinned — consider a tagged release
        # or commit SHA for supply-chain safety.
        uses: appleboy/ssh-action@master
        with:
          username: ec2-user
          host: ${{ secrets.AWS_DEV_HOST }}
          key: ${{ secrets.AWS_DEV_PRIVATE_KEY }}
          # Runs the DB update script inside a one-off api container.
          script: |
            set -e
            cd 311-data/server
            docker-compose run api python bin/db_update.py
11 changes: 0 additions & 11 deletions .github/workflows/Nightly_Ingest.yml

This file was deleted.

25 changes: 0 additions & 25 deletions .github/workflows/dev-server-CI.yml

This file was deleted.

8 changes: 5 additions & 3 deletions server/api/src/pb/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from .query import query as query_pb
from .populate import populate as populate_pb
from .data_access import clear_data, set_ready, check_ready
from .data_access import clear_data as clear_pb, check_ready
from settings import Picklebase
from utils.log import log, log_heading

Expand Down Expand Up @@ -33,9 +33,7 @@ def populate():

try:
log_heading('populating picklebase', spacing=(1, 0))
clear_data()
populate_pb()
set_ready()
log('\nPicklebase ready.')
except Exception as e:
enabled = False
Expand All @@ -47,3 +45,7 @@ def populate():

def query(table, fields, filters):
    """Delegate a picklebase query to the underlying implementation.

    Thin pass-through to ``query_pb`` (imported from ``.query``);
    see that module for the semantics of the arguments.
    """
    return query_pb(table, fields, filters)


def clear_data():
    """Remove the on-disk picklebase data.

    Thin wrapper around ``data_access.clear_data`` (imported here as
    ``clear_pb``) so callers only depend on this package's public API.
    """
    clear_pb()
65 changes: 44 additions & 21 deletions server/api/src/pb/data_access.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,64 +6,87 @@


DATA_DIR = os.path.join(Server.TMP_DIR, 'picklebase')
STAGE_DIR = os.path.join(Server.TMP_DIR, 'picklebase-stage')
READY_FILE = os.path.join(DATA_DIR, 'ready')


# ########## RESETS ########### #

def clear_stage():
    """Delete the staging directory tree; silently no-op if it doesn't exist."""
    shutil.rmtree(STAGE_DIR, ignore_errors=True)


def clear_data():
    """Delete the live picklebase directory tree; silently no-op if absent.

    Note this also removes READY_FILE (it lives inside DATA_DIR), so
    ``check_ready()`` returns False afterwards.
    """
    shutil.rmtree(DATA_DIR, ignore_errors=True)


# ###### PATHS / FILENAMES ##### #

def stage_table_path(table):
    """Return the staging-area directory path for *table*."""
    return os.path.join(STAGE_DIR, table)


def table_path(table):
    """Return the live (committed) directory path for *table*."""
    return os.path.join(DATA_DIR, table)


def init_table(table):
    """Reset *table*'s live directory to an empty state.

    Removes any existing directory, then recreates it empty.
    (``exist_ok=True`` is redundant right after rmtree, but harmless.)
    """
    path = table_path(table)
    shutil.rmtree(path, ignore_errors=True)
    os.makedirs(path, exist_ok=True)
def stage_meta_path(table):
    """Return the path of *table*'s meta.json inside the staging area."""
    return os.path.join(STAGE_DIR, table, 'meta.json')


def meta_path(table):
    """Return the path of *table*'s meta.json in the live data directory."""
    return os.path.join(DATA_DIR, table, 'meta.json')


def batch_filename(batch_num):
    """Return the on-disk filename used for pickle batch *batch_num*."""
    return 'batch_' + str(batch_num)


# ############ WRITING ########### #

def init_table(table):
    """Reset *table*'s staging directory to an empty state.

    Removes any leftover staged data for the table, then recreates the
    directory so batches can be written into it.
    """
    path = stage_table_path(table)
    shutil.rmtree(path, ignore_errors=True)
    os.makedirs(path, exist_ok=True)


def save_batch(table, batch_num, batch):
    """Pickle *batch* into *table*'s staging directory.

    Returns a ``(filename, size_in_bytes)`` tuple for the written file.
    """
    fname = batch_filename(batch_num)
    target = os.path.join(stage_table_path(table), fname)
    with open(target, 'wb') as out:
        pickle.dump(batch, out)
    return fname, os.path.getsize(target)


def load_batch(table, batch_num):
    """Unpickle and return batch *batch_num* from *table*'s live directory.

    Raises FileNotFoundError if the batch file does not exist.
    """
    path = os.path.join(table_path(table), batch_filename(batch_num))
    with open(path, 'rb') as f:
        return pickle.load(f)


def meta_path(table):
    """Return the path of *table*'s meta.json in the live data directory."""
    return os.path.join(DATA_DIR, table, 'meta.json')


def save_meta(table, meta):
    """Serialize *meta* as pretty-printed JSON into *table*'s staging dir.

    Also echoes the JSON to stdout so it shows up in deployment logs.
    """
    serialized = json.dumps(meta, indent=2)
    print('\nSaving meta:', flush=True)
    print(serialized, flush=True)
    with open(stage_meta_path(table), 'w') as out:
        out.write(serialized)


def commit_pb():
    """Promote the staged picklebase to live.

    Deletes the current live DATA_DIR, moves the staged build into its
    place, then touches READY_FILE so ``check_ready()`` reports the data
    as usable.  NOTE(review): there is a window between the rmtree and
    the move where no live data exists — readers should gate on
    ``check_ready()``.
    """
    shutil.rmtree(DATA_DIR, ignore_errors=True)
    shutil.move(STAGE_DIR, DATA_DIR)
    # Touch the ready marker; an empty file is the signal.
    with open(READY_FILE, 'w'):
        pass


# ########### READING ########### #

def load_batch(table, batch_num):
    """Unpickle and return batch *batch_num* from *table*'s live directory.

    Raises FileNotFoundError if the batch file does not exist.
    """
    path = os.path.join(table_path(table), batch_filename(batch_num))
    with open(path, 'rb') as f:
        return pickle.load(f)


def load_meta(table):
    """Read and deserialize the committed meta.json for *table*."""
    with open(meta_path(table), 'r') as src:
        return json.load(src)


def set_ready():
    """Mark the picklebase as ready by touching READY_FILE.

    The marker is an empty file; its mere existence is the signal that
    ``check_ready()`` tests for.
    """
    with open(READY_FILE, 'w'):
        pass


def check_ready():
    """Return True if the READY_FILE marker exists (picklebase is usable)."""
    return os.path.isfile(READY_FILE)
3 changes: 3 additions & 0 deletions server/api/src/pb/populate.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import db
from .create_table import create_table
from .data_access import clear_stage, commit_pb
from settings import Picklebase


Expand Down Expand Up @@ -51,5 +52,7 @@ def optimize(batch):


def populate():
    """Build the picklebase end to end.

    Clears any leftover staging data, builds the map and vis tables into
    the staging area, then commits the staged build to the live directory
    in one final step (see ``data_access.commit_pb``).
    """
    clear_stage()
    create_map_table()
    create_vis_table()
    commit_pb()
2 changes: 1 addition & 1 deletion server/api/src/utils/parse_env.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ def parse(value, to_type):
if os.path.isabs(value):
return value
else:
value = os.path.join(os.getcwd(), value)
value = os.path.join(os.path.dirname(__file__), '../..', value)
return os.path.normpath(value)

return value
Expand Down

0 comments on commit 20c5830

Please sign in to comment.