Merge pull request #4 from earthdaily/deploy1
Update: deployment to AWS
KarineGEO6 authored Jun 5, 2024
2 parents e7afbbe + a911d9d commit 237158a
Showing 19 changed files with 1,172 additions and 349 deletions.
2 changes: 2 additions & 0 deletions .env.sample
@@ -10,3 +10,5 @@ AWS_BUCKET_NAME=
 AZURE_ACCOUNT_NAME=
 AZURE_BLOB_CONTAINER_NAME=
 AZURE_SAS_CREDENTIAL=
+
+GATEWAY_STAGE=
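The new GATEWAY_STAGE entry is picked up the same way as the existing variables. A minimal local setup sketch (assuming the .env file is loaded with python-dotenv, which the project already lists as a dependency):

  cp .env.sample .env
  # then edit .env and set GATEWAY_STAGE to your API Gateway stage name, or leave it empty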
157 changes: 157 additions & 0 deletions .github/workflows/AWS_deploy.yml
@@ -0,0 +1,157 @@
name: Deployment AWS

on:
  push:
    branches:
      - deploy1

env:
  AWS_REGION: ${{ secrets.AWS_REGION }}
  ECR_REPOSITORY: ${{ secrets.ECR_REPOSITORY }}
  ECS_SERVICE: ${{ secrets.ECS_SERVICE }}
  ECS_CLUSTER: ${{ secrets.ECS_CLUSTER }}
  ECS_TASK_DEFINITION: ${{ secrets.ECS_TASK_DEFINITION }}
  CONTAINER_NAME: ${{ secrets.CONTAINER_NAME }}
  EDS_API_URL: ${{ secrets.EDS_API_URL }}
  EDS_AUTH_URL: ${{ secrets.EDS_AUTH_URL }}
  LAMBDA_FUNCTION: ${{ secrets.LAMBDA_FUNCTION }}
  GATEWAY_STAGE: ${{ secrets.GATEWAY_STAGE }}
  AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
  AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
  DEPLOY_LAMBDA: ${{ secrets.DEPLOY_LAMBDA }}

permissions:
  id-token: write # This is required for requesting the JWT
  contents: read

jobs:
  deploy:
    name: Deploy
    runs-on: ubuntu-latest
    environment: production

    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Check if the gateway stage secret exists and assign it to a variable
        id: gateway-key
        run: |
          if [[ -n "${{ env.GATEWAY_STAGE }}" ]]; then
            echo "::set-output name=key_gateway_stage::${{ env.GATEWAY_STAGE }}"
          else
            echo "::set-output name=key_gateway_stage::\"\""
          fi
        shell: bash

      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          role-to-assume: arn:aws:iam::489065051964:role/GitHubActionProcessor-AssumeRoleWithAction # change to reflect your IAM role's ARN
          role-session-name: GitHub_to_AWS_via_gitaction_devOps
          aws-region: ${{ env.AWS_REGION }}

      - name: Login to Amazon ECR
        id: login-ecr
        uses: aws-actions/amazon-ecr-login@v2

      - name: Build, tag, and push Lambda image to Amazon ECR
        id: build-image-lambda
        if: ${{ env.DEPLOY_LAMBDA == 'true' }}
        env:
          ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
          IMAGE_TAG: ${{ github.sha }}
        run: |
          # Build a docker container and push it to ECR
          # so that it can be deployed as a Lambda function.
          docker build \
            --build-arg EDS_API_URL=${{ env.EDS_API_URL }} \
            --build-arg EDS_AUTH_URL=${{ env.EDS_AUTH_URL }} \
            --build-arg AWS_ACCESS_KEY_ID=${{ env.AWS_ACCESS_KEY_ID }} \
            --build-arg AWS_SECRET_ACCESS_KEY=${{ env.AWS_SECRET_ACCESS_KEY }} \
            --build-arg INPUT_JSON_PATH="data/processor_input_example.json" \
            --build-arg GATEWAY_STAGE=${{ steps.gateway-key.outputs.key_gateway_stage }} \
            -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG . -f Dockerfile_lambda
          # docker tag $ECR_REPOSITORY:latest $ECR_REGISTRY/$ECR_REPOSITORY:latest
          docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG
          echo "image=$ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG" >> $GITHUB_OUTPUT

      - name: Update the Lambda function image
        id: lambda-function
        if: ${{ env.DEPLOY_LAMBDA == 'true' }}
        env:
          ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
          IMAGE_TAG: ${{ github.sha }}
        run: |
          aws lambda update-function-code \
            --function-name $LAMBDA_FUNCTION \
            --image-uri $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG

      # - name: Update Lambda function configuration
      #   id: lambda-function-config
      #   if: ${{ env.DEPLOY_LAMBDA == 'true' }}
      #   run: |
      #     aws lambda update-function-configuration \
      #       --function-name $LAMBDA_FUNCTION \
      #       --environment "Variables={EDS_API_URL=${{ env.EDS_API_URL }},EDS_AUTH_URL=${{ env.EDS_AUTH_URL }}}"

      - name: Build, tag, and push ECS task image to Amazon ECR
        id: build-image
        if: ${{ env.DEPLOY_LAMBDA != 'true' }}
        env:
          ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
          IMAGE_TAG: ${{ github.sha }}
        run: |
          # Build a docker container and push it to ECR
          # so that it can be deployed to ECS.
          docker build \
            --build-arg EDS_API_URL=${{ env.EDS_API_URL }} \
            --build-arg EDS_AUTH_URL=${{ env.EDS_AUTH_URL }} \
            --build-arg AWS_ACCESS_KEY_ID=${{ env.AWS_ACCESS_KEY_ID }} \
            --build-arg AWS_SECRET_ACCESS_KEY=${{ env.AWS_SECRET_ACCESS_KEY }} \
            --build-arg INPUT_JSON_PATH="data/processor_input_example.json" \
            --build-arg GATEWAY_STAGE=${{ steps.gateway-key.outputs.key_gateway_stage }} \
            -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG . -f Dockerfile_ECS
          # docker tag $ECR_REPOSITORY:latest $ECR_REGISTRY/$ECR_REPOSITORY:latest
          docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG
          echo "image=$ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG" >> $GITHUB_OUTPUT
          echo "::set-output name=IMAGE_URI::$ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG"

      - name: Download task definition
        if: ${{ env.DEPLOY_LAMBDA != 'true' }}
        run: |
          aws ecs describe-task-definition --task-definition ${{ env.ECS_TASK_DEFINITION }} --query taskDefinition > task-definition.json
          echo $(cat task-definition.json | jq 'del(
            .taskDefinitionArn,
            .requiresAttributes,
            .compatibilities,
            .revision,
            .status,
            .registeredAt,
            .registeredBy
          )') > task-definition.json
          cat task-definition.json

      - name: Fill in the new image ID in the Amazon ECS task definition
        id: task-def
        if: ${{ env.DEPLOY_LAMBDA != 'true' }}
        uses: aws-actions/amazon-ecs-render-task-definition@v1
        with:
          task-definition: task-definition.json
          container-name: ${{ env.CONTAINER_NAME }}
          image: ${{ steps.build-image.outputs.image }}

      - name: Print the updated task definition
        if: ${{ env.DEPLOY_LAMBDA != 'true' }}
        run: cat ${{ steps.task-def.outputs.task-definition }}

      - name: Deploy Amazon ECS task definition
        if: ${{ env.DEPLOY_LAMBDA != 'true' }}
        uses: aws-actions/amazon-ecs-deploy-task-definition@v1
        with:
          task-definition: ${{ steps.task-def.outputs.task-definition }}
          service: ${{ env.ECS_SERVICE }}
          cluster: ${{ env.ECS_CLUSTER }}
          wait-for-service-stability: true
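After a push to deploy1 runs this workflow, the result can be sanity-checked from the AWS CLI. A sketch, using the same secret-backed names the workflow reads (substitute your own values):

  # ECS path: the rendered task definition should show up as the PRIMARY deployment
  aws ecs describe-services --cluster "$ECS_CLUSTER" --services "$ECS_SERVICE" --query 'services[0].deployments'
  # Lambda path: the function's image URI should point at the freshly pushed tag
  aws lambda get-function --function-name "$LAMBDA_FUNCTION" --query 'Code.ImageUri'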
3 changes: 3 additions & 0 deletions .gitignore
@@ -89,3 +89,6 @@ target/

 # Mypy cache
 .mypy_cache/
+
+# credentials AWS for Terraform
+#aws_credentials.tfvars
4 changes: 3 additions & 1 deletion Dockerfile
@@ -19,4 +19,6 @@ COPY docker-entrypoint.sh /usr/local/bin/
 RUN dos2unix /usr/local/bin/docker-entrypoint.sh
 RUN chmod +x /usr/local/bin/docker-entrypoint.sh
 
-ENTRYPOINT ["docker-entrypoint.sh"]
+RUN chmod 644 api/api.py
+
+ENTRYPOINT ["docker-entrypoint.sh"]
37 changes: 37 additions & 0 deletions Dockerfile_ECS
@@ -0,0 +1,37 @@
FROM continuumio/miniconda3:23.10.0-1
EXPOSE 80

RUN pip install --upgrade pip==22.0.4
RUN conda clean --all
RUN pip cache purge

WORKDIR /app

COPY ./requirements.txt .

RUN pip install -r requirements.txt
RUN pip cache purge; exit 0
RUN apt-get update && apt-get install -y dos2unix
COPY ./src .

# Set up environment variables
ARG EDS_API_URL
ENV EDS_API_URL=${EDS_API_URL}
ARG EDS_AUTH_URL
ENV EDS_AUTH_URL=${EDS_AUTH_URL}
ARG AWS_ACCESS_KEY_ID
ENV AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID}
ARG AWS_SECRET_ACCESS_KEY
ENV AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}
ARG INPUT_JSON_PATH
ENV INPUT_JSON_PATH=${INPUT_JSON_PATH}
ARG GATEWAY_STAGE
ENV GATEWAY_STAGE=${GATEWAY_STAGE}

COPY docker-entrypoint.sh /usr/local/bin/
RUN dos2unix /usr/local/bin/docker-entrypoint.sh
RUN chmod +x /usr/local/bin/docker-entrypoint.sh

RUN chmod 644 api/api.py

ENTRYPOINT ["docker-entrypoint.sh"]
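Dockerfile_ECS takes the same build arguments the workflow passes in. A local build sketch with placeholder values (the URLs below are not real endpoints, the AWS credential arguments are omitted, and the image tag is arbitrary):

  docker build -f Dockerfile_ECS \
    --build-arg EDS_API_URL="https://example.invalid/api" \
    --build-arg EDS_AUTH_URL="https://example.invalid/auth" \
    --build-arg INPUT_JSON_PATH="data/processor_input_example.json" \
    --build-arg GATEWAY_STAGE="" \
    -t reflectance-processor:local .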
29 changes: 29 additions & 0 deletions Dockerfile_lambda
@@ -0,0 +1,29 @@
FROM public.ecr.aws/lambda/python:3.11

RUN pip install --upgrade pip==22.0.4
RUN pip cache purge

# Copy function code
COPY ./src ${LAMBDA_TASK_ROOT}

# Set up environment variables
ARG EDS_API_URL
ENV EDS_API_URL=${EDS_API_URL}
ARG EDS_AUTH_URL
ENV EDS_AUTH_URL=${EDS_AUTH_URL}
ARG AWS_ACCESS_KEY_ID
ENV AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID}
ARG AWS_SECRET_ACCESS_KEY
ENV AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}
ARG INPUT_JSON_PATH
ENV INPUT_JSON_PATH=${INPUT_JSON_PATH}
ARG GATEWAY_STAGE
ENV GATEWAY_STAGE=${GATEWAY_STAGE}

# Install the function's dependencies using file requirements.txt
# from your project folder.
COPY requirements.txt ${LAMBDA_TASK_ROOT}
# Install the specified packages
RUN pip install -r requirements.txt
# Set the CMD to your handler (could also be done as a parameter override outside of the Dockerfile)
CMD [ "api.api.handler" ]
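Because the Lambda image is built from the public.ecr.aws/lambda/python base, it can be exercised locally through the bundled Runtime Interface Emulator before pushing. A sketch (build args omitted; the empty event payload is a placeholder, since a real invocation would need an API Gateway-style event for what appears to be an API handler):

  docker build -f Dockerfile_lambda -t reflectance-processor:lambda-local .
  docker run --rm -p 9000:8080 reflectance-processor:lambda-local
  # in another shell:
  curl -s -X POST "http://localhost:9000/2015-03-31/functions/function/invocations" -d '{}'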
7 changes: 1 addition & 6 deletions docker-entrypoint.sh
@@ -1,7 +1,2 @@
 #!/bin/bash
-set -e
-if [ "$RUN_MODE_ENV" = "API" ]; then
-    exec hypercorn api.api:app -b 0.0.0.0:80 --worker-class trio
-else
-    exec python main.py "$@"
-fi
+exec hypercorn api.api:app -b 0.0.0.0:80 --worker-class trio
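With the RUN_MODE_ENV switch removed, the container now always starts the hypercorn API server. Assuming the ECS image built in the sketch above and FastAPI's default docs route, a quick local smoke test could look like:

  docker run --rm -p 8080:80 reflectance-processor:local
  # in another shell:
  curl -s http://localhost:8080/docs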
37 changes: 20 additions & 17 deletions environment.yml
@@ -1,26 +1,29 @@
-name: earthdaily-processor
+name: reflectance-processor
 channels:
-  - conda-forge
-  - defaults
+  - conda-forge
+  - defaults
 dependencies:
   - python>3.11
   - ipykernel
   - python-dotenv>=0.5.1
   - geojson
   - pyproj
   - zarr
-  - pip
+  - pip
   - pip:
-      # aws
-      - boto3
-      # azure
-      - azure-storage-blob
-      - earthdaily==0.0.6
-      - jupyter
-      - matplotlib
-      - numpy
-      - scipy
-      - geogif
-      - fastapi
-      - pydantic
-      - byoa==0.1.0a2
+      # aws
+      - boto3
+      # azure
+      - azure-storage-blob
+      - earthdaily==0.0.6
+      - jupyter
+      - matplotlib
+      - numpy
+      - scipy
+      - geogif
+      - fastapi
+      - pydantic
+      - byoa==0.1.0a2
+      - mangum
+      - cloudpathlib
+      - adlfs
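The renamed environment can be rebuilt locally from this file; a sketch (the environment name comes from the file itself):

  conda env create -f environment.yml
  conda activate reflectance-processor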
(Diffs for the remaining 11 changed files were not loaded.)
