
#20 refactoring
- write up the refactoring of the infrastructure deployment
- add a function in circombot to run the Bacalhau point job
- add types for the different jobs so that the geospatial jobs can be executed
dhruvmalik007 committed Oct 30, 2023
1 parent a8f34ab commit 69c7869
Showing 10 changed files with 34 additions and 30 deletions.
4 changes: 4 additions & 0 deletions .aws/credentials.example
@@ -0,0 +1,4 @@
[default]
sso_account_id=
region=
secret_key=""
2 changes: 1 addition & 1 deletion aws_deployment/infrastructure/infrastructure_stack.py
@@ -11,7 +11,7 @@
from constructs import Construct

"""
Credits to [aws-cdk examples](https://github.com/aws-samples/aws-cdk-examples) for the reference examples.
Credits to [aws-cdk examples](https://github.com/aws-samples/aws-cdk-examples) for the basic template.
"""
class InfrastructureStack(Stack):
job_scheduler: _lambda.DockerImageFunction
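A hedged sketch of how the `job_scheduler` attribute might be populated inside the stack, assuming CDK v2 and a Dockerfile under `job_scheduler/`; the construct id and asset path are illustrative, not taken from the commit:

```python
# a minimal sketch, not the commit's code: build the scheduler Lambda from a
# local Docker image asset. The directory and construct id are assumptions.
from aws_cdk import Stack, aws_lambda as _lambda
from constructs import Construct

class InfrastructureStack(Stack):
    job_scheduler: _lambda.DockerImageFunction

    def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)
        self.job_scheduler = _lambda.DockerImageFunction(
            self,
            "JobScheduler",
            code=_lambda.DockerImageCode.from_image_asset("job_scheduler"),
        )
```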
3 changes: 2 additions & 1 deletion bots/Discord/circombot.py
@@ -8,14 +8,15 @@

import logging
import discord
from discord.ext import commands, tasks
from discord.ext import commands
from discord.ext.commands import Bot, Context
from storage import Database
import aiosqlite
from bots.Discord.loggingFormatter import LoggingFormatter
from bots.consumer.kafkaConsumer import kafka_consume_list_jobs
from bots.producer.kafkaProducer import kafka_producer_job
from discord import app_commands

try:
with open(f"{os.path.realpath(os.path.dirname(__file__))}/config.json") as file:
config = json.load(file)
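A hedged sketch of the kind of command these imports enable: publishing the point-job parameters to the topic the consumer below reads. The command name, the first three list positions, and the broker address are assumptions; the commit's actual producer logic lives in bots/producer/kafkaProducer.py.

```python
# a sketch, not the commit's code: forward point-job parameters to Kafka.
# The topic matches the consumer's default; everything else is illustrative.
import json
from discord.ext import commands
from discord.ext.commands import Context
from kafka import KafkaProducer

producer = KafkaProducer(
    bootstrap_servers="localhost:9092",  # assumption: local broker
    value_serializer=lambda v: json.dumps(v).encode("utf-8"),
)

@commands.hybrid_command(name="point_job")
async def point_job(ctx: Context, xcoord: str, ycoord: str,
                    filename_shp: str, ipfs_image: str):
    """Queue a Bacalhau point job."""
    # positions 3-5 mirror the consumer's params[3..5]; 0-2 are assumptions
    producer.send("bacalhau_compute_job",
                  [xcoord, ycoord, "point", filename_shp, ipfs_image, str(ctx.author)])
    await ctx.send("Point job queued on bacalhau_compute_job.")
```

The command object still has to be attached to the bot (for example `bot.add_command(point_job)`) and the tree synced before the slash variant appears.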
3 changes: 3 additions & 0 deletions bots/Discord/cogs/__init__.py
@@ -0,0 +1,3 @@
import sys
import os
sys.path.append(os.path.relpath(os.getcwd(), "/"))
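The shim above resolves imports relative to the process working directory; a cwd-independent alternative, assuming the repository root sits three levels above this package, would anchor on the file's own location:

```python
# a sketch of a cwd-independent variant: derive the repo root from __file__
# (bots/Discord/cogs/ -> three levels up) instead of the working directory.
import os
import sys

REPO_ROOT = os.path.abspath(
    os.path.join(os.path.dirname(__file__), "..", "..", "..")
)
if REPO_ROOT not in sys.path:
    sys.path.append(REPO_ROOT)
```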
2 changes: 0 additions & 2 deletions bots/Dockerfile.discord
@@ -13,9 +13,7 @@ ENV PATH=" ${PATH}:/root/.local/bin"

## copying the remaining folders for circumbot deployment.
COPY . .

## also .env separately from the root folder

COPY ../../.env ../../.env

FROM build as run
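One caveat on the retained COPY line: Docker resolves COPY sources against the build context root and rejects paths that climb above it, so `COPY ../../.env ../../.env` will normally fail with a "forbidden path outside the build context" error; building from the repository root and using `COPY .env .env` is the usual workaround.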
4 changes: 2 additions & 2 deletions bots/consumer/kafkaConsumer.py
@@ -11,7 +11,7 @@
import json
import time
import os
#from bots.utils.pipeline_construction_caller import execute_reconstruction_pipeline
from bots.utils.pipeline_construction_caller import createJobBacalauPoint

from utils.model_helper import InputParametersPoint

@@ -68,7 +68,7 @@ def kafka_consume_message_jobInput(topic: str = 'bacalhau_compute_job', username
jobParameter.filename_shp = params[3]
jobParameter.ipfs_image = params[4]
jobParameter.username = params[5]
#createJobBacalauPoint(jobParameter)
createJobBacalauPoint(jobParameter)

#return parameters

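For readers without utils/model_helper.py at hand, a hedged sketch of the minimum shape InputParametersPoint needs for the assignments above; only the three fields set in this hunk are grounded in the diff, and the defaults are assumptions:

```python
# a minimal sketch; the real InputParametersPoint lives in utils/model_helper.py
# and likely carries more fields (params[0..2] are consumed elsewhere).
from dataclasses import dataclass

@dataclass
class InputParametersPoint:
    filename_shp: str = ""
    ipfs_image: str = ""
    username: str = ""
```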
20 changes: 1 addition & 19 deletions bots/utils/pipeline_construction_caller.py
@@ -81,24 +81,6 @@ def createJobBacalauPoint(parameter: InputParametersPoint) -> any:
return job_json_details
except SystemError as s:
print(s)



def execute_reconstruction_pipeline(
Xcoord="43.2946",
Ycoord="5.3695",
username="test",
ipfs_shp_file="bafkreicxd6u4avrcytevtvehaaimqbsqe5qerohji2nikcbfrh6ccb3lgu",
filename="pipeline_template.json",
algorithm_surface_reconstruction="0", #(poisson)
):
"""
function to call the hosted pipeline construction container on ECS which will in turn run the intermediate series of computations on the bacalhau
"""

pass




def listJobs() -> JobResults:
@@ -176,7 +158,7 @@ def vectorize_outputs(data:dict):
# algorithm_file = paramsReconstruction.algo



test = True



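createJobBacalauPoint's body is collapsed in this view. As a hedged illustration only: one common way to submit such a job is to shell out to the Bacalhau CLI (`bacalhau docker run` is a real subcommand, but treating `ipfs_image` as the runnable image and the returned shape are assumptions):

```python
# a sketch, not the commit's implementation: submit a Docker job through the
# Bacalhau CLI and return whatever it printed; flags are kept minimal on
# purpose, see `bacalhau docker run --help` for the real interface.
import subprocess

def create_job_bacalhau_point_sketch(parameter) -> dict:
    result = subprocess.run(
        ["bacalhau", "docker", "run", parameter.ipfs_image],
        capture_output=True,
        text=True,
        check=True,
    )
    return {"stdout": result.stdout}
```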
3 changes: 2 additions & 1 deletion job_scheduler/requirements.txt
@@ -5,4 +5,5 @@ uvicorn
python-dotenv
httpx
pytest
kafka
kafka
fastapi-scheduler
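One packaging note: the bare `kafka` distribution on PyPI is an old snapshot of the client that is now maintained as `kafka-python`, so if `from kafka import KafkaConsumer` misbehaves on a recent interpreter, repinning to `kafka-python` is the usual fix.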
6 changes: 2 additions & 4 deletions job_scheduler/tests/test_job_scheduler.py
@@ -23,7 +23,7 @@ class RequestCityGMLReconstruction(BaseModel):



def create_job_task():
def create_job_task_surface_reconstruction():

parameters : Request = {
"xcoord": '34',
@@ -36,6 +36,4 @@ def create_job_task():
response = client.get("/jobs/requestJob/surface_reconstruction/ECS", params=parameters)

assert response.status_code == 200

def get_values():
pass

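The renamed test depends on a module-level `client`; a minimal sketch of that fixture, assuming the scheduler's FastAPI app is importable as `app` (the exact import path is an assumption):

```python
# a minimal sketch of the harness the assertion above relies on; the `main`
# module name is an assumption about where the FastAPI app object lives.
from fastapi.testclient import TestClient

from main import app  # assumption: job_scheduler/main.py exposes `app`

client = TestClient(app)
```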
17 changes: 17 additions & 0 deletions job_scheduler/utils/types.py
@@ -0,0 +1,17 @@
"""
here we store the various standards of data inputs/outputs that are being processed by the various jobs
"""
from typing import List

class surface_reconstruction_pipeline:
coordinates: List[str]
laz_file: str
username:str
template_file: str
filename_pipeline: str
surface_reconstruction_algorithm: str

class citygml_pipeline:
yaml_file_path: str
object_file_path: str
cityGML_output_path:str

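As declared, the two classes carry only class-level annotations and store nothing per instance. A hedged sketch of the same types as dataclasses, keeping the diff's field names verbatim (pydantic models would work equally well, and FastAPI already depends on pydantic):

```python
# a sketch with instance storage; names kept as in the diff, although PEP 8
# would prefer CapWords for class names.
from dataclasses import dataclass
from typing import List

@dataclass
class surface_reconstruction_pipeline:
    coordinates: List[str]
    laz_file: str
    username: str
    template_file: str
    filename_pipeline: str
    surface_reconstruction_algorithm: str

@dataclass
class citygml_pipeline:
    yaml_file_path: str
    object_file_path: str
    cityGML_output_path: str
```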