Feature/non osm #170

Closed · wants to merge 14 commits
1 change: 1 addition & 0 deletions .gitignore
@@ -49,3 +49,4 @@ trainings/*
backend/.env
backend/config.txt
backend/postgres-data

1 change: 1 addition & 0 deletions backend/Dockerfile_CPU
@@ -37,3 +37,4 @@ RUN pip install /tmp/solaris --use-feature=in-tree-build && \
# Set working directory and copy the application code
WORKDIR /app
COPY . /app

12 changes: 6 additions & 6 deletions backend/core/serializers.py
@@ -156,8 +156,8 @@ def validate(self, data):
Check supplied data
"""
for i in data["zoom_level"]:
- if int(i) < 19 or int(i) > 21:
- raise serializers.ValidationError("Zoom level Supported between 19-21")
+ if int(i) < 18 or int(i) > 23:
+ raise serializers.ValidationError("Zoom level Supported between 18-23")
return data


@@ -208,9 +208,9 @@ def validate(self, data):

if "zoom_level" in data:
for zoom in data["zoom_level"]:
- if zoom < 19 or zoom > 21:
+ if zoom < 18 or zoom > 23:
raise serializers.ValidationError(
- "Zoom level must be between 19 and 21"
+ "Zoom level must be between 18 and 23"
)

return data
@@ -274,9 +274,9 @@ def validate(self, data):
)
if len(data["bbox"]) != 4:
raise serializers.ValidationError("Not a valid bbox")
if data["zoom_level"] < 18 or data["zoom_level"] > 22:
if data["zoom_level"] < 18 or data["zoom_level"] > 23:
raise serializers.ValidationError(
f"""Invalid Zoom level : {data["zoom_level"]}, Supported between 18-22"""
f"""Invalid Zoom level : {data["zoom_level"]}, Supported between 18-23"""
)

if "max_angle_change" in data:
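The widened zoom range (18-23) recurs in all three validators above. A minimal illustrative sketch of that bound as a standalone check; the helper name and constants below are hypothetical, not part of this PR:

```python
# Hypothetical helper expressing the new bound (18-23 inclusive) in one place;
# purely illustrative -- the PR itself keeps the checks inline in each validator.
MIN_ZOOM, MAX_ZOOM = 18, 23


def zoom_level_supported(zoom: int) -> bool:
    """Return True if the zoom level falls inside the newly supported range."""
    return MIN_ZOOM <= int(zoom) <= MAX_ZOOM


assert zoom_level_supported(18) and zoom_level_supported(23)
assert not zoom_level_supported(17) and not zoom_level_supported(24)
```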
28 changes: 10 additions & 18 deletions backend/core/tasks.py
@@ -16,12 +16,7 @@
FeedbackLabelFileSerializer,
LabelFileSerializer,
)
- from core.utils import (
- bbox,
- download_imagery,
- get_start_end_download_coords,
- is_dir_empty,
- )
+ from core.utils import bbox, is_dir_empty
from django.conf import settings
from django.contrib.gis.db.models.aggregates import Extent
from django.contrib.gis.geos import GEOSGeometry
@@ -30,6 +25,8 @@
from hot_fair_utilities import preprocess, train
from hot_fair_utilities.training import run_feedback

+ from .utils import download

logger = logging.getLogger(__name__)

# from core.serializers import LabelFileSerializer
@@ -95,23 +92,18 @@ def train_model(
bbox_coords = bbox(obj.geom.coords[0])
for z in zoom_level:
zm_level = z
- print(
+ logger.info(
f"""Running Download process for
- aoi : {obj.id} - dataset : {dataset_id} , zoom : {zm_level}"""
+ aoi : {obj.id} - dataset : {dataset_id} , zoom : {zm_level} using {source_imagery}"""
)
try:
tile_size = DEFAULT_TILE_SIZE # by default

- start, end = get_start_end_download_coords(
- bbox_coords, zm_level, tile_size
- )
- # start downloading
- download_imagery(
- start,
- end,
+ download(
+ bbox_coords,
zm_level,
- base_path=training_input_image_source,
- source=source_imagery,
+ source_imagery,
+ tile_size,
+ training_input_image_source,
)

except Exception as ex:
36 changes: 24 additions & 12 deletions backend/core/utils.py
@@ -115,26 +115,17 @@ def get_start_end_download_coords(bbox_coords, zm_level, tile_size):
import logging


- def is_dir_empty(directory_path):
- return not any(os.scandir(directory_path))


def download_image(url, base_path, source_name):
response = requests.get(url)

image = response.content

pattern = r"/(\d+)/(\d+)/(\d+)(?:\.\w+)?"
match = re.search(pattern, url)
# filename = z-x-y
filename = f"{base_path}/{source_name}-{match.group(2)}-{match.group(3)}-{match.group(1)}.png"


with open(filename, "wb") as f:
f.write(image)

# print(f"Downloaded: {url}")


def download_imagery(start: list, end: list, zm_level, base_path, source="maxar"):
"""Downloads imagery from start to end tile coordinate system
@@ -144,7 +135,6 @@ def download_imagery(start: list, end: list, zm_level, base_path, source="maxar"):
end (list): [tile_x,tile_y],
source (string): it should be eithre url string or maxar value
zm_level : Zoom level

"""

begin_x = start[0] # this will be the beginning of the download loop for x
@@ -170,7 +160,6 @@ def download_imagery(start: list, end: list, zm_level, base_path, source="maxar"):
source_name = source
download_url = f"https://services.digitalglobe.com/earthservice/tmsaccess/tms/1.0.0/DigitalGlobe:ImageryTileService@EPSG:3857@jpg/{zm_level}/{download_path[0]}/{download_path[1]}.jpg?connectId={connect_id}&flipy=true"

- # add multiple logic on supported sources here
else:
# source should be url as string , like this : https://tiles.openaerialmap.org/62dbd947d8499800053796ec/0/62dbd947d8499800053796ed/{z}/{x}/{y}
if "{-y}" in source:
@@ -202,14 +191,37 @@ def download_imagery(start: list, end: list, zm_level, base_path, source="maxar"):
executor.submit(download_image, url, base_path, source_name)
for url in download_urls
]

for future in concurrent.futures.as_completed(futures):
try:
future.result()
except Exception as e:
print(f"An exception occurred in a thread: {e}")
print(f"Error occurred: {e}")
raise e


+ def download(
+ bbox,
+ zoom_level,
+ tms_url,
+ tile_size=256,
+ download_path=None,
+ ):
+ start, end = get_start_end_download_coords(bbox, zoom_level, tile_size)
+ download_imagery(
+ start,
+ end,
+ zoom_level,
+ base_path=download_path,
+ source=tms_url,
+ )
+ return download_path


+ def is_dir_empty(directory_path):
+ return not any(os.scandir(directory_path))


def request_rawdata(request_params):
"""will make call to galaxy API & provides response as json

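A usage sketch of the new download() wrapper defined above: it computes the start/end tile coordinates internally and then delegates to download_imagery(). The bbox values, TMS URL, and output directory below are placeholders for illustration, not values taken from this PR:

```python
import os

from core.utils import download, is_dir_empty

# Illustrative inputs only: the bbox (assumed [min_lon, min_lat, max_lon, max_lat]),
# the TMS URL, and the output directory are placeholders.
os.makedirs("training/input", exist_ok=True)

image_dir = download(
    bbox=[85.30, 27.70, 85.32, 27.72],
    zoom_level=20,
    tms_url="https://example.com/tiles/{z}/{x}/{y}",
    tile_size=256,
    download_path="training/input",
)

# download() returns the download_path it was given, so callers can check
# whether any tiles actually arrived.
if is_dir_empty(image_dir):
    raise RuntimeError("No images found")
```

Because the wrapper returns its download_path, the prediction view below can pass the result straight to is_dir_empty() instead of re-using temp_path.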
23 changes: 10 additions & 13 deletions backend/core/views.py
@@ -32,6 +32,7 @@
from login.permissions import IsOsmAuthenticated
from orthogonalizer import othogonalize_poly
from osmconflator import conflate_geojson
+ from .utils import download
from rest_framework import decorators, serializers, status, viewsets
from rest_framework.decorators import api_view
from rest_framework.exceptions import ValidationError
@@ -63,10 +64,7 @@
)
from .tasks import train_model
from .utils import (
- bbox,
- download_imagery,
get_dir_size,
- get_start_end_download_coords,
gpx_generator,
is_dir_empty,
process_rawdata,
@@ -539,22 +537,21 @@ def post(self, request, *args, **kwargs):
else source_img_in_dataset
)
zoom_level = deserialized_data["zoom_level"]
- start, end = get_start_end_download_coords(
- bbox, zoom_level, DEFAULT_TILE_SIZE
- )

temp_path = f"temp/{uuid.uuid4()}/"
- os.mkdir(temp_path)
+ os.makedirs(temp_path,exist_ok=True)
try:
- download_imagery(
- start,
- end,
+ download_image_path = download(
+ bbox,
zoom_level,
- base_path=temp_path,
- source=source,
+ tms_url=source,
+ tile_size=DEFAULT_TILE_SIZE,
+ download_path=temp_path,
)

prediction_output = f"{temp_path}/prediction/output"
print("Image Downloaded , Starting Inference")
- if is_dir_empty(temp_path):
+ if is_dir_empty(download_image_path):
return Response("No Images found", status=500)
start_time = time.time()
model_path = os.path.join(
2 changes: 2 additions & 0 deletions docker-compose-cpu.yml
@@ -36,6 +36,7 @@ services:
depends_on:
- redis
- postgres
network_mode: "host"

backend-worker:
build:
@@ -52,6 +53,7 @@
- backend-api
- redis
- postgres
network_mode: "host"

worker-dashboard:
image: mher/flower
2 changes: 2 additions & 0 deletions docker-compose.yml
@@ -36,6 +36,7 @@ services:
depends_on:
- redis
- postgres
network_mode: "host"

backend-worker:
build:
@@ -57,6 +58,7 @@
- backend-api
- redis
- postgres
network_mode: "host"

worker-dashboard:
image: mher/flower
10 changes: 9 additions & 1 deletion docs/Docker-installation.md
@@ -131,6 +131,7 @@ Docker Compose is created with redis , worker , postgis database , api and frontend

Frontend will be available on 5000 port , Backend will be on 8000 , Flower will be on 5500


10. Want to run your local tiles ?

You can use [titiler](https://github.com/developmentseed/titiler), [gdal2tiles](https://gdal.org/programs/gdal2tiles.html) or nginx to run your own TMS server, and add the following to docker compose in order to access your localhost through docker containers. Add those to API and Worker. Make sure you update the .env variable accordingly
@@ -183,4 +184,11 @@
DATABASE_URL=postgis://postgres:admin@localhost:5434/ai
CELERY_BROKER_URL="redis://localhost:6379/0"
CELERY_RESULT_BACKEND="redis://localhost:6379/0"
```
```
+ ### fAIr setup for CPU:
+
+ This is still being tested. Currently, the CPU version can be started with:
+
+ ```
+ docker compose -f docker-compose-cpu.yml up
+ ```
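Tying back to the local-tiles note earlier in this docs diff: the backend accepts any templated tile URL containing {z}/{x}/{y} (or {-y} for flipped-y schemes), so a pre-generated tile tree can be served locally and passed as source imagery. A minimal sketch using Python's built-in HTTP server as a stand-in for nginx or titiler; the tiles/ directory layout and port 8080 are assumptions:

```python
# Serve a pre-generated ./tiles/{z}/{x}/{y}.png tree on localhost so the backend
# can fetch it through a URL such as "http://localhost:8080/{z}/{x}/{y}.png".
# Directory layout and port are illustrative assumptions.
import functools
import http.server
import socketserver

PORT = 8080
Handler = functools.partial(http.server.SimpleHTTPRequestHandler, directory="tiles")

with socketserver.TCPServer(("", PORT), Handler) as httpd:
    print(f"Serving local tiles at http://localhost:{PORT}/{{z}}/{{x}}/{{y}}.png")
    httpd.serve_forever()
```

With network_mode: "host" added to the API and worker services in the compose files above, a localhost URL like this one is reachable from inside those containers.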
@@ -43,7 +43,7 @@ const AIModelEditor = (props) => {
const [description, setDescription] = useState("");
const [feedbackPopupOpen, setFeedbackPopupOpen] = React.useState(false);
const { accessToken } = useContext(AuthContext);
- const zoomLevels = [19, 20, 21];
+ const zoomLevels = [18, 19, 20, 21, 22];
const getModelById = async () => {
try {
const modelId = +id;