Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

update to titiler-pgstac 0.8.0 #238

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ repos:
language_version: python

- repo: https://github.com/PyCQA/flake8
rev: 3.8.3
rev: 6.1.0
hooks:
- id: flake8
language_version: python
Expand Down
6 changes: 3 additions & 3 deletions raster_api/runtime/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
FROM public.ecr.aws/sam/build-python3.9:latest
FROM public.ecr.aws/sam/build-python3.11:latest

WORKDIR /tmp

COPY raster_api/runtime /tmp/raster
RUN pip install "mangum>=0.14,<0.15" /tmp/raster["psycopg-binary"] -t /asset --no-binary pydantic
RUN pip install "mangum>=0.14,<0.15" /tmp/raster["psycopg-binary"] -t /asset --no-binary pydantic
RUN rm -rf /tmp/raster

# # Reduce package size and remove useless files
Expand All @@ -15,4 +15,4 @@ RUN rm -rdf /asset/numpy/doc/ /asset/boto3* /asset/botocore* /asset/bin /asset/g

COPY raster_api/runtime/handler.py /asset/handler.py

CMD ["echo", "hello world"]
CMD ["echo", "hello world"]
8 changes: 3 additions & 5 deletions raster_api/runtime/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,12 +6,10 @@
long_description = f.read()

inst_reqs = [
"titiler.pgstac==0.2.3",
"titiler.application>=0.10,<0.11",
"importlib_resources>=1.1.0;python_version<='3.9'", # https://github.com/cogeotiff/rio-tiler/pull/379
"titiler.pgstac==0.8.0",
"titiler.extensions[cogeo]>=0.15.0,<0.16",
"aws_xray_sdk>=2.6.0,<3",
"aws-lambda-powertools>=1.18.0",
"pydantic<2",
]

extra_reqs = {
Expand All @@ -26,7 +24,7 @@
setup(
name="veda.raster_api",
description="",
python_requires=">=3.7",
python_requires=">=3.8",
packages=find_namespace_packages(exclude=["tests*"]),
package_data={"src": ["templates/*.html"]},
include_package_data=True,
Expand Down
45 changes: 45 additions & 0 deletions raster_api/runtime/src/algorithms.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
"""veda custom algorithms"""

import math

import numpy
from rio_tiler.models import ImageData

from titiler.core.algorithm import Algorithms
from titiler.core.algorithm.base import BaseAlgorithm

# https://github.com/cogeotiff/rio-tiler/blob/master/rio_tiler/reader.py#L35-L37

# From eoAPI datasetparams edl_auth branch https://github.com/NASA-IMPACT/eoAPI/blob/edl_auth/src/eoapi/raster/eoapi/raster/datasetparams.py


class SWIR(BaseAlgorithm):
    """SWIR Custom Algorithm.

    Log-scales shortwave-infrared pixel values into the display range:
    values whose log is at or below ``low_threshold`` are pinned to
    ``low_value``, values at or above ``high_threshold`` are pinned to
    ``high_value``, and everything in between is stretched linearly
    across ``[low_threshold, high_threshold]``.
    """

    low_value: float = math.e
    high_value: float = 255
    low_threshold: float = math.log(1000)
    high_threshold: float = math.log(7500)

    def __call__(self, img: ImageData) -> ImageData:
        """Apply processing."""
        scaled = numpy.log(img.array)

        # Pin everything outside the threshold band to the sentinel values.
        scaled[numpy.where(scaled <= self.low_threshold)] = self.low_value
        scaled[numpy.where(scaled >= self.high_threshold)] = self.high_value

        # Pixels untouched above lie strictly between the sentinels;
        # stretch them linearly over the threshold band.
        span = self.high_threshold - self.low_threshold
        mid = numpy.where((scaled > self.low_value) & (scaled < self.high_value))
        scaled[mid] = self.high_value * (scaled[mid] - self.low_threshold) / span

        img.array = scaled.astype("uint8")
        return img


# Registry of the custom algorithms this API exposes; the key is the
# value accepted by titiler's `algorithm` query parameter.
algorithms = Algorithms(
    {
        "swir": SWIR,
    }
)

# FastAPI dependency that resolves the requested algorithm (wired into the
# tiler factories as `post_process=PostProcessParams`).
PostProcessParams = algorithms.dependency
108 changes: 44 additions & 64 deletions raster_api/runtime/src/app.py
Original file line number Diff line number Diff line change
@@ -1,124 +1,114 @@
"""TiTiler+PgSTAC FastAPI application."""
import logging
from contextlib import asynccontextmanager

from aws_lambda_powertools.metrics import MetricUnit
from rio_cogeo.cogeo import cog_info as rio_cogeo_info
from rio_cogeo.models import Info
from src.algorithms import PostProcessParams
from src.config import ApiSettings
from src.datasetparams import DatasetParams
from src.factory import MultiBaseTilerFactory
from src.dependencies import ItemPathParams
from src.extensions import stacViewerExtension
from src.monitoring import LoggerRouteHandler, logger, metrics, tracer
from src.version import __version__ as veda_raster_version

from fastapi import APIRouter, Depends, FastAPI, Query
from fastapi import APIRouter, FastAPI
from starlette.middleware.cors import CORSMiddleware
from starlette.requests import Request
from starlette.responses import HTMLResponse
from starlette.templating import Jinja2Templates
from starlette_cramjam.middleware import CompressionMiddleware
from titiler.core.dependencies import DatasetPathParams
from titiler.core.errors import DEFAULT_STATUS_CODES, add_exception_handlers
from titiler.core.factory import TilerFactory, TMSFactory
from titiler.core.factory import MultiBaseTilerFactory, TilerFactory, TMSFactory
from titiler.core.middleware import CacheControlMiddleware
from titiler.core.resources.enums import OptionalHeader
from titiler.core.resources.responses import JSONResponse
from titiler.extensions import cogValidateExtension, cogViewerExtension
from titiler.mosaic.errors import MOSAIC_STATUS_CODES
from titiler.pgstac.db import close_db_connection, connect_to_db
from titiler.pgstac.dependencies import ItemPathParams
from titiler.pgstac.factory import MosaicTilerFactory
from titiler.pgstac.reader import PgSTACReader

try:
from importlib.resources import files as resources_files # type: ignore
except ImportError:
# Try backported to PY<39 `importlib_resources`.
from importlib_resources import files as resources_files # type: ignore

from .monitoring import LoggerRouteHandler, logger, metrics, tracer

logging.getLogger("botocore.credentials").disabled = True
logging.getLogger("botocore.utils").disabled = True
logging.getLogger("rio-tiler").setLevel(logging.ERROR)

settings = ApiSettings()
templates = Jinja2Templates(directory=str(resources_files(__package__) / "templates")) # type: ignore


if settings.debug:
optional_headers = [OptionalHeader.server_timing, OptionalHeader.x_assets]
else:
optional_headers = []


@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI Lifespan.

    Everything before ``yield`` runs at application startup and
    everything after it runs at shutdown.
    """
    # Create Connection Pool
    await connect_to_db(app, settings=settings.load_postgres_settings())
    yield
    # Close the Connection Pool
    await close_db_connection(app)


path_prefix = settings.path_prefix
app = FastAPI(
title=settings.name,
version=veda_raster_version,
openapi_url=f"{path_prefix}/openapi.json",
docs_url=f"{path_prefix}/docs",
lifespan=lifespan,
)

# router to be applied to all titiler route factories (improves logs with FastAPI context)
router = APIRouter(route_class=LoggerRouteHandler)
add_exception_handlers(app, DEFAULT_STATUS_CODES)
add_exception_handlers(app, MOSAIC_STATUS_CODES)


# Custom PgSTAC mosaic tiler
###############################################################################
# /mosaic - PgSTAC Mosaic titiler endpoint
###############################################################################
mosaic = MosaicTilerFactory(
router_prefix=f"{path_prefix}/mosaic",
add_mosaic_list=settings.enable_mosaic_search,
optional_headers=optional_headers,
environment_dependency=settings.get_gdal_config,
dataset_dependency=DatasetParams,
post_process=PostProcessParams,
router=APIRouter(route_class=LoggerRouteHandler),
)
app.include_router(mosaic.router, prefix=f"{path_prefix}/mosaic", tags=["Mosaic"])
# TODO
# prefix will be replaced by `/mosaics/{search_id}` in titiler-pgstac 0.9.0

# Custom STAC titiler endpoint (not added to the openapi docs)
###############################################################################
# /stac - Custom STAC titiler endpoint
###############################################################################
stac = MultiBaseTilerFactory(
reader=PgSTACReader,
path_dependency=ItemPathParams,
optional_headers=optional_headers,
router_prefix=f"{path_prefix}/stac",
environment_dependency=settings.get_gdal_config,
router=APIRouter(route_class=LoggerRouteHandler),
extensions=[
stacViewerExtension(),
],
)
app.include_router(stac.router, tags=["Items"], prefix=f"{path_prefix}/stac")
# TODO
# in titiler-pgstac we replaced the prefix to `/collections/{collection_id}/items/{item_id}`

###############################################################################
# /cog - External Cloud Optimized GeoTIFF endpoints
###############################################################################
cog = TilerFactory(
router_prefix=f"{path_prefix}/cog",
optional_headers=optional_headers,
environment_dependency=settings.get_gdal_config,
router=APIRouter(route_class=LoggerRouteHandler),
extensions=[
cogValidateExtension(),
cogViewerExtension(),
],
)


@cog.router.get(
    "/validate",
    response_model=Info,
    response_class=JSONResponse,
)
def cog_validate(
    src_path: str = Depends(DatasetPathParams),
    strict: bool = Query(False, description="Treat warnings as errors"),
):
    """Validate a COG.

    Runs rio-cogeo's `cog_info` on the dataset at ``src_path`` using this
    API's GDAL environment; with ``strict=True`` warnings are treated as
    errors.
    """
    return rio_cogeo_info(src_path, strict=strict, config=settings.get_gdal_config())


@cog.router.get("/viewer", response_class=HTMLResponse)
def cog_demo(request: Request):
"""COG Viewer."""
return templates.TemplateResponse(
name="viewer.html",
context={
"request": request,
"tilejson_endpoint": cog.url_for(request, "tilejson"),
"info_endpoint": cog.url_for(request, "info"),
"statistics_endpoint": cog.url_for(request, "statistics"),
},
media_type="text/html",
)


app.include_router(
cog.router, tags=["Cloud Optimized GeoTIFF"], prefix=f"{path_prefix}/cog"
)
Expand Down Expand Up @@ -174,8 +164,10 @@ async def add_correlation_id(request: Request, call_next):
except KeyError:
# If empty, use uuid
corr_id = "local"

# Add correlation id to logs
logger.set_correlation_id(corr_id)

# Add correlation id to traces
tracer.put_annotation(key="correlation_id", value=corr_id)

Expand All @@ -192,15 +184,3 @@ async def validation_exception_handler(request, err):
metrics.add_metric(name="UnhandledExceptions", unit=MetricUnit.Count, value=1)
logger.exception("Unhandled exception")
return JSONResponse(status_code=500, content={"detail": "Internal Server Error"})


@app.on_event("startup")
async def startup_event() -> None:
"""Connect to database on startup."""
await connect_to_db(app, settings=settings.load_postgres_settings())


@app.on_event("shutdown")
async def shutdown_event() -> None:
"""Close database connection."""
await close_db_connection(app)
43 changes: 24 additions & 19 deletions raster_api/runtime/src/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,10 @@
from typing import Optional

import boto3
import pydantic
from pydantic import BaseSettings, Field
from pydantic import Field, field_validator
from pydantic_settings import BaseSettings
from rasterio.session import AWSSession
from typing_extensions import Annotated

from titiler.pgstac.settings import PostgresSettings

Expand Down Expand Up @@ -58,9 +59,15 @@ class ApiSettings(BaseSettings):
# MosaicTiler settings
enable_mosaic_search: bool = False

pgstac_secret_arn: Optional[str]
pgstac_secret_arn: Optional[str] = None

@pydantic.validator("cors_origins")
model_config = {
"env_file": ".env",
"extra": "ignore",
"env_prefix": "VEDA_RASTER_",
}

@field_validator("cors_origins")
def parse_cors_origin(cls, v):
"""Parse CORS origins."""
return [origin.strip() for origin in v.split(",")]
Expand All @@ -74,21 +81,25 @@ def load_postgres_settings(self) -> "PostgresSettings":
postgres_user=secret["username"],
postgres_pass=secret["password"],
postgres_host=secret["host"],
postgres_port=str(secret["port"]),
postgres_port=int(secret["port"]),
postgres_dbname=secret["dbname"],
)
else:
return PostgresSettings()

data_access_role_arn: Optional[str] = Field(
None,
description="Resource name of role permitting access to specified external S3 buckets",
)
data_access_role_arn: Annotated[
Optional[str],
Field(
description="Resource name of role permitting access to specified external S3 buckets"
),
] = None

export_assume_role_creds_as_envs: Optional[bool] = Field(
False,
description="enables 'get_gdal_config' flow to export AWS credentials as os env vars",
)
export_assume_role_creds_as_envs: Annotated[
bool,
Field(
description="enables 'get_gdal_config' flow to export AWS credentials as os env vars",
),
] = False

def get_gdal_config(self):
"""return default aws session config or assume role data_access_role_arn credentials session"""
Expand Down Expand Up @@ -131,9 +142,3 @@ def get_gdal_config(self):
else:
# Use the default role of this lambda
return {}

class Config:
"""model config"""

env_file = ".env"
env_prefix = "VEDA_RASTER_"
Loading
Loading