Skip to content

Commit

Permalink
Fixed data loaders and added an ingest timeout for multipolygons
Browse files — browse the repository at this point in the history
  • Loading branch information
czaloom committed Aug 13, 2024
1 parent 87933cf commit 49c989a
Show file tree
Hide file tree
Showing 3 changed files with 21 additions and 7 deletions.
2 changes: 2 additions & 0 deletions integration_tests/benchmarks/data/load_coco.py
Original file line number Diff line number Diff line change
Expand Up @@ -231,6 +231,7 @@ def create_bounding_boxes(
"is_instance"
] # type: ignore - dict typing
is True
and bitmask_to_bbox(mask_ids == segmentation["id"]) is not None
]


Expand Down Expand Up @@ -272,6 +273,7 @@ def create_bounding_polygons(
"is_instance"
] # type: ignore - dict typing
is True
and bitmask_to_polygon(mask_ids == segmentation["id"]) is not None
]


Expand Down
16 changes: 13 additions & 3 deletions integration_tests/benchmarks/data/run_yolo.py
Original file line number Diff line number Diff line change
Expand Up @@ -291,9 +291,14 @@ def parse_bitmask_into_multipolygon_raster_detection(
prediction = parse_bitmask_detection(
result=result, datum=datum, label_key=label_key, resample=resample
)
annotations = []
for annotation in prediction.annotations:
array = annotation.raster.array
annotation.raster = bitmask_to_multipolygon_raster(array)
multipolygon = bitmask_to_multipolygon_raster(array)
if multipolygon is not None:
annotation.raster = multipolygon
annotations.append(annotation)
prediction.annotations = annotations
return prediction


Expand All @@ -306,10 +311,15 @@ def parse_bitmask_into_bounding_polygon_detection(
prediction = parse_bitmask_detection(
result=result, datum=datum, label_key=label_key, resample=resample
)
annotations = []
for annotation in prediction.annotations:
array = annotation.raster.array
annotation.polygon = bitmask_to_polygon(array)
annotation.raster = None
polygon = bitmask_to_polygon(array)
if polygon is not None:
annotation.polygon = polygon
annotation.raster = None
annotations.append(annotation)
prediction.annotations = annotations
return prediction


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -89,10 +89,10 @@ def ingest_groundtruths(
elif len(chunks) < chunk_size:
continue

dataset.add_groundtruths(chunks)
dataset.add_groundtruths(chunks, timeout=30)
chunks = []
if chunks:
dataset.add_groundtruths(chunks)
dataset.add_groundtruths(chunks, timeout=30)


def ingest_predictions(
Expand Down Expand Up @@ -122,10 +122,10 @@ def ingest_predictions(
elif len(chunks) < chunk_size:
continue

model.add_predictions(dataset, chunks)
model.add_predictions(dataset, chunks, timeout=30)
chunks = []
if chunks:
model.add_predictions(dataset, chunks)
model.add_predictions(dataset, chunks, timeout=30)


def run_base_evaluation(dset: Dataset, model: Model):
Expand Down Expand Up @@ -292,6 +292,8 @@ def run_benchmarking_analysis(
for gt_type, gt_filename in groundtruths.items():
for pd_type, pd_filename in predictions.items():

print(gt_type, pd_type)

try:
dataset = Dataset.create(name="coco")
model = Model.create(name="yolo")
Expand Down

0 comments on commit 49c989a

Please sign in to comment.