Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(opendataset): add dataloader for the CityscapesGTCoarse dataset #1167

Merged
merged 1 commit into from
Dec 21, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions docs/source/reference/api/opendataset.rst
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ tensorbay.opendataset
CCPD
CCPDGreen
CIHP
CityscapesGTCoarse
watsonvv marked this conversation as resolved.
Show resolved Hide resolved
CityscapesGTFine
COCO2017
COVIDChestXRay
Expand Down
6 changes: 3 additions & 3 deletions tensorbay/opendataset/Cityscapes/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,8 @@
#
# pylint: disable=invalid-name

"""Dataloader of the CityscapesGTFine dataset."""
"""Dataloaders of the CityscapesGTCoarse dataset and the CityscapesGTFine dataset."""

from tensorbay.opendataset.Cityscapes.loader import CityscapesGTFine
from tensorbay.opendataset.Cityscapes.loader import CityscapesGTCoarse, CityscapesGTFine

__all__ = ["CityscapesGTFine"]
__all__ = ["CityscapesGTCoarse", "CityscapesGTFine"]
132 changes: 103 additions & 29 deletions tensorbay/opendataset/Cityscapes/loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
#
# pylint: disable=invalid-name

"""Dataloader of the CityscapesGTFine dataset."""
"""Dataloaders of the CityscapesGTCoarse dataset and the CityscapesGTFine dataset."""

import json
import os
Expand All @@ -14,8 +14,77 @@
from tensorbay.dataset import Data, Dataset
from tensorbay.label import InstanceMask, LabeledPolygon, SemanticMask

DATASET_NAME = "CityscapesGTFine"
_SEGMENT_NAMES = {"test", "train", "val"}
DATASET_NAME_GTCOARSE = "CityscapesGTCoarse"
DATASET_NAME_GTFINE = "CityscapesGTFine"
watsonvv marked this conversation as resolved.
Show resolved Hide resolved

_SEGMENT_NAMES_GTCOARSE = ("train", "train_extra", "val")
_SEGMENT_NAMES_GTFINE = ("train", "test", "val")


def CityscapesGTCoarse(path: str) -> Dataset:
    """`CityscapesGTCoarse <https://www.cityscapes-dataset.com/>`_ dataset.

    The dataset directory is expected to be organized as::

        <path>
            leftImg8bit/
                train/
                    aachen/
                        aachen_000000_000019_leftImg8bit.png
                        ...
                    ...
                train_extra/
                    augsburg/
                        augsburg_000000_000019_leftImg8bit.png
                        ...
                    ...
                val/
                    frankfurt/
                        frankfurt_000000_000019_leftImg8bit.png
                        ...
                    ...
                ...
            gtCoarse/
                train/
                    aachen/
                        aachen_000000_000019_gtCoarse_instanceIds.png
                        aachen_000000_000019_gtCoarse_labelIds.png
                        aachen_000000_000019_gtCoarse_polygons.json
                        ...
                    ...
                train_extra/
                    augsburg/
                        augsburg_000000_000019_gtCoarse_instanceIds.png
                        augsburg_000000_000019_gtCoarse_labelIds.png
                        augsburg_000000_000019_gtCoarse_polygons.json
                        ...
                    ...
                val/
                    frankfurt/
                        frankfurt_000000_000019_gtCoarse_instanceIds.png
                        frankfurt_000000_000019_gtCoarse_labelIds.png
                        frankfurt_000000_000019_gtCoarse_polygons.json
                        ...
                    ...
                ...

    Arguments:
        path: The root directory of the dataset.

    Returns:
        Loaded :class:`~tensorbay.dataset.dataset.Dataset` instance.

    """
    root_dir = os.path.join(os.path.abspath(os.path.expanduser(path)))

    dataset = Dataset(DATASET_NAME_GTCOARSE)
    catalog_path = os.path.join(os.path.dirname(__file__), "catalog.json")
    dataset.load_catalog(catalog_path)

    # One segment per split; every "leftImg8bit" png gets its labels attached.
    for segment_name in _SEGMENT_NAMES_GTCOARSE:
        segment = dataset.create_segment(segment_name)
        image_pattern = os.path.join(root_dir, "leftImg8bit", segment_name, "*", "*.png")
        for image_path in glob(image_pattern):
            segment.append(_get_data(image_path, root_dir, segment_name, "gtCoarse"))
    return dataset


def CityscapesGTFine(path: str) -> Dataset:
Expand Down Expand Up @@ -74,34 +143,39 @@ def CityscapesGTFine(path: str) -> Dataset:
"""
root_path = os.path.join(os.path.abspath(os.path.expanduser(path)))

dataset = Dataset(DATASET_NAME)
dataset = Dataset(DATASET_NAME_GTFINE)
dataset.load_catalog(os.path.join(os.path.dirname(__file__), "catalog.json"))

for segment_name in _SEGMENT_NAMES:
for segment_name in _SEGMENT_NAMES_GTFINE:
segment = dataset.create_segment(segment_name)
for image_path in glob(os.path.join(root_path, "leftImg8bit", segment_name, "*", "*.png")):
city = os.path.basename(image_path).split("_", 1)[0]
image_prefix = os.path.basename(image_path).rsplit("_", 1)[0]
label_dir = os.path.join(root_path, "gtFine", segment_name, city)
data = Data(image_path)
# get semantic mask and instance mask
label = data.label
label.semantic_mask = SemanticMask(
os.path.join(label_dir, f"{image_prefix}_gtFine_labelIds.png")
)
label.instance_mask = InstanceMask(
os.path.join(label_dir, f"{image_prefix}_gtFine_instanceIds.png")
)
# get polygons
polygons: List[LabeledPolygon] = []
with open(
os.path.join(label_dir, f"{image_prefix}_gtFine_polygons.json"),
encoding="utf-8",
) as fp:
objects = json.load(fp)["objects"]
for obj in objects:
polygons.append(LabeledPolygon(obj["polygon"], category=obj["label"]))
label.polygon = polygons

segment.append(data)
segment.append(_get_data(image_path, root_path, segment_name, "gtFine"))
return dataset


def _get_data(image_path: str, root_path: str, segment_name: str, folder_name: str) -> Data:
    """Load one image together with its masks and polygon labels.

    Arguments:
        image_path: Path of the "leftImg8bit" image file.
        root_path: Root directory of the dataset.
        segment_name: Name of the segment the image belongs to.
        folder_name: Name of the label folder, "gtCoarse" or "gtFine".

    Returns:
        A :class:`~tensorbay.dataset.data.Data` instance with its semantic mask,
        instance mask and polygon labels attached.

    """
    filename = os.path.basename(image_path)
    # Filenames look like "<city>_<seq>_<frame>_leftImg8bit.png":
    # the city is the first "_"-separated field, the label-file prefix is
    # everything before the final "_leftImg8bit" suffix.
    city = filename.split("_", 1)[0]
    image_prefix = filename.rsplit("_", 1)[0]
    label_dir = os.path.join(root_path, folder_name, segment_name, city)
    data = Data(image_path)
    # Attach semantic mask and instance mask.
    label = data.label
    label.semantic_mask = SemanticMask(
        os.path.join(label_dir, f"{image_prefix}_{folder_name}_labelIds.png")
    )
    label.instance_mask = InstanceMask(
        os.path.join(label_dir, f"{image_prefix}_{folder_name}_instanceIds.png")
    )
    # Polygon annotations are stored in a JSON file beside the masks; close the
    # file before building the labels so the handle is not held longer than needed.
    with open(
        os.path.join(label_dir, f"{image_prefix}_{folder_name}_polygons.json"),
        encoding="utf-8",
    ) as fp:
        objects = json.load(fp)["objects"]
    label.polygon = [LabeledPolygon(obj["polygon"], category=obj["label"]) for obj in objects]

    return data
3 changes: 2 additions & 1 deletion tensorbay/opendataset/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
from tensorbay.opendataset.CarConnection import CarConnection
from tensorbay.opendataset.CCPD import CCPD, CCPDGreen
from tensorbay.opendataset.CIHP import CIHP
from tensorbay.opendataset.Cityscapes import CityscapesGTFine
from tensorbay.opendataset.Cityscapes import CityscapesGTCoarse, CityscapesGTFine
from tensorbay.opendataset.COCO2017 import COCO2017
from tensorbay.opendataset.CoinImage import CoinImage
from tensorbay.opendataset.CompCars import CompCars
Expand Down Expand Up @@ -81,6 +81,7 @@
"CCPD",
"CCPDGreen",
"CIHP",
"CityscapesGTCoarse",
"CityscapesGTFine",
"COCO2017",
"CoinImage",
Expand Down