Commit
·
b998535
1
Parent(s):
b721f0a
Add loading script
Browse files- oe_dataset.py +147 -0
oe_dataset.py
ADDED
|
@@ -0,0 +1,147 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# This file is part of the oe_dataset distribution (https://huggingface.co/datasets/ABC-iRobotics/oe_dataset).
|
| 3 |
+
# Copyright (c) 2023 ABC-iRobotics.
|
| 4 |
+
#
|
| 5 |
+
# This program is free software: you can redistribute it and/or modify
|
| 6 |
+
# it under the terms of the GNU General Public License as published by
|
| 7 |
+
# the Free Software Foundation, version 3.
|
| 8 |
+
#
|
| 9 |
+
# This program is distributed in the hope that it will be useful, but
|
| 10 |
+
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
| 11 |
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
| 12 |
+
# General Public License for more details.
|
| 13 |
+
#
|
| 14 |
+
# You should have received a copy of the GNU General Public License
|
| 15 |
+
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
| 16 |
+
#
|
| 17 |
+
"""OE dataset"""
|
| 18 |
+
|
| 19 |
+
from collections.abc import Sequence
|
| 20 |
+
from typing import Optional, Generator, Tuple, IO
|
| 21 |
+
|
| 22 |
+
import datasets
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
# ---- Constants ----

# BibTeX entry for the paper introducing the dataset (IEEE Trans. Cybernetics, 2023).
_CITATION = """\
@ARTICLE{10145828,
author={Károly, Artúr István and Tirczka, Sebestyén and Gao, Huijun and Rudas, Imre J. and Galambos, Péter},
journal={IEEE Transactions on Cybernetics},
title={Increasing the Robustness of Deep Learning Models for Object Segmentation: A Framework for Blending Automatically Annotated Real and Synthetic Data},
year={2023},
volume={},
number={},
pages={1-14},
doi={10.1109/TCYB.2023.3276485}}

"""

# Human-readable summary surfaced in DatasetInfo / on the hub page.
_DESCRIPTION = """\
An instance segmentation dataset for robotic manipulation in a tabletop environment.
The dataset incorporates real and synthetic images for testing sim-to-real model transfer after fine-tuning.
"""

# Canonical landing page of the dataset.
_HOMEPAGE = "https://huggingface.co/ABC-iRobotics/oe_dataset"

# License string reported in DatasetInfo.
_LICENSE = "GNU General Public License v3.0"

# Default version per base config name, used by OEDatasetConfig.__init__
# when the caller does not pin a version explicitly.
_LATEST_VERSIONS = {
    "all": "1.0.0",
    "real": "1.0.0",
    "synthetic": "1.0.0",
    "photoreal": "1.0.0",
    "random": "1.0.0",
}
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
# ---- OE dataset Configs ----
|
| 60 |
+
|
| 61 |
+
class OEDatasetConfig(datasets.BuilderConfig):
    """BuilderConfig for the OE dataset.

    Args:
        name: Base config name; must be a key of ``_LATEST_VERSIONS``
            (e.g. ``"photoreal"``) when *version* is not given. The final
            config name becomes ``"<name>_v<version>"``.
        imgs_urls: Base URLs of the image archives; ``/train.tar.gz`` and
            ``/val.tar.gz`` are appended to each entry.
        masks_urls: Base URLs of the mask archives, same layout as *imgs_urls*.
        version: Optional explicit version string; defaults to the latest
            known version for *name*.
        **kwargs: Forwarded to ``datasets.BuilderConfig`` (e.g. ``description``).

    Raises:
        KeyError: If *version* is None and *name* is not in ``_LATEST_VERSIONS``.
    """

    def __init__(self, name: str, imgs_urls: Sequence[str], masks_urls: Sequence[str], version: Optional[str] = None, **kwargs):
        _version = _LATEST_VERSIONS[name] if version is None else version
        super().__init__(version=datasets.Version(_version), name=f"{name}_v{_version}", **kwargs)
        # Per-split archive URL lists, derived once from the base URLs so
        # the train/val suffix logic lives in a single place.
        self._imgs_urls = {
            split: [f"{url}/{split}.tar.gz" for url in imgs_urls]
            for split in ("train", "val")
        }
        self._masks_urls = {
            split: [f"{url}/{split}.tar.gz" for url in masks_urls]
            for split in ("train", "val")
        }

    @property
    def features(self):
        """Feature schema: an input image and its segmentation mask."""
        return datasets.Features(
            {
                "image": datasets.Image(),
                "mask": datasets.Image(),
            }
        )

    @property
    def supervised_keys(self):
        """(input, target) column pair for supervised training."""
        return ("image", "mask")
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
# ---- OE dataset Loader ----
|
| 87 |
+
|
| 88 |
+
class OEDataset(datasets.GeneratorBasedBuilder):
    """OE dataset loader: yields (image, mask) pairs from paired tar archives."""

    BUILDER_CONFIG_CLASS = OEDatasetConfig
    BUILDER_CONFIGS = [
        OEDatasetConfig(
            name="photoreal",
            description="Photorealistic synthetic images",
            imgs_urls=["https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/synthetic/photoreal/imgs"],
            masks_urls=["https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/synthetic/photoreal/masks"],
        ),
    ]

    def _info(self):
        """Assemble DatasetInfo from module constants and the active config."""
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=self.config.features,
            supervised_keys=self.config.supervised_keys,
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation=_CITATION,
            version=self.config.version,
        )

    @staticmethod
    def _iter_archives(dl_manager, paths: Sequence[str]) -> Generator[Tuple[str, IO], None, None]:
        """Chain ``iter_archive`` over every downloaded archive path.

        ``dl_manager.download`` called with a list returns a *list* of local
        paths, while ``iter_archive`` operates on a single archive — so each
        path must be iterated in turn. (The original passed the whole list
        to ``iter_archive``, which fails for multi-archive configs.)
        """
        for path in paths:
            yield from dl_manager.iter_archive(path)

    def _split_generators(self, dl_manager):
        """Download the per-split image/mask archives and wire up generators."""
        train_imgs_paths = dl_manager.download(self.config._imgs_urls["train"])
        val_imgs_paths = dl_manager.download(self.config._imgs_urls["val"])

        train_masks_paths = dl_manager.download(self.config._masks_urls["train"])
        val_masks_paths = dl_manager.download(self.config._masks_urls["val"])

        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={
                    "images": self._iter_archives(dl_manager, train_imgs_paths),
                    "masks": self._iter_archives(dl_manager, train_masks_paths),
                },
            ),
            datasets.SplitGenerator(
                name=datasets.Split.VALIDATION,
                gen_kwargs={
                    "images": self._iter_archives(dl_manager, val_imgs_paths),
                    "masks": self._iter_archives(dl_manager, val_masks_paths),
                },
            ),
        ]

    def _generate_examples(
        self,
        images: Generator[Tuple[str, IO], None, None],
        masks: Generator[Tuple[str, IO], None, None],
    ):
        """Yield ``(key, example)`` pairs of image bytes and mask bytes.

        NOTE(review): assumes the image and mask archives list files in the
        same order; ``zip`` silently truncates on a length mismatch — confirm
        the archives are aligned when new splits are added.
        """
        for i, (img_info, mask_info) in enumerate(zip(images, masks)):
            img_file_path, img_file_obj = img_info
            mask_file_path, mask_file_obj = mask_info
            yield i, {
                "image": {"path": img_file_path, "bytes": img_file_obj.read()},
                "mask": {"path": mask_file_path, "bytes": mask_file_obj.read()},
            }
|