blumenstiel committed
Commit b0a124e · verified · 1 Parent(s): 92941a3

Update terramesh.py

Files changed (1)
  1. terramesh.py +63 -5
terramesh.py CHANGED
@@ -25,6 +25,7 @@ import fsspec
 import braceexpand
 import numpy as np
 import albumentations
+import warnings
 import webdataset as wds
 from collections.abc import Callable, Iterable
 from torch.utils.data._utils.collate import default_collate
@@ -46,6 +47,31 @@ split_files = {
     }
 }
 
+statistics = {
+    "mean": {
+        "S2L1C": [2357.090, 2137.398, 2018.799, 2082.998, 2295.663, 2854.548, 3122.860, 3040.571, 3306.491, 1473.849,
+                  506.072, 2472.840, 1838.943],
+        "S2L2A": [1390.461, 1503.332, 1718.211, 1853.926, 2199.116, 2779.989, 2987.025, 3083.248, 3132.235, 3162.989,
+                  2424.902, 1857.665],
+        "S2RGB": [110.349, 99.507, 75.843],
+        "S1GRD": [-12.577, -20.265],
+        "S1RTC": [-10.93, -17.329],
+        "NDVI": [0.327],
+        "DEM": [651.663],
+    },
+    "std": {
+        "S2L1C": [1673.639, 1722.641, 1602.205, 1873.138, 1866.055, 1779.839, 1776.496, 1724.114, 1771.041, 1079.786,
+                  512.404, 1340.879, 1172.435],
+        "S2L2A": [2131.157, 2163.666, 2059.311, 2152.477, 2105.179, 1912.773, 1842.326, 1893.568, 1775.656, 1814.907,
+                  1436.282, 1336.155],
+        "S2RGB": [69.905, 53.708, 53.378],
+        "S1GRD": [5.179, 5.872],
+        "S1RTC": [4.391, 4.459],
+        "NDVI": [0.322],
+        "DEM": [928.168]
+    }
+}
+
 
 def build_terramesh_dataset(
     path: str = "https://huggingface.co/datasets/ibm-esa-geospatial/TerraMesh/resolve/main/",
@@ -54,6 +80,7 @@ def build_terramesh_dataset(
     urls: str | None = None,
     batch_size: int = 8,
     return_metadata: bool = False,
+    shuffle: bool = True,
     *args, **kwargs,
 ):
     if len(modalities) == 1:
@@ -65,6 +92,7 @@ def build_terramesh_dataset(
             urls=urls,
             batch_size=batch_size,
             return_metadata=return_metadata,
+            shuffle=shuffle,
             *args, **kwargs
         )
         return dataset
@@ -78,6 +106,7 @@ def build_terramesh_dataset(
             urls=urls,
             batch_size=batch_size,
             return_metadata=return_metadata,
+            shuffle=shuffle,
             *args, **kwargs,
         )
         return dataset
@@ -136,6 +165,7 @@ def build_wds_dataset(
     urls: str | None = None,
     batch_size: int = 8,
     transform: Callable = None,
+    shuffle: bool = True,
    return_metadata: bool = False,
     *args, **kwargs
 ):
@@ -153,7 +183,7 @@ def build_wds_dataset(
         [os.path.join(path, split, modality, f) for f in files]
     )
 
-    kwargs["shardshuffle"] = kwargs.get("shardshuffle", 100)  # Shuffle shard by default
+    kwargs["shardshuffle"] = kwargs.get("shardshuffle", 100) * shuffle  # Shuffle shard
 
     # Build dataset
     dataset = wds.WebDataset(urls, *args, **kwargs)
@@ -184,6 +214,7 @@ def build_multimodal_dataset(
     urls: str | None = None,
     batch_size: int = 8,
     transform: Callable = None,
+    shuffle: bool = True,
     return_metadata: bool = False,
     *args, **kwargs
 ):
@@ -205,16 +236,23 @@ def build_multimodal_dataset(
         urls = (os.path.join(path, split, majortom_mod, split_files["majortom"][split][0])
                 + "::" + os.path.join(path, split, ssl4eos12_mod, split_files["ssl4eos12"][split][0]))
 
-    dataset = build_datapipeline(urls, transform, batch_size, return_metadata, *args, **kwargs)
+    dataset = build_datapipeline(urls, transform, batch_size, shuffle, return_metadata, *args, **kwargs)
     return dataset
 
 
-def build_datapipeline(urls, transform, batch_size, return_metadata, *args, **kwargs):
+def build_datapipeline(urls, transform, batch_size, shuffle, return_metadata, *args, **kwargs):
+    shardshuffle = kwargs.get("shardshuffle", 100) * shuffle  # Shuffle shard
+    deterministic = kwargs.get("deterministic", False)
+    seed = kwargs.get("seed", 0)
+
     datapipeline = wds.DataPipeline(
         # Infinitely sample shards from the shard list with replacement. Each worker is seeded independently.
-        wds.ResampledShards(urls),
+        (
+            wds.ResampledShards(urls, deterministic=deterministic, seed=seed)
+            if shuffle else wds.SimpleShardList(urls)
+        ),
         multi_tarfile_samples,  # Extract individual samples from multi-modal tar files
-        wds.shuffle(100),  # Shuffle with a buffer of given size
+        wds.shuffle(shardshuffle, seed=seed),  # Shuffle with a buffer of given size
         (
             wds.map(zarr_metadata_decoder)
            if return_metadata
@@ -420,3 +458,23 @@ class MultimodalTransforms:
             data[modality] = self.non_image_transforms(data[modality])
 
         return data
+
+
+class MultimodalNormalize(Callable):
+    def __init__(self, mean: dict[str, list[float]], std: dict[str, list[float]]):
+        super().__init__()
+        self.mean = mean
+        self.std = std
+
+    def __call__(self, **batch):
+        for m in self.mean.keys():
+            if m not in batch.keys():
+                continue
+            batch[m] = (batch[m] - self.mean[m]) / self.std[m]
+        return batch
+
+    def add_targets(self, targets):
+        """
+        Required by albumentations
+        """
+        pass
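
For context, a minimal usage sketch of the changes above (not part of the commit): it assumes terramesh.py is importable and that `modalities` and `split` are the argument names used in the rest of the file (only `modalities` is visible in this diff). It shows the new `shuffle` flag, which switches the pipeline to wds.SimpleShardList and disables the sample-level shuffle buffer, and the new `MultimodalNormalize` transform built from the module-level `statistics` dict.

# Hypothetical usage sketch; argument names `modalities` and `split` are assumptions.
from terramesh import build_terramesh_dataset, MultimodalNormalize, statistics

# Per-modality standardization using the mean/std values added in this commit.
normalize = MultimodalNormalize(mean=statistics["mean"], std=statistics["std"])

# With shuffle=False, shards come from wds.SimpleShardList instead of
# wds.ResampledShards and the shuffle buffer size becomes 0, so iteration
# order is fixed, e.g. for validation.
dataset = build_terramesh_dataset(
    modalities=["S2L2A", "S1GRD"],
    split="val",
    batch_size=8,
    shuffle=False,
)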