mspitzna committed
Commit 38b9e9c · 1 parent: 5d03d17

Refactor physicsgen.py to update dataset descriptions, versioning, and implement hf_hub_download for parquet file retrieval

physicsgen.py CHANGED (+24 -25)
```diff
@@ -1,9 +1,7 @@
 import os
-import csv
-from PIL import Image
 import datasets
-import pyarrow as pa
 import pyarrow.parquet as pq
+from huggingface_hub import hf_hub_download

 # Define configurations for each flavor.
 BUILDER_CONFIGS = [
@@ -19,12 +17,12 @@ BUILDER_CONFIGS = [
     ),
     datasets.BuilderConfig(
         name="sound_diffraction",
-        description="Physical dataset:
+        description="Physical dataset: diffraction variant",
         data_dir="sound_diffraction"
     ),
     datasets.BuilderConfig(
         name="sound_combined",
-        description="Physical dataset:
+        description="Physical dataset: combined variant",
         data_dir="sound_combined"
     ),
     datasets.BuilderConfig(
@@ -51,7 +49,7 @@ BUILDER_CONFIGS = [

 class MyPhysicalDataset(datasets.GeneratorBasedBuilder):
     BUILDER_CONFIGS = BUILDER_CONFIGS
-    VERSION = datasets.Version("1.0.
+    VERSION = datasets.Version("1.0.2")

     def _info(self):
         if self.config.name in ["sound_baseline", "sound_reflection", "sound_diffraction", "sound_combined"]:
@@ -59,7 +57,7 @@ class MyPhysicalDataset(datasets.GeneratorBasedBuilder):
                 "lat": datasets.Value("float"),
                 "long": datasets.Value("float"),
                 "db": datasets.Value("string"),
-                "soundmap": datasets.Image(),
+                "soundmap": datasets.Image(),  # Expects a dict: {"bytes": ...}
                 "osm": datasets.Image(),
                 "temperature": datasets.Value("int32"),
                 "humidity": datasets.Value("int32"),
@@ -77,7 +75,6 @@ class MyPhysicalDataset(datasets.GeneratorBasedBuilder):
                 "p1": datasets.Value("float"),
                 "p2": datasets.Value("float"),
                 "cx": datasets.Value("float"),
-                "distortion_path": datasets.Value("string"),
             })
         elif self.config.name in ["ball_roll", "ball_bounce"]:
             features = datasets.Features({
@@ -86,39 +83,42 @@ class MyPhysicalDataset(datasets.GeneratorBasedBuilder):
                 "GroundIncli": datasets.Value("float"),
                 "InputTime": datasets.Value("int32"),
                 "TargetTime": datasets.Value("int32"),
-                "input_image": datasets.Image(),
+                "input_image": datasets.Image(),  # Expects {"bytes": ...}
                 "target_image": datasets.Image(),
             })
         else:
             raise ValueError(f"Unknown config name: {self.config.name}")
         return datasets.DatasetInfo(
-            description="Multiple variant physical tasks dataset.",
+            description="Multiple variant physical tasks dataset stored as parquet files.",
             features=features,
         )

     def _split_generators(self, dl_manager):
-
-
+        # Use hf_hub_download to fetch the parquet files directly from the Hub.
+        repo_id = "mspitzna/physicsgen"  # Replace with your repo id if different.
+        train_file = hf_hub_download(repo_id=repo_id, filename=f"{self.config.data_dir}/train.parquet")
+        test_file = hf_hub_download(repo_id=repo_id, filename=f"{self.config.data_dir}/test.parquet")
+        eval_file = hf_hub_download(repo_id=repo_id, filename=f"{self.config.data_dir}/eval.parquet")
         return [
             datasets.SplitGenerator(
-
-
-
-
-
-
-
-
-
-
-
+                name=datasets.Split.TRAIN,
+                gen_kwargs={"parquet_file": train_file},
+            ),
+            datasets.SplitGenerator(
+                name=datasets.Split.TEST,
+                gen_kwargs={"parquet_file": test_file},
+            ),
+            datasets.SplitGenerator(
+                name=datasets.Split.VALIDATION,
+                gen_kwargs={"parquet_file": eval_file},
+            ),
         ]

     def _generate_examples(self, parquet_file):
         table = pq.read_table(parquet_file)
         examples = table.to_pylist()

-        # Wrap image bytes into the format expected by datasets.Image
+        # Wrap image bytes into the format expected by datasets.Image.
         if self.config.name in ["sound_baseline", "sound_reflection", "sound_diffraction", "sound_combined"]:
             for example in examples:
                 for key in ["soundmap", "osm", "soundmap_512"]:
@@ -129,6 +129,5 @@ class MyPhysicalDataset(datasets.GeneratorBasedBuilder):
             for key in ["input_image", "target_image"]:
                 if example.get(key) is not None and isinstance(example[key], bytes):
                     example[key] = {"bytes": example[key]}
-
         for idx, row in enumerate(examples):
             yield idx, row
```
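With this change the loading script resolves each configuration's parquet splits straight from the Hub, so the dataset can be consumed through the regular `datasets` API. The following is a minimal usage sketch, assuming the `mspitzna/physicsgen` repo id and the `sound_combined` config from `BUILDER_CONFIGS`; `trust_remote_code=True` is assumed to be needed because the repo ships this Python loading script.

```python
from datasets import load_dataset

# Minimal sketch: load one config of the dataset via the community loading script.
# "sound_combined" is one of the configs defined in BUILDER_CONFIGS above.
ds = load_dataset("mspitzna/physicsgen", name="sound_combined", trust_remote_code=True)

# Splits mirror the SplitGenerators in _split_generators: train, test, validation.
print(ds)

sample = ds["train"][0]
# Image columns such as "osm" and "soundmap" are declared as datasets.Image, so the
# {"bytes": ...} dicts produced in _generate_examples are decoded to PIL images.
print(sample["temperature"], sample["humidity"], sample["osm"].size)
```

Because `datasets.Image` does the decoding on access, the script itself no longer touches PIL, which is consistent with the removed `from PIL import Image` import.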
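For quickly inspecting a single split without going through `load_dataset`, a parquet file can be fetched the same way `_split_generators` does. This is only a sketch under the layout the diff implies, namely `<data_dir>/<split>.parquet` inside the dataset repo; note that a standalone `hf_hub_download` call targets model repos by default, so `repo_type="dataset"` is passed explicitly here.

```python
from huggingface_hub import hf_hub_download
import pyarrow.parquet as pq

# Sketch: fetch one split's parquet file and inspect it directly.
# Assumes the "<data_dir>/<split>.parquet" layout used in _split_generators.
path = hf_hub_download(
    repo_id="mspitzna/physicsgen",
    filename="sound_combined/train.parquet",
    repo_type="dataset",  # target the dataset repo rather than the default model repo
)
table = pq.read_table(path)
print(table.num_rows)
print(table.schema)  # image columns hold raw bytes before datasets.Image wraps them
```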