### This is an example of the script that will be run in the test environment.
### You can change the rest of the code to define and test your solution.
### However, you should not change the signature of the provided function.
### The script saves "submission.parquet" file in the current directory.
### You can use any additional files and subdirectories to organize your code.
import json
import os
from pathlib import Path
from typing import Dict

import numpy as np
import pandas as pd
from datasets import load_dataset
from tqdm import tqdm

def empty_solution(sample):
    '''Return a minimal valid solution, i.e. 2 vertices and 1 edge.'''
    return np.zeros((2, 3)), [(0, 1)]
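
# Illustrative only (not part of the original script): a drop-in replacement for
# empty_solution keeps the same signature and return types: an (N, 3) array of
# vertex coordinates and a list of (i, j) vertex-index pairs. `my_solution` is a
# hypothetical name used here purely to show the expected structure.
def my_solution(sample):
    # e.g. predict a unit square in the z=0 plane: 4 vertices, 4 edges
    vertices = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0], [0, 1, 0]], dtype=float)
    edges = [(0, 1), (1, 2), (2, 3), (3, 0)]
    return vertices, edges
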
class Sample(Dict):
    '''Dict wrapper whose repr shows array shapes and element types instead of
    full values, which keeps per-sample logging compact.'''

    def pick_repr_data(self, x):
        if hasattr(x, 'shape'):
            return x.shape
        if isinstance(x, (str, float, int)):
            return x
        if isinstance(x, list):
            return [type(x[0])] if len(x) > 0 else []
        return type(x)

    def __repr__(self):
        return str({k: self.pick_repr_data(v) for k, v in self.items()})
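
# Illustrative usage (hypothetical field names): wrapping a raw sample before printing,
# e.g. Sample({'image': np.zeros((512, 512, 3)), 'order_id': 7}), prints as
# {'image': (512, 512, 3), 'order_id': 7} instead of dumping the full array.
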
if __name__ == "__main__":
    print("------------ Loading dataset ------------")

    # Read runtime parameters (e.g. dataset name and auth token) from params.json.
    param_path = Path('params.json')
    print(param_path)
    with param_path.open() as f:
        params = json.load(f)
    print(params)

    # Debug output: list the working directory and the mounted data directory.
    # Note that os.system() only returns the command's exit status; the listings
    # themselves are written to stdout by the shell commands.
    print('pwd:')
    os.system('pwd')
    print(os.system('ls -lahtr'))
    print('/tmp/data/')
    print(os.system('ls -lahtrR /tmp/data/'))

    # Prefer the test-server data location; fall back to the local HF cache copy.
    data_path_test_server = Path('/tmp/data')
    data_path_local = Path().home() / '.cache/huggingface/datasets/usm3d___hoho25k_test_x/'
    if data_path_test_server.exists():
        data_path = data_path_test_server
    else:
        data_path = data_path_local
    print(data_path)

    # dataset = load_dataset(params['dataset'], trust_remote_code=True, use_auth_token=params['token'])

    # Collect validation/test shards: Arrow files and/or webdataset tar archives.
    data_files = {
        "validation": [str(p) for p in [*data_path.rglob('*validation*.arrow')] + [*data_path.rglob('*public*/**/*.tar')]],
        "test": [str(p) for p in [*data_path.rglob('*test*.arrow')] + [*data_path.rglob('*private*/**/*.tar')]],
    }
    print(data_files)

    # Try to load the raw Arrow shards first; if that fails (e.g. only tar shards
    # are present), fall back to the webdataset loader.
    try:
        dataset = load_dataset(
            "arrow",
            data_files=data_files,
            trust_remote_code=True,
            # streaming=True
        )
        print('load with arrow')
    except Exception:
        dataset = load_dataset(
            "webdataset",
            data_files=data_files,
            trust_remote_code=True,
            # streaming=True
        )
        print('load with webdataset')
    print(dataset, flush=True)
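
    # Illustrative access pattern (not in the original script): load_dataset with
    # per-split data_files returns a DatasetDict, so splits are indexed by name,
    # e.g. dataset['test'][0] yields a single sample dict.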

    print('------------ Now you can do your solution ---------------')
    solution = []
    for subset_name in dataset:
        for i, sample in enumerate(tqdm(dataset[subset_name])):
            # replace this with your solution
            print(Sample(sample), flush=True)
            print('------')
            pred_vertices, pred_edges = empty_solution(sample)
            solution.append({
                'order_id': sample['order_id'],
                'wf_vertices': pred_vertices.tolist(),
                'wf_edges': pred_edges,
            })

    print('------------ Saving results ---------------')
    sub = pd.DataFrame(solution, columns=["order_id", "wf_vertices", "wf_edges"])
    sub.to_parquet("submission.parquet")
    print("------------ Done ------------")