jonathan-roberts1 committed
Commit 67db036 · 1 Parent(s): f9740ac

Upload satin_class.py

Files changed (1)
  1. satin_class.py +201 -0
satin_class.py ADDED
@@ -0,0 +1,201 @@
+ """
+ Earlier draft implementations, kept here for reference only; the active
+ dataset-loading code is defined below this module docstring.
+ 
+ import datasets
+ import os
+ import pyarrow.parquet as pq
+ from PIL import Image
+ from io import BytesIO
+ import numpy as np
+ import pandas as pd
+ 
+ 
+ def load_data(data_dir):
+     parquet_file = [file for file in os.listdir(data_dir) if file.endswith('.parquet')][0]
+     print(parquet_file)
+     parquet_path = os.path.join(data_dir, parquet_file)
+ 
+     parquet_path = data_dir
+     table = pq.read_table(parquet_path)
+ 
+     for row in table.iterrecords():
+         image_bytes = row['image']
+         image = Image.open(BytesIO(image_bytes))
+         label = row['label']
+         yield image, label
+ 
+ 
+ class SATINConfig(datasets.BuilderConfig):
+ 
+     def __init__(self, name, description, data_url, class_names, **kwargs):
+ 
+         Args:
+             data_url: `string`, url to download the zip file from.
+             metadata_urls: dictionary with keys 'train' and 'validation' containing the archive metadata URLs
+             **kwargs: keyword arguments forwarded to super.
+ 
+         super(SATINConfig, self).__init__(version=datasets.Version("1.0.0"), **kwargs)
+         self.name = name
+         self.data_url = data_url
+         self.description = description
+         self.class_names = class_names
+ 
+ 
+ class SATIN(datasets.GeneratorBasedBuilder):
+     SATIN Images dataset
+ 
+     _SAT_4_NAMES = ['barren land', 'grassland', 'other', 'trees']
+     _SAT_6_NAMES = ['barren land', 'building', 'grassland', 'road', 'trees', 'water']
+ 
+     BUILDER_CONFIGS = [
+         SATINConfig(
+             name="SAT_4",
+             description="SAT_4.",
+             data_url="https://huggingface.co/datasets/jonathan-roberts1/SAT-4/tree/main/data/",  # train-00000-of-00001-e2dcb38bc165dfb0.parquet
+             class_names=_SAT_4_NAMES,
+             # metadata_urls={
+             #     "train": "https://link-to-breakfast-foods-train.txt",
+         ),
+         SATINConfig(
+             name="SAT_6",
+             description="SAT_6.",
+             data_url="https://huggingface.co/datasets/jonathan-roberts1/SAT-6/tree/main/data/",  # train-00000-of-00001-c47ada2c92f814d2.parquet
+             class_names=_SAT_6_NAMES,
+         )
+     ]
+ 
+     @property
+     def url_prefix(self):
+         return {
+             "SAT-4": "https://huggingface.co/datasets/jonathan-roberts1/SAT-4/tree/main/data/",  # train-00000-of-00001-e2dcb38bc165dfb0.parquet
+             "SAT-6": "https://huggingface.co/datasets/jonathan-roberts1/SAT-6/tree/main/data/",
+         }
+ 
+     def _info(self):
+         return datasets.DatasetInfo(
+             description=self.config.description,
+             features=datasets.Features(
+                 {
+                     "image": datasets.Image(),
+                     "label": datasets.ClassLabel(names=self.config.class_names),
+                 }
+             ),
+             supervised_keys=("image", "label"),
+             # homepage=_HOMEPAGE,
+             # citation=_CITATION,
+             # license=_LICENSE,
+             # task_templates=[ImageClassification(image_column="image", label_column="label")],
+         )
+ 
+     def _split_generators(self, dl_manager):
+         url = self.config.data_url
+         data_dir = dl_manager.download_and_extract(url)  # , use_auth_token=True)
+         print(data_dir)
+         return [
+             datasets.SplitGenerator(
+                 name=datasets.Split.TRAIN,
+                 gen_kwargs={"data_dir": data_dir},
+             ),
+         ]
+ 
+     def _generate_examples(self, data_dir):
+         # base_url = self.url_prefix[self.config.name]
+         file_url = self.config.data_url
+         use_auth_token = os.environ.get("HUGGINGFACE_TOKEN")
+ 
+         with NamedTemporaryFile() as file:
+             download(file_url, file.name, use_auth_token=use_auth_token)
+             df = pd.read_parquet(file.name)
+ 
+             for idx, row in df.iterrows():
+                 example = {
+                     "image": row["image"],
+                     "label": row["label"],
+                 }
+                 yield idx, example
+ 
+ 
+     # def _generate_examples(self, data_dir):
+     #     for idx, (image, label) in enumerate(load_data(data_dir)):
+     #         image_array = np.array(image)
+     #         yield idx, {"image": image_array, "label": label}
+ """
+ 
+ 
+ import datasets
+ 
+ 
+ class SATINConfig(datasets.BuilderConfig):
+     """BuilderConfig for the SATIN datasets.
+ 
+     Args:
+         name: `string`, name of the configuration (e.g. "SAT_4").
+         description: `string`, short description of the configuration.
+         data_url: `string`, Hugging Face Hub dataset repository id to load the data from.
+         class_names: list of class-label names for this configuration.
+         **kwargs: keyword arguments forwarded to `datasets.BuilderConfig`.
+     """
+ 
+     def __init__(self, name, description, data_url, class_names, **kwargs):
+         super().__init__(name=name, version=datasets.Version("1.0.0"), **kwargs)
+         self.data_url = data_url
+         self.description = description
+         self.class_names = class_names
+ 
+ 
+ class SATIN(datasets.GeneratorBasedBuilder):
+     """SATIN Images dataset."""
+ 
+     _SAT_4_NAMES = ['barren land', 'grassland', 'other', 'trees']
+     _SAT_6_NAMES = ['barren land', 'building', 'grassland', 'road', 'trees', 'water']
+ 
+     BUILDER_CONFIGS = [
+         SATINConfig(
+             name="SAT_4",
+             description="SAT-4 land-cover classification images (4 classes).",
+             data_url="jonathan-roberts1/SAT-4",  # https://huggingface.co/datasets/jonathan-roberts1/SAT-4/blob/main/data/train-00000-of-00001-e2dcb38bc165dfb0.parquet?raw=true
+             class_names=_SAT_4_NAMES,
+         ),
+         SATINConfig(
+             name="SAT_6",
+             description="SAT-6 land-cover classification images (6 classes).",
+             data_url="jonathan-roberts1/SAT-6",  # https://huggingface.co/datasets/jonathan-roberts1/SAT-6/blob/main/data/train-00000-of-00001-c47ada2c92f814d2.parquet?raw=true
+             class_names=_SAT_6_NAMES,
+         ),
+     ]
+ 
+     def _info(self):
+         return datasets.DatasetInfo(
+             description=self.config.description,
+             features=datasets.Features(
+                 {
+                     "image": datasets.Image(),
+                     "label": datasets.ClassLabel(names=self.config.class_names),
+                 }
+             ),
+             supervised_keys=("image", "label"),
+         )
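+ 
+     # Note: `datasets.ClassLabel(names=...)` assigns integer ids in the order
+     # of `class_names` (0..N-1), and accepts either the integer id or the
+     # exact class-name string when examples are yielded.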
+ 
+     def _split_generators(self, dl_manager):
+         # data_url is a Hugging Face Hub dataset repository id, so the data is
+         # pulled with `datasets.load_dataset` rather than downloaded manually
+         # via `dl_manager`.
+         dataset = datasets.load_dataset(self.config.data_url)
+         return [
+             datasets.SplitGenerator(
+                 name=datasets.Split.TRAIN,
+                 gen_kwargs={"dataset": dataset},
+             ),
+         ]
+ 
+     def _generate_examples(self, dataset):
+         # Iterate over the train split of the loaded dataset and yield the
+         # index together with each image and its class label.
+         train_split = dataset["train"]
+         for idx, row in enumerate(train_split):
+             yield idx, {"image": row["image"], "label": row["label"]}
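+ 
+     # Alternative sketch, for illustration only (not part of the active code
+     # path): the same examples could likely be produced by downloading the
+     # direct parquet URLs noted next to `data_url` above with
+     # `dl_manager.download(...)` in `_split_generators` and reading the file
+     # with pandas, along the lines of the earlier draft preserved in the
+     # module docstring. `data_path` below is a hypothetical argument holding
+     # that downloaded file path.
+     #
+     #     def _generate_examples(self, data_path):
+     #         df = pd.read_parquet(data_path)  # would require `import pandas as pd`
+     #         for idx, row in df.iterrows():
+     #             yield idx, {"image": row["image"], "label": row["label"]}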
+ 
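+ # Usage sketch (illustrative only; assumes this script is saved locally as
+ # satin_class.py, and uses the config names defined in BUILDER_CONFIGS above):
+ #
+ #     from datasets import load_dataset
+ #     dataset = load_dataset("path/to/satin_class.py", "SAT_4", split="train")
+ #     image, label = dataset[0]["image"], dataset[0]["label"]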