ssolito committed on
Commit e270c33 · verified · 1 Parent(s): 6c15aad

Upload corts_valencianes_asr_a.py with huggingface_hub

Files changed (1):
  1. corts_valencianes_asr_a.py +18 -16
corts_valencianes_asr_a.py CHANGED
@@ -107,7 +107,7 @@ class CortsValencianesASR(datasets.GeneratorBasedBuilder):
         metadata_other_train_short=dl_manager.download_and_extract(_METADATA_OTHER_TRAIN_SHORT)
         metadata_other_test_short=dl_manager.download_and_extract(_METADATA_OTHER_TEST_SHORT)
         metadata_other_dev_short=dl_manager.download_and_extract(_METADATA_OTHER_DEV_SHORT)
-
+
         tars_clean_train_short=dl_manager.download_and_extract(_TARS_CLEAN_TRAIN_SHORT)
         tars_clean_test_short=dl_manager.download_and_extract(_TARS_CLEAN_TEST_SHORT)
         tars_clean_dev_short=dl_manager.download_and_extract(_TARS_CLEAN_DEV_SHORT)
@@ -132,38 +132,40 @@ class CortsValencianesASR(datasets.GeneratorBasedBuilder):
         tars_other_train_long=dl_manager.download_and_extract(_TARS_OTHER_TRAIN_LONG)
         tars_other_test_long=dl_manager.download_and_extract(_TARS_OTHER_TEST_LONG)
         tars_other_dev_long=dl_manager.download_and_extract(_TARS_OTHER_DEV_LONG)
-
+
         hash_tar_files=defaultdict(dict)
         with open(tars_clean_train_short,'r') as f:
-            hash_tar_files['clean_train_short']=[path.replace('\n','') for path in f]
+            hash_tar_files['clean_train_short']=[path.strip() for path in f if path.strip()]
         with open(tars_clean_test_short,'r') as f:
-            hash_tar_files['clean_test_short']=[path.replace('\n','') for path in f]
+            hash_tar_files['clean_test_short']=[path.strip() for path in f if path.strip()]
         with open(tars_clean_dev_short,'r') as f:
-            hash_tar_files['clean_dev_short']=[path.replace('\n','') for path in f]
+            hash_tar_files['clean_dev_short']=[path.strip() for path in f if path.strip()]
 
         with open(tars_other_train_short,'r') as f:
-            hash_tar_files['other_train_short']=[path.replace('\n','') for path in f]
+            hash_tar_files['other_train_short']=[path.strip() for path in f if path.strip()]
         with open(tars_other_test_short,'r') as f:
-            hash_tar_files['other_test_short']=[path.replace('\n','') for path in f]
+            hash_tar_files['other_test_short']=[path.strip() for path in f if path.strip()]
         with open(tars_other_dev_short,'r') as f:
-            hash_tar_files['other_dev_short']=[path.replace('\n','') for path in f]
+            hash_tar_files['other_dev_short']=[path.strip() for path in f if path.strip()]
 
 
         with open(tars_clean_train_long,'r') as f:
-            hash_tar_files['clean_train_long']=[path.replace('\n','') for path in f]
+            hash_tar_files['clean_train_long']=[path.strip() for path in f if path.strip()]
         with open(tars_clean_test_long,'r') as f:
-            hash_tar_files['clean_test_long']=[path.replace('\n','') for path in f]
+            hash_tar_files['clean_test_long']=[path.strip() for path in f if path.strip()]
         with open(tars_clean_dev_long,'r') as f:
-            hash_tar_files['clean_dev_long']=[path.replace('\n','') for path in f]
+            hash_tar_files['clean_dev_long']=[path.strip() for path in f if path.strip()]
 
         with open(tars_other_train_long,'r') as f:
-            hash_tar_files['other_train_long']=[path.replace('\n','') for path in f]
+            hash_tar_files['other_train_long']=[path.strip() for path in f if path.strip()]
         with open(tars_other_test_long,'r') as f:
-            hash_tar_files['other_test_long']=[path.replace('\n','') for path in f]
+            hash_tar_files['other_test_long']=[path.strip() for path in f if path.strip()]
         with open(tars_other_dev_long,'r') as f:
-            hash_tar_files['other_dev_long']=[path.replace('\n','') for path in f]
+            hash_tar_files['other_dev_long']=[path.strip() for path in f if path.strip()]
+
 
-        hash_meta_paths={"clean_train_short":metadata_clean_train_short,
+        hash_meta_paths={
+                "clean_train_short":metadata_clean_train_short,
                 "clean_test_short":metadata_clean_test_short,
                 "clean_dev_short":metadata_clean_dev_short,
                 "other_train_short":metadata_other_train_short,
@@ -175,7 +177,7 @@ class CortsValencianesASR(datasets.GeneratorBasedBuilder):
                 "other_train_long":metadata_other_train_long,
                 "other_test_long":metadata_other_test_long,
                 "other_dev_long":metadata_other_dev_long}
-
+
         audio_paths = dl_manager.download(hash_tar_files)
 
         splits=["clean_train_short","clean_test_short","clean_dev_short","other_train_short","other_test_short","other_dev_short","clean_train_long","clean_test_long","clean_dev_long","other_train_long","other_test_long","other_dev_long"]