Commit 0e754d0 · verified · 1 parent: 2cf1b81
Committed by tommasobonomo

Upload bookcoref.py with huggingface_hub

Files changed (1):
  1. bookcoref.py +34 -31
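The commit message indicates the loading script was pushed with the huggingface_hub client. A minimal sketch of such an upload, assuming the standard HfApi.upload_file call (the repo_id below is a placeholder, not taken from this page):

from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="bookcoref.py",    # local copy of the loading script
    path_in_repo="bookcoref.py",       # destination path inside the dataset repo
    repo_id="<namespace>/bookcoref",   # placeholder: the actual repo id is not shown on this page
    repo_type="dataset",
    commit_message="Upload bookcoref.py with huggingface_hub",
)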
bookcoref.py CHANGED
@@ -19,7 +19,7 @@ import sys
 from collections import defaultdict
 from multiprocessing import Pool, cpu_count
 from pathlib import Path
-from typing import Generator, Iterable, Literal, Mapping
+from typing import Generator, Iterable, Literal, Mapping, Union
 
 import datasets
 import spacy
@@ -50,9 +50,25 @@ _HOMEPAGE = ""
 
 _LICENSE = "CC BY-NC-SA 4.0 License"
 
-_URLS = {
-    "full": "https://gutenberg.org/ebooks",
-    "splitted": "https://gutenberg.org/ebooks",
+_LOCAL_DATA_DIR = Path("bookcoref_annotations")
+
+_LOCAL_FILES = {
+    "full": [
+        _LOCAL_DATA_DIR / "full" / "train.jsonl",
+        _LOCAL_DATA_DIR / "full" / "validation.jsonl",
+        _LOCAL_DATA_DIR / "full" / "test.jsonl",
+    ],
+    "splitted": [
+        _LOCAL_DATA_DIR / "splitted" / "train_splitted.jsonl",
+        _LOCAL_DATA_DIR / "splitted" / "validation_splitted.jsonl",
+        _LOCAL_DATA_DIR / "splitted" / "test_splitted.jsonl",
+    ],
+    "delta": [
+        _LOCAL_DATA_DIR / "gutenberg_delta.json",
+    ],
+    "lengths": [
+        _LOCAL_DATA_DIR / "gutenberg_lengths.json",
+    ],
 }
 
 
@@ -141,43 +157,28 @@ class BookCoref(datasets.GeneratorBasedBuilder):
 
         return logger
 
-    def _load_local_data(
-        self,
-    ) -> dict[Literal["train", "validation", "test"], dict]:
-        # Custom method to load local data files
-        if self.config.name == "full":
-            data_dir = self.LOCAL_DATA_DIR / "full"
-        elif self.config.name == "splitted":
-            data_dir = self.LOCAL_DATA_DIR / "splitted"
-        else:
-            raise ValueError(f"Unknown config name: {self.config.name}")
-
+    def _load_local_data(self, splits_paths: list[str]) -> dict[Literal["train", "validation", "test"], dict]:
         data = {}
-        for split_name in ["train", "validation", "test"]:
-            filepath = data_dir / (
-                f"{split_name}_splitted.jsonl" if self.config.name == "splitted" else f"{split_name}.jsonl"
-            )
+        for filepath in splits_paths:
+            split_name = Path(filepath).stem.split("_")[0]
             with open(filepath, "r") as f:
                 samples = [json.loads(line) for line in f]
             data[split_name] = samples
-
         return data
 
-    def _load_local_delta(self) -> Delta:
-        delta_file = (self.LOCAL_DATA_DIR / "gutenberg_delta.json").as_posix()
-        delta = Delta(delta_path=delta_file, deserializer=json_loads)
+    def _load_local_delta(self, delta_path: str) -> Delta:
+        delta = Delta(delta_path=delta_path, deserializer=json_loads)  # type: ignore
         return delta
 
-    def _load_local_lengths(self) -> dict[str, list[int]]:
-        lengths_file = self.LOCAL_DATA_DIR / "gutenberg_lengths.json"
-        with open(lengths_file, "r") as f:
+    def _load_local_lengths(self, lengths_path: str) -> dict[str, list[int]]:
+        with open(lengths_path, "r") as f:
             lengths = json.load(f)
         return lengths
 
     def _download_gutenberg_data(
         self,
         gutenberg_keys: list[str],
-        dl_manager: DownloadManager | StreamingDownloadManager,
+        dl_manager: Union[DownloadManager, StreamingDownloadManager],
    ) -> dict[str, str]:
         special_gutenberg_urls = {
             "28240": "https://web.archive.org/web/20240320095627/https://gutenberg.org/ebooks/28240.txt.utf-8"
@@ -263,9 +264,11 @@ class BookCoref(datasets.GeneratorBasedBuilder):
 
         return all_split_docs
 
-    def _split_generators(self, dl_manager: DownloadManager | StreamingDownloadManager):
+    def _split_generators(self, dl_manager: Union[DownloadManager, StreamingDownloadManager]):
         self.logger.info("Loading local data...")
-        local_data = self._load_local_data()
+        all_local_data: dict[str, list[str]] = dl_manager.download_and_extract(_LOCAL_FILES)  # type: ignore
+
+        local_data = self._load_local_data(all_local_data[self.config.name])
         gutenberg_data = defaultdict(dict)
         for split, samples in local_data.items():
             for sample in samples:
@@ -274,8 +277,8 @@ class BookCoref(datasets.GeneratorBasedBuilder):
         self.logger.info(
             f"Loaded {gutenberg_data_size} samples from local data for `{self.config.name}` configuration."
         )
-        delta = self._load_local_delta()
-        lengths = self._load_local_lengths()
+        delta = self._load_local_delta(all_local_data["delta"][0])
+        lengths = self._load_local_lengths(all_local_data["lengths"][0])
         gutenberg_keys = set(
             sample["gutenberg_key"] if self.config.name == "full" else sample["gutenberg_key"].split("_")[0]
             for split in local_data.values()
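
The substance of the change: the unused _URLS constant and the hard-coded self.LOCAL_DATA_DIR lookups are replaced by a _LOCAL_FILES mapping of repo-relative annotation files, which _split_generators now resolves through dl_manager.download_and_extract. That call accepts a nested dict/list of paths and returns the same structure with resolved file locations, so each loader receives explicit paths and recovers the split name from the file stem. A small, self-contained sketch of that stem-to-split mapping (illustrative only, not part of the script):

from pathlib import Path

# Mirror of the new _LOCAL_FILES entries for the two configurations.
_LOCAL_DATA_DIR = Path("bookcoref_annotations")
_LOCAL_FILES = {
    "full": [
        _LOCAL_DATA_DIR / "full" / "train.jsonl",
        _LOCAL_DATA_DIR / "full" / "validation.jsonl",
        _LOCAL_DATA_DIR / "full" / "test.jsonl",
    ],
    "splitted": [
        _LOCAL_DATA_DIR / "splitted" / "train_splitted.jsonl",
        _LOCAL_DATA_DIR / "splitted" / "validation_splitted.jsonl",
        _LOCAL_DATA_DIR / "splitted" / "test_splitted.jsonl",
    ],
}

# _load_local_data takes the split name from the stem up to the first
# underscore, so "train.jsonl" and "train_splitted.jsonl" both map to "train".
for config_name, paths in _LOCAL_FILES.items():
    split_names = [Path(p).stem.split("_")[0] for p in paths]
    print(config_name, split_names)
# full ['train', 'validation', 'test']
# splitted ['train', 'validation', 'test']

Because the delta and lengths files go through the same download_and_extract call, _load_local_delta and _load_local_lengths likewise receive explicit paths (all_local_data["delta"][0] and all_local_data["lengths"][0]) instead of reading from a fixed local directory, which also works when a StreamingDownloadManager is passed.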