onkarsus13 commited on
Commit
d591d43
·
verified ·
1 Parent(s): 39fbcfd

Add files using upload-large-folder tool

Browse files
ATLAS/atlas-train-dataset-1.0.1/CHANGELOG ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Changelog
2
+
3
+ All notable changes to the Atlas Dataset will be documented in this file.
4
+
5
+ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
6
+ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
7
+
8
+ ## [Unreleased]
9
+
10
+ ## [1.0.1] - 2023-06-25
11
+
12
+ ### Updated
13
+
14
+ - Update metrics.
15
+
16
+ ## [1.0.0] - 2023-04-25
17
+
18
+ ### Added
19
+
20
+ - Readme with Description, Documentation, and License sections.
21
+ - Initial version of the training dataset with metric scripts for evaluation.
22
+
ATLAS/atlas-train-dataset-1.0.1/README.md ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # The Atlas training Dataset
2
+
3
+ This archive contains the Atlas challenge training dataset as well as the metrics calculation scripts.
4
+
5
+
6
+ ## Documentation
7
+
8
+ For more information about the challenge and the dataset, you are invited to visit the [Atlas website](https://atlas-challenge.u-bourgogne.fr).
9
+
10
+
11
+ ## License
12
+
13
+ This work is licensed under a [Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License](http://creativecommons.org/licenses/by-nc-sa/4.0/).
14
+
ATLAS/atlas-train-dataset-1.0.1/metric_calculation/README.md ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ## File organisation
2
+
3
+ calculate_metrics.py is the main file and will allow you to calculate the metrics used in the ATLAS challenge; values per image can be stored in a JSON file, or average values in a CSV file. The metrics are defined in the metrics.py file.
4
+ Images from the label folder should be named "lbxx.nii.gz" and have a corresponding image in the segmentation folder named "imxx.nii.gz".
5
+
6
+ ## Run the code
7
+
8
+ Install python dependencies
9
+
10
+ `python -m pip install -r requirements.txt`
11
+
12
+ Calculate the default metrics and store the output in a json or csv file
13
+
14
+ `python calculate_metrics.py --segmentation_folder /path/to/the/model/segmentation/ --label_folder /path/to/the/ground/truth/ --csv_output_file /path/to/the/output/csv/file --json_output_file /path/to/the/output/json/file`
ATLAS/atlas-train-dataset-1.0.1/metric_calculation/calculate_metrics.py ADDED
@@ -0,0 +1,60 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import csv
3
+ import argparse
4
+ from evaluator import aggregate_scores
5
+
6
def calcultate_metrics(segmentation_folder, label_folder, author, status, csv_output_file=None, json_output_file=None, num_threads=8):
    """Compute the ATLAS challenge metrics for a folder of segmentations.

    param segmentation_folder: folder with the segmentations in NIfTI format,
        named ``imXX.nii.gz`` to match the label files ``lbXX.nii.gz``
    param label_folder: folder with the ground-truth images in NIfTI format
    param author: author name written to the CSV summary row
    param status: submission status written to the CSV summary row
    param csv_output_file: optional path to the output CSV file; average
        performances are appended as one row (header written on first use)
    param json_output_file: optional path to the JSON output file with
        performances per image
    param num_threads: number of CPU processes to parallelize the computations
    return: None
    """
    # Pair every ground-truth "lbXX.nii.gz" with its prediction "imXX.nii.gz".
    pred_gt_tuples = []
    for p in os.listdir(label_folder):
        if p.endswith('nii.gz'):
            file = os.path.join(label_folder, p)
            pred_gt_tuples.append([os.path.join(segmentation_folder, 'im' + p[2:]), file])

    # Labels: [1, 2] = whole liver (parenchyma + tumour), 2 = tumour only.
    scores = aggregate_scores(pred_gt_tuples, labels=[[1, 2], 2],
                              json_output_file=json_output_file, num_threads=num_threads)
    if csv_output_file is not None:
        # NOTE(review): surface distances are labelled "mm³" but are lengths —
        # confirm the intended unit before changing the public column names.
        evaluation_metrics = {
            "Authors": author,
            "Liver ASD (mm³)": round(scores["mean"]["[1, 2]"]["Avg. Symmetric Surface Distance"], 1),
            "Liver Dice (%)": round(scores["mean"]["[1, 2]"]["Dice"] * 100, 1),
            "Liver Hausdorff Distance (mm³)": round(scores["mean"]["[1, 2]"]["Hausdorff Distance"], 1),
            "Liver Surface Dice (%)": round(scores["mean"]["[1, 2]"]["Surface Dice"] * 100, 1),
            "Tumor ASD (mm³)": round(scores["mean"]["2"]["Avg. Symmetric Surface Distance"], 1),
            "Tumor Dice (%)": round(scores["mean"]["2"]["Dice"] * 100, 1),
            "Tumor Hausdorff Distance (mm³)": round(scores["mean"]["2"]["Hausdorff Distance"], 1),
            "Tumor Surface Dice (%)": round(scores["mean"]["2"]["Surface Dice"] * 100, 1),
            "RMSE on Tumor Burden (%)": round(scores["mean"]["RMSE on Tumor Burden"] * 100, 1),
            "Status": status
        }

        # Write the header only when the file does not exist yet, so repeated
        # runs append rows. newline='' is required by the csv module to avoid
        # blank lines on Windows.
        if not os.path.isfile(csv_output_file):
            with open(csv_output_file, 'w', newline='') as csvfile:
                writer = csv.DictWriter(csvfile, fieldnames=evaluation_metrics.keys())
                writer.writeheader()

        with open(csv_output_file, 'a', newline='') as csvfile:
            writer = csv.DictWriter(csvfile, fieldnames=evaluation_metrics.keys())
            writer.writerow(evaluation_metrics)
49
if __name__ == "__main__":
    # Command-line entry point: every option is forwarded to calcultate_metrics().
    parser = argparse.ArgumentParser(description='Metrics calculation')
    for flag, default in (
            ('--segmentation_folder', "/path/to/the/segmentation/folder"),
            ('--label_folder', "/path/to/the/label/folder/labelsTr"),
            ('--author', "Author 1"),
            ('--status', "Docker container submitted"),
            ('--csv_output_file', "/path/to/the/output/csv/file.csv"),
            ('--json_output_file', "/path/to/the/output/json/file.json")):
        parser.add_argument(flag, default=default, type=str)
    parser.add_argument('--num_threads', default=8, type=int)

    args = parser.parse_args()
    calcultate_metrics(args.segmentation_folder, args.label_folder, args.author,
                       args.status, csv_output_file=args.csv_output_file,
                       json_output_file=args.json_output_file,
                       num_threads=args.num_threads)
ATLAS/atlas-train-dataset-1.0.1/metric_calculation/evaluator.py ADDED
@@ -0,0 +1,430 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
+ import collections
17
+ import inspect
18
+ import json
19
+ import hashlib
20
+ from datetime import datetime
21
+ from multiprocessing.pool import Pool
22
+ import numpy as np
23
+ import pandas as pd
24
+ import SimpleITK as sitk
25
+ from metrics import ConfusionMatrix, ALL_METRICS
26
+ from batchgenerators.utilities.file_and_folder_operations import save_json
27
+ from collections import OrderedDict
28
+
29
+
30
class Evaluator:
    """Object that holds test and reference segmentations with label information
    and computes a number of metrics on the two. 'labels' must either be an
    iterable of numeric values (or tuples thereof) or a dictionary with string
    names and numeric values.
    """

    default_metrics = [
        "False Positive Rate",
        "Dice",
        "Jaccard",
        "Precision",
        "Recall",
        "Accuracy",
        "False Omission Rate",
        "Negative Predictive Value",
        "False Negative Rate",
        "True Negative Rate",
        "False Discovery Rate",
        "Total Positives Test",
        "Total Positives Reference",
        "Hausdorff Distance",
        "Hausdorff Distance 95",
        "Avg. Surface Distance",
        "Avg. Symmetric Surface Distance",
        "Surface Dice"
    ]

    default_advanced_metrics = []

    def __init__(self,
                 test=None,
                 reference=None,
                 labels=None,
                 metrics=None,
                 advanced_metrics=None,
                 nan_for_nonexisting=False):

        self.test = None
        self.reference = None
        self.confusion_matrix = ConfusionMatrix()
        self.labels = None
        self.nan_for_nonexisting = nan_for_nonexisting
        self.result = None

        # Copy the metric name lists so instances never share (or mutate) the
        # class-level defaults.
        self.metrics = list(self.default_metrics) if metrics is None else list(metrics)
        self.advanced_metrics = (list(self.default_advanced_metrics)
                                 if advanced_metrics is None else list(advanced_metrics))

        self.set_reference(reference)
        self.set_test(test)
        if labels is not None:
            self.set_labels(labels)
        elif test is not None and reference is not None:
            self.construct_labels()

    def set_test(self, test):
        """Set the test segmentation."""
        self.test = test

    def set_reference(self, reference):
        """Set the reference segmentation."""
        self.reference = reference

    def set_labels(self, labels):
        """Set the labels.
        :param labels: may be a dictionary (int->str), a set (of ints), a tuple
            (of ints) or a list (of ints). Labels will only have names if you
            pass a dictionary."""
        if isinstance(labels, dict):
            self.labels = collections.OrderedDict(labels)
        elif isinstance(labels, set):
            self.labels = list(labels)
        elif isinstance(labels, np.ndarray):
            self.labels = [i for i in labels]
        elif isinstance(labels, (list, tuple)):
            self.labels = labels
        else:
            raise TypeError("Can only handle dict, list, tuple, set & numpy array, but input is of type {}".format(type(labels)))

    def construct_labels(self):
        """Construct label set from unique entries in segmentations."""
        if self.test is None and self.reference is None:
            raise ValueError("No test or reference segmentations.")
        elif self.test is None:
            labels = np.unique(self.reference)
        else:
            labels = np.union1d(np.unique(self.test),
                                np.unique(self.reference))
        self.labels = list(map(lambda x: int(x), labels))

    def set_metrics(self, metrics):
        """Set evaluation metrics."""
        if isinstance(metrics, set):
            self.metrics = list(metrics)
        elif isinstance(metrics, (list, tuple, np.ndarray)):
            self.metrics = metrics
        else:
            raise TypeError("Can only handle list, tuple, set & numpy array, but input is of type {}".format(type(metrics)))

    def add_metric(self, metric):
        """Append a metric name unless it is already registered."""
        if metric not in self.metrics:
            self.metrics.append(metric)

    def evaluate(self, test=None, reference=None, advanced=False, **metric_kwargs):
        """Compute metrics for segmentations.

        :param advanced: if True, also compute the advanced metrics
        :return: OrderedDict mapping str(label) -> OrderedDict(metric name -> value)
        """
        if test is not None:
            self.set_test(test)

        if reference is not None:
            self.set_reference(reference)

        if self.test is None or self.reference is None:
            raise ValueError("Need both test and reference segmentations.")

        if self.labels is None:
            self.construct_labels()

        self.metrics.sort()

        # get functions for evaluation
        # somewhat convoluted, but allows users to define additional metrics
        # on the fly, e.g. inside an IPython console
        _funcs = {m: ALL_METRICS[m] for m in self.metrics + self.advanced_metrics}
        frames = inspect.getouterframes(inspect.currentframe())
        for metric in self.metrics:
            for f in frames:
                if metric in f[0].f_locals:
                    _funcs[metric] = f[0].f_locals[metric]
                    break
            else:
                if metric in _funcs:
                    continue
                else:
                    raise NotImplementedError(
                        "Metric {} not implemented.".format(metric))

        # get results
        self.result = OrderedDict()

        # BUG FIX: copy the list — the original aliased self.metrics and then
        # extended it in place, so every advanced evaluation permanently grew
        # self.metrics.
        eval_metrics = list(self.metrics)
        if advanced:
            eval_metrics += self.advanced_metrics

        if isinstance(self.labels, dict):
            for label, name in self.labels.items():
                k = str(name)
                self.result[k] = OrderedDict()
                if not hasattr(label, "__iter__"):
                    self.confusion_matrix.set_test(self.test == label)
                    self.confusion_matrix.set_reference(self.reference == label)
                else:
                    # A tuple of labels is treated as their union (binary mask).
                    current_test = 0
                    current_reference = 0
                    for l in label:
                        current_test += (self.test == l)
                        current_reference += (self.reference == l)
                    self.confusion_matrix.set_test(current_test)
                    self.confusion_matrix.set_reference(current_reference)
                for metric in eval_metrics:
                    self.result[k][metric] = _funcs[metric](confusion_matrix=self.confusion_matrix,
                                                            nan_for_nonexisting=self.nan_for_nonexisting,
                                                            **metric_kwargs)

        else:
            for i, l in enumerate(self.labels):
                k = str(l)
                self.result[k] = OrderedDict()
                if isinstance(l, list):
                    # A list of labels is treated as their union (binary mask).
                    self.confusion_matrix.set_test(np.isin(self.test, l))
                    self.confusion_matrix.set_reference(np.isin(self.reference, l))
                else:
                    self.confusion_matrix.set_test(self.test == l)
                    self.confusion_matrix.set_reference(self.reference == l)

                for metric in eval_metrics:
                    self.result[k][metric] = _funcs[metric](confusion_matrix=self.confusion_matrix,
                                                            nan_for_nonexisting=self.nan_for_nonexisting,
                                                            **metric_kwargs)

        return self.result

    def to_dict(self):
        """Return the last result, computing it first if necessary."""
        if self.result is None:
            self.evaluate()
        return self.result

    def to_array(self):
        """Return result as numpy array (labels x metrics)."""
        if self.result is None:
            # BUG FIX: the original referenced self.evaluate without calling it,
            # so self.result stayed None and the next line raised TypeError.
            self.evaluate()

        result_metrics = sorted(self.result[list(self.result.keys())[0]].keys())

        a = np.zeros((len(self.labels), len(result_metrics)), dtype=np.float32)

        if isinstance(self.labels, dict):
            # NOTE(review): evaluate() keys results by str(name); this lookup
            # uses the raw name and only works when names are strings — confirm.
            for i, label in enumerate(self.labels.keys()):
                for j, metric in enumerate(result_metrics):
                    a[i][j] = self.result[self.labels[label]][metric]
        else:
            for i, label in enumerate(self.labels):
                for j, metric in enumerate(result_metrics):
                    a[i][j] = self.result[label][metric]

        return a

    def to_pandas(self):
        """Return result as pandas DataFrame (index: labels, columns: metrics)."""
        a = self.to_array()

        if isinstance(self.labels, dict):
            labels = list(self.labels.values())
        else:
            labels = self.labels

        result_metrics = sorted(self.result[list(self.result.keys())[0]].keys())

        return pd.DataFrame(a, index=labels, columns=result_metrics)
274
class NiftiEvaluator(Evaluator):
    """Evaluator that accepts NIfTI file paths instead of numpy arrays.

    Images are read with SimpleITK; the loaded image objects are kept so that
    voxel-spacing metadata is still available at evaluation time.
    """

    def __init__(self, *args, **kwargs):
        # SimpleITK image handles for the most recently loaded files.
        self.test_nifti = None
        self.reference_nifti = None
        super(NiftiEvaluator, self).__init__(*args, **kwargs)

    def set_test(self, test):
        """Set the test segmentation.

        :param test: path to a NIfTI file, or None to clear the current image
        """
        if test is not None:
            self.test_nifti = sitk.ReadImage(test)
            super(NiftiEvaluator, self).set_test(sitk.GetArrayFromImage(self.test_nifti))
        else:
            self.test_nifti = None
            super(NiftiEvaluator, self).set_test(test)

    def set_reference(self, reference):
        """Set the reference segmentation.

        :param reference: path to a NIfTI file, or None to clear the current image
        """
        if reference is not None:
            self.reference_nifti = sitk.ReadImage(reference)
            super(NiftiEvaluator, self).set_reference(sitk.GetArrayFromImage(self.reference_nifti))
        else:
            self.reference_nifti = None
            super(NiftiEvaluator, self).set_reference(reference)

    def evaluate(self, test=None, reference=None, voxel_spacing=None, **metric_kwargs):
        """Evaluate as the base class does, injecting the image voxel spacing."""
        if voxel_spacing is None:
            # SimpleITK reports spacing as (x, y, z) while the numpy array is
            # indexed (z, y, x), hence the reversal.
            voxel_spacing = np.array(self.test_nifti.GetSpacing())[::-1]
            # NOTE(review): an explicitly passed voxel_spacing is NOT forwarded
            # to the metrics (this assignment sits inside the if) — confirm
            # whether that is intended.
            metric_kwargs["voxel_spacing"] = voxel_spacing

        return super(NiftiEvaluator, self).evaluate(test, reference, **metric_kwargs)
310
+
311
def run_evaluation(args):
    """Evaluate one (prediction, reference) pair and return its score dict.

    Designed as a multiprocessing worker: *args* is a single tuple of
    (test, ref, evaluator, metric_kwargs).
    """
    test, ref, evaluator, metric_kwargs = args
    evaluator.set_test(test)
    evaluator.set_reference(ref)
    if evaluator.labels is None:
        evaluator.construct_labels()
    current_scores = evaluator.evaluate(**metric_kwargs)
    # Record the file paths (when given as strings) so each result can be
    # traced back to its image pair.
    for key, value in (("test", test), ("reference", ref)):
        if type(value) == str:
            current_scores[key] = value
    return current_scores
325
+
326
def aggregate_scores(test_ref_pairs,
                     evaluator=NiftiEvaluator,
                     labels=None,
                     nanmean=True,
                     json_output_file=None,
                     json_name="",
                     json_description="",
                     json_author="Fabian",
                     json_task="",
                     num_threads=2,
                     do_mean=True,
                     get_tumor_burden=True,
                     **metric_kwargs):
    """Evaluate every (prediction, reference) pair and aggregate the scores.

    test = predicted image
    :param test_ref_pairs: list of (prediction_path, reference_path) pairs
    :param evaluator: Evaluator subclass (or instance) used for every pair
    :param labels: must be a dict of int-> str or a list of int
    :param nanmean: if True, NaN scores are ignored when averaging
    :param json_output_file: optional path; per-image and mean scores are saved there
    :param json_name:
    :param json_description:
    :param json_author:
    :param json_task:
    :param num_threads: size of the worker pool
    :param do_mean: if True, collapse per-image score lists into their mean
    :param get_tumor_burden: if True, also compute tumor-burden statistics;
        assumes the labels are [[1, 2], 2] (liver = "[1, 2]", tumor = "2")
    :param metric_kwargs: forwarded to Evaluator.evaluate
    :return: OrderedDict with keys "all" (per image) and "mean"
    """

    if type(evaluator) == type:
        # A class was passed instead of an instance: instantiate it.
        evaluator = evaluator()

    if labels is not None:
        evaluator.set_labels(labels)

    all_scores = OrderedDict()
    all_scores["all"] = []
    all_scores["mean"] = OrderedDict()

    test = [i[0] for i in test_ref_pairs]
    ref = [i[1] for i in test_ref_pairs]
    # One worker call per image pair; the evaluator instance is pickled into
    # every worker process.
    p = Pool(num_threads)
    all_res = p.map(run_evaluation, zip(test, ref, [evaluator]*len(ref), [metric_kwargs]*len(ref)))

    p.close()
    p.join()

    if get_tumor_burden:
        # Tumor burden = tumor volume / liver volume, computed per image. The
        # keys "[1, 2]" and "2" are str() of the labels used by
        # calculate_metrics.py.
        # NOTE(review): raises ZeroDivisionError when a liver volume is 0 —
        # confirm that cannot happen with this dataset.
        tumor_burden_segmentation_list = []
        tumor_burden_reference_list = []
        for i in range(len(all_res)):
            liver_label_volume = all_res[i]["[1, 2]"]['Total Positives Reference']
            liver_seg_volume = all_res[i]["[1, 2]"]['Total Positives Test']

            tumor_label_volume = all_res[i]["2"]['Total Positives Reference']
            tumor_seg_volume = all_res[i]["2"]['Total Positives Test']

            tumor_burden_reference = tumor_label_volume / liver_label_volume
            tumor_burden_segmentation = tumor_seg_volume / liver_seg_volume

            tumor_burden_reference_list.append(tumor_burden_reference)
            tumor_burden_segmentation_list.append(tumor_burden_segmentation)

            all_res[i]["2"]["Tumor Burden Reference"] = tumor_burden_reference
            all_res[i]["2"]["Tumor Burden Test"] = tumor_burden_segmentation
        # NOTE(review): despite the name, no square root is taken — this is the
        # mean squared error of the tumor burden; confirm intended.
        rmse_on_tumor_burden = np.nanmean(np.power(np.subtract(tumor_burden_reference_list, tumor_burden_segmentation_list), 2))

    for i in range(len(all_res)):
        all_scores["all"].append(all_res[i])

        # append score list for mean
        for label, score_dict in all_res[i].items():
            # "test"/"reference" hold file paths, not scores.
            if label in ("test", "reference"):
                continue
            if label not in all_scores["mean"]:
                all_scores["mean"][label] = OrderedDict()
            for score, value in score_dict.items():
                if score not in all_scores["mean"][label]:
                    all_scores["mean"][label][score] = []
                all_scores["mean"][label][score].append(value)
    if do_mean:
        # Collapse each per-image score list into a single float.
        for label in all_scores["mean"]:
            for score in all_scores["mean"][label]:
                if nanmean:
                    all_scores["mean"][label][score] = float(np.nanmean(all_scores["mean"][label][score]))
                else:
                    all_scores["mean"][label][score] = float(np.mean(all_scores["mean"][label][score]))
    if get_tumor_burden:
        all_scores["mean"]["RMSE on Tumor Burden"] = rmse_on_tumor_burden

    # save to file if desired
    # we create a hopefully unique id by hashing the entire output dictionary
    if json_output_file is not None:
        json_dict = OrderedDict()
        json_dict["name"] = json_name
        json_dict["description"] = json_description
        timestamp = datetime.today()
        json_dict["timestamp"] = str(timestamp)
        json_dict["task"] = json_task
        json_dict["author"] = json_author
        json_dict["results"] = all_scores
        json_dict["id"] = hashlib.md5(json.dumps(json_dict).encode("utf-8")).hexdigest()[:12]
        save_json(json_dict, json_output_file)

    return all_scores
ATLAS/atlas-train-dataset-1.0.1/metric_calculation/metrics.py ADDED
@@ -0,0 +1,442 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ import numpy as np
16
+ from medpy import metric
17
+
18
+
19
def assert_shape(test, reference):
    """Assert that the test and reference segmentations share one array shape."""
    msg = "Shape mismatch: {} and {}".format(test.shape, reference.shape)
    assert test.shape == reference.shape, msg
+
25
class ConfusionMatrix:
    """Lazily computed confusion matrix between a test and a reference segmentation.

    Voxels are counted as foreground when nonzero. All statistics are cached
    and invalidated whenever either segmentation changes.
    """

    # Names of all cached statistics, cleared by reset().
    _CACHED = ("tp", "fp", "tn", "fn", "size",
               "test_empty", "test_full", "reference_empty", "reference_full")

    def __init__(self, test=None, reference=None):
        for name in self._CACHED:
            setattr(self, name, None)
        self.set_reference(reference)
        self.set_test(test)

    def set_test(self, test):
        """Store the test segmentation and invalidate cached counts."""
        self.test = test
        self.reset()

    def set_reference(self, reference):
        """Store the reference segmentation and invalidate cached counts."""
        self.reference = reference
        self.reset()

    def reset(self):
        """Drop every cached statistic; they are recomputed on demand."""
        for name in self._CACHED:
            setattr(self, name, None)

    def compute(self):
        """Count tp/fp/tn/fn and the emptiness/fullness flags."""
        if self.test is None or self.reference is None:
            raise ValueError("'test' and 'reference' must both be set to compute confusion matrix.")

        assert_shape(self.test, self.reference)

        test_fg = self.test != 0
        ref_fg = self.reference != 0
        self.tp = int((test_fg * ref_fg).sum())
        self.fp = int((test_fg * (self.reference == 0)).sum())
        self.tn = int(((self.test == 0) * (self.reference == 0)).sum())
        self.fn = int(((self.test == 0) * ref_fg).sum())
        self.size = int(np.prod(self.reference.shape, dtype=np.int64))
        self.test_empty = not np.any(self.test)
        self.test_full = np.all(self.test)
        self.reference_empty = not np.any(self.reference)
        self.reference_full = np.all(self.reference)

    def get_matrix(self):
        """Return (tp, fp, tn, fn), computing them first if necessary."""
        if any(entry is None for entry in (self.tp, self.fp, self.tn, self.fn)):
            self.compute()
        return self.tp, self.fp, self.tn, self.fn

    def get_size(self):
        """Return the total number of voxels."""
        if self.size is None:
            self.compute()
        return self.size

    def get_existence(self):
        """Return (test_empty, test_full, reference_empty, reference_full)."""
        if any(flag is None for flag in (self.test_empty, self.test_full,
                                         self.reference_empty, self.reference_full)):
            self.compute()
        return self.test_empty, self.test_full, self.reference_empty, self.reference_full
+
105
def dice(test=None, reference=None, confusion_matrix=None, nan_for_nonexisting=False, **kwargs):
    """Dice coefficient: 2TP / (2TP + FP + FN)."""
    cm = confusion_matrix if confusion_matrix is not None else ConfusionMatrix(test, reference)
    tp, fp, _, fn = cm.get_matrix()
    test_empty, _, reference_empty, _ = cm.get_existence()

    # Both segmentations empty: the score is undefined.
    if test_empty and reference_empty:
        return float("NaN") if nan_for_nonexisting else 0.

    return float(2. * tp / (2 * tp + fp + fn))
+
123
def jaccard(test=None, reference=None, confusion_matrix=None, nan_for_nonexisting=False, **kwargs):
    """Jaccard index: TP / (TP + FP + FN)."""
    cm = confusion_matrix if confusion_matrix is not None else ConfusionMatrix(test, reference)
    tp, fp, _, fn = cm.get_matrix()
    test_empty, _, reference_empty, _ = cm.get_existence()

    # Both segmentations empty: the score is undefined.
    if test_empty and reference_empty:
        return float("NaN") if nan_for_nonexisting else 0.

    return float(tp / (tp + fp + fn))
+
141
def precision(test=None, reference=None, confusion_matrix=None, nan_for_nonexisting=False, **kwargs):
    """Precision: TP / (TP + FP)."""
    cm = confusion_matrix if confusion_matrix is not None else ConfusionMatrix(test, reference)
    tp, fp, _, _ = cm.get_matrix()
    test_empty, _, _, _ = cm.get_existence()

    # No predicted foreground: the ratio is undefined.
    if test_empty or (tp == 0 and fp == 0):
        return float("NaN") if nan_for_nonexisting else 0.

    return float(tp / (tp + fp))
+
159
def sensitivity(test=None, reference=None, confusion_matrix=None, nan_for_nonexisting=False, **kwargs):
    """Sensitivity (true positive rate): TP / (TP + FN)."""
    cm = confusion_matrix if confusion_matrix is not None else ConfusionMatrix(test, reference)
    tp, _, _, fn = cm.get_matrix()
    _, _, reference_empty, _ = cm.get_existence()

    # No reference foreground: the ratio is undefined.
    if reference_empty:
        return float("NaN") if nan_for_nonexisting else 0.

    return float(tp / (tp + fn))
+
177
def recall(test=None, reference=None, confusion_matrix=None, nan_for_nonexisting=False, **kwargs):
    """Recall is a synonym for sensitivity: TP / (TP + FN)."""
    value = sensitivity(test, reference, confusion_matrix, nan_for_nonexisting, **kwargs)
    return value
+
183
def specificity(test=None, reference=None, confusion_matrix=None, nan_for_nonexisting=False, **kwargs):
    """Specificity (true negative rate): TN / (TN + FP)."""
    cm = confusion_matrix if confusion_matrix is not None else ConfusionMatrix(test, reference)
    _, fp, tn, _ = cm.get_matrix()
    _, _, _, reference_full = cm.get_existence()

    # No reference background: the ratio is undefined.
    if reference_full:
        return float("NaN") if nan_for_nonexisting else 0.

    return float(tn / (tn + fp))
+
201
def accuracy(test=None, reference=None, confusion_matrix=None, **kwargs):
    """Fraction of correctly classified voxels: (TP + TN) / (TP + FP + FN + TN)."""
    cm = confusion_matrix if confusion_matrix is not None else ConfusionMatrix(test, reference)
    tp, fp, tn, fn = cm.get_matrix()
    return float((tp + tn) / (tp + fp + tn + fn))
+
212
def fscore(test=None, reference=None, confusion_matrix=None, nan_for_nonexisting=False, beta=1., **kwargs):
    """F-beta score: (1 + b^2) * TP / ((1 + b^2) * TP + b^2 * FN + FP)."""
    precision_ = precision(test, reference, confusion_matrix, nan_for_nonexisting)
    recall_ = recall(test, reference, confusion_matrix, nan_for_nonexisting)
    b2 = beta * beta
    # NOTE(review): raises ZeroDivisionError when precision and recall are both
    # 0 — confirm whether callers can hit that case.
    return (1 + b2) * precision_ * recall_ / (b2 * precision_ + recall_)
+
222
def false_positive_rate(test=None, reference=None, confusion_matrix=None, nan_for_nonexisting=False, **kwargs):
    """FP / (FP + TN), the complement of specificity."""
    spec = specificity(test, reference, confusion_matrix, nan_for_nonexisting)
    return 1 - spec
+
228
def false_omission_rate(test=None, reference=None, confusion_matrix=None, nan_for_nonexisting=False, **kwargs):
    """False omission rate: FN / (TN + FN)."""
    cm = confusion_matrix if confusion_matrix is not None else ConfusionMatrix(test, reference)
    _, _, tn, fn = cm.get_matrix()
    _, test_full, _, _ = cm.get_existence()

    # No predicted background: the ratio is undefined.
    if test_full:
        return float("NaN") if nan_for_nonexisting else 0.

    return float(fn / (fn + tn))
+
246
def false_negative_rate(test=None, reference=None, confusion_matrix=None, nan_for_nonexisting=False, **kwargs):
    """FN / (TP + FN), the complement of sensitivity."""
    sens = sensitivity(test, reference, confusion_matrix, nan_for_nonexisting)
    return 1 - sens
+
252
def true_negative_rate(test=None, reference=None, confusion_matrix=None, nan_for_nonexisting=False, **kwargs):
    """True negative rate is a synonym for specificity: TN / (TN + FP)."""
    value = specificity(test, reference, confusion_matrix, nan_for_nonexisting)
    return value
+
258
def false_discovery_rate(test=None, reference=None, confusion_matrix=None, nan_for_nonexisting=False, **kwargs):
    """FP / (TP + FP), the complement of precision."""
    prec = precision(test, reference, confusion_matrix, nan_for_nonexisting)
    return 1 - prec
+
264
def negative_predictive_value(test=None, reference=None, confusion_matrix=None, nan_for_nonexisting=False, **kwargs):
    """TN / (TN + FN), the complement of the false omission rate."""
    fom = false_omission_rate(test, reference, confusion_matrix, nan_for_nonexisting)
    return 1 - fom
+
270
def total_positives_test(test=None, reference=None, confusion_matrix=None, **kwargs):
    """Number of foreground voxels in the test segmentation: TP + FP."""
    cm = confusion_matrix if confusion_matrix is not None else ConfusionMatrix(test, reference)
    tp, fp, _, _ = cm.get_matrix()
    return tp + fp
+
281
def total_negatives_test(test=None, reference=None, confusion_matrix=None, **kwargs):
    """Number of background voxels in the test segmentation: TN + FN."""
    cm = confusion_matrix if confusion_matrix is not None else ConfusionMatrix(test, reference)
    _, _, tn, fn = cm.get_matrix()
    return tn + fn
+
292
def total_positives_reference(test=None, reference=None, confusion_matrix=None, **kwargs):
    """Number of foreground voxels in the reference segmentation: TP + FN."""
    cm = confusion_matrix if confusion_matrix is not None else ConfusionMatrix(test, reference)
    tp, _, _, fn = cm.get_matrix()
    return tp + fn
+
303
def total_negatives_reference(test=None, reference=None, confusion_matrix=None, **kwargs):
    """Number of background voxels in the reference segmentation: TN + FP."""
    cm = confusion_matrix if confusion_matrix is not None else ConfusionMatrix(test, reference)
    _, fp, tn, _ = cm.get_matrix()
    return tn + fp
313
def hausdorff_distance(test=None, reference=None, confusion_matrix=None, nan_for_nonexisting=True, voxel_spacing=None, connectivity=1, **kwargs):
    """Symmetric Hausdorff distance between the test and reference masks.

    Delegates to medpy's ``metric.hd``.  The distance is undefined when
    either mask is degenerate (completely empty or completely full): in
    that case NaN is returned if ``nan_for_nonexisting`` is true,
    otherwise the *test* mask is made computable by forcing its eight
    corner voxels to True before measuring.
    """

    if confusion_matrix is None:
        confusion_matrix = ConfusionMatrix(test, reference)

    test_empty, test_full, reference_empty, reference_full = confusion_matrix.get_existence()

    if test_empty or test_full or reference_empty or reference_full:

        if nan_for_nonexisting:
            return float("NaN")
        else:
            # Workaround for degenerate masks: pad the test mask's corners so
            # it is neither empty nor full.
            # NOTE(review): only the *test* mask is padded — if the reference
            # mask is the degenerate one, metric.hd can presumably still fail.
            # TODO confirm this is intended (e.g. reference labels never empty).
            confusion_matrix.test = convert_corner_to_true(confusion_matrix.test)

    test, reference = confusion_matrix.test, confusion_matrix.reference

    return metric.hd(test, reference, voxel_spacing, connectivity)
def convert_corner_to_true(segmentation):
    """Force the eight corner voxels of a 3-D mask to True.

    Used as a workaround so surface-distance metrics can be computed on
    an otherwise empty (or full) mask.  Mutates ``segmentation`` in
    place and also returns it.
    """
    a, b, c = segmentation.shape
    for i in (0, a - 1):
        for j in (0, b - 1):
            for k in (0, c - 1):
                segmentation[i, j, k] = True
    return segmentation
def hausdorff_distance_95(test=None, reference=None, confusion_matrix=None, nan_for_nonexisting=True, voxel_spacing=None, connectivity=1, **kwargs):
    """95th-percentile Hausdorff distance (medpy ``metric.hd95``).

    For degenerate masks (empty or full) returns NaN when
    ``nan_for_nonexisting``; otherwise falls back to the corner-padded
    plain Hausdorff distance.
    """
    if confusion_matrix is None:
        confusion_matrix = ConfusionMatrix(test, reference)

    if any(confusion_matrix.get_existence()):
        if nan_for_nonexisting:
            return float("NaN")
        return hausdorff_distance(test=test, reference=reference, confusion_matrix=confusion_matrix, nan_for_nonexisting=nan_for_nonexisting, voxel_spacing=voxel_spacing, connectivity=connectivity)

    return metric.hd95(confusion_matrix.test, confusion_matrix.reference, voxel_spacing, connectivity)
def avg_surface_distance(test=None, reference=None, confusion_matrix=None, nan_for_nonexisting=True, voxel_spacing=None, connectivity=1, **kwargs):
    """Average surface distance from test to reference (medpy ``metric.asd``).

    For degenerate masks (empty or full) returns NaN when
    ``nan_for_nonexisting``; otherwise falls back to the corner-padded
    plain Hausdorff distance.
    """
    if confusion_matrix is None:
        confusion_matrix = ConfusionMatrix(test, reference)

    if any(confusion_matrix.get_existence()):
        if nan_for_nonexisting:
            return float("NaN")
        return hausdorff_distance(test=test, reference=reference, confusion_matrix=confusion_matrix, nan_for_nonexisting=nan_for_nonexisting, voxel_spacing=voxel_spacing, connectivity=connectivity)

    return metric.asd(confusion_matrix.test, confusion_matrix.reference, voxel_spacing, connectivity)
def surface_dice(test=None, reference=None, confusion_matrix=None, nan_for_nonexisting=False, voxel_spacing=None, connectivity=1, distance=5, **kwargs):
    """Surface Dice at tolerance *distance*.

    Fraction of surface points of each mask lying within ``distance``
    of the other mask's surface.  For degenerate masks (empty or full)
    returns NaN when ``nan_for_nonexisting``, otherwise 0.
    """
    if confusion_matrix is None:
        confusion_matrix = ConfusionMatrix(test, reference)

    if any(confusion_matrix.get_existence()):
        return float("NaN") if nan_for_nonexisting else 0

    seg, ref = confusion_matrix.test, confusion_matrix.reference
    d_seg_to_ref = metric.binary.__surface_distances(seg, ref, voxel_spacing, connectivity)
    d_ref_to_seg = metric.binary.__surface_distances(ref, seg, voxel_spacing, connectivity)

    within = sum(d < distance for d in d_seg_to_ref) + sum(d < distance for d in d_ref_to_seg)
    return within / (len(d_seg_to_ref) + len(d_ref_to_seg))
def avg_surface_distance_symmetric(test=None, reference=None, confusion_matrix=None, nan_for_nonexisting=True, voxel_spacing=None, connectivity=1, **kwargs):
    """Average symmetric surface distance (medpy ``metric.assd``).

    For degenerate masks (empty or full) returns NaN when
    ``nan_for_nonexisting``; otherwise falls back to the corner-padded
    plain Hausdorff distance.
    """
    if confusion_matrix is None:
        confusion_matrix = ConfusionMatrix(test, reference)

    if any(confusion_matrix.get_existence()):
        if nan_for_nonexisting:
            return float("NaN")
        return hausdorff_distance(test=test, reference=reference, confusion_matrix=confusion_matrix, nan_for_nonexisting=nan_for_nonexisting, voxel_spacing=voxel_spacing, connectivity=connectivity)

    return metric.assd(confusion_matrix.test, confusion_matrix.reference, voxel_spacing, connectivity)
# Registry mapping human-readable metric names to their implementations.
# These name strings are used verbatim as keys/columns in the evaluation
# output, so changing them changes the emitted results.
ALL_METRICS = {
    "False Positive Rate": false_positive_rate,
    "Dice": dice,
    "Jaccard": jaccard,
    "Hausdorff Distance": hausdorff_distance,
    "Hausdorff Distance 95": hausdorff_distance_95,
    "Precision": precision,
    "Recall": recall,
    "Avg. Symmetric Surface Distance": avg_surface_distance_symmetric,
    "Avg. Surface Distance": avg_surface_distance,
    "Accuracy": accuracy,
    "False Omission Rate": false_omission_rate,
    "Negative Predictive Value": negative_predictive_value,
    "False Negative Rate": false_negative_rate,
    "True Negative Rate": true_negative_rate,
    "False Discovery Rate": false_discovery_rate,
    "Total Positives Test": total_positives_test,
    "Total Negatives Test": total_negatives_test,
    "Total Positives Reference": total_positives_reference,
    # NOTE(review): lower-case "total" is inconsistent with the other keys,
    # but renaming it would change the emitted result keys — confirm with
    # downstream consumers before fixing.
    "total Negatives Reference": total_negatives_reference,
    "Surface Dice": surface_dice
    }
ATLAS/atlas-train-dataset-1.0.1/metric_calculation/requirements.txt ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ batchgenerators==0.24
2
+ MedPy==0.4.0
3
+ numpy==1.21.5
4
+ pandas==1.5.3
5
+ SimpleITK==2.2.1
ATLAS/atlas-train-dataset-1.0.1/train/dataset.json ADDED
@@ -0,0 +1,265 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "description": "ATLAS challenge train dataset",
3
+ "labels": {
4
+ "0": "background",
5
+ "1": "liver",
6
+ "2": "tumour"
7
+ },
8
+ "licence": "CC BY-NC-SA 4.0",
9
+ "modality": {
10
+ "0": "T1w"
11
+ },
12
+ "name": "cgfl",
13
+ "numTest": 0,
14
+ "numTraining": 60,
15
+ "reference": "Vanderbilt University",
16
+ "release": "14/04/2023",
17
+ "tensorImageSize": "3D",
18
+ "validation": [
19
+ ],
20
+ "training": [
21
+ {
22
+ "image": "imagesTr/im0.nii.gz",
23
+ "label": "labelsTr/lb0.nii.gz"
24
+ },
25
+ {
26
+ "image": "imagesTr/im1.nii.gz",
27
+ "label": "labelsTr/lb1.nii.gz"
28
+ },
29
+ {
30
+ "image": "imagesTr/im2.nii.gz",
31
+ "label": "labelsTr/lb2.nii.gz"
32
+ },
33
+ {
34
+ "image": "imagesTr/im3.nii.gz",
35
+ "label": "labelsTr/lb3.nii.gz"
36
+ },
37
+ {
38
+ "image": "imagesTr/im4.nii.gz",
39
+ "label": "labelsTr/lb4.nii.gz"
40
+ },
41
+ {
42
+ "image": "imagesTr/im5.nii.gz",
43
+ "label": "labelsTr/lb5.nii.gz"
44
+ },
45
+ {
46
+ "image": "imagesTr/im6.nii.gz",
47
+ "label": "labelsTr/lb6.nii.gz"
48
+ },
49
+ {
50
+ "image": "imagesTr/im7.nii.gz",
51
+ "label": "labelsTr/lb7.nii.gz"
52
+ },
53
+ {
54
+ "image": "imagesTr/im8.nii.gz",
55
+ "label": "labelsTr/lb8.nii.gz"
56
+ },
57
+ {
58
+ "image": "imagesTr/im9.nii.gz",
59
+ "label": "labelsTr/lb9.nii.gz"
60
+ },
61
+ {
62
+ "image": "imagesTr/im10.nii.gz",
63
+ "label": "labelsTr/lb10.nii.gz"
64
+ },
65
+ {
66
+ "image": "imagesTr/im11.nii.gz",
67
+ "label": "labelsTr/lb11.nii.gz"
68
+ },
69
+ {
70
+ "image": "imagesTr/im12.nii.gz",
71
+ "label": "labelsTr/lb12.nii.gz"
72
+ },
73
+ {
74
+ "image": "imagesTr/im13.nii.gz",
75
+ "label": "labelsTr/lb13.nii.gz"
76
+ },
77
+ {
78
+ "image": "imagesTr/im14.nii.gz",
79
+ "label": "labelsTr/lb14.nii.gz"
80
+ },
81
+ {
82
+ "image": "imagesTr/im15.nii.gz",
83
+ "label": "labelsTr/lb15.nii.gz"
84
+ },
85
+ {
86
+ "image": "imagesTr/im16.nii.gz",
87
+ "label": "labelsTr/lb16.nii.gz"
88
+ },
89
+ {
90
+ "image": "imagesTr/im17.nii.gz",
91
+ "label": "labelsTr/lb17.nii.gz"
92
+ },
93
+ {
94
+ "image": "imagesTr/im18.nii.gz",
95
+ "label": "labelsTr/lb18.nii.gz"
96
+ },
97
+ {
98
+ "image": "imagesTr/im19.nii.gz",
99
+ "label": "labelsTr/lb19.nii.gz"
100
+ },
101
+ {
102
+ "image": "imagesTr/im20.nii.gz",
103
+ "label": "labelsTr/lb20.nii.gz"
104
+ },
105
+ {
106
+ "image": "imagesTr/im21.nii.gz",
107
+ "label": "labelsTr/lb21.nii.gz"
108
+ },
109
+ {
110
+ "image": "imagesTr/im22.nii.gz",
111
+ "label": "labelsTr/lb22.nii.gz"
112
+ },
113
+ {
114
+ "image": "imagesTr/im23.nii.gz",
115
+ "label": "labelsTr/lb23.nii.gz"
116
+ },
117
+ {
118
+ "image": "imagesTr/im24.nii.gz",
119
+ "label": "labelsTr/lb24.nii.gz"
120
+ },
121
+ {
122
+ "image": "imagesTr/im25.nii.gz",
123
+ "label": "labelsTr/lb25.nii.gz"
124
+ },
125
+ {
126
+ "image": "imagesTr/im26.nii.gz",
127
+ "label": "labelsTr/lb26.nii.gz"
128
+ },
129
+ {
130
+ "image": "imagesTr/im27.nii.gz",
131
+ "label": "labelsTr/lb27.nii.gz"
132
+ },
133
+ {
134
+ "image": "imagesTr/im28.nii.gz",
135
+ "label": "labelsTr/lb28.nii.gz"
136
+ },
137
+ {
138
+ "image": "imagesTr/im29.nii.gz",
139
+ "label": "labelsTr/lb29.nii.gz"
140
+ },
141
+ {
142
+ "image": "imagesTr/im30.nii.gz",
143
+ "label": "labelsTr/lb30.nii.gz"
144
+ },
145
+ {
146
+ "image": "imagesTr/im31.nii.gz",
147
+ "label": "labelsTr/lb31.nii.gz"
148
+ },
149
+ {
150
+ "image": "imagesTr/im32.nii.gz",
151
+ "label": "labelsTr/lb32.nii.gz"
152
+ },
153
+ {
154
+ "image": "imagesTr/im33.nii.gz",
155
+ "label": "labelsTr/lb33.nii.gz"
156
+ },
157
+ {
158
+ "image": "imagesTr/im34.nii.gz",
159
+ "label": "labelsTr/lb34.nii.gz"
160
+ },
161
+ {
162
+ "image": "imagesTr/im35.nii.gz",
163
+ "label": "labelsTr/lb35.nii.gz"
164
+ },
165
+ {
166
+ "image": "imagesTr/im36.nii.gz",
167
+ "label": "labelsTr/lb36.nii.gz"
168
+ },
169
+ {
170
+ "image": "imagesTr/im37.nii.gz",
171
+ "label": "labelsTr/lb37.nii.gz"
172
+ },
173
+ {
174
+ "image": "imagesTr/im38.nii.gz",
175
+ "label": "labelsTr/lb38.nii.gz"
176
+ },
177
+ {
178
+ "image": "imagesTr/im39.nii.gz",
179
+ "label": "labelsTr/lb39.nii.gz"
180
+ },
181
+ {
182
+ "image": "imagesTr/im40.nii.gz",
183
+ "label": "labelsTr/lb40.nii.gz"
184
+ },
185
+ {
186
+ "image": "imagesTr/im41.nii.gz",
187
+ "label": "labelsTr/lb41.nii.gz"
188
+ },
189
+ {
190
+ "image": "imagesTr/im42.nii.gz",
191
+ "label": "labelsTr/lb42.nii.gz"
192
+ },
193
+ {
194
+ "image": "imagesTr/im43.nii.gz",
195
+ "label": "labelsTr/lb43.nii.gz"
196
+ },
197
+ {
198
+ "image": "imagesTr/im44.nii.gz",
199
+ "label": "labelsTr/lb44.nii.gz"
200
+ },
201
+ {
202
+ "image": "imagesTr/im45.nii.gz",
203
+ "label": "labelsTr/lb45.nii.gz"
204
+ },
205
+ {
206
+ "image": "imagesTr/im46.nii.gz",
207
+ "label": "labelsTr/lb46.nii.gz"
208
+ },
209
+ {
210
+ "image": "imagesTr/im47.nii.gz",
211
+ "label": "labelsTr/lb47.nii.gz"
212
+ },
213
+ {
214
+ "image": "imagesTr/im48.nii.gz",
215
+ "label": "labelsTr/lb48.nii.gz"
216
+ },
217
+ {
218
+ "image": "imagesTr/im49.nii.gz",
219
+ "label": "labelsTr/lb49.nii.gz"
220
+ },
221
+ {
222
+ "image": "imagesTr/im50.nii.gz",
223
+ "label": "labelsTr/lb50.nii.gz"
224
+ },
225
+ {
226
+ "image": "imagesTr/im51.nii.gz",
227
+ "label": "labelsTr/lb51.nii.gz"
228
+ },
229
+ {
230
+ "image": "imagesTr/im52.nii.gz",
231
+ "label": "labelsTr/lb52.nii.gz"
232
+ },
233
+ {
234
+ "image": "imagesTr/im53.nii.gz",
235
+ "label": "labelsTr/lb53.nii.gz"
236
+ },
237
+ {
238
+ "image": "imagesTr/im54.nii.gz",
239
+ "label": "labelsTr/lb54.nii.gz"
240
+ },
241
+ {
242
+ "image": "imagesTr/im55.nii.gz",
243
+ "label": "labelsTr/lb55.nii.gz"
244
+ },
245
+ {
246
+ "image": "imagesTr/im56.nii.gz",
247
+ "label": "labelsTr/lb56.nii.gz"
248
+ },
249
+ {
250
+ "image": "imagesTr/im57.nii.gz",
251
+ "label": "labelsTr/lb57.nii.gz"
252
+ },
253
+ {
254
+ "image": "imagesTr/im58.nii.gz",
255
+ "label": "labelsTr/lb58.nii.gz"
256
+ },
257
+ {
258
+ "image": "imagesTr/im59.nii.gz",
259
+ "label": "labelsTr/lb59.nii.gz"
260
+ }
261
+ ]
262
+ }
263
+
264
+
265
+
ATLAS/atlas-train-dataset-1.0.1/train/patient_info_train.json ADDED
@@ -0,0 +1,662 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "0": {
3
+ "machine": "TrioTim",
4
+ "date": "2014",
5
+ "sequence": "VIBE",
6
+ "spacing": {
7
+ "x": 0.781,
8
+ "y": 0.781,
9
+ "z": 2.3
10
+ },
11
+ "contrast_phase": "arterial"
12
+ },
13
+ "1": {
14
+ "machine": "Skyra",
15
+ "date": "2017",
16
+ "sequence": "VIBE TWIST",
17
+ "spacing": {
18
+ "x": 0.938,
19
+ "y": 0.938,
20
+ "z": 2.5
21
+ },
22
+ "contrast_phase": "arterial"
23
+ },
24
+ "2": {
25
+ "machine": "Skyra",
26
+ "date": "2017",
27
+ "sequence": "VIBE",
28
+ "spacing": {
29
+ "x": 1.042,
30
+ "y": 1.042,
31
+ "z": 3.0
32
+ },
33
+ "contrast_phase": "delayed"
34
+ },
35
+ "3": {
36
+ "machine": "Aera",
37
+ "date": "2012",
38
+ "sequence": "VIBE",
39
+ "spacing": {
40
+ "x": 0.879,
41
+ "y": 0.879,
42
+ "z": 4.0
43
+ },
44
+ "contrast_phase": "arterial"
45
+ },
46
+ "4": {
47
+ "machine": "TrioTim",
48
+ "date": "2012",
49
+ "sequence": "VIBE",
50
+ "spacing": {
51
+ "x": 0.742,
52
+ "y": 0.742,
53
+ "z": 2.0
54
+ },
55
+ "contrast_phase": "arterial"
56
+ },
57
+ "5": {
58
+ "machine": "Aera",
59
+ "date": "2019",
60
+ "sequence": "VIBE CAIPIRINHA",
61
+ "spacing": {
62
+ "x": 1.25,
63
+ "y": 1.25,
64
+ "z": 3.0
65
+ },
66
+ "contrast_phase": "arterial"
67
+ },
68
+ "6": {
69
+ "machine": "TrioTim",
70
+ "date": "2013",
71
+ "sequence": "VIBE",
72
+ "spacing": {
73
+ "x": 0.879,
74
+ "y": 0.879,
75
+ "z": 2.5
76
+ },
77
+ "contrast_phase": "arterial"
78
+ },
79
+ "7": {
80
+ "machine": "Skyra",
81
+ "date": "2017",
82
+ "sequence": "VIBE",
83
+ "spacing": {
84
+ "x": 1.136,
85
+ "y": 1.136,
86
+ "z": 3.0
87
+ },
88
+ "contrast_phase": "arterial"
89
+ },
90
+ "8": {
91
+ "machine": "MAGNETOM Sola",
92
+ "date": "2019",
93
+ "sequence": "VIBE CAIPIRINHA",
94
+ "spacing": {
95
+ "x": 1.25,
96
+ "y": 1.25,
97
+ "z": 3.0
98
+ },
99
+ "contrast_phase": "delayed"
100
+ },
101
+ "9": {
102
+ "machine": "Skyra",
103
+ "date": "2020",
104
+ "sequence": "VIBE",
105
+ "spacing": {
106
+ "x": 1.042,
107
+ "y": 1.042,
108
+ "z": 3.0
109
+ },
110
+ "contrast_phase": "portal"
111
+ },
112
+ "10": {
113
+ "machine": "TrioTim",
114
+ "date": "2014",
115
+ "sequence": "VIBE",
116
+ "spacing": {
117
+ "x": 0.82,
118
+ "y": 0.82,
119
+ "z": 2.5
120
+ },
121
+ "contrast_phase": "unknown"
122
+ },
123
+ "11": {
124
+ "machine": "Aera",
125
+ "date": "2019",
126
+ "sequence": "VIBE CAIPIRINHA",
127
+ "spacing": {
128
+ "x": 1.25,
129
+ "y": 1.25,
130
+ "z": 3.0
131
+ },
132
+ "contrast_phase": "arterial"
133
+ },
134
+ "12": {
135
+ "machine": "Signa HDxt",
136
+ "date": "2017",
137
+ "sequence": "LAVA FLEX",
138
+ "spacing": {
139
+ "x": 0.781,
140
+ "y": 0.781,
141
+ "z": 4.6
142
+ },
143
+ "contrast_phase": "unknown"
144
+ },
145
+ "13": {
146
+ "machine": "MAGNETOM Sola",
147
+ "date": "2020",
148
+ "sequence": "VIBE CAIPIRINHA",
149
+ "spacing": {
150
+ "x": 1.188,
151
+ "y": 1.188,
152
+ "z": 3.0
153
+ },
154
+ "contrast_phase": "portal"
155
+ },
156
+ "14": {
157
+ "machine": "Aera",
158
+ "date": "2017",
159
+ "sequence": "VIBE CAIPIRINHA",
160
+ "spacing": {
161
+ "x": 1.312,
162
+ "y": 1.312,
163
+ "z": 3.0
164
+ },
165
+ "contrast_phase": "arterial"
166
+ },
167
+ "15": {
168
+ "machine": "Skyra",
169
+ "date": "2017",
170
+ "sequence": "VIBE TWIST",
171
+ "spacing": {
172
+ "x": 0.938,
173
+ "y": 0.938,
174
+ "z": 2.5
175
+ },
176
+ "contrast_phase": "arterial"
177
+ },
178
+ "16": {
179
+ "machine": "Aera",
180
+ "date": "2017",
181
+ "sequence": "VIBE CAIPIRINHA",
182
+ "spacing": {
183
+ "x": 1.406,
184
+ "y": 1.406,
185
+ "z": 3.0
186
+ },
187
+ "contrast_phase": "delayed"
188
+ },
189
+ "17": {
190
+ "machine": "Aera",
191
+ "date": "2015",
192
+ "sequence": "VIBE CAIPIRINHA",
193
+ "spacing": {
194
+ "x": 1.188,
195
+ "y": 1.188,
196
+ "z": 3.0
197
+ },
198
+ "contrast_phase": "arterial"
199
+ },
200
+ "18": {
201
+ "machine": "Aera",
202
+ "date": "2019",
203
+ "sequence": "VIBE",
204
+ "spacing": {
205
+ "x": 1.25,
206
+ "y": 1.25,
207
+ "z": 3.5
208
+ },
209
+ "contrast_phase": "arterial"
210
+ },
211
+ "19": {
212
+ "machine": "SIGNA Explorer",
213
+ "date": "2016",
214
+ "sequence": "LAVA",
215
+ "spacing": {
216
+ "x": 0.859,
217
+ "y": 0.859,
218
+ "z": 4.2
219
+ },
220
+ "contrast_phase": "delayed"
221
+ },
222
+ "20": {
223
+ "machine": "Skyra",
224
+ "date": "2017",
225
+ "sequence": "VIBE TWIST",
226
+ "spacing": {
227
+ "x": 0.938,
228
+ "y": 0.938,
229
+ "z": 2.5
230
+ },
231
+ "contrast_phase": "arterial"
232
+ },
233
+ "21": {
234
+ "machine": "TrioTim",
235
+ "date": "2016",
236
+ "sequence": "VIBE",
237
+ "spacing": {
238
+ "x": 0.977,
239
+ "y": 0.977,
240
+ "z": 2.1
241
+ },
242
+ "contrast_phase": "delayed"
243
+ },
244
+ "22": {
245
+ "machine": "Skyra",
246
+ "date": "2017",
247
+ "sequence": "VIBE TWIST",
248
+ "spacing": {
249
+ "x": 0.938,
250
+ "y": 0.938,
251
+ "z": 2.5
252
+ },
253
+ "contrast_phase": "arterial"
254
+ },
255
+ "23": {
256
+ "machine": "Aera",
257
+ "date": "2015",
258
+ "sequence": "VIBE CAIPIRINHA",
259
+ "spacing": {
260
+ "x": 1.188,
261
+ "y": 1.188,
262
+ "z": 3.0
263
+ },
264
+ "contrast_phase": "portal"
265
+ },
266
+ "24": {
267
+ "machine": "Signa HDxt",
268
+ "date": "2017",
269
+ "sequence": "LAVA FLEX",
270
+ "spacing": {
271
+ "x": 0.82,
272
+ "y": 0.82,
273
+ "z": 4.4
274
+ },
275
+ "contrast_phase": "unknown"
276
+ },
277
+ "25": {
278
+ "machine": "TrioTim",
279
+ "date": "2013",
280
+ "sequence": "VIBE",
281
+ "spacing": {
282
+ "x": 0.938,
283
+ "y": 0.938,
284
+ "z": 2.0
285
+ },
286
+ "contrast_phase": "arterial"
287
+ },
288
+ "26": {
289
+ "machine": "Aera",
290
+ "date": "2013",
291
+ "sequence": "VIBE",
292
+ "spacing": {
293
+ "x": 0.82,
294
+ "y": 0.82,
295
+ "z": 4.0
296
+ },
297
+ "contrast_phase": "arterial"
298
+ },
299
+ "27": {
300
+ "machine": "TrioTim",
301
+ "date": "2014",
302
+ "sequence": "VIBE",
303
+ "spacing": {
304
+ "x": 0.684,
305
+ "y": 0.684,
306
+ "z": 2.0
307
+ },
308
+ "contrast_phase": "portal"
309
+ },
310
+ "28": {
311
+ "machine": "Aera",
312
+ "date": "2017",
313
+ "sequence": "VIBE CAIPIRINHA",
314
+ "spacing": {
315
+ "x": 1.188,
316
+ "y": 1.188,
317
+ "z": 3.0
318
+ },
319
+ "contrast_phase": "arterial"
320
+ },
321
+ "29": {
322
+ "machine": "Aera",
323
+ "date": "2018",
324
+ "sequence": "VIBE CAIPIRINHA",
325
+ "spacing": {
326
+ "x": 1.188,
327
+ "y": 1.188,
328
+ "z": 3.0
329
+ },
330
+ "contrast_phase": "no contrast agent"
331
+ },
332
+ "30": {
333
+ "machine": "Optima MR450w",
334
+ "date": "2018",
335
+ "sequence": "LAVA",
336
+ "spacing": {
337
+ "x": 0.84,
338
+ "y": 0.84,
339
+ "z": 4.0
340
+ },
341
+ "contrast_phase": "unknown"
342
+ },
343
+ "31": {
344
+ "machine": "Aera",
345
+ "date": "2015",
346
+ "sequence": "VIBE CAIPIRINHA",
347
+ "spacing": {
348
+ "x": 1.188,
349
+ "y": 1.188,
350
+ "z": 3.0
351
+ },
352
+ "contrast_phase": "delayed"
353
+ },
354
+ "32": {
355
+ "machine": "Skyra",
356
+ "date": "2018",
357
+ "sequence": "VIBE TWIST",
358
+ "spacing": {
359
+ "x": 0.938,
360
+ "y": 0.938,
361
+ "z": 2.5
362
+ },
363
+ "contrast_phase": "arterial"
364
+ },
365
+ "33": {
366
+ "machine": "Aera",
367
+ "date": "2016",
368
+ "sequence": "VIBE CAIPIRINHA",
369
+ "spacing": {
370
+ "x": 1.188,
371
+ "y": 1.188,
372
+ "z": 2.6
373
+ },
374
+ "contrast_phase": "arterial"
375
+ },
376
+ "34": {
377
+ "machine": "Aera",
378
+ "date": "2018",
379
+ "sequence": "VIBE CAIPIRINHA",
380
+ "spacing": {
381
+ "x": 1.25,
382
+ "y": 1.25,
383
+ "z": 3.0
384
+ },
385
+ "contrast_phase": "arterial"
386
+ },
387
+ "35": {
388
+ "machine": "Aera",
389
+ "date": "2014",
390
+ "sequence": "VIBE CAIPIRINHA",
391
+ "spacing": {
392
+ "x": 1.188,
393
+ "y": 1.188,
394
+ "z": 3.0
395
+ },
396
+ "contrast_phase": "arterial"
397
+ },
398
+ "36": {
399
+ "machine": "Aera",
400
+ "date": "2016",
401
+ "sequence": "VIBE",
402
+ "spacing": {
403
+ "x": 1.25,
404
+ "y": 1.25,
405
+ "z": 3.5
406
+ },
407
+ "contrast_phase": "delayed"
408
+ },
409
+ "37": {
410
+ "machine": "Skyra",
411
+ "date": "2020",
412
+ "sequence": "VIBE",
413
+ "spacing": {
414
+ "x": 1.172,
415
+ "y": 1.172,
416
+ "z": 3.0
417
+ },
418
+ "contrast_phase": "portal"
419
+ },
420
+ "38": {
421
+ "machine": "Skyra",
422
+ "date": "2019",
423
+ "sequence": "VIBE",
424
+ "spacing": {
425
+ "x": 1.042,
426
+ "y": 1.042,
427
+ "z": 3.0
428
+ },
429
+ "contrast_phase": "portal"
430
+ },
431
+ "39": {
432
+ "machine": "TrioTim",
433
+ "date": "2014",
434
+ "sequence": "VIBE",
435
+ "spacing": {
436
+ "x": 0.762,
437
+ "y": 0.762,
438
+ "z": 2.0
439
+ },
440
+ "contrast_phase": "arterial"
441
+ },
442
+ "40": {
443
+ "machine": "Skyra",
444
+ "date": "2017",
445
+ "sequence": "VIBE TWIST",
446
+ "spacing": {
447
+ "x": 0.938,
448
+ "y": 0.938,
449
+ "z": 2.5
450
+ },
451
+ "contrast_phase": "arterial"
452
+ },
453
+ "41": {
454
+ "machine": "Aera",
455
+ "date": "2013",
456
+ "sequence": "VIBE CAIPIRINHA",
457
+ "spacing": {
458
+ "x": 1.25,
459
+ "y": 1.25,
460
+ "z": 3.5
461
+ },
462
+ "contrast_phase": "portal"
463
+ },
464
+ "42": {
465
+ "machine": "Skyra",
466
+ "date": "2017",
467
+ "sequence": "VIBE TWIST",
468
+ "spacing": {
469
+ "x": 0.938,
470
+ "y": 0.938,
471
+ "z": 2.5
472
+ },
473
+ "contrast_phase": "arterial"
474
+ },
475
+ "43": {
476
+ "machine": "MAGNETOM Sola",
477
+ "date": "2020",
478
+ "sequence": "VIBE",
479
+ "spacing": {
480
+ "x": 0.938,
481
+ "y": 0.938,
482
+ "z": 3.0
483
+ },
484
+ "contrast_phase": "arterial"
485
+ },
486
+ "44": {
487
+ "machine": "Aera",
488
+ "date": "2017",
489
+ "sequence": "VIBE CAIPIRINHA",
490
+ "spacing": {
491
+ "x": 1.188,
492
+ "y": 1.188,
493
+ "z": 3.0
494
+ },
495
+ "contrast_phase": "arterial"
496
+ },
497
+ "45": {
498
+ "machine": "MAGNETOM Sola",
499
+ "date": "2020",
500
+ "sequence": "VIBE",
501
+ "spacing": {
502
+ "x": 0.938,
503
+ "y": 0.938,
504
+ "z": 3.0
505
+ },
506
+ "contrast_phase": "arterial"
507
+ },
508
+ "46": {
509
+ "machine": "Aera",
510
+ "date": "2018",
511
+ "sequence": "VIBE CAIPIRINHA",
512
+ "spacing": {
513
+ "x": 1.188,
514
+ "y": 1.188,
515
+ "z": 3.0
516
+ },
517
+ "contrast_phase": "portal"
518
+ },
519
+ "47": {
520
+ "machine": "OPTIMA MR360",
521
+ "date": "2015",
522
+ "sequence": "LAVA",
523
+ "spacing": {
524
+ "x": 0.859,
525
+ "y": 0.859,
526
+ "z": 4.4
527
+ },
528
+ "contrast_phase": "unknown"
529
+ },
530
+ "48": {
531
+ "machine": "Aera",
532
+ "date": "2016",
533
+ "sequence": "VIBE CAIPIRINHA",
534
+ "spacing": {
535
+ "x": 1.312,
536
+ "y": 1.312,
537
+ "z": 3.0
538
+ },
539
+ "contrast_phase": "arterial"
540
+ },
541
+ "49": {
542
+ "machine": "Aera",
543
+ "date": "2020",
544
+ "sequence": "VIBE CAIPIRINHA",
545
+ "spacing": {
546
+ "x": 1.188,
547
+ "y": 1.188,
548
+ "z": 3.0
549
+ },
550
+ "contrast_phase": "arterial"
551
+ },
552
+ "50": {
553
+ "machine": "SIGNA Explorer",
554
+ "date": "2016",
555
+ "sequence": "LAVA",
556
+ "spacing": {
557
+ "x": 0.859,
558
+ "y": 0.859,
559
+ "z": 4.6
560
+ },
561
+ "contrast_phase": "unknown"
562
+ },
563
+ "51": {
564
+ "machine": "Skyra",
565
+ "date": "2019",
566
+ "sequence": "VIBE",
567
+ "spacing": {
568
+ "x": 1.042,
569
+ "y": 1.042,
570
+ "z": 3.0
571
+ },
572
+ "contrast_phase": "portal"
573
+ },
574
+ "52": {
575
+ "machine": "Aera",
576
+ "date": "2014",
577
+ "sequence": "VIBE",
578
+ "spacing": {
579
+ "x": 1.188,
580
+ "y": 1.188,
581
+ "z": 3.0
582
+ },
583
+ "contrast_phase": "no contrast agent"
584
+ },
585
+ "53": {
586
+ "machine": "Aera",
587
+ "date": "2019",
588
+ "sequence": "VIBE CAIPIRINHA",
589
+ "spacing": {
590
+ "x": 1.188,
591
+ "y": 1.188,
592
+ "z": 3.0
593
+ },
594
+ "contrast_phase": "arterial"
595
+ },
596
+ "54": {
597
+ "machine": "TrioTim",
598
+ "date": "2016",
599
+ "sequence": "VIBE",
600
+ "spacing": {
601
+ "x": 0.918,
602
+ "y": 0.918,
603
+ "z": 2.0
604
+ },
605
+ "contrast_phase": "arterial"
606
+ },
607
+ "55": {
608
+ "machine": "SIGNA Voyager",
609
+ "date": "2017",
610
+ "sequence": "LAVA",
611
+ "spacing": {
612
+ "x": 0.898,
613
+ "y": 0.898,
614
+ "z": 4.4
615
+ },
616
+ "contrast_phase": "unknown"
617
+ },
618
+ "56": {
619
+ "machine": "Aera",
620
+ "date": "2020",
621
+ "sequence": "VIBE CAIPIRINHA",
622
+ "spacing": {
623
+ "x": 1.375,
624
+ "y": 1.375,
625
+ "z": 3.0
626
+ },
627
+ "contrast_phase": "portal"
628
+ },
629
+ "57": {
630
+ "machine": "Aera",
631
+ "date": "2019",
632
+ "sequence": "VIBE",
633
+ "spacing": {
634
+ "x": 1.25,
635
+ "y": 1.25,
636
+ "z": 3.5
637
+ },
638
+ "contrast_phase": "delayed"
639
+ },
640
+ "58": {
641
+ "machine": "Aera",
642
+ "date": "2013",
643
+ "sequence": "VIBE",
644
+ "spacing": {
645
+ "x": 1.146,
646
+ "y": 1.146,
647
+ "z": 3.0
648
+ },
649
+ "contrast_phase": "arterial"
650
+ },
651
+ "59": {
652
+ "machine": "Aera",
653
+ "date": "2018",
654
+ "sequence": "VIBE CAIPIRINHA",
655
+ "spacing": {
656
+ "x": 1.312,
657
+ "y": 1.312,
658
+ "z": 3.0
659
+ },
660
+ "contrast_phase": "arterial"
661
+ }
662
+ }
amos/__MACOSX/amos22/._.DS_Store ADDED
Binary file (120 Bytes). View file
 
amos/__MACOSX/amos22/._imagesVa ADDED
Binary file (220 Bytes). View file
 
amos/__MACOSX/amos22/._readme.md ADDED
Binary file (433 Bytes). View file
 
amos/amos22/.DS_Store ADDED
Binary file (18.4 kB). View file
 
amos/amos22/dataset.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"name": "AMOS", "description": "Amos: A large-scale abdominal multi-organ benchmark for versatile medical image segmentation", "author": "Yuanfeng Ji", "reference": "SRIDB x CUHKSZ x HKU x LGCHSZ x LGPHSZ", "licence": "CC-BY-SA 4.0", "release": "1.0 01/05/2022", "contact": "[email protected]", "tensorImageSize": "3D", "modality": {"0": "CT"}, "labels": {"0": "background", "1": "spleen", "2": "right kidney", "3": "left kidney", "4": "gall bladder", "5": "esophagus", "6": "liver", "7": "stomach", "8": "arota", "9": "postcava", "10": "pancreas", "11": "right adrenal gland", "12": "left adrenal gland", "13": "duodenum", "14": "bladder", "15": "prostate/uterus"}, "numTraining": 240, "numValidation": 120, "numTest": 240, "training": [{"image": "./imagesTr/amos_0001.nii.gz", "label": "./labelsTr/amos_0001.nii.gz"}, {"image": "./imagesTr/amos_0004.nii.gz", "label": "./labelsTr/amos_0004.nii.gz"}, {"image": "./imagesTr/amos_0005.nii.gz", "label": "./labelsTr/amos_0005.nii.gz"}, {"image": "./imagesTr/amos_0006.nii.gz", "label": "./labelsTr/amos_0006.nii.gz"}, {"image": "./imagesTr/amos_0007.nii.gz", "label": "./labelsTr/amos_0007.nii.gz"}, {"image": "./imagesTr/amos_0009.nii.gz", "label": "./labelsTr/amos_0009.nii.gz"}, {"image": "./imagesTr/amos_0010.nii.gz", "label": "./labelsTr/amos_0010.nii.gz"}, {"image": "./imagesTr/amos_0011.nii.gz", "label": "./labelsTr/amos_0011.nii.gz"}, {"image": "./imagesTr/amos_0014.nii.gz", "label": "./labelsTr/amos_0014.nii.gz"}, {"image": "./imagesTr/amos_0015.nii.gz", "label": "./labelsTr/amos_0015.nii.gz"}, {"image": "./imagesTr/amos_0016.nii.gz", "label": "./labelsTr/amos_0016.nii.gz"}, {"image": "./imagesTr/amos_0017.nii.gz", "label": "./labelsTr/amos_0017.nii.gz"}, {"image": "./imagesTr/amos_0019.nii.gz", "label": "./labelsTr/amos_0019.nii.gz"}, {"image": "./imagesTr/amos_0021.nii.gz", "label": "./labelsTr/amos_0021.nii.gz"}, {"image": "./imagesTr/amos_0023.nii.gz", "label": "./labelsTr/amos_0023.nii.gz"}, {"image": 
"./imagesTr/amos_0024.nii.gz", "label": "./labelsTr/amos_0024.nii.gz"}, {"image": "./imagesTr/amos_0025.nii.gz", "label": "./labelsTr/amos_0025.nii.gz"}, {"image": "./imagesTr/amos_0027.nii.gz", "label": "./labelsTr/amos_0027.nii.gz"}, {"image": "./imagesTr/amos_0030.nii.gz", "label": "./labelsTr/amos_0030.nii.gz"}, {"image": "./imagesTr/amos_0033.nii.gz", "label": "./labelsTr/amos_0033.nii.gz"}, {"image": "./imagesTr/amos_0035.nii.gz", "label": "./labelsTr/amos_0035.nii.gz"}, {"image": "./imagesTr/amos_0036.nii.gz", "label": "./labelsTr/amos_0036.nii.gz"}, {"image": "./imagesTr/amos_0038.nii.gz", "label": "./labelsTr/amos_0038.nii.gz"}, {"image": "./imagesTr/amos_0042.nii.gz", "label": "./labelsTr/amos_0042.nii.gz"}, {"image": "./imagesTr/amos_0043.nii.gz", "label": "./labelsTr/amos_0043.nii.gz"}, {"image": "./imagesTr/amos_0044.nii.gz", "label": "./labelsTr/amos_0044.nii.gz"}, {"image": "./imagesTr/amos_0045.nii.gz", "label": "./labelsTr/amos_0045.nii.gz"}, {"image": "./imagesTr/amos_0047.nii.gz", "label": "./labelsTr/amos_0047.nii.gz"}, {"image": "./imagesTr/amos_0048.nii.gz", "label": "./labelsTr/amos_0048.nii.gz"}, {"image": "./imagesTr/amos_0049.nii.gz", "label": "./labelsTr/amos_0049.nii.gz"}, {"image": "./imagesTr/amos_0050.nii.gz", "label": "./labelsTr/amos_0050.nii.gz"}, {"image": "./imagesTr/amos_0052.nii.gz", "label": "./labelsTr/amos_0052.nii.gz"}, {"image": "./imagesTr/amos_0054.nii.gz", "label": "./labelsTr/amos_0054.nii.gz"}, {"image": "./imagesTr/amos_0057.nii.gz", "label": "./labelsTr/amos_0057.nii.gz"}, {"image": "./imagesTr/amos_0058.nii.gz", "label": "./labelsTr/amos_0058.nii.gz"}, {"image": "./imagesTr/amos_0059.nii.gz", "label": "./labelsTr/amos_0059.nii.gz"}, {"image": "./imagesTr/amos_0060.nii.gz", "label": "./labelsTr/amos_0060.nii.gz"}, {"image": "./imagesTr/amos_0064.nii.gz", "label": "./labelsTr/amos_0064.nii.gz"}, {"image": "./imagesTr/amos_0066.nii.gz", "label": "./labelsTr/amos_0066.nii.gz"}, {"image": "./imagesTr/amos_0067.nii.gz", 
"label": "./labelsTr/amos_0067.nii.gz"}, {"image": "./imagesTr/amos_0069.nii.gz", "label": "./labelsTr/amos_0069.nii.gz"}, {"image": "./imagesTr/amos_0071.nii.gz", "label": "./labelsTr/amos_0071.nii.gz"}, {"image": "./imagesTr/amos_0072.nii.gz", "label": "./labelsTr/amos_0072.nii.gz"}, {"image": "./imagesTr/amos_0075.nii.gz", "label": "./labelsTr/amos_0075.nii.gz"}, {"image": "./imagesTr/amos_0076.nii.gz", "label": "./labelsTr/amos_0076.nii.gz"}, {"image": "./imagesTr/amos_0077.nii.gz", "label": "./labelsTr/amos_0077.nii.gz"}, {"image": "./imagesTr/amos_0078.nii.gz", "label": "./labelsTr/amos_0078.nii.gz"}, {"image": "./imagesTr/amos_0079.nii.gz", "label": "./labelsTr/amos_0079.nii.gz"}, {"image": "./imagesTr/amos_0081.nii.gz", "label": "./labelsTr/amos_0081.nii.gz"}, {"image": "./imagesTr/amos_0083.nii.gz", "label": "./labelsTr/amos_0083.nii.gz"}, {"image": "./imagesTr/amos_0084.nii.gz", "label": "./labelsTr/amos_0084.nii.gz"}, {"image": "./imagesTr/amos_0086.nii.gz", "label": "./labelsTr/amos_0086.nii.gz"}, {"image": "./imagesTr/amos_0088.nii.gz", "label": "./labelsTr/amos_0088.nii.gz"}, {"image": "./imagesTr/amos_0089.nii.gz", "label": "./labelsTr/amos_0089.nii.gz"}, {"image": "./imagesTr/amos_0092.nii.gz", "label": "./labelsTr/amos_0092.nii.gz"}, {"image": "./imagesTr/amos_0094.nii.gz", "label": "./labelsTr/amos_0094.nii.gz"}, {"image": "./imagesTr/amos_0097.nii.gz", "label": "./labelsTr/amos_0097.nii.gz"}, {"image": "./imagesTr/amos_0098.nii.gz", "label": "./labelsTr/amos_0098.nii.gz"}, {"image": "./imagesTr/amos_0099.nii.gz", "label": "./labelsTr/amos_0099.nii.gz"}, {"image": "./imagesTr/amos_0102.nii.gz", "label": "./labelsTr/amos_0102.nii.gz"}, {"image": "./imagesTr/amos_0103.nii.gz", "label": "./labelsTr/amos_0103.nii.gz"}, {"image": "./imagesTr/amos_0104.nii.gz", "label": "./labelsTr/amos_0104.nii.gz"}, {"image": "./imagesTr/amos_0105.nii.gz", "label": "./labelsTr/amos_0105.nii.gz"}, {"image": "./imagesTr/amos_0109.nii.gz", "label": 
"./labelsTr/amos_0109.nii.gz"}, {"image": "./imagesTr/amos_0110.nii.gz", "label": "./labelsTr/amos_0110.nii.gz"}, {"image": "./imagesTr/amos_0111.nii.gz", "label": "./labelsTr/amos_0111.nii.gz"}, {"image": "./imagesTr/amos_0113.nii.gz", "label": "./labelsTr/amos_0113.nii.gz"}, {"image": "./imagesTr/amos_0115.nii.gz", "label": "./labelsTr/amos_0115.nii.gz"}, {"image": "./imagesTr/amos_0116.nii.gz", "label": "./labelsTr/amos_0116.nii.gz"}, {"image": "./imagesTr/amos_0118.nii.gz", "label": "./labelsTr/amos_0118.nii.gz"}, {"image": "./imagesTr/amos_0119.nii.gz", "label": "./labelsTr/amos_0119.nii.gz"}, {"image": "./imagesTr/amos_0121.nii.gz", "label": "./labelsTr/amos_0121.nii.gz"}, {"image": "./imagesTr/amos_0124.nii.gz", "label": "./labelsTr/amos_0124.nii.gz"}, {"image": "./imagesTr/amos_0125.nii.gz", "label": "./labelsTr/amos_0125.nii.gz"}, {"image": "./imagesTr/amos_0126.nii.gz", "label": "./labelsTr/amos_0126.nii.gz"}, {"image": "./imagesTr/amos_0127.nii.gz", "label": "./labelsTr/amos_0127.nii.gz"}, {"image": "./imagesTr/amos_0129.nii.gz", "label": "./labelsTr/amos_0129.nii.gz"}, {"image": "./imagesTr/amos_0131.nii.gz", "label": "./labelsTr/amos_0131.nii.gz"}, {"image": "./imagesTr/amos_0133.nii.gz", "label": "./labelsTr/amos_0133.nii.gz"}, {"image": "./imagesTr/amos_0134.nii.gz", "label": "./labelsTr/amos_0134.nii.gz"}, {"image": "./imagesTr/amos_0135.nii.gz", "label": "./labelsTr/amos_0135.nii.gz"}, {"image": "./imagesTr/amos_0137.nii.gz", "label": "./labelsTr/amos_0137.nii.gz"}, {"image": "./imagesTr/amos_0138.nii.gz", "label": "./labelsTr/amos_0138.nii.gz"}, {"image": "./imagesTr/amos_0141.nii.gz", "label": "./labelsTr/amos_0141.nii.gz"}, {"image": "./imagesTr/amos_0142.nii.gz", "label": "./labelsTr/amos_0142.nii.gz"}, {"image": "./imagesTr/amos_0143.nii.gz", "label": "./labelsTr/amos_0143.nii.gz"}, {"image": "./imagesTr/amos_0147.nii.gz", "label": "./labelsTr/amos_0147.nii.gz"}, {"image": "./imagesTr/amos_0149.nii.gz", "label": "./labelsTr/amos_0149.nii.gz"}, 
{"image": "./imagesTr/amos_0152.nii.gz", "label": "./labelsTr/amos_0152.nii.gz"}, {"image": "./imagesTr/amos_0153.nii.gz", "label": "./labelsTr/amos_0153.nii.gz"}, {"image": "./imagesTr/amos_0154.nii.gz", "label": "./labelsTr/amos_0154.nii.gz"}, {"image": "./imagesTr/amos_0156.nii.gz", "label": "./labelsTr/amos_0156.nii.gz"}, {"image": "./imagesTr/amos_0158.nii.gz", "label": "./labelsTr/amos_0158.nii.gz"}, {"image": "./imagesTr/amos_0159.nii.gz", "label": "./labelsTr/amos_0159.nii.gz"}, {"image": "./imagesTr/amos_0160.nii.gz", "label": "./labelsTr/amos_0160.nii.gz"}, {"image": "./imagesTr/amos_0161.nii.gz", "label": "./labelsTr/amos_0161.nii.gz"}, {"image": "./imagesTr/amos_0162.nii.gz", "label": "./labelsTr/amos_0162.nii.gz"}, {"image": "./imagesTr/amos_0166.nii.gz", "label": "./labelsTr/amos_0166.nii.gz"}, {"image": "./imagesTr/amos_0170.nii.gz", "label": "./labelsTr/amos_0170.nii.gz"}, {"image": "./imagesTr/amos_0171.nii.gz", "label": "./labelsTr/amos_0171.nii.gz"}, {"image": "./imagesTr/amos_0172.nii.gz", "label": "./labelsTr/amos_0172.nii.gz"}, {"image": "./imagesTr/amos_0173.nii.gz", "label": "./labelsTr/amos_0173.nii.gz"}, {"image": "./imagesTr/amos_0175.nii.gz", "label": "./labelsTr/amos_0175.nii.gz"}, {"image": "./imagesTr/amos_0177.nii.gz", "label": "./labelsTr/amos_0177.nii.gz"}, {"image": "./imagesTr/amos_0179.nii.gz", "label": "./labelsTr/amos_0179.nii.gz"}, {"image": "./imagesTr/amos_0180.nii.gz", "label": "./labelsTr/amos_0180.nii.gz"}, {"image": "./imagesTr/amos_0181.nii.gz", "label": "./labelsTr/amos_0181.nii.gz"}, {"image": "./imagesTr/amos_0184.nii.gz", "label": "./labelsTr/amos_0184.nii.gz"}, {"image": "./imagesTr/amos_0185.nii.gz", "label": "./labelsTr/amos_0185.nii.gz"}, {"image": "./imagesTr/amos_0186.nii.gz", "label": "./labelsTr/amos_0186.nii.gz"}, {"image": "./imagesTr/amos_0188.nii.gz", "label": "./labelsTr/amos_0188.nii.gz"}, {"image": "./imagesTr/amos_0190.nii.gz", "label": "./labelsTr/amos_0190.nii.gz"}, {"image": 
"./imagesTr/amos_0192.nii.gz", "label": "./labelsTr/amos_0192.nii.gz"}, {"image": "./imagesTr/amos_0193.nii.gz", "label": "./labelsTr/amos_0193.nii.gz"}, {"image": "./imagesTr/amos_0195.nii.gz", "label": "./labelsTr/amos_0195.nii.gz"}, {"image": "./imagesTr/amos_0196.nii.gz", "label": "./labelsTr/amos_0196.nii.gz"}, {"image": "./imagesTr/amos_0197.nii.gz", "label": "./labelsTr/amos_0197.nii.gz"}, {"image": "./imagesTr/amos_0198.nii.gz", "label": "./labelsTr/amos_0198.nii.gz"}, {"image": "./imagesTr/amos_0199.nii.gz", "label": "./labelsTr/amos_0199.nii.gz"}, {"image": "./imagesTr/amos_0212.nii.gz", "label": "./labelsTr/amos_0212.nii.gz"}, {"image": "./imagesTr/amos_0214.nii.gz", "label": "./labelsTr/amos_0214.nii.gz"}, {"image": "./imagesTr/amos_0215.nii.gz", "label": "./labelsTr/amos_0215.nii.gz"}, {"image": "./imagesTr/amos_0217.nii.gz", "label": "./labelsTr/amos_0217.nii.gz"}, {"image": "./imagesTr/amos_0224.nii.gz", "label": "./labelsTr/amos_0224.nii.gz"}, {"image": "./imagesTr/amos_0225.nii.gz", "label": "./labelsTr/amos_0225.nii.gz"}, {"image": "./imagesTr/amos_0226.nii.gz", "label": "./labelsTr/amos_0226.nii.gz"}, {"image": "./imagesTr/amos_0230.nii.gz", "label": "./labelsTr/amos_0230.nii.gz"}, {"image": "./imagesTr/amos_0231.nii.gz", "label": "./labelsTr/amos_0231.nii.gz"}, {"image": "./imagesTr/amos_0235.nii.gz", "label": "./labelsTr/amos_0235.nii.gz"}, {"image": "./imagesTr/amos_0237.nii.gz", "label": "./labelsTr/amos_0237.nii.gz"}, {"image": "./imagesTr/amos_0239.nii.gz", "label": "./labelsTr/amos_0239.nii.gz"}, {"image": "./imagesTr/amos_0242.nii.gz", "label": "./labelsTr/amos_0242.nii.gz"}, {"image": "./imagesTr/amos_0245.nii.gz", "label": "./labelsTr/amos_0245.nii.gz"}, {"image": "./imagesTr/amos_0248.nii.gz", "label": "./labelsTr/amos_0248.nii.gz"}, {"image": "./imagesTr/amos_0249.nii.gz", "label": "./labelsTr/amos_0249.nii.gz"}, {"image": "./imagesTr/amos_0254.nii.gz", "label": "./labelsTr/amos_0254.nii.gz"}, {"image": "./imagesTr/amos_0259.nii.gz", 
"label": "./labelsTr/amos_0259.nii.gz"}, {"image": "./imagesTr/amos_0263.nii.gz", "label": "./labelsTr/amos_0263.nii.gz"}, {"image": "./imagesTr/amos_0264.nii.gz", "label": "./labelsTr/amos_0264.nii.gz"}, {"image": "./imagesTr/amos_0268.nii.gz", "label": "./labelsTr/amos_0268.nii.gz"}, {"image": "./imagesTr/amos_0272.nii.gz", "label": "./labelsTr/amos_0272.nii.gz"}, {"image": "./imagesTr/amos_0273.nii.gz", "label": "./labelsTr/amos_0273.nii.gz"}, {"image": "./imagesTr/amos_0274.nii.gz", "label": "./labelsTr/amos_0274.nii.gz"}, {"image": "./imagesTr/amos_0276.nii.gz", "label": "./labelsTr/amos_0276.nii.gz"}, {"image": "./imagesTr/amos_0279.nii.gz", "label": "./labelsTr/amos_0279.nii.gz"}, {"image": "./imagesTr/amos_0281.nii.gz", "label": "./labelsTr/amos_0281.nii.gz"}, {"image": "./imagesTr/amos_0282.nii.gz", "label": "./labelsTr/amos_0282.nii.gz"}, {"image": "./imagesTr/amos_0288.nii.gz", "label": "./labelsTr/amos_0288.nii.gz"}, {"image": "./imagesTr/amos_0294.nii.gz", "label": "./labelsTr/amos_0294.nii.gz"}, {"image": "./imagesTr/amos_0296.nii.gz", "label": "./labelsTr/amos_0296.nii.gz"}, {"image": "./imagesTr/amos_0297.nii.gz", "label": "./labelsTr/amos_0297.nii.gz"}, {"image": "./imagesTr/amos_0299.nii.gz", "label": "./labelsTr/amos_0299.nii.gz"}, {"image": "./imagesTr/amos_0301.nii.gz", "label": "./labelsTr/amos_0301.nii.gz"}, {"image": "./imagesTr/amos_0302.nii.gz", "label": "./labelsTr/amos_0302.nii.gz"}, {"image": "./imagesTr/amos_0307.nii.gz", "label": "./labelsTr/amos_0307.nii.gz"}, {"image": "./imagesTr/amos_0317.nii.gz", "label": "./labelsTr/amos_0317.nii.gz"}, {"image": "./imagesTr/amos_0320.nii.gz", "label": "./labelsTr/amos_0320.nii.gz"}, {"image": "./imagesTr/amos_0321.nii.gz", "label": "./labelsTr/amos_0321.nii.gz"}, {"image": "./imagesTr/amos_0330.nii.gz", "label": "./labelsTr/amos_0330.nii.gz"}, {"image": "./imagesTr/amos_0332.nii.gz", "label": "./labelsTr/amos_0332.nii.gz"}, {"image": "./imagesTr/amos_0336.nii.gz", "label": 
"./labelsTr/amos_0336.nii.gz"}, {"image": "./imagesTr/amos_0337.nii.gz", "label": "./labelsTr/amos_0337.nii.gz"}, {"image": "./imagesTr/amos_0341.nii.gz", "label": "./labelsTr/amos_0341.nii.gz"}, {"image": "./imagesTr/amos_0348.nii.gz", "label": "./labelsTr/amos_0348.nii.gz"}, {"image": "./imagesTr/amos_0349.nii.gz", "label": "./labelsTr/amos_0349.nii.gz"}, {"image": "./imagesTr/amos_0350.nii.gz", "label": "./labelsTr/amos_0350.nii.gz"}, {"image": "./imagesTr/amos_0351.nii.gz", "label": "./labelsTr/amos_0351.nii.gz"}, {"image": "./imagesTr/amos_0353.nii.gz", "label": "./labelsTr/amos_0353.nii.gz"}, {"image": "./imagesTr/amos_0358.nii.gz", "label": "./labelsTr/amos_0358.nii.gz"}, {"image": "./imagesTr/amos_0361.nii.gz", "label": "./labelsTr/amos_0361.nii.gz"}, {"image": "./imagesTr/amos_0362.nii.gz", "label": "./labelsTr/amos_0362.nii.gz"}, {"image": "./imagesTr/amos_0366.nii.gz", "label": "./labelsTr/amos_0366.nii.gz"}, {"image": "./imagesTr/amos_0367.nii.gz", "label": "./labelsTr/amos_0367.nii.gz"}, {"image": "./imagesTr/amos_0370.nii.gz", "label": "./labelsTr/amos_0370.nii.gz"}, {"image": "./imagesTr/amos_0371.nii.gz", "label": "./labelsTr/amos_0371.nii.gz"}, {"image": "./imagesTr/amos_0374.nii.gz", "label": "./labelsTr/amos_0374.nii.gz"}, {"image": "./imagesTr/amos_0376.nii.gz", "label": "./labelsTr/amos_0376.nii.gz"}, {"image": "./imagesTr/amos_0378.nii.gz", "label": "./labelsTr/amos_0378.nii.gz"}, {"image": "./imagesTr/amos_0379.nii.gz", "label": "./labelsTr/amos_0379.nii.gz"}, {"image": "./imagesTr/amos_0380.nii.gz", "label": "./labelsTr/amos_0380.nii.gz"}, {"image": "./imagesTr/amos_0381.nii.gz", "label": "./labelsTr/amos_0381.nii.gz"}, {"image": "./imagesTr/amos_0383.nii.gz", "label": "./labelsTr/amos_0383.nii.gz"}, {"image": "./imagesTr/amos_0384.nii.gz", "label": "./labelsTr/amos_0384.nii.gz"}, {"image": "./imagesTr/amos_0387.nii.gz", "label": "./labelsTr/amos_0387.nii.gz"}, {"image": "./imagesTr/amos_0388.nii.gz", "label": "./labelsTr/amos_0388.nii.gz"}, 
{"image": "./imagesTr/amos_0390.nii.gz", "label": "./labelsTr/amos_0390.nii.gz"}, {"image": "./imagesTr/amos_0391.nii.gz", "label": "./labelsTr/amos_0391.nii.gz"}, {"image": "./imagesTr/amos_0392.nii.gz", "label": "./labelsTr/amos_0392.nii.gz"}, {"image": "./imagesTr/amos_0395.nii.gz", "label": "./labelsTr/amos_0395.nii.gz"}, {"image": "./imagesTr/amos_0396.nii.gz", "label": "./labelsTr/amos_0396.nii.gz"}, {"image": "./imagesTr/amos_0398.nii.gz", "label": "./labelsTr/amos_0398.nii.gz"}, {"image": "./imagesTr/amos_0400.nii.gz", "label": "./labelsTr/amos_0400.nii.gz"}, {"image": "./imagesTr/amos_0401.nii.gz", "label": "./labelsTr/amos_0401.nii.gz"}, {"image": "./imagesTr/amos_0402.nii.gz", "label": "./labelsTr/amos_0402.nii.gz"}, {"image": "./imagesTr/amos_0403.nii.gz", "label": "./labelsTr/amos_0403.nii.gz"}, {"image": "./imagesTr/amos_0404.nii.gz", "label": "./labelsTr/amos_0404.nii.gz"}, {"image": "./imagesTr/amos_0405.nii.gz", "label": "./labelsTr/amos_0405.nii.gz"}, {"image": "./imagesTr/amos_0406.nii.gz", "label": "./labelsTr/amos_0406.nii.gz"}, {"image": "./imagesTr/amos_0408.nii.gz", "label": "./labelsTr/amos_0408.nii.gz"}, {"image": "./imagesTr/amos_0410.nii.gz", "label": "./labelsTr/amos_0410.nii.gz"}, {"image": "./imagesTr/amos_0507.nii.gz", "label": "./labelsTr/amos_0507.nii.gz"}, {"image": "./imagesTr/amos_0508.nii.gz", "label": "./labelsTr/amos_0508.nii.gz"}, {"image": "./imagesTr/amos_0510.nii.gz", "label": "./labelsTr/amos_0510.nii.gz"}, {"image": "./imagesTr/amos_0514.nii.gz", "label": "./labelsTr/amos_0514.nii.gz"}, {"image": "./imagesTr/amos_0517.nii.gz", "label": "./labelsTr/amos_0517.nii.gz"}, {"image": "./imagesTr/amos_0518.nii.gz", "label": "./labelsTr/amos_0518.nii.gz"}, {"image": "./imagesTr/amos_0522.nii.gz", "label": "./labelsTr/amos_0522.nii.gz"}, {"image": "./imagesTr/amos_0530.nii.gz", "label": "./labelsTr/amos_0530.nii.gz"}, {"image": "./imagesTr/amos_0532.nii.gz", "label": "./labelsTr/amos_0532.nii.gz"}, {"image": 
"./imagesTr/amos_0538.nii.gz", "label": "./labelsTr/amos_0538.nii.gz"}, {"image": "./imagesTr/amos_0540.nii.gz", "label": "./labelsTr/amos_0540.nii.gz"}, {"image": "./imagesTr/amos_0541.nii.gz", "label": "./labelsTr/amos_0541.nii.gz"}, {"image": "./imagesTr/amos_0548.nii.gz", "label": "./labelsTr/amos_0548.nii.gz"}, {"image": "./imagesTr/amos_0551.nii.gz", "label": "./labelsTr/amos_0551.nii.gz"}, {"image": "./imagesTr/amos_0554.nii.gz", "label": "./labelsTr/amos_0554.nii.gz"}, {"image": "./imagesTr/amos_0555.nii.gz", "label": "./labelsTr/amos_0555.nii.gz"}, {"image": "./imagesTr/amos_0557.nii.gz", "label": "./labelsTr/amos_0557.nii.gz"}, {"image": "./imagesTr/amos_0558.nii.gz", "label": "./labelsTr/amos_0558.nii.gz"}, {"image": "./imagesTr/amos_0570.nii.gz", "label": "./labelsTr/amos_0570.nii.gz"}, {"image": "./imagesTr/amos_0571.nii.gz", "label": "./labelsTr/amos_0571.nii.gz"}, {"image": "./imagesTr/amos_0578.nii.gz", "label": "./labelsTr/amos_0578.nii.gz"}, {"image": "./imagesTr/amos_0580.nii.gz", "label": "./labelsTr/amos_0580.nii.gz"}, {"image": "./imagesTr/amos_0582.nii.gz", "label": "./labelsTr/amos_0582.nii.gz"}, {"image": "./imagesTr/amos_0583.nii.gz", "label": "./labelsTr/amos_0583.nii.gz"}, {"image": "./imagesTr/amos_0584.nii.gz", "label": "./labelsTr/amos_0584.nii.gz"}, {"image": "./imagesTr/amos_0585.nii.gz", "label": "./labelsTr/amos_0585.nii.gz"}, {"image": "./imagesTr/amos_0586.nii.gz", "label": "./labelsTr/amos_0586.nii.gz"}, {"image": "./imagesTr/amos_0587.nii.gz", "label": "./labelsTr/amos_0587.nii.gz"}, {"image": "./imagesTr/amos_0588.nii.gz", "label": "./labelsTr/amos_0588.nii.gz"}, {"image": "./imagesTr/amos_0589.nii.gz", "label": "./labelsTr/amos_0589.nii.gz"}, {"image": "./imagesTr/amos_0590.nii.gz", "label": "./labelsTr/amos_0590.nii.gz"}, {"image": "./imagesTr/amos_0591.nii.gz", "label": "./labelsTr/amos_0591.nii.gz"}, {"image": "./imagesTr/amos_0592.nii.gz", "label": "./labelsTr/amos_0592.nii.gz"}, {"image": "./imagesTr/amos_0593.nii.gz", 
"label": "./labelsTr/amos_0593.nii.gz"}, {"image": "./imagesTr/amos_0594.nii.gz", "label": "./labelsTr/amos_0594.nii.gz"}, {"image": "./imagesTr/amos_0595.nii.gz", "label": "./labelsTr/amos_0595.nii.gz"}, {"image": "./imagesTr/amos_0596.nii.gz", "label": "./labelsTr/amos_0596.nii.gz"}, {"image": "./imagesTr/amos_0597.nii.gz", "label": "./labelsTr/amos_0597.nii.gz"}, {"image": "./imagesTr/amos_0599.nii.gz", "label": "./labelsTr/amos_0599.nii.gz"}, {"image": "./imagesTr/amos_0600.nii.gz", "label": "./labelsTr/amos_0600.nii.gz"}], "validation": [{"image": "./imagesVa/amos_0008.nii.gz", "label": "./labelsVa/amos_0008.nii.gz"}, {"image": "./imagesVa/amos_0013.nii.gz", "label": "./labelsVa/amos_0013.nii.gz"}, {"image": "./imagesVa/amos_0018.nii.gz", "label": "./labelsVa/amos_0018.nii.gz"}, {"image": "./imagesVa/amos_0022.nii.gz", "label": "./labelsVa/amos_0022.nii.gz"}, {"image": "./imagesVa/amos_0029.nii.gz", "label": "./labelsVa/amos_0029.nii.gz"}, {"image": "./imagesVa/amos_0032.nii.gz", "label": "./labelsVa/amos_0032.nii.gz"}, {"image": "./imagesVa/amos_0034.nii.gz", "label": "./labelsVa/amos_0034.nii.gz"}, {"image": "./imagesVa/amos_0040.nii.gz", "label": "./labelsVa/amos_0040.nii.gz"}, {"image": "./imagesVa/amos_0041.nii.gz", "label": "./labelsVa/amos_0041.nii.gz"}, {"image": "./imagesVa/amos_0051.nii.gz", "label": "./labelsVa/amos_0051.nii.gz"}, {"image": "./imagesVa/amos_0056.nii.gz", "label": "./labelsVa/amos_0056.nii.gz"}, {"image": "./imagesVa/amos_0061.nii.gz", "label": "./labelsVa/amos_0061.nii.gz"}, {"image": "./imagesVa/amos_0063.nii.gz", "label": "./labelsVa/amos_0063.nii.gz"}, {"image": "./imagesVa/amos_0070.nii.gz", "label": "./labelsVa/amos_0070.nii.gz"}, {"image": "./imagesVa/amos_0073.nii.gz", "label": "./labelsVa/amos_0073.nii.gz"}, {"image": "./imagesVa/amos_0085.nii.gz", "label": "./labelsVa/amos_0085.nii.gz"}, {"image": "./imagesVa/amos_0087.nii.gz", "label": "./labelsVa/amos_0087.nii.gz"}, {"image": "./imagesVa/amos_0090.nii.gz", "label": 
"./labelsVa/amos_0090.nii.gz"}, {"image": "./imagesVa/amos_0106.nii.gz", "label": "./labelsVa/amos_0106.nii.gz"}, {"image": "./imagesVa/amos_0108.nii.gz", "label": "./labelsVa/amos_0108.nii.gz"}, {"image": "./imagesVa/amos_0112.nii.gz", "label": "./labelsVa/amos_0112.nii.gz"}, {"image": "./imagesVa/amos_0117.nii.gz", "label": "./labelsVa/amos_0117.nii.gz"}, {"image": "./imagesVa/amos_0120.nii.gz", "label": "./labelsVa/amos_0120.nii.gz"}, {"image": "./imagesVa/amos_0123.nii.gz", "label": "./labelsVa/amos_0123.nii.gz"}, {"image": "./imagesVa/amos_0128.nii.gz", "label": "./labelsVa/amos_0128.nii.gz"}, {"image": "./imagesVa/amos_0132.nii.gz", "label": "./labelsVa/amos_0132.nii.gz"}, {"image": "./imagesVa/amos_0136.nii.gz", "label": "./labelsVa/amos_0136.nii.gz"}, {"image": "./imagesVa/amos_0140.nii.gz", "label": "./labelsVa/amos_0140.nii.gz"}, {"image": "./imagesVa/amos_0144.nii.gz", "label": "./labelsVa/amos_0144.nii.gz"}, {"image": "./imagesVa/amos_0150.nii.gz", "label": "./labelsVa/amos_0150.nii.gz"}, {"image": "./imagesVa/amos_0155.nii.gz", "label": "./labelsVa/amos_0155.nii.gz"}, {"image": "./imagesVa/amos_0157.nii.gz", "label": "./labelsVa/amos_0157.nii.gz"}, {"image": "./imagesVa/amos_0167.nii.gz", "label": "./labelsVa/amos_0167.nii.gz"}, {"image": "./imagesVa/amos_0174.nii.gz", "label": "./labelsVa/amos_0174.nii.gz"}, {"image": "./imagesVa/amos_0176.nii.gz", "label": "./labelsVa/amos_0176.nii.gz"}, {"image": "./imagesVa/amos_0189.nii.gz", "label": "./labelsVa/amos_0189.nii.gz"}, {"image": "./imagesVa/amos_0191.nii.gz", "label": "./labelsVa/amos_0191.nii.gz"}, {"image": "./imagesVa/amos_0194.nii.gz", "label": "./labelsVa/amos_0194.nii.gz"}, {"image": "./imagesVa/amos_0200.nii.gz", "label": "./labelsVa/amos_0200.nii.gz"}, {"image": "./imagesVa/amos_0202.nii.gz", "label": "./labelsVa/amos_0202.nii.gz"}, {"image": "./imagesVa/amos_0203.nii.gz", "label": "./labelsVa/amos_0203.nii.gz"}, {"image": "./imagesVa/amos_0204.nii.gz", "label": "./labelsVa/amos_0204.nii.gz"}, 
{"image": "./imagesVa/amos_0206.nii.gz", "label": "./labelsVa/amos_0206.nii.gz"}, {"image": "./imagesVa/amos_0207.nii.gz", "label": "./labelsVa/amos_0207.nii.gz"}, {"image": "./imagesVa/amos_0208.nii.gz", "label": "./labelsVa/amos_0208.nii.gz"}, {"image": "./imagesVa/amos_0216.nii.gz", "label": "./labelsVa/amos_0216.nii.gz"}, {"image": "./imagesVa/amos_0218.nii.gz", "label": "./labelsVa/amos_0218.nii.gz"}, {"image": "./imagesVa/amos_0219.nii.gz", "label": "./labelsVa/amos_0219.nii.gz"}, {"image": "./imagesVa/amos_0223.nii.gz", "label": "./labelsVa/amos_0223.nii.gz"}, {"image": "./imagesVa/amos_0228.nii.gz", "label": "./labelsVa/amos_0228.nii.gz"}, {"image": "./imagesVa/amos_0233.nii.gz", "label": "./labelsVa/amos_0233.nii.gz"}, {"image": "./imagesVa/amos_0238.nii.gz", "label": "./labelsVa/amos_0238.nii.gz"}, {"image": "./imagesVa/amos_0244.nii.gz", "label": "./labelsVa/amos_0244.nii.gz"}, {"image": "./imagesVa/amos_0247.nii.gz", "label": "./labelsVa/amos_0247.nii.gz"}, {"image": "./imagesVa/amos_0250.nii.gz", "label": "./labelsVa/amos_0250.nii.gz"}, {"image": "./imagesVa/amos_0255.nii.gz", "label": "./labelsVa/amos_0255.nii.gz"}, {"image": "./imagesVa/amos_0257.nii.gz", "label": "./labelsVa/amos_0257.nii.gz"}, {"image": "./imagesVa/amos_0258.nii.gz", "label": "./labelsVa/amos_0258.nii.gz"}, {"image": "./imagesVa/amos_0278.nii.gz", "label": "./labelsVa/amos_0278.nii.gz"}, {"image": "./imagesVa/amos_0280.nii.gz", "label": "./labelsVa/amos_0280.nii.gz"}, {"image": "./imagesVa/amos_0283.nii.gz", "label": "./labelsVa/amos_0283.nii.gz"}, {"image": "./imagesVa/amos_0284.nii.gz", "label": "./labelsVa/amos_0284.nii.gz"}, {"image": "./imagesVa/amos_0286.nii.gz", "label": "./labelsVa/amos_0286.nii.gz"}, {"image": "./imagesVa/amos_0287.nii.gz", "label": "./labelsVa/amos_0287.nii.gz"}, {"image": "./imagesVa/amos_0289.nii.gz", "label": "./labelsVa/amos_0289.nii.gz"}, {"image": "./imagesVa/amos_0290.nii.gz", "label": "./labelsVa/amos_0290.nii.gz"}, {"image": 
"./imagesVa/amos_0292.nii.gz", "label": "./labelsVa/amos_0292.nii.gz"}, {"image": "./imagesVa/amos_0293.nii.gz", "label": "./labelsVa/amos_0293.nii.gz"}, {"image": "./imagesVa/amos_0304.nii.gz", "label": "./labelsVa/amos_0304.nii.gz"}, {"image": "./imagesVa/amos_0308.nii.gz", "label": "./labelsVa/amos_0308.nii.gz"}, {"image": "./imagesVa/amos_0309.nii.gz", "label": "./labelsVa/amos_0309.nii.gz"}, {"image": "./imagesVa/amos_0310.nii.gz", "label": "./labelsVa/amos_0310.nii.gz"}, {"image": "./imagesVa/amos_0311.nii.gz", "label": "./labelsVa/amos_0311.nii.gz"}, {"image": "./imagesVa/amos_0313.nii.gz", "label": "./labelsVa/amos_0313.nii.gz"}, {"image": "./imagesVa/amos_0316.nii.gz", "label": "./labelsVa/amos_0316.nii.gz"}, {"image": "./imagesVa/amos_0318.nii.gz", "label": "./labelsVa/amos_0318.nii.gz"}, {"image": "./imagesVa/amos_0323.nii.gz", "label": "./labelsVa/amos_0323.nii.gz"}, {"image": "./imagesVa/amos_0325.nii.gz", "label": "./labelsVa/amos_0325.nii.gz"}, {"image": "./imagesVa/amos_0326.nii.gz", "label": "./labelsVa/amos_0326.nii.gz"}, {"image": "./imagesVa/amos_0328.nii.gz", "label": "./labelsVa/amos_0328.nii.gz"}, {"image": "./imagesVa/amos_0333.nii.gz", "label": "./labelsVa/amos_0333.nii.gz"}, {"image": "./imagesVa/amos_0334.nii.gz", "label": "./labelsVa/amos_0334.nii.gz"}, {"image": "./imagesVa/amos_0339.nii.gz", "label": "./labelsVa/amos_0339.nii.gz"}, {"image": "./imagesVa/amos_0342.nii.gz", "label": "./labelsVa/amos_0342.nii.gz"}, {"image": "./imagesVa/amos_0344.nii.gz", "label": "./labelsVa/amos_0344.nii.gz"}, {"image": "./imagesVa/amos_0346.nii.gz", "label": "./labelsVa/amos_0346.nii.gz"}, {"image": "./imagesVa/amos_0352.nii.gz", "label": "./labelsVa/amos_0352.nii.gz"}, {"image": "./imagesVa/amos_0356.nii.gz", "label": "./labelsVa/amos_0356.nii.gz"}, {"image": "./imagesVa/amos_0357.nii.gz", "label": "./labelsVa/amos_0357.nii.gz"}, {"image": "./imagesVa/amos_0363.nii.gz", "label": "./labelsVa/amos_0363.nii.gz"}, {"image": "./imagesVa/amos_0364.nii.gz", 
"label": "./labelsVa/amos_0364.nii.gz"}, {"image": "./imagesVa/amos_0365.nii.gz", "label": "./labelsVa/amos_0365.nii.gz"}, {"image": "./imagesVa/amos_0368.nii.gz", "label": "./labelsVa/amos_0368.nii.gz"}, {"image": "./imagesVa/amos_0372.nii.gz", "label": "./labelsVa/amos_0372.nii.gz"}, {"image": "./imagesVa/amos_0373.nii.gz", "label": "./labelsVa/amos_0373.nii.gz"}, {"image": "./imagesVa/amos_0377.nii.gz", "label": "./labelsVa/amos_0377.nii.gz"}, {"image": "./imagesVa/amos_0385.nii.gz", "label": "./labelsVa/amos_0385.nii.gz"}, {"image": "./imagesVa/amos_0397.nii.gz", "label": "./labelsVa/amos_0397.nii.gz"}, {"image": "./imagesVa/amos_0399.nii.gz", "label": "./labelsVa/amos_0399.nii.gz"}, {"image": "./imagesVa/amos_0409.nii.gz", "label": "./labelsVa/amos_0409.nii.gz"}, {"image": "./imagesVa/amos_0544.nii.gz", "label": "./labelsVa/amos_0544.nii.gz"}, {"image": "./imagesVa/amos_0545.nii.gz", "label": "./labelsVa/amos_0545.nii.gz"}, {"image": "./imagesVa/amos_0546.nii.gz", "label": "./labelsVa/amos_0546.nii.gz"}, {"image": "./imagesVa/amos_0547.nii.gz", "label": "./labelsVa/amos_0547.nii.gz"}, {"image": "./imagesVa/amos_0549.nii.gz", "label": "./labelsVa/amos_0549.nii.gz"}, {"image": "./imagesVa/amos_0550.nii.gz", "label": "./labelsVa/amos_0550.nii.gz"}, {"image": "./imagesVa/amos_0552.nii.gz", "label": "./labelsVa/amos_0552.nii.gz"}, {"image": "./imagesVa/amos_0553.nii.gz", "label": "./labelsVa/amos_0553.nii.gz"}, {"image": "./imagesVa/amos_0556.nii.gz", "label": "./labelsVa/amos_0556.nii.gz"}, {"image": "./imagesVa/amos_0559.nii.gz", "label": "./labelsVa/amos_0559.nii.gz"}, {"image": "./imagesVa/amos_0561.nii.gz", "label": "./labelsVa/amos_0561.nii.gz"}, {"image": "./imagesVa/amos_0562.nii.gz", "label": "./labelsVa/amos_0562.nii.gz"}, {"image": "./imagesVa/amos_0563.nii.gz", "label": "./labelsVa/amos_0563.nii.gz"}, {"image": "./imagesVa/amos_0568.nii.gz", "label": "./labelsVa/amos_0568.nii.gz"}, {"image": "./imagesVa/amos_0572.nii.gz", "label": 
"./labelsVa/amos_0572.nii.gz"}, {"image": "./imagesVa/amos_0573.nii.gz", "label": "./labelsVa/amos_0573.nii.gz"}, {"image": "./imagesVa/amos_0575.nii.gz", "label": "./labelsVa/amos_0575.nii.gz"}, {"image": "./imagesVa/amos_0576.nii.gz", "label": "./labelsVa/amos_0576.nii.gz"}, {"image": "./imagesVa/amos_0581.nii.gz", "label": "./labelsVa/amos_0581.nii.gz"}, {"image": "./imagesVa/amos_0598.nii.gz", "label": "./labelsVa/amos_0598.nii.gz"}], "test": [{"image": "./imagesTs/amos_0002.nii.gz"}, {"image": "./imagesTs/amos_0003.nii.gz"}, {"image": "./imagesTs/amos_0012.nii.gz"}, {"image": "./imagesTs/amos_0020.nii.gz"}, {"image": "./imagesTs/amos_0026.nii.gz"}, {"image": "./imagesTs/amos_0028.nii.gz"}, {"image": "./imagesTs/amos_0031.nii.gz"}, {"image": "./imagesTs/amos_0037.nii.gz"}, {"image": "./imagesTs/amos_0039.nii.gz"}, {"image": "./imagesTs/amos_0046.nii.gz"}, {"image": "./imagesTs/amos_0053.nii.gz"}, {"image": "./imagesTs/amos_0055.nii.gz"}, {"image": "./imagesTs/amos_0062.nii.gz"}, {"image": "./imagesTs/amos_0065.nii.gz"}, {"image": "./imagesTs/amos_0068.nii.gz"}, {"image": "./imagesTs/amos_0074.nii.gz"}, {"image": "./imagesTs/amos_0080.nii.gz"}, {"image": "./imagesTs/amos_0082.nii.gz"}, {"image": "./imagesTs/amos_0091.nii.gz"}, {"image": "./imagesTs/amos_0093.nii.gz"}, {"image": "./imagesTs/amos_0095.nii.gz"}, {"image": "./imagesTs/amos_0096.nii.gz"}, {"image": "./imagesTs/amos_0100.nii.gz"}, {"image": "./imagesTs/amos_0101.nii.gz"}, {"image": "./imagesTs/amos_0107.nii.gz"}, {"image": "./imagesTs/amos_0114.nii.gz"}, {"image": "./imagesTs/amos_0122.nii.gz"}, {"image": "./imagesTs/amos_0130.nii.gz"}, {"image": "./imagesTs/amos_0139.nii.gz"}, {"image": "./imagesTs/amos_0145.nii.gz"}, {"image": "./imagesTs/amos_0146.nii.gz"}, {"image": "./imagesTs/amos_0148.nii.gz"}, {"image": "./imagesTs/amos_0151.nii.gz"}, {"image": "./imagesTs/amos_0163.nii.gz"}, {"image": "./imagesTs/amos_0164.nii.gz"}, {"image": "./imagesTs/amos_0165.nii.gz"}, {"image": 
"./imagesTs/amos_0168.nii.gz"}, {"image": "./imagesTs/amos_0169.nii.gz"}, {"image": "./imagesTs/amos_0178.nii.gz"}, {"image": "./imagesTs/amos_0182.nii.gz"}, {"image": "./imagesTs/amos_0183.nii.gz"}, {"image": "./imagesTs/amos_0187.nii.gz"}, {"image": "./imagesTs/amos_0201.nii.gz"}, {"image": "./imagesTs/amos_0205.nii.gz"}, {"image": "./imagesTs/amos_0209.nii.gz"}, {"image": "./imagesTs/amos_0210.nii.gz"}, {"image": "./imagesTs/amos_0211.nii.gz"}, {"image": "./imagesTs/amos_0213.nii.gz"}, {"image": "./imagesTs/amos_0220.nii.gz"}, {"image": "./imagesTs/amos_0221.nii.gz"}, {"image": "./imagesTs/amos_0222.nii.gz"}, {"image": "./imagesTs/amos_0227.nii.gz"}, {"image": "./imagesTs/amos_0229.nii.gz"}, {"image": "./imagesTs/amos_0232.nii.gz"}, {"image": "./imagesTs/amos_0234.nii.gz"}, {"image": "./imagesTs/amos_0236.nii.gz"}, {"image": "./imagesTs/amos_0240.nii.gz"}, {"image": "./imagesTs/amos_0241.nii.gz"}, {"image": "./imagesTs/amos_0243.nii.gz"}, {"image": "./imagesTs/amos_0246.nii.gz"}, {"image": "./imagesTs/amos_0251.nii.gz"}, {"image": "./imagesTs/amos_0252.nii.gz"}, {"image": "./imagesTs/amos_0253.nii.gz"}, {"image": "./imagesTs/amos_0256.nii.gz"}, {"image": "./imagesTs/amos_0260.nii.gz"}, {"image": "./imagesTs/amos_0261.nii.gz"}, {"image": "./imagesTs/amos_0262.nii.gz"}, {"image": "./imagesTs/amos_0265.nii.gz"}, {"image": "./imagesTs/amos_0266.nii.gz"}, {"image": "./imagesTs/amos_0267.nii.gz"}, {"image": "./imagesTs/amos_0269.nii.gz"}, {"image": "./imagesTs/amos_0270.nii.gz"}, {"image": "./imagesTs/amos_0271.nii.gz"}, {"image": "./imagesTs/amos_0275.nii.gz"}, {"image": "./imagesTs/amos_0277.nii.gz"}, {"image": "./imagesTs/amos_0285.nii.gz"}, {"image": "./imagesTs/amos_0291.nii.gz"}, {"image": "./imagesTs/amos_0295.nii.gz"}, {"image": "./imagesTs/amos_0298.nii.gz"}, {"image": "./imagesTs/amos_0300.nii.gz"}, {"image": "./imagesTs/amos_0303.nii.gz"}, {"image": "./imagesTs/amos_0305.nii.gz"}, {"image": "./imagesTs/amos_0306.nii.gz"}, {"image": 
"./imagesTs/amos_0312.nii.gz"}, {"image": "./imagesTs/amos_0314.nii.gz"}, {"image": "./imagesTs/amos_0315.nii.gz"}, {"image": "./imagesTs/amos_0319.nii.gz"}, {"image": "./imagesTs/amos_0322.nii.gz"}, {"image": "./imagesTs/amos_0324.nii.gz"}, {"image": "./imagesTs/amos_0327.nii.gz"}, {"image": "./imagesTs/amos_0329.nii.gz"}, {"image": "./imagesTs/amos_0331.nii.gz"}, {"image": "./imagesTs/amos_0335.nii.gz"}, {"image": "./imagesTs/amos_0338.nii.gz"}, {"image": "./imagesTs/amos_0340.nii.gz"}, {"image": "./imagesTs/amos_0343.nii.gz"}, {"image": "./imagesTs/amos_0345.nii.gz"}, {"image": "./imagesTs/amos_0347.nii.gz"}, {"image": "./imagesTs/amos_0354.nii.gz"}, {"image": "./imagesTs/amos_0355.nii.gz"}, {"image": "./imagesTs/amos_0359.nii.gz"}, {"image": "./imagesTs/amos_0360.nii.gz"}, {"image": "./imagesTs/amos_0369.nii.gz"}, {"image": "./imagesTs/amos_0375.nii.gz"}, {"image": "./imagesTs/amos_0382.nii.gz"}, {"image": "./imagesTs/amos_0386.nii.gz"}, {"image": "./imagesTs/amos_0389.nii.gz"}, {"image": "./imagesTs/amos_0393.nii.gz"}, {"image": "./imagesTs/amos_0394.nii.gz"}, {"image": "./imagesTs/amos_0407.nii.gz"}, {"image": "./imagesTs/amos_0411.nii.gz"}, {"image": "./imagesTs/amos_0412.nii.gz"}, {"image": "./imagesTs/amos_0413.nii.gz"}, {"image": "./imagesTs/amos_0414.nii.gz"}, {"image": "./imagesTs/amos_0415.nii.gz"}, {"image": "./imagesTs/amos_0416.nii.gz"}, {"image": "./imagesTs/amos_0417.nii.gz"}, {"image": "./imagesTs/amos_0418.nii.gz"}, {"image": "./imagesTs/amos_0419.nii.gz"}, {"image": "./imagesTs/amos_0420.nii.gz"}, {"image": "./imagesTs/amos_0421.nii.gz"}, {"image": "./imagesTs/amos_0422.nii.gz"}, {"image": "./imagesTs/amos_0423.nii.gz"}, {"image": "./imagesTs/amos_0424.nii.gz"}, {"image": "./imagesTs/amos_0425.nii.gz"}, {"image": "./imagesTs/amos_0426.nii.gz"}, {"image": "./imagesTs/amos_0427.nii.gz"}, {"image": "./imagesTs/amos_0428.nii.gz"}, {"image": "./imagesTs/amos_0429.nii.gz"}, {"image": "./imagesTs/amos_0430.nii.gz"}, {"image": 
"./imagesTs/amos_0431.nii.gz"}, {"image": "./imagesTs/amos_0432.nii.gz"}, {"image": "./imagesTs/amos_0433.nii.gz"}, {"image": "./imagesTs/amos_0434.nii.gz"}, {"image": "./imagesTs/amos_0435.nii.gz"}, {"image": "./imagesTs/amos_0436.nii.gz"}, {"image": "./imagesTs/amos_0437.nii.gz"}, {"image": "./imagesTs/amos_0438.nii.gz"}, {"image": "./imagesTs/amos_0439.nii.gz"}, {"image": "./imagesTs/amos_0440.nii.gz"}, {"image": "./imagesTs/amos_0441.nii.gz"}, {"image": "./imagesTs/amos_0442.nii.gz"}, {"image": "./imagesTs/amos_0443.nii.gz"}, {"image": "./imagesTs/amos_0444.nii.gz"}, {"image": "./imagesTs/amos_0445.nii.gz"}, {"image": "./imagesTs/amos_0446.nii.gz"}, {"image": "./imagesTs/amos_0447.nii.gz"}, {"image": "./imagesTs/amos_0448.nii.gz"}, {"image": "./imagesTs/amos_0449.nii.gz"}, {"image": "./imagesTs/amos_0450.nii.gz"}, {"image": "./imagesTs/amos_0451.nii.gz"}, {"image": "./imagesTs/amos_0452.nii.gz"}, {"image": "./imagesTs/amos_0453.nii.gz"}, {"image": "./imagesTs/amos_0454.nii.gz"}, {"image": "./imagesTs/amos_0455.nii.gz"}, {"image": "./imagesTs/amos_0456.nii.gz"}, {"image": "./imagesTs/amos_0457.nii.gz"}, {"image": "./imagesTs/amos_0458.nii.gz"}, {"image": "./imagesTs/amos_0459.nii.gz"}, {"image": "./imagesTs/amos_0460.nii.gz"}, {"image": "./imagesTs/amos_0461.nii.gz"}, {"image": "./imagesTs/amos_0462.nii.gz"}, {"image": "./imagesTs/amos_0463.nii.gz"}, {"image": "./imagesTs/amos_0464.nii.gz"}, {"image": "./imagesTs/amos_0465.nii.gz"}, {"image": "./imagesTs/amos_0466.nii.gz"}, {"image": "./imagesTs/amos_0467.nii.gz"}, {"image": "./imagesTs/amos_0468.nii.gz"}, {"image": "./imagesTs/amos_0469.nii.gz"}, {"image": "./imagesTs/amos_0470.nii.gz"}, {"image": "./imagesTs/amos_0471.nii.gz"}, {"image": "./imagesTs/amos_0472.nii.gz"}, {"image": "./imagesTs/amos_0473.nii.gz"}, {"image": "./imagesTs/amos_0474.nii.gz"}, {"image": "./imagesTs/amos_0475.nii.gz"}, {"image": "./imagesTs/amos_0476.nii.gz"}, {"image": "./imagesTs/amos_0477.nii.gz"}, {"image": 
"./imagesTs/amos_0478.nii.gz"}, {"image": "./imagesTs/amos_0479.nii.gz"}, {"image": "./imagesTs/amos_0480.nii.gz"}, {"image": "./imagesTs/amos_0481.nii.gz"}, {"image": "./imagesTs/amos_0482.nii.gz"}, {"image": "./imagesTs/amos_0483.nii.gz"}, {"image": "./imagesTs/amos_0484.nii.gz"}, {"image": "./imagesTs/amos_0485.nii.gz"}, {"image": "./imagesTs/amos_0486.nii.gz"}, {"image": "./imagesTs/amos_0487.nii.gz"}, {"image": "./imagesTs/amos_0488.nii.gz"}, {"image": "./imagesTs/amos_0489.nii.gz"}, {"image": "./imagesTs/amos_0490.nii.gz"}, {"image": "./imagesTs/amos_0491.nii.gz"}, {"image": "./imagesTs/amos_0492.nii.gz"}, {"image": "./imagesTs/amos_0493.nii.gz"}, {"image": "./imagesTs/amos_0494.nii.gz"}, {"image": "./imagesTs/amos_0495.nii.gz"}, {"image": "./imagesTs/amos_0496.nii.gz"}, {"image": "./imagesTs/amos_0497.nii.gz"}, {"image": "./imagesTs/amos_0498.nii.gz"}, {"image": "./imagesTs/amos_0499.nii.gz"}, {"image": "./imagesTs/amos_0500.nii.gz"}, {"image": "./imagesTs/amos_0501.nii.gz"}, {"image": "./imagesTs/amos_0502.nii.gz"}, {"image": "./imagesTs/amos_0503.nii.gz"}, {"image": "./imagesTs/amos_0504.nii.gz"}, {"image": "./imagesTs/amos_0505.nii.gz"}, {"image": "./imagesTs/amos_0506.nii.gz"}, {"image": "./imagesTs/amos_0509.nii.gz"}, {"image": "./imagesTs/amos_0511.nii.gz"}, {"image": "./imagesTs/amos_0512.nii.gz"}, {"image": "./imagesTs/amos_0513.nii.gz"}, {"image": "./imagesTs/amos_0515.nii.gz"}, {"image": "./imagesTs/amos_0516.nii.gz"}, {"image": "./imagesTs/amos_0519.nii.gz"}, {"image": "./imagesTs/amos_0520.nii.gz"}, {"image": "./imagesTs/amos_0521.nii.gz"}, {"image": "./imagesTs/amos_0523.nii.gz"}, {"image": "./imagesTs/amos_0524.nii.gz"}, {"image": "./imagesTs/amos_0525.nii.gz"}, {"image": "./imagesTs/amos_0526.nii.gz"}, {"image": "./imagesTs/amos_0527.nii.gz"}, {"image": "./imagesTs/amos_0528.nii.gz"}, {"image": "./imagesTs/amos_0529.nii.gz"}, {"image": "./imagesTs/amos_0531.nii.gz"}, {"image": "./imagesTs/amos_0533.nii.gz"}, {"image": 
"./imagesTs/amos_0534.nii.gz"}, {"image": "./imagesTs/amos_0535.nii.gz"}, {"image": "./imagesTs/amos_0536.nii.gz"}, {"image": "./imagesTs/amos_0537.nii.gz"}, {"image": "./imagesTs/amos_0539.nii.gz"}, {"image": "./imagesTs/amos_0542.nii.gz"}, {"image": "./imagesTs/amos_0543.nii.gz"}, {"image": "./imagesTs/amos_0560.nii.gz"}, {"image": "./imagesTs/amos_0564.nii.gz"}, {"image": "./imagesTs/amos_0565.nii.gz"}, {"image": "./imagesTs/amos_0566.nii.gz"}, {"image": "./imagesTs/amos_0567.nii.gz"}, {"image": "./imagesTs/amos_0569.nii.gz"}, {"image": "./imagesTs/amos_0574.nii.gz"}, {"image": "./imagesTs/amos_0577.nii.gz"}, {"image": "./imagesTs/amos_0579.nii.gz"}]}
amos/amos22/imagesTr/.DS_Store ADDED
Binary file (22.5 kB). View file
 
amos/amos22/readme.md ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ### Overview
2
+
3
+ Despite the considerable progress in automatic abdominal multi-organ segmentation from CT/MRI scans in recent years, a comprehensive evaluation of the models' capabilities is hampered by the lack of a large-scale benchmark from diverse clinical scenarios. Constraint by the high cost of collecting and labeling 3D medical data, most of the deep learning models to date are driven by datasets with a limited number of organs of interest or samples, which still limits the power of modern deep models and makes it difficult to provide a fully comprehensive and fair estimate of various methods. To mitigate the limitations, we present AMOS, a large-scale, diverse, clinical dataset for abdominal organ segmentation. <u>AMOS provides 500 CT and 100 MRI scans collected from multi-center, multi-vendor, multi-modality, multi-phase, multi-disease patients, each with voxel-level annotations of 15 abdominal organs, providing challenging examples and test-bed for studying robust segmentation algorithms</u> under diverse targets and scenarios. We further benchmark several state-of-the-art medical segmentation models to evaluate the status of the existing methods on this new challenging dataset. We have made our datasets, benchmark servers, and baselines publicly available, and hope to inspire future research. For more details, please refer to our paper "https://arxiv.org/pdf/2206.08023.pdf" as well as homepage "https://jiyuanfeng.github.io/AMOS/".
4
+
5
+ ### Structure
6
+
7
+ AMOS provides the following content. imagesTr and labelsTr provide 240 scans (200 CT and 40 MRI), imagesVa and labelsVa provide 120 scans for model selection (100 CT and 20 MRI), and imagesTs provide 120 test data (please submit your predictions from https://amos22.grand-challenge.org/evaluation/challenge/submissions to get a score). Please note that id numbers less than 500 belong to CT data, otherwise they belong to MRI data.
8
+
9
+ ```bash
10
+ amos
11
+ │ readme.md
12
+ │ dataset.json
13
+ └───imagesTr
14
+ │ │ amos_xxxx.nii.gz
15
+ │ │ ...
16
+ └───imagesVa
17
+ └───imagesTs
18
+ └───labelsTr
19
+ └───labelsVa
20
+ └───labelsTs
21
+
22
+ ```
23
+
24
+ ### Citation
25
+
26
+ If you find this dataset useful for your research, please cite:
27
+
28
+ ```
29
+ @article{ji2022amos,
30
+ title={AMOS: A Large-Scale Abdominal Multi-Organ Benchmark for Versatile Medical Image Segmentation},
31
+ author={Ji, Yuanfeng and Bai, Haotian and Yang, Jie and Ge, Chongjian and Zhu, Ye and Zhang, Ruimao and Li, Zhen and Zhang, Lingyan and Ma, Wanling and Wan, Xiang and others},
32
+ journal={arXiv preprint arXiv:2206.08023},
33
+ year={2022}
34
+ }
35
+ ```
36
+
37
+ ### Upcoming
38
+
39
+ We will publish more meta information and corresponding APIs in October, while more unlabeled data will be used to support more learning scenarios.
kits21/kits21/kits21/annotation/cache.json ADDED
The diff for this file is too large to render. See raw diff
 
kits21/kits21/kits21/annotation/import.py ADDED
@@ -0,0 +1,342 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import argparse
2
+ import shutil
3
+ from pathlib import Path
4
+ import json
5
+
6
+ import nibabel as nib
7
+ import numpy as np
8
+
9
+ from kits21.annotation.postprocessing import delineation_to_seg, load_json, write_json
10
+ from kits21.configuration.labels import KITS_LABEL_NAMES, LABEL_AGGREGATION_ORDER
11
+ from kits21.configuration.paths import SRC_DIR, TRAINING_DIR, TESTING_DIR, CACHE_FILE
12
+
13
+
14
def get_case_dir(case):
    """Return the resolved source directory for the given case index.

    Only usable by the KiTS organizers (requires SRC_DIR to be configured).
    """
    assert SRC_DIR is not None, "SRC_DIR was none, this is most likely due to KITS21_SERVER_DATA not being in your " \
                                "environment variables. This functionality is intended to be used only by the " \
                                "KiTS organizers."
    # TODO remove hardcoding -- test both to find it
    # Cases are bucketed 50 per "cases_XXXXX" page; 300+ belong to the test set
    page = int(case // 50)
    split = "testing_data" if case >= 300 else "training_data"
    return (SRC_DIR / split / "cases_{:05d}".format(page) / "case_{:05d}".format(case)).resolve(strict=True)
24
+
25
+
26
def get_all_case_dirs():
    """Return the source directories for every known case."""
    # TODO set this number dynamically
    return list(map(get_case_dir, range(400)))
29
+
30
+
31
def get_region_dir(case_dir, region):
    """Resolve the directory for one region of a case (must already exist)."""
    region_dir = case_dir / region
    return region_dir.resolve(strict=True)
33
+
34
+
35
def get_all_region_dirs(case_dir):
    """List every region directory inside a case directory."""
    return list(case_dir.glob("*"))
37
+
38
+
39
def get_instance_dir(region_dir, instance):
    """Resolve the zero-padded directory of one annotation instance (must exist)."""
    return (region_dir / f"{instance:02d}").resolve(strict=True)
41
+
42
+
43
def get_all_instance_dirs(region_dir):
    """List every instance directory inside a region directory."""
    return list(region_dir.glob("*"))
45
+
46
+
47
def get_existing_instances(region_dir):
    """List previously generated segmentation files for this region's case."""
    case_id = region_dir.parent.name
    base_dir = Path(__file__).resolve().parent.parent / "data"
    # Case ids 300 and above belong to the test set tree
    if int(case_id.split("_")[-1]) >= 300:
        base_dir = TESTING_DIR
    seg_dir = base_dir / case_id / "segmentations"
    return list(seg_dir.glob("*{}*".format(region_dir.name)))
54
+
55
+
56
def get_delineation(instance_dir, delineation):
    """Resolve the directory for delineation number `delineation` (must exist)."""
    return (instance_dir / f"delineation{delineation}").resolve(strict=True)
58
+
59
+
60
def get_all_delineations(instance_dir):
    """List every delineation directory inside an instance directory."""
    return list(instance_dir.glob("delineation*"))
62
+
63
+
64
def get_most_recent_save(parent_dir):
    """Return the lexicographically last entry in `parent_dir`.

    Save files are named so that lexicographic order matches chronological
    order, so the last sorted entry is the most recent save.

    Raises:
        IndexError: if `parent_dir` is empty.
        OSError: if `parent_dir` cannot be listed.
    """
    # Narrowed from a bare `except Exception` — only the failures that can
    # actually happen here (empty dir, unreadable dir) are handled, and the
    # original traceback is preserved by a bare `raise`.
    try:
        srt_files = sorted(parent_dir.glob("*"))
        latest = srt_files[-1]
    except (IndexError, OSError):
        print()
        print("Error finding most recent save in", str(parent_dir))
        raise

    return latest
75
+
76
+
77
def update_raw(delineation_path, case_id, in_test_set):
    """Copy a case's raw annotation tree into the repository's data tree.

    Preserves any "custom_hilums" entry from an existing meta.json, which the
    wholesale copytree would otherwise overwrite.

    Args:
        delineation_path: path to one delineation save file; the case's raw
            source directory is derived from it (four levels up).
        case_id: e.g. "case_00012".
        in_test_set: route the copy to TESTING_DIR instead of TRAINING_DIR.
    """
    # Get parent directory (create if necessary)
    destination_parent = TRAINING_DIR / case_id
    if in_test_set:
        destination_parent = TESTING_DIR / case_id
    if not destination_parent.exists():
        destination_parent.mkdir()
    destination_parent = destination_parent / "raw"
    if not destination_parent.exists():
        destination_parent.mkdir()

    # Remember manually entered hilum data before the copy clobbers meta.json
    custom_hilums = None
    if (destination_parent / "meta.json").exists():
        with (destination_parent / "meta.json").open() as f:
            old_meta = json.loads(f.read())
            if "custom_hilums" in old_meta:
                custom_hilums = old_meta["custom_hilums"]

    # Get source directory
    src = delineation_path.parent.parent.parent.parent

    # Copy all annotation files to destination
    # (dirs_exist_ok requires Python 3.8+)
    shutil.copytree(str(src), str(destination_parent), dirs_exist_ok=True)

    # Re-apply the preserved hilum data on top of the freshly copied meta.json
    if custom_hilums is not None:
        with (destination_parent / "meta.json").open() as f:
            new_meta = json.loads(f.read())
        with (destination_parent / "meta.json").open('w') as f:
            new_meta["custom_hilums"] = custom_hilums
            f.write(json.dumps(new_meta, indent=2))
107
+
108
+
109
def get_localization(delineation_path):
    """Most recent localization save for the instance owning this delineation."""
    instance_dir = delineation_path.parent.parent
    return get_most_recent_save(instance_dir / "localization")
111
+
112
+
113
def get_artery_localization(delineation_path):
    """Most recent save of the case's artery localization, or None if absent."""
    case_dir = delineation_path.parent.parent.parent.parent
    pth = case_dir / "artery" / "00" / "localization"
    if not pth.exists():
        return None
    return get_most_recent_save(pth)
118
+
119
+
120
def get_image_path(case_id, in_test_set):
    """Resolve the path to the case's CT scan (must exist)."""
    base = TESTING_DIR if in_test_set else TRAINING_DIR
    return (base / case_id / "imaging.nii.gz").resolve(strict=True)
125
+
126
+
127
def save_segmentation(case_id, region_type, delineation_path, n1img, in_test_set):
    """Write one instance/annotation segmentation into the case's tree.

    The destination filename encodes the region, the 1-based instance number,
    and the annotation (delineation) number, all derived from the path.
    """
    # Annotation number is the trailing digit of "delineationN";
    # instance number is the name of the directory above it.
    annotation_num = int(delineation_path.parent.name[-1])
    instance_num = int(delineation_path.parent.parent.name)
    filename = "{}_instance-{}_annotation-{}.nii.gz".format(region_type, instance_num+1, annotation_num)

    base = TESTING_DIR if in_test_set else TRAINING_DIR
    destination_parent = base / case_id
    destination_parent.mkdir(exist_ok=True)
    destination_parent = destination_parent / "segmentations"
    destination_parent.mkdir(exist_ok=True)

    # Save file
    nib.save(n1img, str(destination_parent / filename))
146
+
147
+
148
def run_import(delineation_path):
    """Import one delineation: sync raw files, then regenerate its segmentation.

    All metadata is encoded in the directory layout:
    .../<split>/cases_xxxxx/<case_id>/<region>/<instance>/delineationN/<save>
    """
    # Useful values, recovered from fixed positions in the path
    region_type = delineation_path.parent.parent.parent.name
    case_id = delineation_path.parent.parent.parent.parent.name
    in_test_set = False
    if delineation_path.parent.parent.parent.parent.parent.parent.name == "testing_data":
        in_test_set = True

    # Copy updated raw data
    update_raw(delineation_path, case_id, in_test_set)

    # Kidneys require hilum information from the localization
    localization = None
    if region_type == "kidney":
        localization = get_localization(delineation_path)

    # Path to underlying CT scan stored as .nii.gz
    image_path = get_image_path(case_id, in_test_set)

    # Case metadata copied next to the scan by update_raw above
    meta_path = image_path.parent / "raw" / "meta.json"
    meta = load_json(meta_path)

    # Compute and save segmentation based on delineation
    seg_nib = delineation_to_seg(region_type, image_path, delineation_path, meta, localization)
    save_segmentation(case_id, region_type, delineation_path, seg_nib, in_test_set)
173
+
174
+
175
def aggregate(parent, region, idnum, agg, affine, agtype="maj"):
    """Fold one region's per-instance annotations into the label volume `agg`.

    Each instance may carry several annotations; these are combined
    per-instance with the chosen rule, then all instances of the region are
    OR-ed together and stamped into `agg` with label value `idnum`.

    Args:
        parent: directory of <region>_instance-i_annotation-j.nii.gz files.
        region: region name used as filename prefix.
        idnum: integer label value assigned to this region's voxels.
        agg: running aggregate volume, or None if nothing drawn yet.
        affine: affine matrix matching `agg`, or None.
        agtype: per-instance vote rule: "maj" (majority), "or", or "and".

    Returns:
        (agg, affine), updated; returned unchanged if the region has no files.
    """

    seg_files = [x for x in parent.glob("{}*.nii.gz".format(region))]
    # Instance index is encoded in the filename: <region>_instance-<i>_annotation-<j>
    instances = [int(x.stem.split("_")[1].split("-")[1]) for x in seg_files]
    unq_insts = sorted(list(set(instances)))

    reg_agg = None
    for inst in unq_insts:
        # Sum this instance's annotations so per-voxel vote counts can be thresholded
        inst_agg = None
        n_anns = 0
        for tins, tfnm in zip(instances, seg_files):
            if tins != inst:
                continue
            seg_nib = nib.load(str(tfnm))
            n_anns += 1
            if inst_agg is None:
                inst_agg = np.asanyarray(seg_nib.dataobj)
                affine = seg_nib.affine
            else:
                inst_agg = inst_agg + np.asanyarray(seg_nib.dataobj)

        # Threshold the vote counts according to the aggregation rule
        if agtype == "maj":
            inst = np.greater(inst_agg, n_anns/2).astype(inst_agg.dtype)
        elif agtype == "or":
            inst = np.greater(inst_agg, 0).astype(inst_agg.dtype)
        elif agtype == "and":
            inst = np.equal(inst_agg, n_anns).astype(inst_agg.dtype)

        # Union all instances of this region
        if reg_agg is None:
            reg_agg = np.copy(inst)
        else:
            reg_agg = np.logical_or(reg_agg, inst).astype(reg_agg.dtype)

    # If no info here, just return what we started with
    if reg_agg is None:
        return agg, affine

    # Stamp this region into the running label map (overwrites earlier labels)
    if agg is None:
        agg = idnum*reg_agg
    else:
        agg = np.where(np.logical_not(np.equal(reg_agg, 0)), idnum*reg_agg, agg)

    return agg, affine
218
+
219
+
220
def aggregate_case(case_id):
    """Compute and save the OR, AND, and MAJ aggregated segmentations of a case.

    The three aggregation rules previously lived in three copy-pasted loops;
    they are now a single helper invoked once per rule.
    """
    base_dir = Path(__file__).resolve().parent.parent / "data"
    # Case ids 300 and above belong to the test set tree
    if int(case_id.split("_")[-1]) >= 300:
        base_dir = TESTING_DIR

    segs = base_dir / case_id / "segmentations"

    for agtype in ("OR", "AND", "MAJ"):
        _aggregate_and_save(segs, base_dir / case_id, agtype)


def _aggregate_and_save(segs, case_dir, agtype):
    """Aggregate every region with one rule and save the result, if any.

    Args:
        segs: directory of per-instance segmentation files.
        case_dir: case directory receiving aggregated_<AGTYPE>_seg.nii.gz.
        agtype: "OR", "AND", or "MAJ" (upper-case; lower-cased for aggregate()).
    """
    affine = None
    agg = None
    for label_id in LABEL_AGGREGATION_ORDER:
        agg, affine = aggregate(segs, KITS_LABEL_NAMES[label_id], label_id, agg, affine,
                                agtype=agtype.lower())
    if agg is not None:
        nib.save(
            nib.Nifti1Image(agg.astype(np.int32), affine),
            str(case_dir / "aggregated_{}_seg.nii.gz".format(agtype))
        )
256
+
257
+
258
def cleanup(case_dir):
    """Delete all but the most recent save file in every annotation session."""
    base_dir = Path(__file__).resolve().parent.parent / "data"
    # Case ids 300 and above belong to the test set tree
    if int(case_dir.name.split("_")[-1]) >= 300:
        base_dir = TESTING_DIR
    raw_dir = base_dir / case_dir.name / "raw"
    for region_dir in get_all_region_dirs(raw_dir):
        for instance_dir in get_all_instance_dirs(region_dir):
            for sess in instance_dir.glob("*"):
                # Saves sort chronologically by name; keep only the last one
                stale = sorted(sess.glob("*"))[:-1]
                for f in stale:
                    f.unlink()
272
+
273
+
274
def main(args):
    """Import all (or a filtered subset of) delineations, then re-aggregate.

    CLI filters cascade: a filter at one level (region/instance/delineation)
    is only honored while every level above it was also pinned on the command
    line. The `cli` flag tracks that condition and, once False, stays False.
    """
    cache = load_json(CACHE_FILE)
    cli = True
    if args.case is not None:
        case_dirs = [get_case_dir(args.case)]
    else:
        cli = False
        case_dirs = get_all_case_dirs()

    for case_dir in case_dirs:
        print(case_dir.name)
        reaggregate = args.reaggregate
        if cli and args.region is not None:
            region_dirs = [get_region_dir(case_dir, args.region)]
        else:
            cli = False
            region_dirs = get_all_region_dirs(case_dir)

        for region_dir in region_dirs:
            # Skip regions no longer being used
            if region_dir.name in ["artery", "vein", "ureter"]:
                continue
            if cli and args.instance is not None:
                # CLI instance numbers are 1-based; directories are 0-based
                instance_dirs = [get_instance_dir(region_dir, args.instance - 1)]
            else:
                cli = False
                instance_dirs = get_all_instance_dirs(region_dir)

            for instance_dir in instance_dirs:
                if cli and args.delineation is not None:
                    delineations = [get_delineation(instance_dir, args.delineation)]
                else:
                    delineations = get_all_delineations(instance_dir)

                for delineation in delineations:
                    dln_file = get_most_recent_save(delineation)
                    # Cache key is the delineation path relative to the case dir
                    cache_key = str(delineation.relative_to(delineation.parent.parent.parent.parent))
                    # Re-import only when forced or when the latest save changed
                    if args.regenerate or cache_key not in cache or cache[cache_key] != dln_file.name:
                        run_import(dln_file)
                        cache[cache_key] = dln_file.name
                        # Persist the cache after every import so a crash loses little work
                        write_json(CACHE_FILE, cache)
                        reaggregate = True

            # Delete any instances that were generated before but don't exist anymore
            generated_instances = get_existing_instances(region_dir)
            for gi in generated_instances:
                if int(gi.stem.split("instance-")[1][0]) not in [int(x.name)+1 for x in instance_dirs]:
                    print("Deleting legacy file:", str(gi.name))
                    gi.unlink()
                    reaggregate = True

        if reaggregate:
            aggregate_case(case_dir.name, )

        # Clean up all unused raw files
        cleanup(case_dir)
330
+
331
+
332
if __name__ == '__main__':
    # Build the CLI and hand the parsed arguments to main().
    # (A redundant nested `if __name__ == "__main__":` guard was removed —
    # it was always true inside the outer guard.)
    parser = argparse.ArgumentParser()
    parser.add_argument("-c", "--case", help="The index of the case to import", type=int)
    parser.add_argument("-r", "--region", help="The type of region to import", type=str)
    parser.add_argument("-i", "--instance", help="The index of the instance of that region to import", type=int)
    parser.add_argument("-d", "--delineation", help="The index of the delineation of that instance to import (1, 2, or 3)", type=int)
    parser.add_argument("--regenerate", help="Regenerate segmentations regardless of cached values", action="store_true")
    parser.add_argument("--reaggregate", help="Reaggregate segmentations regardless of whether it was changed", action="store_true")
    cl_args = parser.parse_args()
    main(cl_args)
kits21/kits21/kits21/annotation/postprocessing.py ADDED
@@ -0,0 +1,749 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Code for turning user delineations into dense segmentations."""
2
+ import json
3
+
4
+ import numpy as np
5
+ import nibabel as nib
6
+ from PIL import Image, ImageDraw
7
+ from numpy.core.fromnumeric import cumsum
8
+ import torch
9
+ import torch.nn.functional
10
+ from scipy import signal
11
+ from skimage import measure
12
+ import cv2
13
+
14
+ #pylint: disable=no-member
15
+
16
+
17
def load_json(json_path):
    """Read and parse the JSON file at `json_path` (a pathlib.Path)."""
    with json_path.open() as f:
        return json.load(f)
20
+
21
+
22
def write_json(json_path, data):
    """Serialize `data` as indented JSON to `json_path`; return chars written."""
    text = json.dumps(data, indent=2)
    with json_path.open("w") as f:
        return f.write(text)
25
+
26
+
27
def get_containing_box(dln, shape):
    """Compute a padded, clipped bounding box around all strokes of a delineation.

    Args:
        dln: delineation dict with an "annotations" list; each annotation has
            "frame" (slice index), "spatial_payload" (list of [x, y] points),
            and "line_size" (brush width).
        shape: scan shape ordered (z, y, x).

    Returns:
        dict of x/y/z min/max bounds (x/y padded by the widest brush, z padded
        by one inter-frame step), "step" (smallest gap between annotated
        frames), and the full scan dimensions.
    """
    annotated_frames = set([])
    maxs = [0, 0]
    mins = [np.inf, np.inf]
    max_sz = 0
    # Track the spatial extent of every stroke and the widest brush used
    for ann in dln["annotations"]:
        annotated_frames.add(ann["frame"])
        for pt in ann["spatial_payload"]:
            if pt[0] > maxs[0]:
                maxs[0] = pt[0]
            if pt[1] > maxs[1]:
                maxs[1] = pt[1]
            if pt[0] < mins[0]:
                mins[0] = pt[0]
            if pt[1] < mins[1]:
                mins[1] = pt[1]
        if ann["line_size"] > max_sz:
            max_sz = ann["line_size"]

    # Smallest spacing between consecutive annotated frames
    # NOTE(review): with a single annotated frame min_step stays np.inf —
    # assumed not to happen for real delineations; confirm upstream guarantees.
    afrms = sorted(list(annotated_frames))
    last = afrms[0]
    min_step = np.inf
    for afrm in afrms[1:]:
        if afrm - last < min_step:
            min_step = afrm - last
        last = afrm

    abs_zmin = 0
    abs_zmax = shape[0] - 1
    return {
        "xmin": max(0, int(np.floor(mins[0] - max_sz))),
        "xmax": min(shape[2] - 1, int(np.ceil(maxs[0] + max_sz))),
        "ymin": max(0, int(np.floor(mins[1] - max_sz))),
        "ymax": min(shape[1] - 1, int(np.ceil(maxs[1] + max_sz))),
        "zmin": max(abs_zmin, min(afrms) - min_step),
        "zmax": min(abs_zmax, max(afrms) + min_step),
        "step": min_step,
        "xdim": shape[2],
        "ydim": shape[1],
        "zdim": shape[0]
    }
68
+
69
+
70
def get_cropped_scan(cbox, img_nib):
    """Extract the containing-box subvolume (z, y, x) from a nibabel image."""
    vol = img_nib.get_fdata()
    return vol[
        cbox["zmin"]:cbox["zmax"] + 1,
        cbox["ymin"]:cbox["ymax"] + 1,
        cbox["xmin"]:cbox["xmax"] + 1,
    ]
76
+
77
+
78
def generate_cropped_drawing_interior(cbox, dln):
    """Rasterize delineation strokes into a filled binary mask, slice by slice.

    Strokes are drawn 10x supersampled with round end caps, the exterior is
    flood-filled from the corner, and the image is downsampled so that every
    pixel NOT reached by the fill (the stroke interior) becomes 1.

    Fix: `np.int` was a deprecated alias removed in NumPy 1.24; the builtin
    `int` is the drop-in replacement and keeps the same (platform) dtype.

    Args:
        cbox: containing box from get_containing_box().
        dln: delineation dict with "annotations" strokes.

    Returns:
        int ndarray of shape (z, y, x) matching cbox; 1 inside drawn regions.
    """
    ret = np.zeros((
        cbox["zmax"] - cbox["zmin"] + 1,
        cbox["ymax"] - cbox["ymin"] + 1,
        cbox["xmax"] - cbox["xmin"] + 1
    ), dtype=int)

    for i in range(ret.shape[0]):
        # Draw at 10x resolution so the later bilinear downsample is smooth
        with Image.new("L", (ret.shape[2]*10, ret.shape[1]*10)) as im:
            draw = ImageDraw.Draw(im)
            drew = False
            for stroke in dln["annotations"]:
                if stroke["deprecated"]:
                    continue
                if i + cbox["zmin"] == stroke["frame"]:
                    drew = True
                    draw.line(
                        [
                            (
                                int(round((x[0] - cbox["xmin"])*10)),
                                int(round((x[1] - cbox["ymin"])*10))
                            )
                            for x in stroke["spatial_payload"]
                        ],
                        fill=128,
                        width=int(round(stroke["line_size"]*10))+4,
                        joint="curve"
                    )
                    # Round cap at the stroke's starting point
                    srt = stroke["spatial_payload"][0]
                    draw.ellipse(
                        [
                            (
                                int(round((srt[0] - cbox["xmin"] - stroke["line_size"]/2)*10))-2,
                                int(round((srt[1] - cbox["ymin"] - stroke["line_size"]/2)*10))-2
                            ),
                            (
                                int(round((srt[0] - cbox["xmin"] + stroke["line_size"]/2)*10))+2,
                                int(round((srt[1] - cbox["ymin"] + stroke["line_size"]/2)*10))+2
                            )
                        ],
                        fill=128
                    )
                    # Round cap at the stroke's ending point
                    end = stroke["spatial_payload"][-1]
                    draw.ellipse(
                        [
                            (
                                int(round((end[0] - cbox["xmin"] - stroke["line_size"]/2)*10))-2,
                                int(round((end[1] - cbox["ymin"] - stroke["line_size"]/2)*10))-2
                            ),
                            (
                                int(round((end[0] - cbox["xmin"] + stroke["line_size"]/2)*10))+2,
                                int(round((end[1] - cbox["ymin"] + stroke["line_size"]/2)*10))+2
                            )
                        ],
                        fill=128
                    )
            if drew:
                # Fill the exterior; interior pixels stay dark and become mask=1
                ImageDraw.floodfill(im, (0,0), 128, thresh=63.5)
                rszd = im.resize((ret.shape[2], ret.shape[1]), Image.BILINEAR)
                ret[i,:,:] = np.less(np.array(rszd), 63.9).astype(int)

    return ret
140
+
141
+
142
def get_contour(bin_seg):
    """Return the first OpenCV contour of a binary mask, or None for None input.

    NOTE(review): cv2.findContours can return several contours; only the first
    is kept — assumed each instance mask is a single connected component
    (confirm against the callers).
    """
    if bin_seg is None:
        return None
    contours, hierarchy = cv2.findContours(bin_seg.astype(np.uint8)*255, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    return contours[0]
147
+
148
+
149
def distance(p1, p2):
    """Squared Euclidean distance between two OpenCV contour points ([[x, y]])."""
    dx = p1[0][0] - p2[0][0]
    dy = p1[0][1] - p2[0][1]
    return dx * dx + dy * dy
151
+
152
+
153
def find_nearest_neighbors_slow_v2(lg_cntr, sm_cntr):
    """Match each point of the larger contour to a point of the smaller one.

    Samples the smaller contour at a proportional rate
    (step = len(sm)/len(lg)), trying every cyclic starting offset and keeping
    the assignment with the smallest total squared distance.
    O(len(lg)^2) — hence "slow".

    Returns:
        array shaped like `lg_cntr` holding the matched sm_cntr points.
    """
    matches = np.zeros_like(lg_cntr)
    step = sm_cntr.shape[0]/lg_cntr.shape[0]
    mini = None
    mind = np.inf
    for i in range(lg_cntr.shape[0]):
        # Candidate assignment: start at offset i*step, walk sm_cntr cyclically
        candidate_matches = np.zeros_like(lg_cntr)
        offset = i*step
        for j in range(lg_cntr.shape[0]):
            candidate_matches[j] = sm_cntr[int(np.round(offset + j*step)) % sm_cntr.shape[0]]

        dist = np.square(lg_cntr - candidate_matches).sum()
        if dist < mind:
            mini = i
            matches = candidate_matches.copy()
            mind = dist

    return matches
171
+
172
+
173
def draw_filled_contour(ind, bef_i, aft_i, drw_c, bef_bin, aft_bin, float_contour):
    """OR a filled interpolated contour into slice `ind` of the drawing volume.

    The contour is rasterized at 10x resolution, downsampled, and unioned with
    the voxels shared by both neighboring key slices (bef_bin AND aft_bin).

    NOTE(review): parameters bef_i and aft_i are currently unused here.
    """
    blown_up = np.zeros((drw_c.shape[1]*10, drw_c.shape[2]*10), dtype=np.uint8)
    # +1 offsets the polygon into the padded 10x canvas before filling
    points = np.round(float_contour*10).astype(np.int32) + 1
    cv2.fillPoly(blown_up, pts=[points], color=128)
    drw_c[ind,:,:] = np.logical_or(
        drw_c[ind,:,:],
        np.logical_or(
            np.greater(cv2.resize(blown_up, (drw_c.shape[2], drw_c.shape[1]), cv2.INTER_LINEAR), 32),
            np.multiply(bef_bin, aft_bin)
        )
    )
184
+
185
+
186
def get_group(istr, bef_to_aft, aft_to_bef):
    """Transitive closure of overlap links starting from before-instance `istr`.

    Repeatedly expands both sides through any association with a positive
    overlap until neither group grows.

    Returns:
        (before_ids, after_ids): lists of connected instance-id strings.
    """
    bef_grp = {istr}
    aft_grp = set()
    while True:
        sizes = (len(bef_grp), len(aft_grp))
        for ai in list(aft_grp):
            bef_grp.update(
                str(atb["ind"]) for atb in aft_to_bef[ai] if atb["ovr_sz"] > 0
            )
        for bi in list(bef_grp):
            aft_grp.update(
                str(bta["ind"]) for bta in bef_to_aft[bi] if bta["ovr_sz"] > 0
            )
        # Fixed point reached: nothing was added on either side
        if (len(bef_grp), len(aft_grp)) == sizes:
            return list(bef_grp), list(aft_grp)
206
+
207
+
208
def splice_contour(spliced, stretches, cntr, cur_sz, ctr_ind):
    """Insert contour `cntr` into the growing spliced-contour buffer.

    The insertion point is just after the existing point nearest to any point
    of `cntr`; the existing tail is shifted back to make room. `stretches`
    records, per point, which original contour it came from.

    Args:
        spliced: preallocated point buffer, valid up to index `cur_sz`.
        stretches: parallel buffer of source-contour indices.
        cntr: contour to splice in.
        cur_sz: number of valid points currently in `spliced`.
        ctr_ind: index of `cntr` among the original contours.

    Returns:
        (new_spliced, new_stretches) copies with `cntr` inserted.
    """
    # Get nearest pair
    # NOTE(review): minj (nearest point within cntr) is tracked but unused.
    mini = None
    minj = None
    mind = np.inf
    for i in range(cur_sz):
        for j in range(cntr.shape[0]):
            dst = distance(spliced[i], cntr[j])
            if dst < mind:
                mini = i
                minj = j
                mind = dst

    # Insert cntr after index mini, pushing the existing tail back
    ret_sp = spliced.copy()
    ret_sp[mini+1:mini+cntr.shape[0]+1] = cntr
    ret_sp[mini+cntr.shape[0]+1:cur_sz+cntr.shape[0]] = spliced[mini+1:cur_sz]

    # Mirror the same shuffle in the source-index buffer
    ret_st = stretches.copy()
    ret_st[mini+1:mini+cntr.shape[0]+1] = ctr_ind*np.ones((cntr.shape[0], 1))
    ret_st[mini+cntr.shape[0]+1:cur_sz+cntr.shape[0]] = stretches[mini+1:cur_sz]

    return ret_sp, ret_st
230
+
231
+
232
def splice_contours(cntrs):
    """Concatenate multiple contours into one, joining each at nearest points.

    Args:
        cntrs: non-empty list of OpenCV-style contour arrays (N_i, 1, 2).

    Returns:
        (spliced, stretches): the combined point array and, for each point,
        the index of the source contour it came from.
    """
    lengths = [cr.shape[0] for cr in cntrs]
    # stretches[k] = source-contour index of spliced[k]; -1 = not yet filled
    stretches = -1*np.ones(
        (sum(lengths),1),
        dtype=np.int32
    )

    spliced = np.zeros(
        (sum(lengths),) + cntrs[0].shape[1:],
        dtype=cntrs[0].dtype
    )
    # Seed the buffer with the first contour, then splice the rest in turn
    spliced[0:cntrs[0].shape[0]] = cntrs[0].copy()
    stretches[0:cntrs[0].shape[0]] = np.zeros((cntrs[0].shape[0], 1))
    for i in range(1, len(cntrs)):
        spliced, stretches = splice_contour(spliced, stretches, cntrs[i], sum(lengths[:i]), i)

    return spliced, stretches
249
+
250
+
251
def slice_matches(matches, splice_inds):
    """Split a spliced match array back into per-source-contour fragments."""
    n_contours = np.max(splice_inds) + 1
    return [
        matches[splice_inds == i, :].reshape((-1, 1, 2))
        for i in range(n_contours)
    ]
257
+
258
+
259
def interpolate_merge_association(bef_grp, aft_grp, bef_lbl, aft_lbl, drw_c, bef_i, aft_i, step):
    """Interpolate the slices between two annotated frames for a group of
    blobs that merge or split between the frames.

    The side with more blobs is spliced into one combined contour; the other
    side acts as the reference. Intermediate slices are drawn by linearly
    blending matched contour points.

    :param bef_grp: list of label strings forming the group on the earlier slice
    :param aft_grp: list of label strings forming the group on the later slice
    :param bef_lbl: labeled component image of the earlier slice
    :param aft_lbl: labeled component image of the later slice
    :param drw_c: drawing volume, written in place via ``draw_filled_contour``
    :param bef_i: index of the earlier slice
    :param aft_i: index of the later slice
    :param step: slice spacing between annotated frames
    """
    # Get composites for each: union mask over every blob in the group.
    tot_bef_bin = np.zeros_like(bef_lbl)
    for lbl in bef_grp:
        tot_bef_bin = np.logical_or(
            tot_bef_bin,
            np.equal(bef_lbl, int(lbl))
        )
    tot_aft_bin = np.zeros_like(aft_lbl)
    for lbl in aft_grp:
        tot_aft_bin = np.logical_or(
            tot_aft_bin,
            np.equal(aft_lbl, int(lbl))
        )

    # Get individual values: one binary mask and one contour per blob.
    bef_bins = [
        np.equal(bef_lbl, int(x))
        for x in bef_grp
    ]
    aft_bins = [
        np.equal(aft_lbl, int(x))
        for x in aft_grp
    ]
    bef_cntrs = [
        get_contour(bef_bin)
        for bef_bin in bef_bins
    ]
    aft_cntrs = [
        get_contour(aft_bin)
        for aft_bin in aft_bins
    ]
    # The side with more blobs becomes the spliced "non-reference" side;
    # interpolation proceeds from the reference slice toward it.
    if len(bef_grp) > len(aft_grp):
        nonref_cntrs = bef_cntrs
        spliced_nonref, splice_inds = splice_contours(bef_cntrs)
        ref_cntrs = aft_cntrs
        start = aft_i
        inc = -1
    else:
        nonref_cntrs = aft_cntrs
        spliced_nonref, splice_inds = splice_contours(aft_cntrs)
        ref_cntrs = bef_cntrs
        start = bef_i
        inc = 1

    for ref_cntr in ref_cntrs:
        # Point correspondences in both directions between the reference
        # contour and the spliced multi-blob contour.
        matches = find_nearest_neighbors_slow_v2(ref_cntr, spliced_nonref)
        rev_matches = find_nearest_neighbors_slow_v2(spliced_nonref, ref_cntr)
        sliced_matches = slice_matches(rev_matches, splice_inds)
        # First half of the gap: morph the reference contour toward the
        # spliced side via its matched points.
        for i in range(1, int(np.ceil((aft_i - bef_i)/2))):
            draw_filled_contour(
                start + i*inc, bef_i, aft_i,
                drw_c, tot_bef_bin, tot_aft_bin,
                i/step*matches + (step - i)/step*ref_cntr
            )
        # Second half: morph each individual non-reference fragment from its
        # matched reference points toward its own contour.
        for nonref_frag, ref_frag in zip(nonref_cntrs, sliced_matches):
            for i in range(int(np.ceil((aft_i - bef_i)/2)), aft_i - bef_i):
                draw_filled_contour(
                    start + i*inc, bef_i, aft_i,
                    drw_c, tot_bef_bin, tot_aft_bin,
                    i/step*nonref_frag + (step - i)/step*ref_frag
                )
321
+
322
+
323
def interpolate_simple_association(bef_bin, aft_bin, drw_c, bef_i, aft_i, bef_cnt, aft_cnt, step):
    """Interpolate the slices between a pair of one-to-one associated blobs.

    Either mask may be None, meaning the blob appears or disappears between
    the two frames: the missing side collapses to a single point at the
    provided center, so the blob grows from / shrinks to a point.

    :param bef_bin: binary mask of the blob on the earlier slice, or None
    :param aft_bin: binary mask of the blob on the later slice, or None
    :param drw_c: drawing volume, written in place via ``draw_filled_contour``
    :param bef_i: index of the earlier slice
    :param aft_i: index of the later slice
    :param bef_cnt: [y, x] center of the earlier blob
    :param aft_cnt: [y, x] center of the later blob
    :param step: slice spacing between annotated frames
    """
    # cnt <- center
    # cntr <- contour
    bef_cntr = get_contour(bef_bin)
    aft_cntr = get_contour(aft_bin)
    if bef_cntr is None:
        start = bef_i
        inc = 1
        # NOTE(review): `ref` is reassigned by the shape comparison below, so
        # this assignment (of None) appears to be dead -- confirm intended.
        ref = bef_cntr
        # Represent the missing side as a single point at the blob center.
        bef_cntr = np.array([
            [bef_cnt]
        ])
        bef_bin = np.zeros_like(aft_bin)
    elif aft_cntr is None:
        start = aft_i
        inc = -1
        ref = aft_cntr
        aft_cntr = np.array([
            [aft_cnt]
        ])
        aft_bin = np.zeros_like(bef_bin)
    # The larger contour is the reference; `matches` maps each of its points
    # onto the smaller contour.
    if bef_cntr.shape[0] > aft_cntr.shape[0]:
        start = bef_i
        inc = 1
        ref = bef_cntr
        matches = find_nearest_neighbors_slow_v2(bef_cntr, aft_cntr)
    else:
        start = aft_i
        inc = -1
        ref = aft_cntr
        matches = find_nearest_neighbors_slow_v2(aft_cntr, bef_cntr)

    # Linearly blend from the reference contour to its matched points, one
    # intermediate slice at a time, moving away from the reference slice.
    for i in range(1, aft_i - bef_i):
        draw_filled_contour(
            start + i*inc, bef_i, aft_i,
            drw_c, bef_bin, aft_bin,
            i/step*matches + (step - i)/step*ref
        )
361
+
362
+
363
def interpolate_step(bef_i, aft_i, drw_c, step):
    """Interpolate the drawing slices strictly between two annotated slices.

    Connected components ("blobs") on the two annotated slices are associated
    by pixel overlap and center proximity; one-to-one associations are
    interpolated directly, unmatched blobs grow from / shrink to a point, and
    many-to-many associations are handled as a merge group.

    :param bef_i: index of the earlier annotated slice
    :param aft_i: index of the later annotated slice
    :param drw_c: (slices, H, W) drawing volume, modified in place
    :param step: slice spacing between annotated frames
    :return: drw_c with intermediate slices filled in
    """
    # Label connected components in each
    bef_lbl = measure.label(drw_c[bef_i, :, :], background=0)
    aft_lbl = measure.label(drw_c[aft_i, :, :], background=0)

    # Associate connected components based on proximity and overlap
    num_bef = np.max(bef_lbl)
    num_aft = np.max(aft_lbl)

    # Tracks which "after" blobs were matched to at least one "before" blob.
    aft_cvg = [False for _ in range(num_aft)]

    bef_to_aft = {}
    aft_to_bef = {}

    # Iterate over all pairs of blobs.
    # The deprecated alias np.int was removed in NumPy 1.24; the builtin int
    # is the documented drop-in replacement.
    for i in range(1, num_bef+1):
        bef_bin = np.equal(bef_lbl, i).astype(int)
        bef_cnt_x, bef_cnt_y = np.argwhere(bef_bin == 1).sum(0)/bef_bin.sum()
        bef_covered = False
        istr = "{}".format(i)
        for j in range(1, num_aft+1):
            aft_bin = np.equal(aft_lbl, j).astype(int)

            # Get size of overlap
            ovr_sz = np.multiply(bef_bin, aft_bin).sum()

            # Get metrics describing blob proximity
            aft_cnt_x, aft_cnt_y = np.argwhere(aft_bin == 1).sum(0)/aft_bin.sum()
            cnt_dsp = [aft_cnt_y - bef_cnt_y, aft_cnt_x - bef_cnt_x]
            cnt_dst_sq = cnt_dsp[0]**2 + cnt_dsp[1]**2

            # Associate when blobs overlap or their centers are within 5 px.
            if ovr_sz > 0 or cnt_dst_sq < 5**2:
                jstr = "{}".format(j)
                if istr not in bef_to_aft:
                    bef_to_aft[istr] = []
                bef_to_aft[istr] += [{
                    "ind": j,
                    "ovr_sz": int(ovr_sz),
                    "cnt_dst_sq": cnt_dst_sq
                }]
                if jstr not in aft_to_bef:
                    aft_to_bef[jstr] = []
                aft_to_bef[jstr] += [{
                    "ind": i,
                    "ovr_sz": int(ovr_sz),
                    "cnt_dst_sq": cnt_dst_sq
                }]
                bef_covered = True
                aft_cvg[j-1] = True

        # Unmatched "before" blob: shrink it to a point going forward.
        if not bef_covered:
            interpolate_simple_association(
                bef_bin, None, drw_c, bef_i, aft_i,
                [bef_cnt_y, bef_cnt_x], [bef_cnt_y, bef_cnt_x], step
            )

    # Unmatched "after" blobs: grow them from a point.
    for j, ac in enumerate(aft_cvg):
        if not ac:
            aft_bin = np.equal(aft_lbl, j+1).astype(int)
            aft_cnt_x, aft_cnt_y = np.argwhere(aft_bin == 1).sum(0)/aft_bin.sum()
            interpolate_simple_association(
                None, aft_bin, drw_c, bef_i, aft_i,
                [aft_cnt_y, aft_cnt_x], [aft_cnt_y, aft_cnt_x], step
            )

    # If each only has one candidate, that's easy
    for istr in bef_to_aft:
        if len(bef_to_aft[istr]) == 1 and len(aft_to_bef[str(bef_to_aft[istr][0]["ind"])]) == 1:
            bef_bin = np.equal(bef_lbl, int(istr)).astype(int)
            aft_bin = np.equal(aft_lbl, bef_to_aft[istr][0]["ind"]).astype(int)
            aft_cnt_x, aft_cnt_y = np.argwhere(aft_bin == 1).sum(0)/aft_bin.sum()
            bef_cnt_x, bef_cnt_y = np.argwhere(bef_bin == 1).sum(0)/bef_bin.sum()
            interpolate_simple_association(
                bef_bin, aft_bin, drw_c, bef_i, aft_i,
                [bef_cnt_y, bef_cnt_x], [aft_cnt_y, aft_cnt_x], step
            )
        else: # More complex decision...
            # Restrict to associations backed by actual pixel overlap.
            strict_bta = [x for x in bef_to_aft[istr] if x["ovr_sz"] > 0]
            strict_atb = []
            for k in range(len(strict_bta)):
                strict_atb += [
                    x for x in aft_to_bef[str(strict_bta[k]["ind"])]
                    if x["ovr_sz"] > 0
                ]
            handled = False
            if len(strict_bta) == 1:
                if len(strict_atb) == 1:
                    # Exactly one overlapping partner on each side: treat as
                    # a simple one-to-one association after all.
                    handled = True
                    bef_bin = np.equal(bef_lbl, int(istr)).astype(int)
                    aft_bin = np.equal(aft_lbl, strict_bta[0]["ind"]).astype(int)
                    aft_cnt_x, aft_cnt_y = np.argwhere(aft_bin == 1).sum(0)/aft_bin.sum()
                    bef_cnt_x, bef_cnt_y = np.argwhere(bef_bin == 1).sum(0)/bef_bin.sum()
                    interpolate_simple_association(
                        bef_bin, aft_bin, drw_c, bef_i, aft_i,
                        [bef_cnt_y, bef_cnt_x], [aft_cnt_y, aft_cnt_x], step
                    )
            if not handled: # Need to do a group merge
                bef_grp, aft_grp = get_group(istr, bef_to_aft, aft_to_bef)
                interpolate_merge_association(
                    bef_grp, aft_grp, bef_lbl, aft_lbl, drw_c, bef_i, aft_i, step
                )

    return drw_c
466
+
467
+
468
def interpolate_drawings(drw_c, step, arb_bdry=False):
    """Fill in un-annotated slices of a drawing volume by interpolating
    between annotated slices, which occur every ``step`` slices.

    :param drw_c: (slices, H, W) drawing volume, modified in place
    :param step: slice spacing between annotated frames
    :param arb_bdry: if True, skip one step at each end (arbitrary boundaries)
    :return: the interpolated volume
    """
    # Get inclusive start and end frames: the first and last slices that
    # contain any drawing at all.
    start = 0
    while start < drw_c.shape[0]:
        if np.sum(drw_c[start]) > 0:
            break
        else:
            start += 1
    end = drw_c.shape[0] - 1
    while end > start:
        if np.sum(drw_c[end]) > 0:
            break
        else:
            end -= 1


    if arb_bdry:
        start += step
        end -= step

    # Interpolate each gap [start - step, start] in turn, clamping both ends
    # to the volume bounds.
    while start < end + step + 1:
        drw_c = interpolate_step(max(start - step, 0), min(start, drw_c.shape[0] - 1), drw_c, step)
        start += step

    return drw_c
493
+
494
+
495
def get_blur_kernel_d(affine):
    """Build a normalized separable 5x5x5 Gaussian blur kernel on the GPU.

    The per-axis standard deviations are derived from entries of the image
    affine. NOTE(review): affine[0, 2] / affine[2, 0] are off-diagonal
    entries; this presumably matches the scanner orientation of this dataset
    (axis-swapped affine) -- confirm against the NIfTI headers in use.

    :param affine: 4x4 NIfTI affine matrix of the image
    :return: torch tensor of shape (1, 1, 5, 5, 5) on cuda:0, summing to 1
    """
    # scipy.signal.gaussian was removed in SciPy 1.13;
    # scipy.signal.windows.gaussian is the long-standing replacement with
    # identical output.
    kerx = signal.windows.gaussian(5, std=1/np.abs(affine[0,2])).reshape(5, 1)
    kerxy = np.outer(kerx, kerx).reshape(1, 5, 5)
    kerz = signal.windows.gaussian(5, std=1/np.abs(affine[2,0])).reshape(5, 1, 1)
    kerxyz = np.outer(kerz, kerxy)
    # Normalize so the blur preserves overall intensity.
    kerxyz /= np.sum(kerxyz)
    return torch.from_numpy(kerxyz.reshape(1,1,5,5,5)).to("cuda:0")
502
+
503
+
504
def get_threshold(region_type):
    """Return the intensity threshold used to trim the drawn region.

    The value is region-independent now that the ureter region is gone, so
    ``region_type`` is accepted only for interface compatibility.
    """
    _ = region_type  # unused; kept so existing callers are unaffected
    return -30
507
+
508
+
509
def find_hilum_in_slice(thresh, side):
    """Locate the renal hilum opening in a thresholded 2D slice.

    Keeps only the largest connected component, then scans the convexity
    defects of its primary contour for the opening that faces the medial
    side of the body.

    :param thresh: 2D binary mask (modified in place; callers pass a copy)
    :param side: "left" or "right" -- which kidney the mask belongs to
    :return: [start_point, end_point] of the chosen defect mouth, or None if
        no contour/defect qualifies
    """
    # TODO use custom if available
    thresh = thresh.astype(np.uint8)
    (
        nb_components, output, stats, centroids
    ) = cv2.connectedComponentsWithStats(thresh, connectivity=4)
    sizes = stats[:, -1]

    # Find the largest foreground component (label 0 is the background).
    max_label = 0
    max_size = 0
    for i in range(1, nb_components):
        if sizes[i] > max_size:
            max_label = i
            max_size = sizes[i]

    # Suppress everything but the largest component.
    thresh[output != max_label] = 0
    centroid = np.array(tuple(centroids[max_label]))

    contours, _ = cv2.findContours(
        thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE
    )

    if len(contours) == 0:
        return None

    primary_contour = contours[0]
    hull = cv2.convexHull(primary_contour, returnPoints=False)
    defects = cv2.convexityDefects(primary_contour, hull)

    # Choose from defects
    distances = []
    scores = []
    criteria = []
    depths = []

    if defects is None:
        return None

    for i in range(defects.shape[0]):
        s, e, f, d = defects[i, 0]
        start = np.array(tuple(primary_contour[s][0]))
        end = np.array(tuple(primary_contour[e][0]))
        furthest = np.array(tuple(primary_contour[f][0]))
        defect_center = (start + end)/2
        depth = np.linalg.norm(furthest - defect_center)
        centroid_offset = centroid - defect_center
        distance = np.linalg.norm(start - end, ord=2)
        # print(centroid, defect_center, centroid_offset, distance)
        # Score favors defects on the medial side: the sign of the x-offset
        # term flips between the left and right kidney.
        # NOTE(review): if side is neither "left" nor "right", `score` is
        # never assigned and the line below raises NameError -- confirm
        # callers only pass these two values.
        if side == "left":
            score = 1*centroid_offset[0] + centroid_offset[1]
        elif side == "right":
            score = -1*centroid_offset[0] + centroid_offset[1]
        distance = np.linalg.norm(start - end, ord=2)
        scores = scores + [score]
        distances = distances + [distance]
        depths = depths + [depth]
        # Only medially-facing defects (score > 0) are candidates; wider and
        # especially deeper openings score higher.
        criteria = criteria + [int(score>0)*(distance+3*depth)]

    if np.sum(criteria) > 1e-2:
        winner = np.argmax(criteria)
        s, e, f, d = defects[winner, 0]
        start = tuple(primary_contour[s][0])
        end = tuple(primary_contour[e][0])
        hlm = [start, end]
    else:
        hlm = None

    return hlm
577
+
578
+
579
def apply_hilum_to_slice(thresholded_c, blur_c, threshold, ind, hlm):
    """Open the segmentation at the hilum on a single slice.

    Draws the hilum mouth as a closing line, flood-fills the exterior from
    the image corner to identify the enclosed interior, then keeps
    interior-or-original pixels that still pass the intensity threshold.

    :param thresholded_c: (slices, H, W) binary segmentation, edited in place
    :param blur_c: blurred image volume used for re-thresholding
    :param threshold: intensity threshold
    :param ind: index of the slice to edit
    :param hlm: [start_point, end_point] of the hilum mouth, or None (no-op)
    """
    if hlm is None:
        return

    # Close the hilum mouth so the flood fill cannot leak into the interior.
    cv2.line(thresholded_c[ind], hlm[0], hlm[1], 1, 2)
    abuse_slc = thresholded_c[ind].copy()
    # floodFill requires a mask 2 px larger than the image on each axis.
    mask = np.zeros((thresholded_c.shape[1]+2, thresholded_c.shape[2]+2), np.uint8)
    # Fill the background from the (0,0) corner; anything still 0 afterwards
    # is enclosed by the (now closed) contour.
    cv2.floodFill(abuse_slc, mask, (0,0), 1)
    thresholded_c[ind] = np.logical_and(
        (np.equal(abuse_slc, 0) | thresholded_c[ind]).astype(thresholded_c[ind].dtype),
        np.greater(blur_c[ind], threshold)
    )
591
+
592
+
593
# TODO allow for custom hilums to be specified in dln
# Polygons will be allowed for logged-in users
def add_renal_hilum(thresholded_c, blr_c, threshold, lzn, side, cbox, custom_hilums):
    """Carve the renal hilum out of a kidney segmentation.

    The superior/inferior extent of the hilum comes from whole-image
    localization annotations (class_id 7 = superior, 8 = inferior). Within
    that range each slice uses either user-supplied hilum segments or an
    automatically detected one.

    :param thresholded_c: (slices, H, W) binary segmentation, edited in place
    :param blr_c: blurred image volume used for re-thresholding
    :param threshold: intensity threshold
    :param lzn: parsed localization JSON with an "annotations" list
    :param side: "left" or "right" kidney
    :param cbox: containing-box dict; "zmin" converts scan frame indices to
        cropped-volume indices
    :param custom_hilums: mapping "slice_<i>" -> list of hilum segments
    :return: the edited segmentation volume
    """
    first_hilum_slice = None
    last_hilum_slice = None
    for ann in lzn["annotations"]:
        if ann["spatial_type"] == "whole-image" and not ann["deprecated"]:
            bound = None
            for cp in ann["classification_payloads"]:
                if cp["confidence"] > 0.5:
                    if cp["class_id"] == 7:
                        bound = "sup"
                    elif cp["class_id"] == 8:
                        bound = "inf"
            if bound is None:
                continue
            # Convert from scan frame index to cropped-volume index *before*
            # comparing. The previous code compared the raw frame against an
            # already-offset stored value, which mis-ranked bounds whenever
            # zmin > 0 and multiple bound annotations existed.
            frame = int(ann["frame"]) - cbox["zmin"]
            if bound == "sup":
                if first_hilum_slice is None or frame < first_hilum_slice:
                    first_hilum_slice = frame
            elif bound == "inf":
                if last_hilum_slice is None or frame > last_hilum_slice:
                    last_hilum_slice = frame

    # Report missing bounds once, rather than once per slice in the loop.
    if first_hilum_slice is None:
        print("First hilum slice could not be determined")
    if last_hilum_slice is None:
        print("Last hilum slice could not be determined")

    for ind in range(thresholded_c.shape[0]):
        if "slice_{}".format(ind) in custom_hilums:
            # User-specified hilum segments take precedence.
            for hlm in custom_hilums["slice_{}".format(ind)]:
                apply_hilum_to_slice(thresholded_c, blr_c, threshold, ind, hlm)
        elif (
            (
                first_hilum_slice is not None and ind >= first_hilum_slice
            ) and (
                last_hilum_slice is not None and ind <= last_hilum_slice
            )
        ):
            # TODO send dln here and use custom hilum if possible
            hlm = find_hilum_in_slice(thresholded_c[ind].copy(), side)
            apply_hilum_to_slice(thresholded_c, blr_c, threshold, ind, hlm)

    return thresholded_c
638
+
639
+
640
def get_side(cbox):
    """Infer which kidney a containing box covers.

    If the box's horizontal midpoint lies past the middle of the image, the
    box is on the patient's left side.

    :param cbox: dict with "xmin", "xmax", and image width "xdim"
    :return: "left" or "right"
    """
    # Comparing (xmin + xmax) against xdim is the same as comparing the box
    # midpoint against xdim / 2, without any division.
    return "left" if cbox["xmin"] + cbox["xmax"] > cbox["xdim"] else "right"
644
+
645
+
646
def generate_segmentation(region_type, cropped_img, cropped_drw, step=1, affine=None, lzn=None, cbox=None, custom_hilums=None):
    """Infer a dense segmentation from annotator drawings and the image.

    The drawings are interpolated across slices, the image is blurred on the
    GPU, and the drawn region is trimmed by an intensity threshold. Kidneys
    additionally get hilum carving.

    :param region_type: region name; "kidney" triggers hilum handling
    :param cropped_img: cropped image volume (numpy)
    :param cropped_drw: cropped drawing volume (numpy)
    :param step: slice spacing between annotated frames
    :param affine: image affine, used to size the blur kernel
    :param lzn: parsed localization JSON (kidney only)
    :param cbox: containing-box dict (kidney only)
    :param custom_hilums: optional mapping "slice_<i>" -> hilum segments
    :return: binary segmentation volume as a numpy array
    """
    # Use None instead of a mutable {} default to avoid the shared
    # default-argument pitfall; behavior for callers is unchanged.
    if custom_hilums is None:
        custom_hilums = {}

    # Interpolate drawings
    cropped_drw = interpolate_drawings(cropped_drw, step)

    # Send tensors to GPU
    img_d = torch.from_numpy(cropped_img).to("cuda:0")
    drw_d = torch.from_numpy(cropped_drw).to("cuda:0")

    # Apply a 3d blur convolution
    blur_kernel_d = get_blur_kernel_d(affine)
    blr_d = torch.nn.functional.conv3d(
        img_d.reshape((1,1)+cropped_img.shape),
        blur_kernel_d, stride=1, padding=2
    ).reshape(cropped_img.shape)

    # Apply threshold: keep drawn voxels whose blurred intensity is above it.
    threshold = get_threshold(region_type)
    thresholded_d = torch.logical_and(
        torch.greater(blr_d, threshold),
        torch.greater(drw_d, 0)
    ).int()

    # If region is kidney, add hilum, redraw, and get new threshold
    thresholded_c = thresholded_d.to("cpu").numpy()
    blr_c = blr_d.to("cpu").numpy()
    if region_type == "kidney":
        side = get_side(cbox)
        thresholded_c = add_renal_hilum(thresholded_c, blr_c, threshold, lzn, side, cbox, custom_hilums)

    # Bring result back to cpu memory
    return thresholded_c
677
+
678
+
679
def inflate_seg_to_image_size(cbox, cropped_seg):
    """Place a cropped segmentation back into a full-size empty volume.

    :param cbox: containing-box dict with full dims ("zdim", "ydim", "xdim")
        and inclusive crop bounds ("zmin"/"zmax", "ymin"/"ymax", "xmin"/"xmax")
    :param cropped_seg: segmentation array matching the box extents
    :return: full-size integer array with the segmentation at the box location
    """
    # np.int was removed in NumPy 1.24; the builtin int yields the same
    # platform-default integer dtype.
    seg_np = np.zeros((cbox["zdim"], cbox["ydim"], cbox["xdim"]), dtype=int)
    seg_np[
        cbox["zmin"]:cbox["zmax"] + 1,
        cbox["ymin"]:cbox["ymax"] + 1,
        cbox["xmin"]:cbox["xmax"] + 1,
    ] = cropped_seg
    return seg_np
687
+
688
+
689
def get_custom_hilums(meta, cbox):
    """Collect user-specified hilum segments that fall inside the crop box,
    translated into cropped-volume coordinates.

    :param meta: case metadata; may contain a "custom_hilums" list of entries
        with "slice_index" and "hilums" ([[start_xy, end_xy], ...])
    :param cbox: containing-box dict with "zmin", "zmax", "xmin", "ymin"
    :return: dict mapping "slice_<i>" (cropped index) -> list of segments,
        each segment a pair of (x, y) tuples
    """
    hilums_by_slice = {}
    for entry in meta.get("custom_hilums", []):
        slice_index = entry["slice_index"]
        # Ignore hilums outside the crop box along z.
        if not (cbox["zmin"] <= slice_index <= cbox["zmax"]):
            continue

        dct_key = "slice_{}".format(slice_index - cbox["zmin"])
        # Shift endpoints from scan coordinates into the cropped frame.
        translated = [
            [
                (seg[0][0] - cbox["xmin"], seg[0][1] - cbox["ymin"]),
                (seg[1][0] - cbox["xmin"], seg[1][1] - cbox["ymin"]),
            ]
            for seg in entry["hilums"]
        ]
        hilums_by_slice.setdefault(dct_key, []).extend(translated)

    return hilums_by_slice
717
+
718
+
719
def delineation_to_seg(region_type, image_path, delineation_path, meta, localization_path=None):
    """Produce a NIfTI segmentation from an annotator's delineation.

    :param region_type: region name; "kidney" additionally requires a
        localization file for hilum handling
    :param image_path: path to the CT scan (NIfTI)
    :param delineation_path: path to the delineation JSON
    :param meta: case metadata; may carry "custom_hilums"
    :param localization_path: localization JSON path (required for kidneys)
    :return: nibabel Nifti1Image holding the uint8 segmentation in the
        original image geometry
    """
    # Read and parse delination and (maybe) localization from file
    lzn = None
    if region_type == "kidney":
        assert localization_path is not None
        lzn = load_json(localization_path)
    dln = load_json(delineation_path)

    # Read CT scan
    img_nib = nib.load(str(image_path))

    # Crop image to the smallest possible box for memory/computational efficiency
    cbox = get_containing_box(dln, img_nib.shape)
    cropped_img = get_cropped_scan(cbox, img_nib)

    # Generate the drawing made by the annotator
    cropped_drw = generate_cropped_drawing_interior(cbox, dln)

    # Get any custom hilums within the containing box
    custom_hilums = get_custom_hilums(meta, cbox)

    # Apply heuristics to infer segmentation based on drawing and image
    cropped_seg = generate_segmentation(
        region_type, cropped_img, cropped_drw, cbox["step"], img_nib.affine, lzn, cbox, custom_hilums
    )

    # Undo cropping to get final segmentation
    seg = inflate_seg_to_image_size(cbox, cropped_seg)

    # Return the seg in nifti format
    return nib.Nifti1Image(seg.astype(np.uint8), img_nib.affine)
kits21/kits21/kits21/annotation/sample_segmentations.py ADDED
@@ -0,0 +1,158 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import shutil
2
+ from multiprocessing import Pool
3
+
4
+ import SimpleITK as sitk
5
+ import numpy as np
6
+ from batchgenerators.utilities.file_and_folder_operations import *
7
+
8
+ from kits21.configuration.labels import LABEL_AGGREGATION_ORDER, NUMBER_OF_GROUPS
9
+ from kits21.configuration.paths import TRAINING_DIR, TESTING_DIR
10
+
11
+
12
+ def get_number_of_instances(segmentations_folder: str, label_name: str = 'kidney'):
13
+ nii_files = subfiles(segmentations_folder, suffix='.nii.gz', prefix=label_name, join=False)
14
+ instance_strings = [i.split('_')[1] for i in nii_files]
15
+ instance_idx = [int(i.split('-')[-1]) for i in instance_strings]
16
+ return list(np.unique(instance_idx))
17
+
18
+
19
+ def get_annotations(segmentations_folder: str, label_name: str = 'kidney', instance_idx: int = 1):
20
+ nii_files = subfiles(segmentations_folder, suffix='.nii.gz', prefix=label_name + '_instance-%s' % instance_idx, join=False)
21
+ annotation_strings = [i.split('_')[-1][:-7] for i in nii_files]
22
+ annotation_idx = [int(i.split('-')[-1]) for i in annotation_strings]
23
+ return list(np.unique(annotation_idx))
24
+
25
+
26
+ def build_segmentation(kidney_files, tumor_files, cyst_files, output_file: str) -> None:
27
+ labelid_files_mapping = {
28
+ i: j if j is not None else list() for i, j in {
29
+ 1: kidney_files,
30
+ 2: tumor_files,
31
+ 3: cyst_files,
32
+ }.items()}
33
+
34
+ seg = None
35
+ seg_itk = None
36
+
37
+ for current_label in LABEL_AGGREGATION_ORDER:
38
+ files = labelid_files_mapping[current_label]
39
+ for f in files:
40
+ if seg is None:
41
+ seg_itk = sitk.ReadImage(f)
42
+ seg = sitk.GetArrayFromImage(seg_itk).astype(np.uint8)
43
+ seg[seg == 1] = current_label
44
+ else:
45
+ new_seg = sitk.GetArrayFromImage(sitk.ReadImage(f)).astype(np.uint8)
46
+ seg[new_seg == 1] = current_label
47
+
48
+ seg = seg.astype(np.uint8)
49
+ seg = sitk.GetImageFromArray(seg)
50
+ seg.CopyInformation(seg_itk)
51
+ sitk.WriteImage(seg, output_file)
52
+
53
+
54
+ def generate_samples(segmentations_folder: str, samples_output_folder: str, num_groups=3, seed=1234):
55
+ """
56
+ We do this the stupid way, because the smart way is above my head right now.
57
+
58
+ Why groups? We can only determine the inter-rater disagreement within each group because otherwise we might see the
59
+ same annotation more than once. So we compute the inter-rater disagreement within each group, then average across
60
+ groups
61
+
62
+ :param segmentations_folder:
63
+ :param samples_output_folder:
64
+ :return:
65
+ """
66
+ instances_kidney = get_number_of_instances(segmentations_folder, 'kidney')
67
+ instances_cyst = get_number_of_instances(segmentations_folder, 'cyst')
68
+ instances_tumor = get_number_of_instances(segmentations_folder, 'tumor')
69
+
70
+ anno_kidney = [get_annotations(segmentations_folder, 'kidney', i) for i in instances_kidney]
71
+ anno_cyst = [get_annotations(segmentations_folder, 'cyst', i) for i in instances_cyst]
72
+ anno_tumor = [get_annotations(segmentations_folder, 'tumor', i) for i in instances_tumor]
73
+
74
+ num_kidney_seg_per_group = min([len(i) for i in anno_kidney]) if len(anno_kidney) > 0 else np.nan
75
+ num_cyst_seg_per_group = min([len(i) for i in anno_cyst]) if len(anno_cyst) > 0 else np.nan
76
+ num_tumor_seg_per_group = min([len(i) for i in anno_tumor]) if len(anno_tumor) > 0 else np.nan
77
+
78
+ n_seg_per_group = int(np.nanmin((num_kidney_seg_per_group, num_cyst_seg_per_group, num_tumor_seg_per_group)))
79
+
80
+ rs = np.random.RandomState(seed)
81
+ for n in range(num_groups):
82
+ output_folder = join(samples_output_folder, 'group_%s' % str(n))
83
+ maybe_mkdir_p(output_folder)
84
+
85
+ random_offsets_kidney = [rs.randint(0, len(i)) for i in anno_kidney]
86
+ random_offsets_tumor = [rs.randint(0, len(i)) for i in anno_tumor]
87
+ random_offsets_cyst = [rs.randint(0, len(i)) for i in anno_cyst]
88
+
89
+ for i in range(n_seg_per_group):
90
+ output_filename = 'kidney'
91
+ kidney_files = []
92
+ for ik, inst_k in enumerate(instances_kidney):
93
+ anno = anno_kidney[ik][(random_offsets_kidney[ik] + i) % len(anno_kidney[ik])]
94
+ kidney_files.append(join(segmentations_folder, 'kidney_instance-%s_annotation-%s.nii.gz' % (inst_k, anno)))
95
+ output_filename += "_i%sa%s" % (inst_k, anno)
96
+
97
+ output_filename += '_cyst'
98
+ cyst_files = []
99
+ for ic, inst_c in enumerate(instances_cyst):
100
+ anno = anno_cyst[ic][(random_offsets_cyst[ic] + i) % len(anno_cyst[ic])]
101
+ cyst_files.append(join(segmentations_folder, 'cyst_instance-%s_annotation-%s.nii.gz' % (inst_c, anno)))
102
+ output_filename += "_i%sa%s" % (inst_c, anno)
103
+
104
+ output_filename += '_tumor'
105
+ tumor_files = []
106
+ for it, inst_t in enumerate(instances_tumor):
107
+ anno = anno_tumor[it][(random_offsets_tumor[it] + i) % len(anno_tumor[it])]
108
+ tumor_files.append(join(segmentations_folder, 'tumor_instance-%s_annotation-%s.nii.gz' % (inst_t, anno)))
109
+ output_filename += "_i%sa%s" % (inst_t, anno)
110
+
111
+ output_filename += ".nii.gz"
112
+ build_segmentation(kidney_files, tumor_files, cyst_files, join(output_folder, output_filename))
113
+
114
+
115
+ def generate_samples_for_all_cases(num_processes: int, num_groups_per_case: int = 5, testing: bool = True) -> None:
116
+ """
117
+ THIS WILL DELETE PREVIOUSLY EXISTING SAMPLES! BEWARE!
118
+
119
+ :param num_processes:
120
+ :param num_groups_per_case:
121
+ :return:
122
+ """
123
+ source_dir = TRAINING_DIR
124
+ if testing:
125
+ source_dir = TESTING_DIR
126
+
127
+ cases = subfolders(source_dir, prefix='case_', join=False)
128
+ case_ids = [int(i.split('_')[-1]) for i in cases]
129
+ p = Pool(num_processes)
130
+ res = []
131
+ for case, caseid in zip(cases, case_ids):
132
+ if isdir(join(source_dir, case, 'segmentations')) and \
133
+ len(subfiles(join(source_dir, case, 'segmentations'), suffix='.nii.gz')) > 0:
134
+ if isdir(join(source_dir, case, 'segmentation_samples')):
135
+ shutil.rmtree(join(source_dir, case, 'segmentation_samples'))
136
+ if isfile(join(source_dir, case, 'inter_rater_disagreement.json')):
137
+ os.remove(join(source_dir, case, 'inter_rater_disagreement.json'))
138
+ if isfile(join(source_dir, case, 'tolerances.json')):
139
+ os.remove(join(source_dir, case, 'tolerances.json'))
140
+ res.append(p.starmap_async(
141
+ generate_samples, ((join(source_dir, case, 'segmentations'),
142
+ join(source_dir, case, 'segmentation_samples'),
143
+ num_groups_per_case,
144
+ caseid), )
145
+ ))
146
+ _ = [i.get() for i in res]
147
+ p.close()
148
+ p.join()
149
+
150
+
151
+ if __name__ == '__main__':
152
+ if __name__ == '__main__':
153
+ import argparse
154
+ parser = argparse.ArgumentParser()
155
+ parser.add_argument('-num_processes', required=False, default=12, type=int)
156
+ parser.add_argument('-testing', required=False, default=False, type=bool)
157
+ args = parser.parse_args()
158
+ generate_samples_for_all_cases(args.num_processes, NUMBER_OF_GROUPS, args.testing)