Uploading tokenizer_robustness_completion_chinese_numerical_formats subset
README.md CHANGED
@@ -450,6 +450,40 @@ dataset_info:
     num_examples: 25
   download_size: 7358
   dataset_size: 4339
+- config_name: tokenizer_robustness_completion_chinese_numerical_formats
+  features:
+  - name: question
+    dtype: string
+  - name: choices
+    list: string
+  - name: answer
+    dtype: int64
+  - name: answer_label
+    dtype: string
+  - name: split
+    dtype: string
+  - name: subcategories
+    dtype: string
+  - name: category
+    dtype: string
+  - name: lang
+    dtype: string
+  - name: second_lang
+    dtype: string
+  - name: notes
+    dtype: string
+  - name: id
+    dtype: string
+  - name: set_id
+    dtype: float64
+  - name: variation_id
+    dtype: float64
+  splits:
+  - name: test
+    num_bytes: 1104
+    num_examples: 7
+  download_size: 5945
+  dataset_size: 1104
 configs:
 - config_name: tokenizer_robustness_completion_chinese_borrowing
   data_files:
@@ -503,6 +537,10 @@ configs:
   data_files:
   - split: test
     path: tokenizer_robustness_completion_chinese_keyboard_proximity_errors/test-*
+- config_name: tokenizer_robustness_completion_chinese_numerical_formats
+  data_files:
+  - split: test
+    path: tokenizer_robustness_completion_chinese_numerical_formats/test-*
 ---
 
 # Dataset Card for Tokenization Robustness
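For reference, a minimal sketch of loading the new subset with the Hugging Face datasets library once this commit lands; the repo id below is a placeholder, since the diff does not show the dataset's namespace:

from datasets import load_dataset

# Placeholder repo id -- the actual namespace/repo is not shown in this diff.
ds = load_dataset(
    "your-namespace/tokenizer-robustness",
    "tokenizer_robustness_completion_chinese_numerical_formats",
    split="test",
)

print(len(ds))       # 7 examples, per num_examples above
print(ds.features)   # question, choices, answer, answer_label, split, ...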
tokenizer_robustness_completion_chinese_numerical_formats/test-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5a72a970f681a76926ebb4baedd898d079986cea951177bd3238b76d2793f9b7
+size 5945
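The parquet shard itself is stored through Git LFS, so the commit records only a pointer file. A minimal sketch of checking a locally fetched shard against the pointer's oid and size fields (the local path is an assumption):

import hashlib
from pathlib import Path

# Assumed local path to the downloaded shard.
shard = Path("tokenizer_robustness_completion_chinese_numerical_formats/test-00000-of-00001.parquet")
data = shard.read_bytes()

# Both expected values come from the LFS pointer above.
assert len(data) == 5945
assert hashlib.sha256(data).hexdigest() == (
    "5a72a970f681a76926ebb4baedd898d079986cea951177bd3238b76d2793f9b7"
)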