Racoci committed on
Commit
c7edac7
·
1 Parent(s): 1038c33

Update tests.

Browse files
Files changed (1) hide show
  1. tests.ipynb +504 -73
tests.ipynb CHANGED
@@ -1,24 +1,8 @@
1
  {
2
- "nbformat": 4,
3
- "nbformat_minor": 0,
4
- "metadata": {
5
- "colab": {
6
- "provenance": []
7
- },
8
- "kernelspec": {
9
- "name": "python3",
10
- "display_name": "Python 3"
11
- },
12
- "language_info": {
13
- "name": "python"
14
- }
15
- },
16
  "cells": [
17
  {
18
  "cell_type": "code",
19
- "source": [
20
- "!pip install -U datasets huggingface_hub fsspec"
21
- ],
22
  "metadata": {
23
  "colab": {
24
  "base_uri": "https://localhost:8080/"
@@ -26,77 +10,505 @@
26
  "id": "OSWN0xQn6z8u",
27
  "outputId": "bb05c540-e196-49c3-cf7a-d2f4942abe52"
28
  },
29
- "execution_count": null,
30
  "outputs": [
31
  {
32
- "output_type": "stream",
33
  "name": "stdout",
 
34
  "text": [
35
- "Requirement already satisfied: datasets in /usr/local/lib/python3.11/dist-packages (2.14.4)\n",
36
  "Collecting datasets\n",
37
- " Downloading datasets-3.6.0-py3-none-any.whl.metadata (19 kB)\n",
38
- "Requirement already satisfied: huggingface_hub in /usr/local/lib/python3.11/dist-packages (0.32.4)\n",
39
  "Collecting huggingface_hub\n",
40
- " Downloading huggingface_hub-0.33.0-py3-none-any.whl.metadata (14 kB)\n",
41
- "Requirement already satisfied: fsspec in /usr/local/lib/python3.11/dist-packages (2025.3.2)\n",
42
  "Collecting fsspec\n",
43
  " Downloading fsspec-2025.5.1-py3-none-any.whl.metadata (11 kB)\n",
44
- "Requirement already satisfied: filelock in /usr/local/lib/python3.11/dist-packages (from datasets) (3.18.0)\n",
45
- "Requirement already satisfied: numpy>=1.17 in /usr/local/lib/python3.11/dist-packages (from datasets) (2.0.2)\n",
46
- "Requirement already satisfied: pyarrow>=15.0.0 in /usr/local/lib/python3.11/dist-packages (from datasets) (18.1.0)\n",
47
- "Requirement already satisfied: dill<0.3.9,>=0.3.0 in /usr/local/lib/python3.11/dist-packages (from datasets) (0.3.7)\n",
48
- "Requirement already satisfied: pandas in /usr/local/lib/python3.11/dist-packages (from datasets) (2.2.2)\n",
49
- "Requirement already satisfied: requests>=2.32.2 in /usr/local/lib/python3.11/dist-packages (from datasets) (2.32.3)\n",
50
- "Requirement already satisfied: tqdm>=4.66.3 in /usr/local/lib/python3.11/dist-packages (from datasets) (4.67.1)\n",
51
- "Requirement already satisfied: xxhash in /usr/local/lib/python3.11/dist-packages (from datasets) (3.5.0)\n",
52
- "Requirement already satisfied: multiprocess<0.70.17 in /usr/local/lib/python3.11/dist-packages (from datasets) (0.70.15)\n",
53
- " Downloading fsspec-2025.3.0-py3-none-any.whl.metadata (11 kB)\n",
54
- "Requirement already satisfied: packaging in /usr/local/lib/python3.11/dist-packages (from datasets) (24.2)\n",
55
- "Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.11/dist-packages (from datasets) (6.0.2)\n",
56
- "Requirement already satisfied: typing-extensions>=3.7.4.3 in /usr/local/lib/python3.11/dist-packages (from huggingface_hub) (4.14.0)\n",
57
- "Requirement already satisfied: hf-xet<2.0.0,>=1.1.2 in /usr/local/lib/python3.11/dist-packages (from huggingface_hub) (1.1.2)\n",
58
- "Requirement already satisfied: aiohttp!=4.0.0a0,!=4.0.0a1 in /usr/local/lib/python3.11/dist-packages (from fsspec[http]<=2025.3.0,>=2023.1.0->datasets) (3.11.15)\n",
59
- "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.11/dist-packages (from requests>=2.32.2->datasets) (3.4.2)\n",
60
- "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.11/dist-packages (from requests>=2.32.2->datasets) (3.10)\n",
61
- "Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.11/dist-packages (from requests>=2.32.2->datasets) (2.4.0)\n",
62
- "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.11/dist-packages (from requests>=2.32.2->datasets) (2025.4.26)\n",
63
- "Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.11/dist-packages (from pandas->datasets) (2.9.0.post0)\n",
64
- "Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.11/dist-packages (from pandas->datasets) (2025.2)\n",
65
- "Requirement already satisfied: tzdata>=2022.7 in /usr/local/lib/python3.11/dist-packages (from pandas->datasets) (2025.2)\n",
66
- "Requirement already satisfied: aiohappyeyeballs>=2.3.0 in /usr/local/lib/python3.11/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets) (2.6.1)\n",
67
- "Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.11/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets) (1.3.2)\n",
68
- "Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.11/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets) (25.3.0)\n",
69
- "Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.11/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets) (1.6.0)\n",
70
- "Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.11/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets) (6.4.4)\n",
71
- "Requirement already satisfied: propcache>=0.2.0 in /usr/local/lib/python3.11/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets) (0.3.1)\n",
72
- "Requirement already satisfied: yarl<2.0,>=1.17.0 in /usr/local/lib/python3.11/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets) (1.20.0)\n",
73
- "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.11/dist-packages (from python-dateutil>=2.8.2->pandas->datasets) (1.17.0)\n",
74
- "Downloading datasets-3.6.0-py3-none-any.whl (491 kB)\n",
75
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m491.5/491.5 kB\u001b[0m \u001b[31m6.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
76
- "\u001b[?25hDownloading huggingface_hub-0.33.0-py3-none-any.whl (514 kB)\n",
77
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m514.8/514.8 kB\u001b[0m \u001b[31m14.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
78
- "\u001b[?25hDownloading fsspec-2025.3.0-py3-none-any.whl (193 kB)\n",
79
- "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m193.6/193.6 kB\u001b[0m \u001b[31m6.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
80
- "\u001b[?25h"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
81
  ]
82
  }
 
 
 
83
  ]
84
  },
85
  {
86
  "cell_type": "markdown",
 
 
 
87
  "source": [
88
  "### 📥 Step 1: Preprocess the Data\n",
89
  "\n",
90
  "Since the dataset is hosted on Hugging Face, you can load it directly using the `load_dataset` function."
91
- ],
92
- "metadata": {
93
- "id": "edF1DuNE6nSg"
94
- }
95
  },
96
  {
97
  "cell_type": "code",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
98
  "source": [
99
- "import numpy as np\n",
100
  "import pandas as pd\n",
101
  "from datasets import load_dataset\n",
102
  "\n",
@@ -106,12 +518,31 @@
106
  "# Convert to a pandas DataFrame for easier manipulation\n",
107
  "df = pd.DataFrame(dataset[\"train\"])\n",
108
  "df"
109
- ],
110
- "metadata": {
111
- "id": "HmHDx6cU7WRG"
 
 
 
 
 
 
 
 
 
 
 
 
 
112
  },
113
- "execution_count": null,
114
- "outputs": []
 
 
 
 
115
  }
116
- ]
117
- }
 
 
 
1
  {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
+ "execution_count": 2,
 
 
6
  "metadata": {
7
  "colab": {
8
  "base_uri": "https://localhost:8080/"
 
10
  "id": "OSWN0xQn6z8u",
11
  "outputId": "bb05c540-e196-49c3-cf7a-d2f4942abe52"
12
  },
 
13
  "outputs": [
14
  {
 
15
  "name": "stdout",
16
+ "output_type": "stream",
17
  "text": [
 
18
  "Collecting datasets\n",
19
+ " Using cached datasets-3.6.0-py3-none-any.whl.metadata (19 kB)\n",
 
20
  "Collecting huggingface_hub\n",
21
+ " Using cached huggingface_hub-0.33.0-py3-none-any.whl.metadata (14 kB)\n",
 
22
  "Collecting fsspec\n",
23
  " Downloading fsspec-2025.5.1-py3-none-any.whl.metadata (11 kB)\n",
24
+ "Collecting filelock (from datasets)\n",
25
+ " Using cached filelock-3.18.0-py3-none-any.whl.metadata (2.9 kB)\n",
26
+ "Collecting numpy>=1.17 (from datasets)\n",
27
+ " Using cached numpy-2.3.0-cp313-cp313-manylinux_2_28_x86_64.whl.metadata (62 kB)\n",
28
+ "Collecting pyarrow>=15.0.0 (from datasets)\n",
29
+ " Using cached pyarrow-20.0.0-cp313-cp313-manylinux_2_28_x86_64.whl.metadata (3.3 kB)\n",
30
+ "Collecting dill<0.3.9,>=0.3.0 (from datasets)\n",
31
+ " Using cached dill-0.3.8-py3-none-any.whl.metadata (10 kB)\n",
32
+ "Collecting pandas (from datasets)\n",
33
+ " Using cached pandas-2.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (91 kB)\n",
34
+ "Collecting requests>=2.32.2 (from datasets)\n",
35
+ " Using cached requests-2.32.4-py3-none-any.whl.metadata (4.9 kB)\n",
36
+ "Collecting tqdm>=4.66.3 (from datasets)\n",
37
+ " Using cached tqdm-4.67.1-py3-none-any.whl.metadata (57 kB)\n",
38
+ "Collecting xxhash (from datasets)\n",
39
+ " Using cached xxhash-3.5.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (12 kB)\n",
40
+ "Collecting multiprocess<0.70.17 (from datasets)\n",
41
+ " Using cached multiprocess-0.70.16-py312-none-any.whl.metadata (7.2 kB)\n",
42
+ "Collecting fsspec\n",
43
+ " Using cached fsspec-2025.3.0-py3-none-any.whl.metadata (11 kB)\n",
44
+ "Requirement already satisfied: packaging in ./.venv/lib/python3.13/site-packages (from datasets) (25.0)\n",
45
+ "Collecting pyyaml>=5.1 (from datasets)\n",
46
+ " Using cached PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (2.1 kB)\n",
47
+ "Collecting aiohttp!=4.0.0a0,!=4.0.0a1 (from fsspec[http]<=2025.3.0,>=2023.1.0->datasets)\n",
48
+ " Using cached aiohttp-3.12.12-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (7.6 kB)\n",
49
+ "Collecting typing-extensions>=3.7.4.3 (from huggingface_hub)\n",
50
+ " Using cached typing_extensions-4.14.0-py3-none-any.whl.metadata (3.0 kB)\n",
51
+ "Collecting hf-xet<2.0.0,>=1.1.2 (from huggingface_hub)\n",
52
+ " Using cached hf_xet-1.1.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (879 bytes)\n",
53
+ "Collecting aiohappyeyeballs>=2.5.0 (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets)\n",
54
+ " Using cached aiohappyeyeballs-2.6.1-py3-none-any.whl.metadata (5.9 kB)\n",
55
+ "Collecting aiosignal>=1.1.2 (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets)\n",
56
+ " Using cached aiosignal-1.3.2-py2.py3-none-any.whl.metadata (3.8 kB)\n",
57
+ "Collecting attrs>=17.3.0 (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets)\n",
58
+ " Using cached attrs-25.3.0-py3-none-any.whl.metadata (10 kB)\n",
59
+ "Collecting frozenlist>=1.1.1 (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets)\n",
60
+ " Using cached frozenlist-1.7.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (18 kB)\n",
61
+ "Collecting multidict<7.0,>=4.5 (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets)\n",
62
+ " Using cached multidict-6.4.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (5.3 kB)\n",
63
+ "Collecting propcache>=0.2.0 (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets)\n",
64
+ " Using cached propcache-0.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (12 kB)\n",
65
+ "Collecting yarl<2.0,>=1.17.0 (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets)\n",
66
+ " Using cached yarl-1.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (73 kB)\n",
67
+ "Collecting idna>=2.0 (from yarl<2.0,>=1.17.0->aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets)\n",
68
+ " Using cached idna-3.10-py3-none-any.whl.metadata (10 kB)\n",
69
+ "Collecting charset_normalizer<4,>=2 (from requests>=2.32.2->datasets)\n",
70
+ " Using cached charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (35 kB)\n",
71
+ "Collecting urllib3<3,>=1.21.1 (from requests>=2.32.2->datasets)\n",
72
+ " Using cached urllib3-2.4.0-py3-none-any.whl.metadata (6.5 kB)\n",
73
+ "Collecting certifi>=2017.4.17 (from requests>=2.32.2->datasets)\n",
74
+ " Using cached certifi-2025.4.26-py3-none-any.whl.metadata (2.5 kB)\n",
75
+ "Requirement already satisfied: python-dateutil>=2.8.2 in ./.venv/lib/python3.13/site-packages (from pandas->datasets) (2.9.0.post0)\n",
76
+ "Collecting pytz>=2020.1 (from pandas->datasets)\n",
77
+ " Using cached pytz-2025.2-py2.py3-none-any.whl.metadata (22 kB)\n",
78
+ "Collecting tzdata>=2022.7 (from pandas->datasets)\n",
79
+ " Using cached tzdata-2025.2-py2.py3-none-any.whl.metadata (1.4 kB)\n",
80
+ "Requirement already satisfied: six>=1.5 in ./.venv/lib/python3.13/site-packages (from python-dateutil>=2.8.2->pandas->datasets) (1.17.0)\n",
81
+ "Using cached datasets-3.6.0-py3-none-any.whl (491 kB)\n",
82
+ "Using cached fsspec-2025.3.0-py3-none-any.whl (193 kB)\n",
83
+ "Using cached dill-0.3.8-py3-none-any.whl (116 kB)\n",
84
+ "Using cached multiprocess-0.70.16-py312-none-any.whl (146 kB)\n",
85
+ "Using cached huggingface_hub-0.33.0-py3-none-any.whl (514 kB)\n",
86
+ "Using cached hf_xet-1.1.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (4.8 MB)\n",
87
+ "Using cached aiohttp-3.12.12-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.7 MB)\n",
88
+ "Using cached multidict-6.4.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (222 kB)\n",
89
+ "Using cached yarl-1.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (352 kB)\n",
90
+ "Using cached aiohappyeyeballs-2.6.1-py3-none-any.whl (15 kB)\n",
91
+ "Using cached aiosignal-1.3.2-py2.py3-none-any.whl (7.6 kB)\n",
92
+ "Using cached attrs-25.3.0-py3-none-any.whl (63 kB)\n",
93
+ "Using cached frozenlist-1.7.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (232 kB)\n",
94
+ "Using cached idna-3.10-py3-none-any.whl (70 kB)\n",
95
+ "Using cached numpy-2.3.0-cp313-cp313-manylinux_2_28_x86_64.whl (16.6 MB)\n",
96
+ "Using cached propcache-0.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (206 kB)\n",
97
+ "Using cached pyarrow-20.0.0-cp313-cp313-manylinux_2_28_x86_64.whl (42.3 MB)\n",
98
+ "Using cached PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (759 kB)\n",
99
+ "Using cached requests-2.32.4-py3-none-any.whl (64 kB)\n",
100
+ "Using cached charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (148 kB)\n",
101
+ "Using cached urllib3-2.4.0-py3-none-any.whl (128 kB)\n",
102
+ "Using cached certifi-2025.4.26-py3-none-any.whl (159 kB)\n",
103
+ "Using cached tqdm-4.67.1-py3-none-any.whl (78 kB)\n",
104
+ "Using cached typing_extensions-4.14.0-py3-none-any.whl (43 kB)\n",
105
+ "Using cached filelock-3.18.0-py3-none-any.whl (16 kB)\n",
106
+ "Using cached pandas-2.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (12.0 MB)\n",
107
+ "Using cached pytz-2025.2-py2.py3-none-any.whl (509 kB)\n",
108
+ "Using cached tzdata-2025.2-py2.py3-none-any.whl (347 kB)\n",
109
+ "Using cached xxhash-3.5.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (194 kB)\n",
110
+ "Installing collected packages: pytz, xxhash, urllib3, tzdata, typing-extensions, tqdm, pyyaml, pyarrow, propcache, numpy, multidict, idna, hf-xet, fsspec, frozenlist, filelock, dill, charset_normalizer, certifi, attrs, aiohappyeyeballs, yarl, requests, pandas, multiprocess, aiosignal, huggingface_hub, aiohttp, datasets\n",
111
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m29/29\u001b[0m [datasets]/29\u001b[0m [datasets]ce_hub]]\n",
112
+ "\u001b[1A\u001b[2KSuccessfully installed aiohappyeyeballs-2.6.1 aiohttp-3.12.12 aiosignal-1.3.2 attrs-25.3.0 certifi-2025.4.26 charset_normalizer-3.4.2 datasets-3.6.0 dill-0.3.8 filelock-3.18.0 frozenlist-1.7.0 fsspec-2025.3.0 hf-xet-1.1.3 huggingface_hub-0.33.0 idna-3.10 multidict-6.4.4 multiprocess-0.70.16 numpy-2.3.0 pandas-2.3.0 propcache-0.3.2 pyarrow-20.0.0 pytz-2025.2 pyyaml-6.0.2 requests-2.32.4 tqdm-4.67.1 typing-extensions-4.14.0 tzdata-2025.2 urllib3-2.4.0 xxhash-3.5.0 yarl-1.20.1\n"
113
  ]
114
  }
115
+ ],
116
+ "source": [
117
+ "!pip install -U datasets huggingface_hub fsspec"
118
  ]
119
  },
120
  {
121
  "cell_type": "markdown",
122
+ "metadata": {
123
+ "id": "edF1DuNE6nSg"
124
+ },
125
  "source": [
126
  "### 📥 Step 1: Preprocess the Data\n",
127
  "\n",
128
  "Since the dataset is hosted on Hugging Face, you can load it directly using the `load_dataset` function."
129
+ ]
 
 
 
130
  },
131
  {
132
  "cell_type": "code",
133
+ "execution_count": 3,
134
+ "metadata": {
135
+ "id": "HmHDx6cU7WRG"
136
+ },
137
+ "outputs": [
138
+ {
139
+ "name": "stderr",
140
+ "output_type": "stream",
141
+ "text": [
142
+ "/home/escher/Projects/OpenPsychometrics/2018-11-08-IPIP-FFM-data/2018-11-08-OpenPsychometrics-IPIP-FFM/.venv/lib/python3.13/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
143
+ " from .autonotebook import tqdm as notebook_tqdm\n",
144
+ "Generating train split: 100%|██████████| 609204/609204 [00:00<00:00, 781185.05 examples/s]\n",
145
+ "Generating validation split: 100%|██████████| 203068/203068 [00:00<00:00, 842451.01 examples/s]\n",
146
+ "Generating test split: 100%|██████████| 203069/203069 [00:00<00:00, 769756.37 examples/s]\n"
147
+ ]
148
+ },
149
+ {
150
+ "data": {
151
+ "text/html": [
152
+ "<div>\n",
153
+ "<style scoped>\n",
154
+ " .dataframe tbody tr th:only-of-type {\n",
155
+ " vertical-align: middle;\n",
156
+ " }\n",
157
+ "\n",
158
+ " .dataframe tbody tr th {\n",
159
+ " vertical-align: top;\n",
160
+ " }\n",
161
+ "\n",
162
+ " .dataframe thead th {\n",
163
+ " text-align: right;\n",
164
+ " }\n",
165
+ "</style>\n",
166
+ "<table border=\"1\" class=\"dataframe\">\n",
167
+ " <thead>\n",
168
+ " <tr style=\"text-align: right;\">\n",
169
+ " <th></th>\n",
170
+ " <th>EXT1</th>\n",
171
+ " <th>EXT2</th>\n",
172
+ " <th>EXT3</th>\n",
173
+ " <th>EXT4</th>\n",
174
+ " <th>EXT5</th>\n",
175
+ " <th>EXT6</th>\n",
176
+ " <th>EXT7</th>\n",
177
+ " <th>EXT8</th>\n",
178
+ " <th>EXT9</th>\n",
179
+ " <th>EXT10</th>\n",
180
+ " <th>...</th>\n",
181
+ " <th>screenw</th>\n",
182
+ " <th>screenh</th>\n",
183
+ " <th>introelapse</th>\n",
184
+ " <th>testelapse</th>\n",
185
+ " <th>endelapse</th>\n",
186
+ " <th>IPC</th>\n",
187
+ " <th>country</th>\n",
188
+ " <th>lat_appx_lots_of_err</th>\n",
189
+ " <th>long_appx_lots_of_err</th>\n",
190
+ " <th>__index_level_0__</th>\n",
191
+ " </tr>\n",
192
+ " </thead>\n",
193
+ " <tbody>\n",
194
+ " <tr>\n",
195
+ " <th>0</th>\n",
196
+ " <td>2.0</td>\n",
197
+ " <td>2.0</td>\n",
198
+ " <td>5.0</td>\n",
199
+ " <td>4.0</td>\n",
200
+ " <td>4.0</td>\n",
201
+ " <td>1.0</td>\n",
202
+ " <td>2.0</td>\n",
203
+ " <td>4.0</td>\n",
204
+ " <td>4.0</td>\n",
205
+ " <td>2.0</td>\n",
206
+ " <td>...</td>\n",
207
+ " <td>1324.0</td>\n",
208
+ " <td>745.0</td>\n",
209
+ " <td>7.0</td>\n",
210
+ " <td>2872.0</td>\n",
211
+ " <td>17</td>\n",
212
+ " <td>1</td>\n",
213
+ " <td>US</td>\n",
214
+ " <td>38.0</td>\n",
215
+ " <td>-97.0</td>\n",
216
+ " <td>422981</td>\n",
217
+ " </tr>\n",
218
+ " <tr>\n",
219
+ " <th>1</th>\n",
220
+ " <td>2.0</td>\n",
221
+ " <td>5.0</td>\n",
222
+ " <td>5.0</td>\n",
223
+ " <td>5.0</td>\n",
224
+ " <td>2.0</td>\n",
225
+ " <td>5.0</td>\n",
226
+ " <td>2.0</td>\n",
227
+ " <td>5.0</td>\n",
228
+ " <td>3.0</td>\n",
229
+ " <td>5.0</td>\n",
230
+ " <td>...</td>\n",
231
+ " <td>1366.0</td>\n",
232
+ " <td>768.0</td>\n",
233
+ " <td>70.0</td>\n",
234
+ " <td>223.0</td>\n",
235
+ " <td>18</td>\n",
236
+ " <td>47</td>\n",
237
+ " <td>US</td>\n",
238
+ " <td>38.0</td>\n",
239
+ " <td>-97.0</td>\n",
240
+ " <td>325363</td>\n",
241
+ " </tr>\n",
242
+ " <tr>\n",
243
+ " <th>2</th>\n",
244
+ " <td>3.0</td>\n",
245
+ " <td>3.0</td>\n",
246
+ " <td>5.0</td>\n",
247
+ " <td>1.0</td>\n",
248
+ " <td>5.0</td>\n",
249
+ " <td>1.0</td>\n",
250
+ " <td>5.0</td>\n",
251
+ " <td>5.0</td>\n",
252
+ " <td>3.0</td>\n",
253
+ " <td>4.0</td>\n",
254
+ " <td>...</td>\n",
255
+ " <td>1920.0</td>\n",
256
+ " <td>1080.0</td>\n",
257
+ " <td>30.0</td>\n",
258
+ " <td>207.0</td>\n",
259
+ " <td>10</td>\n",
260
+ " <td>1</td>\n",
261
+ " <td>US</td>\n",
262
+ " <td>42.8886</td>\n",
263
+ " <td>-88.0384</td>\n",
264
+ " <td>105821</td>\n",
265
+ " </tr>\n",
266
+ " <tr>\n",
267
+ " <th>3</th>\n",
268
+ " <td>1.0</td>\n",
269
+ " <td>5.0</td>\n",
270
+ " <td>3.0</td>\n",
271
+ " <td>5.0</td>\n",
272
+ " <td>2.0</td>\n",
273
+ " <td>4.0</td>\n",
274
+ " <td>3.0</td>\n",
275
+ " <td>5.0</td>\n",
276
+ " <td>1.0</td>\n",
277
+ " <td>5.0</td>\n",
278
+ " <td>...</td>\n",
279
+ " <td>1440.0</td>\n",
280
+ " <td>960.0</td>\n",
281
+ " <td>4.0</td>\n",
282
+ " <td>166.0</td>\n",
283
+ " <td>10</td>\n",
284
+ " <td>1</td>\n",
285
+ " <td>AU</td>\n",
286
+ " <td>-37.7919</td>\n",
287
+ " <td>145.084</td>\n",
288
+ " <td>169213</td>\n",
289
+ " </tr>\n",
290
+ " <tr>\n",
291
+ " <th>4</th>\n",
292
+ " <td>4.0</td>\n",
293
+ " <td>3.0</td>\n",
294
+ " <td>3.0</td>\n",
295
+ " <td>3.0</td>\n",
296
+ " <td>3.0</td>\n",
297
+ " <td>2.0</td>\n",
298
+ " <td>4.0</td>\n",
299
+ " <td>3.0</td>\n",
300
+ " <td>2.0</td>\n",
301
+ " <td>3.0</td>\n",
302
+ " <td>...</td>\n",
303
+ " <td>1920.0</td>\n",
304
+ " <td>1200.0</td>\n",
305
+ " <td>12.0</td>\n",
306
+ " <td>194.0</td>\n",
307
+ " <td>4</td>\n",
308
+ " <td>2</td>\n",
309
+ " <td>US</td>\n",
310
+ " <td>37.5402</td>\n",
311
+ " <td>-122.3041</td>\n",
312
+ " <td>839500</td>\n",
313
+ " </tr>\n",
314
+ " <tr>\n",
315
+ " <th>...</th>\n",
316
+ " <td>...</td>\n",
317
+ " <td>...</td>\n",
318
+ " <td>...</td>\n",
319
+ " <td>...</td>\n",
320
+ " <td>...</td>\n",
321
+ " <td>...</td>\n",
322
+ " <td>...</td>\n",
323
+ " <td>...</td>\n",
324
+ " <td>...</td>\n",
325
+ " <td>...</td>\n",
326
+ " <td>...</td>\n",
327
+ " <td>...</td>\n",
328
+ " <td>...</td>\n",
329
+ " <td>...</td>\n",
330
+ " <td>...</td>\n",
331
+ " <td>...</td>\n",
332
+ " <td>...</td>\n",
333
+ " <td>...</td>\n",
334
+ " <td>...</td>\n",
335
+ " <td>...</td>\n",
336
+ " <td>...</td>\n",
337
+ " </tr>\n",
338
+ " <tr>\n",
339
+ " <th>609199</th>\n",
340
+ " <td>1.0</td>\n",
341
+ " <td>5.0</td>\n",
342
+ " <td>1.0</td>\n",
343
+ " <td>5.0</td>\n",
344
+ " <td>1.0</td>\n",
345
+ " <td>5.0</td>\n",
346
+ " <td>1.0</td>\n",
347
+ " <td>5.0</td>\n",
348
+ " <td>1.0</td>\n",
349
+ " <td>5.0</td>\n",
350
+ " <td>...</td>\n",
351
+ " <td>360.0</td>\n",
352
+ " <td>640.0</td>\n",
353
+ " <td>6.0</td>\n",
354
+ " <td>364.0</td>\n",
355
+ " <td>12</td>\n",
356
+ " <td>1</td>\n",
357
+ " <td>US</td>\n",
358
+ " <td>38.0</td>\n",
359
+ " <td>-97.0</td>\n",
360
+ " <td>259178</td>\n",
361
+ " </tr>\n",
362
+ " <tr>\n",
363
+ " <th>609200</th>\n",
364
+ " <td>4.0</td>\n",
365
+ " <td>1.0</td>\n",
366
+ " <td>4.0</td>\n",
367
+ " <td>3.0</td>\n",
368
+ " <td>4.0</td>\n",
369
+ " <td>0.0</td>\n",
370
+ " <td>5.0</td>\n",
371
+ " <td>2.0</td>\n",
372
+ " <td>4.0</td>\n",
373
+ " <td>2.0</td>\n",
374
+ " <td>...</td>\n",
375
+ " <td>360.0</td>\n",
376
+ " <td>640.0</td>\n",
377
+ " <td>15.0</td>\n",
378
+ " <td>180.0</td>\n",
379
+ " <td>17</td>\n",
380
+ " <td>1</td>\n",
381
+ " <td>CR</td>\n",
382
+ " <td>9.8533</td>\n",
383
+ " <td>-83.9023</td>\n",
384
+ " <td>365838</td>\n",
385
+ " </tr>\n",
386
+ " <tr>\n",
387
+ " <th>609201</th>\n",
388
+ " <td>4.0</td>\n",
389
+ " <td>2.0</td>\n",
390
+ " <td>3.0</td>\n",
391
+ " <td>2.0</td>\n",
392
+ " <td>5.0</td>\n",
393
+ " <td>1.0</td>\n",
394
+ " <td>4.0</td>\n",
395
+ " <td>2.0</td>\n",
396
+ " <td>5.0</td>\n",
397
+ " <td>2.0</td>\n",
398
+ " <td>...</td>\n",
399
+ " <td>400.0</td>\n",
400
+ " <td>640.0</td>\n",
401
+ " <td>24.0</td>\n",
402
+ " <td>519.0</td>\n",
403
+ " <td>392</td>\n",
404
+ " <td>6</td>\n",
405
+ " <td>US</td>\n",
406
+ " <td>38.0</td>\n",
407
+ " <td>-97.0</td>\n",
408
+ " <td>131932</td>\n",
409
+ " </tr>\n",
410
+ " <tr>\n",
411
+ " <th>609202</th>\n",
412
+ " <td>1.0</td>\n",
413
+ " <td>4.0</td>\n",
414
+ " <td>3.0</td>\n",
415
+ " <td>3.0</td>\n",
416
+ " <td>2.0</td>\n",
417
+ " <td>4.0</td>\n",
418
+ " <td>2.0</td>\n",
419
+ " <td>4.0</td>\n",
420
+ " <td>2.0</td>\n",
421
+ " <td>5.0</td>\n",
422
+ " <td>...</td>\n",
423
+ " <td>1368.0</td>\n",
424
+ " <td>912.0</td>\n",
425
+ " <td>217.0</td>\n",
426
+ " <td>209.0</td>\n",
427
+ " <td>17</td>\n",
428
+ " <td>2</td>\n",
429
+ " <td>US</td>\n",
430
+ " <td>38.0</td>\n",
431
+ " <td>-97.0</td>\n",
432
+ " <td>671155</td>\n",
433
+ " </tr>\n",
434
+ " <tr>\n",
435
+ " <th>609203</th>\n",
436
+ " <td>4.0</td>\n",
437
+ " <td>2.0</td>\n",
438
+ " <td>2.0</td>\n",
439
+ " <td>3.0</td>\n",
440
+ " <td>2.0</td>\n",
441
+ " <td>1.0</td>\n",
442
+ " <td>2.0</td>\n",
443
+ " <td>5.0</td>\n",
444
+ " <td>3.0</td>\n",
445
+ " <td>5.0</td>\n",
446
+ " <td>...</td>\n",
447
+ " <td>1366.0</td>\n",
448
+ " <td>768.0</td>\n",
449
+ " <td>11.0</td>\n",
450
+ " <td>203.0</td>\n",
451
+ " <td>12</td>\n",
452
+ " <td>1</td>\n",
453
+ " <td>CA</td>\n",
454
+ " <td>45.9784</td>\n",
455
+ " <td>-66.6905</td>\n",
456
+ " <td>121958</td>\n",
457
+ " </tr>\n",
458
+ " </tbody>\n",
459
+ "</table>\n",
460
+ "<p>609204 rows × 111 columns</p>\n",
461
+ "</div>"
462
+ ],
463
+ "text/plain": [
464
+ " EXT1 EXT2 EXT3 EXT4 EXT5 EXT6 EXT7 EXT8 EXT9 EXT10 ... \\\n",
465
+ "0 2.0 2.0 5.0 4.0 4.0 1.0 2.0 4.0 4.0 2.0 ... \n",
466
+ "1 2.0 5.0 5.0 5.0 2.0 5.0 2.0 5.0 3.0 5.0 ... \n",
467
+ "2 3.0 3.0 5.0 1.0 5.0 1.0 5.0 5.0 3.0 4.0 ... \n",
468
+ "3 1.0 5.0 3.0 5.0 2.0 4.0 3.0 5.0 1.0 5.0 ... \n",
469
+ "4 4.0 3.0 3.0 3.0 3.0 2.0 4.0 3.0 2.0 3.0 ... \n",
470
+ "... ... ... ... ... ... ... ... ... ... ... ... \n",
471
+ "609199 1.0 5.0 1.0 5.0 1.0 5.0 1.0 5.0 1.0 5.0 ... \n",
472
+ "609200 4.0 1.0 4.0 3.0 4.0 0.0 5.0 2.0 4.0 2.0 ... \n",
473
+ "609201 4.0 2.0 3.0 2.0 5.0 1.0 4.0 2.0 5.0 2.0 ... \n",
474
+ "609202 1.0 4.0 3.0 3.0 2.0 4.0 2.0 4.0 2.0 5.0 ... \n",
475
+ "609203 4.0 2.0 2.0 3.0 2.0 1.0 2.0 5.0 3.0 5.0 ... \n",
476
+ "\n",
477
+ " screenw screenh introelapse testelapse endelapse IPC country \\\n",
478
+ "0 1324.0 745.0 7.0 2872.0 17 1 US \n",
479
+ "1 1366.0 768.0 70.0 223.0 18 47 US \n",
480
+ "2 1920.0 1080.0 30.0 207.0 10 1 US \n",
481
+ "3 1440.0 960.0 4.0 166.0 10 1 AU \n",
482
+ "4 1920.0 1200.0 12.0 194.0 4 2 US \n",
483
+ "... ... ... ... ... ... ... ... \n",
484
+ "609199 360.0 640.0 6.0 364.0 12 1 US \n",
485
+ "609200 360.0 640.0 15.0 180.0 17 1 CR \n",
486
+ "609201 400.0 640.0 24.0 519.0 392 6 US \n",
487
+ "609202 1368.0 912.0 217.0 209.0 17 2 US \n",
488
+ "609203 1366.0 768.0 11.0 203.0 12 1 CA \n",
489
+ "\n",
490
+ " lat_appx_lots_of_err long_appx_lots_of_err __index_level_0__ \n",
491
+ "0 38.0 -97.0 422981 \n",
492
+ "1 38.0 -97.0 325363 \n",
493
+ "2 42.8886 -88.0384 105821 \n",
494
+ "3 -37.7919 145.084 169213 \n",
495
+ "4 37.5402 -122.3041 839500 \n",
496
+ "... ... ... ... \n",
497
+ "609199 38.0 -97.0 259178 \n",
498
+ "609200 9.8533 -83.9023 365838 \n",
499
+ "609201 38.0 -97.0 131932 \n",
500
+ "609202 38.0 -97.0 671155 \n",
501
+ "609203 45.9784 -66.6905 121958 \n",
502
+ "\n",
503
+ "[609204 rows x 111 columns]"
504
+ ]
505
+ },
506
+ "execution_count": 3,
507
+ "metadata": {},
508
+ "output_type": "execute_result"
509
+ }
510
+ ],
511
  "source": [
 
512
  "import pandas as pd\n",
513
  "from datasets import load_dataset\n",
514
  "\n",
 
518
  "# Convert to a pandas DataFrame for easier manipulation\n",
519
  "df = pd.DataFrame(dataset[\"train\"])\n",
520
  "df"
521
+ ]
522
+ }
523
+ ],
524
+ "metadata": {
525
+ "colab": {
526
+ "provenance": []
527
+ },
528
+ "kernelspec": {
529
+ "display_name": ".venv",
530
+ "language": "python",
531
+ "name": "python3"
532
+ },
533
+ "language_info": {
534
+ "codemirror_mode": {
535
+ "name": "ipython",
536
+ "version": 3
537
  },
538
+ "file_extension": ".py",
539
+ "mimetype": "text/x-python",
540
+ "name": "python",
541
+ "nbconvert_exporter": "python",
542
+ "pygments_lexer": "ipython3",
543
+ "version": "3.13.3"
544
  }
545
+ },
546
+ "nbformat": 4,
547
+ "nbformat_minor": 0
548
+ }