File size: 6,332 Bytes
b6f487c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "5d3c22fa-b62f-4828-bb55-60640e1a393c",
   "metadata": {},
   "outputs": [],
   "source": [
    "!pip install datasets"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "9f6e89ca-43c5-409c-987e-94b6a1c89082",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Verify the datasets loading script is correct\n",
    "!datasets-cli test /workspace/data/MNBVC-core --data_dir=/workspace/data/MNBVC-core --save_info --all_configs"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "id": "2cb2a5cf-43b1-4372-8583-bd37455406a0",
   "metadata": {},
   "outputs": [],
   "source": [
    "from datasets import load_from_disk,load_dataset_builder, get_dataset_split_names, get_dataset_config_names\n",
    "\n",
    "DATASET='/workspace/data/MNBVC-core'\n",
    "CONFIGURATION='qa_mfa'"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "id": "ddadbe3a-5b78-4b22-8d5a-fb82b42dd01f",
   "metadata": {},
   "outputs": [
    {
     "ename": "FileNotFoundError",
     "evalue": "Directory /workspace/data/MNBVC-core is neither a `Dataset` directory nor a `DatasetDict` directory.",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mFileNotFoundError\u001b[0m                         Traceback (most recent call last)",
      "Cell \u001b[0;32mIn[15], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m ds \u001b[38;5;241m=\u001b[39m \u001b[43mload_from_disk\u001b[49m\u001b[43m(\u001b[49m\u001b[43mDATASET\u001b[49m\u001b[43m)\u001b[49m\n",
      "File \u001b[0;32m/opt/conda/envs/envd/lib/python3.10/site-packages/datasets/load.py:2252\u001b[0m, in \u001b[0;36mload_from_disk\u001b[0;34m(dataset_path, fs, keep_in_memory, storage_options)\u001b[0m\n\u001b[1;32m   2250\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m DatasetDict\u001b[38;5;241m.\u001b[39mload_from_disk(dataset_path, keep_in_memory\u001b[38;5;241m=\u001b[39mkeep_in_memory, storage_options\u001b[38;5;241m=\u001b[39mstorage_options)\n\u001b[1;32m   2251\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 2252\u001b[0m     \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mFileNotFoundError\u001b[39;00m(\n\u001b[1;32m   2253\u001b[0m         \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mDirectory \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mdataset_path\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m is neither a `Dataset` directory nor a `DatasetDict` directory.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m   2254\u001b[0m     )\n",
      "\u001b[0;31mFileNotFoundError\u001b[0m: Directory /workspace/data/MNBVC-core is neither a `Dataset` directory nor a `DatasetDict` directory."
     ]
    }
   ],
   "source": [
    "ds = load_from_disk(DATASET)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "id": "5fe8f56e-019f-475d-811a-f707450934e2",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Help on function load_from_disk in module datasets.load:\n",
      "\n",
      "load_from_disk(dataset_path: str, fs='deprecated', keep_in_memory: Optional[bool] = None, storage_options: Optional[dict] = None) -> Union[datasets.arrow_dataset.Dataset, datasets.dataset_dict.DatasetDict]\n",
      "    Loads a dataset that was previously saved using [`~Dataset.save_to_disk`] from a dataset directory, or\n",
      "    from a filesystem using any implementation of `fsspec.spec.AbstractFileSystem`.\n",
      "    \n",
      "    Args:\n",
      "        dataset_path (`str`):\n",
      "            Path (e.g. `\"dataset/train\"`) or remote URI (e.g.\n",
      "            `\"s3://my-bucket/dataset/train\"`) of the [`Dataset`] or [`DatasetDict`] directory where the dataset will be\n",
      "            loaded from.\n",
      "        fs (`~filesystems.S3FileSystem` or `fsspec.spec.AbstractFileSystem`, *optional*):\n",
      "            Instance of the remote filesystem used to download the files from.\n",
      "    \n",
      "            <Deprecated version=\"2.9.0\">\n",
      "    \n",
      "            `fs` was deprecated in version 2.9.0 and will be removed in 3.0.0.\n",
      "            Please use `storage_options` instead, e.g. `storage_options=fs.storage_options`.\n",
      "    \n",
      "            </Deprecated>\n",
      "    \n",
      "        keep_in_memory (`bool`, defaults to `None`):\n",
      "            Whether to copy the dataset in-memory. If `None`, the dataset\n",
      "            will not be copied in-memory unless explicitly enabled by setting `datasets.config.IN_MEMORY_MAX_SIZE` to\n",
      "            nonzero. See more details in the [improve performance](../cache#improve-performance) section.\n",
      "    \n",
      "        storage_options (`dict`, *optional*):\n",
      "            Key/value pairs to be passed on to the file-system backend, if any.\n",
      "    \n",
      "            <Added version=\"2.9.0\"/>\n",
      "    \n",
      "    Returns:\n",
      "        [`Dataset`] or [`DatasetDict`]:\n",
      "        - If `dataset_path` is a path of a dataset directory: the dataset requested.\n",
      "        - If `dataset_path` is a path of a dataset dict directory, a [`DatasetDict`] with each split.\n",
      "    \n",
      "    Example:\n",
      "    \n",
      "    ```py\n",
      "    >>> from datasets import load_from_disk\n",
      "    >>> ds = load_from_disk('path/to/dataset/directory')\n",
      "    ```\n",
      "\n"
     ]
    }
   ],
   "source": [
    "help(load_from_disk)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "6a2a08b1-0f99-4944-bc6d-98d288f6e366",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.13"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}