
Test

master
hxdst 11 months ago
parent
commit
bb0e92897c
1 changed file with 140 additions and 0 deletions

read_data.ipynb  (+140, -0)

@@ -0,0 +1,140 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 2,
"id": "f0668822",
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"from torch.utils.data import Dataset"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "a68847cd",
"metadata": {
"scrolled": true
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Help on class Dataset in module torch.utils.data.dataset:\n",
"\n",
"class Dataset(typing.Generic)\n",
" | An abstract class representing a :class:`Dataset`.\n",
" | \n",
" | All datasets that represent a map from keys to data samples should subclass\n",
" | it. All subclasses should overwrite :meth:`__getitem__`, supporting fetching a\n",
" | data sample for a given key. Subclasses could also optionally overwrite\n",
" | :meth:`__len__`, which is expected to return the size of the dataset by many\n",
" | :class:`~torch.utils.data.Sampler` implementations and the default options\n",
" | of :class:`~torch.utils.data.DataLoader`.\n",
" | \n",
" | .. note::\n",
" | :class:`~torch.utils.data.DataLoader` by default constructs a index\n",
" | sampler that yields integral indices. To make it work with a map-style\n",
" | dataset with non-integral indices/keys, a custom sampler must be provided.\n",
" | \n",
" | Method resolution order:\n",
" | Dataset\n",
" | typing.Generic\n",
" | builtins.object\n",
" | \n",
" | Methods defined here:\n",
" | \n",
" | __add__(self, other: 'Dataset[T_co]') -> 'ConcatDataset[T_co]'\n",
" | \n",
" | __getitem__(self, index) -> +T_co\n",
" | \n",
" | ----------------------------------------------------------------------\n",
" | Data descriptors defined here:\n",
" | \n",
" | __dict__\n",
" | dictionary for instance variables (if defined)\n",
" | \n",
" | __weakref__\n",
" | list of weak references to the object (if defined)\n",
" | \n",
" | ----------------------------------------------------------------------\n",
" | Data and other attributes defined here:\n",
" | \n",
" | __annotations__ = {}\n",
" | \n",
" | __orig_bases__ = (typing.Generic[+T_co],)\n",
" | \n",
" | __parameters__ = (+T_co,)\n",
" | \n",
" | ----------------------------------------------------------------------\n",
" | Class methods inherited from typing.Generic:\n",
" | \n",
" | __class_getitem__(params) from builtins.type\n",
" | Parameterizes a generic class.\n",
" | \n",
" | At least, parameterizing a generic class is the *main* thing this method\n",
" | does. For example, for some generic class `Foo`, this is called when we\n",
" | do `Foo[int]` - there, with `cls=Foo` and `params=int`.\n",
" | \n",
" | However, note that this method is also called when defining generic\n",
" | classes in the first place with `class Foo(Generic[T]): ...`.\n",
" | \n",
" | __init_subclass__(*args, **kwargs) from builtins.type\n",
" | This method is called when a class is subclassed.\n",
" | \n",
" | The default implementation does nothing. It may be\n",
" | overridden to extend subclasses.\n",
"\n"
]
}
],
"source": [
"help(Dataset)"
]
},
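{
"cell_type": "code",
"execution_count": null,
"id": "b51c0f3a",
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"# Quick sketch of the map-style API described in the help text above, using\n",
"# the built-in TensorDataset: __getitem__/__len__ give indexed access, and\n",
"# __add__ concatenates two datasets into a ConcatDataset.\n",
"import torch\n",
"from torch.utils.data import TensorDataset\n",
"\n",
"a = TensorDataset(torch.arange(3))\n",
"b = TensorDataset(torch.arange(3, 6))\n",
"combined = a + b\n",
"print(len(combined))   # 6\n",
"print(combined[4])     # (tensor(4),)"
]
},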
{
"cell_type": "code",
"execution_count": null,
"id": "de52bf0e",
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"class Mydata(DataSet):\n",
" def __init__(DataSet):\n",
" pass\n",
" \n",
" def __getitem__(self,idx):\n",
" \n",
" \n",
" "
]
}
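{
"cell_type": "code",
"execution_count": null,
"id": "9e7d21c4",
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"# A minimal usage sketch, assuming Mydata wraps an in-memory list as defined above.\n",
"# DataLoader's default sampler yields integer indices, which __getitem__ and\n",
"# __len__ support, so batching works without a custom sampler.\n",
"from torch.utils.data import DataLoader\n",
"\n",
"dataset = Mydata(list(range(10)))\n",
"loader = DataLoader(dataset, batch_size=4, shuffle=True)\n",
"for batch in loader:\n",
"    print(batch)"
]
}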
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.4"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
