-
Notifications
You must be signed in to change notification settings - Fork 1.6k
/
Copy path__init__.py
106 lines (90 loc) · 4.05 KB
/
__init__.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
# coding=utf-8
# Copyright 2024 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=line-too-long
"""`tensorflow_datasets` (`tfds`) defines a collection of datasets ready-to-use with TensorFlow.
Each dataset is defined as a `tfds.core.DatasetBuilder`, which encapsulates
the logic to download the dataset and construct an input pipeline, as well as
contains the dataset documentation (version, splits, number of examples, etc.).
The main library entrypoints are:
* `tfds.builder`: fetch a `tfds.core.DatasetBuilder` by name
* `tfds.load`: convenience method to construct a builder, download the data, and
create an input pipeline, returning a `tf.data.Dataset`.
Documentation:
* These API docs
* [Available datasets](https://www.tensorflow.org/datasets/catalog/overview)
* [Colab tutorial](https://colab.research.google.com/github/tensorflow/datasets/blob/master/docs/overview.ipynb)
* [Add a dataset](https://www.tensorflow.org/datasets/add_dataset)
"""
# pylint: enable=line-too-long
# pylint: disable=g-import-not-at-top,g-bad-import-order,wrong-import-position,unused-import
# NOTE(review): the leading indentation of this block was lost in extraction;
# it has been reconstructed from the syntax (try/with bodies). Code tokens are
# unchanged.
import time

# Timestamp taken as the very first statement so the logged duration covers
# (almost) the entire module import.
_TIMESTAMP_IMPORT_STARTS = time.time()
from absl import logging
from etils import epy as _epy
import tensorflow_datasets.core.logging as _tfds_logging
from tensorflow_datasets.core.logging import call_metadata as _call_metadata

# Telemetry record for this import; start time is stored in microseconds.
_metadata = _call_metadata.CallMetadata()
_metadata.start_time_micros = int(_TIMESTAMP_IMPORT_STARTS * 1e6)
# Milliseconds spent importing the dataset-builder packages below; stays 0 if
# those imports raise before the measurement completes.
_import_time_ms_dataset_builders = 0
try:
  # Imports for registration
  _before_dataset_imports = time.time()
  from tensorflow_datasets import dataset_collections
  # pytype: disable=import-error
  # For builds that don't include all dataset builders, we don't want to fail on
  # import errors of dataset builders.
  # NOTE(review): `lazy_api_imports` presumably binds these names in globals()
  # while deferring the actual module import to first use — confirm against
  # etils.epy; that is what makes missing-builder builds importable.
  with _epy.lazy_api_imports(globals()):
    from tensorflow_datasets import audio
    from tensorflow_datasets import graphs
    from tensorflow_datasets import image
    from tensorflow_datasets import image_classification
    from tensorflow_datasets import object_detection
    from tensorflow_datasets import nearest_neighbors
    from tensorflow_datasets import question_answering
    from tensorflow_datasets import d4rl
    from tensorflow_datasets import ranking
    from tensorflow_datasets import recommendation
    from tensorflow_datasets import rl_unplugged
    from tensorflow_datasets import rlds
    from tensorflow_datasets import robotics
    from tensorflow_datasets import robomimic
    from tensorflow_datasets import structured
    from tensorflow_datasets import summarization
    from tensorflow_datasets import text
    from tensorflow_datasets import text_simplification
    from tensorflow_datasets import time_series
    from tensorflow_datasets import translate
    from tensorflow_datasets import video
    from tensorflow_datasets import vision_language
  # pytype: enable=import-error
  # Elapsed wall-clock time for the builder imports, in milliseconds.
  _import_time_ms_dataset_builders = int(
      (time.time() - _before_dataset_imports) * 1000
  )
  # Public API to create and generate a dataset
  from tensorflow_datasets.public_api import *  # pylint: disable=wildcard-import
  from tensorflow_datasets import public_api  # pylint: disable=g-bad-import-order
  # __all__ for import * as well as documentation
  __all__ = public_api.__all__
except Exception as exception:  # pylint: disable=broad-except
  # Best-effort import: record and log the failure instead of propagating, so
  # `import tensorflow_datasets` does not hard-fail on a partial build.
  _metadata.mark_error()
  logging.exception(exception)
finally:
  # Always emit the import-telemetry event, on success or failure alike.
  _metadata.mark_end()
  _tfds_logging.tfds_import(
      metadata=_metadata,
      import_time_ms_tensorflow=0,
      import_time_ms_dataset_builders=_import_time_ms_dataset_builders,
  )
# Drop the private alias so it is not exposed as a module attribute.
del _epy