# hub/__init__.py (forked from activeloopai/deeplake)
import multiprocessing
import sys
import threading
from queue import Queue

import numpy as np
from botocore.config import Config

from hub.util.check_latest_version import warn_if_update_required
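
# macOS defaults to the "spawn" start method since Python 3.8; force "fork"
# so that worker processes inherit already-imported module state.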
if sys.platform == "darwin":
    multiprocessing.set_start_method("fork", force=True)
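
# Hide internal modules and low-level Dataset members from pdoc-generated docs.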
__pdoc__ = {
    "api": False,
    "auto": False,
    "cli": False,
    "client": False,
    "constants": False,
    "config": False,
    "integrations": False,
    "tests": False,
    "util": False,
    "Dataset.clear_cache": False,
    "Dataset.flush": False,
    "Dataset.read_only": False,
    "Dataset.size_approx": False,
    "Dataset.token": False,
    "Dataset.num_samples": False,
}

from .api.dataset import dataset as api_dataset
from .api.read import read
from .api.link import link
from .api.tiled import tiled
from .core.dataset import Dataset
from .core.transform import compute, compose
from .core.tensor import Tensor
from .util.bugout_reporter import hub_reporter
from .compression import SUPPORTED_COMPRESSIONS
from .htype import HTYPE_CONFIGURATIONS
from .integrations import huggingface
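
# Public constants and module-level aliases for the dataset API.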
compressions = list(SUPPORTED_COMPRESSIONS)
htypes = sorted(HTYPE_CONFIGURATIONS)
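# NOTE: the next line rebinds the builtin ``list`` at module scope so that
# ``hub.list`` works; any use of the builtin in this file must come before it.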
list = api_dataset.list
exists = api_dataset.exists
load = api_dataset.load
empty = api_dataset.empty
like = api_dataset.like
delete = api_dataset.delete
rename = api_dataset.rename
copy = api_dataset.copy
deepcopy = api_dataset.deepcopy
ingest = api_dataset.ingest
ingest_kaggle = api_dataset.ingest_kaggle
ingest_dataframe = api_dataset.ingest_dataframe
ingest_huggingface = huggingface.ingest_huggingface
dataset = api_dataset.init
tensor = Tensor
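
# A minimal usage sketch of the aliased API (paths are illustrative):
#
#   import hub
#   ds = hub.empty("./my_dataset")          # create a new local dataset
#   ds = hub.load("hub://username/mnist")   # load an existing one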

__all__ = [
    "tensor",
    "read",
    "link",
    "__version__",
    "load",
    "empty",
    "exists",
    "compute",
    "compose",
    "copy",
    "dataset",
    "Dataset",
    "deepcopy",
    "like",
    "list",
    "ingest",
    "ingest_kaggle",
    "ingest_huggingface",
    "compressions",
    "htypes",
    "config",
    "delete",
    "rename",
]

__version__ = "2.5.0"
warn_if_update_required(__version__)
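# Version string wrapped in a numpy array (presumably so it can be serialized
# alongside other dataset metadata).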
__encoded_version__ = np.array(__version__)
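
# Default botocore client settings for S3-backed datasets: a larger connection
# pool and generous timeouts for large transfers.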
config = {"s3": Config(max_pool_connections=50, connect_timeout=300, read_timeout=300)}
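
# Tag telemetry with the running version and report system info and uncaught
# exceptions via the bugout reporter.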
hub_reporter.tags.append(f"version:{__version__}")
hub_reporter.system_report(publish=True)
hub_reporter.setup_excepthook(publish=True)
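
# Background worker that drains queued analytics events and sends them to the
# client; exceptions are swallowed so telemetry can never crash user code.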
event_queue: Queue = Queue()


def send_event():
    while True:
        try:
            client, event_dict = event_queue.get()
            client.send_event(event_dict)
        except Exception:
            pass


threading.Thread(target=send_event, daemon=True).start()