__init__.py
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
import weakref
from warnings import warn
import torch
from tensordict import set_lazy_legacy
from torch import multiprocessing as mp
from torch.distributions.transforms import _InverseTransform, ComposeTransform
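# Globally opt out of tensordict's legacy lazy ops, so that reshaping
# operations such as transpose/permute return materialized tensordicts
# rather than lazy views.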
set_lazy_legacy(False).set()
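# Spread MuJoCo's EGL rendering across the non-default GPUs: each process
# picks a device in [1, device_count) from its PID, leaving device 0 free.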
if torch.cuda.device_count() > 1:
    n = torch.cuda.device_count() - 1
    os.environ["MUJOCO_EGL_DEVICE_ID"] = str(1 + (os.getpid() % n))
from ._extension import _init_extension
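# `version.py` is generated at build time and may be missing when running
# from a source checkout.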
try:
    from .version import __version__
except ImportError:
    __version__ = None
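# `torch.compiler.is_dynamo_compiling` only exists in recent PyTorch
# releases; fall back to the private `torch._dynamo` alias on older versions.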
try:
    from torch.compiler import is_dynamo_compiling
except ImportError:
    from torch._dynamo import is_compiling as is_dynamo_compiling
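# Initialize TorchRL's compiled C++ extension, when it is available.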
_init_extension()
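# TorchRL uses the "spawn" start method for its worker subprocesses; if
# another context was already set, only warn instead of failing.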
try:
    mp.set_start_method("spawn")
except RuntimeError as err:
    if str(err).startswith("context has already been set"):
        mp_start_method = mp.get_start_method()
        if mp_start_method != "spawn":
            warn(
                f"failed to set start method to spawn, "
                f"and current start method for mp is {mp_start_method}."
            )
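# Eagerly import the public submodules so that `import torchrl` exposes the
# full API.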
import torchrl.collectors
import torchrl.data
import torchrl.envs
import torchrl.modules
import torchrl.objectives
import torchrl.trainers
from torchrl._utils import compile_with_warmup, timeit
# Filter warnings in subprocesses: True by default given the multiple optional
# deps of the library. This can be turned off via `torchrl.filter_warnings_subprocess = False`.
filter_warnings_subprocess = True
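# Remember the initial size of the thread pool so it can be restored later if
# TorchRL adjusts it.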
_THREAD_POOL_INIT = torch.get_num_threads()
# monkey-patch dist transforms until https://github.com/pytorch/pytorch/pull/135001/ finds a home
@property
def _inv(self):
    """Patched version of Transform.inv.

    Returns the inverse :class:`Transform` of this transform.
    This should satisfy ``t.inv.inv is t``.
    """
    inv = None
    if self._inv is not None:
        inv = self._inv()
    if inv is None:
        inv = _InverseTransform(self)
        if not is_dynamo_compiling():
            self._inv = weakref.ref(inv)
    return inv


torch.distributions.transforms.Transform.inv = _inv
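# The same caching patch for ComposeTransform.inv: the inverse composes the
# inverted parts in reverse order.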
@property
def _inv(self):
    inv = None
    if self._inv is not None:
        inv = self._inv()
    if inv is None:
        inv = ComposeTransform([p.inv for p in reversed(self.parts)])
        if not is_dynamo_compiling():
            self._inv = weakref.ref(inv)
            inv._inv = weakref.ref(self)
        else:
            # We need inv.inv to be equal to self, but weakref can cause a graph break
            inv._inv = lambda out=self: out
    return inv


ComposeTransform.inv = _inv
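# A minimal sanity check of the patched behavior (illustrative only, not
# executed at import time):
#
#     t = torch.distributions.transforms.ExpTransform()
#     assert t.inv.inv is t  # round-trip identity, as promised by the docstring
#     inv = t.inv
#     assert inv is t.inv    # the weakref cache returns the same object while it is alive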