[Versioning] v0.5 bump #2267

Merged
29 commits merged on Jul 10, 2024
Changes from 1 commit
amend
vmoens committed Jul 9, 2024
commit 1ea60c79d94182006091eb19af4a827c95e25f22
12 changes: 8 additions & 4 deletions torchrl/envs/batched_envs.py
@@ -406,17 +406,18 @@ def _find_sync_values(self):
             return _do_nothing, _do_nothing
 
         if worker_device is None:
-            worker_not_main = [False]
+            worker_not_main = False
 
-            def find_all_worker_devices(item, worker_not_main=worker_not_main):
+            def find_all_worker_devices(item):
+                nonlocal worker_not_main
                 if hasattr(item, "device"):
-                    worker_not_main[0] = worker_not_main[0] or (
+                    worker_not_main = worker_not_main or (
                         item.device != self_device
                     )
 
             for td in self.shared_tensordicts:
                 td.apply(find_all_worker_devices, filter_empty=True)
-            if worker_not_main[0]:
+            if worker_not_main:
                 if torch.cuda.is_available():
                     worker_device = (
                         torch.device("cuda")
@@ -431,6 +432,8 @@ def find_all_worker_devices(item, worker_not_main=worker_not_main):
                     )
                 else:
                     raise RuntimeError("Did not find a valid worker device")
+            else:
+                worker_device = self_device
 
         if (
             worker_device is not None
@@ -460,6 +463,7 @@ def find_all_worker_devices(item, worker_not_main=worker_not_main):
             and self_device.type == "mps"
         ):
             return _mps_sync(self_device), _mps_sync(self_device)
+        return _do_nothing, _do_nothing
 
     def __getstate__(self):
         out = copy(self.__dict__)
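The functional change in this commit is twofold: the one-element list previously used to smuggle a mutable flag into the nested find_all_worker_devices closure is replaced by a plain boolean rebound through nonlocal, and two fallthrough cases are made explicit (worker_device now defaults to self_device when no shared tensor lives on another device, and _find_sync_values ends with an explicit _do_nothing pair instead of falling off the end). Below is a minimal, standalone sketch of the closure-flag pattern only; Leaf and scan_devices are hypothetical stand-ins for illustration, not TorchRL APIs.

# Minimal sketch of the nonlocal closure-flag pattern adopted in this commit.
# `Leaf` and `scan_devices` are hypothetical, not part of torchrl.
from dataclasses import dataclass


@dataclass
class Leaf:
    device: str


def scan_devices(items, self_device="cpu"):
    """Return True if any leaf sits on a device other than `self_device`."""
    found_other = False  # plain bool, replacing the old `[False]` wrapper

    def visit(item):
        nonlocal found_other  # rebind the enclosing variable directly
        if hasattr(item, "device"):
            found_other = found_other or (item.device != self_device)

    for item in items:
        visit(item)
    return found_other


print(scan_devices([Leaf("cpu"), Leaf("cpu")]))     # False
print(scan_devices([Leaf("cpu"), Leaf("cuda:0")]))  # True

Without the nonlocal declaration, the assignment inside visit would create a new local variable and leave the outer flag untouched; the previous code worked around this by mutating a one-element list in place, which this commit removes.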