Skip to content

Commit

Permalink
[Doc] MADDPG bug fix of buffer device and improve explanation (#2519)
Browse files Browse the repository at this point in the history
  • Loading branch information
matteobettini authored Oct 29, 2024
1 parent 8eac84a commit 3e4b292
Show file tree
Hide file tree
Showing 2 changed files with 9 additions and 2 deletions.
2 changes: 1 addition & 1 deletion torchrl/data/replay_buffers/storages.py
Original file line number Diff line number Diff line change
Expand Up @@ -1106,7 +1106,7 @@ def __init__(
if self.device.type != "cpu":
raise ValueError(
"Memory map device other than CPU isn't supported. To cast your data to the desired device, "
"use `buffer.append_transform(lambda x: x.to(device)` or a similar transform."
"use `buffer.append_transform(lambda x: x.to(device))` or a similar transform."
)
self._len = 0

Expand Down
9 changes: 8 additions & 1 deletion tutorials/sphinx-tutorials/multiagent_competitive_ddpg.py
Original file line number Diff line number Diff line change
Expand Up @@ -655,16 +655,23 @@
# There are many types of buffers, in this tutorial we use a basic buffer to store and sample tensordict
# data randomly.
#
# This buffer uses :class:`~.data.LazyMemmapStorage`, which stores data on disk.
# This allows the use of disk memory, but can result in slower sampling because data must be cast to the training device.
# To store your buffer on the GPU, you can use :class:`~.data.LazyTensorStorage`, passing the desired device.
# This will result in faster sampling but is subject to the memory constraints of the selected device.
#

replay_buffers = {}
for group, _agents in env.group_map.items():
replay_buffer = ReplayBuffer(
storage=LazyMemmapStorage(
memory_size, device=device
memory_size
), # We will store up to memory_size multi-agent transitions
sampler=RandomSampler(),
batch_size=train_batch_size, # We will sample batches of this size
)
if device.type != "cpu":
replay_buffer.append_transform(lambda x: x.to(device))
replay_buffers[group] = replay_buffer

######################################################################
Expand Down

0 comments on commit 3e4b292

Please sign in to comment.