Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Feature]: Added support for TensorDictSequence module subsampling #332

Merged
Merged 12 commits on Aug 10, 2022
Prev Previous commit
Next Next commit
Fixed tests
  • Loading branch information
nicolas-dufour committed Aug 1, 2022
commit c1fdc1f3c9b067a21b18d002105b6e673232fb7c
12 changes: 5 additions & 7 deletions test/test_tensordictmodules.py
Original file line number Diff line number Diff line change
Expand Up @@ -320,7 +320,7 @@ def test_functional_probabilistic_laterconstruct(self, safe, spec_type):
tensordict_module, (
params,
buffers,
) = tensordict_module.ç()
) = tensordict_module.make_functional_with_buffers()

td = TensorDict({"in": torch.randn(3, 3)}, [3])
td = tensordict_module(td, params=params, buffers=buffers)
Expand Down Expand Up @@ -1527,8 +1527,8 @@ def test_vmap_probabilistic(self, safe, spec_type):
elif safe and spec_type == "bounded":
assert ((td_out.get("out") < 0.1) | (td_out.get("out") > -0.1)).all()

@pytest.mark.parametrize("vmap", [True, False])
def test_submodule_sequence(self, vmap):
@pytest.mark.parametrize("functional", [True, False])
def test_submodule_sequence(self, functional):
td_module_1 = TensorDictModule(
nn.Linear(3, 2),
in_keys=["in"],
Expand All @@ -1541,18 +1541,17 @@ def test_submodule_sequence(self, vmap):
)
td_module = TensorDictSequence(td_module_1, td_module_2)

if vmap:
if functional:
td_module, (params, buffers) = td_module.make_functional_with_buffers()
td_0 = TensorDict({"in": torch.randn(5, 3)}, [5])
td_module(td_0, params=params, buffers=buffers, vmap=True)
td_module(td_0, params=params, buffers=buffers)
assert td_0.get("out").shape == torch.Size([5, 4])
td_1 = TensorDict({"in": torch.randn(5, 3)}, [5])
td_module(
td_1,
out_keys_filter=["hidden"],
params=params,
buffers=buffers,
vmap=True,
)
assert "hidden" in td_1.keys()
assert "out" not in td_1.keys()
Expand All @@ -1562,7 +1561,6 @@ def test_submodule_sequence(self, vmap):
in_keys_filter=["hidden"],
params=params,
buffers=buffers,
vmap=True,
)
assert "out" in td_2.keys()
assert td_2.get("out").shape == torch.Size([5, 4])
Expand Down