Skip to content

Commit

Permalink
[BugFix] Fix shape setting in CompositeSpec (#1620)
Browse files Browse the repository at this point in the history
Co-authored-by: Matteo Bettini <[email protected]>
  • Loading branch information
vmoens and matteobettini authored Oct 10, 2023
1 parent fdee633 commit 5e81445
Show file tree
Hide file tree
Showing 3 changed files with 26 additions and 5 deletions.
17 changes: 17 additions & 0 deletions test/test_specs.py
Original file line number Diff line number Diff line change
Expand Up @@ -651,6 +651,23 @@ def test_nested_composite_spec_update(self, shape, is_complete, device, dtype):
}
assert ts["nested_cp"]["act"] is not None

def test_change_batch_size(self, shape, is_complete, device, dtype):
    """Regression test for shape (batch-size) resetting on nested CompositeSpec.

    Verifies that expanding a composite spec propagates the new leading
    dimensions to nested specs, while shrinking the root's shape does NOT
    implicitly shrink an already-expanded nested spec — the nested shape
    must be reset explicitly before the root can be re-batched.
    """
    # Build the fixture spec; assumes self._composite_spec returns a
    # CompositeSpec with batch dims `shape` — TODO confirm against the
    # fixture defined earlier in this test class.
    ts = self._composite_spec(shape, is_complete, device, dtype)
    # Add a nested composite with a single unbounded leaf, sharing `shape`.
    ts["nested"] = CompositeSpec(
        leaf=UnboundedContinuousTensorSpec(shape), shape=shape
    )
    # Expanding prepends a new leading dim of size 3; the expansion must
    # reach both the nested composite and its leaf.
    ts = ts.expand(3, *shape)
    assert ts["nested"].shape == (3, *shape)
    assert ts["nested", "leaf"].shape == (3, *shape)
    # Clearing the root batch size is allowed and leaves children untouched.
    ts.shape = ()
    # this does not change
    assert ts["nested"].shape == (3, *shape)
    assert ts.shape == ()
    # Only after explicitly clearing the nested shape can the root be given
    # a new batch size, which then propagates down to the nested spec.
    ts["nested"].shape = ()
    ts.shape = (3,)
    assert ts.shape == (3,)
    assert ts["nested"].shape == (3,)


@pytest.mark.parametrize("shape", [(), (2, 3)])
@pytest.mark.parametrize("device", get_default_devices())
Expand Down
4 changes: 2 additions & 2 deletions torchrl/data/tensor_specs.py
Original file line number Diff line number Diff line change
Expand Up @@ -3135,10 +3135,10 @@ def shape(self, value: torch.Size):
raise RuntimeError("Cannot modify shape of locked composite spec.")
for key, spec in self.items():
if isinstance(spec, CompositeSpec):
if spec.shape[: self.ndim] != self.shape:
if spec.shape[: len(value)] != value:
spec.shape = value
elif spec is not None:
if spec.shape[: self.ndim] != self.shape:
if spec.shape[: len(value)] != value:
raise ValueError(
f"The shape of the spec and the CompositeSpec mismatch during shape resetting: the "
f"{self.ndim} first dimensions should match but got self['{key}'].shape={spec.shape} and "
Expand Down
10 changes: 7 additions & 3 deletions torchrl/envs/libs/gym.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,14 @@
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

from __future__ import annotations

import importlib
import warnings
from copy import copy
from types import ModuleType
from typing import Dict, List, Optional, Tuple
from typing import Dict, List, Tuple
from warnings import warn

import numpy as np
Expand Down Expand Up @@ -310,7 +313,8 @@ def _gym_to_torchrl_spec_transform(
categorical_action_encoding=categorical_action_encoding,
remap_state_to_observation=remap_state_to_observation,
)
return CompositeSpec(**spec_out)
# the batch-size must be set later
return CompositeSpec(spec_out)
elif isinstance(spec, gym_spaces.dict.Dict):
return _gym_to_torchrl_spec_transform(
spec.spaces,
Expand Down Expand Up @@ -910,7 +914,7 @@ def info_dict_reader(self, value: callable):
self._info_dict_reader = value

def _reset(
self, tensordict: Optional[TensorDictBase] = None, **kwargs
self, tensordict: TensorDictBase | None = None, **kwargs
) -> TensorDictBase:
if self._is_batched:
# batched (aka 'vectorized') env reset is a bit special: envs are
Expand Down

0 comments on commit 5e81445

Please sign in to comment.