Skip to content

Commit

Permalink
[pre-commit.ci] pre-commit suggestions (#895)
Browse files Browse the repository at this point in the history
Co-authored-by: otaj <[email protected]>
  • Loading branch information
pre-commit-ci[bot] and otaj authored Oct 5, 2022
1 parent 9d757d6 commit 9dd72c4
Show file tree
Hide file tree
Showing 5 changed files with 28 additions and 37 deletions.
14 changes: 7 additions & 7 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -22,20 +22,20 @@ repos:
- id: detect-private-key

- repo: https://github.com/asottile/pyupgrade
rev: v2.34.0
rev: v2.38.2
hooks:
- id: pyupgrade
args: [--py36-plus]
name: Upgrade code

- repo: https://github.com/myint/docformatter
rev: v1.4
- repo: https://github.com/PyCQA/docformatter
rev: v1.5.0
hooks:
- id: docformatter
args: [--in-place, --wrap-summaries=115, --wrap-descriptions=120]

- repo: https://github.com/executablebooks/mdformat
rev: 0.7.14
rev: 0.7.16
hooks:
- id: mdformat
additional_dependencies:
Expand All @@ -50,17 +50,17 @@ repos:
- id: isort

- repo: https://github.com/psf/black
rev: 22.6.0
rev: 22.8.0
hooks:
- id: black
name: Format code

- repo: https://github.com/asottile/yesqa
rev: v1.3.0
rev: v1.4.0
hooks:
- id: yesqa

- repo: https://github.com/PyCQA/flake8
rev: 4.0.1
rev: 5.0.4
hooks:
- id: flake8
45 changes: 21 additions & 24 deletions pl_bolts/models/self_supervised/resnets.py
Original file line number Diff line number Diff line change
Expand Up @@ -285,10 +285,12 @@ def _resnet(arch, block, layers, pretrained, progress, **kwargs):
return model


RESNET_PAPER = '`"Deep Residual Learning for Image Recognition" <https://arxiv.org/pdf/1512.03385.pdf>`_'


@under_review()
def resnet18(pretrained: bool = False, progress: bool = True, **kwargs):
r"""ResNet-18 model from
`"Deep Residual Learning for Image Recognition" <https://arxiv.org/pdf/1512.03385.pdf>`_
f"""ResNet-18 model from {RESNET_PAPER}
Args:
pretrained: If True, returns a model pre-trained on ImageNet
Expand All @@ -299,8 +301,7 @@ def resnet18(pretrained: bool = False, progress: bool = True, **kwargs):

@under_review()
def resnet34(pretrained=False, progress=True, **kwargs):
r"""ResNet-34 model from
`"Deep Residual Learning for Image Recognition" <https://arxiv.org/pdf/1512.03385.pdf>`_
f"""ResNet-34 model from {RESNET_PAPER}
Args:
pretrained: If True, returns a model pre-trained on ImageNet
Expand All @@ -311,8 +312,7 @@ def resnet34(pretrained=False, progress=True, **kwargs):

@under_review()
def resnet50(pretrained: bool = False, progress: bool = True, **kwargs):
r"""ResNet-50 model from
`"Deep Residual Learning for Image Recognition" <https://arxiv.org/pdf/1512.03385.pdf>`_
f"""ResNet-50 model from {RESNET_PAPER}
Args:
pretrained: If True, returns a model pre-trained on ImageNet
Expand All @@ -323,8 +323,7 @@ def resnet50(pretrained: bool = False, progress: bool = True, **kwargs):

@under_review()
def resnet101(pretrained: bool = False, progress: bool = True, **kwargs):
r"""ResNet-101 model from
`"Deep Residual Learning for Image Recognition" <https://arxiv.org/pdf/1512.03385.pdf>`_
f"""ResNet-101 model from {RESNET_PAPER}
Args:
pretrained: If True, returns a model pre-trained on ImageNet
Expand All @@ -335,8 +334,7 @@ def resnet101(pretrained: bool = False, progress: bool = True, **kwargs):

@under_review()
def resnet152(pretrained: bool = False, progress: bool = True, **kwargs):
r"""ResNet-152 model from
`"Deep Residual Learning for Image Recognition" <https://arxiv.org/pdf/1512.03385.pdf>`_
f"""ResNet-34 model from {RESNET_PAPER}
Args:
pretrained: If True, returns a model pre-trained on ImageNet
Expand All @@ -345,10 +343,14 @@ def resnet152(pretrained: bool = False, progress: bool = True, **kwargs):
return _resnet("resnet152", Bottleneck, [3, 8, 36, 3], pretrained, progress, **kwargs)


AGGREGATED_RESNET_PAPER = (
'`"Aggregated Residual Transformation for Deep Neural Networks" <https://arxiv.org/pdf/1611.05431.pdf>`_'
)


@under_review()
def resnext50_32x4d(pretrained: bool = False, progress: bool = True, **kwargs):
r"""ResNeXt-50 32x4d model from
`"Aggregated Residual Transformation for Deep Neural Networks" <https://arxiv.org/pdf/1611.05431.pdf>`_
f"""ResNeXt-50 32x4d model from {AGGREGATED_RESNET_PAPER}
Args:
pretrained: If True, returns a model pre-trained on ImageNet
Expand All @@ -361,8 +363,7 @@ def resnext50_32x4d(pretrained: bool = False, progress: bool = True, **kwargs):

@under_review()
def resnext101_32x8d(pretrained: bool = False, progress: bool = True, **kwargs):
r"""ResNeXt-101 32x8d model from
`"Aggregated Residual Transformation for Deep Neural Networks" <https://arxiv.org/pdf/1611.05431.pdf>`_
f"""ResNeXt-101 32x8d model from {AGGREGATED_RESNET_PAPER}
Args:
pretrained: If True, returns a model pre-trained on ImageNet
Expand All @@ -375,11 +376,9 @@ def resnext101_32x8d(pretrained: bool = False, progress: bool = True, **kwargs):

@under_review()
def wide_resnet50_2(pretrained: bool = False, progress: bool = True, **kwargs):
r"""Wide ResNet-50-2 model from
`"Wide Residual Networks" <https://arxiv.org/pdf/1605.07146.pdf>`_
The model is the same as ResNet except for the bottleneck number of channels
which is twice larger in every block. The number of channels in outer 1x1
convolutions is the same, e.g. last block in ResNet-50 has 2048-512-2048
r"""Wide ResNet-50-2 model from `"Wide Residual Networks" <https://arxiv.org/pdf/1605.07146.pdf>`_ The model is
the same as ResNet except for the bottleneck number of channels which is twice larger in every block. The
number of channels in outer 1x1 convolutions is the same, e.g. last block in ResNet-50 has 2048-512-2048
channels, and in Wide ResNet-50-2 has 2048-1024-2048.
Args:
Expand All @@ -392,11 +391,9 @@ def wide_resnet50_2(pretrained: bool = False, progress: bool = True, **kwargs):

@under_review()
def wide_resnet101_2(pretrained: bool = False, progress: bool = True, **kwargs):
r"""Wide ResNet-101-2 model from
`"Wide Residual Networks" <https://arxiv.org/pdf/1605.07146.pdf>`_
The model is the same as ResNet except for the bottleneck number of channels
which is twice larger in every block. The number of channels in outer 1x1
convolutions is the same, e.g. last block in ResNet-50 has 2048-512-2048
r"""Wide ResNet-101-2 model from `"Wide Residual Networks" <https://arxiv.org/pdf/1605.07146.pdf>`_ The model is
the same as ResNet except for the bottleneck number of channels which is twice larger in every block. The
number of channels in outer 1x1 convolutions is the same, e.g. last block in ResNet-50 has 2048-512-2048
    channels, and in Wide ResNet-101-2 has 2048-1024-2048.
Args:
Expand Down
1 change: 0 additions & 1 deletion pl_bolts/utils/arguments.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,6 @@ def parse_lit_args(self, *args: Any, **kwargs: Any) -> Namespace:

@under_review()
def gather_lit_args(cls: Any, root_cls: Optional[Any] = None) -> List[LitArg]:

if root_cls is None:
if issubclass(cls, LightningModule):
root_cls = LightningModule
Expand Down
2 changes: 0 additions & 2 deletions tests/datamodules/test_datamodules.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@


def test_dev_datasets(datadir):

ds = CIFAR10(data_dir=datadir)
for _ in ds:
pass
Expand Down Expand Up @@ -48,7 +47,6 @@ def _create_synth_Cityscapes_dataset(path_dir):


def test_cityscapes_datamodule(datadir):

_create_synth_Cityscapes_dataset(datadir)

batch_size = 1
Expand Down
3 changes: 0 additions & 3 deletions tests/datasets/test_datasets.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@

@pytest.mark.parametrize("batch_size,num_samples", [(16, 100), (1, 0)])
def test_dummy_ds(catch_warnings, batch_size, num_samples):

if num_samples > 0:

ds = DummyDataset((1, 28, 28), (1,), num_samples=num_samples)
Expand All @@ -41,7 +40,6 @@ def test_dummy_ds(catch_warnings, batch_size, num_samples):

@pytest.mark.parametrize("batch_size,size,num_samples", [(16, 32, 100), (1, 0, 0)])
def test_rand_dict_ds(catch_warnings, batch_size, size, num_samples):

if num_samples > 0 or size > 0:
ds = RandomDictDataset(size, num_samples=num_samples)
dl = DataLoader(ds, batch_size=batch_size)
Expand Down Expand Up @@ -83,7 +81,6 @@ def test_rand_ds(catch_warnings, batch_size, size, num_samples):

@pytest.mark.parametrize("batch_size,size,num_samples", [(16, 32, 100), (1, 0, 0)])
def test_rand_str_dict_ds(catch_warnings, batch_size, size, num_samples):

if num_samples > 0 and size > 0:
ds = RandomDictStringDataset(size=size, num_samples=100)
dl = DataLoader(ds, batch_size=batch_size)
Expand Down

0 comments on commit 9dd72c4

Please sign in to comment.