From f30f62165d0c9f9ccdc0330b0005c35ffaaa1635 Mon Sep 17 00:00:00 2001
From: Phil Wang
Date: Sat, 27 Mar 2021 09:48:37 -0700
Subject: [PATCH] cleanup

---
 glom_pytorch/glom_pytorch.py | 2 +-
 setup.py                     | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/glom_pytorch/glom_pytorch.py b/glom_pytorch/glom_pytorch.py
index e337484..8eacf50 100644
--- a/glom_pytorch/glom_pytorch.py
+++ b/glom_pytorch/glom_pytorch.py
@@ -108,7 +108,7 @@ def __init__(
         self.attention = ConsensusAttention(num_patches_side, attend_self = consensus_self, local_consensus_radius = local_consensus_radius)

     def forward(self, img, iters = None, levels = None, return_all = False):
-        b, h, w, _, device = *img.shape, img.device
+        b, device = img.shape[0], img.device
         iters = default(iters, self.levels * 2) # need to have twice the number of levels of iterations in order for information to propagate up and back down. can be overridden

         tokens = self.image_to_tokens(img)
diff --git a/setup.py b/setup.py
index 7d9a71c..7c673e0 100644
--- a/setup.py
+++ b/setup.py
@@ -3,7 +3,7 @@
 setup(
   name = 'glom-pytorch',
   packages = find_packages(),
-  version = '0.0.12',
+  version = '0.0.14',
   license='MIT',
   description = 'Glom - Pytorch',
   author = 'Phil Wang',
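
Note: a minimal usage sketch follows, illustrating that after this change forward() only reads the batch size from img.shape[0] instead of unpacking all four image dimensions. The constructor arguments mirror the project README and are assumptions for this sketch, not part of the patch.

    import torch
    from glom_pytorch import Glom

    # constructor arguments as in the project README (assumed for this sketch)
    model = Glom(
        dim = 512,         # dimension of the tokens at every level
        levels = 6,        # number of levels in the part-whole hierarchy
        image_size = 224,  # input image size
        patch_size = 14    # size of each bottom-level patch
    )

    img = torch.randn(1, 3, 224, 224)  # (batch, channels, height, width)

    # forward now only takes the batch size from img.shape[0];
    # iters defaults to 2 * levels so information can propagate up and back down
    all_levels = model(img, iters = 12)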