From 0abb1e009ce98f527a630f50fee0be37519938ff Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=EC=B5=9C=EC=9D=98=EC=A7=84?=
Date: Tue, 27 Sep 2022 16:32:19 +0900
Subject: [PATCH] Debug network_swin2sr

---
 models/network_swin2sr.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/models/network_swin2sr.py b/models/network_swin2sr.py
index a8e2d83..a0672a9 100644
--- a/models/network_swin2sr.py
+++ b/models/network_swin2sr.py
@@ -148,7 +148,7 @@ def forward(self, x, mask=None):
 
         # cosine attention
         attn = (F.normalize(q, dim=-1) @ F.normalize(k, dim=-1).transpose(-2, -1))
-        logit_scale = torch.clamp(self.logit_scale, max=torch.log(torch.tensor(1. / 0.01))).exp()
+        logit_scale = torch.clamp(self.logit_scale, max=torch.log(torch.tensor(1. / 0.01)).to(self.logit_scale.device)).exp()
         attn = attn * logit_scale
 
         relative_position_bias_table = self.cpb_mlp(self.relative_coords_table).view(-1, self.num_heads)
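
Note (not part of the patch): the one-line change moves the clamp bound onto the
same device as self.logit_scale. torch.tensor(1. / 0.01) is created on the CPU by
default, so when the model runs on a GPU, clamping the GPU-resident logit_scale
against a CPU bound can raise a cuda/cpu device-mismatch error on some PyTorch
versions. Below is a minimal standalone sketch of the before/after behavior,
assuming only that PyTorch is installed; the variable names are illustrative and
not taken from network_swin2sr.py.

    import torch

    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

    # Learnable per-head temperature, as in Swin V2 cosine attention.
    logit_scale = torch.log(10 * torch.ones(1, 1, 1)).to(device)

    # Before the patch: the clamp bound is a 0-dim tensor created on the CPU.
    max_bound = torch.log(torch.tensor(1. / 0.01))

    # After the patch: move the bound onto logit_scale's device before clamping,
    # so the comparison never mixes cuda and cpu tensors.
    scale = torch.clamp(logit_scale, max=max_bound.to(logit_scale.device)).exp()
    print(scale)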