Skip to content

Commit

Permalink
Merge branch 'master' of https://github.com/BiaPyX/BiaPy
Browse files Browse the repository at this point in the history
# Conflicts:
#	biapy/data/generators/pair_data_2D_generator.py

Signed-off-by: Ignacio Arganda-Carreras <[email protected]>
  • Loading branch information
iarganda committed Feb 26, 2024
2 parents 11a3f52 + da89c67 commit 37f4bc5
Show file tree
Hide file tree
Showing 5 changed files with 6 additions and 4 deletions.
2 changes: 1 addition & 1 deletion biapy/data/generators/pair_data_2D_generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ def ensure_shape(self, img, mask):

# Super-resolution check. If random_crops_in_DA is activated the images have not been cropped yet,
# so this check cannot be done here and will instead be performed during the random crop
if not self.random_crops_in_DA and self.Y_provided and self.random_crop_scale != 1:
if not self.random_crops_in_DA and self.Y_provided and any([x != 1 for x in self.random_crop_scale]):
s = [img.shape[0]*self.random_crop_scale[0], img.shape[1]*self.random_crop_scale[1]]
if all(x!=y for x,y in zip(s,mask.shape[:-1])):
raise ValueError("Images loaded need to be LR and its HR version. LR shape:"
Expand Down
2 changes: 1 addition & 1 deletion biapy/data/generators/pair_data_3D_generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ def ensure_shape(self, img, mask):

# Super-resolution check. If random_crops_in_DA is activated the images have not been cropped yet,
# so this check cannot be done here and will instead be performed during the random crop
if not self.random_crops_in_DA and self.Y_provided and self.random_crop_scale != 1:
if not self.random_crops_in_DA and self.Y_provided and any([x != 1 for x in self.random_crop_scale]):
s = [img.shape[0]*self.random_crop_scale[0], img.shape[1]*self.random_crop_scale[1], img.shape[2]*self.random_crop_scale[2]]
if all(x!=y for x,y in zip(s,mask.shape[0:-1])):
raise ValueError("Images loaded need to be LR and its HR version. LR shape:"
Expand Down
2 changes: 1 addition & 1 deletion biapy/engine/metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,7 +138,7 @@ def __call__(self, y_pred, y_true):
y_true = (y_true/255).type(torch.long)

if self.num_classes > 2:
return self.jaccard(y_pred, y_true.squeeze())
return self.jaccard(y_pred, y_true.squeeze() if y_true.shape[0] > 1 else y_true.squeeze().unsqueeze(0))
else:
return self.jaccard(y_pred, y_true)

Expand Down
2 changes: 2 additions & 0 deletions biapy/engine/semantic_seg.py
Original file line number Diff line number Diff line change
Expand Up @@ -234,6 +234,8 @@ def after_merge_patches(self, pred):
pred = (pred>0.5).astype(np.uint8)
if pred.ndim == 4 and self.cfg.PROBLEM.NDIM == '3D':
pred = np.expand_dims(pred,0)
if pred.ndim == 3 and self.cfg.PROBLEM.NDIM == '2D':
pred = np.expand_dims(pred,0)
save_tif(pred, self.cfg.PATHS.RESULT_DIR.PER_IMAGE_BIN, self.processing_filenames, verbose=self.cfg.TEST.VERBOSE)

def after_merge_patches_by_chunks_proccess_patch(self, filename):
Expand Down
2 changes: 1 addition & 1 deletion biapy/models/resunet.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ class ResUNet(nn.Module):
"""
def __init__(self, image_shape=(256, 256, 1), activation="ELU", feature_maps=[32, 64, 128, 256], drop_values=[0.1,0.1,0.1,0.1],
batch_norm=False, k_size=3, upsample_layer="convtranspose", z_down=[2,2,2,2], n_classes=1,
output_channels="BC", upsampling_factor=1, upsampling_position="pre"):
output_channels="BC", upsampling_factor=(), upsampling_position="pre"):
super(ResUNet, self).__init__()

self.depth = len(feature_maps)-1
Expand Down

0 comments on commit 37f4bc5

Please sign in to comment.