From bb98c166dbf71c515e88de0d865c3cd1c1e7a679 Mon Sep 17 00:00:00 2001
From: anwai98
Date: Sun, 22 Dec 2024 11:56:00 +0100
Subject: [PATCH] Raise assertion error for missing installation

---
 micro_sam/models/peft_sam.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/micro_sam/models/peft_sam.py b/micro_sam/models/peft_sam.py
index 7b73ff43..d7a98a76 100644
--- a/micro_sam/models/peft_sam.py
+++ b/micro_sam/models/peft_sam.py
@@ -6,6 +6,11 @@
 
 from segment_anything.modeling import Sam
 
+try:
+    import bitsandbytes as bnb
+except Exception:
+    bnb = None
+
 
 class LoRASurgery(nn.Module):
     """Operates on the attention layers for performing low-rank adaptation.
@@ -325,6 +330,7 @@ def __init__(
         # Whether to quantize the linear layers to 4 bit precision.
         # NOTE: This is currently supported for CUDA-supported devices only.
         if quantize:
+            assert bnb is not None, "Please install 'bitsandbytes'."
             import bitsandbytes as bnb
             for name, module in model.image_encoder.named_modules():
                 if isinstance(module, torch.nn.Linear):