feat(diffusers): allow multiple lora adapters
Signed-off-by: Ettore Di Giacinto <[email protected]>
mudler committed Nov 5, 2024
1 parent 20cd881 commit 973df34
Showing 2 changed files with 18 additions and 1 deletion.
3 changes: 3 additions & 0 deletions backend/backend.proto
@@ -235,6 +235,9 @@ message ModelOptions {
  bool NoKVOffload = 57;

  string ModelPath = 59;

  repeated string LoraAdapters = 60;
  repeated float LoraScales = 61;
}

message Result {
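For reference, a minimal client-side sketch of how the two new repeated fields could be populated when loading a model. It assumes the generated Python stubs are named backend_pb2 / backend_pb2_grpc, that the service exposes a LoadModel RPC taking ModelOptions, and that the address, model name, and adapter file names are placeholders, none of which is defined by this commit.

# Hypothetical client-side sketch: populate the new repeated fields.
# Assumes generated stubs backend_pb2 / backend_pb2_grpc and a locally running backend.
import grpc
import backend_pb2
import backend_pb2_grpc

def load_with_loras(address="localhost:50051"):
    channel = grpc.insecure_channel(address)
    stub = backend_pb2_grpc.BackendStub(channel)
    opts = backend_pb2.ModelOptions(
        Model="stablediffusion",            # placeholder model name
        ModelPath="/models",                # base path used to resolve relative adapter paths
        LoraAdapters=["style.safetensors", "detail.safetensors"],  # repeated string, field 60
        LoraScales=[0.8, 0.5],              # repeated float, field 61 (one scale per adapter)
    )
    return stub.LoadModel(opts)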
16 changes: 15 additions & 1 deletion backend/python/diffusers/backend.py
@@ -311,10 +311,24 @@ def LoadModel(self, request, context):
if request.LoraAdapter:
    # Check if it's a local file and not a directory (we load LoRA differently for a safetensors file)
    if os.path.exists(request.LoraAdapter) and not os.path.isdir(request.LoraAdapter):
        # self.load_lora_weights(request.LoraAdapter, 1, device, torchType)
        self.pipe.load_lora_weights(request.LoraAdapter)
    else:
        self.pipe.unet.load_attn_procs(request.LoraAdapter)
if len(request.LoraAdapters) > 0:
    adapters_name = []
    adapters_weights = []
    for i, adapter in enumerate(request.LoraAdapters):
        # Resolve relative adapter paths against the model path
        if not os.path.isabs(adapter):
            adapter = os.path.join(request.ModelPath, adapter)
        # Register each adapter under a unique name so it can be combined later
        self.pipe.load_lora_weights(adapter, adapter_name=f"adapter_{i}")
        adapters_name.append(f"adapter_{i}")

    for adapters_weight in request.LoraScales:
        adapters_weights.append(adapters_weight)

    # Activate all loaded adapters at once, weighted by the requested scales
    self.pipe.set_adapters(adapters_name, adapter_weights=adapters_weights)

if request.CUDA:
    self.pipe.to('cuda')
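For reference, the same multi-adapter pattern in plain diffusers, as a minimal standalone sketch. The model ID, LoRA file names, and scales are placeholders, and it assumes a diffusers version with the PEFT-backed load_lora_weights / set_adapters integration.

# Standalone sketch of the multi-LoRA flow the new code path follows.
# Model ID and LoRA files are placeholders; assumes diffusers with PEFT support installed.
import torch
from diffusers import StableDiffusionPipeline

pipe = StableDiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5", torch_dtype=torch.float16
)

# Load each LoRA under a distinct adapter name, mirroring adapter_{i} above
pipe.load_lora_weights("loras/style.safetensors", adapter_name="adapter_0")
pipe.load_lora_weights("loras/detail.safetensors", adapter_name="adapter_1")

# Combine them with per-adapter scales, mirroring request.LoraScales
pipe.set_adapters(["adapter_0", "adapter_1"], adapter_weights=[0.8, 0.5])

pipe.to("cuda")
image = pipe("a watercolor landscape").images[0]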