Code cleanup #100

Open · wants to merge 2 commits into base: develop
lora_diffusion/__init__.py (1 addition, 2 deletions)

@@ -1,3 +1,2 @@
-from .lora import *
 from .dataset import *
-from .utils import *
+from .lora import *
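One thing the reordering changes: with star imports, the import that runs last re-binds any names it shares with an earlier one, so `.lora` now wins any collision with `.dataset` (and the `.utils` re-export is dropped entirely). A quick illustration of that shadowing rule, using stdlib modules rather than this package:

```python
# Later star imports shadow earlier ones on name collisions.
# math and cmath both export sqrt; the import that runs last wins.
from math import *
from cmath import *  # re-binds sqrt to the complex-valued version

print(sqrt(-1))  # 1j, not a ValueError, because cmath's sqrt is in scope
```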
lora_diffusion/cli_lora_add.py (29 additions, 16 deletions)

@@ -1,23 +1,25 @@
-from typing import Literal, Union, Dict
 import os
 import shutil
+from pathlib import Path
+from typing import Literal, Union
+
 import fire
+import torch
 from diffusers import StableDiffusionPipeline
 
-import torch
-from .lora import tune_lora_scale, weight_apply_lora
+from .lora import weight_apply_lora
 from .to_ckpt_v2 import convert_to_ckpt
 
 
-def _text_lora_path(path: str) -> str:
+def _text_lora_path(path: Union[str, Path]) -> Union[str, Path]:
     assert path.endswith(".pt"), "Only .pt files are supported"
     return ".".join(path.split(".")[:-1] + ["text_encoder", "pt"])
 
 
 def add(
-    path_1: str,
-    path_2: str,
-    output_path: str,
+    path_1: Union[str, Path],
+    path_2: Union[str, Path],
+    output_path: Union[str, Path],
     alpha: float = 0.5,
     mode: Literal[
         "lpl",
@@ -26,14 +28,14 @@ def add(
     ] = "lpl",
     with_text_lora: bool = False,
 ):
-    print("Lora Add, mode " + mode)
+    print(f"Lora Add, mode {mode}")
     if mode == "lpl":
         for _path_1, _path_2, opt in [(path_1, path_2, "unet")] + (
             [(_text_lora_path(path_1), _text_lora_path(path_2), "text_encoder")]
             if with_text_lora
             else []
         ):
-            print("Loading", _path_1, _path_2)
+            print(f"Loading {_path_1} {_path_2}")
             out_list = []
             if opt == "text_encoder":
                 if not os.path.exists(_path_1):
@@ -50,7 +52,7 @@ def add(
             l2pairs = zip(l2[::2], l2[1::2])
 
             for (x1, y1), (x2, y2) in zip(l1pairs, l2pairs):
-                # print("Merging", x1.shape, y1.shape, x2.shape, y2.shape)
+                # print(f'Merging {x1.shape} {y1.shape} {x2.shape} {y2.shape}')
                 x1.data = alpha * x1.data + (1 - alpha) * x2.data
                 y1.data = alpha * y1.data + (1 - alpha) * y2.data
 
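The merge itself is a plain linear interpolation. The loaded `.pt` files are flat lists of tensors alternating the up and down projection matrices of each LoRA layer; pairing them up and blending gives `w = alpha * w1 + (1 - alpha) * w2` per tensor. A self-contained sketch of the same loop, with random tensors standing in for `torch.load(...)`:

```python
import torch

alpha = 0.5
# Stand-ins for torch.load(_path_1) / torch.load(_path_2): flat lists that
# alternate up/down projection matrices, one pair per patched layer.
l1 = [torch.randn(4, 2) for _ in range(6)]
l2 = [torch.randn(4, 2) for _ in range(6)]

out_list = []
for (x1, y1), (x2, y2) in zip(zip(l1[::2], l1[1::2]), zip(l2[::2], l2[1::2])):
    # Elementwise linear interpolation between the two LoRAs.
    x1.data = alpha * x1.data + (1 - alpha) * x2.data
    y1.data = alpha * y1.data + (1 - alpha) * y2.data
    out_list.extend([x1, y1])

# out_list is what the "unet" branch below would hand to torch.save(...)
```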
@@ -59,11 +61,15 @@ def add(
 
             if opt == "unet":
 
-                print("Saving merged UNET to", output_path)
+                print(f"Saving merged UNET to {output_path}")
                 torch.save(out_list, output_path)
 
             elif opt == "text_encoder":
-                print("Saving merged text encoder to", _text_lora_path(output_path))
+                print(
+                    f"Saving merged text encoder to \
+{_text_lora_path(output_path)}"
+                )
+
                 torch.save(
                     out_list,
                     _text_lora_path(output_path),
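A small note on the reflowed print: the trailing backslash sits inside the f-string, so the continuation line is joined into the string verbatim. That is why it starts at column zero in the diff; any indentation there would end up embedded in the message:

```python
path = "out.text_encoder.pt"
# The backslash continues the string literal itself, so the next line must
# start at column zero or its leading spaces become part of the message.
msg = f"Saving merged text encoder to \
{path}"
print(msg)  # Saving merged text encoder to out.text_encoder.pt
```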
@@ -75,7 +81,10 @@ def add(
             path_1,
         ).to("cpu")
 
-        weight_apply_lora(loaded_pipeline.unet, torch.load(path_2), alpha=alpha)
+        weight_apply_lora(
+            loaded_pipeline.unet,
+            torch.load(path_2),
+            alpha=alpha)
         if with_text_lora:
 
             weight_apply_lora(
@@ -93,7 +102,10 @@ def add(
             path_1,
         ).to("cpu")
 
-        weight_apply_lora(loaded_pipeline.unet, torch.load(path_2), alpha=alpha)
+        weight_apply_lora(
+            loaded_pipeline.unet,
+            torch.load(path_2),
+            alpha=alpha)
         if with_text_lora:
             weight_apply_lora(
                 loaded_pipeline.text_encoder,
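For context, both `upl` branches follow the same pattern: load the base pipeline from `path_1`, then fold the LoRA residuals from `path_2` directly into the weights, scaled by `alpha`. A minimal sketch of that flow, where the model id and file names are placeholders, not values from this PR:

```python
import torch
from diffusers import StableDiffusionPipeline
from lora_diffusion import weight_apply_lora

# Placeholders: substitute a real base model path and LoRA weight file.
pipe = StableDiffusionPipeline.from_pretrained("runwayml/stable-diffusion-v1-5")
pipe = pipe.to("cpu")

# Merge the LoRA into the UNet weights in place, weighted by alpha.
weight_apply_lora(pipe.unet, torch.load("lora_weight.pt"), alpha=0.5)

pipe.save_pretrained("./merged_model")
```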
@@ -102,15 +114,16 @@ def add(
                 target_replace_module=["CLIPAttention"],
             )
 
-        _tmp_output = output_path + ".tmp"
+        new_suffix = ".tmp"
+        _tmp_output = output_path.with_suffix(output_path.suffix + new_suffix)
 
         loaded_pipeline.save_pretrained(_tmp_output)
         convert_to_ckpt(_tmp_output, output_path, as_half=True)
         # remove the tmp_output folder
         shutil.rmtree(_tmp_output)
 
     else:
-        print("Unknown mode", mode)
+        print(f"Unknown mode {mode}")
         raise ValueError(f"Unknown mode {mode}")
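Finally, the `.tmp` path construction: `Path.with_suffix` replaces the final suffix, so appending the old suffix first is what preserves it. It does assume `output_path` is actually a `Path`; a plain `str`, which the annotation still allows, has no `with_suffix`. A quick check of the behavior:

```python
from pathlib import Path

output_path = Path("model.ckpt")
new_suffix = ".tmp"

# with_suffix swaps out the last suffix, so ".ckpt" + ".tmp" keeps both:
# "model.ckpt" -> "model.ckpt.tmp"
print(output_path.with_suffix(output_path.suffix + new_suffix))

# Caveat: a plain str here would raise AttributeError, since str has no
# .with_suffix() method, even though the annotation allows Union[str, Path].
```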

