We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 104293f commit 0cecfc6Copy full SHA for 0cecfc6
loras/place-your-loras-here.txt
modules/LoRA.py
@@ -0,0 +1,15 @@
1
+from pathlib import Path
2
+
3
+from peft import PeftModel
4
5
+import modules.shared as shared
6
+from modules.models import load_model
7
8
9
def add_lora_to_model(lora_name):
    """Apply the LoRA adapter named *lora_name* to the shared model in place.

    The adapter weights are expected under ``loras/<lora_name>``.  The string
    ``"None"`` is the UI sentinel for "no adapter": in that case the base
    model (and its tokenizer) are reloaded, discarding any attached adapter.

    Side effects: rebinds ``shared.model`` (and, when resetting,
    ``shared.tokenizer``); returns nothing.
    """
    # Guard clause: restore the pristine base model when no adapter is wanted.
    # Is there a more efficient way of returning to the base model?
    if lora_name == "None":
        shared.model, shared.tokenizer = load_model(shared.model_name)
        return

    # Wrap the current model with the PEFT adapter loaded from disk.
    shared.model = PeftModel.from_pretrained(shared.model, Path(f"loras/{lora_name}"))
0 commit comments