Remove LoRA tab, move it into the Parameters menu
This commit is contained in:
parent
214dc6868e
commit
29fe7b1c74
3 changed files with 14 additions and 16 deletions
|
@@ -10,6 +10,8 @@ def add_lora_to_model(lora_name):
|
|||
|
||||
# Is there a more efficient way of returning to the base model?
|
||||
if lora_name == "None":
|
||||
print(f"Reloading the model to remove the LoRA...")
|
||||
shared.model, shared.tokenizer = load_model(shared.model_name)
|
||||
else:
|
||||
print(f"Adding the LoRA {lora_name} to the model...")
|
||||
shared.model = PeftModel.from_pretrained(shared.model, Path(f"loras/{lora_name}"))
|
||||
|
|
|
@@ -56,7 +56,7 @@ settings = {
|
|||
},
|
||||
'lora_prompts': {
|
||||
'default': 'Common sense questions and answers\n\nQuestion: \nFactual answer:',
|
||||
'alpaca-lora-7b': "Below is an instruction that describes a task. Write a response that appropriately completes the request.\n### Instruction:\nWrite a Python script that generates text using the transformers library.\n### Response:\n"
|
||||
'alpaca-lora-7b': "Below is an instruction that describes a task. Write a response that appropriately completes the request.\n### Instruction:\nWrite a poem about the transformers Python library. \nMention the word \"large language models\" in that poem.\n### Response:\n"
|
||||
}
|
||||
}
|
||||
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue