Browse Source

Make custom LoRAs work by default #385

oobabooga 2 năm trước
mục cha
commit
a78b6508fc
2 tập tin đã thay đổi với 2 bổ sung và 2 xóa
  1. 1 1
      modules/LoRA.py
  2. 1 1
      modules/shared.py

+ 1 - 1
modules/LoRA.py

@@ -17,6 +17,6 @@ def add_lora_to_model(lora_name):
         print(f"Adding the LoRA {lora_name} to the model...")
 
         params = {}
-        #params['device_map'] = {'': 0}
+        params['device_map'] = {'': 0}
         #params['dtype'] = shared.model.dtype
         shared.model = PeftModel.from_pretrained(shared.model, Path(f"loras/{lora_name}"), **params)

+ 1 - 1
modules/shared.py

@@ -56,7 +56,7 @@ settings = {
     },
     'lora_prompts': {
         'default': 'Common sense questions and answers\n\nQuestion: \nFactual answer:',
-        'alpaca-lora-7b': "Below is an instruction that describes a task. Write a response that appropriately completes the request.\n### Instruction:\nWrite a poem about the transformers Python library. \nMention the word \"large language models\" in that poem.\n### Response:\n"
+        '(alpaca-lora-7b|alpaca-lora-13b|alpaca-lora-30b)': "Below is an instruction that describes a task. Write a response that appropriately completes the request.\n### Instruction:\nWrite a poem about the transformers Python library. \nMention the word \"large language models\" in that poem.\n### Response:\n"
     }
 }