Add LLaMA support

This commit is contained in:
oobabooga 2023-03-03 14:39:14 -03:00
parent 2bff646130
commit ea5c5eb3da
4 changed files with 110 additions and 2 deletions

View file

@@ -6,6 +6,7 @@ model_name = ""
soft_prompt_tensor = None
soft_prompt = False
is_RWKV = False
is_LLaMA = False
# Chat variables
history = {'internal': [], 'visible': []}
@@ -42,6 +43,7 @@ settings = {
'default': 'NovelAI-Sphinx Moth',
'pygmalion-*': 'Pygmalion',
'RWKV-*': 'Naive',
'llama-*': 'Naive',
'(rosey|chip|joi)_.*_instruct.*': 'Instruct Joi (Contrastive Search)'
},
'prompts': {