Allow custom tokenizer for llamacpp_HF loader (#3941)
This commit is contained in:
parent
75646df268
commit
fdcee0c215
2 changed files with 4 additions and 3 deletions
|
@@ -249,9 +249,10 @@ def llamacpp_loader(model_name):
|
|||
def llamacpp_HF_loader(model_name):
|
||||
from modules.llamacpp_hf import LlamacppHF
|
||||
|
||||
for fname in ["oobabooga_llama-tokenizer", "llama-tokenizer"]:
|
||||
for fname in [model_name, "oobabooga_llama-tokenizer", "llama-tokenizer"]:
|
||||
path = Path(f'{shared.args.model_dir}/{fname}')
|
||||
if path.exists():
|
||||
if all((path / file).exists() for file in ['tokenizer_config.json', 'special_tokens_map.json', 'tokenizer.model']):
|
||||
logger.info(f'Using tokenizer from: {path}')
|
||||
break
|
||||
else:
|
||||
logger.error("Could not load the model because a tokenizer in transformers format was not found. Please download oobabooga/llama-tokenizer.")
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue