Make llama.cpp read prompt size and seed from settings (#2299)

commit cf088566f8
parent ee674afa50
Author: DGdev91 (committed via GitHub)
Date:   2023-05-25 15:29:31 +02:00
5 changed files with 9 additions and 3 deletions


@@ -39,8 +39,8 @@ class LlamaCppModel:
         params = {
             'model_path': str(path),
-            'n_ctx': 2048,
-            'seed': 0,
+            'n_ctx': shared.args.n_ctx,
+            'seed': int(shared.args.llama_cpp_seed),
             'n_threads': shared.args.threads or None,
             'n_batch': shared.args.n_batch,
             'use_mmap': not shared.args.no_mmap,
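
For context, the two replaced literals now come from command-line settings exposed on shared.args. A minimal sketch of how such flags could be declared with argparse — only the names n_ctx and llama_cpp_seed come from the diff; the defaults and help texts are assumptions, not the repository's actual shared.py:

    # Hypothetical sketch of the settings this diff reads via shared.args.
    # Flag names are taken from the diff; defaults/help are assumptions.
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--n_ctx', type=int, default=2048,
                        help='Prompt context size for llama.cpp models.')
    parser.add_argument('--llama_cpp_seed', type=int, default=0,
                        help='Seed for llama.cpp models (0 for random).')
    args = parser.parse_args()

    print(args.n_ctx, int(args.llama_cpp_seed))

With flags like these, launching the webui with, say, --n_ctx 4096 --llama_cpp_seed 42 would flow through shared.args into the params dict above instead of the previously hardcoded 2048 and 0.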