Add repetition penalty range parameter to transformers (#2916)

This commit is contained in:
oobabooga 2023-06-29 13:40:13 -03:00 committed by GitHub
parent c6cae106e7
commit 3443219cbc
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
12 changed files with 55 additions and 5 deletions

View file

@@ -71,6 +71,7 @@ class ExllamaModel:
self.generator.settings.top_k = state['top_k']
self.generator.settings.typical = state['typical_p']
self.generator.settings.token_repetition_penalty_max = state['repetition_penalty']
self.generator.settings.token_repetition_penalty_sustain = state['repetition_penalty_range']
if state['ban_eos_token']:
self.generator.disallow_tokens([self.tokenizer.eos_token_id])
else: