Fix logprobs tokens in OpenAI API (#5339)
This commit is contained in:
parent
b5cabb6e9d
commit
db1da9f98d
2 changed files with 4 additions and 4 deletions
|
@@ -268,8 +268,8 @@ def apply_stopping_strings(reply, all_stop_strings):
|
|||
return reply, stop_found
|
||||
|
||||
|
||||
def get_reply_from_output_ids(output_ids, state, starting_from=0):
|
||||
reply = decode(output_ids[starting_from:], state['skip_special_tokens'])
|
||||
def get_reply_from_output_ids(output_ids, state=None, starting_from=0):
|
||||
reply = decode(output_ids[starting_from:], state['skip_special_tokens'] if state else True)
|
||||
|
||||
# Handle tokenizers that do not add the leading space for the first token
|
||||
if (hasattr(shared.tokenizer, 'convert_ids_to_tokens') and len(output_ids) > starting_from) and not reply.startswith(' '):
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue