API: better handle temperature = 0

This commit is contained in:
parent 817866c9cf
commit 6247eafcc5

2 changed files with 4 additions and 6 deletions
@@ -97,9 +97,6 @@ async def openai_completions(request: Request, request_data: CompletionRequest):
     path = request.url.path
     is_legacy = "/generate" in path

-    if request_data.temperature == 0:
-        request_data.do_sample = False
-
     if request_data.stream:
         async def generator():
             async with streaming_semaphore:
@@ -123,9 +120,6 @@ async def openai_chat_completions(request: Request, request_data: ChatCompletionRequest):
     path = request.url.path
     is_legacy = "/generate" in path

-    if request_data.temperature == 0:
-        request_data.do_sample = False
-
     if request_data.stream:
         async def generator():
             async with streaming_semaphore:
|
Loading…
Add table
Add a link
Reference in a new issue