[Fix] Better error message for batched prompts (#342)

This commit is contained in:
Zhuohan Li 2023-07-03 09:27:31 -07:00 committed by GitHub
parent 0bd2a573a5
commit 0ffded812a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

View File

@@ -358,7 +358,13 @@ async def create_completion(raw_request: Request):
model_name = request.model
request_id = f"cmpl-{random_uuid()}"
     if isinstance(request.prompt, list):
-        assert len(request.prompt) == 1
+        if len(request.prompt) == 0:
+            return create_error_response(HTTPStatus.BAD_REQUEST,
+                                         "please provide at least one prompt")
+        if len(request.prompt) > 1:
+            return create_error_response(HTTPStatus.BAD_REQUEST,
+                                         "multiple prompts in a batch is not "
+                                         "currently supported")
         prompt = request.prompt[0]
else:
prompt = request.prompt