Revert "llama_cpp server: prompt is a string". Closes #187
This reverts commit b9098b0.
abetlen committed May 12, 2023
1 parent 684d7c8 commit 8895b90
Showing 1 changed file with 6 additions and 2 deletions.
8 changes: 6 additions & 2 deletions llama_cpp/server/app.py
@@ -167,8 +167,9 @@ def get_llama():
 )
 
 class CreateCompletionRequest(BaseModel):
-    prompt: Optional[str] = Field(
-        default="", description="The prompt to generate completions for."
+    prompt: Union[str, List[str]] = Field(
+        default="",
+        description="The prompt to generate completions for."
     )
     suffix: Optional[str] = Field(
         default=None,
@@ -222,6 +223,9 @@ class Config:
 def create_completion(
     request: CreateCompletionRequest, llama: llama_cpp.Llama = Depends(get_llama)
 ):
+    if isinstance(request.prompt, list):
+        request.prompt = "".join(request.prompt)
+
     completion_or_chunks = llama(
         **request.dict(
             exclude={
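
For context, a minimal sketch of the behavior this revert restores, assuming only pydantic is installed; the other request fields and the FastAPI wiring are omitted, and normalize_prompt is a hypothetical helper introduced here for illustration. A prompt may arrive as a single string or as a list of strings, and a list is concatenated into one string before it reaches the model.

    from typing import List, Union

    from pydantic import BaseModel, Field

    class CreateCompletionRequest(BaseModel):
        # `prompt` may be one string or a list of string fragments.
        prompt: Union[str, List[str]] = Field(
            default="",
            description="The prompt to generate completions for."
        )

    def normalize_prompt(request: CreateCompletionRequest) -> str:
        # Hypothetical helper mirroring the handler above:
        # list prompts are joined with no separator.
        if isinstance(request.prompt, list):
            request.prompt = "".join(request.prompt)
        return request.prompt

    # Both request forms yield the same prompt string:
    assert normalize_prompt(CreateCompletionRequest(prompt="Hello world")) == "Hello world"
    assert normalize_prompt(CreateCompletionRequest(prompt=["Hello ", "world"])) == "Hello world"

Note that the fragments are joined with an empty separator, so any whitespace between fragments must be supplied by the caller.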
