From e46f770534a4d05498ef4486143c80d6103a33df Mon Sep 17 00:00:00 2001
From: takatost
Date: Fri, 17 Nov 2023 13:43:27 +0800
Subject: [PATCH] feat: supports for new version of openllm

---
 api/core/third_party/langchain/llms/openllm.py | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/api/core/third_party/langchain/llms/openllm.py b/api/core/third_party/langchain/llms/openllm.py
index 6151fe3f1c2231..d83f54da6e4158 100644
--- a/api/core/third_party/langchain/llms/openllm.py
+++ b/api/core/third_party/langchain/llms/openllm.py
@@ -51,7 +51,8 @@ def _call(
     ) -> str:
         params = {
             "prompt": prompt,
-            "llm_config": self.llm_kwargs
+            "llm_config": self.llm_kwargs,
+            "stop": stop,
         }
 
         headers = {"Content-Type": "application/json"}
@@ -65,11 +66,11 @@ def _call(
             raise ValueError(f"OpenLLM HTTP {response.status_code} error: {response.text}")
 
         json_response = response.json()
-        completion = json_response["responses"][0]
+        completion = json_response["outputs"][0]['text']
         completion = completion.lstrip(prompt)
 
-        if stop is not None:
-            completion = enforce_stop_tokens(completion, stop)
+        # if stop is not None:
+        #     completion = enforce_stop_tokens(completion, stop)
 
         return completion
 