From d74a442be2889988f959ae23a4dc19625a75cb15 Mon Sep 17 00:00:00 2001
From: sunshinexcode <24xinhui@163.com>
Date: Mon, 26 Aug 2024 10:42:50 +0800
Subject: [PATCH] fix(): fix prompt and attribute settings

---
 agents/property.json                                     | 6 +++---
 agents/property.json.example                             | 4 ++--
 .../extension/gemini_llm_python/gemini_llm.py            | 8 ++++----
 .../extension/gemini_llm_python/gemini_llm_extension.py  | 6 +++---
 4 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/agents/property.json b/agents/property.json
index bf384a9c..ecefb963 100644
--- a/agents/property.json
+++ b/agents/property.json
@@ -1841,9 +1841,9 @@
             "greeting": "ASTRA agent connected. How can i help you today?",
             "max_memory_length": 10,
             "max_output_tokens": 512,
-            "model": "gemini-1.0-pro-latest",
+            "model": "gemini-1.5-flash",
             "prompt": "",
-            "temperature": 0.1,
+            "temperature": 1.0,
             "top_k": 40,
             "top_p": 0.95
           }
@@ -2375,4 +2375,4 @@
       }
     ]
   }
-}
\ No newline at end of file
+}
diff --git a/agents/property.json.example b/agents/property.json.example
index 8b6cdd86..d6245847 100644
--- a/agents/property.json.example
+++ b/agents/property.json.example
@@ -1961,9 +1961,9 @@
             "greeting": "ASTRA agent connected. How can i help you today?",
             "max_memory_length": 10,
             "max_output_tokens": 512,
-            "model": "gemini-1.0-pro-latest",
+            "model": "gemini-1.5-flash",
             "prompt": "",
-            "temperature": 0.1,
+            "temperature": 1.0,
             "top_k": 40,
             "top_p": 0.95
           }
diff --git a/agents/ten_packages/extension/gemini_llm_python/gemini_llm.py b/agents/ten_packages/extension/gemini_llm_python/gemini_llm.py
index 4b129cea..f11317b5 100644
--- a/agents/ten_packages/extension/gemini_llm_python/gemini_llm.py
+++ b/agents/ten_packages/extension/gemini_llm_python/gemini_llm.py
@@ -24,9 +24,9 @@ def default_config(cls):
         return cls(
             api_key="",
             max_output_tokens=512,
-            model="gemini-1.0-pro-latest",
+            model="gemini-1.5-flash",
             prompt="You are a voice assistant who talks in a conversational way and can chat with me like my friends. I will speak to you in English or Chinese, and you will answer in the corrected and improved version of my text with the language I use. Don’t talk like a robot, instead I would like you to talk like a real human with emotions. I will use your answer for text-to-speech, so don’t return me any meaningless characters. I want you to be helpful, when I’m asking you for advice, give me precise, practical and useful advice instead of being vague. When giving me a list of options, express the options in a narrative way instead of bullet points.",
-            temperature=0.1,
+            temperature=1.0,
             top_k=40,
             top_p=0.95,
         )
@@ -36,12 +36,12 @@ class GeminiLLM:
     def __init__(self, config: GeminiLLMConfig):
         self.config = config
         genai.configure(api_key=self.config.api_key)
-        self.model = genai.GenerativeModel(self.config.model)
+        self.model = genai.GenerativeModel(model_name=self.config.model, system_instruction=self.config.prompt)
 
     def get_chat_completions_stream(self, messages: List[Dict[str, str]]):
         try:
             chat = self.model.start_chat(history=messages[0:-1])
-            response = chat.send_message((self.config.prompt, messages[-1].get("parts")),
+            response = chat.send_message(messages[-1].get("parts"),
                                          generation_config=genai.types.GenerationConfig(
                                              max_output_tokens=self.config.max_output_tokens,
                                              temperature=self.config.temperature,
diff --git a/agents/ten_packages/extension/gemini_llm_python/gemini_llm_extension.py b/agents/ten_packages/extension/gemini_llm_python/gemini_llm_extension.py
index 58295089..0db28c57 100644
--- a/agents/ten_packages/extension/gemini_llm_python/gemini_llm_extension.py
+++ b/agents/ten_packages/extension/gemini_llm_python/gemini_llm_extension.py
@@ -59,19 +59,19 @@ def on_start(self, ten: TenEnv) -> None:
             try:
                 val = ten.get_property_string(key)
                 if val:
-                    gemini_llm_config.key = val
+                    setattr(gemini_llm_config, key, val)
             except Exception as e:
                 logger.warning(f"get_property_string optional {key} failed, err: {e}")
 
         for key in [PROPERTY_TEMPERATURE, PROPERTY_TOP_P]:
             try:
-                gemini_llm_config.key = float(ten.get_property_float(key))
+                setattr(gemini_llm_config, key, float(ten.get_property_float(key)))
             except Exception as e:
                 logger.warning(f"get_property_float optional {key} failed, err: {e}")
 
         for key in [PROPERTY_MAX_OUTPUT_TOKENS, PROPERTY_TOP_K]:
             try:
-                gemini_llm_config.key = int(ten.get_property_int(key))
+                setattr(gemini_llm_config, key, int(ten.get_property_int(key)))
             except Exception as e:
                 logger.warning(f"get_property_int optional {key} failed, err: {e}")
 
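
The attribute fix is the substantive part of this patch: "gemini_llm_config.key = val" assigns to a literal attribute named "key", so the values read from the runtime properties never reached the fields they were meant to set, whereas setattr(obj, name, value) assigns to the attribute whose name is held in "name". Likewise, passing the prompt as system_instruction lets the model carry it once instead of having it tupled onto every outgoing message. Below is a minimal, self-contained sketch of both patterns, not the extension's actual code: SimpleConfig and its default values are hypothetical, and the google.generativeai calls mirror the ones in the diff (a real API key would be needed to actually generate content).

    # Sketch only; SimpleConfig and the sample property values are hypothetical.
    from dataclasses import dataclass

    import google.generativeai as genai


    @dataclass
    class SimpleConfig:
        api_key: str = ""
        model: str = "gemini-1.5-flash"
        prompt: str = "You are a concise voice assistant."
        temperature: float = 1.0


    config = SimpleConfig()

    # Pattern 1: assign by attribute *name*. "config.key = value" would create a
    # literal attribute called "key"; setattr() targets the attribute named by
    # the loop variable, which is what the extension needs when it iterates over
    # property keys read from the runtime.
    for key, value in {"temperature": 0.7, "model": "gemini-1.5-flash"}.items():
        setattr(config, key, value)

    # Pattern 2: hand the prompt to the model once as a system instruction
    # instead of prepending it to every user message. Constructing the model
    # does not call the API, so this line works even without a valid key.
    genai.configure(api_key=config.api_key)
    model = genai.GenerativeModel(
        model_name=config.model,
        system_instruction=config.prompt,
    )

With this shape, later calls such as model.start_chat(...) and chat.send_message(...) (as used in gemini_llm.py above) only need the user turns, since the prompt already lives on the model object.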