Skip to content

Commit

Permalink
fix: resolve circular imports and import breakages
Browse files Browse the repository at this point in the history
  • Loading branch information
plutoless committed Dec 23, 2024
1 parent 3944cf1 commit 69a3b44
Show file tree
Hide file tree
Showing 14 changed files with 55 additions and 42 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
)
from ten.async_ten_env import AsyncTenEnv
from ten_ai_base.helper import get_properties_string
from ten_ai_base.llm_tool import AsyncLLMToolBaseExtension
from ten_ai_base import AsyncLLMToolBaseExtension
from ten_ai_base.types import LLMToolMetadata, LLMToolMetadataParameter, LLMToolResult
from .log import logger

Expand Down
11 changes: 5 additions & 6 deletions agents/ten_packages/extension/coze_python_async/extension.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,14 +24,13 @@
Data,
)

from ten_ai_base import BaseConfig, ChatMemory
from ten_ai_base.llm import (
from ten_ai_base.config import BaseConfig
from ten_ai_base.chat_memory import ChatMemory
from ten_ai_base import (
AsyncLLMBaseExtension,
LLMCallCompletionArgs,
LLMDataCompletionArgs,
LLMToolMetadata,
)
from ten_ai_base.types import LLMChatCompletionUserMessageParam

from ten_ai_base.types import LLMChatCompletionUserMessageParam, LLMCallCompletionArgs, LLMDataCompletionArgs, LLMToolMetadata

CMD_IN_FLUSH = "flush"
CMD_IN_ON_USER_JOINED = "on_user_joined"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
)
from dataclasses import dataclass

from ten_ai_base import BaseConfig
from ten_ai_base.config import BaseConfig

DATA_OUT_TEXT_DATA_PROPERTY_TEXT = "text"
DATA_OUT_TEXT_DATA_PROPERTY_IS_FINAL = "is_final"
Expand Down
7 changes: 3 additions & 4 deletions agents/ten_packages/extension/dify_python/extension.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,12 +12,11 @@

import aiohttp
from ten import AsyncTenEnv, AudioFrame, Cmd, CmdResult, Data, StatusCode, VideoFrame
from ten_ai_base import BaseConfig
from ten_ai_base.llm import (
from ten_ai_base.config import BaseConfig
from ten_ai_base import (
AsyncLLMBaseExtension,
LLMDataCompletionArgs,
)
from ten_ai_base.types import LLMChatCompletionUserMessageParam
from ten_ai_base.types import LLMChatCompletionUserMessageParam, LLMDataCompletionArgs

CMD_IN_FLUSH = "flush"
CMD_IN_ON_USER_JOINED = "on_user_joined"
Expand Down
6 changes: 3 additions & 3 deletions agents/ten_packages/extension/gemini_v2v_python/extension.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,9 +29,9 @@
from ten_ai_base.const import CMD_PROPERTY_RESULT, CMD_TOOL_CALL
from ten_ai_base.llm import AsyncLLMBaseExtension
from dataclasses import dataclass
from ten_ai_base import (
BaseConfig,
ChatMemory,
from ten_ai_base.config import BaseConfig
from ten_ai_base.chat_memory import ChatMemory
from ten_ai_base.usage import (
LLMUsage,
LLMCompletionTokensDetails,
LLMPromptTokensDetails,
Expand Down
15 changes: 10 additions & 5 deletions agents/ten_packages/extension/glue_python_async/extension.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,21 +25,26 @@
Data,
)

from ten_ai_base import (
BaseConfig,
from ten_ai_base.config import BaseConfig
from ten_ai_base.chat_memory import (
ChatMemory,
EVENT_MEMORY_APPENDED,
)
from ten_ai_base.usage import (
LLMUsage,
LLMCompletionTokensDetails,
LLMPromptTokensDetails,
EVENT_MEMORY_APPENDED,
)
from ten_ai_base.llm import (
from ten_ai_base import (
AsyncLLMBaseExtension,
)
from ten_ai_base.types import (
LLMChatCompletionUserMessageParam,
LLMToolResult,
LLMCallCompletionArgs,
LLMDataCompletionArgs,
LLMToolMetadata,
)
from ten_ai_base.types import LLMChatCompletionUserMessageParam, LLMToolResult

CMD_IN_FLUSH = "flush"
CMD_IN_ON_USER_JOINED = "on_user_joined"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ def send_flush_cmd(self, ten: TenEnv) -> None:
flush_cmd = Cmd.create(CMD_NAME_FLUSH)
ten.send_cmd(
flush_cmd,
lambda ten, result: ten.log_info("send_cmd done"),
lambda ten, result, _: ten.log_info("send_cmd done"),
)

ten.log_info(f"sent cmd: {CMD_NAME_FLUSH}")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
get_property_bool,
get_property_string,
)
from ten_ai_base.llm import AsyncLLMBaseExtension
from ten_ai_base import AsyncLLMBaseExtension
from ten_ai_base.types import (
LLMCallCompletionArgs,
LLMChatCompletionContentPartParam,
Expand Down
13 changes: 8 additions & 5 deletions agents/ten_packages/extension/openai_v2v_python/extension.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,13 +25,15 @@
)
from ten.audio_frame import AudioFrameDataFmt
from ten_ai_base.const import CMD_PROPERTY_RESULT, CMD_TOOL_CALL
from ten_ai_base.llm import AsyncLLMBaseExtension
from ten_ai_base import AsyncLLMBaseExtension
from dataclasses import dataclass
from ten_ai_base import (
BaseConfig,
from ten_ai_base.config import BaseConfig
from ten_ai_base.chat_memory import (
ChatMemory,
EVENT_MEMORY_EXPIRED,
EVENT_MEMORY_APPENDED,
)
from ten_ai_base.usage import (
LLMUsage,
LLMCompletionTokensDetails,
LLMPromptTokensDetails,
Expand Down Expand Up @@ -350,7 +352,8 @@ def get_time_ms() -> int:
f"On response done {msg_resp_id} {status} {message.response.usage}"
)
if message.response.usage:
await self._update_usage(message.response.usage)
pass
# await self._update_usage(message.response.usage)
case ResponseAudioTranscriptDelta():
self.ten_env.log_info(
f"On response transcript delta {message.response_id} {message.output_index} {message.content_index} {message.delta}"
Expand Down Expand Up @@ -586,7 +589,7 @@ def tool_dict(tool: LLMToolMetadata):
async def on_tools_update(self, _: AsyncTenEnv, tool: LLMToolMetadata) -> None:
"""Called when a new tool is registered. Implement this method to process the new tool."""
self.ten_env.log_info(f"on tools update {tool}")
await self._update_session()
# await self._update_session()

def _replace(self, prompt: str) -> str:
result = prompt
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
from base64 import b64encode

from ten_ai_base.const import CMD_CHAT_COMPLETION_CALL
from ten_ai_base.llm_tool import AsyncLLMToolBaseExtension
from ten_ai_base import AsyncLLMToolBaseExtension
from ten_ai_base.types import (
LLMChatCompletionUserMessageParam,
LLMToolMetadata,
Expand Down
7 changes: 2 additions & 5 deletions agents/ten_packages/extension/vision_tool_python/extension.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,8 @@
# Licensed under the Apache License, Version 2.0.
# See the LICENSE file for more information.
#
from ten_ai_base.llm_tool import (
AsyncLLMToolBaseExtension,
LLMToolMetadata,
LLMToolResult,
)
from ten_ai_base import AsyncLLMToolBaseExtension
from ten_ai_base.types import LLMToolMetadata, LLMToolResult
from ten import (
AudioFrame,
VideoFrame,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,8 @@
from ten import Cmd

from ten.async_ten_env import AsyncTenEnv
from ten_ai_base import BaseConfig
from ten_ai_base.llm_tool import AsyncLLMToolBaseExtension
from ten_ai_base.config import BaseConfig
from ten_ai_base import AsyncLLMToolBaseExtension
from ten_ai_base.types import LLMToolMetadata, LLMToolMetadataParameter, LLMToolResult

CMD_TOOL_REGISTER = "tool_register"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,19 @@
# See the LICENSE file for more information.
#

from .types import LLMCallCompletionArgs, LLMDataCompletionArgs, LLMToolMetadata, LLMToolResult, LLMChatCompletionMessageParam
from .types import (
LLMCallCompletionArgs,
LLMDataCompletionArgs,
LLMToolMetadata,
LLMToolResult,
LLMChatCompletionMessageParam,
)
from .usage import LLMUsage, LLMCompletionTokensDetails, LLMPromptTokensDetails
from .llm import AsyncLLMBaseExtension
from .llm_tool import AsyncLLMToolBaseExtension
from .chat_memory import ChatMemory, EVENT_MEMORY_APPENDED, EVENT_MEMORY_EXPIRED
from .helper import AsyncQueue, AsyncEventEmitter
from .config import BaseConfig
from .llm import AsyncLLMBaseExtension
from .llm_tool import AsyncLLMToolBaseExtension

# Specify what should be imported when a user imports * from the
# ten_ai_base package.
Expand All @@ -26,4 +32,9 @@
"AsyncEventEmitter",
"BaseConfig",
"LLMChatCompletionMessageParam",
"LLMUsage",
"LLMCompletionTokensDetails",
"LLMPromptTokensDetails",
"EVENT_MEMORY_APPENDED",
"EVENT_MEMORY_EXPIRED",
]
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
#
from abc import ABC, abstractmethod
import asyncio
import traceback

from ten import (
AsyncExtension,
Expand Down Expand Up @@ -74,17 +75,15 @@ async def on_cmd(self, async_ten_env: AsyncTenEnv, cmd: Cmd) -> None:
async_ten_env.log_debug(f"on_cmd name {cmd_name}")
if cmd_name == CMD_TOOL_REGISTER:
try:
tool_metadata_json = json.loads(
cmd.get_property_to_json(CMD_PROPERTY_TOOL)
)
tool_metadata_json = cmd.get_property_to_json(CMD_PROPERTY_TOOL)
async_ten_env.log_info(f"register tool: {tool_metadata_json}")
tool_metadata = LLMToolMetadata.model_validate_json(tool_metadata_json)
async with self.available_tools_lock:
self.available_tools.append(tool_metadata)
await self.on_tools_update(async_ten_env, tool_metadata)
await async_ten_env.return_result(CmdResult.create(StatusCode.OK), cmd)
except Exception as err:
async_ten_env.log_warn(f"on_cmd failed: {err}")
async_ten_env.log_warn(f"on_cmd failed: {traceback.format_exc()}")
await async_ten_env.return_result(
CmdResult.create(StatusCode.ERROR), cmd
)
Expand Down

0 comments on commit 69a3b44

Please sign in to comment.