Draft: add async_llm and async_llm_tool default extensions (#373)
* feat: init default_async_llm_extension_python and default_async_llm_tool_extension_python

* feat: manifest for ten_ai_base package

* fix: ten env

* chore: ignore installed sdk

* fix: syntax

* fix: clean
wangyoucao577 authored Oct 31, 2024
1 parent 3fc9b72 commit 928c24f
Showing 20 changed files with 426 additions and 42 deletions.
2 changes: 1 addition & 1 deletion Makefile
@@ -25,7 +25,7 @@ clean: clean-agents

clean-agents:
@echo ">> clean agents"
-	rm -rf agents/bin/worker agents/out agents/interface agents/include agents/lib agents/lib64 agents/ten_packages/system agents/ten_packages/extension_group agents/.release
+	rm -rf agents/bin/worker agents/out agents/interface agents/include agents/lib agents/lib64 agents/ten_packages/system/ten_runtime* agents/ten_packages/system/agora_rtc_sdk agents/ten_packages/system/azure_speech_sdk agents/ten_packages/system/nlohmann_json agents/.release
@echo ">> done"

docker-build-playground:
5 changes: 4 additions & 1 deletion agents/.gitignore
@@ -4,7 +4,10 @@ ten_packages/extension/agora_rtc
ten_packages/extension/azure_tts
ten_packages/extension/agora_sess_ctrl
ten_packages/extension/py_init_extension_cpp
-ten_packages/system
+ten_packages/system/agora_rtc_sdk
+ten_packages/system/azure_speech_sdk
+ten_packages/system/nlohmann_json
+ten_packages/system/ten_runtime*
.ten
agoradns.dat
agorareport.dat
@@ -0,0 +1,29 @@
# default_async_llm_extension_python

<!-- brief introduction for the extension -->

## Features

<!-- main features introduction -->

- xxx feature

## API

Refer to the `api` definition in [manifest.json](manifest.json) and the default values in [property.json](property.json).

<!-- An additional API.md can be provided if a longer introduction is needed -->

## Development

### Build

<!-- build dependencies and steps -->

### Unit test

<!-- how to run unit tests for the extension -->

## Misc

<!-- others if applicable -->
@@ -0,0 +1,6 @@
#
# This file is part of TEN Framework, an open source project.
# Licensed under the Apache License, Version 2.0.
# See the LICENSE file for more information.
#
from . import addon
@@ -0,0 +1,19 @@
#
# This file is part of TEN Framework, an open source project.
# Licensed under the Apache License, Version 2.0.
# See the LICENSE file for more information.
#
from ten import (
    Addon,
    register_addon_as_extension,
    TenEnv,
)
from .extension import DefaultAsyncLLMExtension


@register_addon_as_extension("default_async_llm_extension_python")
class DefaultAsyncLLMExtensionAddon(Addon):
    def on_create_instance(self, ten_env: TenEnv, name: str, context) -> None:
        ten_env.log_info("on_create_instance")
        ten_env.on_create_instance_done(
            DefaultAsyncLLMExtension(name), context)
@@ -0,0 +1,38 @@
#
# This file is part of TEN Framework, an open source project.
# Licensed under the Apache License, Version 2.0.
# See the LICENSE file for more information.
#
from ten import AsyncTenEnv
from ten_ai_base import (
    AsyncLLMBaseExtension,
    LLMCallCompletionArgs,
    LLMDataCompletionArgs,
    LLMToolMetadata,
)


class DefaultAsyncLLMExtension(AsyncLLMBaseExtension):
    async def on_start(self, ten_env: AsyncTenEnv) -> None:
        """Implement this method to construct and start your resources."""
        await super().on_start(ten_env)
        ten_env.log_debug("TODO: on_start")

    async def on_stop(self, ten_env: AsyncTenEnv) -> None:
        """Implement this method to stop and release your resources."""
        await super().on_stop(ten_env)
        ten_env.log_debug("TODO: on_stop")

    async def on_call_chat_completion(
        self, ten_env: AsyncTenEnv, **kwargs: LLMCallCompletionArgs
    ) -> None:
        """Called when a chat completion is requested by a cmd call. Implement this method to process it."""
        ten_env.log_debug("TODO: on_call_chat_completion")

    async def on_data_chat_completion(
        self, ten_env: AsyncTenEnv, **kwargs: LLMDataCompletionArgs
    ) -> None:
        """
        Called when a chat completion is requested by data input. Implement this method to process it.
        Note that this method is stream-based and should consider supporting local context caching.
        """
        ten_env.log_debug("TODO: on_data_chat_completion")

    async def on_tools_update(self, ten_env: AsyncTenEnv, tool: LLMToolMetadata) -> None:
        """Called when a new tool is registered. Implement this method to process the new tool."""
        ten_env.log_debug("TODO: on_tools_update")
@@ -0,0 +1,103 @@
{
"type": "extension",
"name": "default_async_llm_extension_python",
"version": "0.1.0",
"dependencies": [
{
"type": "system",
"name": "ten_ai_base",
"version": "0.1.0"
}
],
"package": {
"include": [
"manifest.json",
"property.json",
"requirements.txt",
"**.tent",
"**.py",
"README.md"
]
},
"api": {
"property": {},
"cmd_in": [
{
"name": "tool_register",
"property": {
"tool": {
"type": "string"
}
},
"required": [
"tool"
]
},
{
"name": "call_chat_completion",
"property": {
"messages": {
"type": "string"
},
"stream": {
"type": "bool"
}
},
"required": [
"messages"
],
"result": {
"property": {
"text": {
"type": "string"
}
},
"required": [
"text"
]
}
},
{
"name": "flush"
}
],
"cmd_out": [
{
"name": "flush"
}
],
"data_in": [
{
"name": "text_data",
"property": {
"text": {
"type": "string"
},
"is_final": {
"type": "bool"
}
},
"required": [
"text"
]
}
],
"data_out": [
{
"name": "text_data",
"property": {
"text": {
"type": "string"
},
"end_of_segment": {
"type": "bool"
}
},
"required": [
"text",
"end_of_segment"
]
}
]
}
}
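As a usage note, a peer extension could drive the `call_chat_completion` cmd defined above along these lines. This is a hedged sketch: `Cmd.create` and the property setters exist in the `ten` runtime, but the JSON encoding of `messages` and the return shape of `send_cmd` (a plain `CmdResult` is assumed) may differ by runtime version.

```python
# Hypothetical sketch of invoking the call_chat_completion cmd declared above.
import json

from ten import AsyncTenEnv, Cmd


async def ask_llm(ten_env: AsyncTenEnv, prompt: str) -> str:
    cmd = Cmd.create("call_chat_completion")
    # "messages" is declared as a string, so a JSON-encoded message list is assumed.
    cmd.set_property_string(
        "messages", json.dumps([{"role": "user", "content": prompt}])
    )
    cmd.set_property_bool("stream", False)

    # A plain CmdResult return is assumed here; some runtime versions differ.
    result = await ten_env.send_cmd(cmd)
    return result.get_property_string("text")
```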
@@ -0,0 +1 @@
{}
Empty file.
@@ -0,0 +1,29 @@
# default_async_llm_tool_extension_python

<!-- brief introduction for the extension -->

## Features

<!-- main features introduction -->

- xxx feature

## API

Refer to the `api` definition in [manifest.json](manifest.json) and the default values in [property.json](property.json).

<!-- An additional API.md can be provided if a longer introduction is needed -->

## Development

### Build

<!-- build dependencies and steps -->

### Unit test

<!-- how to run unit tests for the extension -->

## Misc

<!-- others if applicable -->
@@ -0,0 +1,6 @@
#
# This file is part of TEN Framework, an open source project.
# Licensed under the Apache License, Version 2.0.
# See the LICENSE file for more information.
#
from . import addon
@@ -0,0 +1,20 @@
#
# This file is part of TEN Framework, an open source project.
# Licensed under the Apache License, Version 2.0.
# See the LICENSE file for more information.
#
from ten import (
    Addon,
    register_addon_as_extension,
    TenEnv,
)
from .extension import DefaultAsyncLLMToolExtension


@register_addon_as_extension("default_async_llm_tool_extension_python")
class DefaultAsyncLLMToolExtensionAddon(Addon):
    def on_create_instance(self, ten_env: TenEnv, name: str, context) -> None:
        ten_env.log_info("on_create_instance")
        ten_env.on_create_instance_done(
            DefaultAsyncLLMToolExtension(name), context)
@@ -0,0 +1,30 @@
#
# This file is part of TEN Framework, an open source project.
# Licensed under the Apache License, Version 2.0.
# See the LICENSE file for more information.
#
from ten import (
    TenEnv,
    AsyncTenEnv,
)
from ten_ai_base import AsyncLLMToolBaseExtension, LLMToolMetadata, LLMToolResult


class DefaultAsyncLLMToolExtension(AsyncLLMToolBaseExtension):
    async def on_start(self, ten_env: AsyncTenEnv) -> None:
        """Implement this method to construct and start your resources."""
        await super().on_start(ten_env)
        ten_env.log_debug("TODO: on_start")

    async def on_stop(self, ten_env: AsyncTenEnv) -> None:
        """Implement this method to stop and release your resources."""
        await super().on_stop(ten_env)
        ten_env.log_debug("TODO: on_stop")

    def get_tool_metadata(self, ten_env: TenEnv) -> list[LLMToolMetadata]:
        """Return the tools this extension provides so that they can be registered with the LLM."""
        ten_env.log_debug("TODO: get_tool_metadata")
        return []

    async def run_tool(self, ten_env: AsyncTenEnv, name: str, args: dict) -> LLMToolResult:
        """Execute the named tool with the given arguments and return its result."""
        ten_env.log_debug(f"TODO: run_tool {name} {args}")
@@ -0,0 +1,65 @@
{
"type": "extension",
"name": "default_async_llm_tool_extension_python",
"version": "0.1.0",
"dependencies": [
{
"type": "system",
"name": "ten_ai_base",
"version": "0.1.0"
}
],
"package": {
"include": [
"manifest.json",
"property.json",
"requirements.txt",
"**.tent",
"**.py",
"README.md"
]
},
"api": {
"property": {},
"cmd_in": [
{
"name": "tool_call",
"property": {
"name": {
"type": "string"
},
"arguments": {
"type": "string"
}
},
"required": [
"name",
"arguments"
],
"result": {
"property": {
"tool_result": {
"type": "string"
}
},
"required": [
"tool_result"
]
}
}
],
"cmd_out": [
{
"name": "tool_register",
"property": {
"tool": {
"type": "string"
}
},
"required": [
"tool"
]
}
]
}
}
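On the consuming side, the LLM extension could invoke a registered tool through the `tool_call` cmd above roughly as follows; again a hedged sketch, with the JSON encoding of `arguments` and the `send_cmd` return shape assumed rather than taken from this commit.

```python
# Hypothetical sketch of the LLM side calling a registered tool via tool_call.
import json

from ten import AsyncTenEnv, Cmd


async def call_tool(ten_env: AsyncTenEnv, name: str, arguments: dict) -> str:
    cmd = Cmd.create("tool_call")
    cmd.set_property_string("name", name)
    # "arguments" is declared as a string, so the dict is JSON-encoded.
    cmd.set_property_string("arguments", json.dumps(arguments))

    # A plain CmdResult return is assumed here.
    result = await ten_env.send_cmd(cmd)
    return result.get_property_string("tool_result")
```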
@@ -0,0 +1 @@
{}
Empty file.