diff --git a/libs/community/langchain_community/agents/openai_assistant/base.py b/libs/community/langchain_community/agents/openai_assistant/base.py
index d2b1b7c02ace38..9fe5085e9b2956
--- a/libs/community/langchain_community/agents/openai_assistant/base.py
+++ b/libs/community/langchain_community/agents/openai_assistant/base.py
@@ -28,6 +28,15 @@ def _get_openai_client() -> openai.OpenAI:
+    """Get the OpenAI client.
+
+    Returns:
+        openai.OpenAI: OpenAI client
+
+    Raises:
+        ImportError: If `openai` is not installed.
+        AttributeError: If the installed `openai` version is not compatible.
+    """
     try:
         import openai
@@ -44,6 +53,15 @@ def _get_openai_async_client() -> openai.AsyncOpenAI:
+    """Get the async OpenAI client.
+
+    Returns:
+        openai.AsyncOpenAI: Async OpenAI client
+
+    Raises:
+        ImportError: If `openai` is not installed.
+        AttributeError: If the installed `openai` version is not compatible.
+    """
     try:
         import openai
@@ -60,14 +78,14 @@ def _convert_file_ids_into_attachments(file_ids: list) -> list:
-    """
-    Convert file_ids into attachments
+    """Convert file_ids into attachments

     File search and Code interpreter will be turned on by default.

     Args:
         file_ids (list): List of file_ids that need to be converted into attachments.
+
     Returns:
-        A list of attachments that are converted from file_ids.
+        list: List of attachments converted from file_ids.
     """
     attachments = []
     for id in file_ids:
@@ -83,14 +101,15 @@ def _is_assistants_builtin_tool(
     tool: Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool],
 ) -> bool:
-    """
-    Determine if tool corresponds to OpenAI Assistants built-in.
+    """Determine if tool corresponds to OpenAI Assistants built-in.

     Args:
-        tool : Tool that needs to be determined
+        tool (Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]):
+            Tool that needs to be determined.
+
     Returns:
         A boolean response of true or false indicating if the tool corresponds to
-        OpenAI Assistants built-in.
+            OpenAI Assistants built-in.
     """
     assistants_builtin_tools = ("code_interpreter", "retrieval", "file_search")
     return (
@@ -109,10 +128,11 @@ def _get_assistants_tool(
     such as "code_interpreter" and "retrieval."

     Args:
-        tool: Tools or functions that need to be converted to OpenAI tools.
-    Returns:
-        A dictionary of tools that are converted into OpenAI tools.
+        tool (Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]):
+            Tools or functions that need to be converted to OpenAI tools.
+    Returns:
+        Dict[str, Any]: A dictionary of tools that are converted into OpenAI tools.
     """
     if _is_assistants_builtin_tool(tool):
         return tool  # type: ignore
@@ -124,18 +144,25 @@ class OpenAIAssistantV2Runnable(OpenAIAssistantRunnable):
     """Run an OpenAI Assistant.

+    Attributes:
+        client (Any): OpenAI or AzureOpenAI client.
+        async_client (Any): Async OpenAI or AzureOpenAI client.
+        assistant_id (str): OpenAI assistant ID.
+        check_every_ms (float): Frequency to check progress in milliseconds.
+        as_agent (bool): Whether to use the assistant as a LangChain agent.
+
     Example using OpenAI tools:
         .. code-block:: python

             from langchain.agents.openai_assistant import OpenAIAssistantV2Runnable

-            interpreter_assistant = OpenAIAssistantV2Runnable.create_assistant(
-                name="langchain assistant",
+            assistant = OpenAIAssistantV2Runnable.create_assistant(
+                name="math assistant",
                 instructions="You are a personal math tutor. Write and run code to answer math questions.",
                 tools=[{"type": "code_interpreter"}],
                 model="gpt-4-1106-preview"
             )
-            output = interpreter_assistant.invoke({"content": "What's 10 - 4 raised to the 2.7"})
+            output = assistant.invoke({"content": "What's 10 - 4 raised to the 2.7"})

     Example using custom tools and AgentExecutor:
         .. code-block:: python
@@ -155,8 +182,7 @@ class OpenAIAssistantV2Runnable(OpenAIAssistantRunnable):
             )
             agent_executor = AgentExecutor(agent=agent, tools=tools)

-            agent_executor.invoke({"content": "What's 10 - 4 raised to the 2.7"})
-
+            agent_executor.invoke({"content": "Analyze the data..."})

     Example using custom tools and custom execution:
         .. code-block:: python
@@ -206,12 +232,13 @@ def execute_agent(agent, tools, input):
     assistant_id: str
     """OpenAI assistant id."""
     check_every_ms: float = 1_000.0
-    """Frequency with which to check run progress in ms."""
+    """Frequency with which to check run progress in milliseconds."""
     as_agent: bool = False
     """Use as a LangChain agent, compatible with the AgentExecutor."""

     @model_validator(mode="after")
     def validate_async_client(self) -> Self:
+        """Validate that the async client is set, otherwise initialize it."""
         if self.async_client is None:
             import openai
@@ -234,18 +261,20 @@ def create_assistant(
         """Create an OpenAI Assistant and instantiate the Runnable.

         Args:
-            name: Assistant name.
-            instructions: Assistant instructions.
-            tools: Assistant tools. Can be passed in OpenAI format or as BaseTools.
-            tool_resources: Assistant tool resources. Can be passed in OpenAI format
-            model: Assistant model to use.
-            client: OpenAI or AzureOpenAI client.
-                Will create default OpenAI client (Assistant v2) if not specified.
+            name (str): Assistant name.
+            instructions (str): Assistant instructions.
+            tools (Sequence[Union[BaseTool, dict]]): Assistant tools. Can be passed
+                in OpenAI format or as BaseTools.
+            tool_resources (Optional[Union[AssistantToolResources, dict, NotGiven]]):
+                Assistant tool resources. Can be passed in OpenAI format.
+            model (str): Assistant model to use.
+            client (Optional[Union[openai.OpenAI, openai.AzureOpenAI]]): OpenAI or
+                AzureOpenAI client. Will create default OpenAI client (Assistant v2)
+                if not specified.

         Returns:
-            OpenAIAssistantRunnable configured to run using the created assistant.
+            OpenAIAssistantRunnable: The configured assistant runnable.
         """
-        client = client or _get_openai_client()
         if tool_resources is None:
             from openai._types import NOT_GIVEN
@@ -263,10 +292,10 @@ def create_assistant(
     def invoke(
         self, input: dict, config: Optional[RunnableConfig] = None, **kwargs: Any
     ) -> OutputType:
-        """Invoke assistant.
+        """Invoke the assistant.

         Args:
-            input: Runnable input dict that can have:
+            input (dict): Runnable input dict that can have:
                 content: User message when starting a new run.
                 thread_id: Existing thread to use.
                 run_id: Existing run to use. Should only be supplied when providing
@@ -282,10 +311,10 @@ def invoke(
                 tools: Override Assistant tools for this run.
                 tool_resources: Override Assistant tool resources for this run (v2 API).
                 run_metadata: Metadata to associate with new run.
-            config: Runnable config:
+            config (Optional[RunnableConfig]): Configuration for the run.

-        Return:
-            If self.as_agent, will return
+        Returns:
+            OutputType: If self.as_agent, will return
                 Union[List[OpenAIAssistantAction], OpenAIAssistantFinish].
                 Otherwise, will return OpenAI types
                 Union[List[ThreadMessage], List[RequiredActionFunctionToolCall]].
@@ -293,7 +322,6 @@ def invoke(
         Raises:
             BaseException: If an error occurs during the invocation.
         """
-        config = ensure_config(config)
         callback_manager = CallbackManager.configure(
             inheritable_callbacks=config.get("callbacks"),
@@ -372,16 +400,18 @@ async def acreate_assistant(
         """Create an AsyncOpenAI Assistant and instantiate the Runnable.

         Args:
-            name: Assistant name.
-            instructions: Assistant instructions.
-            tools: Assistant tools. Can be passed in OpenAI format or as BaseTools.
-            tool_resources: Assistant tool resources. Can be passed in OpenAI format
-            model: Assistant model to use.
-            async_client: AsyncOpenAI client.
-                Will create default async_client if not specified.
+            name (str): Assistant name.
+            instructions (str): Assistant instructions.
+            tools (Sequence[Union[BaseTool, dict]]): Assistant tools. Can be passed
+                in OpenAI format or as BaseTools.
+            tool_resources (Optional[Union[AssistantToolResources, dict, NotGiven]]):
+                Assistant tool resources. Can be passed in OpenAI format.
+            model (str): Assistant model to use.
+            async_client (Optional[Union[openai.AsyncOpenAI, openai.AsyncAzureOpenAI]]):
+                Async client. Will create default async_client if not specified.

         Returns:
-            AsyncOpenAIAssistantRunnable configured to run using the created assistant.
+            AsyncOpenAIAssistantRunnable: The configured assistant runnable.
         """
         async_client = async_client or _get_openai_async_client()
         if tool_resources is None:
@@ -405,7 +435,7 @@ async def ainvoke(
         """Async invoke assistant.

         Args:
-            input: Runnable input dict that can have:
+            input (dict): Runnable input dict that can have:
                 content: User message when starting a new run.
                 thread_id: Existing thread to use.
                 run_id: Existing run to use. Should only be supplied when providing
@@ -421,15 +451,17 @@ async def ainvoke(
                 tools: Override Assistant tools for this run.
                 tool_resources: Override Assistant tool resources for this run (v2 API).
                 run_metadata: Metadata to associate with new run.
-            config: Runnable config:
+            config (Optional[RunnableConfig]): Configuration for the run.

-        Return:
-            If self.as_agent, will return
+        Returns:
+            OutputType: If self.as_agent, will return
                 Union[List[OpenAIAssistantAction], OpenAIAssistantFinish].
                 Otherwise, will return OpenAI types
                 Union[List[ThreadMessage], List[RequiredActionFunctionToolCall]].
-        """
+        Raises:
+            BaseException: If an error occurs during the invocation.
+        """
         config = config or {}
         callback_manager = CallbackManager.configure(
             inheritable_callbacks=config.get("callbacks"),
@@ -496,6 +528,14 @@ async def ainvoke(
         return response

     def _create_run(self, input: dict) -> Any:
+        """Create a new run within an existing thread.
+
+        Args:
+            input (dict): The input data for the new run.
+
+        Returns:
+            Any: The created run object.
+        """
         params = {
             k: v
             for k, v in input.items()
@@ -508,6 +548,15 @@ def _create_run(self, input: dict) -> Any:
         )

     def _create_thread_and_run(self, input: dict, thread: dict) -> Any:
+        """Create a new thread and run.
+
+        Args:
+            input (dict): The input data for the run.
+            thread (dict): The thread data to create.
+
+        Returns:
+            Any: The created thread and run.
+        """
         params = {
             k: v
             for k, v in input.items()
@@ -523,10 +572,18 @@ def _create_thread_and_run(self, input: dict, thread: dict) -> Any:
         return run

     async def _acreate_run(self, input: dict) -> Any:
+        """Asynchronously create a new run within an existing thread.
+
+        Args:
+            input (dict): The input data for the new run.
+
+        Returns:
+            Any: The created run object.
+ """ params = { k: v for k, v in input.items() - if k in ("instructions", "model", "tools", "tool_resources" "run_metadata") + if k in ("instructions", "model", "tools", "tool_resources", "run_metadata") } return await self.async_client.beta.threads.runs.create( input["thread_id"], @@ -535,6 +592,15 @@ async def _acreate_run(self, input: dict) -> Any: ) async def _acreate_thread_and_run(self, input: dict, thread: dict) -> Any: + """Asynchronously create a new thread and run simultaneously. + + Args: + input (dict): The input data for the run. + thread (dict): The thread data to create. + + Returns: + Any: The created thread and run. + """ params = { k: v for k, v in input.items()