Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: Refactor import statements and enhance error logging #4071

Merged
merged 22 commits into from
Oct 9, 2024
Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
Show all changes
22 commits
Select commit Hold shift + click to select a range
34f0102
Refactor import paths for `get_patched_openai_client` in astra_assist…
ogabrielluiz Oct 8, 2024
5a78ec6
Enhance error logging with file information in directory_reader.py
ogabrielluiz Oct 8, 2024
0647d00
Refactor MetaphorToolkit to use new input/output structure and update…
ogabrielluiz Oct 8, 2024
59eac0d
Enhance error message with code snippet preview in class validation f…
ogabrielluiz Oct 8, 2024
e5da4b9
Update import statements and refactor input handling in JSON files.
ogabrielluiz Oct 8, 2024
1ed5966
Merge branch 'main' into fix/startup-errors
ogabrielluiz Oct 9, 2024
2897cbb
[autofix.ci] apply automated fixes
autofix-ci[bot] Oct 9, 2024
f9d3b00
Remove unused import of 'Tool' from Metaphor.py
ogabrielluiz Oct 9, 2024
233f1e7
Merge branch 'main' into fix/startup-errors
ogabrielluiz Oct 9, 2024
c425cbb
[autofix.ci] apply automated fixes
autofix-ci[bot] Oct 9, 2024
7584a9a
Merge branch 'main' into fix/startup-errors
github-actions[bot] Oct 9, 2024
65438e4
Merge branch 'main' into fix/startup-errors
github-actions[bot] Oct 9, 2024
0c8fbb2
Merge branch 'main' into fix/startup-errors
github-actions[bot] Oct 9, 2024
49983f1
Merge branch 'main' into fix/startup-errors
github-actions[bot] Oct 9, 2024
602c606
Merge branch 'main' into fix/startup-errors
github-actions[bot] Oct 9, 2024
4fddb3f
Merge branch 'main' into fix/startup-errors
github-actions[bot] Oct 9, 2024
95f38f8
Merge branch 'main' into fix/startup-errors
github-actions[bot] Oct 9, 2024
56491b2
Handle 'code' field separately to avoid evaluation errors on startup
ogabrielluiz Oct 9, 2024
59b3f49
Refactor exception class placement for SpiderTool and AssistantsRun
ogabrielluiz Oct 9, 2024
4a539c4
Remove debug statement in custom_component module
ogabrielluiz Oct 9, 2024
f324d7c
Simplify error logging in directory_reader.py by removing exception d…
ogabrielluiz Oct 9, 2024
16dfb2f
update starter projects
ogabrielluiz Oct 9, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Empty file.
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

from astra_assistants.astra_assistants_manager import AssistantManager

from langflow.components.astra_assistants.util import (
from langflow.base.astra_assistants.util import (
get_patched_openai_client,
litellm_model_names,
tool_names,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from langflow.components.astra_assistants.util import get_patched_openai_client
from langflow.base.astra_assistants.util import get_patched_openai_client
from langflow.custom.custom_component.component_with_cache import ComponentWithCache
from langflow.inputs import MultilineInput, StrInput
from langflow.schema.message import Message
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from langflow.components.astra_assistants.util import get_patched_openai_client
from langflow.base.astra_assistants.util import get_patched_openai_client
from langflow.custom.custom_component.component_with_cache import ComponentWithCache
from langflow.inputs import MultilineInput
from langflow.schema.message import Message
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from langflow.components.astra_assistants.util import get_patched_openai_client
from langflow.base.astra_assistants.util import get_patched_openai_client
from langflow.custom.custom_component.component_with_cache import ComponentWithCache
from langflow.inputs import MultilineInput, StrInput
from langflow.schema.message import Message
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from langflow.components.astra_assistants.util import get_patched_openai_client
from langflow.base.astra_assistants.util import get_patched_openai_client
from langflow.custom.custom_component.component_with_cache import ComponentWithCache
from langflow.schema.message import Message
from langflow.template.field.base import Output
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from openai import OpenAI
from openai.lib.streaming import AssistantEventHandler

from langflow.components.astra_assistants.util import get_patched_openai_client
from langflow.base.astra_assistants.util import get_patched_openai_client
from langflow.custom.custom_component.component_with_cache import ComponentWithCache
from langflow.inputs import MultilineInput
from langflow.schema import dotdict
Expand Down
65 changes: 40 additions & 25 deletions src/backend/base/langflow/components/toolkits/Metaphor.py
Original file line number Diff line number Diff line change
@@ -1,36 +1,51 @@
from langchain_community.agent_toolkits.base import BaseToolkit
from langchain_core.tools import Tool, tool
from langchain_core.tools import tool
from metaphor_python import Metaphor # type: ignore

from langflow.custom import CustomComponent
from langflow.custom import Component
from langflow.field_typing import Tool
from langflow.io import BoolInput, IntInput, Output, SecretStrInput


class MetaphorToolkit(CustomComponent):
display_name: str = "Metaphor"
description: str = "Metaphor Toolkit"
class MetaphorToolkit(Component):
display_name = "Metaphor"
description = "Metaphor Toolkit for search and content retrieval"
documentation = "https://python.langchain.com/docs/integrations/tools/metaphor_search"
beta: bool = True
name = "Metaphor"
# api key should be password = True
field_config = {
"metaphor_api_key": {"display_name": "Metaphor API Key", "password": True},
"code": {"advanced": True},
}

def build(
self,
metaphor_api_key: str,
use_autoprompt: bool = True,
search_num_results: int = 5,
similar_num_results: int = 5,
) -> Tool | BaseToolkit:
# If documents, then we need to create a Vectara instance using .from_documents
client = Metaphor(api_key=metaphor_api_key)
beta = True

inputs = [
SecretStrInput(
name="metaphor_api_key",
display_name="Metaphor API Key",
password=True,
),
BoolInput(
name="use_autoprompt",
display_name="Use Autoprompt",
value=True,
),
IntInput(
name="search_num_results",
display_name="Search Number of Results",
value=5,
),
IntInput(
name="similar_num_results",
display_name="Similar Number of Results",
value=5,
),
]

outputs = [
Output(name="tools", display_name="Tools", method="build_toolkit"),
]

def build_toolkit(self) -> Tool:
client = Metaphor(api_key=self.metaphor_api_key)

@tool
def search(query: str):
"""Call search engine with a query."""
return client.search(query, use_autoprompt=use_autoprompt, num_results=search_num_results)
return client.search(query, use_autoprompt=self.use_autoprompt, num_results=self.search_num_results)

@tool
def get_contents(ids: list[str]):
Expand All @@ -46,6 +61,6 @@ def find_similar(url: str):

The url passed in should be a URL returned from `search`
"""
return client.find_similar(url, num_results=similar_num_results)
return client.find_similar(url, num_results=self.similar_num_results)

return [search, get_contents, find_similar] # type: ignore
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,9 @@ def filter_loaded_components(self, data: dict, with_errors: bool) -> dict:
component_tuple = (*build_component(component), component)
components.append(component_tuple)
except Exception as e:
logger.debug(f"Error while loading component { component['name']}")
logger.debug(
f"Error while loading component '{ component['name']}' from file '{component['file']}'"
)
logger.debug(e)
continue
items.append({"name": menu["name"], "path": menu["path"], "components": components})
Expand Down

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
Expand Up @@ -727,7 +727,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\nfrom langflow.inputs.inputs import HandleInput\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. \"\n \"You must pass the word JSON in the prompt. \"\n \"If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. \"\n \"Defaults to https://api.openai.com/v1. 
\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n HandleInput(\n name=\"output_parser\",\n display_name=\"Output Parser\",\n info=\"The parser to use to parse the output of the model\",\n advanced=True,\n input_types=[\"OutputParser\"],\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n api_key = SecretStr(openai_api_key) if openai_api_key else None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the 
message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return None\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return None\n"
"value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\nfrom langflow.inputs.inputs import HandleInput\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. \"\n \"You must pass the word JSON in the prompt. \"\n \"If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. \"\n \"Defaults to https://api.openai.com/v1. 
\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n HandleInput(\n name=\"output_parser\",\n display_name=\"Output Parser\",\n info=\"The parser to use to parse the output of the model\",\n advanced=True,\n input_types=[\"OutputParser\"],\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n api_key = SecretStr(openai_api_key) if openai_api_key else None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the 
message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return None\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return None\n"
},
"input_value": {
"advanced": false,
Expand Down
Loading
Loading