diff --git a/src/backend/base/langflow/components/embeddings/OpenAIEmbeddings.py b/src/backend/base/langflow/components/embeddings/OpenAIEmbeddings.py
index 039caea245b..7f56874be21 100644
--- a/src/backend/base/langflow/components/embeddings/OpenAIEmbeddings.py
+++ b/src/backend/base/langflow/components/embeddings/OpenAIEmbeddings.py
@@ -40,7 +40,7 @@ class OpenAIEmbeddingsComponent(LCEmbeddingsModel):
         ),
         DictInput(name="model_kwargs", display_name="Model Kwargs", advanced=True),
         SecretStrInput(name="openai_api_base", display_name="OpenAI API Base", advanced=True),
-        SecretStrInput(name="openai_api_key", display_name="OpenAI API Key"),
+        SecretStrInput(name="openai_api_key", display_name="OpenAI API Key", value="OPENAI_API_KEY"),
         SecretStrInput(name="openai_api_type", display_name="OpenAI API Type", advanced=True),
         MessageTextInput(name="openai_api_version", display_name="OpenAI API Version", advanced=True),
         MessageTextInput(
diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, World).json b/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, World).json
index 163384b2c2c..50f9385acb1 100644
--- a/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, World).json
+++ b/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, World).json
@@ -2,10 +2,11 @@
   "data": {
     "edges": [
       {
+        "className": "",
         "data": {
           "sourceHandle": {
             "dataType": "ChatInput",
-            "id": "ChatInput-pxptT",
+            "id": "ChatInput-Y6mi1",
             "name": "message",
             "output_types": [
               "Message"
@@ -13,7 +14,7 @@
           },
           "targetHandle": {
             "fieldName": "user_input",
-            "id": "Prompt-1S5SU",
+            "id": "Prompt-Z4WYI",
             "inputTypes": [
               "Message",
               "Text"
@@ -21,17 +22,18 @@
             "type": "str"
           }
         },
-        "id": "reactflow__edge-ChatInput-pxptT{œdataTypeœ:œChatInputœ,œidœ:œChatInput-pxptTœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-1S5SU{œfieldNameœ:œuser_inputœ,œidœ:œPrompt-1S5SUœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}",
-        "source": "ChatInput-pxptT",
-        "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-pxptTœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}",
-        "target": "Prompt-1S5SU",
-        "targetHandle": "{œfieldNameœ: œuser_inputœ, œidœ: œPrompt-1S5SUœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}"
+        "id": "reactflow__edge-ChatInput-Y6mi1{œdataTypeœ:œChatInputœ,œidœ:œChatInput-Y6mi1œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-Z4WYI{œfieldNameœ:œuser_inputœ,œidœ:œPrompt-Z4WYIœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}",
+        "source": "ChatInput-Y6mi1",
+        "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-Y6mi1œ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}",
+        "target": "Prompt-Z4WYI",
+        "targetHandle": "{œfieldNameœ: œuser_inputœ, œidœ: œPrompt-Z4WYIœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}"
       },
       {
+        "className": "",
         "data": {
           "sourceHandle": {
             "dataType": "Prompt",
-            "id": "Prompt-1S5SU",
+            "id": "Prompt-Z4WYI",
             "name": "prompt",
             "output_types": [
               "Message"
@@ -39,24 +41,24 @@
           },
           "targetHandle": {
             "fieldName": "input_value",
-            "id": "OpenAIModel-nJXWj",
+            "id": "OpenAIModel-26Eve",
             "inputTypes": [
               "Message"
             ],
             "type": "str"
           }
         },
-        "id": "reactflow__edge-Prompt-1S5SU{œdataTypeœ:œPromptœ,œidœ:œPrompt-1S5SUœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-nJXWj{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-nJXWjœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
-        "source": "Prompt-1S5SU",
-        "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-1S5SUœ, œnameœ: œpromptœ, œoutput_typesœ: 
[œMessageœ]}", - "target": "OpenAIModel-nJXWj", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-nJXWjœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-Prompt-Z4WYI{œdataTypeœ:œPromptœ,œidœ:œPrompt-Z4WYIœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-26Eve{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-26Eveœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-Z4WYI", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-Z4WYIœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-26Eve", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-26Eveœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { "data": { "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-nJXWj", + "dataType": "OpenAIModelComponent", + "id": "OpenAIModel-26Eve", "name": "text_output", "output_types": [ "Message" @@ -64,24 +66,24 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "ChatOutput-XP4bj", + "id": "ChatOutput-cQnVI", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-OpenAIModel-nJXWj{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-nJXWjœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-XP4bj{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-XP4bjœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "OpenAIModel-nJXWj", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-nJXWjœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "ChatOutput-XP4bj", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-XP4bjœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-OpenAIModel-26Eve{œdataTypeœ:œOpenAIModelComponentœ,œidœ:œOpenAIModel-26Eveœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-cQnVI{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-cQnVIœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-26Eve", + "sourceHandle": "{œdataTypeœ: œOpenAIModelComponentœ, œidœ: œOpenAIModel-26Eveœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-cQnVI", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-cQnVIœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" } ], "nodes": [ { "data": { - "id": "ChatInput-pxptT", + "id": "ChatInput-Y6mi1", "node": { "base_classes": [ "Message" @@ -264,7 +266,7 @@ }, "dragging": false, "height": 308, - "id": "ChatInput-pxptT", + "id": "ChatInput-Y6mi1", "position": { "x": -493.6459512396177, "y": 1083.200545525551 @@ -281,7 +283,7 @@ "data": { "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", - "id": "Prompt-1S5SU", + "id": "Prompt-Z4WYI", "node": { "base_classes": [ "Message" @@ -389,7 +391,7 @@ }, "dragging": false, "height": 422, - "id": "Prompt-1S5SU", + "id": "Prompt-Z4WYI", "position": { "x": 56.354011530798516, "y": 1157.2005405164796 @@ -404,7 +406,10 @@ }, { "data": { - "id": "OpenAIModel-nJXWj", + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "edited": false, + "id": "OpenAIModel-26Eve", "node": { "base_classes": [ "LanguageModel", @@ -416,11 +421,12 @@ "description": "Generates text using OpenAI LLMs.", "display_name": "OpenAI", "documentation": "", - "edited": false, + "edited": true, "field_order": [ "input_value", "max_tokens", "model_kwargs", + "json_mode", "output_schema", "model_name", "openai_api_base", @@ -477,7 +483,7 @@ "show": true, "title_case": false, "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom 
langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n inputs = [\n MessageInput(name=\"input_value\", display_name=\"Input\"),\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\", display_name=\"Model Name\", advanced=False, options=MODEL_NAMES, value=MODEL_NAMES[0]\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"openai_api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n BoolInput(name=\"stream\", display_name=\"Stream\", info=STREAM_INFO_TEXT, advanced=True),\n StrInput(\n name=\"system_message\",\n display_name=\"System Message\",\n info=\"System message to pass to the model.\",\n advanced=True,\n ),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel:\n # self.output_schea is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.openai_api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict)\n seed = self.seed\n model_kwargs[\"seed\"] = seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n )\n if json_mode:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message 
= e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n inputs = [\n MessageInput(name=\"input_value\", display_name=\"Input\"),\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\", display_name=\"Model Name\", advanced=False, options=MODEL_NAMES, value=MODEL_NAMES[0]\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"openai_api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n BoolInput(name=\"stream\", display_name=\"Stream\", info=STREAM_INFO_TEXT, advanced=True),\n StrInput(\n name=\"system_message\",\n display_name=\"System Message\",\n info=\"System message to pass to the model.\",\n advanced=True,\n ),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel:\n # self.output_schea is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.openai_api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n model_kwargs[\"seed\"] = seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" }, "input_value": { "advanced": false, @@ -499,6 +505,21 @@ "type": "str", "value": "" }, + "json_mode": { + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, "max_tokens": { "advanced": true, "display_name": "Max Tokens", @@ -572,7 +593,7 @@ "dynamic": false, "info": "The OpenAI API Key to use for the OpenAI model.", "input_types": [], - "load_from_db": false, + "load_from_db": true, "name": "openai_api_key", "password": true, "placeholder": "", @@ -580,7 +601,7 @@ "show": true, "title_case": false, "type": "str", - "value": "" + "value": "OPENAI_API_KEY" }, "output_schema": { "advanced": true, @@ -660,11 +681,11 @@ } } }, - "type": "OpenAIModel" + "type": "OpenAIModelComponent" }, "dragging": false, "height": 621, - "id": "OpenAIModel-nJXWj", + "id": "OpenAIModel-26Eve", "position": { "x": 624.3539730827923, "y": 1053.2005475562555 @@ -679,7 +700,7 @@ }, { "data": { - "id": "ChatOutput-XP4bj", + "id": "ChatOutput-cQnVI", "node": { "base_classes": [ "Message" 
@@ -839,7 +860,7 @@
       },
       "dragging": false,
       "height": 308,
-      "id": "ChatOutput-XP4bj",
+      "id": "ChatOutput-cQnVI",
       "position": {
         "x": 1219.477374823274,
         "y": 1200.950216973985
@@ -854,15 +875,15 @@
       }
     ],
     "viewport": {
-      "x": 392.1085223509972,
-      "y": -327.49805229761307,
-      "zoom": 0.5000000676901589
+      "x": 366.93776265249005,
+      "y": -343.56726676261223,
+      "zoom": 0.5000000676901587
     }
   },
   "description": "This flow will get you experimenting with the basics of the UI, the Chat and the Prompt component. \n\nTry changing the Template in it to see how the model behaves. \nYou can change it to this and a Text Input into the `type_of_person` variable : \"Answer the user as if you were a pirate.\n\nUser: {user_input}\n\nAnswer: \" ",
   "endpoint_name": null,
-  "id": "f652abdc-7ef2-4e52-a00b-847b7aa32cee",
+  "id": "e533253b-818b-4b5a-9793-55ab83fffb07",
   "is_component": false,
-  "last_tested_version": "1.0.0rc1",
+  "last_tested_version": "1.0.5",
   "name": "Basic Prompting (Hello, World)"
 }
\ No newline at end of file
diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Blog Writer.json b/src/backend/base/langflow/initial_setup/starter_projects/Blog Writer.json
index ec4a19aac11..7828ac5854d 100644
--- a/src/backend/base/langflow/initial_setup/starter_projects/Blog Writer.json
+++ b/src/backend/base/langflow/initial_setup/starter_projects/Blog Writer.json
@@ -2,10 +2,11 @@
   "data": {
     "edges": [
      {
+        "className": "",
        "data": {
          "sourceHandle": {
            "dataType": "URL",
-            "id": "URL-k9NkE",
+            "id": "URL-rETJU",
            "name": "data",
            "output_types": [
              "Data"
@@ -13,24 +14,25 @@
          },
          "targetHandle": {
            "fieldName": "data",
-            "id": "ParseData-EwWXd",
+            "id": "ParseData-AqSfN",
            "inputTypes": [
              "Data"
            ],
            "type": "other"
          }
        },
-        "id": "reactflow__edge-URL-k9NkE{œdataTypeœ:œURLœ,œidœ:œURL-k9NkEœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-ParseData-EwWXd{œfieldNameœ:œdataœ,œidœ:œParseData-EwWXdœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}",
-        "source": "URL-k9NkE",
-        "sourceHandle": "{œdataTypeœ: œURLœ, œidœ: œURL-k9NkEœ, œnameœ: œdataœ, œoutput_typesœ: [œDataœ]}",
-        "target": "ParseData-EwWXd",
-        "targetHandle": "{œfieldNameœ: œdataœ, œidœ: œParseData-EwWXdœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}"
+        "id": "reactflow__edge-URL-rETJU{œdataTypeœ:œURLœ,œidœ:œURL-rETJUœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-ParseData-AqSfN{œfieldNameœ:œdataœ,œidœ:œParseData-AqSfNœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}",
+        "source": "URL-rETJU",
+        "sourceHandle": "{œdataTypeœ: œURLœ, œidœ: œURL-rETJUœ, œnameœ: œdataœ, œoutput_typesœ: [œDataœ]}",
+        "target": "ParseData-AqSfN",
+        "targetHandle": "{œfieldNameœ: œdataœ, œidœ: œParseData-AqSfNœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}"
      },
      {
+        "className": "",
        "data": {
          "sourceHandle": {
            "dataType": "ParseData",
-            "id": "ParseData-EwWXd",
+            "id": "ParseData-AqSfN",
            "name": "text",
            "output_types": [
              "Message"
@@ -38,7 +40,7 @@
          },
          "targetHandle": {
            "fieldName": "references",
-            "id": "Prompt-B9Mq6",
+            "id": "Prompt-rizUK",
            "inputTypes": [
              "Message",
              "Text"
@@ -46,17 +48,18 @@
            "type": "str"
          }
        },
-        "id": "reactflow__edge-ParseData-EwWXd{œdataTypeœ:œParseDataœ,œidœ:œParseData-EwWXdœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-B9Mq6{œfieldNameœ:œreferencesœ,œidœ:œPrompt-B9Mq6œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}",
-        "source": "ParseData-EwWXd",
-        "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-EwWXdœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}",
-        "target": "Prompt-B9Mq6",
-        "targetHandle": "{œfieldNameœ: œreferencesœ, œidœ: œPrompt-B9Mq6œ, œinputTypesœ: [œMessageœ, œTextœ], 
œtypeœ: œstrœ}" + "id": "reactflow__edge-ParseData-AqSfN{œdataTypeœ:œParseDataœ,œidœ:œParseData-AqSfNœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-rizUK{œfieldNameœ:œreferencesœ,œidœ:œPrompt-rizUKœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ParseData-AqSfN", + "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-AqSfNœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-rizUK", + "targetHandle": "{œfieldNameœ: œreferencesœ, œidœ: œPrompt-rizUKœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "TextInput", - "id": "TextInput-uf6ij", + "id": "TextInput-OffFR", "name": "text", "output_types": [ "Message" @@ -64,7 +67,7 @@ }, "targetHandle": { "fieldName": "instructions", - "id": "Prompt-B9Mq6", + "id": "Prompt-rizUK", "inputTypes": [ "Message", "Text" @@ -72,17 +75,18 @@ "type": "str" } }, - "id": "reactflow__edge-TextInput-uf6ij{œdataTypeœ:œTextInputœ,œidœ:œTextInput-uf6ijœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-B9Mq6{œfieldNameœ:œinstructionsœ,œidœ:œPrompt-B9Mq6œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "TextInput-uf6ij", - "sourceHandle": "{œdataTypeœ: œTextInputœ, œidœ: œTextInput-uf6ijœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-B9Mq6", - "targetHandle": "{œfieldNameœ: œinstructionsœ, œidœ: œPrompt-B9Mq6œ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-TextInput-OffFR{œdataTypeœ:œTextInputœ,œidœ:œTextInput-OffFRœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-rizUK{œfieldNameœ:œinstructionsœ,œidœ:œPrompt-rizUKœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "TextInput-OffFR", + "sourceHandle": "{œdataTypeœ: œTextInputœ, œidœ: œTextInput-OffFRœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-rizUK", + "targetHandle": "{œfieldNameœ: œinstructionsœ, œidœ: œPrompt-rizUKœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "Prompt", - "id": "Prompt-B9Mq6", + "id": "Prompt-rizUK", "name": "prompt", "output_types": [ "Message" @@ -90,24 +94,24 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "OpenAIModel-X9ukk", + "id": "OpenAIModel-qmhKV", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-Prompt-B9Mq6{œdataTypeœ:œPromptœ,œidœ:œPrompt-B9Mq6œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-X9ukk{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-X9ukkœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "Prompt-B9Mq6", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-B9Mq6œ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "OpenAIModel-X9ukk", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-X9ukkœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-Prompt-rizUK{œdataTypeœ:œPromptœ,œidœ:œPrompt-rizUKœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-qmhKV{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-qmhKVœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-rizUK", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-rizUKœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-qmhKV", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-qmhKVœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { "data": { "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-X9ukk", + "dataType": "OpenAIModelComponent", + "id": "OpenAIModel-qmhKV", "name": "text_output", 
"output_types": [ "Message" @@ -115,24 +119,24 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "ChatOutput-5r5Iw", + "id": "ChatOutput-W684s", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-OpenAIModel-X9ukk{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-X9ukkœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-5r5Iw{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-5r5Iwœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "OpenAIModel-X9ukk", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-X9ukkœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "ChatOutput-5r5Iw", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-5r5Iwœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-OpenAIModel-qmhKV{œdataTypeœ:œOpenAIModelComponentœ,œidœ:œOpenAIModel-qmhKVœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-W684s{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-W684sœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-qmhKV", + "sourceHandle": "{œdataTypeœ: œOpenAIModelComponentœ, œidœ: œOpenAIModel-qmhKVœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-W684s", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-W684sœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" } ], "nodes": [ { "data": { - "id": "URL-k9NkE", + "id": "URL-rETJU", "node": { "base_classes": [ "Data" @@ -214,7 +218,7 @@ }, "dragging": false, "height": 358, - "id": "URL-k9NkE", + "id": "URL-rETJU", "position": { "x": 220.79156431407534, "y": 498.8186168722667 @@ -229,7 +233,7 @@ }, { "data": { - "id": "ParseData-EwWXd", + "id": "ParseData-AqSfN", "node": { "base_classes": [ "Message" @@ -346,7 +350,7 @@ }, "dragging": false, "height": 384, - "id": "ParseData-EwWXd", + "id": "ParseData-AqSfN", "position": { "x": 754.3607306709101, "y": 736.8516961537598 @@ -363,7 +367,7 @@ "data": { "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", - "id": "Prompt-B9Mq6", + "id": "Prompt-rizUK", "node": { "base_classes": [ "Message" @@ -496,7 +500,7 @@ }, "dragging": false, "height": 515, - "id": "Prompt-B9Mq6", + "id": "Prompt-rizUK", "position": { "x": 1368.0633591447076, "y": 467.19448061224284 @@ -511,7 +515,7 @@ }, { "data": { - "id": "TextInput-uf6ij", + "id": "TextInput-OffFR", "node": { "base_classes": [ "Message" @@ -590,7 +594,7 @@ }, "dragging": false, "height": 308, - "id": "TextInput-uf6ij", + "id": "TextInput-OffFR", "position": { "x": 743.7338453293725, "y": 301.58775454952183 @@ -605,7 +609,10 @@ }, { "data": { - "id": "OpenAIModel-X9ukk", + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "edited": false, + "id": "OpenAIModel-qmhKV", "node": { "base_classes": [ "LanguageModel", @@ -617,11 +624,12 @@ "description": "Generates text using OpenAI LLMs.", "display_name": "OpenAI", "documentation": "", - "edited": false, + "edited": true, "field_order": [ "input_value", "max_tokens", "model_kwargs", + "json_mode", "output_schema", "model_name", "openai_api_base", @@ -678,7 +686,7 @@ "show": true, "title_case": false, "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom 
langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n inputs = [\n MessageInput(name=\"input_value\", display_name=\"Input\"),\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\", display_name=\"Model Name\", advanced=False, options=MODEL_NAMES, value=MODEL_NAMES[0]\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"openai_api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n BoolInput(name=\"stream\", display_name=\"Stream\", info=STREAM_INFO_TEXT, advanced=True),\n StrInput(\n name=\"system_message\",\n display_name=\"System Message\",\n info=\"System message to pass to the model.\",\n advanced=True,\n ),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel:\n # self.output_schea is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.openai_api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict)\n seed = self.seed\n model_kwargs[\"seed\"] = seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n )\n if json_mode:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom 
langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n inputs = [\n MessageInput(name=\"input_value\", display_name=\"Input\"),\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\", display_name=\"Model Name\", advanced=False, options=MODEL_NAMES, value=MODEL_NAMES[0]\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"openai_api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n BoolInput(name=\"stream\", display_name=\"Stream\", info=STREAM_INFO_TEXT, advanced=True),\n StrInput(\n name=\"system_message\",\n display_name=\"System Message\",\n info=\"System message to pass to the model.\",\n advanced=True,\n ),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel:\n # self.output_schea is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.openai_api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n model_kwargs[\"seed\"] = seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The 
message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" }, "input_value": { "advanced": false, @@ -700,6 +708,21 @@ "type": "str", "value": "" }, + "json_mode": { + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, "max_tokens": { "advanced": true, "display_name": "Max Tokens", @@ -773,7 +796,7 @@ "dynamic": false, "info": "The OpenAI API Key to use for the OpenAI model.", "input_types": [], - "load_from_db": false, + "load_from_db": true, "name": "openai_api_key", "password": true, "placeholder": "", @@ -781,7 +804,7 @@ "show": true, "title_case": false, "type": "str", - "value": "" + "value": "OPENAI_API_KEY" }, "output_schema": { "advanced": true, @@ -861,11 +884,11 @@ } } }, - "type": "OpenAIModel" + "type": "OpenAIModelComponent" }, "dragging": false, "height": 621, - "id": "OpenAIModel-X9ukk", + "id": "OpenAIModel-qmhKV", "position": { "x": 1899.407626221589, "y": 395.9013619556682 @@ -880,7 +903,7 @@ }, { "data": { - "id": "ChatOutput-5r5Iw", + "id": "ChatOutput-W684s", "node": { "base_classes": [ "Message" @@ -1040,7 +1063,7 @@ }, "dragging": false, "height": 308, - "id": "ChatOutput-5r5Iw", + "id": "ChatOutput-W684s", "position": { "x": 2449.3489426461606, "y": 571.2449700910389 @@ -1062,8 +1085,8 @@ }, "description": "This flow can be used to create a blog post following instructions from the user, using two other blogs as reference.", "endpoint_name": null, - "id": "13da3150-95b9-4d81-9ad2-f635dcdce7ab", + "id": "da9999f8-9013-4bd9-8adb-653c94ebf08c", "is_component": false, - "last_tested_version": "1.0.0rc1", + "last_tested_version": "1.0.5", "name": "Blog Writer" } \ No newline at end of file diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Document QA.json b/src/backend/base/langflow/initial_setup/starter_projects/Document QA.json index d66c6b22bba..d0e7c53b045 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Document QA.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Document QA.json @@ -2,10 +2,11 @@ "data": { "edges": [ { + "className": "", "data": { "sourceHandle": { "dataType": "File", - "id": "File-h46aK", + "id": "File-Q3Xrb", "name": "data", "output_types": [ "Data" @@ -13,24 +14,25 @@ }, "targetHandle": { "fieldName": "data", - "id": "ParseData-sqVr1", + "id": "ParseData-1Y5jJ", "inputTypes": [ "Data" ], "type": "other" } }, - "id": "reactflow__edge-File-h46aK{œdataTypeœ:œFileœ,œidœ:œFile-h46aKœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-ParseData-sqVr1{œfieldNameœ:œdataœ,œidœ:œParseData-sqVr1œ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", - "source": "File-h46aK", - "sourceHandle": "{œdataTypeœ: œFileœ, œidœ: œFile-h46aKœ, œnameœ: œdataœ, œoutput_typesœ: [œDataœ]}", - "target": "ParseData-sqVr1", - "targetHandle": "{œfieldNameœ: œdataœ, œidœ: œParseData-sqVr1œ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" + "id": "reactflow__edge-File-Q3Xrb{œdataTypeœ:œFileœ,œidœ:œFile-Q3Xrbœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-ParseData-1Y5jJ{œfieldNameœ:œdataœ,œidœ:œParseData-1Y5jJœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "source": "File-Q3Xrb", + "sourceHandle": 
"{œdataTypeœ: œFileœ, œidœ: œFile-Q3Xrbœ, œnameœ: œdataœ, œoutput_typesœ: [œDataœ]}", + "target": "ParseData-1Y5jJ", + "targetHandle": "{œfieldNameœ: œdataœ, œidœ: œParseData-1Y5jJœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "ParseData", - "id": "ParseData-sqVr1", + "id": "ParseData-1Y5jJ", "name": "text", "output_types": [ "Message" @@ -38,7 +40,7 @@ }, "targetHandle": { "fieldName": "Document", - "id": "Prompt-mQ7w2", + "id": "Prompt-CMJEB", "inputTypes": [ "Message", "Text" @@ -46,17 +48,18 @@ "type": "str" } }, - "id": "reactflow__edge-ParseData-sqVr1{œdataTypeœ:œParseDataœ,œidœ:œParseData-sqVr1œ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-mQ7w2{œfieldNameœ:œDocumentœ,œidœ:œPrompt-mQ7w2œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ParseData-sqVr1", - "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-sqVr1œ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-mQ7w2", - "targetHandle": "{œfieldNameœ: œDocumentœ, œidœ: œPrompt-mQ7w2œ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-ParseData-1Y5jJ{œdataTypeœ:œParseDataœ,œidœ:œParseData-1Y5jJœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-CMJEB{œfieldNameœ:œDocumentœ,œidœ:œPrompt-CMJEBœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ParseData-1Y5jJ", + "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-1Y5jJœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-CMJEB", + "targetHandle": "{œfieldNameœ: œDocumentœ, œidœ: œPrompt-CMJEBœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "ChatInput", - "id": "ChatInput-cMXe0", + "id": "ChatInput-mc7sJ", "name": "message", "output_types": [ "Message" @@ -64,7 +67,7 @@ }, "targetHandle": { "fieldName": "Question", - "id": "Prompt-mQ7w2", + "id": "Prompt-CMJEB", "inputTypes": [ "Message", "Text" @@ -72,17 +75,18 @@ "type": "str" } }, - "id": "reactflow__edge-ChatInput-cMXe0{œdataTypeœ:œChatInputœ,œidœ:œChatInput-cMXe0œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-mQ7w2{œfieldNameœ:œQuestionœ,œidœ:œPrompt-mQ7w2œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ChatInput-cMXe0", - "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-cMXe0œ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-mQ7w2", - "targetHandle": "{œfieldNameœ: œQuestionœ, œidœ: œPrompt-mQ7w2œ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-ChatInput-mc7sJ{œdataTypeœ:œChatInputœ,œidœ:œChatInput-mc7sJœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-CMJEB{œfieldNameœ:œQuestionœ,œidœ:œPrompt-CMJEBœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ChatInput-mc7sJ", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-mc7sJœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-CMJEB", + "targetHandle": "{œfieldNameœ: œQuestionœ, œidœ: œPrompt-CMJEBœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "Prompt", - "id": "Prompt-mQ7w2", + "id": "Prompt-CMJEB", "name": "prompt", "output_types": [ "Message" @@ -90,24 +94,24 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "OpenAIModel-O0AGC", + "id": "OpenAIModel-U2g5u", "inputTypes": [ "Message" ], "type": "str" } }, - "id": 
"reactflow__edge-Prompt-mQ7w2{œdataTypeœ:œPromptœ,œidœ:œPrompt-mQ7w2œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-O0AGC{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-O0AGCœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "Prompt-mQ7w2", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-mQ7w2œ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "OpenAIModel-O0AGC", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-O0AGCœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-Prompt-CMJEB{œdataTypeœ:œPromptœ,œidœ:œPrompt-CMJEBœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-U2g5u{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-U2g5uœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-CMJEB", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-CMJEBœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-U2g5u", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-U2g5uœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { "data": { "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-O0AGC", + "dataType": "OpenAIModelComponent", + "id": "OpenAIModel-U2g5u", "name": "text_output", "output_types": [ "Message" @@ -115,24 +119,24 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "ChatOutput-efggd", + "id": "ChatOutput-yZjPO", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-OpenAIModel-O0AGC{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-O0AGCœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-efggd{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-efggdœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "OpenAIModel-O0AGC", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-O0AGCœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "ChatOutput-efggd", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-efggdœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-OpenAIModel-U2g5u{œdataTypeœ:œOpenAIModelComponentœ,œidœ:œOpenAIModel-U2g5uœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-yZjPO{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-yZjPOœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-U2g5u", + "sourceHandle": "{œdataTypeœ: œOpenAIModelComponentœ, œidœ: œOpenAIModel-U2g5uœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-yZjPO", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-yZjPOœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" } ], "nodes": [ { "data": { - "id": "File-h46aK", + "id": "File-Q3Xrb", "node": { "base_classes": [ "Data" @@ -243,7 +247,7 @@ }, "dragging": false, "height": 300, - "id": "File-h46aK", + "id": "File-Q3Xrb", "position": { "x": -449.0807503257012, "y": -253.5304920926106 @@ -258,7 +262,7 @@ }, { "data": { - "id": "ParseData-sqVr1", + "id": "ParseData-1Y5jJ", "node": { "base_classes": [ "Message" @@ -375,7 +379,7 @@ }, "dragging": false, "height": 384, - "id": "ParseData-sqVr1", + "id": "ParseData-1Y5jJ", "position": { "x": 73.79471204296345, "y": -186.9430114986888 @@ -392,7 +396,7 @@ "data": { "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", - "id": "Prompt-mQ7w2", + "id": "Prompt-CMJEB", "node": { "base_classes": [ "Message" @@ -525,7 +529,7 @@ }, "dragging": false, "height": 515, - "id": "Prompt-mQ7w2", + "id": "Prompt-CMJEB", "position": { "x": 637.3518652087848, "y": 47.191730368560215 @@ -540,7 +544,7 @@ }, { "data": { - "id": 
"ChatInput-cMXe0", + "id": "ChatInput-mc7sJ", "node": { "base_classes": [ "Message" @@ -723,7 +727,7 @@ }, "dragging": false, "height": 308, - "id": "ChatInput-cMXe0", + "id": "ChatInput-mc7sJ", "position": { "x": 50.08709924122684, "y": 320.88186720121615 @@ -738,7 +742,10 @@ }, { "data": { - "id": "OpenAIModel-O0AGC", + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "edited": false, + "id": "OpenAIModel-U2g5u", "node": { "base_classes": [ "LanguageModel", @@ -750,11 +757,12 @@ "description": "Generates text using OpenAI LLMs.", "display_name": "OpenAI", "documentation": "", - "edited": false, + "edited": true, "field_order": [ "input_value", "max_tokens", "model_kwargs", + "json_mode", "output_schema", "model_name", "openai_api_base", @@ -811,7 +819,7 @@ "show": true, "title_case": false, "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n inputs = [\n MessageInput(name=\"input_value\", display_name=\"Input\"),\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\", display_name=\"Model Name\", advanced=False, options=MODEL_NAMES, value=MODEL_NAMES[0]\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"openai_api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n BoolInput(name=\"stream\", display_name=\"Stream\", info=STREAM_INFO_TEXT, advanced=True),\n StrInput(\n name=\"system_message\",\n display_name=\"System Message\",\n info=\"System message to pass to the model.\",\n advanced=True,\n ),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel:\n # self.output_schea is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.openai_api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict)\n seed = self.seed\n model_kwargs[\"seed\"] = seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n )\n if json_mode:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n inputs = [\n MessageInput(name=\"input_value\", display_name=\"Input\"),\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. 
If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\", display_name=\"Model Name\", advanced=False, options=MODEL_NAMES, value=MODEL_NAMES[0]\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"openai_api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n BoolInput(name=\"stream\", display_name=\"Stream\", info=STREAM_INFO_TEXT, advanced=True),\n StrInput(\n name=\"system_message\",\n display_name=\"System Message\",\n info=\"System message to pass to the model.\",\n advanced=True,\n ),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel:\n # self.output_schea is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.openai_api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n model_kwargs[\"seed\"] = seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" }, "input_value": { "advanced": false, @@ -833,6 +841,21 @@ "type": "str", "value": "" }, + "json_mode": { + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, "max_tokens": { "advanced": true, "display_name": "Max Tokens", @@ -906,7 +929,7 @@ "dynamic": false, "info": "The OpenAI API Key to use for the OpenAI model.", "input_types": [], - "load_from_db": false, + "load_from_db": true, "name": "openai_api_key", "password": true, "placeholder": "", @@ -914,7 +937,7 @@ "show": true, "title_case": false, "type": "str", - "value": "" + "value": "OPENAI_API_KEY" }, "output_schema": { "advanced": true, @@ -994,26 
+1017,26 @@ } } }, - "type": "OpenAIModel" + "type": "OpenAIModelComponent" }, "dragging": false, "height": 621, - "id": "OpenAIModel-O0AGC", + "id": "OpenAIModel-U2g5u", "position": { - "x": 1227.3672858178775, - "y": 11.61201090144857 + "x": 1249.1992451905348, + "y": 2.8792271523856243 }, "positionAbsolute": { - "x": 1227.3672858178775, - "y": 11.61201090144857 + "x": 1249.1992451905348, + "y": 2.8792271523856243 }, - "selected": false, + "selected": true, "type": "genericNode", "width": 384 }, { "data": { - "id": "ChatOutput-efggd", + "id": "ChatOutput-yZjPO", "node": { "base_classes": [ "Message" @@ -1173,7 +1196,7 @@ }, "dragging": false, "height": 308, - "id": "ChatOutput-efggd", + "id": "ChatOutput-yZjPO", "position": { "x": 1831.1359796346408, "y": 139.5174517327903 @@ -1188,15 +1211,15 @@ } ], "viewport": { - "x": 249.03047748371796, - "y": 251.71203687916693, + "x": 252.03047748371796, + "y": 253.71203687916693, "zoom": 0.4580440916596844 } }, "description": "This flow integrates PDF reading with a language model to answer document-specific questions. Ideal for small-scale texts, it facilitates direct queries with immediate insights.", "endpoint_name": null, - "id": "4b4cbf9e-34fe-4613-a460-3b7af89b7788", + "id": "483d5200-b59b-4afa-a71f-52fcfcde8fca", "is_component": false, - "last_tested_version": "1.0.0rc1", + "last_tested_version": "1.0.5", "name": "Document QA" } \ No newline at end of file diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Memory Chatbot.json b/src/backend/base/langflow/initial_setup/starter_projects/Memory Chatbot.json index c3396f03e74..1edb56cd54f 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Memory Chatbot.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Memory Chatbot.json @@ -2,10 +2,11 @@ "data": { "edges": [ { + "className": "", "data": { "sourceHandle": { "dataType": "Memory", - "id": "Memory-uy2TA", + "id": "Memory-VIq7F", "name": "messages_text", "output_types": [ "Message" @@ -13,7 +14,7 @@ }, "targetHandle": { "fieldName": "context", - "id": "Prompt-m9rUs", + "id": "Prompt-gEaWL", "inputTypes": [ "Message", "Text" @@ -21,17 +22,18 @@ "type": "str" } }, - "id": "reactflow__edge-Memory-uy2TA{œdataTypeœ:œMemoryœ,œidœ:œMemory-uy2TAœ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}-Prompt-m9rUs{œfieldNameœ:œcontextœ,œidœ:œPrompt-m9rUsœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "Memory-uy2TA", - "sourceHandle": "{œdataTypeœ: œMemoryœ, œidœ: œMemory-uy2TAœ, œnameœ: œmessages_textœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-m9rUs", - "targetHandle": "{œfieldNameœ: œcontextœ, œidœ: œPrompt-m9rUsœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-Memory-VIq7F{œdataTypeœ:œMemoryœ,œidœ:œMemory-VIq7Fœ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}-Prompt-gEaWL{œfieldNameœ:œcontextœ,œidœ:œPrompt-gEaWLœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "Memory-VIq7F", + "sourceHandle": "{œdataTypeœ: œMemoryœ, œidœ: œMemory-VIq7Fœ, œnameœ: œmessages_textœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-gEaWL", + "targetHandle": "{œfieldNameœ: œcontextœ, œidœ: œPrompt-gEaWLœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "ChatInput", - "id": "ChatInput-hSTqh", + "id": "ChatInput-gIy9N", "name": "message", "output_types": [ "Message" @@ -39,7 +41,7 @@ }, "targetHandle": { "fieldName": "user_message", - "id": "Prompt-m9rUs", + "id": "Prompt-gEaWL", 
"inputTypes": [ "Message", "Text" @@ -47,17 +49,18 @@ "type": "str" } }, - "id": "reactflow__edge-ChatInput-hSTqh{œdataTypeœ:œChatInputœ,œidœ:œChatInput-hSTqhœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-m9rUs{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-m9rUsœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ChatInput-hSTqh", - "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-hSTqhœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-m9rUs", - "targetHandle": "{œfieldNameœ: œuser_messageœ, œidœ: œPrompt-m9rUsœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-ChatInput-gIy9N{œdataTypeœ:œChatInputœ,œidœ:œChatInput-gIy9Nœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-gEaWL{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-gEaWLœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ChatInput-gIy9N", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-gIy9Nœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-gEaWL", + "targetHandle": "{œfieldNameœ: œuser_messageœ, œidœ: œPrompt-gEaWLœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "Prompt", - "id": "Prompt-m9rUs", + "id": "Prompt-gEaWL", "name": "prompt", "output_types": [ "Message" @@ -65,24 +68,24 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "OpenAIModel-WmUtU", + "id": "OpenAIModel-uNcAU", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-Prompt-m9rUs{œdataTypeœ:œPromptœ,œidœ:œPrompt-m9rUsœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-WmUtU{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-WmUtUœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "Prompt-m9rUs", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-m9rUsœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "OpenAIModel-WmUtU", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-WmUtUœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-Prompt-gEaWL{œdataTypeœ:œPromptœ,œidœ:œPrompt-gEaWLœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-uNcAU{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-uNcAUœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-gEaWL", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-gEaWLœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-uNcAU", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-uNcAUœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { "data": { "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-WmUtU", + "dataType": "OpenAIModelComponent", + "id": "OpenAIModel-uNcAU", "name": "text_output", "output_types": [ "Message" @@ -90,24 +93,24 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "ChatOutput-LIvGN", + "id": "ChatOutput-KtSB9", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-OpenAIModel-WmUtU{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-WmUtUœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-LIvGN{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-LIvGNœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "OpenAIModel-WmUtU", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-WmUtUœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "ChatOutput-LIvGN", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-LIvGNœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": 
"reactflow__edge-OpenAIModel-uNcAU{œdataTypeœ:œOpenAIModelComponentœ,œidœ:œOpenAIModel-uNcAUœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-KtSB9{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-KtSB9œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-uNcAU", + "sourceHandle": "{œdataTypeœ: œOpenAIModelComponentœ, œidœ: œOpenAIModel-uNcAUœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-KtSB9", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-KtSB9œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" } ], "nodes": [ { "data": { - "id": "Memory-uy2TA", + "id": "Memory-VIq7F", "node": { "base_classes": [ "Data", @@ -296,7 +299,7 @@ }, "dragging": false, "height": 266, - "id": "Memory-uy2TA", + "id": "Memory-VIq7F", "position": { "x": 1264.7588980556088, "y": 506.6868269980502 @@ -313,7 +316,7 @@ "data": { "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", - "id": "Prompt-m9rUs", + "id": "Prompt-gEaWL", "node": { "base_classes": [ "Message" @@ -446,7 +449,7 @@ }, "dragging": false, "height": 515, - "id": "Prompt-m9rUs", + "id": "Prompt-gEaWL", "position": { "x": 1880.8227904110583, "y": 625.8049209882275 @@ -461,7 +464,7 @@ }, { "data": { - "id": "ChatInput-hSTqh", + "id": "ChatInput-gIy9N", "node": { "base_classes": [ "Message" @@ -644,7 +647,7 @@ }, "dragging": false, "height": 308, - "id": "ChatInput-hSTqh", + "id": "ChatInput-gIy9N", "position": { "x": 1275.9262193671882, "y": 836.1228056896347 @@ -659,7 +662,10 @@ }, { "data": { - "id": "OpenAIModel-WmUtU", + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "edited": false, + "id": "OpenAIModel-uNcAU", "node": { "base_classes": [ "LanguageModel", @@ -671,11 +677,12 @@ "description": "Generates text using OpenAI LLMs.", "display_name": "OpenAI", "documentation": "", - "edited": false, + "edited": true, "field_order": [ "input_value", "max_tokens", "model_kwargs", + "json_mode", "output_schema", "model_name", "openai_api_base", @@ -732,7 +739,7 @@ "show": true, "title_case": false, "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n inputs = [\n MessageInput(name=\"input_value\", display_name=\"Input\"),\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. 
If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\", display_name=\"Model Name\", advanced=False, options=MODEL_NAMES, value=MODEL_NAMES[0]\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"openai_api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n BoolInput(name=\"stream\", display_name=\"Stream\", info=STREAM_INFO_TEXT, advanced=True),\n StrInput(\n name=\"system_message\",\n display_name=\"System Message\",\n info=\"System message to pass to the model.\",\n advanced=True,\n ),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel:\n # self.output_schea is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.openai_api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict)\n seed = self.seed\n model_kwargs[\"seed\"] = seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n )\n if json_mode:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n inputs = [\n MessageInput(name=\"input_value\", display_name=\"Input\"),\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\", display_name=\"Model Name\", advanced=False, options=MODEL_NAMES, value=MODEL_NAMES[0]\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"openai_api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n BoolInput(name=\"stream\", display_name=\"Stream\", info=STREAM_INFO_TEXT, advanced=True),\n StrInput(\n name=\"system_message\",\n display_name=\"System Message\",\n info=\"System message to pass to the model.\",\n advanced=True,\n ),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel:\n # self.output_schea is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.openai_api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n model_kwargs[\"seed\"] = seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" }, "input_value": { "advanced": false, @@ -754,6 +761,21 @@ "type": "str", "value": "" }, + "json_mode": { + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": 
false + }, "max_tokens": { "advanced": true, "display_name": "Max Tokens", @@ -827,7 +849,7 @@ "dynamic": false, "info": "The OpenAI API Key to use for the OpenAI model.", "input_types": [], - "load_from_db": false, + "load_from_db": true, "name": "openai_api_key", "password": true, "placeholder": "", @@ -835,7 +857,7 @@ "show": true, "title_case": false, "type": "str", - "value": "" + "value": "OPENAI_API_KEY" }, "output_schema": { "advanced": true, @@ -915,11 +937,11 @@ } } }, - "type": "OpenAIModel" + "type": "OpenAIModelComponent" }, "dragging": false, "height": 621, - "id": "OpenAIModel-WmUtU", + "id": "OpenAIModel-uNcAU", "position": { "x": 2428.0215346784357, "y": 569.9683144303319 @@ -934,7 +956,7 @@ }, { "data": { - "id": "ChatOutput-LIvGN", + "id": "ChatOutput-KtSB9", "node": { "base_classes": [ "Message" @@ -1094,7 +1116,7 @@ }, "dragging": false, "height": 308, - "id": "ChatOutput-LIvGN", + "id": "ChatOutput-KtSB9", "position": { "x": 2988.248820475989, "y": 705.837390387878 @@ -1116,8 +1138,8 @@ }, "description": "This project can be used as a starting point for building a Chat experience with user specific memory. You can set a different Session ID to start a new message history.", "endpoint_name": null, - "id": "2a47bc35-69ca-4d8b-9895-2a7fab222b9f", + "id": "16c029a0-0d89-4c36-8a8c-e5410206df38", "is_component": false, - "last_tested_version": "1.0.0rc1", + "last_tested_version": "1.0.5", "name": "Memory Chatbot" } \ No newline at end of file diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Vector Store RAG.json b/src/backend/base/langflow/initial_setup/starter_projects/Vector Store RAG.json index ae6115c674d..7890fe16421 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Vector Store RAG.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Vector Store RAG.json @@ -2,10 +2,11 @@ "data": { "edges": [ { + "className": "", "data": { "sourceHandle": { "dataType": "ChatInput", - "id": "ChatInput-c4xn9", + "id": "ChatInput-tuEeg", "name": "message", "output_types": [ "Message" @@ -13,50 +14,25 @@ }, "targetHandle": { "fieldName": "search_input", - "id": "AstraDB-7nAHJ", + "id": "AstraDB-xVF1f", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-ChatInput-c4xn9{œdataTypeœ:œChatInputœ,œidœ:œChatInput-c4xn9œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-AstraDB-7nAHJ{œfieldNameœ:œsearch_inputœ,œidœ:œAstraDB-7nAHJœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "ChatInput-c4xn9", - "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-c4xn9œ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", - "target": "AstraDB-7nAHJ", - "targetHandle": "{œfieldNameœ: œsearch_inputœ, œidœ: œAstraDB-7nAHJœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" - }, - { - "data": { - "sourceHandle": { - "dataType": "OpenAIEmbeddings", - "id": "OpenAIEmbeddings-BKXc6", - "name": "embeddings", - "output_types": [ - "Embeddings" - ] - }, - "targetHandle": { - "fieldName": "embedding", - "id": "AstraDB-7nAHJ", - "inputTypes": [ - "Embeddings", - "dict" - ], - "type": "other" - } - }, - "id": "reactflow__edge-OpenAIEmbeddings-BKXc6{œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-BKXc6œ,œnameœ:œembeddingsœ,œoutput_typesœ:[œEmbeddingsœ]}-AstraDB-7nAHJ{œfieldNameœ:œembeddingœ,œidœ:œAstraDB-7nAHJœ,œinputTypesœ:[œEmbeddingsœ,œdictœ],œtypeœ:œotherœ}", - "source": "OpenAIEmbeddings-BKXc6", - "sourceHandle": "{œdataTypeœ: œOpenAIEmbeddingsœ, œidœ: œOpenAIEmbeddings-BKXc6œ, œnameœ: œembeddingsœ, œoutput_typesœ: [œEmbeddingsœ]}", - 
"target": "AstraDB-7nAHJ", - "targetHandle": "{œfieldNameœ: œembeddingœ, œidœ: œAstraDB-7nAHJœ, œinputTypesœ: [œEmbeddingsœ, œdictœ], œtypeœ: œotherœ}" + "id": "reactflow__edge-ChatInput-tuEeg{œdataTypeœ:œChatInputœ,œidœ:œChatInput-tuEegœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-AstraDB-xVF1f{œfieldNameœ:œsearch_inputœ,œidœ:œAstraDB-xVF1fœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "ChatInput-tuEeg", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-tuEegœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "AstraDB-xVF1f", + "targetHandle": "{œfieldNameœ: œsearch_inputœ, œidœ: œAstraDB-xVF1fœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "AstraDB", - "id": "AstraDB-7nAHJ", + "id": "AstraDB-xVF1f", "name": "search_results", "output_types": [ "Data" @@ -64,24 +40,25 @@ }, "targetHandle": { "fieldName": "data", - "id": "ParseData-d61Q0", + "id": "ParseData-ZG3Aa", "inputTypes": [ "Data" ], "type": "other" } }, - "id": "reactflow__edge-AstraDB-7nAHJ{œdataTypeœ:œAstraDBœ,œidœ:œAstraDB-7nAHJœ,œnameœ:œsearch_resultsœ,œoutput_typesœ:[œDataœ]}-ParseData-d61Q0{œfieldNameœ:œdataœ,œidœ:œParseData-d61Q0œ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", - "source": "AstraDB-7nAHJ", - "sourceHandle": "{œdataTypeœ: œAstraDBœ, œidœ: œAstraDB-7nAHJœ, œnameœ: œsearch_resultsœ, œoutput_typesœ: [œDataœ]}", - "target": "ParseData-d61Q0", - "targetHandle": "{œfieldNameœ: œdataœ, œidœ: œParseData-d61Q0œ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" + "id": "reactflow__edge-AstraDB-xVF1f{œdataTypeœ:œAstraDBœ,œidœ:œAstraDB-xVF1fœ,œnameœ:œsearch_resultsœ,œoutput_typesœ:[œDataœ]}-ParseData-ZG3Aa{œfieldNameœ:œdataœ,œidœ:œParseData-ZG3Aaœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "source": "AstraDB-xVF1f", + "sourceHandle": "{œdataTypeœ: œAstraDBœ, œidœ: œAstraDB-xVF1fœ, œnameœ: œsearch_resultsœ, œoutput_typesœ: [œDataœ]}", + "target": "ParseData-ZG3Aa", + "targetHandle": "{œfieldNameœ: œdataœ, œidœ: œParseData-ZG3Aaœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "ParseData", - "id": "ParseData-d61Q0", + "id": "ParseData-ZG3Aa", "name": "text", "output_types": [ "Message" @@ -89,7 +66,7 @@ }, "targetHandle": { "fieldName": "context", - "id": "Prompt-vqAlG", + "id": "Prompt-0Hp9v", "inputTypes": [ "Message", "Text" @@ -97,17 +74,18 @@ "type": "str" } }, - "id": "reactflow__edge-ParseData-d61Q0{œdataTypeœ:œParseDataœ,œidœ:œParseData-d61Q0œ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-vqAlG{œfieldNameœ:œcontextœ,œidœ:œPrompt-vqAlGœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ParseData-d61Q0", - "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-d61Q0œ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-vqAlG", - "targetHandle": "{œfieldNameœ: œcontextœ, œidœ: œPrompt-vqAlGœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-ParseData-ZG3Aa{œdataTypeœ:œParseDataœ,œidœ:œParseData-ZG3Aaœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-0Hp9v{œfieldNameœ:œcontextœ,œidœ:œPrompt-0Hp9vœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ParseData-ZG3Aa", + "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-ZG3Aaœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-0Hp9v", + "targetHandle": "{œfieldNameœ: œcontextœ, œidœ: œPrompt-0Hp9vœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "ChatInput", - "id": 
"ChatInput-c4xn9", + "id": "ChatInput-tuEeg", "name": "message", "output_types": [ "Message" @@ -115,7 +93,7 @@ }, "targetHandle": { "fieldName": "question", - "id": "Prompt-vqAlG", + "id": "Prompt-0Hp9v", "inputTypes": [ "Message", "Text" @@ -123,17 +101,18 @@ "type": "str" } }, - "id": "reactflow__edge-ChatInput-c4xn9{œdataTypeœ:œChatInputœ,œidœ:œChatInput-c4xn9œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-vqAlG{œfieldNameœ:œquestionœ,œidœ:œPrompt-vqAlGœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ChatInput-c4xn9", - "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-c4xn9œ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-vqAlG", - "targetHandle": "{œfieldNameœ: œquestionœ, œidœ: œPrompt-vqAlGœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-ChatInput-tuEeg{œdataTypeœ:œChatInputœ,œidœ:œChatInput-tuEegœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-0Hp9v{œfieldNameœ:œquestionœ,œidœ:œPrompt-0Hp9vœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ChatInput-tuEeg", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-tuEegœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-0Hp9v", + "targetHandle": "{œfieldNameœ: œquestionœ, œidœ: œPrompt-0Hp9vœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "Prompt", - "id": "Prompt-vqAlG", + "id": "Prompt-0Hp9v", "name": "prompt", "output_types": [ "Message" @@ -141,99 +120,127 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "OpenAIModel-ybL3k", + "id": "OpenAIModel-BQXFs", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-Prompt-vqAlG{œdataTypeœ:œPromptœ,œidœ:œPrompt-vqAlGœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-ybL3k{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-ybL3kœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "Prompt-vqAlG", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-vqAlGœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "OpenAIModel-ybL3k", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-ybL3kœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-Prompt-0Hp9v{œdataTypeœ:œPromptœ,œidœ:œPrompt-0Hp9vœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-BQXFs{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-BQXFsœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-0Hp9v", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-0Hp9vœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-BQXFs", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-BQXFsœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { + "className": "", "data": { "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-ybL3k", - "name": "text_output", + "dataType": "File", + "id": "File-BTJVJ", + "name": "data", "output_types": [ - "Message" + "Data" ] }, "targetHandle": { - "fieldName": "input_value", - "id": "ChatOutput-BpzuD", + "fieldName": "data_inputs", + "id": "SplitText-RkdZ3", "inputTypes": [ - "Message" + "Data" ], - "type": "str" + "type": "other" } }, - "id": "reactflow__edge-OpenAIModel-ybL3k{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-ybL3kœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-BpzuD{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-BpzuDœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "OpenAIModel-ybL3k", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-ybL3kœ, 
œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "ChatOutput-BpzuD", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-BpzuDœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-File-BTJVJ{œdataTypeœ:œFileœ,œidœ:œFile-BTJVJœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-SplitText-RkdZ3{œfieldNameœ:œdata_inputsœ,œidœ:œSplitText-RkdZ3œ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "source": "File-BTJVJ", + "sourceHandle": "{œdataTypeœ: œFileœ, œidœ: œFile-BTJVJœ, œnameœ: œdataœ, œoutput_typesœ: [œDataœ]}", + "target": "SplitText-RkdZ3", + "targetHandle": "{œfieldNameœ: œdata_inputsœ, œidœ: œSplitText-RkdZ3œ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" }, { + "className": "", "data": { "sourceHandle": { - "dataType": "File", - "id": "File-bf6wn", - "name": "data", + "dataType": "SplitText", + "id": "SplitText-RkdZ3", + "name": "chunks", "output_types": [ "Data" ] }, "targetHandle": { - "fieldName": "data_inputs", - "id": "SplitText-52wBo", + "fieldName": "ingest_data", + "id": "AstraDB-XXizY", "inputTypes": [ "Data" ], "type": "other" } }, - "id": "reactflow__edge-File-bf6wn{œdataTypeœ:œFileœ,œidœ:œFile-bf6wnœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-SplitText-52wBo{œfieldNameœ:œdata_inputsœ,œidœ:œSplitText-52wBoœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", - "source": "File-bf6wn", - "sourceHandle": "{œdataTypeœ: œFileœ, œidœ: œFile-bf6wnœ, œnameœ: œdataœ, œoutput_typesœ: [œDataœ]}", - "target": "SplitText-52wBo", - "targetHandle": "{œfieldNameœ: œdata_inputsœ, œidœ: œSplitText-52wBoœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" + "id": "reactflow__edge-SplitText-RkdZ3{œdataTypeœ:œSplitTextœ,œidœ:œSplitText-RkdZ3œ,œnameœ:œchunksœ,œoutput_typesœ:[œDataœ]}-AstraDB-XXizY{œfieldNameœ:œingest_dataœ,œidœ:œAstraDB-XXizYœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "source": "SplitText-RkdZ3", + "sourceHandle": "{œdataTypeœ: œSplitTextœ, œidœ: œSplitText-RkdZ3œ, œnameœ: œchunksœ, œoutput_typesœ: [œDataœ]}", + "target": "AstraDB-XXizY", + "targetHandle": "{œfieldNameœ: œingest_dataœ, œidœ: œAstraDB-XXizYœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" }, { "data": { "sourceHandle": { - "dataType": "SplitText", - "id": "SplitText-52wBo", - "name": "chunks", + "dataType": "OpenAIModelComponent", + "id": "OpenAIModel-BQXFs", + "name": "text_output", "output_types": [ - "Data" + "Message" ] }, "targetHandle": { - "fieldName": "ingest_data", - "id": "AstraDB-vyd5U", + "fieldName": "input_value", + "id": "ChatOutput-fDyGT", "inputTypes": [ - "Data" + "Message" + ], + "type": "str" + } + }, + "id": "reactflow__edge-OpenAIModel-BQXFs{œdataTypeœ:œOpenAIModelComponentœ,œidœ:œOpenAIModel-BQXFsœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-fDyGT{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-fDyGTœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-BQXFs", + "sourceHandle": "{œdataTypeœ: œOpenAIModelComponentœ, œidœ: œOpenAIModel-BQXFsœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-fDyGT", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-fDyGTœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "data": { + "sourceHandle": { + "dataType": "OpenAIEmbeddings", + "id": "OpenAIEmbeddings-fpOKp", + "name": "embeddings", + "output_types": [ + "Embeddings" + ] + }, + "targetHandle": { + "fieldName": "embedding", + "id": "AstraDB-XXizY", + "inputTypes": [ + "Embeddings", + "dict" ], "type": "other" } }, - "id": 
"reactflow__edge-SplitText-52wBo{œdataTypeœ:œSplitTextœ,œidœ:œSplitText-52wBoœ,œnameœ:œchunksœ,œoutput_typesœ:[œDataœ]}-AstraDB-vyd5U{œfieldNameœ:œingest_dataœ,œidœ:œAstraDB-vyd5Uœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", - "source": "SplitText-52wBo", - "sourceHandle": "{œdataTypeœ: œSplitTextœ, œidœ: œSplitText-52wBoœ, œnameœ: œchunksœ, œoutput_typesœ: [œDataœ]}", - "target": "AstraDB-vyd5U", - "targetHandle": "{œfieldNameœ: œingest_dataœ, œidœ: œAstraDB-vyd5Uœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" + "id": "reactflow__edge-OpenAIEmbeddings-fpOKp{œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-fpOKpœ,œnameœ:œembeddingsœ,œoutput_typesœ:[œEmbeddingsœ]}-AstraDB-XXizY{œfieldNameœ:œembeddingœ,œidœ:œAstraDB-XXizYœ,œinputTypesœ:[œEmbeddingsœ,œdictœ],œtypeœ:œotherœ}", + "source": "OpenAIEmbeddings-fpOKp", + "sourceHandle": "{œdataTypeœ: œOpenAIEmbeddingsœ, œidœ: œOpenAIEmbeddings-fpOKpœ, œnameœ: œembeddingsœ, œoutput_typesœ: [œEmbeddingsœ]}", + "target": "AstraDB-XXizY", + "targetHandle": "{œfieldNameœ: œembeddingœ, œidœ: œAstraDB-XXizYœ, œinputTypesœ: [œEmbeddingsœ, œdictœ], œtypeœ: œotherœ}" }, { "data": { "sourceHandle": { "dataType": "OpenAIEmbeddings", - "id": "OpenAIEmbeddings-sRZMc", + "id": "OpenAIEmbeddings-lCQlU", "name": "embeddings", "output_types": [ "Embeddings" @@ -241,7 +248,7 @@ }, "targetHandle": { "fieldName": "embedding", - "id": "AstraDB-vyd5U", + "id": "AstraDB-xVF1f", "inputTypes": [ "Embeddings", "dict" @@ -249,17 +256,17 @@ "type": "other" } }, - "id": "reactflow__edge-OpenAIEmbeddings-sRZMc{œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-sRZMcœ,œnameœ:œembeddingsœ,œoutput_typesœ:[œEmbeddingsœ]}-AstraDB-vyd5U{œfieldNameœ:œembeddingœ,œidœ:œAstraDB-vyd5Uœ,œinputTypesœ:[œEmbeddingsœ,œdictœ],œtypeœ:œotherœ}", - "source": "OpenAIEmbeddings-sRZMc", - "sourceHandle": "{œdataTypeœ: œOpenAIEmbeddingsœ, œidœ: œOpenAIEmbeddings-sRZMcœ, œnameœ: œembeddingsœ, œoutput_typesœ: [œEmbeddingsœ]}", - "target": "AstraDB-vyd5U", - "targetHandle": "{œfieldNameœ: œembeddingœ, œidœ: œAstraDB-vyd5Uœ, œinputTypesœ: [œEmbeddingsœ, œdictœ], œtypeœ: œotherœ}" + "id": "reactflow__edge-OpenAIEmbeddings-lCQlU{œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-lCQlUœ,œnameœ:œembeddingsœ,œoutput_typesœ:[œEmbeddingsœ]}-AstraDB-xVF1f{œfieldNameœ:œembeddingœ,œidœ:œAstraDB-xVF1fœ,œinputTypesœ:[œEmbeddingsœ,œdictœ],œtypeœ:œotherœ}", + "source": "OpenAIEmbeddings-lCQlU", + "sourceHandle": "{œdataTypeœ: œOpenAIEmbeddingsœ, œidœ: œOpenAIEmbeddings-lCQlUœ, œnameœ: œembeddingsœ, œoutput_typesœ: [œEmbeddingsœ]}", + "target": "AstraDB-xVF1f", + "targetHandle": "{œfieldNameœ: œembeddingœ, œidœ: œAstraDB-xVF1fœ, œinputTypesœ: [œEmbeddingsœ, œdictœ], œtypeœ: œotherœ}" } ], "nodes": [ { "data": { - "id": "ChatInput-c4xn9", + "id": "ChatInput-tuEeg", "node": { "base_classes": [ "Message" @@ -442,7 +449,7 @@ }, "dragging": false, "height": 308, - "id": "ChatInput-c4xn9", + "id": "ChatInput-tuEeg", "position": { "x": 642.3545710150049, "y": 220.22556606238678 @@ -457,7 +464,7 @@ }, { "data": { - "id": "AstraDB-7nAHJ", + "id": "AstraDB-xVF1f", "node": { "base_classes": [ "Data", @@ -863,7 +870,7 @@ }, "dragging": false, "height": 753, - "id": "AstraDB-7nAHJ", + "id": "AstraDB-xVF1f", "position": { "x": 1246.0381406498648, "y": 333.25157075413966 @@ -878,53 +885,36 @@ }, { "data": { - "id": "OpenAIEmbeddings-BKXc6", + "id": "ParseData-ZG3Aa", "node": { "base_classes": [ - "Embeddings" + "Message" ], "beta": false, "conditional_paths": [], "custom_fields": {}, - "description": "Generate embeddings using OpenAI 
models.", - "display_name": "OpenAI Embeddings", + "description": "Convert Data into plain text following a specified template.", + "display_name": "Parse Data", "documentation": "", "edited": false, "field_order": [ - "default_headers", - "default_query", - "chunk_size", - "client", - "deployment", - "embedding_ctx_length", - "max_retries", - "model", - "model_kwargs", - "openai_api_base", - "openai_api_key", - "openai_api_type", - "openai_api_version", - "openai_organization", - "openai_proxy", - "request_timeout", - "show_progress_bar", - "skip_empty", - "tiktoken_model_name", - "tiktoken_enable" + "data", + "template", + "sep" ], "frozen": false, - "icon": "OpenAI", + "icon": "braces", "output_types": [], "outputs": [ { "cache": true, - "display_name": "Embeddings", + "display_name": "Text", "hidden": false, - "method": "build_embeddings", - "name": "embeddings", - "selected": "Embeddings", + "method": "parse_data", + "name": "text", + "selected": "Message", "types": [ - "Embeddings" + "Message" ], "value": "__UNDEFINED__" } @@ -932,41 +922,6 @@ "pinned": false, "template": { "_type": "Component", - "chunk_size": { - "advanced": true, - "display_name": "Chunk Size", - "dynamic": false, - "info": "", - "list": false, - "name": "chunk_size", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": 1000 - }, - "client": { - "advanced": true, - "display_name": "Client", - "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "client", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, "code": { "advanced": true, "dynamic": true, @@ -983,147 +938,192 @@ "show": true, "title_case": false, "type": "code", - "value": "from langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.base.embeddings.model import LCEmbeddingsModel\nfrom langflow.field_typing import Embeddings\nfrom langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput\n\n\nclass OpenAIEmbeddingsComponent(LCEmbeddingsModel):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n icon = \"OpenAI\"\n inputs = [\n DictInput(\n name=\"default_headers\",\n display_name=\"Default Headers\",\n advanced=True,\n info=\"Default headers to use for the API request.\",\n ),\n DictInput(\n name=\"default_query\",\n display_name=\"Default Query\",\n advanced=True,\n info=\"Default query parameters to use for the API request.\",\n ),\n IntInput(name=\"chunk_size\", display_name=\"Chunk Size\", advanced=True, value=1000),\n MessageTextInput(name=\"client\", display_name=\"Client\", advanced=True),\n MessageTextInput(name=\"deployment\", display_name=\"Deployment\", advanced=True),\n IntInput(name=\"embedding_ctx_length\", display_name=\"Embedding Context Length\", advanced=True, value=1536),\n IntInput(name=\"max_retries\", display_name=\"Max Retries\", value=3, advanced=True),\n DropdownInput(\n name=\"model\",\n display_name=\"Model\",\n advanced=False,\n options=[\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n value=\"text-embedding-3-small\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n SecretStrInput(name=\"openai_api_base\", display_name=\"OpenAI API Base\", 
advanced=True),\n SecretStrInput(name=\"openai_api_key\", display_name=\"OpenAI API Key\"),\n SecretStrInput(name=\"openai_api_type\", display_name=\"OpenAI API Type\", advanced=True),\n MessageTextInput(name=\"openai_api_version\", display_name=\"OpenAI API Version\", advanced=True),\n MessageTextInput(\n name=\"openai_organization\",\n display_name=\"OpenAI Organization\",\n advanced=True,\n ),\n MessageTextInput(name=\"openai_proxy\", display_name=\"OpenAI Proxy\", advanced=True),\n FloatInput(name=\"request_timeout\", display_name=\"Request Timeout\", advanced=True),\n BoolInput(name=\"show_progress_bar\", display_name=\"Show Progress Bar\", advanced=True),\n BoolInput(name=\"skip_empty\", display_name=\"Skip Empty\", advanced=True),\n MessageTextInput(\n name=\"tiktoken_model_name\",\n display_name=\"TikToken Model Name\",\n advanced=True,\n ),\n BoolInput(\n name=\"tiktoken_enable\",\n display_name=\"TikToken Enable\",\n advanced=True,\n value=True,\n info=\"If False, you must have transformers installed.\",\n ),\n ]\n\n def build_embeddings(self) -> Embeddings:\n return OpenAIEmbeddings(\n tiktoken_enabled=self.tiktoken_enable,\n default_headers=self.default_headers,\n default_query=self.default_query,\n allowed_special=\"all\",\n disallowed_special=\"all\",\n chunk_size=self.chunk_size,\n deployment=self.deployment,\n embedding_ctx_length=self.embedding_ctx_length,\n max_retries=self.max_retries,\n model=self.model,\n model_kwargs=self.model_kwargs,\n base_url=self.openai_api_base,\n api_key=self.openai_api_key,\n openai_api_type=self.openai_api_type,\n api_version=self.openai_api_version,\n organization=self.openai_organization,\n openai_proxy=self.openai_proxy,\n timeout=self.request_timeout or None,\n show_progress_bar=self.show_progress_bar,\n skip_empty=self.skip_empty,\n tiktoken_model_name=self.tiktoken_model_name,\n )\n" - }, - "default_headers": { - "advanced": true, - "display_name": "Default Headers", - "dynamic": false, - "info": "Default headers to use for the API request.", - "list": false, - "name": "default_headers", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "default_query": { - "advanced": true, - "display_name": "Default Query", - "dynamic": false, - "info": "Default query parameters to use for the API request.", - "list": false, - "name": "default_query", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} + "value": "from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DataInput, MultilineInput, Output, StrInput\nfrom langflow.schema.message import Message\n\n\nclass ParseDataComponent(Component):\n display_name = \"Parse Data\"\n description = \"Convert Data into plain text following a specified template.\"\n icon = \"braces\"\n\n inputs = [\n DataInput(name=\"data\", display_name=\"Data\", info=\"The data to convert to text.\"),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. 
It can contain the keys {text}, {data} or any other key in the Data.\",\n value=\"{text}\",\n ),\n StrInput(name=\"sep\", display_name=\"Separator\", advanced=True, value=\"\\n\"),\n ]\n\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"parse_data\"),\n ]\n\n def parse_data(self) -> Message:\n data = self.data if isinstance(self.data, list) else [self.data]\n template = self.template\n\n result_string = data_to_text(template, data, sep=self.sep)\n self.status = result_string\n return Message(text=result_string)\n" }, - "deployment": { - "advanced": true, - "display_name": "Deployment", + "data": { + "advanced": false, + "display_name": "Data", "dynamic": false, - "info": "", + "info": "The data to convert to text.", "input_types": [ - "Message" + "Data" ], "list": false, - "load_from_db": false, - "name": "deployment", + "name": "data", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_input": true, "trace_as_metadata": true, - "type": "str", + "type": "other", "value": "" }, - "embedding_ctx_length": { - "advanced": true, - "display_name": "Embedding Context Length", - "dynamic": false, - "info": "", - "list": false, - "name": "embedding_ctx_length", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": 1536 - }, - "max_retries": { + "sep": { "advanced": true, - "display_name": "Max Retries", + "display_name": "Separator", "dynamic": false, "info": "", "list": false, - "name": "max_retries", + "load_from_db": false, + "name": "sep", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "int", - "value": 3 + "type": "str", + "value": "\n" }, - "model": { + "template": { "advanced": false, - "display_name": "Model", + "display_name": "Template", "dynamic": false, - "info": "", - "name": "model", - "options": [ - "text-embedding-3-small", - "text-embedding-3-large", - "text-embedding-ada-002" + "info": "The template to use for formatting the data. 
It can contain the keys {text}, {data} or any other key in the Data.", + "input_types": [ + "Message" ], - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "text-embedding-3-small" - }, - "model_kwargs": { - "advanced": true, - "display_name": "Model Kwargs", - "dynamic": false, - "info": "", "list": false, - "name": "model_kwargs", + "load_from_db": false, + "multiline": true, + "name": "template", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_input": true, - "type": "dict", - "value": {} - }, - "openai_api_base": { - "advanced": true, - "display_name": "OpenAI API Base", - "dynamic": false, - "info": "", - "input_types": [], - "load_from_db": true, - "name": "openai_api_base", - "password": true, - "placeholder": "", - "required": false, + "trace_as_metadata": true, + "type": "str", + "value": "{text}" + } + } + }, + "type": "ParseData" + }, + "dragging": false, + "height": 384, + "id": "ParseData-ZG3Aa", + "position": { + "x": 1854.1518317915907, + "y": 459.3386924128532 + }, + "positionAbsolute": { + "x": 1854.1518317915907, + "y": 459.3386924128532 + }, + "selected": false, + "type": "genericNode", + "width": 384 + }, + { + "data": { + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-0Hp9v", + "node": { + "base_classes": [ + "Message" + ], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": [ + "context", + "question" + ] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "error": null, + "field_order": [ + "template" + ], + "frozen": false, + "full_path": null, + "icon": "prompts", + "is_composition": null, + "is_input": null, + "is_output": null, + "name": "", + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Prompt Message", + "hidden": false, + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": [ + "Message" + ], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def post_code_processing(self, new_build_config: dict, current_build_config: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = 
super().post_code_processing(new_build_config, current_build_config)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_build_config\n # and update the frontend_node with those values\n update_template_values(frontend_template=frontend_node, raw_template=current_build_config[\"template\"])\n return frontend_node\n" + }, + "context": { + "advanced": false, + "display_name": "context", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": [ + "Message", + "Text" + ], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "context", + "password": false, + "placeholder": "", + "required": false, "show": true, "title_case": false, "type": "str", "value": "" }, - "openai_api_key": { + "question": { "advanced": false, - "display_name": "OpenAI API Key", + "display_name": "question", "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", "info": "", - "input_types": [], + "input_types": [ + "Message", + "Text" + ], + "list": false, "load_from_db": false, - "name": "openai_api_key", - "password": true, + "multiline": true, + "name": "question", + "password": false, "placeholder": "", "required": false, "show": true, @@ -1131,73 +1131,403 @@ "type": "str", "value": "" }, - "openai_api_type": { + "template": { + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "prompt", + "value": "{context}\n\n---\n\nGiven the context above, answer the question as best as possible.\n\nQuestion: {question}\n\nAnswer: " + } + } + }, + "type": "Prompt" + }, + "dragging": false, + "height": 515, + "id": "Prompt-0Hp9v", + "position": { + "x": 2486.0988668404975, + "y": 496.5120474157301 + }, + "positionAbsolute": { + "x": 2486.0988668404975, + "y": 496.5120474157301 + }, + "selected": false, + "type": "genericNode", + "width": 384 + }, + { + "data": { + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "edited": false, + "id": "OpenAIModel-BQXFs", + "node": { + "base_classes": [ + "LanguageModel", + "Message" + ], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "documentation": "", + "edited": true, + "field_order": [ + "input_value", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "openai_api_key", + "temperature", + "stream", + "system_message", + "seed" + ], + "frozen": false, + "icon": "OpenAI", + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "hidden": false, + "method": "text_response", + "name": "text_output", + "selected": "Message", + "types": [ + "Message" + ], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Language Model", + "method": "build_model", + "name": "model_output", + "selected": "LanguageModel", + "types": [ + "LanguageModel" + ], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { "advanced": true, - "display_name": "OpenAI API 
Type", + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n inputs = [\n MessageInput(name=\"input_value\", display_name=\"Input\"),\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\", display_name=\"Model Name\", advanced=False, options=MODEL_NAMES, value=MODEL_NAMES[0]\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"openai_api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n BoolInput(name=\"stream\", display_name=\"Stream\", info=STREAM_INFO_TEXT, advanced=True),\n StrInput(\n name=\"system_message\",\n display_name=\"System Message\",\n info=\"System message to pass to the model.\",\n advanced=True,\n ),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel:\n # self.output_schea is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.openai_api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n model_kwargs[\"seed\"] = seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + }, + "input_value": { + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "", + "input_types": [ + "Message" + ], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "json_mode": { + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_tokens": { + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "model_kwargs": { + "advanced": true, + "display_name": "Model Kwargs", "dynamic": false, "info": "", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "advanced": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4-turbo" + }, + "openai_api_base": { + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "openai_api_key": { + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", "input_types": [], "load_from_db": true, - "name": "openai_api_type", + "name": "openai_api_key", "password": true, "placeholder": "", "required": false, "show": true, "title_case": false, "type": "str", - "value": "" + "value": "OPENAI_API_KEY" + }, + "output_schema": { + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "stream": { + "advanced": true, + "display_name": "Stream", + "dynamic": false, + "info": "Stream the response from the model. 
Streaming works only in Chat.", + "list": false, + "name": "stream", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false }, - "openai_api_version": { + "system_message": { "advanced": true, - "display_name": "OpenAI API Version", + "display_name": "System Message", "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], + "info": "System message to pass to the model.", "list": false, "load_from_db": false, - "name": "openai_api_version", + "name": "system_message", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_input": true, "trace_as_metadata": true, "type": "str", "value": "" }, - "openai_organization": { - "advanced": true, - "display_name": "OpenAI Organization", + "temperature": { + "advanced": false, + "display_name": "Temperature", "dynamic": false, "info": "", - "input_types": [ + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + } + } + }, + "type": "OpenAIModelComponent" + }, + "dragging": false, + "height": 621, + "id": "OpenAIModel-BQXFs", + "position": { + "x": 3145.6693008609222, + "y": 374.23955005474204 + }, + "positionAbsolute": { + "x": 3145.6693008609222, + "y": 374.23955005474204 + }, + "selected": false, + "type": "genericNode", + "width": 384 + }, + { + "data": { + "id": "ChatOutput-fDyGT", + "node": { + "base_classes": [ + "Message" + ], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "sender", + "sender_name", + "session_id", + "data_template" + ], + "frozen": false, + "icon": "ChatOutput", + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": [ "Message" ], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", "list": false, "load_from_db": false, - "name": "openai_organization", + "multiline": true, + "name": "code", + "password": false, "placeholder": "", - "required": false, + "required": true, "show": true, "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" + "type": "code", + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"Machine\",\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\", display_name=\"Sender Name\", info=\"Name of the sender.\", value=\"AI\", advanced=True\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n MessageTextInput(\n 
name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n self.store_message(message)\n self.message.value = message\n\n self.status = message\n return message\n" }, - "openai_proxy": { + "data_template": { "advanced": true, - "display_name": "OpenAI Proxy", + "display_name": "Data Template", "dynamic": false, - "info": "", + "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", "input_types": [ "Message" ], "list": false, "load_from_db": false, - "name": "openai_proxy", + "name": "data_template", "placeholder": "", "required": false, "show": true, @@ -1205,79 +1535,77 @@ "trace_as_input": true, "trace_as_metadata": true, "type": "str", - "value": "" + "value": "{text}" }, - "request_timeout": { - "advanced": true, - "display_name": "Request Timeout", + "input_value": { + "advanced": false, + "display_name": "Text", "dynamic": false, - "info": "", + "info": "Message to be passed as output.", + "input_types": [ + "Message" + ], "list": false, - "name": "request_timeout", + "load_from_db": false, + "name": "input_value", "placeholder": "", "required": false, "show": true, "title_case": false, + "trace_as_input": true, "trace_as_metadata": true, - "type": "float", + "type": "str", "value": "" }, - "show_progress_bar": { - "advanced": true, - "display_name": "Show Progress Bar", - "dynamic": false, - "info": "", - "list": false, - "name": "show_progress_bar", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "skip_empty": { + "sender": { "advanced": true, - "display_name": "Skip Empty", + "display_name": "Sender Type", "dynamic": false, - "info": "", - "list": false, - "name": "skip_empty", + "info": "Type of sender.", + "name": "sender", + "options": [ + "Machine", + "User" + ], "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "bool", - "value": false + "type": "str", + "value": "Machine" }, - "tiktoken_enable": { + "sender_name": { "advanced": true, - "display_name": "TikToken Enable", + "display_name": "Sender Name", "dynamic": false, - "info": "If False, you must have transformers installed.", + "info": "Name of the sender.", + "input_types": [ + "Message" + ], "list": false, - "name": "tiktoken_enable", + "load_from_db": false, + "name": "sender_name", "placeholder": "", "required": false, "show": true, "title_case": false, + "trace_as_input": true, "trace_as_metadata": true, - "type": "bool", - "value": true + "type": "str", + "value": "AI" }, - "tiktoken_model_name": { + "session_id": { "advanced": true, - "display_name": "TikToken Model Name", + "display_name": "Session ID", "dynamic": false, - "info": "", + "info": "Session ID for the message.", "input_types": [ "Message" ], "list": false, "load_from_db": false, - "name": "tiktoken_model_name", + "name": "session_id", "placeholder": "", "required": false, "show": true, @@ -1289,18 +1617,18 @@ } } }, - 
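For reference, the ChatOutput component source embedded above does little more than wrap its inputs in a langflow Message and, when a session_id is present, persist it via store_message(). A minimal standalone sketch with illustrative values (assumes a langflow install):

from langflow.schema.message import Message

# Mirror what message_response() constructs (values are made up):
message = Message(
    text="Hello, world!",
    sender="Machine",
    sender_name="AI",
    session_id="demo-session",
)
# When session_id is set, the component additionally calls
# self.store_message(message) before returning the Message.
print(message.text)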
"type": "OpenAIEmbeddings" + "type": "ChatOutput" }, "dragging": false, - "height": 394, - "id": "OpenAIEmbeddings-BKXc6", + "height": 308, + "id": "ChatOutput-fDyGT", "position": { - "x": 603.2488770584523, - "y": 661.6162066128852 + "x": 3769.242086248817, + "y": 585.3403837062634 }, "positionAbsolute": { - "x": 603.2488770584523, - "y": 661.6162066128852 + "x": 3769.242086248817, + "y": 585.3403837062634 }, "selected": false, "type": "genericNode", @@ -1308,36 +1636,37 @@ }, { "data": { - "id": "ParseData-d61Q0", + "id": "SplitText-RkdZ3", "node": { "base_classes": [ - "Message" + "Data" ], "beta": false, "conditional_paths": [], "custom_fields": {}, - "description": "Convert Data into plain text following a specified template.", - "display_name": "Parse Data", + "description": "Split text into chunks based on specified criteria.", + "display_name": "Split Text", "documentation": "", "edited": false, "field_order": [ - "data", - "template", - "sep" + "data_inputs", + "chunk_overlap", + "chunk_size", + "separator" ], "frozen": false, - "icon": "braces", + "icon": "scissors-line-dashed", "output_types": [], "outputs": [ { "cache": true, - "display_name": "Text", + "display_name": "Chunks", "hidden": false, - "method": "parse_data", - "name": "text", - "selected": "Message", + "method": "split_text", + "name": "chunks", + "selected": "Data", "types": [ - "Message" + "Data" ], "value": "__UNDEFINED__" } @@ -1345,6 +1674,36 @@ "pinned": false, "template": { "_type": "Component", + "chunk_overlap": { + "advanced": false, + "display_name": "Chunk Overlap", + "dynamic": false, + "info": "Number of characters to overlap between chunks.", + "list": false, + "name": "chunk_overlap", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 200 + }, + "chunk_size": { + "advanced": false, + "display_name": "Chunk Size", + "dynamic": false, + "info": "The maximum number of characters in each chunk.", + "list": false, + "name": "chunk_size", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1000 + }, "code": { "advanced": true, "dynamic": true, @@ -1361,55 +1720,37 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DataInput, MultilineInput, Output, StrInput\nfrom langflow.schema.message import Message\n\n\nclass ParseDataComponent(Component):\n display_name = \"Parse Data\"\n description = \"Convert Data into plain text following a specified template.\"\n icon = \"braces\"\n\n inputs = [\n DataInput(name=\"data\", display_name=\"Data\", info=\"The data to convert to text.\"),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. 
It can contain the keys {text}, {data} or any other key in the Data.\",\n value=\"{text}\",\n ),\n StrInput(name=\"sep\", display_name=\"Separator\", advanced=True, value=\"\\n\"),\n ]\n\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"parse_data\"),\n ]\n\n def parse_data(self) -> Message:\n data = self.data if isinstance(self.data, list) else [self.data]\n template = self.template\n\n result_string = data_to_text(template, data, sep=self.sep)\n self.status = result_string\n return Message(text=result_string)\n" + "value": "from typing import List\n\nfrom langchain_text_splitters import CharacterTextSplitter\n\nfrom langflow.custom import Component\nfrom langflow.io import HandleInput, IntInput, MessageTextInput, Output\nfrom langflow.schema import Data\nfrom langflow.utils.util import unescape_string\n\n\nclass SplitTextComponent(Component):\n display_name: str = \"Split Text\"\n description: str = \"Split text into chunks based on specified criteria.\"\n icon = \"scissors-line-dashed\"\n\n inputs = [\n HandleInput(\n name=\"data_inputs\",\n display_name=\"Data Inputs\",\n info=\"The data to split.\",\n input_types=[\"Data\"],\n is_list=True,\n ),\n IntInput(\n name=\"chunk_overlap\",\n display_name=\"Chunk Overlap\",\n info=\"Number of characters to overlap between chunks.\",\n value=200,\n ),\n IntInput(\n name=\"chunk_size\",\n display_name=\"Chunk Size\",\n info=\"The maximum number of characters in each chunk.\",\n value=1000,\n ),\n MessageTextInput(\n name=\"separator\",\n display_name=\"Separator\",\n info=\"The character to split on. Defaults to newline.\",\n value=\"\\n\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Chunks\", name=\"chunks\", method=\"split_text\"),\n ]\n\n def _docs_to_data(self, docs):\n data = []\n for doc in docs:\n data.append(Data(text=doc.page_content, data=doc.metadata))\n return data\n\n def split_text(self) -> List[Data]:\n separator = unescape_string(self.separator)\n\n documents = []\n for _input in self.data_inputs:\n if isinstance(_input, Data):\n documents.append(_input.to_lc_document())\n\n splitter = CharacterTextSplitter(\n chunk_overlap=self.chunk_overlap,\n chunk_size=self.chunk_size,\n separator=separator,\n )\n docs = splitter.split_documents(documents)\n data = self._docs_to_data(docs)\n self.status = data\n return data\n" }, - "data": { + "data_inputs": { "advanced": false, - "display_name": "Data", + "display_name": "Data Inputs", "dynamic": false, - "info": "The data to convert to text.", + "info": "The data to split.", "input_types": [ "Data" ], - "list": false, - "name": "data", + "list": true, + "name": "data_inputs", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_input": true, "trace_as_metadata": true, "type": "other", "value": "" }, - "sep": { - "advanced": true, - "display_name": "Separator", - "dynamic": false, - "info": "", - "list": false, - "load_from_db": false, - "name": "sep", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "\n" - }, - "template": { + "separator": { "advanced": false, - "display_name": "Template", + "display_name": "Separator", "dynamic": false, - "info": "The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.", + "info": "The character to split on. 
Defaults to newline.", "input_types": [ "Message" ], "list": false, "load_from_db": false, - "multiline": true, - "name": "template", + "name": "separator", "placeholder": "", "required": false, "show": true, @@ -1417,22 +1758,22 @@ "trace_as_input": true, "trace_as_metadata": true, "type": "str", - "value": "{text}" + "value": "\n" } } }, - "type": "ParseData" + "type": "SplitText" }, "dragging": false, - "height": 384, - "id": "ParseData-d61Q0", + "height": 527, + "id": "SplitText-RkdZ3", "position": { - "x": 1854.1518317915907, - "y": 459.3386924128532 + "x": 2044.2799160989089, + "y": 1185.3130355818519 }, "positionAbsolute": { - "x": 1854.1518317915907, - "y": 459.3386924128532 + "x": 2044.2799160989089, + "y": 1185.3130355818519 }, "selected": false, "type": "genericNode", @@ -1440,47 +1781,35 @@ }, { "data": { - "description": "Create a prompt template with dynamic variables.", - "display_name": "Prompt", - "id": "Prompt-vqAlG", + "id": "File-BTJVJ", "node": { "base_classes": [ - "Message" + "Data" ], "beta": false, "conditional_paths": [], - "custom_fields": { - "template": [ - "context", - "question" - ] - }, - "description": "Create a prompt template with dynamic variables.", - "display_name": "Prompt", + "custom_fields": {}, + "description": "A generic file loader.", + "display_name": "File", "documentation": "", "edited": false, - "error": null, "field_order": [ - "template" + "path", + "silent_errors" ], "frozen": false, - "full_path": null, - "icon": "prompts", - "is_composition": null, - "is_input": null, - "is_output": null, - "name": "", + "icon": "file-text", "output_types": [], "outputs": [ { "cache": true, - "display_name": "Prompt Message", + "display_name": "Data", "hidden": false, - "method": "build_prompt", - "name": "prompt", - "selected": "Message", + "method": "load_file", + "name": "data", + "selected": "Data", "types": [ - "Message" + "Data" ], "value": "__UNDEFINED__" } @@ -1504,85 +1833,73 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def post_code_processing(self, new_build_config: dict, current_build_config: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_build_config, current_build_config)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_build_config\n # and update the frontend_node with those values\n update_template_values(frontend_template=frontend_node, 
raw_template=current_build_config[\"template\"])\n return frontend_node\n" + "value": "from pathlib import Path\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_data\nfrom langflow.custom import Component\nfrom langflow.io import BoolInput, FileInput, Output\nfrom langflow.schema import Data\n\n\nclass FileComponent(Component):\n display_name = \"File\"\n description = \"A generic file loader.\"\n icon = \"file-text\"\n\n inputs = [\n FileInput(\n name=\"path\",\n display_name=\"Path\",\n file_types=TEXT_FILE_TYPES,\n info=f\"Supported file types: {', '.join(TEXT_FILE_TYPES)}\",\n ),\n BoolInput(\n name=\"silent_errors\",\n display_name=\"Silent Errors\",\n advanced=True,\n info=\"If true, errors will not raise an exception.\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Data\", name=\"data\", method=\"load_file\"),\n ]\n\n def load_file(self) -> Data:\n if not self.path:\n raise ValueError(\"Please, upload a file to use this component.\")\n resolved_path = self.resolve_path(self.path)\n silent_errors = self.silent_errors\n\n extension = Path(resolved_path).suffix[1:].lower()\n\n if extension == \"doc\":\n raise ValueError(\"doc files are not supported. Please save as .docx\")\n if extension not in TEXT_FILE_TYPES:\n raise ValueError(f\"Unsupported file type: {extension}\")\n\n data = parse_text_file_to_data(resolved_path, silent_errors)\n self.status = data if data else \"No data\"\n return data or Data()\n" }, - "context": { + "path": { "advanced": false, - "display_name": "context", + "display_name": "Path", "dynamic": false, - "field_type": "str", - "fileTypes": [], - "file_path": "", - "info": "", - "input_types": [ - "Message", - "Text" + "fileTypes": [ + "txt", + "md", + "mdx", + "csv", + "json", + "yaml", + "yml", + "xml", + "html", + "htm", + "pdf", + "docx", + "py", + "sh", + "sql", + "js", + "ts", + "tsx" ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "context", - "password": false, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "" - }, - "question": { - "advanced": false, - "display_name": "question", - "dynamic": false, - "field_type": "str", - "fileTypes": [], "file_path": "", - "info": "", - "input_types": [ - "Message", - "Text" - ], + "info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx, py, sh, sql, js, ts, tsx", "list": false, - "load_from_db": false, - "multiline": true, - "name": "question", - "password": false, + "name": "path", "placeholder": "", "required": false, "show": true, "title_case": false, - "type": "str", + "trace_as_metadata": true, + "type": "file", "value": "" }, - "template": { - "advanced": false, - "display_name": "Template", + "silent_errors": { + "advanced": true, + "display_name": "Silent Errors", "dynamic": false, - "info": "", + "info": "If true, errors will not raise an exception.", "list": false, - "name": "template", + "name": "silent_errors", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_input": true, - "type": "prompt", - "value": "{context}\n\n---\n\nGiven the context above, answer the question as best as possible.\n\nQuestion: {question}\n\nAnswer: " + "trace_as_metadata": true, + "type": "bool", + "value": false } } }, - "type": "Prompt" + "type": "File" }, "dragging": false, - "height": 515, - "id": "Prompt-vqAlG", + "height": 300, + "id": "File-BTJVJ", "position": { - "x": 2486.0988668404975, - "y": 496.5120474157301 + "x": 
1418.981990122179, + "y": 1539.3825691184466 }, "positionAbsolute": { - "x": 2486.0988668404975, - "y": 496.5120474157301 + "x": 1418.981990122179, + "y": 1539.3825691184466 }, "selected": false, "type": "genericNode", @@ -1590,108 +1907,93 @@ }, { "data": { - "id": "OpenAIModel-ybL3k", + "id": "AstraDB-XXizY", "node": { "base_classes": [ - "LanguageModel", - "Message" + "Data", + "Retriever" ], "beta": false, "conditional_paths": [], "custom_fields": {}, - "description": "Generates text using OpenAI LLMs.", - "display_name": "OpenAI", - "documentation": "", + "description": "Implementation of Vector Store using Astra DB with search capabilities", + "display_name": "Astra DB", + "documentation": "https://python.langchain.com/docs/integrations/vectorstores/astradb", "edited": false, "field_order": [ - "input_value", - "max_tokens", - "model_kwargs", - "output_schema", - "model_name", - "openai_api_base", - "openai_api_key", - "temperature", - "stream", - "system_message", - "seed" + "collection_name", + "token", + "api_endpoint", + "search_input", + "ingest_data", + "namespace", + "metric", + "batch_size", + "bulk_insert_batch_concurrency", + "bulk_insert_overwrite_concurrency", + "bulk_delete_concurrency", + "setup_mode", + "pre_delete_collection", + "metadata_indexing_include", + "embedding", + "metadata_indexing_exclude", + "collection_indexing_policy", + "search_type", + "number_of_results" ], "frozen": false, - "icon": "OpenAI", + "icon": "AstraDB", "output_types": [], "outputs": [ { "cache": true, - "display_name": "Text", - "hidden": false, - "method": "text_response", - "name": "text_output", - "selected": "Message", + "display_name": "Retriever", + "method": "build_base_retriever", + "name": "base_retriever", + "selected": "Retriever", "types": [ - "Message" + "Retriever" ], "value": "__UNDEFINED__" }, { "cache": true, - "display_name": "Language Model", - "method": "build_model", - "name": "model_output", - "selected": "LanguageModel", - "types": [ - "LanguageModel" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n inputs = [\n MessageInput(name=\"input_value\", display_name=\"Input\"),\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\", display_name=\"Model Name\", advanced=False, options=MODEL_NAMES, value=MODEL_NAMES[0]\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"openai_api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n BoolInput(name=\"stream\", display_name=\"Stream\", info=STREAM_INFO_TEXT, advanced=True),\n StrInput(\n name=\"system_message\",\n display_name=\"System Message\",\n info=\"System message to pass to the model.\",\n advanced=True,\n ),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel:\n # self.output_schea is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.openai_api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict)\n seed = self.seed\n model_kwargs[\"seed\"] = seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n )\n if json_mode:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" - }, - "input_value": { + "display_name": "Search Results", + "method": "search_documents", + "name": "search_results", + "selected": "Data", + "types": [ + "Data" + ], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "api_endpoint": { "advanced": false, - "display_name": "Input", + "display_name": "API Endpoint", "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "list": false, + "info": "API endpoint URL for the Astra DB service.", + "input_types": [], "load_from_db": false, - "name": "input_value", + "name": "api_endpoint", + "password": true, "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_input": true, - 
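The OpenAIModel component source quoted above enables JSON mode by folding its output_schema, a list of dicts, into a single dict with functools.reduce and operator.ior. A self-contained sketch of that merge (the schema values are illustrative; dict union via "|=" needs Python 3.9+):

import operator
from functools import reduce

# output_schema arrives as a list of dicts; reduce folds "|=" across them.
# The component guards a missing value with `or {}`, which behaves the same
# as an empty list here.
output_schema = [{"answer": "string"}, {"confidence": "number"}]
output_schema_dict = reduce(operator.ior, output_schema or [], {})
json_mode = bool(output_schema_dict)  # an empty merge leaves JSON mode off

assert output_schema_dict == {"answer": "string", "confidence": "number"}
assert json_mode is True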
"trace_as_metadata": true, "type": "str", "value": "" }, - "max_tokens": { + "batch_size": { "advanced": true, - "display_name": "Max Tokens", + "display_name": "Batch Size", "dynamic": false, - "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + "info": "Optional number of data to process in a single batch.", "list": false, - "name": "max_tokens", + "name": "batch_size", "placeholder": "", "required": false, "show": true, @@ -1700,127 +2002,77 @@ "type": "int", "value": "" }, - "model_kwargs": { + "bulk_delete_concurrency": { "advanced": true, - "display_name": "Model Kwargs", + "display_name": "Bulk Delete Concurrency", "dynamic": false, - "info": "", + "info": "Optional concurrency level for bulk delete operations.", "list": false, - "name": "model_kwargs", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "model_name": { - "advanced": false, - "display_name": "Model Name", - "dynamic": false, - "info": "", - "name": "model_name", - "options": [ - "gpt-4o", - "gpt-4-turbo", - "gpt-4-turbo-preview", - "gpt-3.5-turbo", - "gpt-3.5-turbo-0125" - ], + "name": "bulk_delete_concurrency", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "str", - "value": "gpt-4-turbo" + "type": "int", + "value": "" }, - "openai_api_base": { + "bulk_insert_batch_concurrency": { "advanced": true, - "display_name": "OpenAI API Base", + "display_name": "Bulk Insert Batch Concurrency", "dynamic": false, - "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "info": "Optional concurrency level for bulk insert operations.", "list": false, - "load_from_db": false, - "name": "openai_api_base", + "name": "bulk_insert_batch_concurrency", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "openai_api_key": { - "advanced": false, - "display_name": "OpenAI API Key", - "dynamic": false, - "info": "The OpenAI API Key to use for the OpenAI model.", - "input_types": [], - "load_from_db": false, - "name": "openai_api_key", - "password": true, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", + "type": "int", "value": "" }, - "output_schema": { - "advanced": true, - "display_name": "Schema", - "dynamic": false, - "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.", - "list": true, - "name": "output_schema", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "seed": { + "bulk_insert_overwrite_concurrency": { "advanced": true, - "display_name": "Seed", + "display_name": "Bulk Insert Overwrite Concurrency", "dynamic": false, - "info": "The seed controls the reproducibility of the job.", + "info": "Optional concurrency level for bulk insert operations that overwrite existing data.", "list": false, - "name": "seed", + "name": "bulk_insert_overwrite_concurrency", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, "type": "int", - "value": 1 + "value": "" }, - "stream": { + "code": { "advanced": true, - "display_name": "Stream", - "dynamic": false, - "info": "Stream the response from the model. 
Streaming works only in Chat.", + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", "list": false, - "name": "stream", + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, "placeholder": "", - "required": false, + "required": true, "show": true, "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false + "type": "code", + "value": "from loguru import logger\n\nfrom langflow.base.vectorstores.model import LCVectorStoreComponent\nfrom langflow.io import (\n BoolInput,\n DropdownInput,\n HandleInput,\n IntInput,\n MultilineInput,\n SecretStrInput,\n StrInput,\n DataInput,\n)\nfrom langflow.schema import Data\n\n\nclass AstraVectorStoreComponent(LCVectorStoreComponent):\n display_name: str = \"Astra DB\"\n description: str = \"Implementation of Vector Store using Astra DB with search capabilities\"\n documentation: str = \"https://python.langchain.com/docs/integrations/vectorstores/astradb\"\n icon: str = \"AstraDB\"\n\n inputs = [\n StrInput(\n name=\"collection_name\",\n display_name=\"Collection Name\",\n info=\"The name of the collection within Astra DB where the vectors will be stored.\",\n ),\n SecretStrInput(\n name=\"token\",\n display_name=\"Astra DB Application Token\",\n info=\"Authentication token for accessing Astra DB.\",\n value=\"ASTRA_DB_APPLICATION_TOKEN\",\n ),\n SecretStrInput(\n name=\"api_endpoint\",\n display_name=\"API Endpoint\",\n info=\"API endpoint URL for the Astra DB service.\",\n value=\"ASTRA_DB_API_ENDPOINT\",\n ),\n MultilineInput(\n name=\"search_input\",\n display_name=\"Search Input\",\n ),\n DataInput(\n name=\"ingest_data\",\n display_name=\"Ingest Data\",\n is_list=True,\n ),\n StrInput(\n name=\"namespace\",\n display_name=\"Namespace\",\n info=\"Optional namespace within Astra DB to use for the collection.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"metric\",\n display_name=\"Metric\",\n info=\"Optional distance metric for vector comparisons in the vector store.\",\n options=[\"cosine\", \"dot_product\", \"euclidean\"],\n advanced=True,\n ),\n IntInput(\n name=\"batch_size\",\n display_name=\"Batch Size\",\n info=\"Optional number of data to process in a single batch.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_insert_batch_concurrency\",\n display_name=\"Bulk Insert Batch Concurrency\",\n info=\"Optional concurrency level for bulk insert operations.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_insert_overwrite_concurrency\",\n display_name=\"Bulk Insert Overwrite Concurrency\",\n info=\"Optional concurrency level for bulk insert operations that overwrite existing data.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_delete_concurrency\",\n display_name=\"Bulk Delete Concurrency\",\n info=\"Optional concurrency level for bulk delete operations.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"setup_mode\",\n display_name=\"Setup Mode\",\n info=\"Configuration mode for setting up the vector store, with options like 'Sync', 'Async', or 'Off'.\",\n options=[\"Sync\", \"Async\", \"Off\"],\n advanced=True,\n value=\"Sync\",\n ),\n BoolInput(\n name=\"pre_delete_collection\",\n display_name=\"Pre Delete Collection\",\n info=\"Boolean flag to determine whether to delete the collection before creating a new one.\",\n advanced=True,\n ),\n StrInput(\n name=\"metadata_indexing_include\",\n display_name=\"Metadata Indexing Include\",\n info=\"Optional list of metadata fields to include in the indexing.\",\n advanced=True,\n ),\n HandleInput(\n 
name=\"embedding\",\n display_name=\"Embedding or Astra Vectorize\",\n input_types=[\"Embeddings\", \"dict\"],\n ),\n StrInput(\n name=\"metadata_indexing_exclude\",\n display_name=\"Metadata Indexing Exclude\",\n info=\"Optional list of metadata fields to exclude from the indexing.\",\n advanced=True,\n ),\n StrInput(\n name=\"collection_indexing_policy\",\n display_name=\"Collection Indexing Policy\",\n info=\"Optional dictionary defining the indexing policy for the collection.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"search_type\",\n display_name=\"Search Type\",\n options=[\"Similarity\", \"MMR\"],\n value=\"Similarity\",\n advanced=True,\n ),\n IntInput(\n name=\"number_of_results\",\n display_name=\"Number of Results\",\n info=\"Number of results to return.\",\n advanced=True,\n value=4,\n ),\n ]\n\n def build_vector_store(self):\n try:\n from langchain_astradb import AstraDBVectorStore\n from langchain_astradb.utils.astradb import SetupMode\n except ImportError:\n raise ImportError(\n \"Could not import langchain Astra DB integration package. \"\n \"Please install it with `pip install langchain-astradb`.\"\n )\n\n try:\n if not self.setup_mode:\n self.setup_mode = self._inputs[\"setup_mode\"].options[0]\n\n setup_mode_value = SetupMode[self.setup_mode.upper()]\n except KeyError:\n raise ValueError(f\"Invalid setup mode: {self.setup_mode}\")\n\n if not isinstance(self.embedding, dict):\n embedding_dict = {\"embedding\": self.embedding}\n else:\n from astrapy.info import CollectionVectorServiceOptions\n\n dict_options = self.embedding.get(\"collection_vector_service_options\", {})\n dict_options[\"authentication\"] = {\n k: v for k, v in dict_options.get(\"authentication\", {}).items() if k and v\n }\n dict_options[\"parameters\"] = {k: v for k, v in dict_options.get(\"parameters\", {}).items() if k and v}\n embedding_dict = {\n \"collection_vector_service_options\": CollectionVectorServiceOptions.from_dict(dict_options),\n \"collection_embedding_api_key\": self.embedding.get(\"collection_embedding_api_key\"),\n }\n vector_store_kwargs = {\n **embedding_dict,\n \"collection_name\": self.collection_name,\n \"token\": self.token,\n \"api_endpoint\": self.api_endpoint,\n \"namespace\": self.namespace or None,\n \"metric\": self.metric or None,\n \"batch_size\": self.batch_size or None,\n \"bulk_insert_batch_concurrency\": self.bulk_insert_batch_concurrency or None,\n \"bulk_insert_overwrite_concurrency\": self.bulk_insert_overwrite_concurrency or None,\n \"bulk_delete_concurrency\": self.bulk_delete_concurrency or None,\n \"setup_mode\": setup_mode_value,\n \"pre_delete_collection\": self.pre_delete_collection or False,\n }\n\n if self.metadata_indexing_include:\n vector_store_kwargs[\"metadata_indexing_include\"] = self.metadata_indexing_include\n elif self.metadata_indexing_exclude:\n vector_store_kwargs[\"metadata_indexing_exclude\"] = self.metadata_indexing_exclude\n elif self.collection_indexing_policy:\n vector_store_kwargs[\"collection_indexing_policy\"] = self.collection_indexing_policy\n\n try:\n vector_store = AstraDBVectorStore(**vector_store_kwargs)\n except Exception as e:\n raise ValueError(f\"Error initializing AstraDBVectorStore: {str(e)}\") from e\n\n self.status = self._astradb_collection_to_data(vector_store.collection)\n return vector_store\n\n def _add_documents_to_vector_store(self, vector_store):\n documents = []\n for _input in self.ingest_data or []:\n if isinstance(_input, Data):\n documents.append(_input.to_lc_document())\n else:\n raise 
ValueError(\"Vector Store Inputs must be Data objects.\")\n\n if documents and self.embedding is not None:\n logger.debug(f\"Adding {len(documents)} documents to the Vector Store.\")\n try:\n vector_store.add_documents(documents)\n except Exception as e:\n raise ValueError(f\"Error adding documents to AstraDBVectorStore: {str(e)}\") from e\n else:\n logger.debug(\"No documents to add to the Vector Store.\")\n\n def search_documents(self):\n vector_store = self.build_vector_store()\n\n logger.debug(f\"Search input: {self.search_input}\")\n logger.debug(f\"Search type: {self.search_type}\")\n logger.debug(f\"Number of results: {self.number_of_results}\")\n\n if self.search_input and isinstance(self.search_input, str) and self.search_input.strip():\n try:\n if self.search_type == \"Similarity\":\n docs = vector_store.similarity_search(\n query=self.search_input,\n k=self.number_of_results,\n )\n elif self.search_type == \"MMR\":\n docs = vector_store.max_marginal_relevance_search(\n query=self.search_input,\n k=self.number_of_results,\n )\n else:\n raise ValueError(f\"Invalid search type: {self.search_type}\")\n except Exception as e:\n raise ValueError(f\"Error performing search in AstraDBVectorStore: {str(e)}\") from e\n\n logger.debug(f\"Retrieved documents: {len(docs)}\")\n\n data = [Data.from_document(doc) for doc in docs]\n logger.debug(f\"Converted documents to data: {len(data)}\")\n self.status = data\n return data\n else:\n logger.debug(\"No search input provided. Skipping search.\")\n return []\n\n def _astradb_collection_to_data(self, collection):\n data = []\n data_dict = collection.find()\n if data_dict and \"data\" in data_dict:\n data_dict = data_dict[\"data\"].get(\"documents\", [])\n\n for item in data_dict:\n data.append(Data(content=item[\"content\"]))\n return data\n" }, - "system_message": { + "collection_indexing_policy": { "advanced": true, - "display_name": "System Message", + "display_name": "Collection Indexing Policy", "dynamic": false, - "info": "System message to pass to the model.", + "info": "Optional dictionary defining the indexing policy for the collection.", "list": false, "load_from_db": false, - "name": "system_message", + "name": "collection_indexing_policy", "placeholder": "", "required": false, "show": true, @@ -1829,332 +2081,169 @@ "type": "str", "value": "" }, - "temperature": { + "collection_name": { "advanced": false, - "display_name": "Temperature", + "display_name": "Collection Name", "dynamic": false, - "info": "", + "info": "The name of the collection within Astra DB where the vectors will be stored.", "list": false, - "name": "temperature", + "load_from_db": false, + "name": "collection_name", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "float", - "value": 0.1 - } - } - }, - "type": "OpenAIModel" - }, - "dragging": false, - "height": 621, - "id": "OpenAIModel-ybL3k", - "position": { - "x": 3145.6693008609222, - "y": 374.23955005474204 - }, - "positionAbsolute": { - "x": 3145.6693008609222, - "y": 374.23955005474204 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "id": "ChatOutput-BpzuD", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Display a chat message in the Playground.", - "display_name": "Chat Output", - "documentation": "", - "edited": false, - "field_order": [ - "input_value", - "sender", - "sender_name", - "session_id", - "data_template" - ], 
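The Astra DB component source above dispatches between plain similarity search and MMR, and skips querying entirely when the search input is empty. A hedged sketch of that dispatch (run_search and its parameters are hypothetical stand-ins; the two search calls match the ones in the embedded code):

def run_search(vector_store, search_input, search_type="Similarity", k=4):
    # The component returns [] without touching the store on empty input.
    if not (isinstance(search_input, str) and search_input.strip()):
        return []
    if search_type == "Similarity":
        docs = vector_store.similarity_search(query=search_input, k=k)
    elif search_type == "MMR":
        docs = vector_store.max_marginal_relevance_search(query=search_input, k=k)
    else:
        raise ValueError(f"Invalid search type: {search_type}")
    return docs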
- "frozen": false, - "icon": "ChatOutput", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Message", - "method": "message_response", - "name": "message", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"Machine\",\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\", display_name=\"Sender Name\", info=\"Name of the sender.\", value=\"AI\", advanced=True\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n self.store_message(message)\n self.message.value = message\n\n self.status = message\n return message\n" + "type": "str", + "value": "langflow" }, - "data_template": { - "advanced": true, - "display_name": "Data Template", + "embedding": { + "advanced": false, + "display_name": "Embedding or Astra Vectorize", "dynamic": false, - "info": "Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.", + "info": "", "input_types": [ - "Message" + "Embeddings", + "dict" ], "list": false, - "load_from_db": false, - "name": "data_template", + "name": "embedding", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_input": true, "trace_as_metadata": true, - "type": "str", - "value": "{text}" + "type": "other", + "value": "" }, - "input_value": { + "ingest_data": { "advanced": false, - "display_name": "Text", + "display_name": "Ingest Data", "dynamic": false, - "info": "Message to be passed as output.", + "info": "", "input_types": [ - "Message" + "Data" ], - "list": false, - "load_from_db": false, - "name": "input_value", + "list": true, + "name": "ingest_data", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_input": true, "trace_as_metadata": true, - "type": "str", + "type": "other", "value": "" }, - "sender": { + "metadata_indexing_exclude": { "advanced": true, - "display_name": "Sender Type", + "display_name": "Metadata Indexing Exclude", "dynamic": false, - "info": "Type of sender.", - "name": "sender", - "options": [ - "Machine", - "User" - ], + "info": "Optional list of metadata fields to exclude from the indexing.", + "list": false, + "load_from_db": false, + "name": "metadata_indexing_exclude", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, "type": "str", - "value": "Machine" + "value": "" }, - "sender_name": { + "metadata_indexing_include": { "advanced": true, - "display_name": "Sender Name", + "display_name": "Metadata Indexing Include", "dynamic": false, - "info": "Name of the sender.", - "input_types": [ - "Message" - ], + "info": "Optional list of metadata fields to include in the indexing.", "list": false, "load_from_db": false, - "name": "sender_name", + "name": "metadata_indexing_include", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_input": true, "trace_as_metadata": true, "type": "str", - "value": "AI" + "value": "" }, - "session_id": { + "metric": { "advanced": true, - "display_name": "Session ID", + "display_name": "Metric", "dynamic": false, - "info": "Session ID for the message.", - "input_types": [ - "Message" + "info": "Optional distance metric for vector comparisons in the vector store.", + "name": "metric", + "options": [ + "cosine", + "dot_product", + "euclidean" ], - "list": false, - "load_from_db": false, - "name": "session_id", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_input": true, "trace_as_metadata": true, "type": "str", "value": "" - } - } - }, - "type": "ChatOutput" - }, - "dragging": false, - "height": 308, - "id": "ChatOutput-BpzuD", - "position": { - "x": 3769.242086248817, - "y": 585.3403837062634 - }, - "positionAbsolute": { - "x": 3769.242086248817, - "y": 585.3403837062634 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "id": "SplitText-52wBo", - "node": { - "base_classes": [ - "Data" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Split text into chunks based on specified criteria.", - "display_name": "Split Text", - "documentation": "", - "edited": false, - "field_order": [ - "data_inputs", - "chunk_overlap", - "chunk_size", - "separator" - ], - "frozen": false, - "icon": "scissors-line-dashed", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Chunks", - "hidden": false, - 
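The Split Text component in this flow (its source appears in the code fields above and below) delegates chunking to langchain's CharacterTextSplitter. A minimal standalone sketch of the same chunking, assuming langchain-core and langchain-text-splitters are installed (the sample document is made up):

from langchain_core.documents import Document
from langchain_text_splitters import CharacterTextSplitter

docs = [Document(page_content="alpha\nbravo\ncharlie", metadata={"source": "example.txt"})]
splitter = CharacterTextSplitter(
    chunk_size=1000,    # max characters per chunk
    chunk_overlap=200,  # characters shared between consecutive chunks
    separator="\n",     # boundary to split on (the component unescapes it first)
)
chunks = splitter.split_documents(docs)
for chunk in chunks:
    print(chunk.page_content, chunk.metadata)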
"method": "split_text", - "name": "chunks", - "selected": "Data", - "types": [ - "Data" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "chunk_overlap": { - "advanced": false, - "display_name": "Chunk Overlap", + }, + "namespace": { + "advanced": true, + "display_name": "Namespace", "dynamic": false, - "info": "Number of characters to overlap between chunks.", + "info": "Optional namespace within Astra DB to use for the collection.", "list": false, - "name": "chunk_overlap", + "load_from_db": false, + "name": "namespace", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "int", - "value": 200 + "type": "str", + "value": "" }, - "chunk_size": { - "advanced": false, - "display_name": "Chunk Size", + "number_of_results": { + "advanced": true, + "display_name": "Number of Results", "dynamic": false, - "info": "The maximum number of characters in each chunk.", + "info": "Number of results to return.", "list": false, - "name": "chunk_size", + "name": "number_of_results", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, "type": "int", - "value": 1000 + "value": 4 }, - "code": { + "pre_delete_collection": { "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from typing import List\n\nfrom langchain_text_splitters import CharacterTextSplitter\n\nfrom langflow.custom import Component\nfrom langflow.io import HandleInput, IntInput, MessageTextInput, Output\nfrom langflow.schema import Data\nfrom langflow.utils.util import unescape_string\n\n\nclass SplitTextComponent(Component):\n display_name: str = \"Split Text\"\n description: str = \"Split text into chunks based on specified criteria.\"\n icon = \"scissors-line-dashed\"\n\n inputs = [\n HandleInput(\n name=\"data_inputs\",\n display_name=\"Data Inputs\",\n info=\"The data to split.\",\n input_types=[\"Data\"],\n is_list=True,\n ),\n IntInput(\n name=\"chunk_overlap\",\n display_name=\"Chunk Overlap\",\n info=\"Number of characters to overlap between chunks.\",\n value=200,\n ),\n IntInput(\n name=\"chunk_size\",\n display_name=\"Chunk Size\",\n info=\"The maximum number of characters in each chunk.\",\n value=1000,\n ),\n MessageTextInput(\n name=\"separator\",\n display_name=\"Separator\",\n info=\"The character to split on. 
Defaults to newline.\",\n value=\"\\n\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Chunks\", name=\"chunks\", method=\"split_text\"),\n ]\n\n def _docs_to_data(self, docs):\n data = []\n for doc in docs:\n data.append(Data(text=doc.page_content, data=doc.metadata))\n return data\n\n def split_text(self) -> List[Data]:\n separator = unescape_string(self.separator)\n\n documents = []\n for _input in self.data_inputs:\n if isinstance(_input, Data):\n documents.append(_input.to_lc_document())\n\n splitter = CharacterTextSplitter(\n chunk_overlap=self.chunk_overlap,\n chunk_size=self.chunk_size,\n separator=separator,\n )\n docs = splitter.split_documents(documents)\n data = self._docs_to_data(docs)\n self.status = data\n return data\n" - }, - "data_inputs": { - "advanced": false, - "display_name": "Data Inputs", + "display_name": "Pre Delete Collection", "dynamic": false, - "info": "The data to split.", - "input_types": [ - "Data" - ], - "list": true, - "name": "data_inputs", + "info": "Boolean flag to determine whether to delete the collection before creating a new one.", + "list": false, + "name": "pre_delete_collection", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "other", - "value": "" + "type": "bool", + "value": false }, - "separator": { + "search_input": { "advanced": false, - "display_name": "Separator", + "display_name": "Search Input", "dynamic": false, - "info": "The character to split on. Defaults to newline.", + "info": "", "input_types": [ "Message" ], "list": false, "load_from_db": false, - "name": "separator", + "multiline": true, + "name": "search_input", "placeholder": "", "required": false, "show": true, @@ -2162,148 +2251,75 @@ "trace_as_input": true, "trace_as_metadata": true, "type": "str", - "value": "\n" - } - } - }, - "type": "SplitText" - }, - "dragging": false, - "height": 527, - "id": "SplitText-52wBo", - "position": { - "x": 2044.2799160989089, - "y": 1185.3130355818519 - }, - "positionAbsolute": { - "x": 2044.2799160989089, - "y": 1185.3130355818519 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "id": "File-bf6wn", - "node": { - "base_classes": [ - "Data" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "A generic file loader.", - "display_name": "File", - "documentation": "", - "edited": false, - "field_order": [ - "path", - "silent_errors" - ], - "frozen": false, - "icon": "file-text", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Data", - "hidden": false, - "method": "load_file", - "name": "data", - "selected": "Data", - "types": [ - "Data" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { + "value": "" + }, + "search_type": { "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", + "display_name": "Search Type", + "dynamic": false, "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, + "name": "search_type", + "options": [ + "Similarity", + "MMR" + ], "placeholder": "", - "required": true, + "required": false, "show": true, "title_case": false, - "type": "code", - "value": "from pathlib import Path\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_data\nfrom langflow.custom import Component\nfrom langflow.io import BoolInput, FileInput, Output\nfrom langflow.schema import Data\n\n\nclass 
FileComponent(Component):\n display_name = \"File\"\n description = \"A generic file loader.\"\n icon = \"file-text\"\n\n inputs = [\n FileInput(\n name=\"path\",\n display_name=\"Path\",\n file_types=TEXT_FILE_TYPES,\n info=f\"Supported file types: {', '.join(TEXT_FILE_TYPES)}\",\n ),\n BoolInput(\n name=\"silent_errors\",\n display_name=\"Silent Errors\",\n advanced=True,\n info=\"If true, errors will not raise an exception.\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Data\", name=\"data\", method=\"load_file\"),\n ]\n\n def load_file(self) -> Data:\n if not self.path:\n raise ValueError(\"Please, upload a file to use this component.\")\n resolved_path = self.resolve_path(self.path)\n silent_errors = self.silent_errors\n\n extension = Path(resolved_path).suffix[1:].lower()\n\n if extension == \"doc\":\n raise ValueError(\"doc files are not supported. Please save as .docx\")\n if extension not in TEXT_FILE_TYPES:\n raise ValueError(f\"Unsupported file type: {extension}\")\n\n data = parse_text_file_to_data(resolved_path, silent_errors)\n self.status = data if data else \"No data\"\n return data or Data()\n" + "trace_as_metadata": true, + "type": "str", + "value": "Similarity" }, - "path": { - "advanced": false, - "display_name": "Path", + "setup_mode": { + "advanced": true, + "display_name": "Setup Mode", "dynamic": false, - "fileTypes": [ - "txt", - "md", - "mdx", - "csv", - "json", - "yaml", - "yml", - "xml", - "html", - "htm", - "pdf", - "docx", - "py", - "sh", - "sql", - "js", - "ts", - "tsx" + "info": "Configuration mode for setting up the vector store, with options like 'Sync', 'Async', or 'Off'.", + "name": "setup_mode", + "options": [ + "Sync", + "Async", + "Off" ], - "file_path": "", - "info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx, py, sh, sql, js, ts, tsx", - "list": false, - "name": "path", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "file", - "value": "" + "type": "str", + "value": "Sync" }, - "silent_errors": { - "advanced": true, - "display_name": "Silent Errors", + "token": { + "advanced": false, + "display_name": "Astra DB Application Token", "dynamic": false, - "info": "If true, errors will not raise an exception.", - "list": false, - "name": "silent_errors", + "info": "Authentication token for accessing Astra DB.", + "input_types": [], + "load_from_db": false, + "name": "token", + "password": true, "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false + "type": "str", + "value": "" } } }, - "type": "File" + "type": "AstraDB" }, "dragging": false, - "height": 300, - "id": "File-bf6wn", + "height": 753, + "id": "AstraDB-XXizY", "position": { - "x": 1418.981990122179, - "y": 1539.3825691184466 + "x": 2676.4816074350247, + "y": 1269.304067004569 }, "positionAbsolute": { - "x": 1418.981990122179, - "y": 1539.3825691184466 + "x": 2676.4816074350247, + "y": 1269.304067004569 }, "selected": false, "type": "genericNode", @@ -2311,63 +2327,53 @@ }, { "data": { - "id": "AstraDB-vyd5U", + "id": "OpenAIEmbeddings-fpOKp", "node": { "base_classes": [ - "Data", - "Retriever" + "Embeddings" ], "beta": false, "conditional_paths": [], "custom_fields": {}, - "description": "Implementation of Vector Store using Astra DB with search capabilities", - "display_name": "Astra DB", - "documentation": "https://python.langchain.com/docs/integrations/vectorstores/astradb", + "description": "Generate 
embeddings using OpenAI models.", + "display_name": "OpenAI Embeddings", + "documentation": "", "edited": false, "field_order": [ - "collection_name", - "token", - "api_endpoint", - "search_input", - "ingest_data", - "namespace", - "metric", - "batch_size", - "bulk_insert_batch_concurrency", - "bulk_insert_overwrite_concurrency", - "bulk_delete_concurrency", - "setup_mode", - "pre_delete_collection", - "metadata_indexing_include", - "embedding", - "metadata_indexing_exclude", - "collection_indexing_policy", - "search_type", - "number_of_results" + "default_headers", + "default_query", + "chunk_size", + "client", + "deployment", + "embedding_ctx_length", + "max_retries", + "model", + "model_kwargs", + "openai_api_base", + "openai_api_key", + "openai_api_type", + "openai_api_version", + "openai_organization", + "openai_proxy", + "request_timeout", + "show_progress_bar", + "skip_empty", + "tiktoken_model_name", + "tiktoken_enable" ], "frozen": false, - "icon": "AstraDB", + "icon": "OpenAI", "output_types": [], "outputs": [ { "cache": true, - "display_name": "Retriever", - "method": "build_base_retriever", - "name": "base_retriever", - "selected": "Retriever", - "types": [ - "Retriever" - ], - "value": "__UNDEFINED__" - }, - { - "cache": true, - "display_name": "Search Results", - "method": "search_documents", - "name": "search_results", - "selected": "Data", + "display_name": "Embeddings", + "hidden": false, + "method": "build_embeddings", + "name": "embeddings", + "selected": "Embeddings", "types": [ - "Data" + "Embeddings" ], "value": "__UNDEFINED__" } @@ -2375,259 +2381,303 @@ "pinned": false, "template": { "_type": "Component", - "api_endpoint": { - "advanced": false, - "display_name": "API Endpoint", + "chunk_size": { + "advanced": true, + "display_name": "Chunk Size", "dynamic": false, - "info": "API endpoint URL for the Astra DB service.", - "input_types": [], + "info": "", + "list": false, + "name": "chunk_size", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1000 + }, + "client": { + "advanced": true, + "display_name": "Client", + "dynamic": false, + "info": "", + "input_types": [ + "Message" + ], + "list": false, "load_from_db": false, - "name": "api_endpoint", - "password": true, + "name": "client", "placeholder": "", "required": false, "show": true, "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, "type": "str", "value": "" }, - "batch_size": { + "code": { "advanced": true, - "display_name": "Batch Size", + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.base.embeddings.model import LCEmbeddingsModel\nfrom langflow.field_typing import Embeddings\nfrom langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput\n\n\nclass OpenAIEmbeddingsComponent(LCEmbeddingsModel):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n icon = \"OpenAI\"\n inputs = [\n DictInput(\n name=\"default_headers\",\n display_name=\"Default Headers\",\n advanced=True,\n info=\"Default headers to use for the API request.\",\n ),\n DictInput(\n name=\"default_query\",\n 
display_name=\"Default Query\",\n advanced=True,\n info=\"Default query parameters to use for the API request.\",\n ),\n IntInput(name=\"chunk_size\", display_name=\"Chunk Size\", advanced=True, value=1000),\n MessageTextInput(name=\"client\", display_name=\"Client\", advanced=True),\n MessageTextInput(name=\"deployment\", display_name=\"Deployment\", advanced=True),\n IntInput(name=\"embedding_ctx_length\", display_name=\"Embedding Context Length\", advanced=True, value=1536),\n IntInput(name=\"max_retries\", display_name=\"Max Retries\", value=3, advanced=True),\n DropdownInput(\n name=\"model\",\n display_name=\"Model\",\n advanced=False,\n options=[\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n value=\"text-embedding-3-small\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n SecretStrInput(name=\"openai_api_base\", display_name=\"OpenAI API Base\", advanced=True),\n SecretStrInput(name=\"openai_api_key\", display_name=\"OpenAI API Key\", value=\"OPENAI_API_KEY\"),\n SecretStrInput(name=\"openai_api_type\", display_name=\"OpenAI API Type\", advanced=True),\n MessageTextInput(name=\"openai_api_version\", display_name=\"OpenAI API Version\", advanced=True),\n MessageTextInput(\n name=\"openai_organization\",\n display_name=\"OpenAI Organization\",\n advanced=True,\n ),\n MessageTextInput(name=\"openai_proxy\", display_name=\"OpenAI Proxy\", advanced=True),\n FloatInput(name=\"request_timeout\", display_name=\"Request Timeout\", advanced=True),\n BoolInput(name=\"show_progress_bar\", display_name=\"Show Progress Bar\", advanced=True),\n BoolInput(name=\"skip_empty\", display_name=\"Skip Empty\", advanced=True),\n MessageTextInput(\n name=\"tiktoken_model_name\",\n display_name=\"TikToken Model Name\",\n advanced=True,\n ),\n BoolInput(\n name=\"tiktoken_enable\",\n display_name=\"TikToken Enable\",\n advanced=True,\n value=True,\n info=\"If False, you must have transformers installed.\",\n ),\n ]\n\n def build_embeddings(self) -> Embeddings:\n return OpenAIEmbeddings(\n tiktoken_enabled=self.tiktoken_enable,\n default_headers=self.default_headers,\n default_query=self.default_query,\n allowed_special=\"all\",\n disallowed_special=\"all\",\n chunk_size=self.chunk_size,\n deployment=self.deployment,\n embedding_ctx_length=self.embedding_ctx_length,\n max_retries=self.max_retries,\n model=self.model,\n model_kwargs=self.model_kwargs,\n base_url=self.openai_api_base,\n api_key=self.openai_api_key,\n openai_api_type=self.openai_api_type,\n api_version=self.openai_api_version,\n organization=self.openai_organization,\n openai_proxy=self.openai_proxy,\n timeout=self.request_timeout or None,\n show_progress_bar=self.show_progress_bar,\n skip_empty=self.skip_empty,\n tiktoken_model_name=self.tiktoken_model_name,\n )\n" + }, + "default_headers": { + "advanced": true, + "display_name": "Default Headers", "dynamic": false, - "info": "Optional number of data to process in a single batch.", + "info": "Default headers to use for the API request.", "list": false, - "name": "batch_size", + "name": "default_headers", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": "" + "trace_as_input": true, + "type": "dict", + "value": {} }, - "bulk_delete_concurrency": { + "default_query": { "advanced": true, - "display_name": "Bulk Delete Concurrency", + "display_name": "Default Query", "dynamic": false, - "info": "Optional concurrency level for 
bulk delete operations.", + "info": "Default query parameters to use for the API request.", "list": false, - "name": "bulk_delete_concurrency", + "name": "default_query", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": "" + "trace_as_input": true, + "type": "dict", + "value": {} }, - "bulk_insert_batch_concurrency": { + "deployment": { "advanced": true, - "display_name": "Bulk Insert Batch Concurrency", + "display_name": "Deployment", "dynamic": false, - "info": "Optional concurrency level for bulk insert operations.", + "info": "", + "input_types": [ + "Message" + ], "list": false, - "name": "bulk_insert_batch_concurrency", + "load_from_db": false, + "name": "deployment", "placeholder": "", "required": false, "show": true, "title_case": false, + "trace_as_input": true, "trace_as_metadata": true, - "type": "int", + "type": "str", "value": "" }, - "bulk_insert_overwrite_concurrency": { + "embedding_ctx_length": { "advanced": true, - "display_name": "Bulk Insert Overwrite Concurrency", + "display_name": "Embedding Context Length", "dynamic": false, - "info": "Optional concurrency level for bulk insert operations that overwrite existing data.", + "info": "", "list": false, - "name": "bulk_insert_overwrite_concurrency", + "name": "embedding_ctx_length", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, "type": "int", - "value": "" + "value": 1536 }, - "code": { + "max_retries": { "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", + "display_name": "Max Retries", + "dynamic": false, "info": "", "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, + "name": "max_retries", "placeholder": "", - "required": true, + "required": false, "show": true, "title_case": false, - "type": "code", - "value": "from loguru import logger\n\nfrom langflow.base.vectorstores.model import LCVectorStoreComponent\nfrom langflow.io import (\n BoolInput,\n DropdownInput,\n HandleInput,\n IntInput,\n MultilineInput,\n SecretStrInput,\n StrInput,\n DataInput,\n)\nfrom langflow.schema import Data\n\n\nclass AstraVectorStoreComponent(LCVectorStoreComponent):\n display_name: str = \"Astra DB\"\n description: str = \"Implementation of Vector Store using Astra DB with search capabilities\"\n documentation: str = \"https://python.langchain.com/docs/integrations/vectorstores/astradb\"\n icon: str = \"AstraDB\"\n\n inputs = [\n StrInput(\n name=\"collection_name\",\n display_name=\"Collection Name\",\n info=\"The name of the collection within Astra DB where the vectors will be stored.\",\n ),\n SecretStrInput(\n name=\"token\",\n display_name=\"Astra DB Application Token\",\n info=\"Authentication token for accessing Astra DB.\",\n value=\"ASTRA_DB_APPLICATION_TOKEN\",\n ),\n SecretStrInput(\n name=\"api_endpoint\",\n display_name=\"API Endpoint\",\n info=\"API endpoint URL for the Astra DB service.\",\n value=\"ASTRA_DB_API_ENDPOINT\",\n ),\n MultilineInput(\n name=\"search_input\",\n display_name=\"Search Input\",\n ),\n DataInput(\n name=\"ingest_data\",\n display_name=\"Ingest Data\",\n is_list=True,\n ),\n StrInput(\n name=\"namespace\",\n display_name=\"Namespace\",\n info=\"Optional namespace within Astra DB to use for the collection.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"metric\",\n display_name=\"Metric\",\n info=\"Optional distance metric for vector comparisons in the vector store.\",\n options=[\"cosine\", 
\"dot_product\", \"euclidean\"],\n advanced=True,\n ),\n IntInput(\n name=\"batch_size\",\n display_name=\"Batch Size\",\n info=\"Optional number of data to process in a single batch.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_insert_batch_concurrency\",\n display_name=\"Bulk Insert Batch Concurrency\",\n info=\"Optional concurrency level for bulk insert operations.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_insert_overwrite_concurrency\",\n display_name=\"Bulk Insert Overwrite Concurrency\",\n info=\"Optional concurrency level for bulk insert operations that overwrite existing data.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_delete_concurrency\",\n display_name=\"Bulk Delete Concurrency\",\n info=\"Optional concurrency level for bulk delete operations.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"setup_mode\",\n display_name=\"Setup Mode\",\n info=\"Configuration mode for setting up the vector store, with options like 'Sync', 'Async', or 'Off'.\",\n options=[\"Sync\", \"Async\", \"Off\"],\n advanced=True,\n value=\"Sync\",\n ),\n BoolInput(\n name=\"pre_delete_collection\",\n display_name=\"Pre Delete Collection\",\n info=\"Boolean flag to determine whether to delete the collection before creating a new one.\",\n advanced=True,\n ),\n StrInput(\n name=\"metadata_indexing_include\",\n display_name=\"Metadata Indexing Include\",\n info=\"Optional list of metadata fields to include in the indexing.\",\n advanced=True,\n ),\n HandleInput(\n name=\"embedding\",\n display_name=\"Embedding or Astra Vectorize\",\n input_types=[\"Embeddings\", \"dict\"],\n ),\n StrInput(\n name=\"metadata_indexing_exclude\",\n display_name=\"Metadata Indexing Exclude\",\n info=\"Optional list of metadata fields to exclude from the indexing.\",\n advanced=True,\n ),\n StrInput(\n name=\"collection_indexing_policy\",\n display_name=\"Collection Indexing Policy\",\n info=\"Optional dictionary defining the indexing policy for the collection.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"search_type\",\n display_name=\"Search Type\",\n options=[\"Similarity\", \"MMR\"],\n value=\"Similarity\",\n advanced=True,\n ),\n IntInput(\n name=\"number_of_results\",\n display_name=\"Number of Results\",\n info=\"Number of results to return.\",\n advanced=True,\n value=4,\n ),\n ]\n\n def build_vector_store(self):\n try:\n from langchain_astradb import AstraDBVectorStore\n from langchain_astradb.utils.astradb import SetupMode\n except ImportError:\n raise ImportError(\n \"Could not import langchain Astra DB integration package. 
\"\n \"Please install it with `pip install langchain-astradb`.\"\n )\n\n try:\n if not self.setup_mode:\n self.setup_mode = self._inputs[\"setup_mode\"].options[0]\n\n setup_mode_value = SetupMode[self.setup_mode.upper()]\n except KeyError:\n raise ValueError(f\"Invalid setup mode: {self.setup_mode}\")\n\n if not isinstance(self.embedding, dict):\n embedding_dict = {\"embedding\": self.embedding}\n else:\n from astrapy.info import CollectionVectorServiceOptions\n\n dict_options = self.embedding.get(\"collection_vector_service_options\", {})\n dict_options[\"authentication\"] = {\n k: v for k, v in dict_options.get(\"authentication\", {}).items() if k and v\n }\n dict_options[\"parameters\"] = {k: v for k, v in dict_options.get(\"parameters\", {}).items() if k and v}\n embedding_dict = {\n \"collection_vector_service_options\": CollectionVectorServiceOptions.from_dict(dict_options),\n \"collection_embedding_api_key\": self.embedding.get(\"collection_embedding_api_key\"),\n }\n vector_store_kwargs = {\n **embedding_dict,\n \"collection_name\": self.collection_name,\n \"token\": self.token,\n \"api_endpoint\": self.api_endpoint,\n \"namespace\": self.namespace or None,\n \"metric\": self.metric or None,\n \"batch_size\": self.batch_size or None,\n \"bulk_insert_batch_concurrency\": self.bulk_insert_batch_concurrency or None,\n \"bulk_insert_overwrite_concurrency\": self.bulk_insert_overwrite_concurrency or None,\n \"bulk_delete_concurrency\": self.bulk_delete_concurrency or None,\n \"setup_mode\": setup_mode_value,\n \"pre_delete_collection\": self.pre_delete_collection or False,\n }\n\n if self.metadata_indexing_include:\n vector_store_kwargs[\"metadata_indexing_include\"] = self.metadata_indexing_include\n elif self.metadata_indexing_exclude:\n vector_store_kwargs[\"metadata_indexing_exclude\"] = self.metadata_indexing_exclude\n elif self.collection_indexing_policy:\n vector_store_kwargs[\"collection_indexing_policy\"] = self.collection_indexing_policy\n\n try:\n vector_store = AstraDBVectorStore(**vector_store_kwargs)\n except Exception as e:\n raise ValueError(f\"Error initializing AstraDBVectorStore: {str(e)}\") from e\n\n self.status = self._astradb_collection_to_data(vector_store.collection)\n return vector_store\n\n def _add_documents_to_vector_store(self, vector_store):\n documents = []\n for _input in self.ingest_data or []:\n if isinstance(_input, Data):\n documents.append(_input.to_lc_document())\n else:\n raise ValueError(\"Vector Store Inputs must be Data objects.\")\n\n if documents and self.embedding is not None:\n logger.debug(f\"Adding {len(documents)} documents to the Vector Store.\")\n try:\n vector_store.add_documents(documents)\n except Exception as e:\n raise ValueError(f\"Error adding documents to AstraDBVectorStore: {str(e)}\") from e\n else:\n logger.debug(\"No documents to add to the Vector Store.\")\n\n def search_documents(self):\n vector_store = self.build_vector_store()\n\n logger.debug(f\"Search input: {self.search_input}\")\n logger.debug(f\"Search type: {self.search_type}\")\n logger.debug(f\"Number of results: {self.number_of_results}\")\n\n if self.search_input and isinstance(self.search_input, str) and self.search_input.strip():\n try:\n if self.search_type == \"Similarity\":\n docs = vector_store.similarity_search(\n query=self.search_input,\n k=self.number_of_results,\n )\n elif self.search_type == \"MMR\":\n docs = vector_store.max_marginal_relevance_search(\n query=self.search_input,\n k=self.number_of_results,\n )\n else:\n raise ValueError(f\"Invalid 
search type: {self.search_type}\")\n except Exception as e:\n raise ValueError(f\"Error performing search in AstraDBVectorStore: {str(e)}\") from e\n\n logger.debug(f\"Retrieved documents: {len(docs)}\")\n\n data = [Data.from_document(doc) for doc in docs]\n logger.debug(f\"Converted documents to data: {len(data)}\")\n self.status = data\n return data\n else:\n logger.debug(\"No search input provided. Skipping search.\")\n return []\n\n def _astradb_collection_to_data(self, collection):\n data = []\n data_dict = collection.find()\n if data_dict and \"data\" in data_dict:\n data_dict = data_dict[\"data\"].get(\"documents\", [])\n\n for item in data_dict:\n data.append(Data(content=item[\"content\"]))\n return data\n" + "trace_as_metadata": true, + "type": "int", + "value": 3 }, - "collection_indexing_policy": { - "advanced": true, - "display_name": "Collection Indexing Policy", + "model": { + "advanced": false, + "display_name": "Model", "dynamic": false, - "info": "Optional dictionary defining the indexing policy for the collection.", - "list": false, - "load_from_db": false, - "name": "collection_indexing_policy", + "info": "", + "name": "model", + "options": [ + "text-embedding-3-small", + "text-embedding-3-large", + "text-embedding-ada-002" + ], "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, "type": "str", - "value": "" + "value": "text-embedding-3-small" }, - "collection_name": { - "advanced": false, - "display_name": "Collection Name", + "model_kwargs": { + "advanced": true, + "display_name": "Model Kwargs", "dynamic": false, - "info": "The name of the collection within Astra DB where the vectors will be stored.", + "info": "", "list": false, - "load_from_db": false, - "name": "collection_name", + "name": "model_kwargs", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "langflow" + "trace_as_input": true, + "type": "dict", + "value": {} }, - "embedding": { - "advanced": false, - "display_name": "Embedding or Astra Vectorize", + "openai_api_base": { + "advanced": true, + "display_name": "OpenAI API Base", "dynamic": false, "info": "", - "input_types": [ - "Embeddings", - "dict" - ], - "list": false, - "name": "embedding", + "input_types": [], + "load_from_db": false, + "name": "openai_api_base", + "password": true, "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_metadata": true, - "type": "other", + "type": "str", "value": "" }, - "ingest_data": { + "openai_api_key": { "advanced": false, - "display_name": "Ingest Data", + "display_name": "OpenAI API Key", "dynamic": false, "info": "", - "input_types": [ - "Data" - ], - "list": true, - "name": "ingest_data", + "input_types": [], + "load_from_db": true, + "name": "openai_api_key", + "password": true, "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "other", - "value": "" + "type": "str", + "value": "OPENAI_API_KEY" }, - "metadata_indexing_exclude": { + "openai_api_type": { "advanced": true, - "display_name": "Metadata Indexing Exclude", + "display_name": "OpenAI API Type", "dynamic": false, - "info": "Optional list of metadata fields to exclude from the indexing.", - "list": false, + "info": "", + "input_types": [], "load_from_db": false, - "name": "metadata_indexing_exclude", + "name": "openai_api_type", + "password": true, "placeholder": "", "required": false, "show": true, 
"title_case": false, - "trace_as_metadata": true, "type": "str", "value": "" }, - "metadata_indexing_include": { + "openai_api_version": { "advanced": true, - "display_name": "Metadata Indexing Include", + "display_name": "OpenAI API Version", "dynamic": false, - "info": "Optional list of metadata fields to include in the indexing.", + "info": "", + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, - "name": "metadata_indexing_include", + "name": "openai_api_version", "placeholder": "", "required": false, "show": true, "title_case": false, + "trace_as_input": true, "trace_as_metadata": true, "type": "str", "value": "" }, - "metric": { + "openai_organization": { "advanced": true, - "display_name": "Metric", + "display_name": "OpenAI Organization", "dynamic": false, - "info": "Optional distance metric for vector comparisons in the vector store.", - "name": "metric", - "options": [ - "cosine", - "dot_product", - "euclidean" + "info": "", + "input_types": [ + "Message" ], + "list": false, + "load_from_db": false, + "name": "openai_organization", "placeholder": "", "required": false, "show": true, "title_case": false, + "trace_as_input": true, "trace_as_metadata": true, "type": "str", "value": "" }, - "namespace": { + "openai_proxy": { "advanced": true, - "display_name": "Namespace", + "display_name": "OpenAI Proxy", "dynamic": false, - "info": "Optional namespace within Astra DB to use for the collection.", + "info": "", + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, - "name": "namespace", + "name": "openai_proxy", "placeholder": "", "required": false, "show": true, "title_case": false, + "trace_as_input": true, "trace_as_metadata": true, "type": "str", "value": "" }, - "number_of_results": { + "request_timeout": { "advanced": true, - "display_name": "Number of Results", + "display_name": "Request Timeout", "dynamic": false, - "info": "Number of results to return.", + "info": "", "list": false, - "name": "number_of_results", + "name": "request_timeout", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "int", - "value": 4 + "type": "float", + "value": "" }, - "pre_delete_collection": { + "show_progress_bar": { "advanced": true, - "display_name": "Pre Delete Collection", + "display_name": "Show Progress Bar", "dynamic": false, - "info": "Boolean flag to determine whether to delete the collection before creating a new one.", + "info": "", "list": false, - "name": "pre_delete_collection", + "name": "show_progress_bar", "placeholder": "", "required": false, "show": true, @@ -2636,94 +2686,70 @@ "type": "bool", "value": false }, - "search_input": { - "advanced": false, - "display_name": "Search Input", + "skip_empty": { + "advanced": true, + "display_name": "Skip Empty", "dynamic": false, "info": "", - "input_types": [ - "Message" - ], "list": false, - "load_from_db": false, - "multiline": true, - "name": "search_input", + "name": "skip_empty", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_input": true, "trace_as_metadata": true, - "type": "str", - "value": "" + "type": "bool", + "value": false }, - "search_type": { + "tiktoken_enable": { "advanced": true, - "display_name": "Search Type", + "display_name": "TikToken Enable", "dynamic": false, - "info": "", - "name": "search_type", - "options": [ - "Similarity", - "MMR" - ], + "info": "If False, you must have transformers installed.", + "list": false, + "name": "tiktoken_enable", "placeholder": "", 
"required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "str", - "value": "Similarity" + "type": "bool", + "value": true }, - "setup_mode": { + "tiktoken_model_name": { "advanced": true, - "display_name": "Setup Mode", + "display_name": "TikToken Model Name", "dynamic": false, - "info": "Configuration mode for setting up the vector store, with options like 'Sync', 'Async', or 'Off'.", - "name": "setup_mode", - "options": [ - "Sync", - "Async", - "Off" + "info": "", + "input_types": [ + "Message" ], - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "Sync" - }, - "token": { - "advanced": false, - "display_name": "Astra DB Application Token", - "dynamic": false, - "info": "Authentication token for accessing Astra DB.", - "input_types": [], + "list": false, "load_from_db": false, - "name": "token", - "password": true, + "name": "tiktoken_model_name", "placeholder": "", "required": false, "show": true, "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, "type": "str", "value": "" } } }, - "type": "AstraDB" + "type": "OpenAIEmbeddings" }, "dragging": false, - "height": 753, - "id": "AstraDB-vyd5U", + "height": 394, + "id": "OpenAIEmbeddings-fpOKp", "position": { - "x": 2676.4816074350247, - "y": 1269.304067004569 + "x": 2044.683126356786, + "y": 1785.2283494456522 }, "positionAbsolute": { - "x": 2676.4816074350247, - "y": 1269.304067004569 + "x": 2044.683126356786, + "y": 1785.2283494456522 }, "selected": false, "type": "genericNode", @@ -2731,7 +2757,7 @@ }, { "data": { - "id": "OpenAIEmbeddings-sRZMc", + "id": "OpenAIEmbeddings-lCQlU", "node": { "base_classes": [ "Embeddings" @@ -2836,7 +2862,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.base.embeddings.model import LCEmbeddingsModel\nfrom langflow.field_typing import Embeddings\nfrom langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput\n\n\nclass OpenAIEmbeddingsComponent(LCEmbeddingsModel):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n icon = \"OpenAI\"\n inputs = [\n DictInput(\n name=\"default_headers\",\n display_name=\"Default Headers\",\n advanced=True,\n info=\"Default headers to use for the API request.\",\n ),\n DictInput(\n name=\"default_query\",\n display_name=\"Default Query\",\n advanced=True,\n info=\"Default query parameters to use for the API request.\",\n ),\n IntInput(name=\"chunk_size\", display_name=\"Chunk Size\", advanced=True, value=1000),\n MessageTextInput(name=\"client\", display_name=\"Client\", advanced=True),\n MessageTextInput(name=\"deployment\", display_name=\"Deployment\", advanced=True),\n IntInput(name=\"embedding_ctx_length\", display_name=\"Embedding Context Length\", advanced=True, value=1536),\n IntInput(name=\"max_retries\", display_name=\"Max Retries\", value=3, advanced=True),\n DropdownInput(\n name=\"model\",\n display_name=\"Model\",\n advanced=False,\n options=[\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n value=\"text-embedding-3-small\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n SecretStrInput(name=\"openai_api_base\", display_name=\"OpenAI API Base\", advanced=True),\n SecretStrInput(name=\"openai_api_key\", display_name=\"OpenAI API 
Key\"),\n SecretStrInput(name=\"openai_api_type\", display_name=\"OpenAI API Type\", advanced=True),\n MessageTextInput(name=\"openai_api_version\", display_name=\"OpenAI API Version\", advanced=True),\n MessageTextInput(\n name=\"openai_organization\",\n display_name=\"OpenAI Organization\",\n advanced=True,\n ),\n MessageTextInput(name=\"openai_proxy\", display_name=\"OpenAI Proxy\", advanced=True),\n FloatInput(name=\"request_timeout\", display_name=\"Request Timeout\", advanced=True),\n BoolInput(name=\"show_progress_bar\", display_name=\"Show Progress Bar\", advanced=True),\n BoolInput(name=\"skip_empty\", display_name=\"Skip Empty\", advanced=True),\n MessageTextInput(\n name=\"tiktoken_model_name\",\n display_name=\"TikToken Model Name\",\n advanced=True,\n ),\n BoolInput(\n name=\"tiktoken_enable\",\n display_name=\"TikToken Enable\",\n advanced=True,\n value=True,\n info=\"If False, you must have transformers installed.\",\n ),\n ]\n\n def build_embeddings(self) -> Embeddings:\n return OpenAIEmbeddings(\n tiktoken_enabled=self.tiktoken_enable,\n default_headers=self.default_headers,\n default_query=self.default_query,\n allowed_special=\"all\",\n disallowed_special=\"all\",\n chunk_size=self.chunk_size,\n deployment=self.deployment,\n embedding_ctx_length=self.embedding_ctx_length,\n max_retries=self.max_retries,\n model=self.model,\n model_kwargs=self.model_kwargs,\n base_url=self.openai_api_base,\n api_key=self.openai_api_key,\n openai_api_type=self.openai_api_type,\n api_version=self.openai_api_version,\n organization=self.openai_organization,\n openai_proxy=self.openai_proxy,\n timeout=self.request_timeout or None,\n show_progress_bar=self.show_progress_bar,\n skip_empty=self.skip_empty,\n tiktoken_model_name=self.tiktoken_model_name,\n )\n" + "value": "from langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.base.embeddings.model import LCEmbeddingsModel\nfrom langflow.field_typing import Embeddings\nfrom langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput\n\n\nclass OpenAIEmbeddingsComponent(LCEmbeddingsModel):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n icon = \"OpenAI\"\n inputs = [\n DictInput(\n name=\"default_headers\",\n display_name=\"Default Headers\",\n advanced=True,\n info=\"Default headers to use for the API request.\",\n ),\n DictInput(\n name=\"default_query\",\n display_name=\"Default Query\",\n advanced=True,\n info=\"Default query parameters to use for the API request.\",\n ),\n IntInput(name=\"chunk_size\", display_name=\"Chunk Size\", advanced=True, value=1000),\n MessageTextInput(name=\"client\", display_name=\"Client\", advanced=True),\n MessageTextInput(name=\"deployment\", display_name=\"Deployment\", advanced=True),\n IntInput(name=\"embedding_ctx_length\", display_name=\"Embedding Context Length\", advanced=True, value=1536),\n IntInput(name=\"max_retries\", display_name=\"Max Retries\", value=3, advanced=True),\n DropdownInput(\n name=\"model\",\n display_name=\"Model\",\n advanced=False,\n options=[\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n value=\"text-embedding-3-small\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n SecretStrInput(name=\"openai_api_base\", display_name=\"OpenAI API Base\", advanced=True),\n SecretStrInput(name=\"openai_api_key\", display_name=\"OpenAI API Key\", value=\"OPENAI_API_KEY\"),\n 
SecretStrInput(name=\"openai_api_type\", display_name=\"OpenAI API Type\", advanced=True),\n MessageTextInput(name=\"openai_api_version\", display_name=\"OpenAI API Version\", advanced=True),\n MessageTextInput(\n name=\"openai_organization\",\n display_name=\"OpenAI Organization\",\n advanced=True,\n ),\n MessageTextInput(name=\"openai_proxy\", display_name=\"OpenAI Proxy\", advanced=True),\n FloatInput(name=\"request_timeout\", display_name=\"Request Timeout\", advanced=True),\n BoolInput(name=\"show_progress_bar\", display_name=\"Show Progress Bar\", advanced=True),\n BoolInput(name=\"skip_empty\", display_name=\"Skip Empty\", advanced=True),\n MessageTextInput(\n name=\"tiktoken_model_name\",\n display_name=\"TikToken Model Name\",\n advanced=True,\n ),\n BoolInput(\n name=\"tiktoken_enable\",\n display_name=\"TikToken Enable\",\n advanced=True,\n value=True,\n info=\"If False, you must have transformers installed.\",\n ),\n ]\n\n def build_embeddings(self) -> Embeddings:\n return OpenAIEmbeddings(\n tiktoken_enabled=self.tiktoken_enable,\n default_headers=self.default_headers,\n default_query=self.default_query,\n allowed_special=\"all\",\n disallowed_special=\"all\",\n chunk_size=self.chunk_size,\n deployment=self.deployment,\n embedding_ctx_length=self.embedding_ctx_length,\n max_retries=self.max_retries,\n model=self.model,\n model_kwargs=self.model_kwargs,\n base_url=self.openai_api_base,\n api_key=self.openai_api_key,\n openai_api_type=self.openai_api_type,\n api_version=self.openai_api_version,\n organization=self.openai_organization,\n openai_proxy=self.openai_proxy,\n timeout=self.request_timeout or None,\n show_progress_bar=self.show_progress_bar,\n skip_empty=self.skip_empty,\n tiktoken_model_name=self.tiktoken_model_name,\n )\n" }, "default_headers": { "advanced": true, @@ -2958,7 +2984,7 @@ "dynamic": false, "info": "", "input_types": [], - "load_from_db": true, + "load_from_db": false, "name": "openai_api_base", "password": true, "placeholder": "", @@ -2974,7 +3000,7 @@ "dynamic": false, "info": "", "input_types": [], - "load_from_db": false, + "load_from_db": true, "name": "openai_api_key", "password": true, "placeholder": "", @@ -2982,7 +3008,7 @@ "show": true, "title_case": false, "type": "str", - "value": "" + "value": "OPENAI_API_KEY" }, "openai_api_type": { "advanced": true, @@ -2990,7 +3016,7 @@ "dynamic": false, "info": "", "input_types": [], - "load_from_db": true, + "load_from_db": false, "name": "openai_api_type", "password": true, "placeholder": "", @@ -3146,14 +3172,14 @@ }, "dragging": false, "height": 394, - "id": "OpenAIEmbeddings-sRZMc", + "id": "OpenAIEmbeddings-lCQlU", "position": { - "x": 2050.0569098721217, - "y": 1823.5240486490072 + "x": 628.9252513328779, + "y": 648.6750537749285 }, "positionAbsolute": { - "x": 2050.0569098721217, - "y": 1823.5240486490072 + "x": 628.9252513328779, + "y": 648.6750537749285 }, "selected": false, "type": "genericNode", @@ -3161,15 +3187,15 @@ } ], "viewport": { - "x": -108.04801490857153, - "y": -44.38043074355511, - "zoom": 0.32281188532359256 + "x": -110.08684771034166, + "y": -46.27017080984389, + "zoom": 0.3228119071747796 } }, "description": "Visit https://docs.langflow.org/tutorials/rag-with-astradb for a detailed guide of this project.\nThis project give you both Ingestion and RAG in a single file. 
You'll need to visit https://astra.datastax.com/ to create an Astra DB instance, generate your Token, and grab an API Endpoint.\nRunning this project requires you to add a file in the Files component, then define a Collection Name and click on the Play icon on the Astra DB component.\n\nAfter the ingestion ends, you are ready to click on the Run button at the lower left corner and start asking questions about your data.", "endpoint_name": null, - "id": "7804e4a4-8e16-45e0-88ab-ed6248daa0eb", + "id": "f1a53ec2-49e2-4029-b8a8-1a73079f9653", "is_component": false, - "last_tested_version": "1.0.0rc1", + "last_tested_version": "1.0.5", "name": "Vector Store RAG" } \ No newline at end of file diff --git a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx index cb65a71c12c..10e5aa3e4a8 100644 --- a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx @@ -174,9 +174,10 @@ export default function ParameterComponent({ const handleOnNewValue = async ( newValue: string | string[] | boolean | Object[], + dbValue?: boolean, skipSnapshot: boolean | undefined = false, ): Promise<void> => { - handleOnNewValueHook(newValue, skipSnapshot); + handleOnNewValueHook(newValue, dbValue, skipSnapshot); }; const handleNodeClass = (newNodeClass: APIClassType, code?: string): void => { @@ -470,16 +471,6 @@ { - setNode(data.id, (oldNode) => { - let newNode = cloneDeep(oldNode); - newNode.data = { - ...newNode.data, - }; - newNode.data.node.template[name].load_from_db = value; - return newNode; - }); - }} name={name} data={data.node?.template[name]!} /> diff --git a/src/frontend/src/CustomNodes/hooks/use-handle-new-value.tsx b/src/frontend/src/CustomNodes/hooks/use-handle-new-value.tsx index a32c4d2dc56..25ef5482500 100644 --- a/src/frontend/src/CustomNodes/hooks/use-handle-new-value.tsx +++ b/src/frontend/src/CustomNodes/hooks/use-handle-new-value.tsx @@ -18,7 +18,7 @@ const useHandleOnNewValue = ( ) => { const setErrorData = useAlertStore((state) => state.setErrorData); - const handleOnNewValue = async (newValue, skipSnapshot = false) => { + const handleOnNewValue = async (newValue, dbValue, skipSnapshot = false) => { const nodeTemplate = data.node!.template[name]; const currentValue = nodeTemplate.value; @@ -63,6 +63,10 @@ ...newNode.data, }; + if (dbValue !== undefined) { + newNode.data.node.template[name].load_from_db = dbValue; + } + if (data.node?.template[name].real_time_refresh && newTemplate) { newNode.data.node.template = newTemplate; } else { diff --git a/src/frontend/src/components/addNewVariableButtonComponent/addNewVariableButton.tsx b/src/frontend/src/components/addNewVariableButtonComponent/addNewVariableButton.tsx index 415cd92d78c..4dcda2deb9d 100644 --- a/src/frontend/src/components/addNewVariableButtonComponent/addNewVariableButton.tsx +++ b/src/frontend/src/components/addNewVariableButtonComponent/addNewVariableButton.tsx @@ -29,7 +29,9 @@ export default function AddNewVariableButton({ const setErrorData = useAlertStore((state) => state.setErrorData); const componentFields = useTypesStore((state) => state.ComponentFields); const unavaliableFields = new Set( - Object.keys(useGlobalVariablesStore((state) => state.unavaliableFields)), + Object.keys( + useGlobalVariablesStore((state) => state.unavaliableFields) ?? 
{}, + ), ); const availableFields = () => { diff --git a/src/frontend/src/components/codeAreaComponent/index.tsx b/src/frontend/src/components/codeAreaComponent/index.tsx index 00721137fa3..ad8cfaf6f4a 100644 --- a/src/frontend/src/components/codeAreaComponent/index.tsx +++ b/src/frontend/src/components/codeAreaComponent/index.tsx @@ -23,7 +23,7 @@ export default function CodeAreaComponent({ useEffect(() => { if (disabled && myValue !== "") { setMyValue(""); - onChange("", true); + onChange("", undefined, true); } }, [disabled]); diff --git a/src/frontend/src/components/floatComponent/index.tsx b/src/frontend/src/components/floatComponent/index.tsx index 8db5b823a58..07de1fbb253 100644 --- a/src/frontend/src/components/floatComponent/index.tsx +++ b/src/frontend/src/components/floatComponent/index.tsx @@ -16,7 +16,7 @@ export default function FloatComponent({ // Clear component state useEffect(() => { if (disabled && value !== "") { - onChange("", true); + onChange("", undefined, true); } }, [disabled]); diff --git a/src/frontend/src/components/inputFileComponent/index.tsx b/src/frontend/src/components/inputFileComponent/index.tsx index 9710f7f6131..f8dd9725f2b 100644 --- a/src/frontend/src/components/inputFileComponent/index.tsx +++ b/src/frontend/src/components/inputFileComponent/index.tsx @@ -27,7 +27,7 @@ export default function InputFileComponent({ useEffect(() => { if (disabled && value !== "") { setMyValue(""); - onChange("", true); + onChange("", undefined, true); onFileChange(""); } }, [disabled, onChange]); diff --git a/src/frontend/src/components/inputGlobalComponent/index.tsx b/src/frontend/src/components/inputGlobalComponent/index.tsx index 4d20e813238..1d3cf4f6089 100644 --- a/src/frontend/src/components/inputGlobalComponent/index.tsx +++ b/src/frontend/src/components/inputGlobalComponent/index.tsx @@ -1,4 +1,5 @@ import { useEffect } from "react"; +import { Controller } from "react-hook-form"; import { deleteGlobalVariable } from "../../controllers/API"; import DeleteConfirmationModal from "../../modals/deleteConfirmationModal"; import useAlertStore from "../../stores/alertStore"; @@ -13,7 +14,6 @@ import { CommandItem } from "../ui/command"; export default function InputGlobalComponent({ disabled, onChange, - setDb, name, data, editNode = false, @@ -23,39 +23,17 @@ export default function InputGlobalComponent({ ); const getVariableId = useGlobalVariablesStore((state) => state.getVariableId); - const unavaliableFields = useGlobalVariablesStore( - (state) => state.unavaliableFields, - ); const removeGlobalVariable = useGlobalVariablesStore( (state) => state.removeGlobalVariable, ); const setErrorData = useAlertStore((state) => state.setErrorData); useEffect(() => { - if (data) - if ( - ((globalVariablesEntries && - !globalVariablesEntries.includes(data.value)) || - !globalVariablesEntries) && - data.load_from_db - ) { - setTimeout(() => { - onChange("", true); - setDb(false); - }, 100); + if (data && globalVariablesEntries) + if (data.load_from_db && !globalVariablesEntries.includes(data.value)) { + onChange("", false, true); } - }, [globalVariablesEntries, data]); - - useEffect(() => { - if (!data.value && data.display_name) { - if (unavaliableFields[data.display_name!] 
&& !disabled) { - setTimeout(() => { - setDb(true); - onChange(unavaliableFields[data.display_name!]); - }, 100); - } - } - }, [unavaliableFields]); + }, [globalVariablesEntries]); async function handleDelete(key: string) { const id = getVariableId(key); @@ -64,8 +42,7 @@ export default function InputGlobalComponent({ .then(() => { removeGlobalVariable(key); if (data?.value === key && data?.load_from_db) { - onChange(""); - setDb(false); + onChange("", false); } }) .catch(() => { @@ -137,12 +114,10 @@ export default function InputGlobalComponent({ : "" } setSelectedOption={(value) => { - onChange(value); - setDb(value !== "" ? true : false); + onChange(value, value !== "" ? true : false); }} onChange={(value, skipSnapshot) => { - onChange(value, skipSnapshot); - setDb(false); + onChange(value, false, skipSnapshot); }} /> ); diff --git a/src/frontend/src/components/intComponent/index.tsx b/src/frontend/src/components/intComponent/index.tsx index f0d67a4021a..7621d67322c 100644 --- a/src/frontend/src/components/intComponent/index.tsx +++ b/src/frontend/src/components/intComponent/index.tsx @@ -19,7 +19,7 @@ export default function IntComponent({ // Clear component state useEffect(() => { if (disabled && value !== "") { - onChange("", true); + onChange("", undefined, true); } }, [disabled, onChange]); diff --git a/src/frontend/src/components/promptComponent/index.tsx b/src/frontend/src/components/promptComponent/index.tsx index 0dcec3dbf7f..c2e721baf14 100644 --- a/src/frontend/src/components/promptComponent/index.tsx +++ b/src/frontend/src/components/promptComponent/index.tsx @@ -19,7 +19,7 @@ export default function PromptAreaComponent({ }: PromptAreaComponentType): JSX.Element { useEffect(() => { if (disabled && value !== "") { - onChange("", true); + onChange("", undefined, true); } }, [disabled]); diff --git a/src/frontend/src/components/tableComponent/components/tableNodeCellRender/index.tsx b/src/frontend/src/components/tableComponent/components/tableNodeCellRender/index.tsx index d0c3d4e5336..1495ff98f47 100644 --- a/src/frontend/src/components/tableComponent/components/tableNodeCellRender/index.tsx +++ b/src/frontend/src/components/tableComponent/components/tableNodeCellRender/index.tsx @@ -24,32 +24,21 @@ import ToggleShadComponent from "../../../toggleShadComponent"; export default function TableNodeCellRender({ node: { data }, - value: { - value, - nodeClass, - handleOnNewValue: handleOnNewValueNode, - handleOnChangeDb: handleOnChangeDbNode, - }, + value: { value, nodeClass, handleOnNewValue: handleOnNewValueNode }, }: CustomCellRendererProps) { - const handleOnNewValue = (newValue: any, name: string) => { - handleOnNewValueNode(newValue, name); + const handleOnNewValue = (newValue: any, name: string, dbValue?: boolean) => { + handleOnNewValueNode(newValue, name, dbValue); setTemplateData((old) => { let newData = cloneDeep(old); newData.value = newValue; + if (dbValue) { + newData.load_from_db = newValue; + } return newData; }); setTemplateValue(newValue); }; - const handleOnChangeDb = (newValue: boolean, name: string) => { - handleOnChangeDbNode(newValue, name); - setTemplateData((old) => { - let newData = cloneDeep(old); - newData.load_from_db = newValue; - return newData; - }); - }; - const [templateValue, setTemplateValue] = useState(value); const [templateData, setTemplateData] = useState(data); @@ -106,10 +95,9 @@ export default function TableNodeCellRender({ handleOnNewValue(value, templateData.key)} - setDb={(value) => { - handleOnChangeDb(value, templateData.key); - }} + 
onChange={(value, dbValue, snapshot) => + handleOnNewValue(value, templateData.key, dbValue) + } name={templateData.key} data={templateData} /> diff --git a/src/frontend/src/components/textAreaComponent/index.tsx b/src/frontend/src/components/textAreaComponent/index.tsx index 18a7f5b4000..253e3eff9a8 100644 --- a/src/frontend/src/components/textAreaComponent/index.tsx +++ b/src/frontend/src/components/textAreaComponent/index.tsx @@ -18,7 +18,7 @@ export default function TextAreaComponent({ // Clear text area useEffect(() => { if (disabled && value !== "") { - onChange("", true); + onChange("", undefined, true); } }, [disabled]); diff --git a/src/frontend/src/modals/editNodeModal/hooks/use-column-defs.tsx b/src/frontend/src/modals/editNodeModal/hooks/use-column-defs.tsx index 71f8566dea1..cad877aa634 100644 --- a/src/frontend/src/modals/editNodeModal/hooks/use-column-defs.tsx +++ b/src/frontend/src/modals/editNodeModal/hooks/use-column-defs.tsx @@ -6,8 +6,7 @@ import { NodeDataType } from "../../../types/flow"; const useColumnDefs = ( myData: NodeDataType, - handleOnNewValue: (newValue: any, name: string) => void, - handleOnChangeDb: (value: boolean, key: string) => void, + handleOnNewValue: (newValue: any, name: string, setDb?: boolean) => void, changeAdvanced: (n: string) => void, open: boolean, ) => { @@ -49,7 +48,6 @@ value: params.data.value, nodeClass: myData.node, handleOnNewValue: handleOnNewValue, - handleOnChangeDb: handleOnChangeDb, }; }, minWidth: 340, diff --git a/src/frontend/src/modals/editNodeModal/index.tsx b/src/frontend/src/modals/editNodeModal/index.tsx index 991af0d7390..e267b6bac11 100644 --- a/src/frontend/src/modals/editNodeModal/index.tsx +++ b/src/frontend/src/modals/editNodeModal/index.tsx @@ -36,12 +36,11 @@ const EditNodeModal = forwardRef( !myData.current.node!.template[n]?.advanced; } - const handleOnNewValue = (newValue: any, key: string) => { + const handleOnNewValue = (newValue: any, key: string, setDb?: boolean) => { myData.current.node!.template[key].value = newValue; - }; - - const handleOnChangeDb = (newValue: boolean, key: string) => { - myData.current.node!.template[key].load_from_db = newValue; + if (setDb !== undefined) { + myData.current.node!.template[key].load_from_db = setDb; + } }; const rowData = useRowData(data, open); @@ -49,7 +48,6 @@ const columnDefs: ColDef[] = useColumnDefs( data, handleOnNewValue, - handleOnChangeDb, changeAdvanced, open, ); diff --git a/src/frontend/src/modals/newFlowModal/index.tsx b/src/frontend/src/modals/newFlowModal/index.tsx index 37a6afeeb91..295ecd3957d 100644 --- a/src/frontend/src/modals/newFlowModal/index.tsx +++ b/src/frontend/src/modals/newFlowModal/index.tsx @@ -10,12 +10,6 @@ export default function NewFlowModal({ }: newFlowModalPropsType): JSX.Element { const examples = useFlowsManagerStore((state) => state.examples); - examples?.forEach((example) => { - if (example.name === "Blog Writter") { - example.name = "Blog Writer"; - } - }); - return ( diff --git a/src/frontend/src/stores/flowStore.ts b/src/frontend/src/stores/flowStore.ts index df12c944c31..0b67dd88fc5 100644 --- a/src/frontend/src/stores/flowStore.ts +++ b/src/frontend/src/stores/flowStore.ts @@ -40,6 +40,7 @@ import { getInputsAndOutputs } from "../utils/storeUtils"; import useAlertStore from "./alertStore"; import { useDarkStore } from "./darkStore"; import useFlowsManagerStore from "./flowsManagerStore"; +import { useGlobalVariablesStore } from "./globalVariablesStore/globalVariables"; // this 
is our useStore hook that we can use in our components to get parts of the store and call actions const useFlowStore = create((set, get) => ({ @@ -288,7 +289,12 @@ const useFlowStore = create((set, get) => ({ id: newId, }, }; - updateGroupRecursion(newNode, selection.edges); + updateGroupRecursion( + newNode, + selection.edges, + useGlobalVariablesStore.getState().unavaliableFields, + useGlobalVariablesStore.getState().globalVariablesEntries, + ); // Add the new node to the list of nodes in state newNodes = newNodes diff --git a/src/frontend/src/stores/flowsManagerStore.ts b/src/frontend/src/stores/flowsManagerStore.ts index a804710ac00..a99916107a3 100644 --- a/src/frontend/src/stores/flowsManagerStore.ts +++ b/src/frontend/src/stores/flowsManagerStore.ts @@ -1,3 +1,4 @@ +import { AxiosError } from "axios"; import { cloneDeep } from "lodash"; import pDebounce from "p-debounce"; import { Edge, Node, Viewport, XYPosition } from "reactflow"; @@ -23,11 +24,13 @@ import { extractFieldsFromComponenents, processDataFromFlow, processFlows, + updateGroupRecursion, } from "../utils/reactflowUtils"; import useAlertStore from "./alertStore"; import { useDarkStore } from "./darkStore"; import useFlowStore from "./flowStore"; import { useFolderStore } from "./foldersStore"; +import { useGlobalVariablesStore } from "./globalVariablesStore/globalVariables"; import { useTypesStore } from "./typesStore"; let saveTimeoutId: NodeJS.Timeout | null = null; @@ -202,6 +205,14 @@ const useFlowsManagerStore = create((set, get) => ({ let flowData = flow ? processDataFromFlow(flow) : { nodes: [], edges: [], viewport: { zoom: 1, x: 0, y: 0 } }; + flowData?.nodes.forEach((node) => { + updateGroupRecursion( + node, + flowData?.edges, + useGlobalVariablesStore.getState().unavaliableFields, + useGlobalVariablesStore.getState().globalVariablesEntries, + ); + }); if (newProject) { // Create a new flow with a default name if no flow is provided. 
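For illustration, here is a minimal, self-contained TypeScript sketch of the two reconciliation rules that the updateGlobalVariables helper (added in reactflowUtils.ts further down) applies to each template field when a flow or group is loaded. The reduced TemplateField shape and the reconcileField name are assumptions for illustration only, not code from this change:

// Sketch only: mirrors the logic of updateGlobalVariables, with the template
// field reduced to the three properties the rules actually touch.
type TemplateField = {
  display_name?: string;
  value: string;
  load_from_db: boolean;
};

function reconcileField(
  field: TemplateField,
  globalVariablesEntries: string[] | undefined, // names of existing global variables
  unavailableFields: { [name: string]: string } | undefined, // display_name -> variable name
): TemplateField {
  // Rule 1: a field bound to a global variable that no longer exists
  // is cleared and unbound.
  if (
    globalVariablesEntries &&
    field.load_from_db &&
    !globalVariablesEntries.includes(field.value)
  ) {
    return { ...field, value: "", load_from_db: false };
  }
  // Rule 2: an empty, unbound field whose display name matches a known
  // default (e.g. "OpenAI API Key" -> "OPENAI_API_KEY") is re-bound.
  if (
    !field.load_from_db &&
    field.value === "" &&
    unavailableFields &&
    (field.display_name ?? "") in unavailableFields
  ) {
    return {
      ...field,
      value: unavailableFields[field.display_name ?? ""],
      load_from_db: true,
    };
  }
  return field;
}

Under these rules, the starter-project fields above that now ship with values like "OPENAI_API_KEY" or "ASTRA_DB_APPLICATION_TOKEN" and load_from_db set to true resolve against the user's saved global variables on load, and fall back to an empty, unbound field when no matching variable exists.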
const folder_id = useFolderStore.getState().folderUrl; diff --git a/src/frontend/src/stores/globalVariablesStore/globalVariables.ts b/src/frontend/src/stores/globalVariablesStore/globalVariables.ts index 708f7ec09fa..4f80559f5d2 100644 --- a/src/frontend/src/stores/globalVariablesStore/globalVariables.ts +++ b/src/frontend/src/stores/globalVariablesStore/globalVariables.ts @@ -4,12 +4,12 @@ import getUnavailableFields from "./utils/get-unavailable-fields"; export const useGlobalVariablesStore = create( (set, get) => ({ - unavaliableFields: {}, + unavaliableFields: undefined, setUnavaliableFields: (fields) => { set({ unavaliableFields: fields }); }, removeUnavaliableField: (field) => { - const newFields = get().unavaliableFields; + const newFields = get().unavaliableFields || {}; delete newFields[field]; set({ unavaliableFields: newFields }); }, @@ -18,7 +18,7 @@ export const useGlobalVariablesStore = create( setGlobalVariables: (variables) => { set({ globalVariables: variables, - globalVariablesEntries: Object.keys(variables), + globalVariablesEntries: Object.keys(variables) || [], unavaliableFields: getUnavailableFields(variables), }); }, @@ -27,7 +27,7 @@ export const useGlobalVariablesStore = create( const newVariables = { ...get().globalVariables, [name]: data }; set({ globalVariables: newVariables, - globalVariablesEntries: Object.keys(newVariables), + globalVariablesEntries: Object.keys(newVariables) || [], unavaliableFields: getUnavailableFields(newVariables), }); }, @@ -38,7 +38,7 @@ export const useGlobalVariablesStore = create( delete newVariables[name]; set({ globalVariables: newVariables, - globalVariablesEntries: Object.keys(newVariables), + globalVariablesEntries: Object.keys(newVariables) || [], unavaliableFields: getUnavailableFields(newVariables), }); }, diff --git a/src/frontend/src/types/components/index.ts b/src/frontend/src/types/components/index.ts index 09324920f9c..7c5c60579f0 100644 --- a/src/frontend/src/types/components/index.ts +++ b/src/frontend/src/types/components/index.ts @@ -91,8 +91,7 @@ export type InputListComponentType = { export type InputGlobalComponentType = { disabled: boolean; - onChange: (value: string, snapshot?: boolean) => void; - setDb: (value: boolean) => void; + onChange: (value: string, dbValue: boolean, snapshot?: boolean) => void; name: string; data: InputFieldType; editNode?: boolean; @@ -124,7 +123,11 @@ export type TextAreaComponentType = { nodeClass?: APIClassType; setNodeClass?: (value: APIClassType) => void; disabled: boolean; - onChange: (value: string[] | string, skipSnapshot?: boolean) => void; + onChange: ( + value: string[] | string, + dbValue?: boolean, + skipSnapshot?: boolean, + ) => void; value: string; editNode?: boolean; id?: string; @@ -146,7 +149,11 @@ export type PromptAreaComponentType = { nodeClass?: APIClassType; setNodeClass?: (value: APIClassType, code?: string) => void; disabled: boolean; - onChange: (value: string[] | string, skipSnapshot?: boolean) => void; + onChange: ( + value: string[] | string, + dbValue?: boolean, + skipSnapshot?: boolean, + ) => void; value: string; readonly?: boolean; editNode?: boolean; @@ -156,7 +163,11 @@ export type PromptAreaComponentType = { export type CodeAreaComponentType = { setOpenModal?: (bool: boolean) => void; disabled: boolean; - onChange: (value: string[] | string, skipSnapshot?: boolean) => void; + onChange: ( + value: string[] | string, + dbValue?: boolean, + skipSnapshot?: boolean, + ) => void; value: string; editNode?: boolean; nodeClass?: APIClassType; @@ -171,7 
+182,11 @@ export type FileComponentType = { IOInputProps?; disabled: boolean; - onChange: (value: string[] | string, skipSnapshot?: boolean) => void; + onChange: ( + value: string[] | string, + dbValue?: boolean, + skipSnapshot?: boolean, + ) => void; value: string; fileTypes: Array<string>; onFileChange: (value: string) => void; @@ -204,7 +219,7 @@ export type IntComponentType = { value: string; disabled?: boolean; rangeSpec: RangeSpecType; - onChange: (value: string, skipSnapshot?: boolean) => void; + onChange: (value: string, dbValue?: boolean, skipSnapshot?: boolean) => void; editNode?: boolean; id?: string; }; @@ -212,7 +227,7 @@ export type FloatComponentType = { value: string; disabled?: boolean; - onChange: (value: string, skipSnapshot?: boolean) => void; + onChange: (value: string, dbValue?: boolean, skipSnapshot?: boolean) => void; rangeSpec: RangeSpecType; editNode?: boolean; id?: string; diff --git a/src/frontend/src/types/zustand/globalVariables/index.ts b/src/frontend/src/types/zustand/globalVariables/index.ts index 4b178088c84..e4749ee8fb8 100644 --- a/src/frontend/src/types/zustand/globalVariables/index.ts +++ b/src/frontend/src/types/zustand/globalVariables/index.ts @@ -25,7 +25,7 @@ ) => void; removeGlobalVariable: (name: string) => Promise<void>; getVariableId: (name: string) => string | undefined; - unavaliableFields: { [name: string]: string }; + unavaliableFields: { [name: string]: string } | undefined; setUnavaliableFields: (fields: { [name: string]: string }) => void; removeUnavaliableField: (field: string) => void; }; diff --git a/src/frontend/src/utils/reactflowUtils.ts b/src/frontend/src/utils/reactflowUtils.ts index d16a7e851c4..e4ac017a8a3 100644 --- a/src/frontend/src/utils/reactflowUtils.ts +++ b/src/frontend/src/utils/reactflowUtils.ts @@ -1487,11 +1487,30 @@ export function isOutputType(type: string): boolean { return OUTPUT_TYPES.has(type); } -export function updateGroupRecursion(groupNode: NodeType, edges: Edge[]) { +export function updateGroupRecursion( + groupNode: NodeType, + edges: Edge[], + unavailableFields: + | { + [name: string]: string; + } + | undefined, + globalVariablesEntries: string[] | undefined, +) { + updateGlobalVariables( + groupNode.data.node, + unavailableFields, + globalVariablesEntries, + ); if (groupNode.data.node?.flow) { groupNode.data.node.flow.data!.nodes.forEach((node) => { if (node.data.node?.flow) { - updateGroupRecursion(node, node.data.node.flow.data!.edges); + updateGroupRecursion( + node, + node.data.node.flow.data!.edges, + unavailableFields, + globalVariablesEntries, + ); } }); let newFlow = groupNode.data.node!.flow; @@ -1503,6 +1522,41 @@ } } +export function updateGlobalVariables( + node: APIClassType | undefined, + unavailableFields: + | { + [name: string]: string; + } + | undefined, + globalVariablesEntries: string[] | undefined, +) { + if (node && node.template) { + Object.keys(node.template).forEach((field) => { + if ( + globalVariablesEntries && + node!.template[field].load_from_db && + !globalVariablesEntries.includes(node!.template[field].value) + ) { + node!.template[field].value = ""; + node!.template[field].load_from_db = false; + } + if ( + !node!.template[field].load_from_db && + node!.template[field].value === "" && + unavailableFields && + Object.keys(unavailableFields).includes( + node!.template[field].display_name ?? 
"", + ) + ) { + node!.template[field].value = + unavailableFields[node!.template[field].display_name ?? ""]; + node!.template[field].load_from_db = true; + } + }); + } +} + export function getGroupOutputNodeId( flow: FlowType, p_name: string,