Skip to content

feat: add csv output parser #3483

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
11 commits merged into the base branch
Sep 27, 2024
7 changes: 7 additions & 0 deletions src/backend/base/langflow/base/models/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@

from langchain_core.language_models.llms import LLM
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage, SystemMessage
from langchain_core.output_parsers import BaseOutputParser

from langflow.base.constants import STREAM_INFO_TEXT
from langflow.custom import Component
Expand All @@ -20,6 +21,9 @@ class LCModelComponent(Component):
description: str = "Model Description"
trace_type = "llm"

# Optional output parser to pass to the runnable. Subclasses may allow the user to input an `output_parser`
output_parser: BaseOutputParser | None = None

_base_inputs: List[InputTypes] = [
MessageInput(name="input_value", display_name="Input"),
MessageTextInput(
Expand Down Expand Up @@ -163,6 +167,9 @@ def get_chat_result(
messages.append(SystemMessage(content=system_message))
inputs: Union[list, dict] = messages or {}
try:
if self.output_parser is not None:
runnable = runnable | self.output_parser

runnable = runnable.with_config( # type: ignore
{
"run_name": self.display_name,
Expand Down
16 changes: 0 additions & 16 deletions src/backend/base/langflow/base/prompts/api_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -221,20 +221,4 @@ def process_prompt_template(
# Update the input variables field in the template
update_input_variables_field(input_variables, frontend_node_template)

# Optional: cleanup fields based on specific conditions
cleanup_prompt_template_fields(input_variables, frontend_node_template)

return input_variables


def cleanup_prompt_template_fields(input_variables, template):
    """Drop stale template fields when the template holds exactly one prompt field.

    Mutates ``template`` in place: any dict-valued field that is not a ``code``
    field, not one of the current ``input_variables``, and not the single
    prompt field itself is removed. When zero or multiple prompt fields are
    present the template is left untouched.
    """
    prompt_fields = [
        name for name, spec in template.items() if isinstance(spec, dict) and spec.get("type") == "prompt"
    ]

    # Cleanup only applies in the single-prompt-field case.
    if len(prompt_fields) != 1:
        return

    keep = set(input_variables) | set(prompt_fields)
    # Snapshot the keys so we can delete from the dict while iterating.
    for name in list(template):
        spec = template.get(name, {})
        if not isinstance(spec, dict):
            continue
        if spec.get("type") == "code" or name in keep:
            continue
        del template[name]
1 change: 1 addition & 0 deletions src/backend/base/langflow/components/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
"link_extractors",
"memories",
"outputs",
"output_parsers",
"retrievers",
"textsplitters",
"toolkits",
Expand Down
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/AIMLModel.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from langflow.field_typing.range_spec import RangeSpec
from langchain_openai import ChatOpenAI
from langflow.inputs.inputs import HandleInput
from pydantic.v1 import SecretStr

from langflow.base.models.aiml_constants import AIML_CHAT_MODELS
Expand Down Expand Up @@ -59,6 +60,13 @@ class AIMLModelComponent(LCModelComponent):
advanced=True,
value=1,
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.inputs import MessageTextInput, SecretStrInput
from langflow.inputs.inputs import HandleInput
from langflow.io import DictInput, DropdownInput


Expand Down Expand Up @@ -55,6 +56,13 @@ class AmazonBedrockComponent(LCModelComponent):
MessageTextInput(name="region_name", display_name="Region Name", value="us-east-1"),
DictInput(name="model_kwargs", display_name="Model Kwargs", advanced=True, is_list=True),
MessageTextInput(name="endpoint_url", display_name="Endpoint URL", advanced=True),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
Expand Down
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/AnthropicModel.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
from langflow.inputs.inputs import HandleInput
from pydantic.v1 import SecretStr

from langflow.base.models.model import LCModelComponent
Expand Down Expand Up @@ -49,6 +50,13 @@ class AnthropicModelComponent(LCModelComponent):
info="Prefill text to guide the model's response.",
advanced=True,
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.inputs import MessageTextInput
from langflow.inputs.inputs import HandleInput
from langflow.io import DropdownInput, FloatInput, IntInput, SecretStrInput


Expand Down Expand Up @@ -47,6 +48,13 @@ class AzureChatOpenAIComponent(LCModelComponent):
advanced=True,
info="The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from langchain_community.chat_models.baidu_qianfan_endpoint import QianfanChatEndpoint
from langflow.inputs.inputs import HandleInput
from pydantic.v1 import SecretStr

from langflow.base.models.model import LCModelComponent
Expand Down Expand Up @@ -67,6 +68,13 @@ class QianfanChatEndpointComponent(LCModelComponent):
display_name="Endpoint",
info="Endpoint of the Qianfan LLM, required if custom model used.",
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
Expand Down
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/CohereModel.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from langchain_cohere import ChatCohere
from langflow.inputs.inputs import HandleInput
from pydantic.v1 import SecretStr

from langflow.base.models.model import LCModelComponent
Expand All @@ -22,6 +23,13 @@ class CohereComponent(LCModelComponent):
value="COHERE_API_KEY",
),
FloatInput(name="temperature", display_name="Temperature", value=0.75),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
Expand Down
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
from langflow.inputs.inputs import HandleInput
from pydantic.v1 import SecretStr

from langflow.base.models.model import LCModelComponent
Expand Down Expand Up @@ -48,6 +49,13 @@ class GoogleGenerativeAIComponent(LCModelComponent):
info="Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.",
advanced=True,
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
Expand Down
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/GroqModel.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
from langflow.inputs.inputs import HandleInput
import requests
from typing import List
from langchain_groq import ChatGroq
Expand Down Expand Up @@ -52,6 +53,13 @@ class GroqModel(LCModelComponent):
options=[],
refresh_button=True,
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def get_models(self) -> List[str]:
Expand Down
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
from langflow.inputs.inputs import HandleInput
from tenacity import retry, stop_after_attempt, wait_fixed
from langchain_community.llms.huggingface_endpoint import HuggingFaceEndpoint

Expand Down Expand Up @@ -27,6 +28,13 @@ class HuggingFaceEndpointsComponent(LCModelComponent):
SecretStrInput(name="huggingfacehub_api_token", display_name="API Token", password=True),
DictInput(name="model_kwargs", display_name="Model Keyword Arguments", advanced=True),
IntInput(name="retry_attempts", display_name="Retry Attempts", value=1, advanced=True),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def create_huggingface_endpoint(
Expand Down
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/Maritalk.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from langflow.field_typing import LanguageModel
from langflow.field_typing.range_spec import RangeSpec
from langflow.inputs import DropdownInput, FloatInput, IntInput, SecretStrInput
from langflow.inputs.inputs import HandleInput


class MaritalkModelComponent(LCModelComponent):
Expand Down Expand Up @@ -33,6 +34,13 @@ class MaritalkModelComponent(LCModelComponent):
advanced=False,
),
FloatInput(name="temperature", display_name="Temperature", value=0.1, range_spec=RangeSpec(min=0, max=1)),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
Expand Down
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/MistralModel.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from langchain_mistralai import ChatMistralAI
from langflow.inputs.inputs import HandleInput
from pydantic.v1 import SecretStr

from langflow.base.models.model import LCModelComponent
Expand Down Expand Up @@ -55,6 +56,13 @@ class MistralAIModelComponent(LCModelComponent):
FloatInput(name="top_p", display_name="Top P", advanced=True, value=1),
IntInput(name="random_seed", display_name="Random Seed", value=1, advanced=True),
BoolInput(name="safe_mode", display_name="Safe Mode", advanced=True),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
Expand Down
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/NvidiaModel.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.inputs import DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput
from langflow.inputs.inputs import HandleInput
from langflow.schema.dotdict import dotdict


Expand Down Expand Up @@ -47,6 +48,13 @@ class NVIDIAModelComponent(LCModelComponent):
advanced=True,
value=1,
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):
Expand Down
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/OllamaModel.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@

from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.inputs.inputs import HandleInput
from langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, StrInput


Expand Down Expand Up @@ -204,6 +205,13 @@ def get_model(self, base_url_value: str) -> list[str]:
info="Template to use for generating text.",
advanced=True,
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
Expand Down
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/OpenAIModel.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@

from langflow.field_typing.range_spec import RangeSpec
from langchain_openai import ChatOpenAI
from langflow.inputs.inputs import HandleInput
from pydantic.v1 import SecretStr

from langflow.base.models.model import LCModelComponent
Expand Down Expand Up @@ -75,6 +76,13 @@ class OpenAIModelComponent(LCModelComponent):
advanced=True,
value=1,
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from langchain_community.chat_models import ChatPerplexity
from langflow.inputs.inputs import HandleInput
from pydantic.v1 import SecretStr

from langflow.base.models.model import LCModelComponent
Expand Down Expand Up @@ -59,6 +60,13 @@ class PerplexityComponent(LCModelComponent):
info="Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.",
advanced=True,
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
Expand Down
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/VertexAiModel.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.inputs import MessageTextInput
from langflow.inputs.inputs import HandleInput
from langflow.io import BoolInput, FileInput, FloatInput, IntInput, StrInput


Expand All @@ -28,6 +29,13 @@ class ChatVertexAIComponent(LCModelComponent):
IntInput(name="top_k", display_name="Top K", advanced=True),
FloatInput(name="top_p", display_name="Top P", value=0.95, advanced=True),
BoolInput(name="verbose", display_name="Verbose", value=False, advanced=True),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel:
Expand Down
Loading
Loading