feat: add csv output parser (#3483)
* Adds the CSV Output Parser

---------

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Gabriel Luiz Freitas Almeida <[email protected]>
3 people authored Sep 27, 2024
1 parent 948b150 commit 8e50f2f
Showing 36 changed files with 544 additions and 93 deletions.
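Before the per-file hunks, a note on what the parser side of this feature looks like: the changes below add an optional output_parser hook to the base model component and expose an "Output Parser" handle on the individual model components. The CSV parser itself is presumably a thin wrapper around LangChain's CommaSeparatedListOutputParser (an assumption; the new component file is among the changed files not shown on this page). A minimal sketch of how that LangChain parser behaves:

from langchain_core.output_parsers import CommaSeparatedListOutputParser

parser = CommaSeparatedListOutputParser()

# Format instructions a prompt can include so the model answers in CSV form.
print(parser.get_format_instructions())

# Parse a comma-separated model reply into a Python list.
print(parser.parse("red, green, blue"))  # -> ['red', 'green', 'blue']

Any other BaseOutputParser subclass can be attached the same way through the new "Output Parser" handle shown in the hunks below.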
7 changes: 7 additions & 0 deletions src/backend/base/langflow/base/models/model.py
@@ -4,6 +4,7 @@

from langchain_core.language_models.llms import LLM
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage, SystemMessage
from langchain_core.output_parsers import BaseOutputParser

from langflow.base.constants import STREAM_INFO_TEXT
from langflow.custom import Component
@@ -19,6 +20,9 @@ class LCModelComponent(Component):
description: str = "Model Description"
trace_type = "llm"

# Optional output parser to pass to the runnable. Subclasses may allow the user to input an `output_parser`
output_parser: BaseOutputParser | None = None

_base_inputs: list[InputTypes] = [
MessageInput(name="input_value", display_name="Input"),
MessageTextInput(
@@ -162,6 +166,9 @@ def get_chat_result(
messages.append(SystemMessage(content=system_message))
inputs: list | dict = messages or {}
try:
if self.output_parser is not None:
runnable = runnable | self.output_parser

runnable = runnable.with_config( # type: ignore
{
"run_name": self.display_name,
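The get_chat_result change above relies on standard LCEL composition: when output_parser is set, it is piped onto the model runnable with the | operator, so the raw model output is post-processed before being returned. A minimal, self-contained sketch of that pattern (the RunnableLambda stand-in for a chat model and its canned reply are illustrative assumptions, not part of this commit):

from langchain_core.output_parsers import CommaSeparatedListOutputParser
from langchain_core.runnables import RunnableLambda

# Hypothetical stand-in for a chat model; always returns comma-separated text.
fake_model = RunnableLambda(lambda _prompt: "red, green, blue")

# Mirrors `runnable = runnable | self.output_parser` in get_chat_result.
chain = fake_model | CommaSeparatedListOutputParser()

print(chain.invoke("List three colors."))  # -> ['red', 'green', 'blue']

Because output parsers are themselves runnables, the same composition works for any parser wired into the new output_parser field.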
16 changes: 0 additions & 16 deletions src/backend/base/langflow/base/prompts/api_utils.py
@@ -220,20 +220,4 @@ def process_prompt_template(
# Update the input variables field in the template
update_input_variables_field(input_variables, frontend_node_template)

# Optional: cleanup fields based on specific conditions
cleanup_prompt_template_fields(input_variables, frontend_node_template)

return input_variables


def cleanup_prompt_template_fields(input_variables, template):
"""Removes unused fields if the conditions are met in the template."""
prompt_fields = [
key for key, field in template.items() if isinstance(field, dict) and field.get("type") == "prompt"
]

if len(prompt_fields) == 1:
for key in list(template.keys()): # Use list to copy keys
field = template.get(key, {})
if isinstance(field, dict) and field.get("type") != "code" and key not in input_variables + prompt_fields:
del template[key]
1 change: 1 addition & 0 deletions src/backend/base/langflow/components/__init__.py
@@ -31,6 +31,7 @@
"link_extractors",
"memories",
"outputs",
"output_parsers",
"retrievers",
"textsplitters",
"toolkits",
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/AIMLModel.py
@@ -13,6 +13,7 @@
SecretStrInput,
StrInput,
)
from langflow.inputs.inputs import HandleInput


class AIMLModelComponent(LCModelComponent):
@@ -59,6 +60,13 @@ class AIMLModelComponent(LCModelComponent):
advanced=True,
value=1,
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
@@ -1,6 +1,7 @@
from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.inputs import MessageTextInput, SecretStrInput
from langflow.inputs.inputs import HandleInput
from langflow.io import DictInput, DropdownInput


@@ -55,6 +56,13 @@ class AmazonBedrockComponent(LCModelComponent):
MessageTextInput(name="region_name", display_name="Region Name", value="us-east-1"),
DictInput(name="model_kwargs", display_name="Model Kwargs", advanced=True, is_list=True),
MessageTextInput(name="endpoint_url", display_name="Endpoint URL", advanced=True),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/AnthropicModel.py
@@ -2,6 +2,7 @@

from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.inputs.inputs import HandleInput
from langflow.io import DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput


@@ -49,6 +50,13 @@ class AnthropicModelComponent(LCModelComponent):
info="Prefill text to guide the model's response.",
advanced=True,
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
@@ -3,6 +3,7 @@
from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.inputs import MessageTextInput
from langflow.inputs.inputs import HandleInput
from langflow.io import DropdownInput, FloatInput, IntInput, SecretStrInput


@@ -48,6 +49,13 @@ class AzureChatOpenAIComponent(LCModelComponent):
advanced=True,
info="The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
@@ -3,6 +3,7 @@

from langflow.base.models.model import LCModelComponent
from langflow.field_typing.constants import LanguageModel
from langflow.inputs.inputs import HandleInput
from langflow.io import DropdownInput, FloatInput, MessageTextInput, SecretStrInput


@@ -67,6 +68,13 @@ class QianfanChatEndpointComponent(LCModelComponent):
display_name="Endpoint",
info="Endpoint of the Qianfan LLM, required if custom model used.",
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/CohereModel.py
@@ -3,6 +3,7 @@

from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.inputs.inputs import HandleInput
from langflow.io import FloatInput, SecretStrInput


@@ -22,6 +23,13 @@ class CohereComponent(LCModelComponent):
value="COHERE_API_KEY",
),
FloatInput(name="temperature", display_name="Temperature", value=0.75),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
@@ -3,6 +3,7 @@
from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.inputs import DropdownInput, FloatInput, IntInput, SecretStrInput
from langflow.inputs.inputs import HandleInput


class GoogleGenerativeAIComponent(LCModelComponent):
@@ -48,6 +49,13 @@ class GoogleGenerativeAIComponent(LCModelComponent):
info="Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.",
advanced=True,
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/GroqModel.py
@@ -4,6 +4,7 @@

from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.inputs.inputs import HandleInput
from langflow.io import DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput


@@ -51,6 +52,13 @@ class GroqModel(LCModelComponent):
options=[],
refresh_button=True,
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def get_models(self) -> list[str]:
42 changes: 7 additions & 35 deletions src/backend/base/langflow/components/models/HuggingFaceModel.py
@@ -6,7 +6,8 @@
# TODO: langchain_community.llms.huggingface_endpoint is depreciated. Need to update to langchain_huggingface, but have dependency with langchain_core 0.3.0
from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.io import DictInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput
from langflow.inputs.inputs import HandleInput
from langflow.io import DictInput, DropdownInput, IntInput, SecretStrInput, StrInput


class HuggingFaceEndpointsComponent(LCModelComponent):
@@ -37,41 +38,12 @@ class HuggingFaceEndpointsComponent(LCModelComponent):
SecretStrInput(name="huggingfacehub_api_token", display_name="API Token", password=True),
DictInput(name="model_kwargs", display_name="Model Keyword Arguments", advanced=True),
IntInput(name="retry_attempts", display_name="Retry Attempts", value=1, advanced=True),
IntInput(
name="max_new_tokens", display_name="Max New Tokens", value=512, info="Maximum number of generated tokens"
),
IntInput(
name="top_k",
display_name="Top K",
advanced=True,
info="The number of highest probability vocabulary tokens to keep for top-k-filtering",
),
FloatInput(
name="top_p",
display_name="Top P",
value=0.95,
advanced=True,
info="If set to < 1, only the smallest set of most probable tokens with probabilities that add up to `top_p` or higher are kept for generation",
),
FloatInput(
name="typical_p",
display_name="Typical P",
value=0.95,
advanced=True,
info="Typical Decoding mass.",
),
FloatInput(
name="temperature",
display_name="Temperature",
value=0.8,
advanced=True,
info="The value used to module the logits distribution",
),
FloatInput(
name="repetition_penalty",
display_name="Repetition Penalty",
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
info="The parameter for repetition penalty. 1.0 means no penalty.",
input_types=["OutputParser"],
),
]

8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/Maritalk.py
@@ -4,6 +4,7 @@
from langflow.field_typing import LanguageModel
from langflow.field_typing.range_spec import RangeSpec
from langflow.inputs import DropdownInput, FloatInput, IntInput, SecretStrInput
from langflow.inputs.inputs import HandleInput


class MaritalkModelComponent(LCModelComponent):
@@ -33,6 +34,13 @@ class MaritalkModelComponent(LCModelComponent):
advanced=False,
),
FloatInput(name="temperature", display_name="Temperature", value=0.1, range_spec=RangeSpec(min=0, max=1)),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/MistralModel.py
@@ -3,6 +3,7 @@

from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.inputs.inputs import HandleInput
from langflow.io import BoolInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput


@@ -55,6 +56,13 @@ class MistralAIModelComponent(LCModelComponent):
FloatInput(name="top_p", display_name="Top P", advanced=True, value=1),
IntInput(name="random_seed", display_name="Random Seed", value=1, advanced=True),
BoolInput(name="safe_mode", display_name="Safe Mode", advanced=True),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/NvidiaModel.py
@@ -3,6 +3,7 @@
from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.inputs import DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput
from langflow.inputs.inputs import HandleInput
from langflow.schema.dotdict import dotdict


@@ -47,6 +48,13 @@ class NVIDIAModelComponent(LCModelComponent):
advanced=True,
value=1,
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/OllamaModel.py
@@ -6,6 +6,7 @@

from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.inputs.inputs import HandleInput
from langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, StrInput


@@ -204,6 +205,13 @@ def get_model(self, base_url_value: str) -> list[str]:
info="Template to use for generating text.",
advanced=True,
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/OpenAIModel.py
@@ -17,6 +17,7 @@
SecretStrInput,
StrInput,
)
from langflow.inputs.inputs import HandleInput


class OpenAIModelComponent(LCModelComponent):
@@ -75,6 +76,13 @@ class OpenAIModelComponent(LCModelComponent):
advanced=True,
value=1,
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
(remaining changed files in this commit are not shown)
