Commit: format
jordanrfrazier committed Sep 10, 2024
1 parent 12b35a1 commit f6e45ba
Showing 30 changed files with 825 additions and 483 deletions.
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/AIMLModel.py
@@ -1,5 +1,6 @@
from langflow.field_typing.range_spec import RangeSpec
from langchain_openai import ChatOpenAI
from langflow.inputs.inputs import HandleInput
from pydantic.v1 import SecretStr

from langflow.base.models.aiml_constants import AIML_CHAT_MODELS
@@ -59,6 +60,13 @@ class AIMLModelComponent(LCModelComponent):
advanced=True,
value=1,
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
Changes to AmazonBedrockComponent (file name not shown):
@@ -1,6 +1,7 @@
from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.inputs import MessageTextInput, SecretStrInput
from langflow.inputs.inputs import HandleInput
from langflow.io import DictInput, DropdownInput


@@ -55,6 +56,13 @@ class AmazonBedrockComponent(LCModelComponent):
MessageTextInput(name="region_name", display_name="Region Name", value="us-east-1"),
DictInput(name="model_kwargs", display_name="Model Kwargs", advanced=True, is_list=True),
MessageTextInput(name="endpoint_url", display_name="Endpoint URL", advanced=True),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/AnthropicModel.py
@@ -1,3 +1,4 @@
from langflow.inputs.inputs import HandleInput
from pydantic.v1 import SecretStr

from langflow.base.models.model import LCModelComponent
@@ -49,6 +50,13 @@ class AnthropicModelComponent(LCModelComponent):
info="Prefill text to guide the model's response.",
advanced=True,
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
Changes to AzureChatOpenAIComponent (file name not shown):
@@ -2,6 +2,7 @@
from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.inputs import MessageTextInput
from langflow.inputs.inputs import HandleInput
from langflow.io import DropdownInput, FloatInput, IntInput, SecretStrInput


@@ -47,6 +48,13 @@ class AzureChatOpenAIComponent(LCModelComponent):
advanced=True,
info="The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
Changes to QianfanChatEndpointComponent (file name not shown):
@@ -1,4 +1,5 @@
from langchain_community.chat_models.baidu_qianfan_endpoint import QianfanChatEndpoint
from langflow.inputs.inputs import HandleInput
from pydantic.v1 import SecretStr

from langflow.base.models.model import LCModelComponent
@@ -67,6 +68,13 @@ class QianfanChatEndpointComponent(LCModelComponent):
display_name="Endpoint",
info="Endpoint of the Qianfan LLM, required if custom model used.",
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/CohereModel.py
@@ -1,4 +1,5 @@
from langchain_cohere import ChatCohere
from langflow.inputs.inputs import HandleInput
from pydantic.v1 import SecretStr

from langflow.base.models.model import LCModelComponent
@@ -22,6 +23,13 @@ class CohereComponent(LCModelComponent):
value="COHERE_API_KEY",
),
FloatInput(name="temperature", display_name="Temperature", value=0.75),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
Changes to GoogleGenerativeAIComponent (file name not shown):
@@ -1,3 +1,4 @@
from langflow.inputs.inputs import HandleInput
from pydantic.v1 import SecretStr

from langflow.base.models.model import LCModelComponent
@@ -48,6 +49,13 @@ class GoogleGenerativeAIComponent(LCModelComponent):
info="Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.",
advanced=True,
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/GroqModel.py
@@ -1,3 +1,4 @@
from langflow.inputs.inputs import HandleInput
import requests
from typing import List
from langchain_groq import ChatGroq
@@ -52,6 +53,13 @@ class GroqModel(LCModelComponent):
options=[],
refresh_button=True,
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def get_models(self) -> List[str]:
Changes to HuggingFaceEndpointsComponent (file name not shown):
@@ -1,3 +1,4 @@
from langflow.inputs.inputs import HandleInput
from tenacity import retry, stop_after_attempt, wait_fixed
from langchain_community.llms.huggingface_endpoint import HuggingFaceEndpoint

@@ -27,6 +28,13 @@ class HuggingFaceEndpointsComponent(LCModelComponent):
SecretStrInput(name="huggingfacehub_api_token", display_name="API Token", password=True),
DictInput(name="model_kwargs", display_name="Model Keyword Arguments", advanced=True),
IntInput(name="retry_attempts", display_name="Retry Attempts", value=1, advanced=True),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def create_huggingface_endpoint(
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/Maritalk.py
@@ -4,6 +4,7 @@
from langflow.field_typing import LanguageModel
from langflow.field_typing.range_spec import RangeSpec
from langflow.inputs import DropdownInput, FloatInput, IntInput, SecretStrInput
from langflow.inputs.inputs import HandleInput


class MaritalkModelComponent(LCModelComponent):
@@ -33,6 +34,13 @@ class MaritalkModelComponent(LCModelComponent):
advanced=False,
),
FloatInput(name="temperature", display_name="Temperature", value=0.1, range_spec=RangeSpec(min=0, max=1)),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/MistralModel.py
@@ -1,4 +1,5 @@
from langchain_mistralai import ChatMistralAI
from langflow.inputs.inputs import HandleInput
from pydantic.v1 import SecretStr

from langflow.base.models.model import LCModelComponent
@@ -55,6 +56,13 @@ class MistralAIModelComponent(LCModelComponent):
FloatInput(name="top_p", display_name="Top P", advanced=True, value=1),
IntInput(name="random_seed", display_name="Random Seed", value=1, advanced=True),
BoolInput(name="safe_mode", display_name="Safe Mode", advanced=True),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/NvidiaModel.py
@@ -3,6 +3,7 @@
from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.inputs import DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput
from langflow.inputs.inputs import HandleInput
from langflow.schema.dotdict import dotdict


@@ -47,6 +48,13 @@ class NVIDIAModelComponent(LCModelComponent):
advanced=True,
value=1,
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/OllamaModel.py
@@ -6,6 +6,7 @@

from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.inputs.inputs import HandleInput
from langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, StrInput


@@ -204,6 +205,13 @@ def get_model(self, base_url_value: str) -> list[str]:
info="Template to use for generating text.",
advanced=True,
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
6 changes: 3 additions & 3 deletions src/backend/base/langflow/components/models/OpenAIModel.py
@@ -79,9 +79,9 @@ class OpenAIModelComponent(LCModelComponent):
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The output parser to use to parse the output of the model",
advanced=False, # TODO fraz: probably make true
input_types=["BaseOutputParser"],
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

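For context (not part of this hunk): the commit swaps the OpenAI component's earlier prototype handle (input_types=["BaseOutputParser"], advanced=False with a TODO) for the same OutputParser handle used by the other model components above. The code that applies a connected parser to the model's output is not among the hunks captured here; purely as an illustration of the underlying LangChain pattern, chaining a parser after a chat model looks like the sketch below (model name and prompt are arbitrary examples, not taken from this commit).

# Illustration only: generic LangChain usage, not Langflow code from this commit.
# Requires OPENAI_API_KEY in the environment.
from langchain_core.output_parsers import CommaSeparatedListOutputParser
from langchain_openai import ChatOpenAI

model = ChatOpenAI(model="gpt-4o-mini")    # arbitrary example model
parser = CommaSeparatedListOutputParser()  # same kind of parser the CSV component below builds

# LCEL: piping the model into the parser runs the model, then parses its text output.
chain = model | parser
print(chain.invoke("Name three primary colors, comma separated."))
# e.g. ['red', 'blue', 'yellow']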
Changes to PerplexityComponent (file name not shown):
@@ -1,4 +1,5 @@
from langchain_community.chat_models import ChatPerplexity
from langflow.inputs.inputs import HandleInput
from pydantic.v1 import SecretStr

from langflow.base.models.model import LCModelComponent
@@ -59,6 +60,13 @@ class PerplexityComponent(LCModelComponent):
info="Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.",
advanced=True,
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel: # type: ignore[type-var]
8 changes: 8 additions & 0 deletions src/backend/base/langflow/components/models/VertexAiModel.py
@@ -3,6 +3,7 @@
from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.inputs import MessageTextInput
from langflow.inputs.inputs import HandleInput
from langflow.io import BoolInput, FileInput, FloatInput, IntInput, StrInput


@@ -28,6 +29,13 @@ class ChatVertexAIComponent(LCModelComponent):
IntInput(name="top_k", display_name="Top K", advanced=True),
FloatInput(name="top_p", display_name="Top P", value=0.95, advanced=True),
BoolInput(name="verbose", display_name="Verbose", value=False, advanced=True),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]

def build_model(self) -> LanguageModel:
Changes to CSVOutputParserComponent (file name not shown):
@@ -1,7 +1,8 @@
from langflow.custom.custom_component.component import Component
from langflow.field_typing.constants import OutputParser
from langflow.io import Output
from langflow.schema.message import Message
- from langchain_core.output_parsers import CommaSeparatedListOutputParser, BaseOutputParser
+ from langchain_core.output_parsers import CommaSeparatedListOutputParser


class CSVOutputParserComponent(Component):
@@ -22,7 +23,7 @@ class CSVOutputParserComponent(Component):
Output(display_name="Output Parser", name="output_parser", method="build_parser"),
]

- def build_parser(self) -> BaseOutputParser:
+ def build_parser(self) -> OutputParser:
return CommaSeparatedListOutputParser()

def format_instructions(self) -> Message:
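For reference, the parser returned by build_parser above is standard LangChain, independent of this commit; its behavior is roughly the following sketch.

# Standard LangChain API, shown only to illustrate what the component's parser does.
from langchain_core.output_parsers import CommaSeparatedListOutputParser

parser = CommaSeparatedListOutputParser()
print(parser.get_format_instructions())
# prompt guidance along the lines of: respond with comma separated values, e.g. `foo, bar, baz`
print(parser.parse("red, blue, yellow"))
# ['red', 'blue', 'yellow']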
7 changes: 6 additions & 1 deletion src/backend/base/langflow/field_typing/constants.py
@@ -10,7 +10,7 @@
from langchain_core.language_models import BaseLanguageModel, BaseLLM
from langchain_core.language_models.chat_models import BaseChatModel
from langchain_core.memory import BaseMemory
- from langchain_core.output_parsers import BaseOutputParser
+ from langchain_core.output_parsers import BaseOutputParser, BaseLLMOutputParser
from langchain_core.prompts import BasePromptTemplate, ChatPromptTemplate, PromptTemplate
from langchain_core.retrievers import BaseRetriever
from langchain_core.tools import BaseTool, Tool
@@ -28,6 +28,11 @@
BaseRetriever,
VectorStoreRetriever,
)
OutputParser = TypeVar(
"OutputParser",
BaseOutputParser,
BaseLLMOutputParser,
)


class Object:
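The new OutputParser entry follows the same pattern as the other field-typing aliases in this module: a TypeVar constrained to the LangChain base classes a connection of that type may carry, which is presumably what lets the HandleInput(input_types=["OutputParser"]) handles added above accept the output of CSVOutputParserComponent.build_parser. A minimal standalone sketch of the constraint, illustrative only:

# Sketch of the constrained TypeVar added above; runnable on its own.
from typing import TypeVar

from langchain_core.output_parsers import (
    BaseLLMOutputParser,
    BaseOutputParser,
    CommaSeparatedListOutputParser,
)

OutputParser = TypeVar("OutputParser", BaseOutputParser, BaseLLMOutputParser)

def build_parser() -> OutputParser:  # mirrors CSVOutputParserComponent.build_parser
    return CommaSeparatedListOutputParser()

# CommaSeparatedListOutputParser subclasses BaseOutputParser, so it satisfies the constraint.
assert isinstance(build_parser(), BaseOutputParser)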