from langchain_community.chat_models import ChatMaritalk

from langflow.base.constants import STREAM_INFO_TEXT
from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.field_typing.range_spec import RangeSpec
from langflow.inputs import BoolInput, DropdownInput, FloatInput, IntInput, MessageInput, SecretStrInput, StrInput


class MaritalkModelComponent(LCModelComponent):
    """Langflow component that generates text using Maritalk chat models.

    Wraps ``langchain_community.chat_models.ChatMaritalk`` and exposes the
    model name, API key, temperature, max tokens, streaming flag and system
    message as Langflow inputs.
    """

    display_name = "Maritalk"
    description = "Generates text using Maritalk LLMs."
    icon = "Maritalk"
    name = "Maritalk"

    inputs = [
        MessageInput(name="input_value", display_name="Input"),
        IntInput(
            name="max_tokens",
            display_name="Max Tokens",
            advanced=True,
            value=512,
            info="The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
        ),
        DropdownInput(
            name="model_name",
            display_name="Model Name",
            advanced=False,
            options=["sabia-2-small", "sabia-2-medium"],
            # BUGFIX: the default of a dropdown must be a single option string,
            # not a one-element list (was value=["sabia-2-small"]).
            value="sabia-2-small",
        ),
        SecretStrInput(
            name="api_key",
            display_name="Maritalk API Key",
            # BUGFIX: info text previously said "for the OpenAI model" — a
            # copy-paste error; this key authenticates against Maritalk.
            info="The Maritalk API Key to use for the Maritalk model.",
            advanced=False,
        ),
        FloatInput(name="temperature", display_name="Temperature", value=0.1, range_spec=RangeSpec(min=0, max=1)),
        BoolInput(name="stream", display_name="Stream", info=STREAM_INFO_TEXT, value=False, advanced=True),
        StrInput(
            name="system_message",
            display_name="System Message",
            info="System message to pass to the model.",
            advanced=True,
        ),
    ]

    def build_model(self) -> LanguageModel:  # type: ignore[type-var]
        """Build and return the configured ``ChatMaritalk`` instance.

        Returns:
            LanguageModel: a ``ChatMaritalk`` chat model configured with the
            component's model name, API key, temperature and max-tokens inputs.
        """
        model_name: str = self.model_name
        return ChatMaritalk(
            max_tokens=self.max_tokens,
            model=model_name,
            api_key=self.api_key,
            # Fall back to 0.1 when temperature is unset or 0 (preserves the
            # original `temperature or 0.1` behavior).
            temperature=self.temperature or 0.1,
        )
React, { forwardRef } from "react"; +import SvgMaritalkIcon from "./MaritalkIcon"; + +export const MaritalkIcon = forwardRef< + SVGSVGElement, + React.PropsWithChildren<{}> +>((props, ref) => { + return ; +}); diff --git a/src/frontend/src/icons/Maritalk/maritalk-icon.svg b/src/frontend/src/icons/Maritalk/maritalk-icon.svg new file mode 100644 index 000000000000..b3121e439d7e --- /dev/null +++ b/src/frontend/src/icons/Maritalk/maritalk-icon.svg @@ -0,0 +1,139 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/frontend/src/utils/styleUtils.ts b/src/frontend/src/utils/styleUtils.ts index adbdd2d0c29f..198f9eeb01f8 100644 --- a/src/frontend/src/utils/styleUtils.ts +++ b/src/frontend/src/utils/styleUtils.ts @@ -184,6 +184,7 @@ import { GroqIcon } from "../icons/Groq"; import { HuggingFaceIcon } from "../icons/HuggingFace"; import { IFixIcon } from "../icons/IFixIt"; import { LangChainIcon } from "../icons/LangChain"; +import { MaritalkIcon } from "../icons/Maritalk"; import { MetaIcon } from "../icons/Meta"; import { MidjourneyIcon } from "../icons/Midjorney"; import { MongoDBIcon } from "../icons/MongoDB"; @@ -339,6 +340,7 @@ export const nodeIconsLucide: iconsType = { OllamaEmbeddings: OllamaIcon, ChatOllamaModel: OllamaIcon, FAISS: MetaIcon, + Maritalk: MaritalkIcon, FaissSearch: MetaIcon, LangChain: LangChainIcon, AzureOpenAiModel: AzureIcon,