Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: Add Perplexity Models Component #3351

Merged
merged 13 commits into from
Aug 19, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
83 changes: 83 additions & 0 deletions src/backend/base/langflow/components/models/PerplexityModel.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
from langchain_community.chat_models import ChatPerplexity
from pydantic.v1 import SecretStr

from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
from langflow.io import FloatInput, SecretStrInput, DropdownInput, IntInput


class PerplexityComponent(LCModelComponent):
    """Langflow model component wrapping LangChain's ``ChatPerplexity``.

    Exposes the Perplexity chat models (sonar / llama-3.1 instruct variants)
    as a drop-in LLM for langflow flows. ``build_model`` translates the
    component's input fields into a configured ``ChatPerplexity`` instance.
    """

    display_name = "Perplexity"
    description = "Generate text using Perplexity LLMs."
    documentation = "https://python.langchain.com/v0.2/docs/integrations/chat/perplexity/"
    icon = "Perplexity"
    name = "PerplexityModel"

    inputs = LCModelComponent._base_inputs + [
        DropdownInput(
            name="model_name",
            display_name="Model Name",
            advanced=False,
            options=[
                "llama-3.1-sonar-small-128k-online",
                "llama-3.1-sonar-large-128k-online",
                "llama-3.1-sonar-huge-128k-online",
                "llama-3.1-sonar-small-128k-chat",
                "llama-3.1-sonar-large-128k-chat",
                "llama-3.1-8b-instruct",
                "llama-3.1-70b-instruct",
            ],
            value="llama-3.1-sonar-small-128k-online",
        ),
        IntInput(
            name="max_output_tokens",
            display_name="Max Output Tokens",
            info="The maximum number of tokens to generate.",
        ),
        SecretStrInput(
            name="api_key",
            display_name="Perplexity API Key",
            info="The Perplexity API Key to use for the Perplexity model.",
            advanced=False,
        ),
        FloatInput(name="temperature", display_name="Temperature", value=0.75),
        FloatInput(
            name="top_p",
            display_name="Top P",
            info="The maximum cumulative probability of tokens to consider when sampling.",
            advanced=True,
        ),
        IntInput(
            name="n",
            display_name="N",
            info="Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.",
            advanced=True,
        ),
        IntInput(
            name="top_k",
            display_name="Top K",
            info="Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.",
            advanced=True,
        ),
    ]

    def build_model(self) -> LanguageModel:  # type: ignore[type-var]
        """Build and return a configured ``ChatPerplexity`` instance.

        Returns:
            LanguageModel: the LangChain chat model ready for invocation.
        """
        # SecretStr(None) raises a pydantic ValidationError, so only wrap the
        # key when one was actually provided; ChatPerplexity can then fall
        # back to its own env-var lookup (PPLX_API_KEY) when it is None.
        api_key = SecretStr(self.api_key).get_secret_value() if self.api_key else None

        # Use an explicit None check so a deliberate temperature of 0.0 is
        # honored instead of being silently replaced by the 0.75 default
        # (the previous `temperature or 0.75` treated 0.0 as unset).
        temperature = self.temperature if self.temperature is not None else 0.75

        return ChatPerplexity(
            model=self.model_name,
            temperature=temperature,
            pplx_api_key=api_key,
            # Unset numeric inputs arrive as falsy values (0 / None); map
            # them to None so ChatPerplexity applies its own defaults.
            top_k=self.top_k or None,
            top_p=self.top_p or None,
            n=self.n or 1,
            max_output_tokens=self.max_output_tokens or None,
        )
2 changes: 2 additions & 0 deletions src/backend/base/langflow/components/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
from .OllamaModel import ChatOllamaComponent
from .OpenAIModel import OpenAIModelComponent
from .VertexAiModel import ChatVertexAIComponent
from .PerplexityModel import PerplexityComponent

__all__ = [
"AIMLModelComponent",
Expand All @@ -22,5 +23,6 @@
"ChatOllamaComponent",
"OpenAIModelComponent",
"ChatVertexAIComponent",
"PerplexityComponent",
"base",
]
1 change: 1 addition & 0 deletions src/frontend/package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

21 changes: 21 additions & 0 deletions src/frontend/src/icons/Perplexity/Perplexity.jsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
const SvgPerplexity = (props) => (
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 512 509.64" // Adjust viewBox if needed
width="1em"
height="1em"
{...props}
>
<path
fill="#1F1F1F"
d="M115.613 0h280.774C459.974 0 512 52.025 512 115.612v278.415c0 63.587-52.026 115.613-115.613 115.613H115.613C52.026 509.64 0 457.614 0 394.027V115.612C0 52.025 52.026 0 115.613 0z"
/>
<path
fill="#fff"
fillRule="nonzero"
d="M348.851 128.063l-68.946 58.302h68.946v-58.302zm-83.908 48.709l100.931-85.349v94.942h32.244v143.421h-38.731v90.004l-94.442-86.662v83.946h-17.023v-83.906l-96.596 86.246v-89.628h-37.445V186.365h38.732V90.768l95.309 84.958v-83.16h17.023l-.002 84.206zm-29.209 26.616c-34.955.02-69.893 0-104.83 0v109.375h20.415v-27.121l84.415-82.254zm41.445 0l82.208 82.324v27.051h21.708V203.388c-34.617 0-69.274.02-103.916 0zm-42.874-17.023l-64.669-57.646v57.646h64.669zm13.617 124.076v-95.2l-79.573 77.516v88.731l79.573-71.047zm17.252-95.022v94.863l77.19 70.83c0-29.485-.012-58.943-.012-88.425l-77.178-77.268z"
/>
</svg>
);

export default SvgPerplexity;
9 changes: 9 additions & 0 deletions src/frontend/src/icons/Perplexity/index.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
import React, { forwardRef } from "react";
// Fixed import casing: the sibling file is `Perplexity.jsx`, but this
// previously imported "./perplexity", which only resolves on
// case-insensitive filesystems and breaks Linux builds/CI.
import PerplexitySVG from "./Perplexity";

// Ref-forwarding wrapper so consumers can attach a ref to the icon's
// <svg> root element.
export const PerplexityIcon = forwardRef<
  SVGSVGElement,
  React.PropsWithChildren<{}>
>((props, ref) => {
  // NOTE(review): PerplexitySVG is a plain function component, so the ref
  // passed here is dropped unless Perplexity.jsx also uses forwardRef —
  // confirm before relying on the ref.
  return <PerplexitySVG ref={ref} {...props} />;
});
Binary file added src/frontend/src/icons/Perplexity/perplexity.svg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
2 changes: 2 additions & 0 deletions src/frontend/src/utils/styleUtils.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import { AIMLIcon } from "@/icons/AIML";
import Perplexity from "@/icons/Perplexity/Perplexity";
import { AthenaIcon } from "@/icons/athena/index";
import { freezeAllIcon } from "@/icons/freezeAll";
import {
Expand Down Expand Up @@ -599,4 +600,5 @@ export const nodeIconsLucide: iconsType = {
athenaIcon: AthenaIcon,
OptionIcon: OptionIcon,
Option: OptionIcon,
Perplexity,
};
Loading