forked from langflow-ai/langflow
-
Notifications
You must be signed in to change notification settings - Fork 3
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Add GroqLogo and GroqIcon components (langflow-ai#1853)
* Update package.json format command to include only specific directories * Add GroqLogo component and GroqIcon to the project * Update dependencies and add GroqModelSpecs component * Fix nullable column issue in langflow/alembic/versions/6e7b581b5648_fix_nullable.py * Add GroqModelSpecs component and update dependencies * Update GroqModelSpecs and GroqModel display names * chore: Add langchain-pinecone dependency and update constants.py
- Loading branch information
1 parent
21be126
commit a037bf9
Showing
14 changed files
with
528 additions
and
238 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Large diffs are not rendered by default.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1 @@ | ||
# Chat-model identifiers accepted by the Groq API; surfaced as the "Model"
# dropdown options in the Groq components.
MODEL_NAMES = ["llama3-8b-8192", "llama3-70b-8192", "mixtral-8x7b-32768", "gemma-7b-it"]
86 changes: 86 additions & 0 deletions
86
src/backend/base/langflow/components/model_specs/GroqModelSpecs.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,86 @@ | ||
from typing import Optional | ||
|
||
from langchain_groq import ChatGroq | ||
from pydantic.v1 import SecretStr | ||
|
||
from langflow.base.constants import STREAM_INFO_TEXT | ||
from langflow.base.models.groq_constants import MODEL_NAMES | ||
from langflow.base.models.model import LCModelComponent | ||
from langflow.field_typing import BaseLanguageModel | ||
|
||
|
||
class GroqModelSpecs(LCModelComponent):
    """Model-spec component that builds a Groq chat model (``ChatGroq``).

    Unlike the chat variant, this component returns the configured language
    model itself rather than running an inference with it.
    """

    display_name: str = "Groq"
    description: str = "Generate text using Groq."
    icon = "Groq"

    # UI field ordering. Every entry must be a key returned by build_config();
    # the previous list referenced fields ("model", "max_output_tokens",
    # "top_k", "top_p", "input_value", "system_message") that do not exist here.
    field_order = [
        "groq_api_key",
        "groq_api_base",
        "model_name",
        "max_tokens",
        "temperature",
        "n",
        "stream",
    ]

    def build_config(self):
        """Return the UI configuration for each build() parameter."""
        return {
            "groq_api_key": {
                "display_name": "Groq API Key",
                "info": "API key for the Groq API.",
                "password": True,
            },
            "groq_api_base": {
                "display_name": "Groq API Base",
                "info": "Base URL path for API requests, leave blank if not using a proxy or service emulator.",
                "advanced": True,
            },
            "max_tokens": {
                "display_name": "Max Output Tokens",
                "info": "The maximum number of tokens to generate.",
                "advanced": True,
            },
            "temperature": {
                "display_name": "Temperature",
                # Typo fix: "Must by" -> "Must be".
                "info": "Run inference with this temperature. Must be in the closed interval [0.0, 1.0].",
            },
            "n": {
                "display_name": "N",
                "info": "Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.",
                "advanced": True,
            },
            "model_name": {
                "display_name": "Model",
                # Previously said "gemini-pro" (copy-paste from the Gemini
                # component); the valid values are the Groq MODEL_NAMES options.
                "info": "The name of the Groq model to use.",
                "options": MODEL_NAMES,
            },
            "stream": {
                "display_name": "Stream",
                "info": STREAM_INFO_TEXT,
                "advanced": True,
            },
        }

    def build(
        self,
        groq_api_key: str,
        model_name: str,
        groq_api_base: Optional[str] = None,
        max_tokens: Optional[int] = None,
        temperature: float = 0.1,
        n: Optional[int] = 1,
        stream: bool = False,
    ) -> BaseLanguageModel:
        """Construct and return a ``ChatGroq`` language model.

        Args:
            groq_api_key: API key, wrapped in ``SecretStr`` before use.
            model_name: One of the Groq ``MODEL_NAMES``.
            groq_api_base: Optional proxy/emulator base URL.
            max_tokens: Generation cap; falsy values are normalized to None
                (provider default).
            temperature: Sampling temperature in [0.0, 1.0].
            n: Number of completions per prompt; falsy values default to 1.
            stream: Whether the model should stream tokens.
        """
        return ChatGroq(
            model_name=model_name,
            max_tokens=max_tokens or None,  # type: ignore
            temperature=temperature,
            groq_api_base=groq_api_base,
            n=n or 1,
            groq_api_key=SecretStr(groq_api_key),
            streaming=stream,
        )
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,95 @@ | ||
from typing import Optional | ||
|
||
from langchain_groq import ChatGroq | ||
from langflow.base.models.groq_constants import MODEL_NAMES | ||
from pydantic.v1 import SecretStr | ||
|
||
from langflow.base.constants import STREAM_INFO_TEXT | ||
from langflow.base.models.model import LCModelComponent | ||
from langflow.field_typing import Text | ||
|
||
|
||
class GroqModel(LCModelComponent):
    """Chat component that runs an inference with a Groq chat model.

    Builds a ``ChatGroq`` instance from the configured parameters and returns
    the text produced for ``input_value`` (optionally with a system message).
    """

    display_name: str = "Groq"
    description: str = "Generate text using Groq."
    icon = "Groq"

    # UI field ordering. Every entry must be a key returned by build_config();
    # the previous list referenced fields ("model", "max_output_tokens",
    # "top_k", "top_p") that do not exist in this component's config.
    field_order = [
        "groq_api_key",
        "groq_api_base",
        "model_name",
        "max_tokens",
        "temperature",
        "n",
        "input_value",
        "system_message",
        "stream",
    ]

    def build_config(self):
        """Return the UI configuration for each build() parameter."""
        return {
            "groq_api_key": {
                "display_name": "Groq API Key",
                "info": "API key for the Groq API.",
                "password": True,
            },
            "groq_api_base": {
                "display_name": "Groq API Base",
                "info": "Base URL path for API requests, leave blank if not using a proxy or service emulator.",
                "advanced": True,
            },
            "max_tokens": {
                "display_name": "Max Output Tokens",
                "info": "The maximum number of tokens to generate.",
                "advanced": True,
            },
            "temperature": {
                "display_name": "Temperature",
                # Typo fix: "Must by" -> "Must be".
                "info": "Run inference with this temperature. Must be in the closed interval [0.0, 1.0].",
            },
            "n": {
                "display_name": "N",
                "info": "Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.",
                "advanced": True,
            },
            "model_name": {
                "display_name": "Model",
                # Previously said "gemini-pro" (copy-paste from the Gemini
                # component); the valid values are the Groq MODEL_NAMES options.
                "info": "The name of the Groq model to use.",
                "options": MODEL_NAMES,
            },
            "input_value": {"display_name": "Input", "info": "The input to the model."},
            "stream": {
                "display_name": "Stream",
                "info": STREAM_INFO_TEXT,
                "advanced": True,
            },
            "system_message": {
                "display_name": "System Message",
                "info": "System message to pass to the model.",
                "advanced": True,
            },
        }

    def build(
        self,
        groq_api_key: str,
        model_name: str,
        input_value: Text,
        groq_api_base: Optional[str] = None,
        max_tokens: Optional[int] = None,
        temperature: float = 0.1,
        n: Optional[int] = 1,
        stream: bool = False,
        system_message: Optional[str] = None,
    ) -> Text:
        """Run a chat completion with Groq and return the resulting text.

        Args:
            groq_api_key: API key, wrapped in ``SecretStr`` before use.
            model_name: One of the Groq ``MODEL_NAMES``.
            input_value: The user input to send to the model.
            groq_api_base: Optional proxy/emulator base URL.
            max_tokens: Generation cap; falsy values are normalized to None
                (provider default).
            temperature: Sampling temperature in [0.0, 1.0].
            n: Number of completions per prompt; falsy values default to 1.
            stream: Whether to stream tokens back to the caller.
            system_message: Optional system prompt prepended to the chat.
        """
        output = ChatGroq(
            model_name=model_name,
            max_tokens=max_tokens or None,  # type: ignore
            temperature=temperature,
            groq_api_base=groq_api_base,
            n=n or 1,
            groq_api_key=SecretStr(groq_api_key),
            streaming=stream,
        )
        # get_chat_result (from LCModelComponent) performs the actual call
        # and handles streaming vs. non-streaming output.
        return self.get_chat_result(output, stream, input_value, system_message)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.
Oops, something went wrong.
Oops, something went wrong.