Skip to content

Commit

Permalink
Add GroqLogo and GroqIcon components (langflow-ai#1853)
Browse files Browse the repository at this point in the history
* Update package.json format command to include only specific directories

* Add GroqLogo component and GroqIcon to the project

* Update dependencies and add GroqModelSpecs component

* Fix nullable column issue in langflow/alembic/versions/6e7b581b5648_fix_nullable.py

* Add GroqModelSpecs component and update dependencies

* Update GroqModelSpecs and GroqModel display names

* chore: Add langchain-pinecone dependency and update constants.py
  • Loading branch information
ogabrielluiz authored May 7, 2024
1 parent 21be126 commit a037bf9
Show file tree
Hide file tree
Showing 14 changed files with 528 additions and 238 deletions.
1 change: 1 addition & 0 deletions docs/docs/migration/global-variables.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,7 @@ The default list at the moment is:
- AZURE_OPENAI_API_VERSION
- COHERE_API_KEY
- GOOGLE_API_KEY
- GROQ_API_KEY
- HUGGINGFACEHUB_API_TOKEN
- OPENAI_API_KEY
- PINECONE_API_KEY
Expand Down
221 changes: 128 additions & 93 deletions poetry.lock

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -83,6 +83,7 @@ langchain-astradb = "^0.1.0"
langchain-openai = "^0.1.1"
zep-python = { version = "^2.0.0rc5", allow-prereleases = true }
langchain-google-vertexai = "^1.0.3"
langchain-groq = "^0.1.3"
langchain-pinecone = "^0.1.0"

[tool.poetry.group.dev.dependencies]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
def upgrade() -> None:
conn = op.get_bind()
inspector = Inspector.from_engine(conn) # type: ignore
table_names = inspector.get_table_names()
# table_names = inspector.get_table_names()
# ### commands auto generated by Alembic - please adjust! ###
columns = inspector.get_columns("apikey")
column_names = {column["name"]: column for column in columns}
Expand All @@ -42,7 +42,7 @@ def upgrade() -> None:
def downgrade() -> None:
conn = op.get_bind()
inspector = Inspector.from_engine(conn) # type: ignore
table_names = inspector.get_table_names()
# table_names = inspector.get_table_names()
columns = inspector.get_columns("apikey")
column_names = {column["name"]: column for column in columns}
# ### commands auto generated by Alembic - please adjust! ###
Expand Down
1 change: 1 addition & 0 deletions src/backend/base/langflow/base/models/groq_constants.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
# Chat-model identifiers exposed to users for the Groq API.
MODEL_NAMES = [
    "llama3-8b-8192",
    "llama3-70b-8192",
    "mixtral-8x7b-32768",
    "gemma-7b-it",
]
86 changes: 86 additions & 0 deletions src/backend/base/langflow/components/model_specs/GroqModelSpecs.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
from typing import Optional

from langchain_groq import ChatGroq
from pydantic.v1 import SecretStr

from langflow.base.constants import STREAM_INFO_TEXT
from langflow.base.models.groq_constants import MODEL_NAMES
from langflow.base.models.model import LCModelComponent
from langflow.field_typing import BaseLanguageModel


class GroqModelSpecs(LCModelComponent):
    """Model-spec component that builds and returns a `ChatGroq` language model.

    Unlike the chat component, this does not run the model; it only
    constructs the `BaseLanguageModel` instance for downstream use.
    """

    display_name: str = "Groq"
    description: str = "Generate text using Groq."
    icon = "Groq"

    # Field order must reference the exact keys returned by build_config;
    # previously it listed keys ("model", "max_output_tokens", "top_k",
    # "top_p", "input_value", "system_message") that do not exist here.
    field_order = [
        "groq_api_key",
        "groq_api_base",
        "model_name",
        "max_tokens",
        "temperature",
        "n",
        "stream",
    ]

    def build_config(self):
        """Return the UI field configuration for this component."""
        return {
            "groq_api_key": {
                "display_name": "Groq API Key",
                "info": "API key for the Groq API.",
                "password": True,
            },
            "groq_api_base": {
                "display_name": "Groq API Base",
                "info": "Base URL path for API requests, leave blank if not using a proxy or service emulator.",
                "advanced": True,
            },
            "max_tokens": {
                "display_name": "Max Output Tokens",
                "info": "The maximum number of tokens to generate.",
                "advanced": True,
            },
            "temperature": {
                "display_name": "Temperature",
                "info": "Run inference with this temperature. Must be in the closed interval [0.0, 1.0].",
            },
            "n": {
                "display_name": "N",
                "info": "Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.",
                "advanced": True,
            },
            "model_name": {
                "display_name": "Model",
                # Options come from the shared Groq constants module.
                "info": "The name of the Groq model to use.",
                "options": MODEL_NAMES,
            },
            "stream": {
                "display_name": "Stream",
                "info": STREAM_INFO_TEXT,
                "advanced": True,
            },
        }

    def build(
        self,
        groq_api_key: str,
        model_name: str,
        groq_api_base: Optional[str] = None,
        max_tokens: Optional[int] = None,
        temperature: float = 0.1,
        n: Optional[int] = 1,
        stream: bool = False,
    ) -> BaseLanguageModel:
        """Construct a `ChatGroq` instance from the configured fields.

        The API key is wrapped in `SecretStr` so it is not leaked in reprs/logs.
        """
        return ChatGroq(
            model_name=model_name,
            max_tokens=max_tokens or None,  # type: ignore  # treat 0 as "no limit"
            temperature=temperature,
            groq_api_base=groq_api_base,
            n=n or 1,  # guard against None/0 from the UI
            groq_api_key=SecretStr(groq_api_key),
            streaming=stream,
        )
95 changes: 95 additions & 0 deletions src/backend/base/langflow/components/models/GroqModel.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,95 @@
from typing import Optional

from langchain_groq import ChatGroq
from langflow.base.models.groq_constants import MODEL_NAMES
from pydantic.v1 import SecretStr

from langflow.base.constants import STREAM_INFO_TEXT
from langflow.base.models.model import LCModelComponent
from langflow.field_typing import Text


class GroqModel(LCModelComponent):
    """Chat component that runs a `ChatGroq` model on the given input.

    Builds the model, then delegates to `get_chat_result` to produce text
    (optionally streaming, optionally with a system message).
    """

    display_name: str = "Groq"
    description: str = "Generate text using Groq."
    icon = "Groq"

    # Field order must reference the exact keys returned by build_config;
    # previously it listed keys ("model", "max_output_tokens", "top_k",
    # "top_p") that do not exist here.
    field_order = [
        "groq_api_key",
        "groq_api_base",
        "model_name",
        "max_tokens",
        "temperature",
        "n",
        "input_value",
        "system_message",
        "stream",
    ]

    def build_config(self):
        """Return the UI field configuration for this component."""
        return {
            "groq_api_key": {
                "display_name": "Groq API Key",
                "info": "API key for the Groq API.",
                "password": True,
            },
            "groq_api_base": {
                "display_name": "Groq API Base",
                "info": "Base URL path for API requests, leave blank if not using a proxy or service emulator.",
                "advanced": True,
            },
            "max_tokens": {
                "display_name": "Max Output Tokens",
                "info": "The maximum number of tokens to generate.",
                "advanced": True,
            },
            "temperature": {
                "display_name": "Temperature",
                "info": "Run inference with this temperature. Must be in the closed interval [0.0, 1.0].",
            },
            "n": {
                "display_name": "N",
                "info": "Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.",
                "advanced": True,
            },
            "model_name": {
                "display_name": "Model",
                # Options come from the shared Groq constants module.
                "info": "The name of the Groq model to use.",
                "options": MODEL_NAMES,
            },
            "input_value": {"display_name": "Input", "info": "The input to the model."},
            "stream": {
                "display_name": "Stream",
                "info": STREAM_INFO_TEXT,
                "advanced": True,
            },
            "system_message": {
                "display_name": "System Message",
                "info": "System message to pass to the model.",
                "advanced": True,
            },
        }

    def build(
        self,
        groq_api_key: str,
        model_name: str,
        input_value: Text,
        groq_api_base: Optional[str] = None,
        max_tokens: Optional[int] = None,
        temperature: float = 0.1,
        n: Optional[int] = 1,
        stream: bool = False,
        system_message: Optional[str] = None,
    ) -> Text:
        """Run the Groq chat model on `input_value` and return the text result.

        The API key is wrapped in `SecretStr` so it is not leaked in reprs/logs.
        """
        output = ChatGroq(
            model_name=model_name,
            max_tokens=max_tokens or None,  # type: ignore  # treat 0 as "no limit"
            temperature=temperature,
            groq_api_base=groq_api_base,
            n=n or 1,  # guard against None/0 from the UI
            groq_api_key=SecretStr(groq_api_key),
            streaming=stream,
        )
        return self.get_chat_result(output, stream, input_value, system_message)
1 change: 1 addition & 0 deletions src/backend/base/langflow/services/settings/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
"ASTRA_DB_APPLICATION_TOKEN",
"ASTRA_DB_API_ENDPOINT",
"COHERE_API_KEY",
"GROQ_API_KEY",
"HUGGINGFACEHUB_API_TOKEN",
"PINECONE_API_KEY",
"SEARCHAPI_API_KEY",
Expand Down
17 changes: 9 additions & 8 deletions src/backend/base/poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading

0 comments on commit a037bf9

Please sign in to comment.