This repository has been archived by the owner on Nov 13, 2024. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 121
A few bug fixes following OpenAI client update #178
Merged
igiloh-pinecone
merged 7 commits into
pinecone-io:main
from
igiloh-pinecone:bugfix/cli_openai_error
Nov 16, 2023
Merged
Changes from 6 commits
Commits
Show all changes
7 commits
Select commit
Hold shift + click to select a range
9d8bfba
[cli] Bug fix - OpenAI models call changed
igiloh-pinecone a266dc5
[cli] Improve error handling
igiloh-pinecone 96ed4f1
[llm] Bug fix in OpenAILLM.available_models
igiloh-pinecone adb9791
[test] Added unit test for OpenAILLM.availble_models
igiloh-pinecone b8d9c5d
[CLI] Correct typo in error message
igiloh-pinecone 15a2189
[cli] Verify pinecone connection on `canopy start`
igiloh-pinecone f6075d8
[tests] fix typo
igiloh-pinecone File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -21,6 +21,7 @@ | |
from canopy.knowledge_base import KnowledgeBase | ||
from canopy.knowledge_base import connect_to_pinecone | ||
from canopy.knowledge_base.chunker import Chunker | ||
from canopy.chat_engine import ChatEngine | ||
from canopy.models.data_models import Document | ||
from canopy.tokenizer import Tokenizer | ||
from canopy_cli.data_loader import ( | ||
|
@@ -43,6 +44,12 @@ | |
DEFAULT_SERVER_URL = f"http://localhost:8000/{API_VERSION}" | ||
spinner = Spinner() | ||
|
||
OPENAI_AUTH_ERROR_MSG = ( | ||
"Failed to connect to OpenAI, please make sure that the OPENAI_API_KEY " | ||
"environment variable is set correctly.\n" | ||
"Please visit https://platform.openai.com/account/api-keys for more details" | ||
) | ||
|
||
|
||
def check_server_health(url: str): | ||
try: | ||
|
@@ -72,28 +79,17 @@ def wait_for_server(chat_server_url: str): | |
check_server_health(chat_server_url) | ||
|
||
|
||
def validate_connection(): | ||
def validate_pinecone_connection(): | ||
try: | ||
connect_to_pinecone() | ||
except RuntimeError as e: | ||
msg = ( | ||
f"{str(e)}\n" | ||
"Credentials should be set by the PINECONE_API_KEY and PINECONE_ENVIRONMENT" | ||
" environment variables. " | ||
" environment variables.\n" | ||
"Please visit https://www.pinecone.io/docs/quickstart/ for more details." | ||
) | ||
raise CLIError(msg) | ||
try: | ||
openai.Model.list() | ||
except Exception: | ||
msg = ( | ||
"Failed to connect to OpenAI, please make sure that the OPENAI_API_KEY " | ||
"environment variable is set correctly.\n" | ||
"Please visit https://platform.openai.com/account/api-keys for more details" | ||
) | ||
raise CLIError(msg) | ||
click.echo("Canopy: ", nl=False) | ||
click.echo(click.style("Ready\n", bold=True, fg="green")) | ||
|
||
|
||
def _initialize_tokenizer(): | ||
|
@@ -104,17 +100,25 @@ def _initialize_tokenizer(): | |
raise CLIError(msg) | ||
|
||
|
||
def _load_kb_config(config_file: Optional[str]) -> Dict[str, Any]: | ||
def _read_config_file(config_file: Optional[str]) -> Dict[str, Any]: | ||
if config_file is None: | ||
return {} | ||
|
||
try: | ||
with open(os.path.join("config", config_file), 'r') as f: | ||
with open(config_file, 'r') as f: | ||
config = yaml.safe_load(f) | ||
except Exception as e: | ||
msg = f"Failed to load config file {config_file}. Reason:\n{e}" | ||
raise CLIError(msg) | ||
|
||
return config | ||
|
||
|
||
def _load_kb_config(config_file: Optional[str]) -> Dict[str, Any]: | ||
config = _read_config_file(config_file) | ||
if not config: | ||
return {} | ||
|
||
if "knowledge_base" in config: | ||
kb_config = config.get("knowledge_base", None) | ||
elif "chat_engine" in config: | ||
|
@@ -132,6 +136,22 @@ def _load_kb_config(config_file: Optional[str]) -> Dict[str, Any]: | |
return kb_config | ||
|
||
|
||
def _validate_chat_engine(config_file: Optional[str]): | ||
config = _read_config_file(config_file) | ||
Tokenizer.initialize() | ||
Tokenizer.initialize() | ||
Review thread on this line:
- Reviewer: We should have a "with tokenizer..." context manager here.
- Author: Yeah, thought about it.
- Author: Just a task for now; I will add it.
||
try: | ||
ChatEngine.from_config(config.get("chat_engine", {})) | ||
except openai.OpenAIError: | ||
raise CLIError(OPENAI_AUTH_ERROR_MSG) | ||
except Exception as e: | ||
msg = f"Failed to initialize Canopy server. Reason:\n{e}" | ||
if config_file: | ||
msg += f"\nPlease check the configuration file {config_file}" | ||
raise CLIError(msg) | ||
finally: | ||
Tokenizer.clear() | ||
|
||
|
||
class CanopyCommandGroup(click.Group): | ||
""" | ||
A custom click Group that lets us control the order of commands in the help menu. | ||
|
@@ -165,9 +185,7 @@ def cli(ctx): | |
Visit https://www.pinecone.io/ to sign up for free. | ||
""" | ||
if ctx.invoked_subcommand is None: | ||
validate_connection() | ||
click.echo(ctx.get_help()) | ||
# click.echo(command.get_help(ctx)) | ||
|
||
|
||
@cli.command(help="Check if canopy server is running and healthy.") | ||
|
@@ -273,19 +291,21 @@ def upsert(index_name: str, | |
) | ||
raise CLIError(msg) | ||
|
||
validate_pinecone_connection() | ||
|
||
_initialize_tokenizer() | ||
|
||
kb_config = _load_kb_config(config) | ||
kb = KnowledgeBase.from_config(kb_config, index_name=index_name) | ||
try: | ||
kb = KnowledgeBase.from_config(kb_config, index_name=index_name) | ||
except openai.OpenAIError: | ||
raise CLIError(OPENAI_AUTH_ERROR_MSG) | ||
|
||
try: | ||
kb.connect() | ||
except RuntimeError as e: | ||
# TODO: kb should throw a specific exception for each case | ||
msg = str(e) | ||
if "credentials" in msg: | ||
msg += ("\nCredentials should be set by the PINECONE_API_KEY and " | ||
"PINECONE_ENVIRONMENT environment variables. Please visit " | ||
"https://www.pinecone.io/docs/quickstart/ for more details.") | ||
raise CLIError(msg) | ||
|
||
click.echo("Canopy is going to upsert data from ", nl=False) | ||
|
@@ -543,6 +563,9 @@ def chat(chat_server_url, rag, debug, stream): | |
help="Index name, if not provided already in as an environment variable") | ||
def start(host: str, port: str, reload: bool, | ||
config: Optional[str], index_name: Optional[str]): | ||
validate_pinecone_connection() | ||
_validate_chat_engine(config) | ||
|
||
note_msg = ( | ||
"🚨 Note 🚨\n" | ||
"For debugging only. To run the Canopy server in production " | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Isn't it a duplicate?
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Nope, `config` can come back empty (`{}`) from `_read_config_file()`.