A few bug fixes following OpenAI client update (#178)
* [cli] Bug fix - OpenAI models call changed

We called `openai.Model.list()` to check the OpenAI connection, but the client API changed in openai>=1.0 (see the sketch after this list).

* [cli] Improve error handling

- Only verify OpenAI connection if OpenAI is used in configuration
- Verify configuration before starting the server, to give a clear error message
- Don't error out on the main `canopy` command (always show the intro / help message)

* [llm] Bug fix in OpenAILLM.available_models

It had not been updated to use the new OpenAI client.

* [test] Added unit test for OpenAILLM.available_models

It was missing a test, apparently.

* [CLI] Correct typo in error message

You look away for one second and Co-Pilot is wreaking havoc on your error messages...

* [cli] Verify pinecone connection on `canopy start`

Meant to add it before and forgot

* [tests] fix typo
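
For reference, here is the client-API change behind the first and third fixes, as a minimal before/after sketch (the pre-1.0 call appears only in a comment, since it no longer exists in openai>=1.0; assumes OPENAI_API_KEY is set in the environment):

```python
from openai import OpenAI

# Before (openai < 1.0), a module-level call doubled as a cheap connection check:
#     openai.Model.list()
# After (openai >= 1.0), an explicit client object is used instead:
client = OpenAI()  # reads OPENAI_API_KEY from the environment

# models.list() yields Model objects, so ids are attributes (m.id),
# not dict keys (m["id"]) as in the old response format.
model_ids = [m.id for m in client.models.list()]
```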
igiloh-pinecone authored Nov 16, 2023
1 parent 5897d16 commit 0698ff2
Showing 3 changed files with 54 additions and 23 deletions.
2 changes: 1 addition & 1 deletion src/canopy/llm/openai.py
@@ -55,7 +55,7 @@ def __init__(self,

     @property
     def available_models(self):
-        return [k["id"] for k in openai.Model.list().data]
+        return [k.id for k in self._client.models.list()]
 
     def chat_completion(self,
                         messages: Messages,
67 changes: 45 additions & 22 deletions src/canopy_cli/cli.py
@@ -21,6 +21,7 @@
 from canopy.knowledge_base import KnowledgeBase
 from canopy.knowledge_base import connect_to_pinecone
 from canopy.knowledge_base.chunker import Chunker
+from canopy.chat_engine import ChatEngine
 from canopy.models.data_models import Document
 from canopy.tokenizer import Tokenizer
 from canopy_cli.data_loader import (
@@ -43,6 +44,12 @@
 DEFAULT_SERVER_URL = f"http://localhost:8000/{API_VERSION}"
 spinner = Spinner()
 
+OPENAI_AUTH_ERROR_MSG = (
+    "Failed to connect to OpenAI, please make sure that the OPENAI_API_KEY "
+    "environment variable is set correctly.\n"
+    "Please visit https://platform.openai.com/account/api-keys for more details"
+)
+
 
 def check_server_health(url: str):
     try:
@@ -72,28 +79,17 @@ def wait_for_server(chat_server_url: str):
     check_server_health(chat_server_url)
 
 
-def validate_connection():
+def validate_pinecone_connection():
     try:
         connect_to_pinecone()
     except RuntimeError as e:
         msg = (
             f"{str(e)}\n"
             "Credentials should be set by the PINECONE_API_KEY and PINECONE_ENVIRONMENT"
-            " environment variables. "
+            " environment variables.\n"
             "Please visit https://www.pinecone.io/docs/quickstart/ for more details."
         )
         raise CLIError(msg)
-    try:
-        openai.Model.list()
-    except Exception:
-        msg = (
-            "Failed to connect to OpenAI, please make sure that the OPENAI_API_KEY "
-            "environment variable is set correctly.\n"
-            "Please visit https://platform.openai.com/account/api-keys for more details"
-        )
-        raise CLIError(msg)
-    click.echo("Canopy: ", nl=False)
-    click.echo(click.style("Ready\n", bold=True, fg="green"))
 
 
 def _initialize_tokenizer():
@@ -104,17 +100,25 @@ def _initialize_tokenizer():
         raise CLIError(msg)
 
 
-def _load_kb_config(config_file: Optional[str]) -> Dict[str, Any]:
+def _read_config_file(config_file: Optional[str]) -> Dict[str, Any]:
     if config_file is None:
         return {}
 
     try:
-        with open(os.path.join("config", config_file), 'r') as f:
+        with open(config_file, 'r') as f:
             config = yaml.safe_load(f)
     except Exception as e:
         msg = f"Failed to load config file {config_file}. Reason:\n{e}"
         raise CLIError(msg)
 
+    return config
+
+
+def _load_kb_config(config_file: Optional[str]) -> Dict[str, Any]:
+    config = _read_config_file(config_file)
+    if not config:
+        return {}
+
     if "knowledge_base" in config:
         kb_config = config.get("knowledge_base", None)
     elif "chat_engine" in config:
@@ -132,6 +136,22 @@ def _load_kb_config(config_file: Optional[str]) -> Dict[str, Any]:
     return kb_config
 
 
+def _validate_chat_engine(config_file: Optional[str]):
+    config = _read_config_file(config_file)
+    Tokenizer.initialize()
+    try:
+        ChatEngine.from_config(config.get("chat_engine", {}))
+    except openai.OpenAIError:
+        raise CLIError(OPENAI_AUTH_ERROR_MSG)
+    except Exception as e:
+        msg = f"Failed to initialize Canopy server. Reason:\n{e}"
+        if config_file:
+            msg += f"\nPlease check the configuration file {config_file}"
+        raise CLIError(msg)
+    finally:
+        Tokenizer.clear()
+
+
 class CanopyCommandGroup(click.Group):
     """
     A custom click Group that lets us control the order of commands in the help menu.
@@ -165,9 +185,7 @@ def cli(ctx):
     Visit https://www.pinecone.io/ to sign up for free.
     """
     if ctx.invoked_subcommand is None:
-        validate_connection()
         click.echo(ctx.get_help())
-        # click.echo(command.get_help(ctx))
 
 
 @cli.command(help="Check if canopy server is running and healthy.")
@@ -273,19 +291,21 @@ def upsert(index_name: str,
         )
         raise CLIError(msg)
 
+    validate_pinecone_connection()
+
     _initialize_tokenizer()
 
     kb_config = _load_kb_config(config)
-    kb = KnowledgeBase.from_config(kb_config, index_name=index_name)
+    try:
+        kb = KnowledgeBase.from_config(kb_config, index_name=index_name)
+    except openai.OpenAIError:
+        raise CLIError(OPENAI_AUTH_ERROR_MSG)
+
     try:
         kb.connect()
     except RuntimeError as e:
         # TODO: kb should throw a specific exception for each case
         msg = str(e)
-        if "credentials" in msg:
-            msg += ("\nCredentials should be set by the PINECONE_API_KEY and "
-                    "PINECONE_ENVIRONMENT environment variables. Please visit "
-                    "https://www.pinecone.io/docs/quickstart/ for more details.")
         raise CLIError(msg)
 
     click.echo("Canopy is going to upsert data from ", nl=False)
@@ -543,6 +563,9 @@ def chat(chat_server_url, rag, debug, stream):
               help="Index name, if not provided already in as an environment variable")
 def start(host: str, port: str, reload: bool,
           config: Optional[str], index_name: Optional[str]):
+    validate_pinecone_connection()
+    _validate_chat_engine(config)
+
     note_msg = (
         "🚨 Note 🚨\n"
         "For debugging only. To run the Canopy server in production "
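
Together, the two hunks above mean `canopy start` now fails fast: both the Pinecone connection and the chat-engine configuration are checked before the server binds a port. A minimal sketch of that guard sequence (names are from the diff above; `run_server` is a placeholder assumption, not Canopy's actual launch call):

```python
from typing import Optional

def start_with_guards(config_path: Optional[str] = None) -> None:
    # Each guard raises CLIError with an actionable message, so a
    # misconfigured server never starts half-initialized.
    validate_pinecone_connection()      # Pinecone credentials / connectivity
    _validate_chat_engine(config_path)  # config parsing + OpenAI auth
    run_server()                        # placeholder for the real uvicorn launch
```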
8 changes: 8 additions & 0 deletions tests/system/llm/test_openai.py
@@ -223,3 +223,11 @@ def test_enforce_function_wrong_output_schema(openai_llm,

         assert openai_llm._client.chat.completions.create.call_count == 3, \
             "retry did not happen as expected"
+
+    @staticmethod
+    def test_available_models(openai_llm):
+        models = openai_llm.available_models
+        assert isinstance(models, list)
+        assert len(models) > 0
+        assert isinstance(models[0], str)
+        assert openai_llm.model_name in models
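
If the suite follows standard pytest conventions, the new test can be run on its own. A minimal sketch (assumes pytest is installed and, since the file lives under tests/system, a valid OPENAI_API_KEY in the environment):

```python
import pytest

# -k selects the new test by name; pytest.main returns the exit code (0 = pass).
raise SystemExit(pytest.main(["tests/system/llm/test_openai.py",
                              "-k", "test_available_models"]))
```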
