Skip to content

Commit

Permalink
Merge branch 'main' into release_0_2_36
Browse files Browse the repository at this point in the history
  • Loading branch information
jackgerrits authored Oct 1, 2024
2 parents df8348c + 3a6b88e commit 303b0f5
Show file tree
Hide file tree
Showing 31 changed files with 1,615 additions and 66 deletions.
32 changes: 32 additions & 0 deletions .github/workflows/contrib-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -709,3 +709,35 @@ jobs:
with:
file: ./coverage.xml
flags: unittests

OllamaTest:
  runs-on: ${{ matrix.os }}
  strategy:
    fail-fast: false
    matrix:
      os: [ubuntu-latest, macos-latest, windows-2019]
      python-version: ["3.9", "3.10", "3.11", "3.12"]
      exclude:
        - os: macos-latest
          python-version: "3.9"
  steps:
    - uses: actions/checkout@v4
      with:
        lfs: true
    - name: Set up Python ${{ matrix.python-version }}
      uses: actions/setup-python@v5
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install packages and dependencies for all tests
      run: |
        python -m pip install --upgrade pip wheel
        # Quote the requirement specifier: an unquoted ">" is shell output
        # redirection (creates a file named "=5" and drops the version bound).
        pip install "pytest-cov>=5"
    - name: Install packages and dependencies for Ollama
      run: |
        pip install -e .[ollama,test]
        # NOTE(review): pytest runs without --cov here, so ./coverage.xml is
        # not generated by this step, yet the Codecov upload below expects it —
        # confirm against the sibling jobs in this workflow (the previous job
        # also uploads ./coverage.xml; see lines just above this job).
        pytest test/oai/test_ollama.py --skip-openai
    - name: Upload coverage to Codecov
      uses: codecov/codecov-action@v3
      with:
        file: ./coverage.xml
        flags: unittests
14 changes: 8 additions & 6 deletions autogen/agentchat/contrib/vectordb/pgvectordb.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,16 +4,17 @@
from typing import Callable, List, Optional, Union

import numpy as np

# try:
import pgvector
from pgvector.psycopg import register_vector
from sentence_transformers import SentenceTransformer

from .base import Document, ItemID, QueryResults, VectorDB
from .utils import get_logger

try:
import pgvector
from pgvector.psycopg import register_vector
except ImportError:
raise ImportError("Please install pgvector: `pip install pgvector`")
# except ImportError:
# raise ImportError("Please install pgvector: `pip install pgvector`")

try:
import psycopg
Expand Down Expand Up @@ -416,6 +417,7 @@ def query(
results = []
for query_text in query_texts:
vector = self.embedding_function(query_text)
vector_string = "[" + ",".join([f"{x:.8f}" for x in vector]) + "]"

if distance_type.lower() == "cosine":
index_function = "<=>"
Expand All @@ -428,7 +430,7 @@ def query(
query = (
f"SELECT id, documents, embedding, metadatas "
f"FROM {self.name} "
f"{clause} embedding {index_function} '{str(vector)}' {distance_threshold} "
f"{clause} embedding {index_function} '{vector_string}' {distance_threshold} "
f"LIMIT {n_results}"
)
cursor.execute(query)
Expand Down
2 changes: 2 additions & 0 deletions autogen/logger/file_logger.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
from autogen.oai.gemini import GeminiClient
from autogen.oai.groq import GroqClient
from autogen.oai.mistral import MistralAIClient
from autogen.oai.ollama import OllamaClient
from autogen.oai.together import TogetherClient

logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -218,6 +219,7 @@ def log_new_client(
| TogetherClient
| GroqClient
| CohereClient
| OllamaClient
| BedrockClient
),
wrapper: OpenAIWrapper,
Expand Down
2 changes: 2 additions & 0 deletions autogen/logger/sqlite_logger.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
from autogen.oai.gemini import GeminiClient
from autogen.oai.groq import GroqClient
from autogen.oai.mistral import MistralAIClient
from autogen.oai.ollama import OllamaClient
from autogen.oai.together import TogetherClient

logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -405,6 +406,7 @@ def log_new_client(
TogetherClient,
GroqClient,
CohereClient,
OllamaClient,
BedrockClient,
],
wrapper: OpenAIWrapper,
Expand Down
12 changes: 12 additions & 0 deletions autogen/oai/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,13 @@
except ImportError as e:
cohere_import_exception = e

try:
from autogen.oai.ollama import OllamaClient

ollama_import_exception: Optional[ImportError] = None
except ImportError as e:
ollama_import_exception = e

try:
from autogen.oai.bedrock import BedrockClient

Expand Down Expand Up @@ -545,6 +552,11 @@ def _register_default_client(self, config: Dict[str, Any], openai_config: Dict[s
raise ImportError("Please install `cohere` to use the Cohere API.")
client = CohereClient(**openai_config)
self._clients.append(client)
elif api_type is not None and api_type.startswith("ollama"):
if ollama_import_exception:
raise ImportError("Please install with `[ollama]` option to use the Ollama API.")
client = OllamaClient(**openai_config)
self._clients.append(client)
elif api_type is not None and api_type.startswith("bedrock"):
self._configure_openai_config_for_bedrock(config, openai_config)
if bedrock_import_exception:
Expand Down
Loading

0 comments on commit 303b0f5

Please sign in to comment.