Ollama Client (with tool calling) (#3056)
* Ollama client with function calling. Initial commit: client only, no docs or tests yet.

* Tidy comments

* Cater for missing prompt token count

* Removed use of eval, added json parsing support library

* Fixed the use of the JSON fix library and the handling of the Mixtral escape sequence

* Fixed 'name' in JSON bug, catered for single function call JSON without []

* Removed role='tool' from inner tool result to reduce token usage.

* Added Ollama documentation and updated library versions

* Added Native Ollama tool calling (v0.3.0 req.) as well as hide/show tools support

* Added native tool calling and hide_tools parameter to documentation

* Update to Ollama 0.3.1, added tests

* Tweak to manual function calling prompt to improve number handling.

* Fix formatting

Co-authored-by: gagb <[email protected]>
Co-authored-by: Jack Gerrits <[email protected]>

* Fix formatting

* Better error message

---------

Co-authored-by: Eric Zhu <[email protected]>
Co-authored-by: gagb <[email protected]>
Co-authored-by: Jack Gerrits <[email protected]>
4 people authored Oct 1, 2024
1 parent db28718 commit 3a6b88e
Showing 9 changed files with 1,563 additions and 0 deletions.
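The commit message above introduces an Ollama client selected via api_type, plus native tool calling and a hide_tools option. Below is a minimal usage sketch in the style of autogen's existing config lists; only api_type="ollama" (see the client.py diff further down) and the existence of hide_tools come from this commit, while the model name and the hide_tools value are illustrative assumptions.

import autogen

# A minimal sketch, assuming a local Ollama server with the model below already pulled.
config_list = [
    {
        "api_type": "ollama",        # routes requests to the new OllamaClient
        "model": "llama3.1",         # assumption: any model available on the local Ollama server
        "hide_tools": "if_any_run",  # hide/show tools support named in the commit message; value is illustrative
    }
]

assistant = autogen.AssistantAgent(
    name="assistant",
    llm_config={"config_list": config_list},
)

Keeping the Ollama-specific options inside the config list entry mirrors how the other non-OpenAI clients in this repository are configured.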
32 changes: 32 additions & 0 deletions .github/workflows/contrib-tests.yml
@@ -709,3 +709,35 @@ jobs:
        with:
          file: ./coverage.xml
          flags: unittests

  OllamaTest:
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-2019]
        python-version: ["3.9", "3.10", "3.11", "3.12"]
        exclude:
          - os: macos-latest
            python-version: "3.9"
    steps:
      - uses: actions/checkout@v4
        with:
          lfs: true
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install packages and dependencies for all tests
        run: |
          python -m pip install --upgrade pip wheel
          pip install pytest-cov>=5
      - name: Install packages and dependencies for Ollama
        run: |
          pip install -e .[ollama,test]
          pytest test/oai/test_ollama.py --skip-openai
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v3
        with:
          file: ./coverage.xml
          flags: unittests
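The job above installs the package with the ollama extra and runs the new test/oai/test_ollama.py. That test file is among the 9 changed files but its diff is not rendered here; the sketch below is purely hypothetical and only illustrates the kind of smoke test such a suite might contain (every name except OllamaClient is an assumption).

import pytest

try:
    from autogen.oai.ollama import OllamaClient

    skip_reason = None
except ImportError as e:  # the [ollama] extra is optional
    skip_reason = str(e)


@pytest.mark.skipif(skip_reason is not None, reason="ollama extra not installed")
def test_client_exposes_create():
    # autogen model clients are expected to expose a create() entry point.
    client = OllamaClient()
    assert hasattr(client, "create")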
2 changes: 2 additions & 0 deletions autogen/logger/file_logger.py
@@ -24,6 +24,7 @@
from autogen.oai.gemini import GeminiClient
from autogen.oai.groq import GroqClient
from autogen.oai.mistral import MistralAIClient
from autogen.oai.ollama import OllamaClient
from autogen.oai.together import TogetherClient

logger = logging.getLogger(__name__)
@@ -218,6 +219,7 @@ def log_new_client(
            | TogetherClient
            | GroqClient
            | CohereClient
            | OllamaClient
            | BedrockClient
        ),
        wrapper: OpenAIWrapper,
2 changes: 2 additions & 0 deletions autogen/logger/sqlite_logger.py
@@ -25,6 +25,7 @@
from autogen.oai.gemini import GeminiClient
from autogen.oai.groq import GroqClient
from autogen.oai.mistral import MistralAIClient
from autogen.oai.ollama import OllamaClient
from autogen.oai.together import TogetherClient

logger = logging.getLogger(__name__)
@@ -405,6 +406,7 @@ def log_new_client(
            TogetherClient,
            GroqClient,
            CohereClient,
            OllamaClient,
            BedrockClient,
        ],
        wrapper: OpenAIWrapper,
12 changes: 12 additions & 0 deletions autogen/oai/client.py
@@ -93,6 +93,13 @@
except ImportError as e:
    cohere_import_exception = e

try:
    from autogen.oai.ollama import OllamaClient

    ollama_import_exception: Optional[ImportError] = None
except ImportError as e:
    ollama_import_exception = e

try:
    from autogen.oai.bedrock import BedrockClient

@@ -545,6 +552,11 @@ def _register_default_client(self, config: Dict[str, Any], openai_config: Dict[s
raise ImportError("Please install `cohere` to use the Cohere API.")
client = CohereClient(**openai_config)
self._clients.append(client)
elif api_type is not None and api_type.startswith("ollama"):
if ollama_import_exception:
raise ImportError("Please install with `[ollama]` option to use the Ollama API.")
client = OllamaClient(**openai_config)
self._clients.append(client)
elif api_type is not None and api_type.startswith("bedrock"):
self._configure_openai_config_for_bedrock(config, openai_config)
if bedrock_import_exception:
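With the registration branch above in place, constructing autogen's OpenAIWrapper with an "ollama" config entry should instantiate the new client. A minimal sketch, assuming the [ollama] extra is installed and a local Ollama server is running with the (illustrative) model already pulled:

from autogen import OpenAIWrapper

# The api_type prefix check in _register_default_client dispatches this entry to OllamaClient.
wrapper = OpenAIWrapper(config_list=[{"api_type": "ollama", "model": "llama3.1"}])
response = wrapper.create(messages=[{"role": "user", "content": "Say hello in one sentence."}])
print(wrapper.extract_text_or_completion_object(response))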
