
Commit

Updated to v0.3.3 with changes to options handling (#3593)
marklysze authored Oct 1, 2024
1 parent 3a6b88e commit 27c8828
Showing 3 changed files with 9 additions and 9 deletions.
8 changes: 4 additions & 4 deletions autogen/oai/ollama.py
@@ -127,7 +127,7 @@ def parse_params(self, params: Dict[str, Any]) -> Dict[str, Any]:
 
         if "num_predict" in params:
             # Maximum number of tokens to predict, note: -1 is infinite, -2 is fill context, 128 is default
-            ollama_params["num_predict"] = validate_parameter(params, "num_predict", int, False, 128, None, None)
+            options_dict["num_predict"] = validate_parameter(params, "num_predict", int, False, 128, None, None)
 
         if "repeat_penalty" in params:
             options_dict["repeat_penalty"] = validate_parameter(
@@ -138,15 +138,15 @@ def parse_params(self, params: Dict[str, Any]) -> Dict[str, Any]:
             options_dict["seed"] = validate_parameter(params, "seed", int, False, 42, None, None)
 
         if "temperature" in params:
-            ollama_params["temperature"] = validate_parameter(
+            options_dict["temperature"] = validate_parameter(
                 params, "temperature", (int, float), False, 0.8, None, None
             )
 
         if "top_k" in params:
-            ollama_params["top_k"] = validate_parameter(params, "top_k", int, False, 40, None, None)
+            options_dict["top_k"] = validate_parameter(params, "top_k", int, False, 40, None, None)
 
         if "top_p" in params:
-            ollama_params["top_p"] = validate_parameter(params, "top_p", (int, float), False, 0.9, None, None)
+            options_dict["top_p"] = validate_parameter(params, "top_p", (int, float), False, 0.9, None, None)
 
         if self._native_tool_calls and self._tools_in_conversation and not self._should_hide_tools:
             ollama_params["tools"] = params["tools"]
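
For context, these edits move the sampling controls into the options dictionary that the Ollama Python client accepts per request, rather than passing them as top-level arguments. A minimal sketch of the equivalent direct client call (illustrative only; the model name, message, and values are placeholders based on the defaults shown above):

import ollama

# With ollama>=0.3.3, per-request sampling settings are supplied through the
# "options" dict, mirroring what parse_params now collects in options_dict.
response = ollama.chat(
    model="llama3.1:8b",
    messages=[{"role": "user", "content": "Hello"}],
    options={
        "num_predict": 128,    # max tokens to generate (-1 infinite, -2 fill context)
        "repeat_penalty": 1.1,
        "seed": 42,
        "temperature": 0.8,
        "top_k": 40,
        "top_p": 0.9,
    },
    stream=False,
)
print(response["message"]["content"])
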
2 changes: 1 addition & 1 deletion setup.py
@@ -105,7 +105,7 @@
     "mistral": ["mistralai>=1.0.1"],
     "groq": ["groq>=0.9.0"],
     "cohere": ["cohere>=5.5.8"],
-    "ollama": ["ollama>=0.3.1", "fix_busted_json>=0.0.18"],
+    "ollama": ["ollama>=0.3.3", "fix_busted_json>=0.0.18"],
     "bedrock": ["boto3>=1.34.149"],
 }
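
Since only the version floor changes here, a quick way to confirm an installed client meets it (a small sketch: importlib.metadata is standard library, while the packaging dependency is assumed to be present, as it usually is alongside pip/setuptools):

from importlib.metadata import version

from packaging.version import Version

installed = Version(version("ollama"))
print("ollama client version:", installed)
if installed < Version("0.3.3"):
    raise RuntimeError("The ollama extra now requires ollama>=0.3.3; run: pip install -U ollama")
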
8 changes: 4 additions & 4 deletions test/oai/test_ollama.py
@@ -65,13 +65,13 @@ def test_parsing_params(ollama_client):
     }
     expected_params = {
         "model": "llama3.1:8b",
-        "temperature": 0.8,
-        "num_predict": 128,
-        "top_k": 40,
-        "top_p": 0.9,
         "options": {
             "repeat_penalty": 1.1,
             "seed": 42,
+            "temperature": 0.8,
+            "num_predict": 128,
+            "top_k": 40,
+            "top_p": 0.9,
         },
         "stream": False,
     }
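
The updated expectation shows that callers still supply the sampling keys flat and parse_params reshapes them under "options". A hedged sketch of a matching user-side configuration (the api_type key and overall config_list shape are assumptions about AutoGen's usual client selection, not part of this diff):

# Hypothetical llm_config; the flat keys below are what test_parsing_params feeds
# into parse_params, which relocates the sampling ones under "options".
ollama_llm_config = {
    "config_list": [
        {
            "api_type": "ollama",   # assumed selector for the Ollama client
            "model": "llama3.1:8b",
            "temperature": 0.8,
            "num_predict": 128,
            "top_k": 40,
            "top_p": 0.9,
            "repeat_penalty": 1.1,
            "seed": 42,
            "stream": False,
        }
    ]
}
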
