Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
48 changes: 42 additions & 6 deletions litellm/containers/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -199,7 +199,13 @@ def create_container(
return response

# get llm provider logic
litellm_params = GenericLiteLLMParams(**kwargs)
# Pass credential params explicitly since they're named args, not in kwargs
litellm_params = GenericLiteLLMParams(
api_key=api_key,
api_base=api_base,
api_version=api_version,
**kwargs,
)
# get provider config
container_provider_config: Optional[BaseContainerConfig] = (
ProviderConfigManager.get_provider_container_config(
Expand Down Expand Up @@ -406,7 +412,13 @@ def list_containers(
return response

# get llm provider logic
litellm_params = GenericLiteLLMParams(**kwargs)
# Pass credential params explicitly since they're named args, not in kwargs
litellm_params = GenericLiteLLMParams(
api_key=api_key,
api_base=api_base,
api_version=api_version,
**kwargs,
)
# get provider config
container_provider_config: Optional[BaseContainerConfig] = (
ProviderConfigManager.get_provider_container_config(
Expand Down Expand Up @@ -594,7 +606,13 @@ def retrieve_container(
return response

# get llm provider logic
litellm_params = GenericLiteLLMParams(**kwargs)
# Pass credential params explicitly since they're named args, not in kwargs
litellm_params = GenericLiteLLMParams(
api_key=api_key,
api_base=api_base,
api_version=api_version,
**kwargs,
)
# get provider config
container_provider_config: Optional[BaseContainerConfig] = (
ProviderConfigManager.get_provider_container_config(
Expand Down Expand Up @@ -774,7 +792,13 @@ def delete_container(
return response

# get llm provider logic
litellm_params = GenericLiteLLMParams(**kwargs)
# Pass credential params explicitly since they're named args, not in kwargs
litellm_params = GenericLiteLLMParams(
api_key=api_key,
api_base=api_base,
api_version=api_version,
**kwargs,
)
# get provider config
container_provider_config: Optional[BaseContainerConfig] = (
ProviderConfigManager.get_provider_container_config(
Expand Down Expand Up @@ -968,7 +992,13 @@ def list_container_files(
return response

# get llm provider logic
litellm_params = GenericLiteLLMParams(**kwargs)
# Pass credential params explicitly since they're named args, not in kwargs
litellm_params = GenericLiteLLMParams(
api_key=api_key,
api_base=api_base,
api_version=api_version,
**kwargs,
)
# get provider config
container_provider_config: Optional[BaseContainerConfig] = (
ProviderConfigManager.get_provider_container_config(
Expand Down Expand Up @@ -1203,7 +1233,13 @@ def upload_container_file(
return response

# get llm provider logic
litellm_params = GenericLiteLLMParams(**kwargs)
# Pass credential params explicitly since they're named args, not in kwargs
litellm_params = GenericLiteLLMParams(
api_key=api_key,
api_base=api_base,
api_version=api_version,
**kwargs,
)
# get provider config
container_provider_config: Optional[BaseContainerConfig] = (
ProviderConfigManager.get_provider_container_config(
Expand Down
9 changes: 7 additions & 2 deletions litellm/llms/openai/containers/transformation.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,8 +83,13 @@ def get_complete_url(
) -> str:
"""Get the complete URL for OpenAI container API.
"""
if api_base is None:
api_base = "https://api.openai.com/v1"
api_base = (
api_base
or litellm.api_base
or get_secret_str("OPENAI_BASE_URL")
or get_secret_str("OPENAI_API_BASE")
or "https://api.openai.com/v1"
)

return f"{api_base.rstrip('/')}/containers"

Expand Down
163 changes: 163 additions & 0 deletions tests/test_litellm/containers/test_container_regional_api_base.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,163 @@
"""
Tests for OpenAI Containers API regional api_base support.

Validates that litellm.create_container and litellm.upload_container_file
correctly use regional endpoints like https://us.api.openai.com/v1 for
US Data Residency instead of defaulting to https://api.openai.com/v1.
"""

import os
import sys
from unittest.mock import MagicMock, patch
from urllib.parse import urlparse

import httpx
import pytest

sys.path.insert(0, os.path.abspath("../../.."))

import litellm


class TestContainerRegionalApiBase:
"""Test suite for container API regional api_base support."""

def setup_method(self):
"""Set up test fixtures."""
os.environ["OPENAI_API_KEY"] = "sk-test123"

def teardown_method(self):
    """Remove the test credentials/base-URL env overrides and reset litellm globals."""
    # pop(..., None) is a no-op when the variable was never set, matching the
    # original guarded `del` behavior.
    for env_var in ("OPENAI_API_KEY", "OPENAI_BASE_URL", "OPENAI_API_BASE"):
        os.environ.pop(env_var, None)
    litellm.api_base = None

@patch("litellm.llms.custom_httpx.http_handler.HTTPHandler.post")
def test_create_container_uses_regional_api_base(self, mock_post):
    """
    Test that litellm.create_container uses the regional api_base when provided.

    This validates the fix for US Data Residency support where requests should
    go to https://us.api.openai.com/v1 instead of https://api.openai.com/v1.
    """
    mock_response = MagicMock(spec=httpx.Response)
    mock_response.status_code = 200
    mock_response.json.return_value = {
        "id": "cntr_123456",
        "object": "container",
        "created_at": 1747857508,
        "status": "running",
        "expires_after": {"anchor": "last_active_at", "minutes": 20},
        "last_active_at": 1747857508,
        "name": "Test Container"
    }
    mock_post.return_value = mock_response

    litellm.create_container(
        name="Test Container",
        custom_llm_provider="openai",
        api_base="https://us.api.openai.com/v1",
    )

    mock_post.assert_called_once()
    call_args = mock_post.call_args
    called_url = call_args[1]["url"]

    # Compare the parsed hostname rather than doing a substring check: a URL
    # such as https://evil.com/us.api.openai.com would satisfy
    # `"us.api.openai.com" in called_url` (CodeQL:
    # py/incomplete-url-substring-sanitization).
    parsed_url = urlparse(called_url)
    assert parsed_url.hostname == "us.api.openai.com", f"Expected US regional URL host, got: {called_url}"
    # Exact-match assertion also pins scheme and path.
    assert called_url == "https://us.api.openai.com/v1/containers"

@patch("litellm.llms.custom_httpx.http_handler.HTTPHandler.post")
def test_create_container_uses_env_var_openai_base_url(self, mock_post):
    """
    Test that litellm.create_container uses OPENAI_BASE_URL env var.
    """
    os.environ["OPENAI_BASE_URL"] = "https://us.api.openai.com/v1"

    mock_response = MagicMock(spec=httpx.Response)
    mock_response.status_code = 200
    mock_response.json.return_value = {
        "id": "cntr_123456",
        "object": "container",
        "created_at": 1747857508,
        "status": "running",
        "expires_after": {"anchor": "last_active_at", "minutes": 20},
        "last_active_at": 1747857508,
        "name": "Test Container"
    }
    mock_post.return_value = mock_response

    litellm.create_container(
        name="Test Container",
        custom_llm_provider="openai",
    )

    mock_post.assert_called_once()
    call_args = mock_post.call_args
    called_url = call_args[1]["url"]

    # Compare the parsed hostname rather than doing a substring check: a URL
    # such as https://evil.com/us.api.openai.com would satisfy
    # `"us.api.openai.com" in called_url` (CodeQL:
    # py/incomplete-url-substring-sanitization).
    parsed_url = urlparse(called_url)
    assert parsed_url.hostname == "us.api.openai.com", f"Expected US regional URL host, got: {called_url}"

Check failure

Code scanning / CodeQL

Incomplete URL substring sanitization High test

The string
us.api.openai.com
may be at an arbitrary position in the sanitized URL.

Copilot Autofix

AI 2 months ago

In general, to fix incomplete URL substring sanitization, the URL should be parsed with a proper URL parser (such as urllib.parse.urlparse in Python) and the check should be performed on the parsed hostname (and, if needed, scheme and path), not by searching for substrings in the raw URL string.

For this specific test, we want to assert that litellm.create_container uses the US regional API base https://us.api.openai.com/v1. The safest, most precise fix is to parse called_url with urllib.parse.urlparse, then assert that the parsed hostname equals us.api.openai.com (and optionally that the path is /v1/containers). This preserves the intended functionality (ensuring the US regional host is used) while avoiding substring checks. Concretely:

  • Add an import for urlparse (or urllib.parse) at the top of tests/test_litellm/containers/test_container_regional_api_base.py.
  • Replace the assertion on line 101 with code that parses called_url and checks parsed.hostname == "us.api.openai.com". We can also assert that the path equals /v1/containers if we want to keep the test strict, but the minimal fix for the CodeQL finding is to validate the host.
Suggested changeset 1
tests/test_litellm/containers/test_container_regional_api_base.py

Autofix patch

Autofix patch
Run the following command in your local git repository to apply this patch
cat << 'EOF' | git apply
diff --git a/tests/test_litellm/containers/test_container_regional_api_base.py b/tests/test_litellm/containers/test_container_regional_api_base.py
--- a/tests/test_litellm/containers/test_container_regional_api_base.py
+++ b/tests/test_litellm/containers/test_container_regional_api_base.py
@@ -9,6 +9,7 @@
 import os
 import sys
 from unittest.mock import MagicMock, patch
+from urllib.parse import urlparse
 
 import httpx
 import pytest
@@ -97,8 +98,8 @@
         mock_post.assert_called_once()
         call_args = mock_post.call_args
         called_url = call_args[1]["url"]
-        
-        assert "us.api.openai.com" in called_url, f"Expected US regional URL, got: {called_url}"
+        parsed_url = urlparse(called_url)
+        assert parsed_url.hostname == "us.api.openai.com", f"Expected US regional URL host, got: {called_url}"
 
     @patch("litellm.llms.custom_httpx.http_handler.HTTPHandler.post")
     def test_create_container_defaults_to_standard_openai(self, mock_post):
EOF
@@ -9,6 +9,7 @@
import os
import sys
from unittest.mock import MagicMock, patch
from urllib.parse import urlparse

import httpx
import pytest
@@ -97,8 +98,8 @@
mock_post.assert_called_once()
call_args = mock_post.call_args
called_url = call_args[1]["url"]

assert "us.api.openai.com" in called_url, f"Expected US regional URL, got: {called_url}"
parsed_url = urlparse(called_url)
assert parsed_url.hostname == "us.api.openai.com", f"Expected US regional URL host, got: {called_url}"

@patch("litellm.llms.custom_httpx.http_handler.HTTPHandler.post")
def test_create_container_defaults_to_standard_openai(self, mock_post):
Copilot is powered by AI and may make mistakes. Always verify output.

@patch("litellm.llms.custom_httpx.http_handler.HTTPHandler.post")
def test_create_container_defaults_to_standard_openai(self, mock_post):
    """
    Test that litellm.create_container defaults to standard OpenAI URL
    when no regional api_base is configured.
    """
    fake_response = MagicMock(spec=httpx.Response)
    fake_response.status_code = 200
    fake_response.json.return_value = {
        "id": "cntr_123456",
        "object": "container",
        "created_at": 1747857508,
        "status": "running",
        "expires_after": {"anchor": "last_active_at", "minutes": 20},
        "last_active_at": 1747857508,
        "name": "Test Container"
    }
    mock_post.return_value = fake_response

    # No api_base argument and no env override (teardown clears them), so the
    # library should fall back to the global default endpoint.
    litellm.create_container(name="Test Container", custom_llm_provider="openai")

    mock_post.assert_called_once()
    requested_url = mock_post.call_args[1]["url"]
    assert requested_url == "https://api.openai.com/v1/containers"

@patch("litellm.llms.custom_httpx.http_handler.HTTPHandler.post")
def test_upload_container_file_uses_regional_api_base(self, mock_post):
    """
    Test that litellm.upload_container_file uses the regional api_base when provided.
    """
    mock_response = MagicMock(spec=httpx.Response)
    mock_response.status_code = 200
    mock_response.json.return_value = {
        "id": "file_123456",
        "object": "container.file",
        "created_at": 1747857508,
        "container_id": "cntr_123456",
        "path": "/mnt/user/data.csv",
        "source": "user",
    }
    mock_post.return_value = mock_response

    litellm.upload_container_file(
        container_id="cntr_123456",
        file=("data.csv", b"col1,col2\n1,2", "text/csv"),
        custom_llm_provider="openai",
        api_base="https://us.api.openai.com/v1",
    )

    mock_post.assert_called_once()
    call_args = mock_post.call_args
    called_url = call_args[1]["url"]

    # Compare the parsed hostname rather than doing a substring check: a URL
    # such as https://evil.com/us.api.openai.com would satisfy
    # `"us.api.openai.com" in called_url` (CodeQL:
    # py/incomplete-url-substring-sanitization).
    parsed_url = urlparse(called_url)
    assert parsed_url.hostname == "us.api.openai.com", f"Expected US regional URL host, got: {called_url}"
    # The container-files endpoint path must target the requested container.
    assert "cntr_123456/files" in called_url

Loading