# (extraction artifacts removed from top of file)
# What is this?
## Tests if proxy/auth/auth_utils.py works as expected
import sys, os, asyncio, time, random, uuid
import traceback
from dotenv import load_dotenv

load_dotenv()
import os

sys.path.insert(
    0, os.path.abspath("../..")
)  # Adds the parent directory to the system path
import pytest
import litellm
from litellm.proxy.auth.auth_utils import (
    _allow_model_level_clientside_configurable_parameters,
)
from litellm.router import Router
@pytest.mark.parametrize(
    "allowed_param, input_value, should_return_true",
    [
        # the overridden param is exactly on the allow-list -> permitted
        ("api_base", {"api_base": "http://dummy.com"}, True),
        # allow-list names a different param -> api_base override rejected
        ("dummy_param", {"api_base": "http://dummy.com"}, False),
    ],
)
def test_configurable_clientside_parameters(
    allowed_param, input_value, should_return_true
):
    """Verify `_allow_model_level_clientside_configurable_parameters` honors the
    per-model `configurable_clientside_auth_params` allow-list.

    The function should return True only when the client-supplied parameter
    (`api_base` here) is covered by the model's configured allow-list.
    """
    # Router with a single deployment whose allow-list is the test parameter.
    router = Router(
        model_list=[
            {
                "model_name": "dummy-model",
                "litellm_params": {
                    "model": "gpt-3.5-turbo",
                    "api_key": "dummy-key",
                    "configurable_clientside_auth_params": [allowed_param],
                },
            }
        ]
    )
    resp = _allow_model_level_clientside_configurable_parameters(
        model="dummy-model",
        param="api_base",
        request_body_value=input_value["api_base"],
        llm_router=router,
    )
    print(resp)
    assert resp == should_return_true
def test_get_end_user_id_from_request_body_always_returns_str():
    """The end-user id extracted from a request body must be a string,
    even when the body carries it as an int (e.g. ``{"user": 123}``)."""
    from unittest.mock import MagicMock

    from fastapi import Request
    from litellm.proxy.auth.auth_utils import get_end_user_id_from_request_body

    # Mocked Request carrying no headers of interest.
    fake_request = MagicMock(spec=Request)
    fake_request.headers = {}

    body = {"user": 123}
    result = get_end_user_id_from_request_body(body, dict(fake_request.headers))

    assert isinstance(result, str)
    assert result == "123"
@pytest.mark.parametrize(
    "headers, general_settings_config, request_body, expected_user_id",
    [
        # header named by `user_header_name` is present -> its value is used
        (
            {"X-OpenWebUI-User-Id": "header-user-1"},
            {"user_header_name": "X-OpenWebUI-User-Id"},
            {},
            "header-user-1",
        ),
        # configured header absent -> falls back to the request body `user`
        (
            {},
            {"user_header_name": "X-OpenWebUI-User-Id"},
            {"user": "body-user-2"},
            "body-user-2",
        ),
    ],
)
def test_get_end_user_id_from_request_body_with_user_header_name(
    headers, general_settings_config, request_body, expected_user_id
):
    """Test that get_end_user_id_from_request_body respects user_header_name property"""
    from litellm.proxy.auth.auth_utils import get_end_user_id_from_request_body
    from fastapi import Request
    from unittest.mock import MagicMock, patch

    # Create a mock Request object with headers
    mock_request = MagicMock(spec=Request)
    mock_request.headers = headers

    # Mock general_settings at the proxy_server module level so the helper
    # picks up the configured `user_header_name`.
    with patch("litellm.proxy.proxy_server.general_settings", general_settings_config):
        end_user_id = get_end_user_id_from_request_body(
            request_body, dict(mock_request.headers)
        )
        assert end_user_id == expected_user_id
def test_get_end_user_id_from_request_body_no_user_found():
    """When neither the configured header nor the body carries a user id,
    the helper must return ``None``."""
    from unittest.mock import MagicMock, patch

    from fastapi import Request
    from litellm.proxy.auth.auth_utils import get_end_user_id_from_request_body

    # Request whose headers do not include the configured user header.
    req = MagicMock(spec=Request)
    req.headers = {"X-Other-Header": "some-value"}

    settings = {"user_header_name": "X-User-ID"}

    # Body with no user identifiers at all.
    body = {"model": "gpt-4", "messages": [{"role": "user", "content": "hello"}]}

    with patch("litellm.proxy.proxy_server.general_settings", settings):
        assert get_end_user_id_from_request_body(body, dict(req.headers)) is None
def test_get_end_user_id_from_request_body_backwards_compatibility():
    """Calling the helper with only ``request_body`` (no headers argument)
    must keep working and honor every legacy user-id location."""
    from litellm.proxy.auth.auth_utils import get_end_user_id_from_request_body

    # (body, expected) pairs covering each supported location plus the
    # no-user case, exercised one by one to keep failures attributable.
    cases = [
        ({"user": "test-user-123"}, "test-user-123"),
        ({"litellm_metadata": {"user": "litellm-user-456"}}, "litellm-user-456"),
        ({"metadata": {"user_id": "metadata-user-789"}}, "metadata-user-789"),
        ({"model": "gpt-4"}, None),
    ]

    for body, expected in cases:
        assert get_end_user_id_from_request_body(body) == expected
@pytest.mark.parametrize(
    "request_data, expected_model",
    [
        # comma-separated `target_model_names` should be split into a list
        (
            {"target_model_names": "gpt-3.5-turbo, gpt-4o-mini-general-deployment"},
            ["gpt-3.5-turbo", "gpt-4o-mini-general-deployment"],
        ),
    ],
)
def test_get_model_from_request(request_data, expected_model):
    """get_model_from_request should resolve the model from the request body.

    The original body ignored both parameters (it reassigned ``request_data``,
    never used ``expected_model``, and left ``route`` unused); this version
    actually drives the assertion from the parametrized inputs.
    """
    from litellm.proxy.auth.auth_utils import get_model_from_request

    model = get_model_from_request(request_data, "/v1/files")
    assert model == expected_model