Skip to content

Commit

Permalink
feat(router.py): allow setting configurable_clientside_auth_params for a model
Browse files Browse the repository at this point in the history

Closes #5843
  • Loading branch information
krrishdholakia committed Sep 23, 2024
1 parent b5c774b commit 3a064b7
Show file tree
Hide file tree
Showing 5 changed files with 280 additions and 151 deletions.
9 changes: 5 additions & 4 deletions litellm/proxy/_new_secret_config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -31,11 +31,12 @@ model_list:
- model_name: "anthropic/*"
litellm_params:
model: "anthropic/*"
- model_name: "fireworks_ai/*"
litellm_params:
model: "fireworks_ai/*"
configurable_clientside_auth_params: ["api_base"]


litellm_settings:
success_callback: ["langfuse"]
cache: true

general_settings:
allow_client_side_credentials: true
cache: true
55 changes: 52 additions & 3 deletions litellm/proxy/auth/auth_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@

from fastapi import HTTPException, Request, status

from litellm import Router, provider_list
from litellm._logging import verbose_proxy_logger
from litellm.proxy._types import *

Expand Down Expand Up @@ -72,7 +73,41 @@ def check_complete_credentials(request_body: dict) -> bool:
return False


def is_request_body_safe(request_body: dict, general_settings: dict) -> bool:
def _allow_model_level_clientside_configurable_parameters(
    model: str, param: str, llm_router: Optional[Router]
) -> bool:
    """
    Check if `model` is configured to allow `param` to be set client-side.

    Resolution order:
    1. Look up the exact model group on the router.
    2. If not found and the model name is prefixed with a known provider
       (e.g. "anthropic/claude-3"), fall back to the group registered under
       the provider prefix (covers wildcard entries like "anthropic/*").

    Returns:
        True only when the resolved model group explicitly lists `param`
        in its `configurable_clientside_auth_params`; False otherwise
        (including when no router is available or the model is unknown).
    """
    if llm_router is None:
        return False

    # Exact model-group match first.
    model_info = llm_router.get_model_group_info(model_group=model)

    if model_info is None:
        # Fall back to a provider-level wildcard group. Hoist the split so
        # the prefix is computed once.
        provider_prefix = model.split("/", 1)[0]
        if provider_prefix in provider_list:
            model_info = llm_router.get_model_group_info(model_group=provider_prefix)

    # Single combined guard replaces the three separate None checks in the
    # previous version (the inner early-return duplicated this condition).
    if model_info is None or model_info.configurable_clientside_auth_params is None:
        return False

    return param in model_info.configurable_clientside_auth_params


def is_request_body_safe(
request_body: dict, general_settings: dict, llm_router: Optional[Router], model: str
) -> bool:
"""
Check if the request body is safe.
Expand All @@ -90,6 +125,13 @@ def is_request_body_safe(request_body: dict, general_settings: dict) -> bool:
):
if general_settings.get("allow_client_side_credentials") is True:
return True
elif (
_allow_model_level_clientside_configurable_parameters(
model=model, param=param, llm_router=llm_router
)
is True
):
return True
raise ValueError(
f"Rejected Request: {param} is not allowed in request body. "
"Enable with `general_settings::allow_client_side_credentials` on proxy config.yaml. "
Expand All @@ -116,13 +158,20 @@ async def pre_db_read_auth_checks(
Raises:
- HTTPException if request fails initial auth checks
"""
from litellm.proxy.proxy_server import general_settings, premium_user
from litellm.proxy.proxy_server import general_settings, llm_router, premium_user

# Check 1. request size
await check_if_request_size_is_safe(request=request)

# Check 2. Request body is safe
is_request_body_safe(request_body=request_data, general_settings=general_settings)
is_request_body_safe(
request_body=request_data,
general_settings=general_settings,
llm_router=llm_router,
model=request_data.get(
"model", ""
), # [TODO] use model passed in url as well (azure openai routes)
)

# Check 3. Check if IP address is allowed
is_valid_ip, passed_in_ip = _check_valid_ip(
Expand Down
Loading

0 comments on commit 3a064b7

Please sign in to comment.