fix(http_handler.py): allow setting ca bundle path
krrishdholakia committed Jun 1, 2024
1 parent f1ce7bb commit a16a1c4
Showing 3 changed files with 12 additions and 3 deletions.
1 change: 1 addition & 0 deletions litellm/__init__.py
@@ -102,6 +102,7 @@
}
use_client: bool = False
ssl_verify: bool = True
ssl_certificate: Optional[str] = None
disable_streaming_logging: bool = False
in_memory_llm_clients_cache: dict = {}
### GUARDRAILS ###
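For context, a minimal usage sketch of the new module-level setting (the certificate path is a placeholder, not part of the commit):

```python
import litellm

# Point litellm's managed httpx clients at a client-side certificate.
# The path is a placeholder; substitute your own PEM file.
litellm.ssl_certificate = "/path/to/client.pem"

# The same value can also be supplied via the environment instead:
#   export SSL_CERTIFICATE=/path/to/client.pem
```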
10 changes: 8 additions & 2 deletions litellm/llms/custom_httpx/http_handler.py
@@ -12,15 +12,16 @@ def __init__(
timeout: Optional[Union[float, httpx.Timeout]] = None,
concurrent_limit=1000,
):
sync_proxy_mounts = None
async_proxy_mounts = None
# Check if the HTTP_PROXY and HTTPS_PROXY environment variables are set and use them accordingly.
http_proxy = os.getenv("HTTP_PROXY", None)
https_proxy = os.getenv("HTTPS_PROXY", None)
no_proxy = os.getenv("NO_PROXY", None)
ssl_verify = bool(os.getenv("SSL_VERIFY", litellm.ssl_verify))
cert = os.getenv(
"SSL_CERTIFICATE", litellm.ssl_certificate
) # /path/to/client.pem

sync_proxy_mounts = None
if http_proxy is not None and https_proxy is not None:
async_proxy_mounts = {
"http://": httpx.AsyncHTTPTransport(proxy=httpx.Proxy(url=http_proxy)),
@@ -46,6 +47,7 @@
),
verify=ssl_verify,
mounts=async_proxy_mounts,
cert=cert,
)

async def close(self):
@@ -108,6 +110,9 @@ def __init__(
https_proxy = os.getenv("HTTPS_PROXY", None)
no_proxy = os.getenv("NO_PROXY", None)
ssl_verify = bool(os.getenv("SSL_VERIFY", litellm.ssl_verify))
cert = os.getenv(
"SSL_CERTIFICATE", litellm.ssl_certificate
) # /path/to/client.pem

sync_proxy_mounts = None
if http_proxy is not None and https_proxy is not None:
@@ -132,6 +137,7 @@
),
verify=ssl_verify,
mounts=sync_proxy_mounts,
cert=cert,
)
else:
self.client = client
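Both handler constructors forward these values straight to httpx. A standalone sketch of the same construction, assuming only httpx (the env-var parsing and paths here are illustrative, not taken from the diff):

```python
import os

import httpx

# Mirror the handler's environment-driven setup (illustrative only).
ssl_verify = os.getenv("SSL_VERIFY", "true").lower() != "false"
cert = os.getenv("SSL_CERTIFICATE")  # e.g. /path/to/client.pem; None means no client cert

client = httpx.Client(
    verify=ssl_verify,  # httpx also accepts a CA-bundle path or an ssl.SSLContext here
    cert=cert,          # client certificate: a single PEM path or a (cert, key) tuple
)
```

Note that in httpx, `verify` is where a custom CA-bundle path is trusted, while `cert` is the client-side certificate that the `/path/to/client.pem` comment in the diff refers to.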
4 changes: 3 additions & 1 deletion litellm/main.py
@@ -223,7 +223,7 @@ async def acompletion(
extra_headers: Optional[dict] = None,
# Optional liteLLM function params
**kwargs,
):
) -> Union[ModelResponse, CustomStreamWrapper]:
"""
Asynchronously executes a litellm.completion() call for any of litellm supported llms (example gpt-4, gpt-3.5-turbo, claude-2, command-nightly)
@@ -339,6 +339,8 @@ async def acompletion(
if isinstance(init_response, dict) or isinstance(
init_response, ModelResponse
): ## CACHING SCENARIO
if isinstance(init_response, dict):
response = ModelResponse(**init_response)
response = init_response
elif asyncio.iscoroutine(init_response):
response = await init_response
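The caching branch above rebuilds a typed response from a cached dict. A minimal sketch of that rehydration step, using a hypothetical cached payload:

```python
from litellm import ModelResponse

# Hypothetical cached payload shaped like an OpenAI-style chat completion.
cached = {
    "id": "chatcmpl-123",
    "model": "gpt-3.5-turbo",
    "choices": [{"index": 0, "message": {"role": "assistant", "content": "hi"}}],
}

# Rebuild a typed ModelResponse from the plain dict, as the caching branch does.
response = ModelResponse(**cached)
print(response.choices[0].message.content)
```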
