You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
The issue happens when using litellm (litellm==1.43.18) with Gemini hosted by Google in Danswer.
From my analysis, it is related to the snippet below.
In the LLM configuration, api_base is "" — an empty string, but not None. So execution enters this branch and the wrong value is applied.
if (
    api_base is not None
):  # for cloudflare ai gateway - https://github.com/BerriAI/litellm/issues/4317
    if custom_llm_provider == "gemini":
        url = "{}/{}".format(api_base, endpoint)
        auth_header = (
            gemini_api_key  # cloudflare expects api key as bearer token
        )
    else:
        url = "{}:{}".format(api_base, endpoint)
    if stream is True:
        url = url + "?alt=sse"
Because in many places we accept the empty string as a default parameter, I suggest using
if api_base:
instead — this checks for both None and the empty string.
Relevant log output
Traceback (most recent call last):
File "/usr/local/lib/python3.11/site-packages/httpx/_transports/default.py", line 60, in map_httpcore_exceptions
yield
File "/usr/local/lib/python3.11/site-packages/httpx/_transports/default.py", line 218, in handle_request
resp = self._pool.handle_request(req)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/httpcore/_sync/connection_pool.py", line 208, in handle_request
raise UnsupportedProtocol(
httpcore.UnsupportedProtocol: Request URL is missing an 'http://' or 'https://' protocol.
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/local/lib/python3.11/site-packages/litellm/utils.py", line 10372, in __next__
self.fetch_sync_stream()
File "/usr/local/lib/python3.11/site-packages/litellm/utils.py", line 10477, in fetch_sync_stream
self.completion_stream = self.make_call(client=litellm.module_level_client)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litellm/llms/vertex_httpx.py", line 555, in make_sync_call
response = client.post(api_base, headers=headers, data=data, stream=True)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litellm/llms/custom_httpx/http_handler.py", line 275, in post
raise e
File "/usr/local/lib/python3.11/site-packages/litellm/llms/custom_httpx/http_handler.py", line 266, in post
response = self.client.send(req, stream=stream)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/httpx/_client.py", line 908, in send
response = self._send_handling_auth(
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/httpx/_client.py", line 936, in _send_handling_auth
response = self._send_handling_redirects(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/httpx/_client.py", line 973, in _send_handling_redirects
response = self._send_single_request(request)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/httpx/_client.py", line 1009, in _send_single_request
response = transport.handle_request(request)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/httpx/_transports/default.py", line 217, in handle_request
with map_httpcore_exceptions():
File "/usr/local/lib/python3.11/contextlib.py", line 158, in __exit__
self.gen.throw(typ, value, traceback)
File "/usr/local/lib/python3.11/site-packages/httpx/_transports/default.py", line 77, in map_httpcore_exceptions
raise mapped_exc(message) from exc
httpx.UnsupportedProtocol: Request URL is missing an 'http://' or 'https://' protocol.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/app/danswer/chat/process_message.py", line 666, in stream_chat_message_objects
for packet in answer.processed_streamed_output:
File "/app/danswer/llm/answering/answer.py", line 577, in processed_streamed_output
forprocessed_packetin _process_stream(output_generator):
File "/app/danswer/llm/answering/answer.py", line 509, in _process_stream
for message in stream:
File "/app/danswer/llm/answering/answer.py", line 423, in _raw_output_for_non_explicit_tool_calling_llms
yield from self._process_llm_stream(
File "/app/danswer/llm/answering/answer.py", line 329, in _process_llm_stream
for message in self.llm.stream(
File "/app/danswer/llm/chat_llm.py", line 364, in _stream_implementation
for part in response:
File "/usr/local/lib/python3.11/site-packages/litellm/utils.py", line 10468, in __next__
raise exception_type(
^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litellm/utils.py", line 8334, in exception_type
raise e
File "/usr/local/lib/python3.11/site-packages/litellm/utils.py", line 8298, in exception_type
raise APIConnectionError(
litellm.exceptions.APIConnectionError: litellm.APIConnectionError: Request URL is missing an 'http://' or 'https://' protocol.
Traceback (most recent call last):
File "/usr/local/lib/python3.11/site-packages/httpx/_transports/default.py", line 60, in map_httpcore_exceptions
yield
File "/usr/local/lib/python3.11/site-packages/httpx/_transports/default.py", line 218, in handle_request
resp = self._pool.handle_request(req)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/httpcore/_sync/connection_pool.py", line 208, in handle_request
raise UnsupportedProtocol(
httpcore.UnsupportedProtocol: Request URL is missing an 'http://' or 'https://' protocol.
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/local/lib/python3.11/site-packages/litellm/utils.py", line 10372, in __next__
self.fetch_sync_stream()
File "/usr/local/lib/python3.11/site-packages/litellm/utils.py", line 10477, in fetch_sync_stream
self.completion_stream = self.make_call(client=litellm.module_level_client)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litellm/llms/vertex_httpx.py", line 555, in make_sync_call
response = client.post(api_base, headers=headers, data=data, stream=True)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litellm/llms/custom_httpx/http_handler.py", line 275, in post
raise e
File "/usr/local/lib/python3.11/site-packages/litellm/llms/custom_httpx/http_handler.py", line 266, in post
response = self.client.send(req, stream=stream)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/httpx/_client.py", line 908, in send
response = self._send_handling_auth(
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/httpx/_client.py", line 936, in _send_handling_auth
response = self._send_handling_redirects(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/httpx/_client.py", line 973, in _send_handling_redirects
response = self._send_single_request(request)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/httpx/_client.py", line 1009, in _send_single_request
response = transport.handle_request(request)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/httpx/_transports/default.py", line 217, in handle_request
with map_httpcore_exceptions():
File "/usr/local/lib/python3.11/contextlib.py", line 158, in __exit__
self.gen.throw(typ, value, traceback)
File "/usr/local/lib/python3.11/site-packages/httpx/_transports/default.py", line 77, in map_httpcore_exceptions
raise mapped_exc(message) from exc
httpx.UnsupportedProtocol: Request URL is missing an 'http://' or 'https://' protocol.
url  # this is the url specified by _get_token_and_url:
https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-pro-latest:streamGenerateContent?key=AImyFakedKey4&alt=sse
True  # api_base is not None evaluates to True:
# api_base is an empty string but not None, so it goes into the wrong branch and composes a wrong value for the url:
/streamGenerateContent?alt=sse
Twitter / LinkedIn details
No response
The text was updated successfully, but these errors were encountered:
What happened?
The issue happens when using litellm (litellm==1.43.18) with Gemini hosted by Google in Danswer.
From my analysis, it is related to the snippet shown above.
In the LLM configuration, api_base is "" — an empty string, but not None. So execution enters that branch and the wrong value is applied.
Because in many places we accept the empty string as a default parameter, I suggest using `if api_base:` instead — this checks for both None and the empty string.
Relevant log output
Twitter / LinkedIn details
No response
The text was updated successfully, but these errors were encountered: