@@ -21,6 +21,8 @@
 )
 from urllib.parse import urlparse
 
+import google.api_core
+
 # TODO: remove ignore once the google package is published with types
 import google.generativeai as genai  # type: ignore[import]
 import requests
@@ -87,8 +89,6 @@ def _create_retry_decorator() -> Callable[[Any], Any]:
         Callable[[Any], Any]: A retry decorator configured for handling specific
             Google API exceptions.
     """
-    import google.api_core.exceptions
-
     multiplier = 2
     min_seconds = 1
     max_seconds = 60
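For context, `_create_retry_decorator` builds a tenacity-based exponential-backoff decorator from the constants shown above, and after this change it can rely on the module-level `import google.api_core` instead of a local import. The sketch below is an approximation, not the file's exact code: the retried exception types (`ResourceExhausted`, `ServiceUnavailable`) and the `max_retries` value are assumptions; only the backoff constants (2 / 1 / 60) come from the hunk.

```python
# Approximate sketch of a retry decorator like _create_retry_decorator.
# The retried exception types and max_retries value are assumptions;
# only the backoff constants (2 / 1 / 60) come from the diff above.
from typing import Any, Callable

import google.api_core.exceptions
from tenacity import (
    retry,
    retry_if_exception_type,
    stop_after_attempt,
    wait_exponential,
)


def create_retry_decorator() -> Callable[[Any], Any]:
    multiplier = 2
    min_seconds = 1
    max_seconds = 60
    max_retries = 2  # assumed value, not shown in the diff

    return retry(
        reraise=True,
        stop=stop_after_attempt(max_retries),
        wait=wait_exponential(multiplier=multiplier, min=min_seconds, max=max_seconds),
        retry=(
            retry_if_exception_type(google.api_core.exceptions.ResourceExhausted)
            | retry_if_exception_type(google.api_core.exceptions.ServiceUnavailable)
        ),
    )
```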
@@ -123,14 +123,22 @@ def _chat_with_retry(generation_method: Callable, **kwargs: Any) -> Any:
         Any: The result from the chat generation method.
     """
     retry_decorator = _create_retry_decorator()
-    from google.api_core.exceptions import InvalidArgument  # type: ignore
 
     @retry_decorator
     def _chat_with_retry(**kwargs: Any) -> Any:
         try:
             return generation_method(**kwargs)
-        except InvalidArgument as e:
-            # Do not retry for these errors.
+        # Do not retry for these errors.
+        except google.api_core.exceptions.FailedPrecondition as exc:
+            if "location is not supported" in exc.message:
+                error_msg = (
+                    "Your location is not supported by google-generativeai "
+                    "at the moment. Try to use ChatVertexAI LLM from "
+                    "langchain_google_vertexai."
+                )
+                raise ValueError(error_msg)
+
+        except google.api_core.exceptions.InvalidArgument as e:
             raise ChatGoogleGenerativeAIError(
                 f"Invalid argument provided to Gemini: {e}"
             ) from e
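To make the new control flow concrete, here is a small self-contained sketch of the same translation pattern. It is not the PR's code: `fake_generation_method`, `FakeChatError`, and `chat_with_translation` are invented stand-ins for the real `generation_method`, `ChatGoogleGenerativeAIError`, and `_chat_with_retry`.

```python
# Illustrative stand-alone sketch of the FailedPrecondition / InvalidArgument
# translation above; the function and exception names are stand-ins, not PR code.
from typing import Any

import google.api_core.exceptions


class FakeChatError(Exception):
    """Stand-in for ChatGoogleGenerativeAIError."""


def fake_generation_method(**kwargs: Any) -> Any:
    # Simulate the SDK rejecting a request from an unsupported region.
    raise google.api_core.exceptions.FailedPrecondition(
        "User location is not supported for the API use."
    )


def chat_with_translation(generation_method: Any, **kwargs: Any) -> Any:
    try:
        return generation_method(**kwargs)
    # Do not retry for these errors.
    except google.api_core.exceptions.FailedPrecondition as exc:
        if "location is not supported" in exc.message:
            raise ValueError(
                "Your location is not supported by google-generativeai "
                "at the moment. Try to use ChatVertexAI LLM from "
                "langchain_google_vertexai."
            )
        raise  # other FailedPrecondition errors propagate unchanged in this sketch
    except google.api_core.exceptions.InvalidArgument as e:
        raise FakeChatError(f"Invalid argument provided to Gemini: {e}") from e


try:
    chat_with_translation(fake_generation_method)
except ValueError as err:
    print(err)  # "Your location is not supported by google-generativeai ..."
```

As the "Do not retry for these errors." comment in the diff indicates, these exceptions are not retried; they surface immediately to the caller, with the location error rewritten into an actionable hint.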
libs/partners/google-genai/langchain_google_genai/llms.py (18 changes: 13 additions & 5 deletions)
@@ -56,11 +56,19 @@ def _completion_with_retry(
         prompt: LanguageModelInput, is_gemini: bool, stream: bool, **kwargs: Any
     ) -> Any:
         generation_config = kwargs.get("generation_config", {})
-        if is_gemini:
-            return llm.client.generate_content(
-                contents=prompt, stream=stream, generation_config=generation_config
-            )
-        return llm.client.generate_text(prompt=prompt, **kwargs)
+        error_msg = (
+            "Your location is not supported by google-generativeai at the moment. "
+            "Try to use VertexAI LLM from langchain_google_vertexai"
+        )
+        try:
+            if is_gemini:
+                return llm.client.generate_content(
+                    contents=prompt, stream=stream, generation_config=generation_config
+                )
+            return llm.client.generate_text(prompt=prompt, **kwargs)
+        except google.api_core.exceptions.FailedPrecondition as exc:
+            if "location is not supported" in exc.message:
+                raise ValueError(error_msg)
 
     return _completion_with_retry(
         prompt=prompt, is_gemini=is_gemini, stream=stream, **kwargs
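The `llms.py` hunk applies the same guard around both the Gemini (`generate_content`) and PaLM (`generate_text`) paths. Below is a condensed sketch of that behavior with a hypothetical in-memory client; `FakeClient` and `completion` are invented for illustration, while the real code goes through `llm.client`.

```python
# Hedged sketch of the guarded completion call; FakeClient is a made-up stand-in
# for llm.client, used only to show how FailedPrecondition becomes a ValueError.
from typing import Any

import google.api_core.exceptions


class FakeClient:
    def generate_content(self, **kwargs: Any) -> Any:
        raise google.api_core.exceptions.FailedPrecondition(
            "User location is not supported for the API use."
        )

    def generate_text(self, **kwargs: Any) -> Any:
        return {"candidates": ["ok"]}  # pretend the PaLM path succeeds


def completion(client: FakeClient, prompt: str, is_gemini: bool, **kwargs: Any) -> Any:
    error_msg = (
        "Your location is not supported by google-generativeai at the moment. "
        "Try to use VertexAI LLM from langchain_google_vertexai"
    )
    try:
        if is_gemini:
            return client.generate_content(contents=prompt, stream=False, **kwargs)
        return client.generate_text(prompt=prompt, **kwargs)
    except google.api_core.exceptions.FailedPrecondition as exc:
        # Surface the region restriction as a plain ValueError instead of retrying.
        if "location is not supported" in exc.message:
            raise ValueError(error_msg)
        raise


print(completion(FakeClient(), "hi", is_gemini=False))  # PaLM path returns normally
try:
    completion(FakeClient(), "hi", is_gemini=True)  # Gemini path raises ValueError
except ValueError as err:
    print(err)
```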