Commit

updating dependencies (#742)
lkuligin authored Feb 23, 2025
1 parent 476bcc3 commit fa3a45b
Showing 11 changed files with 737 additions and 966 deletions.
95 changes: 94 additions & 1 deletion libs/genai/langchain_google_genai/_common.py
@@ -1,7 +1,9 @@
from importlib import metadata
from typing import Optional, Tuple, TypedDict
from typing import Any, Dict, Optional, Tuple, TypedDict

from google.api_core.gapic_v1.client_info import ClientInfo
from langchain_core.utils import secret_from_env
from pydantic import BaseModel, Field, SecretStr

from langchain_google_genai._enums import HarmBlockThreshold, HarmCategory

@@ -12,6 +14,97 @@ class GoogleGenerativeAIError(Exception):
"""


class _BaseGoogleGenerativeAI(BaseModel):
"""Base class for Google Generative AI LLMs"""

model: str = Field(
...,
description="""The name of the model to use.
Supported examples:
- gemini-pro
- models/text-bison-001""",
)
"""Model name to use."""
google_api_key: Optional[SecretStr] = Field(
alias="api_key", default_factory=secret_from_env("GOOGLE_API_KEY", default=None)
)
"""Google AI API key.
If not specified will be read from env var ``GOOGLE_API_KEY``."""
credentials: Any = None
"""The default custom credentials (google.auth.credentials.Credentials) to use
when making API calls. If not provided, credentials will be ascertained from
the GOOGLE_API_KEY env var."""
temperature: float = 0.7
"""Run inference with this temperature. Must be in the closed interval
[0.0, 1.0]."""
top_p: Optional[float] = None
"""Decode using nucleus sampling: consider the smallest set of tokens whose
probability sum is at least top_p. Must be in the closed interval [0.0, 1.0]."""
top_k: Optional[int] = None
"""Decode using top-k sampling: consider the set of top_k most probable tokens.
Must be positive."""
max_output_tokens: Optional[int] = Field(default=None, alias="max_tokens")
"""Maximum number of tokens to include in a candidate. Must be greater than zero.
If unset, will default to 64."""
n: int = 1
"""Number of chat completions to generate for each prompt. Note that the API may
not return the full n completions if duplicates are generated."""
max_retries: int = 6
"""The maximum number of retries to make when generating."""

timeout: Optional[float] = None
"""The maximum number of seconds to wait for a response."""

client_options: Optional[Dict] = Field(
default=None,
description=(
"A dictionary of client options to pass to the Google API client, "
"such as `api_endpoint`."
),
)
transport: Optional[str] = Field(
default=None,
description="A string, one of: [`rest`, `grpc`, `grpc_asyncio`].",
)
additional_headers: Optional[Dict[str, str]] = Field(
default=None,
description=(
"A key-value dictionary representing additional headers for the model call"
),
)

safety_settings: Optional[Dict[HarmCategory, HarmBlockThreshold]] = None
"""The default safety settings to use for all generations.
For example:
from google.generativeai.types.safety_types import HarmBlockThreshold, HarmCategory
safety_settings = {
HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE,
HarmCategory.HARM_CATEGORY_HATE_SPEECH: HarmBlockThreshold.BLOCK_ONLY_HIGH,
HarmCategory.HARM_CATEGORY_HARASSMENT: HarmBlockThreshold.BLOCK_LOW_AND_ABOVE,
HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: HarmBlockThreshold.BLOCK_NONE,
}
""" # noqa: E501

@property
def lc_secrets(self) -> Dict[str, str]:
return {"google_api_key": "GOOGLE_API_KEY"}

@property
def _identifying_params(self) -> Dict[str, Any]:
"""Get the identifying parameters."""
return {
"model": self.model,
"temperature": self.temperature,
"top_p": self.top_p,
"top_k": self.top_k,
"max_output_tokens": self.max_output_tokens,
"candidate_count": self.n,
}


def get_user_agent(module: Optional[str] = None) -> Tuple[str, str]:
r"""Returns a custom user agent header.
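
The main addition in this hunk is the `_BaseGoogleGenerativeAI` Pydantic base class, whose `google_api_key` field is resolved lazily from the `GOOGLE_API_KEY` environment variable through `secret_from_env` used as a `default_factory`. The sketch below is illustrative only and not part of the commit; `_DemoSettings` and the dummy key are hypothetical stand-ins for `_BaseGoogleGenerativeAI` and a real credential.

import os
from typing import Optional

from langchain_core.utils import secret_from_env
from pydantic import BaseModel, Field, SecretStr

os.environ["GOOGLE_API_KEY"] = "demo-key"  # placeholder value for illustration only


class _DemoSettings(BaseModel):
    # Same pattern as `_BaseGoogleGenerativeAI.google_api_key` above: the factory
    # reads the environment at model-construction time, so no key is baked in.
    google_api_key: Optional[SecretStr] = Field(
        alias="api_key",
        default_factory=secret_from_env("GOOGLE_API_KEY", default=None),
    )


print(_DemoSettings().google_api_key)  # prints the masked value, resolved from the env var
print(_DemoSettings(api_key="explicit-key").google_api_key.get_secret_value())  # "explicit-key"
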
33 changes: 7 additions & 26 deletions libs/genai/langchain_google_genai/_function_utils.py
Original file line number Diff line number Diff line change
@@ -7,7 +7,6 @@
from typing import (
Any,
Callable,
Collection,
Dict,
List,
Literal,
@@ -22,7 +21,6 @@
import google.ai.generativelanguage as glm
import google.ai.generativelanguage_v1beta.types as gapic
import proto # type: ignore[import]
from google.generativeai.types.content_types import ToolDict # type: ignore[import]
from langchain_core.tools import BaseTool
from langchain_core.tools import tool as callable_as_lc_tool
from langchain_core.utils.function_calling import (
@@ -59,38 +57,21 @@
_ALLOWED_SCHEMA_FIELDS_SET = set(_ALLOWED_SCHEMA_FIELDS)


class _ToolDictLike(TypedDict):
function_declarations: _FunctionDeclarationLikeList


class _FunctionDeclarationDict(TypedDict):
name: str
description: str
parameters: Dict[str, Collection[str]]


class _ToolDict(TypedDict):
function_declarations: Sequence[_FunctionDeclarationDict]


# Info: This is a FunctionDeclaration(=fc).
_FunctionDeclarationLike = Union[
BaseTool, Type[BaseModel], gapic.FunctionDeclaration, Callable, Dict[str, Any]
]

# Info: This means one tool.
_FunctionDeclarationLikeList = Sequence[_FunctionDeclarationLike]

class _ToolDict(TypedDict):
function_declarations: Sequence[_FunctionDeclarationLike]


# Info: This means one tool = a Sequence of FunctionDeclaration.
# The dict should be gapic.Tool-like: {"function_declarations": [{"name": ...}]}.
# An OpenAI-like dict is not accepted: {'type': 'function', 'function': {'name': ...}}.
_ToolsType = Union[
gapic.Tool,
ToolDict,
_ToolDictLike,
_FunctionDeclarationLikeList,
_FunctionDeclarationLike,
gapic.Tool, _ToolDict, _FunctionDeclarationLike, Sequence[_FunctionDeclarationLike]
]


@@ -152,12 +133,12 @@ def convert_to_genai_function_declarations(
gapic_tool = gapic.Tool()
for tool in tools:
if isinstance(tool, gapic.Tool):
gapic_tool.function_declarations.extend(tool.function_declarations)
gapic_tool.function_declarations.extend(tool.function_declarations) # type: ignore[union-attr]
elif isinstance(tool, dict) and "function_declarations" not in tool:
fd = _format_to_gapic_function_declaration(tool)
gapic_tool.function_declarations.append(fd)
elif isinstance(tool, dict):
function_declarations = cast(_ToolDictLike, tool)["function_declarations"]
function_declarations = cast(_ToolDict, tool)["function_declarations"]
if not isinstance(function_declarations, collections.abc.Sequence):
raise ValueError(
"function_declarations should be a list"
@@ -170,7 +151,7 @@
]
gapic_tool.function_declarations.extend(fds)
else:
fd = _format_to_gapic_function_declaration(tool)
fd = _format_to_gapic_function_declaration(tool) # type: ignore[arg-type]
gapic_tool.function_declarations.append(fd)
return gapic_tool
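
As a usage illustration for the simplified `_ToolsType` handling above, the sketch below passes a LangChain `BaseTool` and a plain function-declaration-style dict in one call; both are folded into a single `gapic.Tool`. It is a hedged example, not part of the commit: the `get_weather`/`get_time` tools are hypothetical, and the import targets the private `_function_utils` module directly, which may not be a stable entry point.

from langchain_core.tools import tool
from langchain_google_genai._function_utils import (
    convert_to_genai_function_declarations,
)


@tool
def get_weather(city: str) -> str:
    """Look up the current weather for a city."""
    return f"Sunny in {city}"


# A bare declaration-style dict (name/description/parameters), assumed to be
# accepted via the dict branch of `_format_to_gapic_function_declaration`.
get_time = {
    "name": "get_time",
    "description": "Return the current time for a timezone.",
    "parameters": {
        "type": "object",
        "properties": {"tz": {"type": "string"}},
        "required": ["tz"],
    },
}

gapic_tool = convert_to_genai_function_declarations([get_weather, get_time])
print([fd.name for fd in gapic_tool.function_declarations])
# expected: ['get_weather', 'get_time']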

(Diff truncated; the remaining 9 changed files are not shown.)
