"""Wrapper around Google's PaLM Chat API."""

from __future__ import annotations

import logging
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, cast

from langchain_core.callbacks import (
    AsyncCallbackManagerForLLMRun,
    CallbackManagerForLLMRun,
)
from langchain_core.language_models.chat_models import BaseChatModel
from langchain_core.messages import (
    AIMessage,
    BaseMessage,
    ChatMessage,
    HumanMessage,
    SystemMessage,
)
from langchain_core.outputs import ChatGeneration, ChatResult
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
from pydantic import BaseModel, SecretStr
from tenacity import (
    before_sleep_log,
    retry,
    retry_if_exception_type,
    stop_after_attempt,
    wait_exponential,
)

if TYPE_CHECKING:
    import google.generativeai as genai

logger = logging.getLogger(__name__)


class ChatGooglePalmError(Exception):
    """Error with the `Google PaLM` API."""


def _truncate_at_stop_tokens(
    text: str,
    stop: Optional[List[str]],
) -> str:
    """Truncates text at the earliest stop token found."""
    if stop is None:
        return text

    for stop_token in stop:
        stop_token_idx = text.find(stop_token)
        if stop_token_idx > -1:
            text = text[:stop_token_idx]
    return text


def _response_to_result(
    response: genai.types.ChatResponse,
    stop: Optional[List[str]],
) -> ChatResult:
    """Converts a PaLM API response into a LangChain ChatResult."""
    if not response.candidates:
        raise ChatGooglePalmError("ChatResponse must have at least one candidate.")

    generations: List[ChatGeneration] = []
    for candidate in response.candidates:
        author = candidate.get("author")
        if author is None:
            raise ChatGooglePalmError(f"ChatResponse must have an author: {candidate}")

        content = _truncate_at_stop_tokens(candidate.get("content", ""), stop)
        if content is None:
            raise ChatGooglePalmError(f"ChatResponse must have a content: {candidate}")

        if author == "ai":
            generations.append(
                ChatGeneration(text=content, message=AIMessage(content=content))
            )
        elif author == "human":
            generations.append(
                ChatGeneration(text=content, message=HumanMessage(content=content))
            )
        else:
            generations.append(
                ChatGeneration(
                    text=content,
                    message=ChatMessage(role=author, content=content),
                )
            )

    return ChatResult(generations=generations)


def _messages_to_prompt_dict(
    input_messages: List[BaseMessage],
) -> genai.types.MessagePromptDict:
    """Converts a list of LangChain messages into a PaLM API MessagePrompt structure."""
    import google.generativeai as genai

    context: str = ""
    examples: List[genai.types.MessageDict] = []
    messages: List[genai.types.MessageDict] = []

    remaining = list(enumerate(input_messages))

    while remaining:
        index, input_message = remaining.pop(0)

        if isinstance(input_message, SystemMessage):
            if index != 0:
                raise ChatGooglePalmError("System message must be first input message.")
            context = cast(str, input_message.content)
        elif isinstance(input_message, HumanMessage) and input_message.example:
            if messages:
                raise ChatGooglePalmError(
                    "Message examples must come before other messages."
                )
            _, next_input_message = remaining.pop(0)
            if isinstance(next_input_message, AIMessage) and next_input_message.example:
                examples.extend(
                    [
                        genai.types.MessageDict(
                            author="human", content=input_message.content
                        ),
                        genai.types.MessageDict(
                            author="ai", content=next_input_message.content
                        ),
                    ]
                )
            else:
                raise ChatGooglePalmError(
                    "Human example message must be immediately followed by an "
                    "AI example response."
                )
        elif isinstance(input_message, AIMessage) and input_message.example:
            raise ChatGooglePalmError(
                "AI example message must be immediately preceded by a Human "
                "example message."
            )
        elif isinstance(input_message, AIMessage):
            messages.append(
                genai.types.MessageDict(author="ai", content=input_message.content)
            )
        elif isinstance(input_message, HumanMessage):
            messages.append(
                genai.types.MessageDict(author="human", content=input_message.content)
            )
        elif isinstance(input_message, ChatMessage):
            messages.append(
                genai.types.MessageDict(
                    author=input_message.role, content=input_message.content
                )
            )
        else:
            raise ChatGooglePalmError(
                "Messages without an explicit role not supported by PaLM API."
            )

    return genai.types.MessagePromptDict(
        context=context,
        examples=examples,
        messages=messages,
    )


def _create_retry_decorator() -> Callable[[Any], Any]:
    """Returns a tenacity retry decorator, preconfigured to handle PaLM exceptions"""
    import google.api_core.exceptions

    multiplier = 2
    min_seconds = 1
    max_seconds = 60
    max_retries = 10

    return retry(
        reraise=True,
        stop=stop_after_attempt(max_retries),
        wait=wait_exponential(multiplier=multiplier, min=min_seconds, max=max_seconds),
        retry=(
            retry_if_exception_type(google.api_core.exceptions.ResourceExhausted)
            | retry_if_exception_type(google.api_core.exceptions.ServiceUnavailable)
            | retry_if_exception_type(google.api_core.exceptions.GoogleAPIError)
        ),
        before_sleep=before_sleep_log(logger, logging.WARNING),
    )


def chat_with_retry(llm: ChatGooglePalm, **kwargs: Any) -> Any:
    """Use tenacity to retry the completion call."""
    retry_decorator = _create_retry_decorator()

    @retry_decorator
    def _chat_with_retry(**kwargs: Any) -> Any:
        return llm.client.chat(**kwargs)

    return _chat_with_retry(**kwargs)


async def achat_with_retry(llm: ChatGooglePalm, **kwargs: Any) -> Any:
    """Use tenacity to retry the async completion call."""
    retry_decorator = _create_retry_decorator()

    @retry_decorator
    async def _achat_with_retry(**kwargs: Any) -> Any:
        return await llm.client.chat_async(**kwargs)

    return await _achat_with_retry(**kwargs)


class ChatGooglePalm(BaseChatModel, BaseModel):
    """`Google PaLM` Chat models API.

    To use you must have the google.generativeai Python package installed and
    either:

        1. The ``GOOGLE_API_KEY`` environment variable set with your API key, or
        2. Pass your API key using the google_api_key kwarg to the ChatGooglePalm
           constructor.

    Example:
        .. code-block:: python

            from langchain_community.chat_models import ChatGooglePalm
            chat = ChatGooglePalm()

    """

    client: Any  #: :meta private:
    model_name: str = "models/chat-bison-001"
    """Model name to use."""
    google_api_key: Optional[SecretStr] = None
    temperature: Optional[float] = None
    """Run inference with this temperature. Must be in the closed
       interval [0.0, 1.0]."""
    top_p: Optional[float] = None
    """Decode using nucleus sampling: consider the smallest set of tokens whose
       probability sum is at least top_p. Must be in the closed interval
       [0.0, 1.0]."""
    top_k: Optional[int] = None
    """Decode using top-k sampling: consider the set of top_k most probable tokens.
       Must be positive."""
    n: int = 1
    """Number of chat completions to generate for each prompt. Note that the API may
       not return the full n completions if duplicates are generated."""

    @property
    def lc_secrets(self) -> Dict[str, str]:
        return {"google_api_key": "GOOGLE_API_KEY"}

    @classmethod
    def is_lc_serializable(cls) -> bool:
        return True

    @classmethod
    def get_lc_namespace(cls) -> List[str]:
        """Get the namespace of the langchain object."""
        return ["langchain", "chat_models", "google_palm"]

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate api key, python package exists, temperature, top_p, and top_k."""
        google_api_key = convert_to_secret_str(
            get_from_dict_or_env(values, "google_api_key", "GOOGLE_API_KEY")
        )
        try:
            import google.generativeai as genai

            genai.configure(api_key=google_api_key.get_secret_value())
        except ImportError:
            raise ChatGooglePalmError(
                "Could not import google.generativeai python package. "
                "Please install it with `pip install google-generativeai`"
            )

        values["client"] = genai

        if values["temperature"] is not None and not 0 <= values["temperature"] <= 1:
            raise ValueError("temperature must be in the range [0.0, 1.0]")

        if values["top_p"] is not None and not 0 <= values["top_p"] <= 1:
            raise ValueError("top_p must be in the range [0.0, 1.0]")

        if values["top_k"] is not None and values["top_k"] <= 0:
            raise ValueError("top_k must be positive")

        return values

    def _generate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        prompt = _messages_to_prompt_dict(messages)

        response: genai.types.ChatResponse = chat_with_retry(
            self,
            model=self.model_name,
            prompt=prompt,
            temperature=self.temperature,
            top_p=self.top_p,
            top_k=self.top_k,
            candidate_count=self.n,
            **kwargs,
        )

        return _response_to_result(response, stop)

    async def _agenerate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        prompt = _messages_to_prompt_dict(messages)

        response: genai.types.ChatResponse = await achat_with_retry(
            self,
            model=self.model_name,
            prompt=prompt,
            temperature=self.temperature,
            top_p=self.top_p,
            top_k=self.top_k,
            candidate_count=self.n,
        )

        return _response_to_result(response, stop)

    @property
    def _identifying_params(self) -> Dict[str, Any]:
        """Get the identifying parameters."""
        return {
            "model_name": self.model_name,
            "temperature": self.temperature,
            "top_p": self.top_p,
            "top_k": self.top_k,
            "n": self.n,
        }

    @property
    def _llm_type(self) -> str:
        return "google-palm-chat"
)6r"   
__future__r   rZ   typingr   r   r   r   r   r   r	   Zlangchain_core.callbacksr
   r   Z*langchain_core.language_models.chat_modelsr   Zlangchain_core.messagesr   r   r   r   r   Zlangchain_core.outputsr   r   Zlangchain_core.utilsr   r   r   Zpydanticr   r   tenacityr   r   r   r   r   r@   rA   rH   	getLoggerr   rY   	Exceptionr   r,   r:   rL   r]   ri   rk   r^   r#   r#   r#   r$   <module>   s*   $
(H
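

# Illustrative usage sketch: it assumes the `google-generativeai` package is
# installed and a valid key is exported as the GOOGLE_API_KEY environment
# variable. The second half performs a live PaLM API request, so its output
# depends on the remote service.
if __name__ == "__main__":
    # Building the MessagePrompt structure is purely local; no API call is
    # made by _messages_to_prompt_dict itself.
    prompt = _messages_to_prompt_dict(
        [
            SystemMessage(content="You are a terse assistant."),
            HumanMessage(content="What is the capital of France?"),
        ]
    )
    print(prompt)

    # ChatGooglePalm picks up GOOGLE_API_KEY from the environment in
    # validate_environment and routes the request through chat_with_retry.
    chat = ChatGooglePalm(temperature=0.2)
    result = chat.invoke("What is the capital of France?")
    print(result.content)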