from __future__ import annotations

from typing import Any, Dict, Iterator, List, Optional

from langchain_core._api.deprecation import deprecated
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models import LanguageModelInput
from langchain_core.outputs import Generation, GenerationChunk, LLMResult
from langchain_core.utils import get_from_dict_or_env, pre_init
from pydantic import BaseModel, SecretStr

from langchain_community.llms import BaseLLM
from langchain_community.utilities.vertexai import create_retry_decorator


def completion_with_retry(
    llm: GooglePalm,
    prompt: LanguageModelInput,
    is_gemini: bool = False,
    stream: bool = False,
    run_manager: Optional[CallbackManagerForLLMRun] = None,
    **kwargs: Any,
) -> Any:
    """Use tenacity to retry the completion call."""
    retry_decorator = create_retry_decorator(
        llm, max_retries=llm.max_retries, run_manager=run_manager
    )

    @retry_decorator
    def _completion_with_retry(
        prompt: LanguageModelInput, is_gemini: bool, stream: bool, **kwargs: Any
    ) -> Any:
        generation_config = kwargs.get("generation_config", {})
        if is_gemini:
            return llm.client.generate_content(
                contents=prompt, stream=stream, generation_config=generation_config
            )
        return llm.client.generate_text(prompt=prompt, **kwargs)

    return _completion_with_retry(
        prompt=prompt, is_gemini=is_gemini, stream=stream, **kwargs
    )


def _is_gemini_model(model_name: str) -> bool:
    return "gemini" in model_name


def _strip_erroneous_leading_spaces(text: str) -> str:
    """Strip erroneous leading spaces from text.

    The PaLM API will sometimes erroneously return a single leading space in all
    lines > 1. This function strips that space.
    """
    has_leading_space = all(
        not line or line[0] == " " for line in text.split("\n")[1:]
    )
    if has_leading_space:
        return text.replace("\n ", "\n")
    return text


@deprecated("0.0.12", alternative_import="langchain_google_genai.GoogleGenerativeAI")
class GooglePalm(BaseLLM, BaseModel):
    """DEPRECATED: Use `langchain_google_genai.GoogleGenerativeAI` instead.

    Google PaLM models.
    """

    client: Any  #: :meta private:
    google_api_key: Optional[SecretStr] = None
    model_name: str = "models/text-bison-001"
    temperature: float = 0.7  # must be in the closed interval [0.0, 1.0]
    top_p: Optional[float] = None  # nucleus sampling; in [0.0, 1.0] if set
    top_k: Optional[int] = None  # top-k sampling; must be positive if set
    max_output_tokens: Optional[int] = None  # must be greater than zero if set
    n: int = 1  # number of candidates to generate per prompt
    max_retries: int = 6  # maximum number of retries when generating

    @property
    def is_gemini(self) -> bool:
        """Return whether the model belongs to the Gemini family or not."""
        return _is_gemini_model(self.model_name)

    @property
    def lc_secrets(self) -> Dict[str, str]:
        return {"google_api_key": "GOOGLE_API_KEY"}

    @classmethod
    def is_lc_serializable(cls) -> bool:
        return True

    @classmethod
    def get_lc_namespace(cls) -> List[str]:
        """Get the namespace of the langchain object."""
        return ["langchain", "llms", "google_palm"]

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that the API key is set and the python package exists."""
        google_api_key = get_from_dict_or_env(
            values, "google_api_key", "GOOGLE_API_KEY"
        )
        model_name = values["model_name"]
        try:
            import google.generativeai as genai

            if isinstance(google_api_key, SecretStr):
                google_api_key = google_api_key.get_secret_value()

            genai.configure(api_key=google_api_key)

            if _is_gemini_model(model_name):
                values["client"] = genai.GenerativeModel(model_name=model_name)
            else:
                values["client"] = genai
        except ImportError:
            raise ImportError(
                "Could not import google-generativeai python package. "
                "Please install it with `pip install google-generativeai`."
            )

        if values["temperature"] is not None and not 0 <= values["temperature"] <= 1:
            raise ValueError("temperature must be in the range [0.0, 1.0]")

        if values["top_p"] is not None and not 0 <= values["top_p"] <= 1:
            raise ValueError("top_p must be in the range [0.0, 1.0]")

        if values["top_k"] is not None and values["top_k"] <= 0:
            raise ValueError("top_k must be positive")

        if values["max_output_tokens"] is not None and values["max_output_tokens"] <= 0:
            raise ValueError("max_output_tokens must be greater than zero")

        return values

    def _generate(
        self,
        prompts: List[str],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> LLMResult:
        generations: List[List[Generation]] = []
        generation_config = {
            "stop_sequences": stop,
            "temperature": self.temperature,
            "top_p": self.top_p,
            "top_k": self.top_k,
            "max_output_tokens": self.max_output_tokens,
            "candidate_count": self.n,
        }
        for prompt in prompts:
            if self.is_gemini:
                res = completion_with_retry(
                    self,
                    prompt=prompt,
                    stream=False,
                    is_gemini=True,
                    run_manager=run_manager,
                    generation_config=generation_config,
                )
                candidates = [
                    "".join([p.text for p in c.content.parts]) for c in res.candidates
                ]
                generations.append([Generation(text=c) for c in candidates])
            else:
                res = completion_with_retry(
                    self,
                    model=self.model_name,
                    prompt=prompt,
                    stream=False,
                    is_gemini=False,
                    run_manager=run_manager,
                    **generation_config,
                )
                prompt_generations = []
                for candidate in res.candidates:
                    raw_text = candidate["output"]
                    stripped_text = _strip_erroneous_leading_spaces(raw_text)
                    prompt_generations.append(Generation(text=stripped_text))
                generations.append(prompt_generations)

        return LLMResult(generations=generations)

    def _stream(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[GenerationChunk]:
        generation_config = kwargs.get("generation_config", {})
        if stop:
            generation_config["stop_sequences"] = stop
        for stream_resp in completion_with_retry(
            self,
            prompt,
            stream=True,
            is_gemini=True,
            run_manager=run_manager,
            generation_config=generation_config,
            **kwargs,
        ):
            chunk = GenerationChunk(text=stream_resp.text)
            if run_manager:
                run_manager.on_llm_new_token(
                    stream_resp.text,
                    chunk=chunk,
                    verbose=self.verbose,
                )
            yield chunk

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "google_palm"

    def get_num_tokens(self, text: str) -> int:
        """Get the number of tokens present in the text.

        Useful for checking if an input will fit in a model's context window.

        Args:
            text: The string input to tokenize.

        Returns:
            The integer number of tokens in the text.
        """
        if self.is_gemini:
            raise ValueError("Counting tokens is not yet supported!")
        result = self.client.count_text_tokens(model=self.model_name, prompt=text)
        return result["token_count"]
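

# --- Usage sketch (not part of the original module) ---
# A minimal, hedged example of how this deprecated wrapper would typically be
# driven. It assumes the `google-generativeai` package is installed and that a
# valid GOOGLE_API_KEY is available in the environment; the parameter values
# and prompt string below are purely illustrative.
if __name__ == "__main__":
    llm = GooglePalm(temperature=0.2, max_output_tokens=128)
    # `invoke` is inherited from the BaseLLM runnable interface and returns
    # the generated text for a single prompt.
    print(llm.invoke("Say hello in three words."))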