import logging
import os
from typing import Any, Dict, Iterator, List, Mapping, Optional, Union

from langchain_core._api.deprecation import deprecated
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import BaseLLM
from langchain_core.outputs import Generation, GenerationChunk, LLMResult
from langchain_core.utils import (
    convert_to_secret_str,
    get_from_dict_or_env,
    pre_init,
)
from pydantic import ConfigDict, SecretStr

logger = logging.getLogger(__name__)


@deprecated(
    since="0.0.18",
    removal="1.0",
    alternative_import="langchain_ibm.WatsonxLLM",
)
class WatsonxLLM(BaseLLM):
    """
    IBM watsonx.ai large language models.

    To use, you should have ``ibm_watsonx_ai`` python package installed,
    and the environment variable ``WATSONX_APIKEY`` set with your API key, or pass
    it as a named parameter to the constructor.


    Example:
        .. code-block:: python

            from ibm_watsonx_ai.metanames import GenTextParamsMetaNames
            parameters = {
                GenTextParamsMetaNames.DECODING_METHOD: "sample",
                GenTextParamsMetaNames.MAX_NEW_TOKENS: 100,
                GenTextParamsMetaNames.MIN_NEW_TOKENS: 1,
                GenTextParamsMetaNames.TEMPERATURE: 0.5,
                GenTextParamsMetaNames.TOP_K: 50,
                GenTextParamsMetaNames.TOP_P: 1,
            }

            from langchain_community.llms import WatsonxLLM
            watsonx_llm = WatsonxLLM(
                model_id="google/flan-ul2",
                url="https://us-south.ml.cloud.ibm.com",
                apikey="*****",
                project_id="*****",
                params=parameters,
            )
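
        A minimal sketch of the environment-variable route described above,
        reusing ``parameters`` from the previous snippet: when ``url`` and
        ``apikey`` are not passed explicitly, they are read from
        ``WATSONX_URL`` and ``WATSONX_APIKEY`` (see ``lc_secrets`` for the
        full mapping).

        .. code-block:: python

            import os

            os.environ["WATSONX_URL"] = "https://us-south.ml.cloud.ibm.com"
            os.environ["WATSONX_APIKEY"] = "*****"

            watsonx_llm = WatsonxLLM(
                model_id="google/flan-ul2",
                project_id="*****",
                params=parameters,
            )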
    """

    model_id: str = ""
    deployment_id: str = ""
    project_id: str = ""
    space_id: str = ""
    url: Optional[SecretStr] = None
    apikey: Optional[SecretStr] = None
    token: Optional[SecretStr] = None
    password: Optional[SecretStr] = None
    username: Optional[SecretStr] = None
    instance_id: Optional[SecretStr] = None
    version: Optional[SecretStr] = None
    params: Optional[dict] = None
    verify: Union[str, bool] = ""
    streaming: bool = False
    watsonx_model: Any = None

    model_config = ConfigDict(extra="forbid")

    @classmethod
    def is_lc_serializable(cls) -> bool:
        return False

    @property
    def lc_secrets(self) -> Dict[str, str]:
        return {
            "url": "WATSONX_URL",
            "apikey": "WATSONX_APIKEY",
            "token": "WATSONX_TOKEN",
            "password": "WATSONX_PASSWORD",
            "username": "WATSONX_USERNAME",
            "instance_id": "WATSONX_INSTANCE_ID",
        }

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that credentials and the python package exist in the environment."""
        values["url"] = convert_to_secret_str(
            get_from_dict_or_env(values, "url", "WATSONX_URL")
        )
        if "cloud.ibm.com" in values.get("url", "").get_secret_value():
            values["apikey"] = convert_to_secret_str(
                get_from_dict_or_env(values, "apikey", "WATSONX_APIKEY")
            )
        else:
            # Non-IBM-Cloud endpoints accept a token, a username/password pair,
            # or a username/apikey pair, and also take an instance_id.
            if (
                not values["token"]
                and "WATSONX_TOKEN" not in os.environ
                and not values["password"]
                and "WATSONX_PASSWORD" not in os.environ
                and not values["apikey"]
                and "WATSONX_APIKEY" not in os.environ
            ):
                raise ValueError(
                    "Did not find 'token', 'password' or 'apikey', please add an"
                    " environment variable `WATSONX_TOKEN`, 'WATSONX_PASSWORD' or"
                    " 'WATSONX_APIKEY' which contains it, or pass 'token',"
                    " 'password' or 'apikey' as a named parameter."
                )
            elif values["token"] or "WATSONX_TOKEN" in os.environ:
                values["token"] = convert_to_secret_str(
                    get_from_dict_or_env(values, "token", "WATSONX_TOKEN")
                )
            elif values["password"] or "WATSONX_PASSWORD" in os.environ:
                values["password"] = convert_to_secret_str(
                    get_from_dict_or_env(values, "password", "WATSONX_PASSWORD")
                )
                values["username"] = convert_to_secret_str(
                    get_from_dict_or_env(values, "username", "WATSONX_USERNAME")
                )
            elif values["apikey"] or "WATSONX_APIKEY" in os.environ:
                values["apikey"] = convert_to_secret_str(
                    get_from_dict_or_env(values, "apikey", "WATSONX_APIKEY")
                )
                values["username"] = convert_to_secret_str(
                    get_from_dict_or_env(values, "username", "WATSONX_USERNAME")
                )
            if not values["instance_id"] or "WATSONX_INSTANCE_ID" not in os.environ:
                values["instance_id"] = convert_to_secret_str(
                    get_from_dict_or_env(
                        values, "instance_id", "WATSONX_INSTANCE_ID"
                    )
                )

        try:
            from ibm_watsonx_ai.foundation_models import ModelInference

            # Build the credentials dict expected by ibm_watsonx_ai, dropping
            # any secrets that were not provided.
            credentials = {
                key: values[key].get_secret_value() if values[key] else None
                for key in (
                    "url", "apikey", "token", "password",
                    "username", "instance_id", "version",
                )
            }
            credentials_without_none_value = {
                key: value for key, value in credentials.items() if value is not None
            }

            watsonx_model = ModelInference(
                model_id=values["model_id"],
                deployment_id=values["deployment_id"],
                credentials=credentials_without_none_value,
                params=values["params"],
                project_id=values["project_id"],
                space_id=values["space_id"],
                verify=values["verify"],
            )
            values["watsonx_model"] = watsonx_model
        except ImportError:
            raise ImportError(
                "Could not import ibm_watsonx_ai python package. "
                "Please install it with `pip install ibm_watsonx_ai`."
            )
        return values

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {
            "model_id": self.model_id,
            "deployment_id": self.deployment_id,
            "params": self.params,
            "project_id": self.project_id,
            "space_id": self.space_id,
        }

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "IBM watsonx.ai"

    @staticmethod
    def _extract_token_usage(
        response: Optional[List[Dict[str, Any]]] = None,
    ) -> Dict[str, Any]:
        if response is None:
            return {"generated_token_count": 0, "input_token_count": 0}

        input_token_count = 0
        generated_token_count = 0

        def get_count_value(key: str, result: Dict[str, Any]) -> int:
            return result.get(key, 0) or 0

        for res in response:
            results = res.get("results")
            if results:
                input_token_count += get_count_value("input_token_count", results[0])
                generated_token_count += get_count_value(
                    "generated_token_count", results[0]
                )

        return {
            "generated_token_count": generated_token_count,
            "input_token_count": input_token_count,
        }

    def _get_chat_params(self, stop: Optional[List[str]] = None) -> Dict[str, Any]:
        params: Dict[str, Any] = {**self.params} if self.params else {}
        if stop is not None:
            params["stop_sequences"] = stop
        return params

    def _create_llm_result(self, response: List[dict]) -> LLMResult:
        """Create the LLMResult from the choices and prompts."""
        generations = []
        for res in response:
            results = res.get("results")
            if results:
                finish_reason = results[0].get("stop_reason")
                gen = Generation(
                    text=results[0].get("generated_text"),
                    generation_info={"finish_reason": finish_reason},
                )
                generations.append([gen])
        final_token_usage = self._extract_token_usage(response)
        llm_output = {
            "token_usage": final_token_usage,
            "model_id": self.model_id,
            "deployment_id": self.deployment_id,
        }
        return LLMResult(generations=generations, llm_output=llm_output)

    def _stream_response_to_generation_chunk(
        self,
        stream_response: Dict[str, Any],
    ) -> GenerationChunk:
        """Convert a stream response to a generation chunk."""
        if not stream_response["results"]:
            return GenerationChunk(text="")
        return GenerationChunk(
            text=stream_response["results"][0]["generated_text"],
            generation_info=dict(
                finish_reason=stream_response["results"][0].get("stop_reason", None),
                llm_output={
                    "model_id": self.model_id,
                    "deployment_id": self.deployment_id,
                },
            ),
        )

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """Call the IBM watsonx.ai inference endpoint.
        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.
            run_manager: Optional callback manager.
        Returns:
            The string generated by the model.
        Example:
            .. code-block:: python

                response = watsonx_llm.invoke("What is a molecule")
        """
        result = self._generate(
            prompts=[prompt], stop=stop, run_manager=run_manager, **kwargs
        )
        return result.generations[0][0].text

    def _generate(
        self,
        prompts: List[str],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        stream: Optional[bool] = None,
        **kwargs: Any,
    ) -> LLMResult:
        """Call the IBM watsonx.ai inference endpoint which then generates the response.
        Args:
            prompts: List of strings (prompts) to pass into the model.
            stop: Optional list of stop words to use when generating.
            run_manager: Optional callback manager.
        Returns:
            The full LLMResult output.
        Example:
            .. code-block:: python

                response = watsonx_llm.generate(["What is a molecule"])
        """
        params = self._get_chat_params(stop=stop)
        should_stream = stream if stream is not None else self.streaming
        if should_stream:
            if len(prompts) > 1:
                raise ValueError(
                    f"WatsonxLLM currently only supports single prompt, got {prompts}"
                )
            # Streaming: accumulate the chunks into a single generation and
            # surface the llm_output stashed in the chunk's generation_info.
            generation = GenerationChunk(text="")
            stream_iter = self._stream(
                prompts[0], stop=stop, run_manager=run_manager, **kwargs
            )
            for chunk in stream_iter:
                if generation is None:
                    generation = chunk
                else:
                    generation += chunk
            assert generation is not None
            if isinstance(generation.generation_info, dict):
                llm_output = generation.generation_info.pop("llm_output")
                return LLMResult(generations=[[generation]], llm_output=llm_output)
            return LLMResult(generations=[[generation]])
        else:
            response = self.watsonx_model.generate(prompt=prompts, params=params)
            return self._create_llm_result(response)

    def _stream(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[GenerationChunk]:
        """Call the IBM watsonx.ai inference endpoint which then streams the response.
        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.
            run_manager: Optional callback manager.
        Returns:
            The iterator which yields generation chunks.
        Example:
            .. code-block:: python

                response = watsonx_llm.stream("What is a molecule")
                for chunk in response:
                    print(chunk, end='')  # noqa: T201
        """
        params = self._get_chat_params(stop=stop)
        # raw_response=True yields full response dicts (with stop_reason)
        # rather than plain text.
        for stream_resp in self.watsonx_model.generate_text_stream(
            prompt=prompt, raw_response=True, params=params
        ):
            chunk = self._stream_response_to_generation_chunk(stream_resp)
            if run_manager:
                run_manager.on_llm_new_token(chunk.text, chunk=chunk)
            yield chunk