import logging
from typing import Any, Dict, List, Optional

from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.outputs import Generation, LLMResult
from langchain_core.utils import pre_init
from pydantic import ConfigDict, Field

from langchain_community.llms.utils import enforce_stop_tokens

logger = logging.getLogger(__name__)

EXAMPLE_URL = "https://clarifai.com/openai/chat-completion/models/GPT-4"


class Clarifai(LLM):
    """Clarifai large language models.

    To use, you should have an account on the Clarifai platform,
    the ``clarifai`` python package installed, and the
    environment variable ``CLARIFAI_PAT`` set with your PAT key,
    or pass it as a named parameter to the constructor.

    Example:
        .. code-block:: python

            from langchain_community.llms import Clarifai
            clarifai_llm = Clarifai(user_id=USER_ID, app_id=APP_ID, model_id=MODEL_ID)
                             (or)
            clarifai_llm = Clarifai(model_url=EXAMPLE_URL)
    """

    model_url: Optional[str] = None  # Full URL of the Clarifai model (see EXAMPLE_URL).
    model_id: Optional[str] = None  # Model id on the Clarifai platform.
    model_version_id: Optional[str] = None  # Optional specific model version id.
    app_id: Optional[str] = None  # Clarifai application id hosting the model.
    user_id: Optional[str] = None  # Clarifai user id owning the application.
    pat: Optional[str] = Field(default=None, exclude=True)  # Personal access token.
    token: Optional[str] = Field(default=None, exclude=True)  # Session token.
    model: Any = Field(default=None, exclude=True)  # Clarifai SDK Model client.
    api_base: str = "https://api.clarifai.com"

    model_config = ConfigDict(
        extra="forbid",
    )

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that we have all required info to access Clarifai
        platform and python package exists in environment."""
        try:
            from clarifai.client.model import Model
        except ImportError:
            raise ImportError(
                "Could not import clarifai python package. "
                "Please install it with `pip install clarifai`."
            )
        user_id = values.get("user_id")
        app_id = values.get("app_id")
        model_id = values.get("model_id")
        model_version_id = values.get("model_version_id")
        model_url = values.get("model_url")
        api_base = values.get("api_base")
        pat = values.get("pat")
        token = values.get("token")

        # Build the Clarifai SDK model client; the model may be addressed either by
        # its full URL or by (user_id, app_id, model_id[, model_version_id]).
        values["model"] = Model(
            url=model_url,
            app_id=app_id,
            user_id=user_id,
            model_version=dict(id=model_version_id),
            pat=pat,
            token=token,
            model_id=model_id,
            base_url=api_base,
        )

        return values
zClarifai.validate_environment)r   c                 C   s   i S )z4Get the default parameters for calling Clarifai API.r&   selfr&   r&   r'   _default_paramsY   s    zClarifai._default_paramsc                 C   s   i | j | j| j| jdS )zGet the identifying parameters.r   r   r   r   r,   r)   r&   r&   r'   _identifying_params^   s    zClarifai._identifying_paramsc                 C   s   dS )zReturn type of llm.Zclarifair&   r)   r&   r&   r'   	_llm_typej   s    zClarifai._llm_type)promptstoprun_managerinference_paramskwargsr   c           	   
   K   s   zR|du ri  }n| | j jt|dd|d}|jd jjj}|durPt||}W n4 ty } zt	
d|  W Y d}~n
d}~0 0 |S )a~  Call out to Clarfai's PostModelOutputs endpoint.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            The string generated by the model.

        Example:
            .. code-block:: python

                response = clarifai_llm.invoke("Tell me a joke.")
        Nzutf-8text)Z
input_typer2   r   Predict failed, exception: )r   Zpredict_by_bytesbytesoutputsdatar4   rawr   	Exceptionloggererror)	r*   r/   r0   r1   r2   r3   predict_responser4   er&   r&   r'   _callo   s    &zClarifai._call)promptsr0   r1   r2   r3   r   c              
      s*  zddl m} W n ty*   tdY n0 g }d}|| jj ztdt||D ]N}	||	|	|  }
 fddt|
D }|du ri  }n| | jj	||d}qT|j
D ]:}|durt|jjj|}n
|jjj}|t|d	g qW n6 ty } ztd
|  W Y d}~n
d}~0 0 t|dS )z*Run the LLM on the given prompt and input.r   )Inputsr       c                    s"   g | ]\}} j t||d qS ))Zinput_idZraw_text)Zget_text_inputstr).0r   inpZ	input_objr&   r'   
<listcomp>   s   z&Clarifai._generate.<locals>.<listcomp>N)inputsr2   )r4   r5   )generations)Zclarifai.client.inputrA   r"   Zfrom_auth_helperr   Zauth_helperrangelen	enumerateZpredictr7   r   r8   r4   r9   appendr   r:   r;   r<   r	   )r*   r@   r0   r1   r2   r3   rA   rI   Z
batch_sizeibatchZinput_batchr=   outputr4   r>   r&   rF   r'   	_generate   s@    



&zClarifai._generate)NNN)NNN) __name__
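

# Minimal usage sketch (illustrative; guarded so it never runs on import). It
# assumes the ``clarifai`` package is installed and a valid personal access token
# is available via the CLARIFAI_PAT environment variable (or passed as ``pat=``).
# EXAMPLE_URL points at a public GPT-4 chat-completion model hosted on Clarifai;
# the prompts below are placeholders.
if __name__ == "__main__":
    llm = Clarifai(model_url=EXAMPLE_URL)

    # Single prompt through the Runnable interface.
    print(llm.invoke("Tell me a joke."))

    # Batched prompts; ``result.generations`` holds one inner list per prompt.
    result = llm.generate(["Tell me a joke.", "Say hello in French."])
    for gens in result.generations:
        print(gens[0].text)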