from typing import Any, Dict, List, Mapping, Optional

import requests
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
from pydantic import ConfigDict, SecretStr, model_validator

from langchain_community.llms.utils import enforce_stop_tokens


class ForefrontAI(LLM):
    """ForefrontAI large language models.

    To use, you should have the environment variable ``FOREFRONTAI_API_KEY``
    set with your API key.

    Example:
        .. code-block:: python

            from langchain_community.llms import ForefrontAI
            forefrontai = ForefrontAI(endpoint_url="")
    """

    endpoint_url: str = ""
    temperature: float = 0.7
    length: int = 256
    top_p: float = 1.0
    top_k: int = 40
    repetition_penalty: int = 1
    forefrontai_api_key: SecretStr
    base_url: Optional[str] = None

    model_config = ConfigDict(extra="forbid")

    @model_validator(mode="before")
    @classmethod
    def validate_environment(cls, values: Dict) -> Any:
        """Validate that api key exists in environment."""
        values["forefrontai_api_key"] = convert_to_secret_str(
            get_from_dict_or_env(values, "forefrontai_api_key", "FOREFRONTAI_API_KEY")
        )
        return values

    @property
    def _default_params(self) -> Mapping[str, Any]:
        """Get the default parameters for calling ForefrontAI API."""
        return {
            "temperature": self.temperature,
            "length": self.length,
            "top_p": self.top_p,
            "top_k": self.top_k,
            "repetition_penalty": self.repetition_penalty,
        }

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {**{"endpoint_url": self.endpoint_url}, **self._default_params}

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "forefrontai"

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """Call out to ForefrontAI's complete endpoint.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            The string generated by the model.

        Example:
            .. code-block:: python

                response = ForefrontAI("Tell me a joke.")
        """
        auth_value = f"Bearer {self.forefrontai_api_key.get_secret_value()}"
        response = requests.post(
            url=self.endpoint_url,
            headers={"Authorization": auth_value, "Content-Type": "application/json"},
            json={"text": prompt, **self._default_params, **kwargs},
        )
        response_json = response.json()
        text = response_json["result"][0]["completion"]
        if stop is not None:
            # Stop sequences are not enforced by the API parameters, so trim them here.
            text = enforce_stop_tokens(text, stop)
        return text
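

# A minimal usage sketch, not part of the module above: it assumes a deployed
# ForefrontAI completion endpoint and a valid API key. The endpoint URL and the
# placeholder key below are illustrative assumptions, not real values.
if __name__ == "__main__":
    import os

    os.environ.setdefault("FOREFRONTAI_API_KEY", "YOUR_API_KEY")  # placeholder key
    llm = ForefrontAI(
        endpoint_url="https://example.forefront.link/completions",  # hypothetical URL
        temperature=0.5,
    )
    # LLM subclasses expose the standard Runnable interface, so `invoke` routes
    # through `_call`; `stop` is forwarded and applied via enforce_stop_tokens.
    print(llm.invoke("Tell me a joke.", stop=["\n\n"]))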