"""Wrapper around Minimax APIs."""

from __future__ import annotations

import logging
from typing import Any, Dict, List, Optional

import requests
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.utils import (
    convert_to_secret_str,
    get_from_dict_or_env,
    pre_init,
)
from pydantic import BaseModel, ConfigDict, Field, SecretStr, model_validator

from langchain_community.llms.utils import enforce_stop_tokens

logger = logging.getLogger(__name__)


class _MinimaxEndpointClient(BaseModel):
    """API client for the Minimax LLM endpoint."""

    host: str
    group_id: str
    api_key: SecretStr
    api_url: str

    @model_validator(mode="before")
    @classmethod
    def set_api_url(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        # Build the chat-completion URL from the host and group id unless the
        # caller supplied an explicit ``api_url``.
        if "api_url" not in values:
            host = values["host"]
            group_id = values["group_id"]
            values["api_url"] = f"{host}/v1/text/chatcompletion?GroupId={group_id}"
        return values

    def post(self, request: Any) -> Any:
        headers = {"Authorization": f"Bearer {self.api_key.get_secret_value()}"}
        response = requests.post(self.api_url, headers=headers, json=request)
        if not response.ok:
            raise ValueError(f"HTTP {response.status_code} error: {response.text}")
        if response.json()["base_resp"]["status_code"] > 0:
            raise ValueError(
                f"API {response.json()['base_resp']['status_code']}"
                f" error: {response.json()['base_resp']['status_msg']}"
            )
        return response.json()["reply"]
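
# For orientation, a rough sketch of the JSON exchanged by
# ``_MinimaxEndpointClient.post`` above, inferred only from the fields this
# module reads and writes; the live Minimax API may include additional fields,
# and the concrete values shown are illustrative placeholders:
#
#   request:
#       {"model": "abab5.5-chat", "tokens_to_generate": 256,
#        "temperature": 0.7, "top_p": 0.95,
#        "messages": [{"sender_type": "USER", "text": "<prompt>"}]}
#   response:
#       {"base_resp": {"status_code": 0, "status_msg": ""},
#        "reply": "<generated text>"}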
class MinimaxCommon(BaseModel):
    """Common parameters for Minimax large language models."""

    model_config = ConfigDict(
        protected_namespaces=(),
    )

    _client: _MinimaxEndpointClient
    model: str = "abab5.5-chat"
    """Model name to use."""
    max_tokens: int = 256
    """Denotes the number of tokens to predict per generation."""
    temperature: float = 0.7
    """A non-negative float that tunes the degree of randomness in generation."""
    top_p: float = 0.95
    """Total probability mass of tokens to consider at each step."""
    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
    """Holds any model parameters valid for the API call not explicitly specified."""
    minimax_api_host: Optional[str] = None
    minimax_group_id: Optional[str] = None
    minimax_api_key: Optional[SecretStr] = None

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that the API key and group id exist in the environment."""
        values["minimax_api_key"] = convert_to_secret_str(
            get_from_dict_or_env(values, "minimax_api_key", "MINIMAX_API_KEY")
        )
        values["minimax_group_id"] = get_from_dict_or_env(
            values, "minimax_group_id", "MINIMAX_GROUP_ID"
        )
        # A custom API host may also come from the environment.
        values["minimax_api_host"] = get_from_dict_or_env(
            values,
            "minimax_api_host",
            "MINIMAX_API_HOST",
            default="https://api.minimax.chat",
        )
        values["_client"] = _MinimaxEndpointClient(
            host=values["minimax_api_host"],
            api_key=values["minimax_api_key"],
            group_id=values["minimax_group_id"],
        )
        return values

    @property
    def _default_params(self) -> Dict[str, Any]:
        """Get the default parameters for calling the Minimax API."""
        return {
            "model": self.model,
            "tokens_to_generate": self.max_tokens,
            "temperature": self.temperature,
            "top_p": self.top_p,
            **self.model_kwargs,
        }

    @property
    def _identifying_params(self) -> Dict[str, Any]:
        """Get the identifying parameters."""
        return {**{"model": self.model}, **self._default_params}

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "minimax"


class Minimax(MinimaxCommon, LLM):
    """Minimax large language models.

    To use, you should have the environment variables
    ``MINIMAX_API_KEY`` and ``MINIMAX_GROUP_ID`` set with your API key and
    group id, or pass them as named parameters to the constructor.

    Example:
        .. code-block:: python

            from langchain_community.llms.minimax import Minimax

            minimax = Minimax(
                model="<model_name>",
                minimax_api_key="my-api-key",
                minimax_group_id="my-group-id",
            )
    """

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """Call out to Minimax's chat-completion endpoint.

        Args:
            prompt: The prompt to pass into the model.

        Returns:
            The string generated by the model.

        Example:
            .. code-block:: python

                response = minimax("Tell me a joke.")
        """
        request = self._default_params
        request["messages"] = [{"sender_type": "USER", "text": prompt}]
        request.update(kwargs)
        text = self._client.post(request)
        if stop is not None:
            # Stop tokens are not enforced by the model parameters, so apply
            # them client-side.
            text = enforce_stop_tokens(text, stop)
        return text
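
# A minimal end-to-end usage sketch (illustrative only, not part of the
# library module itself): it assumes valid MINIMAX_API_KEY and
# MINIMAX_GROUP_ID values are set in the environment and that
# api.minimax.chat is reachable.
if __name__ == "__main__":
    demo_llm = Minimax(model="abab5.5-chat", temperature=0.7)
    # ``invoke`` comes from the LangChain Runnable interface implemented by LLM.
    print(demo_llm.invoke("Tell me a joke."))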