"""EverlyAI Endpoints chat wrapper. Relies heavily on ChatOpenAI."""

from __future__ import annotations

import logging
import sys
import warnings
from typing import (
    TYPE_CHECKING,
    Any,
    Callable,
    Dict,
    Optional,
    Sequence,
    Set,
    Type,
    Union,
)

from langchain_core.messages import BaseMessage
from langchain_core.tools import BaseTool
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
from pydantic import Field, model_validator

from langchain_community.adapters.openai import convert_message_to_dict
from langchain_community.chat_models.openai import ChatOpenAI, _import_tiktoken

if TYPE_CHECKING:
    import tiktoken

logger = logging.getLogger(__name__)

DEFAULT_API_BASE = "https://everlyai.xyz/hosted"
DEFAULT_MODEL = "meta-llama/Llama-2-7b-chat-hf"


class ChatEverlyAI(ChatOpenAI):
    """`EverlyAI` Chat large language models.

    To use, you should have the ``openai`` python package installed, and the
    environment variable ``EVERLYAI_API_KEY`` set with your API key.
    Alternatively, you can use the everlyai_api_key keyword argument.

    Any parameters that are valid to be passed to the `openai.create` call can be passed
    in, even if not explicitly saved on this class.

    Example:
        .. code-block:: python

            from langchain_community.chat_models import ChatEverlyAI
            chat = ChatEverlyAI(model_name="meta-llama/Llama-2-7b-chat-hf")
    str)returnc                 C  s   dS )zReturn type of chat model.zeverlyai-chat selfr   r   v/var/www/html/cobodadashboardai.evdpl.com/venv/lib/python3.9/site-packages/langchain_community/chat_models/everlyai.py	_llm_type:   s    zChatEverlyAI._llm_typezDict[str, str]c                 C  s   ddiS )Neverlyai_api_keyEVERLYAI_API_KEYr   r   r   r   r   
lc_secrets?   s    zChatEverlyAI.lc_secretsboolc                 C  s   dS )NFr   )clsr   r   r   is_lc_serializableC   s    zChatEverlyAI.is_lc_serializableNzOptional[str]r   model)defaultalias
model_nameeverlyai_api_basezOptional[Set[str]]available_modelszSet[str]c                   C  s   t ddgS )z'Get available models from EverlyAI API.r   z(meta-llama/Llama-2-13b-chat-hf-quantized)setr   r   r   r   get_available_modelsP   s
    z!ChatEverlyAI.get_available_modelsbefore)modedictr   )valuesr   c              
   C  s   t t|dd|d< t|d< zddl}W n. tyV } ztd|W Y d}~n
d}~0 0 z|j|d< W n. ty } ztd	|W Y d}~n
d}~0 0 d
| vrt	|d
< |d
 }| 
 }||vrtd| d| d||d< |S )z?Validate that api key and python package exists in environment.r   r    Zopenai_api_keyZopenai_api_baser   NzTCould not import openai python package. Please install it with `pip install openai`.clientz`openai` has no `ChatCompletion` attribute, this is likely due to an old version of the openai package. Try upgrading it with `pip install --upgrade openai`.r(   zModel name z  not found in available models: .r*   )r   r   DEFAULT_API_BASEopenaiImportErrorZChatCompletionAttributeError
ValueErrorkeysDEFAULT_MODELr,   )r#   r0   r4   eexcr(   r*   r   r   r   validate_environment_override[   sH    z*ChatEverlyAI.validate_environment_overrideztuple[str, tiktoken.Encoding]c                 C  s`   t  }| jd ur| j}n| j}z|d}W n* tyV   td d}||}Y n0 ||fS )Nzgpt-3.5-turbo-0301z5Warning: model not found. Using cl100k_base encoding.Zcl100k_base)r   Ztiktoken_model_namer(   Zencoding_for_modelKeyErrorloggerwarningZget_encoding)r   Z	tiktoken_r%   encodingr   r   r   _get_encoding_model   s    

    def get_num_tokens_from_messages(
        self,
        messages: list[BaseMessage],
        tools: Optional[
            Sequence[Union[Dict[str, Any], Type, Callable, BaseTool]]
        ] = None,
    ) -> int:
        """Calculate num tokens with tiktoken package.

        Official documentation: https://github.com/openai/openai-cookbook/blob/
        main/examples/How_to_format_inputs_to_ChatGPT_models.ipynb"""
        if tools is not None:
            warnings.warn(
                "Counting tokens in tool schemas is not yet supported. "
                "Ignoring tools."
            )
        if sys.version_info[1] <= 7:
            return super().get_num_tokens_from_messages(messages)
        model, encoding = self._get_encoding_model()
        tokens_per_message = 3
        tokens_per_name = 1
        num_tokens = 0
        messages_dict = [convert_message_to_dict(m) for m in messages]
        for message in messages_dict:
            num_tokens += tokens_per_message
            for key, value in message.items():
                # Cast str(value) in case the message value is not a string
                # (this occurs with function messages).
                num_tokens += len(encoding.encode(str(value)))
                if key == "name":
                    num_tokens += tokens_per_name
        # Every reply is primed with <im_start>assistant.
        num_tokens += 3
        return num_tokens
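

# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the module above): how ChatEverlyAI
# is typically wired up, assuming EVERLYAI_API_KEY is set in the environment
# and a compatible `openai` package is installed. The model name, sampling
# parameters, and prompt text are examples only; extra keyword arguments such
# as `temperature` and `max_tokens` are forwarded to the underlying OpenAI
# client, as described in the class docstring.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    from langchain_core.messages import HumanMessage, SystemMessage

    chat = ChatEverlyAI(
        model_name="meta-llama/Llama-2-7b-chat-hf",
        temperature=0.3,
        max_tokens=64,
    )
    messages = [
        SystemMessage(content="You are a helpful assistant."),
        HumanMessage(content="Tell me a joke about llamas."),
    ]
    # Estimate the prompt size with the tiktoken-based counter defined above.
    print(chat.get_num_tokens_from_messages(messages))
    # Send the chat request to the EverlyAI-hosted endpoint.
    print(chat.invoke(messages).content)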