import dataclasses
import os
from typing import Any, Dict, List, Mapping, Optional, Union, cast

import requests
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.utils import get_from_dict_or_env
from pydantic import ConfigDict, model_validator

from langchain_community.llms.utils import enforce_stop_tokens

TIMEOUT = 60


@dataclasses.dataclass
class AviaryBackend:
    """Aviary backend.

    Attributes:
        backend_url: The URL for the Aviary backend.
        bearer: The bearer token for the Aviary backend.
    """

    backend_url: str
    bearer: str

    def __post_init__(self) -> None:
        self.header = {"Authorization": self.bearer}

    @classmethod
    def from_env(cls) -> "AviaryBackend":
        aviary_url = os.getenv("AVIARY_URL")
        assert aviary_url, "AVIARY_URL must be set"

        aviary_token = os.getenv("AVIARY_TOKEN", "")

        # Build the Authorization header value and normalize the backend URL
        # so it always ends with a trailing slash.
        bearer = f"Bearer {aviary_token}" if aviary_token else ""
        aviary_url += "/" if not aviary_url.endswith("/") else ""

        return cls(aviary_url, bearer)


def get_models() -> List[str]:
    """List available models"""
    backend = AviaryBackend.from_env()
    request_url = backend.backend_url + "-/routes"
    response = requests.get(request_url, headers=backend.header, timeout=TIMEOUT)
    try:
        result = response.json()
    except requests.JSONDecodeError as e:
        raise RuntimeError(
            f"Error decoding JSON from {request_url}. Text response: {response.text}"
        ) from e
    # Route names encode "/" in model ids as "--"; undo that mangling here.
    result = sorted(
        [k.lstrip("/").replace("--", "/") for k in result.keys() if "--" in k]
    )
    return result


def get_completions(
    model: str,
    prompt: str,
    use_prompt_format: bool = True,
    version: str = "",
) -> Dict[str, Union[str, float, int]]:
    """Get completions from Aviary models."""
    backend = AviaryBackend.from_env()
    # Model ids use "/" (e.g. "amazon/LightGPT") while routes use "--".
    url = backend.backend_url + model.replace("/", "--") + "/" + version + "query"
    response = requests.post(
        url,
        headers=backend.header,
        json={"prompt": prompt, "use_prompt_format": use_prompt_format},
        timeout=TIMEOUT,
    )
    try:
        return response.json()
    except requests.JSONDecodeError as e:
        raise RuntimeError(
            f"Error decoding JSON from {url}. Text response: {response.text}"
        ) from e


class Aviary(LLM):
    """Aviary hosted models.

    Aviary is a backend for hosted models. You can
    find out more about aviary at
    http://github.com/ray-project/aviary

    To get a list of the models supported on an
    aviary, follow the instructions on the website to
    install the aviary CLI and then use:
    `aviary models`

    AVIARY_URL and AVIARY_TOKEN environment variables must be set.

    Attributes:
        model: The name of the model to use. Defaults to "amazon/LightGPT".
        aviary_url: The URL for the Aviary backend. Defaults to None.
        aviary_token: The bearer token for the Aviary backend. Defaults to None.
        use_prompt_format: If True, the prompt template for the model will be ignored.
            Defaults to True.
        version: API version to use for Aviary. Defaults to None.

    Example:
        .. code-block:: python

            from langchain_community.llms import Aviary
            os.environ["AVIARY_URL"] = "<URL>"
            os.environ["AVIARY_TOKEN"] = "<TOKEN>"
            light = Aviary(model='amazon/LightGPT')
            output = light('How do you make fried rice?')
    zamazon/LightGPTrH   Nr$   r%   TrJ   rK   Zforbid)extrabefore)mode)valuesr   c              
   C   s   t |dd}t |dd}|tjd< |tjd< z
t }W n0 tjjyf } zt|W Y d}~n
d}~0 0 |d}|r||vrt| d|d  d|S )	z?Validate that api key and python package exists in environment.r$   r   r%   r   NrH   z does not support model .)	r   r    environrG   r9   
exceptionsRequestException
ValueErrorr:   )r#   rT   r$   r%   Zaviary_modelsrF   rH   r   r   r   validate_environment   s    



zAviary.validate_environmentr   c                 C   s   | j | jdS )zGet the identifying parameters.)Z
model_namer$   )rH   r$   r   r   r   r   _identifying_params   s    zAviary._identifying_paramsc                 C   s   d| j dd S )zReturn type of llm.zaviary-r   -)rH   r4   r   r   r   r   	_llm_type   s    zAviary._llm_type)rI   stoprun_managerkwargsr   c                 K   sP   d| j i}| jr| j|d< tf | j|d|}tt|d }|rLt||}|S )a  Call out to Aviary
        Args:
            prompt: The prompt to pass into the model.

        Returns:
            The string generated by the model.

        Example:
            .. code-block:: python

                response = aviary("Tell me a joke.")
        """
        kwargs = {"use_prompt_format": self.use_prompt_format}
        if self.version:
            kwargs["version"] = self.version

        output = get_completions(
            model=self.model,
            prompt=prompt,
            **kwargs,
        )

        text = cast(str, output["generated_text"])
        if stop:
            text = enforce_stop_tokens(text, stop)

        return text