"""Wrapper around Together AI's Completion API."""

import logging
from typing import Any, Dict, List, Optional

from aiohttp import ClientSession
from langchain_core._api.deprecation import deprecated
from langchain_core.callbacks import (
    AsyncCallbackManagerForLLMRun,
    CallbackManagerForLLMRun,
)
from langchain_core.language_models.llms import LLM
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
from pydantic import ConfigDict, SecretStr, model_validator

from langchain_community.utilities.requests import Requests

logger = logging.getLogger(__name__)


@deprecated(
    since="0.0.12", removal="1.0", alternative_import="langchain_together.Together"
)
class Together(LLM):
    """LLM models from `Together`.

    To use, you'll need an API key which you can find here:
    https://api.together.xyz/settings/api-keys. This can be passed in as init param
    ``together_api_key`` or set as environment variable ``TOGETHER_API_KEY``.

    Together AI API reference: https://docs.together.ai/reference/inference
    """

    base_url: str = "https://api.together.xyz/inference"
    together_api_key: SecretStr
    model: str
    temperature: Optional[float] = None
    top_p: Optional[float] = None
    top_k: Optional[int] = None
    max_tokens: Optional[int] = None
    repetition_penalty: Optional[float] = None
    logprobs: Optional[int] = None

    model_config = ConfigDict(extra="forbid")

    @model_validator(mode="before")
    @classmethod
    def validate_environment(cls, values: Dict) -> Any:
        """Validate that api key exists in environment."""
        values["together_api_key"] = convert_to_secret_str(
            get_from_dict_or_env(values, "together_api_key", "TOGETHER_API_KEY")
        )
        return values

    @property
    def _llm_type(self) -> str:
        """Return type of model."""
        return "together"

    def _format_output(self, output: dict) -> str:
        return output["output"]["choices"][0]["text"]

    @staticmethod
    def get_user_agent() -> str:
        from langchain_community import __version__

        return f"langchain/{__version__}"

    @property
    def default_params(self) -> Dict[str, Any]:
        return {
            "model": self.model,
            "temperature": self.temperature,
            "top_p": self.top_p,
            "top_k": self.top_k,
            "max_tokens": self.max_tokens,
            "repetition_penalty": self.repetition_penalty,
        }

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """Call out to Together's text generation endpoint.

        Args:
            prompt: The prompt to pass into the model.

        Returns:
            The string generated by the model.
        """
        headers = {
            "Authorization": f"Bearer {self.together_api_key.get_secret_value()}",
            "Content-Type": "application/json",
        }
        stop_to_use = stop[0] if stop and len(stop) == 1 else stop
        payload: Dict[str, Any] = {
            **self.default_params,
            "prompt": prompt,
            "stop": stop_to_use,
            **kwargs,
        }
        # Filter out None values so they are not sent in the HTTP payload.
        payload = {k: v for k, v in payload.items() if v is not None}
        request = Requests(headers=headers)
        response = request.post(url=self.base_url, data=payload)

        if response.status_code >= 500:
            raise Exception(f"Together Server: Error {response.status_code}")
        elif response.status_code >= 400:
            raise ValueError(f"Together received an invalid payload: {response.text}")
        elif response.status_code != 200:
            raise Exception(
                f"Together returned an unexpected response with status "
                f"{response.status_code}: {response.text}"
            )

        data = response.json()
        if data.get("status") != "finished":
            err_msg = data.get("error", "Undefined Error")
            raise Exception(err_msg)

        return self._format_output(data)

    async def _acall(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """Call Together model to get predictions based on the prompt.

        Args:
            prompt: The prompt to pass into the model.

        Returns:
            The string generated by the model.
        """
        headers = {
            "Authorization": f"Bearer {self.together_api_key.get_secret_value()}",
            "Content-Type": "application/json",
        }
        stop_to_use = stop[0] if stop and len(stop) == 1 else stop
        payload: Dict[str, Any] = {
            **self.default_params,
            "prompt": prompt,
            "stop": stop_to_use,
            **kwargs,
        }
        # Filter out None values so they are not sent in the HTTP payload.
        payload = {k: v for k, v in payload.items() if v is not None}
        async with ClientSession() as session:
            async with session.post(
                self.base_url, json=payload, headers=headers
            ) as response:
                if response.status >= 500:
                    raise Exception(f"Together Server: Error {response.status}")
                elif response.status >= 400:
                    raise ValueError(
                        f"Together received an invalid payload: {response.text}"
                    )
                elif response.status != 200:
                    raise Exception(
                        f"Together returned an unexpected response with status "
                        f"{response.status}: {response.text}"
                    )

                response_json = await response.json()
                if response_json.get("status") != "finished":
                    err_msg = response_json.get("error", "Undefined Error")
                    raise Exception(err_msg)

                return self._format_output(response_json)
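

# Illustrative usage sketch (not part of the original module): shows how this
# wrapper is typically instantiated and invoked. Assumes the TOGETHER_API_KEY
# environment variable is set; the model name below is a hypothetical example,
# substitute any model available in Together's catalog.
#
#     from langchain_community.llms import Together
#
#     llm = Together(
#         model="mistralai/Mixtral-8x7B-Instruct-v0.1",  # hypothetical model choice
#         temperature=0.7,
#         max_tokens=128,
#     )
#     print(llm.invoke("What are the three primary colors?"))
#
# The async path (`_acall`) is exercised the same way via `await llm.ainvoke(...)`.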