import logging
import time
from typing import Any, Dict, List, Mapping, Optional

import requests
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.utils import (
    convert_to_secret_str,
    get_from_dict_or_env,
    pre_init,
)
from pydantic import ConfigDict, Field, SecretStr, model_validator

from langchain_community.llms.utils import enforce_stop_tokens

logger = logging.getLogger(__name__)


class StochasticAI(LLM):
    """StochasticAI large language models.

    To use, you should have the environment variable ``STOCHASTICAI_API_KEY``
    set with your API key.

    Example:
        .. code-block:: python

            from langchain_community.llms import StochasticAI
            stochasticai = StochasticAI(api_url="")
    """

    api_url: str = ""
    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
    stochasticai_api_key: Optional[SecretStr] = None

    model_config = ConfigDict(extra="forbid")

    @model_validator(mode="before")
    @classmethod
    def build_extra(cls, values: Dict[str, Any]) -> Any:
        """Build extra kwargs from additional params that were passed in."""
        all_required_field_names = set(list(cls.model_fields.keys()))

        extra = values.get("model_kwargs", {})
        for field_name in list(values):
            if field_name not in all_required_field_names:
                if field_name in extra:
                    raise ValueError(f"Found {field_name} supplied twice.")
                logger.warning(
                    f"""{field_name} was transferred to model_kwargs.
                    Please confirm that {field_name} is what you intended."""
                )
                extra[field_name] = values.pop(field_name)
        values["model_kwargs"] = extra
        return values

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that api key exists in environment."""
        stochasticai_api_key = convert_to_secret_str(
            get_from_dict_or_env(
                values, "stochasticai_api_key", "STOCHASTICAI_API_KEY"
            )
        )
        values["stochasticai_api_key"] = stochasticai_api_key
        return values

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {
            **{"endpoint_url": self.api_url},
            **{"model_kwargs": self.model_kwargs},
        }

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "stochasticai"

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """Call out to StochasticAI's complete endpoint.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            The string generated by the model.

        Example:
            .. code-block:: python

                response = StochasticAI("Tell me a joke.")
        """
        params = self.model_kwargs or {}
        params = {**params, **kwargs}
        # Submit the prompt; the API responds with a URL to poll for the result.
        response_post = requests.post(
            url=self.api_url,
            json={"prompt": prompt, "params": params},
            headers={
                "apiKey": f"{self.stochasticai_api_key.get_secret_value()}",
                "Accept": "application/json",
                "Content-Type": "application/json",
            },
        )
        response_post.raise_for_status()
        response_post_json = response_post.json()
        completed = False
        # Poll the response URL until a completion is available.
        while not completed:
            response_get = requests.get(
                url=response_post_json["data"]["responseUrl"],
                headers={
                    "apiKey": f"{self.stochasticai_api_key.get_secret_value()}",
                    "Accept": "application/json",
                    "Content-Type": "application/json",
                },
            )
            response_get.raise_for_status()
            response_get_json = response_get.json()["data"]
            text = response_get_json.get("completion")
            completed = text is not None
            time.sleep(0.5)
        text = text[0]
        if stop is not None:
            # Stop sequences are not enforced by the model parameters,
            # so they are applied to the returned text here.
            text = enforce_stop_tokens(text, stop)
        return text
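
# Illustrative usage sketch (not part of the original module): shows how the
# class above is typically constructed and invoked. The endpoint URL below is
# a placeholder and the extra model parameters are assumptions; a real call
# also requires STOCHASTICAI_API_KEY to be set in the environment.
if __name__ == "__main__":
    llm = StochasticAI(
        api_url="https://example.invalid/modelApi/submit/my-model",  # placeholder URL
        model_kwargs={"temperature": 0.7, "max_tokens": 64},  # hypothetical params
    )
    print(llm.invoke("Tell me a joke."))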
