from functools import partial
from typing import Any, Dict, List, Mapping, Optional, Set

from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.utils import pre_init
from pydantic import ConfigDict, Field

from langchain_community.llms.utils import enforce_stop_tokens


class GPT4All(LLM):
    """GPT4All language models.

    To use, you should have the ``gpt4all`` python package installed, the
    pre-trained model file, and the model's config information.

    Example:
        .. code-block:: python

            from langchain_community.llms import GPT4All
            model = GPT4All(model="./models/gpt4all-model.bin", n_threads=8)

            # Simplest invocation
            response = model.invoke("Once upon a time, ")
    """

    model: str
    """Path to the pre-trained GPT4All model file."""

    backend: Optional[str] = Field(None, alias="backend")
    max_tokens: int = Field(200, alias="max_tokens")
    n_parts: int = Field(-1, alias="n_parts")
    seed: int = Field(0, alias="seed")
    f16_kv: bool = Field(False, alias="f16_kv")
    logits_all: bool = Field(False, alias="logits_all")
    vocab_only: bool = Field(False, alias="vocab_only")
    use_mlock: bool = Field(False, alias="use_mlock")
    embedding: bool = Field(False, alias="embedding")
    n_threads: Optional[int] = Field(4, alias="n_threads")
    n_predict: Optional[int] = 256
    temp: Optional[float] = 0.7
    top_p: Optional[float] = 0.1
    top_k: Optional[int] = 40
    echo: Optional[bool] = False
    stop: Optional[List[str]] = []
    repeat_last_n: Optional[int] = 64
    repeat_penalty: Optional[float] = 1.18
    n_batch: int = Field(8, alias="n_batch")
    streaming: bool = False
    allow_download: bool = False
    device: Optional[str] = Field("cpu", alias="device")
    client: Any = None  #: :meta private:

    model_config = ConfigDict(
        extra="forbid",
    )

    @staticmethod
    def _model_param_names() -> Set[str]:
        return {
            "max_tokens",
            "n_predict",
            "top_k",
            "top_p",
            "temp",
            "n_batch",
            "repeat_penalty",
            "repeat_last_n",
            "streaming",
        }

    def _default_params(self) -> Dict[str, Any]:
        return {
            "max_tokens": self.max_tokens,
            "n_predict": self.n_predict,
            "top_k": self.top_k,
            "top_p": self.top_p,
            "temp": self.temp,
            "n_batch": self.n_batch,
            "repeat_penalty": self.repeat_penalty,
            "repeat_last_n": self.repeat_last_n,
            "streaming": self.streaming,
        }

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that the python package exists in the environment."""
        try:
            from gpt4all import GPT4All as GPT4AllModel
        except ImportError:
            raise ImportError(
                "Could not import gpt4all python package. "
                "Please install it with `pip install gpt4all`."
            )

        # Split the configured model path into directory and file name.
        full_path = values["model"]
        model_path, delimiter, model_name = full_path.rpartition("/")
        model_path += delimiter

        values["client"] = GPT4AllModel(
            model_name,
            model_path=model_path or None,
            model_type=values["backend"],
            allow_download=values["allow_download"],
            device=values["device"],
        )
        if values["n_threads"] is not None:
            # Set the thread count on the underlying gpt4all model.
            values["client"].model.set_thread_count(values["n_threads"])

        try:
            values["backend"] = values["client"].model_type
        except AttributeError:
            # Older gpt4all bindings expose the type on the nested model object.
            values["backend"] = values["client"].model.model_type

        return values

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {
            "model": self.model,
            **self._default_params(),
            **{
                k: v
                for k, v in self.__dict__.items()
                if k in self._model_param_names()
            },
        }

    @property
    def _llm_type(self) -> str:
        """Return the type of llm."""
        return "gpt4all"

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        r"""Call out to GPT4All's generate method.

        Args:
            prompt: The prompt to pass into the model.
            stop: A list of strings to stop generation when encountered.

        Returns:
            The string generated by the model.

        Example:
            .. code-block:: python

                prompt = "Once upon a time, "
                response = model.invoke(prompt, n_predict=55)
        """
        text_callback = None
        if run_manager:
            text_callback = partial(run_manager.on_llm_new_token, verbose=self.verbose)
        text = ""
        params = {**self._default_params(), **kwargs}
        for token in self.client.generate(prompt, **params):
            if text_callback:
                text_callback(token)
            text += token
        if stop is not None:
            text = enforce_stop_tokens(text, stop)
        return text