from typing import Any, AsyncIterator, Dict, Iterator, List, Optional, Union

from langchain_core.callbacks import (
    AsyncCallbackManagerForLLMRun,
    CallbackManagerForLLMRun,
)
from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk
from langchain_core.utils import pre_init

from langchain_community.llms.utils import enforce_stop_tokens


class DeepSparse(LLM):
    """Neural Magic DeepSparse LLM interface.

    To use, you should have the ``deepsparse`` or ``deepsparse-nightly``
    python package installed. See https://github.com/neuralmagic/deepsparse

    This interface lets you deploy optimized LLMs straight from the
    [SparseZoo](https://sparsezoo.neuralmagic.com/?useCase=text_generation)

    Example:
        .. code-block:: python

            from langchain_community.llms import DeepSparse
            llm = DeepSparse(model="zoo:nlg/text_generation/codegen_mono-350m/pytorch/huggingface/bigpython_bigquery_thepile/base_quant-none")
    """  # noqa: E501
    pipelinemodelNmodel_configurationgeneration_configF	streaming)returnc                 C   s   | j | j| j| jdS )zGet the identifying parameters.)r   model_configr   r   )r   r   r   r   self r   q/var/www/html/cobodadashboardai.evdpl.com/venv/lib/python3.9/site-packages/langchain_community/llms/deepsparse.py_identifying_params1   s
    zDeepSparse._identifying_paramsc                 C   s   dS )zReturn type of llm.
deepsparser   r   r   r   r   	_llm_type;   s    zDeepSparse._llm_type)valuesr   c                 C   sZ   zddl m} W n ty*   tdY n0 |d p6i }|jf d|d d||d< |S )	z2Validate that ``deepsparse`` package is installed.r   )Pipelinez[Could not import `deepsparse` package. Please install it with `pip install deepsparse[llm]`r   Ztext_generationr   )taskZ
model_pathr   )r   r    ImportErrorcreate)clsr   r    r   r   r   r   validate_environment@   s    

zDeepSparse.validate_environment)promptstoprun_managerkwargsr   c                 K   sl   | j r8d}| jf |||d|D ]}||j7 }q"|}n| jf d|i| jjd j}|durht||}|S )  Generate text from a prompt.
        Args:
            prompt: The prompt to generate text from.
            stop: A list of strings to stop generation when encountered.
        Returns:
            The generated text.
        Example:
            .. code-block:: python

                from langchain_community.llms import DeepSparse
                llm = DeepSparse(model="zoo:nlg/text_generation/codegen_mono-350m/pytorch/huggingface/bigpython_bigquery_thepile/base_quant-none")
                llm.invoke("Tell me a joke.")
        """
        if self.streaming:
            combined_output = ""
            for chunk in self._stream(
                prompt=prompt, stop=stop, run_manager=run_manager, **kwargs
            ):
                combined_output += chunk.text
            text = combined_output
        else:
            # generation_config may be None, so guard the ** expansion.
            text = (
                self.pipeline(sequences=prompt, **(self.generation_config or {}))
                .generations[0]
                .text
            )

        if stop is not None:
            text = enforce_stop_tokens(text, stop)

        return text

    async def _acall(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """Generate text from a prompt; async counterpart of ``_call``.

        Args:
            prompt: The prompt to generate text from.
            stop: A list of strings to stop generation when encountered.
        Returns:
            The generated text.
        """
        if self.streaming:
            combined_output = ""
            async for chunk in self._astream(
                prompt=prompt, stop=stop, run_manager=run_manager, **kwargs
            ):
                combined_output += chunk.text
            text = combined_output
        else:
            text = (
                self.pipeline(sequences=prompt, **(self.generation_config or {}))
                .generations[0]
                .text
            )

        if stop is not None:
            text = enforce_stop_tokens(text, stop)

        return text

    def _stream(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[GenerationChunk]:
        """Yields results objects as they are generated in real time.
        It also calls the callback manager's on_llm_new_token event with
        similar parameters to the OpenAI LLM class method of the same name.
        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.
        Returns:
            A generator representing the stream of tokens being generated.
        Yields:
            A dictionary-like object containing a string token.
        Example:
            .. code-block:: python

                from langchain_community.llms import DeepSparse
                llm = DeepSparse(
                    model="zoo:nlg/text_generation/codegen_mono-350m/pytorch/huggingface/bigpython_bigquery_thepile/base_quant-none",
                    streaming=True
                )
                for chunk in llm.stream("Tell me a joke",
                        stop=["'","
"]):
                    print(chunk, end='', flush=True)  # noqa: T201
        T)r-   r   r   )r/   )tokenNr   r   r   r0   r/   Zon_llm_new_tokenr   r&   r'   r(   r)   Z	inferencer7   r2   r   r   r   r.      s    zDeepSparse._streamc                 K  sV   | j f |dd| j}|D ]4}t|jd jd}|rJ|j|jdI dH  |V  qdS r6   r8   r9   r   r   r   r4      s    zDeepSparse._astream)NN)NN)NN)NN)__name__
__module____qualname____doc__r   __annotations__strr   r   r   r   r	   r   boolpropertyr   r   r   r%   r   r   r3   r   r5   r   r   r.   r   r4   r   r   r   r   r      s`   
	  
)  
)  
)  
r   N)Zlangchain_core.utilsr   typingr   r   r   r   r   r   r	   Zpydanticr
   Zlangchain_core.callbacksr   r   Z#langchain_core.language_models.llmsr   Zlangchain_community.llms.utilsr   Zlangchain_core.outputsr   r   r   r   r   r   <module>   s   $