# langchain_community/llms/ollama.py
from __future__ import annotations

import json
from typing import (
    Any,
    AsyncIterator,
    Callable,
    Dict,
    Iterator,
    List,
    Mapping,
    Optional,
    Tuple,
    Union,
)

import aiohttp
import requests
from langchain_core._api.deprecation import deprecated
from langchain_core.callbacks import (
    AsyncCallbackManagerForLLMRun,
    CallbackManagerForLLMRun,
)
from langchain_core.language_models import BaseLanguageModel
from langchain_core.language_models.llms import BaseLLM
from langchain_core.outputs import GenerationChunk, LLMResult
from pydantic import ConfigDict


def _stream_response_to_generation_chunk(
    stream_response: str,
) -> GenerationChunk:
    """Convert a stream response to a generation chunk."""
    parsed_response = json.loads(stream_response)
    generation_info = parsed_response if parsed_response.get("done") is True else None
    return GenerationChunk(
        text=parsed_response.get("response", ""), generation_info=generation_info
    )


class OllamaEndpointNotFoundError(Exception):
    """Raised when the Ollama endpoint is not found."""


class _OllamaCommon(BaseLanguageModel):
    base_url: str = "http://localhost:11434"
    """Base url the model is hosted under."""

    model: str = "llama2"
    """Model name to use."""

    # Sampling / runtime options, forwarded to Ollama inside the nested
    # "options" field of the request payload (see _default_params below).
    mirostat: Optional[int] = None
    mirostat_eta: Optional[float] = None
    mirostat_tau: Optional[float] = None
    num_ctx: Optional[int] = None
    num_gpu: Optional[int] = None
    num_thread: Optional[int] = None
    num_predict: Optional[int] = None
    repeat_last_n: Optional[int] = None
    repeat_penalty: Optional[float] = None
    temperature: Optional[float] = None
    stop: Optional[List[str]] = None
    tfs_z: Optional[float] = None
    top_k: Optional[int] = None
    top_p: Optional[float] = None

    # Top-level request fields.
    system: Optional[str] = None
    template: Optional[str] = None
    format: Optional[str] = None
    timeout: Optional[int] = None
    keep_alive: Optional[Union[int, str]] = None
    raw: Optional[bool] = None

    # Transport settings: extra HTTP headers and requests-style auth.
    headers: Optional[dict] = None
    auth: Union[Callable, Tuple, None] = None

    @property
    def _default_params(self) -> Dict[str, Any]:
        """Get the default parameters for calling Ollama."""
        return {
            "model": self.model,
            "format": self.format,
            "options": {
                "mirostat": self.mirostat,
                "mirostat_eta": self.mirostat_eta,
                "mirostat_tau": self.mirostat_tau,
                "num_ctx": self.num_ctx,
                "num_gpu": self.num_gpu,
                "num_thread": self.num_thread,
                "num_predict": self.num_predict,
                "repeat_last_n": self.repeat_last_n,
                "repeat_penalty": self.repeat_penalty,
                "temperature": self.temperature,
                "stop": self.stop,
                "tfs_z": self.tfs_z,
                "top_k": self.top_k,
                "top_p": self.top_p,
            },
            "system": self.system,
            "template": self.template,
            "keep_alive": self.keep_alive,
            "raw": self.raw,
        }

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {**{"model": self.model, "format": self.format}, **self._default_params}
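
    # Shape sketch of the params dict built above (values depend on the
    # instance fields; "options" is the nested dict Ollama expects):
    #
    #     {"model": "llama2", "format": None,
    #      "options": {"temperature": None, ..., "stop": None},
    #      "system": None, "template": None, "keep_alive": None, "raw": None}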

    def _create_generate_stream(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        images: Optional[List[str]] = None,
        **kwargs: Any,
    ) -> Iterator[str]:
        payload = {"prompt": prompt, "images": images}
        yield from self._create_stream(
            payload=payload,
            stop=stop,
            api_url=f"{self.base_url}/api/generate",
            **kwargs,
        )

    async def _acreate_generate_stream(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        images: Optional[List[str]] = None,
        **kwargs: Any,
    ) -> AsyncIterator[str]:
        payload = {"prompt": prompt, "images": images}
        async for item in self._acreate_stream(
            payload=payload,
            stop=stop,
            api_url=f"{self.base_url}/api/generate",
            **kwargs,
        ):
            yield item
    def _create_stream(
        self,
        api_url: str,
        payload: Any,
        stop: Optional[List[str]] = None,
        **kwargs: Any,
    ) -> Iterator[str]:
        if self.stop is not None and stop is not None:
            raise ValueError("`stop` found in both the input and default params.")
        elif self.stop is not None:
            stop = self.stop

        params = self._default_params

        # Per-call kwargs override the matching top-level defaults.
        for key in self._default_params:
            if key in kwargs:
                params[key] = kwargs[key]

        # An explicit "options" dict wins wholesale; otherwise merge the stop
        # tokens and any unrecognized kwargs into the default options.
        if "options" in kwargs:
            params["options"] = kwargs["options"]
        else:
            params["options"] = {
                **params["options"],
                "stop": stop,
                **{k: v for k, v in kwargs.items() if k not in self._default_params},
            }

        # Chat-style payloads carry "messages"; generate-style payloads carry
        # "prompt" and optional "images".
        if payload.get("messages"):
            request_payload = {"messages": payload.get("messages", []), **params}
        else:
            request_payload = {
                "prompt": payload.get("prompt"),
                "images": payload.get("images", []),
                **params,
            }

        response = requests.post(
            url=api_url,
            headers={
                "Content-Type": "application/json",
                **(self.headers if isinstance(self.headers, dict) else {}),
            },
            auth=self.auth,
            json=request_payload,
            stream=True,
            timeout=self.timeout,
        )
        response.encoding = "utf-8"
        if response.status_code != 200:
            if response.status_code == 404:
                raise OllamaEndpointNotFoundError(
                    "Ollama call failed with status code 404. "
                    "Maybe your model is not found and you should pull the model "
                    f"with `ollama pull {self.model}`."
                )
            else:
                optional_detail = response.text
                raise ValueError(
                    f"Ollama call failed with status code {response.status_code}."
                    f" Details: {optional_detail}"
                )
        return response.iter_lines(decode_unicode=True)
    async def _acreate_stream(
        self,
        api_url: str,
        payload: Any,
        stop: Optional[List[str]] = None,
        **kwargs: Any,
    ) -> AsyncIterator[str]:
        # Async mirror of _create_stream, using aiohttp instead of requests.
        if self.stop is not None and stop is not None:
            raise ValueError("`stop` found in both the input and default params.")
        elif self.stop is not None:
            stop = self.stop

        params = self._default_params

        for key in self._default_params:
            if key in kwargs:
                params[key] = kwargs[key]

        if "options" in kwargs:
            params["options"] = kwargs["options"]
        else:
            params["options"] = {
                **params["options"],
                "stop": stop,
                **{k: v for k, v in kwargs.items() if k not in self._default_params},
            }

        if payload.get("messages"):
            request_payload = {"messages": payload.get("messages", []), **params}
        else:
            request_payload = {
                "prompt": payload.get("prompt"),
                "images": payload.get("images", []),
                **params,
            }

        async with aiohttp.ClientSession() as session:
            async with session.post(
                url=api_url,
                headers={
                    "Content-Type": "application/json",
                    **(self.headers if isinstance(self.headers, dict) else {}),
                },
                auth=self.auth,
                json=request_payload,
                timeout=self.timeout,
            ) as response:
                if response.status != 200:
                    if response.status == 404:
                        raise OllamaEndpointNotFoundError(
                            "Ollama call failed with status code 404."
                        )
                    else:
                        optional_detail = await response.text()
                        raise ValueError(
                            f"Ollama call failed with status code {response.status}."
                            f" Details: {optional_detail}"
                        )
                async for line in response.content:
                    yield line.decode("utf-8")

    def _stream_with_aggregation(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        verbose: bool = False,
        **kwargs: Any,
    ) -> GenerationChunk:
        # Fold the streamed chunks into one; GenerationChunk supports "+".
        final_chunk: Optional[GenerationChunk] = None
        for stream_resp in self._create_generate_stream(prompt, stop, **kwargs):
            if stream_resp:
                chunk = _stream_response_to_generation_chunk(stream_resp)
                if final_chunk is None:
                    final_chunk = chunk
                else:
                    final_chunk += chunk
                if run_manager:
                    run_manager.on_llm_new_token(
                        chunk.text,
                        verbose=verbose,
                    )
        if final_chunk is None:
            raise ValueError("No data received from Ollama stream.")

        return final_chunk

    async def _astream_with_aggregation(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        verbose: bool = False,
        **kwargs: Any,
    ) -> GenerationChunk:
        final_chunk: Optional[GenerationChunk] = None
        async for stream_resp in self._acreate_generate_stream(prompt, stop, **kwargs):
            if stream_resp:
                chunk = _stream_response_to_generation_chunk(stream_resp)
                if final_chunk is None:
                    final_chunk = chunk
                else:
                    final_chunk += chunk
                if run_manager:
                    await run_manager.on_llm_new_token(
                        chunk.text,
                        verbose=verbose,
                    )
        if final_chunk is None:
            raise ValueError("No data received from Ollama stream.")

        return final_chunk


@deprecated(
    since="0.3.1",
    removal="1.0.0",
    alternative_import="langchain_ollama.OllamaLLM",
)
class Ollama(BaseLLM, _OllamaCommon):
    """Ollama locally runs large language models.

    To use, follow the instructions at https://ollama.ai/.

    Example:
        .. code-block:: python

            from langchain_community.llms import Ollama

            ollama = Ollama(model="llama2")
    """

    model_config = ConfigDict(
        extra="forbid",
    )

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "ollama-llm"

    def _generate(
        self,
        prompts: List[str],
        stop: Optional[List[str]] = None,
        images: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> LLMResult:
        """Call out to Ollama's generate endpoint.

        Args:
            prompts: The prompts to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            The string generated by the model.

        Example:
            .. code-block:: python

                response = ollama("Tell me a joke.")
        """
        generations = []
        for prompt in prompts:
            final_chunk = super()._stream_with_aggregation(
                prompt,
                stop=stop,
                images=images,
                run_manager=run_manager,
                verbose=self.verbose,
                **kwargs,
            )
            generations.append([final_chunk])
        return LLMResult(generations=generations)

    async def _agenerate(
        self,
        prompts: List[str],
        stop: Optional[List[str]] = None,
        images: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> LLMResult:
        """Call out to Ollama's generate endpoint.

        Args:
            prompts: The prompts to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            The string generated by the model.

        Example:
            .. code-block:: python

                response = ollama("Tell me a joke.")
        """
        generations = []
        for prompt in prompts:
            final_chunk = await super()._astream_with_aggregation(
                prompt,
                stop=stop,
                images=images,
                run_manager=run_manager,
                verbose=self.verbose,
                **kwargs,
            )
            generations.append([final_chunk])
        return LLMResult(generations=generations)

    def _stream(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[GenerationChunk]:
        for stream_resp in self._create_generate_stream(prompt, stop, **kwargs):
            if stream_resp:
                chunk = _stream_response_to_generation_chunk(stream_resp)
                if run_manager:
                    run_manager.on_llm_new_token(
                        chunk.text,
                        verbose=self.verbose,
                    )
                yield chunk

    async def _astream(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> AsyncIterator[GenerationChunk]:
        async for stream_resp in self._acreate_generate_stream(prompt, stop, **kwargs):
            if stream_resp:
                chunk = _stream_response_to_generation_chunk(stream_resp)
                if run_manager:
                    await run_manager.on_llm_new_token(
                        chunk.text,
                        verbose=self.verbose,
                    )
                yield chunk
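

# A minimal usage sketch (assumptions for illustration: a local Ollama server
# is running on the default port and the "llama2" model has been pulled;
# `invoke` and `stream` are the standard LangChain runnable entry points
# inherited from BaseLLM).
if __name__ == "__main__":
    llm = Ollama(model="llama2", temperature=0.1)
    print(llm.invoke("Tell me a joke."))  # one aggregated completion
    for token in llm.stream("Why is the sky blue?"):  # token-by-token
        print(token, end="", flush=True)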