"""MLX Chat Wrapper."""

from typing import (
    Any,
    Callable,
    Dict,
    Iterator,
    List,
    Literal,
    Optional,
    Sequence,
    Type,
    Union,
)

from langchain_core.callbacks.manager import (
    AsyncCallbackManagerForLLMRun,
    CallbackManagerForLLMRun,
)
from langchain_core.language_models import LanguageModelInput
from langchain_core.language_models.chat_models import BaseChatModel
from langchain_core.messages import (
    AIMessage,
    AIMessageChunk,
    BaseMessage,
    HumanMessage,
    SystemMessage,
)
from langchain_core.outputs import (
    ChatGeneration,
    ChatGenerationChunk,
    ChatResult,
    LLMResult,
)
from langchain_core.runnables import Runnable
from langchain_core.tools import BaseTool
from langchain_core.utils.function_calling import convert_to_openai_tool

from langchain_community.llms.mlx_pipeline import MLXPipeline

DEFAULT_SYSTEM_PROMPT = """You are a helpful, respectful, and honest assistant."""


class ChatMLX(BaseChatModel):
    """MLX chat models.

    Works with `MLXPipeline` LLM.

    To use, you should have the ``mlx-lm`` python package installed.

    Example:
        .. code-block:: python

            from langchain_community.chat_models import ChatMLX
            from langchain_community.llms import MLXPipeline

            llm = MLXPipeline.from_model_id(
                model_id="mlx-community/quantized-gemma-2b-it",
            )
            chat = ChatMLX(llm=llm)

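    A streaming variant of the same example (illustrative; ``stream`` is the
    inherited ``BaseChatModel`` entry point that drives ``_stream`` below):
        .. code-block:: python

            from langchain_core.messages import HumanMessage

            for chunk in chat.stream([HumanMessage(content="What is MLX?")]):
                print(chunk.content, end="", flush=True)
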
    """

    llm: MLXPipeline
    system_message: SystemMessage = SystemMessage(content=DEFAULT_SYSTEM_PROMPT)
    tokenizer: Any = None

    def __init__(self, **kwargs: Any):
        super().__init__(**kwargs)
        self.tokenizer = self.llm.tokenizer

    def _generate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        llm_input = self._to_chat_prompt(messages)
        llm_result = self.llm._generate(
            prompts=[llm_input], stop=stop, run_manager=run_manager, **kwargs
        )
        return self._to_chat_result(llm_result)

    async def _agenerate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        llm_input = self._to_chat_prompt(messages)
        llm_result = await self.llm._agenerate(
            prompts=[llm_input], stop=stop, run_manager=run_manager, **kwargs
        )
        return self._to_chat_result(llm_result)
zChatMLX._agenerateF)r,   tokenizereturn_tensorsr/   c                    sH   |st dt|d ts"t d fdd|D } jj||d|dS )zHConvert a list of messages into a prompt format expected by wrapped LLM.z+At least one HumanMessage must be provided!z$Last message must be a HumanMessage!c                    s   g | ]}  |qS r*   )_to_chatml_format).0mr'   r*   r+   
<listcomp>o       z+ChatMLX._to_chat_prompt.<locals>.<listcomp>T)r8   Zadd_generation_promptr9   )
ValueError
isinstancer   r#   Zapply_chat_template)r'   r,   r8   r9   Zmessages_dictsr*   r>   r+   r1   b   s    zChatMLX._to_chat_prompt)messager/   c                 C   sN   t |trd}n2t |tr d}n"t |tr0d}ntdt| ||jdS )z+Convert LangChain message to ChatML format.systemZ	assistantuserzUnknown message type: )roler!   )rB   r   r   r   rA   typer!   )r'   rC   rF   r*   r*   r+   r;   w   s    


zChatMLX._to_chatml_format)r6   r/   c                 C   sB   g }| j d D ]$}tt|jd|jd}|| qt|| jdS )Nr   r    )rC   generation_info)generations

    @staticmethod
    def _to_chat_result(llm_result: LLMResult) -> ChatResult:
        chat_generations = []

        for g in llm_result.generations[0]:
            chat_generation = ChatGeneration(
                message=AIMessage(content=g.text), generation_info=g.generation_info
            )
            chat_generations.append(chat_generation)

        return ChatResult(
            generations=chat_generations, llm_output=llm_result.llm_output
        )

    @property
    def _llm_type(self) -> str:
        return "mlx-chat-wrapper"

    def _stream(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[ChatGenerationChunk]:
        try:
            import mlx.core as mx
            from mlx_lm.utils import generate_step
        except ImportError:
            raise ImportError(
                "Could not import mlx_lm python package. "
                "Please install it with `pip install mlx_lm`."
            )

        model_kwargs = kwargs.get("model_kwargs", self.llm.pipeline_kwargs)
        temp: float = model_kwargs.get("temp", 0.0)
        max_new_tokens: int = model_kwargs.get("max_tokens", 100)
        repetition_penalty: Optional[float] = model_kwargs.get(
            "repetition_penalty", None
        )
        repetition_context_size: Optional[int] = model_kwargs.get(
            "repetition_context_size", None
        )

        llm_input = self._to_chat_prompt(messages, tokenize=True, return_tensors="np")

        prompt_tokens = mx.array(llm_input[0])

        eos_token_id = self.tokenizer.eos_token_id

        for (token, prob), n in zip(
            generate_step(
                prompt_tokens,
                self.llm.model,
                temp=temp,
                repetition_penalty=repetition_penalty,
                repetition_context_size=repetition_context_size,
            ),
            range(max_new_tokens),
        ):
            # Decode the newly sampled token into text.
            text: Optional[str] = None
            if not isinstance(token, int):
                text = self.tokenizer.decode(token.item())
            else:
                text = self.tokenizer.decode(token)

            # Yield the text as a chunk, notifying any callback manager.
            if text:
                chunk = ChatGenerationChunk(message=AIMessageChunk(content=text))
                if run_manager:
                    run_manager.on_llm_new_token(text, chunk=chunk)
                yield chunk

            # Stop on EOS or when a stop sequence is produced.
            if token == eos_token_id or (stop is not None and text in stop):
                break
zChatMLX._stream)tool_choiceautonone)toolsre   r$   r/   c                   s   dd |D }|dur|rt |dkr:tdt | dt|tr\|dvrdd	|id
}nlt|trp|d }nXt|tr|d d d	 |d d	 krtd| d|d d d	  dntd| ||d< t jf d|i|S )a*  Bind tool-like objects to this chat model.

        Assumes model is compatible with OpenAI tool-calling API.

        Args:
            tools: A list of tool definitions to bind to this chat model.
                Supports any tool definition handled by
                :meth:`langchain_core.utils.function_calling.convert_to_openai_tool`.
            tool_choice: Which tool to require the model to call.
                Must be the name of the single provided function or
                "auto" to automatically determine which function to call
                (if any), or a dict of the form:
                {"type": "function", "function": {"name": <<tool_name>>}}.
            **kwargs: Any additional parameters to pass to the
                :class:`~langchain_core.runnables.Runnable` constructor.
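
        Example (illustrative; the ``multiply`` tool is hypothetical, the
        ``chat`` instance comes from the class-level example, and the wrapped
        model must itself support tool calling):
            .. code-block:: python

                from langchain_core.tools import tool

                @tool
                def multiply(a: int, b: int) -> int:
                    '''Multiply two integers.'''
                    return a * b

                chat_with_tools = chat.bind_tools(
                    [multiply], tool_choice="multiply"
                )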
        """
        formatted_tools = [convert_to_openai_tool(tool) for tool in tools]
        if tool_choice is not None and tool_choice:
            if len(formatted_tools) != 1:
                raise ValueError(
                    "When specifying `tool_choice`, you must provide exactly one "
                    f"tool. Received {len(formatted_tools)} tools."
                )
            if isinstance(tool_choice, str):
                if tool_choice not in ("auto", "none"):
                    tool_choice = {
                        "type": "function",
                        "function": {"name": tool_choice},
                    }
            elif isinstance(tool_choice, bool):
                tool_choice = formatted_tools[0]
            elif isinstance(tool_choice, dict):
                if (
                    formatted_tools[0]["function"]["name"]
                    != tool_choice["function"]["name"]
                ):
                    raise ValueError(
                        f"Tool choice {tool_choice} was specified, but the only "
                        f"provided tool was "
                        f"{formatted_tools[0]['function']['name']}."
                    )
            else:
                raise ValueError(
                    "Unrecognized tool_choice type. Expected str, bool or dict. "
                    f"Received: {tool_choice}"
                )
            kwargs["tool_choice"] = tool_choice
        return super().bind(tools=formatted_tools, **kwargs)
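

if __name__ == "__main__":
    # Minimal smoke-test sketch: assumes an Apple-silicon machine with
    # ``mlx-lm`` installed, and reuses the model id from the class docstring
    # above; swap in any chat-tuned MLX model available locally.
    demo_llm = MLXPipeline.from_model_id(
        model_id="mlx-community/quantized-gemma-2b-it",
    )
    demo_chat = ChatMLX(llm=demo_llm)
    response = demo_chat.invoke([HumanMessage(content="Hi, who are you?")])
    print(response.content)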