from __future__ import annotations

from typing import (
    TYPE_CHECKING,
    Any,
    Callable,
    Dict,
    Optional,
    Sequence,
    Type,
    Union,
)

from langchain.agents.openai_assistant.base import OpenAIAssistantRunnable, OutputType
from langchain_core._api import beta
from langchain_core.callbacks import CallbackManager
from langchain_core.load import dumpd
from langchain_core.runnables import RunnableConfig, ensure_config
from langchain_core.tools import BaseTool
from langchain_core.utils.function_calling import convert_to_openai_tool
from pydantic import BaseModel, Field, model_validator
from typing_extensions import Self

if TYPE_CHECKING:
    import openai
    from openai._types import NotGiven
    from openai.types.beta.assistant import ToolResources as AssistantToolResources


def _get_openai_client() -> openai.OpenAI:
    """Get the OpenAI client.

    Returns:
        openai.OpenAI: OpenAI client

    Raises:
        ImportError: If `openai` is not installed.
        AttributeError: If the installed `openai` version is not compatible.
    """
    try:
        import openai

        return openai.OpenAI(default_headers={"OpenAI-Beta": "assistants=v2"})
    except ImportError as e:
        raise ImportError(
            "Unable to import openai, please install with `pip install openai`."
        ) from e
    except AttributeError as e:
        raise AttributeError(
            "Please make sure you are using a v1.23-compatible version of openai. "
            'You can install with `pip install "openai>=1.23"`.'
        ) from e


def _get_openai_async_client() -> openai.AsyncOpenAI:
    """Get the async OpenAI client.

    Returns:
        openai.AsyncOpenAI: Async OpenAI client

    Raises:
        ImportError: If `openai` is not installed.
        AttributeError: If the installed `openai` version is not compatible.
    """
    try:
        import openai

        return openai.AsyncOpenAI(default_headers={"OpenAI-Beta": "assistants=v2"})
    except ImportError as e:
        raise ImportError(
            "Unable to import openai, please install with `pip install openai`."
        ) from e
    except AttributeError as e:
        raise AttributeError(
            "Please make sure you are using a v1.23-compatible version of openai. "
            'You can install with `pip install "openai>=1.23"`.'
        ) from e


def _convert_file_ids_into_attachments(file_ids: list) -> list:
    """Convert file_ids into attachments.

    File search and Code interpreter will be turned on by default.

    Args:
        file_ids (list): List of file_ids that need to be converted into attachments.

    Returns:
        list: List of attachments converted from file_ids.
    """
    attachments = []
    for id in file_ids:
        attachments.append(
            {
                "file_id": id,
                "tools": [{"type": "file_search"}, {"type": "code_interpreter"}],
            }
        )
    return attachments


def _is_assistants_builtin_tool(
    tool: Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool],
) -> bool:
    """Determine if tool corresponds to OpenAI Assistants built-in.

    Args:
        tool (Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]):
            Tool that needs to be determined.

    Returns:
        A boolean response of true or false indicating if the tool corresponds to
            OpenAI Assistants built-in.
    """
    assistants_builtin_tools = ("code_interpreter", "retrieval", "file_search")
    return (
        isinstance(tool, dict)
        and ("type" in tool)
        and (tool["type"] in assistants_builtin_tools)
    )
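

# Illustrative sketch (added for clarity; not part of the original module and kept as a
# comment so it is never executed on import). A dict describing one of the built-in
# Assistants tools is detected here and passed through unchanged by
# _get_assistants_tool below, while a LangChain tool or plain function is converted to
# the OpenAI function-tool schema instead. The BaseTool instance is assumed:
#
#     _is_assistants_builtin_tool({"type": "code_interpreter"})  # -> True
#     _is_assistants_builtin_tool(my_base_tool)                  # -> False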


def _get_assistants_tool(
    tool: Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool],
) -> Dict[str, Any]:
    """Convert a raw function/class to an OpenAI tool.

    Note that OpenAI Assistants supports several built-in tools,
    such as "code_interpreter" and "retrieval."

    Args:
        tool (Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]):
            Tools or functions that need to be converted to OpenAI tools.

    Returns:
        Dict[str, Any]: A dictionary of tools that are converted into OpenAI tools.
    """
    if _is_assistants_builtin_tool(tool):
        return tool
    else:
        return convert_to_openai_tool(tool)


@beta()
class OpenAIAssistantV2Runnable(OpenAIAssistantRunnable):
    """Run an OpenAI Assistant.

    Attributes:
        client (Any): OpenAI or AzureOpenAI client.
        async_client (Any): Async OpenAI or AzureOpenAI client.
        assistant_id (str): OpenAI assistant ID.
        check_every_ms (float): Frequency to check progress in milliseconds.
        as_agent (bool): Whether to use the assistant as a LangChain agent.

    Example using OpenAI tools:
        .. code-block:: python

            from langchain.agents.openai_assistant import OpenAIAssistantV2Runnable

            assistant = OpenAIAssistantV2Runnable.create_assistant(
                name="math assistant",
                instructions="You are a personal math tutor. Write and run code to answer math questions.",
                tools=[{"type": "code_interpreter"}],
                model="gpt-4-1106-preview"
            )
            output = assistant.invoke({"content": "What's 10 - 4 raised to the 2.7"})

    Example using custom tools and AgentExecutor:
        .. code-block:: python

            from langchain.agents.openai_assistant import OpenAIAssistantV2Runnable
            from langchain.agents import AgentExecutor
            from langchain.tools import E2BDataAnalysisTool


            tools = [E2BDataAnalysisTool(api_key="...")]
            agent = OpenAIAssistantV2Runnable.create_assistant(
                name="langchain assistant e2b tool",
                instructions="You are a personal math tutor. Write and run code to answer math questions.",
                tools=tools,
                model="gpt-4-1106-preview",
                as_agent=True
            )

            agent_executor = AgentExecutor(agent=agent, tools=tools)
            agent_executor.invoke({"content": "Analyze the data..."})

    Example using custom tools and custom execution:
        .. code-block:: python

            from langchain.agents.openai_assistant import OpenAIAssistantV2Runnable
            from langchain.agents import AgentExecutor
            from langchain_core.agents import AgentFinish
            from langchain.tools import E2BDataAnalysisTool


            tools = [E2BDataAnalysisTool(api_key="...")]
            agent = OpenAIAssistantV2Runnable.create_assistant(
                name="langchain assistant e2b tool",
                instructions="You are a personal math tutor. Write and run code to answer math questions.",
                tools=tools,
                model="gpt-4-1106-preview",
                as_agent=True
            )

            def execute_agent(agent, tools, input):
                tool_map = {tool.name: tool for tool in tools}
                response = agent.invoke(input)
                while not isinstance(response, AgentFinish):
                    tool_outputs = []
                    for action in response:
                        tool_output = tool_map[action.tool].invoke(action.tool_input)
                        tool_outputs.append({"output": tool_output, "tool_call_id": action.tool_call_id})
                    response = agent.invoke(
                        {
                            "tool_outputs": tool_outputs,
                            "run_id": action.run_id,
                            "thread_id": action.thread_id
                        }
                    )

                return response

            response = execute_agent(agent, tools, {"content": "What's 10 - 4 raised to the 2.7"})
            next_response = execute_agent(agent, tools, {"content": "now add 17.241", "thread_id": response.thread_id})

    """

    client: Any = Field(default_factory=_get_openai_client)
    async_client: Any = None
    assistant_id: str
    check_every_ms: float = 1000.0
    as_agent: bool = False

    @model_validator(mode="after")
    def validate_async_client(self) -> Self:
        """Validate that the async client is set, otherwise initialize it."""
        if self.async_client is None:
            import openai

            api_key = self.client.api_key
            self.async_client = openai.AsyncOpenAI(api_key=api_key)
        return self

    @classmethod
    def create_assistant(
        cls,
        name: str,
        instructions: str,
        tools: Sequence[Union[BaseTool, dict]],
        model: str,
        *,
        model_kwargs: dict[str, float] = {},
        client: Optional[Union[openai.OpenAI, openai.AzureOpenAI]] = None,
        tool_resources: Optional[Union[AssistantToolResources, dict, NotGiven]] = None,
        extra_body: Optional[object] = None,
        **kwargs: Any,
    ) -> OpenAIAssistantRunnable:
        """Create an OpenAI Assistant and instantiate the Runnable.

        Args:
            name (str): Assistant name.
            instructions (str): Assistant instructions.
            tools (Sequence[Union[BaseTool, dict]]): Assistant tools. Can be passed
                in OpenAI format or as BaseTools.
            tool_resources (Optional[Union[AssistantToolResources, dict, NotGiven]]):
                Assistant tool resources. Can be passed in OpenAI format.
            model (str): Assistant model to use.
            client (Optional[Union[openai.OpenAI, openai.AzureOpenAI]]): OpenAI or
                AzureOpenAI client. Will create default OpenAI client (Assistant v2)
                if not specified.
            model_kwargs: Additional model arguments. Only available for temperature
                and top_p parameters.
            extra_body: Additional body parameters to be passed to the assistant.

        Returns:
            OpenAIAssistantRunnable: The configured assistant runnable.
        """
        client = client or _get_openai_client()
        if tool_resources is None:
            from openai._types import NOT_GIVEN

            tool_resources = NOT_GIVEN
        assistant = client.beta.assistants.create(
            name=name,
            instructions=instructions,
            tools=[_get_assistants_tool(tool) for tool in tools],
            tool_resources=tool_resources,
            model=model,
            extra_body=extra_body,
            **model_kwargs,
        )
        return cls(assistant_id=assistant.id, client=client, **kwargs)

    def invoke(
        self, input: dict, config: Optional[RunnableConfig] = None, **kwargs: Any
    ) -> OutputType:
        """Invoke the assistant.

        Args:
            input (dict): Runnable input dict that can have:
                content: User message when starting a new run.
                thread_id: Existing thread to use.
                run_id: Existing run to use. Should only be supplied when providing
                    the tool output for a required action after an initial invocation.
                file_ids: (deprecated) File ids to include in new run. Use
                    'attachments' instead.
                attachments: Assistant files to include in new run. (v2 API).
                message_metadata: Metadata to associate with new message.
                thread_metadata: Metadata to associate with new thread. Only relevant
                    when new thread being created.
                instructions: Additional run instructions.
                model: Override Assistant model for this run.
                tools: Override Assistant tools for this run.
                tool_resources: Override Assistant tool resources for this run (v2 API).
                run_metadata: Metadata to associate with new run.
            config (Optional[RunnableConfig]): Configuration for the run.

        Returns:
            OutputType: If self.as_agent, will return
                Union[List[OpenAIAssistantAction], OpenAIAssistantFinish]. Otherwise,
                will return OpenAI types
                Union[List[ThreadMessage], List[RequiredActionFunctionToolCall]].

        Raises:
            BaseException: If an error occurs during the invocation.
        """
        config = ensure_config(config)
        callback_manager = CallbackManager.configure(
            inheritable_callbacks=config.get("callbacks"),
            inheritable_tags=config.get("tags"),
            inheritable_metadata=config.get("metadata"),
        )
        run_manager = callback_manager.on_chain_start(
            dumpd(self), input, name=config.get("run_name") or self.get_name()
        )

        # Convert any deprecated file_ids into v2-style attachments.
        files = _convert_file_ids_into_attachments(kwargs.get("file_ids", []))
        attachments = kwargs.get("attachments", []) + files

        try:
            # Being run within an AgentExecutor and there are tool outputs to submit.
            if self.as_agent and input.get("intermediate_steps"):
                tool_outputs = self._parse_intermediate_steps(
                    input["intermediate_steps"]
                )
                run = self.client.beta.threads.runs.submit_tool_outputs(**tool_outputs)
            # Starting a new thread and a new run.
            elif "thread_id" not in input:
                thread = {
                    "messages": [
                        {
                            "role": "user",
                            "content": input["content"],
                            "attachments": attachments,
                            "metadata": input.get("message_metadata"),
                        }
                    ],
                    "metadata": input.get("thread_metadata"),
                }
                run = self._create_thread_and_run(input, thread)
            # Starting a new run in an existing thread.
            elif "run_id" not in input:
                _ = self.client.beta.threads.messages.create(
                    input["thread_id"],
                    content=input["content"],
                    role="user",
                    attachments=attachments,
                    metadata=input.get("message_metadata"),
                )
                run = self._create_run(input)
            # Submitting tool outputs to an existing run, outside the AgentExecutor
            # framework.
            else:
                run = self.client.beta.threads.runs.submit_tool_outputs(**input)
            run = self._wait_for_run(run.id, run.thread_id)
        except BaseException as e:
            run_manager.on_chain_error(e)
            raise e
        try:
            response = self._get_response(run)
        except BaseException as e:
            run_manager.on_chain_error(e, metadata=run.dict())
            raise e
        else:
            run_manager.on_chain_end(response)
            return response

    @classmethod
    async def acreate_assistant(
        cls,
        name: str,
        instructions: str,
        tools: Sequence[Union[BaseTool, dict]],
        model: str,
        *,
        async_client: Optional[
            Union[openai.AsyncOpenAI, openai.AsyncAzureOpenAI]
        ] = None,
        tool_resources: Optional[Union[AssistantToolResources, dict, NotGiven]] = None,
        **kwargs: Any,
    ) -> OpenAIAssistantRunnable:
        """Create an AsyncOpenAI Assistant and instantiate the Runnable.

        Args:
            name (str): Assistant name.
            instructions (str): Assistant instructions.
            tools (Sequence[Union[BaseTool, dict]]): Assistant tools. Can be passed
                in OpenAI format or as BaseTools.
            tool_resources (Optional[Union[AssistantToolResources, dict, NotGiven]]):
                Assistant tool resources. Can be passed in OpenAI format.
            model (str): Assistant model to use.
            async_client (Optional[Union[openai.AsyncOpenAI, openai.AsyncAzureOpenAI]]):
                OpenAI or AzureOpenAI async client. Will create a default async client
                (Assistant v2) if not specified.

        Returns:
            OpenAIAssistantRunnable: The configured assistant runnable.
        """
        async_client = async_client or _get_openai_async_client()
        if tool_resources is None:
            from openai._types import NOT_GIVEN

            tool_resources = NOT_GIVEN
        openai_tools = [_get_assistants_tool(tool) for tool in tools]

        assistant = await async_client.beta.assistants.create(
            name=name,
            instructions=instructions,
            tools=openai_tools,
            tool_resources=tool_resources,
            model=model,
        )
        return cls(assistant_id=assistant.id, async_client=async_client, **kwargs)

    async def ainvoke(
        self, input: dict, config: Optional[RunnableConfig] = None, **kwargs: Any
    ) -> OutputType:
        """Async invoke assistant.

        Args:
            input (dict): Runnable input dict that can have:
                content: User message when starting a new run.
                thread_id: Existing thread to use.
                run_id: Existing run to use. Should only be supplied when providing
                    the tool output for a required action after an initial invocation.
                file_ids: (deprecated) File ids to include in new run. Use
                    'attachments' instead.
                attachments: Assistant files to include in new run. (v2 API).
                message_metadata: Metadata to associate with new message.
                thread_metadata: Metadata to associate with new thread. Only relevant
                    when new thread being created.
                instructions: Additional run instructions.
                model: Override Assistant model for this run.
                tools: Override Assistant tools for this run.
                tool_resources: Override Assistant tool resources for this run (v2 API).
                run_metadata: Metadata to associate with new run.
            config (Optional[RunnableConfig]): Configuration for the run.

        Returns:
            OutputType: If self.as_agent, will return
                Union[List[OpenAIAssistantAction], OpenAIAssistantFinish]. Otherwise,
                will return OpenAI types
                Union[List[ThreadMessage], List[RequiredActionFunctionToolCall]].

        Raises:
            BaseException: If an error occurs during the invocation.
        """
        config = config or {}
        callback_manager = CallbackManager.configure(
            inheritable_callbacks=config.get("callbacks"),
            inheritable_tags=config.get("tags"),
            inheritable_metadata=config.get("metadata"),
        )
        run_manager = callback_manager.on_chain_start(
            dumpd(self), input, name=config.get("run_name") or self.get_name()
        )

        # Convert any deprecated file_ids into v2-style attachments.
        files = _convert_file_ids_into_attachments(kwargs.get("file_ids", []))
        attachments = kwargs.get("attachments", []) + files

        try:
            # Being run within an AgentExecutor and there are tool outputs to submit.
            if self.as_agent and input.get("intermediate_steps"):
                tool_outputs = self._parse_intermediate_steps(
                    input["intermediate_steps"]
                )
                run = await self.async_client.beta.threads.runs.submit_tool_outputs(
                    **tool_outputs
                )
            # Starting a new thread and a new run.
            elif "thread_id" not in input:
                thread = {
                    "messages": [
                        {
                            "role": "user",
                            "content": input["content"],
                            "attachments": attachments,
                            "metadata": input.get("message_metadata"),
                        }
                    ],
                    "metadata": input.get("thread_metadata"),
                }
                run = await self._acreate_thread_and_run(input, thread)
            # Starting a new run in an existing thread.
            elif "run_id" not in input:
                _ = await self.async_client.beta.threads.messages.create(
                    input["thread_id"],
                    content=input["content"],
                    role="user",
                    attachments=attachments,
                    metadata=input.get("message_metadata"),
                )
                run = await self._acreate_run(input)
            # Submitting tool outputs to an existing run, outside the AgentExecutor
            # framework.
            else:
                run = await self.async_client.beta.threads.runs.submit_tool_outputs(
                    **input
                )
            run = await self._await_for_run(run.id, run.thread_id)
        except BaseException as e:
            run_manager.on_chain_error(e)
            raise e
        try:
            response = self._get_response(run)
        except BaseException as e:
            run_manager.on_chain_error(e, metadata=run.dict())
            raise e
        else:
            run_manager.on_chain_end(response)
            return response

    def _create_run(self, input: dict) -> Any:
        """Create a new run within an existing thread.

        Args:
            input (dict): The input data for the new run.

        Returns:
            Any: The created run object.
        """
        allowed_assistant_params = (
            "instructions",
            "model",
            "tools",
            "tool_resources",
            "run_metadata",
            "truncation_strategy",
            "max_prompt_tokens",
        )
        params = {k: v for k, v in input.items() if k in allowed_assistant_params}
        return self.client.beta.threads.runs.create(
            input["thread_id"],
            assistant_id=self.assistant_id,
            **params,
        )

    def _create_thread_and_run(self, input: dict, thread: dict) -> Any:
        """Create a new thread and run.

        Args:
            input (dict): The input data for the run.
            thread (dict): The thread data to create.

        Returns:
            Any: The created thread and run.
        """
        params = {
            k: v
            for k, v in input.items()
            if k in ("instructions", "model", "tools", "run_metadata")
        }
        if tool_resources := input.get("tool_resources"):
            params["tool_resources"] = tool_resources
        run = self.client.beta.threads.create_and_run(
            assistant_id=self.assistant_id,
            thread=thread,
            **params,
        )
        return run

    async def _acreate_run(self, input: dict) -> Any:
        """Asynchronously create a new run within an existing thread.

        Args:
            input (dict): The input data for the new run.

        Returns:
            Any: The created run object.
        """
        params = {
            k: v
            for k, v in input.items()
            if k in ("instructions", "model", "tools", "tool_resources", "run_metadata")
        }
        return await self.async_client.beta.threads.runs.create(
            input["thread_id"],
            assistant_id=self.assistant_id,
            **params,
        )

    async def _acreate_thread_and_run(self, input: dict, thread: dict) -> Any:
        """Asynchronously create a new thread and run simultaneously.

        Args:
            input (dict): The input data for the run.
            thread (dict): The thread data to create.

        Returns:
            Any: The created thread and run.
        """
        params = {
            k: v
            for k, v in input.items()
            if k in ("instructions", "model", "tools", "run_metadata")
        }
        if tool_resources := input.get("tool_resources"):
            params["tool_resources"] = tool_resources
        run = await self.async_client.beta.threads.create_and_run(
            assistant_id=self.assistant_id,
            thread=thread,
            **params,
        )
        return run
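

# Illustrative end-to-end sketch (added for clarity; not part of the original module
# and kept as a comment so importing the module has no side effects). The assistant
# name and the questions are hypothetical; the calls mirror the class docstring above,
# and the second invoke shows how a returned thread_id can be passed back in to
# continue the same conversation instead of starting a new thread.
#
#     from langchain.agents.openai_assistant import OpenAIAssistantV2Runnable
#
#     assistant = OpenAIAssistantV2Runnable.create_assistant(
#         name="math assistant",
#         instructions="You are a personal math tutor. Write and run code to answer math questions.",
#         tools=[{"type": "code_interpreter"}],
#         model="gpt-4-1106-preview",
#     )
#     # First turn: no "thread_id" supplied, so a new thread and run are created.
#     output = assistant.invoke({"content": "What's 10 - 4 raised to the 2.7"})
#     # Follow-up turn: reuse the thread returned with the first response.
#     follow_up = assistant.invoke(
#         {"content": "now add 17.241", "thread_id": output[0].thread_id}
#     )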