from __future__ import annotations

import json
import re
from collections import defaultdict
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union

import requests
from langchain_core._api import deprecated
from langchain_core.callbacks import CallbackManagerForChainRun
from langchain_core.language_models import BaseLanguageModel
from langchain_core.output_parsers.openai_functions import JsonOutputFunctionsParser
from langchain_core.prompts import BasePromptTemplate, ChatPromptTemplate
from langchain_core.utils.input import get_colored_text
from requests import Response

from langchain.chains.base import Chain
from langchain.chains.llm import LLMChain
from langchain.chains.sequential import SequentialChain

if TYPE_CHECKING:
    from langchain_community.utilities.openapi import OpenAPISpec
    from openapi_pydantic import Parameter


def _get_description(o: Any, prefer_short: bool) -> Optional[str]:
    """Return an object's summary or description, preferring the shorter one if asked."""
    summary = getattr(o, "summary", None)
    description = getattr(o, "description", None)
    if prefer_short:
        return summary or description
    return description or summary


def _format_url(url: str, path_params: dict) -> str:
    """Substitute path parameters into a URL template.

    Handles simple, label (``.``) and matrix (``;``) style parameters, with
    optional ``*`` explosion, for list, dict and scalar values.
    """
    expected_path_param = re.findall("{(.*?)}", url)
    new_params = {}
    for param in expected_path_param:
        clean_param = param.lstrip(".;").rstrip("*")
        val = path_params[clean_param]
        if isinstance(val, list):
            if param[0] == ".":
                sep = "." if param[-1] == "*" else ","
                new_val = "." + sep.join(val)
            elif param[0] == ";":
                sep = f"{clean_param}=" if param[-1] == "*" else ","
                new_val = f";{clean_param}=" + sep.join(val)
            else:
                new_val = ",".join(val)
        elif isinstance(val, dict):
            kv_sep = "=" if param[-1] == "*" else ","
            kv_strs = [kv_sep.join((k, v)) for k, v in val.items()]
            if param[0] == ".":
                sep = "."
                new_val = "."
            elif param[0] == ";":
                sep = ";"
                new_val = ";"
            else:
                sep = ","
                new_val = ""
            new_val += sep.join(kv_strs)
        else:
            if param[0] == ".":
                new_val = f".{val}"
            elif param[0] == ";":
                new_val = f";{clean_param}={val}"
            else:
                new_val = val
        new_params[param] = new_val
    return url.format(**new_params)

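# Illustrative behaviour of _format_url for the plain "{param}" case (a sketch,
# not part of the original module); ".":/";"-prefixed templates get label/matrix
# style encoding instead of a bare value:
#
#     _format_url("/items/{id}", {"id": "42"})        # -> "/items/42"
#     _format_url("/items/{id}", {"id": ["1", "2"]})   # -> "/items/1,2"
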
rC   zList[Parameter]r   )paramsspecr   c                 C  s   i }g }| D ]t}|j r$||j }nt|j d j}||}|jrV|jsV|j|_t|jdd||j	< |j
r||j	 qd||dS )Nr   TZexclude_noneobject)type
propertiesrequired)Zparam_schema
get_schemar;   contentvaluesmedia_type_schemar   jsonloadsnamerJ   append)rD   rE   rI   rJ   pschemarN   r!   r!   r"   _openapi_params_to_json_schemaI   s    
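# Illustrative output shape (a sketch, not part of the original module): for a
# single required integer query parameter named "_limit", the helper above would
# return roughly
#
#     {
#         "type": "object",
#         "properties": {"_limit": {"type": "integer", "description": "Limit the number of results"}},
#         "required": ["_limit"],
#     }
#
# The exact keys inside each property are whatever the openapi_pydantic schema
# serializes to with exclude_none=True.
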
rU   z%Tuple[List[Dict[str, Any]], Callable])rE   r   c                   sB  zddl m} W n ty*   tdY n0 | js>g dd fS g }i  | jD ]}dd | |D }| |D ]}i }| ||}| }| |D ]}	|	||	j	|	j
f< qtt}
| D ]\}}|
|d  | qd	d
ddd}| D ]"\}}|
| rt|
| | ||< q| |}|r|jri }|j D ]4\}}|jr:| |j}t|jdd||< q:t|dkrt| d \}}|dkrdnd}|||< n"t|dkrdt| i|d< || ||}|j|jd|dd}|| ||j|j d |d < qpqLd!ddddddd fdd }||fS )"a4  Convert a valid OpenAPI spec to the JSON Schema format expected for OpenAI
        functions.

    Args:
        spec: OpenAPI spec to convert.

    Returns:
        Tuple of the OpenAI functions JSON schema and a default function for executing
            a request based on the OpenAI function schema.
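
    Example:
        A minimal usage sketch (``./openapi.json`` is an assumed local spec file
        and is illustrative only; requires ``langchain-community`` to be
        installed):

        .. code-block:: python

            from langchain_community.utilities.openapi import OpenAPISpec

            spec = OpenAPISpec.from_file("./openapi.json")
            functions, call_api_fn = openapi_spec_to_openai_fn(spec)
            # `functions` can be passed to an OpenAI chat completion as the
            # `functions`/tools payload; `call_api_fn(name, fn_args)` executes the
            # matching HTTP request and returns a `requests.Response`.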
    """
    try:
        from langchain_community.tools import APIOperation
    except ImportError:
        raise ImportError(
            "Could not import langchain_community.tools. "
            "Please install it with `pip install langchain-community`."
        )

    if not spec.paths:
        return [], lambda: None
    functions = []
    _name_to_call_map = {}
    for path in spec.paths:
        path_params = {
            (p.param_in, p.name): p for p in spec.get_parameters_for_path(path)
        }
        for method in spec.get_methods_for_path(path):
            request_args = {}
            op = spec.get_operation(path, method)
            op_params = path_params.copy()
            for param in spec.get_parameters_for_operation(op):
                op_params[(param.param_in, param.name)] = param
            # Group parameters by their location so they map onto the matching
            # `requests.request` keyword argument.
            params_by_type = defaultdict(list)
            for name_loc, p in op_params.items():
                params_by_type[name_loc[0]].append(p)
            param_loc_to_arg_name = {
                "query": "params",
                "header": "headers",
                "cookie": "cookies",
                "path": "path_params",
            }
            for param_loc, arg_name in param_loc_to_arg_name.items():
                if params_by_type[param_loc]:
                    request_args[arg_name] = _openapi_params_to_json_schema(
                        params_by_type[param_loc], spec
                    )
            request_body = spec.get_request_body_for_operation(op)
            # TODO: Support more MIME types.
            if request_body and request_body.content:
                media_types = {}
                for media_type, media_type_object in request_body.content.items():
                    if media_type_object.media_type_schema:
                        schema = spec.get_schema(media_type_object.media_type_schema)
                        media_types[media_type] = json.loads(
                            schema.json(exclude_none=True)
                        )
                if len(media_types) == 1:
                    media_type, schema_dict = list(media_types.items())[0]
                    key = "json" if media_type == "application/json" else "data"
                    request_args[key] = schema_dict
                elif len(media_types) > 1:
                    request_args["data"] = {"anyOf": list(media_types.values())}

            api_op = APIOperation.from_openapi_spec(spec, path, method)
            fn = {
                "name": api_op.operation_id,
                "description": api_op.description,
                "parameters": {"type": "object", "properties": request_args},
            }
            functions.append(fn)
            _name_to_call_map[fn["name"]] = {
                "method": method,
                "url": api_op.base_url + api_op.path,
            }

    def default_call_api(
        name: str,
        fn_args: dict,
        headers: Optional[dict] = None,
        params: Optional[dict] = None,
        **kwargs: Any,
    ) -> Any:
        method = _name_to_call_map[name]["method"]
        url = _name_to_call_map[name]["url"]
        path_params = fn_args.pop("path_params", {})
        url = _format_url(url, path_params)
        if "data" in fn_args and isinstance(fn_args["data"], dict):
            fn_args["data"] = json.dumps(fn_args["data"])
        _kwargs = {**fn_args, **kwargs}
        if headers is not None:
            if "headers" in _kwargs:
                _kwargs["headers"].update(headers)
            else:
                _kwargs["headers"] = headers
        if params is not None:
            if "params" in _kwargs:
                _kwargs["params"].update(params)
            else:
                _kwargs["params"] = params
        return requests.request(method, url, **_kwargs)

    return functions, default_call_api


class SimpleRequestChain(Chain):
    """Chain for making a simple request to an API endpoint."""

    request_method: Callable
    """Method to use for making the request."""
    output_key: str = "response"
    """Key to use for the output of the request."""
    input_key: str = "function"
    """Key to use for the input of the request."""

    @property
    def input_keys(self) -> List[str]:
        return [self.input_key]

    @property
    def output_keys(self) -> List[str]:
        return [self.output_key]

    def _call(
        self,
        inputs: Dict[str, Any],
        run_manager: Optional[CallbackManagerForChainRun] = None,
    ) -> Dict[str, Any]:
        """Run the logic of this chain and return the output."""
        _run_manager = run_manager or CallbackManagerForChainRun.get_noop_manager()
        name = inputs[self.input_key].pop("name")
        args = inputs[self.input_key].pop("arguments")
        _pretty_name = get_colored_text(name, "green")
        _pretty_args = get_colored_text(json.dumps(args, indent=2), "green")
        _text = f"Calling endpoint {_pretty_name} with arguments:\n" + _pretty_args
        _run_manager.on_text(_text)
        api_response: Response = self.request_method(name, args)
        if api_response.status_code != 200:
            response = (
                f"{api_response.status_code}: {api_response.reason}"
                + f"\nFor {name} "
                + f"Called with args: {args.get('params', '')}"
            )
        else:
            try:
                response = api_response.json()
            except Exception:
                response = api_response.text
        return {self.output_key: response}

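# Illustrative wiring of SimpleRequestChain (a sketch, not part of the original
# module). `call_api_fn` is assumed to be the callable returned by
# openapi_spec_to_openai_fn above, and "getPosts" is a hypothetical operation id:
#
#     chain = SimpleRequestChain(request_method=lambda name, args: call_api_fn(name, args))
#     chain.invoke({"function": {"name": "getPosts", "arguments": {"params": {"_limit": 2}}}})
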
@deprecated(
    since="0.2.13",
    message=(
        "This function is deprecated and will be removed in langchain 1.0. "
        "See API reference for replacement: "
        "https://api.python.langchain.com/en/latest/chains/langchain.chains.openai_functions.openapi.get_openapi_chain.html"
    ),
    removal="1.0",
)
def get_openapi_chain(
    spec: Union[OpenAPISpec, str],
    llm: Optional[BaseLanguageModel] = None,
    prompt: Optional[BasePromptTemplate] = None,
    request_chain: Optional[Chain] = None,
    llm_chain_kwargs: Optional[Dict] = None,
    verbose: bool = False,
    headers: Optional[Dict] = None,
    params: Optional[Dict] = None,
    **kwargs: Any,
) -> SequentialChain:
    """Create a chain for querying an API from an OpenAPI spec.

    Note: this function is deprecated. See below for a replacement implementation.
        The benefits of this implementation are:

        - Uses LLM tool calling features to encourage properly-formatted API requests;
        - Includes async support.

        .. code-block:: python

            from typing import Any

            from langchain.chains.openai_functions.openapi import openapi_spec_to_openai_fn
            from langchain_community.utilities.openapi import OpenAPISpec
            from langchain_core.prompts import ChatPromptTemplate
            from langchain_openai import ChatOpenAI

            # Define API spec. Can be JSON or YAML
            api_spec = """
            {
            "openapi": "3.1.0",
            "info": {
                "title": "JSONPlaceholder API",
                "version": "1.0.0"
            },
            "servers": [
                {
                "url": "https://jsonplaceholder.typicode.com"
                }
            ],
            "paths": {
                "/posts": {
                "get": {
                    "summary": "Get posts",
                    "parameters": [
                    {
                        "name": "_limit",
                        "in": "query",
                        "required": false,
                        "schema": {
                        "type": "integer",
                        "example": 2
                        },
                        "description": "Limit the number of results"
                    }
                    ]
                }
                }
            }
            }
            """

            parsed_spec = OpenAPISpec.from_text(api_spec)
            openai_fns, call_api_fn = openapi_spec_to_openai_fn(parsed_spec)
            tools = [
                {"type": "function", "function": fn}
                for fn in openai_fns
            ]

            prompt = ChatPromptTemplate.from_template(
                "Use the provided APIs to respond to this user query:\n\n{query}"
            )
            llm = ChatOpenAI(model="gpt-4o-mini", temperature=0).bind_tools(tools)

            def _execute_tool(message) -> Any:
                if tool_calls := message.tool_calls:
                    tool_call = message.tool_calls[0]
                    response = call_api_fn(name=tool_call["name"], fn_args=tool_call["args"])
                    response.raise_for_status()
                    return response.json()
                else:
                    return message.content

            chain = prompt | llm | _execute_tool

        .. code-block:: python

            response = chain.invoke({"query": "Get me top two posts."})

    Args:
        spec: OpenAPISpec or url/file/text string corresponding to one.
        llm: language model, should be an OpenAI function-calling model, e.g.
            `ChatOpenAI(model="gpt-3.5-turbo-0613")`.
        prompt: Main prompt template to use.
        request_chain: Chain for taking the function's output and executing the request.
    r   r   zqCould not import langchain_community.utilities.openapi. Please install it with `pip install langchain-community`.Nz!Unable to parse spec from source zkMust provide an LLM for this chain.For example,
from langchain_openai import ChatOpenAI
llm = ChatOpenAI()
z>Use the provided API's to respond to this user query:

{query}rt   F)Z	args_onlyr   )r   r   Z
llm_kwargsZoutput_parserr~   r   c                   s    | |dS )N)r\   rD   r!   )rQ   r   Zcall_api_fnr\   rD   r!   r"   rX     s   z#get_openapi_chain.<locals>.<lambda>)r|   r   r}   )chainsZinput_variablesZoutput_variablesr   )%langchain_community.utilities.openapir   ro   r:   r$   Zfrom_url	from_fileZ	from_textr   
ValueErrorrz   r   Zfrom_templater   r   r{   r   r   )rE   r   r   r   r   r   r\   rD   rf   r   e
conversionZ
openai_fnsZ	llm_chainr!   r   r"   get_openapi_chain   sh    j


	r   )NNNNFNN).
__future__r   rO   r6   collectionsr   typingr   r   r   r   r   r	   r
   r   rj   Zlangchain_core._apir   Zlangchain_core.callbacksr   Zlangchain_core.language_modelsr   Z.langchain_core.output_parsers.openai_functionsr   Zlangchain_core.promptsr   r   Zlangchain_core.utils.inputr   r   Zlangchain.chains.baser   Zlangchain.chains.llmr   Zlangchain.chains.sequentialr   r   r   Zopenapi_pydanticr   r#   rC   rU   rz   r{   r   r!   r!   r!   r"   <module>   sH   ('n.       