"""Hypothetical Document Embeddings.

https://arxiv.org/abs/2212.10496
"""

from __future__ import annotations

from typing import Any, Dict, List, Optional

import numpy as np
from langchain_core.callbacks import CallbackManagerForChainRun
from langchain_core.embeddings import Embeddings
from langchain_core.language_models import BaseLanguageModel
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import BasePromptTemplate
from langchain_core.runnables import Runnable
from pydantic import ConfigDict

from langchain.chains.base import Chain
from langchain.chains.hyde.prompts import PROMPT_MAP
from langchain.chains.llm import LLMChain


class HypotheticalDocumentEmbedder(Chain, Embeddings):
    """Generate a hypothetical document for the query, then embed it.

    Based on https://arxiv.org/abs/2212.10496
    """

    base_embeddings: Embeddings
    llm_chain: Runnable

    model_config = ConfigDict(
        arbitrary_types_allowed=True,
        extra="forbid",
    )

    @property
    def input_keys(self) -> List[str]:
        """Input keys for Hyde's LLM chain."""
        return self.llm_chain.input_schema.model_json_schema()["required"]

    @property
    def output_keys(self) -> List[str]:
        """Output keys for Hyde's LLM chain."""
        if isinstance(self.llm_chain, LLMChain):
            return self.llm_chain.output_keys
        else:
            return ["text"]

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """Call the base embeddings."""
        return self.base_embeddings.embed_documents(texts)

    def combine_embeddings(self, embeddings: List[List[float]]) -> List[float]:
        """Combine embeddings into a final embedding by averaging them."""
        return list(np.array(embeddings).mean(axis=0))

    def embed_query(self, text: str) -> List[float]:
        """Generate a hypothetical document and embed it."""
        var_name = self.input_keys[0]
        result = self.llm_chain.invoke({var_name: text})
        if isinstance(self.llm_chain, LLMChain):
            documents = [result[self.output_keys[0]]]
        else:
            documents = [result]
        embeddings = self.embed_documents(documents)
        return self.combine_embeddings(embeddings)

    def _call(
        self,
        inputs: Dict[str, Any],
        run_manager: Optional[CallbackManagerForChainRun] = None,
    ) -> Dict[str, str]:
        """Call the internal llm chain."""
        _run_manager = run_manager or CallbackManagerForChainRun.get_noop_manager()
        return self.llm_chain.invoke(
            inputs, config={"callbacks": _run_manager.get_child()}
        )

    @classmethod
    def from_llm(
        cls,
        llm: BaseLanguageModel,
        base_embeddings: Embeddings,
        prompt_key: Optional[str] = None,
        custom_prompt: Optional[BasePromptTemplate] = None,
        **kwargs: Any,
    ) -> HypotheticalDocumentEmbedder:
        """Load and use LLMChain with either a specific prompt key or custom prompt."""
        if custom_prompt is not None:
            prompt = custom_prompt
        elif prompt_key is not None and prompt_key in PROMPT_MAP:
            prompt = PROMPT_MAP[prompt_key]
        else:
            raise ValueError(
                f"Must specify prompt_key if custom_prompt not provided. "
                f"Should be one of {list(PROMPT_MAP.keys())}."
            )

        llm_chain = prompt | llm | StrOutputParser()
        return cls(base_embeddings=base_embeddings, llm_chain=llm_chain, **kwargs)

    @property
    def _chain_type(self) -> str:
        return "hyde_chain"