import os
import warnings
from datetime import datetime
from enum import Enum
from typing import Any, Dict, List, Optional, Tuple, Union
from uuid import UUID

from langchain_core.agents import AgentAction, AgentFinish
from langchain_core.callbacks import BaseCallbackHandler
from langchain_core.messages import BaseMessage, ChatMessage
from langchain_core.outputs import Generation, LLMResult


class LabelStudioMode(Enum):
    """Label Studio mode enumerator."""

    PROMPT = "prompt"
    CHAT = "chat"


def get_default_label_configs(
    mode: Union[str, LabelStudioMode],
) -> Tuple[str, LabelStudioMode]:
    """Get default Label Studio configs for the given mode.

    Parameters:
        mode: Label Studio mode ("prompt" or "chat")

    Returns: Tuple of Label Studio config and mode
    """
    _default_label_configs = {
        LabelStudioMode.PROMPT.value: """
<View>
<Style>
    .prompt-box {
        background-color: white;
        border-radius: 10px;
        box-shadow: 0px 4px 6px rgba(0, 0, 0, 0.1);
        padding: 20px;
    }
</Style>
<View className="root">
    <View className="prompt-box">
        <Text name="prompt" value="$prompt"/>
    </View>
    <TextArea name="response" toName="prompt"
              maxSubmissions="1" editable="true"
              required="true"/>
</View>
<Header value="Rate the response:"/>
<Rating name="rating" toName="prompt"/>
</View>""",
        LabelStudioMode.CHAT.value: """
<View>
<View className="root">
     <Paragraphs name="dialogue"
               value="$prompt"
               layout="dialogue"
               textKey="content"
               nameKey="role"
               granularity="sentence"/>
  <Header value="Final response:"/>
    <TextArea name="response" toName="dialogue"
              maxSubmissions="1" editable="true"
              required="true"/>
</View>
<Header value="Rate the response:"/>
<Rating name="rating" toName="dialogue"/>
</View>""",
    }

    if isinstance(mode, str):
        mode = LabelStudioMode(mode)

    return _default_label_configs[mode.value], mode
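

# Added illustrative sketch, not part of the original module: a minimal,
# hypothetical demonstration of `get_default_label_configs`. The function
# accepts either the string "prompt"/"chat" or a `LabelStudioMode` member and
# returns the matching labeling-config XML together with the normalized mode.
def _demo_default_label_configs() -> None:  # hypothetical helper; safe to remove
    config, mode = get_default_label_configs("prompt")
    assert mode is LabelStudioMode.PROMPT
    # The prompt template renders a TextArea tag for the model response.
    assert "<TextArea" in config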


class LabelStudioCallbackHandler(BaseCallbackHandler):
    """Label Studio callback handler.
    Provides the ability to send predictions to Label Studio
    for human evaluation, feedback and annotation.

    Parameters:
        api_key: Label Studio API key
        url: Label Studio URL
        project_id: Label Studio project ID
        project_name: Label Studio project name
        project_config: Label Studio project config (XML)
        mode: Label Studio mode ("prompt" or "chat")

    Examples:
        >>> from langchain_community.llms import OpenAI
        >>> from langchain_community.callbacks import LabelStudioCallbackHandler
        >>> handler = LabelStudioCallbackHandler(
        ...             api_key='<your_key_here>',
        ...             url='http://localhost:8080',
        ...             project_name='LangChain-%Y-%m-%d',
        ...             mode='prompt'
        ... )
        >>> llm = OpenAI(callbacks=[handler])
        >>> llm.invoke('Tell me a story about a dog.')
    """

    DEFAULT_PROJECT_NAME: str = "LangChain-%Y-%m-%d"

    def __init__(
        self,
        api_key: Optional[str] = None,
        url: Optional[str] = None,
        project_id: Optional[int] = None,
        project_name: str = DEFAULT_PROJECT_NAME,
        project_config: Optional[str] = None,
        mode: Union[str, LabelStudioMode] = LabelStudioMode.PROMPT,
    ):
        super().__init__()

        # Import the Label Studio SDK
        try:
            import label_studio_sdk as ls
        except ImportError:
            raise ImportError(
                f"You're using {self.__class__.__name__} in your code, but you don't have "
                "the LabelStudio SDK Python package installed or upgraded to the latest "
                "version. Please run `pip install -U label-studio-sdk` before using this callback."
            )

        # Check if the Label Studio API key is provided
        if not api_key:
            if os.getenv("LABEL_STUDIO_API_KEY"):
                api_key = str(os.getenv("LABEL_STUDIO_API_KEY"))
            else:
                raise ValueError(
                    f"You're using {self.__class__.__name__} in your code, Label Studio API key "
                    "is not provided. Please provide Label Studio API key: go to the Label Studio "
                    "instance, navigate to Account & Settings -> Access Token and copy the key. "
                    f"Use the key as a parameter for the callback: {self.__class__.__name__}"
                    "(label_studio_api_key='<your_key_here>', ...) or set the environment "
                    "variable LABEL_STUDIO_API_KEY=<your_key_here>"
                )
        self.api_key = api_key

        # Check if the Label Studio URL is provided, otherwise fall back to the default
        if not url:
            if os.getenv("LABEL_STUDIO_URL"):
                url = os.getenv("LABEL_STUDIO_URL")
            else:
                warnings.warn(
                    f"Label Studio URL is not provided, using default URL: "
                    f"{ls.LABEL_STUDIO_DEFAULT_URL}"
                    "If you want to provide your own URL, use the parameter: "
                    f"{self.__class__.__name__}(label_studio_url='<your_url_here>', ...) "
                    "or set the environment variable LABEL_STUDIO_URL=<your_url_here>"
                )
                url = ls.LABEL_STUDIO_DEFAULT_URL
        self.url = url

        # Maps run_id to the prompts and kwargs collected when an LLM run starts
        self.payload: Dict[str, Dict] = {}

        self.ls_client = ls.Client(url=self.url, api_key=self.api_key)
        self.project_name = project_name

        if project_config:
            self.project_config = project_config
            self.mode = None
        else:
            self.project_config, self.mode = get_default_label_configs(mode)

        # Get or create the Label Studio project
        self.project_id = project_id or os.getenv("LABEL_STUDIO_PROJECT_ID")
        if self.project_id is not None:
            self.ls_project = self.ls_client.get_project(int(self.project_id))
        else:
            project_title = datetime.today().strftime(self.project_name)
            existing_projects = self.ls_client.get_projects(title=project_title)
            if existing_projects:
                self.ls_project = existing_projects[0]
                self.project_id = self.ls_project.id
            else:
                self.ls_project = self.ls_client.create_project(
                    title=project_title, label_config=self.project_config
                )
                self.project_id = self.ls_project.id
        self.parsed_label_config = self.ls_project.parsed_label_config

        # Find the first TextArea tag; its from_name/to_name/value are used to
        # create predictions
        self.from_name, self.to_name, self.value, self.input_type = None, None, None, None
        for tag_name, tag_info in self.parsed_label_config.items():
            if tag_info["type"] == "TextArea":
                self.from_name = tag_name
                self.to_name = tag_info["to_name"][0]
                self.value = tag_info["inputs"][0]["value"]
                self.input_type = tag_info["inputs"][0]["type"]
                break
        if not self.from_name:
            error_message = (
                f'Label Studio project "{self.project_name}" does not have a TextArea tag. '
                "Please add a TextArea tag to the project."
            )
            if self.mode == LabelStudioMode.PROMPT:
                error_message += (
                    "\nHINT: go to project Settings -> Labeling Interface -> Browse Templates "
                    'and select "Generative AI -> Supervised Language Model Fine-tuning" template.'
                )
            else:
                error_message += (
                    "\nHINT: go to project Settings -> Labeling Interface -> Browse Templates "
                    'and check available templates under "Generative AI" section.'
                )
            raise ValueError(error_message)

    def add_prompts_generations(
        self, run_id: str, generations: List[List[Generation]]
    ) -> None:
        # Create one Label Studio task per prompt, with the generated texts
        # attached as a pre-annotation ("prediction")
        tasks = []
        prompts = self.payload[run_id]["prompts"]
        model_version = (
            self.payload[run_id]["kwargs"]
            .get("invocation_params", {})
            .get("model_name")
        )
        for prompt, generation in zip(prompts, generations):
            tasks.append(
                {
                    "data": {
                        self.value: prompt,
                        "run_id": run_id,
                    },
                    "predictions": [
                        {
                            "result": [
                                {
                                    "from_name": self.from_name,
                                    "to_name": self.to_name,
                                    "type": "textarea",
                                    "value": {"text": [g.text for g in generation]},
                                }
                            ],
                            "model_version": model_version,
                        }
                    ],
                }
            )
        self.ls_project.import_tasks(tasks)
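
    # Added commentary, not in the original source: the tasks imported by
    # `add_prompts_generations` above follow Label Studio's task import format,
    # roughly:
    #     {"data": {<value key of the TextArea's input tag>: <prompt>,
    #               "run_id": <run id>},
    #      "predictions": [{"result": [{"from_name": ..., "to_name": ...,
    #                                   "type": "textarea",
    #                                   "value": {"text": [<generated texts>]}}],
    #                       "model_version": <model name from invocation params>}]}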

    def on_llm_start(
        self, serialized: Dict[str, Any], prompts: List[str], **kwargs: Any
    ) -> None:
        """Save the prompts in memory when an LLM starts."""
        if self.input_type != "Text":
            raise ValueError(
                f'\nLabel Studio project "{self.project_name}" has an input type '
                f'<{self.input_type}>. To make it work with the mode="chat", '
                "the input type should be <Text>.\n"
                "Read more here https://labelstud.io/tags/text"
            )
        run_id = str(kwargs["run_id"])
        self.payload[run_id] = {"prompts": prompts, "kwargs": kwargs}

    def _get_message_role(self, message: BaseMessage) -> str:
        """Get the role of the message."""
        if isinstance(message, ChatMessage):
            return message.role
        else:
            return message.__class__.__name__

    def on_chat_model_start(
        self,
        serialized: Dict[str, Any],
        messages: List[List[BaseMessage]],
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
        tags: Optional[List[str]] = None,
        metadata: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> Any:
        """Save the prompts in memory when an LLM starts."""
        if self.input_type != "Paragraphs":
            raise ValueError(
                f'\nLabel Studio project "{self.project_name}" has an input type '
                f'<{self.input_type}>. To make it work with the mode="chat", '
                "the input type should be <Paragraphs>.\n"
                "Read more here https://labelstud.io/tags/paragraphs"
            )

        prompts = []
        for message_list in messages:
            dialog = []
            for message in message_list:
                dialog.append(
                    {
                        "role": self._get_message_role(message),
                        "content": message.content,
                    }
                )
            prompts.append(dialog)
        self.payload[str(run_id)] = {
            "prompts": prompts,
            "tags": tags,
            "metadata": metadata,
            "run_id": run_id,
            "parent_run_id": parent_run_id,
            "kwargs": kwargs,
        }

    def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
        """Do nothing when a new token is generated."""
        pass

    def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
        """Create a new Label Studio task for each prompt and generation."""
        run_id = str(kwargs["run_id"])

        # Submit results to Label Studio
        self.add_prompts_generations(run_id, response.generations)

        # Pop the current run from `self.payload`
        self.payload.pop(run_id)

    def on_llm_error(self, error: BaseException, **kwargs: Any) -> None:
        """Do nothing when LLM outputs an error."""
        pass

    def on_chain_start(
        self, serialized: Dict[str, Any], inputs: Dict[str, Any], **kwargs: Any
    ) -> None:
        pass

    def on_chain_end(self, outputs: Dict[str, Any], **kwargs: Any) -> None:
        pass

    def on_chain_error(self, error: BaseException, **kwargs: Any) -> None:
        """Do nothing when LLM chain outputs an error."""
        pass

    def on_tool_start(
        self, serialized: Dict[str, Any], input_str: str, **kwargs: Any
    ) -> None:
        """Do nothing when tool starts."""
        pass

    def on_agent_action(self, action: AgentAction, **kwargs: Any) -> Any:
        """Do nothing when agent takes a specific action."""
        pass

    def on_tool_end(
        self,
        output: str,
        observation_prefix: Optional[str] = None,
        llm_prefix: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        """Do nothing when tool ends."""
        pass

    def on_tool_error(self, error: BaseException, **kwargs: Any) -> None:
        """Do nothing when tool outputs an error."""
        pass

    def on_text(self, text: str, **kwargs: Any) -> None:
        """Do nothing"""
        pass

    def on_agent_finish(self, finish: AgentFinish, **kwargs: Any) -> None:
        """Do nothing"""
        pass
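

# Added illustrative usage sketch, not part of the original module. It mirrors
# the example from the class docstring and assumes a Label Studio instance is
# reachable at the given URL and that valid Label Studio / OpenAI API keys are
# supplied; replace the placeholder values before running.
if __name__ == "__main__":
    from langchain_community.llms import OpenAI

    handler = LabelStudioCallbackHandler(
        api_key="<your_key_here>",          # placeholder Label Studio API key
        url="http://localhost:8080",        # assumed local Label Studio instance
        project_name="LangChain-%Y-%m-%d",  # resolved with today's date
        mode="prompt",
    )
    llm = OpenAI(callbacks=[handler])
    # Each prompt/generation pair is imported into the Label Studio project as
    # a task with a pre-filled "response" prediction for human review.
    llm.invoke("Tell me a story about a dog.")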