"""Wrapper around Moonshot chat models."""

from typing import Dict

from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init

from langchain_community.chat_models import ChatOpenAI
from langchain_community.llms.moonshot import MOONSHOT_SERVICE_URL_BASE, MoonshotCommon


class MoonshotChat(MoonshotCommon, ChatOpenAI):
    """Moonshot chat model integration.

    Setup:
        Install ``openai`` and set the environment variable ``MOONSHOT_API_KEY``.

        .. code-block:: bash

            pip install openai
            export MOONSHOT_API_KEY="your-api-key"

    Key init args — completion params:
        model: str
            Name of Moonshot model to use.
        temperature: float
            Sampling temperature.
        max_tokens: Optional[int]
            Max number of tokens to generate.

    Key init args — client params:
        api_key: Optional[str]
            Moonshot API key. If not passed in, it will be read from the env var ``MOONSHOT_API_KEY``.
        api_base: Optional[str]
            Base URL for API requests.

    See full list of supported init args and their descriptions in the params section.

    Instantiate:
        .. code-block:: python

            from langchain_community.chat_models import MoonshotChat

            chat = MoonshotChat(
                temperature=0.5,
                api_key="your-api-key",
                model="moonshot-v1-8k",
                # api_base="...",
                # other params...
            )
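
    The API key can also be picked up from the environment instead of being passed
    in explicitly. A minimal sketch, assuming ``MOONSHOT_API_KEY`` is exported as in
    the Setup step (the ``max_tokens`` value here is only illustrative):

        .. code-block:: python

            from langchain_community.chat_models import MoonshotChat

            # No api_key argument: it is read from the MOONSHOT_API_KEY env var.
            chat = MoonshotChat(
                model="moonshot-v1-8k",
                temperature=0.5,
                max_tokens=512,
            )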

    Invoke:
        .. code-block:: python

            messages = [
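                # System prompt, in Chinese: "You are a professional translator
                # who translates the user's Chinese into English."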
                ("system", "你是一名专业的翻译家，可以将用户的中文翻译为英文。"),
                ("human", "我喜欢编程。"),
            ]
            chat.invoke(messages)

        .. code-block:: python

            AIMessage(
                content='I like programming.',
                additional_kwargs={},
                response_metadata={
                    'token_usage': {
                        'completion_tokens': 5,
                        'prompt_tokens': 27,
                        'total_tokens': 32
                    },
                    'model_name': 'moonshot-v1-8k',
                    'system_fingerprint': None,
                    'finish_reason': 'stop',
                    'logprobs': None
                },
                id='run-71c03f4e-6628-41d5-beb6-d2559ae68266-0'
            )

    Stream:
        .. code-block:: python

            for chunk in chat.stream(messages):
                print(chunk)

        .. code-block:: python

            content='' additional_kwargs={} response_metadata={} id='run-80d77096-8b83-4c39-a84d-71d9c746da92'
            content='I' additional_kwargs={} response_metadata={} id='run-80d77096-8b83-4c39-a84d-71d9c746da92'
            content=' like' additional_kwargs={} response_metadata={} id='run-80d77096-8b83-4c39-a84d-71d9c746da92'
            content=' programming' additional_kwargs={} response_metadata={} id='run-80d77096-8b83-4c39-a84d-71d9c746da92'
            content='.' additional_kwargs={} response_metadata={} id='run-80d77096-8b83-4c39-a84d-71d9c746da92'
            content='' additional_kwargs={} response_metadata={'finish_reason': 'stop'} id='run-80d77096-8b83-4c39-a84d-71d9c746da92'

        .. code-block:: python

            stream = chat.stream(messages)
            full = next(stream)
            for chunk in stream:
                full += chunk
            full

        .. code-block:: python

            AIMessageChunk(
                content='I like programming.',
                additional_kwargs={},
                response_metadata={'finish_reason': 'stop'},
                id='run-10c80976-7aa5-4ff7-ba3e-1251665557ef'
            )

    Async:
        .. code-block:: python

            await chat.ainvoke(messages)

            # stream:
            # async for chunk in chat.astream(messages):
            #    print(chunk)

            # batch:
            # await chat.abatch([messages])

        .. code-block:: python

            [AIMessage(content='I like programming.', additional_kwargs={}, response_metadata={'token_usage': {'completion_tokens': 5, 'prompt_tokens': 27, 'total_tokens': 32}, 'model_name': 'moonshot-v1-8k', 'system_fingerprint': None, 'finish_reason': 'stop', 'logprobs': None}, id='run-2938b005-9204-4b9f-b273-1c3272fce9e5-0')]

    Response metadata:
        .. code-block:: python

            ai_msg = chat.invoke(messages)
            ai_msg.response_metadata

        .. code-block:: python

            {
                'token_usage': {
                    'completion_tokens': 5,
                    'prompt_tokens': 27,
                    'total_tokens': 32
                },
                'model_name': 'moonshot-v1-8k',
                'system_fingerprint': None,
                'finish_reason': 'stop',
                'logprobs': None
            }

    """

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that the environment is set up correctly."""
        # Resolve the API key from init kwargs or the MOONSHOT_API_KEY env var.
        values["moonshot_api_key"] = convert_to_secret_str(
            get_from_dict_or_env(
                values,
                ["moonshot_api_key", "api_key", "openai_api_key"],
                "MOONSHOT_API_KEY",
            )
        )

        try:
            import openai
        except ImportError:
            raise ImportError(
                "Could not import openai python package. "
                "Please install it with `pip install openai`."
            )

        client_params = {
            "api_key": values["moonshot_api_key"].get_secret_value(),
            "base_url": values["base_url"]
            if "base_url" in values
            else MOONSHOT_SERVICE_URL_BASE,
        }

        # Build sync and async OpenAI-compatible clients if not already provided.
        if not values.get("client"):
            values["client"] = openai.OpenAI(**client_params).chat.completions
        if not values.get("async_client"):
            values["async_client"] = openai.AsyncOpenAI(
                **client_params
            ).chat.completions
        return values