import logging
from typing import Any, Optional

from langchain_core.language_models.llms import LLM

from langchain_community.llms.ipex_llm import IpexLLM

logger = logging.getLogger(__name__)


class BigdlLLM(IpexLLM):
    """Wrapper around the BigdlLLM model

    Example:
        .. code-block:: python

            from langchain_community.llms import BigdlLLM
            llm = BigdlLLM.from_model_id(model_id="THUDM/chatglm-6b")
    """
    @classmethod
    def from_model_id(
        cls,
        model_id: str,
        model_kwargs: Optional[dict] = None,
        *,
        tokenizer_id: Optional[str] = None,
        load_in_4bit: bool = True,
        load_in_low_bit: Optional[str] = None,
        **kwargs: Any,
    ) -> LLM:
        """
        Construct object from model_id

        Args:
            model_id: Path for the huggingface repo id to be downloaded or
                      the huggingface checkpoint folder.
            tokenizer_id: Path for the huggingface repo id to be downloaded or
                      the huggingface checkpoint folder which contains the tokenizer.
            model_kwargs: Keyword arguments to pass to the model and tokenizer.
            kwargs: Extra arguments to pass to the model and tokenizer.

        Returns:
            An object of BigdlLLM.
        """
        logger.warning("BigdlLLM was deprecated. Please use IpexLLM instead.")

        try:
            from bigdl.llm.transformers import AutoModel, AutoModelForCausalLM
            from transformers import AutoTokenizer, LlamaTokenizer
        except ImportError:
            raise ImportError(
                "Could not import bigdl-llm or transformers. Please install it "
                "with `pip install --pre --upgrade bigdl-llm[all]`."
            )

        if load_in_low_bit is not None:
            logger.warning(
                "`load_in_low_bit` option is not supported in BigdlLLM and "
                "is ignored. For more data types support with `load_in_low_bit`, "
                "use IpexLLM instead."
            )

        if not load_in_4bit:
            raise ValueError(
                "BigdlLLM only supports loading in 4-bit mode, "
                "i.e. load_in_4bit = True. Please install it with "
                "`pip install --pre --upgrade bigdl-llm[all]`."
            )

        _model_kwargs = model_kwargs or {}
        _tokenizer_id = tokenizer_id or model_id

        # Fall back to LlamaTokenizer for checkpoints that AutoTokenizer cannot load.
        try:
            tokenizer = AutoTokenizer.from_pretrained(_tokenizer_id, **_model_kwargs)
        except Exception:
            tokenizer = LlamaTokenizer.from_pretrained(_tokenizer_id, **_model_kwargs)

        # Fall back to AutoModel for architectures without a causal-LM head.
        try:
            model = AutoModelForCausalLM.from_pretrained(
                model_id, load_in_4bit=True, **_model_kwargs
            )
        except Exception:
            model = AutoModel.from_pretrained(
                model_id, load_in_4bit=True, **_model_kwargs
            )

        # `trust_remote_code` only matters at load time; drop it before storing
        # the kwargs on the LLM object.
        if "trust_remote_code" in _model_kwargs:
            _model_kwargs = {
                k: v for k, v in _model_kwargs.items() if k != "trust_remote_code"
            }

        return cls(
            model_id=model_id,
            model=model,
            tokenizer=tokenizer,
            model_kwargs=_model_kwargs,
            **kwargs,
        )
    @classmethod
    def from_model_id_low_bit(
        cls,
        model_id: str,
        model_kwargs: Optional[dict] = None,
        *,
        tokenizer_id: Optional[str] = None,
        **kwargs: Any,
    ) -> LLM:
        """
        Construct low_bit object from model_id

        Args:

            model_id: Path for the bigdl-llm transformers low-bit model folder.
            tokenizer_id: Path for the huggingface repo id or local model folder
                      which contains the tokenizer.
            model_kwargs: Keyword arguments to pass to the model and tokenizer.
            kwargs: Extra arguments to pass to the model and tokenizer.

        Returns:
            An object of BigdlLLM.
        """
        logger.warning("BigdlLLM was deprecated. Please use IpexLLM instead.")

        try:
            from bigdl.llm.transformers import AutoModel, AutoModelForCausalLM
            from transformers import AutoTokenizer, LlamaTokenizer
        except ImportError:
            raise ImportError(
                "Could not import bigdl-llm or transformers. Please install it "
                "with `pip install --pre --upgrade bigdl-llm[all]`."
            )

        _model_kwargs = model_kwargs or {}
        _tokenizer_id = tokenizer_id or model_id

        # Fall back to LlamaTokenizer for checkpoints that AutoTokenizer cannot load.
        try:
            tokenizer = AutoTokenizer.from_pretrained(_tokenizer_id, **_model_kwargs)
        except Exception:
            tokenizer = LlamaTokenizer.from_pretrained(_tokenizer_id, **_model_kwargs)

        # Low-bit checkpoints are restored with `load_low_bit` rather than
        # `from_pretrained`.
        try:
            model = AutoModelForCausalLM.load_low_bit(model_id, **_model_kwargs)
        except Exception:
            model = AutoModel.load_low_bit(model_id, **_model_kwargs)

        # `trust_remote_code` only matters at load time; drop it before storing
        # the kwargs on the LLM object.
        if "trust_remote_code" in _model_kwargs:
            _model_kwargs = {
                k: v for k, v in _model_kwargs.items() if k != "trust_remote_code"
            }

        return cls(
            model_id=model_id,
            model=model,
            tokenizer=tokenizer,
            model_kwargs=_model_kwargs,
            **kwargs,
        )

    @property
    def _llm_type(self) -> str:
        return "bigdl-llm"