
"""EverlyAI Endpoints chat wrapper. Relies heavily on ChatOpenAI."""

from __future__ import annotations

import logging
import sys
import warnings
from typing import (
    TYPE_CHECKING,
    Any,
    Callable,
    Dict,
    Optional,
    Sequence,
    Set,
    Type,
    Union,
)

from langchain_core.messages import BaseMessage
from langchain_core.tools import BaseTool
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
from pydantic import Field, model_validator

from langchain_community.adapters.openai import convert_message_to_dict
from langchain_community.chat_models.openai import ChatOpenAI, _import_tiktoken

if TYPE_CHECKING:
    import tiktoken

logger = logging.getLogger(__name__)

DEFAULT_API_BASE = "https://everlyai.xyz/hosted"
DEFAULT_MODEL = "meta-llama/Llama-2-7b-chat-hf"


class ChatEverlyAI(ChatOpenAI):
    """`EverlyAI` Chat large language models.

    To use, you should have the ``openai`` python package installed, and the
    environment variable ``EVERLYAI_API_KEY`` set with your API key.
    Alternatively, you can use the ``everlyai_api_key`` keyword argument.

    Any parameters that are valid to be passed to the ``openai.create`` call
    can be passed in, even if not explicitly saved on this class.

    Example:
        .. code-block:: python

            from langchain_community.chat_models import ChatEverlyAI
            chat = ChatEverlyAI(model_name="meta-llama/Llama-2-7b-chat-hf")
    """

    @property
    def _llm_type(self) -> str:
        """Return type of chat model."""
        return "everlyai-chat"

    @property
    def lc_secrets(self) -> Dict[str, str]:
        return {"everlyai_api_key": "EVERLYAI_API_KEY"}

    @classmethod
    def is_lc_serializable(cls) -> bool:
        return False

    everlyai_api_key: Optional[str] = None
    """EverlyAI Endpoints API keys."""
    model_name: str = Field(default=DEFAULT_MODEL, alias="model")
    """Model name to use."""
    everlyai_api_base: str = DEFAULT_API_BASE
    """Base URL path for API requests."""
    available_models: Optional[Set[str]] = None
    """Available models from EverlyAI API."""

    @staticmethod
    def get_available_models() -> Set[str]:
        """Get available models from EverlyAI API."""
        # EverlyAI does not yet support querying available models dynamically.
        return {
            "meta-llama/Llama-2-7b-chat-hf",
            "meta-llama/Llama-2-13b-chat-hf-quantized",
        }

    @model_validator(mode="before")
    @classmethod
    def validate_environment_override(cls, values: dict) -> Any:
        """Validate that api key and python package exists in environment."""
        values["openai_api_key"] = convert_to_secret_str(
            get_from_dict_or_env(values, "everlyai_api_key", "EVERLYAI_API_KEY")
        )
        values["openai_api_base"] = DEFAULT_API_BASE

        try:
            import openai
        except ImportError as e:
            raise ImportError(
                "Could not import openai python package. "
                "Please install it with `pip install openai`."
            ) from e
        try:
            values["client"] = openai.ChatCompletion
        except AttributeError as exc:
            raise ValueError(
                "`openai` has no `ChatCompletion` attribute, this is likely "
                "due to an old version of the openai package. Try upgrading it "
                "with `pip install --upgrade openai`."
            ) from exc

        if "model_name" not in values.keys():
            values["model_name"] = DEFAULT_MODEL

        model_name = values["model_name"]
        available_models = cls.get_available_models()
        if model_name not in available_models:
            raise ValueError(
                f"Model name {model_name} not found in "
                f"available models: {available_models}."
            )
        values["available_models"] = available_models

        return values

    def _get_encoding_model(self) -> tuple[str, tiktoken.Encoding]:
        tiktoken_ = _import_tiktoken()
        if self.tiktoken_model_name is not None:
            model = self.tiktoken_model_name
        else:
            model = self.model_name
        try:
            encoding = tiktoken_.encoding_for_model("gpt-3.5-turbo-0301")
        except KeyError:
            logger.warning("Warning: model not found. Using cl100k_base encoding.")
            model = "cl100k_base"
            encoding = tiktoken_.get_encoding(model)
        return model, encoding

    def get_num_tokens_from_messages(
        self,
        messages: list[BaseMessage],
        tools: Optional[
            Sequence[Union[Dict[str, Any], Type, Callable, BaseTool]]
        ] = None,
    ) -> int:
        """Calculate num tokens with tiktoken package.

        Official documentation: https://github.com/openai/openai-cookbook/blob/
        main/examples/How_to_format_inputs_to_ChatGPT_models.ipynb"""
        if tools is not None:
            warnings.warn(
                "Counting tokens in tool schemas is not yet supported. "
                "Ignoring tools."
            )
        if sys.version_info[1] <= 7:
            return super().get_num_tokens_from_messages(messages)
        model, encoding = self._get_encoding_model()
        tokens_per_message = 3
        tokens_per_name = 1
        num_tokens = 0
        messages_dict = [convert_message_to_dict(m) for m in messages]
        for message in messages_dict:
            num_tokens += tokens_per_message
            for key, value in message.items():
                # Cast to str in case the message value is not a string
                # (this occurs with function messages).
                num_tokens += len(encoding.encode(str(value)))
                if key == "name":
                    num_tokens += tokens_per_name
        # Every reply is primed with <im_start>assistant.
        num_tokens += 3
        return num_tokens
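

# ---------------------------------------------------------------------------
# Hedged usage sketch (illustrative, not part of the original module): shows
# how the wrapper above might be exercised locally. It assumes the ``openai``
# and ``tiktoken`` packages are installed and that EVERLYAI_API_KEY is set in
# the environment; the prompt text below is a made-up placeholder.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    from langchain_core.messages import HumanMessage

    chat = ChatEverlyAI(model_name=DEFAULT_MODEL)
    msgs = [HumanMessage(content="Say hello in one short sentence.")]

    # Token counting runs locally via the tiktoken-based override above.
    print("estimated prompt tokens:", chat.get_num_tokens_from_messages(msgs))

    # This line performs a real API call against the EverlyAI endpoint.
    print(chat.invoke(msgs).content)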