
import re
import warnings
from typing import (
    Any,
    AsyncIterator,
    Callable,
    Dict,
    Iterator,
    List,
    Mapping,
    Optional,
)

from langchain_core._api.deprecation import deprecated
from langchain_core.callbacks import (
    AsyncCallbackManagerForLLMRun,
    CallbackManagerForLLMRun,
)
from langchain_core.language_models import BaseLanguageModel
from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk
from langchain_core.prompt_values import PromptValue
from langchain_core.utils import (
    check_package_version,
    get_from_dict_or_env,
    get_pydantic_field_names,
    pre_init,
)
from langchain_core.utils.utils import _build_model_kwargs, convert_to_secret_str
from pydantic import ConfigDict, Field, SecretStr, model_validator


class _AnthropicCommon(BaseLanguageModel):
    client: Any = None  #: :meta private:
    async_client: Any = None  #: :meta private:

    model: str = Field(default="claude-2", alias="model_name")
    """Model name to use."""

    max_tokens_to_sample: int = Field(default=1024, alias="max_tokens")
    """Denotes the number of tokens to predict per generation."""

    temperature: Optional[float] = None
    """A non-negative float that tunes the degree of randomness in generation."""

    top_k: Optional[int] = None
    """Number of most likely tokens to consider at each step."""

    top_p: Optional[float] = None
    """Total probability mass of tokens to consider at each step."""

    streaming: bool = False
    """Whether to stream the results."""

    default_request_timeout: Optional[float] = None
    """Timeout for requests to the Anthropic Completion API."""

    max_retries: int = 2
    """Number of retries allowed for requests sent to the Anthropic Completion API."""

    anthropic_api_url: Optional[str] = None
    anthropic_api_key: Optional[SecretStr] = None

    HUMAN_PROMPT: Optional[str] = None
    AI_PROMPT: Optional[str] = None
    count_tokens: Optional[Callable[[str], int]] = None
    model_kwargs: Dict[str, Any] = Field(default_factory=dict)

    @model_validator(mode="before")
    @classmethod
    def build_extra(cls, values: Dict) -> Any:
        # Collect any extra keyword arguments into ``model_kwargs``.
        all_required_field_names = get_pydantic_field_names(cls)
        values = _build_model_kwargs(values, all_required_field_names)
        return values

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that api key and python package exists in environment."""
        values["anthropic_api_key"] = convert_to_secret_str(
            get_from_dict_or_env(values, "anthropic_api_key", "ANTHROPIC_API_KEY")
        )
        values["anthropic_api_url"] = get_from_dict_or_env(
            values,
            "anthropic_api_url",
            "ANTHROPIC_API_URL",
            default="https://api.anthropic.com",
        )

        try:
            import anthropic

            check_package_version("anthropic", gte_version="0.3")
            values["client"] = anthropic.Anthropic(
                base_url=values["anthropic_api_url"],
                api_key=values["anthropic_api_key"].get_secret_value(),
                timeout=values["default_request_timeout"],
                max_retries=values["max_retries"],
            )
            values["async_client"] = anthropic.AsyncAnthropic(
                base_url=values["anthropic_api_url"],
                api_key=values["anthropic_api_key"].get_secret_value(),
                timeout=values["default_request_timeout"],
                max_retries=values["max_retries"],
            )
            values["HUMAN_PROMPT"] = anthropic.HUMAN_PROMPT
            values["AI_PROMPT"] = anthropic.AI_PROMPT
            values["count_tokens"] = values["client"].count_tokens
        except ImportError:
            raise ImportError(
                "Could not import anthropic python package. "
                "Please install it with `pip install anthropic`."
            )
        return values

    @property
    def _default_params(self) -> Mapping[str, Any]:
        """Get the default parameters for calling Anthropic API."""
        d = {
            "max_tokens_to_sample": self.max_tokens_to_sample,
            "model": self.model,
        }
        if self.temperature is not None:
            d["temperature"] = self.temperature
        if self.top_k is not None:
            d["top_k"] = self.top_k
        if self.top_p is not None:
            d["top_p"] = self.top_p
        return {**d, **self.model_kwargs}

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {**{}, **self._default_params}

    def _get_anthropic_stop(self, stop: Optional[List[str]] = None) -> List[str]:
        if not self.HUMAN_PROMPT or not self.AI_PROMPT:
            raise NameError("Please ensure the anthropic package is loaded")

        if stop is None:
            stop = []

        # Never want the model to invent new turns of Human / Assistant dialog.
        stop.extend([self.HUMAN_PROMPT])

        return stop


@deprecated(
    since="0.0.28",
    removal="1.0",
    alternative_import="langchain_anthropic.AnthropicLLM",
)
class Anthropic(LLM, _AnthropicCommon):
    """Anthropic large language models.

    To use, you should have the ``anthropic`` python package installed, and the
    environment variable ``ANTHROPIC_API_KEY`` set with your API key, or pass
    it as a named parameter to the constructor.

    Example:
        .. code-block:: python

            import anthropic
            from langchain_community.llms import Anthropic

            model = Anthropic(model="<model_name>", anthropic_api_key="my-api-key")

            # Simplest invocation, automatically wrapped with HUMAN_PROMPT
            # and AI_PROMPT.
            response = model.invoke("What are the biggest risks facing humanity?")

            # Or if you want to use the chat mode, build a few-shot-prompt, or
            # put words in the Assistant's mouth, use HUMAN_PROMPT and AI_PROMPT:
            raw_prompt = "What are the biggest risks facing humanity?"
            prompt = f"{anthropic.HUMAN_PROMPT} {raw_prompt}{anthropic.AI_PROMPT}"
            response = model.invoke(prompt)
    """

    model_config = ConfigDict(
        populate_by_name=True,
        arbitrary_types_allowed=True,
    )

    @pre_init
    def raise_warning(cls, values: Dict) -> Dict:
        """Raise warning that this class is deprecated."""
        warnings.warn(
            "This Anthropic LLM is deprecated. Please use "
            "`from langchain_community.chat_models import ChatAnthropic` instead"
        )
        return values

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "anthropic-llm"

    def _wrap_prompt(self, prompt: str) -> str:
        if not self.HUMAN_PROMPT or not self.AI_PROMPT:
            raise NameError("Please ensure the anthropic package is loaded")

        if prompt.startswith(self.HUMAN_PROMPT):
            return prompt  # Already wrapped.

        # Guard against common errors in specifying the wrong number of newlines.
        corrected_prompt, n_subs = re.subn(r"^\n*Human:", self.HUMAN_PROMPT, prompt)
        if n_subs == 1:
            return corrected_prompt

        # As a last resort, wrap the prompt ourselves to emulate instruct-style.
        return f"{self.HUMAN_PROMPT} {prompt}{self.AI_PROMPT} Sure, here you go:\n"

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        r"""Call out to Anthropic's completion endpoint.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            The string generated by the model.

        Example:
            .. code-block:: python

                prompt = "What are the biggest risks facing humanity?"
                prompt = f"\n\nHuman: {prompt}\n\nAssistant:"
                response = model.invoke(prompt)
        """
        if self.streaming:
            completion = ""
            for chunk in self._stream(
                prompt=prompt, stop=stop, run_manager=run_manager, **kwargs
            ):
                completion += chunk.text
            return completion

        stop = self._get_anthropic_stop(stop)
        params = {**self._default_params, **kwargs}
        response = self.client.completions.create(
            prompt=self._wrap_prompt(prompt),
            stop_sequences=stop,
            **params,
        )
        return response.completion

    def convert_prompt(self, prompt: PromptValue) -> str:
        return self._wrap_prompt(prompt.to_string())

    async def _acall(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """Call out to Anthropic's completion endpoint asynchronously."""
        if self.streaming:
            completion = ""
            async for chunk in self._astream(
                prompt=prompt, stop=stop, run_manager=run_manager, **kwargs
            ):
                completion += chunk.text
            return completion

        stop = self._get_anthropic_stop(stop)
        params = {**self._default_params, **kwargs}

        response = await self.async_client.completions.create(
            prompt=self._wrap_prompt(prompt),
            stop_sequences=stop,
            **params,
        )
        return response.completion

    def _stream(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[GenerationChunk]:
        r"""Call Anthropic completion_stream and return the resulting generator.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.
        Returns:
            A generator representing the stream of tokens from Anthropic.
        Example:
            .. code-block:: python

                prompt = "Write a poem about a stream."
                prompt = f"\n\nHuman: {prompt}\n\nAssistant:"
                generator = anthropic.stream(prompt)
                for token in generator:
                    yield token
        """
        stop = self._get_anthropic_stop(stop)
        params = {**self._default_params, **kwargs}

        for token in self.client.completions.create(
            prompt=self._wrap_prompt(prompt),
            stop_sequences=stop,
            stream=True,
            **params,
        ):
            chunk = GenerationChunk(text=token.completion)

            if run_manager:
                run_manager.on_llm_new_token(chunk.text, chunk=chunk)
            yield chunk

    async def _astream(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> AsyncIterator[GenerationChunk]:
        r"""Call Anthropic completion_stream and return the resulting generator.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.
        Returns:
            A generator representing the stream of tokens from Anthropic.
        Example:
            .. code-block:: python

                prompt = "Write a poem about a stream."
                prompt = f"\n\nHuman: {prompt}\n\nAssistant:"
                generator = anthropic.stream(prompt)
                for token in generator:
                    yield token
        """
        stop = self._get_anthropic_stop(stop)
        params = {**self._default_params, **kwargs}

        async for token in await self.async_client.completions.create(
            prompt=self._wrap_prompt(prompt),
            stop_sequences=stop,
            stream=True,
            **params,
        ):
            chunk = GenerationChunk(text=token.completion)

            if run_manager:
                await run_manager.on_llm_new_token(chunk.text, chunk=chunk)
            yield chunk

    def get_num_tokens(self, text: str) -> int:
        """Calculate number of tokens."""
        if not self.count_tokens:
            raise NameError("Please ensure the anthropic package is loaded")
        return self.count_tokens(text)