
import logging
import os
from typing import Any, Dict, Iterator, List, Mapping, Optional, Union

from langchain_core._api.deprecation import deprecated
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import BaseLLM
from langchain_core.outputs import Generation, GenerationChunk, LLMResult
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
from pydantic import ConfigDict, SecretStr

logger = logging.getLogger(__name__)


@deprecated(
    since="0.0.18",
    removal="1.0",
    alternative_import="langchain_ibm.WatsonxLLM",
)
class WatsonxLLM(BaseLLM):
    """
    IBM watsonx.ai large language models.

    To use, you should have ``ibm_watsonx_ai`` python package installed,
    and the environment variable ``WATSONX_APIKEY`` set with your API key, or pass
    it as a named parameter to the constructor.


    Example:
        .. code-block:: python

            from ibm_watsonx_ai.metanames import GenTextParamsMetaNames
            parameters = {
                GenTextParamsMetaNames.DECODING_METHOD: "sample",
                GenTextParamsMetaNames.MAX_NEW_TOKENS: 100,
                GenTextParamsMetaNames.MIN_NEW_TOKENS: 1,
                GenTextParamsMetaNames.TEMPERATURE: 0.5,
                GenTextParamsMetaNames.TOP_K: 50,
                GenTextParamsMetaNames.TOP_P: 1,
            }

            from langchain_community.llms import WatsonxLLM
            watsonx_llm = WatsonxLLM(
                model_id="google/flan-ul2",
                url="https://us-south.ml.cloud.ibm.com",
                apikey="*****",
                project_id="*****",
                params=parameters,
            )
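
    A deployed model can be targeted instead of a foundation model id; the
    snippet below is only an illustrative sketch with placeholder values
    (``deployment_id`` and ``space_id`` must refer to an existing watsonx.ai
    deployment and space):
        .. code-block:: python

            watsonx_llm = WatsonxLLM(
                deployment_id="*****",
                url="https://us-south.ml.cloud.ibm.com",
                apikey="*****",
                space_id="*****",
            )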
    """

    model_id: str = ""
    """Type of model to use."""

    deployment_id: str = ""
    """Type of deployed model to use."""

    project_id: str = ""
    """ID of the Watson Studio project."""

    space_id: str = ""
    """ID of the Watson Studio space."""

    url: Optional[SecretStr] = None
    """URL of the Watson Machine Learning instance."""

    apikey: Optional[SecretStr] = None
    """API key to the Watson Machine Learning instance."""

    token: Optional[SecretStr] = None
    """Token to the Watson Machine Learning instance."""

    password: Optional[SecretStr] = None
    """Password to the Watson Machine Learning instance."""

    username: Optional[SecretStr] = None
    """Username to the Watson Machine Learning instance."""

    instance_id: Optional[SecretStr] = None
    """Instance_id of the Watson Machine Learning instance."""

    version: Optional[SecretStr] = None
    """Version of the Watson Machine Learning instance."""

    params: Optional[dict] = None
    """Model parameters to use during generate requests."""

    verify: Union[str, bool] = ""
    """One of the following:
        * the path to a CA_BUNDLE file
        * the path of a directory with certificates of trusted CAs
        * True - the default path to the truststore will be taken
        * False - no verification will be made"""

    streaming: bool = False
    """Whether to stream the results or not."""

    watsonx_model: Any = None  #: :meta private:

    model_config = ConfigDict(
        extra="forbid",
    )

    @classmethod
    def is_lc_serializable(cls) -> bool:
        return False

    @property
    def lc_secrets(self) -> Dict[str, str]:
        return {
            "url": "WATSONX_URL",
            "apikey": "WATSONX_APIKEY",
            "token": "WATSONX_TOKEN",
            "password": "WATSONX_PASSWORD",
            "username": "WATSONX_USERNAME",
            "instance_id": "WATSONX_INSTANCE_ID",
        }

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that credentials and python package exists in environment."""
        values["url"] = convert_to_secret_str(
            get_from_dict_or_env(values, "url", "WATSONX_URL")
        )
        if "cloud.ibm.com" in values.get("url", "").get_secret_value():
            values["apikey"] = convert_to_secret_str(
                get_from_dict_or_env(values, "apikey", "WATSONX_APIKEY")
            )
        else:
            if (
                not values["token"]
                and "WATSONX_TOKEN" not in os.environ
                and not values["password"]
                and "WATSONX_PASSWORD" not in os.environ
                and not values["apikey"]
                and "WATSONX_APIKEY" not in os.environ
            ):
                raise ValueError(
                    "Did not find 'token', 'password' or 'apikey',"
                    " please add an environment variable"
                    " `WATSONX_TOKEN`, 'WATSONX_PASSWORD' or 'WATSONX_APIKEY' "
                    "which contains it,"
                    " or pass 'token', 'password' or 'apikey'"
                    " as a named parameter."
                )
            elif values["token"] or "WATSONX_TOKEN" in os.environ:
                values["token"] = convert_to_secret_str(
                    get_from_dict_or_env(values, "token", "WATSONX_TOKEN")
                )
            elif values["password"] or "WATSONX_PASSWORD" in os.environ:
                values["password"] = convert_to_secret_str(
                    get_from_dict_or_env(values, "password", "WATSONX_PASSWORD")
                )
                values["username"] = convert_to_secret_str(
                    get_from_dict_or_env(values, "username", "WATSONX_USERNAME")
                )
            elif values["apikey"] or "WATSONX_APIKEY" in os.environ:
                values["apikey"] = convert_to_secret_str(
                    get_from_dict_or_env(values, "apikey", "WATSONX_APIKEY")
                )
                values["username"] = convert_to_secret_str(
                    get_from_dict_or_env(values, "username", "WATSONX_USERNAME")
                )
            if not values["instance_id"] or "WATSONX_INSTANCE_ID" not in os.environ:
                values["instance_id"] = convert_to_secret_str(
                    get_from_dict_or_env(values, "instance_id", "WATSONX_INSTANCE_ID")
                )

        try:
            from ibm_watsonx_ai.foundation_models import ModelInference

            credentials = {
                "url": values["url"].get_secret_value() if values["url"] else None,
                "apikey": values["apikey"].get_secret_value()
                if values["apikey"]
                else None,
                "token": values["token"].get_secret_value()
                if values["token"]
                else None,
                "password": values["password"].get_secret_value()
                if values["password"]
                else None,
                "username": values["username"].get_secret_value()
                if values["username"]
                else None,
                "instance_id": values["instance_id"].get_secret_value()
                if values["instance_id"]
                else None,
                "version": values["version"].get_secret_value()
                if values["version"]
                else None,
            }
            # Drop credentials that were not provided before handing them to the SDK.
            credentials_without_none_value = {
                key: value for key, value in credentials.items() if value is not None
            }

            watsonx_model = ModelInference(
                model_id=values["model_id"],
                deployment_id=values["deployment_id"],
                credentials=credentials_without_none_value,
                params=values["params"],
                project_id=values["project_id"],
                space_id=values["space_id"],
                verify=values["verify"],
            )
            values["watsonx_model"] = watsonx_model

        except ImportError:
            raise ImportError(
                "Could not import ibm_watsonx_ai python package. "
                "Please install it with `pip install ibm_watsonx_ai`."
            )
        return values

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {
            "model_id": self.model_id,
            "deployment_id": self.deployment_id,
            "params": self.params,
            "project_id": self.project_id,
            "space_id": self.space_id,
        }

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "IBM watsonx.ai"

    @staticmethod
    def _extract_token_usage(
        response: Optional[List[Dict[str, Any]]] = None,
    ) -> Dict[str, Any]:
        if response is None:
            return {"generated_token_count": 0, "input_token_count": 0}

        input_token_count = 0
        generated_token_count = 0

        def get_count_value(key: str, result: Dict[str, Any]) -> int:
            return result.get(key, 0) or 0

        for res in response:
            results = res.get("results")
            if results:
                input_token_count += get_count_value("input_token_count", results[0])
                generated_token_count += get_count_value(
                    "generated_token_count", results[0]
                )

        return {
            "generated_token_count": generated_token_count,
            "input_token_count": input_token_count,
        }

    def _get_chat_params(self, stop: Optional[List[str]] = None) -> Dict[str, Any]:
        params: Dict[str, Any] = {**self.params} if self.params else {}
        if stop is not None:
            params["stop_sequences"] = stop
        return params

    def _create_llm_result(self, response: List[dict]) -> LLMResult:
        """Create the LLMResult from the choices and prompts."""
        generations = []
        for res in response:
            results = res.get("results")
            if results:
                finish_reason = results[0].get("stop_reason")
                gen = Generation(
                    text=results[0].get("generated_text"),
                    generation_info={"finish_reason": finish_reason},
                )
                generations.append([gen])
        final_token_usage = self._extract_token_usage(response)
        llm_output = {
            "token_usage": final_token_usage,
            "model_id": self.model_id,
            "deployment_id": self.deployment_id,
        }
        return LLMResult(generations=generations, llm_output=llm_output)

    def _stream_response_to_generation_chunk(
        self,
        stream_response: Dict[str, Any],
    ) -> GenerationChunk:
        """Convert a stream response to a generation chunk."""
        if not stream_response["results"]:
            return GenerationChunk(text="")
        return GenerationChunk(
            text=stream_response["results"][0]["generated_text"],
            generation_info=dict(
                finish_reason=stream_response["results"][0].get("stop_reason", None),
                llm_output={
                    "model_id": self.model_id,
                    "deployment_id": self.deployment_id,
                },
            ),
        )
    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """Call the IBM watsonx.ai inference endpoint.
        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.
            run_manager: Optional callback manager.
        Returns:
            The string generated by the model.
        Example:
            .. code-block:: python

                response = watsonx_llm.invoke("What is a molecule")
        """
        result = self._generate(
            prompts=[prompt], stop=stop, run_manager=run_manager, **kwargs
        )
        return result.generations[0][0].text

    def _generate(
        self,
        prompts: List[str],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        stream: Optional[bool] = None,
        **kwargs: Any,
    ) -> LLMResult:
        """Call the IBM watsonx.ai inference endpoint which then generates the response.
        Args:
            prompts: List of strings (prompts) to pass into the model.
            stop: Optional list of stop words to use when generating.
            run_manager: Optional callback manager.
        Returns:
            The full LLMResult output.
        Example:
            .. code-block:: python

                response = watsonx_llm.generate(["What is a molecule"])
        """
        params = self._get_chat_params(stop=stop)
        should_stream = stream if stream is not None else self.streaming
        if should_stream:
            if len(prompts) > 1:
                raise ValueError(
                    f"WatsonxLLM currently only supports single prompt, got {prompts}"
                )
            generation = GenerationChunk(text="")
            stream_iter = self._stream(
                prompts[0], stop=stop, run_manager=run_manager, **kwargs
            )
            # Accumulate the streamed chunks into a single generation.
            for chunk in stream_iter:
                if generation is None:
                    generation = chunk
                else:
                    generation += chunk
            assert generation is not None
            if isinstance(generation.generation_info, dict):
                llm_output = generation.generation_info.pop("llm_output")
                return LLMResult(generations=[[generation]], llm_output=llm_output)
            return LLMResult(generations=[[generation]])
        else:
            response = self.watsonx_model.generate(prompt=prompts, params=params)
            return self._create_llm_result(response)

    def _stream(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[GenerationChunk]:
        """Call the IBM watsonx.ai inference endpoint which then streams the response.
        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.
            run_manager: Optional callback manager.
        Returns:
            The iterator which yields generation chunks.
        Example:
            .. code-block:: python

                response = watsonx_llm.stream("What is a molecule")
                for chunk in response:
                    print(chunk, end='')  # noqa: T201
        """
        params = self._get_chat_params(stop=stop)
        for stream_resp in self.watsonx_model.generate_text_stream(
            prompt=prompt, raw_response=True, params=params
        ):
            chunk = self._stream_response_to_generation_chunk(stream_resp)
            if run_manager:
                run_manager.on_llm_new_token(chunk.text, chunk=chunk)
            yield chunk
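

# Illustrative usage sketch (kept as a comment; assumes a configured
# ``WatsonxLLM`` instance such as ``watsonx_llm`` from the class docstring
# example and working credentials). The ``llm_output`` of a non-streaming
# batch call carries the token usage assembled by ``_extract_token_usage``:
#
#     result = watsonx_llm.generate(["What is a molecule"])
#     usage = result.llm_output["token_usage"]
#     print(usage["input_token_count"], usage["generated_token_count"])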