
import base64
import json
import logging
import subprocess
import textwrap
import time
from typing import Any, Dict, List, Mapping, Optional

import requests
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.utils import get_from_dict_or_env, pre_init
from langchain_core.utils.pydantic import get_fields
from pydantic import ConfigDict, Field, model_validator

logger = logging.getLogger(__name__)

DEFAULT_NUM_TRIES = 10
DEFAULT_SLEEP_TIME = 4


class Beam(LLM):
    """Beam API for gpt2 large language model.

    To use, you should have the ``beam-sdk`` python package installed,
    and the environment variable ``BEAM_CLIENT_ID`` set with your client id
    and ``BEAM_CLIENT_SECRET`` set with your client secret. Information on how
    to get this is available here: https://docs.beam.cloud/account/api-keys.

    The wrapper can then be called as follows, where the name, cpu, memory, gpu,
    python version, and python packages can be updated accordingly. Once deployed,
    the instance can be called.

    Example:
        .. code-block:: python

            llm = Beam(model_name="gpt2",
                name="langchain-gpt2",
                cpu="8",
                memory="32Gi",
                gpu="A10G",
                python_version="python3.8",
                python_packages=[
                    "diffusers[torch]>=0.10",
                    "transformers",
                    "torch",
                    "pillow",
                    "accelerate",
                    "safetensors",
                    "xformers",],
                max_length="50")
            llm._deploy()
            call_result = llm._call(input)

    """

    model_name: str = ""
    name: str = ""
    cpu: str = ""
    memory: str = ""
    gpu: str = ""
    python_version: str = ""
    python_packages: List[str] = []
    max_length: str = ""
    url: str = ""
    """model endpoint to use"""

    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
    """Holds any model parameters valid for `create` call not
    explicitly specified."""

    beam_client_id: str = ""
    beam_client_secret: str = ""
    app_id: Optional[str] = None

    model_config = ConfigDict(
        extra="forbid",
    )

    @model_validator(mode="before")
    @classmethod
    def build_extra(cls, values: Dict[str, Any]) -> Any:
        """Build extra kwargs from additional params that were passed in."""
        all_required_field_names = {field.alias for field in get_fields(cls).values()}

        extra = values.get("model_kwargs", {})
        for field_name in list(values):
            if field_name not in all_required_field_names:
                if field_name in extra:
                    raise ValueError(f"Found {field_name} supplied twice.")
                logger.warning(
                    f"""{field_name} was transferred to model_kwargs.
                    Please confirm that {field_name} is what you intended."""
                )
                extra[field_name] = values.pop(field_name)
        values["model_kwargs"] = extra
        return values

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that api key and python package exists in environment."""
        beam_client_id = get_from_dict_or_env(
            values, "beam_client_id", "BEAM_CLIENT_ID"
        )
        beam_client_secret = get_from_dict_or_env(
            values, "beam_client_secret", "BEAM_CLIENT_SECRET"
        )
        values["beam_client_id"] = beam_client_id
        values["beam_client_secret"] = beam_client_secret
        return values

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {
            "model_name": self.model_name,
            "name": self.name,
            "cpu": self.cpu,
            "memory": self.memory,
            "gpu": self.gpu,
            "python_version": self.python_version,
            "python_packages": self.python_packages,
            "max_length": self.max_length,
            "model_kwargs": self.model_kwargs,
        }

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "beam"

    def app_creation(self) -> None:
        """Creates a Python file which will contain your Beam app definition."""
        script = textwrap.dedent(
            """\
        import beam

        # The environment your code will run on
        app = beam.App(
            name="{name}",
            cpu={cpu},
            memory="{memory}",
            gpu="{gpu}",
            python_version="{python_version}",
            python_packages={python_packages},
        )

        app.Trigger.RestAPI(
            inputs={{"prompt": beam.Types.String(), "max_length": beam.Types.String()}},
            outputs={{"text": beam.Types.String()}},
            handler="run.py:beam_langchain",
        )

        zapp.pyw)r   r   r   r   r   r   N)textwrapdedentopenwriteformatr   r   r   r   r   r   rC   scriptscript_namefiles       r/   app_creationzBeam.app_creation   s    
 
. +s## 
	tJJ;#'#6$($8   	 	 	
	 
	 
	 
	 
	 
	 
	 
	 
	 
	 
	 
	 
	 
	 
	 
	 
	 
	s   ABBBc                     t          j        d          }d}t          |d          5 }|                    |                    | j                             ddd           dS # 1 swxY w Y   dS )z5Creates a Python file which will be deployed on beam.a  
        import os
        import transformers
        from transformers import GPT2LMHeadModel, GPT2Tokenizer

        model_name = "{model_name}"

        def beam_langchain(**inputs):
            prompt = inputs["prompt"]
            length = inputs["max_length"]

            tokenizer = GPT2Tokenizer.from_pretrained(model_name)
            model = GPT2LMHeadModel.from_pretrained(model_name)
            encodedPrompt = tokenizer.encode(prompt, return_tensors='pt')
            outputs = model.generate(encodedPrompt, max_length=int(length),
              do_sample=True, pad_token_id=tokenizer.eos_token_id)
            output = tokenizer.decode(outputs[0], skip_special_tokens=True)

            print(output)  # noqa: T201
            return {{"text": output}}

        zrun.pyrI   )r   N)rJ   rK   rL   rM   rN   r   rO   s       r/   run_creationzBeam.run_creation   s    
 
2 +s## 	BtJJv}}}@@AAA	B 	B 	B 	B 	B 	B 	B 	B 	B 	B 	B 	B 	B 	B 	B 	B 	B 	Bs   /A##A'*A'c                    	 ddl }|j        dk    rt          n# t          $ r t          d          w xY w|                                  |                                  t          j        dddd          }|j        dk    r|j        }t          
                    |           |                    d          }|D ]r}|                    d	          r[|                    d
          d         | _        |                    d          d                                         | _        | j        c S st!          d|           t!          d|j                   )Call to Beam.r   Nr   zCould not import beam python package. Please install it with `curl https://raw.githubusercontent.com/slai-labs/get-beam/main/get-beam.sh -sSfL | sh`.zbeam deploy app.pyT)shellcapture_outputtext
z. i  Send requests to: https://apps.beam.cloud//:   z\Failed to retrieve the appID from the deployment output.
                Deployment output: zDeployment failed. Error: )rF   __path__ImportErrorrS   rU   
subprocessrun
returncodestdoutr5   infosplit
startswithr"   stripr   r4   stderr)rC   rF   processoutputlineslines         r/   _deployzBeam._deploy   s   	KKK}""!! # 	 	 	:  	 	. T
 
 
 ""^FKKLL&&E ' '??#STT '"&**S//""5DK#zz#q17799DH;&&&'
 /$*/ /  
 J'.JJKKKs    3c                     | j         r| j         dz   | j        z   }n| j        }t          j        |                                                                          S )Nr^   )r    r!   base64	b64encodeencodedecode)rC   credential_strs     r/   authorizationzBeam.authorization   sU     	5!0369PPNN!4N 5 5 7 788??AAAr1   promptstoprun_managerkwargsc                    | j         r
d| j         z   n| j        }|| j        d}|                    |           ddd| j        z   ddd}t          t                    D ]k}t          j        ||t          j
        |          	          }	|	j        d
k    r|		                                d         c S t          j        t                     lt                              d           dS )rW   zhttps://apps.beam.cloud/)rw   r   z*/*zgzip, deflatezBasic z
keep-alivezapplication/json)AcceptzAccept-EncodingAuthorization
ConnectionzContent-Type)headersdata   rZ   z"Unable to successfully call model.r   )r"   r   r   updaterv   rangeDEFAULT_NUM_TRIESrequestspostjsondumpsstatus_codetimesleepDEFAULT_SLEEP_TIMEr5   r6   )
rC   rw   rx   ry   rz   r   payloadr   _requests
             r/   _callz
Beam._call   s     ;?+S(4;6648#4?CCv.%(::&.
 
 ()) 	+ 	+AmCtz'?R?RSSSG"c))||~~f----J)****;<<<rr1   )r(   N)NN)+__name__
__module____qualname____doc__r   str__annotations__r   r   r   r   r   r   r   r   r   r   dictr   r   r   r    r!   r"   r   r   model_configr   classmethodr;   r   r?   propertyr   rD   rG   rS   rU   ro   rv   r3   r   r   r+   r1   r/   r   r      s           D JD#NNNCMMMFCCMMMNC!#OT#Y###JCMMM#(5#>#>#>L$sCx.>>> NC     FHSM   :  L _(###c3h C    [ $#" 
$ 
4 
 
 
 X
 
WS#X%6 
 
 
 X
 3    X$ $ $ $LB B B B>%L %L %L %L %LN Bs B B B XB  $:>	  tn 67	
  
     r1   r   ) rq   r   loggingrb   rJ   r   typingr   r   r   r   r   r   langchain_core.callbacksr   #langchain_core.language_models.llmsr	   langchain_core.utilsr
   r   langchain_core.utils.pydanticr   pydanticr   r   r   	getLoggerr   r5   r   r   r   r+   r1   r/   <module>r      s9            5 5 5 5 5 5 5 5 5 5 5 5 5 5  = = = = = = 3 3 3 3 3 3 ? ? ? ? ? ? ? ? 4 4 4 4 4 4 7 7 7 7 7 7 7 7 7 7		8	$	$  { { { { {3 { { { { {r1   
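

# --- Illustrative usage sketch (not part of the upstream module) ------------
# A minimal, hedged example of how the wrapper above might be deployed and
# called, assuming the BEAM_CLIENT_ID and BEAM_CLIENT_SECRET environment
# variables are set and the Beam CLI is installed. The app name, resources,
# and prompt below are placeholders; string values are used because the
# corresponding fields are typed as `str`.
if __name__ == "__main__":
    llm = Beam(
        model_name="gpt2",
        name="langchain-gpt2",
        cpu="8",
        memory="32Gi",
        gpu="A10G",
        python_version="python3.8",
        python_packages=[
            "diffusers[torch]>=0.10",
            "transformers",
            "torch",
            "pillow",
            "accelerate",
            "safetensors",
            "xformers",
        ],
        max_length="50",
    )
    llm._deploy()  # writes app.py / run.py, then runs `beam deploy app.py`
    print(llm._call("Once upon a time"))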