# Copyright 2022 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import importlib
import importlib.metadata
import os
import warnings
from functools import lru_cache, wraps

import torch
from packaging import version
from packaging.version import parse

from .environment import parse_flag_from_env, str_to_bool
from .versions import compare_versions, is_torch_version


# Try to run a native PyTorch job in an environment with TorchXLA installed by setting this value to 0.
USE_TORCH_XLA = parse_flag_from_env("USE_TORCH_XLA", default=True)

_torch_xla_available = False
if USE_TORCH_XLA:
    try:
        import torch_xla.core.xla_model as xm  # noqa: F401
        import torch_xla.runtime

        _torch_xla_available = True
    except ImportError:
        pass

# Kept around for the TPU availability check.
_tpu_available = _torch_xla_available

# Cache this result, as it is a C FFI call which can be pretty time-consuming.
_torch_distributed_available = torch.distributed.is_available()


def _is_package_available(pkg_name, metadata_name=None):
    # Check that we are not merely importing a directory named `pkg_name` but an actual installed
    # distribution, by also looking up its metadata.
    package_exists = importlib.util.find_spec(pkg_name) is not None
    if package_exists:
        try:
            # Some libraries have a different name in the distribution metadata
            _ = importlib.metadata.metadata(pkg_name if metadata_name is None else metadata_name)
            return True
        except importlib.metadata.PackageNotFoundError:
            return False
    return False


def is_torch_distributed_available() -> bool:
    return _torch_distributed_available


def is_ccl_available():
    try:
        pass
    except ImportError:
        print(
            "Intel(R) oneCCL Bindings for PyTorch* is required to run DDP on Intel(R) GPUs, but it is not"
            " detected. If you see \"ValueError: Invalid backend: 'ccl'\" error, please install Intel(R) oneCCL"
            " Bindings for PyTorch*."
        )
    return (
        importlib.util.find_spec("torch_ccl") is not None
        or importlib.util.find_spec("oneccl_bindings_for_pytorch") is not None
    )


def get_ccl_version():
    return importlib.metadata.version("oneccl_bind_pt")


def is_import_timer_available():
    return _is_package_available("import_timer")


def is_pynvml_available():
    return _is_package_available("pynvml") or _is_package_available("pynvml", "nvidia-ml-py")


def is_pytest_available():
    return _is_package_available("pytest")


def is_msamp_available():
    return _is_package_available("msamp", "ms-amp")


def is_schedulefree_available():
    return _is_package_available("schedulefree")


def is_transformer_engine_available():
    return _is_package_available("transformer_engine", "transformer-engine")


def is_lomo_available():
    return _is_package_available("lomo_optim")


def is_fp8_available():
    return is_msamp_available() or is_transformer_engine_available()


def is_cuda_available():
    """
    Checks if `cuda` is available via an `nvml-based` check which won't trigger the drivers and leave cuda
    uninitialized.
    """
    pytorch_nvml_based_cuda_check_previous_value = os.environ.get("PYTORCH_NVML_BASED_CUDA_CHECK")
    try:
        os.environ["PYTORCH_NVML_BASED_CUDA_CHECK"] = str(1)
        available = torch.cuda.is_available()
    finally:
        if pytorch_nvml_based_cuda_check_previous_value:
            os.environ["PYTORCH_NVML_BASED_CUDA_CHECK"] = pytorch_nvml_based_cuda_check_previous_value
        else:
            os.environ.pop("PYTORCH_NVML_BASED_CUDA_CHECK", None)

    return available


@lru_cache
def is_torch_xla_available(check_is_tpu=False, check_is_gpu=False):
    """
    Check if `torch_xla` is available. To train a native pytorch job in an environment with torch xla installed, set
    the USE_TORCH_XLA to false.
    """
    assert not (check_is_tpu and check_is_gpu), "The check_is_tpu and check_is_gpu cannot both be true."

    if not _torch_xla_available:
        return False
    elif check_is_gpu:
        return torch_xla.runtime.device_type() in ["GPU", "CUDA"]
    elif check_is_tpu:
        return torch_xla.runtime.device_type() == "TPU"

    return True


def is_deepspeed_available():
    if is_mlu_available():
        return _is_package_available("deepspeed", metadata_name="deepspeed-mlu")
    return _is_package_available("deepspeed")


def is_pippy_available():
    return is_torch_version(">=", "2.4.0")


def is_bf16_available(ignore_tpu=False):
    "Checks if bf16 is supported, optionally ignoring the TPU"
    if is_torch_xla_available(check_is_tpu=True):
        return not ignore_tpu
    if is_cuda_available():
        return torch.cuda.is_bf16_supported()
    if is_mps_available():
        return False
    return True


def is_4bit_bnb_available():
    package_exists = _is_package_available("bitsandbytes")
    if package_exists:
        bnb_version = version.parse(importlib.metadata.version("bitsandbytes"))
        return compare_versions(bnb_version, ">=", "0.39.0")
    return False


def is_8bit_bnb_available():
    package_exists = _is_package_available("bitsandbytes")
    if package_exists:
        bnb_version = version.parse(importlib.metadata.version("bitsandbytes"))
        return compare_versions(bnb_version, ">=", "0.37.2")
    return False


def is_bnb_available():
    return _is_package_available("bitsandbytes")


def is_bitsandbytes_multi_backend_available():
    if not is_bnb_available():
        return False

    import bitsandbytes as bnb

    return "multi_backend" in getattr(bnb, "features", set())


def is_torchvision_available():
    return _is_package_available("torchvision")


def is_megatron_lm_available():
    if str_to_bool(os.environ.get("ACCELERATE_USE_MEGATRON_LM", "False")) == 1:
        if importlib.util.find_spec("megatron") is not None:
            try:
                megatron_version = parse(importlib.metadata.version("megatron-core"))
                if compare_versions(megatron_version, "==", "0.5.0"):
                    return importlib.util.find_spec(".data", "megatron")
            except Exception as e:
                warnings.warn(f"Parse Megatron version failed. Exception:{e}")
                return False
    return False


def is_transformers_available():
    return _is_package_available("transformers")


def is_datasets_available():
    return _is_package_available("datasets")


def is_peft_available():
    return _is_package_available("peft")


def is_timm_available():
    return _is_package_available("timm")


def is_triton_available():
    return _is_package_available("triton")


def is_aim_available():
    package_exists = _is_package_available("aim")
    if package_exists:
        aim_version = version.parse(importlib.metadata.version("aim"))
        return compare_versions(aim_version, "<", "4.0.0")
    return False


def is_tensorboard_available():
    return _is_package_available("tensorboard") or _is_package_available("tensorboardX")


def is_wandb_available():
    return _is_package_available("wandb")


def is_comet_ml_available():
    return _is_package_available("comet_ml")


def is_boto3_available():
    return _is_package_available("boto3")


def is_rich_available():
    if _is_package_available("rich"):
        return parse_flag_from_env("ACCELERATE_ENABLE_RICH", False)
    return False


def is_sagemaker_available():
    return _is_package_available("sagemaker")


def is_tqdm_available():
    return _is_package_available("tqdm")


def is_clearml_available():
    return _is_package_available("clearml")


def is_pandas_available():
    return _is_package_available("pandas")


def is_mlflow_available():
    if _is_package_available("mlflow"):
        return True

    if importlib.util.find_spec("mlflow") is not None:
        try:
            _ = importlib.metadata.metadata("mlflow-skinny")
            return True
        except importlib.metadata.PackageNotFoundError:
            return False
    return False


def is_mps_available(min_version="1.12"):
    "Checks if MPS device is available. The minimum version required is 1.12."
    return is_torch_version(">=", min_version) and torch.backends.mps.is_available() and torch.backends.mps.is_built()


def is_ipex_available():
    "Checks if ipex is installed."

    def get_major_and_minor_from_version(full_version):
        return str(version.parse(full_version).major) + "." + str(version.parse(full_version).minor)

    _torch_version = importlib.metadata.version("torch")
    if importlib.util.find_spec("intel_extension_for_pytorch") is None:
        return False
    _ipex_version = "N/A"
    try:
        _ipex_version = importlib.metadata.version("intel_extension_for_pytorch")
    except importlib.metadata.PackageNotFoundError:
        return False
    torch_major_and_minor = get_major_and_minor_from_version(_torch_version)
    ipex_major_and_minor = get_major_and_minor_from_version(_ipex_version)
    if torch_major_and_minor != ipex_major_and_minor:
        warnings.warn(
            f"Intel Extension for PyTorch {ipex_major_and_minor} needs to work with PyTorch {torch_major_and_minor}.*,"
            f" but PyTorch {_torch_version} is found. Please switch to the matching version and run again."
        )
        return False
    return True


@lru_cache
def is_mlu_available(check_device=False):
    "Checks if `torch_mlu` is installed and potentially if a MLU is in the environment"
    if importlib.util.find_spec("torch_mlu") is None:
        return False

    import torch_mlu  # noqa: F401

    if check_device:
        try:
            # Will raise a RuntimeError if no MLU is found
            _ = torch.mlu.device_count()
            return torch.mlu.is_available()
        except RuntimeError:
            return False
    return hasattr(torch, "mlu") and torch.mlu.is_available()


@lru_cache
def is_musa_available(check_device=False):
    "Checks if `torch_musa` is installed and potentially if a MUSA is in the environment"
    if importlib.util.find_spec("torch_musa") is None:
        return False

    import torch_musa  # noqa: F401

    if check_device:
        try:
            # Will raise a RuntimeError if no MUSA is found
            _ = torch.musa.device_count()
            return torch.musa.is_available()
        except RuntimeError:
            return False
    return hasattr(torch, "musa") and torch.musa.is_available()


@lru_cache
def is_npu_available(check_device=False):
    "Checks if `torch_npu` is installed and potentially if a NPU is in the environment"
    if importlib.util.find_spec("torch_npu") is None:
        return False

    import torch_npu  # noqa: F401

    if check_device:
        try:
            # Will raise a RuntimeError if no NPU is found
            _ = torch.npu.device_count()
            return torch.npu.is_available()
        except RuntimeError:
            return False
    return hasattr(torch, "npu") and torch.npu.is_available()


@lru_cache
def is_xpu_available(check_device=False):
    """
    Checks if XPU acceleration is available either via `intel_extension_for_pytorch` or via stock PyTorch (>=2.4) and
    potentially if a XPU is in the environment
    """
    # Check if the user disabled it explicitly
    if not parse_flag_from_env("ACCELERATE_USE_XPU", default=True):
        return False

    if is_ipex_available():
        if is_torch_version("<=", "1.12"):
            return False
        import intel_extension_for_pytorch  # noqa: F401
    else:
        if is_torch_version("<=", "2.3"):
            return False

    if check_device:
        try:
            # Will raise a RuntimeError if no XPU is found
            _ = torch.xpu.device_count()
            return torch.xpu.is_available()
        except RuntimeError:
            return False
    return hasattr(torch, "xpu") and torch.xpu.is_available()


def is_dvclive_available():
    return _is_package_available("dvclive")


def is_torchdata_available():
    return _is_package_available("torchdata")


def is_torchdata_stateful_dataloader_available():
    package_exists = _is_package_available("torchdata")
    if package_exists:
        torchdata_version = version.parse(importlib.metadata.version("torchdata"))
        return compare_versions(torchdata_version, ">=", "0.8.0")
    return False
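

# A minimal sketch (not part of the upstream module): version-gated predicates such as
# `is_torchdata_stateful_dataloader_available` above are typically used to guard an optional
# import at the call site. `_load_stateful_dataloader_example` is a hypothetical helper name
# used only for this illustration.
def _load_stateful_dataloader_example():
    if not is_torchdata_stateful_dataloader_available():
        raise ImportError("`torchdata>=0.8.0` is required for `StatefulDataLoader`")
    from torchdata.stateful_dataloader import StatefulDataLoader

    return StatefulDataLoader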


def deepspeed_required(func):
    """
    A decorator that ensures the decorated function is only called when deepspeed is enabled.
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        from accelerate.state import AcceleratorState
        from accelerate.utils.dataclasses import DistributedType

        if AcceleratorState._shared_state != {} and AcceleratorState().distributed_type != DistributedType.DEEPSPEED:
            raise ValueError(
                "DeepSpeed is not enabled, please make sure that an `Accelerator` is configured for `deepspeed` "
                "before calling this function."
            )
        return func(*args, **kwargs)

    return wrapper
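

# An illustrative usage sketch (an assumption, not part of the upstream module): running this
# file directly would print a small capability report built from the checks above.
if __name__ == "__main__":
    _checks = [
        ("cuda", is_cuda_available),
        ("mps", is_mps_available),
        ("bf16", is_bf16_available),
        ("deepspeed", is_deepspeed_available),
        ("bitsandbytes (4-bit)", is_4bit_bnb_available),
    ]
    for _name, _check in _checks:
        print(f"{_name}: {_check()}")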