
    ڧg}                        U d dl Z d dlmZ d dlmZ d dlmZmZmZm	Z	m
Z
mZ d dlZd dlmZ ddlmZmZ ddlmZmZ ddlmZ d	d
lmZmZmZ d	dlmZ d	dlmZmZ g dZ  G d de
          Z! G d de          Z" G d dej#                  Z$ G d dej#                  Z% G d dej#                  Z&de'de'de'de'de'dee         de(ded e&fd!Z)d"eiZ*ee+ef         e,d#<   i e*d$d%d&Z- G d' d(e          Z. G d) d*e          Z/ G d+ d,e          Z0 G d- d.e          Z1 G d/ d0e          Z2 e             ed1e.j3        f2          dd3d4dee.         de(ded e&fd5                        Z4 e             ed1e/j3        f2          dd3d4dee/         de(ded e&fd6                        Z5 e             ed1e0j3        f2          dd3d4dee0         de(ded e&fd7                        Z6 e             ed1e1j3        f2          dd3d4dee1         de(ded e&fd8                        Z7 e             ed92          dd3d4dee2         de(ded e&fd:                        Z8	 	 dCd=e'de'd>d?d@e+dAe(d d?fdBZ9dS )D    N)OrderedDict)partial)AnyCallableDictList
NamedTupleOptional   )Conv2dNormActivationMLP)ImageClassificationInterpolationMode)_log_api_usage_once   )register_modelWeightsWeightsEnum)_IMAGENET_CATEGORIES)_ovewrite_named_paramhandle_legacy_interface)VisionTransformerViT_B_16_WeightsViT_B_32_WeightsViT_L_16_WeightsViT_L_32_WeightsViT_H_14_Weightsvit_b_16vit_b_32vit_l_16vit_l_32vit_h_14c                       e Zd ZU eed<   eed<   eed<   ej        Zedej	        f         ed<   ej
        Zedej	        f         ed<   dS )ConvStemConfigout_channelskernel_sizestride.
norm_layeractivation_layerN)__name__
__module____qualname__int__annotations__nnBatchNorm2dr(   r   ModuleReLUr)        a/var/www/html/ai-engine/env/lib/python3.11/site-packages/torchvision/models/vision_transformer.pyr$   r$       sn         KKK+->Jbi(99913hsBI~.88888r4   r$   c                   >     e Zd ZdZdZdededef fdZ fdZ xZ	S )MLPBlockzTransformer MLP block.r   in_dimmlp_dimdropoutc                 p   t                                          |||gt          j        d |           |                                 D ]m}t          |t          j                  rQt          j                            |j	                   |j
        &t          j                            |j
        d           nd S )N)r)   inplacer:   ư>std)super__init__r/   GELUmodules
isinstanceLinearinitxavier_uniform_weightbiasnormal_)selfr8   r9   r:   m	__class__s        r5   rA   zMLPBlock.__init__-   s    '6!2RWVZdklll 	6 	6A!RY'' 6''1116%GOOAFO555		6 	6r4   c           	      *   |                     dd           }||dk     rLt          d          D ]<}	dD ]7}
| d|	dz    d|
 }| d|	z   d|
 }||v r|                    |          ||<   8=t                                          |||||||           d S )Nversionr   )rH   rI   linear_r   .   )getrangepopr@   _load_from_state_dict)rK   
state_dictprefixlocal_metadatastrictmissing_keysunexpected_keys
error_msgsrO   itypeold_keynew_keyrM   s                r5   rV   zMLPBlock._load_from_state_dict6   s     !$$Y55?gkk1XX F F. F FD!'<<!<<d<<G!'5155t55G*,,.8nnW.E.E
7+	F 	%%	
 	
 	
 	
 	
r4   )
r*   r+   r,   __doc___versionr-   floatrA   rV   __classcell__rM   s   @r5   r7   r7   (   sv          H6s 6S 65 6 6 6 6 6 6
 
 
 
 
 
 
 
 
r4   r7   c                        e Zd ZdZ eej        d          fdededededed	e	d
e
j        j        f         f fdZde
j        fdZ xZS )EncoderBlockzTransformer encoder block.r=   eps	num_heads
hidden_dimr9   r:   attention_dropoutr(   .c                 .   t                                                       || _         ||          | _        t	          j        |||d          | _        t	          j        |          | _         ||          | _	        t          |||          | _        d S )NT)r:   batch_first)r@   rA   rk   ln_1r/   MultiheadAttentionself_attentionDropoutr:   ln_2r7   mlp)rK   rk   rl   r9   r:   rm   r(   rM   s          r5   rA   zEncoderBlock.__init__Y   s     	" Jz**	 3J	Sdrvwwwz'** Jz**	J99r4   inputc                 \   t          j        |                                dk    d|j                    |                     |          }|                     |||d          \  }}|                     |          }||z   }|                     |          }|                     |          }||z   S )NrR   2Expected (batch_size, seq_length, hidden_dim) got F)need_weights)	torch_assertdimshaperp   rr   r:   rt   ru   )rK   rv   x_ys        r5   forwardzEncoderBlock.forwardn   s    eiikkQ&(j]b]h(j(jkkkIIe""1a"??1LLOOIIIaLLHHQKK1ur4   r*   r+   r,   rb   r   r/   	LayerNormr-   rd   r   rz   r1   rA   Tensorr   re   rf   s   @r5   rh   rh   V   s        $$ 6=WR\t5T5T5T: :: : 	:
 : !: S%(/12: : : : : :*	U\ 	 	 	 	 	 	 	 	r4   rh   c                        e Zd ZdZ eej        d          fdededededed	ed
ede	de
j        j        f         f fdZde
j        fdZ xZS )Encoderz?Transformer Model Encoder for sequence to sequence translation.r=   ri   
seq_length
num_layersrk   rl   r9   r:   rm   r(   .c	           	         t                                                       t          j        t	          j        d||                              d                    | _        t          j        |          | _	        t                      }	t          |          D ]}
t          ||||||          |	d|
 <   t          j        |	          | _         ||          | _        d S )Nr   g{Gz?r>   encoder_layer_)r@   rA   r/   	Parameterrz   emptyrJ   pos_embeddingrs   r:   r   rT   rh   
Sequentiallayersln)rK   r   r   rk   rl   r9   r:   rm   r(   r   r^   rM   s              r5   rA   zEncoder.__init__}   s     	  \%+aZ*P*P*X*X]a*X*b*bccz'**.9mmz"" 	 	A+7!, ,F'A''(( mF++*Z((r4   rv   c                     t          j        |                                dk    d|j                    || j        z   }|                     |                     |                     |                              S )NrR   rx   )rz   r{   r|   r}   r   r   r   r:   )rK   rv   s     r5   r   zEncoder.forward   se    eiikkQ&(j]b]h(j(jkkk**wwt{{4<<#6#677888r4   r   rf   s   @r5   r   r   z   s        II 6=WR\t5T5T5T) )) ) 	)
 ) ) ) !) S%(/12) ) ) ) ) ):9U\ 9 9 9 9 9 9 9 9r4   r   c                       e Zd ZdZdddd eej        d          dfdeded	ed
edededededede	e         de
dej        j        f         de	ee                  f fdZdej        dej        fdZdej        fdZ xZS )r   z;Vision Transformer as per https://arxiv.org/abs/2010.11929.        i  Nr=   ri   
image_size
patch_sizer   rk   rl   r9   r:   rm   num_classesrepresentation_sizer(   .conv_stem_configsc                 
   t                                                       t          |            t          j        ||z  dk    d           || _        || _        || _        || _        || _	        || _
        |	| _        |
| _        || _        |t          j                    }d}t!          |          D ]Q\  }}|                    d| t%          ||j        |j        |j        |j        |j                             |j        }R|                    dt          j        ||d                     || _        nt          j        d|||	          | _        ||z  d
z  }t          j        t          j        dd|                    | _        |dz  }t9          ||||||||          | _        || _        t?                      }|
t          j         ||	          |d<   nFt          j         ||
          |d<   t          j!                    |d<   t          j         |
|	          |d<   t          j        |          | _"        tG          | j        t          j                  r| j        j$        | j        j        d         z  | j        j        d         z  }t          j%        &                    | j        j'        tQ          j)        d|z                       | j        j*        )t          j%        +                    | j        j*                   n| j        j,        tG          | j        j,        t          j                  rt          j%        -                    | j        j,        j'        dtQ          j)        d| j        j,        j        z                       | j        j,        j*        .t          j%        +                    | j        j,        j*                   t]          | j"        d          rtG          | j"        j/        t          j                   r| j"        j/        j0        }t          j%        &                    | j"        j/        j'        tQ          j)        d|z                       t          j%        +                    | j"        j/        j*                   tG          | j"        j1        t          j                   r^t          j%        +                    | j"        j1        j'                   t          j%        +                    | j"        j1        j*                   d S d S )Nr   z&Input shape indivisible by patch size!rR   conv_bn_relu_)in_channelsr%   r&   r'   r(   r)   	conv_lastr   )r   r%   r&   )r   r%   r&   r'   r   head
pre_logitsactr>   r   g       @)meanr?   )2r@   rA   r   rz   r{   r   r   rl   r9   rm   r:   r   r   r(   r/   r   	enumerate
add_moduler   r%   r&   r'   r)   Conv2d	conv_projr   zerosclass_tokenr   encoderr   r   rE   TanhheadsrD   r   rF   trunc_normal_rH   mathsqrtrI   zeros_r   rJ   hasattrr   in_featuresr   )rK   r   r   r   rk   rl   r9   r:   rm   r   r   r(   r   seq_projprev_channelsr^   conv_stem_layer_configr   heads_layersfan_inrM   s                       r5   rA   zVisionTransformer.__init__   sn    	D!!!j:-24\]]]$$$!2&#6 $(}HM-67H-I-I D D))##'A''($1%;%H$:$F5<#9#D)?)P  
 
 
 !7 CRY=zghiii   )1DNNYJJWa  DN !J.14
 <Aq*(E(EFFa
	
 	
 %4?MM&#%9Z#E#EL  )+:?R)S)SL&"$'))L#%9-@+#N#NL ]<00
dnbi00 	>^/$.2LQ2OORVR`RlmnRooFG!!$."7TYq6z=R=R!SSS~".t~2333^%1jAY[][d6e6e1GOO(/ctyt~OgOtIt?u?u     ~',8t~7<===4:|,, 	7DJ<QSUS\1]1] 	7Z*6FG!!$*"7">DIaRXjDYDY!ZZZGNN4:05666djory11 	1GNN4:?1222GNN4:?/00000	1 	1r4   r~   returnc                 ~   |j         \  }}}}| j        }t          j        || j        k    d| j         d| d           t          j        || j        k    d| j         d| d           ||z  }||z  }|                     |          }|                    || j        ||z            }|                    ddd          }|S )NzWrong image height! Expected z	 but got !zWrong image width! Expected r   r   r   )	r}   r   rz   r{   r   r   reshaperl   permute)	rK   r~   nchwpn_hn_ws	            r5   _process_inputz VisionTransformer._process_input  s    W
1aOa4?*,jDO,j,jfg,j,j,jkkka4?*,i4?,i,ief,i,i,ijjj1f1f NN1IIa#)44 IIaAr4   c                    |                      |          }|j        d         }| j                            |dd          }t	          j        ||gd          }|                     |          }|d d df         }|                     |          }|S )Nr   r   r|   )r   r}   r   expandrz   catr   r   )rK   r~   r   batch_class_tokens       r5   r   zVisionTransformer.forward!  s    ""GAJ !,33Ar2>>I(!,!444LLOO aaadGJJqMMr4   )r*   r+   r,   rb   r   r/   r   r-   rd   r
   r   rz   r1   r   r$   rA   r   r   r   re   rf   s   @r5   r   r      sV       EE #&-15<WR\t5T5T5T<@g1 g1g1 g1 	g1
 g1 g1 g1 g1 !g1 g1 &c]g1 S%(/12g1 $D$89g1 g1 g1 g1 g1 g1R     *        r4   r   r   r   rk   rl   r9   weightsprogresskwargsr   c           
         |ut          |dt          |j        d                              |j        d         d         |j        d         d         k    sJ t          |d|j        d         d                    |                    dd          }t	          d|| ||||d|}	|r*|	                    |                    |d	
                     |	S )Nr   
categoriesmin_sizer   r   r      )r   r   r   rk   rl   r9   T)r   
check_hashr3   )r   lenmetarU   r   load_state_dictget_state_dict)
r   r   rk   rl   r9   r   r   r   r   models
             r5   _vision_transformerr   4  s     fmSl9S5T5TUUU|J'*gl:.Fq.IIIIIflGL4LQ4OPPPL#..J    E  Zg44hSW4XXYYYLr4   r   _COMMON_METAz(https://github.com/facebookresearch/SWAGz:https://github.com/facebookresearch/SWAG/blob/main/LICENSE)recipelicensec                   (   e Zd Z ed eed          i edddddd	d
idddd          Z ed eeddej	                  i e
dddddd
idddd          Z ed eeddej	                  i e
ddddddd
idddd           ZeZd!S )"r   z9https://download.pytorch.org/models/vit_b_16-c867db91.pthr   	crop_sizei(r   r   zNhttps://github.com/pytorch/vision/tree/main/references/classification#vit_b_16ImageNet-1KgS㥛DT@g1ZW@zacc@1zacc@5gMb1@g(\t@
                These weights were trained from scratch by using a modified version of `DeIT
                <https://arxiv.org/abs/2012.12877>`_'s training recipe.
            
num_paramsr   r   _metrics_ops
_file_size_docsurl
transformsr   z>https://download.pytorch.org/models/vit_b_16_swag-9ac1b537.pth  r   resize_sizeinterpolationi^-)r   r   g~jtSU@giX@gˡEK@g|?5^t@
                These weights are learnt via transfer learning by end-to-end fine-tuning the original
                `SWAG <https://arxiv.org/abs/2201.08371>`_ weights on ImageNet-1K data.
            r   r   r   r   r   r   zAhttps://download.pytorch.org/models/vit_b_16_lc_swag-4e70ced5.pth+https://github.com/pytorch/vision/pull/5793gbX9xT@gQX@
                These weights are composed of the original frozen `SWAG <https://arxiv.org/abs/2201.08371>`_ trunk
                weights and a linear classifier learnt on top of them trained on ImageNet-1K data.
            r   r   r   r   r   r   r   Nr*   r+   r,   r   r   r   r   IMAGENET1K_V1r   BICUBIC_COMMON_SWAG_METAIMAGENET1K_SWAG_E2E_V1IMAGENET1K_SWAG_LINEAR_V1DEFAULTr3   r4   r5   r   r   _  s       GG7.#>>>

""f##    !
 
 
  M, %WL7+3	
 
 


""##    !
 
 
  4 !(O7+3	
 
 


C""##    !
 
 
! ! !6 GGGr4   r   c                   f    e Zd Z ed eed          i edddddd	d
idddd          ZeZdS )r   z9https://download.pytorch.org/models/vit_b_32-d86f8d99.pthr   r   i1Br   zNhttps://github.com/pytorch/vision/tree/main/references/classification#vit_b_32r   g|?5^R@gW@r   gA`Т@gl	u@r   r   r   N	r*   r+   r,   r   r   r   r   r   r   r3   r4   r5   r   r     s        GG7.#>>>

""f##    !
 
 
  M, GGGr4   r   c                   *   e Zd Z ed eedd          i eddddd	d
didddd          Z ed eeddej	                  i e
ddddddidddd          Z ed eeddej	                  i e
dddddddiddd d!          ZeZd"S )#r   z9https://download.pytorch.org/models/vit_l_16-852ce7e3.pthr      )r   r   i#r   zNhttps://github.com/pytorch/vision/tree/main/references/classification#vit_l_16r   g|?5^S@gFԨW@r   gףp=
N@g;O$@a  
                These weights were trained from scratch by using a modified version of TorchVision's
                `new training recipe
                <https://pytorch.org/blog/how-to-train-state-of-the-art-models-using-torchvision-latest-primitives/>`_.
            r   r   z>https://download.pytorch.org/models/vit_l_16_swag-4f3808c9.pth   r   i0)r   r   gjtV@gT㥛ĠX@gƟv@gy&11@r   r   zAhttps://download.pytorch.org/models/vit_l_16_lc_swag-4d563306.pthr   gMbXIU@g^I[X@r   r   Nr   r3   r4   r5   r   r     s       GG7.#3OOO

#"f##    "
 
 
  M. %WL7+3	
 
 


#"##    "
 
 
  4 !(O7+3	
 
 


C#"##    "
 
 
! ! !6 GGGr4   r   c                   f    e Zd Z ed eed          i edddddd	d
idddd          ZeZdS )r   z9https://download.pytorch.org/models/vit_l_32-c7638314.pthr   r   i[Er   zNhttps://github.com/pytorch/vision/tree/main/references/classification#vit_l_32r   g|?5>S@gGzDW@r   gK7.@gE@r   r   r   Nr   r3   r4   r5   r   r     s        GG7.#>>>

#"f#"    "
 
 
  M, GGGr4   r   c                       e Zd Z ed eeddej                  i edddddd	id
ddd          Z	 ed eeddej                  i eddddddd	idddd          Z
e	ZdS )r   z>https://download.pytorch.org/models/vit_h_14_swag-80465313.pth  r   i%)r   r   r   gS#V@g#~jX@r   g~jŏ@gK7I@r   r   r   zAhttps://download.pytorch.org/models/vit_h_14_lc_swag-c1eb923e.pthr   r   i@%r   gZd;OmU@gQnX@g=
ףpd@gIk֢@r   r   N)r*   r+   r,   r   r   r   r   r   r   r   r   r   r3   r4   r5   r   r   2  s"       $WL7+3	
 
 


#"##    "
 
 
  4 !(O7+3	
 
 


C#"##    "
 
 
! ! !6 %GGGr4   r   
pretrained)r   T)r   r   c                 ^    t                               |           } t          dddddd| |d|S )a  
    Constructs a vit_b_16 architecture from
    `An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale <https://arxiv.org/abs/2010.11929>`_.

    Args:
        weights (:class:`~torchvision.models.ViT_B_16_Weights`, optional): The pretrained
            weights to use. See :class:`~torchvision.models.ViT_B_16_Weights`
            below for more details and possible values. By default, no pre-trained weights are used.
        progress (bool, optional): If True, displays a progress bar of the download to stderr. Default is True.
        **kwargs: parameters passed to the ``torchvision.models.vision_transformer.VisionTransformer``
            base class. Please refer to the `source code
            <https://github.com/pytorch/vision/blob/main/torchvision/models/vision_transformer.py>`_
            for more details about this class.

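    Example:
        A minimal usage sketch (``img`` is an assumed ``PIL.Image``; downloading the
        weights requires network access):

        .. code-block:: python

            from torchvision.models import vit_b_16, ViT_B_16_Weights

            weights = ViT_B_16_Weights.IMAGENET1K_V1
            model = vit_b_16(weights=weights).eval()
            preprocess = weights.transforms()       # 224x224 crop + ImageNet normalization
            batch = preprocess(img).unsqueeze(0)    # (1, 3, 224, 224)
            with torch.no_grad():
                class_id = model(batch).softmax(dim=-1).argmax().item()
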
    .. autoclass:: torchvision.models.ViT_B_16_Weights
        :members:
    """
    weights = ViT_B_16_Weights.verify(weights)

    return _vision_transformer(
        patch_size=16,
        num_layers=12,
        num_heads=12,
        hidden_dim=768,
        mlp_dim=3072,
        weights=weights,
        progress=progress,
        **kwargs,
    )


@register_model()
@handle_legacy_interface(weights=("pretrained", ViT_B_32_Weights.IMAGENET1K_V1))
def vit_b_32(*, weights: Optional[ViT_B_32_Weights] = None, progress: bool = True, **kwargs: Any) -> VisionTransformer:
    """
    Constructs a vit_b_32 architecture from
    `An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale <https://arxiv.org/abs/2010.11929>`_.

    Args:
        weights (:class:`~torchvision.models.ViT_B_32_Weights`, optional): The pretrained
            weights to use. See :class:`~torchvision.models.ViT_B_32_Weights`
            below for more details and possible values. By default, no pre-trained weights are used.
        progress (bool, optional): If True, displays a progress bar of the download to stderr. Default is True.
        **kwargs: parameters passed to the ``torchvision.models.vision_transformer.VisionTransformer``
            base class. Please refer to the `source code
            <https://github.com/pytorch/vision/blob/main/torchvision/models/vision_transformer.py>`_
            for more details about this class.

    .. autoclass:: torchvision.models.ViT_B_32_Weights
        :members:
    """
    weights = ViT_B_32_Weights.verify(weights)

    return _vision_transformer(
        patch_size=32,
        num_layers=12,
        num_heads=12,
        hidden_dim=768,
        mlp_dim=3072,
        weights=weights,
        progress=progress,
        **kwargs,
    )


@register_model()
@handle_legacy_interface(weights=("pretrained", ViT_L_16_Weights.IMAGENET1K_V1))
def vit_l_16(*, weights: Optional[ViT_L_16_Weights] = None, progress: bool = True, **kwargs: Any) -> VisionTransformer:
    """
    Constructs a vit_l_16 architecture from
    `An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale <https://arxiv.org/abs/2010.11929>`_.

    Args:
        weights (:class:`~torchvision.models.ViT_L_16_Weights`, optional): The pretrained
            weights to use. See :class:`~torchvision.models.ViT_L_16_Weights`
            below for more details and possible values. By default, no pre-trained weights are used.
        progress (bool, optional): If True, displays a progress bar of the download to stderr. Default is True.
        **kwargs: parameters passed to the ``torchvision.models.vision_transformer.VisionTransformer``
            base class. Please refer to the `source code
            <https://github.com/pytorch/vision/blob/main/torchvision/models/vision_transformer.py>`_
            for more details about this class.

    .. autoclass:: torchvision.models.ViT_L_16_Weights
        :members:
    """
    weights = ViT_L_16_Weights.verify(weights)

    return _vision_transformer(
        patch_size=16,
        num_layers=24,
        num_heads=16,
        hidden_dim=1024,
        mlp_dim=4096,
        weights=weights,
        progress=progress,
        **kwargs,
    )


@register_model()
@handle_legacy_interface(weights=("pretrained", ViT_L_32_Weights.IMAGENET1K_V1))
def vit_l_32(*, weights: Optional[ViT_L_32_Weights] = None, progress: bool = True, **kwargs: Any) -> VisionTransformer:
    """
    Constructs a vit_l_32 architecture from
    `An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale <https://arxiv.org/abs/2010.11929>`_.

    Args:
        weights (:class:`~torchvision.models.ViT_L_32_Weights`, optional): The pretrained
            weights to use. See :class:`~torchvision.models.ViT_L_32_Weights`
            below for more details and possible values. By default, no pre-trained weights are used.
        progress (bool, optional): If True, displays a progress bar of the download to stderr. Default is True.
        **kwargs: parameters passed to the ``torchvision.models.vision_transformer.VisionTransformer``
            base class. Please refer to the `source code
            <https://github.com/pytorch/vision/blob/main/torchvision/models/vision_transformer.py>`_
            for more details about this class.

    .. autoclass:: torchvision.models.ViT_L_32_Weights
        :members:
    """
    weights = ViT_L_32_Weights.verify(weights)

    return _vision_transformer(
        patch_size=32,
        num_layers=24,
        num_heads=16,
        hidden_dim=1024,
        mlp_dim=4096,
        weights=weights,
        progress=progress,
        **kwargs,
    )


@register_model()
@handle_legacy_interface(weights=("pretrained", None))
def vit_h_14(*, weights: Optional[ViT_H_14_Weights] = None, progress: bool = True, **kwargs: Any) -> VisionTransformer:
    """
    Constructs a vit_h_14 architecture from
    `An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale <https://arxiv.org/abs/2010.11929>`_.

    Args:
        weights (:class:`~torchvision.models.ViT_H_14_Weights`, optional): The pretrained
            weights to use. See :class:`~torchvision.models.ViT_H_14_Weights`
            below for more details and possible values. By default, no pre-trained weights are used.
        progress (bool, optional): If True, displays a progress bar of the download to stderr. Default is True.
        **kwargs: parameters passed to the ``torchvision.models.vision_transformer.VisionTransformer``
            base class. Please refer to the `source code
            <https://github.com/pytorch/vision/blob/main/torchvision/models/vision_transformer.py>`_
            for more details about this class.

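    Example:
        A minimal sketch of running the SWAG weights (``img`` is an assumed ``PIL.Image``;
        downloading the checkpoint requires network access):

        .. code-block:: python

            weights = ViT_H_14_Weights.IMAGENET1K_SWAG_E2E_V1
            model = vit_h_14(weights=weights).eval()
            preprocess = weights.transforms()       # resizes and center-crops to 518x518
            with torch.no_grad():
                logits = model(preprocess(img).unsqueeze(0))
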
    .. autoclass:: torchvision.models.ViT_H_14_Weights
        :members:
    """
    weights = ViT_H_14_Weights.verify(weights)

    return _vision_transformer(
        patch_size=14,
        num_layers=32,
        num_heads=16,
        hidden_dim=1280,
        mlp_dim=5120,
        weights=weights,
        progress=progress,
        **kwargs,
    )


def interpolate_embeddings(
    image_size: int,
    patch_size: int,
    model_state: "OrderedDict[str, torch.Tensor]",
    interpolation_mode: str = "bicubic",
    reset_heads: bool = False,
) -> "OrderedDict[str, torch.Tensor]":
    """This function helps interpolate positional embeddings during checkpoint loading,
    especially when you want to apply a pre-trained model on images with different resolution.

    Args:
        image_size (int): Image size of the new model.
        patch_size (int): Patch size of the new model.
        model_state (OrderedDict[str, torch.Tensor]): State dict of the pre-trained model.
        interpolation_mode (str): The algorithm used for upsampling. Default: bicubic.
        reset_heads (bool): If true, not copying the state of heads. Default: False.

    Returns:
        OrderedDict[str, torch.Tensor]: A state dict which can be loaded into the new model.
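
    Example:
        A minimal sketch of adapting positional embeddings trained at 224x224 to
        384x384 inputs (the checkpoint path is hypothetical and assumed to hold a
        plain ``VisionTransformer`` state dict):

        .. code-block:: python

            state = torch.load("vit_b_16_224.pth")  # hypothetical checkpoint
            state = interpolate_embeddings(image_size=384, patch_size=16, model_state=state)
            model = vit_b_16(image_size=384)        # model sized for the new resolution
            model.load_state_dict(state)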
    zencoder.pos_embeddingr   z%Unexpected position embedding shape: r   Nr   zPseq_length is not a perfect square! Instead got seq_length_1d * seq_length_1d = z and seq_length = T)sizemodealign_cornersr   r   )r}   
ValueErrorr   r-   r   r   r   r/   
functionalinterpolaterz   r   r   items
startswith)r   r   r  r  r  r   r   r   rl   new_seq_lengthpos_embedding_tokenpos_embedding_imgseq_length_1dnew_seq_length_1dnew_pos_embedding_imgnew_pos_embeddingmodel_state_copykvs                      r5   interpolate_embeddingsr,    sC   *   78M - 3Az:AvvVATVVWWW J.14q8N
 ##a
!+AAArr111H5)!!!QRR(3 .55aA>>DIj1122=(J66 bcp  tA  dA  b  b  V`  b  b  
 .55a]Tabb&*4 !# 9 9"#	 !: !
 !
 !6 = =a^ \ \ !6 = =aA F F!I':<Q&RXYZZZ/@+, 	+AL#))++ , ,1||G,, ,*+$Q'*Kr4   )r  F):r   collectionsr   	functoolsr   typingr   r   r   r   r	   r
   rz   torch.nnr/   ops.miscr   r   transforms._presetsr   r   utilsr   _apir   r   r   _metar   _utilsr   r   __all__r$   r7   r1   rh   r   r   r-   boolr   r   strr.   r   r   r   r   r   r   r   r   r   r    r!   r"   r,  r3   r4   r5   <module>r:     s    # # # # # #       B B B B B B B B B B B B B B B B        0 0 0 0 0 0 0 0 H H H H H H H H ' ' ' ' ' ' 6 6 6 6 6 6 6 6 6 6 ' ' ' ' ' ' B B B B B B B B  9 9 9 9 9Z 9 9 9+
 +
 +
 +
 +
s +
 +
 +
\! ! ! ! !29 ! ! !H#9 #9 #9 #9 #9bi #9 #9 #9LQ Q Q Q Q	 Q Q Qh  	
  k"      B & d38n   8K   L L L L L{ L L L^    {   4M M M M M{ M M M`    {   46% 6% 6% 6% 6%{ 6% 6% 6%r ,0@0N!OPPP6:T   "23 d ]` ev    QP @ ,0@0N!OPPP6:T   "23 d ]` ev    QP @ ,0@0N!OPPP6:T   "23 d ]` ev    QP @ ,0@0N!OPPP6:T   "23 d ]` ev    QP @ !56666:T   "23 d ]` ev    76 H (K KKK 2K 	K
 K &K K K K K Kr4   