import warnings
from functools import partial
from typing import Any, Dict, List, Optional

import torch
import torch.nn as nn
from torch import Tensor

from ..transforms._presets import ImageClassification
from ..utils import _log_api_usage_once
from ._api import register_model, Weights, WeightsEnum
from ._meta import _IMAGENET_CATEGORIES
from ._utils import _ovewrite_named_param, handle_legacy_interface


__all__ = [
    "MNASNet",
    "MNASNet0_5_Weights",
    "MNASNet0_75_Weights",
    "MNASNet1_0_Weights",
    "MNASNet1_3_Weights",
    "mnasnet0_5",
    "mnasnet0_75",
    "mnasnet1_0",
    "mnasnet1_3",
]


# Paper suggests 0.9997 momentum, for TensorFlow. Equivalent PyTorch momentum is
# 1.0 - tensorflow.
_BN_MOMENTUM = 1 - 0.9997
ZdedefdZ xZS )_InvertedResidual皙?in_chout_chkernel_sizestrideexpansion_factorbn_momentumreturnNc                 `   t                                                       |dvrt          d|           |dvrt          d|           ||z  }||k    o|dk    | _        t	          j        t	          j        ||dd          t	          j        ||          t	          j        d	
          t	          j        ||||dz  ||d          t	          j        ||          t	          j        d	
          t	          j        ||dd          t	          j        ||                    | _	        d S )Nr   r	   z#stride should be 1 or 2 instead of )      z(kernel_size should be 3 or 5 instead of r   F)biasmomentumTinplacer	   paddingr"   groupsr*   )
super__init__
ValueErrorapply_residualnn
SequentialConv2dBatchNorm2dReLUlayers)	selfr   r    r!   r"   r#   r$   mid_ch	__class__s	           V/var/www/html/ai-engine/env/lib/python3.11/site-packages/torchvision/models/mnasnet.pyr3   z_InvertedResidual.__init__#   s<    	K6KKLLLf$$UUUVVV))#vo=&A+mIeVQU333N6K888GD!!!Iffk;!;KTZcipuvvvN6K888GD!!!Iffae444N6K888
 
    inputc                 j    | j         r|                     |          |z   S |                     |          S )N)r5   r;   )r<   rA   s     r?   forwardz_InvertedResidual.forward;   s6     	&;;u%%--;;u%%%r@   )r   )	__name__
__module____qualname__intfloatr3   r   rC   __classcell__r>   s   @r?   r   r   "   s        ru
 

"%
47
AD
X[
jo
	
 
 
 
 
 
0&V & & & & & & & & &r@   r   r   r    r!   r"   
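

# Illustrative sketch, not part of the original module: a quick shape check of a single
# inverted-residual block defined above. With stride=2 (and in_ch != out_ch) no skip
# connection is applied, the spatial resolution halves and the channel count becomes
# out_ch; with stride=1 and in_ch == out_ch the block adds its input back.
def _demo_inverted_residual() -> None:
    block = _InvertedResidual(in_ch=16, out_ch=24, kernel_size=3, stride=2, expansion_factor=3)
    x = torch.rand(1, 16, 56, 56)
    y = block(x)
    print(y.shape)  # expected: torch.Size([1, 24, 28, 28])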


def _stack(
    in_ch: int, out_ch: int, kernel_size: int, stride: int, exp_factor: int, repeats: int, bn_momentum: float
) -> nn.Sequential:
    """Creates a stack of inverted residuals."""
    if repeats < 1:
        raise ValueError(f"repeats should be >= 1, instead got {repeats}")
    # First one has no skip, because feature map size changes.
    first = _InvertedResidual(in_ch, out_ch, kernel_size, stride, exp_factor, bn_momentum=bn_momentum)
    remaining = []
    for _ in range(1, repeats):
        remaining.append(_InvertedResidual(out_ch, out_ch, kernel_size, 1, exp_factor, bn_momentum=bn_momentum))
    return nn.Sequential(first, *remaining)


def _round_to_multiple_of(val: float, divisor: int, round_up_bias: float = 0.9) -> int:
    """Asymmetric rounding to make `val` divisible by `divisor`. With default
    bias, will round up, unless the number is no more than 10% greater than the
    smaller divisible value, i.e. (83, 8) -> 80, but (84, 8) -> 88."""
    if not 0.0 < round_up_bias < 1.0:
        raise ValueError(f"round_up_bias should be greater than 0.0 and smaller than 1.0 instead of {round_up_bias}")
    new_val = max(divisor, int(val + divisor / 2) // divisor * divisor)
    return new_val if new_val >= round_up_bias * val else new_val + divisor


def _get_depths(alpha: float) -> List[int]:
    """Scales tensor depths as in reference MobileNet code, prefers rounding up
    rather than down."""
    depths = [32, 16, 24, 40, 80, 96, 192, 320]
    return [_round_to_multiple_of(depth * alpha, 8) for depth in depths]
ZdedefdZ	de
dede
dedee         dee         dee         dd	f fdZ xZS )r   a  MNASNet, as described in https://arxiv.org/abs/1807.11626. This
    implements the B1 variant of the model.
    >>> model = MNASNet(1.0, num_classes=1000)
    >>> x = torch.rand(1, 3, 224, 224)
    >>> y = model(x)
    >>> y.dim()
    2
    >>> y.nelement()
    1000
    r	     皙?r^   num_classesdropoutr%   Nc                 ^   t                                                       t          |            |dk    rt          d|           || _        || _        t          |          }t          j        d|d         dddd          t          j	        |d         t          	          t          j        d
          t          j        |d         |d         ddd|d         d          t          j	        |d         t          	          t          j        d
          t          j        |d         |d         dddd          t          j	        |d         t          	          t          |d         |d         ddddt                    t          |d         |d         ddddt                    t          |d         |d         ddddt                    t          |d         |d         ddddt                    t          |d         |d         ddddt                    t          |d         |d         ddddt                    t          j        |d         ddddd          t          j	        dt          	          t          j        d
          g}t          j        | | _        t          j        t          j        |d
          t          j        d|                    | _        |                                 D ]8}t'          |t          j                  rSt          j                            |j        dd           |j        $t          j                            |j                   pt'          |t          j	                  rIt          j                            |j                   t          j                            |j                   t'          |t          j                  rKt          j                            |j        dd           t          j                            |j                   :d S )NrY   z,alpha should be greater than 0.0 instead of r(   r   r   r	   Fr0   r"   r*   r+   Tr-   r/   r)            i   )pr.   fan_outrelu)modenonlinearitysigmoid)r2   r3   r   r4   r^   rr   rn   r6   r8   r9   _BN_MOMENTUMr:   rS   r7   r;   DropoutLinear
classifiermodules
isinstanceinitkaiming_normal_weightr*   zeros_ones_kaiming_uniform_)r<   r^   rr   rs   rm   r;   mr>   s          r?   r3   zMNASNet.__init__p   ss   D!!!C<<SESSTTT
&U## IaAqGGGN6!9|<<<GD!!!IfQiAq6RS9[`aaaN6!9|<<<GD!!!IfQiAqOOON6!9|<<<6!9fQiAq!\BB6!9fQiAq!\BB6!9fQiAq!\BB6!9fQiAq!\BB6!9fQiAq!\BB6!9fQiAq!\BBIfQiq!AEJJJN4,777GD!!!+
. mV,-
Wd(K(K(KRYW[]hMiMijj 
	' 
	'A!RY'' 	'''yv'VVV6%GNN16***Ar~.. 'ah'''qv&&&&Ary)) '((	PY(ZZZqv&&&
	' 
	'r@   xc                     |                      |          }|                    ddg          }|                     |          S )Nr	   r(   )r;   meanr   )r<   r   s     r?   rC   zMNASNet.forward   s6    KKNNFFAq6NNq!!!r@   
state_dictprefixlocal_metadatastrictmissing_keysunexpected_keys
error_msgsc                 B   |                     dd           }|dvrt          d|           |dk    rC| j        dk    s7t          | j                  }	t	          j        dddddd	
          t	          j        dt                    t	          j        d          t	          j        ddddddd	          t	          j        dt                    t	          j        d          t	          j        dddddd	
          t	          j        dt                    t          d|	d         ddddt                    g	}
t          |
          D ]\  }}|| j        |<   d| _        t          j        dt                     t!                                          |||||||           d S )Nversionr'   z+version shluld be set to 1 or 2 instead of r   rZ   r(   r`   r	   Fru   r+   Tr-   r/   ra   r   a  A new version of MNASNet model has been implemented. Your checkpoint was saved using the previous version. This checkpoint will load and work as before, but you may want to upgrade by training a newer model or transfer learning from an updated ImageNet checkpoint.)getr4   r^   rn   r6   r8   r9   r   r:   rS   	enumerater;   _versionwarningswarnUserWarningr2   _load_from_state_dict)r<   r   r   r   r   r   r   r   r   rm   v1_stemidxlayerr>   s                r?   r   zMNASNet._load_from_state_dict   s    !$$Y55&  T7TTUUUa<<
c 1 1
 !,,F	!RAaeDDDrL999%%%	"b!Qq%PPPrL999%%%	"b!QquEEErL999r6!9aAq,??
G (00 ) )
U#(C   DMMI
    	%%oWa	
 	
 	
 	
 	
r@   )rp   rq   )rD   rE   rF   __doc__r   rH   rG   r3   r   rC   r   strboolr   r   rI   rJ   s   @r?   r   r   a   s       	 	 H,' ,'e ,'# ,'u ,'W[ ,' ,' ,' ,' ,' ,'\" "F " " " "/
/
 /
 	/

 /
 3i/
 c/
 I/
 
/
 /
 /
 /
 /
 /
 /
 /
 /
 /
r@   r   )r   r   z(https://github.com/1e100/mnasnet_trainer)min_size
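

# Illustrative sketch, not part of the original module: `MNASNet.layers` can be reused
# directly as a convolutional feature extractor, mirroring what `forward` does before
# the classifier head (global average pooling of a 1280-channel feature map).
def _demo_feature_extraction() -> None:
    model = MNASNet(alpha=1.0, num_classes=1000)
    x = torch.rand(1, 3, 224, 224)
    features = model.layers(x)      # (1, 1280, 7, 7) for a 224x224 input
    pooled = features.mean([2, 3])  # same pooling as MNASNet.forward, shape (1, 1280)
    print(pooled.shape)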


_COMMON_META = {
    "min_size": (1, 1),
    "categories": _IMAGENET_CATEGORIES,
    "recipe": "https://github.com/1e100/mnasnet_trainer",
}


class MNASNet0_5_Weights(WeightsEnum):
    IMAGENET1K_V1 = Weights(
        url="https://download.pytorch.org/models/mnasnet0.5_top1_67.823-3ffadce67e.pth",
        transforms=partial(ImageClassification, crop_size=224),
        meta={
            **_COMMON_META,
            "num_params": 2218512,
            "_metrics": {"ImageNet-1K": {"acc@1": 67.734, "acc@5": 87.490}},
            "_ops": 0.104,
            "_file_size": 8.591,
            "_docs": """These weights reproduce closely the results of the paper.""",
        },
    )
    DEFAULT = IMAGENET1K_V1


class MNASNet0_75_Weights(WeightsEnum):
    IMAGENET1K_V1 = Weights(
        url="https://download.pytorch.org/models/mnasnet0_75-7090bc5f.pth",
        transforms=partial(ImageClassification, crop_size=224, resize_size=232),
        meta={
            **_COMMON_META,
            "recipe": "https://github.com/pytorch/vision/pull/6019",
            "num_params": 3170208,
            "_metrics": {"ImageNet-1K": {"acc@1": 71.180, "acc@5": 90.496}},
            "_ops": 0.215,
            "_file_size": 12.303,
            "_docs": """
                These weights were trained from scratch by using TorchVision's `new training recipe
                <https://pytorch.org/blog/how-to-train-state-of-the-art-models-using-torchvision-latest-primitives/>`_.
            """,
        },
    )
    DEFAULT = IMAGENET1K_V1


class MNASNet1_0_Weights(WeightsEnum):
    IMAGENET1K_V1 = Weights(
        url="https://download.pytorch.org/models/mnasnet1.0_top1_73.512-f206786ef8.pth",
        transforms=partial(ImageClassification, crop_size=224),
        meta={
            **_COMMON_META,
            "num_params": 4383312,
            "_metrics": {"ImageNet-1K": {"acc@1": 73.456, "acc@5": 91.510}},
            "_ops": 0.314,
            "_file_size": 16.915,
            "_docs": """These weights reproduce closely the results of the paper.""",
        },
    )
    DEFAULT = IMAGENET1K_V1


class MNASNet1_3_Weights(WeightsEnum):
    IMAGENET1K_V1 = Weights(
        url="https://download.pytorch.org/models/mnasnet1_3-a4c69d6f.pth",
        transforms=partial(ImageClassification, crop_size=224, resize_size=232),
        meta={
            **_COMMON_META,
            "recipe": "https://github.com/pytorch/vision/pull/6019",
            "num_params": 6282256,
            "_metrics": {"ImageNet-1K": {"acc@1": 76.506, "acc@5": 93.522}},
            "_ops": 0.526,
            "_file_size": 24.246,
            "_docs": """
                These weights were trained from scratch by using TorchVision's `new training recipe
                <https://pytorch.org/blog/how-to-train-state-of-the-art-models-using-torchvision-latest-primitives/>`_.
            """,
        },
    )
    DEFAULT = IMAGENET1K_V1


def _mnasnet(alpha: float, weights: Optional[WeightsEnum], progress: bool, **kwargs: Any) -> MNASNet:
    if weights is not None:
        _ovewrite_named_param(kwargs, "num_classes", len(weights.meta["categories"]))

    model = MNASNet(alpha, **kwargs)

    if weights:
        model.load_state_dict(weights.get_state_dict(progress=progress, check_hash=True))

    return model
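

# Illustrative sketch, not part of the original module: every weights enum above bundles
# its checkpoint URL, preprocessing preset and reference metrics, so the metadata can be
# inspected without downloading the checkpoint.
def _demo_weights_metadata() -> None:
    weights = MNASNet1_0_Weights.IMAGENET1K_V1
    print(weights.meta["_metrics"]["ImageNet-1K"])  # reference ImageNet accuracies
    preprocess = weights.transforms()               # ImageClassification(crop_size=224)
    print(preprocess)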


@register_model()
@handle_legacy_interface(weights=("pretrained", MNASNet0_5_Weights.IMAGENET1K_V1))
def mnasnet0_5(*, weights: Optional[MNASNet0_5_Weights] = None, progress: bool = True, **kwargs: Any) -> MNASNet:
    """MNASNet with depth multiplier of 0.5 from
    `MnasNet: Platform-Aware Neural Architecture Search for Mobile
    <https://arxiv.org/abs/1807.11626>`_ paper.

    Args:
        weights (:class:`~torchvision.models.MNASNet0_5_Weights`, optional): The
            pretrained weights to use. See
            :class:`~torchvision.models.MNASNet0_5_Weights` below for
            more details, and possible values. By default, no pre-trained
            weights are used.
        progress (bool, optional): If True, displays a progress bar of the
            download to stderr. Default is True.
        **kwargs: parameters passed to the ``torchvision.models.mnasnet.MNASNet``
            base class. Please refer to the `source code
            <https://github.com/pytorch/vision/blob/main/torchvision/models/mnasnet.py>`_
            for more details about this class.

    .. autoclass:: torchvision.models.MNASNet0_5_Weights
        :members:
    """
    weights = MNASNet0_5_Weights.verify(weights)

    return _mnasnet(0.5, weights, progress, **kwargs)
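

# Illustrative usage sketch, not part of the original module: classifying a single image
# tensor with the builder above, mirroring the usual torchvision weights workflow.
# `img` is assumed to be an image tensor of shape (3, H, W), e.g. as returned by
# torchvision.io.decode_image.
def _demo_mnasnet0_5_inference(img: Tensor) -> str:
    weights = MNASNet0_5_Weights.DEFAULT
    model = mnasnet0_5(weights=weights)
    model.eval()
    preprocess = weights.transforms()     # resize, center-crop and normalize
    batch = preprocess(img).unsqueeze(0)  # add the batch dimension
    with torch.no_grad():
        scores = model(batch).softmax(dim=1)
    class_id = int(scores.argmax(dim=1))
    return weights.meta["categories"][class_id]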


@register_model()
@handle_legacy_interface(weights=("pretrained", MNASNet0_75_Weights.IMAGENET1K_V1))
def mnasnet0_75(*, weights: Optional[MNASNet0_75_Weights] = None, progress: bool = True, **kwargs: Any) -> MNASNet:
    """MNASNet with depth multiplier of 0.75 from
    `MnasNet: Platform-Aware Neural Architecture Search for Mobile
    <https://arxiv.org/abs/1807.11626>`_ paper.

    Args:
        weights (:class:`~torchvision.models.MNASNet0_75_Weights`, optional): The
            pretrained weights to use. See
            :class:`~torchvision.models.MNASNet0_75_Weights` below for
            more details, and possible values. By default, no pre-trained
            weights are used.
        progress (bool, optional): If True, displays a progress bar of the
            download to stderr. Default is True.
        **kwargs: parameters passed to the ``torchvision.models.mnasnet.MNASNet``
            base class. Please refer to the `source code
            <https://github.com/pytorch/vision/blob/main/torchvision/models/mnasnet.py>`_
            for more details about this class.

    .. autoclass:: torchvision.models.MNASNet0_75_Weights
        :members:
    """
    weights = MNASNet0_75_Weights.verify(weights)

    return _mnasnet(0.75, weights, progress, **kwargs)


@register_model()
@handle_legacy_interface(weights=("pretrained", MNASNet1_0_Weights.IMAGENET1K_V1))
def mnasnet1_0(*, weights: Optional[MNASNet1_0_Weights] = None, progress: bool = True, **kwargs: Any) -> MNASNet:
    """MNASNet with depth multiplier of 1.0 from
    `MnasNet: Platform-Aware Neural Architecture Search for Mobile
    <https://arxiv.org/abs/1807.11626>`_ paper.

    Args:
        weights (:class:`~torchvision.models.MNASNet1_0_Weights`, optional): The
            pretrained weights to use. See
            :class:`~torchvision.models.MNASNet1_0_Weights` below for
            more details, and possible values. By default, no pre-trained
            weights are used.
        progress (bool, optional): If True, displays a progress bar of the
            download to stderr. Default is True.
        **kwargs: parameters passed to the ``torchvision.models.mnasnet.MNASNet``
            base class. Please refer to the `source code
            <https://github.com/pytorch/vision/blob/main/torchvision/models/mnasnet.py>`_
            for more details about this class.

    .. autoclass:: torchvision.models.MNASNet1_0_Weights
        :members:
    """
    weights = MNASNet1_0_Weights.verify(weights)

    return _mnasnet(1.0, weights, progress, **kwargs)


@register_model()
@handle_legacy_interface(weights=("pretrained", MNASNet1_3_Weights.IMAGENET1K_V1))
def mnasnet1_3(*, weights: Optional[MNASNet1_3_Weights] = None, progress: bool = True, **kwargs: Any) -> MNASNet:
    """MNASNet with depth multiplier of 1.3 from
    `MnasNet: Platform-Aware Neural Architecture Search for Mobile
    <https://arxiv.org/abs/1807.11626>`_ paper.

    Args:
        weights (:class:`~torchvision.models.MNASNet1_3_Weights`, optional): The
            pretrained weights to use. See
            :class:`~torchvision.models.MNASNet1_3_Weights` below for
            more details, and possible values. By default, no pre-trained
            weights are used.
        progress (bool, optional): If True, displays a progress bar of the
            download to stderr. Default is True.
        **kwargs: parameters passed to the ``torchvision.models.mnasnet.MNASNet``
            base class. Please refer to the `source code
            <https://github.com/pytorch/vision/blob/main/torchvision/models/mnasnet.py>`_
            for more details about this class.

    .. autoclass:: torchvision.models.MNASNet1_3_Weights
        :members:
    """
    weights = MNASNet1_3_Weights.verify(weights)

    return _mnasnet(1.3, weights, progress, **kwargs)