"""Autograd anomaly mode."""

import warnings

import torch

__all__ = ["detect_anomaly", "set_detect_anomaly"]


class detect_anomaly:
    r"""Context-manager that enables anomaly detection for the autograd engine.

    This does two things:

    - Running the forward pass with detection enabled will allow the backward
      pass to print the traceback of the forward operation that created the failing
      backward function.
    - If ``check_nan`` is ``True``, any backward computation that generates
      ``nan`` values will raise an error (see the ``check_nan`` example below).
      Default ``True``.

    .. warning::
        This mode should be enabled only for debugging, as the additional
        checks will slow down your program execution.

    Example:
        >>> # xdoctest: +REQUIRES(env:TORCH_DOCTEST_ANOMALY)
        >>> import torch
        >>> from torch import autograd
        >>> class MyFunc(autograd.Function):
        ...     @staticmethod
        ...     def forward(ctx, inp):
        ...         return inp.clone()
        ...     @staticmethod
        ...     def backward(ctx, gO):
        ...         # Error during the backward pass
        ...         raise RuntimeError("Some error in backward")
        ...         return gO.clone()
        >>> def run_fn(a):
        ...     out = MyFunc.apply(a)
        ...     return out.sum()
        >>> inp = torch.rand(10, 10, requires_grad=True)
        >>> out = run_fn(inp)
        >>> out.backward()
            Traceback (most recent call last):
              File "<stdin>", line 1, in <module>
              File "/your/pytorch/install/torch/_tensor.py", line 93, in backward
                torch.autograd.backward(self, gradient, retain_graph, create_graph)
              File "/your/pytorch/install/torch/autograd/__init__.py", line 90, in backward
                allow_unreachable=True)  # allow_unreachable flag
              File "/your/pytorch/install/torch/autograd/function.py", line 76, in apply
                return self._forward_cls.backward(self, *args)
              File "<stdin>", line 8, in backward
            RuntimeError: Some error in backward
        >>> with autograd.detect_anomaly():
        ...     inp = torch.rand(10, 10, requires_grad=True)
        ...     out = run_fn(inp)
        ...     out.backward()
            Traceback of forward call that caused the error:
              File "tmp.py", line 53, in <module>
                out = run_fn(inp)
              File "tmp.py", line 44, in run_fn
                out = MyFunc.apply(a)
            Traceback (most recent call last):
              File "<stdin>", line 4, in <module>
              File "/your/pytorch/install/torch/_tensor.py", line 93, in backward
                torch.autograd.backward(self, gradient, retain_graph, create_graph)
              File "/your/pytorch/install/torch/autograd/__init__.py", line 90, in backward
                allow_unreachable=True)  # allow_unreachable flag
              File "/your/pytorch/install/torch/autograd/function.py", line 76, in apply
                return self._forward_cls.backward(self, *args)
              File "<stdin>", line 8, in backward
            RuntimeError: Some error in backward
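
        With ``check_nan=True`` (the default), a ``nan`` produced during the
        backward pass raises an error. A minimal sketch; the failing function
        named in the error message is illustrative:

        >>> # xdoctest: +SKIP
        >>> with autograd.detect_anomaly(check_nan=True):
        ...     x = torch.zeros(1, requires_grad=True)
        ...     # d/dx sqrt(x * x) = x / |x|, which is 0/0 = nan at x = 0
        ...     out = torch.sqrt(x * x).sum()
        ...     out.backward()
            RuntimeError: Function 'MulBackward0' returned nan values in its 0th output.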

    """

    def __init__(self, check_nan: bool = True) -> None:
        self.prev = torch.is_anomaly_enabled()
        self.check_nan = check_nan
        self.prev_check_nan = torch.is_anomaly_check_nan_enabled()
        warnings.warn(
            "Anomaly Detection has been enabled. "
            "This mode will increase the runtime "
            "and should only be enabled for debugging.",
            stacklevel=2,
        )

    def __enter__(self) -> None:
        torch.set_anomaly_enabled(True, self.check_nan)

    def __exit__(self, *args: object) -> None:
        torch.set_anomaly_enabled(self.prev, self.prev_check_nan)


class set_detect_anomaly:
    r"""Context-manager that sets the anomaly detection for the autograd engine on or off.

    ``set_detect_anomaly`` will enable or disable the autograd anomaly detection
    based on its argument :attr:`mode`.
    It can be used as a context-manager or as a function.

    See ``detect_anomaly`` above for details of the anomaly detection behaviour.

    Args:
        mode (bool): Flag whether to enable anomaly detection (``True``),
                     or disable (``False``).
        check_nan (bool): Flag whether to raise an error when the backward
                          pass generates ``nan`` values. Default: ``True``.

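    Example:
        A minimal sketch of both usage styles (not run by doctests):

        >>> # xdoctest: +SKIP
        >>> import torch
        >>> from torch import autograd
        >>> # As a function: turn detection on, do some work, turn it off.
        >>> autograd.set_detect_anomaly(True)
        >>> autograd.set_detect_anomaly(False)
        >>> # As a context-manager: the previous state is restored on exit.
        >>> with autograd.set_detect_anomaly(True):
        ...     inp = torch.rand(10, 10, requires_grad=True)
        ...     inp.sum().backward()
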
    """

    def __init__(self, mode: bool, check_nan: bool = True) -> None:
        self.prev = torch.is_anomaly_enabled()
        self.prev_check_nan = torch.is_anomaly_check_nan_enabled()
        torch.set_anomaly_enabled(mode, check_nan)

    def __enter__(self) -> None:
        pass

    def __exit__(self, *args: object) -> None:
        torch.set_anomaly_enabled(self.prev, self.prev_check_nan)