
    קg                     z    d dl mZ d dlmZ d dlmZ d dlmZ dedefdZdefdZ	ded	ee         fd
Z
defdZdS )    )List_is_sym_size_node)QuantizationAnnotation)Nodenode
input_nodec                     | j                             dt                                }|j        i |_        ||j        |<   || j         d<   d S Nquantization_annotation)metagetr   input_qspec_map)r   r	   qspecr   s       a/var/www/html/ai-engine/env/lib/python3.11/site-packages/torch/ao/quantization/quantizer/utils.py_annotate_input_qspec_mapr   	   sY    "imm!#9#;#;  .624/:?+J7+BDI'(((    c                 v    | j                             dt                                }||_        || j         d<   d S r   )r   r   r   output_qspec)r   r   r   s      r   _annotate_output_qspecr      sB    "imm!#9#;#;  ,1(+BDI'(((r   partition_nodesc                 f    t          |           rdS t          fd| j        D                       S )a  
    This utility is used to handle cases when dynamic_shape=True tracing leads
    to symint nodes in the pattern of a linear module. In those cases, we need to
    distinguish between nodes that are inputs only for extracting the value of
    some dimensions (i.e. symint nodes) and the node that is the actual activation.
    For example:
    graph(x, y, weight):
       size_0 = torch.ops.aten.sym_size([x], [0])
       size_1 = torch.ops.aten.sym_size([y], [1])
       view_size = size_0 * size_1
       size_3 = torch.ops.aten.sym_size([x], [2])
       view_out = torch.ops.aten.view(x, [view_size, size_3])
       return mm(view_out, weight)
    In the example above, the y node is not an actual input. It exists only to
    extract size_1.
    """
    if _is_sym_size_node(node):
        return True

    return all(
        ((user not in partition_nodes) or _is_sym_size_node(user))
        for user in node.users
    )


def _get_module_name_filter(module_name: str):
    """Get the module_name_filter function for a given module name; the filter
    accepts a node and checks whether the node comes from a module that has the
    given module name.

    For example:
        node: linear_op = call_function[...](...)  # comes from a module with name blocks.sub.linear1


    >> module_name_filter = _get_module_name_filter("blocks.sub")
    >> print(module_name_filter(node))
    True  # the node is from "blocks.sub" based on the fully qualified name "blocks.sub.linear1"
    nreturnc                     | j                             di           }d fd|                                D             }|v S )Nnn_module_stackc                 b    d}|                      d          rt          d          }| |d          S )Nr   z
L['self'].)
startswithlen)r"   prefixs     r   _normalize_pathzL_get_module_name_filter.<locals>.module_name_filter.<locals>._normalize_pathI   s6    F||L)) +\**VWW:r   c                 ,    g | ]\  }} |          S  r,   )r   r"   _r*   s      r   
<listcomp>zG_get_module_name_filter.<locals>.module_name_filter.<locals>.<listcomp>P   s'    III1##IIIr   )r   r   values)r"   r%   namesr*   r    s      @r   module_name_filterz3_get_module_name_filter.<locals>.module_name_filterA   s^     &**%6;;	 	 	 JIII0F0F0H0HIIIe##r   )r   bool)r    r1   s   ` r   _get_module_name_filterr3   4   s7    $d $t $ $ $ $ $ $$ r   N)typingr    torch.ao.quantization.pt2e.utilsr   )torch.ao.quantization.quantizer.quantizerr   torch.fxr   r   r   r   strr3   r,   r   r   <module>r9      s          > > > > > > L L L L L L      CD Cd C C C CC C C C Ct d4j    2      r   