"""This file exports ONNX ops for opset 14.

Note [ONNX operators that are added/updated in opset 14]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
New operators:
    HardSwish, Trilu

Updated operators:
    Reshape
    Add, Sub, Mul, Div
    GRU, LSTM, RNN
    BatchNorm, Cumsum, Relu
"""

from __future__ import annotations

import functools

import torch
from torch.onnx import _constants, _type_utils, symbolic_helper
from torch.onnx._globals import GLOBALS
from torch.onnx._internal import jit_utils, registration

__all__ = [
    "hardswish",
    "tril",
    "triu",
    "reshape",
    "batch_norm",
    "quantized_hardswish",
    "scaled_dot_product_attention",
]

_onnx_symbolic = functools.partial(registration.onnx_symbolic, opset=14)


@_onnx_symbolic("aten::hardswish")
@symbolic_helper.parse_args("v")
def hardswish(g: jit_utils.GraphContext, self):
    return g.op("HardSwish", self)
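
# Usage sketch (not part of this module; assumes the standard torch.onnx
# workflow): exporting with opset_version=14 makes the exporter dispatch
# aten::hardswish to the symbolic above, emitting a single ONNX HardSwish node.
#
#     model = torch.nn.Hardswish()
#     torch.onnx.export(
#         model, (torch.randn(2, 3),), "hardswish.onnx", opset_version=14
#     )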


@_onnx_symbolic("aten::tril")
def tril(g: jit_utils.GraphContext, self, diagonal, out=None):
    return g.op("Trilu", self, diagonal, upper_i=0)


@_onnx_symbolic("aten::triu")
def triu(g: jit_utils.GraphContext, self, diagonal, out=None):
    return g.op("Trilu", self, diagonal, upper_i=1)


@_onnx_symbolic("aten::reshape")
@symbolic_helper.quantized_args(True)
@symbolic_helper.parse_args("v", "v")
def reshape(g: jit_utils.GraphContext, self, shape):
    # Reshape export does not use the new `allowzero` attribute introduced in
    # opset 14; it is pinned to 0 to keep the pre-opset-14 semantics.
    return symbolic_helper._reshape_helper(g, self, shape, allowzero=0)
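
# Note on Trilu (illustrative, mirrors the tril/triu symbolics above): both
# lower to the opset-14 Trilu op, and `upper_i` selects which triangle is kept.
# For example, torch.tril on a 3x3 tensor of ones corresponds to Trilu(upper=0)
# and yields:
#
#     [[1, 0, 0],
#      [1, 1, 0],
#      [1, 1, 1]]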
                 C  s   t  rt|||||gstjdk rtdddd|S t|d t| |||||\}}}}| j	d||||||d| |s@dnd|sEdndd	
}
|sM|
S |
\}}}|
|  |
|  |S )
N   ZBatchNormalizationr   zaAll input tensors must have the same `dtype`. Turn off Autocast or export using opset version 15.r   r   r      )Z	epsilon_fZ
momentum_fZtraining_mode_iZoutputs)torchZis_autocast_enabledr   Zargs_have_same_dtyper   Zexport_onnx_opset_versionZ _onnx_opset_unsupported_detailedZcheck_training_modeZ_batchnorm_helperr   ZsetTypetype)r   inputweightZbiasZrunning_meanZrunning_varZtrainingZmomentumepsZcudnn_enabledr   resZnew_running_meanZnew_running_varr   r   r   r   C   sJ   	


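
# Behaviour sketch (assumes a standard export call, illustrative only): in eval
# mode the BatchNormalization node has a single output; in training mode it has
# three (the result plus updated running stats), and the extra outputs are
# retyped above to match the original running stats.
#
#     bn = torch.nn.BatchNorm2d(8).eval()
#     torch.onnx.export(
#         bn, (torch.randn(1, 8, 4, 4),), "bn.onnx", opset_version=14
#     )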
r   zquantized::hardswishc                 C  s.   t | |\}}}}t| |}t | |||S N)r   Zdequantize_helperr	   Zquantize_helper)r   xZop_scaleZop_zero_point_outputr   r   r   r   z   s   
r   z"aten::scaled_dot_product_attentionb        Fquerytorch._C.Valuekeyvalue	attn_masktorch._C.Value | None	dropout_pfloat	is_causalboolscale
enable_gqac	              
   C  s  |r|r	t |sJ d|rJ dt |rt| |}|r%t| ||}t |}	tt|	}
|
d |
d |
d< |
d< | jd||
d}| d|| d|}| d|| d|}| d	||}t |rj|}nWtj	
|tj	jkr| jd
tdgd}| jd
ttd gd}| d|||}| d||}n$tj	
|tj	jtj	jtj	jfv r| d||}ntdtj	
| | jd|dd}|dkr| d|| jd
tj|tjdd}| d	||S )Nz6is_causal and attn_mask cannot be set at the same timezPconversion of scaled_dot_product_attention not implemented if enable_gqa is TrueZ	Transpose)Zperm_iZMulSqrtZMatMulConstantr0   Zvalue_tinfWhereAddz Unsupported type for attn_mask: ZSoftmaxZaxis_ir   ZDropoutZdtype)r   Z_is_none_attention_scale_causal_attention_maskZ_get_tensor_ranklistranger   r   JitScalarType
from_valueZBOOLr%   tensorr8   FLOATZHALFZBFLOAT16
ValueError)r   r1   r3   r4   r5   r7   r9   r;   r<   Zkey_shape_builtinZkey_transposed_axesZkey_transposedZquery_scaledZkey_transposed_scaledZmul_qkZ
mul_qk_add
const_zeroconst_neg_infZattn_weightr   r   r   r      s\   





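
# Export sketch (not part of this module; assumes the standard exporter API):
# tracing F.scaled_dot_product_attention at opset 14 routes through the
# symbolic above, which decomposes attention into Transpose/MatMul/Softmax
# (plus optional mask and Dropout) nodes.
#
#     import torch.nn.functional as F
#
#     class Attn(torch.nn.Module):
#         def forward(self, q, k, v):
#             return F.scaled_dot_product_attention(q, k, v, is_causal=True)
#
#     q = k = v = torch.randn(1, 4, 8, 16)
#     torch.onnx.export(Attn(), (q, k, v), "attn.onnx", opset_version=14)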
r   returnc                 C  s   |  d|}|  d|| j dtjdgtjdd| j dtjtjgtjdd}| j d|tj|	 d}| j dtjd	gtj
dd}|  d
||  d|}| j d|tj|	 d}|S )zCalculate the scale factor for the attention result.

    Args:
        query: Tensor of shape [..., L, E]

    Returns:
        Scalar scale factor := 1 / math.sqrt(query.size(-1))
    ShapeSlicer@   r>   rF   rA   ZCast)Zto_i      ?Divr?   )r   r%   rM   int64r   Z	INT64_MAXr   rK   rL   Z	onnx_typer8   )r   r1   query_shapeZquery_shape_lastZembedding_size	const_oner;   r   r   r   rG      s,   rG   c                 C  s  |  d|}|  d|}| j dtjdgtjdd}| j dtjdgtjdd}|  d|||}|  d|||}| j d||d	d
}	| j dtdgd}
|  d|
|	}| j d|d	d}| j dtdgd}| j dttd gd}|  d|  d||||}|S )a  Create a causal mask for the given query and key tensors.

    Equivalent to::
        mask = torch.ones(L, S, dtype=torch.bool).tril(diagonal=0)
        attn_mask = torch.zeros(L, S, dtype=torch.float)
        attn_mask = attn_mask.masked_fill(not mask, -float("inf"))

    Args:
        query: Tensor of shape [..., L, E]
        key: Tensor of shape [..., S, E]

    Returns:
        Tensor of shape [L, S]
    """
    query_shape = g.op("Shape", query)
    key_shape = g.op("Shape", key)

    last_idx = g.op("Constant", value_t=torch.tensor([-1], dtype=torch.int64))
    second_last_idx = g.op("Constant", value_t=torch.tensor([-2], dtype=torch.int64))
    # L (target) and S (source) are the second-to-last dims of query and key.
    target_length = g.op("Slice", query_shape, second_last_idx, last_idx)
    source_length = g.op("Slice", key_shape, second_last_idx, last_idx)
    # attn_mask = torch.ones(L, S)
    size = g.op("Concat", target_length, source_length, axis_i=0)
    const_one = g.op("Constant", value_t=torch.tensor([1.0]))
    attn_mask = g.op("Expand", const_one, size)
    # Zero out the upper triangle of the all-ones mask.
    attn_mask = g.op("Trilu", attn_mask, upper_i=0)
    # Keep 0.0 where the mask is 1 and put -inf where it is 0.
    const_zero = g.op("Constant", value_t=torch.tensor([0.0]))
    const_neg_inf = g.op("Constant", value_t=torch.tensor([-float("inf")]))
    attn_mask = g.op(
        "Where", g.op("Equal", attn_mask, const_zero), const_neg_inf, const_zero
    )
    return attn_mask
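
# Worked example (illustrative): for L = S = 3 the mask produced above is
#
#     [[0., -inf, -inf],
#      [0.,   0., -inf],
#      [0.,   0.,   0.]]
#
# so each query position attends only to itself and to earlier key positions.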