
from __future__ import annotations

import inspect
import warnings
from typing import TYPE_CHECKING

from .. import core
from ..dygraph.base import in_to_static_mode
from ..framework import (
    OpProtoHolder,
    Variable,
    default_main_program,
    static_only,
)

if TYPE_CHECKING:
    from paddle import Tensor

_supported_int_dtype_ = [
    core.VarDesc.VarType.UINT8,
    core.VarDesc.VarType.INT8,
    core.VarDesc.VarType.INT16,
    core.VarDesc.VarType.INT32,
    core.VarDesc.VarType.INT64,
    core.VarDesc.VarType.BOOL,
]

_supported_complex_dtype_ = [
    core.VarDesc.VarType.COMPLEX64,
    core.VarDesc.VarType.COMPLEX128,
]

compare_ops = ['__eq__', '__ne__', '__lt__', '__le__', '__gt__', '__ge__']

SUPPORT_PROMOTION_OPS = [
    '__add__',
    '__radd__',
    '__sub__',
    '__rsub__',
    '__mul__',
    '__rmul__',
    '__mod__',
    '__rmod__',
    '__div__',
    '__rdiv__',
    '__truediv__',
    '__rtruediv__',
    '__floordiv__',
    '__rfloordiv__',
    '__pow__',
    '__rpow__',
    '__eq__',
    '__ne__',
    '__lt__',
    '__le__',
    '__gt__',
    '__ge__',
]

EXPRESSION_MAP = {
    "__add__": "A + B",
    "__radd__": "A += B",
    "__sub__": "A - B",
    "__rsub__": "A -= B",
    "__mul__": "A * B",
    "__rmul__": "A *= B",
    "__div__": "A / B",
    "__truediv__": "A / B",
    "__rdiv__": "A /= B",
    "__rtruediv__": "A /= B",
    "__pow__": "A ** B",
    "__rpow__": "A **= B",
    "__floordiv__": "A //B",
    "__rfloordiv__": "A //=B",
    "__mod__": "A % B",
    "__rmod__": "A %= B",
    "__matmul__": "A @ B",
    "__rmatmul__": "A @= B",
    "__eq__": "A == B",
    "__ne__": "A != B",
    "__lt__": "A < B",
    "__le__": "A <= B",
    "__gt__": "A > B",
    "__ge__": "A >= B",
}

_already_patch_variable = False


def _slice_tensor_array(array, start, end):
    from paddle.static.nn import cond
    from paddle.tensor import create_array

    def true_fn():
        null_array = create_array("float32")
        return null_array

    def false_fn(array, start, end):
        new_array = array[start:end]
        return new_array

    out = cond(start == end, true_fn, lambda: false_fn(array, start, end))
    return out
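# A quick illustration of `_slice_tensor_array` (illustrative sketch, not part of
# the upstream file): both branches of the static `cond` must return a variable,
# so an empty "float32" array stands in for the degenerate `start == end` slice.
#
#     >>> import paddle  # doctest: +SKIP
#     >>> paddle.enable_static()
#     >>> arr = paddle.tensor.create_array("float32")
#     >>> i = paddle.zeros([1], dtype="int64")
#     >>> paddle.tensor.array_write(paddle.ones([2]), i, array=arr)
#     >>> sliced = _slice_tensor_array(arr, i, i + 1)  # a one-element array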
U+4S jn[        S 5       nS nS n[        SkS j5       nUR                  SlS j5       nSmSnS jjnU$4S jnU$4S jnU$4S  jnU$4S! jnU$4S" jn SoU%U&U'U(U)U*U,4S$ jjnS% nS& nS' nU&U*4S( jnU&U*4S) jnU&U*4S* jn/ S+U
4PS,U4PS-T%4PS.U4PS/U4PS0U4PS1U4PS2U4PS3U4PS4U4PS5U4PS6U	4PS7U4PS8U4PS9U4PSU4PS:U4PS;U" S;S<S#U5      4PS=U" S=S<S#U5      4PS>U" S>S?S#U5      4PS@U" S@S?S
U5      4PSAU" SASBS#U5      4PSCU" SCSBS#U5      4PSDU" SDSES#U5      4PSFU" SFSES#U5      4PSGU" SGSES
S 5      4PSHU" SHSES
S 5      4PSIU" SISJS#S 5      4PSKU" SKSJS
S 5      4PSLU" SLSMS#S 5      4PSNU" SNSMS
S 5      4PSOU" SOSPS#S 5      4PSQU" SQSPS
S 5      4PSRU" SRSSS#S 5      4PSTU" SUSSS
S 5      4PSVU" SVSWS#S 5      4PSXU" SXSYS#S 5      4PSZU" SZS[S#S 5      4PS\U" S\S]S#S 5      4PS^U" S^S_S#S 5      4PS`U" S`SaS#S 5      4PSbU4PScU4PSdU4PSeU4PSfU4PSgU4Pn[        (       d'  U H  nUSh   nUSi   n [	        [
        UU 5        M      S
qg ShS Km+T+R                  R                   HK  n[        [
        U5      (       a  M  [        T+R                  US 5      n U (       d  M:  [	        [
        UU 5        MM     T+R                  R                   H7  u  n!n"[        T+R                  U"S 5      n#U#(       d  M&  [	        [
        U!U#5        M9     S
qg )pNc                 H    [        5       R                  R                  S5      $ )Ntmp)r
   _name_generatorgenerater*   r/   r,   unique_tmp_name.monkey_patch_variable.<locals>.unique_tmp_name{   s    #%55>>uEEr/   c                Z     U R                   nU$ !   [        SU R                   35      e= f)NzCannot get data type from )dtype
ValueErrorname)varrF   s     r,   safe_get_dtype-monkey_patch_variable.<locals>.safe_get_dtype~   s7    	FIIE 	F9#((DEEs    *c                J    U R                   R                  R                  5       $ r1   )blockprogramcurrent_block)rI   s    r,   rO   ,monkey_patch_variable.<locals>.current_block   s    yy  ..00r/   c                0   > T" 5       nU R                  X!S9$ )N)rH   rF   
create_var)rM   rF   tmp_namerC   s      r,   create_new_tmp_var1monkey_patch_variable.<locals>.create_new_tmp_var   s    "$X;;r/   c                2   > T" 5       nU R                  X1US9$ )N)rH   rF   typerR   )rM   rF   rX   rT   rC   s       r,   create_new_tmp_sparse_var8monkey_patch_variable.<locals>.create_new_tmp_sparse_var   s!    "$XFFr/   c           	        > [        U5      nT" X5      nU R                  SSU/0UR                  UUSS.SS9  SUl        U$ )Nfill_constantOutF)rF   shapevalue	force_cpuT)rX   outputsattrsstop_gradient)float	append_oprF   rc   )rM   r_   rF   r^   rI   rU   s        r,   create_tensor,monkey_patch_variable.<locals>.create_tensor   s\    e . SEN"	  	 
	
 !
r/   c                   > T" XU/ S9$ )N)r^   r*   )rM   r_   rF   rf   s      r,   create_scalar,monkey_patch_variable.<locals>.create_scalar   s    U5;;r/   c           
       > [        U [        5      (       d   e[        U5      nT
" U 5      nT	" X25      nSn/ n[        U R                  5       HJ  u  pxUS:  a.  US:  a  UnUR                  U5        M&  UR                  S5        M9  UR                  U5        ML     US:w  d   eUR                  SSU/0SU /0UUUUS.SS	9  SUl        U$ )
Nr      fill_constant_batch_size_liker]   Input)r^   r_   input_dim_idxoutput_dim_idxT)rX   ra   inputsrb   rc   )
isinstancer	   rd   	enumerater^   appendre   rc   )ref_varr_   rF   rM   rI   	batch_dim	out_shapeidrU   rO   s            r,   create_tensor_with_batchsize;monkey_patch_variable.<locals>.create_tensor_with_batchsize   s    '8,,,,eg& .		gmm,DA1uq= !I$$Q'$$Q'  # - B0SENgY'"!*"+	  	 	
 !
r/   c           	        > T" U 5      nT" 5       nUR                  UU R                  U R                  U R                  SSS9nSS0nUR	                  SSU /0SU/0US	9  U$ )
a  
In dy2static, Variable also needs cpu() and cuda() interface.
But, the underneath operator has only forward op but not backward one.

Returns:
    The tensor which has copied to cpu place.

Examples:
    In Static Graph Mode:

    .. code-block:: python

        >>> import paddle
        >>> paddle.enable_static()

        >>> x = paddle.static.data(name="x", shape=[2,2], dtype='float32')
        >>> y = x.cpu()
FTrH   rF   r^   rX   persistablerc   dst_place_typer   memcpyXr]   rX   rr   ra   rb   )rS   rF   r^   rX   re   )selfrM   rT   outputrb   rO   rC   s        r,   cpu"monkey_patch_variable.<locals>.cpu   s    ( d#"$!!**** " 
 "1%$=VH%	 	 	
 r/   Tc           	     *  > Ub  [         R                  " S5        USLa  [         R                  " S5        T" U 5      nT" 5       nUR                  UU R                  U R                  U R
                  SSS9nSS0nUR                  SS	U /0S
U/0US9  U$ )a/  
In dy2static, Variable also needs cpu() and cuda() interface.
But, the underneath operator has only forward op but not backward one.

Args:
    self(Variable): The variable itself.
    device_id(int, optional): The destination GPU device id. Default: None, means current device.
        We add this argument for dy2static translation, please do not use it.
    blocking(bool, optional): Whether blocking or not, Default: True.
        We add this argument for dy2static translation, please do not use it.

Returns:
    The tensor which has copied to cuda place.

Examples:
    In Static Graph Mode:

    .. code-block:: python

        >>> import paddle
        >>> paddle.enable_static()

        >>> x = paddle.static.data(name="x", shape=[2,2], dtype='float32')
        >>> y = x.cpu()
        >>> z = y.cuda()
z3device_id is not supported, and it will be ignored.Tz2blocking is not supported, and it will be ignored.Fr~   r   rm   r   r   r]   r   )warningswarnrS   rF   r^   rX   re   )	r   	device_idblockingrM   rT   r   rb   rO   rC   s	          r,   cuda#monkey_patch_variable.<locals>.cuda   s    8  MMOP4MMNOd#"$!!**** " 
 "1%$=VH%	 	 	
 r/   c                0    [         R                  " S5        g)z
Variable don't have 'place' interface in static graph mode
But this interface can greatly facilitate dy2static.
So we give a warning here and return None.
zgVariable do not have 'place' interface for static graph mode, try not to use it. None will be returned.Nr   r   r   s    r,   place$monkey_patch_variable.<locals>.place"  s     	u	
r/   c                2    [         R                  " S5        U $ )z
Variable don't have 'contiguous' interface in static graph mode
But this interface can greatly facilitate dy2static.
So we give a warning here and return None.
zlVariable do not have 'contiguous' interface for static graph mode, try not to use it. self will be returned.r   r   s    r,   
contiguous)monkey_patch_variable.<locals>.contiguous-  s     	z	
 r/   c                0    [         R                  " S5        g)z
Variable don't have 'is_contiguous' interface in static graph mode
But this interface can greatly facilitate dy2static.
So we give a warning here and return None.
zoVariable do not have 'is_contiguous' interface for static graph mode, try not to use it. True will be returned.Tr   r   s    r,   is_contiguous,monkey_patch_variable.<locals>.is_contiguous9  s     	}	
 r/   c                   > U R                   U:X  a  U $ T" U 5      nT" X!5      nUR                  SSU /0SU/0U R                   UR                   S.S9  U R                  Ul        U$ )a  
**Notes**:
    **The variable must be a** :ref:`api_paddle_Tensor`

Cast a variable to a specified data type if it differs from the current dtype;
otherwise, return the original variable.

Args:

    self(Variable): The source variable

    dtype: The target data type

Returns:
    Variable: Variable with new dtype

Examples:
    In Static Graph Mode:

    .. code-block:: python

        >>> import paddle
        >>> import paddle.base as base
        >>> paddle.enable_static()
        >>> startup_prog = paddle.static.Program()
        >>> main_prog = paddle.static.Program()
        >>> with base.program_guard(startup_prog, main_prog):
        ...     original_variable = paddle.static.data(name = "new_variable", shape=[2,2], dtype='float32')
        ...     new_variable = original_variable.astype('int64')
        ...     print("new var's dtype is: {}".format(new_variable.dtype))
        ...
        new var's dtype is: paddle.int64

    In Dygraph Mode:

    .. code-block:: python

        >>> import paddle.base as base
        >>> import paddle
        >>> import numpy as np

        >>> x = np.ones([2, 2], np.float32)
        >>> with base.dygraph.guard():
        ...     original_variable = paddle.to_tensor(x)
        ...     print("original var's dtype is: {}, numpy dtype is {}".format(original_variable.dtype, original_variable.numpy().dtype))
        ...     new_variable = original_variable.astype('int64')
        ...     print("new var's dtype is: {}, numpy dtype is {}".format(new_variable.dtype, new_variable.numpy().dtype))
        ...
        original var's dtype is: paddle.float32, numpy dtype is float32
        new var's dtype is: paddle.int64, numpy dtype is int64
castr   r]   )in_dtype	out_dtyper   )rF   re   rc   )r   rF   rM   outrU   rO   s       r,   astype%monkey_patch_variable.<locals>.astypeE  sv    h ::Kd# .$=SEN#zz		B	 	 	
 !..
r/   c                8    U R                  UR                  5      $ r1   )r   rF   )r   others     r,   type_as&monkey_patch_variable.<locals>.type_as  s    {{5;;''r/   c                X   [        U[        5      (       d6  [        5       (       a   SSKJn  U" U5      nO[        S[        U5       35      eU R                  [        R                  R                  R                  :w  a  [        SU R                   35      eSSKJnJn  U" X" U 5      U S9  g)z8

Note:
   The type variable must be LoD Tensor Array.

r   )	to_tensorz4Required input var should be Variable, but received zZOnly Variable with VarType.DENSE_TENSOR_ARRAY support `append` method, but received type: )array_lengtharray_write)xry   r2   N)rs   r	   r   paddle.tensor.creationr   	TypeErrorrX   r   VarDescVarTypeDENSE_TENSOR_ARRAYpaddle.tensor.arrayr   r   )r   rI   r   r   r   s        r,   ru   %monkey_patch_variable.<locals>.append  s     #x(( ""\<nJ4PS9+V  99,,???lmqmvmvlwx  	Bc\$/t<r/   c                h    [        U R                  5      S:  a  [        SU R                   35      eU $ )z
In order to be compatible with the item interface introduced by the dynamic graph, it does nothing but returns self.
It will check that the shape must be a 1-D tensor
rm   z8Required input var should be 1-D Variable, but received )lenr^   r   r   s    r,   _item$monkey_patch_variable.<locals>._item  s6     tzz?QJ4::,W  r/   c                N  ^ ^^ SSK mSSKJn  SSKJn  T R
                  [        R                  R                  R                  :w  a  [        ST R
                   35      e[        U5      S:X  a  SnOUS   n[        U[        5      (       d   eUU4S jnUU 4S jnTR                  R                  T 5      mUS:  a  UT-   nO	U" S	/S
US9nTR                  R!                  T U5      nTR#                  T 5      n[%        USU5      n	US	-   n
U" XVX/5      u  pTR#                  U	T S9  U$ )a_  
The type variable must be LoD Tensor Array.
When self is DenseTensorArray, calling pop is similar to Python's pop on list.
This interface is used to simplify dygraph to static graph operations.

Args:
    self(Variable): The source variable, which must be DENSE_TENSOR_ARRAY
    *args: optional, a int means index.
Returns:
    Variable: self[index]
r   N)
while_loop)r\   zWOnly Variable with VarType.DENSE_TENSOR_ARRAY support `pop` method, but received type: rl   c                (   > TR                  U T5      $ r1   )	less_than)ry   r5   arr_lenpaddles     r,   r&   0monkey_patch_variable.<locals>.pop.<locals>.cond  s    ##Aw//r/   c                   > TR                   R                  TU S9nTR                   R                  UTR                   R                  U5      U5        TR	                  U 5      n X4$ )N)r2   ry   )tensor
array_readr   r   	increment)ry   r5   itemr   r   s      r,   body0monkey_patch_variable.<locals>.pop.<locals>.body  s^    ==++$!+<DMM%%fmm00;Y   #A<r/   rm   int64)r^   rF   r_   )r   )r   r;   r   r<   r\   rX   r   r   r   r   r   r   rs   intr   r   r   assignr=   )r   argsr   r\   idxr&   r   pop_itemr@   r5   ry   _r   r   s   `           @@r,   pop"monkey_patch_variable.<locals>.pop  s    	//99,,???ijnjsjsitu  t9>Cq'C#s####	0	  --,,T27-CqcDC==++D#6mmD!'Q4	!G!$qn=i-r/   c                p   > T" U 5      nT" X0R                   5      nUR                  SSU /0SU/0XS.S9  U$ )Nscaler   r]   )r   biasr   rF   re   )rI   r   r   rM   r   rU   rO   s        r,   _scalar_op_*monkey_patch_variable.<locals>._scalar_op_  sN    c" 		2#<SEN!0	 	 	
 
r/   c                   > T" U SS5      $ )N              r*   )rI   r   s    r,   _neg_$monkey_patch_variable.<locals>._neg_  s    3c**r/   c                &   > TR                  U 5      $ r1   )abs)rI   r   s    r,   _abs_$monkey_patch_variable.<locals>._abs_  s    zz#r/   c                ,    [        U R                  5      $ )ah  
Returns the dimension of current Variable

Returns:
    the dimension

Examples:
    .. code-block:: python

        >>> import paddle

        >>> paddle.enable_static()

        >>> # create a static Variable
        >>> x = paddle.static.data(name='x', shape=[3, 2, 1])
        >>> # print the dimension of the Variable
        >>> print(x.ndim)
        3
r   r^   r   s    r,   _ndim$monkey_patch_variable.<locals>._ndim  s    * 4::r/   c                ,    [        U R                  5      $ )ap  
Returns the dimension of current Variable

Returns:
    the dimension

Examples:
    .. code-block:: python

        >>> import paddle

        >>> paddle.enable_static()

        >>> # create a static Variable
        >>> x = paddle.static.data(name='x', shape=[3, 2, 1])
        >>> # print the dimension of the Variable
        >>> print(x.ndimension())
        3
r   r   s    r,   
ndimension)monkey_patch_variable.<locals>.ndimension      ( 4::r/   c                ,    [        U R                  5      $ )ai  
Returns the dimension of current Variable

Returns:
    the dimension

Examples:
    .. code-block:: python

        >>> import paddle

        >>> paddle.enable_static()

        >>> # create a static Variable
        >>> x = paddle.static.data(name='x', shape=[3, 2, 1])
        >>> # print the dimension of the Variable
        >>> print(x.dim())
        3
r   r   s    r,   dim"monkey_patch_variable.<locals>.dim'  r   r/   c                $    U R                   (       + $ )a  
Whether this Tensor requires gradient computation.

This is a convenience property that returns the opposite of stop_gradient.
Setting requires_grad=True is equivalent to setting stop_gradient=False.

Examples:
    .. code-block:: python

        >>> import paddle
        >>> x = paddle.randn([2, 3])
        >>> print(x.requires_grad)  # False by default
        >>>
        >>> x.requires_grad = False
        >>> print(x.stop_gradient)  # True
)rc   r   s    r,   requires_grad,monkey_patch_variable.<locals>.requires_grad=  s    $ %%%%r/   c                t    [        U[        5      (       d  [        S[        U5       35      eU(       + U l        g)z
Set whether this Tensor requires gradient computation.

Args:
    value (bool): True to enable gradient computation, False to disable.
$requires_grad must be bool, but got Nrs   boolr   rX   rc   )r   r_   s     r,   r   r   Q  s8     %&&6tE{mD  "'Yr/   r   c                v    [        U[        5      (       d  [        S[        U5       35      eU(       + U l        U $ )z
Set whether this Tensor requires gradient computation.

Args:
    requires_grad (bool): True to enable gradient computation, False to disable.
r   r   )r   r   s     r,   requires_grad_-monkey_patch_variable.<locals>.requires_grad__  s@     -..6tM7J6KL  "/.r/   c                   > T" U SU5      $ N      ?r*   rI   r_   r   s     r,   _scalar_add_+monkey_patch_variable.<locals>._scalar_add_m  s    3U++r/   c                   > T" U SU* 5      $ r   r*   r   s     r,   _scalar_sub_+monkey_patch_variable.<locals>._scalar_sub_p  s    3eV,,r/   c                   > T" U SU5      $ )Nr   r*   r   s     r,   _scalar_rsub_,monkey_patch_variable.<locals>._scalar_rsub_s  s    3e,,r/   c                   > T" XS5      $ )Nr   r*   r   s     r,   _scalar_mul_+monkey_patch_variable.<locals>._scalar_mul_v  s    3s++r/   c                   > T" U SU-  S5      $ )Nr   r   r*   r   s     r,   _scalar_div_+monkey_patch_variable.<locals>._scalar_div_y  s    3eS11r/   Fc                   >^ ^^^ UUUU	U
UU UUUU4S jn[         R                  " 5       R                  T5      R                  nSU S3Ul        T Ul        U$ )Nc                  > [        U[        5      (       a)  U R                  [        ;   a	  T" U S5      n Tb  T" X5      $ O[        U[        5      (       au  [        U5      nTS:X  a  U R                  [        ;   a	  T" U S5      n U R                  [
        R                  R                  R                  :X  a	  T" U S5      n Tb  T" X5      $ OA[        U[        5      (       a+  U R                  [        ;  a  T" U S5      n T" T" U 5      SS9nO T" U 5      n[        U[        5      (       dR  T(       a>  U R                   H  nUS:  d  M  T" XU5      n  O)   T" T" U 5      UUU R                  S9nOT" T" U 5      XS9nT" U5      nX$:w  GaN  T[        ;   Ga.  [        UR                  5      S:X  d  [        U R                  5      S:X  a  [
        R                  " X$5      (       a2  [        UR                  5      S:X  aK  [        U R                  5      S:X  a2  [
        R                   " TX$5      nX%:w  a  T" X5      n XE:w  a  T" X5      nO[        U R                  5      S:X  a	  T" X5      n OpT" X5      nOg[
        R"                  " TX$5      (       a5  [$        R&                  " S	T S
U SU S35        [$        R(                  " SSS9  O[+        ST SU SU S35      eT(       a  U nUn UnTS:X  d  TS:X  a@  U R                  [        ;   a,  U R                  UR                  :X  a  T" U S5      n T" US5      nT[,        ;   a  T" T" U 5      SS9nOT" T" U 5      T" U 5      S9nSnUR.                  S:  an  UR                  S   S:X  a[  [0        R2                  " 5       S   n	U	S   n
U	S   n[$        R&                  " U
 SU S[4        T    ST ST S[4        T    S3[6        S9  T" U 5      R9                  TU /U/S.S U0S!U0S"9  U$ )#Nr)   elementwise_divr   	complex64)rF   r   )rF   r^   )r_   rF   zThe input dtypes of OP z are z and z", the output will be auto-promotedignorezThe input dtypes of OP)messagezgot different data type in z	 between .divider   rl   rm   r   :z
The behavior of expression z has been unified with zw(X, Y, axis=-1) from Paddle 2.0. If your code works well in the older versions but crashes in this version, try to use z(X, Y, axis=0) instead of z:. This transitional warning will be dropped in the future.)category)r   Yr]   axisr   )rs   rd   rF   _supported_int_dtype_r   r   r   r   BOOLcomplex_supported_complex_dtype_r	   r^   SUPPORT_PROMOTION_OPSr   is_common_dtype_for_scalarget_promote_dtype_old_irneed_type_promotion_old_irr   r   filterwarningsr   compare_opsndiminspectstackEXPRESSION_MAPDeprecationWarningre   )r   	other_var	lhs_dtypeelem	rhs_dtypepromote_typer@   r   r  r  	file_nameline_numr   rU   ri   rf   r{   rO   method_nameop_typereverserJ   scalar_methods               r,   __impl__Amonkey_patch_variable.<locals>._binary_creator_.<locals>.__impl__  sH    )U++::!66!$	2D !,(99 -Is++ "),	 00

&;;!$	2D::!5!5!:!::!$0D !,(99 -Iw//::%>>!$4D 2%d+;!I
  't,Ii22 $

!8(D $)I " !+ %2)$/%"+"&**	%	 !.%d+9!I
 'y1I%"779??+q0C

Oq4H#>>%     	0A5#djj/Q:N+/+H+H ',L  )8'-d'A(8,29,K	  #4::!3'-d'>,29,H	88  !5gYeI;eT]S^  _A  B !//$.F $5gYi	{RWXaWbbcd   	 H$3D(DJJ"77JJ)//1dI."9i8	k)(t)<FK(!$'~d/C D~~!iooa&8B&>*!!H	 8 k8*,I.YdJeIff}  F  ~G Gmi9.:U9V  WQR 0	 $))"V9+6tn	 *  Jr/   z	
        z
        Args:
            self(Variable): left hand variable
            other_var(Variable|float|int): right hand variable

        Returns:
            Variable
        )r   instanceget_op_protocomment__doc____name__)r$  r%  r&  r'  r(  r,  r   rU   ri   rf   r{   rO   rJ   s   ````  r,   _binary_creator_/monkey_patch_variable.<locals>._binary_creator_|  sY    Q	 Q	f  ((*77@HH 			 	 (r/   c                    [        S5      e)NaT  int(Variable) is not supported in static graph mode. If you are using @to_static, you can try this:
1. If you want to get the value of Variable, you can switch to non-fullgraph mode by setting @to_static(full_graph=True).
2. If you want to run it in full graph mode, you need use Variable.astype(paddle.int32), and do not use int(Variable).r   r   s    r,   _int_$monkey_patch_variable.<locals>._int_   s    E
 	
r/   c                    [        S5      e)NaL  float(Variable) is not supported in static graph mode. If you are using @to_static, you can try this:
1. If you want to get the value of Variable, you can switch to non-fullgraph mode by setting @to_static(full_graph=True).
2. If you want to run it in full graph mode, you need use Variable directly, and do not use float(Variable).r2  r   s    r,   _float_&monkey_patch_variable.<locals>._float_'  s    {
 	
r/   c                    [        S5      e)NaP  complex(Variable) is not supported in static graph mode. If you are using @to_static, you can try this:
1. If you want to get the value of Variable, you can switch to non-fullgraph mode by setting @to_static(full_graph=True).
2. If you want to run it in full graph mode, you need use Variable directly, and do not use complex(Variable).r2  r   s    r,   	_complex_(monkey_patch_variable.<locals>._complex_.  s    }
 	
r/   c                l   > T" U 5      nT" XR                   5      nUR                  SSU /0SU/00 S9  U$ )Nsparse_valuesr   r   r   r   rI   rM   r   rU   rO   s      r,   values%monkey_patch_variable.<locals>.values5  sK    c" 		2 #<SEN	 	 	
 
r/   c                l   > T" U 5      nT" XR                   5      nUR                  SSU /0SU/00 S9  U$ )Nsparse_indicesr   r   r   r   r=  s      r,   indices&monkey_patch_variable.<locals>.indices@  sK    c" 		2!#<SEN	 	 	
 
r/   c                l   > T" U 5      nT" XR                   5      nUR                  SSU /0SU/00 S9  U$ )Nsparse_to_denser   r   r   r   r=  s      r,   to_dense'monkey_patch_variable.<locals>.to_denseK  sK    c" 		2"#<SEN	 	 	
 
r/   __neg____abs__r   r   r   r   r   r   r   ru   r   r   r   r   r  r   r   elementwise_addr   r   elementwise_subr   r   elementwise_mulr   r   r  r   r   r   r!   elementwise_powr"   r   elementwise_floordivr    r   elementwise_modr   r#   	matmul_v2r$   	__rmatmulr   equalr   	not_equalr   r   r   
less_equalr   greater_thanr   greater_equal	__float____int____complex__r>  rB  rF  r   rm   )NT)returnr   )r_   r   rZ  None)T)r   r   rZ  r   )FN)r   propertysetter_already_patch_variablesetattrr	   r<   r   tensor_method_funchasattrgetattrmagic_method_func).rY   r   r   r   r   r   r   ru   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r/  r3  r6  r9  r>  rB  rF  variable_methodsmethodr$  method_implmagic_methodorigin_methodimplr   r   rU   ri   rf   r{   rO   r   rJ   rC   s.                                       @@@@@@@@@@r,   monkey_patch_variablerj  z   s   F1<G"<B % %N 2 2h 
 
 	 	 	 	@D( = =2 	 	 6 6p	+  ,,, & && ' ',--,2 <@b bH


			v	Ev 
Ev 
6	v
 
Gv 
v 
v 
%v 
z"v 
-(v 
6v 
v 
v 
v 
z"v  
!v" 
-(#v$ 
>*%v( Y(95,O	
'v2 -ul	
1v> Y(95,O	
=vF -t]	
EvR Y(95,O	
Qv\ -ul	
[vh Y(95,O	
gvp 0%	
ov| Z):D$G	
{vD ^->dK	
CvL Y(95$G	
KvT Z):D$G	
Sv\  6t	
[vh !7t	
gvt Y(95$G	
sv| Z):D$G	
{vD \;tD	
CvL [+tTB	
KvT 
#HgudCDUvV 
#Hk5$GHWvX 
#Hk5$GHYvZ 
#HlE4HI[v\ 
#HneTJK]v^ 
#HoudKL_v` 
gavb 
Ecvd 
	"evf 
6gvh 
Givj 
Xkvr #"&F )K )KHk;7 '& # 	!==;;Kx--!&--dCK{+{; < ,2==+J+J'L-6==->Dt,5 ,K
 #r/   )#
__future__r   r  r   typingr    r   dygraph.baser   	frameworkr   r	   r
   r   r   r   r   r   r  UINT8INT8INT16INT32INT64r  	COMPLEX64
COMPLEX128r  r  r  r  r^  r=   rj  r*   r/   r,   <module>rw     s   #      ,   	LLLLLLLLLLLL  	LL""LL##  K 2w w 	
 w  w 7  H x 	 G X w  !" '#$ 16   
 i#r/   