import paddle
from paddle.base import core
from paddle.base.backward import _append_grad_suffix_
from paddle.base.framework import Variable
from paddle.common_ops_import import LayerHelper, check_type, in_dygraph_mode
from paddle.utils import flatten, map_structure

from .control_flow import BlockGuard, copy_var_to_parent_block


class StaticPyLayerBlockGuard(BlockGuard):
    def __init__(self, block):
        check_type(
            block, "block", StaticPyLayerBlock, "StaticPyLayerBlockGuard"
        )
        super().__init__(block.helper.main_program)
        self.block_manager = block

    def __enter__(self):
        super().__enter__()
        return self.block_manager

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.block_manager.complete()
        return super().__exit__(exc_type, exc_val, exc_tb)


class StaticPyLayerBlock:
    def __init__(self, inputs, name=None, pylayer_context=None):
        # used as the ``Input`` of the ``pylayer`` op
        self.fwd_inputs = [
            each_input
            for each_input in inputs
            if isinstance(each_input, Variable)
        ]

        # used as the ``Out`` of the ``pylayer`` op
        self.fwd_outputs = []

        self.context = pylayer_context
        self.helper = LayerHelper("static_pylayer_block", name=name)
        self.fwd_op_id = None
        self._forward_block_id = None
        self._backward_block_id = None
        # mapping from old var names to new var names, applied to the
        # backward block when it is completed
        self.var_old_to_new = {}

    def block(self, is_backward_block=False):
        self.is_backward_block = is_backward_block
        return StaticPyLayerBlockGuard(self)

    @property
    def forward_block_index(self):
        return self._forward_block_id

    @property
    def backward_block_index(self):
        return self._backward_block_id

    @property
    def fwd_op_index(self):
        return self.fwd_op_id

    def complete_forward_block(self):
        inside_block = self.helper.main_program.current_block()
        parent_block = self.helper.main_program.block(inside_block.parent_idx)
        self._forward_block_id = inside_block.idx

        step_scope = parent_block.create_var(
            type=core.VarDesc.VarType.STEP_SCOPES
        )
        pylayer_op = parent_block.append_op(
            type='pylayer',
            inputs={
                'Input': self.fwd_inputs,
            },
            outputs={"Out": self.fwd_outputs, "Scope": [step_scope]},
            attrs={
                'blocks': [inside_block],
            },
        )
        self.fwd_op_id = pylayer_op.idx
        self.helper.main_program._sync_with_cpp()

    def complete_backward_block(self):
        inside_block = self.helper.main_program.current_block()
        parent_block = self.helper.main_program.block(inside_block.parent_idx)
        self._backward_block_id = inside_block.idx

        # mark every op in the backward block with the Backward op role
        for op in inside_block.ops:
            op_role_attr_name = (
                core.op_proto_and_checker_maker.kOpRoleAttrName()
            )
            backward = core.op_proto_and_checker_maker.OpRole.Backward
            op.desc._set_attr(op_role_attr_name, backward)
        inside_block._set_forward_block_idx(self.forward_block_index)

        # rename the grad vars created while building the backward block so
        # that they match the names expected by the ``pylayer`` op
        _rename_var_recursively_(inside_block, self.var_old_to_new)

        # update the ``blocks`` attr of the ``pylayer`` op by appending the
        # backward block
        forward_block_desc = parent_block.program.block(
            self.forward_block_index
        ).desc
        backward_block_desc = inside_block.desc
        parent_block.ops[self.fwd_op_index].desc.set_blocks_attr(
            "blocks", [forward_block_desc, backward_block_desc]
        )

        if self.context:
            # drop the vars saved by the PyLayer context from the backward
            # block; they must already exist there
            for var in self.context.saved_vars:
                if not inside_block.has_var(var.name):
                    raise ValueError(
                        f"{var.name} was saved in forward block but could not be found in backward block. Maybe {var.name} was renamed somewhere."
                    )
                inside_block._remove_var(var.name)

        self.helper.main_program._sync_with_cpp()

    def complete(self):
        if not self.is_backward_block:
            return self.complete_forward_block()
        else:
            return self.complete_backward_block()


def _get_ctx_from_func_(func):
    if func is None:
        return None

    # ``func`` may carry bound arguments (e.g. a ``functools.partial``); if
    # the first bound argument is a StaticPyLayerContext, return it
    fn_bind_args = getattr(func, "args", None)
    if fn_bind_args is None:
        return None

    from paddle.jit.dy2static.py_layer import StaticPyLayerContext

    fn_ctx = None
    if len(fn_bind_args) > 0 and isinstance(
        fn_bind_args[0], StaticPyLayerContext
    ):
        fn_ctx = fn_bind_args[0]
    return fn_ctx


def _rename_var_recursively_(cur_block, var_old_to_new):
    """
    Rename vars in `cur_block` according to the dict `var_old_to_new`: both the
    Variable instances and all ops' input and output argument names are renamed,
    recursing into any sub-blocks.

    Dict `var_old_to_new` should have the following format:
    {
        old_name_0 : new_name_0,
        old_name_1 : new_name_1,
        ...
        old_name_n : new_name_n,
    }
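
    For example, a dict such as {"tmp_0@GRAD" : "x@GRAD"} (the names here are only
    illustrative) renames every occurrence of "tmp_0@GRAD" in `cur_block` and its
    sub-blocks, covering both the Variable itself and the op argument names that
    refer to it.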
    """
    for old_var_name, new_var_name in var_old_to_new.items():
        if cur_block.has_var(old_var_name):
            # rename the Variable itself; ``_rename_var`` expects bytes
            cur_block._rename_var(
                old_var_name.encode(), new_var_name.encode()
            )
        else:
            # the var is not owned by this block, so only rename the op
            # argument names that refer to it
            for op in cur_block.ops:
                op._rename_input(old_var_name, new_var_name)
                op._rename_output(old_var_name, new_var_name)

    # recurse into the blocks referenced by block-type attributes
    block_attr_names = ["blocks", "sub_block"]
    for op in cur_block.ops:
        for attr_name in op.all_attrs():
            if attr_name not in block_attr_names:
                continue

            if op.attr_type(attr_name) == core.AttrType.BLOCK:
                sub_block_id = op._block_attr_id(attr_name)
                sub_block = cur_block.program.block(sub_block_id)
                _rename_var_recursively_(sub_block, var_old_to_new)
            elif op.attr_type(attr_name) == core.AttrType.BLOCKS:
                sub_blocks_ids = op._blocks_attr_ids(attr_name)
                for sub_block_id in sub_blocks_ids:
                    sub_block = cur_block.program.block(sub_block_id)
                    _rename_var_recursively_(sub_block, var_old_to_new)


def copy_var_from_parent_block(parent_block_var, layer_helper):
    if not isinstance(parent_block_var, Variable):
        return parent_block_var
    prog = layer_helper.main_program
    current_block = prog.current_block()

    if (
        parent_block_var.type == core.VarDesc.VarType.LOD_TENSOR_ARRAY
        and current_block._find_var_recursive(parent_block_var.name)
    ):
        current_block_var = parent_block_var
    else:
        current_block_var = current_block.create_var(
            dtype=parent_block_var.dtype,
            shape=parent_block_var.shape,
            type=parent_block_var.type,
        )
        paddle.assign(parent_block_var, current_block_var)
    return current_block_var


def static_pylayer(forward_fn, inputs, backward_fn=None, name=None):
    """
    This API returns ``forward_fn(inputs)``, and two sub-blocks are created based on
    the logic of ``forward_fn`` and ``backward_fn``, with the operator ``pylayer``
    holding information about the two blocks.

    ``forward_fn`` and ``backward_fn`` should each return a nested structure of Variables.
    A nested structure of Variables in PaddlePaddle is a Variable, a tuple of Variables, or
    a list of Variables.

    Note:
        1. If ``backward_fn`` is not None, the user needs to keep the number of `Variable` inputs to ``forward_fn`` the same as the
        number of `Variable` outputs to ``backward_fn``, and the number of `Variable` outputs to ``forward_fn``
        the same as the number of `Variable` inputs to ``backward_fn``.

        2. If ``backward_fn`` is None, the ``stop_gradient`` attr of every `Variable` in ``inputs`` is expected to be True.
        Otherwise, backward propagation might produce unexpected results.

        3. This API can only be used under static graph mode.

    Args:
        forward_fn (callable): A callable to be performed in forward propagation
        inputs (list[Variable]): The list of input Variable to the ``forward_fn``
        backward_fn (callable, optional): A callable to be performed in backward propagation. Default: None, which means no need to do backward propagation.
        name (str, optional): The default value is ``None`` . Normally users
            don't have to set this parameter. For more information, please
            refer to :ref:`api_guide_Name` .

    Returns:
        Variable|list(Variable)|tuple(Variable): returns the output of ``forward_fn(inputs)``

    Examples:
        .. code-block:: python

                >>> import paddle
                >>> import numpy as np

                >>> paddle.enable_static()

                >>> def forward_fn(x):
                ...     return paddle.exp(x)

                >>> def backward_fn(dy):
                ...     return 2 * paddle.exp(dy)

                >>> main_program = paddle.static.Program()
                >>> start_program = paddle.static.Program()

                >>> place = paddle.CPUPlace()
                >>> exe = paddle.static.Executor(place)
                >>> with paddle.static.program_guard(main_program, start_program):
                ...     data = paddle.static.data(name="X", shape=[None, 5], dtype="float32")
                ...     data.stop_gradient = False
                ...     ret = paddle.static.nn.static_pylayer(forward_fn, [data], backward_fn)
                ...     data_grad = paddle.static.gradients([ret], data)[0]

                >>> exe.run(start_program)
                >>> x = np.array([[1.0, 2.0, 3.0, 4.0, 5.0]], dtype=np.float32)
                >>> x, x_grad, y = exe.run(
                ...     main_program,
                ...     feed={"X": x},
                ...     fetch_list=[
                ...         data.name,
                ...         data_grad.name,
                ...         ret.name
                ...     ],
                ... )

                >>> print(x)
                [[1. 2. 3. 4. 5.]]
                >>> print(x_grad)
                [[5.4365635 5.4365635 5.4365635 5.4365635 5.4365635]]
                >>> print(y)
                [[  2.7182817   7.389056   20.085537   54.59815   148.41316  ]]
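
        A minimal sketch of the ``backward_fn=None`` case: each input keeps the
        default ``stop_gradient=True`` and only the forward block is built. The
        variable names here are illustrative only.

        .. code-block:: python

                >>> import paddle

                >>> paddle.enable_static()

                >>> def forward_fn(x):
                ...     return paddle.exp(x)

                >>> main_program = paddle.static.Program()
                >>> start_program = paddle.static.Program()
                >>> with paddle.static.program_guard(main_program, start_program):
                ...     data = paddle.static.data(name="X", shape=[None, 5], dtype="float32")
                ...     # inputs keep ``stop_gradient=True`` (the default), so no backward_fn is passed
                ...     ret = paddle.static.nn.static_pylayer(forward_fn, [data])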
    """
    assert (
        in_dygraph_mode() is False
    ), "please use PyLayer instead of static_pylayer in dygraph mode"
    assert isinstance(inputs, list)
    if backward_fn is None:
        for input_var in inputs:
            if input_var.stop_gradient is False:
                raise ValueError(
                    "``stop_gradient`` attr of all inputs to ``forward_fn`` are expected to be True, when ``backward_fn == None``, but {}.stop_gradient got {}".format(
                        input_var.name, input_var.stop_gradient
                    )
                )

    # only keep the PyLayer context if ``forward_fn`` and ``backward_fn`` are
    # bound to the same one
    fwd_fn_ctx = _get_ctx_from_func_(forward_fn)
    bwd_fn_ctx = _get_ctx_from_func_(backward_fn)
    static_pylayer_context = (
        fwd_fn_ctx if fwd_fn_ctx and fwd_fn_ctx == bwd_fn_ctx else None
    )

    check_type(name, 'name', (str, type(None)), 'base.layers.static_pylayer')
    helper = LayerHelper('static_pylayer', **locals())
    copy_to_parent_func = lambda var: copy_var_to_parent_block(var, helper)

    assert forward_fn is not None and callable(forward_fn)
    pylayer_block_manager = StaticPyLayerBlock(
        inputs, pylayer_context=static_pylayer_context
    )
    with pylayer_block_manager.block(is_backward_block=False) as mgr:
        # build the forward block and copy its outputs to the parent block
        origin_output = forward_fn(*inputs)
        if origin_output is not None:
            output = map_structure(copy_to_parent_func, origin_output)
            mgr.fwd_outputs = [
                x for x in flatten(output) if isinstance(x, Variable)
            ]
        else:
            mgr.fwd_outputs = []

    current_block = helper.main_program.current_block()

    if backward_fn is not None:
        assert callable(backward_fn)
        if origin_output is None:
            output = []

        # create the grad vars of the forward outputs; they serve as the
        # inputs of the backward block
        grad_var_ins = []
        for fwd_var in pylayer_block_manager.fwd_outputs:
            fwd_var_name = fwd_var.name
            bwd_var_name = _append_grad_suffix_(fwd_var_name)
            if not current_block.desc.has_var_recursive(
                fwd_var_name.encode()
            ):
                raise ValueError(
                    "Grad var {} , we can't find its related forward var {}".format(
                        bwd_var_name, fwd_var_name
                    )
                )

            var = current_block.create_var(
                dtype=fwd_var.dtype,
                shape=fwd_var.shape,
                type=fwd_var.type,
                name=bwd_var_name,
            )
            grad_var_ins.append(var)

        copy_from_parent_func = lambda var: copy_var_from_parent_block(
            var, helper
        )
        assert isinstance(grad_var_ins, list)
        with pylayer_block_manager.block(is_backward_block=True) as mgr:
            # Step 1. copy the grad vars from the parent block
            inside_block_inputs = map_structure(
                copy_from_parent_func, grad_var_ins
            )
            # Step 2. build the backward block from ``backward_fn``
            grad_origin_output = backward_fn(*inside_block_inputs)

            if grad_origin_output is not None:
                flat_grad_origin = flatten(grad_origin_output)
                # Step 3. map the vars returned by ``backward_fn`` to the grad
                # names of the forward inputs, e.g. "X" -> "X@GRAD"
                forward_input_names = current_block.ops[
                    pylayer_block_manager.fwd_op_index
                ].desc.input_arg_names()
                assert len(forward_input_names) == len(
                    flat_grad_origin
                ), f"needs to keep the number of inputs to ``forward_fn`` the same as the number of outputs to ``backward_fn``, \
                    but got {len(forward_input_names)} and {len(flat_grad_origin)}"

                for bwd_output, fwd_input_name in zip(
                    flat_grad_origin, forward_input_names
                ):
                    if isinstance(bwd_output, Variable):
                        bwd_out_new = _append_grad_suffix_(fwd_input_name)
                        mgr.var_old_to_new[bwd_output.name] = bwd_out_new

        # the grad vars created above were only needed to build the backward
        # block; remove them from the current block
        for bwd_var in grad_var_ins:
            current_block._remove_var(bwd_var.name)

    if origin_output is None:
        return None

    return output