import warnings

import paddle
import paddle.distributed as dist
from paddle import framework


class Group:
    """
    The abstract representation of a communication group.
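
    A `Group` bundles the global ranks that participate in a communicator
    (`ranks`), this process's position within it (`rank`), and the backing
    process group handle (`process_group`).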
    """

    def __init__(self, rank_in_group, id, ranks, pg=None, name=None):
        self._rank_in_group = rank_in_group
        self._world_size = len(ranks) if rank_in_group >= 0 else -1
        self._id = id
        self._ranks = ranks
        self._pg = pg
        self._pg_name = name

    @property
    def rank(self):
        return self._rank_in_group

    @property
    def ranks(self):
        return self._ranks

    @property
    def nranks(self):
        return len(self._ranks)

    @property
    def name(self):
        return self._pg_name

    @property
    def process_group(self):
        return self._pg

    @property
    def world_size(self):
        return self._world_size

    @property
    def backend(self):
        return self._pg.name()

    @property
    def id(self):
        return self._id

    def is_member(self):
        if self.rank < 0:
            return False
        if self.nranks < 2:
            return False
        return True

    def get_group_rank(self, rank):
        if self.is_member():
            return self.ranks.index(rank)
        return -1

    def __repr__(self):
        debug_str = f"rank: {self.rank}, nranks: {self.nranks}, id: {self.id}, ranks: "
        debug_str += ", ".join(map(str, self.ranks))
        debug_str += "; name: "
        debug_str += self.name if self.name else "None"
        return debug_str

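# Registry of all live communication groups, keyed by group id. The global
# (world) group always uses id `global_group_id` (0).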
class _GroupManager:
    global_group_id = 0
    group_map_by_id = {}


def _get_global_group():
    if _GroupManager.global_group_id not in _GroupManager.group_map_by_id:
        raise RuntimeError("The global group is not initialized.")
    return _GroupManager.group_map_by_id[_GroupManager.global_group_id]


def _add_new_group(group):
    if group.id in _GroupManager.group_map_by_id:
        raise RuntimeError(f"The group with id {group.id} already exists.")
    _GroupManager.group_map_by_id[group.id] = group


def _is_global_group(group):
    return group.id == _GroupManager.global_group_id


def _warn_cur_rank_not_in_group(group):
    global_rank = dist.get_rank()
    if group and not group.is_member():
        warnings.warn(
            f"Current global rank {global_rank} is not in group {group.name}"
        )
        return True
    return False


def _get_or_throw_group_rank(global_rank, group):
    group_rank = group.get_group_rank(global_rank)
    assert (
        group_rank >= 0
    ), f"The input rank {global_rank} cannot be found inside the group {group.name}"
    return group_rank


def is_initialized():
    """

    Check whether the distributed environment has been initialized.

    Returns:
        `True` if distributed environment has been initialized, otherwise `False`.

    Warning:
        This API only supports the dygraph mode.

    Examples:
        .. code-block:: python

            >>> # doctest: +REQUIRES(env: DISTRIBUTED)
            >>> import paddle

            >>> print(paddle.distributed.is_initialized())
            False

            >>> paddle.distributed.init_parallel_env()
            >>> print(paddle.distributed.is_initialized())
            True

    """
    return _GroupManager.global_group_id in _GroupManager.group_map_by_id


def destroy_process_group(group=None):
    """
    Destroy a given group for communication

    Args:
        group (Group, optional): The group to be destroyed. If None (the default),
            the global group is destroyed, every process group (including the
            default group) is removed, and the distributed environment is
            deinitialized.

    Returns:
        None.

    Warning:
        This API only supports the dygraph mode.

    Examples:
        .. code-block:: python

            >>> # doctest: +REQUIRES(env: DISTRIBUTED)
            >>> import paddle
            >>> import paddle.distributed as dist

            >>> dist.init_parallel_env()
            >>> group = dist.new_group([0, 1])

            >>> dist.destroy_process_group(group)
            >>> print(dist.is_initialized())
            True
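            >>> # Destroying with no argument tears down the global group and
            >>> # every remaining sub-group (see Args above).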
            >>> dist.destroy_process_group()
            >>> print(dist.is_initialized())
            False

    """
    group = _get_global_group() if group is None else group
    assert (
        group.id in _GroupManager.group_map_by_id
    ), f"Destroy group with id {group.id} is invalid."
    if _is_global_group(group):
        _GroupManager.group_map_by_id.clear()
    else:
        del _GroupManager.group_map_by_id[group.id]


def get_group(id=0):
    """

    Get group instance by group id.

    Args:
        id (int): the group id. Default value is 0.

    Returns:
        Group: the group instance.

    Examples:
        .. code-block:: python

            >>> # doctest: +REQUIRES(env: DISTRIBUTED)
            >>> import paddle
            >>> import paddle.distributed as dist

            >>> dist.init_parallel_env()
            >>> gid = paddle.distributed.new_group([2,4,6])
            >>> paddle.distributed.get_group(gid.id)
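            >>> # The id of the global default group is 0 (illustrative):
            >>> paddle.distributed.get_group(0)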

    zGroup z is not initialized.N)r+   r-   r5   r6   )r   r   r   r   	get_group   s   

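# The two private helpers below bridge the two execution modes: in dygraph
# they invoke the legacy C++ ops directly, while in static graph mode they
# append the corresponding operator to the current program via LayerHelper.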
def _sync_calc_stream(tensor):
    if framework.in_dynamic_mode():
        return paddle._legacy_C_ops.c_sync_calc_stream(tensor, tensor)
    op_type = 'c_sync_calc_stream'
    helper = framework.LayerHelper(op_type, **locals())
    helper.append_op(
        type=op_type,
        inputs={'X': [tensor]},
        outputs={'Out': [tensor]},
    )


def _sync_comm_stream(tensor, ring_id=0):
    if framework.in_dynamic_mode():
        return paddle._legacy_C_ops.c_sync_comm_stream(
            [tensor], [tensor], 'ring_id', ring_id
        )
    op_type = 'c_sync_comm_stream'
    helper = framework.LayerHelper(op_type, **locals())
    helper.append_op(
        type=op_type,
        inputs={'X': [tensor]},
        outputs={'Out': [tensor]},
        attrs={'ring_id': ring_id},
    )


def wait(tensor, group=None, use_calc_stream=True):
    """

    Wait to synchronize the stream for the given group.

    Args:
        tensor (Tensor): The Tensor used before the sync.
        group (Group): The group instance to perform the sync on.
        use_calc_stream (bool): Whether to use the calculation stream (True) or the
            communication stream (False). Defaults to True.

    Returns:
        None.

    Examples:
        .. code-block:: python

            >>> # doctest: +REQUIRES(env: DISTRIBUTED)
            >>> import paddle

            >>> paddle.distributed.init_parallel_env()
            >>> tindata = paddle.randn(shape=[2, 3])
            >>> paddle.distributed.all_reduce(tindata, sync_op=True)
            >>> paddle.distributed.wait(tindata)
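            >>> # Or sync on the communication stream instead (illustrative):
            >>> paddle.distributed.wait(tindata, use_calc_stream=False)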

    """
    if group is not None and not group.is_member():
        return

    if use_calc_stream:
        _sync_calc_stream(tensor)
    else:
        ring_id = 0 if group is None else group.id
        _sync_comm_stream(tensor, ring_id)


def barrier(group=None):
    """

    Barrier among all participators in the group.

    Args:
        group (Group): The group instance returned by new_group, or None for the global default group.

    Returns:
        None.

    Examples:
        .. code-block:: python

            >>> # doctest: +REQUIRES(env: DISTRIBUTED)
            >>> import paddle
            >>> from paddle.distributed import init_parallel_env

            >>> paddle.set_device('gpu:%d'%paddle.distributed.ParallelEnv().dev_id)
            >>> init_parallel_env()
            >>> paddle.distributed.barrier()
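            >>> # Barrier restricted to a sub-group (illustrative, assuming
            >>> # ranks 0 and 1 exist):
            >>> group = paddle.distributed.new_group([0, 1])
            >>> paddle.distributed.barrier(group)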
    """
    if group is not None and not group.is_member():
        return

    if framework.in_dynamic_mode():
        group = _get_global_group() if group is None else group
        place = framework._current_expected_place()
        if isinstance(place, paddle.CPUPlace):
            task = group.process_group.barrier()
        else:
            device_id = place.get_device_id()
            task = group.process_group.barrier(device_id)
        task.wait()
        return

    # Static graph: append a barrier op that synchronizes on ring_id.
    ring_id = 0 if group is None else group.id
    if not isinstance(ring_id, int):
        raise ValueError("The type of 'group' for barrier must be int.")
    barrier_tensor = paddle.full([1], 1, dtype="int32")
    op_type = 'barrier'
    helper = framework.LayerHelper(op_type, **locals())
    helper.append_op(
        type=op_type,
        inputs={'X': [barrier_tensor]},
        outputs={'Out': [barrier_tensor]},
        attrs={'ring_id': ring_id},
    )


def get_backend(group=None):
    """
    Get the backend of given group.

    Args:
        group (Group): The group to work on. Use the global group as default.

    Returns:
        The name of the backend of the given group.

    Examples:
        .. code-block:: python

            >>> # doctest: +REQUIRES(env: DISTRIBUTED)
            >>> import paddle

            >>> paddle.distributed.init_parallel_env()
            >>> paddle.distributed.get_backend()
            NCCL
    zInvalid group specifiedN)r8   r.   r/   r   r0   r   r   r   get_backend^  s   r[   r   )r   )NT)r5   rE   Zpaddle.distributeddistributedr4   r   r   r+   r/   r2   r3   r8   r9   r:   r<   r=   rM   rR   rS   rU   r[   r   r   r   r   <module>   s&   D


*


$8