from __future__ import annotations

import warnings
from typing import TYPE_CHECKING, Literal

import paddle
import paddle.distributed as dist
from paddle import framework

if TYPE_CHECKING:
    from paddle import Tensor
    from paddle.base.core import ProcessGroup


class Group:
    """
    The abstract representation of group.
    Nrank_in_groupintidranks	list[int]pgProcessGroup | Nonename
str | NonereturnNonec                 C  s8   || _ |dkrt|nd| _|| _|| _|| _|| _d S )Nr   )_rank_in_grouplen_world_size_id_ranks_pg_name)selfr	   r   r   r   r    r   r/home/app/PaddleOCR-VL-test/.venv_paddleocr/lib/python3.10/site-packages/paddle/distributed/communication/group.py__init__"   s   
zGroup.__init__c                 C     | j S N)r   r   r   r   r   rank1      z
Group.rankc                 C  r    r!   )r   r"   r   r   r   r   5   r$   zGroup.ranksc                 C  s
   t | jS r!   )r   r   r"   r   r   r   nranks9      
zGroup.nranksc                 C  r    r!   )r   r"   r   r   r   r   =   r$   z
Group.namer   c                 C  r    r!   )r   r"   r   r   r   process_groupA   r$   zGroup.process_groupc                 C  r    r!   )r   r"   r   r   r   
world_sizeE   r$   zGroup.world_sizestrc                 C  s
   | j  S r!   )r   r   r"   r   r   r   backendI   r&   zGroup.backendc                 C  r    r!   )r   r"   r   r   r   r   M   r$   zGroup.idboolc                 C  s    | j dk rdS | jdk rdS dS )Nr   F   T)r#   r%   r"   r   r   r   	is_memberQ   s
   

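
    # Illustrative sketch (assumed values, not from the original source): a
    # process holding global rank 4 inside a group built from global ranks
    # [2, 4, 6] would be modeled as
    #
    #   g = Group(rank_in_group=1, id=1, ranks=[2, 4, 6])
    #   g.is_member()   # True: rank >= 0 and nranks >= 2
    #   g.world_size    # 3
    #
    # while a process outside the group carries rank_in_group=-1, so
    # is_member() returns False.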
zGroup.is_memberr#   int | Literal[-1]c                 C  s   |   r
| j|S dS )Nr   )r-   r   indexr   r#   r   r   r   get_group_rankX   s   zGroup.get_group_rankc                 C  s   |   r	| j| S dS )a#  
        Get the global rank of a process within a group.

        Args:
            rank (int): The local rank within the group.

        Returns:
            If the current process is a member of the group, returns the corresponding global rank;
            otherwise returns -1.

        r   )r-   r   r0   r   r   r   get_global_rank^   s   
zGroup.get_global_rankc                 C  sZ   d| j  d| j d| j d}|dtt| j7 }|d7 }|| jr(| j7 }|S d7 }|S )Nzrank: z
, nranks: z, id: z	, ranks: z, z; name: r   )r#   r%   r   joinmapr)   r   r   )r   Z	debug_strr   r   r   __repr__o   s   zGroup.__repr__)NN)r	   r
   r   r
   r   r   r   r   r   r   r   r   )r   r
   )r   r   )r   r   )r   r   )r   r)   r   r+   )r#   r
   r   r.   )__name__
__module____qualname____doc__r   propertyr#   r   r%   r   r'   r(   r*   r   r-   r1   r2   r5   r   r   r   r   r      s2    	


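

# _GroupManager below keeps a process-local registry that maps a group id to
# its Group object; the helpers that follow all read or mutate it. A minimal
# sketch (gid is hypothetical; the global group always lives at id 0):
#
#   _GroupManager.group_map_by_id[0]    # the default/global group
#   _GroupManager.group_map_by_id[gid]  # a group created later, e.g. by new_group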
class _GroupManager:
    global_group_id = 0
    group_map_by_id = {}


def _get_global_group():
    if _GroupManager.global_group_id not in _GroupManager.group_map_by_id:
        raise RuntimeError("The global group is not initialized.")
    return _GroupManager.group_map_by_id[_GroupManager.global_group_id]


def _add_new_group(group):
    if group.id in _GroupManager.group_map_by_id:
        raise RuntimeError(f"The group with id {group.id} already exist.")
    _GroupManager.group_map_by_id[group.id] = group


def _is_global_group(group):
    return group.id == _GroupManager.global_group_id


def _warn_cur_rank_not_in_group(group):
    global_rank = dist.get_rank()
    if group and not group.is_member():
        warnings.warn(
            f"Current global rank {global_rank} is not in group {group.name}"
        )
        return True
    return False


def _get_or_throw_group_rank(global_rank, group):
    group_rank = group.get_group_rank(global_rank)
    assert (
        group_rank >= 0
    ), f"The input rank {global_rank} can not be found inside the group {group.name}"
    return group_rank


def is_initialized() -> bool:
    """

    Check whether the distributed environment has been initialized.

    Returns:
        `True` if the distributed environment has been initialized, otherwise `False`.

    Warning:
        This API only supports the dygraph mode.

    Examples:
        .. code-block:: python

            >>> # doctest: +REQUIRES(env: DISTRIBUTED)
            >>> import paddle

            >>> print(paddle.distributed.is_initialized())
            False

            >>> paddle.distributed.init_parallel_env()
            >>> print(paddle.distributed.is_initialized())
            True

    """
    return _GroupManager.global_group_id in _GroupManager.group_map_by_id


def destroy_process_group(group: Group | None = None) -> None:
    """
    Destroy a given group for communication.

    Args:
        group (Group, optional): The group to be destroyed. If None, the default
                                        group and all other process groups will be destroyed, and
                                        the distributed environment will be deinitialized.

    Returns:
        None.

    Warning:
        This API only supports the dygraph mode.

    Examples:
        .. code-block:: python

            >>> # doctest: +REQUIRES(env: DISTRIBUTED)
            >>> import paddle
            >>> import paddle.distributed as dist

            >>> dist.init_parallel_env()
            >>> group = dist.new_group([0, 1])

            >>> dist.destroy_process_group(group)
            >>> print(dist.is_initialized())
            True
            >>> dist.destroy_process_group()
            >>> print(dist.is_initialized())
            False

    NzDestroy group with id z is invalid.)r@   r   r<   r>   rD   clearrA   r   r   r   destroy_process_group   s    rN   r   r
   c                 C  s*   | t jv r
t j|  S td|  d dS )a  

    Get group instance by group id.

    Args:
        id (int): the group id. Default value is 0.

    Returns:
        Group: the group instance, or None (with a warning) if no group with the given id exists.

    Examples:
        .. code-block:: python

            >>> # doctest: +REQUIRES(env: DISTRIBUTED)
            >>> import paddle
            >>> import paddle.distributed as dist

            >>> dist.init_parallel_env()
            >>> gid = paddle.distributed.new_group([2,4,6])
            >>> paddle.distributed.get_group(gid.id)

    zGroup z is not initialized.N)r<   r>   rF   rG   )r   r   r   r   	get_group   s   

rO   c                 C  sL   t  r
tj| S d}t j|fi t }|j|d| gid| gid d S )NZc_sync_calc_streamXOut)typeinputsoutputs)r   in_dynamic_modepaddle_C_opsZsync_calc_streamLayerHelperlocals	append_op)tensorop_typehelperr   r   r   _sync_calc_stream  s   
r^   c                 C  sV   t  rtj| g|S d}t j|fi t }|j|d| gid| gid|id d S )NZc_sync_comm_streamrP   rQ   ring_idrR   rS   rT   attrs)r   rU   rV   rW   Zsync_comm_streamrX   rY   rZ   )r[   r_   r\   r]   r   r   r   _sync_comm_stream  s   
rb   Tr[   r   use_calc_streamc                 C  sD   |dur
|  s
dS |rt|  dS |du rdn|j}t| | dS )a  

    Wait to synchronize the stream for the given group.

    Args:
        tensor (Tensor): The Tensor used before sync.
        group (Group): The Group instance to perform sync.
        use_calc_stream (bool): Whether to use calculation stream (True) or communication stream (False).
            Defaults to True.

    Returns:
        None.

    Examples:
        .. code-block:: python

            >>> # doctest: +REQUIRES(env: DISTRIBUTED)
            >>> import paddle

            >>> paddle.distributed.init_parallel_env()
            >>> tindata = paddle.randn(shape=[2, 3])
            >>> paddle.distributed.all_reduce(tindata, sync_op=True)
            >>> paddle.distributed.wait(tindata)

    """
    if group is not None and not group.is_member():
        return

    if use_calc_stream:
        # Synchronize on the calculation stream.
        _sync_calc_stream(tensor)
    else:
        # Synchronize on the communication stream of the given group (ring).
        ring_id = 0 if group is None else group.id
        _sync_comm_stream(tensor, ring_id)


def barrier(group: Group | None = None) -> None:
    """

    Barrier among all participants in the group.

    Args:
        group (Group): The group instance returned by new_group, or None for the global default group.

    Returns:
        None.

    Examples:
        .. code-block:: python

            >>> # doctest: +REQUIRES(env: DISTRIBUTED)
            >>> import paddle
            >>> from paddle.distributed import init_parallel_env

            >>> paddle.set_device(f'gpu:{paddle.distributed.ParallelEnv().dev_id}')
            >>> init_parallel_env()
            >>> paddle.distributed.barrier()
    """
    if group is not None and not group.is_member():
        return

    if framework.in_dynamic_mode():
        group = _get_global_group() if group is None else group
        place = framework._current_expected_place()
        if isinstance(place, framework.core.CPUPlace):
            task = group.process_group.barrier()
        else:
            device_id = place.get_device_id()
            task = group.process_group.barrier(device_id)
        task.wait()
        return

    ring_id = 0 if group is None else group.id

    barrier_tensor = paddle.full([1], 1, dtype='int32')
    if framework.in_dynamic_mode():
        # The barrier op is not supported on XPU.
        if not framework.core.is_compiled_with_xpu():
            return paddle._legacy_C_ops.barrier(
                barrier_tensor, barrier_tensor, 'ring_id', ring_id
            )
    else:
        op_type = 'barrier'
        if not isinstance(ring_id, int):
            raise ValueError("The type of 'group' for barrier must be int.")
        helper = framework.LayerHelper(op_type, **locals())
        helper.append_op(
            type=op_type,
            inputs={'X': [barrier_tensor]},
            outputs={'Out': [barrier_tensor]},
            attrs={'ring_id': ring_id},
        )


def get_backend(group: Group | None = None) -> str:
    """
    Get the backend of the given group.

    Args:
        group (Group): The group to work on. Use the global group as default.

    Returns:
        The name of the backend of the given group.

    Examples:
        .. code-block:: python

            >>> # doctest: +REQUIRES(env: DISTRIBUTED)
            >>> import paddle

            >>> paddle.distributed.init_parallel_env()
            >>> paddle.distributed.get_backend()
            NCCL
    zInvalid group specifiedN)rI   r?   r@   r*   rA   r   r   r   get_backend  s   rl   r6   r!   )rB   rL   r   r   )r   )r   r
   r   r   )NT)r[   r   rB   rL   rc   r+   r   r   )rB   rL   r   r)   )
__future__r   rF   typingr   r   rV   Zpaddle.distributeddistributedrE   r   r   Zpaddle.base.corer   r   r<   r@   rC   rD   rI   rJ   rK   rN   rO   r^   rb   rd   rf   rl   r   r   r   r   <module>   s2   \

*
&: