from __future__ import annotations

from typing import TYPE_CHECKING, Any

import paddle
from paddle.distributed.communication import stream
from paddle.distributed.communication.group import (
    _get_global_group,
    _warn_cur_rank_not_in_group,
)
from paddle.distributed.communication.serialization_utils import (
    convert_tensor_to_object,
)

if TYPE_CHECKING:
    from paddle import Tensor
    from paddle.base.core import task
    from paddle.distributed.communication.group import Group


def recv(
    tensor: Tensor,
    src: int = 0,
    group: Group | None = None,
    sync_op: bool = True,
) -> task:
    """
    Receive a tensor from the sender.

    Args:
        tensor (Tensor): The tensor to receive. Its data type
            should be float16, float32, float64, int32, int64, int8, uint8, bool or bfloat16.
        src (int): The source rank id.
        group (Group, optional): The group instance returned by new_group, or None for the global default group. Default: None.
        sync_op (bool, optional): Whether this op is a sync op. The default value is True.

    Returns:
        Return a task object.

    Examples:
        .. code-block:: python

            >>> # doctest: +REQUIRES(env: DISTRIBUTED)
            >>> import paddle
            >>> import paddle.distributed as dist

            >>> dist.init_parallel_env()
            >>> if dist.get_rank() == 0:
            ...     data = paddle.to_tensor([7, 8, 9])
            ...     dist.send(data, dst=1)
            >>> else:
            ...     data = paddle.to_tensor([1, 2, 3])
            ...     dist.recv(data, src=0)
            >>> print(data)
            >>> # [7, 8, 9] (2 GPUs)
    F)r   r   r   Zuse_calc_stream)r   recv)r   r   r   r    r   q/home/app/PaddleOCR-VL-test/.venv_paddleocr/lib/python3.10/site-packages/paddle/distributed/communication/recv.pyr   #   s   $


def irecv(
    tensor: Tensor,
    src: int | None = 0,
    group: Group | None = None,
) -> task:
    """
    Receive a tensor from the sender.

    Args:
        tensor (Tensor): The Tensor to receive. Its data type
            should be float16, float32, float64, int32, int64, int8, uint8, bool or bfloat16.
        src (int): The source rank id.
        group (Group, optional): The group instance returned by new_group, or None for the global default group. Default: None.

    Returns:
        Return a task object.

    Warning:
        This API only supports the dygraph mode.

    Examples:
        .. code-block:: python

            >>> # doctest: +REQUIRES(env: DISTRIBUTED)
            >>> import paddle
            >>> import paddle.distributed as dist

            >>> dist.init_parallel_env()
            >>> if dist.get_rank() == 0:
            ...     data = paddle.to_tensor([7, 8, 9])
            ...     task = dist.isend(data, dst=1)
            >>> else:
            ...     data = paddle.to_tensor([1, 2, 3])
            ...     task = dist.irecv(data, src=0)
            >>> task.wait()  # type: ignore[union-attr]
            >>> print(data)
            >>> # [7, 8, 9] (2 GPUs)
    F)r   )r   )r   r   r   r   r   r   irecvL   s   $r   object_list	list[Any]src_in_groupc                 C  s  | du s
t | dkrtd|du rt n|}t|rdS |dur/|dur)td||}n|du r5dn|}tjt | fdd}t|||d t|	 }tj|ftj
d}t|||d d}t|D ]\}}	|		 }	||||	  }
t|
|	| |< ||	7 }qedS )a}  
    Receive a list of Python objects from the sender.

    Args:
        object_list (list): The list to store received objects. Must be pre-allocated with correct size.
        src (int, optional): The source rank id. Defaults to 0 when neither ``src`` nor ``src_in_group`` is specified.
        group (Group, optional): The group instance returned by new_group, or None for the global default group. Default: None.
        src_in_group (int, optional): The source rank within the group. Cannot be specified together with src. Default: None.

    Returns:
        This function does not return any value.

    Examples:
        .. code-block:: python

            >>> # doctest: +REQUIRES(env: DISTRIBUTED)
            >>> import paddle
            >>> import paddle.distributed as dist

            >>> dist.init_parallel_env()
            >>> if dist.get_rank() == 0:
            ...     data = ["hello", {"key": 100}, [1, 2, 3]]
            ...     dist.send_object_list(data, dst=1)
            >>> else:
            ...     data = [None] * 3  # type: ignore
            ...     dist.recv_object_list(data, src=0)
            >>> print(data)
            >>> # ["hello", {"key": 100}, [1, 2, 3]] (2 GPUs)
    """
    if object_list is None or len(object_list) == 0:
        raise ValueError("object_list cannot be None or empty")

    group = _get_global_group() if group is None else group
    if _warn_cur_rank_not_in_group(group):
        return

    if src_in_group is not None:
        if src is not None:
            raise ValueError(
                "Cannot specify both 'src' and 'src_in_group' arguments."
            )
        src = group.get_global_rank(src_in_group)
    else:
        src = 0 if src is None else src

    # Receive the serialized byte length of every object first, then the
    # concatenated payload, and deserialize each slice in place.
    object_sizes_tensor = paddle.empty((len(object_list),), dtype="int64")
    recv(object_sizes_tensor, src=src, group=group)

    total_size = object_sizes_tensor.sum().item()
    object_tensor = paddle.empty((total_size,), dtype="uint8")
    recv(object_tensor, src=src, group=group)

    offset = 0
    for i, obj_size in enumerate(object_sizes_tensor):
        obj_size = obj_size.item()
        obj_view = object_tensor[offset : offset + obj_size]
        object_list[i] = convert_tensor_to_object(obj_view, obj_size)
        offset += obj_size
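
# Wire-format sketch (an assumption read off the implementation above, not a
# documented contract): recv_object_list() expects the peer to send an int64
# tensor of per-object serialized sizes followed by one flat uint8 payload
# tensor, which is the layout send_object_list() produces:
#
#     if dist.get_rank() == 0:
#         dist.send_object_list(["hello", {"key": 100}, [1, 2, 3]], dst=1)
#     else:
#         out = [None] * 3
#         dist.recv_object_list(out, src=0)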