import math
from collections.abc import Iterator
from typing import TypeVar

import torch
import torch.distributed as dist
from torch.utils.data.dataset import Dataset
from torch.utils.data.sampler import Sampler


__all__ = ["DistributedSampler"]


_T_co = TypeVar("_T_co", covariant=True)


class DistributedSampler(Sampler[_T_co]):
    r"""Sampler that restricts data loading to a subset of the dataset.

    It is especially useful in conjunction with
    :class:`torch.nn.parallel.DistributedDataParallel`. In such a case, each
    process can pass a :class:`~torch.utils.data.DistributedSampler` instance as a
    :class:`~torch.utils.data.DataLoader` sampler, and load a subset of the
    original dataset that is exclusive to it.

    .. note::
        The dataset is assumed to be of constant size, and any instance of it
        always returns the same elements in the same order.

    Args:
        dataset: Dataset used for sampling.
        num_replicas (int, optional): Number of processes participating in
            distributed training. By default, :attr:`world_size` is retrieved from the
            current distributed group.
        rank (int, optional): Rank of the current process within :attr:`num_replicas`.
            By default, :attr:`rank` is retrieved from the current distributed
            group.
        shuffle (bool, optional): If ``True`` (default), sampler will shuffle the
            indices.
        seed (int, optional): random seed used to shuffle the sampler if
            :attr:`shuffle=True`. This number should be identical across all
            processes in the distributed group. Default: ``0``.
        drop_last (bool, optional): if ``True``, then the sampler will drop the
            tail of the data to make it evenly divisible across the number of
            replicas. If ``False``, the sampler will add extra indices to make
            the data evenly divisible across the replicas. Default: ``False``.

    .. warning::
        In distributed mode, calling the :meth:`set_epoch` method at
        the beginning of each epoch **before** creating the :class:`DataLoader` iterator
        is necessary to make shuffling work properly across multiple epochs. Otherwise,
        the same ordering will always be used.

    Example::

        >>> # xdoctest: +SKIP
        >>> sampler = DistributedSampler(dataset) if is_distributed else None
        >>> loader = DataLoader(dataset, shuffle=(sampler is None),
        ...                     sampler=sampler)
        >>> for epoch in range(start_epoch, n_epochs):
        ...     if is_distributed:
        ...         sampler.set_epoch(epoch)
        ...     train(loader)
    """

    def __init__(
        self,
        dataset: Dataset,
        num_replicas: int | None = None,
        rank: int | None = None,
        shuffle: bool = True,
        seed: int = 0,
        drop_last: bool = False,
    ) -> None:
        if num_replicas is None:
            if not dist.is_available():
                raise RuntimeError("Requires distributed package to be available")
            num_replicas = dist.get_world_size()
        if rank is None:
            if not dist.is_available():
                raise RuntimeError("Requires distributed package to be available")
            rank = dist.get_rank()
        if rank >= num_replicas or rank < 0:
            raise ValueError(
                f"Invalid rank {rank}, rank should be in the interval [0, {num_replicas - 1}]"
            )
        self.dataset = dataset
        self.num_replicas = num_replicas
        self.rank = rank
        self.epoch = 0
        self.drop_last = drop_last
        # If the dataset length is evenly divisible by the number of replicas,
        # there is no need to drop any data, since the dataset is split equally.
        if self.drop_last and len(self.dataset) % self.num_replicas != 0:  # type: ignore[arg-type]
            # Split to the nearest length that is evenly divisible, so that each
            # rank receives the same amount of data when using this Sampler.
            self.num_samples = math.ceil(
                (len(self.dataset) - self.num_replicas) / self.num_replicas  # type: ignore[arg-type]
            )
        else:
            self.num_samples = math.ceil(len(self.dataset) / self.num_replicas)  # type: ignore[arg-type]
        self.total_size = self.num_samples * self.num_replicas
        self.shuffle = shuffle
        self.seed = seed

    def __iter__(self) -> Iterator[_T_co]:
        if self.shuffle:
            # Deterministically shuffle based on epoch and seed.
            g = torch.Generator()
            g.manual_seed(self.seed + self.epoch)
            indices = torch.randperm(len(self.dataset), generator=g).tolist()  # type: ignore[arg-type]
        else:
            indices = list(range(len(self.dataset)))  # type: ignore[arg-type]

        if not self.drop_last:
            # Add extra samples to make the index list evenly divisible.
            padding_size = self.total_size - len(indices)
            if padding_size <= len(indices):
                indices += indices[:padding_size]
            else:
                indices += (indices * math.ceil(padding_size / len(indices)))[
                    :padding_size
                ]
        else:
            # Remove tail of data to make it evenly divisible.
            indices = indices[: self.total_size]
        if len(indices) != self.total_size:
            raise AssertionError(
                f"Number of indices ({len(indices)}) does not match total_size ({self.total_size})"
            )

        # Subsample: each rank takes every num_replicas-th index, starting at its rank.
        indices = indices[self.rank : self.total_size : self.num_replicas]
        if len(indices) != self.num_samples:
            raise AssertionError(
                f"Number of subsampled indices ({len(indices)}) does not match num_samples ({self.num_samples})"
            )

        return iter(indices)

    def __len__(self) -> int:
        return self.num_samples

    def set_epoch(self, epoch: int) -> None:
        r"""
        Set the epoch for this sampler.

        When :attr:`shuffle=True`, this ensures all replicas
        use a different random ordering for each epoch. Otherwise, the next iteration of this
        sampler will yield the same ordering.

        Args:
            epoch (int): Epoch number.
        N)r   )r!   r   s     r"   	set_epochzDistributedSampler.set_epoch   s     


r$   )NNTr   F)__name__
__module____qualname____doc__r   intboolr#   r   r   r4   r6   r8    r$   r"   r   r      s        . .f $(' '' Dj' Dj	'
 ' ' ' 
' ' ' 'R"(5/ " " " "H         s t      r$   )r   collections.abcr   typingr   r(   torch.distributeddistributedr   torch.utils.data.datasetr   torch.utils.data.samplerr   __all__r   r   r?   r$   r"   <module>rG      s     $ $ $ $ $ $                    , , , , , , , , , , , ,  
  	4(((L L L L L L L L L Lr$   
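

# ---------------------------------------------------------------------------
# Usage sketch (editor's addition, not part of the upstream module): a minimal
# demonstration of how the sampler partitions indices across ranks. Passing
# ``num_replicas`` and ``rank`` explicitly avoids the need for an initialized
# process group, and ``_ToyDataset`` is a hypothetical stand-in used only for
# illustration.
# ---------------------------------------------------------------------------
if __name__ == "__main__":

    class _ToyDataset(Dataset):
        """Minimal map-style dataset whose items are their own indices."""

        def __init__(self, n: int) -> None:
            self.n = n

        def __len__(self) -> int:
            return self.n

        def __getitem__(self, idx: int) -> int:
            return idx

    dataset = _ToyDataset(10)

    # With 3 replicas and 10 samples, num_samples = ceil(10 / 3) = 4 per rank;
    # because drop_last defaults to False, two indices are repeated as padding
    # so that 3 * 4 = 12 indices cover the dataset evenly.
    for rank in range(3):
        sampler = DistributedSampler(dataset, num_replicas=3, rank=rank, shuffle=False)
        print(f"rank {rank}: {list(sampler)}")

    # set_epoch reseeds the shuffle (seed + epoch), so each epoch yields a
    # different ordering that is still identical across all ranks.
    shuffled = DistributedSampler(dataset, num_replicas=3, rank=0, seed=0)
    for epoch in range(2):
        shuffled.set_epoch(epoch)
        print(f"epoch {epoch}, rank 0: {list(shuffled)}")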