import math

import torch
import torch.distributed as dist
from torch.utils.data import Sampler


class OrderedDistributedSampler(Sampler):
    """Sampler that restricts data loading to a subset of the dataset.

    It is especially useful in conjunction with
    :class:`torch.nn.parallel.DistributedDataParallel`. In such case, each
    process can pass a DistributedSampler instance as a DataLoader sampler,
    and load a subset of the original dataset that is exclusive to it.

    .. note::
        Dataset is assumed to be of constant size.

    Arguments:
        dataset: Dataset used for sampling.
        num_replicas (optional): Number of processes participating in
            distributed training.
        rank (optional): Rank of the current process within num_replicas.
    """

    def __init__(self, dataset, num_replicas=None, rank=None):
        if num_replicas is None:
            if not dist.is_available():
                raise RuntimeError("Requires distributed package to be available")
            num_replicas = dist.get_world_size()
        if rank is None:
            if not dist.is_available():
                raise RuntimeError("Requires distributed package to be available")
            rank = dist.get_rank()
        self.dataset = dataset
        self.num_replicas = num_replicas
        self.rank = rank
        self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas))
        self.total_size = self.num_samples * self.num_replicas

    def __iter__(self):
        indices = list(range(len(self.dataset)))

        # add extra samples to make the index list evenly divisible across replicas
        indices += indices[:(self.total_size - len(indices))]
        assert len(indices) == self.total_size

        # subsample: each rank takes every num_replicas-th index, starting at its rank
        indices = indices[self.rank:self.total_size:self.num_replicas]
        assert len(indices) == self.num_samples

        return iter(indices)

    def __len__(self):
        return self.num_samples
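# Illustrative usage sketch (an assumption, not part of the original module):
# OrderedDistributedSampler suits distributed evaluation, where sample order
# must be preserved and no shuffling is wanted. `eval_dataset` is a
# placeholder name.
#
#   from torch.utils.data import DataLoader
#   sampler = OrderedDistributedSampler(eval_dataset)
#   loader = DataLoader(eval_dataset, batch_size=64, sampler=sampler)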
class RepeatAugSampler(Sampler):
    """Sampler that restricts data loading to a subset of the dataset for distributed,
    with repeated augmentation.

    It ensures that each augmented version of a sample is visible to a
    different process (GPU). Heavily based on torch.utils.data.DistributedSampler.

    This sampler was taken from https://github.com/facebookresearch/deit/blob/0c4b8f60/samplers.py
    Copyright (c) 2015-present, Facebook, Inc.
    """

    def __init__(
            self,
            dataset,
            num_replicas=None,
            rank=None,
            shuffle=True,
            num_repeats=3,
            selected_round=256,
            selected_ratio=0,
    ):
        if num_replicas is None:
            if not dist.is_available():
                raise RuntimeError("Requires distributed package to be available")
            num_replicas = dist.get_world_size()
        if rank is None:
            if not dist.is_available():
                raise RuntimeError("Requires distributed package to be available")
            rank = dist.get_rank()
        self.dataset = dataset
        self.num_replicas = num_replicas
        self.rank = rank
        self.shuffle = shuffle
        self.num_repeats = num_repeats
        self.epoch = 0
        self.num_samples = int(math.ceil(len(self.dataset) * num_repeats / self.num_replicas))
        self.total_size = self.num_samples * self.num_replicas
        # determine the number of samples to select per epoch for each rank
        selected_ratio = selected_ratio or num_replicas  # ratio to reduce selected samples by, num_replicas if 0
        if selected_round:
            self.num_selected_samples = int(math.floor(
                len(self.dataset) // selected_round * selected_round / selected_ratio))
        else:
            self.num_selected_samples = int(math.ceil(len(self.dataset) / selected_ratio))

    def __iter__(self):
        # deterministically shuffle based on epoch
        g = torch.Generator()
        g.manual_seed(self.epoch)
        if self.shuffle:
            indices = torch.randperm(len(self.dataset), generator=g)
        else:
            indices = torch.arange(start=0, end=len(self.dataset))

        # produce repeats, e.g. [0, 0, 0, 1, 1, 1, 2, 2, 2, ...]
        if isinstance(self.num_repeats, float) and not self.num_repeats.is_integer():
            # resample for repeats w/ non-integer ratio
            repeat_size = math.ceil(self.num_repeats * len(self.dataset))
            indices = indices[torch.tensor([int(i // self.num_repeats) for i in range(repeat_size)])]
        else:
            indices = torch.repeat_interleave(indices, repeats=int(self.num_repeats), dim=0)
        indices = indices.tolist()  # leaving as tensor thrashes dataloader memory

        # add extra samples to make it evenly divisible
        padding_size = self.total_size - len(indices)
        if padding_size > 0:
            indices += indices[:padding_size]
        assert len(indices) == self.total_size

        # subsample per rank
        indices = indices[self.rank:self.total_size:self.num_replicas]
        assert len(indices) == self.num_samples

        # return up to num selected samples
        return iter(indices[:self.num_selected_samples])

    def __len__(self):
        return self.num_selected_samples

    def set_epoch(self, epoch):
        self.epoch = epoch
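# Illustrative usage sketch (an assumption, not part of the original module).
# With num_repeats=3 on 3 GPUs, the repeated index stream [0, 0, 0, 1, 1, 1, ...]
# is strided across ranks, so every rank draws the same underlying samples but
# applies its own augmentations; iteration stops at num_selected_samples, so an
# epoch covers roughly len(dataset) / num_replicas distinct samples per rank.
# `train_dataset` and `num_epochs` are placeholder names.
#
#   from torch.utils.data import DataLoader
#   sampler = RepeatAugSampler(train_dataset, num_repeats=3)
#   loader = DataLoader(train_dataset, batch_size=64, sampler=sampler)
#   for epoch in range(num_epochs):
#       sampler.set_epoch(epoch)  # reseed the shuffle each epoch
#       for images, targets in loader:
#           ...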