import logging
import pdb
import sys
import traceback
import typing
from datetime import timedelta

import torch

log = logging.getLogger(__name__)
defddZe r7ej s7edejjZejjZejjZejjZejjZe rd dlmZmZmZmZmZm Z m!Z!m"Z"m#Z#m$Z$m%Z&m'Z'm(Z(m)Z)m*Z*m+Z+m,Z,m-Z-m.Z.m/Z/m0Z0m1Z1m2Z2m3Z3m4Z5 G dd	 d	ej6Z7i Z8e9e:ej;f e<d
< dde:de:fddZ=ej>dkrd dlm?Z? ddl@mAZAmBZB ddlCT ddlCmDZDmEZEmFZFmGZGmHZHmIZImJZJmKZKmLZL ddlMmNZN ddlOmPZPmQZQmOZO e1  dS G dd dZReRejSd _.dS )    N)	timedeltareturnc                   C   s   t tjdS )a  
    Return ``True`` if the distributed package is available.

    Otherwise, ``torch.distributed`` does not expose any other APIs. Currently,
    ``torch.distributed`` is available on Linux, MacOS and Windows. Set
    ``USE_DISTRIBUTED=1`` to enable it when building PyTorch from source;
    the default is ``USE_DISTRIBUTED=1`` for Linux and Windows and
    ``USE_DISTRIBUTED=0`` for MacOS.
    """
    return hasattr(torch._C, "_c10d_init")


if is_available() and not torch._C._c10d_init():
    raise RuntimeError("Failed to initialize torch.distributed")

# Custom runtime errors raised from the distributed package.
DistError = torch._C._DistError
DistBackendError = torch._C._DistBackendError
DistNetworkError = torch._C._DistNetworkError
DistStoreError = torch._C._DistStoreError
QueueEmptyError = torch._C._DistQueueEmptyError

if is_available():
    from torch._C._distributed_c10d import (
        _broadcast_coalesced,
        _compute_bucket_assignment_by_size,
        _ControlCollectives,
        _DEFAULT_FIRST_BUCKET_BYTES,
        _make_nccl_premul_sum,
        _register_builtin_comm_hook,
        _register_comm_hook,
        _StoreCollectives,
        _test_python_store,
        _verify_params_across_processes,
        Backend as _Backend,
        BuiltinCommHookType,
        DebugLevel,
        FileStore,
        get_debug_level,
        GradBucket,
        Logger,
        PrefixStore,
        ProcessGroup,
        Reducer,
        set_debug_level,
        set_debug_level_from_env,
        Store,
        TCPStore,
        Work as _Work,
    )
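
    # The public error aliases above let user code catch c10d failures without
    # reaching into torch._C. Illustrative sketch (the handler body is user
    # code, not part of this module):
    #
    #     try:
    #         dist.broadcast(t, src=0)
    #     except dist.DistBackendError:
    #         ...  # backend (e.g. NCCL) failure: log and tear down the job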

    class _DistributedPdb(pdb.Pdb):
        """
        Supports using PDB from inside a multiprocessing child process.

        Usage:
        _DistributedPdb().set_trace()
        """

        def interaction(self, *args, **kwargs):
            # pdb expects a readable sys.stdin, but a multiprocessing child
            # usually has stdin detached; reattach the controlling terminal for
            # the duration of the session, then restore the old handle.
            _stdin = sys.stdin
            try:
                sys.stdin = open("/dev/stdin")
                pdb.Pdb.interaction(self, *args, **kwargs)
            finally:
                sys.stdin = _stdin

    _breakpoint_cache: dict[int, typing.Any] = {}

    # ``timeout_s`` stretches every process group's timeout for the duration of
    # the debug session so a pending collective does not kill the job; the
    # 3600-second default shown here is an assumed value (any number of
    # seconds, or None to leave timeouts untouched).
    def breakpoint(rank: int = 0, skip: int = 0, timeout_s=3600):
        """
        Set a breakpoint, but only on a single rank.  All other ranks will wait for you to be
        done with the breakpoint before continuing.

        Args:
            rank (int): Which rank to break on.  Default: ``0``
            skip (int): Skip the first ``skip`` calls to this breakpoint. Default: ``0``.
        """
        if skip > 0:
            key = hash(str(traceback.format_exc()))
            counter = _breakpoint_cache.get(key, 0) + 1
            _breakpoint_cache[key] = counter
            if counter <= skip:
                log.warning("Skip the breakpoint, counter=%d", counter)
                return

        if timeout_s is not None:
            # Extend the timeout of every existing process group while a human
            # sits at the debugger.
            for group in torch.distributed.distributed_c10d._pg_map:
                torch.distributed.distributed_c10d._set_pg_timeout(
                    timedelta(seconds=timeout_s), group
                )

        if get_rank() == rank:
            pdb = _DistributedPdb()
            pdb.message(
                "\n!!! ATTENTION !!!\n\n"
                f"Type 'up' to get to the frame that called dist.breakpoint(rank={rank})\n"
            )
            pdb.set_trace()
        # If a Meta/Python dispatch key is set in TLS, bypass it so the barrier
        # below hits the default CPU/CUDA implementation.
        meta_in_tls = torch._C._meta_in_tls_dispatch_include()
        guard = torch._C._DisableTorchDispatch()  # type: ignore[attr-defined]
        torch._C._set_meta_in_tls_dispatch_include(False)
        try:
            barrier()
        finally:
            torch._C._set_meta_in_tls_dispatch_include(meta_in_tls)
            del guard
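
    # Illustrative call site (everything beyond this module is user code):
    #
    #     import torch.distributed as dist
    #
    #     dist.breakpoint(rank=0, skip=2)  # all ranks block; rank 0 gets pdb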

    if sys.platform != "win32":
        from torch._C._distributed_c10d import HashStore

    from .device_mesh import DeviceMesh, init_device_mesh

    # Underscore-prefixed names are skipped by the star import, so the ones
    # needed elsewhere are re-exported explicitly.
    from .distributed_c10d import *  # noqa: F403
    from .distributed_c10d import (
        _all_gather_base,
        _coalescing_manager,
        _CoalescingManager,
        _create_process_group_wrapper,
        _get_process_group_name,
        _rank_not_in_group,
        _reduce_scatter_base,
        _time_estimator,
        get_node_local_rank,
    )
    from .remote_device import _remote_device
    from .rendezvous import (
        _create_store_from_options,
        register_rendezvous_handler,
        rendezvous,
    )

    set_debug_level_from_env()

else:
    # Minimal stub so that ``torch.distributed.ProcessGroup`` still resolves in
    # builds compiled with USE_DISTRIBUTED=0.
    class _ProcessGroupStub:
        pass

    sys.modules["torch.distributed"].ProcessGroup = _ProcessGroupStub  # type: ignore[attr-defined]
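
# End-to-end sketch of the surface re-exported above (backend and device
# choices are illustrative, not the only supported ones):
#
#     import torch.distributed as dist
#     from torch.distributed.device_mesh import init_device_mesh
#
#     dist.init_process_group(backend="gloo")  # rendezvous via env:// variables
#     mesh = init_device_mesh("cpu", (dist.get_world_size(),))
#     dist.barrier()
#     dist.destroy_process_group()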