import logging
import pdb
import sys
import traceback
import typing

import torch

log = logging.getLogger(__name__)


def is_available() -> bool:
    """
    Return ``True`` if the distributed package is available; otherwise,
    ``torch.distributed`` does not expose any other APIs. Currently,
    ``torch.distributed`` is available on Linux, MacOS and Windows. Set
    ``USE_DISTRIBUTED=1`` to enable it when building PyTorch from source;
    the default is ``USE_DISTRIBUTED=1`` for Linux and Windows and
    ``USE_DISTRIBUTED=0`` for MacOS.
    """
    return hasattr(torch._C, "_c10d_init")


if is_available() and not torch._C._c10d_init():
    raise RuntimeError("Failed to initialize torch.distributed")

# Error types raised from the C++ backend, re-exported without the underscore.
DistError = torch._C._DistError
DistBackendError = torch._C._DistBackendError
DistNetworkError = torch._C._DistNetworkError
DistStoreError = torch._C._DistStoreError
QueueEmptyError = torch._C._DistQueueEmptyError

if is_available():
    from torch._C._distributed_c10d import (
        _broadcast_coalesced, _compute_bucket_assignment_by_size, _ControlCollectives,
        _DEFAULT_FIRST_BUCKET_BYTES, _make_nccl_premul_sum, _register_builtin_comm_hook,
        _register_comm_hook, _StoreCollectives, _test_python_store,
        _verify_params_across_processes, Backend as _Backend, BuiltinCommHookType,
        DebugLevel, FileStore, get_debug_level, GradBucket, Logger, PrefixStore,
        ProcessGroup, Reducer, set_debug_level, set_debug_level_from_env,
        Store, TCPStore, Work as _Work,
    )

    class _DistributedPdb(pdb.Pdb):
        """
        Supports using PDB from inside a multiprocessing child process.

        Usage:
        _DistributedPdb().set_trace()
        """

        def interaction(self, *args, **kwargs):
            _stdin = sys.stdin
            try:
                # Reopen stdin so the child process can read from the terminal.
                sys.stdin = open("/dev/stdin")
                pdb.Pdb.interaction(self, *args, **kwargs)
            finally:
                sys.stdin = _stdin

    _breakpoint_cache: dict[int, typing.Any] = {}

    def breakpoint(rank: int = 0, skip: int = 0):
        """
        Set a breakpoint, but only on a single rank. All other ranks will wait
        for you to be done with the breakpoint before continuing.

        Args:
            rank (int): Which rank to break on. Default: ``0``
            skip (int): Skip the first ``skip`` calls to this breakpoint. Default: ``0``.
        """
        if skip > 0:
            key = hash(str(traceback.format_exc()))
            counter = _breakpoint_cache.get(key, 0) + 1
            _breakpoint_cache[key] = counter
            if counter <= skip:
                log.warning("Skip the breakpoint, counter=%d", counter)
                return
        if get_rank() == rank:
            pdb = _DistributedPdb()
            pdb.message(
                "\n!!! ATTENTION !!!\n\n"
                f"Type 'up' to get to the frame that called dist.breakpoint(rank={rank})\n"
            )
            pdb.set_trace()
        # Ignore Meta/Python dispatch keys in TLS so the barrier below hits the
        # real CPU/CUDA implementation.
        meta_in_tls = torch._C._meta_in_tls_dispatch_include()
        guard = torch._C._DisableTorchDispatch()  # noqa: F841
        torch._C._set_meta_in_tls_dispatch_include(False)
        try:
            barrier()
        finally:
            torch._C._set_meta_in_tls_dispatch_include(meta_in_tls)
            del guard

    if sys.platform != "win32":
        from torch._C._distributed_c10d import HashStore

    from .device_mesh import DeviceMesh, init_device_mesh
    from .distributed_c10d import *  # noqa: F403
    from .distributed_c10d import (
        _all_gather_base, _coalescing_manager, _CoalescingManager,
        _create_process_group_wrapper, _get_process_group_name, _rank_not_in_group,
        _reduce_scatter_base, _time_estimator, get_node_local_rank,
    )
    from .remote_device import _remote_device
    from .rendezvous import _create_store_from_options, register_rendezvous_handler, rendezvous

    set_debug_level_from_env()

else:
    # Minimal stub so that `torch.distributed.ProcessGroup` still resolves when
    # PyTorch was built with USE_DISTRIBUTED=0.
    class _ProcessGroupStub:
        pass

    sys.modules["torch.distributed"].ProcessGroup = _ProcessGroupStub  # type: ignore[attr-defined]
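
# Illustrative usage sketch (an assumption, not part of the upstream module):
# how the single-rank ``breakpoint`` helper defined above is typically driven
# from a training script launched with ``torchrun``:
#
#     import torch.distributed as dist
#
#     dist.init_process_group(backend="gloo")
#     ...
#     dist.breakpoint(rank=0)  # all ranks block on a barrier; only rank 0 gets a pdb prompt
#     ...
#     dist.destroy_process_group()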