import importlib
import importlib.metadata
import os
import sys
import warnings
from functools import lru_cache, wraps

import torch
from packaging import version
from packaging.version import parse

from .environment import parse_flag_from_env, patch_environment, str_to_bool
from .versions import compare_versions, is_torch_version


USE_TORCH_XLA = parse_flag_from_env("USE_TORCH_XLA", default=True)

_torch_xla_available = False
if USE_TORCH_XLA:
    try:
        import torch_xla.core.xla_model as xm  # noqa: F401
        import torch_xla.runtime

        _torch_xla_available = True
    except ImportError:
        pass

# Kept for backwards compatibility with the older TPU checks
_tpu_available = _torch_xla_available

# Cache this result as it is a C FFI call which can be fairly expensive
_torch_distributed_available = torch.distributed.is_available()


def _is_package_available(pkg_name, metadata_name=None):
    # Check that the spec exists, then grab the metadata so that a local
    # directory with the same name is not mistaken for the actual library
    package_exists = importlib.util.find_spec(pkg_name) is not None
    if package_exists:
        try:
            # Some libraries have a different name in their metadata
            _ = importlib.metadata.metadata(pkg_name if metadata_name is None else metadata_name)
            return True
        except importlib.metadata.PackageNotFoundError:
            return False
    return False


def is_torch_distributed_available() -> bool:
    return _torch_distributed_available


def is_xccl_available():
    if is_torch_version(">=", "2.7.0"):
        return torch.distributed.distributed_c10d.is_xccl_available()
    return False


def is_import_timer_available():
    return _is_package_available("import_timer")


def is_pynvml_available():
    return _is_package_available("pynvml") or _is_package_available("pynvml", "nvidia-ml-py")


def is_pytest_available():
    return _is_package_available("pytest")


def is_msamp_available():
    return _is_package_available("msamp", "ms-amp")


def is_schedulefree_available():
    return _is_package_available("schedulefree")


def is_transformer_engine_available():
    if is_hpu_available():
        return _is_package_available("intel_transformer_engine", "intel-transformer-engine")
    return _is_package_available("transformer_engine", "transformer-engine")


def is_transformer_engine_mxfp8_available():
    if _is_package_available("transformer_engine", "transformer-engine"):
        from transformer_engine.pytorch.fp8 import check_mxfp8_support

        return check_mxfp8_support()[0]
    return False


def is_lomo_available():
    return _is_package_available("lomo_optim")


@lru_cache
def is_cuda_available():
    """
    Checks if `cuda` is available via an `nvml-based` check which won't trigger the drivers and leave cuda
    uninitialized.
    """
    with patch_environment(PYTORCH_NVML_BASED_CUDA_CHECK="1"):
        available = torch.cuda.is_available()
    return available


@lru_cache
def is_torch_xla_available(check_is_tpu=False, check_is_gpu=False):
    """
    Check if `torch_xla` is available. To train a native pytorch job in an environment with torch xla installed, set
    the USE_TORCH_XLA to false.
    """
    assert not (check_is_tpu and check_is_gpu), "The check_is_tpu and check_is_gpu cannot both be true."

    if not _torch_xla_available:
        return False
    elif check_is_gpu:
        return torch_xla.runtime.device_type() in ["GPU", "CUDA"]
    elif check_is_tpu:
        return torch_xla.runtime.device_type() == "TPU"
    return True


def is_torchao_available():
    package_exists = _is_package_available("torchao")
    if package_exists:
        torchao_version = version.parse(importlib.metadata.version("torchao"))
        return compare_versions(torchao_version, ">=", "0.6.1")
    return False


def is_deepspeed_available():
    return _is_package_available("deepspeed")


def is_pippy_available():
    return is_torch_version(">=", "2.4.0")


def is_bf16_available(ignore_tpu=False):
    "Checks if bf16 is supported, optionally ignoring the TPU"
    if is_torch_xla_available(check_is_tpu=True):
        return not ignore_tpu
    if is_cuda_available():
        return torch.cuda.is_bf16_supported()
    if is_mlu_available():
        return torch.mlu.is_bf16_supported()
    if is_xpu_available():
        return torch.xpu.is_bf16_supported()
    if is_mps_available():
        return torch.backends.mps.is_macos_or_newer(14, 0)
    return True


def is_fp16_available():
    "Checks if fp16 is supported"
    if is_habana_gaudi1():
        return False
    return True


def is_fp8_available():
    "Checks if fp8 is supported"
    return is_msamp_available() or is_transformer_engine_available() or is_torchao_available()


def is_4bit_bnb_available():
    package_exists = _is_package_available("bitsandbytes")
    if package_exists:
        bnb_version = version.parse(importlib.metadata.version("bitsandbytes"))
        return compare_versions(bnb_version, ">=", "0.39.0")
    return False


def is_8bit_bnb_available():
    package_exists = _is_package_available("bitsandbytes")
    if package_exists:
        bnb_version = version.parse(importlib.metadata.version("bitsandbytes"))
        return compare_versions(bnb_version, ">=", "0.37.2")
    return False


def is_bnb_available(min_version=None):
    package_exists = _is_package_available("bitsandbytes")
    if package_exists and min_version is not None:
        bnb_version = version.parse(importlib.metadata.version("bitsandbytes"))
        return compare_versions(bnb_version, ">=", min_version)
    return package_exists


def is_bitsandbytes_multi_backend_available():
    if not is_bnb_available():
        return False
    import bitsandbytes as bnb

    return "multi_backend" in getattr(bnb, "features", set())


def is_torchvision_available():
    return _is_package_available("torchvision")


def is_megatron_lm_available():
    if str_to_bool(os.environ.get("ACCELERATE_USE_MEGATRON_LM", "False")) == 1 and importlib.util.find_spec("megatron") is not None:
        try:
            megatron_version = parse(importlib.metadata.version("megatron-core"))
            if compare_versions(megatron_version, ">=", "0.8.0"):
                return importlib.util.find_spec(".training", "megatron")
        except Exception as e:
            warnings.warn(f"Parse Megatron version failed. Exception:{e}")
            return False


def is_transformers_available():
    return _is_package_available("transformers")


def is_datasets_available():
    return _is_package_available("datasets")


def is_peft_available():
    return _is_package_available("peft")


def is_timm_available():
    return _is_package_available("timm")


def is_triton_available():
    if is_xpu_available():
        return _is_package_available("triton", "pytorch-triton-xpu")
    return _is_package_available("triton")


def is_aim_available():
    package_exists = _is_package_available("aim")
    if package_exists:
        aim_version = version.parse(importlib.metadata.version("aim"))
        return compare_versions(aim_version, "<", "4.0.0")
    return False


def is_tensorboard_available():
    return _is_package_available("tensorboard") or _is_package_available("tensorboardX")


def is_wandb_available():
    return _is_package_available("wandb")


def is_comet_ml_available():
    return _is_package_available("comet_ml")


def is_swanlab_available():
    return _is_package_available("swanlab")


def is_trackio_available():
    return sys.version_info >= (3, 10) and _is_package_available("trackio")


def is_boto3_available():
    return _is_package_available("boto3")


def is_rich_available():
    if _is_package_available("rich"):
        return parse_flag_from_env("ACCELERATE_ENABLE_RICH", False)
    return False


def is_sagemaker_available():
    return _is_package_available("sagemaker")


def is_tqdm_available():
    return _is_package_available("tqdm")


def is_clearml_available():
    return _is_package_available("clearml")


def is_pandas_available():
    return _is_package_available("pandas")


def is_matplotlib_available():
    return _is_package_available("matplotlib")


def is_mlflow_available():
    if _is_package_available("mlflow"):
        return True

    # `mlflow-skinny` ships the same import name but different metadata
    if importlib.util.find_spec("mlflow") is not None:
        try:
            _ = importlib.metadata.metadata("mlflow-skinny")
            return True
        except importlib.metadata.PackageNotFoundError:
            return False
    return False


@lru_cache
def is_mps_available(min_version="1.12"):
    "Checks if MPS device is available. The minimum version required is 1.12."
    return is_torch_version(">=", min_version) and torch.backends.mps.is_available() and torch.backends.mps.is_built()


@lru_cache
def is_mlu_available(check_device=False):
    """
    Checks if `mlu` is available via an `cndev-based` check which won't trigger the drivers and leave mlu
    uninitialized.
    """
    if importlib.util.find_spec("torch_mlu") is None:
        return False

    import torch_mlu  # noqa: F401

    with patch_environment(PYTORCH_CNDEV_BASED_MLU_CHECK="1"):
        available = torch.mlu.is_available()
    return available


@lru_cache
def is_musa_available(check_device=False):
    "Checks if `torch_musa` is installed and potentially if a MUSA is in the environment"
    if importlib.util.find_spec("torch_musa") is None:
        return False

    import torch_musa  # noqa: F401

    if check_device:
        try:
            # Will raise a RuntimeError if no MUSA device is found
            _ = torch.musa.device_count()
            return torch.musa.is_available()
        except RuntimeError:
            return False
    return hasattr(torch, "musa") and torch.musa.is_available()


@lru_cache
def is_npu_available(check_device=False):
    "Checks if `torch_npu` is installed and potentially if a NPU is in the environment"
    if importlib.util.find_spec("torch_npu") is None:
        return False

    try:
        import torch_npu  # noqa: F401
    except ImportError:
        return False

    if check_device:
        try:
            # Will raise a RuntimeError if no NPU device is found
            _ = torch.npu.device_count()
            return torch.npu.is_available()
        except RuntimeError:
            return False
    return hasattr(torch, "npu") and torch.npu.is_available()


@lru_cache
def is_sdaa_available(check_device=False):
    "Checks if `torch_sdaa` is installed and potentially if a SDAA is in the environment"
    if importlib.util.find_spec("torch_sdaa") is None:
        return False

    import torch_sdaa  # noqa: F401

    if check_device:
        try:
            # Will raise a RuntimeError if no SDAA device is found
            _ = torch.sdaa.device_count()
            return torch.sdaa.is_available()
        except RuntimeError:
            return False
    return hasattr(torch, "sdaa") and torch.sdaa.is_available()


@lru_cache
def is_hpu_available(init_hccl=False):
    "Checks if `torch.hpu` is installed and potentially if a HPU is in the environment"
    if (
        importlib.util.find_spec("habana_frameworks") is None
        or importlib.util.find_spec("habana_frameworks.torch") is None
    ):
        return False

    import habana_frameworks.torch  # noqa: F401

    if init_hccl:
        import habana_frameworks.torch.distributed.hccl  # noqa: F401

    return hasattr(torch, "hpu") and torch.hpu.is_available()


def is_habana_gaudi1():
    if is_hpu_available():
        import habana_frameworks.torch.utils.experimental as htexp

        # Gaudi1 reports `synDeviceGaudi`; later generations report other device types
        if htexp._get_device_type() == htexp.synDeviceType.synDeviceGaudi:
            return True
    return False


@lru_cache
def is_xpu_available(check_device=False):
    """
    Checks if XPU acceleration is available via stock PyTorch (>=2.7) and
    potentially if a XPU is in the environment
    """
    if is_torch_version("<=", "2.6"):
        return False

    if check_device:
        try:
            # Will raise a RuntimeError if no XPU device is found
            _ = torch.xpu.device_count()
            return torch.xpu.is_available()
        except RuntimeError:
            return False
    return hasattr(torch, "xpu") and torch.xpu.is_available()


@lru_cache
def is_neuron_available(check_device=False):
    if importlib.util.find_spec("torch_neuronx") is None:
        return False

    if check_device:
        try:
            import torch_neuronx  # noqa: F401

            # Will raise a RuntimeError if no NEURON device is found
            _ = torch.neuron.device_count()
            return torch.neuron.is_available()
        except RuntimeError:
            return False
    return hasattr(torch, "neuron") and torch.neuron.is_available()


def is_dvclive_available():
    return _is_package_available("dvclive")


def is_torchdata_available():
    return _is_package_available("torchdata")


def is_torchdata_stateful_dataloader_available():
    package_exists = _is_package_available("torchdata")
    if package_exists:
        torchdata_version = version.parse(importlib.metadata.version("torchdata"))
        return compare_versions(torchdata_version, ">=", "0.8.0")
    return False


def torchao_required(func):
    """
    A decorator that ensures the decorated function is only called when torchao is available.
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        if not is_torchao_available():
            raise ImportError(
                "`torchao` is not available, please install it before calling this function via `pip install torchao`."
            )
        return func(*args, **kwargs)

    return wrapper


def deepspeed_required(func):
    """
    A decorator that ensures the decorated function is only called when deepspeed is enabled.
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        from accelerate.state import AcceleratorState
        from accelerate.utils.dataclasses import DistributedType

        if AcceleratorState._shared_state != {} and AcceleratorState().distributed_type != DistributedType.DEEPSPEED:
            raise ValueError(
                "DeepSpeed is not enabled, please make sure that an `Accelerator` is configured for `deepspeed` "
                "before calling this function."
            )
        return func(*args, **kwargs)

    return wrapper


def is_weights_only_available():
    # Safe `weights_only` loading landed in torch 2.4.0
    return is_torch_version(">=", "2.4.0")


def is_numpy_available(min_version="1.25.0"):
    numpy_version = parse(importlib.metadata.version("numpy"))
    return compare_versions(numpy_version, ">=", min_version)
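
# Illustrative usage (a sketch, not part of the upstream module): the predicates
# above are plain zero-argument callables, so a quick capability report for the
# current environment can be built by iterating over them. Because the module
# uses relative imports, run it as a module, e.g.
# `python -m accelerate.utils.imports`, rather than as a standalone file.
if __name__ == "__main__":
    _capability_checks = {
        "torch.distributed": is_torch_distributed_available,
        "cuda": is_cuda_available,
        "xpu": is_xpu_available,
        "mps": is_mps_available,
        "torch_xla": is_torch_xla_available,
        "bf16": is_bf16_available,
        "fp8": is_fp8_available,
        "bitsandbytes": is_bnb_available,
        "deepspeed": is_deepspeed_available,
        "transformers": is_transformers_available,
    }
    for _name, _check in _capability_checks.items():
        print(f"{_name}: {_check()}")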