import inspect
from collections.abc import Generator
from contextlib import AbstractContextManager, contextmanager
from typing import Any, Callable, Optional

import torch
import torch.distributed as dist
from torch import Tensor

import pytorch_lightning as pl
from lightning_fabric.utilities.distributed import _distributed_is_initialized
from lightning_fabric.utilities.warnings import PossibleUserWarning
from pytorch_lightning.accelerators.xla import XLAAccelerator
from pytorch_lightning.callbacks.timer import Timer
from pytorch_lightning.loops import _Loop
from pytorch_lightning.loops.fetchers import _DataFetcher, _DataLoaderIterDataFetcher, _PrefetchDataFetcher
from pytorch_lightning.loops.progress import _BaseProgress
from pytorch_lightning.strategies import FSDPStrategy
from pytorch_lightning.strategies.parallel import ParallelStrategy
from pytorch_lightning.strategies.strategy import Strategy
from pytorch_lightning.trainer.states import RunningStage
from pytorch_lightning.utilities.rank_zero import rank_zero_warn
from pytorch_lightning.utilities.signature_utils import is_param_in_hook_signature


def check_finite_loss(loss: Optional[Tensor]) -> None:
    """Checks for finite loss value.

    Args:
        loss: the loss value to check to be finite

    """
    if loss is not None and not torch.isfinite(loss).all():
        raise ValueError(f"The loss returned in `training_step` is {loss}.")
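
# Illustrative note (not part of the decompiled module): `check_finite_loss` is
# a no-op for `None` and for finite tensors, and raises otherwise, e.g.
#
#   check_finite_loss(None)                        # ok
#   check_finite_loss(torch.tensor(0.5))           # ok
#   check_finite_loss(torch.tensor(float("nan")))  # ValueError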


def _parse_loop_limits(
    min_steps: Optional[int],
    max_steps: int,
    min_epochs: Optional[int],
    max_epochs: Optional[int],
    trainer: "pl.Trainer",
) -> tuple[int, int]:
    """This utility computes the default values for the minimum and maximum number of steps and epochs given the values
    the user has selected.

    Args:
        min_steps: Minimum number of steps.
        max_steps: Maximum number of steps.
        min_epochs: Minimum number of epochs.
        max_epochs: Maximum number of epochs.
        trainer: Trainer instance.

    Returns:
        The parsed limits, with default values being set for the ones that the user did not specify.

    """
    if max_epochs is None:
        if max_steps == -1 and not any(isinstance(cb, Timer) for cb in trainer.callbacks):
            rank_zero_warn(
                "`max_epochs` was not set. Setting it to 1000 epochs. To train without an epoch limit,"
                " set `max_epochs=-1`.",
                category=PossibleUserWarning,
            )
            max_epochs = 1000
        else:
            max_epochs = -1
    if min_epochs is None and min_steps is not None:
        # a `min_steps` requirement may span several epochs, so require at least one epoch
        min_epochs = 1
    if min_epochs is None:
        min_epochs = 0
    return min_epochs, max_epochs


@contextmanager
def _block_parallel_sync_behavior(strategy: Strategy, block: bool = True) -> Generator[None, None, None]:
    """Blocks synchronization in :class:`~pytorch_lightning.strategies.parallel.ParallelStrategy`. This is useful for
    example when accumulating gradients to reduce communication when it is not needed.

    Args:
        strategy: the strategy instance to use.
        block: whether the context manager is enabled or not

    Returns:
        context manager with sync behaviour off

    """
    if isinstance(strategy, ParallelStrategy) and block:
        with strategy.block_backward_sync():
            yield None
    else:
        yield None


def _is_max_limit_reached(current: int, maximum: int = -1) -> bool:
    """Check if the limit has been reached (if enabled).

    Args:
        current: the current value
        maximum: the maximum value (or -1 to disable limit)

    Returns:
        bool: whether the limit has been reached

    """
    return maximum != -1 and current >= maximum
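
# Illustrative note (not part of the decompiled module): `-1` is the sentinel
# that disables the limit, so e.g.
#
#   _is_max_limit_reached(10, 10)   # True: the limit is hit exactly
#   _is_max_limit_reached(9, 10)    # False: still below the limit
#   _is_max_limit_reached(10, -1)   # False: the limit is disabled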


r=   stagec                 C   s   | j }|tjkrd}n$|tjkrd}n|tjtjfv rd}n|tjkr&d}n	td| jj	 t
||}t|dddrFtd	| d
 t S t S )N	test_steptraining_stepvalidation_steppredict_stepzDataFetcher is unsupported for dataloader_iterT)explicitz)Found `dataloader_iter` argument in the `zb`. Note that the support for this signature is experimental and the behavior is subject to change.)lightning_moduler   TESTINGTRAINING
VALIDATINGSANITY_CHECKING
PREDICTINGRuntimeErrorstater?   getattrr   r   r   r   )r&   r?   rF   step_fx_namestep_fxr   r   r    _select_data_fetcher   s"   




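
# Illustrative note (not part of the decompiled module): declaring an explicit
# `dataloader_iter` parameter in the step hook is what opts a user into the
# experimental iterator-based flow and its `_DataLoaderIterDataFetcher`, e.g.
#
#   class MyModule(pl.LightningModule):  # hypothetical module
#       def training_step(self, dataloader_iter):
#           ...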


def _no_grad_context(loop_run: Callable) -> Callable:
    def _decorator(self: _Loop, *args: Any, **kwargs: Any) -> Any:
        if not isinstance(self, _Loop):
            raise TypeError(f"`{type(self).__name__}` needs to be a Loop.")
        if not hasattr(self, "inference_mode"):
            raise TypeError(f"`{type(self).__name__}.inference_mode` needs to be defined")

        context_manager: type[AbstractContextManager]
        if _distributed_is_initialized() and dist.get_backend() == "gloo":
            # `torch.inference_mode` is known to misbehave with the gloo backend, so fall back to `torch.no_grad`
            context_manager = torch.no_grad
        elif isinstance(self.trainer.accelerator, XLAAccelerator):
            context_manager = torch.no_grad
        elif isinstance(self.trainer.strategy, FSDPStrategy):
            context_manager = torch.no_grad
        elif self.inference_mode:
            context_manager = torch.inference_mode
        else:
            context_manager = torch.no_grad
        with context_manager():
            return loop_run(self, *args, **kwargs)

    return _decorator
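
# Illustrative usage sketch (not part of the decompiled module); `MyLoop` is a
# hypothetical subclass. Decorating a loop's `run` makes its whole body execute
# under `torch.inference_mode` or `torch.no_grad`, as selected above:
#
#   class MyLoop(_Loop):
#       inference_mode = True
#
#       @_no_grad_context
#       def run(self) -> None:
#           ...  # no autograd graph is recorded in here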


def _verify_dataloader_idx_requirement(
    hooks: tuple[str, ...], is_expected: bool, stage: RunningStage, pl_module: "pl.LightningModule"
) -> None:
    for hook in hooks:
        fx = getattr(pl_module, hook)
        param_present = is_param_in_hook_signature(fx, "dataloader_idx")
        if not is_expected:
            if param_present:
                params = inspect.signature(fx).parameters
                if "dataloader_idx" in params and params["dataloader_idx"].default is inspect.Parameter.empty:
                    raise RuntimeError(
                        f"You provided only a single `{stage.dataloader_prefix}_dataloader`, but have included"
                        f" `dataloader_idx` in `{type(pl_module).__name__}.{hook}()`. Either remove the argument or"
                        " give it a default value i.e. `dataloader_idx=0`."
                    )
        elif not param_present:
            raise RuntimeError(
                f"You provided multiple `{stage.dataloader_prefix}_dataloader`, but no `dataloader_idx` argument in"
                f" `{type(pl_module).__name__}.{hook}()`. Try adding `dataloader_idx=0` to its signature."
            )
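

# Minimal runnable smoke test of the pure helpers above; an illustrative
# addition, not part of the decompiled module, and guarded so it never runs on
# import.
if __name__ == "__main__":
    # `-1` disables the maximum; any other value is an inclusive threshold
    assert _is_max_limit_reached(current=10, maximum=10)
    assert not _is_max_limit_reached(current=9, maximum=10)
    assert not _is_max_limit_reached(current=10, maximum=-1)

    # finite losses and `None` pass silently; non-finite values raise
    check_finite_loss(None)
    check_finite_loss(torch.tensor(0.5))
    try:
        check_finite_loss(torch.tensor(float("nan")))
    except ValueError as err:
        print(err)  # The loss returned in `training_step` is nan.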