import logging
import os
from typing import Any, Callable, Dict, Optional

from typing_extensions import override

from lightning.fabric.plugins.io.checkpoint_io import CheckpointIO
from lightning.fabric.utilities.cloud_io import _atomic_save, get_filesystem
from lightning.fabric.utilities.cloud_io import _load as pl_load
from lightning.fabric.utilities.types import _PATH

log = logging.getLogger(__name__)


class TorchCheckpointIO(CheckpointIO):
    """CheckpointIO that utilizes :func:`torch.save` and :func:`torch.load` to save and load checkpoints
    respectively, common for most use cases.

    .. warning::  This is an :ref:`experimental <versioning:Experimental API>` feature.

    """

    @override
    def save_checkpoint(self, checkpoint: Dict[str, Any], path: _PATH, storage_options: Optional[Any] = None) -> None:
        """Save model/training states as a checkpoint file through state-dump and file-write.

        Args:
            checkpoint: dict containing model and trainer state
            path: write-target path
            storage_options: not used in ``TorchCheckpointIO.save_checkpoint``

        Raises:
            TypeError:
                If ``storage_options`` arg is passed in

        """
        if storage_options is not None:
            raise TypeError(
                "`Trainer.save_checkpoint(..., storage_options=...)` with `storage_options` arg"
                f" is not supported for `{self.__class__.__name__}`. Please implement your custom `CheckpointIO`"
                " to define how you'd like to use `storage_options`."
            )
        fs = get_filesystem(path)
        # Create the parent directory (local or remote via fsspec) before writing.
        fs.makedirs(os.path.dirname(path), exist_ok=True)
        _atomic_save(checkpoint, path)

    @override
    def load_checkpoint(
        self, path: _PATH, map_location: Optional[Callable] = lambda storage, loc: storage
    ) -> Dict[str, Any]:
        """Loads checkpoint using :func:`torch.load`, with additional handling for ``fsspec`` remote loading of
        files.

        Args:
            path: Path to checkpoint
            map_location: a function, :class:`torch.device`, string or a dict specifying how to remap storage
                locations.

        Returns: The loaded checkpoint.

        Raises:
            FileNotFoundError: If ``path`` is not found by the ``fsspec`` filesystem

        """
        fs = get_filesystem(path)
        if not fs.exists(path):
            raise FileNotFoundError(f"Checkpoint file not found: {path}")

        return pl_load(path, map_location=map_location)

    @override
    def remove_checkpoint(self, path: _PATH) -> None:
        """Remove checkpoint file from the filesystem.

        Args:
            path: Path to checkpoint

        """
        fs = get_filesystem(path)
        # Removing a path that does not exist is a silent no-op.
        if fs.exists(path):
            fs.rm(path, recursive=True)
            log.debug(f"Removed checkpoint: {path}")
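

if __name__ == "__main__":
    # Usage sketch (an illustrative addition, not part of the Lightning
    # source): a round trip through the three methods above, run against a
    # temporary directory. It assumes only that `torch` is importable; the
    # checkpoint contents and file name are invented for the demo.
    import tempfile

    import torch

    checkpoint_io = TorchCheckpointIO()
    with tempfile.TemporaryDirectory() as tmp:
        ckpt_path = os.path.join(tmp, "demo.ckpt")

        # Save a plain state dict; the parent directory is created if missing.
        checkpoint_io.save_checkpoint(
            {"state_dict": torch.nn.Linear(2, 2).state_dict(), "global_step": 10},
            ckpt_path,
        )

        # Reload it; the default map_location lambda keeps storages where
        # torch.load deserializes them.
        restored = checkpoint_io.load_checkpoint(ckpt_path)
        assert restored["global_step"] == 10

        # Delete the checkpoint file again.
        checkpoint_io.remove_checkpoint(ckpt_path)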