from __future__ import annotations

import warnings
from typing import Any, Optional, Union

from torch import nn
from tqdm import tqdm

from peft.tuners import adalora, loha, lokr, lora, oft, shira
from peft.tuners.tuners_utils import BaseTuner, BaseTunerLayer, _delete_auxiliary_adapter
from peft.utils import (
    TRANSFORMERS_MODELS_TO_LORA_TARGET_MODULES_MAPPING,
    ModulesToSaveWrapper,
    PeftType,
    _get_submodules,
    get_auto_gptq_quant_linear,
)

# Collection of constants used for all tuners
COMPATIBLE_TUNER_TYPES = (PeftType.LORA, PeftType.LOHA, PeftType.LOKR, PeftType.ADALORA, PeftType.OFT, PeftType.SHIRA)
# AdaLoRA shares the LoRA prefix, so it does not need its own entry here
PREFIXES = [lora.LoraModel.prefix, lokr.LoKrModel.prefix, loha.LoHaModel.prefix, oft.OFTModel.prefix, shira.ShiraModel.prefix]
Configs = Union[lora.LoraConfig, loha.LoHaConfig, lokr.LoKrConfig, adalora.AdaLoraConfig, oft.OFTConfig, shira.ShiraConfig]
Layers = (lora.layer.LoraLayer, loha.layer.LoHaLayer, lokr.layer.LoKrLayer, adalora.layer.AdaLoraLayer, oft.OFTLayer, shira.ShiraLayer)


class MixedModel(BaseTuner):
    """
    A class that allows to mix different types of adapters in a single model.

    Note: This class should usually not be initialized directly. Instead, use `get_peft_model` with the argument
    `mixed=True`.

    Args:
        model (:obj:`nn.Module`):
            The model to be tuned.
        config (:obj:`PeftConfig`):
            The config of the model to be tuned. The adapter type must be compatible.
        adapter_name (:obj:`str`):
            The name of the first adapter.
    """

    def __init__(self, model: nn.Module, config: Configs, adapter_name: str) -> None:
        super().__init__(model, config, adapter_name)

    def _check_new_adapter_config(self, config: Configs) -> None:
        """
        A helper method to check the config when a new adapter is being added.

        Raise a ValueError if there is something wrong with the config or if it conflicts with existing adapters.

        """
        if not isinstance(config, Configs.__args__):
            raise ValueError(
                f"{self.__class__.__name__} only supports {COMPATIBLE_TUNER_TYPES} configs, but got {type(config)}."
            )
        super()._check_new_adapter_config(config)

    def _create_and_replace(self, config: Configs, *args: Any, **kwargs: Any) -> None:
        # dispatch to the tuner implementation that matches the config type
        if isinstance(config, adalora.AdaLoraConfig):
            adalora.AdaLoraModel._create_and_replace(self, config, *args, **kwargs)
        elif isinstance(config, lora.LoraConfig):
            lora.LoraModel._create_and_replace(self, config, *args, **kwargs)
        elif isinstance(config, loha.LoHaConfig):
            loha.LoHaModel._create_and_replace(self, config, *args, **kwargs)
        elif isinstance(config, lokr.LoKrConfig):
            lokr.LoKrModel._create_and_replace(self, config, *args, **kwargs)
        elif isinstance(config, oft.OFTConfig):
            oft.OFTModel._create_and_replace(self, config, *args, **kwargs)
        elif isinstance(config, shira.ShiraConfig):
            shira.ShiraModel._create_and_replace(self, config, *args, **kwargs)
        else:
            raise ValueError(f"Unsupported config type {type(config)}, should be one of {COMPATIBLE_TUNER_TYPES}.")

    def _replace_module(self, parent, child_name, new_module, child) -> None:
        setattr(parent, child_name, new_module)
        # It's not necessary to set requires_grad here, as that is handled by
        # _mark_only_adapters_as_trainable

        # child layer wraps the original module, unpack it
        if hasattr(child, "base_layer"):
            child = child.get_base_layer()
        elif hasattr(child, "quant_linear_module"):
            child = child.quant_linear_module

        if not hasattr(new_module, "base_layer"):
            new_module.weight = child.weight
            if hasattr(child, "bias"):
                new_module.bias = child.bias

        if getattr(child, "state", None) is not None:
            if hasattr(new_module, "base_layer"):
                new_module.base_layer.state = child.state
            else:
                new_module.state = child.state
            new_module.to(child.weight.device)

        # dispatch the adapter sub-modules to the correct device
        for name, module in new_module.named_modules():
            if any(prefix in name for prefix in PREFIXES):
                module.to(child.weight.device)
            if "ranknum" in name:
                module.to(child.weight.device)

    def _mark_only_adapters_as_trainable(self, model: nn.Module) -> None:
        for n, p in model.named_parameters():
            if not any(prefix in n for prefix in PREFIXES):
                p.requires_grad = False

        for active_adapter in self.active_adapters:
            bias = getattr(self.peft_config[active_adapter], "bias", "none")
            if bias == "none":
                continue

            if bias == "all":
                for n, p in model.named_parameters():
                    if "bias" in n:
                        p.requires_grad = True
            elif bias == "lora_only":
                for m in model.modules():
                    if isinstance(m, Layers) and hasattr(m, "bias") and m.bias is not None:
                        m.bias.requires_grad = True
            else:
                raise ValueError(f"Requested bias: {bias}, is not implemented.")

    @staticmethod
    def _create_new_module(config, adapter_name, target, **kwargs):
        gptq_quantization_config = kwargs.get("gptq_quantization_config", None)
        AutoGPTQQuantLinear = get_auto_gptq_quant_linear(gptq_quantization_config)
        if (gptq_quantization_config is not None) or (AutoGPTQQuantLinear is not None):
            raise ValueError(f"GPTQ quantization not supported for {config.peft_type.value} (yet).")

        loaded_in_8bit = kwargs.pop("loaded_in_8bit", False)
        loaded_in_4bit = kwargs.pop("loaded_in_4bit", False)
        if loaded_in_8bit or loaded_in_4bit:
            raise ValueError(f"8bit and 4bit quantization not supported for {config.peft_type.value} (yet).")

        if isinstance(config, adalora.AdaLoraConfig):
            new_module = adalora.AdaLoraModel._create_new_module(config, adapter_name, target, **kwargs)
        elif isinstance(config, lora.LoraConfig):
            new_module = lora.LoraModel._create_new_module(config, adapter_name, target, **kwargs)
        elif isinstance(config, loha.LoHaConfig):
            new_module = loha.LoHaModel._create_new_module(config, adapter_name, target, **kwargs)
        elif isinstance(config, lokr.LoKrConfig):
            new_module = lokr.LoKrModel._create_new_module(config, adapter_name, target, **kwargs)
        elif isinstance(config, oft.OFTConfig):
            new_module = oft.OFTModel._create_new_module(config, adapter_name, target, **kwargs)
        elif isinstance(config, shira.ShiraConfig):
            new_module = shira.ShiraModel._create_new_module(config, adapter_name, target, **kwargs)
        else:
            raise ValueError(f"Unknown config type {type(config)}, should be one of {COMPATIBLE_TUNER_TYPES}.")
        return new_module

    def set_adapter(self, adapter_name: Union[str, list[str]], inference_mode: bool = False) -> None:
        self.set_auxiliary_adapters(adapter_name, inference_mode=inference_mode)
        for module in self.model.modules():
            if isinstance(module, Layers):
                if module.merged:
                    warnings.warn("Adapter cannot be set when the model is merged. Unmerging the model first.")
                    module.unmerge()
                module.set_adapter(adapter_name, inference_mode=inference_mode)
        self.active_adapter = adapter_name

    @staticmethod
    def _prepare_adapter_config(peft_config, model_config):
        if peft_config.target_modules is None:
            if model_config["model_type"] not in TRANSFORMERS_MODELS_TO_LORA_TARGET_MODULES_MAPPING:
                raise ValueError("Please specify `target_modules` in `peft_config`")
            peft_config.target_modules = set(
                TRANSFORMERS_MODELS_TO_LORA_TARGET_MODULES_MAPPING[model_config["model_type"]]
            )
        return peft_config

    def _unload_and_optionally_merge(
        self,
        merge: bool = True,
        progressbar: bool = False,
        safe_merge: bool = False,
        adapter_names: Optional[list[str]] = None,
    ):
        if merge and getattr(self.model, "quantization_method", None) == "gptq":
            raise ValueError("Cannot merge layers when the model is gptq quantized")

        def merge_recursively(module):
            # helper function to recursively merge the base_layer of the target
            path = []
            layer = module
            while hasattr(layer, "base_layer"):
                path.append(layer)
                layer = layer.base_layer
            for layer_before, layer_after in zip(path[:-1], path[1:]):
                layer_after.merge(safe_merge=safe_merge, adapter_names=adapter_names)
                layer_before.base_layer = layer_after.base_layer
            module.merge(safe_merge=safe_merge, adapter_names=adapter_names)

        key_list = [key for key, _ in self.model.named_modules() if not any(prefix in key for prefix in PREFIXES)]
        desc = "Unloading " + ("and merging " if merge else "") + "model"

        for key in tqdm(key_list, disable=not progressbar, desc=desc):
            try:
                parent, target, target_name = _get_submodules(self.model, key)
            except AttributeError:
                continue

            if hasattr(target, "base_layer"):
                if merge:
                    merge_recursively(target)
                self._replace_module(parent, target_name, target.get_base_layer(), target)
            elif isinstance(target, ModulesToSaveWrapper):
                # save any additional trainable modules part of `modules_to_save`
                new_module = target.modules_to_save[target.active_adapter]
                if hasattr(new_module, "base_layer"):
                    # the module itself is a tuner layer, merge it first if requested
                    if merge:
                        new_module.merge(safe_merge=safe_merge, adapter_names=adapter_names)
                    new_module = new_module.get_base_layer()
                setattr(parent, target_name, new_module)

        return self.model

    def add_weighted_adapter(self, *args: Any, **kwargs: Any) -> None:
        raise NotImplementedError(f"Weighted adapters are not supported for {self.__class__.__name__} (yet).")

    def delete_adapter(self, adapter_name: Union[str, list[str]]) -> None:
        """
        Deletes an existing adapter.

        Args:
            adapter_name (Union[str, list[str]]): Name of the adapter(s) to delete.
        """
        if isinstance(adapter_name, str):
            adapter_names = [adapter_name]
        else:
            adapter_names = adapter_name

        mismatched = set(adapter_names) - set(self.peft_config.keys())
        if mismatched:
            raise ValueError(
                f"Adapter(s) {sorted(mismatched)} not found, available adapters: {sorted(self.peft_config.keys())}"
            )

        for adapter_name in adapter_names:
            del self.peft_config[adapter_name]

            key_list = [key for key, _ in self.model.named_modules() if not any(prefix in key for prefix in PREFIXES)]
            new_adapter = None
            for key in key_list:
                _, target, _ = _get_submodules(self.model, key)
                if isinstance(target, BaseTunerLayer):
                    target.delete_adapter(adapter_name)
                    if new_adapter is None:
                        new_adapter = target.active_adapters[:]

        self.active_adapter = new_adapter or []
        _delete_auxiliary_adapter(self.model, adapter_names, new_active_adapters=new_adapter)

    def generate(self, *args: Any, **kwargs: Any):
        return self.model.generate(*args, **kwargs)
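# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the module). As the class
# docstring notes, `MixedModel` is normally created for you by
# `get_peft_model(..., mixed=True)` rather than instantiated directly. The
# checkpoint name and target modules below are hypothetical placeholders:
#
#     from transformers import AutoModelForCausalLM
#     from peft import LoHaConfig, LoraConfig, get_peft_model
#
#     base_model = AutoModelForCausalLM.from_pretrained("some/base-model")  # hypothetical checkpoint
#     peft_model = get_peft_model(
#         base_model,
#         LoraConfig(target_modules=["q_proj", "v_proj"]),  # hypothetical target modules
#         adapter_name="lora",
#         mixed=True,
#     )
#     peft_model.add_adapter("loha", LoHaConfig(target_modules=["q_proj", "v_proj"]))
#     peft_model.set_adapter(["lora", "loha"])  # activate both adapter types at once
# ---------------------------------------------------------------------------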