from typing import Callable, Optional, Union

import torch
from torch import nn

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache
from ...generation import GenerationMixin
from ...masking_utils import create_causal_mask, create_sliding_window_causal_mask
from ...modeling_flash_attention_utils import FlashAttentionKwargs
from ...modeling_layers import (
    GenericForSequenceClassification,
    GenericForTokenClassification,
    GradientCheckpointingLayer,
)
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple, logging
from ...utils.deprecation import deprecate_kwarg
from ...utils.generic import check_model_inputs
from .configuration_gemma2 import Gemma2Config


logger = logging.get_logger(__name__)


class Gemma2RMSNorm(nn.Module):
    def __init__(self, dim: int, eps: float = 1e-6):
        super().__init__()
        self.eps = eps
        # Zero-initialized: the effective scale applied in `forward` is (1 + weight).
        self.weight = nn.Parameter(torch.zeros(dim))

    def _norm(self, x):
        return x * torch.rsqrt(x.pow(2).mean(-1, keepdim=True) + self.eps)

    def forward(self, x):
        output = self._norm(x.float())
        # Llama does x.to(float16) * w whilst Gemma2 is (x * w).to(float16)
        output = output * (1.0 + self.weight.float())
        return output.type_as(x)

    def extra_repr(self):
        return f"{tuple(self.weight.shape)}, eps={self.eps}"


class Gemma2MLP(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.hidden_size = config.hidden_size
        self.intermediate_size = config.intermediate_size
        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False)
        self.act_fn = ACT2FN[config.hidden_activation]

    def forward(self, x):
        down_proj = self.down_proj(self.act_fn(self.gate_proj(x)) * self.up_proj(x))
        return down_proj

class Gemma2RotaryEmbedding(nn.Module):
    inv_freq: torch.Tensor  # fix linting for `register_buffer`

    def __init__(self, config: Gemma2Config, device=None):
        super().__init__()
        # BC: "rope_type" was originally "type"
        if hasattr(config, "rope_scaling") and isinstance(config.rope_scaling, dict):
            self.rope_type = config.rope_scaling.get("rope_type", config.rope_scaling.get("type"))
        else:
            self.rope_type = "default"
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings

        self.config = config
        self.rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]

        inv_freq, self.attention_scaling = self.rope_init_fn(self.config, device)
        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self.original_inv_freq = self.inv_freq

    @torch.no_grad()
    @dynamic_rope_update  # power user: used with advanced RoPE types (e.g. dynamic rope)
    def forward(self, x, position_ids):
        inv_freq_expanded = self.inv_freq[None, :, None].float().expand(position_ids.shape[0], -1, 1).to(x.device)
        position_ids_expanded = position_ids[:, None, :].float()

        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
        with torch.autocast(device_type=device_type, enabled=False):  # force float32
            freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(1, 2)
            emb = torch.cat((freqs, freqs), dim=-1)
            cos = emb.cos() * self.attention_scaling
            sin = emb.sin() * self.attention_scaling

        return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype)


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., : x.shape[-1] // 2]
    x2 = x[..., x.shape[-1] // 2 :]
    return torch.cat((-x2, x1), dim=-1)


def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)
    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    return q_embed, k_embed
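
# A minimal illustrative sketch (not part of the original file), showing the shape
# convention described in the docstring above (`unsqueeze_dim=1`, the default);
# a head_dim of 256 is assumed here to match the rotary embedding's config:
#
#   q = torch.randn(1, 8, 16, 256)                       # [batch, heads, seq, head_dim]
#   k = torch.randn(1, 4, 16, 256)                       # fewer KV heads is fine
#   rope = Gemma2RotaryEmbedding(config)                 # `config`: a Gemma2Config
#   cos, sin = rope(q, torch.arange(16)[None])           # each [1, 16, 256]
#   q_rot, k_rot = apply_rotary_pos_emb(q, k, cos, sin)  # shapes are preserved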

def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: Optional[torch.Tensor],
    dropout: float = 0.0,
    scaling: Optional[float] = None,
    softcap: Optional[float] = None,
    **kwargs,
) -> tuple[torch.Tensor, torch.Tensor]:
    if scaling is None:
        scaling = module.head_dim**-0.5

    key_states = repeat_kv(key, module.num_key_value_groups)
    value_states = repeat_kv(value, module.num_key_value_groups)

    attn_weights = torch.matmul(query, key_states.transpose(2, 3)) * scaling

    if softcap is not None:
        attn_weights = attn_weights / softcap
        attn_weights = torch.tanh(attn_weights)
        attn_weights = attn_weights * softcap
    if attention_mask is not None:  # no matter the length, we just slice it
        causal_mask = attention_mask[:, :, :, : key_states.shape[-2]]
        attn_weights = attn_weights + causal_mask

    # upcast attention to fp32
    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)
    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)
    attn_output = torch.matmul(attn_weights, value_states)
    attn_output = attn_output.transpose(1, 2).contiguous()

    return attn_output, attn_weights
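
# A minimal illustrative sketch (not part of the original file): the soft-capping
# above squashes attention logits into (-softcap, +softcap) before the softmax,
# e.g. with softcap = 50.0:
#
#   logits = torch.tensor([10.0, 100.0, 1000.0])
#   capped = 50.0 * torch.tanh(logits / 50.0)  # ~= [9.87, 48.20, 50.00]
#
# Gemma 2 applies this via `config.attn_logit_softcapping` to keep logits bounded.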

class Gemma2Attention(nn.Module):
    """Multi-headed attention from 'Attention Is All You Need' paper"""

    def __init__(self, config: Gemma2Config, layer_idx: int):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        self.head_dim = getattr(config, "head_dim", config.hidden_size // config.num_attention_heads)
        self.num_key_value_groups = config.num_attention_heads // config.num_key_value_heads
        self.scaling = config.query_pre_attn_scalar**-0.5
        self.attention_dropout = self.config.attention_dropout
        self.is_causal = True

        self.q_proj = nn.Linear(config.hidden_size, config.num_attention_heads * self.head_dim, bias=config.attention_bias)
        self.k_proj = nn.Linear(config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias)
        self.v_proj = nn.Linear(config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias)
        self.o_proj = nn.Linear(config.num_attention_heads * self.head_dim, config.hidden_size, bias=config.attention_bias)
        self.attn_logit_softcapping = self.config.attn_logit_softcapping
        self.sliding_window = config.sliding_window if config.layer_types[layer_idx] == "sliding_attention" else None

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor],
        past_key_values: Optional[Cache] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[FlashAttentionKwargs],
    ) -> tuple[torch.Tensor, Optional[torch.Tensor], Optional[tuple[torch.Tensor]]]:
        input_shape = hidden_states.shape[:-1]
        hidden_shape = (*input_shape, -1, self.head_dim)

        query_states = self.q_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        key_states = self.k_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        value_states = self.v_proj(hidden_states).view(hidden_shape).transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_values is not None:
            # sin and cos are specific to RoPE models; cache_position needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_values.update(key_states, value_states, self.layer_idx, cache_kwargs)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            dropout=self.attention_dropout if self.training else 0.0,
            scaling=self.scaling,
            sliding_window=self.sliding_window,
            softcap=self.attn_logit_softcapping,
            **kwargs,
        )

        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
        attn_output = self.o_proj(attn_output)
        return attn_output, attn_weights
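
# A minimal illustrative sketch (not part of the original file): grouped-query
# attention shapes for a config with 16 query heads and 8 KV heads (so
# num_key_value_groups == 2); `repeat_kv` in `eager_attention_forward` then
# broadcasts each KV head to its 2 query heads:
#
#   q_proj(x) -> view/transpose -> [batch, 16, seq, head_dim]
#   k_proj(x) -> view/transpose -> [batch,  8, seq, head_dim]
#   repeat_kv(k, 2)             -> [batch, 16, seq, head_dim]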

class Gemma2DecoderLayer(GradientCheckpointingLayer):
    def __init__(self, config: Gemma2Config, layer_idx: int):
        super().__init__()
        self.hidden_size = config.hidden_size
        self.config = config
        self.attention_type = config.layer_types[layer_idx]
        self.self_attn = Gemma2Attention(config=config, layer_idx=layer_idx)
        self.mlp = Gemma2MLP(config)
        self.input_layernorm = Gemma2RMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_attention_layernorm = Gemma2RMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.pre_feedforward_layernorm = Gemma2RMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_feedforward_layernorm = Gemma2RMSNorm(config.hidden_size, eps=config.rms_norm_eps)

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        output_attentions: Optional[bool] = False,
        use_cache: Optional[bool] = False,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs,
    ) -> tuple[torch.FloatTensor, Optional[tuple[torch.FloatTensor, torch.FloatTensor]]]:
        residual = hidden_states
        hidden_states = self.input_layernorm(hidden_states)
        hidden_states, self_attn_weights = self.self_attn(
            hidden_states=hidden_states,
            position_embeddings=position_embeddings,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            output_attentions=output_attentions,
            use_cache=use_cache,
            cache_position=cache_position,
            **kwargs,
        )
        hidden_states = self.post_attention_layernorm(hidden_states)
        hidden_states = residual + hidden_states

        residual = hidden_states
        hidden_states = self.pre_feedforward_layernorm(hidden_states)
        hidden_states = self.mlp(hidden_states)
        hidden_states = self.post_feedforward_layernorm(hidden_states)
        hidden_states = residual + hidden_states

        outputs = (hidden_states,)
        if output_attentions:
            outputs += (self_attn_weights,)
        return outputs


@auto_docstring
class Gemma2PreTrainedModel(PreTrainedModel):
    config: Gemma2Config
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["Gemma2DecoderLayer"]
    _skip_keys_device_placement = ["past_key_values"]
    _supports_flash_attn = True
    _supports_sdpa = True
    _supports_flex_attn = True
    _can_compile_fullgraph = True
    _supports_attention_backend = True
    _can_record_outputs = {
        "hidden_states": Gemma2DecoderLayer,
        "attentions": Gemma2Attention,
    }

    def _init_weights(self, module):
        super()._init_weights(module)
        if "RMSNorm" in module.__class__.__name__:
            module.weight.data.zero_()


@auto_docstring
class Gemma2Model(Gemma2PreTrainedModel):
    def __init__(self, config: Gemma2Config):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        self.layers = nn.ModuleList(
            [Gemma2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.norm = Gemma2RMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.rotary_emb = Gemma2RotaryEmbedding(config=config)
        self.gradient_checkpointing = False

        # Initialize weights and apply final processing
        self.post_init()

    @check_model_inputs
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> BaseModelOutputWithPast:
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        use_cache = use_cache if use_cache is not None else self.config.use_cache

        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")

        if self.gradient_checkpointing and self.training and use_cache:
            logger.warning_once(
                "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`."
            )
            use_cache = False

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        if use_cache and past_key_values is None and not self.training:
            past_key_values = DynamicCache(config=self.config)

        if cache_position is None:
            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )

        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        # It may already have been prepared by e.g. `generate`
        causal_mask_mapping = attention_mask
        if not isinstance(causal_mask_mapping, dict):
            mask_kwargs = {
                "config": self.config,
                "input_embeds": inputs_embeds,
                "attention_mask": attention_mask,
                "cache_position": cache_position,
                "past_key_values": past_key_values,
                "position_ids": position_ids,
            }
            causal_mask_mapping = {
                "full_attention": create_causal_mask(**mask_kwargs),
                "sliding_attention": create_sliding_window_causal_mask(**mask_kwargs),
            }

        hidden_states = inputs_embeds

        # create position embeddings to be shared across the decoder layers
        position_embeddings = self.rotary_emb(hidden_states, position_ids)

        # normalized
        # Gemma2 downcasts the below to bfloat16, causing sqrt(3072)=55.4256 to become 55.5
        normalizer = torch.tensor(self.config.hidden_size**0.5, dtype=hidden_states.dtype)
        hidden_states = hidden_states * normalizer

        all_hidden_states = () if output_hidden_states else None
        all_self_attns = () if output_attentions else None

        for decoder_layer in self.layers[: self.config.num_hidden_layers]:
            if output_hidden_states:
                all_hidden_states += (hidden_states,)

            layer_outputs = decoder_layer(
                hidden_states,
                position_embeddings=position_embeddings,
                attention_mask=causal_mask_mapping[decoder_layer.attention_type],
                position_ids=position_ids,
                past_key_values=past_key_values,
                output_attentions=output_attentions,
                use_cache=use_cache,
                cache_position=cache_position,
                **kwargs,
            )
            hidden_states = layer_outputs[0]

            if output_attentions:
                all_self_attns += (layer_outputs[1],)

        hidden_states = self.norm(hidden_states)

        if output_hidden_states:
            all_hidden_states += (hidden_states,)

        return BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values,
            hidden_states=all_hidden_states,
            attentions=all_self_attns,
        )

@auto_docstring
class Gemma2ForCausalLM(Gemma2PreTrainedModel, GenerationMixin):
    _tied_weights_keys = ["lm_head.weight"]
    _tp_plan = {"lm_head": "colwise_rep"}
    _pp_plan = {"lm_head": (["hidden_states"], ["logits"])}

    def __init__(self, config):
        super().__init__(config)
        self.model = Gemma2Model(config)
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)

        # Initialize weights and apply final processing
        self.post_init()

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: Union[int, torch.Tensor] = 0,
        **kwargs,
    ) -> CausalLMOutputWithPast:
        r"""
        Example:

        ```python
        >>> from transformers import AutoTokenizer, Gemma2ForCausalLM

        >>> model = Gemma2ForCausalLM.from_pretrained("google/gemma-2-9b")
        >>> tokenizer = AutoTokenizer.from_pretrained("google/gemma-2-9b")

        >>> prompt = "What is your favorite condiment?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "What is your favorite condiment?"
        ```"""
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        outputs: BaseModelOutputWithPast = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = outputs.last_hidden_state
        # Only compute necessary logits, and do not upcast them to float if we are not computing the loss
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])
        if self.config.final_logit_softcapping is not None:
            logits = logits / self.config.final_logit_softcapping
            logits = torch.tanh(logits)
            logits = logits * self.config.final_logit_softcapping

        loss = None
        if labels is not None:
            loss = self.loss_function(logits, labels, self.vocab_size, **kwargs)

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


class Gemma2ForSequenceClassification(GenericForSequenceClassification, Gemma2PreTrainedModel):
    pass


class Gemma2ForTokenClassification(GenericForTokenClassification, Gemma2PreTrainedModel):
    pass


__all__ = [
    "Gemma2ForCausalLM",
    "Gemma2Model",
    "Gemma2PreTrainedModel",
    "Gemma2ForSequenceClassification",
    "Gemma2ForTokenClassification",
]