# transformers/models/helium/modeling_helium.py
import math
from collections.abc import Callable
from typing import Optional

import torch
import torch.nn as nn

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache
from ...generation import GenerationMixin
from ...integrations import use_kernelized_func
from ...masking_utils import create_causal_mask
from ...modeling_layers import (
    GenericForSequenceClassification,
    GenericForTokenClassification,
    GradientCheckpointingLayer,
)
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple
from ...utils.generic import maybe_autocast, merge_with_config_defaults
from ...utils.output_capturing import capture_outputs
from .configuration_helium import HeliumConfig


class HeliumRMSNorm(nn.Module):
    def __init__(self, hidden_size, eps=1e-6):
        super().__init__()
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.variance_epsilon = eps

    def forward(self, hidden_states):
        input_dtype = hidden_states.dtype
        hidden_states = hidden_states.to(torch.float32)
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
        return self.weight * hidden_states.to(input_dtype)

    def extra_repr(self):
        return f"{tuple(self.weight.shape)}, eps={self.variance_epsilon}"


class HeliumRotaryEmbedding(nn.Module):
    inv_freq: torch.Tensor  # fix linting for `register_buffer`

    def __init__(self, config: HeliumConfig, device=None):
        super().__init__()
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings
        self.config = config
        self.rope_type = self.config.rope_parameters["rope_type"]

        rope_init_fn = self.compute_default_rope_parameters
        if self.rope_type != "default":
            rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]
        inv_freq, self.attention_scaling = rope_init_fn(self.config, device)

        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self.register_buffer("original_inv_freq", inv_freq.clone(), persistent=False)

    @staticmethod
    def compute_default_rope_parameters(
        config: Optional[HeliumConfig] = None,
        device: Optional["torch.device"] = None,
        seq_len: Optional[int] = None,
    ) -> tuple["torch.Tensor", float]:
        """
        Computes the inverse frequencies according to the original RoPE implementation
        Args:
            config ([`~transformers.PreTrainedConfig`]):
                The model configuration.
            device (`torch.device`):
                The device to use for initialization of the inverse frequencies.
            seq_len (`int`, *optional*):
                The current sequence length. Unused for this type of RoPE.
        Returns:
            Tuple of (`torch.Tensor`, `float`), containing the inverse frequencies for the RoPE embeddings and the
            post-processing scaling factor applied to the computed cos/sin (unused in this type of RoPE).
        """
        base = config.rope_parameters["rope_theta"]
        dim = getattr(config, "head_dim", None) or config.hidden_size // config.num_attention_heads

        attention_factor = 1.0  # Unused in this type of RoPE

        # Compute the inverse frequencies
        inv_freq = 1.0 / (
            base ** (torch.arange(0, dim, 2, dtype=torch.int64).to(device=device, dtype=torch.float) / dim)
        )
        return inv_freq, attention_factor

    @torch.no_grad()
    @dynamic_rope_update  # power user: used with advanced RoPE types (e.g. dynamic rope)
    def forward(self, x, position_ids):
        inv_freq_expanded = self.inv_freq[None, :, None].float().expand(position_ids.shape[0], -1, 1).to(x.device)
        position_ids_expanded = position_ids[:, None, :].float()

        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
        with maybe_autocast(device_type=device_type, enabled=False):  # force full precision
            freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(1, 2)
            emb = torch.cat((freqs, freqs), dim=-1)
            cos = emb.cos() * self.attention_scaling
            sin = emb.sin() * self.attention_scaling

        return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype)


class HeliumMLP(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.hidden_size = config.hidden_size
        self.intermediate_size = config.intermediate_size
        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias)
        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias)
        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias)
        self.act_fn = ACT2FN[config.hidden_act]

    def forward(self, x):
        down_proj = self.down_proj(self.act_fn(self.gate_proj(x)) * self.up_proj(x))
        return down_proj


def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: Optional[torch.Tensor],
    scaling: float,
    dropout: float = 0.0,
    **kwargs: Unpack[TransformersKwargs],
):
    key_states = repeat_kv(key, module.num_key_value_groups)
    value_states = repeat_kv(value, module.num_key_value_groups)

    attn_weights = torch.matmul(query, key_states.transpose(2, 3)) * scaling
    if attention_mask is not None:
        attn_weights = attn_weights + attention_mask

    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)
    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)
    attn_output = torch.matmul(attn_weights, value_states)
    attn_output = attn_output.transpose(1, 2).contiguous()

    return attn_output, attn_weights


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., 0::2]
    x2 = x[..., 1::2]
    return torch.stack((-x2, x1), dim=-1).flatten(-2)


def apply_rotary_pos_emb(q, k, cos, sin, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)

    # Helium interleaves the rotary coefficients over adjacent channel pairs (0, 1), (2, 3), ...
    cos = cos[..., : cos.shape[-1] // 2].repeat_interleave(2, dim=-1)
    sin = sin[..., : sin.shape[-1] // 2].repeat_interleave(2, dim=-1)

    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    return q_embed, k_embed


@use_kernelized_func(apply_rotary_pos_emb)
class HeliumAttention(nn.Module):
    """Multi-headed attention from 'Attention Is All You Need' paper"""

    def __init__(self, config: HeliumConfig, layer_idx: Optional[int] = None):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        self.head_dim = getattr(config, "head_dim", config.hidden_size // config.num_attention_heads)
        self.num_key_value_groups = config.num_attention_heads // config.num_key_value_heads
        self.scaling = 1 / math.sqrt(self.head_dim)
        self.attention_dropout = config.attention_dropout
        self.is_causal = True

        self.q_proj = nn.Linear(
            config.hidden_size, config.num_attention_heads * self.head_dim, bias=config.attention_bias
        )
        self.k_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.v_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.o_proj = nn.Linear(config.num_attention_heads * self.head_dim, config.hidden_size, bias=False)

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: Optional[tuple[torch.Tensor, torch.Tensor]] = None,
        attention_mask: Optional[torch.Tensor] = None,
        past_key_values: Optional[Cache] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> tuple[torch.Tensor, torch.Tensor]:
        input_shape = hidden_states.shape[:-1]
        hidden_shape = (*input_shape, -1, self.head_dim)

        query_states = self.q_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        key_states = self.k_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        value_states = self.v_proj(hidden_states).view(hidden_shape).transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_values is not None:
            # sin and cos are specific to RoPE models; cache_position needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_values.update(key_states, value_states, self.layer_idx, cache_kwargs)

        attention_interface: Callable = ALL_ATTENTION_FUNCTIONS.get_interface(
            self.config._attn_implementation, eager_attention_forward
        )

        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            dropout=0.0 if not self.training else self.attention_dropout,
            scaling=self.scaling,
            **kwargs,
        )

        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
        attn_output = self.o_proj(attn_output)
        return attn_output, attn_weights


class HeliumDecoderLayer(GradientCheckpointingLayer):
    def __init__(self, config: HeliumConfig, layer_idx: Optional[int] = None):
        super().__init__()
        self.hidden_size = config.hidden_size

        self.self_attn = HeliumAttention(config=config, layer_idx=layer_idx)

        self.mlp = HeliumMLP(config)
        self.input_layernorm = HeliumRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_attention_layernorm = HeliumRMSNorm(config.hidden_size, eps=config.rms_norm_eps)

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        use_cache: Optional[bool] = False,
        cache_position: Optional[torch.LongTensor] = None,
        position_embeddings: Optional[tuple[torch.Tensor, torch.Tensor]] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> torch.Tensor:
        residual = hidden_states
        hidden_states = self.input_layernorm(hidden_states)

        # Self Attention
        hidden_states, _ = self.self_attn(
            hidden_states=hidden_states,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            use_cache=use_cache,
            cache_position=cache_position,
            position_embeddings=position_embeddings,
            **kwargs,
        )
        hidden_states = residual + hidden_states

        # Fully Connected
        residual = hidden_states
        hidden_states = self.post_attention_layernorm(hidden_states)
        hidden_states = self.mlp(hidden_states)
        hidden_states = residual + hidden_states
        return hidden_states


@auto_docstring
class HeliumPreTrainedModel(PreTrainedModel):
    config: HeliumConfig
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["HeliumDecoderLayer"]
    _skip_keys_device_placement = ["past_key_values"]
    _supports_flash_attn = True
    _supports_sdpa = True
    _supports_flex_attn = True
    _can_compile_fullgraph = True
    _supports_attention_backend = True
    _can_record_outputs = {
        "hidden_states": HeliumDecoderLayer,
        "attentions": HeliumAttention,
    }


@auto_docstring
class HeliumModel(HeliumPreTrainedModel):
    def __init__(self, config: HeliumConfig):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        self.layers = nn.ModuleList(
            [HeliumDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.norm = HeliumRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.rotary_emb = HeliumRotaryEmbedding(config=config)
        self.gradient_checkpointing = False

        # Initialize weights and apply final processing
        self.post_init()

    @merge_with_config_defaults
    @capture_outputs
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> BaseModelOutputWithPast:
        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        if use_cache and past_key_values is None:
            past_key_values = DynamicCache(config=self.config)

        if cache_position is None:
            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )

        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        causal_mask = create_causal_mask(
            config=self.config,
            input_embeds=inputs_embeds,
            attention_mask=attention_mask,
            cache_position=cache_position,
            past_key_values=past_key_values,
            position_ids=position_ids,
        )

        hidden_states = inputs_embeds
        position_embeddings = self.rotary_emb(hidden_states, position_ids)

        for decoder_layer in self.layers[: self.config.num_hidden_layers]:
            hidden_states = decoder_layer(
                hidden_states,
                attention_mask=causal_mask,
                position_ids=position_ids,
                past_key_values=past_key_values,
                use_cache=use_cache,
                cache_position=cache_position,
                position_embeddings=position_embeddings,
                **kwargs,
            )

        hidden_states = self.norm(hidden_states)
        return BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values,
        )


@auto_docstring
class HeliumForCausalLM(HeliumPreTrainedModel, GenerationMixin):
    _tied_weights_keys = {"lm_head.weight": "model.embed_tokens.weight"}
    _tp_plan = {"lm_head": "colwise_gather_output"}
    _pp_plan = {"lm_head": (["hidden_states"], ["logits"])}

    def __init__(self, config):
        super().__init__(config)
        self.model = HeliumModel(config)
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)

        # Initialize weights and apply final processing
        self.post_init()

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: int | torch.Tensor = 0,
        **kwargs: Unpack[TransformersKwargs],
    ) -> CausalLMOutputWithPast:
        r"""
        Example:

        ```python
        >>> from transformers import AutoTokenizer, HeliumForCausalLM

        >>> model = HeliumForCausalLM.from_pretrained("google/helium-7b")
        >>> tokenizer = AutoTokenizer.from_pretrained("google/helium-7b")

        >>> prompt = "What is your favorite condiment?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "What is your favorite condiment?"
        ```"""
        outputs: BaseModelOutputWithPast = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = outputs.last_hidden_state
        # Only compute necessary logits, and do not upcast them to float if we are not computing the loss
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])

        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


class HeliumForSequenceClassification(GenericForSequenceClassification, HeliumPreTrainedModel):
    pass


class HeliumForTokenClassification(GenericForTokenClassification, HeliumPreTrainedModel):
    pass


__all__ = [
    "HeliumPreTrainedModel",
    "HeliumModel",
    "HeliumForCausalLM",
    "HeliumForSequenceClassification",
    "HeliumForTokenClassification",
]