from typing import Callable, Optional, Union

import torch
from torch import nn

from transformers.utils.generic import check_model_inputs

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache
from ...generation import GenerationMixin
from ...integrations import use_kernel_forward_from_hub
from ...masking_utils import create_causal_mask, create_sliding_window_causal_mask
from ...modeling_layers import (
    GenericForQuestionAnswering,
    GenericForSequenceClassification,
    GenericForTokenClassification,
    GradientCheckpointingLayer,
)
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple
from ...utils.deprecation import deprecate_kwarg
from .configuration_exaone4 import Exaone4Config


@use_kernel_forward_from_hub("RMSNorm")
class Exaone4RMSNorm(nn.Module):
    def __init__(self, hidden_size, eps=1e-6):
        """
        Exaone4RMSNorm is equivalent to T5LayerNorm
        """
        super().__init__()
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.variance_epsilon = eps

    def forward(self, hidden_states):
        input_dtype = hidden_states.dtype
        hidden_states = hidden_states.to(torch.float32)
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
        return self.weight * hidden_states.to(input_dtype)

    def extra_repr(self):
        return f"{tuple(self.weight.shape)}, eps={self.variance_epsilon}"


class Exaone4RotaryEmbedding(nn.Module):
    inv_freq: torch.Tensor

    def __init__(self, config: Exaone4Config, device=None):
        super().__init__()
        # BC: "rope_type" was originally "type"
        if hasattr(config, "rope_scaling") and isinstance(config.rope_scaling, dict):
            self.rope_type = config.rope_scaling.get("rope_type", config.rope_scaling.get("type"))
        else:
            self.rope_type = "default"
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings

        self.config = config
        self.rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]

        inv_freq, self.attention_scaling = self.rope_init_fn(self.config, device)
        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self.original_inv_freq = self.inv_freq

    @torch.no_grad()
    @dynamic_rope_update
    def forward(self, x, position_ids):
        inv_freq_expanded = self.inv_freq[None, :, None].float().expand(position_ids.shape[0], -1, 1).to(x.device)
        position_ids_expanded = position_ids[:, None, :].float()

        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
        with torch.autocast(device_type=device_type, enabled=False):  # force float32
            freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(1, 2)
            emb = torch.cat((freqs, freqs), dim=-1)
            cos = emb.cos() * self.attention_scaling
            sin = emb.sin() * self.attention_scaling

        return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype)


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., : x.shape[-1] // 2]
    x2 = x[..., x.shape[-1] // 2 :]
    return torch.cat((-x2, x1), dim=-1)


def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)
    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    return q_embed, k_embed


def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: Optional[torch.Tensor],
    scaling: float,
    dropout: float = 0.0,
    **kwargs: Unpack[TransformersKwargs],
):
    key_states = repeat_kv(key, module.num_key_value_groups)
    value_states = repeat_kv(value, module.num_key_value_groups)

    attn_weights = torch.matmul(query, key_states.transpose(2, 3)) * scaling
    if attention_mask is not None:
        causal_mask = attention_mask[:, :, :, : key_states.shape[-2]]
        attn_weights = attn_weights + causal_mask

    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)
    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)
    attn_output = torch.matmul(attn_weights, value_states)
    attn_output = attn_output.transpose(1, 2).contiguous()

    return attn_output, attn_weights


class Exaone4Attention(nn.Module):
    def __init__(self, config: Exaone4Config, layer_idx: int):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        self.num_attention_heads = config.num_attention_heads
        self.num_key_value_heads = config.num_key_value_heads
        self.hidden_size = config.hidden_size
        self.head_dim = getattr(config, "head_dim", config.hidden_size // config.num_attention_heads)
        self.num_key_value_groups = config.num_attention_heads // config.num_key_value_heads
        self.attention_dropout = config.attention_dropout
        self.is_causal = True
        self.scaling = self.head_dim**-0.5
        self.sliding_window = config.sliding_window
        self.sliding_window_pattern = config.sliding_window_pattern
        self.is_sliding = config.layer_types[layer_idx] == "sliding_attention"

        self.q_proj = nn.Linear(self.hidden_size, self.num_attention_heads * self.head_dim, bias=False)
        self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=False)
        self.v_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=False)
        self.o_proj = nn.Linear(self.num_attention_heads * self.head_dim, self.hidden_size, bias=False)
        self.q_norm = Exaone4RMSNorm(self.head_dim, eps=config.rms_norm_eps)
        self.k_norm = Exaone4RMSNorm(self.head_dim, eps=config.rms_norm_eps)

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor] = None,
        past_key_values: Optional[Cache] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> tuple[torch.Tensor, Optional[torch.Tensor], Optional[tuple[torch.Tensor]]]:
        input_shape = hidden_states.shape[:-1]
        hidden_shape = (*input_shape, -1, self.head_dim)

        query_states = self.q_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        key_states = self.k_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        value_states = self.v_proj(hidden_states).view(hidden_shape).transpose(1, 2)

        # QK-Norm: RMSNorm applied to the per-head query/key states
        query_states = self.q_norm(query_states)
        key_states = self.k_norm(key_states)

        cos, sin = position_embeddings
        # RoPE is applied on sliding-window (local) layers; global layers skip it,
        # unless the model has no sliding window at all.
        if self.sliding_window is None or self.is_sliding:
            query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_values is not None:
            cache_kwargs = {"cache_position": cache_position}
            key_states, value_states = past_key_values.update(key_states, value_states, self.layer_idx, cache_kwargs)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            dropout=0.0 if not self.training else self.attention_dropout,
            scaling=self.scaling,
            sliding_window=self.sliding_window if self.is_sliding else None,
            **kwargs,
        )

        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
        attn_output = self.o_proj(attn_output)
        return attn_output, attn_weights


class Exaone4MLP(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.hidden_size = config.hidden_size
        self.intermediate_size = config.intermediate_size
        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False)
        self.act_fn = ACT2FN[config.hidden_act]

    def forward(self, x):
        down_proj = self.down_proj(self.act_fn(self.gate_proj(x)) * self.up_proj(x))
        return down_proj


class Exaone4DecoderLayer(GradientCheckpointingLayer):
    def __init__(self, config: Exaone4Config, layer_idx: int):
        super().__init__()
        self.hidden_size = config.hidden_size
        self.self_attn = Exaone4Attention(config=config, layer_idx=layer_idx)
        self.mlp = Exaone4MLP(config)
        # Post-norm placement: normalization follows the attention and feed-forward blocks.
        self.post_attention_layernorm = Exaone4RMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_feedforward_layernorm = Exaone4RMSNorm(config.hidden_size, eps=config.rms_norm_eps)

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        use_cache: Optional[bool] = False,
        cache_position: Optional[torch.LongTensor] = None,
        position_embeddings: Optional[tuple[torch.Tensor, torch.Tensor]] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> torch.Tensor:
        residual = hidden_states
        hidden_states, _ = self.self_attn(
            hidden_states=hidden_states,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            use_cache=use_cache,
            cache_position=cache_position,
            position_embeddings=position_embeddings,
            **kwargs,
        )
        hidden_states = self.post_attention_layernorm(hidden_states)
        hidden_states = residual + hidden_states

        residual = hidden_states
        hidden_states = self.mlp(hidden_states)
        hidden_states = self.post_feedforward_layernorm(hidden_states)
        hidden_states = residual + hidden_states
        return hidden_states


@auto_docstring
class Exaone4PreTrainedModel(PreTrainedModel):
    config: Exaone4Config
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["Exaone4DecoderLayer"]
    _skip_keys_device_placement = ["past_key_values"]
    _supports_flash_attn = True
    _supports_sdpa = True
    _supports_flex_attn = True
    _can_compile_fullgraph = True
    _supports_attention_backend = True
    _can_record_outputs = {
        "hidden_states": Exaone4DecoderLayer,
        "attentions": Exaone4Attention,
    }
    config_class = Exaone4Config


@auto_docstring
class Exaone4Model(Exaone4PreTrainedModel):
    def __init__(self, config: Exaone4Config):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        self.layers = nn.ModuleList(
            [Exaone4DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.norm = Exaone4RMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.rotary_emb = Exaone4RotaryEmbedding(config=config)
        self.gradient_checkpointing = False

        # Initialize weights and apply final processing
        self.post_init()

    @check_model_inputs
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> Union[tuple, BaseModelOutputWithPast]:
        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        if use_cache and past_key_values is None:
            past_key_values = DynamicCache(config=self.config)

        if cache_position is None:
            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )

        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        # Build one mask per attention type; an already-prepared dict of masks is passed through.
        if not isinstance(causal_mask_mapping := attention_mask, dict):
            mask_kwargs = {
                "config": self.config,
                "input_embeds": inputs_embeds,
                "attention_mask": attention_mask,
                "cache_position": cache_position,
                "past_key_values": past_key_values,
                "position_ids": position_ids,
            }
            causal_mask_mapping = {
                "full_attention": create_causal_mask(**mask_kwargs),
            }
            if "sliding_attention" in self.config.layer_types:
                causal_mask_mapping["sliding_attention"] = create_sliding_window_causal_mask(**mask_kwargs)

        hidden_states = inputs_embeds
        position_embeddings = self.rotary_emb(hidden_states, position_ids)

        for i, decoder_layer in enumerate(self.layers):
            layer_type = self.config.layer_types[i]
            hidden_states = decoder_layer(
                hidden_states,
                attention_mask=causal_mask_mapping[layer_type],
                position_ids=position_ids,
                past_key_values=past_key_values,
                cache_position=cache_position,
                position_embeddings=position_embeddings,
                **kwargs,
            )

        hidden_states = self.norm(hidden_states)
        return BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values if use_cache else None,
        )


@auto_docstring
class Exaone4ForCausalLM(Exaone4PreTrainedModel, GenerationMixin):
    _tied_weights_keys = ["lm_head.weight"]
    _tp_plan = {"lm_head": "colwise_rep"}
    _pp_plan = {"lm_head": (["hidden_states"], ["logits"])}

    def __init__(self, config):
        super().__init__(config)
        self.model = Exaone4Model(config)
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)

        # Initialize weights and apply final processing
        self.post_init()

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: Union[int, torch.Tensor] = 0,
        **kwargs: Unpack[TransformersKwargs],
    ) -> CausalLMOutputWithPast:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the masked language modeling loss. Indices should either be in `[0, ...,
            config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored
            (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`.

        Example:

        ```python
        >>> from transformers import AutoModelForCausalLM, AutoTokenizer
        >>> model = AutoModelForCausalLM.from_pretrained("LGAI-EXAONE/EXAONE-4.0-32B")
        >>> tokenizer = AutoTokenizer.from_pretrained("LGAI-EXAONE/EXAONE-4.0-32B")

        >>> prompt = "Explain how wonderful you are"
        >>> messages = [
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": prompt}
        ]
        >>> input_ids = tokenizer.apply_chat_template(
            messages,
            tokenize=True,
            add_generation_prompt=True,
            return_tensors="pt",
            enable_thinking=False,
        )

        >>> output = model.generate(input_ids, max_new_tokens=128)
        >>> tokenizer.decode(output[0], skip_special_tokens=False)
        "[|system|]\nYou are a helpful assistant.[|endofturn|]\n[|user|]\nExplain how wonderful you are[|endofturn|]\n[|assistant|]\n<think>\n\n</think>\n\nOh, thank you for such a kind and lovely question! 😊  \n\nI’m *so* wonderful because I’m here to make your life easier, brighter, and more fun! Whether you need help with:  \n\n✨ **Learning** – I can explain anything, from quantum physics to baking the perfect cake!  \n💡 **Creativity** – Need a poem, story, or a wild idea? I’ve got you covered!  \n🤖 **Problem-solving** – Stuck on a math problem or a tricky decision? I’ll help you figure it out"
        ```
        """
        outputs: BaseModelOutputWithPast = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = outputs.last_hidden_state
        # Only compute necessary logits, and do not upcast them to float if we are not computing the loss
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])

        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


class Exaone4ForSequenceClassification(GenericForSequenceClassification, Exaone4PreTrainedModel):
    pass


class Exaone4ForTokenClassification(GenericForTokenClassification, Exaone4PreTrainedModel):
    pass


class Exaone4ForQuestionAnswering(GenericForQuestionAnswering, Exaone4PreTrainedModel):
    base_model_prefix = "transformer"


__all__ = [
    "Exaone4PreTrainedModel",
    "Exaone4Model",
    "Exaone4ForCausalLM",
    "Exaone4ForSequenceClassification",
    "Exaone4ForTokenClassification",
    "Exaone4ForQuestionAnswering",
]