from typing import Callable, Optional, Union

import torch
from torch import nn

from transformers.utils.generic import TransformersKwargs

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache
from ...generation import GenerationMixin
from ...integrations import use_kernel_forward_from_hub
from ...masking_utils import create_causal_mask, create_sliding_window_causal_mask
from ...modeling_layers import GradientCheckpointingLayer
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import auto_docstring, can_return_tuple
from ...utils.deprecation import deprecate_kwarg
from ...utils.generic import check_model_inputs
from .configuration_olmo3 import Olmo3Config


@use_kernel_forward_from_hub("RMSNorm")
class Olmo3RMSNorm(nn.Module):
    def __init__(self, hidden_size, eps=1e-6):
        """
        Olmo3RMSNorm is equivalent to T5LayerNorm
        """
        super().__init__()
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.variance_epsilon = eps

    def forward(self, hidden_states):
        input_dtype = hidden_states.dtype
        hidden_states = hidden_states.to(torch.float32)
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
        return self.weight * hidden_states.to(input_dtype)

    def extra_repr(self):
        return f"{tuple(self.weight.shape)}, eps={self.variance_epsilon}"
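

# Editorial sketch (not part of the original module): `Olmo3RMSNorm` rescales
# each hidden vector to roughly unit RMS before the learned gain is applied,
# so with the default weight of ones the output RMS is ~1. The sizes below are
# illustrative assumptions, not Olmo3 defaults.
def _demo_rms_norm():
    import torch

    norm = Olmo3RMSNorm(hidden_size=8, eps=1e-6)
    x = torch.randn(2, 3, 8) * 5.0
    y = norm(x)
    rms = y.pow(2).mean(-1).sqrt()
    torch.testing.assert_close(rms, torch.ones(2, 3), atol=1e-3, rtol=1e-3)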


def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: Optional[torch.Tensor],
    scaling: float,
    dropout: float = 0.0,
    **kwargs: Unpack[TransformersKwargs],
):
    key_states = repeat_kv(key, module.num_key_value_groups)
    value_states = repeat_kv(value, module.num_key_value_groups)

    attn_weights = torch.matmul(query, key_states.transpose(2, 3)) * scaling
    if attention_mask is not None:
        causal_mask = attention_mask[:, :, :, : key_states.shape[-2]]
        attn_weights = attn_weights + causal_mask

    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)
    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)
    attn_output = torch.matmul(attn_weights, value_states)
    attn_output = attn_output.transpose(1, 2).contiguous()

    return attn_output, attn_weights
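

# Editorial sketch (not part of the original module): calling
# `eager_attention_forward` directly. The first argument only needs
# `num_key_value_groups` and `training`, so a bare nn.Module stands in for a
# full attention layer here; shapes and values are assumptions for
# illustration only.
def _demo_eager_attention():
    import torch
    from torch import nn

    class _Stub(nn.Module):
        num_key_value_groups = 4  # e.g. 8 query heads sharing 2 KV heads

    batch, q_heads, kv_heads, seqlen, head_dim = 1, 8, 2, 5, 16
    query = torch.randn(batch, q_heads, seqlen, head_dim)
    key = torch.randn(batch, kv_heads, seqlen, head_dim)
    value = torch.randn(batch, kv_heads, seqlen, head_dim)
    out, weights = eager_attention_forward(
        _Stub(), query, key, value, attention_mask=None, scaling=head_dim**-0.5
    )
    assert out.shape == (batch, seqlen, q_heads, head_dim)  # already transposed back
    assert weights.shape == (batch, q_heads, seqlen, seqlen)
    # Rows of the softmaxed weights sum to 1.
    torch.testing.assert_close(weights.sum(-1), torch.ones(batch, q_heads, seqlen))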


def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    q_type, k_type = q.dtype, k.dtype
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)
    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    return q_embed.to(q_type), k_embed.to(k_type)
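

# Editorial sketch (not part of the original module): a quick numerical
# property of RoPE as implemented above, namely that the rotation is
# norm-preserving along the head dimension. cos/sin are built the same way
# `Olmo3RotaryEmbedding` builds them; the sizes are illustrative assumptions.
def _demo_rope():
    import torch

    batch, heads, seqlen, head_dim = 1, 2, 4, 8
    inv_freq = 1.0 / (10000.0 ** (torch.arange(0, head_dim, 2).float() / head_dim))
    freqs = torch.outer(torch.arange(seqlen).float(), inv_freq)  # (seqlen, head_dim/2)
    emb = torch.cat((freqs, freqs), dim=-1)                      # (seqlen, head_dim)
    cos, sin = emb.cos()[None], emb.sin()[None]                  # (1, seqlen, head_dim)

    q = torch.randn(batch, heads, seqlen, head_dim)
    k = torch.randn(batch, heads, seqlen, head_dim)
    q_rot, k_rot = apply_rotary_pos_emb(q, k, cos, sin)
    torch.testing.assert_close(q_rot.norm(dim=-1), q.norm(dim=-1))
    torch.testing.assert_close(k_rot.norm(dim=-1), k.norm(dim=-1))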


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., : x.shape[-1] // 2]
    x2 = x[..., x.shape[-1] // 2 :]
    return torch.cat((-x2, x1), dim=-1)


class Olmo3Attention(nn.Module):
    """Multi-headed attention from 'Attention Is All You Need' paper"""

    def __init__(self, config: Olmo3Config, layer_idx: int):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        self.head_dim = getattr(config, "head_dim", config.hidden_size // config.num_attention_heads)
        self.num_key_value_groups = config.num_attention_heads // config.num_key_value_heads
        self.scaling = self.head_dim**-0.5
        self.attention_dropout = config.attention_dropout
        self.is_causal = True

        self.q_proj = nn.Linear(
            config.hidden_size, config.num_attention_heads * self.head_dim, bias=config.attention_bias
        )
        self.k_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.v_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.o_proj = nn.Linear(
            config.num_attention_heads * self.head_dim, config.hidden_size, bias=config.attention_bias
        )
        self.q_norm = Olmo3RMSNorm(config.num_attention_heads * self.head_dim, config.rms_norm_eps)
        self.k_norm = Olmo3RMSNorm(config.num_key_value_heads * self.head_dim, config.rms_norm_eps)

        assert config.layer_types is not None
        self.attention_type = config.layer_types[layer_idx]
        self.sliding_window = config.sliding_window if self.attention_type == "sliding_attention" else None

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor],
        past_key_values: Optional[Cache] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> tuple[torch.Tensor, Optional[torch.Tensor]]:
        input_shape = hidden_states.shape[:-1]
        hidden_shape = (*input_shape, -1, self.head_dim)

        query_states = self.q_norm(self.q_proj(hidden_states))
        key_states = self.k_norm(self.k_proj(hidden_states))
        value_states = self.v_proj(hidden_states)

        query_states = query_states.view(hidden_shape).transpose(1, 2)
        key_states = key_states.view(hidden_shape).transpose(1, 2)
        value_states = value_states.view(hidden_shape).transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_values is not None:
            # sin and cos are specific to RoPE models; cache_position needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_values.update(key_states, value_states, self.layer_idx, cache_kwargs)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            dropout=0.0 if not self.training else self.attention_dropout,
            scaling=self.scaling,
            sliding_window=self.sliding_window,
            **kwargs,
        )

        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
        attn_output = self.o_proj(attn_output)
        return attn_output, attn_weights


class Olmo3MLP(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.hidden_size = config.hidden_size
        self.intermediate_size = config.intermediate_size
        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False)
        self.act_fn = ACT2FN[config.hidden_act]

    def forward(self, x):
        down_proj = self.down_proj(self.act_fn(self.gate_proj(x)) * self.up_proj(x))
        return down_proj


class Olmo3DecoderLayer(GradientCheckpointingLayer):
    def __init__(self, config: Olmo3Config, layer_idx: int):
        super().__init__()
        self.hidden_size = config.hidden_size
        self.self_attn = Olmo3Attention(config=config, layer_idx=layer_idx)
        self.mlp = Olmo3MLP(config)
        self.post_attention_layernorm = Olmo3RMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_feedforward_layernorm = Olmo3RMSNorm(config.hidden_size, eps=config.rms_norm_eps)

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        use_cache: Optional[bool] = False,
        cache_position: Optional[torch.LongTensor] = None,
        position_embeddings: Optional[tuple[torch.Tensor, torch.Tensor]] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> torch.Tensor:
        residual = hidden_states
        hidden_states, _ = self.self_attn(
            hidden_states=hidden_states,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            use_cache=use_cache,
            cache_position=cache_position,
            position_embeddings=position_embeddings,
            **kwargs,
        )
        hidden_states = self.post_attention_layernorm(hidden_states)
        hidden_states = residual + hidden_states

        residual = hidden_states
        hidden_states = self.mlp(hidden_states)
        hidden_states = self.post_feedforward_layernorm(hidden_states)
        hidden_states = residual + hidden_states
        return hidden_states


class Olmo3RotaryEmbedding(nn.Module):
    inv_freq: torch.Tensor

    def __init__(self, config: Olmo3Config, device=None, rope_type: Optional[str] = None):
        super().__init__()
        if rope_type is not None:
            self.rope_type = rope_type
        elif hasattr(config, "rope_scaling") and isinstance(config.rope_scaling, dict):
            self.rope_type = config.rope_scaling.get("rope_type", config.rope_scaling.get("type"))
        else:
            self.rope_type = "default"
        assert self.rope_type is not None
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings

        self.config = config
        self.rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]

        inv_freq, self.attention_scaling = self.rope_init_fn(self.config, device)
        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self.original_inv_freq = self.inv_freq

    @torch.no_grad()
    @dynamic_rope_update  # power user: used with advanced RoPE types (e.g. dynamic rope)
    def forward(self, x, position_ids):
        inv_freq_expanded = self.inv_freq[None, :, None].float().expand(position_ids.shape[0], -1, 1).to(x.device)
        position_ids_expanded = position_ids[:, None, :].float()

        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
        with torch.autocast(device_type=device_type, enabled=False):  # Force float32
            freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(1, 2)
            emb = torch.cat((freqs, freqs), dim=-1)
            cos = emb.cos() * self.attention_scaling
            sin = emb.sin() * self.attention_scaling

        return cos, sin


@auto_docstring
class Olmo3PreTrainedModel(PreTrainedModel):
    config: Olmo3Config
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["Olmo3DecoderLayer"]
    _skip_keys_device_placement = ["past_key_values"]
    _supports_flash_attn = True
    _supports_sdpa = True
    _supports_flex_attn = True
    _can_compile_fullgraph = True
    _supports_attention_backend = True
    _can_record_outputs = {
        "hidden_states": Olmo3DecoderLayer,
        "attentions": Olmo3Attention,
    }


@auto_docstring
class Olmo3Model(Olmo3PreTrainedModel):
    def __init__(self, config: Olmo3Config):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        self.layers = nn.ModuleList(
            [Olmo3DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.norm = Olmo3RMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.gradient_checkpointing = False
        self.rotary_embs = nn.ModuleDict(
            {
                "full_attention": Olmo3RotaryEmbedding(config=config),
                "sliding_attention": Olmo3RotaryEmbedding(config=config, rope_type="default"),
            }
        )

        # Initialize weights and apply final processing
        self.post_init()

    @check_model_inputs
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> BaseModelOutputWithPast:
        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        if use_cache and past_key_values is None:
            past_key_values = DynamicCache(config=self.config)

        if cache_position is None:
            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )

        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        if not isinstance(causal_mask_mapping := attention_mask, dict):
            mask_kwargs = {
                "config": self.config,
                "input_embeds": inputs_embeds,
                "attention_mask": attention_mask,
                "cache_position": cache_position,
                "past_key_values": past_key_values,
                "position_ids": position_ids,
            }
            causal_mask_mapping = {
                "full_attention": create_causal_mask(**mask_kwargs),
                "sliding_attention": create_sliding_window_causal_mask(**mask_kwargs),
            }

        hidden_states = inputs_embeds

        position_embeddings_mapping = {
            "full_attention": self.rotary_embs["full_attention"](hidden_states, position_ids),
            "sliding_attention": self.rotary_embs["sliding_attention"](hidden_states, position_ids),
        }

        for decoder_layer in self.layers[: self.config.num_hidden_layers]:
            hidden_states = decoder_layer(
                hidden_states,
                attention_mask=causal_mask_mapping[decoder_layer.self_attn.attention_type],
                position_ids=position_ids,
                past_key_values=past_key_values,
                use_cache=use_cache,
                cache_position=cache_position,
                position_embeddings=position_embeddings_mapping[decoder_layer.self_attn.attention_type],
                **kwargs,
            )

        hidden_states = self.norm(hidden_states)
        return BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values,
        )


@auto_docstring
class Olmo3ForCausalLM(Olmo3PreTrainedModel, GenerationMixin):
    _tied_weights_keys = ["lm_head.weight"]
    _tp_plan = {"lm_head": "colwise_rep"}
    _pp_plan = {"lm_head": (["hidden_states"], ["logits"])}

    def __init__(self, config):
        super().__init__(config)
        self.model = Olmo3Model(config)
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)

        # Initialize weights and apply final processing
        self.post_init()

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: Union[int, torch.Tensor] = 0,
        **kwargs: Unpack[TransformersKwargs],
    ) -> CausalLMOutputWithPast:
        r"""
        Example:

        ```python
        >>> from transformers import AutoTokenizer, Olmo3ForCausalLM

        >>> model = Olmo3ForCausalLM.from_pretrained("meta-olmo3/Olmo3-2-7b-hf")
        >>> tokenizer = AutoTokenizer.from_pretrained("meta-olmo3/Olmo3-2-7b-hf")

        >>> prompt = "Hey, are you conscious? Can you talk to me?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
        ```"""
        outputs: BaseModelOutputWithPast = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = outputs.last_hidden_state
        # Only compute necessary logits, and do not upcast them to float if we are not computing the loss
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])

        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


__all__ = ["Olmo3ForCausalLM", "Olmo3Model", "Olmo3PreTrainedModel"]