o
    ei[                     @   sp  d dl mZ d dlmZ d dlZd dlmZ ddlmZ ddlm	Z	m
Z
 ddlmZ dd	lmZmZmZ dd
lmZmZ ddlmZ ddlmZmZmZmZ ddlmZmZ ddlmZm Z  ddl!m"Z"m#Z# ddl$m%Z% ddl&m'Z'm(Z(m)Z) ddl*m+Z+m,Z, ddl-m.Z. ddl/m0Z0 G dd dej1Z2dd Z3eddCddZ4dej5de6dej5fd d!Z7	"dDd#ej1d$ej5d%ej5d&ej5d'ej5dB d(e8d)e8d*e%e' fd+d,Z9ee4G d-d. d.ej1Z:ed/G d0d1 d1ej1Z;G d2d3 d3ej1Z<G d4d5 d5eZ=e(G d6d7 d7e#Z>e(G d8d9 d9e>Z?e(G d:d; d;e>eZ@G d<d= d=ee>ZAG d>d? d?ee>ZBG d@dA dAee>ZCg dBZDdS )E    )Callable)OptionalN)nn   )ACT2FN)CacheDynamicCache)GenerationMixin)use_kernel_forward_from_hubuse_kernel_func_from_hubuse_kernelized_func)create_causal_mask!create_sliding_window_causal_mask)FlashAttentionKwargs)GenericForQuestionAnswering GenericForSequenceClassificationGenericForTokenClassificationGradientCheckpointingLayer)BaseModelOutputWithPastCausalLMOutputWithPast)ROPE_INIT_FUNCTIONSdynamic_rope_update)ALL_ATTENTION_FUNCTIONSPreTrainedModel)Unpack)TransformersKwargsauto_docstringcan_return_tuple)maybe_autocastmerge_with_config_defaults)capture_outputs   )SmolLM3Configc                       s~   e Zd ZU ejed< ddef fddZe			ddedB de	d de
dB d	ed
ef fddZe edd Z  ZS )SmolLM3RotaryEmbeddinginv_freqNconfigc                    s   t    |j| _|j| _|| _| jjd | _| j}| jdkr$t	| j }|| j|\}| _
| jd|dd | jd| dd d S )N	rope_typedefaultr$   F)
persistentoriginal_inv_freq)super__init__max_position_embeddingsmax_seq_len_cachedoriginal_max_seq_lenr%   rope_parametersr&   compute_default_rope_parametersr   attention_scalingregister_bufferclone)selfr%   devicerope_init_fnr$   	__class__ j/home/ubuntu/transcripts/venv/lib/python3.10/site-packages/transformers/models/smollm3/modeling_smollm3.pyr+   4   s   


zSmolLM3RotaryEmbedding.__init__r5   ztorch.deviceseq_lenreturnztorch.Tensorc                 C   sZ   | j d }t| ddp| j| j }d}d|tjd|dtjdj|tjd|   }||fS )	a  
        Computes the inverse frequencies according to the original RoPE implementation
        Args:
            config ([`~transformers.PreTrainedConfig`]):
                The model configuration.
            device (`torch.device`):
                The device to use for initialization of the inverse frequencies.
            seq_len (`int`, *optional*):
                The current sequence length. Unused for this type of RoPE.
        Returns:
            Tuple of (`torch.Tensor`, `float`), containing the inverse frequencies for the RoPE embeddings and the
            post-processing scaling factor applied to the computed cos/sin (unused in this type of RoPE).
        
rope_thetahead_dimNg      ?r      dtype)r5   rA   )	r/   getattrhidden_sizenum_attention_headstorcharangeint64tofloat)r%   r5   r;   basedimattention_factorr$   r9   r9   r:   r0   D   s   
&z6SmolLM3RotaryEmbedding.compute_default_rope_parametersc           
      C   s   | j d d d d f  |jd dd|j}|d d d d d f  }t|jjtr6|jjdkr6|jjnd}t	|dd+ | |  
dd}tj||fdd	}| | j }| | j }	W d    n1 slw   Y  |j|jd
|	j|jd
fS )Nr   r!   mpscpuF)device_typeenabledr?   rK   r@   )r$   rI   expandshaperH   r5   
isinstancetypestrr   	transposerE   catcosr1   sinrA   )
r4   xposition_idsinv_freq_expandedposition_ids_expandedrP   freqsembrZ   r[   r9   r9   r:   forwardb   s   0&zSmolLM3RotaryEmbedding.forwardN)NNN)__name__
__module____qualname__rE   Tensor__annotations__r"   r+   staticmethodr   inttuplerI   r0   no_gradr   rb   __classcell__r9   r9   r7   r:   r#   1   s&   
 

r#   c                 C   sH   | dd| j d d f }| d| j d d df }tj| |fddS )z*Rotates half the hidden dims of the input..NrM   r?   rR   )rT   rE   rY   )r\   x1x2r9   r9   r:   rotate_halfr   s   rp   rotary_pos_embc                 C   sD   | |}| |}| | t| |  }|| t||  }||fS )a  Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    )	unsqueezerp   )qkrZ   r[   unsqueeze_dimq_embedk_embedr9   r9   r:   apply_rotary_pos_emby   s
   

rx   hidden_statesn_repr<   c                 C   s^   | j \}}}}|dkr| S | dddddddddf |||||} | ||| ||S )z
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    r!   N)rT   rS   reshape)ry   rz   batchnum_key_value_headsslenr>   r9   r9   r:   	repeat_kv   s
   0r           modulequerykeyvalueattention_maskscalingdropoutkwargsc                 K   s   t || j}t || j}	t||dd| }
|d ur |
| }
tjj|
dtjd	|j
}
tjj|
|| jd}
t|
|	}|dd }||
fS )Nr?   r   rM   )rK   rA   )ptrainingr!   )r   num_key_value_groupsrE   matmulrX   r   
functionalsoftmaxfloat32rH   rA   r   r   
contiguous)r   r   r   r   r   r   r   r   
key_statesvalue_statesattn_weightsattn_outputr9   r9   r:   eager_attention_forward   s   
r   c                       s   e Zd ZdZdedef fddZ		ddejde	ejejf d	ejdB d
e
dB dejdB dee de	ejejdB f fddZ  ZS )SmolLM3Attentionz=Multi-headed attention from 'Attention Is All You Need' paperr%   	layer_idxc                    s  t    || _|| _t|d|j|j | _|j|j | _	| jd | _
|j| _d| _tj|j|j| j |jd| _tj|j|j| j |jd| _tj|j|j| j |jd| _tj|j| j |j|jd| _|j| | _|jr||j| dkr||j| _d S d | _d S )Nr>   g      Tbiassliding_attention)r*   r+   r%   r   rB   rC   rD   r>   r}   r   r   attention_dropout	is_causalr   Linearattention_biasq_projk_projv_projo_projno_rope_layersuse_ropeuse_sliding_windowlayer_typessliding_windowr4   r%   r   r7   r9   r:   r+      s8   
zSmolLM3Attention.__init__Nry   position_embeddingsr   past_key_valuescache_positionr   r<   c                 K   s  |j d d }g |d| jR }| ||dd}	| ||dd}
| ||dd}| jrE|\}}t|	|
||\}	}
|d urXd|i}|	|
|| j
|\}
}t| jjt}|| |	|
||f| jsldn| j| j| jd|\}}|jg |dR   }| |}||fS )NrM   r!   r?   r   r   )r   r   r   )rT   r>   r   viewrX   r   r   r   rx   updater   r   get_interfacer%   _attn_implementationr   r   r   r   r   r{   r   r   )r4   ry   r   r   r   r   r   input_shapehidden_shapequery_statesr   r   rZ   r[   cache_kwargsattention_interfacer   r   r9   r9   r:   rb      s<   		

zSmolLM3Attention.forward)NN)rd   re   rf   __doc__r"   rj   r+   rE   rg   rk   r   
LongTensorr   r   rb   rm   r9   r9   r7   r:   r      s(    #r   RMSNormc                       sF   e Zd Zddeddf fddZdejdejfdd	Zd
d Z  Z	S )SmolLM3RMSNormư>epsr<   Nc                    s&   t    tt|| _|| _dS )z=
        SmolLM3RMSNorm is equivalent to T5LayerNorm
        N)r*   r+   r   	ParameterrE   onesweightvariance_epsilon)r4   rC   r   r7   r9   r:   r+   	  s   

zSmolLM3RMSNorm.__init__ry   c                 C   sJ   |j }|tj}|djddd}|t|| j  }| j|| S )Nr?   rM   T)keepdim)	rA   rH   rE   r   powmeanrsqrtr   r   )r4   ry   input_dtypevariancer9   r9   r:   rb     s
   zSmolLM3RMSNorm.forwardc                 C   s   t | jj d| j S )Nz, eps=)rk   r   rT   r   )r4   r9   r9   r:   
extra_repr  s   zSmolLM3RMSNorm.extra_repr)r   )
rd   re   rf   rI   r+   rE   rg   rb   r   rm   r9   r9   r7   r:   r     s    r   c                       s$   e Zd Z fddZdd Z  ZS )
SmolLM3MLPc                    sx   t    || _|j| _|j| _tj| j| j|jd| _tj| j| j|jd| _	tj| j| j|jd| _
t|j | _d S )Nr   )r*   r+   r%   rC   intermediate_sizer   r   mlp_bias	gate_projup_proj	down_projr   
hidden_actact_fnr4   r%   r7   r9   r:   r+     s   
zSmolLM3MLP.__init__c                 C   s$   |  | | || | }|S rc   )r   r   r   r   )r4   r\   r   r9   r9   r:   rb   '  s    zSmolLM3MLP.forward)rd   re   rf   r+   rb   rm   r9   r9   r7   r:   r     s    
r   c                       s   e Zd Zdedef fddZ						ddejdejdB d	ejdB d
e	dB de
dB dejdB deejejf dB dee dejfddZ  ZS )SmolLM3DecoderLayerr%   r   c                    s^   t    |j| _t||d| _t|| _t|j|jd| _	t|j|jd| _
|j| | _d S )N)r%   r   r   )r*   r+   rC   r   	self_attnr   mlpr   rms_norm_epsinput_layernormpost_attention_layernormr   attention_typer   r7   r9   r:   r+   -  s   

zSmolLM3DecoderLayer.__init__NFry   r   r]   r   	use_cacher   r   r   r<   c              
   K   s^   |}	|  |}| jd|||||||d|\}}
|	| }|}	| |}| |}|	| }|S )N)ry   r   r]   r   r   r   r   r9   )r   r   r   r   )r4   ry   r   r]   r   r   r   r   r   residual_r9   r9   r:   rb   8  s&   




zSmolLM3DecoderLayer.forward)NNNFNN)rd   re   rf   r"   rj   r+   rE   rg   r   r   boolrk   r   r   rb   rm   r9   r9   r7   r:   r   ,  s6    	
r   c                   @   sH   e Zd ZU eed< dZdZdgZdgZdZ	dZ
dZdZdZeedZdS )SmolLM3PreTrainedModelr%   modelTr   r   )ry   
attentionsN)rd   re   rf   r"   rh   base_model_prefixsupports_gradient_checkpointing_no_split_modules_skip_keys_device_placement_supports_flash_attn_supports_sdpa_supports_flex_attn_can_compile_fullgraph_supports_attention_backendr   r   _can_record_outputsr9   r9   r9   r:   r   Z  s   
 
r   c                       s   e Zd Zdef fddZeee							ddej	dB dej
dB dej	dB dedB d	ejdB d
edB dej	dB dee defddZ  ZS )SmolLM3Modelr%   c                    s   t     j| _ j| _t j j| j| _t	 fddt
 jD | _t j jd| _t d| _d| _d| jjv | _|   d S )Nc                    s   g | ]}t  |qS r9   )r   ).0r   r%   r9   r:   
<listcomp>v  s    z)SmolLM3Model.__init__.<locals>.<listcomp>r   r   Fr   )r*   r+   pad_token_idpadding_idx
vocab_sizer   	EmbeddingrC   embed_tokens
ModuleListrangenum_hidden_layerslayersr   r   normr#   
rotary_embgradient_checkpointingr%   r   has_sliding_layers	post_initr   r7   r   r:   r+   o  s   zSmolLM3Model.__init__N	input_idsr   r]   r   inputs_embedsr   r   r   r<   c              
   K   sF  |d u |d uA rt d|d u r| |}|r!|d u r!t| jd}|d u r=|d ur-| nd}	tj|	|	|jd  |jd}|d u rF|	d}t
| }
tsl| j|||||d}dtdi |i}
| jrltdi ||
d< |}| ||}| jd | jj D ]}||f|
|j |||||d	|}q}| |}t||r|d
S d d
S )Nz:You must specify exactly one of input_ids or inputs_embedsr   r   r!   )r5   )r%   r   r   r   r   r]   full_attentionr   )r   r   r]   r   r   r   )last_hidden_stater   r9   )
ValueErrorr   r   r%   get_seq_lengthrE   rF   rT   r5   rr   rU   dictr   r   r   r   r   r   r   r   r   )r4   r   r   r]   r   r   r   r   r   past_seen_tokenscausal_mask_mappingmask_kwargsry   r   decoder_layerr9   r9   r:   rb     s^   



zSmolLM3Model.forward)NNNNNNN)rd   re   rf   r"   r+   r   r    r   rE   r   rg   r   FloatTensorr   r   r   r   rb   rm   r9   r9   r7   r:   r   m  s>    	
r   c                       s   e Zd ZddiZddiZddgdgfiZ fddZee																	
dde	j
d	B de	jd	B de	j
d	B ded	B de	jd	B de	j
d	B ded	B de	j
d	B dee	jB dee defddZ  ZS )SmolLM3ForCausalLMzlm_head.weightzmodel.embed_tokens.weightlm_headcolwise_gather_outputry   logitsc                    s@   t  | t|| _|j| _tj|j|jdd| _| 	  d S )NFr   )
r*   r+   r   r   r   r   r   rC   r  r   r   r7   r9   r:   r+     s
   
zSmolLM3ForCausalLM.__init__Nr   r   r   r]   r   r   labelsr   r   logits_to_keepr   r<   c
              
   K   s   | j d|||||||d|
}|j}t|	trt|	 dn|	}| |dd|ddf }d}|durB| jd||| jjd|
}t	|||j
|j|jdS )a  
        Example:

        ```python
        >>> from transformers import AutoTokenizer, SmolLM3ForCausalLM

        >>> model = SmolLM3ForCausalLM.from_pretrained("meta-smollm3/SmolLM3-2-7b-hf")
        >>> tokenizer = AutoTokenizer.from_pretrained("meta-smollm3/SmolLM3-2-7b-hf")

        >>> prompt = "Hey, are you conscious? Can you talk to me?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
        ```)r   r   r]   r   r   r   r   N)r	  r
  r   )lossr	  r   ry   r   r9   )r   r   rU   rj   slicer  loss_functionr%   r   r   r   ry   r   )r4   r   r   r]   r   r   r
  r   r   r  r   outputsry   slice_indicesr	  r  r9   r9   r:   rb     s0    zSmolLM3ForCausalLM.forward)	NNNNNNNNr   )rd   re   rf   _tied_weights_keys_tp_plan_pp_planr+   r   r   rE   r   rg   r   r  r   rj   r   r   r   rb   rm   r9   r9   r7   r:   r    sN    		
r  c                   @      e Zd ZdS ) SmolLM3ForSequenceClassificationNrd   re   rf   r9   r9   r9   r:   r        r  c                   @   r  )SmolLM3ForTokenClassificationNr  r9   r9   r9   r:   r    r  r  c                   @   s   e Zd ZdZdS )SmolLM3ForQuestionAnsweringtransformerN)rd   re   rf   r   r9   r9   r9   r:   r    s    r  )r   r   r  r  r  r  )r!   )r   )Ecollections.abcr   typingr   rE   r   activationsr   cache_utilsr   r   
generationr	   integrationsr
   r   r   masking_utilsr   r   modeling_flash_attention_utilsr   modeling_layersr   r   r   r   modeling_outputsr   r   modeling_rope_utilsr   r   modeling_utilsr   r   processing_utilsr   utilsr   r   r   utils.genericr   r   utils.output_capturingr    configuration_smollm3r"   Moduler#   rp   rx   rg   rj   r   rI   r   r   r   r   r   r   r   r  r  r  r  __all__r9   r9   r9   r:   <module>   st   A
N.[K