
    Z jL[                     ,   S SK Jr  S SKJr  S SKrS SKJr  SSKJr  SSKJ	r	J
r
  SSKJr  SS	KJr  SS
KJr  SSKJr  SSKJr  SSKJrJr  SSKJrJr  SSKJrJr  SSKJr  SSK J!r!J"r"J#r#  SSK$J%r%J&r&  SSK'J(r(  SSK)J*r*   " S S\RV                  5      r, " S S\RV                  5      r- " S S\RV                  5      r.S\R^                  S\0S\R^                  4S jr1 S6S \RV                  S!\R^                  S"\R^                  S#\R^                  S$\R^                  S-  S%\2S&\2S'\\!   4S( jjr3S) r4S7S* jr5\" \55       " S+ S,\RV                  5      5       r6 " S- S.\5      r7\" " S/ S0\5      5       r8\" " S1 S2\85      5       r9\" " S3 S4\8\5      5       r:/ S5Qr;g)8    )Callable)OptionalN)nn   )ACT2FN)CacheDynamicCache)GenerationMixin)use_kernelized_func)create_causal_mask)FlashAttentionKwargs)GradientCheckpointingLayer)BaseModelOutputWithPastCausalLMOutputWithPast)ROPE_INIT_FUNCTIONSdynamic_rope_update)ALL_ATTENTION_FUNCTIONSPreTrainedModel)Unpack)TransformersKwargsauto_docstringcan_return_tuple)maybe_autocastmerge_with_config_defaults)capture_outputs   )CohereConfigc                   2   ^  \ rS rSrSU 4S jjrS rSrU =r$ )CohereLayerNorm4   c                    > [         TU ]  5         [        R                  " [        R
                  " U5      5      U l        X l        g)zcThe hidden size can be a tuple or an int. The tuple is used for QKNorm to normalize across head_dimN)super__init__r   	Parametertorchonesweightvariance_epsilon)selfhidden_sizeepsbias	__class__s       {/root/GenerationalWealth/GenerationalWealth/venv/lib/python3.13/site-packages/transformers/models/cohere/modeling_cohere.pyr#   CohereLayerNorm.__init__5   s-    ll5::k#:; #    c                    UR                   nUR                  [        R                  5      nUR	                  SSS9nX-
  R                  S5      R	                  SSS9nX-
  [        R                  " X@R                  -   5      -  nU R                  R                  [        R                  5      U-  nUR                  U5      $ )NT)keepdim   )	dtypetor%   float32meanpowrsqrtr(   r'   )r)   hidden_statesinput_dtyper8   variances        r.   forwardCohereLayerNorm.forward;   s    #))%((7!!"d!3!(--a055b$5G&-XH]H]=]1^^u}}5E,,r0   )r(   r'   )Ngh㈵>F__name__
__module____qualname____firstlineno__r#   r>   __static_attributes____classcell__r-   s   @r.   r   r   4   s    $- -r0   r   c                      ^  \ rS rSr% \R
                  \S'   SS\4U 4S jjjr\	   SS\S-  S\
S   S\S-  S	\S
\4   4S jj5       r\R                  " 5       \S 5       5       rSrU =r$ )CohereRotaryEmbeddingE   inv_freqNconfigc                   > [         TU ]  5         UR                  U l        UR                  U l        Xl        U R
                  R                  S   U l        U R                  nU R                  S:w  a  [        U R                     nU" U R
                  U5      u  o@l
        U R                  SUSS9  U R                  SUR                  5       SS9  g )N	rope_typedefaultrK   F)
persistentoriginal_inv_freq)r"   r#   max_position_embeddingsmax_seq_len_cachedoriginal_max_seq_lenrL   rope_parametersrN   compute_default_rope_parametersr   attention_scalingregister_bufferclone)r)   rL   devicerope_init_fnrK   r-   s        r.   r#   CohereRotaryEmbedding.__init__H   s    "("@"@$*$B$B!44[A!%!E!E>>Y&.t~~>L+7V+L((ZeD0(..2BuUr0   rZ   ztorch.deviceseq_lenreturnztorch.Tensorc           	         U R                   S   n[        U SS5      =(       d    U R                  U R                  -  nSnSU[        R
                  " SUS[        R                  S9R                  U[        R                  S9U-  -  -  nXe4$ )	aH  
Computes the inverse frequencies according to the original RoPE implementation
Args:
    config ([`~transformers.PreTrainedConfig`]):
        The model configuration.
    device (`torch.device`):
        The device to use for initialization of the inverse frequencies.
    seq_len (`int`, *optional*):
        The current sequence length. Unused for this type of RoPE.
Returns:
    Tuple of (`torch.Tensor`, `float`), containing the inverse frequencies for the RoPE embeddings and the
    post-processing scaling factor applied to the computed cos/sin (unused in this type of RoPE).

rope_thetahead_dimNg      ?r   r4   r5   )rZ   r5   )	rU   getattrr*   num_attention_headsr%   arangeint64r6   float)rL   rZ   r]   basedimattention_factorrK   s          r.   rV   5CohereRotaryEmbedding.compute_default_rope_parametersX   s    & %%l3fj$/c63E3EIcIc3c U\\!S!5;;?BB&X]XcXcBdgjjk
 ))r0   c                    U R                   S S S 2S 4   R                  5       R                  UR                  S   SS5      nUS S 2S S S 24   R                  5       n[	        UR
                  R                  [        5      (       a0  UR
                  R                  S:w  a  UR
                  R                  OSn[        USS9   UR                  5       UR                  5       -  R                  SS5      n[        R                  " USSS	9nUR                  5       U R                  -  nUR                  5       U R                  -  n	S S S 5        WR                  UR                   S
9W	R                  UR                   S
94$ ! , (       d  f       N@= f)Nr   r2   r   mpscpuF)device_typeenabledr4   ri   rb   )rK   rg   expandshape
isinstancerZ   typestrr   	transposer%   repeat_interleavecosrW   sinr6   r5   )
r)   xposition_idsinv_freq_expandedposition_ids_expandedro   freqsembry   rz   s
             r.   r>   CohereRotaryEmbedding.forwardv   s>    !MM$4-8>>@GGHZHZ[\H]_acde ,QaZ 8 > > @'1!((--'E'E!((--[`J`ahhmmfkUC&,,.1F1L1L1NNYYZ[]^_E))%;C'')d444C'')d444C	 D vvAGGv$cff177f&;;; DCs    BE<<
F
)rW   rL   rS   rT   rN   N)NNN)rA   rB   rC   rD   r%   Tensor__annotations__r   r#   staticmethodr   inttuplerg   rV   no_gradr   r>   rE   rF   rG   s   @r.   rI   rI   E   s    llV| V V  &*+/"*t#*(* t* 
~u$	%	* *: ]]_<  <r0   rI   c                   .   ^  \ rS rSrU 4S jrS rSrU =r$ )	CohereMLP   c                   > [         TU ]  5         Xl        UR                  U l        UR                  U l        [
        R                  " U R                  U R                  SS9U l        [
        R                  " U R                  U R                  SS9U l        [
        R                  " U R                  U R                  SS9U l	        [        UR                     U l        g NFr,   )r"   r#   rL   r*   intermediate_sizer   Linear	gate_projup_proj	down_projr   
hidden_actact_fnr)   rL   r-   s     r.   r#   CohereMLP.__init__   s    !--!'!9!94#3#3T5K5KRWXyy!1!143I3IPUV4#9#94;K;KRWXV../r0   c                     U R                  U R                  U R                  U5      5      U R                  U5      -  5      nU$ r   )r   r   r   r   )r)   r{   r   s      r.   r>   CohereMLP.forward   s6    NN4;;t~~a/@#ADLLQRO#ST	r0   )r   rL   r   r   r*   r   r   r@   rG   s   @r.   r   r      s    0 r0   r   r;   n_repr^   c                     U R                   u  p#pEUS:X  a  U $ U SS2SS2SSS2SS24   R                  X#XU5      n U R                  X#U-  XE5      $ )z
This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
r   N)rs   rr   reshape)r;   r   batchnum_key_value_headsslenra   s         r.   	repeat_kvr      s_    
 2?1D1D.Ez!!Qa"23::5W\dlmM  e(CTTTr0   modulequerykeyvalueattention_maskscalingdropoutkwargsc                    [        X R                  5      n[        X0R                  5      n	[        R                  " XR	                  SS5      5      U-  n
Ub  X-   n
[
        R                  R                  U
S[        R                  S9R                  UR                  5      n
[
        R                  R                  XU R                  S9n
[        R                  " X5      nUR	                  SS5      R                  5       nX4$ )Nr4   r   r2   )ri   r5   )ptrainingr   )r   num_key_value_groupsr%   matmulrw   r   
functionalsoftmaxr7   r6   r5   r   r   
contiguous)r   r   r   r   r   r   r   r   
key_statesvalue_statesattn_weightsattn_outputs               r.   eager_attention_forwardr      s     3 ; ;<JU$?$?@L<<';';Aq'ABWLL!#4==((2U]](SVVW\WbWbcL==((6??([L,,|:K''1-88:K$$r0   c                 |    U SS S S24   nU SSS S24   n[         R                  " U* U/SS9R                  S5      nU$ )N.r4   r   r2   rq   )r%   stackflatten)r{   x1x2rot_xs       r.   rotate_halfr      sL    	
3!8B	
319BKK"b	r*2226ELr0   c                 &   U R                   nU R                  5       n UR                  5       nUR                  U5      nUR                  U5      nX-  [        U 5      U-  -   nX-  [        U5      U-  -   nUR	                  US9UR	                  US94$ )aI  Applies Rotary Position Embedding to the query and key tensors.

Args:
    q (`torch.Tensor`): The query tensor.
    k (`torch.Tensor`): The key tensor.
    cos (`torch.Tensor`): The cosine part of the rotary embedding.
    sin (`torch.Tensor`): The sine part of the rotary embedding.
    unsqueeze_dim (`int`, *optional*, defaults to 1):
        The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
        sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
        that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
        k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
        cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
        the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
Returns:
    `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
rb   )r5   rg   	unsqueezer   r6   )qkry   rz   unsqueeze_dimr5   q_embedk_embeds           r.   apply_rotary_pos_embr      s    $ GGE		A		A
--
&C
--
&Cw;q>C/0Gw;q>C/0G::E:"GJJUJ$;;;r0   c                     ^  \ rS rSrSrSS\S\S-  4U 4S jjjr SS\R                  S\
\R                  \R                  4   S	\R                  S-  S
\S-  S\\   S\
\R                  \R                  S-  4   4S jjrSrU =r$ )CohereAttention   z=Multi-headed attention from 'Attention Is All You Need' paperNrL   	layer_idxc                 R  > [         TU ]  5         Xl        X l        [	        USUR
                  UR                  -  5      U l        UR                  UR                  -  U l	        U R                  S-  U l
        UR                  U l        SU l        [        R                  " UR
                  UR                  U R                  -  UR                  S9U l        [        R                  " UR
                  UR                  U R                  -  UR                  S9U l        [        R                  " UR
                  UR                  U R                  -  UR                  S9U l        [        R                  " UR                  U R                  -  UR
                  UR                  S9U l        UR(                  U l        U R(                  (       a_  [+        UR                  U R                  4UR,                  S9U l        [+        UR                  U R                  4UR,                  S9U l        g g )Nra   g      Tr   r*   r+   )r"   r#   rL   r   rc   r*   rd   ra   r   r   r   attention_dropout	is_causalr   r   attention_biasq_projk_projv_projo_projuse_qk_normr   layer_norm_epsq_normk_normr)   rL   r   r-   s      r.   r#   CohereAttention.__init__   s   "
F4F4F&JdJd4de$*$>$>&B\B\$\!}}d*!'!9!9ii : :T]] JQWQfQf
 ii : :T]] JQWQfQf
 ii : :T]] JQWQfQf
 ii&&68J8JQWQfQf
 "--)#77GVMbMbDK *#77GVMbMbDK r0   r;   position_embeddingsr   past_key_valuesr   r^   c                 z   UR                   S S n/ UQSPU R                  P7nU R                  U5      R                  U5      nU R	                  U5      R                  U5      n	U R                  U5      R                  U5      n
U R                  (       a"  U R                  U5      nU R                  U	5      n	UR                  SS5      nU	R                  SS5      n	U
R                  SS5      n
Uu  p[        XX5      u  pUb  UR                  XU R                  5      u  p[        R                  " U R                  R                   ["        5      nU" U UU	U
U4U R$                  (       d  SOU R&                  U R(                  S.UD6u  pUR*                  " / UQSP76 R-                  5       nU R/                  U5      nX4$ )Nr2   r   r4           )r   r   )rs   ra   r   viewr   r   r   r   r   rw   r   updater   r   get_interfacerL   _attn_implementationr   r   r   r   r   r   r   )r)   r;   r   r   r   r   input_shapehidden_shapequery_statesr   r   ry   rz   attention_interfacer   r   s                   r.   r>   CohereAttention.forward  s    $))#2.88b8$--8{{=166|D[[/44\B
{{=166|D;;|4LZ0J#--a3))!Q/
#--a3&#7RU#[ &'6'='=jX\XfXf'g$J(?(M(MKK,,.E)
 %8	%
  $}}C$2H2HLL	%
 	%
! "));;;;FFHkk+.((r0   )r   rL   ra   r   r   r   r   r   r   r   r   r   r   r   r   )rA   rB   rC   rD   __doc__r   r   r#   r%   r   r   r   r   r   r>   rE   rF   rG   s   @r.   r   r      s    G| d
  J )-.)||.) #5<<#=>.) t+	.)
 .) -..) 
u||U\\D00	1.) .)r0   r   c                   T  ^  \ rS rSrS\S\4U 4S jjr     SS\R                  S\R                  S-  S\R                  S-  S	\
S-  S
\S-  S\\R                  \R                  4   S-  S\\   S\\R                  \\R                  \R                  4   S-  4   4S jjrSrU =r$ )CohereDecoderLayeri4  rL   r   c                    > [         TU ]  5         UR                  U l        [        XS9U l        [        U5      U l        [        UR                  UR                  S9U l	        g )N)rL   r   r   )
r"   r#   r*   r   	self_attnr   mlpr   r   input_layernormr   s      r.   r#   CohereDecoderLayer.__init__5  sP    !--(LV$.F<N<NU[UjUjkr0   Nr;   r   r|   r   	use_cacher   r   r^   c           
          UnU R                  U5      nU R                  " SUUUUUUS.UD6u  pU R                  U5      nX-   U-   nU$ )a<  
Args:
    hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)`
    attention_mask (`torch.FloatTensor`, *optional*):
        attention mask of size `(batch_size, sequence_length)` if flash attention is used or `(batch_size, 1,
        query_sequence_length, key_sequence_length)` if default attention is used.
    past_key_values (`Cache`, *optional*): cached past key and value projection states
    output_attentions (`bool`, *optional*):
        Whether or not to return the attentions tensors of all attention layers. See `attentions` under
        returned tensors for more detail.
    use_cache (`bool`, *optional*):
        If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding
        (see `past_key_values`).
    position_embeddings (`tuple[torch.FloatTensor, torch.FloatTensor]`, *optional*):
        Tuple containing the cosine and sine positional embeddings of shape `(batch_size, seq_len, head_dim)`,
        with `head_dim` being the embedding dimension of each attention head.
)r;   r   r|   r   r   r    )r   r   r   )r)   r;   r   r|   r   r   r   r   residualhidden_states_attention_hidden_states_mlps               r.   r>   CohereDecoderLayer.forward<  sq    6 !,,];%)^^ &
')%+ 3&
 &
" !HH]3 :=NNr0   )r*   r   r   r   )NNNFN)rA   rB   rC   rD   r   r   r#   r%   r   
LongTensorr   boolr   r   r   FloatTensorr>   rE   rF   rG   s   @r.   r   r   4  s    l| l l /304(,!&HL*||* t+* &&-	*
 * $;* #5<<#=>E* -.* 
u  %(9(95;L;L(L"MPT"TT	U* *r0   r   c                   R    \ rS rSr% \\S'   SrSrS/rS/r	Sr
SrSrSrSr\\S.rSrg	)
CoherePreTrainedModelii  rL   modelTr   r   )r;   
attentionsr   N)rA   rB   rC   rD   r   r   base_model_prefixsupports_gradient_checkpointing_no_split_modules_skip_keys_device_placement_supports_flash_attn_supports_sdpa_supports_flex_attn_can_compile_fullgraph_supports_attention_backendr   r   _can_record_outputsrE   r   r0   r.   r   r   i  sQ    &*#-.#4"5N!"&+%r0   r   c                     ^  \ rS rSrS\4U 4S jjr\\\      SS\	R                  S-  S\	R                  S-  S\	R                  S-  S\S-  S	\	R                  S-  S
\S-  S\\   S\4S jj5       5       5       rSrU =r$ )CohereModeli|  rL   c           	        > [         TU ]  U5        UR                  U l        UR                  U l        [
        R                  " UR                  UR                  U R                  5      U l        [
        R                  " [        UR                  5       Vs/ s H  n[        X5      PM     sn5      U l        [        UR                  UR                  S9U l        [#        US9U l        SU l        U R)                  5         g s  snf )Nr   rL   F)r"   r#   pad_token_idpadding_idx
vocab_sizer   	Embeddingr*   embed_tokens
ModuleListrangenum_hidden_layersr   layersr   r   normrI   
rotary_embgradient_checkpointing	post_initr   s      r.   r#   CohereModel.__init__~  s     !.. ++LL):):F<N<NPTP`P`ammDI&JbJbDcdDcy2Dcd
 $1C1C&J_J_`	/v>&+# 	 es   C?N	input_idsr   r|   r   inputs_embedsr   r   r^   c           
      >   US L US L-  (       a  [        S5      eUc  U R                  U5      nU(       a  Uc  [        U R                  S9nUcU  Ub  UR	                  5       OSn[
        R                  " UR                  S   UR                  S9U-   nUR                  S5      n[        U R                  UUUUS9n	Un
U R                  XS9nU R                  S U R                  R                    H  nU" U
4U	UUUUS.UD6n
M     U R                  U
5      n
[        U
US	9$ )
Nz:You must specify exactly one of input_ids or inputs_embedsr
  r   r   )rZ   )rL   r  r   r   r|   )r|   )r   r   r|   r   r   )last_hidden_stater   )
ValueErrorr  r	   rL   get_seq_lengthr%   re   rs   rZ   r   r   r  r  r  r  r   )r)   r  r   r|   r   r  r   r   past_seen_tokenscausal_maskr;   r   decoder_layers                r.   r>   CohereModel.forward  sF    -t";<YZZ *.*;*;I*FM0*$++>OCRC^==?de <<(;(;A(>}G[G[\_ooL'11!4L(;;')+%
 &"oomoW![[)H4;;+H+HIM)*$7) /# M J 		-0&++
 	
r0   )r  r  r  r  r  r  r  )NNNNNN)rA   rB   rC   rD   r   r#   r   r   r   r%   r   r   r   r   r   r   r   r   r>   rE   rF   rG   s   @r.   r  r  |  s    |     .2.204(,26!%2
##d*2
 t+2
 &&-	2

 2
 ((4/2
 $;2
 +,2
 
!2
    2
r0   r  c                   P  ^  \ rS rSrSS0rSS0rSS/S/40rU 4S jr\\	        SS
\
R                  S	-  S\
R                  S	-  S\
R                  S	-  S\S	-  S\
R                  S	-  S\
R                  S	-  S\S	-  S\\
R                  -  S\\   S\4S jj5       5       rSrU =r$ )CohereForCausalLMi  zlm_head.weightzmodel.embed_tokens.weightlm_headcolwise_gather_outputr;   logitsc                 (  > [         TU ]  U5        [        U5      U l        UR                  U l        [
        R                  " UR                  UR                  SS9U l        UR                  U l	        UR                  U l
        U R                  5         g r   )r"   r#   r  r   r  r   r   r*   r%  logit_scaletie_word_embeddingsr  r   s     r.   r#   CohereForCausalLM.__init__  sq      (
 ++yy!3!3V5F5FUS!--#)#=#=  	r0   Nr  r   r|   r   r  labelsr   logits_to_keepr   r^   c	           
         U R                   " SUUUUUUS.U	D6n
U
R                  n[        U[        5      (       a  [	        U* S5      OUnU R                  USS2USS24   5      nXR                  -  nSnUb)  U R                  " SXU R                  R                  S.U	D6n[        UUU
R                  U
R                  U
R                  S9$ )au  
Example:

```python
>> from transformers import AutoTokenizer, CohereForCausalLM

>> model = CohereForCausalLM.from_pretrained("CohereForAI/c4ai-command-r-v01")
>> tokenizer = AutoTokenizer.from_pretrained("CohereForAI/c4ai-command-r-v01")

>> prompt = "Hey, are you conscious? Can you talk to me?"
>> inputs = tokenizer(prompt, return_tensors="pt")

>> # Generate
>> generate_ids = model.generate(inputs.input_ids, max_length=30)
>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
"Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
```)r  r   r|   r   r  r   N)r'  r,  r  )lossr'  r   r;   r   r   )r   r  rt   r   slicer%  r)  loss_functionrL   r  r   r   r;   r   )r)   r  r   r|   r   r  r,  r   r-  r   outputsr;   slice_indicesr'  r/  s                  r.   r>   CohereForCausalLM.forward  s    > ,0:: ,
)%+',
 ,
  118B>SV8W8W~ot4]kmA}a,?@A***%%pVt{{OeOepiopD%#33!//))
 	
r0   )r%  r)  r   r*  r  )NNNNNNNr   )rA   rB   rC   rD   _tied_weights_keys_tp_plan_pp_planr#   r   r   r%   r   r   r   r   r   r   r   r   r   r>   rE   rF   rG   s   @r.   r$  r$    s   *,GH23H_-z:;H	  .2.204(,26*.!%-.6
##d*6
 t+6
 &&-	6

 6
 ((4/6
   4'6
 $;6
 ell*6
 +,6
 
 6
  6
r0   r$  )r$  r  r   )r   )r   )<collections.abcr   typingr   r%   r   activationsr   cache_utilsr   r	   
generationr
   integrationsr   masking_utilsr   modeling_flash_attention_utilsr   modeling_layersr   modeling_outputsr   r   modeling_rope_utilsr   r   modeling_utilsr   r   processing_utilsr   utilsr   r   r   utils.genericr   r   utils.output_capturingr   configuration_coherer   Moduler   rI   r   r   r   r   rg   r   r   r   r   r   r   r  r$  __all__r   r0   r.   <module>rK     s  : %    ! . ) / / B 9 O K F & I I G 5 .-bii -"><BII ><B		  	UU\\ 	U# 	U%,, 	U& %II%<<% 
% <<	%
 LL4'% % % '(%2<8 )*Q)bii Q) +Q)h23 2j O  $ F
' F
 F
R H
- H
 H
V Hr0   