
import math
from typing import Optional, Union

import torch
from torch import nn

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache, StaticCache
from ...generation import GenerationMixin
from ...integrations import use_kernel_forward_from_hub
from ...masking_utils import create_causal_mask
from ...modeling_flash_attention_utils import _flash_attention_forward, flash_attn_supports_top_left_mask
from ...modeling_layers import (
    GenericForQuestionAnswering,
    GenericForSequenceClassification,
    GenericForTokenClassification,
    GradientCheckpointingLayer,
)
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple, logging
from ...utils.generic import check_model_inputs
from .configuration_diffllama import DiffLlamaConfig


logger = logging.get_logger(__name__)


class DiffLlamaMLP(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.hidden_size = config.hidden_size
        self.intermediate_size = config.intermediate_size
        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False)
        self.act_fn = ACT2FN[config.hidden_act]

    def forward(self, x):
        down_proj = self.down_proj(self.act_fn(self.gate_proj(x)) * self.up_proj(x))
        return down_proj


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., : x.shape[-1] // 2]
    x2 = x[..., x.shape[-1] // 2 :]
    return torch.cat((-x2, x1), dim=-1)


def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)
    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    return q_embed, k_embed


def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)


def lambda_init_fn(layer_idx):
    return 0.8 - 0.6 * math.exp(-0.3 * layer_idx)


class DiffLlamaAttention(nn.Module):
    """Multi-headed attention from 'Attention Is All You Need' paper"""

    def __init__(self, config: DiffLlamaConfig, layer_idx: Optional[int] = None):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        if layer_idx is None:
            logger.warning_once(
                f"Instantiating {self.__class__.__name__} without passing a `layer_idx` is not recommended and will "
                "lead to errors during the forward call if caching is used. Please make sure to provide a `layer_idx` "
                "when creating this class."
            )

        self.attention_dropout = config.attention_dropout
        self.hidden_size = config.hidden_size
        self.num_heads = config.num_attention_heads
        self.head_dim = getattr(config, "head_dim", self.hidden_size // self.num_heads)
        self.num_key_value_heads = config.num_key_value_heads
        self.num_key_value_groups = self.num_heads // self.num_key_value_heads
        self.max_position_embeddings = config.max_position_embeddings
        self.rope_theta = config.rope_theta
        self.is_causal = True

        self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=config.attention_bias)
        self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias)
        self.v_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias)
        self.o_proj = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=config.attention_bias)

        self.lambda_init = lambda_init_fn(layer_idx)
        self.lambda_q1 = nn.Parameter(torch.normal(0, config.lambda_std_dev, size=(self.head_dim,)))
        self.lambda_k1 = nn.Parameter(torch.normal(0, config.lambda_std_dev, size=(self.head_dim,)))
        self.lambda_q2 = nn.Parameter(torch.normal(0, config.lambda_std_dev, size=(self.head_dim,)))
        self.lambda_k2 = nn.Parameter(torch.normal(0, config.lambda_std_dev, size=(self.head_dim,)))
        self.groupnorm = nn.RMSNorm(2 * self.head_dim, eps=config.rms_norm_eps, elementwise_affine=False)

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_value: Optional[Cache] = None,
        use_cache: bool = False,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs,
    ) -> tuple[torch.Tensor, Optional[torch.Tensor], Optional[tuple[torch.Tensor]]]:
        bsz, target_len, _ = hidden_states.size()
        q_len = target_len

        query_states = self.q_proj(hidden_states)
        key_states = self.k_proj(hidden_states)
        value_states = self.v_proj(hidden_states)

        query_states = query_states.view(bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2)
        key_states = key_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)
        value_states = value_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_value is not None:
            # sin and cos are specific to RoPE models; cache_position needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx, cache_kwargs)

        key_states = repeat_kv(key_states, self.num_key_value_groups)
        value_states = repeat_kv(value_states, self.num_key_value_groups)
        # Pair up the value heads (each pair is concatenated along the feature dim) and duplicate them so that the
        # "positive" and "negative" attention maps see the same values.
        value_states = torch.cat(torch.chunk(value_states, 2, dim=1), dim=-1)
        value_states = value_states.repeat(1, 2, 1, 1)

        attn_weights = torch.matmul(query_states, key_states.transpose(2, 3)) / math.sqrt(self.head_dim)

        if attention_mask is not None:  # no matter the length, we just slice it
            causal_mask = attention_mask[:, :, :, : key_states.shape[-2]]
            attn_weights = attn_weights + causal_mask

        # upcast attention to fp32
        attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query_states.dtype)
        attn_weights = nn.functional.dropout(attn_weights, p=self.attention_dropout, training=self.training)

        lambda_1 = torch.exp(torch.sum(self.lambda_q1 * self.lambda_k1, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_2 = torch.exp(torch.sum(self.lambda_q2 * self.lambda_k2, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_full = lambda_1 - lambda_2 + self.lambda_init
        attn_output = torch.matmul(attn_weights, value_states)
        attn_output1, attn_output2 = torch.chunk(attn_output, 2, dim=1)
        attn_output = attn_output1 - lambda_full * attn_output2
        attn_output = (1 - self.lambda_init) * self.groupnorm(attn_output)

        attn_output = attn_output.transpose(1, 2).contiguous()
        attn_output = attn_output.reshape(bsz, q_len, -1)

        attn_output = self.o_proj(attn_output)

        return attn_output, attn_weights


class DiffLlamaFlashAttention2(DiffLlamaAttention):
    """
    DiffLlama flash attention module. This module inherits from `DiffLlamaAttention` as the weights of the module stays
    untouched. The only required change would be on the forward pass where it needs to correctly call the public API of
    flash attention and deal with padding tokens in case the input contains any of them.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        # flash_attn<2.1 generates a top-left aligned causal mask, while bottom-right alignment is needed here;
        # this flag records which behaviour the installed flash-attn version uses.
        self._flash_attn_uses_top_left_mask = flash_attn_supports_top_left_mask()

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.LongTensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_value: Optional[Cache] = None,
        use_cache: bool = False,
        cache_position: Optional[torch.LongTensor] = None,
    ) -> tuple[torch.Tensor, Optional[torch.Tensor], Optional[tuple[torch.Tensor]]]:
        if isinstance(past_key_value, StaticCache):
            raise ValueError(
                "`static` cache implementation is not compatible with `attn_implementation==flash_attention_2` "
                "make sure to use `sdpa` in the mean time, and open an issue at https://github.com/huggingface/transformers"
            )

        bsz, q_len, _ = hidden_states.size()

        query_states = self.q_proj(hidden_states)
        key_states = self.k_proj(hidden_states)
        value_states = self.v_proj(hidden_states)

        query_states = query_states.view(bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2)
        key_states = key_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)
        value_states = value_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)

        if position_embeddings is None:
            logger.warning_once(
                "The attention layers in this model are transitioning from computing the RoPE embeddings internally "
                "through `position_ids` (2D tensor with the indexes of the tokens), to using externally computed "
                "`position_embeddings` (Tuple of tensors, containing cos and sin). In v4.46 `position_ids` will be "
                "removed and `position_embeddings` will be mandatory."
            )
            cos, sin = self.rotary_emb(value_states, position_ids)
        else:
            cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_value is not None:
            # sin and cos are specific to RoPE models; cache_position needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx, cache_kwargs)

        # Flash attention requires the layout [batch_size, sequence_length, num_heads, head_dim]
        query_states = query_states.transpose(1, 2)
        key_states = key_states.transpose(1, 2)
        value_states = value_states.transpose(1, 2)

        dropout_rate = self.attention_dropout if self.training else 0.0

        # In PEFT, the layer norms are usually cast to float32 for training stability, which silently upcasts the
        # hidden states; cast the inputs back to the expected dtype before calling flash attention.
        input_dtype = query_states.dtype
        device_type = query_states.device.type if query_states.device.type != "mps" else "cpu"
        if input_dtype == torch.float32:
            if torch.is_autocast_enabled():
                target_dtype = (
                    torch.get_autocast_dtype(device_type)
                    if hasattr(torch, "get_autocast_dtype")
                    else torch.get_autocast_gpu_dtype()
                )
            # Handle the case where the model is quantized
            elif hasattr(self.config, "_pre_quantization_dtype"):
                target_dtype = self.config._pre_quantization_dtype
            else:
                target_dtype = self.q_proj.weight.dtype

            logger.warning_once(
                f"The input hidden states seems to be silently casted in float32, this might be related to the fact"
                f" you have upcasted embedding or layer norm layers in float32. We will cast back the input in"
                f" {target_dtype}."
            )

            query_states = query_states.to(target_dtype)
            key_states = key_states.to(target_dtype)
            value_states = value_states.to(target_dtype)

        value_states1, value_states2 = torch.chunk(value_states, 2, dim=2)
        value_states1 = value_states1.repeat(1, 1, 2, 1)
        value_states2 = value_states2.repeat(1, 1, 2, 1)

        attn_output1 = _flash_attention_forward(
            query_states,
            key_states,
            value_states1,
            attention_mask,
            q_len,
            position_ids=position_ids,
            dropout=dropout_rate,
            sliding_window=getattr(self, "sliding_window", None),
            use_top_left_mask=self._flash_attn_uses_top_left_mask,
            is_causal=self.is_causal,
        )
        attn_output2 = _flash_attention_forward(
            query_states,
            key_states,
            value_states2,
            attention_mask,
            q_len,
            position_ids=position_ids,
            dropout=dropout_rate,
            sliding_window=getattr(self, "sliding_window", None),
            use_top_left_mask=self._flash_attn_uses_top_left_mask,
            is_causal=self.is_causal,
        )

        attn_output = torch.cat([attn_output1, attn_output2], dim=-1)
        attn_output1, attn_output2 = torch.chunk(attn_output, 2, dim=2)
        lambda_1 = torch.exp(torch.sum(self.lambda_q1 * self.lambda_k1, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_2 = torch.exp(torch.sum(self.lambda_q2 * self.lambda_k2, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_full = lambda_1 - lambda_2 + self.lambda_init
        attn_output = attn_output1 - lambda_full * attn_output2
        attn_output = (1 - self.lambda_init) * self.groupnorm(attn_output)

        attn_output = attn_output.reshape(bsz, q_len, -1).contiguous()
        attn_output = self.o_proj(attn_output)

        return attn_output, None


class DiffLlamaSdpaAttention(DiffLlamaAttention):
    """
    DiffLlama attention module using torch.nn.functional.scaled_dot_product_attention. This module inherits from
    `DiffLlamaAttention` as the weights of the module stays untouched. The only changes are on the forward pass to adapt to
    SDPA API.
    """

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_value: Optional[Cache] = None,
        use_cache: bool = False,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs,
    ) -> tuple[torch.Tensor, Optional[torch.Tensor], Optional[tuple[torch.Tensor]]]:
        bsz, q_len, _ = hidden_states.size()

        query_states = self.q_proj(hidden_states)
        key_states = self.k_proj(hidden_states)
        value_states = self.v_proj(hidden_states)

        query_states = query_states.view(bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2)
        key_states = key_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)
        value_states = value_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_value is not None:
            # sin and cos are specific to RoPE models; cache_position needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx, cache_kwargs)

        key_states = repeat_kv(key_states, self.num_key_value_groups)
        value_states = repeat_kv(value_states, self.num_key_value_groups)
        value_states = torch.cat(torch.chunk(value_states, 2, dim=1), dim=-1)
        value_states = value_states.repeat(1, 2, 1, 1)

        causal_mask = attention_mask
        if attention_mask is not None:
            causal_mask = causal_mask[:, :, :, : key_states.shape[-2]]

        # SDPA with the memory-efficient backend currently requires contiguous inputs when a custom attn_mask is used
        # on CUDA devices.
        if query_states.device.type == "cuda" and causal_mask is not None:
            query_states = query_states.contiguous()
            key_states = key_states.contiguous()
            value_states = value_states.contiguous()

        # Dispatch to SDPA's flash/efficient kernels via this `is_causal` statement instead of an inline conditional
        # to support both torch.compile's dynamic shapes and fullgraph options.
        is_causal = causal_mask is None and q_len > 1

        attn_output = torch.nn.functional.scaled_dot_product_attention(
            query_states,
            key_states,
            value_states,
            attn_mask=causal_mask,
            dropout_p=self.attention_dropout if self.training else 0.0,
            is_causal=is_causal,
        )

        attn_output1, attn_output2 = torch.chunk(attn_output, 2, dim=1)
        lambda_1 = torch.exp(torch.sum(self.lambda_q1 * self.lambda_k1, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_2 = torch.exp(torch.sum(self.lambda_q2 * self.lambda_k2, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_full = lambda_1 - lambda_2 + self.lambda_init
        attn_output = attn_output1 - lambda_full * attn_output2
        attn_output = (1 - self.lambda_init) * self.groupnorm(attn_output)

        attn_output = attn_output.transpose(1, 2).contiguous()
        attn_output = attn_output.view(bsz, q_len, -1)

        attn_output = self.o_proj(attn_output)

        return attn_output, None


@use_kernel_forward_from_hub("RMSNorm")
class DiffLlamaRMSNorm(nn.Module):
    def __init__(self, hidden_size, eps=1e-6):
        """
        DiffLlamaRMSNorm is equivalent to T5LayerNorm
        """
        super().__init__()
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.variance_epsilon = eps

    def forward(self, hidden_states):
        input_dtype = hidden_states.dtype
        hidden_states = hidden_states.to(torch.float32)
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
        return self.weight * hidden_states.to(input_dtype)

    def extra_repr(self):
        return f"{tuple(self.weight.shape)}, eps={self.variance_epsilon}"


DIFFLLAMA_ATTENTION_CLASSES = {
    "eager": DiffLlamaAttention,
    "flash_attention_2": DiffLlamaFlashAttention2,
    "sdpa": DiffLlamaSdpaAttention,
}


class DiffLlamaDecoderLayer(GradientCheckpointingLayer):
    def __init__(self, config: DiffLlamaConfig, layer_idx: int):
        super().__init__()
        self.hidden_size = config.hidden_size

        self.self_attn = DIFFLLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx)

        self.mlp = DiffLlamaMLP(config)
        self.input_layernorm = DiffLlamaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_attention_layernorm = DiffLlamaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_value: Optional[Cache] = None,
        use_cache: Optional[bool] = False,
        cache_position: Optional[torch.LongTensor] = None,
        position_embeddings: Optional[tuple[torch.Tensor, torch.Tensor]] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> torch.Tensor:
        residual = hidden_states
        hidden_states = self.input_layernorm(hidden_states)

        # Self Attention
        hidden_states, _ = self.self_attn(
            hidden_states=hidden_states,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_value=past_key_value,
            use_cache=use_cache,
            cache_position=cache_position,
            position_embeddings=position_embeddings,
            **kwargs,
        )
        hidden_states = residual + hidden_states

        # Fully Connected
        residual = hidden_states
        hidden_states = self.post_attention_layernorm(hidden_states)
        hidden_states = self.mlp(hidden_states)
        hidden_states = residual + hidden_states
        return hidden_states


@auto_docstring
class DiffLlamaPreTrainedModel(PreTrainedModel):
    config: DiffLlamaConfig
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["DiffLlamaDecoderLayer"]
    _skip_keys_device_placement = ["past_key_values"]
    _supports_flash_attn = True
    _supports_sdpa = True
    _supports_flex_attn = False
    _can_compile_fullgraph = False
    _supports_attention_backend = False
    _can_record_outputs = {
        "hidden_states": DiffLlamaDecoderLayer,
        "attentions": DiffLlamaAttention,
    }

    def _init_weights(self, module):
        super()._init_weights(module)
        if isinstance(module, DiffLlamaAttention):
            module.lambda_q1.data.normal_(0, self.config.lambda_std_dev)
            module.lambda_k1.data.normal_(0, self.config.lambda_std_dev)
            module.lambda_q2.data.normal_(0, self.config.lambda_std_dev)
            module.lambda_k2.data.normal_(0, self.config.lambda_std_dev)


class DiffLlamaRotaryEmbedding(nn.Module):
    def __init__(self, config: DiffLlamaConfig, device=None):
        super().__init__()
        # BC: "rope_type" was originally "type"
        if hasattr(config, "rope_scaling") and isinstance(config.rope_scaling, dict):
            self.rope_type = config.rope_scaling.get("rope_type", config.rope_scaling.get("type"))
        else:
            self.rope_type = "default"
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings

        self.config = config
        self.rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]

        inv_freq, self.attention_scaling = self.rope_init_fn(self.config, device)
        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self.original_inv_freq = self.inv_freq

    @torch.no_grad()
    @dynamic_rope_update  # power user: used with advanced RoPE types (e.g. dynamic rope)
    def forward(self, x, position_ids):
        inv_freq_expanded = self.inv_freq[None, :, None].float().expand(position_ids.shape[0], -1, 1).to(x.device)
        position_ids_expanded = position_ids[:, None, :].float()

        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
        with torch.autocast(device_type=device_type, enabled=False):  # Force float32
            freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(1, 2)
            emb = torch.cat((freqs, freqs), dim=-1)
            cos = emb.cos() * self.attention_scaling
            sin = emb.sin() * self.attention_scaling

        return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype)


@auto_docstring
class DiffLlamaModel(DiffLlamaPreTrainedModel):
    def __init__(self, config: DiffLlamaConfig):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        self.layers = nn.ModuleList(
            [DiffLlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.norm = DiffLlamaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.rotary_emb = DiffLlamaRotaryEmbedding(config=config)
        self.gradient_checkpointing = False

        # Initialize weights and apply final processing
        self.post_init()

    @check_model_inputs
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> BaseModelOutputWithPast:
        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        if use_cache and past_key_values is None:
            past_key_values = DynamicCache()

        if cache_position is None:
            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )

        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        causal_mask = create_causal_mask(
            config=self.config,
            input_embeds=inputs_embeds,
            attention_mask=attention_mask,
            cache_position=cache_position,
            past_key_values=past_key_values,
            position_ids=position_ids,
        )

        hidden_states = inputs_embeds
        position_embeddings = self.rotary_emb(hidden_states, position_ids)

        for decoder_layer in self.layers[: self.config.num_hidden_layers]:
            hidden_states = decoder_layer(
                hidden_states,
                attention_mask=causal_mask,
                position_ids=position_ids,
                past_key_value=past_key_values,
                use_cache=use_cache,
                cache_position=cache_position,
                position_embeddings=position_embeddings,
                **kwargs,
            )

        hidden_states = self.norm(hidden_states)
        return BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values,
        )


@auto_docstring
class DiffLlamaForCausalLM(DiffLlamaPreTrainedModel, GenerationMixin):
    _tied_weights_keys = ["lm_head.weight"]
    _tp_plan = {"lm_head": "colwise_rep"}
    _pp_plan = {"lm_head": (["hidden_states"], ["logits"])}

    def __init__(self, config):
        super().__init__(config)
        self.model = DiffLlamaModel(config)
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)

        # Initialize weights and apply final processing
        self.post_init()

    def set_decoder(self, decoder):
        self.model = decoder

    def get_decoder(self):
        return self.model

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: Union[int, torch.Tensor] = 0,
        **kwargs: Unpack[TransformersKwargs],
    ) -> CausalLMOutputWithPast:
        r"""
        Example:

        ```python
        >>> from transformers import AutoTokenizer, DiffLlamaForCausalLM

        >>> model = DiffLlamaForCausalLM.from_pretrained("google/diffllama-7b")
        >>> tokenizer = AutoTokenizer.from_pretrained("google/diffllama-7b")

        >>> prompt = "What is your favorite condiment?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "What is your favorite condiment?"
        ```"""
        outputs: BaseModelOutputWithPast = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = outputs.last_hidden_state
        # Only compute necessary logits, and do not upcast them to float if we are not computing the loss
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])

        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


class DiffLlamaForSequenceClassification(GenericForSequenceClassification, DiffLlamaPreTrainedModel):
    pass


class DiffLlamaForQuestionAnswering(GenericForQuestionAnswering, DiffLlamaPreTrainedModel):
    base_model_prefix = "transformer"  # For BC, where `transformer` was used instead of `model`


class DiffLlamaForTokenClassification(GenericForTokenClassification, DiffLlamaPreTrainedModel):
    pass


__all__ = [
    "DiffLlamaPreTrainedModel",
    "DiffLlamaModel",
    "DiffLlamaForCausalLM",
    "DiffLlamaForSequenceClassification",
    "DiffLlamaForQuestionAnswering",
    "DiffLlamaForTokenClassification",
]