
from typing import Any, Callable, Optional, TypedDict, Union

import torch
import torch.nn.functional as F
from torch import nn

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache, DynamicLayer
from ...generation import GenerationMixin
from ...modeling_attn_mask_utils import AttentionMaskConverter
from ...modeling_layers import GradientCheckpointingLayer
from ...modeling_outputs import BaseModelOutputWithPast, MoeCausalLMOutputWithPast, MoeModelOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import auto_docstring, can_return_tuple, is_torch_flex_attn_available, logging
from ...utils.import_utils import is_causal_conv1d_available, is_mamba_2_ssm_available
from .configuration_granitemoehybrid import GraniteMoeHybridConfig


if is_mamba_2_ssm_available():
    from mamba_ssm.ops.triton.selective_state_update import selective_state_update
    from mamba_ssm.ops.triton.ssd_combined import mamba_chunk_scan_combined, mamba_split_conv1d_scan_combined
else:
    selective_state_update = None

if is_causal_conv1d_available():
    from causal_conv1d import causal_conv1d_fn, causal_conv1d_update
else:
    causal_conv1d_update, causal_conv1d_fn = None, None

if is_torch_flex_attn_available():
    from torch.nn.attention.flex_attention import BlockMask

    from ...integrations.flex_attention import make_flex_block_causal_mask


logger = logging.get_logger(__name__)


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., : x.shape[-1] // 2]
    x2 = x[..., x.shape[-1] // 2 :]
    return torch.cat((-x2, x1), dim=-1)


def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)
    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    return q_embed, k_embed


def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)
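

# Illustrative sketch (hypothetical shapes, added for clarity): how the helpers above combine
# inside a grouped-query attention layer. `position_embeddings` would come from
# `GraniteMoeHybridRotaryEmbedding`, defined further below in this module.
#
#   query = torch.randn(2, 32, 16, 64)   # (batch, num_attention_heads, seq_len, head_dim)
#   key = torch.randn(2, 8, 16, 64)       # (batch, num_key_value_heads,  seq_len, head_dim)
#   cos, sin = position_embeddings        # each (batch, seq_len, head_dim)
#   query, key = apply_rotary_pos_emb(query, key, cos, sin)
#   key = repeat_kv(key, n_rep=32 // 8)   # -> (2, 32, 16, 64), ready for the attention matmul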
    r   N)r.   expandreshape)rB   rC   batchnum_key_value_headsslenhead_dims         r4   	repeat_kvrL   b   so    
 2?1D1D.Ehz!!Qa"23::5BUW\^bdlmM  (;e(CT8TTr6   modulequerykeyvalueattention_maskscalingdropoutc                 T   t        || j                        }t        || j                        }	t        j                  ||j	                  dd            |z  }
|#|d d d d d d d |j
                  d   f   }|
|z   }
t        j                  j                  |
dt        j                        j                  |j                        }
t        j                  j                  |
|| j                        }
t        j                  |
|	      }|j	                  dd      j                         }||
fS )Nr+   r
   r*   )r-   dtype)ptrainingr   )rL   num_key_value_groupsr/   matmul	transposer.   r   
functionalsoftmaxfloat32torV   rS   rX   
contiguous)rM   rN   rO   rP   rQ   rR   rS   kwargs
key_statesvalue_statesattn_weightscausal_maskattn_outputs                r4   eager_attention_forwardrg   n   s    3 ; ;<JUF$?$?@L<<z';';Aq'ABWLL!$Q1.D
0@0@0D.D%DE#k1 ==((2U]](SVVW\WbWbcL==((6??([L,,|\:K''1-88:K$$r6   c                   \    e Zd ZdZdedef fdZ	 	 	 	 	 	 ddej                  de	ej                     de	ej                     de	e   d	ed
e	ej                     de	eej                  ej                  f      deej                  e	ej                     e	eej                        f   fdZ xZS )GraniteMoeHybridAttentionz=Multi-headed attention from 'Attention Is All You Need' paperconfig	layer_idxc                    t         |           || _        || _        |-t        j                  d| j                  j                   d       |j                  | _        |j                  | _	        |j                  | _        | j                  | j                  z  | _        |j                  | _        | j                  | j                  z  | _        d| _        |j                   | _        | j                  | j                  z  | j                  k7  r&t%        d| j                   d| j                   d      t'        j(                  | j                  | j                  | j                  z  |j*                        | _        t'        j(                  | j                  | j                  | j                  z  |j*                        | _        t'        j(                  | j                  | j                  | j                  z  |j*                        | _        t'        j(                  | j                  | j                  |j*                        | _        y )NzInstantiating z without passing a `layer_idx` is not recommended and will lead to errors during the forward call if caching is used. Please make sure to provide a `layer_idx` when creating this class.Tz?hidden_size must be divisible by num_heads (got `hidden_size`: z and `num_heads`: z).bias)super__init__rj   rk   loggerwarning_once	__class____name__attention_dropouthidden_sizenum_attention_heads	num_headsrK   rI   rY   	is_causalattention_multiplierrR   
ValueErrorr   Linearattention_biasq_projk_projv_projo_projselfrj   rk   rs   s      r4   rp   z"GraniteMoeHybridAttention.__init__   s   " !8!8 9 :, , "(!9!9!--33((DNN:#)#=#= $(NNd6N6N$N!22MMDNN*t/?/??QRVRbRbQc$T^^$4B8 
 ii 0 0$..4==2PW]WlWlmii 0 0$2J2JT]]2Zagavavwii 0 0$2J2JT]]2Zagavavwii 0 0$2B2BI^I^_r6   rB   rQ   r=   past_key_value	use_cachecache_positionposition_embeddingsrD   c                    |j                         \  }	}
}| j                  |      }| j                  |      }| j                  |      }|j	                  |	|
| j
                  | j                        j                  dd      }|j	                  |	|
| j                  | j                        j                  dd      }|j	                  |	|
| j                  | j                        j                  dd      }||nd\  }}|t        ||||      \  }}|'|||d}|j                  ||| j                  |      \  }}t        }| j                  j                  dk7  rt        | j                  j                     } || ||||f| j                   sdn| j"                  | j$                  d|\  }}|j	                  |	|
d      }| j'                  |      }||fS )	Nr   r+   r&   )r<   r;   r   eager        )rS   rR   r*   )sizer~   r   r   viewrx   rK   r[   rI   rA   updaterk   rg   rj   _attn_implementationr   rX   ru   rR   r   )r   rB   rQ   r=   r   r   r   r   ra   bszq_len_query_statesrb   rc   r;   r<   cache_kwargsattention_interfacerf   rd   s                        r4   forwardz!GraniteMoeHybridAttention.forward   s    &**,UA{{=1[[/
{{=1#((eT^^T]]S]]^_abc__S%1I1I4==Yccdeghi
#((eT5M5Mt}}]gghiklm*=*I&|S*';L*VY[^'_$L*%#&snUL'5'<'<ZW[WeWegs't$J(?;;++w6"9$++:Z:Z"[$7	%
  $}}C$2H2HLL	%
 	%
!\ "&&sE26kk+.L((r6   )NNNFNN)rt   
__module____qualname____doc__r    intrp   r/   Tensorr   
LongTensorr   booltupler   __classcell__rs   s   @r4   ri   ri      s    G`5 `# `F 2637*.59KO0)||0) !.0) u//0	0)
 !0) 0) !!1!120) &eELL%,,,F&GH0) 
u||Xell3XeELL>Q5RR	S0)r6   ri   c                       e Zd ZdZdZdZdZej                  dfde	f fdZ
	 ddej                  dej                  ded	eeeef      d
eej                  ej                  f   f
dZdej&                  fdZddee   d
efdZd
eeej                     eej                     f   fdZeddeeeej0                           d
dfd       Z xZS ) HybridMambaAttentionDynamicCachea  
    A dynamic cache that can handle both the attention cache (which has a seq_len dimension) and the mamba cache
    (which has a constant shape regardless of seq_len).

    This cache has two sets of lists of tensors: `key_cache` and `value_cache` for attention cache and `conv_states`
    and `ssm_states` for mamba cache. Each of these lists has `num_layers` tensors. The expected shape for each tensor is as follows:
    For attention layers, `key_cache` and `value_cache` have a shape of `(batch_size, num_heads, seq_len, head_dim)`,
    while `conv_states` and `ssm_states` have a shape of `(batch_size, 0)` (empty tensors).
    For mamba layers, `key_cache` and `value_cache` have a shape of `(batch_size, 0)` (empty tensors),
    while `conv_states` represents the convolution state and has a shape of `(batch_size, d_inner, d_conv)`,
    and `ssm_states` represents the ssm state and has a shape of `(batch_size, d_inner, d_state)`.
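
    A minimal usage sketch (illustrative; `model`, `config` and `input_ids` are assumed to already exist):

    ```python
    cache = HybridMambaAttentionDynamicCache(config, batch_size=1, dtype=torch.float16, device=model.device)
    outputs = model(input_ids, past_key_values=cache, use_cache=True)
    ```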
    NFrj   c                 V   t         	|   t               |j                  | _        d| _        |j
                  }|j                  }g | _        g | _        g | _	        t        |j                        D ]*  }| j                  |   dk(  r| xj                  t        j                  ||j                  |j                  z  d|j                   z  |z  z   |||      gz  c_        | xj                  t        j                  ||j"                  |j$                  |||      gz  c_        | xj                  t        j&                  g g|z  |      gz  c_        | xj                  t        j&                  g g|z  |      gz  c_        | j                  j)                  |       - t        |j                        D cg c]  }t        j&                  g g|z  |       c}| _        t        |j                        D cg c]  }t        j&                  g g|z  |       c}| _        y c c}w c c}w )N)layer_classesFmambar+   devicerV   r   )ro   rp   r   layers_block_typehas_previous_statemamba_d_convmamba_d_stateconv_states
ssm_statestransformer_layersrangenum_hidden_layersr/   zerosmamba_expandrv   mamba_n_groupsmamba_n_headsmamba_d_headtensorappend	key_cachevalue_cache)
r   rj   
batch_sizerV   r   conv_kernel_sizessm_state_sizeir   rs   s
            r4   rp   z)HybridMambaAttentionDynamicCache.__init__   s   |4!'!9!9"'!..--"$v//0 	2A%%a(G3  KK",,v/A/AAAH]H]D]`nDnn(%#%   KK",,++&%#	$ 	   U\\2$2CF%S$TT ELL"
1B6$R#SS''..q11	24 SXX^XpXpRqrQ%,,tj'8HrTYZ`ZrZrTstqELL"
):6Jt sts   5"H!6"H&rb   rc   rk   r   rD   c                    | j                   |   j                  d   dk(  r|| j                   |<   || j                  |<   nft        j                  | j                   |   |gd      | j                   |<   t        j                  | j                  |   |gd      | j                  |<   | j                   |   | j                  |   fS )Nr*   r   r+   r,   )r   r.   r   r/   r0   )r   rb   rc   rk   r   s        r4   r   z'HybridMambaAttentionDynamicCache.update  s     >>)$**2.!3(2DNN9%*6DY'(-		4>>)3Lj2Y_`(aDNN9%*/))T5E5Ei5PR^4_ef*gDY'~~i($*:*:9*EEEr6   beam_idxc                    t        t        | j                              D ]S  }| j                  |   j                  }| j                  |   j	                  d|j                  |            | j                  |<   | j                  |   j                  }| j                  |   j	                  d|j                  |            | j                  |<   | j                  |   j                  }| j                  |   j	                  d|j                  |            | j                  |<   | j                  |   j                  }| j                  |   j	                  d|j                  |            | j                  |<   V y)zDReorders the cache for beam search, given the selected beam indices.r   N)	r   lenr   r   index_selectr_   r   r   r   )r   r   rk   r   s       r4   reorder_cachez.HybridMambaAttentionDynamicCache.reorder_cache+  sD   s4>>23 		iI^^I.55F(,y(A(N(NqRZR]R]^dRe(fDNN9%%%i077F*.*:*:9*E*R*RSTV^VaVabhVi*jDY'%%i077F*.*:*:9*E*R*RSTV^VaVabhVi*jDY'__Y/66F)-)C)P)PQRT\T_T_`fTg)hDOOI&		ir6   c                     || j                   vr| j                   d   n|}t        | j                        |k  ry| j                  |   j                  d   S )zYReturns the sequence length of the cached states. A layer index can be optionally passed.r   rU   )r   r   r   r.   )r   rk   s     r4   get_seq_lengthz/HybridMambaAttentionDynamicCache.get_seq_length8  sR     3<4CZCZ2ZD++A.`i	t~~)+~~i(..r22r6   c                     t        d      NzIHybridMambaAttentionDynamicCache does not have a legacy cache equivalent.NotImplementedErrorr   s    r4   to_legacy_cachez0HybridMambaAttentionDynamicCache.to_legacy_cache@  s    !"mnnr6   past_key_valuesr   c                     t        d      r   r   )clsr   s     r4   from_legacy_cachez2HybridMambaAttentionDynamicCache.from_legacy_cacheC  s    !"mnnr6   N)r   )rt   r   r   r   r   r   is_compileabler/   float16r    rp   r   r   r   dictstrr   r   r   r   r   r   r   classmethodFloatTensorr   r   r   s   @r4   r   r      s+    IKNIN_c %u5 %uX 26FLLF llF 	F
 tCH~.F 
u||U\\)	*F"ie&6&6 i3 3c 3ouU\\':E%,,<O'O!P o ouUEVEV?W9X0Y oes o or6   r   input_tensorpad_sizec                     t        | j                        dk(  r
ddddd|ddfnddd|ddf}t        j                  j                  j                  | |dd      S )z
    Padding x tensor with `pad_size` on the seq_len dim (dim=1)

    Assumes that we only have tensors of either size 4 or 3
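
    For example (illustrative):

        pad_tensor_by_size(torch.zeros(2, 10, 4, 64), pad_size=2).shape  # -> torch.Size([2, 12, 4, 64])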
       r   constant)moderP   )r   r.   r/   r   r\   pad)r   r   	pad_shapes      r4   pad_tensor_by_sizer   K  sf     47|7I7I3Ja3OAq!Q!Q/VWYZ\]_gijlmUnI88""<ST"UUr6   c                    t        | |      } t        | j                        dk(  r.| j                  | j                  d   d|| j                  d         S | j                  | j                  d   d|| j                  d   | j                  d         S )z
    Padding input_tensor with `pad_size` on the seq_len dim (dim=1) and
    simultaneously splitting it into chunk sequences.

    Assumes that we only have tensors of either size 4 or 3
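
    For example (illustrative), with `pad_size=2` and `chunk_size=4`:

        reshape_into_chunks(torch.zeros(2, 10, 64), 2, 4).shape  # -> torch.Size([2, 3, 4, 64])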
    r
   r   r*   r+   )r   r   r.   rG   )r   r   
chunk_sizes      r4   reshape_into_chunksr   V  s     &lH=L
<!###L$6$6q$92z<K]K]^_K`aa ##q!2z<3E3Ea3H,J\J\]^J_
 	
r6   c                 "   | j                  d      } | d   j                  g | j                         | } t        j                  t        j                  ||| j
                  t        j                        d      }| j                  | d      } t        j                  | d      }t        j                  t        j                  ||| j
                  t        j                        d      }|j                  | t        j                         }|S )zo
    More stable segment sum calculation. Uses cumulative sums and masking instead of direct subtractions.
    r*   .Nr   diagonalr   rU   r,   )
r   rF   r/   trilonesr   r   masked_fillcumsuminf)r   r   masktensor_segsums       r4   segment_sumr   j  s     ""2&J 2<	*11S<3D3D3FS
SL::ejjZ@S@S[`[e[efqstD++TE15LLL26M ::ejjZ@S@S[`[e[efqrsD!--teeiiZ@Mr6   c                     |N|j                   d   dkD  r<|j                   d   dkD  r*| j                  }| |dddddf   z  j                  |      } | S )zm
    Tunes out the hidden states for padding tokens, see https://github.com/state-spaces/mamba/issues/66
    Nr   r   )r.   rV   r_   )rB   rQ   rV   s      r4   apply_mask_to_padding_statesr     sa     !n&:&:1&=&AnFZFZ[\F]`aFa##&1d
)CCGGNr6   c                       e Zd ZdZdedef fdZ	 	 	 	 ddej                  de	e
   de	ej                     de	ej                     d	e	ej                     f
d
Z	 	 	 dde	e
   de	ej                     de	ej                     fdZ	 	 	 	 dde	e
   de	ej                     de	ej                     d	e	ej                     fdZ xZS )GraniteMoeHybridMambaLayeruO  
    Compute ∆, A, B, C, and D the state space parameters and compute the `contextualized_states`.
    A, D are input independent (see Mamba paper [1] Section 3.5.2 "Interpretation of A" for why A isn't selective)
    ∆, B, C are input-dependent (this is a key difference between Mamba and the linear time invariant S4,
    and is why Mamba is called **selective** state spaces)

    There are a few differences between this and Mamba2Mixer:
    - The variable use_precomputed_states is slightly different due to the HybridCache structure
    - There are a few non-obvious bugs fixed with batching in the slow path that exist in main
    - Some extra variables that our layer doesn't need have been removed
    - We ported most of the refactors in https://github.com/huggingface/transformers/pull/35154, which is (as of Dec 18, 2024) unmerged
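
    As a point of reference, a simplified sketch of the per-head, per-channel recurrence the fused kernels compute:

        h_t = exp(∆_t * A) * h_{t-1} + ∆_t * B_t * x_t
        y_t = C_t · h_t + D * x_t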
    rj   rk   c           	         t         |           |j                  | _        |j                  | _        |j
                  | _        |j                  | _        t        |j                  | j                  z        | _        || _        |j                  | _        |j                  | _        t"        |j                     | _        |j&                  | _        |j*                  | _        |j.                  | _        |j2                  | _        |j6                  | _        dt;        d      f| _        d| _        d| _         | j                  d| j0                  z  | j                  z  z   | _!        tE        jF                  | jB                  | jB                  |j                  | j                  | jB                  | j                  dz
        | _$        | j                  | jB                  z   | j                  z   }tE        jJ                  | j                  || j(                        | _&        tE        jN                  tQ        jR                  | j                              | _*        tQ        jV                  d| j                  dz         }tE        jN                  tQ        jX                  |            | _-        d	| jZ                  _.        t_        | j                  | j,                  
      | _0        tE        jN                  tQ        jR                  | j                              | _1        d	| jb                  _.        tE        jJ                  | j                  | j                  | j(                        | _2        tf        sth        jk                  d       y th        jk                  d       y )Nr   r   gMbP?g?r+   r   )in_channelsout_channelsrn   kernel_sizegroupspaddingrm   Tepsa  The fast path is not available because on of `(selective_state_update, causal_conv1d_fn, causal_conv1d_update)` is None. Falling back to the naive implementation. To install follow https://github.com/state-spaces/mamba/#installation and https://github.com/Dao-AILab/causal-conv1dzOThe fast path for GraniteMoeHybrid will be used when running the model on a GPU)6ro   rp   r   rx   rv   r   r   r   r   r   r   intermediate_sizerk   mamba_conv_biasuse_conv_bias
hidden_act
activationr	   actmamba_proj_biasuse_biasrms_norm_epslayer_norm_epsilonr   n_groupsr   rK   mamba_chunk_sizer   floattime_step_limittime_step_mintime_step_maxconv_dimr   Conv1dconv1dr|   in_proj	Parameterr/   r   dt_biasarangelogA_log_no_weight_decayGraniteMoeHybridRMSNormGatednormDout_projis_fast_path_availablerq   rr   )r   rj   rk   projection_sizeArs   s        r4   rp   z#GraniteMoeHybridMambaLayer.__init__  s   --!--$22 & 3 3!$V%8%84;K;K%K!L"#33 ++&++,.."("5"5--++ 11 !$U5\2" ..T]]1BTEXEX1XXii''--==))A-
 004==@4>>Qyy
 ||EJJt~~$>? LLDNNQ./\\%))A,/
&*

#01G1GTMdMde	ejj89"&		$"8"8$:J:JQUQ^Q^_%>  qrr6   rB   cache_paramsr   rQ   seq_idxc                 P   t        ||      }| j                  |      }|j                  \  }}}	| j                  | j                  z  }
|d uxr} |j
                  xro |dk(  xrh |j                  | j                     j                  d   |j                  | j                     j                  d   cxk(  xr |k(  nc xr |d uxr |d   dkD  }|r|j                  d      j                  | j                  | j                  | j                  gd      \  }}}t        ||j                  | j                     | j                  j                   j                  d      | j                  j"                  | j$                        }t'        j                  || j                  |
|
gd      \  }}}t'        j(                  | j*                  j-                                }|d d d df   d d d d d f   j/                  d| j0                  | j                        j3                  t&        j4                        }|d d d d d f   j/                  dd| j0                        }| j6                  d d d df   j/                  d| j0                        }| j8                  d d d df   j/                  d| j0                        }|j;                  || j                  |j                  d   | j                  z        }|j;                  || j                  |j                  d   | j                  z        }|j;                  || j                  | j0                        }t=        |j                  | j                     ||||||d |d
      }|j;                  || j                  | j0                  z        }| j?                  ||      }| jA                  |      d d d df   }|S t'        j(                  | j*                  j-                                }| jB                  d	t-        d
      fk(  ri nd| jB                  i}| jD                  r|tG        || j                  j                   j                  d      | j                  j"                  | j6                  |f| j8                  | jH                  || j$                  | j>                  j                   | j>                  jJ                  | j@                  j                   | j@                  j"                  | j0                  | j                  ddd|}|S |j                  | j                  | j                  | j                  gd      \  }}}|v|jM                  dd      }tN        jP                  jS                  || jT                  |j                  d   z
  df      }|j                  | j                     jW                  |       | j$                  dvrH| jY                  | j                  |jM                  dd            dd |f   jM                  dd            }nqt[        |jM                  dd      | j                  j                   j                  d      | j                  j"                  | j$                  |      jM                  dd      }t        ||      }t'        j                  || j                  |
|
gd      \  }}}t]        |j;                  ||d| j0                        |||j;                  ||| j                  d      |j;                  ||| j                  d      f| jH                  | j8                  d |d| j6                  dd|\  }}|*|(|j                  | j                     jW                  |       |j;                  ||d      }| j?                  ||      }| jA                  |      }|S )Nr   r   r*   r,   .rV   T)zr  dt_softplusr   r   dt_limitF)r  r   r  r   rmsnorm_weightrmsnorm_epsoutproj_weightoutproj_biasheaddimngroupsnorm_before_gatereturn_final_statesr+   )siluswish)r1   weightrn   r   r  )r   r  r   r  r*  r  r!  )/r   r  r.   r  r   r   r   rk   r   squeezesplitr   r  rx   r%   r  r-  rn   r   r/   expr  r  rF   rK   r_   r^   r  r  r   r!   r  r  r  rX   r#   r   variance_epsilonr[   r   r\   r   r   copy_r   r$   r"   )r   rB   r  r   rQ   r  projected_statesr   seq_lenr   groups_time_state_sizeuse_precomputed_statesgatehidden_states_B_CdtBCr  r  r  hidden_states_reshapedoutdt_limit_kwargshidden_states_B_C_transposedr   scan_output	ssm_states                              r4   cuda_kernels_forwardz/GraniteMoeHybridMambaLayer.cuda_kernels_forward  s    5]NS<<6 "/!4!4
GQ!%1D1D!D $ &//&1& ((8>>qA&&t~~6<<Q?& d*& q!A% 	 "*:*B*B1*E*K*K''GR +L +'D#R
 !5!((8""**1-  ! #(++!'')?AWX#M1a 4::++-..A!T3,1d
+222t}}dFYFYZ]]didqdq]rAAq$J&&r2t}}=Bll1dC<077DMMJGq$|$++B>Az4==!''!*2MNAz4==!''!*2MNA%2%7%7
DNNTXTaTa%b"2''7& M *..z4>>DMM;YZM IImT:M --.q$|<C| 
w 4::++-..A$($8$8S%,<O$ObV`bfbvbvUwO }}!56$KK&&..q1KK$$LL ff####'99#3#3 $		 : :#'==#7#7!%!3!3 MM MM%*(-#$ &%l 
A /?.D.D++T]]DNNKQS /E /+'  + 4E3N3NqRS3T0"$--"3"34..1M1S1STV1WWYZ[#K !,,T^^<BB;O??*;;(,$5$?$?1$EFsHWH}U__`acde)% )9+55a;#{{1199!<![[--#'?? ')  i1o & %AARTb$c!&+kk%++-CE[\'#q! *C!&&z7BNFF:wrBFF:wrB*  $ff#(, LL $* &*&Y" (\-E ++DNN;AA)L)..z7BG"iiT: mmK0
r6   c                    |j                   \  }}}|j                  }t        ||      }| j                  |      }	|	j	                  | j
                  | j                  | j                  gd      \  }
}}|d uxr} |j                  xro |dk(  xrh |j                  | j                     j                   d   |j                  | j                     j                   d   cxk(  xr |k(  nc xr |d uxr |d   dkD  }|rY|j                  | j                     j                  dd      |j                  | j                  <   |d d dd d f   j                  |j                  | j                     j                        |j                  | j                     d d d d df<   |j                  | j                     j                  | j                  j                   j                        }t#        j$                  || j                  j                   j'                  d      z  d      }| j(                  r|| j                  j*                  z   }| j-                  |      }n|v|j/                  dd      }t0        j2                  j5                  || j6                  |j                   d   z
  df      }|j                  | j                     j9                  |       | j-                  | j                  |j/                  dd            dd |f   j/                  dd            }t        ||      }t#        j                  || j
                  | j:                  | j<                  z  | j:                  | j<                  z  gd      \  }}}t#        j>                  | j@                  jC                                }|r|j                  | j                     j                  }|d d dd d f   d d d df   }|j/                  dd      jE                  ||j                   d   | jF                        }| jH                  d	   jE                  | jH                  j                   d   | jF                        }t"        j0                  j2                  jK                  ||j                  |j                        z         }t#        jL                  || jN                  d   | jN                  d         }|d
   jE                  | j                  | jF                  | j<                        j                  t"        jP                        }t#        j>                  |d	   |z        j                  |      }|jS                  || j:                  d      dd d d f   }|jE                  || j:                  | j                  | j:                  z  |j                   d         jU                         }|jS                  |d|j                   d         }|d	   |dd d d f   z  }|jS                  |d| jF                        }||d	   z  j                  |      }|j                  | j                     j9                  |j                  | j                     |z  |z          |jS                  || j:                  d      dd d d f   }|jE                  || j:                  | j                  | j:                  z  |j                   d         jU                         }|jS                  |d|j                   d         }|j                  | j                     j                  |j                  |j                        }|jW                  || j                  z  | jF                  | j<                        }|jW                  || j                  z  | j<                  d      }t#        jX                  ||      }|jW                  || j                  | jF                        }| jZ                  d	   jE                  | jZ                  j                   d   | jF                        }|||z  z   j                  |j                        }|jS                  |d      d d d df   }nt0        j2                  jK                  || jH                  z         }t#        jL                  || jN                  d   | jN                  d         }|jS                  ||d| jF                        jC                         }|jS                  ||d| j<                        jC                         }|jS                  ||d| j<                        jC                         }|j]                  | j                  | j:                  z  d| j                        }|j]                  | j                  | j:                  z  d| j                        }| j^                  || j^                  z  z
  | j^                  z  }| jZ                  d	   ta        ||      z  }||d	   z  }|j                  |j                        |z  }||||fD  cg c]  } tc        | || j^                         c} \  }}}}|je                  dddd      }t#        jf                  |d      }!t#        j>                  ti        |            }"|d d d d d d d d d d d f   |d d d d d d d d d d d f   z  }#|#j%                  d      }$|$d	   |"je                  ddddd      d	   z  }%|%j%                  d      }&|&d	   |d d d d d f   z  j%                  d      }'t#        j>                  |!d d d d d d dd f   |!z
        }(||(je                  dddd      d	   z  })|)dd d d f   |d	   z  j%                  d      }*|r<|j                  | j                     d d d df   j                  |*j                        }+nt#        jj                  |*d d d df         }+t#        jl                  |+|*gd      }*t#        j>                  ti        t0        j2                  j5                  |!d d d d d d df   d                  },|,j/                  dd      },|,d
   |*d d d d d df   z  j%                  d      }-|-d d d df   |-d d df   }.}*t#        j>                  |!      }/|dd d d f   |*d d d d d df   z  }0|/je                  dddd      }1|0j%                  d      |1d	   z  }2|'|2z   }|jS                  |d| j                  | jF                        }||z   }|dkD  r|d d d |d d d d f   }|jS                  ||d      }|.*|(|j                  | j                     j9                  |.       | jo                  ||
      }3| jq                  |3j                  |            }4|4S c c} w )Nr*   r,   r   r   )shiftsdimsr   r+   .r   ).NNr  r   )r-   output_sizer
   r   rU   )r   r   )9r.   rV   r   r  r/  r   r  rx   r   r   rk   r   rollr_   r   r  r-  r/   sumr.  r   rn   r   r[   r   r\   r   r   r2  r  r   r0  r  r  rF   rK   r  softplusclampr  r^   rG   r`   r   bmmr  repeat_interleaver   r   r   permuter   r   
zeros_liker0   r  r  )5r   input_statesr  r   rQ   r   r4  r   rV   r3  r7  r8  r9  r6  r   r?  rB   r:  r;  r  cache_devicer  dAdBdBxr   ssm_states_reshaped
C_reshapedyr  r   
D_residualtA_cumsumLG_intermediateGM_intermediateMY_diagdecay_statesB_decaystatesprevious_statesdecay_chunk
new_statesrA  state_decay_outC_times_statesstate_decay_out_permutedY_offr@  contextualized_statess5                                                        r4   torch_forwardz(GraniteMoeHybridMambaLayer.torch_forward  sU    ".!3!3
GQ"" 4L.Q<<5&6&<&<''GR '= '
#
 $ &//&1& ((8>>qA&&t~~6<<Q?& d*& q!A% 	 "7C7O7OPTP^P^7_7d7dlnuw7d7xL$$T^^4ARSTVWYZSZA[A^A^_k_w_wx|  yG  yG  `H  `O  `O  BPL$$T^^4Q2X> '224>>BEET[[M_M_MfMfEgK %		dkk0088;;! !!$58H8H$H! $): ; '/@/J/J1a/P, mm//043H3HKgKmKmnpKq3qst2u ((8>>{K $5F5P5PQRTU5V)WX[]e^e]eXe)f)p)pqrtu)v w89JN[#kk##T]]T5H5H%H$--Z^ZmZmJmn
q! YYtzz'')**!'224>>BIIL Aq!GQc\*Ba#**:rxx|T]]SBll9-44T\\5G5G5JDMMZG$$--b7::bhh3G.GHBR!5!5a!8$:N:Nq:QRB/"))$..$--I\I\]``glgtgt`uA))ByMA-.22,2GB
 		*dmmR8dAFAT]]DNNdmm4SUVU\U\]_U`allnA		*b!''"+6AI3a<0B *11*b$--PMi0044L4IC ##DNN399''7"<sB 		*dmmR8dAFAT]]DNNdmm4SUVU\U\]_U`allnA		*b!''"+6A &00@CC188[\[b[bCcJ",//*t~~2Mt}}^b^q^q"r
T^^ ;T=P=PRSTJ		-z:Az4>>4==AA y!((a$--HA]Q&&**1773A 		*b)!T3,7A ''T\\(9:BR!5!5a!8$:N:Nq:QRB)11*gr4==Y__aM		*gr43F3FGMMOA		*gr43F3FGMMOA##DNNdmm$CX\XfXf#gA##DNNdmm$CX\XfXf#gA'DOO*CCtVH	*-?x-XXJ *ByM9M](()B.A cpqrtuwxay%z\]&9!Xt&W%z"M1a 		!Q1%A||A2.H 		+a.)A q!Qa23a1dAq!8K6LLN""r"*A y\AIIaAq!,DY,OON""r"*A 	l]1a:%>>CCCJF !99XaArsl%;h%FGL,..q"b!<YGGGc4l+mI.FFKKPQKRF &"."9"9$.."I!TSV,"W"Z"Zbhbobo"Z"p"'"2"26!RaR%="AYY8a@F))K0A0A(1aQRTV;BWY_0`$abK%//15K%o61dC9PPUUZ[U\J *1crc6 2Jq"u4EIF $ii1OT1oq!T30GGN'6'>'>q!Q'J$#''+.Fy.QQE A		*b$..$--HAJA!|a'1a'(		*gr2A $)A''7==iHii4(
 !%knnU.C D$$G &{s   v	c                 r   t         rAd| j                  j                  j                  j                  v r| j                  |||||      S |t        d      |j                  }|B|j                  d   dkD  r0|j                  d   dkD  r||d d d d d f   z  j                  |      }| j                  ||||      S )Ncudaz\`seq_idx` support requires fast path support. Please install `mamba_ssm` and `causal_conv1d`r   r   )r  r  r-  r   typerB  r   rV   r.   r_   rk  )r   rB   r  r   rQ   r  ra   rV   s           r4   r   z"GraniteMoeHybridMambaLayer.forwardW  s     "f0C0C0J0J0O0O&O,,]L.Zhjqrr%n  ##%.*>*>q*AA*E.J^J^_`JadeJe*^Aq$J-GGKKERM!!-~~^^r6   )NNNN)NNN)rt   r   r   r   r    r   rp   r/   r   r   r   r   	IntTensorrB  rk  r   r   r   s   @r4   r   r     sJ   As5 As# AsL DH5915-1g||g ?@g !!1!12	g
 !.g %//*gZ DH5915L% ?@L% !!1!12	L%
 !.L%d DH5915-1_ ?@_ !!1!12	_
 !._ %//*_r6   r   c                   (     e Zd Zd fd	ZddZ xZS )r  c                     t         |           t        j                  t	        j
                  |            | _        || _        y r   ro   rp   r   r  r/   r   r-  r1  r   rv   r   rs   s      r4   rp   z%GraniteMoeHybridRMSNormGated.__init__o  s/    ll5::k#:; #r6   c                    |j                   }|j                  t        j                        }|?|t        j
                  j                  |j                  t        j                              z  }|j                  d      j                  dd      }|t        j                  || j                  z         z  }| j                  |j                  |      z  S Nr+   r*   T)keepdim)rV   r_   r/   r^   r   r\   r+  powmeanrsqrtr1  r-  )r   rB   r7  input_dtypevariances        r4   r   z$GraniteMoeHybridRMSNormGated.forwardt  s    #))%((7)BMM,>,>twwu}}?U,VVM $$Q',,R,>%Ht?T?T4T(UU{{]--k:::r6   gư>r   )rt   r   r   rp   r   r   r   s   @r4   r  r  n  s    $
	;r6   r  c                   `     e Zd ZdZdef fdZdej                  dej                  fdZ xZ	S )GraniteMoeHybridMLPz~
    MLP layer for shared experts

    Args:
        config:
            Configuration object with model hyperparameters.
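
    A sketch of the computation (gated feed-forward, SwiGLU-style when `hidden_act="silu"`):

        gate, up = input_linear(x).chunk(2, dim=-1)
        output = output_linear(activation(gate) * up)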
    rj   c                 `   t         |           |j                  | _        |j                  | _        t
        |j                     | _        t        j                  | j                  | j                  dz  d      | _
        t        j                  | j                  | j                  d      | _        y )Nr+   Frm   )ro   rp   rv   
input_sizeshared_intermediate_sizer	   r   r   r   r|   input_linearoutput_linearr   rj   rs   s     r4   rp   zGraniteMoeHybridMLP.__init__  s     ,,!:: !2!23IIdoot7G7G!7KRWXYYt'7'7uUr6   rB   rD   c                     | j                  |      }|j                  dd      }| j                  |d         |d   z  }| j                  |      }|S )Nr+   r*   r,   r   r   )r  chunkr   r  )r   rB   chunked_hidden_statess      r4   r   zGraniteMoeHybridMLP.forward  s^    ))-8 - 3 3A2 3 >(=a(@ADYZ[D\\**=9r6   )
rt   r   r   r   r    rp   r/   r   r   r   r   s   @r4   r~  r~    s2    V5 VU\\ ell r6   r~  c                       e Zd ZU dZej
                  ed<   ej
                  ed<   eed<   eed<   ej                  ed<   y)GraniteFlashAttentionKwargsa  
    Keyword arguments for advanced Flash Attention, causal-conv1d, and mamba_ssm kernel usage.
    Use cases include padding-free training and fewer `torch.compile` graph breaks.

    Attributes:
        cu_seq_lens_q (`torch.LongTensor`):
            Gets cumulative sequence length for query state.
        cu_seq_lens_k (`torch.LongTensor`):
            Gets cumulative sequence length for key state.
        max_length_q (`int`):
            Maximum sequence length for query state.
        max_length_k (`int`):
            Maximum sequence length for key state.
        seq_idx (`torch.IntTensor`):
            Index of each packed sequence.
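
    Illustrative example for two packed sequences of lengths 3 and 2 in a single row:

        seq_idx = torch.tensor([[0, 0, 0, 1, 1]], dtype=torch.int32)
        cu_seq_lens_q = cu_seq_lens_k = torch.tensor([0, 3, 5])
        max_length_q = max_length_k = 3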
    cu_seq_lens_qcu_seq_lens_kmax_length_qmax_length_kr  N)	rt   r   r   r   r/   r   __annotations__r   ro   r6   r4   r  r    s7    " ######__r6   r  F)totalc                   ,     e Zd Zd fd	Zd Zd Z xZS )GraniteMoeHybridRMSNormc                     t         |           t        j                  t	        j
                  |            | _        || _        y)zF
        GraniteMoeHybridRMSNorm is equivalent to T5LayerNorm
        Nrr  rs  s      r4   rp   z GraniteMoeHybridRMSNorm.__init__  s1     	ll5::k#:; #r6   c                 "   |j                   }|j                  t        j                        }|j	                  d      j                  dd      }|t        j                  || j                  z         z  }| j                  |j                  |      z  S ru  )	rV   r_   r/   r^   rw  rx  ry  r1  r-  )r   rB   rz  r{  s       r4   r   zGraniteMoeHybridRMSNorm.forward  sy    #))%((7 $$Q',,R,>%Ht?T?T4T(UU{{]--k:::r6   c                 ^    t        | j                  j                         d| j                   S )Nz, eps=)r   r-  r.   r1  r   s    r4   
extra_reprz"GraniteMoeHybridRMSNorm.extra_repr  s*    ))*+6$2G2G1HIIr6   r|  )rt   r   r   rp   r   r  r   r   s   @r4   r  r    s    $;Jr6   r  c                   6     e Zd Zdedededdf fdZd Z xZS )GraniteMoeHybridParallelExpertsnum_expertsr  rF  rD   Nc                     t         |           t        j                  t	        j
                  |||            | _        || _        || _        || _	        y)a  
        Initialize the GraniteMoeHybridParallelExperts module.
        The expert weights are stored in [num_experts, output_size, input_size] format, such that it is compatible with
        many MoE libraries, such as [Megablock](https://github.com/databricks/megablocks) and
        [ScatterMoE](https://github.com/shawntan/scattermoe), as well as the
        [MoE kernel](https://github.com/vllm-project/vllm/blob/main/vllm/model_executor/layers/fused_moe/fused_moe.py)
        used in vllm.

        Args:
            num_experts (int):
                Number of experts.
            input_size (int):
                Size of the input.
            output_size (int):
                Size of the output.
        N)
ro   rp   r   r  r/   emptyr-  r  r  rF  )r   r  r  rF  rs   s       r4   rp   z(GraniteMoeHybridParallelExperts.__init__  sD    " 	ll5;;{K#TU&$&r6   c                     |j                  |d      }g }t        | j                        D ]7  }|j                  t	        j
                  ||   | j                  |                9 t        j                  |d      }|S )a  
        Forward pass of the GraniteMoeHybridParallelExperts module.

        Args:
            inputs (Tensor):
                Input tensor.
            expert_size:
                Expert size information.

        Returns:
            Tensor: Output tensor.
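
        Illustrative example (tokens are assumed to already be grouped by expert):

            >>> experts = GraniteMoeHybridParallelExperts(num_experts=4, input_size=8, output_size=16)
            >>> tokens = torch.randn(10, 8)
            >>> experts(tokens, expert_size=[3, 2, 4, 1]).shape
            torch.Size([10, 16])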
        r   r,   )	r/  r   r  r   Flinearr-  r/   r0   )r   inputsexpert_size
input_listoutput_listr   resultss          r4   r   z'GraniteMoeHybridParallelExperts.forward  sq     \\+1\5
t''( 	HAqxx
1t{{1~FG	H))KQ/r6   rt   r   r   r   rp   r   r   r   s   @r4   r  r    s)    'C 'S 's 't '.r6   r  c                   2     e Zd Zdededef fdZd Z xZS )GraniteMoeHybridTopKGatingr  r  top_kc                     t         |           || _        || _        || _        t        j                  ||d      | _        y)a  
        Initialize the top-k gating mechanism.
        Args:
            input_size (`int`):
                Size of the input.
            num_experts (`int`):
                Number of experts.
            top_k (`int`):
                Number of top experts to select.
        Frm   N)ro   rp   r  r  r  r   r|   layer)r   r  r  r  rs   s       r4   rp   z#GraniteMoeHybridTopKGating.__init__  s:     	&$
YYz;UC
r6   c                    | j                  |      j                         }|j                  | j                  d      \  }}t	        j
                  |d      j                  |      }t	        j                  |j                  d      | j                  g|j                  |j                        }|j                  d|d      }|j                         j                  d      }|j                         }|j!                         }	|	j#                  d      \  }
}|j%                  | j                  d      }|j!                         }||   }|||||fS )Nr   r,   r   rV   r   trunc)rounding_mode)r  r  topkr  r/   r]   type_asr   r   r  rV   r   scatterlongrH  tolistflattensortdiv)r   rB   logitstop_k_logitstop_k_indicestop_k_gatesr   gatesr  top_k_expertsr   index_sorted_expertsbatch_indexbatch_gatess                 r4   r   z"GraniteMoeHybridTopKGating.forward  s.   M*002&,kk$**!k&D#mmmLa8@@O a $"2"23;;L;LU`UgUg
 a2jjl&&q) "((* &--/"/"4"4Q"7*..tzz.Q "))+!"67#[+{FRRr6   r  r   s   @r4   r  r    s'    D3 DS D D&Sr6   r  c                   .     e Zd ZdZdef fdZd Z xZS )GraniteMoeHybridMoEz
    A Sparsely gated mixture of experts layer with 1-layer Feed-Forward networks as experts.

    Args:
        config:
            Configuration object with model hyperparameters.
    rj   c                    t         |           |j                  | _        |j                  | _        t
        |j                     | _        t        |j                  | j                  | j                  dz        | _
        t        |j                  | j                  | j                        | _        t        | j                  |j                  |j                        | _        y )Nr+   )r  r  r  )ro   rp   rv   r  r   r	   r   r   r  num_local_expertsr  r  r  num_experts_per_tokrouterr  s     r4   rp   zGraniteMoeHybridMoE.__init__-  s     ,,!33 !2!23;$$doot7G7G!7K
 =$$d&6&6
 100,,
r6   c                    |j                         \  }}}|j                  d|      }| j                  |      \  }}}}}	||   }
| j                  |
|      }|j	                  dd      }| j                  |d         |d   z  }| j                  ||      }||dddf   z  }t        j                  ||z  | j                  f|j                  |j                        }|j                  d||      }|j                  ||| j                        }||	fS )a  
        Forward pass of the mixture of experts layer.

        Args:
            layer_input (Tensor):
                Input tensor.

        Returns:
            Tensor:
                Output tensor.
            Tensor:
                Router logits.
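
        A sketch of the routing performed with `GraniteMoeHybridTopKGating` (illustrative):

            logits = router.layer(flattened_tokens)                  # (num_tokens, num_experts)
            top_k_logits, top_k_indices = logits.topk(top_k, dim=1)  # per-token expert choice
            gates = softmax(top_k_logits, dim=1)                     # mix weights over the selected experts only
            output[token] = sum_j gates[token, j] * expert_{top_k_indices[token, j]}(token_input)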
        r*   r+   r,   r   r   Nr  )r   rG   r  r  r  r   r  r/   r   r  rV   r   	index_addr   )r   layer_inputr   lengthemb_sizer   r  r  r  router_logitsexpert_inputsrB   r  expert_outputsr   layer_outputs                   r4   r   zGraniteMoeHybridMoE.forward@  s    !, 0 0 2VX!))"h7BF++kBZ?;[-#K0))-E - 3 3A2 3 >(=a(@ADYZ[D\\++M;G'+ag*>>S6\4??;>CWCW`n`u`uvq+~F#((fdooF]**r6   )rt   r   r   r   r    rp   r   r   r   s   @r4   r  r  $  s    
5 
&+r6   r  c                   \    e Zd Zdedef fdZ	 	 	 	 	 	 	 ddej                  deej                     dee	   dee
   dee
   d	eej                     d
ee
   deeej                  ej                  f      dee   deej                  eeej                  ej                  f      f   fdZ xZS )GraniteMoeHybridDecoderLayerrj   rk   c                 4   t         |           |j                  | _        d | _        |j                  dkD  rt        |      | _        t        |j                  |j                        | _	        t        |j                  |j                        | _
        |j                  | _        t        |      | _        d | _        |j                  |   dk(  rt!        ||      | _        nt#        ||      | _        |j                  |   | _        t'        |dd      dkD  | _        y )Nr   r   r   r  )ro   rp   rv   	self_attnr  r  block_sparse_moer  r  input_layernormpost_attention_layernormresidual_multiplierr~  
shared_mlpr   r   r   ri   
layer_typegetattrhas_expertsr   s      r4   rp   z%GraniteMoeHybridDecoderLayer.__init__a  s    !--##a'$7$?D!6v7I7IvObObc(?@R@RX^XkXk(l%#)#=#= -f5
##I.'93FIFDJ6vyIDN 229= #6+>BQFr6   rB   rQ   r   output_attentionsr   r   output_router_logitsr   ra   rD   c	                    |}
| j                  |      }| j                   | j                  d||||d|	}d}n | j                  d|||||||d|	\  }}|
|| j                  z  z   }|}
| j	                  |      }| j
                  r)| j                  |      \  }}|| j                  |      z   }n| j                  |      }d}|
|| j                  z  z   }|f}|r||fz  }|r||fz  }|S )aB  
        Args:
            hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)`
            attention_mask (`torch.FloatTensor`, *optional*):
                attention mask of size `(batch_size, sequence_length)` if flash attention is used or `(batch_size, 1,
                query_sequence_length, key_sequence_length)` if default attention is used.
            past_key_value (`Tuple(torch.FloatTensor)`, *optional*): cached past key and value projection states
            output_attentions (`bool`, *optional*):
                Whether or not to return the attentions tensors of all attention layers. See `attentions` under
                returned tensors for more detail.
            use_cache (`bool`, *optional*):
                If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding
                (see `past_key_values`).
            cache_position (`torch.LongTensor` of shape `(sequence_length)`, *optional*):
                Indices depicting the position of the input sequence tokens in the sequence
            output_router_logits (`bool`, *optional*):
                Whether or not to return the logits of all the routers. They are useful for computing the router loss, and
                should not be returned during inference.
            position_embeddings (`tuple[torch.FloatTensor, torch.FloatTensor]`, *optional*):
                Tuple containing the cosine and sine positional embeddings of shape `(batch_size, seq_len, head_dim)`,
                with `head_dim` being the embedding dimension of each attention head.
            kwargs (`dict`, *optional*):
                Arbitrary kwargs. Can be used to provide `GraniteFlashAttentionKwargs` for
                padding-free training and/or improve torch.compile performance.
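
        A simplified sketch of the data flow implemented below (both branches are scaled by
        `config.residual_multiplier`; the first mixer is mamba or self-attention depending on `layer_type`,
        and the routed experts are skipped when the layer has none):

            h = h + mixer(input_layernorm(h)) * residual_multiplier
            m = post_attention_layernorm(h)
            h = h + (block_sparse_moe(m) + shared_mlp(m)) * residual_multiplier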
        N)rB   r   r  rQ   )rB   rQ   r   r  r   r   r   r  )r  r   r  r  r  r  r  r  )r   rB   rQ   r   r  r   r   r  r   ra   residualself_attn_weightsmoe_hidden_statesr  outputss                  r4   r   z$GraniteMoeHybridDecoderLayer.forwardx  sG   J !,,];::!&DJJ +-+-	
 M !%/=t~~ 	0+--"3#-$7	0 	0,M, !=43K3K#KK !55mD/3/D/D]/S,}-0NNM OOM:M M =43K3K#KK ")++G''Gr6   )NNFFNFN)rt   r   r   r    r   rp   r/   r   r   r   r   r   r   r   r  r   r   r   r   s   @r4   r  r  `  s   G5 G# G4 26*.,1$)59/4KOU||U !.U !	U
 $D>U D>U !!1!12U 'tnU &eELL%,,,F&GHU 45U 
@auto_docstring
class GraniteMoeHybridPreTrainedModel(PreTrainedModel):
    config: GraniteMoeHybridConfig
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["GraniteMoeHybridDecoderLayer"]
    _skip_keys_device_placement = ["past_key_values"]
    _supports_flash_attn = True
    _supports_sdpa = True
    _can_compile_fullgraph = False
    _is_stateful = True

    def _init_weights(self, module):
        super()._init_weights(module)
        if isinstance(module, GraniteMoeHybridParallelExperts):
            module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
        elif isinstance(module, GraniteMoeHybridMambaLayer):
            module.dt_bias.data.fill_(1.0)
            module.A_log.data = torch.log(torch.arange(1, module.num_heads + 1))
            module.D.data.fill_(1.0)
        elif isinstance(module, GraniteMoeHybridRMSNormGated):
            module.weight.data.fill_(1.0)


class GraniteMoeHybridRotaryEmbedding(nn.Module):
    def __init__(self, config: GraniteMoeHybridConfig, device=None):
        super().__init__()
        # BC: "rope_type" was originally "type"
        if hasattr(config, "rope_scaling") and isinstance(config.rope_scaling, dict):
            self.rope_type = config.rope_scaling.get("rope_type", config.rope_scaling.get("type"))
        else:
            self.rope_type = "default"
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings

        self.config = config
        self.rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]

        inv_freq, self.attention_scaling = self.rope_init_fn(self.config, device)
        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self.original_inv_freq = self.inv_freq

    @torch.no_grad()
    @dynamic_rope_update  # power user: used with advanced RoPE types (e.g. dynamic rope)
    def forward(self, x, position_ids):
        inv_freq_expanded = self.inv_freq[None, :, None].float().expand(position_ids.shape[0], -1, 1).to(x.device)
        position_ids_expanded = position_ids[:, None, :].float()

        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
        with torch.autocast(device_type=device_type, enabled=False):  # force float32
            freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(1, 2)
            emb = torch.cat((freqs, freqs), dim=-1)
            cos = emb.cos() * self.attention_scaling
            sin = emb.sin() * self.attention_scaling

        return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype)


@auto_docstring
class GraniteMoeHybridModel(GraniteMoeHybridPreTrainedModel):
    def __init__(self, config: GraniteMoeHybridConfig):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        self.layers = nn.ModuleList(
            [GraniteMoeHybridDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.norm = GraniteMoeHybridRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.gradient_checkpointing = False

        self.embedding_multiplier = config.embedding_multiplier
        self.hidden_size = config.hidden_size
        self.num_heads = config.num_attention_heads
        self.head_dim = self.hidden_size // self.num_heads
        self.max_position_embeddings = config.max_position_embeddings
        self.rope_theta = config.rope_theta

        self.position_embedding_type = config.position_embedding_type
        self.rotary_emb = GraniteMoeHybridRotaryEmbedding(config) if self.position_embedding_type == "rope" else None

        # Initialize weights and apply final processing
        self.post_init()

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Union[Cache, list[torch.FloatTensor]]] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        output_router_logits: Optional[bool] = None,
        return_dict: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[GraniteFlashAttentionKwargs],
    ) -> Union[tuple, MoeModelOutputWithPast]:
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_router_logits = (
            output_router_logits if output_router_logits is not None else self.config.output_router_logits
        )
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        use_cache = use_cache if use_cache is not None else self.config.use_cache
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")

        if self.gradient_checkpointing and self.training and use_cache:
            logger.warning_once(
                "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`."
            )
            use_cache = False

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)
        inputs_embeds = inputs_embeds * self.embedding_multiplier

        if use_cache and past_key_values is None:
            logger.warning_once(
                "GraniteMoeHybrid requires an initialized `HybridMambaAttentionDynamicCache` to return a cache. "
                "Because one was not provided, no cache will be returned."
            )

        if cache_position is None:
            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )
        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        causal_mask = self._update_causal_mask(
            attention_mask, inputs_embeds, cache_position, past_key_values, output_attentions
        )
        mamba_mask = self._update_mamba_mask(attention_mask, cache_position)

        hidden_states = inputs_embeds

        position_embeddings = None
        # create position embeddings to be shared across the decoder layers
        if self.rotary_emb is not None:
            position_embeddings = self.rotary_emb(hidden_states, position_ids)

        # decoder layers
        all_hidden_states = () if output_hidden_states else None
        all_self_attns = () if output_attentions else None
        all_router_logits = () if output_router_logits else None

        for decoder_layer in self.layers:
            # Depending on the layer type we use a 2D mask (mamba) or the 4D causal mask (attention).
            layer_mask = mamba_mask if decoder_layer.layer_type == "mamba" else causal_mask

            if output_hidden_states:
                all_hidden_states += (hidden_states,)

            layer_outputs = decoder_layer(
                hidden_states,
                attention_mask=layer_mask,
                past_key_value=past_key_values,
                output_attentions=output_attentions,
                use_cache=use_cache,
                cache_position=cache_position,
                output_router_logits=output_router_logits,
                position_embeddings=position_embeddings,
                **kwargs,
            )

            hidden_states = layer_outputs[0]

            if output_attentions and layer_outputs[1] is not None:
                all_self_attns += (layer_outputs[1],)

            if output_router_logits and layer_outputs[-1] is not None:
                all_router_logits += (layer_outputs[-1],)

        hidden_states = self.norm(hidden_states)

        # add hidden states from the last decoder layer
        if output_hidden_states:
            all_hidden_states += (hidden_states,)

        if past_key_values and not past_key_values.has_previous_state:
            past_key_values.has_previous_state = True

        return MoeModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values if use_cache else None,
            hidden_states=all_hidden_states,
            attentions=all_self_attns,
            router_logits=all_router_logits,
        )

    def _update_causal_mask(
        self,
        attention_mask: Union[torch.Tensor, "BlockMask"],
        input_tensor: torch.Tensor,
        cache_position: torch.Tensor,
        past_key_values: Cache,
        output_attentions: bool = False,
    ):
        if self.config._attn_implementation == "flash_attention_2":
            if attention_mask is not None and (attention_mask == 0.0).any():
                return attention_mask
            return None
        if self.config._attn_implementation == "flex_attention":
            if isinstance(attention_mask, torch.Tensor):
                attention_mask = make_flex_block_causal_mask(attention_mask)
            return attention_mask

        past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
        using_compilable_cache = past_key_values.is_compileable if past_key_values is not None else False

        # When output_attentions is True, the sdpa implementation falls back to eager; otherwise we
        # can rely on SDPA's `is_causal` argument instead of materialising a mask.
        if (
            self.config._attn_implementation == "sdpa"
            and not using_compilable_cache
            and not output_attentions
            and AttentionMaskConverter._ignore_causal_mask_sdpa(
                attention_mask,
                inputs_embeds=input_tensor,
                past_key_values_length=past_seen_tokens,
                is_training=self.training,
            )
        ):
            return None

        dtype = input_tensor.dtype
        sequence_length = input_tensor.shape[1]
        if using_compilable_cache:
            target_length = past_key_values.get_max_cache_shape()
        else:
            target_length = (
                attention_mask.shape[-1]
                if isinstance(attention_mask, torch.Tensor)
                else past_seen_tokens + sequence_length + 1
            )

        # In case a 2D attention mask is provided, build the full 4D causal mask here.
        causal_mask = self._prepare_4d_causal_attention_mask_with_cache_position(
            attention_mask,
            sequence_length=sequence_length,
            target_length=target_length,
            dtype=dtype,
            cache_position=cache_position,
            batch_size=input_tensor.shape[0],
        )

        if (
            self.config._attn_implementation == "sdpa"
            and attention_mask is not None
            and attention_mask.device.type in ["cuda", "xpu", "npu"]
            and not output_attentions
        ):
            # Attend to all tokens in fully masked rows (e.g. when using left padding), as required by
            # the memory-efficient path of F.scaled_dot_product_attention.
            min_dtype = torch.finfo(dtype).min
            causal_mask = AttentionMaskConverter._unmask_unattended(causal_mask, min_dtype)

        return causal_mask

    @staticmethod
    def _prepare_4d_causal_attention_mask_with_cache_position(
        attention_mask: torch.Tensor,
        sequence_length: int,
        target_length: int,
        dtype: torch.dtype,
        cache_position: torch.Tensor,
        batch_size: int,
        **kwargs,
    ):
        """
        Creates a causal 4D mask of shape `(batch_size, 1, query_length, key_value_length)` from a 2D mask of shape
        `(batch_size, key_value_length)`, or if the input `attention_mask` is already 4D, do nothing.

        Args:
            attention_mask (`torch.Tensor`):
                A 2D attention mask of shape `(batch_size, key_value_length)` or a 4D attention mask of shape
                `(batch_size, 1, query_length, key_value_length)`.
            sequence_length (`int`):
                The sequence length being processed.
            target_length (`int`):
                The target length: when generating with static cache, the mask should be as long as the static cache,
                to account for the 0 padding, the part of the cache that is not filled yet.
            dtype (`torch.dtype`):
                The dtype to use for the 4D attention mask.
            cache_position (`torch.Tensor`):
                Indices depicting the position of the input sequence tokens in the sequence.
            batch_size (`int`):
                Batch size.
        """
        if attention_mask is not None and attention_mask.dim() == 4:
            # The mask already comes in inverted 4D form and requires no further slicing.
            causal_mask = attention_mask
        else:
            min_dtype = torch.finfo(dtype).min
            causal_mask = torch.full(
                (sequence_length, target_length), fill_value=min_dtype, dtype=dtype, device=cache_position.device
            )
            if sequence_length != 1:
                causal_mask = torch.triu(causal_mask, diagonal=1)
            causal_mask *= torch.arange(target_length, device=cache_position.device) > cache_position.reshape(-1, 1)
            causal_mask = causal_mask[None, None, :, :].expand(batch_size, 1, -1, -1)
            if attention_mask is not None:
                causal_mask = causal_mask.clone()  # copy to contiguous memory for in-place edit
                mask_length = attention_mask.shape[-1]
                padding_mask = causal_mask[:, :, :, :mask_length] + attention_mask[:, None, None, :].to(
                    causal_mask.device
                )
                padding_mask = padding_mask == 0
                causal_mask[:, :, :, :mask_length] = causal_mask[:, :, :, :mask_length].masked_fill(
                    padding_mask, min_dtype
                )

        return causal_mask

    def _update_mamba_mask(self, attention_mask, cache_position):
        """
        No need for zeroing states when
            1. Cached forward
            2. Attending to all inputs
        """
        mamba_mask = attention_mask
        if cache_position[0] > 0 or (attention_mask is not None and torch.all(attention_mask == 1)):
            mamba_mask = None
        return mamba_mask


def load_balancing_loss_func(
    gate_logits: Union[torch.Tensor, list[torch.Tensor], None],
    num_experts: Optional[int] = None,
    top_k=2,
    attention_mask: Optional[torch.Tensor] = None,
) -> Union[torch.Tensor, int]:
    r"""
    Computes auxiliary load balancing loss as in Switch Transformer - implemented in Pytorch.

    See Switch Transformer (https://huggingface.co/papers/2101.03961) for more details. This function implements the loss
    function presented in equations (4) - (6) of the paper. It aims at penalizing cases where the routing between
    experts is too unbalanced.

    Args:
        gate_logits:
            Logits from the `gate`, should be a tuple of model.config.num_hidden_layers tensors of
            shape [batch_size X sequence_length, num_experts].
        num_experts:
            Number of experts
        top_k:
            The number of experts to route per-token, can be also interpreted as the `top-k` routing
            parameter.
        attention_mask (`torch.Tensor`, *optional*):
            The attention_mask used in the forward function, of
            shape [batch_size X sequence_length], if not None.

    Returns:
        The auxiliary loss.
    """
    if gate_logits is None or not isinstance(gate_logits, tuple):
        return 0

    if isinstance(gate_logits, tuple):
        compute_device = gate_logits[0].device
        concatenated_gate_logits = torch.cat([layer_gate.to(compute_device) for layer_gate in gate_logits], dim=0)

    routing_weights = torch.nn.functional.softmax(concatenated_gate_logits, dim=-1)

    _, selected_experts = torch.topk(routing_weights, top_k, dim=-1)

    expert_mask = torch.nn.functional.one_hot(selected_experts, num_experts)

    if attention_mask is None:
        # Compute the percentage of tokens routed to each expert
        tokens_per_expert = torch.mean(expert_mask.float(), dim=0)

        # Compute the average probability of routing to these experts
        router_prob_per_expert = torch.mean(routing_weights, dim=0)
    else:
        batch_size, sequence_length = attention_mask.shape
        num_hidden_layers = concatenated_gate_logits.shape[0] // (batch_size * sequence_length)

        # Compute the mask that masks all padding tokens as 0 with the same shape as expert_mask
        expert_attention_mask = (
            attention_mask[None, :, :, None, None]
            .expand((num_hidden_layers, batch_size, sequence_length, top_k, num_experts))
            .reshape(-1, top_k, num_experts)
            .to(compute_device)
        )

        # Compute the percentage of tokens routed to each expert
        tokens_per_expert = torch.sum(expert_mask.float() * expert_attention_mask, dim=0) / torch.sum(
            expert_attention_mask, dim=0
        )

        # Compute the mask that masks all padding tokens as 0 with the same shape as tokens_per_expert
        router_per_expert_attention_mask = (
            attention_mask[None, :, :, None]
            .expand((num_hidden_layers, batch_size, sequence_length, num_experts))
            .reshape(-1, num_experts)
            .to(compute_device)
        )

        # Compute the average probability of routing to these experts
        router_prob_per_expert = torch.sum(routing_weights * router_per_expert_attention_mask, dim=0) / torch.sum(
            router_per_expert_attention_mask, dim=0
        )

    overall_loss = torch.sum(tokens_per_expert * router_prob_per_expert.unsqueeze(0))
    return overall_loss * num_experts


@auto_docstring
class GraniteMoeHybridForCausalLM(GraniteMoeHybridPreTrainedModel, GenerationMixin):
    _tied_weights_keys = ["lm_head.weight"]

    def __init__(self, config: GraniteMoeHybridConfig):
        super().__init__(config)
        self.model = GraniteMoeHybridModel(config)
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)
        self.router_aux_loss_coef = config.router_aux_loss_coef
        self.num_experts = config.num_local_experts
        self.num_experts_per_tok = config.num_experts_per_tok
        # Initialize weights and apply final processing
        self.post_init()

    def set_decoder(self, decoder):
        self.model = decoder

    def get_decoder(self):
        return self.model

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Union[Cache, list[torch.FloatTensor]]] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        output_router_logits: Optional[bool] = None,
        return_dict: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: Union[int, torch.Tensor] = 0,
        **kwargs,
    ) -> Union[tuple, MoeCausalLMOutputWithPast]:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the masked language modeling loss. Indices should either be in `[0, ...,
            config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored
            (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`.

        Example:

        ```python
        >>> from transformers import AutoTokenizer, GraniteMoeHybridForCausalLM

        >>> model = GraniteMoeHybridForCausalLM.from_pretrained("ibm/PowerMoE-3b")
        >>> tokenizer = AutoTokenizer.from_pretrained("ibm/PowerMoE-3b")

        >>> prompt = "Hey, are you conscious? Can you talk to me?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
        ```"""
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_router_logits = (
            output_router_logits if output_router_logits is not None else self.config.output_router_logits
        )
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        outputs = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            output_router_logits=output_router_logits,
            return_dict=return_dict,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = outputs[0]
        # Only compute the logits that are actually needed for generation or for the loss.
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])
        logits = logits / self.config.logits_scaling

        loss = None
        if labels is not None:
            # Upcast to float to avoid potential precision issues when computing the loss
            logits = logits.float()
            loss = self.loss_function(logits, labels, vocab_size=self.config.vocab_size, **kwargs)

        aux_loss = None
        if output_router_logits:
            aux_loss = load_balancing_loss_func(
                outputs.router_logits if return_dict else outputs[-1],
                self.num_experts,
                self.num_experts_per_tok,
                attention_mask,
            )
            if labels is not None:
                loss += self.router_aux_loss_coef * aux_loss.to(loss.device)  # make sure to reside on the same device

        if not return_dict:
            output = (logits,) + outputs[1:]
            if output_router_logits:
                output = (aux_loss,) + output
            return (loss,) + output if loss is not None else output

        return MoeCausalLMOutputWithPast(
            loss=loss,
            aux_loss=aux_loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
            router_logits=outputs.router_logits,
        )

    def prepare_inputs_for_generation(
        self,
        input_ids,
        past_key_values=None,
        attention_mask=None,
        inputs_embeds=None,
        cache_position=None,
        position_ids=None,
        use_cache=True,
        **kwargs,
    ):
        # Overwritten -- uses a unique cache type, `HybridMambaAttentionDynamicCache`

        empty_past_kv = past_key_values is None

        # If we have a cache, slice `input_ids` through `cache_position` to keep only the unprocessed tokens.
        # Exception 1: when passing inputs_embeds, input_ids may be missing entries
        # Exception 2: some generation methods do special slicing of input_ids, so we don't need to do it here
        if not empty_past_kv:
            if inputs_embeds is not None or cache_position[-1] >= input_ids.shape[1]:  # Exception 1
                input_ids = input_ids[:, -cache_position.shape[0] :]
            elif input_ids.shape[1] != cache_position.shape[0]:  # Default case (the "else" is Exception 2)
                input_ids = input_ids[:, cache_position]
        else:
            past_key_values = HybridMambaAttentionDynamicCache(
                self.config, input_ids.shape[0], self.dtype, device=self.device
            )

        if attention_mask is not None and position_ids is None:
            # create position_ids on the fly for batch generation
            position_ids = attention_mask.long().cumsum(-1) - 1
            position_ids.masked_fill_(attention_mask == 0, 1)
            if not empty_past_kv:
                position_ids = position_ids[:, -input_ids.shape[1] :]

        if inputs_embeds is not None and empty_past_kv:
            model_inputs = {"inputs_embeds": inputs_embeds}
        else:
            model_inputs = {"input_ids": input_ids.contiguous()}  # `contiguous()` needed for compilation use cases

        model_inputs.update(
            {
                "position_ids": position_ids,
                "past_key_values": past_key_values,
                "use_cache": use_cache,
                "attention_mask": attention_mask,
                "cache_position": cache_position,
            }
        )
        return model_inputs


__all__ = ["GraniteMoeHybridForCausalLM", "GraniteMoeHybridModel", "GraniteMoeHybridPreTrainedModel"]