
import math
import re
from itertools import cycle
from typing import Any, Callable, Optional, Union

import torch
from torch import nn
from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache
from ...generation import GenerationMixin
from ...modeling_attn_mask_utils import AttentionMaskConverter
from ...modeling_flash_attention_utils import FlashAttentionKwargs
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast, SequenceClassifierOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import auto_docstring, logging
from ...utils.import_utils import is_causal_conv1d_available, is_mamba_ssm_available
from .configuration_zamba2 import Zamba2Config


if is_mamba_ssm_available():
    from mamba_ssm.ops.triton.selective_state_update import selective_state_update
    from mamba_ssm.ops.triton.ssd_combined import mamba_chunk_scan_combined, mamba_split_conv1d_scan_combined
else:
    selective_state_update, mamba_chunk_scan_combined, mamba_split_conv1d_scan_combined = None, None, None

if is_causal_conv1d_available():
    from causal_conv1d import causal_conv1d_fn, causal_conv1d_update
else:
    causal_conv1d_update, causal_conv1d_fn = None, None


logger = logging.get_logger(__name__)


class Zamba2RMSNormGated(torch.nn.Module):
    def __init__(self, hidden_size, group_size, eps=1e-6):
        super().__init__()
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.variance_epsilon = eps
        self.group_size = group_size

    def forward(self, hidden_states, gate=None):
        input_dtype = hidden_states.dtype
        hidden_states = hidden_states.to(torch.float32)
        if gate is not None:
            hidden_states = hidden_states * nn.functional.silu(gate.to(torch.float32))
        *prefix_dims, last_dim = hidden_states.shape
        group_count = last_dim // self.group_size
        hidden_states_group = hidden_states.view(*prefix_dims, group_count, self.group_size)
        variance = hidden_states_group.pow(2).mean(-1, keepdim=True)
        hidden_states_group = hidden_states_group * torch.rsqrt(variance + self.variance_epsilon)
        hidden_states = hidden_states_group.view(*prefix_dims, group_count * self.group_size)
        return self.weight * hidden_states.to(input_dtype)

class Zamba2RMSNorm(nn.Module):
    def __init__(self, hidden_size, eps=1e-6):
        """
        Zamba2RMSNorm is equivalent to T5LayerNorm
        """
        super().__init__()
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.variance_epsilon = eps

    def forward(self, hidden_states):
        input_dtype = hidden_states.dtype
        hidden_states = hidden_states.to(torch.float32)
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
        return self.weight * hidden_states.to(input_dtype)

    def extra_repr(self):
        return f"{tuple(self.weight.shape)}, eps={self.variance_epsilon}"

class Zamba2HybridDynamicCache(Cache):
    """
    A dynamic cache that can handle both the attention cache (which has a seq_len dimension) and the mamba cache
    (which has a constant shape regardless of seq_len).

    This cache has two sets of lists of tensors: `key_cache` and `value_cache` for attention cache and `conv_states`
    and `ssm_states` for mamba cache. Each of these lists has `num_layers` tensors. The expected shape for each tensor
    is as follows. For attention layers, `key_cache` and `value_cache` have a shape of `(batch_size, num_heads, seq_len, head_dim)`,
    while `conv_states` and `ssm_states` have a shape of `(batch_size, 0)` (empty tensors).
    For mamba layers, `key_cache` and `value_cache` have a shape of `(batch_size, 0)` (empty tensors),
    while `conv_states` represents the convolution state and has a shape of `(batch_size, d_inner, d_conv)`,
    and `ssm_states` represents the ssm state and has a shape of `(batch_size, d_inner, d_state)`.
    """

    is_compileable = False

    def __init__(
        self, config: Zamba2Config, batch_size: int, dtype: torch.dtype = torch.float16, device: Optional[str] = None
    ):
        self.dtype = dtype
        self.layers_block_type = config.layers_block_type
        self.has_previous_state = False
        self.intermediate_size = int(config.mamba_expand * config.hidden_size)
        self.ssm_state_size = config.mamba_d_state
        self.conv_kernel_size = config.mamba_d_conv
        self.n_mamba_heads = config.n_mamba_heads
        self.transformer_layers = []
        self._modules = {}
        self._parameters = {}
        self._buffers = {}
        self.conv_states = {}
        self.ssm_states = {}
        for i in range(config.num_hidden_layers):
            self.conv_states[i] = torch.zeros(
                batch_size,
                self.intermediate_size + 2 * config.mamba_ngroups * config.mamba_d_state,
                self.conv_kernel_size,
                device=device,
                dtype=dtype,
            )
            self.ssm_states[i] = torch.zeros(
                batch_size, self.n_mamba_heads, config.mamba_headdim, self.ssm_state_size, device=device, dtype=dtype
            )
            if self.layers_block_type[i] == "hybrid":
                self.transformer_layers.append(i)
        self.key_cache = [torch.tensor([[]] * batch_size, device=device) for _ in range(config.num_hidden_layers)]
        self.value_cache = [torch.tensor([[]] * batch_size, device=device) for _ in range(config.num_hidden_layers)]

    def __len__(self):
        return len(self.key_cache)

    def __getitem__(self, layer_idx: int) -> tuple[torch.Tensor, torch.Tensor]:
        return self.key_cache[layer_idx], self.value_cache[layer_idx]

    def update(
        self,
        key_states: torch.Tensor,
        value_states: torch.Tensor,
        layer_idx: int,
        cache_kwargs: Optional[dict[str, Any]] = None,
    ) -> tuple[torch.Tensor, torch.Tensor]:
        if self.key_cache[layer_idx].shape[-1] == 0:
            self.key_cache[layer_idx] = key_states
            self.value_cache[layer_idx] = value_states
        else:
            self.key_cache[layer_idx] = torch.cat([self.key_cache[layer_idx], key_states], dim=2)
            self.value_cache[layer_idx] = torch.cat([self.value_cache[layer_idx], value_states], dim=2)

        return self.key_cache[layer_idx], self.value_cache[layer_idx]

    def reorder_cache(self, beam_idx: torch.LongTensor):
        """Reorders the cache for beam search, given the selected beam indices."""
        for layer_idx in range(len(self.key_cache)):
            device = self.key_cache[layer_idx].device
            self.key_cache[layer_idx] = self.key_cache[layer_idx].index_select(0, beam_idx.to(device))
            device = self.value_cache[layer_idx].device
            self.value_cache[layer_idx] = self.value_cache[layer_idx].index_select(0, beam_idx.to(device))
            device = self.conv_states[layer_idx].device
            self.conv_states[layer_idx] = self.conv_states[layer_idx].index_select(0, beam_idx.to(device))
            device = self.ssm_states[layer_idx].device
            self.ssm_states[layer_idx] = self.ssm_states[layer_idx].index_select(0, beam_idx.to(device))

    def get_seq_length(self, layer_idx: Optional[int] = 0) -> int:
        """Returns the sequence length of the cached states. A layer index can be optionally passed."""
        # take any layer that contains a cache and is not an empty tensor
        layer_idx = self.transformer_layers[0] if layer_idx not in self.transformer_layers else layer_idx
        if len(self.key_cache) <= layer_idx or self.key_cache[layer_idx].numel() == 0:
            return 0
        return self.key_cache[layer_idx].shape[-2]

    def to_legacy_cache(self) -> tuple[tuple[torch.Tensor], tuple[torch.Tensor]]:
        raise NotImplementedError("Zamba2HybridDynamicCache does not have a legacy cache equivalent.")

    @classmethod
    def from_legacy_cache(cls, past_key_values: Optional[tuple[tuple[torch.FloatTensor]]] = None) -> "Cache":
        raise NotImplementedError("Zamba2HybridDynamicCache does not have a legacy cache equivalent.")

    def update_conv_state(
        self, layer_idx: int, new_conv_state: torch.Tensor, cache_position: torch.LongTensor
    ) -> torch.Tensor:
        conv_state = self.conv_states[layer_idx]
        cache_position = cache_position.clamp(0, self.conv_kernel_size - 1)

        conv_state = conv_state.roll(shifts=-1, dims=-1)
        conv_state[:, :, cache_position] = new_conv_state.to(conv_state.device)
        self.conv_states[layer_idx].zero_()
        self.conv_states[layer_idx] += conv_state
        return self.conv_states[layer_idx]

    def reset(self):
        for layer_idx in self.conv_states:
            self.conv_states[layer_idx].zero_()
            self.ssm_states[layer_idx].zero_()


class Zamba2RotaryEmbedding(nn.Module):
    def __init__(self, config: Zamba2Config, device=None):
        super().__init__()
        # BC: "rope_type" was originally "type"
        if hasattr(config, "rope_scaling") and isinstance(config.rope_scaling, dict):
            self.rope_type = config.rope_scaling.get("rope_type", config.rope_scaling.get("type"))
        else:
            self.rope_type = "default"
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings

        self.config = config
        self.rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]

        inv_freq, self.attention_scaling = self.rope_init_fn(self.config, device)
        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self.original_inv_freq = self.inv_freq

    @torch.no_grad()
    @dynamic_rope_update
    def forward(self, x, position_ids):
        inv_freq_expanded = self.inv_freq[None, :, None].float().expand(position_ids.shape[0], -1, 1).to(x.device)
        position_ids_expanded = position_ids[:, None, :].float()

        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
        with torch.autocast(device_type=device_type, enabled=False):  # force float32
            freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(1, 2)
            emb = torch.cat((freqs, freqs), dim=-1)
            cos = emb.cos() * self.attention_scaling
            sin = emb.sin() * self.attention_scaling

        return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype)
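
# Illustrative sketch of the hybrid cache defined above (not upstream code; `cfg` is an assumed,
# fully populated Zamba2Config):
#
#     cache = Zamba2HybridDynamicCache(cfg, batch_size=2, dtype=torch.float32, device="cpu")
#     for i in cache.conv_states:
#         # mamba conv state: (batch, intermediate + 2 * ngroups * d_state, d_conv)
#         # mamba ssm state:  (batch, n_mamba_heads, headdim, d_state)
#         print(i, tuple(cache.conv_states[i].shape), tuple(cache.ssm_states[i].shape))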

def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: Optional[torch.Tensor],
    scaling: float,
    dropout: float = 0.0,
    **kwargs,
):
    key_states = repeat_kv(key, module.num_key_value_groups)
    value_states = repeat_kv(value, module.num_key_value_groups)

    attn_weights = torch.matmul(query, key_states.transpose(2, 3)) * scaling
    if attention_mask is not None:
        causal_mask = attention_mask[:, :, :, : key_states.shape[-2]]
        attn_weights = attn_weights + causal_mask

    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)
    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)
    attn_output = torch.matmul(attn_weights, value_states)
    attn_output = attn_output.transpose(1, 2).contiguous()

    return attn_output, attn_weights


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., : x.shape[-1] // 2]
    x2 = x[..., x.shape[-1] // 2 :]
    return torch.cat((-x2, x1), dim=-1)


def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)
    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    return q_embed, k_embed

class Zamba2Attention(nn.Module):
    """
    Multi-headed attention from 'Attention Is All You Need' paper.

    Adapted from transformers.models.mistral.modeling_mistral.MistralAttention:
    The input dimension here is attention_hidden_size = 2 * hidden_size, and head_dim = attention_hidden_size // num_heads.
    The extra factor of 2 comes from the input being the concatenation of original_hidden_states with the output of the previous (mamba) layer
    (see fig. 2 in https://huggingface.co/papers/2405.16712).
    Additionally, replaced
    attn_weights = torch.matmul(query_states, key_states.transpose(2, 3)) / math.sqrt(self.head_dim) with
    attn_weights = torch.matmul(query_states, key_states.transpose(2, 3)) / math.sqrt(self.head_dim/2)
    Finally, this attention layer contributes to tied transformer blocks aimed at increasing compute without increasing model size. Because this
    layer is tied, un-tied adapter modules (formally the same as LoRA, but used in the base model) are added to the q, k, v projectors to increase
    expressivity with a small memory overhead (see Fig. 2 of https://huggingface.co/papers/2411.15242).
    """

    def __init__(
        self,
        config: Zamba2Config,
        layer_idx: Optional[int] = None,
        num_fwd_mem_blocks: Optional[int] = None,
        block_id: Optional[int] = None,
    ):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx

        self.attention_hidden_size = config.attention_hidden_size
        self.head_dim = config.attention_head_dim
        self.num_key_value_groups = config.num_attention_heads // config.num_key_value_heads
        self.scaling = (self.head_dim / 2) ** -0.5
        self.is_causal = True
        self.attention_dropout = config.attention_dropout

        self.q_proj = nn.Linear(config.attention_hidden_size, config.num_attention_heads * self.head_dim, bias=False)
        self.k_proj = nn.Linear(config.attention_hidden_size, config.num_key_value_heads * self.head_dim, bias=False)
        self.v_proj = nn.Linear(config.attention_hidden_size, config.num_key_value_heads * self.head_dim, bias=False)
        self.o_proj = nn.Linear(config.num_attention_heads * self.head_dim, config.hidden_size, bias=False)
        self.num_fwd_mem_blocks = num_fwd_mem_blocks
        self.layer_block_map = config.hybrid_layer_ids
        self.block_id = block_id

        if config.use_shared_attention_adapter:
            self.linear_q_adapter_list = nn.ModuleList([])
            self.linear_k_adapter_list = nn.ModuleList([])
            self.linear_v_adapter_list = nn.ModuleList([])

            for i in range(self.num_fwd_mem_blocks):
                if i % config.num_mem_blocks == block_id:
                    linear_q_adapter = nn.Sequential(
                        nn.Linear(self.attention_hidden_size, self.config.adapter_rank, bias=False),
                        nn.Linear(self.config.adapter_rank, self.attention_hidden_size, bias=False),
                    )
                    linear_k_adapter = nn.Sequential(
                        nn.Linear(self.attention_hidden_size, self.config.adapter_rank, bias=False),
                        nn.Linear(self.config.adapter_rank, self.attention_hidden_size, bias=False),
                    )
                    linear_v_adapter = nn.Sequential(
                        nn.Linear(self.attention_hidden_size, self.config.adapter_rank, bias=False),
                        nn.Linear(self.config.adapter_rank, self.attention_hidden_size, bias=False),
                    )
                else:
                    linear_q_adapter = nn.Identity()
                    linear_k_adapter = nn.Identity()
                    linear_v_adapter = nn.Identity()
                self.linear_q_adapter_list.append(linear_q_adapter)
                self.linear_k_adapter_list.append(linear_k_adapter)
                self.linear_v_adapter_list.append(linear_v_adapter)

        self.layer_dic = {value: index for index, value in enumerate(self.layer_block_map)}

    def forward(
        self,
        hidden_states: torch.Tensor,
        layer_idx: int,
        attention_mask: Optional[torch.Tensor] = None,
        past_key_value: Optional[Zamba2HybridDynamicCache] = None,
        position_embeddings: Optional[tuple[torch.Tensor, torch.Tensor]] = None,
        **kwargs: Unpack[FlashAttentionKwargs],
    ) -> tuple[torch.Tensor, Optional[torch.Tensor], Optional[tuple[torch.Tensor]]]:
        input_shape = hidden_states.shape[:-1]
        hidden_shape = (*input_shape, -1, self.head_dim)

        query_states = self.q_proj(hidden_states)
        key_states = self.k_proj(hidden_states)
        value_states = self.v_proj(hidden_states)

        if self.config.use_shared_attention_adapter:
            adapter_layer_idx = self.layer_dic[layer_idx]
            query_states = query_states + self.linear_q_adapter_list[adapter_layer_idx](hidden_states)
            key_states = key_states + self.linear_k_adapter_list[adapter_layer_idx](hidden_states)
            value_states = value_states + self.linear_v_adapter_list[adapter_layer_idx](hidden_states)

        query_states = query_states.view(hidden_shape).transpose(1, 2)
        key_states = key_states.view(hidden_shape).transpose(1, 2)
        value_states = value_states.view(hidden_shape).transpose(1, 2)

        if self.config.use_mem_rope:
            cos, sin = position_embeddings
            query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_value is not None:
            key_states, value_states = past_key_value.update(key_states, value_states, layer_idx)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            dropout=0.0 if not self.training else self.attention_dropout,
            scaling=self.scaling,
            **kwargs,
        )

        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
        attn_output = self.o_proj(attn_output)
        return attn_output, attn_weights

def pad_tensor_by_size(input_tensor: torch.Tensor, pad_size: int):
    """
    Padding x tensor with `pad_size` on the seq_len dim (dim=1)

    Assumes that we only have tensors of either size 4 or 3
    """
    pad_shape = (0, 0, 0, 0, 0, pad_size, 0, 0) if len(input_tensor.shape) == 4 else (0, 0, 0, pad_size, 0, 0)

    return torch.nn.functional.pad(input_tensor, pad_shape, mode="constant", value=0)


def reshape_into_chunks(input_tensor, pad_size, chunk_size):
    """
    Padding input_tensor with `pad_size` on the seq_len dim (dim=1) and
    simultaneously splitting it into chunk sequences.

    Assumes that we only have tensors of either size 4 or 3
    """
    # [bsz, seq_len, ...] -> [bsz, -1, chunk_size, ...]
    input_tensor = pad_tensor_by_size(input_tensor, pad_size)

    if len(input_tensor.shape) == 3:
        return input_tensor.reshape(input_tensor.shape[0], -1, chunk_size, input_tensor.shape[2])
    else:
        return input_tensor.reshape(
            input_tensor.shape[0], -1, chunk_size, input_tensor.shape[2], input_tensor.shape[3]
        )


def segment_sum(input_tensor):
    """
    More stable segment sum calculation. Uses cumulative sums and masking instead of direct subtractions.
    """
    chunk_size = input_tensor.size(-1)
    # 1. expand input tensor to have an additional dimension and repeat along that dimension
    # [..., chunk_size] -> [..., chunk_size, chunk_size]
    input_tensor = input_tensor[..., None].expand(*input_tensor.size(), chunk_size)
    # 2. create a lower triangular mask with the diagonal set to 0 to 0 out elements above diag
    mask = torch.tril(torch.ones(chunk_size, chunk_size, device=input_tensor.device, dtype=torch.bool), diagonal=-1)
    input_tensor = input_tensor.masked_fill(~mask, 0)
    # 3. compute actual cumsum
    tensor_segsum = torch.cumsum(input_tensor, dim=-2)
    # 4. apply mask to keep only the lower triangular part of the cumulative sum result (incl. the diagonal this time)
    mask = torch.tril(torch.ones(chunk_size, chunk_size, device=input_tensor.device, dtype=torch.bool), diagonal=0)
    tensor_segsum = tensor_segsum.masked_fill(~mask, -torch.inf)
    return tensor_segsum

is_fast_path_available = all((selective_state_update, causal_conv1d_fn, causal_conv1d_update))


class Zamba2MambaMixer(nn.Module):
    """
    Compute ∆, A, B, C, and D the state space parameters and compute the `contextualized_states`.
    A, D are input independent (see Mamba paper [1] Section 3.5.2 "Interpretation of A" for why A isn't selective)
    ∆, B, C are input-dependent (this is a key difference between Mamba and the linear time invariant S4,
    and is why Mamba is called **selective** state spaces)
    """

    def __init__(self, config: Zamba2Config, layer_idx: Optional[int] = None):
        super().__init__()
        self.config = config
        self.hidden_size = config.hidden_size
        self.ssm_state_size = config.mamba_d_state
        self.conv_kernel_size = config.mamba_d_conv
        self.intermediate_size = int(config.mamba_expand * self.hidden_size)
        self.layer_idx = layer_idx
        self.use_conv_bias = config.use_conv_bias
        self.activation = "silu"
        self.act = nn.SiLU()
        self.use_mem_eff_path = config.use_mem_eff_path

        self.n_groups = config.mamba_ngroups
        self.head_dim = config.mamba_headdim
        self.num_heads = self.config.n_mamba_heads
        self.chunk_size = config.chunk_size

        self.time_step_limit = config.time_step_limit
        self.time_step_min = config.time_step_min
        self.time_step_max = config.time_step_max

        self.conv_dim = self.intermediate_size + 2 * self.n_groups * self.ssm_state_size
        self.conv1d = nn.Conv1d(
            in_channels=self.conv_dim,
            out_channels=self.conv_dim,
            bias=True,
            kernel_size=config.mamba_d_conv,
            groups=self.conv_dim,
            padding=config.mamba_d_conv - 1,
        )

        # projection of the input hidden states
        projection_size = self.intermediate_size + self.conv_dim + self.num_heads
        self.in_proj = nn.Linear(self.hidden_size, projection_size, bias=config.add_bias_linear)

        # time step projection (discretization)
        self.dt_bias = nn.Parameter(torch.ones(self.num_heads))

        # S4D real initialization; these are not discretized
        A = torch.arange(1, self.num_heads + 1)
        self.A_log = nn.Parameter(torch.log(A))
        self.A_log._no_weight_decay = True
        self.norm = Zamba2RMSNormGated(
            self.intermediate_size, group_size=self.intermediate_size // self.n_groups, eps=1e-5
        )
        self.D = nn.Parameter(torch.ones(self.num_heads))
        self.D._no_weight_decay = True

        self.out_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.add_bias_linear)

        if not is_fast_path_available:
            logger.warning_once(
                "The fast path is not available because one of `(selective_state_update, causal_conv1d_fn,"
                " causal_conv1d_update)` is None. Falling back to the naive implementation. To install follow"
                " https://github.com/state-spaces/mamba/#installation and https://github.com/Dao-AILab/causal-conv1d"
            )

    def cuda_kernels_forward(
        self,
        hidden_states: torch.Tensor,
        cache_params: Optional[Zamba2HybridDynamicCache] = None,
        attention_mask: Optional[torch.Tensor] = None,
    ):
        # set up dimensions for reshapes later
        batch_size, seq_len, _ = hidden_states.shape
        groups_time_state_size = self.n_groups * self.ssm_state_size
        d_to_remove = 2 * self.intermediate_size + 2 * self.n_groups * self.ssm_state_size + self.num_heads

        # single-token decoding: use the cached states
        if cache_params is not None and cache_params.has_previous_state:
            in_projected_states = self.in_proj(hidden_states.squeeze(1))
            d_mlp = (in_projected_states.shape[-1] - d_to_remove) // 2
            split_projection_dim = [d_mlp, d_mlp, self.intermediate_size, self.conv_dim, self.num_heads]
            _, _, gate, hidden_states_B_C, dt = torch.split(in_projected_states, split_projection_dim, dim=-1)

            hidden_states_B_C = causal_conv1d_update(
                hidden_states_B_C,
                cache_params.conv_states[self.layer_idx],
                self.conv1d.weight.squeeze(1),
                self.conv1d.bias,
                self.activation,
            )

            hidden_states, B, C = torch.split(
                hidden_states_B_C, [self.intermediate_size, groups_time_state_size, groups_time_state_size], dim=-1
            )
            A = -torch.exp(self.A_log.float())  # (nheads,)

            A = A[:, None, ...][:, :, None].expand(-1, self.head_dim, self.ssm_state_size).to(dtype=torch.float32)
            dt = dt[:, :, None].expand(-1, -1, self.head_dim)
            dt_bias = self.dt_bias[:, None, ...].expand(-1, self.head_dim)
            D = self.D[:, None, ...].expand(-1, self.head_dim)
            B = B.view(batch_size, self.n_groups, B.shape[1] // self.n_groups)
            C = C.view(batch_size, self.n_groups, C.shape[1] // self.n_groups)
            hidden_states_reshaped = hidden_states.view(batch_size, self.num_heads, self.head_dim)
            hidden_states = selective_state_update(
                cache_params.ssm_states[self.layer_idx],
                hidden_states_reshaped,
                dt,
                A,
                B,
                C,
                D,
                z=None,
                dt_bias=dt_bias,
                dt_softplus=True,
            )
            hidden_states = hidden_states.view(batch_size, self.num_heads * self.head_dim)
            hidden_states = self.norm(hidden_states, gate)
            out = self.out_proj(hidden_states)[:, None, ...]
        # no cache found: call the chunked kernel
        else:
            if attention_mask is not None and not torch.all(attention_mask == 1):
                # tune out hidden states for pad tokens, see https://github.com/state-spaces/mamba/issues/66
                dtype = hidden_states.dtype
                hidden_states = (hidden_states * attention_mask[:, :, None]).to(dtype)
            projected_states = self.in_proj(hidden_states)
            A = -torch.exp(self.A_log.float())
            dt_limit_kwargs = {} if self.time_step_limit is None else {"dt_limit": self.time_step_limit}
            if attention_mask is not None:
                input_not_masked = torch.all(attention_mask == 1)
            else:
                input_not_masked = True

            if self.use_mem_eff_path and self.training and cache_params is None and input_not_masked:
                out, ssm_state = mamba_split_conv1d_scan_combined(
                    projected_states,
                    self.conv1d.weight.squeeze(1),
                    self.conv1d.bias,
                    self.dt_bias,
                    A,
                    D=self.D,
                    chunk_size=self.chunk_size,
                    seq_idx=None,
                    activation=self.activation,
                    rmsnorm_weight=self.norm.weight,
                    rmsnorm_eps=self.norm.variance_epsilon,
                    outproj_weight=self.out_proj.weight,
                    outproj_bias=self.out_proj.bias,
                    headdim=self.head_dim,
                    ngroups=self.n_groups,
                    norm_before_gate=False,
                    return_final_states=True,
                    **dt_limit_kwargs,
                )
            else:
                gate, hidden_states_B_C, time_step = torch.split(
                    projected_states, [self.intermediate_size, self.conv_dim, self.num_heads], dim=-1
                )

                # 1D convolution
                if cache_params is not None:
                    hidden_states_B_C_t = hidden_states_B_C.transpose(1, 2)
                    conv_state = nn.functional.pad(
                        hidden_states_B_C_t, (self.conv_kernel_size - hidden_states_B_C_t.shape[-1], 0)
                    )
                    cache_params.conv_states[self.layer_idx].copy_(conv_state)
                if causal_conv1d_fn is None or self.activation not in ["silu", "swish"]:
                    hidden_states_B_C = self.act(
                        self.conv1d(hidden_states_B_C.transpose(1, 2)).transpose(1, 2)[:, :seq_len]
                    )
                else:
                    hidden_states_B_C = causal_conv1d_fn(
                        x=hidden_states_B_C.transpose(1, 2),
                        weight=self.conv1d.weight.squeeze(1),
                        bias=self.conv1d.bias,
                        activation=self.activation,
                    ).transpose(1, 2)[:, :seq_len]
                hidden_states, B, C = torch.split(
                    hidden_states_B_C,
                    [self.intermediate_size, groups_time_state_size, groups_time_state_size],
                    dim=-1,
                )
                if attention_mask is not None and not torch.all(attention_mask == 1):
                    # tune out hidden states for pad tokens, see https://github.com/state-spaces/mamba/issues/66
                    dtype = hidden_states.dtype
                    hidden_states = (hidden_states * attention_mask[:, :, None]).to(dtype)
                scan_output, ssm_state = mamba_chunk_scan_combined(
                    hidden_states.view(batch_size, seq_len, -1, self.head_dim),
                    time_step,
                    A,
                    B.view(batch_size, seq_len, self.n_groups, -1),
                    C.view(batch_size, seq_len, self.n_groups, -1),
                    chunk_size=self.chunk_size,
                    D=self.D,
                    z=None,
                    seq_idx=None,
                    return_final_states=True,
                    dt_bias=self.dt_bias,
                    dt_softplus=True,
                    **dt_limit_kwargs,
                )
                if ssm_state is not None and cache_params is not None:
                    cache_params.ssm_states[self.layer_idx].copy_(ssm_state)
                scan_output = scan_output.view(batch_size, seq_len, -1)
                # multiply the "gate" branch and apply the extra normalization layer
                scan_output = self.norm(scan_output, gate)
                out = self.out_proj(scan_output)
        return out

    def torch_forward(self, input_states, cache_params: Optional[Zamba2HybridDynamicCache] = None, attention_mask: Optional[torch.Tensor] = None):
        batch_size, seq_len, _ = input_states.shape
        dtype = input_states.dtype
        # 1. Gated MLP's linear projection
        if cache_params is not None and cache_params.has_previous_state:
            projected_states = self.in_proj(input_states.squeeze(1))
        else:
            if attention_mask is not None and not torch.all(attention_mask == 1):
                # tune out hidden states for pad tokens, see https://github.com/state-spaces/mamba/issues/66
                input_states = (input_states * attention_mask[:, :, None]).to(dtype)
            projected_states = self.in_proj(input_states)
        d_mlp = (
            projected_states.shape[-1] - 2 * self.intermediate_size - 2 * self.n_groups * self.ssm_state_size - self.num_heads
        ) // 2
        _, _, gate, hidden_states, dt = projected_states.split(
            [d_mlp, d_mlp, self.intermediate_size, self.conv_dim, self.num_heads], dim=-1
        )

        # 2. Convolution sequence transformation
        if cache_params is not None:
            ssm_state = cache_params.ssm_states[self.layer_idx].clone()
            ssm_state = ssm_state.to(hidden_states.device)
            if cache_params.has_previous_state:
                gate = gate.unsqueeze(1)
                conv_state = cache_params.conv_states[self.layer_idx]
                conv_state = torch.roll(conv_state, shifts=-1, dims=-1)
                # handle batched generation - states are copied through
                conv_state[:, :, -1] = hidden_states[:, 0, :] if hidden_states.ndim == 3 else hidden_states
                cache_params.conv_states[self.layer_idx].copy_(conv_state)
                hidden_states = torch.sum(conv_state.to(projected_states.device) * self.conv1d.weight[:, 0, :], dim=-1)
                if self.use_conv_bias:
                    hidden_states += self.conv1d.bias
                hidden_states = self.act(hidden_states).to(dtype)[:, None, ...]  # [batch, 1, intermediate_size]
            else:
                hidden_states = hidden_states.transpose(1, 2)
                conv_state = nn.functional.pad(hidden_states, (self.conv_kernel_size - hidden_states.shape[-1], 0))
                cache_params.conv_states[self.layer_idx].copy_(conv_state)
                hidden_states = self.act(self.conv1d(hidden_states).transpose(1, 2))[:, :seq_len, :]
                if attention_mask is not None and not torch.all(attention_mask == 1):
                    dtype = hidden_states.dtype
                    # tune out hidden states for pad tokens, see https://github.com/state-spaces/mamba/issues/66
                    hidden_states = (hidden_states * attention_mask[:, :, None]).to(dtype)
        else:
            ssm_state = torch.zeros(
                (batch_size, self.num_heads, self.head_dim, self.ssm_state_size),
                device=hidden_states.device,
                dtype=dtype,
            )
            hidden_states = self.act(self.conv1d(hidden_states.transpose(1, 2))[..., :seq_len].transpose(1, 2))
        hidden_states, B, C = torch.split(
            hidden_states,
            [self.intermediate_size, self.n_groups * self.ssm_state_size, self.n_groups * self.ssm_state_size],
            dim=-1,
        )
        A = -torch.exp(self.A_log.float())  # [num_heads]

        # 3. SSM transformation
        if cache_params is not None and cache_params.has_previous_state:
            # single-token update against the cached ssm state
            dt = dt[:, None, ...] if dt.ndim == 2 else dt[:, 0, :][:, None, ...]
            dt = dt.transpose(1, 2).expand(batch_size, dt.shape[-1], self.head_dim)
            dt_bias = self.dt_bias[..., None].expand(self.dt_bias.shape[0], self.head_dim)

            dt = torch.nn.functional.softplus(dt + dt_bias.to(dt.dtype))
            dt = torch.clamp(dt, self.time_step_min)
            A = A[..., None, None].expand(self.num_heads, self.head_dim, self.ssm_state_size).to(dtype=torch.float32)
            # [bsz, num_heads, head_dim, state_size]
            dA = torch.exp(dt[..., None] * A)

            # discretize B: [bsz, n_groups * state_size] -> [bsz, num_heads, state_size]
            B = B.reshape(batch_size, self.n_groups, -1)[..., None, :]
            B = B.expand(batch_size, self.n_groups, self.num_heads // self.n_groups, B.shape[-1]).contiguous()
            B = B.reshape(batch_size, -1, B.shape[-1])
            dB = dt[..., None] * B[..., None, :]

            # discretize x into dB: [bsz, intermediate_size] -> [bsz, num_heads, head_dim]
            hidden_states = hidden_states.reshape(batch_size, -1, self.head_dim)
            dBx = dB * hidden_states[..., None]

            # state update
            cache_params.ssm_states[self.layer_idx].copy_(cache_params.ssm_states[self.layer_idx] * dA + dBx)

            # output: [bsz, n_groups * state_size] -> [bsz, num_heads, state_size]
            C = C.reshape(batch_size, self.n_groups, -1)[..., None, :]
            C = C.expand(batch_size, self.n_groups, self.num_heads // self.n_groups, C.shape[-1]).contiguous()
            C = C.reshape(batch_size, -1, C.shape[-1])

            ssm_states = cache_params.ssm_states[self.layer_idx].to(C.dtype)  # [b, h, d, n]
            ssm_states_reshaped = ssm_states.view(batch_size * self.num_heads, self.head_dim, self.ssm_state_size)
            C_reshaped = C.view(batch_size * self.num_heads, self.ssm_state_size, 1)
            y = torch.bmm(ssm_states_reshaped, C_reshaped)
            y = y.view(batch_size, self.num_heads, self.head_dim)

            # D skip connection
            D = self.D[..., None].expand(self.D.shape[0], self.head_dim)
            y = (y + hidden_states * D).to(y.dtype)

            y = y.reshape(batch_size, -1)[:, None, ...]
        else:
            # begin ssd naive implementation without einsums
            dt = nn.functional.softplus(dt + self.dt_bias)
            dt = torch.clamp(dt, self.time_step_min)
            hidden_states = hidden_states.reshape(batch_size, seq_len, -1, self.head_dim).float()
            B = B.reshape(batch_size, seq_len, -1, self.ssm_state_size).float()
            C = C.reshape(batch_size, seq_len, -1, self.ssm_state_size).float()
            B = B.repeat_interleave(self.num_heads // self.n_groups, dim=2, output_size=self.num_heads)
            C = C.repeat_interleave(self.num_heads // self.n_groups, dim=2, output_size=self.num_heads)
            pad_size = (self.chunk_size - seq_len % self.chunk_size) % self.chunk_size

            D_residual = self.D[..., None] * pad_tensor_by_size(hidden_states, pad_size)

            # discretize x and A
            hidden_states = hidden_states * dt[..., None]
            A = A.to(hidden_states.dtype) * dt

            # rearrange into blocks/chunks
            hidden_states, A, B, C = [
                reshape_into_chunks(t, pad_size, self.chunk_size) for t in (hidden_states, A, B, C)
            ]

            # [bsz, -1, chunk_size, num_heads] -> [bsz, num_heads, -1, chunk_size]
            A = A.permute(0, 3, 1, 2)
            A_cumsum = torch.cumsum(A, dim=-1)

            # 1. compute the output for each intra-chunk (diagonal blocks); analog of a causal mask
            L = torch.exp(segment_sum(A))

            # contraction of C and B to get G (attention-weights like)
            G_intermediate = C[:, :, :, None, :, :] * B[:, :, None, :, :, :]
            G = G_intermediate.sum(dim=-1)

            # compute M, equivalent to applying an attention mask to the weights
            M_intermediate = G[..., None] * L.permute(0, 2, 3, 4, 1)[..., None]
            M = M_intermediate.sum(dim=-1)

            # compute Y_diag (apply to values)
            Y_diag = (M[..., None] * hidden_states[:, :, None]).sum(dim=3)

            # 2. compute the state for each intra-chunk (right term of the low-rank factorization; B terms)
            decay_states = torch.exp(A_cumsum[:, :, :, -1:] - A_cumsum)
            B_decay_contraction = B * decay_states.permute(0, 2, 3, 1)[..., None]
            states = (
                (B_decay_contraction.permute(0, 1, 3, 2, 4)[..., None] * hidden_states.permute(0, 1, 3, 2, 4)[..., None, :])
                .sum(dim=3)
                .permute(0, 1, 2, 4, 3)
            )
            if cache_params is not None and cache_params.has_previous_state:
                previous_states = cache_params.ssm_states[self.layer_idx][:, None, ...]
            else:
                previous_states = torch.zeros_like(states[:, :1])
            states = torch.cat([previous_states, states], dim=1)
            decay_chunk = torch.exp(segment_sum(nn.functional.pad(A_cumsum[:, :, :, -1], (1, 0))))

            states_permuted = states.permute(0, 2, 1, 3, 4)
            result = (decay_chunk[..., None, None] * states_permuted[:, :, None, ...]).sum(dim=2)
            new_states = result.permute(0, 2, 1, 3, 4)
            states, ssm_state = new_states[:, :-1], new_states[:, -1]

            # 3. state -> output conversion per chunk (left term of the low-rank factorization; C terms)
            state_decay_out = torch.exp(A_cumsum)
            C_times_states = C[..., None, :] * states[:, :, None, ...]
            state_decay_out_permuted = state_decay_out.permute(0, 2, 3, 1)
            Y_off = C_times_states.sum(-1) * state_decay_out_permuted[..., None]

            # add intra-chunk and inter-chunk terms (diagonal and off-diagonal blocks)
            y = Y_diag + Y_off
            # [bsz, -1, chunk_size, num_heads, head_dim] -> [bsz, (padded) seq_len, num_heads, head_dim]
            y = y.reshape(batch_size, -1, self.num_heads, self.head_dim)

            y = y + D_residual
            # cut off padded chunks
            if pad_size > 0:
                y = y[:, :seq_len, :, :]
            y = y.reshape(batch_size, seq_len, -1)
            if ssm_state is not None and cache_params is not None:
                cache_params.ssm_states[self.layer_idx].copy_(ssm_state)

        scan_output = self.norm(y, gate)

        # 4. final linear projection
        contextualized_states = self.out_proj(scan_output.to(dtype))  # [batch, seq_len, hidden_size]
        return contextualized_states

    def forward(
        self,
        hidden_states,
        cache_params: Optional[Zamba2HybridDynamicCache] = None,
        attention_mask: Optional[torch.Tensor] = None,
    ):
        if is_fast_path_available and "cuda" in self.in_proj.weight.device.type:
            return self.cuda_kernels_forward(hidden_states, cache_params, attention_mask)
        return self.torch_forward(hidden_states, cache_params, attention_mask)

class Zamba2MLP(nn.Module):
    def __init__(self, config: Zamba2Config, num_fwd_mem_blocks=None, block_id: Optional[int] = None):
        """
        This MLP layer contributes to tied transformer blocks aimed at increasing compute without increasing model size. Because this layer
        is tied, un-tied adapter modules (formally the same as LoRA, but used in the base model) are added to the up and gate projectors to increase expressivity with a small memory overhead.
        """
        super().__init__()
        self.config = config
        self.hidden_size = config.hidden_size
        self.intermediate_size = config.intermediate_size
        self.num_fwd_mem_blocks = num_fwd_mem_blocks
        self.block_id = block_id

        self.gate_up_proj = nn.Linear(self.hidden_size, 2 * self.intermediate_size, bias=config.add_bias_linear)
        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.add_bias_linear)
        self.act_fn = ACT2FN[config.hidden_act]

        self.gate_up_proj_adapter_list = nn.ModuleList([])
        for i in range(self.num_fwd_mem_blocks):
            if i % config.num_mem_blocks == block_id:
                gate_up_proj_adapter = nn.Sequential(
                    nn.Linear(self.config.hidden_size, self.config.adapter_rank, bias=False),
                    nn.Linear(self.config.adapter_rank, 2 * self.intermediate_size, bias=False),
                )
            else:
                gate_up_proj_adapter = nn.Identity()
            self.gate_up_proj_adapter_list.append(gate_up_proj_adapter)

        layer_block_map = config.hybrid_layer_ids
        self.layer_dic = {value: index for index, value in enumerate(layer_block_map)}

    def forward(self, hidden_state, layer_idx=None):
        gate_up_state = self.gate_up_proj(hidden_state)
        layer_idx = self.layer_dic[layer_idx]
        gate_up_state = gate_up_state + self.gate_up_proj_adapter_list[layer_idx](hidden_state)

        gate_up_state = torch.chunk(gate_up_state, 2, dim=-1)
        hidden_state = self.act_fn(gate_up_state[0]) * gate_up_state[1]
        output = self.down_proj(hidden_state)
        return output

class Zamba2AttentionDecoderLayer(nn.Module):
    def __init__(self, config: Zamba2Config, block_id: Optional[int] = None, layer_idx: Optional[int] = None):
        super().__init__()
        self.block_id = block_id
        num_gs = len(config.hybrid_layer_ids)
        self.self_attn = Zamba2Attention(config, layer_idx=-1, num_fwd_mem_blocks=num_gs, block_id=block_id)
        self.feed_forward = Zamba2MLP(config, num_fwd_mem_blocks=num_gs, block_id=block_id)
        self.input_layernorm = Zamba2RMSNorm(config.attention_hidden_size, eps=config.rms_norm_eps)
        self.pre_ff_layernorm = Zamba2RMSNorm(config.hidden_size, eps=config.rms_norm_eps)

    def forward(
        self,
        hidden_states: torch.Tensor,
        original_hidden_states: torch.Tensor,
        layer_idx: int,
        attention_mask: Optional[torch.Tensor] = None,
        past_key_value: Optional[Zamba2HybridDynamicCache] = None,
        output_attentions: Optional[bool] = False,
        position_embeddings: Optional[tuple[torch.FloatTensor, torch.FloatTensor]] = None,
        **kwargs,
    ) -> tuple[torch.FloatTensor, Optional[tuple[torch.FloatTensor, torch.FloatTensor]]]:
        """
        Args:
            hidden_states (`torch.FloatTensor`): output of previous Mamba layer of shape `(batch, seq_len, embed_dim)`
            original_hidden_states (`torch.FloatTensor`): word embedding output of shape `(batch, seq_len, embed_dim)`.
                This is concatenated with `hidden_states` (which is the output of the previous (mamba) layer). The
                concatenated tensor is then used as input of the pre-attention RMSNorm
                (see fig. 2 in https://huggingface.co/papers/2405.16712).
            attention_mask (`torch.FloatTensor`, *optional*): attention mask of size
                `(batch, sequence_length)` where padding elements are indicated by 0.
            past_key_value (`Zamba2HybridDynamicCache`, *optional*): cached past key and value projection states
            output_attentions (`bool`, *optional*):
                Whether or not to return the attentions tensors of all attention layers. See `attentions` under
                returned tensors for more detail.
            use_cache (`bool`, *optional*):
                If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding
                (see `past_key_values`).
            position_embeddings (`tuple[torch.FloatTensor, torch.FloatTensor]`, *optional*):
                Tuple containing the cosine and sine positional embeddings of shape `(batch_size, seq_len, head_dim)`,
                with `head_dim` being the embedding dimension of each attention head.
        """
        hidden_states = torch.concatenate([hidden_states, original_hidden_states], dim=-1)
        hidden_states = self.input_layernorm(hidden_states)
        hidden_states, self_attn_weights = self.self_attn(
            hidden_states=hidden_states,
            layer_idx=layer_idx,
            attention_mask=attention_mask,
            past_key_value=past_key_value,
            output_attentions=output_attentions,
            position_embeddings=position_embeddings,
            **kwargs,
        )

        hidden_states = self.pre_ff_layernorm(hidden_states)
        hidden_states = self.feed_forward(hidden_states, layer_idx)

        outputs = (hidden_states,)
        if output_attentions:
            outputs += (self_attn_weights,)

        return outputs
        Args:
            hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)`
            attention_mask (`torch.FloatTensor`, *optional*): attention mask of size
                `(batch, sequence_length)` where padding elements are indicated by 0.
            past_key_value (`Zamba2HybridDynamicCache`, *optional*): cached past key and value projection states
            output_attentions (`bool`, *optional*):
                Whether or not to return the attentions tensors of all attention layers. See `attentions` under
                returned tensors for more detail.
            use_cache (`bool`, *optional*):
                If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding
                (see `past_key_values`).
            cache_position (`torch.LongTensor` of shape `(sequence_length)`, *optional*):
                Indices depicting the position of the input sequence tokens in the sequence.
        N)rI   rt  r   )r  r  )r4   rI   r  r   r   r   r+  r  r  r   r  r   residualr  r  s                  r8   rQ   zZamba2MambaDecoderLayer.forward  s    < !
 :S9^M55dq 	 ,,];

'') # 
 ! !=0 ")++G((Gr9   )	NNNNNFFNN)rS   rT   rU   r    rk   r-   r/   r   r   ra   rI  r   r]   r   rQ   rV   rW   s   @r8   r  r    s   #| # # :>#'15.2=A,1$)59<@:||: !) 6: C=	:
 !.: ell+: !!9:: $D>: D>: !!1!12: $,ELL#9: 
u  (51B1BEDUDU1U+V"WW	X:r9   r  c                   l    e Zd Zdedej
                  def fdZ	 	 	 	 	 	 	 	 ddej                  de
ej                     de
e   de
ej                     d	e
ej                     d
e
e   de
e   de
e   de
ej                     deej                   e
eej                   ej                   f      f   fdZ xZS )Zamba2HybridLayershared_transformerlinearr  c                 L    t         |           || _        || _        || _        y r+   )r,   r-   r  mamba_decoderr  )r4   r  r  r  r7   s       r8   r-   zZamba2HybridLayer.__init__V  s'     	""4r9   rI   r  r   r   r   r+  r  r  r,  r   c
           	          | j                  |||||||	      }
|
d   }|r|
d   }| j                  |      }| j                  |||||||	      }
|r|
d   f|
dd z   }
|
S )aX  
        Args:
            hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)`
            original_hidden_states (`torch.FloatTensor`): word embedding output that will be concatenated with
            hidden activations to form the input of the shared transformer layer.
            layer_idx (`int`): layer number.
            attention_mask (`torch.FloatTensor`, *optional*): attention mask of size
                `(batch, sequence_length)` where padding elements are indicated by 0.
            past_key_value (`Zamba2HybridDynamicCache`, *optional*): cached past key and value projection states
            output_attentions (`bool`, *optional*):
                Whether or not to return the attentions tensors of all attention layers. See `attentions` under
                returned tensors for more detail.
            use_cache (`bool`, *optional*):
                If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding
                (see `past_key_values`).
            position_embeddings (`tuple[torch.FloatTensor, torch.FloatTensor]`, *optional*):
                Tuple containing the cosine and sine positional embeddings of shape `(batch_size, seq_len, head_dim)`,
                with `head_dim` being the embedding dimension of each attention head.
        )r  r   r   r+  r  r,  r   r   )r  r   r+  r  r  r,  r<   N)r  r  r  )r4   rI   r  r   r   r   r+  r  r  r,  layer_outputsr  r  s                r8   rQ   zZamba2HybridLayer.forward^  s    @ //#9&)/ 3 0 
 %2!$4! -a 0$(KK0I$J!**&?))/ 3 + 
 *1-/@AMRSRTDUUMr9   )NNNNNFFN)rS   rT   rU   r  r   r  r  r-   r/   r   r   rk   ra   rI  r   r]   r   rQ   rV   rW   s   @r8   r  r  U  s   5"=5GIyy5Yp5 :>#'15.2=A,1$):>>||> !) 6> C=	>
 !.> ell+> !!9:> $D>> D>> &e&6&67> 
u  (51B1BEDUDU1U+V"WW	X>r9   r  c                   N     e Zd ZU eed<   dZdZddgZdZdZ	dZ
dZdZ fdZ xZS )Zamba2PreTrainedModelrb   modelTr  r  r   c                    t         |   |       t        |t              rt	        j
                  t	        j                  | j                  j                        t        j                  | j                  j                        t        j                  | j                  j                        z
  z  t        j                  | j                  j                        z         j                  | j                  j                        }|t	        j                  t	        j                  |              z   }|j                   j"                  j%                  |       t	        j&                  d|j(                  dz         }|j*                  j"                  j%                  t	        j                  |             |j,                  j"                  j/                  d       y y )N)minr   g      ?)r,   _init_weightsr   rQ  r/   r  randrb   rr   mathri  ra  r`  r   time_step_floorexpm1rg  datar  rh  r^  rj  rm  fill_)r4   r   r  inv_dtrs  r7   s        r8   r  z#Zamba2PreTrainedModel._init_weights  s:   f%f./

4;;44588DKK556$++B[B[9\\^((4;;4456 e33e4	  %))U[["%5$566FNN%%f-Q 0 01 45ALL##EIIaL1HHMM$ 0r9   )rS   rT   rU   r    __annotations__base_model_prefixsupports_gradient_checkpointing_no_split_modules_skip_keys_device_placement_supports_flash_attn_supports_flex_attn_supports_sdpa_is_statefulr  rV   rW   s   @r8   r  r    sG    &*#68QR"3NL% %r9   r  c                   2    e Zd ZdZdef fdZe	 	 	 	 	 	 	 	 	 	 ddeej                     deej                     deej                     dee   deej                     d	ee   d
ee   dee   dee   deej                     deeef   fd       Zd Zd Z xZS )Zamba2Modelzh
@auto_docstring
class Zamba2Model(Zamba2PreTrainedModel):
    """
    Model consisting of *config.num_hidden_layers* layers.

    Args:
        config: Zamba2Config
    """

    def __init__(self, config: Zamba2Config):
        super().__init__(config)
        self.config = config
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        blocks = [Zamba2AttentionDecoderLayer(config, block_id=k) for k in range(config.num_mem_blocks)]
        mamba_layers = []
        linear_layers = []
        self.layers_block_type = config.layers_block_type
        for i in range(config.num_hidden_layers):
            if config.layers_block_type[i] == "mamba":
                mamba_layers.append(Zamba2MambaDecoderLayer(config, layer_idx=i))
            elif config.layers_block_type[i] == "hybrid":
                linear_layers.append(nn.Linear(self.config.hidden_size, self.config.hidden_size, bias=False))
                mamba_layers.append(Zamba2MambaDecoderLayer(config, layer_idx=i))
        mamba_layers = iter(mamba_layers)
        linear_layers = iter(linear_layers)
        blocks = cycle(blocks)
        layers = self.get_layers(blocks, linear_layers, mamba_layers)
        self.layers = nn.ModuleList(layers)

        self._attn_implementation = config._attn_implementation
        self.final_layernorm = Zamba2RMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        if config.use_mem_rope:
            if config.use_long_context:
                logger.warning_once(
                    "`use_long_context` set to `True`: using rescaled `rope_theta` and extended `max_position_embeddings`."
                )
            self.rotary_emb = Zamba2RotaryEmbedding(config)
        self.gradient_checkpointing = False

        # Initialize weights and apply final processing
        self.post_init()

    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Zamba2HybridDynamicCache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
    ) -> Union[tuple, BaseModelOutputWithPast]:
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        use_cache = use_cache if use_cache is not None else self.config.use_cache
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError(
                "You cannot specify both input_ids and inputs_embeds at the same time, and must specify either one"
            )

        if self.gradient_checkpointing and self.training and use_cache:
            logger.warning_once(
                "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`."
            )
            use_cache = False

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        hidden_states = inputs_embeds
        # The word embeddings are re-injected into every shared transformer block, so keep a pristine copy.
        original_hidden_states = torch.clone(inputs_embeds)

        if use_cache and past_key_values is None:
            batch_size = input_ids.shape[0] if input_ids is not None else inputs_embeds.shape[0]
            past_key_values = Zamba2HybridDynamicCache(self.config, batch_size, dtype=self.dtype, device=self.device)

        if cache_position is None:
            past_seen_tokens = (
                past_key_values.get_seq_length(self.first_transformer_layer_id) if past_key_values is not None else 0
            )
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )
        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        causal_mask = self._update_causal_mask(attention_mask, inputs_embeds, cache_position)

        # Create the rotary position embeddings once and share them across all attention blocks.
        if self.config.use_mem_rope:
            position_embeddings = self.rotary_emb(hidden_states, position_ids)
        else:
            position_embeddings = None

        all_hidden_states = () if output_hidden_states else None
        all_self_attns = () if output_attentions else None

        for layer_idx, layer in enumerate(self.layers):
            if output_hidden_states:
                all_hidden_states += (hidden_states,)

            if self.gradient_checkpointing and self.training:
                layer_outputs = self._gradient_checkpointing_func(
                    layer.__call__,
                    hidden_states,
                    original_hidden_states,
                    layer_idx,
                    attention_mask,
                    causal_mask,
                    past_key_values,
                    output_attentions,
                    use_cache,
                    position_embeddings,
                )
            else:
                layer_outputs = layer(
                    hidden_states,
                    original_hidden_states=original_hidden_states,
                    layer_idx=layer_idx,
                    attention_mask=attention_mask,
                    causal_mask=causal_mask,
                    past_key_value=past_key_values,
                    output_attentions=output_attentions,
                    use_cache=use_cache,
                    position_embeddings=position_embeddings,
                )
            hidden_states = layer_outputs[0]

            if output_attentions:
                if layer_outputs[1] is not None:
                    all_self_attns += (layer_outputs[1],)

        hidden_states = self.final_layernorm(hidden_states)

        if output_hidden_states:
            all_hidden_states += (hidden_states,)

        if past_key_values and not past_key_values.has_previous_state:
            past_key_values.has_previous_state = True

        output = BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values if use_cache else None,
            hidden_states=all_hidden_states,
            attentions=all_self_attns,
        )
        return output if return_dict else output.to_tuple()

    def _update_causal_mask(self, attention_mask, input_tensor, cache_position):
        if self.config._attn_implementation == "flash_attention_2":
            if attention_mask is not None and 0.0 in attention_mask:
                return attention_mask
            return None

        dtype, device = input_tensor.dtype, input_tensor.device
        min_dtype = torch.finfo(dtype).min
        sequence_length = input_tensor.shape[1]
        target_length = cache_position[-1] + 1

        causal_mask = torch.full((sequence_length, target_length), fill_value=min_dtype, dtype=dtype, device=device)
        if sequence_length != 1:
            causal_mask = torch.triu(causal_mask, diagonal=1)
        causal_mask *= torch.arange(target_length, device=device) > cache_position.reshape(-1, 1)
        causal_mask = causal_mask[None, None, :, :].expand(input_tensor.shape[0], 1, -1, -1)
        if attention_mask is not None:
            causal_mask = causal_mask.clone()  # copy to contiguous memory for in-place edit
            if attention_mask.dim() == 2:
                mask_length = attention_mask.shape[-1]
                padding_mask = causal_mask[..., :mask_length].eq(0.0) * attention_mask[:, None, None, :].eq(0.0)
                causal_mask[..., :mask_length] = causal_mask[..., :mask_length].masked_fill(padding_mask, min_dtype)

        if (
            self.config._attn_implementation == "sdpa"
            and attention_mask is not None
            and attention_mask.device.type in ["cuda", "xpu", "npu"]
        ):
            # Attend to all tokens in fully masked rows, as required by the memory-efficient SDPA path.
            causal_mask = AttentionMaskConverter._unmask_unattended(causal_mask, min_dtype)

        return causal_mask

    def get_layers(self, blocks, linear_layers, mamba_layers):
        layers = []
        self._tied_weights_keys = []
        self.first_transformer_layer_id = 0
        for layer_id, layer_type in enumerate(self.layers_block_type):
            if layer_type == "hybrid":
                if self.first_transformer_layer_id == 0:
                    self.first_transformer_layer_id = layer_id
                block = next(blocks)
                if self.config.num_mem_blocks * len(self.config.hybrid_layer_ids) > 1:
                    prefix_pattern = rf"^layers\.{layer_id}\.shared_transformer\."
                    main_keys_pattern = re.compile(
                        prefix_pattern
                        + r"(?:"
                        + r"self_attn\.(?:q_proj|k_proj|v_proj|o_proj)\.weight|"
                        + r"feed_forward\.(?:gate_up_proj|down_proj)\.weight|"
                        + r"(?:input_layernorm|pre_ff_layernorm)\.weight"
                        + r")$"
                    )
                    self._tied_weights_keys.append(main_keys_pattern)
                    adapter_id = 0
                    for _layer_type in self.layers_block_type:
                        if _layer_type == "hybrid" and adapter_id % self.config.num_mem_blocks == block.block_id:
                            adapter_pattern = re.compile(
                                r"^shared_transformer\.feed_forward\.gate_up_proj_adapter_list\."
                                + str(adapter_id)
                                + r"\.(?:0|1)\.weight$"
                            )
                            self._tied_weights_keys.append(adapter_pattern)
                        adapter_id += 1
                    if self.config.use_shared_attention_adapter:
                        adapter_id = 0
                        for _layer_type in self.layers_block_type:
                            if _layer_type == "hybrid" and adapter_id % self.config.num_mem_blocks == block.block_id:
                                attn_adapter_pattern = re.compile(
                                    r"^shared_transformer\.self_attn\."
                                    + r"(?:linear_q_adapter_list|linear_k_adapter_list|linear_v_adapter_list)\."
                                    + str(adapter_id)
                                    + r"\.(?:0|1)\.weight$"
                                )
                                self._tied_weights_keys.append(attn_adapter_pattern)
                            adapter_id += 1
                layers.append(Zamba2HybridLayer(block, next(linear_layers), next(mamba_layers)))
            else:
                layers.append(next(mamba_layers))
        return layers


class Zamba2ForCausalLM(Zamba2PreTrainedModel, GenerationMixin):
    def __init__(self, config: Zamba2Config):
        super().__init__(config)
        self.model = Zamba2Model(config)
        self._tied_weights_keys = ["lm_head.weight", *self.model._tied_weights_keys]
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)

        # Initialize weights and apply final processing
        self.post_init()

    def set_decoder(self, decoder):
        self.model = decoder

    def get_decoder(self):
        return self.model

    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Zamba2HybridDynamicCache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: Union[int, torch.Tensor] = 0,
        **loss_kwargs,
    ) -> Union[tuple, CausalLMOutputWithPast]:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the masked language modeling loss. Indices should either be in `[0, ...,
            config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored
            (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`.
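        logits_to_keep (`int` or `torch.Tensor`, *optional*, defaults to 0):
            If an `int`, compute logits for the last `logits_to_keep` tokens; if `0`, compute logits for all
            `input_ids`. Only the last token's logits are needed for generation, and computing them for that token
            alone saves memory on long sequences or large vocabularies. A minimal sketch of the slicing this
            performs (sizes are illustrative assumptions):

            ```python
            >>> import torch

            >>> hidden_states = torch.randn(1, 10, 16)      # (batch, seq_len, hidden)
            >>> hidden_states[:, slice(-1, None), :].shape  # logits_to_keep=1 keeps the final position
            torch.Size([1, 1, 16])
            ```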

        Example:

        ```python
        >>> from transformers import AutoTokenizer, Zamba2ForCausalLM

        >>> model = Zamba2ForCausalLM.from_pretrained("Zyphra/Zamba2-7B-v1")
        >>> tokenizer = AutoTokenizer.from_pretrained("Zyphra/Zamba2-7B-v1")

        >>> prompt = "Hey, are you conscious? Can you talk to me?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
        ```"""
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        outputs = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            cache_position=cache_position,
        )

        hidden_states = outputs[0]
        # Only compute the logits that are actually needed (see `logits_to_keep` above).
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])

        loss = None
        if labels is not None:
            loss = self.loss_function(logits, labels, self.vocab_size, **loss_kwargs)

        if not return_dict:
            output = (logits,) + outputs[1:]
            return (loss,) + output if loss is not None else output

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )

    def prepare_inputs_for_generation(
        self,
        input_ids,
        past_key_values=None,
        attention_mask=None,
        inputs_embeds=None,
        cache_position=None,
        position_ids=None,
        use_cache=True,
        **kwargs,
    ):
        # Overwritten -- this model uses its own cache type, `Zamba2HybridDynamicCache`

        empty_past_kv = past_key_values is None

        # Omit tokens covered by past_key_values
        if not empty_past_kv:
            # If we have cache: slice `input_ids` through `cache_position`, to keep only the unprocessed tokens
            # Exception 1: when passing input_embeds, input_ids may be missing entries
            # Exception 2: some generation methods do special slicing of input_ids, so we don't need to do it here
            if inputs_embeds is not None and cache_position[-1] >= input_ids.shape[1]:  # Exception 1
                input_ids = input_ids[:, -cache_position.shape[0] :]
            elif input_ids.shape[1] != cache_position.shape[0]:  # Default case (the "else", a no op, is Exception 2)
                input_ids = input_ids[:, cache_position]
        else:
            past_key_values = Zamba2HybridDynamicCache(
                self.config, input_ids.shape[0], dtype=self.dtype, device=self.device
            )

        if attention_mask is not None and position_ids is None:
            # create position_ids on the fly for batch generation
            position_ids = attention_mask.long().cumsum(-1) - 1
            position_ids.masked_fill_(attention_mask == 0, 1)
            if not empty_past_kv:
                position_ids = position_ids[:, -input_ids.shape[1] :]

        if inputs_embeds is not None and empty_past_kv:
            model_inputs = {"inputs_embeds": inputs_embeds}
        else:
            model_inputs = {"input_ids": input_ids.contiguous()}

        model_inputs.update(
            {
                "position_ids": position_ids,
                "past_key_values": past_key_values,
                "use_cache": use_cache,
                "attention_mask": attention_mask,
                "num_logits_to_keep": self.config.num_logits_to_keep,
                "cache_position": cache_position,
            }
        )
        return model_inputs


@auto_docstring(
    custom_intro="""
    The Zamba2 Model with a sequence classification head on top (linear layer).

    [`Zamba2ForSequenceClassification`] uses the last token in order to do the classification, as other causal models
    (e.g. GPT-2) do.

    Since it does classification on the last token, it requires to know the position of the last token. If a
    `pad_token_id` is defined in the configuration, it finds the last token that is not a padding token in each row. If
    no `pad_token_id` is defined, it simply takes the last value in each row of the batch. Since it cannot guess the
    padding tokens when `inputs_embeds` are passed instead of `input_ids`, it does the same (take the last value in
    each row of the batch).
    """
)
class Zamba2ForSequenceClassification(Zamba2PreTrainedModel):
    def __init__(self, config):
        super().__init__(config)
        self.num_labels = config.num_labels
        self.model = Zamba2Model(config)
        self._tied_weights_keys = self.model._tied_weights_keys
        self.score = nn.Linear(config.hidden_size, self.num_labels, bias=False)

        # Initialize weights and apply final processing
        self.post_init()

    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Union[Cache, list[torch.FloatTensor]]] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[tuple, SequenceClassifierOutputWithPast]:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the sequence classification/regression loss. Indices should be in `[0, ...,
            config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If
            `config.num_labels > 1` a classification loss is computed (Cross-Entropy).
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        transformer_outputs = self.model(
            input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        hidden_states = transformer_outputs[0]
        logits = self.score(hidden_states)

        if input_ids is not None:
            batch_size = input_ids.shape[0]
        else:
            batch_size = inputs_embeds.shape[0]

        if self.config.pad_token_id is None and batch_size != 1:
            raise ValueError("Cannot handle batch sizes > 1 if no padding token is defined.")
        if self.config.pad_token_id is None:
            last_non_pad_token = -1
        elif input_ids is not None:
            # To handle both left- and right- padding, we take the rightmost token that is not equal to pad_token_id
            non_pad_mask = (input_ids != self.config.pad_token_id).to(logits.device, torch.int32)
            token_indices = torch.arange(input_ids.shape[-1], device=logits.device, dtype=torch.int32)
            last_non_pad_token = (token_indices * non_pad_mask).argmax(-1)
        else:
            last_non_pad_token = -1
            logger.warning_once(
                f"{self.__class__.__name__} will not detect padding tokens in `inputs_embeds`. Results may be "
                "unexpected if using padding tokens in conjunction with `inputs_embeds.`"
            )

        pooled_logits = logits[torch.arange(batch_size, device=logits.device), last_non_pad_token]

        loss = None
        if labels is not None:
            labels = labels.to(logits.device)
            if self.config.problem_type is None:
                if self.num_labels == 1:
                    self.config.problem_type = "regression"
                elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int):
                    self.config.problem_type = "single_label_classification"
                else:
                    self.config.problem_type = "multi_label_classification"

            if self.config.problem_type == "regression":
                loss_fct = MSELoss()
                if self.num_labels == 1:
                    loss = loss_fct(pooled_logits.squeeze(), labels.squeeze())
                else:
                    loss = loss_fct(pooled_logits, labels)
            elif self.config.problem_type == "single_label_classification":
                loss_fct = CrossEntropyLoss()
                loss = loss_fct(pooled_logits.view(-1, self.num_labels), labels.view(-1))
            elif self.config.problem_type == "multi_label_classification":
                loss_fct = BCEWithLogitsLoss()
                loss = loss_fct(pooled_logits, labels)
        if not return_dict:
            output = (pooled_logits,) + transformer_outputs[1:]
            return ((loss,) + output) if loss is not None else output

        return SequenceClassifierOutputWithPast(
            loss=loss,
            logits=pooled_logits,
            past_key_values=transformer_outputs.past_key_values,
            hidden_states=transformer_outputs.hidden_states,
            attentions=transformer_outputs.attentions,
        )


__all__ = ["Zamba2ForCausalLM", "Zamba2ForSequenceClassification", "Zamba2Model", "Zamba2PreTrainedModel"]
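# Minimal usage sketch for the classification head (kept as a comment so importing this module stays side-effect
# free; the checkpoint name below comes from the `Zamba2ForCausalLM` docstring example, and the label count is an
# illustrative assumption):
#
#     from transformers import AutoTokenizer, Zamba2ForSequenceClassification
#
#     tokenizer = AutoTokenizer.from_pretrained("Zyphra/Zamba2-7B-v1")
#     model = Zamba2ForSequenceClassification.from_pretrained("Zyphra/Zamba2-7B-v1", num_labels=2)
#     inputs = tokenizer("This movie was great!", return_tensors="pt")
#     logits = model(**inputs).logits  # shape: (1, 2); the last non-padding token's hidden state is scored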