
    rh`              
          d Z ddlZddlmZmZ ddlZddlZddlmZ ddlm	Z	m
Z
mZmZ ddlmZ ddlmZ dd	lmZmZmZ dd
lmZ ddlmZ ddlmZmZ ddlmZ ddlmZm Z m!Z!m"Z"m#Z# ddl$m%Z%m&Z& ddl'm(Z( ddl)m*Z*m+Z+ ddl,m-Z-  e       rddlm.Z.  e+j^                  e0      Z1 G d dejd                        Z3d Z4d>dZ5 G d dejl                        Z7dejp                  de9dejt                  dejp                  fdZ;d ejp                  d!ejp                  d"e<d#e=dejp                  f
d$Z> G d% d&ejl                        Z? G d' d(e?      Z@ G d) d*ejl                        ZAe?e?e@d+ZB G d, d-e      ZCe* G d. d/e(             ZDe* G d0 d1eD             ZE e*d23       G d4 d5eDe             ZF e*d63       G d7 d8eD             ZGe* G d9 d:eD             ZHe* G d; d<eD             ZIg d=ZJy)?zPyTorch Falcon model.    N)OptionalUnion)nn)BCEWithLogitsLossCrossEntropyLoss	LayerNormMSELoss)
functional   )get_activation)CacheDynamicCacheStaticCache)GenerationMixin)AttentionMaskConverter)!flash_attn_supports_top_left_maskis_flash_attn_available)GradientCheckpointingLayer))BaseModelOutputWithPastAndCrossAttentions!CausalLMOutputWithCrossAttentionsQuestionAnsweringModelOutput SequenceClassifierOutputWithPastTokenClassifierOutput)ROPE_INIT_FUNCTIONSdynamic_rope_update)PreTrainedModel)auto_docstringlogging   )FalconConfig)_flash_attention_forwardc                   D    e Zd Zdej                  dej                  fdZy)FalconLinearinputreturnc                 n    || j                   j                  z  }| j                  |S || j                  z   S N)weightTbias)selfr$   hidden_statess      }/var/www/html/ai-insurance-compliance-backend/venv/lib/python3.12/site-packages/transformers/models/falcon/modeling_falcon.pyforwardzFalconLinear.forward;   s3    -99  tyy((    N)__name__
__module____qualname__torchTensorr.    r/   r-   r#   r#   :   s    )U\\ )ell )r/   r#   c                     | dd| j                   d   dz  f   }| d| j                   d   dz  df   }t        j                  | |fd      S )z*Rotates half the hidden dims of the input..N   dim)shaper3   cat)xx1x2s      r-   rotate_halfr@   C   sZ    	
3"!''"+"""	#B	
3q ""	#B99rc2YB''r/   c                     |j                  |      }|j                  |      }| |z  t        |       |z  z   }||z  t        |      |z  z   }||fS )a  Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    )	unsqueezer@   )qkcossinposition_idsunsqueeze_dimq_embedk_embeds           r-   apply_rotary_pos_embrK   K   sY    ( --
&C
--
&C3w;q>C/0G3w;q>C/0GGr/   c                   ^     e Zd Zddef fdZ ej                         ed               Z xZ	S )FalconRotaryEmbeddingconfigc                    t         |           t        |d      rUt        |j                  t
              r;|j                  j                  d|j                  j                  d            | _        nd| _        |j                  | _	        |j                  | _
        || _        t        | j                     | _        | j                  | j                  |      \  }| _        | j                  d|d       | j                   | _        y )Nrope_scaling	rope_typetypedefaultinv_freqF)
persistent)super__init__hasattr
isinstancerP   dictgetrQ   max_position_embeddingsmax_seq_len_cachedoriginal_max_seq_lenrN   r   rope_init_fnattention_scalingregister_bufferrT   original_inv_freq)r+   rN   devicerT   	__class__s       r-   rW   zFalconRotaryEmbedding.__init__h   s    6>*z&:M:Mt/T#0044[&BUBUBYBYZ`BabDN&DN"("@"@$*$B$B!/?+/+<+<T[[&+Q($(ZeD!%r/   c                 b   | j                   d d d d f   j                         j                  |j                  d   dd      j	                  |j
                        }|d d d d d f   j                         }t        |j
                  j                  t              r/|j
                  j                  dk7  r|j
                  j                  nd}t        j                  |d      5  |j                         |j                         z  j                  dd      }t        j                  ||fd	      }|j                         | j                  z  }|j                         | j                  z  }	d d d        j	                  |j                   
      	j	                  |j                   
      fS # 1 sw Y   AxY w)Nr   r7   r   mpscpuF)device_typeenabledr8   r9   dtype)rT   floatexpandr;   torc   rY   rR   strr3   autocast	transposer<   rE   r`   rF   rk   )
r+   r=   rG   inv_freq_expandedposition_ids_expandedrh   freqsembrE   rF   s
             r-   r.   zFalconRotaryEmbedding.forwardy   sV    !MM$4-8>>@GGHZHZ[\H]_acdehhijiqiqr ,QaZ 8 > > @'1!((--'E!((--[`J`ahhmmfk^^UC 	5&,,.1F1L1L1NNYYZ[]^_E))UEN3C'')d444C'')d444C		5 vvAGGv$cff177f&;;;	5 	5s    BF%%F.r'   )
r0   r1   r2   r    rW   r3   no_gradr   r.   __classcell__rd   s   @r-   rM   rM   g   s3    /| /" U]]_<  <r/   rM   attention_mask	num_headsrk   r%   c                    | j                   \  }}dt        j                  t        j                  |            z  }t	        j
                  ddt        j                  |      dz
   z   z  | j                  t        j                        }t	        j                  dd|z   | j                  t        j                        }t	        j                  ||      }||k7  rt	        j
                  ddt        j                  d|z        dz
   z   z  | j                  t        j                        }	t        |||z
        }
t	        j                  ddd|
z  z   d| j                  t        j                        }t	        j                  |t	        j                  |	|      gd      }| j                  d      dz
  | z  d d d d d f   }|d   j                         |z  }|j                  ||z  d|      j!                  |      S )	Nr8   r   rc   rk   r   r   r9   r7   ).N)r;   mathfloorlog2r3   tensorrc   float32arangeint32powminr<   cumsumbfloat16reshapern   )ry   rz   rk   
batch_size
seq_lengthclosest_power_of_2basepowersslopes
extra_basenum_remaining_headsextra_powersarange_tensoralibis                 r-   build_alibi_tensorr      s   +11J
djj9)=>><<	tyy!34q899:;NDYDYafananD \\!Q!33N<Q<QY^YdYdeFYYtV$FY&\\A499Q);%;<q@AABCNLaLainiviv

 ""4iBT6TU||Aq1/B+B'BAnNcNckpkvkvwFEIIj,$GHaP %+++3a7>I1dTU:VM9&&(=8E==i/J?BB5IIr/   r=   residualprobtrainingc                 @    t        j                  | ||      }||z   }|S )a
  
    Dropout add function

    Args:
        x (`torch.tensor`):
            input tensor
        residual (`torch.tensor`):
            residual tensor
        prob (`float`):
            dropout probability
        training (`bool`):
            training mode
    )pr   )Fdropout)r=   r   r   r   outs        r-   dropout_addr      s$     ))A
1C
S.CJr/   c                       e Zd Zddef fdZdej                  deej                  ej                  ej                  f   fdZdej                  dej                  fdZ		 	 	 	 	 	 	 ddej                  d	e
ej                     d
ej                  de
ej                     de
e   de
ej                     dedede
ej                     de
eej                  ej                  f      fdZ xZS )FalconAttentionrN   c                 \   t         |           || _        |j                  | _        |j                  | _        | j                  | j
                  z  | _        | j                  | _        |j                  | _        |j                  | _	        |j                  | _
        d| _        |j                  dk(  | _        || _        |-t        j!                  d| j"                  j$                   d       | j                  | j
                  z  | j                  k7  r&t'        d| j                   d| j
                   d      dt)        j*                  | j                        z  | _        | j,                  | _        |j0                  r*|j2                  d	z  |j                  z   | j                  z  }n8|j4                  r| j                  d	| j                  z  z   }nd
| j                  z  }t7        | j                  ||j8                        | _        |j0                  | _        |j4                  | _        t7        | j                  | j                  |j8                        | _        t?        j@                  |jB                        | _!        | j0                  s| j4                  s|j2                  | _        y d| _        y )NTsdpazInstantiating z without passing a `layer_idx` is not recommended and will lead to errors during the forward call if caching is used. Please make sure to provide a `layer_idx` when creating this class.zA`hidden_size` must be divisible by num_heads (got `hidden_size`: z and `num_heads`: z).      ?r8   r   r*   r   )"rV   rW   rN   hidden_sizenum_attention_headsrz   head_dim
split_sizehidden_dropoutr\   
rope_theta	is_causal_attn_implementation	_use_sdpa	layer_idxloggerwarning_oncerd   r0   
ValueErrorr}   sqrtinv_norm_factorbetanew_decoder_architecturenum_kv_headsmulti_queryr#   r*   query_key_valuedenser   Dropoutattention_dropout)r+   rN   r   qkv_out_dimrd   s       r-   rW   zFalconAttention.__init__   s?   !--33((DNN:**$33'-'E'E$ ++44>" !8!8 9 :, , ==4>>)T-=-==STXTdTdSe fNN#2'   #TYYt}}%==((	**!..2V5O5OOSWS`S``K**Q->>Kd...K+D,<,<kPVP[P[\(.(G(G%!--!$"2"2D4D4D6;;W
!#F,D,D!E484Q4QY]YiYiF//pqr/   	fused_qkvr%   c                 l   | j                   r|j                  \  }}}|j                  ||d| j                  | j                  z  dz   | j
                        }|ddddddddf   }|dddddddgf   }|dddddddgf   }t        j                  ||j                        }t        j                  ||j                        }|||fD 	cg c]  }	|	j                  dd       c}	\  }}}|||fS | j                  sV|j                  \  }
}}|j                  |
|| j                  d| j
                        }|ddddf   |ddddf   |ddddf   fS |j                  \  }
}}|j                  |
|| j                  dz   | j
                        }|dddddf   |ddgddf   |ddgddf   fS c c}	w )	a  
        Split the last dimension into (num_heads, head_dim), results share same memory storage as `fused_qkv`

        Args:
            fused_qkv (`torch.tensor`): [batch_size, seq_length, num_heads * 3 * head_dim]

        Returns:
            query: [batch_size, seq_length, num_heads, head_dim] key: [batch_size, seq_length, num_heads, head_dim]
            value: [batch_size, seq_length, num_heads, head_dim]
        r7   r8   Nr   .r   r   )
r   r;   viewrz   r   r   r3   broadcast_toflattenr   )r+   r   batchseq_len_qkvquerykeyvaluer=   r   r   three_times_hidden_sizes                r-   _split_headszFalconAttention._split_heads   s    (( )E7A..T^^tGXGX5X[\5\^b^k^klC1a"%EaAtm$C1a"&E$$S%++6C&&uekk:E;@#u:M NQ1a NE3#u$$!!>Goo;J
$;!z:t~~qRVR_R_`IS!QY'319)=yaQR?SSS>Goo;J
$;!z:t~~PQ?QSWS`S`aIS#2#q[)9S2$\+BIcTVSWYZlD[[[ !Os   F1r=   c                    |j                   \  }}}|| j                  z  }|j                  || j                  || j                        }|j	                  dddd      }|j                  ||| j                  | j                  z        S )z
        Merge heads together over the last dimension

        Args:
            x (`torch.tensor`): [batch_size * num_heads, seq_length, head_dim]

        Returns:
            torch.tensor: [batch_size, seq_length, num_heads * head_dim]
        r   r8   r   r   )r;   rz   r   r   permuter   )r+   r=   batch_size_and_num_headsr   r   r   s         r-   _merge_headszFalconAttention._merge_heads  sy     34''/ *a-?
 FF:t~~z4==I IIaAq! yyZ$--1OPPr/   r,   r   ry   rG   
layer_past	head_mask	use_cacheoutput_attentionscache_positionposition_embeddingsc                 4
   | j                  |      }| j                  r| j                  n| j                  }| j	                  |      \  }}}|j
                  \  }}}}|j                  dd      j                  || j                  || j                        }|j                  dd      j                  |||| j                        }|j                  dd      j                  |||| j                        }||
\  }}t        ||||      \  }}|;d|	i}||j                  d       |j                  ||| j                  |      \  }}|j
                  d   }| j                  rK|j                  j                  dk(  r2|0|j                         }|j                         }|j                         }||d d d d d d d |j
                  d   f   }|5| j                  rM|sK| j                   xr |d u xr |dkD  }t"        j$                  j&                  j)                  ||||d|      }d }na||j                  d	d      z  }|t+        j,                  | j                        z  }t/        j0                  ||z   d	|j2                  
      }||z  }|j5                  || j                  || j                        }|j7                  dddd      }|j                  ||| j                  | j                  z        }| j9                  |      }||fS | j                  r|s|| j                   xr |d u xr |dkD  }t"        j$                  j&                  j)                  ||||| j:                  r| j<                  j>                  nd|      }d }|j                  dd      }|j                  ||| j                  | j                  z        }| j9                  |      }||fS ||j                  d	d      z  }|j5                  || j                  ||      }|j2                  }|t"        j@                  k(  s|t"        jB                  k(  r|jE                  t"        jF                        }||j5                  || j                  dd	      z   }|| jH                  z  }t/        j0                  ||z   d	|j2                  
      }| j=                  |      }|||z  }|j5                  || j                  ||      }||z  jK                  dd      }| jM                  |      }| j9                  |      }||fS )Nr   r8   r   rF   rE   r   cuda        )	attn_mask	dropout_pr   r7   )r:   rk   r   r   )'r   r   rz   r   r   r;   rq   r   r   rK   updater   r   rc   rR   
contiguousr   r3   r   r
   scaled_dot_product_attentionr}   r   r   softmaxrk   r   r   r   r   r   r   float16r   rn   r   r   r   r   )r+   r,   r   ry   rG   r   r   r   r   r   r   r   r   query_layer	key_layervalue_layerr   query_lengthr   rE   rF   cache_kwargs	kv_lengthr   attn_outputattention_scoresattention_probsmatmul_resultinput_dtypeattention_logitsattention_probs_reshapeds                                  r-   r.   zFalconAttention.forward  s    ((7	)-)F)Ft~~DL]L]040A0A)0L-i)4):):&
L!Q!++Aq199*dnnVbdhdqdqr''1-55j,P\^b^k^kl	!++Aq199*lT`bfbobop=*HC%9+yRUWZ%["K!,n=L}##C$<=%/%6%6y+t~~_k%l"I{OOB'	>>k0055?ND^ &002K!,,.I%002K%+Aq!5Jyr7J5J,JKN=~~&7
 !NNZ~/EZ,YZJZ	#hh11NN,!' O  $( #.1D1DR1L#L  DIIdmm$<< #$99-=-NTV^k^q^q#r .<%**:t~~|UYUbUbcK%--aAq9K%--j,Y]YfYfHfgK**[1K 000 ~~&7I<M !NNZ~/EZ,YZJZ	#hh11NN,:>--d4466S' O  #')33Aq9)11*lDNN]a]j]jLjk"jj5B //? !,i.A.A"b.I I $1#5#5j$..R^`i#j  /44%--/;%..3P'7':':5=='I$#3ejjT^^]^`b6c#c  D$8$88 "#)),<~,MSU]j]p]p"q"&"8"8"I(&5	&AO ,;+?+?
DNN\hjs+t(  8+ENNqRST #//<"jj5//r/   r'   NNNFFNN)r0   r1   r2   r    rW   r3   r4   tupler   r   r   
LongTensorr   boolr.   rw   rx   s   @r-   r   r      s:   )r| )rV\ell \uU\\5<<Y^YeYe=e7f \@Qell Qu|| Q< 48&*,0"'59KO0||0 %0 	0
 u//00 UO0 ELL)0 0  0 !!1!120 &eELL%,,,F&GH0r/   r   c                   0    e Zd ZdZ fdZ	 	 	 	 	 	 	 ddej                  deej                     dej                  deej                     dee	   deej                     d	e
d
e
deej                     deeej                  ej                  f      fdZ xZS )FalconFlashAttention2aH  
    Falcon flash attention module. This module inherits from `FalconAttention` as the weights of the module stays
    untouched. The only required change would be on the forward pass where it needs to correctly call the public API of
    flash attention and deal with padding tokens in case the input contains any of them.
    c                 B    t        |   |i | t               | _        y r'   )rV   rW   r   _flash_attn_uses_top_left_mask)r+   argskwargsrd   s      r-   rW   zFalconFlashAttention2.__init__  s#    $)&)
 /P.Q+r/   r,   r   ry   rG   r   r   r   r   r   r   c                    | j                  |      }| j                  r| j                  n| j                  }| j	                  |      \  }}}|j
                  \  }}}}|j                  dd      j                  || j                  || j                        }|j                  dd      j                  |||| j                        }|j                  dd      j                  |||| j                        }||
\  }}t        ||||      \  }}|;d|	i}||j                  d       |j                  ||| j                  |      \  }}|j                  dd      }|j                  dd      }|j                  dd      }|t        d      | j                  r| j                  j                  nd}|j                   }|j"                  j$                  dk7  r|j"                  j$                  nd}|t&        j(                  k(  rt'        j*                         r:t-        t&        d	      rt'        j.                  |      nt'        j0                         }nMt-        | j                  d
      r| j                  j2                  }n | j                   j4                  j                   }t6        j9                  d| d       |j;                  |      }|j;                  |      }|j;                  |      }t=        |||||||| j>                  | j@                  	      }|j                  ||| j                  | j                  z        }| jC                  |      }|sd }||fS )Nr   r8   r   r   z6`alibi` is not supported when `use_flash_attn` is Truer   rf   rg   get_autocast_dtype_pre_quantization_dtypezThe input hidden states seems to be silently casted in float32, this might be related to the fact you have upcasted embedding or layer norm layers in float32. We will cast back the input in .)rG   r   r   use_top_left_mask)"r   r   rz   r   r   r;   rq   r   r   rK   r   r   r   r   rN   r   rk   rc   rR   r3   r   is_autocast_enabledrX   r   get_autocast_gpu_dtyper   r(   r   r   rn   r!   r   r   r   )r+   r,   r   ry   rG   r   r   r   r   r   r   r   r   r   r   r   r   r   r   rE   rF   r   attn_dropoutr   rh   target_dtyper   attn_weightss                               r-   r.   zFalconFlashAttention2.forward  s    ((7	)-)F)Ft~~DL]L]040A0A)0L-i)4):):&
L!Q!++Aq199*dnnVbdhdqdqr''1-55j,P\^b^k^kl	!++Aq199*lT`bfbobop=*HC%9+yRUWZ%["K!,n=L}##C$<=%/%6%6y+t~~_k%l"I{ "++Aq1''1-	!++Aq1UVV8<t{{443
 "''1<1C1C1H1HE1Qk((--W\%--'((* u&:; ,,[9557  &?@#{{BB#33::@@ >$ &..6K!\2I%..6K.% nn"AA

 #**:|T^^VZVcVcEcdjj. LL((r/   r   )r0   r1   r2   __doc__rW   r3   r4   r   r   r   r   r   r.   rw   rx   s   @r-   r   r     s    R 48&*,0"'59KO[)||[) %[) 	[)
 u//0[) UO[) ELL)[) [)  [) !!1!12[) &eELL%,,,F&GH[)r/   r   c                   \     e Zd Zdef fdZdej                  dej                  fdZ xZS )	FalconMLPrN   c                 ,   t         |           |j                  }t        ||j                  |j
                        | _        t        |j                        | _	        t        |j                  ||j
                        | _
        |j                  | _        y )Nr   )rV   rW   r   r#   ffn_hidden_sizer*   dense_h_to_4hr   
activationactdense_4h_to_hr   )r+   rN   r   rd   s      r-   rW   zFalconMLP.__init__  su    (()+v7M7MTZT_T_`!&"3"34)&*@*@+TZT_T_`$33r/   r=   r%   c                 h    | j                  | j                  |            }| j                  |      }|S r'   )r  r  r	  )r+   r=   s     r-   r.   zFalconMLP.forward  s0    HHT''*+q!r/   )	r0   r1   r2   r    rW   r3   r4   r.   rw   rx   s   @r-   r  r    s*    4| 4 %,, r/   r  )eagerr   flash_attention_2c                   p    e Zd Zddef fdZ	 	 	 	 	 	 	 ddej                  deej                     dej                  deej                     dee	e
eej                  ej                  f   f      deej                     d	ed
edeej                     deeej                  ej                  f      fdZ xZS )FalconDecoderLayerrN   c                 x   t         |           |j                  }|j                  | _        t        |j                     ||      | _        t        |      | _	        |j                  | _
        || _        |j                  |j                  rd|_        |j                  s9t        ||j                         | _        t        ||j                         | _        y |j                  dk(  r9t        ||j                         | _        t        ||j                         | _        y t        ||j                         | _        y )Nr8   eps)rV   rW   r   r   rz   FALCON_ATTENTION_CLASSESr   self_attentionr  mlpr   rN   num_ln_in_parallel_attnr   parallel_attnr   layer_norm_epsilonpost_attention_layernorminput_layernormln_attnln_mlp)r+   rN   r   r   rd   s       r-   rW   zFalconDecoderLayer.__init__%  s    ((336v7R7RSTZ\efV$$33))1f6U6U-.F*##,5kvG`G`,aD)#,[f>W>W#XD --2(&:S:ST'9R9RS'0&B[B['\$r/   r,   r   ry   rG   r   r   r   r   r   r   c                 V   |}| j                   j                  r<| j                   j                  dk(  r#| j                  |      }| j	                  |      }n| j                  |      }| j                  |||||||||	|

      \  }}| j                   j                  sW| j                   j                  r|}n>t        ||| j                   j                  | j                        }| j                  |      }| j                   j                  r1| j                   j                  r| j                   j                  dk(  r|}| j                        }| j                   j                  s| j                   j                  r||z  }t        ||| j                   j                  | j                        }||fS )Nr8   )	r   ry   rG   r   r   r   r   r   r   )r   r   )rN   r   r  r  r  r  r  r  r   r   r   r  r  r   )r+   r,   r   ry   rG   r   r   r   r   r   r   r   r   attention_layernorm_outmlp_layernorm_outattention_outputr   
mlp_outputoutputs                      r-   r.   zFalconDecoderLayer.forward>  s    !;;//DKK4W4W[\4\&*ll=&A# $M :&*&:&:=&I# *.)<)<#!)%/) 3 *= *
&, {{33{{(($;!&$h0M0MX\XeXe %)$A$A($K! KK00))33q8 7 XX/0
;;//4;;3L3L**JZ4;;3M3MX\XeXef|##r/   r'   r   )r0   r1   r2   r    rW   r3   r4   r   r   r   r   r   r   r.   rw   rx   s   @r-   r  r  $  s    ]| ]< 48PT,0"'59KO<$||<$ %<$ 	<$
 u//0<$ U5%ell0J*K#KLM<$ ELL)<$ <$  <$ !!1!12<$ &eELL%,,,F&GH<$r/   r  c                   |     e Zd ZU eed<   dZdZdgZdZdZ	dZ
 fdZdej                  fdZed
defd	       Z xZS )FalconPreTrainedModelrN   transformerTr  c                 $    t        |   |i | y r'   )rV   rW   )r+   inputsr   rd   s      r-   rW   zFalconPreTrainedModel.__init__  s    &+F+r/   modulec                    t        |t        j                  t        f      rm|j                  j
                  j                  d| j                  j                         |j                  %|j                  j
                  j                          yyt        |t        j                        rz|j                  j
                  j                  d| j                  j                         |j                  2|j                  j
                  |j                     j                          yyt        |t              rJ|j                  j
                  j                          |j                  j
                  j                  d       yy)zInitialize the weights.r   )meanstdNr   )rY   r   Linearr#   r(   datanormal_rN   initializer_ranger*   zero_	Embeddingpadding_idxr   fill_)r+   r'  s     r-   _init_weightsz#FalconPreTrainedModel._init_weights  s   fryy,78 MM&&CT[[5R5R&S{{&  &&( '-MM&&CT[[5R5R&S!!-""6#5#56<<> .	*KK""$MM$$S) +r/   hard_check_onlyc                 :    t        | dd      }|r|S |sd|_        |S )Nuse_bettertransformerFr   )getattrr   )clsrN   r4  _is_bettertransformers       r-   _check_and_enable_sdpaz,FalconPreTrainedModel._check_and_enable_sdpa  s*     '-De L M*0F'r/   )F)r0   r1   r2   r    __annotations__base_model_prefixsupports_gradient_checkpointing_no_split_modules_supports_flash_attn_supports_sdpa_can_compile_fullgraphrW   r   Moduler3  classmethodr   r:  rw   rx   s   @r-   r#  r#  }  s_    %&*#-.N!,*BII *" T  r/   r#  c                       e Zd Zdef fdZd Zdej                  fdZe		 	 	 	 	 	 	 	 	 	 	 dde
ej                     de
eeeeej                  ej                  f   df   f      d	e
ej                     d
e
ej                     de
ej                     de
ej                     de
e   de
e   de
e   de
e   de
ej                     deeej                  df   ef   fd       Zd	ej                  dej                  dej                  dededej                  dej                  fdZed	ej                  dededej*                  dej                  defd       Z xZS )FalconModelrN   c           	      j   t         |   |       |j                  | _        |j                  | _        |j                  | _        t        j                  |j                  | j                        | _        t        j                  t        |j                        D cg c]  }t        ||       c}      | _        |j"                  dk(  | _        |j"                  dk(  | _        t)        | j                  |j*                        | _        t/        |      | _        d| _        | j5                          y c c}w )N)r   r  r   r  )rN   F)rV   rW   r   	embed_dimr   rz   r   	use_alibir   r0  
vocab_sizeword_embeddings
ModuleListrangenum_hidden_layersr  hr   _use_flash_attention_2r   r   r  ln_frM   
rotary_embgradient_checkpointing	post_init)r+   rN   ird   s      r-   rW   zFalconModel.__init__  s     ++33  "||F,=,=t~~N QVW]WoWoQpqA 26Q Gqr&,&A&AEX&X#44> dnn&2K2KL	/v>&+# 	  rs   D0c                     | j                   S r'   rJ  )r+   s    r-   get_input_embeddingsz FalconModel.get_input_embeddings  s    ###r/   new_embeddingsc                     || _         y r'   rV  r+   rX  s     r-   set_input_embeddingsz FalconModel.set_input_embeddings  s
    -r/   	input_idspast_key_values.ry   rG   r   inputs_embedsr   r   output_hidden_statesreturn_dictr   r%   c                 H   ||n| j                   j                  }|	|	n| j                   j                  }	||n| j                   j                  }|
|
n| j                   j                  }
|du |duz  rt        d      | j                  r%| j                  r|rt        j                  d       d}|| j                  |      }t        |t        d      t        f      st        d      |r|
t               }d}||j                         nd}|j                   \  }}}| j"                  r[|5t%        j&                  |||z   f|j(                  t$        j*                        n|}t-        || j.                  |j0                        }|%t%        j2                  |||z   |j(                  	      }||j5                  d      }| j7                  |||||||      }| j9                  || j                   j:                        }|}| j=                  ||      }|rd
nd}|	rd
nd}t?        | j@                        D ]3  \  }}|	r||fz   } |||||||   |||||
      }|d   }|s+||d   fz   }5 | jC                  |      }|	r||fz   }|
stE        d ||||fD              S tG        ||||      S )  
        input_ids (`torch.LongTensor` of shape `(batch_size, input_ids_length)`):
            `input_ids_length` = `sequence_length` if `past_key_values` is `None` else `past_key_values.get_seq_length()`
            (`sequence_length` of input past key value states). Indices of input sequence tokens in the vocabulary.

            If `past_key_values` is used, only `input_ids` that do not have their past calculated should be passed as
            `input_ids`.

            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
            [`PreTrainedTokenizer.__call__`] for details.

            [What are input IDs?](../glossary#input-ids)
        Nz:You must specify exactly one of input_ids or inputs_embedszZ`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`...FzBThe `past_key_values` should be either a `Cache` object or `None`.r   r|   rj   rc   r5   )	r   ry   rG   r   r   r   r   r   r   r   c              3   &   K   | ]	  }||  y wr'   r5   ).0vs     r-   	<genexpr>z&FalconModel.forward.<locals>.<genexpr>B  s      ghgts   )last_hidden_stater]  r,   
attentions)$rN   r   r_  r   use_return_dictr   rR  r   r   r   rJ  rY   rR   r   r   get_seq_lengthr;   rH  r3   onesrc   longr   rz   rk   r   rB   _update_causal_maskget_head_maskrM  rQ  	enumeraterN  rP  r   r   )r+   r\  r]  ry   rG   r   r^  r   r   r_  r`  r   r   past_key_values_lengthr   r   r   maskcausal_maskr,   r   all_self_attentionsall_hidden_statesrT  blockoutputss                             r-   r.   zFalconModel.forward  s   8 2C1N-TXT_T_TqTq$8$D $++JjJj 	 "+!6IDKK<Q<Q	%0%<k$++B]B]-t";<YZZ&&4==##p "	  00;M /DJ+>?abb0*nO ETE`!?!?!Afg$1$7$7!
J>>
 ") 

.D!DEmNbNbjojtjt $  'tT^^=CVCVWE!"\\&(>(KTaThThN )33A6L..M>?L]_hjo
 &&y$++2O2OP	% #oom\J$5b4"6BD!$&&) 	JHAu#$58H$H!**)#A,#"3-$7G $AJM &9WQZM&I#'	J, 		-0 1]4D D )?<MObc   9+++*	
 	
r/   input_tensorr   c           
         | j                   j                  dk(  r	|d|v r|S y ||j                         nd}t        |t              }	| j                   j                  dk(  r,|	s*|s(|&|$t        j                  |||| j                        ry |j                  |j                  }}
t        j                  |
      j                  }|j                  \  }}}|	r|j                         }n.t        |t        j                        r|j                  d   n||z   }| j!                  ||||
|||j                  d         }|t|r |j"                  |dg|j                  dd   }t        j$                  |t'        j(                  | j                   j*                  | j,                  z        z  |dk  |      }| j                   j                  dk(  r2|0|j                  j.                  d	v r|st        j0                  ||      }|S )
Nr  r   r   r   )r^  rq  is_trainingr7   )sequence_lengthtarget_lengthrk   rc   r   r   r   )r   xpunpu)rN   r   rk  rY   r   r   _ignore_causal_mask_sdpar   rk   rc   r3   finfor   r;   get_max_cache_shaper4   5_prepare_4d_causal_attention_mask_with_cache_positionr   masked_fillr}   r   r   rz   rR   _unmask_unattended)r+   ry   rx  r   r]  r   r   r   past_seen_tokensusing_static_cacherk   rc   	min_dtyper   r{  r   r|  rs  s                     r-   rn  zFalconModel._update_causal_maskM  s    ;;++/BB)c^.C%%
 @O?Z?99;`a'E KK,,6&%!%>>*'7 MM	 $**L,?,?vKK&**	)5););&
OQ+??AM nell; $$R(%7  PP+')#))!, Q 
 !2!EMM*bC5;;qr?CE++		$++"9"9T^^"KLLb K KK,,6*%%**.DD%
 1CCKQZ[Kr/   r{  r|  rk   r   c                    | | j                         dk(  r| }|S t        j                  |      j                  }t        j                  ||f|||j
                        }|dk7  rt        j                  |d      }|t        j                  ||j
                        |j                  dd      kD  z  }|ddddddf   j                  |ddd      }| |j                         }| j                  d   }	|ddddddd|	f   | ddddddf   j                  |j
                        z   }
|
dk(  }
|ddddddd|	f   j                  |
|      |ddddddd|	f<   |S )	aM  
        Creates a causal 4D mask of shape `(batch_size, 1, query_length, key_value_length)` from a 2D mask of shape
        `(batch_size, key_value_length)`, or if the input `attention_mask` is already 4D, do nothing.

        Args:
            attention_mask (`torch.Tensor`):
                A 2D attention mask of shape `(batch_size, key_value_length)` or a 4D attention mask of shape
                `(batch_size, 1, query_length, key_value_length)`.
            sequence_length (`int`):
                The sequence length being processed.
            target_length (`int`):
                The target length: when generating with static cache, the mask should be as long as the static cache,
                to account for the 0 padding, the part of the cache that is not filled yet.
            dtype (`torch.dtype`):
                The dtype to use for the 4D attention mask.
            cache_position (`torch.Tensor`):
                Indices depicting the position of the input sequence tokens in the sequence.
            batch_size (`torch.Tensor`):
                Batch size.
        N   )
fill_valuerk   rc   r   )diagonalrc  r7   r   )r:   r3   r  r   fullrc   triur   r   rm   cloner;   rn   r  )ry   r{  r|  rk   r   r   r   rs  r  mask_lengthpadding_masks              r-   r  zAFalconModel._prepare_4d_causal_attention_mask_with_cache_position  s   > %.*<*<*>!*C(K* ' E*..I** -0Ye\j\q\qK !##jjqA5<<n>S>STWeWmWmnprsWtttK%dD!Q&67>>z1bRTUK))//1,2226*1aL[L+@ANSTVZ\`bcScDdDgDg&&E    ,q05@Aq,;,AV5W5c5c )6Aq!\k\12 r/   )NNNNNNNNNNN)r0   r1   r2   r    rW   rW  r3   r4   r[  r   r   r   r   r   r   r   r   r.   rn  staticmethodintrk   r  rw   rx   s   @r-   rE  rE    s3   | 2$.5<< .  15ae15370448$(,0/3&*59B
E,,-B
 "%uU5<<;U5VX[5[/\(\"]^B
 !.	B

 u//0B
 E,,-B
   0 01B
 D>B
 $D>B
 'tnB
 d^B
 !!1!12B
 
uU\\3&')RR	SB
 B
HUU llU 	U
 U  U <<U ||Un 444 4 {{	4
 4 4 4r/   rE  z
    The Falcon Model transformer with a language modeling head on top (linear layer with weights tied to the input embeddings).
    )custom_introc                        e Zd ZdgZdef fdZdej                  fdZe		 	 	 	 	 	 	 	 	 	 	 	 	 dde
ej                     de
eeeeej                  ej                  f   df   f      d	e
ej                     d
e
ej                     de
ej                     de
ej                     de
ej                     de
e   de
e   de
e   de
e   de
ej                     deeej                  f   deeej                     ef   fd       Z xZS )FalconForCausalLMzlm_head.weightrN   c                     t         |   |       t        |      | _        t	        j
                  |j                  |j                  d      | _        | j                          y NFr   )
rV   rW   rE  r$  r   r+  r   rI  lm_headrS  r+   rN   rd   s     r-   rW   zFalconForCausalLM.__init__  sI     &v.yy!3!3V5F5FUS 	r/   rX  c                     || _         y r'   )r  rZ  s     r-   set_output_embeddingsz'FalconForCausalLM.set_output_embeddings  s	    %r/   r\  r]  .ry   rG   r   r^  labelsr   r   r_  r`  r   logits_to_keepr%   c                    ||n| j                   j                  }| j                  ||||||||	|
||      }|d   }t        |t              rt        | d      n|}| j                  |dd|ddf         }d}|* | j                  ||fd| j                   j                  i|}|s|f|dd z   }||f|z   S |S t        |||j                  |j                  |j                        S )a\  
        input_ids (`torch.LongTensor` of shape `(batch_size, input_ids_length)`):
            `input_ids_length` = `sequence_length` if `past_key_values` is `None` else `past_key_values.get_seq_length()`
            (`sequence_length` of input past key value states). Indices of input sequence tokens in the vocabulary.

            If `past_key_values` is used, only `input_ids` that do not have their past calculated should be passed as
            `input_ids`.

            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
            [`PreTrainedTokenizer.__call__`] for details.

            [What are input IDs?](../glossary#input-ids)
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for language modeling. Note that the labels **are shifted** inside the model, i.e. you can set
            `labels = input_ids` Indices are selected in `[-100, 0, ..., config.vocab_size]` All labels set to `-100`
            are ignored (masked), the loss is only computed for labels in `[0, ..., config.vocab_size]`
        N)
r]  ry   rG   r   r^  r   r   r_  r`  r   r   rI  r   losslogitsr]  r,   ri  )rN   rj  r$  rY   r  slicer  loss_functionrI  r   r]  r,   ri  )r+   r\  r]  ry   rG   r   r^  r  r   r   r_  r`  r   r  r   transformer_outputsr,   slice_indices	lm_logitsr  r!  s                        r-   r.   zFalconForCausalLM.forward  s2   H &1%<k$++B]B]"..+)%'/!5#) / 
 ,A.8B>SV8W~ot4]kLLq-/B!CD	%4%%  ;;11 	D \$7$;;F)-)9TGf$EvE0/??-;;*55
 	
r/   )NNNNNNNNNNNNr   )r0   r1   r2   _tied_weights_keysr    rW   r3   r4   r  r   r   r   r   r   r   r   r  r   r.   rw   rx   s   @r-   r  r    s    ++| &ELL &  15ae1537,004)-$(,0/3&*5934J
E,,-J
 "%uU5<<;U5VX[5[/\(\"]^J
 !.	J

 u//0J
 ELL)J
  -J
 &J
 D>J
 $D>J
 'tnJ
 d^J
 !!1!12J
 c5<</0J
  
uU\\"$EE	F!J
 J
r/   r  a  
    The Falcon Model transformer with a sequence classification head on top (linear layer).

    [`FalconForSequenceClassification`] uses the last token in order to do the classification, as other causal models
    (e.g. GPT-1) do.

    Since it does classification on the last token, it requires to know the position of the last token. If a
    `pad_token_id` is defined in the configuration, it finds the last token that is not a padding token in each row. If
    no `pad_token_id` is defined, it simply takes the last value in each row of the batch. Since it cannot guess the
    padding tokens when `inputs_embeds` are passed instead of `input_ids`, it does the same (take the last value in
    each row of the batch).
    c                   x    e Zd Zdef fdZe	 	 	 	 	 	 	 	 	 	 ddeej                     dee	e	ej                  ej                  f   df      deej                     deej                     deej                     d	eej                     d
ee   dee   dee   dee   dee	ej                     ef   fd       Z xZS )FalconForSequenceClassificationrN   c                     t         |   |       |j                  | _        t        |      | _        t        j                  |j                  |j                  d      | _        | j                          y r  )
rV   rW   
num_labelsrE  r$  r   r+  r   scorerS  r  s     r-   rW   z(FalconForSequenceClassification.__init__M  sV      ++&v.YYv1163D3D5Q
 	r/   r\  r]  .ry   r   r^  r  r   r   r_  r`  r%   c                 t   |
|
n| j                   j                  }
| j                  ||||||||	|
	      }|d   }| j                  |      }||j                  d   }n|j                  d   }| j                   j
                  |dk7  rt        d      | j                   j
                  d}n||| j                   j
                  k7  j                  |j                  t        j                        }t        j                  |j                  d   |j                  t        j                        }||z  j                  d      }n.d}t        j                  | j                  j                    d       |t        j                  ||j                  	      |f   }d}|^| j                   j"                  | j$                  dk(  rd
| j                   _        nl| j$                  dkD  rL|j&                  t        j(                  k(  s|j&                  t        j*                  k(  rd| j                   _        nd| j                   _        | j                   j"                  d
k(  rIt-               }| j$                  dk(  r& ||j/                         |j/                               }nc |||      }nY| j                   j"                  dk(  rt1               } |||      }n,| j                   j"                  dk(  rt3               } |||      }|
s|f|dd z   }||f|z   S |S t5        |||j6                  |j8                  |j:                        S )6  
        input_ids (`torch.LongTensor` of shape `(batch_size, input_ids_length)`):
            `input_ids_length` = `sequence_length` if `past_key_values` is `None` else `past_key_values.get_seq_length()`
            (`sequence_length` of input past key value states). Indices of input sequence tokens in the vocabulary.

            If `past_key_values` is used, only `input_ids` that do not have their past calculated should be passed as
            `input_ids`.

            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
            [`PreTrainedTokenizer.__call__`] for details.

            [What are input IDs?](../glossary#input-ids)
        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the sequence classification/regression loss. Indices should be in `[0, ...,
            config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If
            `config.num_labels > 1` a classification loss is computed (Cross-Entropy).
        Nr]  ry   r   r^  r   r   r_  r`  r   r   z=Cannot handle batch sizes > 1 if no padding token is defined.r7   r|   z will not detect padding tokens in `inputs_embeds`. Results may be unexpected if using padding tokens in conjunction with `inputs_embeds.`rc  
regressionsingle_label_classificationmulti_label_classificationr  )rN   rj  r$  r  r;   pad_token_idr   rn   rc   r3   r   r   argmaxr   r   rd   r0   problem_typer  rk   rm  r  r	   squeezer   r   r   r]  r,   ri  )r+   r\  r]  ry   r   r^  r  r   r   r_  r`  r  r,   r  r   last_non_pad_tokennon_pad_masktoken_indicespooled_logitsr  loss_fctr!  s                         r-   r.   z'FalconForSequenceClassification.forwardV  s   @ &1%<k$++B]B]"..+)'/!5# / 

 ,A.M* "+J&,,Q/J;;##+
a\]];;##+!#"%)A)AAEEfmmUZU`U`aL!LL)<V]]Z_ZeZefM"/,">!F!Fr!J!#>>**+ ,Z Z
 u||Jv}}MOaab{{''/??a'/;DKK,__q(fllejj.HFLL\a\e\eLe/LDKK,/KDKK,{{''<7"9??a'#M$9$9$;V^^=MND#M6:D))-JJ+-v6))-II,.v6#%(;AB(??F)-)9TGf$EvE/ /??-;;*55
 	
r/   
NNNNNNNNNN)r0   r1   r2   r    rW   r   r   r3   r   r   r4   r   r   r   r.   rw   rx   s   @r-   r  r  >  s0   |   15SW15,004)-$(,0/3&*g
E,,-g
 "%ellELL.H(I3(N"OPg
 !.	g

 ELL)g
  -g
 &g
 D>g
 $D>g
 'tng
 d^g
 
uU\\"$DD	Eg
 g
r/   r  c                   x    e Zd Zdef fdZe	 	 	 	 	 	 	 	 	 	 ddeej                     dee	e	ej                  ej                  f   df      deej                     deej                     deej                     d	eej                     d
ee   dee   dee   dee   dee	ej                     ef   fd       Z xZS )FalconForTokenClassificationrN   c                    t         |   |       |j                  | _        t        |      | _        t        |dd       |j                  }nt        |dd       |j                  }nd}t        j                  |      | _
        t        j                  |j                  |j                        | _        | j                          y )Nclassifier_dropoutr   g?)rV   rW   r  rE  r$  r7  r  r   r   r   r   r+  r   
classifierrS  )r+   rN   r  rd   s      r-   rW   z%FalconForTokenClassification.__init__  s      ++&v.6/6B!'!:!:V-t4@!'!6!6!$zz"45))F$6$68I8IJ 	r/   r\  r]  .ry   r   r^  r  r   r   r_  r`  r%   c                    |
|
n| j                   j                  }
| j                  ||||||||	|
	      }|d   }| j                  |      }| j	                  |      }d}|Q|j
                  \  }}t               } ||j                  ||z  | j                        |j                  ||z              }|
s|f|dd z   }||f|z   S |S t        |||j                  |j                        S )r  Nr  r   r8   )r  r  r,   ri  )rN   rj  r$  r   r  r;   r   r   r  r   r,   ri  )r+   r\  r]  ry   r   r^  r  r   r   r_  r`  r  r,   r  r  r   r   r  r!  s                      r-   r.   z$FalconForTokenClassification.forward  s   @ &1%<k$++B]B]"..+)'/!5# / 

 ,A.]3/%+\\"J
')HJ3T__Ev{{S]`jSjGkD Y!4QR!88F)-)9TGf$EvE$-;;*55	
 	
r/   r  )r0   r1   r2   r    rW   r   r   r3   r   r   r4   r   r   r   r.   rw   rx   s   @r-   r  r    s0   | "  15SW15,004)-$(,0/3&*B
E,,-B
 "%ellELL.H(I3(N"OPB
 !.	B

 ELL)B
  -B
 &B
 D>B
 $D>B
 'tnB
 d^B
 
uU\\"$99	:B
 B
r/   r  c                   $    e Zd Z fdZe	 	 	 	 	 	 	 	 	 ddeej                     deej                     deej                     deej                     deej                     deej                     dee	   d	ee	   d
ee	   de
eef   fd       Z xZS )FalconForQuestionAnsweringc                     t         |   |       t        |      | _        t	        j
                  |j                  d      | _        | j                          y )Nr8   )	rV   rW   rE  r$  r   r+  r   
qa_outputsrS  r  s     r-   rW   z#FalconForQuestionAnswering.__init__  sA     &v.))F$6$6: 	r/   r\  ry   r   r^  start_positionsend_positionsr   r_  r`  r%   c
           	      $   |	|	n| j                   j                  }	| j                  |||||||	      }
|
d   }| j                  |      }|j	                  dd      \  }}|j                  d      j                         }|j                  d      j                         }d}||t        |j                               dkD  r|j                  d      }t        |j                               dkD  r|j                  d      }|j                  d      }|j                  d|      }|j                  d|      }t        |      } |||      } |||      }||z   dz  }|	s||f|
dd z   }||f|z   S |S t        ||||
j                  |
j                  	      S )
rb  N)ry   r   r^  r   r_  r`  r   r   r7   r9   )ignore_indexr8   )r  start_logits
end_logitsr,   ri  )rN   rj  r$  r  splitr  r   lensizeclampr   r   r,   ri  )r+   r\  ry   r   r^  r  r  r   r_  r`  rw  sequence_outputr  r  r  
total_lossignored_indexr  
start_lossend_lossr!  s                        r-   r.   z"FalconForQuestionAnswering.forward$  s   4 &1%<k$++B]B]"")'/!5# # 
 "!*1#)<<r<#: j#++B/::<''+668

&=+D?'')*Q."1"9"9""==%%'(1, - 5 5b 9(--a0M-33A}EO)//=AM']CH!,@J
M:H$x/14J"J/'!"+=F/9/EZMF*Q6Q+%!!//))
 	
r/   )	NNNNNNNNN)r0   r1   r2   rW   r   r   r3   r   FloatTensorr   r   r   r   r.   rw   rx   s   @r-   r  r    s      156:15596:48,0/3&*G
E,,-G
 !!2!23G
 E--.	G

   1 12G
 "%"2"23G
   0 01G
 $D>G
 'tnG
 d^G
 
u22	3G
 G
r/   r  )r  rE  r#  r  r  r  )Nr   )Kr  r}   typingr   r   r3   torch.utils.checkpointr   torch.nnr   r   r   r	   r
   r   activationsr   cache_utilsr   r   r   
generationr   modeling_attn_mask_utilsr   modeling_flash_attention_utilsr   r   modeling_layersr   modeling_outputsr   r   r   r   r   modeling_rope_utilsr   r   modeling_utilsr   utilsr   r   configuration_falconr    r!   
get_loggerr0   r   r+  r#   r@   rK   rB  rM   r4   r  rk   r   rl   r   r   r   r   r  r  r  r#  rE  r  r  r  r  __all__r5   r/   r-   <module>r     s<     "    L L $ ) ; ; ) i 9  L - / J			H	%
)299 )(8<BII <DJu|| J JEKK J\a\h\h J:5<< 5<< u PT Y^YeYe &d0bii d0Nj)O j)Z		 " . V$3 V$r &O & &R r' r rj	 
Y
- Y

Y
x r
&; r
r
j U
#8 U
 U
p Q
!6 Q
 Q
hr/   