
from typing import Callable, Optional, Union

import torch
import torch.nn as nn

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache
from ...generation import GenerationMixin
from ...integrations import use_kernel_forward_from_hub
from ...masking_utils import create_causal_mask
from ...modeling_layers import (
    GenericForSequenceClassification,
    GenericForTokenClassification,
    GradientCheckpointingLayer,
)
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple
from ...utils.generic import check_model_inputs
from .configuration_glm import GlmConfig


class GlmMLP(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.gate_up_proj = nn.Linear(config.hidden_size, 2 * config.intermediate_size, bias=False)
        self.down_proj = nn.Linear(config.intermediate_size, config.hidden_size, bias=False)
        self.activation_fn = ACT2FN[config.hidden_act]

    def forward(self, hidden_states: torch.FloatTensor) -> torch.FloatTensor:
        up_states = self.gate_up_proj(hidden_states)

        gate, up_states = up_states.chunk(2, dim=-1)
        up_states = up_states * self.activation_fn(gate)

        return self.down_proj(up_states)
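
# Equivalence sketch (illustrative comment, not part of the upstream module): the fused
# `gate_up_proj` is the Llama-style `gate_proj` and `up_proj` stacked into one matmul,
# and `chunk(2, dim=-1)` recovers the two halves, so the block computes
#
#     down_proj(act(gate) * up)  ==  down_proj(act(W_g @ x) * (W_u @ x))
#
# where W_g and W_u are the top and bottom row-halves of `gate_up_proj.weight`.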


def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: Optional[torch.Tensor],
    scaling: float,
    dropout: float = 0.0,
    **kwargs: Unpack[TransformersKwargs],
):
    key_states = repeat_kv(key, module.num_key_value_groups)
    value_states = repeat_kv(value, module.num_key_value_groups)

    attn_weights = torch.matmul(query, key_states.transpose(2, 3)) * scaling
    if attention_mask is not None:
        causal_mask = attention_mask[:, :, :, : key_states.shape[-2]]
        attn_weights = attn_weights + causal_mask

    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)
    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)
    attn_output = torch.matmul(attn_weights, value_states)
    attn_output = attn_output.transpose(1, 2).contiguous()

    return attn_output, attn_weights


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., 0::2]
    x2 = x[..., 1::2]
    return torch.stack((-x2, x1), dim=-1).flatten(-2)
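
# Worked example (illustrative comment, not part of the upstream module): unlike the
# Llama variant, which negates the second *half* of the head dim, this rotate_half
# pairs adjacent channels (0, 1), (2, 3), ... and maps each pair (a, b) to (-b, a):
#
#     rotate_half(torch.tensor([1.0, 2.0, 3.0, 4.0]))   # tensor([-2., 1., -4., 3.])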


def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)

    # Interleave them instead of usual shape
    cos = cos[..., : cos.shape[-1] // 2].repeat_interleave(2, dim=-1)
    sin = sin[..., : sin.shape[-1] // 2].repeat_interleave(2, dim=-1)

    # Keep half or full tensor for later concatenation
    rotary_dim = cos.shape[-1]
    q_rot, q_pass = q[..., :rotary_dim], q[..., rotary_dim:]
    k_rot, k_pass = k[..., :rotary_dim], k[..., rotary_dim:]

    # Apply rotary embeddings on the first half or full tensor
    q_embed = (q_rot * cos) + (rotate_half(q_rot) * sin)
    k_embed = (k_rot * cos) + (rotate_half(k_rot) * sin)

    # Concatenate back to full shape
    q_embed = torch.cat([q_embed, q_pass], dim=-1)
    k_embed = torch.cat([k_embed, k_pass], dim=-1)
    return q_embed, k_embed
ej                  ej                  f   deej                     dee   d	eej                     d
ee   de
ej                  ej                  f   fdZ xZS )GlmAttentionz=Multi-headed attention from 'Attention Is All You Need' paperr%   	layer_idxc                 P   t         |           || _        || _        t	        |d|j
                  |j                  z        | _        |j                  |j                  z  | _	        | j                  dz  | _
        |j                  | _        d| _        t        j                  |j
                  |j                  | j                  z  |j                        | _        t        j                  |j
                  |j                  | j                  z  |j                        | _        t        j                  |j
                  |j                  | j                  z  |j                        | _        t        j                  |j                  | j                  z  |j
                  d      | _        y )NrL   g      Tr!   F)r#   r$   r%   r   getattrr(   num_attention_headsrL   rJ   r[   rS   attention_dropout	is_causalr&   r'   attention_biasq_projk_projv_projo_projr/   r%   r   r0   s      r1   r$   zGlmAttention.__init__   sD   "
F4F4F&JdJd4de$*$>$>&B\B\$\!}}d*!'!9!9ii : :T]] JQWQfQf
 ii : :T]] JQWQfQf
 ii : :T]] JQWQfQf
 ii : :T]] JFL^L^ejkr2   r3   position_embeddingsrR   past_key_valuecache_positionrU   r4   c                 4   |j                   d d }g |d| j                  }| j                  |      j                  |      j	                  dd      }	| j                  |      j                  |      j	                  dd      }
| j                  |      j                  |      j	                  dd      }|\  }}t        |	|
||      \  }	}
|'|||d}|j                  |
|| j                  |      \  }
}t        }| j                  j                  dk7  rt        | j                  j                     } || |	|
||f| j                  sdn| j                  | j                   d|\  }} |j"                  g |d j%                         }| j'                  |      }||fS )Nr6   r   r    )rw   rv   r   eager        )rT   rS   )rF   rL   r   viewr]   r   r   r   updater   rh   r%   _attn_implementationr   rZ   r   rS   rH   rb   r   )r/   r3   r   rR   r   r   rU   input_shapehidden_shapequery_statesrc   rd   rv   rw   cache_kwargsattention_interfacerg   re   s                     r1   r<   zGlmAttention.forward   s    $))#2.88b8$--8{{=166|DNNqRST[[/44\BLLQPQR
{{=166|DNNqRST&S#7jRUWZ#[ j%#&snUL'5'<'<ZW[WeWegs't$J(?;;++w6"9$++:Z:Z"[$7	%
  $}}C$2H2HLL	%
 	%
!\ *k));;;;FFHkk+.L((r2   N)NN)r=   r>   r?   __doc__r   r   intr$   r@   Tensortupler   
LongTensorr   r   r<   rB   rC   s   @r1   r   r      s    Gly lXc] l4 +/59))||)) #5<<#=>)) !.	))
 !)) !!1!12)) +,)) 
u||U\\)	*))r2   r   RMSNormc                   ,     e Zd Zd fd	Zd Zd Z xZS )
GlmRMSNormc                     t         |           t        j                  t	        j
                  |            | _        || _        y)z9
        GlmRMSNorm is equivalent to T5LayerNorm
        N)r#   r$   r&   	Parameterr@   onesweightvariance_epsilon)r/   r(   epsr0   s      r1   r$   zGlmRMSNorm.__init__   s1     	ll5::k#:; #r2   c                 "   |j                   }|j                  t        j                        }|j	                  d      j                  dd      }|t        j                  || j                  z         z  }| j                  |j                  |      z  S )Nr    r6   T)keepdim)	rX   ra   r@   r`   powmeanrsqrtr   r   )r/   r3   input_dtypevariances       r1   r<   zGlmRMSNorm.forward   sy    #))%((7 $$Q',,R,>%Ht?T?T4T(UU{{]--k:::r2   c                 ^    t        | j                  j                         d| j                   S )Nz, eps=)r   r   rF   r   r/   s    r1   
extra_reprzGlmRMSNorm.extra_repr   s*    ))*+6$2G2G1HIIr2   )gư>)r=   r>   r?   r$   r<   r   rB   rC   s   @r1   r   r      s    $;Jr2   r   c                   ^     e Zd Zddef fdZ ej                         ed               Z xZ	S )GlmRotaryEmbeddingr%   c                    t         |           t        |d      rUt        |j                  t
              r;|j                  j                  d|j                  j                  d            | _        nd| _        |j                  | _	        |j                  | _
        || _        t        | j                     | _        | j                  | j                  |      \  }| _        | j                  d|d       | j                   | _        y )Nrope_scaling	rope_typetypedefaultinv_freqF)
persistent)r#   r$   hasattr
isinstancer   dictgetr   max_position_embeddingsmax_seq_len_cachedoriginal_max_seq_lenr%   r   rope_init_fnattention_scalingregister_bufferr   original_inv_freq)r/   r%   devicer   r0   s       r1   r$   zGlmRotaryEmbedding.__init__   s    6>*z&:M:Mt/T#0044[&BUBUBYBYZ`BabDN&DN"("@"@$*$B$B!/?+/+<+<T[[&+Q($(ZeD!%r2   c                 b   | j                   d d d d f   j                         j                  |j                  d   dd      j	                  |j
                        }|d d d d d f   j                         }t        |j
                  j                  t              r/|j
                  j                  dk7  r|j
                  j                  nd}t        j                  |d      5  |j                         |j                         z  j                  dd      }t        j                  ||fd	      }|j                         | j                  z  }|j                         | j                  z  }	d d d        j	                  |j                   
      	j	                  |j                   
      fS # 1 sw Y   AxY w)Nr   r6   r   mpscpuF)device_typeenabledr    r7   )rX   )r   floatrG   rF   ra   r   r   r   strr@   autocastr]   rs   rv   r   rw   rX   )
r/   rl   rx   inv_freq_expandedposition_ids_expandedr   freqsembrv   rw   s
             r1   r<   zGlmRotaryEmbedding.forward  sV    !MM$4-8>>@GGHZHZ[\H]_acdehhijiqiqr ,QaZ 8 > > @'1!((--'E!((--[`J`ahhmmfk^^UC 	5&,,.1F1L1L1NNYYZ[]^_E))UEN3C'')d444C'')d444C		5 vvAGGv$cff177f&;;;	5 	5s    BF%%F.r   )
r=   r>   r?   r   r$   r@   no_gradr   r<   rB   rC   s   @r1   r   r      s3    /y /" U]]_<  <r2   r   c                   (    e Zd Zdedef fdZ	 	 	 	 	 	 ddej                  deej                     deej                     dee
   dee   d	eej                     d
eeej                  ej                  f      dee   deej                     fdZ xZS )GlmDecoderLayerr%   r   c                     t         |           |j                  | _        t        ||      | _        t        |      | _        t        |j                  |j                        | _	        t        |j                  |j                        | _
        y )N)r%   r   r   )r#   r$   r(   r   	self_attnr   mlpr   rms_norm_epsinput_layernormpost_attention_layernormr   s      r1   r$   zGlmDecoderLayer.__init__  sk    !--%VyI&>)&*<*<&BUBUV(263E3E6K^K^(_%r2   r3   rR   rx   r   	use_cacher   r   rU   r4   c                     |}	| j                  |      } | j                  d|||||||d|\  }}
|	|z   }|}	| j                  |      }| j                  |      }|	|z   }|S )N)r3   rR   rx   r   r   r   r    )r   r   r   r   )r/   r3   rR   rx   r   r   r   r   rU   residual_s              r1   r<   zGlmDecoderLayer.forward  s     !,,];)4>> 	
')%)) 3	
 	
q !=0 !55mD/ =0r2   )NNNFNN)r=   r>   r?   r   r   r$   r@   r   r   r   r   boolr   r   r   r<   rB   rC   s   @r1   r   r     s    `y `S ` 2637*.$)59KO|| !. u//0	
 ! D> !!1!12 &eELL%,,,F&GH +, 
u||	r2   r   c                   J    e Zd ZU eed<   dZdZdgZdgZdZ	dZ
dZdZdZeedZy)GlmPreTrainedModelr%   modelTr   past_key_values)r3   
attentionsN)r=   r>   r?   r   __annotations__base_model_prefixsupports_gradient_checkpointing_no_split_modules_skip_keys_device_placement_supports_flash_attn_supports_sdpa_supports_flex_attn_can_compile_fullgraph_supports_attention_backendr   r   _can_record_outputsr   r2   r1   r   r   ?  sQ    &*#*+#4"5N!"&("r2   r   c                       e Zd Zdef fdZee	 	 	 	 	 	 	 ddeej                     deej                     deej                     dee   deej                     deej                     d	ee   d
ee   defd              Z xZS )GlmModelr%   c           	         t         |   |       |j                  | _        |j                  | _        t        j                  |j                  |j                  | j                        | _        t        j                  t        |j                        D cg c]  }t        ||       c}      | _        t        |j                  |j                        | _        t#        |      | _        d| _        | j)                          y c c}w )Nr   )r%   F)r#   r$   pad_token_idpadding_idx
vocab_sizer&   	Embeddingr(   embed_tokens
ModuleListrangenum_hidden_layersr   layersr   r   normr   
rotary_embgradient_checkpointing	post_initr   s      r1   r$   zGlmModel.__init__T  s     !.. ++LL):):F<N<NPTP`P`ammAFvG_G_A`aI_VY/a
 v11v7J7JK	,F;&+# 	 bs   D	input_idsrR   rx   r   inputs_embedsr   r   rU   r4   c           
      *   |d u |d uz  rt        d      || j                  |      }|r|
t               }|F||j                         nd}	t	        j
                  |	|	|j                  d   z   |j                        }||j                  d      }t        | j                  |||||      }
|}| j                  ||      }| j                  d | j                  j                   D ]  } ||f|
||||d|} | j                  |      }t        ||      S )Nz:You must specify exactly one of input_ids or inputs_embedsr   r   )r   )r%   input_embedsrR   r   r   rx   )rR   rx   r   r   r   )last_hidden_stater   )
ValueErrorr   r	   get_seq_lengthr@   arangerF   r   rq   r   r%   r  r  r  r  r   )r/   r  rR   rx   r   r	  r   r   rU   past_seen_tokensrf   r3   r   decoder_layers                 r1   r<   zGlmModel.forwardd  sT    -t";<YZZ *.*;*;I*FM0*nO!CRC^==?de+0<< "2]5H5H5K"KTaThTh,N )33A6L(;;&))+%
 &"oom\J![[)H4;;+H+HI 		M)*).-$7 M		 		-0&++
 	
r2   )NNNNNNN)r=   r>   r?   r   r$   r   r   r   r@   r   r   r   rA   r   r   r   r   r<   rB   rC   s   @r1   r   r   R  s    y    151537+/5959$(8
E,,-8
 !.8
 u//0	8

 "%8
   1 128
 !!1!128
 D>8
 +,8
 
!8
  8
r2   r   c                   p    e Zd ZdgZddiZddgdgfiZ fdZd Zd Ze	e
	 	 	 	 	 	 	 	 	 dd	eej                     d
eej                     deej                     dee   deej                      deej                     dee   deej                     deeej                  f   dee   defd              Z xZS )GlmForCausalLMzlm_head.weightlm_headcolwise_repr3   logitsc                     t         |   |       t        |      | _        |j                  | _        t        j                  |j                  |j                  d      | _        | j                          y )NFr!   )
r#   r$   r   r   r   r&   r'   r(   r  r  r.   s     r1   r$   zGlmForCausalLM.__init__  sU     f%
 ++yy!3!3V5F5FUS 	r2   c                     || _         y r   r   )r/   decoders     r1   set_decoderzGlmForCausalLM.set_decoder  s	    
r2   c                     | j                   S r   r  r   s    r1   get_decoderzGlmForCausalLM.get_decoder  s    zzr2   r  rR   rx   r   r	  labelsr   r   logits_to_keeprU   r4   c
                 z    | j                   d|||||||d|
}|j                  }t        |	t              rt	        |	 d      n|	}| j                  |dd|ddf         }d}|* | j                  d||| j                  j                  d|
}t        |||j                  |j                  |j                        S )a  
        Example:

        ```python
        >>> from transformers import AutoTokenizer, GlmForCausalLM

        >>> model = GlmForCausalLM.from_pretrained("meta-glm/Glm-2-7b-hf")
        >>> tokenizer = AutoTokenizer.from_pretrained("meta-glm/Glm-2-7b-hf")

        >>> prompt = "Hey, are you conscious? Can you talk to me?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
        ```"""
        outputs: BaseModelOutputWithPast = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = outputs.last_hidden_state
        # Only compute necessary logits, and do not upcast them to float if we are not computing the loss
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])

        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


class GlmForSequenceClassification(GenericForSequenceClassification, GlmPreTrainedModel):
    pass


class GlmForTokenClassification(GenericForTokenClassification, GlmPreTrainedModel):
    pass


__all__ = [
    "GlmPreTrainedModel",
    "GlmModel",
    "GlmForCausalLM",
    "GlmForSequenceClassification",
    "GlmForTokenClassification",
]
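
# Usage sketch (illustrative comment, not part of the upstream module; the checkpoint
# id is a placeholder): `logits_to_keep=1` projects only the last position through the
# LM head, avoiding a full (batch, seq_len, vocab_size) logits tensor on long prompts:
#
#     model = GlmForCausalLM.from_pretrained("<any-glm-checkpoint>")
#     out = model(input_ids, logits_to_keep=1)
#     next_token = out.logits[:, -1].argmax(dim=-1)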