
import itertools
from dataclasses import dataclass
from typing import Any, Callable, Optional, Union

import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn import LayerNorm

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache
from ...generation import GenerationMixin
from ...integrations import use_kernel_forward_from_hub
from ...masking_utils import create_causal_mask
from ...modeling_flash_attention_utils import FlashAttentionKwargs
from ...modeling_layers import GradientCheckpointingLayer
from ...modeling_outputs import BaseModelOutputWithPast, ModelOutput
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple, is_torchdynamo_compiling, logging
from .configuration_glm4v import Glm4vConfig, Glm4vTextConfig, Glm4vVisionConfig


logger = logging.get_logger(__name__)


@use_kernel_forward_from_hub("RMSNorm")
class Glm4vRMSNorm(nn.Module):
    def __init__(self, hidden_size, eps=1e-6):
        """
        Glm4vRMSNorm is equivalent to T5LayerNorm
        """
        super().__init__()
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.variance_epsilon = eps

    def forward(self, hidden_states):
        input_dtype = hidden_states.dtype
        hidden_states = hidden_states.to(torch.float32)
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
        return self.weight * hidden_states.to(input_dtype)

    def extra_repr(self):
        return f"{tuple(self.weight.shape)}, eps={self.variance_epsilon}"
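
# Illustrative sketch (not part of the upstream module): Glm4vRMSNorm rescales by the
# root-mean-square of the last dimension only, with no mean subtraction, computing the
# statistics in float32 and casting back to the input dtype. Assuming a hidden size of
# 4096 (an arbitrary example value), the layer can be exercised in isolation like this:
#
#     norm = Glm4vRMSNorm(4096, eps=1e-6)
#     x = torch.randn(2, 8, 4096, dtype=torch.bfloat16)
#     y = norm(x)  # same shape and dtype as `x`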
   

class Glm4VisionMlp(nn.Module):
    def __init__(self, config, bias: bool = False):
        super().__init__()
        self.hidden_size = config.hidden_size
        self.intermediate_size = config.out_hidden_size
        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=bias)
        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=bias)
        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=bias)
        self.act_fn = ACT2FN[config.hidden_act]

    def forward(self, hidden_state):
        return self.down_proj(self.act_fn(self.gate_proj(hidden_state)) * self.up_proj(hidden_state))


class Glm4vVisionPatchEmbed(nn.Module):
    def __init__(self, config: Glm4vVisionConfig) -> None:
        super().__init__()
        self.patch_size = config.patch_size
        self.temporal_patch_size = config.temporal_patch_size
        self.in_channels = config.in_channels
        self.embed_dim = config.hidden_size

        kernel_size = [self.temporal_patch_size, self.patch_size, self.patch_size]
        self.proj = nn.Conv3d(self.in_channels, self.embed_dim, kernel_size=kernel_size, stride=kernel_size)

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        target_dtype = self.proj.weight.dtype
        hidden_states = hidden_states.view(
            -1, self.in_channels, self.temporal_patch_size, self.patch_size, self.patch_size
        )
        hidden_states = self.proj(hidden_states.to(dtype=target_dtype)).view(-1, self.embed_dim)
        return hidden_states


class Glm4vVisionRotaryEmbedding(nn.Module):
    def __init__(self, dim: int, theta: float = 10000.0) -> None:
        super().__init__()
        inv_freq = 1.0 / (theta ** (torch.arange(0, dim, 2, dtype=torch.float) / dim))
        self.register_buffer("inv_freq", inv_freq, persistent=False)

    def forward(self, seqlen: int) -> torch.Tensor:
        seq = torch.arange(seqlen, device=self.inv_freq.device, dtype=self.inv_freq.dtype)
        freqs = torch.outer(seq, self.inv_freq)
        return freqs


class Glm4vVisionPatchMerger(nn.Module):
    def __init__(self, dim: int, context_dim: int, hidden_act: str, bias: bool = False) -> None:
        super().__init__()
        self.proj = nn.Linear(dim, dim, bias=bias)
        self.post_projection_norm = LayerNorm(dim)
        self.gate_proj = nn.Linear(dim, context_dim, bias=bias)
        self.up_proj = nn.Linear(dim, context_dim, bias=bias)
        self.down_proj = nn.Linear(context_dim, dim, bias=bias)
        self.act1 = nn.GELU()
        self.act_fn = ACT2FN[hidden_act]

    def forward(self, hidden_state: torch.Tensor) -> torch.Tensor:
        hidden_state = self.proj(hidden_state)
        hidden_state = self.act1(self.post_projection_norm(hidden_state))
        return self.down_proj(self.act_fn(self.gate_proj(hidden_state)) * self.up_proj(hidden_state))


class Glm4vVisionEmbeddings(nn.Module):
    def __init__(self, config: Glm4vVisionConfig):
        super().__init__()
        self.config = config
        self.embed_dim = config.hidden_size
        self.image_size = config.image_size
        self.patch_size = config.patch_size

        self.num_patches = (self.image_size // self.patch_size) ** 2
        self.num_positions = self.num_patches
        self.position_embedding = nn.Embedding(self.num_positions, self.embed_dim)
        self.register_buffer("position_ids", torch.arange(self.num_positions).expand((1, -1)), persistent=False)

    def forward(self, embeddings, lengths, image_shapes, h_coords, w_coords) -> torch.Tensor:
        """
        Forward pass with integrated position encoding adaptation using 2D interpolation.

        Args:
            embeddings: Input embeddings tensor
            lengths (torch.Tensor): Sequence lengths for each image in the batch.
            image_shapes (torch.Tensor): Tensor of shape [batch_size, 3] representing the image shapes (t, h, w).
            h_coords (torch.Tensor): Tensor of shape [total_seq] representing the h coordinate for each patch.
            w_coords (torch.Tensor): Tensor of shape [total_seq] representing the w coordinate for each patch.

        Returns:
            torch.Tensor: Embeddings with adapted position encoding added.
        """
        # Get position embedding parameters
        pos_embed_weight = self.position_embedding.weight
        hidden_size = pos_embed_weight.shape[1]
        total_seq = h_coords.shape[0]
        device = pos_embed_weight.device

        # Move coordinates to the correct device
        h_coords, w_coords = h_coords.to(device), w_coords.to(device)

        # Handle the empty sequence case
        if total_seq == 0:
            adapted_pos_embed = torch.empty(0, hidden_size, device=device, dtype=pos_embed_weight.dtype)
        else:
            # Convert inputs to tensors if needed
            if isinstance(lengths, list):
                lengths = torch.tensor(lengths, device=device, dtype=torch.long)
            if not isinstance(image_shapes, torch.Tensor):
                image_shapes = torch.tensor(image_shapes, device=device, dtype=torch.long)

            # Prepare the learned 2D position embedding grid
            orig_size_sq = pos_embed_weight.shape[0]
            orig_size = int(orig_size_sq**0.5)
            pos_embed_2d = (
                pos_embed_weight.view(orig_size, orig_size, hidden_size)
                .permute(2, 0, 1)
                .unsqueeze(0)
                .to(device=device, dtype=torch.float32)
            )

            # Target height/width for every patch, repeated per image
            target_h = torch.cat([image_shapes[i, 1].repeat(lengths[i]) for i in range(len(lengths))]).to(
                device=device, dtype=torch.float32
            )
            target_w = torch.cat([image_shapes[i, 2].repeat(lengths[i]) for i in range(len(lengths))]).to(
                device=device, dtype=torch.float32
            )

            # Normalize patch coordinates to [-1, 1] for grid_sample
            h_coords = h_coords.to(device=device, dtype=torch.float32)
            w_coords = w_coords.to(device=device, dtype=torch.float32)
            norm_w = ((w_coords + 0.5) / target_w) * 2 - 1
            norm_h = ((h_coords + 0.5) / target_h) * 2 - 1

            grid = torch.stack((norm_w, norm_h), dim=-1).unsqueeze(0).unsqueeze(2)

            # Bicubic interpolation of the position embedding at the patch locations
            interpolated_embed_fp32 = F.grid_sample(
                pos_embed_2d, grid, mode="bicubic", align_corners=False, padding_mode="border"
            )

            adapted_pos_embed_fp32 = interpolated_embed_fp32.squeeze(0).squeeze(-1).permute(1, 0)
            adapted_pos_embed = adapted_pos_embed_fp32.to(pos_embed_weight.dtype).to(embeddings.device)

        # Add the adapted position encoding to the patch embeddings
        embeddings = embeddings + adapted_pos_embed
        return embeddings


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., : x.shape[-1] // 2]
    x2 = x[..., x.shape[-1] // 2 :]
    return torch.cat((-x2, x1), dim=-1)


def apply_rotary_pos_emb_vision(
    q: torch.Tensor, k: torch.Tensor, cos: torch.Tensor, sin: torch.Tensor
) -> tuple[torch.Tensor, torch.Tensor]:
    orig_q_dtype = q.dtype
    orig_k_dtype = k.dtype
    q, k = q.float(), k.float()
    cos, sin = cos.unsqueeze(-2).float(), sin.unsqueeze(-2).float()
    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    q_embed = q_embed.to(orig_q_dtype)
    k_embed = k_embed.to(orig_k_dtype)
    return q_embed, k_embed

def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: Optional[torch.Tensor],
    scaling: float,
    dropout: float = 0.0,
    **kwargs,
):
    key_states = repeat_kv(key, module.num_key_value_groups)
    value_states = repeat_kv(value, module.num_key_value_groups)

    attn_weights = torch.matmul(query, key_states.transpose(2, 3)) * scaling
    if attention_mask is not None:
        causal_mask = attention_mask[:, :, :, : key_states.shape[-2]]
        attn_weights = attn_weights + causal_mask

    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)
    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)
    attn_output = torch.matmul(attn_weights, value_states)
    attn_output = attn_output.transpose(1, 2).contiguous()

    return attn_output, attn_weights


class Glm4vVisionAttention(nn.Module):
    def __init__(self, config: Glm4vVisionConfig) -> None:
        super().__init__()
        self.dim = config.hidden_size
        self.num_heads = config.num_heads
        self.head_dim = self.dim // self.num_heads
        self.num_key_value_groups = 1
        self.qkv = nn.Linear(config.hidden_size, config.hidden_size * 3, bias=config.attention_bias)
        self.proj = nn.Linear(config.hidden_size, config.hidden_size, bias=False)
        self.scaling = self.head_dim**-0.5
        self.config = config
        self.attention_dropout = config.attention_dropout
        self.is_causal = False

    def forward(
        self,
        hidden_states: torch.Tensor,
        cu_seqlens: torch.Tensor,
        rotary_pos_emb: Optional[torch.Tensor] = None,
        position_embeddings: Optional[tuple[torch.Tensor, torch.Tensor]] = None,
        **kwargs,
    ) -> torch.Tensor:
        seq_length = hidden_states.shape[0]
        query_states, key_states, value_states = (
            self.qkv(hidden_states).reshape(seq_length, 3, self.num_heads, -1).permute(1, 0, 2, 3).unbind(0)
        )
        if position_embeddings is None:
            logger.warning_once(
                "The attention layers in this model are transitioning from computing the RoPE embeddings internally "
                "through `rotary_pos_emb` (2D tensor of RoPE theta values), to using externally computed "
                "`position_embeddings` (Tuple of tensors, containing cos and sin). In v4.54 `rotary_pos_emb` will be "
                "removed and `position_embeddings` will be mandatory."
            )
            emb = torch.cat((rotary_pos_emb, rotary_pos_emb), dim=-1)
            cos = emb.cos()
            sin = emb.sin()
        else:
            cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb_vision(query_states, key_states, cos, sin)

        query_states = query_states.transpose(0, 1).unsqueeze(0)
        key_states = key_states.transpose(0, 1).unsqueeze(0)
        value_states = value_states.transpose(0, 1).unsqueeze(0)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        if self.config._attn_implementation == "flash_attention_2":
            # Flash Attention 2: use cu_seqlens for variable-length (packed) attention
            max_seqlen = (cu_seqlens[1:] - cu_seqlens[:-1]).max().item()
            attn_output, _ = attention_interface(
                self,
                query_states,
                key_states,
                value_states,
                attention_mask=None,
                scaling=self.scaling,
                dropout=0.0 if not self.training else self.attention_dropout,
                cu_seq_lens_q=cu_seqlens,
                cu_seq_lens_k=cu_seqlens,
                max_length_q=max_seqlen,
                max_length_k=max_seqlen,
                is_causal=False,
                **kwargs,
            )
        else:
            # Other implementations: split the packed sequence and attend per image/video
            lengths = cu_seqlens[1:] - cu_seqlens[:-1]
            splits = [
                torch.split(tensor, lengths.tolist(), dim=2) for tensor in (query_states, key_states, value_states)
            ]

            attn_outputs = [
                attention_interface(
                    self,
                    q,
                    k,
                    v,
                    attention_mask=None,
                    scaling=self.scaling,
                    dropout=0.0 if not self.training else self.attention_dropout,
                    is_causal=False,
                    **kwargs,
                )[0]
                for q, k, v in zip(*splits)
            ]
            attn_output = torch.cat(attn_outputs, dim=1)

        attn_output = attn_output.reshape(seq_length, -1).contiguous()
        attn_output = self.proj(attn_output)
        return attn_output

class Glm4vVisionBlock(GradientCheckpointingLayer):
    def __init__(self, config) -> None:
        super().__init__()
        self.norm1 = Glm4vRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.norm2 = Glm4vRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.attn = Glm4vVisionAttention(config)
        self.mlp = Glm4VisionMlp(config, bias=False)

    def forward(
        self,
        hidden_states: torch.Tensor,
        cu_seqlens: torch.Tensor,
        rotary_pos_emb: Optional[torch.Tensor] = None,
        position_embeddings: Optional[tuple[torch.Tensor, torch.Tensor]] = None,
        **kwargs,
    ) -> torch.Tensor:
        hidden_states = hidden_states + self.attn(
            self.norm1(hidden_states),
            cu_seqlens=cu_seqlens,
            rotary_pos_emb=rotary_pos_emb,
            position_embeddings=position_embeddings,
            **kwargs,
        )
        hidden_states = hidden_states + self.mlp(self.norm2(hidden_states))
        return hidden_states


@auto_docstring
class Glm4vPreTrainedModel(PreTrainedModel):
    config: Glm4vConfig
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["Glm4vTextDecoderLayer", "Glm4vVisionBlock"]
    _skip_keys_device_placement = "past_key_values"
    _supports_flash_attn = True
    _supports_sdpa = True
    _can_compile_fullgraph = True
    _supports_attention_backend = True


class Glm4vVisionModel(Glm4vPreTrainedModel):
    config: Glm4vVisionConfig
    _no_split_modules = ["Glm4vVisionBlock"]

    def __init__(self, config) -> None:
        super().__init__(config)
        self.spatial_merge_size = config.spatial_merge_size
        self.patch_size = config.patch_size

        self.embeddings = Glm4vVisionEmbeddings(config)
        self.patch_embed = Glm4vVisionPatchEmbed(config)

        head_dim = config.hidden_size // config.num_heads
        self.rotary_pos_emb = Glm4vVisionRotaryEmbedding(head_dim // 2)

        self.blocks = nn.ModuleList([Glm4vVisionBlock(config) for _ in range(config.depth)])
        self.merger = Glm4vVisionPatchMerger(
            dim=config.out_hidden_size, context_dim=config.intermediate_size, hidden_act=config.hidden_act
        )

        self.post_conv_layernorm = Glm4vRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.downsample = nn.Conv2d(
            in_channels=config.hidden_size,
            out_channels=config.out_hidden_size,
            kernel_size=config.spatial_merge_size,
            stride=config.spatial_merge_size,
        )
        self.post_layernorm = Glm4vRMSNorm(config.hidden_size, eps=config.rms_norm_eps)

        self.gradient_checkpointing = False
        self.post_init()

    def rot_pos_emb(self, grid_thw):
        pos_ids = []
        for t, h, w in grid_thw:
            hpos_ids = torch.arange(h).unsqueeze(1).expand(-1, w)
            hpos_ids = hpos_ids.reshape(
                h // self.spatial_merge_size,
                self.spatial_merge_size,
                w // self.spatial_merge_size,
                self.spatial_merge_size,
            )
            hpos_ids = hpos_ids.permute(0, 2, 1, 3)
            hpos_ids = hpos_ids.flatten()

            wpos_ids = torch.arange(w).unsqueeze(0).expand(h, -1)
            wpos_ids = wpos_ids.reshape(
                h // self.spatial_merge_size,
                self.spatial_merge_size,
                w // self.spatial_merge_size,
                self.spatial_merge_size,
            )
            wpos_ids = wpos_ids.permute(0, 2, 1, 3)
            wpos_ids = wpos_ids.flatten()
            pos_ids.append(torch.stack([hpos_ids, wpos_ids], dim=-1).repeat(t, 1))
        pos_ids = torch.cat(pos_ids, dim=0)
        max_grid_size = grid_thw[:, 1:].max()
        rotary_pos_emb_full = self.rotary_pos_emb(max_grid_size)
        rotary_pos_emb = rotary_pos_emb_full[pos_ids].flatten(1)
        return rotary_pos_emb, pos_ids

    def forward(self, hidden_states: torch.Tensor, grid_thw: torch.Tensor) -> torch.Tensor:
        """
        Args:
            hidden_states (`torch.Tensor` of shape `(seq_len, hidden_size)`):
                The final hidden states of the model.
            grid_thw (`torch.Tensor` of shape `(num_images_or_videos, 3)`):
                The temporal, height and width of feature shape of each image in LLM.

        Returns:
            `torch.Tensor`: hidden_states.
        """
        hidden_states = self.patch_embed(hidden_states)
        hidden_states = self.post_conv_layernorm(hidden_states)

        rotary_pos_emb, image_type_ids = self.rot_pos_emb(grid_thw)
        emb = torch.cat((rotary_pos_emb, rotary_pos_emb), dim=-1)
        position_embeddings = (emb.cos(), emb.sin())

        cu_seqlens = torch.repeat_interleave(grid_thw[:, 1] * grid_thw[:, 2], grid_thw[:, 0]).cumsum(
            dim=0,
            dtype=grid_thw.dtype if torch.jit.is_tracing() else torch.int32,
        )
        cu_seqlens = F.pad(cu_seqlens, (1, 0), value=0)
        seqlens = (cu_seqlens[1:] - cu_seqlens[:-1]).tolist()
        hidden_states = self.embeddings(hidden_states, seqlens, grid_thw, image_type_ids[:, 0], image_type_ids[:, 1])

        for blk in self.blocks:
            hidden_states = blk(hidden_states, cu_seqlens=cu_seqlens, position_embeddings=position_embeddings)

        hidden_states = self.post_layernorm(hidden_states)

        hidden_states = hidden_states.view(
            -1, self.spatial_merge_size, self.spatial_merge_size, hidden_states.shape[-1]
        )
        hidden_states = hidden_states.permute(0, 3, 1, 2)
        hidden_states = self.downsample(hidden_states).view(-1, self.config.out_hidden_size)

        hidden_states = self.merger(hidden_states)
        return hidden_states


class Glm4vTextRotaryEmbedding(nn.Module):
    def __init__(self, config: Glm4vTextConfig, device=None):
        super().__init__()
        # BC: "rope_type" was originally "type"
        if hasattr(config, "rope_scaling") and config.rope_scaling is not None:
            self.rope_type = config.rope_scaling.get("rope_type", config.rope_scaling.get("type"))
        else:
            self.rope_type = "default"
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings

        self.config = config
        self.rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]

        inv_freq, self.attention_scaling = self.rope_init_fn(self.config, device)
        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self.original_inv_freq = self.inv_freq

    @torch.no_grad()
    @dynamic_rope_update
    def forward(self, x, position_ids):
        inv_freq_expanded = self.inv_freq[None, None, :, None].float().expand(3, position_ids.shape[1], -1, 1)
        position_ids_expanded = position_ids[:, :, None, :].float()

        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
        with torch.autocast(device_type=device_type, enabled=False):  # force float32
            freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(2, 3)
            emb = torch.cat((freqs, freqs), dim=-1)
            cos = emb.cos() * self.attention_scaling
            sin = emb.sin() * self.attention_scaling

        return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype)


def rotate_half_llm(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., 0::2]
    x2 = x[..., 1::2]
    return torch.stack((-x2, x1), dim=-1).flatten(-2)


def apply_multimodal_rotary_pos_emb(q, k, cos, sin, mrope_section, unsqueeze_dim=1):
    """Applies Rotary Position Embedding with Multimodal Sections to the query and key tensors (https://qwenlm.github.io/blog/qwen2-vl/).

    Explanation:
        Multimodal 3D rotary position embedding is an extension to 1D rotary position embedding. The input embedding
        sequence contains vision (images / videos) embedding and text embedding or just contains text embedding. For
        vision embedding part, we apply rotary position embedding on temporal, height and width dimension separately.
        Here we split the channel dimension to 3 chunks for the temporal, height and width rotary position embedding.
        For text embedding part, we just apply 1D rotary position embedding. The three rotary position index (temporal,
        height and width) of text embedding is always the same, so the text embedding rotary position embedding has no
        difference with modern LLMs.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        mrope_section(`List(int)`):
            Multimodal rope section is for channel dimension of temporal, height and width in rope calculation.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    mrope_section = mrope_section * 2
    cos = torch.cat([m[i % 3] for i, m in enumerate(cos.split(mrope_section, dim=-1))], dim=-1).unsqueeze(
        unsqueeze_dim
    )
    sin = torch.cat([m[i % 3] for i, m in enumerate(sin.split(mrope_section, dim=-1))], dim=-1).unsqueeze(
        unsqueeze_dim
    )

    # Interleave the rotary frequencies instead of the usual half/half layout
    cos = cos[..., : cos.shape[-1] // 2].repeat_interleave(2, dim=-1)
    sin = sin[..., : sin.shape[-1] // 2].repeat_interleave(2, dim=-1)

    # Only the first `rotary_dim` channels are rotated; the rest pass through unchanged
    rotary_dim = cos.shape[-1]
    q_rot, q_pass = q[..., :rotary_dim], q[..., rotary_dim:]
    k_rot, k_pass = k[..., :rotary_dim], k[..., rotary_dim:]

    q_embed = (q_rot * cos) + (rotate_half_llm(q_rot) * sin)
    k_embed = (k_rot * cos) + (rotate_half_llm(k_rot) * sin)

    # Concatenate the rotated and pass-through parts back to the full head dimension
    q_embed = torch.cat([q_embed, q_pass], dim=-1)
    k_embed = torch.cat([k_embed, k_pass], dim=-1)
    return q_embed, k_embed

class Glm4vTextAttention(nn.Module):
    """
    Multi-headed attention from 'Attention Is All You Need' paper.
    and "Generating Long Sequences with Sparse Transformers".
    """

    def __init__(self, config: Glm4vTextConfig, layer_idx: Optional[int] = None):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        self.hidden_size = config.hidden_size
        self.num_heads = config.num_attention_heads
        self.head_dim = self.hidden_size // self.num_heads
        self.num_key_value_heads = config.num_key_value_heads
        self.num_key_value_groups = self.num_heads // self.num_key_value_heads
        self.is_causal = True
        self.attention_dropout = config.attention_dropout
        self.rope_scaling = config.rope_scaling
        self.scaling = self.head_dim**-0.5
        self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=config.attention_bias)
        self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias)
        self.v_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias)
        self.o_proj = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=False)

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_value: Optional[Cache] = None,
        output_attentions: bool = False,
        use_cache: bool = False,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[FlashAttentionKwargs],
    ) -> tuple[torch.Tensor, Optional[torch.Tensor], Optional[tuple[torch.Tensor]]]:
        bsz, q_len, _ = hidden_states.size()

        query_states = self.q_proj(hidden_states)
        key_states = self.k_proj(hidden_states)
        value_states = self.v_proj(hidden_states)

        query_states = query_states.view(bsz, q_len, -1, self.head_dim).transpose(1, 2)
        key_states = key_states.view(bsz, q_len, -1, self.head_dim).transpose(1, 2)
        value_states = value_states.view(bsz, q_len, -1, self.head_dim).transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_multimodal_rotary_pos_emb(
            query_states, key_states, cos, sin, self.rope_scaling["mrope_section"]
        )

        if past_key_value is not None:
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx, cache_kwargs)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            dropout=0.0 if not self.training else self.attention_dropout,
            scaling=self.scaling,
            **kwargs,
        )

        attn_output = attn_output.reshape(bsz, q_len, -1).contiguous()
        attn_output = self.o_proj(attn_output)
        return attn_output, attn_weights, past_key_value


class Glm4vTextMLP(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.gate_up_proj = nn.Linear(config.hidden_size, 2 * config.intermediate_size, bias=False)
        self.down_proj = nn.Linear(config.intermediate_size, config.hidden_size, bias=False)
        self.activation_fn = ACT2FN[config.hidden_act]

    def forward(self, hidden_states: torch.FloatTensor) -> torch.FloatTensor:
        up_states = self.gate_up_proj(hidden_states)

        gate, up_states = up_states.chunk(2, dim=-1)
        up_states = up_states * self.activation_fn(gate)

        return self.down_proj(up_states)


class Glm4vTextDecoderLayer(GradientCheckpointingLayer):
    def __init__(self, config: Glm4vTextConfig, layer_idx: int):
        super().__init__()
        self.hidden_size = config.hidden_size
        self.self_attn = Glm4vTextAttention(config, layer_idx)
        self.mlp = Glm4vTextMLP(config)
        self.input_layernorm = Glm4vRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_attention_layernorm = Glm4vRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_self_attn_layernorm = Glm4vRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_mlp_layernorm = Glm4vRMSNorm(config.hidden_size, eps=config.rms_norm_eps)

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_value: Optional[tuple[torch.Tensor]] = None,
        output_attentions: Optional[bool] = False,
        use_cache: Optional[bool] = False,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs,
    ) -> tuple[torch.FloatTensor, Optional[tuple[torch.FloatTensor, torch.FloatTensor]]]:
        residual = hidden_states

        hidden_states = self.input_layernorm(hidden_states)

        # Self Attention
        hidden_states, self_attn_weights, present_key_value = self.self_attn(
            hidden_states=hidden_states,
            position_embeddings=position_embeddings,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_value=past_key_value,
            output_attentions=output_attentions,
            use_cache=use_cache,
            cache_position=cache_position,
            **kwargs,
        )
        hidden_states = self.post_self_attn_layernorm(hidden_states)
        hidden_states = residual + hidden_states

        # Fully Connected
        residual = hidden_states
        hidden_states = self.post_attention_layernorm(hidden_states)
        hidden_states = self.mlp(hidden_states)
        hidden_states = self.post_mlp_layernorm(hidden_states)
        hidden_states = residual + hidden_states

        outputs = (hidden_states,)
        if output_attentions:
            outputs += (self_attn_weights,)
        if use_cache:
            outputs += (present_key_value,)
        return outputs
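
# Illustrative note (assumption, not from the upstream source): in addition to the usual
# pre-attention and pre-MLP norms, the decoder layer above also re-normalizes each sub-layer's
# output before the residual addition, roughly:
#
#     h = x + post_self_attn_layernorm(attn(input_layernorm(x)))
#     y = h + post_mlp_layernorm(mlp(post_attention_layernorm(h)))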

@dataclass
@auto_docstring(
    custom_intro="""
    Base class for Llava outputs, with hidden states and attentions.
    """
)
class Glm4vModelOutputWithPast(ModelOutput):
    r"""
    past_key_values (`Cache`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`):
        Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of shape
        `(batch_size, num_heads, sequence_length, embed_size_per_head)`)

        Contains pre-computed hidden-states (key and values in the self-attention blocks) that can be used (see
        `past_key_values` input) to speed up sequential decoding.
    rope_deltas (`torch.LongTensor` of shape `(batch_size, )`, *optional*):
        The rope index difference between sequence length and multimodal rope.
    """

    last_hidden_state: torch.FloatTensor = None
    past_key_values: Optional[list[torch.FloatTensor]] = None
    hidden_states: Optional[tuple[torch.FloatTensor]] = None
    attentions: Optional[tuple[torch.FloatTensor]] = None
    rope_deltas: Optional[torch.LongTensor] = None


@auto_docstring
class Glm4vTextModel(Glm4vPreTrainedModel):
    config: Glm4vTextConfig

    def __init__(self, config: Glm4vTextConfig):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        self.layers = nn.ModuleList(
            [Glm4vTextDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.norm = Glm4vRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.rotary_emb = Glm4vTextRotaryEmbedding(config=config)
        self.gradient_checkpointing = False

        # Initialize weights and apply final processing
        self.post_init()

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[list[torch.FloatTensor]] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> Union[tuple, BaseModelOutputWithPast]:
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        use_cache = use_cache if use_cache is not None else self.config.use_cache

        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")

        if self.gradient_checkpointing and self.training and use_cache:
            logger.warning_once(
                "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
            )
            use_cache = False

        if use_cache and past_key_values is None and not torch.jit.is_tracing():
            past_key_values = DynamicCache()

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        if cache_position is None:
            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )

        # the hard coded `3` is for temporal, height and width
        if position_ids is None:
            position_ids = cache_position.view(1, 1, -1).expand(3, inputs_embeds.shape[0], -1)
        elif position_ids.dim() == 2:
            position_ids = position_ids[None, ...].expand(3, position_ids.shape[0], -1)

        causal_mask = create_causal_mask(
            config=self.config,
            input_embeds=inputs_embeds,
            attention_mask=attention_mask,
            cache_position=cache_position,
            past_key_values=past_key_values,
            position_ids=position_ids,
        )

        hidden_states = inputs_embeds

        # create position embeddings to be shared across the decoder layers
        position_embeddings = self.rotary_emb(hidden_states, position_ids)

        all_hidden_states = () if output_hidden_states else None
        all_self_attns = () if output_attentions else None

        for decoder_layer in self.layers:
            if output_hidden_states:
                all_hidden_states += (hidden_states,)

            layer_outputs = decoder_layer(
                hidden_states,
                position_embeddings=position_embeddings,
                attention_mask=causal_mask,
                position_ids=position_ids,
                past_key_value=past_key_values,
                output_attentions=output_attentions,
                use_cache=use_cache,
                cache_position=cache_position,
                **kwargs,
            )

            hidden_states = layer_outputs[0]

            if output_attentions:
                all_self_attns += (layer_outputs[1],)

        hidden_states = self.norm(hidden_states)

        # add hidden states from the last decoder layer
        if output_hidden_states:
            all_hidden_states += (hidden_states,)

        return BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values if use_cache else None,
            hidden_states=all_hidden_states,
            attentions=all_self_attns,
        )


@auto_docstring
class Glm4vModel(Glm4vPreTrainedModel):
    base_model_prefix = ""
    _checkpoint_conversion_mapping = {}
    config: Glm4vConfig
    _no_split_modules = ["Glm4vTextDecoderLayer", "Glm4vVisionBlock"]

    def __init__(self, config):
        super().__init__(config)
        self.visual = Glm4vVisionModel._from_config(config.vision_config)
        self.language_model = Glm4vTextModel._from_config(config.text_config)
        self.rope_deltas = None  # cache rope_deltas here

        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self):
        return self.language_model.get_input_embeddings()

    def set_input_embeddings(self, value):
        self.language_model.set_input_embeddings(value)

    def set_decoder(self, decoder):
        self.language_model = decoder

    def get_decoder(self):
        return self.language_model

    def get_rope_index(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        image_grid_thw: Optional[torch.LongTensor] = None,
        video_grid_thw: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
    ) -> tuple[torch.Tensor, torch.Tensor]:
        """
        Calculate the 3D rope index based on image and video's temporal, height and width in LLM.

        Explanation:
            Each embedding sequence contains vision embedding and text embedding or just contains text embedding.

            For pure text embedding sequence, the rotary position embedding has no difference with modern LLMs.
            Examples:
                input_ids: [T T T T T], here T is for text.
                temporal position_ids: [0, 1, 2, 3, 4]
                height position_ids: [0, 1, 2, 3, 4]
                width position_ids: [0, 1, 2, 3, 4]

            For vision and text embedding sequence, we calculate 3D rotary position embedding for vision part
            and 1D rotary position embedding for text part.
            Examples:
                Temporal (Time): 3 patches, representing different segments of the video in time.
                Height: 2 patches, dividing each frame vertically.
                Width: 2 patches, dividing each frame horizontally.
                We also have some important parameters:
                fps (Frames Per Second): The video's frame rate, set to 1. This means one frame is processed each second.
                tokens_per_second: This is a crucial parameter. It dictates how many "time-steps" or "temporal tokens" are conceptually packed into a one-second interval of the video. In this case, we have 25 tokens per second. So each second of the video will be represented with 25 separate time points. It essentially defines the temporal granularity.
                temporal_patch_size: The number of frames that compose one temporal patch. Here, it's 2 frames.
                interval: The step size for the temporal position IDs, calculated as tokens_per_second * temporal_patch_size / fps. In this case, 25 * 2 / 1 = 50. This means that each temporal patch will be have a difference of 50 in the temporal position IDs.
                input_ids: [V V V V V V V V V V V V T T T T T], here V is for vision.
                vision temporal position_ids: [0, 0, 0, 0, 50, 50, 50, 50, 100, 100, 100, 100]
                vision height position_ids: [0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1]
                vision width position_ids: [0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1]
                text temporal position_ids: [101, 102, 103, 104, 105]
                text height position_ids: [101, 102, 103, 104, 105]
                text width position_ids: [101, 102, 103, 104, 105]
                Here we calculate the text start position_ids as the max vision position_ids plus 1.

        Args:
            input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`):
                Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide
                it.
            image_grid_thw (`torch.LongTensor` of shape `(num_images, 3)`, *optional*):
                The temporal, height and width of feature shape of each image in LLM.
            video_grid_thw (`torch.LongTensor` of shape `(num_videos, 3)`, *optional*):
                The temporal, height and width of feature shape of each video in LLM.
            attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*):
                Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:

                - 1 for tokens that are **not masked**,
                - 0 for tokens that are **masked**.

        Returns:
            position_ids (`torch.LongTensor` of shape `(3, batch_size, sequence_length)`)
            mrope_position_deltas (`torch.Tensor` of shape `(batch_size)`)
        """
        spatial_merge_size = self.config.vision_config.spatial_merge_size
        image_token_id = self.config.image_token_id
        video_start_token_id = self.config.video_start_token_id
        video_end_token_id = self.config.video_end_token_id

        mrope_position_deltas = []
        if input_ids is not None and (image_grid_thw is not None or video_grid_thw is not None):
            total_input_ids = input_ids
            if attention_mask is None:
                attention_mask = torch.ones_like(total_input_ids)
            position_ids = torch.ones(
                3,
                input_ids.shape[0],
                input_ids.shape[1],
                dtype=input_ids.dtype,
                device=input_ids.device,
            )
            image_index, video_index = 0, 0
            video_group_index = 0
            attention_mask = attention_mask.to(total_input_ids.device)
            for i, input_ids in enumerate(total_input_ids):
                input_ids = input_ids[attention_mask[i] == 1]
                input_tokens = input_ids.tolist()

                input_token_type = []
                video_check_flg = False
                for token in input_tokens:
                    if token == video_start_token_id:
                        video_check_flg = True
                    elif token == video_end_token_id:
                        video_check_flg = False

                    if token == image_token_id and not video_check_flg:
                        input_token_type.append("image")
                    elif token == image_token_id and video_check_flg:
                        input_token_type.append("video")
                    else:
                        input_token_type.append("text")

                input_type_group = []
                for key, group in itertools.groupby(enumerate(input_token_type), lambda x: x[1]):
                    group = list(group)
                    start_index = group[0][0]
                    end_index = group[-1][0] + 1
                    input_type_group.append((key, start_index, end_index))

                llm_pos_ids_list = []
                video_frame_num = 1
                for modality_type, start_idx, end_idx in input_type_group:
                    st_idx = llm_pos_ids_list[-1].max() + 1 if len(llm_pos_ids_list) > 0 else 0

                    if modality_type == "image":
                        t, h, w = (
                            image_grid_thw[image_index][0],
                            image_grid_thw[image_index][1],
                            image_grid_thw[image_index][2],
                        )
                        llm_grid_t, llm_grid_h, llm_grid_w = (
                            t.item(),
                            h.item() // spatial_merge_size,
                            w.item() // spatial_merge_size,
                        )

                        t_index = torch.arange(llm_grid_t).view(-1, 1).expand(-1, llm_grid_h * llm_grid_w).flatten()
                        h_index = torch.arange(llm_grid_h).view(1, -1, 1).expand(llm_grid_t, -1, llm_grid_w).flatten()
                        w_index = torch.arange(llm_grid_w).view(1, 1, -1).expand(llm_grid_t, llm_grid_h, -1).flatten()
                        llm_pos_ids_list.append(torch.stack([t_index, h_index, w_index]) + st_idx)

                        image_index += 1
                        video_frame_num = 1

                    elif modality_type == "video":
                        t, h, w = (
                            video_frame_num,
                            video_grid_thw[video_index][1],
                            video_grid_thw[video_index][2],
                        )
                        llm_grid_t, llm_grid_h, llm_grid_w = (
                            t,
                            h.item() // spatial_merge_size,
                            w.item() // spatial_merge_size,
                        )

                        for t_idx in range(llm_grid_t):
                            t_index = torch.tensor(t_idx).view(-1, 1).expand(-1, llm_grid_h * llm_grid_w).flatten()
                            h_index = torch.arange(llm_grid_h).view(1, -1, 1).expand(1, -1, llm_grid_w).flatten()
                            w_index = torch.arange(llm_grid_w).view(1, 1, -1).expand(1, llm_grid_h, -1).flatten()
                            llm_pos_ids_list.append(torch.stack([t_index, h_index, w_index]) + st_idx)

                        video_group_index += 1
                        if video_group_index >= video_grid_thw[video_index][0]:
                            video_index += 1
                            video_group_index = 0

                        video_frame_num += 1

                    else:
                        text_len = end_idx - start_idx
                        llm_pos_ids_list.append(torch.arange(text_len).view(1, -1).expand(3, -1) + st_idx)

                        video_frame_num = 1

                llm_positions = torch.cat(llm_pos_ids_list, dim=1).reshape(3, -1)
                position_ids[..., i, attention_mask[i] == 1] = llm_positions.to(position_ids.device)
                mrope_position_deltas.append(llm_positions.max() + 1 - len(total_input_ids[i]))
            mrope_position_deltas = torch.tensor(mrope_position_deltas, device=input_ids.device).unsqueeze(1)
            return position_ids, mrope_position_deltas
        else:
            if attention_mask is not None:
                position_ids = attention_mask.long().cumsum(-1) - 1
                position_ids.masked_fill_(attention_mask == 0, 1)
                position_ids = position_ids.unsqueeze(0).expand(3, -1, -1).to(attention_mask.device)
                max_position_ids = position_ids.max(0, keepdim=False)[0].max(-1, keepdim=True)[0]
                mrope_position_deltas = max_position_ids + 1 - attention_mask.shape[-1]
            else:
                position_ids = (
                    torch.arange(input_ids.shape[1], device=input_ids.device)
                    .view(1, 1, -1)
                    .expand(3, input_ids.shape[0], -1)
                )
                mrope_position_deltas = torch.zeros(
                    [input_ids.shape[0], 1],
                    device=input_ids.device,
                    dtype=input_ids.dtype,
                )

            return position_ids, mrope_position_deltas

    def get_video_features(
        self, pixel_values_videos: torch.FloatTensor, video_grid_thw: Optional[torch.LongTensor] = None
    ):
        """
        Encodes videos into continuous embeddings that can be forwarded to the language model.

        Args:
            pixel_values_videos (`torch.FloatTensor` of shape `(batch_size, num_channels, image_size, image_size)`):
                The tensors corresponding to the input videos.
            video_grid_thw (`torch.LongTensor` of shape `(num_videos, 3)`, *optional*):
                The temporal, height and width of feature shape of each video in LLM.
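
        Example (an illustrative sketch: `model` and `inputs` are assumed to be a loaded `Glm4vModel` and
        the corresponding video processor output, they are not defined here):

        ```python
        >>> video_embeds = model.get_video_features(
        ...     inputs["pixel_values_videos"], video_grid_thw=inputs["video_grid_thw"]
        ... )
        >>> len(video_embeds)  # one embedding tensor per input video
        ```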
        r   r   r   rJ  r6   r5   )rc  r  r9   r*   r   r  r   r   rI  r   prodr;  r  r  )r.   r  r  temp_frames_hwrL  rM  rN  repeated_rowflattened_video_grid_thwvideo_embedssplit_sizess              r2   get_video_featureszGlm4vModel.get_video_featuresy  s     266t{{7H7HI% 	0GAq! <<AFFHaffh(?@JJ1MTTUVXYZL!!,/	0 $)99^#C {{#6AY{Z%**2.$++2P2PRS2SS[[]{{<=r3   pixel_valuesc                    |j                  | j                  j                        }| j                  ||      }|j                  d      | j                  j                  dz  z  j                         }t        j                  ||      }|S )a  
        Encodes images into continuous embeddings that can be forwarded to the language model.

        Args:
            pixel_values (`torch.FloatTensor` of shape `(batch_size, num_channels, image_size, image_size)`):
                The tensors corresponding to the input images.
            image_grid_thw (`torch.LongTensor` of shape `(num_images, 3)`, *optional*):
                The temporal, height and width of feature shape of each image in LLM.
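
        Example (an illustrative sketch: `model` and `inputs` are assumed to be a loaded `Glm4vModel` and
        the corresponding image processor output, they are not defined here):

        ```python
        >>> image_embeds = model.get_image_features(
        ...     inputs["pixel_values"], image_grid_thw=inputs["image_grid_thw"]
        ... )
        >>> len(image_embeds)  # one embedding tensor per input image
        ```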
        r  r6   r5   )rc  r  r9   r  r;  r  r*   r  )r.   r  r  image_embedsr  s        r2   get_image_featureszGlm4vModel.get_image_features  st     $(():):;{{<.{I%**2.$++2P2PRS2SS[[]{{<=r3   r  image_featuresvideo_featuresc                 X   || | j                         t        j                  | j                  j                  t        j
                  |j                              k(  }|j                  d      }| | j                         t        j                  | j                  j                  t        j
                  |j                              k(  }|j                  d      }n2|| j                  j                  k(  }|| j                  j                  k(  }|j                         }|j                  d      j                  |      j                  |j                        }|B||   j                         |j                         k7  rt        d| d|j                  d          |j                         }|j                  d      j                  |      j                  |j                        }|B||   j                         |j                         k7  rt        d| d|j                  d          ||fS )z
        Obtains the multimodal placeholder mask from `input_ids` or `inputs_embeds`, and checks that the placeholder token count is
        equal to the length of multimodal features. If the lengths are different, an error is raised.
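
        Example (a conceptual sketch of how the returned masks are typically used; `input_ids`, `inputs_embeds`
        and `image_embeds` are assumed to already exist):

        ```python
        >>> special_image_mask, special_video_mask = model.get_placeholder_mask(
        ...     input_ids, inputs_embeds=inputs_embeds, image_features=image_embeds
        ... )
        >>> inputs_embeds = inputs_embeds.masked_scatter(special_image_mask, image_embeds)
        ```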
        r  r6   z6Image features and image tokens do not match: tokens: z, features r   z7Videos features and video tokens do not match: tokens: )r  r*   r   r[   r  r   r   allvideo_token_idsumr   	expand_asr:   numelr  rE   )	r.   r  r  r"  r#  special_image_maskspecial_video_maskn_image_tokensn_video_tokenss	            r2   get_placeholder_maskzGlm4vModel.get_placeholder_mask  s    !.2M$2K2K2MT[[77uzzR_RfRfg3 " "4!7!7!;!.2M$2K2K2MT[[77uzzR_RfRfg3 " "4!7!7!;!*dkk.H.H!H!*dkk.H.H!H+//1/99"=GGVYYZgZnZno%-8J*K*Q*Q*SWeWkWkWm*mHHXXcdrdxdxyzd{c|}  ,//1/99"=GGVYYZgZnZno%-8J*K*Q*Q*SWeWkWkWm*mI.IYYdeseyeyz{e|d}~  "#555r3   r   r,  r  r  r  r  r  r   c                    ||n| j                   j                  }||n| j                   j                  }|du |duz  rt        d      | | j	                         |      }|	v| j                  |	|      }t        j                  |d      j                  |j                  |j                        }| j                  |||      \  }}|j                  ||      }|
v| j                  |
|      }t        j                  |d      j                  |j                  |j                        }| j                  |||      \  }}|j                  ||      }|t        |t              s|n|d   }||j                   dk(  rtt        j"                  |dddf   d	d
      }|j                  j$                  r?|t        j&                  |j                        j(                  z  }d|z
  j+                         }t-               xr2 |duxr |j.                  d	   d	k7  xs |duxr |j.                  d	   d	k7  }t-                xr) |duxr |d   dk(  xs |du xs |j1                         dk(  }|s|s| j2                   | j5                  ||||      \  }}|| _        n|j.                  \  }}}|+|d   | j2                  z   j                  |j                        nd}t        j6                  ||j                        }|j9                  d	d      j;                  |d      }|#|j=                  ||j.                  d   z  d      }|j?                  |      }|jA                  d      j;                  ddd      } | jB                  dd|||||||d|d
|}tE        |jF                  |jH                  |jJ                  |jL                  | j2                        S )a  
        image_grid_thw (`torch.LongTensor` of shape `(num_images, 3)`, *optional*):
            The temporal, height and width of feature shape of each image in LLM.
        video_grid_thw (`torch.LongTensor` of shape `(num_videos, 3)`, *optional*):
            The temporal, height and width of feature shape of each video in LLM.
        rope_deltas (`torch.LongTensor` of shape `(batch_size, )`, *optional*):
            The rope index difference between sequence length and multimodal rope.
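
        Example (an illustrative sketch of a plain forward pass; `model` and `inputs` are assumed to be a loaded
        `Glm4vModel` and the corresponding processor output, they are not defined here):

        ```python
        >>> outputs = model(
        ...     input_ids=inputs["input_ids"],
        ...     attention_mask=inputs["attention_mask"],
        ...     pixel_values=inputs.get("pixel_values"),
        ...     image_grid_thw=inputs.get("image_grid_thw"),
        ... )
        >>> outputs.last_hidden_state.shape  # (batch_size, sequence_length, hidden_size)
        ```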
        Nr  r   r   )r"  )r#  full_attention   r   r5   )dim1dim2ry   )r   r  r6   r	   T)
r  r   r   r,  r  r  r  r  return_dictr  )r  r,  r?   r  r  r6  )'r[   r  r  r  r  r!  r*   r   r:   r   r9   r.  masked_scatterr  r   dictndimdiagonalis_floating_pointfinfominr   r   rE   r  r  r  r}   rp   r   rU  addr   r  r  r  r,  r?   r  )r.   r  r   r   r,  r  r  r  r  r  r  r  r  r  r  r   r   
image_maskr  r  
video_maskattention_mask_tensorprefill_compiled_stageprefill_noncompiled_stage
batch_sizer  deltar  s                               r2   rB   zGlm4vModel.forward  s(   : 2C1N-TXT_T_TqTq$8$D $++JjJj 	 -t";<YZZ 7D557	BM#22<PL 99\q9<<]=Q=QS`SfSfgL 55i_k5lMJ)88\RM*223FWL 99\q9<<]=Q=QS`SfSfgL 55i_k5lMAz)88\RM&0&FN[kLl " %05J5O5OST5T(-7LQPQT7RYZab(c%(..@@,AEKKPePkPkDlDpDp,p)-03H-H,M,M,O) &>%? &$&B9??1+=+B O!-M-2E2Ea2HA2M # -E,F(F )t+Fq0AQ0F V#t+T/M/M/OST/T & '*CHXHXH`,0,?,?""#8	 -@ -)k $/  -:,?,?)
J &1 $A&)9)99==m>R>RS 
  %||J}?S?ST+00B7>>z2N!-!33J%++a.4PVW3XE+//6+55a8??2rJ%$%% 
%)+'/!5)
 
 (%77#33!//))((
 	
r3   )NNNNr]   r  )NNNNNNNNNNNNNN)!rH   rI   rJ   r.  _checkpoint_conversion_mappingr   r-  r0  r'   r  r  r  r  r   r*   r  rs   rD   r  r  r  r!  r.  r   r   r   ra   r   r   r   r  rB   rK   rL   s   @r2   r  r    s   %'"02DE:8&#
 15595915|7E,,-|7 !!1!12|7 !!1!12	|7
 !.|7 
u||U\\)	*|7~ dh#(#4#4FNuO_O_F`0u/@/@ RZ[`[k[kRl ( -1,0&6##&6 ((&6 ))	&6
 ))&6P  '+1537=A59$(,0/3/3;?59592659t
##t
 !.t
 u//0	t

 "$u'8'8"9:t
   1 12t
 D>t
 $D>t
 'tnt
 u||,t
 &e&7&78t
 !!1!12t
 !!1!12t
 e../t
 !!1!12t
  +,!t
" 
u..	/#t
  t
r3   r  zQ
    Base class for Glm4v causal language model (or autoregressive) outputs.
    c                      e Zd ZU dZdZeej                     ed<   dZ	eej                     ed<   dZ
eeej                        ed<   dZeeej                        ed<   dZeeej                        ed<   dZeej                      ed<   y)	Glm4vCausalLMOutputWithPasta  
    loss (`torch.FloatTensor` of shape `(1,)`, *optional*, returned when `labels` is provided):
        Language modeling loss (for next-token prediction).
    logits (`torch.FloatTensor` of shape `(batch_size, sequence_length, config.vocab_size)`):
        Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax).
    past_key_values (`Cache`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`):
        Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of shape
        `(batch_size, num_heads, sequence_length, embed_size_per_head)`)

        Contains pre-computed hidden-states (key and values in the self-attention blocks) that can be used (see
        `past_key_values` input) to speed up sequential decoding.
    rope_deltas (`torch.LongTensor` of shape `(batch_size, )`, *optional*):
        The rope index difference between sequence length and multimodal rope.
    Nlosslogitsr,  r?   r  r  )rH   rI   rJ   r  rG  r   r*   r  r-  rH  r,  r   r?   rD   r  r  r  r6  r3   r2   rF  rF  B  s     )-D(5$$
%,*.FHU&&'.9=OXd5#4#456=8<M8E%"3"345<59Ju00129.2K%**+2r3   rF  c            +           e Zd Zi ZdgZ fdZd Zd Zd Zd Z		 d$de
j                  dee
j                     fd	Zd$d
e
j                  dee
j                     fdZed        Zed        Zee	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 d%de
j                  dee
j*                     dee
j                     deee
j                        dee
j                     dee
j                     dee   dee   dee   dee   d
ee
j*                     dee
j                     dee
j                     dee
j                     dee
j                     dee
j                     deee
j*                  f   dee   deeef   f&d              Z	 	 	 	 	 	 	 	 	 	 d& fd	Z	 d$dee
j                     dee
j*                     dee
j*                  e
j*                  f   fd Z 	 	 	 d'd!ed"edee
j                     dee
j                  e!e"e#f   f   fd#Z$ xZ%S )(Glm4vForConditionalGenerationzlm_head.weightc                     t         |   |       t        |      | _        t	        j
                  |j                  j                  |j                  j                  d      | _	        | j                          y )NFrR   )r&   r'   r  r*  r(   rU   r  r/   r  lm_headrF  r   s     r2   r'   z&Glm4vForConditionalGeneration.__init__d  sS     '
yy!3!3!?!?ASASA^A^ejkr3   c                 6    | j                   j                         S r]   )r*  r  rF   s    r2   r  z2Glm4vForConditionalGeneration.get_input_embeddingsk  s    zz..00r3   c                 :    | j                   j                  |       y r]   )r*  r  r  s     r2   r  z2Glm4vForConditionalGeneration.set_input_embeddingsn  s    

''.r3   c                 :    | j                   j                  |       y r]   )r*  r  r  s     r2   r  z)Glm4vForConditionalGeneration.set_decoderq  s    

w'r3   c                 6    | j                   j                         S r]   )r*  r  rF   s    r2   r  z)Glm4vForConditionalGeneration.get_decodert  s    zz%%''r3   r  r  c                 :    | j                   j                  ||      S r]   )r*  r  )r.   r  r  s      r2   r  z0Glm4vForConditionalGeneration.get_video_featuresw  s     zz,,-@.QQr3   r  r  c                 :    | j                   j                  ||      S r]   )r*  r!  )r.   r  r  s      r2   r!  z0Glm4vForConditionalGeneration.get_image_features|  s    zz,,\>JJr3   c                 .    | j                   j                  S r]   )r*  r  rF   s    r2   r  z,Glm4vForConditionalGeneration.language_model  s    zz(((r3   c                 .    | j                   j                  S r]   )r*  r  rF   s    r2   r  z$Glm4vForConditionalGeneration.visual  s    zz   r3   r  r   r   r,  r  labelsr  r  r  r4  r  r  logits_to_keepr   rd   c                    ||n| j                   j                  }|	|	n| j                   j                  }	 | j                  d||||||||||||	|d|}|d   }t	        |t
              rt        | d      n|}| j                  |dd|ddf         }d}|2| j                  ||| j                   j                  j                        }t        |||j                  |j                  |j                  |j                        S )a  
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the masked language modeling loss. Indices should either be in `[0, ...,
            config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored
            (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`.
        image_grid_thw (`torch.LongTensor` of shape `(num_images, 3)`, *optional*):
            The temporal, height and width of feature shape of each image in LLM.
        video_grid_thw (`torch.LongTensor` of shape `(num_videos, 3)`, *optional*):
            The temporal, height and width of feature shape of each video in LLM.
        rope_deltas (`torch.LongTensor` of shape `(batch_size, )`, *optional*):
            The rope index difference between sequence length and multimodal rope.

        Example:

        ```python
        >>> from PIL import Image
        >>> import requests
        >>> from transformers import AutoProcessor, Glm4vForConditionalGeneration

        >>> model = Glm4vForConditionalGeneration.from_pretrained("THUDM/GLM-4.1V-9B-Thinking")
        >>> processor = AutoProcessor.from_pretrained("THUDM/GLM-4.1V-9B-Thinking")

        >>> messages = [
        ...     {
        ...         "role": "user",
        ...         "content": [
        ...             {"type": "image"},
        ...             {"type": "text", "text": "What is shown in this image?"},
        ...         ],
        ...     },
        ... ]
        >>> url = "https://www.ilankelman.org/stopsigns/australia.jpg"
        >>> image = Image.open(requests.get(url, stream=True).raw)

        >>> text = processor.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
        >>> inputs = processor(text=[text], images=[image], return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> processor.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "The image shows a street scene with a red stop sign in the foreground. In the background, there is a large red gate with Chinese characters ..."
        ```N)r  r  r  r  r  r   r   r,  r  r  r  r  r  r   )rH  rU  r  )rG  rH  r,  r?   r  r  r6  )r[   r  r  r*  r   r   slicerL  loss_functionr  r  rF  r,  r?   r  r  )r.   r  r   r   r,  r  rU  r  r  r  r4  r  r  r  r  r  r  rV  r   r  r?   slice_indicesrH  rG  s                           r2   rB   z%Glm4vForConditionalGeneration.forward  s/   D 2C1N-TXT_T_TqTq$8$D $++JjJj 	 $** 
% 3))%)+'/!5)
 
"  
 9C>SV8W~ot4]kmA}a,?@A%%VFt{{OfOfOqOq%rD*#33!//))++
 	
r3   c                 l    t        |   |f|||||||	|
||d
|}d |d<   |d   dk7  r
d |d<   d |d<   |S )N)
r,  r   r  r  r   r  r  r  r  r  r   r   r  r  )r&   prepare_inputs_for_generation)r.   r  r,  r   r  r  r   r  r  r  r  r  r   model_inputsr1   s                 r2   r\  z;Glm4vForConditionalGeneration.prepare_inputs_for_generation  sx    " w<
+)')%% 3))
 
  (,^$!!+/L(26L./r3   c                    || | j                         t        j                  | j                  j                  t        j
                  |j                              k(  d   }| | j                         t        j                  | j                  j                  t        j
                  |j                              k(  d   }| | j                         t        j                  | j                  j                  t        j
                  |j                              k(  d   }nK|| j                  j                  k(  }|| j                  j                  k(  }|| j                  j                  k(  }t        j                  |j                         |j                         z
  d      }|dkD  }|| z  }|j                  d      }	|j                  d      }
|	|
fS )aa  
        Get the number of images and videos for each sample to calculate the separation length of the sample tensor.
        These counts are derived directly from `input_ids` rather than being passed in by the processor, so changes to the processor interface cannot affect them.

        Args:
            input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`):
                Indices of input sequence tokens in the vocabulary.

        Returns:
            image_nums (`torch.LongTensor` of shape `(batch_size, num_images_sample)`)
            video_nums (`torch.LongTensor` of shape `(batch_size, num_videos_sample)`)
        r  ).r   r   r   r   )r  r*   r   r[   image_start_token_idr   r   r  r  rV  r   r'  )r.   r  r  is_imageis_video_startis_video_endvideo_levelinside_videostandalone_imagesimage_countsvideo_countss              r2   _get_image_nums_and_video_numsz<Glm4vForConditionalGeneration._get_image_nums_and_video_nums  s   $ $.4,,.LL!A!A\i\p\pq H .4,,.LL!A!A\i\p\pq N .4,,.LL!?!?uzzZgZnZno L !DKK$D$DDH&$++*J*JJN$(F(FFL ll>#5#5#7,:J:J:L#LRST"Q %6 ),,,3%))a)0\))r3   expand_sizeis_encoder_decoderc                      dk(  rfS g d fd}fd} |      j                  d       |      |r*j                  d      t        d       |d         d<   fS )	Nr   )r  r  r  r  second_per_grid_tsc           
         j                  dd       }j                  dd       }j                  j                  dd             \  }}d }| D ]  }|dk(  rct        j                  |t	        |            }|D cg c]'  }t        j
                  |d      j                         ) }	} || |   |		      | |<   l|dk(  rt	        |      }	 || |   |		      | |<   |d
k(  rct        j                  |t	        |            }|D cg c]'  }t        j
                  |d      j                         ) }	} || |   |		      | |<   |dk(  rt	        |      }	 || |   |		      | |<   |dk(  s t        | |   t              st        d| dt        | |          d      t        j                  | |         }
t	        |      }	 ||
|		      }
|
j                         | |<    | S c c}w c c}w )Nr  r  r  )r  c                     t        j                  | |      }|gdg| j                         dz
  z  z   }t        j                  |D cg c]  } |j                  |  c}d      }|S c c}w )Nr   r   r   )r*   r  rv   r   r   )r   r   repeat_timessamplesrepeat_argssampleresults          r2   _repeat_interleave_sampleszGlm4vForConditionalGeneration._expand_inputs_for_generation.<locals>._expand_dict_for_generation_visual.<locals>._repeat_interleave_samplesl  sa    ++a1+nsaeegk/BBg#VFMFMM;$?#V\]^ $Ws   A&r  r   r   )r   ro  r  rl  zExpected value for key 'z' to be a list, but got z	 instead.)rf  rh  r*   r  r   r  r'  r   	TypeErrorrc  r   r  )dict_to_expandr  r  
image_nums
video_numsrt  r   rp  rr  r   r   ri  r  model_kwargsr.   s              r2   "_expand_dict_for_generation_visualzgGlm4vForConditionalGeneration._expand_inputs_for_generation.<locals>._expand_dict_for_generation_visuale  s*   )--.>EN)--.>EN%)%H%H)9)9/4)P &I &"J
 & ":.(#kk.$z:JKGMTU6uzz&a8<<>UGU*D&s+W;+N3' ,,":.G*D&s+W;+N3' 11#kk.$z:JKGMTU6uzz&a8<<>UGU*D&s+W;+N3' ,,":.G*D&s+W;+N3' 00%nS&94@'6se;STXYghkYlTmSnnwx  #\\.*=>F":.G7^ijF*0--/N3'E":F "!= V Vs   =,G(,G-c                     | D ]J  }|dk7  s	| |   t        | |   t        j                        s-|vs2| |   j                  d      | |<   L | S )Nr  r   r   )r   r*   rs   rU  )rv  r   ri  visual_keyss     r2   _expand_dict_for_generationz`Glm4vForConditionalGeneration._expand_inputs_for_generation.<locals>._expand_dict_for_generation  sl    % d++&s+7">##6E;.*8*=*O*OP[ab*O*cN3'd "!r3   r   r   encoder_outputszMIf `is_encoder_decoder` is True, make sure that `encoder_outputs` is defined.)rU  rf  r  )r.   ri  rj  r  ry  rz  r}  r|  s   `` ``  @r2   _expand_inputs_for_generationz;Glm4vForConditionalGeneration._expand_inputs_for_generationT  s     !l**w0	"d		" :,G !33KQ3GI2<@ 12: !pqq.I,WhJi.jL*+,&&r3   r]   )NNNNNNNNNNNNNNNNr   )
NNNNNTNNNN)r   FN)&rH   rI   rJ   rD  _tied_weights_keysr'   r  r  r  r  r*   r  r   r  r  r!  propertyr  r  r   r   rs   r   ra   r   r   r   r   rD   rF  rB   r\  rh  r6  r   r   r  rK   rL   s   @r2   rJ  rJ  `  sO   %'"*+1/(( dhR#(#4#4RFNuO_O_F`R
Ku/@/@ KRZ[`[k[kRl K ) ) ! !  '+1537=A59-1$(,0/3&*/3;?5959265934%g
##g
 !.g
 u//0	g

 "$u'8'8"9:g
   1 12g
 ))*g
 D>g
 $D>g
 'tng
 d^g
 u||,g
 &e&7&78g
 !!1!12g
 !!1!12g
  e../!g
" !!1!12#g
$ c5<</0%g
& +,'g
( 
u11	2)g
  g
X  'X 156*E,,-6*  -6* 
u||U\\)	*	6*t #(04	Z'Z' !Z' E,,-	Z' 
uc3h/	0Z'r3   rJ  )rJ  r  r)  r  )r  )r   )Ur  dataclassesr   typingr   r   r   r   r*   torch.nnr(   torch.nn.functionalr   r   r   activationsr
   cache_utilsr   r   
generationr   integrationsr   masking_utilsr   modeling_flash_attention_utilsr   modeling_layersr   modeling_outputsr   r   modeling_rope_utilsr   r   modeling_utilsr   r   processing_utilsr   utilsr   r   r   r   r   configuration_glm4vr   r    r!   
get_loggerrH   r  Moduler$   rN   rc   ru   r   r   r   rs   rD   r   r   r   r~   r   r   r  r)  r8  r_  rw  r  r  r  r+  r  r  r  rF  rJ  __all__r6  r3   r2   <module>r     s  *  ! 1 1      ! . ) 7 / B 9 D K F & l l P P 
		H	% Y'J299 J (J(fBII fBII (	 	fRYY f"TBII Tn(||+0<<>Cll
5<<%&	UU\\ 	U# 	U%,, 	U& %II%<<% 
% <<	%
 U\\*% % % '(%4[299 [|1 6 
'? 
' 
'k+ k\!<ryy !<H66rJ9 J9Z)299 )$:6 :z 
3{ 3 3& w
) w
 w
t _
% _
 _
D 
3+ 3 30N'$8/ N'b
 dr3   