
"""PyTorch ELECTRA model."""

import math
import os
from dataclasses import dataclass
from typing import Callable, Optional, Union

import torch
import torch.utils.checkpoint
from torch import nn
from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss

from ...activations import ACT2FN, get_activation
from ...cache_utils import Cache, EncoderDecoderCache
from ...generation import GenerationMixin
from ...modeling_layers import GradientCheckpointingLayer
from ...modeling_outputs import (
    BaseModelOutputWithCrossAttentions,
    BaseModelOutputWithPastAndCrossAttentions,
    CausalLMOutputWithCrossAttentions,
    MaskedLMOutput,
    MultipleChoiceModelOutput,
    QuestionAnsweringModelOutput,
    SequenceClassifierOutput,
    TokenClassifierOutput,
)
from ...modeling_utils import PreTrainedModel
from ...pytorch_utils import apply_chunking_to_forward, find_pruneable_heads_and_indices, prune_linear_layer
from ...utils import ModelOutput, auto_docstring, logging
from .configuration_electra import ElectraConfig


logger = logging.get_logger(__name__)


def load_tf_weights_in_electra(model, config, tf_checkpoint_path, discriminator_or_generator="discriminator"):
    """Load tf checkpoints in a pytorch model."""
    try:
        import re

        import numpy as np
        import tensorflow as tf
    except ImportError:
        logger.error(
            "Loading a TensorFlow model in PyTorch, requires TensorFlow to be installed. Please see "
            "https://www.tensorflow.org/install/ for installation instructions."
        )
        raise
    tf_path = os.path.abspath(tf_checkpoint_path)
    logger.info(f"Converting TensorFlow checkpoint from {tf_path}")
    # Load weights from TF model
    init_vars = tf.train.list_variables(tf_path)
    names = []
    arrays = []
    for name, shape in init_vars:
        logger.info(f"Loading TF weight {name} with shape {shape}")
        array = tf.train.load_variable(tf_path, name)
        names.append(name)
        arrays.append(array)
    for name, array in zip(names, arrays):
        original_name = name
        try:
            if isinstance(model, ElectraForMaskedLM):
                name = name.replace("electra/embeddings/", "generator/embeddings/")

            if discriminator_or_generator == "generator":
                name = name.replace("electra/", "discriminator/")
                name = name.replace("generator/", "electra/")

            name = name.replace("dense_1", "dense_prediction")
            name = name.replace("generator_predictions/output_bias", "generator_lm_head/bias")

            name = name.split("/")
            # `global_step` and `temperature` are variables that are not required
            # for using the pretrained model
            if any(n in ["global_step", "temperature"] for n in name):
                logger.info(f"Skipping {original_name}")
                continue
            pointer = model
            for m_name in name:
                if re.fullmatch(r"[A-Za-z]+_\d+", m_name):
                    scope_names = re.split(r"_(\d+)", m_name)
                else:
                    scope_names = [m_name]
                if scope_names[0] == "kernel" or scope_names[0] == "gamma":
                    pointer = getattr(pointer, "weight")
                elif scope_names[0] == "output_bias" or scope_names[0] == "beta":
                    pointer = getattr(pointer, "bias")
                elif scope_names[0] == "output_weights":
                    pointer = getattr(pointer, "weight")
                elif scope_names[0] == "squad":
                    pointer = getattr(pointer, "classifier")
                else:
                    pointer = getattr(pointer, scope_names[0])
                if len(scope_names) >= 2:
                    num = int(scope_names[1])
                    pointer = pointer[num]
            if m_name.endswith("_embeddings"):
                pointer = getattr(pointer, "weight")
            elif m_name == "kernel":
                array = np.transpose(array)
            try:
                if pointer.shape != array.shape:
                    raise ValueError(f"Pointer shape {pointer.shape} and array shape {array.shape} mismatched")
            except ValueError as e:
                e.args += (pointer.shape, array.shape)
                raise
            print(f"Initialize PyTorch weight {name}", original_name)
            pointer.data = torch.from_numpy(array)
        except AttributeError as e:
            print(f"Skipping {original_name}", name, e)
            continue
    return model


class ElectraEmbeddings(nn.Module):
    """Construct the embeddings from word, position and token_type embeddings."""

    def __init__(self, config):
        super().__init__()
        self.word_embeddings = nn.Embedding(config.vocab_size, config.embedding_size, padding_idx=config.pad_token_id)
        self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.embedding_size)
        self.token_type_embeddings = nn.Embedding(config.type_vocab_size, config.embedding_size)

        self.LayerNorm = nn.LayerNorm(config.embedding_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        # position_ids (1, len position emb) is contiguous in memory and exported when serialized
        self.register_buffer(
            "position_ids", torch.arange(config.max_position_embeddings).expand((1, -1)), persistent=False
        )
        self.position_embedding_type = getattr(config, "position_embedding_type", "absolute")
        self.register_buffer(
            "token_type_ids", torch.zeros(self.position_ids.size(), dtype=torch.long), persistent=False
        )

    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        token_type_ids: Optional[torch.LongTensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        past_key_values_length: int = 0,
    ) -> torch.Tensor:
        if input_ids is not None:
            input_shape = input_ids.size()
        else:
            input_shape = inputs_embeds.size()[:-1]

        seq_length = input_shape[1]

        if position_ids is None:
            position_ids = self.position_ids[:, past_key_values_length : seq_length + past_key_values_length]

        # When token_type_ids is not passed, use the registered all-zeros buffer: this helps
        # users trace the model without passing token_type_ids.
        if token_type_ids is None:
            if hasattr(self, "token_type_ids"):
                buffered_token_type_ids = self.token_type_ids[:, :seq_length]
                buffered_token_type_ids_expanded = buffered_token_type_ids.expand(input_shape[0], seq_length)
                token_type_ids = buffered_token_type_ids_expanded
            else:
                token_type_ids = torch.zeros(input_shape, dtype=torch.long, device=self.position_ids.device)

        if inputs_embeds is None:
            inputs_embeds = self.word_embeddings(input_ids)
        token_type_embeddings = self.token_type_embeddings(token_type_ids)

        embeddings = inputs_embeds + token_type_embeddings
        if self.position_embedding_type == "absolute":
            position_embeddings = self.position_embeddings(position_ids)
            embeddings += position_embeddings
        embeddings = self.LayerNorm(embeddings)
        embeddings = self.dropout(embeddings)
        return embeddings


class ElectraSelfAttention(nn.Module):
    def __init__(self, config, position_embedding_type=None, layer_idx=None):
        super().__init__()
        if config.hidden_size % config.num_attention_heads != 0 and not hasattr(config, "embedding_size"):
            raise ValueError(
                f"The hidden size ({config.hidden_size}) is not a multiple of the number of attention heads "
                f"({config.num_attention_heads})"
            )

        self.num_attention_heads = config.num_attention_heads
        self.attention_head_size = int(config.hidden_size / config.num_attention_heads)
        self.all_head_size = self.num_attention_heads * self.attention_head_size

        self.query = nn.Linear(config.hidden_size, self.all_head_size)
        self.key = nn.Linear(config.hidden_size, self.all_head_size)
        self.value = nn.Linear(config.hidden_size, self.all_head_size)

        self.dropout = nn.Dropout(config.attention_probs_dropout_prob)
        self.position_embedding_type = position_embedding_type or getattr(
            config, "position_embedding_type", "absolute"
        )
        if self.position_embedding_type == "relative_key" or self.position_embedding_type == "relative_key_query":
            self.max_position_embeddings = config.max_position_embeddings
            self.distance_embedding = nn.Embedding(2 * config.max_position_embeddings - 1, self.attention_head_size)

        self.is_decoder = config.is_decoder
        self.layer_idx = layer_idx

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.FloatTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        encoder_hidden_states: Optional[torch.FloatTensor] = None,
        past_key_value: Optional[Cache] = None,
        output_attentions: Optional[bool] = False,
        cache_position: Optional[torch.Tensor] = None,
    ) -> tuple[torch.Tensor]:
        batch_size, seq_length, _ = hidden_states.shape
        query_layer = self.query(hidden_states)
        query_layer = query_layer.view(batch_size, -1, self.num_attention_heads, self.attention_head_size).transpose(
            1, 2
        )

        # If this is instantiated as a cross-attention module, the keys and values come from an
        # encoder; the attention mask needs to be such that the encoder's padding tokens are not
        # attended to.
        is_cross_attention = encoder_hidden_states is not None

        if past_key_value is not None:
            if isinstance(past_key_value, EncoderDecoderCache):
                is_updated = past_key_value.is_updated.get(self.layer_idx)
                if is_cross_attention:
                    # after the first generated id, we can subsequently re-use all key/value_layer from cache
                    curr_past_key_value = past_key_value.cross_attention_cache
                else:
                    curr_past_key_value = past_key_value.self_attention_cache
            else:
                curr_past_key_value = past_key_value

        current_states = encoder_hidden_states if is_cross_attention else hidden_states
        if is_cross_attention and past_key_value is not None and is_updated:
            # reuse k, v, cross_attentions
            key_layer = curr_past_key_value.layers[self.layer_idx].keys
            value_layer = curr_past_key_value.layers[self.layer_idx].values
        else:
            key_layer = self.key(current_states)
            key_layer = key_layer.view(batch_size, -1, self.num_attention_heads, self.attention_head_size).transpose(
                1, 2
            )
            value_layer = self.value(current_states)
            value_layer = value_layer.view(
                batch_size, -1, self.num_attention_heads, self.attention_head_size
            ).transpose(1, 2)

            if past_key_value is not None:
                # save all key/value_layer to cache to be re-used for fast auto-regressive generation
                cache_position = cache_position if not is_cross_attention else None
                key_layer, value_layer = curr_past_key_value.update(
                    key_layer, value_layer, self.layer_idx, {"cache_position": cache_position}
                )
                # set flag that curr layer for cross-attn is already updated so we can re-use in subsequent calls
                if is_cross_attention:
                    past_key_value.is_updated[self.layer_idx] = True

        # Take the dot product between "query" and "key" to get the raw attention scores.
        attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2))

        if self.position_embedding_type == "relative_key" or self.position_embedding_type == "relative_key_query":
            query_length, key_length = query_layer.shape[2], key_layer.shape[2]
            if cache_position is not None:
                position_ids_l = torch.tensor(key_length - 1, dtype=torch.long, device=hidden_states.device).view(
                    -1, 1
                )
            else:
                position_ids_l = torch.arange(query_length, dtype=torch.long, device=hidden_states.device).view(-1, 1)
            position_ids_r = torch.arange(key_length, dtype=torch.long, device=hidden_states.device).view(1, -1)
            distance = position_ids_l - position_ids_r

            positional_embedding = self.distance_embedding(distance + self.max_position_embeddings - 1)
            positional_embedding = positional_embedding.to(dtype=query_layer.dtype)  # fp16 compatibility

            if self.position_embedding_type == "relative_key":
                relative_position_scores = torch.einsum("bhld,lrd->bhlr", query_layer, positional_embedding)
                attention_scores = attention_scores + relative_position_scores
            elif self.position_embedding_type == "relative_key_query":
                relative_position_scores_query = torch.einsum("bhld,lrd->bhlr", query_layer, positional_embedding)
                relative_position_scores_key = torch.einsum("bhrd,lrd->bhlr", key_layer, positional_embedding)
                attention_scores = attention_scores + relative_position_scores_query + relative_position_scores_key

        attention_scores = attention_scores / math.sqrt(self.attention_head_size)
        if attention_mask is not None:
            # Apply the attention mask (precomputed for all layers in ElectraModel forward())
            attention_scores = attention_scores + attention_mask

        # Normalize the attention scores to probabilities.
        attention_probs = nn.functional.softmax(attention_scores, dim=-1)

        # This is actually dropping out entire tokens to attend to, which might seem a bit
        # unusual, but is taken from the original Transformer paper.
        attention_probs = self.dropout(attention_probs)

        # Mask heads if we want to
        if head_mask is not None:
            attention_probs = attention_probs * head_mask

        context_layer = torch.matmul(attention_probs, value_layer)

        context_layer = context_layer.permute(0, 2, 1, 3).contiguous()
        new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,)
        context_layer = context_layer.view(new_context_layer_shape)

        return context_layer, attention_probs


class ElectraSelfOutput(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states: torch.Tensor, input_tensor: torch.Tensor) -> torch.Tensor:
        hidden_states = self.dense(hidden_states)
        hidden_states = self.dropout(hidden_states)
        hidden_states = self.LayerNorm(hidden_states + input_tensor)
        return hidden_states


ELECTRA_SELF_ATTENTION_CLASSES = {
    "eager": ElectraSelfAttention,
}


class ElectraAttention(nn.Module):
    def __init__(self, config, position_embedding_type=None, layer_idx=None):
        super().__init__()
        self.self = ELECTRA_SELF_ATTENTION_CLASSES[config._attn_implementation](
            config, position_embedding_type=position_embedding_type, layer_idx=layer_idx
        )
        self.output = ElectraSelfOutput(config)
        self.pruned_heads = set()

    def prune_heads(self, heads):
        if len(heads) == 0:
            return
        heads, index = find_pruneable_heads_and_indices(
            heads, self.self.num_attention_heads, self.self.attention_head_size, self.pruned_heads
        )

        # Prune linear layers
        self.self.query = prune_linear_layer(self.self.query, index)
        self.self.key = prune_linear_layer(self.self.key, index)
        self.self.value = prune_linear_layer(self.self.value, index)
        self.output.dense = prune_linear_layer(self.output.dense, index, dim=1)

        # Update hyper params and store pruned heads
        self.self.num_attention_heads = self.self.num_attention_heads - len(heads)
        self.self.all_head_size = self.self.attention_head_size * self.self.num_attention_heads
        self.pruned_heads = self.pruned_heads.union(heads)

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.FloatTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        encoder_hidden_states: Optional[torch.FloatTensor] = None,
        past_key_value: Optional[Cache] = None,
        output_attentions: Optional[bool] = False,
        cache_position: Optional[torch.Tensor] = None,
    ) -> tuple[torch.Tensor]:
        self_outputs = self.self(
            hidden_states,
            attention_mask=attention_mask,
            head_mask=head_mask,
            encoder_hidden_states=encoder_hidden_states,
            past_key_value=past_key_value,
            output_attentions=output_attentions,
            cache_position=cache_position,
        )
        attention_output = self.output(self_outputs[0], hidden_states)
        outputs = (attention_output,) + self_outputs[1:]  # add attentions if we output them
        return outputs


class ElectraIntermediate(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.intermediate_size)
        if isinstance(config.hidden_act, str):
            self.intermediate_act_fn = ACT2FN[config.hidden_act]
        else:
            self.intermediate_act_fn = config.hidden_act

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        hidden_states = self.dense(hidden_states)
        hidden_states = self.intermediate_act_fn(hidden_states)
        return hidden_states


class ElectraOutput(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.intermediate_size, config.hidden_size)
        self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states: torch.Tensor, input_tensor: torch.Tensor) -> torch.Tensor:
        hidden_states = self.dense(hidden_states)
        hidden_states = self.dropout(hidden_states)
        hidden_states = self.LayerNorm(hidden_states + input_tensor)
        return hidden_states


class ElectraLayer(GradientCheckpointingLayer):
    def __init__(self, config, layer_idx=None):
        super().__init__()
        self.chunk_size_feed_forward = config.chunk_size_feed_forward
        self.seq_len_dim = 1
        self.attention = ElectraAttention(config, layer_idx=layer_idx)
        self.is_decoder = config.is_decoder
        self.add_cross_attention = config.add_cross_attention
        if self.add_cross_attention:
            if not self.is_decoder:
                raise ValueError(f"{self} should be used as a decoder model if cross attention is added")
            self.crossattention = ElectraAttention(config, position_embedding_type="absolute", layer_idx=layer_idx)
        self.intermediate = ElectraIntermediate(config)
        self.output = ElectraOutput(config)

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.FloatTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        encoder_hidden_states: Optional[torch.FloatTensor] = None,
        encoder_attention_mask: Optional[torch.FloatTensor] = None,
        past_key_value: Optional[Cache] = None,
        output_attentions: Optional[bool] = False,
        cache_position: Optional[torch.Tensor] = None,
    ) -> tuple[torch.Tensor]:
        self_attention_outputs = self.attention(
            hidden_states,
            attention_mask=attention_mask,
            head_mask=head_mask,
            output_attentions=output_attentions,
            past_key_value=past_key_value,
            cache_position=cache_position,
        )
        attention_output = self_attention_outputs[0]
        outputs = self_attention_outputs[1:]

        if self.is_decoder and encoder_hidden_states is not None:
            if not hasattr(self, "crossattention"):
                raise ValueError(
                    f"If `encoder_hidden_states` are passed, {self} has to be instantiated with cross-attention "
                    "layers by setting `config.add_cross_attention=True`"
                )
            cross_attention_outputs = self.crossattention(
                attention_output,
                encoder_attention_mask,
                head_mask,
                encoder_hidden_states,
                past_key_value,
                output_attentions,
                cache_position,
            )
            attention_output = cross_attention_outputs[0]
            outputs = outputs + cross_attention_outputs[1:]

        layer_output = apply_chunking_to_forward(
            self.feed_forward_chunk, self.chunk_size_feed_forward, self.seq_len_dim, attention_output
        )
        outputs = (layer_output,) + outputs
        return outputs

    def feed_forward_chunk(self, attention_output):
        intermediate_output = self.intermediate(attention_output)
        layer_output = self.output(intermediate_output, attention_output)
        return layer_output


class ElectraEncoder(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.layer = nn.ModuleList([ElectraLayer(config, layer_idx=i) for i in range(config.num_hidden_layers)])
        self.gradient_checkpointing = False

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.FloatTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        encoder_hidden_states: Optional[torch.FloatTensor] = None,
        encoder_attention_mask: Optional[torch.FloatTensor] = None,
        past_key_values: Optional[tuple[tuple[torch.FloatTensor]]] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = False,
        output_hidden_states: Optional[bool] = False,
        return_dict: Optional[bool] = True,
        cache_position: Optional[torch.Tensor] = None,
    ) -> Union[tuple[torch.Tensor], BaseModelOutputWithPastAndCrossAttentions]:
        all_hidden_states = () if output_hidden_states else None
        all_self_attentions = () if output_attentions else None
        all_cross_attentions = () if output_attentions and self.config.add_cross_attention else None

        if self.gradient_checkpointing and self.training:
            if use_cache:
                logger.warning_once(
                    "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                )
                use_cache = False

        return_legacy_cache = False
        if use_cache and self.config.is_decoder and not isinstance(past_key_values, Cache):
            logger.warning_once(
                "Passing a tuple of `past_key_values` is deprecated and will be removed in Transformers v4.58.0. "
                "You should pass an instance of `EncoderDecoderCache` instead, e.g. "
                "`past_key_values=EncoderDecoderCache.from_legacy_cache(past_key_values)`."
            )
            return_legacy_cache = True
            past_key_values = EncoderDecoderCache.from_legacy_cache(past_key_values)

        for i, layer_module in enumerate(self.layer):
            if output_hidden_states:
                all_hidden_states = all_hidden_states + (hidden_states,)

            layer_head_mask = head_mask[i] if head_mask is not None else None

            layer_outputs = layer_module(
                hidden_states,
                attention_mask,
                layer_head_mask,
                encoder_hidden_states=encoder_hidden_states,
                encoder_attention_mask=encoder_attention_mask,
                past_key_value=past_key_values,
                output_attentions=output_attentions,
                cache_position=cache_position,
            )

            hidden_states = layer_outputs[0]
            if output_attentions:
                all_self_attentions = all_self_attentions + (layer_outputs[1],)
                if self.config.add_cross_attention:
                    all_cross_attentions = all_cross_attentions + (layer_outputs[2],)

        if output_hidden_states:
            all_hidden_states = all_hidden_states + (hidden_states,)

        if return_legacy_cache:
            past_key_values = past_key_values.to_legacy_cache()

        if not return_dict:
            return tuple(
                v
                for v in [
                    hidden_states,
                    past_key_values,
                    all_hidden_states,
                    all_self_attentions,
                    all_cross_attentions,
                ]
                if v is not None
            )
        return BaseModelOutputWithPastAndCrossAttentions(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values,
            hidden_states=all_hidden_states,
            attentions=all_self_attentions,
            cross_attentions=all_cross_attentions,
        )


class ElectraDiscriminatorPredictions(nn.Module):
    """Prediction module for the discriminator, made up of two dense layers."""

    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        self.activation = get_activation(config.hidden_act)
        self.dense_prediction = nn.Linear(config.hidden_size, 1)
        self.config = config

    def forward(self, discriminator_hidden_states):
        hidden_states = self.dense(discriminator_hidden_states)
        hidden_states = self.activation(hidden_states)
        logits = self.dense_prediction(hidden_states).squeeze(-1)
        return logits


class ElectraGeneratorPredictions(nn.Module):
    """Prediction module for the generator, made up of two dense layers."""

    def __init__(self, config):
        super().__init__()
        self.activation = get_activation("gelu")
        self.LayerNorm = nn.LayerNorm(config.embedding_size, eps=config.layer_norm_eps)
        self.dense = nn.Linear(config.hidden_size, config.embedding_size)

    def forward(self, generator_hidden_states):
        hidden_states = self.dense(generator_hidden_states)
        hidden_states = self.activation(hidden_states)
        hidden_states = self.LayerNorm(hidden_states)
        return hidden_states


@auto_docstring
class ElectraPreTrainedModel(PreTrainedModel):
    config: ElectraConfig
    load_tf_weights = load_tf_weights_in_electra
    base_model_prefix = "electra"
    supports_gradient_checkpointing = True

    def _init_weights(self, module):
        """Initialize the weights"""
        if isinstance(module, nn.Linear):
            module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
            if module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.Embedding):
            module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
            if module.padding_idx is not None:
                module.weight.data[module.padding_idx].zero_()
        elif isinstance(module, nn.LayerNorm):
            module.bias.data.zero_()
            module.weight.data.fill_(1.0)
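# Editor's note: a minimal, self-contained sketch of the head splitting and score scaling
# performed by `ElectraSelfAttention` above. The sizes are illustrative assumptions; the
# reshape pattern and the 1/sqrt(head_size) factor mirror the module's forward().
def _attention_shape_sketch():
    batch_size, seq_length, num_heads, head_size = 2, 8, 4, 16
    hidden = torch.randn(batch_size, seq_length, num_heads * head_size)
    # [batch, seq, hidden] -> [batch, heads, seq, head_size]
    q = hidden.view(batch_size, seq_length, num_heads, head_size).transpose(1, 2)
    k = hidden.view(batch_size, seq_length, num_heads, head_size).transpose(1, 2)
    scores = torch.matmul(q, k.transpose(-1, -2)) / math.sqrt(head_size)
    probs = nn.functional.softmax(scores, dim=-1)  # [batch, heads, seq, seq]
    return probs.shape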
@dataclass
@auto_docstring(
    custom_intro="""
    Output type of [`ElectraForPreTraining`].
    """
)
class ElectraForPreTrainingOutput(ModelOutput):
    r"""
    loss (*optional*, returned when `labels` is provided, `torch.FloatTensor` of shape `(1,)`):
        Total loss of the ELECTRA objective.
    logits (`torch.FloatTensor` of shape `(batch_size, sequence_length)`):
        Prediction scores of the head (scores for each token before SoftMax).
    """

    loss: Optional[torch.FloatTensor] = None
    logits: Optional[torch.FloatTensor] = None
    hidden_states: Optional[tuple[torch.FloatTensor]] = None
    attentions: Optional[tuple[torch.FloatTensor]] = None


@auto_docstring
class ElectraModel(ElectraPreTrainedModel):
    def __init__(self, config):
        super().__init__(config)
        self.embeddings = ElectraEmbeddings(config)

        if config.embedding_size != config.hidden_size:
            self.embeddings_project = nn.Linear(config.embedding_size, config.hidden_size)

        self.encoder = ElectraEncoder(config)
        self.config = config
        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self):
        return self.embeddings.word_embeddings

    def set_input_embeddings(self, value):
        self.embeddings.word_embeddings = value

    def _prune_heads(self, heads_to_prune):
        """
        Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base
        class PreTrainedModel
        """
        for layer, heads in heads_to_prune.items():
            self.encoder.layer[layer].attention.prune_heads(heads)

    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.FloatTensor] = None,
        token_type_ids: Optional[torch.LongTensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        encoder_hidden_states: Optional[torch.FloatTensor] = None,
        encoder_attention_mask: Optional[torch.FloatTensor] = None,
        past_key_values: Optional[list[torch.FloatTensor]] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[tuple[torch.Tensor], BaseModelOutputWithCrossAttentions]:
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        if input_ids is not None and inputs_embeds is not None:
            raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
        elif input_ids is not None:
            self.warn_if_padding_and_no_attention_mask(input_ids, attention_mask)
            input_shape = input_ids.size()
        elif inputs_embeds is not None:
            input_shape = inputs_embeds.size()[:-1]
        else:
            raise ValueError("You have to specify either input_ids or inputs_embeds")

        batch_size, seq_length = input_shape
        device = input_ids.device if input_ids is not None else inputs_embeds.device

        past_key_values_length = 0
        if past_key_values is not None:
            past_key_values_length = (
                past_key_values[0][0].shape[2]
                if not isinstance(past_key_values, Cache)
                else past_key_values.get_seq_length()
            )

        if attention_mask is None:
            attention_mask = torch.ones(input_shape, device=device)
        if token_type_ids is None:
            if hasattr(self.embeddings, "token_type_ids"):
                buffered_token_type_ids = self.embeddings.token_type_ids[:, :seq_length]
                buffered_token_type_ids_expanded = buffered_token_type_ids.expand(batch_size, seq_length)
                token_type_ids = buffered_token_type_ids_expanded
            else:
                token_type_ids = torch.zeros(input_shape, dtype=torch.long, device=device)

        extended_attention_mask = self.get_extended_attention_mask(attention_mask, input_shape)

        # If a 2D or 3D attention mask is provided for the cross-attention,
        # make it broadcastable to [batch_size, num_heads, seq_length, seq_length]
        if self.config.is_decoder and encoder_hidden_states is not None:
            encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states.size()
            encoder_hidden_shape = (encoder_batch_size, encoder_sequence_length)
            if encoder_attention_mask is None:
                encoder_attention_mask = torch.ones(encoder_hidden_shape, device=device)
            encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask)
        else:
            encoder_extended_attention_mask = None

        head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers)

        hidden_states = self.embeddings(
            input_ids=input_ids,
            position_ids=position_ids,
            token_type_ids=token_type_ids,
            inputs_embeds=inputs_embeds,
            past_key_values_length=past_key_values_length,
        )

        if hasattr(self, "embeddings_project"):
            hidden_states = self.embeddings_project(hidden_states)

        hidden_states = self.encoder(
            hidden_states,
            attention_mask=extended_attention_mask,
            head_mask=head_mask,
            encoder_hidden_states=encoder_hidden_states,
            encoder_attention_mask=encoder_extended_attention_mask,
            past_key_values=past_key_values,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        return hidden_states


class ElectraClassificationHead(nn.Module):
    """Head for sentence-level classification tasks."""

    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        classifier_dropout = (
            config.classifier_dropout if config.classifier_dropout is not None else config.hidden_dropout_prob
        )
        self.activation = get_activation("gelu")
        self.dropout = nn.Dropout(classifier_dropout)
        self.out_proj = nn.Linear(config.hidden_size, config.num_labels)

    def forward(self, features, **kwargs):
        x = features[:, 0, :]  # take the first token (equivalent to [CLS])
        x = self.dropout(x)
        x = self.dense(x)
        x = self.activation(x)  # although BERT uses tanh here, the ELECTRA authors used gelu
        x = self.dropout(x)
        x = self.out_proj(x)
        return x
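# Editor's note: a minimal usage sketch for the bare `ElectraModel` defined above. The
# checkpoint name and input sentence are illustrative assumptions, not part of this module.
def _electra_model_usage_sketch():
    from transformers import AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained("google/electra-base-discriminator")
    model = ElectraModel.from_pretrained("google/electra-base-discriminator")
    inputs = tokenizer("ELECTRA detects replaced tokens.", return_tensors="pt")
    outputs = model(**inputs)
    # [batch_size, sequence_length, hidden_size]
    return outputs.last_hidden_state.shape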

class ElectraSequenceSummary(nn.Module):
    r"""
    Compute a single vector summary of a sequence hidden states.

    Args:
        config ([`ElectraConfig`]):
            The config used by the model. Relevant arguments in the config class of the model are (refer to the actual
            config class of your model for the default values it uses):

            - **summary_type** (`str`) -- The method to use to make this summary. Accepted values are:

                - `"last"` -- Take the last token hidden state (like XLNet)
                - `"first"` -- Take the first token hidden state (like Bert)
                - `"mean"` -- Take the mean of all tokens hidden states
                - `"cls_index"` -- Supply a Tensor of classification token position (GPT/GPT-2)
                - `"attn"` -- Not implemented now, use multi-head attention

            - **summary_use_proj** (`bool`) -- Add a projection after the vector extraction.
            - **summary_proj_to_labels** (`bool`) -- If `True`, the projection outputs to `config.num_labels` classes
              (otherwise to `config.hidden_size`).
            - **summary_activation** (`Optional[str]`) -- Set to `"tanh"` to add a tanh activation to the output,
              another string or `None` will add no activation.
            - **summary_first_dropout** (`float`) -- Optional dropout probability before the projection and activation.
            - **summary_last_dropout** (`float`)-- Optional dropout probability after the projection and activation.
    """

    def __init__(self, config: ElectraConfig):
        super().__init__()

        self.summary_type = getattr(config, "summary_type", "last")
        if self.summary_type == "attn":
            raise NotImplementedError

        self.summary = nn.Identity()
        if hasattr(config, "summary_use_proj") and config.summary_use_proj:
            if hasattr(config, "summary_proj_to_labels") and config.summary_proj_to_labels and config.num_labels > 0:
                num_classes = config.num_labels
            else:
                num_classes = config.hidden_size
            self.summary = nn.Linear(config.hidden_size, num_classes)

        activation_string = getattr(config, "summary_activation", None)
        self.activation = get_activation(activation_string) if activation_string else nn.Identity()

        self.first_dropout = nn.Identity()
        if hasattr(config, "summary_first_dropout") and config.summary_first_dropout > 0:
            self.first_dropout = nn.Dropout(config.summary_first_dropout)

        self.last_dropout = nn.Identity()
        if hasattr(config, "summary_last_dropout") and config.summary_last_dropout > 0:
            self.last_dropout = nn.Dropout(config.summary_last_dropout)

    def forward(
        self, hidden_states: torch.FloatTensor, cls_index: Optional[torch.LongTensor] = None
    ) -> torch.FloatTensor:
        """
        Compute a single vector summary of a sequence hidden states.

        Args:
            hidden_states (`torch.FloatTensor` of shape `[batch_size, seq_len, hidden_size]`):
                The hidden states of the last layer.
            cls_index (`torch.LongTensor` of shape `[batch_size]` or `[batch_size, ...]` where ... are optional leading dimensions of `hidden_states`, *optional*):
                Used if `summary_type == "cls_index"` and takes the last token of the sequence as classification token.

        Returns:
            `torch.FloatTensor`: The summary of the sequence hidden states.
        """
        if self.summary_type == "last":
            output = hidden_states[:, -1]
        elif self.summary_type == "first":
            output = hidden_states[:, 0]
        elif self.summary_type == "mean":
            output = hidden_states.mean(dim=1)
        elif self.summary_type == "cls_index":
            if cls_index is None:
                cls_index = torch.full_like(
                    hidden_states[..., :1, :],
                    hidden_states.shape[-2] - 1,
                    dtype=torch.long,
                )
            else:
                cls_index = cls_index.unsqueeze(-1).unsqueeze(-1)
                cls_index = cls_index.expand((-1,) * (cls_index.dim() - 1) + (hidden_states.size(-1),))
            # shape of cls_index: (bsz, XX, 1, hidden_size) where XX are optional leading dims of hidden_states
            output = hidden_states.gather(-2, cls_index).squeeze(-2)  # shape (bsz, XX, hidden_size)
        elif self.summary_type == "attn":
            raise NotImplementedError

        output = self.first_dropout(output)
        output = self.summary(output)
        output = self.activation(output)
        output = self.last_dropout(output)

        return output
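# Editor's note: a small isolated sketch of `ElectraSequenceSummary` with a toy config
# (sizes are arbitrary assumptions). With `ElectraConfig`'s defaults (`summary_type="first"`,
# `summary_use_proj=True`) it takes the first token of [batch, seq, hidden] and projects it.
def _sequence_summary_sketch():
    config = ElectraConfig(hidden_size=32)
    summary = ElectraSequenceSummary(config)
    hidden_states = torch.randn(2, 8, 32)
    return summary(hidden_states).shape  # torch.Size([2, 32])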
@auto_docstring(
    custom_intro="""
    ELECTRA Model transformer with a sequence classification/regression head on top (a linear layer on top of the
    pooled output) e.g. for GLUE tasks.
    """
)
class ElectraForSequenceClassification(ElectraPreTrainedModel):
    def __init__(self, config):
        super().__init__(config)
        self.num_labels = config.num_labels
        self.config = config
        self.electra = ElectraModel(config)
        self.classifier = ElectraClassificationHead(config)

        # Initialize weights and apply final processing
        self.post_init()

    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.Tensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        token_type_ids: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.Tensor] = None,
        head_mask: Optional[torch.Tensor] = None,
        inputs_embeds: Optional[torch.Tensor] = None,
        labels: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[tuple[torch.Tensor], SequenceClassifierOutput]:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the sequence classification/regression loss. Indices should be in `[0, ...,
            config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If
            `config.num_labels > 1` a classification loss is computed (Cross-Entropy).
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        discriminator_hidden_states = self.electra(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        sequence_output = discriminator_hidden_states[0]
        logits = self.classifier(sequence_output)

        loss = None
        if labels is not None:
            if self.config.problem_type is None:
                if self.num_labels == 1:
                    self.config.problem_type = "regression"
                elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int):
                    self.config.problem_type = "single_label_classification"
                else:
                    self.config.problem_type = "multi_label_classification"

            if self.config.problem_type == "regression":
                loss_fct = MSELoss()
                if self.num_labels == 1:
                    loss = loss_fct(logits.squeeze(), labels.squeeze())
                else:
                    loss = loss_fct(logits, labels)
            elif self.config.problem_type == "single_label_classification":
                loss_fct = CrossEntropyLoss()
                loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
            elif self.config.problem_type == "multi_label_classification":
                loss_fct = BCEWithLogitsLoss()
                loss = loss_fct(logits, labels)

        if not return_dict:
            output = (logits,) + discriminator_hidden_states[1:]
            return ((loss,) + output) if loss is not None else output

        return SequenceClassifierOutput(
            loss=loss,
            logits=logits,
            hidden_states=discriminator_hidden_states.hidden_states,
            attentions=discriminator_hidden_states.attentions,
        )
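# Editor's note: a minimal fine-tuning-style sketch for `ElectraForSequenceClassification`
# above; the checkpoint name, text, and label are illustrative assumptions.
def _sequence_classification_sketch():
    from transformers import AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained("google/electra-base-discriminator")
    model = ElectraForSequenceClassification.from_pretrained(
        "google/electra-base-discriminator", num_labels=2
    )
    inputs = tokenizer("A great movie.", return_tensors="pt")
    labels = torch.tensor([1])
    outputs = model(**inputs, labels=labels)
    return outputs.loss, outputs.logits  # logits: [batch_size, num_labels]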
@auto_docstring(
    custom_intro="""
    Electra model with a binary classification head on top as used during pretraining for identifying generated tokens.

    It is recommended to load the discriminator checkpoint into that model.
    """
)
class ElectraForPreTraining(ElectraPreTrainedModel):
    def __init__(self, config):
        super().__init__(config)
        self.electra = ElectraModel(config)
        self.discriminator_predictions = ElectraDiscriminatorPredictions(config)
        # Initialize weights and apply final processing
        self.post_init()

    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.Tensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        token_type_ids: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.Tensor] = None,
        head_mask: Optional[torch.Tensor] = None,
        inputs_embeds: Optional[torch.Tensor] = None,
        labels: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[tuple[torch.Tensor], ElectraForPreTrainingOutput]:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the ELECTRA loss. Input should be a sequence of tokens (see `input_ids` docstring)
            Indices should be in `[0, 1]`:

            - 0 indicates the token is an original token,
            - 1 indicates the token was replaced.

        Examples:

        ```python
        >>> from transformers import ElectraForPreTraining, AutoTokenizer
        >>> import torch

        >>> discriminator = ElectraForPreTraining.from_pretrained("google/electra-base-discriminator")
        >>> tokenizer = AutoTokenizer.from_pretrained("google/electra-base-discriminator")

        >>> sentence = "The quick brown fox jumps over the lazy dog"
        >>> fake_sentence = "The quick brown fox fake over the lazy dog"

        >>> fake_tokens = tokenizer.tokenize(fake_sentence, add_special_tokens=True)
        >>> fake_inputs = tokenizer.encode(fake_sentence, return_tensors="pt")
        >>> discriminator_outputs = discriminator(fake_inputs)
        >>> predictions = torch.round((torch.sign(discriminator_outputs[0]) + 1) / 2)

        >>> fake_tokens
        ['[CLS]', 'the', 'quick', 'brown', 'fox', 'fake', 'over', 'the', 'lazy', 'dog', '[SEP]']

        >>> predictions.squeeze().tolist()
        [0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0]
        ```"""
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        discriminator_hidden_states = self.electra(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        discriminator_sequence_output = discriminator_hidden_states[0]

        logits = self.discriminator_predictions(discriminator_sequence_output)

        loss = None
        if labels is not None:
            loss_fct = nn.BCEWithLogitsLoss()
            if attention_mask is not None:
                active_loss = attention_mask.view(-1, discriminator_sequence_output.shape[1]) == 1
                active_logits = logits.view(-1, discriminator_sequence_output.shape[1])[active_loss]
                active_labels = labels[active_loss]
                loss = loss_fct(active_logits, active_labels.float())
            else:
                loss = loss_fct(logits.view(-1, discriminator_sequence_output.shape[1]), labels.float())

        if not return_dict:
            output = (logits,) + discriminator_hidden_states[1:]
            return ((loss,) + output) if loss is not None else output

        return ElectraForPreTrainingOutput(
            loss=loss,
            logits=logits,
            hidden_states=discriminator_hidden_states.hidden_states,
            attentions=discriminator_hidden_states.attentions,
        )
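# Editor's note: a self-contained sketch of the replaced-token-detection loss used by
# `ElectraForPreTraining.forward` above: per-token binary logits are scored with
# `BCEWithLogitsLoss`, restricted to positions where `attention_mask == 1`. All tensors
# here are illustrative.
def _rtd_loss_sketch():
    logits = torch.randn(2, 8)  # [batch_size, sequence_length]
    labels = torch.randint(0, 2, (2, 8))  # 1 = token was replaced
    attention_mask = torch.ones(2, 8, dtype=torch.long)
    loss_fct = nn.BCEWithLogitsLoss()
    active = attention_mask.view(-1, logits.shape[1]) == 1
    active_logits = logits.view(-1, logits.shape[1])[active]
    return loss_fct(active_logits, labels[active].float())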
@auto_docstring(
    custom_intro="""
    Electra model with a language modeling head on top.

    Even though both the discriminator and generator may be loaded into this model, the generator is the only model of
    the two to have been trained for the masked language modeling task.
    """
)
class ElectraForMaskedLM(ElectraPreTrainedModel):
    _tied_weights_keys = ["generator_lm_head.weight"]

    def __init__(self, config):
        super().__init__(config)
        self.electra = ElectraModel(config)
        self.generator_predictions = ElectraGeneratorPredictions(config)
        self.generator_lm_head = nn.Linear(config.embedding_size, config.vocab_size)
        # Initialize weights and apply final processing
        self.post_init()

    def get_output_embeddings(self):
        return self.generator_lm_head

    def set_output_embeddings(self, new_embeddings):
        self.generator_lm_head = new_embeddings

    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.Tensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        token_type_ids: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.Tensor] = None,
        head_mask: Optional[torch.Tensor] = None,
        inputs_embeds: Optional[torch.Tensor] = None,
        labels: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[tuple[torch.Tensor], MaskedLMOutput]:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the masked language modeling loss. Indices should be in `[-100, 0, ...,
            config.vocab_size]` (see `input_ids` docstring) Tokens with indices set to `-100` are ignored (masked), the
            loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        generator_hidden_states = self.electra(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        generator_sequence_output = generator_hidden_states[0]

        prediction_scores = self.generator_predictions(generator_sequence_output)
        prediction_scores = self.generator_lm_head(prediction_scores)

        loss = None
        # Masked language modeling softmax layer
        if labels is not None:
            loss_fct = nn.CrossEntropyLoss()  # -100 index = padding token
            loss = loss_fct(prediction_scores.view(-1, self.config.vocab_size), labels.view(-1))

        if not return_dict:
            output = (prediction_scores,) + generator_hidden_states[1:]
            return ((loss,) + output) if loss is not None else output

        return MaskedLMOutput(
            loss=loss,
            logits=prediction_scores,
            hidden_states=generator_hidden_states.hidden_states,
            attentions=generator_hidden_states.attentions,
        )
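# Editor's note: a minimal masked-language-modeling sketch for `ElectraForMaskedLM` above;
# the generator checkpoint name and prompt are illustrative assumptions.
def _masked_lm_sketch():
    from transformers import AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained("google/electra-small-generator")
    model = ElectraForMaskedLM.from_pretrained("google/electra-small-generator")
    inputs = tokenizer("The capital of France is [MASK].", return_tensors="pt")
    logits = model(**inputs).logits
    mask_index = (inputs.input_ids == tokenizer.mask_token_id).nonzero()[0, 1]
    token_id = int(logits[0, mask_index].argmax())
    return tokenizer.decode([token_id])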
            Labels for computing the token classification loss. Indices should be in `[0, ..., config.num_labels - 1]`.
        Nr  r   rx   r!   r  )r_   r  rj  r   r9   r	   r   r  r   r   rJ  )r   r   r   r|   rw   r   r   r  r   rD  rE  r_  r  r`  r{  r  r  s                    r.   r   z%ElectraForTokenClassification.forward  s    $ &1%<k$++B]B]&*ll))%'/!5# '3 
'
# )DA(F%(,5R(S%!>?')HFKKDOO<fkk"oNDY!<QR!@@F)-)9TGf$EvE$5CC2==	
 	
r   r  )r   r   r   r   r   r   rZ   r   r   r   r   r   r   r   r   s   @r.   r  r    s     -11515/3,004)-,0/3&*1
ELL)1
 !.1
 !.	1

 u||,1
 ELL)1
  -1
 &1
 $D>1
 'tn1
 d^1
 
uU\\"$99	:1
 1
r   r  c                       e Zd ZU eed<   dZ fdZe	 	 	 	 	 	 	 	 	 	 	 ddee	j                     dee	j                     dee	j                     dee	j                     dee	j                     d	ee	j                     d
ee	j                     dee	j                     dee   dee   dee   deee	j                     ef   fd       Z xZS )ElectraForQuestionAnsweringr_   rj  c                     t         |   |       |j                  | _        t        |      | _        t        j                  |j                  |j                        | _        | j                          y r  )
r   r   r  r}  rj  r   r   r   
qa_outputsr  r   s     r.   r   z$ElectraForQuestionAnswering.__init__  sS      ++#F+))F$6$68I8IJ 	r   r   r   r|   rw   r   r   start_positionsend_positionsr   rD  rE  r   c           
      &   ||n| j                   j                  }| j                  |||||||	|
      }|d   }| j                  |      }|j	                  dd      \  }}|j                  d      j                         }|j                  d      j                         }d }||t        |j                               dkD  r|j                  d      }t        |j                               dkD  r|j                  d      }|j                  d      }|j                  d|      }|j                  d|      }t        |      } |||      } |||      }||z   dz  }|s||f|dd  z   }||f|z   S |S t        ||||j                  |j                        S )	N)r   r|   rw   r   r   r   rD  r   r!   rx   r   )ignore_indexr:   )r{  start_logits
end_logitsr   rJ  )r_   r  rj  r  rN   r^  r   rR   r   clampr	   r   r   rJ  )r   r   r   r|   rw   r   r   r  r  r   rD  rE  r_  r  r`  r  r  
total_lossignored_indexr  
start_lossend_lossr  s                          r.   r   z#ElectraForQuestionAnswering.forward  s    &1%<k$++B]B]&*ll))%'/!5 '3 	'
# 6a81#)<<r<#: j#++B/::<''+668

&=+D?'')*Q."1"9"9""==%%'(1, - 5 5b 9(--a0M-33A}EO)//=AM']CH!,@J
M:H$x/14J ,AB/0F 0:/EZMF*Q6Q+%!5CC2==
 	
r   )NNNNNNNNNNN)r   r   r   r"   rt  rv  r   r   r   rZ   r   r   r   r   r   r   r   r   s   @r.   r  r    s7   !  -11515/3,0042604,0/3&*@
ELL)@
 !.@
 !.	@

 u||,@
 ELL)@
  -@
 "%,,/@
  -@
 $D>@
 'tn@
 d^@
 
uU\\"$@@	A@
 @
r   r  c                   ^    e Zd Z fdZe	 	 	 	 	 	 	 	 	 	 ddeej                     deej                     deej                     deej                     deej                     deej                     deej                     d	ee   d
ee   dee   de	e
ej                     ef   fd       Z xZS )ElectraForMultipleChoicec                     t         |   |       t        |      | _        t	        |      | _        t        j                  |j                  d      | _	        | j                          y r[  )r   r   r}  rj  r  sequence_summaryr   r   r   r9   r  r   s     r.   r   z!ElectraForMultipleChoice.__init__N  sM     #F+ 6v >))F$6$6: 	r   r   r   r|   rw   r   r   r  r   rD  rE  r   c                 L   |
|
n| j                   j                  }
||j                  d   n|j                  d   }|!|j                  d|j	                  d            nd}|!|j                  d|j	                  d            nd}|!|j                  d|j	                  d            nd}|!|j                  d|j	                  d            nd}|1|j                  d|j	                  d      |j	                  d            nd}| j                  ||||||||	|
	      }|d   }| j                  |      }| j                  |      }|j                  d|      }d}|t               } |||      }|
s|f|dd z   }||f|z   S |S t        |||j                  |j                        S )a[  
        input_ids (`torch.LongTensor` of shape `(batch_size, num_choices, sequence_length)`):
            Indices of input sequence tokens in the vocabulary.

            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
            [`PreTrainedTokenizer.__call__`] for details.

            [What are input IDs?](../glossary#input-ids)
        token_type_ids (`torch.LongTensor` of shape `(batch_size, num_choices, sequence_length)`, *optional*):
            Segment token indices to indicate first and second portions of the inputs. Indices are selected in `[0,
            1]`:

            - 0 corresponds to a *sentence A* token,
            - 1 corresponds to a *sentence B* token.

            [What are token type IDs?](../glossary#token-type-ids)
        position_ids (`torch.LongTensor` of shape `(batch_size, num_choices, sequence_length)`, *optional*):
            Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0,
            config.max_position_embeddings - 1]`.

            [What are position IDs?](../glossary#position-ids)
        inputs_embeds (`torch.FloatTensor` of shape `(batch_size, num_choices, sequence_length, hidden_size)`, *optional*):
            Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This
            is useful if you want more control over how to convert `input_ids` indices into associated vectors than the
            model's internal embedding lookup matrix.
        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the multiple choice classification loss. Indices should be in `[0, ...,
            num_choices-1]` where `num_choices` is the size of the second dimension of the input tensors. (See
            `input_ids` above)
        Nr!   rx   r   r  r   r  )r_   r  rV   r   r   rj  r  r9   r	   r   r   rJ  )r   r   r   r|   rw   r   r   r  r   rD  rE  num_choicesr_  r  pooled_outputr`  reshaped_logitsr{  r  r  s                       r.   r   z ElectraForMultipleChoice.forwardX  s   X &1%<k$++B]B],5,Aiooa(}GZGZ[\G]>G>SINN2y~~b'9:Y]	M[Mg,,R1D1DR1HImqM[Mg,,R1D1DR1HImqGSG_|((\->->r-BCei ( r=#5#5b#9=;M;Mb;QR 	 '+ll))%'/!5# '3 
'
# 6a8--o>/ ++b+6')HOV4D%'*Eab*IIF)-)9TGf$EvE("5CC2==	
 	
r   r  )r   r   r   r   r   r   rZ   r   r   r   r   r   r   r   r   s   @r.   r  r  L  s     -11515/3,004)-,0/3&*X
ELL)X
 !.X
 !.	X

 u||,X
 ELL)X
  -X
 &X
 $D>X
 'tnX
 d^X
 
uU\\"$==	>X
 X
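
# Usage sketch (illustrative; the checkpoint name is an assumption): the head above
# expects inputs shaped (batch_size, num_choices, seq_len); it flattens them to
# (batch_size * num_choices, seq_len) before the encoder and reshapes the logits
# back to (batch_size, num_choices).
#
#     import torch
#     from transformers import AutoTokenizer
#
#     tokenizer = AutoTokenizer.from_pretrained("google/electra-small-discriminator")
#     model = ElectraForMultipleChoice.from_pretrained("google/electra-small-discriminator")
#     prompt = "The sky is"
#     choices = ["blue.", "made of cheese."]
#     enc = tokenizer([prompt, prompt], choices, return_tensors="pt", padding=True)
#     inputs = {k: v.unsqueeze(0) for k, v in enc.items()}  # add the num_choices dim
#     outputs = model(**inputs, labels=torch.tensor([0]))
#     loss, logits = outputs.loss, outputs.logits  # logits shape: (1, 2)
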
r   r  zS
    ELECTRA Model with a `language modeling` head on top for CLM fine-tuning.
    c            "           e Zd ZdgZ fdZd Zd Ze	 	 	 	 	 	 	 	 	 	 	 	 	 	 ddee	j                     dee	j                     dee	j                     dee	j                     d	ee	j                     d
ee	j                     dee	j                     dee	j                     dee	j                     deee	j                        dee   dee   dee   dee   deee	j                     ef   fd       Z xZS )ElectraForCausalLMr  c                 $   t         |   |       |j                  st        j	                  d       t        |      | _        t        |      | _        t        j                  |j                  |j                        | _        | j                          y )NzOIf you want to use `ElectraForCausalLM` as a standalone, add `is_decoder=True.`)r   r   r   r@   warningr}  rj  rc  r  r   r   r   r   r  init_weightsr   s     r.   r   zElectraForCausalLM.__init__  sj       NNlm#F+%@%H"!#6+@+@&BSBS!Tr   c                     | j                   S r  r  r  s    r.   r  z(ElectraForCausalLM.get_output_embeddings  r  r   c                     || _         y r  r  )r   new_embeddingss     r.   r  z(ElectraForCausalLM.set_output_embeddings  s
    !/r   r   r   r|   rw   r   r   r   r1  r  rB  rC  r   rD  rE  r   c                    ||n| j                   j                  }|	d}| j                  |||||||||
||||      }|d   }| j                  | j	                  |            }d}|	* | j
                  ||	fd| j                   j                  i|}|s|f|dd z   }||f|z   S |S t        |||j                  |j                  |j                  |j                        S )a3  
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the left-to-right language modeling loss (next word prediction). Indices should be in
            `[-100, 0, ..., config.vocab_size]` (see `input_ids` docstring) Tokens with indices set to `-100` are
            ignored (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`

        Example:

        ```python
        >>> from transformers import AutoTokenizer, ElectraForCausalLM, ElectraConfig
        >>> import torch

        >>> tokenizer = AutoTokenizer.from_pretrained("google/electra-base-generator")
        >>> config = ElectraConfig.from_pretrained("google/electra-base-generator")
        >>> config.is_decoder = True
        >>> model = ElectraForCausalLM.from_pretrained("google/electra-base-generator", config=config)

        >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
        >>> outputs = model(**inputs)

        >>> prediction_logits = outputs.logits
        ```NF)r   r|   rw   r   r   r   r1  rB  rC  r   rD  rE  r   r   r!   )r{  r`  rB  r   rJ  rK  )r_   r  rj  r  r  loss_functionr   r   rB  r   rJ  rK  )r   r   r   r|   rw   r   r   r   r1  r  rB  rC  r   rD  rE  r  r  r  r  lm_lossr  s                        r.   r   zElectraForCausalLM.forward  s.   R &1%<k$++B]B]I,,))%'"7#9+/!5#  
  "!* 2243M3Mo3^_(d((!  ;;11 	G ')GABK7F,3,?WJ'KVK0$#33!//))$55
 	
r   )NNNNNNNNNNNNNN)r   r   r   r  r   r  r  r   r   rZ   r   r  r   r   r   r   r   r   r   s   @r.   r   r     s    55
&0  -11515/3,0048<9=)-8<$(,0/3&*S
ELL)S
 !.S
 !.	S

 u||,S
 ELL)S
  -S
  (5S
 !) 6S
 &S
 "$u||"45S
 D>S
 $D>S
 'tnS
 d^S
" 
uU\\"$EE	F#S
 S
r   r   )
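
# Usage sketch (illustrative): because `ElectraForCausalLM` mixes in `GenerationMixin`,
# `generate()` works once the config has `is_decoder=True`. Continuing the doctest in
# the forward docstring above (same `model`, `tokenizer`, and `inputs`):
#
#     generated = model.generate(**inputs, max_new_tokens=5)
#     print(tokenizer.decode(generated[0]))
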

__all__ = [
    "ElectraForCausalLM",
    "ElectraForMaskedLM",
    "ElectraForMultipleChoice",
    "ElectraForPreTraining",
    "ElectraForQuestionAnswering",
    "ElectraForSequenceClassification",
    "ElectraForTokenClassification",
    "ElectraModel",
    "ElectraPreTrainedModel",
    "load_tf_weights_in_electra",
]