import collections.abc
from typing import Callable, Optional, Union

import torch
import torch.nn as nn
from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss

from ...activations import ACT2FN
from ...modeling_layers import GradientCheckpointingLayer
from ...modeling_outputs import BaseModelOutput, BaseModelOutputWithPooling, ImageClassifierOutput
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...pytorch_utils import find_pruneable_heads_and_indices, prune_linear_layer
from ...utils import auto_docstring, logging, torch_int
from .configuration_ijepa import IJepaConfig


logger = logging.get_logger(__name__)


class IJepaPatchEmbeddings(nn.Module):
    """
    This class turns `pixel_values` of shape `(batch_size, num_channels, height, width)` into the initial
    `hidden_states` (patch embeddings) of shape `(batch_size, seq_length, hidden_size)` to be consumed by a
    Transformer.
    """

    def __init__(self, config):
        super().__init__()
        image_size, patch_size = config.image_size, config.patch_size
        num_channels, hidden_size = config.num_channels, config.hidden_size

        image_size = image_size if isinstance(image_size, collections.abc.Iterable) else (image_size, image_size)
        patch_size = patch_size if isinstance(patch_size, collections.abc.Iterable) else (patch_size, patch_size)
        num_patches = (image_size[1] // patch_size[1]) * (image_size[0] // patch_size[0])
        self.image_size = image_size
        self.patch_size = patch_size
        self.num_channels = num_channels
        self.num_patches = num_patches

        self.projection = nn.Conv2d(num_channels, hidden_size, kernel_size=patch_size, stride=patch_size)

    def forward(self, pixel_values: torch.Tensor, interpolate_pos_encoding: bool = False) -> torch.Tensor:
        batch_size, num_channels, height, width = pixel_values.shape
        if num_channels != self.num_channels:
            raise ValueError(
                "Make sure that the channel dimension of the pixel values match with the one set in the configuration."
                f" Expected {self.num_channels} but got {num_channels}."
            )
        if not interpolate_pos_encoding:
            if height != self.image_size[0] or width != self.image_size[1]:
                raise ValueError(
                    f"Input image size ({height}*{width}) doesn't match model"
                    f" ({self.image_size[0]}*{self.image_size[1]})."
                )
        embeddings = self.projection(pixel_values).flatten(2).transpose(1, 2)
        return embeddings
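

# Illustrative sketch (hypothetical helper, not part of the upstream module): shows the shape
# flow through the patch projection. The config values below are arbitrary examples; any
# `IJepaConfig` behaves the same way.
def _demo_patch_embeddings() -> None:
    config = IJepaConfig(image_size=224, patch_size=16, num_channels=3)
    embed = IJepaPatchEmbeddings(config)
    pixel_values = torch.randn(1, 3, 224, 224)
    # Conv2d with kernel_size == stride == 16 produces a 14x14 grid of patch vectors;
    # flatten(2).transpose(1, 2) turns it into a (batch, 196, hidden_size) token sequence.
    embeddings = embed(pixel_values)
    assert embeddings.shape == (1, (224 // 16) ** 2, config.hidden_size)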


class IJepaEmbeddings(nn.Module):
    """
    Construct the position and patch embeddings. Optionally, also the mask token. Note that, unlike ViT,
    I-JEPA uses no CLS token.
    """

    def __init__(self, config: IJepaConfig, use_mask_token: bool = False) -> None:
        super().__init__()
        self.mask_token = nn.Parameter(torch.zeros(1, 1, config.hidden_size)) if use_mask_token else None
        self.patch_embeddings = IJepaPatchEmbeddings(config)
        num_patches = self.patch_embeddings.num_patches
        self.position_embeddings = nn.Parameter(torch.randn(1, num_patches, config.hidden_size))
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.patch_size = config.patch_size
        self.config = config

    def interpolate_pos_encoding(self, embeddings: torch.Tensor, height: int, width: int) -> torch.Tensor:
        """
        This method allows to interpolate the pre-trained position encodings, to be able to use the model on higher resolution
        images. This method is also adapted to support torch.jit tracing.

        Adapted from:
        - https://github.com/facebookresearch/dino/blob/de9ee3df6cf39fac952ab558447af1fa1365362a/vision_transformer.py#L174-L194, and
        - https://github.com/facebookresearch/dinov2/blob/e1277af2ba9496fbadf7aec6eba56e8d882d1e35/dinov2/models/vision_transformer.py#L179-L211
        """
        num_patches = embeddings.shape[1]
        num_positions = self.position_embeddings.shape[1]

        # always interpolate when tracing to ensure the exported model works for dynamic input shapes
        if not torch.jit.is_tracing() and num_patches == num_positions and height == width:
            return self.position_embeddings

        patch_pos_embed = self.position_embeddings

        dim = embeddings.shape[-1]

        new_height = height // self.patch_size
        new_width = width // self.patch_size

        sqrt_num_positions = torch_int(num_positions**0.5)
        patch_pos_embed = patch_pos_embed.reshape(1, sqrt_num_positions, sqrt_num_positions, dim)
        patch_pos_embed = patch_pos_embed.permute(0, 3, 1, 2)

        patch_pos_embed = nn.functional.interpolate(
            patch_pos_embed,
            size=(new_height, new_width),
            mode="bicubic",
            align_corners=False,
        )

        patch_pos_embed = patch_pos_embed.permute(0, 2, 3, 1).view(1, -1, dim)

        return patch_pos_embed

    def forward(
        self,
        pixel_values: torch.Tensor,
        bool_masked_pos: Optional[torch.BoolTensor] = None,
        interpolate_pos_encoding: bool = False,
    ) -> torch.Tensor:
        batch_size, _, height, width = pixel_values.shape
        embeddings = self.patch_embeddings(pixel_values, interpolate_pos_encoding=interpolate_pos_encoding)

        if bool_masked_pos is not None:
            seq_length = embeddings.shape[1]
            mask_tokens = self.mask_token.expand(batch_size, seq_length, -1)
            # replace the masked visual tokens by mask_tokens
            mask = bool_masked_pos.unsqueeze(-1).type_as(mask_tokens)
            embeddings = embeddings * (1.0 - mask) + mask_tokens * mask

        # add positional encoding to each token
        if interpolate_pos_encoding:
            embeddings = embeddings + self.interpolate_pos_encoding(embeddings, height, width)
        else:
            embeddings = embeddings + self.position_embeddings

        embeddings = self.dropout(embeddings)

        return embeddings
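

# Illustrative sketch (hypothetical helper, not part of the upstream module): feeding a
# 448x448 image to a model configured for 224x224. With `interpolate_pos_encoding=True`,
# the 14x14 grid of position embeddings is resampled bicubically to 28x28, so the output
# sequence grows from 196 to 784 patch tokens.
def _demo_interpolate_pos_encoding() -> None:
    config = IJepaConfig(image_size=224, patch_size=16)
    embeddings = IJepaEmbeddings(config)
    pixel_values = torch.randn(1, config.num_channels, 448, 448)
    tokens = embeddings(pixel_values, interpolate_pos_encoding=True)
    assert tokens.shape == (1, (448 // 16) ** 2, config.hidden_size)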


@auto_docstring
class IJepaPreTrainedModel(PreTrainedModel):
    config: IJepaConfig
    base_model_prefix = "ijepa"
    main_input_name = "pixel_values"
    supports_gradient_checkpointing = True
    _no_split_modules = ["IJepaEmbeddings", "IJepaLayer"]
    _supports_sdpa = True
    _supports_flash_attn = True
    _supports_flex_attn = True
    _supports_attention_backend = True

    def _init_weights(self, module: Union[nn.Linear, nn.Conv2d, nn.LayerNorm]) -> None:
        """Initialize the weights"""
        if isinstance(module, (nn.Linear, nn.Conv2d)):
            # upcast to float32 for trunc_normal_, then cast back to the original dtype
            module.weight.data = nn.init.trunc_normal_(
                module.weight.data.to(torch.float32), mean=0.0, std=self.config.initializer_range
            ).to(module.weight.dtype)
            if module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.LayerNorm):
            module.bias.data.zero_()
            module.weight.data.fill_(1.0)
        elif isinstance(module, IJepaEmbeddings):
            module.position_embeddings.data = nn.init.trunc_normal_(
                module.position_embeddings.data.to(torch.float32),
                mean=0.0,
                std=self.config.initializer_range,
            ).to(module.position_embeddings.dtype)
            if module.mask_token is not None:
                module.mask_token.data.zero_()


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: Optional[torch.Tensor],
    scaling: float,
    dropout: float = 0.0,
    **kwargs,
):
    # Take the dot product between "query" and "key" to get the raw attention scores.
    attn_weights = torch.matmul(query, key.transpose(-1, -2)) * scaling

    # Normalize the attention scores to probabilities.
    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)

    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)

    # Mask heads if we want to
    if attention_mask is not None:
        attn_weights = attn_weights * attention_mask

    attn_output = torch.matmul(attn_weights, value)
    attn_output = attn_output.transpose(1, 2).contiguous()

    return attn_output, attn_weights
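

# Sanity-check sketch for the eager attention path (hypothetical helper, not part of the
# upstream module). With no mask and dropout disabled, each query's attention weights sum
# to 1, and the output comes back transposed to (batch, seq, heads, head_dim).
def _demo_eager_attention() -> None:
    batch, heads, seq, head_dim = 2, 4, 16, 8
    query = torch.randn(batch, heads, seq, head_dim)
    key = torch.randn(batch, heads, seq, head_dim)
    value = torch.randn(batch, heads, seq, head_dim)
    output, weights = eager_attention_forward(
        nn.Identity(), query, key, value, attention_mask=None, scaling=head_dim**-0.5
    )
    assert output.shape == (batch, seq, heads, head_dim)
    assert torch.allclose(weights.sum(dim=-1), torch.ones(batch, heads, seq))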
    r,   r2   Nc                     t         |           t        j                  |j                  |j                        | _        t        j                  |j                        | _        y N)	r   r   r(   r   r"   denserT   rU   rV   r   s     r.   r   zIJepaSelfOutput.__init__"  sB    YYv1163E3EF
zz&"<"<=r/   r   input_tensorc                 J    | j                  |      }| j                  |      }|S r   r   rV   r+   r   r   s      r.   r?   zIJepaSelfOutput.forward'  s$    

=1]3r/   )
rA   rB   rC   rD   r   r   rE   rF   r?   rH   rI   s   @r.   r   r     sD    
>{ >t >
U\\  RWR^R^ r/   r   c                        e Zd Zdeddf fdZdee   ddfdZ	 	 ddej                  de
ej                     d	edeeej                  ej                  f   eej                     f   fd
Z xZS )IJepaAttentionr,   r2   Nc                     t         |           t        |      | _        t	        |      | _        t               | _        y r   )r   r   r   	attentionr   outputsetpruned_headsr   s     r.   r   zIJepaAttention.__init__/  s0    +F3%f-Er/   headsc                 >   t        |      dk(  ry t        || j                  j                  | j                  j                  | j
                        \  }}t        | j                  j                  |      | j                  _        t        | j                  j                  |      | j                  _        t        | j                  j                  |      | j                  _	        t        | j                  j                  |d      | j                  _        | j                  j                  t        |      z
  | j                  _        | j                  j                  | j                  j                  z  | j                  _        | j
                  j                  |      | _        y )Nr   r   rf   )lenr   r   r   r   r   r   r   r   r   r   r   r   union)r+   r   indexs      r.   prune_headszIJepaAttention.prune_heads5  s   u:?74>>55t~~7Y7Y[_[l[l
u
  2$..2F2FN/0B0BEJ1$..2F2FN.t{{/@/@%QO .2^^-O-ORUV[R\-\*'+~~'I'IDNNLnLn'n$ --33E:r/   r   r   r   c                 h    | j                  |||      }| j                  |d   |      }|f|dd  z   }|S )Nr   r   )r   r   )r+   r   r   r   self_outputsattention_outputr   s          r.   r?   zIJepaAttention.forwardG  sE     ~~mY@QR;;|AF#%QR(88r/   rt   )rA   rB   rC   r   r   r   ru   r   rE   rF   r   rG   r   r   r?   rH   rI   s   @r.   r   r   .  s    "{ "t ";S ;d ;* -1"'	|| ELL)  	
 
uU\\5<</0%2EE	Fr/   r   c                   `     e Zd Zdeddf fdZdej                  dej                  fdZ xZS )IJepaIntermediater,   r2   Nc                    t         |           t        j                  |j                  |j
                        | _        t        |j                  t              rt        |j                     | _        y |j                  | _        y r   )r   r   r(   r   r"   intermediate_sizer   r#   
hidden_actstrr
   intermediate_act_fnr   s     r.   r   zIJepaIntermediate.__init__V  s]    YYv1163K3KL
f''-'-f.?.?'@D$'-'8'8D$r/   r   c                 J    | j                  |      }| j                  |      }|S r   )r   r   )r+   r   s     r.   r?   zIJepaIntermediate.forward^  s&    

=100?r/   	rA   rB   rC   r   r   rE   rF   r?   rH   rI   s   @r.   r   r   U  s1    9{ 9t 9U\\ ell r/   r   c                   x     e Zd Zdeddf fdZdej                  dej                  dej                  fdZ xZS )IJepaOutputr,   r2   Nc                     t         |           t        j                  |j                  |j
                        | _        t        j                  |j                        | _	        y r   )
r   r   r(   r   r   r"   r   rT   rU   rV   r   s     r.   r   zIJepaOutput.__init__f  sB    YYv779K9KL
zz&"<"<=r/   r   r   c                 T    | j                  |      }| j                  |      }||z   }|S r   r   r   s      r.   r?   zIJepaOutput.forwardk  s.    

=1]3%4r/   r   rI   s   @r.   r   r   e  s?    >{ >t >
U\\  RWR^R^ r/   r   c                        e Zd ZdZdeddf fdZ	 	 d
dej                  deej                     de	de
eej                  ej                  f   eej                     f   fd	Z xZS )rz   z?This corresponds to the Block class in the timm implementation.r,   r2   Nc                 r   t         |           |j                  | _        d| _        t	        |      | _        t        |      | _        t        |      | _	        t        j                  |j                  |j                        | _        t        j                  |j                  |j                        | _        y )Nr   eps)r   r   chunk_size_feed_forwardseq_len_dimr   r   r   intermediater   r   r(   r   r"   layer_norm_epslayernorm_beforelayernorm_afterr   s     r.   r   zIJepaLayer.__init__w  s    '-'E'E$'/-f5!&) "V-?-?VEZEZ [!||F,>,>FDYDYZr/   r   r   r   c                     | j                  | j                  |      ||      }|d   }|dd  }||z   }| j                  |      }| j                  |      }| j	                  ||      }|f|z   }|S )N)r   r   r   )r   r   r   r   r   )r+   r   r   r   self_attention_outputsr   r   layer_outputs           r.   r?   zIJepaLayer.forward  s     "&!!-0/ "0 "

 2!4(, )=8 ++M:((6 {{<?/G+r/   rt   )rA   rB   rC   rD   r   r   rE   rF   r   rG   r   r   r?   rH   rI   s   @r.   rz   rz   t  s    I[{ [t [ -1"'	|| ELL)  	
 
uU\\5<</0%2EE	Fr/   rz   c                        e Zd Zdeddf fdZ	 	 	 	 ddej                  deej                     deded	ede	e
ef   fd
Z xZS )IJepaEncoderr,   r2   Nc                     t         |           || _        t        j                  t        |j                        D cg c]  }t        |       c}      | _        d| _	        y c c}w rt   )
r   r   r,   r(   
ModuleListrangenum_hidden_layersrz   layergradient_checkpointing)r+   r,   rp   r-   s      r.   r   zIJepaEncoder.__init__  sN    ]]fF^F^@_#`1Jv$6#`a
&+# $as   A#r   r   r   output_hidden_statesreturn_dictc                    |rdnd }|rdnd }t        | j                        D ]1  \  }}	|r||fz   }|||   nd }
 |	||
|      }|d   }|s)||d   fz   }3 |r||fz   }|st        d |||fD              S t        |||      S )Nr   r   r   c              3   &   K   | ]	  }||  y wr   r   ).0vs     r.   	<genexpr>z'IJepaEncoder.forward.<locals>.<genexpr>  s     mq_`_lms   )last_hidden_stater   
attentions)	enumerater  r   r   )r+   r   r   r   r  r  all_hidden_statesall_self_attentionsilayer_modulelayer_head_masklayer_outputss               r.   r?   zIJepaEncoder.forward  s     #7BD$5b4(4 	POA|#$58H$H!.7.CilO(IZ[M)!,M &9]1=M<O&O#	P   1]4D Dm]4EGZ$[mmm++*
 	
r/   )NFFT)rA   rB   rC   r   r   rE   rF   r   rG   r   r   r   r?   rH   rI   s   @r.   r   r     sz    ,{ ,t , -1"'%* !
||!
 ELL)!
  	!

 #!
 !
 
uo%	&!
r/   r   c                   *     e Zd Zdef fdZd Z xZS )IJepaPoolerr,   c                     t         |           t        j                  |j                  |j
                        | _        t        |j                     | _	        y r   )
r   r   r(   r   r"   pooler_output_sizer   r
   
pooler_act
activationr   s     r.   r   zIJepaPooler.__init__  s>    YYv1163L3LM
 !2!23r/   c                 \    |d d df   }| j                  |      }| j                  |      }|S )Nr   )r   r  )r+   r   first_token_tensorpooled_outputs       r.   r?   zIJepaPooler.forward  s6     +1a40

#566r/   )rA   rB   rC   r   r   r?   rH   rI   s   @r.   r  r    s    4{ 4
r/   r  c                       e Zd Zddededef fdZdefdZdee	e
e	   f   ddfd	Ze	 	 	 	 	 	 	 dd
eej                     deej                      deej                     dee   dee   dee   dee   deeef   fd       Z xZS )
IJepaModelr,   add_pooling_layerrL   c                    t         |   |       || _        t        ||      | _        t        |      | _        t        j                  |j                  |j                        | _        |rt        |      nd| _        | j                          y)z
        add_pooling_layer (`bool`, *optional*, defaults to `False`):
            Whether to add a pooling layer
        use_mask_token (`bool`, *optional*, defaults to `False`):
            Whether to use a mask token for masked image modeling.
        """
        super().__init__(config)
        self.config = config
        self.embeddings = IJepaEmbeddings(config, use_mask_token=use_mask_token)
        self.encoder = IJepaEncoder(config)
        self.layernorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.pooler = IJepaPooler(config) if add_pooling_layer else None

        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self) -> IJepaPatchEmbeddings:
        return self.embeddings.patch_embeddings

    def _prune_heads(self, heads_to_prune: dict[int, list[int]]) -> None:
        """
        Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base
        class PreTrainedModel
        """
        for layer, heads in heads_to_prune.items():
            self.encoder.layer[layer].attention.prune_heads(heads)

    @auto_docstring
    def forward(
        self,
        pixel_values: Optional[torch.Tensor] = None,
        bool_masked_pos: Optional[torch.BoolTensor] = None,
        head_mask: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        interpolate_pos_encoding: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[tuple, BaseModelOutputWithPooling]:
        r"""
        bool_masked_pos (`torch.BoolTensor` of shape `(batch_size, num_patches)`, *optional*):
            Boolean masked positions. Indicates which patches are masked (1) and which aren't (0).
        """
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        if pixel_values is None:
            raise ValueError("You have to specify pixel_values")

        # Prepare head mask if needed
        # 1.0 in head_mask indicates we keep the head
        head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers)

        # TODO: maybe have a cleaner way to cast the input (from `ImageProcessor` side?)
        expected_dtype = self.embeddings.patch_embeddings.projection.weight.dtype
        if pixel_values.dtype != expected_dtype:
            pixel_values = pixel_values.to(expected_dtype)

        embedding_output = self.embeddings(
            pixel_values, bool_masked_pos=bool_masked_pos, interpolate_pos_encoding=interpolate_pos_encoding
        )

        encoder_outputs = self.encoder(
            embedding_output,
            head_mask=head_mask,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        sequence_output = encoder_outputs[0]
        sequence_output = self.layernorm(sequence_output)
        pooled_output = self.pooler(sequence_output) if self.pooler is not None else None

        if not return_dict:
            head_outputs = (sequence_output, pooled_output) if pooled_output is not None else (sequence_output,)
            return head_outputs + encoder_outputs[1:]

        return BaseModelOutputWithPooling(
            last_hidden_state=sequence_output,
            pooler_output=pooled_output,
            hidden_states=encoder_outputs.hidden_states,
            attentions=encoder_outputs.attentions,
        )
    e.g. for ImageNet.

    <Tip>

        Note that it's possible to fine-tune IJepa on higher resolution images than the ones it has been trained on, by
        setting `interpolate_pos_encoding` to `True` in the forward of the model. This will interpolate the pre-trained
        position embeddings to the higher resolution.

    </Tip>
    )custom_introc                        e Zd Zdeddf fdZe	 	 	 	 	 	 	 ddeej                     deej                     deej                     dee	   d	ee	   d
ee	   dee	   de
eef   fd       Z xZS )IJepaForImageClassificationr,   r2   Nc                 .   t         |   |       |j                  | _        t        |d      | _        |j                  dkD  r*t        j                  |j                  |j                        nt        j                         | _	        | j                          y )NF)r  r   )r   r   
num_labelsr  ry   r(   r   r"   Identity
classifierr#  r   s     r.   r   z$IJepaForImageClassification.__init__E  ss      ++%@
 OUN_N_bcNc"))F$6$68I8IJikititiv 	r/   r0   r   labelsr   r  r1   r  c                 n   ||n| j                   j                  }| j                  ||||||      }|d   }	| j                  |	j	                  d            }
d}||j                  |
j                        }| j                   j                  | j                  dk(  rd| j                   _        nl| j                  dkD  rL|j                  t        j                  k(  s|j                  t        j                  k(  rd| j                   _        nd| j                   _        | j                   j                  dk(  rIt               }| j                  dk(  r& ||
j                         |j                               }n ||
|      }n| j                   j                  dk(  r=t               } ||
j!                  d	| j                        |j!                  d	            }n,| j                   j                  dk(  rt#               } ||
|      }|s|
f|dd z   }||f|z   S |S t%        ||
|j&                  |j(                  
      S )a  
        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the image classification/regression loss. Indices should be in `[0, ...,
            config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss); if
            `config.num_labels > 1` a classification loss is computed (Cross-Entropy).
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        outputs = self.ijepa(
            pixel_values,
            head_mask=head_mask,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            interpolate_pos_encoding=interpolate_pos_encoding,
            return_dict=return_dict,
        )

        sequence_output = outputs[0]

        # I-JEPA has no CLS token, so the classifier operates on the mean of the patch tokens.
        logits = self.classifier(sequence_output.mean(dim=1))

        loss = None
        if labels is not None:
            # move labels to the correct device to enable model parallelism
            labels = labels.to(logits.device)
            if self.config.problem_type is None:
                if self.num_labels == 1:
                    self.config.problem_type = "regression"
                elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int):
                    self.config.problem_type = "single_label_classification"
                else:
                    self.config.problem_type = "multi_label_classification"

            if self.config.problem_type == "regression":
                loss_fct = MSELoss()
                if self.num_labels == 1:
                    loss = loss_fct(logits.squeeze(), labels.squeeze())
                else:
                    loss = loss_fct(logits, labels)
            elif self.config.problem_type == "single_label_classification":
                loss_fct = CrossEntropyLoss()
                loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
            elif self.config.problem_type == "multi_label_classification":
                loss_fct = BCEWithLogitsLoss()
                loss = loss_fct(logits, labels)

        if not return_dict:
            output = (logits,) + outputs[1:]
            return ((loss,) + output) if loss is not None else output

        return ImageClassifierOutput(
            loss=loss,
            logits=logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


__all__ = ["IJepaPreTrainedModel", "IJepaModel", "IJepaForImageClassification"]