
"""PyTorch DINOv2 model."""

import collections.abc
from typing import Callable, Optional, Union

import torch
import torch.utils.checkpoint
from torch import nn
from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss

from ...activations import ACT2FN
from ...modeling_layers import GradientCheckpointingLayer
from ...modeling_outputs import (
    BackboneOutput,
    BaseModelOutput,
    BaseModelOutputWithPooling,
    ImageClassifierOutput,
)
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...pytorch_utils import find_pruneable_heads_and_indices, prune_linear_layer
from ...utils import auto_docstring, logging, torch_int
from ...utils.backbone_utils import BackboneMixin
from .configuration_dinov2 import Dinov2Config


logger = logging.get_logger(__name__)


class Dinov2Embeddings(nn.Module):
    """
    Construct the CLS token, mask token, position and patch embeddings.
    """

    def __init__(self, config: Dinov2Config) -> None:
        super().__init__()

        self.cls_token = nn.Parameter(torch.randn(1, 1, config.hidden_size))
        if config.use_mask_token:
            self.mask_token = nn.Parameter(torch.zeros(1, config.hidden_size))
        self.patch_embeddings = Dinov2PatchEmbeddings(config)
        num_patches = self.patch_embeddings.num_patches
        self.position_embeddings = nn.Parameter(torch.randn(1, num_patches + 1, config.hidden_size))
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.patch_size = config.patch_size
        self.config = config

    def interpolate_pos_encoding(self, embeddings: torch.Tensor, height: int, width: int) -> torch.Tensor:
        """
        This method allows to interpolate the pre-trained position encodings, to be able to use the model on higher resolution
        images. This method is also adapted to support torch.jit tracing and interpolation at torch.float32 precision.

        Adapted from:
        - https://github.com/facebookresearch/dino/blob/de9ee3df6cf39fac952ab558447af1fa1365362a/vision_transformer.py#L174-L194, and
        - https://github.com/facebookresearch/dinov2/blob/e1277af2ba9496fbadf7aec6eba56e8d882d1e35/dinov2/models/vision_transformer.py#L179-L211
        """
        num_patches = embeddings.shape[1] - 1
        num_positions = self.position_embeddings.shape[1] - 1

        # always interpolate when tracing so the exported model works for dynamic input shapes
        if not torch.jit.is_tracing() and num_patches == num_positions and height == width:
            return self.position_embeddings

        class_pos_embed = self.position_embeddings[:, :1]
        patch_pos_embed = self.position_embeddings[:, 1:]

        dim = embeddings.shape[-1]

        new_height = height // self.patch_size
        new_width = width // self.patch_size

        sqrt_num_positions = torch_int(num_positions**0.5)
        patch_pos_embed = patch_pos_embed.reshape(1, sqrt_num_positions, sqrt_num_positions, dim)
        patch_pos_embed = patch_pos_embed.permute(0, 3, 1, 2)
        target_dtype = patch_pos_embed.dtype
        patch_pos_embed = nn.functional.interpolate(
            patch_pos_embed.to(dtype=torch.float32),
            size=(new_height, new_width),
            mode="bicubic",
            align_corners=False,
        ).to(dtype=target_dtype)
        patch_pos_embed = patch_pos_embed.permute(0, 2, 3, 1).view(1, -1, dim)

        return torch.cat((class_pos_embed, patch_pos_embed), dim=1)

    def forward(self, pixel_values: torch.Tensor, bool_masked_pos: Optional[torch.Tensor] = None) -> torch.Tensor:
        batch_size, _, height, width = pixel_values.shape
        target_dtype = self.patch_embeddings.projection.weight.dtype
        embeddings = self.patch_embeddings(pixel_values.to(dtype=target_dtype))

        if bool_masked_pos is not None and self.config.use_mask_token:
            embeddings = torch.where(
                bool_masked_pos.unsqueeze(-1), self.mask_token.to(embeddings.dtype).unsqueeze(0), embeddings
            )

        # add the [CLS] token to the embedded patch tokens
        cls_tokens = self.cls_token.expand(batch_size, -1, -1)
        embeddings = torch.cat((cls_tokens, embeddings), dim=1)

        # add positional encoding to each token
        embeddings = embeddings + self.interpolate_pos_encoding(embeddings, height, width)

        embeddings = self.dropout(embeddings)

        return embeddings
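# Illustrative sketch (hypothetical helper, not part of the upstream module): with the DINOv2-base
# defaults (image_size=518, patch_size=14) the pretrained position table covers a 37x37 patch grid
# plus the CLS token; `interpolate_pos_encoding` above resizes that grid bicubically so other input
# resolutions, e.g. 224x224 with its 16x16 patch grid, can reuse the same weights.
def _example_token_count(height: int = 224, width: int = 224, patch_size: int = 14) -> int:
    """Number of tokens (patches + CLS) that `Dinov2Embeddings` produces for a given input size."""
    return (height // patch_size) * (width // patch_size) + 1
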
class Dinov2PatchEmbeddings(nn.Module):
    """
    This class turns `pixel_values` of shape `(batch_size, num_channels, height, width)` into the initial
    `hidden_states` (patch embeddings) of shape `(batch_size, seq_length, hidden_size)` to be consumed by a
    Transformer.
    """

    def __init__(self, config):
        super().__init__()
        image_size, patch_size = config.image_size, config.patch_size
        num_channels, hidden_size = config.num_channels, config.hidden_size

        image_size = image_size if isinstance(image_size, collections.abc.Iterable) else (image_size, image_size)
        patch_size = patch_size if isinstance(patch_size, collections.abc.Iterable) else (patch_size, patch_size)
        num_patches = (image_size[1] // patch_size[1]) * (image_size[0] // patch_size[0])
        self.image_size = image_size
        self.patch_size = patch_size
        self.num_channels = num_channels
        self.num_patches = num_patches

        self.projection = nn.Conv2d(num_channels, hidden_size, kernel_size=patch_size, stride=patch_size)

    def forward(self, pixel_values: torch.Tensor) -> torch.Tensor:
        num_channels = pixel_values.shape[1]
        if num_channels != self.num_channels:
            raise ValueError(
                "Make sure that the channel dimension of the pixel values match with the one set in the configuration."
                f" Expected {self.num_channels} but got {num_channels}."
            )
        embeddings = self.projection(pixel_values).flatten(2).transpose(1, 2)
        return embeddings


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: Optional[torch.Tensor],
    scaling: float,
    dropout: float = 0.0,
    **kwargs,
):
    # Take the dot product between "query" and "key" to get the raw attention scores.
    attn_weights = torch.matmul(query, key.transpose(-1, -2)) * scaling

    # Normalize the attention scores to probabilities.
    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)

    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)

    # Mask heads if we want to
    if attention_mask is not None:
        attn_weights = attn_weights * attention_mask

    attn_output = torch.matmul(attn_weights, value)
    attn_output = attn_output.transpose(1, 2).contiguous()

    return attn_output, attn_weights


class Dinov2SelfAttention(nn.Module):
    def __init__(self, config: Dinov2Config) -> None:
        super().__init__()
        if config.hidden_size % config.num_attention_heads != 0 and not hasattr(config, "embedding_size"):
            raise ValueError(
                f"The hidden size {config.hidden_size} is not a multiple of the number of attention "
                f"heads {config.num_attention_heads}."
            )

        self.config = config
        self.num_attention_heads = config.num_attention_heads
        self.attention_head_size = int(config.hidden_size / config.num_attention_heads)
        self.all_head_size = self.num_attention_heads * self.attention_head_size
        self.dropout_prob = config.attention_probs_dropout_prob
        self.scaling = self.attention_head_size**-0.5
        self.is_causal = False

        self.query = nn.Linear(config.hidden_size, self.all_head_size, bias=config.qkv_bias)
        self.key = nn.Linear(config.hidden_size, self.all_head_size, bias=config.qkv_bias)
        self.value = nn.Linear(config.hidden_size, self.all_head_size, bias=config.qkv_bias)

    def forward(
        self,
        hidden_states: torch.Tensor,
        head_mask: Optional[torch.Tensor] = None,
        output_attentions: bool = False,
    ) -> Union[tuple[torch.Tensor, torch.Tensor], tuple[torch.Tensor]]:
        batch_size, seq_length, _ = hidden_states.shape

        key_layer = (
            self.key(hidden_states)
            .view(batch_size, -1, self.num_attention_heads, self.attention_head_size)
            .transpose(1, 2)
        )
        value_layer = (
            self.value(hidden_states)
            .view(batch_size, -1, self.num_attention_heads, self.attention_head_size)
            .transpose(1, 2)
        )
        query_layer = (
            self.query(hidden_states)
            .view(batch_size, -1, self.num_attention_heads, self.attention_head_size)
            .transpose(1, 2)
        )

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            if self.config._attn_implementation == "sdpa" and output_attentions:
                logger.warning_once(
                    "`torch.nn.functional.scaled_dot_product_attention` does not support `output_attentions=True`."
                    " Falling back to eager attention. This warning can be removed using the argument"
                    ' `attn_implementation="eager"` when loading the model.'
                )
            else:
                attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        context_layer, attention_probs = attention_interface(
            self,
            query_layer,
            key_layer,
            value_layer,
            head_mask,
            is_causal=self.is_causal,
            scaling=self.scaling,
            dropout=0.0 if not self.training else self.dropout_prob,
        )

        new_context_layer_shape = context_layer.size()[:-1] + (self.all_head_size,)
        context_layer = context_layer.reshape(new_context_layer_shape)

        outputs = (context_layer, attention_probs) if output_attentions else (context_layer,)

        return outputs
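# Illustrative sketch (hypothetical helper, not part of the upstream module): `Dinov2SelfAttention`
# dispatches on `config._attn_implementation` -- "eager" uses `eager_attention_forward` above, while
# other values (e.g. "sdpa") are looked up in `ALL_ATTENTION_FUNCTIONS`. Asking for attention maps
# under the sdpa backend triggers the fallback warning above; forcing eager at load time avoids it.
def _example_force_eager_attention(model_name: str = "facebook/dinov2-base"):
    """Load a DINOv2 checkpoint with eager attention so `output_attentions=True` returns weights."""
    from transformers import AutoModel

    return AutoModel.from_pretrained(model_name, attn_implementation="eager")
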
class Dinov2SelfOutput(nn.Module):
    """
    The residual connection is defined in Dinov2Layer instead of here (as is the case with other models), due to the
    layernorm applied before each block.
    """

    def __init__(self, config: Dinov2Config) -> None:
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states: torch.Tensor, input_tensor: torch.Tensor) -> torch.Tensor:
        hidden_states = self.dense(hidden_states)
        hidden_states = self.dropout(hidden_states)

        return hidden_states


class Dinov2Attention(nn.Module):
    def __init__(self, config: Dinov2Config) -> None:
        super().__init__()
        self.attention = Dinov2SelfAttention(config)
        self.output = Dinov2SelfOutput(config)
        self.pruned_heads = set()

    def prune_heads(self, heads: set[int]) -> None:
        if len(heads) == 0:
            return
        heads, index = find_pruneable_heads_and_indices(
            heads, self.attention.num_attention_heads, self.attention.attention_head_size, self.pruned_heads
        )

        # Prune linear layers
        self.attention.query = prune_linear_layer(self.attention.query, index)
        self.attention.key = prune_linear_layer(self.attention.key, index)
        self.attention.value = prune_linear_layer(self.attention.value, index)
        self.output.dense = prune_linear_layer(self.output.dense, index, dim=1)

        # Update hyper params and store pruned heads
        self.attention.num_attention_heads = self.attention.num_attention_heads - len(heads)
        self.attention.all_head_size = self.attention.attention_head_size * self.attention.num_attention_heads
        self.pruned_heads = self.pruned_heads.union(heads)

    def forward(
        self,
        hidden_states: torch.Tensor,
        head_mask: Optional[torch.Tensor] = None,
        output_attentions: bool = False,
    ) -> Union[tuple[torch.Tensor, torch.Tensor], tuple[torch.Tensor]]:
        self_outputs = self.attention(hidden_states, head_mask, output_attentions)

        attention_output = self.output(self_outputs[0], hidden_states)

        outputs = (attention_output,) + self_outputs[1:]  # add attentions if we output them
        return outputs


class Dinov2LayerScale(nn.Module):
    def __init__(self, config) -> None:
        super().__init__()
        self.lambda1 = nn.Parameter(config.layerscale_value * torch.ones(config.hidden_size))

    def forward(self, hidden_state: torch.Tensor) -> torch.Tensor:
        return hidden_state * self.lambda1


def drop_path(input: torch.Tensor, drop_prob: float = 0.0, training: bool = False) -> torch.Tensor:
    """
    Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).

    Comment by Ross Wightman: This is the same as the DropConnect impl I created for EfficientNet, etc networks,
    however, the original name is misleading as 'Drop Connect' is a different form of dropout in a separate paper...
    See discussion: https://github.com/tensorflow/tpu/issues/494#issuecomment-532968956 ... I've opted for changing the
    layer and argument names to 'drop path' rather than mix DropConnect as a layer name and use 'survival rate' as the
    argument.
    """
    if drop_prob == 0.0 or not training:
        return input
    keep_prob = 1 - drop_prob
    shape = (input.shape[0],) + (1,) * (input.ndim - 1)  # work with diff dim tensors, not just 2D ConvNets
    random_tensor = keep_prob + torch.rand(shape, dtype=input.dtype, device=input.device)
    random_tensor.floor_()  # binarize
    output = input.div(keep_prob) * random_tensor
    return output
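# Illustrative sketch (hypothetical helper, not part of the upstream module): during training,
# `drop_path` zeroes the residual branch for a random subset of samples and rescales surviving
# samples by 1 / keep_prob, so the branch keeps the same expected value.
def _example_drop_path() -> torch.Tensor:
    """Apply `drop_path` to a toy residual branch of shape (batch, tokens, hidden)."""
    branch = torch.ones(4, 3, 2)
    # With drop_prob=0.5, roughly half of the 4 samples come back as zeros and the rest as 2.0.
    return drop_path(branch, drop_prob=0.5, training=True)
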
class Dinov2DropPath(nn.Module):
    """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks)."""

    def __init__(self, drop_prob: Optional[float] = None) -> None:
        super().__init__()
        self.drop_prob = drop_prob

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        return drop_path(hidden_states, self.drop_prob, self.training)

    def extra_repr(self) -> str:
        return f"p={self.drop_prob}"


class Dinov2MLP(nn.Module):
    def __init__(self, config) -> None:
        super().__init__()
        in_features = out_features = config.hidden_size
        hidden_features = int(config.hidden_size * config.mlp_ratio)
        self.fc1 = nn.Linear(in_features, hidden_features, bias=True)
        if isinstance(config.hidden_act, str):
            self.activation = ACT2FN[config.hidden_act]
        else:
            self.activation = config.hidden_act
        self.fc2 = nn.Linear(hidden_features, out_features, bias=True)

    def forward(self, hidden_state: torch.Tensor) -> torch.Tensor:
        hidden_state = self.fc1(hidden_state)
        hidden_state = self.activation(hidden_state)
        hidden_state = self.fc2(hidden_state)
        return hidden_state


class Dinov2SwiGLUFFN(nn.Module):
    def __init__(self, config) -> None:
        super().__init__()
        in_features = out_features = config.hidden_size
        hidden_features = int(config.hidden_size * config.mlp_ratio)
        hidden_features = (int(hidden_features * 2 / 3) + 7) // 8 * 8

        self.weights_in = nn.Linear(in_features, 2 * hidden_features, bias=True)
        self.weights_out = nn.Linear(hidden_features, out_features, bias=True)

    def forward(self, hidden_state: torch.Tensor) -> torch.Tensor:
        hidden_state = self.weights_in(hidden_state)
        x1, x2 = hidden_state.chunk(2, dim=-1)
        hidden = nn.functional.silu(x1) * x2
        return self.weights_out(hidden)


class Dinov2Layer(GradientCheckpointingLayer):
    """This corresponds to the Block class in the original implementation."""

    def __init__(self, config: Dinov2Config) -> None:
        super().__init__()

        self.norm1 = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.attention = Dinov2Attention(config)
        self.layer_scale1 = Dinov2LayerScale(config)
        self.drop_path = Dinov2DropPath(config.drop_path_rate) if config.drop_path_rate > 0.0 else nn.Identity()

        self.norm2 = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        if config.use_swiglu_ffn:
            self.mlp = Dinov2SwiGLUFFN(config)
        else:
            self.mlp = Dinov2MLP(config)
        self.layer_scale2 = Dinov2LayerScale(config)

    def forward(
        self,
        hidden_states: torch.Tensor,
        head_mask: Optional[torch.Tensor] = None,
        output_attentions: bool = False,
    ) -> Union[tuple[torch.Tensor, torch.Tensor], tuple[torch.Tensor]]:
        self_attention_outputs = self.attention(
            self.norm1(hidden_states),  # in Dinov2, layernorm is applied before self-attention
            head_mask=head_mask,
            output_attentions=output_attentions,
        )
        attention_output = self_attention_outputs[0]

        attention_output = self.layer_scale1(attention_output)
        outputs = self_attention_outputs[1:]  # add self attentions if we output attention weights

        # first residual connection
        hidden_states = self.drop_path(attention_output) + hidden_states

        # in Dinov2, layernorm is also applied after self-attention
        layer_output = self.norm2(hidden_states)
        layer_output = self.mlp(layer_output)
        layer_output = self.layer_scale2(layer_output)

        # second residual connection
        layer_output = self.drop_path(layer_output) + hidden_states

        outputs = (layer_output,) + outputs

        return outputs


class Dinov2Encoder(nn.Module):
    def __init__(self, config: Dinov2Config) -> None:
        super().__init__()
        self.config = config
        self.layer = nn.ModuleList([Dinov2Layer(config) for _ in range(config.num_hidden_layers)])
        self.gradient_checkpointing = False

    def forward(
        self,
        hidden_states: torch.Tensor,
        head_mask: Optional[torch.Tensor] = None,
        output_attentions: bool = False,
        output_hidden_states: bool = False,
        return_dict: bool = True,
    ) -> Union[tuple, BaseModelOutput]:
        all_hidden_states = () if output_hidden_states else None
        all_self_attentions = () if output_attentions else None

        for i, layer_module in enumerate(self.layer):
            if output_hidden_states:
                all_hidden_states = all_hidden_states + (hidden_states,)

            layer_head_mask = head_mask[i] if head_mask is not None else None

            layer_outputs = layer_module(hidden_states, layer_head_mask, output_attentions)

            hidden_states = layer_outputs[0]

            if output_attentions:
                all_self_attentions = all_self_attentions + (layer_outputs[1],)

        if output_hidden_states:
            all_hidden_states = all_hidden_states + (hidden_states,)

        if not return_dict:
            return tuple(v for v in [hidden_states, all_hidden_states, all_self_attentions] if v is not None)
        return BaseModelOutput(
            last_hidden_state=hidden_states,
            hidden_states=all_hidden_states,
            attentions=all_self_attentions,
        )


@auto_docstring
class Dinov2PreTrainedModel(PreTrainedModel):
    config: Dinov2Config
    base_model_prefix = "dinov2"
    main_input_name = "pixel_values"
    supports_gradient_checkpointing = True
    _no_split_modules = ["Dinov2SwiGLUFFN"]
    _supports_sdpa = True
    _supports_flash_attn = True
    _supports_flex_attn = True
    _supports_attention_backend = True

    def _init_weights(self, module: Union[nn.Linear, nn.Conv2d, nn.LayerNorm]) -> None:
        """Initialize the weights"""
        if isinstance(module, (nn.Linear, nn.Conv2d)):
            # Upcast the weights to `fp32` and cast them back to the original `dtype`, because
            # `trunc_normal_` is not implemented in half precision.
            module.weight.data = nn.init.trunc_normal_(
                module.weight.data.to(torch.float32), mean=0.0, std=self.config.initializer_range
            ).to(module.weight.dtype)
            if module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.LayerNorm):
            module.bias.data.zero_()
            module.weight.data.fill_(1.0)
        elif isinstance(module, Dinov2Embeddings):
            module.position_embeddings.data = nn.init.trunc_normal_(
                module.position_embeddings.data.to(torch.float32),
                mean=0.0,
                std=self.config.initializer_range,
            ).to(module.position_embeddings.dtype)

            module.cls_token.data = nn.init.trunc_normal_(
                module.cls_token.data.to(torch.float32),
                mean=0.0,
                std=self.config.initializer_range,
            ).to(module.cls_token.dtype)
            if self.config.use_mask_token:
                module.mask_token.data.zero_()
        elif isinstance(module, Dinov2LayerScale):
            module.lambda1.data.fill_(self.config.layerscale_value)
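# Illustrative sketch (hypothetical helper, not part of the upstream module): `Dinov2Layer` inherits
# from `GradientCheckpointingLayer` and `Dinov2PreTrainedModel` sets `supports_gradient_checkpointing`,
# so activation checkpointing can be toggled through the standard `PreTrainedModel` API.
def _example_enable_gradient_checkpointing(model: Dinov2PreTrainedModel) -> Dinov2PreTrainedModel:
    """Trade compute for memory during training by recomputing layer activations in the backward pass."""
    model.gradient_checkpointing_enable()
    return model
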
@auto_docstring
class Dinov2Model(Dinov2PreTrainedModel):
    def __init__(self, config: Dinov2Config):
        super().__init__(config)
        self.config = config

        self.embeddings = Dinov2Embeddings(config)
        self.encoder = Dinov2Encoder(config)

        self.layernorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)

        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self) -> Dinov2PatchEmbeddings:
        return self.embeddings.patch_embeddings

    def _prune_heads(self, heads_to_prune: dict[int, list[int]]) -> None:
        """
        Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base
        class PreTrainedModel
        """
        for layer, heads in heads_to_prune.items():
            self.encoder.layer[layer].attention.prune_heads(heads)

    @auto_docstring
    def forward(
        self,
        pixel_values: Optional[torch.Tensor] = None,
        bool_masked_pos: Optional[torch.Tensor] = None,
        head_mask: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[tuple, BaseModelOutputWithPooling]:
        r"""
        bool_masked_pos (`torch.BoolTensor` of shape `(batch_size, sequence_length)`):
            Boolean masked positions. Indicates which patches are masked (1) and which aren't (0). Only relevant for
            pre-training.
        """
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        if pixel_values is None:
            raise ValueError("You have to specify pixel_values")

        # Prepare head mask if needed (1.0 in head_mask indicates we keep the head)
        head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers)

        embedding_output = self.embeddings(pixel_values, bool_masked_pos=bool_masked_pos)

        encoder_outputs = self.encoder(
            embedding_output,
            head_mask=head_mask,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        sequence_output = encoder_outputs[0]
        sequence_output = self.layernorm(sequence_output)
        pooled_output = sequence_output[:, 0, :]

        if not return_dict:
            head_outputs = (sequence_output, pooled_output)
            return head_outputs + encoder_outputs[1:]

        return BaseModelOutputWithPooling(
            last_hidden_state=sequence_output,
            pooler_output=pooled_output,
            hidden_states=encoder_outputs.hidden_states,
            attentions=encoder_outputs.attentions,
        )
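# Illustrative sketch (hypothetical helper, not part of the upstream module): extracting DINOv2
# features with `Dinov2Model`. `pooler_output` is the layer-normed CLS token; per-patch features
# live in `last_hidden_state[:, 1:]`. The checkpoint name matches the `Dinov2Backbone` example below.
def _example_extract_features(pixel_values: torch.Tensor) -> torch.Tensor:
    """Return CLS features of shape (batch_size, hidden_size) for preprocessed `pixel_values`."""
    from transformers import AutoModel

    model = AutoModel.from_pretrained("facebook/dinov2-base")
    model.eval()
    with torch.no_grad():
        outputs = model(pixel_values)
    return outputs.pooler_output
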
@auto_docstring(
    custom_intro="""
    Dinov2 Model transformer with an image classification head on top (a linear layer on top of the final hidden state
    of the [CLS] token) e.g. for ImageNet.
    """
)
class Dinov2ForImageClassification(Dinov2PreTrainedModel):
    def __init__(self, config: Dinov2Config) -> None:
        super().__init__(config)

        self.num_labels = config.num_labels
        self.dinov2 = Dinov2Model(config)

        # Classifier head
        self.classifier = (
            nn.Linear(config.hidden_size * 2, config.num_labels) if config.num_labels > 0 else nn.Identity()
        )

        # Initialize weights and apply final processing
        self.post_init()

    @auto_docstring
    def forward(
        self,
        pixel_values: Optional[torch.Tensor] = None,
        head_mask: Optional[torch.Tensor] = None,
        labels: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[tuple, ImageClassifierOutput]:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the image classification/regression loss. Indices should be in `[0, ...,
            config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If
            `config.num_labels > 1` a classification loss is computed (Cross-Entropy).
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        outputs = self.dinov2(
            pixel_values,
            head_mask=head_mask,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        sequence_output = outputs[0]  # batch_size, sequence_length, hidden_size

        cls_token = sequence_output[:, 0]
        patch_tokens = sequence_output[:, 1:]

        linear_input = torch.cat([cls_token, patch_tokens.mean(dim=1)], dim=1)

        logits = self.classifier(linear_input)

        loss = None
        if labels is not None:
            # move labels to correct device to enable model parallelism
            labels = labels.to(logits.device)
            if self.config.problem_type is None:
                if self.num_labels == 1:
                    self.config.problem_type = "regression"
                elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int):
                    self.config.problem_type = "single_label_classification"
                else:
                    self.config.problem_type = "multi_label_classification"

            if self.config.problem_type == "regression":
                loss_fct = MSELoss()
                if self.num_labels == 1:
                    loss = loss_fct(logits.squeeze(), labels.squeeze())
                else:
                    loss = loss_fct(logits, labels)
            elif self.config.problem_type == "single_label_classification":
                loss_fct = CrossEntropyLoss()
                loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
            elif self.config.problem_type == "multi_label_classification":
                loss_fct = BCEWithLogitsLoss()
                loss = loss_fct(logits, labels)

        if not return_dict:
            output = (logits,) + outputs[2:]
            return ((loss,) + output) if loss is not None else output

        return ImageClassifierOutput(
            loss=loss,
            logits=logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


@auto_docstring(
    custom_intro="""
    Dinov2 backbone, to be used with frameworks like DETR and MaskFormer.
    """
)
class Dinov2Backbone(Dinov2PreTrainedModel, BackboneMixin):
    def __init__(self, config):
        super().__init__(config)
        super()._init_backbone(config)

        self.num_features = [config.hidden_size for _ in range(config.num_hidden_layers + 1)]
        self.embeddings = Dinov2Embeddings(config)
        self.encoder = Dinov2Encoder(config)

        self.layernorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)

        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self) -> Dinov2PatchEmbeddings:
        return self.embeddings.patch_embeddings

    @auto_docstring
    def forward(
        self,
        pixel_values: torch.Tensor,
        output_hidden_states: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> BackboneOutput:
        r"""
        Examples:

        ```python
        >>> from transformers import AutoImageProcessor, AutoBackbone
        >>> import torch
        >>> from PIL import Image
        >>> import requests

        >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg"
        >>> image = Image.open(requests.get(url, stream=True).raw)

        >>> processor = AutoImageProcessor.from_pretrained("facebook/dinov2-base")
        >>> model = AutoBackbone.from_pretrained(
        ...     "facebook/dinov2-base", out_features=["stage2", "stage5", "stage8", "stage11"]
        ... )

        >>> inputs = processor(image, return_tensors="pt")

        >>> outputs = model(**inputs)
        >>> feature_maps = outputs.feature_maps
        >>> list(feature_maps[-1].shape)
        [1, 768, 16, 16]
        ```"""
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions

        embedding_output = self.embeddings(pixel_values)

        outputs = self.encoder(
            embedding_output, output_hidden_states=True, output_attentions=output_attentions, return_dict=return_dict
        )

        hidden_states = outputs.hidden_states if return_dict else outputs[1]

        feature_maps = ()
        for stage, hidden_state in zip(self.stage_names, hidden_states):
            if stage in self.out_features:
                if self.config.apply_layernorm:
                    hidden_state = self.layernorm(hidden_state)
                if self.config.reshape_hidden_states:
                    hidden_state = hidden_state[:, 1:]
                    batch_size, _, height, width = pixel_values.shape
                    patch_size = self.config.patch_size
                    hidden_state = hidden_state.reshape(batch_size, height // patch_size, width // patch_size, -1)
                    hidden_state = hidden_state.permute(0, 3, 1, 2).contiguous()
                feature_maps += (hidden_state,)

        if not return_dict:
            if output_hidden_states:
                output = (feature_maps,) + outputs[1:]
            else:
                output = (feature_maps,) + outputs[2:]
            return output

        return BackboneOutput(
            feature_maps=feature_maps,
            hidden_states=outputs.hidden_states if output_hidden_states else None,
            attentions=outputs.attentions if output_attentions else None,
        )


__all__ = ["Dinov2ForImageClassification", "Dinov2Model", "Dinov2PreTrainedModel", "Dinov2Backbone"]