
"""PyTorch BiT model. Also supports backbone for ViT hybrid."""

import collections
import math
from typing import Optional

import numpy as np
import torch
import torch.utils.checkpoint
from torch import Tensor, nn
from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss

from ...activations import ACT2FN
from ...modeling_outputs import (
    BackboneOutput,
    BaseModelOutputWithNoAttention,
    BaseModelOutputWithPoolingAndNoAttention,
    ImageClassifierOutputWithNoAttention,
)
from ...modeling_utils import PreTrainedModel
from ...utils import auto_docstring, logging
from ...utils.backbone_utils import BackboneMixin
from .configuration_bit import BitConfig


logger = logging.get_logger(__name__)


def get_padding_value(padding=None, kernel_size=7, stride=1, dilation=1) -> tuple[tuple, bool]:
    r"""
    Utility function to get the tuple padding value given the kernel_size and padding.

    Args:
        padding (Union[`str`, `int`], *optional*):
            Padding value, can be either `"same"`, `"valid"`. If a different value is provided the default padding from
            PyTorch is used.
        kernel_size (`int`, *optional*, defaults to 7):
            Kernel size of the convolution layers.
        stride (`int`, *optional*, defaults to 1):
            Stride value of the convolution layers.
        dilation (`int`, *optional*, defaults to 1):
            Dilation value of the convolution layers.
    """
    dynamic = False
    if padding is None:
        padding = ((stride - 1) + dilation * (kernel_size - 1)) // 2
        return padding, dynamic

    if isinstance(padding, str):
        # for any string padding, the padding will be calculated for you, one of three ways
        padding = padding.lower()
        if padding == "same":
            # TF compatible 'SAME' padding, has a performance and GPU memory allocation impact
            if stride == 1 and (dilation * (kernel_size - 1)) % 2 == 0:
                # static case, no extra overhead
                padding = ((stride - 1) + dilation * (kernel_size - 1)) // 2
            else:
                # dynamic 'SAME' padding, has runtime/GPU memory overhead
                padding = 0
                dynamic = True
        elif padding == "valid":
            # 'VALID' padding, same as padding=0
            padding = 0
        else:
            # Default to PyTorch style 'same'-ish symmetric padding
            padding = ((stride - 1) + dilation * (kernel_size - 1)) // 2

    return padding, dynamic


class WeightStandardizedConv2d(nn.Conv2d):
    """Conv2d with Weight Standardization. Includes TensorFlow compatible SAME padding. Used for ViT Hybrid model.

    Paper: [Micro-Batch Training with Batch-Channel Normalization and Weight
    Standardization](https://huggingface.co/papers/1903.10520v2)
    """

    def __init__(
        self,
        in_channel,
        out_channels,
        kernel_size,
        stride=1,
        padding="SAME",
        dilation=1,
        groups=1,
        bias=False,
        eps=1e-6,
    ):
        padding, is_dynamic = get_padding_value(padding, kernel_size, stride=stride, dilation=dilation)
        super().__init__(
            in_channel,
            out_channels,
            kernel_size,
            stride=stride,
            padding=padding,
            dilation=dilation,
            groups=groups,
            bias=bias,
        )
        if is_dynamic:
            self.pad = DynamicPad2d(kernel_size, stride, dilation)
        else:
            self.pad = None
        self.eps = eps

    def forward(self, hidden_state):
        if self.pad is not None:
            hidden_state = self.pad(hidden_state)
        weight = nn.functional.batch_norm(
            self.weight.reshape(1, self.out_channels, -1), None, None, training=True, momentum=0.0, eps=self.eps
        ).reshape_as(self.weight)
        hidden_state = nn.functional.conv2d(
            hidden_state, weight, self.bias, self.stride, self.padding, self.dilation, self.groups
        )
        return hidden_state


class BitGroupNormActivation(nn.GroupNorm):
    r"""
    A module that combines group normalization with an activation function.
    """

    def __init__(self, config, num_channels, eps=1e-5, affine=True, apply_activation=True):
        super().__init__(config.num_groups, num_channels, eps=eps, affine=affine)
        if apply_activation:
            self.activation = ACT2FN[config.hidden_act]
        else:
            self.activation = nn.Identity()

    def forward(self, hidden_state):
        hidden_state = nn.functional.group_norm(hidden_state, self.num_groups, self.weight, self.bias, self.eps)
        hidden_state = self.activation(hidden_state)
        return hidden_state


class DynamicPad2d(nn.Module):
    r"""
    A module that wraps dynamic padding of any input, given the parameters of the convolutional layer and the input
    hidden states.
    """

    def __init__(self, kernel_size, stride, dilation, value=0):
        super().__init__()
        # Safety checkers
        if isinstance(kernel_size, int):
            kernel_size = (kernel_size, kernel_size)

        if isinstance(stride, int):
            stride = (stride, stride)

        if isinstance(dilation, int):
            dilation = (dilation, dilation)

        self.kernel_size = kernel_size
        self.stride = stride
        self.dilation = dilation
        self.value = value

        def compute_padding(x, kernel_size, stride, dilation):
            return max((math.ceil(x / stride) - 1) * stride + (kernel_size - 1) * dilation + 1 - x, 0)

        self.compute_padding = compute_padding

    def __call__(self, input):
        # Get width and height
        input_height, input_width = input.size()[-2:]

        # Compute the padding
        padding_height = self.compute_padding(input_height, self.kernel_size[0], self.stride[0], self.dilation[0])
        padding_width = self.compute_padding(input_width, self.kernel_size[1], self.stride[1], self.dilation[1])

        # apply pad
        if padding_height > 0 or padding_width > 0:
            input = nn.functional.pad(
                input,
                [
                    padding_width // 2,
                    padding_width - padding_width // 2,
                    padding_height // 2,
                    padding_height - padding_height // 2,
                ],
                value=self.value,
            )
        return input


class BitMaxPool2d(nn.MaxPool2d):
    """Tensorflow like 'SAME' wrapper for 2D max pooling"""

    def __init__(
        self,
        kernel_size: int,
        stride=None,
        dilation=1,
        ceil_mode=False,
        padding=(0, 0),
        padding_value=0,
        use_dynamic_padding=True,
    ):
        kernel_size = kernel_size if isinstance(kernel_size, collections.abc.Iterable) else (kernel_size, kernel_size)
        stride = stride if isinstance(stride, collections.abc.Iterable) else (stride, stride)
        dilation = dilation if isinstance(dilation, collections.abc.Iterable) else (dilation, dilation)
        super().__init__(kernel_size, stride, padding, dilation, ceil_mode)
        if use_dynamic_padding:
            self.pad = DynamicPad2d(kernel_size, stride, dilation, padding_value)
        else:
            self.pad = nn.Identity()

    def forward(self, hidden_states):
        hidden_states = self.pad(hidden_states)
        return nn.functional.max_pool2d(
            hidden_states, self.kernel_size, self.stride, self.padding, self.dilation, self.ceil_mode
        )


class BitEmbeddings(nn.Module):
    """
    BiT Embeddings (stem) composed of a single aggressive convolution.
    """

    def __init__(self, config: BitConfig):
        super().__init__()

        self.convolution = WeightStandardizedConv2d(
            config.num_channels,
            config.embedding_size,
            kernel_size=7,
            stride=2,
            eps=1e-8,
            padding=config.global_padding,
        )

        self.pooler = BitMaxPool2d(kernel_size=3, stride=2, use_dynamic_padding=config.embedding_dynamic_padding)

        # Use the same padding strategy as convolutional layers
        if config.global_padding is not None and config.global_padding.upper() == "SAME":
            self.pad = nn.Identity()
        else:
            self.pad = nn.ConstantPad2d(padding=(1, 1, 1, 1), value=0.0)

        if not config.layer_type == "preactivation":
            self.norm = BitGroupNormActivation(config, num_channels=config.embedding_size)
        else:
            self.norm = nn.Identity()

        self.num_channels = config.num_channels

    def forward(self, pixel_values: Tensor) -> Tensor:
        num_channels = pixel_values.shape[1]
        if num_channels != self.num_channels:
            raise ValueError(
                "Make sure that the channel dimension of the pixel values match with the one set in the configuration."
            )

        embedding = self.convolution(pixel_values)

        embedding = self.pad(embedding)

        embedding = self.norm(embedding)

        embedding = self.pooler(embedding)

        return embedding


def drop_path(input: torch.Tensor, drop_prob: float = 0.0, training: bool = False) -> torch.Tensor:
    """
    Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).

    Comment by Ross Wightman: This is the same as the DropConnect impl I created for EfficientNet, etc networks,
    however, the original name is misleading as 'Drop Connect' is a different form of dropout in a separate paper...
    See discussion: https://github.com/tensorflow/tpu/issues/494#issuecomment-532968956 ... I've opted for changing the
    layer and argument names to 'drop path' rather than mix DropConnect as a layer name and use 'survival rate' as the
    argument.
    """
    if drop_prob == 0.0 or not training:
        return input
    keep_prob = 1 - drop_prob
    shape = (input.shape[0],) + (1,) * (input.ndim - 1)  # work with diff dim tensors, not just 2D ConvNets
    random_tensor = keep_prob + torch.rand(shape, dtype=input.dtype, device=input.device)
    random_tensor.floor_()  # binarize
    output = input.div(keep_prob) * random_tensor
    return output


class BitDropPath(nn.Module):
    """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks)."""

    def __init__(self, drop_prob: Optional[float] = None) -> None:
        super().__init__()
        self.drop_prob = drop_prob

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        return drop_path(hidden_states, self.drop_prob, self.training)

    def extra_repr(self) -> str:
        return f"p={self.drop_prob}"


def make_div(value, divisor=8):
    min_value = divisor
    new_value = max(min_value, int(value + divisor / 2) // divisor * divisor)
    if new_value < 0.9 * value:
        new_value += divisor
    return new_value


class BitPreActivationBottleneckLayer(nn.Module):
    """Pre-activation (v2) bottleneck block.
    Follows the implementation of "Identity Mappings in Deep Residual Networks":
    https://github.com/KaimingHe/resnet-1k-layers/blob/master/resnet-pre-act.lua

    Except it puts the stride on 3x3 conv when available.
    """

    def __init__(
        self,
        config,
        in_channels,
        out_channels=None,
        bottle_ratio=0.25,
        stride=1,
        dilation=1,
        first_dilation=None,
        groups=1,
        drop_path_rate=0.0,
        is_first_layer=False,
    ):
        super().__init__()

        first_dilation = first_dilation or dilation

        out_channels = out_channels or in_channels
        mid_channels = make_div(out_channels * bottle_ratio)

        if is_first_layer:
            self.downsample = BitDownsampleConv(
                config,
                in_channels,
                out_channels,
                stride=stride,
                preact=True,
            )
        else:
            self.downsample = None

        self.norm1 = BitGroupNormActivation(config, in_channels)
        self.conv1 = WeightStandardizedConv2d(in_channels, mid_channels, 1, eps=1e-8, padding=config.global_padding)

        self.norm2 = BitGroupNormActivation(config, num_channels=mid_channels)
        self.conv2 = WeightStandardizedConv2d(
            mid_channels, mid_channels, 3, stride=stride, groups=groups, eps=1e-8, padding=config.global_padding
        )

        self.norm3 = BitGroupNormActivation(config, mid_channels)
        self.conv3 = WeightStandardizedConv2d(mid_channels, out_channels, 1, eps=1e-8, padding=config.global_padding)

        self.drop_path = BitDropPath(drop_path_rate) if drop_path_rate > 0 else nn.Identity()

    def forward(self, hidden_states):
        hidden_states_preact = self.norm1(hidden_states)

        # shortcut branch
        shortcut = hidden_states
        if self.downsample is not None:
            shortcut = self.downsample(hidden_states_preact)

        # residual branch
        hidden_states = self.conv1(hidden_states_preact)
        hidden_states = self.conv2(self.norm2(hidden_states))
        hidden_states = self.conv3(self.norm3(hidden_states))
        hidden_states = self.drop_path(hidden_states)
        return hidden_states + shortcut


class BitBottleneckLayer(nn.Module):
    """Non Pre-activation bottleneck block, equivalent to V1.5/V1b bottleneck. Used for ViT Hybrid."""

    def __init__(
        self,
        config,
        in_channels,
        out_channels=None,
        bottle_ratio=0.25,
        stride=1,
        dilation=1,
        first_dilation=None,
        groups=1,
        drop_path_rate=0.0,
        is_first_layer=False,
    ):
        super().__init__()
        first_dilation = first_dilation or dilation

        out_channels = out_channels or in_channels
        mid_chs = make_div(out_channels * bottle_ratio)

        if is_first_layer:
            self.downsample = BitDownsampleConv(
                config,
                in_channels,
                out_channels,
                stride=stride,
                preact=False,
            )
        else:
            self.downsample = None

        self.conv1 = WeightStandardizedConv2d(in_channels, mid_chs, 1, eps=1e-8, padding=config.global_padding)
        self.norm1 = BitGroupNormActivation(config, num_channels=mid_chs)
        self.conv2 = WeightStandardizedConv2d(
            mid_chs,
            mid_chs,
            3,
            stride=stride,
            dilation=first_dilation,
            groups=groups,
            eps=1e-8,
            padding=config.global_padding,
        )
        self.norm2 = BitGroupNormActivation(config, num_channels=mid_chs)
        self.conv3 = WeightStandardizedConv2d(mid_chs, out_channels, 1, eps=1e-8, padding=config.global_padding)
        self.norm3 = BitGroupNormActivation(config, num_channels=out_channels, apply_activation=False)
        self.drop_path = BitDropPath(drop_path_rate) if drop_path_rate > 0 else nn.Identity()

        self.activation = ACT2FN[config.hidden_act]

    def forward(self, hidden_states):
        # shortcut branch
        shortcut = hidden_states
        if self.downsample is not None:
            shortcut = self.downsample(hidden_states)

        hidden_states = self.conv1(hidden_states)
        hidden_states = self.norm1(hidden_states)
        hidden_states = self.conv2(hidden_states)
        hidden_states = self.norm2(hidden_states)
        hidden_states = self.conv3(hidden_states)
        hidden_states = self.norm3(hidden_states)
        hidden_states = self.drop_path(hidden_states)
        hidden_states = self.activation(hidden_states + shortcut)
        return hidden_states


class BitDownsampleConv(nn.Module):
    def __init__(
        self,
        config,
        in_channels,
        out_channels,
        stride=1,
        preact=True,
    ):
        super().__init__()
        self.conv = WeightStandardizedConv2d(
            in_channels, out_channels, 1, stride=stride, eps=1e-8, padding=config.global_padding
        )
        self.norm = (
            nn.Identity()
            if preact
            else BitGroupNormActivation(config, num_channels=out_channels, apply_activation=False)
        )

    def forward(self, x):
        return self.norm(self.conv(x))


class BitStage(nn.Module):
    r"""
    A ResNet v2 stage composed by stacked layers.
    """

    def __init__(
        self,
        config,
        in_channels,
        out_channels,
        stride,
        dilation,
        depth,
        bottle_ratio=0.25,
        layer_dropout=None,
    ):
        super().__init__()

        first_dilation = 1 if dilation in (1, 2) else 2

        # Get the layer type
        if config.layer_type == "bottleneck":
            layer_cls = BitBottleneckLayer
        else:
            layer_cls = BitPreActivationBottleneckLayer

        prev_chs = in_channels
        self.layers = nn.Sequential()
        for layer_idx in range(depth):
            # Get the current hyper-parameters
            stride, drop_path_rate, is_first_layer = self._get_updated_hyperparameters(
                layer_idx, stride, layer_dropout
            )

            self.layers.add_module(
                str(layer_idx),
                layer_cls(
                    config,
                    prev_chs,
                    out_channels,
                    stride=stride,
                    dilation=dilation,
                    bottle_ratio=bottle_ratio,
                    first_dilation=first_dilation,
                    drop_path_rate=drop_path_rate,
                    is_first_layer=is_first_layer,
                ),
            )
            prev_chs = out_channels
            first_dilation = dilation

    def _get_updated_hyperparameters(self, layer_idx, stride, layer_dropout):
        r"""
        Get the new hyper-parameters with respect to the previous ones and the index of the current layer.
        """
        if layer_dropout:
            drop_path_rate = layer_dropout[layer_idx]
        else:
            drop_path_rate = 0.0

        if layer_idx != 0:
            stride = 1

        is_first_layer = layer_idx == 0

        return stride, drop_path_rate, is_first_layer

    def forward(self, input: Tensor) -> Tensor:
        hidden_state = input
        for _, layer in enumerate(self.layers):
            hidden_state = layer(hidden_state)
        return hidden_state


class BitEncoder(nn.Module):
    def __init__(self, config: BitConfig):
        super().__init__()
        self.stages = nn.ModuleList([])

        prev_chs = config.embedding_size

        # These needs to stay hardcoded
        current_stride = 4
        dilation = 1

        layer_dropouts = [
            x.tolist()
            for x in torch.Tensor(np.linspace(0, config.drop_path_rate, sum(config.depths))).split(config.depths)
        ]

        for stage_idx, (current_depth, current_hidden_size, layer_dropout) in enumerate(
            zip(config.depths, config.hidden_sizes, layer_dropouts)
        ):
            # Get the updated hyper params
            out_channels, stride, dilation = self._get_updated_hyperparameters(
                stage_idx, current_stride, current_hidden_size, dilation, config
            )

            stage = BitStage(
                config,
                prev_chs,
                out_channels,
                stride=stride,
                dilation=dilation,
                depth=current_depth,
                layer_dropout=layer_dropout,
            )

            prev_chs = out_channels
            current_stride *= stride

            self.stages.add_module(str(stage_idx), stage)

    def _get_updated_hyperparameters(self, stage_idx, current_stride, current_hidden_size, dilation, config):
        out_channels = make_div(current_hidden_size * config.width_factor)
        stride = 1 if stage_idx == 0 else 2
        if current_stride >= config.output_stride:
            dilation *= stride
            stride = 1
        return out_channels, stride, dilation

    def forward(
        self, hidden_state: Tensor, output_hidden_states: bool = False, return_dict: bool = True
    ) -> BaseModelOutputWithNoAttention:
        hidden_states = () if output_hidden_states else None

        for stage_module in self.stages:
            if output_hidden_states:
                hidden_states = hidden_states + (hidden_state,)

            hidden_state = stage_module(hidden_state)

        if output_hidden_states:
            hidden_states = hidden_states + (hidden_state,)

        if not return_dict:
            return tuple(v for v in [hidden_state, hidden_states] if v is not None)

        return BaseModelOutputWithNoAttention(
            last_hidden_state=hidden_state,
            hidden_states=hidden_states,
        )


@auto_docstring
class BitPreTrainedModel(PreTrainedModel):
    config: BitConfig
    base_model_prefix = "bit"
    main_input_name = "pixel_values"
    _no_split_modules = ["BitEmbeddings"]

    def _init_weights(self, module):
        if isinstance(module, nn.Conv2d):
            nn.init.kaiming_normal_(module.weight, mode="fan_out", nonlinearity="relu")
        elif isinstance(module, nn.Linear):
            nn.init.kaiming_uniform_(module.weight, a=math.sqrt(5))
            if module.bias is not None:
                fan_in, _ = nn.init._calculate_fan_in_and_fan_out(module.weight)
                bound = 1 / math.sqrt(fan_in) if fan_in > 0 else 0
                nn.init.uniform_(module.bias, -bound, bound)
        elif isinstance(module, (nn.BatchNorm2d, nn.GroupNorm)):
            nn.init.constant_(module.weight, 1)
            nn.init.constant_(module.bias, 0)


@auto_docstring
class BitModel(BitPreTrainedModel):
    def __init__(self, config):
        super().__init__(config)
        self.config = config

        self.embedder = BitEmbeddings(config)

        self.encoder = BitEncoder(config)
        self.norm = (
            BitGroupNormActivation(config, num_channels=config.hidden_sizes[-1])
            if config.layer_type == "preactivation"
            else nn.Identity()
        )

        self.pooler = nn.AdaptiveAvgPool2d((1, 1))
        # Initialize weights and apply final processing
        self.post_init()

    @auto_docstring
    def forward(
        self, pixel_values: Tensor, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None
    ) -> BaseModelOutputWithPoolingAndNoAttention:
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        embedding_output = self.embedder(pixel_values)

        encoder_outputs = self.encoder(
            embedding_output, output_hidden_states=output_hidden_states, return_dict=return_dict
        )

        last_hidden_state = encoder_outputs[0]

        last_hidden_state = self.norm(last_hidden_state)

        pooled_output = self.pooler(last_hidden_state)

        if not return_dict:
            return (last_hidden_state, pooled_output) + encoder_outputs[1:]

        return BaseModelOutputWithPoolingAndNoAttention(
            last_hidden_state=last_hidden_state,
            pooler_output=pooled_output,
            hidden_states=encoder_outputs.hidden_states,
        )


@auto_docstring(
    custom_intro="""
    BiT Model with an image classification head on top (a linear layer on top of the pooled features), e.g. for
    ImageNet.
    """
)
class BitForImageClassification(BitPreTrainedModel):
    def __init__(self, config):
        super().__init__(config)
        self.num_labels = config.num_labels
        self.bit = BitModel(config)
        # classification head
        self.classifier = nn.Sequential(
            nn.Flatten(),
            nn.Linear(config.hidden_sizes[-1], config.num_labels) if config.num_labels > 0 else nn.Identity(),
        )
        # initialize weights and apply final processing
        self.post_init()

    @auto_docstring
    def forward(
        self,
        pixel_values: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> ImageClassifierOutputWithNoAttention:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the image classification/regression loss. Indices should be in `[0, ...,
            config.num_labels - 1]`. If `config.num_labels > 1` a classification loss is computed (Cross-Entropy).
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        outputs = self.bit(pixel_values, output_hidden_states=output_hidden_states, return_dict=return_dict)

        pooled_output = outputs.pooler_output if return_dict else outputs[1]

        logits = self.classifier(pooled_output)

        loss = None

        if labels is not None:
            if self.config.problem_type is None:
                if self.num_labels == 1:
                    self.config.problem_type = "regression"
                elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int):
                    self.config.problem_type = "single_label_classification"
                else:
                    self.config.problem_type = "multi_label_classification"
            if self.config.problem_type == "regression":
                loss_fct = MSELoss()
                if self.num_labels == 1:
                    loss = loss_fct(logits.squeeze(), labels.squeeze())
                else:
                    loss = loss_fct(logits, labels)
            elif self.config.problem_type == "single_label_classification":
                loss_fct = CrossEntropyLoss()
                loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
            elif self.config.problem_type == "multi_label_classification":
                loss_fct = BCEWithLogitsLoss()
                loss = loss_fct(logits, labels)

        if not return_dict:
            output = (logits,) + outputs[2:]
            return (loss,) + output if loss is not None else output

        return ImageClassifierOutputWithNoAttention(loss=loss, logits=logits, hidden_states=outputs.hidden_states)


@auto_docstring(
    custom_intro="""
    BiT backbone, to be used with frameworks like DETR and MaskFormer.
    """
)
class BitBackbone(BitPreTrainedModel, BackboneMixin):
    def __init__(self, config):
        super().__init__(config)
        super()._init_backbone(config)

        self.bit = BitModel(config)
        self.num_features = [config.embedding_size] + config.hidden_sizes

        # initialize weights and apply final processing
        self.post_init()

    @auto_docstring
    def forward(
        self, pixel_values: Tensor, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None
    ) -> BackboneOutput:
        r"""
        Examples:

        ```python
        >>> from transformers import AutoImageProcessor, AutoBackbone
        >>> import torch
        >>> from PIL import Image
        >>> import requests

        >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg"
        >>> image = Image.open(requests.get(url, stream=True).raw)

        >>> processor = AutoImageProcessor.from_pretrained("google/bit-50")
        >>> model = AutoBackbone.from_pretrained("google/bit-50")

        >>> inputs = processor(image, return_tensors="pt")
        >>> outputs = model(**inputs)
        ```"""
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )

        outputs = self.bit(pixel_values, output_hidden_states=True, return_dict=True)

        hidden_states = outputs.hidden_states

        feature_maps = ()
        for idx, stage in enumerate(self.stage_names):
            if stage in self.out_features:
                feature_maps += (hidden_states[idx],)

        if not return_dict:
            output = (feature_maps,)
            if output_hidden_states:
                output += (outputs.hidden_states,)
            return output

        return BackboneOutput(
            feature_maps=feature_maps,
            hidden_states=outputs.hidden_states if output_hidden_states else None,
            attentions=None,
        )


__all__ = ["BitForImageClassification", "BitModel", "BitPreTrainedModel", "BitBackbone"]
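
# Illustrative usage sketch (comment only, not executed on import): it mirrors the doctest in
# `BitBackbone.forward` above, but for the classification head defined in this module. It assumes the
# `google/bit-50` checkpoint is available and that `image` is a `PIL.Image`, e.g. loaded as in that doctest.
#
#   from transformers import AutoImageProcessor, BitForImageClassification
#   import torch
#
#   processor = AutoImageProcessor.from_pretrained("google/bit-50")
#   model = BitForImageClassification.from_pretrained("google/bit-50")
#
#   inputs = processor(image, return_tensors="pt")
#   with torch.no_grad():
#       logits = model(**inputs).logits
#   predicted_class = model.config.id2label[logits.argmax(-1).item()]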