
import inspect
import os
import re
import textwrap
from pathlib import Path
from typing import Optional, Union, get_args

# NOTE: the source modules of these imports are inferred from the imported names;
# adjust the paths if the surrounding package layout differs.
from .doc import (
    MODELS_TO_PIPELINE,
    PIPELINE_TASKS_TO_SAMPLE_DOCSTRINGS,
    PT_SAMPLE_DOCSTRINGS,
    _prepare_output_docstrings,
)
from .generic import ModelOutput


PATH_TO_TRANSFORMERS = Path("src").resolve() / "transformers"

AUTODOC_FILES = [
    "configuration_*.py",
    "modeling_*.py",
    "tokenization_*.py",
    "processing_*.py",
    "image_processing_*_fast.py",
    "image_processing_*.py",
    "feature_extractor_*.py",
]

PLACEHOLDER_TO_AUTO_MODULE = {
    "image_processor_class": ("image_processing_auto", "IMAGE_PROCESSOR_MAPPING_NAMES"),
    "video_processor_class": ("video_processing_auto", "VIDEO_PROCESSOR_MAPPING_NAMES"),
    "feature_extractor_class": ("feature_extraction_auto", "FEATURE_EXTRACTOR_MAPPING_NAMES"),
    "processor_class": ("processing_auto", "PROCESSOR_MAPPING_NAMES"),
    "config_class": ("configuration_auto", "CONFIG_MAPPING_NAMES"),
}

UNROLL_KWARGS_METHODS = {"preprocess"}
UNROLL_KWARGS_CLASSES = {"ImageProcessorFast"}

HARDCODED_CONFIG_FOR_MODELS = {
    "openai": "OpenAIGPTConfig",
    "x-clip": "XCLIPConfig",
    "kosmos2": "Kosmos2Config",
    "donut": "DonutSwinConfig",
    "esmfold": "EsmConfig",
}

_re_checkpoint = re.compile(r"\[(.+?)\]\((https://huggingface\.co/.+?)\)")


class ImageProcessorArgs:
    images = {
        "description": """
    Image to preprocess. Expects a single or batch of images with pixel values ranging from 0 to 255. If
    passing in images with pixel values between 0 and 1, set `do_rescale=False`.
    Ndescriptionshapez
    Video to preprocess. Expects a single or batch of videos with pixel values ranging from 0 to 255. If
    passing in videos with pixel values between 0 and 1, set `do_rescale=False`.
    """,
        "shape": None,
    }
    do_resize = {
        "description": """
    Whether to resize the image.
    """,
        "shape": None,
    }
    size = {
        "description": """
    Describes the maximum input dimensions to the model.
    """,
        "shape": None,
    }
    default_to_square = {
        "description": """
    Whether to default to a square image when resizing, if size is an int.
    """,
        "shape": None,
    }
    resample = {
        "description": """
    Resampling filter to use if resizing the image. This can be one of the enum `PILImageResampling`. Only
    has an effect if `do_resize` is set to `True`.
    """,
        "shape": None,
    }
    do_center_crop = {
        "description": """
    Whether to center crop the image.
    """,
        "shape": None,
    }
    crop_size = {
        "description": """
    Size of the output image after applying `center_crop`.
    """,
        "shape": None,
    }
    do_rescale = {
        "description": """
    Whether to rescale the image.
    """,
        "shape": None,
    }
    rescale_factor = {
        "description": """
    Rescale factor to rescale the image by if `do_rescale` is set to `True`.
    """,
        "shape": None,
    }
    do_normalize = {
        "description": """
    Whether to normalize the image.
    """,
        "shape": None,
    }
    image_mean = {
        "description": """
    Image mean to use for normalization. Only has an effect if `do_normalize` is set to `True`.
    """,
        "shape": None,
    }
    image_std = {
        "description": """
    Image standard deviation to use for normalization. Only has an effect if `do_normalize` is set to
    `True`.
    """,
        "shape": None,
    }
    do_convert_rgb = {
        "description": """
    Whether to convert the image to RGB.
    """,
        "shape": None,
    }
    return_tensors = {
        "description": """
    Returns stacked tensors if set to `pt`, otherwise returns a list of tensors.
    """,
        "shape": None,
    }
    data_format = {
        "description": """
    Only `ChannelDimension.FIRST` is supported. Added for compatibility with slow processors.
    """,
        "shape": None,
    }
    input_data_format = {
        "description": """
    The channel dimension format for the input image. If unset, the channel dimension format is inferred
    from the input image. Can be one of:
    - `"channels_first"` or `ChannelDimension.FIRST`: image in (num_channels, height, width) format.
    - `"channels_last"` or `ChannelDimension.LAST`: image in (height, width, num_channels) format.
    - `"none"` or `ChannelDimension.NONE`: image in (height, width) format.
    """,
        "shape": None,
    }
    device = {
        "description": """
    The device to process the images on. If unset, the device is inferred from the input images.
    """,
        "shape": None,
    }
    disable_grouping = {
        "description": """
    Whether to disable grouping of images by size to process them individually and not in batches.
    If None, will be set to True if the images are on CPU, and False otherwise. This choice is based on
    empirical observations, as detailed here: https://github.com/huggingface/transformers/pull/38157
    """,
        "shape": None,
    }
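# Sketch of how these tables are consumed (illustrative only; the rendered type always
# comes from the decorated function's signature, not from this table):
#
#     get_args_doc_from_source(ImageProcessorArgs)["do_resize"]["description"]
#     # -> "\n    Whether to resize the image.\n    "
#
# `get_args_doc_from_source` is defined further down in this module.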
dZdddZ	dddZ
dddZdddZdddZdddZdddZdddZdddZdddZdddZdddZdddZdddZdddZdddZd d!dZd"ddZd#ddZd$ddZd%ddZd&d'dZd(d'dZd)ddZ d*ddZ!d+ddZ"d,d-dZ#d.d/dZ$d0ddZ%d1ddZ&d2d3dZ'd4d5dZ(d6d7dZ)y)8	ModelArgsa7  
    Labels for computing the masked language modeling loss. Indices should either be in `[0, ...,
    config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored
    (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`.
    """,
        "shape": "of shape `(batch_size, sequence_length)`",
    }
    num_logits_to_keep = {
        "description": """
    Calculate logits for the last `num_logits_to_keep` tokens. If `0`, calculate logits for all
    `input_ids` (special case). Only last token logits are needed for generation, and calculating them only for that
    token can save memory, which becomes pretty significant for long sequences or large vocabulary size.
    """,
        "shape": None,
    }
    input_ids = {
        "description": """
    Indices of input sequence tokens in the vocabulary. Padding will be ignored by default.

    Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
    [`PreTrainedTokenizer.__call__`] for details.

    [What are input IDs?](../glossary#input-ids)
    """,
        "shape": "of shape `(batch_size, sequence_length)`",
    }
    input_values = {
        "description": """
    Float values of input raw speech waveform. Values can be obtained by loading a `.flac` or `.wav` audio file
    into an array of type `list[float]`, a `numpy.ndarray` or a `torch.Tensor`, *e.g.* via the torchcodec library
    (`pip install torchcodec`) or the soundfile library (`pip install soundfile`).
    To prepare the array into `input_values`, the [`AutoProcessor`] should be used for padding and conversion
    into a tensor of type `torch.FloatTensor`. See [`{processor_class}.__call__`] for details.
    """,
        "shape": "of shape `(batch_size, sequence_length)`",
    }
    attention_mask = {
        "description": """
    Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:

    - 1 for tokens that are **not masked**,
    - 0 for tokens that are **masked**.

    [What are attention masks?](../glossary#attention-mask)
    """,
        "shape": "of shape `(batch_size, sequence_length)`",
    }
    head_mask = {
        "description": """
    Mask to nullify selected heads of the self-attention modules. Mask values selected in `[0, 1]`:

    - 1 indicates the head is **not masked**,
    - 0 indicates the head is **masked**.
    """,
        "shape": "of shape `(num_heads,)` or `(num_layers, num_heads)`",
    }
    cross_attn_head_mask = {
        "description": """
    Mask to nullify selected heads of the cross-attention modules. Mask values selected in `[0, 1]`:

    - 1 indicates the head is **not masked**,
    - 0 indicates the head is **masked**.
    """,
        "shape": "of shape `(num_layers, num_heads)`",
    }
    decoder_attention_mask = {
        "description": """
    Mask to avoid performing attention on certain token indices. By default, a causal mask will be used, to
    make sure the model can only look at previous inputs in order to predict the future.
    """,
        "shape": "of shape `(batch_size, target_sequence_length)`",
    }
    decoder_head_mask = {
        "description": """
    Mask to nullify selected heads of the attention modules in the decoder. Mask values selected in `[0, 1]`:

    - 1 indicates the head is **not masked**,
    - 0 indicates the head is **masked**.
    """,
        "shape": "of shape `(decoder_layers, decoder_attention_heads)`",
    }
    encoder_hidden_states = {
        "description": """
    Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention
    if the model is configured as a decoder.
    """,
        "shape": "of shape `(batch_size, sequence_length, hidden_size)`",
    }
    encoder_attention_mask = {
        "description": """
    Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in
    the cross-attention if the model is configured as a decoder. Mask values selected in `[0, 1]`:

    - 1 for tokens that are **not masked**,
    - 0 for tokens that are **masked**.
    """,
        "shape": "of shape `(batch_size, sequence_length)`",
    }
    token_type_ids = {
        "description": """
    Segment token indices to indicate first and second portions of the inputs. Indices are selected in `[0, 1]`:

    - 0 corresponds to a *sentence A* token,
    - 1 corresponds to a *sentence B* token.

    [What are token type IDs?](../glossary#token-type-ids)
    """,
        "shape": "of shape `(batch_size, sequence_length)`",
    }
    position_ids = {
        "description": """
    Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0, config.n_positions - 1]`.

    [What are position IDs?](../glossary#position-ids)
    """,
        "shape": "of shape `(batch_size, sequence_length)`",
    }
    past_key_values = {
        "description": """
    Pre-computed hidden-states (key and values in the self-attention blocks and in the cross-attention
    blocks) that can be used to speed up sequential decoding. This typically consists in the `past_key_values`
    returned by the model at a previous stage of decoding, when `use_cache=True` or `config.use_cache=True`.

    Only [`~cache_utils.Cache`] instance is allowed as input, see our [kv cache guide](https://huggingface.co/docs/transformers/en/kv_cache).
    If no `past_key_values` are passed, [`~cache_utils.DynamicCache`] will be initialized by default.

    The model will output the same cache format that is fed as input.

    If `past_key_values` are used, the user is expected to input only unprocessed `input_ids` (those that don't
    have their past key value states given to this model) of shape `(batch_size, unprocessed_length)` instead of all `input_ids`
    of shape `(batch_size, sequence_length)`.
    """,
        "shape": None,
    }
    past_key_value = {
        "description": """
    deprecated in favor of `past_key_values`
    """,
        "shape": None,
    }
    inputs_embeds = {
        "description": """
    Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This
    is useful if you want more control over how to convert `input_ids` indices into associated vectors than the
    model's internal embedding lookup matrix.
    """,
        "shape": "of shape `(batch_size, sequence_length, hidden_size)`",
    }
    decoder_input_ids = {
        "description": """
    Indices of decoder input sequence tokens in the vocabulary.

    Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
    [`PreTrainedTokenizer.__call__`] for details.

    [What are decoder input IDs?](../glossary#decoder-input-ids)
    """,
        "shape": "of shape `(batch_size, target_sequence_length)`",
    }
    decoder_inputs_embeds = {
        "description": """
    Optionally, instead of passing `decoder_input_ids` you can choose to directly pass an embedded
    representation. If `past_key_values` is used, optionally only the last `decoder_inputs_embeds` have to be
    input (see `past_key_values`). This is useful if you want more control over how to convert
    `decoder_input_ids` indices into associated vectors than the model's internal embedding lookup matrix.

    If `decoder_input_ids` and `decoder_inputs_embeds` are both unset, `decoder_inputs_embeds` takes the value
    of `inputs_embeds`.
    """,
        "shape": "of shape `(batch_size, target_sequence_length, hidden_size)`",
    }
    use_cache = {
        "description": """
    If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see
    `past_key_values`).
    """,
        "shape": None,
    }
    output_attentions = {
        "description": """
    Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
    tensors for more detail.
    """,
        "shape": None,
    }
    output_hidden_states = {
        "description": """
    Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
    more detail.
    """,
        "shape": None,
    }
    return_dict = {
        "description": """
    Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
    """,
        "shape": None,
    }
    cache_position = {
        "description": """
    Indices depicting the position of the input sequence tokens in the sequence. Contrarily to `position_ids`,
    this tensor is not affected by padding. It is used to update the cache in the correct position and to infer
    the complete sequence length.
    """,
        "shape": "of shape `(sequence_length)`",
    }
    hidden_states = {
        "description": "",
        "shape": " input to the layer of shape `(batch, seq_len, embed_dim)`",
    }
    interpolate_pos_encoding = {
        "description": """
    Whether to interpolate the pre-trained position encodings.
    """,
        "shape": None,
    }
    position_embeddings = {
        "description": """
    Tuple containing the cosine and sine positional embeddings of shape `(batch_size, seq_len, head_dim)`,
    with `head_dim` being the embedding dimension of each attention head.
    """,
        "shape": None,
    }
    config = {
        "description": """
    Model configuration class with all the parameters of the model. Initializing with a config file does not
    load the weights associated with the model, only the configuration. Check out the
    [`~PreTrainedModel.from_pretrained`] method to load the model weights.
    """,
        "shape": None,
    }
    start_positions = {
        "description": """
    Labels for position (index) of the start of the labelled span for computing the token classification loss.
    Positions are clamped to the length of the sequence (`sequence_length`). Position outside of the sequence
    are not taken into account for computing the loss.
    zof shape `(batch_size,)`a  
    Labels for position (index) of the end of the labelled span for computing the token classification loss.
    Positions are clamped to the length of the sequence (`sequence_length`). Position outside of the sequence
    are not taken into account for computing the loss.
    """,
        "shape": "of shape `(batch_size,)`",
    }
    encoder_outputs = {
        "description": """
    Tuple consists of (`last_hidden_state`, *optional*: `hidden_states`, *optional*: `attentions`)
    `last_hidden_state` of shape `(batch_size, sequence_length, hidden_size)`, *optional*) is a sequence of
    hidden-states at the output of the last layer of the encoder. Used in the cross-attention of the decoder.
    """,
        "shape": None,
    }
    output_router_logits = {
        "description": """
    Whether or not to return the logits of all the routers. They are useful for computing the router loss, and
    should not be returned during inference.
    """,
        "shape": None,
    }
    logits_to_keep = {
        "description": """
    If an `int`, compute logits for the last `logits_to_keep` tokens. If `0`, calculate logits for all
    `input_ids` (special case). Only last token logits are needed for generation, and calculating them only for that
    token can save memory, which becomes pretty significant for long sequences or large vocabulary size.
    If a `torch.Tensor`, must be 1D corresponding to the indices to keep in the sequence length dimension.
    This is useful when using packed tensor format (single dimension for batch and sequence length).
    """,
        "shape": None,
    }
    pixel_values = {
        "description": """
    The tensors corresponding to the input images. Pixel values can be obtained using
    [`{image_processor_class}`]. See [`{image_processor_class}.__call__`] for details ([`{processor_class}`] uses
    [`{image_processor_class}`] for processing images).
    """,
        "shape": "of shape `(batch_size, num_channels, image_size, image_size)`",
    }
    pixel_values_videos = {
        "description": """
    The tensors corresponding to the input video. Pixel values for videos can be obtained using
    [`{video_processor_class}`]. See [`{video_processor_class}.__call__`] for details ([`{processor_class}`] uses
    [`{video_processor_class}`] for processing videos).
    """,
        "shape": "of shape `(batch_size, num_frames, num_channels, frame_size, frame_size)`",
    }
    vision_feature_layer = {
        "description": """
    The index of the layer to select the vision feature. If multiple indices are provided,
    the vision feature of the corresponding indices will be concatenated to form the
    vision features.
    """,
        "shape": None,
    }
    vision_feature_select_strategy = {
        "description": """
    The feature selection strategy used to select the vision feature from the vision backbone.
    Can be one of `"default"` or `"full"`.
    """,
        "shape": None,
    }
    image_sizes = {
        "description": """
    The sizes of the images in the batch, being (height, width) for each image.
    zof shape `(batch_size, 2)`a  
    Mask to avoid performing attention on padding pixel values. Mask values selected in `[0, 1]`:

    - 1 for pixels that are real (i.e. **not masked**),
    - 0 for pixels that are padding (i.e. **masked**).

    [What are attention masks?](../glossary#attention-mask)
    """,
        "shape": "of shape `(batch_size, height, width)`",
    }
    input_features = {
        "description": """
    The tensors corresponding to the input audio features. Audio features can be obtained using
    [`{feature_extractor_class}`]. See [`{feature_extractor_class}.__call__`] for details ([`{processor_class}`] uses
    [`{feature_extractor_class}`] for processing audios).
    """,
        "shape": "of shape `(batch_size, sequence_length, feature_dim)`",
    }


class ModelOutputArgs:
    last_hidden_state = {
        "description": """
    Sequence of hidden-states at the output of the last layer of the model.
    """,
        "shape": "of shape `(batch_size, sequence_length, hidden_size)`",
    }
    past_key_values = {
        "description": """
    It is a [`~cache_utils.Cache`] instance. For more details, see our [kv cache guide](https://huggingface.co/docs/transformers/en/kv_cache).

    Contains pre-computed hidden-states (key and values in the self-attention blocks and optionally if
    `config.is_encoder_decoder=True` in the cross-attention blocks) that can be used (see `past_key_values`
    input) to speed up sequential decoding.
    """,
        "shape": None,
        "additional_info": "returned when `use_cache=True` is passed or when `config.use_cache=True`",
    }
    hidden_states = {
        "description": """
    Tuple of `torch.FloatTensor` (one for the output of the embeddings, if the model has an embedding layer, +
    one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`.

    Hidden-states of the model at the output of each layer plus the optional initial embedding outputs.
    z^returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`a   
    Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length,
    sequence_length)`.

    Attentions weights after the attention softmax, used to compute the weighted average in the self-attention
    heads.
    zXreturned when `output_attentions=True` is passed or when `config.output_attentions=True`zV
    Last layer hidden-state after a pooling operation on the spatial dimensions.
    """,
        "shape": "of shape `(batch_size, hidden_size)`",
    }
    cross_attentions = {
        "description": """
    Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length,
    sequence_length)`.

    Attentions weights of the decoder's cross-attention layer, after the attention softmax, used to compute the
    weighted average in the cross-attention heads.
    """,
        "shape": None,
        "additional_info": "returned when `output_attentions=True` is passed or when `config.output_attentions=True`",
    }
    decoder_hidden_states = {
        "description": """
    Tuple of `torch.FloatTensor` (one for the output of the embeddings, if the model has an embedding layer, +
    one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`.

    Hidden-states of the decoder at the output of each layer plus the initial embedding outputs.
    """,
        "shape": None,
        "additional_info": "returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`",
    }
    decoder_attentions = {
        "description": """
    Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length,
    sequence_length)`.

    Attentions weights of the decoder, after the attention softmax, used to compute the weighted average in the
    self-attention heads.
    """,
        "shape": None,
        "additional_info": "returned when `output_attentions=True` is passed or when `config.output_attentions=True`",
    }
    encoder_last_hidden_state = {
        "description": """
    Sequence of hidden-states at the output of the last layer of the encoder of the model.
    """,
        "shape": "of shape `(batch_size, sequence_length, hidden_size)`",
    }
    encoder_hidden_states = {
        "description": """
    Tuple of `torch.FloatTensor` (one for the output of the embeddings, if the model has an embedding layer, +
    one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`.

    Hidden-states of the encoder at the output of each layer plus the initial embedding outputs.
    """,
        "shape": None,
        "additional_info": "returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`",
    }
    encoder_attentions = {
        "description": """
    Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length,
    sequence_length)`.

    Attentions weights of the encoder, after the attention softmax, used to compute the weighted average in the
    self-attention heads.
    """,
        "shape": None,
        "additional_info": "returned when `output_attentions=True` is passed or when `config.output_attentions=True`",
    }
    router_logits = {
        "description": """
    Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, sequence_length, num_experts)`.

    Router logits of the model, useful to compute the auxiliary loss for Mixture of Experts models.
    zZreturned when `output_router_logits=True` is passed or when `config.add_router_probs=True`a  
    Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, sequence_length, num_experts)`.

    Raw router probabilities that are computed by MoE routers, these terms are used to compute the auxiliary
    loss and the z_loss for Mixture of Experts models.
    zreturned when `output_router_probs=True` and `config.add_router_probs=True` is passed or when `config.output_router_probs=True`z(
    z_loss for the sparse modules.
    """,
        "shape": None,
        "additional_info": "returned when `labels` is provided",
    }
    aux_loss = {
        "description": """
    aux_loss for the sparse modules.
    """,
        "shape": None,
        "additional_info": "returned when `labels` is provided",
    }
    start_logits = {
        "description": """
    Span-start scores (before SoftMax).
    rI   z+
    Span-end scores (before SoftMax).
    """,
        "shape": "of shape `(batch_size, sequence_length)`",
    }
    feature_maps = {
        "description": """
    Feature maps of the stages.
    """,
        "shape": "of shape `(batch_size, num_channels, height, width)`",
    }
    reconstruction = {
        "description": """
    Reconstructed / completed images.
    """,
        "shape": "of shape `(batch_size, num_channels, height, width)`",
    }
    spectrogram = {
        "description": """
    The predicted spectrogram.
    """,
        "shape": "of shape `(batch_size, sequence_length, num_bins)`",
    }
    predicted_depth = {
        "description": """
    Predicted depth for each pixel.
    """,
        "shape": "of shape `(batch_size, height, width)`",
    }
    sequences = {
        "description": """
    Sampled values from the chosen distribution.
    """,
        "shape": "of shape `(batch_size, num_samples, prediction_length)` or `(batch_size, num_samples, prediction_length, input_size)`",
    }
    params = {
        "description": """
    Parameters of the chosen distribution.
    """,
        "shape": "of shape `(batch_size, num_samples, num_params)`",
    }
    loc = {
        "description": """
    Shift values of each time series' context window which is used to give the model inputs of the same
    magnitude and then used to shift back to the original magnitude.
    """,
        "shape": "of shape `(batch_size,)` or `(batch_size, input_size)`",
    }
    scale = {
        "description": """
    Scaling values of each time series' context window which is used to give the model inputs of the same
    magnitude and then used to rescale back to the original magnitude.
    """,
        "shape": "of shape `(batch_size,)` or `(batch_size, input_size)`",
    }
    static_features = {
        "description": """
    Static features of each time series in a batch, which are copied to the covariates at inference time.
    """,
        "shape": "of shape `(batch_size, feature size)`",
    }
    embeddings = {
        "description": """
    Utterance embeddings used for vector similarity-based retrieval.
    """,
        "shape": "of shape `(batch_size, config.xvector_output_dim)`",
    }
    extract_features = {
        "description": """
    Sequence of extracted feature vectors of the last convolutional layer of the model.
    """,
        "shape": "of shape `(batch_size, sequence_length, conv_dim[-1])`",
    }
    projection_state = {
        "description": """
    Text embeddings before the projection layer, used to mimic the last hidden state of the teacher encoder.
    """,
        "shape": "of shape `(batch_size,config.project_dim)`",
    }
    image_hidden_states = {
        "description": """
    Image hidden states of the model produced by the vision encoder and after projecting the last hidden state.
    """,
        "shape": "of shape `(batch_size, num_images, sequence_length, hidden_size)`",
    }
    video_hidden_states = {
        "description": """
    Video hidden states of the model produced by the vision encoder and after projecting the last hidden state.
    """,
        "shape": "of shape `(batch_size * num_frames, num_images, sequence_length, hidden_size)`",
    }


class ClassDocstring:
    PreTrainedModel = """
    This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the
    library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads
    etc.)

    This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass.
    Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage
    and behavior.
    """
    Model = """
    The bare {model_name} Model outputting raw hidden-states without any specific head on top.
    """
    ForPreTraining = """
    The {model_name} Model with a specified pretraining head on top.
    """
    Decoder = """
    The bare {model_name} Decoder outputting raw hidden-states without any specific head on top.
    """
    TextModel = """
    The bare {model_name} Text Model outputting raw hidden-states without any specific head on top.
    """
    ForSequenceClassification = """
    The {model_name} Model with a sequence classification/regression head on top e.g. for GLUE tasks.
    """
    ForQuestionAnswering = """
    The {model_name} transformer with a span classification head on top for extractive question-answering tasks like
    SQuAD (a linear layer on top of the hidden-states output to compute `span start logits` and `span end logits`).
    """
    ForMultipleChoice = """
    The {model_name} Model with a multiple choice classification head on top (a linear layer on top of the pooled output and a
    softmax) e.g. for RocStories/SWAG tasks.
    """
    ForMaskedLM = """
    The {model_name} Model with a `language modeling` head on top.
    """
    ForTokenClassification = """
    The {model_name} transformer with a token classification head on top (a linear layer on top of the hidden-states
    output) e.g. for Named-Entity-Recognition (NER) tasks.
    """
    ForConditionalGeneration = """
    The {model_name} Model for token generation conditioned on other modalities (e.g. image-text-to-text generation).
    """
    ForCausalLM = """
    The {model_name} Model for causal language modeling.
    """
    ImageProcessorFast = """
    Constructs a fast {model_name} image processor.
    """
    Backbone = """
    The {model_name} backbone.
    """
    ForImageClassification = """
    The {model_name} Model with an image classification head on top e.g. for ImageNet.
    """
    ForSemanticSegmentation = """
    The {model_name} Model with a semantic segmentation head on top e.g. for ADE20K, CityScapes.
    """
    ForAudioClassification = """
    The {model_name} Model with an audio classification head on top (a linear layer on top of the pooled
    output).
    """
    ForAudioFrameClassification = """
    The {model_name} Model with a frame classification head on top for tasks like Speaker Diarization.
    """
    ForPrediction = """
    The {model_name} Model with a distribution head on top for time-series forecasting.
    """
    WithProjection = """
    The {model_name} Model with a projection layer on top (a linear layer on top of the pooled output).
    """


class ClassAttrs:
    base_model_prefix = """
    A string indicating the attribute associated to the base model in derived classes of the same architecture adding modules on top of the base model.
    """
    supports_gradient_checkpointing = """
    Whether the model supports gradient checkpointing or not. Gradient checkpointing is a memory-saving technique that trades compute for memory, by storing only a subset of activations (checkpoints) and recomputing the activations that are not stored during the backward pass.
    """
    _no_split_modules = """
    Layers of modules that should not be split across devices should be added to `_no_split_modules`. This can be useful for modules that contains skip connections or other operations that are not compatible with splitting the module across devices. Setting this attribute will enable the use of `device_map="auto"` in the `from_pretrained` method.
    """
    _skip_keys_device_placement = """
    A list of keys to ignore when moving inputs or outputs between devices when using the `accelerate` library.
    """
    _supports_flash_attn = """
    Whether the model's attention implementation supports FlashAttention.
    """
    _supports_sdpa = """
    Whether the model's attention implementation supports SDPA (Scaled Dot Product Attention).
    """
    _supports_flex_attn = """
    Whether the model's attention implementation supports FlexAttention.
    """
    _can_compile_fullgraph = """
    Whether the model can `torch.compile` fullgraph without graph breaks. Models will auto-compile if this flag is set to `True`
    in inference, if a compilable cache is used.
    """
    _supports_attention_backend = """
    Whether the model supports attention interface functions. This flag signals that the model can be used as an efficient backend in TGI and vLLM.
    """
    _tied_weights_keys = """
    A list of `state_dict` keys that are potentially tied to another key in the state_dict.
    """


ARGS_TO_IGNORE = {"self", "args", "kwargs", "deprecated_arguments"}


def get_indent_level(func):
    return (len(func.__qualname__.split(".")) - 1) * 4


def equalize_indent(docstring, indent_level=0):
    """
    Adjust the indentation of a docstring to match the specified indent level.
    """
    docstring = "\n".join([line.lstrip() for line in docstring.splitlines()])
    return textwrap.indent(docstring, " " * indent_level)


def set_min_indent(docstring, indent_level=0):
    """
    Adjust the indentation of a docstring to match the specified indent level.
    """
    return textwrap.indent(textwrap.dedent(docstring), " " * indent_level)


def parse_shape(docstring):
    shape_pattern = re.compile(r"(of shape\s*(?:`.*?`|\(.*?\)))")
    match = shape_pattern.search(docstring)
    if match:
        return " " + match.group(1)
    return None


def parse_default(docstring):
    default_pattern = re.compile(r"(defaults to \s*[^)]*)")
    match = default_pattern.search(docstring)
    if match:
        return " " + match.group(1)
    return None
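# For example (illustrative inputs only):
#
#     parse_shape("`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*")
#     # -> " of shape `(batch_size, sequence_length)`"
#     parse_default("`int`, *optional*, defaults to 4")
#     # -> " defaults to 4"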
def parse_docstring(docstring, max_indent_level=0, return_intro=False):
    """
    Parse the docstring to extract the Args section and return it as a dictionary.
    The docstring is expected to be in the format:
    Args:
        arg1 (type):
            Description of arg1.
        arg2 (type):
            Description of arg2.

    # This function will also return the remaining part of the docstring after the Args section.
    Returns:/Example:
    ...
    z(?m)^([ \t]*)(?=Example|Return)N z(?:Args:)(\n.*)?(\n)?$r   z"""r   zr"""r   z^\s{0,z;}(\w+)\s*\(\s*([^, \)]*)(\s*.*?)\s*\)\s*:\s*((?:(?!\n^\s{0,z}\w+\s*\().)*)      optionalr   ^z    )typer,   r   r-   defaultru   )r   r   startr   DOTALLr   stripr   r   r   r   	MULTILINEfinditerr   r   sub)r   max_indent_levelreturn_intror   remainder_docstringargs_pattern
    """
    # Split off everything from the first `Returns:`/`Example:` heading onwards
    match = re.search(r"(?m)^([ \t]*)(?=Example|Return)", docstring)
    if match:
        remainder_docstring = docstring[match.start() :]
        docstring = docstring[: match.start()]
    else:
        remainder_docstring = ""

    args_pattern = re.compile(r"(?:Args:)(\n.*)?(\n)?$", re.DOTALL)
    args_match = args_pattern.search(docstring)
    docstring_intro = None
    if args_match:
        docstring_intro = docstring[: args_match.start()]
        if docstring_intro.split("\n")[-1].strip() == '"""':
            docstring_intro = "\n".join(docstring_intro.split("\n")[:-1])
        if docstring_intro.split("\n")[0].strip() in ('r"""', '"""'):
            docstring_intro = "\n".join(docstring_intro.split("\n")[1:])
        if docstring_intro.strip() == "":
            docstring_intro = None
    args_section = args_match.group(1).lstrip("\n") if args_match else docstring
    if args_section.split("\n")[-1].strip() == '"""':
        args_section = "\n".join(args_section.split("\n")[:-1])
    if args_section.split("\n")[0].strip() in ('r"""', '"""'):
        args_section = "\n".join(args_section.split("\n")[1:])
    args_section = set_min_indent(args_section, 0)

    args = {}
    if args_section:
        # `param_name (param_type, optional info):` followed by its description block
        param_pattern = re.compile(
            rf"^\s{{0,{max_indent_level}}}(\w+)\s*\(\s*([^, \)]*)(\s*.*?)\s*\)\s*:\s*((?:(?!\n^\s{{0,{max_indent_level}}}\w+\s*\().)*)",
            re.DOTALL | re.MULTILINE,
        )
        for match in param_pattern.finditer(args_section):
            param_name = match.group(1)
            param_type = match.group(2)
            optional_string = match.group(3)
            optional = "optional" in optional_string
            shape = parse_shape(optional_string)
            default = parse_default(optional_string)
            param_description = match.group(4).strip()
            param_description = f"\n{set_min_indent(param_description, 4)}"
            args[param_name] = {
                "type": param_type,
                "description": param_description,
                "optional": optional,
                "shape": shape,
                "default": default,
                "additional_info": None,
            }

    if args and remainder_docstring:
        remainder_docstring = "\n" + remainder_docstring
    remainder_docstring = set_min_indent(remainder_docstring, 0)

    if return_intro:
        return args, remainder_docstring, docstring_intro
    return args, remainder_docstring
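# Round-trip sketch on a toy docstring (hypothetical argument name):
#
#     args, remainder = parse_docstring("Args:\n    x (`int`):\n        A value.\nReturns:\n    int\n")
#     # args["x"]["description"] contains "A value."; remainder starts with "Returns:"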
def contains_type(type_hint, target_type) -> tuple[bool, Optional[type]]:
    """
    Check if a "nested" type hint contains a specific target type,
    return the first-level type containing the target_type if found.
    """
    args = get_args(type_hint)
    if args == ():
        try:
            return issubclass(type_hint, target_type), type_hint
        except Exception:
            return issubclass(type(type_hint), target_type), type_hint
    found_type = [contains_type(arg, target_type)[0] for arg in args]
    found_type_tuple = any(found_type)
    if found_type_tuple:
        type_hint = args[found_type.index(True)]
    return found_type_tuple, type_hint
def get_model_name(obj):
    """
    Get the model name from the file path of the object.
    """
    path = inspect.getsourcefile(obj)
    if path.split(os.path.sep)[-2] != "models":
        return None
    file_name = path.split(os.path.sep)[-1]
    for file_type in AUTODOC_FILES:
        start = file_type.split("*")[0]
        end = file_type.split("*")[-1] if "*" in file_type else ""
        if file_name.startswith(start) and file_name.endswith(end):
            model_name_lowercase = file_name[len(start) : len(file_name) - len(end)]
            return model_name_lowercase
    print(f"🚨 Something went wrong trying to find the model name in the path: {path}")
    return "model"
def get_placeholders_dict(placeholders: list, model_name: str) -> dict:
    """
    Get the dictionary of placeholders for the given model name.
    """
    from transformers.models import auto as auto_module

    placeholders_dict = {}
    for placeholder in placeholders:
        if placeholder in PLACEHOLDER_TO_AUTO_MODULE:
            try:
                place_holder_value = getattr(
                    getattr(auto_module, PLACEHOLDER_TO_AUTO_MODULE[placeholder][0]),
                    PLACEHOLDER_TO_AUTO_MODULE[placeholder][1],
                ).get(model_name, None)
            except ImportError:
                place_holder_value = None
            if place_holder_value is not None:
                if isinstance(place_holder_value, (list, tuple)):
                    place_holder_value = place_holder_value[0]
                placeholders_dict[placeholder] = place_holder_value
            else:
                placeholders_dict[placeholder] = placeholder
    return placeholders_dict
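# Illustrative only — the resolved value depends on the auto mappings at runtime:
#
#     get_placeholders_dict({"image_processor_class"}, "llava")
#     # -> {"image_processor_class": "LlavaImageProcessor"} (or the placeholder itself
#     #    when the model has no registered image processor)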
def format_args_docstring(docstring, model_name):
    """
    Replaces placeholders such as {image_processor_class} in the docstring with the actual values,
    deducted from the model name and the auto modules.
    """
    # Find all the placeholders in the docstring
    placeholders = set(re.findall(r"{(.*?)}", docstring))
    if not placeholders:
        return docstring
    placeholders_dict = get_placeholders_dict(placeholders, model_name)
    for placeholder, value in placeholders_dict.items():
        if value is not None:
            try:
                docstring = docstring.replace(f"{{{placeholder}}}", value)
            except Exception:
                continue
    return docstring


def get_args_doc_from_source(args_classes: Union[object, list[object]]) -> dict:
    if isinstance(args_classes, (list, tuple)):
        args_classes_dict = {}
        for args_class in args_classes:
            args_classes_dict.update(args_class.__dict__)
        return args_classes_dict
    return args_classes.__dict__


def get_checkpoint_from_config_class(config_class):
    checkpoint = None
    config_source = config_class.__doc__ or ""
    checkpoints = _re_checkpoint.findall(config_source)
    for ckpt_name, ckpt_link in checkpoints:
        # Allow the link to end with "/"
        if ckpt_link.endswith("/"):
            ckpt_link = ckpt_link[:-1]
        ckpt_link_from_name = f"https://huggingface.co/{ckpt_name}"
        if ckpt_link == ckpt_link_from_name:
            checkpoint = ckpt_name
            break
    return checkpoint


def add_intro_docstring(func, class_name, parent_class=None, indent_level=0):
    intro_docstring = ""
    if func.__name__ == "forward":
        intro_docstring = rf"""The [`{class_name}`] forward method, overrides the `__call__` special method.

        <Tip>

        Although the recipe for forward pass needs to be defined within this function, one should call the [`Module`]
        instance afterwards instead of this since the former takes care of running the pre and post processing steps while
        the latter silently ignores them.

        </Tip>

        """
        intro_docstring = equalize_indent(intro_docstring, indent_level + 4)
    return intro_docstring


def _get_model_info(func, parent_class):
    """
    Extract model information from a function or its parent class.

    Args:
        func (`function`): The function to extract information from
        parent_class (`class`): Optional parent class of the function
    r   r  Nr   r   r   -r   ModelConfigu   🚨 Config not found for zS. You can manually add it to HARDCODED_CONFIG_FOR_MODELS in utils/auto_docstring.py)r  r  r  r  r  r.  r0   r   KeyErrorHARDCODED_CONFIG_FOR_MODELSr  )r   rG  r#  r  rF  r   s         rF   _get_model_inforO    s.    8 -l;-d3  4G7GJK">215= !  4;;CE ""((-a0J #	"%?%OPQ%RS*>:1= #$L  \99  	#'BB:;OP,01E0F  GZ  [	s   -B6 6,C%$C%c                 >   d}| j                   t        j                  j                  k7  r| j                   }dt	        |      v r:dj                  t	        |      j                  d            j                  dd      }n`t        |d      rS|j                  j                  dd      j                  dd       d	| j                   j                   }|d
    """
    from transformers.models import auto as auto_module

    if parent_class is not None:
        model_name_lowercase = get_model_name(parent_class)
    else:
        model_name_lowercase = get_model_name(func)
    if model_name_lowercase and model_name_lowercase not in getattr(
        getattr(auto_module, PLACEHOLDER_TO_AUTO_MODULE["config_class"][0]),
        PLACEHOLDER_TO_AUTO_MODULE["config_class"][1],
    ):
        model_name_lowercase = model_name_lowercase.replace("_", "-")

    class_name = func.__qualname__.split(".")[0]

    if model_name_lowercase is None:
        config_class = None
    else:
        try:
            config_class = getattr(
                getattr(auto_module, PLACEHOLDER_TO_AUTO_MODULE["config_class"][0]),
                PLACEHOLDER_TO_AUTO_MODULE["config_class"][1],
            )[model_name_lowercase]
        except KeyError:
            if model_name_lowercase in HARDCODED_CONFIG_FOR_MODELS:
                config_class = HARDCODED_CONFIG_FOR_MODELS[model_name_lowercase]
            else:
                config_class = "ModelConfig"
            print(
                f"🚨 Config not found for {model_name_lowercase}. You can manually add it to"
                " HARDCODED_CONFIG_FOR_MODELS in utils/auto_docstring.py"
            )

    return model_name_lowercase, class_name, config_class


def _process_parameter_type(param, param_name, func):
    """
    Process and format a parameter's type annotation.

    Args:
        param (`inspect.Parameter`): The parameter from the function signature
        param_name (`str`): The name of the parameter
        func (`function`): The function the parameter belongs to
    """
    optional = False
    param_type = ""
    if param.annotation != inspect.Parameter.empty:
        param_type = param.annotation
        if "typing" in str(param_type):
            param_type = "".join(str(param_type).split("typing.")).replace("transformers.", "~")
        elif hasattr(param_type, "__qualname__"):
            param_type = f"{param_type.__module__.replace('builtins', '').replace('transformers.', '~')}.{param.annotation.__qualname__}"
            if param_type[0] == ".":
                param_type = param_type[1:]
        param_type = str(param_type)
        if "ForwardRef" in param_type:
            param_type = re.sub(r"ForwardRef\('([\w.]+)'\)", r"\1", param_type)
        if "Optional" in param_type:
            param_type = re.sub(r"Optional\[(.*?)\]", r"\1", param_type)
            optional = True
    return param_type, optional


def _get_parameter_info(param_name, documented_params, source_args_dict, param_type, optional):
    """
    Get parameter documentation details from the appropriate source.
    Tensor shape, optional status and description are taken from the custom docstring in priority if available.
    Type is taken from the function signature first, then from the custom docstring if missing from the signature

    Args:
        param_name (`str`): Name of the parameter
        documented_params (`dict`): Dictionary of documented parameters (manually specified in the docstring)
        source_args_dict (`dict`): Default source args dictionary to use if not in documented_params
        param_type (`str`): Current parameter type (may be updated)
        optional (`bool`): Whether the parameter is optional (may be updated)
    Nr   Tz, *optional*r   ru   r   r-   r,   r   r   z, F)r  )r   documented_paramssource_args_dictr   r   r,   r-   shape_stringis_documentedru   optional_strings              rF   _get_parameter_infori  '  s?    KELMO)1orO&& "!*-11&$?K ,->?*:6v>J$Z0<!*-g6 %u2+J78IJPb*:6}EFbI	'	' ,W5&+sU{&z2=A*:6::;LdS*_<tCoUO o{TaaarE   c           
    """
    description = None
    shape = None
    additional_info = None
    is_documented = True
    optional_string = ", *optional*" if optional else ""

    if param_name in documented_params:
        # Manually documented parameters take priority
        if param_type == "" and documented_params[param_name].get("type", None) is not None:
            param_type = documented_params[param_name]["type"]
        optional = documented_params[param_name]["optional"]
        shape = documented_params[param_name]["shape"]
        optional_string = ", *optional*" if optional else ""
        default = documented_params[param_name].get("default", None)
        if default:
            optional_string += f",{default}"
        description = f"{documented_params[param_name]['description']}\n"
    elif param_name in source_args_dict:
        description = source_args_dict[param_name]["description"]
        shape = source_args_dict[param_name]["shape"]
        additional_info = source_args_dict[param_name].get("additional_info", None)
    else:
        is_documented = False

    shape_string = " " + shape.strip() if shape else ""
    if additional_info:
        additional_info = ", " + additional_info
    return description, optional_string, shape_string, additional_info, param_type, is_documented


def _process_regular_parameters(
    sig, func, class_name, documented_params, indent_level, undocumented_parameters
):
    """
    Process all regular parameters (not kwargs parameters) from the function signature.

    Args:
        sig (`inspect.Signature`): Function signature
        func (`function`): Function the parameters belong to
        class_name (`str`): Name of the class
        documented_params (`dict`): Dictionary of parameters that are already documented
        indent_level (`int`): Indentation level
        undocumented_parameters (`list`): List to append undocumented parameters to
    r   , defaults to ``rf   z[`z`]r   r    ():   z<fill_type>z
    """
    docstring = ""
    source_args_dict = get_args_doc_from_source([ModelArgs, ImageProcessorArgs])
    missing_args = {}

    for param_name, param in sig.parameters.items():
        if param_name in ARGS_TO_IGNORE:
            continue
        if param.kind in (inspect.Parameter.VAR_POSITIONAL, inspect.Parameter.VAR_KEYWORD):
            continue

        param_type, optional = _process_parameter_type(param, param_name, func)
        param_default = ""
        if param.default != inspect.Parameter.empty and param.default is not None:
            param_default = f", defaults to `{str(param.default)}`"

        description, optional_string, shape_string, additional_info, param_type, is_documented = _get_parameter_info(
            param_name, documented_params, source_args_dict, param_type, optional
        )

        if is_documented:
            if param_name == "config" and param_type != "":
                param_type = f"[`{param_type.split('.')[-1]}`]"
            elif "`" not in str(param_type) and param_type != "":
                param_type = f"`{param_type}`"
            if additional_info:
                param_docstring = f"{param_name} ({param_type}{optional_string}{additional_info}):\n{description}"
            else:
                param_docstring = f"{param_name} ({param_type}{shape_string}{optional_string}{param_default}):\n{description}"
            docstring += set_min_indent(param_docstring, indent_level + 8)
        else:
            missing_args[param_name] = {
                "type": param_type if param_type != "" else "<fill_type>",
                "description": "\n    <fill_description>",
                "optional": optional,
                "shape": None,
                "default": None,
                "additional_info": None,
            }
            undocumented_parameters.append(
                f"🚨 `{param_name}` is part of {class_name}'s signature, but not documented. Make sure to add it "
                f"to the docstring of the function in {func.__code__.co_filename}"
            )
    return docstring, missing_args


def find_sig_line(lines, line_end):
    # Walk forward until the parenthesis opened on the `def` line is closed
    parenthesis_count = 0
    sig_line_end = line_end
    found_sig = False
    while not found_sig:
        for char in lines[sig_line_end]:
            if char == "(":
                parenthesis_count += 1
            elif char == ")":
                parenthesis_count -= 1
                if parenthesis_count == 0:
                    found_sig = True
                    break
        sig_line_end += 1
    return sig_line_end


def _process_kwargs_parameters(
    sig, func, parent_class, model_name_lowercase, documented_kwargs, indent_level, undocumented_parameters
):
    """
    Process **kwargs parameters if needed.

    Args:
        sig (`inspect.Signature`): Function signature
        func (`function`): Function the parameters belong to
        parent_class (`class`): Parent class of the function
        model_name_lowercase (`str`): Lowercase model name
        documented_kwargs (`dict`): Dictionary of kwargs that are already documented
        indent_level (`int`): Indentation level
        undocumented_parameters (`list`): List to append undocumented parameters to
    """
    docstring = ""
    source_args_dict = get_args_doc_from_source(ImageProcessorArgs)

    # Check whether the method/class asks for its `**kwargs` to be unrolled in the docstring
    unroll_kwargs = func.__name__ in UNROLL_KWARGS_METHODS
    if not unroll_kwargs and parent_class is not None:
        unroll_kwargs = any(
            unroll_kwargs_class in parent_class.__name__ for unroll_kwargs_class in UNROLL_KWARGS_CLASSES
        )

    if unroll_kwargs:
        kwargs_parameters = [
            kwargs_param
            for kwargs_param in sig.parameters.values()
            if kwargs_param.kind == inspect.Parameter.VAR_KEYWORD
        ]
        for kwargs_param in kwargs_parameters:
            if kwargs_param.annotation == inspect.Parameter.empty:
                continue
            # `**kwargs: Unpack[SomeKwargs]` -> document the fields of `SomeKwargs`
            kwargs_documentation = kwargs_param.annotation.__args__[0].__doc__
            if kwargs_documentation is not None:
                documented_kwargs, _ = parse_docstring(kwargs_documentation)

            for param_name, param_type_annotation in kwargs_param.annotation.__args__[0].__annotations__.items():
                param_type = str(param_type_annotation)
                optional = False
                if "typing" in param_type:
                    param_type = "".join(param_type.split("typing.")).replace("transformers.", "~")
                else:
                    param_type = param_type.replace("builtins", "").replace("transformers.", "~")
                if "ForwardRef" in param_type:
                    param_type = re.sub(r"ForwardRef\('([\w.]+)'\)", r"\1", param_type)
                if "Optional" in param_type:
                    param_type = re.sub(r"Optional\[(.*?)\]", r"\1", param_type)
                    optional = True

                param_default = ""
                if parent_class is not None:
                    param_default = str(getattr(parent_class, param_name, ""))
                    param_default = f", defaults to `{param_default}`" if param_default != "" else ""

                description, optional_string, shape_string, additional_info, param_type, is_documented = (
                    _get_parameter_info(param_name, documented_kwargs, source_args_dict, param_type, optional)
                )
                if not is_documented:
                    undocumented_parameters.append(
                        f"🚨 `{param_name}` is part of {kwargs_param.annotation.__args__[0].__qualname__}, but not "
                        f"documented. Make sure to add it to the docstring of the function in {func.__code__.co_filename}"
                    )
                    continue
                if param_type == "":
                    print(
                        f"🚨 {param_name} for {kwargs_param.annotation.__args__[0].__qualname__} in file "
                        f"{func.__code__.co_filename} has no type"
                    )
                if "`" not in str(param_type) and param_type != "":
                    param_type = f"`{param_type}`"
                param_docstring = f"{param_name} ({param_type}{shape_string}{optional_string}{param_default}):\n{description}"
                docstring += set_min_indent(param_docstring, indent_level + 8)
    return docstring


def _process_parameters_section(
    func_documentation, sig, func, class_name, model_name_lowercase, parent_class, indent_level
):
    """
    Process the parameters section of the docstring.

    Args:
        func_documentation (`str`): Existing function documentation (manually specified in the docstring)
        sig (`inspect.Signature`): Function signature
        func (`function`): Function the parameters belong to
        class_name (`str`): Name of the class the function belongs to
        model_name_lowercase (`str`): Lowercase model name
        parent_class (`class`): Parent class of the function (if any)
        indent_level (`int`): Indentation level
    zArgs:
    """
    # Start the Args section
    docstring = set_min_indent("Args:\n", indent_level + 4)
    undocumented_parameters = []
    documented_params = {}
    documented_kwargs = {}
    if func_documentation is not None:
        documented_params, func_documentation = parse_docstring(func_documentation)

    # Regular (named) parameters
    param_docstring, missing_args = _process_regular_parameters(
        sig, func, class_name, documented_params, indent_level, undocumented_parameters
    )
    docstring += param_docstring

    # `**kwargs` parameters, when they should be unrolled
    kwargs_docstring = _process_kwargs_parameters(
        sig, func, parent_class, model_name_lowercase, documented_kwargs, indent_level, undocumented_parameters
    )
    docstring += kwargs_docstring

    if len(undocumented_parameters) > 0:
        print("\n".join(undocumented_parameters))
    return docstring


def _process_returns_section(func_documentation, sig, config_class, indent_level):
    """
    Process the returns section of the docstring.

    Args:
        func_documentation (`str`): Existing function documentation (manually specified in the docstring)
        sig (`inspect.Signature`): Function signature
        config_class (`str`): Config class for the model
        indent_level (`int`): Indentation level
    r   Nz(?m)^([ \t]*)(?=Return)(?m)^([ \t]*)(?=Example)r   )	add_introrR  )r   r   r   r   return_annotationr  rw  r   r   r   r.  )	r  ry  r   r   return_docstringmatch_start	match_endr  r  s	            rF   _process_returns_sectionr  ?  s)     	&II&@BTUU[bII9;MN	1+2C2C2E	HYZ!3IOO4E4G!H1+2C2C2E2GH!#)*:L1<LM /// 
    """
    return_docstring = ""
    match = re.search(r"(?m)^([ \t]*)(?=Return)", func_documentation) if func_documentation is not None else None
    if match is not None:
        # Reuse the manually written Returns section, up to a following Example section
        match_example = re.search(r"(?m)^([ \t]*)(?=Example)", func_documentation)
        if match_example:
            return_docstring = func_documentation[match.start() : match_example.start()]
            func_documentation = func_documentation[match_example.start() :]
        else:
            return_docstring = func_documentation[match.start() :]
            func_documentation = ""
        return_docstring = equalize_indent(return_docstring, indent_level + 4)
    elif sig.return_annotation is not None and sig.return_annotation != inspect.Signature.empty:
        add_intro, return_annotation = contains_type(sig.return_annotation, ModelOutput)
        return_docstring = _prepare_output_docstrings(return_annotation, config_class, add_intro=add_intro)
        return_docstring = return_docstring.replace("typing.", "")
        return_docstring = equalize_indent(return_docstring, indent_level + 4)
    return return_docstring, func_documentation


def _process_example_section(
    func_documentation, func, parent_class, class_name, model_name_lowercase, config_class, checkpoint, indent_level
):
    """
    Process the example section of the docstring.

    Args:
        func_documentation (`str`): Existing function documentation (manually specified in the docstring)
        func (`function`): Function being processed
        parent_class (`class`): Parent class of the function
        class_name (`str`): Name of the class
        model_name_lowercase (`str`): Lowercase model name
        config_class (`str`): Config class for the model
        checkpoint: Checkpoint to use in examples
        indent_level (`int`): Indentation level
    r   r  r   Nr  r   r   r  |r  z...      z<mask>)model_classr=  expected_outputexpected_lossqa_target_start_indexqa_target_end_indexmasku   🚨 No checkpoint found for r   zB. Please add a `checkpoint` arg to `auto_docstring` or add one in z's docstring)r  r=  r  r  r  r  )r  r  r   r   r   r   r   r
    """
    from transformers.models import auto as auto_module

    example_docstring = ""
    match = re.search(r"(?m)^([ \t]*)(?=Example)", func_documentation) if func_documentation is not None else None
    if match is not None:
        # Reuse the manually written example
        example_docstring = func_documentation[match.start() :]
        example_docstring = "\n" + set_min_indent(example_docstring, indent_level + 4)
    elif model_name_lowercase is not None:
        checkpoint_example = checkpoint
        if checkpoint_example is None and config_class is not None:
            config_mapping = auto_module.configuration_auto.CONFIG_MAPPING
            if model_name_lowercase in config_mapping:
                checkpoint_example = get_checkpoint_from_config_class(config_mapping[model_name_lowercase])

        task = rf"({'|'.join(MODELS_TO_PIPELINE.keys())})"
        model_task = re.search(task, model_name_lowercase)
        if model_task is not None:
            if checkpoint_example is not None:
                example_annotation = PIPELINE_TASKS_TO_SAMPLE_DOCSTRINGS[
                    MODELS_TO_PIPELINE[model_task.group(1)]
                ].format(
                    model_class=class_name,
                    checkpoint=checkpoint_example,
                    expected_output="...",
                    expected_loss="...",
                    qa_target_start_index=14,
                    qa_target_end_index=15,
                    mask="<mask>",
                )
                example_docstring = set_min_indent(example_annotation, indent_level + 4)
            else:
                print(
                    f"🚨 No checkpoint found for {model_name_lowercase}. Please add a `checkpoint` arg to"
                    f" `auto_docstring` or add one in {config_class}'s docstring"
                )
        else:
            # Fall back to the generic per-task sample docstrings. The exact structure of
            # `PT_SAMPLE_DOCSTRINGS` is an assumption here: task name -> (sample docstring, auto-mapping name).
            for pipeline_name, (sample_docstring, name_model_list_for_task) in PT_SAMPLE_DOCSTRINGS.items():
                model_list_for_task = getattr(auto_module.modeling_auto, name_model_list_for_task, None)
                if model_list_for_task is not None and class_name in model_list_for_task.values():
                    example_annotation = sample_docstring.format(
                        model_class=class_name,
                        checkpoint=checkpoint_example,
                        expected_output="...",
                        expected_loss="...",
                        qa_target_start_index=14,
                        qa_target_end_index=15,
                        mask="<mask>",
                    )
                    example_docstring = set_min_indent(example_annotation, indent_level + 4)
                    break
    return example_docstring


def auto_method_docstring(func, parent_class=None, custom_intro=None, custom_args=None, checkpoint=None):
    """
    Wrapper that automatically generates docstring.
    r   r   )rF  rG  r   )r  	signaturer   rO  r;  r   r   r  rI  r  r  r  r0  )r   rG  custom_introcustom_argsr=  re  ry  r   r  rF  r   r  r   r  r  s                  rF   auto_method_docstringr    sm    
    """
    # Use inspect to retrieve the method's signature
    sig = inspect.signature(func)
    indent_level = get_indent_level(func) if parent_class is None else get_indent_level(parent_class)
    model_name_lowercase, class_name, config_class = _get_model_info(func, parent_class)

    func_documentation = func.__doc__
    if custom_args is not None:
        # Custom args are prepended so they take priority over the generated entries
        func_documentation = custom_args if func_documentation is None else custom_args + "\n" + func_documentation

    # Intro of the docstring, before the args description
    if custom_intro is not None:
        docstring = equalize_indent(custom_intro, indent_level + 4)
        if not docstring.endswith("\n"):
            docstring += "\n"
    else:
        docstring = add_intro_docstring(
            func, class_name=class_name, parent_class=parent_class, indent_level=indent_level
        )

    # Args section
    docstring += _process_parameters_section(
        func_documentation, sig, func, class_name, model_name_lowercase, parent_class, indent_level
    )

    # Returns section
    return_docstring, func_documentation = _process_returns_section(
        func_documentation, sig, config_class, indent_level
    )
    docstring += return_docstring

    # Example section
    example_docstring = _process_example_section(
        func_documentation, func, parent_class, class_name, model_name_lowercase, config_class, checkpoint, indent_level
    )
    docstring += example_docstring

    docstring = format_args_docstring(docstring, model_name_lowercase)
    func.__doc__ = docstring
    return func
                  nd}t;        |      \  }}| j<                  j?                         D ]i  \  }}tA        |      }d}d|v r1dj                  |j                  d            j                  d d!      }n'|j                  d d!      j                  d"d       d#| }d$|v rt!        jB                  d%d&|      }d'|v rt!        jB                  d(d&|      }d}d}tA        t        | |d            }|dk7  rd)| dnd}tE        |||||      \  }}}}}}|s|dk(  r3tG        d*| d+| jH                   d,| jJ                  jL                   d-       d|v r|nd| d}|r|t7        | d.| | d/| |	d0z         z  }J|t7        | d.| | | | d/| |	d0z         z  }l ntG        d1| j*                   d2       | _        | S c c}w )3zm
    Wrapper that automatically generates a docstring for classes based on their attributes and methods.
    r   r  Fr   r   c              3   4   K   | ]  }|j                     y wr3  r  r  xs     rF   r  z'auto_class_docstring.<locals>.<genexpr>  s     =AQZZ=   )rG  r  zArgs:zParameters:r   c              3   4   K   | ]  }|j                     y wr3  r  r  s     rF   r  z'auto_class_docstring.<locals>.<genexpr>  s     ;!1::;r  TN)rG  r  re  r   r   r   r   rK  r  r  z)$rl  zE` is not registered in the auto doc. Here are the available classes: zv.
Add a `custom_intro` to the decorator if you want to use `auto_docstring` on a class not registered in the auto doc.r   )r  c              3   4   K   | ]  }|j                     y wr3  r  r  s     rF   r  z'auto_class_docstring.<locals>.<genexpr>,  s     >_aqzz>_r  z
Args:
rQ  rR  rS  rT  rU  r   rV  rW  rX  r   rY  rk  r  r  r  r  rm  rn  ro  z/You used `@auto_class_docstring` decorator on `zF` but this class is not part of the AutoMappings. Remove the decorator)'r  r  __mro__r  __init__r;  r.  r8  rt   r   r  r   r   titler  r  r   r,  r   r5  r  r.   
ValueErrorr   r  r  r   r   r   r   r  r-  r]  r   ri  r  r0   r_  r`  )clsr  r  r=  r#  is_dataclassdocstring_init	doc_classdocstring_argsr   r  r  model_name_titlename	pre_blockr   re  r  r   r   r  r   r   r|  rh  rf  ru   r,   rg  s                                rF   auto_class_docstringr    s	   
 8LN===.LLs

''''=1 	 
;s{{;	;KK	9#K.LL#5oF	

 ' 	 $C(L)#.Ymsxx4H4N4Ns4S Tq TUsw 4G7GJK">215= !  4;;CE::388N$;$;$@$@$BCDBGVDrzl*<~bcqczczcc  dB  cC CC C
 	
 rz\-tAwD#'lCI%%d+T!	%I5<<HX<YIDG	NNi[<@XZ	$$):>_SVS^S^>_)_>+I+I*J\ZZI"^,<(=|LLINNI7H'*{{I#29#= q585H5H5N5N5P '1
1 !67
  z)!#)9)9))D!E!M!Mo_b!cJ$.$6$6$L$T$TU_ac$d#eefgqfr!sJ:-!#(CUJ!WJ+!#(<eZ!PJ#H !# #GCR$@ AFSWYFY/- B_a (
4EGWYcemn g
O\?KYf !!R'j\s7G7G6H	RUR^R^RjRjQkkwxy/2j/@*UVFWJ&!^)l"ZL8IK=Y(1,& 	
 "^)l"ZLFWXeWffhithuv(1,& 	I'V 	=cll^  LR  S	
 CKJk !Us   "R)r  r  r=  c                .    fd}| r ||       S |S )a  
    Automatically generates comprehensive docstrings for model classes and methods in the Transformers library.

    This decorator reduces boilerplate by automatically including standard argument descriptions while allowing
    overrides to add new or custom arguments. It inspects function signatures, retrieves predefined docstrings
    for common arguments (like `input_ids`, `attention_mask`, etc.), and generates complete documentation
    including examples and return value descriptions.

    For complete documentation and examples, read this [guide](https://huggingface.co/docs/transformers/auto_docstring).

    Examples of usage:

        Basic usage (no parameters):
        ```python
        @auto_docstring
        class MyAwesomeModel(PreTrainedModel):
            def __init__(self, config, custom_parameter: int = 10):
                r'''
                custom_parameter (`int`, *optional*, defaults to 10):
                    Description of the custom parameter for MyAwesomeModel.
                '''
                super().__init__(config)
                self.custom_parameter = custom_parameter
        ```

        Using `custom_intro` with a class:
        ```python
        @auto_docstring(
            custom_intro="This model implements a novel attention mechanism for improved performance."
        )
        class MySpecialModel(PreTrainedModel):
            def __init__(self, config, attention_type: str = "standard"):
                r'''
                attention_type (`str`, *optional*, defaults to "standard"):
                    Type of attention mechanism to use.
                '''
                super().__init__(config)
        ```

        Using `custom_intro` with a method, and specifying custom arguments and an example directly in the docstring:
        ```python
        @auto_docstring(
            custom_intro="Performs forward pass with enhanced attention computation."
        )
        def forward(
            self,
            input_ids: Optional[torch.Tensor] = None,
            attention_mask: Optional[torch.Tensor] = None,
        ):
            r'''
            custom_parameter (`int`, *optional*, defaults to 10):
                Description of the custom parameter for MyAwesomeModel.

            Example:

            ```python
            >>> model = MyAwesomeModel(config)
            >>> model.forward(input_ids=torch.tensor([1, 2, 3]), attention_mask=torch.tensor([1, 1, 1]))
            ```
            '''
        ```

        Using `custom_args` to define reusable arguments:
        ```python
        VISION_ARGS = r'''
        pixel_values (`torch.FloatTensor`, *optional*):
            Pixel values of the input images.
        image_features (`torch.FloatTensor`, *optional*):
            Pre-computed image features for efficient processing.
        '''

        @auto_docstring(custom_args=VISION_ARGS)
        def encode_images(self, pixel_values=None, image_features=None):
            # ... method implementation
        ```

        Combining `custom_intro` and `custom_args`:
        ```python
        MULTIMODAL_ARGS = r'''
        vision_features (`torch.FloatTensor`, *optional*):
            Pre-extracted vision features from the vision encoder.
        fusion_strategy (`str`, *optional*, defaults to "concat"):
            Strategy for fusing text and vision modalities.
        '''

        @auto_docstring(
            custom_intro="Processes multimodal inputs combining text and vision.",
            custom_args=MULTIMODAL_ARGS
        )
        def forward(
            self,
            input_ids,
            attention_mask=None,
            vision_features=None,
            fusion_strategy="concat"
        ):
            # ... multimodal processing
        ```

        Using with ModelOutput classes:
        ```python
        @dataclass
        @auto_docstring(
            custom_intro="Custom model outputs with additional fields."
        )
        class MyModelOutput(ImageClassifierOutput):
            r'''
            loss (`torch.FloatTensor`, *optional*):
                The loss of the model.
            custom_field (`torch.FloatTensor` of shape `(batch_size, hidden_size)`, *optional*):
                A custom output field specific to this model.
            '''

            # Standard fields like hidden_states, logits, attentions etc. can be automatically documented
            # However, given that the loss docstring is often different per model, you should document it above
            loss: Optional[torch.FloatTensor] = None
            logits: Optional[torch.FloatTensor] = None
            hidden_states: Optional[tuple[torch.FloatTensor, ...]] = None
            attentions: Optional[tuple[torch.FloatTensor, ...]] = None
            custom_field: Optional[torch.FloatTensor] = None
        ```

    Args:
        custom_intro (`str`, *optional*):
            Custom introduction text to add to the docstring. This replaces the default
            introduction text generated by the decorator before the Args section. Use this to describe what
            makes your model or method special.
        custom_args (`str`, *optional*):
            Custom argument documentation in docstring format. This allows you to define
            argument descriptions once and reuse them across multiple methods. The format should follow the
            standard docstring convention: `arg_name (`type`, *optional*, defaults to `value`): Description.`
        checkpoint (`str`, *optional*):
            Checkpoint name to use in examples within the docstring. This is typically
            automatically inferred from the model configuration class, but can be overridden if needed for
            custom examples.

    Note:
        - Standard arguments (`input_ids`, `attention_mask`, `pixel_values`, etc.) are automatically documented
          from predefined descriptions and should not be redefined unless their behavior differs in your model.
        - New or custom arguments should be documented in the method's docstring using the `r''' '''` block
          or passed via the `custom_args` parameter.
        - For model classes, the decorator derives parameter descriptions from the `__init__` method's signature
          and docstring.
        - Return value documentation is automatically generated for methods that return ModelOutput subclasses.
    """

    def auto_docstring_decorator(obj):
        # Methods carry a dotted __qualname__ (e.g. "Model.forward"); plain classes do not.
        if len(obj.__qualname__.split(".")) > 1:
            return auto_method_docstring(
                obj, custom_intro=custom_intro, custom_args=custom_args, checkpoint=checkpoint
            )
        return auto_class_docstring(obj, custom_intro=custom_intro, custom_args=custom_args, checkpoint=checkpoint)

    # Support both bare `@auto_docstring` and parametrized `@auto_docstring(...)` usage.
    if obj:
        return auto_docstring_decorator(obj)

    return auto_docstring_decorator
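# Dispatch sketch (hypothetical classes, not part of the recovered module): a bare
# class name has no dot in its __qualname__, so it is routed to
# `auto_class_docstring`; a method such as "MyOtherModel.forward" is routed to
# `auto_method_docstring`.
#
#     @auto_docstring                      # __qualname__ == "MyModel" -> class path
#     class MyModel(PreTrainedModel): ...
#
#     class MyOtherModel(PreTrainedModel):
#         @auto_docstring(custom_intro="Runs the forward pass.")  # "MyOtherModel.forward" -> method path
#         def forward(self, input_ids): ...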