
    rh                     ^!   d Z ddlZddlZddlZddlZddlmZ ddlmZm	Z	m
Z
 ddlmZ ddlmZ ddlmZmZ dd	lmZ dd
lmZ ddlmZ ddlmZmZmZmZmZmZ ddlm Z  ddl!m"Z" ddl#m$Z$m%Z%m&Z&m'Z'm(Z(  e       rddl)m*Z* ndZ* ejV                  e,      Z- ee.e/e	e.   e	e.   f   f   g dd e       rdndffd e       rdnd e       rdndffdd e       rdndffdd e       rdndffdd e       rdndffd d e       rd!ndffd"d e       rdndffd#d$ e       rd%nd e       rd&ndffd'd(d e       rdndffd) e       rd*nddffd+d,d- e       rd.nd e       rd/ndffd0d1 e       rd2ndffd3d4d e       rd5ndffd6d7d8d e       rdndffd9d: e       rd;ndffd<d e       rd=ndffd>d? e       rd@ndffdAd e       rdndffdBdC e       rdDnd e       rdEndffdFdG e       rdnd e       rdndffdHd e       rdndffdId? e       rd@ndffdJd e       rdndffdKd e       rdndffdLdM e       rdNnd e       rdOndffdPdQ e       rdRndffdSd e       rd!ndffdTd e       rd!ndffdUd e       rdndffdVdW e       rdXndffdYdZ e       rd[ndffd\ e       rd]nd e       rd^ndffd_d`dadbd? e       rd@ndffdcd: e       rd;ndffddde e       rdfndffdg e       rdhnd e       rdindffdj e       rdnd e       rdndffdk e       rdnd e       rdndffdl e       rdnd e       rdndffdm e       rdnd e       rdndffdndo e       rdnd e       rdndffdpdq e       rdrndffdsdt e       rdundffdvdw e       rdxndffdyd: e       rd;ndffdzd e       rdndffd{d e       rdndffd|d e       rdndffd} e       rd~nddffdd e       rd:nd e       rd;ndffdd e       rd5ndffdd e       rdndffd e       rdnddffddd e       rdndffddd e       rdndffd e       rdnd e       rdndffd e       rdnd e       rdndffd e       rdnd e       rdndffd e       rdnd e       rdndffd e       rdnd e       rdndffd e       rdnd e       rdndffdd e       rdndffdd e       rd5ndffdd e       rd5ndffdd e       rd5ndffdd e       rd5ndffd e       rdnddffdd: e       rd;ndffdd: e       rd;ndffdd: e       rd;ndffdd e       rdndffddd e       rd5ndffdd: e       rd;ndffddddddd e       rdndffdd e       rdndffdd e       rd5ndffdd e       rdndffddd? e       rd@ndffdd e       rdndffdd e       rdndffdd e       rdndffdd: e       rd;ndffdd: e       rd;ndffddW e       rdXndffd e       rdnd e       rdndffdd e       rdndffd e       rdnd e       rdndffdd e       rdnd e       rdndffdd e       rdndffdd e       rdndffdd e       rdndffdd e       rdndffdd e       rdndffdd e       rdndffd e       rdnd e       rdndffd e       rdnd e       rdndffd e       rdnd e       rdndffdd e       rdndffdd e       rdndffdd e       rdndffdd e       rdndffdd e       rdndffd e       rdnd e       rdndffdۑdd e       rdndffd e       rdnddffdd e       rdndffdd e       rdndffd e       rdnddffd e       rdnd e       rdndffd e       rdnd e       rdndffdd? e       rd@ndffdd e       rdndffdd e       rd:nd e       rd;ndffd e       rdn
 e       rdnd e       r	 e       sdndffd e       rdn
 e       rdnd e       r	 e       sdndffdd e       rdndffd e       rdnddffdd e       rdndffdd e       rdndffdd e       rd5ndffdd e       rd5ndffdd e       rd5ndffdd e       rdndffdd e       rdndffd d? e       rd@ndffd e       rdnd e       rdndffdd e       rdndffdd e       rdndffdd e       rdndffd	d
d e       rd5ndffdd e       rdndffd e       rdnd e       rdndffd e       rdnd e       rdndffd e       rdnd e       rdndffdd e       rdndffdd e       rdndffdd e       rdndffdd e       rdndffdd e       rdndffdd e       rdndffdd: e       rd;ndffdd e       rdndffdd e       rdndffdd e       rdndffd e       rd1nd e       rd2ndffd e       rd1nd e       rd2ndffdd  e       rdnd e       rdndffd!dQ e       rdRndffd"d e       rdndffd#d e       rdndffd$d%d e       rdndffd&d e       rdn
 e       rd5ndffd' e       rd(nddffd)d*d e       rdndffd+dW e       rdXndffd,dW e       rdXndffd-dW e       rdXndffd.dW e       rdXndffd/dW e       rdXndffd0dW e       rdXndffd1dW e       rdXndffd2dW e       rdXndffd3d4d5 e       rd6ndffd7 e       rdnd e       rdndffd8 e       rd9nd e       rd:ndffd; e       rd<nd e       rd=ndffd>d? e       rd@ndffdAd? e       rd@ndffdBd? e       rd@ndffdCdDdE e       rdFndffdGd e       rdndffdH e       rdInd e       rdJndffdK e       rdInd e       rdJndffdL e       rdnd e       rdndffdM e       rdNnddffdO e       rdnd e       rdndffdPd e       rd5ndffdQ e       rdRnddffdSdT e       rdUnddffdVdWdX e       rdYndffdZd e       rdndffd[d: e       rd;ndffd\ e       rdnd e       rdndffd] e       rdnd e       rdndffd^ e       rdnd e       rdndffd_d`dadbd e       rdndffdc e       rddnd e       rdendffdf e       rdnd e       rdndffdgd e       rdndffdhd e       rdndffdid e       rdndffdjd e       rdndffdkdl e       rdn
 e       rdnd e       r	 e       sdndffdmdndodpdqdr e       rdsndffdtd e       rdndffdu e       rdvnd e       rdwndffdxdy e       rdznddffd{ e       rdnd e       rdndffd| e       rdnd e       rdndffd} e       rd~nd e       rdndffdd e       rdndffd e       rdnd e       rdndffd e       rdnd e       rdndffd e       rdnd e       rdndffd e       rdnd e       rdndff      Z0 e"e$e0      Z1 e$jd                         D  ci c]  \  } }|| 
 c}} Z3de.de
e4e   df   fdZ5	 	 	 	 	 	 	 	 dde
e.ejl                  e.   f   de	e
e.ejl                  e.   f      de7de	e7   de	e8e.e.f      de	e
e7e.f      de	e.   de7de.de8e.ef   fdZ9 G d d      Z:ddgZ;yc c}} w (  zAuto Tokenizer class.    N)OrderedDict)AnyOptionalUnion)is_mistral_common_available   )PretrainedConfig)get_class_from_dynamic_moduleresolve_trust_remote_code)load_gguf_checkpoint)PreTrainedTokenizer)TOKENIZER_CONFIG_FILE)cached_fileextract_commit_hashis_g2p_en_availableis_sentencepiece_availableis_tokenizers_availablelogging   )EncoderDecoderConfig   )_LazyAutoMapping)CONFIG_MAPPING_NAMES
AutoConfigconfig_class_to_model_typemodel_type_to_module_name!replace_list_option_in_docstrings)PreTrainedTokenizerFastaimv2CLIPTokenizerCLIPTokenizerFastalbertAlbertTokenizerAlbertTokenizerFastalignBertTokenizerBertTokenizerFastarceeLlamaTokenizerLlamaTokenizerFastaria
aya_visionCohereTokenizerFastbark)bart)BartTokenizerBartTokenizerFastbarthezBarthezTokenizerBarthezTokenizerFast)bartpho)BartphoTokenizerNbertzbert-generationBertGenerationTokenizer)zbert-japanese)BertJapaneseTokenizerN)bertweet)BertweetTokenizerNbig_birdBigBirdTokenizerBigBirdTokenizerFastbigbird_pegasusPegasusTokenizerPegasusTokenizerFast)biogpt)BioGptTokenizerNbitnetr   )
blenderbot)BlenderbotTokenizerBlenderbotTokenizerFast)zblenderbot-small)BlenderbotSmallTokenizerNblipzblip-2GPT2TokenizerGPT2TokenizerFastbloomBloomTokenizerFastbridgetowerRobertaTokenizerRobertaTokenizerFastbros)byt5)ByT5TokenizerN	camembertCamembertTokenizerCamembertTokenizerFast)canine)CanineTokenizerN	chameleonchinese_clipclapclipclipseg)clvp)ClvpTokenizerN
code_llamaCodeLlamaTokenizerCodeLlamaTokenizerFastcodegenCodeGenTokenizerCodeGenTokenizerFastcoherecohere2colpalicolqwen2Qwen2TokenizerQwen2TokenizerFastconvbertConvBertTokenizerConvBertTokenizerFastcpmCpmTokenizerCpmTokenizerFast)cpmant)CpmAntTokenizerN)ctrl)CTRLTokenizerN)zdata2vec-audioWav2Vec2CTCTokenizerNzdata2vec-textdbrxdebertaDebertaTokenizerDebertaTokenizerFastz
deberta-v2DebertaV2TokenizerDebertaV2TokenizerFastdeepseek_v2deepseek_v3deepseek_vldeepseek_vl_hybrid)dia)DiaTokenizerN	diffllama
distilbertDistilBertTokenizerDistilBertTokenizerFastdprDPRQuestionEncoderTokenizerDPRQuestionEncoderTokenizerFastelectraElectraTokenizerElectraTokenizerFastemu3ernieernie4_5ernie4_5_moeernie_mErnieMTokenizer)esm)EsmTokenizerNexaone4falconfalcon_mambaGPTNeoXTokenizerFastfastspeech2_conformerFastSpeech2ConformerTokenizer)flaubert)FlaubertTokenizerNfnetFNetTokenizerFNetTokenizerFast)fsmt)FSMTTokenizerNfunnelFunnelTokenizerFunnelTokenizerFastgemmaGemmaTokenizerGemmaTokenizerFastgemma2gemma3gemma3_textgemma3ngemma3n_textgitglmglm4glm4_moeglm4vzgpt-sw3GPTSw3Tokenizergpt2gpt_bigcodegpt_neogpt_neox)gpt_neox_japanese)GPTNeoXJapaneseTokenizerNgpt_ossgptj)zgptsan-japanese)GPTSanJapaneseTokenizerN)graniterJ   N)
granitemoer   )granitemoehybridr   )granitemoesharedr   zgrounding-dinogroupvitheliumherbertHerbertTokenizerHerbertTokenizerFast)hubertrv   ibertideficsidefics2idefics3instructblipinstructblipvideointernvljambajanusjetmoe)jukebox)JukeboxTokenizerNzkosmos-2XLMRobertaTokenizerXLMRobertaTokenizerFastlayoutlmLayoutLMTokenizerLayoutLMTokenizerFast
layoutlmv2LayoutLMv2TokenizerLayoutLMv2TokenizerFast
layoutlmv3LayoutLMv3TokenizerLayoutLMv3TokenizerFast	layoutxlmLayoutXLMTokenizerLayoutXLMTokenizerFastledLEDTokenizerLEDTokenizerFastliltllamallama4llama4_textllava
llava_nextllava_next_videollava_onevision
longformerLongformerTokenizerLongformerTokenizerFastlongt5T5TokenizerT5TokenizerFast)luke)LukeTokenizerNlxmertLxmertTokenizerLxmertTokenizerFastm2m_100M2M100Tokenizermambamamba2marianMarianTokenizermbartMBartTokenizerMBartTokenizerFastmbart50MBart50TokenizerMBart50TokenizerFastmegazmegatron-bert)zmgp-str)MgpstrTokenizerNminimaxmistralMistralCommonTokenizermixtralmllamamlukeMLukeTokenizerzmm-grounding-dino
mobilebertMobileBertTokenizerMobileBertTokenizerFast
modernbert	moonshinemoshimpnetMPNetTokenizerMPNetTokenizerFastmptmramt5MT5TokenizerMT5TokenizerFastmusicgenmusicgen_melodymvpMvpTokenizerMvpTokenizerFast)myt5)MyT5TokenizerNnemotronnezhanllbNllbTokenizerNllbTokenizerFastznllb-moenystromformerolmoolmo2olmoezomdet-turbo	oneformerz
openai-gptOpenAIGPTTokenizerOpenAIGPTTokenizerFastoptowlv2owlvit	paligemmapegasus	pegasus_x)	perceiver)PerceiverTokenizerN	persimmonphiphi3phimoe)phobert)PhobertTokenizerN
pix2structpixtralplbartPLBartTokenizer)
prophetnet)ProphetNetTokenizerNqdqbertqwen2qwen2_5_omni
qwen2_5_vlqwen2_audio	qwen2_moeqwen2_vlqwen3	qwen3_moe)rag)RagTokenizerNrealmRealmTokenizerRealmTokenizerFastrecurrent_gemmareformerReformerTokenizerReformerTokenizerFastrembertRemBertTokenizerRemBertTokenizerFast	retribertRetriBertTokenizerRetriBertTokenizerFastrobertazroberta-prelayernorm)roc_bert)RoCBertTokenizerNroformerRoFormerTokenizerRoFormerTokenizerFastrwkvseamless_m4tSeamlessM4TTokenizerSeamlessM4TTokenizerFastseamless_m4t_v2shieldgemma2siglipSiglipTokenizersiglip2smollm3speech_to_textSpeech2TextTokenizer)speech_to_text_2)Speech2Text2TokenizerNspeecht5SpeechT5Tokenizer)splinter)SplinterTokenizerSplinterTokenizerFastsqueezebertSqueezeBertTokenizerSqueezeBertTokenizerFaststablelm
starcoder2switch_transformerst5t5gemma)tapas)TapasTokenizerN)tapex)TapexTokenizerN)z
transfo-xl)TransfoXLTokenizerNtvpudopUdopTokenizerUdopTokenizerFastumt5video_llavaviltvipllavavisual_bert)vits)VitsTokenizerNvoxtral)wav2vec2rv   )zwav2vec2-bertrv   )zwav2vec2-conformerrv   )wav2vec2_phoneme)Wav2Vec2PhonemeCTCTokenizerNwhisperWhisperTokenizerWhisperTokenizerFastxclipxglmXGLMTokenizerXGLMTokenizerFast)xlm)XLMTokenizerNzxlm-prophetnetXLMProphetNetTokenizerzxlm-robertazxlm-roberta-xlxlnetXLNetTokenizerXLNetTokenizerFastxlstmxmodyosozambazamba2
class_namereturnc                    | dk(  rt         S t        j                         D ]\  \  }}| |v st        |      }|dv r| dk(  rt	        j
                  dd      }nt	        j
                  d| d      }	 t        ||       c S  t        j                  j                         D ]  }|D ]  }t        |dd       | k(  s|c c S  ! t	        j
                  d      }t        ||       rt        ||       S y # t        $ r Y w xY w)	Nr   )r  r	  r  z.tokenization_mistral_commontransformers.ztransformers.models__name__)r   TOKENIZER_MAPPING_NAMESitemsr   	importlibimport_modulegetattrAttributeErrorTOKENIZER_MAPPING_extra_contentvalueshasattr)r  module_name
tokenizersmodule	tokenizermain_modules         }/var/www/html/ai-insurance-compliance-backend/venv/lib/python3.12/site-packages/transformers/models/auto/tokenization_auto.pytokenizer_class_from_namer    s   ..&&#:#@#@#B 
Z#3K@K44G_9_"001OQ_`"001[M1BDYZvz22
 (66==? !
# 	!Iy*d3zA  	!! )).9K{J'{J// " s   0C--	C98C9pretrained_model_name_or_path	cache_dirforce_downloadresume_downloadproxiestokenrevisionlocal_files_only	subfolderc	                    |	j                  dd      }
|
)t        j                  dt               |t	        d      |
}|	j                  d      }t        | t        ||||||||ddd|      }|t        j                  d       i S t        ||      }t        |d	
      5 }t        j                  |      }ddd       |d<   |S # 1 sw Y   xY w)a  
    Loads the tokenizer configuration from a pretrained model tokenizer configuration.

    Args:
        pretrained_model_name_or_path (`str` or `os.PathLike`):
            This can be either:

            - a string, the *model id* of a pretrained model configuration hosted inside a model repo on
              huggingface.co.
            - a path to a *directory* containing a configuration file saved using the
              [`~PreTrainedTokenizer.save_pretrained`] method, e.g., `./my_model_directory/`.

        cache_dir (`str` or `os.PathLike`, *optional*):
            Path to a directory in which a downloaded pretrained model configuration should be cached if the standard
            cache should not be used.
        force_download (`bool`, *optional*, defaults to `False`):
            Whether or not to force to (re-)download the configuration files and override the cached versions if they
            exist.
        resume_download:
            Deprecated and ignored. All downloads are now resumed by default when possible.
            Will be removed in v5 of Transformers.
        proxies (`dict[str, str]`, *optional*):
            A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',
            'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request.
        token (`str` or *bool*, *optional*):
            The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
            when running `hf auth login` (stored in `~/.huggingface`).
        revision (`str`, *optional*, defaults to `"main"`):
            The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a
            git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any
            identifier allowed by git.
        local_files_only (`bool`, *optional*, defaults to `False`):
            If `True`, will only try to load the tokenizer configuration from local files.
        subfolder (`str`, *optional*, defaults to `""`):
            In case the tokenizer config is located inside a subfolder of the model repo on huggingface.co, you can
            specify the folder name here.

    <Tip>

    Passing `token=True` is required when you want to use a private model.

    </Tip>

    Returns:
        `dict`: The configuration of the tokenizer.

    Examples:

    ```python
    # Download configuration from huggingface.co and cache.
    tokenizer_config = get_tokenizer_config("google-bert/bert-base-uncased")
    # This model does not have a tokenizer config so the result will be an empty dict.
    tokenizer_config = get_tokenizer_config("FacebookAI/xlm-roberta-base")

    # Save a pretrained tokenizer locally and you can reload its config
    from transformers import AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained("google-bert/bert-base-cased")
    tokenizer.save_pretrained("tokenizer-test")
    tokenizer_config = get_tokenizer_config("tokenizer-test")
    ```use_auth_tokenNrThe `use_auth_token` argument is deprecated and will be removed in v5 of Transformers. Please use `token` instead.V`token` and `use_auth_token` are both specified. Please set only the argument `token`._commit_hashF)r  r  r  r  r  r  r  r   _raise_exceptions_for_gated_repo%_raise_exceptions_for_missing_entries'_raise_exceptions_for_connection_errorsr  z\Could not locate the tokenizer configuration file, will try to use the model config instead.zutf-8)encoding)popwarningswarnFutureWarning
ValueErrorgetr   r   loggerinfor   openjsonload)r  r  r  r  r  r  r  r  r  kwargsr  commit_hashresolved_config_filereaderresults                  r  get_tokenizer_configr    s    R ZZ 0$7N! A	
 uvv**^,K&%%'))..305   #rs	%&:KHK	"W	5 #6"#(F>M# #s   CCc                   N    e Zd ZdZd Ze ee      d               Ze	dd       Z
y)AutoTokenizera  
    This is a generic tokenizer class that will be instantiated as one of the tokenizer classes of the library when
    created with the [`AutoTokenizer.from_pretrained`] class method.

    This class cannot be instantiated directly using `__init__()` (throws an error).
    c                     t        d      )Nz}AutoTokenizer is designed to be instantiated using the `AutoTokenizer.from_pretrained(pretrained_model_name_or_path)` method.)OSError)selfs    r  __init__zAutoTokenizer.__init__  s    _
 	
    c           	      B	   |j                  dd      }|;t        j                  dt               |j	                  d      t        d      ||d<   |j                  dd      }d|d<   |j                  d	d      }|j                  d
d      }|j                  dd      }|j	                  d      }	|d}
t        j	                  |d      }|,t        d| ddj                  d t        D               d      |\  }}|r#|t        |      }
nt        j                  d       |
t        |      }
|
t        d| d       |
j                  |g|i |S t        |fi |}d|v r|d   |d<   |j	                  d      }d}d|v r4t        |d   t        t        f      r|d   }n|d   j	                  dd      }|t        |t               sM|	r3t#        ||	fi |}t%        |d      d   }t'        j(                  d(i |}nt'        j                  |fd|i|}|j*                  }t-        |d      rd|j.                  v r|j.                  d   }|du}t1        |      t2        v xs% |duxr t        |      duxs t        |dz         du}|r<|r|d   |d   }n|d   }d|v r|j5                  d      d   }nd}t7        |||||      }|rI|rGt9        |fi |}
|j                  dd      }|
j;                           |
j                  |g|d|i|S |[d}
|r!|j=                  d      s| d}t        |      }
|
|}t        |      }
|
t        d d        |
j                  |g|i |S t        |t>              rzt1        |j@                        t1        |jB                        urDt        j                  d!|jB                  jD                   d"|j@                  jD                   d#       |jB                  }tG        t1        |      jH                        }|Tt2        t1        |         \  }}|r|s| |j                  |g|i |S | |j                  |g|i |S t        d$      t        d%|jD                   d&dj                  d' t2        D               d      ))a]  
        Instantiate one of the tokenizer classes of the library from a pretrained model vocabulary.

        The tokenizer class to instantiate is selected based on the `model_type` property of the config object (either
        passed as an argument or loaded from `pretrained_model_name_or_path` if possible), or when it's missing, by
        falling back to using pattern matching on `pretrained_model_name_or_path`:

        List options

        Params:
            pretrained_model_name_or_path (`str` or `os.PathLike`):
                Can be either:

                    - A string, the *model id* of a predefined tokenizer hosted inside a model repo on huggingface.co.
                    - A path to a *directory* containing vocabulary files required by the tokenizer, for instance saved
                      using the [`~PreTrainedTokenizer.save_pretrained`] method, e.g., `./my_model_directory/`.
                    - A path or url to a single saved vocabulary file if and only if the tokenizer only requires a
                      single vocabulary file (like Bert or XLNet), e.g.: `./my_model_directory/vocab.txt`. (Not
                      applicable to all derived classes)
            inputs (additional positional arguments, *optional*):
                Will be passed along to the Tokenizer `__init__()` method.
            config ([`PretrainedConfig`], *optional*)
                The configuration object used to determine the tokenizer class to instantiate.
            cache_dir (`str` or `os.PathLike`, *optional*):
                Path to a directory in which a downloaded pretrained model configuration should be cached if the
                standard cache should not be used.
            force_download (`bool`, *optional*, defaults to `False`):
                Whether or not to force the (re-)download the model weights and configuration files and override the
                cached versions if they exist.
            resume_download:
                Deprecated and ignored. All downloads are now resumed by default when possible.
                Will be removed in v5 of Transformers.
            proxies (`dict[str, str]`, *optional*):
                A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',
                'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request.
            revision (`str`, *optional*, defaults to `"main"`):
                The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a
                git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any
                identifier allowed by git.
            subfolder (`str`, *optional*):
                In case the relevant files are located inside a subfolder of the model repo on huggingface.co (e.g. for
                facebook/rag-token-base), specify it here.
            use_fast (`bool`, *optional*, defaults to `True`):
                Use a [fast Rust-based tokenizer](https://huggingface.co/docs/tokenizers/index) if it is supported for
                a given model. If a fast tokenizer is not available for a given model, a normal Python-based tokenizer
                is returned instead.
            tokenizer_type (`str`, *optional*):
                Tokenizer type to be loaded.
            trust_remote_code (`bool`, *optional*, defaults to `False`):
                Whether or not to allow for custom models defined on the Hub in their own modeling files. This option
                should only be set to `True` for repositories you trust and in which you have read the code, as it will
                execute code present on the Hub on your local machine.
            kwargs (additional keyword arguments, *optional*):
                Will be passed to the Tokenizer `__init__()` method. Can be used to set special tokens like
                `bos_token`, `eos_token`, `unk_token`, `sep_token`, `pad_token`, `cls_token`, `mask_token`,
                `additional_special_tokens`. See parameters in the `__init__()` for more details.

        Examples:

        ```python
        >>> from transformers import AutoTokenizer

        >>> # Download vocabulary from huggingface.co and cache.
        >>> tokenizer = AutoTokenizer.from_pretrained("google-bert/bert-base-uncased")

        >>> # Download vocabulary from huggingface.co (user-uploaded) and cache.
        >>> tokenizer = AutoTokenizer.from_pretrained("dbmdz/bert-base-german-cased")

        >>> # If vocabulary files are in a directory (e.g. tokenizer was saved using *save_pretrained('./test/saved_model/')*)
        >>> # tokenizer = AutoTokenizer.from_pretrained("./test/bert_saved_model/")

        >>> # Download vocabulary from huggingface.co and define model-specific arguments
        >>> tokenizer = AutoTokenizer.from_pretrained("FacebookAI/roberta-base", add_prefix_space=True)
        ```r  Nr  r  r  configT
_from_autouse_fasttokenizer_typetrust_remote_code	gguf_filezPassed `tokenizer_type` z3 does not exist. `tokenizer_type` should be one of z, c              3       K   | ]  }|  y wN .0cs     r  	<genexpr>z0AutoTokenizer.from_pretrained.<locals>.<genexpr>  s      Dq Ds   r  zt`use_fast` is set to `True` but the tokenizer class does not have a fast version.  Falling back to the slow version.zTokenizer class z is not currently imported.r  tokenizer_classauto_mapr  F)return_tensorsFastr   r   z--code_revisionz- does not exist or is not currently imported.z The encoder model config class: z3 is different from the decoder model config class: z. It is not recommended to use the `AutoTokenizer.from_pretrained()` method in this case. Please use the encoder and decoder specific tokenizer classes.zzThis tokenizer cannot be instantiated. Please make sure you have `sentencepiece` installed in order to use this tokenizer.z!Unrecognized configuration class z8 to build an AutoTokenizer.
Model type should be one of c              3   4   K   | ]  }|j                     y wr  )r  r  s     r  r  z0AutoTokenizer.from_pretrained.<locals>.<genexpr>{  s     4[AQZZ4[s   r  )%r  r  r  r  r  r  r  joinr  r  warningfrom_pretrainedr  
isinstancetuplelistr	   r   r   r   	for_modelr  r  r  typer  splitr   r
   register_for_auto_classendswithr   decoderencoder	__class__r   r  )clsr  inputsr  r  r  r  r  r  r  r  tokenizer_class_tupletokenizer_class_nametokenizer_fast_class_nametokenizer_configconfig_tokenizer_classtokenizer_auto_map	gguf_pathconfig_dicthas_remote_codehas_local_code	class_refupstream_repo_tokenizer_class_candidate
model_typetokenizer_class_pytokenizer_class_fasts                               r  r  zAutoTokenizer.from_pretrained  s   Z  $4d;%MM E zz'". l  -F7OHd+#|::j$/$4d;"JJ':DAJJ{+	 %"O$;$?$?PT$U!$, .~.>>qyy D,C DDEQH 
 ?T; ";,8&?@Y&ZONN= &";<P"Q& #34H3IId!eff2?223PdSYd]cdd 00MXQWX--%5n%EF>"!1!5!56G!H!))*:6F%5j%A"%5j%A%E%EoW[%\" ")f&67 +,I9 _X^ _I"6yQV"WX`"aK'11@K@F'775IZ^dF &,%;%;"vz*&///Q%+___%E",D8f):: 
"$. )*@AM Z,-Cf-LMUYY	 	 .q1=.q1	.q1	y  ) 5a 8 $ 9!#@.Racp! 0;IGdohnoO

?D1A3352?22-06J[_e  $/"O 6 ? ? G/E.Fd,K)";<U"V&,B)";<U"V& &'@&AAno  3?223PdSYd]cdd f23FNN#4+??6v~~7O7O6P Q%%+^^%=%=$> ?22 ^^F/V0E0EF
!7Hf7V4 4#5G5O;+;;<Ym\bmflmm%1=-==>[o^dohnoo$: 
 /0@0@/A B++/994[IZ4[+[*\\]_
 	
r  Nc                    ||t        d      |t        |t              rt        d      |t        |t              rt        d      |=|;t        |t              r+|j                  |k7  rt        d|j                   d| d      | t
        j                  v rt
        |    \  }}||}||}t
        j                  | ||f|       y)	a  
        Register a new tokenizer in this mapping.


        Args:
            config_class ([`PretrainedConfig`]):
                The configuration corresponding to the model to register.
            slow_tokenizer_class ([`PretrainedTokenizer`], *optional*):
                The slow tokenizer to register.
            fast_tokenizer_class ([`PretrainedTokenizerFast`], *optional*):
                The fast tokenizer to register.
        NzKYou need to pass either a `slow_tokenizer_class` or a `fast_tokenizer_classz:You passed a fast tokenizer in the `slow_tokenizer_class`.z:You passed a slow tokenizer in the `fast_tokenizer_class`.zThe fast tokenizer class you are passing has a `slow_tokenizer_class` attribute that is not consistent with the slow tokenizer class you passed (fast tokenizer has z and you passed z!. Fix one of those so they match!)exist_ok)r  
issubclassr   r   slow_tokenizer_classr  r  register)config_classr  fast_tokenizer_classr  existing_slowexisting_fasts         r  r  zAutoTokenizer.register~  s     ',@,Hjkk+
;OQh0iYZZ+
;OQd0eYZZ !,$0/1HI$99=QQ['<<==MNbMc d!!  ,;;;+<\+J(M=#+'4$#+'4$""<2FH\1]hp"qr  )NNF)r  
__module____qualname____doc__r  classmethodr   r  r  staticmethodr  r  r  r  r  r    sH    
 &'>?`
 @ `
D )r )rr  r  r  )NFNNNNF )<r"  r  r  osr  collectionsr   typingr   r   r   transformers.utils.import_utilsr   configuration_utilsr	   dynamic_module_utilsr
   r   modeling_gguf_pytorch_utilsr   tokenization_utilsr   tokenization_utils_baser   utilsr   r   r   r   r   r   encoder_decoderr   auto_factoryr   configuration_autor   r   r   r   r   tokenization_utils_fastr   
get_loggerr  r  strr  r  r  r  CONFIG_TO_TYPEr  r  PathLikebooldictr  r  __all__)kvs   00r  <module>r=     s"      	  # ' ' G 3 \ ? 5 <  3 *  B" 
		H	% P+c5#1M+N&NO{
'>'@#d	
{
 %?%A!t)@)B%	
{
 
?;R;T$7Z^_`{
  
#=T=V%9\`ab!{
" 
"<S<U$8[_`a#{
$ 
7N7P3VZ[\%{
& 
/:Q:S#6Y]^_'{
( 	9){
, &@&B"*A*C&	
+{
8 	09{
: 
/:Q:S#6Y]^_;{
< 
:T:V6\`bfgh={
> 	;?{
@ 	2A{
D &@&B"*A*C&	
C{
P 
/KbKd1GjnopQ{
R 	.S{
T 
D7N7P3VZ[\U{
V 	KW{
X 	AY{
Z 
/:Q:S#6Y]^_[{
\ 
O<S<U%8[_`a]{
^ 
41H1J-PTUV_{
` 
+G^G`-Cfjkla{
b 
/:Q:S#6Y]^_c{
d 	*e{
h (B(D$$,C,E(4	
g{
t 	.u{
x $>$@ d(?(A$t	
w{
D 
/BYB[+>aefgE{
H "*A*C&	
G{
V '>'@#d	
U{
d '>'@#d	
c{
p 	*q{
t (B(D$$,C,E(4	
s{
@ 
'CZC\)?bfghA{
B 
D3J3L/RVWXC{
D 
T4K4M0SWXYE{
F 
%?V?X';^bcdG{
H 
&@W@Y(<_cdeI{
J 
)F]F_+BeijkK{
N "<">D&=&?"T	
M{
Z 	.[{
\ 	*]{
^ 	;_{
` 
-I`Ib/Ehlmna{
b 
/:Q:S#6Y]^_c{
d 
'CZC\)?bfghe{
h (B(D$$,C,E(4	
g{
v $>$@ d(?(A$t	
u{
D $>$@ d(?(A$t	
C{
R $>$@ d(?(A$t	
Q{
` !$>$@ d(?(A$t	
_{
l 	(m{
p $>$@ d(?(A$t	
o{
| 
-LcLe/Hkopq}{
@ -5L5N1TX	
{
L 
'CZC\)?bfghM{
N 
/:Q:S#6Y]^_O{
P 
?;R;T$7Z^_`Q{
R 
d4K4M0SWXYS{
T 
$8O8Q 4W[\]U{
V 
*D*F&DRVWXW{
X 	(Y{
\ #:#<$'>'@#d	
[{
h 
D7N7P3VZ[\i{
j 
$:Q:S 6Y]^_k{
n $0C0E,4QUV	
m{
t 	2u{
v 
/:Q:S#6Y]^_w{
x 	*y{
z 
%@W@Y'<_cde{{
~ $>$@ d(?(A$t	
}{
L $>$@ d(?(A$t	
K{
Z $>$@ d(?(A$t	
Y{
h $>$@ d(?(A$t	
g{
v $>$@ d(?(A$t	
u{
D $>$@ d(?(A$t	
C{
P 
9P9R"5X\]^Q{
R 
4K4M0SWXYS{
T 
$5L5N1TXYZU{
V 
d9P9R5X\]^W{
X 
46M6O2UYZ[Y{
Z 
*D*F&DRVWX[{
\ 
/:Q:S#6Y]^_]{
^ 
AXAZ*=`def_{
` 
_=T=V&9\`aba{
b 
d6M6O2UYZ[c{
d 	Be{
f 
T8O8Q4W[\]g{
h 
/:Q:S#6Y]^_i{
j 	?k{
l 	-m{
n 	0o{
p 	6q{
r 	6s{
t 
OD[D]-@cghiu{
v 
o>U>W':]abcw{
x 
D7N7P3VZ[\y{
z 
'CZC\)?bfgh{{
| 	3}{
~ 
%AXAZ'=`def{
@ 
T3J3L/RVWXA{
B 
&@W@Y(<_cdeC{
D 
&@W@Y(<_cdeE{
F 
/BYB[+>aefgG{
H 
G^G`0CfjklI{
J 
&@W@Y(<_cdeK{
N $>$@ d(?(A$t	
M{
Z 
41H1J-PTUV[{
^ $>$@ d(?(A$t	
]{
j 	0k{
n )C)E%4-D-F)D	
m{
z 
)F]F_+Beijk{{
| 
-LcLe/Hkopq}{
~ 
-LcLe/Hkopq{
@	 
+I`Ib-EhlmnA	{
B	 
7N7P!3VZ[\C	{
D	 
'F]F_)BeijkE	{
H	 $>$@ d(?(A$t	
G	{
V	 $>$@ d(?(A$t	
U	{
d	 $>$@ d(?(A$t	
c	{
p	 
#=T=V%9\`abq	{
r	 
(BYB[*>aefgs	{
t	 
.H_Ha0Dgklmu	{
v	 
-G^G`/Cfjklw	{
x	 
-LcLe/Hkopqy	{
|	 !;!=4%<%>!D	
{	{
H
 	*I
{
J
 
%@W@Y'<_cdeK
{
L
 
*D*F&DRVWXM
{
N
 
43J3L/RVWXO
{
P
 
D4K4M0SWXYQ
{
R
 
)C)E%4QUVWS
{
V
 $>$@ d(?(A$t	
U
{
d
 &@&B"*A*C&	
c
{
p
 
$@W@Y&<_cdeq
{
r
 
?CZC\,?bfghs
{
t
 	/u
{
x
 #=#?T'>'@#d	
w
{
F  /0 )*D*F&D(?(AJeJg$mq		
E{
X  /0 )*D*F&D(?(AJeJg$mq		
W{
h 
$>U>W&:]abci{
j 
'A'C#tTUk{
l 
G^G`0Cfjklm{
n 
-LcLe/Hkopqo{
p 
;R;T7Z^_`q{
r 
t:Q:S6Y]^_s{
t 
46M6O2UYZ[u{
v 
#=T=V%9\`abw{
x 
1H1J-PTUVy{
z 
#?V?X%;^bcd{{
~ "<">D&=&?"T	
}{
J 
m:Q:S%6Y]^_K{
L 
]AXAZ,=`defM{
N 
7N7P!3VZ[\O{
P 	*Q{
R 
d9P9R5X\]^S{
T 
?;R;T$7Z^_`U{
X #=#?T'>'@#d	
W{
f #=#?T'>'@#d	
e{
t %?%A!t)@)B%	
s{
@ 
$2I2K.QUVWA{
B 
43J3L/RVWXC{
D 
43J3L/RVWXE{
H 5L5N1TXY	
G{
N 
?V?X(;^bcdO{
R !?V?X#;^bc	
Q{
X 
9P9R"5X\]^Y{
Z 
?;R;T$7Z^_`[{
\ 
O<S<U%8[_`a]{
^ 
'AXAZ)=`def_{
b &@&B"*A*C&	
a{
p &@&B"*A*C&	
o{
|	
}{
L $>$@ d(?(A$t	
K{
X 
#?V?X%;^bcdY{
Z 
"<S<U$8[_`a[{
\ 
$>U>W&:]abc]{
^ 	0_{
` 
<S<U'8[_`aa{
d .0 )3J3L/RV		
c{
t 
)C)E%4QUVWu{
v 	6w{
x 
_=T=V&9\`aby{
|  (?(A$t	
{{
H 
*D[D],@cghiI{
J 
(BYB[*>aefgK{
L 
)CZC\+?bfghM{
P  (?(A$t	
O{
\ 
&@W@Y(<_cde]{
`  (?(A$t	
_{
n  (?(A$t	
m{
z 	({{
| 
#=T=V%9\`ab}{
@ $>$@ d(?(A$t	
{
N 'A'C#+B+D'$	
M{
\ &@&B"*A*C&	
[{
h 
+I`Ib-Ehlmni{
j 
'CZC\)?bfghk{
n #;R;T!7Z^_	
m{
t 	1u{
v 
)F]F_+Beijkw{
x 
$2I2K.QUVWy{
| *D*F&D.E.G*T	
{{
J *D*F&D.E.G*T	
I{
X $>$@ d(?(A$t	
W{
d 
)C)E%4QUVWe{
h $>$@ d(?(A$t	
g{
t 
T8O8Q4W[\]u{
v 
6P6R2X\^bcdw{
x 	>y{
z 
-G-I)tUYZ[{{
| 	E}{
@ #CZC\%?bfg	
{
F 
d6M6O2UYZ[G{
H 
@W@Y)<_cdeI{
L "!;!=4%<%>!D	
K{
Z !;!=4%<%>!D	
Y{
h $>$@ d(?(A$t	
g{
t 	,u{
v 	,w{
x 	5y{
z 
9P9R"5X\]^{{
~ #=#?T'>'@#d	
}{
L !;!=4%<%>!D	
K{
X 
)CZC\+?bfghY{
Z 
/:Q:S#6Y]^_[{
\ 
&@W@Y(<_cde]{
^ 
AXAZ*=`def_{
` 	*a{
d  /0 )*D*F&D(?(AJeJg$mq		
c{
t 	5u{
v 	:w{
x 	?y{
z 	D{{
| 
'CZC\)?bfgh}{
~ 
?;R;T$7Z^_`{
B #=#?T'>'@#d	
A{
N 	(O{
P 
8R8T4Z^`defQ{
T )C)E%4-D-F)D	
S{
b )C)E%4-D-F)D	
a{
p $>$@ d(?(A$t	
o{
| 
43J3L/RVWX}{
@ )C)E%4-D-F)D	
{
N %?%A!t)@)B%	
M{
\ $>$@ d(?(A$t	
[{
j $>$@ d(?(A$t	
i{
}
 ~ %%9;RS #=#7#=#=#?@41a!Q$@# %S	42H @ 9= &*(,(,""l#(bkk#.>)>#?lc2;;s#3345l l d^	l
 d38n%l E$)$%l sml l l 
#s(^l^\r \r~ 
0_ As   @AB)