
"""PyTorch BitNet model."""

from typing import Callable, Optional

import torch

from ...cache_utils import Cache
from ...modeling_flash_attention_utils import FlashAttentionKwargs
from ...modeling_outputs import CausalLMOutputWithPast
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS
from ...processing_utils import Unpack
from ...utils import logging
from ..gemma.modeling_gemma import GemmaMLP
from ..llama.modeling_llama import (
    LlamaAttention,
    LlamaDecoderLayer,
    LlamaForCausalLM,
    LlamaModel,
    LlamaRMSNorm,
    apply_rotary_pos_emb,
    eager_attention_forward,
)
from .configuration_bitnet import BitNetConfig


logger = logging.get_logger(__name__)


class BitNetRMSNorm(LlamaRMSNorm):
    pass


class BitNetMLP(GemmaMLP):
    def __init__(self, config: BitNetConfig):
        super().__init__(config)
        # BitNet adds an extra RMSNorm before the down projection
        self.ffn_sub_norm = BitNetRMSNorm(config.intermediate_size, eps=config.rms_norm_eps)

    def forward(self, x):
        down_proj = self.down_proj(self.ffn_sub_norm(self.act_fn(self.gate_proj(x)) * self.up_proj(x)))
        return down_proj


class BitNetAttention(LlamaAttention):
    def __init__(self, config: BitNetConfig, layer_idx: int):
        super().__init__(config, layer_idx)
        # BitNet adds an extra RMSNorm on the attention output before the output projection
        self.attn_sub_norm = BitNetRMSNorm(config.hidden_size, eps=config.rms_norm_eps)

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor],
        past_key_value: Optional[Cache] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[FlashAttentionKwargs],
    ) -> tuple[torch.Tensor, Optional[torch.Tensor], Optional[tuple[torch.Tensor]]]:
        input_shape = hidden_states.shape[:-1]
        hidden_shape = (*input_shape, -1, self.head_dim)

        query_states = self.q_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        key_states = self.k_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        value_states = self.v_proj(hidden_states).view(hidden_shape).transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_value is not None:
            # sin and cos are specific to RoPE models; cache_position is needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx, cache_kwargs)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            dropout=0.0 if not self.training else self.attention_dropout,
            scaling=self.scaling,
            **kwargs,
        )

        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
        attn_output = self.attn_sub_norm(attn_output)  # diff with Llama
        attn_output = self.o_proj(attn_output)
        return attn_output, attn_weights


class BitNetDecoderLayer(LlamaDecoderLayer):
    pass


class BitNetModel(LlamaModel):
    pass


class BitNetForCausalLM(LlamaForCausalLM):
    _tied_weights_keys = ["lm_head.weight"]
    _tp_plan = None
    _pp_plan = None

    def forward(
        self,
        **super_kwargs,
    ) -> CausalLMOutputWithPast:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the masked language modeling loss. Indices should either be in `[0, ...,
            config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored
            (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`.
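
        Since `-100` is the ignore index of the loss, the usual pattern for training only on completion tokens
        is to copy `input_ids` and mask the prompt positions; a minimal sketch of label construction (arbitrary
        token ids, not BitNet-specific):

        ```python
        >>> import torch

        >>> input_ids = torch.tensor([[1, 2, 3, 4, 5]])
        >>> labels = input_ids.clone()
        >>> labels[:, :2] = -100  # the first two (prompt) tokens are excluded from the loss
        ```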

        Example:

        ```python
        >>> from transformers import AutoTokenizer, BitNetForCausalLM

        >>> model = BitNetForCausalLM.from_pretrained("microsoft/bitnet-b1.58-2B-4T")
        >>> tokenizer = AutoTokenizer.from_pretrained("microsoft/bitnet-b1.58-2B-4T")

        >>> prompt = f'<|begin_of_text|>User: Hey, are you conscious? Can you talk to me?<|eot_id|>Assistant: '
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=100)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "User: Hey, are you conscious? Can you talk to me?Assistant: No, I'm not conscious. I'm an artificial intelligence designed to assist with information and tasks. How can I help you today?"
        ```"""
        return super().forward(**super_kwargs)


__all__ = ["BitNetForCausalLM", "BitNetModel", "BitNetPreTrainedModel"]