
from __future__ import annotations

from torch import Tensor, nn

from sentence_transformers.cross_encoder.CrossEncoder import CrossEncoder
from sentence_transformers.util import fullname


class MSELoss(nn.Module):
    def __init__(self, model: CrossEncoder, activation_fn: nn.Module = nn.Identity(), **kwargs) -> None:
        """
        Computes the MSE loss between the computed query-passage score and a target query-passage score. This loss
        is used to distill a cross-encoder model from a teacher cross-encoder model or gold labels.

        Args:
            model (:class:`~sentence_transformers.cross_encoder.CrossEncoder`): A CrossEncoder model to be trained.
            activation_fn (:class:`~torch.nn.Module`): Activation function applied to the logits before computing the loss.
            **kwargs: Additional keyword arguments passed to the underlying :class:`torch.nn.MSELoss`.
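
        For example, because the keyword arguments are forwarded to :class:`torch.nn.MSELoss`, something like
        ``losses.MSELoss(model, reduction="sum")`` sums the squared errors over the batch instead of averaging them.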

        .. note::

            Be mindful of the magnitude of both the labels and the model outputs. If the teacher model bounds
            its scores to [0, 1] with a Sigmoid, then you may wish to use a Sigmoid activation function in this
            loss as well.
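
            For instance, a minimal sketch reusing the ``student_model`` and ``losses`` names from the
            example below, assuming the teacher's scores are already Sigmoid-bounded to [0, 1]::

                from torch import nn

                loss = losses.MSELoss(student_model, activation_fn=nn.Sigmoid())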

        References:
            - Improving Efficient Neural Ranking Models with Cross-Architecture Knowledge Distillation: https://arxiv.org/abs/2010.02666
            - `Cross Encoder > Training Examples > Distillation <../../../examples/cross_encoder/training/distillation/README.html>`_

        Requirements:
            1. Your model must be initialized with `num_labels = 1` (a.k.a. the default) to predict one class.
            2. Usually uses a finetuned CrossEncoder teacher model M in a knowledge distillation setup.

        Inputs:
            +-----------------------------------------+-----------------------------+-------------------------------+
            | Texts                                   | Labels                      | Number of Model Output Labels |
            +=========================================+=============================+===============================+
            | (sentence_A, sentence_B) pairs          | similarity score            | 1                             |
            +-----------------------------------------+-----------------------------+-------------------------------+

        Relations:
            - :class:`MarginMSELoss` is similar to this loss, but it operates on the margin between a positive
              and a negative pair rather than on a single pair's score.

        Example:
            ::

                from sentence_transformers.cross_encoder import CrossEncoder, CrossEncoderTrainer, losses
                from datasets import Dataset

                student_model = CrossEncoder("microsoft/mpnet-base")
                teacher_model = CrossEncoder("cross-encoder/ms-marco-MiniLM-L12-v2")
                train_dataset = Dataset.from_dict({
                    "query": ["What are pandas?", "What is the capital of France?"],
                    "answer": ["Pandas are a kind of bear.", "The capital of France is Paris."],
                })

                def compute_labels(batch):
                    return {
                        "label": teacher_model.predict(list(zip(batch["query"], batch["answer"])))
                    }

                train_dataset = train_dataset.map(compute_labels, batched=True)
                loss = losses.MSELoss(student_model)

                trainer = CrossEncoderTrainer(
                    model=student_model,
                    train_dataset=train_dataset,
                    loss=loss,
                )
                trainer.train()
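
            This loss can also be trained against gold labels instead of a teacher model (see the summary
            above); a minimal sketch, assuming a hand-labeled float ``"label"`` column::

                train_dataset = Dataset.from_dict({
                    "query": ["What are pandas?", "What is the capital of France?"],
                    "answer": ["Pandas are a kind of bear.", "The capital of France is Paris."],
                    "label": [1.0, 1.0],
                })
                loss = losses.MSELoss(student_model)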
        """
        super().__init__()
        self.model = model
        self.activation_fn = activation_fn
        self.loss_fct = nn.MSELoss(**kwargs)

        if not isinstance(self.model, CrossEncoder):
            raise ValueError(
                f"{self.__class__.__name__} expects a model of type CrossEncoder, "
                f"but got a model of type {type(self.model)}."
            )

        if self.model.num_labels != 1:
            raise ValueError(
                f"{self.__class__.__name__} expects a model with 1 output label, "
                f"but got a model with {self.model.num_labels} output labels."
            )

    def forward(self, inputs: list[list[str]], labels: Tensor) -> Tensor:
        if len(inputs) != 2:
            raise ValueError(
                f"MSELoss expects a dataset with two non-label columns, but got a dataset with {len(inputs)} columns."
            )

        # Score each (sentence_A, sentence_B) pair with the model being trained
        pairs = list(zip(inputs[0], inputs[1]))
        tokens = self.model.tokenizer(
            pairs,
            padding=True,
            truncation=True,
            return_tensors="pt",
        )
        tokens.to(self.model.device)
        logits = self.model(**tokens)[0].view(-1)
        # Optionally squash the logits (e.g. with a Sigmoid) before comparing them to the target scores
        logits = self.activation_fn(logits)
        loss = self.loss_fct(logits, labels.float())
        return loss

    def get_config_dict(self):
        return {
            "activation_fn": fullname(self.activation_fn),
        }