from __future__ import annotations

from collections.abc import Iterable
from enum import Enum
from typing import Any

import torch.nn.functional as F
from torch import Tensor, nn

from sentence_transformers.SentenceTransformer import SentenceTransformer
from sentence_transformers.util import pairwise_cos_sim, pairwise_euclidean_sim, pairwise_manhattan_sim


class TripletDistanceMetric(Enum):
    """The metric for the triplet loss"""

    # Each metric maps a pair of embedding tensors to a pairwise *distance* (lower = more similar).
    # The pairwise_*_sim helpers return similarities (negative distances), hence the sign flips.
    COSINE = lambda x, y: 1 - pairwise_cos_sim(x, y)
    EUCLIDEAN = lambda x, y: -pairwise_euclidean_sim(x, y)
    MANHATTAN = lambda x, y: -pairwise_manhattan_sim(x, y)


class TripletLoss(nn.Module):
    def __init__(
        self, model: SentenceTransformer, distance_metric=TripletDistanceMetric.EUCLIDEAN, triplet_margin: float = 5
    ) -> None:
        """
        This class implements triplet loss. Given a triplet of (anchor, positive, negative),
        the loss minimizes the distance between anchor and positive while it maximizes the distance
        between anchor and negative. It computes the following loss function:

        ``loss = max(||anchor - positive|| - ||anchor - negative|| + margin, 0)``.

        Margin is an important hyperparameter and needs to be tuned for the task at hand.
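
        For example, with the default Euclidean metric and ``triplet_margin=5``, a triplet with
        ``||anchor - positive|| = 1.0`` and ``||anchor - negative|| = 4.0`` yields
        ``loss = max(1.0 - 4.0 + 5, 0) = 2.0``; the loss only reaches zero once the negative is at
        least ``triplet_margin`` further away from the anchor than the positive.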

        Args:
            model: SentenceTransformer model
            distance_metric: Function to compute distance between two
                embeddings. The class TripletDistanceMetric contains
                common distance metrics that can be used.
            triplet_margin: The negative should be at least this much
                further away from the anchor than the positive.

        References:
            - For further details, see: https://en.wikipedia.org/wiki/Triplet_loss

        Requirements:
            1. (anchor, positive, negative) triplets

        Inputs:
            +---------------------------------------+--------+
            | Texts                                 | Labels |
            +=======================================+========+
            | (anchor, positive, negative) triplets | none   |
            +---------------------------------------+--------+

        Example:
            ::

                from sentence_transformers import SentenceTransformer, SentenceTransformerTrainer, losses
                from datasets import Dataset

                model = SentenceTransformer("microsoft/mpnet-base")
                train_dataset = Dataset.from_dict({
                    "anchor": ["It's nice weather outside today.", "He drove to work."],
                    "positive": ["It's so sunny.", "He took the car to the office."],
                    "negative": ["It's quite rainy, sadly.", "She walked to the store."],
                })
                loss = losses.TripletLoss(model=model)

                trainer = SentenceTransformerTrainer(
                    model=model,
                    train_dataset=train_dataset,
                    loss=loss,
                )
                trainer.train()
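
                # Optionally, the distance metric and margin can be set explicitly,
                # e.g. cosine distance with a smaller margin (an illustrative sketch):
                # loss = losses.TripletLoss(
                #     model=model,
                #     distance_metric=losses.TripletDistanceMetric.COSINE,
                #     triplet_margin=0.25,
                # )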
        N)super__init__modeldistance_metrictriplet_margin)selfr)   r*   r+   	__class__s       r   r(   zTripletLoss.__init__   s(    l 	
.,r   c                r    |D cg c]  }| j                  |      d    }}| j                  ||      S c c}w )Nsentence_embedding)r)   compute_loss_from_embeddings)r,   sentence_featureslabelssentence_feature
embeddingss        r   forwardzTripletLoss.forwardR   s?    arsM]djj!123GHs
s00VDD ts   4c                    |\  }}}| j                  ||      }| j                  ||      }t        j                  ||z
  | j                  z         }|j	                         S )z
        Compute the triplet loss from embeddings.

        Args:
            embeddings: List of the three embedding tensors: anchor, positive, and negative

        Returns:
            Loss value
        """
        rep_anchor, rep_pos, rep_neg = embeddings
        distance_pos = self.distance_metric(rep_anchor, rep_pos)
        distance_neg = self.distance_metric(rep_anchor, rep_neg)

        # Hinge loss: penalize triplets where the negative is not at least `triplet_margin` further
        # from the anchor than the positive
        losses = F.relu(distance_pos - distance_neg + self.triplet_margin)
        return losses.mean()

    def get_config_dict(self) -> dict[str, Any]:
        distance_metric_name = self.distance_metric.__name__
        for name, value in vars(TripletDistanceMetric).items():
            if value == self.distance_metric:
                distance_metric_name = f"TripletDistanceMetric.{name}"
                break

        return {"distance_metric": distance_metric_name, "triplet_margin": self.triplet_margin}

    @property
    def citation(self) -> str:
        return """
@misc{hermans2017defense,
    title={In Defense of the Triplet Loss for Person Re-Identification},
    author={Alexander Hermans and Lucas Beyer and Bastian Leibe},
    year={2017},
    eprint={1703.07737},
    archivePrefix={arXiv},
    primaryClass={cs.CV}
}
r"   )r,   s    r   citationzTripletLoss.citationq   s    	r   )r)   r	   r+   floatreturnNone)r1   zIterable[dict[str, Tensor]]r2   r   rJ   r   )r4   zlist[Tensor]r2   r   rJ   r   )rJ   zdict[str, Any])rJ   str)r   r   r   r   r    r(   r5   r0   rF   propertyrH   __classcell__)r-   s   @r   r$   r$      sR    :O:Y:Yst9-(9-kp9-	9-vE
"` 
 
r   r$   )
__future__r   collections.abcr   enumr   typingr   torch.nn.functionalr   
functionalr7   torchr   )sentence_transformers.SentenceTransformerr	   sentence_transformers.utilr
   r   r   r   Moduler$   r"   r   r   <module>rY      sA    " $      I g g:D :f")) fr   