
from __future__ import annotations

from collections.abc import Iterable

import torch.nn.functional as F
from torch import Tensor, nn

from sentence_transformers.SentenceTransformer import SentenceTransformer

from .ContrastiveLoss import SiameseDistanceMetric


class OnlineContrastiveLoss(nn.Module):
    def __init__(
        self, model: SentenceTransformer, distance_metric=SiameseDistanceMetric.COSINE_DISTANCE, margin: float = 0.5
    ) -> None:
        """
        This Online Contrastive loss is similar to :class:`ContrastiveLoss`, but it selects hard positive pairs
        (positives that are far apart) and hard negative pairs (negatives that are close) and computes the loss only
        for these pairs. This loss often yields better performance than :class:`ContrastiveLoss`.

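        As a rough sketch of the selection logic (the variable names below are illustrative; the actual
        implementation additionally falls back to the mean distance when a batch contains fewer than two
        positives or negatives)::

            import torch

            dist = torch.tensor([0.1, 0.9, 0.4, 0.2])  # pairwise distances from distance_metric
            label = torch.tensor([1, 0, 1, 0])  # 1 = positive pair, 0 = negative pair
            poss, negs = dist[label == 1], dist[label == 0]
            hard_pos = poss[poss > negs.min()]  # positives farther apart than the closest negative
            hard_neg = negs[negs < poss.max()]  # negatives closer than the farthest positive
            margin = 0.5
            loss = (hard_pos**2).sum() + (torch.relu(margin - hard_neg) ** 2).sum()
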
        Args:
            model: SentenceTransformer model
            distance_metric: Function that returns a distance between
                two embeddings. The class SiameseDistanceMetric contains
                pre-defined metrics that can be used.
            margin: Negative samples (label == 0) should have a distance
                of at least the margin value.

        References:
            - `Training Examples > Quora Duplicate Questions <../../examples/training/quora_duplicate_questions/README.html>`_

        Requirements:
            1. (anchor, positive/negative) pairs
            2. Data should include hard positives and hard negatives

        Inputs:
            +-----------------------------------------------+------------------------------+
            | Texts                                         | Labels                       |
            +===============================================+==============================+
            | (anchor, positive/negative) pairs             | 1 if positive, 0 if negative |
            +-----------------------------------------------+------------------------------+

        Relations:
            - :class:`ContrastiveLoss` is similar, but does not use hard positive and hard negative pairs.
              :class:`OnlineContrastiveLoss` often yields better results.

        Example:
            ::

                from sentence_transformers import SentenceTransformer, SentenceTransformerTrainer, losses
                from datasets import Dataset

                model = SentenceTransformer("microsoft/mpnet-base")
                train_dataset = Dataset.from_dict({
                    "sentence1": ["It's nice weather outside today.", "He drove to work."],
                    "sentence2": ["It's so sunny.", "She walked to the store."],
                    "label": [1, 0],
                })
                loss = losses.OnlineContrastiveLoss(model)

                trainer = SentenceTransformerTrainer(
                    model=model,
                    train_dataset=train_dataset,
                    loss=loss,
                )
                trainer.train()
        """
        super().__init__()
        self.model = model
        self.margin = margin
        self.distance_metric = distance_metric
.    Fsentence_featuresIterable[dict[str, Tensor]]labelsr   c                     fd|D             }                      |d         |d                   }||dk             }||dk             }||t          |          dk    r|                                n|                                k              }||t          |          dk    r|                                n|                                k             }	|	                    d                                          }
t          j         j	        |z
                                d                                          }|
|z   }|S )Nc                F    g | ]}                     |          d          S )sentence_embedding)r   ).0sentence_featurer   s     r   
<listcomp>z1OnlineContrastiveLoss.forward.<locals>.<listcomp>K   s-    sssM]djj!1223GHsssr   r   r      )
r   lenmaxmeanminpowsumFrelur   )r   r   r   size_average
embeddingsdistance_matrixnegspossnegative_pairspositive_pairspositive_lossnegative_losslosss   `            r   forwardzOnlineContrastiveLoss.forwardJ   s   ssssarsss
..z!}jmLLv{+v{+ dCIIMMdhhjjjtyy{{STdCIIMMdhhjjjtyy{{ST&**1--1133t{^;<<@@CCGGII},r   )r   r   r   r   r   r   )F)r   r   r   r   r   r   )__name__
__module____qualname__r	   COSINE_DISTANCEr   r5   __classcell__)r   s   @r   r   r      se        :O:_qt:/ :/ :/ :/ :/ :/ :/x        r   r   )
__future__r   collections.abcr   torch.nn.functionalr   
functionalr)   torchr   )sentence_transformers.SentenceTransformerr   ContrastiveLossr	   Moduler    r   r   <module>rD      s    " " " " " " $ $ $ $ $ $                  I I I I I I 2 2 2 2 2 2K K K K KBI K K K K Kr   