
from __future__ import annotations

import logging
import os
from collections import OrderedDict
from contextlib import nullcontext
from functools import partial
from typing import TYPE_CHECKING, Any, Callable

import torch
from packaging.version import parse as parse_version
from torch import nn
from torch.utils.data import BatchSampler, ConcatDataset, DataLoader, SubsetRandomSampler
from transformers import EvalPrediction, PreTrainedTokenizerBase, Trainer, TrainerCallback
from transformers import __version__ as transformers_version
from transformers.data.data_collator import DataCollator
from transformers.integrations import WandbCallback
from transformers.trainer import TRAINING_ARGS_NAME
from transformers.trainer_utils import EvalLoopOutput

from sentence_transformers.data_collator import SentenceTransformerDataCollator
from sentence_transformers.evaluation import SentenceEvaluator, SequentialEvaluator
from sentence_transformers.losses.CoSENTLoss import CoSENTLoss
from sentence_transformers.model_card import ModelCardCallback
from sentence_transformers.models import Pooling
from sentence_transformers.models.Transformer import Transformer
from sentence_transformers.sampler import (
    DefaultBatchSampler,
    GroupByLabelBatchSampler,
    NoDuplicatesBatchSampler,
    ProportionalBatchSampler,
    RoundRobinBatchSampler,
)
from sentence_transformers.training_args import (
    BatchSamplers,
    MultiDatasetBatchSamplers,
    SentenceTransformerTrainingArguments,
)
from sentence_transformers.util import disable_logging, is_datasets_available, is_training_available

if is_datasets_available():
    from datasets import Dataset, DatasetDict, IterableDataset, IterableDatasetDict, Value

logger = logging.getLogger(__name__)

if TYPE_CHECKING:
    from sentence_transformers.SentenceTransformer import SentenceTransformer


class SentenceTransformerTrainer(Trainer):
    """
    SentenceTransformerTrainer is a simple but feature-complete training and eval loop for PyTorch
    based on the 🤗 Transformers :class:`~transformers.Trainer`.

    This trainer integrates support for various :class:`transformers.TrainerCallback` subclasses, such as:

    - :class:`~transformers.integrations.WandbCallback` to automatically log training metrics to W&B if `wandb` is installed
    - :class:`~transformers.integrations.TensorBoardCallback` to log training metrics to TensorBoard if `tensorboard` is accessible.
    - :class:`~transformers.integrations.CodeCarbonCallback` to track the carbon emissions of your model during training if `codecarbon` is installed.

        - Note: These carbon emissions will be included in your automatically generated model card.

    See the Transformers `Callbacks <https://huggingface.co/docs/transformers/main/en/main_classes/callback>`_
    documentation for more information on the integrated callbacks and how to write your own callbacks.

    Args:
        model (:class:`~sentence_transformers.SentenceTransformer`, *optional*):
            The model to train, evaluate or use for predictions. If not provided, a `model_init` must be passed.
        args (:class:`~sentence_transformers.training_args.SentenceTransformerTrainingArguments`, *optional*):
            The arguments to tweak for training. Will default to a basic instance of
            :class:`~sentence_transformers.training_args.SentenceTransformerTrainingArguments` with the
            `output_dir` set to a directory named *tmp_trainer* in the current directory if not provided.
        train_dataset (Union[:class:`datasets.Dataset`, :class:`datasets.DatasetDict`, :class:`datasets.IterableDataset`, Dict[str, :class:`datasets.Dataset`]], *optional*):
            The dataset to use for training. Must have a format accepted by your loss function, see
            `Training Overview > Dataset Format <../../../docs/sentence_transformer/training_overview.html#dataset-format>`_.
        eval_dataset (Union[:class:`datasets.Dataset`, :class:`datasets.DatasetDict`, :class:`datasets.IterableDataset`, Dict[str, :class:`datasets.Dataset`]], *optional*):
            The dataset to use for evaluation. Must have a format accepted by your loss function, see
            `Training Overview > Dataset Format <../../../docs/sentence_transformer/training_overview.html#dataset-format>`_.
        loss (Optional[Union[:class:`torch.nn.Module`, Dict[str, :class:`torch.nn.Module`],\
            Callable[[:class:`~sentence_transformers.SentenceTransformer`], :class:`torch.nn.Module`],\
            Dict[str, Callable[[:class:`~sentence_transformers.SentenceTransformer`]]]], *optional*):
            The loss function to use for training. Can either be a loss class instance, a dictionary mapping
            dataset names to loss class instances, a function that returns a loss class instance given a model,
            or a dictionary mapping dataset names to functions that return a loss class instance given a model.
            In practice, the latter two are primarily used for hyper-parameter optimization. Will default to
            :class:`~sentence_transformers.losses.CoSENTLoss` if no ``loss`` is provided.
        evaluator (Union[:class:`~sentence_transformers.evaluation.SentenceEvaluator`,\
            List[:class:`~sentence_transformers.evaluation.SentenceEvaluator`]], *optional*):
            The evaluator instance for useful evaluation metrics during training. You can use an ``evaluator`` with
            or without an ``eval_dataset``, and vice versa. Generally, the metrics that an ``evaluator`` returns
            are more useful than the loss value returned from the ``eval_dataset``. A list of evaluators will be
            wrapped in a :class:`~sentence_transformers.evaluation.SequentialEvaluator` to run them sequentially.
        callbacks (List of [:class:`transformers.TrainerCallback`], *optional*):
            A list of callbacks to customize the training loop. Will add those to the list of default callbacks
            detailed in [here](callback).

            If you want to remove one of the default callbacks used, use the [`Trainer.remove_callback`] method.
        optimizers (`Tuple[:class:`torch.optim.Optimizer`, :class:`torch.optim.lr_scheduler.LambdaLR`]`, *optional*, defaults to `(None, None)`):
            A tuple containing the optimizer and the scheduler to use. Will default to an instance of :class:`torch.optim.AdamW`
            on your model and a scheduler given by :func:`transformers.get_linear_schedule_with_warmup` controlled by `args`.

    Important attributes:

        - **model** -- Always points to the core model. If using a transformers model, it will be a [`PreTrainedModel`]
          subclass.
        - **model_wrapped** -- Always points to the most external model in case one or more other modules wrap the
          original model. This is the model that should be used for the forward pass. For example, under `DeepSpeed`,
          the inner model is wrapped in `DeepSpeed` and then again in `torch.nn.DistributedDataParallel`. If the inner
          model hasn't been wrapped, then `self.model_wrapped` is the same as `self.model`.
        - **is_model_parallel** -- Whether or not a model has been switched to a model parallel mode (different from
          data parallelism, this means some of the model layers are split on different GPUs).
        - **place_model_on_device** -- Whether or not to automatically place the model on the device - it will be set
          to `False` if model parallel or deepspeed is used, or if the default
          `TrainingArguments.place_model_on_device` is overridden to return `False` .
        - **is_in_train** -- Whether or not a model is currently running `train` (e.g. when `evaluate` is called while
          in `train`)
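
    Example (a minimal usage sketch; the model name and the toy dataset below are illustrative,
    not part of this module)::

        from datasets import Dataset
        from sentence_transformers import SentenceTransformer, SentenceTransformerTrainer
        from sentence_transformers.losses import MultipleNegativesRankingLoss

        model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")
        train_dataset = Dataset.from_dict(
            {
                "anchor": ["It's nice weather outside today.", "He drove to work."],
                "positive": ["It's so sunny.", "He took the car to the office."],
            }
        )
        trainer = SentenceTransformerTrainer(
            model=model,
            train_dataset=train_dataset,
            loss=MultipleNegativesRankingLoss(model),
        )
        trainer.train()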

    NNNmodelSentenceTransformer | Noneargsr'   train_datasetCDataset | DatasetDict | IterableDataset | dict[str, Dataset] | Noneeval_datasetlossnn.Module | dict[str, nn.Module] | Callable[[SentenceTransformer], torch.nn.Module] | dict[str, Callable[[SentenceTransformer], torch.nn.Module]] | None	evaluator2SentenceEvaluator | list[SentenceEvaluator] | Nonedata_collatorDataCollator | None	tokenizer)PreTrainedTokenizerBase | Callable | None
model_init(Callable[[], SentenceTransformer] | Nonecompute_metrics'Callable[[EvalPrediction], dict] | None	callbackslist[TrainerCallback] | None
optimizers?tuple[torch.optim.Optimizer, torch.optim.lr_scheduler.LambdaLR]preprocess_logits_for_metrics;Callable[[torch.Tensor, torch.Tensor], torch.Tensor] | NonereturnNonec                
    t                      st          d          |1d}t                              d| d           t	          |          }n$t          |t                    st          d          -|	|	 _                                         n2t          d          |	t          	                    d           |	 _        |
t          	                    d	           t	          d
          
                                }|j        r+j        j        sj                            |j                   |!t          j        t                     rj        }|t#          j                  }t'          ddg||g          D ]\  }}t          |t(                    r}|j        vt-          t/          |                    }t0          dt2          dt4          dt6          difd|                                D             }t          d| d| d| d| d	          t          |t:                    r$t          |t<                    st=          |          }t          |t:                    r$t          |t<                    st=          |          } j        rd n||||||nd|	|
|||d
}t?          t@                    t?          d          k    r||d<   n||d<    tC                      j"        d.i |  j#        dk    rd  _#        d _$        i  _%              tM          d  j'        j(        D                       r tR          j*        +                    dd            |.t                              d!           tY           j-                  }t          |t:                    r fd"|                                D              _.        t'          ddg||g          D ]\  }}|t          |t:                    st          d#| d$          t_          |0                                          t_          |0                                          z
  x}r>t          d%| d&tc          |           d'te          |          d(k    rd)nd* d+| d,	          n 3                    |           _.        |$t          |th                    stk          |          }| _6         j7        " 8                    ||j9        d-           _7         j#        " 8                    ||j9        d-           _#         :                    |           d S )/NzTo train a SentenceTransformer model, you need to install the `accelerate` and `datasets` modules. You can do so with the `train` extra:
pip install -U "sentence-transformers[train]"tmp_trainerz1No `TrainingArguments` passed, using `output_dir=z`.)
output_dirzEPlease use `TrainingArguments` imported from `sentence_transformers`.z<`Trainer` requires either a `model` or `model_init` argumentz`Trainer` requires either a `model` or `model_init` argument, but not both. `model_init` will overwrite your model when calling the `train` method.z`compute_metrics` is currently not compatible with the SentenceTransformerTrainer. Please use the `evaluator` argument instead for detailed evaluation metrics, or the `eval_dataset` argument for the evaluation loss.unused)tokenize_fntrainevalstringint64float32boolc           
     x    i | ]6\  }}|t                              t          |          d                     7S )null)r/   gettype).0keyvaluenaive_type_mappings      Y/var/www/html/ai-engine/env/lib/python3.11/site-packages/sentence_transformers/trainer.py
<dictcomp>z7SentenceTransformerTrainer.__init__.<locals>.<dictcomp>   sM     $ $ $PZPSUZC155d5kk6JJKK$ $ $    zThe provided `z6_dataset` must have Features. Specify them with e.g.:
z_dataset = z_dataset.cast(Features(z))
or by providing the Features to the IterableDataset initialization method. See the Datasets documentation for more information on dataset Features: https://huggingface.co/docs/datasets/en/about_dataset_featuresdummy)
r5   r7   r?   r8   r:   rC   rE   rG   rI   rK   4.46.0processing_classrA   Tc                8    g | ]}t          |t                    S  )
isinstancer   )r^   callbacks     rb   
<listcomp>z7SentenceTransformerTrainer.__init__.<locals>.<listcomp>   s"    ddd
8]33dddrd   WANDB_PROJECTzsentence-transformersz@No `loss` passed, using `losses.CoSENTLoss` as a default option.c                D    i | ]\  }}|                     |          S ri   )prepare_loss)r^   dataset_nameloss_fnr5   selfs      rb   rc   z7SentenceTransformerTrainer.__init__.<locals>.<dictcomp>   s1    sssMb\[bt'8'8%'H'Hsssrd   z,If the provided `loss` is a dict, then the `z"_dataset` must be a `DatasetDict`.z:If the provided `loss` is a dict, then all keys from the `z;_dataset` dictionary must occur in `loss` also. Currently, z occur   s z in `z_dataset` but not in `loss`.rp   ri   );r*   RuntimeErrorloggerinfor'   rj   
ValueErrorrC   call_model_initwarningto_dicthub_model_idmodel_card_datamodel_idset_model_idrA   r   r   tokenizezipr-   column_namesnextiterstrintfloatrY   itemsdictr,   parse_versiontransformers_versionsuper__init__r:   can_return_loss_prompt_length_mappinganycallback_handlerrG   osenviron
setdefaultr   r5   r;   setkeyssortedlenro   r   r   r=   r8   (maybe_add_prompts_or_dataset_name_columnpromptsadd_model_card_callback)rr   r5   r7   r8   r:   r;   r=   r?   rA   rC   rE   rG   rI   rK   rQ   default_args_dictrp   datasetsampleexample_featuressuper_kwargsmissingra   	__class__s   ``                    @rb   r   z#SentenceTransformerTrainer.__init__x   sL   ( %&& 	@   <&JKKZJZZZ[[[7:NNNDDD"FGG 	fdeee=%",,,.."#abbb%M   )DO&NN'   AHUUU]]__  	BU%:%C 	B!..t/@AAAEO=T!U!UI ;WWWM%('6):]L<Y%Z%Z 	 	!L''?33 8L8Td7mm,,&)8S'5)UY[a%b"$ $ $ $^d^j^j^l^l$ $ $  !U\ U U#U U0<U UUeU U U   mT** 	7:m[3Y3Y 	7'66MlD)) 	5*\;2W2W 	5&|44L!_7TT%**,8,D	HYLL_f$."$-J
 
 -..-2I2III/8L+,,(1L%((<((( '' $D  $&(#'7;ddDDYDcdddee 	LJ!!/3JKKK<KKZ[[[dj))DdD!! 	7sssssfjfpfpfrfrsssDI),gv->P\@])^)^  %g?!'400 $w|www   "',,..11C		4D4DDD7 $OUa O O&,WooO ODGLLTUDUDUSS[]O OdpO O O   ))$66DI  I?P)Q)Q +I66I")!%!N!Nt|' "O " "D ( $ M Mdl !N ! !D 	$$%677777rd   r   dict[str, Any]c                    t          | |          }|                     |           |                    | j        | j        | j        | j                   dS )ah  
        Add a callback responsible for automatically tracking data required for the automatic model card generation

        This method is called in the ``__init__`` method of the
        :class:`~sentence_transformers.trainer.SentenceTransformerTrainer` class.

        Args:
            default_args_dict (Dict[str, Any]): A dictionary of the default training arguments, so we can determine
                which arguments have been changed for the model card.

        .. note::

            This method can be overridden by subclassing the trainer to remove/customize this callback in custom use cases
        """
        model_card_callback = ModelCardCallback(self, default_args_dict)
        self.add_callback(model_card_callback)
        model_card_callback.on_init_end(self.args, self.state, self.control, self.model)

    def call_model_init(self, trial=None) -> SentenceTransformer:
        model = super().call_model_init(trial=trial)
        # If the Trainer does not have a loss yet, there is nothing to override
        if not hasattr(self, "loss"):
            return model

        if isinstance(self.loss, dict):
            for key, loss_fn in self.loss.items():
                # If a loss function is not yet initialized, initialize it with the new model
                if not isinstance(loss_fn, torch.nn.Module):
                    self.loss[key] = loss_fn(model)
                # Otherwise, override the model stored inside the initialized loss
                elif hasattr(loss_fn, "model"):
                    self.loss[key] = self.override_model_in_loss(loss_fn, model)
        elif not isinstance(self.loss, torch.nn.Module):
            self.loss = self.loss(model)
        elif hasattr(self.loss, "model"):
            self.loss = self.override_model_in_loss(self.loss, model)
        return model

    def override_model_in_loss(self, loss: torch.nn.Module, model: SentenceTransformer) -> torch.nn.Module:
        from sentence_transformers import SentenceTransformer

        for name, child in loss.named_children():
            if name == "model" and isinstance(child, SentenceTransformer):
                loss.model = model
            elif isinstance(child, torch.nn.Module):
                setattr(loss, name, self.override_model_in_loss(child, model))
        return loss

    def prepare_loss(
        self,
        loss: Callable[[SentenceTransformer], torch.nn.Module] | torch.nn.Module,
        model: SentenceTransformer,
    ) -> torch.nn.Module:
        # A loss may be an initialized module, or a callable that takes the model and returns a module
        if isinstance(loss, torch.nn.Module):
            return loss.to(model.device)
        return loss(model).to(model.device)

    def add_dataset_name_column(self, dataset_dict: DatasetDict) -> DatasetDict:
        for key, dataset in dataset_dict.items():
            if "dataset_name" not in dataset.column_names:
                dataset_dict[key] = dataset.add_column("dataset_name", [key] * len(dataset))
        return dataset_dict

    def compute_loss(
        self,
        model: SentenceTransformer,
        inputs: dict[str, torch.Tensor | Any],
        return_outputs: bool = False,
        num_items_in_batch=None,
    ) -> torch.Tensor | tuple[torch.Tensor, dict[str, Any]]:
        """
        Computes the loss for the SentenceTransformer model.

        It uses ``self.loss`` to compute the loss, which can be a single loss function or a dictionary of loss functions
        for different datasets. If the loss is a dictionary, the dataset name is expected to be passed in the inputs
        under the key "dataset_name". This is done automatically in the ``add_dataset_name_column`` method.
        Note that even if ``return_outputs = True``, the outputs will be empty, as the SentenceTransformers losses do not
        return outputs.

        Args:
            model (SentenceTransformer): The SentenceTransformer model.
            inputs (Dict[str, Union[torch.Tensor, Any]]): The input data for the model.
            return_outputs (bool, optional): Whether to return the outputs along with the loss. Defaults to False.
            num_items_in_batch (int, optional): The number of items in the batch. Defaults to None. Unused, but required by the transformers Trainer.

        Returns:
            Union[torch.Tensor, Tuple[torch.Tensor, Dict[str, Any]]]: The computed loss. If `return_outputs` is True, returns a tuple of loss and outputs. Otherwise, returns only the loss.
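
        Example (an illustrative sketch of the dict-loss case; the dataset and loss names, and the
        ``model``/``stsb_dataset``/``nli_dataset`` variables, are assumptions)::

            # With a DatasetDict and a dict of losses, each batch automatically carries a
            # "dataset_name" key, which this method uses to select the matching loss:
            trainer = SentenceTransformerTrainer(
                model=model,
                train_dataset=DatasetDict({"stsb": stsb_dataset, "nli": nli_dataset}),
                loss={"stsb": CoSENTLoss(model), "nli": MultipleNegativesRankingLoss(model)},
            )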
        """
        dataset_name = inputs.pop("dataset_name", None)
        features, labels = self.collect_features(inputs)
        loss_fn = self.loss
        if isinstance(loss_fn, dict) and dataset_name:
            loss_fn = loss_fn[dataset_name]

        # Insert the wrapped (e.g. distributed or compiled) model into the loss function,
        # if the loss stores the model. Only called once per process.
        if (
            model == self.model_wrapped
            and model != self.model  # Only if the model is wrapped
            and hasattr(loss_fn, "model")  # Only if the loss stores the model
            and loss_fn.model != model  # Only if the wrapped model is not already stored
        ):
            loss_fn = self.override_model_in_loss(loss_fn, model)
        loss = loss_fn(features, labels)
        if return_outputs:
            # During prediction/evaluation, `compute_loss` will be called with `return_outputs=True`.
            # However, Sentence Transformer losses do not return outputs, so we return an empty dict.
            return loss, {}
        return loss

    def collect_features(
        self, inputs: dict[str, torch.Tensor | Any]
    ) -> tuple[list[dict[str, torch.Tensor]], torch.Tensor | None]:
        """Turn the inputs from the dataloader into the separate model inputs & the labels.

        Example::

            >>> list(inputs.keys())
            ['return_loss', 'label', 'sentence_0_input_ids', 'sentence_0_token_type_ids', 'sentence_0_attention_mask', 'sentence_1_input_ids', 'sentence_1_token_type_ids', 'sentence_1_attention_mask']
            >>> features, labels = self.collect_features(inputs)
            >>> len(features)
            2
            >>> list(features[0].keys())
            ['input_ids', 'token_type_ids', 'attention_mask']
            >>> list(features[1].keys())
            ['input_ids', 'token_type_ids', 'attention_mask']
            >>> torch.equal(labels, inputs["label"])
            True
        
        """
        # All inputs ending with `_input_ids` (Transformers), `_sentence_embedding` (BoW), or
        # `_pixel_values` (CLIPModel) are considered to correspond to a feature
        features = []
        for column in inputs:
            if column.endswith("_input_ids"):
                prefix = column[: -len("input_ids")]
            elif column.endswith("_sentence_embedding"):
                prefix = column[: -len("sentence_embedding")]
            elif column.endswith("_pixel_values"):
                prefix = column[: -len("pixel_values")]
            else:
                continue
            features.append({key[len(prefix) :]: value for key, value in inputs.items() if key.startswith(prefix)})
        labels = inputs.get("label", None)
        return features, labels

    def evaluate(
        self,
        eval_dataset: Dataset | dict[str, Dataset] | None = None,
        ignore_keys: list[str] | None = None,
        metric_key_prefix: str = "eval",
    ) -> dict[str, float]:
        if eval_dataset:
            eval_dataset = self.maybe_add_prompts_or_dataset_name_column(
                eval_dataset, self.args.prompts, dataset_name="eval"
            )
        else:
            eval_dataset = self.eval_dataset
        return super().evaluate(eval_dataset, ignore_keys, metric_key_prefix)

    def evaluation_loop(
        self,
        dataloader: DataLoader,
        description: str,
        prediction_loss_only: bool | None = None,
        ignore_keys: list[str] | None = None,
        metric_key_prefix: str = "eval",
    ) -> EvalLoopOutput:
        output = super().evaluation_loop(
            dataloader=dataloader,
            description=description,
            prediction_loss_only=prediction_loss_only,
            ignore_keys=ignore_keys,
            metric_key_prefix=metric_key_prefix,
        )

        # If the evaluator is not defined, we can just return the output
        if self.evaluator is None:
            return output

        # If we are training and eval_dataset is a DatasetDict, then we should
        # 1) only run the evaluator for the first dataset, and
        # 2) prefix that run with "eval", rather than e.g. "eval_multi_nli"
        if self.is_in_train and isinstance(self.eval_dataset, dict) and metric_key_prefix.startswith("eval_"):
            if metric_key_prefix[5:] == list(self.eval_dataset.keys())[0]:
                metric_key_prefix = "eval"
            else:
                return output

        with nullcontext() if self.is_local_process_zero() else disable_logging(logging.INFO):
            evaluator_metrics = self.evaluator(self.model)
        if not isinstance(evaluator_metrics, dict):
            evaluator_metrics = {"evaluator": evaluator_metrics}

        # Prefix all keys with metric_key_prefix + '_'
        for key in list(evaluator_metrics.keys()):
            if not key.startswith(f"{metric_key_prefix}_"):
                evaluator_metrics[f"{metric_key_prefix}_{key}"] = evaluator_metrics.pop(key)

        output.metrics.update(evaluator_metrics)

        return output

    def _load_best_model(self) -> None:
        # Loading the best model is only supported for `transformers`-based models
        if not isinstance(self.model[0], Transformer):
            logger.info("Could not load best model, as the model is not a `transformers`-based model.")
            return

        try:
            if checkpoint := self.state.best_model_checkpoint:
                step = checkpoint.rsplit("-", 1)[-1]
                self.model.model_card_data.set_best_model_step(int(step))
        except Exception:
            pass

        # Override the model with the `transformers`-based auto_model, and restore the original
        # `SentenceTransformer` model with the loaded `transformers` model afterwards
        full_model = self.model
        self.model = self.model[0].auto_model
        try:
            return super()._load_best_model()
        finally:
            loaded_auto_model = self.model
            self.model = full_model
            self.model[0].auto_model = loaded_auto_model

    def validate_column_names(self, dataset: Dataset, dataset_name: str | None = None) -> None:
        if isinstance(dataset, dict):
            for dataset_name, dataset in dataset.items():
                self.validate_column_names(dataset, dataset_name=dataset_name)
            return

        if overlap := set(dataset.column_names) & {"return_loss", "dataset_name"}:
            raise ValueError(
                f"The following column names are invalid in your {dataset_name + ' ' if dataset_name else ''}"
                f"dataset: {list(overlap)}. Avoid using these column names, as they are reserved for internal use."
            )

    def get_batch_sampler(
        self,
        dataset: Dataset,
        batch_size: int,
        drop_last: bool,
        valid_label_columns: list[str] | None = None,
        generator: torch.Generator | None = None,
    ) -> BatchSampler | None:
        """
        Returns the appropriate batch sampler based on the ``batch_sampler`` argument in ``self.args``.
        This batch sampler class supports ``__len__`` and ``__iter__`` methods, and is used as the ``batch_sampler``
        to create the :class:`torch.utils.data.DataLoader`.

        .. note::
            Override this method to provide a custom batch sampler.

        Args:
            dataset (Dataset): The dataset to sample from.
            batch_size (int): Number of samples per batch.
            drop_last (bool): If True, drop the last incomplete batch if the dataset size
                is not divisible by the batch size.
            valid_label_columns (List[str]): List of column names to check for labels.
                The first column name from ``valid_label_columns`` found in the dataset will
                be used as the label column.
            generator (torch.Generator, optional): Optional random number generator for shuffling
                the indices.
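
        Example (a sketch of a subclass override; ``MyBatchSampler`` is a hypothetical class, not
        part of this library)::

            class MyTrainer(SentenceTransformerTrainer):
                def get_batch_sampler(self, dataset, batch_size, drop_last, valid_label_columns=None, generator=None):
                    # Return any object implementing __len__ and __iter__ over batches of indices
                    return MyBatchSampler(dataset, batch_size=batch_size, drop_last=drop_last)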
        """
        if isinstance(dataset, IterableDataset):
            if self.args.batch_sampler != BatchSamplers.BATCH_SAMPLER:
                logger.warning("When using an IterableDataset, you cannot specify a batch sampler.")
            return None

        if self.args.batch_sampler == BatchSamplers.NO_DUPLICATES:
            return NoDuplicatesBatchSampler(
                dataset=dataset,
                batch_size=batch_size,
                drop_last=drop_last,
                valid_label_columns=valid_label_columns,
                generator=generator,
            )

        if self.args.batch_sampler == BatchSamplers.GROUP_BY_LABEL:
            return GroupByLabelBatchSampler(
                dataset=dataset,
                batch_size=batch_size,
                drop_last=drop_last,
                valid_label_columns=valid_label_columns,
            )

        if self.args.batch_sampler == BatchSamplers.BATCH_SAMPLER:
            return DefaultBatchSampler(
                SubsetRandomSampler(range(len(dataset)), generator=generator),
                batch_size=batch_size,
                drop_last=drop_last,
            )

    def get_multi_dataset_batch_sampler(
        self,
        dataset: ConcatDataset,
        batch_samplers: list[BatchSampler],
        generator: torch.Generator | None = None,
        seed: int | None = 0,
    ) -> BatchSampler:
        """
        Returns the appropriate multi-dataset batch sampler based on the ``multi_dataset_batch_sampler`` argument
        in ``self.args``. This batch sampler class supports ``__len__`` and ``__iter__`` methods, and is used as the
        ``batch_sampler`` to create the :class:`torch.utils.data.DataLoader`.

        .. note::
            Override this method to provide a custom multi-dataset batch sampler.

        Args:
            dataset (ConcatDataset): The concatenation of all datasets.
            batch_samplers (List[BatchSampler]): List of batch samplers for each dataset in the concatenated dataset.
            generator (torch.Generator, optional): Optional random number generator for shuffling the indices.
            seed (int, optional): Optional seed for the random number generator
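
        Example (a sketch; the sampler choice is driven by the training arguments, and the output
        directory name is illustrative)::

            args = SentenceTransformerTrainingArguments(
                output_dir="out",
                # ROUND_ROBIN alternates between datasets until the smallest is exhausted;
                # PROPORTIONAL (the default) samples each dataset in proportion to its size
                multi_dataset_batch_sampler=MultiDatasetBatchSamplers.ROUND_ROBIN,
            )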
        """
        if self.args.multi_dataset_batch_sampler == MultiDatasetBatchSamplers.ROUND_ROBIN:
            return RoundRobinBatchSampler(
                dataset=dataset,
                batch_samplers=batch_samplers,
                generator=generator,
                seed=seed,
            )
        if self.args.multi_dataset_batch_sampler == MultiDatasetBatchSamplers.PROPORTIONAL:
            return ProportionalBatchSampler(
                dataset=dataset,
                batch_samplers=batch_samplers,
                generator=generator,
                seed=seed,
            )

    def get_train_dataloader(self) -> DataLoader:
        """
        Returns the training [`~torch.utils.data.DataLoader`].

        Will use no sampler if `train_dataset` does not implement `__len__`, a random sampler (adapted to distributed
        training if necessary) otherwise.

        Subclass and override this method if you want to inject some custom behavior.
        """
        if self.train_dataset is None:
            raise ValueError("Training requires specifying a train_dataset to the SentenceTransformerTrainer.")

        train_dataset = self.train_dataset
        data_collator = self.data_collator
        generator = torch.Generator()
        if self.args.seed:
            generator.manual_seed(self.args.seed)

        dataloader_params = {
            "collate_fn": data_collator,
            "num_workers": self.args.dataloader_num_workers,
            "pin_memory": self.args.dataloader_pin_memory,
            "persistent_workers": self.args.dataloader_persistent_workers,
            "prefetch_factor": self.args.dataloader_prefetch_factor,
        }

        if isinstance(train_dataset, IterableDataset):
            dataloader_params.update(
                {
                    "batch_size": self.args.train_batch_size,
                    "drop_last": self.args.dataloader_drop_last,
                }
            )
            if self.args.batch_sampler != BatchSamplers.BATCH_SAMPLER:
                logger.warning("When using an IterableDataset, you cannot specify a batch sampler.")

        elif isinstance(train_dataset, IterableDatasetDict):
            raise ValueError(
                "Sentence Transformers is not compatible with IterableDatasetDict. Please use a DatasetDict instead."
            )

        elif isinstance(train_dataset, DatasetDict):
            for dataset in train_dataset.values():
                if isinstance(dataset, IterableDataset):
                    raise ValueError(
                        "Sentence Transformers is not compatible with a DatasetDict containing an IterableDataset."
                    )

            batch_samplers = [
                self.get_batch_sampler(
                    dataset,
                    batch_size=self.args.train_batch_size,
                    drop_last=self.args.dataloader_drop_last,
                    valid_label_columns=data_collator.valid_label_columns,
                    generator=generator,
                )
                for dataset in train_dataset.values()
            ]

            train_dataset = ConcatDataset(train_dataset.values())
            batch_sampler = self.get_multi_dataset_batch_sampler(
                dataset=train_dataset,
                batch_samplers=batch_samplers,
                generator=generator,
                seed=self.args.seed,
            )
            dataloader_params["batch_sampler"] = batch_sampler

        elif isinstance(train_dataset, Dataset):
            batch_sampler = self.get_batch_sampler(
                train_dataset,
                batch_size=self.args.train_batch_size,
                drop_last=self.args.dataloader_drop_last,
                valid_label_columns=data_collator.valid_label_columns,
                generator=generator,
            )
            dataloader_params["batch_sampler"] = batch_sampler
        else:
            raise ValueError(
                "Unsupported `train_dataset` type. Use a Dataset, DatasetDict, or IterableDataset for training."
            )

        # If 'even_batches' is True, the initial few samples are reused to pad out the last batch, which can
        # cause issues with multi-dataset training, so we set it to False during training
        self.accelerator.even_batches = False
        self._train_dataloader = self.accelerator.prepare(DataLoader(train_dataset, **dataloader_params))
        return self._train_dataloader

    def get_eval_dataloader(self, eval_dataset: Dataset | None = None) -> DataLoader:
        """
        Returns the evaluation [`~torch.utils.data.DataLoader`].

        Subclass and override this method if you want to inject some custom behavior.

        Args:
            eval_dataset (`torch.utils.data.Dataset`, *optional*):
                If provided, will override `self.eval_dataset`. If it is a [`~datasets.Dataset`], columns not accepted
                by the `model.forward()` method are automatically removed. It must implement `__len__`.
        """
        if eval_dataset is None and self.eval_dataset is None:
            # If we have an evaluator, we can still evaluate without an eval_dataset via an empty dataloader
            if self.evaluator is not None:
                return DataLoader([])
            raise ValueError("Evaluation requires specifying an eval_dataset to the SentenceTransformerTrainer.")

        eval_dataset = eval_dataset if eval_dataset is not None else self.eval_dataset
        data_collator = self.data_collator
        generator = torch.Generator()
        if self.args.seed:
            generator.manual_seed(self.args.seed)

        dataloader_params = {
            "collate_fn": data_collator,
            "num_workers": self.args.dataloader_num_workers,
            "pin_memory": self.args.dataloader_pin_memory,
            "persistent_workers": self.args.dataloader_persistent_workers,
            "prefetch_factor": self.args.dataloader_prefetch_factor,
        }

        if isinstance(eval_dataset, IterableDataset):
            dataloader_params.update(
                {
                    "batch_size": self.args.eval_batch_size,
                    "drop_last": self.args.dataloader_drop_last,
                }
            )
        elif isinstance(eval_dataset, IterableDatasetDict):
            raise ValueError(
                "Sentence Transformers is not compatible with IterableDatasetDict. Please use a DatasetDict instead."
            )
        elif isinstance(eval_dataset, DatasetDict):
            for dataset in eval_dataset.values():
                if isinstance(dataset, IterableDataset):
                    raise ValueError(
                        "Sentence Transformers is not compatible with a DatasetDict containing an IterableDataset."
                    )

            batch_samplers = [
                self.get_batch_sampler(
                    dataset,
                    batch_size=self.args.eval_batch_size,
                    drop_last=self.args.dataloader_drop_last,
                    valid_label_columns=data_collator.valid_label_columns,
                    generator=generator,
                )
                for dataset in eval_dataset.values()
            ]

            eval_dataset = ConcatDataset(eval_dataset.values())
            batch_sampler = self.get_multi_dataset_batch_sampler(
                dataset=eval_dataset,
                batch_samplers=batch_samplers,
                generator=generator,
                seed=self.args.seed,
            )
            dataloader_params["batch_sampler"] = batch_sampler
        elif isinstance(eval_dataset, Dataset):
            batch_sampler = self.get_batch_sampler(
                eval_dataset,
                batch_size=self.args.eval_batch_size,
                drop_last=self.args.dataloader_drop_last,
                valid_label_columns=data_collator.valid_label_columns,
                generator=generator,
            )
            dataloader_params["batch_sampler"] = batch_sampler
        else:
            raise ValueError(
                "Unsupported `eval_dataset` type. Use a Dataset, DatasetDict, or IterableDataset for evaluation."
            )

        # For evaluation, setting 'even_batches' to False can result in hanging, so we keep it as True here
        self.accelerator.even_batches = True
        return self.accelerator.prepare(DataLoader(eval_dataset, **dataloader_params))

    def get_test_dataloader(self, test_dataset: Dataset | DatasetDict | IterableDataset) -> DataLoader:
        """
        Returns the test [`~torch.utils.data.DataLoader`].

        Subclass and override this method if you want to inject some custom behavior.

        Args:
            test_dataset (`torch.utils.data.Dataset`, *optional*):
                The test dataset to use. If it is a [`~datasets.Dataset`], columns not accepted by the
                `model.forward()` method are automatically removed. It must implement `__len__`.
        """
        data_collator = self.data_collator
        generator = torch.Generator()
        if self.args.seed:
            generator.manual_seed(self.args.seed)

        dataloader_params = {
            "collate_fn": data_collator,
            "num_workers": self.args.dataloader_num_workers,
            "pin_memory": self.args.dataloader_pin_memory,
            "persistent_workers": self.args.dataloader_persistent_workers,
            "prefetch_factor": self.args.dataloader_prefetch_factor,
        }

        if isinstance(test_dataset, IterableDataset):
            dataloader_params.update(
                {
                    "batch_size": self.args.eval_batch_size,
                    "drop_last": self.args.dataloader_drop_last,
                }
            )
        elif isinstance(test_dataset, IterableDatasetDict):
            raise ValueError(
                "Sentence Transformers is not compatible with IterableDatasetDict. Please use a DatasetDict instead."
            )
        elif isinstance(test_dataset, DatasetDict):
            for dataset in test_dataset.values():
                if isinstance(dataset, IterableDataset):
                    raise ValueError(
                        "Sentence Transformers is not compatible with a DatasetDict containing an IterableDataset."
                    )

            batch_samplers = [
                self.get_batch_sampler(
                    dataset,
                    batch_size=self.args.eval_batch_size,
                    drop_last=self.args.dataloader_drop_last,
                    valid_label_columns=data_collator.valid_label_columns,
                    generator=generator,
                )
                for dataset in test_dataset.values()
            ]

            test_dataset = ConcatDataset(test_dataset.values())
            batch_sampler = self.get_multi_dataset_batch_sampler(
                dataset=test_dataset,
                batch_samplers=batch_samplers,
                generator=generator,
                seed=self.args.seed,
            )
            dataloader_params["batch_sampler"] = batch_sampler
        elif isinstance(test_dataset, Dataset):
            batch_sampler = self.get_batch_sampler(
                test_dataset,
                batch_size=self.args.eval_batch_size,
                drop_last=self.args.dataloader_drop_last,
                valid_label_columns=data_collator.valid_label_columns,
                generator=generator,
            )
            dataloader_params["batch_sampler"] = batch_sampler
        else:
            raise ValueError(
                "Unsupported `test_dataset` type. Use a Dataset, DatasetDict, or IterableDataset for testing."
            )

        # For evaluation, setting 'even_batches' to False can result in hanging, so we keep it as True here
        self.accelerator.even_batches = True
        return self.accelerator.prepare(DataLoader(test_dataset, **dataloader_params))

    def _save(self, output_dir: str | None = None, state_dict=None) -> None:
        output_dir = output_dir if output_dir is not None else self.args.output_dir
        os.makedirs(output_dir, exist_ok=True)
        logger.info(f"Saving model checkpoint to {output_dir}")

        self.model.save_pretrained(output_dir, safe_serialization=self.args.save_safetensors)

        # Transformers v4.46.0 changed the `tokenizer` attribute to a more general `processing_class` attribute
        if parse_version(transformers_version) >= parse_version("4.46.0"):
            if self.processing_class is not None:
                self.processing_class.save_pretrained(output_dir)
        elif self.tokenizer is not None:
            self.tokenizer.save_pretrained(output_dir)

        # Good practice: save your training arguments together with the trained model
        torch.save(self.args, os.path.join(output_dir, TRAINING_ARGS_NAME))

    def _load_from_checkpoint(self, checkpoint_path: str) -> None:
        from sentence_transformers import SentenceTransformer

        loaded_model = SentenceTransformer(checkpoint_path, trust_remote_code=self.model.trust_remote_code)
        self.model.load_state_dict(loaded_model.state_dict())

    def _get_prompt_length(self, prompt: str) -> int:
        try:
            return self._prompt_length_mapping[prompt]
        except KeyError:
            prompt_length = self.model.tokenize([prompt])["input_ids"].shape[-1] - 1
            self._prompt_length_mapping[prompt] = prompt_length
            return prompt_length

    def _include_prompt_length(self) -> bool:
        """
        Return whether the prompt length should be passed to the model's forward method.

        True if the model does not include the prompt in the pooling layer. Can be
        overridden by the user if it's useful to include the prompt length.
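
        Example (a sketch of a subclass override)::

            class MyTrainer(SentenceTransformerTrainer):
                def _include_prompt_length(self) -> bool:
                    # Always pass the prompt length to the model's forward pass
                    return True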
        """
        for module in self.model:
            if isinstance(module, Pooling):
                return not module.include_prompt
        return False

    @staticmethod
    def add_prompts_or_dataset_name_transform(
        batch: dict[str, list[Any]],
        prompts: dict[str, str] | str | None = None,
        prompt_lengths: dict[str, int] | int | None = None,
        dataset_name: str | None = None,
        transform: Callable[[dict[str, list[Any]]], dict[str, list[Any]]] | None = None,
        **kwargs,
    ) -> dict[str, list[Any]]:
        """A transform/map function that adds prompts or dataset names to the batch.
        Args:
            batch (dict[str, list[Any]]): The batch of data, where each key is a column name and each value
                is a list of values.
            prompts (dict[str, str] | str | None, optional): An optional mapping of column names to string
                prompts, or a string prompt for all columns. Defaults to None.
            prompt_lengths (dict[str, int] | int | None, optional): An optional mapping of prompts names to
                prompt token length, or a prompt token length if the prompt is a string. Defaults to None.
            dataset_name (str | None, optional): The name of this dataset, only if there are multiple datasets
                that use a different loss. Defaults to None.
            transform (Callable[[dict[str, list[Any]]], dict[str, list[Any]]], optional): An optional transform
                function to apply on the batch before adding prompts, etc. Defaults to None.

        Returns:
            dict[str, list[Any]]: The "just-in-time" transformed batch with prompts and/or dataset names added.
        r   c                    g | ]}|z   S ri   ri   )r^   r`   r   s     rb   rl   zTSentenceTransformerTrainer.add_prompts_or_dataset_name_transform.<locals>.<listcomp>  s    )N)N)Ne'E/)N)N)Nrd   N_prompt_lengthc                    g | ]}|z   S ri   ri   )r^   r`   r[  s     rb   rl   zTSentenceTransformerTrainer.add_prompts_or_dataset_name_transform.<locals>.<listcomp>  s    )Y)Y)YU&5.)Y)Y)Yrd   rp   )r   r   rj   r   r   r   r   )
re  r   rh  rp   rj  kwargsfirst_columncolumn_namer   r[  s
    `       @rb   %add_prompts_or_dataset_name_transformz@SentenceTransformerTrainer.add_prompts_or_dataset_name_transform  s   8  	%Ie$$E  	L EJJLL))!,\" 	L gs## 	_'+EKKMM':': _ _#VfQi-- _)N)N)N)Nv)N)N)NE+&%1AO@PSVW]S^S^@^<<<= gt$$ 	s'.}} s s#V%'')Y)Y)Y)YeKFX)Y)Y)YE+&% sAOPVAW@X[^_dep_q[r[r@r<<<=  	N%1NS|9L5M5M$ME.!rd   DatasetDict | Dataset | None7dict[str, dict[str, str]] | dict[str, str] | str | Nonec                    |dS t          | j        t                    }t          |d          r|S |                     ||           |s|r-|                                 }|                     ||||          }|S )a  
        Maybe add prompts or dataset names to the dataset. We add the dataset_name column to the dataset if:

        1. The loss is a dictionary and the dataset is a DatasetDict, or
        2. The prompts contain a mapping to dataset names.

        There are 4 cases for the prompts:

        1. `str`: One prompt for all datasets and columns.
        2. `dict[str, str]`: A column to prompt mapping.
        3. `dict[str, str]`: A dataset to prompt mapping.
        4. `dict[str, dict[str, str]]`: A dataset to column to prompt mapping.

        And 2 cases for the dataset:

        A. `Dataset`: A single dataset.
        B. `DatasetDict`: A dictionary of datasets.

        3A is not allowed, and 2A doesn't make sense.
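
        Example (illustrative prompt mappings; the dataset and column names are assumptions)::

            prompts = "query: "                                   # Case 1: one prompt for everything
            prompts = {"anchor": "query: ", "positive": "doc: "}  # Case 2: column -> prompt
            prompts = {"stsb": "classify: "}                      # Case 3: dataset -> prompt
            prompts = {"stsb": {"sentence1": "query: "}}          # Case 4: dataset -> column -> prompt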

        Args:
            dataset_dict (DatasetDict | Dataset | None): The dataset to add prompts or dataset names to.

        Returns:
            DatasetDict | Dataset | None: The dataset with prompts or dataset names added.
        """
        if dataset_dict is None:
            return None

        include_dataset_name = isinstance(self.loss, dict)

        # If we've already added the transform to this dataset, don't add it again
        if hasattr(dataset_dict, "_sentence_transformers_preprocessed"):
            return dataset_dict

        # Ensure that there are no "dataset_name"/"return_loss" columns in the unprocessed datasets
        self.validate_column_names(dataset_dict, dataset_name=dataset_name)

        # Only transform if prompts were provided or if the dataset names must be added
        if prompts or include_dataset_name:
            include_prompt_lengths = self._include_prompt_length()
            dataset_dict = self.add_prompts_or_dataset_name_column(
                dataset_dict,
                prompts=prompts,
                include_prompt_lengths=include_prompt_lengths,
                include_dataset_name=include_dataset_name,
            )
        return dataset_dict

    def add_prompts_or_dataset_name_column(
        self,
        dataset_dict: DatasetDict | IterableDatasetDict | Dataset | IterableDataset,
        prompts: dict[str, str] | str | None = None,
        dataset_name: str | None = None,
        include_prompt_lengths: bool = False,
        include_dataset_name: bool = False,
    ) -> DatasetDict | Dataset | None:
        # If we have multiple datasets, recurse into each of them
        if isinstance(dataset_dict, (IterableDatasetDict, DatasetDict)):
            for dataset_name, dataset in dataset_dict.items():
                # If prompts is a nested dictionary, the first level maps dataset names to column prompts
                nested_prompts = prompts.get(dataset_name, prompts) if isinstance(prompts, dict) else prompts
                dataset_dict[dataset_name] = self.add_prompts_or_dataset_name_column(
                    dataset_dict=dataset,
                    prompts=nested_prompts,
                    dataset_name=dataset_name if include_dataset_name else None,
                    include_prompt_lengths=include_prompt_lengths,
                    include_dataset_name=include_dataset_name,
                )
            return dataset_dict

        # Get the prompt lengths if they are needed (i.e. if the pooling layer excludes the prompt)
        prompt_lengths = None
        if prompts:
            if isinstance(prompts, str):
                if include_prompt_lengths:
                    prompt_lengths = self._get_prompt_length(prompts)
            elif isinstance(prompts, dict):
                first_key = list(prompts.keys())[0]
                if isinstance(prompts[first_key], dict):
                    raise ValueError(
                        "The prompts provided to the trainer are a nested dictionary. In this setting, the first "
                        "level of the dictionary should map to dataset names and the second level to column names. "
                        "However, as the provided dataset is a not a DatasetDict, no dataset names can be inferred. "
                        f"The keys to the provided prompts dictionary are {list(prompts.keys())}"
                    )
                if include_prompt_lengths:
                    prompt_lengths = {
                        prompt: self._get_prompt_length(prompt)
                        for column_name, prompt in prompts.items()
                        if column_name in dataset_dict.column_names
                    }

        # A Dataset can lazily apply the transform whenever batches are requested
        if isinstance(dataset_dict, Dataset):
            dataset_dict.set_transform(
                partial(
                    self.add_prompts_or_dataset_name_transform,
                    prompts=prompts,
                    prompt_lengths=prompt_lengths,
                    dataset_name=dataset_name,
                    **dataset_dict._format_kwargs,
                )
            )

        # An IterableDataset must be mapped instead, with the new columns declared in its features
        elif isinstance(dataset_dict, IterableDataset):
            features = dataset_dict.features
            if dataset_name:
                features["dataset_name"] = Value("string")
            if prompts:
                if isinstance(prompts, str):
                    for column_name in dataset_dict.column_names:
                        feature = features[column_name]
                        if isinstance(feature, Value) and feature.dtype in ("string", "large_string"):
                            features[f"{column_name}_prompt_length"] = Value("int16")
                elif isinstance(prompts, dict):
                    for column_name, prompt in prompts.items():
                        feature = features[column_name]
                        if (
                            column_name in dataset_dict.column_names
                            and isinstance(feature, Value)
                            and feature.dtype in ("string", "large_string")
                        ):
                            features[f"{column_name}_prompt_length"] = Value("int16")

            dataset_dict = dataset_dict.map(
                partial(
                    self.add_prompts_or_dataset_name_transform,
                    prompts=prompts,
                    prompt_lengths=prompt_lengths,
                    dataset_name=dataset_name,
                ),
                batched=True,
                features=features,
            )
        else:
            raise ValueError("Unsupported dataset type.")

        # Mark the dataset as preprocessed, so we don't apply the transform twice
        dataset_dict._sentence_transformers_preprocessed = True
        return dataset_dict

    def create_model_card(
        self,
        language: str | None = None,
        license: str | None = None,
        tags: str | list[str] | None = None,
        model_name: str | None = None,
        finetuned_from: str | None = None,
        tasks: str | list[str] | None = None,
        dataset_tags: str | list[str] | None = None,
        dataset: str | list[str] | None = None,
        dataset_args: str | list[str] | None = None,
        **kwargs,
    ) -> None:
        if not self.is_world_process_zero():
            return

        if language:
            self.model.model_card_data.set_language(language)
        if license:
            self.model.model_card_data.set_license(license)
        if tags:
            self.model.model_card_data.add_tags(tags)

        self.model._create_model_card(self.args.output_dir, model_name=model_name)

    def get_optimizer_cls_and_kwargs(
        self, args: SentenceTransformerTrainingArguments, model: SentenceTransformer | None = None
    ) -> tuple[Any, Any]:
        """
        We have to override the optimizer_grouped_parameters because the Trainer superclass bases it on the `model`
        itself, but the SentenceTransformer losses can have weights that should be updated as well, e.g.
        SoftmaxLoss (see #2872).

        This method requires `transformers` >= 4.43.0.
        """
        if isinstance(self.loss, dict):
            loss_model = nn.Sequential(OrderedDict(self.loss))
        else:
            loss_model = self.loss
        optimizer_cls, optimizer_kwargs = super().get_optimizer_cls_and_kwargs(args, loss_model)

        # If the kwargs were not overridden by the super() call, then we should set them here so that
        # any potential weights in the loss(es) are also updated
        if not {"model", "params", "optimizer_dict"} & set(optimizer_kwargs.keys()):
            decay_parameters = self.get_decay_parameter_names(loss_model)
            optimizer_kwargs["optimizer_dict"] = [
                {
                    "params": [
                        p for n, p in loss_model.named_parameters() if (n in decay_parameters and p.requires_grad)
                    ],
                    "weight_decay": self.args.weight_decay,
                },
                {
                    "params": [
                        p for n, p in loss_model.named_parameters() if (n not in decay_parameters and p.requires_grad)
                    ],
                    "weight_decay": 0.0,
                },
            ]

        return optimizer_cls, optimizer_kwargs