from __future__ import annotations

import logging
from collections.abc import Iterable

from torch import Tensor, nn
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer, PreTrainedModel

from sentence_transformers import SentenceTransformer
from sentence_transformers.models import StaticEmbedding

logger = logging.getLogger(__name__)


class DenoisingAutoEncoderLoss(nn.Module):
    def __init__(
        self, model: SentenceTransformer, decoder_name_or_path: str | None = None, tie_encoder_decoder: bool = True
    ) -> None:
        """
        This loss expects as input pairs of damaged sentences and their corresponding original sentences.
        During training, the decoder reconstructs the original sentences from the encoded sentence embeddings.
        The argument 'decoder_name_or_path' indicates the pretrained model (compatible with Hugging Face's
        Transformers) to be used as the decoder. Since the decoding process is included, the decoder should
        have a corresponding XXXLMHead class (in the context of Hugging Face's Transformers).
        The 'tie_encoder_decoder' flag indicates whether to tie the trainable parameters of the encoder and
        decoder, which has been shown to benefit model performance while limiting the amount of required
        memory. This flag only works when the encoder and decoder share the same architecture.

        The data generation process (i.e. the 'damaging' process) has already been implemented in
        ``DenoisingAutoEncoderDataset``, allowing you to provide only the regular sentences; see the snippet below.
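
        As a minimal sketch of that damaging process: the dataset's default noise function,
        ``DenoisingAutoEncoderDataset.delete``, tokenizes a sentence with ``nltk`` and removes roughly
        ``del_ratio`` of its tokens (0.6 by default)::

            from sentence_transformers.datasets import DenoisingAutoEncoderDataset

            # Each dataset item becomes a (damaged_sentence, original_sentence) pair,
            # with the damaged text produced by this noise function.
            damaged = DenoisingAutoEncoderDataset.delete("First training sentence", del_ratio=0.6)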

        Args:
            model (SentenceTransformer): The SentenceTransformer model.
            decoder_name_or_path (str, optional): Model name or path for initializing a decoder (compatible with Hugging Face's Transformers). Defaults to None.
            tie_encoder_decoder (bool): Whether to tie the trainable parameters of encoder and decoder. Defaults to True.

        References:
            * TSDAE paper: https://arxiv.org/pdf/2104.06979.pdf
            * `Unsupervised Learning > TSDAE <../../examples/unsupervised_learning/TSDAE/README.html>`_

        Requirements:
            1. The decoder should have a class called XXXLMHead (in the context of Hugging Face's Transformers)
            2. Should use a large corpus

        Inputs:
            +------------------------------------------------------+--------+
            | Texts                                                | Labels |
            +======================================================+========+
            | (damaged\_sentence, original\_sentence) pairs        | none   |
            +------------------------------------------------------+--------+
            | sentence fed through ``DenoisingAutoEncoderDataset`` | none   |
            +------------------------------------------------------+--------+

        Example:
            ::

                from sentence_transformers import SentenceTransformer, losses
                from sentence_transformers.datasets import DenoisingAutoEncoderDataset
                from torch.utils.data import DataLoader

                model_name = "bert-base-cased"
                model = SentenceTransformer(model_name)
                train_sentences = [
                    "First training sentence", "Second training sentence", "Third training sentence", "Fourth training sentence",
                ]
                batch_size = 2
                train_dataset = DenoisingAutoEncoderDataset(train_sentences)
                train_dataloader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True, drop_last=True)
                train_loss = losses.DenoisingAutoEncoderLoss(
                    model, decoder_name_or_path=model_name, tie_encoder_decoder=True
                )
                model.fit(
                    train_objectives=[(train_dataloader, train_loss)],
                    epochs=10,
                )
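
            Training with the newer ``SentenceTransformerTrainer`` API should look roughly like the
            following (a sketch, assuming a two-column dataset whose first column holds the damaged
            texts and the second the originals; the column names here are illustrative)::

                from datasets import Dataset
                from sentence_transformers import SentenceTransformerTrainer

                train_dataset = Dataset.from_dict({
                    "damaged_sentence": ["Frst trning sentence", "Secnd trning"],
                    "original_sentence": ["First training sentence", "Second training sentence"],
                })
                trainer = SentenceTransformerTrainer(model=model, train_dataset=train_dataset, loss=train_loss)
                trainer.train()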
        """
        super().__init__()
        if isinstance(model[0], StaticEmbedding):
            raise ValueError(
                "DenoisingAutoEncoderLoss is not compatible with a SentenceTransformer model based on a StaticEmbedding."
            )

        self.encoder = model  # This will be the final model used during inference
        self.tokenizer_encoder = model.tokenizer

        encoder_name_or_path = model[0].auto_model.config._name_or_path
        if decoder_name_or_path is None:
            assert tie_encoder_decoder, "Must indicate the decoder_name_or_path argument when tie_encoder_decoder=False!"
        if tie_encoder_decoder:
            if decoder_name_or_path:
                logger.warning("When tie_encoder_decoder=True, the decoder_name_or_path will be invalid.")
            decoder_name_or_path = encoder_name_or_path

        self.tokenizer_decoder = AutoTokenizer.from_pretrained(decoder_name_or_path)
        self.need_retokenization = not isinstance(self.tokenizer_encoder, type(self.tokenizer_decoder))

        decoder_config = AutoConfig.from_pretrained(decoder_name_or_path)
        decoder_config.is_decoder = True
        decoder_config.add_cross_attention = True
        kwargs_decoder = {"config": decoder_config}
        try:
            self.decoder = AutoModelForCausalLM.from_pretrained(decoder_name_or_path, **kwargs_decoder)
        except ValueError as e:
            logger.error(
                f'Model name or path "{decoder_name_or_path}" does not support being used as a decoder. '
                'Please make sure the decoder model has an "XXXLMHead" class.'
            )
            raise e

        assert model[0].auto_model.config.hidden_size == decoder_config.hidden_size, "Hidden sizes do not match!"

        if self.tokenizer_decoder.pad_token is None:
            # Decoder-only models such as GPT-2 have no pad token; reuse the EOS token for padding
            self.tokenizer_decoder.pad_token = self.tokenizer_decoder.eos_token
            self.decoder.config.pad_token_id = self.decoder.config.eos_token_id

        if len(AutoTokenizer.from_pretrained(encoder_name_or_path)) != len(self.tokenizer_encoder):
            logger.warning(
                "WARNING: The vocabulary of the encoder has been changed. One might need to change the decoder vocabulary, too."
            )

        if tie_encoder_decoder:
            assert not self.need_retokenization, "The tokenizers should be the same when tie_encoder_decoder=True."
            if len(self.tokenizer_encoder) != len(self.tokenizer_decoder):  # The vocabulary has been changed
                self.tokenizer_decoder = self.tokenizer_encoder
                self.decoder.resize_token_embeddings(len(self.tokenizer_decoder))
                logger.warning(
                    "Since the encoder vocabulary has been changed and --tie_encoder_decoder=True, "
                    "now the new vocabulary has also been used for the decoder."
                )
            decoder_base_model_prefix = self.decoder.base_model_prefix
            try:
                PreTrainedModel._tie_encoder_decoder_weights(
                    model[0].auto_model,
                    self.decoder._modules[decoder_base_model_prefix],
                    self.decoder.base_model_prefix,
                )
            except TypeError:
                # Newer transformers versions expect an additional positional argument
                PreTrainedModel._tie_encoder_decoder_weights(
                    model[0].auto_model,
                    self.decoder._modules[decoder_base_model_prefix],
                    self.decoder.base_model_prefix,
                    "encoder",
                )

    def retokenize(self, sentence_features: dict[str, Tensor]) -> dict[str, Tensor]:
        input_ids = sentence_features["input_ids"]
        device = input_ids.device
        sentences_decoded = self.tokenizer_encoder.batch_decode(
            input_ids, skip_special_tokens=True, clean_up_tokenization_spaces=True
        )
        retokenized = self.tokenizer_decoder(
            sentences_decoded, padding=True, truncation="longest_first", return_tensors="pt", max_length=None
        ).to(device)
        return retokenized

    def forward(self, sentence_features: Iterable[dict[str, Tensor]], labels: Tensor) -> Tensor:
        source_features, target_features = tuple(sentence_features)
        if self.need_retokenization:
            # The features are tokenized with the encoder's tokenizer; if the decoder uses a
            # different tokenizer, the target features need to be retokenized
            target_features = self.retokenize(target_features)
        reps = self.encoder(source_features)["sentence_embedding"]  # (bsz, hdim)

        # Prepare the decoder input (all but the last token) and labels (all but the first token)
        target_length = target_features["input_ids"].shape[1]
        decoder_input_ids = target_features["input_ids"].clone()[:, : target_length - 1]
        label_ids = target_features["input_ids"][:, 1:]

        # Decode, attending to the single sentence embedding via cross-attention
        decoder_outputs = self.decoder(
            input_ids=decoder_input_ids,
            inputs_embeds=None,
            attention_mask=None,
            encoder_hidden_states=reps[:, None],  # (bsz, hdim) -> (bsz, 1, hdim)
            encoder_attention_mask=source_features["attention_mask"][:, 0:1],
            labels=None,
            return_dict=None,
            use_cache=False,
        )

        # Calculate the cross-entropy loss over the shifted tokens, ignoring padding
        lm_logits = decoder_outputs[0]
        ce_loss_fct = nn.CrossEntropyLoss(ignore_index=self.tokenizer_decoder.pad_token_id)
        loss = ce_loss_fct(lm_logits.view(-1, lm_logits.shape[-1]), label_ids.reshape(-1))
        return loss

    @property
    def citation(self) -> str:
        return """
@inproceedings{wang-2021-TSDAE,
    title = "TSDAE: Using Transformer-based Sequential Denoising Auto-Encoderfor Unsupervised Sentence Embedding Learning",
    author = "Wang, Kexin and Reimers, Nils and Gurevych, Iryna",
    booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2021",
    month = nov,
    year = "2021",
    address = "Punta Cana, Dominican Republic",
    publisher = "Association for Computational Linguistics",
    pages = "671--688",
    url = "https://arxiv.org/abs/2104.06979",
}
"""