import warnings
from typing import List, Union

from ..utils import (
    add_end_docstrings,
    is_tf_available,
    is_torch_available,
    is_vision_available,
    logging,
    requires_backends,
)
from .base import Pipeline, build_pipeline_init_args


if is_vision_available():
    from PIL import Image

    from ..image_utils import load_image

if is_tf_available():
    from ..models.auto.modeling_tf_auto import TF_MODEL_FOR_VISION_2_SEQ_MAPPING_NAMES

if is_torch_available():
    import torch

    from ..models.auto.modeling_auto import MODEL_FOR_VISION_2_SEQ_MAPPING_NAMES

logger = logging.get_logger(__name__)


@add_end_docstrings(build_pipeline_init_args(has_tokenizer=True, has_image_processor=True))
class ImageToTextPipeline(Pipeline):
    """
    Image To Text pipeline using an `AutoModelForVision2Seq`. This pipeline predicts a caption for a given image.

    Example:

    ```python
    >>> from transformers import pipeline

    >>> captioner = pipeline(model="ydshieh/vit-gpt2-coco-en")
    >>> captioner("https://huggingface.co/datasets/Narsil/image_dummy/raw/main/parrots.png")
    [{'generated_text': 'two birds are standing next to each other '}]
    ```

    Learn more about the basics of using a pipeline in the [pipeline tutorial](../pipeline_tutorial)

    This image to text pipeline can currently be loaded from pipeline() using the following task identifier:
    "image-to-text".

    See the list of available models on
    [huggingface.co/models](https://huggingface.co/models?pipeline_tag=image-to-text).
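
    Some checkpoints also support conditional captioning from a text `prompt` (see `preprocess`: this is handled for
    the `git` and `pix2struct` model types). A minimal sketch, assuming a GIT checkpoint; the exact checkpoint name
    and prompt text here are illustrative:

    ```python
    >>> captioner = pipeline(model="microsoft/git-base-coco")  # illustrative GIT checkpoint
    >>> captioner(
    ...     "https://huggingface.co/datasets/Narsil/image_dummy/raw/main/parrots.png",
    ...     prompt="a photo of",
    ... )
    ```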
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        requires_backends(self, "vision")
        self.check_model_type(
            TF_MODEL_FOR_VISION_2_SEQ_MAPPING_NAMES if self.framework == "tf" else MODEL_FOR_VISION_2_SEQ_MAPPING_NAMES
        )

    def _sanitize_parameters(self, max_new_tokens=None, generate_kwargs=None, prompt=None, timeout=None):
        forward_params = {}
        preprocess_params = {}

        if prompt is not None:
            preprocess_params["prompt"] = prompt
        if timeout is not None:
            warnings.warn(
                "The `timeout` argument is deprecated and will be removed in version 5 of Transformers", FutureWarning
            )
            preprocess_params["timeout"] = timeout

        if max_new_tokens is not None:
            forward_params["max_new_tokens"] = max_new_tokens
        if generate_kwargs is not None:
            if max_new_tokens is not None and "max_new_tokens" in generate_kwargs:
                raise ValueError(
                    "`max_new_tokens` is defined both as an argument and inside `generate_kwargs` argument, please use"
                    " only 1 version"
                )
            forward_params.update(generate_kwargs)

        return preprocess_params, forward_params, {}

    def __call__(self, inputs: Union[str, List[str], "Image.Image", List["Image.Image"]] = None, **kwargs):
        """
        Generate a caption for the image(s) passed as inputs.

        Args:
            inputs (`str`, `List[str]`, `PIL.Image` or `List[PIL.Image]`):
                The pipeline handles three types of images:

                - A string containing a HTTP(s) link pointing to an image
                - A string containing a local path to an image
                - An image loaded in PIL directly

                The pipeline accepts either a single image or a batch of images.

            max_new_tokens (`int`, *optional*):
                The maximum number of tokens to generate. By default, the `generate` default is used.

            generate_kwargs (`Dict`, *optional*):
                Dictionary of keyword arguments passed directly to `generate`, allowing full control of that function.

        Return:
            A list or a list of lists of `dict`: Each result comes as a dictionary with the following key:

            - **generated_text** (`str`) -- The generated text.
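
        Example (a minimal sketch of batched usage; the checkpoint and image URL are the illustrative ones from the
        class docstring, repeated twice, and one result list is returned per image):

        ```python
        >>> from transformers import pipeline

        >>> captioner = pipeline(model="ydshieh/vit-gpt2-coco-en")
        >>> url = "https://huggingface.co/datasets/Narsil/image_dummy/raw/main/parrots.png"
        >>> captioner([url, url], max_new_tokens=10)
        ```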
        imagesNzBCannot call the image-to-text pipeline without an inputs argument!)popr+   r   __call__)r   r1   r    r!   s      r"   r5   zImageToTextPipeline.__call__e   sT    4 vZZ))F>abbbuww11&111r#   c                    t          ||          }|t          |t                    s t          dt	          |           d          | j        j        j        }|dk    r|                     || j	                  }| j	        dk    r|
                    | j                  }|                     |d          j        }| j        j        g|z   }t          j        |                              d	          }|                    d
|i           n|dk    rC|                     ||| j	                  }| j	        dk    r|
                    | j                  }n|dk    rs|                     || j	                  }| j	        dk    r|
                    | j                  }|                     || j	                  }|                    |           nTt          d| d          |                     || j	                  }| j	        dk    r|
                    | j                  }| j        j        j        dk    r|d |d
<   |S )N)r&   z&Received an invalid text input, got - zy - but expected a single string. Note also that one single text can be provided for conditional image to text generation.git)r3   return_tensorsptF)textadd_special_tokensr   	input_ids
pix2struct)r3   header_textr8   zvision-encoder-decoder)r8   zModel type z- does not support conditional text generation)r   
isinstancestrr+   typemodelconfig
model_typeimage_processorr   totorch_dtype	tokenizerr<   cls_token_idtorchtensor	unsqueezer,   )r   imager%   r&   rD   model_inputsr<   text_inputss           r"   
preprocesszImageToTextPipeline.preprocess   sZ   5'222fc**  oT&\\ o o o  
 *5JU""#335QUQ_3``>T))#/??43C#D#DL NN5NQQ[	!^89IE	!L33==a@@	##[)$<====|++#335feies3tt>T))#/??43C#D#DL777#335QUQ_3``>T))#/??43C#D#DL"nnVDNnSS##K0000 !!hz!h!h!hiii  //uT^/\\L~%%+t/?@@:'500V^(,L%r#   c                    d|v r?t          |d         t                    r$t          d |d         D                       rd |d<   d|vr
| j        |d<   |                    | j        j                  } | j        j        |fi ||}|S )Nr<   c              3      K   | ]}|d u V  	d S N ).0xs     r"   	<genexpr>z/ImageToTextPipeline._forward.<locals>.<genexpr>   s&      AA!AIAAAAAAr#   generation_config)r?   listallrX   r4   rB   main_input_namegenerate)r   rN   r-   r1   model_outputss        r"   _forwardzImageToTextPipeline._forward   s     <''<4d;; (AA|K'@AAAAA ( )-L% o55373IO/0 !!$*"<==+
+FVVlVoVVr#   c                 z    g }|D ]5}d| j                             |d          i}|                    |           6|S )Ngenerated_textT)skip_special_tokens)rH   decodeappend)r   r]   records
output_idsrecords        r"   postprocesszImageToTextPipeline.postprocess   s[    ' 	# 	#J $."7"7(, #8 # #F NN6""""r#   )NNNNrS   )NN)__name__
__module____qualname____doc__r   r0   r   r@   r   r5   rP   r^   rg   __classcell__)r!   s   @r"   r   r   .   s         ,
 
 
 
 
5 5 5 502 2uS$s)]DDW%WX 2 2 2 2 2 2@- - - -^  ,
 
 
 
 
 
 
r#   r   )r(   typingr   r   utilsr   r   r   r	   r
   r   baser   r   PILr   image_utilsr   models.auto.modeling_tf_autor   rJ   models.auto.modeling_autor   
get_loggerrh   loggerr   rT   r#   r"   <module>rv      s                            5 4 4 4 4 4 4 4  )((((((? WVVVVVV QLLLPPPPPP		H	%	% ,,4UYZZZ[[e e e e e( e e \[e e er#   