
from __future__ import annotations

from typing import Any, Dict, Union

from langchain_core.retrievers import BaseRetriever, RetrieverOutput
from langchain_core.runnables import Runnable, RunnablePassthrough


def create_retrieval_chain(
    retriever: Union[BaseRetriever, Runnable[dict, RetrieverOutput]],
    combine_docs_chain: Runnable[Dict[str, Any], str],
) -> Runnable:
    """Create retrieval chain that retrieves documents and then passes them on.

    Args:
        retriever: Retriever-like object that returns a list of documents. Should
            either be a subclass of BaseRetriever or a Runnable that returns
            a list of documents. If a subclass of BaseRetriever, then it
            is expected that an `input` key be passed in - this is what
            will be used to pass into the retriever. If this is NOT a
            subclass of BaseRetriever, then all the inputs will be passed
            into this runnable, meaning that runnable should take a dictionary
            as input.
        combine_docs_chain: Runnable that takes inputs and produces a string output.
            The inputs to this will be any original inputs to this chain, a new
            context key with the retrieved documents, and chat_history (if not present
            in the inputs) with a value of `[]` (to easily enable conversational
            retrieval).

    Returns:
        An LCEL Runnable. The Runnable return is a dictionary containing at the very
        least a `context` and `answer` key.

    Example:
        .. code-block:: python

            # pip install -U langchain langchain-community

            from langchain_community.chat_models import ChatOpenAI
            from langchain.chains.combine_documents import create_stuff_documents_chain
            from langchain.chains import create_retrieval_chain
            from langchain import hub

            retrieval_qa_chat_prompt = hub.pull("langchain-ai/retrieval-qa-chat")
            llm = ChatOpenAI()
            retriever = ...
            combine_docs_chain = create_stuff_documents_chain(
                llm, retrieval_qa_chat_prompt
            )
            retrieval_chain = create_retrieval_chain(retriever, combine_docs_chain)

            result = retrieval_chain.invoke({"input": "..."})
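
            # Minimal sketch of reading the output (the variable names here
            # are illustrative): per the Returns section, the result dict
            # holds at least "context" (the retrieved documents) and "answer".
            retrieved_docs = result["context"]
            answer_text = result["answer"]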

    """
    if not isinstance(retriever, BaseRetriever):
        # An arbitrary Runnable receives the full input dict unchanged.
        retrieval_docs: Runnable[dict, RetrieverOutput] = retriever
    else:
        # A BaseRetriever expects the query string, so extract the "input" key.
        retrieval_docs = (lambda x: x["input"]) | retriever

    retrieval_chain = (
        RunnablePassthrough.assign(
            context=retrieval_docs.with_config(run_name="retrieve_documents"),
        ).assign(answer=combine_docs_chain)
    ).with_config(run_name="retrieval_chain")

    return retrieval_chain