Skip to content

Openai legacy

ContextRetrieverOpenAIAgent #

Bases: BaseOpenAIAgent

ContextRetriever OpenAI Agent.

This agent performs retrieval from BaseRetriever before calling the LLM. Allows it to augment user message with context.

NOTE: this is a beta feature, function interfaces might change.

Parameters:

Name Type Description Default
tools List[BaseTool]

A list of tools.

required
retriever BaseRetriever

A retriever.

required
qa_prompt Optional[PromptTemplate]

A QA prompt.

required
context_separator str

A context separator.

required
llm Optional[OpenAI]

An OpenAI LLM.

required
chat_history Optional[List[ChatMessage]]

A chat history.

required
prefix_messages List[ChatMessage]

A list of prefix messages.

required
verbose bool

Whether to print debug statements.

False
max_function_calls int

Maximum number of function calls.

DEFAULT_MAX_FUNCTION_CALLS
callback_manager Optional[CallbackManager]

A callback manager.

None
Source code in llama-index-integrations/agent/llama-index-agent-openai-legacy/llama_index/agent/openai_legacy/context_retriever_agent.py
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
class ContextRetrieverOpenAIAgent(BaseOpenAIAgent):
    """ContextRetriever OpenAI Agent.

    This agent performs retrieval from BaseRetriever before
    calling the LLM. Allows it to augment user message with context.

    NOTE: this is a beta feature, function interfaces might change.

    Args:
        tools (List[BaseTool]): A list of tools.
        retriever (BaseRetriever): A retriever.
        qa_prompt (PromptTemplate): A QA prompt used to merge the retrieved
            context with the user message.
        context_separator (str): Separator joining retrieved node texts.
        llm (OpenAI): An OpenAI LLM.
        memory (BaseMemory): Chat memory holding the conversation history.
        prefix_messages (List[ChatMessage]): A list of prefix messages.
        verbose (bool): Whether to print debug statements.
        max_function_calls (int): Maximum number of function calls.
        callback_manager (Optional[CallbackManager]): A callback manager.

    """

    def __init__(
        self,
        tools: List[BaseTool],
        retriever: BaseRetriever,
        qa_prompt: PromptTemplate,
        context_separator: str,
        llm: OpenAI,
        memory: BaseMemory,
        prefix_messages: List[ChatMessage],
        verbose: bool = False,
        max_function_calls: int = DEFAULT_MAX_FUNCTION_CALLS,
        callback_manager: Optional[CallbackManager] = None,
    ) -> None:
        super().__init__(
            llm=llm,
            memory=memory,
            prefix_messages=prefix_messages,
            verbose=verbose,
            max_function_calls=max_function_calls,
            callback_manager=callback_manager,
        )
        self._tools = tools
        self._qa_prompt = qa_prompt
        self._retriever = retriever
        self._context_separator = context_separator

    @classmethod
    def from_tools_and_retriever(
        cls,
        tools: List[BaseTool],
        retriever: BaseRetriever,
        qa_prompt: Optional[PromptTemplate] = None,
        context_separator: str = "\n",
        llm: Optional[LLM] = None,
        chat_history: Optional[List[ChatMessage]] = None,
        memory: Optional[BaseMemory] = None,
        memory_cls: Type[BaseMemory] = ChatMemoryBuffer,
        verbose: bool = False,
        max_function_calls: int = DEFAULT_MAX_FUNCTION_CALLS,
        callback_manager: Optional[CallbackManager] = None,
        system_prompt: Optional[str] = None,
        prefix_messages: Optional[List[ChatMessage]] = None,
    ) -> "ContextRetrieverOpenAIAgent":
        """Create a ContextRetrieverOpenAIAgent from a retriever.

        Args:
            tools (List[BaseTool]): A list of tools.
            retriever (BaseRetriever): A retriever.
            qa_prompt (Optional[PromptTemplate]): A QA prompt. Defaults to
                DEFAULT_QA_PROMPT when not provided.
            context_separator (str): A context separator.
            llm (Optional[LLM]): An LLM; must be an OpenAI instance.
                Defaults to Settings.llm.
            chat_history (Optional[List[ChatMessage]]): A chat history used
                to seed the memory when ``memory`` is not provided.
            memory (Optional[BaseMemory]): Chat memory; built via
                ``memory_cls`` when not provided.
            memory_cls (Type[BaseMemory]): Memory class used to build the
                default memory.
            verbose (bool): Whether to print debug statements.
            max_function_calls (int): Maximum number of function calls.
            callback_manager (Optional[CallbackManager]): A callback manager.
            system_prompt (Optional[str]): A system prompt; mutually
                exclusive with ``prefix_messages``.
            prefix_messages (Optional[List[ChatMessage]]): A list of prefix
                messages.

        Raises:
            ValueError: If ``llm`` is not an OpenAI instance, if the model
                does not support function calling, or if both
                ``system_prompt`` and ``prefix_messages`` are given.

        """
        qa_prompt = qa_prompt or DEFAULT_QA_PROMPT
        chat_history = chat_history or []
        llm = llm or Settings.llm
        if not isinstance(llm, OpenAI):
            raise ValueError("llm must be a OpenAI instance")
        if callback_manager is not None:
            llm.callback_manager = callback_manager

        memory = memory or memory_cls.from_defaults(chat_history=chat_history, llm=llm)

        # Tool invocation relies on the OpenAI function calling API.
        if not llm.metadata.is_function_calling_model:
            raise ValueError(
                f"Model name {llm.model} does not support function calling API."
            )
        if system_prompt is not None:
            if prefix_messages is not None:
                raise ValueError(
                    "Cannot specify both system_prompt and prefix_messages"
                )
            prefix_messages = [ChatMessage(content=system_prompt, role="system")]

        prefix_messages = prefix_messages or []

        return cls(
            tools=tools,
            retriever=retriever,
            qa_prompt=qa_prompt,
            context_separator=context_separator,
            llm=llm,
            memory=memory,
            prefix_messages=prefix_messages,
            verbose=verbose,
            max_function_calls=max_function_calls,
            callback_manager=callback_manager,
        )

    def _get_tools(self, message: str) -> List[BaseTool]:
        """Get tools. The static tool list is returned regardless of message."""
        return self._tools

    def _build_formatted_message(self, message: str) -> str:
        """Retrieve context for ``message`` and fold it into the QA prompt."""
        # augment user message
        retrieved_nodes_w_scores: List[NodeWithScore] = self._retriever.retrieve(
            message
        )
        retrieved_nodes = [node.node for node in retrieved_nodes_w_scores]
        retrieved_texts = [node.get_content() for node in retrieved_nodes]

        # format message
        context_str = self._context_separator.join(retrieved_texts)
        return self._qa_prompt.format(context_str=context_str, query_str=message)

    def chat(
        self,
        message: str,
        chat_history: Optional[List[ChatMessage]] = None,
        tool_choice: Union[str, dict] = "auto",
    ) -> AgentChatResponse:
        """Chat."""
        formatted_message = self._build_formatted_message(message)
        if self._verbose:
            print_text(formatted_message + "\n", color="yellow")

        return super().chat(
            formatted_message, chat_history=chat_history, tool_choice=tool_choice
        )

    async def achat(
        self,
        message: str,
        chat_history: Optional[List[ChatMessage]] = None,
        tool_choice: Union[str, dict] = "auto",
    ) -> AgentChatResponse:
        """Chat."""
        # NOTE(review): retrieval here goes through the synchronous
        # _build_formatted_message path even in the async variant.
        formatted_message = self._build_formatted_message(message)
        if self._verbose:
            print_text(formatted_message + "\n", color="yellow")

        return await super().achat(
            formatted_message, chat_history=chat_history, tool_choice=tool_choice
        )

    def get_tools(self, message: str) -> List[BaseTool]:
        """Get tools."""
        return self._get_tools(message)

from_tools_and_retriever classmethod #

from_tools_and_retriever(tools: List[BaseTool], retriever: BaseRetriever, qa_prompt: Optional[PromptTemplate] = None, context_separator: str = '\n', llm: Optional[LLM] = None, chat_history: Optional[List[ChatMessage]] = None, memory: Optional[BaseMemory] = None, memory_cls: Type[BaseMemory] = ChatMemoryBuffer, verbose: bool = False, max_function_calls: int = DEFAULT_MAX_FUNCTION_CALLS, callback_manager: Optional[CallbackManager] = None, system_prompt: Optional[str] = None, prefix_messages: Optional[List[ChatMessage]] = None) -> ContextRetrieverOpenAIAgent

Create a ContextRetrieverOpenAIAgent from a retriever.

Parameters:

Name Type Description Default
retriever BaseRetriever

A retriever.

required
qa_prompt Optional[PromptTemplate]

A QA prompt.

None
context_separator str

A context separator.

'\n'
llm Optional[LLM]

An OpenAI LLM.

None
chat_history Optional[List[ChatMessage]]

A chat history.

None
verbose bool

Whether to print debug statements.

False
max_function_calls int

Maximum number of function calls.

DEFAULT_MAX_FUNCTION_CALLS
callback_manager Optional[CallbackManager]

A callback manager.

None
Source code in llama-index-integrations/agent/llama-index-agent-openai-legacy/llama_index/agent/openai_legacy/context_retriever_agent.py
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
@classmethod
def from_tools_and_retriever(
    cls,
    tools: List[BaseTool],
    retriever: BaseRetriever,
    qa_prompt: Optional[PromptTemplate] = None,
    context_separator: str = "\n",
    llm: Optional[LLM] = None,
    chat_history: Optional[List[ChatMessage]] = None,
    memory: Optional[BaseMemory] = None,
    memory_cls: Type[BaseMemory] = ChatMemoryBuffer,
    verbose: bool = False,
    max_function_calls: int = DEFAULT_MAX_FUNCTION_CALLS,
    callback_manager: Optional[CallbackManager] = None,
    system_prompt: Optional[str] = None,
    prefix_messages: Optional[List[ChatMessage]] = None,
) -> "ContextRetrieverOpenAIAgent":
    """Create a ContextRetrieverOpenAIAgent from a retriever.

    Args:
        tools (List[BaseTool]): A list of tools.
        retriever (BaseRetriever): A retriever.
        qa_prompt (Optional[PromptTemplate]): A QA prompt. Defaults to
            DEFAULT_QA_PROMPT when not provided.
        context_separator (str): A context separator.
        llm (Optional[LLM]): An LLM; must be an OpenAI instance.
            Defaults to Settings.llm.
        chat_history (Optional[List[ChatMessage]]): A chat history used to
            seed the memory when ``memory`` is not provided.
        memory (Optional[BaseMemory]): Chat memory; built via ``memory_cls``
            when not provided.
        memory_cls (Type[BaseMemory]): Memory class used to build the
            default memory.
        verbose (bool): Whether to print debug statements.
        max_function_calls (int): Maximum number of function calls.
        callback_manager (Optional[CallbackManager]): A callback manager.
        system_prompt (Optional[str]): A system prompt; mutually exclusive
            with ``prefix_messages``.
        prefix_messages (Optional[List[ChatMessage]]): A list of prefix
            messages.

    Raises:
        ValueError: If ``llm`` is not an OpenAI instance, if the model does
            not support function calling, or if both ``system_prompt`` and
            ``prefix_messages`` are given.

    """
    qa_prompt = qa_prompt or DEFAULT_QA_PROMPT
    chat_history = chat_history or []
    llm = llm or Settings.llm
    if not isinstance(llm, OpenAI):
        raise ValueError("llm must be a OpenAI instance")
    if callback_manager is not None:
        llm.callback_manager = callback_manager

    memory = memory or memory_cls.from_defaults(chat_history=chat_history, llm=llm)

    # Tool invocation relies on the OpenAI function calling API.
    if not llm.metadata.is_function_calling_model:
        raise ValueError(
            f"Model name {llm.model} does not support function calling API."
        )
    if system_prompt is not None:
        if prefix_messages is not None:
            raise ValueError(
                "Cannot specify both system_prompt and prefix_messages"
            )
        prefix_messages = [ChatMessage(content=system_prompt, role="system")]

    prefix_messages = prefix_messages or []

    return cls(
        tools=tools,
        retriever=retriever,
        qa_prompt=qa_prompt,
        context_separator=context_separator,
        llm=llm,
        memory=memory,
        prefix_messages=prefix_messages,
        verbose=verbose,
        max_function_calls=max_function_calls,
        callback_manager=callback_manager,
    )

chat #

chat(message: str, chat_history: Optional[List[ChatMessage]] = None, tool_choice: Union[str, dict] = 'auto') -> AgentChatResponse

Chat.

Source code in llama-index-integrations/agent/llama-index-agent-openai-legacy/llama_index/agent/openai_legacy/context_retriever_agent.py
166
167
168
169
170
171
172
173
174
175
176
177
178
179
def chat(
    self,
    message: str,
    chat_history: Optional[List[ChatMessage]] = None,
    tool_choice: Union[str, dict] = "auto",
) -> AgentChatResponse:
    """Run a chat turn, augmenting the user message with retrieved context."""
    augmented = self._build_formatted_message(message)
    if self._verbose:
        print_text(augmented + "\n", color="yellow")
    return super().chat(augmented, chat_history=chat_history, tool_choice=tool_choice)

achat async #

achat(message: str, chat_history: Optional[List[ChatMessage]] = None, tool_choice: Union[str, dict] = 'auto') -> AgentChatResponse

Chat.

Source code in llama-index-integrations/agent/llama-index-agent-openai-legacy/llama_index/agent/openai_legacy/context_retriever_agent.py
181
182
183
184
185
186
187
188
189
190
191
192
193
194
async def achat(
    self,
    message: str,
    chat_history: Optional[List[ChatMessage]] = None,
    tool_choice: Union[str, dict] = "auto",
) -> AgentChatResponse:
    """Run an async chat turn, augmenting the user message with retrieved context."""
    augmented = self._build_formatted_message(message)
    if self._verbose:
        print_text(augmented + "\n", color="yellow")
    return await super().achat(
        augmented, chat_history=chat_history, tool_choice=tool_choice
    )

get_tools #

get_tools(message: str) -> List[BaseTool]

Get tools.

Source code in llama-index-integrations/agent/llama-index-agent-openai-legacy/llama_index/agent/openai_legacy/context_retriever_agent.py
196
197
198
def get_tools(self, message: str) -> List[BaseTool]:
    """Return the tools available for the given message."""
    available = self._get_tools(message)
    return available

FnRetrieverOpenAIAgent #

Bases: OpenAIAgent

Function Retriever OpenAI Agent.

Uses our object retriever module to retrieve openai agent.

NOTE: This is deprecated, you can just use the base OpenAIAgent class by specifying the following:

agent = OpenAIAgent.from_tools(tool_retriever=retriever, ...)

Source code in llama-index-integrations/agent/llama-index-agent-openai-legacy/llama_index/agent/openai_legacy/retriever_openai_agent.py
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
class FnRetrieverOpenAIAgent(OpenAIAgent):
    """Function Retriever OpenAI Agent.

    Uses our object retriever module to retrieve openai agent.

    NOTE: This is deprecated, you can just use the base `OpenAIAgent` class by
    specifying the following:
    ```
    agent = OpenAIAgent.from_tools(tool_retriever=retriever, ...)
    ```

    """

    @classmethod
    def from_retriever(
        cls, retriever: ObjectRetriever[BaseTool], **kwargs: Any
    ) -> "FnRetrieverOpenAIAgent":
        """Build the agent by forwarding the retriever to ``from_tools``."""
        agent = cls.from_tools(tool_retriever=retriever, **kwargs)
        return cast(FnRetrieverOpenAIAgent, agent)