Skip to content

Types

ChatMessage #

Bases: BaseModel

Chat message.

TODO: Temp copy of class from llama-index, to avoid pydantic v1/v2 issues.

Source code in llama_deploy/llama_deploy/types.py
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
class ChatMessage(BaseModel):
    """Chat message.

    TODO: Temp copy of class from llama-index, to avoid pydantic v1/v2 issues.
    """

    role: MessageRole = MessageRole.USER
    content: Optional[Any] = ""
    additional_kwargs: dict = Field(default_factory=dict)

    def __str__(self) -> str:
        return f"{self.role.value}: {self.content}"

    @classmethod
    def from_str(
        cls,
        content: str,
        role: Union[MessageRole, str] = MessageRole.USER,
        **kwargs: Any,
    ) -> "ChatMessage":
        """Build a ChatMessage from a plain string, coercing a string role to MessageRole."""
        if isinstance(role, str):
            role = MessageRole(role)
        return cls(role=role, content=content, **kwargs)

    def _recursive_serialization(self, value: Any) -> Any:
        # Convert nested pydantic models (v1 or v2) to plain dicts and recurse
        # into dicts/lists so the value becomes JSON-serializable.
        if isinstance(value, (V1BaseModel, BaseModel)):
            return value.dict()
        if isinstance(value, dict):
            # Use a distinct name (`item`) for the nested values rather than
            # shadowing the outer `value` inside the comprehension.
            return {
                key: self._recursive_serialization(item)
                for key, item in value.items()
            }
        if isinstance(value, list):
            return [self._recursive_serialization(item) for item in value]
        return value

    def dict(self, **kwargs: Any) -> dict:
        """Serialize the message, ensuring every additional_kwargs value is JSON-safe.

        Raises:
            ValueError: if a value cannot be reduced to a JSON-compatible type.
        """
        msg = super().dict(**kwargs)

        for key, value in msg.get("additional_kwargs", {}).items():
            value = self._recursive_serialization(value)
            if not isinstance(value, (str, int, float, bool, dict, list, type(None))):
                raise ValueError(
                    f"Failed to serialize additional_kwargs value: {value}"
                )
            msg["additional_kwargs"][key] = value

        return msg

ActionTypes #

Bases: str, Enum

Action types for messages. Different consumers will handle (or ignore) different action types.

Source code in llama_deploy/llama_deploy/types.py
69
70
71
72
73
74
75
76
77
78
79
class ActionTypes(str, Enum):
    """Enumerates the action types a message can carry.

    Each consumer decides which of these action types it handles and which
    it simply ignores.
    """

    # Task lifecycle
    NEW_TASK = "new_task"
    COMPLETED_TASK = "completed_task"
    REQUEST_FOR_HELP = "request_for_help"
    # Tool-call lifecycle
    NEW_TOOL_CALL = "new_tool_call"
    COMPLETED_TOOL_CALL = "completed_tool_call"

TaskDefinition #

Bases: BaseModel

The definition and state of a task.

Attributes:

Name Type Description
input str

The task input.

session_id str

The session ID that the task belongs to.

task_id str

The task ID. Defaults to a random UUID.

agent_id str

The agent ID that the task should be sent to. If blank, the orchestrator decides.

Source code in llama_deploy/llama_deploy/types.py
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
class TaskDefinition(BaseModel):
    """
    The definition and state of a task.

    Attributes:
        input (str):
            The task input.
        task_id (str):
            The task ID. Defaults to a random UUID.
        session_id (Optional[str]):
            The session ID that the task belongs to, if any.
        agent_id (Optional[str]):
            The agent ID that the task should be sent to. If blank,
            the orchestrator decides.
    """

    input: str
    task_id: str = Field(default_factory=generate_id)
    session_id: Optional[str] = None
    agent_id: Optional[str] = None

SessionDefinition #

Bases: BaseModel

The definition of a session.

Attributes:

Name Type Description
session_id str

The session ID. Defaults to a random UUID.

task_ids List[str]

The task ids in order, representing the session.

state dict

The current session state.

Source code in llama_deploy/llama_deploy/types.py
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
class SessionDefinition(BaseModel):
    """
    The definition of a session.

    Attributes:
        session_id (str):
            The session ID. Defaults to a random UUID.
        task_ids (List[str]):
            The task ids in order, representing the session.
        state (dict):
            The current session state.
    """

    session_id: str = Field(default_factory=generate_id)
    task_ids: List[str] = Field(default_factory=list)
    state: dict = Field(default_factory=dict)

    @property
    def current_task_id(self) -> Optional[str]:
        """The most recently added task id, or None when the session has no tasks."""
        if not self.task_ids:
            return None

        return self.task_ids[-1]

NewTask #

Bases: BaseModel

The payload for a new task message.

Source code in llama_deploy/llama_deploy/types.py
129
130
131
132
133
class NewTask(BaseModel):
    """Payload for a new-task message.

    Attributes:
        task (TaskDefinition):
            The definition of the task to run.
        state (Dict[str, Any]):
            Initial state associated with the task.
    """

    task: TaskDefinition
    state: Dict[str, Any] = Field(default_factory=dict)

TaskResult #

Bases: BaseModel

The result of a task.

Attributes:

Name Type Description
task_id str

The task ID.

history List[ChatMessage]

The task history.

result str

The task result.

data dict

Additional data about the task or result.

Source code in llama_deploy/llama_deploy/types.py
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
class TaskResult(BaseModel):
    """
    The result of a task.

    Attributes:
        task_id (str):
            The task ID.
        history (List[ChatMessage]):
            The chat history accumulated while running the task.
        result (str):
            The final task result.
        data (dict):
            Additional data about the task or its result.
    """

    task_id: str
    history: List[ChatMessage]
    result: str
    data: dict = Field(default_factory=dict)

ToolCallBundle #

Bases: BaseModel

A bundle of information for a tool call.

Attributes:

Name Type Description
tool_name str

The name of the tool.

tool_args List[Any]

The tool arguments.

tool_kwargs Dict[str, Any]

The tool keyword arguments

Source code in llama_deploy/llama_deploy/types.py
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
class ToolCallBundle(BaseModel):
    """
    A bundle of information for a tool call.

    Attributes:
        tool_name (str):
            The name of the tool to invoke.
        tool_args (List[Any]):
            Positional arguments for the tool.
        tool_kwargs (Dict[str, Any]):
            Keyword arguments for the tool.
    """

    tool_name: str
    tool_args: List[Any]
    tool_kwargs: Dict[str, Any]

ToolCall #

Bases: BaseModel

A tool call.

Attributes:

Name Type Description
id_ str

The tool call ID. Defaults to a random UUID.

tool_call_bundle ToolCallBundle

The tool call bundle.

source_id str

The source ID.

Source code in llama_deploy/llama_deploy/types.py
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
class ToolCall(BaseModel):
    """
    A tool call.

    Attributes:
        id_ (str):
            The tool call ID. Defaults to a random UUID.
        tool_call_bundle (ToolCallBundle):
            The name/args/kwargs bundle describing the call.
        source_id (str):
            The ID of the source that issued the call.
    """

    id_: str = Field(default_factory=generate_id)
    tool_call_bundle: ToolCallBundle
    source_id: str

ToolCallResult #

Bases: BaseModel

A tool call result.

Attributes:

Name Type Description
id_ str

The tool call ID. Should match the ID of the tool call.

tool_message ChatMessage

The tool message.

result str

The tool result.

Source code in llama_deploy/llama_deploy/types.py
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
class ToolCallResult(BaseModel):
    """
    A tool call result.

    Attributes:
        id_ (str):
            The tool call ID. Should match the ID of the originating tool call.
        tool_message (ChatMessage):
            The chat message produced by the tool.
        result (str):
            The tool result as a string.
    """

    id_: str
    tool_message: ChatMessage
    result: str

ServiceDefinition #

Bases: BaseModel

The definition of a service, bundles useful information describing the service.

Attributes:

Name Type Description
service_name str

The name of the service.

description str

A description of the service and its purpose.

prompt List[ChatMessage]

Specific instructions for the service.

host Optional[str]

The host of the service, if it's a network service.

port Optional[int]

The port of the service, if it's a network service.

Source code in llama_deploy/llama_deploy/types.py
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
class ServiceDefinition(BaseModel):
    """
    The definition of a service, bundles useful information describing the service.

    Attributes:
        service_name (str):
            The name of the service.
        description (str):
            A description of the service and its purpose.
        prompt (List[ChatMessage]):
            Specific instructions for the service.
        host (Optional[str]):
            The host of the service, if it's a network service.
        port (Optional[int]):
            The port of the service, if it's a network service.
    """

    service_name: str = Field(description="The name of the service.")
    # NOTE: fixed typo in the user-facing field description ("it's" -> "its").
    description: str = Field(
        description="A description of the service and its purpose."
    )
    prompt: List[ChatMessage] = Field(
        default_factory=list, description="Specific instructions for the service."
    )
    host: Optional[str] = None
    port: Optional[int] = None

HumanResponse #

Bases: BaseModel

A simple human response.

Attributes:

Name Type Description
result str

The human response.

Source code in llama_deploy/llama_deploy/types.py
239
240
241
242
243
244
245
246
247
248
class HumanResponse(BaseModel):
    """
    A simple human response.

    Attributes:
        result (str):
            The human response text.
    """

    # NOTE: the docstring previously documented a ``response`` attribute,
    # but the actual field name is ``result``.
    result: str

options: members: - ActionTypes - NewTask - ServiceDefinition - SessionDefinition - TaskDefinition - TaskResult