types #

DeploymentDefinition #

Bases: BaseModel

Parameters:

name (str): required
Source code in llama_deploy/types/apiserver.py
class DeploymentDefinition(BaseModel):
    name: str
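
A minimal usage sketch; the import path is inferred from the source location above:

from llama_deploy.types.apiserver import DeploymentDefinition

deployment = DeploymentDefinition(name="my-deployment")
print(deployment.model_dump())  # {'name': 'my-deployment'}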

Status #

Bases: BaseModel

Parameters:

status (StatusEnum): required
status_message (str): required
max_deployments (int | None): defaults to None
deployments (list[str] | None): defaults to None
Source code in llama_deploy/types/apiserver.py
class Status(BaseModel):
    status: StatusEnum
    status_message: str
    max_deployments: int | None = None
    deployments: list[str] | None = None
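
A construction sketch; StatusEnum's members are not listed on this page, so HEALTHY below is an assumption (check llama_deploy/types/apiserver.py for the actual values):

from llama_deploy.types.apiserver import Status, StatusEnum

status = Status(
    status=StatusEnum.HEALTHY,  # assumed member name
    status_message="API server is up",
    max_deployments=10,
)
print(status.model_dump_json())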

ActionTypes #

Bases: str, Enum

Action types for messages. Different consumers will handle (or ignore) different action types.

Source code in llama_deploy/types/core.py
class ActionTypes(str, Enum):
    """
    Action types for messages.
    Different consumers will handle (or ignore) different action types.
    """

    NEW_TASK = "new_task"
    COMPLETED_TASK = "completed_task"
    REQUEST_FOR_HELP = "request_for_help"
    NEW_TOOL_CALL = "new_tool_call"
    COMPLETED_TOOL_CALL = "completed_tool_call"
    TASK_STREAM = "task_stream"
    SEND_EVENT = "send_event"
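
Because ActionTypes subclasses str, members compare equal to their raw string values and round-trip cleanly through serialization. A small consumer dispatch sketch:

from llama_deploy.types.core import ActionTypes

def handle(action: ActionTypes) -> None:
    # Consumers are free to ignore action types they do not understand.
    if action == ActionTypes.NEW_TASK:
        print("received a new task")
    elif action == ActionTypes.TASK_STREAM:
        print("received streaming output")
    else:
        print(f"ignoring action: {action.value}")

handle(ActionTypes("new_task"))  # raw strings convert back into the enum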

ChatMessage #

Bases: BaseModel

Chat message.

Parameters:

role (MessageRole): defaults to MessageRole.USER
additional_kwargs (dict[str, Any]): defaults to an empty dict
blocks (list[ContentBlock]): content blocks (TextBlock, ImageBlock, or AudioBlock); defaults to an empty list
Source code in llama-index-core/llama_index/core/base/llms/types.py
class ChatMessage(BaseModel):
    """Chat message."""

    role: MessageRole = MessageRole.USER
    additional_kwargs: dict[str, Any] = Field(default_factory=dict)
    blocks: list[ContentBlock] = Field(default_factory=list)

    def __init__(self, /, content: Any | None = None, **data: Any) -> None:
        """Keeps backward compatibility with the old `content` field.

        If content was passed and contained text, store a single TextBlock.
        If content was passed and it was a list, assume it's a list of content blocks and store it.
        """
        if content is not None:
            if isinstance(content, str):
                data["blocks"] = [TextBlock(text=content)]
            elif isinstance(content, list):
                data["blocks"] = content

        super().__init__(**data)

    @model_validator(mode="after")
    def legacy_additional_kwargs_image(self) -> Self:
        """Provided for backward compatibility.

        If `additional_kwargs` contains an `images` key, assume the value is a list
        of ImageDocument and convert them into image blocks.
        """
        if documents := self.additional_kwargs.get("images"):
            documents = cast(list[ImageDocument], documents)
            for doc in documents:
                img_base64_bytes = doc.resolve_image(as_base64=True).read()
                self.blocks.append(ImageBlock(image=img_base64_bytes))
        return self

    @property
    def content(self) -> str | None:
        """Keeps backward compatibility with the old `content` field.

        Returns:
            The cumulative content of the TextBlock blocks, None if there are none.
        """
        content = ""
        for block in self.blocks:
            if isinstance(block, TextBlock):
                content += block.text

        return content or None

    @content.setter
    def content(self, content: str) -> None:
        """Keeps backward compatibility with the old `content` field.

        Raises:
            ValueError: if blocks contains more than one block, or a block that's not a TextBlock.
        """
        if not self.blocks:
            self.blocks = [TextBlock(text=content)]
        elif len(self.blocks) == 1 and isinstance(self.blocks[0], TextBlock):
            self.blocks = [TextBlock(text=content)]
        else:
            raise ValueError(
                "ChatMessage contains multiple blocks, use 'ChatMessage.blocks' instead."
            )

    def __str__(self) -> str:
        return f"{self.role.value}: {self.content}"

    @classmethod
    def from_str(
        cls,
        content: str,
        role: Union[MessageRole, str] = MessageRole.USER,
        **kwargs: Any,
    ) -> Self:
        if isinstance(role, str):
            role = MessageRole(role)
        return cls(role=role, blocks=[TextBlock(text=content)], **kwargs)

    def _recursive_serialization(self, value: Any) -> Any:
        if isinstance(value, BaseModel):
            value.model_rebuild()  # ensures all fields are initialized and serializable
            return value.model_dump()  # type: ignore
        if isinstance(value, dict):
            return {
                key: self._recursive_serialization(value)
                for key, value in value.items()
            }
        if isinstance(value, list):
            return [self._recursive_serialization(item) for item in value]
        return value

    @field_serializer("additional_kwargs", check_fields=False)
    def serialize_additional_kwargs(self, value: Any, _info: Any) -> Any:
        return self._recursive_serialization(value)
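
A short sketch of the backward-compatible constructor; the import path is taken from the source location above:

from llama_index.core.base.llms.types import ChatMessage, TextBlock

# A plain string passed as `content` is wrapped in a single TextBlock.
msg = ChatMessage(content="Hello!")
assert isinstance(msg.blocks[0], TextBlock)
assert msg.content == "Hello!"

# Equivalent explicit construction; a plain-string role is coerced to MessageRole.
msg2 = ChatMessage.from_str("Hello!", role="user")
print(msg2)  # user: Hello!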

content property writable #

content: str | None

Keeps backward compatibility with the old content field.

Returns:

str | None: The cumulative content of the TextBlock blocks, or None if there are none.
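
A sketch of the getter and setter behavior, including the multi-block failure mode:

from llama_index.core.base.llms.types import ChatMessage, TextBlock

msg = ChatMessage(content="first draft")
msg.content = "final draft"  # fine: the single TextBlock is replaced

msg.blocks.append(TextBlock(text=" (appendix)"))
print(msg.content)  # "final draft (appendix)": TextBlock texts are concatenated

try:
    msg.content = "rejected"  # more than one block: the setter refuses
except ValueError:
    print("edit multi-block messages via ChatMessage.blocks")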

legacy_additional_kwargs_image #

legacy_additional_kwargs_image() -> Self

Provided for backward compatibility.

If additional_kwargs contains an images key, assume the value is a list of ImageDocument and convert them into image blocks.

Source code in llama-index-core/llama_index/core/base/llms/types.py
@model_validator(mode="after")
def legacy_additional_kwargs_image(self) -> Self:
    """Provided for backward compatibility.

    If `additional_kwargs` contains an `images` key, assume the value is a list
    of ImageDocument and convert them into image blocks.
    """
    if documents := self.additional_kwargs.get("images"):
        documents = cast(list[ImageDocument], documents)
        for doc in documents:
            img_base64_bytes = doc.resolve_image(as_base64=True).read()
            self.blocks.append(ImageBlock(image=img_base64_bytes))
    return self

EventDefinition #

Bases: BaseModel

The definition of an event.

Used as the payload for service endpoints that receive serialized Events.

Parameters:

agent_id (str): required
event_obj_str (str): required

Attributes:

event_obj_str (str): Serialized string of the event.

Source code in llama_deploy/types/core.py
class EventDefinition(BaseModel):
    """The definition of event.

    To be used as payloads for service endpoints when wanting to send serialized
    Events.

    Attributes:
        event_object_str (str): serialized string of event.
    """

    agent_id: str
    event_obj_str: str
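
A payload-building sketch; JSON is an assumption here, use whatever serialization your events support:

import json

from llama_deploy.types.core import EventDefinition

payload = EventDefinition(
    agent_id="my-agent",
    event_obj_str=json.dumps({"msg": "hello"}),  # any serialized event
)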

HumanResponse #

Bases: BaseModel

A simple human response.

Parameters:

result (str): required

Attributes:

result (str): The human response.

Source code in llama_deploy/types/core.py
class HumanResponse(BaseModel):
    """
    A simple human response.

    Attributes:
        result (str):
            The human response.
    """

    result: str

ServiceDefinition #

Bases: BaseModel

The definition of a service, bundles useful information describing the service.

Parameters:

service_name (str): The name of the service. Required.
description (str): A description of the service and its purpose. Required.
prompt (list[ChatMessage]): Specific instructions for the service. Defaults to an empty list.
host (str | None): defaults to None
port (int | None): defaults to None

Attributes:

service_name (str): The name of the service.
description (str): A description of the service and its purpose.
prompt (list[ChatMessage]): Specific instructions for the service.
host (str | None): The host of the service, if it's a network service.
port (int | None): The port of the service, if it's a network service.

Source code in llama_deploy/types/core.py
class ServiceDefinition(BaseModel):
    """
    The definition of a service, bundles useful information describing the service.

    Attributes:
        service_name (str):
            The name of the service.
        description (str):
            A description of the service and its purpose.
        prompt (list[ChatMessage]):
            Specific instructions for the service.
        host (str | None):
            The host of the service, if it's a network service.
        port (int | None):
            The port of the service, if it's a network service.
    """

    service_name: str = Field(description="The name of the service.")
    description: str = Field(
        description="A description of the service and it's purpose."
    )
    prompt: list[ChatMessage] = Field(
        default_factory=list, description="Specific instructions for the service."
    )
    host: str | None = None
    port: int | None = None
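
A construction sketch; import paths are inferred from the source locations on this page:

from llama_deploy.types.core import ServiceDefinition
from llama_index.core.base.llms.types import ChatMessage

svc = ServiceDefinition(
    service_name="summarizer",
    description="Summarizes long documents.",
    prompt=[ChatMessage.from_str("Answer in three sentences.", role="system")],
    host="127.0.0.1",  # only needed for network services
    port=8002,
)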

SessionDefinition #

Bases: BaseModel

The definition of a session.

Parameters:

session_id (str): defaults to a random UUID
task_ids (list[str]): defaults to an empty list
state (dict): defaults to an empty dict

Attributes:

session_id (str): The session ID. Defaults to a random UUID.
task_ids (list[str]): The task IDs in order, representing the session.
state (dict): The current session state.

Source code in llama_deploy/types/core.py
class SessionDefinition(BaseModel):
    """
    The definition of a session.

    Attributes:
        session_id (str):
            The session ID. Defaults to a random UUID.
        task_ids (list[str]):
            The task IDs in order, representing the session.
        state (dict):
            The current session state.
    """

    session_id: str = Field(default_factory=generate_id)
    task_ids: list[str] = Field(default_factory=list)
    state: dict = Field(default_factory=dict)

    @property
    def current_task_id(self) -> str | None:
        if len(self.task_ids) == 0:
            return None

        return self.task_ids[-1]
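
A sketch of current_task_id, which always points at the most recently added task:

from llama_deploy.types.core import SessionDefinition

session = SessionDefinition()  # session_id defaults to a fresh UUID
assert session.current_task_id is None  # no tasks yet
session.task_ids.append("task-123")
assert session.current_task_id == "task-123"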

TaskDefinition #

Bases: BaseModel

The definition and state of a task.

Parameters:

input (str): required
task_id (str): defaults to a random UUID
session_id (str | None): defaults to None
agent_id (str | None): defaults to None

Attributes:

input (str): The task input.
session_id (str | None): The session ID that the task belongs to.
task_id (str): The task ID. Defaults to a random UUID.
agent_id (str | None): The agent ID that the task should be sent to. If blank, the orchestrator decides.

Source code in llama_deploy/types/core.py
class TaskDefinition(BaseModel):
    """
    The definition and state of a task.

    Attributes:
        input (str):
            The task input.
        session_id (str):
            The session ID that the task belongs to.
        task_id (str):
            The task ID. Defaults to a random UUID.
        agent_id (str):
            The agent ID that the task should be sent to.
            If blank, the orchestrator decides.
    """

    input: str
    task_id: str = Field(default_factory=generate_id)
    session_id: str | None = None
    agent_id: str | None = None
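
A minimal sketch; fields left unset fall back to their defaults, leaving routing to the orchestrator:

from llama_deploy.types.core import TaskDefinition

task = TaskDefinition(input="Summarize this document.")
# task_id is generated automatically; session_id and agent_id stay None.
print(task.task_id)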

TaskResult #

Bases: BaseModel

The result of a task.

Parameters:

task_id (str): required
history (list[ChatMessage]): required
result (str): required
data (dict): defaults to an empty dict

Attributes:

task_id (str): The task ID.
history (list[ChatMessage]): The task history.
result (str): The task result.
data (dict): Additional data about the task or result.

Source code in llama_deploy/types/core.py
class TaskResult(BaseModel):
    """
    The result of a task.

    Attributes:
        task_id (str):
            The task ID.
        history (list[ChatMessage]):
            The task history.
        result (str):
            The task result.
        data (dict):
            Additional data about the task or result.
    """

    task_id: str
    history: list[ChatMessage]
    result: str
    data: dict = Field(default_factory=dict)
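
A construction sketch; the ChatMessage import path is taken from the source location earlier on this page:

from llama_deploy.types.core import TaskResult
from llama_index.core.base.llms.types import ChatMessage

result = TaskResult(
    task_id="task-123",
    history=[ChatMessage.from_str("Summarize this.", role="user")],
    result="Here is the summary...",
)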

TaskStream #

Bases: BaseModel

A stream of data generated by a task.

Parameters:

task_id (str): required
session_id (str | None): required
data (dict): required
index (int): required

Attributes:

task_id (str): The associated task ID.
session_id (str | None): The session the task belongs to, if any.
data (dict): The stream data.
index (int): The index of the stream data.

Source code in llama_deploy/types/core.py
class TaskStream(BaseModel):
    """
    A stream of data generated by a task.

    Attributes:
        task_id (str):
            The associated task ID.
        data (dict):
            The stream data.
        index (int):
            The index of the stream data.
    """

    task_id: str
    session_id: str | None
    data: dict
    index: int
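
A sketch of a single stream chunk; the "delta" key is illustrative, not part of the schema:

from llama_deploy.types.core import TaskStream

chunk = TaskStream(
    task_id="task-123",
    session_id=None,
    data={"delta": "partial output"},  # hypothetical payload shape
    index=0,
)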

ToolCall #

Bases: BaseModel

A tool call.

Parameters:

id_ (str): defaults to a random UUID
tool_call_bundle (ToolCallBundle): required
source_id (str): required

Attributes:

id_ (str): The tool call ID. Defaults to a random UUID.
tool_call_bundle (ToolCallBundle): The tool call bundle.
source_id (str): The source ID.

Source code in llama_deploy/types/core.py
class ToolCall(BaseModel):
    """
    A tool call.

    Attributes:
        id_ (str):
            The tool call ID. Defaults to a random UUID.
        tool_call_bundle (ToolCallBundle):
            The tool call bundle.
        source_id (str):
            The source ID.
    """

    id_: str = Field(default_factory=generate_id)
    tool_call_bundle: ToolCallBundle
    source_id: str

ToolCallBundle #

Bases: BaseModel

A bundle of information for a tool call.

Parameters:

tool_name (str): required
tool_args (list[Any]): required
tool_kwargs (dict[str, Any]): required

Attributes:

tool_name (str): The name of the tool.
tool_args (list[Any]): The tool arguments.
tool_kwargs (dict[str, Any]): The tool keyword arguments.

Source code in llama_deploy/types/core.py
class ToolCallBundle(BaseModel):
    """
    A bundle of information for a tool call.

    Attributes:
        tool_name (str):
            The name of the tool.
        tool_args (list[Any]):
            The tool arguments.
        tool_kwargs (dict[str, Any]):
            The tool keyword arguments.
    """

    tool_name: str
    tool_args: list[Any]
    tool_kwargs: dict[str, Any]
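
A sketch pairing a bundle with the ToolCall model documented above; the tool name and arguments are illustrative:

from llama_deploy.types.core import ToolCall, ToolCallBundle

bundle = ToolCallBundle(
    tool_name="search",
    tool_args=["llama agents"],
    tool_kwargs={"top_k": 3},
)
call = ToolCall(tool_call_bundle=bundle, source_id="agent-1")  # id_ auto-generated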

ToolCallResult #

Bases: BaseModel

A tool call result.

Parameters:

id_ (str): required
tool_message (ChatMessage): required
result (str): required

Attributes:

id_ (str): The tool call ID. Should match the ID of the tool call.
tool_message (ChatMessage): The tool message.
result (str): The tool result.

Source code in llama_deploy/types/core.py
class ToolCallResult(BaseModel):
    """
    A tool call result.

    Attributes:
        id_ (str):
            The tool call ID. Should match the ID of the tool call.
        tool_message (ChatMessage):
            The tool message.
        result (str):
            The tool result.
    """

    id_: str
    tool_message: ChatMessage
    result: str
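
A construction sketch; the id_ value is hypothetical and should match the originating ToolCall:

from llama_deploy.types.core import ToolCallResult
from llama_index.core.base.llms.types import ChatMessage, MessageRole

tool_result = ToolCallResult(
    id_="call-abc123",  # hypothetical: reuse ToolCall.id_ in practice
    tool_message=ChatMessage.from_str("3 results found", role=MessageRole.TOOL),
    result="3 results found",
)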