Event types

BaseEvent #

Bases: BaseModel

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| timestamp | datetime | Time the event was created. | `datetime.now()` at creation |
| id_ | str | Unique ID for the event. | newly generated UUID string |
| span_id | str \| None | ID of the active span, if any. | current active span ID (or `None`) |
| tags | Dict[str, Any] | Arbitrary tags attached to the event. | `{}` |
Source code in llama-index-core/llama_index/core/instrumentation/events/base.py
class BaseEvent(BaseModel):
    model_config = ConfigDict(
        arbitrary_types_allowed=True,
        # copy_on_model_validation = "deep"  # not supported in Pydantic V2...
    )
    timestamp: datetime = Field(default_factory=lambda: datetime.now())
    id_: str = Field(default_factory=lambda: str(uuid4()))
    span_id: Optional[str] = Field(default_factory=active_span_id.get)
    tags: Dict[str, Any] = Field(default={})

    @classmethod
    def class_name(cls) -> str:
        """Return class name."""
        return "BaseEvent"

    def dict(self, **kwargs: Any) -> Dict[str, Any]:
        """Keep for backwards compatibility."""
        return self.model_dump(**kwargs)

    def model_dump(self, **kwargs: Any) -> Dict[str, Any]:
        data = super().model_dump(**kwargs)
        data["class_name"] = self.class_name()
        return data

class_name classmethod #

class_name() -> str

Return class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/base.py
@classmethod
def class_name(cls) -> str:
    """Return class name."""
    return "BaseEvent"

dict #

dict(**kwargs: Any) -> Dict[str, Any]

Keep for backwards compatibility.

Source code in llama-index-core/llama_index/core/instrumentation/events/base.py
def dict(self, **kwargs: Any) -> Dict[str, Any]:
    """Keep for backwards compatibility."""
    return self.model_dump(**kwargs)
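
All of the event types on this page derive from BaseEvent, so a single event handler attached to the dispatcher receives every one of them. The sketch below shows the general pattern, assuming the standard `get_dispatcher` / `BaseEventHandler` instrumentation API; the handler name and print logic are illustrative only.

from typing import Any

from llama_index.core.instrumentation import get_dispatcher
from llama_index.core.instrumentation.event_handlers import BaseEventHandler
from llama_index.core.instrumentation.events.base import BaseEvent


class PrintEventHandler(BaseEventHandler):
    """Illustrative handler that prints every event it receives."""

    @classmethod
    def class_name(cls) -> str:
        return "PrintEventHandler"

    def handle(self, event: BaseEvent, **kwargs: Any) -> None:
        # model_dump() injects "class_name", so each record is self-describing.
        record = event.model_dump()
        print(record["class_name"], event.timestamp, event.id_, event.span_id)


# Attach to the root dispatcher so events from all modules are observed.
root_dispatcher = get_dispatcher()
root_dispatcher.add_event_handler(PrintEventHandler())

The handler sketches further down this page assume registration of this kind and differ only in which event types they inspect.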

AgentChatWithStepEndEvent #

Bases: BaseEvent

AgentChatWithStepEndEvent.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| response | Optional[AGENT_CHAT_RESPONSE_TYPE] | Agent chat response. | required |
Source code in llama-index-core/llama_index/core/instrumentation/events/agent.py
class AgentChatWithStepEndEvent(BaseEvent):
    """AgentChatWithStepEndEvent.

    Args:
        response (Optional[AGENT_CHAT_RESPONSE_TYPE]): Agent chat response.
    """

    response: Optional[AGENT_CHAT_RESPONSE_TYPE]

    @model_validator(mode="before")
    @classmethod
    def validate_response(cls: Any, values: Any) -> Any:
        """Validate response."""
        response = values.get("response")
        if response is None:
            pass
        elif not isinstance(response, AgentChatResponse) and not isinstance(
            response, StreamingAgentChatResponse
        ):
            raise ValueError(
                "response must be of type AgentChatResponse or StreamingAgentChatResponse"
            )

        return values

    @field_validator("response", mode="before")
    @classmethod
    def validate_response_type(cls: Any, response: Any) -> Any:
        """Validate response type."""
        if response is None:
            return response
        if not isinstance(response, AgentChatResponse) and not isinstance(
            response, StreamingAgentChatResponse
        ):
            raise ValueError(
                "response must be of type AgentChatResponse or StreamingAgentChatResponse"
            )
        return response

    @classmethod
    def class_name(cls) -> str:
        """Class name."""
        return "AgentChatWithStepEndEvent"

validate_response classmethod #

validate_response(values: Any) -> Any

Validate response.

Source code in llama-index-core/llama_index/core/instrumentation/events/agent.py
@model_validator(mode="before")
@classmethod
def validate_response(cls: Any, values: Any) -> Any:
    """Validate response."""
    response = values.get("response")
    if response is None:
        pass
    elif not isinstance(response, AgentChatResponse) and not isinstance(
        response, StreamingAgentChatResponse
    ):
        raise ValueError(
            "response must be of type AgentChatResponse or StreamingAgentChatResponse"
        )

    return values

validate_response_type classmethod #

validate_response_type(response: Any) -> Any

Validate response type.

Source code in llama-index-core/llama_index/core/instrumentation/events/agent.py
@field_validator("response", mode="before")
@classmethod
def validate_response_type(cls: Any, response: Any) -> Any:
    """Validate response type."""
    if response is None:
        return response
    if not isinstance(response, AgentChatResponse) and not isinstance(
        response, StreamingAgentChatResponse
    ):
        raise ValueError(
            "response must be of type AgentChatResponse or StreamingAgentChatResponse"
        )
    return response

class_name classmethod #

class_name() -> str

Class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/agent.py
@classmethod
def class_name(cls) -> str:
    """Class name."""
    return "AgentChatWithStepEndEvent"

AgentChatWithStepStartEvent #

Bases: BaseEvent

AgentChatWithStepStartEvent.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| user_msg | str | User input message. | required |
Source code in llama-index-core/llama_index/core/instrumentation/events/agent.py
class AgentChatWithStepStartEvent(BaseEvent):
    """AgentChatWithStepStartEvent.

    Args:
        user_msg (str): User input message.
    """

    user_msg: str

    @classmethod
    def class_name(cls) -> str:
        """Class name."""
        return "AgentChatWithStepStartEvent"

class_name classmethod #

class_name() -> str

Class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/agent.py
@classmethod
def class_name(cls) -> str:
    """Class name."""
    return "AgentChatWithStepStartEvent"

AgentRunStepEndEvent #

Bases: BaseEvent

AgentRunStepEndEvent.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| step_output | TaskStepOutput | Task step output. | required |
Source code in llama-index-core/llama_index/core/instrumentation/events/agent.py
class AgentRunStepEndEvent(BaseEvent):
    """AgentRunStepEndEvent.

    Args:
        step_output (TaskStepOutput): Task step output.
    """

    step_output: TaskStepOutput

    @classmethod
    def class_name(cls) -> str:
        """Class name."""
        return "AgentRunStepEndEvent"

class_name classmethod #

class_name() -> str

Class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/agent.py
@classmethod
def class_name(cls) -> str:
    """Class name."""
    return "AgentRunStepEndEvent"

AgentRunStepStartEvent #

Bases: BaseEvent

AgentRunStepStartEvent.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| task_id | str | Task ID. | required |
| step | Optional[TaskStep] | Task step. | required |
| input | Optional[str] | Optional input. | required |
Source code in llama-index-core/llama_index/core/instrumentation/events/agent.py
class AgentRunStepStartEvent(BaseEvent):
    """AgentRunStepStartEvent.

    Args:
        task_id (str): Task ID.
        step (Optional[TaskStep]): Task step.
        input (Optional[str]): Optional input.
    """

    task_id: str
    step: Optional[TaskStep]
    input: Optional[str]

    @classmethod
    def class_name(cls) -> str:
        """Class name."""
        return "AgentRunStepStartEvent"

class_name classmethod #

class_name() -> str

Class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/agent.py
@classmethod
def class_name(cls) -> str:
    """Class name."""
    return "AgentRunStepStartEvent"

AgentToolCallEvent #

Bases: BaseEvent

AgentToolCallEvent.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| arguments | str | Arguments. | required |
| tool | ToolMetadata | Tool metadata. | required |
Source code in llama-index-core/llama_index/core/instrumentation/events/agent.py
class AgentToolCallEvent(BaseEvent):
    """AgentToolCallEvent.

    Args:
        arguments (str): Arguments.
        tool (ToolMetadata): Tool metadata.
    """

    arguments: str
    tool: ToolMetadata

    @classmethod
    def class_name(cls) -> str:
        """Class name."""
        return "AgentToolCallEvent"

class_name classmethod #

class_name() -> str

Class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/agent.py
@classmethod
def class_name(cls) -> str:
    """Class name."""
    return "AgentToolCallEvent"

StreamChatDeltaReceivedEvent #

Bases: BaseEvent

StreamChatDeltaReceivedEvent.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| delta | str | Delta received from the stream chat. | required |
Source code in llama-index-core/llama_index/core/instrumentation/events/chat_engine.py
class StreamChatDeltaReceivedEvent(BaseEvent):
    """StreamChatDeltaReceivedEvent.

    Args:
        delta (str): Delta received from the stream chat.
    """

    delta: str

    @classmethod
    def class_name(cls) -> str:
        """Class name."""
        return "StreamChatDeltaReceivedEvent"

class_name classmethod #

class_name() -> str

Class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/chat_engine.py
@classmethod
def class_name(cls) -> str:
    """Class name."""
    return "StreamChatDeltaReceivedEvent"

StreamChatEndEvent #

Bases: BaseEvent

StreamChatEndEvent.

Fired at the end of writing to the stream chat-engine queue.

Source code in llama-index-core/llama_index/core/instrumentation/events/chat_engine.py
class StreamChatEndEvent(BaseEvent):
    """StreamChatEndEvent.

    Fired at the end of writing to the stream chat-engine queue.
    """

    @classmethod
    def class_name(cls) -> str:
        """Class name."""
        return "StreamChatEndEvent"

class_name classmethod #

class_name() -> str

Class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/chat_engine.py
@classmethod
def class_name(cls) -> str:
    """Class name."""
    return "StreamChatEndEvent"

StreamChatErrorEvent #

Bases: BaseEvent

StreamChatErrorEvent.

Fired when an exception is raised during the stream chat-engine operation.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| exception | Exception | Exception raised during the stream chat operation. | required |
Source code in llama-index-core/llama_index/core/instrumentation/events/chat_engine.py
class StreamChatErrorEvent(BaseEvent):
    """StreamChatErrorEvent.

    Fired when an exception is raised during the stream chat-engine operation.

    Args:
        exception (Exception): Exception raised during the stream chat operation.
    """

    exception: Exception

    @classmethod
    def class_name(cls) -> str:
        """Class name."""
        return "StreamChatErrorEvent"

class_name classmethod #

class_name() -> str

Class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/chat_engine.py
@classmethod
def class_name(cls) -> str:
    """Class name."""
    return "StreamChatErrorEvent"

StreamChatStartEvent #

Bases: BaseEvent

StreamChatStartEvent.

Fired at the start of writing to the stream chat-engine queue.

Source code in llama-index-core/llama_index/core/instrumentation/events/chat_engine.py
class StreamChatStartEvent(BaseEvent):
    """StreamChatStartEvent.

    Fired at the start of writing to the stream chat-engine queue.
    """

    @classmethod
    def class_name(cls) -> str:
        """Class name."""
        return "StreamChatStartEvent"

class_name classmethod #

class_name() -> str

Class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/chat_engine.py
@classmethod
def class_name(cls) -> str:
    """Class name."""
    return "StreamChatStartEvent"

EmbeddingEndEvent #

Bases: BaseEvent

EmbeddingEndEvent.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| chunks | List[str] | List of chunks. | required |
| embeddings | List[List[float]] | List of embeddings. | required |
Source code in llama-index-core/llama_index/core/instrumentation/events/embedding.py
class EmbeddingEndEvent(BaseEvent):
    """EmbeddingEndEvent.

    Args:
        chunks (List[str]): List of chunks.
        embeddings (List[List[float]]): List of embeddings.

    """

    chunks: List[str]
    embeddings: List[List[float]]

    @classmethod
    def class_name(cls) -> str:
        """Class name."""
        return "EmbeddingEndEvent"

class_name classmethod #

class_name() -> str

Class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/embedding.py
@classmethod
def class_name(cls) -> str:
    """Class name."""
    return "EmbeddingEndEvent"

EmbeddingStartEvent #

Bases: BaseEvent

EmbeddingStartEvent.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| model_dict | dict | Model dictionary containing details about the embedding model. | required |
Source code in llama-index-core/llama_index/core/instrumentation/events/embedding.py
class EmbeddingStartEvent(BaseEvent):
    """EmbeddingStartEvent.

    Args:
        model_dict (dict): Model dictionary containing details about the embedding model.
    """

    model_config = ConfigDict(protected_namespaces=("pydantic_model_",))
    model_dict: dict

    @classmethod
    def class_name(cls) -> str:
        """Class name."""
        return "EmbeddingStartEvent"

class_name classmethod #

class_name() -> str

Class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/embedding.py
@classmethod
def class_name(cls) -> str:
    """Class name."""
    return "EmbeddingStartEvent"

LLMChatEndEvent #

Bases: BaseEvent

LLMChatEndEvent.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| messages | List[ChatMessage] | List of chat messages. | required |
| response | Optional[ChatResponse] | Last chat response. | required |
Source code in llama-index-core/llama_index/core/instrumentation/events/llm.py
class LLMChatEndEvent(BaseEvent):
    """LLMChatEndEvent.

    Args:
        messages (List[ChatMessage]): List of chat messages.
        response (Optional[ChatResponse]): Last chat response.
    """

    messages: List[ChatMessage]
    response: Optional[ChatResponse]

    @classmethod
    def class_name(cls) -> str:
        """Class name."""
        return "LLMChatEndEvent"

class_name classmethod #

class_name() -> str

Class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/llm.py
@classmethod
def class_name(cls) -> str:
    """Class name."""
    return "LLMChatEndEvent"

LLMChatStartEvent #

Bases: BaseEvent

LLMChatStartEvent.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| messages | List[ChatMessage] | List of chat messages. | required |
| additional_kwargs | dict | Additional keyword arguments. | required |
| model_dict | dict | Model dictionary. | required |
Source code in llama-index-core/llama_index/core/instrumentation/events/llm.py
class LLMChatStartEvent(BaseEvent):
    """LLMChatStartEvent.

    Args:
        messages (List[ChatMessage]): List of chat messages.
        additional_kwargs (dict): Additional keyword arguments.
        model_dict (dict): Model dictionary.
    """

    model_config = ConfigDict(protected_namespaces=("pydantic_model_",))
    messages: List[ChatMessage]
    additional_kwargs: dict
    model_dict: dict

    @classmethod
    def class_name(cls) -> str:
        """Class name."""
        return "LLMChatStartEvent"

class_name classmethod #

class_name() -> str

Class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/llm.py
@classmethod
def class_name(cls) -> str:
    """Class name."""
    return "LLMChatStartEvent"

LLMCompletionEndEvent #

Bases: BaseEvent

LLMCompletionEndEvent.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| prompt | str | The prompt to be completed. | required |
| response | CompletionResponse | Completion response. | required |
Source code in llama-index-core/llama_index/core/instrumentation/events/llm.py
class LLMCompletionEndEvent(BaseEvent):
    """LLMCompletionEndEvent.

    Args:
        prompt (str): The prompt to be completed.
        response (CompletionResponse): Completion response.
    """

    prompt: str
    response: CompletionResponse

    @classmethod
    def class_name(cls) -> str:
        """Class name."""
        return "LLMCompletionEndEvent"

class_name classmethod #

class_name() -> str

Class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/llm.py
@classmethod
def class_name(cls) -> str:
    """Class name."""
    return "LLMCompletionEndEvent"

LLMCompletionStartEvent #

Bases: BaseEvent

LLMCompletionStartEvent.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| prompt | str | The prompt to be completed. | required |
| additional_kwargs | dict | Additional keyword arguments. | required |
| model_dict | dict | Model dictionary. | required |
Source code in llama-index-core/llama_index/core/instrumentation/events/llm.py
class LLMCompletionStartEvent(BaseEvent):
    """LLMCompletionStartEvent.

    Args:
        prompt (str): The prompt to be completed.
        additional_kwargs (dict): Additional keyword arguments.
        model_dict (dict): Model dictionary.
    """

    model_config = ConfigDict(protected_namespaces=("pydantic_model_",))
    prompt: str
    additional_kwargs: dict
    model_dict: dict

    @classmethod
    def class_name(cls) -> str:
        """Class name."""
        return "LLMCompletionStartEvent"

class_name classmethod #

class_name() -> str

Class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/llm.py
@classmethod
def class_name(cls) -> str:
    """Class name."""
    return "LLMCompletionStartEvent"

LLMPredictEndEvent #

Bases: BaseEvent

LLMPredictEndEvent.

The result of an llm.predict() call.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| output | str | Output. | required |
Source code in llama-index-core/llama_index/core/instrumentation/events/llm.py
class LLMPredictEndEvent(BaseEvent):
    """LLMPredictEndEvent.

    The result of an llm.predict() call.

    Args:
        output (str): Output.
    """

    output: str

    @classmethod
    def class_name(cls) -> str:
        """Class name."""
        return "LLMPredictEndEvent"

class_name classmethod #

class_name() -> str

Class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/llm.py
@classmethod
def class_name(cls) -> str:
    """Class name."""
    return "LLMPredictEndEvent"

LLMPredictStartEvent #

Bases: BaseEvent

LLMPredictStartEvent.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| template | BasePromptTemplate | Prompt template. | required |
| template_args | Optional[dict] | Prompt template arguments. | required |
Source code in llama-index-core/llama_index/core/instrumentation/events/llm.py
class LLMPredictStartEvent(BaseEvent):
    """LLMPredictStartEvent.

    Args:
        template (BasePromptTemplate): Prompt template.
        template_args (Optional[dict]): Prompt template arguments.
    """

    template: SerializeAsAny[BasePromptTemplate]
    template_args: Optional[dict]

    @classmethod
    def class_name(cls) -> str:
        """Class name."""
        return "LLMPredictStartEvent"

class_name classmethod #

class_name() -> str

Class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/llm.py
@classmethod
def class_name(cls) -> str:
    """Class name."""
    return "LLMPredictStartEvent"

QueryEndEvent #

Bases: BaseEvent

QueryEndEvent.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| query | QueryType | Query as a string or query bundle. | required |
| response | RESPONSE_TYPE | Response. | required |
Source code in llama-index-core/llama_index/core/instrumentation/events/query.py
class QueryEndEvent(BaseEvent):
    """QueryEndEvent.

    Args:
        query (QueryType): Query as a string or query bundle.
        response (RESPONSE_TYPE): Response.
    """

    query: QueryType
    response: RESPONSE_TYPE

    @classmethod
    def class_name(cls) -> str:
        """Class name."""
        return "QueryEndEvent"

class_name classmethod #

class_name() -> str

Class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/query.py
@classmethod
def class_name(cls) -> str:
    """Class name."""
    return "QueryEndEvent"

QueryStartEvent #

Bases: BaseEvent

QueryStartEvent.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| query | QueryType | Query as a string or query bundle. | required |
Source code in llama-index-core/llama_index/core/instrumentation/events/query.py
class QueryStartEvent(BaseEvent):
    """QueryStartEvent.

    Args:
        query (QueryType): Query as a string or query bundle.
    """

    query: QueryType

    @classmethod
    def class_name(cls) -> str:
        """Class name."""
        return "QueryStartEvent"

class_name classmethod #

class_name() -> str

Class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/query.py
@classmethod
def class_name(cls) -> str:
    """Class name."""
    return "QueryStartEvent"

RetrievalEndEvent #

Bases: BaseEvent

RetrievalEndEvent.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| str_or_query_bundle | QueryType | Query bundle. | required |
| nodes | List[NodeWithScore] | List of nodes with scores. | required |
Source code in llama-index-core/llama_index/core/instrumentation/events/retrieval.py
class RetrievalEndEvent(BaseEvent):
    """RetrievalEndEvent.

    Args:
        str_or_query_bundle (QueryType): Query bundle.
        nodes (List[NodeWithScore]): List of nodes with scores.
    """

    str_or_query_bundle: QueryType
    nodes: List[NodeWithScore]

    @classmethod
    def class_name(cls) -> str:
        """Class name."""
        return "RetrievalEndEvent"

class_name classmethod #

class_name() -> str

Class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/retrieval.py
@classmethod
def class_name(cls) -> str:
    """Class name."""
    return "RetrievalEndEvent"

RetrievalStartEvent #

Bases: BaseEvent

RetrievalStartEvent.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| str_or_query_bundle | QueryType | Query bundle. | required |
Source code in llama-index-core/llama_index/core/instrumentation/events/retrieval.py
class RetrievalStartEvent(BaseEvent):
    """RetrievalStartEvent.

    Args:
        str_or_query_bundle (QueryType): Query bundle.
    """

    str_or_query_bundle: QueryType

    @classmethod
    def class_name(cls) -> str:
        """Class name."""
        return "RetrievalStartEvent"

class_name classmethod #

class_name() -> str

Class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/retrieval.py
@classmethod
def class_name(cls) -> str:
    """Class name."""
    return "RetrievalStartEvent"

GetResponseEndEvent #

Bases: BaseEvent

GetResponseEndEvent.

Source code in llama-index-core/llama_index/core/instrumentation/events/synthesis.py
class GetResponseEndEvent(BaseEvent):
    """GetResponseEndEvent."""

    # TODO: consumes the first chunk of generators??
    # response: RESPONSE_TEXT_TYPE

    @classmethod
    def class_name(cls) -> str:
        """Class name."""
        return "GetResponseEndEvent"

class_name classmethod #

class_name() -> str

Class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/synthesis.py
@classmethod
def class_name(cls) -> str:
    """Class name."""
    return "GetResponseEndEvent"

GetResponseStartEvent #

Bases: BaseEvent

GetResponseStartEvent.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| query_str | str | Query string. | required |
| text_chunks | List[str] | List of text chunks. | required |
Source code in llama-index-core/llama_index/core/instrumentation/events/synthesis.py
class GetResponseStartEvent(BaseEvent):
    """GetResponseStartEvent.

    Args:
        query_str (str): Query string.
        text_chunks (List[str]): List of text chunks.
    """

    query_str: str
    text_chunks: List[str]

    @classmethod
    def class_name(cls) -> str:
        """Class name."""
        return "GetResponseStartEvent"

class_name classmethod #

class_name() -> str

Class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/synthesis.py
@classmethod
def class_name(cls) -> str:
    """Class name."""
    return "GetResponseStartEvent"

SynthesizeEndEvent #

Bases: BaseEvent

SynthesizeEndEvent.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| query | QueryType | Query as a string or query bundle. | required |
| response | RESPONSE_TYPE | Response. | required |
Source code in llama-index-core/llama_index/core/instrumentation/events/synthesis.py
class SynthesizeEndEvent(BaseEvent):
    """SynthesizeEndEvent.

    Args:
        query (QueryType): Query as a string or query bundle.
        response (RESPONSE_TYPE): Response.
    """

    query: QueryType
    response: RESPONSE_TYPE

    @classmethod
    def class_name(cls) -> str:
        """Class name."""
        return "SynthesizeEndEvent"

class_name classmethod #

class_name() -> str

Class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/synthesis.py
@classmethod
def class_name(cls) -> str:
    """Class name."""
    return "SynthesizeEndEvent"

SynthesizeStartEvent #

Bases: BaseEvent

SynthesizeStartEvent.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| query | QueryType | Query as a string or query bundle. | required |
Source code in llama-index-core/llama_index/core/instrumentation/events/synthesis.py
class SynthesizeStartEvent(BaseEvent):
    """SynthesizeStartEvent.

    Args:
        query (QueryType): Query as a string or query bundle.
    """

    query: QueryType

    @classmethod
    def class_name(cls) -> str:
        """Class name."""
        return "SynthesizeStartEvent"

class_name classmethod #

class_name() -> str

Class name.

Source code in llama-index-core/llama_index/core/instrumentation/events/synthesis.py
@classmethod
def class_name(cls) -> str:
    """Class name."""
    return "SynthesizeStartEvent"