BaseMemory #

Bases: BaseComponent

Base class for all memory types.

NOTE: The interface for memory is not yet finalized and is subject to change.

Source code in llama-index-core/llama_index/core/memory/types.py
class BaseMemory(BaseComponent):
    """Base class for all memory types.

    NOTE: The interface for memory is not yet finalized and is subject to change.
    """

    @classmethod
    def class_name(cls) -> str:
        """Get class name."""
        return "BaseMemory"

    @classmethod
    @abstractmethod
    def from_defaults(
        cls,
        **kwargs: Any,
    ) -> "BaseMemory":
        """Create a chat memory from defaults."""

    @abstractmethod
    def get(self, input: Optional[str] = None, **kwargs: Any) -> List[ChatMessage]:
        """Get chat history."""

    @abstractmethod
    def get_all(self) -> List[ChatMessage]:
        """Get all chat history."""

    @abstractmethod
    def put(self, message: ChatMessage) -> None:
        """Put chat history."""

    async def aput(self, message: ChatMessage) -> None:
        """Put chat history."""
        self.put(message)

    def put_messages(self, messages: List[ChatMessage]) -> None:
        """Put chat history."""
        for message in messages:
            self.put(message)

    @abstractmethod
    def set(self, messages: List[ChatMessage]) -> None:
        """Set chat history."""

    @abstractmethod
    def reset(self) -> None:
        """Reset chat history."""

class_name classmethod #

class_name() -> str

Get class name.

Source code in llama-index-core/llama_index/core/memory/types.py
@classmethod
def class_name(cls) -> str:
    """Get class name."""
    return "BaseMemory"

from_defaults abstractmethod classmethod #

from_defaults(**kwargs: Any) -> BaseMemory

Create a chat memory from defaults.

Source code in llama-index-core/llama_index/core/memory/types.py
@classmethod
@abstractmethod
def from_defaults(
    cls,
    **kwargs: Any,
) -> "BaseMemory":
    """Create a chat memory from defaults."""

get abstractmethod #

get(input: Optional[str] = None, **kwargs: Any) -> List[ChatMessage]

Get chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
@abstractmethod
def get(self, input: Optional[str] = None, **kwargs: Any) -> List[ChatMessage]:
    """Get chat history."""

get_all abstractmethod #

get_all() -> List[ChatMessage]

Get all chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
@abstractmethod
def get_all(self) -> List[ChatMessage]:
    """Get all chat history."""

put abstractmethod #

put(message: ChatMessage) -> None

Put chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
@abstractmethod
def put(self, message: ChatMessage) -> None:
    """Put chat history."""

aput async #

aput(message: ChatMessage) -> None

Put chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
async def aput(self, message: ChatMessage) -> None:
    """Put chat history."""
    self.put(message)
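
A small asyncio sketch of the default behaviour: on BaseMemory, aput simply delegates to the synchronous put, while chat-store-backed memories (see BaseChatStoreMemory below) override it with a genuinely async path. ChatMemoryBuffer is used here only as a readily available concrete subclass.

import asyncio

from llama_index.core.llms import ChatMessage
from llama_index.core.memory import ChatMemoryBuffer


async def main() -> None:
    memory = ChatMemoryBuffer.from_defaults()
    # Awaitable write: plain BaseMemory subclasses fall back to put(),
    # while chat-store-backed memories forward to the store's async API.
    await memory.aput(ChatMessage(role="user", content="hello"))
    print(memory.get_all())


asyncio.run(main())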

put_messages #

put_messages(messages: List[ChatMessage]) -> None

Put chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
def put_messages(self, messages: List[ChatMessage]) -> None:
    """Put chat history."""
    for message in messages:
        self.put(message)

set abstractmethod #

set(messages: List[ChatMessage]) -> None

Set chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
@abstractmethod
def set(self, messages: List[ChatMessage]) -> None:
    """Set chat history."""

reset abstractmethod #

reset() -> None

Reset chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
@abstractmethod
def reset(self) -> None:
    """Reset chat history."""

BaseChatStoreMemory #

Bases: BaseMemory

Base class for any chat store-based memory.

NOTE: The interface for memory is not yet finalized and is subject to change.

Parameters:

| Name           | Type          | Description | Default                   |
|----------------|---------------|-------------|---------------------------|
| chat_store     | BaseChatStore |             | SimpleChatStore(store={}) |
| chat_store_key | str           |             | 'chat_history'            |
Source code in llama-index-core/llama_index/core/memory/types.py
class BaseChatStoreMemory(BaseMemory):
    """Base class for any .

    NOTE: The interface for memory is not yet finalized and is subject to change.
    """

    chat_store: SerializeAsAny[BaseChatStore] = Field(default_factory=SimpleChatStore)
    chat_store_key: str = Field(default=DEFAULT_CHAT_STORE_KEY)

    @field_serializer("chat_store")
    def serialize_courses_in_order(self, chat_store: BaseChatStore) -> dict:
        res = chat_store.model_dump()
        res.update({"class_name": chat_store.class_name()})
        return res

    @classmethod
    def class_name(cls) -> str:
        """Get class name."""
        return "BaseChatStoreMemory"

    @classmethod
    @abstractmethod
    def from_defaults(
        cls,
        chat_history: Optional[List[ChatMessage]] = None,
        llm: Optional[LLM] = None,
        **kwargs: Any,
    ) -> "BaseChatStoreMemory":
        """Create a chat memory from defaults."""

    def get_all(self) -> List[ChatMessage]:
        """Get all chat history."""
        return self.chat_store.get_messages(self.chat_store_key)

    def put(self, message: ChatMessage) -> None:
        """Put chat history."""
        # ensure everything is serialized
        self.chat_store.add_message(self.chat_store_key, message)

    async def aput(self, message: ChatMessage) -> None:
        """Put chat history."""
        # ensure everything is serialized
        await self.chat_store.async_add_message(self.chat_store_key, message)

    def set(self, messages: List[ChatMessage]) -> None:
        """Set chat history."""
        self.chat_store.set_messages(self.chat_store_key, messages)

    def reset(self) -> None:
        """Reset chat history."""
        self.chat_store.delete_messages(self.chat_store_key)
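
A sketch of how the chat_store and chat_store_key fields are used: every read and write is delegated to the configured chat store under that key. ChatMemoryBuffer is again used as a readily available concrete subclass, and SimpleChatStore is the default in-memory store; a persistent BaseChatStore implementation could be swapped in the same way.

from llama_index.core.llms import ChatMessage
from llama_index.core.memory import ChatMemoryBuffer
from llama_index.core.storage.chat_store import SimpleChatStore

chat_store = SimpleChatStore()

# chat_store / chat_store_key are the fields defined on BaseChatStoreMemory.
memory = ChatMemoryBuffer.from_defaults(
    chat_store=chat_store,
    chat_store_key="user_123",
)

memory.put(ChatMessage(role="user", content="Remember this."))

# put / set / get_all / reset all route through the store under the key.
assert chat_store.get_messages("user_123") == memory.get_all()

# The chat_store field serializer adds the store's class name, so a dump
# records which store type the memory was using.
print(memory.model_dump()["chat_store"]["class_name"])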

class_name classmethod #

class_name() -> str

Get class name.

Source code in llama-index-core/llama_index/core/memory/types.py
@classmethod
def class_name(cls) -> str:
    """Get class name."""
    return "BaseChatStoreMemory"

from_defaults abstractmethod classmethod #

from_defaults(chat_history: Optional[List[ChatMessage]] = None, llm: Optional[LLM] = None, **kwargs: Any) -> BaseChatStoreMemory

Create a chat memory from defaults.

Source code in llama-index-core/llama_index/core/memory/types.py
@classmethod
@abstractmethod
def from_defaults(
    cls,
    chat_history: Optional[List[ChatMessage]] = None,
    llm: Optional[LLM] = None,
    **kwargs: Any,
) -> "BaseChatStoreMemory":
    """Create a chat memory from defaults."""

get_all #

get_all() -> List[ChatMessage]

Get all chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
def get_all(self) -> List[ChatMessage]:
    """Get all chat history."""
    return self.chat_store.get_messages(self.chat_store_key)

put #

put(message: ChatMessage) -> None

Put chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
def put(self, message: ChatMessage) -> None:
    """Put chat history."""
    # ensure everything is serialized
    self.chat_store.add_message(self.chat_store_key, message)

aput async #

aput(message: ChatMessage) -> None

Put chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
async def aput(self, message: ChatMessage) -> None:
    """Put chat history."""
    # ensure everything is serialized
    await self.chat_store.async_add_message(self.chat_store_key, message)

set #

set(messages: List[ChatMessage]) -> None

Set chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
def set(self, messages: List[ChatMessage]) -> None:
    """Set chat history."""
    self.chat_store.set_messages(self.chat_store_key, messages)

reset #

reset() -> None

Reset chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
def reset(self) -> None:
    """Reset chat history."""
    self.chat_store.delete_messages(self.chat_store_key)
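
Because reset delegates to delete_messages, it removes only the history stored under this memory's key; with SimpleChatStore, at least, other keys in a shared store are left untouched. A small sketch, again using ChatMemoryBuffer as the concrete subclass:

from llama_index.core.llms import ChatMessage
from llama_index.core.memory import ChatMemoryBuffer
from llama_index.core.storage.chat_store import SimpleChatStore

store = SimpleChatStore()
alice = ChatMemoryBuffer.from_defaults(chat_store=store, chat_store_key="alice")
bob = ChatMemoryBuffer.from_defaults(chat_store=store, chat_store_key="bob")

alice.put(ChatMessage(role="user", content="hi from alice"))
bob.put(ChatMessage(role="user", content="hi from bob"))

# reset() deletes only this memory's key from the shared store.
alice.reset()
assert alice.get_all() == []
assert len(bob.get_all()) == 1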