BaseMemory #

Bases: BaseComponent

Base class for all memory types.

NOTE: The interface for memory is not yet finalized and is subject to change.

Source code in llama-index-core/llama_index/core/memory/types.py
class BaseMemory(BaseComponent):
    """Base class for all memory types.

    NOTE: The interface for memory is not yet finalized and is subject to change.
    """

    @classmethod
    def class_name(cls) -> str:
        """Get class name."""
        return "BaseMemory"

    @classmethod
    @abstractmethod
    def from_defaults(
        cls,
        chat_history: Optional[List[ChatMessage]] = None,
        llm: Optional[LLM] = None,
    ) -> "BaseMemory":
        """Create a chat memory from defaults."""

    @abstractmethod
    def get(self, input: Optional[str] = None, **kwargs: Any) -> List[ChatMessage]:
        """Get chat history."""

    @abstractmethod
    def get_all(self) -> List[ChatMessage]:
        """Get all chat history."""

    @abstractmethod
    def put(self, message: ChatMessage) -> None:
        """Put chat history."""

    def put_messages(self, messages: List[ChatMessage]) -> None:
        """Put chat history."""
        for message in messages:
            self.put(message)

    @abstractmethod
    def set(self, messages: List[ChatMessage]) -> None:
        """Set chat history."""

    @abstractmethod
    def reset(self) -> None:
        """Reset chat history."""

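For orientation, here is a minimal sketch of a concrete subclass that satisfies this interface by keeping messages in a plain Python list. The class name ListMemory and its history field are illustrative assumptions, not part of the library; put_messages is inherited from the base class.

from typing import Any, List, Optional

from llama_index.core.bridge.pydantic import Field
from llama_index.core.llms import LLM, ChatMessage
from llama_index.core.memory.types import BaseMemory


class ListMemory(BaseMemory):
    """Hypothetical memory that stores messages in an in-memory list."""

    history: List[ChatMessage] = Field(default_factory=list)

    @classmethod
    def from_defaults(
        cls,
        chat_history: Optional[List[ChatMessage]] = None,
        llm: Optional[LLM] = None,
    ) -> "ListMemory":
        return cls(history=list(chat_history or []))

    def get(self, input: Optional[str] = None, **kwargs: Any) -> List[ChatMessage]:
        # No truncation or retrieval logic; return the full history.
        return self.history

    def get_all(self) -> List[ChatMessage]:
        return self.history

    def put(self, message: ChatMessage) -> None:
        self.history.append(message)

    def set(self, messages: List[ChatMessage]) -> None:
        self.history = list(messages)

    def reset(self) -> None:
        self.history = []
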
class_name classmethod #

class_name() -> str

Get class name.

Source code in llama-index-core/llama_index/core/memory/types.py
@classmethod
def class_name(cls) -> str:
    """Get class name."""
    return "BaseMemory"

from_defaults abstractmethod classmethod #

from_defaults(chat_history: Optional[List[ChatMessage]] = None, llm: Optional[LLM] = None) -> BaseMemory

Create a chat memory from defaults.

Source code in llama-index-core/llama_index/core/memory/types.py
@classmethod
@abstractmethod
def from_defaults(
    cls,
    chat_history: Optional[List[ChatMessage]] = None,
    llm: Optional[LLM] = None,
) -> "BaseMemory":
    """Create a chat memory from defaults."""

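Concrete memories shipped with the library are normally built through this classmethod. A short usage sketch with ChatMemoryBuffer, one such implementation (the seed messages are made up; llm is optional and, when provided, is typically used to pick sensible defaults such as a token limit):

from llama_index.core.llms import ChatMessage, MessageRole
from llama_index.core.memory import ChatMemoryBuffer

memory = ChatMemoryBuffer.from_defaults(
    chat_history=[
        ChatMessage(role=MessageRole.USER, content="Hello!"),
        ChatMessage(role=MessageRole.ASSISTANT, content="Hi, how can I help?"),
    ],
)
print(len(memory.get_all()))  # -> 2
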
get abstractmethod #

get(input: Optional[str] = None, **kwargs: Any) -> List[ChatMessage]

Get chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
@abstractmethod
def get(self, input: Optional[str] = None, **kwargs: Any) -> List[ChatMessage]:
    """Get chat history."""

get_all abstractmethod #

get_all() -> List[ChatMessage]

Get all chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
@abstractmethod
def get_all(self) -> List[ChatMessage]:
    """Get all chat history."""

put abstractmethod #

put(message: ChatMessage) -> None

Put chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
@abstractmethod
def put(self, message: ChatMessage) -> None:
    """Put chat history."""

put_messages #

put_messages(messages: List[ChatMessage]) -> None

Put chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
def put_messages(self, messages: List[ChatMessage]) -> None:
    """Put chat history."""
    for message in messages:
        self.put(message)

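put_messages is the only non-abstract storage method: it simply calls put once per message, so subclasses get batch insertion for free. A small sketch, again using ChatMemoryBuffer as the concrete class and made-up messages:

from llama_index.core.llms import ChatMessage, MessageRole
from llama_index.core.memory import ChatMemoryBuffer

memory = ChatMemoryBuffer.from_defaults()

batch = [
    ChatMessage(role=MessageRole.USER, content="What is LlamaIndex?"),
    ChatMessage(role=MessageRole.ASSISTANT, content="A data framework for LLM apps."),
]

# Equivalent to calling memory.put(m) for each message in order.
memory.put_messages(batch)
print(len(memory.get_all()))  # -> 2
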
set abstractmethod #

set(messages: List[ChatMessage]) -> None

Set chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
@abstractmethod
def set(self, messages: List[ChatMessage]) -> None:
    """Set chat history."""

reset abstractmethod #

reset() -> None

Reset chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
@abstractmethod
def reset(self) -> None:
    """Reset chat history."""

BaseChatStoreMemory #

Bases: BaseMemory

Base class for any chat store-based memory.

NOTE: The interface for memory is not yet finalized and is subject to change.

Source code in llama-index-core/llama_index/core/memory/types.py
class BaseChatStoreMemory(BaseMemory):
    """Base class for any .

    NOTE: The interface for memory is not yet finalized and is subject to change.
    """

    chat_store: BaseChatStore = Field(default_factory=SimpleChatStore)
    chat_store_key: str = Field(default=DEFAULT_CHAT_STORE_KEY)

    @classmethod
    def class_name(cls) -> str:
        """Get class name."""
        return "BaseChatStoreMemory"

    @classmethod
    @abstractmethod
    def from_defaults(
        cls,
        chat_history: Optional[List[ChatMessage]] = None,
        llm: Optional[LLM] = None,
    ) -> "BaseChatStoreMemory":
        """Create a chat memory from defaults."""

    def get_all(self) -> List[ChatMessage]:
        """Get all chat history."""
        return self.chat_store.get_messages(self.chat_store_key)

    def put(self, message: ChatMessage) -> None:
        """Put chat history."""
        # ensure everything is serialized
        self.chat_store.add_message(self.chat_store_key, message)

    def set(self, messages: List[ChatMessage]) -> None:
        """Set chat history."""
        self.chat_store.set_messages(self.chat_store_key, messages)

    def reset(self) -> None:
        """Reset chat history."""
        self.chat_store.delete_messages(self.chat_store_key)

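Since get_all, put, set, and reset all delegate to chat_store under chat_store_key, a subclass only has to supply from_defaults and a get implementation. A hypothetical sketch (LastNMemory and its last_n field are illustrative, not library classes):

from typing import Any, List, Optional

from llama_index.core.llms import LLM, ChatMessage
from llama_index.core.memory.types import BaseChatStoreMemory
from llama_index.core.storage.chat_store import SimpleChatStore


class LastNMemory(BaseChatStoreMemory):
    """Hypothetical memory that exposes only the N most recent messages."""

    last_n: int = 10

    @classmethod
    def from_defaults(
        cls,
        chat_history: Optional[List[ChatMessage]] = None,
        llm: Optional[LLM] = None,
        last_n: int = 10,
    ) -> "LastNMemory":
        memory = cls(chat_store=SimpleChatStore(), last_n=last_n)
        if chat_history:
            memory.set(chat_history)  # delegates to chat_store.set_messages
        return memory

    def get(self, input: Optional[str] = None, **kwargs: Any) -> List[ChatMessage]:
        # get_all() reads everything stored under self.chat_store_key.
        return self.get_all()[-self.last_n :]
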
class_name classmethod #

class_name() -> str

Get class name.

Source code in llama-index-core/llama_index/core/memory/types.py
@classmethod
def class_name(cls) -> str:
    """Get class name."""
    return "BaseChatStoreMemory"

from_defaults abstractmethod classmethod #

from_defaults(chat_history: Optional[List[ChatMessage]] = None, llm: Optional[LLM] = None) -> BaseChatStoreMemory

Create a chat memory from defaults.

Source code in llama-index-core/llama_index/core/memory/types.py
@classmethod
@abstractmethod
def from_defaults(
    cls,
    chat_history: Optional[List[ChatMessage]] = None,
    llm: Optional[LLM] = None,
) -> "BaseChatStoreMemory":
    """Create a chat memory from defaults."""

get_all #

get_all() -> List[ChatMessage]

Get all chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
def get_all(self) -> List[ChatMessage]:
    """Get all chat history."""
    return self.chat_store.get_messages(self.chat_store_key)

put #

put(message: ChatMessage) -> None

Put chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
def put(self, message: ChatMessage) -> None:
    """Put chat history."""
    # ensure everything is serialized
    self.chat_store.add_message(self.chat_store_key, message)

set #

set(messages: List[ChatMessage]) -> None

Set chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
def set(self, messages: List[ChatMessage]) -> None:
    """Set chat history."""
    self.chat_store.set_messages(self.chat_store_key, messages)

reset #

reset() -> None

Reset chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
def reset(self) -> None:
    """Reset chat history."""
    self.chat_store.delete_messages(self.chat_store_key)
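
Putting the pieces together, a round trip through a chat-store-backed memory might look like the sketch below. ChatMemoryBuffer stands in as one concrete subclass, and the "user-42" key is an arbitrary example of keeping per-user histories in a shared store:

from llama_index.core.llms import ChatMessage, MessageRole
from llama_index.core.memory import ChatMemoryBuffer
from llama_index.core.storage.chat_store import SimpleChatStore

store = SimpleChatStore()
memory = ChatMemoryBuffer.from_defaults(
    chat_store=store,
    chat_store_key="user-42",
)

memory.put(ChatMessage(role=MessageRole.USER, content="Remember that I like tea."))
memory.put(ChatMessage(role=MessageRole.ASSISTANT, content="Noted!"))

print(len(memory.get_all()))  # -> 2

memory.reset()  # calls delete_messages() on the underlying chat store
print(memory.get_all())  # -> []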