Index
BaseMemory #
Bases: BaseComponent
Base class for all memory types.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 14–79)
class_name
classmethod
#
class_name() -> str
Get class name.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 17–20)
from_defaults
abstractmethod
classmethod
#
from_defaults(**kwargs: Any) -> BaseMemory
Create a chat memory from defaults.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 22–28)
get
abstractmethod
#
get(input: Optional[str] = None, **kwargs: Any) -> List[ChatMessage]
Get chat history.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 30–32)
aget
async
#
aget(input: Optional[str] = None, **kwargs: Any) -> List[ChatMessage]
Get chat history.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 34–38)
get_all
abstractmethod
#
get_all() -> List[ChatMessage]
Get all chat history.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 40–42)
aget_all
async
#
aget_all() -> List[ChatMessage]
Get all chat history.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 44–46)
put
abstractmethod
#
put(message: ChatMessage) -> None
Put chat history.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 48–50)
aput
async
#
aput(message: ChatMessage) -> None
Put chat history.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 52–54)
put_messages #
put_messages(messages: List[ChatMessage]) -> None
Put chat history.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 56–59)
aput_messages
async
#
aput_messages(messages: List[ChatMessage]) -> None
Put chat history.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 61–63)
set
abstractmethod
#
set(messages: List[ChatMessage]) -> None
Set chat history.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 65–67)
aset
async
#
aset(messages: List[ChatMessage]) -> None
Set chat history.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 69–71)
reset
abstractmethod
#
reset() -> None
Reset chat history.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 73–75)
areset
async
#
areset() -> None
Reset chat history.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 77–79)
BaseChatStoreMemory #
Bases: BaseMemory
Base class for storing multi-tenant chat history.
Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| chat_store | BaseChatStore | Simple chat store. Async methods provide same functionality as sync methods in this class. | <dynamic> |
| chat_store_key | str | | 'chat_history' |
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 82–152)
class_name
classmethod
#
class_name() -> str
Get class name.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 94–97)
from_defaults
abstractmethod
classmethod
#
from_defaults(chat_history: Optional[List[ChatMessage]] = None, llm: Optional[LLM] = None, **kwargs: Any) -> BaseChatStoreMemory
Create a chat memory from defaults.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 99–107)
get_all #
get_all() -> List[ChatMessage]
Get all chat history.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 109–111)
aget_all
async
#
aget_all() -> List[ChatMessage]
Get all chat history.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 113–115)
get #
get(input: Optional[str] = None, **kwargs: Any) -> List[ChatMessage]
Get chat history.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 117–119)
aget
async
#
aget(input: Optional[str] = None, **kwargs: Any) -> List[ChatMessage]
Get chat history.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 121–125)
put #
put(message: ChatMessage) -> None
Put chat history.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 127–130)
aput
async
#
aput(message: ChatMessage) -> None
Put chat history.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 132–135)
set #
set(messages: List[ChatMessage]) -> None
Set chat history.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 137–139)
aset
async
#
aset(messages: List[ChatMessage]) -> None
Set chat history.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 141–144)
reset #
reset() -> None
Reset chat history.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 146–148)
areset
async
#
areset() -> None
Reset chat history.
Source code in llama-index-core/llama_index/core/memory/types.py
(lines 150–152)