LlamaIndex Quickstart
Install
pip install remembr llama-index
Initialize
# Chat message types used to build entries for the store.
from llama_index.core.base.llms.types import ChatMessage, MessageRole
# Remembr SDK client plus the LlamaIndex chat-store adapter.
from remembr import RemembrClient
from adapters.llamaindex.remembr_llamaindex_memory import RemembrChatStore
# Point the client at a local Remembr API server; "rk_demo" is a demo key.
client = RemembrClient(api_key="rk_demo", base_url="http://localhost:8000/api/v1")
# Wrap the client in a LlamaIndex-compatible chat store.
chat_store = RemembrChatStore(client=client)
Store
# Persist one user message into the "llama-session" conversation history.
message = ChatMessage(
    role=MessageRole.USER,
    content="The migration must preserve webhook delivery logs.",
)
chat_store.add_message("llama-session", message)
Retrieve
# Fetch every message stored under the session key and display them.
messages = chat_store.get_messages("llama-session")
print(messages)
Delete
chat_store.delete_messages("llama-session")
End-to-end sample: semantic retrieval
# End-to-end sample: semantic memory bound to a single session.
from remembr import RemembrClient
from adapters.llamaindex.remembr_llamaindex_memory import RemembrSemanticMemory
# Same demo credentials/endpoint as the quickstart above.
client = RemembrClient(api_key="rk_demo", base_url="http://localhost:8000/api/v1")
# Alternate constructor ties the memory store to one session id.
store = RemembrSemanticMemory.from_client(client=client, session_id="index-session")
# NOTE(review): load_context takes a dict with an "input" query; presumably it
# returns semantically relevant stored entries — confirm against the adapter.
matches = store.load_context({"input": "What compatibility requirement do we have?"})
print(matches)