From 313d2c8e6ed5e2c2bc6a88f0fb41c71a8dc1e9d6 Mon Sep 17 00:00:00 2001
From: Morven <81895400+nullure@users.noreply.github.com>
Date: Sun, 7 Dec 2025 21:13:04 +0530
Subject: [PATCH] Create openmemory.mdx

---
 .../integrations/retrievers/openmemory.mdx    | 129 ++++++++++++++++++
 1 file changed, 129 insertions(+)
 create mode 100644 src/oss/python/integrations/retrievers/openmemory.mdx

diff --git a/src/oss/python/integrations/retrievers/openmemory.mdx b/src/oss/python/integrations/retrievers/openmemory.mdx
new file mode 100644
index 0000000000..5f6003fc27
--- /dev/null
+++ b/src/oss/python/integrations/retrievers/openmemory.mdx
@@ -0,0 +1,129 @@
+---
+title: OpenMemory
+---
+
+# OpenMemory
+
+[OpenMemory](https://github.com/CaviraOSS/OpenMemory) is a local-first, persistent temporal memory engine implemented entirely through a Python client. It provides storage and retrieval of conversational and factual memory without requiring vector databases or embedding pipelines.
+
+`OpenMemoryRetriever` is an integration module that returns documents from a temporal memory store. It is based on the `BaseRetriever` class and enables persistent recall across sessions using only the `openmemory-py` Python package.
+
+This guide will help you get started with the OpenMemory [retriever](/oss/langchain/retrieval). For detailed documentation of configuration options and storage behavior, see the OpenMemory repository.
+
+### Integration details
+
+
+
+## Setup
+
+To use this module, you need:
+
+- Python 3.9 or higher
+- The `openmemory-py` Python package
+- The `langchain-openmemory` integration package
+
+If you want to enable tracing, you can also set your LangSmith API key:
+
+```python
+import os
+
+os.environ["LANGSMITH_API_KEY"] = "your_api_key"
+os.environ["LANGSMITH_TRACING"] = "true"
+```
+
+## Installation
+
+This retriever lives in the `langchain-openmemory` package:
+
+```bash
+pip install openmemory-py langchain-openmemory
+```
+
+## Instantiation
+
+The `Memory` class exposes the retriever interface directly. No configuration is required by default.
+
+```python
+from langchain_openmemory import Memory
+
+memory = Memory()
+retriever = memory.retriever
+```
+
+Optional explicit user separation can be provided:
+
+```python
+memory = Memory(user="user123")
+retriever = memory.retriever
+```
+
+If the `user` parameter is omitted, OpenMemory manages session identity internally.
+
+## Usage
+
+Once instantiated, the retriever can be used to retrieve documents from persistent memory.
+
+```python
+retriever.invoke("Where does the user live?")
+```
+
+This returns a list of LangChain `Document` objects.
+
+## Example
+
+This section demonstrates basic usage of OpenMemory as a persistent memory retriever.
+
+```python
+from langchain_openmemory import Memory
+
+memory = Memory()
+
+memory.store("The user lives in Hyderabad.")
+docs = memory.retriever.invoke("Where does the user live?")
+
+for doc in docs:
+    print(doc.page_content)
+```
+
+## Use within a chain
+
+```python
+from langchain_core.output_parsers import StrOutputParser
+from langchain_core.prompts import ChatPromptTemplate
+from langchain_core.runnables import RunnablePassthrough
+from langchain_openai import ChatOpenAI
+from langchain_openmemory import Memory
+
+memory = Memory()
+retriever = memory.retriever
+
+prompt = ChatPromptTemplate.from_template(
+    """Answer the question based only on the context provided.
+
+Context: {context}
+
+Question: {question}"""
+)
+
+llm = ChatOpenAI()
+
+
+def format_docs(docs):
+    return "\n\n".join(doc.page_content for doc in docs)
+
+
+chain = (
+    {"context": retriever | format_docs, "question": RunnablePassthrough()}
+    | prompt
+    | llm
+    | StrOutputParser()
+)
+
+chain.invoke("Where does the user live?")
+```
+
+## API reference
+
+For implementation details and configuration options, see the OpenMemory repository:
+
+https://github.com/CaviraOSS/OpenMemory