Integrations — Updated 2026-03-18
LangChain Integration
Integrate RetainDB memory with LangChain for intelligent conversation memory in your LLM applications.
Applies to: LangChain.js
Use RetainDB as memory for LangChain chains and agents.
Installation
bash
npm install @retaindb/sdk langchain
Quick Start
typescript
import { RetainDBClient } from "@retaindb/sdk";
import { createLangChainMemoryAdapter } from "@retaindb/sdk/langchain";
import { ConversationChain } from "langchain.chains";
import { ChatOpenAI } from "langchain.chat_models";
import { BufferMemory } from "langchain.memory";
// Initialize RetainDB client
const client = RetainDBClient.fromEnv();
// Create memory adapter
const RetainDBMemory = createLangChainMemoryAdapter(client, {
user_id: "user-123",
session_id: "chat-456",
memoryType: "conversation",
returnMessages: true,
});
// Use with LangChain
const chain = new ConversationChain({
llm: new ChatOpenAI({ temperature: 0 }),
memory: RetainDBMemory,
});
const response = await chain.call({
input: "Hi, I'm looking for a restaurant recommendation."
});
console.log(response.response);
Memory Adapter Options
typescript
// Adapter options — shape shown for reference; the `type` tokens below are
// illustrative and not literal runnable values.
const memory = createLangChainMemoryAdapter(client, {
user_id: string, // Required: user identifier
session_id?: string, // Optional: session for context
memoryType?: string, // Default: "conversation"
topK?: number, // Default: 10 — number of memories to retrieve
includePending?: boolean, // Default: true
returnMessages?: boolean, // Default: true for LangChain format
});
Using with LCEL
typescript
import { RetainDBClient } from "@retaindb/sdk";
import { createLangChainMemoryAdapter } from "@retaindb/sdk/langchain";
import { ChatOpenAI } from "langchain/chat_models/openai";
import { RunnableSequence } from "langchain/schema/runnable";

const client = RetainDBClient.fromEnv();

// Create memory (adapter form — the sequence below instead queries RetainDB
// directly to inject retrieval-style context)
const memory = createLangChainMemoryAdapter(client, {
  user_id: "user-123",
  returnMessages: true,
});

// Build a chain that injects RetainDB context before the LLM step
const chain = RunnableSequence.from([
  async (input: { input: string }) => {
    // Retrieve the most relevant memories for this turn
    const context = await client.memory.search({
      user_id: "user-123",
      query: input.input,
      top_k: 5,
    });
    return { ...input, context: context.results };
  },
  // ... continue with LLM
]);
Advanced: Custom Memory Class
Create a custom RetainDB memory class:
typescript
import { BaseChatMemory } from "langchain.memory";
import { RetainDBClient } from "@retaindb/sdk";
export class RetainDBChatMemory extends BaseChatMemory {
private client: RetainDBClient;
private userId: string;
private sessionId?: string;
constructor(fields: {
client: RetainDBClient;
userId: string;
sessionId?: string;
}) {
super({
returnMessages: fields.returnMessages ?? true,
inputKey: fields.inputKey,
outputKey: fields.outputKey,
});
this.client = fields.client;
this.userId = fields.userId;
this.sessionId = fields.sessionId;
}
async loadMemoryVariables(values: Record<string, any>): Promise<Record<string, any>> {
const query = values.input || "";
const results = await this.client.memory.search({
user_id: this.userId,
session_id: this.sessionId,
query,
top_k: 10,
});
return {
history: results.results.map(r => ({
type: "human",
content: r.memory.content,
})),
};
}
async saveContext(inputValues: Record<string, any>, outputValues: Record<string, any>): Promise<void> {
const input = inputValues[this.inputKey];
const output = outputValues[this.outputKey];
await this.client.memory.add({
user_id: this.userId,
session_id: this.sessionId,
content: `Human: ${input}\nAI: ${output}`,
memory_type: "event",
});
}
}
// Usage
// NOTE(review): assumes `client` is an initialized RetainDBClient — see Quick Start.
const memory = new RetainDBChatMemory({
client,
userId: "user-123",
sessionId: "chat-456",
});
Using with Agents
typescript
import { createLangChainMemoryAdapter } from "@retaindb/sdk/langchain";
import { AgentExecutor } from "langchain.agents";
import { OpenAIFunctionsAgent } from "langchain.agents.openai_functions";
import { ChatOpenAI } from "langchain.chat_models";
const client = RetainDBClient.fromEnv();
const agent = OpenAIFunctionsAgent.fromLLMAndTools(
new ChatOpenAI({ temperature: 0 }),
tools
);
const executor = AgentExecutor.fromAgentAndTools({
agent,
tools,
memory: createLangChainMemoryAdapter(client, {
user_id: "user-123",
returnMessages: true,
}),
});
const result = await executor.run("What's the weather like?");
Complete Example
typescript
import { RetainDBClient } from "@retaindb/sdk";
import { createLangChainMemoryAdapter } from "@retaindb/sdk/langchain";
import { ChatOpenAI } from "langchain.chat_models";
import { ConversationChain } from "langchain.chains";
const client = RetainDBClient.fromEnv({
baseUrl: "https://api.retaindb.com",
});
async function chat(userId: string, sessionId: string, message: string) {
// Create session-aware memory
const memory = createLangChainMemoryAdapter(client, {
user_id: userId,
session_id: sessionId,
memoryType: "conversation",
returnMessages: true,
topK: 10,
});
// Create chain
const chain = new ConversationChain({
llm: new ChatOpenAI({ temperature: 0.7 }),
memory,
});
// Send message
const response = await chain.call({ input: message });
return response.response;
}
// Use — await each call so the first turn is stored before the second runs
await chat("user-123", "session-456", "My name is John and I prefer concise responses");
await chat("user-123", "session-456", "What's my name?"); // Should remember "John"
Next step
- SDK LangChain Adapter — Adapter details
- LangChain Memory — LangChain docs
- SDK Quickstart — Getting started
Was this page helpful?
Your feedback helps us prioritize docs improvements weekly.