I just submitted an issue (#16525), but I don't know why my description is missing, so here it is again.
The underlying problem is described in #14957 (comment): `BaseChatMemory.chat_memory` never gets pruned, because `RunnableWithMessageHistory` writes each turn into the history with `add_message()` and never calls the memory's `save_context()`, which is where pruning happens.
So I made some monkey patches to fix the problem temporarily, and the following demo shows how to use history and memory together in LCEL.
Because there are too many modules involved, I would like core contributors to help me refine this idea.
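For contrast, here is a minimal sketch of the stock pattern where pruning never happens. This is just my illustration, not part of the patch; it reuses the same imports and the prompt, model, and REDIS_URL defined in the demo at the bottom, and the name plain_chain is only for the example:

# Stock LCEL pattern: RunnableWithMessageHistory persists each turn via
# history.add_message() and has no parameter that accepts a BaseChatMemory,
# so save_context()/prune() is never called and the stored history only grows.
plain_chain = RunnableWithMessageHistory(
    prompt | model,
    lambda session_id: RedisChatMessageHistory(session_id, url=REDIS_URL),
    input_messages_key="question",
    history_messages_key="history",
)
# ConversationSummaryBufferMemory.max_token_limit is never enforced here,
# because the memory object has no way to participate in the chain.

The patch below threads a BaseChatMemory through RunnableWithMessageHistory so that save_context(), and with it prune(), runs after every turn: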
import json
from typing import Union, Any, List, Optional

from langchain.memory import ConversationSummaryBufferMemory
from langchain.memory.chat_memory import BaseChatMemory
from langchain_community.chat_message_histories import RedisChatMessageHistory
from langchain_core.load import load
from langchain_core.messages import BaseMessage, message_to_dict
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables import Runnable, RunnableConfig
from langchain_core.runnables.history import (
    RunnableWithMessageHistory,
    MessagesOrDictWithMessages,
    GetSessionHistoryCallable,
)
from langchain_core.tracers.schemas import Run
from langchain_openai import ChatOpenAI  # used by the demo in __main__


class MemoryList(list):
    """A list that mirrors pops and clears back to the underlying Redis history.

    RedisChatMessageHistory stores messages with LPUSH and reads them back in
    reverse order, so index 0 of this list corresponds to the tail of the Redis list.
    """

    def __init__(self, *args, history=None, **kwargs):
        self.__history: RedisChatMessageHistory = history
        super().__init__(*args, **kwargs)

    def pop(self, __index=-1):
        if __index == 0:
            # The oldest message sits at the tail of the Redis list.
            self.__history.redis_client.rpop(self.__history.key)
        elif __index == -1:
            # The newest message sits at the head of the Redis list.
            self.__history.redis_client.lpop(self.__history.key)
        else:
            raise IndexError("Redis doesn't support pop by index.")
        return super().pop(__index)

    def clear(self):
        self.__history.clear()
        super().clear()
class RedisChatMessageHistoryFixed(RedisChatMessageHistory):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        msgs = super().messages.copy()
        self._messages: MemoryList = MemoryList(msgs, history=self)

    @property
    def messages(self) -> List[BaseMessage]:  # type: ignore
        msgs = super().messages.copy()
        self._messages = MemoryList(msgs, history=self)
        return self._messages

    @messages.setter
    def messages(self, msgs):
        self._messages.clear()
        if msgs:
            self.redis_client.lpush(
                self.key, *[json.dumps(message_to_dict(msg)) for msg in msgs]
            )
class RunnableWithMessageHistoryWithMemory(RunnableWithMessageHistory):
    memory: Optional[BaseChatMemory] = None

    def __init__(
        self,
        runnable: Runnable[
            MessagesOrDictWithMessages,
            Union[str, BaseMessage, MessagesOrDictWithMessages],
        ],
        get_session_history: GetSessionHistoryCallable,
        memory: Optional[BaseChatMemory] = None,
        **kwargs: Any,
    ):
        super().__init__(runnable, get_session_history, **kwargs)
        if memory:
            self.memory = memory
            self.memory.input_key = self.input_messages_key
            self.memory.output_key = self.output_messages_key

    def _enter_history(self, input: Any, config: RunnableConfig) -> List[BaseMessage]:
        hist = config["configurable"]["message_history"]
        if not isinstance(self.memory.chat_memory, RedisChatMessageHistoryFixed):
            self.memory.chat_memory = hist
        # Return only historic messages.
        if self.history_messages_key:
            # Some of the `BaseChatMemory` pruning features live in `load_memory_variables()`,
            # e.g. `ConversationSummaryBufferMemory`, so we extract the messages from
            # `load_memory_variables()` instead of reading the history directly.
            messages = self.memory.load_memory_variables({})[self.history_messages_key].copy()
            hist.messages = messages
            return messages
        # Return all messages.
        else:
            input_val = (
                input if not self.input_messages_key else input[self.input_messages_key]
            )
            return hist.messages.copy() + self._get_input_messages(input_val)

    def _exit_history(self, run: Run, config: RunnableConfig) -> None:
        hist = config["configurable"]["message_history"]
        if not isinstance(self.memory.chat_memory, RedisChatMessageHistoryFixed):
            self.memory.chat_memory = hist
        # Get the input messages.
        inputs = load(run.inputs)
        input_val = inputs[self.input_messages_key or "input"]
        input_messages = self._get_input_messages(input_val)
        # If historic messages were prepended to the input messages, remove them to
        # avoid adding duplicate messages to history.
        if not self.history_messages_key:
            historic_messages = config["configurable"]["message_history"].messages
            input_messages = input_messages[len(historic_messages):]
        # Get the output messages.
        output_val = load(run.outputs)
        output_messages = self._get_output_messages(output_val)
        messages = zip(input_messages, output_messages)
        # `BaseChatMemory.save_context()` calls `add_message()` and `prune()`;
        # stock `RunnableWithMessageHistory` only calls `add_message()`.
        for i, o in messages:
            self.memory.save_context(
                {self.input_messages_key or 'input': i.content},
                {self.output_messages_key or 'output': o.content},
            )
if __name__ == '__main__':
    REDIS_URL = ...

    prompt = ChatPromptTemplate.from_messages(
        [
            ("system", 'You are a helpful assistant.'),
            MessagesPlaceholder(variable_name="history"),
            ("human", "{question}"),
        ]
    )
    model = ChatOpenAI(
        model="gpt-3.5-turbo",
    )
    chain = prompt | model
    chain_with_history = RunnableWithMessageHistoryWithMemory(
        chain,
        lambda session_id: RedisChatMessageHistoryFixed(session_id, url=REDIS_URL),
        memory=ConversationSummaryBufferMemory(
            llm=model,
            memory_key="history",
            return_messages=True,
            max_token_limit=2000,
        ),
        input_messages_key="question",
        history_messages_key="history",
    )

    def chat(question):
        res = chain_with_history.stream(
            {"question": question},
            config={"configurable": {"session_id": 'test'}},
        )
        for message in res:
            print(message.content, end='')

    while _question := input('human:'):
        chat(_question)
        print()
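    # Hypothetical smoke test (my addition, assuming the same Redis instance and
    # the 'test' session used above): after the chat loop ends, reopen the
    # session and check that save_context() -> prune() kept the persisted
    # history bounded instead of letting the Redis list grow turn after turn.
    persisted = RedisChatMessageHistoryFixed('test', url=REDIS_URL)
    print(f"\nmessages kept in Redis: {len(persisted.messages)}")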