Memory is not persisting across turns in my LangGraph chatbot, even though I pass the same `thread_id`.
Am I doing something wrong?
from dotenv import load_dotenv

# Load .env before constructing ChatGroq so GROQ_API_KEY is available.
load_dotenv()

from typing import Annotated

from typing_extensions import TypedDict

from langchain.schema import HumanMessage, SystemMessage
from langchain_groq import ChatGroq
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import END, START, StateGraph
from langgraph.graph.message import add_messages
# Groq-hosted chat model used by the single graph node below.
# NOTE(review): confirm "qwen-2.5-coder-32b" is still a live Groq model id —
# Groq rotates/deprecates hosted models periodically.
llm = ChatGroq(
model="qwen-2.5-coder-32b",
temperature=.5,          # mid-range sampling: some variety, still coherent
max_tokens=None,         # no completion-length cap
timeout=None,            # no request timeout (client default)
max_retries=2,           # retry transient API failures twice
)
class State(TypedDict):
    # Conversation history. The `add_messages` reducer APPENDS returned
    # messages to the existing list (and coerces plain strings to
    # HumanMessage) instead of overwriting it — this is what lets the
    # checkpointer accumulate history per thread_id.
    messages: Annotated[list, add_messages]
def chat_bot(state: State):
    """Single graph node: answer the latest user message in a GenZ persona.

    Parameters:
        state: graph state; ``state["messages"]`` holds the full
            conversation history restored by the checkpointer.

    Returns:
        A partial state update ``{"messages": [ai_message]}``; the
        ``add_messages`` reducer appends it to the stored history.
    """
    system = SystemMessage(
        content="You are a GenZ friend of a user. You know all the secrets and dark truth of a user. Always reply to user prompt like a GenZ person,use emogis and other cool stuff along with text"
    )
    # BUG FIX (the "memory" problem): the original sent ONLY the last
    # message to the model, so even with a checkpointer the LLM never saw
    # prior turns. Send the entire history after the system prompt.
    prompt = [system, *state["messages"]]
    # Invoke once (the original called llm.invoke twice per turn) and
    # return the AIMessage itself — do not mutate state in place or
    # append a bare string, which add_messages would misinterpret.
    response = llm.invoke(prompt)
    return {"messages": [response]}
# BUG FIX: `memory` was never defined, so compile() raised NameError.
# MemorySaver is LangGraph's in-process checkpointer: it persists graph
# state per thread_id between invocations (for this process only — use a
# durable checkpointer like SqliteSaver for persistence across restarts).
from langgraph.checkpoint.memory import MemorySaver

memory = MemorySaver()

graph_builder = StateGraph(State)
graph_builder.add_node('chat_bot', chat_bot)
graph_builder.set_entry_point('chat_bot')
graph_builder.set_finish_point('chat_bot')
graph = graph_builder.compile(checkpointer=memory)
# --- Turn 1: introduce ourselves on thread "1" -----------------------------
initial_state = {
    'messages': ['hii, my name is Kartik']
}
# BUG FIX: the original pasted the argument tuple with no call — the
# `events = graph.stream(` line was missing, a syntax error. The same
# thread_id must be reused on later turns for the checkpointer to load
# this conversation's saved state.
events = graph.stream(
    initial_state,
    {"configurable": {"thread_id": "1"}},
    stream_mode="values",
)
for event in events:
    event["messages"][-1].pretty_print()
# --- Turn 2: ask a question that requires memory of turn 1 -----------------
followup_state = {
    'messages': ['What is my name ?']
}
# BUG FIX: restore the missing `events = graph.stream(` call (syntax
# error in the original). Reusing thread_id "1" makes the checkpointer
# prepend the saved history, so the model can answer "Kartik".
events = graph.stream(
    followup_state,
    {"configurable": {"thread_id": "1"}},
    stream_mode="values",
)
for event in events:
    event["messages"][-1].pretty_print()