7
7
from utils import format_ai_message_content
8
8
from datetime import datetime
9
9
10
# Module-level handles populated once by main() before the UI starts
# serving requests; chat_with_agent() reads them on every call.
agent = None
agent_config = None


async def chat_with_agent(message, history):
    """Stream agent responses for one chat turn as a Gradio async generator.

    Args:
        message: The user's new message text.
        history: Prior turns as a list of ``{"role", "content"}`` dicts
            (Gradio "messages" format); may be ``None`` or empty.

    Yields:
        The accumulated list of assistant message dicts after each
        agent/tool chunk, suitable for a Gradio Chatbot component.

    Raises:
        RuntimeError: If main() has not initialized the agent yet.
    """
    global agent, agent_config

    # Fail fast with a clear error instead of an opaque AttributeError /
    # TypeError surfacing mid-stream when the globals are still None.
    if agent is None or agent_config is None:
        raise RuntimeError("Agent not initialized — run main() first.")

    # Rebuild the conversation in the format the agent expects.
    # NOTE(review): user turns become HumanMessage objects while assistant
    # turns stay role/content dicts — LangChain accepts both forms, but
    # confirm the mix is intentional rather than an oversight.
    messages = []
    if history:
        for msg in history:
            if isinstance(msg, dict):
                if msg.get("role") == "user":
                    messages.append(HumanMessage(content=msg["content"]))
                elif msg.get("role") == "assistant":
                    messages.append({"role": "assistant", "content": msg["content"]})

    # Append the new user message last.
    messages.append(HumanMessage(content=message))

    runnable_config = RunnableConfig(
        recursion_limit=agent_config["configurable"]["recursion_limit"],
        configurable={
            "thread_id": agent_config["configurable"]["thread_id"],
            "checkpoint_ns": "chat_mode",
            # Timestamp-based id so every turn gets its own checkpoint.
            "checkpoint_id": str(datetime.now().timestamp()),
        },
    )

    response_messages = []
    # Yield the empty list first so the UI shows an immediate update.
    yield response_messages

    # Stream chunks from the agent; each chunk carries either a model
    # reply ("agent") or a tool invocation result ("tools").
    async for chunk in agent.astream(
        {"messages": messages},  # pass the full message history
        runnable_config,
    ):
        if "agent" in chunk:
            response = chunk["agent"]["messages"][0].content
            response_messages.append(dict(
                role="assistant",
                content=format_ai_message_content(response, format_mode="markdown"),
            ))
            yield response_messages
        elif "tools" in chunk:
            tool_message = str(chunk["tools"]["messages"][0].content)
            response_messages.append(dict(
                role="assistant",
                content=tool_message,
                metadata={"title": "🛠️ Tool Call"},
            ))
            yield response_messages
52
68
53
69
def create_ui ():
54
70
# Create the Gradio interface
@@ -95,8 +111,20 @@ def create_ui():
95
111
96
112
return demo
97
113
async def main():
    """Initialize the agent, then build and launch the Gradio UI.

    Populates the module-level ``agent`` and ``agent_config`` globals
    that chat_with_agent() reads on every request, so initialization
    must complete before the UI starts serving.
    """
    global agent, agent_config

    print("Initializing agent...")
    # NOTE(review): the old code unpacked four values from
    # initialize_agent(); this unpacks three — confirm the signature.
    # The third value is unused here: chat_with_agent() builds a fresh
    # RunnableConfig per request.
    agent_executor, config, _runnable_config = await initialize_agent()
    agent = agent_executor
    agent_config = config

    # Create and launch the UI.
    print("Starting Gradio UI...")
    demo = create_ui()
    demo.queue()  # queuing is required for streaming generator handlers
    demo.launch(share=True)


if __name__ == "__main__":
    # Run the async entry point.
    asyncio.run(main())