DrishtiSharma committed
Commit 7417989 · verified · 1 Parent(s): 2f9148b

Update app.py

Files changed (1)
  1. app.py +10 -12
app.py CHANGED
@@ -66,7 +66,6 @@ async def stream_or_async_response(messages: Union[Iterable[ResponseChunk], Asyn
 """
 Handles both synchronous and asynchronous message streams.
 Processes each response chunk and manages intermediate outputs.
-
 :param messages: Union[Iterable, AsyncIterable]
 """
 layer_outputs = {}
@@ -78,7 +77,8 @@ async def stream_or_async_response(messages: Union[Iterable[ResponseChunk], Asyn
 if layer not in layer_outputs:
 layer_outputs[layer] = []
 layer_outputs[layer].append(message['delta'])
-else:
+return "" # Intermediate messages don't contribute to the final response
+elif message['response_type'] == 'final':
 # Final message processing
 for layer, outputs in layer_outputs.items():
 st.write(f"Layer {layer}")
@@ -87,7 +87,9 @@ async def stream_or_async_response(messages: Union[Iterable[ResponseChunk], Asyn
 with cols[i]:
 st.expander(label=f"Agent {i + 1}", expanded=False).write(output)
 layer_outputs.clear()
-return message['delta']
+return message['delta'] or "" # Ensure no None value is returned
+else:
+return "" # Default to empty string if no valid response type
 
 # Check if the input is an async or sync iterable
 if hasattr(messages, "__aiter__"): # Asynchronous iterable
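
Note: the hunks above change the per-chunk handling so that intermediate chunks only buffer their delta under their layer and contribute nothing to the returned text, while the final chunk's delta (coerced to "" if it is None) is what gets passed back to the caller. A minimal sketch of that contract follows; the helper name handle_chunk and the layer lookup via message['metadata']['layer'] are assumptions for illustration, since the diff does not show how the layer is obtained.

def handle_chunk(message: dict, layer_outputs: dict) -> str:
    if message['response_type'] == 'intermediate':
        layer = message['metadata']['layer']  # assumed field; not shown in the diff
        layer_outputs.setdefault(layer, []).append(message['delta'])
        return ""  # intermediate chunks add nothing to the final text
    elif message['response_type'] == 'final':
        # ...render the buffered layer outputs with st.write / st.expander here...
        layer_outputs.clear()
        return message['delta'] or ""  # guard against a None delta
    else:
        return ""  # unknown chunk types are ignored
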
@@ -101,8 +103,6 @@ async def stream_or_async_response(messages: Union[Iterable[ResponseChunk], Asyn
 else:
 raise TypeError("'messages' must be an Iterable or AsyncIterable.")
 
-
-
 # Set up the MOAgent
 def set_moa_agent(
 main_model: str = default_config['main_model'],
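
Note: the dispatch on __aiter__ shown in the unchanged context lines is what lets one coroutine serve both synchronous and asynchronous chunk sources. A self-contained sketch of that pattern, not the file's actual code; the names drain and handle are illustrative, and each chunk is assumed to be reduced to a string by a handler like the one sketched above.

from typing import AsyncIterable, Callable, Iterable, Union

async def drain(messages: Union[Iterable[dict], AsyncIterable[dict]],
                handle: Callable[[dict], str]):
    # Objects exposing __aiter__ are consumed with `async for`, plain iterables
    # with `for`; anything else is rejected, mirroring the context lines above.
    if hasattr(messages, "__aiter__"):   # asynchronous iterable
        async for message in messages:
            yield handle(message)
    elif hasattr(messages, "__iter__"):  # synchronous iterable
        for message in messages:
            yield handle(message)
    else:
        raise TypeError("'messages' must be an Iterable or AsyncIterable.")
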
@@ -272,13 +272,10 @@ if query := st.chat_input("Ask a question"):
 # Stream and display responses from `stream_or_async_response`
 final_response = ""
 async for response in stream_or_async_response(messages):
-# Await the response if it's a coroutine
-if asyncio.iscoroutine(response):
-response = await response
-
-# Accumulate and display the response
-final_response += response
-message_placeholder.markdown(final_response)
+# Skip None or empty responses
+if response:
+final_response += response
+message_placeholder.markdown(final_response)
 
 # Save the final response to session state
 st.session_state.messages.append({"role": "assistant", "content": final_response})
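
Note: with the generator now yielding plain strings ("" for intermediate chunks), the caller no longer needs the asyncio.iscoroutine check and await; a simple truthiness test skips both None and empty deltas before re-rendering the placeholder. A tiny runnable illustration of that behaviour, with fake_stream standing in for the real chunk source (the values are invented for the example):

import asyncio

async def fake_stream():
    yield ""                   # an intermediate chunk now contributes ""
    yield ""                   # another intermediate chunk
    yield "final answer text"  # the final chunk's delta

async def main():
    final_response = ""
    async for response in fake_stream():
        if response:           # skip None or empty responses
            final_response += response
    print(final_response)      # -> final answer text

asyncio.run(main())
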
@@ -290,6 +287,7 @@ if query := st.chat_input("Ask a question"):
 
 
 
+
 # Add acknowledgment at the bottom
 st.markdown("---")
 st.markdown("""
 