diff --git a/crates/code_assistant/assets/icons/circle_stop.svg b/crates/code_assistant/assets/icons/circle_stop.svg
new file mode 100644
index 0000000..9d4e89f
--- /dev/null
+++ b/crates/code_assistant/assets/icons/circle_stop.svg
@@ -0,0 +1 @@
+
diff --git a/crates/code_assistant/assets/icons/file_icons/file_types.json b/crates/code_assistant/assets/icons/file_icons/file_types.json
index 57db051..8aab58e 100644
--- a/crates/code_assistant/assets/icons/file_icons/file_types.json
+++ b/crates/code_assistant/assets/icons/file_icons/file_types.json
@@ -386,6 +386,9 @@
},
"send": {
"icon": "icons/send.svg"
+ },
+ "stop": {
+ "icon": "icons/stop.svg"
}
}
}
diff --git a/crates/code_assistant/assets/icons/stop.svg b/crates/code_assistant/assets/icons/stop.svg
new file mode 100644
index 0000000..5158a8b
--- /dev/null
+++ b/crates/code_assistant/assets/icons/stop.svg
@@ -0,0 +1 @@
+
diff --git a/crates/code_assistant/src/agent/runner.rs b/crates/code_assistant/src/agent/runner.rs
index 48995d7..6c71e47 100644
--- a/crates/code_assistant/src/agent/runner.rs
+++ b/crates/code_assistant/src/agent/runner.rs
@@ -117,13 +117,18 @@ impl Agent {
}
/// Handles the interaction with the LLM to get the next assistant message.
- /// Appends the assistant's message to the history.
+ /// Appends the assistant's message to the history only if it has content.
    async fn obtain_llm_response(&mut self, messages: Vec<Message>) -> Result<LLMResponse> {
let llm_response = self.get_next_assistant_message(messages).await?;
- self.append_message(Message {
- role: MessageRole::Assistant,
- content: MessageContent::Structured(llm_response.content.clone()),
- })?;
+
+ // Only add to message history if there's actual content
+ if !llm_response.content.is_empty() {
+ self.append_message(Message {
+ role: MessageRole::Assistant,
+ content: MessageContent::Structured(llm_response.content.clone()),
+ })?;
+ }
+
Ok(llm_response)
}
@@ -227,14 +232,7 @@ impl Agent {
request_counter += 1;
// 1. Obtain LLM response (includes adding assistant message to history)
- let llm_response = match self.obtain_llm_response(messages).await {
- Ok(response) => response,
- Err(e) => {
- // Log critical error and break loop
- tracing::error!("Critical error obtaining LLM response: {}", e);
- return Err(e);
- }
- };
+ let llm_response = self.obtain_llm_response(messages).await?;
// 2. Extract tool requests from LLM response and determine the next flow action
let (tool_requests, flow) = self
@@ -594,9 +592,18 @@ impl Agent {
// Create a StreamProcessor and use it to process streaming chunks
let ui = Arc::clone(&self.ui);
- let processor = Arc::new(Mutex::new(create_stream_processor(self.tool_mode, ui)));
+ let processor = Arc::new(Mutex::new(create_stream_processor(
+ self.tool_mode,
+ ui.clone(),
+ )));
let streaming_callback: StreamingCallback = Box::new(move |chunk: &StreamingChunk| {
+ // Check if streaming should continue
+ if !ui.should_streaming_continue() {
+ debug!("Streaming should stop - user requested cancellation");
+ return Err(anyhow::anyhow!("Streaming cancelled by user"));
+ }
+
let mut processor_guard = processor.lock().unwrap();
processor_guard
.process(chunk)
@@ -604,10 +611,30 @@ impl Agent {
});
// Send message to LLM provider
- let response = self
+ let response = match self
.llm_provider
.send_message(request, Some(&streaming_callback))
- .await?;
+ .await
+ {
+ Ok(response) => response,
+ Err(e) => {
+ // Check for streaming cancelled error
+ if e.to_string().contains("Streaming cancelled by user") {
+ debug!("Streaming cancelled by user in LLM request {}", request_id);
+ // End LLM request with cancelled=true
+ let _ = self.ui.end_llm_request(request_id, true).await;
+ // Return empty response
+ return Ok(llm::LLMResponse {
+ content: Vec::new(),
+ usage: llm::Usage::zero(),
+ });
+ }
+
+ // For other errors, still end the request but not cancelled
+ let _ = self.ui.end_llm_request(request_id, false).await;
+ return Err(e);
+ }
+ };
// Print response for debugging
debug!("Raw LLM response:");
@@ -631,8 +658,8 @@ impl Agent {
response.usage.cache_read_input_tokens
);
- // Inform UI that the LLM request has completed
- let _ = self.ui.end_llm_request(request_id).await;
+ // Inform UI that the LLM request has completed (normal completion)
+ let _ = self.ui.end_llm_request(request_id, false).await;
debug!("Completed LLM request with ID: {}", request_id);
Ok(response)
diff --git a/crates/code_assistant/src/tests/mocks.rs b/crates/code_assistant/src/tests/mocks.rs
index 5896f5c..d8b26c9 100644
--- a/crates/code_assistant/src/tests/mocks.rs
+++ b/crates/code_assistant/src/tests/mocks.rs
@@ -245,10 +245,15 @@ impl UserInterface for MockUI {
Ok(42)
}
- async fn end_llm_request(&self, _request_id: u64) -> Result<(), UIError> {
+ async fn end_llm_request(&self, _request_id: u64, _cancelled: bool) -> Result<(), UIError> {
// Mock implementation does nothing with request completion
Ok(())
}
+
+ fn should_streaming_continue(&self) -> bool {
+ // Mock implementation always continues streaming
+ true
+ }
}
// Mock Explorer
diff --git a/crates/code_assistant/src/ui/gpui/elements.rs b/crates/code_assistant/src/ui/gpui/elements.rs
index e3d0035..2b82f51 100644
--- a/crates/code_assistant/src/ui/gpui/elements.rs
+++ b/crates/code_assistant/src/ui/gpui/elements.rs
@@ -23,6 +23,8 @@ pub enum MessageRole {
pub struct MessageContainer {
    elements: Arc<Mutex<Vec<Entity<BlockView>>>>,
    role: MessageRole,
+    current_request_id: Arc<Mutex<u64>>,
+    waiting_for_content: Arc<Mutex<bool>>,
}
impl MessageContainer {
@@ -30,6 +32,46 @@ impl MessageContainer {
Self {
elements: Arc::new(Mutex::new(Vec::new())),
role,
+ current_request_id: Arc::new(Mutex::new(0)),
+ waiting_for_content: Arc::new(Mutex::new(false)),
+ }
+ }
+
+ // Set the current request ID for this message container
+ pub fn set_current_request_id(&self, request_id: u64) {
+ *self.current_request_id.lock().unwrap() = request_id;
+ }
+
+ // Set waiting for content flag
+ pub fn set_waiting_for_content(&self, waiting: bool) {
+ *self.waiting_for_content.lock().unwrap() = waiting;
+ }
+
+ // Check if waiting for content
+ pub fn is_waiting_for_content(&self) -> bool {
+ *self.waiting_for_content.lock().unwrap()
+ }
+
+ // Remove all blocks with the given request ID
+    pub fn remove_blocks_with_request_id(&self, request_id: u64, cx: &mut Context<Self>) {
+ let mut elements = self.elements.lock().unwrap();
+ let mut blocks_to_remove = Vec::new();
+
+ // Find indices of blocks to remove
+ for (index, element) in elements.iter().enumerate() {
+ let should_remove = element.read(cx).request_id == request_id;
+ if should_remove {
+ blocks_to_remove.push(index);
+ }
+ }
+
+ // Remove blocks in reverse order to maintain indices
+ for &index in blocks_to_remove.iter().rev() {
+ elements.remove(index);
+ }
+
+ if !blocks_to_remove.is_empty() {
+ cx.notify();
}
}
@@ -46,11 +88,16 @@ impl MessageContainer {
// Add a new text block
    pub fn add_text_block(&self, content: impl Into<String>, cx: &mut Context<Self>) {
self.finish_any_thinking_blocks(cx);
+
+ // Clear waiting_for_content flag on first content
+ self.set_waiting_for_content(false);
+
+ let request_id = *self.current_request_id.lock().unwrap();
let mut elements = self.elements.lock().unwrap();
let block = BlockData::TextBlock(TextBlock {
content: content.into(),
});
- let view = cx.new(|cx| BlockView::new(block, cx));
+ let view = cx.new(|cx| BlockView::new(block, request_id, cx));
elements.push(view);
cx.notify();
}
@@ -59,9 +106,14 @@ impl MessageContainer {
#[allow(dead_code)]
    pub fn add_thinking_block(&self, content: impl Into<String>, cx: &mut Context<Self>) {
self.finish_any_thinking_blocks(cx);
+
+ // Clear waiting_for_content flag on first content
+ self.set_waiting_for_content(false);
+
+ let request_id = *self.current_request_id.lock().unwrap();
let mut elements = self.elements.lock().unwrap();
let block = BlockData::ThinkingBlock(ThinkingBlock::new(content.into()));
- let view = cx.new(|cx| BlockView::new(block, cx));
+ let view = cx.new(|cx| BlockView::new(block, request_id, cx));
elements.push(view);
cx.notify();
}
@@ -74,6 +126,11 @@ impl MessageContainer {
        cx: &mut Context<Self>,
) {
self.finish_any_thinking_blocks(cx);
+
+ // Clear waiting_for_content flag on first content
+ self.set_waiting_for_content(false);
+
+ let request_id = *self.current_request_id.lock().unwrap();
let mut elements = self.elements.lock().unwrap();
let block = BlockData::ToolUse(ToolUseBlock {
name: name.into(),
@@ -84,7 +141,7 @@ impl MessageContainer {
output: None,
is_collapsed: false, // Default to expanded
});
- let view = cx.new(|cx| BlockView::new(block, cx));
+ let view = cx.new(|cx| BlockView::new(block, request_id, cx));
elements.push(view);
cx.notify();
}
@@ -131,6 +188,9 @@ impl MessageContainer {
    pub fn add_or_append_to_text_block(&self, content: impl Into<String>, cx: &mut Context<Self>) {
self.finish_any_thinking_blocks(cx);
+ // Clear waiting_for_content flag on first content
+ self.set_waiting_for_content(false);
+
let content = content.into();
let mut elements = self.elements.lock().unwrap();
@@ -151,10 +211,11 @@ impl MessageContainer {
}
// If we reach here, we need to add a new text block
+ let request_id = *self.current_request_id.lock().unwrap();
let block = BlockData::TextBlock(TextBlock {
content: content.to_string(),
});
- let view = cx.new(|cx| BlockView::new(block, cx));
+ let view = cx.new(|cx| BlockView::new(block, request_id, cx));
elements.push(view);
cx.notify();
}
@@ -165,6 +226,9 @@ impl MessageContainer {
        content: impl Into<String>,
        cx: &mut Context<Self>,
) {
+ // Clear waiting_for_content flag on first content
+ self.set_waiting_for_content(false);
+
let content = content.into();
let mut elements = self.elements.lock().unwrap();
@@ -185,8 +249,9 @@ impl MessageContainer {
}
// If we reach here, we need to add a new thinking block
+ let request_id = *self.current_request_id.lock().unwrap();
let block = BlockData::ThinkingBlock(ThinkingBlock::new(content.to_string()));
- let view = cx.new(|cx| BlockView::new(block, cx));
+ let view = cx.new(|cx| BlockView::new(block, request_id, cx));
elements.push(view);
cx.notify();
}
@@ -260,6 +325,7 @@ impl MessageContainer {
// If we didn't find a matching tool, create a new one with this parameter
if !tool_found {
+ let request_id = *self.current_request_id.lock().unwrap();
let mut tool = ToolUseBlock {
name: "unknown".to_string(), // Default name since we only have ID
id: tool_id.clone(),
@@ -276,7 +342,7 @@ impl MessageContainer {
});
let block = BlockData::ToolUse(tool);
- let view = cx.new(|cx| BlockView::new(block, cx));
+ let view = cx.new(|cx| BlockView::new(block, request_id, cx));
elements.push(view);
cx.notify();
}
@@ -368,11 +434,12 @@ impl BlockData {
/// Entity view for a block
pub struct BlockView {
block: BlockData,
+ request_id: u64,
}
impl BlockView {
-    pub fn new(block: BlockData, _cx: &mut Context<Self>) -> Self {
-        Self { block }
+    pub fn new(block: BlockData, request_id: u64, _cx: &mut Context<Self>) -> Self {
+        Self { block, request_id }
}
fn toggle_thinking_collapsed(&mut self, cx: &mut Context) {
@@ -721,13 +788,20 @@ impl Render for BlockView {
if let Some(output_content) = &block.output {
if !output_content.is_empty() {
// Also check if output is not empty
+ let output_color =
+ if block.status == crate::ui::ToolStatus::Error {
+ cx.theme().danger
+ } else {
+ cx.theme().foreground
+ };
+
elements.push(
div()
.id(SharedString::from(block.id.clone()))
.p_2()
.mt_1()
.w_full()
- .text_color(cx.theme().foreground)
+ .text_color(output_color)
.text_size(px(13.))
.child(output_content.clone())
.into_any(),
@@ -736,9 +810,11 @@ impl Render for BlockView {
}
}
- // Error message (always shown for error status, regardless of collapsed state)
+ // Error message (only shown for error status when collapsed, or when there's no output)
if block.status == crate::ui::ToolStatus::Error
&& block.status_message.is_some()
+ && (block.is_collapsed
+ || block.output.as_ref().map_or(true, |o| o.is_empty()))
{
elements.push(
div()
diff --git a/crates/code_assistant/src/ui/gpui/file_icons.rs b/crates/code_assistant/src/ui/gpui/file_icons.rs
index 79c70d9..d0d0033 100644
--- a/crates/code_assistant/src/ui/gpui/file_icons.rs
+++ b/crates/code_assistant/src/ui/gpui/file_icons.rs
@@ -52,6 +52,7 @@ pub const THEME_DARK: &str = "theme_dark"; // theme_dark.svg
pub const THEME_LIGHT: &str = "theme_light"; // theme_light.svg
pub const SEND: &str = "send"; // send.svg
+pub const STOP: &str = "stop"; // stop.svg
// Tool-specific icon mappings to actual SVG files
// These are direct constants defining the paths to SVG icons or existing types
diff --git a/crates/code_assistant/src/ui/gpui/messages.rs b/crates/code_assistant/src/ui/gpui/messages.rs
index cc13d9a..b50062f 100644
--- a/crates/code_assistant/src/ui/gpui/messages.rs
+++ b/crates/code_assistant/src/ui/gpui/messages.rs
@@ -1,7 +1,11 @@
use super::elements::MessageContainer;
-use gpui::{div, prelude::*, px, rgb, App, Context, Entity, FocusHandle, Focusable, Window};
+use gpui::{
+ bounce, div, ease_in_out, percentage, prelude::*, px, rgb, svg, Animation, AnimationExt, App,
+ Context, Entity, FocusHandle, Focusable, SharedString, Transformation, Window,
+};
use gpui_component::{scroll::ScrollbarAxis, v_flex, ActiveTheme, StyledExt};
use std::sync::{Arc, Mutex};
+use std::time::Duration;
/// MessagesView - Component responsible for displaying the message history
pub struct MessagesView {
@@ -97,9 +101,51 @@ impl Render for MessagesView {
message_container
};
- // Simply render each block entity
+ // Render all block elements
let elements = msg.read(cx).elements();
- message_container.children(elements)
+ let mut container_children = vec![];
+
+ // Add all existing blocks
+ for element in elements {
+ container_children.push(element.into_any_element());
+ }
+
+ // Add loading indicator if waiting for content
+ if msg.read(cx).is_waiting_for_content() {
+ container_children.push(
+ div()
+ .flex()
+ .items_center()
+ .gap_2()
+ .p_2()
+ .child(
+ svg()
+ .size(px(18.))
+ .path(SharedString::from("icons/arrow_circle.svg"))
+ .text_color(cx.theme().info)
+ .with_animation(
+ "loading_indicator",
+ Animation::new(Duration::from_secs(2))
+ .repeat()
+ .with_easing(bounce(ease_in_out)),
+ |svg, delta| {
+ svg.with_transformation(Transformation::rotate(
+ percentage(delta),
+ ))
+ },
+ ),
+ )
+ .child(
+ div()
+ .text_color(cx.theme().info)
+ .text_size(px(14.))
+ .child("Waiting for response..."),
+ )
+ .into_any_element(),
+ );
+ }
+
+ message_container.children(container_children)
})),
)
}
diff --git a/crates/code_assistant/src/ui/gpui/mod.rs b/crates/code_assistant/src/ui/gpui/mod.rs
index b6e1416..104c314 100644
--- a/crates/code_assistant/src/ui/gpui/mod.rs
+++ b/crates/code_assistant/src/ui/gpui/mod.rs
@@ -21,7 +21,9 @@ use crate::ui::gpui::{
simple_renderers::SimpleParameterRenderer,
ui_events::UiEvent,
};
-use crate::ui::{async_trait, DisplayFragment, ToolStatus, UIError, UIMessage, UserInterface};
+use crate::ui::{
+ async_trait, DisplayFragment, StreamingState, ToolStatus, UIError, UIMessage, UserInterface,
+};
use assets::Assets;
use async_channel;
use gpui::{actions, px, AppContext, AsyncApp, Entity, Global, Point};
@@ -31,6 +33,7 @@ pub use memory::MemoryView;
pub use messages::MessagesView;
pub use root::RootView;
use std::any::Any;
+
use std::sync::{Arc, Mutex};
use std::time::Duration;
use tracing::warn;
@@ -54,6 +57,7 @@ pub struct Gpui {
    last_xml_tool_id: Arc<Mutex<String>>,
    #[allow(dead_code)]
    parameter_renderers: Arc<ParameterRendererRegistry>, // TODO: Needed?!
+    streaming_state: Arc<Mutex<StreamingState>>,
}
// Implement Global trait for Gpui
@@ -69,6 +73,7 @@ impl Gpui {
let current_request_id = Arc::new(Mutex::new(0));
let current_tool_counter = Arc::new(Mutex::new(0));
let last_xml_tool_id = Arc::new(Mutex::new(String::new()));
+ let streaming_state = Arc::new(Mutex::new(StreamingState::Idle));
// Initialize parameter renderers registry with default renderer
let mut registry = ParameterRendererRegistry::new(Box::new(DefaultParameterRenderer));
@@ -113,6 +118,7 @@ impl Gpui {
current_tool_counter,
last_xml_tool_id,
parameter_renderers,
+ streaming_state,
}
}
@@ -210,6 +216,7 @@ impl Gpui {
cx,
input_value.clone(),
input_requested.clone(),
+ gpui_clone.streaming_state.clone(),
)
});
@@ -251,130 +258,33 @@ impl Gpui {
}
}
UiEvent::AppendToTextBlock { content } => {
- let mut queue = self.message_queue.lock().unwrap();
+ let queue = self.message_queue.lock().unwrap();
if let Some(last) = queue.last() {
- // Check if the last message is from the assistant, otherwise create a new one
-
- let is_user_message = cx
- .update_entity(&last, |message, _cx| message.is_user_message())
- .expect("Failed to update entity");
-
- if is_user_message {
- // Create a new assistant message
- let result = cx.new(|cx| {
- let new_message =
- MessageContainer::with_role(MessageRole::Assistant, cx);
- new_message.add_text_block(&content, cx);
- new_message
- });
- if let Ok(new_message) = result {
- queue.push(new_message);
- } else {
- warn!("Failed to create message entity");
- }
- } else {
- // Update the existing assistant message
- cx.update_entity(&last, |message, cx| {
- message.add_or_append_to_text_block(&content, cx)
- })
- .expect("Failed to update entity");
- }
- } else {
- // If there are no messages, create a new assistant message
- let result = cx.new(|cx| {
- let new_message = MessageContainer::with_role(MessageRole::Assistant, cx);
- new_message.add_text_block(&content, cx);
- new_message
- });
- if let Ok(new_message) = result {
- queue.push(new_message);
- } else {
- warn!("Failed to create message entity");
- }
+ // Since StreamingStarted ensures last container is Assistant, we can safely append
+ cx.update_entity(&last, |message, cx| {
+ message.add_or_append_to_text_block(&content, cx)
+ })
+ .expect("Failed to update entity");
}
}
UiEvent::AppendToThinkingBlock { content } => {
- let mut queue = self.message_queue.lock().unwrap();
+ let queue = self.message_queue.lock().unwrap();
if let Some(last) = queue.last() {
- // Check if the last message is from the assistant, otherwise create a new one
- let is_user_message = cx
- .update_entity(&last, |message, _cx| message.is_user_message())
- .expect("Failed to update entity");
-
- if is_user_message {
- // Create a new assistant message
- let result = cx.new(|cx| {
- let new_message =
- MessageContainer::with_role(MessageRole::Assistant, cx);
- new_message.add_thinking_block(&content, cx);
- new_message
- });
- if let Ok(new_message) = result {
- queue.push(new_message);
- } else {
- warn!("Failed to create message entity");
- }
- } else {
- // Update the existing assistant message
- cx.update_entity(&last, |message, cx| {
- message.add_or_append_to_thinking_block(&content, cx)
- })
- .expect("Failed to update entity");
- }
- } else {
- // If there are no messages, create a new assistant message
- let result = cx.new(|cx| {
- let new_message = MessageContainer::with_role(MessageRole::Assistant, cx);
- new_message.add_thinking_block(&content, cx);
- new_message
- });
- if let Ok(new_message) = result {
- queue.push(new_message);
- } else {
- warn!("Failed to create message entity");
- }
+ // Since StreamingStarted ensures last container is Assistant, we can safely append
+ cx.update_entity(&last, |message, cx| {
+ message.add_or_append_to_thinking_block(&content, cx)
+ })
+ .expect("Failed to update entity");
}
}
UiEvent::StartTool { name, id } => {
- let mut queue = self.message_queue.lock().unwrap();
+ let queue = self.message_queue.lock().unwrap();
if let Some(last) = queue.last() {
- // Check if the last message is from the assistant, otherwise create a new one
- let is_user_message = cx
- .update_entity(&last, |message, _cx| message.is_user_message())
- .expect("Failed to update entity");
-
- if is_user_message {
- // Create a new assistant message
- let result = cx.new(|cx| {
- let new_message =
- MessageContainer::with_role(MessageRole::Assistant, cx);
- new_message.add_tool_use_block(&name, &id, cx);
- new_message
- });
- if let Ok(new_message) = result {
- queue.push(new_message);
- } else {
- warn!("Failed to create message entity");
- }
- } else {
- // Update the existing assistant message
- cx.update_entity(&last, |message, cx| {
- message.add_tool_use_block(&name, &id, cx);
- })
- .expect("Failed to update entity");
- }
- } else {
- // Create a new assistant message if none exists
- let result = cx.new(|cx| {
- let new_message = MessageContainer::with_role(MessageRole::Assistant, cx);
- new_message.add_tool_use_block(&name, &id, cx);
- new_message
- });
- if let Ok(new_message) = result {
- queue.push(new_message);
- } else {
- warn!("Failed to create message entity");
- }
+ // Since StreamingStarted ensures last container is Assistant, we can safely add tool
+ cx.update_entity(&last, |message, cx| {
+ message.add_tool_use_block(&name, &id, cx);
+ })
+ .expect("Failed to update entity");
}
}
UiEvent::UpdateToolParameter {
@@ -425,6 +335,51 @@ impl Gpui {
}
cx.refresh().expect("Failed to refresh windows");
}
+ UiEvent::StreamingStarted(request_id) => {
+ let mut queue = self.message_queue.lock().unwrap();
+
+ // Check if we need to create a new assistant container
+ let needs_new_container = if let Some(last) = queue.last() {
+ cx.update_entity(&last, |message, _cx| message.is_user_message())
+ .expect("Failed to update entity")
+ } else {
+ true
+ };
+
+ if needs_new_container {
+ // Create new assistant container
+ let assistant_container = cx
+ .new(|cx| {
+ let container = MessageContainer::with_role(MessageRole::Assistant, cx);
+ container.set_current_request_id(request_id);
+ container.set_waiting_for_content(true);
+ container
+ })
+ .expect("Failed to create new container");
+ queue.push(assistant_container);
+ } else {
+ // Use existing assistant container
+ if let Some(last_message) = queue.last() {
+ cx.update_entity(last_message, |container, cx| {
+ container.set_current_request_id(request_id);
+ container.set_waiting_for_content(true);
+ cx.notify();
+ })
+ .expect("Failed to update existing container");
+ }
+ }
+ }
+ UiEvent::StreamingStopped { id, cancelled } => {
+ if cancelled {
+ let queue = self.message_queue.lock().unwrap();
+ for message_container in queue.iter() {
+ cx.update_entity(message_container, |message_container, cx| {
+ message_container.remove_blocks_with_request_id(id, cx);
+ })
+ .expect("Failed to update entity");
+ }
+ }
+ }
}
}
@@ -577,19 +532,41 @@ impl UserInterface for Gpui {
}
    async fn begin_llm_request(&self) -> Result<u64, UIError> {
+ // Set streaming state to Streaming
+ *self.streaming_state.lock().unwrap() = StreamingState::Streaming;
+
// Increment request ID counter
let mut request_id = self.current_request_id.lock().unwrap();
*request_id += 1;
+ let current_id = *request_id;
// Reset tool counter for this request
let mut tool_counter = self.current_tool_counter.lock().unwrap();
*tool_counter = 0;
- Ok(*request_id)
+ // Send StreamingStarted event
+ self.push_event(UiEvent::StreamingStarted(current_id));
+
+ Ok(current_id)
}
- async fn end_llm_request(&self, _request_id: u64) -> Result<(), UIError> {
- // For now, we don't need special handling for request completion
+ async fn end_llm_request(&self, request_id: u64, cancelled: bool) -> Result<(), UIError> {
+ // Reset streaming state to Idle
+ *self.streaming_state.lock().unwrap() = StreamingState::Idle;
+
+ // Send StreamingStopped event
+ self.push_event(UiEvent::StreamingStopped {
+ id: request_id,
+ cancelled,
+ });
+
Ok(())
}
+
+ fn should_streaming_continue(&self) -> bool {
+ match *self.streaming_state.lock().unwrap() {
+ StreamingState::StopRequested => false,
+ _ => true,
+ }
+ }
}
diff --git a/crates/code_assistant/src/ui/gpui/root.rs b/crates/code_assistant/src/ui/gpui/root.rs
index 72c03f7..de8ccf4 100644
--- a/crates/code_assistant/src/ui/gpui/root.rs
+++ b/crates/code_assistant/src/ui/gpui/root.rs
@@ -3,9 +3,10 @@ use super::memory::MemoryView;
use super::messages::MessagesView;
use super::theme;
use super::CloseWindow;
+use crate::ui::StreamingState;
use gpui::{
- div, prelude::*, px, App, Context, CursorStyle, Entity, FocusHandle, Focusable, MouseButton,
- MouseUpEvent,
+ div, prelude::*, px, rgba, App, Context, CursorStyle, Entity, FocusHandle, Focusable,
+ MouseButton, MouseUpEvent,
};
use gpui_component::input::InputState;
use gpui_component::input::TextInput;
@@ -23,6 +24,8 @@ pub struct RootView {
    input_requested: Arc<Mutex<bool>>,
    // Memory view state
    memory_collapsed: bool,
+    // Streaming state - shared with Gpui
+    streaming_state: Arc<Mutex<StreamingState>>,
}
impl RootView {
@@ -33,6 +36,7 @@ impl RootView {
        cx: &mut Context<Self>,
        input_value: Arc<Mutex<Option<String>>>,
        input_requested: Arc<Mutex<bool>>,
+        streaming_state: Arc<Mutex<StreamingState>>,
) -> Self {
Self {
text_input,
@@ -43,6 +47,7 @@ impl RootView {
input_value,
input_requested,
memory_collapsed: false,
+ streaming_state,
}
}
@@ -98,6 +103,17 @@ impl RootView {
});
cx.notify();
}
+
+ fn on_stop_click(
+ &mut self,
+ _: &MouseUpEvent,
+ _window: &mut gpui::Window,
+ cx: &mut Context,
+ ) {
+ // Set streaming state to StopRequested
+ *self.streaming_state.lock().unwrap() = StreamingState::StopRequested;
+ cx.notify();
+ }
}
impl Focusable for RootView {
@@ -107,9 +123,10 @@ impl Focusable for RootView {
}
impl Render for RootView {
-    fn render(&mut self, _window: &mut gpui::Window, cx: &mut Context<Self>) -> impl IntoElement {
-        // Check if input is requested
+    fn render(&mut self, window: &mut gpui::Window, cx: &mut Context<Self>) -> impl IntoElement {
+        // Check if input is requested and current streaming state
let is_input_requested = *self.input_requested.lock().unwrap();
+ let current_streaming_state = *self.streaming_state.lock().unwrap();
// Main container with titlebar and content
div()
@@ -239,36 +256,105 @@ impl Render for RootView {
.items_center()
.p_2()
.gap_2()
- .child(div().flex_1().child(TextInput::new(&self.text_input)))
- .child(
+ .child({
+ let text_input_handle =
+ self.text_input.read(cx).focus_handle(cx);
+ let is_focused = text_input_handle.is_focused(window);
+
div()
- .size(px(40.))
- .rounded_sm()
- .flex()
- .items_center()
- .justify_center()
- .cursor(if is_input_requested {
- CursorStyle::PointingHand
+ .flex_1()
+ .border_1()
+ .border_color(if is_focused {
+ cx.theme().primary // Blue border when focused
+ } else if cx.theme().is_dark() {
+ rgba(0x555555FF).into() // Brighter border for dark theme
} else {
- CursorStyle::OperationNotAllowed
+ rgba(0x999999FF).into() // Darker border for light theme
})
- .child(file_icons::render_icon(
- &file_icons::get().get_type_icon(file_icons::SEND),
- 22.0,
+ .rounded_md()
+ .track_focus(&text_input_handle)
+ .child(TextInput::new(&self.text_input))
+ })
+ .child({
+ // Create button based on streaming state
+ match current_streaming_state {
+ StreamingState::Idle => {
+ // Show send button, enabled only if input is requested
+ let mut button = div()
+ .size(px(40.))
+ .rounded_sm()
+ .flex()
+ .items_center()
+ .justify_center()
+ .cursor(if is_input_requested {
+ CursorStyle::PointingHand
+ } else {
+ CursorStyle::OperationNotAllowed
+ })
+ .child(file_icons::render_icon(
+ &file_icons::get()
+ .get_type_icon(file_icons::SEND),
+ 22.0,
+ if is_input_requested {
+ cx.theme().primary
+ } else {
+ cx.theme().muted_foreground
+ },
+ ">",
+ ));
+
if is_input_requested {
- cx.theme().primary
- } else {
- cx.theme().muted_foreground
- },
- ">",
- ))
- .when(is_input_requested, |style| {
- style.hover(|s| s.bg(cx.theme().muted)).on_mouse_up(
- MouseButton::Left,
- cx.listener(Self::on_submit_click),
- )
- }),
- ),
+ button = button
+ .hover(|s| s.bg(cx.theme().muted))
+ .on_mouse_up(
+ MouseButton::Left,
+ cx.listener(Self::on_submit_click),
+ );
+ }
+
+ button
+ }
+ StreamingState::Streaming => {
+ // Show stop button, enabled
+ div()
+ .size(px(40.))
+ .rounded_sm()
+ .flex()
+ .items_center()
+ .justify_center()
+ .cursor(CursorStyle::PointingHand)
+ .hover(|s| s.bg(cx.theme().muted))
+ .child(file_icons::render_icon(
+ &file_icons::get()
+ .get_type_icon(file_icons::STOP),
+ 22.0,
+ cx.theme().danger,
+ "⬜",
+ ))
+ .on_mouse_up(
+ MouseButton::Left,
+ cx.listener(Self::on_stop_click),
+ )
+ }
+ StreamingState::StopRequested => {
+ // Show stop button, disabled/grayed out
+ div()
+ .size(px(40.))
+ .rounded_sm()
+ .flex()
+ .items_center()
+ .justify_center()
+ .cursor(CursorStyle::OperationNotAllowed)
+ .child(file_icons::render_icon(
+ &file_icons::get()
+ .get_type_icon(file_icons::STOP),
+ 22.0,
+ cx.theme().muted_foreground,
+ "⬜",
+ ))
+ }
+ }
+ }),
),
)
// Right sidebar with memory view - only show if not collapsed
diff --git a/crates/code_assistant/src/ui/gpui/theme.rs b/crates/code_assistant/src/ui/gpui/theme.rs
index fb5b218..1e02789 100644
--- a/crates/code_assistant/src/ui/gpui/theme.rs
+++ b/crates/code_assistant/src/ui/gpui/theme.rs
@@ -25,9 +25,9 @@ pub fn custom_dark_theme() -> gpui_component::theme::ThemeColor {
// Buttons
colors.primary = rgb(0x0099EE).into(); // Primary button (submit)
- colors.primary_hover = rgb(0x4466cc).into();
- colors.danger = rgb(0x553333).into(); // Danger button (clear)
- colors.danger_hover = rgb(0x664444).into();
+ colors.primary_hover = rgb(0x4466CC).into();
+ colors.danger = rgb(0xFF2934).into(); // Danger button (stop)
+ colors.danger_hover = rgb(0xFF3D46).into();
// Tool status colors
colors.success = rgb(0x47D136).into();
@@ -61,8 +61,8 @@ pub fn custom_light_theme() -> gpui_component::theme::ThemeColor {
// Buttons
colors.primary = rgb(0x53AEFF).into(); // Primary button (submit)
colors.primary_hover = rgb(0x3355BB).into();
- colors.danger = rgb(0xBB3333).into(); // Danger button (clear)
- colors.danger_hover = rgb(0xCC4444).into();
+ colors.danger = rgb(0xFF2934).into(); // Danger button (stop)
+ colors.danger_hover = rgb(0xFF3D46).into();
// Tool status colors
colors.success = rgb(0x2BB517).into();
diff --git a/crates/code_assistant/src/ui/gpui/ui_events.rs b/crates/code_assistant/src/ui/gpui/ui_events.rs
index aefa07e..01c6435 100644
--- a/crates/code_assistant/src/ui/gpui/ui_events.rs
+++ b/crates/code_assistant/src/ui/gpui/ui_events.rs
@@ -30,4 +30,8 @@ pub enum UiEvent {
EndTool { id: String },
/// Update the working memory view
UpdateMemory { memory: WorkingMemory },
+ /// Streaming started for a request
+ StreamingStarted(u64),
+ /// Streaming stopped for a request
+ StreamingStopped { id: u64, cancelled: bool },
}
diff --git a/crates/code_assistant/src/ui/mod.rs b/crates/code_assistant/src/ui/mod.rs
index 2f9d248..5914224 100644
--- a/crates/code_assistant/src/ui/mod.rs
+++ b/crates/code_assistant/src/ui/mod.rs
@@ -14,6 +14,13 @@ pub enum ToolStatus {
Error, // Error during execution
}
+#[derive(Debug, Clone, Copy, PartialEq)]
+pub enum StreamingState {
+ Idle, // No active streaming, ready to send
+ Streaming, // Currently streaming response
+ StopRequested, // User requested stop, waiting for stream to end
+}
+
#[derive(Debug, Clone)]
pub enum UIMessage {
// System actions that the agent takes
@@ -60,7 +67,10 @@ pub trait UserInterface: Send + Sync {
    async fn begin_llm_request(&self) -> Result<u64, UIError>;
/// Informs the UI that an LLM request has completed
- async fn end_llm_request(&self, request_id: u64) -> Result<(), UIError>;
+ async fn end_llm_request(&self, request_id: u64, cancelled: bool) -> Result<(), UIError>;
+
+ /// Check if streaming should continue
+ fn should_streaming_continue(&self) -> bool;
}
#[cfg(test)]
diff --git a/crates/code_assistant/src/ui/streaming/test_utils.rs b/crates/code_assistant/src/ui/streaming/test_utils.rs
index 8c48220..207325a 100644
--- a/crates/code_assistant/src/ui/streaming/test_utils.rs
+++ b/crates/code_assistant/src/ui/streaming/test_utils.rs
@@ -125,10 +125,15 @@ impl UserInterface for TestUI {
Ok(42)
}
- async fn end_llm_request(&self, _request_id: u64) -> Result<(), UIError> {
+ async fn end_llm_request(&self, _request_id: u64, _cancelled: bool) -> Result<(), UIError> {
// Mock implementation does nothing with request completion
Ok(())
}
+
+ fn should_streaming_continue(&self) -> bool {
+ // Test implementation always continues streaming
+ true
+ }
}
/// Helper function to split text into small chunks for testing tag handling
diff --git a/crates/code_assistant/src/ui/terminal.rs b/crates/code_assistant/src/ui/terminal.rs
index 61be558..e50c9c3 100644
--- a/crates/code_assistant/src/ui/terminal.rs
+++ b/crates/code_assistant/src/ui/terminal.rs
@@ -223,15 +223,20 @@ impl UserInterface for TerminalUI {
Ok(request_id)
}
- async fn end_llm_request(&self, request_id: u64) -> Result<(), UIError> {
+ async fn end_llm_request(&self, request_id: u64, cancelled: bool) -> Result<(), UIError> {
// Optionally display a message that the request has completed
- self.write_line(
- &format!("Completed LLM request ({})", request_id)
- .dark_blue()
- .to_string(),
- )
- .await?;
+ let message = if cancelled {
+ format!("Cancelled LLM request ({})", request_id)
+ } else {
+ format!("Completed LLM request ({})", request_id)
+ };
+ self.write_line(&message.dark_blue().to_string()).await?;
Ok(())
}
+
+ fn should_streaming_continue(&self) -> bool {
+ // Terminal UI always continues streaming (no stop functionality)
+ true
+ }
}
diff --git a/crates/llm/src/aicore_converse.rs b/crates/llm/src/aicore_converse.rs
index d43c820..bcecfc4 100644
--- a/crates/llm/src/aicore_converse.rs
+++ b/crates/llm/src/aicore_converse.rs
@@ -399,6 +399,12 @@ impl AiCoreClient {
) -> Result<(LLMResponse, AnthropicRateLimitInfo)> {
let token = self.token_manager.get_valid_token().await?;
+ // Start recording before HTTP request to capture real latency
+ if let Some(recorder) = &self.recorder {
+ let request_json = serde_json::to_value(request)?;
+ recorder.start_recording(request_json)?;
+ }
+
let request_builder = self
.client
.post(&self.get_url(streaming_callback.is_some()))
@@ -627,13 +633,6 @@ impl AiCoreClient {
Ok(())
}
- // Start recording if a recorder is available
- if let Some(recorder) = &self.recorder {
- // Serialize request for recording
- let request_json = serde_json::to_value(request)?;
- recorder.start_recording(request_json)?;
- }
-
while let Some(chunk) = response.chunk().await? {
process_chunk(
&chunk,
diff --git a/crates/llm/src/aicore_invoke.rs b/crates/llm/src/aicore_invoke.rs
index 04cca26..72bb84c 100644
--- a/crates/llm/src/aicore_invoke.rs
+++ b/crates/llm/src/aicore_invoke.rs
@@ -384,6 +384,11 @@ impl AiCoreClient {
);
}
+ // Start recording before HTTP request to capture real latency
+ if let Some(recorder) = &self.recorder {
+ recorder.start_recording(request.clone())?;
+ }
+
let response = request_builder
.json(&request)
.send()
@@ -607,13 +612,6 @@ impl AiCoreClient {
Ok(())
}
- // Start recording if a recorder is available
- if let Some(recorder) = &self.recorder {
- // Serialize request for recording
- let request_json = serde_json::to_value(request)?;
- recorder.start_recording(request_json)?;
- }
-
while let Some(chunk) = response.chunk().await? {
process_chunk(
&chunk,
diff --git a/crates/llm/src/anthropic.rs b/crates/llm/src/anthropic.rs
index adf23d5..af6092c 100644
--- a/crates/llm/src/anthropic.rs
+++ b/crates/llm/src/anthropic.rs
@@ -384,6 +384,12 @@ impl AnthropicClient {
"application/json"
};
+ // Start recording before HTTP request to capture real latency
+ if let Some(recorder) = &self.recorder {
+ let request_json = serde_json::to_value(request)?;
+ recorder.start_recording(request_json)?;
+ }
+
let mut request_builder = self
.client
.post(self.get_url())
@@ -649,13 +655,6 @@ impl AnthropicClient {
Ok(())
}
- // Start recording if a recorder is available
- if let Some(recorder) = &self.recorder {
- // Serialize request for recording
- let request_json = serde_json::to_value(request)?;
- recorder.start_recording(request_json)?;
- }
-
while let Some(chunk) = response.chunk().await? {
process_chunk(
&chunk,