fix: remove stop sequence that truncated indexer LLM output

The stop_sequences contained "```" which caused the model to stop
immediately after outputting "```yaml", truncating the entire response.
Also wrap blocking indexer operations in spawn_blocking to avoid
runtime conflicts with reqwest::blocking::Client.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
Eric Garcia 2026-01-24 19:25:51 -05:00
parent d77ea4ba3f
commit bfd2a01ede
3 changed files with 27 additions and 1 deletion

View file

@ -0,0 +1,17 @@
# Spike: Inconsistent Worktree Creation in Claude MCP
| | |
|---|---|
| **Status** | In Progress |
| **Date** | 2026-01-25 |
| **Time Box** | 2 hours |
---
## Question
Why are worktrees and feature branches not being consistently created when using Blue MCP in Claude? What triggers (or fails to trigger) worktree creation?
---
*Investigation notes by Blue*

View file

@ -1529,6 +1529,15 @@ async fn detect_ollama_model() -> Option<String> {
// ==================== Semantic Index Commands (RFC 0010) ====================
/// Async entry point for the `index` subcommand.
///
/// The indexer internally uses `reqwest::blocking::Client`, which panics if
/// dropped inside a tokio runtime, so the real work is delegated to
/// `handle_index_command_blocking` on a dedicated blocking thread via
/// `tokio::task::spawn_blocking`.
///
/// # Errors
/// Returns an error if the blocking task panics/is cancelled (`JoinError`,
/// converted by `?`) or if the indexing itself fails.
async fn handle_index_command(command: IndexCommands) -> Result<()> {
    // Run the blocking indexer operations in a separate thread
    // to avoid runtime conflicts with reqwest::blocking::Client.
    // The first `?` propagates the JoinError; the inner Result<()> from the
    // blocking handler is then returned directly (no need for `??; Ok(())`).
    tokio::task::spawn_blocking(move || handle_index_command_blocking(command)).await?
}
fn handle_index_command_blocking(command: IndexCommands) -> Result<()> {
use blue_core::store::DocumentStore;
use blue_core::{Indexer, IndexerConfig, is_indexable_file, LocalLlmConfig};
use blue_ollama::OllamaLlm;

View file

@ -106,7 +106,7 @@ impl<P: LlmProvider> Indexer<P> {
let options = CompletionOptions {
max_tokens: self.config.max_tokens,
temperature: self.config.temperature,
stop_sequences: vec!["```".to_string()], // Stop at end of YAML block
stop_sequences: vec![], // Let model complete naturally
};
let completion = self.provider.complete(&prompt, &options)