feat: add forge config caching and handle_pr_merge forge support

Completes RFC 0013 git forge integration:
- Add BlueConfig struct for .blue/config.yaml persistence
- Add detect_forge_type_cached() and create_forge_cached() functions
  that cache detected forge type to avoid repeated API probing
- Update handle_pr_merge to use native forge API instead of gh CLI
- Add force parameter for skipping precondition checks when gh unavailable

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
Eric Garcia 2026-01-24 20:12:37 -05:00
parent ae98b6f230
commit d91589346b
3 changed files with 197 additions and 45 deletions

View file

@@ -187,6 +187,85 @@ pub struct ForgeConfig {
pub repo: String, pub repo: String,
} }
/// Blue config file structure
///
/// Persisted as `.blue/config.yaml`. Currently holds only the cached forge
/// configuration; unknown/absent fields deserialize via serde defaults, so
/// the struct can grow without breaking existing config files.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct BlueConfig {
// Cached forge detection result; omitted from the YAML output when None.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub forge: Option<ForgeConfig>,
}
impl BlueConfig {
    /// Load config from `.blue/config.yaml`.
    ///
    /// Returns `None` when the file is missing, unreadable, or fails to
    /// parse — the cache is best-effort, so all failures are soft.
    pub fn load(blue_dir: &std::path::Path) -> Option<Self> {
        let config_path = blue_dir.join("config.yaml");
        // No separate exists() check: read_to_string already errors on a
        // missing file, and checking first would be a TOCTOU race anyway.
        let content = std::fs::read_to_string(&config_path).ok()?;
        serde_yaml::from_str(&content).ok()
    }

    /// Save config to `.blue/config.yaml`.
    ///
    /// Creates the `.blue` directory if it does not exist yet, so the cache
    /// can be written on first detection in a fresh checkout.
    pub fn save(&self, blue_dir: &std::path::Path) -> Result<(), std::io::Error> {
        std::fs::create_dir_all(blue_dir)?;
        let config_path = blue_dir.join("config.yaml");
        let content = serde_yaml::to_string(self)
            .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?;
        std::fs::write(&config_path, content)
    }
}
/// Detect forge type with caching support
///
/// If blue_dir is provided, will check for cached config first and save
/// detected type for future use.
pub fn detect_forge_type_cached(remote_url: &str, blue_dir: Option<&std::path::Path>) -> ForgeType {
let url = parse_git_url(remote_url);
// Check cache first
if let Some(dir) = blue_dir {
if let Some(config) = BlueConfig::load(dir) {
if let Some(forge) = config.forge {
// Validate cached config matches current remote
if forge.host == url.host && forge.owner == url.owner && forge.repo == url.repo {
return forge.forge_type;
}
}
}
}
// Detect and cache
let forge_type = detect_forge_type(remote_url);
// Save to cache if blue_dir provided
if let Some(dir) = blue_dir {
let forge_config = ForgeConfig {
forge_type,
host: url.host,
owner: url.owner,
repo: url.repo,
};
let mut config = BlueConfig::load(dir).unwrap_or_default();
config.forge = Some(forge_config);
let _ = config.save(dir); // Ignore errors - caching is best-effort
}
forge_type
}
/// Create a forge instance with caching support
///
/// Resolves the forge type via [`detect_forge_type_cached`] (consulting and
/// updating `.blue/config.yaml` when `blue_dir` is given), fetches the
/// access token for that forge, and builds the matching client.
pub fn create_forge_cached(remote_url: &str, blue_dir: Option<&std::path::Path>) -> Result<Box<dyn Forge>, ForgeError> {
    let url = parse_git_url(remote_url);
    let forge_type = detect_forge_type_cached(remote_url, blue_dir);
    let token = get_token(forge_type)?;
    let forge: Box<dyn Forge> = match forge_type {
        ForgeType::GitHub => Box::new(GitHubForge::new(token)),
        ForgeType::Forgejo => Box::new(ForgejoForge::new(&url.host, token)),
    };
    Ok(forge)
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;

View file

@@ -26,7 +26,7 @@ pub mod voice;
pub mod workflow; pub mod workflow;
pub use documents::{Adr, Audit, AuditFinding, AuditSeverity, AuditType, Decision, Rfc, Spike, SpikeOutcome, Status, Task, update_markdown_status}; pub use documents::{Adr, Audit, AuditFinding, AuditSeverity, AuditType, Decision, Rfc, Spike, SpikeOutcome, Status, Task, update_markdown_status};
pub use forge::{CreatePrOpts, Forge, ForgeConfig, ForgeError, ForgeType, ForgejoForge, GitHubForge, GitUrl, MergeStrategy, PrState, PullRequest, create_forge, detect_forge_type, get_token, parse_git_url}; pub use forge::{BlueConfig, CreatePrOpts, Forge, ForgeConfig, ForgeError, ForgeType, ForgejoForge, GitHubForge, GitUrl, MergeStrategy, PrState, PullRequest, create_forge, create_forge_cached, detect_forge_type, detect_forge_type_cached, get_token, parse_git_url};
pub use indexer::{Indexer, IndexerConfig, IndexerError, IndexResult, ParsedSymbol, is_indexable_file, should_skip_dir, DEFAULT_INDEX_MODEL, MAX_FILE_LINES}; pub use indexer::{Indexer, IndexerConfig, IndexerError, IndexResult, ParsedSymbol, is_indexable_file, should_skip_dir, DEFAULT_INDEX_MODEL, MAX_FILE_LINES};
pub use llm::{CompletionOptions, CompletionResult, LlmBackendChoice, LlmConfig, LlmError, LlmManager, LlmProvider, LlmProviderChoice, LocalLlmConfig, ApiLlmConfig, KeywordLlm, MockLlm, ProviderStatus}; pub use llm::{CompletionOptions, CompletionResult, LlmBackendChoice, LlmConfig, LlmError, LlmManager, LlmProvider, LlmProviderChoice, LocalLlmConfig, ApiLlmConfig, KeywordLlm, MockLlm, ProviderStatus};
pub use repo::{detect_blue, BlueHome, RepoError, WorktreeInfo}; pub use repo::{detect_blue, BlueHome, RepoError, WorktreeInfo};

View file

@@ -14,7 +14,7 @@
use std::process::Command; use std::process::Command;
use blue_core::{CreatePrOpts, MergeStrategy, ProjectState, create_forge, detect_forge_type, parse_git_url}; use blue_core::{CreatePrOpts, MergeStrategy, ProjectState, create_forge_cached, detect_forge_type_cached, parse_git_url};
use serde_json::{json, Value}; use serde_json::{json, Value};
use crate::error::ServerError; use crate::error::ServerError;
@ -91,7 +91,8 @@ pub fn handle_create(state: &ProjectState, args: &Value) -> Result<Value, Server
}; };
let git_url = parse_git_url(&remote_url); let git_url = parse_git_url(&remote_url);
let forge_type = detect_forge_type(&remote_url); let blue_dir = Some(state.home.blue_dir.as_path());
let forge_type = detect_forge_type_cached(&remote_url, blue_dir);
// Get current branch for head // Get current branch for head
let head = match get_current_branch(&state.home.root) { let head = match get_current_branch(&state.home.root) {
@ -107,8 +108,8 @@ pub fn handle_create(state: &ProjectState, args: &Value) -> Result<Value, Server
} }
}; };
// Create forge client and make PR // Create forge client and make PR (with caching)
let forge = match create_forge(&remote_url) { let forge = match create_forge_cached(&remote_url, blue_dir) {
Ok(f) => f, Ok(f) => f,
Err(e) => { Err(e) => {
return Ok(json!({ return Ok(json!({
@ -308,58 +309,130 @@ pub fn handle_check_approvals(_state: &ProjectState, args: &Value) -> Result<Val
} }
/// Handle blue_pr_merge /// Handle blue_pr_merge
pub fn handle_merge(_state: &ProjectState, args: &Value) -> Result<Value, ServerError> { ///
let pr_number = args.get("pr_number").and_then(|v| v.as_u64()).map(|n| n as u32); /// Merges a PR using the detected forge's native API.
pub fn handle_merge(state: &ProjectState, args: &Value) -> Result<Value, ServerError> {
let pr_number = args.get("pr_number").and_then(|v| v.as_u64());
let squash = args.get("squash").and_then(|v| v.as_bool()).unwrap_or(true); let squash = args.get("squash").and_then(|v| v.as_bool()).unwrap_or(true);
// Fetch PR and check preconditions // Get remote URL and create forge client
let pr_data = fetch_pr_data(pr_number)?; let remote_url = match get_remote_url(&state.home.root) {
let (approved, _) = fetch_pr_approvals(pr_number)?; Ok(url) => url,
Err(e) => {
return Ok(json!({
"status": "error",
"message": blue_core::voice::error(
"Couldn't detect git remote",
&e
)
}));
}
};
let git_url = parse_git_url(&remote_url);
let blue_dir = Some(state.home.blue_dir.as_path());
let forge_type = detect_forge_type_cached(&remote_url, blue_dir);
let forge = match create_forge_cached(&remote_url, blue_dir) {
Ok(f) => f,
Err(e) => {
return Ok(json!({
"status": "error",
"message": blue_core::voice::error(
"Couldn't create forge client",
&format!("{}", e)
)
}));
}
};
// Get PR number - either from args or try to detect from current branch
let number = match pr_number {
Some(n) => n,
None => {
// Try to get PR for current branch via gh CLI as fallback
let pr_data = fetch_pr_data(None)?;
pr_data.number as u64
}
};
// Check preconditions via gh CLI (works for GitHub, may not work for Forgejo)
// TODO: Add review fetching to Forge trait for full cross-forge support
let preconditions_result = check_merge_preconditions(pr_number.map(|n| n as u32));
if let Err(precondition_error) = preconditions_result {
// If we can't check preconditions (e.g., gh not configured), warn but allow
// the user to proceed - the forge will reject if not allowed
if !args.get("force").and_then(|v| v.as_bool()).unwrap_or(false) {
return Ok(json!({
"status": "warning",
"message": blue_core::voice::error(
"Couldn't verify preconditions",
&format!("{}. Use force=true to merge anyway.", precondition_error)
),
"hint": "Precondition checks require gh CLI. The forge may still reject the merge."
}));
}
}
// Perform the merge
let strategy = if squash {
MergeStrategy::Squash
} else {
MergeStrategy::Merge
};
match forge.merge_pr(&git_url.owner, &git_url.repo, number, strategy) {
Ok(()) => {
Ok(json!({
"status": "success",
"pr_number": number,
"forge": forge_type.to_string(),
"strategy": if squash { "squash" } else { "merge" },
"message": blue_core::voice::success(
&format!("Merged PR #{}", number),
Some("Run blue_worktree_cleanup to clean up local worktree.")
),
"next_steps": [
"Run blue_worktree_cleanup to remove worktree and local branch"
]
}))
}
Err(e) => {
Ok(json!({
"status": "error",
"message": blue_core::voice::error(
"Merge failed",
&format!("{}", e)
)
}))
}
}
}
/// Check merge preconditions (approval, test plan)
/// Returns Ok(()) if ready to merge, Err with reason otherwise
fn check_merge_preconditions(pr_number: Option<u32>) -> Result<(), String> {
// Try to fetch PR data via gh CLI
let pr_data = fetch_pr_data(pr_number)
.map_err(|e| format!("Couldn't fetch PR data: {:?}", e))?;
let (approved, _) = fetch_pr_approvals(pr_number)
.map_err(|e| format!("Couldn't fetch approvals: {:?}", e))?;
let items = parse_test_plan(&pr_data.body); let items = parse_test_plan(&pr_data.body);
let all_items_checked = items.iter().all(|(_, checked, _)| *checked); let all_items_checked = items.iter().all(|(_, checked, _)| *checked);
// Enforce preconditions
if !approved { if !approved {
return Ok(json!({ return Err("PR not approved. Get reviewer approval first.".to_string());
"status": "error",
"message": blue_core::voice::error(
"Can't merge without approval",
"Get user approval on GitHub first"
)
}));
} }
if !all_items_checked { if !all_items_checked {
let unchecked = items.iter().filter(|(_, checked, _)| !*checked).count(); let unchecked = items.iter().filter(|(_, checked, _)| !*checked).count();
return Ok(json!({ return Err(format!("{} test plan items still unchecked", unchecked));
"status": "error",
"message": blue_core::voice::error(
&format!("{} test plan items still unchecked", unchecked),
"Run blue_pr_verify to complete verification"
)
}));
} }
let merge_cmd = format!( Ok(())
"gh pr merge {} {}--delete-branch",
pr_data.number,
if squash { "--squash " } else { "" }
);
Ok(json!({
"status": "success",
"command": merge_cmd,
"pr_number": pr_data.number,
"squash": squash,
"next_steps": [
format!("Run: {}", merge_cmd),
"Run blue_worktree_remove to clean up"
],
"message": blue_core::voice::success(
&format!("PR #{} ready to merge", pr_data.number),
Some("Run the command to merge.")
)
}))
} }
// ============================================================================= // =============================================================================