From 4a607fe0568fd918e20a0b550aa029ac3f9bb877 Mon Sep 17 00:00:00 2001 From: Edouard Zemb Date: Sun, 8 Feb 2026 21:18:48 +0100 Subject: [PATCH] chore(ci): add fmt check, cargo-audit, and init_logging timing test --- .github/workflows/test.yml | 10 +- crates/tf-config/src/config.rs | 1310 ++++++++++++++----- crates/tf-logging/src/init.rs | 195 ++- crates/tf-logging/src/redact.rs | 251 ++-- crates/tf-logging/tests/integration_test.rs | 63 +- crates/tf-security/src/error.rs | 16 +- crates/tf-security/src/keyring.rs | 26 +- 7 files changed, 1393 insertions(+), 478 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index adaaedc..c101247 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -28,15 +28,21 @@ jobs: - name: Setup Rust uses: dtolnay/rust-toolchain@stable with: - components: clippy + components: clippy, rustfmt - name: Cache cargo uses: Swatinem/rust-cache@v2 + - name: Check formatting + run: cargo fmt --all -- --check + - name: Run clippy run: cargo clippy --workspace -- -D warnings - - name: Run tests (unit tests only) + - name: Audit dependencies + run: cargo install cargo-audit --locked && cargo audit + + - name: Run tests run: cargo test --workspace # Rust tests requiring OS keyring (Linux with gnome-keyring) diff --git a/crates/tf-config/src/config.rs b/crates/tf-config/src/config.rs index 432a11f..a31991e 100644 --- a/crates/tf-config/src/config.rs +++ b/crates/tf-config/src/config.rs @@ -216,18 +216,45 @@ pub fn redact_url_sensitive_params(url: &str) -> String { // Includes both snake_case and camelCase variants const SENSITIVE_PARAMS: &[&str] = &[ // Common names - "token", "api_key", "apikey", "key", "secret", - "password", "passwd", "pwd", "auth", "authorization", + "token", + "api_key", + "apikey", + "key", + "secret", + "password", + "passwd", + "pwd", + "auth", + "authorization", // snake_case variants - "access_token", "refresh_token", "bearer", "credentials", - "client_secret", "private_key", "session_token", "auth_token", + "access_token", + "refresh_token", + "bearer", + "credentials", + "client_secret", + "private_key", + "session_token", + "auth_token", // camelCase variants - "accesstoken", "refreshtoken", "clientsecret", "privatekey", - "sessiontoken", "authtoken", "apitoken", "secretkey", - "accesskey", "secretaccesskey", + "accesstoken", + "refreshtoken", + "clientsecret", + "privatekey", + "sessiontoken", + "authtoken", + "apitoken", + "secretkey", + "accesskey", + "secretaccesskey", // kebab-case variants (with hyphens) - "api-key", "access-token", "refresh-token", "client-secret", - "private-key", "session-token", "auth-token", "secret-key", + "api-key", + "access-token", + "refresh-token", + "client-secret", + "private-key", + "session-token", + "auth-token", + "secret-key", "access-key", ]; @@ -411,8 +438,19 @@ pub fn redact_url_sensitive_params(url: &str) -> String { fn redact_url_path_secrets(path: &str) -> String { // Sensitive path segment indicators - the segment AFTER these will be redacted const SENSITIVE_PATH_SEGMENTS: &[&str] = &[ - "token", "tokens", "api_key", "apikey", "key", "keys", "secret", "secrets", - "password", "auth", "credential", "credentials", "access_token", + "token", + "tokens", + "api_key", + "apikey", + "key", + "keys", + "secret", + "secrets", + "password", + "auth", + "credential", + "credentials", + "access_token", ]; let segments: Vec<&str> = path.split('/').collect(); @@ -514,7 +552,8 @@ impl Redact for SquashConfig { fn redacted(&self) -> String { format!( 
"SquashConfig {{ endpoint: {:?}, username: {:?}, password: [REDACTED] }}", - redact_url_sensitive_params(&self.endpoint), self.username + redact_url_sensitive_params(&self.endpoint), + self.username ) } } @@ -534,9 +573,14 @@ impl Redact for LlmConfig { "LlmConfig {{ mode: {:?}, local_endpoint: {:?}, local_model: {:?}, \ cloud_enabled: {}, cloud_endpoint: {:?}, cloud_model: {:?}, \ api_key: [REDACTED], timeout_seconds: {}, max_tokens: {} }}", - self.mode, redacted_local_endpoint, self.local_model, - self.cloud_enabled, redacted_cloud_endpoint, self.cloud_model, - self.timeout_seconds, self.max_tokens + self.mode, + redacted_local_endpoint, + self.local_model, + self.cloud_enabled, + redacted_cloud_endpoint, + self.cloud_model, + self.timeout_seconds, + self.max_tokens ) } } @@ -748,7 +792,12 @@ impl ProjectConfig { format!( "{}\nOutput folder: {}\n{}\n{}\n{}\n{}", - profile_status, self.output_folder, jira_status, squash_status, llm_status, templates_status + profile_status, + self.output_folder, + jira_status, + squash_status, + llm_status, + templates_status ) } } @@ -843,19 +892,25 @@ pub fn load_config(path: &Path) -> Result { SerdeErrorKind::MissingField { field, hint } => { ConfigError::missing_field(field, hint) } - SerdeErrorKind::InvalidEnumValue { field, reason, hint } => { - ConfigError::invalid_value(field, reason, hint) - } - SerdeErrorKind::InvalidEnumValueDynamic { field, reason, hint } => { - ConfigError::invalid_value(field, reason, hint) - } - SerdeErrorKind::UnknownField { field, location, hint } => { - ConfigError::invalid_value( - format!("{}.{}", location, field), - "is not a recognized configuration field", - hint, - ) - } + SerdeErrorKind::InvalidEnumValue { + field, + reason, + hint, + } => ConfigError::invalid_value(field, reason, hint), + SerdeErrorKind::InvalidEnumValueDynamic { + field, + reason, + hint, + } => ConfigError::invalid_value(field, reason, hint), + SerdeErrorKind::UnknownField { + field, + location, + hint, + } => ConfigError::invalid_value( + format!("{}.{}", location, field), + "is not a recognized configuration field", + hint, + ), }); } return Err(ConfigError::ParseError(e)); @@ -871,13 +926,28 @@ pub fn load_config(path: &Path) -> Result { /// Result of parsing a serde error for user-friendly transformation enum SerdeErrorKind { /// Missing required field - MissingField { field: &'static str, hint: &'static str }, + MissingField { + field: &'static str, + hint: &'static str, + }, /// Invalid enum variant (static field name) - InvalidEnumValue { field: &'static str, reason: &'static str, hint: &'static str }, + InvalidEnumValue { + field: &'static str, + reason: &'static str, + hint: &'static str, + }, /// Invalid enum variant (dynamic field name extracted from error message) - InvalidEnumValueDynamic { field: String, reason: &'static str, hint: &'static str }, + InvalidEnumValueDynamic { + field: String, + reason: &'static str, + hint: &'static str, + }, /// Unknown field (when deny_unknown_fields is active) - UnknownField { field: String, location: &'static str, hint: &'static str }, + UnknownField { + field: String, + location: &'static str, + hint: &'static str, + }, } /// Extract field path from serde_yaml error message patterns. @@ -898,7 +968,10 @@ fn extract_field_from_error(err_msg: &str) -> Option { if let Some(end) = rest.find([':', ' ', '\n']) { let field_path = &rest[..end]; // Validate it looks like a field path (only alphanumeric, dots, underscores) - if field_path.chars().all(|c| c.is_alphanumeric() || c == '.' 
|| c == '_') { + if field_path + .chars() + .all(|c| c.is_alphanumeric() || c == '.' || c == '_') + { return Some(field_path.to_string()); } } @@ -950,7 +1023,10 @@ fn extract_location_from_error(err_msg: &str) -> String { let location_end = after_line.len().min(30); let location = after_line[..location_end].trim(); if location.chars().any(|c| c.is_ascii_digit()) { - return format!(" (at {})", location.trim_end_matches(|c: char| !c.is_ascii_digit())); + return format!( + " (at {})", + location.trim_end_matches(|c: char| !c.is_ascii_digit()) + ); } } String::new() @@ -1014,7 +1090,9 @@ fn parse_serde_error(err_msg: &str) -> Option { // Handle invalid enum variant errors for LlmMode // serde_yaml format: "unknown variant `invalid`, expected one of `auto`, `local`, `cloud`" - if err_msg.contains("unknown variant") && (err_msg.contains("auto") || err_msg.contains("local") || err_msg.contains("cloud")) { + if err_msg.contains("unknown variant") + && (err_msg.contains("auto") || err_msg.contains("local") || err_msg.contains("cloud")) + { return Some(SerdeErrorKind::InvalidEnumValue { field: "llm.mode", reason: "is not a valid mode", @@ -1058,7 +1136,9 @@ fn parse_serde_error(err_msg: &str) -> Option { // serde_yaml format: "invalid type: integer `123`, expected struct TemplatesConfig" // or: "invalid type: string \"yes\", expected struct LlmConfig" - if err_msg.contains("TemplatesConfig") || (err_msg.contains("templates") && err_msg.contains("expected struct")) { + if err_msg.contains("TemplatesConfig") + || (err_msg.contains("templates") && err_msg.contains("expected struct")) + { return Some(SerdeErrorKind::InvalidEnumValue { field: "templates", reason: "has invalid type (expected a section with fields, not a scalar value)", @@ -1066,7 +1146,9 @@ fn parse_serde_error(err_msg: &str) -> Option { }); } - if err_msg.contains("LlmConfig") || (err_msg.contains("llm") && err_msg.contains("expected struct")) { + if err_msg.contains("LlmConfig") + || (err_msg.contains("llm") && err_msg.contains("expected struct")) + { return Some(SerdeErrorKind::InvalidEnumValue { field: "llm", reason: "has invalid type (expected a section with fields, not a scalar value)", @@ -1074,7 +1156,9 @@ fn parse_serde_error(err_msg: &str) -> Option { }); } - if err_msg.contains("JiraConfig") || (err_msg.contains("jira") && err_msg.contains("expected struct")) { + if err_msg.contains("JiraConfig") + || (err_msg.contains("jira") && err_msg.contains("expected struct")) + { return Some(SerdeErrorKind::InvalidEnumValue { field: "jira", reason: "has invalid type (expected a section with fields, not a scalar value)", @@ -1082,7 +1166,9 @@ fn parse_serde_error(err_msg: &str) -> Option { }); } - if err_msg.contains("SquashConfig") || (err_msg.contains("squash") && err_msg.contains("expected struct")) { + if err_msg.contains("SquashConfig") + || (err_msg.contains("squash") && err_msg.contains("expected struct")) + { return Some(SerdeErrorKind::InvalidEnumValue { field: "squash", reason: "has invalid type (expected a section with fields, not a scalar value)", @@ -1093,14 +1179,19 @@ fn parse_serde_error(err_msg: &str) -> Option { // Handle scalar field type errors within LlmConfig // serde_yaml format: "invalid type: string \"abc\", expected u32" // These occur for timeout_seconds, max_tokens fields - if err_msg.contains("expected u32") || err_msg.contains("expected u64") || err_msg.contains("expected i32") || err_msg.contains("expected i64") { + if err_msg.contains("expected u32") + || err_msg.contains("expected u64") + || 
err_msg.contains("expected i32") + || err_msg.contains("expected i64") + { // Check if this is within llm section by looking for llm-specific field names if err_msg.contains("timeout") || err_msg.contains("max_token") { if err_msg.contains("timeout") { return Some(SerdeErrorKind::InvalidEnumValue { field: "llm.timeout_seconds", reason: "has invalid type (expected an integer)", - hint: "a positive integer for timeout in seconds (e.g., timeout_seconds: 120)", + hint: + "a positive integer for timeout in seconds (e.g., timeout_seconds: 120)", }); } if err_msg.contains("max_token") { @@ -1124,7 +1215,8 @@ fn parse_serde_error(err_msg: &str) -> Option { return Some(SerdeErrorKind::InvalidEnumValueDynamic { field: format!("integer field{}", location_hint), reason: "has invalid type (expected an integer)", - hint: "a valid positive integer - check timeout_seconds, max_tokens, or port numbers", + hint: + "a valid positive integer - check timeout_seconds, max_tokens, or port numbers", }); } @@ -1151,7 +1243,10 @@ fn parse_serde_error(err_msg: &str) -> Option { // Handle string type errors with field extraction // serde_yaml may include field path in various formats - if err_msg.contains("expected a string") && !err_msg.contains("project_name") && !err_msg.contains("output_folder") { + if err_msg.contains("expected a string") + && !err_msg.contains("project_name") + && !err_msg.contains("output_folder") + { // Try to extract field name from serde_yaml error message patterns: // Pattern 1: "jira.token: invalid type" (field path prefix) // Pattern 2: "at line X, column Y, while parsing jira.token" (path in context) @@ -1166,7 +1261,11 @@ fn parse_serde_error(err_msg: &str) -> Option { // Check for known nested string fields by context if err_msg.contains("token") { - let section = if err_msg.to_lowercase().contains("jira") { "jira" } else { "configuration" }; + let section = if err_msg.to_lowercase().contains("jira") { + "jira" + } else { + "configuration" + }; return Some(SerdeErrorKind::InvalidEnumValueDynamic { field: format!("{}.token", section), reason: "has invalid type (expected a string)", @@ -1311,14 +1410,19 @@ fn detect_section_from_expected_fields(err_msg: &str) -> &'static str { // Check for jira-specific fields (endpoint, token) // Jira has only endpoint and token, so if we see both, it's jira - if expected_section.contains("`endpoint`") && expected_section.contains("`token`") - && !expected_section.contains("`username`") && !expected_section.contains("`mode`") { + if expected_section.contains("`endpoint`") + && expected_section.contains("`token`") + && !expected_section.contains("`username`") + && !expected_section.contains("`mode`") + { return "jira"; } // Check for squash-specific fields (endpoint, username, password) - if expected_section.contains("`endpoint`") && expected_section.contains("`username`") - && expected_section.contains("`password`") { + if expected_section.contains("`endpoint`") + && expected_section.contains("`username`") + && expected_section.contains("`password`") + { return "squash"; } @@ -1328,13 +1432,17 @@ fn detect_section_from_expected_fields(err_msg: &str) -> &'static str { } // Check for templates-specific fields (cr, ppt, anomaly) - if expected_section.contains("`cr`") && expected_section.contains("`ppt`") - && expected_section.contains("`anomaly`") { + if expected_section.contains("`cr`") + && expected_section.contains("`ppt`") + && expected_section.contains("`anomaly`") + { return "templates"; } // Check for root-level fields (project_name, output_folder, 
jira, squash, etc.) - if expected_section.contains("`project_name`") && expected_section.contains("`output_folder`") { + if expected_section.contains("`project_name`") + && expected_section.contains("`output_folder`") + { return "root"; } } @@ -1387,14 +1495,13 @@ fn is_valid_url(url: &str) -> bool { // Extract host and port (before path, query, or fragment) // URL format: host[:port][/path][?query][#fragment] // We need to handle URLs without path like "https://host?query" or "https://host#frag" - let host_port_end = trimmed - .find('/') - .unwrap_or_else(|| { - // No path - check for query or fragment - trimmed.find('?') - .or_else(|| trimmed.find('#')) - .unwrap_or(trimmed.len()) - }); + let host_port_end = trimmed.find('/').unwrap_or_else(|| { + // No path - check for query or fragment + trimmed + .find('?') + .or_else(|| trimmed.find('#')) + .unwrap_or(trimmed.len()) + }); let host_port = &trimmed[..host_port_end]; // Handle IPv6 addresses: [::1] or [::1]:8080 @@ -1424,7 +1531,10 @@ fn is_valid_url(url: &str) -> bool { } // Zone ID should contain only alphanumeric characters and common interface name chars // Allow: alphanumeric, hyphen, underscore, dot (common in interface names like eth0, wlan-0, etc.) - if !zone.chars().all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_' || c == '.') { + if !zone + .chars() + .all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_' || c == '.') + { return false; } } @@ -1569,7 +1679,9 @@ fn is_valid_url(url: &str) -> bool { // Check if this looks like an IPv4 address (all labels are purely numeric) // If so, validate each octet is in range 0-255 - let all_numeric = labels.iter().all(|label| label.chars().all(|c| c.is_ascii_digit())); + let all_numeric = labels + .iter() + .all(|label| label.chars().all(|c| c.is_ascii_digit())); if all_numeric && labels.len() == 4 { // This is an IPv4 address - validate each octet for label in &labels { @@ -2142,7 +2254,8 @@ jira: // Truly invalid YAML should result in a parsing error assert!( err_msg.to_lowercase().contains("parse") || err_msg.contains("expected"), - "Expected parse error, got: {}", err_msg + "Expected parse error, got: {}", + err_msg ); } @@ -2642,7 +2755,10 @@ templates: use crate::template::TemplateKind; assert!(has_valid_template_extension("file.md", TemplateKind::Cr)); assert!(has_valid_template_extension("file.MD", TemplateKind::Cr)); - assert!(has_valid_template_extension("path/to/file.pptx", TemplateKind::Ppt)); + assert!(has_valid_template_extension( + "path/to/file.pptx", + TemplateKind::Ppt + )); assert!(!has_valid_template_extension("file.txt", TemplateKind::Cr)); assert!(!has_valid_template_extension("file.ppt", TemplateKind::Ppt)); } @@ -2764,10 +2880,16 @@ llm: let llm = config.llm.unwrap(); assert_eq!(llm.mode, LlmMode::Cloud); - assert_eq!(llm.local_endpoint.as_deref(), Some("http://localhost:11434")); + assert_eq!( + llm.local_endpoint.as_deref(), + Some("http://localhost:11434") + ); assert_eq!(llm.local_model.as_deref(), Some("mistral:7b-instruct")); assert!(llm.cloud_enabled); - assert_eq!(llm.cloud_endpoint.as_deref(), Some("https://api.openai.com/v1")); + assert_eq!( + llm.cloud_endpoint.as_deref(), + Some("https://api.openai.com/v1") + ); assert_eq!(llm.cloud_model.as_deref(), Some("gpt-4o-mini")); assert_eq!(llm.api_key.as_deref(), Some("sk-secret-key")); assert_eq!(llm.timeout_seconds, 60); @@ -3065,7 +3187,8 @@ typo_field: "value" // Should mention valid fields or indicate it's not recognized assert!( err_msg.contains("not a recognized") || 
err_msg.contains("unknown"), - "Error should indicate field is unknown: {}", err_msg + "Error should indicate field is unknown: {}", + err_msg ); } @@ -3130,7 +3253,10 @@ llm: let config = result.unwrap(); let llm = config.llm.unwrap(); assert_eq!(llm.mode, LlmMode::Cloud); - assert_eq!(llm.cloud_endpoint.as_deref(), Some("https://api.openai.com/v1")); + assert_eq!( + llm.cloud_endpoint.as_deref(), + Some("https://api.openai.com/v1") + ); assert_eq!(llm.cloud_model.as_deref(), Some("gpt-4o-mini")); } @@ -3186,7 +3312,7 @@ squash: assert!(!is_valid_url(&format!("http://{}", long_host))); // Invalid characters in hostname - assert!(!is_valid_url("http://jira_server")); // underscore not allowed + assert!(!is_valid_url("http://jira_server")); // underscore not allowed // Note: "http://jira.server" IS valid - it's a dotted hostname // and goes through the standard dot validation path @@ -3258,7 +3384,11 @@ squash: // Should fail either at parse or validation assert!(result.is_err()); let err_msg = result.unwrap_err().to_string(); - assert!(err_msg.contains("squash") || err_msg.contains("invalid type") || err_msg.contains("expected")); + assert!( + err_msg.contains("squash") + || err_msg.contains("invalid type") + || err_msg.contains("expected") + ); } // === REVIEW 10 TESTS: Scalar field type errors === @@ -3279,8 +3409,11 @@ llm: let err_msg = result.unwrap_err().to_string(); // Should provide field-specific hint assert!( - err_msg.contains("timeout") || err_msg.contains("invalid type") || err_msg.contains("integer"), - "Expected timeout-related error, got: {}", err_msg + err_msg.contains("timeout") + || err_msg.contains("invalid type") + || err_msg.contains("integer"), + "Expected timeout-related error, got: {}", + err_msg ); } @@ -3300,8 +3433,11 @@ llm: let err_msg = result.unwrap_err().to_string(); // Should provide field-specific hint assert!( - err_msg.contains("max_token") || err_msg.contains("invalid type") || err_msg.contains("integer"), - "Expected max_tokens-related error, got: {}", err_msg + err_msg.contains("max_token") + || err_msg.contains("invalid type") + || err_msg.contains("integer"), + "Expected max_tokens-related error, got: {}", + err_msg ); } @@ -3329,8 +3465,11 @@ llm: // If it failed, should have a meaningful error let err_msg = e.to_string(); assert!( - err_msg.contains("timeout") || err_msg.contains("invalid") || err_msg.contains("type"), - "Expected timeout-related error, got: {}", err_msg + err_msg.contains("timeout") + || err_msg.contains("invalid") + || err_msg.contains("type"), + "Expected timeout-related error, got: {}", + err_msg ); } } @@ -3353,8 +3492,11 @@ llm: let err_msg = result.unwrap_err().to_string(); // Should mention llm fields in the hint assert!( - err_msg.contains("llm") || err_msg.contains("mode") || err_msg.contains("local_endpoint"), - "Expected llm section hint, got: {}", err_msg + err_msg.contains("llm") + || err_msg.contains("mode") + || err_msg.contains("local_endpoint"), + "Expected llm section hint, got: {}", + err_msg ); } @@ -3375,7 +3517,8 @@ jira: // Should provide jira-specific hint assert!( err_msg.contains("jira") || err_msg.contains("endpoint") || err_msg.contains("token"), - "Expected jira section hint, got: {}", err_msg + "Expected jira section hint, got: {}", + err_msg ); } @@ -3628,8 +3771,12 @@ llm: let err_msg = result.unwrap_err().to_string(); // Should provide a helpful message about boolean type assert!( - err_msg.contains("boolean") || err_msg.contains("true") || err_msg.contains("false") || 
err_msg.contains("cloud_enabled"), - "Expected boolean-related error, got: {}", err_msg + err_msg.contains("boolean") + || err_msg.contains("true") + || err_msg.contains("false") + || err_msg.contains("cloud_enabled"), + "Expected boolean-related error, got: {}", + err_msg ); } @@ -3649,7 +3796,8 @@ llm: let err_msg = result.unwrap_err().to_string(); assert!( err_msg.contains("boolean") || err_msg.contains("true") || err_msg.contains("false"), - "Expected boolean-related error, got: {}", err_msg + "Expected boolean-related error, got: {}", + err_msg ); } @@ -3815,7 +3963,10 @@ llm: // OAuth implicit flow puts tokens in fragments - must be redacted (AC #3) let url = "https://example.com/callback#access_token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9"; let redacted = redact_url_sensitive_params(url); - assert!(!redacted.contains("eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9"), "Fragment token should be redacted"); + assert!( + !redacted.contains("eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9"), + "Fragment token should be redacted" + ); assert!(redacted.contains("access_token=[REDACTED]")); } @@ -3824,7 +3975,10 @@ llm: // API key in fragment should be redacted let url = "https://example.com#api_key=sk-secret123"; let redacted = redact_url_sensitive_params(url); - assert!(!redacted.contains("sk-secret123"), "Fragment api_key should be redacted"); + assert!( + !redacted.contains("sk-secret123"), + "Fragment api_key should be redacted" + ); assert!(redacted.contains("api_key=[REDACTED]")); } @@ -3833,10 +3987,19 @@ llm: // Multiple params in fragment - redact only sensitive ones let url = "https://example.com#state=abc&token=secret&redirect=/home"; let redacted = redact_url_sensitive_params(url); - assert!(!redacted.contains("secret"), "Fragment token should be redacted"); - assert!(redacted.contains("state=abc"), "Non-sensitive param should remain"); + assert!( + !redacted.contains("secret"), + "Fragment token should be redacted" + ); + assert!( + redacted.contains("state=abc"), + "Non-sensitive param should remain" + ); assert!(redacted.contains("token=[REDACTED]")); - assert!(redacted.contains("redirect=/home"), "Non-sensitive param should remain"); + assert!( + redacted.contains("redirect=/home"), + "Non-sensitive param should remain" + ); } #[test] @@ -3844,8 +4007,14 @@ llm: // Both query and fragment params should be redacted let url = "https://example.com?api_key=query_secret#token=frag_secret"; let redacted = redact_url_sensitive_params(url); - assert!(!redacted.contains("query_secret"), "Query api_key should be redacted"); - assert!(!redacted.contains("frag_secret"), "Fragment token should be redacted"); + assert!( + !redacted.contains("query_secret"), + "Query api_key should be redacted" + ); + assert!( + !redacted.contains("frag_secret"), + "Fragment token should be redacted" + ); assert!(redacted.contains("api_key=[REDACTED]")); assert!(redacted.contains("token=[REDACTED]")); } @@ -3855,7 +4024,10 @@ llm: // Fragment without sensitive params should remain unchanged let url = "https://example.com#section=intro&page=2"; let redacted = redact_url_sensitive_params(url); - assert_eq!(redacted, url, "Non-sensitive fragment should remain unchanged"); + assert_eq!( + redacted, url, + "Non-sensitive fragment should remain unchanged" + ); } #[test] @@ -3863,7 +4035,10 @@ llm: // Simple fragment identifier (no key=value) should remain unchanged let url = "https://example.com/docs#introduction"; let redacted = redact_url_sensitive_params(url); - assert_eq!(redacted, url, "Simple fragment identifier should remain 
unchanged"); + assert_eq!( + redacted, url, + "Simple fragment identifier should remain unchanged" + ); } #[test] @@ -3874,7 +4049,11 @@ llm: }; let debug_output = format!("{:?}", jira); - assert!(!debug_output.contains("secret"), "Endpoint token should be redacted: {}", debug_output); + assert!( + !debug_output.contains("secret"), + "Endpoint token should be redacted: {}", + debug_output + ); assert!(debug_output.contains("[REDACTED]")); } @@ -3887,7 +4066,11 @@ llm: }; let debug_output = format!("{:?}", squash); - assert!(!debug_output.contains("secret123"), "Endpoint password should be redacted: {}", debug_output); + assert!( + !debug_output.contains("secret123"), + "Endpoint password should be redacted: {}", + debug_output + ); assert!(debug_output.contains("[REDACTED]")); } @@ -3899,7 +4082,11 @@ llm: }; let redacted = jira.redacted(); - assert!(!redacted.contains("sk-12345"), "Endpoint api_key should be redacted: {}", redacted); + assert!( + !redacted.contains("sk-12345"), + "Endpoint api_key should be redacted: {}", + redacted + ); assert!(redacted.contains("[REDACTED]")); } @@ -3973,30 +4160,54 @@ jira: fn test_ipv6_invalid_forms_rejected() { // Invalid IPv6 with too many consecutive colons (::::) assert!(!is_valid_url("http://[::::]"), ":::: should be rejected"); - assert!(!is_valid_url("http://[::::]:8080"), ":::: with port should be rejected"); - assert!(!is_valid_url("http://[1:::2]"), "::: (triple colon) should be rejected"); - assert!(!is_valid_url("http://[:::1]"), "::: at start should be rejected"); - assert!(!is_valid_url("http://[1:::]"), "::: at end should be rejected"); + assert!( + !is_valid_url("http://[::::]:8080"), + ":::: with port should be rejected" + ); + assert!( + !is_valid_url("http://[1:::2]"), + "::: (triple colon) should be rejected" + ); + assert!( + !is_valid_url("http://[:::1]"), + "::: at start should be rejected" + ); + assert!( + !is_valid_url("http://[1:::]"), + "::: at end should be rejected" + ); } #[test] fn test_ipv6_multiple_double_colon_rejected() { // Multiple :: groups are not allowed - assert!(!is_valid_url("http://[::1::2]"), "Multiple :: should be rejected"); - assert!(!is_valid_url("http://[1::2::3]"), "Multiple :: should be rejected"); + assert!( + !is_valid_url("http://[::1::2]"), + "Multiple :: should be rejected" + ); + assert!( + !is_valid_url("http://[1::2::3]"), + "Multiple :: should be rejected" + ); } #[test] fn test_ipv6_too_many_colons_rejected() { // More than 7 colons (8 groups) is invalid - assert!(!is_valid_url("http://[1:2:3:4:5:6:7:8:9]"), "More than 8 groups should be rejected"); + assert!( + !is_valid_url("http://[1:2:3:4:5:6:7:8:9]"), + "More than 8 groups should be rejected" + ); } #[test] fn test_ipv6_double_colon_alone_valid() { // :: alone is valid (represents all zeros - 0:0:0:0:0:0:0:0) assert!(is_valid_url("http://[::]"), ":: alone should be valid"); - assert!(is_valid_url("http://[::]:8080"), ":: with port should be valid"); + assert!( + is_valid_url("http://[::]:8080"), + ":: with port should be valid" + ); } #[test] @@ -4014,7 +4225,11 @@ jira: }; let debug_output = format!("{:?}", llm); - assert!(!debug_output.contains("secret123"), "cloud_endpoint api_key should be redacted in Debug: {}", debug_output); + assert!( + !debug_output.contains("secret123"), + "cloud_endpoint api_key should be redacted in Debug: {}", + debug_output + ); assert!(debug_output.contains("[REDACTED]")); assert!(debug_output.contains("foo=bar")); // Non-sensitive params should remain } @@ -4034,7 +4249,11 @@ jira: }; let redacted = 
llm.redacted(); - assert!(!redacted.contains("mysecret"), "cloud_endpoint token should be redacted in Redact: {}", redacted); + assert!( + !redacted.contains("mysecret"), + "cloud_endpoint token should be redacted in Redact: {}", + redacted + ); assert!(redacted.contains("[REDACTED]")); assert!(redacted.contains("version=v1")); // Non-sensitive params should remain } @@ -4052,7 +4271,11 @@ output_folder: 123 let result = load_config(file.path()); // Should succeed - numeric folder names are legitimate - assert!(result.is_ok(), "Numeric folder names should be accepted: {:?}", result.err()); + assert!( + result.is_ok(), + "Numeric folder names should be accepted: {:?}", + result.err() + ); let config = result.unwrap(); assert_eq!(config.output_folder, "123"); } @@ -4067,7 +4290,11 @@ output_folder: "2026" let file = create_temp_config(yaml); let result = load_config(file.path()); - assert!(result.is_ok(), "Year folder names should be accepted: {:?}", result.err()); + assert!( + result.is_ok(), + "Year folder names should be accepted: {:?}", + result.err() + ); let config = result.unwrap(); assert_eq!(config.output_folder, "2026"); } @@ -4084,9 +4311,16 @@ output_folder: true let result = load_config(file.path()); // Should fail - boolean scalars are rejected even after YAML coercion - assert!(result.is_err(), "Boolean scalars should be rejected for output_folder"); + assert!( + result.is_err(), + "Boolean scalars should be rejected for output_folder" + ); let err_msg = result.unwrap_err().to_string(); - assert!(err_msg.contains("output_folder"), "Error should mention field name: {}", err_msg); + assert!( + err_msg.contains("output_folder"), + "Error should mention field name: {}", + err_msg + ); } #[test] @@ -4101,7 +4335,10 @@ output_folder: null // Should fail - null scalar is rejected // Note: serde_yaml may fail at parsing level for required field, or our validation catches it - assert!(result.is_err(), "Null scalars should be rejected for output_folder"); + assert!( + result.is_err(), + "Null scalars should be rejected for output_folder" + ); } #[test] @@ -4179,11 +4416,19 @@ jira: let err_msg = result.unwrap_err().to_string(); // The error should NOT mention output_folder // It should either mention "token", "jira.token", "configuration field", or be a ParseError - assert!(!err_msg.contains("output_folder"), - "Error for jira.token should not be attributed to output_folder: {}", err_msg); + assert!( + !err_msg.contains("output_folder"), + "Error for jira.token should not be attributed to output_folder: {}", + err_msg + ); // Verify it identifies the correct field or provides actionable guidance - assert!(err_msg.contains("token") || err_msg.contains("configuration field") || err_msg.contains("Parse"), - "Error should mention token field or provide guidance: {}", err_msg); + assert!( + err_msg.contains("token") + || err_msg.contains("configuration field") + || err_msg.contains("Parse"), + "Error should mention token field or provide guidance: {}", + err_msg + ); } #[test] @@ -4203,8 +4448,11 @@ llm: assert!(result.is_err()); let err_msg = result.unwrap_err().to_string(); - assert!(err_msg.contains("llm.cloud_endpoint") && err_msg.contains("missing"), - "Should require cloud_endpoint in auto mode with cloud_enabled: {}", err_msg); + assert!( + err_msg.contains("llm.cloud_endpoint") && err_msg.contains("missing"), + "Should require cloud_endpoint in auto mode with cloud_enabled: {}", + err_msg + ); } #[test] @@ -4224,8 +4472,11 @@ llm: assert!(result.is_err()); let err_msg = 
result.unwrap_err().to_string(); - assert!(err_msg.contains("llm.cloud_model") && err_msg.contains("missing"), - "Should require cloud_model in auto mode with cloud_enabled: {}", err_msg); + assert!( + err_msg.contains("llm.cloud_model") && err_msg.contains("missing"), + "Should require cloud_model in auto mode with cloud_enabled: {}", + err_msg + ); } #[test] @@ -4245,8 +4496,11 @@ llm: assert!(result.is_err()); let err_msg = result.unwrap_err().to_string(); - assert!(err_msg.contains("llm.api_key") && err_msg.contains("missing"), - "Should require api_key in auto mode with cloud_enabled: {}", err_msg); + assert!( + err_msg.contains("llm.api_key") && err_msg.contains("missing"), + "Should require api_key in auto mode with cloud_enabled: {}", + err_msg + ); } #[test] @@ -4267,8 +4521,11 @@ llm: assert!(result.is_err()); let err_msg = result.unwrap_err().to_string(); - assert!(err_msg.contains("llm.api_key") && err_msg.contains("empty"), - "Should reject empty api_key in auto mode with cloud_enabled: {}", err_msg); + assert!( + err_msg.contains("llm.api_key") && err_msg.contains("empty"), + "Should reject empty api_key in auto mode with cloud_enabled: {}", + err_msg + ); } #[test] @@ -4289,8 +4546,11 @@ llm: assert!(result.is_err()); let err_msg = result.unwrap_err().to_string(); - assert!(err_msg.contains("llm.cloud_model") && err_msg.contains("empty"), - "Should reject empty cloud_model in auto mode with cloud_enabled: {}", err_msg); + assert!( + err_msg.contains("llm.cloud_model") && err_msg.contains("empty"), + "Should reject empty cloud_model in auto mode with cloud_enabled: {}", + err_msg + ); } #[test] @@ -4309,7 +4569,11 @@ llm: let file = create_temp_config(yaml); let result = load_config(file.path()); - assert!(result.is_ok(), "Valid auto+cloud_enabled config should work: {:?}", result); + assert!( + result.is_ok(), + "Valid auto+cloud_enabled config should work: {:?}", + result + ); } #[test] @@ -4325,7 +4589,11 @@ llm: let file = create_temp_config(yaml); let result = load_config(file.path()); - assert!(result.is_ok(), "auto mode with cloud_enabled=false should not require cloud fields: {:?}", result); + assert!( + result.is_ok(), + "auto mode with cloud_enabled=false should not require cloud fields: {:?}", + result + ); } // ==================== Review 14 Tests ==================== @@ -4333,30 +4601,66 @@ llm: #[test] fn test_ipv4_invalid_octet_rejected() { // IPv4 addresses with octets > 255 should be rejected - assert!(!is_valid_url("http://999.999.999.999"), "999.999.999.999 should be invalid"); - assert!(!is_valid_url("http://256.1.1.1"), "256 octet should be invalid"); - assert!(!is_valid_url("http://1.256.1.1"), "256 octet should be invalid"); - assert!(!is_valid_url("http://1.1.256.1"), "256 octet should be invalid"); - assert!(!is_valid_url("http://1.1.1.256"), "256 octet should be invalid"); - assert!(!is_valid_url("http://300.200.100.50"), "300 octet should be invalid"); + assert!( + !is_valid_url("http://999.999.999.999"), + "999.999.999.999 should be invalid" + ); + assert!( + !is_valid_url("http://256.1.1.1"), + "256 octet should be invalid" + ); + assert!( + !is_valid_url("http://1.256.1.1"), + "256 octet should be invalid" + ); + assert!( + !is_valid_url("http://1.1.256.1"), + "256 octet should be invalid" + ); + assert!( + !is_valid_url("http://1.1.1.256"), + "256 octet should be invalid" + ); + assert!( + !is_valid_url("http://300.200.100.50"), + "300 octet should be invalid" + ); } #[test] fn test_ipv4_valid_addresses() { // Valid IPv4 addresses should be 
accepted - assert!(is_valid_url("http://192.168.1.1"), "192.168.1.1 should be valid"); + assert!( + is_valid_url("http://192.168.1.1"), + "192.168.1.1 should be valid" + ); assert!(is_valid_url("http://10.0.0.1"), "10.0.0.1 should be valid"); - assert!(is_valid_url("http://255.255.255.255"), "255.255.255.255 should be valid"); + assert!( + is_valid_url("http://255.255.255.255"), + "255.255.255.255 should be valid" + ); assert!(is_valid_url("http://0.0.0.0"), "0.0.0.0 should be valid"); - assert!(is_valid_url("http://127.0.0.1:8080"), "127.0.0.1 with port should be valid"); + assert!( + is_valid_url("http://127.0.0.1:8080"), + "127.0.0.1 with port should be valid" + ); } #[test] fn test_ipv4_leading_zeros_rejected() { // Leading zeros in IPv4 octets should be rejected (strict validation) - assert!(!is_valid_url("http://01.1.1.1"), "Leading zero should be invalid"); - assert!(!is_valid_url("http://1.01.1.1"), "Leading zero should be invalid"); - assert!(!is_valid_url("http://192.168.001.001"), "Leading zeros should be invalid"); + assert!( + !is_valid_url("http://01.1.1.1"), + "Leading zero should be invalid" + ); + assert!( + !is_valid_url("http://1.01.1.1"), + "Leading zero should be invalid" + ); + assert!( + !is_valid_url("http://192.168.001.001"), + "Leading zeros should be invalid" + ); } #[test] @@ -4371,7 +4675,11 @@ llm: // camelCase sensitive params should be redacted let url_accesstoken = "https://api.example.com?accessToken=secret123&version=v1"; let redacted = redact_url_sensitive_params(url_accesstoken); - assert!(!redacted.contains("secret123"), "accessToken should be redacted: {}", redacted); + assert!( + !redacted.contains("secret123"), + "accessToken should be redacted: {}", + redacted + ); assert!(redacted.contains("accessToken=[REDACTED]")); assert!(redacted.contains("version=v1")); } @@ -4396,7 +4704,11 @@ llm: assert!(err.is_some(), "YAML syntax errors should be handled"); let err = err.unwrap(); match err { - SerdeErrorKind::InvalidEnumValue { field, reason, hint } => { + SerdeErrorKind::InvalidEnumValue { + field, + reason, + hint, + } => { assert!(field.contains("YAML")); assert!(reason.contains("invalid")); assert!(hint.contains("indentation") || hint.contains("format")); @@ -4431,8 +4743,14 @@ llm: fn test_ipv4_not_four_octets_treated_as_hostname() { // IPv4-like strings with wrong number of octets are treated as hostnames // These should be valid hostnames (if they pass label validation) - assert!(is_valid_url("http://192.168.1"), "3-octet should be valid hostname"); - assert!(is_valid_url("http://192.168.1.1.1"), "5-octet should be valid hostname"); + assert!( + is_valid_url("http://192.168.1"), + "3-octet should be valid hostname" + ); + assert!( + is_valid_url("http://192.168.1.1.1"), + "5-octet should be valid hostname" + ); } // ==================== Review 18 Tests ==================== @@ -4442,7 +4760,9 @@ llm: // Test that local_endpoint with sensitive query params is redacted in Debug output let llm = LlmConfig { mode: LlmMode::Local, - local_endpoint: Some("http://localhost:11434?api_key=secret-local-key&model=llama".to_string()), + local_endpoint: Some( + "http://localhost:11434?api_key=secret-local-key&model=llama".to_string(), + ), local_model: Some("llama2".to_string()), cloud_enabled: false, cloud_endpoint: None, @@ -4453,12 +4773,21 @@ llm: }; let debug_output = format!("{:?}", llm); - assert!(!debug_output.contains("secret-local-key"), - "local_endpoint api_key should be redacted in Debug: {}", debug_output); - 
assert!(debug_output.contains("[REDACTED]"), - "Debug output should contain [REDACTED]: {}", debug_output); - assert!(debug_output.contains("model=llama"), - "Non-sensitive params should remain: {}", debug_output); + assert!( + !debug_output.contains("secret-local-key"), + "local_endpoint api_key should be redacted in Debug: {}", + debug_output + ); + assert!( + debug_output.contains("[REDACTED]"), + "Debug output should contain [REDACTED]: {}", + debug_output + ); + assert!( + debug_output.contains("model=llama"), + "Non-sensitive params should remain: {}", + debug_output + ); } #[test] @@ -4477,12 +4806,21 @@ llm: }; let redacted = llm.redacted(); - assert!(!redacted.contains("mysecret"), - "local_endpoint token should be redacted in Redact: {}", redacted); - assert!(redacted.contains("[REDACTED]"), - "Redacted output should contain [REDACTED]: {}", redacted); - assert!(redacted.contains("format=json"), - "Non-sensitive params should remain: {}", redacted); + assert!( + !redacted.contains("mysecret"), + "local_endpoint token should be redacted in Redact: {}", + redacted + ); + assert!( + redacted.contains("[REDACTED]"), + "Redacted output should contain [REDACTED]: {}", + redacted + ); + assert!( + redacted.contains("format=json"), + "Non-sensitive params should remain: {}", + redacted + ); } #[test] @@ -4501,53 +4839,81 @@ llm: }; let debug_output = format!("{:?}", llm); - assert!(!debug_output.contains("secret"), - "local_endpoint userinfo password should be redacted: {}", debug_output); - assert!(!debug_output.contains("admin:"), - "local_endpoint userinfo should be fully redacted: {}", debug_output); - assert!(debug_output.contains("[REDACTED]@"), - "Redacted userinfo should be visible: {}", debug_output); + assert!( + !debug_output.contains("secret"), + "local_endpoint userinfo password should be redacted: {}", + debug_output + ); + assert!( + !debug_output.contains("admin:"), + "local_endpoint userinfo should be fully redacted: {}", + debug_output + ); + assert!( + debug_output.contains("[REDACTED]@"), + "Redacted userinfo should be visible: {}", + debug_output + ); let redacted = llm.redacted(); - assert!(!redacted.contains("secret"), - "Redact trait should also redact userinfo: {}", redacted); + assert!( + !redacted.contains("secret"), + "Redact trait should also redact userinfo: {}", + redacted + ); } #[test] fn test_url_with_query_no_path_valid() { // URLs with query string but no path should be valid // Review 18 fix: is_valid_url should accept these - assert!(is_valid_url("https://example.com?foo=bar"), - "URL with query but no path should be valid"); - assert!(is_valid_url("http://localhost?param=value"), - "localhost with query but no path should be valid"); - assert!(is_valid_url("https://api.example.com?key=value&other=123"), - "URL with multiple query params but no path should be valid"); + assert!( + is_valid_url("https://example.com?foo=bar"), + "URL with query but no path should be valid" + ); + assert!( + is_valid_url("http://localhost?param=value"), + "localhost with query but no path should be valid" + ); + assert!( + is_valid_url("https://api.example.com?key=value&other=123"), + "URL with multiple query params but no path should be valid" + ); } #[test] fn test_url_with_fragment_no_path_valid() { // URLs with fragment but no path should be valid - assert!(is_valid_url("https://example.com#section"), - "URL with fragment but no path should be valid"); - assert!(is_valid_url("http://localhost#anchor"), - "localhost with fragment but no path should be valid"); + 
assert!( + is_valid_url("https://example.com#section"), + "URL with fragment but no path should be valid" + ); + assert!( + is_valid_url("http://localhost#anchor"), + "localhost with fragment but no path should be valid" + ); } #[test] fn test_url_with_query_and_fragment_no_path_valid() { // URLs with both query and fragment but no path should be valid - assert!(is_valid_url("https://example.com?foo=bar#section"), - "URL with query and fragment but no path should be valid"); + assert!( + is_valid_url("https://example.com?foo=bar#section"), + "URL with query and fragment but no path should be valid" + ); } #[test] fn test_url_with_path_query_fragment_still_valid() { // Existing URLs with path, query, and fragment should still work - assert!(is_valid_url("https://example.com/path?foo=bar#section"), - "Full URL with path, query, and fragment should be valid"); - assert!(is_valid_url("http://localhost:8080/api/v1?key=value"), - "localhost with path and query should be valid"); + assert!( + is_valid_url("https://example.com/path?foo=bar#section"), + "Full URL with path, query, and fragment should be valid" + ); + assert!( + is_valid_url("http://localhost:8080/api/v1?key=value"), + "localhost with path and query should be valid" + ); } #[test] @@ -4556,7 +4922,8 @@ llm: use std::io::Write; // Create a unique temporary file using thread ID and timestamp to avoid collisions - let unique_id = format!("tf_config_test_{:?}_{}", + let unique_id = format!( + "tf_config_test_{:?}_{}", std::thread::current().id(), std::time::SystemTime::now() .duration_since(std::time::UNIX_EPOCH) @@ -4573,20 +4940,29 @@ llm: } // Create config pointing to the file (not a directory) - let yaml = format!(r#" + let yaml = format!( + r#" project_name: "test" output_folder: "{}" -"#, test_file.to_string_lossy().replace('\\', "/")); +"#, + test_file.to_string_lossy().replace('\\', "/") + ); let file = create_temp_config(&yaml); let config = load_config(file.path()).unwrap(); // check_output_folder_exists should return warning about not being a directory let warning = config.check_output_folder_exists(); - assert!(warning.is_some(), "Should warn when output_folder is a file, not directory"); + assert!( + warning.is_some(), + "Should warn when output_folder is a file, not directory" + ); let warning_msg = warning.unwrap(); - assert!(warning_msg.contains("not a directory"), - "Warning should mention 'not a directory': {}", warning_msg); + assert!( + warning_msg.contains("not a directory"), + "Warning should mention 'not a directory': {}", + warning_msg + ); // Cleanup let _ = std::fs::remove_file(&test_file); @@ -4598,11 +4974,17 @@ output_folder: "{}" // RFC 3986: authority ends at first /, ?, or # let url = "https://example.com#user@mention"; let redacted = redact_url_userinfo(url); - assert_eq!(redacted, url, "@ in fragment should not be treated as userinfo"); + assert_eq!( + redacted, url, + "@ in fragment should not be treated as userinfo" + ); let url2 = "https://example.com/path#section@ref"; let redacted2 = redact_url_userinfo(url2); - assert_eq!(redacted2, url2, "@ in fragment after path should not be treated as userinfo"); + assert_eq!( + redacted2, url2, + "@ in fragment after path should not be treated as userinfo" + ); } // ===== Review 21 fixes ===== @@ -4614,7 +4996,10 @@ output_folder: "{}" let url = "https://admin:p@ssword@example.com/api"; let redacted = redact_url_sensitive_params(url); assert!(!redacted.contains("admin"), "Username should be redacted"); - assert!(!redacted.contains("p@ssword"), "Password with 
@ should be redacted"); + assert!( + !redacted.contains("p@ssword"), + "Password with @ should be redacted" + ); assert!(redacted.contains("[REDACTED]@example.com")); assert_eq!(redacted, "https://[REDACTED]@example.com/api"); } @@ -4625,7 +5010,10 @@ output_folder: "{}" let url = "https://user:p@ss@word@host.example.com:8080/path"; let redacted = redact_url_sensitive_params(url); assert!(!redacted.contains("user"), "Username should be redacted"); - assert!(!redacted.contains("p@ss@word"), "Password with multiple @ should be redacted"); + assert!( + !redacted.contains("p@ss@word"), + "Password with multiple @ should be redacted" + ); assert!(redacted.contains(":8080")); assert_eq!(redacted, "https://[REDACTED]@host.example.com:8080/path"); } @@ -4639,7 +5027,10 @@ output_folder: "{}" assert!(!redacted.contains("admin")); assert!(!redacted.contains("C0mpl3x!P@ss%23123")); assert!(redacted.contains("token=[REDACTED]")); - assert_eq!(redacted, "https://[REDACTED]@api.example.com?token=[REDACTED]"); + assert_eq!( + redacted, + "https://[REDACTED]@api.example.com?token=[REDACTED]" + ); } #[test] @@ -4647,7 +5038,10 @@ output_folder: "{}" // MEDIUM: URL-encoded parameter name api%5Fkey (api_key) should be redacted let url = "https://example.com?api%5Fkey=secret123"; let redacted = redact_url_sensitive_params(url); - assert!(!redacted.contains("secret123"), "Value of encoded api_key should be redacted"); + assert!( + !redacted.contains("secret123"), + "Value of encoded api_key should be redacted" + ); assert!(redacted.contains("api%5Fkey=[REDACTED]")); } @@ -4656,7 +5050,10 @@ output_folder: "{}" // URL-encoded 'token' (%74%6F%6B%65%6E) let url = "https://example.com?%74%6F%6B%65%6E=mysecret"; let redacted = redact_url_sensitive_params(url); - assert!(!redacted.contains("mysecret"), "Value of encoded token should be redacted"); + assert!( + !redacted.contains("mysecret"), + "Value of encoded token should be redacted" + ); assert!(redacted.contains("[REDACTED]")); } @@ -4667,7 +5064,10 @@ output_folder: "{}" let redacted = redact_url_sensitive_params(url); assert!(!redacted.contains("secret1")); assert!(!redacted.contains("secret2")); - assert!(redacted.contains("foo=bar"), "Non-sensitive param should remain"); + assert!( + redacted.contains("foo=bar"), + "Non-sensitive param should remain" + ); assert!(redacted.contains("api%5Fkey=[REDACTED]")); assert!(redacted.contains("password=[REDACTED]")); } @@ -4677,7 +5077,10 @@ output_folder: "{}" // URL-encoded param in fragment (OAuth implicit flow) let url = "https://example.com#access%5Ftoken=xyz123"; let redacted = redact_url_sensitive_params(url); - assert!(!redacted.contains("xyz123"), "Value of encoded access_token should be redacted"); + assert!( + !redacted.contains("xyz123"), + "Value of encoded access_token should be redacted" + ); assert!(redacted.contains("access%5Ftoken=[REDACTED]")); } @@ -4697,10 +5100,22 @@ output_folder: "{}" let url = "https://user:p@ss@example.com?api%5Fkey=secret&access%5Ftoken=mytoken123"; let redacted = redact_url_sensitive_params(url); assert!(!redacted.contains("user:"), "Username should be redacted"); - assert!(!redacted.contains("p@ss"), "Password with @ should be redacted"); - assert!(!redacted.contains("secret"), "api_key value should be redacted"); - assert!(!redacted.contains("mytoken123"), "access_token value should be redacted"); - assert_eq!(redacted, "https://[REDACTED]@example.com?api%5Fkey=[REDACTED]&access%5Ftoken=[REDACTED]"); + assert!( + !redacted.contains("p@ss"), + "Password with @ should be 
redacted" + ); + assert!( + !redacted.contains("secret"), + "api_key value should be redacted" + ); + assert!( + !redacted.contains("mytoken123"), + "access_token value should be redacted" + ); + assert_eq!( + redacted, + "https://[REDACTED]@example.com?api%5Fkey=[REDACTED]&access%5Ftoken=[REDACTED]" + ); } // ===================================================================== @@ -4713,8 +5128,14 @@ output_folder: "{}" // %25 is encoded %, so api%255Fkey decodes to api%5Fkey which decodes to api_key let url = "https://example.com?api%255Fkey=secret123"; let redacted = redact_url_sensitive_params(url); - assert!(!redacted.contains("secret123"), "Double-encoded api_key value should be redacted"); - assert!(redacted.contains("=[REDACTED]"), "Should contain redacted value"); + assert!( + !redacted.contains("secret123"), + "Double-encoded api_key value should be redacted" + ); + assert!( + redacted.contains("=[REDACTED]"), + "Should contain redacted value" + ); } #[test] @@ -4724,7 +5145,10 @@ output_folder: "{}" let url = "https://example.com?api%255Fkey=mysecret"; let redacted = redact_url_sensitive_params(url); // After double-decode, api%255Fkey becomes api_key which is in sensitive list - assert!(!redacted.contains("mysecret"), "Double-encoded api_key should be redacted"); + assert!( + !redacted.contains("mysecret"), + "Double-encoded api_key should be redacted" + ); } #[test] @@ -4733,8 +5157,14 @@ output_folder: "{}" // api%255Fkey double-decodes to api_key which is in the sensitive list let url = "https://example.com?api%255Fkey=pass123&foo=bar"; let redacted = redact_url_sensitive_params(url); - assert!(!redacted.contains("pass123"), "Double-encoded api_key value should be redacted"); - assert!(redacted.contains("foo=bar"), "Non-sensitive param should remain"); + assert!( + !redacted.contains("pass123"), + "Double-encoded api_key value should be redacted" + ); + assert!( + redacted.contains("foo=bar"), + "Non-sensitive param should remain" + ); } #[test] @@ -4742,8 +5172,14 @@ output_folder: "{}" // Kebab-case: api-key (with hyphen) let url = "https://example.com?api-key=secret-value"; let redacted = redact_url_sensitive_params(url); - assert!(!redacted.contains("secret-value"), "api-key value should be redacted"); - assert!(redacted.contains("api-key=[REDACTED]"), "api-key param should be redacted"); + assert!( + !redacted.contains("secret-value"), + "api-key value should be redacted" + ); + assert!( + redacted.contains("api-key=[REDACTED]"), + "api-key param should be redacted" + ); } #[test] @@ -4751,16 +5187,28 @@ output_folder: "{}" // Kebab-case: access-token (with hyphen) let url = "https://example.com?access-token=mytoken123"; let redacted = redact_url_sensitive_params(url); - assert!(!redacted.contains("mytoken123"), "access-token value should be redacted"); - assert!(redacted.contains("access-token=[REDACTED]"), "access-token param should be redacted"); + assert!( + !redacted.contains("mytoken123"), + "access-token value should be redacted" + ); + assert!( + redacted.contains("access-token=[REDACTED]"), + "access-token param should be redacted" + ); } #[test] fn test_redact_url_kebab_case_client_secret() { let url = "https://example.com?client-secret=very-secret&other=value"; let redacted = redact_url_sensitive_params(url); - assert!(!redacted.contains("very-secret"), "client-secret value should be redacted"); - assert!(redacted.contains("other=value"), "Non-sensitive param should remain"); + assert!( + !redacted.contains("very-secret"), + "client-secret value should be 
redacted" + ); + assert!( + redacted.contains("other=value"), + "Non-sensitive param should remain" + ); } #[test] @@ -4768,10 +5216,22 @@ output_folder: "{}" // Multiple kebab-case sensitive params let url = "https://example.com?api-key=key1&access-token=tok1&session-token=sess1&foo=bar"; let redacted = redact_url_sensitive_params(url); - assert!(!redacted.contains("key1"), "api-key value should be redacted"); - assert!(!redacted.contains("tok1"), "access-token value should be redacted"); - assert!(!redacted.contains("sess1"), "session-token value should be redacted"); - assert!(redacted.contains("foo=bar"), "Non-sensitive param should remain"); + assert!( + !redacted.contains("key1"), + "api-key value should be redacted" + ); + assert!( + !redacted.contains("tok1"), + "access-token value should be redacted" + ); + assert!( + !redacted.contains("sess1"), + "session-token value should be redacted" + ); + assert!( + redacted.contains("foo=bar"), + "Non-sensitive param should remain" + ); } #[test] @@ -4789,11 +5249,20 @@ jira: let result = load_config(&file_path); std::fs::remove_file(&file_path).ok(); - assert!(result.is_err(), "Should reject endpoint with trailing whitespace"); + assert!( + result.is_err(), + "Should reject endpoint with trailing whitespace" + ); let err = result.unwrap_err(); let err_msg = err.to_string(); - assert!(err_msg.contains("jira.endpoint"), "Error should mention jira.endpoint"); - assert!(err_msg.contains("whitespace"), "Error should mention whitespace"); + assert!( + err_msg.contains("jira.endpoint"), + "Error should mention jira.endpoint" + ); + assert!( + err_msg.contains("whitespace"), + "Error should mention whitespace" + ); } #[test] @@ -4811,9 +5280,15 @@ jira: let result = load_config(&file_path); std::fs::remove_file(&file_path).ok(); - assert!(result.is_err(), "Should reject endpoint with leading whitespace"); + assert!( + result.is_err(), + "Should reject endpoint with leading whitespace" + ); let err = result.unwrap_err(); - assert!(err.to_string().contains("whitespace"), "Error should mention whitespace"); + assert!( + err.to_string().contains("whitespace"), + "Error should mention whitespace" + ); } #[test] @@ -4831,9 +5306,15 @@ squash: let result = load_config(&file_path); std::fs::remove_file(&file_path).ok(); - assert!(result.is_err(), "Should reject squash endpoint with trailing whitespace"); + assert!( + result.is_err(), + "Should reject squash endpoint with trailing whitespace" + ); let err = result.unwrap_err(); - assert!(err.to_string().contains("squash.endpoint"), "Error should mention squash.endpoint"); + assert!( + err.to_string().contains("squash.endpoint"), + "Error should mention squash.endpoint" + ); } #[test] @@ -4852,9 +5333,15 @@ llm: let result = load_config(&file_path); std::fs::remove_file(&file_path).ok(); - assert!(result.is_err(), "Should reject local_endpoint with whitespace"); + assert!( + result.is_err(), + "Should reject local_endpoint with whitespace" + ); let err = result.unwrap_err(); - assert!(err.to_string().contains("llm.local_endpoint"), "Error should mention llm.local_endpoint"); + assert!( + err.to_string().contains("llm.local_endpoint"), + "Error should mention llm.local_endpoint" + ); } #[test] @@ -4876,9 +5363,15 @@ llm: let result = load_config(&file_path); std::fs::remove_file(&file_path).ok(); - assert!(result.is_err(), "Should reject cloud_endpoint with trailing whitespace"); + assert!( + result.is_err(), + "Should reject cloud_endpoint with trailing whitespace" + ); let err = result.unwrap_err(); - 
assert!(err.to_string().contains("llm.cloud_endpoint"), "Error should mention llm.cloud_endpoint"); + assert!( + err.to_string().contains("llm.cloud_endpoint"), + "Error should mention llm.cloud_endpoint" + ); } #[test] @@ -4911,9 +5404,18 @@ llm: // RFC 1866 allows semicolon as query parameter separator in HTML forms let url = "https://example.com?token=secret;foo=bar"; let redacted = redact_url_sensitive_params(url); - assert!(redacted.contains("token=[REDACTED]"), "Should redact token with semicolon separator"); - assert!(redacted.contains("foo=bar"), "Should preserve non-sensitive param"); - assert!(redacted.contains(";"), "Should preserve semicolon separator"); + assert!( + redacted.contains("token=[REDACTED]"), + "Should redact token with semicolon separator" + ); + assert!( + redacted.contains("foo=bar"), + "Should preserve non-sensitive param" + ); + assert!( + redacted.contains(";"), + "Should preserve semicolon separator" + ); } #[test] @@ -4921,9 +5423,18 @@ llm: // All params separated by semicolons let url = "https://example.com?api_key=sk123;password=secret;user=john"; let redacted = redact_url_sensitive_params(url); - assert!(redacted.contains("api_key=[REDACTED]"), "Should redact api_key"); - assert!(redacted.contains("password=[REDACTED]"), "Should redact password"); - assert!(redacted.contains("user=john"), "Should preserve non-sensitive param"); + assert!( + redacted.contains("api_key=[REDACTED]"), + "Should redact api_key" + ); + assert!( + redacted.contains("password=[REDACTED]"), + "Should redact password" + ); + assert!( + redacted.contains("user=john"), + "Should preserve non-sensitive param" + ); } #[test] @@ -4932,10 +5443,19 @@ llm: let url = "https://example.com?token=abc&secret=def;api_key=ghi;foo=bar&auth=xyz"; let redacted = redact_url_sensitive_params(url); assert!(redacted.contains("token=[REDACTED]"), "Should redact token"); - assert!(redacted.contains("secret=[REDACTED]"), "Should redact secret"); - assert!(redacted.contains("api_key=[REDACTED]"), "Should redact api_key"); + assert!( + redacted.contains("secret=[REDACTED]"), + "Should redact secret" + ); + assert!( + redacted.contains("api_key=[REDACTED]"), + "Should redact api_key" + ); assert!(redacted.contains("auth=[REDACTED]"), "Should redact auth"); - assert!(redacted.contains("foo=bar"), "Should preserve non-sensitive param"); + assert!( + redacted.contains("foo=bar"), + "Should preserve non-sensitive param" + ); } #[test] @@ -4943,8 +5463,14 @@ llm: // Whitespace around parameter key let url = "https://example.com?token =secret"; let redacted = redact_url_sensitive_params(url); - assert!(redacted.contains("[REDACTED]"), "Should redact token with trailing whitespace in key"); - assert!(!redacted.contains("secret"), "Should not expose secret value"); + assert!( + redacted.contains("[REDACTED]"), + "Should redact token with trailing whitespace in key" + ); + assert!( + !redacted.contains("secret"), + "Should not expose secret value" + ); } #[test] @@ -4952,8 +5478,14 @@ llm: // Leading whitespace in parameter key let url = "https://example.com? 
api_key=secret"; let redacted = redact_url_sensitive_params(url); - assert!(redacted.contains("[REDACTED]"), "Should redact api_key with leading whitespace"); - assert!(!redacted.contains("secret"), "Should not expose secret value"); + assert!( + redacted.contains("[REDACTED]"), + "Should redact api_key with leading whitespace" + ); + assert!( + !redacted.contains("secret"), + "Should not expose secret value" + ); } #[test] @@ -4961,38 +5493,83 @@ llm: // Whitespace around key with semicolon separator let url = "https://example.com?foo=bar; token =mysecret;other=value"; let redacted = redact_url_sensitive_params(url); - assert!(redacted.contains("[REDACTED]"), "Should redact token with whitespace and semicolon"); - assert!(!redacted.contains("mysecret"), "Should not expose secret value"); + assert!( + redacted.contains("[REDACTED]"), + "Should redact token with whitespace and semicolon" + ); + assert!( + !redacted.contains("mysecret"), + "Should not expose secret value" + ); assert!(redacted.contains("foo=bar"), "Should preserve first param"); - assert!(redacted.contains("other=value"), "Should preserve last param"); + assert!( + redacted.contains("other=value"), + "Should preserve last param" + ); } #[test] fn test_ipv6_zone_id_empty_rejected() { // Empty zone ID is invalid (e.g., "fe80::1%" with nothing after %) - assert!(!is_valid_url("http://[fe80::1%]:8080"), "Should reject empty zone ID"); - assert!(!is_valid_url("http://[fe80::1%]"), "Should reject empty zone ID without port"); - assert!(!is_valid_url("https://[::1%]"), "Should reject empty zone ID on localhost"); + assert!( + !is_valid_url("http://[fe80::1%]:8080"), + "Should reject empty zone ID" + ); + assert!( + !is_valid_url("http://[fe80::1%]"), + "Should reject empty zone ID without port" + ); + assert!( + !is_valid_url("https://[::1%]"), + "Should reject empty zone ID on localhost" + ); } #[test] fn test_ipv6_zone_id_valid() { // Valid zone IDs (interface names) - assert!(is_valid_url("http://[fe80::1%eth0]:8080"), "Should accept zone ID with interface name"); - assert!(is_valid_url("http://[fe80::1%eth0]"), "Should accept zone ID without port"); - assert!(is_valid_url("http://[fe80::1%wlan0]:3000"), "Should accept zone ID with wlan"); - assert!(is_valid_url("http://[fe80::1%en0]:80"), "Should accept zone ID with en0"); - assert!(is_valid_url("http://[fe80::1%lo]:8080"), "Should accept zone ID with lo"); + assert!( + is_valid_url("http://[fe80::1%eth0]:8080"), + "Should accept zone ID with interface name" + ); + assert!( + is_valid_url("http://[fe80::1%eth0]"), + "Should accept zone ID without port" + ); + assert!( + is_valid_url("http://[fe80::1%wlan0]:3000"), + "Should accept zone ID with wlan" + ); + assert!( + is_valid_url("http://[fe80::1%en0]:80"), + "Should accept zone ID with en0" + ); + assert!( + is_valid_url("http://[fe80::1%lo]:8080"), + "Should accept zone ID with lo" + ); // URL-encoded % is %25 - assert!(is_valid_url("http://[fe80::1%25eth0]:8080"), "Should accept URL-encoded zone ID"); + assert!( + is_valid_url("http://[fe80::1%25eth0]:8080"), + "Should accept URL-encoded zone ID" + ); } #[test] fn test_ipv6_zone_id_invalid_chars() { // Zone ID with invalid characters (only alphanumeric, hyphen, underscore, dot allowed) - assert!(!is_valid_url("http://[fe80::1%eth/0]:8080"), "Should reject zone ID with slash"); - assert!(!is_valid_url("http://[fe80::1%eth@0]:8080"), "Should reject zone ID with @"); - assert!(!is_valid_url("http://[fe80::1%eth 0]:8080"), "Should reject zone ID with space"); + assert!( + 
!is_valid_url("http://[fe80::1%eth/0]:8080"), + "Should reject zone ID with slash" + ); + assert!( + !is_valid_url("http://[fe80::1%eth@0]:8080"), + "Should reject zone ID with @" + ); + assert!( + !is_valid_url("http://[fe80::1%eth 0]:8080"), + "Should reject zone ID with space" + ); } #[test] @@ -5000,10 +5577,19 @@ llm: // Semicolon separator in fragment (OAuth implicit flow with semicolon) let url = "https://app.com/callback#token=abc;access_token=secret"; let redacted = redact_url_sensitive_params(url); - assert!(redacted.contains("token=[REDACTED]"), "Should redact token in fragment"); - assert!(redacted.contains("access_token=[REDACTED]"), "Should redact access_token in fragment"); + assert!( + redacted.contains("token=[REDACTED]"), + "Should redact token in fragment" + ); + assert!( + redacted.contains("access_token=[REDACTED]"), + "Should redact access_token in fragment" + ); assert!(!redacted.contains("abc"), "Should not expose token value"); - assert!(!redacted.contains("secret"), "Should not expose access_token value"); + assert!( + !redacted.contains("secret"), + "Should not expose access_token value" + ); } #[test] @@ -5011,10 +5597,19 @@ llm: // Both query and fragment with semicolon separators let url = "https://example.com?api_key=key1;foo=bar#token=tok1;baz=qux"; let redacted = redact_url_sensitive_params(url); - assert!(redacted.contains("api_key=[REDACTED]"), "Should redact api_key in query"); - assert!(redacted.contains("token=[REDACTED]"), "Should redact token in fragment"); + assert!( + redacted.contains("api_key=[REDACTED]"), + "Should redact api_key in query" + ); + assert!( + redacted.contains("token=[REDACTED]"), + "Should redact token in fragment" + ); assert!(redacted.contains("foo=bar"), "Should preserve foo in query"); - assert!(redacted.contains("baz=qux"), "Should preserve baz in fragment"); + assert!( + redacted.contains("baz=qux"), + "Should preserve baz in fragment" + ); } // ===== Coverage Plan Tests: check_output_folder_exists, active_profile_summary, redact edge cases ===== @@ -5037,9 +5632,16 @@ llm: }; let warning = config.check_output_folder_exists(); - assert!(warning.is_some(), "Should return warning for nonexistent folder"); + assert!( + warning.is_some(), + "Should return warning for nonexistent folder" + ); let msg = warning.unwrap(); - assert!(msg.contains("does not exist"), "Warning should mention 'does not exist': {}", msg); + assert!( + msg.contains("does not exist"), + "Warning should mention 'does not exist': {}", + msg + ); } #[test] @@ -5061,9 +5663,16 @@ llm: }; let warning = config.check_output_folder_exists(); - assert!(warning.is_some(), "Should return warning when output_folder is a file"); + assert!( + warning.is_some(), + "Should return warning when output_folder is a file" + ); let msg = warning.unwrap(); - assert!(msg.contains("not a directory"), "Warning should mention 'not a directory': {}", msg); + assert!( + msg.contains("not a directory"), + "Warning should mention 'not a directory': {}", + msg + ); } #[test] @@ -5083,7 +5692,10 @@ llm: }; let warning = config.check_output_folder_exists(); - assert!(warning.is_none(), "Should return None for existing directory"); + assert!( + warning.is_none(), + "Should return None for existing directory" + ); } #[test] @@ -5101,18 +5713,36 @@ llm: }; let summary = config.active_profile_summary(); - assert!(summary.contains("No profile active (using base configuration)"), - "Should indicate no profile is active: {}", summary); - assert!(summary.contains("Output folder: ./output"), - 
"Should show output_folder: {}", summary); - assert!(summary.contains("Jira: not configured"), - "Should show Jira not configured: {}", summary); - assert!(summary.contains("Squash: not configured"), - "Should show Squash not configured: {}", summary); - assert!(summary.contains("LLM: not configured"), - "Should show LLM not configured: {}", summary); - assert!(summary.contains("Templates: not configured"), - "Should show Templates not configured: {}", summary); + assert!( + summary.contains("No profile active (using base configuration)"), + "Should indicate no profile is active: {}", + summary + ); + assert!( + summary.contains("Output folder: ./output"), + "Should show output_folder: {}", + summary + ); + assert!( + summary.contains("Jira: not configured"), + "Should show Jira not configured: {}", + summary + ); + assert!( + summary.contains("Squash: not configured"), + "Should show Squash not configured: {}", + summary + ); + assert!( + summary.contains("LLM: not configured"), + "Should show LLM not configured: {}", + summary + ); + assert!( + summary.contains("Templates: not configured"), + "Should show Templates not configured: {}", + summary + ); } #[test] @@ -5147,21 +5777,41 @@ llm: }; let summary = config.active_profile_summary(); - assert!(summary.contains("Active profile: dev"), - "Should show active profile name: {}", summary); - assert!(summary.contains("Output folder: ./dev-output"), - "Should show output_folder: {}", summary); - assert!(summary.contains("Jira: https://jira.dev.example.com"), - "Should show Jira endpoint: {}", summary); - assert!(summary.contains("Squash: https://squash.dev.example.com"), - "Should show Squash endpoint: {}", summary); - assert!(summary.contains("LLM: local"), - "Should show LLM mode: {}", summary); + assert!( + summary.contains("Active profile: dev"), + "Should show active profile name: {}", + summary + ); + assert!( + summary.contains("Output folder: ./dev-output"), + "Should show output_folder: {}", + summary + ); + assert!( + summary.contains("Jira: https://jira.dev.example.com"), + "Should show Jira endpoint: {}", + summary + ); + assert!( + summary.contains("Squash: https://squash.dev.example.com"), + "Should show Squash endpoint: {}", + summary + ); + assert!( + summary.contains("LLM: local"), + "Should show LLM mode: {}", + summary + ); // Secrets should NOT appear in summary - assert!(!summary.contains("secret-token"), - "Token should not appear in summary: {}", summary); - assert!(!summary.contains("pass"), - "Password should not appear in summary"); + assert!( + !summary.contains("secret-token"), + "Token should not appear in summary: {}", + summary + ); + assert!( + !summary.contains("pass"), + "Password should not appear in summary" + ); } #[test] @@ -5169,17 +5819,29 @@ llm: // P1: Case-insensitive matching - Token, API_KEY, PASSWORD (uppercase variants) let url_token = "https://example.com?Token=secret1"; let redacted = redact_url_sensitive_params(url_token); - assert!(!redacted.contains("secret1"), "Token (capitalized) should be redacted: {}", redacted); + assert!( + !redacted.contains("secret1"), + "Token (capitalized) should be redacted: {}", + redacted + ); assert!(redacted.contains("[REDACTED]")); let url_api_key = "https://example.com?API_KEY=secret2"; let redacted = redact_url_sensitive_params(url_api_key); - assert!(!redacted.contains("secret2"), "API_KEY (uppercase) should be redacted: {}", redacted); + assert!( + !redacted.contains("secret2"), + "API_KEY (uppercase) should be redacted: {}", + redacted + ); 
assert!(redacted.contains("[REDACTED]")); let url_password = "https://example.com?PASSWORD=secret3"; let redacted = redact_url_sensitive_params(url_password); - assert!(!redacted.contains("secret3"), "PASSWORD (uppercase) should be redacted: {}", redacted); + assert!( + !redacted.contains("secret3"), + "PASSWORD (uppercase) should be redacted: {}", + redacted + ); assert!(redacted.contains("[REDACTED]")); } @@ -5188,10 +5850,16 @@ llm: // P1: Fragment containing sensitive param (#token=secret) is redacted let url = "https://example.com/page#token=my-secret-value"; let redacted = redact_url_sensitive_params(url); - assert!(!redacted.contains("my-secret-value"), - "Fragment token value should be redacted: {}", redacted); - assert!(redacted.contains("token=[REDACTED]"), - "Should show redacted token in fragment: {}", redacted); + assert!( + !redacted.contains("my-secret-value"), + "Fragment token value should be redacted: {}", + redacted + ); + assert!( + redacted.contains("token=[REDACTED]"), + "Should show redacted token in fragment: {}", + redacted + ); } #[test] @@ -5199,7 +5867,10 @@ llm: // P1: URL with no query params returns unchanged let url = "https://example.com/api/v2/resource"; let redacted = redact_url_sensitive_params(url); - assert_eq!(redacted, url, "URL without query params should be unchanged"); + assert_eq!( + redacted, url, + "URL without query params should be unchanged" + ); } #[test] @@ -5207,7 +5878,10 @@ llm: // P1: URL with only non-sensitive params returns unchanged let url = "https://example.com?page=1&limit=50&sort=name"; let redacted = redact_url_sensitive_params(url); - assert_eq!(redacted, url, "URL with only non-sensitive params should be unchanged"); + assert_eq!( + redacted, url, + "URL with only non-sensitive params should be unchanged" + ); } #[test] @@ -5215,13 +5889,41 @@ llm: // P1: URL with mix of sensitive and non-sensitive params - only sensitive redacted let url = "https://example.com?user=john&token=secret123&page=1&api_key=sk-abc&sort=asc"; let redacted = redact_url_sensitive_params(url); - assert!(redacted.contains("user=john"), "Non-sensitive 'user' should remain: {}", redacted); - assert!(redacted.contains("page=1"), "Non-sensitive 'page' should remain: {}", redacted); - assert!(redacted.contains("sort=asc"), "Non-sensitive 'sort' should remain: {}", redacted); - assert!(redacted.contains("token=[REDACTED]"), "Sensitive 'token' should be redacted: {}", redacted); - assert!(redacted.contains("api_key=[REDACTED]"), "Sensitive 'api_key' should be redacted: {}", redacted); - assert!(!redacted.contains("secret123"), "token value should not appear: {}", redacted); - assert!(!redacted.contains("sk-abc"), "api_key value should not appear: {}", redacted); + assert!( + redacted.contains("user=john"), + "Non-sensitive 'user' should remain: {}", + redacted + ); + assert!( + redacted.contains("page=1"), + "Non-sensitive 'page' should remain: {}", + redacted + ); + assert!( + redacted.contains("sort=asc"), + "Non-sensitive 'sort' should remain: {}", + redacted + ); + assert!( + redacted.contains("token=[REDACTED]"), + "Sensitive 'token' should be redacted: {}", + redacted + ); + assert!( + redacted.contains("api_key=[REDACTED]"), + "Sensitive 'api_key' should be redacted: {}", + redacted + ); + assert!( + !redacted.contains("secret123"), + "token value should not appear: {}", + redacted + ); + assert!( + !redacted.contains("sk-abc"), + "api_key value should not appear: {}", + redacted + ); } #[test] diff --git a/crates/tf-logging/src/init.rs 
b/crates/tf-logging/src/init.rs index 38b9ff2..3a19467 100644 --- a/crates/tf-logging/src/init.rs +++ b/crates/tf-logging/src/init.rs @@ -133,9 +133,7 @@ pub fn init_logging(config: &LoggingConfig) -> Result { }); } - let subscriber = tracing_subscriber::registry() - .with(filter) - .with(fmt_layer); + let subscriber = tracing_subscriber::registry().with(filter).with(fmt_layer); // Use set_default (thread-local) to allow multiple init calls in tests let dispatch = Dispatch::new(subscriber); @@ -151,8 +149,8 @@ pub fn init_logging(config: &LoggingConfig) -> Result { #[cfg(test)] mod tests { use super::*; - use assert_matches::assert_matches; use crate::config::LoggingConfig; + use assert_matches::assert_matches; use std::fs; use tempfile::tempdir; @@ -173,7 +171,10 @@ mod tests { let guard = init_logging(&config).unwrap(); // Verify directory was created - assert!(log_dir.exists(), "Log directory should be created by init_logging"); + assert!( + log_dir.exists(), + "Log directory should be created by init_logging" + ); assert!(log_dir.is_dir()); drop(guard); @@ -207,9 +208,12 @@ mod tests { // Read and parse log file let log_file = find_log_file(&log_dir); let content = fs::read_to_string(&log_file).unwrap(); - let last_line = content.lines().last().expect("Log file should have at least one line"); - let json: serde_json::Value = serde_json::from_str(last_line) - .expect("Log line should be valid JSON"); + let last_line = content + .lines() + .last() + .expect("Log file should have at least one line"); + let json: serde_json::Value = + serde_json::from_str(last_line).expect("Log line should be valid JSON"); // Required fields: timestamp, level, message, target assert!(json.get("timestamp").is_some(), "Missing 'timestamp' field"); @@ -221,7 +225,10 @@ mod tests { // Timestamp must be ISO 8601 (contains 'T') let ts = json["timestamp"].as_str().unwrap(); - assert!(ts.contains('T'), "Timestamp should be ISO 8601 format, got: {ts}"); + assert!( + ts.contains('T'), + "Timestamp should be ISO 8601 format, got: {ts}" + ); } // Test 0.5-UNIT-005: Logs written to configured directory @@ -245,7 +252,11 @@ mod tests { drop(guard); // Verify log directory was created at configured path - assert!(log_dir.exists(), "Log directory not created at: {:?}", log_dir); + assert!( + log_dir.exists(), + "Log directory not created at: {:?}", + log_dir + ); // Verify at least one log file exists let file_count = fs::read_dir(&log_dir) @@ -258,7 +269,10 @@ mod tests { // Verify content let log_file = find_log_file(&log_dir); let content = fs::read_to_string(&log_file).unwrap(); - assert!(content.contains("Test log event"), "Log file missing expected event"); + assert!( + content.contains("Test log event"), + "Log file missing expected event" + ); } // Test 0.5-UNIT-006: Default log level is info @@ -284,10 +298,14 @@ mod tests { let log_file = find_log_file(&log_dir); let content = fs::read_to_string(&log_file).unwrap(); - assert!(!content.contains("This debug message should not appear"), - "Debug message should be filtered at info level"); - assert!(content.contains("This info message should appear"), - "Info message should pass at info level"); + assert!( + !content.contains("This debug message should not appear"), + "Debug message should be filtered at info level" + ); + assert!( + content.contains("This info message should appear"), + "Info message should pass at info level" + ); } // Test 0.5-UNIT-007: RUST_LOG overrides configured level @@ -339,8 +357,10 @@ mod tests { let log_file = find_log_file(&log_dir); 
let content = fs::read_to_string(&log_file).unwrap(); - assert!(content.contains("Debug visible via RUST_LOG override"), - "RUST_LOG=debug should override config level and show debug messages"); + assert!( + content.contains("Debug visible via RUST_LOG override"), + "RUST_LOG=debug should override config level and show debug messages" + ); // _env_guard dropped here, cleaning up RUST_LOG } @@ -366,8 +386,10 @@ mod tests { let content = fs::read_to_string(&log_file).unwrap(); // ANSI escape codes start with \x1b[ - assert!(!content.contains("\x1b["), - "Log file should not contain ANSI escape codes"); + assert!( + !content.contains("\x1b["), + "Log file should not contain ANSI escape codes" + ); assert!(content.contains("Message to verify no ANSI escape codes")); } @@ -390,18 +412,28 @@ mod tests { let debug_output = format!("{:?}", guard); // Must contain the struct name - assert!(debug_output.contains("LogGuard"), - "Debug output should contain 'LogGuard', got: {debug_output}"); + assert!( + debug_output.contains("LogGuard"), + "Debug output should contain 'LogGuard', got: {debug_output}" + ); // Must NOT expose internal field names - assert!(!debug_output.contains("_worker_guard"), - "Debug output must not expose _worker_guard field"); - assert!(!debug_output.contains("_dispatch_guard"), - "Debug output must not expose _dispatch_guard field"); - assert!(!debug_output.contains("WorkerGuard"), - "Debug output must not expose WorkerGuard type"); - assert!(!debug_output.contains("DefaultGuard"), - "Debug output must not expose DefaultGuard type"); + assert!( + !debug_output.contains("_worker_guard"), + "Debug output must not expose _worker_guard field" + ); + assert!( + !debug_output.contains("_dispatch_guard"), + "Debug output must not expose _dispatch_guard field" + ); + assert!( + !debug_output.contains("WorkerGuard"), + "Debug output must not expose WorkerGuard type" + ); + assert!( + !debug_output.contains("DefaultGuard"), + "Debug output must not expose DefaultGuard type" + ); drop(guard); } @@ -437,10 +469,14 @@ mod tests { // After drop, verify logs were flushed to disk let log_file = find_log_file(&log_dir); let content = fs::read_to_string(&log_file).unwrap(); - assert!(content.contains("lifecycle test message"), - "Log should contain message emitted before guard move"); - assert!(content.contains("after move message"), - "Log should contain message emitted after guard move"); + assert!( + content.contains("lifecycle test message"), + "Log should contain message emitted before guard move" + ); + assert!( + content.contains("after move message"), + "Log should contain message emitted after guard move" + ); } // Test [AI-Review-R3 M2]: init_logging returns DirectoryCreationFailed on unwritable path @@ -479,7 +515,10 @@ mod tests { }; let result = init_logging(&config); - assert!(result.is_err(), "Malformed filter expression should return an error"); + assert!( + result.is_err(), + "Malformed filter expression should return an error" + ); let err = result.unwrap_err(); assert_matches!(err, LoggingError::InvalidLogLevel { ref level, ref hint } => { @@ -501,7 +540,10 @@ mod tests { }; let guard = init_logging(&config); - assert!(guard.is_ok(), "Complex filter expression should be accepted"); + assert!( + guard.is_ok(), + "Complex filter expression should be accepted" + ); // Verify debug events from tf_logging target pass the filter tracing::debug!(target: "tf_logging", "debug from tf_logging target"); @@ -513,12 +555,43 @@ mod tests { let log_file = find_log_file(&log_dir); let content 
= fs::read_to_string(&log_file).unwrap(); - assert!(content.contains("debug from tf_logging target"), - "tf_logging debug should pass with 'info,tf_logging=debug' filter"); - assert!(!content.contains("debug from other target"), - "other_crate debug should be filtered out"); - assert!(content.contains("info from other target"), - "other_crate info should pass the base info filter"); + assert!( + content.contains("debug from tf_logging target"), + "tf_logging debug should pass with 'info,tf_logging=debug' filter" + ); + assert!( + !content.contains("debug from other target"), + "other_crate debug should be filtered out" + ); + assert!( + content.contains("info from other target"), + "other_crate info should pass the base info filter" + ); + } + + // Test QW-PERF-001: init_logging completes within 100ms (NFR8 budget) + #[test] + fn test_init_logging_completes_within_100ms() { + let temp = tempdir().unwrap(); + let log_dir = temp.path().join("logs"); + + let config = LoggingConfig { + log_level: "info".to_string(), + log_dir: log_dir.to_string_lossy().to_string(), + log_to_stdout: false, + }; + + let start = std::time::Instant::now(); + let guard = init_logging(&config).unwrap(); + let elapsed = start.elapsed(); + + assert!( + elapsed.as_millis() < 100, + "init_logging took {}ms, expected < 100ms", + elapsed.as_millis() + ); + + drop(guard); } // Test [AI-Review]: log_to_stdout=true creates stdout layer and emits to file @@ -534,15 +607,20 @@ mod tests { }; let guard = init_logging(&config); - assert!(guard.is_ok(), "init_logging with log_to_stdout=true should succeed"); + assert!( + guard.is_ok(), + "init_logging with log_to_stdout=true should succeed" + ); tracing::info!("stdout test message"); drop(guard.unwrap()); let log_file = find_log_file(&log_dir); let content = fs::read_to_string(&log_file).unwrap(); - assert!(content.contains("stdout test message"), - "Log should still reach file when log_to_stdout=true"); + assert!( + content.contains("stdout test message"), + "Log should still reach file when log_to_stdout=true" + ); } // Test [AI-Review-R6 M3]: log_to_stdout actually produces output on stdout @@ -560,24 +638,35 @@ mod tests { .arg("--exact") .arg("init::tests::stdout_subprocess_entrypoint") .env("TF_LOGGING_STDOUT_TEST", "1") - .env("TF_LOGGING_STDOUT_LOG_DIR", log_dir.to_string_lossy().to_string()) + .env( + "TF_LOGGING_STDOUT_LOG_DIR", + log_dir.to_string_lossy().to_string(), + ) .output() .expect("Failed to execute stdout subprocess"); - assert!(output.status.success(), + assert!( + output.status.success(), "Subprocess stdout test failed:\nstderr:\n{}", - String::from_utf8_lossy(&output.stderr)); + String::from_utf8_lossy(&output.stderr) + ); let stdout_str = String::from_utf8_lossy(&output.stdout); - assert!(stdout_str.contains("stdout_capture_verification_message"), - "Expected log message on stdout, got:\n{stdout_str}"); + assert!( + stdout_str.contains("stdout_capture_verification_message"), + "Expected log message on stdout, got:\n{stdout_str}" + ); // Verify it's JSON-structured - let line = stdout_str.lines() + let line = stdout_str + .lines() .find(|l| l.contains("stdout_capture_verification_message")) .expect("Expected matching stdout line"); - let json: serde_json::Value = serde_json::from_str(line) - .expect("stdout log line should be valid JSON"); - assert!(json.get("timestamp").is_some(), "stdout JSON missing timestamp"); + let json: serde_json::Value = + serde_json::from_str(line).expect("stdout log line should be valid JSON"); + assert!( + 
json.get("timestamp").is_some(), + "stdout JSON missing timestamp" + ); } #[test] diff --git a/crates/tf-logging/src/redact.rs b/crates/tf-logging/src/redact.rs index c9c6694..54927bb 100644 --- a/crates/tf-logging/src/redact.rs +++ b/crates/tf-logging/src/redact.rs @@ -5,8 +5,8 @@ use serde_json::Value; use tracing::{Event, Subscriber}; -use tracing_subscriber::fmt::FormattedFields; use tracing_subscriber::fmt::format::Writer; +use tracing_subscriber::fmt::FormattedFields; use tracing_subscriber::fmt::{FmtContext, FormatEvent, FormatFields}; use tracing_subscriber::registry::LookupSpan; @@ -30,18 +30,30 @@ pub(crate) const SENSITIVE_FIELDS: &[&str] = &[ /// Pre-computed suffixes for compound field detection (e.g., `_token`, `-key`). /// Avoids per-call `format!` allocations in `is_sensitive()`. const SENSITIVE_SUFFIXES: &[&str] = &[ - "_token", "-token", - "_api_key", "-api_key", - "_apikey", "-apikey", - "_key", "-key", - "_secret", "-secret", - "_password", "-password", - "_passwd", "-passwd", - "_pwd", "-pwd", - "_auth", "-auth", - "_authorization", "-authorization", - "_credential", "-credential", - "_credentials", "-credentials", + "_token", + "-token", + "_api_key", + "-api_key", + "_apikey", + "-apikey", + "_key", + "-key", + "_secret", + "-secret", + "_password", + "-password", + "_passwd", + "-passwd", + "_pwd", + "-pwd", + "_auth", + "-auth", + "_authorization", + "-authorization", + "_credential", + "-credential", + "_credentials", + "-credentials", ]; /// A custom JSON event formatter that redacts sensitive fields. @@ -86,7 +98,9 @@ impl RedactingVisitor { // Suffix match for compound field names like access_token, // auth_token, session_key, api_secret, etc. // Uses pre-computed SENSITIVE_SUFFIXES to avoid per-call allocations. - SENSITIVE_SUFFIXES.iter().any(|suffix| lower.ends_with(suffix)) + SENSITIVE_SUFFIXES + .iter() + .any(|suffix| lower.ends_with(suffix)) } fn looks_like_url(value: &str) -> bool { @@ -172,12 +186,10 @@ impl tracing::field::Visit for RedactingVisitor { self.fields .insert(name.to_string(), Value::String("[REDACTED]".to_string())); } else if let Some(n) = serde_json::Number::from_f64(value) { - self.fields - .insert(name.to_string(), Value::Number(n)); + self.fields.insert(name.to_string(), Value::Number(n)); } else { // NaN/Infinity cannot be represented as JSON numbers - self.fields - .insert(name.to_string(), Value::Null); + self.fields.insert(name.to_string(), Value::Null); } } @@ -187,8 +199,7 @@ impl tracing::field::Visit for RedactingVisitor { self.fields .insert(name.to_string(), Value::String("[REDACTED]".to_string())); } else { - self.fields - .insert(name.to_string(), Value::Bool(value)); + self.fields.insert(name.to_string(), Value::Bool(value)); } } } @@ -237,10 +248,7 @@ where // Message if !visitor.message.is_empty() { - obj.insert( - "message".to_string(), - Value::String(visitor.message), - ); + obj.insert("message".to_string(), Value::String(visitor.message)); } // Fields @@ -267,10 +275,7 @@ where if !rendered.is_empty() { let span_fields = parse_and_redact_span_fields(rendered); if !span_fields.is_empty() { - span_obj.insert( - "fields".to_string(), - Value::Object(span_fields), - ); + span_obj.insert("fields".to_string(), Value::Object(span_fields)); } } } @@ -466,10 +471,16 @@ mod tests { tracing::info!($field = "secret_value_123", "test"); drop(guard); let content = fs::read_to_string(find_log_file(&log_dir)).unwrap(); - assert!(!content.contains("secret_value_123"), - "Field '{}' was not redacted", stringify!($field)); - 
assert!(content.contains("[REDACTED]"), - "'{}' should show [REDACTED]", stringify!($field)); + assert!( + !content.contains("secret_value_123"), + "Field '{}' was not redacted", + stringify!($field) + ); + assert!( + content.contains("[REDACTED]"), + "'{}' should show [REDACTED]", + stringify!($field) + ); } }; } @@ -506,9 +517,18 @@ mod tests { ); drop(guard); let content = fs::read_to_string(find_log_file(&log_dir)).unwrap(); - assert!(content.contains("triage"), "command field was incorrectly redacted"); - assert!(content.contains("success"), "status field was incorrectly redacted"); - assert!(content.contains("lot-42"), "scope field was incorrectly redacted"); + assert!( + content.contains("triage"), + "command field was incorrectly redacted" + ); + assert!( + content.contains("success"), + "status field was incorrectly redacted" + ); + assert!( + content.contains("lot-42"), + "scope field was incorrectly redacted" + ); } // Test 0.5-UNIT-004: URLs with sensitive params are redacted @@ -528,12 +548,18 @@ mod tests { ); drop(guard); let content = fs::read_to_string(find_log_file(&log_dir)).unwrap(); - assert!(!content.contains("abc123"), - "URL token parameter value should be redacted"); - assert!(content.contains("[REDACTED]"), - "Redacted URL should contain [REDACTED]"); - assert!(content.contains("user"), - "Non-sensitive URL parameter name should be preserved"); + assert!( + !content.contains("abc123"), + "URL token parameter value should be redacted" + ); + assert!( + content.contains("[REDACTED]"), + "Redacted URL should contain [REDACTED]" + ); + assert!( + content.contains("user"), + "Non-sensitive URL parameter name should be preserved" + ); } // Test 0.5-UNIT-009: Debug impl of LogGuard does not leak sensitive data @@ -550,14 +576,22 @@ mod tests { let debug_output = format!("{:?}", guard); // Debug output must not contain sensitive patterns - assert!(!debug_output.to_lowercase().contains("secret"), - "Debug output should not contain 'secret'"); - assert!(!debug_output.to_lowercase().contains("password"), - "Debug output should not contain 'password'"); - assert!(!debug_output.to_lowercase().contains("token"), - "Debug output should not contain 'token'"); - assert!(!debug_output.to_lowercase().contains("key"), - "Debug output should not contain 'key'"); + assert!( + !debug_output.to_lowercase().contains("secret"), + "Debug output should not contain 'secret'" + ); + assert!( + !debug_output.to_lowercase().contains("password"), + "Debug output should not contain 'password'" + ); + assert!( + !debug_output.to_lowercase().contains("token"), + "Debug output should not contain 'token'" + ); + assert!( + !debug_output.to_lowercase().contains("key"), + "Debug output should not contain 'key'" + ); } #[test] @@ -616,8 +650,10 @@ mod tests { tracing::info!(access_token = "my_secret_tok_123", "compound field test"); drop(guard); let content = fs::read_to_string(find_log_file(&log_dir)).unwrap(); - assert!(!content.contains("my_secret_tok_123"), - "Compound field 'access_token' value should be redacted"); + assert!( + !content.contains("my_secret_tok_123"), + "Compound field 'access_token' value should be redacted" + ); assert!(content.contains("[REDACTED]")); } @@ -656,7 +692,10 @@ mod tests { let json: serde_json::Value = serde_json::from_str(line).unwrap(); let fields = json.get("fields").expect("Missing fields"); let duration = fields.get("duration").expect("Missing duration field"); - assert!(duration.is_number(), "Float should be stored as JSON number, got: {duration}"); + assert!( 
+ duration.is_number(), + "Float should be stored as JSON number, got: {duration}" + ); } // Test [AI-Review-R4 M2]: numeric and bool sensitive fields are redacted @@ -670,22 +709,38 @@ mod tests { log_to_stdout: false, }; let guard = init_logging(&config).unwrap(); - tracing::info!(token = 42_i64, api_key = 99_u64, secret = true, "numeric sensitive test"); + tracing::info!( + token = 42_i64, + api_key = 99_u64, + secret = true, + "numeric sensitive test" + ); drop(guard); let content = fs::read_to_string(find_log_file(&log_dir)).unwrap(); let line = content.lines().last().unwrap(); let json: serde_json::Value = serde_json::from_str(line).unwrap(); let fields = json.get("fields").expect("Missing fields"); // All three sensitive fields should be "[REDACTED]", not their numeric/bool values - assert_eq!(fields.get("token").unwrap(), "[REDACTED]", - "i64 sensitive field 'token' should be redacted"); - assert_eq!(fields.get("api_key").unwrap(), "[REDACTED]", - "u64 sensitive field 'api_key' should be redacted"); - assert_eq!(fields.get("secret").unwrap(), "[REDACTED]", - "bool sensitive field 'secret' should be redacted"); + assert_eq!( + fields.get("token").unwrap(), + "[REDACTED]", + "i64 sensitive field 'token' should be redacted" + ); + assert_eq!( + fields.get("api_key").unwrap(), + "[REDACTED]", + "u64 sensitive field 'api_key' should be redacted" + ); + assert_eq!( + fields.get("secret").unwrap(), + "[REDACTED]", + "bool sensitive field 'secret' should be redacted" + ); // Ensure the raw numeric values don't appear - assert!(!content.contains("\"42\"") && !content.contains(":42,") && !content.contains(":42}"), - "Numeric value 42 should not appear in output"); + assert!( + !content.contains("\"42\"") && !content.contains(":42,") && !content.contains(":42}"), + "Numeric value 42 should not appear in output" + ); } // --- P0: format_rfc3339() tests --- @@ -795,16 +850,24 @@ mod tests { fn test_parse_and_redact_span_fields_preserves_types() { let rendered = "count=42 enabled=true ratio=3.14 name=\"alice\""; let result = parse_and_redact_span_fields(rendered); - assert!(result.get("count").unwrap().is_number(), - "Integer span field should be parsed as JSON number"); + assert!( + result.get("count").unwrap().is_number(), + "Integer span field should be parsed as JSON number" + ); assert_eq!(result.get("count").unwrap(), 42); - assert!(result.get("enabled").unwrap().is_boolean(), - "Boolean span field should be parsed as JSON boolean"); + assert!( + result.get("enabled").unwrap().is_boolean(), + "Boolean span field should be parsed as JSON boolean" + ); assert_eq!(result.get("enabled").unwrap(), true); - assert!(result.get("ratio").unwrap().is_number(), - "Float span field should be parsed as JSON number"); - assert!(result.get("name").unwrap().is_string(), - "Quoted span field should remain a JSON string"); + assert!( + result.get("ratio").unwrap().is_number(), + "Float span field should be parsed as JSON number" + ); + assert!( + result.get("name").unwrap().is_string(), + "Quoted span field should remain a JSON string" + ); assert_eq!(result.get("name").unwrap(), "alice"); } @@ -827,15 +890,23 @@ mod tests { let content = fs::read_to_string(find_log_file(&log_dir)).unwrap(); let line = content.lines().last().unwrap(); let json: serde_json::Value = serde_json::from_str(line).unwrap(); - let spans = json.get("spans").and_then(|v| v.as_array()) + let spans = json + .get("spans") + .and_then(|v| v.as_array()) .expect("Expected 'spans' array"); let span_obj = &spans[0]; let fields = 
span_obj.get("fields").expect("Expected 'fields' in span"); let count = fields.get("count").expect("Missing count field"); let active = fields.get("active").expect("Missing active field"); - assert!(count.is_number(), "count should be a JSON number, got: {count}"); + assert!( + count.is_number(), + "count should be a JSON number, got: {count}" + ); assert_eq!(count, 42); - assert!(active.is_boolean(), "active should be a JSON boolean, got: {active}"); + assert!( + active.is_boolean(), + "active should be a JSON boolean, got: {active}" + ); assert_eq!(active, true); } @@ -856,10 +927,14 @@ mod tests { drop(_entered); drop(guard); let content = fs::read_to_string(find_log_file(&log_dir)).unwrap(); - assert!(!content.contains("super_secret_value"), - "Span sensitive field 'token' value should be redacted in log output"); - assert!(content.contains("[REDACTED]"), - "Span field should show [REDACTED]"); + assert!( + !content.contains("super_secret_value"), + "Span sensitive field 'token' value should be redacted in log output" + ); + assert!( + content.contains("[REDACTED]"), + "Span field should show [REDACTED]" + ); } // Test [AI-Review-R7 M2]: free-text message is NOT scanned for secrets @@ -884,12 +959,16 @@ mod tests { drop(guard); let content = fs::read_to_string(find_log_file(&log_dir)).unwrap(); // Named field IS redacted (correct behavior) - assert!(!content.contains("secret_in_field_xyz"), - "Named field secret should be redacted"); + assert!( + !content.contains("secret_in_field_xyz"), + "Named field secret should be redacted" + ); // Free-text message is NOT scanned (known limitation, documented) - assert!(content.contains("secret_in_message_abc"), - "Free-text message is NOT scanned — this is a documented limitation. \ - Callers must use named fields for sensitive data."); + assert!( + content.contains("secret_in_message_abc"), + "Free-text message is NOT scanned — this is a documented limitation. \ + Callers must use named fields for sensitive data." 
+ ); } // Test [AI-Review-R6 L3]: span fields rendered as structured JSON, not opaque string @@ -911,13 +990,17 @@ mod tests { let content = fs::read_to_string(find_log_file(&log_dir)).unwrap(); let line = content.lines().last().unwrap(); let json: serde_json::Value = serde_json::from_str(line).unwrap(); - let spans = json.get("spans").and_then(|v| v.as_array()) + let spans = json + .get("spans") + .and_then(|v| v.as_array()) .expect("Expected 'spans' array"); let span_obj = &spans[0]; let fields = span_obj.get("fields").expect("Expected 'fields' in span"); // Fields should be a JSON object, not a string - assert!(fields.is_object(), - "Span fields should be a JSON object, got: {fields}"); + assert!( + fields.is_object(), + "Span fields should be a JSON object, got: {fields}" + ); let fields_map = fields.as_object().unwrap(); assert_eq!(fields_map.get("command").unwrap(), "triage"); assert_eq!(fields_map.get("scope").unwrap(), "lot-42"); diff --git a/crates/tf-logging/tests/integration_test.rs b/crates/tf-logging/tests/integration_test.rs index 9379406..cb12949 100644 --- a/crates/tf-logging/tests/integration_test.rs +++ b/crates/tf-logging/tests/integration_test.rs @@ -8,9 +8,9 @@ mod common; +use common::find_log_file; use std::fs; use std::process::Command; -use common::find_log_file; use tf_logging::{init_logging, LoggingConfig, LoggingError}; // Test 0.5-INT-001: Full logging lifecycle @@ -52,10 +52,13 @@ fn test_full_logging_lifecycle() { // Parse as JSON let lines: Vec<&str> = content.lines().collect(); - assert!(!lines.is_empty(), "Log file should contain at least one line"); + assert!( + !lines.is_empty(), + "Log file should contain at least one line" + ); - let json: serde_json::Value = serde_json::from_str(lines[0]) - .expect("First log line should be valid JSON"); + let json: serde_json::Value = + serde_json::from_str(lines[0]).expect("First log line should be valid JSON"); // Verify required JSON fields assert!(json.get("timestamp").is_some(), "Missing 'timestamp'"); @@ -73,8 +76,14 @@ fn test_full_logging_lifecycle() { ); // Verify normal fields are preserved - assert!(content.contains("triage"), "Normal field 'command=triage' should be preserved"); - assert!(content.contains("Pipeline complete"), "Log message should be preserved"); + assert!( + content.contains("triage"), + "Normal field 'command=triage' should be preserved" + ); + assert!( + content.contains("Pipeline complete"), + "Log message should be preserved" + ); } // Test 0.5-INT-002: Workspace integration @@ -131,12 +140,24 @@ fn test_multiple_sensitive_fields_redacted_in_single_event() { let content = fs::read_to_string(find_log_file(&log_dir)).unwrap(); // All sensitive values must be redacted - assert!(!content.contains("key_abc"), "api_key value should be redacted"); - assert!(!content.contains("pass_def"), "password value should be redacted"); - assert!(!content.contains("secret_ghi"), "secret value should be redacted"); + assert!( + !content.contains("key_abc"), + "api_key value should be redacted" + ); + assert!( + !content.contains("pass_def"), + "password value should be redacted" + ); + assert!( + !content.contains("secret_ghi"), + "secret value should be redacted" + ); // Normal field must be preserved - assert!(content.contains("visible_value"), "Normal field should be visible"); + assert!( + content.contains("visible_value"), + "Normal field should be visible" + ); } #[test] @@ -180,8 +201,10 @@ fn test_log_output_includes_parent_spans() { // Verify span fields are structured JSON objects (not opaque 
strings) let cli_span = spans.iter().find(|s| s["name"] == "cli_command").unwrap(); let fields = cli_span.get("fields").expect("Expected 'fields' in span"); - assert!(fields.is_object(), - "Span fields should be a JSON object, got: {fields}"); + assert!( + fields.is_object(), + "Span fields should be a JSON object, got: {fields}" + ); let fields_map = fields.as_object().unwrap(); assert_eq!(fields_map.get("command").unwrap(), "triage"); assert_eq!(fields_map.get("scope").unwrap(), "lot-42"); @@ -208,7 +231,10 @@ fn test_cli_command_simulation_via_subprocess() { .env("TF_LOGGING_RUN_CLI_SUBPROCESS", "1") .env("TF_LOGGING_CLI_COMMAND", "triage") .env("TF_LOGGING_CLI_SCOPE", "lot-42") - .env("TF_LOGGING_CLI_LOG_DIR", log_dir.to_string_lossy().to_string()) + .env( + "TF_LOGGING_CLI_LOG_DIR", + log_dir.to_string_lossy().to_string(), + ) .output() .expect("Failed to execute subprocess test entrypoint"); @@ -243,12 +269,11 @@ fn cli_subprocess_entrypoint() { return; } - let log_dir = std::env::var("TF_LOGGING_CLI_LOG_DIR") - .expect("TF_LOGGING_CLI_LOG_DIR must be set"); - let command = std::env::var("TF_LOGGING_CLI_COMMAND") - .expect("TF_LOGGING_CLI_COMMAND must be set"); - let scope = std::env::var("TF_LOGGING_CLI_SCOPE") - .expect("TF_LOGGING_CLI_SCOPE must be set"); + let log_dir = + std::env::var("TF_LOGGING_CLI_LOG_DIR").expect("TF_LOGGING_CLI_LOG_DIR must be set"); + let command = + std::env::var("TF_LOGGING_CLI_COMMAND").expect("TF_LOGGING_CLI_COMMAND must be set"); + let scope = std::env::var("TF_LOGGING_CLI_SCOPE").expect("TF_LOGGING_CLI_SCOPE must be set"); let config = LoggingConfig { log_level: "info".to_string(), diff --git a/crates/tf-security/src/error.rs b/crates/tf-security/src/error.rs index ed27c5e..b6d6d1e 100644 --- a/crates/tf-security/src/error.rs +++ b/crates/tf-security/src/error.rs @@ -407,9 +407,8 @@ mod tests { /// Then: c'est une erreur KeyringUnavailable avec platform et hint #[test] fn test_error_conversion_no_storage_access() { - let platform_err = keyring::Error::NoStorageAccess(Box::new( - std::io::Error::other("no keyring"), - )); + let platform_err = + keyring::Error::NoStorageAccess(Box::new(std::io::Error::other("no keyring"))); let err = SecretError::from_keyring_error(platform_err, "some-key"); @@ -497,20 +496,15 @@ mod tests { } // Ambiguous -> AccessDenied (empty vec since we can't easily construct CredentialApi) - let err2 = SecretError::from_keyring_error( - keyring::Error::Ambiguous(vec![]), - test_key, - ); + let err2 = SecretError::from_keyring_error(keyring::Error::Ambiguous(vec![]), test_key); match &err2 { SecretError::AccessDenied { key, .. } => assert_eq!(key, test_key), _ => panic!("Expected AccessDenied, got {:?}", err2), } // TooLong (catchall) -> StoreFailed - let err3 = SecretError::from_keyring_error( - keyring::Error::TooLong("x".to_string(), 1), - test_key, - ); + let err3 = + SecretError::from_keyring_error(keyring::Error::TooLong("x".to_string(), 1), test_key); match &err3 { SecretError::StoreFailed { key, .. 
} => assert_eq!(key, test_key), _ => panic!("Expected StoreFailed, got {:?}", err3), diff --git a/crates/tf-security/src/keyring.rs b/crates/tf-security/src/keyring.rs index 506234d..e661cd9 100644 --- a/crates/tf-security/src/keyring.rs +++ b/crates/tf-security/src/keyring.rs @@ -431,7 +431,10 @@ mod tests { // Then: returns Ok(false), not an error assert!(result.is_ok(), "try_has_secret should return Ok, not Err"); - assert!(!result.unwrap(), "try_has_secret should return Ok(false) for missing key"); + assert!( + !result.unwrap(), + "try_has_secret should return Ok(false) for missing key" + ); } /// Test: try_has_secret returns Ok(true) for existing secret @@ -453,7 +456,10 @@ mod tests { // Then: returns Ok(true) assert!(result.is_ok(), "try_has_secret should return Ok"); - assert!(result.unwrap(), "try_has_secret should return Ok(true) for existing key"); + assert!( + result.unwrap(), + "try_has_secret should return Ok(true) for existing key" + ); // Cleanup let _ = store.delete_secret(&key); @@ -604,11 +610,18 @@ mod tests { let store = SecretStore::new(""); // Should create successfully - assert_eq!(store.service_name(), "", "Empty service name should be preserved"); + assert_eq!( + store.service_name(), + "", + "Empty service name should be preserved" + ); // Debug output should still work let debug_str = format!("{:?}", store); - assert!(debug_str.contains("SecretStore"), "Debug should contain struct name"); + assert!( + debug_str.contains("SecretStore"), + "Debug should contain struct name" + ); } /// Test: Empty key handling @@ -858,7 +871,10 @@ mod tests { // This is a compile-time type check: try_has_secret returns Result let result: Result = store.try_has_secret(&key); // With keyring available, non-existent key returns Ok(false) - assert!(result.is_ok(), "try_has_secret should return Ok for non-existent key"); + assert!( + result.is_ok(), + "try_has_secret should return Ok for non-existent key" + ); assert!(!result.unwrap(), "Non-existent key should return Ok(false)"); }
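
For context on what the semicolon-separator, whitespace, and fragment tests above are asserting, here is a minimal, self-contained sketch of that redaction behavior. It is not the crate's actual `redact_url_sensitive_params` implementation: the function name `redact_params_sketch` and the abbreviated `SENSITIVE` list are assumptions for illustration only, and path-segment redaction is omitted.

// Sketch only — illustrates the behavior exercised by the tests in this patch:
// query and fragment parameters are split on both '&' and ';' (the tests cite
// RFC 1866 for ';' as a separator), keys are matched case-insensitively with
// surrounding whitespace trimmed, and values of sensitive keys become "[REDACTED]".
fn redact_params_sketch(url: &str) -> String {
    // Abbreviated list for the sketch; the crate keeps a much longer one.
    const SENSITIVE: &[&str] = &["token", "api_key", "api-key", "password", "secret", "auth"];

    // Redact a single `key=value` pair if the (trimmed, lowercased) key is sensitive.
    let redact_pair = |pair: &str| -> String {
        match pair.split_once('=') {
            Some((key, _)) if SENSITIVE.contains(&key.trim().to_ascii_lowercase().as_str()) => {
                format!("{key}=[REDACTED]")
            }
            _ => pair.to_string(),
        }
    };

    // Rebuild one query or fragment section, preserving '&' and ';' separators.
    let redact_section = |section: &str| -> String {
        let mut out = String::new();
        let mut pair = String::new();
        for ch in section.chars() {
            if ch == '&' || ch == ';' {
                out.push_str(&redact_pair(&pair));
                out.push(ch);
                pair.clear();
            } else {
                pair.push(ch);
            }
        }
        out.push_str(&redact_pair(&pair));
        out
    };

    // Split off the fragment first, then the query; anything before '?' is left untouched.
    let (without_fragment, fragment) = match url.split_once('#') {
        Some((head, frag)) => (head, Some(frag)),
        None => (url, None),
    };
    let mut result = match without_fragment.split_once('?') {
        Some((base, query)) => format!("{base}?{}", redact_section(query)),
        None => without_fragment.to_string(),
    };
    if let Some(frag) = fragment {
        result.push('#');
        result.push_str(&redact_section(frag));
    }
    result
}

// Example matching the semicolon-separator tests:
//   redact_params_sketch("https://example.com?token=secret;foo=bar")
//   == "https://example.com?token=[REDACTED];foo=bar"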