From ec548e84d967d0c019eccfb327412f0223843e1c Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Sat, 27 Dec 2025 09:46:19 +0000
Subject: [PATCH 1/3] Initial plan

From e67443388984440ac650043c88ce2b437f9a6370 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Sat, 27 Dec 2025 09:54:10 +0000
Subject: [PATCH 2/3] Add async/await support to Python rules compiler

Co-authored-by: jaypatrick <1800595+jaypatrick@users.noreply.github.com>
---
 src/rules-compiler-python/README.md            |  88 +++++-
 src/rules-compiler-python/pyproject.toml       |   2 +
 .../rules_compiler/__init__.py                 |  22 +-
 .../rules_compiler/compiler.py                 | 283 ++++++++++++++++++
 src/rules-compiler-python/tests/test_async.py  | 104 +++++++
 5 files changed, 496 insertions(+), 3 deletions(-)
 create mode 100644 src/rules-compiler-python/tests/test_async.py

diff --git a/src/rules-compiler-python/README.md b/src/rules-compiler-python/README.md
index 9adac95d..1ac37e72 100644
--- a/src/rules-compiler-python/README.md
+++ b/src/rules-compiler-python/README.md
@@ -67,7 +67,7 @@ rules-compiler -c config.yaml --fail-on-warnings
 
 ## Python API
 
-### Basic Usage
+### Basic Usage (Synchronous)
 
 ```python
 from rules_compiler import RulesCompiler
@@ -85,6 +85,92 @@ else:
     print(f"Error: {result.error_message}")
 ```
 
+### Async/Await Usage (Python 3.9+)
+
+The Python compiler now supports asynchronous operations for better performance in I/O-bound scenarios:
+
+```python
+import asyncio
+from rules_compiler import RulesCompiler
+
+async def main():
+    compiler = RulesCompiler()
+
+    # Use async API for better performance
+    result = await compiler.compile_async(
+        "compiler-config.yaml",
+        copy_to_rules=True
+    )
+
+    if result.success:
+        print(f"Compiled {result.rule_count} rules")
+        print(f"Hash: {result.hash_short()}")
+        print(f"Time: {result.elapsed_formatted()}")
+
+# Run async function
+asyncio.run(main())
+```
+
+### Parallel Processing with Async
+
+Compile multiple configurations in parallel:
+
+```python
+import asyncio
+from rules_compiler import compile_rules_async
+
+async def compile_all():
+    configs = ["config1.yaml", "config2.yaml", "config3.yaml"]
+
+    # Compile all configurations in parallel
+    tasks = [compile_rules_async(config) for config in configs]
+    results = await asyncio.gather(*tasks)
+
+    for result in results:
+        if result.success:
+            print(f"{result.config_name}: {result.rule_count} rules")
+        else:
+            print(f"Failed: {result.error_message}")
+
+asyncio.run(compile_all())
+```
+
+### Async File Operations
+
+Use async functions for file operations:
+
+```python
+import asyncio
+from rules_compiler import count_rules_async, compute_hash_async
+
+async def analyze_file(path):
+    # Count rules and compute hash in parallel
+    count, hash_value = await asyncio.gather(
+        count_rules_async(path),
+        compute_hash_async(path)
+    )
+
+    print(f"File: {path}")
+    print(f"Rules: {count}")
+    print(f"Hash: {hash_value[:32]}...")
+
+asyncio.run(analyze_file("rules.txt"))
+```
+
+### Performance Considerations
+
+- **Async APIs** are recommended for:
+  - Large file operations
+  - Processing multiple configurations
+  - Integration with async frameworks (FastAPI, aiohttp, etc.)
+
+- **Sync APIs** are simpler for:
+  - Single compilation tasks
+  - Simple scripts
+  - Interactive use
+
+**Note**: The async APIs require the `aiofiles` package for optimal performance. If not installed, they will fall back to running sync operations in a thread pool.
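+
+The same thread-pool behaviour can also be reproduced explicitly. The sketch below is illustrative only (the helper `compile_in_thread` is not part of the package API) and uses `asyncio.to_thread`, which likewise requires Python 3.9+:
+
+```python
+import asyncio
+from rules_compiler import compile_rules
+
+async def compile_in_thread(config_path: str):
+    # Run the synchronous compiler on a worker thread so the event loop stays responsive.
+    return await asyncio.to_thread(compile_rules, config_path)
+
+result = asyncio.run(compile_in_thread("compiler-config.yaml"))
+print(result.success)
+```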
+ ### Reading Configuration ```python diff --git a/src/rules-compiler-python/pyproject.toml b/src/rules-compiler-python/pyproject.toml index 3e708ff1..fe6e9422 100644 --- a/src/rules-compiler-python/pyproject.toml +++ b/src/rules-compiler-python/pyproject.toml @@ -31,12 +31,14 @@ keywords = ["adguard", "filter", "adblock", "hostlist", "compiler", "dns", "priv dependencies = [ "pyyaml>=6.0", "tomli>=2.0; python_version < '3.11'", + "aiofiles>=23.0", ] [project.optional-dependencies] dev = [ "pytest>=7.0", "pytest-cov>=4.0", + "pytest-asyncio>=0.21.0", "mypy>=1.0", "ruff>=0.1.0", "types-PyYAML>=6.0", diff --git a/src/rules-compiler-python/rules_compiler/__init__.py b/src/rules-compiler-python/rules_compiler/__init__.py index d17be230..5d7f97ee 100644 --- a/src/rules-compiler-python/rules_compiler/__init__.py +++ b/src/rules-compiler-python/rules_compiler/__init__.py @@ -4,14 +4,26 @@ This package provides a Python interface for compiling AdGuard filter rules using the @adguard/hostlist-compiler CLI tool. -Example: +Supports both synchronous and asynchronous APIs for flexible integration. + +Synchronous Example: >>> from rules_compiler import RulesCompiler >>> compiler = RulesCompiler() >>> result = compiler.compile("compiler-config.yaml", copy_to_rules=True) >>> print(f"Compiled {result.rule_count} rules in {result.elapsed_formatted()}") +Asynchronous Example: + >>> import asyncio + >>> from rules_compiler import RulesCompiler + >>> async def main(): + ... compiler = RulesCompiler() + ... result = await compiler.compile_async("compiler-config.yaml") + ... print(f"Compiled {result.rule_count} rules") + >>> asyncio.run(main()) + Features: - Multi-format configuration support (JSON, YAML, TOML) + - Synchronous and asynchronous APIs - Configuration validation - Custom error types for better error handling - Transformation presets (recommended, minimal, hosts) @@ -39,11 +51,14 @@ PlatformInfo, RulesCompiler, compile_rules, + compile_rules_async, validate_configuration, get_version_info, get_platform_info, count_rules, + count_rules_async, compute_hash, + compute_hash_async, hash_short, format_elapsed, find_command, @@ -84,13 +99,16 @@ "VersionInfo", "PlatformInfo", "RulesCompiler", - # Compiler functions + # Compiler functions (sync and async) "compile_rules", + "compile_rules_async", "validate_configuration", "get_version_info", "get_platform_info", "count_rules", + "count_rules_async", "compute_hash", + "compute_hash_async", "hash_short", "format_elapsed", "find_command", diff --git a/src/rules-compiler-python/rules_compiler/compiler.py b/src/rules-compiler-python/rules_compiler/compiler.py index fb6c9eb3..c011cbfa 100644 --- a/src/rules-compiler-python/rules_compiler/compiler.py +++ b/src/rules-compiler-python/rules_compiler/compiler.py @@ -1,9 +1,14 @@ """ Core compiler functionality for AdGuard filter rules. + +This module provides both synchronous and asynchronous APIs for compiling +AdGuard filter rules. The async APIs offer better performance for I/O-bound +operations and can be used in async applications. 
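+
+Async example (illustrative sketch; skipped under doctest because it invokes the
+external hostlist-compiler CLI):
+
+    >>> import asyncio
+    >>> from rules_compiler.compiler import compile_rules_async
+    >>> result = asyncio.run(compile_rules_async("compiler-config.yaml"))  # doctest: +SKIP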
""" from __future__ import annotations +import asyncio import hashlib import json import logging @@ -17,6 +22,12 @@ from pathlib import Path from typing import Any +try: + import aiofiles + AIOFILES_AVAILABLE = True +except ImportError: + AIOFILES_AVAILABLE = False + from rules_compiler.config import ( CompilerConfiguration, ConfigurationFormat, @@ -200,6 +211,35 @@ def count_rules(file_path: str | Path) -> int: return count +async def count_rules_async(file_path: str | Path) -> int: + """ + Asynchronously count non-empty, non-comment lines in a file. + + Args: + file_path: Path to the file. + + Returns: + Number of rules. + """ + if not AIOFILES_AVAILABLE: + # Fall back to sync version in thread pool + loop = asyncio.get_event_loop() + return await loop.run_in_executor(None, count_rules, file_path) + + path = Path(file_path) + if not path.exists(): + return 0 + + count = 0 + async with aiofiles.open(path, "r", encoding="utf-8") as f: + async for line in f: + stripped = line.strip() + if stripped and not stripped.startswith(("!", "#")): + count += 1 + + return count + + def compute_hash(file_path: str | Path) -> str: """ Compute SHA-384 hash of a file. @@ -217,6 +257,31 @@ def compute_hash(file_path: str | Path) -> str: return sha384.hexdigest() +async def compute_hash_async(file_path: str | Path) -> str: + """ + Asynchronously compute SHA-384 hash of a file. + + Args: + file_path: Path to the file. + + Returns: + Hex-encoded hash string. + """ + if not AIOFILES_AVAILABLE: + # Fall back to sync version in thread pool + loop = asyncio.get_event_loop() + return await loop.run_in_executor(None, compute_hash, file_path) + + sha384 = hashlib.sha384() + async with aiofiles.open(file_path, "rb") as f: + while True: + chunk = await f.read(8192) + if not chunk: + break + sha384.update(chunk) + return sha384.hexdigest() + + def hash_short(hash_value: str, length: int = 32) -> str: """ Get shortened hash for display. @@ -373,6 +438,51 @@ def get_version_info(self) -> VersionInfo: """Get version information for all components.""" return get_version_info() + async def compile_async( + self, + config_path: str | Path, + output_path: str | Path | None = None, + copy_to_rules: bool = False, + rules_directory: str | Path | None = None, + format: ConfigurationFormat | None = None, + validate: bool = True, + fail_on_warnings: bool = False, + ) -> CompilerResult: + """ + Asynchronously compile filter rules. + + This method provides better performance for I/O-bound operations + and can be used in async applications. + + Args: + config_path: Path to configuration file. + output_path: Optional output file path. + copy_to_rules: Copy output to rules directory. + rules_directory: Custom rules directory path. + format: Force configuration format. + validate: Validate configuration before compiling. + fail_on_warnings: Fail compilation if configuration has validation warnings. + + Returns: + Compilation result. + + Example: + >>> compiler = RulesCompiler() + >>> result = await compiler.compile_async("config.yaml") + >>> if result.success: + ... 
print(f"Compiled {result.rule_count} rules") + """ + return await compile_rules_async( + config_path=config_path, + output_path=output_path, + copy_to_rules=copy_to_rules, + rules_directory=rules_directory, + format=format, + debug=self.debug, + validate=validate, + fail_on_warnings=fail_on_warnings, + ) + def compile_rules( config_path: str | Path, @@ -536,6 +646,179 @@ def compile_rules( return result +async def compile_rules_async( + config_path: str | Path, + output_path: str | Path | None = None, + copy_to_rules: bool = False, + rules_directory: str | Path | None = None, + format: ConfigurationFormat | None = None, + debug: bool = False, + validate: bool = True, + fail_on_warnings: bool = False, +) -> CompilerResult: + """ + Asynchronously compile filter rules using hostlist-compiler. + + This async version provides better performance for I/O-bound operations + and allows compilation to be integrated into async applications. + + Args: + config_path: Path to configuration file. + output_path: Optional output file path. + copy_to_rules: Copy output to rules directory. + rules_directory: Custom rules directory path. + format: Force configuration format. + debug: Enable debug logging. + validate: Validate configuration before compiling. + fail_on_warnings: Fail compilation if configuration has validation warnings. + + Returns: + Compilation result. + """ + result = CompilerResult(start_time=datetime.utcnow()) + config_path = Path(config_path).resolve() + temp_config_path = None + + try: + # Read configuration + config = read_configuration(config_path, format) + result.config_name = config.name + result.config_version = config.version + + # Validate configuration if requested + if validate: + validation_result = config.validate() + if not validation_result.is_valid: + raise ValidationError(validation_result.errors, validation_result.warnings) + if validation_result.warnings: + for warning in validation_result.warnings: + logger.warning(f"Config warning: {warning}") + if fail_on_warnings: + raise ValidationError( + errors=[], + warnings=validation_result.warnings, + message="Configuration has warnings (fail_on_warnings is enabled)", + ) + + # Determine output path + if output_path: + actual_output = Path(output_path).resolve() + else: + timestamp = datetime.utcnow().strftime("%Y%m%d-%H%M%S") + output_dir = config_path.parent / "output" + output_dir.mkdir(exist_ok=True) + actual_output = output_dir / f"compiled-{timestamp}.txt" + + result.output_path = str(actual_output) + + # Convert to JSON if needed (hostlist-compiler only supports JSON) + detected_format = format or config._source_format + if detected_format != ConfigurationFormat.JSON: + temp_config_path = tempfile.NamedTemporaryFile( + mode="w", + suffix=".json", + delete=False, + encoding="utf-8", + ) + temp_config_path.write(to_json(config)) + temp_config_path.close() + compile_config_path = temp_config_path.name + if debug: + logger.debug(f"Created temp JSON config: {compile_config_path}") + else: + compile_config_path = str(config_path) + + # Get compiler command + cmd, cwd = _get_compiler_command(compile_config_path, str(actual_output)) + + if debug: + logger.debug(f"Running: {' '.join(cmd)}") + logger.debug(f"Working directory: {cwd}") + + # Run compilation asynchronously + try: + proc = await asyncio.create_subprocess_exec( + *cmd, + cwd=cwd, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + ) + stdout_bytes, stderr_bytes = await asyncio.wait_for( + proc.communicate(), + timeout=300, # 5 minute timeout + ) + 
stdout = stdout_bytes.decode("utf-8") if stdout_bytes else "" + stderr = stderr_bytes.decode("utf-8") if stderr_bytes else "" + returncode = proc.returncode + except asyncio.TimeoutError: + raise CompilerTimeoutError(300, " ".join(cmd)) + + result.stdout = stdout + result.stderr = stderr + + if returncode != 0: + raise CompilationError( + f"Compiler exited with code {returncode}", + exit_code=returncode, + stdout=stdout, + stderr=stderr, + ) + + # Verify output was created + if not actual_output.exists(): + raise OutputNotCreatedError(str(actual_output)) + + # Calculate statistics asynchronously + result.rule_count = await count_rules_async(actual_output) + result.output_hash = await compute_hash_async(actual_output) + result.success = True + + if debug: + logger.debug(f"Compiled {result.rule_count} rules") + logger.debug(f"Output hash: {result.hash_short()}...") + + # Copy to rules directory if requested + if copy_to_rules: + if rules_directory: + rules_dir = Path(rules_directory) + else: + rules_dir = config_path.parent.parent.parent / "rules" + + try: + # Run file copy in thread pool to avoid blocking + loop = asyncio.get_event_loop() + await loop.run_in_executor(None, rules_dir.mkdir, True, 0o755) + dest_path = rules_dir / "adguard_user_filter.txt" + await loop.run_in_executor(None, shutil.copy2, actual_output, dest_path) + result.copied_to_rules = True + result.rules_destination = str(dest_path) + if debug: + logger.debug(f"Copied to: {dest_path}") + except (OSError, IOError) as e: + raise CopyError(str(actual_output), str(dest_path), str(e)) + + except (ValidationError, CompilationError, CompilerNotFoundError, + OutputNotCreatedError, CopyError, CompilerTimeoutError) as e: + result.success = False + result.error_message = str(e) + logger.error(f"Compilation failed: {e}") + + except Exception as e: + result.success = False + result.error_message = str(e) + logger.error(f"Compilation failed: {e}") + + finally: + # Clean up temp file + if temp_config_path and os.path.exists(temp_config_path.name): + os.unlink(temp_config_path.name) + + result.end_time = datetime.utcnow() + result.elapsed_ms = int((result.end_time - result.start_time).total_seconds() * 1000) + + return result + + def validate_configuration( config_path: str | Path, format: ConfigurationFormat | None = None, diff --git a/src/rules-compiler-python/tests/test_async.py b/src/rules-compiler-python/tests/test_async.py new file mode 100644 index 00000000..9829951c --- /dev/null +++ b/src/rules-compiler-python/tests/test_async.py @@ -0,0 +1,104 @@ +""" +Tests for async functionality in the rules compiler. +""" + +import asyncio +import tempfile +from pathlib import Path + +import pytest + +from rules_compiler import ( + RulesCompiler, + compile_rules_async, + count_rules_async, + compute_hash_async, +) + + +@pytest.mark.asyncio +async def test_count_rules_async() -> None: + """Test async rule counting.""" + with tempfile.NamedTemporaryFile(mode="w", suffix=".txt", delete=False) as f: + f.write("! 
Comment\n") + f.write("example.com\n") + f.write("# Another comment\n") + f.write("test.com\n") + f.write("\n") + f.write("ads.net\n") + temp_path = f.name + + try: + count = await count_rules_async(temp_path) + assert count == 3 # Only non-comment, non-empty lines + finally: + Path(temp_path).unlink() + + +@pytest.mark.asyncio +async def test_compute_hash_async() -> None: + """Test async hash computation.""" + with tempfile.NamedTemporaryFile(mode="w", suffix=".txt", delete=False) as f: + f.write("test content\n") + temp_path = f.name + + try: + hash_value = await compute_hash_async(temp_path) + assert len(hash_value) == 96 # SHA-384 produces 96 hex characters + assert all(c in "0123456789abcdef" for c in hash_value.lower()) + finally: + Path(temp_path).unlink() + + +@pytest.mark.asyncio +async def test_compiler_async_method() -> None: + """Test RulesCompiler.compile_async method exists and is callable.""" + compiler = RulesCompiler() + + # Just verify the method exists and is a coroutine function + assert hasattr(compiler, "compile_async") + assert asyncio.iscoroutinefunction(compiler.compile_async) + + +@pytest.mark.asyncio +async def test_compile_rules_async_function() -> None: + """Test compile_rules_async function exists and is callable.""" + assert asyncio.iscoroutinefunction(compile_rules_async) + + +def test_backwards_compatibility() -> None: + """Test that synchronous API still works.""" + from rules_compiler import RulesCompiler, compile_rules + + compiler = RulesCompiler() + + # Verify sync methods exist + assert hasattr(compiler, "compile") + assert callable(compiler.compile) + assert callable(compile_rules) + + +@pytest.mark.asyncio +async def test_parallel_async_operations() -> None: + """Test that multiple async operations can run in parallel.""" + # Create test files + test_files = [] + for i in range(3): + with tempfile.NamedTemporaryFile(mode="w", suffix=".txt", delete=False) as f: + f.write(f"rule{i}.com\n") + test_files.append(f.name) + + try: + # Run multiple async operations in parallel + tasks = [count_rules_async(path) for path in test_files] + results = await asyncio.gather(*tasks) + + assert len(results) == 3 + assert all(count == 1 for count in results) + finally: + for path in test_files: + Path(path).unlink() + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) From 546450be1fb017c3adef1f36ecc33bcea178822c Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sat, 27 Dec 2025 09:56:52 +0000 Subject: [PATCH 3/3] Changes before error encountered Co-authored-by: jaypatrick <1800595+jaypatrick@users.noreply.github.com> --- src/rules-compiler-rust/Cargo.toml | 4 + src/rules-compiler-rust/src/compiler.rs | 223 ++++++++++++++++++++++++ 2 files changed, 227 insertions(+) diff --git a/src/rules-compiler-rust/Cargo.toml b/src/rules-compiler-rust/Cargo.toml index e5a17f17..f147791d 100644 --- a/src/rules-compiler-rust/Cargo.toml +++ b/src/rules-compiler-rust/Cargo.toml @@ -19,6 +19,9 @@ name = "rules-compiler" path = "src/main.rs" [dependencies] +# Async runtime +tokio = { workspace = true, features = ["fs", "process", "io-util"] } + # Serialization serde = { workspace = true } serde_json = { workspace = true } @@ -50,3 +53,4 @@ rustc_version_runtime = { workspace = true } [dev-dependencies] tempfile = { workspace = true } +tokio = { workspace = true, features = ["test-util", "macros"] } diff --git a/src/rules-compiler-rust/src/compiler.rs b/src/rules-compiler-rust/src/compiler.rs index 6e5dcffd..099b6ac1 
100644 --- a/src/rules-compiler-rust/src/compiler.rs +++ b/src/rules-compiler-rust/src/compiler.rs @@ -365,6 +365,32 @@ pub fn count_rules>(path: P) -> usize { .count() } +/// Asynchronously count non-empty, non-comment lines in a file. +/// +/// This async version provides better performance for I/O-bound operations. +/// Lines starting with `!` or `#` are considered comments. +/// +/// # Errors +/// +/// Returns an error if the file can't be read. +pub async fn count_rules_async>(path: P) -> Result { + use tokio::io::{AsyncBufReadExt, BufReader}; + + let file = tokio::fs::File::open(path.as_ref()).await?; + let reader = BufReader::new(file); + let mut lines = reader.lines(); + let mut count = 0; + + while let Some(line) = lines.next_line().await? { + let trimmed = line.trim(); + if !trimmed.is_empty() && !trimmed.starts_with('!') && !trimmed.starts_with('#') { + count += 1; + } + } + + Ok(count) +} + /// Compute SHA-384 hash of a file. /// /// # Errors @@ -386,6 +412,31 @@ pub fn compute_hash>(path: P) -> Result { Ok(hex::encode(hasher.finalize())) } +/// Asynchronously compute SHA-384 hash of a file. +/// +/// This async version provides better performance for I/O-bound operations. +/// +/// # Errors +/// +/// Returns an error if the file can't be read. +pub async fn compute_hash_async>(path: P) -> Result { + use tokio::io::AsyncReadExt; + + let mut file = tokio::fs::File::open(path.as_ref()).await?; + let mut hasher = Sha384::new(); + let mut buffer = [0u8; 8192]; + + loop { + let bytes_read = file.read(&mut buffer).await?; + if bytes_read == 0 { + break; + } + hasher.update(&buffer[..bytes_read]); + } + + Ok(hex::encode(hasher.finalize())) +} + /// Get compiler command and arguments. fn get_compiler_command(config_path: &str, output_path: &str) -> Result<(String, Vec)> { if let Some(compiler_path) = find_command("hostlist-compiler") { @@ -595,6 +646,178 @@ pub fn compile_rules>( Ok(result) } +/// Asynchronously compile filter rules using hostlist-compiler. +/// +/// This async version provides better performance for I/O-bound operations +/// and allows compilation to be integrated into async applications. +/// +/// # Arguments +/// +/// * `config_path` - Path to the configuration file. +/// * `options` - Compilation options. +/// +/// # Errors +/// +/// Returns an error if compilation fails. 
+pub async fn compile_rules_async>( + config_path: P, + options: &CompileOptions, +) -> Result { + let start = Instant::now(); + let mut result = CompilerResult { + start_time: Utc::now(), + ..Default::default() + }; + + let config_path = tokio::fs::canonicalize(config_path.as_ref()) + .await + .map_err(|e| { + CompilerError::file_system( + format!("resolving config path {}", config_path.as_ref().display()), + e, + ) + })?; + + // Read configuration + let config = read_config(&config_path, options.format)?; + result.config_name = config.name.clone(); + result.config_version = config.version.clone(); + + // Validate if requested + if options.validate { + config.validate()?; + } + + // Determine output path + let output_path = options + .output_path + .clone() + .unwrap_or_else(|| generate_output_path(&config_path)); + result.output_path = output_path.clone(); + + // Convert to JSON if needed (hostlist-compiler only accepts JSON) + let (compile_config_path, temp_config_path) = if config.format() != Some(ConfigFormat::Json) { + let temp_path = + std::env::temp_dir().join(format!("compiler-config-{}.json", uuid::Uuid::new_v4())); + let json = to_json(&config)?; + tokio::fs::write(&temp_path, &json) + .await + .map_err(|e| { + CompilerError::file_system( + format!("writing temp config to {}", temp_path.display()), + e, + ) + })?; + + if options.debug { + eprintln!("[DEBUG] Created temp JSON config: {}", temp_path.display()); + eprintln!("[DEBUG] Config content:\n{json}"); + } + + (temp_path.clone(), Some(temp_path)) + } else { + (config_path.clone(), None) + }; + + // Ensure output directory exists + if let Some(output_dir) = output_path.parent() { + tokio::fs::create_dir_all(output_dir) + .await + .map_err(|e| { + CompilerError::file_system( + format!("creating output directory {}", output_dir.display()), + e, + ) + })?; + } + + // Get compiler command + let (cmd, args) = get_compiler_command( + compile_config_path.to_str().unwrap_or(""), + output_path.to_str().unwrap_or(""), + )?; + + if options.debug { + eprintln!("[DEBUG] Running: {cmd} {}", args.join(" ")); + } + + // Run compilation asynchronously + let output = tokio::process::Command::new(&cmd) + .args(&args) + .current_dir(config_path.parent().unwrap_or(Path::new("."))) + .output() + .await + .map_err(|e| CompilerError::process_execution(format!("{cmd} {}", args.join(" ")), e))?; + + result.stdout = String::from_utf8_lossy(&output.stdout).to_string(); + result.stderr = String::from_utf8_lossy(&output.stderr).to_string(); + + // Clean up temp file + if let Some(temp_path) = temp_config_path { + let _ = tokio::fs::remove_file(temp_path).await; + } + + // Check for compilation failure + if !output.status.success() { + result.error_message = Some(format!( + "compiler exited with code {:?}: {}", + output.status.code(), + result.stderr.trim() + )); + result.end_time = Utc::now(); + result.elapsed_ms = start.elapsed().as_millis() as u64; + return Ok(result); + } + + // Verify output was created + if !tokio::fs::try_exists(&output_path).await.unwrap_or(false) { + result.error_message = Some("output file was not created".to_string()); + result.end_time = Utc::now(); + result.elapsed_ms = start.elapsed().as_millis() as u64; + return Ok(result); + } + + // Calculate statistics asynchronously + result.rule_count = count_rules_async(&output_path).await?; + result.output_hash = compute_hash_async(&output_path).await?; + result.success = true; + + // Copy to rules directory if requested + if options.copy_to_rules { + let rules_dir = 
get_rules_directory(&config_path, options.rules_directory.as_deref()); + tokio::fs::create_dir_all(&rules_dir) + .await + .map_err(|e| { + CompilerError::file_system( + format!("creating rules directory {}", rules_dir.display()), + e, + ) + })?; + + let dest_path = rules_dir.join("adguard_user_filter.txt"); + tokio::fs::copy(&output_path, &dest_path) + .await + .map_err(|e| { + CompilerError::copy_failed( + format!( + "copying {} to {}", + output_path.display(), + dest_path.display() + ), + e, + ) + })?; + + result.copied_to_rules = true; + result.rules_destination = Some(dest_path); + } + + result.end_time = Utc::now(); + result.elapsed_ms = start.elapsed().as_millis() as u64; + + Ok(result) +} + #[cfg(test)] mod tests { use super::*;
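+
+    // Illustrative sketch only (not part of this patch): a tokio-based test for the
+    // async helper added above. Assumes the workspace tokio dependency enables a
+    // runtime feature so #[tokio::test] can execute.
+    #[tokio::test]
+    async fn count_rules_async_skips_comments_and_blanks() {
+        let dir = tempfile::tempdir().unwrap();
+        let path = dir.path().join("rules.txt");
+        std::fs::write(&path, "! comment\nexample.com\n\ntest.com\n").unwrap();
+        // Only the two non-comment, non-empty lines should be counted.
+        assert_eq!(count_rules_async(&path).await.unwrap(), 2);
+    }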